diff --git a/.env.example b/.env.example index 1328b838..a7c0259f 100644 --- a/.env.example +++ b/.env.example @@ -1,3 +1,4 @@ +# Default provider is `codex_oauth` and does not require OPENAI_API_KEY. # LLM Providers (set the one you use) OPENAI_API_KEY= GOOGLE_API_KEY= diff --git a/.gitignore b/.gitignore index 9a2904a9..3d76faa6 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,5 @@ +reports +results # Byte-compiled / optimized / DLL files __pycache__/ *.py[codz] diff --git a/README.md b/README.md index 34310010..55264e26 100644 --- a/README.md +++ b/README.md @@ -116,9 +116,34 @@ Install dependencies: pip install -r requirements.txt ``` -### Required APIs +### Authentication / APIs -TradingAgents supports multiple LLM providers. Set the API key for your chosen provider: +TradingAgents supports multiple LLM providers. + +#### Option A (Default): ChatGPT OAuth for Codex models (no `OPENAI_API_KEY`) + +If you have a ChatGPT Plus/Pro account, login once: + +```bash +tradingagents auth login +# If local callback port 1455 is busy: +tradingagents auth login --manual +``` + +Check status / logout: + +```bash +tradingagents auth status +tradingagents auth logout +``` + +`codex_oauth` currently uses a Codex model whitelist: +- `gpt-5.4` +- `gpt-5.2` +- `gpt-5.3-codex` +- `gpt-5.2-codex` + +#### Option B: API keys for other providers ```bash export OPENAI_API_KEY=... # OpenAI (GPT) @@ -162,7 +187,7 @@ An interface will appear showing results as they load, letting you track the age ### Implementation Details -We built TradingAgents with LangGraph to ensure flexibility and modularity. The framework supports multiple LLM providers: OpenAI, Google, Anthropic, xAI, OpenRouter, and Ollama. +We built TradingAgents with LangGraph to ensure flexibility and modularity. The framework supports multiple LLM providers: Codex OAuth (ChatGPT Plus/Pro), OpenAI, Google, Anthropic, xAI, OpenRouter, and Ollama. 
### Python Usage @@ -186,9 +211,9 @@ from tradingagents.graph.trading_graph import TradingAgentsGraph from tradingagents.default_config import DEFAULT_CONFIG config = DEFAULT_CONFIG.copy() -config["llm_provider"] = "openai" # openai, google, anthropic, xai, openrouter, ollama -config["deep_think_llm"] = "gpt-5.2" # Model for complex reasoning -config["quick_think_llm"] = "gpt-5-mini" # Model for quick tasks +config["llm_provider"] = "codex_oauth" # codex_oauth, openai, google, anthropic, xai, openrouter, ollama +config["deep_think_llm"] = "gpt-5.4" +config["quick_think_llm"] = "gpt-5.2" config["max_debate_rounds"] = 2 ta = TradingAgentsGraph(debug=True, config=config) diff --git a/cli/main.py b/cli/main.py index fb97d189..13188654 100644 --- a/cli/main.py +++ b/cli/main.py @@ -23,8 +23,6 @@ from rich import box from rich.align import Align from rich.rule import Rule -from tradingagents.graph.trading_graph import TradingAgentsGraph -from tradingagents.default_config import DEFAULT_CONFIG from cli.models import AnalystType from cli.utils import * from cli.announcements import fetch_announcements, display_announcements @@ -37,6 +35,8 @@ app = typer.Typer( help="TradingAgents CLI: Multi-Agents LLM Financial Trading Framework", add_completion=True, # Enable shell completion ) +auth_app = typer.Typer(help="Manage ChatGPT OAuth credentials for codex_oauth.") +app.add_typer(auth_app, name="auth") # Create a deque to store recent messages with a maximum length @@ -536,10 +536,10 @@ def get_user_selections(): ) selected_research_depth = select_research_depth() - # Step 5: OpenAI backend + # Step 5: LLM backend console.print( create_question_box( - "Step 5: OpenAI backend", "Select which service to talk to" + "Step 5: LLM backend", "Select which service to talk to" ) ) selected_llm_provider, backend_url = select_llm_provider() @@ -566,11 +566,11 @@ def get_user_selections(): ) ) thinking_level = ask_gemini_thinking_config() - elif provider_lower == "openai": + elif 
provider_lower in ("openai", "codex_oauth"): console.print( create_question_box( "Step 7: Reasoning Effort", - "Configure OpenAI reasoning effort level" + "Configure reasoning effort level" ) ) reasoning_effort = ask_openai_reasoning_effort() @@ -897,6 +897,9 @@ def format_tool_args(args, max_length=80) -> str: return result def run_analysis(): + from tradingagents.default_config import DEFAULT_CONFIG + from tradingagents.graph.trading_graph import TradingAgentsGraph + # First get all user selections selections = get_user_selections() @@ -1167,6 +1170,110 @@ def run_analysis(): display_complete_report(final_state) +def _format_ms(ms: int) -> str: + if not ms: + return "unknown" + return datetime.datetime.fromtimestamp(ms / 1000, tz=datetime.timezone.utc).isoformat() + + +def _load_codex_oauth_modules(): + try: + from codex_oauth.auth import ( + decode_jwt_payload, + exchange_authorization_code, + extract_chatgpt_account_id, + login_manual, + login_via_browser, + ) + from codex_oauth.exceptions import NotAuthenticatedError, OAuthFlowError + from codex_oauth.store import AuthStore, OAuthCredentials + except ModuleNotFoundError as exc: + console.print( + "[red]Missing dependency `langchain-codex-oauth`. 
" + "Install project dependencies first.[/red]" + ) + raise typer.Exit(code=1) from exc + + return { + "decode_jwt_payload": decode_jwt_payload, + "exchange_authorization_code": exchange_authorization_code, + "extract_chatgpt_account_id": extract_chatgpt_account_id, + "login_manual": login_manual, + "login_via_browser": login_via_browser, + "NotAuthenticatedError": NotAuthenticatedError, + "OAuthFlowError": OAuthFlowError, + "AuthStore": AuthStore, + "OAuthCredentials": OAuthCredentials, + } + + +@auth_app.command("login") +def auth_login( + manual: bool = typer.Option(False, "--manual", help="Paste redirect URL/code manually."), + timeout_s: int = typer.Option(180, "--timeout-s", min=30, help="Browser callback timeout in seconds."), +): + """Login via ChatGPT OAuth and save local credentials.""" + modules = _load_codex_oauth_modules() + try: + if manual: + code, verifier = modules["login_manual"]() + else: + result = modules["login_via_browser"](timeout_s=timeout_s) + if not result: + raise modules["OAuthFlowError"]( + "OAuth callback timed out. Retry with --manual or try again." + ) + code, verifier = result + + token = modules["exchange_authorization_code"](code=code, verifier=verifier) + payload = modules["decode_jwt_payload"](token.access) + if not payload: + raise modules["OAuthFlowError"]("Received invalid access token.") + account_id = modules["extract_chatgpt_account_id"](payload) + if not account_id: + raise modules["OAuthFlowError"]( + "Failed to extract chatgpt_account_id from access token." + ) + + modules["AuthStore"]().save( + modules["OAuthCredentials"]( + access=token.access, + refresh=token.refresh, + expires=token.expires_at_ms, + account_id=account_id, + ) + ) + except modules["OAuthFlowError"] as exc: + console.print(f"[red]{exc}[/red]") + raise typer.Exit(code=2) + + console.print("[green]Login successful. 
OAuth credentials saved.[/green]") + + +@auth_app.command("status") +def auth_status(): + """Show current OAuth credential status.""" + modules = _load_codex_oauth_modules() + store = modules["AuthStore"]() + try: + creds = store.load() + except modules["NotAuthenticatedError"] as exc: + console.print(f"[red]{exc}[/red]") + raise typer.Exit(code=1) from exc + + console.print("[green]Logged in: yes[/green]") + console.print(f"Account id: {creds.account_id}") + console.print(f"Expires (UTC): {_format_ms(creds.expires)}") + + +@auth_app.command("logout") +def auth_logout(): + """Delete local OAuth credentials.""" + modules = _load_codex_oauth_modules() + modules["AuthStore"]().delete() + console.print("[green]Logged out.[/green]") + + @app.command() def analyze(): run_analysis() diff --git a/cli/utils.py b/cli/utils.py index aa097fb5..08d01ad6 100644 --- a/cli/utils.py +++ b/cli/utils.py @@ -127,6 +127,12 @@ def select_shallow_thinking_agent(provider) -> str: # Define shallow thinking llm engine options with their corresponding model names SHALLOW_AGENT_OPTIONS = { + "codex_oauth": [ + ("GPT-5.4", "gpt-5.4"), + ("GPT-5.2", "gpt-5.2"), + ("GPT-5.3-Codex", "gpt-5.3-codex"), + ("GPT-5.2-Codex", "gpt-5.2-codex"), + ], "openai": [ ("GPT-5 Mini - Cost-optimized reasoning", "gpt-5-mini"), ("GPT-5 Nano - Ultra-fast, high-throughput", "gpt-5-nano"), @@ -192,6 +198,12 @@ def select_deep_thinking_agent(provider) -> str: # Define deep thinking llm engine options with their corresponding model names DEEP_AGENT_OPTIONS = { + "codex_oauth": [ + ("GPT-5.4", "gpt-5.4"), + ("GPT-5.2", "gpt-5.2"), + ("GPT-5.3-Codex", "gpt-5.3-codex"), + ("GPT-5.2-Codex", "gpt-5.2-codex"), + ], "openai": [ ("GPT-5.2 - Latest flagship", "gpt-5.2"), ("GPT-5.1 - Flexible reasoning", "gpt-5.1"), @@ -253,22 +265,22 @@ def select_deep_thinking_agent(provider) -> str: return choice def select_llm_provider() -> tuple[str, str]: - """Select the OpenAI api url using interactive selection.""" - # Define OpenAI api options 
with their corresponding endpoints + """Select LLM provider and backend URL using interactive selection.""" BASE_URLS = [ - ("OpenAI", "https://api.openai.com/v1"), - ("Google", "https://generativelanguage.googleapis.com/v1"), - ("Anthropic", "https://api.anthropic.com/"), - ("xAI", "https://api.x.ai/v1"), - ("Openrouter", "https://openrouter.ai/api/v1"), - ("Ollama", "http://localhost:11434/v1"), + ("Codex OAuth (ChatGPT Plus/Pro)", "codex_oauth", "https://chatgpt.com/backend-api"), + ("OpenAI", "openai", "https://api.openai.com/v1"), + ("Google", "google", "https://generativelanguage.googleapis.com/v1"), + ("Anthropic", "anthropic", "https://api.anthropic.com/"), + ("xAI", "xai", "https://api.x.ai/v1"), + ("Openrouter", "openrouter", "https://openrouter.ai/api/v1"), + ("Ollama", "ollama", "http://localhost:11434/v1"), ] choice = questionary.select( "Select your LLM Provider:", choices=[ - questionary.Choice(display, value=(display, value)) - for display, value in BASE_URLS + questionary.Choice(display, value=(display, provider, value)) + for display, provider, value in BASE_URLS ], instruction="\n- Use arrow keys to navigate\n- Press Enter to select", style=questionary.Style( @@ -281,13 +293,13 @@ def select_llm_provider() -> tuple[str, str]: ).ask() if choice is None: - console.print("\n[red]no OpenAI backend selected. Exiting...[/red]") + console.print("\n[red]No LLM backend selected. 
Exiting...[/red]") exit(1) - display_name, url = choice + display_name, provider_name, url = choice print(f"You selected: {display_name}\tURL: {url}") - return display_name, url + return provider_name, url def ask_openai_reasoning_effort() -> str: diff --git a/pyproject.toml b/pyproject.toml index 9213d7f6..3ef437b4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,13 +9,13 @@ description = "TradingAgents: Multi-Agents LLM Financial Trading Framework" readme = "README.md" requires-python = ">=3.10" dependencies = [ - "langchain-core>=0.3.81", + "langchain-core>=1.0.0,<2.0.0", "backtrader>=1.9.78.123", "chainlit>=2.5.5", - "langchain-anthropic>=0.3.15", - "langchain-experimental>=0.3.4", + "langchain-anthropic>=1.0.0,<2.0.0", "langchain-google-genai>=2.1.5", - "langchain-openai>=0.3.23", + "langchain-openai>=1.0.0,<2.0.0", + "langchain-codex-oauth>=1.0,<1.1", "langgraph>=0.4.8", "pandas>=2.3.0", "parsel>=1.10.0", @@ -38,3 +38,7 @@ tradingagents = "cli.main:app" [tool.setuptools.packages.find] include = ["tradingagents*", "cli*"] + +[tool.pytest.ini_options] +pythonpath = ["."] +testpaths = ["tests"] diff --git a/requirements.txt b/requirements.txt index 9e51ed98..c3f28e45 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ typing-extensions langchain-core langchain-openai -langchain-experimental +langchain-codex-oauth pandas yfinance stockstats diff --git a/tradingagents/default_config.py b/tradingagents/default_config.py index ecf0dc29..8c7de36e 100644 --- a/tradingagents/default_config.py +++ b/tradingagents/default_config.py @@ -8,10 +8,10 @@ DEFAULT_CONFIG = { "dataflows/data_cache", ), # LLM settings - "llm_provider": "openai", - "deep_think_llm": "gpt-5.2", - "quick_think_llm": "gpt-5-mini", - "backend_url": "https://api.openai.com/v1", + "llm_provider": "codex_oauth", + "deep_think_llm": "gpt-5.4", + "quick_think_llm": "gpt-5.2", + "backend_url": "https://chatgpt.com/backend-api", # Provider-specific thinking configuration 
"google_thinking_level": None, # "high", "minimal", etc. "openai_reasoning_effort": None, # "medium", "high", "low" diff --git a/tradingagents/graph/trading_graph.py b/tradingagents/graph/trading_graph.py index 44ecca0c..4adc6851 100644 --- a/tradingagents/graph/trading_graph.py +++ b/tradingagents/graph/trading_graph.py @@ -140,7 +140,7 @@ class TradingAgentsGraph: if thinking_level: kwargs["thinking_level"] = thinking_level - elif provider == "openai": + elif provider in ("openai", "codex_oauth"): reasoning_effort = self.config.get("openai_reasoning_effort") if reasoning_effort: kwargs["reasoning_effort"] = reasoning_effort diff --git a/tradingagents/llm_clients/codex_oauth_client.py b/tradingagents/llm_clients/codex_oauth_client.py new file mode 100644 index 00000000..68a89709 --- /dev/null +++ b/tradingagents/llm_clients/codex_oauth_client.py @@ -0,0 +1,43 @@ +from typing import Any, Optional + +from .base_client import BaseLLMClient +from .validators import validate_model + + +class CodexOAuthClient(BaseLLMClient): + """Client for ChatGPT OAuth Codex models.""" + + def __init__(self, model: str, base_url: Optional[str] = None, **kwargs): + super().__init__(model, base_url, **kwargs) + + def get_llm(self) -> Any: + """Return configured ChatCodexOAuth instance.""" + try: + from langchain_codex_oauth import ChatCodexOAuth + except ModuleNotFoundError as exc: + raise ModuleNotFoundError( + "langchain-codex-oauth is required for llm_provider='codex_oauth'. " + "Install dependencies and run `tradingagents auth login`." 
+ ) from exc + + llm_kwargs = {"model": self.model} + + if self.base_url: + llm_kwargs["base_url"] = self.base_url + + for key in ( + "timeout", + "max_retries", + "reasoning_effort", + "max_tokens", + "temperature", + "callbacks", + ): + if key in self.kwargs: + llm_kwargs[key] = self.kwargs[key] + + return ChatCodexOAuth(**llm_kwargs) + + def validate_model(self) -> bool: + """Validate model for codex_oauth.""" + return validate_model("codex_oauth", self.model) diff --git a/tradingagents/llm_clients/factory.py b/tradingagents/llm_clients/factory.py index 028c88a2..27cc424d 100644 --- a/tradingagents/llm_clients/factory.py +++ b/tradingagents/llm_clients/factory.py @@ -1,9 +1,6 @@ from typing import Optional from .base_client import BaseLLMClient -from .openai_client import OpenAIClient -from .anthropic_client import AnthropicClient -from .google_client import GoogleClient def create_llm_client( @@ -15,7 +12,7 @@ def create_llm_client( """Create an LLM client for the specified provider. Args: - provider: LLM provider (openai, anthropic, google, xai, ollama, openrouter) + provider: LLM provider (openai, codex_oauth, anthropic, google, xai, ollama, openrouter) model: Model name/identifier base_url: Optional base URL for API endpoint **kwargs: Additional provider-specific arguments @@ -29,15 +26,57 @@ def create_llm_client( provider_lower = provider.lower() if provider_lower in ("openai", "ollama", "openrouter"): + try: + from .openai_client import OpenAIClient + except ModuleNotFoundError as exc: + raise ModuleNotFoundError( + "Missing dependency for OpenAI-compatible providers. " + "Install `langchain-openai`." + ) from exc + return OpenAIClient(model, base_url, provider=provider_lower, **kwargs) + if provider_lower == "codex_oauth": + try: + from .codex_oauth_client import CodexOAuthClient + except ModuleNotFoundError as exc: + raise ModuleNotFoundError( + "Missing dependency for codex_oauth provider. " + "Install `langchain-codex-oauth`." 
+ ) from exc + + return CodexOAuthClient(model, base_url, **kwargs) + if provider_lower == "xai": + try: + from .openai_client import OpenAIClient + except ModuleNotFoundError as exc: + raise ModuleNotFoundError( + "Missing dependency for xAI provider. Install `langchain-openai`." + ) from exc + return OpenAIClient(model, base_url, provider="xai", **kwargs) if provider_lower == "anthropic": + try: + from .anthropic_client import AnthropicClient + except ModuleNotFoundError as exc: + raise ModuleNotFoundError( + "Missing dependency for Anthropic provider. " + "Install `langchain-anthropic`." + ) from exc + return AnthropicClient(model, base_url, **kwargs) if provider_lower == "google": + try: + from .google_client import GoogleClient + except ModuleNotFoundError as exc: + raise ModuleNotFoundError( + "Missing dependency for Google provider. " + "Install `langchain-google-genai`." + ) from exc + return GoogleClient(model, base_url, **kwargs) raise ValueError(f"Unsupported LLM provider: {provider}") diff --git a/tradingagents/llm_clients/validators.py b/tradingagents/llm_clients/validators.py index 3c0f2290..dd8ba049 100644 --- a/tradingagents/llm_clients/validators.py +++ b/tradingagents/llm_clients/validators.py @@ -26,6 +26,13 @@ VALID_MODELS = { "gpt-4o", "gpt-4o-mini", ], + "codex_oauth": [ + # Codex chat models via ChatGPT OAuth backend + "gpt-5.4", + "gpt-5.2", + "gpt-5.3-codex", + "gpt-5.2-codex", + ], "anthropic": [ # Claude 4.5 series (2025) "claude-opus-4-5",