Switch default LLM provider from OpenAI API to Anthropic Claude subscription

- Change default llm_provider from "openai" to "anthropic"
- Update default models: deep_think_llm to claude-sonnet-4-6, quick_think_llm to claude-haiku-4-5
- Update backend_url to Anthropic API endpoint
- Reorder provider lists to show Anthropic first in CLI and docs
- Update README examples and .env.example to reflect new defaults

https://claude.ai/code/session_01AbNbJYL7gHy47BQ9BJJTAv
This commit is contained in:
Claude 2026-03-27 03:11:03 +00:00
parent 589b351f2a
commit eb2e82305c
No known key found for this signature in database
6 changed files with 17 additions and 17 deletions

View File

@@ -1,6 +1,6 @@
# LLM Providers (set the one you use) # LLM Providers (set the one you use)
ANTHROPIC_API_KEY=
OPENAI_API_KEY= OPENAI_API_KEY=
GOOGLE_API_KEY= GOOGLE_API_KEY=
ANTHROPIC_API_KEY=
XAI_API_KEY= XAI_API_KEY=
OPENROUTER_API_KEY= OPENROUTER_API_KEY=

View File

@@ -122,9 +122,9 @@ pip install .
TradingAgents supports multiple LLM providers. Set the API key for your chosen provider: TradingAgents supports multiple LLM providers. Set the API key for your chosen provider:
```bash ```bash
export ANTHROPIC_API_KEY=... # Anthropic (Claude) - default provider
export OPENAI_API_KEY=... # OpenAI (GPT) export OPENAI_API_KEY=... # OpenAI (GPT)
export GOOGLE_API_KEY=... # Google (Gemini) export GOOGLE_API_KEY=... # Google (Gemini)
export ANTHROPIC_API_KEY=... # Anthropic (Claude)
export XAI_API_KEY=... # xAI (Grok) export XAI_API_KEY=... # xAI (Grok)
export OPENROUTER_API_KEY=... # OpenRouter export OPENROUTER_API_KEY=... # OpenRouter
export ALPHA_VANTAGE_API_KEY=... # Alpha Vantage export ALPHA_VANTAGE_API_KEY=... # Alpha Vantage
@@ -188,9 +188,9 @@ from tradingagents.graph.trading_graph import TradingAgentsGraph
from tradingagents.default_config import DEFAULT_CONFIG from tradingagents.default_config import DEFAULT_CONFIG
config = DEFAULT_CONFIG.copy() config = DEFAULT_CONFIG.copy()
config["llm_provider"] = "openai" # openai, google, anthropic, xai, openrouter, ollama config["llm_provider"] = "anthropic" # anthropic, openai, google, xai, openrouter, ollama
config["deep_think_llm"] = "gpt-5.2" # Model for complex reasoning config["deep_think_llm"] = "claude-sonnet-4-6" # Model for complex reasoning
config["quick_think_llm"] = "gpt-5-mini" # Model for quick tasks config["quick_think_llm"] = "claude-haiku-4-5" # Model for quick tasks
config["max_debate_rounds"] = 2 config["max_debate_rounds"] = 2
ta = TradingAgentsGraph(debug=True, config=config) ta = TradingAgentsGraph(debug=True, config=config)

View File

@@ -538,10 +538,10 @@ def get_user_selections():
) )
selected_research_depth = select_research_depth() selected_research_depth = select_research_depth()
# Step 5: OpenAI backend # Step 5: LLM Provider
console.print( console.print(
create_question_box( create_question_box(
"Step 5: OpenAI backend", "Select which service to talk to" "Step 5: LLM Provider", "Select which service to talk to"
) )
) )
selected_llm_provider, backend_url = select_llm_provider() selected_llm_provider, backend_url = select_llm_provider()

View File

@@ -263,12 +263,12 @@ def select_deep_thinking_agent(provider) -> str:
return choice return choice
def select_llm_provider() -> tuple[str, str]: def select_llm_provider() -> tuple[str, str]:
"""Select the OpenAI api url using interactive selection.""" """Select the LLM provider using interactive selection."""
# Define OpenAI api options with their corresponding endpoints # Define LLM provider options with their corresponding endpoints
BASE_URLS = [ BASE_URLS = [
("Anthropic", "https://api.anthropic.com/"),
("OpenAI", "https://api.openai.com/v1"), ("OpenAI", "https://api.openai.com/v1"),
("Google", "https://generativelanguage.googleapis.com/v1"), ("Google", "https://generativelanguage.googleapis.com/v1"),
("Anthropic", "https://api.anthropic.com/"),
("xAI", "https://api.x.ai/v1"), ("xAI", "https://api.x.ai/v1"),
("Openrouter", "https://openrouter.ai/api/v1"), ("Openrouter", "https://openrouter.ai/api/v1"),
("Ollama", "http://localhost:11434/v1"), ("Ollama", "http://localhost:11434/v1"),
@@ -291,7 +291,7 @@ def select_llm_provider() -> tuple[str, str]:
).ask() ).ask()
if choice is None: if choice is None:
console.print("\n[red]no OpenAI backend selected. Exiting...[/red]") console.print("\n[red]No LLM provider selected. Exiting...[/red]")
exit(1) exit(1)
display_name, url = choice display_name, url = choice

View File

@@ -8,8 +8,8 @@ load_dotenv()
# Create a custom config # Create a custom config
config = DEFAULT_CONFIG.copy() config = DEFAULT_CONFIG.copy()
config["deep_think_llm"] = "gpt-5-mini" # Use a different model config["deep_think_llm"] = "claude-sonnet-4-6" # Use a different model
config["quick_think_llm"] = "gpt-5-mini" # Use a different model config["quick_think_llm"] = "claude-haiku-4-5" # Use a different model
config["max_debate_rounds"] = 1 # Increase debate rounds config["max_debate_rounds"] = 1 # Increase debate rounds
# Configure data vendors (default uses yfinance, no extra API keys needed) # Configure data vendors (default uses yfinance, no extra API keys needed)

View File

@@ -8,10 +8,10 @@ DEFAULT_CONFIG = {
"dataflows/data_cache", "dataflows/data_cache",
), ),
# LLM settings # LLM settings
"llm_provider": "openai", "llm_provider": "anthropic",
"deep_think_llm": "gpt-5.2", "deep_think_llm": "claude-sonnet-4-6",
"quick_think_llm": "gpt-5-mini", "quick_think_llm": "claude-haiku-4-5",
"backend_url": "https://api.openai.com/v1", "backend_url": "https://api.anthropic.com/",
# Provider-specific thinking configuration # Provider-specific thinking configuration
"google_thinking_level": None, # "high", "minimal", etc. "google_thinking_level": None, # "high", "minimal", etc.
"openai_reasoning_effort": None, # "medium", "high", "low" "openai_reasoning_effort": None, # "medium", "high", "low"