Switch default LLM provider from OpenAI API to Anthropic Claude subscription
- Change default llm_provider from "openai" to "anthropic"
- Update default models: deep_think_llm to claude-sonnet-4-6, quick_think_llm to claude-haiku-4-5
- Update backend_url to Anthropic API endpoint
- Reorder provider lists to show Anthropic first in CLI and docs
- Update README examples and .env.example to reflect new defaults

https://claude.ai/code/session_01AbNbJYL7gHy47BQ9BJJTAv
This commit is contained in:
parent
589b351f2a
commit
eb2e82305c
|
|
@@ -1,6 +1,6 @@
|
|||
# LLM Providers (set the one you use)
|
||||
ANTHROPIC_API_KEY=
|
||||
OPENAI_API_KEY=
|
||||
GOOGLE_API_KEY=
|
||||
ANTHROPIC_API_KEY=
|
||||
XAI_API_KEY=
|
||||
OPENROUTER_API_KEY=
|
||||
|
|
|
|||
|
|
@@ -122,9 +122,9 @@ pip install .
|
|||
TradingAgents supports multiple LLM providers. Set the API key for your chosen provider:
|
||||
|
||||
```bash
|
||||
export ANTHROPIC_API_KEY=... # Anthropic (Claude) - default provider
|
||||
export OPENAI_API_KEY=... # OpenAI (GPT)
|
||||
export GOOGLE_API_KEY=... # Google (Gemini)
|
||||
export ANTHROPIC_API_KEY=... # Anthropic (Claude)
|
||||
export XAI_API_KEY=... # xAI (Grok)
|
||||
export OPENROUTER_API_KEY=... # OpenRouter
|
||||
export ALPHA_VANTAGE_API_KEY=... # Alpha Vantage
|
||||
|
|
@@ -188,9 +188,9 @@ from tradingagents.graph.trading_graph import TradingAgentsGraph
|
|||
from tradingagents.default_config import DEFAULT_CONFIG
|
||||
|
||||
config = DEFAULT_CONFIG.copy()
|
||||
config["llm_provider"] = "openai" # openai, google, anthropic, xai, openrouter, ollama
|
||||
config["deep_think_llm"] = "gpt-5.2" # Model for complex reasoning
|
||||
config["quick_think_llm"] = "gpt-5-mini" # Model for quick tasks
|
||||
config["llm_provider"] = "anthropic" # anthropic, openai, google, xai, openrouter, ollama
|
||||
config["deep_think_llm"] = "claude-sonnet-4-6" # Model for complex reasoning
|
||||
config["quick_think_llm"] = "claude-haiku-4-5" # Model for quick tasks
|
||||
config["max_debate_rounds"] = 2
|
||||
|
||||
ta = TradingAgentsGraph(debug=True, config=config)
|
||||
|
|
|
|||
|
|
@@ -538,10 +538,10 @@ def get_user_selections():
|
|||
)
|
||||
selected_research_depth = select_research_depth()
|
||||
|
||||
# Step 5: OpenAI backend
|
||||
# Step 5: LLM Provider
|
||||
console.print(
|
||||
create_question_box(
|
||||
"Step 5: OpenAI backend", "Select which service to talk to"
|
||||
"Step 5: LLM Provider", "Select which service to talk to"
|
||||
)
|
||||
)
|
||||
selected_llm_provider, backend_url = select_llm_provider()
|
||||
|
|
|
|||
|
|
@@ -263,12 +263,12 @@ def select_deep_thinking_agent(provider) -> str:
|
|||
return choice
|
||||
|
||||
def select_llm_provider() -> tuple[str, str]:
|
||||
"""Select the OpenAI api url using interactive selection."""
|
||||
# Define OpenAI api options with their corresponding endpoints
|
||||
"""Select the LLM provider using interactive selection."""
|
||||
# Define LLM provider options with their corresponding endpoints
|
||||
BASE_URLS = [
|
||||
("Anthropic", "https://api.anthropic.com/"),
|
||||
("OpenAI", "https://api.openai.com/v1"),
|
||||
("Google", "https://generativelanguage.googleapis.com/v1"),
|
||||
("Anthropic", "https://api.anthropic.com/"),
|
||||
("xAI", "https://api.x.ai/v1"),
|
||||
("Openrouter", "https://openrouter.ai/api/v1"),
|
||||
("Ollama", "http://localhost:11434/v1"),
|
||||
|
|
@@ -291,7 +291,7 @@ def select_llm_provider() -> tuple[str, str]:
|
|||
).ask()
|
||||
|
||||
if choice is None:
|
||||
console.print("\n[red]no OpenAI backend selected. Exiting...[/red]")
|
||||
console.print("\n[red]No LLM provider selected. Exiting...[/red]")
|
||||
exit(1)
|
||||
|
||||
display_name, url = choice
|
||||
|
|
|
|||
4
main.py
4
main.py
|
|
@@ -8,8 +8,8 @@ load_dotenv()
|
|||
|
||||
# Create a custom config
|
||||
config = DEFAULT_CONFIG.copy()
|
||||
config["deep_think_llm"] = "gpt-5-mini" # Use a different model
|
||||
config["quick_think_llm"] = "gpt-5-mini" # Use a different model
|
||||
config["deep_think_llm"] = "claude-sonnet-4-6" # Use a different model
|
||||
config["quick_think_llm"] = "claude-haiku-4-5" # Use a different model
|
||||
config["max_debate_rounds"] = 1 # Increase debate rounds
|
||||
|
||||
# Configure data vendors (default uses yfinance, no extra API keys needed)
|
||||
|
|
|
|||
|
|
@@ -8,10 +8,10 @@ DEFAULT_CONFIG = {
|
|||
"dataflows/data_cache",
|
||||
),
|
||||
# LLM settings
|
||||
"llm_provider": "openai",
|
||||
"deep_think_llm": "gpt-5.2",
|
||||
"quick_think_llm": "gpt-5-mini",
|
||||
"backend_url": "https://api.openai.com/v1",
|
||||
"llm_provider": "anthropic",
|
||||
"deep_think_llm": "claude-sonnet-4-6",
|
||||
"quick_think_llm": "claude-haiku-4-5",
|
||||
"backend_url": "https://api.anthropic.com/",
|
||||
# Provider-specific thinking configuration
|
||||
"google_thinking_level": None, # "high", "minimal", etc.
|
||||
"openai_reasoning_effort": None, # "medium", "high", "low"
|
||||
|
|
|
|||
Loading…
Reference in New Issue