From fcca3236e9acb9f5e0575ae6b90acd6d2284b6e3 Mon Sep 17 00:00:00 2001 From: Jiaxu Liu Date: Mon, 23 Mar 2026 14:12:01 +0000 Subject: [PATCH] update --- cli/main.py | 12 +++++------- cli/utils.py | 8 ++++++-- tradingagents/default_config.py | 3 --- 3 files changed, 11 insertions(+), 12 deletions(-) diff --git a/cli/main.py b/cli/main.py index c0048bfb..57e42745 100644 --- a/cli/main.py +++ b/cli/main.py @@ -546,10 +546,8 @@ def get_user_selections(): ) selected_llm_provider, backend_url = select_llm_provider() - provider_id = selected_llm_provider.lower() - # GitHub Copilot: run OAuth before proceeding - if provider_id == "copilot": + if selected_llm_provider.lower() == "copilot": console.print( create_question_box( "Step 5b: Copilot Auth", @@ -565,15 +563,15 @@ def get_user_selections(): "Step 6: Thinking Agents", "Select your thinking agents for analysis" ) ) - selected_shallow_thinker = select_shallow_thinking_agent(provider_id) - selected_deep_thinker = select_deep_thinking_agent(provider_id) + selected_shallow_thinker = select_shallow_thinking_agent(selected_llm_provider) + selected_deep_thinker = select_deep_thinking_agent(selected_llm_provider) # Step 7: Provider-specific thinking configuration thinking_level = None reasoning_effort = None anthropic_effort = None - provider_lower = provider_id + provider_lower = selected_llm_provider.lower() if provider_lower == "google": console.print( create_question_box( @@ -604,7 +602,7 @@ def get_user_selections(): "analysis_date": analysis_date, "analysts": selected_analysts, "research_depth": selected_research_depth, - "llm_provider": provider_id, + "llm_provider": selected_llm_provider.lower(), "backend_url": backend_url, "shallow_thinker": selected_shallow_thinker, "deep_thinker": selected_deep_thinker, diff --git a/cli/utils.py b/cli/utils.py index c5e77e0e..db275806 100644 --- a/cli/utils.py +++ b/cli/utils.py @@ -137,7 +137,9 @@ def select_research_depth() -> int: def select_shallow_thinking_agent(provider) -> 
str: """Select shallow thinking llm engine using an interactive selection.""" + # Define shallow thinking llm engine options with their corresponding model names # Ordering: medium → light → heavy (balanced first for quick tasks) + # Within same tier, newer models first SHALLOW_AGENT_OPTIONS = { "openai": [ ("GPT-5 Mini - Balanced speed, cost, and capability", "gpt-5-mini"), @@ -170,7 +172,7 @@ def select_shallow_thinking_agent(provider) -> str: ("GPT-OSS:latest (20B, local)", "gpt-oss:latest"), ("GLM-4.7-Flash:latest (30B, local)", "glm-4.7-flash:latest"), ], - "copilot": [], # populated dynamically by fetch_copilot_models() + "copilot": [], } if provider.lower() == "copilot": @@ -212,7 +214,9 @@ def select_shallow_thinking_agent(provider) -> str: def select_deep_thinking_agent(provider) -> str: """Select deep thinking llm engine using an interactive selection.""" - # Ordering: heavy → medium → light (most capable first for deep tasks) + # Define deep thinking llm engine options with their corresponding model names + # Ordering: heavy → medium → light (most capable first for deep tasks) + # Within same tier, newer models first DEEP_AGENT_OPTIONS = { "openai": [ ("GPT-5.4 - Latest frontier, 1M context", "gpt-5.4"), diff --git a/tradingagents/default_config.py b/tradingagents/default_config.py index 5c3c1d87..898e1e1e 100644 --- a/tradingagents/default_config.py +++ b/tradingagents/default_config.py @@ -8,9 +8,6 @@ DEFAULT_CONFIG = { "dataflows/data_cache", ), # LLM settings - # Set llm_provider to "copilot" to use GitHub Copilot (no explicit API key - # needed — authenticates via `gh auth token` from the GitHub CLI). - # Available models are fetched dynamically from the Copilot inference API. "llm_provider": "openai", "deep_think_llm": "gpt-5.2", "quick_think_llm": "gpt-5-mini",