This commit is contained in:
Jiaxu Liu 2026-03-23 14:12:01 +00:00
parent a84c69e42f
commit fcca3236e9
3 changed files with 11 additions and 12 deletions

View File

@@ -546,10 +546,8 @@ def get_user_selections():
)
selected_llm_provider, backend_url = select_llm_provider()
provider_id = selected_llm_provider.lower()
# GitHub Copilot: run OAuth before proceeding
if provider_id == "copilot":
if selected_llm_provider.lower() == "copilot":
console.print(
create_question_box(
"Step 5b: Copilot Auth",
@@ -565,15 +563,15 @@ def get_user_selections():
"Step 6: Thinking Agents", "Select your thinking agents for analysis"
)
)
selected_shallow_thinker = select_shallow_thinking_agent(provider_id)
selected_deep_thinker = select_deep_thinking_agent(provider_id)
selected_shallow_thinker = select_shallow_thinking_agent(selected_llm_provider)
selected_deep_thinker = select_deep_thinking_agent(selected_llm_provider)
# Step 7: Provider-specific thinking configuration
thinking_level = None
reasoning_effort = None
anthropic_effort = None
provider_lower = provider_id
provider_lower = selected_llm_provider.lower()
if provider_lower == "google":
console.print(
create_question_box(
@@ -604,7 +602,7 @@ def get_user_selections():
"analysis_date": analysis_date,
"analysts": selected_analysts,
"research_depth": selected_research_depth,
"llm_provider": provider_id,
"llm_provider": selected_llm_provider.lower(),
"backend_url": backend_url,
"shallow_thinker": selected_shallow_thinker,
"deep_thinker": selected_deep_thinker,

View File

@@ -137,7 +137,9 @@ def select_research_depth() -> int:
def select_shallow_thinking_agent(provider) -> str:
"""Select shallow thinking llm engine using an interactive selection."""
# Define shallow thinking llm engine options with their corresponding model names
# Ordering: medium → light → heavy (balanced first for quick tasks)
# Within same tier, newer models first
SHALLOW_AGENT_OPTIONS = {
"openai": [
("GPT-5 Mini - Balanced speed, cost, and capability", "gpt-5-mini"),
@@ -170,7 +172,7 @@ def select_shallow_thinking_agent(provider) -> str:
("GPT-OSS:latest (20B, local)", "gpt-oss:latest"),
("GLM-4.7-Flash:latest (30B, local)", "glm-4.7-flash:latest"),
],
"copilot": [], # populated dynamically by fetch_copilot_models()
"copilot": [],
}
if provider.lower() == "copilot":
@@ -212,7 +214,9 @@ def select_shallow_thinking_agent(provider) -> str:
def select_deep_thinking_agent(provider) -> str:
"""Select deep thinking llm engine using an interactive selection."""
# Ordering: heavy → medium → light (most capable first for deep tasks)
# Define deep thinking llm engine options with their corresponding model names
# Ordering: heavy → medium → light (most capable first for deep tasks)
# Within same tier, newer models first
DEEP_AGENT_OPTIONS = {
"openai": [
("GPT-5.4 - Latest frontier, 1M context", "gpt-5.4"),

View File

@@ -8,9 +8,6 @@ DEFAULT_CONFIG = {
"dataflows/data_cache",
),
# LLM settings
# Set llm_provider to "copilot" to use GitHub Copilot (no explicit API key
# needed — authenticates via `gh auth token` from the GitHub CLI).
# Available models are fetched dynamically from the Copilot inference API.
"llm_provider": "openai",
"deep_think_llm": "gpt-5.2",
"quick_think_llm": "gpt-5-mini",