This commit is contained in:
Charlie Tonneslan 2026-03-22 12:50:38 -04:00 committed by GitHub
commit 1aac79d406
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
1 changed file with 25 additions and 0 deletions

View File

@ -158,6 +158,10 @@ def select_shallow_thinking_agent(provider) -> str:
"openrouter": [
("NVIDIA Nemotron 3 Nano 30B (free)", "nvidia/nemotron-3-nano-30b-a3b:free"),
("Z.AI GLM 4.5 Air (free)", "z-ai/glm-4.5-air:free"),
("Anthropic Claude Sonnet 4 (paid)", "anthropic/claude-sonnet-4"),
("Google Gemini 2.5 Flash (paid)", "google/gemini-2.5-flash"),
("OpenAI GPT-5 Mini (paid)", "openai/gpt-5-mini"),
("Enter custom model ID", "__custom__"),
],
"ollama": [
("Qwen3:latest (8B, local)", "qwen3:latest"),
@ -188,9 +192,23 @@ def select_shallow_thinking_agent(provider) -> str:
)
exit(1)
if choice == "__custom__":
choice = _prompt_custom_model_id("anthropic/claude-sonnet-4")
return choice
def _prompt_custom_model_id(example: str) -> str:
    """Ask the user to type a custom model ID; abort the process when none is given.

    Args:
        example: A sample model ID embedded in the prompt text to guide the user.

    Returns:
        The non-empty model ID string entered by the user.
    """
    entered = questionary.text(f"Enter your model ID (e.g. {example}):").ask()
    # Guard clause: an empty string (or None when the prompt is cancelled)
    # means we cannot proceed, so report and terminate.
    if entered:
        return entered
    console.print("\n[red]No model entered. Exiting...[/red]")
    exit(1)
def select_deep_thinking_agent(provider) -> str:
"""Select deep thinking llm engine using an interactive selection."""
@ -225,6 +243,10 @@ def select_deep_thinking_agent(provider) -> str:
"openrouter": [
("Z.AI GLM 4.5 Air (free)", "z-ai/glm-4.5-air:free"),
("NVIDIA Nemotron 3 Nano 30B (free)", "nvidia/nemotron-3-nano-30b-a3b:free"),
("Anthropic Claude Sonnet 4 (paid)", "anthropic/claude-sonnet-4"),
("Google Gemini 2.5 Pro (paid)", "google/gemini-2.5-pro"),
("OpenAI GPT-5.2 (paid)", "openai/gpt-5.2"),
("Enter custom model ID", "__custom__"),
],
"ollama": [
("GLM-4.7-Flash:latest (30B, local)", "glm-4.7-flash:latest"),
@ -253,6 +275,9 @@ def select_deep_thinking_agent(provider) -> str:
console.print("\n[red]No deep thinking llm engine selected. Exiting...[/red]")
exit(1)
if choice == "__custom__":
choice = _prompt_custom_model_id("openai/gpt-5.2")
return choice
def select_llm_provider() -> tuple[str, str]: