Add popular paid models and custom model input for OpenRouter
The OpenRouter provider listed only two free models, so users with paid accounts couldn't access their models through the CLI (#337). Added popular paid models (Claude Sonnet 4, Gemini 2.5, GPT-5) to both the quick-thinking and deep-thinking model lists, and added an 'Enter custom model ID' option that prompts for any OpenRouter model ID, so users aren't limited to the hardcoded list. Fixes #337
This commit is contained in:
parent
f362a160c3
commit
9c7340ed36
24
cli/utils.py
24
cli/utils.py
|
|
@@ -158,6 +158,10 @@ def select_shallow_thinking_agent(provider) -> str:
|
|||
"openrouter": [
|
||||
("NVIDIA Nemotron 3 Nano 30B (free)", "nvidia/nemotron-3-nano-30b-a3b:free"),
|
||||
("Z.AI GLM 4.5 Air (free)", "z-ai/glm-4.5-air:free"),
|
||||
("Anthropic Claude Sonnet 4 (paid)", "anthropic/claude-sonnet-4"),
|
||||
("Google Gemini 2.5 Flash (paid)", "google/gemini-2.5-flash"),
|
||||
("OpenAI GPT-5 Mini (paid)", "openai/gpt-5-mini"),
|
||||
("Enter custom model ID", "__custom__"),
|
||||
],
|
||||
"ollama": [
|
||||
("Qwen3:latest (8B, local)", "qwen3:latest"),
|
||||
|
|
@@ -188,6 +192,14 @@ def select_shallow_thinking_agent(provider) -> str:
|
|||
)
|
||||
exit(1)
|
||||
|
||||
if choice == "__custom__":
|
||||
choice = questionary.text(
|
||||
"Enter your model ID (e.g. anthropic/claude-sonnet-4):"
|
||||
).ask()
|
||||
if not choice:
|
||||
console.print("\n[red]No model entered. Exiting...[/red]")
|
||||
exit(1)
|
||||
|
||||
return choice
|
||||
|
||||
|
||||
|
|
@@ -225,6 +237,10 @@ def select_deep_thinking_agent(provider) -> str:
|
|||
"openrouter": [
|
||||
("Z.AI GLM 4.5 Air (free)", "z-ai/glm-4.5-air:free"),
|
||||
("NVIDIA Nemotron 3 Nano 30B (free)", "nvidia/nemotron-3-nano-30b-a3b:free"),
|
||||
("Anthropic Claude Sonnet 4 (paid)", "anthropic/claude-sonnet-4"),
|
||||
("Google Gemini 2.5 Pro (paid)", "google/gemini-2.5-pro"),
|
||||
("OpenAI GPT-5.2 (paid)", "openai/gpt-5.2"),
|
||||
("Enter custom model ID", "__custom__"),
|
||||
],
|
||||
"ollama": [
|
||||
("GLM-4.7-Flash:latest (30B, local)", "glm-4.7-flash:latest"),
|
||||
|
|
@@ -253,6 +269,14 @@ def select_deep_thinking_agent(provider) -> str:
|
|||
console.print("\n[red]No deep thinking llm engine selected. Exiting...[/red]")
|
||||
exit(1)
|
||||
|
||||
if choice == "__custom__":
|
||||
choice = questionary.text(
|
||||
"Enter your model ID (e.g. openai/gpt-5.2):"
|
||||
).ask()
|
||||
if not choice:
|
||||
console.print("\n[red]No model entered. Exiting...[/red]")
|
||||
exit(1)
|
||||
|
||||
return choice
|
||||
|
||||
def select_llm_provider() -> tuple[str, str]:
|
||||
|
|
|
|||
Loading…
Reference in New Issue