From c744aa3c4b4974399c441d74a2f5e17a22b93d9d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E9=98=B3=E8=99=8E?=
Date: Mon, 16 Mar 2026 07:44:37 +0800
Subject: [PATCH] feat: expand OpenRouter model list with popular paid models

- Add 7 popular paid models (Claude, GPT-4o, Gemini, DeepSeek, Llama)
- Add custom model input option for any OpenRouter model
- Fixes #337 - users can now use their paid OpenRouter credits

Fixes #337
---
 cli/utils.py | 34 +++++++++++++++++++++++++++++++++-
 1 file changed, 33 insertions(+), 1 deletion(-)

diff --git a/cli/utils.py b/cli/utils.py
index 5a8ec16c..bb8a2c4b 100644
--- a/cli/utils.py
+++ b/cli/utils.py
@@ -156,8 +156,14 @@ def select_shallow_thinking_agent(provider) -> str:
             ("Grok 4.1 Fast (Reasoning) - High-performance, 2M ctx", "grok-4-1-fast-reasoning"),
         ],
         "openrouter": [
-            ("NVIDIA Nemotron 3 Nano 30B (free)", "nvidia/nemotron-3-nano-30b-a3b:free"),
             ("Z.AI GLM 4.5 Air (free)", "z-ai/glm-4.5-air:free"),
+            ("NVIDIA Nemotron 3 Nano 30B (free)", "nvidia/nemotron-3-nano-30b-a3b:free"),
+            ("Claude Sonnet 4 (paid)", "anthropic/claude-sonnet-4"),
+            ("GPT-4o (paid)", "openai/gpt-4o"),
+            ("Gemini 2.5 Pro (paid)", "google/gemini-2.5-pro-preview"),
+            ("DeepSeek V3 (paid)", "deepseek/deepseek-chat"),
+            ("Llama 3.3 70B (paid)", "meta-llama/llama-3.3-70b-instruct"),
+            ("Other (enter model name)", "__custom__"),
         ],
         "ollama": [
             ("Qwen3:latest (8B, local)", "qwen3:latest"),
@@ -188,6 +194,16 @@ def select_shallow_thinking_agent(provider) -> str:
         )
         exit(1)
 
+    # Handle custom model input for openrouter
+    if choice == "__custom__":
+        choice = questionary.text(
+            "Enter the model name (e.g., openai/gpt-4o-mini):",
+            style=questionary.Style([("question", "fg:magenta noinherit")])
+        ).ask()
+        if not choice:
+            console.print("\n[red]No model name entered. Exiting...[/red]")
+            exit(1)
+
     return choice
 
 
@@ -223,8 +239,14 @@ def select_deep_thinking_agent(provider) -> str:
             ("Grok 4.1 Fast (Non-Reasoning) - Speed optimized, 2M ctx", "grok-4-1-fast-non-reasoning"),
         ],
         "openrouter": [
+            ("Claude Opus 4 (paid)", "anthropic/claude-opus-4"),
+            ("Claude Sonnet 4 (paid)", "anthropic/claude-sonnet-4"),
+            ("GPT-4o (paid)", "openai/gpt-4o"),
+            ("Gemini 2.5 Pro (paid)", "google/gemini-2.5-pro-preview"),
+            ("DeepSeek R1 (paid)", "deepseek/deepseek-r1"),
             ("Z.AI GLM 4.5 Air (free)", "z-ai/glm-4.5-air:free"),
             ("NVIDIA Nemotron 3 Nano 30B (free)", "nvidia/nemotron-3-nano-30b-a3b:free"),
+            ("Other (enter model name)", "__custom__"),
         ],
         "ollama": [
             ("GLM-4.7-Flash:latest (30B, local)", "glm-4.7-flash:latest"),
@@ -253,6 +275,16 @@ def select_deep_thinking_agent(provider) -> str:
         console.print("\n[red]No deep thinking llm engine selected. Exiting...[/red]")
         exit(1)
 
+    # Handle custom model input for openrouter
+    if choice == "__custom__":
+        choice = questionary.text(
+            "Enter the model name (e.g., anthropic/claude-opus-4):",
+            style=questionary.Style([("question", "fg:magenta noinherit")])
+        ).ask()
+        if not choice:
+            console.print("\n[red]No model name entered. Exiting...[/red]")
+            exit(1)
+
     return choice
 
 def select_llm_provider() -> tuple[str, str]: