Extract _prompt_custom_model_id helper to avoid duplication
Moved the custom model input logic into a shared helper function as suggested in review.
This commit is contained in:
parent
9c7340ed36
commit
332f24a56d
25
cli/utils.py
25
cli/utils.py
|
|
@ -193,16 +193,22 @@ def select_shallow_thinking_agent(provider) -> str:
|
|||
exit(1)
|
||||
|
||||
if choice == "__custom__":
|
||||
choice = questionary.text(
|
||||
"Enter your model ID (e.g. anthropic/claude-sonnet-4):"
|
||||
).ask()
|
||||
if not choice:
|
||||
console.print("\n[red]No model entered. Exiting...[/red]")
|
||||
exit(1)
|
||||
choice = _prompt_custom_model_id("anthropic/claude-sonnet-4")
|
||||
|
||||
return choice
|
||||
|
||||
|
||||
def _prompt_custom_model_id(example: str) -> str:
    """Ask the user to type a custom model ID interactively.

    Shared by the shallow- and deep-thinking model pickers so the
    "__custom__" menu choice behaves identically in both.

    Args:
        example: Sample model ID embedded in the prompt text
            (e.g. "anthropic/claude-sonnet-4").

    Returns:
        The model ID string the user entered.

    Exits the process with status 1 when the prompt yields no answer
    (empty input, or None after Ctrl+C/EOF), matching the surrounding
    selection menus' behavior.
    """
    prompt = f"Enter your model ID (e.g. {example}):"
    answer = questionary.text(prompt).ask()
    if answer:
        return answer
    console.print("\n[red]No model entered. Exiting...[/red]")
    exit(1)
|
||||
|
||||
|
||||
def select_deep_thinking_agent(provider) -> str:
|
||||
"""Select deep thinking llm engine using an interactive selection."""
|
||||
|
||||
|
|
@ -270,12 +276,7 @@ def select_deep_thinking_agent(provider) -> str:
|
|||
exit(1)
|
||||
|
||||
if choice == "__custom__":
|
||||
choice = questionary.text(
|
||||
"Enter your model ID (e.g. openai/gpt-5.2):"
|
||||
).ask()
|
||||
if not choice:
|
||||
console.print("\n[red]No model entered. Exiting...[/red]")
|
||||
exit(1)
|
||||
choice = _prompt_custom_model_id("openai/gpt-5.2")
|
||||
|
||||
return choice
|
||||
|
||||
|
|
|
|||
Loading…
Reference in New Issue