Extract _prompt_custom_model_id helper to avoid duplication

Moved the custom model input logic into a shared helper function
as suggested in review.
This commit is contained in:
Charlie Tonneslan 2026-03-22 12:49:28 -04:00
parent 9c7340ed36
commit 332f24a56d
1 changed file with 13 additions and 12 deletions

View File

@ -193,16 +193,22 @@ def select_shallow_thinking_agent(provider) -> str:
exit(1)
if choice == "__custom__":
choice = questionary.text(
"Enter your model ID (e.g. anthropic/claude-sonnet-4):"
).ask()
if not choice:
console.print("\n[red]No model entered. Exiting...[/red]")
exit(1)
choice = _prompt_custom_model_id("anthropic/claude-sonnet-4")
return choice
def _prompt_custom_model_id(example: str) -> str:
    """Ask the user to type in a custom model ID, exiting if none is given.

    Args:
        example: Sample model ID displayed in the prompt text.

    Returns:
        The non-empty model ID string the user entered.
    """
    answer = questionary.text(
        f"Enter your model ID (e.g. {example}):"
    ).ask()
    if answer:
        return answer
    # Empty answer (or Ctrl-C, which yields None) aborts the whole selection.
    console.print("\n[red]No model entered. Exiting...[/red]")
    exit(1)
def select_deep_thinking_agent(provider) -> str:
"""Select deep thinking llm engine using an interactive selection."""
@ -270,12 +276,7 @@ def select_deep_thinking_agent(provider) -> str:
exit(1)
if choice == "__custom__":
choice = questionary.text(
"Enter your model ID (e.g. openai/gpt-5.2):"
).ask()
if not choice:
console.print("\n[red]No model entered. Exiting...[/red]")
exit(1)
choice = _prompt_custom_model_id("openai/gpt-5.2")
return choice