Added an option to select your own locally running Ollama model when running cli.main

This commit is contained in:
priteshpatel3 2025-09-10 16:16:56 +08:00
parent a438acdbbd
commit 669b2dbbd2
1 changed file with 24 additions and 0 deletions


@@ -152,6 +152,7 @@ def select_shallow_thinking_agent(provider) -> str:
         "ollama": [
             ("llama3.1 local", "llama3.1"),
             ("llama3.2 local", "llama3.2"),
+            ("Custom (enter model name)", "__custom__"),
         ]
     }
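Each tuple pairs a display label with the value the picker returns; the added "__custom__" entry is a sentinel that later triggers the free-text prompt. A minimal sketch of how such pairs typically drive questionary.select (the picker call itself sits outside this hunk, so its exact wording here is an assumption):

```python
import questionary

# Only the (label, value) pairs and the "__custom__" sentinel come from the diff;
# the prompt message and this standalone layout are illustrative.
OLLAMA_OPTIONS = [
    ("llama3.1 local", "llama3.1"),
    ("llama3.2 local", "llama3.2"),
    ("Custom (enter model name)", "__custom__"),
]

choice = questionary.select(
    "Select your shallow thinking llm engine:",
    choices=[questionary.Choice(label, value=value) for label, value in OLLAMA_OPTIONS],
).ask()
# choice is the selected value: a model name, "__custom__", or None if cancelled.
```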
@@ -177,6 +178,17 @@ def select_shallow_thinking_agent(provider) -> str:
         )
         exit(1)
+    # If custom is selected, prompt for the model name to use with Ollama
+    if choice == "__custom__":
+        custom_model = questionary.text(
+            "Enter your Ollama model name (e.g., mistral-nemo:latest):",
+            validate=lambda x: len(x.strip()) > 0 or "Please enter a valid model name.",
+        ).ask()
+        if not custom_model:
+            console.print("\n[red]No model name provided. Exiting...[/red]")
+            exit(1)
+        return custom_model.strip()
     return choice
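The added branch relies on questionary's validator convention: returning True accepts the input, while returning a string rejects it and shows that string as the error message. A self-contained sketch of the same pattern (the helper name prompt_custom_ollama_model is illustrative, not part of the commit):

```python
import questionary
from rich.console import Console

console = Console()

def prompt_custom_ollama_model(example: str = "mistral-nemo:latest") -> str:
    """Ask for a local Ollama model name; exit if the user gives none."""
    custom_model = questionary.text(
        f"Enter your Ollama model name (e.g., {example}):",
        # Returning True accepts the input; returning the string below
        # rejects it and displays that string as the validation error.
        validate=lambda x: len(x.strip()) > 0 or "Please enter a valid model name.",
    ).ask()
    if not custom_model:  # .ask() returns None when the prompt is cancelled
        console.print("\n[red]No model name provided. Exiting...[/red]")
        exit(1)
    return custom_model.strip()
```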
@@ -214,6 +226,7 @@ def select_deep_thinking_agent(provider) -> str:
         "ollama": [
             ("llama3.1 local", "llama3.1"),
             ("qwen3", "qwen3"),
+            ("Custom (enter model name)", "__custom__"),
         ]
     }
@@ -237,6 +250,17 @@ def select_deep_thinking_agent(provider) -> str:
         console.print("\n[red]No deep thinking llm engine selected. Exiting...[/red]")
         exit(1)
+    # If custom is selected, prompt for the model name to use with Ollama
+    if choice == "__custom__":
+        custom_model = questionary.text(
+            "Enter your Ollama model name (e.g., llama3.1:latest):",
+            validate=lambda x: len(x.strip()) > 0 or "Please enter a valid model name.",
+        ).ask()
+        if not custom_model:
+            console.print("\n[red]No model name provided. Exiting...[/red]")
+            exit(1)
+        return custom_model.strip()
     return choice
 
 def select_llm_provider() -> tuple[str, str]:
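The commit only returns the chosen model name string; wiring it into a client is outside this diff. As one illustrative possibility, assuming the official ollama Python package is installed and an Ollama daemon is running locally:

```python
import ollama  # official Ollama Python client (assumption: not part of this diff)

model = "mistral-nemo:latest"  # e.g. a name returned by the custom prompt above
response = ollama.chat(
    model=model,
    messages=[{"role": "user", "content": "Reply with one short sentence."}],
)
print(response["message"]["content"])
```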