Added an option to select your own locally running Ollama model when running cli.main
commit 669b2dbbd2
parent a438acdbbd

 cli/utils.py | 24 ++++++++++++++++++++++++
 1 file changed, 24 insertions(+)
@@ -152,6 +152,7 @@ def select_shallow_thinking_agent(provider) -> str:
         "ollama": [
             ("llama3.1 local", "llama3.1"),
             ("llama3.2 local", "llama3.2"),
+            ("Custom (enter model name)", "__custom__"),
         ]
     }
@@ -177,6 +178,17 @@ def select_shallow_thinking_agent(provider) -> str:
         )
         exit(1)
 
+    # If custom is selected, prompt for the model name to use with Ollama
+    if choice == "__custom__":
+        custom_model = questionary.text(
+            "Enter your Ollama model name (e.g., mistral-nemo:latest):",
+            validate=lambda x: len(x.strip()) > 0 or "Please enter a valid model name.",
+        ).ask()
+        if not custom_model:
+            console.print("\n[red]No model name provided. Exiting...[/red]")
+            exit(1)
+        return custom_model.strip()
+
     return choice
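Note: the validate callback in the new prompt relies on questionary's convention that a callable may return True for valid input or a string to display as the error message. A minimal standalone sketch of the same prompt (assuming only the questionary and rich packages already used by cli/utils.py; the helper name here is illustrative, not the repo's):

import questionary
from rich.console import Console

console = Console()

def prompt_custom_ollama_model() -> str:
    # A returned string acts as the validation error, so blank or
    # whitespace-only input re-prompts with the hint below.
    custom_model = questionary.text(
        "Enter your Ollama model name (e.g., mistral-nemo:latest):",
        validate=lambda x: len(x.strip()) > 0 or "Please enter a valid model name.",
    ).ask()
    if not custom_model:  # .ask() returns None if the prompt is aborted
        console.print("\n[red]No model name provided. Exiting...[/red]")
        exit(1)
    return custom_model.strip()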
@@ -214,6 +226,7 @@ def select_deep_thinking_agent(provider) -> str:
         "ollama": [
             ("llama3.1 local", "llama3.1"),
             ("qwen3", "qwen3"),
+            ("Custom (enter model name)", "__custom__"),
         ]
     }
@@ -237,6 +250,17 @@ def select_deep_thinking_agent(provider) -> str:
         console.print("\n[red]No deep thinking llm engine selected. Exiting...[/red]")
         exit(1)
 
+    # If custom is selected, prompt for the model name to use with Ollama
+    if choice == "__custom__":
+        custom_model = questionary.text(
+            "Enter your Ollama model name (e.g., llama3.1:latest):",
+            validate=lambda x: len(x.strip()) > 0 or "Please enter a valid model name.",
+        ).ask()
+        if not custom_model:
+            console.print("\n[red]No model name provided. Exiting...[/red]")
+            exit(1)
+        return custom_model.strip()
+
     return choice
 
 
 def select_llm_provider() -> tuple[str, str]:
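For reference, a sketch of how the __custom__ sentinel interacts with the existing selection menu. questionary.select and questionary.Choice are the real API, but the surrounding menu code is paraphrased rather than copied from the repo, and the prompt text is hypothetical:

import questionary

# Trimmed (label, value) pairs mirroring the "ollama" entries in the diff.
OLLAMA_CHOICES = [
    ("llama3.1 local", "llama3.1"),
    ("llama3.2 local", "llama3.2"),
    ("Custom (enter model name)", "__custom__"),
]

choice = questionary.select(
    "Select your shallow thinking agent:",  # hypothetical prompt text
    choices=[questionary.Choice(title, value=value) for title, value in OLLAMA_CHOICES],
).ask()

if choice == "__custom__":
    # Fall back to the free-text prompt sketched earlier.
    choice = prompt_custom_ollama_model()

print(f"Using Ollama model: {choice}")

Because the sentinel is just another choice value, the custom branch adds no special casing to the menu itself; only the post-selection handling changes.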