This commit is contained in:
Opqrst 2025-10-19 15:40:29 +08:00 committed by GitHub
commit abedf7a63a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 67 additions and 3 deletions

View File

@@ -125,6 +125,25 @@ def select_research_depth() -> int:
def select_shallow_thinking_agent(provider) -> str: def select_shallow_thinking_agent(provider) -> str:
"""Select shallow thinking llm engine using an interactive selection.""" """Select shallow thinking llm engine using an interactive selection."""
# if provider is custom, let user input model name
if provider.lower() == "custom":
model = questionary.text(
"Enter your custom Quick-Thinking model name:",
validate=lambda x: len(
x.strip()) > 0 or "Please enter a valid model name.",
style=questionary.Style(
[
("text", "fg:green"),
("highlighted", "noinherit"),
]
),
).ask()
if not model:
console.print("\n[red]No model name provided. Exiting...[/red]")
exit(1)
return model.strip()
# Define shallow thinking llm engine options with their corresponding model names # Define shallow thinking llm engine options with their corresponding model names
SHALLOW_AGENT_OPTIONS = { SHALLOW_AGENT_OPTIONS = {
"openai": [ "openai": [
@@ -183,6 +202,26 @@ def select_shallow_thinking_agent(provider) -> str:
def select_deep_thinking_agent(provider) -> str: def select_deep_thinking_agent(provider) -> str:
"""Select deep thinking llm engine using an interactive selection.""" """Select deep thinking llm engine using an interactive selection."""
# if provider is custom, let user input model name
if provider.lower() == "custom":
model = questionary.text(
"Enter your custom Deep-Thinking model name:",
validate=lambda x: len(
x.strip()) > 0 or "Please enter a valid model name.",
style=questionary.Style(
[
("text", "fg:green"),
("highlighted", "noinherit"),
]
),
).ask()
if not model:
console.print("\n[red]No model name provided. Exiting...[/red]")
exit(1)
return model.strip()
# Define deep thinking llm engine options with their corresponding model names # Define deep thinking llm engine options with their corresponding model names
DEEP_AGENT_OPTIONS = { DEEP_AGENT_OPTIONS = {
"openai": [ "openai": [
@@ -248,6 +287,7 @@ def select_llm_provider() -> tuple[str, str]:
("Google", "https://generativelanguage.googleapis.com/v1"), ("Google", "https://generativelanguage.googleapis.com/v1"),
("Openrouter", "https://openrouter.ai/api/v1"), ("Openrouter", "https://openrouter.ai/api/v1"),
("Ollama", "http://localhost:11434/v1"), ("Ollama", "http://localhost:11434/v1"),
("Custom (Enter your custom backend URL)", "custom"),
] ]
choice = questionary.select( choice = questionary.select(
@@ -270,6 +310,30 @@ def select_llm_provider() -> tuple[str, str]:
console.print("\n[red]no OpenAI backend selected. Exiting...[/red]") console.print("\n[red]no OpenAI backend selected. Exiting...[/red]")
exit(1) exit(1)
# if custom, let user input custom url and api key
if choice[1] == "custom":
custom_url = questionary.text(
"Enter your custom API base URL:",
validate=lambda x: len(x.strip()) > 0 or "Please enter a valid backend URL.").ask()
if not custom_url:
console.print("\n[red]No URL provided. Exiting...[/red]")
exit(1)
api_key = questionary.text(
"Enter your API key:",
validate=lambda x: len(x.strip()) > 0 or "Please enter a valid API key.").ask()
if not api_key:
console.print("\n[red]No API key provided. Exiting...[/red]")
exit(1)
# set api key to environment variable
import os
os.environ["OPENAI_API_KEY"] = api_key.strip()
return "Custom", custom_url.strip()
display_name, url = choice display_name, url = choice
print(f"You selected: {display_name}\tURL: {url}") print(f"You selected: {display_name}\tURL: {url}")

View File

@@ -72,7 +72,7 @@ class TradingAgentsGraph:
) )
# Initialize LLMs # Initialize LLMs
if self.config["llm_provider"].lower() == "openai" or self.config["llm_provider"] == "ollama" or self.config["llm_provider"] == "openrouter": if self.config["llm_provider"].lower() in ["openai", "ollama", "openrouter", "custom"]:
self.deep_thinking_llm = ChatOpenAI(model=self.config["deep_think_llm"], base_url=self.config["backend_url"]) self.deep_thinking_llm = ChatOpenAI(model=self.config["deep_think_llm"], base_url=self.config["backend_url"])
self.quick_thinking_llm = ChatOpenAI(model=self.config["quick_think_llm"], base_url=self.config["backend_url"]) self.quick_thinking_llm = ChatOpenAI(model=self.config["quick_think_llm"], base_url=self.config["backend_url"])
elif self.config["llm_provider"].lower() == "anthropic": elif self.config["llm_provider"].lower() == "anthropic":