feat: support custom openai base url using .env

This commit is contained in:
mogita 2025-08-15 15:48:23 +08:00
parent fda4f664e8
commit 95572ece42
No known key found for this signature in database
GPG Key ID: A0AA1B9C57A48ECF
6 changed files with 17 additions and 8 deletions

2
.gitignore vendored
View File

@@ -7,3 +7,5 @@ eval_results/
eval_data/
*.egg-info/
.env
*.log
results/*

View File

@@ -19,6 +19,10 @@ from rich.tree import Tree
from rich import box
from rich.align import Align
from rich.rule import Rule
from dotenv import load_dotenv
# Load environment variables from .env file
load_dotenv()
from tradingagents.graph.trading_graph import TradingAgentsGraph
from tradingagents.default_config import DEFAULT_CONFIG

View File

@@ -241,13 +241,16 @@ def select_deep_thinking_agent(provider) -> str:
def select_llm_provider() -> tuple[str, str]:
"""Select the OpenAI api url using interactive selection."""
import os
# Define OpenAI api options with their corresponding endpoints
# Use custom URL from environment if available, otherwise use default
openai_url = os.getenv("OPENAI_BASE_URL", "https://api.openai.com/v1")
BASE_URLS = [
("OpenAI", "https://api.openai.com/v1"),
("OpenAI", openai_url),
("Anthropic", "https://api.anthropic.com/"),
("Google", "https://generativelanguage.googleapis.com/v1"),
("Openrouter", "https://openrouter.ai/api/v1"),
("Ollama", "http://localhost:11434/v1"),
("Ollama", "http://localhost:11434/v1"),
]
choice = questionary.select(

View File

@@ -704,7 +704,7 @@ def get_YFin_data(
def get_stock_news_openai(ticker, curr_date):
config = get_config()
client = OpenAI(base_url=config["backend_url"])
client = OpenAI(base_url=config["backend_url"], api_key=os.getenv("OPENAI_API_KEY"))
response = client.responses.create(
model=config["quick_think_llm"],
@@ -739,7 +739,7 @@ def get_stock_news_openai(ticker, curr_date):
def get_global_news_openai(curr_date):
config = get_config()
client = OpenAI(base_url=config["backend_url"])
client = OpenAI(base_url=config["backend_url"], api_key=os.getenv("OPENAI_API_KEY"))
response = client.responses.create(
model=config["quick_think_llm"],
@@ -774,7 +774,7 @@ def get_global_news_openai(curr_date):
def get_fundamentals_openai(ticker, curr_date):
config = get_config()
client = OpenAI(base_url=config["backend_url"])
client = OpenAI(base_url=config["backend_url"], api_key=os.getenv("OPENAI_API_KEY"))
response = client.responses.create(
model=config["quick_think_llm"],

View File

@@ -12,7 +12,7 @@ DEFAULT_CONFIG = {
"llm_provider": "openai",
"deep_think_llm": "o4-mini",
"quick_think_llm": "gpt-4o-mini",
"backend_url": "https://api.openai.com/v1",
"backend_url": os.getenv("OPENAI_BASE_URL", "https://api.openai.com/v1"),
# Debate and discussion settings
"max_debate_rounds": 1,
"max_risk_discuss_rounds": 1,

View File

@@ -59,8 +59,8 @@ class TradingAgentsGraph:
# Initialize LLMs
if self.config["llm_provider"].lower() == "openai" or self.config["llm_provider"] == "ollama" or self.config["llm_provider"] == "openrouter":
self.deep_thinking_llm = ChatOpenAI(model=self.config["deep_think_llm"], base_url=self.config["backend_url"])
self.quick_thinking_llm = ChatOpenAI(model=self.config["quick_think_llm"], base_url=self.config["backend_url"])
self.deep_thinking_llm = ChatOpenAI(model=self.config["deep_think_llm"], openai_api_base=self.config["backend_url"])
self.quick_thinking_llm = ChatOpenAI(model=self.config["quick_think_llm"], openai_api_base=self.config["backend_url"])
elif self.config["llm_provider"].lower() == "anthropic":
self.deep_thinking_llm = ChatAnthropic(model=self.config["deep_think_llm"], base_url=self.config["backend_url"])
self.quick_thinking_llm = ChatAnthropic(model=self.config["quick_think_llm"], base_url=self.config["backend_url"])