diff --git a/.gitignore b/.gitignore
index 4ebf99e3..6b6c685b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,3 +7,5 @@ eval_results/
 eval_data/
 *.egg-info/
 .env
+*.log
+results/*
\ No newline at end of file
diff --git a/cli/main.py b/cli/main.py
index 64616ee1..9ebcbaa8 100644
--- a/cli/main.py
+++ b/cli/main.py
@@ -19,6 +19,10 @@ from rich.tree import Tree
 from rich import box
 from rich.align import Align
 from rich.rule import Rule
+from dotenv import load_dotenv
+
+# Load environment variables from .env file
+load_dotenv()
 
 from tradingagents.graph.trading_graph import TradingAgentsGraph
 from tradingagents.default_config import DEFAULT_CONFIG
diff --git a/cli/utils.py b/cli/utils.py
index 7b9682a6..17893a6f 100644
--- a/cli/utils.py
+++ b/cli/utils.py
@@ -241,13 +241,16 @@ def select_deep_thinking_agent(provider) -> str:
 
 def select_llm_provider() -> tuple[str, str]:
     """Select the OpenAI api url using interactive selection."""
+    import os
     # Define OpenAI api options with their corresponding endpoints
+    # Use custom URL from environment if available, otherwise use default
+    openai_url = os.getenv("OPENAI_BASE_URL", "https://api.openai.com/v1")
     BASE_URLS = [
-        ("OpenAI", "https://api.openai.com/v1"),
+        ("OpenAI", openai_url),
         ("Anthropic", "https://api.anthropic.com/"),
         ("Google", "https://generativelanguage.googleapis.com/v1"),
         ("Openrouter", "https://openrouter.ai/api/v1"),
-        ("Ollama", "http://localhost:11434/v1"),
+        ("Ollama", "http://localhost:11434/v1"),
     ]
 
     choice = questionary.select(
diff --git a/tradingagents/dataflows/interface.py b/tradingagents/dataflows/interface.py
index 7fffbb4f..dfb4b742 100644
--- a/tradingagents/dataflows/interface.py
+++ b/tradingagents/dataflows/interface.py
@@ -704,7 +704,7 @@ def get_YFin_data(
 
 def get_stock_news_openai(ticker, curr_date):
     config = get_config()
-    client = OpenAI(base_url=config["backend_url"])
+    client = OpenAI(base_url=config["backend_url"], api_key=os.getenv("OPENAI_API_KEY"))
 
     response = client.responses.create(
         model=config["quick_think_llm"],
@@ -739,7 +739,7 @@ def get_stock_news_openai(ticker, curr_date):
 
 def get_global_news_openai(curr_date):
     config = get_config()
-    client = OpenAI(base_url=config["backend_url"])
+    client = OpenAI(base_url=config["backend_url"], api_key=os.getenv("OPENAI_API_KEY"))
 
     response = client.responses.create(
         model=config["quick_think_llm"],
@@ -774,7 +774,7 @@ def get_global_news_openai(curr_date):
 
 def get_fundamentals_openai(ticker, curr_date):
     config = get_config()
-    client = OpenAI(base_url=config["backend_url"])
+    client = OpenAI(base_url=config["backend_url"], api_key=os.getenv("OPENAI_API_KEY"))
 
     response = client.responses.create(
         model=config["quick_think_llm"],
diff --git a/tradingagents/default_config.py b/tradingagents/default_config.py
index 089e9c24..b72e82f0 100644
--- a/tradingagents/default_config.py
+++ b/tradingagents/default_config.py
@@ -12,7 +12,7 @@ DEFAULT_CONFIG = {
     "llm_provider": "openai",
     "deep_think_llm": "o4-mini",
     "quick_think_llm": "gpt-4o-mini",
-    "backend_url": "https://api.openai.com/v1",
+    "backend_url": os.getenv("OPENAI_BASE_URL", "https://api.openai.com/v1"),
     # Debate and discussion settings
     "max_debate_rounds": 1,
    "max_risk_discuss_rounds": 1,
diff --git a/tradingagents/graph/trading_graph.py b/tradingagents/graph/trading_graph.py
index eb06cf43..6b157b65 100644
--- a/tradingagents/graph/trading_graph.py
+++ b/tradingagents/graph/trading_graph.py
@@ -59,8 +59,8 @@ class TradingAgentsGraph:
 
         # Initialize LLMs
         if self.config["llm_provider"].lower() == "openai" or self.config["llm_provider"] == "ollama" or self.config["llm_provider"] == "openrouter":
-            self.deep_thinking_llm = ChatOpenAI(model=self.config["deep_think_llm"], base_url=self.config["backend_url"])
-            self.quick_thinking_llm = ChatOpenAI(model=self.config["quick_think_llm"], base_url=self.config["backend_url"])
+            self.deep_thinking_llm = ChatOpenAI(model=self.config["deep_think_llm"], openai_api_base=self.config["backend_url"])
+            self.quick_thinking_llm = ChatOpenAI(model=self.config["quick_think_llm"], openai_api_base=self.config["backend_url"])
         elif self.config["llm_provider"].lower() == "anthropic":
             self.deep_thinking_llm = ChatAnthropic(model=self.config["deep_think_llm"], base_url=self.config["backend_url"])
             self.quick_thinking_llm = ChatAnthropic(model=self.config["quick_think_llm"], base_url=self.config["backend_url"])