diff --git a/cli/utils.py b/cli/utils.py
index d3873360..5e054f3e 100644
--- a/cli/utils.py
+++ b/cli/utils.py
@@ -151,6 +151,9 @@ def select_shallow_thinking_agent(provider) -> str:
         ],
         "ollama": [
             ("llama3.2 local", "llama3.2"),
+        ],
+        "deepseek": [
+            ("DeepSeek V3 - a 685B-parameter, mixture-of-experts model", "deepseek-chat"),
         ]
     }
 
@@ -212,6 +215,10 @@ def select_deep_thinking_agent(provider) -> str:
         ],
         "ollama": [
             ("qwen3", "qwen3"),
+        ],
+        "deepseek": [
+            ("DeepSeek V3 - a 685B-parameter, mixture-of-experts model", "deepseek-chat"),
+            ("DeepSeek-R1 - latest iteration of the flagship chat model family from the DeepSeek team.", "deepseek-reasoner"),
         ]
     }
 
diff --git a/requirements.txt b/requirements.txt
index 1c7c2818..b157407a 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -22,3 +22,5 @@ redis
 chainlit
 rich
 questionary
+langchain_anthropic
+langchain_deepseek
diff --git a/tradingagents/graph/trading_graph.py b/tradingagents/graph/trading_graph.py
index eb06cf43..2541f9c4 100644
--- a/tradingagents/graph/trading_graph.py
+++ b/tradingagents/graph/trading_graph.py
@@ -67,6 +67,10 @@ class TradingAgentsGraph:
         elif self.config["llm_provider"].lower() == "google":
             self.deep_thinking_llm = ChatGoogleGenerativeAI(model=self.config["deep_think_llm"])
             self.quick_thinking_llm = ChatGoogleGenerativeAI(model=self.config["quick_think_llm"])
+        elif self.config["llm_provider"].lower() == "deepseek":
+            from langchain_deepseek import ChatDeepSeek  # local import: dependency only needed for this provider
+            self.deep_thinking_llm = ChatDeepSeek(model=self.config["deep_think_llm"])
+            self.quick_thinking_llm = ChatDeepSeek(model=self.config["quick_think_llm"])
         else:
             raise ValueError(f"Unsupported LLM provider: {self.config['llm_provider']}")
 