From 3018c73b8e2bfb9b492d414b2fa77d51ce404b3b Mon Sep 17 00:00:00 2001
From: null0NULL123 <2022270162@email.szu.edu.cn>
Date: Sat, 7 Mar 2026 00:56:17 +0800
Subject: [PATCH] feat: add DeepSeek provider support in client and utils

---
 cli/utils.py                               | 9 +++++++++
 tradingagents/llm_clients/factory.py       | 3 +++
 tradingagents/llm_clients/openai_client.py | 7 +++++++
 3 files changed, 19 insertions(+)

diff --git a/cli/utils.py b/cli/utils.py
index aa097fb5..55f069c3 100644
--- a/cli/utils.py
+++ b/cli/utils.py
@@ -160,6 +160,10 @@ def select_shallow_thinking_agent(provider) -> str:
             ("GPT-OSS:latest (20B, local)", "gpt-oss:latest"),
             ("GLM-4.7-Flash:latest (30B, local)", "glm-4.7-flash:latest"),
         ],
+        "deepseek": [
+            ("DeepSeek-V3.2(Non-thinking Mode) - Fast, general-purpose", "deepseek-chat"),
+            ("DeepSeek-V3.2(Thinking Mode) - More thorough analysis", "deepseek-reasoner"),
+        ]
     }
 
     choice = questionary.select(
@@ -228,6 +232,10 @@ def select_deep_thinking_agent(provider) -> str:
             ("GPT-OSS:latest (20B, local)", "gpt-oss:latest"),
             ("Qwen3:latest (8B, local)", "qwen3:latest"),
         ],
+        "deepseek": [
+            ("DeepSeek-V3.2(Non-thinking Mode) - Fast, general-purpose", "deepseek-chat"),
+            ("DeepSeek-V3.2(Thinking Mode) - More thorough analysis", "deepseek-reasoner"),
+        ]
     }
 
     choice = questionary.select(
@@ -262,6 +270,7 @@ def select_llm_provider() -> tuple[str, str]:
         ("xAI", "https://api.x.ai/v1"),
         ("Openrouter", "https://openrouter.ai/api/v1"),
         ("Ollama", "http://localhost:11434/v1"),
+        ("DeepSeek", "https://api.deepseek.com"),
     ]
 
     choice = questionary.select(
diff --git a/tradingagents/llm_clients/factory.py b/tradingagents/llm_clients/factory.py
index 028c88a2..b13fb3ac 100644
--- a/tradingagents/llm_clients/factory.py
+++ b/tradingagents/llm_clients/factory.py
@@ -30,6 +30,9 @@ def create_llm_client(
     if provider_lower in ("openai", "ollama", "openrouter"):
         return OpenAIClient(model, base_url, provider=provider_lower, **kwargs)
 
+    if provider_lower == "deepseek":
+        return OpenAIClient(model, base_url, provider="deepseek", **kwargs)
+
     if provider_lower == "xai":
         return OpenAIClient(model, base_url, provider="xai", **kwargs)
 
diff --git a/tradingagents/llm_clients/openai_client.py b/tradingagents/llm_clients/openai_client.py
index 7011895f..71d9a400 100644
--- a/tradingagents/llm_clients/openai_client.py
+++ b/tradingagents/llm_clients/openai_client.py
@@ -50,6 +50,13 @@ class OpenAIClient(BaseLLMClient):
             api_key = os.environ.get("XAI_API_KEY")
             if api_key:
                 llm_kwargs["api_key"] = api_key
+        elif self.provider == "deepseek":
+            llm_kwargs["base_url"] = "https://api.deepseek.com"
+            api_key = os.environ.get("DEEPSEEK_API_KEY")
+            if api_key:
+                llm_kwargs["api_key"] = api_key
+            else:
+                raise ValueError("DEEPSEEK_API_KEY environment variable is required for DeepSeek provider")
         elif self.provider == "openrouter":
             llm_kwargs["base_url"] = "https://openrouter.ai/api/v1"
             api_key = os.environ.get("OPENROUTER_API_KEY")