From 0c2b04a04dfa612b0daecddc2b730e0b756dce8f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E9=98=B3=E8=99=8E?=
Date: Thu, 19 Mar 2026 17:11:39 +0800
Subject: [PATCH] fix: support custom Ollama URL via base_url or OLLAMA_HOST env var

- Support base_url parameter for Docker/remote Ollama deployments
- Support OLLAMA_HOST environment variable fallback
- Normalize URL to ensure /v1 suffix for OpenAI-compatible API
- Fixes #396 (Docker Ollama connection issues)

Usage:
- In config: set llm_config with base_url='http://host.docker.internal:11434'
- Or env var: export OLLAMA_HOST=http://192.168.1.100:11434
---
 tradingagents/llm_clients/openai_client.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/tradingagents/llm_clients/openai_client.py b/tradingagents/llm_clients/openai_client.py
index 4605c1f9..cf8796c6 100644
--- a/tradingagents/llm_clients/openai_client.py
+++ b/tradingagents/llm_clients/openai_client.py
@@ -54,7 +54,13 @@ class OpenAIClient(BaseLLMClient):
         if api_key:
             llm_kwargs["api_key"] = api_key
         elif self.provider == "ollama":
-            llm_kwargs["base_url"] = "http://localhost:11434/v1"
+            # Support custom Ollama URL via base_url or OLLAMA_HOST env var
+            ollama_url = self.base_url or os.environ.get("OLLAMA_HOST", "http://localhost:11434")
+            # Normalize URL: ensure it ends with /v1 for OpenAI-compatible API
+            ollama_url = ollama_url.rstrip("/")
+            if not ollama_url.endswith("/v1"):
+                ollama_url = f"{ollama_url}/v1"
+            llm_kwargs["base_url"] = ollama_url
             llm_kwargs["api_key"] = "ollama"  # Ollama doesn't require auth
         elif self.base_url:
             llm_kwargs["base_url"] = self.base_url