This commit is contained in:
MarkLo127 2026-03-11 18:59:17 +08:00
parent c305ea5d3b
commit ea4a267bcf
1 changed file with 23 additions and 9 deletions

View File

@@ -93,16 +93,30 @@ class TradingAgentsXGraph:
         deep_api_key = self.config.get("deep_think_api_key", os.getenv("OPENAI_API_KEY"))
         quick_api_key = self.config.get("quick_think_api_key", os.getenv("OPENAI_API_KEY"))
 
-        # Helper to initialize LLM based on URL/Provider
+        # Helper to initialize LLM based on model name/provider
         def _create_llm(model: str, base_url: str, api_key: str):
-            # Use ChatOpenAI for all providers including Anthropic's OpenAI-compatible endpoint
-            # Anthropic's /v1 endpoint is OpenAI-compatible, so we use ChatOpenAI
-            return ChatOpenAI(
-                model=model,
-                base_url=base_url,
-                openai_api_key=api_key,
-                max_tokens= 16384 # Prevent report truncation
-            )
+            # Anthropic models require ChatAnthropic (different auth header: x-api-key)
+            if model.startswith("claude-"):
+                return ChatAnthropic(
+                    model=model,
+                    anthropic_api_key=api_key,
+                    max_tokens=16384,
+                )
+            # Google Gemini models use ChatGoogleGenerativeAI
+            elif model.startswith("gemini-"):
+                return ChatGoogleGenerativeAI(
+                    model=model,
+                    google_api_key=api_key,
+                    max_output_tokens=16384,
+                )
+            # All other providers (OpenAI, Grok, DeepSeek, Qwen, custom) use OpenAI-compatible API
+            else:
+                return ChatOpenAI(
+                    model=model,
+                    base_url=base_url,
+                    openai_api_key=api_key,
+                    max_tokens=16384,
+                )
 
         # Initialize LLMs independently
         print(f"DEBUG: Initializing Deep Thinking LLM: Model={self.config['deep_think_llm']}, BaseURL={deep_base_url}, Key=**********")