fix: tensor_parallel_size=1

This commit is contained in:
Ivan Lee 2025-08-17 20:57:13 +08:00
parent 99280f48a0
commit 5ac6f1857b
1 changed file with 2 additions and 0 deletions

View File

@@ -94,10 +94,12 @@ class TradingAgentsGraph:
             self.deep_thinking_llm = VLLM(
                 model=self.config["deep_think_llm"],
                 trust_remote_code=True,
+                tensor_parallel_size=1,
             )
             self.quick_thinking_llm = VLLM(
                 model=self.config["quick_think_llm"],
                 trust_remote_code=True,
+                tensor_parallel_size=1,
             )
         else:
             raise ValueError(f"Unsupported LLM provider: {self.config['llm_provider']}")