feat: add Amazon Bedrock provider support
Add BedrockClient using langchain-aws ChatBedrockConverse, enabling TradingAgents to use Bedrock-hosted models (Claude, Kimi, Qwen, GLM, etc.) via IAM Role authentication without API keys.

Usage:
    config['llm_provider'] = 'bedrock'
    config['deep_think_llm'] = 'us.anthropic.claude-sonnet-4-5-20251001-v1:0'
    config['quick_think_llm'] = 'us.amazon.nova-lite-v1:0'

Requires: langchain-aws>=0.2.0
This commit is contained in:
parent
589b351f2a
commit
76876f8cc5
|
|
@ -13,6 +13,7 @@ dependencies = [
|
|||
"backtrader>=1.9.78.123",
|
||||
"langchain-anthropic>=0.3.15",
|
||||
"langchain-experimental>=0.3.4",
|
||||
"langchain-aws>=0.2.0",
|
||||
"langchain-google-genai>=2.1.5",
|
||||
"langchain-openai>=0.3.23",
|
||||
"langgraph>=0.4.8",
|
||||
|
|
|
|||
|
|
@ -0,0 +1,32 @@
|
|||
from typing import Any, Optional
|
||||
|
||||
from langchain_aws import ChatBedrockConverse
|
||||
|
||||
from .base_client import BaseLLMClient
|
||||
from .validators import validate_model
|
||||
|
||||
|
||||
class BedrockClient(BaseLLMClient):
    """Client for Amazon Bedrock models (Claude, Kimi, Qwen, GLM, etc.)."""

    # Optional constructor kwargs forwarded verbatim to ChatBedrockConverse
    # when the caller supplied them.
    _PASSTHROUGH_KEYS = ("region_name", "max_tokens", "callbacks", "timeout")

    def __init__(self, model: str, base_url: Optional[str] = None, **kwargs):
        super().__init__(model, base_url, **kwargs)

    def get_llm(self) -> Any:
        """Return configured ChatBedrockConverse instance."""
        llm_kwargs: dict[str, Any] = {"model_id": self.model}
        # Only forward options the caller actually provided, so Bedrock
        # defaults apply for everything else.
        for key in self._PASSTHROUGH_KEYS:
            if key in self.kwargs:
                llm_kwargs[key] = self.kwargs[key]
        return ChatBedrockConverse(**llm_kwargs)

    def validate_model(self) -> bool:
        """Validate model for Bedrock (pass-through, model IDs are flexible)."""
        return True
|
||||
|
|
@ -4,6 +4,7 @@ from .base_client import BaseLLMClient
|
|||
from .openai_client import OpenAIClient
|
||||
from .anthropic_client import AnthropicClient
|
||||
from .google_client import GoogleClient
|
||||
from .bedrock_client import BedrockClient
|
||||
|
||||
|
||||
def create_llm_client(
|
||||
|
|
@ -46,4 +47,7 @@ def create_llm_client(
|
|||
if provider_lower == "google":
|
||||
return GoogleClient(model, base_url, **kwargs)
|
||||
|
||||
if provider_lower == "bedrock":
|
||||
return BedrockClient(model, base_url, **kwargs)
|
||||
|
||||
raise ValueError(f"Unsupported LLM provider: {provider}")
|
||||
|
|
|
|||
Loading…
Reference in New Issue