diff --git a/pyproject.toml b/pyproject.toml
index 0decedb0..ec30d43e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -13,6 +13,7 @@ dependencies = [
     "backtrader>=1.9.78.123",
     "langchain-anthropic>=0.3.15",
     "langchain-experimental>=0.3.4",
+    "langchain-aws>=0.2.0",
    "langchain-google-genai>=2.1.5",
     "langchain-openai>=0.3.23",
     "langgraph>=0.4.8",
diff --git a/tradingagents/llm_clients/__init__.py b/tradingagents/llm_clients/__init__.py
index e528eabe..b1f32afc 100644
--- a/tradingagents/llm_clients/__init__.py
+++ b/tradingagents/llm_clients/__init__.py
@@ -1,4 +1,5 @@
 from .base_client import BaseLLMClient
 from .factory import create_llm_client
+from .bedrock_client import BedrockClient
 
-__all__ = ["BaseLLMClient", "create_llm_client"]
+__all__ = ["BaseLLMClient", "create_llm_client", "BedrockClient"]
diff --git a/tradingagents/llm_clients/bedrock_client.py b/tradingagents/llm_clients/bedrock_client.py
new file mode 100644
index 00000000..6b1800d8
--- /dev/null
+++ b/tradingagents/llm_clients/bedrock_client.py
@@ -0,0 +1,56 @@
+from typing import Any, Optional
+
+from langchain_aws import ChatBedrockConverse
+
+from .base_client import BaseLLMClient
+
+
+class BedrockClient(BaseLLMClient):
+    """Client for Amazon Bedrock models.
+
+    Supports any model available on Bedrock via IAM Role (no API key needed),
+    including Claude, Amazon Nova, Kimi, Qwen, GLM, DeepSeek, MiniMax, and more.
+
+    Authentication:
+        Uses boto3 default credential chain: IAM Role (EC2/Lambda), environment
+        variables (AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY), or ~/.aws/credentials.
+
+    Model ID formats:
+        - Cross-region inference profile (recommended):
+          ``global.anthropic.claude-sonnet-4-6``
+          ``eu.anthropic.claude-3-5-sonnet-20240620-v1:0``
+        - Direct on-demand (us-east-1 default region only):
+          ``amazon.nova-lite-v1:0``
+          ``moonshotai.kimi-k2.5``
+          ``qwen.qwen3-32b-v1:0``
+          ``zai.glm-4.7-flash``
+          ``deepseek.v3.2``
+
+    Note:
+        When specifying a non-default ``region_name``, use region-specific
+        inference profile IDs (e.g. ``us-west-2.anthropic.claude-...``),
+        as direct model IDs only support on-demand throughput in us-east-1.
+
+    Example::
+
+        config["llm_provider"] = "bedrock"
+        config["deep_think_llm"] = "global.anthropic.claude-sonnet-4-6"
+        config["quick_think_llm"] = "amazon.nova-micro-v1:0"
+    """
+
+    def __init__(self, model: str, base_url: Optional[str] = None, **kwargs):
+        super().__init__(model, base_url, **kwargs)
+
+    def get_llm(self) -> Any:
+        """Return configured ChatBedrockConverse instance."""
+        llm_kwargs: dict = {"model_id": self.model}
+
+        for key in ("region_name", "max_tokens", "callbacks", "timeout"):
+            if key in self.kwargs:
+                llm_kwargs[key] = self.kwargs[key]
+
+        return ChatBedrockConverse(**llm_kwargs)
+
+    def validate_model(self) -> bool:
+        """Bedrock model IDs are dynamic; skip static validation."""
+        return True
diff --git a/tradingagents/llm_clients/factory.py b/tradingagents/llm_clients/factory.py
index 93c2a7d3..24109640 100644
--- a/tradingagents/llm_clients/factory.py
+++ b/tradingagents/llm_clients/factory.py
@@ -4,6 +4,7 @@
 from .base_client import BaseLLMClient
 from .openai_client import OpenAIClient
 from .anthropic_client import AnthropicClient
 from .google_client import GoogleClient
+from .bedrock_client import BedrockClient
 
 def create_llm_client(
@@ -46,4 +47,7 @@ def create_llm_client(
     if provider_lower == "google":
         return GoogleClient(model, base_url, **kwargs)
 
+    if provider_lower == "bedrock":
+        return BedrockClient(model, base_url, **kwargs)
+
     raise ValueError(f"Unsupported LLM provider: {provider}")