From 7d9e07bd192e4eec43c09856d1346548d11d7103 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E9=99=86=E5=BC=A0=E5=BC=9B?=
Date: Sat, 14 Mar 2026 11:00:07 +0800
Subject: [PATCH] =?UTF-8?q?feat(llm=5Fclients):=20=E6=B7=BB=E5=8A=A0CodexO?=
 =?UTF-8?q?AuthClient=E6=94=AF=E6=8C=81OAuth=E8=AE=A4=E8=AF=81=E7=9A=84Cod?=
 =?UTF-8?q?ex=E6=A8=A1=E5=9E=8B?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
 .../llm_clients/codex_oauth_client.py | 43 +++++++++++++++++++
 1 file changed, 43 insertions(+)
 create mode 100644 tradingagents/llm_clients/codex_oauth_client.py

diff --git a/tradingagents/llm_clients/codex_oauth_client.py b/tradingagents/llm_clients/codex_oauth_client.py
new file mode 100644
index 00000000..68a89709
--- /dev/null
+++ b/tradingagents/llm_clients/codex_oauth_client.py
@@ -0,0 +1,43 @@
+from typing import Any, Optional
+
+from .base_client import BaseLLMClient
+from .validators import validate_model
+
+
+class CodexOAuthClient(BaseLLMClient):
+    """LLM client for Codex models accessed via ChatGPT OAuth credentials."""
+
+    def __init__(self, model: str, base_url: Optional[str] = None, **kwargs):
+        super().__init__(model, base_url, **kwargs)  # base class stores model/base_url/kwargs — read back in get_llm()
+
+    def get_llm(self) -> Any:
+        """Build and return a configured ``ChatCodexOAuth`` chat-model instance."""
+        try:
+            from langchain_codex_oauth import ChatCodexOAuth  # lazy import so the OAuth dependency stays optional
+        except ModuleNotFoundError as exc:
+            raise ModuleNotFoundError(
+                "langchain-codex-oauth is required for llm_provider='codex_oauth'. "
+                "Install dependencies and run `tradingagents auth login`."
+            ) from exc
+
+        llm_kwargs = {"model": self.model}
+
+        if self.base_url:  # only override the endpoint when one was explicitly configured
+            llm_kwargs["base_url"] = self.base_url
+
+        for key in (  # forward only these optional settings, and only when the caller supplied them
+            "timeout",
+            "max_retries",
+            "reasoning_effort",
+            "max_tokens",
+            "temperature",
+            "callbacks",
+        ):
+            if key in self.kwargs:
+                llm_kwargs[key] = self.kwargs[key]
+
+        return ChatCodexOAuth(**llm_kwargs)
+
+    def validate_model(self) -> bool:
+        """Return True when ``self.model`` is a valid model for the ``codex_oauth`` provider."""
+        return validate_model("codex_oauth", self.model)