fix(llm_clients): standardize Google API key to unified api_key param

GoogleClient now accepts the unified `api_key` parameter used by
OpenAI and Anthropic clients, mapping it to the provider-specific
`google_api_key` that ChatGoogleGenerativeAI expects. Legacy
`google_api_key` still works for backward compatibility.

Resolves TODO.md item #2 (inconsistent parameter handling).
This commit is contained in:
javierdejesusda 2026-03-24 14:35:02 +01:00
parent 589b351f2a
commit f5026009f9
3 changed files with 49 additions and 9 deletions

View File

@@ -0,0 +1,39 @@
import unittest
from unittest.mock import patch
class TestGoogleApiKeyStandardization(unittest.TestCase):
    """Verify GoogleClient accepts the unified ``api_key`` parameter.

    Each test patches the chat-model constructor, builds a GoogleClient with
    a particular key-argument combination, and inspects the keyword arguments
    that ``get_llm()`` forwarded to the constructor.
    """

    def _forwarded_kwargs(self, mock_chat, **ctor_kwargs):
        """Build a client, invoke get_llm(), and return the kwargs passed
        to the patched NormalizedChatGoogleGenerativeAI constructor."""
        # Imported lazily so the patch decorator is active before the
        # client module's symbols are touched.
        from tradingagents.llm_clients.google_client import GoogleClient

        client = GoogleClient("gemini-2.5-flash", **ctor_kwargs)
        client.get_llm()
        return mock_chat.call_args[1]

    @patch("tradingagents.llm_clients.google_client.NormalizedChatGoogleGenerativeAI")
    def test_api_key_mapped_to_google_api_key(self, mock_chat):
        forwarded = self._forwarded_kwargs(mock_chat, api_key="test-key-123")
        self.assertEqual(forwarded["google_api_key"], "test-key-123")

    @patch("tradingagents.llm_clients.google_client.NormalizedChatGoogleGenerativeAI")
    def test_legacy_google_api_key_still_works(self, mock_chat):
        forwarded = self._forwarded_kwargs(
            mock_chat, google_api_key="legacy-key-456"
        )
        self.assertEqual(forwarded["google_api_key"], "legacy-key-456")

    @patch("tradingagents.llm_clients.google_client.NormalizedChatGoogleGenerativeAI")
    def test_api_key_takes_precedence_over_google_api_key(self, mock_chat):
        forwarded = self._forwarded_kwargs(
            mock_chat, api_key="unified", google_api_key="legacy"
        )
        self.assertEqual(forwarded["google_api_key"], "unified")
# Allow running this test module directly (outside a test runner).
if __name__ == "__main__":
    unittest.main()

View File

@@ -5,14 +5,9 @@
### 1. `validate_model()` is never called ### 1. `validate_model()` is never called
- Add validation call in `get_llm()` with warning (not error) for unknown models - Add validation call in `get_llm()` with warning (not error) for unknown models
### 2. Inconsistent parameter handling ### 2. ~~Inconsistent parameter handling~~ (Fixed)
| Client | API Key Param | Special Params | - GoogleClient now accepts unified `api_key` and maps it to `google_api_key`
|--------|---------------|----------------| - Legacy `google_api_key` still works for backward compatibility
| OpenAI | `api_key` | `reasoning_effort` |
| Anthropic | `api_key` | `thinking_config``thinking` |
| Google | `google_api_key` | `thinking_budget` |
**Fix:** Standardize with unified `api_key` that maps to provider-specific keys
### 3. `base_url` accepted but ignored ### 3. `base_url` accepted but ignored
- `AnthropicClient`: accepts `base_url` but never uses it - `AnthropicClient`: accepts `base_url` but never uses it

View File

@@ -27,10 +27,16 @@ class GoogleClient(BaseLLMClient):
"""Return configured ChatGoogleGenerativeAI instance.""" """Return configured ChatGoogleGenerativeAI instance."""
llm_kwargs = {"model": self.model} llm_kwargs = {"model": self.model}
for key in ("timeout", "max_retries", "google_api_key", "callbacks", "http_client", "http_async_client"): for key in ("timeout", "max_retries", "callbacks", "http_client", "http_async_client"):
if key in self.kwargs: if key in self.kwargs:
llm_kwargs[key] = self.kwargs[key] llm_kwargs[key] = self.kwargs[key]
# Unified api_key maps to provider-specific google_api_key
if "api_key" in self.kwargs:
llm_kwargs["google_api_key"] = self.kwargs["api_key"]
elif "google_api_key" in self.kwargs:
llm_kwargs["google_api_key"] = self.kwargs["google_api_key"]
# Map thinking_level to appropriate API param based on model # Map thinking_level to appropriate API param based on model
# Gemini 3 Pro: low, high # Gemini 3 Pro: low, high
# Gemini 3 Flash: minimal, low, medium, high # Gemini 3 Flash: minimal, low, medium, high