Merge pull request #453 from javierdejesusda/fix/standardize-google-api-key
fix(llm_clients): standardize Google API key to unified api_key param
This commit is contained in:
commit
46e1b600b8
|
|
@ -0,0 +1,28 @@
|
||||||
|
import unittest
|
||||||
|
from unittest.mock import patch
|
||||||
|
|
||||||
|
from tradingagents.llm_clients.google_client import GoogleClient
|
||||||
|
|
||||||
|
|
||||||
|
class TestGoogleApiKeyStandardization(unittest.TestCase):
|
||||||
|
"""Verify GoogleClient accepts unified api_key parameter."""
|
||||||
|
|
||||||
|
@patch("tradingagents.llm_clients.google_client.NormalizedChatGoogleGenerativeAI")
|
||||||
|
def test_api_key_handling(self, mock_chat):
|
||||||
|
test_cases = [
|
||||||
|
("unified api_key is mapped", {"api_key": "test-key-123"}, "test-key-123"),
|
||||||
|
("legacy google_api_key still works", {"google_api_key": "legacy-key-456"}, "legacy-key-456"),
|
||||||
|
("unified api_key takes precedence", {"api_key": "unified", "google_api_key": "legacy"}, "unified"),
|
||||||
|
]
|
||||||
|
|
||||||
|
for msg, kwargs, expected_key in test_cases:
|
||||||
|
with self.subTest(msg=msg):
|
||||||
|
mock_chat.reset_mock()
|
||||||
|
client = GoogleClient("gemini-2.5-flash", **kwargs)
|
||||||
|
client.get_llm()
|
||||||
|
call_kwargs = mock_chat.call_args[1]
|
||||||
|
self.assertEqual(call_kwargs.get("google_api_key"), expected_key)
|
||||||
|
|
||||||
|
|
||||||
|
# Allow running this test module directly as a script.
if __name__ == "__main__":
    unittest.main()
|
||||||
|
|
@ -5,14 +5,9 @@
|
||||||
### 1. `validate_model()` is never called
- Add validation call in `get_llm()` with warning (not error) for unknown models

### 2. ~~Inconsistent parameter handling~~ (Fixed)
- GoogleClient now accepts unified `api_key` and maps it to `google_api_key`
- Legacy `google_api_key` still works for backward compatibility

Previous state, kept for reference:

| Client | API Key Param | Special Params |
|--------|---------------|----------------|
| OpenAI | `api_key` | `reasoning_effort` |
| Anthropic | `api_key` | `thinking_config` → `thinking` |
| Google | `google_api_key` | `thinking_budget` |

**Fix:** Standardize with unified `api_key` that maps to provider-specific keys

### 3. `base_url` accepted but ignored
- `AnthropicClient`: accepts `base_url` but never uses it
|
|
|
||||||
|
|
@ -27,10 +27,15 @@ class GoogleClient(BaseLLMClient):
|
||||||
"""Return configured ChatGoogleGenerativeAI instance."""
|
"""Return configured ChatGoogleGenerativeAI instance."""
|
||||||
llm_kwargs = {"model": self.model}
|
llm_kwargs = {"model": self.model}
|
||||||
|
|
||||||
for key in ("timeout", "max_retries", "google_api_key", "callbacks", "http_client", "http_async_client"):
|
for key in ("timeout", "max_retries", "callbacks", "http_client", "http_async_client"):
|
||||||
if key in self.kwargs:
|
if key in self.kwargs:
|
||||||
llm_kwargs[key] = self.kwargs[key]
|
llm_kwargs[key] = self.kwargs[key]
|
||||||
|
|
||||||
|
# Unified api_key maps to provider-specific google_api_key
|
||||||
|
google_api_key = self.kwargs.get("api_key") or self.kwargs.get("google_api_key")
|
||||||
|
if google_api_key:
|
||||||
|
llm_kwargs["google_api_key"] = google_api_key
|
||||||
|
|
||||||
# Map thinking_level to appropriate API param based on model
|
# Map thinking_level to appropriate API param based on model
|
||||||
# Gemini 3 Pro: low, high
|
# Gemini 3 Pro: low, high
|
||||||
# Gemini 3 Flash: minimal, low, medium, high
|
# Gemini 3 Flash: minimal, low, medium, high
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue