fix: address code review feedback from Gemini

Move RateLimitError handling to the non-retry (break) branch, catch specific exceptions instead of bare Exception, and remove a duplicate model entry.
This commit is contained in:
MUmarJ 2026-01-16 19:25:08 -05:00
parent 7e659dfddf
commit 202901c6a4
4 changed files with 9 additions and 12 deletions

View File

@ -72,7 +72,7 @@ def fetch_openai_models() -> Optional[List[Tuple[str, str]]]:
_model_cache["openai"] = result
return result
except Exception:
except (httpx.RequestError, httpx.HTTPStatusError, ValueError, KeyError):
return None
@ -129,7 +129,7 @@ def fetch_anthropic_models() -> Optional[List[Tuple[str, str]]]:
_model_cache["anthropic"] = result
return result
except Exception:
except (httpx.RequestError, httpx.HTTPStatusError, ValueError, KeyError):
return None
@ -187,7 +187,7 @@ def fetch_google_models() -> Optional[List[Tuple[str, str]]]:
_model_cache["google"] = result
return result
except Exception:
except (httpx.RequestError, httpx.HTTPStatusError, ValueError, KeyError):
return None

View File

@ -235,7 +235,6 @@ def select_deep_thinking_agent(provider) -> str:
"openrouter": [
("Xiaomi MiMo V2 Flash - Fast and efficient multimodal model", "xiaomi/mimo-v2-flash:free"),
("DeepSeek V3 - a 685B-parameter, mixture-of-experts model", "deepseek/deepseek-chat-v3-0324:free"),
("Deepseek - latest iteration of the flagship chat model family from the DeepSeek team.", "deepseek/deepseek-chat-v3-0324:free"),
],
"ollama": [
("llama3.2:3b local", "llama3.2:3b"),

View File

@ -219,14 +219,14 @@ def route_to_vendor(method: str, *args, **kwargs):
last_error = None
break # Success, exit retry loop
except AlphaVantageRateLimitError as e:
print(f"RATE_LIMIT: Alpha Vantage rate limit exceeded")
except (AlphaVantageRateLimitError, RateLimitError) as e:
print(f"RATE_LIMIT: {type(e).__name__} exceeded, falling back to next vendor.")
print(f"DEBUG: Rate limit details: {e}")
last_error = e
break # Don't retry rate limits, move to next vendor
except (ConnectionError, TimeoutError, OSError,
APIConnectionError, APITimeoutError, RateLimitError) as e:
APIConnectionError, APITimeoutError) as e:
# Transient errors - retry with backoff
last_error = e
if retry_attempt < max_retries - 1:

View File

@ -7,6 +7,7 @@ This is required for newer models like gpt-5.1-codex-mini that only support
the Responses API.
"""
import json
import os
import uuid
from typing import Any, Dict, Iterator, List, Optional, Sequence, Union
@ -117,9 +118,8 @@ class ChatOpenAIResponses(BaseChatModel):
# Get the JSON schema for parameters
if tool.args_schema:
params = tool.args_schema.model_json_schema()
# Remove extra fields that OpenAI doesn't expect
# Remove title field that OpenAI doesn't expect at schema level
params.pop("title", None)
params.pop("description", None)
else:
params = {"type": "object", "properties": {}}
@ -162,7 +162,6 @@ class ChatOpenAIResponses(BaseChatModel):
- Tool calls from assistant are represented as separate 'function_call' items
- Tool results use 'function_call_output' content type
"""
import json as json_module
converted = []
for msg in messages:
if isinstance(msg, SystemMessage):
@ -193,7 +192,7 @@ class ChatOpenAIResponses(BaseChatModel):
# Convert args to JSON string for the API
args = tc.get("args", {})
if isinstance(args, dict):
args_str = json_module.dumps(args)
args_str = json.dumps(args)
else:
args_str = str(args)
@ -237,7 +236,6 @@ class ChatOpenAIResponses(BaseChatModel):
# Handle function/tool calls
if hasattr(item, 'type') and item.type == 'function_call':
import json
args = item.arguments
if isinstance(args, str):
try: