fix(034): address Gemini review — log exceptions, requests.Session, top-level imports, regex ticker matching

This commit is contained in:
Clayton Brown 2026-04-23 12:16:11 +10:00
parent b6c99e1dde
commit 8972c77ba6
2 changed files with 30 additions and 26 deletions

View File

@@ -1,11 +1,14 @@
 # TradingAgents/graph/trading_graph.py
+import logging
 import os
 from pathlib import Path
 import json
 from datetime import date
 from typing import Dict, Any, Tuple, List, Optional
+
+logger = logging.getLogger(__name__)
 
 from langgraph.prebuilt import ToolNode
 from tradingagents.llm_clients import create_llm_client
@@ -201,8 +204,8 @@ class TradingAgentsGraph:
             from tradingagents.signals.polymarket import fetch_polymarket_signals, format_signals_text
             result = fetch_polymarket_signals()
             polymarket_context = format_signals_text(result)
-        except Exception:
-            pass
+        except Exception as e:
+            logger.warning("Failed to fetch Polymarket signals: %s", e)
 
         # Initialize state
         init_agent_state = self.propagator.create_initial_state(

View File

@@ -8,6 +8,8 @@ No API key required — public endpoint.
 """
 import datetime
+import json
+import re
 import sys
 from typing import TypedDict
@@ -91,26 +93,27 @@ def _fetch_active_markets() -> list[dict]:
     offset = 0
     limit = 100
     max_pages = 10  # up to 1000 markets — enough for keyword filtering
-    for _ in range(max_pages):
-        resp = requests.get(
-            f"{GAMMA_API_URL}/markets",
-            params={
-                "limit": limit,
-                "offset": offset,
-                "active": "true",
-                "closed": "false",
-                "volume_num_min": MIN_VOLUME_USD,
-                "end_date_max": cutoff,
-            },
-            timeout=15,
-        )
-        resp.raise_for_status()
-        batch = resp.json()
-        if not batch:
-            break
-        markets.extend(batch)
-        if len(batch) < limit:
-            break
-        offset += limit
+    with requests.Session() as session:
+        for _ in range(max_pages):
+            resp = session.get(
+                f"{GAMMA_API_URL}/markets",
+                params={
+                    "limit": limit,
+                    "offset": offset,
+                    "active": "true",
+                    "closed": "false",
+                    "volume_num_min": MIN_VOLUME_USD,
+                    "end_date_max": cutoff,
+                },
+                timeout=15,
+            )
+            resp.raise_for_status()
+            batch = resp.json()
+            if not batch:
+                break
+            markets.extend(batch)
+            if len(batch) < limit:
+                break
+            offset += limit
     return markets
@@ -170,7 +173,6 @@ def _extract_probability(market: dict) -> float:
     if prices:
         try:
             if isinstance(prices, str):
-                import json
                 prices = json.loads(prices)
             return float(prices[0])
         except (json.JSONDecodeError, IndexError, TypeError, ValueError):
@@ -325,7 +327,7 @@ def map_signals_to_tickers(signals: list[PolymarketSignal], held_tickers: set[str]
         sectors.add(sector_key)
 
     # 3. Direct ticker mention in event text
-    direct = [t for t in held_upper if f" {t} " in f" {event_lower.upper()} " or event_lower.upper().startswith(f"{t} ")]
+    direct = [t for t in held_upper if re.search(rf"\b{re.escape(t)}\b", event_lower.upper())]
 
     # 4. Collect all tickers from matched sectors, intersect with held
     matched = set(direct)
@@ -554,9 +556,8 @@ def _normalize_tags(raw_tags) -> list[str]:
     or occasionally as a JSON string or comma-separated string.
     """
     if isinstance(raw_tags, str):
-        import json as _json
         try:
-            raw_tags = _json.loads(raw_tags)
+            raw_tags = json.loads(raw_tags)
         except (ValueError, TypeError):
             return [t.strip() for t in raw_tags.split(",") if t.strip()]
     if not isinstance(raw_tags, list):