fix: remove stale ChatOpenAI imports, fix hardcoded results path, add missing pandas import

- Remove unused `from langchain_openai import ChatOpenAI` imports from
  graph/setup.py, graph/reflection.py, graph/signal_processing.py, and
  agents/utils/agent_states.py. The framework supports multiple LLM
  providers so these type hints were incorrect and misleading.

- Clean up agents/utils/agent_states.py: remove unused imports of
  Sequence, date, timedelta, datetime, ChatOpenAI, agents star-import,
  ToolNode, END, StateGraph, START (only Annotated, TypedDict, and
  MessagesState are actually used).

- Fix graph/trading_graph.py _log_state(): replace hardcoded
  "eval_results/" relative path with config["results_dir"] so logs land
  in the same configured directory as other run outputs, regardless of
  the working directory the user runs from.

- Fix dataflows/y_finance.py _get_stock_stats_bulk(): add missing
  `import pandas as pd` which caused a NameError on pd.isna() during
  bulk indicator calculation, silently falling back to the slower
  per-day loop.

Made-with: Cursor
This commit is contained in:
tiffanychum 2026-04-02 01:16:42 +08:00
parent 4641c03340
commit ce241998c6
6 changed files with 16 additions and 24 deletions

View File

@@ -1,10 +1,6 @@
from typing import Annotated, Sequence
from datetime import date, timedelta, datetime
from typing_extensions import TypedDict, Optional
from langchain_openai import ChatOpenAI
from tradingagents.agents import *
from langgraph.prebuilt import ToolNode
from langgraph.graph import END, StateGraph, START, MessagesState
from typing import Annotated
from typing_extensions import TypedDict
from langgraph.graph import MessagesState
# Researcher team state

View File

@@ -1,6 +1,7 @@
from typing import Annotated
from datetime import datetime
from dateutil.relativedelta import relativedelta
import pandas as pd
import yfinance as yf
import os
from .stockstats_utils import StockstatsUtils, _clean_dataframe, yf_retry, load_ohlcv, filter_financials_by_date

View File

@@ -1,13 +1,12 @@
# TradingAgents/graph/reflection.py
from typing import Dict, Any
from langchain_openai import ChatOpenAI
from typing import Any, Dict
class Reflector:
"""Handles reflection on decisions and updating memory."""
def __init__(self, quick_thinking_llm: ChatOpenAI):
def __init__(self, quick_thinking_llm: Any):
"""Initialize the reflector with an LLM."""
self.quick_thinking_llm = quick_thinking_llm
self.reflection_system_prompt = self._get_reflection_prompt()

View File

@@ -1,8 +1,7 @@
# TradingAgents/graph/setup.py
from typing import Dict, Any
from langchain_openai import ChatOpenAI
from langgraph.graph import END, StateGraph, START
from typing import Any, Dict
from langgraph.graph import END, START, StateGraph
from langgraph.prebuilt import ToolNode
from tradingagents.agents import *
@@ -16,8 +15,8 @@ class GraphSetup:
def __init__(
self,
quick_thinking_llm: ChatOpenAI,
deep_thinking_llm: ChatOpenAI,
quick_thinking_llm: Any,
deep_thinking_llm: Any,
tool_nodes: Dict[str, ToolNode],
bull_memory,
bear_memory,

View File

@@ -1,12 +1,12 @@
# TradingAgents/graph/signal_processing.py
from langchain_openai import ChatOpenAI
from typing import Any
class SignalProcessor:
"""Processes trading signals to extract actionable decisions."""
def __init__(self, quick_thinking_llm: ChatOpenAI):
def __init__(self, quick_thinking_llm: Any):
"""Initialize with an LLM for processing."""
self.quick_thinking_llm = quick_thinking_llm

View File

@@ -258,15 +258,12 @@ class TradingAgentsGraph:
"final_trade_decision": final_state["final_trade_decision"],
}
# Save to file
directory = Path(f"eval_results/{self.ticker}/TradingAgentsStrategy_logs/")
# Save to file — use configured results_dir so logs land alongside other outputs
directory = Path(self.config["results_dir"]) / self.ticker / "TradingAgentsStrategy_logs"
directory.mkdir(parents=True, exist_ok=True)
with open(
f"eval_results/{self.ticker}/TradingAgentsStrategy_logs/full_states_log_{trade_date}.json",
"w",
encoding="utf-8",
) as f:
log_path = directory / f"full_states_log_{trade_date}.json"
with open(log_path, "w", encoding="utf-8") as f:
json.dump(self.log_states_dict, f, indent=4)
def reflect_and_remember(self, returns_losses):