feat: add optional social sentiment tool
This commit is contained in:
parent
82ec67a024
commit
cec2bee6e7
|
|
@ -67,7 +67,8 @@ Our framework decomposes complex trading tasks into specialized roles. This ensu
|
|||
### Analyst Team
|
||||
- Fundamentals Analyst: Evaluates company financials and performance metrics, identifying intrinsic values and potential red flags.
|
||||
- Sentiment Analyst: Analyzes social media and public sentiment using sentiment scoring algorithms to gauge short-term market mood.
|
||||
- News Analyst: Monitors global news and macroeconomic indicators, interpreting the impact of events on market conditions.
|
||||
- News Analyst: Monitors company and global news, interpreting the impact of events on market conditions.
|
||||
- Macro Analyst: Tracks macroeconomic indicators, Treasury curve structure, and Fed policy context using FRED-backed data.
|
||||
- Technical Analyst: Utilizes technical indicators (like MACD and RSI) to detect trading patterns and forecast price movements.
|
||||
|
||||
<p align="center">
|
||||
|
|
@ -128,6 +129,8 @@ export ANTHROPIC_API_KEY=... # Anthropic (Claude)
|
|||
export XAI_API_KEY=... # xAI (Grok)
|
||||
export OPENROUTER_API_KEY=... # OpenRouter
|
||||
export ALPHA_VANTAGE_API_KEY=... # Alpha Vantage
|
||||
export FRED_API_KEY=... # Required for Macro Analyst / FRED macro data
|
||||
export ADANOS_API_KEY=... # Optional, for live social sentiment snapshots in the social analyst
|
||||
```
|
||||
|
||||
For local models, configure Ollama with `llm_provider: "ollama"` in your config.
|
||||
|
|
@ -137,6 +140,8 @@ Alternatively, copy `.env.example` to `.env` and fill in your keys:
|
|||
cp .env.example .env
|
||||
```
|
||||
|
||||
If you want to use the Macro Analyst in the CLI or graph, you must also configure `FRED_API_KEY`.
|
||||
|
||||
### CLI Usage
|
||||
|
||||
Launch the interactive CLI:
|
||||
|
|
@ -144,7 +149,7 @@ Launch the interactive CLI:
|
|||
tradingagents # installed command
|
||||
python -m cli.main # alternative: run directly from source
|
||||
```
|
||||
You will see a screen where you can select your desired tickers, analysis date, LLM provider, research depth, and more.
|
||||
You will see a screen where you can select your desired tickers, analysis date, LLM provider, analyst team (including Macro Analyst), research depth, and more.
|
||||
|
||||
<p align="center">
|
||||
<img src="assets/cli/cli_init.png" width="100%" style="display: inline-block; margin: 0 2%;">
|
||||
|
|
|
|||
|
|
@ -0,0 +1,358 @@
|
|||
from datetime import date
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
from tradingagents.agents.analysts.social_media_analyst import create_social_media_analyst
|
||||
from tradingagents.graph.setup import GraphSetup
|
||||
from tradingagents.graph.trading_graph import TradingAgentsGraph
|
||||
from tradingagents.agents.utils.social_data_tools import (
|
||||
get_social_sentiment,
|
||||
has_social_sentiment_support,
|
||||
)
|
||||
|
||||
|
||||
class FakePrompt:
    """Minimal stand-in for a ChatPromptTemplate that records .partial() kwargs."""

    def __init__(self):
        # Accumulated keyword arguments from every .partial() call.
        self.partials = {}

    def partial(self, **kwargs):
        """Merge *kwargs* into the recorded partials and return self (chainable)."""
        for key, value in kwargs.items():
            self.partials[key] = value
        return self

    def __or__(self, bound_llm):
        """Mimic LCEL piping: stamp a snapshot copy of the partials onto the bound LLM."""
        bound_llm.prompt_partials = dict(self.partials)
        return bound_llm
|
||||
|
||||
|
||||
class FakeBoundLLM:
    """Fake of an LLM with tools bound; invoke() yields a canned, tool-free reply."""

    def __init__(self, tools):
        self.tools = tools
        # Populated by FakePrompt.__or__ when the prompt is piped into this LLM.
        self.prompt_partials = {}

    def invoke(self, _messages):
        """Return a response object carrying no tool calls and a fixed report body."""
        response_cls = type(
            "Response", (), {"tool_calls": [], "content": "sentiment report"}
        )
        return response_cls()
|
||||
|
||||
|
||||
class FakeLLM:
    """Fake chat model that records bind_tools() calls for later inspection."""

    def __init__(self):
        self.bound_tools = []
        # The FakeBoundLLM produced by the most recent bind_tools() call.
        self.bound = None

    def bind_tools(self, tools):
        """Remember *tools*, wrap them in a FakeBoundLLM, and return that wrapper."""
        self.bound_tools = tools
        wrapper = FakeBoundLLM(tools)
        self.bound = wrapper
        return wrapper
|
||||
|
||||
|
||||
class DummyToolNode:
    """Trivial ToolNode replacement that just remembers the tool list it was given."""

    def __init__(self, tools):
        self.tools = tools
|
||||
|
||||
|
||||
class DummyStateGraph:
    """StateGraph stub: records added nodes, ignores edges, compiles to a node map."""

    def __init__(self, _state_type):
        self.nodes = {}

    def add_node(self, name, node):
        """Register *node* under *name*."""
        self.nodes[name] = node

    def add_edge(self, *_args, **_kwargs):
        """Edges are irrelevant for these tests; accept and discard."""
        return None

    def add_conditional_edges(self, *_args, **_kwargs):
        """Conditional edges are irrelevant for these tests; accept and discard."""
        return None

    def compile(self):
        """Expose the collected nodes so assertions can inspect the wiring."""
        return {"nodes": self.nodes}
|
||||
|
||||
|
||||
def test_support_flag_requires_api_key():
    """The support flag mirrors the presence of ADANOS_API_KEY in the environment."""
    for env, expected in ({}, False), ({"ADANOS_API_KEY": "sk_test"}, True):
        with patch.dict("os.environ", env, clear=True):
            assert has_social_sentiment_support() is expected
|
||||
|
||||
|
||||
@patch("tradingagents.agents.analysts.social_media_analyst.ChatPromptTemplate.from_messages")
def test_social_analyst_hides_sentiment_tool_without_api_key(mock_from_messages):
    """Without ADANOS_API_KEY the analyst binds only get_news and drops the guidance."""
    fake_llm = FakeLLM()
    mock_from_messages.return_value = FakePrompt()

    state = {
        "trade_date": "2026-03-24",
        "company_of_interest": "TSLA",
        "messages": [],
    }
    with patch.dict("os.environ", {}, clear=True):
        create_social_media_analyst(fake_llm)(state)

    assert [tool.name for tool in fake_llm.bound_tools] == ["get_news"]
    system_message = fake_llm.bound.prompt_partials["system_message"]
    assert "get_social_sentiment(" not in system_message
|
||||
|
||||
|
||||
@patch("tradingagents.agents.analysts.social_media_analyst.ChatPromptTemplate.from_messages")
def test_social_analyst_exposes_sentiment_tool_with_api_key(mock_from_messages):
    """With ADANOS_API_KEY set, the sentiment tool is bound first and documented."""
    fake_llm = FakeLLM()
    mock_from_messages.return_value = FakePrompt()

    state = {
        "trade_date": "2026-03-24",
        "company_of_interest": "TSLA",
        "messages": [],
    }
    with patch.dict("os.environ", {"ADANOS_API_KEY": "sk_test"}, clear=True):
        create_social_media_analyst(fake_llm)(state)

    bound_names = [tool.name for tool in fake_llm.bound_tools]
    assert bound_names == ["get_social_sentiment", "get_news"]
    assert "get_social_sentiment(" in fake_llm.bound.prompt_partials["system_message"]
|
||||
|
||||
|
||||
@patch("tradingagents.agents.analysts.social_media_analyst.ChatPromptTemplate.from_messages")
def test_social_analyst_honors_explicit_support_snapshot(mock_from_messages):
    """An explicit social_sentiment_available=True overrides the missing API key."""
    fake_llm = FakeLLM()
    mock_from_messages.return_value = FakePrompt()

    state = {
        "trade_date": "2026-03-24",
        "company_of_interest": "TSLA",
        "messages": [],
    }
    with patch.dict("os.environ", {}, clear=True):
        analyst = create_social_media_analyst(fake_llm, social_sentiment_available=True)
        analyst(state)

    bound_names = [tool.name for tool in fake_llm.bound_tools]
    assert bound_names == ["get_social_sentiment", "get_news"]
    assert "get_social_sentiment(" in fake_llm.bound.prompt_partials["system_message"]
|
||||
|
||||
|
||||
def test_social_tool_node_hides_sentiment_tool_without_api_key(monkeypatch):
    """With sentiment support off, the social tool node contains only get_news."""
    monkeypatch.setattr("tradingagents.graph.trading_graph.ToolNode", DummyToolNode)

    # Bypass __init__ (LLM/memory construction); only the flag is needed here.
    graph = object.__new__(TradingAgentsGraph)
    graph.social_sentiment_available = False

    social_node = graph._create_tool_nodes()["social"]
    assert [tool.name for tool in social_node.tools] == ["get_news"]
|
||||
|
||||
|
||||
def test_social_tool_node_exposes_sentiment_tool_with_api_key(monkeypatch):
    """With sentiment support on, the sentiment tool precedes get_news in the node."""
    monkeypatch.setattr("tradingagents.graph.trading_graph.ToolNode", DummyToolNode)

    # Bypass __init__ (LLM/memory construction); only the flag is needed here.
    graph = object.__new__(TradingAgentsGraph)
    graph.social_sentiment_available = True

    social_node = graph._create_tool_nodes()["social"]
    assert [tool.name for tool in social_node.tools] == [
        "get_social_sentiment",
        "get_news",
    ]
|
||||
|
||||
|
||||
def test_graph_setup_passes_shared_social_availability_to_social_analyst(monkeypatch):
    """GraphSetup forwards its social_sentiment_available flag to the social analyst factory.

    The twelve other agent factories are irrelevant to this behavior, so they are
    patched in one data-driven loop instead of twelve copy-pasted setattr calls.
    """
    captured = {}

    monkeypatch.setattr("tradingagents.graph.setup.StateGraph", DummyStateGraph)
    monkeypatch.setattr("tradingagents.graph.setup.create_msg_delete", lambda: "delete")

    def simple_factory(name):
        # Factory whose product is just the role name; enough for node wiring.
        def factory(*_args, **_kwargs):
            return name

        return factory

    def social_factory(llm, social_sentiment_available=None):
        # Capture exactly what GraphSetup hands the social analyst factory.
        captured["llm"] = llm
        captured["social_sentiment_available"] = social_sentiment_available
        return "Social Analyst"

    monkeypatch.setattr(
        "tradingagents.graph.setup.create_social_media_analyst",
        social_factory,
    )

    # All remaining factories share the same trivial shape; patch them in a loop.
    simple_factories = {
        "create_market_analyst": "Market Analyst",
        "create_news_analyst": "News Analyst",
        "create_fundamentals_analyst": "Fundamentals Analyst",
        "create_macro_analyst": "Macro Analyst",
        "create_bull_researcher": "Bull Researcher",
        "create_bear_researcher": "Bear Researcher",
        "create_research_manager": "Research Manager",
        "create_trader": "Trader",
        "create_aggressive_debator": "Aggressive Analyst",
        "create_neutral_debator": "Neutral Analyst",
        "create_conservative_debator": "Conservative Analyst",
        "create_portfolio_manager": "Portfolio Manager",
    }
    for factory_name, role_name in simple_factories.items():
        monkeypatch.setattr(
            f"tradingagents.graph.setup.{factory_name}",
            simple_factory(role_name),
        )

    class PartialConditionalLogic:
        # Only the conditional hooks setup_graph touches for this analyst selection.
        def should_continue_social(self, _state):
            return "Msg Clear Social"

        def should_continue_debate(self, _state):
            return "Research Manager"

        def should_continue_risk_analysis(self, _state):
            return "Portfolio Manager"

    setup = GraphSetup(
        quick_thinking_llm="quick-llm",
        deep_thinking_llm="deep-llm",
        tool_nodes={"social": "social-tools"},
        bull_memory=object(),
        bear_memory=object(),
        trader_memory=object(),
        invest_judge_memory=object(),
        portfolio_manager_memory=object(),
        conditional_logic=PartialConditionalLogic(),
        social_sentiment_available=True,
    )

    graph = setup.setup_graph(selected_analysts=["social"])

    assert captured == {
        "llm": "quick-llm",
        "social_sentiment_available": True,
    }
    assert graph["nodes"]["tools_social"] == "social-tools"
|
||||
|
||||
|
||||
@patch("tradingagents.agents.utils.social_data_tools.requests.get")
def test_historical_trade_dates_do_not_hit_network(mock_get):
    """Historical curr_date values short-circuit before any HTTP call is attempted."""
    payload = {"ticker": "TSLA", "curr_date": "2024-01-15", "look_back_days": 7}
    with patch.dict("os.environ", {"ADANOS_API_KEY": "sk_test"}, clear=True):
        result = get_social_sentiment.invoke(payload)

    assert "historical trade date" in result
    mock_get.assert_not_called()
|
||||
|
||||
|
||||
@patch("tradingagents.agents.utils.social_data_tools.requests.get")
def test_recent_weekend_trade_dates_still_hit_live_window(mock_get):
    """A trade date a few days behind "today" still queries all three live sources."""
    empty_response = Mock()
    empty_response.raise_for_status.return_value = None
    empty_response.json.return_value = {"stocks": []}
    mock_get.return_value = empty_response

    payload = {"ticker": "TSLA", "curr_date": "2026-03-20", "look_back_days": 7}
    with patch.dict("os.environ", {"ADANOS_API_KEY": "sk_test"}, clear=True):
        with patch("tradingagents.agents.utils.social_data_tools.date") as mock_date:
            # Friday trade date requested on the following Monday: inside the window.
            mock_date.today.return_value = date(2026, 3, 23)
            result = get_social_sentiment.invoke(payload)

    assert "historical trade date" not in result
    assert "## Social sentiment for TSLA" in result
    assert mock_get.call_count == 3
|
||||
|
||||
|
||||
@patch("tradingagents.agents.utils.social_data_tools.requests.get")
def test_formats_cross_source_snapshot(mock_get):
    """A full three-source snapshot is formatted with averages and per-source sections.

    The three mocked HTTP responses differ only in their payload row, so the
    response boilerplate is built by one local helper instead of being repeated.
    """

    def compare_response(row):
        # Mocked HTTP response whose JSON payload holds a single compare row.
        response = Mock()
        response.raise_for_status.return_value = None
        response.json.return_value = {"stocks": [row]}
        return response

    mock_get.side_effect = [
        compare_response(
            {
                "ticker": "TSLA",
                "mentions": 647,
                "buzz_score": 81.2,
                "bullish_pct": 46,
                "trend": "rising",
                "subreddit_count": 23,
                "total_upvotes": 4120,
            }
        ),
        compare_response(
            {
                "ticker": "TSLA",
                "mentions": 2650,
                "buzz_score": 86.4,
                "bullish_pct": 58,
                "trend": "falling",
                "unique_tweets": 392,
                "total_upvotes": 95000,
            }
        ),
        compare_response(
            {
                "ticker": "TSLA",
                "trade_count": 3731,
                "market_count": 71,
                "buzz_score": 55.7,
                "bullish_pct": 72,
                "trend": "stable",
                "total_liquidity": 8400000,
            }
        ),
    ]

    with patch.dict("os.environ", {"ADANOS_API_KEY": "sk_test"}, clear=True):
        with patch("tradingagents.agents.utils.social_data_tools.date") as mock_date:
            mock_date.today.return_value = date(2026, 3, 19)
            result = get_social_sentiment.invoke(
                {"ticker": "TSLA", "curr_date": "2026-03-19", "look_back_days": 7}
            )

    assert "## Social sentiment for TSLA" in result
    assert "Average buzz: 74.4/100" in result
    assert "Average bullish: 58.7%" in result
    assert "### Reddit" in result
    assert "### X/Twitter" in result
    assert "### Polymarket" in result
|
||||
|
|
@ -1,21 +1,31 @@
|
|||
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
|
||||
import time
|
||||
import json
|
||||
from tradingagents.agents.utils.agent_utils import build_instrument_context, get_news
|
||||
from tradingagents.dataflows.config import get_config
|
||||
from tradingagents.agents.utils.agent_utils import (
|
||||
build_social_tools,
|
||||
build_instrument_context,
|
||||
has_social_sentiment_support,
|
||||
)
|
||||
|
||||
|
||||
def create_social_media_analyst(llm):
|
||||
def create_social_media_analyst(llm, social_sentiment_available: bool | None = None):
|
||||
def social_media_analyst_node(state):
|
||||
current_date = state["trade_date"]
|
||||
instrument_context = build_instrument_context(state["company_of_interest"])
|
||||
|
||||
tools = [
|
||||
get_news,
|
||||
]
|
||||
social_sentiment_enabled = social_sentiment_available
|
||||
if social_sentiment_enabled is None:
|
||||
social_sentiment_enabled = has_social_sentiment_support()
|
||||
|
||||
tools = build_social_tools(social_sentiment_enabled)
|
||||
sentiment_guidance = ""
|
||||
if social_sentiment_enabled:
|
||||
sentiment_guidance = (
|
||||
" When available, use the get_social_sentiment(ticker, curr_date, look_back_days) tool first to capture current cross-source social sentiment from Reddit, X/Twitter, and Polymarket. If the social sentiment tool reports that the requested trade date is historical, rely on news context and state that live social sentiment was unavailable for that backtest date."
|
||||
)
|
||||
|
||||
system_message = (
|
||||
"You are a social media and company specific news researcher/analyst tasked with analyzing social media posts, recent company news, and public sentiment for a specific company over the past week. You will be given a company's name your objective is to write a comprehensive long report detailing your analysis, insights, and implications for traders and investors on this company's current state after looking at social media and what people are saying about that company, analyzing sentiment data of what people feel each day about the company, and looking at recent company news. Use the get_news(query, start_date, end_date) tool to search for company-specific news and social media discussions. Try to look at all sources possible from social media to sentiment to news. Provide specific, actionable insights with supporting evidence to help traders make informed decisions."
|
||||
"You are a social media and company specific news researcher/analyst tasked with analyzing social media posts, recent company news, and public sentiment for a specific company over the past week. You will be given a company's name and your objective is to write a comprehensive long report detailing your analysis, insights, and implications for traders and investors."
|
||||
+ sentiment_guidance
|
||||
+ " Then use the get_news(query, start_date, end_date) tool to add company-specific news context. Provide specific, actionable insights with supporting evidence to help traders make informed decisions."
|
||||
+ """ Make sure to append a Markdown table at the end of the report to organize key points in the report, organized and easy to read."""
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -22,6 +22,10 @@ from tradingagents.agents.utils.news_data_tools import (
|
|||
get_insider_transactions,
|
||||
get_global_news
|
||||
)
|
||||
from tradingagents.agents.utils.social_data_tools import (
|
||||
get_social_sentiment,
|
||||
has_social_sentiment_support,
|
||||
)
|
||||
from tradingagents.agents.utils.macro_data_tools import (
|
||||
get_economic_indicators,
|
||||
get_fed_calendar,
|
||||
|
|
@ -48,6 +52,7 @@ from tradingagents.agents.utils.valuation_tools import (
|
|||
|
||||
|
||||
__all__ = [
|
||||
"build_social_tools",
|
||||
"build_instrument_context",
|
||||
"build_analyst_report_context",
|
||||
"build_structured_stock_context",
|
||||
|
|
@ -72,12 +77,21 @@ __all__ = [
|
|||
"get_sizing_fundamentals",
|
||||
"get_sizing_indicator",
|
||||
"get_sizing_price_history",
|
||||
"get_social_sentiment",
|
||||
"get_stock_data",
|
||||
"get_valuation_inputs",
|
||||
"has_social_sentiment_support",
|
||||
"get_yield_curve",
|
||||
]
|
||||
|
||||
|
||||
def build_social_tools(include_social_sentiment: bool) -> list:
|
||||
tools = [get_news]
|
||||
if include_social_sentiment:
|
||||
tools.insert(0, get_social_sentiment)
|
||||
return tools
|
||||
|
||||
|
||||
def build_instrument_context(ticker: str) -> str:
|
||||
"""Describe the exact instrument so agents preserve exchange-qualified tickers."""
|
||||
return (
|
||||
|
|
|
|||
|
|
@ -0,0 +1,240 @@
|
|||
from datetime import date, datetime
|
||||
import os
|
||||
from typing import Annotated
|
||||
|
||||
import requests
|
||||
from langchain_core.tools import tool
|
||||
|
||||
|
||||
ADANOS_API_BASE_URL = os.getenv("ADANOS_API_BASE_URL", "https://api.adanos.org").rstrip("/")
|
||||
RECENT_WINDOW_LOOKBACK_DAYS = 4
|
||||
RECENT_WINDOW_FORWARD_DAYS = 1
|
||||
|
||||
|
||||
def has_social_sentiment_support() -> bool:
    """Return True when the optional Adanos-backed social sentiment tool can be used."""
    # The tool is usable exactly when a non-empty API key is configured.
    api_key = os.getenv("ADANOS_API_KEY")
    return api_key is not None and api_key != ""
|
||||
|
||||
|
||||
def _supports_recent_social_window(requested_date: date, today: date) -> bool:
    """True when *requested_date* lies inside the supported rolling window around *today*."""
    days_behind = (today - requested_date).days
    if days_behind > RECENT_WINDOW_LOOKBACK_DAYS:
        return False  # too far in the past for the live rolling window
    return days_behind >= -RECENT_WINDOW_FORWARD_DAYS
|
||||
|
||||
|
||||
def _safe_number(value, digits: int = 1):
|
||||
if value is None:
|
||||
return None
|
||||
try:
|
||||
number = float(value)
|
||||
except (TypeError, ValueError):
|
||||
return None
|
||||
return round(number, digits)
|
||||
|
||||
|
||||
def _safe_int(value):
|
||||
if value is None:
|
||||
return None
|
||||
try:
|
||||
return int(float(value))
|
||||
except (TypeError, ValueError):
|
||||
return None
|
||||
|
||||
|
||||
def _format_percent(value) -> str:
|
||||
if value is None:
|
||||
return "n/a"
|
||||
return f"{value:.1f}%"
|
||||
|
||||
|
||||
def _format_score(value) -> str:
|
||||
if value is None:
|
||||
return "n/a"
|
||||
return f"{value:.1f}/100"
|
||||
|
||||
|
||||
def _format_currency(value) -> str:
|
||||
if value is None:
|
||||
return "n/a"
|
||||
if value >= 1_000_000_000:
|
||||
return f"${value / 1_000_000_000:.1f}B"
|
||||
if value >= 1_000_000:
|
||||
return f"${value / 1_000_000:.1f}M"
|
||||
if value >= 1_000:
|
||||
return f"${value / 1_000:.1f}K"
|
||||
return f"${value:.0f}"
|
||||
|
||||
|
||||
def _normalize_compare_row(payload: dict) -> dict:
|
||||
stocks = payload.get("stocks") if isinstance(payload, dict) else None
|
||||
if not isinstance(stocks, list) or not stocks or not isinstance(stocks[0], dict):
|
||||
return {}
|
||||
return stocks[0]
|
||||
|
||||
|
||||
def _alignment_label(bullish_values: list[float]) -> str:
|
||||
if len(bullish_values) < 2:
|
||||
return "single-source"
|
||||
|
||||
spread = max(bullish_values) - min(bullish_values)
|
||||
average = sum(bullish_values) / len(bullish_values)
|
||||
|
||||
if spread <= 10:
|
||||
if average >= 55:
|
||||
return "aligned bullish"
|
||||
if average <= 45:
|
||||
return "aligned bearish"
|
||||
return "aligned neutral"
|
||||
if spread <= 25:
|
||||
return "mixed"
|
||||
return "divergent"
|
||||
|
||||
|
||||
def _fetch_compare(source: str, ticker: str, look_back_days: int, api_key: str) -> dict:
    """Call one Adanos compare endpoint and return its first normalized stock row.

    Raises requests.RequestException (transport errors or via raise_for_status)
    on HTTP failure; callers handle failures per source.
    """
    url = f"{ADANOS_API_BASE_URL}/{source}/stocks/v1/compare"
    response = requests.get(
        url,
        params={"tickers": ticker, "days": look_back_days},
        headers={"X-API-Key": api_key},
        timeout=20,
    )
    response.raise_for_status()
    payload = response.json()
    return _normalize_compare_row(payload)
|
||||
|
||||
|
||||
def _build_source_snapshot(source: str, row: dict) -> dict:
    """Normalize one compare row into the common per-source snapshot shape.

    The three sources share buzz/bullish/trend fields and differ only in their
    activity metric and the "extra" detail line.
    """
    snapshot = {
        "buzz_score": _safe_number(row.get("buzz_score")),
        "bullish_pct": _safe_number(row.get("bullish_pct")),
        "trend": row.get("trend") or "n/a",
    }
    if source == "polymarket":
        snapshot.update(
            label="Polymarket",
            activity_label="trades",
            activity_value=_safe_int(row.get("trade_count")),
            extra=(
                f"markets: {_safe_int(row.get('market_count')) or 0}, "
                f"liquidity: {_format_currency(_safe_number(row.get('total_liquidity')))}"
            ),
        )
    elif source == "reddit":
        snapshot.update(
            label="Reddit",
            activity_label="mentions",
            activity_value=_safe_int(row.get("mentions")),
            extra=(
                f"subreddits: {_safe_int(row.get('subreddit_count')) or 0}, "
                f"upvotes: {_safe_int(row.get('total_upvotes')) or 0}"
            ),
        )
    else:  # "x" — the X/Twitter compare endpoint
        snapshot.update(
            label="X/Twitter",
            activity_label="mentions",
            activity_value=_safe_int(row.get("mentions")),
            extra=(
                f"unique tweets: {_safe_int(row.get('unique_tweets')) or 0}, "
                f"likes: {_safe_int(row.get('total_upvotes')) or 0}"
            ),
        )
    return snapshot


@tool
def get_social_sentiment(
    ticker: Annotated[str, "Ticker symbol"],
    curr_date: Annotated[str, "Current trade date in yyyy-mm-dd format"],
    look_back_days: Annotated[int, "Rolling lookback window in days"] = 7,
) -> str:
    """
    Retrieve a structured social sentiment snapshot for a stock across Reddit, X/Twitter, and Polymarket.

    This tool is intended for current/live workflows. Historical trade dates are not supported because
    the upstream sentiment API exposes rolling windows ending today rather than point-in-time snapshots.
    """
    api_key = os.getenv("ADANOS_API_KEY")
    if not api_key:
        return "Social sentiment tool unavailable: ADANOS_API_KEY is not configured."

    try:
        requested_date = datetime.strptime(curr_date, "%Y-%m-%d").date()
    except ValueError:
        return f"Social sentiment tool unavailable: invalid curr_date '{curr_date}', expected yyyy-mm-dd."

    today = date.today()
    if not _supports_recent_social_window(requested_date, today):
        return (
            f"Social sentiment snapshot unavailable for historical trade date {curr_date}. "
            "This tool only supports current rolling windows ending near today, so use company/news context instead for historical runs."
        )

    normalized_ticker = ticker.strip().upper().lstrip("$")
    # Clamp to the API's supported window. LLM tool calls may pass malformed
    # values despite the declared int type; fall back to the default window.
    try:
        look_back_days = max(1, min(int(look_back_days), 90))
    except (TypeError, ValueError):
        look_back_days = 7

    source_snapshots = {}
    source_errors = {}

    for source in ("reddit", "x", "polymarket"):
        try:
            row = _fetch_compare(source, normalized_ticker, look_back_days, api_key)
        except requests.RequestException as exc:
            # Best effort per source: record the failure and keep going.
            source_errors[source] = str(exc)
            continue
        source_snapshots[source] = _build_source_snapshot(source, row)

    if not source_snapshots:
        if source_errors:
            details = "; ".join(f"{source}: {error}" for source, error in source_errors.items())
            return f"Unable to retrieve social sentiment for {normalized_ticker}: {details}"
        return f"No social sentiment data available for {normalized_ticker}."

    available_buzz = [
        snapshot["buzz_score"]
        for snapshot in source_snapshots.values()
        if snapshot["buzz_score"] is not None
    ]
    available_bullish = [
        snapshot["bullish_pct"]
        for snapshot in source_snapshots.values()
        if snapshot["bullish_pct"] is not None
    ]

    average_buzz = round(sum(available_buzz) / len(available_buzz), 1) if available_buzz else None
    average_bullish = (
        round(sum(available_bullish) / len(available_bullish), 1) if available_bullish else None
    )
    alignment = _alignment_label(available_bullish)

    lines = [
        f"## Social sentiment for {normalized_ticker} (last {look_back_days} days)",
        "",
        f"- Average buzz: {_format_score(average_buzz)}",
        f"- Average bullish: {_format_percent(average_bullish)}",
        f"- Source alignment: {alignment}",
        "",
    ]

    for source in ("reddit", "x", "polymarket"):
        snapshot = source_snapshots.get(source)
        if snapshot is None:
            # Surface per-source failures instead of silently omitting the section.
            if source in source_errors:
                lines.append(f"### {source.title()}")
                lines.append(f"- unavailable: {source_errors[source]}")
                lines.append("")
            continue

        lines.extend(
            [
                f"### {snapshot['label']}",
                f"- {snapshot['activity_label']}: {snapshot['activity_value'] or 0}",
                f"- buzz: {_format_score(snapshot['buzz_score'])}",
                f"- bullish: {_format_percent(snapshot['bullish_pct'])}",
                f"- trend: {snapshot['trend']}",
                f"- {snapshot['extra']}",
                "",
            ]
        )

    return "\n".join(lines).strip()
|
||||
|
|
@ -33,6 +33,7 @@ class GraphSetup:
|
|||
portfolio_manager_memory,
|
||||
conditional_logic: ConditionalLogic,
|
||||
role_llms: Dict[str, Any] | None = None,
|
||||
social_sentiment_available: bool = False,
|
||||
):
|
||||
"""Initialize with required components."""
|
||||
self.quick_thinking_llm = quick_thinking_llm
|
||||
|
|
@ -45,6 +46,7 @@ class GraphSetup:
|
|||
self.invest_judge_memory = invest_judge_memory
|
||||
self.portfolio_manager_memory = portfolio_manager_memory
|
||||
self.conditional_logic = conditional_logic
|
||||
self.social_sentiment_available = social_sentiment_available
|
||||
self.market_analyst_llm = self._get_role_llm("market", self.quick_thinking_llm)
|
||||
self.social_analyst_llm = self._get_role_llm("social", self.quick_thinking_llm)
|
||||
self.news_analyst_llm = self._get_role_llm("news", self.quick_thinking_llm)
|
||||
|
|
@ -143,7 +145,8 @@ class GraphSetup:
|
|||
|
||||
if "social" in selected_analysts:
|
||||
analyst_nodes["social"] = create_social_media_analyst(
|
||||
self.social_analyst_llm
|
||||
self.social_analyst_llm,
|
||||
social_sentiment_available=self.social_sentiment_available,
|
||||
)
|
||||
delete_nodes["social"] = create_msg_delete()
|
||||
tool_nodes["social"] = self.tool_nodes["social"]
|
||||
|
|
|
|||
|
|
@ -22,6 +22,7 @@ from tradingagents.dataflows.config import set_config
|
|||
|
||||
# Import the new abstract tool methods from agent_utils
|
||||
from tradingagents.agents.utils.agent_utils import (
|
||||
build_social_tools,
|
||||
get_stock_data,
|
||||
get_indicators,
|
||||
get_fundamentals,
|
||||
|
|
@ -42,6 +43,7 @@ from tradingagents.agents.utils.agent_utils import (
|
|||
get_sizing_fundamentals,
|
||||
get_sizing_indicator,
|
||||
get_sizing_price_history,
|
||||
has_social_sentiment_support,
|
||||
get_valuation_inputs,
|
||||
get_yield_curve,
|
||||
)
|
||||
|
|
@ -120,6 +122,7 @@ class TradingAgentsGraph:
|
|||
self.quick_thinking_llm = self._create_legacy_llm("quick")
|
||||
self.deep_thinking_llm = self._create_legacy_llm("deep")
|
||||
self.role_llms = self._create_role_llms(selected_analysts)
|
||||
self.social_sentiment_available = has_social_sentiment_support()
|
||||
|
||||
# Initialize memories
|
||||
self.bull_memory = FinancialSituationMemory("bull_memory", self.config)
|
||||
|
|
@ -147,6 +150,7 @@ class TradingAgentsGraph:
|
|||
self.portfolio_manager_memory,
|
||||
self.conditional_logic,
|
||||
role_llms=self.role_llms,
|
||||
social_sentiment_available=self.social_sentiment_available,
|
||||
)
|
||||
|
||||
self.propagator = Propagator()
|
||||
|
|
@ -286,6 +290,9 @@ class TradingAgentsGraph:
|
|||
|
||||
def _create_tool_nodes(self) -> Dict[str, ToolNode]:
|
||||
"""Create tool nodes for different data sources using abstract methods."""
|
||||
social_tools = build_social_tools(
|
||||
getattr(self, "social_sentiment_available", has_social_sentiment_support())
|
||||
)
|
||||
return {
|
||||
"market": ToolNode(
|
||||
[
|
||||
|
|
@ -295,12 +302,7 @@ class TradingAgentsGraph:
|
|||
get_indicators,
|
||||
]
|
||||
),
|
||||
"social": ToolNode(
|
||||
[
|
||||
# News tools for social media analysis
|
||||
get_news,
|
||||
]
|
||||
),
|
||||
"social": ToolNode(social_tools),
|
||||
"news": ToolNode(
|
||||
[
|
||||
# News and insider information
|
||||
|
|
|
|||
Loading…
Reference in New Issue