Merge 11d924dba3 into f362a160c3
This commit is contained in:
commit
a2408634c9
|
|
@ -128,6 +128,7 @@ export ANTHROPIC_API_KEY=... # Anthropic (Claude)
|
|||
export XAI_API_KEY=... # xAI (Grok)
|
||||
export OPENROUTER_API_KEY=... # OpenRouter
|
||||
export ALPHA_VANTAGE_API_KEY=... # Alpha Vantage
|
||||
export ADANOS_API_KEY=... # Optional, for live social sentiment snapshots in the social analyst
|
||||
```
|
||||
|
||||
For local models, configure Ollama with `llm_provider: "ollama"` in your config.
|
||||
|
|
|
|||
|
|
@ -0,0 +1,93 @@
|
|||
import unittest
|
||||
from datetime import date
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
from tradingagents.agents.utils.social_data_tools import (
|
||||
get_social_sentiment,
|
||||
has_social_sentiment_support,
|
||||
)
|
||||
|
||||
|
||||
class SocialDataToolsTest(unittest.TestCase):
    """Unit tests for the Adanos-backed social sentiment tool helpers."""

    def test_support_flag_requires_api_key(self):
        """has_social_sentiment_support() is driven solely by ADANOS_API_KEY."""
        with patch.dict("os.environ", {}, clear=True):
            self.assertFalse(has_social_sentiment_support())

        with patch.dict("os.environ", {"ADANOS_API_KEY": "sk_test"}, clear=True):
            self.assertTrue(has_social_sentiment_support())

    @patch("tradingagents.agents.utils.social_data_tools.requests.get")
    def test_historical_trade_dates_do_not_hit_network(self, mock_get):
        """A clearly historical curr_date short-circuits before any HTTP call."""
        with patch.dict("os.environ", {"ADANOS_API_KEY": "sk_test"}, clear=True):
            result = get_social_sentiment.invoke(
                {"ticker": "TSLA", "curr_date": "2024-01-15", "look_back_days": 7}
            )

        self.assertIn("historical trade date", result)
        mock_get.assert_not_called()

    @patch("tradingagents.agents.utils.social_data_tools.requests.get")
    def test_formats_cross_source_snapshot(self, mock_get):
        """A current-date request merges Reddit, X, and Polymarket rows into markdown."""
        # One mocked HTTP response per source, consumed via side_effect in the
        # tool's fixed ("reddit", "x", "polymarket") fetch order.
        reddit_response = Mock()
        reddit_response.raise_for_status.return_value = None
        reddit_response.json.return_value = {
            "stocks": [
                {
                    "ticker": "TSLA",
                    "mentions": 647,
                    "buzz_score": 81.2,
                    "bullish_pct": 46,
                    "trend": "rising",
                    "subreddit_count": 23,
                    "total_upvotes": 4120,
                }
            ]
        }

        x_response = Mock()
        x_response.raise_for_status.return_value = None
        x_response.json.return_value = {
            "stocks": [
                {
                    "ticker": "TSLA",
                    "mentions": 2650,
                    "buzz_score": 86.4,
                    "bullish_pct": 58,
                    "trend": "falling",
                    "unique_tweets": 392,
                    "total_upvotes": 95000,
                }
            ]
        }

        polymarket_response = Mock()
        polymarket_response.raise_for_status.return_value = None
        polymarket_response.json.return_value = {
            "stocks": [
                {
                    "ticker": "TSLA",
                    "trade_count": 3731,
                    "market_count": 71,
                    "buzz_score": 55.7,
                    "bullish_pct": 72,
                    "trend": "stable",
                    "total_liquidity": 8400000,
                }
            ]
        }

        mock_get.side_effect = [reddit_response, x_response, polymarket_response]

        with patch.dict("os.environ", {"ADANOS_API_KEY": "sk_test"}, clear=True):
            # Pin "today" so curr_date counts as current rather than historical.
            with patch("tradingagents.agents.utils.social_data_tools.date") as mock_date:
                mock_date.today.return_value = date(2026, 3, 19)
                result = get_social_sentiment.invoke(
                    {"ticker": "TSLA", "curr_date": "2026-03-19", "look_back_days": 7}
                )

        # (81.2 + 86.4 + 55.7) / 3 == 74.4; (46 + 58 + 72) / 3 == 58.7 (rounded).
        self.assertIn("## Social sentiment for TSLA", result)
        self.assertIn("Average buzz: 74.4/100", result)
        self.assertIn("Average bullish: 58.7%", result)
        self.assertIn("### Reddit", result)
        self.assertIn("### X/Twitter", result)
        self.assertIn("### Polymarket", result)
|
|
@ -1,22 +1,17 @@
|
|||
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
|
||||
import time
|
||||
import json
|
||||
from tradingagents.agents.utils.agent_utils import get_news
|
||||
from tradingagents.dataflows.config import get_config
|
||||
from tradingagents.agents.utils.agent_utils import get_news, get_social_sentiment, has_social_sentiment_support
|
||||
|
||||
|
||||
def create_social_media_analyst(llm):
|
||||
def social_media_analyst_node(state):
|
||||
current_date = state["trade_date"]
|
||||
ticker = state["company_of_interest"]
|
||||
company_name = state["company_of_interest"]
|
||||
|
||||
tools = [
|
||||
get_news,
|
||||
]
|
||||
tools = [get_news]
|
||||
if has_social_sentiment_support():
|
||||
tools.insert(0, get_social_sentiment)
|
||||
|
||||
system_message = (
|
||||
"You are a social media and company specific news researcher/analyst tasked with analyzing social media posts, recent company news, and public sentiment for a specific company over the past week. You will be given a company's name your objective is to write a comprehensive long report detailing your analysis, insights, and implications for traders and investors on this company's current state after looking at social media and what people are saying about that company, analyzing sentiment data of what people feel each day about the company, and looking at recent company news. Use the get_news(query, start_date, end_date) tool to search for company-specific news and social media discussions. Try to look at all sources possible from social media to sentiment to news. Do not simply state the trends are mixed, provide detailed and finegrained analysis and insights that may help traders make decisions."
|
||||
"You are a social media and company specific news researcher/analyst tasked with analyzing social media posts, recent company news, and public sentiment for a specific company over the past week. You will be given a company's name and your objective is to write a comprehensive long report detailing your analysis, insights, and implications for traders and investors. When available, use the get_social_sentiment(ticker, curr_date, look_back_days) tool first to capture current cross-source social sentiment from Reddit, X/Twitter, and Polymarket. Then use the get_news(query, start_date, end_date) tool to add company-specific news context. If the social sentiment tool reports that the requested trade date is historical, rely on news context and state that live social sentiment was unavailable for that backtest date. Do not simply state the trends are mixed; provide detailed and finegrained analysis and insights that may help traders make decisions."
|
||||
+ """ Make sure to append a Markdown table at the end of the report to organize key points in the report, organized and easy to read.""",
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -18,6 +18,10 @@ from tradingagents.agents.utils.news_data_tools import (
|
|||
get_insider_transactions,
|
||||
get_global_news
|
||||
)
|
||||
from tradingagents.agents.utils.social_data_tools import (
|
||||
get_social_sentiment,
|
||||
has_social_sentiment_support,
|
||||
)
|
||||
|
||||
def create_msg_delete():
|
||||
def delete_messages(state):
|
||||
|
|
@ -35,4 +39,4 @@ def create_msg_delete():
|
|||
return delete_messages
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,215 @@
|
|||
from datetime import date, datetime
|
||||
import os
|
||||
from typing import Annotated
|
||||
|
||||
import requests
|
||||
from langchain_core.tools import tool
|
||||
|
||||
|
||||
# Base URL for the Adanos social sentiment API; overridable via the
# ADANOS_API_BASE_URL environment variable (trailing slashes are stripped).
ADANOS_API_BASE_URL = os.getenv("ADANOS_API_BASE_URL", "https://api.adanos.org").rstrip("/")
# How many days a requested trade date may differ from today and still be
# treated as a "current" rolling window (the upstream API has no historical snapshots).
CURRENT_WINDOW_BUFFER_DAYS = 1
|
||||
|
||||
|
||||
def has_social_sentiment_support() -> bool:
    """Report whether the optional Adanos-backed social sentiment tool is usable.

    The tool is enabled purely by the presence of a non-empty
    ADANOS_API_KEY environment variable.
    """
    api_key = os.getenv("ADANOS_API_KEY")
    return api_key is not None and api_key != ""
|
||||
|
||||
|
||||
def _safe_number(value, digits: int = 1):
|
||||
if value is None:
|
||||
return None
|
||||
try:
|
||||
number = float(value)
|
||||
except (TypeError, ValueError):
|
||||
return None
|
||||
return round(number, digits)
|
||||
|
||||
|
||||
def _safe_int(value):
|
||||
if value is None:
|
||||
return None
|
||||
try:
|
||||
return int(float(value))
|
||||
except (TypeError, ValueError):
|
||||
return None
|
||||
|
||||
|
||||
def _format_percent(value) -> str:
|
||||
if value is None:
|
||||
return "n/a"
|
||||
return f"{value:.1f}%"
|
||||
|
||||
|
||||
def _format_score(value) -> str:
|
||||
if value is None:
|
||||
return "n/a"
|
||||
return f"{value:.1f}/100"
|
||||
|
||||
|
||||
def _format_currency(value) -> str:
|
||||
if value is None:
|
||||
return "n/a"
|
||||
if value >= 1_000_000_000:
|
||||
return f"${value / 1_000_000_000:.1f}B"
|
||||
if value >= 1_000_000:
|
||||
return f"${value / 1_000_000:.1f}M"
|
||||
if value >= 1_000:
|
||||
return f"${value / 1_000:.1f}K"
|
||||
return f"${value:.0f}"
|
||||
|
||||
|
||||
def _normalize_compare_row(payload: dict) -> dict:
|
||||
stocks = payload.get("stocks") if isinstance(payload, dict) else None
|
||||
if not isinstance(stocks, list) or len(stocks) == 0 or not isinstance(stocks[0], dict):
|
||||
return {}
|
||||
return stocks[0]
|
||||
|
||||
|
||||
def _alignment_label(bullish_values: list[float]) -> str:
|
||||
if len(bullish_values) < 2:
|
||||
return "single-source"
|
||||
|
||||
spread = max(bullish_values) - min(bullish_values)
|
||||
average = sum(bullish_values) / len(bullish_values)
|
||||
|
||||
if spread <= 10:
|
||||
if average >= 55:
|
||||
return "aligned bullish"
|
||||
if average <= 45:
|
||||
return "aligned bearish"
|
||||
return "aligned neutral"
|
||||
if spread <= 25:
|
||||
return "mixed"
|
||||
return "divergent"
|
||||
|
||||
|
||||
def _fetch_compare(source: str, ticker: str, look_back_days: int, api_key: str) -> dict:
    """Call one Adanos per-source compare endpoint and return its first stock row.

    Propagates requests exceptions (transport failures and, via
    raise_for_status, HTTP error statuses) to the caller.
    """
    url = f"{ADANOS_API_BASE_URL}/{source}/stocks/v1/compare"
    query = {"tickers": ticker, "days": look_back_days}
    response = requests.get(
        url,
        params=query,
        headers={"X-API-Key": api_key},
        timeout=20,
    )
    response.raise_for_status()
    return _normalize_compare_row(response.json())
|
||||
|
||||
|
||||
@tool
def get_social_sentiment(
    ticker: Annotated[str, "Ticker symbol"],
    curr_date: Annotated[str, "Current trade date in yyyy-mm-dd format"],
    look_back_days: Annotated[int, "Rolling lookback window in days"] = 7,
) -> str:
    """
    Retrieve a structured social sentiment snapshot for a stock across Reddit, X/Twitter, and Polymarket.

    This tool is intended for current/live workflows. Historical trade dates are not supported because
    the upstream sentiment API exposes rolling windows ending today rather than point-in-time snapshots.
    """
    # Mirror has_social_sentiment_support(): without a key, fail soft with a message.
    api_key = os.getenv("ADANOS_API_KEY")
    if not api_key:
        return "Social sentiment tool unavailable: ADANOS_API_KEY is not configured."

    try:
        requested_date = datetime.strptime(curr_date, "%Y-%m-%d").date()
    except ValueError:
        return f"Social sentiment tool unavailable: invalid curr_date '{curr_date}', expected yyyy-mm-dd."

    # date.today() is resolved through this module's `date` name so tests can patch it.
    today = date.today()
    if abs((today - requested_date).days) > CURRENT_WINDOW_BUFFER_DAYS:
        # Backtest guard: refuse dates outside the small "current" buffer and
        # tell the agent to rely on news context instead of hitting the network.
        return (
            f"Social sentiment snapshot unavailable for historical trade date {curr_date}. "
            "This tool only supports current rolling windows ending near today, so use company/news context instead for historical runs."
        )

    # Normalize cashtag-style input like " $tsla " to "TSLA".
    normalized_ticker = ticker.strip().upper().lstrip("$")
    # Clamp the lookback window to a 1-90 day range.
    look_back_days = max(1, min(int(look_back_days), 90))

    source_snapshots = {}  # source -> normalized display fields
    source_errors = {}  # source -> error string for sources that failed

    for source in ("reddit", "x", "polymarket"):
        try:
            row = _fetch_compare(source, normalized_ticker, look_back_days, api_key)
        except requests.RequestException as exc:
            # A failing source is recorded but does not sink the whole snapshot.
            source_errors[source] = str(exc)
            continue

        if source == "polymarket":
            # Polymarket measures activity in trades rather than mentions.
            activity = _safe_int(row.get("trade_count"))
            source_snapshots[source] = {
                "label": "Polymarket",
                "activity_label": "trades",
                "activity_value": activity,
                "buzz_score": _safe_number(row.get("buzz_score")),
                "bullish_pct": _safe_number(row.get("bullish_pct")),
                "trend": row.get("trend") or "n/a",
                "extra": f"markets: {_safe_int(row.get('market_count')) or 0}, liquidity: {_format_currency(_safe_number(row.get('total_liquidity')))}",
            }
        elif source == "reddit":
            activity = _safe_int(row.get("mentions"))
            source_snapshots[source] = {
                "label": "Reddit",
                "activity_label": "mentions",
                "activity_value": activity,
                "buzz_score": _safe_number(row.get("buzz_score")),
                "bullish_pct": _safe_number(row.get("bullish_pct")),
                "trend": row.get("trend") or "n/a",
                "extra": f"subreddits: {_safe_int(row.get('subreddit_count')) or 0}, upvotes: {_safe_int(row.get('total_upvotes')) or 0}",
            }
        else:
            # X/Twitter: upstream reuses the "total_upvotes" field for likes.
            activity = _safe_int(row.get("mentions"))
            source_snapshots[source] = {
                "label": "X/Twitter",
                "activity_label": "mentions",
                "activity_value": activity,
                "buzz_score": _safe_number(row.get("buzz_score")),
                "bullish_pct": _safe_number(row.get("bullish_pct")),
                "trend": row.get("trend") or "n/a",
                "extra": f"unique tweets: {_safe_int(row.get('unique_tweets')) or 0}, likes: {_safe_int(row.get('total_upvotes')) or 0}",
            }

    if not source_snapshots:
        # Every source failed or returned nothing; surface errors when present.
        if source_errors:
            details = "; ".join(f"{source}: {error}" for source, error in source_errors.items())
            return f"Unable to retrieve social sentiment for {normalized_ticker}: {details}"
        return f"No social sentiment data available for {normalized_ticker}."

    # Aggregate only metrics a source actually supplied (None values excluded).
    available_buzz = [snapshot["buzz_score"] for snapshot in source_snapshots.values() if snapshot["buzz_score"] is not None]
    available_bullish = [snapshot["bullish_pct"] for snapshot in source_snapshots.values() if snapshot["bullish_pct"] is not None]

    average_buzz = round(sum(available_buzz) / len(available_buzz), 1) if available_buzz else None
    average_bullish = round(sum(available_bullish) / len(available_bullish), 1) if available_bullish else None
    alignment = _alignment_label(available_bullish)

    # Build the markdown report: summary header, then one section per source.
    lines = [
        f"## Social sentiment for {normalized_ticker} (last {look_back_days} days)",
        "",
        f"- Average buzz: {_format_score(average_buzz)}",
        f"- Average bullish: {_format_percent(average_bullish)}",
        f"- Source alignment: {alignment}",
        "",
    ]

    for source in ("reddit", "x", "polymarket"):
        snapshot = source_snapshots.get(source)
        if snapshot is None:
            # Failed sources still get a section so the agent knows they were tried.
            if source in source_errors:
                lines.append(f"### {source.title()}")
                lines.append(f"- unavailable: {source_errors[source]}")
                lines.append("")
            continue

        lines.extend(
            [
                f"### {snapshot['label']}",
                f"- {snapshot['activity_label']}: {snapshot['activity_value'] or 0}",
                f"- buzz: {_format_score(snapshot['buzz_score'])}",
                f"- bullish: {_format_percent(snapshot['bullish_pct'])}",
                f"- trend: {snapshot['trend']}",
                f"- {snapshot['extra']}",
                "",
            ]
        )

    return "\n".join(lines).strip()
|
||||
|
|
@ -29,6 +29,8 @@ from tradingagents.agents.utils.agent_utils import (
|
|||
get_cashflow,
|
||||
get_income_statement,
|
||||
get_news,
|
||||
get_social_sentiment,
|
||||
has_social_sentiment_support,
|
||||
get_insider_transactions,
|
||||
get_global_news
|
||||
)
|
||||
|
|
@ -152,6 +154,10 @@ class TradingAgentsGraph:
|
|||
|
||||
def _create_tool_nodes(self) -> Dict[str, ToolNode]:
|
||||
"""Create tool nodes for different data sources using abstract methods."""
|
||||
social_tools = [get_news]
|
||||
if has_social_sentiment_support():
|
||||
social_tools.insert(0, get_social_sentiment)
|
||||
|
||||
return {
|
||||
"market": ToolNode(
|
||||
[
|
||||
|
|
@ -161,12 +167,7 @@ class TradingAgentsGraph:
|
|||
get_indicators,
|
||||
]
|
||||
),
|
||||
"social": ToolNode(
|
||||
[
|
||||
# News tools for social media analysis
|
||||
get_news,
|
||||
]
|
||||
),
|
||||
"social": ToolNode(social_tools),
|
||||
"news": ToolNode(
|
||||
[
|
||||
# News and insider information
|
||||
|
|
|
|||
Loading…
Reference in New Issue