This commit is contained in:
Alexander Schneider 2026-03-25 20:37:53 +01:00 committed by GitHub
commit 8870fe2a58
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
10 changed files with 350 additions and 14 deletions

View File

@ -4,3 +4,7 @@ GOOGLE_API_KEY=
ANTHROPIC_API_KEY=
XAI_API_KEY=
OPENROUTER_API_KEY=
# Optional social sentiment provider
ADANOS_API_KEY=
ADANOS_BASE_URL=https://api.adanos.org

View File

@ -128,6 +128,7 @@ export ANTHROPIC_API_KEY=... # Anthropic (Claude)
export XAI_API_KEY=... # xAI (Grok)
export OPENROUTER_API_KEY=... # OpenRouter
export ALPHA_VANTAGE_API_KEY=... # Alpha Vantage
export ADANOS_API_KEY=... # Optional: Adanos social sentiment
```
For local models, configure Ollama with `llm_provider: "ollama"` in your config.
@ -137,6 +138,27 @@ Alternatively, copy `.env.example` to `.env` and fill in your keys:
cp .env.example .env
```
### Optional Social Sentiment Provider
The sentiment analyst can optionally use Adanos as a structured social sentiment provider for Reddit, News, X, and Polymarket coverage. Configure it with:
```bash
export ADANOS_API_KEY=...
export ADANOS_BASE_URL=https://api.adanos.org
```
To enable the Adanos-backed social tool in code, set the `social_data` vendor to `adanos`:
```python
from tradingagents.default_config import DEFAULT_CONFIG
config = DEFAULT_CONFIG.copy()
config["data_vendors"] = DEFAULT_CONFIG["data_vendors"].copy()
config["data_vendors"]["social_data"] = "adanos"
```
The Adanos integration is currently best suited for letter-based tickers such as `NVDA` or `TSLA`. Exchange-qualified or numeric symbols may still rely on the framework's existing news tools for coverage.
### CLI Usage
Launch the interactive CLI:
@ -192,6 +214,8 @@ config["llm_provider"] = "openai" # openai, google, anthropic, xai, openr
config["deep_think_llm"] = "gpt-5.2" # Model for complex reasoning
config["quick_think_llm"] = "gpt-5-mini" # Model for quick tasks
config["max_debate_rounds"] = 2
config["data_vendors"] = DEFAULT_CONFIG["data_vendors"].copy()
config["data_vendors"]["social_data"] = "adanos" # Optional: structured social sentiment
ta = TradingAgentsGraph(debug=True, config=config)
_, decision = ta.propagate("NVDA", "2026-01-15")

View File

@ -0,0 +1,91 @@
import unittest
from unittest.mock import patch
from tradingagents.agents.utils.social_data_tools import get_social_sentiment as social_tool
from tradingagents.dataflows import adanos_social, interface
class SocialSentimentToolTests(unittest.TestCase):
    """Tests for social-sentiment vendor routing, the LangChain tool wrapper, and the Adanos backend."""

    def test_route_to_vendor_supports_social_data(self):
        """route_to_vendor dispatches get_social_sentiment calls to the configured vendor callable."""
        with patch("tradingagents.dataflows.interface.get_vendor", return_value="adanos"):
            # Swap in a stub vendor implementation so no network/backend code runs.
            with patch.dict(
                interface.VENDOR_METHODS["get_social_sentiment"],
                {"adanos": lambda ticker, curr_date, look_back_days: f"{ticker}|{curr_date}|{look_back_days}"},
                clear=True,
            ):
                result = interface.route_to_vendor("get_social_sentiment", "NVDA", "2026-01-15", 5)
                self.assertEqual(result, "NVDA|2026-01-15|5")

    def test_route_to_vendor_requires_explicit_social_vendor(self):
        """A 'default' (i.e. unconfigured) social vendor makes route_to_vendor raise instead of guessing."""
        with patch("tradingagents.dataflows.interface.get_vendor", return_value="default"):
            with self.assertRaises(RuntimeError):
                interface.route_to_vendor("get_social_sentiment", "NVDA", "2026-01-15", 5)

    def test_social_tool_routes_to_vendor(self):
        """The LangChain tool forwards its arguments unchanged to route_to_vendor."""
        with patch("tradingagents.agents.utils.social_data_tools.route_to_vendor", return_value="ok") as mock_route:
            result = social_tool.invoke(
                {"ticker": "NVDA", "curr_date": "2026-01-15", "look_back_days": 7}
            )
            self.assertEqual(result, "ok")
            mock_route.assert_called_once_with("get_social_sentiment", "NVDA", "2026-01-15", 7)

    def test_adanos_social_formats_multiple_sources(self):
        """All four Adanos sources are fetched with the expected auth/params and merged into one report."""
        # Canned per-endpoint payloads, keyed by the request path the backend should build.
        payloads = {
            "/reddit/stocks/v1/stock/NVDA": {
                "company_name": "NVIDIA Corporation",
                "buzz_score": 72.4,
                "sentiment_score": 0.31,
                "bullish_pct": 61,
                "bearish_pct": 18,
                "trend": "rising",
                "total_mentions": 142,
                "unique_posts": 48,
            },
            "/news/stocks/v1/stock/NVDA": {
                "source_count": 23,
                "sentiment_score": 0.22,
                "bullish_pct": 54,
                "bearish_pct": 16,
            },
            "/x/stocks/v1/stock/NVDA": {
                "unique_tweets": 305,
                "sentiment_score": 0.27,
                "trend": "rising",
            },
            "/polymarket/stocks/v1/stock/NVDA": {
                "trade_count": 91,
                "market_count": 4,
                "total_liquidity": 120000.0,
                "sentiment_score": 0.14,
            },
        }

        def fake_request(path, *, api_key, base_url, params):
            # Verify the backend passes credentials, base URL, and lookback on every call.
            self.assertEqual(api_key, "test-key")
            self.assertEqual(base_url, "https://api.adanos.org")
            self.assertEqual(params, {"days": 7})
            return payloads[path]

        with patch.dict("os.environ", {"ADANOS_API_KEY": "test-key"}, clear=False):
            with patch("tradingagents.dataflows.adanos_social._request_json", side_effect=fake_request):
                result = adanos_social.get_social_sentiment("NVDA", "2026-01-15", 7)
                self.assertIn("# NVDA Adanos social sentiment", result)
                self.assertIn("## Reddit", result)
                self.assertIn("## News", result)
                self.assertIn("## X/Twitter", result)
                self.assertIn("## Polymarket", result)
                self.assertIn("Buzz score: 72.4", result)
                self.assertIn("Trades: 91", result)

    def test_adanos_social_requires_api_key(self):
        """Without ADANOS_API_KEY the backend returns an explanatory message rather than raising."""
        with patch.dict("os.environ", {}, clear=True):
            result = adanos_social.get_social_sentiment("NVDA", "2026-01-15", 7)
            self.assertIn("ADANOS_API_KEY", result)
# Allow running this test module directly with the interpreter.
if __name__ == "__main__":
    unittest.main()

View File

@ -1,8 +1,8 @@
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
import time
import json
from tradingagents.agents.utils.agent_utils import build_instrument_context, get_news
from tradingagents.dataflows.config import get_config
from tradingagents.agents.utils.agent_utils import build_instrument_context, get_news, get_social_sentiment
from tradingagents.dataflows.interface import is_tool_configured
def create_social_media_analyst(llm):
@ -10,12 +10,22 @@ def create_social_media_analyst(llm):
current_date = state["trade_date"]
instrument_context = build_instrument_context(state["company_of_interest"])
tools = [
get_news,
]
social_tool_enabled = is_tool_configured("get_social_sentiment")
tools = [get_news]
social_tool_guidance = (
"Use the get_news(query, start_date, end_date) tool to search for company-specific news and public discussion context."
)
if social_tool_enabled:
tools.insert(0, get_social_sentiment)
social_tool_guidance = (
"Use the get_social_sentiment(ticker, curr_date, look_back_days) tool for structured multi-source sentiment data when it is available, "
"and use the get_news(query, start_date, end_date) tool to search for company-specific news and public discussion context."
)
system_message = (
"You are a social media and company specific news researcher/analyst tasked with analyzing social media posts, recent company news, and public sentiment for a specific company over the past week. You will be given a company's name your objective is to write a comprehensive long report detailing your analysis, insights, and implications for traders and investors on this company's current state after looking at social media and what people are saying about that company, analyzing sentiment data of what people feel each day about the company, and looking at recent company news. Use the get_news(query, start_date, end_date) tool to search for company-specific news and social media discussions. Try to look at all sources possible from social media to sentiment to news. Provide specific, actionable insights with supporting evidence to help traders make informed decisions."
"You are a social media and company specific news researcher/analyst tasked with analyzing social media posts, recent company news, and public sentiment for a specific company over the past week. You will be given a company's name your objective is to write a comprehensive long report detailing your analysis, insights, and implications for traders and investors on this company's current state after looking at social media and what people are saying about that company, analyzing sentiment data of what people feel each day about the company, and looking at recent company news. "
+ social_tool_guidance
+ " Try to look at all sources possible from social media to sentiment to news. Provide specific, actionable insights with supporting evidence to help traders make informed decisions."
+ """ Make sure to append a Markdown table at the end of the report to organize key points in the report, organized and easy to read."""
)

View File

@ -18,6 +18,9 @@ from tradingagents.agents.utils.news_data_tools import (
get_insider_transactions,
get_global_news
)
from tradingagents.agents.utils.social_data_tools import (
get_social_sentiment,
)
def build_instrument_context(ticker: str) -> str:

View File

@ -0,0 +1,18 @@
from typing import Annotated
from langchain_core.tools import tool
from tradingagents.dataflows.interface import route_to_vendor
@tool
def get_social_sentiment(
    ticker: Annotated[str, "Ticker symbol"],
    curr_date: Annotated[str, "Current date in yyyy-mm-dd format"],
    look_back_days: Annotated[int, "Number of days to look back"] = 7,
) -> str:
    """
    Retrieve structured social and public sentiment for a ticker.
    Uses the configured social_data vendor.
    """
    # NOTE: the docstring above doubles as the LLM-facing tool description, so it is kept minimal.
    # Thin wrapper: vendor selection and fallback behavior live entirely in route_to_vendor.
    return route_to_vendor("get_social_sentiment", ticker, curr_date, look_back_days)

View File

@ -0,0 +1,157 @@
import os
import re
from typing import Any
import requests
# Default Adanos API endpoint; callers can override it via the ADANOS_BASE_URL env var.
ADANOS_DEFAULT_BASE_URL = "https://api.adanos.org"
# HTTP timeout (seconds) used unless ADANOS_TIMEOUT is set in the environment.
ADANOS_DEFAULT_TIMEOUT_SECONDS = 20
# Lookback window (days) applied when callers omit or pass a falsy look_back_days.
DEFAULT_SOCIAL_LOOK_BACK_DAYS = 7
# Purely alphabetic tickers (e.g. NVDA); gates the X/Twitter and Polymarket endpoints.
LETTER_TICKER_REGEX = re.compile(r"^[A-Z]{1,10}$")
# Slightly wider format (digits, optional ".X" class suffix); gates the Reddit and News endpoints.
REDDIT_NEWS_TICKER_REGEX = re.compile(r"^[A-Z][A-Z0-9]{0,9}(?:\.[A-Z])?$")
def _normalize_ticker(ticker: str) -> str:
return ticker.strip().upper().lstrip("$")
def _request_json(path: str, *, api_key: str, base_url: str, params: dict[str, Any] | None = None) -> dict[str, Any]:
    """GET ``base_url + path`` from the Adanos API and return the decoded JSON body.

    Sends the API key via the ``X-API-Key`` header. The timeout (seconds)
    defaults to ADANOS_DEFAULT_TIMEOUT_SECONDS and can be overridden with the
    ADANOS_TIMEOUT env var. Raises ``requests.HTTPError`` on non-2xx responses.
    """
    timeout_seconds = float(os.getenv("ADANOS_TIMEOUT", str(ADANOS_DEFAULT_TIMEOUT_SECONDS)))
    url = f"{base_url.rstrip('/')}{path}"
    headers = {"X-API-Key": api_key, "Accept": "application/json"}
    response = requests.get(url, headers=headers, params=params or {}, timeout=timeout_seconds)
    response.raise_for_status()
    return response.json()
def _iter_source_requests(ticker: str) -> list[tuple[str, str]]:
    """Plan the (source label, API path) requests Adanos can serve for *ticker*.

    Reddit/News accept a slightly wider ticker format than X/Twitter and
    Polymarket; an empty plan means no Adanos endpoint supports this symbol.
    """
    plan: list[tuple[str, str]] = []
    if REDDIT_NEWS_TICKER_REGEX.fullmatch(ticker):
        plan.append(("Reddit", f"/reddit/stocks/v1/stock/{ticker}"))
        plan.append(("News", f"/news/stocks/v1/stock/{ticker}"))
    if LETTER_TICKER_REGEX.fullmatch(ticker):
        plan.append(("X/Twitter", f"/x/stocks/v1/stock/{ticker}"))
        plan.append(("Polymarket", f"/polymarket/stocks/v1/stock/{ticker}"))
    return plan
def _format_source_section(source_name: str, payload: dict[str, Any]) -> str:
lines = [f"## {source_name}"]
company_name = payload.get("company_name")
if company_name:
lines.append(f"- Company: {company_name}")
if payload.get("buzz_score") is not None:
lines.append(f"- Buzz score: {payload['buzz_score']}")
if payload.get("sentiment_score") is not None:
lines.append(f"- Sentiment score: {payload['sentiment_score']}")
if payload.get("bullish_pct") is not None or payload.get("bearish_pct") is not None:
lines.append(
f"- Bullish/Bearish: {payload.get('bullish_pct', 'n/a')}% / {payload.get('bearish_pct', 'n/a')}%"
)
if payload.get("trend"):
lines.append(f"- Trend: {payload['trend']}")
for key, label in (
("total_mentions", "Mentions"),
("unique_posts", "Unique posts"),
("subreddit_count", "Subreddits"),
("source_count", "Sources"),
("unique_tweets", "Unique tweets"),
("market_count", "Active markets"),
("trade_count", "Trades"),
("total_liquidity", "Total liquidity"),
):
value = payload.get(key)
if value is not None:
lines.append(f"- {label}: {value}")
explanation = payload.get("explanation")
if explanation:
lines.append(f"- Explanation: {explanation}")
return "\n".join(lines)
def get_social_sentiment(
    ticker: str,
    curr_date: str,
    look_back_days: int = DEFAULT_SOCIAL_LOOK_BACK_DAYS,
) -> str:
    """Retrieve multi-source social sentiment from Adanos when available.

    Args:
        ticker: Ticker symbol; normalized (trimmed, uppercased, leading "$"
            removed) before endpoint selection.
        curr_date: Analysis date in yyyy-mm-dd format, echoed in the report header.
        look_back_days: Lookback window in days; falsy values fall back to the
            default and the result is clamped to at least 1.

    Returns:
        A Markdown report string. When the API key is missing, the ticker
        format is unsupported, or no source returns data, a human-readable
        explanation is returned instead of raising.
    """
    api_key = os.getenv("ADANOS_API_KEY")
    if not api_key:
        # Soft-fail so the agent tool surfaces a message instead of crashing the graph.
        return (
            "Adanos social sentiment is unavailable because ADANOS_API_KEY is not set. "
            "Configure ADANOS_API_KEY to enable Reddit, X/Twitter, News, and Polymarket sentiment lookups."
        )
    normalized_ticker = _normalize_ticker(ticker)
    source_requests = _iter_source_requests(normalized_ticker)
    if not source_requests:
        # No endpoint accepts this symbol format (e.g. exchange-qualified/numeric tickers).
        return (
            f"Adanos does not currently support the exact ticker format `{ticker}` for per-symbol sentiment lookup. "
            "Exchange-qualified or numeric symbols should fall back to the framework's existing news tools."
        )
    base_url = os.getenv("ADANOS_BASE_URL", ADANOS_DEFAULT_BASE_URL)
    # Clamp the lookback: falsy values take the default, and it is never below 1 day.
    days = max(1, int(look_back_days or DEFAULT_SOCIAL_LOOK_BACK_DAYS))
    sections: list[str] = []
    notes: list[str] = []
    for source_name, path in source_requests:
        try:
            payload = _request_json(path, api_key=api_key, base_url=base_url, params={"days": days})
        except requests.HTTPError as exc:
            status_code = exc.response.status_code if exc.response is not None else None
            if status_code == 404:
                # 404 means this source has no data for the symbol — a coverage gap, not an error.
                notes.append(f"- {source_name}: no coverage for {normalized_ticker}")
                continue
            if status_code in {401, 403}:
                # Bad credentials would fail every remaining source; abort immediately.
                return "Adanos social sentiment request failed due to invalid API credentials."
            notes.append(f"- {source_name}: request failed with HTTP {status_code}")
            continue
        except requests.RequestException as exc:
            # Network-level failure (timeout, DNS, connection): record and try the next source.
            notes.append(f"- {source_name}: request failed ({exc.__class__.__name__})")
            continue
        sections.append(_format_source_section(source_name, payload))
    if not sections:
        # Every source failed or had no coverage; report the per-source notes instead.
        note_block = "\n".join(notes) if notes else "- No compatible Adanos sources were available."
        return (
            f"# {normalized_ticker} Adanos social sentiment\n\n"
            f"Analysis date: {curr_date}\n"
            f"Lookback window: {days} days\n\n"
            "No Adanos sentiment sources returned usable data.\n"
            f"{note_block}"
        )
    output = [
        f"# {normalized_ticker} Adanos social sentiment",
        "",
        f"Analysis date: {curr_date}",
        f"Lookback window: {days} days",
        "",
        *sections,
    ]
    if notes:
        # Partial failures are appended so the analyst knows which sources were missing.
        output.extend(["", "## Coverage notes", *notes])
    return "\n".join(output)

View File

@ -23,6 +23,7 @@ from .alpha_vantage import (
get_global_news as get_alpha_vantage_global_news,
)
from .alpha_vantage_common import AlphaVantageRateLimitError
from .adanos_social import get_social_sentiment as get_adanos_social_sentiment
# Configuration and routing logic
from .config import get_config
@ -57,12 +58,19 @@ TOOLS_CATEGORIES = {
"get_global_news",
"get_insider_transactions",
]
},
"social_data": {
"description": "Structured social and public sentiment data",
"tools": [
"get_social_sentiment",
],
}
}
VENDOR_LIST = [
"yfinance",
"alpha_vantage",
"adanos",
]
# Mapping of methods to their vendor-specific implementations
@ -107,6 +115,10 @@ VENDOR_METHODS = {
"alpha_vantage": get_alpha_vantage_insider_transactions,
"yfinance": get_yfinance_insider_transactions,
},
# social_data
"get_social_sentiment": {
"adanos": get_adanos_social_sentiment,
},
}
def get_category_for_method(method: str) -> str:
@ -131,10 +143,25 @@ def get_vendor(category: str, method: str = None) -> str:
# Fall back to category-level configuration
return config.get("data_vendors", {}).get(category, "default")
def is_tool_configured(method: str) -> bool:
    """Return True when a tool has an explicit non-default vendor configured."""
    # The vendor setting may be a comma-separated list; "default"/"none"/"disabled"
    # entries (case-insensitive) all mean "not configured".
    vendor_setting = get_vendor(get_category_for_method(method), method)
    disabled_markers = {"default", "none", "disabled"}
    for raw_entry in str(vendor_setting or "").split(","):
        entry = raw_entry.strip().lower()
        if entry and entry not in disabled_markers:
            return True
    return False
def route_to_vendor(method: str, *args, **kwargs):
"""Route method calls to appropriate vendor implementation with fallback support."""
category = get_category_for_method(method)
vendor_config = get_vendor(category, method)
if method == "get_social_sentiment" and not is_tool_configured(method):
raise RuntimeError("No configured vendor for 'get_social_sentiment'")
primary_vendors = [v.strip() for v in vendor_config.split(',')]
if method not in VENDOR_METHODS:
@ -159,4 +186,4 @@ def route_to_vendor(method: str, *args, **kwargs):
except AlphaVantageRateLimitError:
continue # Only rate limits trigger fallback
raise RuntimeError(f"No available vendor for '{method}'")
raise RuntimeError(f"No available vendor for '{method}'")

View File

@ -27,6 +27,7 @@ DEFAULT_CONFIG = {
"technical_indicators": "yfinance", # Options: alpha_vantage, yfinance
"fundamental_data": "yfinance", # Options: alpha_vantage, yfinance
"news_data": "yfinance", # Options: alpha_vantage, yfinance
"social_data": "default", # Options: default (disabled), adanos
},
# Tool-level configuration (takes precedence over category-level)
"tool_vendors": {

View File

@ -19,6 +19,7 @@ from tradingagents.agents.utils.agent_states import (
RiskDebateState,
)
from tradingagents.dataflows.config import set_config
from tradingagents.dataflows.interface import is_tool_configured
# Import the new abstract tool methods from agent_utils
from tradingagents.agents.utils.agent_utils import (
@ -30,7 +31,8 @@ from tradingagents.agents.utils.agent_utils import (
get_income_statement,
get_news,
get_insider_transactions,
get_global_news
get_global_news,
get_social_sentiment,
)
from .conditional_logic import ConditionalLogic
@ -157,6 +159,10 @@ class TradingAgentsGraph:
def _create_tool_nodes(self) -> Dict[str, ToolNode]:
"""Create tool nodes for different data sources using abstract methods."""
social_tools = [get_news]
if is_tool_configured("get_social_sentiment"):
social_tools.insert(0, get_social_sentiment)
return {
"market": ToolNode(
[
@ -166,12 +172,7 @@ class TradingAgentsGraph:
get_indicators,
]
),
"social": ToolNode(
[
# News tools for social media analysis
get_news,
]
),
"social": ToolNode(social_tools),
"news": ToolNode(
[
# News and insider information