feat: add medium-term positioning upgrade
This commit is contained in:
parent
d27de67330
commit
79175b4d4c
|
|
@ -0,0 +1,66 @@
|
|||
from tradingagents.default_config import DEFAULT_CONFIG
|
||||
from tradingagents.graph.conditional_logic import ConditionalLogic
|
||||
|
||||
|
||||
def _make_invest_state(count: int, current_response: str = "Bull: argument") -> dict:
|
||||
return {
|
||||
"investment_debate_state": {
|
||||
"bull_history": "",
|
||||
"bear_history": "",
|
||||
"history": "",
|
||||
"current_response": current_response,
|
||||
"judge_decision": "",
|
||||
"count": count,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def _make_risk_state(count: int, latest_speaker: str = "Aggressive") -> dict:
|
||||
return {
|
||||
"risk_debate_state": {
|
||||
"aggressive_history": "",
|
||||
"conservative_history": "",
|
||||
"neutral_history": "",
|
||||
"history": "",
|
||||
"latest_speaker": latest_speaker,
|
||||
"current_aggressive_response": "",
|
||||
"current_conservative_response": "",
|
||||
"current_neutral_response": "",
|
||||
"judge_decision": "",
|
||||
"count": count,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def test_conditional_logic_defaults_are_2():
    """Both round limits default to two when no overrides are given."""
    default_logic = ConditionalLogic()

    assert default_logic.max_risk_discuss_rounds == 2
    assert default_logic.max_debate_rounds == 2
|
||||
|
||||
|
||||
def test_investment_debate_threshold_uses_max_debate_rounds():
    """Debate continues below the round cap and routes to the judge at it."""
    logic = ConditionalLogic(max_debate_rounds=2)

    mid_debate = _make_invest_state(3, "Bear: rebuttal")
    final_turn = _make_invest_state(4, "Bull: final")

    assert logic.should_continue_debate(mid_debate) == "Bull Researcher"
    assert logic.should_continue_debate(final_turn) == "Research Manager"
|
||||
|
||||
|
||||
def test_risk_debate_threshold_uses_max_risk_discuss_rounds():
    """Risk discussion continues below the cap and exits to the PM at it."""
    logic = ConditionalLogic(max_risk_discuss_rounds=2)

    ongoing = _make_risk_state(5, latest_speaker="Neutral")
    finished = _make_risk_state(6, latest_speaker="Aggressive")

    assert logic.should_continue_risk_analysis(ongoing) == "Aggressive Analyst"
    assert logic.should_continue_risk_analysis(finished) == "Portfolio Manager"
|
||||
|
||||
|
||||
def test_default_config_round_defaults_are_2():
    """DEFAULT_CONFIG ships with two rounds for both debate knobs."""
    for key in ("max_debate_rounds", "max_risk_discuss_rounds"):
        assert DEFAULT_CONFIG[key] == 2
|
||||
|
|
@ -0,0 +1,101 @@
|
|||
from unittest.mock import patch
|
||||
|
||||
import pandas as pd
|
||||
|
||||
|
||||
def _make_series(start: float, end: float, length: int = 250) -> pd.Series:
|
||||
dates = pd.date_range("2025-09-01", periods=length, freq="B")
|
||||
step = (end - start) / max(length - 1, 1)
|
||||
return pd.Series([start + step * i for i in range(length)], index=dates)
|
||||
|
||||
|
||||
def _mock_download_for_risk_on(symbols, **_kwargs):
    """Stand-in for yfinance.download returning uniformly risk-on price paths."""
    requested = [symbols] if isinstance(symbols, str) else symbols

    # Falling VIX, rising equities and credit, cyclicals outpacing defensives.
    series_map = {
        "^VIX": _make_series(30, 12),
        "^GSPC": _make_series(4000, 6000),
        "HYG": _make_series(75, 90),
        "LQD": _make_series(100, 100),
        "TLT": _make_series(100, 99),
        "SHY": _make_series(100, 100),
        "XLU": _make_series(60, 61),
        "XLP": _make_series(70, 71),
        "XLV": _make_series(80, 81),
        "XLY": _make_series(100, 120),
        "XLK": _make_series(100, 125),
        "XLI": _make_series(100, 118),
    }

    closes = pd.DataFrame({name: series_map[name] for name in requested})
    return pd.concat({"Close": closes}, axis=1)
|
||||
|
||||
|
||||
def test_signal_vix_level_thresholds():
    """Scores +1 below the risk-on bound, -1 above risk-off, 0 between."""
    from tradingagents.dataflows.macro_regime import _signal_vix_level

    expectations = {14.0: 1, 20.0: 0, 30.0: -1}
    for vix, expected_score in expectations.items():
        assert _signal_vix_level(vix)[0] == expected_score
|
||||
|
||||
|
||||
def test_classify_macro_regime_returns_risk_on_for_mocked_risk_on_data():
    """A uniformly bullish tape should classify as risk-on with score >= 3."""
    with patch("yfinance.download", side_effect=_mock_download_for_risk_on):
        from tradingagents.dataflows.macro_regime import classify_macro_regime

        result = classify_macro_regime()

        assert result["score"] >= 3
        assert result["regime"] == "risk-on"
|
||||
|
||||
|
||||
def test_classify_macro_regime_returns_required_keys_and_six_signals():
    """The classifier payload exposes its documented keys and six signals."""
    with patch("yfinance.download", side_effect=_mock_download_for_risk_on):
        from tradingagents.dataflows.macro_regime import classify_macro_regime

        result = classify_macro_regime()

        required_keys = {"regime", "score", "confidence", "signals", "summary"}
        assert required_keys.issubset(result)
        assert len(result["signals"]) == 6
|
||||
|
||||
|
||||
def test_classify_macro_regime_uses_curr_date_for_download_window():
    """Every download window must end the day after the requested curr_date.

    Fix: the original `all(...)` over `calls` passed vacuously when no
    download was recorded; assert the list is non-empty first.
    """
    calls = []

    def fake_download(symbols, **kwargs):
        calls.append(kwargs)
        return _mock_download_for_risk_on(symbols, **kwargs)

    with patch("yfinance.download", side_effect=fake_download):
        from tradingagents.dataflows.macro_regime import classify_macro_regime

        classify_macro_regime("2026-03-17")

    # Guard against a vacuously-true all() if yfinance was never called.
    assert calls
    assert all(call["end"].startswith("2026-03-18") for call in calls)
|
||||
|
||||
|
||||
def test_format_macro_report_contains_signal_breakdown_and_trading_implications():
    """The rendered report carries the signal table, advice section, and regime."""
    from tradingagents.dataflows.macro_regime import format_macro_report

    regime_payload = {
        "regime": "risk-on",
        "score": 4,
        "confidence": "high",
        "vix": 14.5,
        "signals": [
            {"name": "vix_level", "score": 1, "description": "Low VIX"},
            {"name": "vix_trend", "score": 1, "description": "Falling VIX"},
            {"name": "credit_spread", "score": 1, "description": "Improving"},
            {"name": "yield_curve", "score": 0, "description": "Neutral"},
            {"name": "market_breadth", "score": 1, "description": "Above SMA"},
            {"name": "sector_rotation", "score": 0, "description": "Neutral"},
        ],
        "summary": "Risk-on summary",
    }

    report = format_macro_report(regime_payload)

    for fragment in ("Signal Breakdown", "What This Means for Trading", "RISK-ON"):
        assert fragment in report
|
||||
|
|
@ -0,0 +1,109 @@
|
|||
from unittest.mock import patch
|
||||
|
||||
import pandas as pd
|
||||
|
||||
|
||||
def _make_price_history(symbols: list[str]) -> pd.DataFrame:
|
||||
dates = pd.date_range("2025-09-01", periods=130, freq="B")
|
||||
payload = {}
|
||||
for index, symbol in enumerate(symbols, start=1):
|
||||
payload[symbol] = pd.Series(
|
||||
[100 * (1 + 0.001 * index) ** i for i in range(len(dates))],
|
||||
index=dates,
|
||||
)
|
||||
frame = pd.DataFrame(payload, index=dates)
|
||||
return pd.concat({"Close": frame}, axis=1)
|
||||
|
||||
|
||||
def test_get_sector_peers_maps_known_sector_and_excludes_self():
    """A Technology ticker maps to the technology peer list minus itself."""
    with patch("yfinance.Ticker") as mock_ticker:
        mock_ticker.return_value.info = {"sector": "Technology"}
        from tradingagents.dataflows.peer_comparison import get_sector_peers

        display_name, normalised_key, peer_list = get_sector_peers("AAPL")

        assert (display_name, normalised_key) == ("Technology", "technology")
        assert "MSFT" in peer_list
        assert "AAPL" not in peer_list
|
||||
|
||||
|
||||
def test_compute_relative_performance_returns_ranked_markdown_table():
    """The report renders a markdown table with target row, ETF row, and alpha."""
    history = _make_price_history(["AAPL", "MSFT", "NVDA", "XLK"])
    with patch("yfinance.download", return_value=history):
        from tradingagents.dataflows.peer_comparison import compute_relative_performance

        report = compute_relative_performance("AAPL", "technology", ["MSFT", "NVDA"])

    expected_fragments = (
        "| Symbol | Role |",
        "► TARGET",
        "ETF Benchmark",
        "Alpha vs Sector ETF",
    )
    for fragment in expected_fragments:
        assert fragment in report
|
||||
|
||||
|
||||
def test_compute_relative_performance_ranks_against_peers_only():
    """The slowest grower among the three ranked names comes last."""
    history = _make_price_history(["AAPL", "MSFT", "NVDA", "XLK"])
    with patch("yfinance.download", return_value=history):
        from tradingagents.dataflows.peer_comparison import compute_relative_performance

        report = compute_relative_performance("AAPL", "technology", ["MSFT", "NVDA"])

    assert "Peer rank (3M): 3/3" in report
|
||||
|
||||
|
||||
def test_get_sector_relative_report_handles_unknown_sector_gracefully():
    """An unmapped sector should degrade to a no-benchmark message."""
    with patch("yfinance.Ticker") as mock_ticker:
        mock_ticker.return_value.info = {"sector": "Unknown Sector"}
        from tradingagents.dataflows.peer_comparison import get_sector_relative_report

        result = get_sector_relative_report("AAPL")

        assert "No ETF benchmark" in result
|
||||
|
||||
|
||||
def test_get_sector_relative_report_handles_missing_columns_gracefully():
    """Missing target columns should render as N/A instead of raising."""
    dates = pd.date_range("2025-09-01", periods=130, freq="B")
    etf_only = pd.DataFrame({"XLK": pd.Series(range(130), index=dates)}, index=dates)
    partial_history = pd.concat({"Close": etf_only}, axis=1)

    with patch("yfinance.Ticker") as mock_ticker, patch(
        "yfinance.download",
        return_value=partial_history,
    ):
        mock_ticker.return_value.info = {"sector": "Technology"}
        from tradingagents.dataflows.peer_comparison import get_sector_relative_report

        report = get_sector_relative_report("AAPL")

    assert "| 1-Week |" in report
    assert "N/A" in report
|
||||
|
||||
|
||||
def test_peer_comparison_uses_curr_date_as_end_date():
    """The first download call must end the day after the given curr_date."""
    recorded_kwargs = []

    def fake_download(symbols, **kwargs):
        recorded_kwargs.append(kwargs)
        return _make_price_history(["AAPL", "MSFT", "XLK"])

    with patch("yfinance.download", side_effect=fake_download):
        from tradingagents.dataflows.peer_comparison import compute_relative_performance

        compute_relative_performance(
            "AAPL",
            "technology",
            ["MSFT"],
            curr_date="2026-03-17",
        )

    assert recorded_kwargs[0]["end"].startswith("2026-03-18")
|
||||
|
|
@ -0,0 +1,132 @@
|
|||
import pandas as pd
|
||||
|
||||
|
||||
def _make_income_csv(n_quarters: int = 8) -> str:
|
||||
dates = pd.date_range("2024-01-01", periods=n_quarters, freq="QS")
|
||||
revenues = [10_000_000_000 * (1.05**i) for i in range(n_quarters)]
|
||||
gross_profit = [revenue * 0.40 for revenue in revenues]
|
||||
operating_income = [revenue * 0.20 for revenue in revenues]
|
||||
net_income = [revenue * 0.15 for revenue in revenues]
|
||||
df = pd.DataFrame(
|
||||
{
|
||||
"Total Revenue": revenues,
|
||||
"Gross Profit": gross_profit,
|
||||
"Operating Income": operating_income,
|
||||
"Net Income": net_income,
|
||||
},
|
||||
index=dates,
|
||||
)
|
||||
return df.to_csv()
|
||||
|
||||
|
||||
def _make_balance_csv(n_quarters: int = 8) -> str:
|
||||
dates = pd.date_range("2024-01-01", periods=n_quarters, freq="QS")
|
||||
df = pd.DataFrame(
|
||||
{
|
||||
"Total Assets": [50_000_000_000] * n_quarters,
|
||||
"Total Debt": [10_000_000_000] * n_quarters,
|
||||
"Stockholders Equity": [20_000_000_000] * n_quarters,
|
||||
},
|
||||
index=dates,
|
||||
)
|
||||
return df.to_csv()
|
||||
|
||||
|
||||
def _make_cashflow_csv(n_quarters: int = 8) -> str:
|
||||
dates = pd.date_range("2024-01-01", periods=n_quarters, freq="QS")
|
||||
df = pd.DataFrame(
|
||||
{
|
||||
"Free Cash Flow": [2_000_000_000] * n_quarters,
|
||||
"Operating Cash Flow": [3_000_000_000] * n_quarters,
|
||||
},
|
||||
index=dates,
|
||||
)
|
||||
return df.to_csv()
|
||||
|
||||
|
||||
def test_compute_ttm_metrics_sums_last_four_quarters():
    """TTM revenue equals the sum of the four most recent quarters."""
    from tradingagents.dataflows.ttm_analysis import compute_ttm_metrics

    metrics = compute_ttm_metrics(
        _make_income_csv(), _make_balance_csv(), _make_cashflow_csv()
    )

    expected_revenue = sum(10_000_000_000 * (1.05**i) for i in range(4, 8))
    assert metrics["quarters_available"] == 8
    assert metrics["ttm"]["revenue"] == expected_revenue
|
||||
|
||||
|
||||
def test_compute_ttm_metrics_exposes_qoq_and_yoy_revenue_trends():
    """Growth trends are populated and flat margins read as stable."""
    from tradingagents.dataflows.ttm_analysis import compute_ttm_metrics

    metrics = compute_ttm_metrics(
        _make_income_csv(), _make_balance_csv(), _make_cashflow_csv()
    )

    trends = metrics["trends"]
    assert trends["revenue_qoq_pct"] is not None
    assert trends["revenue_yoy_pct"] is not None
    assert trends["gross_margin_direction"] == "stable"
|
||||
|
||||
|
||||
def test_compute_ttm_metrics_handles_empty_income_csv():
    """An empty income CSV yields zero quarters plus a recorded parse error."""
    from tradingagents.dataflows.ttm_analysis import compute_ttm_metrics

    metrics = compute_ttm_metrics("", _make_balance_csv(), _make_cashflow_csv())

    assert metrics["quarters_available"] == 0
    assert "income statement parse failed" in metrics["metadata"]["parse_errors"]
|
||||
|
||||
|
||||
def test_compute_ttm_metrics_preserves_zero_values():
    """Legitimate zero inputs must surface as 0.0 metrics, not be dropped."""
    from tradingagents.dataflows.ttm_analysis import compute_ttm_metrics

    dates = pd.date_range("2024-01-01", periods=4, freq="QS")

    def quarterly_csv(columns: dict) -> str:
        # Repeat each scalar across the four quarters and serialise.
        return pd.DataFrame(
            {name: [value] * 4 for name, value in columns.items()},
            index=dates,
        ).to_csv()

    income_csv = quarterly_csv(
        {
            "Total Revenue": 10_000_000_000,
            "Gross Profit": 0,
            "Operating Income": 0,
            "Net Income": 0,
        }
    )
    balance_csv = quarterly_csv(
        {
            "Total Assets": 50_000_000_000,
            "Total Debt": 0,
            "Stockholders Equity": 20_000_000_000,
        }
    )
    cashflow_csv = quarterly_csv({"Free Cash Flow": 0, "Operating Cash Flow": 0})

    metrics = compute_ttm_metrics(income_csv, balance_csv, cashflow_csv)

    assert metrics["ttm"]["gross_margin_pct"] == 0.0
    assert metrics["ttm"]["net_margin_pct"] == 0.0
    assert metrics["ttm"]["debt_to_equity"] == 0.0
|
||||
|
||||
|
||||
def test_format_ttm_report_includes_ttm_and_quarterly_sections():
    """The rendered TTM report names its TTM, trend, and quarterly sections."""
    from tradingagents.dataflows.ttm_analysis import compute_ttm_metrics, format_ttm_report

    metrics = compute_ttm_metrics(
        _make_income_csv(), _make_balance_csv(), _make_cashflow_csv()
    )
    report = format_ttm_report(metrics, "AAPL")

    for heading in ("Trailing Twelve Months", "Trend Signals", "Quarter"):
        assert heading in report
|
||||
|
|
@ -7,16 +7,53 @@ from tradingagents.agents.utils.agent_utils import (
|
|||
get_cashflow,
|
||||
get_fundamentals,
|
||||
get_income_statement,
|
||||
get_insider_transactions,
|
||||
)
|
||||
from tradingagents.dataflows.config import get_config
|
||||
|
||||
|
||||
from tradingagents.dataflows.interface import route_to_vendor
|
||||
from tradingagents.dataflows.macro_regime import classify_macro_regime, format_macro_report
|
||||
from tradingagents.dataflows.peer_comparison import (
|
||||
get_peer_comparison_report,
|
||||
get_sector_relative_report,
|
||||
)
|
||||
from tradingagents.dataflows.ttm_analysis import compute_ttm_metrics, format_ttm_report
|
||||
def create_fundamentals_analyst(llm):
|
||||
def fundamentals_analyst_node(state):
|
||||
current_date = state["trade_date"]
|
||||
ticker = state["company_of_interest"]
|
||||
instrument_context = build_instrument_context(state["company_of_interest"])
|
||||
|
||||
income_csv = route_to_vendor(
|
||||
"get_income_statement",
|
||||
ticker,
|
||||
"quarterly",
|
||||
current_date,
|
||||
)
|
||||
balance_csv = route_to_vendor(
|
||||
"get_balance_sheet",
|
||||
ticker,
|
||||
"quarterly",
|
||||
current_date,
|
||||
)
|
||||
cashflow_csv = route_to_vendor(
|
||||
"get_cashflow",
|
||||
ticker,
|
||||
"quarterly",
|
||||
current_date,
|
||||
)
|
||||
ttm_report = format_ttm_report(
|
||||
compute_ttm_metrics(income_csv, balance_csv, cashflow_csv, n_quarters=8),
|
||||
ticker,
|
||||
)
|
||||
peer_report = get_peer_comparison_report(ticker, current_date)
|
||||
sector_report = get_sector_relative_report(ticker, current_date)
|
||||
macro_regime_report = format_macro_report(classify_macro_regime(current_date))
|
||||
upstream_macro_report = state.get("macro_report", "").strip()
|
||||
macro_report = macro_regime_report
|
||||
if upstream_macro_report:
|
||||
macro_report = (
|
||||
f"{upstream_macro_report}\n\n## Medium-Term Macro Regime Overlay\n\n"
|
||||
f"{macro_regime_report}"
|
||||
)
|
||||
|
||||
tools = [
|
||||
get_fundamentals,
|
||||
get_balance_sheet,
|
||||
|
|
@ -27,7 +64,12 @@ def create_fundamentals_analyst(llm):
|
|||
system_message = (
|
||||
"You are a researcher tasked with analyzing fundamental information over the past week about a company. Please write a comprehensive report of the company's fundamental information such as financial documents, company profile, basic company financials, and company financial history to gain a full view of the company's fundamental information to inform traders. Make sure to include as much detail as possible. Provide specific, actionable insights with supporting evidence to help traders make informed decisions."
|
||||
+ " Make sure to append a Markdown table at the end of the report to organize key points in the report, organized and easy to read."
|
||||
+ " Use the available tools: `get_fundamentals` for comprehensive company analysis, `get_balance_sheet`, `get_cashflow`, and `get_income_statement` for specific financial statements.",
|
||||
+ " Use the available tools: `get_fundamentals` for comprehensive company analysis, `get_balance_sheet`, `get_cashflow`, and `get_income_statement` for specific financial statements."
|
||||
+ f"\n\nPrecomputed medium-term context for {ticker}:"
|
||||
+ f"\n\n[TTM Analysis]\n{ttm_report}"
|
||||
+ f"\n\n[Peer Comparison]\n{peer_report}"
|
||||
+ f"\n\n[Sector Relative Performance]\n{sector_report}"
|
||||
+ f"\n\n[Macro Regime]\n{macro_report}"
|
||||
)
|
||||
|
||||
prompt = ChatPromptTemplate.from_messages(
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ from tradingagents.agents.utils.agent_utils import (
|
|||
get_indicators,
|
||||
get_stock_data,
|
||||
)
|
||||
from tradingagents.dataflows.config import get_config
|
||||
from tradingagents.dataflows.macro_regime import classify_macro_regime, format_macro_report
|
||||
|
||||
|
||||
def create_market_analyst(llm):
|
||||
|
|
@ -14,6 +14,14 @@ def create_market_analyst(llm):
|
|||
def market_analyst_node(state):
|
||||
current_date = state["trade_date"]
|
||||
instrument_context = build_instrument_context(state["company_of_interest"])
|
||||
macro_regime_report = format_macro_report(classify_macro_regime(current_date))
|
||||
upstream_macro_report = state.get("macro_report", "").strip()
|
||||
macro_report = macro_regime_report
|
||||
if upstream_macro_report:
|
||||
macro_report = (
|
||||
f"{upstream_macro_report}\n\n## Medium-Term Macro Regime Overlay\n\n"
|
||||
f"{macro_regime_report}"
|
||||
)
|
||||
|
||||
tools = [
|
||||
get_stock_data,
|
||||
|
|
@ -46,6 +54,7 @@ Volume-Based Indicators:
|
|||
- vwma: VWMA: A moving average weighted by volume. Usage: Confirm trends by integrating price action with volume data. Tips: Watch for skewed results from volume spikes; use in combination with other volume analyses.
|
||||
|
||||
- Select indicators that provide diverse and complementary information. Avoid redundancy (e.g., do not select both rsi and stochrsi). Also briefly explain why they are suitable for the given market context. When you tool call, please use the exact name of the indicators provided above as they are defined parameters, otherwise your call will fail. Please make sure to call get_stock_data first to retrieve the CSV that is needed to generate indicators. Then use get_indicators with the specific indicator names. Write a very detailed and nuanced report of the trends you observe. Provide specific, actionable insights with supporting evidence to help traders make informed decisions."""
|
||||
+ f"\n\nUse this macro regime context to frame the technical read:\n{macro_report}"
|
||||
+ """ Make sure to append a Markdown table at the end of the report to organize key points in the report, organized and easy to read."""
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,318 @@
|
|||
"""Macro regime classifier: risk-on / transition / risk-off."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
import pandas as pd
|
||||
import yfinance as yf
|
||||
|
||||
|
||||
# VIX closing levels separating clearly calm from clearly stressed markets.
VIX_RISK_ON_THRESHOLD = 16.0
VIX_RISK_OFF_THRESHOLD = 25.0
# Composite cutoffs for the sum of the six +1/0/-1 signals (range -6..+6).
REGIME_RISK_ON_THRESHOLD = 3
REGIME_RISK_OFF_THRESHOLD = -3

# Sector ETFs used as proxies for defensive vs cyclical leadership.
_DEFENSIVE_ETFS = ["XLU", "XLP", "XLV"]
_CYCLICAL_ETFS = ["XLY", "XLK", "XLI"]
|
||||
|
||||
|
||||
def _download(
    symbols: list[str],
    period: str = "3mo",
    curr_date: Optional[str] = None,  # fix: was annotated `str = None`
) -> Optional[pd.DataFrame]:
    """Download adjusted close prices for *symbols*, or None when unavailable.

    When *curr_date* is given, the window ends the day after it (yfinance's
    ``end`` is exclusive) and starts ``months + 1`` months earlier; otherwise
    yfinance's relative *period* string is passed through unchanged.

    Returns a DataFrame of closes (one column per symbol) with all-NaN rows
    dropped, or None on download failure or an empty response.
    """
    kwargs = {
        "auto_adjust": True,
        "progress": False,
        "threads": True,
    }
    if curr_date:
        # NOTE(review): assumes *period* has the form "<n>mo" (e.g. "3mo",
        # "14mo"); other yfinance period strings like "1y" would raise here.
        months_back = int(period[:-2])
        end_ts = pd.Timestamp(curr_date) + pd.Timedelta(days=1)
        start_ts = pd.Timestamp(curr_date) - pd.DateOffset(months=months_back + 1)
        kwargs["start"] = start_ts.strftime("%Y-%m-%d")
        kwargs["end"] = end_ts.strftime("%Y-%m-%d")
    else:
        kwargs["period"] = period
    try:
        history = yf.download(symbols, **kwargs)
    except Exception:
        # Network / data-source errors degrade to "no data" for the caller.
        return None

    if history.empty:
        return None

    closes = history["Close"]
    # A single-symbol download may come back as a Series; normalise to a frame.
    if isinstance(closes, pd.Series):
        closes = closes.to_frame(name=symbols[0])
    return closes.dropna(how="all")
|
||||
|
||||
|
||||
def _latest(series: Optional[pd.Series]) -> Optional[float]:
|
||||
if series is None:
|
||||
return None
|
||||
clean = series.dropna()
|
||||
if clean.empty:
|
||||
return None
|
||||
return float(clean.iloc[-1])
|
||||
|
||||
|
||||
def _sma(series: Optional[pd.Series], window: int) -> Optional[float]:
|
||||
if series is None:
|
||||
return None
|
||||
clean = series.dropna()
|
||||
if len(clean) < window:
|
||||
return None
|
||||
return float(clean.rolling(window).mean().iloc[-1])
|
||||
|
||||
|
||||
def _pct_change_n(series: pd.Series, periods: int) -> Optional[float]:
|
||||
clean = series.dropna()
|
||||
if len(clean) < periods + 1:
|
||||
return None
|
||||
base = float(clean.iloc[-(periods + 1)])
|
||||
current = float(clean.iloc[-1])
|
||||
if base == 0:
|
||||
return None
|
||||
return (current - base) / base * 100
|
||||
|
||||
|
||||
def _fmt_pct(value: Optional[float]) -> str:
|
||||
if value is None:
|
||||
return "N/A"
|
||||
sign = "+" if value >= 0 else ""
|
||||
return f"{sign}{value:.2f}%"
|
||||
|
||||
|
||||
def _signal_vix_level(vix_price: Optional[float]) -> tuple[int, str]:
    """Score the absolute VIX level: +1 calm, -1 stressed, 0 otherwise."""
    if vix_price is None:
        return 0, "VIX level unavailable (neutral)"
    # Branch order swapped vs thresholds; safe because the bands are disjoint.
    if vix_price > VIX_RISK_OFF_THRESHOLD:
        return -1, f"VIX level {vix_price:.1f} is above {VIX_RISK_OFF_THRESHOLD} (risk-off)"
    if vix_price < VIX_RISK_ON_THRESHOLD:
        return 1, f"VIX level {vix_price:.1f} is below {VIX_RISK_ON_THRESHOLD} (risk-on)"
    return 0, f"VIX level {vix_price:.1f} is in the neutral zone"
|
||||
|
||||
|
||||
def _signal_vix_trend(vix_series: Optional[pd.Series]) -> tuple[int, str]:
    """Score the VIX direction via the 5-day vs 20-day SMA relationship."""
    fast = _sma(vix_series, 5)
    slow = _sma(vix_series, 20)
    if fast is None or slow is None:
        return 0, "VIX trend unavailable (neutral)"
    if fast > slow:
        return -1, "Rising VIX trend supports risk-off"
    if fast < slow:
        return 1, "Falling VIX trend supports risk-on"
    return 0, "Flat VIX trend is neutral"
|
||||
|
||||
|
||||
def _signal_credit_spread(
    hyg_series: Optional[pd.Series],
    lqd_series: Optional[pd.Series],
) -> tuple[int, str]:
    """Score risk appetite via the one-month change in the HYG/LQD ratio."""
    unavailable = (0, "Credit spread proxy unavailable (neutral)")
    if hyg_series is None or lqd_series is None:
        return unavailable
    shared_dates = hyg_series.dropna().index.intersection(lqd_series.dropna().index)
    # Need at least 22 overlapping sessions for a 21-period change.
    if len(shared_dates) < 22:
        return unavailable
    ratio = hyg_series.loc[shared_dates] / lqd_series.loc[shared_dates]
    monthly_change = _pct_change_n(ratio, 21)
    if monthly_change is None:
        return unavailable
    if monthly_change > 0.5:
        return 1, "Credit spread proxy improving supports risk-on"
    if monthly_change < -0.5:
        return -1, "Credit spread proxy deteriorating supports risk-off"
    return 0, "Credit spread proxy is neutral"
|
||||
|
||||
|
||||
def _signal_yield_curve(
    tlt_series: Optional[pd.Series],
    shy_series: Optional[pd.Series],
) -> tuple[int, str]:
    """Score duration appetite via the one-month TLT-minus-SHY return spread."""
    neutral_na = (0, "Yield curve proxy unavailable (neutral)")
    if tlt_series is None or shy_series is None:
        return neutral_na
    tlt_change = _pct_change_n(tlt_series, 21)
    shy_change = _pct_change_n(shy_series, 21)
    if tlt_change is None or shy_change is None:
        return neutral_na
    spread = tlt_change - shy_change
    if spread < -1.0:
        return 1, "Risk appetite in duration favors risk-on"
    if spread > 1.0:
        return -1, "Flight to safety favors risk-off"
    return 0, "Yield curve proxy is neutral"
|
||||
|
||||
|
||||
def _signal_market_breadth(spx_series: Optional[pd.Series]) -> tuple[int, str]:
    """Score breadth by whether the S&P 500 sits above its 200-day SMA."""
    latest_close = _latest(spx_series)
    long_term_avg = _sma(spx_series, 200)
    if latest_close is None or long_term_avg is None:
        return 0, "Market breadth unavailable (neutral)"
    if latest_close > long_term_avg:
        return 1, "S&P 500 above 200-day average supports risk-on"
    return -1, "S&P 500 below 200-day average supports risk-off"
|
||||
|
||||
|
||||
def _signal_sector_rotation(
    defensive_closes: dict[str, pd.Series],
    cyclical_closes: dict[str, pd.Series],
) -> tuple[int, str]:
    """Score leadership by average one-month defensive vs cyclical returns."""

    def _mean_monthly_change(groups: dict[str, pd.Series]) -> Optional[float]:
        # Average the 21-session changes that could actually be computed.
        observed = []
        for series in groups.values():
            change = _pct_change_n(series, 21)
            if change is not None:
                observed.append(change)
        return sum(observed) / len(observed) if observed else None

    defensive_avg = _mean_monthly_change(defensive_closes)
    cyclical_avg = _mean_monthly_change(cyclical_closes)
    if defensive_avg is None or cyclical_avg is None:
        return 0, "Sector rotation unavailable (neutral)"
    leadership = defensive_avg - cyclical_avg
    if leadership > 1.0:
        return -1, "Defensives leading cyclicals supports risk-off"
    if leadership < -1.0:
        return 1, "Cyclicals leading defensives supports risk-on"
    return 0, "Sector rotation is neutral"
|
||||
|
||||
|
||||
def classify_macro_regime(curr_date: Optional[str] = None) -> dict:
    """Classify the macro regime from six market-based risk signals.

    Downloads VIX, S&P 500, credit, treasury, and sector-ETF closes (window
    keyed to *curr_date* when given) and sums six +1/0/-1 signals into a
    composite score.

    Fix: *curr_date* was annotated ``str = None``; it is Optional.

    Returns a dict with keys: regime ("risk-on"/"transition"/"risk-off"),
    score (-6..+6), confidence ("low"/"medium"/"high"), signals (list of
    per-signal dicts), and summary (one-line markdown).
    """

    def _column(frame: Optional[pd.DataFrame], symbol: str) -> Optional[pd.Series]:
        # Missing downloads or symbols degrade to None; each signal treats
        # a None series as "unavailable (neutral)".
        if frame is not None and symbol in frame:
            return frame[symbol]
        return None

    vix_data = _download(["^VIX"], period="3mo", curr_date=curr_date)
    # 14 months so the 200-day SMA used for market breadth has history.
    market_data = _download(["^GSPC"], period="14mo", curr_date=curr_date)
    credit_data = _download(["HYG", "LQD"], period="3mo", curr_date=curr_date)
    yield_data = _download(["TLT", "SHY"], period="3mo", curr_date=curr_date)
    sector_data = _download(
        _DEFENSIVE_ETFS + _CYCLICAL_ETFS,
        period="3mo",
        curr_date=curr_date,
    )

    vix_series = _column(vix_data, "^VIX")
    spx_series = _column(market_data, "^GSPC")

    defensive = {
        symbol: sector_data[symbol]
        for symbol in _DEFENSIVE_ETFS
        if sector_data is not None and symbol in sector_data
    }
    cyclical = {
        symbol: sector_data[symbol]
        for symbol in _CYCLICAL_ETFS
        if sector_data is not None and symbol in sector_data
    }

    evaluations = [
        ("vix_level", _signal_vix_level(_latest(vix_series))),
        ("vix_trend", _signal_vix_trend(vix_series)),
        (
            "credit_spread",
            _signal_credit_spread(_column(credit_data, "HYG"), _column(credit_data, "LQD")),
        ),
        (
            "yield_curve",
            _signal_yield_curve(_column(yield_data, "TLT"), _column(yield_data, "SHY")),
        ),
        ("market_breadth", _signal_market_breadth(spx_series)),
        ("sector_rotation", _signal_sector_rotation(defensive, cyclical)),
    ]

    signals = [
        {"name": name, "score": signal_score, "description": description}
        for name, (signal_score, description) in evaluations
    ]
    score = sum(signal["score"] for signal in signals)

    if score >= REGIME_RISK_ON_THRESHOLD:
        regime = "risk-on"
    elif score <= REGIME_RISK_OFF_THRESHOLD:
        regime = "risk-off"
    else:
        regime = "transition"

    # Confidence scales with how far the composite sits from zero.
    abs_score = abs(score)
    if abs_score >= 4:
        confidence = "high"
    elif abs_score >= 2:
        confidence = "medium"
    else:
        confidence = "low"

    return {
        "regime": regime,
        "score": score,
        "confidence": confidence,
        "signals": signals,
        "summary": (
            f"Macro regime: **{regime.upper()}** "
            f"(score {score:+d}/6, confidence: {confidence})."
        ),
    }
|
||||
|
||||
|
||||
def format_macro_report(regime_data: dict) -> str:
    """Render the regime classification dict as a markdown report."""
    regime_label = regime_data["regime"].upper()
    lines = [
        "# Macro Regime Classification",
        f"# Data retrieved on: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}",
        "",
        f"## Regime: {regime_label}",
        "",
        "| Attribute | Value |",
        "|-----------|-------|",
        f"| Regime | **{regime_label}** |",
        f"| Composite Score | {regime_data['score']:+d} / 6 |",
        f"| Confidence | {regime_data['confidence'].title()} |",
        "",
        "## Signal Breakdown",
        "",
        "| Signal | Score | Assessment |",
        "|--------|-------|------------|",
    ]

    score_labels = {1: "+1 (risk-on)", 0: "0 (neutral)", -1: "-1 (risk-off)"}
    for entry in regime_data["signals"]:
        display_name = entry["name"].replace("_", " ").title()
        lines.append(
            f"| {display_name} | {score_labels[entry['score']]} | {entry['description']} |"
        )

    lines += [
        "",
        "## Interpretation",
        "",
        regime_data["summary"],
        "",
        "### What This Means for Trading",
        "",
    ]

    # Advice paragraphs keyed by regime; anything unmapped gets the
    # transition guidance (matching the original if/elif/else).
    advice_by_regime = {
        "risk-on": [
            "- Prefer growth and cyclicals when momentum is confirming.",
            "- Use breakouts and trend continuation setups more confidently.",
        ],
        "risk-off": [
            "- Prefer defensive sectors and tighter risk controls.",
            "- Reduce exposure to high-beta and speculative setups.",
        ],
    }
    transition_advice = [
        "- Keep position sizes moderate while signals remain mixed.",
        "- Wait for catalysts or clearer trend confirmation.",
    ]
    lines += advice_by_regime.get(regime_data["regime"], transition_advice)

    return "\n".join(lines)
|
||||
|
|
@ -0,0 +1,271 @@
|
|||
"""Sector and peer-relative performance comparison."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
import pandas as pd
|
||||
import yfinance as yf
|
||||
|
||||
|
||||
# Normalised sector key -> SPDR sector-ETF ticker used as the benchmark
# in the relative-performance reports below.
_SECTOR_ETFS: dict[str, str] = {
    "technology": "XLK",
    "healthcare": "XLV",
    "financials": "XLF",
    "energy": "XLE",
    "consumer-discretionary": "XLY",
    "consumer-staples": "XLP",
    "industrials": "XLI",
    "materials": "XLB",
    "real-estate": "XLRE",
    "utilities": "XLU",
    "communication-services": "XLC",
}
|
||||
|
||||
# Normalised sector key -> representative large-cap peer tickers used for
# peer-relative comparisons (the target ticker is removed from its own list).
_SECTOR_TICKERS: dict[str, list[str]] = {
    "technology": ["AAPL", "MSFT", "NVDA", "GOOGL", "META", "AVGO", "AMD", "CRM"],
    "healthcare": ["UNH", "JNJ", "LLY", "PFE", "ABT", "MRK", "ABBV", "AMGN"],
    "financials": ["JPM", "BAC", "WFC", "GS", "MS", "BLK", "SCHW", "AXP"],
    "energy": ["XOM", "CVX", "COP", "SLB", "EOG", "MPC", "VLO", "OXY"],
    "consumer-discretionary": ["AMZN", "TSLA", "HD", "MCD", "NKE", "LOW"],
    "consumer-staples": ["PG", "KO", "PEP", "COST", "WMT", "MDLZ"],
    "industrials": ["CAT", "HON", "UNP", "UPS", "RTX", "GE"],
    "materials": ["LIN", "APD", "SHW", "FCX", "NEM", "NUE"],
    "real-estate": ["PLD", "AMT", "EQIX", "SPG", "PSA", "DLR"],
    "utilities": ["NEE", "DUK", "SO", "AEP", "SRE", "XEL"],
    "communication-services": ["META", "GOOGL", "NFLX", "DIS", "CMCSA", "TMUS"],
}
|
||||
|
||||
# Vendor sector labels (several spellings appear in yfinance metadata) mapped
# to the normalised keys used by _SECTOR_ETFS and _SECTOR_TICKERS.
_SECTOR_NORMALISE: dict[str, str] = {
    "Technology": "technology",
    "Healthcare": "healthcare",
    "Health Care": "healthcare",
    "Financial Services": "financials",
    "Financials": "financials",
    "Energy": "energy",
    "Consumer Cyclical": "consumer-discretionary",
    "Consumer Discretionary": "consumer-discretionary",
    "Consumer Defensive": "consumer-staples",
    "Consumer Staples": "consumer-staples",
    "Industrials": "industrials",
    "Basic Materials": "materials",
    "Materials": "materials",
    "Real Estate": "real-estate",
    "Utilities": "utilities",
    "Communication Services": "communication-services",
}
|
||||
|
||||
|
||||
def _safe_pct(closes: pd.Series, days_back: int) -> Optional[float]:
|
||||
if len(closes) < days_back + 1:
|
||||
return None
|
||||
base = closes.iloc[-(days_back + 1)]
|
||||
current = closes.iloc[-1]
|
||||
if base == 0:
|
||||
return None
|
||||
return (current - base) / base * 100
|
||||
|
||||
|
||||
def _ytd_pct(closes: pd.Series) -> Optional[float]:
|
||||
if closes.empty:
|
||||
return None
|
||||
current_year = closes.index[-1].year
|
||||
year_closes = closes[closes.index.year == current_year]
|
||||
if len(year_closes) < 2:
|
||||
return None
|
||||
base = year_closes.iloc[0]
|
||||
if base == 0:
|
||||
return None
|
||||
return (closes.iloc[-1] - base) / base * 100
|
||||
|
||||
|
||||
def _fmt_pct(value: Optional[float]) -> str:
|
||||
if value is None:
|
||||
return "N/A"
|
||||
sign = "+" if value >= 0 else ""
|
||||
return f"{sign}{value:.2f}%"
|
||||
|
||||
|
||||
def _download_history(symbols: list[str], curr_date: Optional[str] = None) -> pd.DataFrame:
    """Download roughly six months of auto-adjusted daily prices for *symbols*.

    When *curr_date* (``YYYY-MM-DD``) is given, a fixed window ending the day
    after it (and starting 7 months before it) is requested, so historical
    runs only see data up to that date; otherwise the trailing ``6mo`` period
    is used.
    """
    kwargs = {
        "auto_adjust": True,
        "progress": False,
        "threads": True,
    }
    if curr_date:
        # End is one day past curr_date — presumably so curr_date itself is
        # included in the window; verify against yfinance's end semantics.
        end_ts = pd.Timestamp(curr_date) + pd.Timedelta(days=1)
        start_ts = pd.Timestamp(curr_date) - pd.DateOffset(months=7)
        kwargs["start"] = start_ts.strftime("%Y-%m-%d")
        kwargs["end"] = end_ts.strftime("%Y-%m-%d")
    else:
        kwargs["period"] = "6mo"
    return yf.download(symbols, **kwargs)
|
||||
|
||||
|
||||
def get_sector_peers(ticker: str) -> tuple[str, str, list[str]]:
    """Resolve a ticker's sector (display name, normalised key) and peer list.

    Returns ("Unknown", "", []) when the yfinance metadata lookup fails.
    The ticker itself is excluded from the returned peer list.
    """
    symbol = ticker.upper()
    try:
        metadata = yf.Ticker(symbol).info
    except Exception:
        return "Unknown", "", []

    raw_sector = metadata.get("sector", "")
    fallback_key = raw_sector.lower().replace(" ", "-")
    sector_key = _SECTOR_NORMALISE.get(raw_sector, fallback_key)
    peers = [candidate for candidate in _SECTOR_TICKERS.get(sector_key, []) if candidate != symbol]
    return raw_sector or "Unknown", sector_key, peers
|
||||
|
||||
|
||||
def compute_relative_performance(
    ticker: str,
    sector_key: str,
    peers: list[str],
    curr_date: Optional[str] = None,
) -> str:
    """Build a markdown table comparing *ticker* against up to 8 sector peers
    and the sector ETF over 1W/1M/3M/6M/YTD horizons.

    Rows are sorted by 3-month return; the target's rank is computed among
    non-ETF rows only. Returns an error/empty message string when the price
    download fails or yields no data.
    """
    etf = _SECTOR_ETFS.get(sector_key)
    symbols = [ticker.upper(), *peers[:8]]
    if etf and etf not in symbols:
        symbols.append(etf)

    try:
        history = _download_history(symbols, curr_date=curr_date)
    except Exception as exc:
        return f"Error downloading price data for peer comparison: {exc}"

    if history.empty:
        return "No price data available for peer comparison."

    # NOTE(review): assumes yf.download for a symbol list yields per-symbol
    # "Close" columns — confirm for the single-symbol edge case.
    closes = history.get("Close", pd.DataFrame())
    rows = []
    for symbol in symbols:
        if symbol not in closes.columns:
            continue
        series = closes[symbol].dropna()
        rows.append(
            {
                "symbol": symbol,
                "1W": _safe_pct(series, 5),
                "1M": _safe_pct(series, 21),
                "3M": _safe_pct(series, 63),
                "6M": _safe_pct(series, 126),
                "YTD": _ytd_pct(series),
                "is_target": symbol == ticker.upper(),
                "is_etf": symbol == etf,
            }
        )

    # Sort best 3M performance first; missing values sink to the bottom.
    rows.sort(
        key=lambda row: row["3M"] if row["3M"] is not None else float("-inf"),
        reverse=True,
    )
    peer_rows = [row for row in rows if not row["is_etf"]]
    target_rank = next(
        (index + 1 for index, row in enumerate(peer_rows) if row["is_target"]),
        None,
    )
    n_peers = len(peer_rows)

    lines = [
        f"# Relative Performance Analysis: {ticker.upper()}",
        f"# Data retrieved on: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}",
        f"# Sector: {sector_key.replace('-', ' ').title()} | Peer rank (3M): {target_rank}/{n_peers}",
        "",
        "| Symbol | Role | 1-Week | 1-Month | 3-Month | 6-Month | YTD |",
        "|--------|------|--------|---------|---------|---------|-----|",
    ]

    for row in rows:
        role = "► TARGET" if row["is_target"] else ("ETF Benchmark" if row["is_etf"] else "Peer")
        lines.append(
            f"| {row['symbol']} | {role} "
            f"| {_fmt_pct(row['1W'])} "
            f"| {_fmt_pct(row['1M'])} "
            f"| {_fmt_pct(row['3M'])} "
            f"| {_fmt_pct(row['6M'])} "
            f"| {_fmt_pct(row['YTD'])} |"
        )

    # Alpha section only when both the target and the ETF produced rows.
    target_row = next((row for row in rows if row["is_target"]), None)
    etf_row = next((row for row in rows if row["is_etf"]), None)
    if target_row and etf_row:
        lines.extend(
            [
                "",
                "## Alpha vs Sector ETF",
                "",
                f"- **1-Month**: {_fmt_pct(target_row['1M'])} vs ETF {_fmt_pct(etf_row['1M'])}",
                f"- **3-Month**: {_fmt_pct(target_row['3M'])} vs ETF {_fmt_pct(etf_row['3M'])}",
                f"- **6-Month**: {_fmt_pct(target_row['6M'])} vs ETF {_fmt_pct(etf_row['6M'])}",
            ]
        )

    return "\n".join(lines)
|
||||
|
||||
|
||||
def get_peer_comparison_report(ticker: str, curr_date: Optional[str] = None) -> str:
    """Build the peer-comparison markdown report for *ticker*.

    Resolves the ticker's sector and peer list via get_sector_peers, then
    delegates to compute_relative_performance. When no peers can be
    identified, returns an explanatory message instead.

    Fix: the *curr_date* parameter defaults to None, so its annotation is
    Optional[str] (was the incorrect bare ``str``).
    """
    sector_display, sector_key, peers = get_sector_peers(ticker)
    if not peers:
        return (
            f"# Peer Comparison: {ticker.upper()}\n\n"
            f"Could not identify sector peers for {ticker}. Sector detected: '{sector_display}'"
        )
    return compute_relative_performance(
        ticker,
        sector_key,
        peers,
        curr_date=curr_date,
    )
|
||||
|
||||
|
||||
def get_sector_relative_report(ticker: str, curr_date: Optional[str] = None) -> str:
    """Build a markdown table of *ticker*'s returns vs its sector ETF,
    with the difference reported as alpha for each horizon.

    Returns an explanatory message when no ETF benchmark exists for the
    detected sector, the download fails, or no data is returned.
    """
    sector_display, sector_key, _ = get_sector_peers(ticker)
    etf = _SECTOR_ETFS.get(sector_key)
    if not etf:
        return (
            f"# Sector Relative Performance: {ticker.upper()}\n\n"
            f"No ETF benchmark found for sector '{sector_display}'."
        )

    try:
        history = _download_history([ticker.upper(), etf], curr_date=curr_date)
    except Exception as exc:
        return f"Error downloading data for {ticker} vs {etf}: {exc}"

    if history.empty:
        return f"No price data available for {ticker} or {etf}."

    closes = history.get("Close", pd.DataFrame())
    lines = [
        f"# Sector Relative Performance: {ticker.upper()} vs {etf} ({sector_display})",
        f"# Data retrieved on: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}",
        "",
        "| Period | Stock Return | ETF Return | Alpha |",
        "|--------|-------------|------------|-------|",
    ]

    stock_closes = closes.get(ticker.upper())
    etf_closes = closes.get(etf)

    # Approximate trading-day horizons: 1W=5, 1M=21, 3M=63, 6M=126 sessions.
    for label, days_back in [("1-Week", 5), ("1-Month", 21), ("3-Month", 63), ("6-Month", 126)]:
        stock_return = (
            _safe_pct(stock_closes.dropna(), days_back) if stock_closes is not None else None
        )
        etf_return = _safe_pct(etf_closes.dropna(), days_back) if etf_closes is not None else None
        alpha = (
            stock_return - etf_return
            if stock_return is not None and etf_return is not None
            else None
        )
        lines.append(
            f"| {label} | {_fmt_pct(stock_return)} | {_fmt_pct(etf_return)} | {_fmt_pct(alpha)} |"
        )

    stock_ytd = _ytd_pct(stock_closes.dropna()) if stock_closes is not None else None
    etf_ytd = _ytd_pct(etf_closes.dropna()) if etf_closes is not None else None
    ytd_alpha = (
        stock_ytd - etf_ytd if stock_ytd is not None and etf_ytd is not None else None
    )
    lines.append(
        f"| YTD | {_fmt_pct(stock_ytd)} | {_fmt_pct(etf_ytd)} | {_fmt_pct(ytd_alpha)} |"
    )

    return "\n".join(lines)
|
||||
|
|
@ -0,0 +1,391 @@
|
|||
"""Trailing Twelve Months (TTM) trend analysis across up to 8 quarters."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from io import StringIO
|
||||
from typing import Optional
|
||||
|
||||
import pandas as pd
|
||||
|
||||
|
||||
# Candidate column names for each financial metric, covering the several
# spellings produced by different statement exports. _find_col picks the
# first candidate present in a parsed DataFrame.
_INCOME_REVENUE_COLS = [
    "Total Revenue",
    "TotalRevenue",
    "totalRevenue",
    "Revenue",
    "revenue",
]
_INCOME_GROSS_PROFIT_COLS = [
    "Gross Profit",
    "GrossProfit",
    "grossProfit",
]
_INCOME_OPERATING_INCOME_COLS = [
    "Operating Income",
    "OperatingIncome",
    "operatingIncome",
    "Total Operating Income As Reported",
]
_INCOME_EBITDA_COLS = [
    "EBITDA",
    "Ebitda",
    "ebitda",
    "Normalized EBITDA",
]
_INCOME_NET_INCOME_COLS = [
    "Net Income",
    "NetIncome",
    "netIncome",
    "Net Income From Continuing Operation Net Minority Interest",
]

# Balance-sheet (point-in-time) metrics.
_BALANCE_TOTAL_ASSETS_COLS = [
    "Total Assets",
    "TotalAssets",
    "totalAssets",
]
_BALANCE_TOTAL_DEBT_COLS = [
    "Total Debt",
    "TotalDebt",
    "totalDebt",
    "Long Term Debt",
    "LongTermDebt",
]
_BALANCE_EQUITY_COLS = [
    "Stockholders Equity",
    "StockholdersEquity",
    "Total Stockholder Equity",
    "TotalStockholderEquity",
    "Common Stock Equity",
    "CommonStockEquity",
]

# Cash-flow (flow) metrics.
_CASHFLOW_FCF_COLS = [
    "Free Cash Flow",
    "FreeCashFlow",
    "freeCashFlow",
]
_CASHFLOW_OPERATING_COLS = [
    "Operating Cash Flow",
    "OperatingCashflow",
    "operatingCashflow",
    "Total Cash From Operating Activities",
]
|
||||
|
||||
|
||||
def _find_col(df: pd.DataFrame, candidates: list[str]) -> Optional[str]:
|
||||
for col in candidates:
|
||||
if col in df.columns:
|
||||
return col
|
||||
return None
|
||||
|
||||
|
||||
def _looks_like_dates(values) -> bool:
|
||||
sample = list(values)[:5]
|
||||
count = 0
|
||||
for value in sample:
|
||||
try:
|
||||
pd.to_datetime(str(value))
|
||||
count += 1
|
||||
except Exception:
|
||||
pass
|
||||
return count >= min(2, len(sample))
|
||||
|
||||
|
||||
def _parse_financial_csv(csv_text: str) -> Optional[pd.DataFrame]:
|
||||
if not csv_text or not csv_text.strip():
|
||||
return None
|
||||
|
||||
try:
|
||||
df = pd.read_csv(StringIO(csv_text), index_col=0)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
if df.empty:
|
||||
return None
|
||||
|
||||
if _looks_like_dates(df.columns):
|
||||
df = df.T
|
||||
|
||||
try:
|
||||
df.index = pd.to_datetime(df.index)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
df.sort_index(inplace=True)
|
||||
|
||||
for col in df.columns:
|
||||
df[col] = pd.to_numeric(df[col], errors="coerce")
|
||||
|
||||
return df
|
||||
|
||||
|
||||
def _safe_get(
|
||||
df: Optional[pd.DataFrame],
|
||||
col_candidates: list[str],
|
||||
row_idx: int,
|
||||
) -> Optional[float]:
|
||||
if df is None:
|
||||
return None
|
||||
col = _find_col(df, col_candidates)
|
||||
if col is None:
|
||||
return None
|
||||
try:
|
||||
value = df.iloc[row_idx][col]
|
||||
except (IndexError, KeyError):
|
||||
return None
|
||||
return float(value) if pd.notna(value) else None
|
||||
|
||||
|
||||
def _pct_change(new: Optional[float], old: Optional[float]) -> Optional[float]:
|
||||
if new is None or old is None or old == 0:
|
||||
return None
|
||||
return (new - old) / abs(old) * 100
|
||||
|
||||
|
||||
def _margin_trend(margins: list[Optional[float]]) -> str:
|
||||
clean = [margin for margin in margins if margin is not None]
|
||||
if len(clean) < 3:
|
||||
return "insufficient data"
|
||||
recent = clean[-3:]
|
||||
if recent[-1] > recent[0]:
|
||||
return "expanding"
|
||||
if recent[-1] < recent[0]:
|
||||
return "contracting"
|
||||
return "stable"
|
||||
|
||||
|
||||
def _fmt(value: Optional[float], billions: bool = True) -> str:
|
||||
if value is None:
|
||||
return "N/A"
|
||||
if billions:
|
||||
return f"${value / 1e9:.2f}B"
|
||||
return f"{value:.2f}"
|
||||
|
||||
|
||||
def _fmt_pct(value: Optional[float]) -> str:
|
||||
if value is None:
|
||||
return "N/A"
|
||||
sign = "+" if value >= 0 else ""
|
||||
return f"{sign}{value:.1f}%"
|
||||
|
||||
|
||||
def compute_ttm_metrics(
    income_csv: str,
    balance_csv: str,
    cashflow_csv: str,
    n_quarters: int = 8,
) -> dict:
    """Compute TTM aggregates, per-quarter margins, and trend signals.

    Args:
        income_csv / balance_csv / cashflow_csv: quarterly statements as CSV
            text (either orientation; see _parse_financial_csv).
        n_quarters: maximum number of most-recent quarters to analyse.

    Returns:
        Dict with keys ``quarters_available``, ``ttm`` (summed flows, latest
        stocks, derived margins/ratios), ``quarterly`` (per-quarter rows),
        ``trends`` (QoQ/YoY revenue growth and margin directions; empty when
        fewer than two quarters parsed), and ``metadata`` (parse warnings).
        Only the income statement is mandatory; the rest degrade to None.
    """
    income_df = _parse_financial_csv(income_csv)
    balance_df = _parse_financial_csv(balance_csv)
    cashflow_df = _parse_financial_csv(cashflow_csv)

    result = {
        "quarters_available": 0,
        "ttm": {},
        "quarterly": [],
        "trends": {},
        "metadata": {"parse_errors": []},
    }

    # Record which statements failed to parse so the report can warn.
    if income_df is None:
        result["metadata"]["parse_errors"].append("income statement parse failed")
    if balance_df is None:
        result["metadata"]["parse_errors"].append("balance sheet parse failed")
    if cashflow_df is None:
        result["metadata"]["parse_errors"].append("cash flow parse failed")

    # Without an income statement nothing further can be computed.
    if income_df is None:
        return result

    income_df = income_df.tail(n_quarters)
    result["quarters_available"] = len(income_df)

    if balance_df is not None:
        balance_df = balance_df.tail(n_quarters)
    if cashflow_df is not None:
        cashflow_df = cashflow_df.tail(n_quarters)

    # TTM window = up to the last 4 quarters (fewer when less data exists).
    ttm_n = min(4, len(income_df))
    ttm_income = income_df.tail(ttm_n)

    def _ttm_sum(df: Optional[pd.DataFrame], cols: list[str]) -> Optional[float]:
        # Sum a flow metric over the TTM window; None when the column is absent.
        if df is None:
            return None
        col = _find_col(df, cols)
        if col is None:
            return None
        values = pd.to_numeric(df.tail(ttm_n)[col], errors="coerce").dropna()
        return float(values.sum()) if len(values) > 0 else None

    def _latest(df: Optional[pd.DataFrame], cols: list[str]) -> Optional[float]:
        # Most recent non-null value of a point-in-time (balance-sheet) metric.
        if df is None:
            return None
        col = _find_col(df, cols)
        if col is None:
            return None
        values = pd.to_numeric(df[col], errors="coerce").dropna()
        return float(values.iloc[-1]) if len(values) > 0 else None

    ttm_revenue = _ttm_sum(ttm_income, _INCOME_REVENUE_COLS)
    ttm_gross_profit = _ttm_sum(ttm_income, _INCOME_GROSS_PROFIT_COLS)
    ttm_operating_income = _ttm_sum(ttm_income, _INCOME_OPERATING_INCOME_COLS)
    ttm_ebitda = _ttm_sum(ttm_income, _INCOME_EBITDA_COLS)
    ttm_net_income = _ttm_sum(ttm_income, _INCOME_NET_INCOME_COLS)

    ttm_total_assets = _latest(balance_df, _BALANCE_TOTAL_ASSETS_COLS)
    ttm_total_debt = _latest(balance_df, _BALANCE_TOTAL_DEBT_COLS)
    ttm_equity = _latest(balance_df, _BALANCE_EQUITY_COLS)
    ttm_fcf = _ttm_sum(cashflow_df, _CASHFLOW_FCF_COLS)
    ttm_operating_cf = _ttm_sum(cashflow_df, _CASHFLOW_OPERATING_COLS)

    # Derived ratios guard against missing values and zero denominators.
    result["ttm"] = {
        "revenue": ttm_revenue,
        "gross_profit": ttm_gross_profit,
        "operating_income": ttm_operating_income,
        "ebitda": ttm_ebitda,
        "net_income": ttm_net_income,
        "free_cash_flow": ttm_fcf,
        "operating_cash_flow": ttm_operating_cf,
        "total_assets": ttm_total_assets,
        "total_debt": ttm_total_debt,
        "equity": ttm_equity,
        "gross_margin_pct": (
            ttm_gross_profit / ttm_revenue * 100
            if ttm_revenue is not None and ttm_revenue != 0 and ttm_gross_profit is not None
            else None
        ),
        "operating_margin_pct": (
            ttm_operating_income / ttm_revenue * 100
            if ttm_revenue is not None and ttm_revenue != 0 and ttm_operating_income is not None
            else None
        ),
        "net_margin_pct": (
            ttm_net_income / ttm_revenue * 100
            if ttm_revenue is not None and ttm_revenue != 0 and ttm_net_income is not None
            else None
        ),
        "roe_pct": (
            ttm_net_income / ttm_equity * 100
            if ttm_net_income is not None and ttm_equity is not None and ttm_equity != 0
            else None
        ),
        "debt_to_equity": (
            ttm_total_debt / ttm_equity
            if ttm_total_debt is not None and ttm_equity is not None and ttm_equity != 0
            else None
        ),
    }

    quarterly = []
    for index in range(len(income_df)):
        revenue = _safe_get(income_df, _INCOME_REVENUE_COLS, index)
        gross_profit = _safe_get(income_df, _INCOME_GROSS_PROFIT_COLS, index)
        operating_income = _safe_get(income_df, _INCOME_OPERATING_INCOME_COLS, index)
        net_income = _safe_get(income_df, _INCOME_NET_INCOME_COLS, index)
        quarterly.append(
            {
                "date": income_df.index[index].strftime("%Y-%m-%d"),
                "revenue": revenue,
                "gross_margin_pct": (
                    gross_profit / revenue * 100
                    if revenue is not None and revenue != 0 and gross_profit is not None
                    else None
                ),
                "operating_margin_pct": (
                    operating_income / revenue * 100
                    if revenue is not None and revenue != 0 and operating_income is not None
                    else None
                ),
                "net_margin_pct": (
                    net_income / revenue * 100
                    if revenue is not None and revenue != 0 and net_income is not None
                    else None
                ),
                # NOTE(review): indexes the cash-flow frame by the income
                # statement's row position — assumes both frames cover the
                # same quarters in the same order; confirm for mixed data.
                "free_cash_flow": _safe_get(cashflow_df, _CASHFLOW_FCF_COLS, index),
            }
        )

    result["quarterly"] = quarterly

    # Trend signals need at least two quarters; YoY needs five (4 back).
    if len(quarterly) >= 2:
        latest_revenue = quarterly[-1]["revenue"]
        previous_revenue = quarterly[-2]["revenue"]
        year_ago_revenue = quarterly[-5]["revenue"] if len(quarterly) >= 5 else None
        result["trends"] = {
            "revenue_qoq_pct": _pct_change(latest_revenue, previous_revenue),
            "revenue_yoy_pct": _pct_change(latest_revenue, year_ago_revenue),
            "gross_margin_direction": _margin_trend(
                [quarter["gross_margin_pct"] for quarter in quarterly]
            ),
            "operating_margin_direction": _margin_trend(
                [quarter["operating_margin_pct"] for quarter in quarterly]
            ),
            "net_margin_direction": _margin_trend(
                [quarter["net_margin_pct"] for quarter in quarterly]
            ),
        }

    return result
|
||||
|
||||
|
||||
def format_ttm_report(metrics: dict, ticker: str) -> str:
    """Render the dict produced by compute_ttm_metrics as a markdown report.

    Emits a TTM summary table, a trend-signals table, and a per-quarter
    history table; parse warnings from ``metrics["metadata"]`` are surfaced
    near the top. When no quarters are available, returns only the header
    and a placeholder line.
    """
    lines = [
        f"# TTM Fundamental Analysis: {ticker.upper()}",
        f"# Data retrieved on: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}",
        f"# Quarters available: {metrics['quarters_available']}",
        "",
    ]

    errors = metrics["metadata"].get("parse_errors", [])
    if errors:
        lines.append(f"**Data warnings:** {'; '.join(errors)}")
        lines.append("")

    # Nothing to report without at least one parsed quarter.
    if metrics["quarters_available"] == 0:
        lines.append("_No quarterly data available._")
        return "\n".join(lines)

    ttm = metrics["ttm"]
    lines.extend(
        [
            "## Trailing Twelve Months (TTM) Summary",
            "",
            "| Metric | Value |",
            "|--------|-------|",
            f"| Revenue | {_fmt(ttm.get('revenue'))} |",
            f"| Gross Margin | {_fmt_pct(ttm.get('gross_margin_pct'))} |",
            f"| Operating Margin | {_fmt_pct(ttm.get('operating_margin_pct'))} |",
            f"| Net Margin | {_fmt_pct(ttm.get('net_margin_pct'))} |",
            f"| Return on Equity | {_fmt_pct(ttm.get('roe_pct'))} |",
            f"| Debt / Equity | {ttm.get('debt_to_equity') if ttm.get('debt_to_equity') is not None else 'N/A'} |",
            "",
            "## Trend Signals",
            "",
            "| Signal | Value |",
            "|--------|-------|",
            f"| Revenue QoQ Growth | {_fmt_pct(metrics['trends'].get('revenue_qoq_pct'))} |",
            f"| Revenue YoY Growth | {_fmt_pct(metrics['trends'].get('revenue_yoy_pct'))} |",
            f"| Gross Margin Trend | {metrics['trends'].get('gross_margin_direction', 'N/A')} |",
            f"| Operating Margin Trend | {metrics['trends'].get('operating_margin_direction', 'N/A')} |",
            f"| Net Margin Trend | {metrics['trends'].get('net_margin_direction', 'N/A')} |",
            "",
            "## Quarter History",
            "",
            "| Quarter | Revenue | Gross Margin | Operating Margin | Net Margin | FCF |",
            "|---------|---------|--------------|------------------|------------|-----|",
        ]
    )

    for quarter in metrics["quarterly"]:
        lines.append(
            f"| {quarter['date']} "
            f"| {_fmt(quarter['revenue'])} "
            f"| {_fmt_pct(quarter['gross_margin_pct'])} "
            f"| {_fmt_pct(quarter['operating_margin_pct'])} "
            f"| {_fmt_pct(quarter['net_margin_pct'])} "
            f"| {_fmt(quarter['free_cash_flow'])} |"
        )

    return "\n".join(lines)
|
||||
|
|
@ -38,8 +38,8 @@ DEFAULT_CONFIG = {
|
|||
"openai_reasoning_effort": None, # "medium", "high", "low"
|
||||
"anthropic_effort": None, # "high", "medium", "low"
|
||||
# Debate and discussion settings
|
||||
"max_debate_rounds": 1,
|
||||
"max_risk_discuss_rounds": 1,
|
||||
"max_debate_rounds": 2,
|
||||
"max_risk_discuss_rounds": 2,
|
||||
"max_recur_limit": 100,
|
||||
# Data vendor configuration
|
||||
# Category-level configuration (default for all tools in category)
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ from tradingagents.agents.utils.agent_states import AgentState
|
|||
class ConditionalLogic:
|
||||
"""Handles conditional logic for determining graph flow."""
|
||||
|
||||
def __init__(self, max_debate_rounds=1, max_risk_discuss_rounds=1):
|
||||
def __init__(self, max_debate_rounds=2, max_risk_discuss_rounds=2):
|
||||
"""Initialize with configuration parameters."""
|
||||
self.max_debate_rounds = max_debate_rounds
|
||||
self.max_risk_discuss_rounds = max_risk_discuss_rounds
|
||||
|
|
|
|||
|
|
@ -14,6 +14,13 @@ from .conditional_logic import ConditionalLogic
|
|||
class GraphSetup:
|
||||
"""Handles the setup and configuration of the agent graph."""
|
||||
|
||||
@staticmethod
|
||||
def _order_selected_analysts(selected_analysts):
|
||||
if "macro" not in selected_analysts:
|
||||
return selected_analysts
|
||||
remaining = [analyst for analyst in selected_analysts if analyst != "macro"]
|
||||
return ["macro", *remaining]
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
quick_thinking_llm: ChatOpenAI,
|
||||
|
|
@ -112,6 +119,7 @@ class GraphSetup:
|
|||
"""
|
||||
if len(selected_analysts) == 0:
|
||||
raise ValueError("Trading Agents Graph Setup Error: no analysts selected!")
|
||||
selected_analysts = self._order_selected_analysts(list(selected_analysts))
|
||||
|
||||
# Create analyst nodes
|
||||
analyst_nodes = {}
|
||||
|
|
|
|||
Loading…
Reference in New Issue