diff --git a/cli/main.py b/cli/main.py
index d9a9d023..3ed61212 100644
--- a/cli/main.py
+++ b/cli/main.py
@@ -1,6 +1,7 @@
from typing import Optional
import datetime
import json
+from tradingagents.agents.utils.json_utils import extract_json
import typer
from pathlib import Path
from functools import wraps
@@ -1216,7 +1217,7 @@ def run_scan(date: Optional[str] = None):
# Try to parse and show watchlist table
try:
- summary_data = json.loads(summary)
+ summary_data = extract_json(summary)
stocks = summary_data.get("stocks_to_investigate", [])
if stocks:
table = Table(title="Stocks to Investigate", box=box.ROUNDED)
@@ -1234,7 +1235,7 @@ def run_scan(date: Optional[str] = None):
s.get("thesis_angle", ""),
)
console.print(table)
- except (json.JSONDecodeError, KeyError):
+ except (json.JSONDecodeError, KeyError, ValueError):
pass # Summary wasn't valid JSON — already printed as markdown
diff --git a/docs/agent/decisions/002-data-vendor-fallback.md b/docs/agent/decisions/002-data-vendor-fallback.md
index c8d9434b..6af51f00 100644
--- a/docs/agent/decisions/002-data-vendor-fallback.md
+++ b/docs/agent/decisions/002-data-vendor-fallback.md
@@ -1,6 +1,7 @@
---
type: decision
-status: active
+status: superseded
+superseded_by: 011-opt-in-vendor-fallback
date: 2026-03-17
agent_author: "claude"
tags: [data, alpha-vantage, yfinance, fallback]
diff --git a/docs/agent/decisions/008-lessons-learned.md b/docs/agent/decisions/008-lessons-learned.md
index ac423402..a7f89e08 100644
--- a/docs/agent/decisions/008-lessons-learned.md
+++ b/docs/agent/decisions/008-lessons-learned.md
@@ -31,7 +31,8 @@ None — these are universal rules for this project.
### Vendor Fallback
- Functions inside `route_to_vendor` must RAISE on failure, not embed errors in return values.
-- Catch `(AlphaVantageError, ConnectionError, TimeoutError)`, not just specific subtypes.
+- Catch `(AlphaVantageError, FinnhubError, ConnectionError, TimeoutError)`, not just specific subtypes.
+- Fallback is opt-in: only methods in `FALLBACK_ALLOWED` get cross-vendor fallback. All others fail-fast (ADR 011).
### LangGraph
- Any state field written by parallel nodes MUST have a reducer (`Annotated[str, reducer_fn]`).
diff --git a/docs/agent/decisions/010-finnhub-vendor-integration.md b/docs/agent/decisions/010-finnhub-vendor-integration.md
index 05897a82..c248856e 100644
--- a/docs/agent/decisions/010-finnhub-vendor-integration.md
+++ b/docs/agent/decisions/010-finnhub-vendor-integration.md
@@ -39,3 +39,4 @@ economic calendar) and two equivalent-quality replacements (insider transactions
- `route_to_vendor` fallback catch must include `FinnhubError` alongside `AlphaVantageError`
- Calendar functions return graceful empty-state strings (not raise) when API returns empty list — this is normal behaviour, not an error
- Never add Finnhub paid-tier endpoints (`/stock/candle`, `/financials-reported`, `/indicator`) to free-tier routing
+- `get_insider_transactions` is excluded from `FALLBACK_ALLOWED` — Finnhub MSPR aggregate data has no equivalent in other vendors (ADR 011)
diff --git a/docs/agent/decisions/011-opt-in-vendor-fallback.md b/docs/agent/decisions/011-opt-in-vendor-fallback.md
new file mode 100644
index 00000000..401150e8
--- /dev/null
+++ b/docs/agent/decisions/011-opt-in-vendor-fallback.md
@@ -0,0 +1,35 @@
+---
+type: decision
+status: active
+date: 2026-03-18
+agent_author: "claude"
+tags: [data, vendor, fallback, fail-fast]
+related_files: [tradingagents/dataflows/interface.py, tests/test_vendor_failfast.py]
+---
+
+## Context
+
+The previous `route_to_vendor()` silently tried every available vendor when the primary failed. This is dangerous for trading software — different vendors return different data contracts (e.g., AV news has sentiment scores, yfinance doesn't; stockstats indicator names are incompatible with AV API names). Silent fallback corrupts signal quality without leaving a trace.
+
+## The Decision
+
+- Default to fail-fast: only methods in `FALLBACK_ALLOWED` get cross-vendor fallback.
+- `FALLBACK_ALLOWED` contains only methods where data contracts are vendor-agnostic: `get_stock_data`, `get_market_indices`, `get_sector_performance`, `get_market_movers`, `get_industry_performance`.
+- All other methods raise `RuntimeError` immediately when the primary vendor fails.
+- Error messages include method name and vendors tried for debuggability.
+- Exception chaining (`from last_error`) preserves the original cause.
+
+Supersedes: ADR 002 (which assumed universal fallback was safe).
+
+## Constraints
+
+- Adding a method to `FALLBACK_ALLOWED` requires verifying that all vendor implementations return compatible data contracts.
+- Never add news tools (`get_news`, `get_global_news`, `get_topic_news`) — AV has sentiment scores that yfinance lacks.
+- Never add `get_indicators` — stockstats names (`close_50_sma`) differ from AV API names (`SMA`).
+- Never add financial statement tools — different fiscal period alignment across vendors.
+
+## Actionable Rules
+
+- When adding a new data method, it is fail-fast by default. Only add to `FALLBACK_ALLOWED` after verifying data contract compatibility across all vendor implementations.
+- Functions inside `route_to_vendor` must RAISE on failure, not embed errors in return values (unchanged from ADR 002).
+- Test both fail-fast and fallback paths when modifying vendor routing.
diff --git a/tests/test_e2e_api_integration.py b/tests/test_e2e_api_integration.py
index 9c300d0b..3ce501ed 100644
--- a/tests/test_e2e_api_integration.py
+++ b/tests/test_e2e_api_integration.py
@@ -152,7 +152,7 @@ class TestRouteToVendor:
"tradingagents.dataflows.y_finance.yf.Ticker",
side_effect=ConnectionError("network unavailable"),
):
- with pytest.raises(RuntimeError, match="No available vendor"):
+ with pytest.raises(RuntimeError, match="All vendors failed for"):
route_to_vendor("get_stock_data", "AAPL", "2024-01-04", "2024-01-05")
def test_unknown_method_raises_value_error(self):
diff --git a/tests/test_json_utils.py b/tests/test_json_utils.py
new file mode 100644
index 00000000..ea44d3a0
--- /dev/null
+++ b/tests/test_json_utils.py
@@ -0,0 +1,131 @@
+"""Tests for robust JSON extraction from LLM output."""
+import pytest
+from tradingagents.agents.utils.json_utils import extract_json
+
+
+# ─── Happy-path tests ─────────────────────────────────────────────────────────
+
+def test_pure_json():
+ assert extract_json('{"key": "value"}') == {"key": "value"}
+
+
+def test_json_with_whitespace():
+ assert extract_json(' \n{"key": "value"}\n ') == {"key": "value"}
+
+
+def test_markdown_fence_json():
+ text = '```json\n{"key": "value"}\n```'
+ assert extract_json(text) == {"key": "value"}
+
+
+def test_markdown_fence_no_lang():
+ text = '```\n{"key": "value"}\n```'
+ assert extract_json(text) == {"key": "value"}
+
+
+def test_think_preamble_only():
+    text = '<think>I need to analyze the macro environment carefully.</think>\n{"key": "value"}'
+ assert extract_json(text) == {"key": "value"}
+
+
+def test_think_plus_fence():
+    text = '<think>Some reasoning here.</think>\n```json\n{"key": "value"}\n```'
+ assert extract_json(text) == {"key": "value"}
+
+
+def test_prose_with_json():
+ text = 'Here is the result:\n{"key": "value"}\nDone.'
+ assert extract_json(text) == {"key": "value"}
+
+
+def test_nested_json():
+ data = {
+ "timeframe": "1 month",
+ "executive_summary": "Strong growth momentum",
+ "macro_context": {
+ "economic_cycle": "expansion",
+ "central_bank_stance": "hawkish",
+ "geopolitical_risks": ["trade tensions", "energy prices"],
+ },
+ "key_themes": [
+ {"theme": "AI Infrastructure", "description": "Data center boom", "conviction": "high", "timeframe": "3-6 months"}
+ ],
+ "stocks_to_investigate": [
+ {
+ "ticker": "NVDA",
+ "name": "NVIDIA Corp",
+ "sector": "Technology",
+ "rationale": "GPU demand for AI training",
+ "thesis_angle": "growth",
+ "conviction": "high",
+ "key_catalysts": ["H100 demand", "Blackwell launch"],
+ "risks": ["Supply constraints", "Competition"],
+ }
+ ],
+ "risk_factors": ["Fed rate hikes", "China tensions"],
+ }
+ import json
+ text = json.dumps(data)
+ result = extract_json(text)
+ assert result["timeframe"] == "1 month"
+ assert result["stocks_to_investigate"][0]["ticker"] == "NVDA"
+
+
+def test_deepseek_r1_realistic():
+ """Simulate a real DeepSeek R1 response with think block and JSON fence."""
+ text = (
+ "\n"
+ "Let me analyze the macro environment. The geopolitical scanner shows tension...\n"
+ "I need to identify the top 8-10 stocks.\n"
+ "\n"
+ "```json\n"
+ '{"timeframe": "1 month", "executive_summary": "Bullish macro backdrop", '
+ '"macro_context": {"economic_cycle": "expansion", "central_bank_stance": "neutral", "geopolitical_risks": []}, '
+ '"key_themes": [], "stocks_to_investigate": [{"ticker": "AAPL", "name": "Apple", "sector": "Technology", '
+ '"rationale": "Strong cash flows", "thesis_angle": "value", "conviction": "high", '
+ '"key_catalysts": ["Services growth"], "risks": ["China sales"]}], "risk_factors": []}\n'
+ "```"
+ )
+ result = extract_json(text)
+ assert result["timeframe"] == "1 month"
+ assert result["stocks_to_investigate"][0]["ticker"] == "AAPL"
+
+
+def test_preamble_and_postamble():
+ """JSON buried in prose before and after."""
+ text = 'Based on my analysis of the market data:\n\n{"result": 42}\n\nThis concludes my analysis.'
+ assert extract_json(text) == {"result": 42}
+
+
+# ─── Error cases ──────────────────────────────────────────────────────────────
+
+def test_empty_input():
+ with pytest.raises(ValueError, match="Empty input"):
+ extract_json("")
+
+
+def test_whitespace_only():
+ with pytest.raises(ValueError, match="Empty input"):
+ extract_json(" \n\t ")
+
+
+def test_malformed_json_no_fallback():
+ with pytest.raises(ValueError):
+ extract_json('{"key": value_without_quotes}')
+
+
+def test_no_json_at_all():
+ with pytest.raises(ValueError):
+ extract_json("Just some text with no JSON structure at all")
+
+
+def test_array_input_returns_list():
+ """extract_json succeeds on JSON arrays — json.loads parses them as lists.
+
+ The function's return-type annotation says dict, but the implementation does
+ not enforce this at runtime. A JSON array is valid JSON, so step 1
+ (direct json.loads) succeeds and returns a list. Callers that need a dict
+ must validate the returned type themselves.
+ """
+ result = extract_json('[1, 2, 3]')
+ assert result == [1, 2, 3]
diff --git a/tests/test_vendor_failfast.py b/tests/test_vendor_failfast.py
new file mode 100644
index 00000000..43a633c5
--- /dev/null
+++ b/tests/test_vendor_failfast.py
@@ -0,0 +1,200 @@
+"""Tests for fail-fast vendor routing (ADR 011).
+
+Methods NOT in FALLBACK_ALLOWED must fail immediately when the primary vendor
+fails, rather than silently falling back to a vendor with a different data contract.
+"""
+
+import pytest
+from unittest.mock import patch, MagicMock
+
+from tradingagents.dataflows.interface import route_to_vendor, FALLBACK_ALLOWED
+from tradingagents.dataflows.alpha_vantage_common import AlphaVantageError
+from tradingagents.dataflows.finnhub_common import FinnhubError
+from tradingagents.dataflows.config import get_config
+
+
+def _config_with_vendor(category: str, vendor: str):
+ """Return a patched config dict that sets a specific vendor for a category."""
+ original = get_config()
+ return {
+ **original,
+ "data_vendors": {**original.get("data_vendors", {}), category: vendor},
+ }
+
+
+class TestFailFastMethods:
+ """Methods NOT in FALLBACK_ALLOWED must not fall back to other vendors."""
+
+ def test_news_fails_fast_no_fallback(self):
+ """get_news configured for alpha_vantage should NOT fall back to yfinance."""
+ config = _config_with_vendor("news_data", "alpha_vantage")
+
+ with patch("tradingagents.dataflows.interface.get_config", return_value=config):
+ with patch(
+ "tradingagents.dataflows.alpha_vantage_common.requests.get",
+ side_effect=ConnectionError("AV down"),
+ ):
+ with pytest.raises(RuntimeError, match="All vendors failed for 'get_news'"):
+ route_to_vendor("get_news", "AAPL", "2024-01-01", "2024-01-05")
+
+ def test_indicators_fail_fast_no_fallback(self):
+ """get_indicators configured for alpha_vantage should NOT fall back to yfinance."""
+ from tradingagents.dataflows.interface import VENDOR_METHODS
+ config = _config_with_vendor("technical_indicators", "alpha_vantage")
+
+ original = VENDOR_METHODS["get_indicators"]["alpha_vantage"]
+ VENDOR_METHODS["get_indicators"]["alpha_vantage"] = MagicMock(
+ side_effect=AlphaVantageError("AV down")
+ )
+ try:
+ with patch("tradingagents.dataflows.interface.get_config", return_value=config):
+ with pytest.raises(RuntimeError, match="All vendors failed for 'get_indicators'"):
+ route_to_vendor("get_indicators", "AAPL", "SMA", "2024-01-01", 50)
+ finally:
+ VENDOR_METHODS["get_indicators"]["alpha_vantage"] = original
+
+ def test_fundamentals_fail_fast_no_fallback(self):
+ """get_fundamentals configured for alpha_vantage should NOT fall back to yfinance."""
+ config = _config_with_vendor("fundamental_data", "alpha_vantage")
+
+ with patch("tradingagents.dataflows.interface.get_config", return_value=config):
+ with patch(
+ "tradingagents.dataflows.alpha_vantage_common.requests.get",
+ side_effect=ConnectionError("AV down"),
+ ):
+ with pytest.raises(RuntimeError, match="All vendors failed for 'get_fundamentals'"):
+ route_to_vendor("get_fundamentals", "AAPL")
+
+ def test_insider_transactions_fail_fast_no_fallback(self):
+ """get_insider_transactions configured for finnhub should NOT fall back."""
+ from tradingagents.dataflows.interface import VENDOR_METHODS
+ config = _config_with_vendor("news_data", "finnhub")
+
+ original = VENDOR_METHODS["get_insider_transactions"]["finnhub"]
+ VENDOR_METHODS["get_insider_transactions"]["finnhub"] = MagicMock(
+ side_effect=FinnhubError("Finnhub down")
+ )
+ try:
+ with patch("tradingagents.dataflows.interface.get_config", return_value=config):
+ with pytest.raises(RuntimeError, match="All vendors failed for 'get_insider_transactions'"):
+ route_to_vendor("get_insider_transactions", "AAPL")
+ finally:
+ VENDOR_METHODS["get_insider_transactions"]["finnhub"] = original
+
+ def test_topic_news_fail_fast_no_fallback(self):
+ """get_topic_news should NOT fall back across vendors."""
+ from tradingagents.dataflows.interface import VENDOR_METHODS
+ config = _config_with_vendor("scanner_data", "finnhub")
+
+ original = VENDOR_METHODS["get_topic_news"]["finnhub"]
+ VENDOR_METHODS["get_topic_news"]["finnhub"] = MagicMock(
+ side_effect=FinnhubError("Finnhub down")
+ )
+ try:
+ with patch("tradingagents.dataflows.interface.get_config", return_value=config):
+ with pytest.raises(RuntimeError, match="All vendors failed for 'get_topic_news'"):
+ route_to_vendor("get_topic_news", "technology")
+ finally:
+ VENDOR_METHODS["get_topic_news"]["finnhub"] = original
+
+ def test_calendar_fail_fast_single_vendor(self):
+ """get_earnings_calendar (Finnhub-only) fails fast."""
+ from tradingagents.dataflows.interface import VENDOR_METHODS
+ config = _config_with_vendor("calendar_data", "finnhub")
+
+ original = VENDOR_METHODS["get_earnings_calendar"]["finnhub"]
+ VENDOR_METHODS["get_earnings_calendar"]["finnhub"] = MagicMock(
+ side_effect=FinnhubError("Finnhub down")
+ )
+ try:
+ with patch("tradingagents.dataflows.interface.get_config", return_value=config):
+ with pytest.raises(RuntimeError, match="All vendors failed for 'get_earnings_calendar'"):
+ route_to_vendor("get_earnings_calendar", "2024-01-01", "2024-01-05")
+ finally:
+ VENDOR_METHODS["get_earnings_calendar"]["finnhub"] = original
+
+
+class TestErrorChaining:
+ """Verify error messages and exception chaining."""
+
+ def test_error_chain_preserved(self):
+ """RuntimeError.__cause__ should be the original vendor exception."""
+ config = _config_with_vendor("news_data", "alpha_vantage")
+
+ with patch("tradingagents.dataflows.interface.get_config", return_value=config):
+ with patch(
+ "tradingagents.dataflows.alpha_vantage_common.requests.get",
+ side_effect=ConnectionError("network down"),
+ ):
+ with pytest.raises(RuntimeError) as exc_info:
+ route_to_vendor("get_news", "AAPL", "2024-01-01", "2024-01-05")
+
+ assert exc_info.value.__cause__ is not None
+ assert isinstance(exc_info.value.__cause__, ConnectionError)
+
+ def test_error_message_includes_method_and_vendors(self):
+ """Error message should include method name and vendors tried."""
+ config = _config_with_vendor("fundamental_data", "alpha_vantage")
+
+ with patch("tradingagents.dataflows.interface.get_config", return_value=config):
+ with patch(
+ "tradingagents.dataflows.alpha_vantage_common.requests.get",
+ side_effect=ConnectionError("down"),
+ ):
+ with pytest.raises(RuntimeError) as exc_info:
+ route_to_vendor("get_fundamentals", "AAPL")
+
+ msg = str(exc_info.value)
+ assert "get_fundamentals" in msg
+ assert "alpha_vantage" in msg
+
+ def test_auth_error_propagates(self):
+ """401/403 errors (wrapped as vendor errors) should not silently retry."""
+ config = _config_with_vendor("news_data", "alpha_vantage")
+
+ with patch("tradingagents.dataflows.interface.get_config", return_value=config):
+ with patch(
+ "tradingagents.dataflows.alpha_vantage_common.requests.get",
+ side_effect=AlphaVantageError("Invalid API key (401)"),
+ ):
+ with pytest.raises(RuntimeError, match="All vendors failed"):
+ route_to_vendor("get_news", "AAPL", "2024-01-01", "2024-01-05")
+
+
+class TestFallbackAllowedStillWorks:
+ """Methods IN FALLBACK_ALLOWED should still get cross-vendor fallback."""
+
+ def test_stock_data_falls_back(self):
+ """get_stock_data (in FALLBACK_ALLOWED) should fall back from AV to yfinance."""
+ import pandas as pd
+
+ config = _config_with_vendor("core_stock_apis", "alpha_vantage")
+ df = pd.DataFrame(
+ {"Open": [183.0], "High": [186.0], "Low": [182.5],
+ "Close": [185.0], "Volume": [45_000_000]},
+ index=pd.date_range("2024-01-04", periods=1, freq="B", tz="America/New_York"),
+ )
+ mock_ticker = MagicMock()
+ mock_ticker.history.return_value = df
+
+ with patch("tradingagents.dataflows.interface.get_config", return_value=config):
+ with patch(
+ "tradingagents.dataflows.alpha_vantage_common.requests.get",
+ side_effect=ConnectionError("AV down"),
+ ):
+ with patch("tradingagents.dataflows.y_finance.yf.Ticker", return_value=mock_ticker):
+ result = route_to_vendor("get_stock_data", "AAPL", "2024-01-04", "2024-01-05")
+
+ assert isinstance(result, str)
+ assert "AAPL" in result
+
+ def test_fallback_allowed_set_contents(self):
+ """Verify the FALLBACK_ALLOWED set contains exactly the expected methods."""
+ expected = {
+ "get_stock_data",
+ "get_market_indices",
+ "get_sector_performance",
+ "get_market_movers",
+ "get_industry_performance",
+ }
+ assert FALLBACK_ALLOWED == expected
diff --git a/tradingagents/agents/scanners/macro_synthesis.py b/tradingagents/agents/scanners/macro_synthesis.py
index 9876a927..c58ef561 100644
--- a/tradingagents/agents/scanners/macro_synthesis.py
+++ b/tradingagents/agents/scanners/macro_synthesis.py
@@ -1,3 +1,10 @@
+import json
+import logging
+
+from tradingagents.agents.utils.json_utils import extract_json
+
+logger = logging.getLogger(__name__)
+
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
@@ -41,6 +48,8 @@ def create_macro_synthesis(llm):
'"thesis_angle": "...", "conviction": "high|medium|low", "key_catalysts": [...], "risks": [...] }],\n'
' "risk_factors": ["..."]\n'
"}"
+ "\n\nIMPORTANT: Output ONLY valid JSON. Start your response with '{' and end with '}'. "
+ "Do NOT use markdown code fences. Do NOT include any explanation or preamble before or after the JSON."
f"\n\n{all_reports_context}"
)
@@ -65,6 +74,17 @@ def create_macro_synthesis(llm):
report = result.content
+        # Sanitize LLM output: strip markdown fences / <think> blocks before storing
+ try:
+ parsed = extract_json(report)
+ report = json.dumps(parsed)
+ except (ValueError, json.JSONDecodeError):
+ logger.warning(
+ "macro_synthesis: could not extract JSON from LLM output; "
+ "storing raw content (first 200 chars): %s",
+ report[:200],
+ )
+
return {
"messages": [result],
"macro_scan_summary": report,
diff --git a/tradingagents/agents/utils/json_utils.py b/tradingagents/agents/utils/json_utils.py
new file mode 100644
index 00000000..adc28fac
--- /dev/null
+++ b/tradingagents/agents/utils/json_utils.py
@@ -0,0 +1,69 @@
+"""Robust JSON extraction from LLM responses that may wrap JSON in markdown or prose."""
+
+from __future__ import annotations
+
+import json
+import re
+from typing import Any
+
+
+def extract_json(text: str) -> dict[str, Any]:
+ """Extract a JSON object from LLM output that may contain markdown fences,
+    preamble/postamble text, or <think> blocks.
+
+ Strategy (in order):
+ 1. Try direct json.loads() — works if the LLM returned pure JSON
+    2. Strip <think>...</think> blocks (DeepSeek R1 reasoning)
+ 3. Extract from markdown code fences (```json ... ``` or ``` ... ```)
+ 4. Find the first '{' and last '}' and try to parse that substring
+ 5. Raise ValueError if nothing works
+
+ Args:
+ text: Raw LLM response string.
+
+ Returns:
+ Parsed JSON dict.
+
+ Raises:
+ ValueError: If no valid JSON object could be extracted.
+ """
+ if not text or not text.strip():
+ raise ValueError("Empty input — no JSON to extract")
+
+ # 1. Direct parse
+ try:
+ return json.loads(text)
+ except json.JSONDecodeError:
+ pass
+
+    # 2. Strip <think>...</think> blocks (DeepSeek R1)
+    cleaned = re.sub(r"<think>.*?</think>", "", text, flags=re.DOTALL).strip()
+
+ # Try again after stripping think blocks
+ try:
+ return json.loads(cleaned)
+ except json.JSONDecodeError:
+ pass
+
+ # 3. Extract from markdown code fences
+ fence_pattern = r"```(?:json)?\s*\n?(.*?)\n?\s*```"
+ fences = re.findall(fence_pattern, cleaned, re.DOTALL)
+ for block in fences:
+ try:
+ return json.loads(block.strip())
+ except json.JSONDecodeError:
+ continue
+
+ # 4. Find first '{' to last '}'
+ first_brace = cleaned.find("{")
+ last_brace = cleaned.rfind("}")
+ if first_brace != -1 and last_brace > first_brace:
+ try:
+ return json.loads(cleaned[first_brace : last_brace + 1])
+ except json.JSONDecodeError:
+ pass
+
+ raise ValueError(
+ f"Could not extract valid JSON from LLM response (length={len(text)}, "
+ f"preview={text[:200]!r})"
+ )
diff --git a/tradingagents/dataflows/interface.py b/tradingagents/dataflows/interface.py
index fd279430..4e0caec2 100644
--- a/tradingagents/dataflows/interface.py
+++ b/tradingagents/dataflows/interface.py
@@ -107,6 +107,16 @@ VENDOR_LIST = [
"finnhub",
]
+# Methods where cross-vendor fallback is safe (data contracts are fungible).
+# All other methods fail-fast on primary vendor failure — see ADR 011.
+FALLBACK_ALLOWED = {
+ "get_stock_data", # OHLCV is fungible across vendors
+ "get_market_indices", # SPY/DIA/QQQ quotes are fungible
+ "get_sector_performance", # ETF-based proxy, same approach
+ "get_market_movers", # Approximation acceptable for screening
+ "get_industry_performance", # ETF-based proxy
+}
+
# Mapping of methods to their vendor-specific implementations
VENDOR_METHODS = {
# core_stock_apis
@@ -206,7 +216,11 @@ def get_vendor(category: str, method: str = None) -> str:
return config.get("data_vendors", {}).get(category, "default")
def route_to_vendor(method: str, *args, **kwargs):
- """Route method calls to appropriate vendor implementation with fallback support."""
+ """Route method calls to appropriate vendor implementation with fallback support.
+
+ Only methods in FALLBACK_ALLOWED get cross-vendor fallback.
+ All others fail-fast on primary vendor failure (see ADR 011).
+ """
category = get_category_for_method(method)
vendor_config = get_vendor(category, method)
primary_vendors = [v.strip() for v in vendor_config.split(',')]
@@ -214,23 +228,32 @@ def route_to_vendor(method: str, *args, **kwargs):
if method not in VENDOR_METHODS:
raise ValueError(f"Method '{method}' not supported")
- # Build fallback chain: primary vendors first, then remaining available vendors
- all_available_vendors = list(VENDOR_METHODS[method].keys())
- fallback_vendors = primary_vendors.copy()
- for vendor in all_available_vendors:
- if vendor not in fallback_vendors:
- fallback_vendors.append(vendor)
+ if method in FALLBACK_ALLOWED:
+ # Build fallback chain: primary vendors first, then remaining available vendors
+ all_available_vendors = list(VENDOR_METHODS[method].keys())
+ vendors_to_try = primary_vendors.copy()
+ for vendor in all_available_vendors:
+ if vendor not in vendors_to_try:
+ vendors_to_try.append(vendor)
+ else:
+ # Fail-fast: only try configured primary vendor(s)
+ vendors_to_try = primary_vendors
- for vendor in fallback_vendors:
+ last_error = None
+ tried = []
+ for vendor in vendors_to_try:
if vendor not in VENDOR_METHODS[method]:
continue
+ tried.append(vendor)
vendor_impl = VENDOR_METHODS[method][vendor]
impl_func = vendor_impl[0] if isinstance(vendor_impl, list) else vendor_impl
try:
return impl_func(*args, **kwargs)
- except (AlphaVantageError, FinnhubError, ConnectionError, TimeoutError):
- continue # Any vendor error or connection/timeout triggers fallback to next vendor
+ except (AlphaVantageError, FinnhubError, ConnectionError, TimeoutError) as exc:
+ last_error = exc
+ continue
- raise RuntimeError(f"No available vendor for '{method}'")
\ No newline at end of file
+ error_msg = f"All vendors failed for '{method}' (tried: {', '.join(tried)})"
+ raise RuntimeError(error_msg) from last_error
\ No newline at end of file
diff --git a/tradingagents/pipeline/macro_bridge.py b/tradingagents/pipeline/macro_bridge.py
index 53d7a1fa..a7bf86a7 100644
--- a/tradingagents/pipeline/macro_bridge.py
+++ b/tradingagents/pipeline/macro_bridge.py
@@ -5,6 +5,7 @@ from __future__ import annotations
import asyncio
import json
import logging
+from tradingagents.agents.utils.json_utils import extract_json
from dataclasses import dataclass
from datetime import datetime
from pathlib import Path
@@ -80,8 +81,8 @@ def parse_macro_output(path: Path) -> tuple[MacroContext, list[StockCandidate]]:
Returns:
Tuple of (MacroContext, list of StockCandidate).
"""
- with path.open() as f:
- data = json.load(f)
+ raw_text = path.read_text()
+ data = extract_json(raw_text)
ctx_raw = data.get("macro_context", {})
macro_context = MacroContext(