Merge pull request #99 from aguzererler/feature/portfolio-resumability-and-cleanup
feat: Add portfolio resumability, extend report saving, and gitignore uv.lock
This commit is contained in:
commit
321cc80434
|
|
@ -78,8 +78,11 @@ FINNHUB_API_KEY=
|
|||
# PostgreSQL connection string for Supabase (required for portfolio commands)
|
||||
# SUPABASE_CONNECTION_STRING=postgresql://postgres.<project>:<password>@aws-1-<region>.pooler.supabase.com:6543/postgres
|
||||
|
||||
# Portfolio data directory (where JSON reports are stored)
|
||||
# TRADINGAGENTS_PORTFOLIO_DATA_DIR=reports
|
||||
# Root directory for all reports (scans, analysis, portfolio artifacts).
|
||||
# All output lands under {REPORTS_DIR}/daily/{date}/...
|
||||
# PORTFOLIO_DATA_DIR overrides this for portfolio-only reports if you need them split.
|
||||
# TRADINGAGENTS_REPORTS_DIR=/absolute/path/to/reports
|
||||
# PORTFOLIO_DATA_DIR=/absolute/path/to/reports
|
||||
|
||||
# Portfolio constraint overrides
|
||||
# TRADINGAGENTS_PM_MAX_POSITIONS=15 # maximum number of open positions
|
||||
|
|
|
|||
|
|
@ -98,7 +98,7 @@ ipython_config.py
|
|||
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
|
||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||
# commonly ignored for libraries.
|
||||
# uv.lock
|
||||
uv.lock
|
||||
|
||||
# poetry
|
||||
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||
|
|
|
|||
|
|
@ -104,6 +104,27 @@ async def trigger_auto(
|
|||
background_tasks.add_task(_run_and_store, run_id, engine.run_auto(run_id, params or {}))
|
||||
return {"run_id": run_id, "status": "queued"}
|
||||
|
||||
@router.delete("/portfolio-stage")
async def reset_portfolio_stage(
    params: Dict[str, Any],
    user: dict = Depends(get_current_user),
):
    """Delete PM decision and execution result for a given date/portfolio_id.

    After calling this, an auto run will re-run Phase 3 from scratch
    (Phases 1 & 2 are skipped if their cached results still exist).

    Args:
        params: Request body; must contain ``date`` and ``portfolio_id``.
        user: Authenticated user injected by ``get_current_user``.

    Returns:
        Dict with the list of deleted file names plus the echoed inputs.

    Raises:
        HTTPException: 422 when ``date`` or ``portfolio_id`` is missing.
    """
    # Local import keeps router import-time light and avoids import cycles.
    from tradingagents.portfolio.report_store import ReportStore

    date = params.get("date")
    portfolio_id = params.get("portfolio_id")
    if not date or not portfolio_id:
        raise HTTPException(status_code=422, detail="date and portfolio_id are required")
    store = ReportStore()
    deleted = store.clear_portfolio_stage(date, portfolio_id)
    # .get() instead of ["user_id"]: a missing key in the auth payload must not
    # turn an already-completed delete into a 500.
    logger.info(
        "reset_portfolio_stage date=%s portfolio=%s deleted=%s user=%s",
        date, portfolio_id, deleted, user.get("user_id"),
    )
    return {"deleted": deleted, "date": date, "portfolio_id": portfolio_id}
|
||||
|
||||
|
||||
@router.get("/")
|
||||
async def list_runs(user: dict = Depends(get_current_user)):
|
||||
# Filter by user in production
|
||||
|
|
|
|||
|
|
@ -19,6 +19,45 @@ logger = logging.getLogger("agent_os.engine")
|
|||
# Maximum characters of prompt/response content to include in the short message
|
||||
_MAX_CONTENT_LEN = 300
|
||||
|
||||
|
||||
def _fetch_prices(tickers: list[str]) -> dict[str, float]:
|
||||
"""Fetch the latest closing price for each ticker via yfinance.
|
||||
|
||||
Returns a dict of {ticker: price}. Tickers that fail are silently skipped.
|
||||
"""
|
||||
if not tickers:
|
||||
return {}
|
||||
try:
|
||||
import yfinance as yf
|
||||
data = yf.download(tickers, period="2d", auto_adjust=True, progress=False, threads=True)
|
||||
if data.empty:
|
||||
return {}
|
||||
close = data["Close"] if "Close" in data.columns else data
|
||||
# Take the last available row
|
||||
last_row = close.iloc[-1]
|
||||
return {
|
||||
t: float(last_row[t])
|
||||
for t in tickers
|
||||
if t in last_row.index and not __import__("math").isnan(last_row[t])
|
||||
}
|
||||
except Exception as exc:
|
||||
logger.warning("_fetch_prices failed: %s", exc)
|
||||
return {}
|
||||
|
||||
|
||||
def _tickers_from_decision(decision: dict) -> list[str]:
|
||||
"""Extract all ticker symbols referenced in a PM decision dict."""
|
||||
tickers = set()
|
||||
for key in ("sells", "buys", "holds"):
|
||||
for item in decision.get(key) or []:
|
||||
if isinstance(item, dict):
|
||||
t = item.get("ticker") or item.get("symbol")
|
||||
else:
|
||||
t = str(item)
|
||||
if t:
|
||||
tickers.add(t.upper())
|
||||
return list(tickers)
|
||||
|
||||
# Maximum characters of prompt/response for the full fields (generous limit)
|
||||
_MAX_FULL_LEN = 50_000
|
||||
|
||||
|
|
@ -280,8 +319,21 @@ class LangGraphEngine:
|
|||
if ticker_analyses:
|
||||
scan_summary["ticker_analyses"] = ticker_analyses
|
||||
|
||||
# Fetch prices from scan_summary if available, else default to empty dict
|
||||
prices = scan_summary.get("prices") or {}
|
||||
# Collect tickers: current holdings + scan candidates, then fetch live prices
|
||||
holding_tickers: list[str] = []
|
||||
try:
|
||||
from tradingagents.portfolio.repository import PortfolioRepository
|
||||
_repo = PortfolioRepository()
|
||||
_, holdings = _repo.get_portfolio_with_holdings(portfolio_id)
|
||||
holding_tickers = [h.ticker for h in holdings]
|
||||
except Exception as exc:
|
||||
logger.warning("run_portfolio: could not load holdings for price fetch: %s", exc)
|
||||
candidate_tickers = [
|
||||
c if isinstance(c, str) else (c.get("ticker") or c.get("symbol") or "")
|
||||
for c in (scan_summary.get("stocks_to_investigate") or [])
|
||||
]
|
||||
all_tickers = list({t.upper() for t in holding_tickers + candidate_tickers if t})
|
||||
prices = _fetch_prices(all_tickers) if all_tickers else {}
|
||||
|
||||
initial_state = {
|
||||
"portfolio_id": portfolio_id,
|
||||
|
|
@ -326,44 +378,106 @@ class LangGraphEngine:
|
|||
except Exception as exc:
|
||||
logger.warning("PORTFOLIO fallback ainvoke failed run=%s: %s", run_id, exc)
|
||||
|
||||
# Save PM decision report
|
||||
# Save portfolio reports (Holding Reviews, Risk Metrics, PM Decision, Execution Result)
|
||||
if final_state:
|
||||
try:
|
||||
pm_decision_str = final_state.get("pm_decision", "")
|
||||
# 1. Holding Reviews — save the raw string via ReportStore
|
||||
holding_reviews_str = final_state.get("holding_reviews")
|
||||
if holding_reviews_str:
|
||||
try:
|
||||
reviews = json.loads(holding_reviews_str) if isinstance(holding_reviews_str, str) else holding_reviews_str
|
||||
store.save_holding_reviews(date, portfolio_id, reviews)
|
||||
except Exception as exc:
|
||||
logger.warning("Failed to save holding_reviews run=%s: %s", run_id, exc)
|
||||
|
||||
# 2. Risk Metrics
|
||||
risk_metrics_str = final_state.get("risk_metrics")
|
||||
if risk_metrics_str:
|
||||
try:
|
||||
metrics = json.loads(risk_metrics_str) if isinstance(risk_metrics_str, str) else risk_metrics_str
|
||||
store.save_risk_metrics(date, portfolio_id, metrics)
|
||||
except Exception as exc:
|
||||
logger.warning("Failed to save risk_metrics run=%s: %s", run_id, exc)
|
||||
|
||||
# 3. PM Decision
|
||||
pm_decision_str = final_state.get("pm_decision")
|
||||
if pm_decision_str:
|
||||
try:
|
||||
pm_decision_dict = (
|
||||
json.loads(pm_decision_str)
|
||||
if isinstance(pm_decision_str, str)
|
||||
else pm_decision_str
|
||||
)
|
||||
except (json.JSONDecodeError, TypeError):
|
||||
pm_decision_dict = {"raw": pm_decision_str}
|
||||
ReportStore().save_pm_decision(date, portfolio_id, pm_decision_dict)
|
||||
yield self._system_log(
|
||||
f"Portfolio reports saved for {portfolio_id} on {date}"
|
||||
)
|
||||
decision = json.loads(pm_decision_str) if isinstance(pm_decision_str, str) else pm_decision_str
|
||||
store.save_pm_decision(date, portfolio_id, decision)
|
||||
except Exception as exc:
|
||||
logger.warning("Failed to save pm_decision run=%s: %s", run_id, exc)
|
||||
|
||||
# 4. Execution Result
|
||||
execution_result_str = final_state.get("execution_result")
|
||||
if execution_result_str:
|
||||
try:
|
||||
execution = json.loads(execution_result_str) if isinstance(execution_result_str, str) else execution_result_str
|
||||
store.save_execution_result(date, portfolio_id, execution)
|
||||
except Exception as exc:
|
||||
logger.warning("Failed to save execution_result run=%s: %s", run_id, exc)
|
||||
|
||||
yield self._system_log(f"Portfolio stage reports (decision & execution) saved for {portfolio_id} on {date}")
|
||||
except Exception as exc:
|
||||
logger.exception("Failed to save portfolio reports run=%s", run_id)
|
||||
yield self._system_log(
|
||||
f"Warning: could not save portfolio reports: {exc}"
|
||||
)
|
||||
yield self._system_log(f"Warning: could not save portfolio reports: {exc}")
|
||||
|
||||
logger.info("Completed PORTFOLIO run=%s", run_id)
|
||||
|
||||
async def run_trade_execution(
    self, run_id: str, date: str, portfolio_id: str, decision: dict, prices: dict,
    store: ReportStore | None = None,
) -> AsyncGenerator[Dict[str, Any], None]:
    """Manually execute a pre-computed PM decision (for resumability).

    Used when an auto run finds a saved PM decision but no execution result:
    Phase 3's decision step is skipped and only the trades are (re-)executed.

    Args:
        run_id: Identifier used for log correlation and event tagging.
        date: ISO date string the decision/execution belongs to.
        portfolio_id: Target portfolio identifier.
        decision: Parsed PM decision dict (buys/sells/holds).
        prices: ``{ticker: price}``; if empty, live prices are fetched.
        store: Optional shared ReportStore; a fresh one is created if omitted.

    Yields:
        System-log event dicts describing progress and errors.

    Raises:
        Re-raises any exception from the executor after logging and yielding
        an error event, so the caller marks the run as failed.
    """
    logger.info("Starting TRADE_EXECUTION run=%s portfolio=%s date=%s", run_id, portfolio_id, date)
    yield self._system_log(f"Resuming trade execution for {portfolio_id} using saved decision…")

    # Local imports avoid pulling portfolio machinery at module import time.
    from tradingagents.portfolio.trade_executor import TradeExecutor
    from tradingagents.portfolio.repository import PortfolioRepository

    # No prices supplied by the caller: fetch live quotes for every ticker
    # the decision references.
    if not prices:
        tickers = _tickers_from_decision(decision)
        if tickers:
            yield self._system_log(f"Fetching live prices for {tickers} from yfinance…")
            prices = _fetch_prices(tickers)
            logger.info("TRADE_EXECUTION run=%s: fetched prices for %s", run_id, list(prices.keys()))
    # Still empty (decision had no tickers, or the fetch failed): proceed
    # anyway but warn — execution is deliberately best-effort here.
    if not prices:
        logger.warning("TRADE_EXECUTION run=%s: no prices available — execution may produce incomplete results", run_id)
        yield self._system_log(f"Warning: no prices found for {portfolio_id} on {date} — trade execution may be incomplete.")

    # Reuse the caller's store when given so results land in the same tree.
    _store = store or ReportStore()

    try:
        repo = PortfolioRepository()
        executor = TradeExecutor(repo=repo, config=self.config)

        # Execute decisions
        result = executor.execute_decisions(portfolio_id, decision, prices, date=date)

        # Save results using the shared store instance
        _store.save_execution_result(date, portfolio_id, result)

        yield self._system_log(f"Trade execution completed for {portfolio_id}. {result.get('summary', {})}")
        logger.info("Completed TRADE_EXECUTION run=%s", run_id)
    except Exception as exc:
        logger.exception("Trade execution failed run=%s", run_id)
        yield self._system_log(f"Error during trade execution: {exc}")
        raise
|
||||
|
||||
async def run_auto(
|
||||
self, run_id: str, params: Dict[str, Any]
|
||||
) -> AsyncGenerator[Dict[str, Any], None]:
|
||||
"""Run the full auto pipeline: scan → pipeline → portfolio."""
|
||||
date = params.get("date", time.strftime("%Y-%m-%d"))
|
||||
force = params.get("force", False)
|
||||
|
||||
logger.info("Starting AUTO run=%s date=%s", run_id, date)
|
||||
yield self._system_log(f"Starting full auto workflow for {date}")
|
||||
logger.info("Starting AUTO run=%s date=%s force=%s", run_id, date, force)
|
||||
yield self._system_log(f"Starting full auto workflow for {date} (force={force})")
|
||||
|
||||
# Phase 1: Market scan
|
||||
yield self._system_log("Phase 1/3: Running market scan…")
|
||||
store = ReportStore()
|
||||
if store.load_scan(date):
|
||||
if not force and store.load_scan(date):
|
||||
yield self._system_log(f"Phase 1: Macro scan for {date} already exists, skipping.")
|
||||
else:
|
||||
async for evt in self.run_scan(f"{run_id}_scan", {"date": date}):
|
||||
|
|
@ -382,7 +496,7 @@ class LangGraphEngine:
|
|||
)
|
||||
else:
|
||||
for ticker in tickers:
|
||||
if store.load_analysis(date, ticker):
|
||||
if not force and store.load_analysis(date, ticker):
|
||||
yield self._system_log(f"Phase 2: Analysis for {ticker} on {date} already exists, skipping.")
|
||||
continue
|
||||
|
||||
|
|
@ -395,14 +509,29 @@ class LangGraphEngine:
|
|||
# Phase 3: Portfolio management
|
||||
yield self._system_log("Phase 3/3: Running portfolio manager…")
|
||||
portfolio_params = {k: v for k, v in params.items() if k != "ticker"}
|
||||
# Check if portfolio decision already exists
|
||||
if store.load_pm_decision(date, portfolio_id):
|
||||
yield self._system_log(f"Phase 3: Portfolio decision for {portfolio_id} on {date} already exists, skipping.")
|
||||
portfolio_id = params.get("portfolio_id", "main_portfolio")
|
||||
|
||||
# Check if portfolio stage is fully complete (execution result exists)
|
||||
if not force and store.load_execution_result(date, portfolio_id):
|
||||
yield self._system_log(f"Phase 3: Portfolio execution for {portfolio_id} on {date} already exists, skipping.")
|
||||
else:
|
||||
async for evt in self.run_portfolio(
|
||||
f"{run_id}_portfolio", {"date": date, **portfolio_params}
|
||||
):
|
||||
yield evt
|
||||
# Check if we can resume from a saved decision
|
||||
saved_decision = store.load_pm_decision(date, portfolio_id)
|
||||
if not force and saved_decision:
|
||||
yield self._system_log(f"Phase 3: Found saved PM decision for {portfolio_id}, resuming trade execution…")
|
||||
# Fetch live prices for all tickers referenced in the decision
|
||||
prices = _fetch_prices(_tickers_from_decision(saved_decision))
|
||||
async for evt in self.run_trade_execution(
|
||||
f"{run_id}_resume_trades", date, portfolio_id, saved_decision, prices,
|
||||
store=store,
|
||||
):
|
||||
yield evt
|
||||
else:
|
||||
# Run full portfolio graph (Decision + Execution)
|
||||
async for evt in self.run_portfolio(
|
||||
f"{run_id}_portfolio", {"date": date, **portfolio_params}
|
||||
):
|
||||
yield evt
|
||||
|
||||
logger.info("Completed AUTO run=%s", run_id)
|
||||
|
||||
|
|
@ -475,8 +604,8 @@ class LangGraphEngine:
|
|||
"""Extract ticker symbols from a ReportStore scan summary dict.
|
||||
|
||||
Handles two shapes from the macro synthesis LLM output:
|
||||
* List of dicts: ``[{"ticker": "AAPL", ...}, ...]``
|
||||
* List of strings: ``["AAPL", "TSLA", ...]``
|
||||
* List of dicts: ``[{'ticker': 'AAPL', ...}, ...]``
|
||||
* List of strings: ``['AAPL', 'TSLA', ...]``
|
||||
|
||||
Also checks both ``stocks_to_investigate`` and ``watchlist`` keys.
|
||||
Returns an uppercase, deduplicated list in original order.
|
||||
|
|
@ -599,7 +728,7 @@ class LangGraphEngine:
|
|||
Handles several structures observed across LangChain / LangGraph versions:
|
||||
- flat list of message objects ``[SystemMessage, HumanMessage, ...]``
|
||||
- list-of-lists (batched) ``[[SystemMessage, HumanMessage, ...]]``
|
||||
- list of plain dicts ``[{"role": "system", "content": "..."}]``
|
||||
- list of plain dicts ``[{'role': 'system', 'content': '...'}]``
|
||||
- tuple wrapper ``([SystemMessage, ...],)``
|
||||
"""
|
||||
if not messages:
|
||||
|
|
@ -628,7 +757,7 @@ class LangGraphEngine:
|
|||
|
||||
def _extract_model(self, event: Dict[str, Any]) -> str:
|
||||
"""Best-effort extraction of the model name from a LangGraph event."""
|
||||
data = event.get("data") or {}
|
||||
data = event.get("data") or {};
|
||||
|
||||
# 1. invocation_params (standard LangChain)
|
||||
inv = data.get("invocation_params") or {}
|
||||
|
|
@ -722,7 +851,7 @@ class LangGraphEngine:
|
|||
)
|
||||
|
||||
return {
|
||||
"id": event.get("run_id", f"thought_{time.time_ns()}"),
|
||||
"id": event.get("run_id", f"thought_{time.time_ns()}").strip(),
|
||||
"node_id": node_name,
|
||||
"parent_node_id": "start",
|
||||
"type": "thought",
|
||||
|
|
@ -757,7 +886,7 @@ class LangGraphEngine:
|
|||
logger.info("Tool start tool=%s node=%s run=%s", name, node_name, run_id)
|
||||
|
||||
return {
|
||||
"id": event.get("run_id", f"tool_{time.time_ns()}"),
|
||||
"id": event.get("run_id", f"tool_{time.time_ns()}").strip(),
|
||||
"node_id": f"tool_{name}",
|
||||
"parent_node_id": node_name,
|
||||
"type": "tool",
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@ import {
|
|||
IconButton,
|
||||
Button,
|
||||
Input,
|
||||
Checkbox,
|
||||
useDisclosure,
|
||||
Drawer,
|
||||
DrawerOverlay,
|
||||
|
|
@ -34,7 +35,7 @@ import {
|
|||
Collapse,
|
||||
useToast,
|
||||
} from '@chakra-ui/react';
|
||||
import { LayoutDashboard, Wallet, Settings, Terminal as TerminalIcon, ChevronRight, Eye, Search, BarChart3, Bot, ChevronDown, ChevronUp } from 'lucide-react';
|
||||
import { LayoutDashboard, Wallet, Settings, Terminal as TerminalIcon, ChevronRight, Eye, Search, BarChart3, Bot, ChevronDown, ChevronUp, Trash2 } from 'lucide-react';
|
||||
import { MetricHeader } from './components/MetricHeader';
|
||||
import { AgentGraph } from './components/AgentGraph';
|
||||
import { PortfolioViewer } from './components/PortfolioViewer';
|
||||
|
|
@ -50,6 +51,7 @@ interface RunParams {
|
|||
date: string;
|
||||
ticker: string;
|
||||
portfolio_id: string;
|
||||
force: boolean;
|
||||
}
|
||||
|
||||
const RUN_TYPE_LABELS: Record<RunType, string> = {
|
||||
|
|
@ -64,7 +66,7 @@ const REQUIRED_PARAMS: Record<RunType, (keyof RunParams)[]> = {
|
|||
scan: ['date'],
|
||||
pipeline: ['ticker', 'date'],
|
||||
portfolio: ['date', 'portfolio_id'],
|
||||
auto: ['date', 'ticker'],
|
||||
auto: ['date', 'portfolio_id'],
|
||||
};
|
||||
|
||||
/** Return the colour token for a given event type. */
|
||||
|
|
@ -312,6 +314,7 @@ export const Dashboard: React.FC = () => {
|
|||
date: new Date().toISOString().split('T')[0],
|
||||
ticker: 'AAPL',
|
||||
portfolio_id: 'main_portfolio',
|
||||
force: false,
|
||||
});
|
||||
|
||||
// Auto-scroll the terminal to the bottom as new events arrive
|
||||
|
|
@ -335,7 +338,7 @@ export const Dashboard: React.FC = () => {
|
|||
|
||||
// Validate required params
|
||||
const required = REQUIRED_PARAMS[type];
|
||||
const missing = required.filter((k) => !params[k]?.trim());
|
||||
const missing = required.filter((k) => { const v = params[k]; return typeof v === 'string' ? !v.trim() : !v; });
|
||||
if (missing.length > 0) {
|
||||
toast({
|
||||
title: `Missing required fields for ${RUN_TYPE_LABELS[type]}`,
|
||||
|
|
@ -357,6 +360,7 @@ export const Dashboard: React.FC = () => {
|
|||
portfolio_id: params.portfolio_id,
|
||||
date: params.date,
|
||||
ticker: params.ticker,
|
||||
force: params.force,
|
||||
});
|
||||
setActiveRunId(res.data.run_id);
|
||||
} catch (err) {
|
||||
|
|
@ -367,6 +371,27 @@ export const Dashboard: React.FC = () => {
|
|||
}
|
||||
};
|
||||
|
||||
const resetPortfolioStage = async () => {
|
||||
if (!params.date || !params.portfolio_id) {
|
||||
toast({ title: 'Date and Portfolio ID are required', status: 'warning', duration: 3000, isClosable: true, position: 'top' });
|
||||
setShowParams(true);
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const res = await axios.delete(`${API_BASE}/run/portfolio-stage`, { data: { date: params.date, portfolio_id: params.portfolio_id } });
|
||||
const deleted: string[] = res.data.deleted;
|
||||
toast({
|
||||
title: deleted.length ? `Cleared: ${deleted.join(', ')}` : 'Nothing to clear — no decision files found',
|
||||
status: deleted.length ? 'success' : 'info',
|
||||
duration: 4000,
|
||||
isClosable: true,
|
||||
position: 'top',
|
||||
});
|
||||
} catch (err) {
|
||||
toast({ title: 'Failed to reset portfolio stage', status: 'error', duration: 3000, isClosable: true, position: 'top' });
|
||||
}
|
||||
};
|
||||
|
||||
/** Open the full-screen event detail modal */
|
||||
const openModal = useCallback((evt: AgentEvent) => {
|
||||
setModalEvent(evt);
|
||||
|
|
@ -479,6 +504,19 @@ export const Dashboard: React.FC = () => {
|
|||
);
|
||||
})}
|
||||
<Divider orientation="vertical" h="20px" />
|
||||
<Tooltip label="Clear PM decision & execution result for this date/portfolio, then re-run Auto to start Phase 3 fresh">
|
||||
<Button
|
||||
size="sm"
|
||||
leftIcon={<Trash2 size={14} />}
|
||||
colorScheme="red"
|
||||
variant="outline"
|
||||
onClick={resetPortfolioStage}
|
||||
isDisabled={isRunning}
|
||||
>
|
||||
Reset Decision
|
||||
</Button>
|
||||
</Tooltip>
|
||||
<Divider orientation="vertical" h="20px" />
|
||||
<Tag size="sm" colorScheme={status === 'streaming' ? 'green' : status === 'completed' ? 'blue' : status === 'error' ? 'red' : 'gray'}>
|
||||
{status.toUpperCase()}
|
||||
</Tag>
|
||||
|
|
@ -529,8 +567,18 @@ export const Dashboard: React.FC = () => {
|
|||
onChange={(e) => setParams((p) => ({ ...p, portfolio_id: e.target.value }))}
|
||||
/>
|
||||
</HStack>
|
||||
<HStack>
|
||||
<Checkbox
|
||||
size="sm"
|
||||
colorScheme="orange"
|
||||
isChecked={params.force}
|
||||
onChange={(e) => setParams((p) => ({ ...p, force: e.target.checked }))}
|
||||
>
|
||||
<Text fontSize="xs" color="orange.300">Force re-run (ignore cached results)</Text>
|
||||
</Checkbox>
|
||||
</HStack>
|
||||
<Text fontSize="2xs" color="whiteAlpha.400">
|
||||
Required: Scan → date · Pipeline → ticker, date · Portfolio → date, portfolio · Auto → date, ticker
|
||||
Required: Scan → date · Pipeline → ticker, date · Portfolio → date, portfolio · Auto → date, portfolio
|
||||
</Text>
|
||||
</VStack>
|
||||
</Box>
|
||||
|
|
|
|||
|
|
@ -29,7 +29,9 @@ from tradingagents.default_config import _env, _env_float, _env_int
|
|||
|
||||
PORTFOLIO_CONFIG: dict = {
|
||||
"supabase_connection_string": os.getenv("SUPABASE_CONNECTION_STRING", ""),
|
||||
"data_dir": _env("PORTFOLIO_DATA_DIR", "reports"),
|
||||
# PORTFOLIO_DATA_DIR takes precedence; falls back to TRADINGAGENTS_REPORTS_DIR,
|
||||
# then to "reports" (relative to CWD) — same default as report_paths.REPORTS_ROOT.
|
||||
"data_dir": os.getenv("PORTFOLIO_DATA_DIR") or _env("REPORTS_DIR", "reports"),
|
||||
"max_positions": 15,
|
||||
"max_position_pct": 0.15,
|
||||
"max_sector_pct": 0.35,
|
||||
|
|
@ -46,7 +48,7 @@ def get_portfolio_config() -> dict:
|
|||
"""
|
||||
cfg = dict(PORTFOLIO_CONFIG)
|
||||
cfg["supabase_connection_string"] = os.getenv("SUPABASE_CONNECTION_STRING", cfg["supabase_connection_string"])
|
||||
cfg["data_dir"] = _env("PORTFOLIO_DATA_DIR", cfg["data_dir"])
|
||||
cfg["data_dir"] = os.getenv("PORTFOLIO_DATA_DIR") or _env("REPORTS_DIR", cfg["data_dir"])
|
||||
cfg["max_positions"] = _env_int("PM_MAX_POSITIONS", cfg["max_positions"])
|
||||
cfg["max_position_pct"] = _env_float("PM_MAX_POSITION_PCT", cfg["max_position_pct"])
|
||||
cfg["max_sector_pct"] = _env_float("PM_MAX_SECTOR_PCT", cfg["max_sector_pct"])
|
||||
|
|
|
|||
|
|
@ -276,6 +276,50 @@ class ReportStore:
|
|||
path = self._portfolio_dir(date) / f"{portfolio_id}_pm_decision.json"
|
||||
return self._read_json(path)
|
||||
|
||||
def save_execution_result(
    self,
    date: str,
    portfolio_id: str,
    data: dict[str, Any],
) -> Path:
    """Persist trade execution results as JSON.

    Written to
    ``{base_dir}/daily/{date}/portfolio/{portfolio_id}_execution_result.json``.

    Args:
        date: ISO date string.
        portfolio_id: UUID of the target portfolio.
        data: TradeExecutor output dict.

    Returns:
        Path of the written file.
    """
    target = self._portfolio_dir(date) / f"{portfolio_id}_execution_result.json"
    return self._write_json(target, data)
|
||||
|
||||
def load_execution_result(
    self,
    date: str,
    portfolio_id: str,
) -> dict[str, Any] | None:
    """Load a saved execution result, or None when the file does not exist."""
    target = self._portfolio_dir(date) / f"{portfolio_id}_execution_result.json"
    return self._read_json(target)
|
||||
|
||||
def clear_portfolio_stage(self, date: str, portfolio_id: str) -> list[str]:
    """Delete PM decision and execution result files for a given date/portfolio.

    Args:
        date: ISO date string.
        portfolio_id: Identifier of the target portfolio.

    Returns:
        A list of deleted file names so the caller can log what was removed.
    """
    stage_dir = self._portfolio_dir(date)
    targets = [
        stage_dir / f"{portfolio_id}_pm_decision.json",
        stage_dir / f"{portfolio_id}_pm_decision.md",
        stage_dir / f"{portfolio_id}_execution_result.json",
    ]
    deleted: list[str] = []
    for path in targets:
        # EAFP: unlink directly instead of exists()+unlink() to avoid the
        # race where the file disappears between the check and the delete.
        try:
            path.unlink()
        except FileNotFoundError:
            continue
        deleted.append(path.name)
    return deleted
||||
|
||||
def list_pm_decisions(self, portfolio_id: str) -> list[Path]:
|
||||
"""Return all saved PM decision JSON paths for portfolio_id, newest first.
|
||||
|
||||
|
|
|
|||
|
|
@ -415,3 +415,20 @@ class PortfolioRepository:
|
|||
) -> dict[str, Any] | None:
|
||||
"""Load risk metrics. Returns None if not found."""
|
||||
return self._store.load_risk_metrics(date, portfolio_id)
|
||||
|
||||
def save_execution_result(
    self,
    portfolio_id: str,
    date: str,
    result: dict[str, Any],
) -> Path:
    """Save trade execution results.

    Thin delegate to the underlying report store. NOTE: this repository
    method takes ``(portfolio_id, date)`` while the store takes
    ``(date, portfolio_id)`` — the swap below is intentional.

    Args:
        portfolio_id: Identifier of the target portfolio.
        date: ISO date string.
        result: TradeExecutor output dict.

    Returns:
        Path of the written JSON file.
    """
    return self._store.save_execution_result(date, portfolio_id, result)
|
||||
|
||||
def load_execution_result(
    self,
    portfolio_id: str,
    date: str,
) -> dict[str, Any] | None:
    """Load trade execution results. Returns None if not found.

    Thin delegate to the underlying report store; note the argument order
    swap — the store expects ``(date, portfolio_id)``.
    """
    return self._store.load_execution_result(date, portfolio_id)
|
||||
|
|
|
|||
|
|
@ -19,9 +19,12 @@ all generated artifacts land under a single ``reports/`` tree::
|
|||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
REPORTS_ROOT = Path("reports")
|
||||
# Configurable via TRADINGAGENTS_REPORTS_DIR env var.
|
||||
# Falls back to "reports" (relative to CWD) when unset.
|
||||
REPORTS_ROOT = Path(os.getenv("TRADINGAGENTS_REPORTS_DIR") or "reports")
|
||||
|
||||
|
||||
def get_daily_dir(date: str) -> Path:
|
||||
|
|
|
|||
Loading…
Reference in New Issue