fix(dashboard): address 4 critical issues found in pre-landing review

1. main.py: move API key validation before task state creation —
   prevents phantom "running" tasks when ANTHROPIC_API_KEY is missing
2. portfolio.py: make get_positions() async and fetch yfinance prices
   concurrently via run_in_executor — no longer blocks event loop
3. portfolio.py: add fcntl.LOCK_EX around all JSON read-modify-write
   operations on watchlist.json and positions.json — eliminates TOCTOU
   lost-write races under concurrent requests
4. main.py: use tempfile.mkstemp with mode 0o600 instead of world-
   readable /tmp/analysis_{task_id}.py — script content no longer
   exposed to other users on shared hosts

Also: remove the unused UploadFile/File imports; extract the cache-write
code stranded inside _delete_task_status (where `mode` and `data` were
undefined) into a proper _save_to_cache function; drop
allow_credentials=True from the wildcard-origin CORS middleware config;
and remove the unused get_or_create_default_account helper, inlining its
default-account fallback directly into add_position.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
陈少杰 2026-04-07 17:27:49 +08:00
parent 17a4ed2513
commit d12c34c333
2 changed files with 179 additions and 114 deletions

View File

@ -2,6 +2,7 @@
Portfolio API 自选股持仓每日建议 Portfolio API 自选股持仓每日建议
""" """
import asyncio import asyncio
import fcntl
import json import json
import uuid import uuid
from datetime import datetime from datetime import datetime
@ -17,6 +18,8 @@ DATA_DIR.mkdir(parents=True, exist_ok=True)
WATCHLIST_FILE = DATA_DIR / "watchlist.json" WATCHLIST_FILE = DATA_DIR / "watchlist.json"
POSITIONS_FILE = DATA_DIR / "positions.json" POSITIONS_FILE = DATA_DIR / "positions.json"
RECOMMENDATIONS_DIR = DATA_DIR / "recommendations" RECOMMENDATIONS_DIR = DATA_DIR / "recommendations"
WATCHLIST_LOCK = DATA_DIR / "watchlist.lock"
POSITIONS_LOCK = DATA_DIR / "positions.lock"
# ============== Watchlist ============== # ============== Watchlist ==============
@ -25,36 +28,56 @@ def get_watchlist() -> list:
if not WATCHLIST_FILE.exists(): if not WATCHLIST_FILE.exists():
return [] return []
try: try:
return json.loads(WATCHLIST_FILE.read_text()).get("watchlist", []) with open(WATCHLIST_LOCK, "w") as lf:
fcntl.flock(lf.fileno(), fcntl.LOCK_SH)
try:
return json.loads(WATCHLIST_FILE.read_text()).get("watchlist", [])
finally:
fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
except Exception: except Exception:
return [] return []
def _save_watchlist(watchlist: list): def _save_watchlist(watchlist: list):
WATCHLIST_FILE.write_text(json.dumps({"watchlist": watchlist}, ensure_ascii=False, indent=2)) with open(WATCHLIST_LOCK, "w") as lf:
fcntl.flock(lf.fileno(), fcntl.LOCK_EX)
try:
WATCHLIST_FILE.write_text(json.dumps({"watchlist": watchlist}, ensure_ascii=False, indent=2))
finally:
fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
def add_to_watchlist(ticker: str, name: str) -> dict: def add_to_watchlist(ticker: str, name: str) -> dict:
watchlist = get_watchlist() with open(WATCHLIST_LOCK, "w") as lf:
if any(s["ticker"] == ticker for s in watchlist): fcntl.flock(lf.fileno(), fcntl.LOCK_EX)
raise ValueError(f"{ticker} 已在自选股中") try:
entry = { watchlist = json.loads(WATCHLIST_FILE.read_text()).get("watchlist", []) if WATCHLIST_FILE.exists() else []
"ticker": ticker, if any(s["ticker"] == ticker for s in watchlist):
"name": name, raise ValueError(f"{ticker} 已在自选股中")
"added_at": datetime.now().strftime("%Y-%m-%d"), entry = {
} "ticker": ticker,
watchlist.append(entry) "name": name,
_save_watchlist(watchlist) "added_at": datetime.now().strftime("%Y-%m-%d"),
return entry }
watchlist.append(entry)
WATCHLIST_FILE.write_text(json.dumps({"watchlist": watchlist}, ensure_ascii=False, indent=2))
return entry
finally:
fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
def remove_from_watchlist(ticker: str) -> bool: def remove_from_watchlist(ticker: str) -> bool:
watchlist = get_watchlist() with open(WATCHLIST_LOCK, "w") as lf:
new_list = [s for s in watchlist if s["ticker"] != ticker] fcntl.flock(lf.fileno(), fcntl.LOCK_EX)
if len(new_list) == len(watchlist): try:
return False watchlist = json.loads(WATCHLIST_FILE.read_text()).get("watchlist", []) if WATCHLIST_FILE.exists() else []
_save_watchlist(new_list) new_list = [s for s in watchlist if s["ticker"] != ticker]
return True if len(new_list) == len(watchlist):
return False
WATCHLIST_FILE.write_text(json.dumps({"watchlist": new_list}, ensure_ascii=False, indent=2))
return True
finally:
fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
# ============== Accounts ============== # ============== Accounts ==============
@ -63,47 +86,71 @@ def get_accounts() -> dict:
if not POSITIONS_FILE.exists(): if not POSITIONS_FILE.exists():
return {"accounts": {}} return {"accounts": {}}
try: try:
return json.loads(POSITIONS_FILE.read_text()) with open(POSITIONS_LOCK, "w") as lf:
fcntl.flock(lf.fileno(), fcntl.LOCK_SH)
try:
return json.loads(POSITIONS_FILE.read_text())
finally:
fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
except Exception: except Exception:
return {"accounts": {}} return {"accounts": {}}
def _save_accounts(data: dict): def _save_accounts(data: dict):
POSITIONS_FILE.write_text(json.dumps(data, ensure_ascii=False, indent=2)) with open(POSITIONS_LOCK, "w") as lf:
fcntl.flock(lf.fileno(), fcntl.LOCK_EX)
try:
def get_or_create_default_account(accounts: dict) -> dict: POSITIONS_FILE.write_text(json.dumps(data, ensure_ascii=False, indent=2))
if "默认账户" not in accounts.get("accounts", {}): finally:
accounts["accounts"]["默认账户"] = {"positions": {}} fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
return accounts["accounts"]["默认账户"]
def create_account(account_name: str) -> dict: def create_account(account_name: str) -> dict:
accounts = get_accounts() with open(POSITIONS_LOCK, "w") as lf:
if account_name in accounts.get("accounts", {}): fcntl.flock(lf.fileno(), fcntl.LOCK_EX)
raise ValueError(f"账户 {account_name} 已存在") try:
accounts["accounts"][account_name] = {"positions": {}} accounts = json.loads(POSITIONS_FILE.read_text()) if POSITIONS_FILE.exists() else {"accounts": {}}
_save_accounts(accounts) if account_name in accounts.get("accounts", {}):
return {"account_name": account_name} raise ValueError(f"账户 {account_name} 已存在")
accounts["accounts"][account_name] = {"positions": {}}
POSITIONS_FILE.write_text(json.dumps(accounts, ensure_ascii=False, indent=2))
return {"account_name": account_name}
finally:
fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
def delete_account(account_name: str) -> bool: def delete_account(account_name: str) -> bool:
accounts = get_accounts() with open(POSITIONS_LOCK, "w") as lf:
if account_name not in accounts.get("accounts", {}): fcntl.flock(lf.fileno(), fcntl.LOCK_EX)
return False try:
del accounts["accounts"][account_name] accounts = json.loads(POSITIONS_FILE.read_text()) if POSITIONS_FILE.exists() else {"accounts": {}}
_save_accounts(accounts) if account_name not in accounts.get("accounts", {}):
return True return False
del accounts["accounts"][account_name]
POSITIONS_FILE.write_text(json.dumps(accounts, ensure_ascii=False, indent=2))
return True
finally:
fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
# ============== Positions ============== # ============== Positions =============
def get_positions(account: Optional[str] = None) -> list: def _fetch_price(ticker: str) -> float | None:
"""Fetch current price synchronously (called in thread executor)"""
try:
stock = yfinance.Ticker(ticker)
info = stock.info or {}
return info.get("currentPrice") or info.get("regularMarketPrice")
except Exception:
return None
async def get_positions(account: Optional[str] = None) -> list:
""" """
Returns positions with live price from yfinance and computed P&L. Returns positions with live price from yfinance and computed P&L.
Uses asyncio executor to avoid blocking the event loop on yfinance HTTP calls.
""" """
accounts = get_accounts() accounts = get_accounts()
result = []
if account: if account:
acc = accounts.get("accounts", {}).get(account) acc = accounts.get("accounts", {}).get(account)
@ -119,22 +166,22 @@ def get_positions(account: Optional[str] = None) -> list:
for _pos in _positions for _pos in _positions
] ]
for ticker, pos in positions: if not positions:
try: return []
stock = yfinance.Ticker(ticker)
info = stock.info or {}
current_price = info.get("currentPrice") or info.get("regularMarketPrice")
except Exception:
current_price = None
loop = asyncio.get_event_loop()
tickers = [t for t, _ in positions]
prices = await asyncio.gather(*[loop.run_in_executor(None, _fetch_price, t) for t in tickers])
result = []
for (ticker, pos), current_price in zip(positions, prices):
shares = pos.get("shares", 0) shares = pos.get("shares", 0)
cost_price = pos.get("cost_price", 0) cost_price = pos.get("cost_price", 0)
unrealized_pnl = None
unrealized_pnl_pct = None
if current_price is not None and cost_price: if current_price is not None and cost_price:
unrealized_pnl = (current_price - cost_price) * shares unrealized_pnl = (current_price - cost_price) * shares
unrealized_pnl_pct = (current_price / cost_price - 1) * 100 unrealized_pnl_pct = (current_price / cost_price - 1) * 100
else:
unrealized_pnl = None
unrealized_pnl_pct = None
result.append({ result.append({
"ticker": ticker, "ticker": ticker,
@ -154,56 +201,68 @@ def get_positions(account: Optional[str] = None) -> list:
def add_position(ticker: str, shares: float, cost_price: float, def add_position(ticker: str, shares: float, cost_price: float,
purchase_date: Optional[str], notes: str, account: str) -> dict: purchase_date: Optional[str], notes: str, account: str) -> dict:
accounts = get_accounts() with open(POSITIONS_LOCK, "w") as lf:
acc = accounts.get("accounts", {}).get(account) fcntl.flock(lf.fileno(), fcntl.LOCK_EX)
if not acc: try:
acc = get_or_create_default_account(accounts) accounts = json.loads(POSITIONS_FILE.read_text()) if POSITIONS_FILE.exists() else {"accounts": {}}
acc = accounts.get("accounts", {}).get(account)
if not acc:
if "默认账户" not in accounts.get("accounts", {}):
accounts["accounts"]["默认账户"] = {"positions": {}}
acc = accounts["accounts"]["默认账户"]
position_id = f"pos_{uuid.uuid4().hex[:6]}" position_id = f"pos_{uuid.uuid4().hex[:6]}"
position = { position = {
"position_id": position_id, "position_id": position_id,
"shares": shares, "shares": shares,
"cost_price": cost_price, "cost_price": cost_price,
"purchase_date": purchase_date, "purchase_date": purchase_date,
"notes": notes, "notes": notes,
"account": account, "account": account,
"name": ticker, "name": ticker,
} }
if ticker not in acc["positions"]: if ticker not in acc["positions"]:
acc["positions"][ticker] = [] acc["positions"][ticker] = []
acc["positions"][ticker].append(position) acc["positions"][ticker].append(position)
_save_accounts(accounts) POSITIONS_FILE.write_text(json.dumps(accounts, ensure_ascii=False, indent=2))
return position return position
finally:
fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
def remove_position(ticker: str, position_id: str, account: Optional[str]) -> bool: def remove_position(ticker: str, position_id: str, account: Optional[str]) -> bool:
accounts = get_accounts() with open(POSITIONS_LOCK, "w") as lf:
if account: fcntl.flock(lf.fileno(), fcntl.LOCK_EX)
acc = accounts.get("accounts", {}).get(account) try:
if acc and ticker in acc.get("positions", {}): accounts = json.loads(POSITIONS_FILE.read_text()) if POSITIONS_FILE.exists() else {"accounts": {}}
acc["positions"][ticker] = [ if account:
p for p in acc["positions"][ticker] acc = accounts.get("accounts", {}).get(account)
if p.get("position_id") != position_id if acc and ticker in acc.get("positions", {}):
] acc["positions"][ticker] = [
if not acc["positions"][ticker]: p for p in acc["positions"][ticker]
del acc["positions"][ticker] if p.get("position_id") != position_id
_save_accounts(accounts) ]
return True if not acc["positions"][ticker]:
else: del acc["positions"][ticker]
for acc_data in accounts.get("accounts", {}).values(): POSITIONS_FILE.write_text(json.dumps(accounts, ensure_ascii=False, indent=2))
if ticker in acc_data.get("positions", {}):
original_len = len(acc_data["positions"][ticker])
acc_data["positions"][ticker] = [
p for p in acc_data["positions"][ticker]
if p.get("position_id") != position_id
]
if len(acc_data["positions"][ticker]) < original_len:
if not acc_data["positions"][ticker]:
del acc_data["positions"][ticker]
_save_accounts(accounts)
return True return True
return False else:
for acc_data in accounts.get("accounts", {}).values():
if ticker in acc_data.get("positions", {}):
original_len = len(acc_data["positions"][ticker])
acc_data["positions"][ticker] = [
p for p in acc_data["positions"][ticker]
if p.get("position_id") != position_id
]
if len(acc_data["positions"][ticker]) < original_len:
if not acc_data["positions"][ticker]:
del acc_data["positions"][ticker]
POSITIONS_FILE.write_text(json.dumps(accounts, ensure_ascii=False, indent=2))
return True
return False
finally:
fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
# ============== Recommendations ============== # ============== Recommendations ==============

View File

@ -8,6 +8,7 @@ import json
import os import os
import subprocess import subprocess
import sys import sys
import tempfile
import time import time
import traceback import traceback
from datetime import datetime from datetime import datetime
@ -15,7 +16,7 @@ from pathlib import Path
from typing import Optional from typing import Optional
from contextlib import asynccontextmanager from contextlib import asynccontextmanager
from fastapi import FastAPI, HTTPException, WebSocket, WebSocketDisconnect, Query, UploadFile, File from fastapi import FastAPI, HTTPException, WebSocket, WebSocketDisconnect, Query
from fastapi.middleware.cors import CORSMiddleware from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel from pydantic import BaseModel
from fastapi.responses import Response from fastapi.responses import Response
@ -60,7 +61,6 @@ app = FastAPI(
app.add_middleware( app.add_middleware(
CORSMiddleware, CORSMiddleware,
allow_origins=["*"], allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"], allow_methods=["*"],
allow_headers=["*"], allow_headers=["*"],
) )
@ -99,6 +99,17 @@ def _load_from_cache(mode: str) -> Optional[dict]:
return None return None
def _save_to_cache(mode: str, data: dict):
"""Save screening result to cache"""
try:
CACHE_DIR.mkdir(parents=True, exist_ok=True)
cache_path = _get_cache_path(mode)
with open(cache_path, "w") as f:
json.dump(data, f)
except Exception:
pass
def _save_task_status(task_id: str, data: dict): def _save_task_status(task_id: str, data: dict):
"""Persist task state to disk""" """Persist task state to disk"""
try: try:
@ -114,13 +125,6 @@ def _delete_task_status(task_id: str):
(TASK_STATUS_DIR / f"{task_id}.json").unlink(missing_ok=True) (TASK_STATUS_DIR / f"{task_id}.json").unlink(missing_ok=True)
except Exception: except Exception:
pass pass
try:
CACHE_DIR.mkdir(parents=True, exist_ok=True)
cache_path = _get_cache_path(mode)
with open(cache_path, "w") as f:
json.dump(data, f)
except Exception:
pass
# ============== SEPA Screening ============== # ============== SEPA Screening ==============
@ -224,6 +228,11 @@ async def start_analysis(request: AnalysisRequest):
task_id = f"{request.ticker}_{datetime.now().strftime('%Y%m%d_%H%M%S')}_{uuid.uuid4().hex[:6]}" task_id = f"{request.ticker}_{datetime.now().strftime('%Y%m%d_%H%M%S')}_{uuid.uuid4().hex[:6]}"
date = request.date or datetime.now().strftime("%Y-%m-%d") date = request.date or datetime.now().strftime("%Y-%m-%d")
# Validate API key before storing any task state
api_key = os.environ.get("ANTHROPIC_API_KEY")
if not api_key:
raise HTTPException(status_code=500, detail="ANTHROPIC_API_KEY environment variable not set")
# Initialize task state # Initialize task state
app.state.task_results[task_id] = { app.state.task_results[task_id] = {
"task_id": task_id, "task_id": task_id,
@ -245,17 +254,14 @@ async def start_analysis(request: AnalysisRequest):
"decision": None, "decision": None,
"error": None, "error": None,
} }
# Get API key - fail fast before storing a running task
api_key = os.environ.get("ANTHROPIC_API_KEY")
if not api_key:
raise HTTPException(status_code=500, detail="ANTHROPIC_API_KEY environment variable not set")
await broadcast_progress(task_id, app.state.task_results[task_id]) await broadcast_progress(task_id, app.state.task_results[task_id])
# Write analysis script to temp file (avoids subprocess -c quoting issues) # Write analysis script to temp file with restrictive permissions (avoids subprocess -c quoting issues)
script_path = Path(f"/tmp/analysis_{task_id}.py") fd, script_path_str = tempfile.mkstemp(suffix=".py", prefix=f"analysis_{task_id}_")
script_content = ANALYSIS_SCRIPT_TEMPLATE script_path = Path(script_path_str)
script_path.write_text(script_content) os.chmod(script_path, 0o600)
with os.fdopen(fd, "w") as f:
f.write(ANALYSIS_SCRIPT_TEMPLATE)
# Store process reference for cancellation # Store process reference for cancellation
app.state.processes = getattr(app.state, 'processes', {}) app.state.processes = getattr(app.state, 'processes', {})