Merge optimization plans: A1 (Fast Mode), A2 (Parallel Analysts), C3 (Cloud Sync Retry)

This commit is contained in:
MarkLo127 2026-03-12 21:47:12 +08:00
commit dfa723a23a
11 changed files with 322 additions and 17 deletions

View File

@ -141,6 +141,7 @@ async def run_analysis(
alpha_vantage_api_key=request.alpha_vantage_api_key or "",
finmind_api_key=request.finmind_api_key or "",
language=request.language or "zh-TW", # Pass language for agent reports
analysis_mode=request.analysis_mode or "deep", # Pass analysis mode (fast or deep)
))
# Check for errors in result

View File

@ -67,6 +67,10 @@ class AnalysisRequest(BaseModel):
default="zh-TW",
description="Language for agent reports: 'en' for English, 'zh-TW' for Traditional Chinese"
)
analysis_mode: Optional[Literal["fast", "deep"]] = Field(
default="deep",
description="Analysis mode: 'fast' (no debates, ~15-25 min) or 'deep' (with debates, ~1 hour)"
)
class PriceData(BaseModel):
"""Stock price data model"""

View File

@ -28,14 +28,33 @@ class TradingService:
research_depth: int = 1,
deep_think_llm: str = "gpt-5-mini",
quick_think_llm: str = "gpt-5-mini",
analysis_mode: str = "deep",
) -> Dict[str, Any]:
"""Create configuration for TradingAgentsX"""
"""Create configuration for TradingAgentsX
Args:
research_depth: Research depth (1-5)
deep_think_llm: Deep thinking LLM model
quick_think_llm: Quick thinking LLM model
analysis_mode: "fast" (no debates) or "deep" (with debates)
"""
config = self.default_config.copy()
config["max_debate_rounds"] = research_depth
config["max_risk_discuss_rounds"] = research_depth
config["deep_think_llm"] = deep_think_llm
config["quick_think_llm"] = quick_think_llm
config["results_dir"] = settings.results_dir
# Handle analysis mode
if analysis_mode == "fast":
# Fast mode: disable debates entirely
config["max_debate_rounds"] = 0
config["max_risk_discuss_rounds"] = 0
logger.info("Analysis mode: FAST (debates disabled)")
else:
# Deep mode: use research_depth for debate rounds
config["max_debate_rounds"] = research_depth
config["max_risk_discuss_rounds"] = research_depth
logger.info(f"Analysis mode: DEEP (research_depth={research_depth})")
return config
async def run_analysis(
@ -59,6 +78,7 @@ class TradingService:
deep_think_llm: str = "gpt-5-mini",
quick_think_llm: str = "gpt-5-mini",
language: str = "zh-TW", # Language for agent reports: 'en' or 'zh-TW'
analysis_mode: str = "deep", # Analysis mode: 'fast' (no debates) or 'deep' (with debates)
) -> Dict[str, Any]:
"""
Run trading analysis for a given ticker and date with user-provided API keys
@ -106,8 +126,8 @@ class TradingService:
os.environ["OPENAI_API_KEY"] = openai_api_key
# Create configuration
logger.info(f"Initializing TradingAgentsX for {ticker} on {analysis_date}")
config = self.create_config(research_depth, deep_think_llm, quick_think_llm)
logger.info(f"Initializing TradingAgentsX for {ticker} on {analysis_date} (mode={analysis_mode})")
config = self.create_config(research_depth, deep_think_llm, quick_think_llm, analysis_mode)
# Normalize base URLs (ensure lowercase paths, common issue with custom endpoints)
def normalize_base_url(url: str) -> str:

View File

@ -63,6 +63,10 @@ export default function AnalysisPage() {
});
if (cloudId) {
console.log("☁️ Auto-saved report to cloud");
} else {
// Cloud sync failed - mark for retry but don't fail the auto-save
// The report is already safely stored in local IndexedDB
console.warn("⚠️ Cloud sync failed, but report saved locally. Will retry later.");
}
}
// Note: Redis cleanup is handled immediately when analysis completes

View File

@ -7,6 +7,7 @@ import { AnalysisProvider } from "@/context/AnalysisContext";
import { ThemeProvider } from "@/components/theme/ThemeProvider";
import { AuthProvider } from "@/contexts/auth-context";
import { LanguageProvider } from "@/contexts/LanguageContext";
import { SyncInitializer } from "@/components/providers/SyncInitializer";
const inter = Inter({ subsets: ["latin"] });
@ -118,6 +119,7 @@ export default function RootLayout({
<ThemeProvider>
<LanguageProvider>
<AuthProvider>
<SyncInitializer />
<AnalysisProvider>
<div className="flex flex-col min-h-screen gradient-page-bg">
<Header />

View File

@ -52,6 +52,7 @@ const formSchema = z.object({
.regex(/^\d{4}-\d{2}-\d{2}$/, "日期格式必須為 YYYY-MM-DD"),
analysts: z.array(z.string()).min(1, "請至少選擇一位分析師"),
research_depth: z.number().int().min(1).max(5),
analysis_mode: z.enum(["fast", "deep"]).default("deep"),
quick_think_llm: z.string().min(1, "請選擇快速思維模型"),
deep_think_llm: z.string().min(1, "請選擇深層思維模型"),
embedding_model: z.string().min(1, "請選擇嵌入式模型"),
@ -111,6 +112,7 @@ export function AnalysisForm({ onSubmit, loading = false }: AnalysisFormProps) {
analysis_date: format(new Date(), "yyyy-MM-dd"),
analysts: ["market", "social", "news", "fundamentals"], // 預設全選
research_depth: 3, // 預設中等層級
analysis_mode: "deep", // 預設深層分析
market_type: "us", // 預設美股
quick_think_llm: "gpt-5-mini",
deep_think_llm: "gpt-5-mini",
@ -473,6 +475,35 @@ export function AnalysisForm({ onSubmit, loading = false }: AnalysisFormProps) {
/>
</div>
{/* 分析模式行 */}
<div className="md:col-span-2 grid grid-cols-1 md:grid-cols-2 gap-6">
<FormField
control={form.control}
name="analysis_mode"
render={({ field }) => (
<FormItem>
<FormLabel>分析模式</FormLabel>
<Select
onValueChange={field.onChange}
defaultValue={field.value}
>
<FormControl>
<SelectTrigger>
<SelectValue placeholder="選擇分析模式" />
</SelectTrigger>
</FormControl>
<SelectContent>
<SelectItem value="fast">快速分析 (~15-25 分鐘)</SelectItem>
<SelectItem value="deep">深層分析 (~1 小時)</SelectItem>
</SelectContent>
</Select>
<FormDescription></FormDescription>
<FormMessage />
</FormItem>
)}
/>
</div>
{/* 第二行研究深度、快速思維模型、深層思維模型、嵌入式模型4列 */}
<div className="md:col-span-2 grid grid-cols-1 md:grid-cols-4 gap-6">
<FormField

View File

@ -0,0 +1,27 @@
/**
* Sync Initializer - Starts cloud sync retry loop on app startup
* This client component ensures that failed cloud syncs are automatically retried
*/
"use client";
import { useEffect } from "react";
import { startRetryLoop, stopRetryLoop } from "@/lib/sync-retry";
import { isCloudSyncEnabled } from "@/lib/user-api";
export function SyncInitializer() {
  useEffect(() => {
    // Retry loop is only meaningful when the user is authenticated and cloud
    // sync is available; otherwise mount as a no-op.
    // NOTE(review): this check runs once on mount — if the user logs in later
    // without a remount, the loop will not start. Confirm against auth flow.
    if (!isCloudSyncEnabled()) {
      return;
    }
    console.log("🔄 Starting cloud sync retry service");
    startRetryLoop();
    // Stop the interval when the app unmounts so the timer is not leaked.
    return () => {
      console.log("⏹️ Stopping cloud sync retry service");
      stopRetryLoop();
    };
  }, []);
  // Renders nothing — this component exists only for its startup side effect.
  return null;
}

220
frontend/lib/sync-retry.ts Normal file
View File

@ -0,0 +1,220 @@
/**
* Cloud sync retry service
* Handles retrying failed cloud syncs for reports stored in local IndexedDB
*/
import { getAllReports, saveReport } from "./reports-db";
import { saveCloudReport, isCloudSyncEnabled } from "./user-api";
import type { SavedReport } from "./reports-db";
// Retry configuration
const RETRY_INTERVAL = 30000; // 30 seconds between retry attempts
const MAX_RETRIES = 5; // Give up after 5 failed attempts
const RETRY_BACKOFF = 1.5; // Exponential backoff multiplier
// NOTE(review): RETRY_BACKOFF is declared but never referenced anywhere in
// this module — retries currently fire at the fixed RETRY_INTERVAL. Either
// wire it into the retry scheduling or remove it.

/**
 * Bookkeeping for a single report whose cloud sync has failed and is awaiting
 * another attempt. Keyed in `retryMap` by `getReportKey(...)`.
 */
interface RetryRecord {
  ticker: string;
  analysis_date: string; // YYYY-MM-DD, matches the report's analysis_date
  market_type: "us" | "twse" | "tpex";
  language?: "en" | "zh-TW";
  attempts: number; // failed sync attempts so far (compared against MAX_RETRIES)
  last_attempt: number; // epoch ms of the most recent attempt; 0 = never tried
}

// Track retry attempts in memory
// (state is per-tab and lost on page reload; attempt counts restart at zero)
const retryMap = new Map<string, RetryRecord>();
/**
 * Build the deduplication key used to track a report in the retry map.
 * Reports are identified by ticker + date + market, with the language
 * defaulting to "zh-TW" when absent.
 */
function getReportKey(
  ticker: string,
  analysis_date: string,
  market_type: string,
  language?: string
): string {
  const parts = [ticker, analysis_date, market_type, language || "zh-TW"];
  return parts.join("|");
}
/**
 * Retry a single report's cloud sync.
 *
 * Makes one `saveCloudReport` attempt for a report already persisted in local
 * IndexedDB. On success the report's retry record is removed; on failure
 * (either a falsy cloud id or a thrown error) the attempt counter is
 * incremented. Once a report has accumulated MAX_RETRIES failed attempts its
 * record is dropped and it will not be retried again this session.
 *
 * Fix: the "falsy cloudId" branch and the `catch` branch previously
 * duplicated the retry-record bookkeeping verbatim; that logic is now shared
 * via a single local `recordFailure` closure.
 *
 * @param report - Locally stored report to push to the cloud.
 * @returns true if the cloud sync succeeded, false otherwise.
 */
async function retrySingleReport(report: SavedReport): Promise<boolean> {
  if (!isCloudSyncEnabled()) {
    console.log("Cloud sync not enabled, skipping retry");
    return false;
  }
  const key = getReportKey(
    report.ticker,
    report.analysis_date,
    report.market_type,
    report.language
  );
  // Check retry attempts
  const retryRecord = retryMap.get(key);
  if (retryRecord && retryRecord.attempts >= MAX_RETRIES) {
    console.warn(
      `⚠️ [${report.ticker}] Max retries exceeded, giving up on cloud sync`
    );
    retryMap.delete(key);
    return false;
  }
  // Record one failed attempt. Shared by both failure paths below, which
  // previously carried identical copies of this block.
  const recordFailure = (): void => {
    if (!retryRecord) {
      retryMap.set(key, {
        ticker: report.ticker,
        analysis_date: report.analysis_date,
        market_type: report.market_type,
        language: report.language,
        attempts: 1,
        last_attempt: Date.now(),
      });
    } else {
      // Record object is the one held in the map, so mutation persists.
      retryRecord.attempts++;
      retryRecord.last_attempt = Date.now();
    }
  };
  try {
    console.log(
      `🔄 [${report.ticker}] Retrying cloud sync (attempt ${(retryRecord?.attempts || 0) + 1}/${MAX_RETRIES})`
    );
    const cloudId = await saveCloudReport({
      ticker: report.ticker,
      market_type: report.market_type,
      analysis_date: report.analysis_date,
      result: report.result,
      language: report.language,
    });
    if (cloudId) {
      console.log(`✅ [${report.ticker}] Cloud sync successful, clearing retry record`);
      retryMap.delete(key);
      return true;
    }
    // Still failed, increment retry count
    recordFailure();
    return false;
  } catch (error) {
    console.error(`❌ [${report.ticker}] Cloud sync retry failed:`, error);
    recordFailure();
    return false;
  }
}
/**
 * Attempt to sync all reports with pending_sync flag.
 *
 * Walks every locally stored report and retries cloud sync for each one that
 * has no cloud_id yet and is flagged pending_sync. Never throws: any error is
 * logged and reported as zero counts.
 *
 * @returns Counts of successful and failed sync attempts in this pass.
 */
export async function retryPendingSyncs(): Promise<{
  successful: number;
  failed: number;
}> {
  if (!isCloudSyncEnabled()) {
    console.log("Cloud sync not enabled, skipping retry");
    return { successful: 0, failed: 0 };
  }
  try {
    const allReports = await getAllReports();
    const counts = { successful: 0, failed: 0 };
    // Try to sync reports (we'll assume any local report without a cloud_id needs syncing)
    for (const report of allReports) {
      if (report.cloud_id || !report.pending_sync) {
        continue;
      }
      // Reports are synced sequentially, awaiting each attempt in turn.
      const synced = await retrySingleReport(report);
      if (synced) {
        counts.successful++;
        // Update the report to clear pending_sync flag
        // Note: This would require an updateReport function in reports-db.ts
      } else {
        counts.failed++;
      }
    }
    if (counts.successful > 0 || counts.failed > 0) {
      console.log(
        `📊 Cloud sync retry summary: ${counts.successful} successful, ${counts.failed} failed`
      );
    }
    return counts;
  } catch (error) {
    console.error("Error retrying pending syncs:", error);
    return { successful: 0, failed: 0 };
  }
}
/**
 * Get the number of pending syncs.
 *
 * Reports the count of reports currently tracked in the in-memory retry map
 * (i.e. reports registered for retry this session, not yet synced or given up).
 */
export function getPendingSyncCount(): number {
  const pending = retryMap.size;
  return pending;
}
/**
 * Mark a report as needing retry.
 *
 * Registers the report in the in-memory retry map with zero attempts so the
 * background loop will pick it up. If the report is already tracked, the
 * existing record (and its attempt count) is left untouched.
 */
export function markForRetry(
  ticker: string,
  analysis_date: string,
  market_type: "us" | "twse" | "tpex",
  language?: "en" | "zh-TW"
): void {
  const key = getReportKey(ticker, analysis_date, market_type, language);
  if (retryMap.has(key)) {
    // Already tracked — resetting would discard the attempt count.
    return;
  }
  retryMap.set(key, {
    ticker,
    analysis_date,
    market_type,
    language,
    attempts: 0,
    last_attempt: 0,
  });
  console.log(`📌 [${ticker}] Marked for cloud sync retry`);
}
/**
 * Start automatic retry loop (should be called once on app startup).
 *
 * Every RETRY_INTERVAL ms, attempts to re-sync locally stored reports that
 * are still flagged pending_sync. Idempotent: a second call while the loop is
 * running is a warned no-op.
 *
 * Fix: the interval body previously also required `retryMap.size > 0`, but
 * `retryPendingSyncs()` discovers its work from the IndexedDB `pending_sync`
 * flag — and the auto-save failure path never populates `retryMap` — so
 * flagged reports could wait forever. The loop now gates only on cloud sync
 * being enabled.
 */
let retryIntervalId: NodeJS.Timeout | null = null;
export function startRetryLoop(): void {
  if (retryIntervalId) {
    console.warn("Retry loop already started");
    return;
  }
  retryIntervalId = setInterval(() => {
    if (isCloudSyncEnabled()) {
      // Fire-and-forget: retryPendingSyncs() catches and logs its own errors.
      void retryPendingSyncs();
    }
  }, RETRY_INTERVAL);
  console.log("🔄 Cloud sync retry loop started");
}
/**
 * Stop the automatic retry loop and release its timer.
 * Safe to call when the loop was never started (no-op).
 */
export function stopRetryLoop(): void {
  if (!retryIntervalId) {
    return;
  }
  clearInterval(retryIntervalId);
  retryIntervalId = null;
  console.log("⏹️ Cloud sync retry loop stopped");
}

View File

@ -7,6 +7,7 @@ export interface AnalysisRequest {
analysis_date: string;
analysts?: string[];
research_depth?: number;
analysis_mode?: "fast" | "deep"; // Analysis mode: 'fast' (no debates) or 'deep' (with debates)
market_type?: "us" | "twse" | "tpex"; // 市場類型:美股、上市、上櫃/興櫃
quick_think_llm?: string;
deep_think_llm?: string;

Binary file not shown.

View File

@ -151,12 +151,12 @@ class GraphSetup:
workflow.add_node("Risk Judge", risk_manager_node)
# 定義邊
# 從第一個分析師開始
first_analyst = selected_analysts[0]
workflow.add_edge(START, f"{first_analyst.capitalize()} Analyst")
# 平行啟動所有分析師(而不是順序連接)
for analyst_type in selected_analysts:
workflow.add_edge(START, f"{analyst_type.capitalize()} Analyst")
# 依次連接分析師
for i, analyst_type in enumerate(selected_analysts):
# 連接所有分析師到其工具和清除節點
for analyst_type in selected_analysts:
current_analyst = f"{analyst_type.capitalize()} Analyst"
current_tools = f"tools_{analyst_type}"
current_clear = f"Msg Clear {analyst_type.capitalize()}"
@ -168,13 +168,8 @@ class GraphSetup:
[current_tools, current_clear],
)
workflow.add_edge(current_tools, current_analyst)
# 連接到下一個分析師,如果是最後一個分析師,則連接到看漲研究員
if i < len(selected_analysts) - 1:
next_analyst = f"{selected_analysts[i+1].capitalize()} Analyst"
workflow.add_edge(current_clear, next_analyst)
else:
workflow.add_edge(current_clear, "Bull Researcher")
# 所有分析師完成後都直接連接到看漲研究員(平行匯聚)
workflow.add_edge(current_clear, "Bull Researcher")
# 新增剩餘的邊
workflow.add_conditional_edges(