This commit is contained in:
MarkLo127 2026-03-11 19:15:24 +08:00
parent ea4a267bcf
commit 6802731507
3 changed files with 138 additions and 24 deletions

View File

@ -7,7 +7,7 @@ from uuid import UUID
from fastapi import APIRouter, HTTPException, Depends, Header
from pydantic import BaseModel
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete
from sqlalchemy import select, delete, func
from datetime import datetime
from backend.app.db import get_db, User, UserSettings, Report
@ -203,19 +203,45 @@ async def create_report(
user: User = Depends(get_current_user_required),
db: AsyncSession = Depends(get_db)
):
"""Save a new report"""
"""Save a report (upsert: update if same ticker/date/market_type/language exists)"""
# Normalize language: treat None as "zh-TW" to avoid NULL matching issues
language = report_data.language or "zh-TW"
# Check for existing report with same key to prevent duplicates
existing_result = await db.execute(
select(Report)
.where(Report.user_id == user.id)
.where(Report.ticker == report_data.ticker)
.where(Report.analysis_date == report_data.analysis_date)
.where(Report.market_type == report_data.market_type)
.where(func.coalesce(Report.language, "zh-TW") == language)
)
existing = existing_result.scalar_one_or_none()
if existing:
# Update existing report instead of creating a duplicate
existing.result = report_data.result
existing.language = language
await db.commit()
return {
"success": True,
"report_id": str(existing.id),
"message": "Report updated successfully"
}
# No existing report found — create new
report = Report(
user_id=user.id,
ticker=report_data.ticker,
market_type=report_data.market_type,
analysis_date=report_data.analysis_date,
result=report_data.result,
language=report_data.language
language=language,
)
db.add(report)
await db.commit()
await db.refresh(report)
return {
"success": True,
"report_id": str(report.id),
@ -254,6 +280,47 @@ async def get_report(
)
@router.delete("/reports/cleanup-duplicates")
async def cleanup_duplicate_reports(
    user: User = Depends(get_current_user_required),
    db: AsyncSession = Depends(get_db)
):
    """Delete the current user's duplicate reports.

    A report's identity is the tuple (ticker, analysis_date, market_type,
    language); for each identity only the newest report (by created_at) is
    kept.  Returns a summary dict with the number of rows removed.
    """
    # Newest-first ordering guarantees the first report seen for each
    # identity key is the one that survives.
    rows = await db.execute(
        select(Report)
        .where(Report.user_id == user.id)
        .order_by(Report.created_at.desc())
    )

    kept_keys: set = set()
    duplicate_ids: list = []
    for rpt in rows.scalars().all():
        # A NULL language is treated the same as the default "zh-TW".
        dedup_key = (
            rpt.ticker,
            rpt.analysis_date,
            rpt.market_type,
            rpt.language or "zh-TW",
        )
        if dedup_key in kept_keys:
            duplicate_ids.append(rpt.id)
        else:
            kept_keys.add(dedup_key)

    if duplicate_ids:
        # Re-scope the delete to the user's rows as a defensive guard.
        await db.execute(
            delete(Report)
            .where(Report.user_id == user.id)
            .where(Report.id.in_(duplicate_ids))
        )
        await db.commit()

    return {
        "success": True,
        "deleted": len(duplicate_ids),
        "message": f"Cleaned up {len(duplicate_ids)} duplicate reports"
    }
@router.delete("/reports/{report_id}")
async def delete_report(
report_id: str,

View File

@ -50,6 +50,7 @@ import {
deleteCloudReport,
saveCloudReport,
isCloudSyncEnabled,
cleanupDuplicateCloudReports,
} from "@/lib/user-api";
// import { LoginPrompt } from "@/components/auth/login-button";
import { PendingTaskRecovery } from "@/components/PendingTaskRecovery";
@ -365,23 +366,12 @@ const parseUTCDate = (dateStr: string): Date => {
/**
 * Helper to generate a unique signature for deduplication.
 * Uses only stable key fields: ticker + date + market_type + language.
 * Language is normalized to "zh-TW" when null/undefined to match backend behavior.
 */
const getReportSignature = (report: any): string => {
  // Normalize language so a missing value and the default "zh-TW" dedupe
  // to the same key, mirroring the backend's COALESCE(language, 'zh-TW').
  const lang = report.language || "zh-TW";
  // market_type defaults to 'us' for legacy reports that never stored it.
  return `${report.ticker}_${report.analysis_date}_${report.market_type || 'us'}_${lang}`;
};
export default function HistoryPage() {
@ -449,7 +439,18 @@ export default function HistoryPage() {
}
try {
// First auto-clean local duplicates that might exist from older flawed versions
// First cleanup backend duplicates (existing duplicates from old versions)
try {
const cleanupResult = await cleanupDuplicateCloudReports();
if (cleanupResult && cleanupResult.deleted > 0) {
console.log(`☁️ Cleaned up ${cleanupResult.deleted} duplicate reports from cloud DB`);
cloudReportsPromiseRef.current = null; // Force refresh after cleanup
}
} catch (err) {
console.warn("Cloud duplicate cleanup failed:", err);
}
// Then auto-clean local duplicates that might exist from older flawed versions
try {
const allLocal = await getAllReports();
const seenSignatures = new Set<string>();
@ -787,12 +788,38 @@ export default function HistoryPage() {
setDeleting(true);
try {
const cloudId = (reportToDelete as any).cloudId;
const targetLang = reportToDelete.language || detectReportLanguage(reportToDelete.result?.reports);
// IMPORTANT: Delete from BOTH cloud AND local to prevent re-sync issues
// 1. If cloud ID exists, delete from cloud
if (cloudId) {
console.log("🗑️ Deleting from cloud:", cloudId);
await deleteCloudReport(cloudId);
// 1. Delete from cloud: delete the specific report AND any other duplicates with the same key
try {
const allCloudReports = await fetchCloudReportsCached(true);
const matchingCloudIds = allCloudReports
.filter((r) => {
const lang = r.language || "zh-TW";
return (
r.ticker === reportToDelete.ticker &&
r.analysis_date === reportToDelete.analysis_date &&
r.market_type === reportToDelete.market_type &&
lang === (targetLang || "zh-TW")
);
})
.map((r) => r.id);
if (matchingCloudIds.length > 0) {
console.log(`🗑️ Deleting ${matchingCloudIds.length} cloud report(s):`, matchingCloudIds);
await Promise.all(matchingCloudIds.map((id) => deleteCloudReport(id)));
} else if (cloudId) {
// Fallback: delete by cloudId if no match found by key
console.log("🗑️ Deleting from cloud by ID:", cloudId);
await deleteCloudReport(cloudId);
}
} catch (cloudErr) {
console.warn("Could not delete cloud copy:", cloudErr);
// Fallback to original cloudId delete
if (cloudId) {
await deleteCloudReport(cloudId);
}
}
// 2. Always try to delete from local IndexedDB as well

View File

@ -176,6 +176,26 @@ export async function deleteCloudReport(reportId: string): Promise<boolean> {
}
}
/**
 * Ask the backend to purge duplicate cloud reports; the server keeps the
 * newest report per (ticker, date, market, language) key.
 * Resolves to `null` when cloud sync is disabled or the request fails.
 */
export async function cleanupDuplicateCloudReports(): Promise<{ deleted: number } | null> {
  if (!isCloudSyncEnabled()) {
    return null;
  }
  try {
    const res = await fetch(`${API_BASE}/api/user/reports/cleanup-duplicates`, {
      method: "DELETE",
      headers: getAuthHeaders(),
    });
    // Treat any non-2xx status as a soft failure rather than throwing.
    return res.ok ? await res.json() : null;
  } catch (error) {
    console.error("Failed to cleanup duplicate cloud reports:", error);
    return null;
  }
}
/**
* Get a single report by ID from cloud
*/