This commit is contained in:
parent
7e62df6ebf
commit
63ac0668ac
|
|
@ -14,6 +14,7 @@ from backend.app.models.schemas import (
|
|||
Ticker,
|
||||
TaskCreatedResponse,
|
||||
TaskStatusResponse,
|
||||
DownloadRequest,
|
||||
)
|
||||
from backend.app.services.trading_service import TradingService
|
||||
from backend.app.services.task_manager import task_manager
|
||||
|
|
@ -167,3 +168,109 @@ async def get_tickers():
|
|||
{"symbol": "QQQ", "name": "Invesco QQQ Trust"},
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
@router.post("/download/reports")
async def download_reports(request: DownloadRequest):
    """
    Download analyst reports as PDF or ZIP.

    Args:
        request: Download request with ticker, date, task_id, and analyst list

    Returns:
        PDF file (single analyst) or ZIP file (multiple analysts)

    Raises:
        HTTPException: 404 if the task, its result, or the selected reports
            are missing; 400 if the task has not completed yet.
    """
    from urllib.parse import quote

    from fastapi.responses import Response
    from backend.app.services.download_service import download_service

    # Get the task that produced the analysis result
    task = task_manager.get_task_status(request.task_id)

    if not task:
        raise HTTPException(status_code=404, detail=f"Task {request.task_id} not found")

    if task.get("status") != "completed":
        raise HTTPException(status_code=400, detail="Task is not completed yet")

    result = task.get("result")
    if not result:
        raise HTTPException(status_code=404, detail="No analysis result found")

    reports_data = result.get("reports", {})

    # Analyst key -> (display name, dotted path into the reports payload)
    ANALYST_MAPPING = {
        "market": ("市場分析師", "market_report"),
        "social": ("社群媒體分析師", "sentiment_report"),
        "news": ("新聞分析師", "news_report"),
        "fundamentals": ("基本面分析師", "fundamentals_report"),
        "bull": ("看漲研究員", "investment_debate_state.bull_history"),
        "bear": ("看跌研究員", "investment_debate_state.bear_history"),
        "research_manager": ("研究經理", "investment_debate_state.judge_decision"),
        "trader": ("交易員", "trader_investment_plan"),
        "risky": ("激進分析師", "risk_debate_state.risky_history"),
        "safe": ("保守分析師", "risk_debate_state.safe_history"),
        "neutral": ("中立分析師", "risk_debate_state.neutral_history"),
        "risk_manager": ("風險經理", "risk_debate_state.judge_decision"),
    }

    def get_nested_value(obj: dict, path: str):
        """Walk *obj* along a dotted *path*; return None if any hop is missing."""
        for key in path.split('.'):
            if isinstance(obj, dict):
                obj = obj.get(key)
            else:
                return None
        return obj

    def attachment_header(filename: str) -> str:
        # HTTP response headers must be latin-1 safe; percent-encode the
        # (possibly Chinese) filename per RFC 5987 so non-ASCII names survive.
        return f"attachment; filename*=UTF-8''{quote(filename)}"

    # Collect the requested reports that actually have content
    reports_to_download = []
    for analyst_key in request.analysts:
        if analyst_key not in ANALYST_MAPPING:
            # Unknown analyst keys are silently ignored rather than rejected
            continue

        analyst_name, report_key = ANALYST_MAPPING[analyst_key]
        report_content = get_nested_value(reports_data, report_key)

        if report_content:
            reports_to_download.append({
                "analyst_name": analyst_name,
                "report_content": report_content,
            })

    if not reports_to_download:
        raise HTTPException(status_code=404, detail="No reports found for selected analysts")

    # Single report - return PDF
    if len(reports_to_download) == 1:
        pdf_bytes, filename = download_service.create_single_pdf(
            analyst_name=reports_to_download[0]["analyst_name"],
            ticker=request.ticker,
            analysis_date=request.analysis_date,
            report_content=reports_to_download[0]["report_content"],
        )

        return Response(
            content=pdf_bytes,
            media_type="application/pdf",
            # BUG FIX: the generated filename was previously ignored here and
            # a broken literal was sent instead.
            headers={"Content-Disposition": attachment_header(filename)},
        )

    # Multiple reports - return ZIP
    zip_bytes, filename = download_service.create_multiple_pdfs_zip(
        ticker=request.ticker,
        analysis_date=request.analysis_date,
        reports=reports_to_download,
    )

    return Response(
        content=zip_bytes,
        media_type="application/zip",
        headers={"Content-Disposition": attachment_header(filename)},
    )
|
||||
|
|
|
|||
|
|
@ -124,3 +124,19 @@ class TaskStatusResponse(BaseModel):
|
|||
result: Optional[AnalysisResponse] = Field(None, description="Analysis result (only when completed)")
|
||||
error: Optional[str] = Field(None, description="Error message (only when failed)")
|
||||
completed_at: Optional[str] = Field(None, description="Completion timestamp")
|
||||
|
||||
|
||||
# Download Schemas
|
||||
|
||||
class AnalystReport(BaseModel):
    """Single analyst report for download.

    Pairs a human-readable analyst name with the key used to locate the
    report's content inside the analysis results payload.
    """
    # Display name of the analyst (may be localized).
    analyst_name: str = Field(..., description="Name of the analyst")
    # Lookup key (possibly dotted path) for the report inside the results.
    report_key: str = Field(..., description="Key to access report in results")
|
||||
|
||||
|
||||
class DownloadRequest(BaseModel):
    """Request model for downloading analyst reports.

    Sent by the frontend to the download endpoint; identifies a completed
    analysis task and which analysts' reports to export.
    """
    # Stock ticker the analysis was run for.
    ticker: str = Field(..., description="Stock ticker symbol")
    # NOTE(review): the YYYY-MM-DD format is documented but not validated here.
    analysis_date: str = Field(..., description="Analysis date in YYYY-MM-DD format")
    # ID of the completed analysis task that holds the result.
    task_id: str = Field(..., description="Task ID of the completed analysis")
    # Analyst keys to include; at least one is required.
    analysts: List[str] = Field(..., description="List of analyst keys to download", min_length=1)
|
||||
|
|
|
|||
|
|
@ -0,0 +1,104 @@
|
|||
"""
|
||||
Download Service for Analyst Reports
|
||||
Handles single PDF and multiple PDF ZIP downloads
|
||||
"""
|
||||
import io
|
||||
import zipfile
|
||||
from typing import List, Dict, Optional
|
||||
from datetime import datetime
|
||||
|
||||
from backend.app.services.pdf_generator import PDFGenerator
|
||||
|
||||
|
||||
class DownloadService:
    """Service for handling analyst report downloads.

    Wraps a PDFGenerator and knows how to package its output either as a
    single PDF or as a ZIP archive of several PDFs.
    """

    def __init__(self):
        """Create the service with its own PDF generator."""
        self.pdf_generator = PDFGenerator()

    def create_single_pdf(
        self,
        analyst_name: str,
        ticker: str,
        analysis_date: str,
        report_content: str,
    ) -> tuple[bytes, str]:
        """
        Create a PDF for a single analyst report.

        Args:
            analyst_name: Name of the analyst
            ticker: Stock ticker symbol
            analysis_date: Date of analysis (YYYY-MM-DD)
            report_content: Markdown formatted report content

        Returns:
            Tuple of (PDF bytes, filename)
        """
        document = self.pdf_generator.generate_analyst_report_pdf(
            analyst_name=analyst_name,
            ticker=ticker,
            analysis_date=analysis_date,
            report_content=report_content,
        )

        # Filename pattern: <ticker>_<analyst>_<date>.pdf (股票代號_分析師_日期.pdf)
        return document, f"{ticker}_{analyst_name}_{analysis_date}.pdf"

    def create_multiple_pdfs_zip(
        self,
        ticker: str,
        analysis_date: str,
        reports: List[Dict[str, str]],
    ) -> tuple[bytes, str]:
        """
        Create a ZIP file containing multiple analyst report PDFs.

        Args:
            ticker: Stock ticker symbol
            analysis_date: Date of analysis (YYYY-MM-DD)
            reports: List of dicts with keys 'analyst_name' and 'report_content'

        Returns:
            Tuple of (ZIP bytes, filename)
        """
        archive_buffer = io.BytesIO()

        with zipfile.ZipFile(archive_buffer, 'w', zipfile.ZIP_DEFLATED) as archive:
            for entry in reports:
                author = entry.get('analyst_name', 'Unknown')
                body = entry.get('report_content', '')

                # Entries without content are skipped entirely
                if not body:
                    continue

                rendered = self.pdf_generator.generate_analyst_report_pdf(
                    analyst_name=author,
                    ticker=ticker,
                    analysis_date=analysis_date,
                    report_content=body,
                )

                # One PDF per analyst inside the archive
                archive.writestr(f"{ticker}_{author}_{analysis_date}.pdf", rendered)

        payload = archive_buffer.getvalue()
        archive_buffer.close()

        # Archive name pattern: <ticker>_<date>.zip (股票代號_日期.zip)
        return payload, f"{ticker}_{analysis_date}.zip"


# Singleton instance shared by the route handlers
download_service = DownloadService()
|
||||
|
|
@ -0,0 +1,204 @@
|
|||
"""
|
||||
PDF Generation Service for Analyst Reports
|
||||
Converts markdown reports to PDF format with Chinese character support
|
||||
"""
|
||||
import io
|
||||
import re
|
||||
from typing import Optional
|
||||
from datetime import datetime
|
||||
from reportlab.lib.pagesizes import A4
|
||||
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
|
||||
from reportlab.lib.units import cm
|
||||
from reportlab.lib.enums import TA_LEFT, TA_CENTER
|
||||
from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, PageBreak
|
||||
from reportlab.pdfbase import pdfmetrics
|
||||
from reportlab.pdfbase.ttfonts import TTFont
|
||||
from reportlab.lib.colors import HexColor
|
||||
import markdown
|
||||
|
||||
|
||||
class PDFGenerator:
    """Generate PDF reports from markdown content.

    Markdown is stripped down to plain paragraphs and headings, then laid out
    with reportlab flowables on A4 pages.
    """

    def __init__(self):
        """Initialize PDF generator.

        Chinese font registration is not wired up yet; the generator currently
        relies on reportlab's built-in fonts plus Unicode/XML escaping.
        """
        # TODO: register a CJK TTF via pdfmetrics.registerFont(TTFont(...))
        # so Chinese text renders with a proper font. Candidate paths:
        #   macOS: /System/Library/Fonts/PingFang.ttc
        #   Linux: /usr/share/fonts/truetype/
        pass

    def generate_analyst_report_pdf(
        self,
        analyst_name: str,
        ticker: str,
        analysis_date: str,
        report_content: str,
    ) -> bytes:
        """
        Generate a PDF from analyst report content.

        Args:
            analyst_name: Name of the analyst
            ticker: Stock ticker symbol
            analysis_date: Date of analysis
            report_content: Markdown formatted report content

        Returns:
            PDF file content as bytes
        """
        buffer = io.BytesIO()

        # Create PDF document (A4, 2 cm margins all around)
        doc = SimpleDocTemplate(
            buffer,
            pagesize=A4,
            rightMargin=2*cm,
            leftMargin=2*cm,
            topMargin=2*cm,
            bottomMargin=2*cm,
        )

        # Container for the 'Flowable' objects
        elements = []

        # Define styles
        styles = getSampleStyleSheet()

        title_style = ParagraphStyle(
            'CustomTitle',
            parent=styles['Heading1'],
            fontSize=24,
            textColor=HexColor('#1a1a1a'),
            spaceAfter=30,
            alignment=TA_CENTER,
        )

        subtitle_style = ParagraphStyle(
            'CustomSubtitle',
            parent=styles['Normal'],
            fontSize=12,
            textColor=HexColor('#666666'),
            spaceAfter=20,
            alignment=TA_CENTER,
        )

        heading_style = ParagraphStyle(
            'CustomHeading',
            parent=styles['Heading2'],
            fontSize=16,
            textColor=HexColor('#2c3e50'),
            spaceAfter=12,
            spaceBefore=12,
        )

        body_style = ParagraphStyle(
            'CustomBody',
            parent=styles['Normal'],
            fontSize=10,
            leading=14,
            textColor=HexColor('#333333'),
            spaceAfter=8,
        )

        # Title: the analyst's name, centered
        elements.append(Paragraph(self._escape_html(analyst_name), title_style))
        elements.append(Spacer(1, 0.3*cm))

        # Metadata line: "<ticker> | <date>"
        metadata = f"{ticker} | {analysis_date}"
        elements.append(Paragraph(self._escape_html(metadata), subtitle_style))
        elements.append(Spacer(1, 0.5*cm))

        # Strip markdown syntax, then emit the text paragraph by paragraph
        content = self._clean_markdown(report_content)

        for para in content.split('\n'):
            para = para.strip()
            if not para:
                # Blank line -> small vertical gap
                elements.append(Spacer(1, 0.2*cm))
                continue

            # Markdown headings; levels 1-3 share one visual style
            if para.startswith('# '):
                heading = para[2:]
            elif para.startswith('## '):
                heading = para[3:]
            elif para.startswith('### '):
                heading = para[4:]
            else:
                heading = None

            if heading is not None:
                # BUG FIX: headings were previously passed to Paragraph
                # unescaped, so '&', '<' or '>' in a heading broke reportlab's
                # XML-like inline markup parser. Escape them like body text.
                elements.append(Paragraph(self._escape_html(heading), heading_style))
                continue

            # Regular paragraph - escape markup characters first
            text = self._escape_html(para)
            try:
                elements.append(Paragraph(text, body_style))
            except Exception:
                # If reportlab still rejects the text (e.g. glyphs the current
                # font cannot handle), degrade to ASCII with XML character
                # references so the build can proceed.
                elements.append(Paragraph(text.encode('ascii', 'xmlcharrefreplace').decode(), body_style))

        # Build PDF
        doc.build(elements)

        # Hand back the raw bytes and release the buffer
        pdf_content = buffer.getvalue()
        buffer.close()

        return pdf_content

    def _clean_markdown(self, text: str) -> str:
        """
        Strip markdown formatting so the text can be laid out as plain
        paragraphs.

        Headings ('#', '##', '###') are deliberately left intact; the caller
        handles them.

        Args:
            text: Markdown text

        Returns:
            Cleaned text
        """
        # Remove markdown links but keep the link text
        text = re.sub(r'\[([^\]]+)\]\([^\)]+\)', r'\1', text)

        # Remove bold/italic markers
        text = re.sub(r'\*\*([^\*]+)\*\*', r'\1', text)
        text = re.sub(r'\*([^\*]+)\*', r'\1', text)
        text = re.sub(r'__([^_]+)__', r'\1', text)
        text = re.sub(r'_([^_]+)_', r'\1', text)

        # Remove fenced code blocks entirely, keep inline code content
        text = re.sub(r'```[^`]*```', '', text, flags=re.DOTALL)
        text = re.sub(r'`([^`]+)`', r'\1', text)

        # Normalize bullet markers to a bullet character
        text = re.sub(r'^\s*[\*\-\+]\s+', '• ', text, flags=re.MULTILINE)

        # Remove horizontal rules
        text = re.sub(r'^[\-\*\_]{3,}\s*$', '', text, flags=re.MULTILINE)

        return text

    def _escape_html(self, text: str) -> str:
        """
        Escape XML special characters for reportlab's Paragraph markup.

        '&' must be replaced first so the entities produced for '<' and '>'
        are not double-escaped.

        Args:
            text: Text to escape

        Returns:
            Escaped text
        """
        # BUG FIX: the replacement strings had been entity-decoded into the
        # characters themselves (e.g. replace('&', '&')), turning this method
        # into a no-op and letting raw markup reach reportlab. Restore the
        # intended entities.
        text = text.replace('&', '&amp;')
        text = text.replace('<', '&lt;')
        text = text.replace('>', '&gt;')
        return text
|
||||
|
|
@ -37,3 +37,7 @@ langchain_anthropic
|
|||
langchain-google-genai
|
||||
beautifulsoup4>=4.12.0
|
||||
tenacity>=8.2.0
|
||||
|
||||
# PDF and document generation
|
||||
reportlab>=4.0.0
|
||||
markdown>=3.5.0
|
||||
|
|
|
|||
|
|
@ -13,16 +13,19 @@ import type { AnalysisRequest } from "@/lib/types";
|
|||
|
||||
export default function AnalysisPage() {
|
||||
const router = useRouter();
|
||||
const { setAnalysisResult } = useAnalysisContext();
|
||||
const { runAnalysis, loading, error, result } = useAnalysis();
|
||||
const { setAnalysisResult, setTaskId } = useAnalysisContext();
|
||||
const { runAnalysis, loading, error, result, taskId } = useAnalysis();
|
||||
|
||||
// 當分析完成時自動跳轉到結果頁面
|
||||
useEffect(() => {
|
||||
if (result && !loading && !error) {
|
||||
setAnalysisResult(result);
|
||||
if (taskId) {
|
||||
setTaskId(taskId);
|
||||
}
|
||||
router.push("/analysis/results");
|
||||
}
|
||||
}, [result, loading, error, router, setAnalysisResult]);
|
||||
}, [result, loading, error, router, setAnalysisResult, taskId, setTaskId]);
|
||||
|
||||
const handleSubmit = async (data: AnalysisRequest) => {
|
||||
try {
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ import ReactMarkdown from "react-markdown";
|
|||
import remarkGfm from "remark-gfm";
|
||||
import { useAnalysisContext } from "@/context/AnalysisContext";
|
||||
import { PriceChart } from "@/components/analysis/PriceChart";
|
||||
import { DownloadReports } from "@/components/analysis/DownloadReports";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
|
||||
import { Card, CardContent, CardHeader, CardTitle, CardDescription } from "@/components/ui/card";
|
||||
|
|
@ -100,7 +101,7 @@ const getNestedValue = (obj: any, path: string) => {
|
|||
|
||||
export default function AnalysisResultsPage() {
|
||||
const router = useRouter();
|
||||
const { analysisResult } = useAnalysisContext();
|
||||
const { analysisResult, taskId } = useAnalysisContext();
|
||||
const [selectedAnalyst, setSelectedAnalyst] = useState("market");
|
||||
|
||||
// 如果沒有結果,重定向到分析頁面
|
||||
|
|
@ -150,6 +151,17 @@ export default function AnalysisResultsPage() {
|
|||
</Button>
|
||||
</div>
|
||||
|
||||
{/* Download Reports Section */}
|
||||
{taskId && analysisResult.reports && (
|
||||
<DownloadReports
|
||||
ticker={analysisResult.ticker}
|
||||
analysisDate={analysisResult.analysis_date}
|
||||
taskId={taskId}
|
||||
analysts={ANALYSTS}
|
||||
reports={analysisResult.reports}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* 分析師選擇 Tabs */}
|
||||
<Tabs value={selectedAnalyst} onValueChange={setSelectedAnalyst} className="w-full">
|
||||
<TabsList className="grid w-full grid-cols-2 md:grid-cols-3 lg:grid-cols-4 h-auto gap-2">
|
||||
|
|
|
|||
|
|
@ -0,0 +1,205 @@
|
|||
/**
 * Download Reports Component
 * Allows users to select and download analyst reports.
 * One selected report downloads as a PDF; several download as a ZIP archive.
 */
"use client";

import { useState } from "react";
import { Download, FileDown } from "lucide-react";
import { Button } from "@/components/ui/button";
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
import { Checkbox } from "@/components/ui/checkbox";
import { Label } from "@/components/ui/label";

/** Static metadata describing one analyst whose report may be downloaded. */
interface AnalystInfo {
  key: string;
  label: string;
  /** Dotted path into the reports object where the content lives. */
  reportKey: string;
  description: string;
}

interface DownloadReportsProps {
  ticker: string;
  analysisDate: string;
  taskId: string;
  analysts: AnalystInfo[];
  /** Raw reports payload from the analysis result. */
  reports: any;
}

export function DownloadReports({
  ticker,
  analysisDate,
  taskId,
  analysts,
  reports,
}: DownloadReportsProps) {
  const [selectedAnalysts, setSelectedAnalysts] = useState<string[]>([]);
  const [isDownloading, setIsDownloading] = useState(false);

  // Helper to get a nested value from the reports object via a dotted path
  const getNestedValue = (obj: any, path: string) => {
    return path.split('.').reduce((current, key) => current?.[key], obj);
  };

  // Only offer analysts whose report actually has non-empty content
  const availableAnalysts = analysts.filter(analyst => {
    const reportContent = getNestedValue(reports, analyst.reportKey);
    return reportContent && reportContent.trim().length > 0;
  });

  // Toggle between everything selected and nothing selected
  const handleSelectAll = () => {
    if (selectedAnalysts.length === availableAnalysts.length) {
      setSelectedAnalysts([]);
    } else {
      setSelectedAnalysts(availableAnalysts.map(a => a.key));
    }
  };

  // Toggle a single analyst in or out of the selection
  const handleToggleAnalyst = (analystKey: string) => {
    setSelectedAnalysts(prev => {
      if (prev.includes(analystKey)) {
        return prev.filter(key => key !== analystKey);
      } else {
        return [...prev, analystKey];
      }
    });
  };

  /**
   * Extract the attachment filename from a Content-Disposition header.
   * Prefers the RFC 5987 `filename*=UTF-8''...` form (non-ASCII safe),
   * falling back to the plain `filename=` parameter with quotes stripped.
   * Returns null if neither form is present.
   */
  const parseAttachmentFilename = (header: string): string | null => {
    const extended = header.match(/filename\*=UTF-8''([^;]+)/i);
    if (extended) {
      try {
        return decodeURIComponent(extended[1]);
      } catch {
        // fall through to the plain form on a malformed encoding
      }
    }
    // BUG FIX: the previous regex (/filename=(.+)/) kept surrounding quotes
    // (and anything after them) in the saved filename.
    const plain = header.match(/filename="?([^";]+)"?/);
    return plain ? plain[1] : null;
  };

  // POST the selection to the backend and trigger a browser download
  const handleDownload = async () => {
    if (selectedAnalysts.length === 0) return;

    setIsDownloading(true);
    try {
      const response = await fetch(`${process.env.NEXT_PUBLIC_API_URL || 'http://localhost:8000'}/api/download/reports`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({
          ticker,
          analysis_date: analysisDate,
          task_id: taskId,
          analysts: selectedAnalysts,
        }),
      });

      if (!response.ok) {
        throw new Error('Download failed');
      }

      const blob = await response.blob();

      // Default filename by selection size; server header overrides it
      let filename = selectedAnalysts.length > 1
        ? `${ticker}_${analysisDate}.zip`
        : `${ticker}_${analysisDate}.pdf`;

      const contentDisposition = response.headers.get('Content-Disposition');
      if (contentDisposition) {
        const parsed = parseAttachmentFilename(contentDisposition);
        if (parsed) {
          filename = parsed;
        }
      }

      // Create a temporary anchor to trigger the browser download
      const url = window.URL.createObjectURL(blob);
      const link = document.createElement('a');
      link.href = url;
      link.download = filename;
      document.body.appendChild(link);
      link.click();

      // Cleanup
      document.body.removeChild(link);
      window.URL.revokeObjectURL(url);
    } catch (error) {
      console.error('Download error:', error);
      alert('下載失敗,請稍後再試');
    } finally {
      setIsDownloading(false);
    }
  };

  // Nothing downloadable: render nothing at all
  if (availableAnalysts.length === 0) {
    return null;
  }

  const isAllSelected = selectedAnalysts.length === availableAnalysts.length && availableAnalysts.length > 0;

  return (
    <Card>
      <CardHeader>
        <CardTitle className="flex items-center gap-2">
          <FileDown className="h-5 w-5" />
          下載報告
        </CardTitle>
        <CardDescription>
          選擇要下載的分析師報告(支援單一PDF或多個ZIP)
        </CardDescription>
      </CardHeader>
      <CardContent className="space-y-4">
        {/* Select All */}
        <div className="flex items-center space-x-2 pb-2 border-b">
          <Checkbox
            id="select-all"
            checked={isAllSelected}
            onCheckedChange={handleSelectAll}
          />
          <Label
            htmlFor="select-all"
            className="text-sm font-medium cursor-pointer"
          >
            全選 ({availableAnalysts.length} 個報告)
          </Label>
        </div>

        {/* Analyst List */}
        <div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-3">
          {availableAnalysts.map(analyst => (
            <div
              key={analyst.key}
              className="flex items-start space-x-2 p-3 rounded-lg border hover:bg-accent/50 transition-colors"
            >
              <Checkbox
                id={`analyst-${analyst.key}`}
                checked={selectedAnalysts.includes(analyst.key)}
                onCheckedChange={() => handleToggleAnalyst(analyst.key)}
              />
              <div className="flex-1">
                <Label
                  htmlFor={`analyst-${analyst.key}`}
                  className="text-sm font-medium cursor-pointer"
                >
                  {analyst.label}
                </Label>
                <p className="text-xs text-muted-foreground mt-1">
                  {analyst.description}
                </p>
              </div>
            </div>
          ))}
        </div>

        {/* Download Button */}
        <div className="flex items-center justify-between pt-4 border-t">
          <div className="text-sm text-muted-foreground">
            已選擇 {selectedAnalysts.length} 個報告
          </div>
          <Button
            onClick={handleDownload}
            disabled={selectedAnalysts.length === 0 || isDownloading}
            className="gap-2"
          >
            <Download className="h-4 w-4" />
            {isDownloading ? '下載中...' : selectedAnalysts.length === 1 ? '下載 PDF' : '下載 ZIP'}
          </Button>
        </div>
      </CardContent>
    </Card>
  );
}
|
||||
|
|
@ -6,6 +6,8 @@ import type { AnalysisResponse } from "@/lib/types";
|
|||
interface AnalysisContextType {
|
||||
analysisResult: AnalysisResponse | null;
|
||||
setAnalysisResult: (result: AnalysisResponse | null) => void;
|
||||
taskId: string | null;
|
||||
setTaskId: (taskId: string | null) => void;
|
||||
}
|
||||
|
||||
const AnalysisContext = createContext<AnalysisContextType | undefined>(
|
||||
|
|
@ -16,9 +18,10 @@ export function AnalysisProvider({ children }: { children: ReactNode }) {
|
|||
const [analysisResult, setAnalysisResult] = useState<AnalysisResponse | null>(
|
||||
null
|
||||
);
|
||||
const [taskId, setTaskId] = useState<string | null>(null);
|
||||
|
||||
return (
|
||||
<AnalysisContext.Provider value={{ analysisResult, setAnalysisResult }}>
|
||||
<AnalysisContext.Provider value={{ analysisResult, setAnalysisResult, taskId, setTaskId }}>
|
||||
{children}
|
||||
</AnalysisContext.Provider>
|
||||
);
|
||||
|
|
|
|||
Loading…
Reference in New Issue