"use client"; import { useState, useEffect, useRef, Suspense } from "react"; import { useRouter, useSearchParams } from "next/navigation"; import ReactMarkdown from "react-markdown"; import remarkGfm from "remark-gfm"; import Image from "next/image"; import { useLanguage } from "@/contexts/LanguageContext"; import { useAuth } from "@/contexts/auth-context"; import { getApiSettingsAsync } from "@/lib/storage"; import { getBaseUrlForModel } from "@/lib/api-helpers"; import { api } from "@/lib/api"; import { getReportsByMarketType, type SavedReport } from "@/lib/reports-db"; import { getCloudReports, isCloudSyncEnabled } from "@/lib/user-api"; import { Button } from "@/components/ui/button"; import { Input } from "@/components/ui/input"; import { MessageCircle, Send, Loader2, Bot, User, Sparkles, AlertCircle, Trash2, ArrowLeft, Settings2, } from "lucide-react"; import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue, } from "@/components/ui/select"; interface ChatMessage { role: "user" | "assistant"; content: string; } const AVAILABLE_MODELS = [ // OpenAI { id: "gpt-5.2-2025-12-11", name: "GPT-5.2", provider: "openai", logo: "/logos/openai.svg" }, { id: "gpt-5.1", name: "GPT-5.1", provider: "openai", logo: "/logos/openai.svg" }, { id: "gpt-5-mini", name: "GPT-5 Mini", provider: "openai", logo: "/logos/openai.svg" }, { id: "gpt-5-nano", name: "GPT-5 Nano", provider: "openai", logo: "/logos/openai.svg" }, { id: "gpt-4.1-mini", name: "GPT-4.1 Mini", provider: "openai", logo: "/logos/openai.svg" }, { id: "gpt-4.1-nano", name: "GPT-4.1 Nano", provider: "openai", logo: "/logos/openai.svg" }, { id: "o4-mini", name: "o4-mini", provider: "openai", logo: "/logos/openai.svg" }, // Anthropic { id: "claude-sonnet-4-5-20250929", name: "Claude Sonnet 4.5", provider: "anthropic", logo: "/logos/claude-color.svg" }, { id: "claude-haiku-4-5-20251001", name: "Claude Haiku 4.5", provider: "anthropic", logo: "/logos/claude-color.svg" }, { id: "claude-sonnet-4-20250514", 
name: "Claude Sonnet 4", provider: "anthropic", logo: "/logos/claude-color.svg" }, { id: "claude-3-haiku-20240307", name: "Claude 3 Haiku", provider: "anthropic", logo: "/logos/claude-color.svg" }, // Google { id: "gemini-2.5-pro", name: "Gemini 2.5 Pro", provider: "google", logo: "/logos/gemini-color.svg" }, { id: "gemini-2.5-flash", name: "Gemini 2.5 Flash", provider: "google", logo: "/logos/gemini-color.svg" }, { id: "gemini-2.5-flash-lite", name: "Gemini 2.5 Flash Lite", provider: "google", logo: "/logos/gemini-color.svg" }, { id: "gemini-2.0-flash", name: "Gemini 2.0 Flash", provider: "google", logo: "/logos/gemini-color.svg" }, { id: "gemini-2.0-flash-lite", name: "Gemini 2.0 Flash Lite", provider: "google", logo: "/logos/gemini-color.svg" }, // Grok { id: "grok-4-1-fast-reasoning", name: "Grok 4.1 Fast Reasoning", provider: "grok", logo: "/logos/grok.svg" }, { id: "grok-4-1-fast-non-reasoning", name: "Grok 4.1 Fast Non Reasoning", provider: "grok", logo: "/logos/grok.svg" }, { id: "grok-4-fast-reasoning", name: "Grok 4 Fast Reasoning", provider: "grok", logo: "/logos/grok.svg" }, { id: "grok-4-fast-non-reasoning", name: "Grok 4 Fast Non Reasoning", provider: "grok", logo: "/logos/grok.svg" }, { id: "grok-4-0709", name: "Grok 4", provider: "grok", logo: "/logos/grok.svg" }, { id: "grok-3", name: "Grok 3", provider: "grok", logo: "/logos/grok.svg" }, { id: "grok-3-mini", name: "Grok 3 Mini", provider: "grok", logo: "/logos/grok.svg" }, // DeepSeek { id: "deepseek-reasoner", name: "DeepSeek Reasoner", provider: "deepseek", logo: "/logos/deepseek-color.svg" }, { id: "deepseek-chat", name: "DeepSeek Chat", provider: "deepseek", logo: "/logos/deepseek-color.svg" }, // Qwen { id: "qwen3-max", name: "Qwen 3 Max", provider: "qwen", logo: "/logos/qwen-color.svg" }, { id: "qwen-plus", name: "Qwen Plus", provider: "qwen", logo: "/logos/qwen-color.svg" }, { id: "qwen-flash", name: "Qwen Flash", provider: "qwen", logo: "/logos/qwen-color.svg" }, // Custom { id: "custom", 
name: "Other (自訂模型)", provider: "custom", logo: null }, ]; function HistoryChatContent() { const router = useRouter(); const searchParams = useSearchParams(); const { t, locale } = useLanguage(); const { isAuthenticated } = useAuth(); const [messages, setMessages] = useState([]); const [input, setInput] = useState(""); const [isLoading, setIsLoading] = useState(false); const [error, setError] = useState(null); const [report, setReport] = useState(null); const [loadingReport, setLoadingReport] = useState(true); // Default to GPT-5 Mini const [selectedModelId, setSelectedModelId] = useState("gpt-5-mini"); const [customModel, setCustomModel] = useState(""); const messagesEndRef = useRef(null); const inputRef = useRef(null); const ticker = searchParams.get("ticker"); const dateStr = searchParams.get("date"); const market = searchParams.get("market"); // Load the specific report useEffect(() => { const loadReport = async () => { if (!ticker || !dateStr || !market) { setLoadingReport(false); return; } try { setLoadingReport(true); // Try local DB first const localObj = await getReportsByMarketType(market as any); const match = localObj.find( (r) => r.ticker === ticker && r.analysis_date === dateStr ); if (match) { setReport(match); } else if (isAuthenticated && isCloudSyncEnabled()) { // Fallback to cloud const cloudReports = await getCloudReports(); const cloudMatch = cloudReports.find( (r) => r.ticker === ticker && r.analysis_date === dateStr && r.market_type === market ); if (cloudMatch) { setReport({ id: parseInt(cloudMatch.id.replace(/-/g, "").slice(0, 8), 16), ticker: cloudMatch.ticker, market_type: cloudMatch.market_type as any, analysis_date: cloudMatch.analysis_date, saved_at: new Date(cloudMatch.created_at), result: cloudMatch.result, language: cloudMatch.language, }); } } } catch (err) { console.error("Failed to load report for chat:", err); } finally { setLoadingReport(false); } }; loadReport(); }, [ticker, dateStr, market, isAuthenticated]); // Auto-scroll 
to bottom when messages change useEffect(() => { messagesEndRef.current?.scrollIntoView({ behavior: "smooth" }); }, [messages, isLoading]); // Focus input when loaded useEffect(() => { if (!loadingReport && report) { setTimeout(() => inputRef.current?.focus(), 200); } }, [loadingReport, report]); const handleClearChat = () => { setMessages([]); setError(null); }; const handleSend = async () => { const trimmed = input.trim(); if (!trimmed || isLoading || !report) return; setError(null); const userMessage: ChatMessage = { role: "user", content: trimmed }; setMessages((prev) => [...prev, userMessage]); setInput(""); setIsLoading(true); try { const settings = await getApiSettingsAsync(); let chatModel = "gpt-4o-mini"; let apiKey = ""; let baseUrl = "https://api.openai.com/v1"; const providers = { openai: { key: settings.openai_api_key, defaultModel: "gpt-4o-mini" }, anthropic: { key: settings.anthropic_api_key, defaultModel: "claude-3-5-sonnet-20241022" }, google: { key: settings.google_api_key, defaultModel: "gemini-2.5-flash" }, grok: { key: settings.grok_api_key, defaultModel: "grok-2-1212" }, deepseek: { key: settings.deepseek_api_key, defaultModel: "deepseek-chat" }, qwen: { key: settings.qwen_api_key, defaultModel: "qwen-max" }, }; const activeModelId = selectedModelId === "custom" ? 
customModel.trim() : selectedModelId; if (!activeModelId) { // Auto logic wrapper (now acts as a fallback if custom is empty) for (const [providerName, providerData] of Object.entries(providers)) { if (providerData.key && providerData.key.trim() !== "") { apiKey = providerData.key; chatModel = providerData.defaultModel; baseUrl = getBaseUrlForModel(chatModel, settings.custom_base_url); break; } } // Custom settings override if configured if (settings.custom_api_key && settings.custom_base_url && !apiKey) { apiKey = settings.custom_api_key; baseUrl = settings.custom_base_url; } } else { chatModel = activeModelId; const modelInfo = AVAILABLE_MODELS.find(m => m.id === selectedModelId); const providerName = modelInfo ? modelInfo.provider : "custom"; const matchedProvider = (providers as any)[providerName]; if (matchedProvider && matchedProvider.key) { apiKey = matchedProvider.key; baseUrl = getBaseUrlForModel(chatModel, settings.custom_base_url); } else if (settings.custom_api_key) { apiKey = settings.custom_api_key; baseUrl = settings.custom_base_url || "https://api.openai.com/v1"; } } if (!apiKey) { setError(t.chat?.noApiKey || "Please configure your API key in settings first."); setIsLoading(false); return; } const history = messages.map((m) => ({ role: m.role, content: m.content, })); const response = await api.sendChatMessage({ message: trimmed, reports: report.result.reports || {}, ticker: report.ticker, analysis_date: report.analysis_date, history, model: chatModel, api_key: apiKey, base_url: baseUrl, language: locale as "en" | "zh-TW", }); setMessages((prev) => [ ...prev, { role: "assistant", content: response.reply }, ]); } catch (err: any) { console.error("Chat error:", err); const errorMsg = err?.response?.data?.detail || err?.message || (t.chat?.error || "Failed to get response. 
Please try again."); setError(errorMsg); } finally { setIsLoading(false); } }; const handleKeyDown = (e: React.KeyboardEvent) => { if (e.key === "Enter" && !e.shiftKey) { e.preventDefault(); handleSend(); } }; if (loadingReport) { return (

{t.history?.loading || "Loading..."}

); } if (!report) { return (

Report not found.

); } const contextLabel = t.chat?.allReports || "All Reports"; return (
{/* Header */}

{t.chat?.title || "Report Chat"} — {report.ticker} {contextLabel}

{t.history?.analysisDate || "Date"}: {report.analysis_date}

{messages.length > 0 && ( )}
{/* Messages Area */}
{/* Empty state */} {messages.length === 0 && !isLoading && (

{t.chat?.emptyState || "Ask any question about this analysis report"}

{t.chat?.emptyHint || 'e.g. "What are the main risk factors?"'}

{/* Quick suggestions */}
{(locale === "zh-TW" ? [ "主要的風險因素有哪些?", "總結這份報告的重點", "建議的進場策略是什麼?", "看漲和看跌的觀點有何不同?", ] : [ "What are the key risk factors?", "Summarize this report", "What's the recommended entry strategy?", "How do bull and bear views differ?", ] ).map((suggestion) => ( ))}
)} {/* Message list */}
{messages.map((msg, i) => (
{/* Avatar */}
{msg.role === "user" ? ( ) : ( )}
{msg.role === "assistant" ? (
{msg.content}
) : (

{msg.content}

)}
))} {/* Loading indicator */} {isLoading && (
{t.chat?.thinking || "Thinking..."}
)} {/* Error message */} {error && (
{error}
)}
{/* Input Bar */}
{/* Model Selector */}
{selectedModelId === "custom" && ( setCustomModel(e.target.value)} placeholder="輸入模型名稱 (e.g. gpt-4)" className="h-8 w-[180px] text-xs rounded-full border-gray-200 dark:border-gray-700 bg-white dark:bg-gray-800" /> )}
{/* Text Input */}
setInput(e.target.value)} onKeyDown={handleKeyDown} placeholder={t.chat?.placeholder || "Ask about this report..."} disabled={isLoading} className="flex-1 text-base rounded-full border-gray-300 dark:border-gray-600 bg-white dark:bg-gray-800 focus-visible:ring-purple-500 h-12 md:h-14 px-6 shadow-sm" />

LLM can make mistakes. Please verify important information.

); }
// Page entry point. HistoryChatContent calls useSearchParams(), which Next.js
// requires to be rendered inside a <Suspense> boundary — hence this wrapper.
// NOTE(review): the fallback's JSX element tags appear stripped in this
// extraction (only the "Loading chat..." text remains); confirm the original
// markup before editing.
export default function HistoryChatPage() { return (

Loading chat...

} > ); }