import questionary
from typing import List, Optional, Tuple, Dict
from rich.console import Console

from cli.models import AnalystType
from cli.api_keys import is_provider_available
from cli.model_fetcher import fetch_models_for_provider

console = Console()

ANALYST_ORDER = [
    ("Market Analyst", AnalystType.MARKET),
    ("Social Media Analyst", AnalystType.SOCIAL),
    ("News Analyst", AnalystType.NEWS),
    ("Fundamentals Analyst", AnalystType.FUNDAMENTALS),
]


def get_ticker() -> str:
    """Prompt the user to enter a ticker symbol."""
    ticker = questionary.text(
        "Enter the ticker symbol to analyze:",
        validate=lambda x: len(x.strip()) > 0 or "Please enter a valid ticker symbol.",
        style=questionary.Style(
            [
                ("text", "fg:green"),
                ("highlighted", "noinherit"),
            ]
        ),
    ).ask()

    if not ticker:
        console.print("\n[red]No ticker symbol provided. Exiting...[/red]")
        exit(1)

    return ticker.strip().upper()


def get_analysis_date() -> str:
    """Prompt the user to enter a date in YYYY-MM-DD format."""
    import re
    from datetime import datetime

    def validate_date(date_str: str) -> bool:
        if not re.match(r"^\d{4}-\d{2}-\d{2}$", date_str):
            return False
        try:
            datetime.strptime(date_str, "%Y-%m-%d")
            return True
        except ValueError:
            return False

    date = questionary.text(
        "Enter the analysis date (YYYY-MM-DD):",
        validate=lambda x: validate_date(x.strip()) or "Please enter a valid date in YYYY-MM-DD format.",
        style=questionary.Style(
            [
                ("text", "fg:green"),
                ("highlighted", "noinherit"),
            ]
        ),
    ).ask()

    if not date:
        console.print("\n[red]No date provided. Exiting...[/red]")
        exit(1)

    return date.strip()


def select_analysts() -> List[AnalystType]:
    """Select analysts using an interactive checkbox."""
    choices = questionary.checkbox(
        "Select Your [Analysts Team]:",
        choices=[
            questionary.Choice(display, value=value)
            for display, value in ANALYST_ORDER
        ],
        instruction="\n- Press Space to select/unselect analysts\n- Press 'a' to select/unselect all\n- Press Enter when done",
        validate=lambda x: len(x) > 0 or "You must select at least one analyst.",
        style=questionary.Style(
            [
                ("checkbox-selected", "fg:green"),
                ("selected", "fg:green noinherit"),
                ("highlighted", "noinherit"),
                ("pointer", "noinherit"),
            ]
        ),
    ).ask()

    if not choices:
        console.print("\n[red]No analysts selected. Exiting...[/red]")
        exit(1)

    return choices


def select_research_depth() -> int:
    """Select research depth using an interactive selection."""
    # Define research depth options with their corresponding values
    DEPTH_OPTIONS = [
        ("Shallow - Quick research, few debate and strategy discussion rounds", 1),
        ("Medium - Middle ground, moderate debate rounds and strategy discussion", 3),
        ("Deep - Comprehensive research, in-depth debate and strategy discussion", 5),
    ]

    choice = questionary.select(
        "Select Your [Research Depth]:",
        choices=[
            questionary.Choice(display, value=value)
            for display, value in DEPTH_OPTIONS
        ],
        instruction="\n- Use arrow keys to navigate\n- Press Enter to select",
        style=questionary.Style(
            [
                ("selected", "fg:yellow noinherit"),
                ("highlighted", "fg:yellow noinherit"),
                ("pointer", "fg:yellow noinherit"),
            ]
        ),
    ).ask()

    if choice is None:
Exiting...[/red]") exit(1) return choice def select_shallow_thinking_agent(provider) -> str: """Select shallow thinking llm engine using an interactive selection.""" # Static fallback options for each provider SHALLOW_AGENT_OPTIONS = { "openai": [ ("GPT-4o-mini - Fast and efficient for quick tasks", "gpt-4o-mini"), ("GPT-4.1-nano - Ultra-lightweight model for basic operations", "gpt-4.1-nano"), ("GPT-4.1-mini - Compact model with good performance", "gpt-4.1-mini"), ("GPT-4o - Standard model with solid capabilities", "gpt-4o"), ], "anthropic": [ ("Claude Haiku 3.5 - Fast inference and standard capabilities", "claude-3-5-haiku-latest"), ("Claude Sonnet 3.5 - Highly capable standard model", "claude-3-5-sonnet-latest"), ("Claude Sonnet 3.7 - Exceptional hybrid reasoning and agentic capabilities", "claude-3-7-sonnet-latest"), ("Claude Sonnet 4 - High performance and excellent reasoning", "claude-sonnet-4-0"), ], "google": [ ("Gemini 2.0 Flash-Lite - Cost efficiency and low latency", "gemini-2.0-flash-lite"), ("Gemini 2.0 Flash - Next generation features, speed, and thinking", "gemini-2.0-flash"), ("Gemini 2.5 Flash-Lite - Lightweight and cost efficient", "gemini-2.5-flash-lite"), ("Gemini 2.5 Flash - Adaptive thinking, cost efficiency", "gemini-2.5-flash"), ("Gemini 3 Flash Preview - Latest generation flash model", "gemini-3-flash-preview"), ], "openrouter": [ ("Xiaomi MiMo V2 Flash - Fast and efficient multimodal model", "xiaomi/mimo-v2-flash:free"), ("Meta: Llama 4 Scout", "meta-llama/llama-4-scout:free"), ("Meta: Llama 3.3 8B Instruct - A lightweight and ultra-fast variant of Llama 3.3 70B", "meta-llama/llama-3.3-8b-instruct:free"), ("google/gemini-2.0-flash-exp:free - Gemini Flash 2.0 offers a significantly faster time to first token", "google/gemini-2.0-flash-exp:free"), ], "ollama": [ ("llama3.2:3b local", "llama3.2:3b"), ("phi3.5 local", "phi3.5:latest"), ], "lm studio": [ ("Local Model (default)", "local-model"), ] } provider_lower = provider.lower() # Try dynamic fetch for supported providers (OpenAI, Anthropic, Google) model_options = None if provider_lower in ["openai", "anthropic", "google"]: dynamic_models = fetch_models_for_provider(provider_lower) if dynamic_models: model_options = dynamic_models # Fall back to static list if dynamic fetch failed or not supported if model_options is None: model_options = SHALLOW_AGENT_OPTIONS.get(provider_lower, []) choice = questionary.select( "Select Your [Quick-Thinking LLM Engine]:", choices=[ questionary.Choice(display, value=value) for display, value in model_options ], instruction="\n- Use arrow keys to navigate\n- Press Enter to select", style=questionary.Style( [ ("selected", "fg:magenta noinherit"), ("highlighted", "fg:magenta noinherit"), ("pointer", "fg:magenta noinherit"), ] ), ).ask() if choice is None: console.print( "\n[red]No shallow thinking llm engine selected. 
Exiting...[/red]" ) exit(1) return choice def select_deep_thinking_agent(provider) -> str: """Select deep thinking llm engine using an interactive selection.""" # Static fallback options for each provider DEEP_AGENT_OPTIONS = { "openai": [ ("GPT-4.1-nano - Ultra-lightweight model for basic operations", "gpt-4.1-nano"), ("GPT-4.1-mini - Compact model with good performance", "gpt-4.1-mini"), ("GPT-4o - Standard model with solid capabilities", "gpt-4o"), ("o4-mini - Specialized reasoning model (compact)", "o4-mini"), ("o3-mini - Advanced reasoning model (lightweight)", "o3-mini"), ("o3 - Full advanced reasoning model", "o3"), ("o1 - Premier reasoning and problem-solving model", "o1"), ], "anthropic": [ ("Claude Haiku 3.5 - Fast inference and standard capabilities", "claude-3-5-haiku-latest"), ("Claude Sonnet 3.5 - Highly capable standard model", "claude-3-5-sonnet-latest"), ("Claude Sonnet 3.7 - Exceptional hybrid reasoning and agentic capabilities", "claude-3-7-sonnet-latest"), ("Claude Sonnet 4 - High performance and excellent reasoning", "claude-sonnet-4-0"), ("Claude Opus 4 - Most powerful Anthropic model", "claude-opus-4-0"), ], "google": [ ("Gemini 2.0 Flash-Lite - Cost efficiency and low latency", "gemini-2.0-flash-lite"), ("Gemini 2.0 Flash - Next generation features, speed, and thinking", "gemini-2.0-flash"), ("Gemini 2.5 Flash-Lite - Lightweight and cost efficient", "gemini-2.5-flash-lite"), ("Gemini 2.5 Flash - Adaptive thinking, cost efficiency", "gemini-2.5-flash"), ("Gemini 3 Flash Preview - Latest generation flash model", "gemini-3-flash-preview"), ], "openrouter": [ ("Xiaomi MiMo V2 Flash - Fast and efficient multimodal model", "xiaomi/mimo-v2-flash:free"), ("DeepSeek V3 - a 685B-parameter, mixture-of-experts model", "deepseek/deepseek-chat-v3-0324:free"), ], "ollama": [ ("llama3.2:3b local", "llama3.2:3b"), ("phi3.5 local", "phi3.5:latest"), ], "lm studio": [ ("Local Model (default)", "local-model"), ] } provider_lower = provider.lower() # Try dynamic fetch for supported providers (OpenAI, Anthropic, Google) model_options = None if provider_lower in ["openai", "anthropic", "google"]: dynamic_models = fetch_models_for_provider(provider_lower) if dynamic_models: model_options = dynamic_models # Fall back to static list if dynamic fetch failed or not supported if model_options is None: model_options = DEEP_AGENT_OPTIONS.get(provider_lower, []) choice = questionary.select( "Select Your [Deep-Thinking LLM Engine]:", choices=[ questionary.Choice(display, value=value) for display, value in model_options ], instruction="\n- Use arrow keys to navigate\n- Press Enter to select", style=questionary.Style( [ ("selected", "fg:magenta noinherit"), ("highlighted", "fg:magenta noinherit"), ("pointer", "fg:magenta noinherit"), ] ), ).ask() if choice is None: console.print("\n[red]No deep thinking llm engine selected. 
Exiting...[/red]") exit(1) return choice def select_llm_provider() -> tuple[str, str]: """Select the LLM provider using interactive selection with availability checks.""" # Define provider options with their corresponding endpoints BASE_URLS = [ ("OpenAI", "https://api.openai.com/v1"), ("Anthropic", "https://api.anthropic.com/"), ("Google", "https://generativelanguage.googleapis.com/v1"), ("Openrouter", "https://openrouter.ai/api/v1"), ("Ollama", "http://localhost:11434/v1"), ("LM Studio", "http://localhost:1234/v1"), ] # Build choices with availability status choices = [] for display, url in BASE_URLS: available, reason = is_provider_available(display) if available: choices.append(questionary.Choice(display, value=(display, url))) else: # Show disabled option with reason disabled_label = f"{display} ({reason})" choices.append(questionary.Choice( disabled_label, value=(display, url), disabled=reason )) choice = questionary.select( "Select your LLM Provider:", choices=choices, instruction="\n- Use arrow keys to navigate\n- Press Enter to select", style=questionary.Style( [ ("selected", "fg:magenta noinherit"), ("highlighted", "fg:magenta noinherit"), ("pointer", "fg:magenta noinherit"), ] ), ).ask() if choice is None: console.print("\n[red]No LLM provider selected. Exiting...[/red]") exit(1) display_name, url = choice print(f"You selected: {display_name}\tURL: {url}") return display_name, url