diff --git a/cli/utils.py b/cli/utils.py
index 7b9682a6..dd816f73 100644
--- a/cli/utils.py
+++ b/cli/utils.py
@@ -1,6 +1,8 @@
 import questionary
 from typing import List, Optional, Tuple, Dict
 
+from rich.console import Console
+
 from cli.models import AnalystType
 
 ANALYST_ORDER = [
@@ -10,6 +12,8 @@ ANALYST_ORDER = [
     ("Fundamentals Analyst", AnalystType.FUNDAMENTALS),
 ]
 
+console = Console()
+
 
 def get_ticker() -> str:
     """Prompt the user to enter a ticker symbol."""
@@ -48,7 +52,7 @@ def get_analysis_date() -> str:
     date = questionary.text(
         "Enter the analysis date (YYYY-MM-DD):",
         validate=lambda x: validate_date(x.strip())
-        or "Please enter a valid date in YYYY-MM-DD format.",
+        or "Please enter a valid date in YYYY-MM-DD format.",
         style=questionary.Style(
             [
                 ("text", "fg:green"),
@@ -146,8 +150,10 @@ def select_shallow_thinking_agent(provider) -> str:
         ],
         "openrouter": [
             ("Meta: Llama 4 Scout", "meta-llama/llama-4-scout:free"),
-            ("Meta: Llama 3.3 8B Instruct - A lightweight and ultra-fast variant of Llama 3.3 70B", "meta-llama/llama-3.3-8b-instruct:free"),
-            ("google/gemini-2.0-flash-exp:free - Gemini Flash 2.0 offers a significantly faster time to first token", "google/gemini-2.0-flash-exp:free"),
+            ("Meta: Llama 3.3 8B Instruct - A lightweight and ultra-fast variant of Llama 3.3 70B",
+             "meta-llama/llama-3.3-8b-instruct:free"),
+            ("google/gemini-2.0-flash-exp:free - Gemini Flash 2.0 offers a significantly faster time to first token",
+             "google/gemini-2.0-flash-exp:free"),
         ],
         "ollama": [
             ("llama3.1 local", "llama3.1"),
@@ -209,14 +215,15 @@ def select_deep_thinking_agent(provider) -> str:
         ],
         "openrouter": [
             ("DeepSeek V3 - a 685B-parameter, mixture-of-experts model", "deepseek/deepseek-chat-v3-0324:free"),
-            ("Deepseek - latest iteration of the flagship chat model family from the DeepSeek team.", "deepseek/deepseek-chat-v3-0324:free"),
+            ("Deepseek - latest iteration of the flagship chat model family from the DeepSeek team.",
+             "deepseek/deepseek-chat-v3-0324:free"),
         ],
         "ollama": [
             ("llama3.1 local", "llama3.1"),
             ("qwen3", "qwen3"),
         ]
     }
-    
+
     choice = questionary.select(
         "Select Your [Deep-Thinking LLM Engine]:",
         choices=[
@@ -239,6 +246,7 @@ def select_deep_thinking_agent(provider) -> str:
 
     return choice
 
+
 def select_llm_provider() -> tuple[str, str]:
     """Select the OpenAI api url using interactive selection."""
     # Define OpenAI api options with their corresponding endpoints
@@ -247,9 +255,9 @@
         ("Anthropic", "https://api.anthropic.com/"),
         ("Google", "https://generativelanguage.googleapis.com/v1"),
         ("Openrouter", "https://openrouter.ai/api/v1"),
-        ("Ollama", "http://localhost:11434/v1"),
+        ("Ollama", "http://localhost:11434/v1"),
     ]
-    
+
     choice = questionary.select(
         "Select your LLM Provider:",
         choices=[
...
             ]
         ),
     ).ask()
-    
+
     if choice is None:
         console.print("\n[red]no OpenAI backend selected. Exiting...[/red]")
         exit(1)
-    
+
     display_name, url = choice
     print(f"You selected: {display_name}\tURL: {url}")
-    
+
     return display_name, url