ensure tradingagents works

This commit is contained in:
Jaskirat 2025-11-21 17:56:56 -08:00
parent 7b2ae218a3
commit 1b320c0797
7 changed files with 72 additions and 31 deletions

6
.gitignore vendored
View File

@ -9,3 +9,9 @@ eval_results/
eval_data/
*.egg-info/
.env
data/
llamacpp.txt
nohup.out
out.txt
test_llamacppserver.ipynb
venv/

View File

@ -1,7 +1,11 @@
______ ___ ___ __
/_ __/________ _____/ (_)___ ____ _/ | ____ ____ ____ / /______
/ / / ___/ __ `/ __ / / __ \/ __ `/ /| |/ __ `/ _ \/ __ \/ __/ ___/
/ / / / / /_/ / /_/ / / / / / /_/ / ___ / /_/ / __/ / / / /_(__ )
/_/ /_/ \__,_/\__,_/_/_/ /_/\__, /_/ |_\__, /\___/_/ /_/\__/____/
/____/ /____/
/$$$$$$$ /$$ /$$ /$$
| $$__ $$ | $$ | $$|__/
| $$ \ $$ /$$$$$$ | $$$$$$$ /$$$$$$ /$$$$$$$ /$$ /$$ /$$ /$$$$$$/$$$$
| $$$$$$$/ |____ $$| $$__ $$ |____ $$ /$$__ $$| $$| $$ | $$| $$_ $$_ $$
| $$__ $$ /$$$$$$$| $$ \ $$ /$$$$$$$| $$ | $$| $$| $$ | $$| $$ \ $$ \ $$
| $$ \ $$ /$$__ $$| $$ | $$ /$$__ $$| $$ | $$| $$| $$ | $$| $$ | $$ | $$
| $$ | $$| $$$$$$$| $$$$$$$/| $$$$$$$| $$$$$$$| $$| $$$$$$/| $$ | $$ | $$
|__/ |__/ \_______/|_______/ \_______/ \_______/|__/ \______/ |__/ |__/ |__/

View File

@ -127,6 +127,15 @@ def select_shallow_thinking_agent(provider) -> str:
# Define shallow thinking llm engine options with their corresponding model names
SHALLOW_AGENT_OPTIONS = {
"ollama": [
("qwen3_ollama", "qwen3:30b"),
("gemma3", "gemma3:27b"),
("gpt_oss_ollama", "gpt-oss"),
("gpt_oss", "hf.co/unsloth/gpt-oss-20b-GGUF:F16"),
("qwen3", "hf.co/unsloth/Qwen3-30B-A3B-Instruct-2507-GGUF:Q5_K_XL"),
("llama3.1 local", "llama3.1"),
("llama3.2 local", "llama3.2"),
],
"openai": [
("GPT-4o-mini - Fast and efficient for quick tasks", "gpt-4o-mini"),
("GPT-4.1-nano - Ultra-lightweight model for basic operations", "gpt-4.1-nano"),
@ -149,9 +158,8 @@ def select_shallow_thinking_agent(provider) -> str:
("Meta: Llama 3.3 8B Instruct - A lightweight and ultra-fast variant of Llama 3.3 70B", "meta-llama/llama-3.3-8b-instruct:free"),
("google/gemini-2.0-flash-exp:free - Gemini Flash 2.0 offers a significantly faster time to first token", "google/gemini-2.0-flash-exp:free"),
],
"ollama": [
("llama3.1 local", "llama3.1"),
("llama3.2 local", "llama3.2"),
"llama_cpp": [
("default", "default"),
]
}
@ -185,6 +193,14 @@ def select_deep_thinking_agent(provider) -> str:
# Define deep thinking llm engine options with their corresponding model names
DEEP_AGENT_OPTIONS = {
"ollama": [
("qwen3_ollama", "qwen3:30b"),
("gemma3", "gemma3:27b"),
("gpt_oss_ollama", "gpt-oss"),
("gpt_oss", "hf.co/unsloth/gpt-oss-20b-GGUF:F16"),
("qwen3", "hf.co/unsloth/Qwen3-30B-A3B-Instruct-2507-GGUF:Q5_K_XL"),
("llama3.1 local", "llama3.1"),
],
"openai": [
("GPT-4.1-nano - Ultra-lightweight model for basic operations", "gpt-4.1-nano"),
("GPT-4.1-mini - Compact model with good performance", "gpt-4.1-mini"),
@ -211,9 +227,8 @@ def select_deep_thinking_agent(provider) -> str:
("DeepSeek V3 - a 685B-parameter, mixture-of-experts model", "deepseek/deepseek-chat-v3-0324:free"),
("Deepseek - latest iteration of the flagship chat model family from the DeepSeek team.", "deepseek/deepseek-chat-v3-0324:free"),
],
"ollama": [
("llama3.1 local", "llama3.1"),
("qwen3", "qwen3"),
"llama_cpp": [
("default", "default"),
]
}
@ -243,11 +258,12 @@ def select_llm_provider() -> tuple[str, str]:
"""Select the OpenAI api url using interactive selection."""
# Define OpenAI api options with their corresponding endpoints
BASE_URLS = [
("Ollama", "http://localhost:11434/v1"),
("OpenAI", "https://api.openai.com/v1"),
("Anthropic", "https://api.anthropic.com/"),
("Google", "https://generativelanguage.googleapis.com/v1"),
("Openrouter", "https://openrouter.ai/api/v1"),
("Ollama", "http://localhost:11434/v1"),
("Openrouter", "https://openrouter.ai/api/v1"),
("Llama_CPP", "https://localhost:8080/v1"),
]
choice = questionary.select(

View File

@ -12,7 +12,7 @@ def create_news_analyst(llm):
tools = [
get_news,
get_global_news,
# get_global_news,
]
system_message = (

View File

@ -48,7 +48,7 @@ TOOLS_CATEGORIES = {
"tools": [
"get_news",
"get_global_news",
"get_insider_sentiment",
# "get_insider_sentiment",
"get_insider_transactions",
]
}
@ -104,11 +104,11 @@ VENDOR_METHODS = {
},
"get_global_news": {
"openai": get_global_news_openai,
"local": get_reddit_global_news
},
"get_insider_sentiment": {
"local": get_finnhub_company_insider_sentiment
# "local": get_reddit_global_news
},
# "get_insider_sentiment": {
# "local": get_finnhub_company_insider_sentiment
# },
"get_insider_transactions": {
"alpha_vantage": get_alpha_vantage_insider_transactions,
"yfinance": get_yfinance_insider_transactions,

View File

@ -3,31 +3,46 @@ import os
DEFAULT_CONFIG = {
"project_dir": os.path.abspath(os.path.join(os.path.dirname(__file__), ".")),
"results_dir": os.getenv("TRADINGAGENTS_RESULTS_DIR", "./results"),
"data_dir": "/Users/yluo/Documents/Code/ScAI/FR1-data",
"data_dir": "/data/coding/trading_agents",
"data_cache_dir": os.path.join(
os.path.abspath(os.path.join(os.path.dirname(__file__), ".")),
"dataflows/data_cache",
),
# LLM settings
"llm_provider": "openai",
"deep_think_llm": "o4-mini",
"quick_think_llm": "gpt-4o-mini",
"backend_url": "https://api.openai.com/v1",
# "deep_think_llm": "hf.co/unsloth/Qwen3-30B-A3B-Instruct-2507-GGUF:Q5_K_XL",
# "quick_think_llm": "hf.co/unsloth/Qwen3-30B-A3B-Instruct-2507-GGUF:Q5_K_XL",
# "deep_think_llm": "hf.co/unsloth/gpt-oss-20b-GGUF:F16",
# "quick_think_llm": "hf.co/unsloth/gpt-oss-20b-GGUF:F16",
"deep_think_llm": "gemma3:27b",
"quick_think_llm": "gemma3:27b",
# "backend_url": "http://localhost:8080/v1",
"backend_url": "http://localhost:11434/v1",
# Debate and discussion settings
"max_debate_rounds": 1,
"max_risk_discuss_rounds": 1,
"max_recur_limit": 100,
"max_recur_limit": 500,
# Data vendor configuration
# Category-level configuration (default for all tools in category)
"data_vendors": {
"core_stock_apis": "yfinance", # Options: yfinance, alpha_vantage, local
"technical_indicators": "yfinance", # Options: yfinance, alpha_vantage, local
"fundamental_data": "alpha_vantage", # Options: openai, alpha_vantage, local
"news_data": "alpha_vantage", # Options: openai, alpha_vantage, google, local
"news_data": "google", # Options: openai, alpha_vantage, google, local
},
# Tool-level configuration (takes precedence over category-level)
"tool_vendors": {
# Example: "get_stock_data": "alpha_vantage", # Override category default
# Example: "get_news": "openai", # Override category default
"get_stock_data": "yfinance",
"get_indicators": "yfinance",
"get_fundamentals": "alpha_vantage",
"get_balance_sheet": "yfinance",
"get_cashflow": "yfinance",
"get_income_statement": "yfinance",
"get_news": "alpha_vantage",
"get_global_news": "openai",
# "get_insider_sentiment": "na",
"get_insider_transactions": "yfinance",
},
}
}

View File

@ -108,7 +108,7 @@ class TradingAgentsGraph:
self.conditional_logic,
)
self.propagator = Propagator()
self.propagator = Propagator(self.config["max_recur_limit"])
self.reflector = Reflector(self.quick_thinking_llm)
self.signal_processor = SignalProcessor(self.quick_thinking_llm)
@ -141,8 +141,8 @@ class TradingAgentsGraph:
[
# News and insider information
get_news,
get_global_news,
get_insider_sentiment,
# get_global_news,
# get_insider_sentiment,
get_insider_transactions,
]
),