refactor config to settings

This commit is contained in:
mhmmdjafarg 2026-01-01 12:21:35 +07:00
parent 0d8291b2aa
commit f2d6896cc0
22 changed files with 481 additions and 158 deletions

View File

@ -4,10 +4,6 @@ import typer
from pathlib import Path
from functools import wraps
from rich.console import Console
from dotenv import load_dotenv
# Load environment variables from .env file
load_dotenv()
from rich.panel import Panel
from rich.spinner import Spinner
from rich.live import Live
@ -25,7 +21,7 @@ from rich.align import Align
from rich.rule import Rule
from tradingagents.graph.trading_graph import TradingAgentsGraph
from tradingagents.default_config import DEFAULT_CONFIG
from tradingagents.config import get_config
from cli.models import AnalystType
from cli.utils import *
@ -761,22 +757,26 @@ def run_analysis():
# First get all user selections
selections = get_user_selections()
# Create config with selected research depth
config = DEFAULT_CONFIG.copy()
config["max_debate_rounds"] = selections["research_depth"]
config["max_risk_discuss_rounds"] = selections["research_depth"]
config["quick_think_llm"] = selections["shallow_thinker"]
config["deep_think_llm"] = selections["deep_thinker"]
config["backend_url"] = selections["backend_url"]
config["llm_provider"] = selections["llm_provider"].lower()
# Update settings with selected research depth
from tradingagents.config import settings, update_config
config_updates = {
"max_debate_rounds": selections["research_depth"],
"max_risk_discuss_rounds": selections["research_depth"],
"quick_think_llm": selections["shallow_thinker"],
"deep_think_llm": selections["deep_thinker"],
"backend_url": selections["backend_url"],
"llm_provider": selections["llm_provider"].lower()
}
update_config(config_updates)
# Initialize the graph
graph = TradingAgentsGraph(
[analyst.value for analyst in selections["analysts"]], config=config, debug=True
[analyst.value for analyst in selections["analysts"]], config=get_config(), debug=True
)
# Create result directory
results_dir = Path(config["results_dir"]) / selections["ticker"] / selections["analysis_date"]
results_dir = Path(settings.RESULTS_DIR) / selections["ticker"] / selections["analysis_date"]
results_dir.mkdir(parents=True, exist_ok=True)
report_dir = results_dir / "reports"
report_dir.mkdir(parents=True, exist_ok=True)

View File

@ -4,7 +4,7 @@ services:
container_name: ${REDIS_CONTAINER_NAME:-trading_agents_redis}
restart: unless-stopped
ports:
- "6379:6379"
- "6380:6379"
volumes:
- redis-data:/data
environment:

35
main.py
View File

@ -1,27 +1,26 @@
from tradingagents.graph.trading_graph import TradingAgentsGraph
from tradingagents.default_config import DEFAULT_CONFIG
from tradingagents.config import get_config, update_config
from dotenv import load_dotenv
# Get the centralized config (already includes .env loading)
config = get_config()
# Load environment variables from .env file
load_dotenv()
# Create a custom config
config = DEFAULT_CONFIG.copy()
config["deep_think_llm"] = "gpt-4o-mini" # Use a different model
config["quick_think_llm"] = "gpt-4o-mini" # Use a different model
config["max_debate_rounds"] = 1 # Increase debate rounds
# Configure data vendors (default uses yfinance and alpha_vantage)
config["data_vendors"] = {
"core_stock_apis": "yfinance", # Options: yfinance, alpha_vantage, local
"technical_indicators": "yfinance", # Options: yfinance, alpha_vantage, local
"fundamental_data": "alpha_vantage", # Options: openai, alpha_vantage, local
"news_data": "alpha_vantage", # Options: openai, alpha_vantage, google, local
# Customize config if needed
updates = {
"deep_think_llm": "gpt-4o-mini", # Use a different model
"quick_think_llm": "gpt-4o-mini", # Use a different model
"max_debate_rounds": 1, # Increase debate rounds
# Configure data vendors
"data_vendors": {
"core_stock_apis": "yfinance", # Options: yfinance, alpha_vantage, local
"technical_indicators": "yfinance", # Options: yfinance, alpha_vantage, local
"fundamental_data": "alpha_vantage", # Options: openai, alpha_vantage, local
"news_data": "alpha_vantage", # Options: openai, alpha_vantage, google, local
}
}
update_config(updates)
# Initialize with custom config
ta = TradingAgentsGraph(debug=True, config=config)
ta = TradingAgentsGraph(debug=True, config=get_config())
# forward propagate
_, decision = ta.propagate("NVDA", "2024-05-10")

View File

@ -2,49 +2,50 @@ from tradingagents.external.redis.repo import redis_queue, redis_repo
from tradingagents.domain.model import AnalysisMeta, AnalysisStatus
from tradingagents.domain.response import EnqueueAnalysisResponse
from rq import get_current_job
from tradingagents.external.redis.repo import RQ_RETRIES
from tradingagents.graph.trading_graph import TradingAgentsGraph
# from tradingagents.external.redis.repo import RQ_RETRIES
from tradingagents.dataflows.config import get_config
DEFAULT_USER = "global_user"
# Initialize trading agent once at startup
def create_trading_agent():
"""Create trading agent with fixed configuration"""
return TradingAgentsGraph(debug=True, config=get_config())
# def create_trading_agent():
# """Create trading agent with fixed configuration"""
# return TradingAgentsGraph(debug=True, config=get_config())
# Create the trading agent instance once
trading_agent = create_trading_agent()
# # Create the trading agent instance once
# trading_agent = create_trading_agent()
def process_job(user_id: str, symbol: str, date: str):
try:
job = get_current_job()
print(f"INFO: Starting job for symbol {symbol} and date {date} by user {user_id}")
print(f"DEBUG: Job function called - this should only happen with a worker!")
# try:
# job = get_current_job()
attempt = job.meta.get("attempt", 1)
job.meta["attempt"] = attempt
job.save_meta()
# attempt = job.meta.get("attempt", 1)
# job.meta["attempt"] = attempt
# job.save_meta()
print(f"INFO: Processing job-id {job.id} for symbol {symbol} and date {date} by user {user_id}")
# print(f"INFO: Processing job-id {job.id} for symbol {symbol} and date {date} by user {user_id}")
# Update status to RUNNING
redis_repo.update_status_analysis_meta(job_id=job.id, status=AnalysisStatus.RUNNING)
# # Update status to RUNNING
# redis_repo.update_status_analysis_meta(job_id=job.id, status=AnalysisStatus.RUNNING)
final_state, decision = trading_agent.propagate(ticker=symbol, trade_date=date)
# final_state, decision = trading_agent.propagate(ticker=symbol, trade_date=date)
print(f"INFO: Decision for job-id {job.id}: {decision}")
# print(f"INFO: Decision for job-id {job.id}: {decision}")
# Save the final result
redis_repo.save_result(job_id=job.id, final_trade=final_state["final_trade_decision"])
# Update status to DONE
redis_repo.update_status_analysis_meta(job_id=job.id, status=AnalysisStatus.DONE)
# # Save the final result
# redis_repo.save_result(job_id=job.id, final_trade=final_state["final_trade_decision"])
# # Update status to DONE
# redis_repo.update_status_analysis_meta(job_id=job.id, status=AnalysisStatus.DONE)
print(f"INFO: Completed job-id {job.id} for symbol {symbol}")
except Exception as e:
job.meta["attempt"] = attempt + 1
job.save_meta()
print(f"ERROR: Failed to process job-id {job.id}: {e} (Attempt {attempt} of {RQ_RETRIES})")
# Update status to FAILED
redis_repo.update_status_analysis_meta(job_id=job.id, status=AnalysisStatus.FAILED)
# print(f"INFO: Completed job-id {job.id} for symbol {symbol}")
# except Exception as e:
# job.meta["attempt"] = attempt + 1
# job.save_meta()
# print(f"ERROR: Failed to process job-id {job.id}: {e} (Attempt {attempt} of {RQ_RETRIES})")
# # Update status to FAILED
# redis_repo.update_status_analysis_meta(job_id=job.id, status=AnalysisStatus.FAILED)
def enqueue_analysis(symbol: str, date: str) -> EnqueueAnalysisResponse:

View File

@ -1,15 +1,16 @@
import chromadb
from chromadb.config import Settings
from openai import OpenAI
from tradingagents.config import settings
class FinancialSituationMemory:
def __init__(self, name, config):
if config["backend_url"] == "http://localhost:11434/v1":
if settings.BACKEND_URL == "http://localhost:11434/v1":
self.embedding = "nomic-embed-text"
else:
self.embedding = "text-embedding-3-small"
self.client = OpenAI(base_url=config["backend_url"])
self.client = OpenAI(base_url=settings.BACKEND_URL)
self.chroma_client = chromadb.Client(Settings(allow_reset=True))
self.situation_collection = self.chroma_client.get_or_create_collection(name=name)

View File

@ -0,0 +1,28 @@
"""
Centralized configuration package for TradingAgents.
"""
from .manager import (
settings,
get_config,
update_config,
set_config,
get_config_value,
get_nested_config,
get_redis_config,
get_external_config,
get_rq_config,
initialize_config
)
__all__ = [
'settings',
'get_config',
'update_config',
'set_config',
'get_config_value',
'get_nested_config',
'get_redis_config',
'get_external_config',
'get_rq_config',
'initialize_config'
]

View File

@ -0,0 +1,287 @@
"""
Centralized configuration management for TradingAgents.
This module provides a Settings class that loads environment variables
once and makes them available as attributes throughout the application.
"""
import os
from typing import Dict, Any, List
from dotenv import load_dotenv
class Settings:
    """Application settings loaded from environment variables and defaults."""

    # Singleton state shared by every construction of Settings.
    _instance = None
    _initialized = False

    def __new__(cls):
        # Classic singleton: always hand back the same instance.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self):
        # Guard so that repeated Settings() calls do not re-read the
        # environment; the flag is set on the class, not the instance.
        if not self._initialized:
            self._load_environment()
            self._load_settings()
            Settings._initialized = True

    def _load_environment(self):
        """Load environment variables from .env file if it exists."""
        # Try to find .env file starting from current directory and going up
        env_file = None
        current_dir = os.getcwd()
        # Check common locations
        possible_locations = [
            current_dir,
            os.path.dirname(__file__),
            os.path.dirname(os.path.dirname(__file__)),  # tradingagents dir
            os.path.dirname(os.path.dirname(os.path.dirname(__file__))),  # project root
        ]
        for location in possible_locations:
            env_path = os.path.join(location, '.env')
            if os.path.exists(env_path):
                env_file = env_path
                break
        if env_file:
            print(f"INFO: Loading environment from {env_file}")
            load_dotenv(env_file)
        else:
            print("INFO: No .env file found, using system environment variables")

    def _load_settings(self):
        """Load all settings from environment variables with defaults."""
        # App settings
        self.APP_HOST = os.getenv("APP_HOST", "localhost")
        self.APP_PORT = int(os.getenv("APP_PORT", 8000))
        # Directory settings
        self.PROJECT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
        self.RESULTS_DIR = os.getenv("TRADINGAGENTS_RESULTS_DIR", "./results")
        # NOTE(review): the fallback below is a developer-specific absolute
        # path — confirm TRADINGAGENTS_DATA_DIR is always set in deployments.
        self.DATA_DIR = os.getenv("TRADINGAGENTS_DATA_DIR", "/Users/yluo/Documents/Code/ScAI/FR1-data")
        self.DATA_CACHE_DIR = os.path.join(self.PROJECT_DIR, "dataflows/data_cache")
        # LLM settings
        self.LLM_PROVIDER = os.getenv("LLM_PROVIDER", "openai")
        self.DEEP_THINK_LLM = os.getenv("DEEP_THINK_LLM", "gpt-4o-mini")
        self.QUICK_THINK_LLM = os.getenv("QUICK_THINK_LLM", "gpt-4o-mini")
        self.BACKEND_URL = os.getenv("BACKEND_URL", "https://api.openai.com/v1")
        # Debate and discussion settings
        self.MAX_DEBATE_ROUNDS = int(os.getenv("MAX_DEBATE_ROUNDS", 1))
        self.MAX_RISK_DISCUSS_ROUNDS = int(os.getenv("MAX_RISK_DISCUSS_ROUNDS", 1))
        self.MAX_RECUR_LIMIT = int(os.getenv("MAX_RECUR_LIMIT", 100))
        # Data vendor settings
        self.CORE_CRYPTO_APIS = os.getenv("CORE_CRYPTO_APIS", "bybit")
        self.CORE_STOCK_APIS = os.getenv("CORE_STOCK_APIS", "yfinance")
        self.TECHNICAL_INDICATORS = os.getenv("TECHNICAL_INDICATORS", "bybit")
        self.FUNDAMENTAL_DATA = os.getenv("FUNDAMENTAL_DATA", "alpha_vantage")
        self.NEWS_DATA = os.getenv("NEWS_DATA", "openai")
        self.PROFILE_DATA = os.getenv("PROFILE_DATA", "bybit")
        # Tool overrides
        self.TOOL_GET_GLOBAL_NEWS = os.getenv("TOOL_GET_GLOBAL_NEWS", "telegram")
        # External API settings
        self.BINANCE_API_KEY = os.getenv("BINANCE_API_KEY", "")
        self.TAAPI_BASE_URL = os.getenv("TAAPI_BASE_URL", "https://api.taapi.io")
        self.TAAPI_API_KEY = os.getenv("TAAPI_API_KEY", "")
        self.BYBIT_BASE_URL = os.getenv("BYBIT_BASE_URL", "https://api-demo.bybit.com")
        self.BYBIT_API_KEY = os.getenv("BYBIT_API_KEY", "")
        self.BYBIT_API_SECRET = os.getenv("BYBIT_API_SECRET", "")
        self.COIN_GECKO_API_BASE_URL = os.getenv("COIN_GECKO_API_BASE_URL", "https://api.coingecko.com/api/v3")
        self.TELEGRAM_API_ID = os.getenv("TELEGRAM_API_ID", "")
        self.TELEGRAM_API_HASH = os.getenv("TELEGRAM_API_HASH", "")
        self.TELEGRAM_SESSION_NAME = os.getenv("TELEGRAM_SESSION_NAME", "")
        # Redis settings
        self.REDIS_HOST = os.getenv("REDIS_HOST", "localhost")
        self.REDIS_PORT = int(os.getenv("REDIS_PORT", 6379))
        self.REDIS_PASSWORD = os.getenv("REDIS_PASSWORD", "defaultpassword")
        self.REDIS_DB = int(os.getenv("REDIS_DB", 0))
        # RQ settings
        self.RQ_RETRIES = int(os.getenv("RQ_RETRIES", 3))
        # Comma-separated retry intervals in seconds, e.g. "30,60,120".
        self.RQ_INTERVALS = [
            int(x.strip()) for x in os.getenv("RQ_INTERVALS", "30,60,120").split(",")
        ]

    @property
    def data_vendors(self) -> Dict[str, str]:
        """Get data vendors configuration as dictionary for backwards compatibility."""
        return {
            "core_crypto_apis": self.CORE_CRYPTO_APIS,
            "core_stock_apis": self.CORE_STOCK_APIS,
            "technical_indicators": self.TECHNICAL_INDICATORS,
            "fundamental_data": self.FUNDAMENTAL_DATA,
            "news_data": self.NEWS_DATA,
            "profile_data": self.PROFILE_DATA,
        }

    @property
    def tool_vendors(self) -> Dict[str, str]:
        """Get tool vendors configuration as dictionary for backwards compatibility."""
        return {
            "get_global_news": self.TOOL_GET_GLOBAL_NEWS
        }

    @property
    def tool_providers(self) -> Dict[str, str]:
        """Get tool providers configuration as dictionary for backwards compatibility."""
        return {
            "TAAPI_BASE_URL": self.TAAPI_BASE_URL,
        }

    @property
    def external(self) -> Dict[str, str]:
        """Get external APIs configuration as dictionary for backwards compatibility."""
        return {
            "BINANCE_API_KEY": self.BINANCE_API_KEY,
            "TAAPI_BASE_URL": self.TAAPI_BASE_URL,
            "TAAPI_API_KEY": self.TAAPI_API_KEY,
            "BYBIT_BASE_URL": self.BYBIT_BASE_URL,
            "BYBIT_API_KEY": self.BYBIT_API_KEY,
            "BYBIT_API_SECRET": self.BYBIT_API_SECRET,
            "COIN_GECKO_API_BASE_URL": self.COIN_GECKO_API_BASE_URL,
            "TELEGRAM_API_ID": self.TELEGRAM_API_ID,
            "TELEGRAM_API_HASH": self.TELEGRAM_API_HASH,
            "TELEGRAM_SESSION_NAME": self.TELEGRAM_SESSION_NAME,
        }

    @property
    def redis(self) -> Dict[str, Any]:
        """Get Redis configuration as dictionary for backwards compatibility."""
        return {
            "REDIS_HOST": self.REDIS_HOST,
            "REDIS_PORT": self.REDIS_PORT,
            "REDIS_PASSWORD": self.REDIS_PASSWORD,
            "REDIS_DB": self.REDIS_DB,
        }

    def to_dict(self) -> Dict[str, Any]:
        """Convert settings to dictionary for backwards compatibility."""
        # Keys mirror the legacy DEFAULT_CONFIG layout so existing callers
        # that index the dict keep working unchanged.
        return {
            # App config
            "APP_HOST": self.APP_HOST,
            "APP_PORT": self.APP_PORT,
            # Directory settings
            "project_dir": self.PROJECT_DIR,
            "results_dir": self.RESULTS_DIR,
            "data_dir": self.DATA_DIR,
            "data_cache_dir": self.DATA_CACHE_DIR,
            # LLM settings
            "llm_provider": self.LLM_PROVIDER,
            "deep_think_llm": self.DEEP_THINK_LLM,
            "quick_think_llm": self.QUICK_THINK_LLM,
            "backend_url": self.BACKEND_URL,
            # Debate settings
            "max_debate_rounds": self.MAX_DEBATE_ROUNDS,
            "max_risk_discuss_rounds": self.MAX_RISK_DISCUSS_ROUNDS,
            "max_recur_limit": self.MAX_RECUR_LIMIT,
            # Data vendors
            "data_vendors": self.data_vendors,
            "tool_vendors": self.tool_vendors,
            "tool_providers": self.tool_providers,
            "external": self.external,
            "redis": self.redis,
        }
# Global singleton instance
settings = Settings()
# Backwards compatibility functions
def get_config() -> Dict[str, Any]:
    """Get configuration as dictionary for backwards compatibility.

    Returns a fresh dict snapshot of the ``settings`` singleton; mutating
    the returned dict does not change the live settings.
    """
    return settings.to_dict()
def update_config(updates: Dict[str, Any]) -> None:
    """Update configuration for backwards compatibility.

    Accepts the legacy flat-dict config keys (the shape produced by
    ``get_config()`` / ``Settings.to_dict()``) and writes the corresponding
    attributes on the ``settings`` singleton. Unknown keys are ignored.

    Compared to the previous if/elif chain this also honors the legacy
    directory and limit keys (``results_dir``, ``data_dir``,
    ``data_cache_dir``, ``max_recur_limit``), which used to be silently
    dropped; this is backward compatible since they previously had no effect.

    Args:
        updates: Mapping of legacy config keys to their new values. The
            ``"data_vendors"`` key may hold a nested dict of vendor settings.
    """
    # Legacy flat key -> Settings attribute name.
    top_level_map = {
        "llm_provider": "LLM_PROVIDER",
        "deep_think_llm": "DEEP_THINK_LLM",
        "quick_think_llm": "QUICK_THINK_LLM",
        "backend_url": "BACKEND_URL",
        "max_debate_rounds": "MAX_DEBATE_ROUNDS",
        "max_risk_discuss_rounds": "MAX_RISK_DISCUSS_ROUNDS",
        "max_recur_limit": "MAX_RECUR_LIMIT",
        "results_dir": "RESULTS_DIR",
        "data_dir": "DATA_DIR",
        "data_cache_dir": "DATA_CACHE_DIR",
    }
    # Legacy nested data_vendors key -> Settings attribute name.
    vendor_map = {
        "core_crypto_apis": "CORE_CRYPTO_APIS",
        "core_stock_apis": "CORE_STOCK_APIS",
        "technical_indicators": "TECHNICAL_INDICATORS",
        "fundamental_data": "FUNDAMENTAL_DATA",
        "news_data": "NEWS_DATA",
        "profile_data": "PROFILE_DATA",
    }
    for key, value in updates.items():
        if key == "data_vendors" and isinstance(value, dict):
            for vendor_key, vendor_value in value.items():
                attr = vendor_map.get(vendor_key)
                if attr is not None:
                    setattr(settings, attr, vendor_value)
        else:
            attr = top_level_map.get(key)
            if attr is not None:
                setattr(settings, attr, value)
def set_config(config: Dict[str, Any]) -> None:
    """Set/update configuration. For backwards compatibility.

    Thin alias for :func:`update_config`; kept so older call sites that
    imported ``set_config`` continue to work.
    """
    update_config(config)
def get_config_value(key: str, default: Any = None) -> Any:
    """Get a specific configuration value.

    The key is matched case-insensitively against the upper-case
    attribute names on the ``settings`` singleton; *default* is returned
    when no such setting exists.
    """
    try:
        return getattr(settings, key.upper())
    except AttributeError:
        return default
def get_nested_config(*keys: str, default: Any = None) -> Any:
    """Get a nested configuration value.

    Supports the legacy two-level lookups ``("redis", <key>)`` and
    ``("external", <key>)``; a single key resolves directly against the
    ``settings`` attributes (case-insensitive). Any other key shape
    returns *default*.

    Fix: the legacy Redis dict (``Settings.redis``) exposes keys that
    already carry the ``REDIS_`` prefix (e.g. ``"REDIS_HOST"``), so the
    old unconditional ``f"REDIS_{keys[1]}"`` produced names like
    ``REDIS_REDIS_HOST`` and always fell back to *default*. Both the
    prefixed and bare forms are now accepted.
    """
    if len(keys) == 1:
        return getattr(settings, keys[0].upper(), default)
    # Handle nested keys for backwards compatibility
    if len(keys) == 2:
        if keys[0] == "redis":
            name = keys[1].upper()
            # Avoid double-prefixing while still accepting ("redis", "HOST").
            attr = name if name.startswith("REDIS_") else f"REDIS_{name}"
            return getattr(settings, attr, default)
        elif keys[0] == "external":
            return getattr(settings, keys[1], default)
    return default
def initialize_config() -> None:
    """Force initialization of configuration.

    Re-reads all settings from the current process environment onto the
    existing singleton (useful after environment variables change).
    Note: this does not re-run the .env discovery in _load_environment.
    """
    settings._load_settings()
# Convenience functions
def get_redis_config() -> Dict[str, Any]:
    """Get Redis configuration.

    Returns the legacy-shaped dict from ``Settings.redis`` (keys
    ``REDIS_HOST``, ``REDIS_PORT``, ``REDIS_PASSWORD``, ``REDIS_DB``).
    """
    return settings.redis
def get_external_config() -> Dict[str, Any]:
    """Get external APIs configuration.

    Returns the legacy-shaped dict from ``Settings.external`` (Binance,
    TAAPI, Bybit, CoinGecko, and Telegram credentials/URLs).
    """
    return settings.external
def get_rq_config() -> Dict[str, Any]:
    """Get RQ configuration.

    Returns the retry count and the list of retry intervals (seconds)
    consumed by the RQ queue setup.
    """
    return {
        "RQ_RETRIES": settings.RQ_RETRIES,
        "RQ_INTERVALS": settings.RQ_INTERVALS,
    }

View File

@ -5,6 +5,7 @@ from datetime import datetime
import csv
import io
from tradingagents.dataflows.config import get_config
from tradingagents.config import settings
_client = None
@ -13,8 +14,7 @@ def get_binance_client():
global _client
if _client is None:
try:
config = get_config()
api_key = config["external"].get("BINANCE_API_KEY", "")
api_key = settings.BINANCE_API_KEY
if not api_key:
raise ValueError("BINANCE_API_KEY not found in configuration")

View File

@ -6,7 +6,7 @@ from typing import Dict, Optional, List
from urllib.parse import urlencode
import requests
from .config import get_config
from tradingagents.config import settings
import json
from datetime import datetime, timedelta, timezone
@ -16,10 +16,9 @@ from stockstats import StockDataFrame
def bybit_v5_request(method: str, path: str, params: Optional[Dict] = None, body: Optional[Dict] = None) -> Dict:
"""Generic signed HTTP request helper for Bybit V5 API."""
config = get_config()["external"]
base_url = config["BYBIT_BASE_URL"].rstrip("/")
api_key = config["BYBIT_API_KEY"]
api_secret = config["BYBIT_API_SECRET"]
base_url = settings.BYBIT_BASE_URL.rstrip("/")
api_key = settings.BYBIT_API_KEY
api_secret = settings.BYBIT_API_SECRET
if not api_key or not api_secret:
raise ValueError("Missing BYBIT_API_KEY or BYBIT_API_SECRET")

View File

@ -1,6 +1,5 @@
import requests
from .alpha_vantage_common import API_BASE_URL
from tradingagents.dataflows.config import get_config
from tradingagents.config import settings
def get_market_cap() -> str:
"""
@ -9,8 +8,7 @@ def get_market_cap() -> str:
Returns:
str: Market capitalization percentage data for cryptocurrencies
"""
config = get_config()
api_base_url = config["external"].get("COIN_GECKO_API_BASE_URL", "https://api.coingecko.com/api/v3")
api_base_url = settings.COIN_GECKO_API_BASE_URL
endpoint = f"{api_base_url}/global"
response = requests.get(endpoint)
print(f"DEBUG: CoinGecko API response status code: {response.status_code}")

View File

@ -1,33 +1,36 @@
import tradingagents.default_config as default_config
"""
Backwards compatibility layer for dataflows config.
This module now uses the centralized configuration system.
"""
from tradingagents.config import (
get_config as _get_config,
update_config as _update_config,
get_config_value
)
from typing import Dict, Optional
# Use default config but allow it to be overridden
_config: Optional[Dict] = None
# For backwards compatibility
DATA_DIR: Optional[str] = None
def initialize_config():
"""Initialize the configuration with default values."""
global _config, DATA_DIR
if _config is None:
_config = default_config.DEFAULT_CONFIG.copy()
DATA_DIR = _config["data_dir"]
global DATA_DIR
config = _get_config()
DATA_DIR = config.get("data_dir")
def set_config(config: Dict):
"""Update the configuration with custom values."""
global _config, DATA_DIR
if _config is None:
_config = default_config.DEFAULT_CONFIG.copy()
_config.update(config)
DATA_DIR = _config["data_dir"]
global DATA_DIR
_update_config(config)
updated_config = _get_config()
DATA_DIR = updated_config.get("data_dir")
def get_config() -> Dict:
"""Get the current configuration."""
if _config is None:
initialize_config()
return _config.copy()
return _get_config()
# Initialize with default config

View File

@ -1,4 +1,5 @@
from typing import Annotated
from tradingagents.config import settings
# Import from vendor-specific modules
from .local import get_YFin_data, get_finnhub_news, get_finnhub_company_insider_sentiment, get_finnhub_company_insider_transactions, get_simfin_balance_sheet, get_simfin_cashflow, get_simfin_income_statements, get_reddit_global_news, get_reddit_company_news, get_fear_and_greed
@ -196,7 +197,8 @@ def get_vendor(category: str, method: str = None) -> str:
return tool_vendors[method]
# Fall back to category-level configuration
return config.get("data_vendors", {}).get(category, "default")
data_vendors = settings.data_vendors
return data_vendors.get(category, "default")
def route_to_vendor(method: str, *args, **kwargs):
"""Route method calls to appropriate vendor implementation with fallback support."""

View File

@ -1,5 +1,5 @@
from openai import OpenAI
from .config import get_config
from tradingagents.config import settings
_client = None
@ -8,8 +8,7 @@ def get_openai_client():
global _client
if _client is None:
try:
config = get_config()
base_url = config.get("backend_url")
base_url = settings.BACKEND_URL
if not base_url:
raise ValueError("backend_url not found in configuration")
_client = OpenAI(base_url=base_url)
@ -20,11 +19,10 @@ def get_openai_client():
return _client
def get_stock_news_openai(query, start_date, end_date):
config = get_config()
client = get_openai_client()
response = client.responses.create(
model=config["quick_think_llm"],
model=settings.QUICK_THINK_LLM,
input=[
{
"role": "system",
@ -55,11 +53,10 @@ def get_stock_news_openai(query, start_date, end_date):
return response.output[1].content[0].text
def get_crypto_news_openai(query, start_date, end_date):
config = get_config()
client = get_openai_client()
response = client.responses.create(
model=config["quick_think_llm"],
model=settings.QUICK_THINK_LLM,
input=[
{
"role": "system",
@ -90,11 +87,10 @@ def get_crypto_news_openai(query, start_date, end_date):
return response.output[1].content[0].text
def get_global_news_openai(curr_date, look_back_days=7, limit=5):
config = get_config()
client = get_openai_client()
response = client.responses.create(
model=config["quick_think_llm"],
model=settings.QUICK_THINK_LLM,
input=[
{
"role": "system",
@ -126,11 +122,10 @@ def get_global_news_openai(curr_date, look_back_days=7, limit=5):
def get_fundamentals_openai(ticker, curr_date):
config = get_config()
client = get_openai_client()
response = client.responses.create(
model=config["quick_think_llm"],
model=settings.QUICK_THINK_LLM,
input=[
{
"role": "system",
@ -162,11 +157,10 @@ def get_fundamentals_openai(ticker, curr_date):
return response.output[1].content[0].text
def get_whitepaper_openai(symbol):
config = get_config()
client = get_openai_client()
response = client.responses.create(
model=config["quick_think_llm"],
model=settings.QUICK_THINK_LLM,
input=[
{
"role": "system",

View File

@ -18,9 +18,9 @@ class StockstatsUtils:
],
):
# Get config and set up data directory path
from tradingagents.config import settings
config = get_config()
online = config["data_vendors"]["technical_indicators"] != "local"
online = settings.TECHNICAL_INDICATORS != "local"
df = None
data = None
@ -46,10 +46,10 @@ class StockstatsUtils:
end_date = end_date.strftime("%Y-%m-%d")
# Get config and ensure cache directory exists
os.makedirs(config["data_cache_dir"], exist_ok=True)
os.makedirs(settings.DATA_CACHE_DIR, exist_ok=True)
data_file = os.path.join(
config["data_cache_dir"],
settings.DATA_CACHE_DIR,
f"{symbol}-YFin-data-{start_date}-{end_date}.csv",
)

View File

@ -1,6 +1,6 @@
import requests
from typing import Annotated, List
from tradingagents.dataflows.config import get_config
from tradingagents.config import settings
# This is for single indicator, unused for now but kept for reference
def get_crypto_stats_indicators_window(
@ -48,9 +48,8 @@ def get_crypto_stats_indicators_window(
if indicator.lower() not in supported_indicators:
return f"Error: Indicator '{indicator}' is not supported. Please choose from: {list(supported_indicators.keys())}"
config = get_config()
base_url = config["external"].get("TAAPI_BASE_URL", "https://api.taapi.io")
api_key = config["external"].get("TAAPI_API_KEY", "")
base_url = settings.TAAPI_BASE_URL
api_key = settings.TAAPI_API_KEY
if not api_key:
return "Error: TAAPI_API_KEY is not set in the configuration."
@ -179,9 +178,8 @@ def get_crypto_stats_indicators(
if invalid_indicators:
return f"Error: Indicators {invalid_indicators} are not supported. Please choose from: {list(supported_indicators.keys())}"
config = get_config()
base_url = config["external"].get("TAAPI_BASE_URL", "https://api.taapi.io")
api_key = config["external"].get("TAAPI_API_KEY", "")
base_url = settings.TAAPI_BASE_URL
api_key = settings.TAAPI_API_KEY
if not api_key:
return "Error: TAAPI_API_KEY is not set in the configuration."

View File

@ -1,14 +1,13 @@
import asyncio
from telethon import TelegramClient
from datetime import datetime, timedelta, timezone
from tradingagents.dataflows.config import get_config
from tradingagents.config import settings
def get_api_credentials():
"""Retrieve Telegram API credentials from environment variables."""
config = get_config()
api_id = config["external"]["TELEGRAM_API_ID"]
api_hash = config["external"]["TELEGRAM_API_HASH"]
session_name = config["external"]["TELEGRAM_SESSION_NAME"]
api_id = settings.TELEGRAM_API_ID
api_hash = settings.TELEGRAM_API_HASH
session_name = settings.TELEGRAM_SESSION_NAME
if not api_id or not api_hash or not session_name:
raise ValueError("Missing required Telegram credentials: TELEGRAM_API_ID, TELEGRAM_API_HASH, or TELEGRAM_SESSION_NAME")

View File

@ -199,15 +199,15 @@ def _get_stock_stats_bulk(
from stockstats import wrap
import os
config = get_config()
online = config["data_vendors"]["technical_indicators"] != "local"
from tradingagents.config import settings
online = settings.TECHNICAL_INDICATORS != "local"
if not online:
# Local data path
try:
data = pd.read_csv(
os.path.join(
config.get("data_cache_dir", "data"),
settings.DATA_CACHE_DIR,
f"{symbol}-YFin-data-2015-01-01-2025-03-25.csv",
)
)
@ -224,10 +224,10 @@ def _get_stock_stats_bulk(
start_date_str = start_date.strftime("%Y-%m-%d")
end_date_str = end_date.strftime("%Y-%m-%d")
os.makedirs(config["data_cache_dir"], exist_ok=True)
os.makedirs(settings.DATA_CACHE_DIR, exist_ok=True)
data_file = os.path.join(
config["data_cache_dir"],
settings.DATA_CACHE_DIR,
f"{symbol}-YFin-data-{start_date_str}-{end_date_str}.csv",
)
@ -404,4 +404,4 @@ def get_insider_transactions(
return header + csv_string
except Exception as e:
return f"Error retrieving insider transactions for {ticker}: {str(e)}"
return f"Error retrieving insider transactions for {ticker}: {str(e)}"

View File

@ -1,4 +1,20 @@
"""
DEPRECATED: This configuration file is deprecated in favor of the centralized config system.
Please use tradingagents.config instead:
from tradingagents.config import get_config
config = get_config()
This file is kept for backwards compatibility only.
"""
import os
import warnings
warnings.warn(
"tradingagents.default_config is deprecated. Please use 'from tradingagents.config import get_config' instead.",
DeprecationWarning,
stacklevel=2
)
DEFAULT_CONFIG = {
# App config

View File

@ -1,25 +1,29 @@
from redis import Redis, ConnectionPool
from redis.backoff import ExponentialBackoff
from redis.retry import Retry
from tradingagents.dataflows.config import get_config
from redis.exceptions import ResponseError, DataError
from tradingagents.config import settings
import logging
_client = None
logger = logging.getLogger(__name__)
def get_redis_client() -> Redis:
"""Get or create Redis client with lazy initialization."""
global _client
if _client is None:
try:
config = get_config()
print(f"INFO: Creating Redis connection pool config {config}")
print(f"INFO: Creating Redis connection pool with host={settings.REDIS_HOST}, port={settings.REDIS_PORT}")
retry = Retry(ExponentialBackoff(), retries=5)
pool = ConnectionPool(
host=config["redis"]["REDIS_HOST"],
port=config["redis"]["REDIS_PORT"],
password=config["redis"]["REDIS_PASSWORD"],
db=config["redis"]["REDIS_DB"],
decode_responses=True,
host=settings.REDIS_HOST,
port=settings.REDIS_PORT,
password=settings.REDIS_PASSWORD,
db=settings.REDIS_DB,
decode_responses=False, # Set to False to let RQ handle decoding
encoding='utf-8',
socket_connect_timeout=5,
socket_timeout=5,
health_check_interval=10,
@ -28,6 +32,7 @@ def get_redis_client() -> Redis:
print("INFO: Initializing Redis client")
_client = Redis(connection_pool=pool)
print("INFO: Redis client initialized successfully")
except Exception as e:
print(f"ERROR: Failed to initialize Redis client: {e}")
raise

View File

@ -1,13 +1,10 @@
import time
from tradingagents.external.redis.client import get_redis_client
from tradingagents.domain.model import AnalysisMeta, AnalysisStatus
from tradingagents.config import settings
from rq import Queue, Retry
from redis import Redis
# TODO: Move to config
RQ_RETRIES = 3
RQ_INTERVAL = [30, 60, 120]
ANALYSIS_META_KEY = "analysis:meta:{job_id}"
ANALYSIS_RESULT_KEY = "analysis:result:{job_id}"
ANALYSIS_COOLDOWN_KEY = "tradingagents-analysis-cooldown-{user_id}:{symbol}"
@ -102,4 +99,4 @@ class RedisRepo:
redis_repo = RedisRepo(get_redis_client())
redis_queue = Queue(connection=get_redis_client(), retry=Retry(max=RQ_RETRIES, interval=RQ_INTERVAL))
redis_queue = Queue(connection=get_redis_client(), retry=Retry(max=settings.RQ_RETRIES, interval=settings.RQ_INTERVALS))

View File

@ -13,14 +13,14 @@ from langchain_google_genai import ChatGoogleGenerativeAI
from langgraph.prebuilt import ToolNode
from tradingagents.agents import *
from tradingagents.default_config import DEFAULT_CONFIG
from tradingagents.config import settings, get_config, set_config
from tradingagents.agents.utils.memory import FinancialSituationMemory
from tradingagents.agents.utils.agent_states import (
AgentState,
InvestDebateState,
RiskDebateState,
)
from tradingagents.dataflows.config import set_config
# Import removed as set_config is now imported from tradingagents.config
# Import the new abstract tool methods from agent_utils
from tradingagents.agents.utils.agent_utils import (
@ -67,29 +67,29 @@ class TradingAgentsGraph:
config: Configuration dictionary. If None, uses default config
"""
self.debug = debug
self.config = config or DEFAULT_CONFIG
self.config = config or get_config()
# Update the interface's config
set_config(self.config)
# Create necessary directories
os.makedirs(
os.path.join(self.config["project_dir"], "dataflows/data_cache"),
os.path.join(settings.PROJECT_DIR, "dataflows/data_cache"),
exist_ok=True,
)
# Initialize LLMs
if self.config["llm_provider"].lower() == "openai" or self.config["llm_provider"] == "ollama" or self.config["llm_provider"] == "openrouter":
self.deep_thinking_llm = ChatOpenAI(model=self.config["deep_think_llm"], base_url=self.config["backend_url"])
self.quick_thinking_llm = ChatOpenAI(model=self.config["quick_think_llm"], base_url=self.config["backend_url"])
elif self.config["llm_provider"].lower() == "anthropic":
self.deep_thinking_llm = ChatAnthropic(model=self.config["deep_think_llm"], base_url=self.config["backend_url"])
self.quick_thinking_llm = ChatAnthropic(model=self.config["quick_think_llm"], base_url=self.config["backend_url"])
elif self.config["llm_provider"].lower() == "google":
self.deep_thinking_llm = ChatGoogleGenerativeAI(model=self.config["deep_think_llm"])
self.quick_thinking_llm = ChatGoogleGenerativeAI(model=self.config["quick_think_llm"])
if settings.LLM_PROVIDER.lower() == "openai" or settings.LLM_PROVIDER == "ollama" or settings.LLM_PROVIDER == "openrouter":
self.deep_thinking_llm = ChatOpenAI(model=settings.DEEP_THINK_LLM, base_url=settings.BACKEND_URL)
self.quick_thinking_llm = ChatOpenAI(model=settings.QUICK_THINK_LLM, base_url=settings.BACKEND_URL)
elif settings.LLM_PROVIDER.lower() == "anthropic":
self.deep_thinking_llm = ChatAnthropic(model=settings.DEEP_THINK_LLM, base_url=settings.BACKEND_URL)
self.quick_thinking_llm = ChatAnthropic(model=settings.QUICK_THINK_LLM, base_url=settings.BACKEND_URL)
elif settings.LLM_PROVIDER.lower() == "google":
self.deep_thinking_llm = ChatGoogleGenerativeAI(model=settings.DEEP_THINK_LLM)
self.quick_thinking_llm = ChatGoogleGenerativeAI(model=settings.QUICK_THINK_LLM)
else:
raise ValueError(f"Unsupported LLM provider: {self.config['llm_provider']}")
raise ValueError(f"Unsupported LLM provider: {settings.LLM_PROVIDER}")
# Initialize memories
self.bull_memory = FinancialSituationMemory("bull_memory", self.config)

View File

@ -5,11 +5,7 @@ from datetime import datetime
# Import your trading agents
from service import enqueue_analysis
from tradingagents.dataflows.config import get_config
from dotenv import load_dotenv
# Load environment variables
load_dotenv()
from tradingagents.config import get_config
config = get_config()