This commit is contained in:
Jaskirat 2026-01-29 22:07:45 -05:00
parent 817deb8247
commit 324ef8a98f
4 changed files with 30 additions and 12 deletions

View File

@@ -5,11 +5,11 @@ from openai import OpenAI
class FinancialSituationMemory:
def __init__(self, name, config):
if config["backend_url"] == "http://localhost:11434/v1":
if config["backend_embedding_url"] == "http://localhost:11434/v1":
self.embedding = "nomic-embed-text"
else:
self.embedding = "text-embedding-3-small"
self.client = OpenAI(base_url=config["backend_url"])
self.client = OpenAI(base_url=config["backend_embedding_url"])
self.chroma_client = chromadb.Client(Settings(allow_reset=True))
self.situation_collection = self.chroma_client.create_collection(name=name)

View File

@@ -4,7 +4,7 @@ from typing import Annotated
from .local import get_YFin_data, get_finnhub_news, get_finnhub_company_insider_sentiment, get_finnhub_company_insider_transactions, get_simfin_balance_sheet, get_simfin_cashflow, get_simfin_income_statements, get_reddit_global_news, get_reddit_company_news
from .local_news import get_local_news
from .y_finance import get_YFin_data_online, get_stock_stats_indicators_window, get_balance_sheet as get_yfinance_balance_sheet, get_cashflow as get_yfinance_cashflow, get_income_statement as get_yfinance_income_statement, get_insider_transactions as get_yfinance_insider_transactions
from .google import get_google_news
# Note: get_google_news not imported - incompatible signature with get_news tool interface
from .openai import get_stock_news_openai, get_global_news_openai, get_fundamentals_openai
from .alpha_vantage import (
get_stock as get_alpha_vantage_stock,
@@ -98,11 +98,11 @@ VENDOR_METHODS = {
"local": get_simfin_income_statements,
},
# news_data
# Note: get_google_news removed - incompatible signature (expects look_back_days int, not end_date string)
"get_news": {
"alpha_vantage": get_alpha_vantage_news,
"openai": get_stock_news_openai,
"google": get_google_news,
"local": [get_finnhub_news, get_reddit_company_news, get_google_news],
"local": [get_finnhub_news, get_reddit_company_news],
"local_news": get_local_news
},
"get_global_news": {

View File

@@ -18,10 +18,14 @@ DEFAULT_CONFIG = {
# "quick_think_llm": "qwen3:30b",
# "deep_think_llm": "gpt-oss",
# "quick_think_llm": "gpt-oss",
# "temperature": 0.1,
"deep_think_llm": "glm-4.7-flash",
"quick_think_llm": "glm-4.7-flash",
# "backend_url": "http://localhost:8080/v1",
"backend_url": "http://localhost:11434/v1",
"temperature": 0.7,
"top_p": 0.95,
"backend_url": "http://localhost:8080/v1",
"backend_embedding_url": "http://localhost:8081/v1",
# "backend_url": "http://localhost:11434/v1",
# Debate and discussion settings
"max_debate_rounds": 5,
"max_risk_discuss_rounds": 5,
@@ -49,4 +53,6 @@ DEFAULT_CONFIG = {
# "get_insider_sentiment": "na",
"get_insider_transactions": "yfinance",
},
}
}
# docker run --rm -it --gpus all -v ~/.cache/llama.cpp:/root/.cache/llama.cpp -p 8080:8080 ghcr.io/ggml-org/llama.cpp:server-cuda -hf unsloth/GLM-4.7-Flash-GGUF:Q4_K_M --port 8080 --host 0.0.0.0 -ngl 99 --jinja --ctx-size 32768 --min-p 0.01 --embeddings --pooling cls --ubatch-size 8192

View File

@@ -73,11 +73,23 @@ class TradingAgentsGraph:
# Initialize LLMs
if self.config["llm_provider"].lower() == "openai" or self.config["llm_provider"] == "ollama" or self.config["llm_provider"] == "openrouter":
self.deep_thinking_llm = ChatOpenAI(model=self.config["deep_think_llm"], base_url=self.config["backend_url"], temperature=0)
self.quick_thinking_llm = ChatOpenAI(model=self.config["quick_think_llm"], base_url=self.config["backend_url"], temperature=0)
self.deep_thinking_llm = ChatOpenAI(model=self.config["deep_think_llm"],
base_url=self.config["backend_url"],
temperature=self.config["temperature"],
top_p=self.config["top_p"])
self.quick_thinking_llm = ChatOpenAI(model=self.config["quick_think_llm"],
base_url=self.config["backend_url"],
temperature=self.config["temperature"],
top_p=self.config["top_p"])
elif self.config["llm_provider"].lower() == "anthropic":
self.deep_thinking_llm = ChatAnthropic(model=self.config["deep_think_llm"], base_url=self.config["backend_url"], temperature=0)
self.quick_thinking_llm = ChatAnthropic(model=self.config["quick_think_llm"], base_url=self.config["backend_url"], temperature=0)
self.deep_thinking_llm = ChatAnthropic(model=self.config["deep_think_llm"],
base_url=self.config["backend_url"],
temperature=self.config["temperature"],
top_p=self.config["top_p"])
self.quick_thinking_llm = ChatAnthropic(model=self.config["quick_think_llm"],
base_url=self.config["backend_url"],
temperature=self.config["temperature"],
top_p=self.config["top_p"])
elif self.config["llm_provider"].lower() == "google":
self.deep_thinking_llm = ChatGoogleGenerativeAI(model=self.config["deep_think_llm"])
self.quick_thinking_llm = ChatGoogleGenerativeAI(model=self.config["quick_think_llm"])