This commit is contained in:
parent
c3abae8549
commit
d522d3ec43
|
|
@ -0,0 +1,15 @@
|
|||
#!/usr/bin/env bash
# Recursively remove Python bytecode caches (__pycache__) and macOS Finder
# metadata files (.DS_Store) under a target directory.
#
# Usage: clean.sh [TARGET_DIR]
#   TARGET_DIR defaults to the current directory when omitted.

set -e

# Clean the current directory by default; an explicit directory may be passed
# as the first argument.
TARGET_DIR="${1:-.}"

echo "🧹 Cleaning __pycache__ & .DS_Store under: $TARGET_DIR"

# Delete every __pycache__ directory.
# -prune stops find from descending into a directory that is being deleted by
# -exec; without it, nested __pycache__ directories make find report
# "No such file or directory" and exit nonzero, which aborts the script
# under `set -e`.
find "$TARGET_DIR" -type d -name "__pycache__" -prune -exec rm -rf {} +

# Delete every .DS_Store file.
find "$TARGET_DIR" -type f -name ".DS_Store" -delete

echo "✅ Done. All __pycache__ and .DS_Store removed."
|
||||
|
|
@ -10,7 +10,7 @@ DEFAULT_CONFIG = {
|
|||
),
|
||||
# LLM 設定
|
||||
"llm_provider": "openai",
|
||||
"deep_think_llm": "o4-mini",
|
||||
"deep_think_llm": "gpt-4o-mini",
|
||||
"quick_think_llm": "gpt-4o-mini",
|
||||
"backend_url": "https://api.openai.com/v1",
|
||||
# 辯論與討論設定
|
||||
|
|
|
|||
|
|
@ -84,8 +84,18 @@ class TradingAgentsGraph:
|
|||
# 初始化 LLM
|
||||
provider = self.config["llm_provider"].lower()
|
||||
if provider in ["openai", "ollama", "openrouter"]:
|
||||
self.deep_thinking_llm = ChatOpenAI(model=self.config["deep_think_llm"], base_url=self.config["backend_url"])
|
||||
self.quick_thinking_llm = ChatOpenAI(model=self.config["quick_think_llm"], base_url=self.config["backend_url"])
|
||||
# Get the OpenAI API key from environment variable
|
||||
openai_api_key = os.getenv("OPENAI_API_KEY")
|
||||
self.deep_thinking_llm = ChatOpenAI(
|
||||
model=self.config["deep_think_llm"],
|
||||
base_url=self.config["backend_url"],
|
||||
openai_api_key=openai_api_key
|
||||
)
|
||||
self.quick_thinking_llm = ChatOpenAI(
|
||||
model=self.config["quick_think_llm"],
|
||||
base_url=self.config["backend_url"],
|
||||
openai_api_key=openai_api_key
|
||||
)
|
||||
elif provider == "anthropic":
|
||||
self.deep_thinking_llm = ChatAnthropic(model=self.config["deep_think_llm"], base_url=self.config["backend_url"])
|
||||
self.quick_thinking_llm = ChatAnthropic(model=self.config["quick_think_llm"], base_url=self.config["backend_url"])
|
||||
|
|
@ -94,7 +104,7 @@ class TradingAgentsGraph:
|
|||
self.quick_thinking_llm = ChatGoogleGenerativeAI(model=self.config["quick_think_llm"])
|
||||
else:
|
||||
raise ValueError(f"不支援的 LLM 供應商: {self.config['llm_provider']}")
|
||||
|
||||
|
||||
# 初始化記憶體
|
||||
self.bull_memory = FinancialSituationMemory("bull_memory", self.config)
|
||||
self.bear_memory = FinancialSituationMemory("bear_memory", self.config)
|
||||
|
|
|
|||
Loading…
Reference in New Issue