Add support for vllm

This commit is contained in:
reopio 2025-10-16 07:07:16 +00:00
parent 080ac8892f
commit d9540c3bba
3 changed files with 3 additions and 10 deletions

View File

@@ -153,10 +153,7 @@ def select_shallow_thinking_agent(provider) -> str:
("llama3.1 local", "llama3.1"),
("llama3.2 local", "llama3.2"),
],
"vllm": [
("llama3.1 local", "llama3.1"),
("qwen3", "qwen3"),
]
"vllm": [],
}
if provider == "vllm":
@@ -234,10 +231,7 @@ def select_deep_thinking_agent(provider) -> str:
("llama3.1 local", "llama3.1"),
("qwen3", "qwen3"),
],
"vllm": [
("llama3.1 local", "llama3.1"),
("qwen3", "qwen3"),
]
"vllm": [],
}
if provider == "vllm":

View File

@@ -1,7 +1,6 @@
import chromadb
from chromadb.config import Settings
from openai import OpenAI
import questionary
class FinancialSituationMemory:

View File

@@ -88,7 +88,7 @@ class TradingAgentsGraph:
# Initialize memories
if self.config["llm_provider"] == "vllm":
questionary.text(
self.config["embeddings"] = questionary.text(
"Please input the vllm embedding model name (default: None):",
default="None",
validate=lambda x: len(x.strip()) > 0 or "Please enter a valid embedding model name.",