Add support for vllm

This commit is contained in:
parent 080ac8892f
commit d9540c3bba

cli/utils.py: 10 lines changed
@@ -153,10 +153,7 @@ def select_shallow_thinking_agent(provider) -> str:
             ("llama3.1 local", "llama3.1"),
             ("llama3.2 local", "llama3.2"),
         ],
-        "vllm": [
-            ("llama3.1 local", "llama3.1"),
-            ("qwen3", "qwen3"),
-        ]
+        "vllm": [],
     }
 
     if provider == "vllm":
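With the hardcoded vllm menu gone, the "if provider == 'vllm':" branch (its body falls outside this hunk) presumably reads the model name as free text rather than offering a fixed list. A minimal sketch of such a prompt follows; it is hypothetical, since the branch body is not shown in this diff, and assumes questionary is available in cli/utils.py:

# Hypothetical sketch of the vllm branch: prompt for a free-text model
# name instead of the fixed menu removed above.
import questionary

def ask_vllm_model() -> str:
    return questionary.text(
        "Please input the vllm model name (default: llama3.1):",
        default="llama3.1",
        validate=lambda x: len(x.strip()) > 0 or "Please enter a valid model name.",
    ).ask()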
@@ -234,10 +231,7 @@ def select_deep_thinking_agent(provider) -> str:
             ("llama3.1 local", "llama3.1"),
             ("qwen3", "qwen3"),
         ],
-        "vllm": [
-            ("llama3.1 local", "llama3.1"),
-            ("qwen3", "qwen3"),
-        ]
+        "vllm": [],
     }
 
     if provider == "vllm":
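select_deep_thinking_agent gets the same treatment as the shallow selector. For context, vLLM exposes an OpenAI-compatible HTTP API, so whatever model name the user enters can be driven through the regular OpenAI client. A sketch, not part of this commit, assuming a server started with "vllm serve <model>" on the default http://localhost:8000/v1 endpoint:

# Sketch: talking to a vLLM server through the OpenAI client.
# Assumes "vllm serve <model>" is running on the default port 8000.
from openai import OpenAI

client = OpenAI(
    base_url="http://localhost:8000/v1",
    api_key="not-needed",  # vLLM ignores the key unless one was configured
)
resp = client.chat.completions.create(
    model="llama3.1",  # the name entered at the CLI prompt
    messages=[{"role": "user", "content": "ping"}],
)
print(resp.choices[0].message.content)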
@@ -1,7 +1,6 @@
 import chromadb
 from chromadb.config import Settings
 from openai import OpenAI
-import questionary
 
 
 class FinancialSituationMemory:
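The questionary import leaves memory.py; prompting now happens in TradingAgentsGraph (next hunk), and FinancialSituationMemory keeps only its OpenAI client for embeddings. A sketch of the embedding call that client supports, assuming the model name arrives through config; the helper below is illustrative, not code from this commit:

# Illustrative helper: fetch one embedding vector through the
# OpenAI-compatible embeddings endpoint (vLLM also serves /v1/embeddings
# when launched with an embedding model).
from openai import OpenAI

def get_embedding(client: OpenAI, model: str, text: str) -> list[float]:
    response = client.embeddings.create(model=model, input=text)
    return response.data[0].embedding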
@@ -88,7 +88,7 @@ class TradingAgentsGraph:
 
         # Initialize memories
         if self.config["llm_provider"] == "vllm":
-            questionary.text(
+            self.config["embeddings"] = questionary.text(
                 "Please input the vllm embedding model name (default: None):",
                 default="None",
                 validate=lambda x: len(x.strip()) > 0 or "Please enter a valid embedding model name.",
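The hunk ends mid-statement. Note that questionary.text(...) by itself returns a Question object, so the assignment only yields a string once the call chain closes with .ask(), which presumably happens just past the hunk boundary. The complete pattern, as a self-contained sketch:

# Self-contained sketch of the full prompt: .ask() runs the prompt and
# returns the entered string (questionary.text alone builds a Question).
import questionary

embedding_model = questionary.text(
    "Please input the vllm embedding model name (default: None):",
    default="None",
    validate=lambda x: len(x.strip()) > 0 or "Please enter a valid embedding model name.",
).ask()
print(embedding_model)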