feat: add OpenRouter API credentials
This commit is contained in:
parent
21f1cb1782
commit
bf4269e2fe
|
|
@ -86,6 +86,11 @@ export OPENAI_API_KEY=your_openai_key_here
|
|||
export GOOGLE_API_KEY=your_google_api_key_here
|
||||
```
|
||||
|
||||
### For OpenRouter
|
||||
```bash
|
||||
export OPENROUTER_API_KEY=your_openrouter_api_key_here
|
||||
```
|
||||
|
||||
## Error Handling Flow
|
||||
|
||||
1. **Agent Tool Called** → Online LLM function invoked
|
||||
|
|
|
|||
|
|
@ -124,6 +124,11 @@ You will need the OpenAI API for all the agents.
|
|||
export OPENAI_API_KEY=$YOUR_OPENAI_API_KEY
|
||||
```
|
||||
|
||||
If you plan to use OpenRouter as your LLM provider, you'll also need:
|
||||
```bash
|
||||
export OPENROUTER_API_KEY=$YOUR_OPENROUTER_API_KEY
|
||||
```
|
||||
|
||||
### CLI Usage
|
||||
|
||||
You can also try out the CLI directly by running:
|
||||
|
|
|
|||
|
|
@ -817,24 +817,35 @@ def _call_llm_api(prompt, config):
|
|||
raise ValueError(error_msg) from e
|
||||
|
||||
else:
|
||||
# Use OpenAI (default)
|
||||
# Use OpenAI-compatible providers (OpenAI, OpenRouter, Ollama)
|
||||
import os
|
||||
from openai import OpenAI, AuthenticationError, RateLimitError, NotFoundError
|
||||
|
||||
# Check if API key is available
|
||||
api_key = os.getenv("OPENAI_API_KEY")
|
||||
if not api_key:
|
||||
raise ValueError(
|
||||
"❌ OPENAI_API_KEY environment variable is not set.\n"
|
||||
"Please set your OpenAI API key:\n"
|
||||
"export OPENAI_API_KEY=your_key_here"
|
||||
)
|
||||
# Check if API key is available based on provider
|
||||
if provider.lower() == "openrouter":
|
||||
api_key = os.getenv("OPENROUTER_API_KEY")
|
||||
if not api_key:
|
||||
raise ValueError(
|
||||
"❌ OPENROUTER_API_KEY environment variable is not set.\n"
|
||||
"Please set your OpenRouter API key:\n"
|
||||
"export OPENROUTER_API_KEY=your_openrouter_key_here\n"
|
||||
"Get your key from: https://openrouter.ai/keys"
|
||||
)
|
||||
else:
|
||||
# Default to OpenAI
|
||||
api_key = os.getenv("OPENAI_API_KEY")
|
||||
if not api_key:
|
||||
raise ValueError(
|
||||
"❌ OPENAI_API_KEY environment variable is not set.\n"
|
||||
"Please set your OpenAI API key:\n"
|
||||
"export OPENAI_API_KEY=your_key_here"
|
||||
)
|
||||
|
||||
model = config["quick_think_llm"]
|
||||
valid_models = _get_valid_models("openai")
|
||||
|
||||
try:
|
||||
client = OpenAI(base_url=config["backend_url"])
|
||||
client = OpenAI(base_url=config["backend_url"], api_key=api_key)
|
||||
response = client.chat.completions.create(
|
||||
model=model,
|
||||
messages=[
|
||||
|
|
|
|||
|
|
@ -59,8 +59,36 @@ class TradingAgentsGraph:
|
|||
|
||||
# Initialize LLMs
|
||||
if self.config["llm_provider"].lower() == "openai" or self.config["llm_provider"] == "ollama" or self.config["llm_provider"] == "openrouter":
|
||||
self.deep_thinking_llm = ChatOpenAI(model=self.config["deep_think_llm"], base_url=self.config["backend_url"])
|
||||
self.quick_thinking_llm = ChatOpenAI(model=self.config["quick_think_llm"], base_url=self.config["backend_url"])
|
||||
# Handle API key based on provider
|
||||
api_key = None
|
||||
if self.config["llm_provider"].lower() == "openrouter":
|
||||
api_key = os.getenv("OPENROUTER_API_KEY")
|
||||
if not api_key:
|
||||
raise ValueError(
|
||||
"❌ OPENROUTER_API_KEY environment variable is not set.\n"
|
||||
"Please set your OpenRouter API key:\n"
|
||||
"export OPENROUTER_API_KEY=your_openrouter_key_here\n"
|
||||
"Get your key from: https://openrouter.ai/keys"
|
||||
)
|
||||
elif self.config["llm_provider"].lower() == "openai":
|
||||
api_key = os.getenv("OPENAI_API_KEY")
|
||||
if not api_key:
|
||||
raise ValueError(
|
||||
"❌ OPENAI_API_KEY environment variable is not set.\n"
|
||||
"Please set your OpenAI API key:\n"
|
||||
"export OPENAI_API_KEY=your_openai_key_here"
|
||||
)
|
||||
|
||||
self.deep_thinking_llm = ChatOpenAI(
|
||||
model=self.config["deep_think_llm"],
|
||||
base_url=self.config["backend_url"],
|
||||
api_key=api_key
|
||||
)
|
||||
self.quick_thinking_llm = ChatOpenAI(
|
||||
model=self.config["quick_think_llm"],
|
||||
base_url=self.config["backend_url"],
|
||||
api_key=api_key
|
||||
)
|
||||
elif self.config["llm_provider"].lower() == "anthropic":
|
||||
self.deep_thinking_llm = ChatAnthropic(model=self.config["deep_think_llm"], base_url=self.config["backend_url"])
|
||||
self.quick_thinking_llm = ChatAnthropic(model=self.config["quick_think_llm"], base_url=self.config["backend_url"])
|
||||
|
|
|
|||
Loading…
Reference in New Issue