44 lines
1.3 KiB
Python
44 lines
1.3 KiB
Python
from typing import Optional
|
|
|
|
from .base_client import BaseLLMClient
|
|
from .openai_client import OpenAIClient
|
|
from .anthropic_client import AnthropicClient
|
|
from .google_client import GoogleClient
|
|
|
|
|
|
def create_llm_client(
    provider: str,
    model: str,
    base_url: Optional[str] = None,
    **kwargs,
) -> BaseLLMClient:
    """Create an LLM client for the specified provider.

    Args:
        provider: LLM provider (openai, anthropic, google, xai, ollama, openrouter)
        model: Model name/identifier
        base_url: Optional base URL for API endpoint
        **kwargs: Additional provider-specific arguments

    Returns:
        Configured BaseLLMClient instance

    Raises:
        ValueError: If provider is not supported
    """
    # Normalize once so provider matching is case-insensitive.
    provider_lower = provider.lower()

    # All OpenAI-compatible providers share one client. "xai" previously had
    # its own branch passing provider="xai", which is identical to passing
    # provider=provider_lower here — the duplicate branch has been merged.
    if provider_lower in ("openai", "ollama", "openrouter", "xai"):
        return OpenAIClient(model, base_url, provider=provider_lower, **kwargs)

    if provider_lower == "anthropic":
        return AnthropicClient(model, base_url, **kwargs)

    if provider_lower == "google":
        return GoogleClient(model, base_url, **kwargs)

    # Preserve the caller's original casing in the error message for clarity.
    raise ValueError(f"Unsupported LLM provider: {provider}")
|