refactor(llm_clients): 重构工厂模式实现,添加动态导入和依赖检查
将硬编码的导入改为动态导入,并添加对缺失依赖的检查;为 codex_oauth 添加新的客户端支持;更新 provider 参数的文档说明。
This commit is contained in:
parent
7d9e07bd19
commit
24f53bc237
|
|
@ -1,9 +1,6 @@
|
|||
from typing import Optional
|
||||
|
||||
from .base_client import BaseLLMClient
|
||||
from .openai_client import OpenAIClient
|
||||
from .anthropic_client import AnthropicClient
|
||||
from .google_client import GoogleClient
|
||||
|
||||
|
||||
def create_llm_client(
|
||||
|
|
@ -15,7 +12,7 @@ def create_llm_client(
|
|||
"""Create an LLM client for the specified provider.
|
||||
|
||||
Args:
|
||||
provider: LLM provider (openai, anthropic, google, xai, ollama, openrouter)
|
||||
provider: LLM provider (openai, codex_oauth, anthropic, google, xai, ollama, openrouter)
|
||||
model: Model name/identifier
|
||||
base_url: Optional base URL for API endpoint
|
||||
**kwargs: Additional provider-specific arguments
|
||||
|
|
@ -29,15 +26,57 @@ def create_llm_client(
|
|||
provider_lower = provider.lower()
|
||||
|
||||
if provider_lower in ("openai", "ollama", "openrouter"):
|
||||
try:
|
||||
from .openai_client import OpenAIClient
|
||||
except ModuleNotFoundError as exc:
|
||||
raise ModuleNotFoundError(
|
||||
"Missing dependency for OpenAI-compatible providers. "
|
||||
"Install `langchain-openai`."
|
||||
) from exc
|
||||
|
||||
return OpenAIClient(model, base_url, provider=provider_lower, **kwargs)
|
||||
|
||||
if provider_lower == "codex_oauth":
|
||||
try:
|
||||
from .codex_oauth_client import CodexOAuthClient
|
||||
except ModuleNotFoundError as exc:
|
||||
raise ModuleNotFoundError(
|
||||
"Missing dependency for codex_oauth provider. "
|
||||
"Install `langchain-codex-oauth`."
|
||||
) from exc
|
||||
|
||||
return CodexOAuthClient(model, base_url, **kwargs)
|
||||
|
||||
if provider_lower == "xai":
|
||||
try:
|
||||
from .openai_client import OpenAIClient
|
||||
except ModuleNotFoundError as exc:
|
||||
raise ModuleNotFoundError(
|
||||
"Missing dependency for xAI provider. Install `langchain-openai`."
|
||||
) from exc
|
||||
|
||||
return OpenAIClient(model, base_url, provider="xai", **kwargs)
|
||||
|
||||
if provider_lower == "anthropic":
|
||||
try:
|
||||
from .anthropic_client import AnthropicClient
|
||||
except ModuleNotFoundError as exc:
|
||||
raise ModuleNotFoundError(
|
||||
"Missing dependency for Anthropic provider. "
|
||||
"Install `langchain-anthropic`."
|
||||
) from exc
|
||||
|
||||
return AnthropicClient(model, base_url, **kwargs)
|
||||
|
||||
if provider_lower == "google":
|
||||
try:
|
||||
from .google_client import GoogleClient
|
||||
except ModuleNotFoundError as exc:
|
||||
raise ModuleNotFoundError(
|
||||
"Missing dependency for Google provider. "
|
||||
"Install `langchain-google-genai`."
|
||||
) from exc
|
||||
|
||||
return GoogleClient(model, base_url, **kwargs)
|
||||
|
||||
raise ValueError(f"Unsupported LLM provider: {provider}")
|
||||
|
|
|
|||
Loading…
Reference in New Issue