refactor: lazy-import LLM client modules in factory to avoid SDK init at collection time
This commit is contained in:
parent
aae3c99743
commit
051ba93bc3
|
|
@@ -1,10 +1,6 @@
|
|||
from typing import Optional
|
||||
|
||||
from .base_client import BaseLLMClient
|
||||
from .openai_client import OpenAIClient
|
||||
from .anthropic_client import AnthropicClient
|
||||
from .google_client import GoogleClient
|
||||
from .azure_client import AzureOpenAIClient
|
||||
|
||||
# Providers that use the OpenAI-compatible chat completions API
|
||||
_OPENAI_COMPATIBLE = (
|
||||
|
|
@@ -20,6 +16,9 @@ def create_llm_client(
|
|||
) -> BaseLLMClient:
|
||||
"""Create an LLM client for the specified provider.
|
||||
|
||||
Client modules are imported lazily so that collecting tests or importing
|
||||
the package does not trigger heavy LLM SDK initialization.
|
||||
|
||||
Args:
|
||||
provider: LLM provider name
|
||||
model: Model name/identifier
|
||||
|
|
@@ -35,15 +34,19 @@ def create_llm_client(
|
|||
provider_lower = provider.lower()
|
||||
|
||||
if provider_lower in _OPENAI_COMPATIBLE:
|
||||
from .openai_client import OpenAIClient
|
||||
return OpenAIClient(model, base_url, provider=provider_lower, **kwargs)
|
||||
|
||||
if provider_lower == "anthropic":
|
||||
from .anthropic_client import AnthropicClient
|
||||
return AnthropicClient(model, base_url, **kwargs)
|
||||
|
||||
if provider_lower == "google":
|
||||
from .google_client import GoogleClient
|
||||
return GoogleClient(model, base_url, **kwargs)
|
||||
|
||||
if provider_lower == "azure":
|
||||
from .azure_client import AzureOpenAIClient
|
||||
return AzureOpenAIClient(model, base_url, **kwargs)
|
||||
|
||||
raise ValueError(f"Unsupported LLM provider: {provider}")
|
||||
|
|
|
|||
Loading…
Reference in New Issue