41 lines
1.3 KiB
Python
41 lines
1.3 KiB
Python
from abc import ABC, abstractmethod
|
|
from typing import Any, Optional
|
|
|
|
|
|
def normalize_content(response):
    """Coerce an LLM response's content field into a single plain string.

    Some providers (OpenAI Responses API, Google Gemini 3) return content as
    a list of typed blocks such as
    ``[{'type': 'reasoning', ...}, {'type': 'text', 'text': '...'}]``, while
    downstream agents expect ``response.content`` to be a string. When the
    content is such a list, this collects the text pieces (dict blocks with
    ``type == 'text'`` plus any bare strings), drops reasoning/metadata
    blocks, and joins the non-empty pieces with newlines. Non-list content
    is left untouched. Mutates ``response`` in place and returns it.
    """
    blocks = response.content
    if isinstance(blocks, list):
        parts = []
        for block in blocks:
            if isinstance(block, str):
                # Raw string entries count as text verbatim.
                parts.append(block)
            elif isinstance(block, dict) and block.get("type") == "text":
                parts.append(block.get("text", ""))
            # Any other block (reasoning, metadata, unknown type) is dropped.
        response.content = "\n".join(part for part in parts if part)
    return response
|
|
|
|
|
|
class BaseLLMClient(ABC):
    """Abstract base class for LLM clients.

    Concrete provider-specific clients subclass this and implement
    ``get_llm`` and ``validate_model``. The constructor only records
    configuration; it performs no I/O.
    """

    def __init__(self, model: str, base_url: Optional[str] = None, **kwargs):
        """Record the model name, optional endpoint URL, and extra options.

        Args:
            model: Identifier of the model this client targets.
            base_url: Optional custom API endpoint; ``None`` means the
                provider default.
            **kwargs: Additional provider-specific options, stored as-is
                for subclasses to consume.
        """
        self.model = model
        self.base_url = base_url
        self.kwargs = kwargs

    @abstractmethod
    def get_llm(self) -> Any:
        """Return the configured LLM instance."""
        ...

    @abstractmethod
    def validate_model(self) -> bool:
        """Validate that the model is supported by this client."""
        ...
|