"""Factory for creating LLM adapters from settings."""

from agno.models.anthropic import Claude
from agno.models.google import Gemini
from agno.models.groq import Groq
from agno.models.mistral.mistral import MistralChat
from agno.models.ollama import Ollama
from agno.models.openai import OpenAIChat, OpenAIResponses

from sql_optimizer_team.application.ports.llm_port import LLMPort
from sql_optimizer_team.domain.domain.exceptions.domain_exceptions import LLMProviderError
from sql_optimizer_team.infrastructure.config.logger import get_logger
from sql_optimizer_team.infrastructure.config.settings import Settings
from sql_optimizer_team.infrastructure.llm.agno_adapter import AgnoLLMAdapter
from sql_optimizer_team.infrastructure.llm.oracle_genai_adapter import OracleGenAIAdapter

# Module-level structured logger; calls below pass context as keyword fields
# (e.g. provider=..., model=...), so the project logger must support them.
logger = get_logger(__name__)
|
|
|
|
|
|
class LLMAdapterFactory:
    """Factory for creating LLM provider adapters.

    This factory creates concrete adapter implementations based on
    application settings, handling provider-specific initialization.

    Examples:
        >>> settings = Settings.from_env()
        >>> adapter = LLMAdapterFactory.create(settings)
        >>> response = await adapter.generate_text("prompt")
    """

    @staticmethod
    def create(settings: Settings) -> LLMPort:
        """Create LLM adapter from settings.

        Args:
            settings: Application settings with LLM configuration

        Returns:
            Configured LLM adapter implementation

        Raises:
            LLMProviderError: If provider is unsupported or configuration is invalid
        """
        provider = settings.llm.provider
        model = settings.effective_model_name

        logger.info("Creating LLM adapter", provider=provider, model=model)

        try:
            # Oracle GenAI uses its own adapter; everything else goes through Agno.
            if provider == "oracle_genai":
                return LLMAdapterFactory._create_oracle_genai(settings)

            return LLMAdapterFactory._create_agno_adapter(settings, model)

        except LLMProviderError:
            # Already a meaningful domain error — propagate untouched rather
            # than wrapping it a second time below.
            raise
        except Exception as e:
            logger.error(
                "Failed to create LLM adapter", provider=provider, error=str(e)
            )
            raise LLMProviderError(f"Failed to create {provider} adapter: {e}") from e

    @staticmethod
    def _create_agno_adapter(settings: Settings, model: str) -> LLMPort:
        """Create Agno-based adapter for supported providers.

        Args:
            settings: Application settings
            model: Model name

        Returns:
            Configured Agno adapter

        Raises:
            LLMProviderError: If the provider is unsupported or a required
                API key is missing
        """
        provider = settings.llm.provider
        api_key = settings.llm.api_key

        def _openai_model_class(model_id: str):
            """Pick the OpenAI model class for *model_id*.

            Newer model families (gpt-5 / o3 / o4-mini) are routed through the
            Responses API; everything else uses the Chat Completions class.
            """
            normalized = model_id.strip().lower()
            if normalized.startswith(("gpt-5", "o3", "o4-mini")):
                return OpenAIResponses
            return OpenAIChat

        # Provider table: model class and the env var expected to supply the
        # API key. A ``None`` env var marks a key-less provider (local Ollama).
        providers = {
            "gemini": (Gemini, "GOOGLE_API_KEY"),
            "openai": (_openai_model_class(model), "OPENAI_API_KEY"),
            "claude": (Claude, "ANTHROPIC_API_KEY"),
            "groq": (Groq, "GROQ_API_KEY"),
            "mistral": (MistralChat, "MISTRAL_API_KEY"),
            "ollama": (Ollama, None),
        }

        model_cls, api_env = providers.get(provider, (None, None))
        if model_cls is None:
            raise LLMProviderError(f"Unsupported LLM provider: {provider}")

        if api_env and not api_key:
            raise LLMProviderError(f"{provider} requires API key ({api_env})")

        # Key-less providers are constructed with the model id alone; this is
        # driven by the table entry (api_env is None) so adding another local
        # provider only needs a new dict row, not a new special case.
        if api_env is None:
            return AgnoLLMAdapter(model_cls(id=model))

        return AgnoLLMAdapter(model_cls(id=model, api_key=api_key))

    @staticmethod
    def _create_oracle_genai(settings: Settings) -> OracleGenAIAdapter:
        """Create Oracle GenAI adapter.

        Args:
            settings: Application settings

        Returns:
            Configured Oracle GenAI adapter

        Raises:
            LLMProviderError: If ``settings.oracle_genai`` is not populated
        """
        if not settings.oracle_genai:
            raise LLMProviderError("Oracle GenAI requires OracleGenAISettings")

        return OracleGenAIAdapter.from_settings(settings.oracle_genai)
|