| """ | |
| LLM Provider Factory for Flare | |
| """ | |
| import os | |
| from typing import Optional | |
| from dotenv import load_dotenv | |
| from .llm_interface import LLMInterface | |
| from .llm_spark import SparkLLM | |
| from .llm_openai import OpenAILLM | |
| from config.config_provider import ConfigProvider | |
| from utils.logger import log_info, log_error, log_warning, log_debug | |
class LLMFactory:
    @staticmethod
    def create_provider() -> LLMInterface:
        """Create an LLM provider based on the global configuration"""
        cfg = ConfigProvider.get()
        llm_config = cfg.global_config.llm_provider

        if not llm_config:
            raise ValueError("No LLM provider configured")

        provider_name = llm_config.name
        log_info(f"Creating LLM provider: {provider_name}")

        # Get provider definition
        provider_def = cfg.global_config.get_provider_config("llm", provider_name)
        if not provider_def:
            raise ValueError(f"Unknown LLM provider: {provider_name}")

        # Get API key
        api_key = LLMFactory._get_api_key(provider_name, llm_config.api_key)

        # Create provider based on name
        if provider_name in ("spark", "spark_cloud"):
            return LLMFactory._create_spark_provider(llm_config, api_key, provider_def)
        elif provider_name in ("gpt-4o", "gpt-4o-mini"):
            return LLMFactory._create_gpt_provider(llm_config, api_key, provider_def)
        else:
            raise ValueError(f"Unsupported LLM provider: {provider_name}")
    @staticmethod
    def _create_spark_provider(llm_config, api_key, provider_def):
        """Create a Spark LLM provider"""
        endpoint = llm_config.endpoint
        if not endpoint:
            raise ValueError("Spark endpoint not configured")

        # Determine variant based on environment (SPACE_ID is set on Hugging Face Spaces)
        is_cloud = bool(os.environ.get("SPACE_ID"))
        variant = "hfcloud" if is_cloud else "on-premise"

        return SparkLLM(
            spark_endpoint=endpoint,
            spark_token=api_key,
            provider_variant=variant,
            settings=llm_config.settings
        )
    @staticmethod
    def _create_gpt_provider(llm_config, api_key, provider_def):
        """Create an OpenAI GPT provider"""
        return OpenAILLM(
            api_key=api_key,
            model=llm_config.name,
            settings=llm_config.settings
        )
    @staticmethod
    def _get_api_key(provider_name: str, configured_key: Optional[str]) -> str:
        """Get the API key from config or environment"""
        # First try the configured key
        if configured_key:
            # Handle keys stored encrypted with an "enc:" prefix
            if configured_key.startswith("enc:"):
                from utils.encryption_utils import decrypt
                return decrypt(configured_key)
            return configured_key

        # Then try environment variables
        env_mappings = {
            "spark": "SPARK_TOKEN",
            "gpt-4o": "OPENAI_API_KEY",
            "gpt-4o-mini": "OPENAI_API_KEY"
        }
        env_var = env_mappings.get(provider_name)
        if env_var:
            key = os.environ.get(env_var)
            if key:
                log_info(f"Using API key from environment: {env_var}")
                return key

        raise ValueError(f"No API key found for provider: {provider_name}")