Spaces:
Paused
Paused
| """Admin API endpoints for Flare (Refactored) | |
| ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
| Provides authentication, project, version, and API management endpoints with provider support. | |
| """ | |
| import os | |
| import time | |
| import threading | |
| import hashlib | |
| import bcrypt | |
| from typing import Optional, Dict, List, Any | |
| from datetime import datetime, timedelta, timezone | |
| from fastapi import APIRouter, HTTPException, Depends, Query, Response, Body | |
| from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials | |
| from pydantic import BaseModel, Field | |
| import httpx | |
| from functools import wraps | |
| from utils.utils import verify_token, create_token, get_current_timestamp | |
| from config.config_provider import ConfigProvider | |
| from utils.logger import log_info, log_error, log_warning, log_debug | |
| from utils.exceptions import ( | |
| FlareException, | |
| RaceConditionError, | |
| ValidationError, | |
| ResourceNotFoundError, | |
| AuthenticationError, | |
| AuthorizationError, | |
| DuplicateResourceError | |
| ) | |
| from config.config_models import VersionConfig, IntentConfig, LLMConfiguration | |
# ===================== Constants & Config =====================
# Bearer-token auth scheme; used by the verify_token dependency on protected endpoints.
security = HTTPBearer()
# Router collecting all admin endpoints; tagged "admin" for OpenAPI grouping.
router = APIRouter(tags=["admin"])
| # ===================== Decorators ===================== | |
def handle_exceptions(func):
    """Decorator that standardizes error handling for async endpoints.

    - HTTPException: re-raised untouched so FastAPI returns it as-is.
    - FlareException: re-raised for the app's global exception handlers.
    - Anything else: logged and converted to an HTTP 500 response.
    """
    @wraps(func)  # BUG FIX: preserve __name__/__doc__ (wraps was imported but never applied)
    async def wrapper(*args, **kwargs):
        try:
            return await func(*args, **kwargs)
        except HTTPException:
            # Pass HTTPExceptions through unchanged
            raise
        except FlareException:
            # Let global handlers deal with our custom exceptions
            raise
        except Exception as e:
            # Log and convert unexpected exceptions to HTTP 500
            log_error(f"❌ Unexpected error in {func.__name__}", e)
            raise HTTPException(status_code=500, detail=str(e))
    return wrapper
| # ===================== Models ===================== | |
class LoginRequest(BaseModel):
    """Request body for login: plain-text credentials to verify."""
    username: str
    password: str
class LoginResponse(BaseModel):
    """Successful-login payload: session token plus the authenticated username."""
    token: str
    username: str
class ChangePasswordRequest(BaseModel):
    """Request body for changing the authenticated user's password."""
    current_password: str
    new_password: str
class ProviderSettingsUpdate(BaseModel):
    """One provider selection (LLM/TTS/STT) inside an environment update."""
    name: str  # provider identifier, e.g. "gpt-4o" or "elevenlabs"
    api_key: Optional[str] = None  # required only when the provider definition demands it
    endpoint: Optional[str] = None  # required only for endpoint-based providers
    settings: Dict[str, Any] = Field(default_factory=dict)  # provider-specific options
class EnvironmentUpdate(BaseModel):
    """Full environment update: one provider per modality, plus optional
    parameter-collection settings."""
    llm_provider: ProviderSettingsUpdate
    tts_provider: ProviderSettingsUpdate
    stt_provider: ProviderSettingsUpdate
    parameter_collection_config: Optional[Dict[str, Any]] = None
class ProjectCreate(BaseModel):
    """Request body for creating a project (defaults target the Turkish locale)."""
    name: str
    caption: Optional[str] = ""
    icon: Optional[str] = "folder"
    description: Optional[str] = ""
    default_locale: str = "tr"
    supported_locales: List[str] = Field(default_factory=lambda: ["tr"])
    timezone: str = "Europe/Istanbul"
    region: str = "tr-TR"
class ProjectUpdate(BaseModel):
    """Request body for updating a project; `name` is immutable and absent here."""
    caption: str
    icon: Optional[str] = "folder"
    description: Optional[str] = ""
    default_locale: str = "tr"
    supported_locales: List[str] = Field(default_factory=lambda: ["tr"])
    timezone: str = "Europe/Istanbul"
    region: str = "tr-TR"
    last_update_date: str  # optimistic-locking token: client echoes the last seen update date
class VersionCreate(BaseModel):
    """Request body for creating a version."""
    caption: str
    # When set, the new version is presumably copied from this existing
    # version number — TODO confirm against ConfigProvider.create_version.
    source_version_no: int | None = None
class IntentModel(BaseModel):
    """Intent definition payload used inside VersionUpdate."""
    name: str
    caption: Optional[str] = ""
    detection_prompt: str
    # LocalizedExample format.
    # Field(default_factory=...) replaces the mutable literal defaults,
    # matching the convention used elsewhere in this file (e.g. APICreate.retry).
    examples: List[Dict[str, str]] = Field(default_factory=list)
    parameters: List[Dict[str, Any]] = Field(default_factory=list)
    action: str
    fallback_timeout_prompt: Optional[str] = None
    fallback_error_prompt: Optional[str] = None
class VersionUpdate(BaseModel):
    """Full payload for updating a version's prompt, LLM config and intents."""
    caption: str
    general_prompt: str
    llm: Dict[str, Any]
    intents: List[IntentModel]
    last_update_date: str  # optimistic-locking token (compared server-side)
class APICreate(BaseModel):
    """Request body for creating an external API definition."""
    name: str
    url: str
    method: str = "POST"
    # Field(default_factory=...) for all mutable defaults, matching the
    # existing style of the `retry` field below.
    headers: Dict[str, str] = Field(default_factory=dict)
    body_template: Dict[str, Any] = Field(default_factory=dict)
    timeout_seconds: int = 10
    retry: Dict[str, Any] = Field(default_factory=lambda: {"retry_count": 3, "backoff_seconds": 2, "strategy": "static"})
    proxy: Optional[str] = None
    auth: Optional[Dict[str, Any]] = None
    response_prompt: Optional[str] = None
    response_mappings: List[Dict[str, Any]] = Field(default_factory=list)
class APIUpdate(BaseModel):
    """Full payload for updating an API definition.

    All connection fields are required; `last_update_date` is the
    optimistic-locking token echoed back by the client.
    """
    url: str
    method: str
    headers: Dict[str, str]
    body_template: Dict[str, Any]
    timeout_seconds: int
    retry: Dict[str, Any]
    proxy: Optional[str]  # required field, but may be null
    auth: Optional[Dict[str, Any]]
    response_prompt: Optional[str]
    # default_factory avoids a shared mutable default (consistent with APICreate)
    response_mappings: List[Dict[str, Any]] = Field(default_factory=list)
    last_update_date: str
class TestRequest(BaseModel):
    """Request body for starting a test run."""
    test_type: str  # "all", "ui", "backend", "integration", "spark"
| # ===================== Auth Endpoints ===================== | |
async def login(request: LoginRequest):
    """User login endpoint.

    Verifies credentials against the configured users and returns a session
    token. Supports both bcrypt (new) and SHA256 (legacy) password hashes.

    Raises:
        HTTPException 401: unknown user or wrong password.
    """
    cfg = ConfigProvider.get()
    # Find user
    user = next((u for u in cfg.global_config.users if u.username == request.username), None)
    if not user:
        raise HTTPException(status_code=401, detail="Invalid credentials")
    # Verify password - try bcrypt first (new format), then SHA256 (legacy)
    password_valid = False
    try:
        if user.password_hash.startswith(("$2b$", "$2a$")):
            password_valid = bcrypt.checkpw(
                request.password.encode('utf-8'),
                user.password_hash.encode('utf-8')
            )
    except (ValueError, TypeError):
        # BUG FIX: was a bare `except:` that also swallowed SystemExit /
        # KeyboardInterrupt. bcrypt raises ValueError on malformed hashes;
        # fall through to the SHA256 check below.
        pass
    if not password_valid:
        sha256_hash = hashlib.sha256(request.password.encode('utf-8')).hexdigest()
        password_valid = (user.password_hash == sha256_hash)
    if not password_valid:
        raise HTTPException(status_code=401, detail="Invalid credentials")
    # Create token
    token = create_token(request.username)
    log_info(f"✅ User '{request.username}' logged in successfully")
    return LoginResponse(token=token, username=request.username)
async def change_password(
    request: ChangePasswordRequest,
    username: str = Depends(verify_token)
):
    """Change user password.

    Verifies the current password (bcrypt or legacy SHA256), then stores a
    fresh bcrypt hash and persists the config.

    Raises:
        HTTPException 404: authenticated username no longer exists.
        HTTPException 401: current password does not match.
    """
    cfg = ConfigProvider.get()
    # Find user
    user = next((u for u in cfg.global_config.users if u.username == username), None)
    if not user:
        raise HTTPException(status_code=404, detail="User not found")
    # Verify current password - try bcrypt first (new format), then SHA256 (legacy)
    password_valid = False
    try:
        if user.password_hash.startswith(("$2b$", "$2a$")):
            password_valid = bcrypt.checkpw(
                request.current_password.encode('utf-8'),
                user.password_hash.encode('utf-8')
            )
    except (ValueError, TypeError):
        # BUG FIX: was a bare `except:` that also swallowed SystemExit /
        # KeyboardInterrupt; bcrypt raises ValueError on malformed hashes.
        pass
    if not password_valid:
        sha256_hash = hashlib.sha256(request.current_password.encode('utf-8')).hexdigest()
        password_valid = (user.password_hash == sha256_hash)
    if not password_valid:
        raise HTTPException(status_code=401, detail="Current password is incorrect")
    # Always store new passwords with bcrypt
    salt = bcrypt.gensalt()
    new_hash = bcrypt.hashpw(request.new_password.encode('utf-8'), salt)
    user.password_hash = new_hash.decode('utf-8')
    user.salt = salt.decode('utf-8')
    # Save configuration via ConfigProvider
    ConfigProvider.save(cfg, username)
    log_info(f"✅ Password changed for user '{username}'")
    return {"success": True}
| # ===================== Locales Endpoints ===================== | |
async def get_available_locales(username: str = Depends(verify_token)):
    """Return every locale the system supports plus the system default."""
    from config.locale_manager import LocaleManager
    return {
        "locales": LocaleManager.get_available_locales_with_names(),
        "default": LocaleManager.get_default_locale(),
    }
async def get_locale_details(
    locale_code: str,
    username: str = Depends(verify_token)
):
    """Return detailed information for one locale; 404 if it is unknown."""
    from config.locale_manager import LocaleManager
    details = LocaleManager.get_locale_details(locale_code)
    if not details:
        raise HTTPException(status_code=404, detail=f"Locale '{locale_code}' not found")
    return details
| # ===================== Environment Endpoints ===================== | |
# Fallback provider catalogue returned when the stored config predates the
# provider-based layout. Copied per-request so callers cannot mutate it.
_DEFAULT_PROVIDERS = [
    {
        "type": "llm",
        "name": "spark_cloud",
        "display_name": "Spark LLM (Cloud)",
        "requires_endpoint": True,
        "requires_api_key": True,
        "requires_repo_info": False
    },
    {
        "type": "llm",
        "name": "gpt-4o",
        "display_name": "GPT-4o",
        "requires_endpoint": True,
        "requires_api_key": True,
        "requires_repo_info": False
    },
    {
        "type": "llm",
        "name": "gpt-4o-mini",
        "display_name": "GPT-4o Mini",
        "requires_endpoint": True,
        "requires_api_key": True,
        "requires_repo_info": False
    },
    {
        "type": "tts",
        "name": "no_tts",
        "display_name": "No TTS",
        "requires_endpoint": False,
        "requires_api_key": False,
        "requires_repo_info": False
    },
    {
        "type": "tts",
        "name": "elevenlabs",
        "display_name": "ElevenLabs",
        "requires_endpoint": False,
        "requires_api_key": True,
        "requires_repo_info": False
    },
    {
        "type": "stt",
        "name": "no_stt",
        "display_name": "No STT",
        "requires_endpoint": False,
        "requires_api_key": False,
        "requires_repo_info": False
    },
    {
        "type": "stt",
        "name": "google",
        "display_name": "Google Cloud STT",
        "requires_endpoint": False,
        "requires_api_key": True,
        "requires_repo_info": False
    }
]

def _legacy_stt_provider(env_config) -> Dict[str, Any]:
    """Build an stt_provider dict from the legacy flat config fields."""
    return {
        "name": getattr(env_config, 'stt_engine', 'no_stt'),
        "api_key": getattr(env_config, 'stt_engine_api_key', None) or "",
        "endpoint": None,
        "settings": getattr(env_config, 'stt_settings', {})
    }

async def get_environment(username: str = Depends(verify_token)):
    """Get environment configuration with provider info.

    Supports both the provider-based config layout and the legacy flat
    fields (stt_engine / stt_engine_api_key / stt_settings) for backward
    compatibility. Refactored: default data extracted to module level.
    """
    cfg = ConfigProvider.get()
    env_config = cfg.global_config
    response = {}
    # LLM / TTS providers are only present in provider-based configs
    # (no legacy fallback existed for them in the original either).
    if hasattr(env_config, 'llm_provider'):
        response["llm_provider"] = env_config.llm_provider
    if hasattr(env_config, 'tts_provider'):
        response["tts_provider"] = env_config.tts_provider
    # STT provider, with legacy fallback
    if hasattr(env_config, 'stt_provider'):
        response["stt_provider"] = env_config.stt_provider
    else:
        response["stt_provider"] = _legacy_stt_provider(env_config)
    # Provider catalogue
    if hasattr(env_config, 'providers'):
        response["providers"] = list(env_config.providers)
    else:
        # Shallow copies: entries hold only flat str/bool values
        response["providers"] = [p.copy() for p in _DEFAULT_PROVIDERS]
    # Parameter collection config (with defaults for older configs)
    if hasattr(env_config, 'parameter_collection_config'):
        response["parameter_collection_config"] = env_config.parameter_collection_config
    else:
        response["parameter_collection_config"] = {
            "max_params_per_question": 2,
            "retry_unanswered": True,
            "smart_grouping": True,
            "collection_prompt": "You are a helpful assistant collecting information from the user..."
        }
    return response
def _validate_provider_update(cfg, provider_type: str, upd: ProviderSettingsUpdate, check_endpoint: bool = False):
    """Validate one provider selection against the configured provider defs.

    Raises HTTPException 400 when the provider is unknown or a required
    API key / endpoint is missing. Returns the provider definition.
    """
    pdef = cfg.global_config.get_provider_config(provider_type, upd.name)
    if not pdef:
        raise HTTPException(status_code=400, detail=f"Unknown {provider_type.upper()} provider: {upd.name}")
    if pdef.requires_api_key and not upd.api_key:
        raise HTTPException(status_code=400, detail=f"{pdef.display_name} requires API key")
    if check_endpoint and pdef.requires_endpoint and not upd.endpoint:
        raise HTTPException(status_code=400, detail=f"{pdef.display_name} requires endpoint")
    return pdef

async def update_environment(
    update: EnvironmentUpdate,
    username: str = Depends(verify_token)
):
    """Update environment configuration with provider validation.

    Refactored: the three near-identical validation stanzas are collapsed
    into _validate_provider_update (same checks, same error messages).
    """
    log_info(f"📝 Updating environment config by {username}")
    cfg = ConfigProvider.get()
    # Endpoint presence was only enforced for the LLM provider originally.
    _validate_provider_update(cfg, "llm", update.llm_provider, check_endpoint=True)
    _validate_provider_update(cfg, "tts", update.tts_provider)
    _validate_provider_update(cfg, "stt", update.stt_provider)
    # Update via ConfigProvider
    ConfigProvider.update_environment(update.model_dump(), username)
    log_info(f"✅ Environment updated to LLM: {update.llm_provider.name}, TTS: {update.tts_provider.name}, STT: {update.stt_provider.name} by {username}")
    return {"success": True}
| # ===================== Project Endpoints ===================== | |
async def list_enabled_projects():
    """Get list of enabled project names for chat"""
    names = []
    for project in ConfigProvider.get().projects:
        if project.enabled and not getattr(project, 'deleted', False):
            names.append(project.name)
    return names
async def list_projects(
    include_deleted: bool = False,
    username: str = Depends(verify_token)
):
    """List all projects, optionally including soft-deleted ones."""
    cfg = ConfigProvider.get()
    if include_deleted:
        visible = cfg.projects
    else:
        visible = [p for p in cfg.projects if not getattr(p, 'deleted', False)]
    return [p.model_dump() for p in visible]
async def get_project(
    project_id: int,
    username: str = Depends(verify_token)
):
    """Get single project by ID"""
    project = ConfigProvider.get_project(project_id)
    # Soft-deleted projects are treated as missing.
    missing = not project or getattr(project, 'deleted', False)
    if missing:
        raise HTTPException(status_code=404, detail="Project not found")
    return project.model_dump()
async def create_project(
    project: ProjectCreate,
    username: str = Depends(verify_token)
):
    """Create new project with initial version"""
    from config.locale_manager import LocaleManager
    # Reject any locale the system does not know about.
    invalid_locales = LocaleManager.validate_project_languages(project.supported_locales)
    if invalid_locales:
        available_codes = [
            loc['code'] for loc in LocaleManager.get_available_locales_with_names()
        ]
        raise HTTPException(
            status_code=400,
            detail=f"Unsupported locales: {', '.join(invalid_locales)}. Available locales: {', '.join(available_codes)}"
        )
    # The default locale must itself be one of the supported locales.
    if project.default_locale not in project.supported_locales:
        raise HTTPException(
            status_code=400,
            detail="Default locale must be one of the supported locales"
        )
    log_debug(f"🔍 Creating project '{project.name}' with default_locale: {project.default_locale}")
    new_project = ConfigProvider.create_project(project.model_dump(), username)
    # Trace the auto-created initial version for debugging.
    if new_project.versions:
        first = new_project.versions[0]
        log_debug(f"🔍 Initial version created - no: {first.no}, published: {first.published}, type: {type(first.published)}")
    log_info(f"✅ Project '{project.name}' created by {username}")
    return new_project.model_dump()
async def update_project(
    project_id: int,
    update: ProjectUpdate,
    username: str = Depends(verify_token)
):
    """Update existing project with race condition handling"""
    log_info(f"🔍 Update request for project {project_id} by {username}")
    log_info(f"🔍 Received last_update_date: {update.last_update_date}")
    # Log the server-side state to help diagnose optimistic-lock conflicts.
    current = ConfigProvider.get_project(project_id)
    if current:
        log_info(f"🔍 Current project last_update_date: {current.last_update_date}")
        log_info(f"🔍 Current project last_update_user: {current.last_update_user}")
    # Optimistic locking: the provider rejects writes with a stale date.
    result = ConfigProvider.update_project(
        project_id,
        update.model_dump(),
        username,
        expected_last_update=update.last_update_date,
    )
    log_info(f"✅ Project {project_id} updated by {username}")
    return result
async def delete_project(project_id: int, username: str = Depends(verify_token)):
    """Soft-delete a project via the config provider."""
    # The provider flags the project as deleted rather than removing it.
    ConfigProvider.delete_project(project_id, username)
    log_info(f"✅ Project deleted by {username}")
    return {"success": True}
async def toggle_project(project_id: int, username: str = Depends(verify_token)):
    """Flip the project's enabled flag and report the new state."""
    enabled = ConfigProvider.toggle_project(project_id, username)
    state = 'enabled' if enabled else 'disabled'
    log_info(f"✅ Project {state} by {username}")
    return {"enabled": enabled}
| # ===================== Import/Export Endpoints ===================== | |
async def export_project(
    project_id: int,
    username: str = Depends(verify_token)
):
    """Export project as JSON"""
    project = ConfigProvider.get_project(project_id)
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")
    # Only non-deleted versions are included in the export.
    exported_versions = [
        {
            "caption": v.caption,
            "description": getattr(v, 'description', ''),
            "general_prompt": v.general_prompt,
            "welcome_prompt": getattr(v, 'welcome_prompt', None),
            "llm": v.llm.model_dump() if v.llm else {},
            "intents": [intent.model_dump() for intent in v.intents],
        }
        for v in project.versions
        if not getattr(v, 'deleted', False)
    ]
    export_data = {
        "name": project.name,
        "caption": project.caption,
        "icon": project.icon,
        "description": project.description,
        "default_locale": project.default_locale,
        "supported_locales": project.supported_locales,
        "timezone": project.timezone,
        "region": project.region,
        "versions": exported_versions,
    }
    log_info(f"✅ Project '{project.name}' exported by {username}")
    return export_data
async def import_project(
    project_data: dict = Body(...),
    username: str = Depends(verify_token)
):
    """Import project from JSON (as produced by export_project).

    Creates the project, then replaces the auto-created initial version
    with the imported versions (all marked unpublished).

    Raises:
        HTTPException 400: missing project name.
        HTTPException 409: a live project with the same name already exists.
    """
    # Validate required fields
    if not project_data.get('name'):
        raise HTTPException(status_code=400, detail="Project name is required")
    # Check for duplicate name.
    # BUG FIX: use getattr(p, 'deleted', False) like the rest of the file, so
    # projects lacking a `deleted` attribute don't raise AttributeError here.
    cfg = ConfigProvider.get()
    if any(p.name == project_data['name'] for p in cfg.projects if not getattr(p, 'deleted', False)):
        raise HTTPException(
            status_code=409,
            detail=f"Project with name '{project_data['name']}' already exists"
        )
    # Create the project shell with defaults for any missing fields
    new_project_data = {
        "name": project_data['name'],
        "caption": project_data.get('caption', project_data['name']),
        "icon": project_data.get('icon', 'folder'),
        "description": project_data.get('description', ''),
        "default_locale": project_data.get('default_locale', 'tr'),
        "supported_locales": project_data.get('supported_locales', ['tr']),
        "timezone": project_data.get('timezone', 'Europe/Istanbul'),
        "region": project_data.get('region', 'tr-TR')
    }
    new_project = ConfigProvider.create_project(new_project_data, username)
    # Import versions
    if 'versions' in project_data and project_data['versions']:
        # Remove the initial version that was auto-created
        if new_project.versions:
            new_project.versions.clear()
        # Add imported versions
        for idx, version_data in enumerate(project_data['versions']):
            version = VersionConfig(
                no=idx + 1,
                caption=version_data.get('caption', f'Version {idx + 1}'),
                description=version_data.get('description', ''),
                published=False,  # Imported versions are unpublished
                deleted=False,
                general_prompt=version_data.get('general_prompt', ''),
                welcome_prompt=version_data.get('welcome_prompt'),
                llm=LLMConfiguration(**version_data.get('llm', {
                    'repo_id': '',
                    'generation_config': {
                        'max_new_tokens': 512,
                        'temperature': 0.7,
                        'top_p': 0.9
                    },
                    'use_fine_tune': False,
                    'fine_tune_zip': ''
                })),
                intents=[IntentConfig(**intent) for intent in version_data.get('intents', [])],
                created_date=get_current_timestamp(),
                created_by=username
            )
            new_project.versions.append(version)
        # Next version number to hand out
        new_project.version_id_counter = len(new_project.versions) + 1
        # NOTE(review): saving the `cfg` snapshot fetched above assumes
        # create_project mutated the same in-memory config object — confirm.
        ConfigProvider.save(cfg, username)
    log_info(f"✅ Project '{new_project.name}' imported by {username}")
    return {"success": True, "project_id": new_project.id, "project_name": new_project.name}
| # ===================== Version Endpoints ===================== | |
async def list_versions(
    project_id: int,
    include_deleted: bool = False,
    username: str = Depends(verify_token)
):
    """List project versions"""
    project = ConfigProvider.get_project(project_id)
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")
    result = []
    for version in project.versions:
        # Soft-deleted versions are skipped unless explicitly requested.
        if include_deleted or not getattr(version, 'deleted', False):
            result.append(version.model_dump())
    return result
async def create_version(
    project_id: int,
    version_data: VersionCreate,
    username: str = Depends(verify_token)
):
    """Create new version"""
    created = ConfigProvider.create_version(project_id, version_data.model_dump(), username)
    log_info(f"✅ Version created for project {project_id} by {username}")
    return created.model_dump()
async def update_version(
    project_id: int,
    version_no: int,
    update: VersionUpdate,
    force: bool = Query(default=False, description="Force update despite conflicts"),
    username: str = Depends(verify_token)
):
    """Update version with race condition handling"""
    log_debug(f"🔍 Version update request - project: {project_id}, version: {version_no}, user: {username}")
    if force:
        log_warning(f"⚠️ Force update requested for version {version_no} by {username}")
    # force=True skips the optimistic-lock check by passing no expected date.
    expected = None if force else update.last_update_date
    result = ConfigProvider.update_version(
        project_id,
        version_no,
        update.model_dump(),
        username,
        expected_last_update=expected,
    )
    log_info(f"✅ Version {version_no} updated by {username}")
    return result
async def publish_version(
    project_id: int,
    version_no: int,
    username: str = Depends(verify_token)
):
    """Publish version"""
    project, version = ConfigProvider.publish_version(project_id, version_no, username)
    log_info(f"✅ Version {version_no} published for project '{project.name}' by {username}")
    # Only enabled projects whose LLM provider needs repo info get a startup ping.
    cfg = ConfigProvider.get()
    provider_def = cfg.global_config.get_provider_config("llm", cfg.global_config.llm_provider.name)
    should_notify = project.enabled and provider_def and provider_def.requires_repo_info
    if should_notify:
        try:
            await notify_llm_startup(project, version)
        except Exception as e:
            # A failed notification must not fail the publish itself.
            log_error(f"⚠️ Failed to notify LLM provider", e)
    return {"success": True}
async def delete_version(
    project_id: int,
    version_no: int,
    username: str = Depends(verify_token)
):
    """Soft-delete a single version of a project."""
    ConfigProvider.delete_version(project_id, version_no, username)
    log_info(f"✅ Version {version_no} deleted for project {project_id} by {username}")
    return {"success": True}
async def get_project_versions(
    project_name: str,
    username: str = Depends(verify_token)
):
    """Get all versions of a project for testing"""
    cfg = ConfigProvider.get()
    project = next((p for p in cfg.projects if p.name == project_name), None)
    if project is None:
        raise HTTPException(status_code=404, detail=f"Project '{project_name}' not found")
    # Summarize each live (non-deleted) version with its publish status.
    versions = [
        {
            "version_number": v.no,
            "caption": v.caption,
            "published": v.published,
            "description": getattr(v, 'description', ''),
            "intent_count": len(v.intents),
            "created_date": getattr(v, 'created_date', None),
            "is_current": v.published  # the published version is the current one
        }
        for v in project.versions
        if not getattr(v, 'deleted', False)
    ]
    return {
        "project_name": project_name,
        "project_caption": project.caption,
        "versions": versions
    }
async def compare_versions(
    project_id: int,
    version1_no: int,
    version2_no: int,
    username: str = Depends(verify_token)
):
    """Compare two versions and return differences.

    Returns a dict describing whether the general prompt changed and which
    intents were added, removed, or modified between the two versions.

    Raises:
        HTTPException 404: unknown project or version number.
    """
    project = ConfigProvider.get_project(project_id)
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")
    v1 = next((v for v in project.versions if v.no == version1_no), None)
    v2 = next((v for v in project.versions if v.no == version2_no), None)
    if not v1 or not v2:
        raise HTTPException(status_code=404, detail="Version not found")
    # Deep comparison
    differences = {
        'general_prompt': {
            'changed': v1.general_prompt != v2.general_prompt,
            'v1': v1.general_prompt,
            'v2': v2.general_prompt
        },
        'intents': {
            'added': [],
            'removed': [],
            'modified': []
        }
    }
    # Index intents by name for set-style comparison
    v1_intents = {i.name: i for i in v1.intents}
    v2_intents = {i.name: i for i in v2.intents}
    # Find added/removed
    differences['intents']['added'] = list(set(v2_intents.keys()) - set(v1_intents.keys()))
    differences['intents']['removed'] = list(set(v1_intents.keys()) - set(v2_intents.keys()))
    # Find modified
    for intent_name in set(v1_intents.keys()) & set(v2_intents.keys()):
        i1, i2 = v1_intents[intent_name], v2_intents[intent_name]
        if i1.model_dump() != i2.model_dump():
            differences['intents']['modified'].append({
                'name': intent_name,
                # NOTE(review): compare_intent_details is not defined in this
                # module — presumably provided elsewhere; confirm it exists.
                'differences': compare_intent_details(i1, i2)
            })
    # BUG FIX: the original logged undefined names version1_id/version2_id
    # (the parameters are version1_no/version2_no), raising NameError.
    log_info(
        f"Version comparison performed",
        user=username,
        project_id=project_id,
        version1_no=version1_no,
        version2_no=version2_no
    )
    return differences
| # ===================== API Endpoints ===================== | |
async def list_apis(
    include_deleted: bool = False,
    username: str = Depends(verify_token)
):
    """List all APIs"""
    result = []
    for api in ConfigProvider.get().apis:
        # Soft-deleted APIs are skipped unless explicitly requested.
        if include_deleted or not getattr(api, 'deleted', False):
            result.append(api.model_dump())
    return result
async def create_api(api: APICreate, username: str = Depends(verify_token)):
    """Create new API"""
    try:
        created = ConfigProvider.create_api(api.model_dump(), username)
    except DuplicateResourceError as e:
        # Map duplicate-name errors to HTTP 409 Conflict
        raise HTTPException(status_code=409, detail=str(e))
    log_info(f"✅ API '{api.name}' created by {username}")
    return created.model_dump()
async def update_api(
    api_name: str,
    update: APIUpdate,
    username: str = Depends(verify_token)
):
    """Update an API definition with optimistic-lock conflict handling."""
    payload = update.model_dump()
    # The provider compares expected_last_update with the stored record
    # and rejects stale writes.
    result = ConfigProvider.update_api(
        api_name,
        payload,
        username,
        expected_last_update=update.last_update_date,
    )
    log_info(f"✅ API '{api_name}' updated by {username}")
    return result
async def delete_api(api_name: str, username: str = Depends(verify_token)):
    """Soft-delete the named API definition."""
    # The provider flags the API as deleted rather than removing it.
    ConfigProvider.delete_api(api_name, username)
    log_info(f"✅ API '{api_name}' deleted by {username}")
    return {"success": True}
async def validate_regex(
    request: dict = Body(...),
    username: str = Depends(verify_token)
):
    """Validate a regex pattern and test it against an optional value.

    Matching uses re.match, i.e. it is anchored at the start of the value.
    An invalid pattern is reported as valid=False rather than raised.
    """
    pattern = request.get("pattern", "")
    test_value = request.get("test_value", "")
    import re
    try:
        compiled_regex = re.compile(pattern)
    except re.error as e:
        # BUG FIX: an invalid pattern is a normal outcome for a validation
        # endpoint — report it instead of letting re.error bubble up as a 500.
        return {
            "valid": False,
            "matches": False,
            "pattern": pattern,
            "test_value": test_value,
            "error": str(e)
        }
    matches = bool(compiled_regex.match(test_value))
    return {
        "valid": True,
        "matches": matches,
        "pattern": pattern,
        "test_value": test_value
    }
| # ===================== Test Endpoints ===================== | |
async def run_all_tests(
    request: TestRequest,
    username: str = Depends(verify_token)
):
    """Run all tests"""
    log_info(f"🧪 Running {request.test_type} tests requested by {username}")
    # TODO: Implement test runner — this currently returns mock results.
    run_id = "test_" + datetime.now().isoformat()
    return {
        "test_run_id": run_id,
        "status": "running",
        "total_tests": 60,
        "completed": 0,
        "passed": 0,
        "failed": 0,
        "message": "Test run started"
    }
async def get_test_status(
    test_run_id: str,
    username: str = Depends(verify_token)
):
    """Get test run status"""
    # TODO: Implement test status tracking — mock data for now.
    status = {"test_run_id": test_run_id, "status": "completed"}
    status.update({
        "total_tests": 60,
        "completed": 60,
        "passed": 57,
        "failed": 3,
        "duration": 340.5,
        "details": []
    })
    return status
| # ===================== Activity Log ===================== | |
async def get_activity_log(
    limit: int = Query(100, ge=1, le=1000),
    entity_type: Optional[str] = None,
    username: str = Depends(verify_token)
):
    """Get activity log"""
    entries = ConfigProvider.get().activity_log
    # Optional filter on the entity type
    if entity_type:
        entries = [entry for entry in entries if entry.entity_type == entity_type]
    # Most recent entries live at the tail of the log.
    return entries[-limit:]
| # ===================== Helper Functions ===================== | |
async def notify_llm_startup(project, version):
    """Notify LLM provider about project startup"""
    from llm.llm_factory import LLMFactory
    try:
        provider = LLMFactory.create_provider()
        # Assemble the startup payload from the published version's LLM config.
        startup_config = {
            "name": project.name,
            "version_no": version.no,
            "repo_id": version.llm.repo_id,
            "generation_config": version.llm.generation_config,
            "use_fine_tune": version.llm.use_fine_tune,
            "fine_tune_zip": version.llm.fine_tune_zip,
        }
        if await provider.startup(startup_config):
            log_info(f"✅ LLM provider notified for project '{project.name}'")
        else:
            log_info(f"⚠️ LLM provider notification failed for project '{project.name}'")
    except Exception as e:
        log_error("❌ Error notifying LLM provider", e)
        raise
| # ===================== Cleanup Task ===================== | |
def cleanup_activity_log():
    """Background loop that prunes activity-log entries older than 30 days.

    Runs forever — intended for a daemon thread (see start_cleanup_task) —
    and sleeps one hour between passes. Errors are logged and swallowed so
    one failed pass does not kill the thread.
    """
    while True:
        try:
            cfg = ConfigProvider.get()
            # Keep only last 30 days.
            # NOTE(review): a naive local-time cutoff is compared
            # lexicographically against str(log.timestamp) — assumes the
            # timestamps are ISO-8601 strings in the same (naive) timezone;
            # TODO confirm against the activity-log writer.
            cutoff = datetime.now() - timedelta(days=30)
            cutoff_str = cutoff.isoformat()
            original_count = len(cfg.activity_log)
            # Entries without a timestamp attribute are dropped as well.
            cfg.activity_log = [
                log for log in cfg.activity_log
                if hasattr(log, 'timestamp') and str(log.timestamp) >= cutoff_str
            ]
            if len(cfg.activity_log) < original_count:
                removed = original_count - len(cfg.activity_log)
                log_info(f"🧹 Cleaned up {removed} old activity log entries")
                # Persist the pruned log as the "system" user
                ConfigProvider.save(cfg, "system")
        except Exception as e:
            log_error("❌ Activity log cleanup error", e)
        # Run every hour
        time.sleep(3600)
def start_cleanup_task():
    """Start the cleanup task in background"""
    # Daemon thread: it must not keep the process alive on shutdown.
    worker = threading.Thread(target=cleanup_activity_log, daemon=True)
    worker.start()
    log_info("🧹 Activity log cleanup task started")