	| """Admin API endpoints for Flare | |
| ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
| Provides authentication, project, version, and API management endpoints. | |
| """ | |
| import os | |
| import sys | |
| import hashlib | |
| import json | |
| import jwt | |
| import httpx | |
| from datetime import datetime, timedelta, timezone | |
| from typing import Optional, List, Dict, Any | |
| from pathlib import Path | |
| import threading | |
| import time | |
| import bcrypt | |
| from fastapi import APIRouter, HTTPException, Depends, Body, Query | |
| from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials | |
| from pydantic import BaseModel, Field | |
| from utils import log | |
# ===================== JWT Config =====================
def get_jwt_config():
    """Get JWT configuration based on environment"""
    work_mode = os.getenv("WORK_MODE", "on-premise")
    if work_mode == "hfcloud":
        # Cloud mode - use secrets from environment
        jwt_secret = os.getenv("JWT_SECRET")
        if not jwt_secret:
            log("⚠️ WARNING: JWT_SECRET not found in environment, using fallback")
            jwt_secret = "flare-admin-secret-key-change-in-production"  # Fallback
    else:
        # On-premise mode - use .env file
        from dotenv import load_dotenv
        load_dotenv()
        jwt_secret = os.getenv("JWT_SECRET", "flare-admin-secret-key-change-in-production")
    return {
        "secret": jwt_secret,
        "algorithm": os.getenv("JWT_ALGORITHM", "HS256"),
        "expiration_hours": int(os.getenv("JWT_EXPIRATION_HOURS", "24"))
    }
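# Example environment for an on-premise deployment (a sketch; the values are
# hypothetical and would normally live in the local .env file):
#
#   JWT_SECRET=replace-with-a-long-random-string
#   JWT_ALGORITHM=HS256
#   JWT_EXPIRATION_HOURS=24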
# ===================== Constants & Config =====================
router = APIRouter(prefix="/api")
security = HTTPBearer()
# ===================== Models =====================
class LoginRequest(BaseModel):
    username: str
    password: str

class LoginResponse(BaseModel):
    token: str
    username: str

class ChangePasswordRequest(BaseModel):
    current_password: str
    new_password: str

class EnvironmentUpdate(BaseModel):
    work_mode: str
    cloud_token: Optional[str] = None
    spark_endpoint: str
    internal_prompt: Optional[str] = None
    tts_engine: str = "no_tts"
    tts_engine_api_key: Optional[str] = None
    tts_settings: Optional[Dict[str, Any]] = None
    stt_engine: str = "no_stt"
    stt_engine_api_key: Optional[str] = None
    stt_settings: Optional[Dict[str, Any]] = None

class ProjectCreate(BaseModel):
    name: str
    caption: Optional[str] = ""
    icon: Optional[str] = "folder"
    description: Optional[str] = ""
    default_language: str = "Turkish"
    supported_languages: List[str] = Field(default_factory=lambda: ["tr"])
    timezone: str = "Europe/Istanbul"
    region: str = "tr-TR"

class ProjectUpdate(BaseModel):
    caption: str
    icon: Optional[str] = "folder"
    description: Optional[str] = ""
    default_language: str = "Turkish"
    supported_languages: List[str] = Field(default_factory=lambda: ["tr"])
    timezone: str = "Europe/Istanbul"
    region: str = "tr-TR"
    last_update_date: str

class VersionCreate(BaseModel):
    caption: str
    source_version_id: int | None = None  # None → empty template

class IntentModel(BaseModel):
    name: str
    caption: Optional[str] = ""
    locale: str = "tr-TR"
    detection_prompt: str
    examples: List[str] = []
    parameters: List[Dict[str, Any]] = []
    action: str
    fallback_timeout_prompt: Optional[str] = None
    fallback_error_prompt: Optional[str] = None

class VersionUpdate(BaseModel):
    caption: str
    general_prompt: str
    llm: Dict[str, Any]
    intents: List[IntentModel]
    last_update_date: str

class APICreate(BaseModel):
    name: str
    url: str
    method: str = "POST"
    headers: Dict[str, str] = {}
    body_template: Dict[str, Any] = {}
    timeout_seconds: int = 10
    retry: Dict[str, Any] = Field(default_factory=lambda: {"retry_count": 3, "backoff_seconds": 2, "strategy": "static"})
    proxy: Optional[str] = None
    auth: Optional[Dict[str, Any]] = None
    response_prompt: Optional[str] = None
    response_mappings: List[Dict[str, Any]] = []

class APIUpdate(BaseModel):
    url: str
    method: str
    headers: Dict[str, str]
    body_template: Dict[str, Any]
    timeout_seconds: int
    retry: Dict[str, Any]
    proxy: Optional[str]
    auth: Optional[Dict[str, Any]]
    response_prompt: Optional[str]
    response_mappings: List[Dict[str, Any]] = []
    last_update_date: str

class TestRequest(BaseModel):
    test_type: str  # "all", "ui", "backend", "integration", "spark"
class TTSRequest(BaseModel):
    text: str
    voice_id: Optional[str] = None
    model_id: Optional[str] = None
    output_format: Optional[str] = "mp3_44100_128"

# ===================== Helpers =====================
def verify_token(credentials: HTTPAuthorizationCredentials = Depends(security)) -> str:
    """Verify JWT token and return username"""
    jwt_config = get_jwt_config()
    try:
        payload = jwt.decode(
            credentials.credentials,
            jwt_config["secret"],
            algorithms=[jwt_config["algorithm"]]
        )
        username = payload.get("sub")
        if username is None:
            raise HTTPException(status_code=401, detail="Invalid token")
        return username
    except jwt.ExpiredSignatureError:
        raise HTTPException(status_code=401, detail="Token expired")
    except jwt.InvalidTokenError:  # Catches all remaining JWT validation errors
        raise HTTPException(status_code=401, detail="Invalid token")
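# Usage sketch: any handler can require a valid bearer token simply by
# depending on verify_token. The route below is purely illustrative and is not
# part of this module:
#
#   @router.get("/whoami")
#   async def whoami(username: str = Depends(verify_token)):
#       return {"username": username}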
def hash_password(password: str, salt: str = None) -> tuple[str, str]:
    """Hash password with bcrypt.
    Returns (hashed_password, salt)"""
    if salt is None:
        salt = bcrypt.gensalt().decode('utf-8')
    # Ensure salt is bytes
    salt_bytes = salt.encode('utf-8') if isinstance(salt, str) else salt
    # Hash the password
    hashed = bcrypt.hashpw(password.encode('utf-8'), salt_bytes)
    return hashed.decode('utf-8'), salt

def verify_password(password: str, hashed: str, salt: str = None) -> bool:
    """Verify password against hash"""
    try:
        # For bcrypt hashes (they contain salt)
        if hashed.startswith('$2b$') or hashed.startswith('$2a$'):
            return bcrypt.checkpw(password.encode('utf-8'), hashed.encode('utf-8'))
        # For legacy SHA256 hashes
        return hashlib.sha256(password.encode()).hexdigest() == hashed
    except Exception as e:
        log(f"Password verification error: {e}")
        return False
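# Round-trip sketch (hypothetical values): bcrypt embeds the salt inside the
# hash, so verification only needs the stored hash; the separate salt value is
# kept for compatibility with legacy SHA256 entries.
#
#   hashed, salt = hash_password("s3cret")
#   assert verify_password("s3cret", hashed)
#   assert not verify_password("wrong", hashed)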
def load_config():
    """Load service_config.jsonc"""
    config_path = Path("service_config.jsonc")
    if not config_path.exists():
        return {"config": {}, "projects": [], "apis": []}
    with open(config_path, 'r', encoding='utf-8') as f:
        content = f.read()
    # Remove comments for JSON parsing
    lines = []
    for line in content.split('\n'):
        stripped = line.strip()
        if not stripped.startswith('//'):
            lines.append(line)
    clean_content = '\n'.join(lines)
    return json.loads(clean_content)

def save_config(config: dict):
    """Save config back to service_config.jsonc"""
    with open("service_config.jsonc", 'w', encoding='utf-8') as f:
        json.dump(config, f, indent=2, ensure_ascii=False)

def get_timestamp():
    """Get current timestamp in ISO format with milliseconds"""
    return datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z"
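# Note: load_config() drops full-line // comments before parsing, and
# save_config() writes plain JSON, so comments in service_config.jsonc do not
# survive a save. get_timestamp() produces values such as
# "2025-01-15T10:30:45.123Z" (an illustrative value, not from a real run).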
def add_activity_log(config: dict, username: str, action: str,
                     entity_type: str, entity_id: Any, entity_name: str,
                     details: str = ""):
    """Add activity log entry"""
    if "activity_log" not in config:
        config["activity_log"] = []
    # Get next ID (avoid shadowing the imported `log` helper)
    log_id = max([entry.get("id", 0) for entry in config["activity_log"]], default=0) + 1
    config["activity_log"].append({
        "id": log_id,
        "timestamp": get_timestamp(),
        "user": username,
        "action": action,
        "entity_type": entity_type,
        "entity_id": entity_id,
        "entity_name": entity_name,
        "details": details
    })
    # Keep only last 1000 entries
    if len(config["activity_log"]) > 1000:
        config["activity_log"] = config["activity_log"][-1000:]
async def _spark_project_control(action: str, project_name: str, username: str):
    """Common function for Spark project control"""
    if not project_name:
        raise HTTPException(status_code=400, detail="project_name is required")
    config = load_config()
    spark_endpoint = config.get("config", {}).get("spark_endpoint", "").rstrip("/")
    spark_token = _get_spark_token()
    if not spark_endpoint:
        raise HTTPException(status_code=400, detail="Spark endpoint not configured")
    if not spark_token:
        raise HTTPException(status_code=400, detail="Spark token not configured")
    headers = {
        "Authorization": f"Bearer {spark_token}",
        "Content-Type": "application/json"
    }
    try:
        async with httpx.AsyncClient(timeout=30) as client:
            # Every action (enable/disable/delete) is sent as a POST request
            response = await client.post(
                f"{spark_endpoint}/project/{action}",
                json={"project_name": project_name},
                headers=headers
            )
            response.raise_for_status()
            return response.json()
    except httpx.HTTPStatusError as e:
        error_detail = e.response.json() if e.response.text else {"error": str(e)}
        raise HTTPException(status_code=e.response.status_code, detail=error_detail)
    except Exception as e:
        log(f"❌ Spark {action} failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
def _get_spark_token() -> Optional[str]:
    """Get Spark token based on work_mode"""
    config = load_config()
    work_mode = config.get("config", {}).get("work_mode", "on-premise")
    if work_mode in ("hfcloud", "cloud"):
        # Cloud mode - use HuggingFace Secrets
        token = os.getenv("SPARK_TOKEN")
        if not token:
            log("❌ SPARK_TOKEN not found in HuggingFace Secrets!")
        return token
    else:
        # On-premise mode - use .env file
        from dotenv import load_dotenv
        load_dotenv()
        return os.getenv("SPARK_TOKEN")
async def notify_spark_manual(project: dict, version: dict, global_config: dict):
    """Manual Spark notification (similar to notify_spark but returns response)"""
    import httpx
    spark_endpoint = global_config.get("spark_endpoint", "").rstrip("/")
    spark_token = _get_spark_token()
    if not spark_endpoint:
        raise ValueError("Spark endpoint not configured")
    if not spark_token:
        raise ValueError("Spark token not configured")
    work_mode = global_config.get("work_mode", "hfcloud")
    cloud_token = global_config.get("cloud_token", "")
    # Decrypt token if needed
    if cloud_token and cloud_token.startswith("enc:"):
        from encryption_utils import decrypt
        cloud_token = decrypt(cloud_token)
    payload = {
        "work_mode": work_mode,
        "cloud_token": cloud_token,
        "project_name": project["name"],
        "project_version": version["id"],
        "repo_id": version["llm"]["repo_id"],
        "generation_config": version["llm"]["generation_config"],
        "use_fine_tune": version["llm"]["use_fine_tune"],
        "fine_tune_zip": version["llm"]["fine_tune_zip"] if version["llm"]["use_fine_tune"] else None
    }
    headers = {
        "Authorization": f"Bearer {spark_token}",
        "Content-Type": "application/json"
    }
    log(f"🚀 Manually notifying Spark about {project['name']} v{version['id']}")
    async with httpx.AsyncClient(timeout=30) as client:
        response = await client.post(spark_endpoint + "/startup", json=payload, headers=headers)
        response.raise_for_status()
        result = response.json()
        log(f"✅ Spark manual notification successful: {result.get('message', 'OK')}")
        return result
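# The /startup payload sent above looks roughly like this; the field values are
# illustrative, the real ones come from the published version's llm block:
#
#   {
#       "work_mode": "hfcloud",
#       "cloud_token": "<decrypted or empty>",
#       "project_name": "demo-project",
#       "project_version": 3,
#       "repo_id": "org/model",
#       "generation_config": {"max_new_tokens": 512, "temperature": 0.7},
#       "use_fine_tune": False,
#       "fine_tune_zip": None
#   }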
# ===================== Auth Endpoints =====================
async def login(request: LoginRequest):
    """Authenticate user and return JWT token"""
    config = load_config()
    users = config.get("config", {}).get("users", [])
    # Find user
    user = next((u for u in users if u["username"] == request.username), None)
    if not user:
        raise HTTPException(status_code=401, detail="Invalid credentials")
    # Verify password
    if not verify_password(request.password, user["password_hash"], user.get("salt")):
        raise HTTPException(status_code=401, detail="Invalid credentials")
    # Generate JWT token
    jwt_config = get_jwt_config()
    payload = {
        "sub": request.username,
        "exp": datetime.now(timezone.utc) + timedelta(hours=jwt_config["expiration_hours"])
    }
    token = jwt.encode(payload, jwt_config["secret"], algorithm=jwt_config["algorithm"])
    log(f"✅ User '{request.username}' logged in")
    return LoginResponse(token=token, username=request.username)

async def change_password(
    request: ChangePasswordRequest,
    username: str = Depends(verify_token)
):
    """Change user password"""
    config = load_config()
    users = config.get("config", {}).get("users", [])
    # Find user
    user = next((u for u in users if u["username"] == username), None)
    if not user:
        raise HTTPException(status_code=404, detail="User not found")
    # Verify current password
    if not verify_password(request.current_password, user["password_hash"], user.get("salt")):
        raise HTTPException(status_code=401, detail="Current password is incorrect")
    # Hash new password
    new_hash, new_salt = hash_password(request.new_password)
    user["password_hash"] = new_hash
    user["salt"] = new_salt
    # Save config
    save_config(config)
    log(f"✅ Password changed for user '{username}'")
    return {"success": True}

# ===================== Environment Endpoints =====================
async def get_environment(username: str = Depends(verify_token)):
    """Get environment configuration"""
    config = load_config()
    env_config = config.get("config", {})
    return {
        "work_mode": env_config.get("work_mode", "on-premise"),
        "cloud_token": env_config.get("cloud_token", ""),
        "spark_endpoint": env_config.get("spark_endpoint", "http://localhost:7861"),
        "internal_prompt": env_config.get("internal_prompt", ""),
        "tts_engine": env_config.get("tts_engine", "no_tts"),
        "tts_engine_api_key": env_config.get("tts_engine_api_key", ""),
        "tts_settings": env_config.get("tts_settings", {
            "use_ssml": False
        }),
        "stt_engine": env_config.get("stt_engine", "no_stt"),
        "stt_engine_api_key": env_config.get("stt_engine_api_key", ""),
        "stt_settings": env_config.get("stt_settings", {
            "speech_timeout_ms": 2000,
            "noise_reduction_level": 2,
            "vad_sensitivity": 0.5,
            "language": "tr-TR",
            "model": "latest_long",
            "use_enhanced": True,
            "enable_punctuation": True,
            "interim_results": True
        })
    }
async def update_environment(
    update: EnvironmentUpdate,
    username: str = Depends(verify_token)
):
    """Update environment configuration"""
    config = load_config()
    # Token validation based on mode
    if update.work_mode in ("gpt4o", "gpt4o-mini"):
        if not update.cloud_token:
            raise HTTPException(status_code=400, detail="OpenAI API key is required for GPT modes")
        if not update.cloud_token.startswith("sk-") and not update.cloud_token.startswith("enc:"):
            raise HTTPException(status_code=400, detail="Invalid OpenAI API key format")
    elif update.work_mode in ("hfcloud", "cloud"):
        if not update.cloud_token:
            raise HTTPException(status_code=400, detail="Cloud token is required for cloud modes")
    # Debug log - check the incoming values
    log(f"📥 Received TTS engine: {update.tts_engine}")
    log(f"📥 Received TTS key: {'***' + update.tts_engine_api_key[-4:] if update.tts_engine_api_key else 'None'}")
    # TTS/STT validation
    if update.tts_engine not in ("no_tts", "elevenlabs", "blaze"):
        raise HTTPException(status_code=400, detail="Invalid TTS engine")
    if update.stt_engine not in ("no_stt", "google", "azure", "amazon", "gpt4o_realtime", "flicker"):
        raise HTTPException(status_code=400, detail="Invalid STT engine")
    if update.tts_engine != "no_tts" and not update.tts_engine_api_key:
        raise HTTPException(status_code=400, detail=f"{update.tts_engine} API key is required")
    if update.stt_engine != "no_stt" and not update.stt_engine_api_key:
        raise HTTPException(status_code=400, detail=f"{update.stt_engine} API key or credentials required")
    # Spark endpoint validation
    if update.work_mode not in ("gpt4o", "gpt4o-mini") and not update.spark_endpoint:
        raise HTTPException(status_code=400, detail="Spark endpoint is required for non-GPT modes")
    # Encrypt API keys if needed
    from encryption_utils import encrypt
    # TTS key encryption debug
    if update.tts_engine_api_key:
        encrypted_tts_key = encrypt(update.tts_engine_api_key)
        log(f"🔐 Encrypted TTS key: {encrypted_tts_key[:20]}...")
    else:
        encrypted_tts_key = ""
        log("⚠️ No TTS key to encrypt")
    if update.tts_settings:
        config["config"]["tts_settings"] = update.tts_settings
    if update.stt_settings:
        config["config"]["stt_settings"] = update.stt_settings
    # Update config
    config["config"]["work_mode"] = update.work_mode
    config["config"]["cloud_token"] = update.cloud_token or ""
    config["config"]["spark_endpoint"] = update.spark_endpoint
    config["config"]["internal_prompt"] = update.internal_prompt or ""
    config["config"]["tts_engine"] = update.tts_engine
    config["config"]["tts_engine_api_key"] = encrypted_tts_key  # Use the already-encrypted value directly
    config["config"]["stt_engine"] = update.stt_engine
    config["config"]["stt_engine_api_key"] = encrypt(update.stt_engine_api_key) if update.stt_engine_api_key else ""
    config["config"]["last_update_date"] = get_timestamp()
    config["config"]["last_update_user"] = username
    # Sanity check before saving
    log(f"💾 Config before save - TTS key: {config['config'].get('tts_engine_api_key', 'NOT SET')[:20]}...")
    # Add activity log
    add_activity_log(config, username, "UPDATE_ENVIRONMENT", "config", None,
                     "environment", f"Changed to {update.work_mode}, TTS: {update.tts_engine}, STT: {update.stt_engine}")
    # Save
    save_config(config)
    log(f"✅ Environment updated to {update.work_mode} with TTS: {update.tts_engine}, STT: {update.stt_engine} by {username}")
    return {"success": True}
# ===================== Project Endpoints =====================
def list_enabled_projects():
    """Get list of enabled project names for chat"""
    cfg = load_config()
    projects = cfg.get("projects", [])
    return [p["name"] for p in projects if p.get("enabled", False) and not p.get("deleted", False)]

async def list_projects(
    include_deleted: bool = False,
    username: str = Depends(verify_token)
):
    """List all projects"""
    config = load_config()
    projects = config.get("projects", [])
    # Filter deleted if needed
    if not include_deleted:
        projects = [p for p in projects if not p.get("deleted", False)]
    return projects

async def get_project(
    project_id: int,
    username: str = Depends(verify_token)
):
    """Get single project by ID"""
    try:
        config = load_config()
        projects = config.get("projects", [])
        project = next((p for p in projects if p.get("id") == project_id), None)
        if not project or project.get("deleted", False):
            raise HTTPException(status_code=404, detail="Project not found")
        return project
    except HTTPException:
        raise
    except Exception as e:
        log(f"Failed to get project: {e}")
        raise HTTPException(status_code=500, detail=str(e))
# POST /api/projects
async def create_project(
    project_data: ProjectCreate,
    username: str = Depends(verify_token)
):
    """Create a new project with an initial empty version"""
    cfg = load_config()
    # New project ID
    project_id = cfg["config"].get("project_id_counter", 0) + 1
    cfg["config"]["project_id_counter"] = project_id
    # Project body - with the new fields
    new_project = {
        "id": project_id,
        "name": project_data.name,
        "caption": project_data.caption,
        "icon": project_data.icon,
        "description": project_data.description,
        "default_language": project_data.default_language,
        "supported_languages": project_data.supported_languages,
        "timezone": project_data.timezone,
        "region": project_data.region,
        "enabled": False,
        "deleted": False,
        "created_date": get_timestamp(),
        "created_by": username,
        "last_update_date": get_timestamp(),
        "last_update_user": username,
        # Version counters
        "version_id_counter": 1,
        # Initial version
        "versions": [{
            "id": 1,
            "no": 1,
            "caption": "Version 1",
            "description": "Initial version",
            "published": False,
            "llm": {
                "repo_id": "",
                "generation_config": {
                    "max_new_tokens": 512,
                    "temperature": 0.7,
                    "top_p": 0.95,
                    "top_k": 50,
                    "repetition_penalty": 1.1
                },
                "use_fine_tune": False,
                "fine_tune_zip": ""
            },
            "intents": [],
            "parameters": [],
            "created_date": get_timestamp(),
            "created_by": username,
            "last_update_date": get_timestamp(),
            "last_update_user": username,
            "deleted": False,
            "publish_date": None,
            "published_by": None
        }]
    }
    cfg.setdefault("projects", []).append(new_project)
    add_activity_log(cfg, username, "CREATE_PROJECT",
                     "project", project_id, new_project["name"])
    save_config(cfg)
    return new_project
async def update_project(
    project_id: int,
    update: ProjectUpdate,
    username: str = Depends(verify_token)
):
    """Update project"""
    config = load_config()
    # Find project
    project = next((p for p in config.get("projects", []) if p["id"] == project_id), None)
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")
    # Check race condition
    if project.get("last_update_date") != update.last_update_date:
        raise HTTPException(status_code=409, detail="Project was modified by another user")
    # Update - with the new fields
    project["caption"] = update.caption
    project["icon"] = update.icon
    project["description"] = update.description
    project["default_language"] = update.default_language
    project["supported_languages"] = update.supported_languages
    project["timezone"] = update.timezone
    project["region"] = update.region
    project["last_update_date"] = get_timestamp()
    project["last_update_user"] = username
    # Add activity log
    add_activity_log(config, username, "UPDATE_PROJECT", "project", project_id, project["name"])
    # Save
    save_config(config)
    log(f"✅ Project '{project['name']}' updated by {username}")
    return project
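# Optimistic-locking sketch: clients echo back the last_update_date they read,
# and the update is rejected with 409 if the stored value has changed in the
# meantime. Illustrative flow (values are hypothetical):
#
#   read project -> {"id": 3, "last_update_date": "2025-01-15T10:30:45.123Z", ...}
#   send update with last_update_date="2025-01-15T10:30:45.123Z"
#   -> accepted if unchanged, 409 "Project was modified by another user" otherwise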
async def delete_project(project_id: int, username: str = Depends(verify_token)):
    """Delete project (soft delete)"""
    config = load_config()
    # Find project
    project = next((p for p in config.get("projects", []) if p["id"] == project_id), None)
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")
    # Soft delete
    project["deleted"] = True
    project["last_update_date"] = get_timestamp()
    project["last_update_user"] = username
    # Add activity log
    add_activity_log(config, username, "DELETE_PROJECT", "project", project_id, project["name"])
    # Save
    save_config(config)
    log(f"✅ Project '{project['name']}' deleted by {username}")
    return {"success": True}

async def toggle_project(project_id: int, username: str = Depends(verify_token)):
    """Toggle project enabled status"""
    config = load_config()
    # Find project
    project = next((p for p in config.get("projects", []) if p["id"] == project_id), None)
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")
    # Toggle
    project["enabled"] = not project.get("enabled", False)
    project["last_update_date"] = get_timestamp()
    project["last_update_user"] = username
    # Add activity log
    action = "ENABLE_PROJECT" if project["enabled"] else "DISABLE_PROJECT"
    add_activity_log(config, username, action, "project", project_id, project["name"])
    # Save
    save_config(config)
    log(f"✅ Project '{project['name']}' {'enabled' if project['enabled'] else 'disabled'} by {username}")
    return {"enabled": project["enabled"]}

# ===================== Version Endpoints =====================
async def list_versions(
    project_id: int,
    include_deleted: bool = False,
    username: str = Depends(verify_token)
):
    """List project versions"""
    config = load_config()
    # Find project
    project = next((p for p in config.get("projects", []) if p["id"] == project_id), None)
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")
    versions = project.get("versions", [])
    # Filter deleted if needed
    if not include_deleted:
        versions = [v for v in versions if not v.get("deleted", False)]
    return versions

async def create_version(
    project_id: int,
    version_data: VersionCreate,
    username: str = Depends(verify_token)
):
    """Create new version"""
    config = load_config()
    # Find project
    project = next((p for p in config.get("projects", []) if p["id"] == project_id), None)
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")
    # Get next version ID
    version_id = project.get("version_id_counter", 0) + 1
    project["version_id_counter"] = version_id
    # Get next version number
    existing_versions = [v for v in project.get("versions", []) if not v.get("deleted", False)]
    version_no = max([v.get("no", 0) for v in existing_versions], default=0) + 1
    # Create base version
    new_version = {
        "id": version_id,
        "no": version_no,
        "caption": version_data.caption,
        "description": f"Version {version_no}",
        "published": False,
        "deleted": False,
        "created_date": get_timestamp(),
        "created_by": username,
        "last_update_date": get_timestamp(),
        "last_update_user": username,
        "publish_date": None,
        "published_by": None
    }
    # Copy from source version if specified
    if version_data.source_version_id:
        source_version = next(
            (v for v in project.get("versions", []) if v["id"] == version_data.source_version_id),
            None
        )
        if source_version:
            # Copy configuration from source
            new_version.update({
                "general_prompt": source_version.get("general_prompt", ""),
                "llm": source_version.get("llm", {}).copy(),
                "intents": [intent.copy() for intent in source_version.get("intents", [])],
                "parameters": [param.copy() for param in source_version.get("parameters", [])]
            })
    else:
        # Empty template
        new_version.update({
            "general_prompt": "",
            "llm": {
                "repo_id": "",
                "generation_config": {
                    "max_new_tokens": 512,
                    "temperature": 0.7,
                    "top_p": 0.95,
                    "top_k": 50,
                    "repetition_penalty": 1.1
                },
                "use_fine_tune": False,
                "fine_tune_zip": ""
            },
            "intents": [],
            "parameters": []
        })
    # Add to project
    if "versions" not in project:
        project["versions"] = []
    project["versions"].append(new_version)
    # Update project timestamp
    project["last_update_date"] = get_timestamp()
    project["last_update_user"] = username
    # Add activity log
    add_activity_log(config, username, "CREATE_VERSION", "version", version_id,
                     f"{project['name']} v{version_no}")
    # Save
    save_config(config)
    log(f"✅ Version {version_no} created for project '{project['name']}' by {username}")
    return new_version

async def update_version(
    project_id: int,
    version_id: int,
    update: VersionUpdate,
    username: str = Depends(verify_token)
):
    """Update version"""
    config = load_config()
    # Find project and version
    project = next((p for p in config.get("projects", []) if p["id"] == project_id), None)
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")
    version = next((v for v in project.get("versions", []) if v["id"] == version_id), None)
    if not version:
        raise HTTPException(status_code=404, detail="Version not found")
    # Check race condition
    if version.get("last_update_date") != update.last_update_date:
        raise HTTPException(status_code=409, detail="Version was modified by another user")
    # Cannot update published version
    if version.get("published", False):
        raise HTTPException(status_code=400, detail="Cannot modify published version")
    # Update version
    version["caption"] = update.caption
    version["general_prompt"] = update.general_prompt
    version["llm"] = update.llm
    version["intents"] = [intent.dict() for intent in update.intents]
    version["last_update_date"] = get_timestamp()
    version["last_update_user"] = username
    # Update project timestamp
    project["last_update_date"] = get_timestamp()
    project["last_update_user"] = username
    # Add activity log
    add_activity_log(config, username, "UPDATE_VERSION", "version", version_id,
                     f"{project['name']} v{version['no']}")
    # Save
    save_config(config)
    log(f"✅ Version {version['no']} updated for project '{project['name']}' by {username}")
    return version

async def publish_version(
    project_id: int,
    version_id: int,
    username: str = Depends(verify_token)
):
    """Publish version"""
    config = load_config()
    # Find project and version
    project = next((p for p in config.get("projects", []) if p["id"] == project_id), None)
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")
    version = next((v for v in project.get("versions", []) if v["id"] == version_id), None)
    if not version:
        raise HTTPException(status_code=404, detail="Version not found")
    # Unpublish all other versions
    for v in project.get("versions", []):
        if v["id"] != version_id:
            v["published"] = False
    # Publish this version
    version["published"] = True
    version["publish_date"] = get_timestamp()
    version["published_by"] = username
    version["last_update_date"] = get_timestamp()
    version["last_update_user"] = username
    # Update project timestamp
    project["last_update_date"] = get_timestamp()
    project["last_update_user"] = username
    # Add activity log
    add_activity_log(config, username, "PUBLISH_VERSION", "version", version_id,
                     f"{project['name']} v{version['no']}")
    # Save
    save_config(config)
    log(f"✅ Version {version_id} published for project '{project['name']}' by {username}")
    # Notify Spark if project is enabled
    if project.get("enabled", False):
        try:
            await notify_spark_manual(project, version, config.get("config", {}))
        except Exception as e:
            log(f"⚠️ Failed to notify Spark: {e}")
            # Don't fail the publish
    return {"success": True}

async def delete_version(
    project_id: int,
    version_id: int,
    username: str = Depends(verify_token)
):
    """Delete version (soft delete)"""
    config = load_config()
    # Find project and version
    project = next((p for p in config.get("projects", []) if p["id"] == project_id), None)
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")
    version = next((v for v in project.get("versions", []) if v["id"] == version_id), None)
    if not version:
        raise HTTPException(status_code=404, detail="Version not found")
    # Cannot delete published version
    if version.get("published", False):
        raise HTTPException(status_code=400, detail="Cannot delete published version")
    # Soft delete
    version["deleted"] = True
    version["last_update_date"] = get_timestamp()
    version["last_update_user"] = username
    project["last_update_date"] = get_timestamp()
    project["last_update_user"] = username
    # Add activity log
    add_activity_log(config, username, "DELETE_VERSION", "version", version_id,
                     f"{project['name']} v{version_id}")
    # Save
    save_config(config)
    log(f"✅ Version {version_id} deleted for project '{project['name']}' by {username}")
    return {"success": True}

async def validate_regex(
    request: dict = Body(...),
    username: str = Depends(verify_token)
):
    """Validate regex pattern"""
    pattern = request.get("pattern", "")
    test_value = request.get("test_value", "")
    try:
        import re
        compiled_regex = re.compile(pattern)
        matches = bool(compiled_regex.match(test_value))
        return {
            "valid": True,
            "matches": matches,
            "pattern": pattern,
            "test_value": test_value
        }
    except Exception as e:
        return {
            "valid": False,
            "matches": False,
            "error": str(e),
            "pattern": pattern,
            "test_value": test_value
        }
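# Example exchange (illustrative values): posting
#   {"pattern": "^\\d{4}$", "test_value": "1234"}
# returns {"valid": True, "matches": True, ...}, while a broken pattern such as
# "([" comes back as {"valid": False, "error": "..."} instead of raising.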
# ===================== API Endpoints =====================
async def list_apis(
    include_deleted: bool = False,
    username: str = Depends(verify_token)
):
    """List all APIs"""
    config = load_config()
    apis = config.get("apis", [])
    # Filter deleted if needed
    if not include_deleted:
        apis = [a for a in apis if not a.get("deleted", False)]
    return apis

async def create_api(api: APICreate, username: str = Depends(verify_token)):
    """Create new API"""
    config = load_config()
    # Check duplicate name
    existing = [a for a in config.get("apis", []) if a["name"] == api.name]
    if existing:
        raise HTTPException(status_code=400, detail="API name already exists")
    # Create API
    new_api = api.dict()
    new_api["deleted"] = False
    new_api["created_date"] = get_timestamp()
    new_api["created_by"] = username
    new_api["last_update_date"] = get_timestamp()
    new_api["last_update_user"] = username
    if "apis" not in config:
        config["apis"] = []
    config["apis"].append(new_api)
    # Add activity log
    add_activity_log(config, username, "CREATE_API", "api", api.name, api.name)
    # Save
    save_config(config)
    log(f"✅ API '{api.name}' created by {username}")
    return new_api

async def update_api(
    api_name: str,
    update: APIUpdate,
    username: str = Depends(verify_token)
):
    """Update API"""
    config = load_config()
    # Find API
    api = next((a for a in config.get("apis", []) if a["name"] == api_name), None)
    if not api:
        raise HTTPException(status_code=404, detail="API not found")
    # Check race condition
    if api.get("last_update_date") != update.last_update_date:
        raise HTTPException(status_code=409, detail="API was modified by another user")
    # Check if API is in use
    for project in config.get("projects", []):
        for version in project.get("versions", []):
            for intent in version.get("intents", []):
                if intent.get("action") == api_name and version.get("published", False):
                    raise HTTPException(status_code=400,
                                        detail=f"API is used in published version of project '{project['name']}'")
    # Update
    update_dict = update.dict()
    del update_dict["last_update_date"]
    api.update(update_dict)
    api["last_update_date"] = get_timestamp()
    api["last_update_user"] = username
    # Add activity log
    add_activity_log(config, username, "UPDATE_API", "api", api_name, api_name)
    # Save
    save_config(config)
    log(f"✅ API '{api_name}' updated by {username}")
    return api

async def delete_api(api_name: str, username: str = Depends(verify_token)):
    """Delete API (soft delete)"""
    config = load_config()
    # Find API
    api = next((a for a in config.get("apis", []) if a["name"] == api_name), None)
    if not api:
        raise HTTPException(status_code=404, detail="API not found")
    # Check if API is in use
    for project in config.get("projects", []):
        # Skip deleted projects
        if project.get("deleted", False):
            continue
        for version in project.get("versions", []):
            # Skip deleted versions
            if version.get("deleted", False):
                continue
            # Check in intents
            for intent in version.get("intents", []):
                if intent.get("action") == api_name:
                    raise HTTPException(status_code=400,
                                        detail=f"API is used in intent '{intent.get('name', 'unknown')}' in project '{project['name']}' version {version.get('version_number', version.get('id'))}")
    # Soft delete
    api["deleted"] = True
    api["last_update_date"] = get_timestamp()
    api["last_update_user"] = username
    # Add activity log
    add_activity_log(config, username, "DELETE_API", "api", api_name, api_name)
    # Save
    save_config(config)
    log(f"✅ API '{api_name}' deleted by {username}")
    return {"success": True}

# ===================== Spark Integration Endpoints =====================
async def spark_startup(request: dict = Body(...), username: str = Depends(verify_token)):
    """Trigger Spark startup for a project"""
    project_name = request.get("project_name")
    if not project_name:
        raise HTTPException(status_code=400, detail="project_name is required")
    config = load_config()
    # Find project
    project = next((p for p in config.get("projects", []) if p["name"] == project_name), None)
    if not project:
        raise HTTPException(status_code=404, detail=f"Project not found: {project_name}")
    # Find published version
    version = next((v for v in project.get("versions", []) if v.get("published", False)), None)
    if not version:
        raise HTTPException(status_code=400, detail=f"No published version found for project: {project_name}")
    # Notify Spark
    try:
        result = await notify_spark_manual(project, version, config.get("config", {}))
        return {"message": result.get("message", "Spark startup initiated")}
    except Exception as e:
        log(f"❌ Spark startup failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))

async def spark_get_projects(username: str = Depends(verify_token)):
    """Get Spark project list"""
    config = load_config()
    spark_endpoint = config.get("config", {}).get("spark_endpoint", "").rstrip("/")
    spark_token = _get_spark_token()
    if not spark_endpoint:
        raise HTTPException(status_code=400, detail="Spark endpoint not configured")
    if not spark_token:
        raise HTTPException(status_code=400, detail="Spark token not configured")
    headers = {
        "Authorization": f"Bearer {spark_token}"
    }
    try:
        async with httpx.AsyncClient(timeout=30) as client:
            response = await client.get(spark_endpoint + "/project/list", headers=headers)
            response.raise_for_status()
            return response.json()
    except Exception as e:
        log(f"❌ Failed to get Spark projects: {e}")
        raise HTTPException(status_code=500, detail=str(e))

async def spark_enable_project(request: dict = Body(...), username: str = Depends(verify_token)):
    """Enable project in Spark"""
    return await _spark_project_control("enable", request.get("project_name"), username)

async def spark_disable_project(request: dict = Body(...), username: str = Depends(verify_token)):
    """Disable project in Spark"""
    return await _spark_project_control("disable", request.get("project_name"), username)

async def spark_delete_project(project_name: str, username: str = Depends(verify_token)):
    """Delete project from Spark"""
    return await _spark_project_control("delete", project_name, username)
# ===================== Test Endpoints =====================
async def test_api(api_data: dict = Body(...), username: str = Depends(verify_token)):
    """Test API endpoint with auth support"""
    import requests
    import time
    try:
        # Extract test request data if provided
        test_request = api_data.pop("test_request", None)
        # Parse the APICreate model
        api = APICreate(**api_data)
        # Prepare headers
        headers = api.headers.copy()
        # Handle authentication if enabled
        auth_token = None
        if api.auth and api.auth.get("enabled"):
            auth_config = api.auth
            try:
                log("🔑 Fetching auth token for test...")
                # Make auth request
                auth_response = requests.post(
                    auth_config["token_endpoint"],
                    json=auth_config.get("token_request_body", {}),
                    timeout=10
                )
                auth_response.raise_for_status()
                # Extract token from response
                auth_json = auth_response.json()
                token_path = auth_config.get("response_token_path", "token").split(".")
                auth_token = auth_json
                for path_part in token_path:
                    auth_token = auth_token.get(path_part)
                    if auth_token is None:
                        raise ValueError(f"Token not found at path: {auth_config.get('response_token_path')}")
                # Add token to headers
                headers["Authorization"] = f"Bearer {auth_token}"
                log(f"✅ Auth token obtained: {auth_token[:20]}...")
            except Exception as e:
                log(f"❌ Auth failed during test: {e}")
                return {
                    "success": False,
                    "error": f"Authentication failed: {str(e)}"
                }
        # Use test_request if provided, otherwise use body_template
        request_body = test_request if test_request is not None else api.body_template
        # Make the actual API request
        start_time = time.time()
        # Determine how to send the body based on method
        if api.method in ["POST", "PUT", "PATCH"]:
            response = requests.request(
                method=api.method,
                url=api.url,
                headers=headers,
                json=request_body,
                timeout=api.timeout_seconds,
                proxies={"http": api.proxy, "https": api.proxy} if api.proxy else None
            )
        elif api.method == "GET":
            response = requests.request(
                method=api.method,
                url=api.url,
                headers=headers,
                params=request_body if isinstance(request_body, dict) else None,
                timeout=api.timeout_seconds,
                proxies={"http": api.proxy, "https": api.proxy} if api.proxy else None
            )
        else:  # DELETE, HEAD, etc.
            response = requests.request(
                method=api.method,
                url=api.url,
                headers=headers,
                timeout=api.timeout_seconds,
                proxies={"http": api.proxy, "https": api.proxy} if api.proxy else None
            )
        response_time = int((time.time() - start_time) * 1000)
        # Prepare response body
        try:
            response_body = response.json()
        except Exception:
            response_body = response.text
        # Check if request was successful (2xx status codes)
        is_success = 200 <= response.status_code < 300
        # Extract values if response mappings are defined
        extracted_values = []
        if api.response_mappings and isinstance(response_body, dict):
            from jsonpath_ng import parse
            for mapping in api.response_mappings:
                try:
                    jsonpath_expr = parse(mapping['json_path'])
                    matches = jsonpath_expr.find(response_body)
                    value = matches[0].value if matches else None
                    extracted_values.append({
                        "variable_name": mapping['variable_name'],
                        "value": value,
                        "type": mapping['type'],
                        "caption": mapping.get('caption', '')
                    })
                except Exception as e:
                    log(f"Failed to extract {mapping['variable_name']}: {e}")
                    extracted_values.append({
                        "variable_name": mapping['variable_name'],
                        "value": None,
                        "error": str(e),
                        "type": mapping['type'],
                        "caption": mapping.get('caption', '')
                    })
        result = {
            "success": is_success,
            "status_code": response.status_code,
            "response_time": response_time,
            "response_body": response_body,
            "response_headers": dict(response.headers),
            "request_body": request_body,
            "request_headers": headers
        }
        # Add extracted values if any
        if extracted_values:
            result["extracted_values"] = extracted_values
        # Add error info for non-2xx responses
        if not is_success:
            result["error"] = f"HTTP {response.status_code}: {response.reason}"
        log(f"📋 Test result: {response.status_code} in {response_time}ms")
        return result
    except requests.exceptions.Timeout:
        return {
            "success": False,
            "error": f"Request timed out after {api.timeout_seconds} seconds"
        }
    except requests.exceptions.ConnectionError as e:
        return {
            "success": False,
            "error": f"Connection error: {str(e)}"
        }
    except Exception as e:
        log(f"❌ Test API error: {e}")
        return {
            "success": False,
            "error": str(e)
        }
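# response_mappings sketch: each mapping pairs a variable name with a JSONPath
# into the response body. With a hypothetical response {"data": {"balance": 150.75}}
# and a mapping such as
#   {"variable_name": "balance", "json_path": "data.balance", "type": "float", "caption": "Balance"}
# the test result would contain
#   {"variable_name": "balance", "value": 150.75, "type": "float", "caption": "Balance"}
# in extracted_values.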
async def run_all_tests(
    request: TestRequest,
    username: str = Depends(verify_token)
):
    """Run all tests"""
    # TODO: Implement test runner
    return {
        "status": "completed",
        "total": 10,
        "passed": 8,
        "failed": 2,
        "details": []
    }

# ===================== Import/Export Endpoints =====================
async def import_project(
    project_data: dict = Body(...),
    username: str = Depends(verify_token)
):
    """Import project from JSON"""
    config = load_config()
    # Validate structure
    if "name" not in project_data:
        raise HTTPException(status_code=400, detail="Invalid project data")
    # Check duplicate name
    existing = [p for p in config.get("projects", [])
                if p["name"] == project_data["name"] and not p.get("deleted", False)]
    if existing:
        raise HTTPException(status_code=400, detail="Project name already exists")
    # Get new project ID
    project_id = config["config"].get("project_id_counter", 0) + 1
    config["config"]["project_id_counter"] = project_id
    # Create new project
    new_project = {
        "id": project_id,
        "name": project_data["name"],
        "caption": project_data.get("caption", ""),
        "icon": project_data.get("icon", "folder"),
        "description": project_data.get("description", ""),
        "enabled": False,
        "deleted": False,
        "created_date": get_timestamp(),
        "created_by": username,
        "last_update_date": get_timestamp(),
        "last_update_user": username,
        "version_id_counter": 1,
        "versions": []
    }
    # Import versions
    for idx, version_data in enumerate(project_data.get("versions", [])):
        new_version = {
            "id": idx + 1,
            "no": idx + 1,
            "caption": version_data.get("caption", f"Version {idx + 1}"),
            "description": version_data.get("description", ""),
            "published": False,
            "deleted": False,
            "created_date": get_timestamp(),
            "created_by": username,
            "last_update_date": get_timestamp(),
            "last_update_user": username,
            "publish_date": None,
            "published_by": None,
            "general_prompt": version_data.get("general_prompt", ""),
            "llm": version_data.get("llm", {}),
            "intents": version_data.get("intents", []),
            "parameters": version_data.get("parameters", [])
        }
        new_project["versions"].append(new_version)
        new_project["version_id_counter"] = idx + 1
    # Add to config
    if "projects" not in config:
        config["projects"] = []
    config["projects"].append(new_project)
    # Add activity log
    add_activity_log(config, username, "IMPORT_PROJECT", "project", project_id, new_project["name"])
    # Save
    save_config(config)
    log(f"✅ Project '{new_project['name']}' imported by {username}")
    return new_project

async def export_project(
    project_id: int,
    username: str = Depends(verify_token)
):
    """Export project as JSON"""
    config = load_config()
    # Find project
    project = next((p for p in config.get("projects", []) if p["id"] == project_id), None)
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")
    # Create export data
    export_data = {
        "name": project["name"],
        "caption": project.get("caption", ""),
        "icon": project.get("icon", "folder"),
        "description": project.get("description", ""),
        "versions": []
    }
    # Export versions
    for version in project.get("versions", []):
        if not version.get("deleted", False):
            export_version = {
                "caption": version.get("caption", ""),
                "description": version.get("description", ""),
                "general_prompt": version.get("general_prompt", ""),
                "llm": version.get("llm", {}),
                "intents": version.get("intents", []),
                "parameters": version.get("parameters", [])
            }
            export_data["versions"].append(export_version)
    # Add activity log
    add_activity_log(config, username, "EXPORT_PROJECT", "project", project_id, project["name"])
    save_config(config)
    log(f"✅ Project '{project['name']}' exported by {username}")
    return export_data
# ===================== TTS Endpoints =====================
async def generate_tts(
    request: TTSRequest,
    username: str = Depends(verify_token)
):
    """Generate TTS audio from text"""
    try:
        config = load_config()
        env_config = config.get("config", {})
        tts_engine = env_config.get("tts_engine", "no_tts")
        if tts_engine == "no_tts":
            raise HTTPException(status_code=400, detail="TTS is not configured")
        # Get TTS provider
        from config_provider import ConfigProvider
        cfg = ConfigProvider.get()
        api_key = cfg.global_config.get_tts_api_key()
        # Debug log - show the first and last characters of the API key
        if api_key:
            masked_key = f"{api_key[:4]}...{api_key[-4:]}" if len(api_key) > 8 else "***"
            log(f"🔑 TTS API Key (masked): {masked_key}")
            log(f"🔑 Key starts with 'enc:': {api_key.startswith('enc:')}")
        else:
            log("❌ TTS API key is None!")
        if not api_key:
            raise HTTPException(status_code=400, detail="TTS API key not configured")
        # Import here to avoid circular dependency
        from tts_interface import create_tts_provider
        tts_provider = create_tts_provider(tts_engine, api_key)
        if not tts_provider:
            raise HTTPException(status_code=500, detail="Failed to create TTS provider")
        log(f"🎤 Generating TTS for {len(request.text)} characters using {tts_engine}")
        # Generate audio
        audio_data = await tts_provider.synthesize(
            text=request.text,
            voice_id=request.voice_id,
            model_id=request.model_id,
            output_format=request.output_format
        )
        # Return audio data
        from fastapi.responses import Response
        content_type = "audio/mpeg" if request.output_format.startswith("mp3") else "audio/wav"
        return Response(
            content=audio_data,
            media_type=content_type,
            headers={
                "Content-Disposition": f"attachment; filename=tts_output.{request.output_format.split('_')[0]}"
            }
        )
    except HTTPException:
        raise
    except Exception as e:
        log(f"❌ TTS generation error: {e}")
        raise HTTPException(status_code=500, detail=str(e))
# ===================== Activity Log Endpoints =====================
async def get_activity_log(
    limit: int = Query(100, ge=1, le=1000),
    username: str = Depends(verify_token)
):
    """Get activity log"""
    config = load_config()
    logs = config.get("activity_log", [])
    # Return latest entries (format as paginated response if needed)
    return logs[-limit:]

# ===================== Cleanup Task =====================
def cleanup_old_logs():
    """Cleanup old activity logs (runs in background)"""
    while True:
        try:
            config = load_config()
            if "activity_log" in config and len(config["activity_log"]) > 5000:
                # Keep only last 1000 entries
                config["activity_log"] = config["activity_log"][-1000:]
                save_config(config)
                log("🧹 Cleaned up old activity logs")
        except Exception as e:
            log(f"Error in cleanup task: {e}")
        # Run every hour
        time.sleep(3600)

def start_cleanup_task():
    """Start cleanup task in background"""
    cleanup_thread = threading.Thread(target=cleanup_old_logs, daemon=True)
    cleanup_thread.start()
    log("🧹 Started activity log cleanup task")

