Spaces:
Paused
Paused
"""
Flare Admin API Routes
~~~~~~~~~~~~~~~~~~~~~~
All endpoints required by the Admin UI
"""
import hashlib
import hmac
import json
import os
import secrets
import threading
import time
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Any, Dict, List, Optional

import bcrypt
import commentjson
import jwt
from fastapi import APIRouter, Depends, Header, HTTPException
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from jwt.exceptions import ExpiredSignatureError, InvalidTokenError
from pydantic import BaseModel, Field

from config_provider import ConfigProvider
from utils import log
# Activity log retention policy (keep last 30 days)
ACTIVITY_LOG_RETENTION_DAYS = 30  # entries older than this are purged by the cleanup thread
ACTIVITY_LOG_MAX_ENTRIES = 10000  # hard cap on stored entries regardless of age
# Activity log cleanup worker (runs in its own daemon thread)
def cleanup_activity_log():
    """Periodically purge old activity-log entries.

    Runs forever (started as a daemon thread by ``start_cleanup_task``).
    Once per day it drops entries older than ACTIVITY_LOG_RETENTION_DAYS,
    caps the log at ACTIVITY_LOG_MAX_ENTRIES, and persists the config only
    when something was actually removed.
    """
    while True:
        try:
            config = load_config()
            if "activity_log" in config:
                # Stored timestamps end in "Z"; replacing it with "+00:00"
                # makes fromisoformat() return *aware* datetimes, so the
                # cutoff must be timezone-aware too. (The original used a
                # naive utcnow() cutoff, which raises TypeError on comparison
                # — caught below — so the cleanup silently never ran.)
                cutoff_date = datetime.now(timezone.utc) - timedelta(days=ACTIVITY_LOG_RETENTION_DAYS)
                original_count = len(config["activity_log"])
                # Keep only entries newer than the cutoff
                config["activity_log"] = [
                    log_entry for log_entry in config["activity_log"]
                    if datetime.fromisoformat(log_entry["timestamp"].replace("Z", "+00:00")) > cutoff_date
                ]
                # Also enforce the absolute size cap
                if len(config["activity_log"]) > ACTIVITY_LOG_MAX_ENTRIES:
                    config["activity_log"] = config["activity_log"][-ACTIVITY_LOG_MAX_ENTRIES:]
                # Save only if anything was removed
                removed_count = original_count - len(config["activity_log"])
                if removed_count > 0:
                    save_config(config)
                    log(f"🧹 Cleaned up {removed_count} old activity log entries")
        except Exception as e:
            log(f"❌ Activity log cleanup error: {e}")
        # Run cleanup once per day
        time.sleep(86400)  # 24 hours
# Kick off the cleanup worker when the module is loaded
def start_cleanup_task():
    """Launch the activity-log cleanup loop in a background daemon thread."""
    worker = threading.Thread(
        target=cleanup_activity_log,
        daemon=True,  # don't block interpreter shutdown
    )
    worker.start()
# ===================== Dynamic Config Loading =====================
def get_jwt_config():
    """Return JWT settings (secret, algorithm, expiration) for the current work mode.

    Cloud mode reads JWT_SECRET from HuggingFace Secrets; on-premise mode
    loads it from a local .env file. A hard-coded fallback secret is used
    when nothing is configured (flagged in the logs for cloud mode).
    """
    cfg = ConfigProvider.get()
    if not cfg.global_config.is_cloud_mode():
        # On-premise mode - read secrets from the local .env file
        from dotenv import load_dotenv
        load_dotenv()
        jwt_secret = os.getenv("JWT_SECRET", "flare-admin-secret-key-change-in-production")
    else:
        # Cloud mode - secrets come from HuggingFace Secrets
        jwt_secret = os.getenv("JWT_SECRET")
        if not jwt_secret:
            log("❌ JWT_SECRET not found in HuggingFace Secrets!")
            jwt_secret = "flare-admin-secret-key-change-in-production"  # Fallback
    return {
        "secret": jwt_secret,
        "algorithm": os.getenv("JWT_ALGORITHM", "HS256"),
        "expiration_hours": int(os.getenv("JWT_EXPIRATION_HOURS", "24")),
    }
# ===================== Constants & Config =====================
router = APIRouter(prefix="/api")  # all admin endpoints are served under /api
security = HTTPBearer()  # extracts the bearer token for protected endpoints
# ===================== Models =====================
class LoginRequest(BaseModel):
    """Credentials payload for the login endpoint."""
    username: str
    password: str
class LoginResponse(BaseModel):
    """Successful login result: a signed JWT plus the username."""
    token: str
    username: str
class ChangePasswordRequest(BaseModel):
    """Payload for changing the authenticated user's password."""
    current_password: str
    new_password: str
class EnvironmentUpdate(BaseModel):
    """Environment settings update (work mode, optional cloud token, Spark endpoint)."""
    work_mode: str
    cloud_token: Optional[str] = None  # ignored/cleared in on-premise mode
    spark_endpoint: str
class ProjectCreate(BaseModel):
    """Payload for creating a project (name must be unique)."""
    name: str
    caption: Optional[str] = ""
class ProjectUpdate(BaseModel):
    """Payload for updating a project's caption."""
    caption: str
    last_update_date: str  # optimistic-lock token: must match the stored value
class VersionCreate(BaseModel):
    """Payload for cloning an existing version into a new draft."""
    source_version_id: int  # version to clone from
    caption: str
class IntentModel(BaseModel):
    """A single intent definition inside a version."""
    name: str
    caption: Optional[str] = ""
    locale: str = "tr-TR"
    detection_prompt: str
    examples: List[str] = []
    parameters: List[Dict[str, Any]] = []
    action: str  # name of the API invoked when this intent fires
    fallback_timeout_prompt: Optional[str] = None
    fallback_error_prompt: Optional[str] = None
class VersionUpdate(BaseModel):
    """Full payload for editing an unpublished version."""
    caption: str
    general_prompt: str
    llm: Dict[str, Any]
    intents: List[IntentModel]
    last_update_date: str  # optimistic-lock token (bypassed with force=True)
class APICreate(BaseModel):
    """Payload for defining an external API (also used by the API test endpoint)."""
    name: str
    url: str
    method: str = "POST"
    headers: Dict[str, str] = {}
    body_template: Dict[str, Any] = {}
    timeout_seconds: int = 10
    retry: Dict[str, Any] = Field(default_factory=lambda: {"retry_count": 3, "backoff_seconds": 2, "strategy": "static"})
    proxy: Optional[str] = None
    auth: Optional[Dict[str, Any]] = None
    response_prompt: Optional[str] = None
class APIUpdate(BaseModel):
    """Full payload for editing an existing API definition."""
    url: str
    method: str
    headers: Dict[str, str]
    body_template: Dict[str, Any]
    timeout_seconds: int
    retry: Dict[str, Any]
    proxy: Optional[str]
    auth: Optional[Dict[str, Any]]
    response_prompt: Optional[str]
    last_update_date: str  # optimistic-lock token: must match the stored value
class TestRequest(BaseModel):
    """Selects which test suite to run."""
    test_type: str  # "all", "ui", "backend", "integration", "spark"
# ===================== Helpers =====================
def verify_token(credentials: HTTPAuthorizationCredentials = Depends(security)) -> str:
    """Decode the bearer JWT and return the username (``sub`` claim).

    Raises HTTP 401 when the token is expired, otherwise invalid, or is
    missing the ``sub`` claim.
    """
    jwt_config = get_jwt_config()
    try:
        payload = jwt.decode(
            credentials.credentials,
            jwt_config["secret"],
            algorithms=[jwt_config["algorithm"]],
        )
    except jwt.ExpiredSignatureError:
        raise HTTPException(status_code=401, detail="Token expired")
    except jwt.InvalidTokenError:  # catches all remaining JWT validation errors
        raise HTTPException(status_code=401, detail="Invalid token")
    username = payload.get("sub")
    if username is None:
        raise HTTPException(status_code=401, detail="Invalid token")
    return username
def hash_password(password: str, salt: str = None) -> tuple[str, str]:
    """Hash *password* with bcrypt and return ``(hash, salt)`` as strings.

    When *salt* is None a fresh bcrypt salt is generated; otherwise the
    given salt string is reused (e.g. to re-derive an existing hash).
    """
    if salt is None:
        fresh = bcrypt.gensalt()
        salt_bytes, salt = fresh, fresh.decode('utf-8')
    else:
        salt_bytes = salt.encode('utf-8')
    digest = bcrypt.hashpw(password.encode('utf-8'), salt_bytes)
    return digest.decode('utf-8'), salt
def verify_password(password: str, stored_hash: str, salt: str = None) -> bool:
    """Verify *password* against *stored_hash*.

    Supports both bcrypt hashes (when *salt* is present and the hash has the
    canonical 60-char bcrypt length) and legacy SHA256 hex digests.
    """
    # First try bcrypt
    if salt and len(stored_hash) == 60:  # bcrypt hash length
        try:
            return bcrypt.checkpw(password.encode('utf-8'), stored_hash.encode('utf-8'))
        except (ValueError, TypeError):
            # Malformed hash/salt: fall through to the legacy check. The
            # original bare "except:" would also have swallowed unrelated
            # errors (even KeyboardInterrupt).
            pass
    # Fallback to SHA256 for backward compatibility; compare_digest avoids
    # leaking information through comparison timing.
    sha256_hash = hashlib.sha256(password.encode()).hexdigest()
    return hmac.compare_digest(sha256_hash, stored_hash)
def load_config() -> Dict[str, Any]:
    """Parse and return service_config.jsonc (JSON-with-comments)."""
    with Path("service_config.jsonc").open('r', encoding='utf-8') as config_file:
        return commentjson.load(config_file)
def save_config(config: Dict[str, Any]):
    """Write *config* back to service_config.jsonc, pretty-printed.

    Note: the file is rewritten as plain JSON, so comments present in the
    original .jsonc are not preserved.
    """
    serialized = json.dumps(config, indent=2, ensure_ascii=False)
    Path("service_config.jsonc").write_text(serialized, encoding='utf-8')
def get_timestamp() -> str:
    """Return the current UTC time as an ISO-8601 string with a "Z" suffix."""
    return f"{datetime.utcnow().isoformat()}Z"
def add_activity_log(config: Dict[str, Any], user: str, action: str, entity_type: str,
                     entity_id: Any, entity_name: str, details: str = ""):
    """Append an audit entry to config["activity_log"] (in memory only).

    The caller is responsible for persisting via save_config(). The
    in-config log is capped at the 100 most recent entries.
    """
    entries = config.setdefault("activity_log", [])
    entries.append({
        "id": len(entries) + 1,
        "timestamp": get_timestamp(),
        "user": user,
        "action": action,
        "entity_type": entity_type,
        "entity_id": entity_id,
        "entity_name": entity_name,
        "details": details,
    })
    # Keep only the last 100 entries
    if len(entries) > 100:
        config["activity_log"] = entries[-100:]
# ===================== Auth Endpoints =====================
async def login(request: LoginRequest):
    """Authenticate a user and return a signed JWT."""
    config = load_config()
    jwt_config = get_jwt_config()
    # Look up the user record
    users = config.get("config", {}).get("users", [])
    user = next((u for u in users if u["username"] == request.username), None)
    if user is None:
        raise HTTPException(status_code=401, detail="Invalid credentials")
    # Check the password: bcrypt when a salt is stored, legacy SHA256 otherwise
    if user.get("salt"):
        password_ok = verify_password(request.password, user["password_hash"], user["salt"])
    else:
        password_ok = hashlib.sha256(request.password.encode()).hexdigest() == user["password_hash"]
    if not password_ok:
        raise HTTPException(status_code=401, detail="Invalid credentials")
    # Issue the JWT
    expire = datetime.utcnow() + timedelta(hours=jwt_config["expiration_hours"])
    token = jwt.encode(
        {"sub": request.username, "exp": expire},
        jwt_config["secret"],
        algorithm=jwt_config["algorithm"],
    )
    log(f"✅ User '{request.username}' logged in")
    return LoginResponse(token=token, username=request.username)
async def change_password(
    request: ChangePasswordRequest,
    username: str = Depends(verify_token)
):
    """Change the authenticated user's password (re-hashed with a fresh bcrypt salt)."""
    config = load_config()
    # Find user
    users = config.get("config", {}).get("users", [])
    user_index = next((i for i, u in enumerate(users) if u["username"] == username), None)
    if user_index is None:
        raise HTTPException(status_code=404, detail="User not found")
    user = users[user_index]
    # Verify current password
    if not verify_password(request.current_password, user["password_hash"], user.get("salt", "")):
        raise HTTPException(status_code=401, detail="Current password is incorrect")
    # Generate new hash with a new salt
    new_hash, new_salt = hash_password(request.new_password)
    users[user_index]["password_hash"] = new_hash
    users[user_index]["salt"] = new_salt
    # Record the change, then persist once. (The original wrote the config
    # file twice: once before and once after adding the activity-log entry.)
    add_activity_log(config, username, "CHANGE_PASSWORD", "user", username, username, "Password changed")
    save_config(config)
    log(f"✅ Password changed for user '{username}'")
    return {"success": True, "message": "Password changed successfully"}
# ===================== Environment Endpoints =====================
async def get_environment(username: str = Depends(verify_token)):
    """Return the current environment configuration for the admin UI."""
    env_config = load_config().get("config", {})
    # NOTE(review): cloud_token is returned to the client as-is — confirm the
    # admin UI is trusted with it.
    return {
        "work_mode": env_config.get("work_mode", "hfcloud"),
        "cloud_token": env_config.get("cloud_token", ""),
        "spark_endpoint": env_config.get("spark_endpoint", ""),
    }
async def update_environment(env: EnvironmentUpdate, username: str = Depends(verify_token)):
    """Update environment configuration (work mode, cloud token, Spark endpoint)."""
    config = load_config()
    # Update config; the cloud token is irrelevant (and cleared) on-premise
    config["config"]["work_mode"] = env.work_mode
    config["config"]["cloud_token"] = env.cloud_token if env.work_mode != "on-premise" else ""
    config["config"]["spark_endpoint"] = env.spark_endpoint
    config["config"]["last_update_date"] = get_timestamp()
    config["config"]["last_update_user"] = username
    # Log the change, then persist once. (The original wrote the config file
    # twice: the first save lacked the activity-log entry.)
    add_activity_log(config, username, "UPDATE_ENVIRONMENT", "config", 0, "environment",
                     f"Work mode: {env.work_mode}")
    save_config(config)
    log(f"✅ Environment updated by {username}")
    return {"success": True}
# ===================== Project Endpoints =====================
async def list_projects(
    include_deleted: bool = False,
    username: str = Depends(verify_token)
):
    """List projects, hiding soft-deleted ones unless include_deleted is set."""
    projects = load_config().get("projects", [])
    if include_deleted:
        return projects
    return [p for p in projects if not p.get("deleted", False)]
async def create_project(project: ProjectCreate, username: str = Depends(verify_token)):
    """Create a new project with a unique name and a freshly allocated ID."""
    config = load_config()
    # Reject duplicate names (soft-deleted projects included)
    if any(p["name"] == project.name for p in config.get("projects", [])):
        raise HTTPException(status_code=400, detail="Project name already exists")
    # Allocate the next project ID from the global counter
    config["config"]["project_id_counter"] = config["config"].get("project_id_counter", 0) + 1
    project_id = config["config"]["project_id_counter"]
    new_project = {
        "id": project_id,
        "name": project.name,
        "caption": project.caption,
        "enabled": True,
        "last_version_number": 0,
        "version_id_counter": 0,
        "versions": [],
        "deleted": False,
        "created_date": get_timestamp(),
        "created_by": username,
        "last_update_date": get_timestamp(),
        "last_update_user": username,
    }
    config.setdefault("projects", []).append(new_project)
    add_activity_log(config, username, "CREATE_PROJECT", "project", project_id, project.name)
    save_config(config)
    log(f"✅ Project '{project.name}' created by {username}")
    return new_project
async def update_project(
    project_id: int,
    update: ProjectUpdate,
    username: str = Depends(verify_token)
):
    """Update a project's caption, protected by optimistic locking."""
    config = load_config()
    target = next((p for p in config.get("projects", []) if p["id"] == project_id), None)
    if target is None:
        raise HTTPException(status_code=404, detail="Project not found")
    # Optimistic lock: the client must present the timestamp it last saw
    if target.get("last_update_date") != update.last_update_date:
        raise HTTPException(status_code=409, detail="Project was modified by another user")
    target["caption"] = update.caption
    target["last_update_date"] = get_timestamp()
    target["last_update_user"] = username
    add_activity_log(config, username, "UPDATE_PROJECT", "project", project_id, target["name"])
    save_config(config)
    log(f"✅ Project '{target['name']}' updated by {username}")
    return target
async def delete_project(project_id: int, username: str = Depends(verify_token)):
    """Soft-delete a project (it stays in the config, flagged as deleted)."""
    config = load_config()
    target = next((p for p in config.get("projects", []) if p["id"] == project_id), None)
    if target is None:
        raise HTTPException(status_code=404, detail="Project not found")
    # Mark as deleted rather than removing, so history is preserved
    target["deleted"] = True
    target["last_update_date"] = get_timestamp()
    target["last_update_user"] = username
    add_activity_log(config, username, "DELETE_PROJECT", "project", project_id, target["name"])
    save_config(config)
    log(f"✅ Project '{target['name']}' deleted by {username}")
    return {"success": True}
async def toggle_project(project_id: int, username: str = Depends(verify_token)):
    """Flip a project's enabled flag and report the new state."""
    config = load_config()
    target = next((p for p in config.get("projects", []) if p["id"] == project_id), None)
    if target is None:
        raise HTTPException(status_code=404, detail="Project not found")
    new_state = not target.get("enabled", True)
    target["enabled"] = new_state
    target["last_update_date"] = get_timestamp()
    target["last_update_user"] = username
    add_activity_log(config, username,
                     "ENABLE_PROJECT" if new_state else "DISABLE_PROJECT",
                     "project", project_id, target["name"])
    save_config(config)
    log(f"✅ Project '{target['name']}' {'enabled' if new_state else 'disabled'} by {username}")
    return {"enabled": new_state}
# ===================== Version Endpoints =====================
async def create_version(
    project_id: int,
    version: VersionCreate,
    username: str = Depends(verify_token)
):
    """Create a new unpublished version by cloning an existing one.

    Only one unpublished (draft) version may exist per project at a time.
    """
    config = load_config()
    # Find project
    project = next((p for p in config.get("projects", []) if p["id"] == project_id), None)
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")
    # Check if there's already an unpublished version
    unpublished = [v for v in project.get("versions", []) if not v.get("published", False)]
    if unpublished:
        raise HTTPException(status_code=400, detail="There is already an unpublished version")
    # Find source version
    source = next((v for v in project.get("versions", []) if v["id"] == version.source_version_id), None)
    if not source:
        raise HTTPException(status_code=404, detail="Source version not found")
    # Create new version
    project["version_id_counter"] = project.get("version_id_counter", 0) + 1
    # Deep-copy the source: the original shallow .copy() shared the nested
    # "intents"/"llm" structures, so editing the new draft silently mutated
    # the published source version too. The config is plain JSON data, so a
    # JSON round-trip is a safe deep copy without new imports.
    new_version = json.loads(json.dumps(source))
    new_version["id"] = project["version_id_counter"]
    new_version["caption"] = version.caption
    new_version["published"] = False
    new_version["created_date"] = get_timestamp()
    new_version["created_by"] = username
    new_version["last_update_date"] = get_timestamp()
    new_version["last_update_user"] = username
    new_version["publish_date"] = None
    new_version["published_by"] = None
    project["versions"].append(new_version)
    project["last_update_date"] = get_timestamp()
    project["last_update_user"] = username
    # Add activity log
    add_activity_log(config, username, "CREATE_VERSION", "version", new_version["id"],
                     f"{project['name']} v{new_version['id']}")
    # Save
    save_config(config)
    log(f"✅ Version {new_version['id']} created for project '{project['name']}' by {username}")
    return new_version
async def update_version(
    project_id: int,
    version_id: int,
    update: VersionUpdate,
    force: bool = False,  # bypass the optimistic-lock check when True
    username: str = Depends(verify_token)
):
    """Update a draft version's caption, prompt, LLM config, and intents."""
    config = load_config()
    project = next((p for p in config.get("projects", []) if p["id"] == project_id), None)
    if project is None:
        raise HTTPException(status_code=404, detail="Project not found")
    version = next((v for v in project.get("versions", []) if v["id"] == version_id), None)
    if version is None:
        raise HTTPException(status_code=404, detail="Version not found")
    # Published versions are immutable
    if version.get("published", False):
        raise HTTPException(status_code=400, detail="Cannot update published version")
    # Optimistic lock, unless the caller explicitly forces the save
    if not force and version.get("last_update_date") != update.last_update_date:
        raise HTTPException(
            status_code=409,
            detail="Version was modified by another user. Please reload or force save."
        )
    version["caption"] = update.caption
    version["general_prompt"] = update.general_prompt
    version["llm"] = update.llm
    version["intents"] = [intent.dict() for intent in update.intents]
    version["last_update_date"] = get_timestamp()
    version["last_update_user"] = username
    project["last_update_date"] = get_timestamp()
    project["last_update_user"] = username
    add_activity_log(config, username, "UPDATE_VERSION", "version", version_id,
                     f"{project['name']} v{version_id}")
    save_config(config)
    log(f"✅ Version {version_id} updated for project '{project['name']}' by {username}")
    return version
async def publish_version(
    project_id: int,
    version_id: int,
    username: str = Depends(verify_token)
):
    """Publish a version; any previously published version is unpublished."""
    config = load_config()
    project = next((p for p in config.get("projects", []) if p["id"] == project_id), None)
    if project is None:
        raise HTTPException(status_code=404, detail="Project not found")
    version = next((v for v in project.get("versions", []) if v["id"] == version_id), None)
    if version is None:
        raise HTTPException(status_code=404, detail="Version not found")
    # Exactly one published version per project: unpublish the others first
    for other in project.get("versions", []):
        if other["id"] != version_id:
            other["published"] = False
    version["published"] = True
    version["publish_date"] = get_timestamp()
    version["published_by"] = username
    version["last_update_date"] = get_timestamp()
    version["last_update_user"] = username
    project["last_update_date"] = get_timestamp()
    project["last_update_user"] = username
    add_activity_log(config, username, "PUBLISH_VERSION", "version", version_id,
                     f"{project['name']} v{version_id}")
    save_config(config)
    # TODO: Notify Spark about new version
    log(f"✅ Version {version_id} published for project '{project['name']}' by {username}")
    return {"success": True}
async def delete_version(
    project_id: int,
    version_id: int,
    username: str = Depends(verify_token)
):
    """Soft-delete a version (published versions cannot be deleted)."""
    config = load_config()
    project = next((p for p in config.get("projects", []) if p["id"] == project_id), None)
    if project is None:
        raise HTTPException(status_code=404, detail="Project not found")
    version = next((v for v in project.get("versions", []) if v["id"] == version_id), None)
    if version is None:
        raise HTTPException(status_code=404, detail="Version not found")
    # The live (published) version must stay available
    if version.get("published", False):
        raise HTTPException(status_code=400, detail="Cannot delete published version")
    version["deleted"] = True
    version["last_update_date"] = get_timestamp()
    version["last_update_user"] = username
    project["last_update_date"] = get_timestamp()
    project["last_update_user"] = username
    add_activity_log(config, username, "DELETE_VERSION", "version", version_id,
                     f"{project['name']} v{version_id}")
    save_config(config)
    log(f"✅ Version {version_id} deleted for project '{project['name']}' by {username}")
    return {"success": True}
# ===================== API Endpoints =====================
async def list_apis(
    include_deleted: bool = False,
    username: str = Depends(verify_token)
):
    """List API definitions, hiding soft-deleted ones unless requested."""
    apis = load_config().get("apis", [])
    if include_deleted:
        return apis
    return [a for a in apis if not a.get("deleted", False)]
async def create_api(api: APICreate, username: str = Depends(verify_token)):
    """Create a new external API definition with a unique name."""
    config = load_config()
    # Reject duplicate names (soft-deleted APIs included)
    if any(a["name"] == api.name for a in config.get("apis", [])):
        raise HTTPException(status_code=400, detail="API name already exists")
    new_api = api.dict()
    new_api.update({
        "deleted": False,
        "created_date": get_timestamp(),
        "created_by": username,
        "last_update_date": get_timestamp(),
        "last_update_user": username,
    })
    config.setdefault("apis", []).append(new_api)
    add_activity_log(config, username, "CREATE_API", "api", api.name, api.name)
    save_config(config)
    log(f"✅ API '{api.name}' created by {username}")
    return new_api
async def update_api(
    api_name: str,
    update: APIUpdate,
    username: str = Depends(verify_token)
):
    """Update an API definition (blocked while used by a published version)."""
    config = load_config()
    api = next((a for a in config.get("apis", []) if a["name"] == api_name), None)
    if api is None:
        raise HTTPException(status_code=404, detail="API not found")
    # Optimistic lock: the client must present the timestamp it last saw
    if api.get("last_update_date") != update.last_update_date:
        raise HTTPException(status_code=409, detail="API was modified by another user")
    # Refuse to change an API referenced by any *published* version
    for project in config.get("projects", []):
        for version in project.get("versions", []):
            if not version.get("published", False):
                continue
            if any(intent.get("action") == api_name for intent in version.get("intents", [])):
                raise HTTPException(status_code=400,
                    detail=f"API is used in published version of project '{project['name']}'")
    # Apply every field except the optimistic-lock timestamp, then restamp
    fields = update.dict()
    fields.pop("last_update_date")
    api.update(fields)
    api["last_update_date"] = get_timestamp()
    api["last_update_user"] = username
    add_activity_log(config, username, "UPDATE_API", "api", api_name, api_name)
    save_config(config)
    log(f"✅ API '{api_name}' updated by {username}")
    return api
async def delete_api(api_name: str, username: str = Depends(verify_token)):
    """Soft-delete an API (blocked while any intent in any version references it)."""
    config = load_config()
    api = next((a for a in config.get("apis", []) if a["name"] == api_name), None)
    if api is None:
        raise HTTPException(status_code=404, detail="API not found")
    # Refuse deletion while any intent, in any version of any project, uses it
    for project in config.get("projects", []):
        for version in project.get("versions", []):
            if any(intent.get("action") == api_name for intent in version.get("intents", [])):
                raise HTTPException(status_code=400,
                    detail=f"API is used in project '{project['name']}'")
    api["deleted"] = True
    api["last_update_date"] = get_timestamp()
    api["last_update_user"] = username
    add_activity_log(config, username, "DELETE_API", "api", api_name, api_name)
    save_config(config)
    log(f"✅ API '{api_name}' deleted by {username}")
    return {"success": True}
# ===================== Test Endpoints =====================
async def test_api(api: APICreate, username: str = Depends(verify_token)):
    """Fire one request at the given API definition and report the outcome.

    Never raises: timeouts and other failures are reported in the response
    body with success=False.
    """
    import requests
    try:
        request_headers = api.headers.copy()
        # Inject a dummy bearer token when the API declares auth enabled
        if api.auth and api.auth.get("enabled"):
            request_headers["Authorization"] = "Bearer test_token_12345"
        # Body goes in the JSON payload for write methods, in the query
        # string for GET
        has_body = api.method in ["POST", "PUT", "PATCH"]
        response = requests.request(
            method=api.method,
            url=api.url,
            headers=request_headers,
            json=api.body_template if has_body else None,
            params=api.body_template if api.method == "GET" else None,
            timeout=api.timeout_seconds,
            proxies={"http": api.proxy, "https": api.proxy} if api.proxy else None
        )
        return {
            "success": True,
            "status_code": response.status_code,
            "response_time_ms": int(response.elapsed.total_seconds() * 1000),
            "headers": dict(response.headers),
            "body": response.text[:1000],  # first 1000 chars only
            "request_headers": request_headers,  # for debugging
            "request_body": api.body_template  # for debugging
        }
    except requests.exceptions.Timeout:
        return {
            "success": False,
            "error": f"Request timed out after {api.timeout_seconds} seconds"
        }
    except Exception as e:
        return {
            "success": False,
            "error": str(e),
            "error_type": type(e).__name__
        }
async def get_activity_log(
    page: int = 1,
    limit: int = 50,
    username: str = Depends(verify_token)
):
    """Return the activity log, newest first, paginated."""
    config = load_config()
    logs = config.get("activity_log", [])
    # Sort by timestamp descending (newest first)
    logs = sorted(logs, key=lambda entry: entry['timestamp'], reverse=True)
    # Clamp pagination inputs: limit=0 would divide by zero in the "pages"
    # computation below, and page<1 would produce negative slice indices.
    page = max(1, page)
    limit = max(1, limit)
    total = len(logs)
    start = (page - 1) * limit
    page_logs = logs[start:start + limit]
    return {
        "items": page_logs,
        "total": total,
        "page": page,
        "limit": limit,
        "pages": (total + limit - 1) // limit  # ceiling division
    }
async def run_all_tests(test: TestRequest, username: str = Depends(verify_token)):
    """Run the requested test suite.

    Placeholder implementation: returns canned results instead of executing
    real tests.
    """
    log(f"🧪 Running {test.test_type} tests requested by {username}")
    # Simulated fixed outcomes (name, duration in ms)
    simulated = [
        ("Login with valid credentials", 120),
        ("Create new project", 340),
        ("Delete API in use", 45),
        ("Race condition detection", 567),
        ("Invalid token handling", 23),
    ]
    return {
        "test_type": test.test_type,
        "start_time": get_timestamp(),
        "tests": [
            {"name": name, "status": "PASS", "duration_ms": duration}
            for name, duration in simulated
        ],
        "summary": {
            "total": 5,
            "passed": 5,
            "failed": 0,
            "duration_ms": 1095
        }
    }
async def validate_regex(pattern: str, test_value: str, username: str = Depends(verify_token)):
    """Compile *pattern* and report whether it fully matches *test_value*."""
    import re
    try:
        # fullmatch compiles the pattern internally; a bad pattern raises
        # re.error just like an explicit compile would.
        matched = re.fullmatch(pattern, test_value)
    except re.error as e:
        return {
            "valid": False,
            "error": str(e)
        }
    return {
        "valid": True,
        "matches": matched is not None
    }
# ===================== Export/Import =====================
async def export_project(project_id: int, username: str = Depends(verify_token)):
    """Export a project plus every API referenced by its intents."""
    config = load_config()
    project = next((p for p in config.get("projects", []) if p["id"] == project_id), None)
    if project is None:
        raise HTTPException(status_code=404, detail="Project not found")
    # Gather the names of every API any intent points at
    referenced = {
        intent.get("action")
        for version in project.get("versions", [])
        for intent in version.get("intents", [])
    }
    related_apis = [a for a in config.get("apis", []) if a["name"] in referenced]
    log(f"📤 Project '{project['name']}' exported by {username}")
    return {
        "export_date": get_timestamp(),
        "exported_by": username,
        "project": project,
        "apis": related_apis
    }
async def import_project(data: Dict[str, Any], username: str = Depends(verify_token)):
    """Import a project (and its APIs) from a previously exported JSON blob.

    Name collisions are resolved by suffixing a timestamp; imported versions
    are renumbered and always start unpublished.
    """
    config = load_config()
    # Extract project and APIs
    project_data = data.get("project", {})
    apis_data = data.get("apis", [])
    # Rename on name collision instead of rejecting the import
    existing = [p for p in config.get("projects", []) if p["name"] == project_data.get("name")]
    if existing:
        project_data["name"] = f"{project_data['name']}_imported_{int(datetime.now().timestamp())}"
    # Assign a fresh project ID
    config["config"]["project_id_counter"] = config["config"].get("project_id_counter", 0) + 1
    project_data["id"] = config["config"]["project_id_counter"]
    # Renumber versions; imported versions are never published
    version_counter = 0
    for version in project_data.get("versions", []):
        version_counter += 1
        version["id"] = version_counter
        version["published"] = False
    project_data["version_id_counter"] = version_counter
    project_data["created_date"] = get_timestamp()
    project_data["created_by"] = username
    project_data["last_update_date"] = get_timestamp()
    project_data["last_update_user"] = username
    # Import APIs that don't already exist (matched by name).
    # setdefault guards configs with no "apis"/"projects" list yet — the
    # original raised KeyError on such configs.
    apis = config.setdefault("apis", [])
    imported_apis = []
    for api_data in apis_data:
        existing_api = next((a for a in apis if a["name"] == api_data.get("name")), None)
        if not existing_api:
            api_data["created_date"] = get_timestamp()
            api_data["created_by"] = username
            api_data["last_update_date"] = get_timestamp()
            api_data["last_update_user"] = username
            api_data["deleted"] = False
            apis.append(api_data)
            imported_apis.append(api_data["name"])
    # Add project
    config.setdefault("projects", []).append(project_data)
    # Add activity log
    add_activity_log(config, username, "IMPORT_PROJECT", "project", project_data["id"],
                     project_data["name"], f"Imported with {len(imported_apis)} APIs")
    # Save
    save_config(config)
    log(f"📥 Project '{project_data['name']}' imported by {username}")
    return {
        "success": True,
        "project_name": project_data["name"],
        "project_id": project_data["id"],
        "imported_apis": imported_apis
    }