# Standard library imports import os import uuid import time import zipfile # Still imported, but primarily for handling potential old zip logic or error messages # Third-party library imports import docker # For interacting with Docker daemon import git # For Git repository operations (requires 'GitPython' package: pip install GitPython) import hmac # For validating GitHub webhook signatures (important for security) import hashlib # For hashing in webhook signature validation from pyngrok import ngrok # For creating public URLs (ensure ngrok is configured) from fastapi import APIRouter, HTTPException, UploadFile, Form, Request, BackgroundTasks # Added Request and BackgroundTasks from fastapi.responses import JSONResponse # Initialize FastAPI router router = APIRouter() deployed_projects = {} GITHUB_WEBHOOK_SECRET = os.getenv("GITHUB_WEBHOOK_SECRET", "your_github_webhook_secret_here_CHANGE_THIS") if GITHUB_WEBHOOK_SECRET == "your_github_webhook_secret_here_CHANGE_THIS": print("WARNING: GITHUB_WEBHOOK_SECRET is not set. Webhook security is compromised.") # --- Helper Functions --- # Function to recursively find a file (case-insensitive) within a directory def _find_file_in_project(filename: str, root_dir: str) -> str | None: """ Searches for a file (case-insensitive) within the given root directory and its subdirectories. Returns the absolute path to the file if found, otherwise None. """ filename_lower = filename.lower() for dirpath, _, files in os.walk(root_dir): for file in files: if file.lower() == filename_lower: return os.path.join(dirpath, file) return None # Function to build and deploy a Docker container from a project path async def _build_and_deploy(project_id: str, project_path: str, app_name: str, existing_container_name: str = None): """ Handles the Docker build and deployment process for a given project. If an existing_container_name is provided, it attempts to stop and remove it first. Manages ngrok tunnels for the deployed application. """ docker_client = docker.from_env() # Define consistent naming for Docker image and container image_name = f"{app_name.lower()}_{project_id[:8]}" container_name = f"{image_name}_container" try: # Step 1: Clean up old containers and images if they exist # Stop and remove the previously deployed container for this project if existing_container_name: print(f"Attempting to stop and remove existing container: {existing_container_name}") try: old_container = docker_client.containers.get(existing_container_name) old_container.stop(timeout=5) # Give 5 seconds to stop gracefully old_container.remove(force=True) print(f"Successfully stopped and removed old container: {existing_container_name}") except docker.errors.NotFound: print(f"Existing container {existing_container_name} not found, proceeding with new deployment.") except Exception as e: print(f"Error stopping/removing old container {existing_container_name}: {e}") # Remove any exited or created containers that might be lingering from previous runs # (This is a general cleanup, not specific to this project_id, but good practice) for c in docker_client.containers.list(all=True): if c.status in ["created", "exited"]: # Only remove non-running containers # Be cautious: only remove containers clearly associated with this deployment logic # For more robust logic, might check labels or names more strictly if c.name.startswith(f"{app_name.lower()}_{project_id[:8]}") or c.name.startswith(f"ngrok-"): print(f"Removing leftover container {c.name} ({c.id}) with status {c.status}") try: c.remove(force=True) except Exception as e: print(f"Error removing leftover container {c.name}: {e}") # Step 2: Build Docker image print(f"Building Docker image from {project_path} with tag: {image_name}") image, build_logs_generator = docker_client.images.build(path=project_path, tag=image_name, rm=True) # Process build logs (can be streamed to UI in a real application) for log_line in build_logs_generator: if 'stream' in log_line: print(f"[BUILD LOG] {log_line['stream'].strip()}") elif 'error' in log_line: print(f"[BUILD ERROR] {log_line['error'].strip()}") print(f"Docker image built successfully: {image.id}") # Step 3: Run new Docker container print(f"Running new container {container_name} from image {image_name}") container = docker_client.containers.run( image=image_name, ports={"8080/tcp": None}, # Docker will assign a random host port for 8080/tcp name=container_name, detach=True, # Run in background mem_limit="512m", # Limit memory usage nano_cpus=1_000_000_000, # Limit CPU usage to 1 full core (1 billion nano-CPUs) read_only=True, # Make container filesystem read-only (except tmpfs) tmpfs={"/tmp": ""}, # Mount an in-memory tmpfs for /tmp directory user="1001:1001" # Run as a non-root user (important for security) ) print(f"Container started with ID: {container.id}") # Wait a moment for the container to fully start and expose its port time.sleep(5) # Increased sleep to give the application within the container more time # Retrieve the dynamically assigned host port for the container's 8080 port port_info = docker_client.api.port(container.id, 8080) if not port_info: # If port 8080 is not exposed, the container likely failed to start or is not exposing correctly print(f"Error: Port 8080 not exposed by container {container.id}. Inspecting container logs...") try: container_logs = container.logs().decode('utf-8') print(f"Container logs:\n{container_logs}") except Exception as log_e: print(f"Could not retrieve container logs: {log_e}") container.stop() container.remove(force=True) raise Exception("Port 8080 not exposed by container or container failed to start correctly. Check container logs.") host_port = port_info[0]['HostPort'] print(f"Container {container.id} is accessible on host port: {host_port}") # Step 4: Manage ngrok tunnel # Check if an ngrok tunnel already exists for this project and close it if project_id in deployed_projects and deployed_projects[project_id].get('ngrok_tunnel'): existing_tunnel = deployed_projects[project_id]['ngrok_tunnel'] print(f"Closing existing ngrok tunnel: {existing_tunnel.public_url}") try: existing_tunnel.disconnect() except Exception as ngrok_disconnect_e: print(f"Error disconnecting existing ngrok tunnel: {ngrok_disconnect_e}") deployed_projects[project_id]['ngrok_tunnel'] = None # Clear the reference # Connect a new ngrok tunnel to the dynamically assigned host port print(f"Connecting new ngrok tunnel to host port {host_port}") tunnel = ngrok.connect(host_port, bind_tls=True) # bind_tls=True for HTTPS public_url = tunnel.public_url print(f"Ngrok public URL for {app_name}: {public_url}") # Step 5: Update global state with new deployment details # Ensure the project_id exists in deployed_projects before updating if project_id not in deployed_projects: deployed_projects[project_id] = {} # Initialize if not already present (should be by deploy_from_git) deployed_projects[project_id].update({ "container_id": container.id, "container_name": container_name, "ngrok_tunnel": tunnel, "public_url": public_url, "status": "deployed" # Set status to deployed on success }) return public_url, container_name except docker.errors.BuildError as e: print(f"Docker build error: {e}") # Capture and return detailed build logs for better debugging build_logs_str = "\n".join([str(log_line.get('stream', '')).strip() for log_line in e.build_log if 'stream' in log_line]) if project_id in deployed_projects: deployed_projects[project_id]["status"] = "failed" raise HTTPException(status_code=500, detail=f"Docker build failed: {e.msg}\nLogs:\n{build_logs_str}") except docker.errors.ContainerError as e: print(f"Docker container runtime error: {e}") if project_id in deployed_projects: deployed_projects[project_id]["status"] = "failed" raise HTTPException(status_code=500, detail=f"Container failed during runtime: {e.stderr.decode()}") except docker.errors.APIError as e: print(f"Docker API error: {e}") if project_id in deployed_projects: deployed_projects[project_id]["status"] = "failed" raise HTTPException(status_code=500, detail=f"Docker daemon or API error: {e.explanation}") except Exception as e: print(f"General deployment error: {e}") if project_id in deployed_projects: deployed_projects[project_id]["status"] = "failed" raise HTTPException(status_code=500, detail=f"Deployment process failed unexpectedly: {str(e)}") # --- API Endpoints --- @router.post("/project") async def deploy_from_git(repo_url: str = Form(...), app_name: str = Form(...)): """ Deploys a FastAPI/Flask application from a specified Git repository. The repository must contain a main.py, requirements.txt, and Dockerfile. """ # Basic validation for the Git repository URL format if not repo_url.startswith(("http://", "https://", "git@", "ssh://")): raise HTTPException(status_code=400, detail="Invalid Git repository URL format. Must be HTTP(S) or SSH.") # Generate a unique ID for this project project_id = str(uuid.uuid4()) # Define project directories base_dir = os.path.dirname(os.path.abspath(__file__)) # This is where 'router.py' is projects_dir = os.path.abspath(os.path.join(base_dir, "..", "projects")) # Parent directory's 'projects' folder os.makedirs(projects_dir, exist_ok=True) # Ensure the base projects directory exists project_path = os.path.join(projects_dir, project_id) os.makedirs(project_path, exist_ok=True) # Create a unique directory for this project try: # Step 1: Clone the Git repository print(f"Cloning repository {repo_url} into {project_path}") git.Repo.clone_from(repo_url, project_path) print("Repository cloned successfully.") except git.exc.GitCommandError as e: print(f"Git clone failed: {e.stderr.decode()}") # Clean up the partially created project directory if cloning fails if os.path.exists(project_path): import shutil shutil.rmtree(project_path) raise HTTPException(status_code=400, detail=f"Failed to clone repository: {e.stderr.decode()}") except Exception as e: print(f"Unexpected error during git clone: {e}") if os.path.exists(project_path): import shutil shutil.rmtree(project_path) raise HTTPException(status_code=500, detail=f"An unexpected error occurred during repository cloning: {str(e)}") # Step 2: Validate required project files (main.py, requirements.txt, Dockerfile) main_py_path = _find_file_in_project("main.py", project_path) requirements_txt_path = _find_file_in_project("requirements.txt", project_path) dockerfile_path = _find_file_in_project("Dockerfile", project_path) missing_files = [] if not main_py_path: missing_files.append("main.py") if not requirements_txt_path: missing_files.append("requirements.txt") if not dockerfile_path: missing_files.append("Dockerfile") if missing_files: # Clean up the project directory if essential files are missing if os.path.exists(project_path): import shutil shutil.rmtree(project_path) raise HTTPException( status_code=400, detail=f"The cloned repository is missing required file(s): {', '.join(missing_files)} (case-insensitive search)." ) # Ensure Dockerfile is at the root of the project_path for Docker build context if os.path.dirname(dockerfile_path) != project_path: print(f"[DEBUG] Moving Dockerfile from {dockerfile_path} to project root: {project_path}") target_dockerfile_path = os.path.join(project_path, "Dockerfile") os.replace(dockerfile_path, target_dockerfile_path) dockerfile_path = target_dockerfile_path # Update the path to reference the new location # Step 3: Store initial project details in global state (or database) deployed_projects[project_id] = { "app_name": app_name, "repo_url": repo_url, "project_path": project_path, "status": "building", # Set initial status "container_name": None, # Will be set by _build_and_deploy "public_url": None, # Will be set by _build_and_deploy "ngrok_tunnel": None # Will be set by _build_and_deploy } print(f"Project {project_id} initialized for deployment.") # Step 4: Trigger the build and deploy process try: public_url, container_name = await _build_and_deploy(project_id, project_path, app_name) return JSONResponse({ "project_id": project_id, "container_name": container_name, "preview_url": public_url, "message": "Deployment initiated from Git repository. Check logs for status." }, status_code=202) # Use 202 Accepted, as deployment happens in background except HTTPException as e: # If _build_and_deploy raises a specific HTTPException, re-raise it if project_id in deployed_projects: deployed_projects[project_id]["status"] = "failed" raise e except Exception as e: # Catch any other unexpected errors during the build/deploy phase if project_id in deployed_projects: deployed_projects[project_id]["status"] = "failed" print(f"Error during initial _build_and_deploy for project {project_id}: {e}") raise HTTPException(status_code=500, detail=f"Initial deployment failed unexpectedly: {str(e)}") @router.post("/webhook/github") async def github_webhook(request: Request, background_tasks: BackgroundTasks): """ Endpoint to receive GitHub webhook events (e.g., push events) and trigger redeployments. """ # --- Security: Verify GitHub Webhook Signature --- # This is CRUCIAL to ensure the webhook is from GitHub and hasn't been tampered with. # For production, DO NOT comment this out. signature_header = request.headers.get("X-Hub-Signature-256") if not signature_header: raise HTTPException(status_code=403, detail="X-Hub-Signature-256 header missing.") # Read the raw request body once to use for hashing body = await request.body() try: # Calculate expected signature sha_name, signature = signature_header.split("=", 1) if sha_name != "sha256": raise HTTPException(status_code=400, detail="Invalid X-Hub-Signature-256 algorithm. Only sha256 supported.") # Use HMAC-SHA256 with your secret key to hash the raw request body # Ensure the secret is encoded to bytes mac = hmac.new(GITHUB_WEBHOOK_SECRET.encode("utf-8"), body, hashlib.sha256) # Compare the calculated hash with the signature received from GitHub if not hmac.compare_digest(mac.hexdigest(), signature): raise HTTPException(status_code=403, detail="Invalid GitHub signature.") except Exception as e: print(f"Webhook signature verification failed: {e}") raise HTTPException(status_code=403, detail="Signature verification failed.") # Parse the JSON payload from the webhook payload = await request.json() github_event = request.headers.get("X-GitHub-Event") print(f"Received GitHub '{github_event}' webhook for repository: {payload.get('repository', {}).get('full_name')}") # Process only 'push' events if github_event != "push": return JSONResponse({"message": f"Received '{github_event}' event, but only 'push' events are processed."}, status_code=200) # Get the repository URL from the webhook payload repo_url_from_webhook = payload.get("repository", {}).get("html_url") # Prefer html_url or clone_url if not repo_url_from_webhook: raise HTTPException(status_code=400, detail="Repository URL not found in webhook payload.") # Find the project linked to this repository in our in-memory storage project_to_redeploy = None project_id_to_redeploy = None for project_id, project_data in deployed_projects.items(): # Match based on repo_url. A more robust solution might normalize URLs or use repository IDs. if project_data.get("repo_url") == repo_url_from_webhook: project_to_redeploy = project_data project_id_to_redeploy = project_id break if not project_to_redeploy: print(f"No active project found for repository: {repo_url_from_webhook}. Webhook ignored.") return JSONResponse({"message": "No associated project found for this repository, ignoring webhook."}, status_code=200) print(f"Received push for {repo_url_from_webhook}. Triggering redeployment for project {project_id_to_redeploy} ({project_to_redeploy['app_name']}).") # Step 1: Pull the latest changes from the Git repository project_path = project_to_redeploy["project_path"] try: repo = git.Repo(project_path) origin = repo.remotes.origin print(f"Pulling latest changes for {repo_url_from_webhook} into {project_path}") origin.pull() # Pull the latest changes from the remote print("Latest changes pulled successfully.") except git.exc.GitCommandError as e: print(f"Failed to pull latest changes for {repo_url_from_webhook}: {e.stderr.decode()}") # Update project status to failed if pull fails deployed_projects[project_id_to_redeploy]["status"] = "failed" return JSONResponse({"error": f"Failed to pull latest changes: {e.stderr.decode()}"}, status_code=500) except Exception as e: print(f"Unexpected error during git pull: {e}") deployed_projects[project_id_to_redeploy]["status"] = "failed" return JSONResponse({"error": f"An unexpected error occurred during git pull: {str(e)}"}, status_code=500) # Step 2: Trigger redeployment in a background task # Using FastAPI's BackgroundTasks ensures the webhook endpoint returns immediately, # preventing timeouts for GitHub, while the redeployment happens asynchronously. # Get the current container name for proper cleanup in _build_and_deploy current_container_name = project_to_redeploy.get("container_name") # Add the build and deploy task to background tasks background_tasks.add_task( _build_and_deploy, project_id_to_redeploy, project_path, project_to_redeploy["app_name"], current_container_name # Pass existing container name for cleanup ) # Update project status to indicate redeployment is in progress deployed_projects[project_id_to_redeploy]["status"] = "redeploying" return JSONResponse( {"message": f"Redeployment for project {project_id_to_redeploy} initiated from GitHub webhook."}, background=background_tasks, status_code=202 # 202 Accepted: request has been accepted for processing ) # --- Cleanup Endpoint (Optional, for manual testing/management) --- @router.post("/project/delete/{project_id}") async def delete_project(project_id: str): """ Deletes a deployed project, its Docker container, ngrok tunnel, and local files. """ if project_id not in deployed_projects: raise HTTPException(status_code=404, detail=f"Project with ID {project_id} not found.") project_data = deployed_projects[project_id] # Stop and remove Docker container docker_client = docker.from_env() container_name = project_data.get("container_name") if container_name: try: container = docker_client.containers.get(container_name) container.stop(timeout=5) container.remove(force=True) print(f"Container {container_name} for project {project_id} removed.") except docker.errors.NotFound: print(f"Container {container_name} not found, already removed?") except Exception as e: print(f"Error removing container {container_name}: {e}") # Do not raise HTTPException, try to continue cleanup # Disconnect ngrok tunnel ngrok_tunnel = project_data.get("ngrok_tunnel") if ngrok_tunnel: try: ngrok_tunnel.disconnect() print(f"Ngrok tunnel for project {project_id} disconnected.") except Exception as e: print(f"Error disconnecting ngrok tunnel for project {project_id}: {e}") # Remove local project directory project_path = project_data.get("project_path") if project_path and os.path.exists(project_path): try: import shutil shutil.rmtree(project_path) print(f"Project directory {project_path} removed.") except Exception as e: print(f"Error removing project directory {project_path}: {e}") # Remove from global state del deployed_projects[project_id] print(f"Project {project_id} removed from deployed_projects.") return JSONResponse({"message": f"Project {project_id} and associated resources deleted."})