# Standard library imports
import os
import shutil  # For removing project directories during cleanup
import uuid
import time
import zipfile # Still imported, but primarily for handling potential old zip logic or error messages
# Third-party library imports
import docker # For interacting with Docker daemon
import git # For Git repository operations (requires 'GitPython' package: pip install GitPython)
import hmac # For validating GitHub webhook signatures (important for security)
import hashlib # For hashing in webhook signature validation
from pyngrok import ngrok # For creating public URLs (ensure ngrok is configured)
from fastapi import APIRouter, HTTPException, UploadFile, Form, Request, BackgroundTasks # Added Request and BackgroundTasks
from fastapi.responses import JSONResponse
# Initialize FastAPI router
router = APIRouter()
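# In-memory registry of deployed projects. Note that this state is lost whenever the process
# restarts, so webhook-driven redeploys only work for projects deployed since the last restart;
# a persistent store (e.g. a database) would be needed for durability.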
deployed_projects = {}
GITHUB_WEBHOOK_SECRET = os.getenv("GITHUB_WEBHOOK_SECRET", "your_github_webhook_secret_here_CHANGE_THIS")
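# The same secret string must also be entered on the GitHub side (repository Settings -> Webhooks
# -> Secret) so that the signature check in the webhook handler below can succeed.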
if GITHUB_WEBHOOK_SECRET == "your_github_webhook_secret_here_CHANGE_THIS":
print("WARNING: GITHUB_WEBHOOK_SECRET is not set. Webhook security is compromised.")
# --- Helper Functions ---
# Function to recursively find a file (case-insensitive) within a directory
def _find_file_in_project(filename: str, root_dir: str) -> str | None:
"""
Searches for a file (case-insensitive) within the given root directory and its subdirectories.
Returns the absolute path to the file if found, otherwise None.
"""
filename_lower = filename.lower()
for dirpath, _, files in os.walk(root_dir):
for file in files:
if file.lower() == filename_lower:
return os.path.join(dirpath, file)
return None
# Function to build and deploy a Docker container from a project path
async def _build_and_deploy(project_id: str, project_path: str, app_name: str, existing_container_name: str = None):
"""
Handles the Docker build and deployment process for a given project.
If an existing_container_name is provided, it attempts to stop and remove it first.
Manages ngrok tunnels for the deployed application.
"""
docker_client = docker.from_env()
# Define consistent naming for Docker image and container
image_name = f"{app_name.lower()}_{project_id[:8]}"
container_name = f"{image_name}_container"
try:
# Step 1: Clean up old containers and images if they exist
# Stop and remove the previously deployed container for this project
if existing_container_name:
print(f"Attempting to stop and remove existing container: {existing_container_name}")
try:
old_container = docker_client.containers.get(existing_container_name)
old_container.stop(timeout=5) # Give 5 seconds to stop gracefully
old_container.remove(force=True)
print(f"Successfully stopped and removed old container: {existing_container_name}")
except docker.errors.NotFound:
print(f"Existing container {existing_container_name} not found, proceeding with new deployment.")
except Exception as e:
print(f"Error stopping/removing old container {existing_container_name}: {e}")
# Remove any exited or created containers that might be lingering from previous runs
# (This is a general cleanup, not specific to this project_id, but good practice)
for c in docker_client.containers.list(all=True):
if c.status in ["created", "exited"]: # Only remove non-running containers
# Be cautious: only remove containers clearly associated with this deployment logic
# For more robust logic, might check labels or names more strictly
                if c.name.startswith(f"{app_name.lower()}_{project_id[:8]}") or c.name.startswith("ngrok-"):
print(f"Removing leftover container {c.name} ({c.id}) with status {c.status}")
try:
c.remove(force=True)
except Exception as e:
print(f"Error removing leftover container {c.name}: {e}")
# Step 2: Build Docker image
print(f"Building Docker image from {project_path} with tag: {image_name}")
image, build_logs_generator = docker_client.images.build(path=project_path, tag=image_name, rm=True)
# Process build logs (can be streamed to UI in a real application)
for log_line in build_logs_generator:
if 'stream' in log_line:
print(f"[BUILD LOG] {log_line['stream'].strip()}")
elif 'error' in log_line:
print(f"[BUILD ERROR] {log_line['error'].strip()}")
print(f"Docker image built successfully: {image.id}")
# Step 3: Run new Docker container
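        # The run options below impose constraints on the deployed image: it must listen on
        # port 8080, tolerate a read-only root filesystem (only /tmp is writable), and work
        # when run as a non-root user such as UID 1001. A minimal Dockerfile satisfying these
        # constraints might look like the following (illustrative sketch only; the base image
        # and entrypoint are assumptions, not requirements of this service):
        #
        #   FROM python:3.11-slim
        #   WORKDIR /app
        #   COPY requirements.txt .
        #   RUN pip install --no-cache-dir -r requirements.txt
        #   COPY . .
        #   USER 1001
        #   EXPOSE 8080
        #   CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8080"]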
print(f"Running new container {container_name} from image {image_name}")
container = docker_client.containers.run(
image=image_name,
ports={"8080/tcp": None}, # Docker will assign a random host port for 8080/tcp
name=container_name,
detach=True, # Run in background
mem_limit="512m", # Limit memory usage
nano_cpus=1_000_000_000, # Limit CPU usage to 1 full core (1 billion nano-CPUs)
read_only=True, # Make container filesystem read-only (except tmpfs)
tmpfs={"/tmp": ""}, # Mount an in-memory tmpfs for /tmp directory
user="1001:1001" # Run as a non-root user (important for security)
)
print(f"Container started with ID: {container.id}")
# Wait a moment for the container to fully start and expose its port
time.sleep(5) # Increased sleep to give the application within the container more time
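        # A fixed sleep is a simple but fragile way to wait for startup; a more robust approach
        # would poll container.reload() / container.status (or rely on a Docker HEALTHCHECK)
        # until the application inside the container is actually ready to accept connections.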
# Retrieve the dynamically assigned host port for the container's 8080 port
port_info = docker_client.api.port(container.id, 8080)
if not port_info:
# If port 8080 is not exposed, the container likely failed to start or is not exposing correctly
print(f"Error: Port 8080 not exposed by container {container.id}. Inspecting container logs...")
try:
container_logs = container.logs().decode('utf-8')
print(f"Container logs:\n{container_logs}")
except Exception as log_e:
print(f"Could not retrieve container logs: {log_e}")
container.stop()
container.remove(force=True)
raise Exception("Port 8080 not exposed by container or container failed to start correctly. Check container logs.")
host_port = port_info[0]['HostPort']
print(f"Container {container.id} is accessible on host port: {host_port}")
# Step 4: Manage ngrok tunnel
# Check if an ngrok tunnel already exists for this project and close it
if project_id in deployed_projects and deployed_projects[project_id].get('ngrok_tunnel'):
existing_tunnel = deployed_projects[project_id]['ngrok_tunnel']
print(f"Closing existing ngrok tunnel: {existing_tunnel.public_url}")
try:
                ngrok.disconnect(existing_tunnel.public_url)  # pyngrok's module-level disconnect takes the tunnel's public URL
except Exception as ngrok_disconnect_e:
print(f"Error disconnecting existing ngrok tunnel: {ngrok_disconnect_e}")
deployed_projects[project_id]['ngrok_tunnel'] = None # Clear the reference
# Connect a new ngrok tunnel to the dynamically assigned host port
print(f"Connecting new ngrok tunnel to host port {host_port}")
tunnel = ngrok.connect(host_port, bind_tls=True) # bind_tls=True for HTTPS
public_url = tunnel.public_url
print(f"Ngrok public URL for {app_name}: {public_url}")
# Step 5: Update global state with new deployment details
# Ensure the project_id exists in deployed_projects before updating
if project_id not in deployed_projects:
deployed_projects[project_id] = {} # Initialize if not already present (should be by deploy_from_git)
deployed_projects[project_id].update({
"container_id": container.id,
"container_name": container_name,
"ngrok_tunnel": tunnel,
"public_url": public_url,
"status": "deployed" # Set status to deployed on success
})
return public_url, container_name
except docker.errors.BuildError as e:
print(f"Docker build error: {e}")
# Capture and return detailed build logs for better debugging
build_logs_str = "\n".join([str(log_line.get('stream', '')).strip() for log_line in e.build_log if 'stream' in log_line])
if project_id in deployed_projects:
deployed_projects[project_id]["status"] = "failed"
raise HTTPException(status_code=500, detail=f"Docker build failed: {e.msg}\nLogs:\n{build_logs_str}")
except docker.errors.ContainerError as e:
print(f"Docker container runtime error: {e}")
if project_id in deployed_projects:
deployed_projects[project_id]["status"] = "failed"
raise HTTPException(status_code=500, detail=f"Container failed during runtime: {e.stderr.decode()}")
except docker.errors.APIError as e:
print(f"Docker API error: {e}")
if project_id in deployed_projects:
deployed_projects[project_id]["status"] = "failed"
raise HTTPException(status_code=500, detail=f"Docker daemon or API error: {e.explanation}")
except Exception as e:
print(f"General deployment error: {e}")
if project_id in deployed_projects:
deployed_projects[project_id]["status"] = "failed"
raise HTTPException(status_code=500, detail=f"Deployment process failed unexpectedly: {str(e)}")
# --- API Endpoints ---
@router.post("/project")
async def deploy_from_git(repo_url: str = Form(...), app_name: str = Form(...)):
"""
Deploys a FastAPI/Flask application from a specified Git repository.
The repository must contain a main.py, requirements.txt, and Dockerfile.
"""
# Basic validation for the Git repository URL format
if not repo_url.startswith(("http://", "https://", "git@", "ssh://")):
raise HTTPException(status_code=400, detail="Invalid Git repository URL format. Must be HTTP(S) or SSH.")
# Generate a unique ID for this project
project_id = str(uuid.uuid4())
# Define project directories
base_dir = os.path.dirname(os.path.abspath(__file__)) # This is where 'router.py' is
projects_dir = os.path.abspath(os.path.join(base_dir, "..", "projects")) # Parent directory's 'projects' folder
os.makedirs(projects_dir, exist_ok=True) # Ensure the base projects directory exists
project_path = os.path.join(projects_dir, project_id)
os.makedirs(project_path, exist_ok=True) # Create a unique directory for this project
try:
# Step 1: Clone the Git repository
print(f"Cloning repository {repo_url} into {project_path}")
git.Repo.clone_from(repo_url, project_path)
print("Repository cloned successfully.")
    except git.exc.GitCommandError as e:
        print(f"Git clone failed: {e.stderr}")
        # Clean up the partially created project directory if cloning fails
        if os.path.exists(project_path):
            shutil.rmtree(project_path)
        raise HTTPException(status_code=400, detail=f"Failed to clone repository: {e.stderr}")
    except Exception as e:
        print(f"Unexpected error during git clone: {e}")
        if os.path.exists(project_path):
            shutil.rmtree(project_path)
        raise HTTPException(status_code=500, detail=f"An unexpected error occurred during repository cloning: {str(e)}")
# Step 2: Validate required project files (main.py, requirements.txt, Dockerfile)
main_py_path = _find_file_in_project("main.py", project_path)
requirements_txt_path = _find_file_in_project("requirements.txt", project_path)
dockerfile_path = _find_file_in_project("Dockerfile", project_path)
missing_files = []
if not main_py_path:
missing_files.append("main.py")
if not requirements_txt_path:
missing_files.append("requirements.txt")
if not dockerfile_path:
missing_files.append("Dockerfile")
if missing_files:
# Clean up the project directory if essential files are missing
        if os.path.exists(project_path):
            shutil.rmtree(project_path)
raise HTTPException(
status_code=400,
detail=f"The cloned repository is missing required file(s): {', '.join(missing_files)} (case-insensitive search)."
)
# Ensure Dockerfile is at the root of the project_path for Docker build context
if os.path.dirname(dockerfile_path) != project_path:
print(f"[DEBUG] Moving Dockerfile from {dockerfile_path} to project root: {project_path}")
target_dockerfile_path = os.path.join(project_path, "Dockerfile")
os.replace(dockerfile_path, target_dockerfile_path)
dockerfile_path = target_dockerfile_path # Update the path to reference the new location
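    # Note: relocating only the Dockerfile can break relative COPY/ADD paths if it was written
    # for a subdirectory build context. An alternative (not used here) would be to leave the file
    # in place and pass its relative path via the `dockerfile` argument of images.build().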
# Step 3: Store initial project details in global state (or database)
deployed_projects[project_id] = {
"app_name": app_name,
"repo_url": repo_url,
"project_path": project_path,
"status": "building", # Set initial status
"container_name": None, # Will be set by _build_and_deploy
"public_url": None, # Will be set by _build_and_deploy
"ngrok_tunnel": None # Will be set by _build_and_deploy
}
print(f"Project {project_id} initialized for deployment.")
# Step 4: Trigger the build and deploy process
try:
public_url, container_name = await _build_and_deploy(project_id, project_path, app_name)
        return JSONResponse({
            "project_id": project_id,
            "container_name": container_name,
            "preview_url": public_url,
            "message": "Deployment from Git repository completed successfully."
        }, status_code=201)  # 201 Created: _build_and_deploy is awaited above, so the build has finished before this response
except HTTPException as e:
# If _build_and_deploy raises a specific HTTPException, re-raise it
if project_id in deployed_projects:
deployed_projects[project_id]["status"] = "failed"
raise e
except Exception as e:
# Catch any other unexpected errors during the build/deploy phase
if project_id in deployed_projects:
deployed_projects[project_id]["status"] = "failed"
print(f"Error during initial _build_and_deploy for project {project_id}: {e}")
raise HTTPException(status_code=500, detail=f"Initial deployment failed unexpectedly: {str(e)}")
@router.post("/webhook/github")
async def github_webhook(request: Request, background_tasks: BackgroundTasks):
"""
Endpoint to receive GitHub webhook events (e.g., push events) and trigger redeployments.
"""
# --- Security: Verify GitHub Webhook Signature ---
# This is CRUCIAL to ensure the webhook is from GitHub and hasn't been tampered with.
# For production, DO NOT comment this out.
signature_header = request.headers.get("X-Hub-Signature-256")
if not signature_header:
raise HTTPException(status_code=403, detail="X-Hub-Signature-256 header missing.")
# Read the raw request body once to use for hashing
body = await request.body()
try:
# Calculate expected signature
sha_name, signature = signature_header.split("=", 1)
if sha_name != "sha256":
raise HTTPException(status_code=400, detail="Invalid X-Hub-Signature-256 algorithm. Only sha256 supported.")
# Use HMAC-SHA256 with your secret key to hash the raw request body
# Ensure the secret is encoded to bytes
mac = hmac.new(GITHUB_WEBHOOK_SECRET.encode("utf-8"), body, hashlib.sha256)
# Compare the calculated hash with the signature received from GitHub
if not hmac.compare_digest(mac.hexdigest(), signature):
raise HTTPException(status_code=403, detail="Invalid GitHub signature.")
    except HTTPException:
        # Re-raise the specific HTTP errors generated above instead of masking them
        raise
    except Exception as e:
        print(f"Webhook signature verification failed: {e}")
        raise HTTPException(status_code=403, detail="Signature verification failed.")
# Parse the JSON payload from the webhook
payload = await request.json()
github_event = request.headers.get("X-GitHub-Event")
print(f"Received GitHub '{github_event}' webhook for repository: {payload.get('repository', {}).get('full_name')}")
# Process only 'push' events
if github_event != "push":
return JSONResponse({"message": f"Received '{github_event}' event, but only 'push' events are processed."}, status_code=200)
# Get the repository URL from the webhook payload
repo_url_from_webhook = payload.get("repository", {}).get("html_url") # Prefer html_url or clone_url
if not repo_url_from_webhook:
raise HTTPException(status_code=400, detail="Repository URL not found in webhook payload.")
# Find the project linked to this repository in our in-memory storage
project_to_redeploy = None
project_id_to_redeploy = None
for project_id, project_data in deployed_projects.items():
# Match based on repo_url. A more robust solution might normalize URLs or use repository IDs.
if project_data.get("repo_url") == repo_url_from_webhook:
project_to_redeploy = project_data
project_id_to_redeploy = project_id
break
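    # Note: a user-supplied clone URL (for example one ending in ".git") will not literally match
    # the payload's html_url, so this exact-string comparison can miss legitimate projects.
    # Normalising both sides (stripping a trailing ".git", lowercasing, or also comparing against
    # the payload's clone_url) would make the lookup more reliable.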
if not project_to_redeploy:
print(f"No active project found for repository: {repo_url_from_webhook}. Webhook ignored.")
return JSONResponse({"message": "No associated project found for this repository, ignoring webhook."}, status_code=200)
print(f"Received push for {repo_url_from_webhook}. Triggering redeployment for project {project_id_to_redeploy} ({project_to_redeploy['app_name']}).")
# Step 1: Pull the latest changes from the Git repository
project_path = project_to_redeploy["project_path"]
try:
repo = git.Repo(project_path)
origin = repo.remotes.origin
print(f"Pulling latest changes for {repo_url_from_webhook} into {project_path}")
origin.pull() # Pull the latest changes from the remote
print("Latest changes pulled successfully.")
    except git.exc.GitCommandError as e:
        print(f"Failed to pull latest changes for {repo_url_from_webhook}: {e.stderr}")
        # Update project status to failed if pull fails
        deployed_projects[project_id_to_redeploy]["status"] = "failed"
        return JSONResponse({"error": f"Failed to pull latest changes: {e.stderr}"}, status_code=500)
except Exception as e:
print(f"Unexpected error during git pull: {e}")
deployed_projects[project_id_to_redeploy]["status"] = "failed"
return JSONResponse({"error": f"An unexpected error occurred during git pull: {str(e)}"}, status_code=500)
# Step 2: Trigger redeployment in a background task
# Using FastAPI's BackgroundTasks ensures the webhook endpoint returns immediately,
# preventing timeouts for GitHub, while the redeployment happens asynchronously.
# Get the current container name for proper cleanup in _build_and_deploy
current_container_name = project_to_redeploy.get("container_name")
# Add the build and deploy task to background tasks
background_tasks.add_task(
_build_and_deploy,
project_id_to_redeploy,
project_path,
project_to_redeploy["app_name"],
current_container_name # Pass existing container name for cleanup
)
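    # BackgroundTasks accepts async callables, so _build_and_deploy is awaited after the response
    # has been sent. Any failure inside it only updates deployed_projects[...]["status"]; nothing
    # is reported back to GitHub, so the outcome of the redeploy has to be observed via logs or a
    # separate status check.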
# Update project status to indicate redeployment is in progress
deployed_projects[project_id_to_redeploy]["status"] = "redeploying"
return JSONResponse(
{"message": f"Redeployment for project {project_id_to_redeploy} initiated from GitHub webhook."},
background=background_tasks,
status_code=202 # 202 Accepted: request has been accepted for processing
)
# --- Cleanup Endpoint (Optional, for manual testing/management) ---
@router.post("/project/delete/{project_id}")
async def delete_project(project_id: str):
"""
Deletes a deployed project, its Docker container, ngrok tunnel, and local files.
"""
if project_id not in deployed_projects:
raise HTTPException(status_code=404, detail=f"Project with ID {project_id} not found.")
project_data = deployed_projects[project_id]
# Stop and remove Docker container
docker_client = docker.from_env()
container_name = project_data.get("container_name")
if container_name:
try:
container = docker_client.containers.get(container_name)
container.stop(timeout=5)
container.remove(force=True)
print(f"Container {container_name} for project {project_id} removed.")
except docker.errors.NotFound:
print(f"Container {container_name} not found, already removed?")
except Exception as e:
print(f"Error removing container {container_name}: {e}")
# Do not raise HTTPException, try to continue cleanup
# Disconnect ngrok tunnel
ngrok_tunnel = project_data.get("ngrok_tunnel")
if ngrok_tunnel:
try:
            ngrok.disconnect(ngrok_tunnel.public_url)  # Disconnect via pyngrok using the tunnel's public URL
print(f"Ngrok tunnel for project {project_id} disconnected.")
except Exception as e:
print(f"Error disconnecting ngrok tunnel for project {project_id}: {e}")
# Remove local project directory
project_path = project_data.get("project_path")
if project_path and os.path.exists(project_path):
        try:
            shutil.rmtree(project_path)
print(f"Project directory {project_path} removed.")
except Exception as e:
print(f"Error removing project directory {project_path}: {e}")
# Remove from global state
del deployed_projects[project_id]
print(f"Project {project_id} removed from deployed_projects.")
return JSONResponse({"message": f"Project {project_id} and associated resources deleted."})