# Ai_lip_sync Dockerfile
# Use an official Python runtime as a parent image
FROM python:3.9-slim
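# Note: the plain "slim" tag follows the latest Debian release; pinning a release
# (e.g. python:3.9-slim-bullseye) is an option if apt package names and LLVM
# versions need to stay stable across rebuilds.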
# Set environment variables for Python and Numba
ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1 \
NUMBA_DISABLE_JIT=1 \
NUMBA_CACHE_DIR=/tmp/numba_cache
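# NUMBA_DISABLE_JIT=1 makes Numba-decorated functions run as plain Python (no LLVM
# JIT at runtime); NUMBA_CACHE_DIR points Numba's on-disk cache at a writable location.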
# Set the working directory
WORKDIR /app
# Copy the requirements file into the container at /app
COPY requirements.txt /app/
# Install build dependencies
RUN apt-get update && \
apt-get install -y --no-install-recommends \
llvm \
clang \
build-essential \
libedit-dev \
libffi-dev \
python3-dev \
libgl1-mesa-glx \
libsm6 \
libxrender1 \
libglib2.0-0 \
ffmpeg \
&& rm -rf /var/lib/apt/lists/*
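# Note: llvm/clang/build-essential are only needed if llvmlite/numba must compile from
# source; libgl1-mesa-glx, libsm6, libxrender1 and libglib2.0-0 are the usual
# opencv-python runtime libraries, and ffmpeg handles audio/video processing.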
# Point LLVM_CONFIG at the unversioned symlink provided by the "llvm" metapackage
# installed above (only needed if llvmlite has to build from source)
ENV LLVM_CONFIG=/usr/bin/llvm-config
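# Optional sanity check (not in the original build) that the path above resolves:
# RUN llvm-config --version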
# Install numpy first for numba compatibility
RUN pip install --no-cache-dir numpy==1.22.4
# Pinned librosa/numba/llvmlite stack
RUN pip install --no-cache-dir librosa==0.9.2 numba==0.55.2 llvmlite==0.38.0
# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt
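# Optional check (not in the original build) that requirements.txt kept the pins above:
# RUN python -c "import numpy, numba, librosa; print(numpy.__version__, numba.__version__, librosa.__version__)"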
# Copy the rest of the application
COPY . /app/
# Create necessary directories
RUN mkdir -p /app/cache /app/uploads /app/results /app/checkpoints /app/temp \
&& chmod -R 777 /app/cache /app/uploads /app/results /app/checkpoints /app/temp
# Ensure full permissions for app directory
RUN chmod -R 777 /app
# Expose the app port
EXPOSE 7860
# Set Flask environment variables
ENV FLASK_APP=app.py \
FLASK_ENV=production
# Start the application with Gunicorn
CMD ["gunicorn","0.0.0.0:7860","app:app"]