# Use an official Python runtime as a base image
FROM python:3.10

# Set working directory
WORKDIR /app
# Install system dependencies for llama_cpp
RUN apt-get update && apt-get install -y \
    gcc \
    g++ \
    libffi-dev \
    libgcc-s1 \
    libstdc++6 \
    libopenblas-dev \
    && rm -rf /var/lib/apt/lists/*
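
# Note (assumption, not part of the original setup): libopenblas-dev is only
# used by llama_cpp if the llama-cpp-python wheel is actually compiled with
# BLAS support. When building from source, this is typically enabled through
# CMake flags, e.g. something like the line below (exact flag names vary
# between llama-cpp-python versions, so verify against the version you pin):
# ENV CMAKE_ARGS="-DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS"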
# Set environment variables for the Hugging Face / Transformers cache
ENV TRANSFORMERS_CACHE=/app/cache
ENV HF_HOME=/app/cache
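# Newer transformers releases prefer HF_HOME and warn that TRANSFORMERS_CACHE
# is deprecated; setting both keeps older and newer versions pointed at the
# same cache directory.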

# Create cache directory with write permissions
RUN mkdir -p /app/cache && chmod -R 777 /app/cache
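# The permissive mode is useful when the container runs as a non-root user
# (for example on Hugging Face Spaces); otherwise downloads into /app/cache
# can fail with permission errors.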

# Copy and install requirements
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
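# Installing dependencies before copying the application code lets Docker
# cache this layer, so changes to app.py do not force a full reinstall.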

# Copy application files
COPY app.py .
COPY cv_embeddings.json .
COPY cv_text.txt .

# Expose port
EXPOSE 7860

# Run the application
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
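
# Local build/run example (image tag is illustrative):
#   docker build -t cv-chat-api .
#   docker run --rm -p 7860:7860 cv-chat-api
# Port 7860 matches the default port expected by Hugging Face Spaces.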