Update Dockerfile

Dockerfile  CHANGED  +18 -10
@@ -1,36 +1,44 @@
 # 1. Base Image
 FROM python:3.10-slim
 
-# 2.
+# 2. Install system dependencies REQUIRED FOR COMPILING llama-cpp-python
+# This is the key fix for the build error.
+# We run this as root before switching to the non-root user.
+RUN apt-get update && apt-get install -y \
+    build-essential \
+    cmake \
+    && rm -rf /var/lib/apt/lists/*
+
+# 3. Set up a non-root user
 RUN useradd -m -u 1000 user
 USER user
 
-#
+# 4. Set Environment Variables & Working Directory
 ENV HOME=/home/user
 ENV PATH=$HOME/.local/bin:$PATH
 WORKDIR $HOME/app
 
-#
+# 5. Copy requirements first for better Docker layer caching
 COPY --chown=user requirements.txt .
 
-#
+# 6. Install Python dependencies for the non-root user
+# This will now succeed because the build tools are available.
 RUN pip install --no-cache-dir --user -r requirements.txt
 
-#
-# This ensures the model is already inside the image and is not downloaded every time the Space starts up
+# 7. Download the model during the build process
 RUN huggingface-cli download Dnfs/gema-4b-indra10k-model1-Q4_K_M-GGUF \
     --local-dir ./model \
     --local-dir-use-symlinks False
 
-#
+# 8. Copy the rest of the application code
 COPY --chown=user app.py .
 
-#
+# 9. Expose the port the app runs on
 EXPOSE 8000
 
-#
+# 10. Health check to ensure the app is running
 HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
     CMD curl -f http://localhost:8000/health || exit 1
 
-#
+# 11. Command to run the application
 CMD ["python", "app.py"]
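
The "RUN huggingface-cli download" step bakes the GGUF weights into the image at build time, so the Space does not have to fetch them on every startup. For reference, that CLI call is roughly equivalent to the following huggingface_hub sketch in Python (illustration only, not part of this commit):

    # Roughly what the "huggingface-cli download" step in the Dockerfile does,
    # expressed with the huggingface_hub Python API. Illustrative sketch only.
    from huggingface_hub import snapshot_download

    snapshot_download(
        repo_id="Dnfs/gema-4b-indra10k-model1-Q4_K_M-GGUF",
        local_dir="./model",
        local_dir_use_symlinks=False,
    )

Downloading during the build trades a larger image for a faster, network-independent startup, which is the reason the model lands in ./model before the container ever runs.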
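
EXPOSE 8000, the HEALTHCHECK, and CMD ["python", "app.py"] together assume that app.py starts an HTTP server on port 8000 and answers GET /health. app.py is not part of this diff, so the sketch below is only one plausible shape for it; the FastAPI/uvicorn stack, the /generate route, and the way the GGUF file is located inside ./model are assumptions, not taken from this repository:

    # Hypothetical app.py consistent with this Dockerfile: it listens on port 8000,
    # answers the GET /health probe used by HEALTHCHECK, and loads the GGUF model
    # that the build step downloaded into ./model. Names below are illustrative.
    from pathlib import Path

    import uvicorn
    from fastapi import FastAPI
    from llama_cpp import Llama

    app = FastAPI()

    # Use whichever .gguf file the huggingface-cli step placed in ./model
    model_path = next(Path("./model").glob("*.gguf"))
    llm = Llama(model_path=str(model_path))

    @app.get("/health")
    def health():
        # Target of: curl -f http://localhost:8000/health
        return {"status": "ok"}

    @app.post("/generate")
    def generate(prompt: str, max_tokens: int = 128):
        result = llm(prompt, max_tokens=max_tokens)
        return {"text": result["choices"][0]["text"]}

    if __name__ == "__main__":
        # CMD ["python", "app.py"] launches the server directly
        uvicorn.run(app, host="0.0.0.0", port=8000)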