Update dockerfile
dockerfile CHANGED (+16 -10)
@@ -1,27 +1,33 @@
 # Use an official Python runtime as a parent image
 FROM python:3.10-slim

-#
-WORKDIR /app
-
-# Install system dependencies
+# Install curl and other dependencies required for Ollama
 RUN apt-get update && apt-get install -y curl && rm -rf /var/lib/apt/lists/*

 # Install Ollama
 RUN curl -fsSL https://ollama.com/install.sh | sh

-# Pre-download the llama3.2:1b model
+# Pre-download the llama3.2:1b model during build
 RUN ollama pull llama3.2:1b

-#
-
+# Set up non-root user as required by Hugging Face Spaces
+RUN useradd -m -u 1000 user
+USER user
+ENV HOME=/home/user \
+    PATH="/home/user/.local/bin:$PATH"
+
+# Set working directory
+WORKDIR /home/user/app
+
+# Copy and install Python dependencies
+COPY --chown=user:user requirements.txt .
 RUN pip install --no-cache-dir -r requirements.txt

 # Copy the app code
-COPY app.py .
+COPY --chown=user:user app.py .

 # Expose the Streamlit port
 EXPOSE 8501

-#
-CMD
+# Start Ollama in the background and then run Streamlit
+CMD ollama serve & sleep 5 && streamlit run app.py --server.port 8501 --server.address 0.0.0.0
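
The final CMD gives the Ollama server a fixed five-second head start before Streamlit launches. If that window ever turns out to be too short, one possible alternative (not part of this commit) is a small wrapper script that polls the server until it responds; the file name entrypoint.sh and the use of Ollama's default port 11434 are assumptions here.

#!/bin/sh
# entrypoint.sh -- hypothetical wrapper, not part of the commit above.

# Start the Ollama server in the background.
ollama serve &

# Wait until the Ollama HTTP API answers instead of sleeping a fixed time.
until curl -sf http://127.0.0.1:11434/api/tags > /dev/null; do
  sleep 1
done

# Hand off to Streamlit in the foreground.
exec streamlit run app.py --server.port 8501 --server.address 0.0.0.0

The Dockerfile would then copy this script alongside app.py and point CMD (or ENTRYPOINT) at it instead of the inline command.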
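
To try the image outside of Spaces, a typical local build-and-run sequence looks roughly like this; the tag name ollama-streamlit is arbitrary.

# Build the image; the llama3.2:1b pull happens at build time,
# so expect a large image and a slow first build.
docker build -t ollama-streamlit .

# Run it, publishing the Streamlit port declared by EXPOSE.
docker run -p 8501:8501 ollama-streamlit

The app should then be reachable at http://localhost:8501.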