ash-171 committed on
Commit 974fb82 · verified · 1 Parent(s): 1cfe90e

Update Dockerfile

Files changed (1)
  1. Dockerfile +25 -18
Dockerfile CHANGED
@@ -1,41 +1,48 @@
+# Base image with GPU support
 FROM nvidia/cuda:11.8.0-cudnn8-runtime-ubuntu22.04
 
+# Set env vars
 ENV DEBIAN_FRONTEND=noninteractive
 ENV PYTHONDONTWRITEBYTECODE=1
 ENV PYTHONUNBUFFERED=1
 
-# Install system dependencies as root
+# Install Python and other system dependencies
 RUN apt-get update && apt-get install -y \
     python3 python3-pip ffmpeg curl git wget sudo \
     && rm -rf /var/lib/apt/lists/*
 
-# Create Python symlink before switching user
-RUN ln -s /usr/bin/python3 /usr/bin/python
-RUN pip install --upgrade pip
+# Ensure `python` points to `python3`
+RUN ln -sf /usr/bin/python3 /usr/bin/python && pip install --upgrade pip
 
-# Create non-root user and switch
+# Add a user to avoid running everything as root (best practice)
 RUN useradd -ms /bin/bash ollama
 USER ollama
 WORKDIR /home/ollama
 
-# Set up Python
-RUN ln -s /usr/bin/python3 /usr/bin/python
-RUN pip install --upgrade pip
-
-# Install Ollama (under user home, not root)
+# Install Ollama as user
 RUN curl -fsSL https://ollama.com/install.sh | sh
 
-# Set up app directory under user space
-WORKDIR /home/ollama/app
-COPY requirements.txt .
-RUN pip install -r requirements.txt
+# Switch back to root to install Python packages globally
+USER root
+
+# Create working directory for your app
+WORKDIR /app
+
+# Copy project files
 COPY . .
 
-# Download Hugging Face interface file
+# Download SpeechBrain model interface if needed
 RUN wget -O src/custome_interface.py https://huggingface.co/Jzuluaga/accent-id-commonaccent_xlsr-en-english/resolve/main/custom_interface.py
 
-# Expose port for Streamlit
+# Install Python dependencies
+RUN pip install -r requirements.txt
+
+# Expose Streamlit default port
 EXPOSE 8501
 
-# Start Ollama and the app
-CMD bash -c "ollama serve & sleep 5 && ollama pull gemma3 && streamlit run streamlit_app.py --server.port=8501 --server.address=0.0.0.0"
+# Entrypoint: Start Ollama server, wait a bit, pull model, then launch Streamlit
+CMD bash -c "\
+    /home/ollama/.ollama/bin/ollama serve & \
+    sleep 5 && \
+    /home/ollama/.ollama/bin/ollama pull gemma3 && \
+    streamlit run streamlit_app.py --server.port=8501 --server.address=0.0.0.0"
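
For reference, an image from the updated Dockerfile could be built and started roughly as follows. This is a sketch, not part of the commit: the image tag accent-app is an assumption chosen for illustration, and the --gpus flag presumes the NVIDIA Container Toolkit is installed on the host.

    # build the image from the repository root (tag name is illustrative)
    docker build -t accent-app .

    # run with GPU access and expose the Streamlit port declared by EXPOSE 8501
    docker run --gpus all -p 8501:8501 accent-app

Without a GPU on the host, the container can still be started by omitting --gpus all; the CUDA runtime base image falls back to CPU execution for workloads that support it.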