akashjayampu committed on
Commit a2ed2c5 · verified · 1 Parent(s): da19753

Update Dockerfile

Files changed (1)
  1. Dockerfile +20 -12
Dockerfile CHANGED
@@ -1,29 +1,37 @@
- # Use lightweight Python base image
  FROM python:3.9-slim

- # Setup environment for Hugging Face
  ENV HF_HOME=/app/.cache/huggingface
- RUN mkdir -p $HF_HOME && chmod -R 777 $HF_HOME

- # Allow passing Hugging Face token securely
- ARG HF_TOKEN
- ENV HF_TOKEN=${HF_TOKEN}

  WORKDIR /app

- # Install dependencies
  COPY requirements.txt .
  RUN pip install --no-cache-dir -r requirements.txt

- # Copy app source
  COPY . .

- # Authenticate and pre-download the gated model with token during build
- RUN python3 -c "from transformers import pipeline; pipeline('summarization', model='mistralai/Mistral-7B-Instruct-v0.1', tokenizer='mistralai/Mistral-7B-Instruct-v0.1', use_auth_token='${HF_TOKEN}')" && \
-     python3 -c "from transformers import pipeline; pipeline('sentiment-analysis', model='distilbert-base-uncased-finetuned-sst-2-english')"

  # Expose Streamlit default port
  EXPOSE 7860

- # Run the app
  CMD ["streamlit", "run", "src/streamlit_app.py", "--server.port=7860", "--server.address=0.0.0.0"]
 
+ # Use minimal Python base image
  FROM python:3.9-slim

+ # Set Hugging Face home for caching
  ENV HF_HOME=/app/.cache/huggingface

+ # Ensure permissions for cache directory
+ RUN mkdir -p $HF_HOME && chmod -R 777 $HF_HOME

+ # Set working directory
  WORKDIR /app

+ # Copy requirements and install dependencies
  COPY requirements.txt .
  RUN pip install --no-cache-dir -r requirements.txt

+ # Copy all project files
  COPY . .

+ # Preload the gated model using HF_TOKEN (already injected in Spaces)
+ RUN python3 -c "\
+ import os; \
+ from transformers import pipeline; \
+ pipe = pipeline('summarization', model='mistralai/Mistral-7B-Instruct-v0.1', \
+                 tokenizer='mistralai/Mistral-7B-Instruct-v0.1', \
+                 use_auth_token=os.environ['HF_TOKEN'])"
+
+ # Also preload another model (not gated)
+ RUN python3 -c "\
+ from transformers import pipeline; \
+ pipeline('sentiment-analysis', model='distilbert-base-uncased-finetuned-sst-2-english')"

  # Expose Streamlit default port
  EXPOSE 7860

+ # Launch Streamlit app
  CMD ["streamlit", "run", "src/streamlit_app.py", "--server.port=7860", "--server.address=0.0.0.0"]
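For readability, the two new RUN layers amount to roughly the Python below, executed once at build time. Because HF_HOME points inside the image, the downloaded weights are baked into the layer, so the same pipeline() calls made later by the Streamlit app resolve from the local cache instead of re-downloading. This is only a sketch of what the diff already does, not additional code from the commit; use_auth_token is the argument used in the Dockerfile (newer transformers releases also accept token=).

import os
from transformers import pipeline

# Gated model: the HF_TOKEN environment variable authorizes the download.
summarizer = pipeline(
    "summarization",
    model="mistralai/Mistral-7B-Instruct-v0.1",
    tokenizer="mistralai/Mistral-7B-Instruct-v0.1",
    use_auth_token=os.environ["HF_TOKEN"],
)

# Public model: no token needed; files are cached under HF_HOME as well.
sentiment = pipeline(
    "sentiment-analysis",
    model="distilbert-base-uncased-finetuned-sst-2-english",
)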