Commit · 7b983c8
Parent(s): 516439b
Update Dockerfile model caching

Files changed:
- Dockerfile +5 -9
- app.py +14 -5
Dockerfile
CHANGED
@@ -18,16 +18,12 @@ RUN pip install --no-cache-dir -r requirements.txt
 ENV HF_HOME="/home/user/.cache/huggingface"
 ENV SENTENCE_TRANSFORMERS_HOME="/home/user/.cache/huggingface/sentence-transformers"
 
-#
-RUN python -c "from
-
+# Download and persist SentenceTransformer model during build stage
+RUN python -c "from sentence_transformers import SentenceTransformer; \
+    SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2', cache_folder='/home/user/.cache/huggingface/sentence-transformers')"
 
-#
-RUN
-cp -r /app/model_cache/* /home/user/.cache/huggingface/sentence-transformers/
-
-# ✅ Ensure ownership and permissions remain intact
-RUN chown -R user:user /home/user/.cache/huggingface /app/model_cache
+# Ensure ownership and permissions remain intact
+RUN chown -R user:user /home/user/.cache/huggingface
 
 # EXPOSE 7860 (HF automatically maps it)
 EXPOSE 7860
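Because the new RUN step downloads the model into SENTENCE_TRANSFORMERS_HOME during the image build, the Space can load it at startup without network access. A minimal sketch of how the app side might pick up that baked-in cache (the env-var fallback default and the encode() call are illustrative, not part of this commit):

import os
from sentence_transformers import SentenceTransformer

# Mirrors the ENV lines in the Dockerfile; the literal path is only a fallback default.
cache_dir = os.environ.get(
    "SENTENCE_TRANSFORMERS_HOME",
    "/home/user/.cache/huggingface/sentence-transformers",
)

# The weights were placed in cache_dir at build time, so this load should not
# need to reach the Hub at runtime.
model = SentenceTransformer(
    "sentence-transformers/all-MiniLM-L6-v2",
    cache_folder=cache_dir,
)

embeddings = model.encode(["hello world"])
print(embeddings.shape)  # all-MiniLM-L6-v2 produces 384-dimensional embeddings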
app.py
CHANGED
@@ -66,11 +66,20 @@ os.environ["SENTENCE_TRANSFORMERS_HOME"] = hf_cache_dir
 from huggingface_hub import snapshot_download
 print("⏳ Checking or downloading the all-MiniLM-L6-v2 model from huggingface_hub...")
 # st.write("⏳ Checking or downloading the all-MiniLM-L6-v2 model from huggingface_hub...")
-
-
-
-
-)
+try:
+    model_loc = snapshot_download(
+        repo_id="sentence-transformers/all-MiniLM-L6-v2",
+        cache_dir=hf_cache_dir,  # setting `os.environ["HF_HOME"]` directly is also fine
+        local_files_only=True  # 🚨 avoids re-downloading / fetching from the internet (set False for local dev)
+    )
+except Exception as e:
+    print(f"❌ Error loading model from cache: {e}")
+    print("⚠️ Retrying with online download enabled...")
+    model_loc = snapshot_download(
+        repo_id="sentence-transformers/all-MiniLM-L6-v2",
+        cache_dir=hf_cache_dir,
+        local_files_only=False  # ⬇️ Fallback
+    )
 print(f"✅ Model directory: {model_loc}")
 # st.write(f"✅ Model directory: {model_loc}")
 
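The try/except added to app.py implements a cache-first load with an online fallback. The same pattern could be factored into a small reusable helper; the sketch below is illustrative only (the name get_model_path is hypothetical, not part of this commit):

import os
from huggingface_hub import snapshot_download

def get_model_path(repo_id: str, cache_dir: str) -> str:
    """Return a local snapshot path, preferring the cache baked in at build time."""
    try:
        # First attempt: only use files already present in cache_dir.
        return snapshot_download(repo_id=repo_id, cache_dir=cache_dir, local_files_only=True)
    except Exception as err:
        print(f"❌ Cache miss for {repo_id}: {err}")
        print("⚠️ Falling back to an online download...")
        return snapshot_download(repo_id=repo_id, cache_dir=cache_dir, local_files_only=False)

# Example usage mirroring app.py (hf_cache_dir derived from HF_HOME):
hf_cache_dir = os.environ.get("HF_HOME", "/home/user/.cache/huggingface")
model_loc = get_model_path("sentence-transformers/all-MiniLM-L6-v2", hf_cache_dir)
print(f"✅ Model directory: {model_loc}")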