ateetvatan committed
Commit e2e640c · 1 parent: 4ede07c

defining model cache directories

Files changed (2):
  1. Dockerfile +7 -0
  2. model_loader.py +6 -0
Dockerfile CHANGED
@@ -1,12 +1,19 @@
 # Base image
 FROM python:3.10-slim
 
+# Set cache envs for Transformers & HF Hub (must come before any Python install)
+ENV TRANSFORMERS_CACHE=/app/cache
+ENV HF_HOME=/app/hf_home
+
 # Install system dependencies
 RUN apt-get update && apt-get install -y git && rm -rf /var/lib/apt/lists/*
 
 # Create app directory
 WORKDIR /app
 
+# Create cache directories (safe fallback in Docker)
+RUN mkdir -p /app/cache /app/hf_home
+
 # Copy requirements and install
 COPY requirements.txt .
 RUN pip install --upgrade pip && pip install -r requirements.txt
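
As a quick check that the new ENV lines and the mkdir step work together, a minimal Python sketch (not part of the commit; the script name is hypothetical and it is assumed to run inside the built image) could verify that both cache variables resolve to writable directories:

# cache_check.py (hypothetical helper, not in this commit)
# Confirms the Dockerfile's ENV values and the directories created by
# `RUN mkdir -p /app/cache /app/hf_home` exist and are writable.
import os

for var, fallback in (("TRANSFORMERS_CACHE", "/app/cache"),
                      ("HF_HOME", "/app/hf_home")):
    path = os.environ.get(var, fallback)   # fallback mirrors the Dockerfile values
    os.makedirs(path, exist_ok=True)       # no-op inside the image
    print(f"{var} = {path} (writable: {os.access(path, os.W_OK)})")
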
model_loader.py CHANGED
@@ -1,4 +1,10 @@
 # model_loader.py
+import os
+
+# Safe fallback if ENV vars are not set (e.g., during local dev)
+os.environ.setdefault("TRANSFORMERS_CACHE", "/app/cache")
+os.environ.setdefault("HF_HOME", "/app/hf_home")
+
 from transformers import AutoTokenizer, AutoModelForCausalLM
 import torch, os
 from dotenv import load_dotenv
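
For context on what the fallbacks buy, here is a minimal usage sketch (not the repository's actual loader; the checkpoint name sshleifer/tiny-gpt2 is only an illustrative placeholder, and /app is assumed writable as it is in the image): once the setdefault calls have run, Hub downloads land under the configured cache.

# usage_sketch.py (illustrative only, not in this commit)
import os

# Same fallbacks as model_loader.py, for running outside the container.
os.environ.setdefault("TRANSFORMERS_CACHE", "/app/cache")
os.environ.setdefault("HF_HOME", "/app/hf_home")

from transformers import AutoTokenizer, AutoModelForCausalLM

model_id = "sshleifer/tiny-gpt2"  # placeholder, not the app's real model
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

# Downloaded files should now sit under the configured cache
# (TRANSFORMERS_CACHE here; newer transformers releases route this via HF_HOME/hub).
for var in ("TRANSFORMERS_CACHE", "HF_HOME"):
    path = os.environ[var]
    print(var, "->", path, os.listdir(path) if os.path.isdir(path) else "(missing)")
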