Spaces:
Running
on
CPU Upgrade
Running
on
CPU Upgrade
File size: 1,034 Bytes
7b2eca8 ee03864 9acc34b 7b2eca8 1017099 ac9c6c1 7b2eca8 bbf7f29 86103f1 7d83d86 e4acc16 7b2eca8 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 |
# syntax=docker/dockerfile:1
# Use the official slim Python base image.
FROM python:3.11-slim

# Hugging Face Spaces give no writable home directory; point the HF caches
# at /tmp (writable for any uid). HF_HOME supersedes TRANSFORMERS_CACHE in
# newer transformers releases; both are set for compatibility.
ENV TRANSFORMERS_CACHE=/tmp \
    HF_HOME=/tmp

# wget is needed only to fetch the checkpoint below. Install with
# --no-install-recommends and clean the apt lists in the same layer so the
# package index does not persist in the image.
RUN apt-get update \
    && apt-get install -y --no-install-recommends wget \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Copy only the dependency manifest first so the (slow) pip layer stays
# cached when application source changes.
COPY requirements.txt .
RUN pip install --no-cache-dir --upgrade pip \
    && pip install --no-cache-dir -r requirements.txt

# Fetch the trained encoder checkpoint at build time so the running
# container does not need network access to the dataset repo.
RUN mkdir -p vit_captioning/artifacts && \
    wget https://huggingface.co/datasets/ClemSummer/clip-checkpoints/resolve/main/CLIPEncoder_40epochs_unfreeze12.pth \
    -O vit_captioning/artifacts/CLIPEncoder_40epochs_unfreeze12.pth

# Pre-bake the CLIP model/processor and BERT tokenizer into the image so
# startup never hits the Hugging Face Hub.
RUN mkdir -p /models/clip && \
    python3 -c "from transformers import CLIPModel; CLIPModel.from_pretrained('openai/clip-vit-base-patch32').save_pretrained('/models/clip')" && \
    python3 -c "from transformers import CLIPProcessor; CLIPProcessor.from_pretrained('openai/clip-vit-base-patch32').save_pretrained('/models/clip')" && \
    python3 -c "from transformers import AutoTokenizer; AutoTokenizer.from_pretrained('bert-base-uncased').save_pretrained('/models/bert-tokenizer')"

# Application source last: the most frequently changing layer.
COPY . .

# Run as non-root (uid 1000, the Hugging Face Spaces convention). Caches
# already point to /tmp, which any uid can write.
# NOTE(review): if the app writes under /app at runtime, chown it to appuser.
RUN useradd -m -u 1000 appuser
USER appuser

# Documentation only — uvicorn below binds 0.0.0.0:8000.
EXPOSE 8000

CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]