mknolan committed on
Commit
b13051c
·
verified ·
1 Parent(s): 920f22f

Add sentencepiece dependency for InternVL2 tokenizer

Browse files
Files changed (1) hide show
  1. Dockerfile +4 -1
Dockerfile CHANGED
@@ -32,7 +32,8 @@ einops\n\
32
  gradio==3.38.0\n\
33
  numpy\n\
34
  Pillow\n\
35
- torch>=2.0.0" > requirements.txt
 
36
 
37
  # Install Python dependencies
38
  RUN pip3 install --no-cache-dir --upgrade pip && \
@@ -55,6 +56,8 @@ if [ $(python3 -c "import torch; print(torch.cuda.is_available())") = "True" ];
55
  python3 -c "import torch; for i in range(torch.cuda.device_count()): print(f\"GPU {i}: {torch.cuda.get_device_name(i)}\")" \n\
56
  python3 -c "import torch; print(f\"Total GPU memory: {torch.cuda.get_device_properties(0).total_memory / 1024 / 1024 / 1024:.2f} GB\")" \n\
57
  fi \n\
 
 
58
  echo "\n===== Starting Application =====" \n\
59
  exec "$@"' > /entrypoint.sh && \
60
  chmod +x /entrypoint.sh
 
32
  gradio==3.38.0\n\
33
  numpy\n\
34
  Pillow\n\
35
+ torch>=2.0.0\n\
36
+ sentencepiece" > requirements.txt
37
 
38
  # Install Python dependencies
39
  RUN pip3 install --no-cache-dir --upgrade pip && \
 
56
  python3 -c "import torch; for i in range(torch.cuda.device_count()): print(f\"GPU {i}: {torch.cuda.get_device_name(i)}\")" \n\
57
  python3 -c "import torch; print(f\"Total GPU memory: {torch.cuda.get_device_properties(0).total_memory / 1024 / 1024 / 1024:.2f} GB\")" \n\
58
  fi \n\
59
+ echo "\n===== Package Information =====" \n\
60
+ pip3 list | grep -E "transformers|einops|torch|sentencepiece|gradio" \n\
61
  echo "\n===== Starting Application =====" \n\
62
  exec "$@"' > /entrypoint.sh && \
63
  chmod +x /entrypoint.sh