warshanks committed on
Commit
b5698dc
·
verified ·
1 Parent(s): e35b258

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -0
app.py CHANGED
@@ -15,6 +15,8 @@ from PIL import Image
15
  from transformers import AutoProcessor, AutoModelForImageTextToText, TextIteratorStreamer, Qwen2_5_VLForConditionalGeneration
16
  from qwen_vl_utils import process_vision_info
17
 
 
 
18
 
19
  model = Qwen2_5_VLForConditionalGeneration.from_pretrained(
20
  "lingshu-medical-mllm/Lingshu-32B",
 
15
  from transformers import AutoProcessor, AutoModelForImageTextToText, TextIteratorStreamer, Qwen2_5_VLForConditionalGeneration
16
  from qwen_vl_utils import process_vision_info
17
 
18
import os
import subprocess
import sys

# Install flash-attn at startup (Hugging Face Spaces workaround: the wheel
# must be installed with CUDA build skipped so it resolves on CPU builders).
#
# NOTE: the original passed env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"},
# which REPLACES the child's entire environment — wiping PATH/HOME so `pip`
# may not even be found. Merge onto the inherited environment instead, and
# use an argument list with shell=False plus `sys.executable -m pip` so the
# install targets the running interpreter without shell parsing.
subprocess.run(
    [sys.executable, "-m", "pip", "install", "flash-attn", "--no-build-isolation"],
    env={**os.environ, "FLASH_ATTENTION_SKIP_CUDA_BUILD": "TRUE"},
    shell=False,
    check=False,  # best-effort: keep app startup alive if the install fails
)
20
 
21
  model = Qwen2_5_VLForConditionalGeneration.from_pretrained(
22
  "lingshu-medical-mllm/Lingshu-32B",