ankandrew committed
Commit c4f32fc · 1 Parent(s): 4ed6c98

Require minimum flash-attn version

Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -6,7 +6,7 @@ from qwen_vl_utils import process_vision_info
 from transformers.utils import is_flash_attn_2_available
 
 subprocess.run(
-    "pip install flash-attn --no-build-isolation",
+    "pip install 'flash-attn>=2.2.0' --no-build-isolation",
     env={"FLASH_ATTENTION_SKIP_CUDA_BUILD": "TRUE"},
     shell=True,
 )
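
For context, here is a minimal sketch of how this startup install could feed into model loading in app.py, assuming the Space serves a Qwen2-VL checkpoint; the model class, checkpoint name, and fallback attention choice below are illustrative assumptions, not taken from this diff:

```python
import subprocess

import torch
from transformers import Qwen2VLForConditionalGeneration
from transformers.utils import is_flash_attn_2_available

# Mirrors the snippet in app.py: install a recent flash-attn at startup,
# with FLASH_ATTENTION_SKIP_CUDA_BUILD set so no CUDA kernels are compiled
# on the Space (the prebuilt wheel is used instead).
subprocess.run(
    "pip install 'flash-attn>=2.2.0' --no-build-isolation",
    env={"FLASH_ATTENTION_SKIP_CUDA_BUILD": "TRUE"},
    shell=True,
)

# Prefer FlashAttention 2 when it is importable, otherwise fall back to
# PyTorch SDPA (fallback choice is an assumption, not part of the commit).
attn_implementation = "flash_attention_2" if is_flash_attn_2_available() else "sdpa"

model = Qwen2VLForConditionalGeneration.from_pretrained(
    "Qwen/Qwen2-VL-7B-Instruct",  # hypothetical checkpoint for illustration
    torch_dtype=torch.bfloat16,
    attn_implementation=attn_implementation,
    device_map="auto",
)
```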