Futuretop committed on
Commit
b771622
·
verified ·
1 Parent(s): f8399c4

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -5,9 +5,9 @@ from PIL import Image
5
  from transformers import AutoProcessor, AutoModelForConditionalGeneration
6
 
7
  subprocess.run(
8
- 'pip install flash-attn --no-build-isolation',
9
- env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"},
10
- shell=True
11
  )
12
 
13
  device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 
5
  from transformers import AutoProcessor, AutoModelForConditionalGeneration
6
 
7
  subprocess.run(
8
+ "pip install --upgrade transformers>=4.50.0",
9
+ shell=True,
10
+ check=True
11
  )
12
 
13
  device = torch.device("cuda" if torch.cuda.is_available() else "cpu")