Futuretop committed on
Commit
7171190
·
verified ·
1 Parent(s): 08f5690

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -2
app.py CHANGED
@@ -4,9 +4,13 @@ import torch
4
  from PIL import Image
5
  from transformers import AutoProcessor, AutoModelForCausalLM
6
 
7
- subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
 
 
 
 
8
 
9
- device = "cuda" if torch.cuda.is_available() else "CPU"
10
  model = 'microsoft/Florence-2-base-ft'
11
  florence_model = AutoModelForCausalLM.from_pretrained(
12
  model,
 
4
  from PIL import Image
5
  from transformers import AutoProcessor, AutoModelForCausalLM
6
 
7
+ subprocess.run(
8
+ 'pip install flash-attn --no-build-isolation',
9
+ env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"},
10
+ shell=True
11
+ )
12
 
13
+ device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
14
  model = 'microsoft/Florence-2-base-ft'
15
  florence_model = AutoModelForCausalLM.from_pretrained(
16
  model,