Futuretop committed
Commit 1526e1d · verified · 1 parent: 0ce6765

Update app.py

Files changed (1):
  app.py  +8 -5
app.py CHANGED
@@ -3,14 +3,17 @@ import subprocess
 import torch
 from PIL import Image
 from transformers import AutoProcessor, AutoModelForCausalLM
-
-
+import modeling_florence2
+modeling_florence2.DaViT._initialize_weights = modeling_florence2.DaViT._init_weights
 
 subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
 
-device = "cuda" if torch.cuda.is_available() else "cpu"
-florence_model = AutoModelForCausalLM.from_pretrained('microsoft/Florence-2-base', trust_remote_code=True).to(device).eval()
-florence_processor = AutoProcessor.from_pretrained('microsoft/Florence-2-base', trust_remote_code=True)
+device = "cuda" if torch.cuda.is_available() else "cpu"
+model = 'microsoft/Florence-2-base-ft'
+florence_model = AutoModelForCausalLM.from_pretrained(
+    model,
+    trust_remote_code=True).to(device).eval()
+florence_processor = AutoProcessor.from_pretrained(model, trust_remote_code=True)
 
 def generate_caption(image):
     if not isinstance(image, Image.Image):
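
The new DaViT._initialize_weights = DaViT._init_weights alias appears to work around newer transformers releases calling _initialize_weights on the remote Florence-2 modeling code, which only defines _init_weights; that reading is an inference from the patch, not something stated in the commit. The diff also cuts off at the top of generate_caption, so for reference here is a minimal sketch of how the newly loaded florence_model and florence_processor are typically driven for captioning, following the standard Florence-2 usage pattern; the task prompt, generation arguments, and the array-to-PIL fallback are assumptions and are not part of this commit.

    # Sketch only: mirrors the loading lines added in this commit, then assumes
    # the usual Florence-2 captioning flow for the truncated generate_caption().
    import torch
    from PIL import Image
    from transformers import AutoProcessor, AutoModelForCausalLM

    device = "cuda" if torch.cuda.is_available() else "cpu"
    model = 'microsoft/Florence-2-base-ft'
    florence_model = AutoModelForCausalLM.from_pretrained(
        model,
        trust_remote_code=True).to(device).eval()
    florence_processor = AutoProcessor.from_pretrained(model, trust_remote_code=True)

    def generate_caption(image):
        # Accept array-like input (e.g. from a Gradio image component) and
        # convert it to PIL, as the truncated isinstance check suggests.
        if not isinstance(image, Image.Image):
            image = Image.fromarray(image)
        task = "<MORE_DETAILED_CAPTION>"  # assumed task prompt
        inputs = florence_processor(text=task, images=image, return_tensors="pt").to(device)
        with torch.no_grad():
            generated_ids = florence_model.generate(
                input_ids=inputs["input_ids"],
                pixel_values=inputs["pixel_values"],
                max_new_tokens=256,   # assumed generation settings
                num_beams=3,
            )
        generated_text = florence_processor.batch_decode(generated_ids, skip_special_tokens=False)[0]
        parsed = florence_processor.post_process_generation(
            generated_text, task=task, image_size=(image.width, image.height)
        )
        return parsed[task]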