gokaygokay committed on
Commit
a056563
·
verified ·
1 Parent(s): c1ba40e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +13 -13
app.py CHANGED
@@ -33,13 +33,13 @@ from typing import Union
33
  from transformers.dynamic_module_utils import get_imports
34
 
35
 
36
- def fixed_get_imports(filename):
37
- """Work around for https://huggingface.co/microsoft/phi-1_5/discussions/72."""
38
- if not str(filename).endswith("/modeling_florence2.py"):
39
- return get_imports(filename)
40
- imports = get_imports(filename)
41
- imports.remove("flash_attn")
42
- return imports
43
 
44
 
45
  import subprocess
@@ -125,13 +125,13 @@ DEFAULT_NEGATIVE_SUFFIX = "Nsfw oversaturated crappy_art low_quality blurry bad_
125
  # Initialize Florence model
126
  device = "cuda" if torch.cuda.is_available() else "cpu"
127
 
128
- def load_models():
129
- with patch("transformers.dynamic_module_utils.get_imports", fixed_get_imports):
130
- florence_model = AutoModelForCausalLM.from_pretrained('microsoft/Florence-2-base', trust_remote_code=True).to(device).eval()
131
- florence_processor = AutoProcessor.from_pretrained('microsoft/Florence-2-base', trust_remote_code=True)
132
- return florence_model, florence_processor
133
 
134
- florence_model, florence_processor = load_models()
135
 
136
  # Prompt Enhancer
137
  enhancer_medium = pipeline("summarization", model="gokaygokay/Lamini-Prompt-Enchance", device=device)
 
33
  from transformers.dynamic_module_utils import get_imports
34
 
35
 
36
+ # def fixed_get_imports(filename):
37
+ # """Work around for https://huggingface.co/microsoft/phi-1_5/discussions/72."""
38
+ # if not str(filename).endswith("/modeling_florence2.py"):
39
+ # return get_imports(filename)
40
+ # imports = get_imports(filename)
41
+ # imports.remove("flash_attn")
42
+ # return imports
43
 
44
 
45
  import subprocess
 
125
  # Initialize Florence model
126
  device = "cuda" if torch.cuda.is_available() else "cpu"
127
 
128
+ #def load_models():
129
+ # with patch("transformers.dynamic_module_utils.get_imports", fixed_get_imports):
130
+ florence_model = AutoModelForCausalLM.from_pretrained('microsoft/Florence-2-base', trust_remote_code=True).to(device).eval()
131
+ florence_processor = AutoProcessor.from_pretrained('microsoft/Florence-2-base', trust_remote_code=True)
132
+ # return florence_model, florence_processor
133
 
134
+ #florence_model, florence_processor = load_models()
135
 
136
  # Prompt Enhancer
137
  enhancer_medium = pipeline("summarization", model="gokaygokay/Lamini-Prompt-Enchance", device=device)