Spaces: Running on Zero
Update app.py
app.py
CHANGED
@@ -20,8 +20,8 @@ huggingface_token = os.getenv("HUGGINGFACE_TOKEN")
 pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=dtype, token = huggingface_token).to(device)
 
 # Initialize Florence model
-florence_model = AutoModelForCausalLM.from_pretrained('microsoft/Florence-2-
-florence_processor = AutoProcessor.from_pretrained('microsoft/Florence-2-
+florence_model = AutoModelForCausalLM.from_pretrained('microsoft/Florence-2-base', trust_remote_code=True).to(device).eval()
+florence_processor = AutoProcessor.from_pretrained('microsoft/Florence-2-base', trust_remote_code=True)
 
 # Prompt Enhancer
 enhancer_long = pipeline("summarization", model="gokaygokay/Lamini-Prompt-Enchance-Long", device=device)
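For context, the two changed lines load Florence-2 with trust_remote_code=True because the checkpoint ships its own modeling and processing code. The sketch below shows how such a model is typically driven for image captioning, following the microsoft/Florence-2-base model card; the caption_image helper and the generation settings are illustrative and not part of app.py.

```python
import torch
from PIL import Image
from transformers import AutoModelForCausalLM, AutoProcessor

device = "cuda" if torch.cuda.is_available() else "cpu"

# Same initialization as the "+" lines in the diff above.
florence_model = AutoModelForCausalLM.from_pretrained(
    "microsoft/Florence-2-base", trust_remote_code=True
).to(device).eval()
florence_processor = AutoProcessor.from_pretrained(
    "microsoft/Florence-2-base", trust_remote_code=True
)

def caption_image(image: Image.Image, task: str = "<CAPTION>") -> str:
    """Hypothetical helper: run one Florence-2 task prompt on an image."""
    # The processor builds the text + pixel inputs expected by Florence-2's custom code.
    inputs = florence_processor(text=task, images=image, return_tensors="pt").to(device)
    generated_ids = florence_model.generate(
        input_ids=inputs["input_ids"],
        pixel_values=inputs["pixel_values"],
        max_new_tokens=256,
        num_beams=3,
    )
    generated_text = florence_processor.batch_decode(
        generated_ids, skip_special_tokens=False
    )[0]
    # Florence-2's processor parses the raw output for the given task token.
    parsed = florence_processor.post_process_generation(
        generated_text, task=task, image_size=(image.width, image.height)
    )
    return parsed[task]
```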
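The unchanged enhancer_long line wraps gokaygokay/Lamini-Prompt-Enchance-Long in a summarization pipeline, which is how seq2seq prompt expanders are commonly exposed. A minimal usage sketch, assuming a hypothetical enhance_prompt helper and generation settings that do not appear in the diff:

```python
import torch
from transformers import pipeline

# Same pipeline as in app.py; device resolved locally for a self-contained example.
enhancer_long = pipeline(
    "summarization",
    model="gokaygokay/Lamini-Prompt-Enchance-Long",
    device=0 if torch.cuda.is_available() else -1,
)

def enhance_prompt(prompt: str, max_length: int = 256) -> str:
    """Hypothetical helper: expand a short prompt into a longer, more detailed one."""
    result = enhancer_long(prompt, max_length=max_length, repetition_penalty=1.2)
    # Summarization pipelines return a list of dicts keyed by "summary_text".
    return result[0]["summary_text"]

print(enhance_prompt("a cat sitting on a windowsill"))
```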