Update app.py
app.py CHANGED
```diff
@@ -18,7 +18,7 @@ from pathlib import Path
 
 import gradio as gr
 import torch
-import spaces
+# import spaces
 
 from uno.flux.pipeline import UNOPipeline
 
@@ -55,7 +55,7 @@ def create_demo(
     offload: bool = False,
 ):
     pipeline = UNOPipeline(model_type, device, offload, only_lora=True, lora_rank=512)
-    pipeline.gradio_generate = spaces.GPU(duratioin=120)(pipeline.gradio_generate)
+    # pipeline.gradio_generate = spaces.GPU(duratioin=120)(pipeline.gradio_generate)
 
 
     badges_text = r"""
@@ -148,4 +148,4 @@ if __name__ == "__main__":
     args = args_tuple[0]
 
     demo = create_demo(args.name, args.device, args.offload)
-    demo.launch(server_port=args.port, ssr_mode=False)
+    demo.launch(server_port=args.port, ssr_mode=False, share=True)
```
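The commit removes the Hugging Face ZeroGPU dependency (the `spaces` import and the `spaces.GPU` decorator applied to `pipeline.gradio_generate`) and adds `share=True` so Gradio also exposes a temporary public link. A minimal sketch of an alternative that keeps the decorator only when the `spaces` package is actually installed; the guard, the `heavy_generate` placeholder, and the fixed port are assumptions for illustration, not part of the original app.py:

```python
import importlib.util

import gradio as gr


def heavy_generate(prompt: str) -> str:
    # Placeholder for the GPU-heavy call (UNOPipeline.gradio_generate in the real app).
    return f"generated: {prompt}"


# Apply spaces.GPU only when the `spaces` package is importable, i.e. when the
# app runs on a Hugging Face ZeroGPU Space; run the function unchanged otherwise.
if importlib.util.find_spec("spaces") is not None:
    import spaces

    heavy_generate = spaces.GPU(duration=120)(heavy_generate)

demo = gr.Interface(fn=heavy_generate, inputs="text", outputs="text")

if __name__ == "__main__":
    # share=True asks Gradio to open a temporary public *.gradio.live tunnel,
    # matching the change made to demo.launch() in the diff above.
    demo.launch(server_port=7860, ssr_mode=False, share=True)
```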