Spaces: Running on Zero

Update app.py

app.py CHANGED
@@ -61,7 +61,7 @@ def get_scheduler(scheduler_config: Dict, name: str) -> Optional[Callable]:
     }
     return scheduler_factory_map.get(name, lambda: None)()
 
-@spaces.GPU
+@spaces.GPU(enable_queue=False)
 def generate(
     prompt: str,
     negative_prompt: str = None,
@@ -69,7 +69,7 @@ def generate(
     width: int = 1024,
     height: int = 1024,
     guidance_scale: float = 5.0,
-    num_inference_steps: int =
+    num_inference_steps: int = 26,
     sampler: str = "Euler a",
     clip_skip: int = 1,
     progress=gr.Progress(track_tqdm=True),
@@ -84,7 +84,6 @@ def generate(
         add_watermarker=False,
         use_auth_token=HF_TOKEN
     )
-    pipe.to(device)
 
     generator = seed_everything(seed)
     pipe.scheduler = get_scheduler(pipe.scheduler.config, sampler)
@@ -94,6 +93,8 @@ def generate(
         num_hidden_layers = 12 - (clip_skip - 1),
         torch_dtype = torch.float16
     )
+
+    pipe.to(device)
 
     try:
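Two of the hunks are one-line tweaks: the decorator on generate gains an argument, becoming @spaces.GPU(enable_queue=False), and num_inference_steps now defaults to 26. On a ZeroGPU Space (the "Running on Zero" badge above), spaces.GPU marks a function that needs GPU hardware, and a device is attached only while the decorated call runs. A minimal sketch of that pattern, assuming the spaces and diffusers packages; the checkpoint id is a placeholder, since the diff does not show which model this Space loads:

import spaces
import torch
from diffusers import StableDiffusionXLPipeline

# Placeholder checkpoint; the actual model is outside the visible hunks.
pipe = StableDiffusionXLPipeline.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=torch.float16
)

@spaces.GPU(enable_queue=False)  # GPU is attached only for the duration of this call
def generate(prompt: str):
    pipe.to("cuda")  # move the weights onto the freshly attached device
    return pipe(prompt).images[0]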
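The context lines around the first hunk show the tail of get_scheduler: a dict of zero-argument factories keyed by sampler name, with a lambda: None fallback for unknown names. Only the closing brace and the return line are visible, so the sketch below fills in the map with diffusers scheduler classes; every key except the "Euler a" default is an assumption. (The source's Optional[Callable] annotation is kept as-is, though the call actually returns a scheduler instance, not a callable.)

from typing import Callable, Dict, Optional
from diffusers import (
    DDIMScheduler,
    DPMSolverMultistepScheduler,
    EulerAncestralDiscreteScheduler,
    EulerDiscreteScheduler,
)

def get_scheduler(scheduler_config: Dict, name: str) -> Optional[Callable]:
    # Zero-argument factories: nothing is constructed until the
    # requested name is looked up and the result is called.
    scheduler_factory_map = {
        "Euler a": lambda: EulerAncestralDiscreteScheduler.from_config(scheduler_config),
        "Euler": lambda: EulerDiscreteScheduler.from_config(scheduler_config),
        "DDIM": lambda: DDIMScheduler.from_config(scheduler_config),
        "DPM++ 2M": lambda: DPMSolverMultistepScheduler.from_config(scheduler_config),
    }
    return scheduler_factory_map.get(name, lambda: None)()

generate then installs the result with pipe.scheduler = get_scheduler(pipe.scheduler.config, sampler), visible as context in the third hunk.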
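The same hunk's context shows generator = seed_everything(seed), a helper whose body is outside the diff. The name and its use as a generator for the pipeline suggest something like the following sketch (entirely inferred, not the Space's actual code):

import random

import numpy as np
import torch

def seed_everything(seed: int) -> torch.Generator:
    # Seed every RNG the pipeline might consult, then return a dedicated
    # generator so sampling is reproducible for a given seed.
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    return torch.Generator(device="cuda").manual_seed(seed)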
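The final hunk's context lines reveal the clip-skip technique: the text encoder is reloaded with its top transformer layers dropped, num_hidden_layers = 12 - (clip_skip - 1), in float16. The opening of that call sits above the hunk, so the sketch below fills it in with transformers' CLIPTextModel and a placeholder repo id; only the layer arithmetic and the dtype are actually in the diff:

import torch
from transformers import CLIPTextModel

clip_skip = 2  # e.g. skip the last CLIP layer, per the usual SD front-end convention

text_encoder = CLIPTextModel.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0",  # placeholder repo id
    subfolder="text_encoder",
    num_hidden_layers=12 - (clip_skip - 1),  # drop (clip_skip - 1) top layers
    torch_dtype=torch.float16,
)
pipe.text_encoder = text_encoder  # attach to the pipeline built earlier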
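That reload is what motivates the one substantive move in this commit: pipe.to(device) used to run right after the pipeline was constructed (old line 87) and now runs after the clip-skip text encoder is built (new line 97). If the freshly loaded encoder is attached to the pipeline, moving the pipeline only afterwards ensures the replacement encoder lands on device too, rather than staying on the CPU and raising a device-mismatch error at inference time. In outline (load_pipeline and make_clip_skip_encoder are hypothetical stand-ins for the calls in the diff):

# Before: anything swapped in after the move stays on the CPU.
pipe = load_pipeline()
pipe.to(device)
pipe.text_encoder = make_clip_skip_encoder(clip_skip)  # still on CPU!

# After: attach first, move once, and everything ends up on `device`.
pipe = load_pipeline()
pipe.text_encoder = make_clip_skip_encoder(clip_skip)
pipe.to(device)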