Spaces: Running on Zero
Update app.py

app.py CHANGED
@@ -144,7 +144,7 @@ def sample_then_run(net):
     cfg = 3.0
     steps = 25
     image = inference(net, prompt, negative_prompt, cfg, steps, seed)
-    return net,net,image
+    return net,net,image, None

 @torch.no_grad()
 @spaces.GPU()
@@ -401,7 +401,7 @@ def run_inversion(net, dict, pcs, epochs, weight_decay,lr):
     steps = 25
     image = inference( net, prompt, negative_prompt, cfg, steps, seed)

-    return net, net, image
+    return net, net, image, None


 @spaces.GPU
@@ -422,7 +422,7 @@ def file_upload(file, net):
     cfg = 3.0
     steps = 25
     image = inference(net, prompt, negative_prompt, cfg, steps, seed)
-    return net, image
+    return net, image, None

 help_text1 = """
 <b>Instructions</b>:
@@ -531,7 +531,7 @@ with gr.Blocks(css="style.css") as demo:
         gr.Markdown(help_text1)
         gr.Markdown(help_text2)

-        gr.Markdown("""<div style="text-align: justify;"
+        gr.Markdown("""<div style="text-align: justify;">❸ After sampling a new model or inverting, you can download the model below.""")

     with gr.Row():
         file_output = gr.File(label="Download Model", container=True, interactive=False)
@@ -541,15 +541,15 @@ with gr.Blocks(css="style.css") as demo:

     invert_button.click(fn=run_inversion,
                         inputs=[net, input_image, pcs, epochs, weight_decay,lr],
-                        outputs = [net, file_output, input_image])
+                        outputs = [net, file_output, input_image, gallery])


-    sample.click(fn=sample_then_run,inputs = [net], outputs=[net, file_output, input_image])
+    sample.click(fn=sample_then_run,inputs = [net], outputs=[net, file_output, input_image, gallery])

     submit.click(
         fn=edit_inference, inputs=[net, prompt, negative_prompt, cfg, steps, seed, injection_step, a1, a2, a3, a4, input_image], outputs=[net, gallery]
     )
-    file_input.change(fn=file_upload, inputs=[file_input, net], outputs = [net, input_image])
+    file_input.change(fn=file_upload, inputs=[file_input, net], outputs = [net, input_image, gallery])

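The change adds the gallery component to the outputs of the sampling, inversion, and file-upload handlers, and each handler returns an extra None for it. In Gradio, an event handler must return one value per component in its outputs list, and returning None for a component clears it, so these edits wipe any previously generated gallery images whenever a new model is sampled, inverted, or uploaded. Below is a minimal sketch of that pattern, not the Space's actual app.py: the component and handler names (net, input_image, gallery, sample) follow the diff, while the layout, the placeholder image value, and the omission of the file_output download component are simplifications for illustration.

# Minimal sketch of the handler/outputs pattern from the diff (assumed,
# simplified; not the Space's actual code).
import gradio as gr

def sample_then_run(net):
    # In the real app this samples a new model and runs inference;
    # here a placeholder stands in for the generated identity image.
    image = None  # placeholder for the generated image
    # One return value per component in `outputs`. The trailing None is
    # for the gallery: returning None clears images left over from a
    # previous edit.
    return net, image, None

with gr.Blocks() as demo:
    net = gr.State()
    input_image = gr.Image(label="Identity")
    gallery = gr.Gallery(label="Generated Images")
    sample = gr.Button("Sample New Model")

    # Adding `gallery` to outputs is what forces the extra None in the
    # handler's return tuple.
    sample.click(fn=sample_then_run, inputs=[net], outputs=[net, input_image, gallery])

if __name__ == "__main__":
    demo.launch()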