Update app.py
app.py CHANGED
@@ -603,72 +603,70 @@ if __name__ == "__main__":
 
         with gr.Column():
             output = gr.Image(label="Result")
-
-        examples=
-        [
-            [
-                ...
-            ],
-            [
-                ...
-            ],
-            ...
-            target_width,
-            weight_id,
-            weight_ip,
-            ip_scale_str,
-            vae_lora_scale,
-            vae_skip_iter_s1,
-            vae_skip_iter_s2,
-            *images,
-            *captions,
-            *idip_checkboxes
-        ],
-        outputs=accordion_states,
-        fn=open_accordion_on_example_selection,
-        cache_examples=True,
-        )
-
-
-
-
+
+        examples = gr.Examples(
+            examples=[
+                [
+                    "ENT1 wearing a tiny hat",
+                    42, 256, 768, 768,
+                    3, 5,
+                    0.85, 1.3,
+                    0.05, 0.8,
+                    "sample/hamster.jpg", None, None, None, None, None,
+                    "a hamster", None, None, None, None, None,
+                    False, False, False, False, False, False
+                ],
+                [
+                    "ENT1 in a red dress is smiling",
+                    42, 256, 768, 768,
+                    3, 5,
+                    0.85, 1.3,
+                    0.05, 0.8,
+                    "sample/woman.jpg", None, None, None, None, None,
+                    "a woman", None, None, None, None, None,
+                    True, False, False, False, False, False
+                ],
+                [
+                    "ENT1 and ENT2 standing together in a park.",
+                    42, 256, 768, 768,
+                    2, 5,
+                    0.85, 1.3,
+                    0.05, 0.8,
+                    "sample/woman.jpg", "sample/girl.jpg", None, None, None, None,
+                    "a woman", "a girl", None, None, None, None,
+                    True, True, False, False, False, False
+                ],
+                [
+                    "ENT1, ENT2, and ENT3 standing together in a park.",
+                    42, 256, 768, 768,
+                    2.5, 5,
+                    0.8, 1.2,
+                    0.05, 0.8,
+                    "sample/woman.jpg", "sample/girl.jpg", "sample/old_man.jpg", None, None, None,
+                    "a woman", "a girl", "an old man", None, None, None,
+                    True, True, True, False, False, False
+                ],
+            ],
+            inputs=[
+                prompt, seed,
+                cond_size,
+                target_height,
+                target_width,
+                weight_id,
+                weight_ip,
+                ip_scale_str,
+                vae_lora_scale,
+                vae_skip_iter_s1,
+                vae_skip_iter_s2,
+                *images,
+                *captions,
+                *idip_checkboxes
+            ],
+            outputs=accordion_states,
+            fn=open_accordion_on_example_selection,
+            cache_examples=True,
+        )
+
 
         gen_btn.click(
             generate_image,
             inputs=[