Update app.py
app.py CHANGED
@@ -39,12 +39,12 @@ import torch
 os.environ["XVERSE_PREPROCESSED_DATA"] = f"{os.getcwd()}/proprocess_data"
 
 
-# FLUX.1-schnell
-snapshot_download(
-    repo_id="black-forest-labs/FLUX.1-schnell",
-    local_dir="/data/checkpoints/FLUX.1-schnell",
-    local_dir_use_symlinks=False
-)
+# # FLUX.1-schnell
+# snapshot_download(
+#     repo_id="black-forest-labs/FLUX.1-schnell",
+#     local_dir="/data/checkpoints/FLUX.1-schnell",
+#     local_dir_use_symlinks=False
+# )
 
 
 # # FLUX.1-dev
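With the download commented out, the app now assumes the FLUX.1-schnell weights already sit under /data/checkpoints. If they could be absent at runtime, one option is to guard the download instead of removing it; a minimal sketch (the ckpt_dir variable is illustrative, not from this commit):

import os
from huggingface_hub import snapshot_download

# Fetch FLUX.1-schnell only when the checkpoint directory is missing,
# so warm restarts of the Space skip the multi-gigabyte transfer.
ckpt_dir = "/data/checkpoints/FLUX.1-schnell"
if not os.path.isdir(ckpt_dir):
    snapshot_download(
        repo_id="black-forest-labs/FLUX.1-schnell",
        local_dir=ckpt_dir,
        local_dir_use_symlinks=False,  # mirrors the original call; deprecated in recent huggingface_hub
    )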
@@ -603,7 +603,70 @@ if __name__ == "__main__":
 
         with gr.Column():
             output = gr.Image(label="Result")
-
+            examples = gr.Examples(
+                examples=[
+                    [
+                        "ENT1 wearing a tiny hat",
+                        42, 256, 768, 768,
+                        3, 5,
+                        0.85, 1.3,
+                        0.05, 0.8,
+                        "sample/hamster.jpg", None, None, None, None, None,
+                        "a hamster", None, None, None, None, None,
+                        False, False, False, False, False, False
+                    ],
+                    [
+                        "ENT1 in a red dress is smiling",
+                        42, 256, 768, 768,
+                        3, 5,
+                        0.85, 1.3,
+                        0.05, 0.8,
+                        "sample/woman.jpg", None, None, None, None, None,
+                        "a woman", None, None, None, None, None,
+                        True, False, False, False, False, False
+                    ],
+                    [
+                        "ENT1 and ENT2 standing together in a park.",
+                        42, 256, 768, 768,
+                        2, 5,
+                        0.85, 1.3,
+                        0.05, 0.8,
+                        "sample/woman.jpg", "sample/girl.jpg", None, None, None, None,
+                        "a woman", "a girl", None, None, None, None,
+                        True, True, False, False, False, False
+                    ],
+                    [
+                        "ENT1, ENT2, and ENT3 standing together in a park.",
+                        42, 256, 768, 768,
+                        2.5, 5,
+                        0.8, 1.2,
+                        0.05, 0.8,
+                        "sample/woman.jpg", "sample/girl.jpg", "sample/old_man.jpg", None, None, None,
+                        "a woman", "a girl", "an old man", None, None, None,
+                        True, True, True, False, False, False
+                    ],
+                ],
+                inputs=[
+                    prompt, seed,
+                    cond_size,
+                    target_height,
+                    target_width,
+                    weight_id,
+                    weight_ip,
+                    ip_scale_str,
+                    vae_lora_scale,
+                    vae_skip_iter_s1,
+                    vae_skip_iter_s2,
+                    *images,
+                    *captions,
+                    *idip_checkboxes
+                ],
+                outputs=accordion_states,
+                fn=open_accordion_on_example_selection,
+                cache_examples=True,
+            )
+
+
 
 
         gen_btn.click(
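Each example row supplies 29 values: a prompt, ten scalar settings, then six image paths, six captions, and six checkbox flags, matching the 29 components that the starred lists (*images, *captions, *idip_checkboxes) expand into inside inputs. Because fn and outputs are passed together with cache_examples=True, Gradio evaluates fn over every row at launch and replays the cached result when a row is clicked. A self-contained sketch of that pattern (the echo handler and component names here are illustrative, not the Space's real ones):

import gradio as gr

def echo(prompt, seed):
    # Stand-in for the Space's real handler; returns text instead of opening accordions.
    return f"{prompt} (seed={int(seed)})"

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    seed = gr.Number(label="Seed", value=42)
    result = gr.Textbox(label="Result")
    gr.Examples(
        examples=[["ENT1 wearing a tiny hat", 42]],
        inputs=[prompt, seed],
        outputs=result,
        fn=echo,
        cache_examples=True,  # fn runs once per row at startup; clicks serve the cache
    )

if __name__ == "__main__":
    demo.launch()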