alexnasa committed
Commit c4b9ec5 · verified · 1 Parent(s): 52369c9

Update app.py

Files changed (1):
  1. app.py  +58 -77
app.py CHANGED
@@ -22,7 +22,7 @@ import gradio as gr
 import string
 import random, time, math
 import os
-
+import uuid
 import src.flux.generate
 from src.flux.generate import generate_from_test_sample, seed_everything
 from src.flux.pipeline_tools import CustomFluxPipeline, load_modulation_adapter, load_dit_lora
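The only change in this hunk is the new `uuid` import, which backs the session-id fallback added to `generate_image` below. A minimal, self-contained sketch of that pattern (the helper name `ensure_session_id` is illustrative, not part of the commit):

```python
import uuid

def ensure_session_id(session_id=None):
    # Mirror of the fallback added in generate_image: when no session id is
    # passed (e.g. by a cached example run), mint a fresh 32-char hex token.
    if session_id is None:
        session_id = uuid.uuid4().hex
    return session_id

print(ensure_session_id())        # e.g. '3f9a0c...'
print(ensure_session_id("demo"))  # an explicit id is kept as-is
```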
@@ -213,24 +213,41 @@ def resize_keep_aspect_ratio(pil_image, target_size=1024):
     return pil_image.resize((new_W, new_H))
 
 
+    cond_size,
+    target_height,
+    target_width,
+    seed,
+    vae_skip_iter,
+    weight_id_ip_str,
+    double_attention,
+    single_attention,
+    db_latent_lora_scale_str,
+    sb_latent_lora_scale_str,
+    vae_lora_scale_str,
+    session_state,
+
 @spaces.GPU()
 def generate_image(
     prompt,
     image_1, caption_1, use_id_1,
     image_2, caption_2, use_id_2,
-    cond_size,
-    target_height,
-    target_width,
-    seed,
-    vae_skip_iter,
-    control_weight_lambda,
-    double_attention,
-    single_attention,
-    ip_scale,
-    latent_sblora_scale_str,
-    vae_lora_scale,
-    session_id,
+    cond_size = 256,
+    target_height = 768,
+    target_width = 768,
+    seed = 42,
+    vae_skip_iter = "0-0.05:1,0.8-1:1",
+    control_weight_lambda = "0-1:1/3/5",
+    double_attention = False,
+    single_attention = True,
+    ip_scale = "0-1:0.85",
+    latent_sblora_scale_str = "0-1:0.85",
+    vae_lora_scale = "0-1:1.3",
+    session_id = None,
 ):
+
+    if session_id is None:
+        session_id = uuid.uuid4().hex
+
     torch.cuda.empty_cache()
     num_images = 1
 
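Every tuning parameter of `generate_image` now carries a default (sizes, seed, schedule strings, LoRA scales, `session_id=None`), so the function can be driven with nothing but the prompt and the per-entity image/caption/ID-flag inputs. A rough, self-contained stub of that calling convention; the stub body and the defaults on the second entity slot are assumptions for illustration, not code from the commit:

```python
def generate_image_stub(
    prompt,
    image_1, caption_1, use_id_1,
    image_2=None, caption_2=None, use_id_2=True,  # illustrative defaults
    cond_size=256,
    target_height=768,
    target_width=768,
    seed=42,
    vae_skip_iter="0-0.05:1,0.8-1:1",
    control_weight_lambda="0-1:1/3/5",
    ip_scale="0-1:0.85",
):
    # Stand-in body: just report which knobs the call ended up with.
    return {"prompt": prompt, "seed": seed, "size": (target_width, target_height)}

# Only the entity inputs are supplied; everything else falls back to the defaults,
# which is what the slimmed-down Examples block at the end of app.py relies on.
print(generate_image_stub("ENT1 wearing a tiny hat", "sample/hamster.jpg", "a hamster", True))
```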
@@ -392,11 +409,12 @@ def create_min_image_input(index, open=True, indices_state=None):
     with gr.Column(min_width=256):
         image = gr.Image(type="filepath", label=f"Image {index + 1}")
         caption = gr.Textbox(label=f"ENT{index + 1}", value="")
+        face_btn = gr.Button("Crop Face")
         id_ip_checkbox = gr.Checkbox(value=True, label=f"ID or not {index + 1}", visible=False)
         with gr.Row():
             vlm_btn = gr.Button("Generate Caption", visible=False)
             det_btn = gr.Button("Det & Seg", visible=False)
-            face_btn = gr.Button("Crop Face", visible=False)
+
 
     return image, caption, face_btn, det_btn, vlm_btn, id_ip_checkbox
 
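Dropping `visible=False` means the Crop Face button is now rendered for each image slot, and it moves up next to the caption box. The click wiring sits outside this hunk; a minimal Gradio sketch of how such a button is typically bound, with a hypothetical `crop_face` handler standing in for whatever app.py actually calls:

```python
import gradio as gr

def crop_face(image_path):
    # Hypothetical handler: the real app would detect and crop the face here;
    # this stub simply returns the input path unchanged.
    return image_path

with gr.Blocks() as demo:
    image = gr.Image(type="filepath", label="Image 1")
    face_btn = gr.Button("Crop Face")
    face_btn.click(fn=crop_face, inputs=image, outputs=image)
```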
@@ -604,69 +622,32 @@ if __name__ == "__main__":
         with gr.Column():
             output = gr.Image(label="Result")
 
-            # examples = gr.Examples(
-            #     examples=[
-            #         [
-            #             "ENT1 wearing a tiny hat",
-            #             42, 256, 768, 768,
-            #             3, 5,
-            #             0.85, 1.3,
-            #             0.05, 0.8,
-            #             "sample/hamster.jpg", None, None, None, None, None,
-            #             "a hamster", None, None, None, None, None,
-            #             False, False, False, False, False, False
-            #         ],
-            #         [
-            #             "ENT1 in a red dress is smiling",
-            #             42, 256, 768, 768,
-            #             3, 5,
-            #             0.85, 1.3,
-            #             0.05, 0.8,
-            #             "sample/woman.jpg", None, None, None, None, None,
-            #             "a woman", None, None, None, None, None,
-            #             True, False, False, False, False, False
-            #         ],
-            #         [
-            #             "ENT1 and ENT2 standing together in a park.",
-            #             42, 256, 768, 768,
-            #             2, 5,
-            #             0.85, 1.3,
-            #             0.05, 0.8,
-            #             "sample/woman.jpg", "sample/girl.jpg", None, None, None, None,
-            #             "a woman", "a girl", None, None, None, None,
-            #             True, True, False, False, False, False
-            #         ],
-            #         [
-            #             "ENT1, ENT2, and ENT3 standing together in a park.",
-            #             42, 256, 768, 768,
-            #             2.5, 5,
-            #             0.8, 1.2,
-            #             0.05, 0.8,
-            #             "sample/woman.jpg", "sample/girl.jpg", "sample/old_man.jpg", None, None, None,
-            #             "a woman", "a girl", "an old man", None, None, None,
-            #             True, True, True, False, False, False
-            #         ],
-            #     ],
-            #     inputs=[
-            #         prompt,
-            #         seed,
-            #         cond_size,
-            #         target_height,
-            #         target_width,
-            #         weight_id,
-            #         weight_ip,
-            #         ip_scale_str,
-            #         vae_lora_scale,
-            #         vae_skip_iter_s1,
-            #         vae_skip_iter_s2,
-            #         *images,
-            #         *captions,
-            #         *idip_checkboxes
-            #     ],
-            #     outputs=output,
-            #     fn=generate_image,
-            #     cache_examples=True,
-            # )
+            examples = gr.Examples(
+                examples=[
+                    [
+                        "ENT1 wearing a tiny hat",
+                        "sample/hamster.jpg", "a hamster", True,
+                    ],
+                    [
+                        "ENT1 in a red dress is smiling",
+                        "sample/woman.jpg", "a woman", True,
+                        None, None, True,
+                    ],
+                    [
+                        "ENT1 and ENT2 standing together in a park.",
+                        "sample/woman.jpg", "a woman", True,
+                        "sample/girl.jpg", "a girl", True,
+                    ],
+                ],
+                inputs=[
+                    prompt,
+                    images[0], captions[0], idip_checkboxes[0],
+                    images[1], captions[1], idip_checkboxes[1],
+                ],
+                outputs=output,
+                fn=generate_image,
+                cache_examples=True,
+            )
 
 
 
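The previously commented-out examples are re-enabled in a reduced form: only three rows, and the `inputs` list covers just the prompt plus the first two entity slots, with all other `generate_image` arguments resolved by the new defaults. With `cache_examples=True`, Gradio runs `fn` on every row at startup and serves the cached results. A self-contained sketch of the same wiring with stand-in components (the stub function and labels are assumptions, not the Space's real components):

```python
import gradio as gr

def fake_generate(prompt, image_1, caption_1, use_id_1,
                  image_2=None, caption_2=None, use_id_2=True):
    # Stand-in for generate_image so the example rows have something to run.
    return f"{prompt} | {caption_1} | {caption_2}"

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    image_1 = gr.Image(type="filepath"); caption_1 = gr.Textbox(); use_id_1 = gr.Checkbox(value=True)
    image_2 = gr.Image(type="filepath"); caption_2 = gr.Textbox(); use_id_2 = gr.Checkbox(value=True)
    output = gr.Textbox(label="Result")
    gr.Examples(
        examples=[["ENT1 wearing a tiny hat", None, "a hamster", True, None, None, True]],
        inputs=[prompt, image_1, caption_1, use_id_1, image_2, caption_2, use_id_2],
        outputs=output,
        fn=fake_generate,
        cache_examples=False,  # the commit sets True so rows are pre-computed at startup
    )
```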
 