helloworld-S committed on
Commit
df618a5
·
verified ·
1 Parent(s): bcdd850

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +18 -17
app.py CHANGED
@@ -137,7 +137,7 @@ accordion_states = []
137
 
138
  def open_accordion_on_example_selection(*args):
139
  print("enter open_accordion_on_example_selection")
140
- images = list(args[-num_inputs*3:-num_inputs*2])
141
  outputs = []
142
  for i, img in enumerate(images):
143
  if img is not None:
@@ -513,47 +513,50 @@ if __name__ == "__main__":
513
  examples = gr.Examples(
514
  examples=[
515
  [
 
 
 
516
  "ENT1 wearing a tiny hat",
517
  42, 256, 768, 768,
518
  3, 5,
519
  0.85, 1.3,
520
  0.05, 0.8,
521
- "sample/hamster.jpg", None, None, None,
522
- "a hamster", None, None, None,
523
- False, False, False, False,
524
  ],
525
  [
 
 
 
526
  "ENT1 in a red dress is smiling",
527
  42, 256, 768, 768,
528
  3, 5,
529
  0.85, 1.3,
530
  0.05, 0.8,
531
- "sample/woman.jpg", None, None, None,
532
- "a woman", None, None, None,
533
- True, False, False, False,
534
  ],
535
  [
 
 
 
536
  "ENT1 and ENT2 standing together in a park.",
537
  42, 256, 768, 768,
538
  2, 5,
539
  0.85, 1.3,
540
  0.05, 0.8,
541
- "sample/woman.jpg", "sample/girl.jpg", None, None,
542
- "a woman", "a girl", None, None,
543
- True, True, False, False,
544
  ],
545
  [
 
 
 
546
  "ENT1, ENT2, and ENT3 standing together in a park.",
547
  42, 256, 768, 768,
548
  2.5, 5,
549
  0.8, 1.2,
550
  0.05, 0.8,
551
- "sample/woman.jpg", "sample/girl.jpg", "sample/old_man.jpg", None,
552
- "a woman", "a girl", "an old man", None,
553
- True, True, True, False,
554
  ],
555
  ],
556
  inputs=[
 
 
 
557
  prompt, seed,
558
  cond_size,
559
  target_height,
@@ -564,13 +567,11 @@ if __name__ == "__main__":
564
  vae_lora_scale,
565
  vae_skip_iter_s1,
566
  vae_skip_iter_s2,
567
- *images,
568
- *captions,
569
- *idip_checkboxes
570
  ],
571
  outputs=accordion_states,
572
  fn=open_accordion_on_example_selection,
573
- run_on_click=True
 
574
  )
575
 
576
  demo.queue()
 
137
 
138
  def open_accordion_on_example_selection(*args):
139
  print("enter open_accordion_on_example_selection")
140
+ images = list(args[:num_inputs])
141
  outputs = []
142
  for i, img in enumerate(images):
143
  if img is not None:
 
513
  examples = gr.Examples(
514
  examples=[
515
  [
516
+ "sample/hamster.jpg", None, None, None,
517
+ "a hamster", None, None, None,
518
+ False, False, False, False,
519
  "ENT1 wearing a tiny hat",
520
  42, 256, 768, 768,
521
  3, 5,
522
  0.85, 1.3,
523
  0.05, 0.8,
 
 
 
524
  ],
525
  [
526
+ "sample/woman.jpg", None, None, None,
527
+ "a woman", None, None, None,
528
+ True, False, False, False,
529
  "ENT1 in a red dress is smiling",
530
  42, 256, 768, 768,
531
  3, 5,
532
  0.85, 1.3,
533
  0.05, 0.8,
 
 
 
534
  ],
535
  [
536
+ "sample/woman.jpg", "sample/girl.jpg", None, None,
537
+ "a woman", "a girl", None, None,
538
+ True, True, False, False,
539
  "ENT1 and ENT2 standing together in a park.",
540
  42, 256, 768, 768,
541
  2, 5,
542
  0.85, 1.3,
543
  0.05, 0.8,
 
 
 
544
  ],
545
  [
546
+ "sample/woman.jpg", "sample/girl.jpg", "sample/old_man.jpg", None,
547
+ "a woman", "a girl", "an old man", None,
548
+ True, True, True, False,
549
  "ENT1, ENT2, and ENT3 standing together in a park.",
550
  42, 256, 768, 768,
551
  2.5, 5,
552
  0.8, 1.2,
553
  0.05, 0.8,
 
 
 
554
  ],
555
  ],
556
  inputs=[
557
+ images[0], images[1], images[2], images[3],
558
+ captions[0], captions[1], captions[2], captions[3],
559
+ idip_checkboxes[0], idip_checkboxes[1], idip_checkboxes[2], idip_checkboxes[3],
560
  prompt, seed,
561
  cond_size,
562
  target_height,
 
567
  vae_lora_scale,
568
  vae_skip_iter_s1,
569
  vae_skip_iter_s2,
 
 
 
570
  ],
571
  outputs=accordion_states,
572
  fn=open_accordion_on_example_selection,
573
+ run_on_click=True,
574
+ label="Examples"
575
  )
576
 
577
  demo.queue()