Spaces:
Running
on
Zero
Running
on
Zero
test
Browse files- .gitattributes +1 -0
- app.py +33 -31
- examples/style_000_0.jpg +3 -0
- examples/style_000_1.jpg +3 -0
.gitattributes
CHANGED
@@ -35,3 +35,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
|
35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
36 |
*.png filter=lfs diff=lfs merge=lfs -text
|
37 |
*.webp filter=lfs diff=lfs merge=lfs -text
|
|
|
|
35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
36 |
*.png filter=lfs diff=lfs merge=lfs -text
|
37 |
*.webp filter=lfs diff=lfs merge=lfs -text
|
38 |
+
*.jpg filter=lfs diff=lfs merge=lfs -text
|
app.py
CHANGED
@@ -46,6 +46,8 @@ class DreamFuseGUI:
|
|
46 |
"./examples/030_0.webp"],
|
47 |
["./examples/handheld_001_1.png",
|
48 |
"./examples/handheld_001_0.png"],
|
|
|
|
|
49 |
]
|
50 |
self.examples = [[Image.open(x) for x in example] for example in self.examples]
|
51 |
self.css_style = self._get_css_style()
|
@@ -440,9 +442,9 @@ class DreamFuseGUI:
|
|
440 |
config = InferenceConfig()
|
441 |
config.lora_id = 'LL3RD/DreamFuse'
|
442 |
|
443 |
-
|
444 |
-
pipeline = DreamFuseInference(config)
|
445 |
-
pipeline.gradio_generate = spaces.GPU(duration=120)(pipeline.gradio_generate)
|
446 |
|
447 |
with gr.Blocks(css=self.css_style) as demo:
|
448 |
modified_fg_state = gr.State()
|
@@ -467,32 +469,6 @@ class DreamFuseGUI:
|
|
467 |
draggable_img_in = gr.Image(label="Foreground Image", type="pil", image_mode="RGBA", height=240, width=200)
|
468 |
generate_btn = gr.Button("2️⃣ Generate Canvas")
|
469 |
|
470 |
-
with gr.Row():
|
471 |
-
with gr.Column(scale=1):
|
472 |
-
gr.Examples(
|
473 |
-
examples=[self.examples[0]],
|
474 |
-
inputs=[background_img_in, draggable_img_in],
|
475 |
-
# elem_id="small-examples"
|
476 |
-
)
|
477 |
-
with gr.Column(scale=1):
|
478 |
-
gr.Examples(
|
479 |
-
examples=[self.examples[2]],
|
480 |
-
inputs=[background_img_in, draggable_img_in],
|
481 |
-
# elem_id="small-examples"
|
482 |
-
)
|
483 |
-
with gr.Row():
|
484 |
-
with gr.Column(scale=1):
|
485 |
-
gr.Examples(
|
486 |
-
examples=[self.examples[1]],
|
487 |
-
inputs=[background_img_in, draggable_img_in],
|
488 |
-
# elem_id="small-examples"
|
489 |
-
)
|
490 |
-
with gr.Column(scale=1):
|
491 |
-
gr.Examples(
|
492 |
-
examples=[self.examples[3]],
|
493 |
-
inputs=[background_img_in, draggable_img_in],
|
494 |
-
# elem_id="small-examples"
|
495 |
-
)
|
496 |
with gr.Column(scale=1, elem_id="section-preview"):
|
497 |
gr.Markdown("### Preview Region")
|
498 |
html_out = gr.HTML(
|
@@ -521,6 +497,32 @@ class DreamFuseGUI:
|
|
521 |
transformation_text = gr.Textbox(label="Transformation Info", elem_id="transformation_info", visible=False)
|
522 |
model_output = gr.Image(label="Model Output", type="pil", height=512, width=512)
|
523 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
524 |
|
525 |
generate_btn.click(
|
526 |
fn=self.on_upload,
|
@@ -534,8 +536,8 @@ class DreamFuseGUI:
|
|
534 |
)
|
535 |
|
536 |
model_generate_btn.click(
|
537 |
-
fn=pipeline.gradio_generate,
|
538 |
-
|
539 |
inputs=[background_img_in, modified_fg_state, transformation_text, seed_slider, \
|
540 |
prompt_text, enable_gui, cfg_slider, size_select, text_strength, enable_truecfg],
|
541 |
outputs=model_output
|
|
|
46 |
"./examples/030_0.webp"],
|
47 |
["./examples/handheld_001_1.png",
|
48 |
"./examples/handheld_001_0.png"],
|
49 |
+
["./examples/style_000_1.jpg",
|
50 |
+
"./examples/style_000_0.jpg"],
|
51 |
]
|
52 |
self.examples = [[Image.open(x) for x in example] for example in self.examples]
|
53 |
self.css_style = self._get_css_style()
|
|
|
442 |
config = InferenceConfig()
|
443 |
config.lora_id = 'LL3RD/DreamFuse'
|
444 |
|
445 |
+
pipeline = None
|
446 |
+
# pipeline = DreamFuseInference(config)
|
447 |
+
# pipeline.gradio_generate = spaces.GPU(duration=120)(pipeline.gradio_generate)
|
448 |
|
449 |
with gr.Blocks(css=self.css_style) as demo:
|
450 |
modified_fg_state = gr.State()
|
|
|
469 |
draggable_img_in = gr.Image(label="Foreground Image", type="pil", image_mode="RGBA", height=240, width=200)
|
470 |
generate_btn = gr.Button("2️⃣ Generate Canvas")
|
471 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
472 |
with gr.Column(scale=1, elem_id="section-preview"):
|
473 |
gr.Markdown("### Preview Region")
|
474 |
html_out = gr.HTML(
|
|
|
497 |
transformation_text = gr.Textbox(label="Transformation Info", elem_id="transformation_info", visible=False)
|
498 |
model_output = gr.Image(label="Model Output", type="pil", height=512, width=512)
|
499 |
|
500 |
+
with gr.Row():
|
501 |
+
with gr.Column(scale=1):
|
502 |
+
gr.Examples(
|
503 |
+
examples=[self.examples[0]],
|
504 |
+
inputs=[background_img_in, draggable_img_in],
|
505 |
+
# elem_id="small-examples"
|
506 |
+
)
|
507 |
+
with gr.Column(scale=1):
|
508 |
+
gr.Examples(
|
509 |
+
examples=[self.examples[2]],
|
510 |
+
inputs=[background_img_in, draggable_img_in],
|
511 |
+
# elem_id="small-examples"
|
512 |
+
)
|
513 |
+
with gr.Row():
|
514 |
+
with gr.Column(scale=1):
|
515 |
+
gr.Examples(
|
516 |
+
examples=[self.examples[1]],
|
517 |
+
inputs=[background_img_in, draggable_img_in],
|
518 |
+
# elem_id="small-examples"
|
519 |
+
)
|
520 |
+
with gr.Column(scale=1):
|
521 |
+
gr.Examples(
|
522 |
+
examples=[self.examples[3]],
|
523 |
+
inputs=[background_img_in, draggable_img_in],
|
524 |
+
# elem_id="small-examples"
|
525 |
+
)
|
526 |
|
527 |
generate_btn.click(
|
528 |
fn=self.on_upload,
|
|
|
536 |
)
|
537 |
|
538 |
model_generate_btn.click(
|
539 |
+
# fn=pipeline.gradio_generate,
|
540 |
+
fn=self.pil_to_base64,
|
541 |
inputs=[background_img_in, modified_fg_state, transformation_text, seed_slider, \
|
542 |
prompt_text, enable_gui, cfg_slider, size_select, text_strength, enable_truecfg],
|
543 |
outputs=model_output
|
examples/style_000_0.jpg
ADDED
![]() |
Git LFS Details
|
examples/style_000_1.jpg
ADDED
![]() |
Git LFS Details
|