Update app.py
app.py CHANGED

@@ -74,36 +74,42 @@ except Exception as e:
 @torch.inference_mode()
 def generate_image(
     prompt: str,
-    id_image
-    …
-    true_cfg: float = 1.0,
-    timestep_to_start_cfg: int = 1,
-    max_sequence_length: int = 128,
-    gamma: float = 0.5,
-    eta: float = 0.7,
-    s: float = 0,
-    tau: float = 5,
+    id_image,
+    num_steps: int,
+    guidance: float,
+    seed,
+    id_weight: float,
+    neg_prompt: str,
+    true_cfg: float,
+    gamma: float,
+    eta: float,
 ):
     # Return an error message if the model was not initialized
     if not model_initialized:
-        return None, "GPU error: no CUDA GPU was found, so the model could not be initialized."
+        return None, "GPU error: no CUDA GPU was found, so the model could not be initialized."
 
     # Cannot run without an ID image
     if id_image is None:
-        return None, "Error: an ID image is required."
+        return None, "Error: an ID image is required."
 
     try:
+        # Fixed parameters
+        width = 512
+        height = 512
+        start_step = 0
+        timestep_to_start_cfg = 1
+        max_sequence_length = 128
+        s = 0
+        tau = 5
+
         flux_generator.t5.max_length = max_sequence_length
 
         # Seed setup
-        …
+        try:
+            seed = int(seed)
+        except:
+            seed = -1
+
         if seed == -1:
             seed = None
 
@@ -231,17 +237,17 @@ def generate_image(
         edited = rearrange(edited[0], "c h w -> h w c")
         edited = Image.fromarray((127.5 * (edited + 1.0)).cpu().byte().numpy())
 
-        return edited, str(opts.seed)
+        return edited, str(opts.seed)
 
     except Exception as e:
         import traceback
         error_msg = f"Error while generating the image: {str(e)}\n{traceback.format_exc()}"
         print(error_msg)
-        return None, error_msg
+        return None, error_msg
 
 
 def create_demo():
-    with gr.Blocks() as demo:
+    with gr.Blocks(theme="apriel") as demo:
         gr.Markdown("# PuLID: Portrait Image Transformation Tool")
 
         if not model_initialized:
@@ -260,7 +266,7 @@ def create_demo():
         with gr.Accordion("Advanced options", open=False):
             neg_prompt = gr.Textbox(label="Negative prompt", value="")
             true_cfg = gr.Slider(1.0, 10.0, 3.5, step=0.1, label="CFG scale")
-            seed = gr.Textbox(-1, label="Seed (-1: random)")
+            seed = gr.Textbox(value="-1", label="Seed (-1: random)")
             gr.Markdown("### Other options")
             gamma = gr.Slider(0.0, 1.0, 0.5, step=0.1, label="Gamma")
             eta = gr.Slider(0.0, 1.0, 0.8, step=0.1, label="Eta")
@@ -272,10 +278,13 @@
         seed_output = gr.Textbox(label="Result / error message")
         gr.Markdown(_CITE_)
 
+        # Fix the Gradio event wiring: pass only the UI components, not raw integer values
         generate_btn.click(
             fn=generate_image,
-            inputs=[
-                …
+            inputs=[
+                prompt, id_image, num_steps, guidance, seed,
+                id_weight, neg_prompt, true_cfg, gamma, eta
+            ],
             outputs=[output_image, seed_output],
         )
 
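
For context on the last hunk: Gradio's Button.click expects the inputs list to contain the UI components themselves; at click time it reads each component's current value and passes those values positionally to the callback. A minimal, self-contained sketch of that pattern (component names here are illustrative, not the Space's actual layout):

import gradio as gr

def echo(prompt: str, seed) -> str:
    # Gradio passes the current *values* of the listed components, in order.
    return f"prompt={prompt!r}, seed={seed!r}"

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    # A Textbox always yields a string, which is why the updated app.py
    # coerces the seed with int(seed) inside a try/except.
    seed = gr.Textbox(value="-1", label="Seed (-1: random)")
    out = gr.Textbox(label="Result")
    btn = gr.Button("Generate")
    # Pass the components, not raw Python values, as inputs.
    btn.click(fn=echo, inputs=[prompt, seed], outputs=[out])

if __name__ == "__main__":
    demo.launch()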