openfree committed on
Commit
a01dc26
·
verified ·
1 Parent(s): a24f281

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +34 -25
app.py CHANGED
@@ -74,36 +74,42 @@ except Exception as e:
74
  @torch.inference_mode()
75
  def generate_image(
76
  prompt: str,
77
- id_image = None,
78
- width: int = 512,
79
- height: int = 512,
80
- num_steps: int = 20,
81
- start_step: int = 0,
82
- guidance: float = 4.0,
83
- seed: int = -1,
84
- id_weight: float = 1.0,
85
- neg_prompt: str = "",
86
- true_cfg: float = 1.0,
87
- timestep_to_start_cfg: int = 1,
88
- max_sequence_length: int = 128,
89
- gamma: float = 0.5,
90
- eta: float = 0.7,
91
- s: float = 0,
92
- tau: float = 5,
93
  ):
94
  # ๋ชจ๋ธ์ด ์ดˆ๊ธฐํ™”๋˜์ง€ ์•Š์•˜์œผ๋ฉด ์˜ค๋ฅ˜ ๋ฉ”์‹œ์ง€ ๋ฐ˜ํ™˜
95
  if not model_initialized:
96
- return None, "GPU ์˜ค๋ฅ˜: CUDA GPU๋ฅผ ์ฐพ์„ ์ˆ˜ ์—†์–ด ๋ชจ๋ธ์„ ์ดˆ๊ธฐํ™”ํ•  ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค.", None
97
 
98
  # ID ์ด๋ฏธ์ง€๊ฐ€ ์—†์œผ๋ฉด ์‹คํ–‰ ๋ถˆ๊ฐ€
99
  if id_image is None:
100
- return None, "์˜ค๋ฅ˜: ID ์ด๋ฏธ์ง€๊ฐ€ ํ•„์š”ํ•ฉ๋‹ˆ๋‹ค.", None
101
 
102
  try:
 
 
 
 
 
 
 
 
 
103
  flux_generator.t5.max_length = max_sequence_length
104
 
105
  # ์‹œ๋“œ ์„ค์ •
106
- seed = int(seed)
 
 
 
 
107
  if seed == -1:
108
  seed = None
109
 
@@ -231,17 +237,17 @@ def generate_image(
231
  edited = rearrange(edited[0], "c h w -> h w c")
232
  edited = Image.fromarray((127.5 * (edited + 1.0)).cpu().byte().numpy())
233
 
234
- return edited, str(opts.seed), flux_generator.pulid_model.debug_img_list
235
 
236
  except Exception as e:
237
  import traceback
238
  error_msg = f"์ด๋ฏธ์ง€ ์ƒ์„ฑ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}\n{traceback.format_exc()}"
239
  print(error_msg)
240
- return None, error_msg, None
241
 
242
 
243
  def create_demo():
244
- with gr.Blocks() as demo:
245
  gr.Markdown("# PuLID: ์ธ๋ฌผ ์ด๋ฏธ์ง€ ๋ณ€ํ™˜ ๋„๊ตฌ")
246
 
247
  if not model_initialized:
@@ -260,7 +266,7 @@ def create_demo():
260
  with gr.Accordion("๊ณ ๊ธ‰ ์˜ต์…˜", open=False):
261
  neg_prompt = gr.Textbox(label="๋„ค๊ฑฐํ‹ฐ๋ธŒ ํ”„๋กฌํ”„ํŠธ", value="")
262
  true_cfg = gr.Slider(1.0, 10.0, 3.5, step=0.1, label="CFG ์Šค์ผ€์ผ")
263
- seed = gr.Textbox(-1, label="์‹œ๋“œ (-1: ๋žœ๋ค)")
264
  gr.Markdown("### ๊ธฐํƒ€ ์˜ต์…˜")
265
  gamma = gr.Slider(0.0, 1.0, 0.5, step=0.1, label="๊ฐ๋งˆ")
266
  eta = gr.Slider(0.0, 1.0, 0.8, step=0.1, label="์—ํƒ€")
@@ -272,10 +278,13 @@ def create_demo():
272
  seed_output = gr.Textbox(label="๊ฒฐ๊ณผ/์˜ค๋ฅ˜ ๋ฉ”์‹œ์ง€")
273
  gr.Markdown(_CITE_)
274
 
 
275
  generate_btn.click(
276
  fn=generate_image,
277
- inputs=[prompt, id_image, 512, 512, num_steps, 0, guidance, seed, id_weight, neg_prompt,
278
- true_cfg, 1, 128, gamma, eta, 0, 5],
 
 
279
  outputs=[output_image, seed_output],
280
  )
281
 
 
74
  @torch.inference_mode()
75
  def generate_image(
76
  prompt: str,
77
+ id_image,
78
+ num_steps: int,
79
+ guidance: float,
80
+ seed,
81
+ id_weight: float,
82
+ neg_prompt: str,
83
+ true_cfg: float,
84
+ gamma: float,
85
+ eta: float,
 
 
 
 
 
 
 
86
  ):
87
  # ๋ชจ๋ธ์ด ์ดˆ๊ธฐํ™”๋˜์ง€ ์•Š์•˜์œผ๋ฉด ์˜ค๋ฅ˜ ๋ฉ”์‹œ์ง€ ๋ฐ˜ํ™˜
88
  if not model_initialized:
89
+ return None, "GPU ์˜ค๋ฅ˜: CUDA GPU๋ฅผ ์ฐพ์„ ์ˆ˜ ์—†์–ด ๋ชจ๋ธ์„ ์ดˆ๊ธฐํ™”ํ•  ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค."
90
 
91
  # ID ์ด๋ฏธ์ง€๊ฐ€ ์—†์œผ๋ฉด ์‹คํ–‰ ๋ถˆ๊ฐ€
92
  if id_image is None:
93
+ return None, "์˜ค๋ฅ˜: ID ์ด๋ฏธ์ง€๊ฐ€ ํ•„์š”ํ•ฉ๋‹ˆ๋‹ค."
94
 
95
  try:
96
+ # ๊ณ ์ • ๋งค๊ฐœ๋ณ€์ˆ˜
97
+ width = 512
98
+ height = 512
99
+ start_step = 0
100
+ timestep_to_start_cfg = 1
101
+ max_sequence_length = 128
102
+ s = 0
103
+ tau = 5
104
+
105
  flux_generator.t5.max_length = max_sequence_length
106
 
107
  # ์‹œ๋“œ ์„ค์ •
108
+ try:
109
+ seed = int(seed)
110
+ except:
111
+ seed = -1
112
+
113
  if seed == -1:
114
  seed = None
115
 
 
237
  edited = rearrange(edited[0], "c h w -> h w c")
238
  edited = Image.fromarray((127.5 * (edited + 1.0)).cpu().byte().numpy())
239
 
240
+ return edited, str(opts.seed)
241
 
242
  except Exception as e:
243
  import traceback
244
  error_msg = f"์ด๋ฏธ์ง€ ์ƒ์„ฑ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}\n{traceback.format_exc()}"
245
  print(error_msg)
246
+ return None, error_msg
247
 
248
 
249
  def create_demo():
250
+ with gr.Blocks(theme="apriel") as demo:
251
  gr.Markdown("# PuLID: ์ธ๋ฌผ ์ด๋ฏธ์ง€ ๋ณ€ํ™˜ ๋„๊ตฌ")
252
 
253
  if not model_initialized:
 
266
  with gr.Accordion("๊ณ ๊ธ‰ ์˜ต์…˜", open=False):
267
  neg_prompt = gr.Textbox(label="๋„ค๊ฑฐํ‹ฐ๋ธŒ ํ”„๋กฌํ”„ํŠธ", value="")
268
  true_cfg = gr.Slider(1.0, 10.0, 3.5, step=0.1, label="CFG ์Šค์ผ€์ผ")
269
+ seed = gr.Textbox(value="-1", label="์‹œ๋“œ (-1: ๋žœ๋ค)")
270
  gr.Markdown("### ๊ธฐํƒ€ ์˜ต์…˜")
271
  gamma = gr.Slider(0.0, 1.0, 0.5, step=0.1, label="๊ฐ๋งˆ")
272
  eta = gr.Slider(0.0, 1.0, 0.8, step=0.1, label="์—ํƒ€")
 
278
  seed_output = gr.Textbox(label="๊ฒฐ๊ณผ/์˜ค๋ฅ˜ ๋ฉ”์‹œ์ง€")
279
  gr.Markdown(_CITE_)
280
 
281
+ # Gradio ์ด๋ฒคํŠธ ์—ฐ๊ฒฐ ์ˆ˜์ • - ์ง์ ‘ ์ •์ˆ˜ ๊ฐ’์„ ์ „๋‹ฌํ•˜์ง€ ์•Š๊ณ  UI ์ปดํฌ๋„ŒํŠธ๋งŒ ์ „๋‹ฌ
282
  generate_btn.click(
283
  fn=generate_image,
284
+ inputs=[
285
+ prompt, id_image, num_steps, guidance, seed,
286
+ id_weight, neg_prompt, true_cfg, gamma, eta
287
+ ],
288
  outputs=[output_image, seed_output],
289
  )
290