qyoo committed · Commit adfcb4c · 1 Parent(s): 1af3dca
app.py CHANGED
@@ -203,7 +203,6 @@ def generate(
 ) -> np.ndarray:
     global pipeline
     change_model_fn(model_name)
-    print(type(image))
     if isinstance(pipeline, FluxConceptrolPipeline):
         images = pipeline(
             prompt=prompt,
@@ -370,7 +369,6 @@ with gr.Blocks(css="style.css") as demo:
     # inputs=gr.Number(1, visible=False),
     # outputs=generate_button,
     # )
-    print(type(image_prompt))
     inputs = [
         prompt,
         textual_concept,
 
omini_control/flux_conceptrol_pipeline.py CHANGED
@@ -194,8 +194,6 @@ class FluxConceptrolPipeline(FluxPipeline):
         else:
             batch_size = prompt_embeds.shape[0]
 
-        print(batch_size)
-
         device = self._execution_device
 
         lora_scale = (
@@ -292,10 +290,6 @@ class FluxConceptrolPipeline(FluxPipeline):
                     guidance = guidance.expand(latents.shape[0])
                 else:
                     guidance = None
-                print("condition_latents.shape:", condition_latents.shape)
-                print("latent.shape:", latents.shape)
-                print("prompt_embeds.shape", prompt_embeds.shape)
-                print("condition_ids.shape", condition_ids.shape)
                 noise_pred = tranformer_forward(
                     self.transformer,
                     model_config=model_config,
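
The lines deleted in this commit are ad-hoc print() debugging of the batch size and tensor shapes. A minimal sketch of an alternative, assuming those diagnostics are still wanted during development (the logger setup and helper below are illustrative, not part of this repo): route them through Python's logging module so they can be silenced by log level rather than removed.

import logging

logger = logging.getLogger(__name__)

def log_denoise_shapes(condition_latents, latents, prompt_embeds, condition_ids):
    # Hypothetical helper: reports the same tensor shapes the removed print()
    # calls did, but at DEBUG level so normal runs stay quiet.
    logger.debug("condition_latents.shape: %s", tuple(condition_latents.shape))
    logger.debug("latents.shape: %s", tuple(latents.shape))
    logger.debug("prompt_embeds.shape: %s", tuple(prompt_embeds.shape))
    logger.debug("condition_ids.shape: %s", tuple(condition_ids.shape))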
 