AbstractPhil committed on
Commit
4bec240
·
1 Parent(s): be3429e
__pycache__/two_stream_shunt_adapter.cpython-310.pyc CHANGED
Binary files a/__pycache__/two_stream_shunt_adapter.cpython-310.pyc and b/__pycache__/two_stream_shunt_adapter.cpython-310.pyc differ
 
app.py CHANGED
@@ -130,8 +130,9 @@ def encode_sdxl_prompt(prompt, negative_prompt=""):
130
 
131
 
132
  # ─── Inference ────────────────────────────────────────────
133
- @spaces.GPU
134
  @torch.no_grad()
 
135
  def infer(prompt, negative_prompt, adapter_l_file, adapter_g_file, strength, noise, gate_prob,
136
  use_anchor, steps, cfg_scale, scheduler_name, width, height, seed):
137
 
@@ -333,12 +334,14 @@ with gr.Blocks(title="SDXL Dual Shunt Adapter", theme=gr.themes.Soft()) as demo:
333
  stats_g = gr.Textbox(label="CLIP-G Stats", interactive=False)
334
 
335
  # Event handlers
 
336
  def process_adapters(adapter_l_val, adapter_g_val):
337
  # Convert "None" back to None for processing
338
  adapter_l_processed = None if adapter_l_val == "None" else adapter_l_val
339
  adapter_g_processed = None if adapter_g_val == "None" else adapter_g_val
340
  return adapter_l_processed, adapter_g_processed
341
 
 
342
  def run_inference(*args):
343
  # Process adapter selections
344
  adapter_l_processed, adapter_g_processed = process_adapters(args[2], args[3])
 
130
 
131
 
132
  # ─── Inference ────────────────────────────────────────────
133
+
134
  @torch.no_grad()
135
+ @spaces.GPU
136
  def infer(prompt, negative_prompt, adapter_l_file, adapter_g_file, strength, noise, gate_prob,
137
  use_anchor, steps, cfg_scale, scheduler_name, width, height, seed):
138
 
 
334
  stats_g = gr.Textbox(label="CLIP-G Stats", interactive=False)
335
 
336
  # Event handlers
337
+ @spaces.GPU
338
  def process_adapters(adapter_l_val, adapter_g_val):
339
  # Convert "None" back to None for processing
340
  adapter_l_processed = None if adapter_l_val == "None" else adapter_l_val
341
  adapter_g_processed = None if adapter_g_val == "None" else adapter_g_val
342
  return adapter_l_processed, adapter_g_processed
343
 
344
+ @spaces.GPU
345
  def run_inference(*args):
346
  # Process adapter selections
347
  adapter_l_processed, adapter_g_processed = process_adapters(args[2], args[3])