ovi054 committed
Commit 63d55b2 · verified · 1 Parent(s): 3fc723e

Update app.py

Files changed (1): app.py (+20 / -23)
app.py CHANGED
@@ -65,26 +65,20 @@ def generate(prompt, negative_prompt, width=1024, height=1024, num_inference_ste
     # pipe.unload_lora_weights()
     # pipe.load_lora_weights(lora_id.strip())
 
-    clean_lora_id = lora_id.strip() if lora_id else ""
-
-    # try:
-    # --- IF LORA ID IS NONE, USE ONE LORA ---
-    if not clean_lora_id:
-        print("No custom LoRA provided. Using ONLY the base LoRA.")
-        # Activate just the default LoRA
-        pipe.set_adapters([DEFAULT_LORA_NAME], adapter_weights=[1.0])
+    print("Loading base LoRA for this run...")
+    causvid_path = hf_hub_download(repo_id=CAUSVID_LORA_REPO, filename=CAUSVID_LORA_FILENAME)
+    pipe.load_lora_weights(causvid_path, adapter_name=DEFAULT_LORA_NAME)
 
-    # --- OTHERWISE, LOAD AND USE TWO LORAS ---
-    else:
-        print(f"Custom LoRA provided. Loading '{clean_lora_id}' and combining with base LoRA.")
-        # Load the custom LoRA fresh for this run
+    # If a custom LoRA is provided, load it as well.
+    if clean_lora_id:
+        print(f"Loading custom LoRA '{clean_lora_id}' for this run...")
         pipe.load_lora_weights(clean_lora_id, adapter_name=CUSTOM_LORA_NAME)
-
-        # Activate BOTH LoRAs together
-        pipe.set_adapters(
-            [DEFAULT_LORA_NAME, CUSTOM_LORA_NAME],
-            adapter_weights=[1.0, 1.0] # Strength for base, strength for custom
-        )
+        # If a custom LoRA is present, activate both.
+        pipe.set_adapters([DEFAULT_LORA_NAME, CUSTOM_LORA_NAME], adapter_weights=[1.0, 1.0])
+    else:
+        # If no custom LoRA, just activate the base one.
+        print("Activating base LoRA only.")
+        pipe.set_adapters([DEFAULT_LORA_NAME], adapter_weights=[1.0])
 
 
     pipe.to("cuda")
@@ -110,12 +104,15 @@ def generate(prompt, negative_prompt, width=1024, height=1024, num_inference_ste
     # if lora_id and lora_id.strip() != "":
     #     pass
     #     pipe.unload_lora_weights()
-    if clean_lora_id:
-        print(f"Unloading '{CUSTOM_LORA_NAME}' from this run.")
-        pipe.unload_lora_weights(CUSTOM_LORA_NAME)
+    # if clean_lora_id:
+    #     print(f"Unloading '{CUSTOM_LORA_NAME}' from this run.")
+    #     pipe.unload_lora_weights(CUSTOM_LORA_NAME)
 
-    # Always disable all active LoRAs to reset the state.
-    pipe.disable_lora()
+    # # Always disable all active LoRAs to reset the state.
+    # pipe.disable_lora()
+    print("Unloading all LoRAs to clean up.")
+    pipe.unload_lora_weights()
 
 iface = gr.Interface(
     fn=generate,
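Taken together, the two hunks move all LoRA handling into the per-request path of generate(): the base (CausVid) LoRA and any user-supplied LoRA are loaded fresh for each call, activated with set_adapters, and every LoRA is unloaded at the end of the run instead of being selectively removed or merely disabled. The sketch below shows the resulting flow in isolation. It is a sketch, not the verbatim app.py: the repo/filename and adapter-name values are placeholders standing in for the module-level constants referenced by the diff, and it keeps the clean_lora_id assignment (which this hunk removes) since the new code still relies on that name.

# Minimal sketch of the per-run LoRA flow after this commit (placeholder values,
# not the verbatim app.py).
from huggingface_hub import hf_hub_download

# Placeholders -- the real repo/filename and adapter names live elsewhere in app.py.
CAUSVID_LORA_REPO = "some-user/causvid-lora"        # assumed placeholder
CAUSVID_LORA_FILENAME = "causvid_lora.safetensors"  # assumed placeholder
DEFAULT_LORA_NAME = "default"                       # assumed placeholder
CUSTOM_LORA_NAME = "custom"                         # assumed placeholder

def apply_loras_for_run(pipe, lora_id):
    """Load and activate LoRAs for a single generate() call (sketch)."""
    # This hunk removes the assignment below, but the new code still uses
    # clean_lora_id, so it is assumed to be computed earlier in generate().
    clean_lora_id = lora_id.strip() if lora_id else ""

    # The base LoRA is fetched (hf_hub_download caches locally) and loaded every run.
    causvid_path = hf_hub_download(repo_id=CAUSVID_LORA_REPO, filename=CAUSVID_LORA_FILENAME)
    pipe.load_lora_weights(causvid_path, adapter_name=DEFAULT_LORA_NAME)

    if clean_lora_id:
        # Custom LoRA supplied: load it and activate both adapters at full strength.
        pipe.load_lora_weights(clean_lora_id, adapter_name=CUSTOM_LORA_NAME)
        pipe.set_adapters([DEFAULT_LORA_NAME, CUSTOM_LORA_NAME], adapter_weights=[1.0, 1.0])
    else:
        # No custom LoRA: activate only the base adapter.
        pipe.set_adapters([DEFAULT_LORA_NAME], adapter_weights=[1.0])

def cleanup_loras_after_run(pipe):
    """Tear down all LoRAs so the shared pipeline starts clean on the next request."""
    pipe.unload_lora_weights()

Compared with the previous version, which tried to unload only the custom adapter and then disable the remaining LoRAs, resetting everything with a single unload_lora_weights() call trades a reload of the base LoRA on every request for a simpler, stateless cleanup of the shared pipe between Gradio calls.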