Update app.py
app.py CHANGED
@@ -17,11 +17,15 @@ def get_lora_sd_pipeline(
     base_model_name_or_path=None,
     dtype=torch.float16,
     adapter_name="default"
-
-
+):
     unet_sub_dir = os.path.join(lora_dir, "unet")
     text_encoder_sub_dir = os.path.join(lora_dir, "text_encoder")
 
+    # Check that the LoRA directories exist
+    print(f"LoRA directory exists: {os.path.exists(lora_dir)}")
+    print(f"UNet LoRA exists: {os.path.exists(unet_sub_dir)}")
+    print(f"Text encoder LoRA exists: {os.path.exists(text_encoder_sub_dir)}")
+
     if os.path.exists(text_encoder_sub_dir) and base_model_name_or_path is None:
         config = LoraConfig.from_pretrained(text_encoder_sub_dir)
         base_model_name_or_path = config.base_model_name_or_path
@@ -30,14 +34,30 @@ def get_lora_sd_pipeline(
         raise ValueError("Укажите название базовой модели или путь к ней")
 
     pipe = StableDiffusionPipeline.from_pretrained(base_model_name_or_path, torch_dtype=dtype)
-    before_params = pipe.unet.parameters()
-    pipe.unet = PeftModel.from_pretrained(pipe.unet, unet_sub_dir, adapter_name=adapter_name)
-    pipe.unet.set_adapter(adapter_name)
-    after_params = pipe.unet.parameters()
 
+    # Log the parameters before applying LoRA
+    before_params = list(pipe.unet.parameters())
+
+    # Apply LoRA to the UNet
+    if os.path.exists(unet_sub_dir):
+        pipe.unet = PeftModel.from_pretrained(pipe.unet, unet_sub_dir, adapter_name=adapter_name)
+        pipe.unet.set_adapter(adapter_name)
+
+    # Apply LoRA to the text encoder (if present)
     if os.path.exists(text_encoder_sub_dir):
         pipe.text_encoder = PeftModel.from_pretrained(pipe.text_encoder, text_encoder_sub_dir, adapter_name=adapter_name)
 
+    # Log the parameters after applying LoRA
+    after_params = list(pipe.unet.parameters())
+    print(f"Parameters changed: {before_params != after_params}")
+
+    # Detailed comparison of the parameters
+    for (name1, param1), (name2, param2) in zip(before_params, after_params):
+        if not torch.equal(param1, param2):
+            print(f"Parameter {name1} changed.")
+        else:
+            print(f"Parameter {name1} did not change.")
+
     if dtype in (torch.float16, torch.bfloat16):
         pipe.unet.half()
         pipe.text_encoder.half()
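
Note on the added logging: pipe.unet.parameters() yields bare tensors, so the loop that unpacks (name1, param1) pairs from before_params and after_params will fail, and before_params holds live references rather than copies. Below is a minimal sketch, not part of the commit, of a name-based comparison using named_parameters() and cloned snapshots; the helper names snapshot_params and report_param_changes are hypothetical. Since the commit replaces pipe.unet with a PeftModel wrapper, parameter names change after loading, so the comparison is most meaningful when both snapshots are taken on the same (unwrapped) module.

import torch

def snapshot_params(module):
    # Detach and clone so later updates do not alias the stored tensors.
    return {name: p.detach().clone() for name, p in module.named_parameters()}

def report_param_changes(before, after):
    # Compare parameters that appear under the same name in both snapshots.
    for name, old in before.items():
        new = after.get(name)
        if new is None:
            print(f"Parameter {name} is missing in the second snapshot.")
        elif not torch.equal(old, new):
            print(f"Parameter {name} changed.")
        else:
            print(f"Parameter {name} did not change.")

# Hypothetical usage around the loading step in app.py:
# before = snapshot_params(pipe.unet)
# ... load the LoRA adapters ...
# report_param_changes(before, snapshot_params(pipe.unet))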