import argparse
from collections import OrderedDict

import torch
from safetensors.torch import load_file, save_file


def strip_lora_extra_keys(lora_path, output_path):
    """
    Load a LoRA .safetensors file and save a new version containing only
    '.lora_down.weight' and '.lora_up.weight' keys, preserving their original
    module paths and the 'diffusion_model.' prefix. All floating-point tensors
    are converted to float16; non-floating-point tensors are kept unchanged.
    """
    # load_file reads all tensors onto CPU by default.
    try:
        source_state_dict = load_file(lora_path)
        print(f"Successfully loaded LoRA from: {lora_path} ({len(source_state_dict)} original keys)")
    except Exception as e:
        print(f"Error loading LoRA file '{lora_path}': {e}")
        return

    stripped_state_dict = OrderedDict()
    kept_keys_count = 0
    discarded_keys_count = 0

    # Keep only the low-rank down/up matrices; all other keys
    # (e.g. .diff, .diff_b) are discarded.
    for key, tensor in source_state_dict.items():
        if key.endswith(".lora_down.weight") or key.endswith(".lora_up.weight"):
            if tensor.is_floating_point():
                stripped_state_dict[key] = tensor.to(torch.float16)
            else:
                # Non-floating-point tensors are passed through unchanged.
                stripped_state_dict[key] = tensor
                print(f"Warning: Tensor {key} was not floating point, dtype not changed.")
            kept_keys_count += 1
        else:
            discarded_keys_count += 1

    print("\nStripping complete.")
    print(f"Kept {kept_keys_count} keys (lora_down.weight / lora_up.weight).")
    print(f"Discarded {discarded_keys_count} other keys (e.g. .diff, .diff_b).")

    if stripped_state_dict:
        print(f"Output dictionary has {len(stripped_state_dict)} keys.")
        print(f"Now attempting to save the stripped LoRA to: {output_path}...")
        try:
            save_file(stripped_state_dict, output_path)
            print(f"\nSuccessfully saved stripped LoRA to: {output_path}")
        except Exception as e:
            print(f"Error saving stripped LoRA file '{output_path}': {e}")
    else:
        print("\nNo '.lora_down.weight' or '.lora_up.weight' keys were found. Output file not saved.")


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Strips a LoRA file to only keep .lora_down.weight and .lora_up.weight keys, converting to float16.",
        formatter_class=argparse.RawTextHelpFormatter,
    )
    parser.add_argument("lora_path", type=str, help="Path to the input LoRA (.safetensors) file to strip.")
    parser.add_argument("output_path", type=str, help="Path to save the stripped LoRA (.safetensors) file.")
    args = parser.parse_args()

    strip_lora_extra_keys(args.lora_path, args.output_path)
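
# Usage sketch (hypothetical filenames; the script name is whatever this
# file is saved as):
#   python strip_lora.py input_lora.safetensors input_lora_stripped.safetensors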