import argparse
import torch
from safetensors.torch import load_file, save_file
from collections import OrderedDict # Use OrderedDict to maintain key order if desired
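# (On Python 3.7+ a plain dict also preserves insertion order, so OrderedDict
# is optional here.)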


def strip_lora_extra_keys(lora_path, output_path):
    """
    Loads a LoRA file and saves a new version containing only
    '.lora_down.weight' and '.lora_up.weight' keys,
    preserving their original module paths and the 'diffusion_model.' prefix.
    All tensors will be saved in float16.
    """
    try:
        source_state_dict = load_file(lora_path)
        print(f"Successfully loaded LoRA from: {lora_path} ({len(source_state_dict)} original keys)")
    except Exception as e:
        print(f"Error loading LoRA file '{lora_path}': {e}")
        return

    stripped_state_dict = OrderedDict()
    kept_keys_count = 0
    discarded_keys_count = 0

    for key, tensor in source_state_dict.items():
        # We expect keys to already be in the 'diffusion_model.<...>.lora_down.weight' format
        if key.endswith(".lora_down.weight") or key.endswith(".lora_up.weight"):
            if tensor.is_floating_point():
                stripped_state_dict[key] = tensor.to(torch.float16)
            else:  # Should not happen for these weights
                stripped_state_dict[key] = tensor
                print(f"Warning: Tensor {key} was not floating point, dtype not changed.")
            kept_keys_count += 1
        else:
            discarded_keys_count += 1
            # print(f"Discarded key: {key}")  # Uncomment for verbose output
print(f"\nStripping complete.")
print(f"Kept {kept_keys_count} keys (lora_down.weight / lora_up.weight).")
print(f"Discarded {discarded_keys_count} other keys (e.g., .diff_b, .diff, etc.).")
if stripped_state_dict:
print(f"Output dictionary has {len(stripped_state_dict)} keys.")
print(f"Now attempting to save the stripped LoRA to: {output_path}...")
try:
save_file(stripped_state_dict, output_path)
print(f"\nSuccessfully saved stripped LoRA to: {output_path}")
except Exception as e:
print(f"Error saving stripped LoRA file '{output_path}': {e}")
else:
print("\nNo '.lora_down.weight' or '.lora_up.weight' keys were found. Output file not saved.")


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Strips a LoRA file to only keep .lora_down.weight and .lora_up.weight keys, converting to float16.",
        formatter_class=argparse.RawTextHelpFormatter
    )
    parser.add_argument("lora_path", type=str, help="Path to the input LoRA (.safetensors) file to strip.")
    parser.add_argument("output_path", type=str, help="Path to save the stripped LoRA (.safetensors) file.")
    args = parser.parse_args()
    strip_lora_extra_keys(args.lora_path, args.output_path)
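
# Example invocation (the script and file names here are hypothetical):
#   python strip_lora_extra_keys.py input_lora.safetensors input_lora_stripped.safetensors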