# Wan2GP_Loras / inspect_lora_detailed.py
import argparse
from safetensors.torch import load_file
import torch  # safetensors.torch returns torch tensors (used for .shape/.dtype below)


def inspect_lora_keys_detailed(lora_path, search_terms=None):
    """
    Loads a LoRA file, prints its first few keys, and searches the key
    names for specific terms.
    """
    if search_terms is None:
        search_terms = []
    try:
        state_dict = load_file(lora_path)
        print(f"Successfully loaded LoRA from: {lora_path}")

        print("\nListing the first 20 keys (or all if fewer):")
        keys = list(state_dict.keys())
        for key in keys[:20]:
            print(f'  Key: "{key}", Shape: {state_dict[key].shape}, Dtype: {state_dict[key].dtype}')

        if search_terms:
            print(f"\n--- Searching for keys containing any of: {search_terms} ---")
            overall_found_count = 0
            for term in search_terms:
                term_found_this_iteration = 0
                print(f"\nSearching for keys containing '{term}':")
                for key in keys:
                    if term in key:
                        print(f'  Found: "{key}", Shape: {state_dict[key].shape}, Dtype: {state_dict[key].dtype}')
                        overall_found_count += 1
                        term_found_this_iteration += 1
                if term_found_this_iteration == 0:
                    print(f"  No keys found containing '{term}'.")
            if overall_found_count == 0:
                print(f"\nNo keys found matching any of the search terms: {search_terms} across the entire file.")

        # Attempt to find and print a common alpha value if present.
        # Simple pattern: keys ending in ".alpha", e.g. "some.module.alpha".
        alpha_keys_simple_suffix = [k for k in keys if k.endswith(".alpha")]

        # More thorough check: derive each base module name from its
        # lora_down/lora_up (or lora_A/lora_B) weight keys, then look for a
        # matching "<base_module>.alpha" entry.
        alpha_keys_complex = []
        lora_base_modules = set()
        for k_lora in keys:
            if ".lora_down.weight" in k_lora:
                lora_base_modules.add(k_lora.rsplit(".lora_down.weight", 1)[0])
            elif ".lora_up.weight" in k_lora:
                lora_base_modules.add(k_lora.rsplit(".lora_up.weight", 1)[0])
            elif ".lora_A.weight" in k_lora:  # for inspecting the original ComfyUI-style files too
                lora_base_modules.add(k_lora.rsplit(".lora_A.weight", 1)[0])
            elif ".lora_B.weight" in k_lora:
                lora_base_modules.add(k_lora.rsplit(".lora_B.weight", 1)[0])
        for base_module in lora_base_modules:
            potential_alpha_key = base_module + ".alpha"
            if potential_alpha_key in state_dict:
                alpha_keys_complex.append(potential_alpha_key)

        # Deduplicate and sort for deterministic output.
        all_found_alpha_keys = sorted(set(alpha_keys_simple_suffix + alpha_keys_complex))
        if all_found_alpha_keys:
            # Print only the first few found, for brevity.
            print(f"\nFound {len(all_found_alpha_keys)} alpha key(s). Examples:")
            for i, alpha_key in enumerate(all_found_alpha_keys):
                if i >= 5:  # print at most 5 examples
                    print(f"  ...and {len(all_found_alpha_keys) - 5} more.")
                    break
                print(f"  Key: '{alpha_key}', Value: {state_dict[alpha_key]}")
        else:
            print("\nNo obvious '.alpha' keys found with common naming patterns.")
    except Exception as e:
        print(f"Error loading or processing LoRA file: {e}")
        print("Please ensure you provide a valid .safetensors LoRA file path.")


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Inspect keys in a LoRA .safetensors file, with search functionality.")
    parser.add_argument("lora_path", type=str, help="Path to the LoRA .safetensors file")
    parser.add_argument("--search", type=str, nargs="+", help="Optional: one or more terms to search for in keys (e.g., --search img_emb cross_attn)")
    args = parser.parse_args()
    inspect_lora_keys_detailed(args.lora_path, search_terms=args.search)
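
# Example invocation (the file name and search terms here are hypothetical,
# for illustration only):
#   python inspect_lora_detailed.py my_wan_lora.safetensors --search lora_down lora_up alpha
#
# Without --search, the script simply lists the first 20 keys and any
# ".alpha" entries it can detect.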