#---------------------------------------------------------------------------------------------------------------------#
# Comfyroll Custom Nodes by RockOfFire and Akatsuzi https://github.com/Suzie1/ComfyUI_Comfyroll_CustomNodes #
# for ComfyUI https://github.com/comfyanonymous/ComfyUI #
#---------------------------------------------------------------------------------------------------------------------#
import os
import sys
import comfy.sd
import comfy.utils
import folder_paths
from ..categories import icons
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), "comfy"))
#---------------------------------------------------------------------------------------------------------------------#
# LoRA Nodes
#---------------------------------------------------------------------------------------------------------------------#
# Load LoRA node with an added on/off switch: On applies the LoRA, Off passes the model and CLIP through unchanged.
class CR_LoraLoader:

    def __init__(self):
        self.loaded_lora = None

    @classmethod
    def INPUT_TYPES(cls):
        file_list = folder_paths.get_filename_list("loras")
        file_list.insert(0, "None")
        return {"required": {"model": ("MODEL",),
                             "clip": ("CLIP",),
                             "switch": (["On","Off"],),
                             "lora_name": (file_list,),
                             "strength_model": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}),
                             "strength_clip": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}),
                            }}

    RETURN_TYPES = ("MODEL", "CLIP", "STRING", )
    RETURN_NAMES = ("MODEL", "CLIP", "show_help", )
    FUNCTION = "load_lora"
    CATEGORY = icons.get("Comfyroll/LoRA")

    def load_lora(self, model, clip, switch, lora_name, strength_model, strength_clip):
        show_help = "https://github.com/Suzie1/ComfyUI_Comfyroll_CustomNodes/wiki/LoRA-Nodes#cr-load-lora"

        # Pass through unchanged if the LoRA would have no effect
        if strength_model == 0 and strength_clip == 0:
            return (model, clip, show_help, )

        if switch == "Off" or lora_name == "None":
            return (model, clip, show_help, )

        lora_path = folder_paths.get_full_path("loras", lora_name)
        lora = None

        # Reuse the cached LoRA if the same file was loaded last time
        if self.loaded_lora is not None:
            if self.loaded_lora[0] == lora_path:
                lora = self.loaded_lora[1]
            else:
                # Reset (rather than del) so a failed load on this call
                # cannot leave the attribute missing for the next call
                self.loaded_lora = None

        if lora is None:
            lora = comfy.utils.load_torch_file(lora_path, safe_load=True)
            self.loaded_lora = (lora_path, lora)

        model_lora, clip_lora = comfy.sd.load_lora_for_models(model, clip, lora, strength_model, strength_clip)
        return (model_lora, clip_lora, show_help, )
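
# Illustrative call only -- in practice ComfyUI's graph executor supplies the
# MODEL/CLIP objects from a checkpoint loader, and "my_lora.safetensors" is a
# hypothetical filename:
#
#   loader = CR_LoraLoader()
#   model_out, clip_out, help_url = loader.load_lora(
#       model, clip, "On", "my_lora.safetensors", 0.8, 0.8)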
#---------------------------------------------------------------------------------------------------------------------#
# Based on Efficiency Nodes
# LoRA stack node: a single node carries three LoRA slots, each with its own on/off switch and weights.
class CR_LoRAStack:

    @classmethod
    def INPUT_TYPES(cls):
        loras = ["None"] + folder_paths.get_filename_list("loras")

        return {"required": {
                    "switch_1": (["Off","On"],),
                    "lora_name_1": (loras,),
                    "model_weight_1": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}),
                    "clip_weight_1": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}),
                    "switch_2": (["Off","On"],),
                    "lora_name_2": (loras,),
                    "model_weight_2": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}),
                    "clip_weight_2": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}),
                    "switch_3": (["Off","On"],),
                    "lora_name_3": (loras,),
                    "model_weight_3": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}),
                    "clip_weight_3": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}),
                },
                "optional": {"lora_stack": ("LORA_STACK",)
                },
        }

    RETURN_TYPES = ("LORA_STACK", "STRING", )
    RETURN_NAMES = ("LORA_STACK", "show_help", )
    FUNCTION = "lora_stacker"
    CATEGORY = icons.get("Comfyroll/LoRA")

    def lora_stacker(self, lora_name_1, model_weight_1, clip_weight_1, switch_1, lora_name_2, model_weight_2, clip_weight_2, switch_2, lora_name_3, model_weight_3, clip_weight_3, switch_3, lora_stack=None):

        # Initialise the list
        lora_list = list()

        # Carry forward any upstream stack, dropping "None" placeholder entries
        if lora_stack is not None:
            lora_list.extend([l for l in lora_stack if l[0] != "None"])

        if lora_name_1 != "None" and switch_1 == "On":
            lora_list.append((lora_name_1, model_weight_1, clip_weight_1))

        if lora_name_2 != "None" and switch_2 == "On":
            lora_list.append((lora_name_2, model_weight_2, clip_weight_2))

        if lora_name_3 != "None" and switch_3 == "On":
            lora_list.append((lora_name_3, model_weight_3, clip_weight_3))

        show_help = "https://github.com/Suzie1/ComfyUI_Comfyroll_CustomNodes/wiki/LoRA-Nodes#cr-lora-stack"

        return (lora_list, show_help, )
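
# A LORA_STACK is a plain list of (lora_name, model_weight, clip_weight)
# tuples, which is why stacks from chained CR_LoRAStack nodes concatenate
# naturally. Illustrative value (hypothetical filenames):
#
#   [("styleA.safetensors", 0.8, 0.8), ("detail.safetensors", 0.5, 1.0)]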
#---------------------------------------------------------------------------------------------------------------------#
# Applies every LoRA in a LORA_STACK to the model and CLIP in sequence.
class CR_ApplyLoRAStack:

    @classmethod
    def INPUT_TYPES(cls):
        return {"required": {"model": ("MODEL",),
                             "clip": ("CLIP",),
                             "lora_stack": ("LORA_STACK",),
                            }
        }

    RETURN_TYPES = ("MODEL", "CLIP", "STRING", )
    RETURN_NAMES = ("MODEL", "CLIP", "show_help", )
    FUNCTION = "apply_lora_stack"
    CATEGORY = icons.get("Comfyroll/LoRA")

    def apply_lora_stack(self, model, clip, lora_stack=None):
        show_help = "https://github.com/Suzie1/ComfyUI_Comfyroll_CustomNodes/wiki/LoRA-Nodes#cr-apply-lora-stack"

        # Initialise the list
        lora_params = list()

        # Extend lora_params with lora-stack items
        if lora_stack:
            lora_params.extend(lora_stack)
        else:
            return (model, clip, show_help,)

        # Initialise the model and clip
        model_lora = model
        clip_lora = clip

        # Loop through the list, loading and applying each LoRA in turn
        for tup in lora_params:
            lora_name, strength_model, strength_clip = tup

            lora_path = folder_paths.get_full_path("loras", lora_name)
            lora = comfy.utils.load_torch_file(lora_path, safe_load=True)

            model_lora, clip_lora = comfy.sd.load_lora_for_models(model_lora, clip_lora, lora, strength_model, strength_clip)

        return (model_lora, clip_lora, show_help,)
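
# Sketch of the intended wiring (hypothetical filenames; keyword arguments
# shown for clarity, matching how ComfyUI passes node inputs):
#
#   stack, _ = CR_LoRAStack().lora_stacker(
#       switch_1="On",  lora_name_1="styleA.safetensors", model_weight_1=0.8, clip_weight_1=0.8,
#       switch_2="Off", lora_name_2="None", model_weight_2=1.0, clip_weight_2=1.0,
#       switch_3="Off", lora_name_3="None", model_weight_3=1.0, clip_weight_3=1.0)
#   model_out, clip_out, _ = CR_ApplyLoRAStack().apply_lora_stack(model, clip, stack)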
#---------------------------------------------------------------------------------------------------------------------#
# MAPPINGS
#---------------------------------------------------------------------------------------------------------------------#
# For reference only, actual mappings are in __init__.py
'''
NODE_CLASS_MAPPINGS = {
    "CR Load LoRA": CR_LoraLoader,
    "CR LoRA Stack": CR_LoRAStack,
    "CR Apply LoRA Stack": CR_ApplyLoRAStack,
}
'''