Upload 8 files
- args_manager.py +90 -0
- backend_base/__init__.py +34 -0
- backend_base/civitai.py +62 -0
- backend_base/comfyclient_pipeline.py +189 -0
- backend_base/comfyd.py +148 -0
- backend_base/models_info.py +824 -0
- backend_base/params_mapper.py +94 -0
- backend_base/utils.py +73 -0
args_manager.py
ADDED
@@ -0,0 +1,90 @@
import os
import ldm_patched.modules.args_parser as args_parser
from pathlib import Path

current_dir = Path.cwd()

args_parser.parser.add_argument("--preset", type=str, default='Default',
                                help="Apply specified preset parameters.")
args_parser.parser.add_argument("--disable-preset-selection", action='store_true',
                                help="Disable preset selection in Gradio.")

args_parser.parser.add_argument("--language", type=str, default='en',
                                help="Translate UI using json files in [language] folder. "
                                     "For example, [--language en_uk] will use [language/en_uk.json] for translation.")

args_parser.parser.add_argument("--webroot", type=str, default='',
                                help="Set the webroot path.")

args_parser.parser.add_argument("--location", type=str, default='CN',
                                help="Set the access location by country.")

# For example, https://github.com/lllyasviel/Fooocus/issues/849
args_parser.parser.add_argument("--disable-offload-from-vram", action="store_true",
                                help="Operate in Smart Memory mode: VRAM will be unloaded only when necessary.")

args_parser.parser.add_argument("--theme", type=str,
                                help="Launch FooocusPlus with a light or dark theme", default='dark')

args_parser.parser.add_argument("--disable-image-log", action='store_true',
                                help="Prevent writing image logs to the Outputs folder.")

# args_parser.parser.add_argument("--disable-analytics", action='store_true',
#                                 help="This is an obsolete argument: Gradio Analytics are always disabled.")
args_parser.args.disable_analytics = True
os.environ["GRADIO_ANALYTICS_ENABLED"] = "False"  # Gradio is no longer allowed to call home
os.environ["NO_ALBUMENTATIONS_UPDATE"] = "True"   # an update would cause some serious errors

args_parser.parser.add_argument("--disable-metadata", action='store_true',
                                help="Disable saving metadata to images.")

args_parser.parser.add_argument("--disable-preset-download", action='store_true',
                                help="Disable downloading models for presets", default=False)

args_parser.parser.add_argument("--disable-enhance-output-sorting", action='store_true',
                                help="Disable enhanced output sorting of the image gallery.", default=False)

args_parser.parser.add_argument("--enable-auto-describe-image", action='store_true',
                                help="Enable automatic description of UOV and enhance image when prompt is empty", default=False)

args_parser.parser.add_argument("--always-download-new-model", action='store_true',
                                help="Always download newer models", default=False)

args_parser.parser.add_argument("--rebuild-hash-cache",
                                help="Generates missing model and LoRA hashes.",
                                type=int, nargs="?", metavar="CPU_NUM_THREADS", const=-1)

args_parser.parser.add_argument("--dev", action='store_true',
                                help="Launch the dev branch", default=False)

args_parser.parser.add_argument("--user-dir", type=str,
                                help="Set the path to the user directory",
                                default=Path(current_dir.resolve().parent / 'UserDir'))

args_parser.parser.add_argument("--models-root", type=str,
                                help="Set the path to the models directory", default=None)

args_parser.parser.add_argument("--config", type=str,
                                help="Set the path for config.txt", default=None)

args_parser.parser.add_argument("--disable-comfyd", action='store_true',
                                help="Do not auto-start the Comfy server at launch", default=False)

args_parser.parser.set_defaults(
    disable_cuda_malloc=True,
    in_browser=True,
    port=None
)

args_parser.args = args_parser.parser.parse_args()

# (Disabled by default because of issues like https://github.com/lllyasviel/Fooocus/issues/724)
# This "solution" was introduced in mainline Fooocus 2.1.699
# I do not know why the always_offload_from_vram argument was not considered sufficient
# Let's try it without this secret override - David Sage
# args_parser.args.always_offload_from_vram = not args_parser.args.disable_offload_from_vram

if args_parser.args.disable_in_browser:
    args_parser.args.in_browser = False

args = args_parser.args
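Other modules read the parsed namespace through the module-level `args` alias. A minimal consumer sketch (the importing module name is hypothetical):

# hypothetical consumer, e.g. webui.py
from args_manager import args

if not args.disable_comfyd:
    print(f'starting Comfy backend; theme={args.theme}, preset={args.preset}')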
backend_base/__init__.py
ADDED
@@ -0,0 +1,34 @@
import os
import importlib.util
import backend_base

__all__ = ['models_info', 'comfyclient_pipeline', 'params_mapper', 'config', 'comfyd']

def get_torch_xformers_cuda_version():
    torch_ver = ""
    cuda_ver = ""
    xformers_ver = ""
    try:
        torch_spec = importlib.util.find_spec("torch")
        for folder in torch_spec.submodule_search_locations:
            ver_file = os.path.join(folder, "version.py")
            if os.path.isfile(ver_file):
                spec = importlib.util.spec_from_file_location("torch_version_import", ver_file)
                module = importlib.util.module_from_spec(spec)
                spec.loader.exec_module(module)
                torch_ver = module.__version__
                cuda_ver = getattr(module, 'cuda', "")
        xformers_spec = importlib.util.find_spec("xformers")
        for folder in xformers_spec.submodule_search_locations:
            ver_file = os.path.join(folder, "version.py")
            if os.path.isfile(ver_file):
                spec = importlib.util.spec_from_file_location("xformers_version_import", ver_file)
                module = importlib.util.module_from_spec(spec)
                spec.loader.exec_module(module)
                xformers_ver = module.__version__
    except Exception:
        pass
    return torch_ver, xformers_ver, cuda_ver


torch_version, xformers_version, cuda_version = get_torch_xformers_cuda_version()
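The three version strings are computed once at import time. A sketch of how a caller might report them:

import backend_base

print(f'torch {backend_base.torch_version} (CUDA {backend_base.cuda_version}), '
      f'xformers {backend_base.xformers_version}')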
backend_base/civitai.py
ADDED
@@ -0,0 +1,62 @@
import requests
from urllib.parse import urlparse
from urllib.parse import parse_qs
from modules.model_loader import load_file_from_url
from modules.config import paths_checkpoints, path_loras

def download_from_civitai(c_url):
    allowed_types = ['Checkpoint', 'LORA']
    save_locations = {
        'Checkpoint': paths_checkpoints[0],
        'LORA': path_loras
    }

    # c_url = "https://civitai.com/models/133005/juggernaut-xl"
    parsed_url = urlparse(c_url)
    model_id = parsed_url.path.split('/')[parsed_url.path.split('/').index('models') + 1]
    model_version_id = parse_qs(parsed_url.query).get('modelVersionId')

    url = "https://civitai.com/api/v1/models/" + model_id
    response = requests.get(url)
    if response.status_code != 200:
        raise RuntimeError('model not found')
    data = response.json()

    model_type = data.get('type')
    if model_type not in allowed_types:
        raise RuntimeError('model is not a checkpoint or LoRA')
    model_versions = data.get('modelVersions')
    selected_version = None
    if model_version_id:
        for model_version in model_versions:
            if str(model_version.get('id')) == model_version_id[0]:
                selected_version = model_version
                break
    else:
        selected_version = model_versions[0]

    if selected_version is None:
        raise RuntimeError("this version doesn't exist")

    files = selected_version.get('files')
    primary_file = None
    for f in files:
        if f.get('primary'):
            primary_file = f
            break
    if primary_file is None:
        raise RuntimeError('the selected version has no primary file')
    download_url = primary_file.get('downloadUrl')
    filename = primary_file.get('name') + ".1"

    model_name = data.get('name')
    selected_version_name = selected_version.get('name')
    print(f'downloading {model_name} ({model_type} version {selected_version_name})')
    print(f'from {download_url} to {save_locations[model_type]}/{filename}')

    load_file_from_url(
        url=download_url,
        model_dir=save_locations[model_type],
        file_name=filename
    )
    return filename

if __name__ == '__main__':
    url = "https://civitai.com/models/128397"
    download_from_civitai(url)
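Usage sketch with the example URL from the inline comment; a specific version can be pinned by appending ?modelVersionId=<id> to the model URL:

# downloads the primary file of the latest version into paths_checkpoints[0]
filename = download_from_civitai("https://civitai.com/models/133005/juggernaut-xl")
print(f'saved as {filename}')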
backend_base/comfyclient_pipeline.py
ADDED
@@ -0,0 +1,189 @@
import os
import json
import websocket
import uuid
import random
import httpx
import time
import numpy as np
import ldm_patched.modules.model_management as model_management
from io import BytesIO
from PIL import Image

from . import utils

def upload_mask(mask):
    with BytesIO() as output:
        mask.save(output, format="JPEG")  # PIL needs an explicit format when saving to a buffer
        output.seek(0)
        files = {'mask': ('mask.jpg', output)}
        data = {'overwrite': 'true', 'type': 'example_type'}
        response = httpx.post("http://{}/upload/mask".format(server_address), files=files, data=data)
        return response.json()

def queue_prompt(prompt):
    p = {"prompt": prompt, "client_id": client_id}
    data = json.dumps(p).encode('utf-8')
    try:
        with httpx.Client() as client:
            response = client.post("http://{}/prompt".format(server_address), data=data)
            return json.loads(response.read())
    except httpx.RequestError as e:
        print(f"httpx.RequestError: {e}")
        return None

def get_image(filename, subfolder, folder_type):
    params = httpx.QueryParams({
        "filename": filename,
        "subfolder": subfolder,
        "type": folder_type
    })
    with httpx.Client() as client:
        response = client.get(f"http://{server_address}/view", params=params)
        return response.read()

def get_history(prompt_id):
    with httpx.Client() as client:
        response = client.get("http://{}/history/{}".format(server_address, prompt_id))
        return json.loads(response.read())

def get_images(ws, prompt, callback=None):
    prompt_id = queue_prompt(prompt)['prompt_id']
    print('[ComfyClient] Request and get ComfyTask_id:{}'.format(prompt_id))
    output_images = {}
    current_node = ''
    last_node = None
    preview_image = []
    last_step = None
    current_step = None
    current_total_steps = None
    while True:
        model_management.throw_exception_if_processing_interrupted()
        try:
            out = ws.recv()
        except ConnectionResetError as e:
            print(f'[ComfyClient] The connection raised an exception, reconnect and try again: {e}')
            ws = websocket.WebSocket()
            ws.connect("ws://{}/ws?clientId={}".format(server_address, client_id))
            out = ws.recv()
        if isinstance(out, str):
            message = json.loads(out)
            current_type = message['type']
            # print(f'current_message={message}')
            if message['type'] == 'executing':
                data = message['data']
                if data['node'] is None and data['prompt_id'] == prompt_id:
                    break
                else:
                    current_node = data['node']
            elif message['type'] == 'progress':
                current_step = message["data"]["value"]
                current_total_steps = message["data"]["max"]
        else:
            if current_type == 'progress':
                if prompt[current_node]['class_type'] in ['KSampler', 'SamplerCustomAdvanced', 'TiledKSampler'] and callback is not None:
                    if current_step == last_step:
                        preview_image.append(out[8:])
                    else:
                        if last_step is not None:
                            callback(last_step, current_total_steps, Image.open(BytesIO(preview_image[0])))
                        preview_image = []
                        preview_image.append(out[8:])
                        last_step = current_step
            if prompt[current_node]['class_type'] == 'SaveImageWebsocket':
                title = prompt[current_node]['_meta']['title']
                if title not in output_images:  # keep the first frame per output node
                    output_images[title] = out[8:]
            continue

    output_images = {k: np.array(Image.open(BytesIO(v))) for k, v in output_images.items()}
    print(f'[ComfyClient] The ComfyTask:{prompt_id} has finished: {len(output_images)}')
    return output_images

def images_upload(images):
    result = {}
    if images is None:
        return result
    for k, np_image in images.items():
        pil_image = Image.fromarray(np_image)
        with BytesIO() as output:
            pil_image.save(output, format="PNG")
            output.seek(0)
            files = {'image': (f'image_{client_id}_{random.randint(1000, 9999)}.png', output)}
            data = {'overwrite': 'true', 'type': 'input'}
            response = httpx.post("http://{}/upload/image".format(server_address), files=files, data=data)
            result.update({k: response.json()["name"]})
    print(f'[ComfyClient] The ComfyTask:upload_input_images has finished: {len(result)}')
    return result


def process_flow(flow_name, params, images, callback=None):
    global ws

    flow_file = os.path.join(WORKFLOW_DIR, f'{flow_name}_api.json')
    if ws is None or ws.status != 101:
        if ws is not None:
            print(f'[ComfyClient] websocket status: {ws.status}, timeout:{ws.timeout}s.')
            ws.close()
        try:
            ws = websocket.WebSocket()
            ws.connect("ws://{}/ws?clientId={}".format(server_address, client_id))
        except ConnectionRefusedError as e:
            print(f'[ComfyClient] The connect_to_server has failed, sleep and try again: {e}')
            time.sleep(8)
            try:
                ws = websocket.WebSocket()
                ws.connect("ws://{}/ws?clientId={}".format(server_address, client_id))
            except ConnectionRefusedError as e:
                print(f'[ComfyClient] The connect_to_server has failed, restart and try again: {e}')
                time.sleep(12)
                ws = websocket.WebSocket()
                ws.connect("ws://{}/ws?clientId={}".format(server_address, client_id))

    images_map = images_upload(images)
    params.update_params(images_map)
    with open(flow_file, 'r', encoding="utf-8") as workflow_api_file:
        flowdata = json.load(workflow_api_file)
    print(f'[ComfyClient] Ready ComfyTask to process: workflow={flow_name}')
    for k, v in params.params.items():
        print(f'    {k} = {v}')
    try:
        prompt_str = params.convert2comfy(flowdata)
        if not utils.echo_off:
            print(f'[ComfyClient] ComfyTask prompt: {prompt_str}')
        images = get_images(ws, prompt_str, callback=callback)
    except websocket.WebSocketException as e:
        print(f'[ComfyClient] The connection has been closed, restart and try again: {e}')
        ws = None

    images_keys = sorted(images.keys(), reverse=True)
    imgs = [images[key] for key in images_keys]
    return imgs

def interrupt():
    try:
        with httpx.Client() as client:
            response = client.post("http://{}/interrupt".format(server_address))
        return
    except httpx.RequestError as e:
        print(f"httpx.RequestError: {e}")
        return

def free(all=False):
    p = {"unload_models": bool(all), "free_memory": True}
    data = json.dumps(p).encode('utf-8')
    try:
        with httpx.Client() as client:
            response = client.post("http://{}/free".format(server_address), data=data)
        return
    except httpx.RequestError as e:
        print(f"httpx.RequestError: {e}")
        return


WORKFLOW_DIR = 'workflows'
COMFYUI_ENDPOINT_IP = '127.0.0.1'
COMFYUI_ENDPOINT_PORT = '8187'
server_address = f'{COMFYUI_ENDPOINT_IP}:{COMFYUI_ENDPOINT_PORT}'
client_id = str(uuid.uuid4())
ws = None
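A hedged sketch of driving process_flow: the params object only needs the three members the function actually touches (a params dict, update_params, convert2comfy). The real implementation lives in params_mapper, which is not shown in this diff, so a stub stands in here:

from backend_base import comfyclient_pipeline

class StubParams:  # stand-in for params_mapper's class (name assumed)
    def __init__(self, params):
        self.params = params
    def update_params(self, extra):
        self.params.update(extra)
    def convert2comfy(self, flowdata):
        return flowdata  # a real mapper would patch node inputs from self.params

# expects workflows/txt2img_api.json to exist and the Comfy server to be up
imgs = comfyclient_pipeline.process_flow('txt2img', StubParams({'prompt': 'a cat'}), images=None)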
backend_base/comfyd.py
ADDED
@@ -0,0 +1,148 @@
import subprocess
import os
import sys
import torch
import gc
import ldm_patched.modules.model_management as model_management
from . import comfyclient_pipeline, utils

comfyd_process = None
comfyd_active = False
comfyd_args = [[]]

def is_running():
    global comfyd_process
    if 'comfyd_process' not in globals():
        return False
    if comfyd_process is None:
        return False
    process_code = comfyd_process.poll()
    if process_code is None:
        return True
    print(f"[Comfyd] comfyd process status code: {process_code}")
    return False

def start(args_patch=[[]]):
    global comfyd_process, comfyd_args
    if not is_running():
        backend_script = os.path.join(os.getcwd(), 'comfy/main.py')
        args_comfyd = [["--preview-method", "auto"], ["--port", "8187"], ["--disable-auto-launch"]]
        if len(args_patch) > 0 and len(args_patch[0]) > 0:
            comfyd_args += args_patch
            if not utils.echo_off:
                print(f'[Comfyd] args_comfyd was patched: {args_comfyd}, patch:{comfyd_args}')
        # flatten the base args plus any accumulated patches
        # (comfyd_args starts as [[]], which flattens to nothing)
        arguments = [arg for sublist in args_comfyd + comfyd_args for arg in sublist]
        process_env = os.environ.copy()
        process_env["PYTHONPATH"] = os.pathsep.join(sys.path)
        model_management.unload_all_models()
        gc.collect()
        torch.cuda.empty_cache()
        if not utils.echo_off:
            print(f'[Comfyd] Ready to start with arguments: {arguments}, env: {process_env}')
        if 'comfyd_process' not in globals():
            globals()['comfyd_process'] = None
        comfyd_process = subprocess.Popen([sys.executable, backend_script] + arguments, env=process_env)
        comfyclient_pipeline.ws = None
    else:
        print("[Comfyd] Comfyd is active!")
    return

def active(flag=False):
    global comfyd_active
    comfyd_active = flag
    if flag and not is_running():
        start()
    if not flag and is_running():
        stop()
    return

def finished():
    global comfyd_process
    if 'comfyd_process' not in globals():
        return
    if comfyd_process is None:
        return
    if comfyd_active:
        # free()
        gc.collect()
        print("[Comfyd] Task finished!")
        return
    comfyclient_pipeline.ws = None
    free()
    gc.collect()
    print("[Comfyd] Comfyd stopped!")

def stop():
    global comfyd_process
    if 'comfyd_process' not in globals():
        return
    if comfyd_process is None:
        return
    if comfyd_active:
        free(all=True)
        gc.collect()
        print("[Comfyd] Releasing Comfyd!")
        return
    if is_running():
        comfyd_process.terminate()
        comfyd_process.wait()
    del comfyd_process
    comfyclient_pipeline.ws = None
    free()
    gc.collect()
    print("[Comfyd] Comfyd has stopped!")

def free(all=False):
    global comfyd_process
    if 'comfyd_process' not in globals():
        return
    if comfyd_process is None:
        return
    comfyclient_pipeline.free(all)
    return

def interrupt():
    global comfyd_process
    if 'comfyd_process' not in globals():
        return
    if comfyd_process is None:
        return
    comfyclient_pipeline.interrupt()
    return

def args_mapping(args_fooocus):
    args_comfy = []
    if "--gpu-device-id" in args_fooocus:
        args_comfy += [["--cuda-device", args_fooocus[args_fooocus.index("--gpu-device-id") + 1]]]
    if "--async-cuda-allocation" in args_fooocus:
        args_comfy += [["--cuda-malloc"]]
    if "--disable-async-cuda-allocation" in args_fooocus:
        args_comfy += [["--disable-cuda-malloc"]]
    if "--vae-in-cpu" in args_fooocus:
        args_comfy += [["--vae-in-cpu"]]
    if "--directml" in args_fooocus:
        args_comfy += [["--directml"]]
    if "--disable-xformers" in args_fooocus:
        args_comfy += [["--disable-xformers"]]
    if "--always-cpu" in args_fooocus:
        args_comfy += [["--cpu"]]
    if "--always-low-vram" in args_fooocus:
        args_comfy += [["--lowvram"]]
    if "--always-gpu" in args_fooocus:
        args_comfy += [["--gpu-only"]]
    print()
    if "--always-offload-from-vram" in args_fooocus:
        args_comfy += [["--disable-smart-memory"]]
        print("Smart memory disabled")
    else:
        print("Smart memory enabled")
    if not utils.echo_off:
        print(f'[Comfyd] args_fooocus: {args_fooocus}\nargs_comfy: {args_comfy}')
    return args_comfy

def get_entry_point_id():
    global comfyd_process
    if 'comfyd_process' in globals() and comfyd_process:
        return gen_entry_point_id(comfyd_process.pid)
    else:
        return None
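Typical lifecycle from the host application, as a sketch; the patch argument is a list of lists, the same shape start() flattens:

from backend_base import comfyd

patch = comfyd.args_mapping(['--always-offload-from-vram'])  # -> [['--disable-smart-memory']]
comfyd.start(patch)     # spawns comfy/main.py on port 8187
print(comfyd.is_running())
comfyd.active(False)    # stops the server when no longer needed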
backend_base/models_info.py
ADDED
@@ -0,0 +1,824 @@
1 |
+
import os
|
2 |
+
import json
|
3 |
+
from . import utils
|
4 |
+
|
5 |
+
default_models_info = {
|
6 |
+
"checkpoints/Alternative/hunyuan_dit_1.2.safetensors": {
|
7 |
+
"size": 8240228270,
|
8 |
+
"hash": "4fb84f84079cda457d171b3c6b15d1be95b5a3e5d9825703951a99ddf92d1787",
|
9 |
+
"muid": "8c4c0098ac"
|
10 |
+
},
|
11 |
+
"checkpoints/Alternative/playground-v2.5-1024px.safetensors": {
|
12 |
+
"size": 6938040576,
|
13 |
+
"hash": "bcaa7dd6780974f000b17b5a6c63e6f867a75c51ffa85c67d6b196882c69b992",
|
14 |
+
"muid": "d0e21c789d50"
|
15 |
+
},
|
16 |
+
"checkpoints/albedobaseXL_v21.safetensors": {
|
17 |
+
"size": 6938041042,
|
18 |
+
"hash": "1718b5bb2da1ef4815fee8af8a7fc2fa8ab8f467b279eded4d991ea0cce59a6d",
|
19 |
+
"muid": "acf28f1aeb42"
|
20 |
+
},
|
21 |
+
"checkpoints/animaPencilXL_v310.safetensors": {
|
22 |
+
"size": 6938040682,
|
23 |
+
"hash": "67b97ee6eec64abf0cb73c2284a0afecdd8e205a87a3622c2c23231e25e29b5b",
|
24 |
+
"muid": "2def60f4b273"
|
25 |
+
},
|
26 |
+
"checkpoints/animaPencilXL_v500.safetensors": {
|
27 |
+
"size": 6938041144,
|
28 |
+
"hash": "896faa18cd6852ccf977e2dec76191c38f256d031204e233cb3ed76f6088d55b",
|
29 |
+
"muid": "239e9199aa"
|
30 |
+
},
|
31 |
+
"checkpoints/FluxDev/flux1-dev.safetensors": {
|
32 |
+
"size": 23802932552,
|
33 |
+
"hash": "4610115bb0c89560703c892c59ac2742fa821e60ef5871b33493ba544683abd7",
|
34 |
+
"muid": "2f3c5caac0"
|
35 |
+
},
|
36 |
+
"checkpoints/FluxDev/flux1-dev-fp8.safetensors": {
|
37 |
+
"size": 17246524772,
|
38 |
+
"hash": "8e91b68084b53a7fc44ed2a3756d821e355ac1a7b6fe29be760c1db532f3d88a",
|
39 |
+
"muid": "7f89b4dd65"
|
40 |
+
},
|
41 |
+
"checkpoints/FluxDev/flux1-dev-bnb-nf4.safetensors": {
|
42 |
+
"size": 11489884113,
|
43 |
+
"hash": "c5e25d12d720e30a277598ce9ded9db406ee54f63419fe0c801b283d4ea146e2",
|
44 |
+
"muid": "6487417fee"
|
45 |
+
},
|
46 |
+
"checkpoints/FluxDev/flux1-dev-bnb-nf4-v2.safetensors": {
|
47 |
+
"size": 12044280207,
|
48 |
+
"hash": "fef37763b8f2c8cc3463139bbb6d91aa517c605b654d4e5c540de52813f30306",
|
49 |
+
"muid": "8c53267ff9"
|
50 |
+
},
|
51 |
+
"checkpoints/FluxDev/flux-hyp8-Q5_K_M.gguf": {
|
52 |
+
"size": 8421981408,
|
53 |
+
"hash": "a0dac309ffb497fde0d1bbfa0291f5371d0d05c66173df830318bc475777c68a",
|
54 |
+
"muid": "a0dac309ff"
|
55 |
+
},
|
56 |
+
"checkpoints/FluxDev/fluxunchained-dev-Q5_K_M.gguf": {
|
57 |
+
"size": 8428152160,
|
58 |
+
"hash": "ee5df99febf1aebd63137672d3396407a6e89f771ca9e83bc13c475c5d57a521",
|
59 |
+
"muid": "ee5df99feb"
|
60 |
+
},
|
61 |
+
"checkpoints/FluxSchnell/flux1-schnell.safetensors": {
|
62 |
+
"size": 23782506688,
|
63 |
+
"hash": "9403429e0052277ac2a87ad800adece5481eecefd9ed334e1f348723621d2a0a",
|
64 |
+
"muid": "d314672fc6"
|
65 |
+
},
|
66 |
+
"checkpoints/FluxSchnell/flux1-schnell-fp8.safetensors": {
|
67 |
+
"size": 17236328572,
|
68 |
+
"hash": "ead426278b49030e9da5df862994f25ce94ab2ee4df38b556ddddb3db093bf72",
|
69 |
+
"muid": "8f031d049d"
|
70 |
+
},
|
71 |
+
"checkpoints/FLUX.1-schnell-dev-merged.safetensors": {
|
72 |
+
"size": 23802903480,
|
73 |
+
"hash": "0dc649761fba58625f57f596738e76422df9424c4c8801ca70c53ad6998c905b",
|
74 |
+
"muid": "6661979a94"
|
75 |
+
},
|
76 |
+
"checkpoints/FluxSchnell/flux1-schnell-bnb-nf4.safetensors": {
|
77 |
+
"size": 11484555394,
|
78 |
+
"hash": "e6cba6afca8b2f5599879111e1a5f3dabebe69bcc3ee4a6af46807447adc6d09",
|
79 |
+
"muid": "0eaea6dc0d"
|
80 |
+
},
|
81 |
+
"checkpoints/Juggernaut-XL_v9_RunDiffusionPhoto_v2.safetensors": {
|
82 |
+
"size": 7105348188,
|
83 |
+
"hash": "c9e3e68f89b8e38689e1097d4be4573cf308de4e3fd044c64ca697bdb4aa8bca",
|
84 |
+
"muid": "393f61fcec"
|
85 |
+
},
|
86 |
+
"checkpoints/Pony/ponyDiffusionV6XL.safetensors": {
|
87 |
+
"size": 6938041050,
|
88 |
+
"hash": "67ab2fd8ec439a89b3fedb15cc65f54336af163c7eb5e4f2acc98f090a29b0b3",
|
89 |
+
"muid": "e023c14343"
|
90 |
+
},
|
91 |
+
"checkpoints/realisticStockPhoto_v20.safetensors": {
|
92 |
+
"size": 6938054242,
|
93 |
+
"hash": "f99f3dec38a09b4834a4a073bdc45aabd42b422b4d327f5e8001afcb5ffb5f45",
|
94 |
+
"muid": "5d99d6fc4fbf"
|
95 |
+
},
|
96 |
+
"checkpoints/realisticVisionV60B1_v51VAE.safetensors": {
|
97 |
+
"size": 2132625894,
|
98 |
+
"hash": "15012c538f503ce2ebfc2c8547b268c75ccdaff7a281db55399940ff1d70e21d",
|
99 |
+
"muid": "5da06f78b3c8"
|
100 |
+
},
|
101 |
+
"checkpoints/SD3x/sd3_medium_incl_clips.safetensors": {
|
102 |
+
"size": 5973224240,
|
103 |
+
"hash": "3bb7f21bc5fb450220f4eb78a2f276b15422309d5166a4bdeb8c3b763a3a0581",
|
104 |
+
"muid": "bb3cbb3221ef"
|
105 |
+
},
|
106 |
+
"checkpoints/SD3x/sd3_medium_incl_clips_t5xxlfp16.safetensors": {
|
107 |
+
"size": 15761074532,
|
108 |
+
"hash": "69a950c5d143ce782a7423c532c8a12b75da6a37b0e6f26a322acf4e76208912",
|
109 |
+
"muid": "c3a45b17d217"
|
110 |
+
},
|
111 |
+
"checkpoints/SD3x/sd3_medium_incl_clips_t5xxlfp8.safetensors": {
|
112 |
+
"size": 10867168284,
|
113 |
+
"hash": "92db4295e9c9ab8401ef60566d975656a35b0bd0f6d9ce0d083725171f7b3174",
|
114 |
+
"muid": "41d49489bc24"
|
115 |
+
},
|
116 |
+
"checkpoints/sd_xl_base_1.0_0.9vae.safetensors": {
|
117 |
+
"size": 6938078334,
|
118 |
+
"hash": "e6bb9ea85bbf7bf6478a7c6d18b71246f22e95d41bcdd80ed40aa212c33cfeff",
|
119 |
+
"muid": "5e756477ea9d"
|
120 |
+
},
|
121 |
+
"checkpoints/sd_xl_refiner_1.0_0.9vae.safetensors": {
|
122 |
+
"size": 6075981930,
|
123 |
+
"hash": "8d0ce6c016004cbdacd50f937dad381d8c396628d621a7f97191470532780164",
|
124 |
+
"muid": "bd66e233fe56"
|
125 |
+
},
|
126 |
+
"loras/FilmVelvia3.safetensors": {
|
127 |
+
"size": 151108832,
|
128 |
+
"hash": "ac8b0e4aa77be4d8b83da9bafe0134a2e36504c9b5263a7030394cffe4f7003a",
|
129 |
+
"muid": "6e93473d6228"
|
130 |
+
},
|
131 |
+
"loras/Hyper-SDXL-8steps-lora.safetensors": {
|
132 |
+
"size": 787359648,
|
133 |
+
"hash": "ca689190e8c46038550384b5675488526cfe5a40d35f82b27acb75c100f417c1",
|
134 |
+
"muid": "4f494295ed"
|
135 |
+
},
|
136 |
+
"loras/ip-adapter-faceid-plusv2_sdxl_lora.safetensors": {
|
137 |
+
"size": 371842896,
|
138 |
+
"hash": "f24b4bb2dad6638a09c00f151cde84991baf374409385bcbab53c1871a30cb7b",
|
139 |
+
"muid": "13623d29c464"
|
140 |
+
},
|
141 |
+
"loras/sd_xl_offset_example-lora_1.0.safetensors": {
|
142 |
+
"size": 49553604,
|
143 |
+
"hash": "4852686128f953d0277d0793e2f0335352f96a919c9c16a09787d77f55cbdf6f",
|
144 |
+
"muid": "8e3e833226b3"
|
145 |
+
},
|
146 |
+
"loras/SDXL_FILM_PHOTOGRAPHY_STYLE_V1.safetensors": {
|
147 |
+
"size": 912593164,
|
148 |
+
"hash": "9e2a98e1f27dbdbb0bda11523dee3444df099599bff7471c6e557f6ad55f27eb",
|
149 |
+
"muid": "b39d197db0"
|
150 |
+
},
|
151 |
+
"loras/sdxl_hyper_sd_4step_lora.safetensors": {
|
152 |
+
"size": 787359648,
|
153 |
+
"hash": "12f81a27d00a751a40d68fd15597091896c5a90f3bd632fb6c475607cbdad76e",
|
154 |
+
"muid": "1c88f7295856"
|
155 |
+
},
|
156 |
+
"loras/sdxl_lightning_4step_lora.safetensors": {
|
157 |
+
"size": 393854592,
|
158 |
+
"hash": "bf56cf2657efb15e465d81402ed481d1e11c4677e4bcce1bc11fe71ad8506b79",
|
159 |
+
"muid": "1c32bdb07a7c"
|
160 |
+
},
|
161 |
+
"embeddings/unaestheticXLhk1.safetensors": {
|
162 |
+
"size": 33296,
|
163 |
+
"hash": "ca29d24a64c1801efc82f8f4d05d98308e5b6c51c15d156fb61ac074f24f87ce",
|
164 |
+
"muid": "63578af5d493"
|
165 |
+
},
|
166 |
+
"embeddings/unaestheticXLv31.safetensors": {
|
167 |
+
"size": 33296,
|
168 |
+
"hash": "75fa9a0423a19c56ccaaea3b985b4999408b530585eca3f6108685c0007e5b2e",
|
169 |
+
"muid": "a20bca3b2146"
|
170 |
+
},
|
171 |
+
"diffusers/Kolors/text_encoder/pytorch_model-00001-of-00007.bin": {
|
172 |
+
"size": 1827781090,
|
173 |
+
"hash": "b6a6388dae55b598efe76c704e7f017bd84e6f6213466b7686a8f8326f78ab05",
|
174 |
+
"muid": "b6a6388dae"
|
175 |
+
},
|
176 |
+
"diffusers/Kolors/text_encoder/pytorch_model-00002-of-00007.bin": {
|
177 |
+
"size": 1968299480,
|
178 |
+
"hash": "2f96bef324acb5c3fe06b7a80f84272fe064d0327cbf14eddfae7af0d665a6ac",
|
179 |
+
"muid": "2f96bef324"
|
180 |
+
},
|
181 |
+
"diffusers/Kolors/text_encoder/pytorch_model-00003-of-00007.bin": {
|
182 |
+
"size": 1927415036,
|
183 |
+
"hash": "2400101255213250d9df716f778b7d2325f2fa4a8acaedee788338fceee5b27e",
|
184 |
+
"muid": "2400101255"
|
185 |
+
},
|
186 |
+
"diffusers/Kolors/text_encoder/pytorch_model-00004-of-00007.bin": {
|
187 |
+
"size": 1815225998,
|
188 |
+
"hash": "472567c1b0e448a19171fbb5b3dab5670426d0a5dfdfd2c3a87a60bb1f96037d",
|
189 |
+
"muid": "472567c1b0"
|
190 |
+
},
|
191 |
+
"diffusers/Kolors/text_encoder/pytorch_model-00005-of-00007.bin": {
|
192 |
+
"size": 1968299544,
|
193 |
+
"hash": "ef2aea78fa386168958e5ba42ecf09cbb567ed3e77ce2be990d556b84081e2b9",
|
194 |
+
"muid": "ef2aea78fa"
|
195 |
+
},
|
196 |
+
"diffusers/Kolors/text_encoder/pytorch_model-00006-of-00007.bin": {
|
197 |
+
"size": 1927415036,
|
198 |
+
"hash": "35191adf21a1ab632c2b175fcbb6c27601150026cb1ed5d602938d825954526f",
|
199 |
+
"muid": "35191adf21"
|
200 |
+
},
|
201 |
+
"diffusers/Kolors/text_encoder/pytorch_model-00007-of-00007.bin": {
|
202 |
+
"size": 1052808542,
|
203 |
+
"hash": "b7cdaa9b8ed183284905c49d19bf42360037fdf2f95acb3093039d3c3a459261",
|
204 |
+
"muid": "b7cdaa9b8e"
|
205 |
+
},
|
206 |
+
"diffusers/Kolors/unet/diffusion_pytorch_model.fp16.safetensors": {
|
207 |
+
"size": 5159140240,
|
208 |
+
"hash": "425ff1dcbe3a70ac13d3afdd69bd4e3176b0c3260722527c80b210f11d2d966c",
|
209 |
+
"muid": "9c8f088e4c"
|
210 |
+
},
|
211 |
+
"diffusers/Kolors/vae/diffusion_pytorch_model.fp16.safetensors": {
|
212 |
+
"size": 167335342,
|
213 |
+
"hash": "bcb60880a46b63dea58e9bc591abe15f8350bde47b405f9c38f4be70c6161e68",
|
214 |
+
"muid": "345f7343ee"
|
215 |
+
},
|
216 |
+
"DIFFUSERS/Kolors": {
|
217 |
+
"size": 20054,
|
218 |
+
"hash": "2eff895bdad33abb2f647c35ccb0c2d70173031effa4f126b335a815442af1e3",
|
219 |
+
"muid": "2eff895bda"
|
220 |
+
},
|
221 |
+
"controlnet/control-lora-canny-rank128.safetensors": {
|
222 |
+
"size": 395733680,
|
223 |
+
"hash": "56389dbb245ca44de91d662529bd4298abc55ce2318f60bc19454fb72ff68247",
|
224 |
+
"muid": "44f83205f6"
|
225 |
+
},
|
226 |
+
"controlnet/detection_Resnet50_Final.pth": {
|
227 |
+
"size": 109497761,
|
228 |
+
"hash": "6d1de9c2944f2ccddca5f5e010ea5ae64a39845a86311af6fdf30841b0a5a16d",
|
229 |
+
"muid": "6d1de9c294"
|
230 |
+
},
|
231 |
+
"controlnet/fooocus_ip_negative.safetensors": {
|
232 |
+
"size": 65616,
|
233 |
+
"hash": "d7caedfb46780825895718c7c8e9ee077e675c935ddfcf272f1c01a4fc8ea72d",
|
234 |
+
"muid": "4682603510"
|
235 |
+
},
|
236 |
+
"controlnet/fooocus_xl_cpds_128.safetensors": {
|
237 |
+
"size": 395706528,
|
238 |
+
"hash": "eec3fd8209a65b41341ea9f415de66909c97b30fb4d20965b3304e8e5251c2f1",
|
239 |
+
"muid": "aa82117d38"
|
240 |
+
},
|
241 |
+
"controlnet/ip-adapter-plus-face_sdxl_vit-h.bin": {
|
242 |
+
"size": 1013454761,
|
243 |
+
"hash": "50e886d82940b3c5873d80c2b06d8a4b0d0fccec70bc44fd53f16ac3cfd7fc36",
|
244 |
+
"muid": "50e886d829"
|
245 |
+
},
|
246 |
+
"controlnet/ip-adapter-plus_sdxl_vit-h.bin": {
|
247 |
+
"size": 1013454427,
|
248 |
+
"hash": "ec70edb7cc8e769c9388d94eeaea3e4526352c9fae793a608782d1d8951fde90",
|
249 |
+
"muid": "ec70edb7cc"
|
250 |
+
},
|
251 |
+
"controlnet/parsing_bisenet.pth": {
|
252 |
+
"size": 53289463,
|
253 |
+
"hash": "468e13ca13a9b43cc0881a9f99083a430e9c0a38abd935431d1c28ee94b26567",
|
254 |
+
"muid": "468e13ca13"
|
255 |
+
},
|
256 |
+
"controlnet/parsing_parsenet.pth": {
|
257 |
+
"size": 85331193,
|
258 |
+
"hash": "3d558d8d0e42c20224f13cf5a29c79eba2d59913419f945545d8cf7b72920de2",
|
259 |
+
"muid": "3d558d8d0e"
|
260 |
+
},
|
261 |
+
"inpaint/fooocus_inpaint_head.pth": {
|
262 |
+
"size": 52602,
|
263 |
+
"hash": "32f7f838e0c6d8f13437ba8411e77a4688d77a2e34df8857e4ef4d51f6b97692",
|
264 |
+
"muid": "32f7f838e0"
|
265 |
+
},
|
266 |
+
"inpaint/groundingdino_swint_ogc.pth": {
|
267 |
+
"size": 693997677,
|
268 |
+
"hash": "3b3ca2563c77c69f651d7bd133e97139c186df06231157a64c507099c52bc799",
|
269 |
+
"muid": "3b3ca2563c"
|
270 |
+
},
|
271 |
+
"inpaint/sam_vit_b_01ec64.pth": {
|
272 |
+
"size": 375042383,
|
273 |
+
"hash": "ec2df62732614e57411cdcf32a23ffdf28910380d03139ee0f4fcbe91eb8c912",
|
274 |
+
"muid": "ec2df62732"
|
275 |
+
},
|
276 |
+
"inpaint/sam_vit_h_4b8939.pth": {
|
277 |
+
"size": 2564550879,
|
278 |
+
"hash": "a7bf3b02f3ebf1267aba913ff637d9a2d5c33d3173bb679e46d9f338c26f262e",
|
279 |
+
"muid": "a7bf3b02f3"
|
280 |
+
},
|
281 |
+
"inpaint/sam_vit_l_0b3195.pth": {
|
282 |
+
"size": 1249524607,
|
283 |
+
"hash": "3adcc4315b642a4d2101128f611684e8734c41232a17c648ed1693702a49a622",
|
284 |
+
"muid": "3adcc4315b"
|
285 |
+
},
|
286 |
+
"unet/iclight_sd15_fbc_unet_ldm.safetensors": {
|
287 |
+
"size": 1719167896,
|
288 |
+
"hash": "97a662b8076504e0abad3b3a20b0e91d3312f2a5f19ffcef9059dab6d6679700",
|
289 |
+
"muid": "4019c0f83d"
|
290 |
+
},
|
291 |
+
"unet/iclight_sd15_fc_unet_ldm.safetensors": {
|
292 |
+
"size": 1719144856,
|
293 |
+
"hash": "9f91f1fc8ad2a2073c5a605fcd70cc70b2e7d2321b30aadca2a247d6490cd780",
|
294 |
+
"muid": "f220618ed6"
|
295 |
+
},
|
296 |
+
"unet/kolors_unet_fp16.safetensors": {
|
297 |
+
"size": 5159140240,
|
298 |
+
"hash": "425ff1dcbe3a70ac13d3afdd69bd4e3176b0c3260722527c80b210f11d2d966c",
|
299 |
+
"muid": "9c8f088e4c"
|
300 |
+
},
|
301 |
+
"llms/Helsinki-NLP/opus-mt-zh-en/pytorch_model.bin": {
|
302 |
+
"size": 312087009,
|
303 |
+
"hash": "9d8ceb91d103ef89400c9d9d62328b4858743cf8924878aee3b8afc594242ce0",
|
304 |
+
"muid": "9d8ceb91d1"
|
305 |
+
},
|
306 |
+
"llms/bert-base-uncased/model.safetensors": {
|
307 |
+
"size": 440449768,
|
308 |
+
"hash": "68d45e234eb4a928074dfd868cead0219ab85354cc53d20e772753c6bb9169d3",
|
309 |
+
"muid": "9c02f497ee"
|
310 |
+
},
|
311 |
+
"llms/nllb-200-distilled-600M/pytorch_model.bin": {
|
312 |
+
"size": 2460457927,
|
313 |
+
"hash": "c266c2cfd19758b6d09c1fc31ecdf1e485509035f6b51dfe84f1ada83eefcc42",
|
314 |
+
"muid": "c266c2cfd1"
|
315 |
+
},
|
316 |
+
"llms/superprompt-v1/model.safetensors": {
|
317 |
+
"size": 307867048,
|
318 |
+
"hash": "4f31e59c0582d4a74aac96ffb4ea9f5d64b268564ae5d1f68e8620dc940127d7",
|
319 |
+
"muid": "ac31ee526b"
|
320 |
+
},
|
321 |
+
"vae/ae.safetensors": {
|
322 |
+
"size": 335304388,
|
323 |
+
"hash": "afc8e28272cd15db3919bacdb6918ce9c1ed22e96cb12c4d5ed0fba823529e38",
|
324 |
+
"muid": "ddec9c299f"
|
325 |
+
},
|
326 |
+
"vae/ponyDiffusionV6XL_vae.safetensors": {
|
327 |
+
"size": 334641162,
|
328 |
+
"hash": "235745af8d86bf4a4c1b5b4f529868b37019a10f7c0b2e79ad0abca3a22bc6e1",
|
329 |
+
"muid": "55f20a1016"
|
330 |
+
},
|
331 |
+
"vae/sdxl_fp16.vae.safetensors": {
|
332 |
+
"size": 167335342,
|
333 |
+
"hash": "bcb60880a46b63dea58e9bc591abe15f8350bde47b405f9c38f4be70c6161e68",
|
334 |
+
"muid": "345f7343ee"
|
335 |
+
},
|
336 |
+
"checkpoints/Kolors-Inpainting.safetensors": {
|
337 |
+
"size": 5159169040,
|
338 |
+
"hash": "235db024626d7291e5d8af6776e8f49fa719c90221da9a54b553bb746101a787",
|
339 |
+
"muid": "781857d59e"
|
340 |
+
},
|
341 |
+
"controlnet/Kolors-ControlNet-Canny.safetensors": {
|
342 |
+
"size": 2526129624,
|
343 |
+
"hash": "ab34969b4ee57a182deb6e52e15d06c81c5285739caf4db2d8774135fd2b99e7",
|
344 |
+
"muid": "0dec730f7e"
|
345 |
+
},
|
346 |
+
"controlnet/Kolors-ControlNet-Depth.safetensors": {
|
347 |
+
"size": 2526129624,
|
348 |
+
"hash": "b2e9f9ff67c6c8e3b3fbe833f9596d9d16d456b1911633af9aeb4b80949ee60b",
|
349 |
+
"muid": "0ad6e5c573"
|
350 |
+
},
|
351 |
+
"controlnet/Kolors-ControlNet-Pose.safetensors": {
|
352 |
+
"size": 2526129624,
|
353 |
+
"hash": "2d21bbb821c903166c7c79f8a3435b51a39fd449cd227f74ac1d345bbc4eb153",
|
354 |
+
"muid": "3fdfc617f9"
|
355 |
+
},
|
356 |
+
"checkpoints/juggernautXL_v8Rundiffusion.safetensors": {
|
357 |
+
"size": 7105348592,
|
358 |
+
"hash": "aeb7e9e6897a1e58b10494bd989d001e3d4bc9b634633cd7b559838f612c2867",
|
359 |
+
"muid": "f84d1c1e05d4"
|
360 |
+
},
|
361 |
+
"checkpoints/juggernautXL_juggXIByRundiffusion.safetensors": {
|
362 |
+
"size": 7105350536,
|
363 |
+
"hash": "33e58e86686f6b386c526682b5da9228ead4f91d994abd4b053442dc5b42719e",
|
364 |
+
"muid": "2f1dcc5762"
|
365 |
+
},
|
366 |
+
"unet/Kolors-Inpainting.safetensors": {
|
367 |
+
"size": 5159169040,
|
368 |
+
"hash": "235db024626d7291e5d8af6776e8f49fa719c90221da9a54b553bb746101a787",
|
369 |
+
"muid": "781857d59e"
|
370 |
+
},
|
371 |
+
"controlnet/Kolors-ControlNet-Canny.safetensors": {
|
372 |
+
"size": 2526129624,
|
373 |
+
"hash": "ab34969b4ee57a182deb6e52e15d06c81c5285739caf4db2d8774135fd2b99e7",
|
374 |
+
"muid": "0dec730f7e"
|
375 |
+
},
|
376 |
+
"controlnet/Kolors-ControlNet-Depth.safetensors": {
|
377 |
+
"size": 2526129624,
|
378 |
+
"hash": "b2e9f9ff67c6c8e3b3fbe833f9596d9d16d456b1911633af9aeb4b80949ee60b",
|
379 |
+
"muid": "0ad6e5c573"
|
380 |
+
},
|
381 |
+
"controlnet/Kolors-ControlNet-Pose.safetensors": {
|
382 |
+
"size": 2526129624,
|
383 |
+
"hash": "2d21bbb821c903166c7c79f8a3435b51a39fd449cd227f74ac1d345bbc4eb153",
|
384 |
+
"muid": "3fdfc617f9"
|
385 |
+
},
|
386 |
+
}
|
387 |
+
|
388 |
+
is_calc_hash = False # flag to print json update message
|
389 |
+
|
390 |
+
def sync_model_info(downurls):
|
391 |
+
print(f'downurls:{downurls}')
|
392 |
+
keylist = []
|
393 |
+
return keylist
|
394 |
+
|
395 |
+
|
396 |
+
class ModelsInfo:
|
397 |
+
|
398 |
+
def __init__(self, models_info_path, path_map, scan_hash=False):
|
399 |
+
self.scan_models_hash = scan_hash
|
400 |
+
self.info_path = models_info_path
|
401 |
+
self.path_map = path_map
|
402 |
+
self.m_info = {}
|
403 |
+
self.m_muid = {}
|
404 |
+
self.m_file = {}
|
405 |
+
self.load_model_info()
|
406 |
+
self.refresh_from_path(scan_hash)
|
407 |
+
|
408 |
+
def get_stat(self):
|
409 |
+
return len(self.m_info)
|
410 |
+
|
411 |
+
def load_model_info(self):
|
412 |
+
if os.path.exists(self.info_path):
|
413 |
+
try:
|
414 |
+
with open(self.info_path, "r", encoding="utf-8") as json_file:
|
415 |
+
self.m_info.update(json.load(json_file))
|
416 |
+
file_no_exists_list = []
|
417 |
+
for k in self.m_info.keys():
|
418 |
+
if self.m_info[k]['file']:
|
419 |
+
model_files = self.m_info[k]['file']
|
420 |
+
exists_file_list = []
|
421 |
+
for file in model_files:
|
422 |
+
file = file.replace("/", os.sep)
|
423 |
+
if os.path.exists(file):
|
424 |
+
if file in self.m_file and self.m_file[file]:
|
425 |
+
self.m_file[file].append(k)
|
426 |
+
else:
|
427 |
+
self.m_file.update({file: [k]})
|
428 |
+
if file not in exists_file_list:
|
429 |
+
exists_file_list.append(file)
|
430 |
+
if len(exists_file_list) > 0:
|
431 |
+
self.m_info[k]['file'] = exists_file_list
|
432 |
+
else:
|
433 |
+
file_no_exists_list.append(k)
|
434 |
+
if k not in file_no_exists_list and self.m_info[k]['muid']:
|
435 |
+
self.update_muid_map(self.m_info[k]['muid'], k)
|
436 |
+
for k in file_no_exists_list:
|
437 |
+
del self.m_info[k]
|
438 |
+
#print(f'load m_info_key:{self.m_info.keys()}')
|
439 |
+
except Exception as e:
|
440 |
+
print(f'[ModelInfo] Load model info file {self.info_path} failed!, error:{e}')
|
441 |
+
self.m_info = {}
|
442 |
+
self.m_muid = {}
|
443 |
+
self.m_file = {}
|
444 |
+
|
445 |
+
def refresh_from_path(self, scan_hash=False):
|
446 |
+
new_info_key = []
|
447 |
+
new_model_key = []
|
448 |
+
del_model_key = []
|
449 |
+
new_model_file = {}
|
450 |
+
new_file_key = []
|
451 |
+
del_file_key = []
|
452 |
+
|
453 |
+
self.scan_models_hash = scan_hash
|
454 |
+
for path in self.path_map.keys():
|
455 |
+
if self.path_map[path]:
|
456 |
+
path_filenames = self.get_path_filenames(path)
|
457 |
+
for (p, k) in path_filenames:
|
458 |
+
model_key = f"{path}/{k.replace(os.sep, '/')}"
|
459 |
+
file_path = os.path.join(p, k)
|
460 |
+
if file_path not in new_file_key:
|
461 |
+
new_file_key.append(file_path)
|
462 |
+
if model_key in new_model_file:
|
463 |
+
if file_path not in new_model_file[model_key]:
|
464 |
+
new_model_file[model_key].append(file_path)
|
465 |
+
else:
|
466 |
+
new_model_file[model_key] = [file_path]
|
467 |
+
if model_key not in new_info_key:
|
468 |
+
new_info_key.append(model_key)
|
469 |
+
if model_key not in self.m_info.keys():
|
470 |
+
new_model_key.append(model_key)
|
471 |
+
if not utils.echo_off:
|
472 |
+
print(f'[ModelInfo] new_model_key:{new_model_key}')
|
473 |
+
for k in self.m_info.keys():
|
474 |
+
if k not in new_info_key:
|
475 |
+
del_model_key.append(k)
|
476 |
+
for f in self.m_file.keys():
|
477 |
+
if f not in new_file_key:
|
478 |
+
del_file_key.append(f)
|
479 |
+
if not utils.echo_off:
|
480 |
+
print(f'[ModelInfo] del_model_key:{del_model_key}, del_file_key:{del_file_key}')
|
481 |
+
for f in new_model_key:
|
482 |
+
self.add_or_refresh_model(f, new_model_file[f])
|
483 |
+
for f in del_model_key:
|
484 |
+
self.remove_model(f)
|
485 |
+
for f in del_file_key:
|
486 |
+
self.remove_file(f)
|
487 |
+
self.save_model_info()
|
488 |
+
|
489 |
+
def get_path_filenames(self, path):
|
490 |
+
if path.isupper():
|
491 |
+
path_filenames = []
|
492 |
+
for f_path in self.path_map[path]:
|
493 |
+
path_filenames += [(f_path, entry) for entry in os.listdir(f_path) if
|
494 |
+
os.path.isdir(os.path.join(f_path, entry))]
|
495 |
+
else:
|
496 |
+
path_filenames = get_model_filenames(self.path_map[path])
|
497 |
+
return path_filenames
|
498 |
+
|
499 |
+
def add_or_refresh_model(self, model_key, file_path_list, url=None):
|
500 |
+
file_path_list_all = [] if model_key not in self.m_info else self.m_info[model_key]['file']
|
501 |
+
for file_path in file_path_list:
|
502 |
+
if file_path not in file_path_list_all:
|
503 |
+
file_path_list_all.append(file_path)
|
504 |
+
url1 = '' if model_key not in self.m_info else self.m_info[model_key]['url']
|
505 |
+
url = url1 if url is None else url
|
506 |
+
size, hash, muid = self.calculate_model_info(model_key, file_path_list[0])
|
507 |
+
self.m_info.update(
|
508 |
+
{model_key: {'size': size, 'hash': hash, 'file': file_path_list_all, 'muid': muid, 'url': url}})
|
509 |
+
self.update_muid_map(muid, model_key)
|
510 |
+
self.update_file_map(file_path_list_all, model_key)
|
511 |
+
|
512 |
+
def remove_model(self, model_key):
|
513 |
+
if self.m_info[model_key]['muid'] and self.m_info[model_key]['muid'] in self.m_muid:
|
514 |
+
self.remove_muid_map(self.m_info[model_key]['muid'], model_key)
|
515 |
+
if self.m_info[model_key]['file']:
|
516 |
+
self.remove_file_map(self.m_info[model_key]['file'], model_key)
|
517 |
+
del self.m_info[model_key]
|
518 |
+
|
519 |
+
def remove_file(self, file_path):
|
520 |
+
if file_path in self.m_file and self.m_file[file_path]:
|
521 |
+
for model_key in self.m_file[file_path]:
|
522 |
+
cata = model_key.split('/')[0]
|
523 |
+
if cata.isupper():
|
524 |
+
continue
|
525 |
+
if model_key in self.m_info and self.m_info[model_key]['file']:
|
526 |
+
if file_path in self.m_info[model_key]['file']:
|
527 |
+
self.m_info[model_key]['file'].remove(file_path)
|
528 |
+
if len(self.m_info[model_key]['file']) == 0:
|
529 |
+
self.remove_model(model_key)
|
530 |
+
del self.m_file[file_path]
|
531 |
+
|
532 |
+
def remove_file_map(self, file_paths, model_key):
|
533 |
+
for file_path in file_paths:
|
534 |
+
if file_path in self.m_file:
|
535 |
+
if model_key in self.m_file[file_path]:
|
536 |
+
self.m_file[file_path].remove(model_key)
|
537 |
+
if len(self.m_file[file_path]) == 0:
|
538 |
+
del self.m_file[file_path]
|
539 |
+
|
540 |
+
def update_file_map(self, file_paths, model_key):
|
541 |
+
for file_path in file_paths:
|
542 |
+
if file_path in self.m_file:
|
543 |
+
if model_key not in self.m_file[file_path]:
|
544 |
+
self.m_file[file_path].append(model_key)
|
545 |
+
else:
|
546 |
+
self.m_file.update({file_path: [model_key]})
|
547 |
+
|
548 |
+
def update_muid_map(self, muid, model_key):
|
549 |
+
if muid in self.m_muid and self.m_muid[muid]:
|
550 |
+
if model_key not in self.m_muid[muid]:
|
551 |
+
self.m_muid[muid].append(model_key)
|
552 |
+
else:
|
553 |
+
self.m_muid[muid] = [model_key]
|
554 |
+
|
555 |
+
def remove_muid_map(self, muid, model_key):
|
556 |
+
if muid in self.m_muid and self.m_muid[muid]:
|
557 |
+
if model_key in self.m_muid[muid]:
|
558 |
+
self.m_muid[muid].remove(model_key)
|
559 |
+
if len(self.m_muid[muid]) == 0:
|
560 |
+
del self.m_muid[muid]
|
561 |
+
|
562 |
+
def calculate_model_info(self, model_key, file_path):
|
563 |
+
global is_calc_hash
|
564 |
+
if os.path.isdir(file_path):
|
565 |
+
size = utils.get_size_subfolders(file_path)
|
566 |
+
else:
|
567 |
+
size = os.path.getsize(file_path)
|
568 |
+
if model_key in default_models_info.keys() and size == default_models_info[model_key]["size"]:
|
569 |
+
hash = default_models_info[model_key]["hash"]
|
570 |
+
muid = default_models_info[model_key]["muid"]
|
571 |
+
elif self.scan_models_hash:
|
572 |
+
is_calc_hash = True
|
573 |
+
print(f'[ModelInfo] Calculate hash for {file_path}')
|
574 |
+
if os.path.isdir(file_path):
|
575 |
+
hash = utils.calculate_sha256_subfolder(file_path)
|
576 |
+
muid = hash[:10]
|
577 |
+
else:
|
578 |
+
hash = utils.sha256(file_path, length=None)
|
579 |
+
_, file_extension = os.path.splitext(file_path)
|
580 |
+
if file_extension == '.safetensors':
|
581 |
+
print(f'[ModelInfo] Calculate addnet hash for {file_path}')
|
582 |
+
muid = utils.sha256(file_path, use_addnet_hash=True)
|
583 |
+
else:
|
584 |
+
muid = hash[:10]
|
585 |
+
else:
|
586 |
+
hash = ''
|
587 |
+
muid = ''
|
588 |
+
return size, hash, muid
|
589 |
+
|
590 |
+
def save_model_info(self):
|
591 |
+
global is_calc_hash
|
592 |
+
try:
|
593 |
+
with open(self.info_path, "w", encoding="utf-8") as json_file:
|
594 |
+
json.dump(self.m_info, json_file, indent=4)
|
595 |
+
if is_calc_hash:
|
596 |
+
print(f'[ModelInfo] Models info updated and saved to {self.info_path}')
|
597 |
+
except PermissionError:
|
598 |
+
print(f'[ModelInfo] Models info update and save failed: Permission denied, {self.info_path}')
|
599 |
+
except json.JSONDecodeError:
|
600 |
+
print(f'[ModelInfo] Models info update and save failed: JSON decode error, {self.info_path}')
|
601 |
+
except Exception as e:
|
602 |
+
print(f'[ModelInfo Models info update and save failed: {e}, {self.info_path}')
|
603 |
+
|
604 |
+
def refresh_file(self, action, file_path, url=None):
|
605 |
+
if action not in ['add', 'delete']:
|
606 |
+
print(f'[ModelInfo] Invalid action: {action}. Action must be either "add" or "delete".')
|
607 |
+
return
|
608 |
+
|
609 |
+
if action == 'add':
|
610 |
+
if not os.path.exists(file_path):
|
611 |
+
print(f'[ModelInfo] The added file does not exist: {file_path}')
|
612 |
+
return
|
613 |
+
|
614 |
+
# Determine the catalog and model_name
|
615 |
+
catalog = None
|
616 |
+
max_match_length = 0
|
617 |
+
model_name = os.path.basename(file_path)
|
618 |
+
for key, paths in self.path_map.items():
|
619 |
+
for path in paths:
|
620 |
+
if file_path.startswith(path) and len(path) > max_match_length:
|
621 |
+
catalog = key
|
622 |
+
max_match_length = len(path)
|
623 |
+
model_name = file_path[len(path) + 1:]
|
624 |
+
|
625 |
+
if not catalog:
|
626 |
+
print(f'[ModelInfo] The added file path {file_path} does not match any path in path_map.')
|
627 |
+
return
|
628 |
+
|
629 |
+
scan_hash = self.scan_models_hash
|
630 |
+
self.scan_models_hash = True
|
631 |
+
model_name = model_name.replace(os.sep, '/')
|
632 |
+
model_key = f'{catalog}/{model_name}'
|
633 |
+
self.add_or_refresh_model(model_key, [file_path], url)
|
634 |
+
print(f'[ModelInfo] Added model {model_key} with file {file_path}')
|
635 |
+
self.scan_models_hash = scan_hash
|
636 |
+
|
637 |
+
elif action == 'delete':
|
638 |
+
if file_path not in self.m_file:
|
639 |
+
print(f'[ModelInfo] File not found in model info: {file_path}')
|
640 |
+
return
|
641 |
+
self.remove_file(file_path)
|
642 |
+
print(f'[ModelInfo] Deleted model {model_key} with file {file_path}')
|
643 |
+
|
644 |
+
self.save_model_info()
|
645 |
+
|
646 |
+
    def exists_model(self, catalog='', model_path='', muid=None):
        if muid and muid in self.m_muid:
            return True
        if catalog and model_path:
            model_path = model_path.replace('\\', '/').replace(os.sep, '/')
            model_key = f'{catalog}/{model_path}'
            if model_key in self.m_info:
                return True
        return False

    def exists_model_key(self, model_key):
        if model_key:
            cata = model_key.split('/')[0]
            model_path = model_key[len(cata) + 1:].replace('\\', '/').replace(os.sep, '/')
            model_key = f'{cata}/{model_path}'
            if model_key in self.m_info:
                return True
        return False

    def get_model_filepath(self, catalog='', model_path='', muid=None):
        if muid and muid in self.m_muid:
            model_key = self.m_muid[muid][0]
            file_paths = self.m_info[model_key]['file']
            return file_paths[0]
        if catalog and model_path:
            model_path = model_path.replace('\\', '/').replace(os.sep, '/')
            model_key = f'{catalog}/{model_path}'
            if model_key in self.m_info:
                return self.m_info[model_key]['file'][0]
        return ''

    def get_model_names(self, catalog, filters=[], casesensitive=False, reverse=False):
        result = []
        result_reverse = []
        for f in self.m_info.keys():
            cata = f.split('/')[0]
            m_path_or_file = f[len(cata) + 1:].replace('/', os.sep)
            if catalog and cata == catalog:
                result_reverse.append(m_path_or_file)
                if len(filters) > 0:
                    for item in filters:
                        if casesensitive:
                            if item in m_path_or_file:
                                result.append(m_path_or_file)
                                result_reverse.pop()
                                break
                        else:
                            if item.lower() in m_path_or_file.lower():
                                result.append(m_path_or_file)
                                result_reverse.pop()
                                break
                else:
                    result.append(m_path_or_file)
                    result_reverse.pop()
        if reverse:
            return sorted(result_reverse, key=str.casefold)
        return sorted(result, key=str.casefold)

    def get_model_info(self, catalog, model_path):
        if catalog and model_path:
            model_path = model_path.replace('\\', '/').replace(os.sep, '/')
            model_key = f'{catalog}/{model_path}'
            return self.get_model_key_info(model_key)

    def get_model_key_info(self, model_key):
        if model_key:
            cata = model_key.split('/')[0]
            model_path = model_key[len(cata) + 1:].replace('\\', '/').replace(os.sep, '/')
            model_key = f'{cata}/{model_path}'
            if model_key in self.m_info:
                return self.m_info[model_key]
        return None

    def get_file_muid(self, file_path):
        if file_path:
            if file_path not in self.m_file:
                self.refresh_file('add', file_path)
            model_key = self.m_file[file_path][0]
            muid = self.m_info[model_key]['muid']
            if not muid:
                scan_hash = self.scan_models_hash
                self.scan_models_hash = True
                self.add_or_refresh_model(model_key, [file_path])
                self.save_model_info()
                self.scan_models_hash = scan_hash
                muid = self.m_info[model_key]['muid']
            return muid
        return ''

    def get_model_path_by_name(self, catalog, name, casesensitive=True, collection=False):
        if catalog and name:
            catalog = f'{catalog}/'
            if os.sep in name:
                name = name.replace(os.sep, '/')
            name1 = f'/{name}'
            if not casesensitive:
                name1 = name1.lower()
                catalog = catalog.lower()
            results = []
            for f in self.m_info.keys():
                if not casesensitive:
                    f1 = f.lower()
                else:
                    f1 = f
                # Match on '/name' (as in get_file_path_by_name below) so that
                # 'model.ckpt' does not also match 'other_model.ckpt'.
                if f1.startswith(catalog) and f1.endswith(name1):
                    cata = f.split('/')[0]
                    model_path = f[len(cata) + 1:].replace('/', os.sep)
                    if not collection:
                        return model_path
                    results.append(model_path)
            if collection:
                return results
        return ''

    def get_file_path_by_name(self, catalog, name, casesensitive=True, collection=False):
        if catalog and name:
            cata = f'{catalog}/'
            if os.sep in name:
                name = name.replace(os.sep, '/')
            name1 = f'/{name}'
            if not casesensitive:
                name1 = name1.lower()
                cata = cata.lower()
            results = []
            for f in self.m_info.keys():
                if not casesensitive:
                    f1 = f.lower()
                else:
                    f1 = f
                if f1.startswith(cata) and f1.endswith(name1):
                    file_paths = self.m_info[f]['file']
                    if not collection:
                        return file_paths[0]
                    results.append(file_paths[0])
            if collection and len(results) > 0:
                return results
            return os.path.join(self.path_map[catalog][0], name.replace('/', os.sep))
        return ''

def get_model_filenames(folder_paths, extensions=None, name_filter=None, variation=False):
    if extensions is None:
        extensions = ['.pth', '.ckpt', '.bin', '.safetensors', '.fooocus.patch', '.gguf']
    files = []
    for folder in folder_paths:
        files += get_files_from_folder(folder, extensions, name_filter, variation)
    return files


# Tracks the last-seen modification time of each file per folder, so that a
# scan with variation=True only reports files that are new or have changed.
folder_variation = {}


def get_files_from_folder(folder_path, extensions=None, name_filter=None, variation=False):
    global folder_variation

    if not os.path.isdir(folder_path):
        raise ValueError("Folder path is not a valid directory.")

    filenames = []
    for root, dirs, files in os.walk(folder_path, topdown=False):
        relative_path = os.path.relpath(root, folder_path)
        if relative_path == ".":
            relative_path = ""
        for filename in sorted(files, key=lambda s: s.casefold()):
            base_name, file_extension = os.path.splitext(filename)
            if (extensions is None or file_extension.lower() in extensions) and (
                    name_filter is None or name_filter in base_name):
                path = os.path.join(relative_path, filename)
                if variation:
                    mtime = int(os.path.getmtime(os.path.join(root, filename)))
                    if folder_path not in folder_variation or path not in folder_variation[folder_path] \
                            or mtime > folder_variation[folder_path][path]:
                        if folder_path not in folder_variation:
                            folder_variation.update({folder_path: {path: mtime}})
                        else:
                            folder_variation[folder_path].update({path: mtime})
                        filenames.append((folder_path, path))
                else:
                    filenames.append((folder_path, path))
    return filenames
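The catalog helpers above are easiest to see end to end. Below is a minimal usage sketch; the ModelsInfo class name and its constructor arguments are assumptions (only its methods are visible in this diff), and the file paths are purely illustrative.

from backend_base.models_info import ModelsInfo  # assumed class name

# Assumed constructor: an info JSON path plus a catalog -> root-folders map.
mi = ModelsInfo('models_info.json', {'checkpoints': ['/models/checkpoints']})

# Register a newly downloaded file, then resolve it back by catalog + name.
mi.refresh_file('add', '/models/checkpoints/sd_xl_base_1.0.safetensors')
if mi.exists_model(catalog='checkpoints', model_path='sd_xl_base_1.0.safetensors'):
    print(mi.get_model_filepath(catalog='checkpoints',
                                model_path='sd_xl_base_1.0.safetensors'))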
backend_base/params_mapper.py
ADDED
@@ -0,0 +1,94 @@
class ComfyTaskParams:
    def __init__(self, params):
        self.params = params
        self.workflow = ''

    # Maps a Fooocus parameter name to one or more ComfyUI node inputs.
    # Each target has the form 'class_type:node_title:input_name'; multiple
    # targets are separated by ';' and every matching node is updated.
    fooo2node = {
        'seed': 'KSampler:main_sampler:seed;TiledKSampler:main_sampler:seed;KolorsSampler:main_sampler:seed;RandomNoise:noise_seed:noise_seed;easy seed:sync_seed:seed',
        'steps': 'KSampler:main_sampler:steps;TiledKSampler:main_sampler:steps;KolorsSampler:main_sampler:steps;BasicScheduler:scheduler_select:steps',
        'cfg_scale': 'KSampler:main_sampler:cfg;TiledKSampler:main_sampler:cfg;KolorsSampler:main_sampler:cfg;CLIPTextEncodeFlux:prompt:guidance',
        'sampler': 'KSampler:main_sampler:sampler_name;TiledKSampler:main_sampler:sampler_name;KSamplerSelect:sampler_select:sampler_name',
        'scheduler': 'KSampler:main_sampler:scheduler;TiledKSampler:main_sampler:scheduler;KolorsSampler:main_sampler:scheduler;BasicScheduler:scheduler_select:scheduler',
        'denoise': 'KSampler:main_sampler:denoise;TiledKSampler:main_sampler:denoise;KolorsSampler:main_sampler:denoise_strength;BasicScheduler:scheduler_select:denoise',
        'tiling': 'TiledKSampler:main_sampler:tiling;SeamlessTile:seamless_tile:tiling;CircularVAEDecode:vae_tiled:tiling',
        'tiled_offset_x': 'OffsetImage:offset_image:x_percent',
        'tiled_offset_y': 'OffsetImage:offset_image:y_percent',
        'base_model': 'CheckpointLoaderSimple:base_model:ckpt_name;UNETLoader:base_model:unet_name;CheckpointLoaderNF4:base_model:ckpt_name;UnetLoaderGGUF:base_model:unet_name',
        'base_model_dtype': 'UNETLoader:base_model:weight_dtype',
        'merge_model': 'UNETLoader:merge_model:unet_name',
        'model_merge_ratio': 'ModelMergeSimple:model_merge_ratio:ratio',
        'lora_speedup': 'LoraLoaderModelOnly:lora_speedup:lora_name',
        'lora_speedup_strength': 'LoraLoaderModelOnly:lora_speedup:strength_model',
        'lora_1': 'LoraLoaderModelOnly:lora_1:lora_name;LoraLoaderModelOnly:lora_speedup:lora_name',
        'lora_1_strength': 'LoraLoaderModelOnly:lora_1:strength_model;LoraLoaderModelOnly:lora_speedup:strength_model',
        'lora_2': 'LoraLoaderModelOnly:lora_2:lora_name',
        'lora_2_strength': 'LoraLoaderModelOnly:lora_2:strength_model',
        'lora_3': 'LoraLoaderModelOnly:lora_3:lora_name',
        'lora_3_strength': 'LoraLoaderModelOnly:lora_3:strength_model',
        'lora_4': 'LoraLoaderModelOnly:lora_4:lora_name',
        'lora_4_strength': 'LoraLoaderModelOnly:lora_4:strength_model',
        'lora_5': 'LoraLoaderModelOnly:lora_5:lora_name',
        'lora_5_strength': 'LoraLoaderModelOnly:lora_5:strength_model',
        'width': 'EmptyLatentImage:aspect_ratios_size:width;EmptySD3LatentImage:aspect_ratios_size:width;ImageResize+:resize_input_image:width;KolorsSampler:main_sampler:width;easy int:aspect_ratios_width:value',
        'height': 'EmptyLatentImage:aspect_ratios_size:height;EmptySD3LatentImage:aspect_ratios_size:height;ImageResize+:resize_input_image:height;KolorsSampler:main_sampler:height;easy int:aspect_ratios_height:value',
        'prompt': 'CLIPTextEncode:prompt:text;MZ_ChatGLM3_V2:prompt:text;KolorsTextEncode:prompt_negative_prompt:prompt;CLIPTextEncodeFlux:prompt:t5xxl;CLIPTextEncodeFlux:prompt:clip_l',
        'negative_prompt': 'CLIPTextEncode:negative_prompt:text;MZ_ChatGLM3_V2:negative_prompt:text;KolorsTextEncode:prompt_negative_prompt:negative_prompt',
        'clip_model': 'DualCLIPLoader:clip_model:clip_name1;DualCLIPLoaderGGUF:clip_model:clip_name1;CLIPLoaderGGUF:clip_model:clip_name;CLIPLoader:clip_model:clip_name',
        'llms_model': 'MZ_ChatGLM3Loader:llms_model:chatglm3_checkpoint;DownloadAndLoadChatGLM3:llms_model:precision',
        'input_image': 'LoadImage:input_image:image',
        'layer_diffuse_injection': 'LayeredDiffusionApply:layer_diffuse_apply:config',
        'sd_version': 'LayeredDiffusionDecode:layer_diffuse_decode:sd_version;LayeredDiffusionDecodeRGBA:layer_diffuse_decode_rgba:sd_version',
        'layer_diffuse_cond': 'LayeredDiffusionCondApply:layer_diffuse_cond_apply:config',

        'light_source_text_switch': 'easy imageSwitch:ic_light_source_text_switch:boolean',
        'light_source_shape_switch': 'easy imageSwitch:ic_light_source_shape_switch:boolean',
        'light_source_text': 'LightSource:ic_light_source_text:light_position',
        'light_apply': 'LoadAndApplyICLightUnet:ic_light_apply:model_path',
        'light_detail_transfer': 'DetailTransfer:ic_light_detail_transfer:mode',
        'light_source_start_color': 'CreateGradientFromCoords:ic_light_source_color:start_color',
        'light_source_end_color': 'CreateGradientFromCoords:ic_light_source_color:end_color',
        'light_editor_path': 'SplineEditor:ic_light_editor:points_store'
    }

    def set_mapping_rule(self, maps):
        self.fooo2node.update(maps)

    def update_params(self, new_params):
        self.params.update(new_params)

    def delete_params(self, keys):
        for k in keys:
            if k in self.params:
                del self.params[k]

    def convert2comfy(self, workflow):
        #print(f'params:{self.params}')
        self.workflow = workflow
        for pk1, v in self.params.items():
            if pk1 in self.fooo2node:
                nk = self.fooo2node[pk1]
                self.replace_key(nk, v)
        return self.workflow

    def replace_key(self, nk, v):
        lines = nk.split(';')
        for line in lines:
            parts = line.strip().split(':')
            class_type = parts[0].strip()
            meta_title = parts[1].strip()
            inputs = parts[2].strip()
            for n in self.workflow.keys():
                if self.workflow[n]["class_type"] == class_type and self.workflow[n]["_meta"]["title"] == meta_title:
                    if '|' in inputs:
                        # A '|' in the target means one '|'-separated value
                        # string feeds several inputs of the same node.
                        keys = inputs.split('|')
                        vs = v.strip().split('|')
                        for i in range(len(keys)):
                            self.workflow[n]["inputs"][keys[i]] = vs[i]
                    else:
                        self.workflow[n]["inputs"][inputs] = v
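For reference, a sketch of how ComfyTaskParams rewrites a ComfyUI API-format workflow: each parameter is routed to every node whose class_type and _meta title match one of its fooo2node targets. The two-node workflow dict below is illustrative, not taken from this repository.

from backend_base.params_mapper import ComfyTaskParams

workflow = {
    "3": {"class_type": "KSampler", "_meta": {"title": "main_sampler"},
          "inputs": {"seed": 0, "steps": 20, "cfg": 7.0}},
    "6": {"class_type": "CLIPTextEncode", "_meta": {"title": "prompt"},
          "inputs": {"text": ""}},
}

mapper = ComfyTaskParams({'seed': 12345, 'steps': 30, 'prompt': 'a cat'})
workflow = mapper.convert2comfy(workflow)

# 'seed' matched KSampler:main_sampler:seed; 'prompt' matched CLIPTextEncode:prompt:text
assert workflow["3"]["inputs"]["seed"] == 12345
assert workflow["6"]["inputs"]["text"] == 'a cat'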
backend_base/utils.py
ADDED
@@ -0,0 +1,73 @@
import os
import hashlib
from typing import Optional

echo_off = True

HASH_SHA256_LENGTH = 10


def sha256(filename, use_addnet_hash=False, length=HASH_SHA256_LENGTH):
    if use_addnet_hash:
        with open(filename, "rb") as file:
            sha256_value = addnet_hash_safetensors(file)
    else:
        sha256_value = calculate_sha256(filename)
    #print(f"{sha256_value}")

    return sha256_value[:length] if length is not None else sha256_value


def addnet_hash_safetensors(b):
    """kohya-ss hash for safetensors from https://github.com/kohya-ss/sd-scripts/blob/main/library/train_util.py"""
    hash_sha256 = hashlib.sha256()
    blksize = 1024 * 1024

    b.seek(0)
    header = b.read(8)
    n = int.from_bytes(header, "little")

    offset = n + 8
    b.seek(offset)
    for chunk in iter(lambda: b.read(blksize), b""):
        hash_sha256.update(chunk)

    return hash_sha256.hexdigest()


def calculate_sha256(filename) -> str:
    hash_sha256 = hashlib.sha256()
    blksize = 1024 * 1024

    with open(filename, "rb") as f:
        for chunk in iter(lambda: f.read(blksize), b""):
            hash_sha256.update(chunk)

    return hash_sha256.hexdigest()


def calculate_sha256_subfolder(folder_path) -> str:
    hash_sha256 = hashlib.sha256()
    blksize = 1024 * 1024
    for entry in os.listdir(folder_path):
        full_path = os.path.join(folder_path, entry)
        if os.path.isfile(full_path):
            with open(full_path, "rb") as f:
                for chunk in iter(lambda: f.read(blksize), b""):
                    hash_sha256.update(chunk)
    return hash_sha256.hexdigest()


def get_size_subfolders(folder_path):
    total_size = 0
    for entry in os.listdir(folder_path):
        full_path = os.path.join(folder_path, entry)
        if os.path.isfile(full_path):
            total_size += os.path.getsize(full_path)
    return total_size

def load_model_for_path(models_url, root_name):
    # NOTE: this helper depends on ComfyUI's folder_paths registry and a
    # load_file_from_url downloader; the import locations below are
    # assumptions, since neither name is imported in this file.
    import folder_paths
    from modules.model_loader import load_file_from_url

    models_root = folder_paths.get_folder_paths(root_name)[0]
    for model_path in models_url:
        model_full_path = os.path.join(models_root, model_path)
        if not os.path.exists(model_full_path):
            model_full_path = load_file_from_url(
                url=models_url[model_path], model_dir=models_root, file_name=model_path
            )
    return
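A short usage sketch of the hashing helpers above; the checkpoint path is illustrative. With use_addnet_hash=True, only the tensor payload of a .safetensors file is hashed (the 8-byte length prefix and the JSON header are skipped), which reproduces the kohya-ss/AddNet style hash.

from backend_base.utils import sha256

file = '/models/checkpoints/sd_xl_base_1.0.safetensors'
short_id = sha256(file)                                    # first 10 hex chars of the full-file hash
addnet_id = sha256(file, use_addnet_hash=True, length=12)  # header-skipping safetensors hash
print(short_id, addnet_id)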