Update ComfyUI/custom_nodes/ComfyUI-BrushNet/brushnet_nodes.py
ComfyUI/custom_nodes/ComfyUI-BrushNet/brushnet_nodes.py
CHANGED
@@ -6,7 +6,7 @@ import torch
 import torchvision.transforms as T
 import torch.nn.functional as F
 from accelerate import init_empty_weights, load_checkpoint_and_dispatch
-
+import math
 import comfy
 import folder_paths
 
@@ -523,7 +523,9 @@ class BlendInpaint:
 
 
 
-def scale_mask_and_image(image, mask, width, height, side_margin):
+def scale_mask_and_image(image, mask, side_margin, min_side, max_side):
+    min_area = min_side * min_side
+    max_area = max_side * max_side
     h0, w0 = mask.shape
     iy, ix = (mask == 1).nonzero(as_tuple=True)
 
@@ -536,15 +538,14 @@ def scale_mask_and_image(image, mask, width, height, side_margin):
     x_c, y_c = (x_min + x_max) / 2.0, (y_min + y_max) / 2.0
     mask_width, mask_height = x_max - x_min + 1, y_max - y_min + 1
 
-    aspect_ratio = width / height
     mask_aspect_ratio = mask_width / mask_height
 
-    if mask_aspect_ratio >
+    if mask_aspect_ratio > 1:
         new_mask_width = mask_width
-        new_mask_height = mask_width
+        new_mask_height = mask_width
     else:
         new_mask_height = mask_height
-        new_mask_width = mask_height
+        new_mask_width = mask_height
 
     margin = side_margin/100.0
     cut_width = int(new_mask_width * (1 + 2 * margin))
@@ -553,21 +554,32 @@ def scale_mask_and_image(image, mask, width, height, side_margin):
     x0 = max(0, min(w0 - cut_width, int(x_c - cut_width / 2)))
     y0 = max(0, min(h0 - cut_height, int(y_c - cut_height / 2)))
 
-    # Adjust cut dimensions if they exceed image dimensions
     cut_width = min(cut_width, w0 - x0)
     cut_height = min(cut_height, h0 - y0)
 
     cut_image = image[y0:y0+cut_height, x0:x0+cut_width]
     cut_mask = mask[y0:y0+cut_height, x0:x0+cut_width]
 
-
-
-
-
-
-
-
+
+    current_area = cut_width * cut_height
+    print(f"current_area: {current_area} min_area: {min_area} max_area: {max_area}")
+
+    if current_area > max_area or current_area <= min_area:
+        if current_area > max_area:
+            print("current_area > max_area")
+            scale_factor = math.sqrt(max_area / current_area)
+        elif current_area <= min_area:
+            print("current_area <= min_area")
+            scale_factor = math.sqrt(min_area / current_area)
+        new_width = int(cut_width * scale_factor)
+        new_height = int(cut_height * scale_factor)
+        scaled_image = F.interpolate(cut_image.permute(2, 0, 1).unsqueeze(0), size=(new_height, new_width), mode='bilinear', align_corners=False).squeeze(0).permute(1, 2, 0)
+        scaled_mask = F.interpolate(cut_mask.unsqueeze(0).unsqueeze(0).float(), size=(new_height, new_width), mode='nearest').squeeze(0).squeeze(0)
         return scaled_image, scaled_mask, (x0, y0, cut_width, cut_height)
+    else:
+        print("original size mask")
+        return cut_image, cut_mask, (x0, y0, cut_width, cut_height)
+
 
 class CutForInpaint:
 
@@ -577,7 +589,9 @@ class CutForInpaint:
                     {
                         "image": ("IMAGE",),
                         "mask": ("MASK",),
-                        "side_margin_percent": ("INT", {"default": 10, "min": 0, "max": 1000})
+                        "side_margin_percent": ("INT", {"default": 10, "min": 0, "max": 1000}),
+                        "min_side": ("INT", {"default": 512, "min": 128, "max": 4096}),
+                        "max_side": ("INT", {"default": 1536, "min": 128, "max": 4096})
                     },
                 }
 
@@ -587,12 +601,12 @@ class CutForInpaint:
 
     FUNCTION = "cut_for_inpaint"
 
-    def cut_for_inpaint(self, image: torch.Tensor, mask: torch.Tensor, side_margin_percent: int):
+    def cut_for_inpaint(self, image: torch.Tensor, mask: torch.Tensor, side_margin_percent: int, min_side: int, max_side: int):
         ret = []
         msk = []
        org = []
         for i in range(image.shape[0]):
-            cut_image, cut_mask, (x0, y0, cut_width, cut_height) = scale_mask_and_image(image[i], mask[i],
+            cut_image, cut_mask, (x0, y0, cut_width, cut_height) = scale_mask_and_image(image[i], mask[i], side_margin_percent, min_side, max_side)
             ret.append(cut_image)
             msk.append(cut_mask)
             org.append(torch.IntTensor([x0, y0, cut_width, cut_height]))