gaur3009 committed
Commit 7bda381 · verified · 1 Parent(s): 96e997e

Update app.py

Files changed (1)
  1. app.py +13 -14
app.py CHANGED
@@ -27,7 +27,7 @@ def estimate_depth(image):
 def blend_design(cloth_img, design_img):
     """Blend design onto clothing naturally."""
     cloth_img = cloth_img.convert("RGB")
-    design_img = design_img.convert("RGB")
+    design_img = design_img.convert("RGBA")
     cloth_np = np.array(cloth_img)
     design_np = np.array(design_img)
 
@@ -36,18 +36,21 @@ def blend_design(cloth_img, design_img):
     dh, dw, _ = design_np.shape
     scale_factor = min(w / dw, h / dh) * 0.4  # Scale to 40% of clothing area
     new_w, new_h = int(dw * scale_factor), int(dh * scale_factor)
-    design_np = cv2.resize(design_np, (new_w, new_h))
+    design_np = cv2.resize(design_np, (new_w, new_h), interpolation=cv2.INTER_AREA)
 
-    # Convert design to grayscale and darken for print effect
-    design_gray = cv2.cvtColor(design_np, cv2.COLOR_RGB2GRAY)
-    design_np = cv2.cvtColor(design_gray, cv2.COLOR_GRAY2RGB)
-    design_np = cv2.convertScaleAbs(design_np, alpha=1.5, beta=-40)  # Increase contrast
+    # Convert design to include transparency effect
+    alpha_channel = design_np[:, :, 3] / 255.0  # Extract alpha channel
+    design_np = design_np[:, :, :3]  # Remove alpha channel for processing
 
     # Create a blank canvas and paste the resized design at the center
-    design_canvas = np.zeros_like(cloth_np)
     x_offset = (w - new_w) // 2
     y_offset = int(h * 0.35)  # Move slightly upward for a natural position
-    design_canvas[y_offset:y_offset+new_h, x_offset:x_offset+new_w] = design_np
+
+    for c in range(3):
+        cloth_np[y_offset:y_offset+new_h, x_offset:x_offset+new_w, c] = (
+            cloth_np[y_offset:y_offset+new_h, x_offset:x_offset+new_w, c] * (1 - alpha_channel) +
+            design_np[:, :, c] * alpha_channel
+        )
 
     # Estimate depth for fold detection
     depth_map = estimate_depth(cloth_img)
@@ -63,13 +66,9 @@ def blend_design(cloth_img, design_img):
     map_x, map_y = np.meshgrid(np.arange(w), np.arange(h))
     map_x = np.clip(np.float32(map_x + displacement_x), 0, w - 1)
     map_y = np.clip(np.float32(map_y + displacement_y), 0, h - 1)
-    warped_design = cv2.remap(design_canvas, map_x, map_y, interpolation=cv2.INTER_LINEAR, borderMode=cv2.BORDER_REFLECT)
-
-    # Use Poisson blending for seamless integration
-    mask = (warped_design > 0).astype(np.uint8) * 255
-    blended = cv2.seamlessClone(warped_design, cloth_np, mask, (w//2, int(h * 0.35 + new_h//2)), cv2.NORMAL_CLONE)
+    warped_cloth = cv2.remap(cloth_np, map_x, map_y, interpolation=cv2.INTER_LINEAR, borderMode=cv2.BORDER_REFLECT)
 
-    return Image.fromarray(blended)
+    return Image.fromarray(warped_cloth)
 
 def main(cloth, design):
     return blend_design(cloth, design)
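
For reference, a minimal, self-contained sketch of the per-channel alpha ("over") compositing that the updated blend_design performs when pasting the RGBA design onto the garment. The helper name paste_design, the offsets, and the demo images are illustrative assumptions, not part of app.py; only NumPy and Pillow are assumed.

# Sketch: per-channel alpha compositing of an RGBA design onto an RGB garment crop.
# paste_design and the example values below are hypothetical, not taken from app.py.
import numpy as np
from PIL import Image

def paste_design(cloth_np, design_rgba, x_offset, y_offset):
    """Blend design_rgba (H, W, 4, uint8) onto cloth_np (uint8 RGB) at the given offset."""
    h, w = design_rgba.shape[:2]
    alpha = design_rgba[:, :, 3:4] / 255.0            # (H, W, 1), in [0, 1]
    rgb = design_rgba[:, :, :3].astype(np.float64)    # drop alpha for the colour math
    roi = cloth_np[y_offset:y_offset + h, x_offset:x_offset + w].astype(np.float64)
    blended = roi * (1.0 - alpha) + rgb * alpha       # classic "over" compositing
    out = cloth_np.copy()
    out[y_offset:y_offset + h, x_offset:x_offset + w] = blended.astype(np.uint8)
    return out

# Example: place a 100x100 half-transparent red square near the top of a white garment image.
cloth = np.array(Image.new("RGB", (600, 800), "white"))
logo = np.zeros((100, 100, 4), dtype=np.uint8)
logo[..., 0] = 255   # red
logo[..., 3] = 128   # ~50% opacity
result = paste_design(cloth, logo, x_offset=250, y_offset=280)
Image.fromarray(result).save("preview.png")

In this sketch the blend is done in float and converted back to uint8 only at the end, which avoids truncation surprises when float results are written into an integer array; the source image is also copied rather than modified in place.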