gaur3009 committed
Commit 7888c29 · verified · 1 Parent(s): 1fbb462

Update app.py

Files changed (1):
  1. app.py +10 -7
app.py CHANGED
@@ -18,7 +18,7 @@ def estimate_depth(image):
     input_tensor = midas_transform(image).unsqueeze(0).to(device)
     with torch.no_grad():
         depth = midas_model(input_tensor).squeeze().cpu().numpy()
-    depth = cv2.resize(depth, (image.width, image.height))
+    depth = cv2.resize(depth, (image.size[0], image.size[1]))
     depth = (depth - depth.min()) / (depth.max() - depth.min()) * 255
     return depth.astype(np.uint8)
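A note on the resize line: image.size on a PIL Image is (width, height), and cv2.resize takes its dsize argument in that same order, while the resulting NumPy array is indexed (height, width). A minimal standalone sketch of the resize-and-normalize step, with a random array standing in for the MiDaS prediction (the app's midas_model and midas_transform are not reproduced here):

import cv2
import numpy as np
from PIL import Image

# Stand-ins for the app's objects: a blank cloth photo and a dummy "depth"
# prediction at the model's own working resolution (hypothetical values).
image = Image.new("RGB", (640, 480))                 # PIL size == (width, height)
depth = np.random.rand(384, 384).astype(np.float32)  # fake model output

# cv2.resize takes dsize as (width, height), matching image.size's order.
depth = cv2.resize(depth, (image.size[0], image.size[1]))

# Min-max normalize to 0..255, as estimate_depth() does.
depth = (depth - depth.min()) / (depth.max() - depth.min()) * 255
depth = depth.astype(np.uint8)
print(depth.shape)  # (480, 640): NumPy arrays are (height, width)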
 
@@ -28,11 +28,13 @@ def apply_tps_warping(design, depth):
     grid_x, grid_y = np.meshgrid(np.arange(w), np.arange(h))
     displacement_x = cv2.Sobel(depth, cv2.CV_32F, 1, 0, ksize=5)
     displacement_y = cv2.Sobel(depth, cv2.CV_32F, 0, 1, ksize=5)
-    displacement_x = cv2.normalize(displacement_x, None, -10, 10, cv2.NORM_MINMAX)
-    displacement_y = cv2.normalize(displacement_y, None, -10, 10, cv2.NORM_MINMAX)
+    displacement_x = cv2.normalize(displacement_x, None, -5, 5, cv2.NORM_MINMAX)
+    displacement_y = cv2.normalize(displacement_y, None, -5, 5, cv2.NORM_MINMAX)
+
     map_x = np.clip(grid_x + displacement_x, 0, w - 1).astype(np.float32)
     map_y = np.clip(grid_y + displacement_y, 0, h - 1).astype(np.float32)
-    warped_design = cv2.remap(design, map_x, map_y, interpolation=cv2.INTER_CUBIC, borderMode=cv2.BORDER_REFLECT)
+
+    warped_design = cv2.remap(design, map_x, map_y, interpolation=cv2.INTER_LINEAR, borderMode=cv2.BORDER_REFLECT)
     return warped_design

 def blend_design(cloth_img, design_img):
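Despite its name, apply_tps_warping does gradient-based displacement mapping rather than a true thin-plate spline: Sobel derivatives of the depth map are rescaled into a small per-pixel shift (now +/-5 instead of +/-10, presumably to tame the distortion) and passed to cv2.remap, now with bilinear instead of bicubic sampling. A self-contained sketch of the updated warp on synthetic inputs (the smoothed random depth map and flat gray design are stand-ins for the app's data):

import cv2
import numpy as np

# Synthetic stand-ins: a smooth random "depth" map and a flat gray design.
h, w = 256, 256
depth = cv2.GaussianBlur((np.random.rand(h, w) * 255).astype(np.float32), (51, 51), 0)
design = np.full((h, w, 3), 200, dtype=np.uint8)

# Depth gradients approximate the local surface slope.
grid_x, grid_y = np.meshgrid(np.arange(w), np.arange(h))
displacement_x = cv2.Sobel(depth, cv2.CV_32F, 1, 0, ksize=5)
displacement_y = cv2.Sobel(depth, cv2.CV_32F, 0, 1, ksize=5)
# Rescale the raw gradients into a +/-5 pixel shift, per the updated code.
displacement_x = cv2.normalize(displacement_x, None, -5, 5, cv2.NORM_MINMAX)
displacement_y = cv2.normalize(displacement_y, None, -5, 5, cv2.NORM_MINMAX)

# Sample the design at the displaced coordinates.
map_x = np.clip(grid_x + displacement_x, 0, w - 1).astype(np.float32)
map_y = np.clip(grid_y + displacement_y, 0, h - 1).astype(np.float32)
warped = cv2.remap(design, map_x, map_y, interpolation=cv2.INTER_LINEAR,
                   borderMode=cv2.BORDER_REFLECT)
print(warped.shape)  # (256, 256, 3)

INTER_LINEAR is also cheaper than INTER_CUBIC and avoids cubic overshoot at hard depth edges, a plausible motivation for the switch.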
@@ -63,9 +65,10 @@ def blend_design(cloth_img, design_img):
     depth_map = estimate_depth(cloth_img)
     warped_design = apply_tps_warping(design_canvas, depth_map)

-    # Blend design onto cloth
-    for c in range(3):
-        cloth_np[:, :, c] = (cloth_np[:, :, c] * (1 - alpha_channel) + warped_design[:, :, c] * alpha_channel)
+    # Ensure alpha is applied correctly
+    mask = np.zeros_like(cloth_np, dtype=np.float32)
+    mask[y_offset:y_offset+new_h, x_offset:x_offset+new_w] = np.expand_dims(alpha_channel, axis=-1)
+    cloth_np = (cloth_np * (1 - mask) + warped_design * mask).astype(np.uint8)

     return Image.fromarray(cloth_np)
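The old loop multiplied alpha_channel directly against the full cloth image, which only broadcasts when the design covers the entire garment; the new code pastes the alpha into a full-size float mask at the design's offset and blends in one vectorized expression. A sketch with shapes mirroring blend_design()'s variables (the sizes and offsets below are illustrative assumptions, and alpha_channel is assumed to hold floats in 0..1):

import numpy as np

# Illustrative stand-ins for blend_design()'s variables (values assumed).
H, W = 480, 640
new_h, new_w, y_offset, x_offset = 200, 200, 100, 150
cloth_np = np.full((H, W, 3), 180, dtype=np.uint8)        # cloth photo
warped_design = np.zeros((H, W, 3), dtype=np.uint8)       # warped design canvas
warped_design[y_offset:y_offset+new_h, x_offset:x_offset+new_w] = (0, 0, 255)
alpha_channel = np.ones((new_h, new_w), dtype=np.float32) # opaque design patch

# Full-size float mask: alpha pasted at the design's offset, zero elsewhere;
# expand_dims lets the single alpha plane broadcast across all 3 channels.
mask = np.zeros_like(cloth_np, dtype=np.float32)
mask[y_offset:y_offset+new_h, x_offset:x_offset+new_w] = np.expand_dims(alpha_channel, axis=-1)
cloth_np = (cloth_np * (1 - mask) + warped_design * mask).astype(np.uint8)
print(cloth_np[y_offset + 10, x_offset + 10])  # design color inside the patch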
 
 