gaur3009 committed · Commit 023c7c8 · verified · 1 Parent(s): deceec0

Update app.py

Files changed (1): app.py (+8 -28)
app.py CHANGED
@@ -5,6 +5,7 @@ import numpy as np
 from torchvision import transforms
 from PIL import Image
 from transformers import DPTForDepthEstimation, DPTFeatureExtractor
+import torchvision.transforms.functional as F

 # Load depth estimation model
 model_name = "Intel/dpt-large"
@@ -30,39 +31,18 @@ def warp_design(cloth_img, design_img):
     cloth_np = np.array(cloth_img)
     design_np = np.array(design_img)
     h, w, _ = cloth_np.shape
-    dh, dw, _ = design_np.shape
-
-    # Resize design to fit within 70% of the clothing area
-    scale_factor = min(w / dw, h / dh) * 0.7
-    new_w, new_h = int(dw * scale_factor), int(dh * scale_factor)
-    design_np = cv2.resize(design_np, (new_w, new_h))
-
-    # Create blank canvas with transparent background
-    design_canvas = np.zeros_like(cloth_np, dtype=np.uint8)
-    x_offset = (w - new_w) // 2
-    y_offset = (h - new_h) // 2
-    design_canvas[y_offset:y_offset+new_h, x_offset:x_offset+new_w] = design_np

     # Estimate depth map
     depth_map = estimate_depth(cloth_img)
     depth_map = cv2.resize(depth_map, (w, h))

-    # Generate displacement map based on depth
-    displacement_x = cv2.Sobel(depth_map, cv2.CV_32F, 1, 0, ksize=5)
-    displacement_y = cv2.Sobel(depth_map, cv2.CV_32F, 0, 1, ksize=5)
-
-    displacement_x = cv2.normalize(displacement_x, None, -3, 3, cv2.NORM_MINMAX)
-    displacement_y = cv2.normalize(displacement_y, None, -3, 3, cv2.NORM_MINMAX)
-
-    map_x, map_y = np.meshgrid(np.arange(w), np.arange(h))
-    map_x = np.clip(np.float32(map_x + displacement_x), 0, w - 1)
-    map_y = np.clip(np.float32(map_y + displacement_y), 0, h - 1)
-    warped_design = cv2.remap(design_canvas, map_x, map_y, interpolation=cv2.INTER_LINEAR, borderMode=cv2.BORDER_REFLECT)
+    # Compute optical flow for warping
+    flow = cv2.calcOpticalFlowFarneback(depth_map, depth_map, None, 0.5, 3, 15, 3, 5, 1.2, 0)
+    flow_map = np.column_stack((flow[..., 0] + np.arange(w), flow[..., 1] + np.arange(h)[:, None]))
+    warped_design = cv2.remap(design_np, flow_map, None, cv2.INTER_LINEAR, borderMode=cv2.BORDER_REFLECT)

-    # Blend images without excessive transparency
-    mask = np.any(warped_design > 0, axis=-1).astype(np.uint8) * 255
-    blended = cloth_np.copy()
-    np.copyto(blended, warped_design, where=(mask[..., None] > 0))
+    # Blending
+    blended = cv2.addWeighted(cloth_np, 0.7, warped_design, 0.3, 0)

     return Image.fromarray(blended)
 
@@ -78,4 +58,4 @@ iface = gr.Interface(
 )

 if __name__ == "__main__":
-    iface.launch(share=True)
+    iface.launch(share=True)
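Note on the new warp step: cv2.calcOpticalFlowFarneback expects two 8-bit single-channel frames and computes the flow between them, so passing the same depth_map twice yields all-zero flow, and np.column_stack on two (h, w) arrays produces an (h, 2*w) array rather than the two-channel float32 map cv2.remap accepts. A minimal sketch that keeps the warp depth-driven, mirroring the Sobel displacement logic this commit removes (the helper name depth_warp is illustrative; it assumes depth_map is a float32 (h, w) array and design_np is an RGB image already sized to the cloth):

import cv2
import numpy as np

def depth_warp(design_np, depth_map, strength=3.0):
    """Warp the design along depth gradients (sketch of the removed approach)."""
    h, w = depth_map.shape[:2]
    # Depth gradients approximate the local slope of the garment surface.
    dx = cv2.Sobel(depth_map, cv2.CV_32F, 1, 0, ksize=5)
    dy = cv2.Sobel(depth_map, cv2.CV_32F, 0, 1, ksize=5)
    # Keep displacements within a few pixels.
    dx = cv2.normalize(dx, None, -strength, strength, cv2.NORM_MINMAX)
    dy = cv2.normalize(dy, None, -strength, strength, cv2.NORM_MINMAX)
    # cv2.remap wants absolute float32 sampling coordinates.
    map_x, map_y = np.meshgrid(np.arange(w, dtype=np.float32),
                               np.arange(h, dtype=np.float32))
    map_x = np.clip(map_x + dx, 0, w - 1)
    map_y = np.clip(map_y + dy, 0, h - 1)
    return cv2.remap(design_np, map_x, map_y, interpolation=cv2.INTER_LINEAR,
                     borderMode=cv2.BORDER_REFLECT)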
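Similarly, cv2.addWeighted blends the two images everywhere, so the cloth is dimmed to 70% even where the warped design has no pixels; the masked copy being removed here avoided that. A sketch of that compositing, assuming both arrays are uint8 RGB of the same shape (composite is an illustrative helper name):

import numpy as np

def composite(cloth_np, warped_design):
    """Overlay the warped design on the cloth only where it has content."""
    # Treat non-black pixels of the warped design as opaque.
    mask = np.any(warped_design > 0, axis=-1)
    blended = cloth_np.copy()
    blended[mask] = warped_design[mask]
    return blended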