pranaya20 commited on
Commit
8b2b3e8
·
verified ·
1 Parent(s): f081ce7

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +26 -22
app.py CHANGED
@@ -9,44 +9,48 @@ from PIL import Image
9
  # Load YOLO model for tree detection
10
  yolo_model = YOLO("yolov8n.pt")
11
 
12
- # Load MiDaS depth model
13
  midas = torch.hub.load("intel-isl/MiDaS", "MiDaS_small")
14
  midas.to("cpu").eval()
15
- midas_transforms = torch.hub.load("intel-isl/MiDaS", "transforms").small
 
 
16
 
17
  def estimate_tree_height(image):
18
- # Convert image to OpenCV format
19
- image = np.array(image)
20
- image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
21
 
22
- # Object Detection (Tree)
23
- results = yolo_model(image_rgb)
24
- boxes = results[0].boxes.xyxy.cpu().numpy() # Get bounding boxes
25
  labels = results[0].boxes.cls.cpu().numpy()
26
 
27
- tree_boxes = [box for box, label in zip(boxes, labels) if int(label) == 0] # class 0 usually means 'person/tree'
 
28
 
29
  if not tree_boxes:
30
  return "No tree detected", None, None
31
 
32
- x1, y1, x2, y2 = tree_boxes[0]
33
- tree_crop = image[int(y1):int(y2), int(x1):int(x2)]
34
 
35
  # Depth estimation
36
- input_tensor = midas_transforms(Image.fromarray(image_rgb)).to("cpu")
37
  with torch.no_grad():
38
  depth_map = midas(input_tensor.unsqueeze(0))[0]
39
- depth_resized = torch.nn.functional.interpolate(
40
- depth_map.unsqueeze(0),
41
- size=image_rgb.shape[:2],
42
- mode="bicubic",
43
- align_corners=False
44
- ).squeeze().cpu().numpy()
45
 
46
- avg_depth = np.mean(depth_resized[int(y1):int(y2), int(x1):int(x2)])
47
- estimated_height_m = avg_depth * 1.8 # arbitrary scaling for demo
 
 
 
 
 
 
 
48
 
49
- # Wikipedia summary (simulate species info)
50
  try:
51
  summary = wikipedia.summary("tree", sentences=2)
52
  except Exception:
@@ -64,7 +68,7 @@ demo = gr.Interface(
64
  gr.Textbox(label="Tree Species Info")
65
  ],
66
  title="🌳 Tree Measurement App",
67
- description="Capture a tree image to estimate its height and get basic species info."
68
  )
69
 
70
  demo.launch()
 
9
  # Load YOLO model for tree detection
10
  yolo_model = YOLO("yolov8n.pt")
11
 
12
+ # Load MiDaS model for depth estimation
13
  midas = torch.hub.load("intel-isl/MiDaS", "MiDaS_small")
14
  midas.to("cpu").eval()
15
+
16
+ # ✅ FIXED: Use correct MiDaS transform attribute
17
+ midas_transforms = torch.hub.load("intel-isl/MiDaS", "transforms").small_transform
18
 
19
  def estimate_tree_height(image):
20
+ # Convert to OpenCV format
21
+ image_np = np.array(image)
22
+ image_bgr = cv2.cvtColor(image_np, cv2.COLOR_RGB2BGR)
23
 
24
+ # Run YOLO detection
25
+ results = yolo_model(image_bgr)
26
+ boxes = results[0].boxes.xyxy.cpu().numpy()
27
  labels = results[0].boxes.cls.cpu().numpy()
28
 
29
+ # NOTE: COCO class 0 is 'person', and COCO has no 'tree' class — detecting trees requires a custom-trained model
30
+ tree_boxes = [box for box, label in zip(boxes, labels) if int(label) == 0]
31
 
32
  if not tree_boxes:
33
  return "No tree detected", None, None
34
 
35
+ x1, y1, x2, y2 = map(int, tree_boxes[0])
36
+ tree_crop = image_np[y1:y2, x1:x2]
37
 
38
  # Depth estimation
39
+ input_tensor = midas_transforms(Image.fromarray(image_np)).to("cpu")
40
  with torch.no_grad():
41
  depth_map = midas(input_tensor.unsqueeze(0))[0]
 
 
 
 
 
 
42
 
43
+ depth_resized = torch.nn.functional.interpolate(
44
+ depth_map.unsqueeze(0),
45
+ size=image_np.shape[:2],
46
+ mode="bicubic",
47
+ align_corners=False
48
+ ).squeeze().cpu().numpy()
49
+
50
+ avg_depth = np.mean(depth_resized[y1:y2, x1:x2])
51
+ estimated_height_m = avg_depth * 1.8 # scale arbitrarily for demo
52
 
53
+ # Wikipedia summary
54
  try:
55
  summary = wikipedia.summary("tree", sentences=2)
56
  except Exception:
 
68
  gr.Textbox(label="Tree Species Info")
69
  ],
70
  title="🌳 Tree Measurement App",
71
+ description="Upload or capture a tree image to estimate its height and get basic species info."
72
  )
73
 
74
  demo.launch()