import gradio as gr
from ultralytics import YOLO
import torch
import cv2
import numpy as np
import wikipedia
from PIL import Image

# Load YOLO model for object detection
yolo_model = YOLO("yolov8n.pt")

# Load MiDaS model for depth estimation
midas = torch.hub.load("intel-isl/MiDaS", "MiDaS_small")
midas.to("cpu").eval()

# Preprocessing transform that matches the small MiDaS model
midas_transforms = torch.hub.load("intel-isl/MiDaS", "transforms").small_transform


def estimate_tree_height(image):
    # Convert the PIL image to OpenCV (BGR) format
    image_np = np.array(image)
    image_bgr = cv2.cvtColor(image_np, cv2.COLOR_RGB2BGR)

    # Run YOLO detection
    results = yolo_model(image_bgr)
    boxes = results[0].boxes.xyxy.cpu().numpy()
    labels = results[0].boxes.cls.cpu().numpy()

    # Filter detections by class id. Note: the stock COCO-trained yolov8n.pt has
    # no "tree" class (class 0 is "person"); for real use, swap in a custom-trained
    # tree detector and update the class id below.
    tree_boxes = [box for box, label in zip(boxes, labels) if int(label) == 0]

    if not tree_boxes:
        return "No tree detected", None, None

    x1, y1, x2, y2 = map(int, tree_boxes[0])
    tree_crop = image_np[y1:y2, x1:x2]

    # Depth estimation: the MiDaS transform expects an RGB numpy array and
    # already returns a batched tensor, so no extra unsqueeze is needed.
    input_batch = midas_transforms(image_np).to("cpu")
    with torch.no_grad():
        prediction = midas(input_batch)  # shape: (1, H', W')

    # Resize the prediction back to the original image resolution
    depth_resized = torch.nn.functional.interpolate(
        prediction.unsqueeze(1),  # (1, 1, H', W') so interpolate gets a 4D tensor
        size=image_np.shape[:2],
        mode="bicubic",
        align_corners=False,
    ).squeeze().cpu().numpy()

    avg_depth = np.mean(depth_resized[y1:y2, x1:x2])
    # MiDaS predicts relative (unitless) inverse depth, so this scale factor is an
    # arbitrary demo placeholder, not a calibrated conversion to metres.
    estimated_height_m = avg_depth * 1.8

    # Wikipedia summary
    try:
        summary = wikipedia.summary("tree", sentences=2)
    except Exception:
        summary = "Tree species information not available."

    return (
        f"Estimated Tree Height: {estimated_height_m:.2f} meters",
        Image.fromarray(tree_crop),
        summary,
    )


# Gradio interface
demo = gr.Interface(
    fn=estimate_tree_height,
    inputs=gr.Image(type="pil"),
    outputs=[
        gr.Textbox(label="Tree Height Estimate"),
        gr.Image(label="Detected Tree"),
        gr.Textbox(label="Tree Species Info"),
    ],
    title="🌳 Tree Measurement App",
    description="Upload or capture a tree image to estimate its height and get basic species info.",
)

demo.launch(share=True)
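

# --------------------------------------------------------------------------
# Reference-only sketch (runs nothing until the Gradio server above is closed):
# because MiDaS_small returns relative inverse depth, the scaling in
# estimate_tree_height is only a demo placeholder. Below is a minimal geometric
# alternative, assuming the camera-to-tree distance and the camera's vertical
# field of view are known; the function, its parameters, and the example values
# are illustrative assumptions, not part of the app above.
import math


def pinhole_height_estimate(box_height_px, image_height_px, distance_m, vertical_fov_deg=55.0):
    """Approximate object height from its pixel height via the pinhole camera model."""
    # Focal length in pixels, derived from the vertical field of view.
    focal_px = (image_height_px / 2) / math.tan(math.radians(vertical_fov_deg) / 2)
    # Similar triangles: real height = pixel height * distance / focal length.
    return box_height_px * distance_m / focal_px


# Example: a tree spanning 900 px in a 1080 px-tall photo taken roughly 10 m away.
# print(f"{pinhole_height_estimate(900, 1080, 10.0):.1f} m")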