Upload app.py
app.py
ADDED
@@ -0,0 +1,42 @@
+import gradio as gr
+from ultralytics import YOLO
+from PIL import Image, ImageDraw
+import numpy as np
+
+# Load your YOLOv8 model
+model = YOLO("my_yolo_model.onnx")  # or "yolov8n.pt"
+
+def predict(image):
+    # Convert Gradio's numpy array to PIL Image
+    pil_image = Image.fromarray(image)
+
+    # Run YOLOv8 inference
+    results = model(pil_image)
+
+    # Extract bounding boxes and labels
+    boxes = results[0].boxes.xyxy.cpu().numpy()  # Coordinates
+    classes = results[0].boxes.cls.cpu().numpy()  # Class IDs
+    confidences = results[0].boxes.conf.cpu().numpy()  # Confidence scores
+
+    # Draw bounding boxes on the image (PIL)
+    draw = ImageDraw.Draw(pil_image)
+    for box, cls, conf in zip(boxes, classes, confidences):
+        x1, y1, x2, y2 = box
+        label = f"{model.names[int(cls)]} {conf:.2f}"
+
+        # Draw rectangle and label
+        draw.rectangle([x1, y1, x2, y2], outline="blue", width=2)
+        draw.text((x1, y1), label, fill="red")
+
+    return pil_image  # Return PIL Image (Gradio handles RGB)
+
+# Gradio Interface
+demo = gr.Interface(
+    fn=predict,
+    inputs=gr.Image(label="Input Image"),
+    outputs=gr.Image(label="Detected Objects"),
+    title="Pothole Detection by Yunusa Jibrin",
+    examples=["example1.jpg", "example2.jpg"],  # Optional
+)
+
+demo.launch()
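
A quick way to sanity-check the model wiring locally, without launching the Gradio UI, is to call predict() on an image file directly. The snippet below is only a sketch: it assumes the imports, model, and predict() defined in app.py above are already available in the session (for example, by running the file with demo.launch() temporarily commented out), and that a sample image named test.jpg exists in the working directory; none of these names are part of the upload above.

# Hypothetical local smoke test -- not part of app.py.
# Assumes model and predict() from app.py are already defined in this session.
from PIL import Image
import numpy as np

img = np.array(Image.open("test.jpg").convert("RGB"))  # mimic Gradio's default numpy input
annotated = predict(img)                                # PIL image with boxes and labels drawn
annotated.save("test_annotated.jpg")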