import torch
import numpy as np
import gradio as gr
# Load a pretrained YOLOv5s model from Torch Hub
model = torch.hub.load('ultralytics/yolov5', 'yolov5s', pretrained=True)
model.conf = 0.25          # confidence threshold
model.iou = 0.45           # NMS IoU threshold
model.agnostic = False     # disable class-agnostic NMS
model.multi_label = False  # one label per box
model.max_det = 1000       # maximum detections per image
def detect(img):
    # Run inference at 640px; results.pred[0] is an (n, 6) tensor of detections
    results = model(img, size=640)
    predictions = results.pred[0]
    boxes = predictions[:, :4]      # x1, y1, x2, y2
    scores = predictions[:, 4]      # confidence per detection
    categories = predictions[:, 5]  # class index per detection
    # render() draws the boxes onto the image; squeeze drops the batch dimension
    new_image = np.squeeze(results.render())
    return new_image
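
# A minimal sketch of inspecting the raw predictions outside the Gradio app,
# assuming a local image file exists (the path below is hypothetical):
#
#   results = model('test.jpg', size=640)
#   for *xyxy, conf, cls in results.pred[0]:
#       print(model.names[int(cls)], float(conf), [float(v) for v in xyxy])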
examples = ['apple_img.jpg',]
css = ".output-image, .input-image, .image-preview {height: 400px !important}"
iface = gr.Interface(fn=detect,
                     inputs=gr.Image(type="numpy"),
                     outputs=gr.Image(type="numpy"),
                     css=css,
                     examples=examples,
                     )
iface.launch(debug=True, inline=True)