import gradio as gr
import torch
import torchvision
import numpy as np
from PIL import Image

# Load custom YOLOv5 weights via the YOLOv5 hub entrypoint, forcing CPU inference
# (the hub 'custom' entrypoint takes a device argument rather than map_location)
model = torch.hub.load('ultralytics/yolov5', 'custom', 'model_weights/datasets_1000_41class.pt', device='cpu')

# Define a YOLOv5 prediction function: resize the input, run inference, and
# return the rendered image with bounding boxes and labels drawn on
def yolo(im, size=640):
    g = size / max(im.size)  # gain so the longest side becomes `size` pixels
    im = im.resize(tuple(int(x * g) for x in im.size), Image.LANCZOS)  # resize (Image.ANTIALIAS was removed from recent Pillow)
    results = model(im)  # inference
    results.render()  # draws boxes and labels onto the result images
    return Image.fromarray(results.ims[0])  # results.ims (formerly results.imgs) holds the annotated array

# Image components (the gr.inputs / gr.outputs namespaces are no longer available in current Gradio; use gr.Image directly)
inputs = gr.Image(type='pil', label="Original Image")
outputs = gr.Image(type='pil', label="Output Image")

title = "BandiCount: Detecting Australian native animal species"
description = "BandiCount: Detecting Australian native animal species in NSW national parks, using object detection. Upload an image or click one of the example images below."
article = ""
# Note: the .mp4 entries are videos; a PIL Image input only handles still images, so example caching may fail on them
examples = [['BrushtailPossum.jpg'], ['Eagle.jpg'], ['Macropod.jpg'], ['cat.jpg'], ['echidna.gif'], ['fox_in_snow.mp4'], ['godzilla_fantail.png'], ['ibis.jpg'], ['koala1.jpeg'], ['koala2.jpg'], ['lyrebird.mp4']]
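
# Optional local sanity check (a sketch, not part of the original app): uncomment to run
# the detector once on a bundled example image before the interface starts.
# _test = Image.open('koala1.jpeg')  # one of the example images listed above
# yolo(_test).save('koala1_annotated.jpg')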

# Build and launch the interface; example caching is an Interface argument and
# queuing is enabled via .queue() in current Gradio releases
gr.Interface(yolo, inputs, outputs, title=title, description=description, article=article,
             examples=examples, theme="huggingface", cache_examples=True).queue().launch()