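"""Gradio demo app: run a custom-trained YOLOv5 detector on uploaded images."""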
import gradio as gr
from PIL import Image
import cv2
import torch
import numpy as np

# Load the custom-trained YOLOv5 weights once at startup so every request reuses the same model.
model = torch.hub.load('ultralytics/yolov5', 'custom', path='myYOLO/best.pt', force_reload=True)


def yolov5_inference(image: Image.Image) -> Image.Image:
    """Run YOLOv5 on a PIL image and return the image with detections drawn."""
    results = model(image)
    rendered = results.render()[0]  # rendered frame as a NumPy array with boxes and labels
    return Image.fromarray(rendered)


inputs = gr.Image(type='pil', label='Input image')
outputs = gr.Image(type='pil', label='Detections')
title = 'YOLOv5 Object Detection'

demo_app = gr.Interface(
    fn=yolov5_inference,
    inputs=inputs,
    outputs=outputs,
    title=title,
    # examples=examples,      # supply a list of example image paths to re-enable cached examples
    # cache_examples=True,
    live=True,
    theme='huggingface',
)

if __name__ == '__main__':
    demo_app.launch(debug=True, enable_queue=True)