import gradio as gr
import torch
from PIL import Image
import json
from ultralytics import YOLO

# Load the six custom YOLOv8 checkpoints (M/S/N sizes, each in a "Raw" and a "Pre" variant)
# from the working directory using the ultralytics YOLO API.
m_raw_model = YOLO("M-Raw.pt")
s_raw_model = YOLO("S-Raw.pt")
n_raw_model = YOLO("N-Raw.pt")
m_pre_model = YOLO("M-Pre.pt")
s_pre_model = YOLO("S-Pre.pt")
n_pre_model = YOLO("N-Pre.pt")

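# All six checkpoints expose the same Ultralytics predict() interface, so snap() below can
# dispatch on the selected model name and call whichever checkpoint was chosen in the UI.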
def snap(image, model, conf, iou):
    
        # If no model selected, use M-Raw
        if model is None:
            model = "M-Raw"
        
        # Run the selected model
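        # conf is the minimum confidence threshold and iou the NMS IoU threshold,
        # both forwarded to the Ultralytics predict() call.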
        results = None
        if model == "M-Raw":
            results = m_raw_model(image, conf=conf, iou=iou)
        elif model == "N-Raw":
            results = n_raw_model(image, conf=conf, iou=iou)
        elif model == "S-Raw":
            results = s_raw_model(image, conf=conf, iou=iou)
        elif model == "M-Pre":
            results = m_pre_model(image, conf=conf, iou=iou)
        elif model == "N-Pre":
            results = n_pre_model(image, conf=conf, iou=iou)
        elif model == "S-Pre":
            results = s_pre_model(image, conf=conf, iou=iou)