import torch
import torchvision
import torch.nn as nn
import torchvision.transforms as transforms
import gradio as gr

# Build a ResNet-50 with a 2-class head and load the fine-tuned weights.
# weights=None avoids downloading ImageNet weights that model.pth overwrites anyway.
model = torchvision.models.resnet50(weights=None)
model.fc = nn.Linear(model.fc.in_features, 2)
device = torch.device("cpu")
model.load_state_dict(torch.load("model.pth", map_location=device))
model.to(device)
model.eval()

# Standard ImageNet preprocessing: resize, convert to tensor, normalize.
transform = transforms.Compose([
    transforms.Resize((224, 224)),
    transforms.ToTensor(),
    transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
])

# Output labels (Portuguese): "fruit fit for consumption" / "fruit unfit for consumption".
classes = ['Fruta própria para o consumo', 'Fruta imprópria para o consumo']

# Run a single image through the model and return the predicted label.
def predict(image):
    image = transform(image).unsqueeze(0).to(device)
    with torch.no_grad():
        output = model(image)
        _, predicted = torch.max(output, 1)
    return classes[predicted.item()]

# Input/output components (the gr.inputs/gr.outputs namespaces were removed in Gradio 3+).
image_input = gr.Image(type="pil", label="Upload Image")
label_output = gr.Label()
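# Optional local sanity check before wiring up the UI
# (hypothetical path; assumes a test image "sample.jpg" is available):
# from PIL import Image
# print(predict(Image.open("sample.jpg").convert("RGB")))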

# Create the interface
interface = gr.Interface(fn=predict, inputs=image_input, outputs=label_output)

# Launch the interface
interface.launch()