import gradio as gr
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import torch

# Load the tokenizer and model from the Hugging Face Hub
model_name = "ElizabethSrgh/customer-service-multitask"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)

# Labels in the model's output order (adjust if your checkpoint differs)
label_map = {
    0: "Complaint - Negative",
    1: "Inquiry - Neutral",
    2: "Request - Positive"
}

def predict(text):
    # Tokenize the input and run a forward pass without tracking gradients
    inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
    with torch.no_grad():
        outputs = model(**inputs)
    logits = outputs.logits
    # Pick the highest-scoring class and map it to its human-readable label
    predicted_class_id = torch.argmax(logits, dim=1).item()
    return label_map.get(predicted_class_id, "Unknown")

# Gradio UI
interface = gr.Interface(
    fn=predict,
    inputs=gr.Textbox(lines=4, label="Enter conversation text"),
    outputs=gr.Textbox(label="Prediction"),
    title="Customer Service Classification",
    description="Enter text to predict its topic and sentiment."
)

if __name__ == "__main__":
    interface.launch()
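
# --- Usage sketch (not part of the original file; names below are assumptions) ---
# With the app running, the endpoint can also be queried programmatically via
# gradio_client. "ElizabethSrgh/customer-service-multitask" is assumed to also be
# the Space id (only the model id appears above); replace it if the Space differs.
#
#     from gradio_client import Client
#     client = Client("ElizabethSrgh/customer-service-multitask")
#     print(client.predict("My order arrived damaged and no one has replied.",
#                          api_name="/predict"))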