import gradio as gr
import torch
import torch.nn.functional as F
from transformers import BertTokenizer, BertForSequenceClassification

# Load the tokenizer and model once at module import.
tokenizer = BertTokenizer.from_pretrained('indobenchmark/indobert-large-p1')
model = BertForSequenceClassification.from_pretrained("hendri/emotion")
# Inference mode: disables dropout so repeated calls on the same text give
# identical probabilities (torch.no_grad() alone does not do this).
model.eval()

# Raw label ids produced by the fine-tuned checkpoint.
labels = ["LABEL_0", "LABEL_1", "LABEL_2", "LABEL_3", "LABEL_4"]
# Map these to your actual labels:
label_mapping = {
    "LABEL_0": "sadness",
    "LABEL_1": "anger",
    "LABEL_2": "love",
    "LABEL_3": "fear",
    "LABEL_4": "happy",
}


def classify_emotion(text: str) -> dict[str, float]:
    """Classify an Indonesian sentence into emotion probabilities.

    Args:
        text: Input sentence (bahasa Indonesia); truncated to 128 tokens.

    Returns:
        Mapping of emotion name ("sadness", "anger", "love", "fear",
        "happy") to its softmax probability, rounded to 4 decimal places.
    """
    inputs = tokenizer(
        text, return_tensors="pt", truncation=True, padding=True, max_length=128
    )
    with torch.no_grad():  # pure inference — no gradient bookkeeping needed
        outputs = model(**inputs)
    probabilities = F.softmax(outputs.logits, dim=-1)
    return {
        label_mapping[labels[i]]: round(float(prob), 4)
        for i, prob in enumerate(probabilities[0])
    }


# Create the Gradio interface
interface = gr.Interface(
    fn=classify_emotion,
    inputs=gr.Textbox(label="Enter Text for Emotion Classification"),
    outputs=gr.Label(label="Predicted Emotions"),
    title="Emotion Classification",
    description="This application uses an IndoBERT model fine-tuned for emotion classification. Enter a sentence (bahasa Indonesia) to see the predicted emotions and their probabilities.",
)

# Launch only when executed as a script, so the module can be imported
# (e.g. by tests or another app) without starting a web server.
if __name__ == "__main__":
    interface.launch()