### 1. Imports and class names setup ###
import gradio as gr
import torch
from transformers import DistilBertTokenizerFast
from timeit import default_timer as timer

# Setup class names (index matches the model's label ids: 0 = Negative, 1 = Positive)
class_names = ["Negative", "Positive"]

### 2. Load the model and tokenizer ###
# Load the fine-tuned model to CPU (on PyTorch >= 2.6 you may also need weights_only=False)
model = torch.load(f="BERT_sentiment_analysis.pth",
                   map_location=torch.device("cpu"))

# Load the tokenizer once at startup rather than on every request
tokenizer = DistilBertTokenizerFast.from_pretrained("distilbert-base-uncased")

### 3. Predict function ###
# Create predict function
def predict(text: str):
    """Tokenizes the input text, runs the sentiment model and returns the
    predicted label and the time taken."""
    # Start the timer
    start_time = timer()

    # Tokenize the text and keep the tensors on CPU
    inputs = tokenizer(text, return_tensors="pt").to("cpu")

    # Put the model in evaluation mode and run inference
    model.eval()
    with torch.inference_mode():
        logits = model(**inputs).logits

    # Map the highest-scoring logit to a class name
    predicted_class_id = logits.argmax().item()
    emoji = "😊" if predicted_class_id == 1 else "🙁"
    result = f"{class_names[predicted_class_id]} {emoji}"

    # Calculate the prediction time
    pred_time = round(timer() - start_time, 5)

    # Return the prediction and the prediction time
    return result, pred_time

### 4. Gradio app ###
# Create title, description and article strings
title = "Sentiment Classifier"
description = "A sentiment classifier built by fine-tuning the [DistilBERT](https://huggingface.co/docs/transformers/v4.42.0/en/model_doc/distilbert#transformers.DistilBertForSequenceClassification) Transformer model with the Hugging Face [transformers](https://huggingface.co/docs/transformers/en/index) library."
article = "The model classifies the sentiment of an input text (whether the text expresses a positive or negative sentiment)."

# Create the Gradio demo
demo = gr.Interface(fn=predict,  # mapping function from input to output
                    inputs=[gr.Textbox(label="Input")],
                    outputs=[gr.Label(label="Prediction"),
                             gr.Number(label="Prediction time (s)")],
                    title=title,
                    description=description,
                    article=article)

# Launch the demo!
demo.launch()
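
# ----------------------------------------------------------------------------
# Note (illustrative sketch, not part of the app): the "BERT_sentiment_analysis.pth"
# checkpoint loaded in section 2 is assumed to be a full model object saved after
# fine-tuning, roughly along these lines (the training step itself is not shown
# in this script, so names and details here are assumptions):
#
#   from transformers import DistilBertForSequenceClassification
#   model = DistilBertForSequenceClassification.from_pretrained(
#       "distilbert-base-uncased", num_labels=2)
#   # ... fine-tune on a labelled sentiment dataset ...
#   torch.save(model, "BERT_sentiment_analysis.pth")
#
# Saving the whole model object (rather than a state_dict) is what lets section 2
# restore it with a bare torch.load() call, without re-instantiating the
# architecture first; the model class does still need to be importable at load time.
# ----------------------------------------------------------------------------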