import streamlit as st
import pandas as pd
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification


# Load the tokenizer and model once; caching the loader keeps the (mutable)
# model object from being reloaded on every Streamlit rerun.
@st.cache(allow_output_mutation=True)
def load_model():
    tokenizer = AutoTokenizer.from_pretrained("APJ23/MultiHeaded_Sentiment_Analysis_Model")
    model = AutoModelForSequenceClassification.from_pretrained("APJ23/MultiHeaded_Sentiment_Analysis_Model")
    return tokenizer, model

tokenizer, model = load_model()

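# Class indices produced by the model, mapped to human-readable toxicity labels.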
classes = {
    0: 'Non-Toxic',
    1: 'Toxic',
    2: 'Severely Toxic',
    3: 'Obscene',
    4: 'Threat',
    5: 'Insult',
    6: 'Identity Hate'
}
def prediction(tweet, model, tokenizer):
    # Tokenize the tweet and run a single forward pass; no gradients are needed for inference.
    inputs = tokenizer(tweet, return_tensors="pt", padding=True, truncation=True)
    with torch.no_grad():
        outputs = model(**inputs)
    # .item() converts the argmax tensor to a plain int so it can index the classes dict.
    predicted_class = torch.argmax(outputs.logits, dim=1).item()
    predicted_prob = torch.softmax(outputs.logits, dim=1)[0][predicted_class].item()
    return classes[predicted_class], predicted_prob
    
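# Collect per-tweet predictions into a DataFrame so Streamlit can render them as a table.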
def create_table(predictions):
    data = {'Tweet': [], 'Highest Toxicity Class': [], 'Probability': []}
    for tweet, prediction in predictions.items():
        data['Tweet'].append(tweet)
        data['Highest Toxicity Class'].append(prediction[0])
        data['Probability'].append(prediction[1])
    df = pd.DataFrame(data)
    return df

# Streamlit UI: a single text input plus a button that triggers prediction.
st.title('Toxicity Prediction App')
tweet = st.text_input('Enter a tweet to check for toxicity')

if st.button('Predict'):
    predicted_class_label, predicted_prob = prediction(tweet, model, tokenizer)
    prediction_text = f'Prediction: {predicted_class_label} ({predicted_prob:.2f})'
    st.write(prediction_text)
    predictions = {tweet: (predicted_class_label, predicted_prob)}
    table = create_table(predictions)
    st.table(table)
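# Assuming this file is saved as app.py, it can be run locally with: streamlit run app.py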