File size: 2,173 Bytes
5987a9f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
e33ced9
7af97a7
e33ced9
 
5987a9f
 
 
 
 
 
e33ced9
 
 
 
f9612f8
2e3f138
 
e33ced9
 
bddc534
7af97a7
bddc534
2e3f138
 
bddc534
7af97a7
2e3f138
 
bddc534
2e3f138
 
bddc534
2e3f138
 
 
db0c252
2e3f138
e33ced9
 
bddc534
e33ced9
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2e3f138
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
import os
import subprocess
import sys

def install_packages():
    """Install the app's runtime dependencies via pip, one package at a time.

    Invokes pip as ``sys.executable -m pip install <pkg>`` with an argument
    list (``shell=False``) so the install targets the interpreter actually
    running this script and no package name ever passes through a shell.
    Prints a per-package success/failure line; a failed install does not
    stop the remaining packages.
    """
    packages = [
        "torch",
        "transformers",
        "huggingface-hub",
        "gradio",
        "accelerate",
        "onnxruntime",
        "onnxruntime-tools",
        "optimum",
    ]
    for package in packages:
        # Argument-list invocation avoids shell=True (injection/quoting risk)
        # and the PATH ambiguity of a bare `pip` executable.
        result = subprocess.run([sys.executable, "-m", "pip", "install", package])
        if result.returncode != 0:
            print(f"Failed to install {package}")
        else:
            print(f"Successfully installed {package}")

install_packages()

import gradio as gr
from huggingface_hub import login
from optimum.onnxruntime import ORTModelForSequenceClassification
from transformers import AutoTokenizer, pipeline

# ONNX-exported sentiment model hosted on the Hugging Face Hub.
model_id = "HassamAliCADI/SentimentOnx"
# Hub access token read from the "NLP" environment variable; may be unset.
hf_token = os.environ.get("NLP")

if hf_token:
    # Authenticate against the Hub so private/gated repos can be fetched.
    login(hf_token)
else:
    print("NLP token not found.")

# Download (or load from cache) the ONNX model and its tokenizer — network I/O.
model = ORTModelForSequenceClassification.from_pretrained(model_id)
tokenizer = AutoTokenizer.from_pretrained(model_id)

# Text-classification pipeline backed by the ONNX Runtime model above;
# used by classify_text() below.
pipe = pipeline(task="text-classification", model=model, tokenizer=tokenizer)

def classify_text(text):
    """Classify *text* and return a readable summary of the top 3 labels.

    Args:
        text: The input sentence to run through the sentiment pipeline.

    Returns:
        A multi-line string: the echoed sentence followed by up to three
        ``Label i: <label>, Score: <score>`` lines, highest score first.
    """
    # top_k=None returns the score for every label and replaces the
    # deprecated return_all_scores=True flag; for a single input string it
    # yields a flat list of {"label": ..., "score": ...} dicts.
    results = pipe(text, top_k=None)

    lines = [f"Sentence: {text}"]
    # Sort explicitly by score (descending) rather than relying on the
    # pipeline's output order, then keep the top three entries.
    top = sorted(results, key=lambda r: r["score"], reverse=True)[:3]
    for rank, entry in enumerate(top, start=1):
        lines.append(f"Label {rank}: {entry['label']}, Score: {entry['score']:.4f}")
    # Trailing newline matches the original per-line "\n" formatting.
    return "\n".join(lines) + "\n"


# Wire the classifier into a simple web UI: one text box in, one text box
# out, with two ready-made example sentences; then start the server.
demo = gr.Interface(
    fn=classify_text,
    title="Sentiment Classifier",
    description="Enter text to classify sentiment",
    inputs=gr.Textbox(label="Input Text", placeholder="Type something here..."),
    outputs=gr.Textbox(label="Classification Results"),
    examples=[
        ["I am deeply disappointed in your bad performance in last league match loss, and quite disappointed, sad because of it."],
        ["I am very happy with your excellent performance!"],
    ],
)
demo.launch()