import spaces
from transformers import pipeline as tpipeline
from optimum.pipelines import pipeline as opipeline

#@spaces.GPU(duration=60)
def classify(tweet, event_model, hftoken, threshold):
    results = {"text": None,  "event": None, "score": None}
    
    # event type prediction with the transformers pipeline (kept for reference)
    # event_predictor = tpipeline(task="text-classification", model=event_model,
    #                             batch_size=512, token=hftoken, device="cpu")
    # tokenizer_kwargs = {'padding': True, 'truncation': True, 'max_length': 512}
    # prediction = event_predictor(tweet, **tokenizer_kwargs)[0]

    # event type prediction with the ONNX Runtime pipeline from optimum
    onnx_classifier = opipeline("text-classification", model=event_model, accelerator="ort")
    prediction = onnx_classifier(tweet)[0]

    results["text"] = tweet
    
    # if the model is not confident enough in a non-"none" label, fall back to "none"
    if prediction["label"] != "none" and round(prediction["score"], 2) <= threshold:
        results["event"] = "none"
        results["score"] = prediction["score"]
    else:
        results["event"] = prediction["label"]
        results["score"] = prediction["score"]

    return results
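

# Example invocation (a minimal sketch, not part of the original Space): the model id,
# input tweet, and threshold below are placeholders/assumptions, shown only to
# illustrate the expected inputs and the shape of the returned dict.
if __name__ == "__main__":
    demo = classify(
        tweet="Flood waters are rising near the river bank.",
        event_model="your-org/your-event-classifier",  # placeholder model id
        hftoken=None,                                   # token is only used by the commented transformers path
        threshold=0.50,                                 # assumed confidence cutoff
    )
    print(demo)  # -> {"text": ..., "event": ..., "score": ...}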