from fastapi import FastAPI, Request
from transformers import AutoModelForSequenceClassification, AutoTokenizer, AutoConfig
from scipy.special import softmax
import numpy as np
import uvicorn

app = FastAPI()

# Load model and tokenizer
MODEL = "cardiffnlp/twitter-roberta-base-sentiment-latest"
tokenizer = AutoTokenizer.from_pretrained(MODEL)
config = AutoConfig.from_pretrained(MODEL)
model = AutoModelForSequenceClassification.from_pretrained(MODEL)
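# config.id2label maps class indices to human-readable labels
# (negative / neutral / positive for this checkpoint).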

# Preprocessing: mask user mentions and URLs before tokenization
def preprocess(text):
    tokens = []
    for t in text.split():
        if t.startswith("@") and len(t) > 1:
            t = "@user"
        elif t.startswith("http"):
            t = "http"
        tokens.append(t)
    return " ".join(tokens)

# Inference route
@app.post("/analyze")
async def analyze(request: Request):
    data = await request.json()
    text = preprocess(data.get("text", ""))
    # Tokenize and run a forward pass
    encoded_input = tokenizer(text, return_tensors='pt')
    output = model(**encoded_input)
    # Logits for the single input sequence, converted to probabilities
    scores = output[0][0].detach().numpy()
    scores = softmax(scores)
    # Class indices sorted from most to least likely
    ranking = np.argsort(scores)[::-1]
    result = []
    for i in ranking:
        label = config.id2label[i]
        score = round(float(scores[i]), 4)
        result.append({"label": label, "score": score})
    return {"result": result}