from fastapi import FastAPI, Request
from pydantic import BaseModel
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import torch
import os

# Set a custom cache directory
os.environ["TRANSFORMERS_CACHE"] = "./hf_cache"

# Load model and tokenizer once at startup
model_name = "tabularisai/multilingual-sentiment-analysis"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)

app = FastAPI()

# Map class indices to sentiment labels
sentiment_map = {
    0: "Very Negative",
    1: "Negative",
    2: "Neutral",
    3: "Positive",
    4: "Very Positive"
}

# Request body schema
class ReviewRequest(BaseModel):
    text: str

# Expose the prediction function as a POST endpoint.
# The "/predict" route path is illustrative; the original listing does not show the decorator.
@app.post("/predict")
def predict_sentiment(review: ReviewRequest):
    # Tokenize the input, truncating to the model's 512-token limit
    inputs = tokenizer(review.text, return_tensors="pt", truncation=True, padding=True, max_length=512)
    # Run inference without tracking gradients
    with torch.no_grad():
        outputs = model(**inputs)
    # Convert logits to probabilities and pick the most likely class
    probabilities = torch.nn.functional.softmax(outputs.logits, dim=-1)
    predicted_label = torch.argmax(probabilities, dim=-1).item()
    sentiment = sentiment_map[predicted_label]
    return {"text": review.text, "sentiment": sentiment}
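
Once the file is saved and served, the endpoint can be exercised with a small client script. This is a minimal sketch: the module name app.py, the port 7860, and the /predict route are assumptions for illustration, not taken from the original listing.

# Minimal client sketch, assuming the app is saved as app.py and started with:
#   uvicorn app:app --host 0.0.0.0 --port 7860
# The /predict route and port 7860 are illustrative assumptions.
import requests

resp = requests.post(
    "http://localhost:7860/predict",
    json={"text": "The delivery was late, but the support team resolved it quickly."},
)
resp.raise_for_status()
# Example output shape: {"text": "...", "sentiment": "Neutral"}; the actual label depends on the model
print(resp.json())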