# Spaces: Sleeping / Sleeping — Hugging Face Space status banner accidentally
# pasted into the source; kept here as a comment so the module can be imported.
# inference/app.py
#
# FastAPI service exposing incident-category classification and sentiment
# analysis via Hugging Face `transformers` pipelines.
#
# Cache locations MUST be exported before `transformers` is imported: the
# library reads HF_HOME / TRANSFORMERS_CACHE at import time. The original
# file set them after the import, so they were silently ignored — hence the
# deliberately non-PEP8 import order below.
import os

# Put all caches in writable /tmp (needed on read-only container filesystems).
os.environ.setdefault("HF_HOME", "/tmp/hf")
os.environ.setdefault("TRANSFORMERS_CACHE", "/tmp/transformers")
os.environ.setdefault("HF_DATASETS_CACHE", "/tmp/hf_datasets")
os.environ.setdefault("HF_HUB_DISABLE_TELEMETRY", "1")

from fastapi import FastAPI
from pydantic import BaseModel
from pathlib import Path
from transformers import pipeline

app = FastAPI(title="Incident ML Inference API")

# Optional locally fine-tuned checkout; /health reports whether it exists.
# (Was commented out in the original, which made health() raise NameError.)
LOCAL_MODEL = Path(__file__).resolve().parents[1] / "models" / "incident_classifier"

# Category classifier: the fine-tuned incident model published on the Hub.
incident_classifier = pipeline("text-classification", model="brijeshpandya/incident-classifier")

# Sentiment (keep public model for now).
sentiment_analyzer = pipeline("sentiment-analysis", model="cardiffnlp/twitter-xlm-roberta-base-sentiment")
class AnalyzeIn(BaseModel):
    """Request body for the analyze endpoint.

    Attributes:
        text: Free-form incident description to classify and score.
    """

    text: str
@app.get("/health")
def health():
    """Liveness probe; also reports whether a local fine-tuned model is present.

    NOTE(review): the original body referenced a module-level LOCAL_MODEL that
    was commented out (NameError at request time); the path is computed inline
    here so the endpoint is self-contained. The route decorator also appears
    to have been lost in a paste-mangle — confirm the intended path.
    """
    local_model = Path(__file__).resolve().parents[1] / "models" / "incident_classifier"
    return {"ok": True, "using_local_model": local_model.exists()}
@app.post("/analyze")
def analyze(data: AnalyzeIn):
    """Classify the incident category and sentiment of the submitted text.

    Args:
        data: Request payload carrying the incident text.

    Returns:
        dict with the raw pipeline outputs (lists of {label, score} dicts)
        under the keys "category" and "sentiment".

    NOTE(review): the original had no route decorator (apparently lost in a
    formatting mangle); @app.post restores the evident intent — confirm path.
    """
    return {
        "category": incident_classifier(data.text),
        "sentiment": sentiment_analyzer(data.text),
    }