loom / app.py
import gradio as gr
from fastapi import FastAPI
from pydantic import BaseModel
from transformers import pipeline

app = FastAPI()
moderate_pipe = pipeline("text-classification", model="KoalaAI/Text-Moderation")

class TextInput(BaseModel):
    text: str

def moderate(text: str) -> dict:
    # Run the moderation classifier and map each returned label to its score
    results = moderate_pipe(text)
    return {r["label"]: r["score"] for r in results}

@app.post("/moderate")
async def moderate_text(body: TextInput):
    return moderate(body.text)

# Gradio interface to expose the model via the Space UI, reusing the same
# moderation function as the FastAPI route (a plain str -> dict callable)
gr.Interface(fn=moderate, inputs="text", outputs="json").launch()
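A minimal sketch of calling the Space from another Python process with gradio_client, assuming the Space is public and published under the id "greykaizen/loom" (an assumption based on the repo name above; substitute the real Space id). For a single gr.Interface, the default endpoint name is "/predict".

from gradio_client import Client

# Connect to the running Space (Space id assumed; replace with the actual one)
client = Client("greykaizen/loom")
# Send one piece of text to the default gr.Interface endpoint
scores = client.predict("You are a wonderful person.", api_name="/predict")
print(scores)  # e.g. a dict mapping a moderation label to its score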