import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification  # required

# Load model
model_id = "Rerandaka/Cild_safety_bigbird"
tokenizer = AutoTokenizer.from_pretrained(model_id, use_fast=False)
model = AutoModelForSequenceClassification.from_pretrained(model_id)

# Inference function
def classify(text):
    inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True, max_length=512)
    with torch.no_grad():
        logits = model(**inputs).logits
    predicted_class = torch.argmax(logits, dim=1).item()
    return str(predicted_class)

# API-ready Gradio interface
demo = gr.Interface(
    fn=classify,
    inputs=gr.Textbox(label="Enter text"),
    outputs=gr.Textbox(label="Prediction"),
)

# Enable the API and request queue
demo.queue()
demo.launch(show_api=True)
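
# A minimal sketch of calling this app programmatically once it is running.
# It assumes the default local address http://127.0.0.1:7860 and the Gradio
# default endpoint name "/predict"; adjust both for a deployed Space.
#
#   from gradio_client import Client
#
#   client = Client("http://127.0.0.1:7860/")
#   result = client.predict("some example text", api_name="/predict")
#   print(result)  # the predicted class index, returned as a string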