"""Gradio demo for the Rerandaka/Cild_safety_bigbird text classifier.

Loads the BigBird sequence-classification model once at startup and
exposes a one-textbox UI whose Classify button is also published as a
named ``predict`` API endpoint.
"""

import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Loaded once at module import so every request reuses the same weights.
MODEL_ID = "Rerandaka/Cild_safety_bigbird"
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, use_fast=False)
model = AutoModelForSequenceClassification.from_pretrained(MODEL_ID)


def classify(text):
    """Return the predicted class index for *text* as a string.

    The input is truncated/padded to a 512-token window before being
    fed to the model; the argmax over the logits is the prediction.
    """
    inputs = tokenizer(
        text,
        return_tensors="pt",
        truncation=True,
        padding=True,
        max_length=512,
    )
    with torch.no_grad():  # inference only — no gradient tracking needed
        logits = model(**inputs).logits
    predicted_class = torch.argmax(logits, dim=1).item()
    return str(predicted_class)


with gr.Blocks() as demo:
    txt_in = gr.Textbox(label="Enter text")
    txt_out = gr.Textbox(label="Prediction")
    btn = gr.Button("Classify")
    # api_name on the click event is what publishes a stable /predict
    # API route.  The previous demo.load(..., api_name="predict") hack
    # additionally ran classify on an empty textbox at every page load,
    # which was wasteful and produced a spurious prediction.
    btn.click(classify, inputs=txt_in, outputs=txt_out, api_name="predict")

demo.launch()