from transformers import pipeline
import gradio as gr
# Load your model & tokenizer from your saved local folder or HF repo
model_path = "Nainglinthu/quest_model" # your Hugging Face model repo name
# Initialize pipeline once
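# The model is downloaded from the Hub on the first run and cached locally afterwards.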
classifier = pipeline("text-classification", model=model_path)
# Define function to classify text
def classify_text(text):
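    # The text-classification pipeline returns a list of {"label", "score"} dicts,
    # which gr.JSON can render directly.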
    results = classifier(text)
    return results
# Gradio interface setup
iface = gr.Interface(
    fn=classify_text,
    inputs=gr.Textbox(lines=5, placeholder="Enter legal text here..."),
    outputs=gr.JSON(),
    title="Legal Text Classification",
    description="Classify legal text using your fine-tuned Legal BERT model.",
)
if __name__ == "__main__":
    iface.launch()