from transformers import pipeline
import gradio as gr

# Load your model & tokenizer from your saved local folder or HF repo
model_path = "Nainglinthu/quest_model"  # your Hugging Face model repo name

# Initialize pipeline once
classifier = pipeline("text-classification", model=model_path)

# Define function to classify text
def classify_text(text):
    results = classifier(text)
    return results

# Gradio interface setup
iface = gr.Interface(
    fn=classify_text,
    inputs=gr.Textbox(lines=5, placeholder="Enter legal text here..."),
    outputs=gr.JSON(),
    title="Legal Text Classification",
    description="Classify legal text using your fine-tuned Legal BERT model."
)

if __name__ == "__main__":
    iface.launch()