# Author: Mhammad Ibrahim
# Commit: Explicitly load model with from_tf=True (7e000d1)
# (raw / history blame — 957 Bytes)
# import gradio as gr
# def greet(name):
# return "Hello " + name + "!!"
# demo = gr.Interface(fn=greet, inputs="text", outputs="text")
# demo.launch()
import gradio as gr
from transformers import AutoTokenizer, AutoModelForTokenClassification, pipeline
# Load model and tokenizer from Hugging Face Hub
# (runs at import time; downloads weights on first launch).
tokenizer = AutoTokenizer.from_pretrained("Mhammad2023/bert-finetuned-ner")
# NOTE(review): from_tf=True converts TensorFlow weights — presumably the
# checkpoint ships only a TF model; requires TensorFlow installed. Confirm
# against the model repo.
model = AutoModelForTokenClassification.from_pretrained("Mhammad2023/bert-finetuned-ner", from_tf=True)
# Token-classification pipeline; each result is a dict whose 'word',
# 'entity' and 'score' keys are consumed by predict() below.
classifier = pipeline("token-classification", model=model, tokenizer=tokenizer)
def predict(text):
    """Run NER over *text* and format one "word: LABEL (score%)" line per entity.

    Returns the string "No entities found" when the pipeline yields nothing.
    """
    entities = classifier(text)
    if not entities:
        return "No entities found"
    lines = [
        f"{ent['word']}: {ent['entity']} ({round(ent['score'] * 100, 2)}%)"
        for ent in entities
    ]
    return "\n".join(lines)
# Build the web UI and start serving it.
demo = gr.Interface(
    fn=predict,
    inputs="text",
    outputs="text",
    title="Named Entity Recognition",
)
demo.launch()