# genome / app.py
from transformers import pipeline
import gradio as gr
# Load the model
pipe = pipeline("text-classification", model="mgbam/roberta-yelp-genomic-bottleneck")
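# Note: the text-classification pipeline returns a list of dicts shaped like
# [{'label': ..., 'score': ...}]; the exact label names depend on this model's
# id2label configuration.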
def classify_text(text):
    # Run the pipeline on the input text
    results = pipe(text)
    # Format each prediction as a readable "Label: ..., Score: ..." line
    formatted_results = [
        f"Label: {result['label']}, Score: {result['score']:.2f}" for result in results
    ]
    return "\n".join(formatted_results)
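# Quick sanity check of classify_text (kept commented out so it does not run
# on Space startup; the example input is illustrative only):
# print(classify_text("The food was great but the service was slow."))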
# Gradio interface
interface = gr.Interface(
    fn=classify_text,
    inputs="text",
    outputs="text",
    title="Text Classification",
    description="Classify text using the RoBERTa-Yelp-Genomic-Bottleneck model.",
)

if __name__ == "__main__":
    interface.launch()
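# To run locally: `python app.py`, then open the local URL that Gradio prints.
# If a temporary public link is wanted, launch() also accepts share=True,
# e.g. interface.launch(share=True); this is optional and not needed on Spaces.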