import gradio as gr
from transformers import T5Tokenizer, T5ForConditionalGeneration
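
# Note: T5Tokenizer is the slow, SentencePiece-based tokenizer, so the
# `sentencepiece` package must be installed alongside transformers for
# from_pretrained() to succeed.
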
# Load T5-small model and tokenizer
model_name = "t5-small"
tokenizer = T5Tokenizer.from_pretrained(model_name)
model = T5ForConditionalGeneration.from_pretrained(model_name)

def generate_paraphrase(text, max_length, temperature, num_outputs):
    """Generate paraphrased versions of the input text using T5-small."""
    if not text.strip():
        return "⚠️ Please enter some text to paraphrase."
    # The tokenizer appends the EOS token itself, so no manual "</s>" is needed.
    input_text = f"paraphrase: {text}"
    input_ids = tokenizer.encode(input_text, return_tensors="pt")
    outputs = model.generate(
        input_ids,
        max_length=int(max_length),
        temperature=temperature,  # slider value now actually controls sampling
        top_k=50,
        top_p=0.95,
        num_return_sequences=int(num_outputs),
        do_sample=True,
    )
    paraphrased_texts = [tokenizer.decode(output, skip_special_tokens=True) for output in outputs]
    return "\n\n".join(paraphrased_texts)  # Single string for the output Textbox, one paraphrase per block

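# Optional quick check (not part of the Gradio app): uncomment the line below to
# call the function directly with sample arguments before launching the UI.
# Outputs will vary from run to run because sampling is enabled.
# print(generate_paraphrase("The weather is nice today.", 50, 1.0, 2))
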
# Define Gradio Interface
description = """
## ✨ AI Paraphrasing Tool
Enter a sentence and let AI generate multiple paraphrased versions!
- Adjust **max length** for longer outputs.
- Tune **temperature** for more creative results.
- Choose **number of outputs** to generate multiple variations.
"""
demo = gr.Interface(
    fn=generate_paraphrase,
    inputs=[
        gr.Textbox(label="Enter text", placeholder="Type a sentence to paraphrase..."),
        gr.Slider(20, 100, value=50, step=5, label="Max Output Length"),
        gr.Slider(0.5, 1.5, value=1.0, step=0.1, label="Creativity (Temperature)"),
        gr.Dropdown(choices=[1, 2, 3, 4, 5], value=1, label="Number of Outputs"),
    ],
    outputs=gr.Textbox(label="Paraphrased Text", lines=5),  # All paraphrases shown together, separated by blank lines
    title="📝 AI Paraphraser",
    description=description,
    theme="huggingface",
    live=True,
)

demo.launch()