import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

model_name = "gregorlied/flan-t5-base-summarization"

tokenizer = AutoTokenizer.from_pretrained(model_name)
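
# Load the fine-tuned checkpoint; device_map="auto" places the weights on a
# GPU when one is available, otherwise on the CPU.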
model = AutoModelForSeq2SeqLM.from_pretrained(
    model_name,
    device_map="auto",
    attn_implementation="eager",
    trust_remote_code=True,
).eval()

device = "cuda" if torch.cuda.is_available() else "cpu"


def summarize(text):
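    """Summarize `text` with the fine-tuned FLAN-T5 model."""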
    if not text.strip():
        return "Please enter some text to summarize."
    prompt = "summarize: " + text
    model_inputs = tokenizer(
        [prompt], return_tensors="pt", truncation=True, max_length=512
    ).to(device)
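
    # Cap the summary at 256 new tokens; all other decoding settings fall
    # back to the checkpoint's generation config.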
    generated_ids = model.generate(
        input_ids=model_inputs["input_ids"],
        attention_mask=model_inputs["attention_mask"],
        max_new_tokens=256,
    )
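
    # Decode, dropping padding/EOS special tokens, and return the summary.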
    response = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)
    return response[0]


with gr.Blocks() as demo:
    gr.Markdown("## 📝 Summarization for News, SciTLDR and Dialog Texts")
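
    # Two equal-width columns: source text on the left, summary on the right.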
    with gr.Row():
        with gr.Column():
            input_text = gr.Textbox(
                label="Input Text",
                autoscroll=False,
                lines=15,
                max_lines=15,
                placeholder="Paste your article or paragraph here...",
            )
        with gr.Column():
            output_text = gr.Textbox(
                label="Summary",
                autoscroll=False,
                lines=15,
                max_lines=15,
                show_copy_button=True,
            )

    with gr.Row():
        summarize_btn = gr.Button("Summarize")
        summarize_btn.click(
            fn=summarize,
            inputs=input_text,
            outputs=output_text,
            show_progress="full",
        )
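
    # One example per domain the model targets; cache_examples="lazy" computes
    # each cached output the first time its example is selected.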
    with gr.Row():
        gr.Examples(
            label="Example – News",
            examples=[
                "By . Mail On Sunday Reporter . Former editor Andy Coulson was sentenced to 18 months in prison at the Old Bailey for phone hacking . Andy Coulson plans to ‘make the best’ of his prison sentence for phone hacking by teaching fellow inmates how to read and write. The former director of communications for David Cameron, who was sentenced to 18 months on Friday for his part in the hacking conspiracy during his time as editor of News of the World, would also like to join the Samaritans. Coulson’s plans for inmate-to-inmate mentoring were revealed yesterday by former Tory Cabinet Minister Jonathan Aitken, who received an 18-month sentence for perjury in 1999. In an open letter to Coulson, Aitken said: ‘A few hours before your sentencing, you told me that you were “determined to make the best of prison”… You were interested in becoming a “listener” (prison Samaritans), or working for the Shannon Trust’s Toe By Toe programme, which enables young illiterate offenders to be taught reading and writing skills by other prisoners (just the job for an ex-editor!).’ Aitken added that Coulson’s enthusiasm ‘speaks volumes for the positive way you are facing your sentence with humility and realism’. Coulson, 46, was one of four ex-journalists at the tabloid to be sentenced on Friday, along with private investigator Glenn Mulcaire. Five others, including former News International chief Rebekah Brooks, were cleared of all charges late last month. Coulson, the former director of communications for David Cameron, wants to 'make the best' of his prison sentence by teaching inmates how to read and write. He also wants to join the Samaritans .",
            ],
            fn=summarize,
            inputs=input_text,
            outputs=output_text,
            cache_examples="lazy",
        )

    with gr.Row():
        gr.Examples(
            label="Example – SciTLDR",
            examples=[
                "Self-attention-based Transformer has demonstrated the state-of-the-art performances in a number of natural language processing tasks. Self attention is able to model long-term dependencies, but it may suffer from the extraction of irrelevant information in the context. To tackle the problem, we propose a novel model called Sparse Transformer. Sparse Transformer is able to improve the concentration of attention on the global context through an explicit selection of the most relevant segments. Extensive experimental results on a series of natural language processing tasks, including neural machine translation, image captioning, and language modeling, all demonstrate the advantages of Sparse Transformer in model performance.\nSparse Transformer reaches the state-of-the-art performances in the IWSLT 2015 English-to-Vietnamese translation and IWSLT 2014 German-to-English translation. In addition, we conduct qualitative analysis to account for Sparse Transformer's superior performance.",
            ],
            fn=summarize,
            inputs=input_text,
            outputs=output_text,
            cache_examples="lazy",
        )

    with gr.Row():
        gr.Examples(
            label="Example – Dialog",
            examples=[
                "#Person1#: Hi Tony, it's Alice, my cars broken down.\n#Person2#: Oh no, where are you? Have you had your car taken to a garage?\n#Person1#: I'm waiting for a repairman to come out and fix it. Can you believe it? I'm at some traffic lights and I'm causing a traffic jam.\n#Person2#: Don't worry. I'll come and get you. I need to go to the bank anyway.",
            ],
            fn=summarize,
            inputs=input_text,
            outputs=output_text,
            cache_examples="lazy",
        )
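

# For multi-user deployments, demo.queue().launch() queues incoming requests
# rather than running them all at once on the same GPU.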
if __name__ == "__main__":
    demo.launch()