import gradio as gr
from transformers import pipeline
import torch

# Load the multilingual summarization model (GPU if available, otherwise CPU)
device = 0 if torch.cuda.is_available() else -1
summarizer = pipeline(
    "summarization",
    model="csebuetnlp/mT5_multilingual_XLSum",
    tokenizer="csebuetnlp/mT5_multilingual_XLSum",
    device=device,
)
print("Model loaded on:", "GPU" if device == 0 else "CPU")

# Summarization function shared by the UI and the API endpoint
def summarize_text(text):
    if not text.strip():
        return "Error: No text provided."
    max_len = 1000  # truncate very long inputs before they reach the tokenizer
    clean_text = text.strip()[:max_len]
    result = summarizer([clean_text], max_length=130, min_length=30, do_sample=False)
    return result[0]["summary_text"]

# Gradio Interface (UI + API)
iface = gr.Interface(
    fn=summarize_text,
    inputs=gr.Textbox(lines=10, placeholder="Paste your news article here..."),
    outputs="text",
    title="Multilingual News Summarizer",
    description="Summarizes news articles using the mT5 multilingual XLSum model.",
)

iface.launch()
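
Because gr.Interface also exposes summarize_text as an API endpoint, the running app can be queried from another script. Below is a minimal sketch using gradio_client, assuming the app is running locally on Gradio's default address; the Space name "username/space-name" is a placeholder you would swap in after deploying, and "/predict" is the default endpoint name for a single-function Interface.

from gradio_client import Client

# Connect to the locally running app; for a hosted Space, pass its name
# instead, e.g. Client("username/space-name")  # placeholder name
client = Client("http://127.0.0.1:7860")

# Call the default /predict endpoint with an article to summarize
summary = client.predict(
    "Paste a news article here ...",
    api_name="/predict",
)
print(summary)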