from transformers import pipeline
import gradio as gr
import wikipediaapi

# Load the summarization model once at startup instead of on every request.
summarizer = pipeline("summarization", model="facebook/bart-large-cnn")


def extract_article_summary(article_title):
    """Fetch the lead section of a Wikipedia article and summarize it with BART."""
    wiki_wiki = wikipediaapi.Wikipedia('MyProjectName ([email protected])', 'en')
    page = wiki_wiki.page(article_title)
    # Truncate the lead section so the text stays within the model's input limit.
    text = page.summary[:2000]
    return summarizer(text)[0]['summary_text']
# Example inputs shown below the interface; each entry is a Wikipedia article title.
sample_articles = [['AMD'],
                   ['Semiconductor_industry'],
                   ['Artificial_intelligence']]
desc = '''
Let Hugging Face models summarize Wikipedia articles for you.
Note: shorter articles generate faster summaries.
This summarizer uses Facebook's bart-large-cnn model.
'''
demo = gr.Interface(
    extract_article_summary,
    inputs=gr.Textbox(
        lines=2,
        label='Wikipedia article title'
    ),
    outputs='text',
    title='Wikipedia Summarizer',
    theme='huggingface',
    description=desc,
    examples=sample_articles
)
demo.launch()
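
# A minimal note on running this Space locally (assuming the usual dependencies:
# gradio, transformers, torch, and wikipedia-api):
#   pip install gradio transformers torch wikipedia-api
#   python app.py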