from transformers import pipeline
import gradio as gr
import wikipediaapi

# Load the summarization model once at startup so it is not re-created on every request.
summarizer = pipeline("summarization", model="facebook/bart-large-cnn")

def extract_article_summary(title):
    # Wikipedia-API requires a descriptive user agent; replace the placeholder contact address with your own.
    wiki_wiki = wikipediaapi.Wikipedia('MyProjectName (contact@example.com)', 'en')
    page = wiki_wiki.page(title)
    # bart-large-cnn accepts roughly 1,024 tokens, so trim long lead sections before summarizing.
    text = page.summary[:2000]
    return summarizer(text)[0]['summary_text']

sample_titles = [['AMD'],
                 ['Semiconductor_industry'],
                 ['Artificial_intelligence']]

desc = '''
       Let Hugging Face models summarize Wikipedia articles for you.
       Note: shorter articles generate faster summaries.
       This summarizer uses the facebook/bart-large-cnn model.
       '''

demo = gr.Interface(
    extract_article_summary,
    inputs=gr.Textbox(
        lines=2,
        label='Wikipedia article title'
    ),
    outputs='text',
    title='Wikipedia Summarizer',
    theme='huggingface',
    description=desc,
    examples=sample_titles
)

demo.launch()
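
# A minimal sketch of how to run this app, assuming the script is saved as app.py
# and that torch is the backend used by the transformers pipeline:
#   pip install gradio transformers wikipedia-api torch
#   python app.py
# demo.launch() then serves the interface locally (by default at http://127.0.0.1:7860).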