Burcin committed on
Commit
e7ac810
·
1 Parent(s): 77cd680

Upload app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -19
app.py CHANGED
@@ -12,24 +12,15 @@ from heapq import nlargest
12
  import warnings
13
  from sklearn.feature_extraction.text import TfidfVectorizer
14
  import numpy as np
15
- #from transformers import PegasusForConditionalGeneration, PegasusTokenizer
16
 
17
  warnings.filterwarnings("ignore")
18
 
 
 
19
def get_wiki_original_text(inp):
    """Fetch and return the Wikipedia summary text for the topic *inp*."""
    # Delegate straight to the wikipedia package; no post-processing is done here.
    return wikipedia.summary(inp)
22
 
23
- """
24
- def get_wiki_summary_by_pegasus(inp):
25
- text = wikipedia.summary(inp)
26
- tokenizer = PegasusTokenizer.from_pretrained('google/pegasus-xsum')
27
- tokens = tokenizer(text, truncation=True, padding="longest", return_tensors="pt")
28
- model = PegasusForConditionalGeneration.from_pretrained("google/pegasus-xsum")
29
- summary = model.generate(**tokens)
30
- return tokenizer.decode(summary)
31
- """
32
-
33
 
34
  def get_wiki_summary_by_lem(inp):
35
  text = wikipedia.summary(inp)
@@ -111,21 +102,16 @@ def get_wiki_summary_by_tfidf(inp):
111
 
112
 
113
 
114
- desc = """This interface allows you to summarize Wikipedia explanations. Only requirement is to write the topic. For summarization this model uses extractive summarization method and the number of sentences in the output depends on the length of the original text."""
115
-
116
-
117
- x = """ Europe """
118
-
119
- y = ''' Great Depression '''
120
 
121
- z = ''' Crocodile Dundee '''
122
 
123
- sample = [[x],[y],[z]]
124
 
125
 
126
  iface = Parallel(gr.Interface(fn=get_wiki_original_text, inputs=gr.inputs.Textbox(label="Text"), outputs="text", description='Original Text'),
127
  gr.Interface(fn=get_wiki_summary_by_lem, inputs=gr.inputs.Textbox(label="Text"), outputs="text", description='Summary 1'),
128
  gr.Interface(fn=get_wiki_summary_by_tfidf, inputs=gr.inputs.Textbox(label="Text"), outputs="text", description='Summary 2'),
 
129
  title= 'Text Summarizer',
130
  description = desc,
131
  examples=sample,
 
12
  import warnings
13
  from sklearn.feature_extraction.text import TfidfVectorizer
14
  import numpy as np
 
15
 
16
  warnings.filterwarnings("ignore")
17
 
18
# Hosted abstractive summarizer (google/pegasus-xsum) loaded as a Gradio
# sub-interface; combined with the extractive summarizers further below.
pegasus = gr.Interface.load("huggingface/google/pegasus-xsum")
19
+
20
def get_wiki_original_text(inp):
    """Return the raw Wikipedia summary for the requested topic *inp*."""
    fetched = wikipedia.summary(inp)
    return fetched
23
 
 
 
 
 
 
 
 
 
 
 
24
 
25
  def get_wiki_summary_by_lem(inp):
26
  text = wikipedia.summary(inp)
 
102
 
103
 
104
 
105
# Description text displayed under the app title on the Gradio page.
desc = """This interface allows you to summarize Wikipedia contents. Only requirement is to write the topic and it collects content by fetching from Wikipedia. For summarization this model uses 2 different extractive summarization methods and the number of sentences in the output depends on the length of the original text."""
 
 
 
 
 
106
 
 
107
 
108
# Example topics shown in the Gradio examples widget (one nested list per row,
# matching the single Textbox input).
sample = [['Europe'],['Great Depression'],['Crocodile Dundee']]
109
 
110
 
111
  iface = Parallel(gr.Interface(fn=get_wiki_original_text, inputs=gr.inputs.Textbox(label="Text"), outputs="text", description='Original Text'),
112
  gr.Interface(fn=get_wiki_summary_by_lem, inputs=gr.inputs.Textbox(label="Text"), outputs="text", description='Summary 1'),
113
  gr.Interface(fn=get_wiki_summary_by_tfidf, inputs=gr.inputs.Textbox(label="Text"), outputs="text", description='Summary 2'),
114
+ pegasus,
115
  title= 'Text Summarizer',
116
  description = desc,
117
  examples=sample,