Burcin committed on
Commit
92e0d6e
·
1 Parent(s): 4107377

Upload app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -4
app.py CHANGED
@@ -12,7 +12,7 @@ from heapq import nlargest
12
  import warnings
13
  from sklearn.feature_extraction.text import TfidfVectorizer
14
  import numpy as np
15
- from transformers import PegasusForConditionalGeneration, PegasusTokenizer
16
 
17
  warnings.filterwarnings("ignore")
18
 
@@ -20,7 +20,7 @@ def get_wiki_original_text(inp):
20
  text = wikipedia.summary(inp)
21
  return text
22
 
23
-
24
  def get_wiki_summary_by_pegasus(inp):
25
  text = wikipedia.summary(inp)
26
  tokenizer = PegasusTokenizer.from_pretrained('google/pegasus-xsum')
@@ -28,7 +28,7 @@ def get_wiki_summary_by_pegasus(inp):
28
  model = PegasusForConditionalGeneration.from_pretrained("google/pegasus-xsum")
29
  summary = model.generate(**tokens)
30
  return tokenizer.decode(summary)
31
-
32
 
33
 
34
  def get_wiki_summary_by_lem(inp):
@@ -89,7 +89,7 @@ sample = [['Europe'], ['Great Depression'], ['Crocodile Dundee']]
89
 
90
  iface = Parallel(gr.Interface(fn=get_wiki_original_text, inputs=gr.inputs.Textbox(label="Requested Topic from Wikipedia : "), outputs="text"),
91
  gr.Interface(fn=get_wiki_summary_by_lem, inputs=gr.inputs.Textbox(label="Requested Topic from Wikipedia : "), outputs="text"),
92
- gr.Interface(fn=get_wiki_summary_by_pegasus, inputs=gr.inputs.Textbox(label="Requested Topic from Wikipedia : "), outputs="text"),
93
 
94
 
95
 
 
12
  import warnings
13
  from sklearn.feature_extraction.text import TfidfVectorizer
14
  import numpy as np
15
+ #from transformers import PegasusForConditionalGeneration, PegasusTokenizer
16
 
17
  warnings.filterwarnings("ignore")
18
 
 
20
  text = wikipedia.summary(inp)
21
  return text
22
 
23
+ """
24
  def get_wiki_summary_by_pegasus(inp):
25
  text = wikipedia.summary(inp)
26
  tokenizer = PegasusTokenizer.from_pretrained('google/pegasus-xsum')
 
28
  model = PegasusForConditionalGeneration.from_pretrained("google/pegasus-xsum")
29
  summary = model.generate(**tokens)
30
  return tokenizer.decode(summary)
31
+ """
32
 
33
 
34
  def get_wiki_summary_by_lem(inp):
 
89
 
90
  iface = Parallel(gr.Interface(fn=get_wiki_original_text, inputs=gr.inputs.Textbox(label="Requested Topic from Wikipedia : "), outputs="text"),
91
  gr.Interface(fn=get_wiki_summary_by_lem, inputs=gr.inputs.Textbox(label="Requested Topic from Wikipedia : "), outputs="text"),
92
+ #gr.Interface(fn=get_wiki_summary_by_pegasus, inputs=gr.inputs.Textbox(label="Requested Topic from Wikipedia : "), outputs="text"),
93
 
94
 
95