UNIST-Eunchan committed
Commit 2c13377 · 1 Parent(s): 949eeaf

Update app.py

Files changed (1)
  1. app.py +6 -5
app.py CHANGED
@@ -38,7 +38,7 @@ def infer(input_ids, max_length, temperature, top_k, top_p):
     return output_sequences
 
 
-
+@st.cache_data
 def chunking(book_text):
     sentences = sent_tokenize(book_text)
     segments = []
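This hunk adds Streamlit's `st.cache_data` decorator, which memoizes `chunking` on its `book_text` argument so the book is not re-tokenized on every widget interaction and rerun. A minimal sketch of the cached function, assuming NLTK's `sent_tokenize`; the segment-accumulation logic and the 1,000-character budget are hypothetical stand-ins, since the rest of the function body lies outside this hunk:

```python
import streamlit as st
from nltk.tokenize import sent_tokenize  # requires the 'punkt' tokenizer data

@st.cache_data  # memoize on book_text: reruns reuse the cached segments
def chunking(book_text):
    sentences = sent_tokenize(book_text)
    segments, current = [], []
    for sentence in sentences:
        current.append(sentence)
        if sum(len(s) for s in current) > 1000:  # hypothetical budget
            segments.append(" ".join(current))
            current = []
    if current:  # flush the trailing partial segment
        segments.append(" ".join(current))
    return segments
```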
@@ -96,7 +96,7 @@ st.title("Book Summarization 📚")
 st.write("The almighty king of text generation, GPT-2 comes in four available sizes, only three of which have been publicly made available. Feared for its fake news generation capabilities, it currently stands as the most syntactically coherent model. A direct successor to the original GPT, it reinforces the already established pre-training/fine-tuning killer duo. From the paper: Language Models are Unsupervised Multitask Learners by Alec Radford, Jeffrey Wu, Rewon Child, David Luan, Dario Amodei and Ilya Sutskever.")
 
 #book_index = st.sidebar.slider("Select Book Example", value = 0,min_value = 0, max_value=4)
-sent = st.text_area("Text", _book, height = 1550000)
+sent = st.text_area("Text", _book, height = 100000)
 max_length = st.sidebar.slider("Max Length", value = 512,min_value = 10, max_value=1024)
 temperature = st.sidebar.slider("Temperature", value = 1.0, min_value = 0.0, max_value=1.0, step=0.05)
 top_k = st.sidebar.slider("Top-k", min_value = 0, max_value=5, value = 0)
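For reference, `st.text_area`'s `height` argument is measured in pixels, so the old 1550000 and even the new 100000 both request a box far taller than any viewport; a value in the low hundreds is more typical. A sketch with an illustrative height (the placeholder string stands in for `_book`):

```python
import streamlit as st

sample_book = "Once upon a time..."  # placeholder for the preloaded _book text
# height is in pixels; ~400 shows roughly 15-20 lines of text
sent = st.text_area("Text", sample_book, height=400)
```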
@@ -133,7 +133,8 @@ def generate_output(test_samples):
 chunked_segments = chunking(test_book[0]['book'])
 
 
-for segment in range(len(chunked_segments)):
+for i in range(len(chunked_segments)):
 
-    summaries = generate_output(segment)
-    st.write(summaries[-1])
+    summaries = generate_output(chunked_segments[i])
+    st.write(f'A summary of Segment {i}.')
+    st.success(summaries[-1])
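The rewritten loop fixes a real bug: the old code passed the loop index (`segment`, an int) to `generate_output` instead of the chunk text. It also labels each summary and renders it with `st.success`. The same fix reads slightly more idiomatically with `enumerate`, as in this sketch; it assumes, per the hunk, that `generate_output` returns a sequence whose last element is the decoded summary:

```python
for i, segment in enumerate(chunked_segments):
    summaries = generate_output(segment)  # pass the chunk text, not its index
    st.write(f"A summary of Segment {i}.")
    st.success(summaries[-1])  # last element is the decoded summary text
```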
 
 