Shivam29rathore committed
Commit ac3def5 · 1 Parent(s): 7d2dbe8

Delete app.py

Files changed (1)
  1. app.py +0 -77
app.py DELETED
@@ -1,77 +0,0 @@
- from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
- import pickle
- import torch
-
-
- import io
-
-
- #contents = pickle.load(f) becomes...
- #contents = CPU_Unpickler(f).load()
-
-
- model_path = "t5_10k_small_cpu.sav"
-
- # load model from drive
- with open(model_path, "rb") as f:
-     model = pickle.load(f)
-
-
- #tokenizer = AutoTokenizer.from_pretrained(checkpoint)
- #model = AutoModelForSeq2SeqLM.from_pretrained(checkpoint)
-
-
- import nltk
- from finbert_embedding.embedding import FinbertEmbedding
- import pandas as pd
- from nltk.cluster import KMeansClusterer
- import numpy as np
- import os
- from scipy.spatial import distance_matrix
- from tensorflow.python.lib.io import file_io
- import pickle
-
- nltk.download('punkt')
-
-
- import itertools
-
- def make_abstractive(word):
-     import os
-     data_path = "/tmp/"
-     if not os.path.exists(data_path):
-         os.makedirs(data_path)
-     input_ = "/tmp/input.txt"
-
-     with open(input_, "w") as file:
-         file.write(word)
-     # read the written txt into a variable
-     with open(input_, 'r') as f:
-         text_ = f.read()
-
-     def clean_data(texts):
-         import re
-         words = list()
-         for text in texts.split():
-             text = re.sub(r'\n', '', text)
-             text = re.sub(r'\s$', '', text)
-             words.append(text)
-
-         return "summarize " + " ".join(words)
-     text = clean_data(text_)
-
-     final_summary = []
-     for x in range(0, len(text) - 1256, 1256):
-         text_to_summarize = text[x:x + 1256]
-         final_summary.append(model.predict(text_to_summarize))
-
-     final_list = list(itertools.chain.from_iterable(final_summary))
-     final_list = ''.join(final_list)
-     return final_list
-
- import gradio as gr
-
- iface = gr.Interface(fn=make_abstractive,
-                      inputs=gr.inputs.Textbox(lines=15, placeholder="Enter your text !!"),
-                      outputs="text", title="Document Summarizer", description="An AI that makes your life easier by helping you summarise long texts.")
- iface.launch()
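
For context, the `CPU_Unpickler` mentioned in the deleted file's comments (`#contents = CPU_Unpickler(f).load()`) is usually a small `pickle.Unpickler` subclass that remaps CUDA tensor storages to the CPU, so a model pickled on a GPU machine can be loaded on a CPU-only Space. Only the class name and call appear in the file; the body below is a sketch of that common pattern, not the author's original code:

```python
import io
import pickle

import torch


class CPU_Unpickler(pickle.Unpickler):
    """Unpickler that forces CUDA tensor storages to load on the CPU."""

    def find_class(self, module, name):
        # torch serializes tensor storages through torch.storage._load_from_bytes;
        # intercept that call and deserialize with map_location="cpu".
        if module == "torch.storage" and name == "_load_from_bytes":
            return lambda b: torch.load(io.BytesIO(b), map_location="cpu")
        return super().find_class(module, name)


# Usage mirroring the deleted script's model loading:
# with open("t5_10k_small_cpu.sav", "rb") as f:
#     model = CPU_Unpickler(f).load()
```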