import streamlit as st

@st.cache_resource
def load_topic_transformers():
    """Load the zero-shot topic classifier, preferring GPU in half precision."""
    import torch
    from transformers import pipeline
    try:
        # Try to run on CUDA with fp16 weights for faster inference.
        topic_classifier = pipeline(
            "zero-shot-classification",
            model="facebook/bart-large-mnli",
            device="cuda",
            torch_dtype=torch.float16,
        )
    except Exception as e:
        print("Error:", e)
        # Fall back to the default CPU pipeline if CUDA is unavailable.
        topic_classifier = pipeline("zero-shot-classification", model="facebook/bart-large-mnli")
    return topic_classifier

def suggest_topic(text):
    # TODO: texts longer than the model's input limit (~1024 tokens) should be
    # summarized or truncated before classification.

    # Candidate labels for zero-shot classification.
    possible_topics = [
        "Gadgets", "Business", "Finance", "Health", "Sports", "Politics",
        "Government", "Science", "Education", "Travel", "Tourism",
        "Finance & Economics", "Market", "Technology", "Scientific Discovery",
        "Entertainment", "Environment", "News & Media", "Space, Universe & Cosmos",
        "Fashion", "Manufacturing and Constructions", "Law & Crime", "Motivation",
        "Development & Socialization", "Archeology",
    ]

    # The pipeline returns the candidate labels sorted by score, most likely first.
    result = topic_classifier(text, possible_topics)
    return result['labels']

st.title("Topic Suggestion")

# Load the model once and keep it in session state; st.cache_resource also
# reuses the same pipeline instance across reruns.
if 'topic_model' not in st.session_state:
    with st.spinner("Loading model..."):
        st.session_state.topic_model = load_topic_transformers()
    st.success("Model loaded")

topic_classifier = st.session_state.topic_model

whole_text = st.text_input("Enter the text here: ")

if whole_text:
    try:
        predicted_topic = suggest_topic(whole_text)

        st.write("Suggested Topics")
        for topic in predicted_topic[:10]:
            st.write(topic)
    except Exception as e:
        st.error(f"Error: {e}")