georad committed · verified
Commit c718aa0
1 Parent(s): 7962aa5

Upload app.py

Files changed (1)
  1. app.py +156 -0
app.py ADDED
@@ -0,0 +1,156 @@
+ import os
+ import uuid
+ import asyncio
+
+ import nest_asyncio
+ import torch
+ import streamlit as st
+ from omegaconf import OmegaConf
+ from streamlit_pills import pills
+ from streamlit_feedback import streamlit_feedback
+
+ # Local modules from this repo (assumed; they are not part of this commit).
+ from query import VectaraQuery
+ from utils import (thumbs_feedback, send_amplitude_data,
+                    escape_dollars_outside_latex, isTrue, languages, max_examples)
+
+ # Work around Streamlit's file watcher tripping over torch.classes.
+ torch.classes.__path__ = []
+
+ # Setup for HTTP API Calls
+ if 'device_id' not in st.session_state:
+     st.session_state.device_id = str(uuid.uuid4())  # uuid4: random id (uuid3 requires a namespace and name)
+
+ if "feedback_key" not in st.session_state:
+     st.session_state.feedback_key = 0
+
+
+ async def launch_bot():
+     def reset():
+         st.session_state.messages = [{"role": "assistant", "content": "How may I help you?", "avatar": '🤖'}]
+         st.session_state.ex_prompt = None
+         st.session_state.first_turn = True
+
+     def generate_response(question):
+         response = vq.submit_query(question, languages[st.session_state.language])
+         return response
+
+     def generate_streaming_response(question):
+         response = vq.submit_query_streaming(question, languages[st.session_state.language])
+         return response
+
+     def show_example_questions():
+         if len(st.session_state.example_messages) > 0 and st.session_state.first_turn:
+             selected_example = pills("Questions to Try:", st.session_state.example_messages, index=None)
+             if selected_example:
+                 st.session_state.ex_prompt = selected_example
+                 st.session_state.first_turn = False
+                 return True
+         return False
+
+     if 'cfg' not in st.session_state:
+         corpus_keys = str(os.environ['corpus_keys']).split(',')
+         cfg = OmegaConf.create({
+             'corpus_keys': corpus_keys,
+             'api_key': str(os.environ['api_key']),
+             'title': os.environ['title'],
+             'source_data_desc': os.environ['source_data_desc'],
+             'streaming': isTrue(os.environ.get('streaming', False)),
+             'prompt_name': os.environ.get('prompt_name', None),
+             'examples': os.environ.get('examples', None),
+             'language': 'English'
+         })
+         st.session_state.cfg = cfg
+         st.session_state.ex_prompt = None
+         st.session_state.first_turn = True
+         st.session_state.language = cfg.language
+         example_messages = [example.strip() for example in cfg.examples.split(",")] if cfg.examples else []
+         st.session_state.example_messages = [em for em in example_messages if len(em) > 0][:max_examples]
+
+         st.session_state.vq = VectaraQuery(cfg.api_key, cfg.corpus_keys, cfg.prompt_name)
+
+     cfg = st.session_state.cfg
+     vq = st.session_state.vq
+     st.set_page_config(page_title=cfg.title, layout="wide")
+
+     # left side content
+     with st.sidebar:
+         # image = Image.open('Vectara-logo.png')
+         # st.image(image, width=175)
+         st.markdown(f"## About\n\n"
+                     f"This demo uses outside RAG to ask questions about {cfg.source_data_desc}\n")
+
+         cfg.language = st.selectbox('Language:', languages.keys())
+         if st.session_state.language != cfg.language:
+             st.session_state.language = cfg.language
+             reset()
+             st.rerun()
+
+         st.markdown("\n")
+         bc1, _ = st.columns([1, 1])
+         with bc1:
+             if st.button('Start Over'):
+                 reset()
+                 st.rerun()
+
+         st.markdown("---")
+         st.markdown(
+             "## Temporary test demo only\n"
+         )
+
+     st.markdown(f"<center> <h2> Header Demo Test: {cfg.title} </h2> </center>", unsafe_allow_html=True)
+
+     if "messages" not in st.session_state.keys():
+         reset()
+
+     # Display chat messages
+     for message in st.session_state.messages:
+         with st.chat_message(message["role"], avatar=message["avatar"]):
+             st.write(message["content"])
+
+     example_container = st.empty()
+     with example_container:
+         if show_example_questions():
+             example_container.empty()
+             st.rerun()
+
+     # select prompt from example question or user provided input
+     if st.session_state.ex_prompt:
+         prompt = st.session_state.ex_prompt
+     else:
+         prompt = st.chat_input()
+     if prompt:
+         st.session_state.messages.append({"role": "user", "content": prompt, "avatar": '🧑‍💻'})
+         with st.chat_message("user", avatar="🧑‍💻"):
+             st.write(prompt)
+         st.session_state.ex_prompt = None
+
+     # Generate a new response if last message is not from assistant
+     if st.session_state.messages[-1]["role"] != "assistant":
+         with st.chat_message("assistant", avatar="🤖"):
+             if cfg.streaming:
+                 stream = generate_streaming_response(prompt)
+                 response = st.write_stream(stream)
+             else:
+                 with st.spinner("Thinking..."):
+                     response = generate_response(prompt)
+                     st.write(response)
+
+             response = escape_dollars_outside_latex(response)
+             message = {"role": "assistant", "content": response, "avatar": '🤖'}
+             st.session_state.messages.append(message)
+
+             # Send query and response to Amplitude Analytics
+             send_amplitude_data(
+                 user_query=st.session_state.messages[-2]["content"],
+                 chat_response=st.session_state.messages[-1]["content"],
+                 demo_name=cfg["title"],
+                 language=st.session_state.language
+             )
+         st.rerun()
+
+     if (st.session_state.messages[-1]["role"] == "assistant") and (st.session_state.messages[-1]["content"] != "How may I help you?"):
+         streamlit_feedback(feedback_type="thumbs", on_submit=thumbs_feedback, key=st.session_state.feedback_key,
+                            kwargs={"user_query": st.session_state.messages[-2]["content"],
+                                    "chat_response": st.session_state.messages[-1]["content"],
+                                    "demo_name": cfg["title"],
+                                    "response_language": st.session_state.language})
+
+
+ if __name__ == "__main__":
+     # st.set_page_config() may only be called once per app; launch_bot() already
+     # sets it from cfg.title, so the duplicate call is kept only as a comment.
+     # st.set_page_config(page_title="Sticky toolbar test", layout="wide")
+     nest_asyncio.apply()
+     asyncio.run(launch_bot())
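
Note: app.py imports VectaraQuery from query.py and several helpers from utils.py, neither of which is included in this commit. The sketch below shows only the interfaces the code above relies on; all names, signatures, and the language map are assumptions for illustration, not the repo's actual implementation.

# query.py (sketch, assumed): thin wrapper around Vectara's query API
class VectaraQuery:
    def __init__(self, api_key: str, corpus_keys: list, prompt_name: str = None):
        self.api_key = api_key
        self.corpus_keys = corpus_keys
        self.prompt_name = prompt_name

    def submit_query(self, query: str, language: str) -> str:
        """Return the full generated answer as a single string."""
        raise NotImplementedError

    def submit_query_streaming(self, query: str, language: str):
        """Yield answer chunks; consumable by st.write_stream()."""
        raise NotImplementedError

# utils.py (sketch, assumed)
max_examples = 6                                   # assumed cap on example pills
languages = {'English': 'eng', 'Spanish': 'spa'}   # assumed name -> language-code map

def isTrue(value) -> bool:
    """Interpret env-var strings like 'True'/'true' as booleans."""
    return str(value).strip().lower() == 'true'

def escape_dollars_outside_latex(text: str) -> str:
    """Escape '$' so Streamlit markdown does not treat it as LaTeX."""
    raise NotImplementedError

def send_amplitude_data(user_query, chat_response, demo_name, language):
    """Log the query/response pair to Amplitude Analytics."""
    raise NotImplementedError

def thumbs_feedback(feedback, **kwargs):
    """Callback for the streamlit_feedback thumbs widget."""
    raise NotImplementedError

With those modules in place, the Space runs as a normal Streamlit app (streamlit run app.py), with corpus_keys, api_key, title, and source_data_desc set as environment variables and streaming, prompt_name, and examples optional.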