MilanM committed (verified)
Commit ca40fb1 · 1 Parent(s): 8276485

Create fading_moments.py

Files changed (1)
  1. fading_moments.py +264 -0
fading_moments.py ADDED
@@ -0,0 +1,264 @@
+ import streamlit as st
+ from knowledge_bases import KNOWLEDGE_BASE_OPTIONS, SYSTEM_PROMPTS
+ import genparam
+ from functions import (
+     check_password,
+     initialize_session_state,
+     setup_client,
+     fetch_response,
+     capture_tokens
+ )
+
+ # Custom CSS for the three-column layout
+ three_column_style = """
+ <style>
+ .stColumn {
+     padding: 0.5rem;
+     border-right: 1px solid #dedede;
+ }
+ .stColumn:last-child {
+     border-right: none;
+ }
+ .chat-container {
+     height: calc(100vh - 200px);
+     overflow-y: auto;
+     display: flex;
+     flex-direction: column;
+ }
+ .chat-messages {
+     display: flex;
+     flex-direction: column;
+     gap: 1rem;
+ }
+ </style>
+ """
+
+ def main():
+     # Page configuration
+     st.set_page_config(
+         page_title="The Solutioning Sages",
+         page_icon="🪄",
+         initial_sidebar_state="collapsed",
+         layout="wide"
+     )
+
+     initialize_session_state()
+     st.markdown(three_column_style, unsafe_allow_html=True)
+
+     # Sidebar configuration
+     st.sidebar.header('The Solutioning Sages')
+     st.sidebar.divider()
+
+     # Knowledge Base Selection
+     selected_kb = st.sidebar.selectbox(
+         "Select Knowledge Base",
+         KNOWLEDGE_BASE_OPTIONS,
+         index=KNOWLEDGE_BASE_OPTIONS.index(st.session_state.selected_kb)
+     )
+
+     # Update knowledge base if selection changes
+     if selected_kb != st.session_state.selected_kb:
+         st.session_state.selected_kb = selected_kb
+
+     # Display current knowledge base contents
+     with st.sidebar.expander("Knowledge Base Contents"):
+         st.write("📄 [Knowledge base files would be listed here]")
+
+     # Display active model information
+     st.sidebar.divider()
+     active_model = genparam.SELECTED_MODEL_1 if genparam.ACTIVE_MODEL == 0 else genparam.SELECTED_MODEL_2
+     st.sidebar.markdown("**Active Model:**")
+     st.sidebar.code(active_model)
+
+     st.sidebar.divider()
+
+     # Display token statistics in sidebar
+     st.sidebar.subheader("Token Usage Statistics")
+     if st.session_state.token_statistics:
+         interaction_count = 0
+         stats_by_time = {}
+
+         # Group stats by timestamp
+         for stat in st.session_state.token_statistics:
+             if stat["timestamp"] not in stats_by_time:
+                 stats_by_time[stat["timestamp"]] = []
+             stats_by_time[stat["timestamp"]].append(stat)
+
+         # Display grouped stats
+         for timestamp, stats in stats_by_time.items():
+             interaction_count += 1
+             st.sidebar.markdown(f"**Interaction {interaction_count}** ({timestamp})")
+
+             total_input = sum(stat['input_tokens'] for stat in stats)
+             total_output = sum(stat['output_tokens'] for stat in stats)
+             total = total_input + total_output
+
+             for stat in stats:
+                 st.sidebar.markdown(
+                     f"_{stat['bot_name']}_  \n"
+                     f"Input: {stat['input_tokens']} tokens  \n"
+                     f"Output: {stat['output_tokens']} tokens  \n"
+                     f"Total: {stat['total_tokens']} tokens"
+                 )
+
+             st.sidebar.markdown("**Interaction Totals:**")
+             st.sidebar.markdown(
+                 f"Total Input: {total_input} tokens  \n"
+                 f"Total Output: {total_output} tokens  \n"
+                 f"Total Usage: {total} tokens"
+             )
+             st.sidebar.markdown("---")
+
+     if not check_password():
+         st.stop()
+
+     # Initialize WatsonX client
+     wml_credentials, client = setup_client()
+
+     # Get user input
+     user_input = st.chat_input("Ask your question here", key="user_input")
+
+     if user_input:
+         # Create three columns
+         col1, col2, col3 = st.columns(3)
+
+         # First column - PATH-er B.
+         with col1:
+             st.markdown("<div class='chat-container'>", unsafe_allow_html=True)
+             st.subheader(f"{genparam.BOT_1_AVATAR} {genparam.BOT_1_NAME}")
+             st.markdown("<div class='chat-messages'>", unsafe_allow_html=True)
+
+             # Display chat history
+             for message in st.session_state.chat_history_1:
+                 with st.chat_message(message["role"], avatar=message.get("avatar", None)):
+                     st.markdown(message['content'])
+
+             # Display new messages
+             with st.chat_message("user", avatar=genparam.USER_AVATAR):
+                 st.markdown(user_input)
+
+             st.session_state.chat_history_1.append({
+                 "role": "user",
+                 "content": user_input,
+                 "avatar": genparam.USER_AVATAR
+             })
+
+             # Get bot response
+             system_prompt = SYSTEM_PROMPTS[st.session_state.selected_kb]["bot_1"]
+             stream, prompt_data = fetch_response(
+                 user_input,
+                 client,
+                 system_prompt,
+                 st.session_state.chat_history_1
+             )
+
+             with st.chat_message(genparam.BOT_1_NAME, avatar=genparam.BOT_1_AVATAR):
+                 response = st.write_stream(stream)
+
+             st.session_state.chat_history_1.append({
+                 "role": genparam.BOT_1_NAME,
+                 "content": response,
+                 "avatar": genparam.BOT_1_AVATAR
+             })
+
+             # Capture tokens if enabled
+             if genparam.TOKEN_CAPTURE_ENABLED:
+                 token_stats = capture_tokens(prompt_data, response, client, genparam.BOT_1_NAME)
+                 if token_stats:
+                     st.session_state.token_statistics.append(token_stats)
+
+             st.markdown("</div></div>", unsafe_allow_html=True)
+
+         # Second column - MOD-ther S.
+         with col2:
+             st.markdown("<div class='chat-container'>", unsafe_allow_html=True)
+             st.subheader(f"{genparam.BOT_2_AVATAR} {genparam.BOT_2_NAME}")
+             st.markdown("<div class='chat-messages'>", unsafe_allow_html=True)
+
+             # Display chat history
+             for message in st.session_state.chat_history_2:
+                 with st.chat_message(message["role"], avatar=message.get("avatar", None)):
+                     st.markdown(message['content'])
+
+             st.session_state.chat_history_2.append({
+                 "role": "user",
+                 "content": user_input,
+                 "avatar": genparam.USER_AVATAR
+             })
+
+             # Get bot response
+             system_prompt = SYSTEM_PROMPTS[st.session_state.selected_kb]["bot_2"]
+             stream, prompt_data = fetch_response(
+                 user_input,
+                 client,
+                 system_prompt,
+                 st.session_state.chat_history_2
+             )
+
+             with st.chat_message(genparam.BOT_2_NAME, avatar=genparam.BOT_2_AVATAR):
+                 response = st.write_stream(stream)
+
+             st.session_state.chat_history_2.append({
+                 "role": genparam.BOT_2_NAME,
+                 "content": response,
+                 "avatar": genparam.BOT_2_AVATAR
+             })
+
+             # Capture tokens if enabled
+             if genparam.TOKEN_CAPTURE_ENABLED:
+                 token_stats = capture_tokens(prompt_data, response, client, genparam.BOT_2_NAME)
+                 if token_stats:
+                     st.session_state.token_statistics.append(token_stats)
+
+             st.markdown("</div></div>", unsafe_allow_html=True)
+
+         # Third column - SYS-ter V.
+         with col3:
+             st.markdown("<div class='chat-container'>", unsafe_allow_html=True)
+             st.subheader(f"{genparam.BOT_3_AVATAR} {genparam.BOT_3_NAME}")
+             st.markdown("<div class='chat-messages'>", unsafe_allow_html=True)
+
+             # Display chat history
+             for message in st.session_state.chat_history_3:
+                 with st.chat_message(message["role"], avatar=message.get("avatar", None)):
+                     st.markdown(message['content'])
+
+             st.session_state.chat_history_3.append({
+                 "role": "user",
+                 "content": user_input,
+                 "avatar": genparam.USER_AVATAR
+             })
+
+             # Get bot response
+             system_prompt = SYSTEM_PROMPTS[st.session_state.selected_kb]["bot_3"]
+             stream, prompt_data = fetch_response(
+                 user_input,
+                 client,
+                 system_prompt,
+                 st.session_state.chat_history_3
+             )
+
+             with st.chat_message(genparam.BOT_3_NAME, avatar=genparam.BOT_3_AVATAR):
+                 response = st.write_stream(stream)
+
+             st.session_state.chat_history_3.append({
+                 "role": genparam.BOT_3_NAME,
+                 "content": response,
+                 "avatar": genparam.BOT_3_AVATAR
+             })
+
+             # Capture tokens if enabled
+             if genparam.TOKEN_CAPTURE_ENABLED:
+                 token_stats = capture_tokens(prompt_data, response, client, genparam.BOT_3_NAME)
+                 if token_stats:
+                     st.session_state.token_statistics.append(token_stats)
+
+             st.markdown("</div></div>", unsafe_allow_html=True)
+
+         # Update sidebar with new question
+         st.sidebar.markdown("---")
+         st.sidebar.markdown("**Latest Question:**")
+         st.sidebar.markdown(f"_{user_input}_")
+
+ if __name__ == "__main__":
+     main()