uumerrr684 committed on
Commit dda64ac · verified
1 Parent(s): 3253c41

Update app.py

Files changed (1)
  1. app.py +475 -62
app.py CHANGED
@@ -2,17 +2,18 @@ import requests
2
  import os
3
  import json
4
  import streamlit as st
5
- from datetime import datetime
6
  import time
 
7
 
8
  # Page configuration
9
  st.set_page_config(
10
  page_title="Chat Flow 🕷",
11
  page_icon="💬",
12
- initial_sidebar_state="collapsed"
13
  )
14
 
15
- # White background
16
  st.markdown("""
17
  <style>
18
  .stApp {
@@ -38,11 +39,180 @@ st.markdown("""
38
  font-size: 0.8em;
39
  font-style: italic;
40
  }
41
  </style>
42
  """, unsafe_allow_html=True)
43
 
44
  # File to store chat history
45
  HISTORY_FILE = "chat_history.json"
46
 
47
  def load_chat_history():
48
  """Load chat history from file"""
@@ -71,10 +241,192 @@ def clear_chat_history():
71
  except Exception as e:
72
  st.error(f"Error clearing chat history: {e}")
73
 
74
- # Initialize session state with saved history
75
  if "messages" not in st.session_state:
76
  st.session_state.messages = load_chat_history()
77
 
 
 
 
78
  # Get API key
79
  OPENROUTER_API_KEY = os.environ.get("OPENROUTER_API_KEY")
80
 
@@ -90,23 +442,22 @@ def check_api_status():
90
  except:
91
  return "Error"
92
 
93
-
94
  def get_ai_response(messages, model="openai/gpt-3.5-turbo"):
95
  if not OPENROUTER_API_KEY:
96
  return "No API key found. Please add OPENROUTER_API_KEY to environment variables."
97
-
98
  url = "https://openrouter.ai/api/v1/chat/completions"
99
  headers = {
100
  "Content-Type": "application/json",
101
  "Authorization": f"Bearer {OPENROUTER_API_KEY}",
102
- "HTTP-Referer": "http://localhost:8501", # Optional: Your site URL
103
- "X-Title": "Streamlit AI Assistant" # Optional: Your app name
104
  }
105
-
106
- # Create system message and user messages
107
- api_messages = [{"role": "system", "content": "You are a helpful AI assistant. Provide clear and helpful responses."}]
108
  api_messages.extend(messages)
109
-
110
  data = {
111
  "model": model,
112
  "messages": api_messages,
@@ -117,11 +468,10 @@ def get_ai_response(messages, model="openai/gpt-3.5-turbo"):
117
  "frequency_penalty": 0,
118
  "presence_penalty": 0
119
  }
120
-
121
  try:
122
  response = requests.post(url, headers=headers, json=data, stream=True, timeout=60)
123
-
124
- # Better error handling
125
  if response.status_code != 200:
126
  error_detail = ""
127
  try:
@@ -129,17 +479,12 @@ def get_ai_response(messages, model="openai/gpt-3.5-turbo"):
129
  error_detail = error_data.get('error', {}).get('message', f"HTTP {response.status_code}")
130
  except:
131
  error_detail = f"HTTP {response.status_code}: {response.reason}"
132
-
133
  yield f"API Error: {error_detail}. Please try a different model or check your API key."
134
  return
135
-
136
  full_response = ""
137
- buffer = ""
138
-
139
- # Using your working streaming logic
140
  for line in response.iter_lines():
141
  if line:
142
- # The server sends lines starting with "data: ..."
143
  if line.startswith(b"data: "):
144
  data_str = line[len(b"data: "):].decode("utf-8")
145
  if data_str.strip() == "[DONE]":
@@ -150,11 +495,9 @@ def get_ai_response(messages, model="openai/gpt-3.5-turbo"):
150
  if delta:
151
  full_response += delta
152
  yield full_response
153
- except json.JSONDecodeError:
154
  continue
155
- except (KeyError, IndexError):
156
- continue
157
-
158
  except requests.exceptions.Timeout:
159
  yield "Request timed out. Please try again with a shorter message or different model."
160
  except requests.exceptions.ConnectionError:
@@ -164,15 +507,73 @@ def get_ai_response(messages, model="openai/gpt-3.5-turbo"):
164
  except Exception as e:
165
  yield f"Unexpected error: {str(e)}. Please try again or contact support."
166
 
 
 
167
  # Header
168
- st.title("AI Assistant")
169
- st.caption("Ask me anything")
170
 
171
  # Sidebar
172
  with st.sidebar:
173
- st.header("Settings")
174
 
175
- # API Status
176
  status = check_api_status()
177
  if status == "Connected":
178
  st.success("🟢 API Connected")
@@ -180,10 +581,10 @@ with st.sidebar:
180
  st.error("No API Key")
181
  else:
182
  st.warning("Connection Issue")
183
-
184
  st.divider()
185
-
186
- # All models including new ones
187
  models = [
188
  ("GPT-3.5 Turbo", "openai/gpt-3.5-turbo"),
189
  ("LLaMA 3.1 8B", "meta-llama/llama-3.1-8b-instruct"),
@@ -196,45 +597,45 @@ with st.sidebar:
196
  ("Gemma 3 4B", "google/gemma-3-4b-it:free"),
197
  ("Auto (Best Available)", "openrouter/auto")
198
  ]
199
-
200
  model_names = [name for name, _ in models]
201
  model_ids = [model_id for _, model_id in models]
202
-
203
- selected_index = st.selectbox("Model", range(len(model_names)),
204
- format_func=lambda x: model_names[x],
205
- index=0)
206
  selected_model = model_ids[selected_index]
207
-
208
  # Show selected model ID in green
209
  st.markdown(f"**Model ID:** <span class='model-id'>{selected_model}</span>", unsafe_allow_html=True)
210
-
211
  st.divider()
212
-
213
  # Chat History Controls
214
  st.header("Chat History")
215
-
216
  # Show number of messages
217
  if st.session_state.messages:
218
  st.info(f"Messages stored: {len(st.session_state.messages)}")
219
-
220
  # Auto-save toggle
221
  auto_save = st.checkbox("Auto-save messages", value=True)
222
-
223
  # Manual save/load buttons
224
  col1, col2 = st.columns(2)
225
  with col1:
226
  if st.button("Save History", use_container_width=True):
227
  save_chat_history(st.session_state.messages)
228
  st.success("History saved!")
229
-
230
  with col2:
231
  if st.button("Load History", use_container_width=True):
232
  st.session_state.messages = load_chat_history()
233
  st.success("History loaded!")
234
  st.rerun()
235
-
236
  st.divider()
237
-
238
  # View History
239
  if st.button("View History File", use_container_width=True):
240
  if os.path.exists(HISTORY_FILE):
@@ -243,7 +644,7 @@ with st.sidebar:
243
  st.text_area("Chat History (JSON)", history_content, height=200)
244
  else:
245
  st.warning("No history file found")
246
-
247
  # Download History
248
  if os.path.exists(HISTORY_FILE):
249
  with open(HISTORY_FILE, 'rb') as f:
@@ -254,18 +655,16 @@ with st.sidebar:
254
  mime="application/json",
255
  use_container_width=True
256
  )
257
-
258
  st.divider()
259
-
260
  # Clear controls
261
  if st.button("Clear Chat", use_container_width=True, type="secondary"):
262
  clear_chat_history()
263
  st.success("Chat cleared!")
264
  st.rerun()
265
 
266
- # Show welcome message when no messages
267
- if not st.session_state.messages:
268
- st.info("How can I help you today?")
269
 
270
  # Display chat messages
271
  for message in st.session_state.messages:
@@ -278,52 +677,66 @@ for message in st.session_state.messages:
278
  if len(parts) > 1:
279
  model_name = parts[1].replace("***", "").replace("**", "")
280
  st.markdown(main_content)
281
- st.markdown(f"<div class='model-attribution'>Response created by: <strong>{model_name}</strong></div>", unsafe_allow_html=True)
282
  else:
283
  st.markdown(message["content"])
284
  else:
285
  st.markdown(message["content"])
286
 
287
- # Chat input
288
- if prompt := st.chat_input("Ask anything..."):
289
  # Add user message
290
  user_message = {"role": "user", "content": prompt}
291
  st.session_state.messages.append(user_message)
292
-
293
  # Auto-save if enabled
294
  if auto_save:
295
  save_chat_history(st.session_state.messages)
296
297
  # Display user message
298
  with st.chat_message("user"):
299
  st.markdown(prompt)
300
-
301
  # Get AI response
302
  with st.chat_message("assistant"):
303
  placeholder = st.empty()
304
-
305
  full_response = ""
306
  try:
307
  for response in get_ai_response(st.session_state.messages, selected_model):
308
  full_response = response
309
  placeholder.markdown(full_response + "▌")
310
-
311
  # Remove cursor and show final response
312
  placeholder.markdown(full_response)
313
-
314
  except Exception as e:
315
  error_msg = f"An error occurred: {str(e)}"
316
  placeholder.markdown(error_msg)
317
  full_response = error_msg
318
-
319
  # Add AI response to messages with attribution
320
  full_response_with_attribution = full_response + f"\n\n---\n*Response created by: **{model_names[selected_index]}***"
321
  assistant_message = {"role": "assistant", "content": full_response_with_attribution}
322
  st.session_state.messages.append(assistant_message)
323
-
324
  # Auto-save if enabled
325
  if auto_save:
326
  save_chat_history(st.session_state.messages)
327
 
328
  # Show currently using model
329
  st.caption(f"Currently using: **{model_names[selected_index]}**")
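For context on the streaming loop in get_ai_response above, here is a minimal sketch of how OpenRouter's chat-completions stream can be consumed outside Streamlit. It assumes the same "data: ..." server-sent-event lines and "[DONE]" sentinel that the code above parses, reads OPENROUTER_API_KEY from the environment, and uses an illustrative helper name (stream_completion) that is not part of app.py:

import json
import os
import requests

def stream_completion(prompt, model="openai/gpt-3.5-turbo"):
    """Yield incremental text pieces from an OpenRouter streaming completion."""
    response = requests.post(
        "https://openrouter.ai/api/v1/chat/completions",
        headers={"Authorization": f"Bearer {os.environ['OPENROUTER_API_KEY']}"},
        json={
            "model": model,
            "messages": [{"role": "user", "content": prompt}],
            "stream": True,
        },
        stream=True,
        timeout=60,
    )
    response.raise_for_status()
    for line in response.iter_lines():
        # Streamed events arrive as raw lines of the form b"data: {json chunk}".
        if not line or not line.startswith(b"data: "):
            continue
        payload = line[len(b"data: "):].decode("utf-8")
        if payload.strip() == "[DONE]":  # end-of-stream sentinel
            break
        try:
            chunk = json.loads(payload)
            delta = chunk["choices"][0]["delta"].get("content", "")
            if delta:
                yield delta
        except (json.JSONDecodeError, KeyError, IndexError):
            continue  # skip keep-alive noise and malformed chunks

# Example usage (prints the reply as it streams):
# for piece in stream_completion("Hello there"):
#     print(piece, end="", flush=True)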
 
2
  import os
3
  import json
4
  import streamlit as st
5
+ from datetime import datetime, timedelta
6
  import time
7
+ import uuid
8
 
9
  # Page configuration
10
  st.set_page_config(
11
  page_title="Chat Flow 🕷",
12
  page_icon="💬",
13
+ initial_sidebar_state="expanded"
14
  )
15
 
16
+ # Enhanced CSS with chat history styling and BLACK NEW CHAT BUTTON
17
  st.markdown("""
18
  <style>
19
  .stApp {
 
39
  font-size: 0.8em;
40
  font-style: italic;
41
  }
42
+
43
+ /* NEW CHAT BUTTON - Black background, white text */
44
+ .stButton > button[kind="primary"] {
45
+ background-color: #000000 !important;
46
+ border-color: #000000 !important;
47
+ color: #ffffff !important;
48
+ }
49
+
50
+ .stButton > button[kind="primary"]:hover {
51
+ background-color: #333333 !important;
52
+ border-color: #333333 !important;
53
+ color: #ffffff !important;
54
+ }
55
+
56
+ .stButton > button[kind="primary"]:active {
57
+ background-color: #1a1a1a !important;
58
+ border-color: #1a1a1a !important;
59
+ color: #ffffff !important;
60
+ }
61
+
62
+ .stButton > button[kind="primary"]:focus {
63
+ background-color: #000000 !important;
64
+ border-color: #000000 !important;
65
+ color: #ffffff !important;
66
+ box-shadow: 0 0 0 0.2rem rgba(0, 0, 0, 0.25) !important;
67
+ }
68
+
69
+ /* Chat history styling */
70
+ .chat-history-item {
71
+ padding: 8px 12px;
72
+ margin: 4px 0;
73
+ border-radius: 8px;
74
+ border: 1px solid #e0e0e0;
75
+ background: #f8f9fa;
76
+ cursor: pointer;
77
+ transition: all 0.2s;
78
+ }
79
+
80
+ .chat-history-item:hover {
81
+ background: #e9ecef;
82
+ border-color: #28a745;
83
+ }
84
+
85
+ .chat-history-item.active {
86
+ background: #28a745;
87
+ color: white;
88
+ border-color: #28a745;
89
+ }
90
+
91
+ .chat-title {
92
+ font-weight: 500;
93
+ font-size: 0.9em;
94
+ margin-bottom: 2px;
95
+ }
96
+
97
+ .chat-date {
98
+ font-size: 0.75em;
99
+ opacity: 0.7;
100
+ }
101
+
102
+ .new-chat-btn {
103
+ width: 100%;
104
+ margin-bottom: 16px;
105
+ }
106
  </style>
107
  """, unsafe_allow_html=True)
108
 
109
  # File to store chat history
110
  HISTORY_FILE = "chat_history.json"
111
+ USERS_FILE = "online_users.json"
112
+ SESSIONS_FILE = "chat_sessions.json"
113
+
114
+ # ================= USER FUNCTIONS =================
115
+
116
+ def get_user_id():
117
+ """Get unique ID for this user session"""
118
+ if 'user_id' not in st.session_state:
119
+ st.session_state.user_id = str(uuid.uuid4())[:8]
120
+ return st.session_state.user_id
121
+
122
+ def update_online_users():
123
+ """Update user status"""
124
+ try:
125
+ # Load current users
126
+ users = {}
127
+ if os.path.exists(USERS_FILE):
128
+ with open(USERS_FILE, 'r') as f:
129
+ users = json.load(f)
130
+
131
+ user_id = get_user_id()
132
+
133
+ # Update user info
134
+ users[user_id] = {
135
+ 'last_seen': datetime.now().isoformat(),
136
+ 'name': f'User-{user_id}',
137
+ 'session_start': users.get(user_id, {}).get('session_start', datetime.now().isoformat())
138
+ }
139
+
140
+ # Clean up old users (not seen in 5 minutes)
141
+ current_time = datetime.now()
142
+ active_users = {}
143
+ for uid, data in users.items():
144
+ try:
145
+ last_seen = datetime.fromisoformat(data['last_seen'])
146
+ if current_time - last_seen < timedelta(minutes=5):
147
+ active_users[uid] = data
148
+ except:
149
+ continue
150
+
151
+ # Save updated users
152
+ with open(USERS_FILE, 'w') as f:
153
+ json.dump(active_users, f, indent=2)
154
+
155
+ return len(active_users)
156
+
157
+ except Exception as e:
158
+ st.error(f"User tracking error: {e}")
159
+ return 1
160
+
161
+ def show_online_users():
162
+ """Display online users count"""
163
+ st.header("πŸ‘₯ Who's Online")
164
+
165
+ try:
166
+ if not os.path.exists(USERS_FILE):
167
+ st.info("No user data yet")
168
+ return 0
169
+
170
+ with open(USERS_FILE, 'r') as f:
171
+ users = json.load(f)
172
+
173
+ if not users:
174
+ st.info("No active users")
175
+ return 0
176
+
177
+ online_count = len(users)
178
+
179
+ # Show count
180
+ if online_count == 1:
181
+ st.success("🟒 Just you online")
182
+ else:
183
+ st.success(f"🟒 {online_count} people online")
184
+
185
+ st.divider()
186
+
187
+ # Show each user
188
+ current_user_id = get_user_id()
189
+ for user_id, data in users.items():
190
+ is_current_user = (user_id == current_user_id)
191
+
192
+ # User header
193
+ if is_current_user:
194
+ st.markdown("**πŸ‘€ You**")
195
+ else:
196
+ st.markdown(f"**πŸ‘€ {data.get('name', user_id)}**")
197
+
198
+ # Show session info
199
+ try:
200
+ session_start = datetime.fromisoformat(data['session_start'])
201
+ duration = datetime.now() - session_start
202
+ minutes = int(duration.total_seconds() / 60)
203
+ st.caption(f"πŸ• Online for {minutes} minutes")
204
+ except:
205
+ st.caption("πŸ• Session time unknown")
206
+
207
+ st.divider()
208
+
209
+ return online_count
210
+
211
+ except Exception as e:
212
+ st.error(f"Error showing users: {e}")
213
+ return 0
214
+
215
+ # ================= CHAT FUNCTIONS =================
216
 
217
  def load_chat_history():
218
  """Load chat history from file"""
 
241
  except Exception as e:
242
  st.error(f"Error clearing chat history: {e}")
243
 
244
+ def load_chat_sessions():
245
+ """Load all chat sessions"""
246
+ try:
247
+ if os.path.exists(SESSIONS_FILE):
248
+ with open(SESSIONS_FILE, 'r', encoding='utf-8') as f:
249
+ return json.load(f)
250
+ except Exception as e:
251
+ st.error(f"Error loading chat sessions: {e}")
252
+ return {}
253
+
254
+ def save_chat_sessions(sessions):
255
+ """Save chat sessions to file"""
256
+ try:
257
+ with open(SESSIONS_FILE, 'w', encoding='utf-8') as f:
258
+ json.dump(sessions, f, ensure_ascii=False, indent=2)
259
+ except Exception as e:
260
+ st.error(f"Error saving chat sessions: {e}")
261
+
262
+ def get_session_id():
263
+ """Get or create session ID"""
264
+ if 'session_id' not in st.session_state:
265
+ st.session_state.session_id = str(uuid.uuid4())
266
+ return st.session_state.session_id
267
+
268
+ def get_chat_title(messages):
269
+ """Generate a title for the chat based on conversation content using AI"""
270
+ if not messages:
271
+ return "New Chat"
272
+
273
+ if len(messages) <= 1:
274
+ for msg in messages:
275
+ if msg["role"] == "user":
276
+ content = msg["content"]
277
+ if len(content) > 30:
278
+ return content[:30] + "..."
279
+ return content
280
+ return "New Chat"
281
+
282
+ try:
283
+ return generate_smart_title(messages)
284
+ except:
285
+ for msg in messages:
286
+ if msg["role"] == "user":
287
+ content = msg["content"]
288
+ if len(content) > 30:
289
+ return content[:30] + "..."
290
+ return content
291
+ return "New Chat"
292
+
293
+ def generate_smart_title(messages):
294
+ """Use AI to generate a smart title for the conversation"""
295
+ if not OPENROUTER_API_KEY:
296
+ for msg in messages:
297
+ if msg["role"] == "user":
298
+ content = msg["content"]
299
+ if len(content) > 30:
300
+ return content[:30] + "..."
301
+ return content
302
+ return "New Chat"
303
+
304
+ conversation_text = ""
305
+ message_count = 0
306
+
307
+ for msg in messages:
308
+ if message_count >= 6:
309
+ break
310
+ if msg["role"] in ["user", "assistant"]:
311
+ role = "User" if msg["role"] == "user" else "Assistant"
312
+ content = msg["content"]
313
+ if "Response created by:" in content:
314
+ content = content.split("\n\n---\n*Response created by:")[0]
315
+ conversation_text += f"{role}: {content[:200]}...\n"
316
+ message_count += 1
317
+
318
+ title_prompt = f"""Based on this conversation, generate a short, descriptive title (2-5 words max):
319
+
320
+ {conversation_text}
321
+
322
+ Generate only a brief title that captures the main topic. Examples:
323
+ - "Python Code Help"
324
+ - "Recipe Ideas"
325
+ - "Travel Planning"
326
+ - "Math Problem"
327
+ - "Writing Assistance"
328
+
329
+ Title:"""
330
+
331
+ url = "https://openrouter.ai/api/v1/chat/completions"
332
+ headers = {
333
+ "Content-Type": "application/json",
334
+ "Authorization": f"Bearer {OPENROUTER_API_KEY}",
335
+ "HTTP-Referer": "http://localhost:8501",
336
+ "X-Title": "Streamlit AI Assistant"
337
+ }
338
+
339
+ data = {
340
+ "model": "openai/gpt-3.5-turbo",
341
+ "messages": [{"role": "user", "content": title_prompt}],
342
+ "max_tokens": 20,
343
+ "temperature": 0.3,
344
+ "stream": False
345
+ }
346
+
347
+ try:
348
+ response = requests.post(url, headers=headers, json=data, timeout=10)
349
+ if response.status_code == 200:
350
+ result = response.json()
351
+ title = result["choices"][0]["message"]["content"].strip()
352
+ title = title.replace('"', '').replace("Title:", "").strip()
353
+ if len(title) > 40:
354
+ title = title[:40] + "..."
355
+ return title if title else "New Chat"
356
+ except Exception as e:
357
+ pass
358
+
359
+ for msg in messages:
360
+ if msg["role"] == "user":
361
+ content = msg["content"]
362
+ if len(content) > 30:
363
+ return content[:30] + "..."
364
+ return content
365
+ return "New Chat"
366
+
367
+ def save_current_session():
368
+ """Save current chat session with smart AI-generated title"""
369
+ if not st.session_state.messages:
370
+ return
371
+
372
+ sessions = load_chat_sessions()
373
+ session_id = get_session_id()
374
+
375
+ user_messages = [msg for msg in st.session_state.messages if msg["role"] == "user"]
376
+ assistant_messages = [msg for msg in st.session_state.messages if msg["role"] == "assistant"]
377
+
378
+ if len(user_messages) >= 1 and len(assistant_messages) >= 1:
379
+ title = get_chat_title(st.session_state.messages)
380
+ else:
381
+ title = "New Chat"
382
+ if user_messages:
383
+ first_message = user_messages[0]["content"]
384
+ if len(first_message) > 30:
385
+ title = first_message[:30] + "..."
386
+ else:
387
+ title = first_message
388
+
389
+ sessions[session_id] = {
390
+ "title": title,
391
+ "messages": st.session_state.messages,
392
+ "created_at": sessions.get(session_id, {}).get("created_at", datetime.now().isoformat()),
393
+ "updated_at": datetime.now().isoformat()
394
+ }
395
+
396
+ save_chat_sessions(sessions)
397
+
398
+ def load_session(session_id):
399
+ """Load a specific chat session"""
400
+ sessions = load_chat_sessions()
401
+ if session_id in sessions:
402
+ st.session_state.messages = sessions[session_id]["messages"]
403
+ st.session_state.session_id = session_id
404
+ return True
405
+ return False
406
+
407
+ def delete_session(session_id):
408
+ """Delete a chat session"""
409
+ sessions = load_chat_sessions()
410
+ if session_id in sessions:
411
+ del sessions[session_id]
412
+ save_chat_sessions(sessions)
413
+ return True
414
+ return False
415
+
416
+ def start_new_chat():
417
+ """Start a new chat session"""
418
+ if st.session_state.messages:
419
+ save_current_session()
420
+ st.session_state.messages = []
421
+ st.session_state.session_id = str(uuid.uuid4())
422
+
423
+ # Initialize session state
424
  if "messages" not in st.session_state:
425
  st.session_state.messages = load_chat_history()
426
 
427
+ if "session_id" not in st.session_state:
428
+ st.session_state.session_id = str(uuid.uuid4())
429
+
430
  # Get API key
431
  OPENROUTER_API_KEY = os.environ.get("OPENROUTER_API_KEY")
432
 
 
442
  except:
443
  return "Error"
444
 
 
445
  def get_ai_response(messages, model="openai/gpt-3.5-turbo"):
446
  if not OPENROUTER_API_KEY:
447
  return "No API key found. Please add OPENROUTER_API_KEY to environment variables."
448
+
449
  url = "https://openrouter.ai/api/v1/chat/completions"
450
  headers = {
451
  "Content-Type": "application/json",
452
  "Authorization": f"Bearer {OPENROUTER_API_KEY}",
453
+ "HTTP-Referer": "http://localhost:8501",
454
+ "X-Title": "Streamlit AI Assistant"
455
  }
456
+
457
+ api_messages = [
458
+ {"role": "system", "content": "You are a helpful AI assistant. Provide clear and helpful responses."}]
459
  api_messages.extend(messages)
460
+
461
  data = {
462
  "model": model,
463
  "messages": api_messages,
 
468
  "frequency_penalty": 0,
469
  "presence_penalty": 0
470
  }
471
+
472
  try:
473
  response = requests.post(url, headers=headers, json=data, stream=True, timeout=60)
474
+
 
475
  if response.status_code != 200:
476
  error_detail = ""
477
  try:
 
479
  error_detail = error_data.get('error', {}).get('message', f"HTTP {response.status_code}")
480
  except:
481
  error_detail = f"HTTP {response.status_code}: {response.reason}"
 
482
  yield f"API Error: {error_detail}. Please try a different model or check your API key."
483
  return
484
+
485
  full_response = ""
 
 
 
486
  for line in response.iter_lines():
487
  if line:
 
488
  if line.startswith(b"data: "):
489
  data_str = line[len(b"data: "):].decode("utf-8")
490
  if data_str.strip() == "[DONE]":
 
495
  if delta:
496
  full_response += delta
497
  yield full_response
498
+ except (json.JSONDecodeError, KeyError, IndexError):
499
  continue
500
+
 
 
501
  except requests.exceptions.Timeout:
502
  yield "Request timed out. Please try again with a shorter message or different model."
503
  except requests.exceptions.ConnectionError:
 
507
  except Exception as e:
508
  yield f"Unexpected error: {str(e)}. Please try again or contact support."
509
 
510
+ # ================= MAIN APP =================
511
+
512
  # Header
513
+ st.title("Chat Flow 🕷")
514
+ st.caption("10 powerful models, one simple chat.")
515
 
516
  # Sidebar
517
  with st.sidebar:
518
+ # New Chat Button (BLACK)
519
+ if st.button("➕ New Chat", use_container_width=True, type="primary"):
520
+ start_new_chat()
521
+ st.rerun()
522
+
523
+ st.divider()
524
+
525
+ # ONLINE USERS SECTION
526
+ online_count = show_online_users()
527
+
528
+ # Update user tracking
529
+ update_online_users()
530
 
531
+ # Quick refresh for users
532
+ if st.button("🔄 Refresh Users", use_container_width=True):
533
+ st.rerun()
534
+
535
+ st.divider()
536
+
537
+ # Chat Sessions
538
+ sessions = load_chat_sessions()
539
+ current_session_id = get_session_id()
540
+
541
+ if sessions:
542
+ st.subheader("Previous Chats")
543
+ sorted_sessions = sorted(sessions.items(), key=lambda x: x[1].get("updated_at", x[1].get("created_at", "")), reverse=True)
544
+
545
+ for session_id, session_data in sorted_sessions:
546
+ if session_id == current_session_id:
547
+ st.markdown(f"🔹 **{session_data['title']}**")
548
+ else:
549
+ col_load, col_delete = st.columns([3, 1])
550
+ with col_load:
551
+ if st.button(f"💭 {session_data['title']}", key=f"load_{session_id}", use_container_width=True):
552
+ if st.session_state.messages:
553
+ save_current_session()
554
+ load_session(session_id)
555
+ st.rerun()
556
+ with col_delete:
557
+ if st.button("✕", key=f"delete_{session_id}"):
558
+ delete_session(session_id)
559
+ if session_id == current_session_id:
560
+ start_new_chat()
561
+ st.rerun()
562
+
563
+ if "updated_at" in session_data:
564
+ update_time = datetime.fromisoformat(session_data["updated_at"])
565
+ st.caption(f"Updated: {update_time.strftime('%m/%d %H:%M')}")
566
+ st.markdown("---")
567
+ else:
568
+ st.info("No previous chats yet")
569
+
570
+ if st.session_state.messages:
571
+ save_current_session()
572
+
573
+ st.divider()
574
+
575
+ # Settings Section
576
+ st.header("Settings")
577
  status = check_api_status()
578
  if status == "Connected":
579
  st.success("🟒 API Connected")
 
581
  st.error("No API Key")
582
  else:
583
  st.warning("Connection Issue")
584
+
585
  st.divider()
586
+
587
+ # Model Selection
588
  models = [
589
  ("GPT-3.5 Turbo", "openai/gpt-3.5-turbo"),
590
  ("LLaMA 3.1 8B", "meta-llama/llama-3.1-8b-instruct"),
 
597
  ("Gemma 3 4B", "google/gemma-3-4b-it:free"),
598
  ("Auto (Best Available)", "openrouter/auto")
599
  ]
600
+
601
  model_names = [name for name, _ in models]
602
  model_ids = [model_id for _, model_id in models]
603
+
604
+ selected_index = st.selectbox("Model", range(len(model_names)),
605
+ format_func=lambda x: model_names[x],
606
+ index=0)
607
  selected_model = model_ids[selected_index]
608
+
609
  # Show selected model ID in green
610
  st.markdown(f"**Model ID:** <span class='model-id'>{selected_model}</span>", unsafe_allow_html=True)
611
+
612
  st.divider()
613
+
614
  # Chat History Controls
615
  st.header("Chat History")
616
+
617
  # Show number of messages
618
  if st.session_state.messages:
619
  st.info(f"Messages stored: {len(st.session_state.messages)}")
620
+
621
  # Auto-save toggle
622
  auto_save = st.checkbox("Auto-save messages", value=True)
623
+
624
  # Manual save/load buttons
625
  col1, col2 = st.columns(2)
626
  with col1:
627
  if st.button("Save History", use_container_width=True):
628
  save_chat_history(st.session_state.messages)
629
  st.success("History saved!")
630
+
631
  with col2:
632
  if st.button("Load History", use_container_width=True):
633
  st.session_state.messages = load_chat_history()
634
  st.success("History loaded!")
635
  st.rerun()
636
+
637
  st.divider()
638
+
639
  # View History
640
  if st.button("View History File", use_container_width=True):
641
  if os.path.exists(HISTORY_FILE):
 
644
  st.text_area("Chat History (JSON)", history_content, height=200)
645
  else:
646
  st.warning("No history file found")
647
+
648
  # Download History
649
  if os.path.exists(HISTORY_FILE):
650
  with open(HISTORY_FILE, 'rb') as f:
 
655
  mime="application/json",
656
  use_container_width=True
657
  )
658
+
659
  st.divider()
660
+
661
  # Clear controls
662
  if st.button("Clear Chat", use_container_width=True, type="secondary"):
663
  clear_chat_history()
664
  st.success("Chat cleared!")
665
  st.rerun()
666
 
667
+ # ================= MAIN CHAT AREA =================
 
 
668
 
669
  # Display chat messages
670
  for message in st.session_state.messages:
 
677
  if len(parts) > 1:
678
  model_name = parts[1].replace("***", "").replace("**", "")
679
  st.markdown(main_content)
680
+ st.markdown(
681
+ f"<div class='model-attribution'>Response created by: <strong>{model_name}</strong></div>",
682
+ unsafe_allow_html=True)
683
  else:
684
  st.markdown(message["content"])
685
  else:
686
  st.markdown(message["content"])
687
 
688
+ # Chat input - MAIN CHAT FUNCTIONALITY
689
+ if prompt := st.chat_input("Chat smarter. Chat with many brains."):
690
+ # Update user tracking when user sends message
691
+ update_online_users()
692
+
693
  # Add user message
694
  user_message = {"role": "user", "content": prompt}
695
  st.session_state.messages.append(user_message)
696
+
697
  # Auto-save if enabled
698
+ if 'auto_save' not in locals():
699
+ auto_save = True
700
+
701
  if auto_save:
702
  save_chat_history(st.session_state.messages)
703
 
704
+ # Always auto-save the current session
705
+ save_current_session()
706
+
707
  # Display user message
708
  with st.chat_message("user"):
709
  st.markdown(prompt)
710
+
711
  # Get AI response
712
  with st.chat_message("assistant"):
713
  placeholder = st.empty()
714
+
715
  full_response = ""
716
  try:
717
  for response in get_ai_response(st.session_state.messages, selected_model):
718
  full_response = response
719
  placeholder.markdown(full_response + "β–Œ")
720
+
721
  # Remove cursor and show final response
722
  placeholder.markdown(full_response)
723
+
724
  except Exception as e:
725
  error_msg = f"An error occurred: {str(e)}"
726
  placeholder.markdown(error_msg)
727
  full_response = error_msg
728
+
729
  # Add AI response to messages with attribution
730
  full_response_with_attribution = full_response + f"\n\n---\n*Response created by: **{model_names[selected_index]}***"
731
  assistant_message = {"role": "assistant", "content": full_response_with_attribution}
732
  st.session_state.messages.append(assistant_message)
733
+
734
  # Auto-save if enabled
735
  if auto_save:
736
  save_chat_history(st.session_state.messages)
737
+
738
+ # Always auto-save the current session
739
+ save_current_session()
740
 
741
  # Show currently using model
742
  st.caption(f"Currently using: **{model_names[selected_index]}**")
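As a companion to the session-persistence helpers added above (save_chat_sessions, save_current_session), here is a small, self-contained sketch for inspecting the resulting chat_sessions.json from the command line. The field names ("title", "messages", "created_at", "updated_at") come from save_current_session; the list_sessions helper itself is illustrative and not part of app.py:

import json
from datetime import datetime

def list_sessions(path="chat_sessions.json"):
    """Print every saved chat session, most recently updated first."""
    with open(path, "r", encoding="utf-8") as f:
        sessions = json.load(f)
    # Mirror the sidebar ordering: sort by updated_at, falling back to created_at.
    ordered = sorted(
        sessions.items(),
        key=lambda item: item[1].get("updated_at", item[1].get("created_at", "")),
        reverse=True,
    )
    for session_id, data in ordered:
        stamp = data.get("updated_at") or data.get("created_at") or ""
        when = datetime.fromisoformat(stamp).strftime("%m/%d %H:%M") if stamp else "unknown"
        title = data.get("title", "New Chat")
        count = len(data.get("messages", []))
        print(f"{session_id[:8]}  {when}  {title} ({count} messages)")

if __name__ == "__main__":
    list_sessions()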