chatflow / app.py
uumerrr684's picture
Update app.py
ce9fd39 verified
raw
history blame
18.2 kB
import requests
import os
import json
import streamlit as st
from datetime import datetime, timedelta
import time
import uuid
# Page configuration - IMPROVED for mobile
st.set_page_config(
page_title="Chat Flow πŸ•·",
page_icon="πŸ’¬",
initial_sidebar_state="auto", # Changed from collapsed to auto
layout="wide" # Better for mobile
)
# MOBILE-RESPONSIVE CSS
st.markdown("""
<style>
/* Main app styling */
.stApp {
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
min-height: 100vh;
}
/* Container responsive sizing */
.main .block-container {
max-width: 100%;
padding-left: 1rem;
padding-right: 1rem;
padding-top: 1rem;
}
/* Hide Streamlit branding */
#MainMenu {visibility: hidden;}
footer {visibility: hidden;}
header {visibility: hidden;}
.stDeployButton {display: none;}
/* Chat container styling */
.stChatMessage {
background: rgba(255, 255, 255, 0.95);
border-radius: 15px;
margin: 0.5rem 0;
padding: 1rem;
box-shadow: 0 2px 10px rgba(0,0,0,0.1);
backdrop-filter: blur(10px);
}
/* User messages - right aligned with blue background */
.stChatMessage[data-testid="user-message"] {
background: linear-gradient(135deg, #4facfe 0%, #00f2fe 100%);
color: white;
margin-left: 20%;
}
/* Assistant messages - left aligned with white background */
.stChatMessage[data-testid="assistant-message"] {
background: rgba(255, 255, 255, 0.98);
color: #333;
margin-right: 20%;
}
/* Mobile responsiveness */
@media (max-width: 768px) {
.main .block-container {
padding: 0.5rem;
max-width: 100%;
}
.stChatMessage[data-testid="user-message"] {
margin-left: 10%;
font-size: 14px;
}
.stChatMessage[data-testid="assistant-message"] {
margin-right: 10%;
font-size: 14px;
}
/* Make sidebar collapsible on mobile */
.css-1d391kg {
width: 100% !important;
min-width: 100% !important;
}
/* Header adjustments for mobile */
h1 {
font-size: 1.5rem !important;
text-align: center;
}
.stSelectbox label {
font-size: 14px;
}
.stButton button {
font-size: 14px;
padding: 0.5rem;
}
}
/* Extra small screens (iPhone 5, etc) */
@media (max-width: 480px) {
.main .block-container {
padding: 0.25rem;
}
.stChatMessage {
padding: 0.75rem;
margin: 0.25rem 0;
}
.stChatMessage[data-testid="user-message"] {
margin-left: 5%;
}
.stChatMessage[data-testid="assistant-message"] {
margin-right: 5%;
}
h1 {
font-size: 1.2rem !important;
}
.stChatInput {
font-size: 16px; /* Prevents zoom on iOS */
}
}
/* Chat input styling */
.stChatInput {
background: rgba(255, 255, 255, 0.9);
border-radius: 25px;
border: 2px solid rgba(255, 255, 255, 0.3);
backdrop-filter: blur(10px);
}
.stChatInput input {
background: transparent;
border: none;
color: #333;
}
.stChatInput input::placeholder {
color: #666;
}
/* Sidebar improvements */
.css-1d391kg {
background: rgba(255, 255, 255, 0.95);
backdrop-filter: blur(15px);
}
/* Model info styling */
.model-id {
color: #28a745;
font-family: monospace;
font-weight: bold;
}
.model-attribution {
color: #28a745;
font-size: 0.8em;
font-style: italic;
text-align: right;
margin-top: 0.5rem;
padding-top: 0.5rem;
border-top: 1px solid #eee;
}
/* Status indicators */
.online-indicator {
display: inline-block;
width: 10px;
height: 10px;
background: #28a745;
border-radius: 50%;
margin-right: 5px;
}
/* Button improvements */
.stButton button {
border-radius: 10px;
border: none;
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
color: white;
transition: all 0.3s;
}
.stButton button:hover {
transform: translateY(-2px);
box-shadow: 0 4px 15px rgba(0,0,0,0.2);
}
/* Title styling */
h1 {
color: white;
text-shadow: 2px 2px 4px rgba(0,0,0,0.3);
margin-bottom: 0;
}
.stCaption {
color: rgba(255, 255, 255, 0.8);
text-align: center;
}
/* Responsive text sizing */
@media (max-width: 600px) {
.stMarkdown {
font-size: 14px;
}
.stCaption {
font-size: 12px;
}
}
</style>
""", unsafe_allow_html=True)
# File to store chat history (a JSON list of {"role", "content"} dicts,
# written by save_chat_history / read by load_chat_history)
HISTORY_FILE = "chat_history.json"
# File to store online users (a JSON map of user_id -> {"last_seen", "name"},
# maintained by update_online_users; shared across all sessions on this host)
USERS_FILE = "online_users.json"
def load_chat_history():
    """Return the saved chat messages, or an empty list when none exist.

    Any read/parse failure is surfaced in the UI and treated as "no history".
    """
    if not os.path.exists(HISTORY_FILE):
        return []
    try:
        with open(HISTORY_FILE, 'r', encoding='utf-8') as f:
            return json.load(f)
    except Exception as e:
        st.error(f"Error loading chat history: {e}")
        return []
def save_chat_history(messages):
    """Write the message list to HISTORY_FILE as readable UTF-8 JSON."""
    try:
        # Serialize first, then write in one go.
        payload = json.dumps(messages, ensure_ascii=False, indent=2)
        with open(HISTORY_FILE, 'w', encoding='utf-8') as f:
            f.write(payload)
    except Exception as e:
        st.error(f"Error saving chat history: {e}")
def clear_chat_history():
    """Delete the saved history file and reset the in-memory message list."""
    try:
        # EAFP: attempt the removal and ignore "already gone".
        try:
            os.remove(HISTORY_FILE)
        except FileNotFoundError:
            pass
        st.session_state.messages = []
    except Exception as e:
        st.error(f"Error clearing chat history: {e}")
# User tracking functions
def get_user_id():
    """Return this session's short unique id, minting one on first use.

    The id is the first 8 characters of a random UUID and lives in
    st.session_state, so it is stable for the lifetime of the session.
    """
    uid = st.session_state.get('user_id')
    if uid is None:
        uid = str(uuid.uuid4())[:8]
        st.session_state.user_id = uid
    return uid
def update_online_users():
    """Mark this session as online and return the active-session count.

    Reads the shared USERS_FILE, stamps this user's last_seen, prunes
    sessions idle for 5+ minutes, and writes the file back. Best-effort:
    any failure reports a count of 1 (just this user).
    """
    try:
        all_users = {}
        if os.path.exists(USERS_FILE):
            with open(USERS_FILE, 'r') as fh:
                all_users = json.load(fh)

        me = get_user_id()
        all_users[me] = {
            'last_seen': datetime.now().isoformat(),
            'name': f'User-{me}'
        }

        # Keep only sessions seen within the last 5 minutes.
        cutoff = datetime.now() - timedelta(minutes=5)
        alive = {
            uid: info
            for uid, info in all_users.items()
            if datetime.fromisoformat(info['last_seen']) > cutoff
        }

        with open(USERS_FILE, 'w') as fh:
            json.dump(alive, fh, indent=2)
        return len(alive)
    except Exception:
        # Shared file unreadable/corrupt — fall back to "just me".
        return 1
def get_online_count():
    """Count sessions whose last_seen timestamp is under 5 minutes old.

    Returns 0 when the users file is missing or unreadable.
    """
    try:
        if not os.path.exists(USERS_FILE):
            return 0
        with open(USERS_FILE, 'r') as fh:
            users = json.load(fh)
        cutoff = datetime.now() - timedelta(minutes=5)
        return sum(
            1
            for info in users.values()
            if datetime.fromisoformat(info['last_seen']) > cutoff
        )
    except Exception:
        return 0
# Initialize session state with saved history
# (only on the first run of this session; reruns keep the in-memory list)
if "messages" not in st.session_state:
    st.session_state.messages = load_chat_history()
# Get API key
# Read once at module level; None when the env var is unset.
OPENROUTER_API_KEY = os.environ.get("OPENROUTER_API_KEY")
@st.cache_data(ttl=300)
def check_api_status():
    """Probe the OpenRouter models endpoint and report connectivity.

    Returns one of "Connected", "No API Key", or "Error". Cached for
    5 minutes (ttl=300) so the sidebar does not hit the network on
    every Streamlit rerun.
    """
    if not OPENROUTER_API_KEY:
        return "No API Key"
    try:
        url = "https://openrouter.ai/api/v1/models"
        headers = {"Authorization": f"Bearer {OPENROUTER_API_KEY}"}
        response = requests.get(url, headers=headers, timeout=10)
        return "Connected" if response.status_code == 200 else "Error"
    except requests.RequestException:
        # Narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; all network/HTTP failures from
        # requests derive from RequestException.
        return "Error"
def get_ai_response(messages, model="openai/gpt-3.5-turbo"):
    """Stream an assistant reply from the OpenRouter chat-completions API.

    Parameters
    ----------
    messages : list[dict]
        Chat history as [{"role": ..., "content": ...}, ...]; a fixed
        system prompt is prepended before sending.
    model : str
        OpenRouter model identifier.

    Yields
    ------
    str
        The *accumulated* response text after each streamed delta, so the
        caller can progressively re-render a single message. Error
        conditions are also yielded as user-facing strings rather than
        raised.
    """
    if not OPENROUTER_API_KEY:
        # BUG FIX: this used to `return` the message. Inside a generator,
        # `return <value>` discards the value, so the caller's for-loop
        # received nothing and the user saw an empty reply.
        yield "No API key found. Please add OPENROUTER_API_KEY to environment variables."
        return

    url = "https://openrouter.ai/api/v1/chat/completions"
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {OPENROUTER_API_KEY}",
        "HTTP-Referer": "http://localhost:8501",
        "X-Title": "Streamlit AI Assistant"
    }
    api_messages = [{"role": "system", "content": "You are a helpful AI assistant. Provide clear and helpful responses."}]
    api_messages.extend(messages)
    # Renamed from `data` so the streamed JSON chunks below no longer
    # shadow the request payload.
    payload = {
        "model": model,
        "messages": api_messages,
        "stream": True,
        "max_tokens": 2000,
        "temperature": 0.7,
        "top_p": 1,
        "frequency_penalty": 0,
        "presence_penalty": 0
    }
    try:
        response = requests.post(url, headers=headers, json=payload, stream=True, timeout=60)
        if response.status_code != 200:
            try:
                error_data = response.json()
                error_detail = error_data.get('error', {}).get('message', f"HTTP {response.status_code}")
            except (ValueError, KeyError, AttributeError):
                # Body was not the expected JSON error envelope
                # (narrowed from a bare `except:`).
                error_detail = f"HTTP {response.status_code}: {response.reason}"
            yield f"API Error: {error_detail}. Please try a different model or check your API key."
            return

        full_response = ""
        # OpenRouter streams Server-Sent Events: `data: <json>` lines,
        # terminated by a literal `data: [DONE]`.
        for line in response.iter_lines():
            if not line or not line.startswith(b"data: "):
                continue
            data_str = line[len(b"data: "):].decode("utf-8")
            if data_str.strip() == "[DONE]":
                break
            try:
                chunk = json.loads(data_str)
                delta = chunk["choices"][0]["delta"].get("content", "")
            except json.JSONDecodeError:
                continue  # partial/keep-alive line — skip
            except (KeyError, IndexError):
                continue  # chunk without the expected choices/delta shape
            if delta:
                full_response += delta
                yield full_response
    except requests.exceptions.Timeout:
        yield "Request timed out. Please try again with a shorter message or different model."
    except requests.exceptions.ConnectionError:
        yield "Connection error. Please check your internet connection and try again."
    except requests.exceptions.RequestException as e:
        yield f"Request error: {str(e)}. Please try again."
    except Exception as e:
        yield f"Unexpected error: {str(e)}. Please try again or contact support."
# MAIN UI LAYOUT - Mobile First Approach
# Centre the title by placing it in the middle of a 1:2:1 column split.
col1, col2, col3 = st.columns([1, 2, 1])
with col2:
    st.title("Chat Flow 🕷")
    st.caption("10 powerful Models, one simple chat.")
# Mobile-friendly sidebar
with st.sidebar:
    st.header("⚙️ Settings")
    # API Status with better mobile display
    status = check_api_status()
    if status == "Connected":
        st.success("🟢 API Connected")
    elif status == "No API Key":
        st.error("❌ No API Key")
    else:
        st.warning("⚠️ Connection Issue")
    st.divider()
    # Online Users Section - Mobile Optimized
    st.header("👥 Live Users")
    # Also refreshes this session's presence timestamp as a side effect.
    online_count = update_online_users()
    if online_count == 1:
        st.info("🟢 Just you online")
    else:
        st.success(f"🟢 {online_count} people online")
    your_id = get_user_id()
    st.caption(f"You: User-{your_id}")
    if st.button("🔄 Refresh", use_container_width=True):
        st.rerun()
    # Debug section - collapsible for mobile
    with st.expander("🔍 Debug Info"):
        if os.path.exists(USERS_FILE):
            with open(USERS_FILE, 'r') as f:
                users = json.load(f)
            st.write(f"Users in file: {len(users)}")
            for uid, data in users.items():
                last_seen_time = datetime.fromisoformat(data['last_seen'])
                time_ago = datetime.now() - last_seen_time
                minutes_ago = int(time_ago.total_seconds() / 60)
                st.write(f"- {uid}: {minutes_ago} min ago")
        else:
            st.write("No users file yet")
    st.divider()
    # Model Selection - Mobile Optimized
    st.header("🤖 AI Models")
    # (display name, OpenRouter model id) pairs; ":free" ids are free-tier models.
    models = [
        ("GPT-3.5 Turbo", "openai/gpt-3.5-turbo"),
        ("LLaMA 3.1 8B", "meta-llama/llama-3.1-8b-instruct"),
        ("LLaMA 3.1 70B", "meta-llama/llama-3.1-70b-instruct"),
        ("DeepSeek Chat v3", "deepseek/deepseek-chat-v3-0324:free"),
        ("DeepSeek R1", "deepseek/deepseek-r1-0528:free"),
        ("Qwen3 Coder", "qwen/qwen3-coder:free"),
        ("Microsoft MAI DS R1", "microsoft/mai-ds-r1:free"),
        ("Gemma 3 27B", "google/gemma-3-27b-it:free"),
        ("Gemma 3 4B", "google/gemma-3-4b-it:free"),
        ("Auto (Best Available)", "openrouter/auto")
    ]
    model_names = [name for name, _ in models]
    model_ids = [model_id for _, model_id in models]
    # The selectbox stores an index; format_func shows the friendly name.
    selected_index = st.selectbox("Choose Model", range(len(model_names)),
                                  format_func=lambda x: model_names[x],
                                  index=0)
    selected_model = model_ids[selected_index]
    st.markdown(f"**Model ID:** <span class='model-id'>{selected_model}</span>", unsafe_allow_html=True)
    st.divider()
    # Chat History Controls - Mobile Friendly
    st.header("💬 Chat History")
    if st.session_state.messages:
        st.info(f"📝 {len(st.session_state.messages)} messages stored")
    # Read later by the chat-input handler at the bottom of the script.
    auto_save = st.checkbox("Auto-save messages", value=True)
    # Compact buttons for mobile
    col1, col2 = st.columns(2)
    with col1:
        if st.button("💾 Save", use_container_width=True):
            save_chat_history(st.session_state.messages)
            st.success("Saved!")
    with col2:
        if st.button("📂 Load", use_container_width=True):
            st.session_state.messages = load_chat_history()
            st.success("Loaded!")
            st.rerun()
    # Additional controls in expander to save space
    with st.expander("More Options"):
        if st.button("👁️ View History File", use_container_width=True):
            if os.path.exists(HISTORY_FILE):
                with open(HISTORY_FILE, 'r', encoding='utf-8') as f:
                    history_content = f.read()
                st.text_area("Chat History (JSON)", history_content, height=150)
            else:
                st.warning("No history file found")
        # Download History
        if os.path.exists(HISTORY_FILE):
            with open(HISTORY_FILE, 'rb') as f:
                st.download_button(
                    label="⬇️ Download History",
                    data=f.read(),
                    file_name=f"chat_history_{datetime.now().strftime('%Y%m%d_%H%M%S')}.json",
                    mime="application/json",
                    use_container_width=True
                )
    if st.button("🗑️ Clear Chat", use_container_width=True, type="secondary"):
        clear_chat_history()
        st.success("Chat cleared!")
        st.rerun()
# Display chat messages with responsive design.
# Assistant messages may carry a trailing "Response created by" marker
# (appended by the input handler); render it as a styled attribution div.
for message in st.session_state.messages:
    role = message["role"]
    content = message["content"]
    with st.chat_message(role):
        tagged = role == "assistant" and "Response created by:" in content
        if not tagged:
            st.markdown(content)
        else:
            pieces = content.split("\n\n---\n*Response created by:")
            if len(pieces) <= 1:
                # Marker text present but not in the expected format.
                st.markdown(content)
            else:
                body = pieces[0]
                model_name = pieces[1].replace("***", "").replace("**", "")
                st.markdown(body)
                st.markdown(
                    f"<div class='model-attribution'>Response created by: <strong>{model_name}</strong></div>",
                    unsafe_allow_html=True,
                )
# Chat input with mobile-friendly placeholder
if prompt := st.chat_input("💬 Chat with AI... (works on all devices!)"):
    # Refresh this session's presence timestamp on every message.
    update_online_users()
    user_message = {"role": "user", "content": prompt}
    st.session_state.messages.append(user_message)
    # `auto_save` is the sidebar checkbox defined earlier in this script.
    if auto_save:
        save_chat_history(st.session_state.messages)
    with st.chat_message("user"):
        st.markdown(prompt)
    with st.chat_message("assistant"):
        placeholder = st.empty()
        full_response = ""
        try:
            # get_ai_response yields the accumulated text after each
            # streamed token; re-render with a trailing cursor glyph.
            for response in get_ai_response(st.session_state.messages, selected_model):
                full_response = response
                placeholder.markdown(full_response + "▌")
            placeholder.markdown(full_response)
        except Exception as e:
            error_msg = f"An error occurred: {str(e)}"
            placeholder.markdown(error_msg)
            full_response = error_msg
    # Tag the reply with the producing model; the display loop above
    # parses this exact marker back out on rerun.
    full_response_with_attribution = full_response + f"\n\n---\n*Response created by: **{model_names[selected_index]}***"
    assistant_message = {"role": "assistant", "content": full_response_with_attribution}
    st.session_state.messages.append(assistant_message)
    if auto_save:
        save_chat_history(st.session_state.messages)
# Mobile-friendly footer, centred with the same 1:2:1 split as the header.
_, footer_col, _ = st.columns([1, 2, 1])
with footer_col:
    st.caption(f"Currently using: **{model_names[selected_index]}**")
    st.caption("📱 Optimized for all mobile devices")