import React, { useState, useEffect, useRef } from 'react';
import { Send, Plus, MessageSquare, Settings, Users, Download, Trash2, RefreshCw, Bot } from 'lucide-react';

const ChatFlowApp = () => {
  const [messages, setMessages] = useState([]);
  const [currentMessage, setCurrentMessage] = useState('');
  const [selectedModel, setSelectedModel] = useState('openai/gpt-3.5-turbo');
  const [isLoading, setIsLoading] = useState(false);
  const [sessions, setSessions] = useState([]);
  const [currentSessionId, setCurrentSessionId] = useState('default');
  const [onlineUsers, setOnlineUsers] = useState(1);
  const [apiStatus, setApiStatus] = useState('Connected');
  const [autoSave, setAutoSave] = useState(true);
  const messagesEndRef = useRef(null);
  const [userId] = useState('User-' + Math.random().toString(36).slice(2, 10));

  const models = [
    { name: "GPT-3.5 Turbo", id: "openai/gpt-3.5-turbo" },
    { name: "LLaMA 3.1 8B", id: "meta-llama/llama-3.1-8b-instruct" },
    { name: "LLaMA 3.1 70B", id: "meta-llama/llama-3.1-70b-instruct" },
    { name: "DeepSeek Chat v3", id: "deepseek/deepseek-chat-v3-0324:free" },
    { name: "DeepSeek R1", id: "deepseek/deepseek-r1-0528:free" },
    { name: "Qwen3 Coder", id: "qwen/qwen3-coder:free" },
    { name: "Microsoft MAI DS R1", id: "microsoft/mai-ds-r1:free" },
    { name: "Gemma 3 27B", id: "google/gemma-3-27b-it:free" },
    { name: "Gemma 3 4B", id: "google/gemma-3-4b-it:free" },
    { name: "Auto (Best Available)", id: "openrouter/auto" }
  ];

  const scrollToBottom = () => {
    messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
  };

  useEffect(() => {
    scrollToBottom();
  }, [messages]);

  useEffect(() => {
    // Simulate online users update
    const interval = setInterval(() => {
      setOnlineUsers(Math.floor(Math.random() * 5) + 1);
    }, 10000);
    return () => clearInterval(interval);
  }, []);

  useEffect(() => {
    // Check API status on component mount
    const checkAPIStatus = async () => {
      try {
        const OPENROUTER_API_KEY =
          process.env.REACT_APP_OPENROUTER_API_KEY ||
          window.OPENROUTER_API_KEY ||
          process.env.OPENROUTER_API_KEY;
        if (!OPENROUTER_API_KEY) {
          setApiStatus('No API Key');
          return;
        }
        const response = await fetch("https://openrouter.ai/api/v1/models", {
          headers: { "Authorization": `Bearer ${OPENROUTER_API_KEY}` }
        });
        setApiStatus(response.ok ? 'Connected' : 'Error');
      } catch {
        setApiStatus('Error');
      }
    };
    checkAPIStatus();
  }, []);
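  // The key lookups above cover a few hosting setups. A minimal sketch of how the key
  // could be supplied (the file names and injection method below are assumptions, not
  // part of this repo):
  //
  //   # .env (Create React App exposes REACT_APP_-prefixed vars at build time)
  //   REACT_APP_OPENROUTER_API_KEY=sk-or-...
  //
  //   <!-- index.html: inject at runtime, e.g. from a Hugging Face Space secret -->
  //   <script>window.OPENROUTER_API_KEY = "sk-or-...";</script>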
  const generateChatTitle = (msgs) => {
    if (!msgs || msgs.length === 0) return "New Chat";
    const firstUserMessage = msgs.find(m => m.role === 'user');
    if (!firstUserMessage) return "New Chat";
    const content = firstUserMessage.content;
    return content.length > 30 ? content.substring(0, 30) + "..." : content;
  };

  const startNewChat = () => {
    if (messages.length > 0) {
      const newSession = {
        id: 'session-' + Date.now(),
        title: generateChatTitle(messages),
        messages: [...messages],
        createdAt: new Date().toISOString(),
        updatedAt: new Date().toISOString()
      };
      setSessions(prev => [newSession, ...prev]);
    }
    setMessages([]);
    setCurrentSessionId('session-' + Date.now());
  };

  const loadSession = (session) => {
    if (messages.length > 0) {
      const currentSession = {
        id: currentSessionId,
        title: generateChatTitle(messages),
        messages: [...messages],
        createdAt: new Date().toISOString(),
        updatedAt: new Date().toISOString()
      };
      setSessions(prev => {
        const filtered = prev.filter(s => s.id !== currentSessionId);
        return [currentSession, ...filtered];
      });
    }
    setMessages(session.messages);
    setCurrentSessionId(session.id);
  };

  const deleteSession = (sessionId) => {
    setSessions(prev => prev.filter(s => s.id !== sessionId));
    if (sessionId === currentSessionId) {
      // Discard the deleted chat instead of re-saving it as a new session
      setMessages([]);
      setCurrentSessionId('session-' + Date.now());
    }
  };

  // OpenRouter API integration
  const getAIResponse = async (userMessage) => {
    setIsLoading(true);
    try {
      // Get API key from environment variables (Hugging Face Spaces secrets)
      const OPENROUTER_API_KEY =
        process.env.REACT_APP_OPENROUTER_API_KEY ||
        window.OPENROUTER_API_KEY ||
        process.env.OPENROUTER_API_KEY;
      if (!OPENROUTER_API_KEY) {
        throw new Error("No API key found. Please add OPENROUTER_API_KEY to environment variables.");
      }

      const url = "https://openrouter.ai/api/v1/chat/completions";
      const headers = {
        "Content-Type": "application/json",
        "Authorization": `Bearer ${OPENROUTER_API_KEY}`,
        "HTTP-Referer": "https://huggingface.co/spaces",
        "X-Title": "Chat Flow AI Assistant"
      };

      // Prepare messages for API
      const apiMessages = [
        { role: "system", content: "You are a helpful AI assistant. Provide clear and helpful responses." },
        ...messages.map(msg => ({
          role: msg.role,
          content: msg.content.split('\n\n---\n*Response created by:')[0] // Remove attribution from content
        })),
        { role: "user", content: userMessage }
      ];

      const data = {
        model: selectedModel,
        messages: apiMessages,
        stream: false, // Set to false for simpler handling in React
        max_tokens: 2000,
        temperature: 0.7,
        top_p: 1,
        frequency_penalty: 0,
        presence_penalty: 0
      };

      const response = await fetch(url, {
        method: 'POST',
        headers: headers,
        body: JSON.stringify(data)
      });

      if (!response.ok) {
        let errorDetail = "";
        try {
          const errorData = await response.json();
          errorDetail = errorData.error?.message || `HTTP ${response.status}`;
        } catch {
          errorDetail = `HTTP ${response.status}: ${response.statusText}`;
        }
        throw new Error(`API Error: ${errorDetail}. Please try a different model or check your API key.`);
      }

      const result = await response.json();
      const aiResponse = result.choices[0].message.content;
      const selectedModelName = models.find(m => m.id === selectedModel)?.name || "AI";
      setIsLoading(false);
      return aiResponse + `\n\n---\n*Response created by: **${selectedModelName}***`;
    } catch (error) {
      setIsLoading(false);
      console.error('API Error:', error);
      if (error.message.includes('timeout')) {
        return "Request timed out. Please try again with a shorter message or different model.";
      } else if (error.message.includes('Connection')) {
        return "Connection error. Please check your internet connection and try again.";
      } else {
        return `Error: ${error.message}`;
      }
    }
  };
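  // For reference, the chat completions response parsed above is OpenAI-compatible;
  // the fields this component reads look roughly like this (values illustrative only):
  //
  //   {
  //     "id": "gen-...",
  //     "model": "openai/gpt-3.5-turbo",
  //     "choices": [
  //       { "index": 0, "message": { "role": "assistant", "content": "..." }, "finish_reason": "stop" }
  //     ]
  //   }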
  const handleSendMessage = async (e) => {
    e.preventDefault();
    if (!currentMessage.trim() || isLoading) return;

    const userMessage = {
      role: 'user',
      content: currentMessage.trim(),
      timestamp: new Date().toISOString()
    };
    setMessages(prev => [...prev, userMessage]);

    const messageToSend = currentMessage.trim();
    setCurrentMessage('');

    try {
      const aiResponse = await getAIResponse(messageToSend);
      const assistantMessage = {
        role: 'assistant',
        content: aiResponse,
        timestamp: new Date().toISOString()
      };
      setMessages(prev => [...prev, assistantMessage]);
    } catch (error) {
      const errorMessage = {
        role: 'assistant',
        content: 'Sorry, I encountered an error. Please try again.',
        timestamp: new Date().toISOString()
      };
      setMessages(prev => [...prev, errorMessage]);
    }
  };

  const clearChat = () => {
    setMessages([]);
  };

  const downloadHistory = () => {
    const dataStr = JSON.stringify(messages, null, 2);
    const dataBlob = new Blob([dataStr], { type: 'application/json' });
    const url = URL.createObjectURL(dataBlob);
    const link = document.createElement('a');
    link.href = url;
    link.download = `chat_history_${new Date().toISOString().split('T')[0]}.json`;
    link.click();
    URL.revokeObjectURL(url);
  };

  const getSelectedModelName = () => {
    return models.find(m => m.id === selectedModel)?.name || "GPT-3.5 Turbo";
  };
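  // downloadHistory above serializes the in-memory messages array as-is, so the exported
  // file is a JSON array of the message objects built in handleSendMessage, e.g.
  // (illustrative values):
  //
  //   [
  //     { "role": "user", "content": "Hello", "timestamp": "2024-01-01T00:00:00.000Z" },
  //     { "role": "assistant", "content": "Hi! ...", "timestamp": "2024-01-01T00:00:05.000Z" }
  //   ]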
  return (
    // Minimal layout: the original markup is truncated here, so the structure and class
    // names below are a sketch wired to the state and handlers defined above.
    <div className="chat-flow-app">
      {/* Sidebar: new chat + saved sessions */}
      <aside className="sidebar">
        <button onClick={startNewChat}>
          <Plus size={16} /> New Chat
        </button>
        {sessions.length === 0 ? (
          <p className="empty-state">No previous chats yet</p>
        ) : (
          sessions.map(session => (
            <div key={session.id} className="session-item">
              <button onClick={() => loadSession(session)}>
                <MessageSquare size={14} /> {session.title}
              </button>
              <button onClick={() => deleteSession(session.id)} aria-label="Delete chat">
                <Trash2 size={14} />
              </button>
            </div>
          ))
        )}
        <div className="sidebar-footer">
          <span>You: {userId}</span>
          <span><Users size={14} /> {onlineUsers} online</span>
          <span>API: {apiStatus}</span>
          <label>
            <input
              type="checkbox"
              checked={autoSave}
              onChange={(e) => setAutoSave(e.target.checked)}
            />
            Auto-save
          </label>
        </div>
      </aside>

      {/* Main chat area */}
      <main className="chat-area">
        <header className="chat-header">
          <div className="model-picker">
            <Bot size={16} />
            <select value={selectedModel} onChange={(e) => setSelectedModel(e.target.value)}>
              {models.map(model => (
                <option key={model.id} value={model.id}>{model.name}</option>
              ))}
            </select>
            <span className="model-id">{selectedModel}</span>
          </div>
          <div className="chat-actions">
            <button onClick={clearChat} title="Clear chat"><RefreshCw size={16} /></button>
            <button onClick={downloadHistory} title="Download history"><Download size={16} /></button>
          </div>
        </header>

        <div className="messages">
          {messages.length === 0 ? (
            <div className="welcome">
              <h2>Chat Flow ({getSelectedModelName()})</h2>
              <p>A personal assistant streamlines your life by managing tasks, schedules, and communications efficiently.</p>
            </div>
          ) : (
            messages.map((message, index) => (
              <div key={index} className={`message ${message.role}`}>
                <p>{message.content}</p>
              </div>
            ))
          )}
          {isLoading && <p className="loading">Thinking…</p>}
          <div ref={messagesEndRef} />
        </div>

        <form className="message-input" onSubmit={handleSendMessage}>
          <input
            type="text"
            value={currentMessage}
            onChange={(e) => setCurrentMessage(e.target.value)}
            placeholder="Type a message…"
            disabled={isLoading}
          />
          <button type="submit" disabled={isLoading || !currentMessage.trim()}>
            <Send size={16} />
          </button>
        </form>
      </main>
    </div>
  );
};

export default ChatFlowApp;
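// Example mount (a minimal sketch assuming a Create React App-style entry point;
// the file path src/index.js and the "root" element id are assumptions):
//
//   import React from 'react';
//   import { createRoot } from 'react-dom/client';
//   import ChatFlowApp from './ChatFlowApp';
//
//   createRoot(document.getElementById('root')).render(<ChatFlowApp />);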