import React, { useState, useEffect, useRef, useContext } from 'react';
import { Mic, Loader, Eye, EyeOff, AlertCircle, RefreshCw, X, Send } from 'lucide-react';
import { animateOrb } from '../utils/animation.js';
import { ThemeContext } from '../main.jsx';
import ReactMarkdown from 'react-markdown';
import remarkGfm from 'remark-gfm';

const VoiceAssistant = () => {
  const [isListening, setIsListening] = useState(false);
  const [transcript, setTranscript] = useState('');
  const [textInput, setTextInput] = useState('');
  const [response, setResponse] = useState({ response: '', links: [], media_links: [], personal_info: [] });
  const [isProcessing, setIsProcessing] = useState(false);
  const [isPlaying, setIsPlaying] = useState(false);
  const [showResponse, setShowResponse] = useState(true);
  const [error, setError] = useState('');
  const [activeTab, setActiveTab] = useState('response');
  const [selectedMedia, setSelectedMedia] = useState(null);
  const [latency, setLatency] = useState(null);
  const [currentResponseText, setCurrentResponseText] = useState('');

  const canvasRef = useRef(null);
  const mediaRecorderRef = useRef(null);
  const websocketRef = useRef(null);
  const audioContextRef = useRef(null);
  const analyserRef = useRef(null);
  const silenceTimeoutRef = useRef(null);
  const maxRecordTimeoutRef = useRef(null);
  const audioQueueRef = useRef([]);
  const isPlayingRef = useRef(false);
  const currentAudioRef = useRef(null);
  const requestStartTimeRef = useRef(null);

  const { theme } = useContext(ThemeContext);

  // Initialize WebSocket
  useEffect(() => {
    websocketRef.current = new WebSocket('wss://abdullah-khaled-ai-voice-secretary.hf.space/ws');

    websocketRef.current.onopen = () => {
      console.log('WebSocket connected');
      setError('');
    };

    websocketRef.current.onmessage = async (event) => {
      try {
        console.log('WebSocket message received:', event.data);
        let data;
        try {
          data = JSON.parse(event.data);
        } catch (parseError) {
          console.error('Failed to parse WebSocket message:', parseError, 'Raw data:', event.data);
          setError('Invalid server response format. Please try again.');
          setIsProcessing(false);
          setLatency(null);
          return;
        }

        // Validate expected structure
        if (!data || typeof data !== 'object' || !('transcript' in data && 'response' in data && 'segment_index' in data)) {
          console.error('Unexpected message structure:', data);
          setError('Received malformed response from server. Please try again.');
          setIsProcessing(false);
          setLatency(null);
          return;
        }

        setTranscript(data.transcript || '');
        setCurrentResponseText(data.response?.response || '');

        if (data.is_last_segment) {
          setResponse(data.response || { response: '', links: [], media_links: [], personal_info: [] });
          setIsProcessing(false);
        } else {
          setIsProcessing(true);
        }

        // Calculate latency
        if (requestStartTimeRef.current && data.segment_index === -1) {
          const endTime = performance.now();
          const latencyMs = endTime - requestStartTimeRef.current;
          console.log(`Audio query latency: ${latencyMs.toFixed(2)} ms`);
          setLatency((latencyMs / 1000).toFixed(2));
          requestStartTimeRef.current = null;
        }

        if (data.audio_segment && !data.is_last_segment) {
          audioQueueRef.current.push(data.audio_segment);
          if (!isPlayingRef.current) {
            playNextAudio();
          }
        }
      } catch (err) {
        console.error('Error processing WebSocket message:', err, 'Raw data:', event.data);
        setError('Error processing server response. Please check the server logs and try again.');
        setIsProcessing(false);
        setLatency(null);
      }
    };

    websocketRef.current.onclose = () => {
      console.log('WebSocket disconnected');
      setError('WebSocket connection lost. Please refresh the page or check the server.');
      setIsProcessing(false);
      setIsListening(false);
      setIsPlaying(false);
      setLatency(null);
    };

    websocketRef.current.onerror = (error) => {
      console.error('WebSocket error:', error);
      setError('Error connecting to server. Please ensure the server at wss://abdullah-khaled-ai-voice-secretary.hf.space is reachable.');
      setIsProcessing(false);
      setIsListening(false);
      setIsPlaying(false);
      setLatency(null);
    };

    return () => {
      if (websocketRef.current) {
        websocketRef.current.close();
      }
    };
  }, []);
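  // Shape of the messages the effect above consumes, inferred from its
  // handler (field values here are illustrative, not actual server output):
  //   {
  //     "transcript": "What projects have you built?",   // recognized speech
  //     "response": { "response": "...", "links": [], "media_links": [], "personal_info": [] },
  //     "segment_index": 0,        // -1 marks the final (timing) segment
  //     "is_last_segment": false,  // true once the full response has streamed
  //     "audio_segment": "<base64-encoded WAV>"
  //   }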
  // Orb animation
  useEffect(() => {
    const canvas = canvasRef.current;
    if (canvas) {
      const cleanup = animateOrb(canvas, isListening);
      return cleanup;
    }
  }, [isListening]);

  // Play audio segments sequentially
  const playNextAudio = async () => {
    if (audioQueueRef.current.length === 0) {
      setIsPlaying(false);
      isPlayingRef.current = false;
      currentAudioRef.current = null;
      return;
    }

    setIsPlaying(true);
    isPlayingRef.current = true;
    const audioSegment = audioQueueRef.current.shift();

    try {
      // Ensure the audio segment is a valid base64 string
      if (!audioSegment || typeof audioSegment !== 'string') {
        throw new Error('Invalid audio segment received');
      }
      const base64String = audioSegment.startsWith('data:audio/wav;base64,')
        ? audioSegment
        : `data:audio/wav;base64,${audioSegment}`;
      const response = await fetch(base64String);
      if (!response.ok) {
        throw new Error(`Failed to fetch audio: ${response.statusText}`);
      }
      const audioBlob = await response.blob();
      const audioUrl = URL.createObjectURL(audioBlob);
      const audio = new Audio(audioUrl);
      currentAudioRef.current = audio;

      audio.onended = () => {
        URL.revokeObjectURL(audioUrl);
        currentAudioRef.current = null;
        playNextAudio();
      };
      audio.onerror = (error) => {
        console.error('Audio playback error:', error);
        URL.revokeObjectURL(audioUrl);
        currentAudioRef.current = null;
        setError('Failed to play audio response. Please try again.');
        setIsPlaying(false);
        isPlayingRef.current = false;
        audioQueueRef.current = [];
      };

      await audio.play();
    } catch (error) {
      console.error('Error playing audio:', error);
      setError('Error playing assistant response: ' + error.message);
      setIsPlaying(false);
      isPlayingRef.current = false;
      audioQueueRef.current = [];
      currentAudioRef.current = null;
    }
  };

  // Stop all audio playback
  const stopAudioPlayback = () => {
    if (currentAudioRef.current) {
      currentAudioRef.current.pause();
      currentAudioRef.current = null;
    }
    audioQueueRef.current = [];
    setIsPlaying(false);
    isPlayingRef.current = false;
  };

  // Check MediaRecorder and microphone availability
  const isMediaRecorderSupported = () => {
    return !!(navigator.mediaDevices && navigator.mediaDevices.getUserMedia && window.MediaRecorder);
  };

  const checkMicrophoneAvailability = async () => {
    try {
      const devices = await navigator.mediaDevices.enumerateDevices();
      const audioInputs = devices.filter(device => device.kind === 'audioinput');
      if (audioInputs.length === 0) {
        return { available: false, message: 'No microphone detected. Please connect a microphone and refresh the page.' };
      }
      return { available: true, message: '' };
    } catch (error) {
      console.error('Error checking devices:', error);
      return { available: false, message: 'Error accessing audio devices. Please ensure a microphone is connected and try again.' };
    }
  };
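  // playNextAudio decodes each segment by fetch()ing a data: URL, a compact
  // way to turn base64 into a Blob. An equivalent decode without the fetch
  // round-trip, sketched here for reference (assumes raw base64 WAV input):
  //
  //   const bytes = Uint8Array.from(atob(base64Wav), (c) => c.charCodeAt(0));
  //   const blob = new Blob([bytes], { type: 'audio/wav' });
  //   const url = URL.createObjectURL(blob);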
  // Get browser-specific permission instructions
  const getPermissionInstructions = () => {
    const userAgent = navigator.userAgent.toLowerCase();
    if (userAgent.includes('chrome')) {
      return 'Please enable microphone permissions in Chrome by clicking the lock icon in the address bar, setting "Microphone" to "Allow", and refreshing the page.';
    } else if (userAgent.includes('firefox')) {
      return 'Please enable microphone permissions in Firefox by clicking the permissions icon in the address bar, allowing microphone access, and refreshing the page.';
    } else if (userAgent.includes('safari')) {
      return 'Please enable microphone permissions in Safari by going to Safari > Settings > Websites > Microphone, setting this site to "Allow", and refreshing the page.';
    } else {
      return "Please enable microphone permissions in your browser settings and refresh the page. Check your browser's help documentation for specific instructions.";
    }
  };

  // Handle microphone recording with silence detection
  const handleMicClick = async () => {
    if (isListening) {
      if (mediaRecorderRef.current) {
        mediaRecorderRef.current.stop();
      }
      setIsListening(false);
      return;
    }

    stopAudioPlayback();

    if (!isMediaRecorderSupported()) {
      setError('Your browser does not support audio recording. Please use a modern browser like Chrome, Firefox, or Safari.');
      return;
    }

    const micCheck = await checkMicrophoneAvailability();
    if (!micCheck.available) {
      setError(micCheck.message);
      return;
    }

    try {
      // The Permissions API may be unavailable or may not recognize
      // 'microphone' (e.g. some Firefox versions); treat that as "unknown"
      // and fall through to getUserMedia, which prompts on its own.
      try {
        const permissionStatus = await navigator.permissions.query({ name: 'microphone' });
        if (permissionStatus.state === 'denied') {
          setError(`Microphone access is denied. ${getPermissionInstructions()}`);
          return;
        }
      } catch {
        // Permissions API not supported; getUserMedia handles the prompt.
      }

      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
      audioContextRef.current = new (window.AudioContext || window.webkitAudioContext)();
      analyserRef.current = audioContextRef.current.createAnalyser();
      const source = audioContextRef.current.createMediaStreamSource(stream);
      source.connect(analyserRef.current);
      analyserRef.current.fftSize = 2048;
      // getByteFrequencyData fills frequencyBinCount (fftSize / 2) entries,
      // so size the buffer accordingly; a fftSize-length buffer would leave
      // half its entries at zero and halve the computed average.
      const dataArray = new Uint8Array(analyserRef.current.frequencyBinCount);

      const mimeType = MediaRecorder.isTypeSupported('audio/wav') ? 'audio/wav' : 'audio/webm';
      mediaRecorderRef.current = new MediaRecorder(stream, { mimeType });
      const chunks = [];

      mediaRecorderRef.current.ondataavailable = (e) => {
        chunks.push(e.data);
      };

      mediaRecorderRef.current.onstop = async () => {
        const blob = new Blob(chunks, { type: mimeType });
        const reader = new FileReader();
        reader.onloadend = () => {
          const base64data = reader.result.split(',')[1];
          if (websocketRef.current && websocketRef.current.readyState === WebSocket.OPEN) {
            console.log('Sending audio data via WebSocket');
            requestStartTimeRef.current = performance.now();
            websocketRef.current.send(base64data);
            setIsProcessing(true);
          } else {
            setError('WebSocket connection is not open. Please try again.');
            setIsProcessing(false);
            setLatency(null);
          }
        };
        reader.readAsDataURL(blob);
        clearTimeout(silenceTimeoutRef.current);
        silenceTimeoutRef.current = null;
        clearTimeout(maxRecordTimeoutRef.current);
        // Release the microphone so the browser's recording indicator clears
        stream.getTracks().forEach((track) => track.stop());
        if (audioContextRef.current) {
          audioContextRef.current.close();
          audioContextRef.current = null;
        }
      };

      const detectSilence = () => {
        if (!analyserRef.current) return;
        analyserRef.current.getByteFrequencyData(dataArray);
        const average = dataArray.reduce((sum, value) => sum + value, 0) / dataArray.length;
        if (average < 10) {
          // Arm the silence timeout once; re-arming it on every silent frame
          // would keep pushing it back so it could never fire.
          if (!silenceTimeoutRef.current) {
            silenceTimeoutRef.current = setTimeout(() => {
              silenceTimeoutRef.current = null;
              if (mediaRecorderRef.current && mediaRecorderRef.current.state === 'recording') {
                mediaRecorderRef.current.stop();
                setIsListening(false);
              }
            }, 8000);
          }
        } else {
          clearTimeout(silenceTimeoutRef.current);
          silenceTimeoutRef.current = null;
        }
        // Loop while the recorder is active. The isListening state variable
        // is stale inside this closure (captured before setIsListening(true)
        // ran), so the recorder state is the reliable signal.
        if (mediaRecorderRef.current && mediaRecorderRef.current.state === 'recording') {
          requestAnimationFrame(detectSilence);
        }
      };

      mediaRecorderRef.current.start();
      setIsListening(true);
      setTranscript('Listening...');
      setError('');
      detectSilence();

      maxRecordTimeoutRef.current = setTimeout(() => {
        if (mediaRecorderRef.current && mediaRecorderRef.current.state === 'recording') {
          mediaRecorderRef.current.stop();
          setIsListening(false);
        }
      }, 40000);
    } catch (error) {
      console.error('Error accessing microphone:', error);
      let errorMessage = 'Unable to access microphone. ';
      if (error.name === 'NotFoundError' || error.name === 'DevicesNotFoundError') {
        errorMessage += 'No microphone found. Please connect a microphone and try again.';
      } else if (error.name === 'NotAllowedError' || error.name === 'PermissionDeniedError') {
        errorMessage += getPermissionInstructions();
      } else if (error.name === 'NotReadableError' || error.name === 'TrackStartError') {
        errorMessage += 'Microphone is in use by another application or not accessible. Please close other apps using the microphone and try again.';
      } else {
        errorMessage += 'An unexpected error occurred. Please ensure a microphone is connected, permissions are granted, and try again.';
      }
      setError(errorMessage);
      setIsListening(false);
      setLatency(null);
    }
  };
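  // Recording lifecycle, for reference: recording stops on a second mic
  // click, after 8 s of sustained silence (frequency average below 10), or
  // at the 40 s hard cap, whichever comes first; onstop then base64-encodes
  // the captured blob and ships it over the WebSocket.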
  // Handle text input submission
  const handleTextSubmit = async (e) => {
    e.preventDefault();
    if (!textInput.trim()) {
      setError('Please enter a query.');
      setLatency(null);
      return;
    }

    stopAudioPlayback();
    setTranscript(textInput);
    setIsProcessing(true);
    setError('');
    setLatency(null);
    setCurrentResponseText('');

    try {
      console.log('Sending POST request to /text_query with query:', textInput);
      requestStartTimeRef.current = performance.now();
      const response = await fetch('https://abdullah-khaled-ai-voice-secretary.hf.space/text_query', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'Accept': 'application/json',
        },
        body: JSON.stringify({ query: textInput }),
      });

      if (!response.ok) {
        const errorText = await response.text();
        console.error('Text query failed:', response.status, errorText);
        throw new Error(`HTTP error! Status: ${response.status}, Message: ${errorText}`);
      }

      const data = await response.json();
      if (!data.response) {
        throw new Error('Invalid response format from server');
      }

      const endTime = performance.now();
      const latencyMs = endTime - requestStartTimeRef.current;
      console.log(`Text query latency: ${latencyMs.toFixed(2)} ms`);
      setLatency((latencyMs / 1000).toFixed(2));
      requestStartTimeRef.current = null;

      setResponse(data);
      setCurrentResponseText(data.response);
      setIsProcessing(false);
      setTextInput('');
    } catch (error) {
      console.error('Error sending text query:', error);
      setError(`Failed to process text query: ${error.message}.`);
      setIsProcessing(false);
      setLatency(null);
    }
  };
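  // The /text_query endpoint, as used above, takes { "query": "..." } and
  // returns JSON with at least a `response` string; the extra fields shown
  // below are assumed from this component's state shape, not confirmed
  // server behavior:
  //   curl -X POST https://abdullah-khaled-ai-voice-secretary.hf.space/text_query \
  //     -H 'Content-Type: application/json' -d '{"query": "How can I contact you?"}'
  //   => { "response": "...", "links": [], "media_links": [], "personal_info": [] }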
  // Handle retry button click
  const handleRetryClick = () => {
    setError('');
    setLatency(null);
    handleMicClick();
  };

  // Handle media click for lightbox
  const handleMediaClick = (media) => {
    setSelectedMedia(media);
  };

  // Close lightbox
  const closeLightbox = () => {
    setSelectedMedia(null);
  };

  // Determine if the media is a video
  const isVideo = (url) => {
    return /\.(mp4|webm|ogg)$/i.test(url);
  };

  return (
    // The original markup for this render was lost in extraction; the tree
    // below is a minimal reconstruction that preserves the recoverable text,
    // handlers, and ReactMarkdown renderers. Class names and layout are
    // placeholders, not the original styling.
    <div className={`voice-assistant ${theme}`}>
      {/* Animated orb, driven by animateOrb while listening */}
      <canvas ref={canvasRef} className="orb-canvas" />

      <p className="transcript">
        {transcript || 'Click the microphone or type to start interacting'}
      </p>
      <p className="latency">
        Response time: {latency ? `${latency} seconds` : 'Not available'}
      </p>
      {isPlaying && <span className="playing-indicator">Playing response…</span>}

      {/* Mic button and text input */}
      <button onClick={handleMicClick} className={isListening ? 'listening' : ''} aria-label="Toggle microphone">
        {isProcessing ? <Loader className="spin" /> : <Mic />}
      </button>
      <form onSubmit={handleTextSubmit}>
        <input
          type="text"
          value={textInput}
          onChange={(e) => setTextInput(e.target.value)}
          placeholder="Type your query..."
        />
        <button type="submit" aria-label="Send query">
          <Send />
        </button>
      </form>

      {/* Toggle response visibility */}
      <button onClick={() => setShowResponse(!showResponse)} aria-label="Toggle response">
        {showResponse ? <EyeOff /> : <Eye />}
      </button>

      {showResponse && (
        <div className="response-panel">
          {/* Tab selector */}
          <nav>
            {['response', 'links', 'media', 'personal'].map((tab) => (
              <button key={tab} onClick={() => setActiveTab(tab)} className={activeTab === tab ? 'active' : ''}>
                {tab}
              </button>
            ))}
          </nav>

          {activeTab === 'response' && (
            <ReactMarkdown
              remarkPlugins={[remarkGfm]}
              components={{
                p: ({ children }) => <p>{children}</p>,
                strong: ({ children }) => <strong>{children}</strong>,
                em: ({ children }) => <em>{children}</em>,
                code: ({ node, inline, children, className }) =>
                  inline ? <code className={className}>{children}</code> : <pre><code className={className}>{children}</code></pre>,
                a: ({ href, children }) => (
                  <a href={href} target="_blank" rel="noopener noreferrer">{children}</a>
                ),
                ul: ({ children }) => <ul>{children}</ul>,
              }}
            >
              {currentResponseText}
            </ReactMarkdown>
          )}

          {activeTab === 'links' && (
            response.links?.length ? (
              <ul>
                {response.links.map((link, i) => (
                  <li key={i}><a href={link} target="_blank" rel="noopener noreferrer">{link}</a></li>
                ))}
              </ul>
            ) : (
              <p>No links available.</p>
            )
          )}

          {activeTab === 'media' && (
            response.media_links?.length ? (
              <div className="media-grid">
                {response.media_links.map((media, i) => (
                  <button key={i} onClick={() => handleMediaClick(media)}>
                    {isVideo(media) ? <video src={media} /> : <img src={media} alt="" />}
                  </button>
                ))}
              </div>
            ) : (
              <p>No media available. Try asking about a specific project.</p>
            )
          )}

          {activeTab === 'personal' && (
            response.personal_info?.length ? (
              <ul>
                {response.personal_info.map((info, i) => (
                  <li key={i}>{info}</li>
                ))}
              </ul>
            ) : (
              <p>No personal information available. Try asking for contact details.</p>
            )
          )}
        </div>
      )}

      {error && (
        <div className="error" role="alert">
          <AlertCircle />
          <span>{error}</span>
          <button onClick={handleRetryClick} aria-label="Retry"><RefreshCw /></button>
        </div>
      )}

      {/* Lightbox for enlarged media */}
      {selectedMedia && (
        <div className="lightbox" onClick={closeLightbox}>
          <button onClick={closeLightbox} aria-label="Close"><X /></button>
          {isVideo(selectedMedia) ? (
            <video src={selectedMedia} controls autoPlay />
          ) : (
            <img src={selectedMedia} alt="Selected media" />
          )}
        </div>
      )}
    </div>
  );
};

export default VoiceAssistant;
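// Usage sketch. The component reads { theme } from the ThemeContext exported
// by ../main.jsx, so a provider must wrap it; the import path and provider
// value here are illustrative:
//   import VoiceAssistant from './VoiceAssistant.jsx';
//   <ThemeContext.Provider value={{ theme: 'dark' }}>
//     <VoiceAssistant />
//   </ThemeContext.Provider>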