|
|
|
document.addEventListener('DOMContentLoaded', function() {

// --- Mutable session state shared by every handler in this closure ---

let peerConnection;                        // RTCPeerConnection for the mic stream; null/undefined when idle

let webrtc_id;                             // random id correlating offer, ICE candidates and transcript SSE stream server-side

let audioContext, analyser, audioSource;   // Web Audio nodes used only for the level visualization

let audioLevel = 0;                        // last measured mic level, 0..1

let animationFrame;                        // requestAnimationFrame handle for the level-meter loop

let isRecording = false;                   // derived from peerConnection.connectionState in updateButtonState()

let eventSource;                           // EventSource delivering transcript chunks

// --- DOM references ---

const startButton = document.getElementById('start-button');

const transcriptDiv = document.getElementById('transcript');

console.log('DOM loaded. startButton:', startButton, 'transcriptDiv:', transcriptDiv);

// Paragraph currently being appended to, and when it last changed — used by
// appendTranscript() to decide when to start a new paragraph.

let currentParagraph = null;

let lastUpdateTime = Date.now();
|
|
|
|
|
/**
 * Surface an error to the user via the #error-toast element for five
 * seconds, and mirror it to the console.
 * @param {string} message - Human-readable error text.
 */
function showError(message) {
    console.error('Error:', message);

    const toastEl = document.getElementById('error-toast');
    toastEl.textContent = message;
    toastEl.style.display = 'block';

    // Auto-dismiss after five seconds.
    const HIDE_DELAY_MS = 5000;
    setTimeout(() => {
        toastEl.style.display = 'none';
    }, HIDE_DELAY_MS);
}
|
|
|
|
|
/**
 * Handle a message arriving on the WebRTC data channel.
 * Payloads are expected to be JSON; server-reported errors are shown
 * as a toast via showError().
 * @param {MessageEvent} event - Data-channel message event.
 */
function handleMessage(event) {
    // Log the raw payload up front so malformed messages are still visible.
    console.log('Received message:', event.data);

    let eventJson;
    try {
        eventJson = JSON.parse(event.data);
    } catch (err) {
        // Don't let one malformed payload throw out of the channel handler.
        console.error('Failed to parse data channel message:', err);
        return;
    }

    if (eventJson.type === "error") {
        showError(eventJson.message);
    }
}
|
|
|
|
|
/**
 * Sync the start/stop button's contents and the isRecording flag with the
 * peer connection's current state.  Called on connectionstatechange and
 * after stop().
 */
function updateButtonState() {
    // Handshake in progress -> spinner.
    if (peerConnection && (peerConnection.connectionState === 'connecting' || peerConnection.connectionState === 'new')) {
        startButton.innerHTML = `
<div class="icon-with-spinner">
<div class="spinner"></div>
<span>Connecting...</span>
</div>
`;
        isRecording = false;
    } else if (peerConnection && peerConnection.connectionState === 'connected') {
        // Live session -> pulsing stop button (pulse-circle scales with mic level).
        startButton.innerHTML = `
<div class="pulse-container">
<div class="pulse-circle"></div>
<span>Stop Recording</span>
</div>
`;
        isRecording = true;
    } else {
        // Idle / failed / closed / no connection.
        startButton.innerHTML = 'Start Recording';
        isRecording = false;
    }
    console.log('Button state updated. isRecording:', isRecording);
}
|
|
|
|
|
/**
 * Wire the microphone stream into a Web Audio analyser and drive the
 * .pulse-circle CSS animation from the measured input level.
 * @param {MediaStream} stream - Microphone stream from getUserMedia.
 */
function setupAudioVisualization(stream) {
    // Lazily create (or resume) one shared AudioContext; browsers may leave
    // it 'suspended' until a user gesture.
    if (!audioContext) {
        audioContext = new (window.AudioContext || window.webkitAudioContext)();
    } else if (audioContext.state === 'suspended') {
        audioContext.resume();
    }

    // Tear down nodes from a previous session so repeated start/stop cycles
    // don't accumulate MediaStreamSource nodes in the audio graph, and make
    // sure only one rAF loop is ever running.
    if (audioSource) {
        audioSource.disconnect();
    }
    if (animationFrame) {
        cancelAnimationFrame(animationFrame);
    }

    analyser = audioContext.createAnalyser();
    analyser.fftSize = 64; // small FFT: we only need a coarse level, not a spectrum

    audioSource = audioContext.createMediaStreamSource(stream);
    audioSource.connect(analyser);

    const dataArray = new Uint8Array(analyser.frequencyBinCount);

    // rAF loop: average the frequency bins into a 0..1 level and expose it to
    // CSS via the --audio-level custom property on the pulse circle.
    function updateAudioLevel() {
        analyser.getByteFrequencyData(dataArray);

        const average = Array.from(dataArray).reduce((a, b) => a + b, 0) / dataArray.length;
        audioLevel = average / 255;

        const pulseCircle = document.querySelector('.pulse-circle');
        if (pulseCircle) {
            pulseCircle.style.setProperty('--audio-level', 1 + audioLevel);
        }

        animationFrame = requestAnimationFrame(updateAudioLevel);
    }

    updateAudioLevel();
}
|
|
|
|
|
/**
 * Establish the WebRTC session: capture the microphone, negotiate SDP/ICE
 * with the server over POST /webrtc/offer, and open a server-sent-events
 * stream that delivers transcript chunks.  On any failure, shows an error
 * toast and tears everything down via stop().
 */
async function setupWebRTC() {
    console.log('Setting up WebRTC connection...');

    try {
        // RTC configuration (STUN/TURN servers) is injected by the host page.
        const config = window.__RTC_CONFIGURATION__;
        console.log('WebRTC configuration:', config);

        peerConnection = new RTCPeerConnection(config);
        console.log('Created peer connection:', peerConnection);

        // Hard timeout: if not connected within 15 s, give up and restart
        // (stop(true) re-invokes setupWebRTC).
        const connectionTimeout = setTimeout(() => {
            if (peerConnection && peerConnection.connectionState !== 'connected') {
                showError('Connection timeout. Please check your network and try again.');
                stop(true);
            }
        }, 15000);

        // Soft warning after 5 s — VPNs commonly slow ICE down.
        const timeoutId = setTimeout(() => {
            const toast = document.getElementById('error-toast');
            toast.textContent = "Connection is taking longer than usual. Are you on a VPN?";
            toast.className = 'toast warning';
            toast.style.display = 'block';

            setTimeout(() => {
                toast.style.display = 'none';
            }, 5000);
        }, 5000);

        updateButtonState();

        console.log('Requesting microphone access...');
        const stream = await navigator.mediaDevices.getUserMedia({
            audio: true
        });
        console.log('Microphone access granted:', stream);

        setupAudioVisualization(stream);

        // Send the mic audio to the server over the peer connection.
        stream.getTracks().forEach(track => {
            peerConnection.addTrack(track, stream);
        });
        console.log('Added audio tracks to connection');

        peerConnection.addEventListener('connectionstatechange', () => {
            console.log('connectionstatechange', peerConnection.connectionState);

            if (peerConnection.connectionState === 'connected') {
                // Connected: cancel both timers and hide any pending warning toast.
                clearTimeout(timeoutId);
                clearTimeout(connectionTimeout);
                const toast = document.getElementById('error-toast');
                toast.style.display = 'none';
                console.log('Connection established successfully');
            } else if (peerConnection.connectionState === 'failed' ||
                peerConnection.connectionState === 'disconnected' ||
                peerConnection.connectionState === 'closed') {
                showError('Connection lost. Please try again.');
                stop();
            }

            updateButtonState();
        });

        // Data channel used by the server to push JSON status/error events.
        const dataChannel = peerConnection.createDataChannel('text');
        dataChannel.onmessage = handleMessage;
        console.log('Created data channel');

        // Trickle ICE: forward each local candidate to the server as it appears.
        // NOTE(review): this fetch's promise is unhandled — a network failure
        // here surfaces only as an unhandled rejection in the console.
        peerConnection.onicecandidate = ({ candidate }) => {
            if (candidate) {
                console.log("Sending ICE candidate", candidate);
                fetch('/webrtc/offer', {
                    method: 'POST',
                    headers: { 'Content-Type': 'application/json' },
                    body: JSON.stringify({
                        candidate: candidate.toJSON(),
                        webrtc_id: webrtc_id,
                        type: "ice-candidate",
                    })
                });
            }
        };

        console.log('Creating connection offer...');
        const offer = await peerConnection.createOffer();

        await peerConnection.setLocalDescription(offer);
        console.log('Local description set');

        // Short random id ties the offer, the ICE POSTs above and the
        // transcript SSE stream together on the server.
        webrtc_id = Math.random().toString(36).substring(7);
        console.log('Generated webrtc_id:', webrtc_id);

        console.log('Sending offer to server...');
        const response = await fetch('/webrtc/offer', {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({
                sdp: peerConnection.localDescription.sdp,
                type: peerConnection.localDescription.type,
                webrtc_id: webrtc_id
            })
        });
        console.log('Server responded to offer');

        const serverResponse = await response.json();
        console.log('Server response:', serverResponse);

        // Application-level rejection (e.g. too many concurrent sessions).
        if (serverResponse.status === 'failed') {
            showError(serverResponse.meta.error === 'concurrency_limit_reached'
                ? `Too many connections. Maximum limit is ${serverResponse.meta.limit}`
                : serverResponse.meta.error);
            stop();
            startButton.textContent = 'Start Recording';
            return;
        }

        // Server's answer doubles as the remote session description.
        console.log('Setting remote description...');
        await peerConnection.setRemoteDescription(serverResponse);
        console.log('Remote description set');

        // Transcript text arrives over server-sent events, keyed by webrtc_id.
        console.log('Creating event source for transcription...');
        eventSource = new EventSource('/transcript?webrtc_id=' + webrtc_id);

        eventSource.onerror = (event) => {
            console.error("EventSource error:", event);
            showError("Transcription connection lost. Please try again.");
        };

        eventSource.addEventListener("output", (event) => {
            console.log("Received transcript chunk:", event.data);

            appendTranscript(event.data);
        });

        console.log('WebRTC setup complete, waiting for connection...');
    } catch (err) {
        console.error('Error setting up WebRTC:', err);
        showError('Failed to establish connection: ' + err.message);
        stop();
        startButton.textContent = 'Start Recording';
    }
}
|
|
|
/**
 * Minimal transcript appender: one <p> per chunk, then scroll to the bottom.
 * Simpler alternative to appendTranscript() with no merging heuristics.
 * @param {string} text - Transcript chunk to append verbatim.
 */
function appendTranscriptSimple(text) {
    const paragraph = document.createElement('p');
    paragraph.textContent = text;

    transcriptDiv.appendChild(paragraph);
    transcriptDiv.scrollTop = transcriptDiv.scrollHeight;
}
|
|
|
|
|
/**
 * Merge a streaming transcript chunk into the rolling transcript view.
 *
 * Chunks accumulate into the "current" paragraph; a new paragraph starts
 * after sentence-ending punctuation or a >5 s gap between chunks.  A simple
 * overlap heuristic drops a chunk-leading word that repeats the paragraph's
 * last word (common with streaming ASR output).  At most 10 paragraphs are
 * kept.
 * @param {string} text - Raw transcript chunk from the SSE stream.
 */
function appendTranscript(text) {
    const formattedText = text.trim();
    if (!formattedText) return;

    const now = Date.now();
    const timeSinceLastUpdate = now - lastUpdateTime;
    lastUpdateTime = now;

    if (!currentParagraph) {
        // Start a fresh paragraph, flagged 'current' for styling.
        currentParagraph = document.createElement('p');
        currentParagraph.classList.add('current');
        transcriptDiv.appendChild(currentParagraph);
        currentParagraph.textContent = formattedText;
    } else {
        const currentText = currentParagraph.textContent;

        let cleanedText = formattedText;

        // De-duplicate: if the new chunk starts with the same word the
        // paragraph currently ends with, strip that leading repeat.
        const words = currentText.split(/\s+/);
        const lastWord = words[words.length - 1].replace(/[^\w]/g, '').toLowerCase();

        // Only bother for words longer than 2 chars to avoid false positives.
        if (lastWord && lastWord.length > 2) {
            // lastWord is \w-only at this point, so it is safe to embed in a RegExp.
            const regex = new RegExp(`^${lastWord}`, 'i');
            if (regex.test(cleanedText.replace(/[^\w]/g, ''))) {
                cleanedText = cleanedText.replace(regex, '').trim();
            }
        }

        let finalText = currentText;

        // Insert a separating space unless we already end in whitespace or
        // punctuation, or the chunk itself begins with punctuation.
        if (!/[\s.,!?]$/.test(finalText) && !/^[.,!?]/.test(cleanedText) && cleanedText) {
            finalText += ' ';
        }

        finalText += cleanedText;

        // Ensure punctuation is followed by a space ("end.Next" -> "end. Next").
        finalText = finalText.replace(/([.,!?])([a-zA-Z])/g, '$1 $2');

        currentParagraph.textContent = finalText;
    }

    // Close out the paragraph on sentence-ending punctuation or a long pause.
    if (/[.!?]$/.test(formattedText) || timeSinceLastUpdate > 5000) {
        if (currentParagraph) {
            currentParagraph.classList.remove('current');
        }

        currentParagraph = null;
    }

    // Cap the transcript at 10 paragraphs; the HTMLCollection is live, so it
    // shrinks as paragraphs are removed from the front.
    const paragraphs = transcriptDiv.getElementsByTagName('p');
    while (paragraphs.length > 10) {
        transcriptDiv.removeChild(paragraphs[0]);
    }

    // Defer the scroll so layout reflects the DOM changes above.
    requestAnimationFrame(() => {
        transcriptDiv.scrollTop = transcriptDiv.scrollHeight;
    });
}
|
|
|
|
|
/**
 * Tear down the recording session: cancel the level-meter loop, suspend the
 * audio context, stop the mic tracks, close the peer connection and the SSE
 * stream, and reset UI state.
 * @param {boolean} [restartOnTimeout=false] - When true (connection-timeout
 *   path), skip the clear-transcript prompt and immediately attempt a fresh
 *   setupWebRTC().
 */
function stop(restartOnTimeout = false) {
    console.log('Stopping recording...');

    if (animationFrame) {
        cancelAnimationFrame(animationFrame);
        animationFrame = null;
    }

    // Suspend (not close) so the context can be resumed on the next start.
    if (audioContext) {
        audioContext.suspend();
    }

    if (peerConnection) {
        // Stop each outgoing mic track so the browser releases the device.
        const senders = peerConnection.getSenders();
        if (senders) {
            senders.forEach(sender => {
                if (sender.track) {
                    sender.track.stop();
                }
            });
        }

        peerConnection.close();
        peerConnection = null;
    }

    if (eventSource) {
        eventSource.close();
        eventSource = null;
    }

    audioLevel = 0;

    // With peerConnection now null this resets the button to 'Start Recording'.
    updateButtonState();

    // On a user-initiated stop, offer to clear the transcript; either way the
    // in-progress paragraph is finalized.
    if (!restartOnTimeout) {
        if (window.confirm('Clear transcript?')) {
            transcriptDiv.innerHTML = '';
            currentParagraph = null;
        } else {
            if (currentParagraph) {
                currentParagraph.classList.remove('current');
                currentParagraph = null;
            }
        }
    }

    lastUpdateTime = Date.now();
    console.log('Recording stopped');

    // Timeout path: immediately retry the whole connection.
    if (restartOnTimeout) {
        setupWebRTC();
    }
}
|
|
|
|
|
// Best-effort cleanup when the page is closed or navigated away.
window.addEventListener('beforeunload', () => stop());
|
|
|
|
|
// The one button toggles between starting and stopping a session.
startButton.addEventListener('click', () => {
    console.log('Start button clicked. isRecording:', isRecording);
    if (isRecording) {
        stop();
    } else {
        setupWebRTC();
    }
});
|
|
|
|
|
// --- One-time UI initialization ---
console.log('Initializing UI...');

// The button and transcript area start visible; the error toast starts hidden.
for (const el of [transcriptDiv, startButton, document.getElementById('error-toast')]) {
    if (!el) continue;

    const isButton = el.tagName.toLowerCase() === 'button';
    const isTranscript = el.id === 'transcript';
    el.style.display = (isButton || isTranscript) ? 'block' : 'none';
}

// Apply theme colors.
document.body.style.backgroundColor = 'var(--background-dark)';
document.body.style.color = 'var(--text-light)';

startButton.style.backgroundColor = 'rgba(249, 164, 92, 1.0)';
startButton.style.color = 'black';

console.log('UI initialization complete');
|
}); |