<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Smart Audio Recorder</title>
    <style>
        body {
            font-family: 'Arial', sans-serif;
            max-width: 800px;
            margin: 0 auto;
            padding: 20px;
            background-color: #f5f5f5;
        }
        .container {
            background-color: white;
            padding: 20px;
            border-radius: 10px;
            box-shadow: 0 2px 4px rgba(0,0,0,0.1);
        }
        .record-button {
            background-color: #ff4444;
            color: white;
            border: none;
            padding: 15px 30px;
            border-radius: 25px;
            font-size: 18px;
            cursor: pointer;
            transition: background-color 0.3s;
        }
        .record-button.recording {
            background-color: #cc0000;
            animation: pulse 1.5s infinite;
        }
        .status {
            margin-top: 20px;
            padding: 10px;
            border-radius: 5px;
            background-color: #f8f9fa;
        }
        @keyframes pulse {
            0% { transform: scale(1); }
            50% { transform: scale(1.05); }
            100% { transform: scale(1); }
        }
        .meter {
            height: 20px;
            background-color: #e9ecef;
            border-radius: 10px;
            margin: 20px 0;
            overflow: hidden;
        }
        .meter-fill {
            height: 100%;
            width: 0%;
            background-color: #4CAF50;
            transition: width 0.1s;
        }
        .error {
            color: #dc3545;
            padding: 10px;
            margin-top: 10px;
            border: 1px solid #dc3545;
            border-radius: 5px;
            display: none;
        }
    </style>
</head>
<body>
    <div class="container">
        <h1>Smart Audio Recorder</h1>
        <p>Records when active audio is detected. Saves only recordings with at least 5 seconds of active audio.</p>
        <button id="recordButton" class="record-button">Start Recording</button>
        <div class="meter">
            <div id="meterFill" class="meter-fill"></div>
        </div>
        <div id="status" class="status">Ready to record</div>
        <div id="error" class="error"></div>
    </div>
    <script>
        class SmartRecorder {
            constructor() {
                this.mediaRecorder = null;
                this.audioContext = null;
                this.analyser = null;
                this.chunks = [];
                this.activeAudioTime = 0;
                this.lastActiveTime = 0;
                this.isRecording = false;
                this.silenceThreshold = 0.015;
                this.minActiveAudio = 5; // seconds
                this.recordButton = document.getElementById('recordButton');
                this.status = document.getElementById('status');
                this.meterFill = document.getElementById('meterFill');
                this.errorDiv = document.getElementById('error');
                this.recordButton.addEventListener('click', () => this.toggleRecording());
                // Check if we're running on localhost or HTTPS
                if (!(window.location.protocol === 'https:' || window.location.hostname === 'localhost' || window.location.hostname === '127.0.0.1')) {
                    this.showError('This application requires HTTPS or localhost to access the microphone.');
                    this.recordButton.disabled = true;
                    return;
                }
                this.setupAudioContext();
            }
            showError(message) {
                this.errorDiv.textContent = message;
                this.errorDiv.style.display = 'block';
                console.error(message);
            }
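            // Request microphone access up front and route the stream through an
            // AnalyserNode so input levels can be measured once recording starts.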
            async setupAudioContext() {
                try {
                    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
                    this.audioContext = new AudioContext();
                    const source = this.audioContext.createMediaStreamSource(stream);
                    this.analyser = this.audioContext.createAnalyser();
                    this.analyser.fftSize = 2048;
                    source.connect(this.analyser);
                    this.errorDiv.style.display = 'none';
                } catch (err) {
                    this.showError(`Error accessing microphone: ${err.message}`);
                    this.recordButton.disabled = true;
                }
            }
            async toggleRecording() {
                if (!this.isRecording) {
                    await this.startRecording();
                } else {
                    await this.stopRecording();
                }
            }
            async startRecording() {
                try {
                    // AudioContexts created without a user gesture start suspended in most
                    // browsers; resume here so the analyser delivers real samples.
                    if (this.audioContext && this.audioContext.state === 'suspended') {
                        await this.audioContext.resume();
                    }
                    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
                    this.mediaRecorder = new MediaRecorder(stream);
                    this.chunks = [];
                    this.activeAudioTime = 0;
                    this.lastActiveTime = Date.now();
                    this.isRecording = true;
                    this.mediaRecorder.ondataavailable = (e) => this.chunks.push(e.data);
                    this.mediaRecorder.start();
                    this.recordButton.classList.add('recording');
                    this.recordButton.textContent = 'Stop Recording';
                    this.status.textContent = 'Recording...';
                    this.errorDiv.style.display = 'none';
                    this.startAudioAnalysis();
                } catch (err) {
                    this.showError(`Error starting recording: ${err.message}`);
                }
            }
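            // On stop, keep the recording only if enough active (non-silent) audio was
            // accumulated; otherwise report that it was discarded.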
            async stopRecording() {
                if (!this.mediaRecorder) return;
                this.isRecording = false;
                // Register the handler before calling stop() so the final chunk and the
                // stop event are handled reliably.
                this.mediaRecorder.onstop = () => {
                    if (this.activeAudioTime >= this.minActiveAudio) {
                        // MediaRecorder emits compressed audio (typically WebM/Opus or Ogg),
                        // not WAV, so use its actual MIME type for the blob and file name.
                        const mimeType = this.mediaRecorder.mimeType || 'audio/webm';
                        const extension = mimeType.includes('ogg') ? 'ogg' : 'webm';
                        const blob = new Blob(this.chunks, { type: mimeType });
                        const url = URL.createObjectURL(blob);
                        const link = document.createElement('a');
                        link.href = url;
                        link.download = `recording_${new Date().toISOString().replace(/[:.]/g, '-')}.${extension}`;
                        link.click();
                        this.status.textContent = `Saved recording with ${this.activeAudioTime.toFixed(1)} seconds of active audio`;
                    } else {
                        this.status.textContent = `Recording discarded: only ${this.activeAudioTime.toFixed(1)} seconds of active audio (minimum ${this.minActiveAudio}s required)`;
                    }
                    // Release the microphone used for this recording and reset the meter.
                    this.mediaRecorder.stream.getTracks().forEach((track) => track.stop());
                    this.meterFill.style.width = '0%';
                };
                this.mediaRecorder.stop();
                this.recordButton.classList.remove('recording');
                this.recordButton.textContent = 'Start Recording';
            }
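            // Per-frame analysis loop: measure the input level (RMS) and accumulate the
            // time spent above the silence threshold while recording.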
            startAudioAnalysis() {
                let lastFrameTime = performance.now();
                const analyzeFrame = () => {
                    if (!this.isRecording) return;
                    const dataArray = new Float32Array(this.analyser.frequencyBinCount);
                    this.analyser.getFloatTimeDomainData(dataArray);
                    // Root mean square of the time-domain samples approximates the input level
                    const rms = Math.sqrt(dataArray.reduce((acc, val) => acc + val * val, 0) / dataArray.length);
                    const now = performance.now();
                    const frameTime = (now - lastFrameTime) / 1000;
                    lastFrameTime = now;
                    // Count only the elapsed frame time while the level is above the silence
                    // threshold, so silent stretches do not inflate the active-audio total
                    if (rms > this.silenceThreshold) {
                        this.activeAudioTime += frameTime;
                        this.lastActiveTime = Date.now();
                    }
                    // Update meter
                    const meterLevel = Math.min(100, rms * 400);
                    this.meterFill.style.width = `${meterLevel}%`;
                    // Update status with active audio time
                    this.status.textContent = `Recording... Active audio: ${this.activeAudioTime.toFixed(1)}s`;
                    requestAnimationFrame(analyzeFrame);
                };
                analyzeFrame();
            }
        }
        // Initialize recorder when page loads
        window.addEventListener('load', () => {
            new SmartRecorder();
        });
    </script>
</body>
</html>