// audio-stream.service.ts update
// Version with Linear16 format support added
import { Injectable, OnDestroy } from '@angular/core';
import { Subject, Observable, throwError } from 'rxjs';

export interface AudioChunk {
  data: string;      // Base64 encoded audio
  timestamp: number;
}

export interface AudioStreamError {
  type: 'permission' | 'device' | 'browser' | 'unknown';
  message: string;
  originalError?: any;
}

@Injectable({
  providedIn: 'root'
})
export class AudioStreamService implements OnDestroy {
  private mediaRecorder: MediaRecorder | null = null;
  private audioStream: MediaStream | null = null;

  private audioChunkSubject = new Subject<AudioChunk>();
  private recordingStateSubject = new Subject<boolean>();
  private errorSubject = new Subject<AudioStreamError>();
  private volumeLevelSubject = new Subject<number>();

  public audioChunk$ = this.audioChunkSubject.asObservable();
  public recordingState$ = this.recordingStateSubject.asObservable();
  public error$ = this.errorSubject.asObservable();
  public volumeLevel$ = this.volumeLevelSubject.asObservable();

  // Audio analysis
  private audioContext: AudioContext | null = null;
  private analyser: AnalyserNode | null = null;
  private volumeInterval: any;

  // Additions for Linear16 conversion
  private scriptProcessor: ScriptProcessorNode | null = null;
  private source: MediaStreamAudioSourceNode | null = null;
  private useLinear16 = true; // Flag controlling Linear16 usage

  // Audio constraints
  private constraints = {
    audio: {
      channelCount: 1,
      sampleRate: 16000,
      echoCancellation: true,
      noiseSuppression: true,
      autoGainControl: true,
      // ✅ Boost microphone gain (non-standard goog* constraints, legacy Chrome-only hints)
      googAutoGainControl: true,
      googAutoGainControl2: true,
      googEchoCancellation: true,
      googNoiseSuppression: true,
      googHighpassFilter: false,
      googTypingNoiseDetection: false
    }
  };

  ngOnDestroy(): void {
    this.cleanup();
  }

  static checkBrowserSupport(): boolean {
    return !!(
      navigator.mediaDevices &&
      typeof navigator.mediaDevices.getUserMedia === 'function' &&
      (window.MediaRecorder || window.AudioContext)
    );
  }

  async startRecording(): Promise<void> {
    try {
      console.log('🎤 [AudioStream] startRecording called', {
        isAlreadyRecording: this.isRecording(),
        useLinear16: this.useLinear16,
        timestamp: new Date().toISOString()
      });

      if ((this.mediaRecorder && this.mediaRecorder.state !== 'inactive') || this.scriptProcessor) {
        console.warn('Recording already in progress');
        return;
      }

      // Check browser support
      if (!AudioStreamService.checkBrowserSupport()) {
        const error = this.createError('browser', 'Browser does not support audio recording');
        this.errorSubject.next(error);
        throw error;
      }

      try {
        // Get audio stream
        this.audioStream = await navigator.mediaDevices.getUserMedia(this.constraints);
        console.log('✅ [AudioStream] Got media stream');
        if (this.useLinear16) {
          // Use the Web Audio API for the Linear16 format
          await this.startLinear16Recording();
        } else {
          // Use the standard MediaRecorder (WebM-Opus)
          await this.startMediaRecorderRecording();
        }
        this.recordingStateSubject.next(true);
        console.log('✅ [AudioStream] Recording started successfully');

        // Start volume monitoring
        this.startVolumeMonitoring();
      } catch (error: any) {
        console.error('❌ [AudioStream] getUserMedia error:', error);

        let audioError: AudioStreamError;
        if (error.name === 'NotAllowedError' || error.name === 'PermissionDeniedError') {
          audioError = this.createError('permission', 'Microphone permission denied');
        } else if (error.name === 'NotFoundError' || error.name === 'DevicesNotFoundError') {
          audioError = this.createError('device', 'No microphone found');
        } else {
          audioError = this.createError('unknown', `Failed to access microphone: ${error.message}`, error);
        }

        this.errorSubject.next(audioError);
        throw audioError;
      }
    } catch (error) {
      console.error('❌ [AudioStream] startRecording error:', error);
      this.cleanup();
      throw error;
    }
  }

  private async startLinear16Recording(): Promise<void> {
    console.log('🎵 Starting Linear16 recording with Web Audio API');

    // Create audio context with specific sample rate
    this.audioContext = new AudioContext({ sampleRate: 16000 });

    // Create source from stream
    this.source = this.audioContext.createMediaStreamSource(this.audioStream!);

    // Create script processor for raw PCM access
    // (ScriptProcessorNode is deprecated; see the AudioWorklet sketch after this method)
    this.scriptProcessor = this.audioContext.createScriptProcessor(2048, 1, 1);

    // Chunk counter for debugging
    let chunkCounter = 0;

    this.scriptProcessor.onaudioprocess = (audioEvent) => {
      // Get PCM data from input buffer
      const inputData = audioEvent.inputBuffer.getChannelData(0);

      // Debug: detailed log for the first 5 chunks
      if (chunkCounter < 5) {
        const maxAmplitude = Math.max(...inputData.map(Math.abs));
        const avgAmplitude = inputData.reduce((sum, val) => sum + Math.abs(val), 0) / inputData.length;
        console.log(`🎤 Audio Debug Chunk #${chunkCounter}:`, {
          bufferLength: inputData.length,
          maxAmplitude: maxAmplitude.toFixed(6),
          avgAmplitude: avgAmplitude.toFixed(6),
          firstSamples: Array.from(inputData.slice(0, 10)).map(v => v.toFixed(4)),
          silent: maxAmplitude < 0.001
        });
      }

      // Convert Float32Array to Int16Array (Linear16)
      const pcmData = this.float32ToInt16(inputData);

      // Debug: verify the PCM conversion
      if (chunkCounter < 5) {
        const pcmArray = Array.from(pcmData.slice(0, 10));
        console.log(`🔄 PCM Conversion #${chunkCounter}:`, {
          firstPCMSamples: pcmArray,
          maxPCM: Math.max(...Array.from(pcmData).map(Math.abs))
        });
      }

      // Convert to base64
      const base64Data = this.arrayBufferToBase64(pcmData.buffer);

      // Debug: verify the Base64 output
      if (chunkCounter < 5) {
        console.log(`📦 Base64 Output #${chunkCounter}:`, {
          base64Length: base64Data.length,
          base64Preview: base64Data.substring(0, 50) + '...'
        });
      }

      chunkCounter++;

      // Send chunk
      this.audioChunkSubject.next({
        data: base64Data,
        timestamp: Date.now()
      });
    };

    // Connect nodes
    this.source.connect(this.scriptProcessor);
    this.scriptProcessor.connect(this.audioContext.destination);

    // Test: log the audio context state shortly after startup
    setTimeout(() => {
      if (this.source && this.audioContext) {
        console.log('🎙️ Audio Context State:', this.audioContext.state);
        console.log('🎙️ Sample Rate:', this.audioContext.sampleRate);
      }
    }, 1000);

    console.log('✅ Linear16 recording setup complete');
  }

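  // Hedged alternative sketch (not wired into this service): ScriptProcessorNode is
  // deprecated in favor of AudioWorklet. A minimal replacement posts raw Float32 frames
  // from a worklet back to the main thread and reuses float32ToInt16 / arrayBufferToBase64
  // unchanged. The module path 'assets/pcm-capture.worklet.js' and the method name
  // startLinear16WithWorklet are assumptions for illustration only.
  //
  // // assets/pcm-capture.worklet.js
  // class PcmCaptureProcessor extends AudioWorkletProcessor {
  //   process(inputs) {
  //     const channel = inputs[0][0];
  //     if (channel) {
  //       // Copy the frame; the underlying buffer is reused between process() calls
  //       this.port.postMessage(new Float32Array(channel));
  //     }
  //     return true; // keep the processor alive
  //   }
  // }
  // registerProcessor('pcm-capture', PcmCaptureProcessor);
  //
  // private async startLinear16WithWorklet(): Promise<void> {
  //   this.audioContext = new AudioContext({ sampleRate: 16000 });
  //   await this.audioContext.audioWorklet.addModule('assets/pcm-capture.worklet.js');
  //   this.source = this.audioContext.createMediaStreamSource(this.audioStream!);
  //   const workletNode = new AudioWorkletNode(this.audioContext, 'pcm-capture');
  //   workletNode.port.onmessage = (event: MessageEvent<Float32Array>) => {
  //     const pcmData = this.float32ToInt16(event.data);
  //     this.audioChunkSubject.next({
  //       data: this.arrayBufferToBase64(pcmData.buffer),
  //       timestamp: Date.now()
  //     });
  //   };
  //   this.source.connect(workletNode);
  // }
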
  private async startMediaRecorderRecording(): Promise<void> {
    // Original MediaRecorder implementation
    const mimeType = this.getPreferredMimeType();
    const options: MediaRecorderOptions = {};

    if (mimeType) {
      options.mimeType = mimeType;
    }

    this.mediaRecorder = new MediaRecorder(this.audioStream!, options);
    console.log(`✅ [AudioStream] MediaRecorder created with MIME type: ${mimeType || 'default'}`);

    this.setupMediaRecorderHandlers();
    this.mediaRecorder.start(100);
  }

  private float32ToInt16(buffer: Float32Array): Int16Array {
    const l = buffer.length;
    const result = new Int16Array(l);

    // ✅ Apply extra gain; the raw input level is too low
    const gain = 2.0; // apply 2x gain

    for (let i = 0; i < l; i++) {
      // Apply gain and clamp to [-1, 1]
      const amplified = buffer[i] * gain;
      const clamped = Math.max(-1, Math.min(1, amplified));

      // Convert float32 [-1, 1] to int16 [-32768, 32767]
      result[i] = clamped < 0 ? clamped * 0x8000 : clamped * 0x7FFF;
    }

    return result;
  }

  private arrayBufferToBase64(buffer: ArrayBuffer): string {
    const bytes = new Uint8Array(buffer);
    let binary = '';
    for (let i = 0; i < bytes.byteLength; i++) {
      binary += String.fromCharCode(bytes[i]);
    }
    return btoa(binary);
  }

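  // Optional sketch (not used above): concatenating one character per byte is a string
  // concatenation per sample. A common alternative converts the buffer in slices; the
  // helper name arrayBufferToBase64Chunked is an assumption for illustration.
  //
  // private arrayBufferToBase64Chunked(buffer: ArrayBuffer): string {
  //   const bytes = new Uint8Array(buffer);
  //   const chunkSize = 0x8000; // stay well under fromCharCode argument-count limits
  //   let binary = '';
  //   for (let i = 0; i < bytes.length; i += chunkSize) {
  //     binary += String.fromCharCode(...bytes.subarray(i, i + chunkSize));
  //   }
  //   return btoa(binary);
  // }
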
  stopRecording(): void {
    try {
      console.log('🛑 [AudioStream] stopRecording called', {
        hasMediaRecorder: !!this.mediaRecorder,
        hasScriptProcessor: !!this.scriptProcessor,
        state: this.mediaRecorder?.state,
        timestamp: new Date().toISOString()
      });

      if (this.mediaRecorder && this.mediaRecorder.state !== 'inactive') {
        this.mediaRecorder.stop();
      }

      this.cleanup();
      this.recordingStateSubject.next(false);
      console.log('🛑 [AudioStream] Audio recording stopped successfully');
    } catch (error) {
      console.error('❌ [AudioStream] Error stopping recording:', error);
      this.cleanup();
    }
  }

  private setupMediaRecorderHandlers(): void {
    if (!this.mediaRecorder) return;

    // Handle data available
    this.mediaRecorder.ondataavailable = async (event) => {
      try {
        if (event.data && event.data.size > 0) {
          const base64Data = await this.blobToBase64(event.data);
          this.audioChunkSubject.next({
            data: base64Data,
            timestamp: Date.now()
          });
        }
      } catch (error) {
        console.error('Error processing audio chunk:', error);
        this.errorSubject.next(this.createError('unknown', 'Failed to process audio chunk', error));
      }
    };

    // Handle recording stop
    this.mediaRecorder.onstop = () => {
      console.log('MediaRecorder stopped');
      this.cleanup();
    };

    // Handle errors
    this.mediaRecorder.onerror = (event: any) => {
      console.error('MediaRecorder error:', event);
      const error = this.createError('unknown', `Recording error: ${event.error?.message || 'Unknown error'}`, event.error);
      this.errorSubject.next(error);
      this.stopRecording();
    };
  }

  private getPreferredMimeType(): string {
    const types = [
      'audio/webm;codecs=opus',
      'audio/webm',
      'audio/ogg;codecs=opus',
      'audio/ogg',
      'audio/mp4'
    ];

    for (const type of types) {
      if (MediaRecorder.isTypeSupported(type)) {
        console.log(`Using MIME type: ${type}`);
        return type;
      }
    }

    // Return empty to use browser default
    console.warn('No supported MIME types found, using browser default');
    return '';
  }

  private async blobToBase64(blob: Blob): Promise<string> {
    return new Promise((resolve, reject) => {
      const reader = new FileReader();
      reader.onloadend = () => {
        if (reader.result && typeof reader.result === 'string') {
          // Remove data URL prefix
          const base64 = reader.result.split(',')[1];
          resolve(base64);
        } else {
          reject(new Error('Failed to convert blob to base64'));
        }
      };
      reader.onerror = () => {
        reject(new Error('FileReader error'));
      };
      reader.readAsDataURL(blob);
    });
  }

  // Volume level monitoring
  private startVolumeMonitoring(): void {
    if (!this.audioStream) return;

    try {
      // If an audio context already exists for Linear16, reuse it
      if (!this.audioContext) {
        this.audioContext = new AudioContext();
        this.source = this.audioContext.createMediaStreamSource(this.audioStream);
      }

      this.analyser = this.audioContext.createAnalyser();
      if (this.source) {
        this.source.connect(this.analyser);
      }

      this.analyser.fftSize = 256;
      const dataArray = new Uint8Array(this.analyser.frequencyBinCount);

      // Monitor volume every 100ms
      this.volumeInterval = setInterval(() => {
        if (this.analyser) {
          this.analyser.getByteFrequencyData(dataArray);

          // Calculate average volume
          const sum = dataArray.reduce((acc, val) => acc + val, 0);
          const average = sum / dataArray.length;
          const normalizedVolume = average / 255; // Normalize to 0-1

          this.volumeLevelSubject.next(normalizedVolume);
        }
      }, 100);
    } catch (error) {
      console.warn('Failed to start volume monitoring:', error);
    }
  }

  private stopVolumeMonitoring(): void {
    if (this.volumeInterval) {
      clearInterval(this.volumeInterval);
      this.volumeInterval = null;
    }

    // Don't close the AudioContext here if Linear16 is still using it
    if (this.audioContext && !this.useLinear16) {
      try {
        this.audioContext.close();
      } catch (error) {
        console.warn('Error closing audio context:', error);
      }
      this.audioContext = null;
      this.analyser = null;
    }
  }

  async getVolumeLevel(): Promise<number> {
    if (!this.audioStream || !this.analyser) return 0;

    try {
      const dataArray = new Uint8Array(this.analyser.frequencyBinCount);
      this.analyser.getByteFrequencyData(dataArray);

      // Calculate average volume
      const average = dataArray.reduce((sum, value) => sum + value, 0) / dataArray.length;
      return average / 255; // Normalize to 0-1
    } catch (error) {
      console.error('Error getting volume level:', error);
      return 0;
    }
  }

  // Check microphone permissions
  async checkMicrophonePermission(): Promise<PermissionState> {
    try {
      // First check if Permissions API is available
      if (!navigator.permissions || !navigator.permissions.query) {
        console.warn('Permissions API not supported');

        // Try to check by attempting getUserMedia with video disabled
        try {
          const stream = await navigator.mediaDevices.getUserMedia({ audio: true, video: false });
          stream.getTracks().forEach(track => track.stop());
          return 'granted';
        } catch (error: any) {
          if (error.name === 'NotAllowedError' || error.name === 'PermissionDeniedError') {
            return 'denied';
          }
          return 'prompt';
        }
      }

      // Use Permissions API
      const result = await navigator.permissions.query({ name: 'microphone' as PermissionName });
      return result.state;
    } catch (error) {
      console.warn('Error checking microphone permission:', error);
      // Assume prompt state if we can't determine
      return 'prompt';
    }
  }

  private cleanup(): void {
    try {
      // Stop media recorder
      if (this.mediaRecorder && this.mediaRecorder.state !== 'inactive') {
        this.mediaRecorder.stop();
      }
      this.mediaRecorder = null;

      // Stop script processor for Linear16
      if (this.scriptProcessor) {
        this.scriptProcessor.disconnect();
        this.scriptProcessor = null;
      }

      if (this.source && !this.analyser) {
        this.source.disconnect();
        this.source = null;
      }

      // Stop all tracks
      if (this.audioStream) {
        this.audioStream.getTracks().forEach(track => {
          track.stop();
        });
        this.audioStream = null;
      }

      // Stop volume monitoring
      this.stopVolumeMonitoring();

      // Close audio context if using Linear16
      if (this.audioContext && this.useLinear16) {
        try {
          this.audioContext.close();
        } catch (error) {
          console.warn('Error closing audio context:', error);
        }
        this.audioContext = null;
      }
    } catch (error) {
      console.error('Error during cleanup:', error);
    }
  }

  private createError(type: AudioStreamError['type'], message: string, originalError?: any): AudioStreamError {
    return {
      type,
      message,
      originalError
    };
  }

  // Get recording state
  isRecording(): boolean {
    return (this.mediaRecorder !== null && this.mediaRecorder.state === 'recording') ||
           (this.scriptProcessor !== null);
  }

  // Get available audio devices
  async getAudioDevices(): Promise<MediaDeviceInfo[]> {
    try {
      const devices = await navigator.mediaDevices.enumerateDevices();
      return devices.filter(device => device.kind === 'audioinput');
    } catch (error) {
      console.error('Error enumerating devices:', error);
      return [];
    }
  }

  // Switch audio device
  async switchAudioDevice(deviceId: string): Promise<void> {
    if (this.isRecording()) {
      // Stop current recording
      this.stopRecording();

      // Update constraints with new device
      this.constraints.audio = {
        ...this.constraints.audio,
        deviceId: { exact: deviceId }
      } as any;

      // Restart recording with new device
      await this.startRecording();
    } else {
      // Just update constraints for next recording
      this.constraints.audio = {
        ...this.constraints.audio,
        deviceId: { exact: deviceId }
      } as any;
    }
  }

  // Toggle Linear16 format usage on/off
  setUseLinear16(use: boolean): void {
    this.useLinear16 = use;
    console.log(`Audio format switched to: ${use ? 'Linear16' : 'WebM-Opus'}`);
  }
}
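
// Usage sketch (hypothetical consumer; VoiceChatComponent, its imports, and the WebSocket
// send call are illustrative assumptions, not part of this service):
//
// @Component({ selector: 'app-voice-chat', template: '' })
// export class VoiceChatComponent implements OnDestroy {
//   private subscriptions = new Subscription();
//
//   constructor(private audioStream: AudioStreamService) {
//     // Forward each Base64-encoded Linear16 chunk, e.g. to an STT WebSocket
//     this.subscriptions.add(
//       this.audioStream.audioChunk$.subscribe(chunk => {
//         // websocket.send(JSON.stringify({ type: 'audio', data: chunk.data }));
//       })
//     );
//     this.subscriptions.add(
//       this.audioStream.error$.subscribe(err => console.error('Audio error:', err.message))
//     );
//   }
//
//   async start(): Promise<void> {
//     await this.audioStream.startRecording();
//   }
//
//   stop(): void {
//     this.audioStream.stopRecording();
//   }
//
//   ngOnDestroy(): void {
//     this.subscriptions.unsubscribe();
//     this.audioStream.stopRecording();
//   }
// }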