// audio-stream.service.ts
// Path: /flare-ui/src/app/services/audio-stream.service.ts
import { Injectable, OnDestroy } from '@angular/core';
import { Subject } from 'rxjs';
export interface AudioChunk {
data: string; // Base64 encoded audio
timestamp: number;
}
export interface AudioStreamError {
type: 'permission' | 'device' | 'browser' | 'unknown';
message: string;
originalError?: any;
}
@Injectable({
providedIn: 'root'
})
export class AudioStreamService implements OnDestroy {
private mediaRecorder: MediaRecorder | null = null;
private audioStream: MediaStream | null = null;
private audioChunkSubject = new Subject<AudioChunk>();
private recordingStateSubject = new Subject<boolean>();
private errorSubject = new Subject<AudioStreamError>();
private volumeLevelSubject = new Subject<number>();
public audioChunk$ = this.audioChunkSubject.asObservable();
public recordingState$ = this.recordingStateSubject.asObservable();
public error$ = this.errorSubject.asObservable();
public volumeLevel$ = this.volumeLevelSubject.asObservable();
// Audio analysis
private audioContext: AudioContext | null = null;
private analyser: AnalyserNode | null = null;
  private volumeInterval: ReturnType<typeof setInterval> | null = null;
// Audio constraints
  private constraints: { audio: MediaTrackConstraints } = {
audio: {
channelCount: 1,
sampleRate: 16000,
echoCancellation: true,
noiseSuppression: true,
autoGainControl: true
}
};
ngOnDestroy(): void {
this.cleanup();
}
static checkBrowserSupport(): boolean {
return !!(
navigator.mediaDevices &&
typeof navigator.mediaDevices.getUserMedia === 'function' &&
window.MediaRecorder
);
}
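  // Illustrative sketch (not part of this service): a consuming component could
  // call the static support check before enabling its record button. The component
  // name and template below are hypothetical assumptions, not flare-ui code.
  //
  //   import { Component } from '@angular/core';
  //   import { AudioStreamService } from './audio-stream.service';
  //
  //   @Component({
  //     selector: 'app-mic-guard-example',
  //     template: `<button [disabled]="!supported">Record</button>`
  //   })
  //   export class MicGuardExampleComponent {
  //     // Disable the record button when getUserMedia/MediaRecorder are unavailable
  //     readonly supported = AudioStreamService.checkBrowserSupport();
  //   }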
async startRecording(): Promise<void> {
try {
// Check browser support
if (!AudioStreamService.checkBrowserSupport()) {
throw this.createError('browser', 'Your browser does not support audio recording');
}
// Check permission first
const permission = await this.checkMicrophonePermission();
if (permission === 'denied') {
throw this.createError('permission', 'Microphone permission denied');
}
// Request microphone access
try {
this.audioStream = await navigator.mediaDevices.getUserMedia(this.constraints);
} catch (error: any) {
if (error.name === 'NotAllowedError' || error.name === 'PermissionDeniedError') {
throw this.createError('permission', 'Microphone access denied', error);
} else if (error.name === 'NotFoundError' || error.name === 'DevicesNotFoundError') {
throw this.createError('device', 'No microphone found', error);
} else {
throw this.createError('device', `Failed to access microphone: ${error.message}`, error);
}
}
// Create MediaRecorder with appropriate format
const options: MediaRecorderOptions = {
mimeType: this.getPreferredMimeType()
};
try {
this.mediaRecorder = new MediaRecorder(this.audioStream, options);
} catch (error) {
// Fallback to default options if preferred mime type fails
console.warn('Failed with preferred mime type, using defaults:', error);
this.mediaRecorder = new MediaRecorder(this.audioStream);
}
// Set up event handlers
this.setupMediaRecorderHandlers();
// Start volume monitoring
this.startVolumeMonitoring();
// Start recording with timeslice for real-time streaming
this.mediaRecorder.start(100); // Send chunks every 100ms
this.recordingStateSubject.next(true);
console.log('✅ Audio recording started');
} catch (error: any) {
console.error('Failed to start recording:', error);
this.cleanup();
// Emit error
if (error.type) {
this.errorSubject.next(error);
} else {
this.errorSubject.next(this.createError('unknown', error.message || 'Failed to start recording', error));
}
throw error;
}
}
stopRecording(): void {
try {
if (this.mediaRecorder && this.mediaRecorder.state !== 'inactive') {
this.mediaRecorder.stop();
}
this.cleanup();
this.recordingStateSubject.next(false);
console.log('🛑 Audio recording stopped');
} catch (error) {
console.error('Error stopping recording:', error);
this.cleanup();
}
}
private setupMediaRecorderHandlers(): void {
if (!this.mediaRecorder) return;
// Handle data available
this.mediaRecorder.ondataavailable = async (event) => {
try {
if (event.data && event.data.size > 0) {
const base64Data = await this.blobToBase64(event.data);
this.audioChunkSubject.next({
data: base64Data,
timestamp: Date.now()
});
}
} catch (error) {
console.error('Error processing audio chunk:', error);
this.errorSubject.next(this.createError('unknown', 'Failed to process audio chunk', error));
}
};
// Handle recording stop
this.mediaRecorder.onstop = () => {
console.log('MediaRecorder stopped');
this.cleanup();
};
// Handle errors
this.mediaRecorder.onerror = (event: any) => {
console.error('MediaRecorder error:', event);
const error = this.createError('unknown', `Recording error: ${event.error?.message || 'Unknown error'}`, event.error);
this.errorSubject.next(error);
this.stopRecording();
};
}
private getPreferredMimeType(): string {
const types = [
'audio/webm;codecs=opus',
'audio/webm',
'audio/ogg;codecs=opus',
'audio/ogg',
'audio/mp4'
];
for (const type of types) {
if (MediaRecorder.isTypeSupported(type)) {
console.log(`Using MIME type: ${type}`);
return type;
}
}
// Return empty to use browser default
console.warn('No supported MIME types found, using browser default');
return '';
}
private async blobToBase64(blob: Blob): Promise<string> {
return new Promise((resolve, reject) => {
const reader = new FileReader();
reader.onloadend = () => {
if (reader.result && typeof reader.result === 'string') {
// Remove data URL prefix
const base64 = reader.result.split(',')[1];
resolve(base64);
} else {
reject(new Error('Failed to convert blob to base64'));
}
};
reader.onerror = () => {
reject(new Error('FileReader error'));
};
reader.readAsDataURL(blob);
});
}
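  // Illustrative sketch (not part of this service): a consumer receiving the base64
  // chunks emitted by audioChunk$ can reverse this conversion with atob(). The helper
  // name is hypothetical, and it assumes the MIME type matches the one selected by
  // getPreferredMimeType().
  //
  //   function base64ToBlob(base64: string, mimeType: string): Blob {
  //     // Decode base64 into raw bytes, then wrap them in a Blob of the original type
  //     const binary = atob(base64);
  //     const bytes = new Uint8Array(binary.length);
  //     for (let i = 0; i < binary.length; i++) {
  //       bytes[i] = binary.charCodeAt(i);
  //     }
  //     return new Blob([bytes], { type: mimeType });
  //   }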
// Volume level monitoring
private startVolumeMonitoring(): void {
if (!this.audioStream) return;
try {
this.audioContext = new AudioContext();
this.analyser = this.audioContext.createAnalyser();
const source = this.audioContext.createMediaStreamSource(this.audioStream);
source.connect(this.analyser);
this.analyser.fftSize = 256;
const dataArray = new Uint8Array(this.analyser.frequencyBinCount);
// Monitor volume every 100ms
this.volumeInterval = setInterval(() => {
if (this.analyser) {
this.analyser.getByteFrequencyData(dataArray);
// Calculate average volume
const sum = dataArray.reduce((acc, val) => acc + val, 0);
const average = sum / dataArray.length;
const normalizedVolume = average / 255; // Normalize to 0-1
this.volumeLevelSubject.next(normalizedVolume);
}
}, 100);
} catch (error) {
console.warn('Failed to start volume monitoring:', error);
}
}
private stopVolumeMonitoring(): void {
if (this.volumeInterval) {
clearInterval(this.volumeInterval);
this.volumeInterval = null;
}
if (this.audioContext) {
try {
this.audioContext.close();
} catch (error) {
console.warn('Error closing audio context:', error);
}
this.audioContext = null;
this.analyser = null;
}
}
async getVolumeLevel(): Promise<number> {
if (!this.audioStream || !this.analyser) return 0;
try {
const dataArray = new Uint8Array(this.analyser.frequencyBinCount);
this.analyser.getByteFrequencyData(dataArray);
// Calculate average volume
const average = dataArray.reduce((sum, value) => sum + value, 0) / dataArray.length;
return average / 255; // Normalize to 0-1
} catch (error) {
console.error('Error getting volume level:', error);
return 0;
}
}
// Check microphone permissions
async checkMicrophonePermission(): Promise<PermissionState> {
try {
// First check if Permissions API is available
if (!navigator.permissions || !navigator.permissions.query) {
console.warn('Permissions API not supported');
// Try to check by attempting getUserMedia with video disabled
try {
const stream = await navigator.mediaDevices.getUserMedia({ audio: true, video: false });
stream.getTracks().forEach(track => track.stop());
return 'granted';
} catch (error: any) {
if (error.name === 'NotAllowedError' || error.name === 'PermissionDeniedError') {
return 'denied';
}
return 'prompt';
}
}
// Use Permissions API
const result = await navigator.permissions.query({ name: 'microphone' as PermissionName });
return result.state;
} catch (error) {
console.warn('Error checking microphone permission:', error);
// Assume prompt state if we can't determine
return 'prompt';
}
}
private cleanup(): void {
try {
// Stop media recorder
if (this.mediaRecorder && this.mediaRecorder.state !== 'inactive') {
this.mediaRecorder.stop();
}
this.mediaRecorder = null;
// Stop all tracks
if (this.audioStream) {
this.audioStream.getTracks().forEach(track => {
track.stop();
});
this.audioStream = null;
}
// Stop volume monitoring
this.stopVolumeMonitoring();
} catch (error) {
console.error('Error during cleanup:', error);
}
}
private createError(type: AudioStreamError['type'], message: string, originalError?: any): AudioStreamError {
return {
type,
message,
originalError
};
}
// Get recording state
isRecording(): boolean {
return this.mediaRecorder !== null && this.mediaRecorder.state === 'recording';
}
// Get available audio devices
async getAudioDevices(): Promise<MediaDeviceInfo[]> {
try {
const devices = await navigator.mediaDevices.enumerateDevices();
return devices.filter(device => device.kind === 'audioinput');
} catch (error) {
console.error('Error enumerating devices:', error);
return [];
}
}
  // Switch audio device
  async switchAudioDevice(deviceId: string): Promise<void> {
    // Update constraints so the next getUserMedia call targets the selected device
    this.constraints.audio = {
      ...this.constraints.audio,
      deviceId: { exact: deviceId }
    };
    // If a recording is in progress, restart it on the new device
    if (this.isRecording()) {
      this.stopRecording();
      await this.startRecording();
    }
  }
}
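// Illustrative usage sketch (not part of this file): a component could inject the
// service, start recording, and forward each base64 chunk to a backend. The component,
// WebSocket endpoint, and message shape below are hypothetical assumptions; socket
// readiness checks and error handling are omitted for brevity.
//
//   import { Component, OnDestroy } from '@angular/core';
//   import { Subscription } from 'rxjs';
//   import { AudioStreamService } from './audio-stream.service';
//
//   @Component({
//     selector: 'app-recorder-example',
//     template: `
//       <button (click)="start()">Start</button>
//       <button (click)="stop()">Stop</button>
//     `
//   })
//   export class RecorderExampleComponent implements OnDestroy {
//     private subs = new Subscription();
//     private ws = new WebSocket('wss://example.invalid/audio'); // hypothetical endpoint
//
//     constructor(private audio: AudioStreamService) {
//       // Forward each chunk as it is emitted (roughly every 100 ms while recording);
//       // assumes the socket is already open
//       this.subs.add(this.audio.audioChunk$.subscribe(chunk => {
//         this.ws.send(JSON.stringify({
//           type: 'audio_chunk',
//           data: chunk.data,
//           timestamp: chunk.timestamp
//         }));
//       }));
//       // Surface recording errors
//       this.subs.add(this.audio.error$.subscribe(err => console.error(err.type, err.message)));
//     }
//
//     async start(): Promise<void> {
//       await this.audio.startRecording();
//     }
//
//     stop(): void {
//       this.audio.stopRecording();
//     }
//
//     ngOnDestroy(): void {
//       this.audio.stopRecording();
//       this.subs.unsubscribe();
//       this.ws.close();
//     }
//   }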