Update flare-ui/src/app/services/audio-stream.service.ts
flare-ui/src/app/services/audio-stream.service.ts
CHANGED
@@ -1,394 +1,502 @@
- // audio-stream.service.ts
- //
-
- import { Injectable, OnDestroy } from '@angular/core';
- import { Subject, Observable, throwError } from 'rxjs';
-
- export interface AudioChunk {
-   data: string; // Base64 encoded audio
-   timestamp: number;
- }
-
- export interface AudioStreamError {
-   type: 'permission' | 'device' | 'browser' | 'unknown';
-   message: string;
-   originalError?: any;
- }
-
- @Injectable({
-   providedIn: 'root'
- })
- export class AudioStreamService implements OnDestroy {
-   private mediaRecorder: MediaRecorder | null = null;
-   private audioStream: MediaStream | null = null;
-   private audioChunkSubject = new Subject<AudioChunk>();
-   private recordingStateSubject = new Subject<boolean>();
-   private errorSubject = new Subject<AudioStreamError>();
-   private volumeLevelSubject = new Subject<number>();
-
-   public audioChunk$ = this.audioChunkSubject.asObservable();
-   public recordingState$ = this.recordingStateSubject.asObservable();
-   public error$ = this.errorSubject.asObservable();
-   public volumeLevel$ = this.volumeLevelSubject.asObservable();
-
-   // Audio analysis
-   private audioContext: AudioContext | null = null;
-   private analyser: AnalyserNode | null = null;
-   private volumeInterval: any;
-
-   // … (remaining body of the previous 394-line implementation is not legible in this view)
- }
+ // audio-stream.service.ts update
+ // Revision with Linear16 format support added
+
+ import { Injectable, OnDestroy } from '@angular/core';
+ import { Subject, Observable, throwError } from 'rxjs';
+
+ export interface AudioChunk {
+   data: string; // Base64 encoded audio
+   timestamp: number;
+ }
+
+ export interface AudioStreamError {
+   type: 'permission' | 'device' | 'browser' | 'unknown';
+   message: string;
+   originalError?: any;
+ }
+
+ @Injectable({
+   providedIn: 'root'
+ })
+ export class AudioStreamService implements OnDestroy {
+   private mediaRecorder: MediaRecorder | null = null;
+   private audioStream: MediaStream | null = null;
+   private audioChunkSubject = new Subject<AudioChunk>();
+   private recordingStateSubject = new Subject<boolean>();
+   private errorSubject = new Subject<AudioStreamError>();
+   private volumeLevelSubject = new Subject<number>();
+
+   public audioChunk$ = this.audioChunkSubject.asObservable();
+   public recordingState$ = this.recordingStateSubject.asObservable();
+   public error$ = this.errorSubject.asObservable();
+   public volumeLevel$ = this.volumeLevelSubject.asObservable();
+
+   // Audio analysis
+   private audioContext: AudioContext | null = null;
+   private analyser: AnalyserNode | null = null;
+   private volumeInterval: any;
+
+   // Additions for Linear16 conversion
+   private scriptProcessor: ScriptProcessorNode | null = null;
+   private source: MediaStreamAudioSourceNode | null = null;
+   private useLinear16 = true; // Flag controlling Linear16 usage
+
+   // Audio constraints
+   private constraints = {
+     audio: {
+       channelCount: 1,
+       sampleRate: 16000,
+       echoCancellation: true,
+       noiseSuppression: true,
+       autoGainControl: true
+     }
+   };
+
+   ngOnDestroy(): void {
+     this.cleanup();
+   }
+
+   static checkBrowserSupport(): boolean {
+     return !!(
+       navigator.mediaDevices &&
+       typeof navigator.mediaDevices.getUserMedia === 'function' &&
+       (window.MediaRecorder || window.AudioContext)
+     );
+   }
+
+   async startRecording(): Promise<void> {
+     try {
+       console.log('🎤 [AudioStream] startRecording called', {
+         isAlreadyRecording: this.isRecording(),
+         useLinear16: this.useLinear16,
+         timestamp: new Date().toISOString()
+       });
+
+       if ((this.mediaRecorder && this.mediaRecorder.state !== 'inactive') || this.scriptProcessor) {
+         console.warn('Recording already in progress');
+         return;
+       }
+
+       // Check browser support
+       if (!AudioStreamService.checkBrowserSupport()) {
+         const error = this.createError('browser', 'Browser does not support audio recording');
+         this.errorSubject.next(error);
+         throw error;
+       }
+
+       try {
+         // Get audio stream
+         this.audioStream = await navigator.mediaDevices.getUserMedia(this.constraints);
+         console.log('✅ [AudioStream] Got media stream');
+
+         if (this.useLinear16) {
+           // Use the Web Audio API for the Linear16 format
+           await this.startLinear16Recording();
+         } else {
+           // Use the standard MediaRecorder (WebM/Opus)
+           await this.startMediaRecorderRecording();
+         }
+
+         this.recordingStateSubject.next(true);
+         console.log('✅ [AudioStream] Recording started successfully');
+
+         // Start volume monitoring
+         this.startVolumeMonitoring();
+
+       } catch (error: any) {
+         console.error('❌ [AudioStream] getUserMedia error:', error);
+
+         let audioError: AudioStreamError;
+
+         if (error.name === 'NotAllowedError' || error.name === 'PermissionDeniedError') {
+           audioError = this.createError('permission', 'Microphone permission denied');
+         } else if (error.name === 'NotFoundError' || error.name === 'DevicesNotFoundError') {
+           audioError = this.createError('device', 'No microphone found');
+         } else {
+           audioError = this.createError('unknown', `Failed to access microphone: ${error.message}`, error);
+         }
+
+         this.errorSubject.next(audioError);
+         throw audioError;
+       }
+     } catch (error) {
+       console.error('❌ [AudioStream] startRecording error:', error);
+       this.cleanup();
+       throw error;
+     }
+   }
+
+   private async startLinear16Recording(): Promise<void> {
+     console.log('🎵 Starting Linear16 recording with Web Audio API');
+
+     // Create audio context with specific sample rate
+     this.audioContext = new AudioContext({ sampleRate: 16000 });
+
+     // Create source from stream
+     this.source = this.audioContext.createMediaStreamSource(this.audioStream!);
+
+     // Create script processor for raw PCM access
+     // Buffer size: 4096 samples, 1 input channel, 1 output channel
+     this.scriptProcessor = this.audioContext.createScriptProcessor(4096, 1, 1);
+
+     this.scriptProcessor.onaudioprocess = (audioEvent) => {
+       // Get PCM data from input buffer
+       const inputData = audioEvent.inputBuffer.getChannelData(0);
+
+       // Convert Float32Array to Int16Array (Linear16)
+       const pcmData = this.float32ToInt16(inputData);
+
+       // Convert to base64
+       const base64Data = this.arrayBufferToBase64(pcmData.buffer);
+
+       // Send chunk
+       this.audioChunkSubject.next({
+         data: base64Data,
+         timestamp: Date.now()
+       });
+     };
+
+     // Connect nodes
+     this.source.connect(this.scriptProcessor);
+     this.scriptProcessor.connect(this.audioContext.destination);
+
+     console.log('✅ Linear16 recording setup complete');
+   }
+
+   private async startMediaRecorderRecording(): Promise<void> {
+     // Original MediaRecorder implementation
+     const mimeType = this.getPreferredMimeType();
+     const options: MediaRecorderOptions = {};
+     if (mimeType) {
+       options.mimeType = mimeType;
+     }
+
+     this.mediaRecorder = new MediaRecorder(this.audioStream!, options);
+     console.log(`✅ [AudioStream] MediaRecorder created with MIME type: ${mimeType || 'default'}`);
+
+     this.setupMediaRecorderHandlers();
+     this.mediaRecorder.start(100);
+   }
+
+   private float32ToInt16(buffer: Float32Array): Int16Array {
+     const l = buffer.length;
+     const result = new Int16Array(l);
+
+     for (let i = 0; i < l; i++) {
+       // Convert float32 [-1, 1] to int16 [-32768, 32767]
+       const s = Math.max(-1, Math.min(1, buffer[i]));
+       result[i] = s < 0 ? s * 0x8000 : s * 0x7FFF;
+     }
+
+     return result;
+   }
+
+   private arrayBufferToBase64(buffer: ArrayBuffer): string {
+     const bytes = new Uint8Array(buffer);
+     let binary = '';
+
+     for (let i = 0; i < bytes.byteLength; i++) {
+       binary += String.fromCharCode(bytes[i]);
+     }
+
+     return btoa(binary);
+   }
+
+   stopRecording(): void {
+     try {
+       console.log('🛑 [AudioStream] stopRecording called', {
+         hasMediaRecorder: !!this.mediaRecorder,
+         hasScriptProcessor: !!this.scriptProcessor,
+         state: this.mediaRecorder?.state,
+         timestamp: new Date().toISOString()
+       });
+
+       if (this.mediaRecorder && this.mediaRecorder.state !== 'inactive') {
+         this.mediaRecorder.stop();
+       }
+
+       this.cleanup();
+       this.recordingStateSubject.next(false);
+       console.log('🛑 [AudioStream] Audio recording stopped successfully');
+     } catch (error) {
+       console.error('❌ [AudioStream] Error stopping recording:', error);
+       this.cleanup();
+     }
+   }
+
+   private setupMediaRecorderHandlers(): void {
+     if (!this.mediaRecorder) return;
+
+     // Handle data available
+     this.mediaRecorder.ondataavailable = async (event) => {
+       try {
+         if (event.data && event.data.size > 0) {
+           const base64Data = await this.blobToBase64(event.data);
+           this.audioChunkSubject.next({
+             data: base64Data,
+             timestamp: Date.now()
+           });
+         }
+       } catch (error) {
+         console.error('Error processing audio chunk:', error);
+         this.errorSubject.next(this.createError('unknown', 'Failed to process audio chunk', error));
+       }
+     };
+
+     // Handle recording stop
+     this.mediaRecorder.onstop = () => {
+       console.log('MediaRecorder stopped');
+       this.cleanup();
+     };
+
+     // Handle errors
+     this.mediaRecorder.onerror = (event: any) => {
+       console.error('MediaRecorder error:', event);
+       const error = this.createError('unknown', `Recording error: ${event.error?.message || 'Unknown error'}`, event.error);
+       this.errorSubject.next(error);
+       this.stopRecording();
+     };
+   }
+
+   private getPreferredMimeType(): string {
+     const types = [
+       'audio/webm;codecs=opus',
+       'audio/webm',
+       'audio/ogg;codecs=opus',
+       'audio/ogg',
+       'audio/mp4'
+     ];
+
+     for (const type of types) {
+       if (MediaRecorder.isTypeSupported(type)) {
+         console.log(`Using MIME type: ${type}`);
+         return type;
+       }
+     }
+
+     // Return empty to use browser default
+     console.warn('No supported MIME types found, using browser default');
+     return '';
+   }
+
+   private async blobToBase64(blob: Blob): Promise<string> {
+     return new Promise((resolve, reject) => {
+       const reader = new FileReader();
+       reader.onloadend = () => {
+         if (reader.result && typeof reader.result === 'string') {
+           // Remove data URL prefix
+           const base64 = reader.result.split(',')[1];
+           resolve(base64);
+         } else {
+           reject(new Error('Failed to convert blob to base64'));
+         }
+       };
+       reader.onerror = () => {
+         reject(new Error('FileReader error'));
+       };
+       reader.readAsDataURL(blob);
+     });
+   }
+
+   // Volume level monitoring
+   private startVolumeMonitoring(): void {
+     if (!this.audioStream) return;
+
+     try {
+       // If an AudioContext already exists for Linear16, reuse it
+       if (!this.audioContext) {
+         this.audioContext = new AudioContext();
+         this.source = this.audioContext.createMediaStreamSource(this.audioStream);
+       }
+
+       this.analyser = this.audioContext.createAnalyser();
+
+       if (this.source) {
+         this.source.connect(this.analyser);
+       }
+
+       this.analyser.fftSize = 256;
+
+       const dataArray = new Uint8Array(this.analyser.frequencyBinCount);
+
+       // Monitor volume every 100ms
+       this.volumeInterval = setInterval(() => {
+         if (this.analyser) {
+           this.analyser.getByteFrequencyData(dataArray);
+
+           // Calculate average volume
+           const sum = dataArray.reduce((acc, val) => acc + val, 0);
+           const average = sum / dataArray.length;
+           const normalizedVolume = average / 255; // Normalize to 0-1
+
+           this.volumeLevelSubject.next(normalizedVolume);
+         }
+       }, 100);
+     } catch (error) {
+       console.warn('Failed to start volume monitoring:', error);
+     }
+   }
+
+   private stopVolumeMonitoring(): void {
+     if (this.volumeInterval) {
+       clearInterval(this.volumeInterval);
+       this.volumeInterval = null;
+     }
+
+     // Do not close the AudioContext here if Linear16 is still using it
+     if (this.audioContext && !this.useLinear16) {
+       try {
+         this.audioContext.close();
+       } catch (error) {
+         console.warn('Error closing audio context:', error);
+       }
+       this.audioContext = null;
+       this.analyser = null;
+     }
+   }
+
+   async getVolumeLevel(): Promise<number> {
+     if (!this.audioStream || !this.analyser) return 0;
+
+     try {
+       const dataArray = new Uint8Array(this.analyser.frequencyBinCount);
+       this.analyser.getByteFrequencyData(dataArray);
+
+       // Calculate average volume
+       const average = dataArray.reduce((sum, value) => sum + value, 0) / dataArray.length;
+
+       return average / 255; // Normalize to 0-1
+     } catch (error) {
+       console.error('Error getting volume level:', error);
+       return 0;
+     }
+   }
+
+   // Check microphone permissions
+   async checkMicrophonePermission(): Promise<PermissionState> {
+     try {
+       // First check if Permissions API is available
+       if (!navigator.permissions || !navigator.permissions.query) {
+         console.warn('Permissions API not supported');
+         // Try to check by attempting getUserMedia with video disabled
+         try {
+           const stream = await navigator.mediaDevices.getUserMedia({ audio: true, video: false });
+           stream.getTracks().forEach(track => track.stop());
+           return 'granted';
+         } catch (error: any) {
+           if (error.name === 'NotAllowedError' || error.name === 'PermissionDeniedError') {
+             return 'denied';
+           }
+           return 'prompt';
+         }
+       }
+
+       // Use Permissions API
+       const result = await navigator.permissions.query({ name: 'microphone' as PermissionName });
+       return result.state;
+     } catch (error) {
+       console.warn('Error checking microphone permission:', error);
+       // Assume prompt state if we can't determine
+       return 'prompt';
+     }
+   }
+
+   private cleanup(): void {
+     try {
+       // Stop media recorder
+       if (this.mediaRecorder && this.mediaRecorder.state !== 'inactive') {
+         this.mediaRecorder.stop();
+       }
+       this.mediaRecorder = null;
+
+       // Stop script processor for Linear16
+       if (this.scriptProcessor) {
+         this.scriptProcessor.disconnect();
+         this.scriptProcessor = null;
+       }
+
+       if (this.source && !this.analyser) {
+         this.source.disconnect();
+         this.source = null;
+       }
+
+       // Stop all tracks
+       if (this.audioStream) {
+         this.audioStream.getTracks().forEach(track => {
+           track.stop();
+         });
+         this.audioStream = null;
+       }
+
+       // Stop volume monitoring
+       this.stopVolumeMonitoring();
+
+       // Close audio context if using Linear16
+       if (this.audioContext && this.useLinear16) {
+         try {
+           this.audioContext.close();
+         } catch (error) {
+           console.warn('Error closing audio context:', error);
+         }
+         this.audioContext = null;
+       }
+
+     } catch (error) {
+       console.error('Error during cleanup:', error);
+     }
+   }
+
+   private createError(type: AudioStreamError['type'], message: string, originalError?: any): AudioStreamError {
+     return {
+       type,
+       message,
+       originalError
+     };
+   }
+
+   // Get recording state
+   isRecording(): boolean {
+     return (this.mediaRecorder !== null && this.mediaRecorder.state === 'recording') ||
+            (this.scriptProcessor !== null);
+   }
+
+   // Get available audio devices
+   async getAudioDevices(): Promise<MediaDeviceInfo[]> {
+     try {
+       const devices = await navigator.mediaDevices.enumerateDevices();
+       return devices.filter(device => device.kind === 'audioinput');
+     } catch (error) {
+       console.error('Error enumerating devices:', error);
+       return [];
+     }
+   }
+
+   // Switch audio device
+   async switchAudioDevice(deviceId: string): Promise<void> {
+     if (this.isRecording()) {
+       // Stop current recording
+       this.stopRecording();
+
+       // Update constraints with new device
+       this.constraints.audio = {
+         ...this.constraints.audio,
+         deviceId: { exact: deviceId }
+       } as any;
+
+       // Restart recording with new device
+       await this.startRecording();
+     } else {
+       // Just update constraints for next recording
+       this.constraints.audio = {
+         ...this.constraints.audio,
+         deviceId: { exact: deviceId }
+       } as any;
+     }
+   }
+
+   // Toggle Linear16 format usage on or off
+   setUseLinear16(use: boolean): void {
+     this.useLinear16 = use;
+     console.log(`Audio format switched to: ${use ? 'Linear16' : 'WebM-Opus'}`);
+   }
+ }
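
For context, here is a minimal sketch of how an Angular component might consume the updated service. The component name, selector, template, and import path are illustrative assumptions and not part of this commit; only the AudioStreamService calls come from the file above.

// mic-demo.component.ts (illustrative only, not part of this commit)
import { Component, OnDestroy } from '@angular/core';
import { Subscription } from 'rxjs';
import { AudioStreamService, AudioChunk, AudioStreamError } from './services/audio-stream.service';

@Component({
  selector: 'app-mic-demo',
  template: `
    <button (click)="toggle()">{{ recording ? 'Stop' : 'Start' }} recording</button>
    <div>Volume: {{ volume.toFixed(2) }}</div>
  `
})
export class MicDemoComponent implements OnDestroy {
  recording = false;
  volume = 0;
  private subs = new Subscription();

  constructor(private audioStream: AudioStreamService) {
    // While useLinear16 is true, each chunk carries base64-encoded 16 kHz mono Linear16 PCM
    this.subs.add(this.audioStream.audioChunk$.subscribe((chunk: AudioChunk) => {
      // Forward chunk.data to a WebSocket / STT backend here
      console.log('audio chunk', chunk.timestamp, chunk.data.length);
    }));
    this.subs.add(this.audioStream.recordingState$.subscribe(state => (this.recording = state)));
    this.subs.add(this.audioStream.volumeLevel$.subscribe(level => (this.volume = level)));
    this.subs.add(this.audioStream.error$.subscribe((err: AudioStreamError) =>
      console.error('audio error:', err.type, err.message)));
  }

  async toggle(): Promise<void> {
    if (this.recording) {
      this.audioStream.stopRecording();
    } else {
      await this.audioStream.startRecording();
    }
  }

  ngOnDestroy(): void {
    this.subs.unsubscribe();
    this.audioStream.stopRecording();
  }
}

Calling stopRecording() when nothing is active is harmless: it only stops a MediaRecorder that is not 'inactive' and then runs cleanup(), which is wrapped in its own try/catch.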
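
To sanity-check the Linear16 path end to end, a receiving side or a unit test can reverse the encoding done by float32ToInt16 and arrayBufferToBase64. A minimal sketch under that assumption; the helper names are illustrative and not part of this commit.

// decode-linear16.ts (illustrative only)
// Turns one AudioChunk.data payload from the Linear16 branch back into PCM samples.
export function decodeLinear16Chunk(base64Data: string): Int16Array {
  const binary = atob(base64Data); // reverse of arrayBufferToBase64()
  const bytes = new Uint8Array(binary.length);
  for (let i = 0; i < binary.length; i++) {
    bytes[i] = binary.charCodeAt(i);
  }
  // The service serializes Int16Array.buffer directly, so the payload is raw
  // 16-bit PCM in the platform (little-endian) byte order, 16 kHz, mono.
  return new Int16Array(bytes.buffer);
}

// Example: peak amplitude of a decoded chunk, normalized to [0, 1]
export function chunkPeak(samples: Int16Array): number {
  let peak = 0;
  for (let i = 0; i < samples.length; i++) {
    peak = Math.max(peak, Math.abs(samples[i]));
  }
  return peak / 0x8000;
}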