Ezmary committed
Commit 29d8bf1 · verified · 1 Parent(s): 50f12c9

Update src/lib/audio-recorder.ts

Files changed (1)
  1. src/lib/audio-recorder.ts +55 -283
src/lib/audio-recorder.ts CHANGED
@@ -4,7 +4,6 @@ import { audioContext } from "./utils";
 import AudioRecordingWorklet from "./worklets/audio-processing";
 import SafariAudioRecordingWorklet from "./worklets/safari-audio-processing";
 import VolMeterWorket from "./worklets/vol-meter";
-
 import { createWorketFromSrc } from "./audioworklet-registry";
 import EventEmitter from "eventemitter3";
 
@@ -18,292 +17,81 @@ function arrayBufferToBase64(buffer: ArrayBuffer) {
   return window.btoa(binary);
 }
 
-// Add Safari-specific audio context creation
 async function createSafariAudioContext(sampleRate: number): Promise<AudioContext> {
-  console.log('Creating Safari audio context with options:', { sampleRate });
-
-  // Safari requires webkit prefix
   const AudioContextClass = (window as any).webkitAudioContext || window.AudioContext;
-  console.log('Using AudioContext class:', AudioContextClass.name);
-
-  const ctx = new AudioContextClass({
-    sampleRate,
-    latencyHint: 'interactive'
-  });
-
-  console.log('Safari AudioContext initial state:', {
-    state: ctx.state,
-    sampleRate: ctx.sampleRate,
-    baseLatency: ctx.baseLatency,
-    destination: ctx.destination,
-  });
-
-  // Safari requires user interaction to start audio context
+  const ctx = new AudioContextClass({ sampleRate, latencyHint: 'interactive' });
   if (ctx.state === 'suspended') {
-    console.log('Attempting to resume suspended Safari audio context...');
-    try {
-      await ctx.resume();
-      console.log('Successfully resumed Safari audio context:', ctx.state);
-    } catch (err) {
-      console.error('Failed to resume Safari audio context:', err);
-      throw err;
-    }
+    await ctx.resume();
   }
-
   return ctx;
 }
 
 export class AudioRecorder extends EventEmitter {
-  stream: MediaStream | undefined;
-  audioContext: AudioContext | undefined;
-  source: MediaStreamAudioSourceNode | undefined;
+  stream?: MediaStream;
+  audioContext?: AudioContext;
+  source?: MediaStreamAudioSourceNode;
   recording: boolean = false;
-  recordingWorklet: AudioWorkletNode | undefined;
-  vuWorklet: AudioWorkletNode | undefined;
-
+  recordingWorklet?: AudioWorkletNode;
+  vuWorklet?: AudioWorkletNode;
   private starting: Promise<void> | null = null;
-
   isSafari: boolean;
-  isIOS: boolean;
 
   constructor(public sampleRate = 16000) {
     super();
     this.isSafari = /^((?!chrome|android).)*safari/i.test(navigator.userAgent);
-    this.isIOS = /iPad|iPhone|iPod/.test(navigator.userAgent) && !(window as any).MSStream;
-    console.log('AudioRecorder initialized:', {
-      isSafari: this.isSafari,
-      isIOS: this.isIOS,
-      sampleRate: this.sampleRate,
-      userAgent: navigator.userAgent,
-      webAudioSupport: !!(window.AudioContext || (window as any).webkitAudioContext),
-      mediaDevicesSupport: !!navigator.mediaDevices
-    });
   }
 
   async start() {
-    if (!navigator.mediaDevices?.getUserMedia) {
-      console.error('MediaDevices API not available:', {
-        mediaDevices: !!navigator.mediaDevices,
-        getUserMedia: !!(navigator.mediaDevices && navigator.mediaDevices.getUserMedia)
-      });
-      throw new Error("Could not request user media");
-    }
-
-    console.log('Starting AudioRecorder with full environment info:', {
-      userAgent: navigator.userAgent,
-      platform: navigator.platform,
-      vendor: navigator.vendor,
-      audioWorkletSupport: !!(window.AudioWorklet),
-      sampleRate: this.sampleRate,
-      existingAudioContext: !!this.audioContext,
-      existingStream: !!this.stream,
-      isSafari: this.isSafari
-    });
-
+    if (this.recording || this.starting) return;
     this.starting = new Promise(async (resolve, reject) => {
       try {
-        if (this.isSafari) {
-          // Safari implementation
-          console.log('Safari detected - using Safari-specific audio initialization');
-
-          // 1. First get audio permissions
-          console.log('Requesting audio permissions first for Safari...');
-          const constraints = {
-            audio: {
-              echoCancellation: false,
-              noiseSuppression: false,
-              autoGainControl: false,
-              sampleRate: this.sampleRate,
-              channelCount: 1
-            }
-          };
-          console.log('Safari audio constraints:', constraints);
-
-          try {
-            this.stream = await navigator.mediaDevices.getUserMedia(constraints);
-            const track = this.stream.getAudioTracks()[0];
-            console.log('Safari audio permissions granted:', {
-              track: track.label,
-              settings: track.getSettings(),
-              constraints: track.getConstraints(),
-              enabled: track.enabled,
-              muted: track.muted,
-              readyState: track.readyState
-            });
-          } catch (err) {
-            console.error('Failed to get Safari audio permissions:', err);
-            throw err;
-          }
-
-          // 2. Create and initialize audio context
-          try {
-            this.audioContext = await createSafariAudioContext(this.sampleRate);
-            console.log('Safari audio context ready:', {
-              state: this.audioContext.state,
-              currentTime: this.audioContext.currentTime
-            });
-          } catch (err) {
-            console.error('Failed to initialize Safari audio context:', err);
-            throw err;
-          }
-
-          // 3. Create and connect audio source
-          try {
-            console.log('Creating Safari audio source...');
-            this.source = this.audioContext.createMediaStreamSource(this.stream);
-            console.log('Safari audio source created successfully:', {
-              numberOfInputs: this.source.numberOfInputs,
-              numberOfOutputs: this.source.numberOfOutputs,
-              channelCount: this.source.channelCount
-            });
-          } catch (err) {
-            console.error('Failed to create Safari audio source:', err);
-            throw err;
-          }
-
-          // 4. Load and create worklets
-          try {
-            const recordingWorkletName = "audio-recorder-worklet";
-            const vuWorkletName = "vu-meter";
-
-            console.log('Loading Safari audio worklets...');
-            await Promise.all([
-              this.audioContext.audioWorklet.addModule(createWorketFromSrc(recordingWorkletName, SafariAudioRecordingWorklet)),
-              this.audioContext.audioWorklet.addModule(createWorketFromSrc(vuWorkletName, VolMeterWorket))
-            ]);
-            console.log('Safari audio worklet modules loaded');
-
-            // Create Recording Worklet
-            this.recordingWorklet = new AudioWorkletNode(this.audioContext, recordingWorkletName, { processorOptions: { sampleRate: this.sampleRate } });
-            this.recordingWorklet.onprocessorerror = (event) => console.error('Safari AudioWorklet processor error:', event);
-            this.recordingWorklet.port.onmessageerror = (event) => console.error('Safari AudioWorklet message error:', event);
-            this.recordingWorklet.port.onmessage = (ev: MessageEvent) => {
-              const data = ev.data.data;
-              if (data?.int16arrayBuffer) {
-                // --- data event for sending to the server ---
-                this.emit("data", arrayBufferToBase64(data.int16arrayBuffer));
-              }
-            };
-            console.log('Safari Recording WorkletNode created successfully');
-
-            // --- 👇 The main change is here: adding the VU meter for Safari 👇 ---
-            this.vuWorklet = new AudioWorkletNode(this.audioContext, vuWorkletName);
-            this.vuWorklet.port.onmessage = (ev: MessageEvent) => {
-              if (ev.data.volume) {
-                // --- volume event for the animation ---
-                this.emit("data", '', ev.data.volume);
-              }
-            };
-            console.log('Safari VU Meter WorkletNode created successfully');
-            // --- 👆 End of the main changes 👆 ---
-
-          } catch (err) {
-            console.error('Failed to setup Safari audio worklets:', err);
-            throw err;
-          }
-
-          // 5. Connect nodes
-          try {
-            console.log('Connecting Safari audio nodes...');
-            this.source.connect(this.recordingWorklet);
-            this.source.connect(this.vuWorklet); // <-- connect the VU meter to the source
-            console.log('Safari audio nodes connected successfully');
-          } catch (err) {
-            console.error('Failed to connect Safari audio nodes:', err);
-            throw err;
-          }
-
-        } else {
-          // Chrome/other browsers implementation
-          console.log('Non-Safari browser detected - using standard audio initialization');
-
-          const constraints = { audio: { echoCancellation: true, noiseSuppression: true, autoGainControl: true, sampleRate: this.sampleRate } };
-          console.log('Chrome audio constraints:', constraints);
-
-          try {
-            this.stream = await navigator.mediaDevices.getUserMedia(constraints);
-            const track = this.stream.getAudioTracks()[0];
-            console.log('Chrome audio permissions granted:', { track: track.label, settings: track.getSettings() });
-          } catch (err) {
-            console.error('Failed to get Chrome audio permissions:', err);
-            throw err;
-          }
-
-          try {
-            this.audioContext = await audioContext({ sampleRate: this.sampleRate });
-            console.log('Chrome audio context created:', { state: this.audioContext.state, sampleRate: this.audioContext.sampleRate });
-          } catch (err) {
-            console.error('Failed to create Chrome audio context:', err);
-            throw err;
-          }
-
-          try {
-            this.source = this.audioContext.createMediaStreamSource(this.stream);
-            console.log('Chrome audio source created');
-          } catch (err) {
-            console.error('Failed to create Chrome audio source:', err);
-            throw err;
+        const constraints = {
+          audio: this.isSafari
+            ? { sampleRate: this.sampleRate, channelCount: 1 }
+            : { echoCancellation: true, noiseSuppression: true, autoGainControl: true, sampleRate: this.sampleRate }
+        };
+        this.stream = await navigator.mediaDevices.getUserMedia(constraints);
+
+        this.audioContext = this.isSafari
+          ? await createSafariAudioContext(this.sampleRate)
+          : await audioContext({ sampleRate: this.sampleRate });
+
+        this.source = this.audioContext.createMediaStreamSource(this.stream);
+
+        const recordingWorkletSrc = this.isSafari ? SafariAudioRecordingWorklet : AudioRecordingWorklet;
+        await this.audioContext.audioWorklet.addModule(createWorketFromSrc("recorder", recordingWorkletSrc));
+        await this.audioContext.audioWorklet.addModule(createWorketFromSrc("vumeter", VolMeterWorket));
+
+        this.recordingWorklet = new AudioWorkletNode(this.audioContext, "recorder", { processorOptions: { sampleRate: this.sampleRate } });
+        this.vuWorklet = new AudioWorkletNode(this.audioContext, "vumeter");
+
+        let lastVolume = 0;
+
+        this.recordingWorklet.port.onmessage = (ev: MessageEvent) => {
+          const arrayBuffer = ev.data.data?.int16arrayBuffer;
+          if (arrayBuffer) {
+            const base64 = arrayBufferToBase64(arrayBuffer);
+            // emit the data together with the most recently received volume
+            this.emit("data", base64, lastVolume);
           }
+        };
 
-          try {
-            const recordingWorkletName = "audio-recorder-worklet";
-            const vuWorkletName = "vu-meter";
-
-            await Promise.all([
-              this.audioContext.audioWorklet.addModule(createWorketFromSrc(recordingWorkletName, AudioRecordingWorklet)),
-              this.audioContext.audioWorklet.addModule(createWorketFromSrc(vuWorkletName, VolMeterWorket))
-            ]);
-            console.log('Chrome audio worklets loaded');
-
-            this.recordingWorklet = new AudioWorkletNode(this.audioContext, recordingWorkletName, { processorOptions: { sampleRate: this.sampleRate } });
-            this.recordingWorklet.onprocessorerror = (event) => console.error('Chrome AudioWorklet processor error:', event);
-            this.recordingWorklet.port.onmessageerror = (event) => console.error('Chrome AudioWorklet message error:', event);
-
-            this.vuWorklet = new AudioWorkletNode(this.audioContext, vuWorkletName);
-
-            // a single onmessage handler for both events
-            let lastBase64 = '';
-            let lastVolume = 0;
-
-            const handleMessage = () => {
-              this.emit("data", lastBase64, lastVolume);
-            }
-
-            this.recordingWorklet.port.onmessage = async (ev: MessageEvent) => {
-              const arrayBuffer = ev.data.data?.int16arrayBuffer;
-              if (arrayBuffer) {
-                lastBase64 = arrayBufferToBase64(arrayBuffer);
-                handleMessage();
-              }
-            };
-
-            this.vuWorklet.port.onmessage = (ev: MessageEvent) => {
-              if (ev.data.volume !== undefined) {
-                lastVolume = ev.data.volume;
-                handleMessage();
-              }
-            };
-
-            console.log('Chrome AudioWorkletNodes created');
-
-            this.source.connect(this.recordingWorklet);
-            this.source.connect(this.vuWorklet);
-            console.log('Chrome audio nodes connected');
-
-          } catch (err) {
-            console.error('Failed to setup/connect Chrome audio nodes:', err);
-            throw err;
+        this.vuWorklet.port.onmessage = (ev: MessageEvent) => {
+          if (typeof ev.data.volume === 'number') {
+            lastVolume = ev.data.volume;
           }
-        }
+        };
+
+        this.source.connect(this.recordingWorklet);
+        this.source.connect(this.vuWorklet);
 
         this.recording = true;
-        console.log('Recording started successfully');
         resolve();
-        this.starting = null;
       } catch (error) {
         console.error('Failed to start recording:', error);
         this.stop();
         reject(error);
+      } finally {
         this.starting = null;
       }
     });
@@ -311,41 +99,25 @@ export class AudioRecorder extends EventEmitter {
   }
 
   stop() {
-    console.log('Stopping audio recorder...');
+    if (!this.recording && !this.starting) return;
     const handleStop = () => {
-      try {
       this.recording = false;
-        if (this.source) {
-          console.log('Disconnecting audio source...');
-          this.source.disconnect();
-        }
-        if (this.stream) {
-          console.log('Stopping media stream tracks...');
-          this.stream.getTracks().forEach(track => {
-            track.stop();
-            console.log('Stopped track:', track.label);
-          });
-        }
-        if (this.audioContext && (this.audioContext.state === 'running' || this.isSafari)) {
-          console.log('Closing audio context...');
-          this.audioContext.close();
+      this.source?.disconnect();
+      this.stream?.getTracks().forEach(track => track.stop());
+      if (this.audioContext?.state === 'running') {
+        this.audioContext.close();
       }
       this.stream = undefined;
+      this.audioContext = undefined;
+      this.source = undefined;
      this.recordingWorklet = undefined;
      this.vuWorklet = undefined;
-      this.source = undefined;
-      this.audioContext = undefined;
      this.emit("stop");
-      console.log('Audio recorder stopped successfully');
-      } catch (err) {
-        console.error('Error while stopping audio recorder:', err);
-      }
     };
     if (this.starting) {
-      console.log('Stop called while starting - waiting for start to complete...');
-      this.starting.then(handleStop);
-      return;
+      this.starting.then(handleStop).catch(handleStop);
+    } else {
+      handleStop();
    }
-    handleStop();
  }
 }
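For reference, here is a minimal usage sketch of the refactored class. It assumes a consumer imports AudioRecorder from this module and listens for the "data" event, which after this commit always carries the base64-encoded PCM chunk together with the most recent volume reading. The import path and the helper names (sendChunkToServer, updateVolumeMeter) are illustrative assumptions, not part of the commit.

```ts
// Hypothetical consumer of src/lib/audio-recorder.ts (sketch only).
import { AudioRecorder } from "./lib/audio-recorder"; // assumed relative path

async function demo() {
  const recorder = new AudioRecorder(16000);

  // After this commit, "data" is emitted with (base64Pcm, volume) in one event.
  recorder.on("data", (base64Pcm: string, volume: number) => {
    sendChunkToServer(base64Pcm); // illustrative transport helper
    updateVolumeMeter(volume);    // illustrative UI helper for the mic animation
  });

  recorder.on("stop", () => {
    console.log("Recorder stopped");
  });

  // start() is now a no-op if a start is already pending or recording is active.
  await recorder.start();

  // ...later, e.g. when the user ends the session:
  recorder.stop();
}

// Placeholder helpers so the sketch is self-contained.
function sendChunkToServer(chunk: string): void {
  // e.g. websocket.send(chunk)
}
function updateVolumeMeter(volume: number): void {
  // e.g. scale a level indicator between 0 and 1
}

demo();
```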