Ezmary committed on
Commit
cdd3d29
·
verified ·
1 Parent(s): 59d8c15

Update src/lib/audio-recorder.ts

Browse files
Files changed (1) hide show
  1. src/lib/audio-recorder.ts +211 -97
src/lib/audio-recorder.ts CHANGED
@@ -18,12 +18,27 @@ function arrayBufferToBase64(buffer: ArrayBuffer) {
18
  return window.btoa(binary);
19
  }
20
 
 
21
  async function createSafariAudioContext(sampleRate: number): Promise<AudioContext> {
22
  console.log('Creating Safari audio context with options:', { sampleRate });
 
 
23
  const AudioContextClass = (window as any).webkitAudioContext || window.AudioContext;
24
  console.log('Using AudioContext class:', AudioContextClass.name);
25
- const ctx = new AudioContextClass({ sampleRate, latencyHint: 'interactive' });
26
- console.log('Safari AudioContext initial state:', { state: ctx.state, sampleRate: ctx.sampleRate, baseLatency: ctx.baseLatency, destination: ctx.destination });
 
 
 
 
 
 
 
 
 
 
 
 
27
  if (ctx.state === 'suspended') {
28
  console.log('Attempting to resume suspended Safari audio context...');
29
  try {
@@ -34,6 +49,7 @@ async function createSafariAudioContext(sampleRate: number): Promise<AudioContex
34
  throw err;
35
  }
36
  }
 
37
  return ctx;
38
  }
39
 
@@ -54,133 +70,236 @@ export class AudioRecorder extends EventEmitter {
54
  super();
55
  this.isSafari = /^((?!chrome|android).)*safari/i.test(navigator.userAgent);
56
  this.isIOS = /iPad|iPhone|iPod/.test(navigator.userAgent) && !(window as any).MSStream;
57
- console.log('AudioRecorder initialized:', { isSafari: this.isSafari, isIOS: this.isIOS, sampleRate: this.sampleRate, userAgent: navigator.userAgent, webAudioSupport: !!(window.AudioContext || (window as any).webkitAudioContext), mediaDevicesSupport: !!navigator.mediaDevices });
 
 
 
 
 
 
 
58
  }
59
 
60
  async start() {
61
  if (!navigator.mediaDevices?.getUserMedia) {
62
- console.error('MediaDevices API not available:', { mediaDevices: !!navigator.mediaDevices, getUserMedia: !!(navigator.mediaDevices && navigator.mediaDevices.getUserMedia) });
 
 
 
63
  throw new Error("Could not request user media");
64
  }
65
 
66
- console.log('Starting AudioRecorder with full environment info:', { userAgent: navigator.userAgent, platform: navigator.platform, vendor: navigator.vendor, audioWorkletSupport: !!(window.AudioWorklet), sampleRate: this.sampleRate, existingAudioContext: !!this.audioContext, existingStream: !!this.stream, isSafari: this.isSafari });
 
 
 
 
 
 
 
 
 
67
 
68
  this.starting = new Promise(async (resolve, reject) => {
69
  try {
70
- const constraints = {
 
 
 
 
 
 
71
  audio: {
72
- echoCancellation: !this.isSafari, // Safari handles this differently
73
- noiseSuppression: !this.isSafari,
74
- autoGainControl: !this.isSafari,
75
  sampleRate: this.sampleRate,
76
  channelCount: 1
77
  }
78
- };
79
- console.log('Using audio constraints:', constraints);
80
-
81
- // 1. Get User Media
82
- try {
83
  this.stream = await navigator.mediaDevices.getUserMedia(constraints);
84
  const track = this.stream.getAudioTracks()[0];
85
- console.log('Audio permissions granted:', { track: track.label, settings: track.getSettings() });
86
- } catch (err) {
87
- console.error('Failed to get audio permissions:', err);
 
 
 
 
 
 
 
88
  throw err;
89
- }
90
 
91
- // 2. Create Audio Context
92
- try {
93
- this.audioContext = this.isSafari
94
- ? await createSafariAudioContext(this.sampleRate)
95
- : await audioContext({ sampleRate: this.sampleRate });
96
- console.log('Audio context ready:', { state: this.audioContext.state, sampleRate: this.audioContext.sampleRate });
97
- } catch (err) {
98
- console.error('Failed to initialize audio context:', err);
 
99
  throw err;
100
- }
101
 
102
- // 3. Create Audio Source
103
- try {
 
104
  this.source = this.audioContext.createMediaStreamSource(this.stream);
105
- console.log('Audio source created.');
106
- } catch (err) {
107
- console.error('Failed to create audio source:', err);
 
 
 
 
108
  throw err;
109
- }
110
 
111
- // 4. Load and Create Worklets (Recording and VU Meter for ALL browsers)
112
- try {
113
  const recordingWorkletName = "audio-recorder-worklet";
114
- const recordingWorkletSrc = createWorketFromSrc(recordingWorkletName, this.isSafari ? SafariAudioRecordingWorklet : AudioRecordingWorklet);
115
- await this.audioContext.audioWorklet.addModule(recordingWorkletSrc);
116
- console.log('Recording worklet module loaded.');
 
 
 
 
 
117
 
 
118
  this.recordingWorklet = new AudioWorkletNode(this.audioContext, recordingWorkletName, { processorOptions: { sampleRate: this.sampleRate } });
119
-
 
120
  this.recordingWorklet.port.onmessage = (ev: MessageEvent) => {
121
- const arrayBuffer = ev.data.data?.int16arrayBuffer;
122
- if (arrayBuffer) {
123
- // *** تغییر اصلی: حالا data و volume را با هم می‌فرستیم ***
124
- // توجه: در worklet ها محاسبه حجم صدا را اضافه خواهیم کرد
125
- const arrayBufferString = arrayBufferToBase64(arrayBuffer);
126
- // ما به جای رویداد مجزا، حجم را همراه با داده‌ها می‌فرستیم
127
- // این کار را در ControlTray.tsx مدیریت خواهیم کرد
128
- // this.emit("data", arrayBufferString);
129
- } else {
130
- console.warn('Invalid audio chunk received:', ev.data);
131
  }
132
  };
133
- this.recordingWorklet.onprocessorerror = (event) => console.error('AudioWorklet processor error:', event);
134
- console.log('Recording worklet node created.');
135
-
136
- // --- 👇 بخش حیاتی: افزودن VU Meter برای همه مرورگرها 👇 ---
137
- const vuWorkletName = "vu-meter";
138
- await this.audioContext.audioWorklet.addModule(createWorketFromSrc(vuWorkletName, VolMeterWorket));
139
- this.vuWorklet = new AudioWorkletNode(this.audioContext, vuWorkletName);
140
 
141
- // *** تغییر اصلی: رویداد data را با حجم صدا ترکیب می‌کنیم ***
 
142
  this.vuWorklet.port.onmessage = (ev: MessageEvent) => {
143
- if (ev.data.volume && this.recordingWorklet) {
144
- // حالا هم داده خام صدا و هم حجم صدا را داریم
145
- // یک پیام به worklet ضبط می‌فرستیم تا داده‌های بافر شده را بگیریم
146
- this.recordingWorklet.port.postMessage({ getBuffer: true });
147
  }
148
  };
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
149
 
150
- // دریافت پاسخ از worklet ضبط و ارسال نهایی
151
- this.recordingWorklet.port.onmessage = (ev: MessageEvent) => {
152
- const arrayBuffer = ev.data.buffer;
153
- const volume = ev.data.volume; // فرض می‌کنیم worklet حجم را هم برمی‌گرداند
154
-
155
- if (arrayBuffer && typeof volume !== 'undefined') {
156
- const arrayBufferString = arrayBufferToBase64(arrayBuffer);
157
- // *** حالا هر دو مقدار را با هم emit می‌کنیم ***
158
- this.emit("data", arrayBufferString, volume);
159
- }
160
- };
161
 
162
- console.log('VU meter worklet created and configured.');
 
 
163
 
164
- } catch (err) {
165
- console.error('Failed to setup audio worklets:', err);
166
- throw err;
167
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
168
 
169
- // 5. Connect Nodes
170
- try {
171
  this.source.connect(this.recordingWorklet);
172
- this.source.connect(this.vuWorklet); // اتصال VU متر به منبع
173
- console.log('Audio nodes connected successfully.');
174
- } catch (err) {
175
- console.error('Failed to connect audio nodes:', err);
 
176
  throw err;
 
177
  }
178
-
179
  this.recording = true;
180
  console.log('Recording started successfully');
181
  resolve();
182
  this.starting = null;
183
-
184
  } catch (error) {
185
  console.error('Failed to start recording:', error);
186
  this.stop();
@@ -195,10 +314,10 @@ export class AudioRecorder extends EventEmitter {
195
  console.log('Stopping audio recorder...');
196
  const handleStop = () => {
197
  try {
 
198
  if (this.source) {
199
  console.log('Disconnecting audio source...');
200
  this.source.disconnect();
201
- this.source = undefined;
202
  }
203
  if (this.stream) {
204
  console.log('Stopping media stream tracks...');
@@ -206,32 +325,27 @@ export class AudioRecorder extends EventEmitter {
206
  track.stop();
207
  console.log('Stopped track:', track.label);
208
  });
209
- this.stream = undefined;
210
  }
211
- // بستن audioContext فقط در سافاری و در صورت نیاز
212
- if (this.audioContext && this.isSafari && this.audioContext.state !== 'closed') {
213
- console.log('Closing Safari audio context...');
214
  this.audioContext.close();
215
- this.audioContext = undefined;
216
  }
217
-
218
  this.recordingWorklet = undefined;
219
  this.vuWorklet = undefined;
220
- this.recording = false;
 
221
  this.emit("stop");
222
  console.log('Audio recorder stopped successfully');
223
  } catch (err) {
224
  console.error('Error while stopping audio recorder:', err);
225
  }
226
  };
227
-
228
  if (this.starting) {
229
  console.log('Stop called while starting - waiting for start to complete...');
230
- this.starting.then(handleStop).catch(handleStop);
231
- this.starting = null;
232
  return;
233
  }
234
-
235
  handleStop();
236
  }
237
  }
 
18
  return window.btoa(binary);
19
  }
20
 
21
+ // Add Safari-specific audio context creation
22
  async function createSafariAudioContext(sampleRate: number): Promise<AudioContext> {
23
  console.log('Creating Safari audio context with options:', { sampleRate });
24
+
25
+ // Safari requires webkit prefix
26
  const AudioContextClass = (window as any).webkitAudioContext || window.AudioContext;
27
  console.log('Using AudioContext class:', AudioContextClass.name);
28
+
29
+ const ctx = new AudioContextClass({
30
+ sampleRate,
31
+ latencyHint: 'interactive'
32
+ });
33
+
34
+ console.log('Safari AudioContext initial state:', {
35
+ state: ctx.state,
36
+ sampleRate: ctx.sampleRate,
37
+ baseLatency: ctx.baseLatency,
38
+ destination: ctx.destination,
39
+ });
40
+
41
+ // Safari requires user interaction to start audio context
42
  if (ctx.state === 'suspended') {
43
  console.log('Attempting to resume suspended Safari audio context...');
44
  try {
 
49
  throw err;
50
  }
51
  }
52
+
53
  return ctx;
54
  }
55
 
 
70
  super();
71
  this.isSafari = /^((?!chrome|android).)*safari/i.test(navigator.userAgent);
72
  this.isIOS = /iPad|iPhone|iPod/.test(navigator.userAgent) && !(window as any).MSStream;
73
+ console.log('AudioRecorder initialized:', {
74
+ isSafari: this.isSafari,
75
+ isIOS: this.isIOS,
76
+ sampleRate: this.sampleRate,
77
+ userAgent: navigator.userAgent,
78
+ webAudioSupport: !!(window.AudioContext || (window as any).webkitAudioContext),
79
+ mediaDevicesSupport: !!navigator.mediaDevices
80
+ });
81
  }
82
 
83
  async start() {
84
  if (!navigator.mediaDevices?.getUserMedia) {
85
+ console.error('MediaDevices API not available:', {
86
+ mediaDevices: !!navigator.mediaDevices,
87
+ getUserMedia: !!(navigator.mediaDevices && navigator.mediaDevices.getUserMedia)
88
+ });
89
  throw new Error("Could not request user media");
90
  }
91
 
92
+ console.log('Starting AudioRecorder with full environment info:', {
93
+ userAgent: navigator.userAgent,
94
+ platform: navigator.platform,
95
+ vendor: navigator.vendor,
96
+ audioWorkletSupport: !!(window.AudioWorklet),
97
+ sampleRate: this.sampleRate,
98
+ existingAudioContext: !!this.audioContext,
99
+ existingStream: !!this.stream,
100
+ isSafari: this.isSafari
101
+ });
102
 
103
  this.starting = new Promise(async (resolve, reject) => {
104
  try {
105
+ if (this.isSafari) {
106
+ // Safari implementation
107
+ console.log('Safari detected - using Safari-specific audio initialization');
108
+
109
+ // 1. First get audio permissions
110
+ console.log('Requesting audio permissions first for Safari...');
111
+ const constraints = {
112
  audio: {
113
+ echoCancellation: false,
114
+ noiseSuppression: false,
115
+ autoGainControl: false,
116
  sampleRate: this.sampleRate,
117
  channelCount: 1
118
  }
119
+ };
120
+ console.log('Safari audio constraints:', constraints);
121
+
122
+ try {
 
123
  this.stream = await navigator.mediaDevices.getUserMedia(constraints);
124
  const track = this.stream.getAudioTracks()[0];
125
+ console.log('Safari audio permissions granted:', {
126
+ track: track.label,
127
+ settings: track.getSettings(),
128
+ constraints: track.getConstraints(),
129
+ enabled: track.enabled,
130
+ muted: track.muted,
131
+ readyState: track.readyState
132
+ });
133
+ } catch (err) {
134
+ console.error('Failed to get Safari audio permissions:', err);
135
  throw err;
136
+ }
137
 
138
+ // 2. Create and initialize audio context
139
+ try {
140
+ this.audioContext = await createSafariAudioContext(this.sampleRate);
141
+ console.log('Safari audio context ready:', {
142
+ state: this.audioContext.state,
143
+ currentTime: this.audioContext.currentTime
144
+ });
145
+ } catch (err) {
146
+ console.error('Failed to initialize Safari audio context:', err);
147
  throw err;
148
+ }
149
 
150
+ // 3. Create and connect audio source
151
+ try {
152
+ console.log('Creating Safari audio source...');
153
  this.source = this.audioContext.createMediaStreamSource(this.stream);
154
+ console.log('Safari audio source created successfully:', {
155
+ numberOfInputs: this.source.numberOfInputs,
156
+ numberOfOutputs: this.source.numberOfOutputs,
157
+ channelCount: this.source.channelCount
158
+ });
159
+ } catch (err) {
160
+ console.error('Failed to create Safari audio source:', err);
161
  throw err;
162
+ }
163
 
164
+ // 4. Load and create worklets
165
+ try {
166
  const recordingWorkletName = "audio-recorder-worklet";
167
+ const vuWorkletName = "vu-meter";
168
+
169
+ console.log('Loading Safari audio worklets...');
170
+ await Promise.all([
171
+ this.audioContext.audioWorklet.addModule(createWorketFromSrc(recordingWorkletName, SafariAudioRecordingWorklet)),
172
+ this.audioContext.audioWorklet.addModule(createWorketFromSrc(vuWorkletName, VolMeterWorket))
173
+ ]);
174
+ console.log('Safari audio worklet modules loaded');
175
 
176
+ // Create Recording Worklet
177
  this.recordingWorklet = new AudioWorkletNode(this.audioContext, recordingWorkletName, { processorOptions: { sampleRate: this.sampleRate } });
178
+ this.recordingWorklet.onprocessorerror = (event) => console.error('Safari AudioWorklet processor error:', event);
179
+ this.recordingWorklet.port.onmessageerror = (event) => console.error('Safari AudioWorklet message error:', event);
180
  this.recordingWorklet.port.onmessage = (ev: MessageEvent) => {
181
+ const data = ev.data.data;
182
+ if (data?.int16arrayBuffer) {
183
+ // --- رویداد دیتا برای ارسال به سرور ---
184
+ this.emit("data", arrayBufferToBase64(data.int16arrayBuffer));
 
 
 
 
 
 
185
  }
186
  };
187
+ console.log('Safari Recording WorkletNode created successfully');
 
 
 
 
 
 
188
 
189
+ // --- 👇 تغییر اصلی اینجاست: اضافه کردن VU Meter برای سافاری 👇 ---
190
+ this.vuWorklet = new AudioWorkletNode(this.audioContext, vuWorkletName);
191
  this.vuWorklet.port.onmessage = (ev: MessageEvent) => {
192
+ if (ev.data.volume) {
193
+ // --- رویداد حجم صدا برای انیمیشن ---
194
+ this.emit("data", '', ev.data.volume);
 
195
  }
196
  };
197
+ console.log('Safari VU Meter WorkletNode created successfully');
198
+ // --- 👆 پایان تغییرات اصلی 👆 ---
199
+
200
+ } catch (err) {
201
+ console.error('Failed to setup Safari audio worklets:', err);
202
+ throw err;
203
+ }
204
+
205
+ // 5. Connect nodes
206
+ try {
207
+ console.log('Connecting Safari audio nodes...');
208
+ this.source.connect(this.recordingWorklet);
209
+ this.source.connect(this.vuWorklet); // <-- اتصال VU meter به سورس
210
+ console.log('Safari audio nodes connected successfully');
211
+ } catch (err) {
212
+ console.error('Failed to connect Safari audio nodes:', err);
213
+ throw err;
214
+ }
215
+
216
+ } else {
217
+ // Chrome/other browsers implementation
218
+ console.log('Non-Safari browser detected - using standard audio initialization');
219
+
220
+ const constraints = { audio: { echoCancellation: true, noiseSuppression: true, autoGainControl: true, sampleRate: this.sampleRate } };
221
+ console.log('Chrome audio constraints:', constraints);
222
+
223
+ try {
224
+ this.stream = await navigator.mediaDevices.getUserMedia(constraints);
225
+ const track = this.stream.getAudioTracks()[0];
226
+ console.log('Chrome audio permissions granted:', { track: track.label, settings: track.getSettings() });
227
+ } catch (err) {
228
+ console.error('Failed to get Chrome audio permissions:', err);
229
+ throw err;
230
+ }
231
+
232
+ try {
233
+ this.audioContext = await audioContext({ sampleRate: this.sampleRate });
234
+ console.log('Chrome audio context created:', { state: this.audioContext.state, sampleRate: this.audioContext.sampleRate });
235
+ } catch (err) {
236
+ console.error('Failed to create Chrome audio context:', err);
237
+ throw err;
238
+ }
239
+
240
+ try {
241
+ this.source = this.audioContext.createMediaStreamSource(this.stream);
242
+ console.log('Chrome audio source created');
243
+ } catch (err) {
244
+ console.error('Failed to create Chrome audio source:', err);
245
+ throw err;
246
+ }
247
+
248
+ try {
249
+ const recordingWorkletName = "audio-recorder-worklet";
250
+ const vuWorkletName = "vu-meter";
251
+
252
+ await Promise.all([
253
+ this.audioContext.audioWorklet.addModule(createWorketFromSrc(recordingWorkletName, AudioRecordingWorklet)),
254
+ this.audioContext.audioWorklet.addModule(createWorketFromSrc(vuWorkletName, VolMeterWorket))
255
+ ]);
256
+ console.log('Chrome audio worklets loaded');
257
+
258
+ this.recordingWorklet = new AudioWorkletNode(this.audioContext, recordingWorkletName, { processorOptions: { sampleRate: this.sampleRate } });
259
+ this.recordingWorklet.onprocessorerror = (event) => console.error('Chrome AudioWorklet processor error:', event);
260
+ this.recordingWorklet.port.onmessageerror = (event) => console.error('Chrome AudioWorklet message error:', event);
261
 
262
+ this.vuWorklet = new AudioWorkletNode(this.audioContext, vuWorkletName);
 
 
 
 
 
 
 
 
 
 
263
 
264
+ // یک onmessage handler برای هر دو رویداد
265
+ let lastBase64 = '';
266
+ let lastVolume = 0;
267
 
268
+ const handleMessage = () => {
269
+ this.emit("data", lastBase64, lastVolume);
270
+ }
271
+
272
+ this.recordingWorklet.port.onmessage = async (ev: MessageEvent) => {
273
+ const arrayBuffer = ev.data.data?.int16arrayBuffer;
274
+ if (arrayBuffer) {
275
+ lastBase64 = arrayBufferToBase64(arrayBuffer);
276
+ handleMessage();
277
+ }
278
+ };
279
+
280
+ this.vuWorklet.port.onmessage = (ev: MessageEvent) => {
281
+ if (ev.data.volume !== undefined) {
282
+ lastVolume = ev.data.volume;
283
+ handleMessage();
284
+ }
285
+ };
286
+
287
+ console.log('Chrome AudioWorkletNodes created');
288
 
 
 
289
  this.source.connect(this.recordingWorklet);
290
+ this.source.connect(this.vuWorklet);
291
+ console.log('Chrome audio nodes connected');
292
+
293
+ } catch (err) {
294
+ console.error('Failed to setup/connect Chrome audio nodes:', err);
295
  throw err;
296
+ }
297
  }
298
+
299
  this.recording = true;
300
  console.log('Recording started successfully');
301
  resolve();
302
  this.starting = null;
 
303
  } catch (error) {
304
  console.error('Failed to start recording:', error);
305
  this.stop();
 
314
  console.log('Stopping audio recorder...');
315
  const handleStop = () => {
316
  try {
317
+ this.recording = false;
318
  if (this.source) {
319
  console.log('Disconnecting audio source...');
320
  this.source.disconnect();
 
321
  }
322
  if (this.stream) {
323
  console.log('Stopping media stream tracks...');
 
325
  track.stop();
326
  console.log('Stopped track:', track.label);
327
  });
 
328
  }
329
+ if (this.audioContext && (this.audioContext.state === 'running' || this.isSafari)) {
330
+ console.log('Closing audio context...');
 
331
  this.audioContext.close();
 
332
  }
333
+ this.stream = undefined;
334
  this.recordingWorklet = undefined;
335
  this.vuWorklet = undefined;
336
+ this.source = undefined;
337
+ this.audioContext = undefined;
338
  this.emit("stop");
339
  console.log('Audio recorder stopped successfully');
340
  } catch (err) {
341
  console.error('Error while stopping audio recorder:', err);
342
  }
343
  };
 
344
  if (this.starting) {
345
  console.log('Stop called while starting - waiting for start to complete...');
346
+ this.starting.then(handleStop);
 
347
  return;
348
  }
 
349
  handleStop();
350
  }
351
  }