// src/lib/audio-recorder.ts

import { audioContext } from "./utils";
import AudioRecordingWorklet from "./worklets/audio-processing";
import SafariAudioRecordingWorklet from "./worklets/safari-audio-processing";
import VolMeterWorket from "./worklets/vol-meter";

import { createWorketFromSrc } from "./audioworklet-registry";
import EventEmitter from "eventemitter3";

// Encode a raw PCM ArrayBuffer as a base64 string for transport.
function arrayBufferToBase64(buffer: ArrayBuffer) {
  let binary = "";
  const bytes = new Uint8Array(buffer);
  const len = bytes.byteLength;
  for (let i = 0; i < len; i++) {
    binary += String.fromCharCode(bytes[i]);
  }
  return window.btoa(binary);
}

// Safari-specific AudioContext creation
async function createSafariAudioContext(sampleRate: number): Promise<AudioContext> {
  console.log('Creating Safari audio context with options:', { sampleRate });
  
  // Older Safari only exposes the webkit-prefixed constructor; fall back to the standard one
  const AudioContextClass = (window as any).webkitAudioContext || window.AudioContext;
  console.log('Using AudioContext class:', AudioContextClass.name);
  
  const ctx = new AudioContextClass({
    sampleRate,
    latencyHint: 'interactive'
  });
  
  console.log('Safari AudioContext initial state:', {
    state: ctx.state,
    sampleRate: ctx.sampleRate,
    baseLatency: ctx.baseLatency,
    destination: ctx.destination,
  });

  // Safari requires user interaction to start audio context
  if (ctx.state === 'suspended') {
    console.log('Attempting to resume suspended Safari audio context...');
    try {
      await ctx.resume();
      console.log('Successfully resumed Safari audio context:', ctx.state);
    } catch (err) {
      console.error('Failed to resume Safari audio context:', err);
      throw err;
    }
  }

  return ctx;
}

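/**
 * AudioRecorder captures microphone input through AudioWorklet nodes and emits:
 *   - "data": (base64-encoded 16-bit PCM chunk, volume?) as audio arrives
 *   - "stop": once recording has been torn down
 * Safari/iOS take a separate initialization path: webkit-prefixed AudioContext,
 * relaxed constraints, and a Safari-specific recording worklet.
 */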
export class AudioRecorder extends EventEmitter {
  stream: MediaStream | undefined;
  audioContext: AudioContext | undefined;
  source: MediaStreamAudioSourceNode | undefined;
  recording: boolean = false;
  recordingWorklet: AudioWorkletNode | undefined;
  vuWorklet: AudioWorkletNode | undefined;

  private starting: Promise<void> | null = null;

  isSafari: boolean;
  isIOS: boolean;

  constructor(public sampleRate = 16000) {
    super();
    this.isSafari = /^((?!chrome|android).)*safari/i.test(navigator.userAgent);
    this.isIOS = /iPad|iPhone|iPod/.test(navigator.userAgent) && !(window as any).MSStream;
    console.log('AudioRecorder initialized:', { 
      isSafari: this.isSafari, 
      isIOS: this.isIOS,
      sampleRate: this.sampleRate,
      userAgent: navigator.userAgent,
      webAudioSupport: !!(window.AudioContext || (window as any).webkitAudioContext),
      mediaDevicesSupport: !!navigator.mediaDevices
    });
  }

  async start() {
    if (!navigator.mediaDevices?.getUserMedia) {
      console.error('MediaDevices API not available:', { 
        mediaDevices: !!navigator.mediaDevices,
        getUserMedia: !!(navigator.mediaDevices && navigator.mediaDevices.getUserMedia)
      });
      throw new Error("Could not request user media");
    }

    console.log('Starting AudioRecorder with full environment info:', {
      userAgent: navigator.userAgent,
      platform: navigator.platform,
      vendor: navigator.vendor,
      audioWorkletSupport: !!(window.AudioWorklet),
      sampleRate: this.sampleRate,
      existingAudioContext: !!this.audioContext,
      existingStream: !!this.stream,
      isSafari: this.isSafari
    });

    this.starting = new Promise(async (resolve, reject) => {
      try {
        if (this.isSafari) {
          // Safari implementation
          console.log('Safari detected - using Safari-specific audio initialization');
          
          // 1. First get audio permissions
          console.log('Requesting audio permissions first for Safari...');
          const constraints = {
            audio: {
              echoCancellation: false,
              noiseSuppression: false,
              autoGainControl: false,
              sampleRate: this.sampleRate,
              channelCount: 1
            }
          };
          console.log('Safari audio constraints:', constraints);
          
          try {
            this.stream = await navigator.mediaDevices.getUserMedia(constraints);
            const track = this.stream.getAudioTracks()[0];
            console.log('Safari audio permissions granted:', {
              track: track.label,
              settings: track.getSettings(),
              constraints: track.getConstraints(),
              enabled: track.enabled,
              muted: track.muted,
              readyState: track.readyState
            });
          } catch (err) {
            console.error('Failed to get Safari audio permissions:', err);
            throw err;
          }

          // 2. Create and initialize audio context
          try {
            this.audioContext = await createSafariAudioContext(this.sampleRate);
            console.log('Safari audio context ready:', {
              state: this.audioContext.state,
              currentTime: this.audioContext.currentTime
            });
          } catch (err) {
            console.error('Failed to initialize Safari audio context:', err);
            throw err;
          }

          // 3. Create and connect audio source
          try {
            console.log('Creating Safari audio source...');
            this.source = this.audioContext.createMediaStreamSource(this.stream);
            console.log('Safari audio source created successfully:', {
              numberOfInputs: this.source.numberOfInputs,
              numberOfOutputs: this.source.numberOfOutputs,
              channelCount: this.source.channelCount
            });
          } catch (err) {
            console.error('Failed to create Safari audio source:', err);
            throw err;
          }

          // 4. Load and create worklets
          try {
            const recordingWorkletName = "audio-recorder-worklet";
            const vuWorkletName = "vu-meter";

            console.log('Loading Safari audio worklets...');
            await Promise.all([
                this.audioContext.audioWorklet.addModule(createWorketFromSrc(recordingWorkletName, SafariAudioRecordingWorklet)),
                this.audioContext.audioWorklet.addModule(createWorketFromSrc(vuWorkletName, VolMeterWorket))
            ]);
            console.log('Safari audio worklet modules loaded');

            // Create Recording Worklet
            this.recordingWorklet = new AudioWorkletNode(this.audioContext, recordingWorkletName, { processorOptions: { sampleRate: this.sampleRate } });
            this.recordingWorklet.onprocessorerror = (event) => console.error('Safari AudioWorklet processor error:', event);
            this.recordingWorklet.port.onmessageerror = (event) => console.error('Safari AudioWorklet message error:', event);
            this.recordingWorklet.port.onmessage = (ev: MessageEvent) => {
              const data = ev.data.data;
              if (data?.int16arrayBuffer) {
                // Data event: base64-encoded PCM chunk to send to the server
                this.emit("data", arrayBufferToBase64(data.int16arrayBuffer));
              }
            };
            console.log('Safari Recording WorkletNode created successfully');

            // Main change: also create a VU meter worklet for Safari so volume levels are emitted
            this.vuWorklet = new AudioWorkletNode(this.audioContext, vuWorkletName);
            this.vuWorklet.port.onmessage = (ev: MessageEvent) => {
                if (ev.data.volume) {
                    // Volume event for the level animation (empty audio payload)
                    this.emit("data", '', ev.data.volume); 
                }
            };
            console.log('Safari VU Meter WorkletNode created successfully');
            // End of the Safari VU meter change

          } catch (err) {
            console.error('Failed to setup Safari audio worklets:', err);
            throw err;
          }

          // 5. Connect nodes
          try {
            console.log('Connecting Safari audio nodes...');
            this.source.connect(this.recordingWorklet);
            this.source.connect(this.vuWorklet); // also connect the VU meter to the source
            console.log('Safari audio nodes connected successfully');
          } catch (err) {
            console.error('Failed to connect Safari audio nodes:', err);
            throw err;
          }

        } else {
          // Chrome/other browsers implementation
          console.log('Non-Safari browser detected - using standard audio initialization');
          
          const constraints = { audio: { echoCancellation: true, noiseSuppression: true, autoGainControl: true, sampleRate: this.sampleRate } };
          console.log('Chrome audio constraints:', constraints);
          
          try {
            this.stream = await navigator.mediaDevices.getUserMedia(constraints);
            const track = this.stream.getAudioTracks()[0];
            console.log('Chrome audio permissions granted:', { track: track.label, settings: track.getSettings() });
          } catch (err) {
            console.error('Failed to get Chrome audio permissions:', err);
            throw err;
          }
          
          try {
            this.audioContext = await audioContext({ sampleRate: this.sampleRate });
            console.log('Chrome audio context created:', { state: this.audioContext.state, sampleRate: this.audioContext.sampleRate });
          } catch (err) {
            console.error('Failed to create Chrome audio context:', err);
            throw err;
          }

          try {
            this.source = this.audioContext.createMediaStreamSource(this.stream);
            console.log('Chrome audio source created');
          } catch (err) {
            console.error('Failed to create Chrome audio source:', err);
            throw err;
          }

          try {
            const recordingWorkletName = "audio-recorder-worklet";
            const vuWorkletName = "vu-meter";

            await Promise.all([
                this.audioContext.audioWorklet.addModule(createWorketFromSrc(recordingWorkletName, AudioRecordingWorklet)),
                this.audioContext.audioWorklet.addModule(createWorketFromSrc(vuWorkletName, VolMeterWorket))
            ]);
            console.log('Chrome audio worklets loaded');

            this.recordingWorklet = new AudioWorkletNode(this.audioContext, recordingWorkletName, { processorOptions: { sampleRate: this.sampleRate } });
            this.recordingWorklet.onprocessorerror = (event) => console.error('Chrome AudioWorklet processor error:', event);
            this.recordingWorklet.port.onmessageerror = (event) => console.error('Chrome AudioWorklet message error:', event);
            
            this.vuWorklet = new AudioWorkletNode(this.audioContext, vuWorkletName);
            
            // Merge both worklet outputs into a single "data" emit (latest audio chunk + latest volume)
            let lastBase64 = '';
            let lastVolume = 0;

            const handleMessage = () => {
                this.emit("data", lastBase64, lastVolume);
            }

            this.recordingWorklet.port.onmessage = async (ev: MessageEvent) => {
              const arrayBuffer = ev.data.data?.int16arrayBuffer;
              if (arrayBuffer) {
                lastBase64 = arrayBufferToBase64(arrayBuffer);
                handleMessage();
              }
            };
            
            this.vuWorklet.port.onmessage = (ev: MessageEvent) => {
              if (ev.data.volume !== undefined) {
                lastVolume = ev.data.volume;
                handleMessage();
              }
            };
            
            console.log('Chrome AudioWorkletNodes created');

            this.source.connect(this.recordingWorklet);
            this.source.connect(this.vuWorklet);
            console.log('Chrome audio nodes connected');

          } catch (err) {
            console.error('Failed to setup/connect Chrome audio nodes:', err);
            throw err;
          }
        }

        this.recording = true;
        console.log('Recording started successfully');
        resolve();
        this.starting = null;
      } catch (error) {
        console.error('Failed to start recording:', error);
        this.stop();
        reject(error);
        this.starting = null;
      }
    });
    return this.starting;
  }

  stop() {
    console.log('Stopping audio recorder...');
    const handleStop = () => {
      try {
        this.recording = false;
        if (this.source) {
          console.log('Disconnecting audio source...');
          this.source.disconnect();
        }
        if (this.stream) {
          console.log('Stopping media stream tracks...');
          this.stream.getTracks().forEach(track => {
            track.stop();
            console.log('Stopped track:', track.label);
          });
        }
        if (this.audioContext && (this.audioContext.state === 'running' || this.isSafari)) {
          console.log('Closing audio context...');
          this.audioContext.close();
        }
        this.stream = undefined;
        this.recordingWorklet = undefined;
        this.vuWorklet = undefined;
        this.source = undefined;
        this.audioContext = undefined;
        this.emit("stop");
        console.log('Audio recorder stopped successfully');
      } catch (err) {
        console.error('Error while stopping audio recorder:', err);
      }
    };
    if (this.starting) {
      console.log('Stop called while starting - waiting for start to complete...');
      this.starting.then(handleStop);
      return;
    }
    handleStop();
  }
}
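
/*
 * Usage sketch (illustrative only; sendAudioChunk and setMeterLevel below are
 * assumed consumer-side helpers, not part of this module):
 *
 *   const recorder = new AudioRecorder(16000);
 *   recorder.on("data", (base64: string, volume?: number) => {
 *     if (base64) sendAudioChunk(base64);               // stream the PCM chunk
 *     if (volume !== undefined) setMeterLevel(volume);  // drive a level animation
 *   });
 *   recorder.on("stop", () => console.log("recorder stopped"));
 *
 *   // inside an async context, ideally a user-gesture handler (Safari needs the gesture):
 *   await recorder.start();
 *   // ...later
 *   recorder.stop();
 */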