Ezmary committed on
Commit
59d8c15
·
verified ·
1 Parent(s): efe5b15

Update src/lib/audio-recorder.ts

Browse files
Files changed (1) hide show
  1. src/lib/audio-recorder.ts +217 -88
src/lib/audio-recorder.ts CHANGED
@@ -1,108 +1,237 @@
1
  // src/lib/audio-recorder.ts
2
 
3
- import EventEmitter from "eventemitter3";
4
  import { audioContext } from "./utils";
 
 
5
  import VolMeterWorket from "./worklets/vol-meter";
6
 
7
- const CHUNK_SIZE = 2048;
8
-
9
- export class AudioRecorder extends EventEmitter {
10
- recording = false;
11
- private audioCtx: AudioContext | null = null;
12
- private microphone: MediaStreamAudioSourceNode | null = null;
13
- private processor: ScriptProcessorNode | null = null;
14
- private worklet: AudioWorkletNode | null = null;
15
- private stream: MediaStream | null = null;
16
-
17
- async start(): Promise<void> {
18
- if (this.recording) return;
19
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
20
  try {
21
- this.stream = await navigator.mediaDevices.getUserMedia({ audio: true });
22
- this.audioCtx = await audioContext({ id: "audio-in" });
23
-
24
- if (this.audioCtx.state === "suspended") {
25
- await this.audioCtx.resume();
26
- }
27
-
28
- await this.audioCtx.audioWorklet.addModule(VolMeterWorket);
29
-
30
- this.microphone = this.audioCtx.createMediaStreamSource(this.stream);
31
- this.processor = this.audioCtx.createScriptProcessor(CHUNK_SIZE, 1, 1);
32
-
33
- this.worklet = new AudioWorkletNode(this.audioCtx, "vumeter-in");
34
- this.worklet.port.onmessage = (ev) => {
35
- // رویداد جدید برای ارسال ولوم
36
- this.emit("volume", ev.data.volume);
37
- };
38
-
39
- this.processor.onaudioprocess = (e: AudioProcessingEvent) => {
40
- if (!this.recording) return;
41
- const inputData = e.inputBuffer.getChannelData(0);
42
- const pcm16Data = this.convertToPCM16(inputData);
43
- const base64 = this.toBase64(pcm16Data);
44
- // محاسبه ولوم در اینجا و ارسال آن همراه با داده‌ها
45
- const volume = this.getVolume(inputData);
46
- this.emit("data", base64, volume);
47
- };
48
-
49
- this.microphone.connect(this.processor);
50
- this.microphone.connect(this.worklet); // اتصال worklet برای گرفتن ولوم
51
- this.processor.connect(this.audioCtx.destination);
52
-
53
- this.recording = true;
54
- this.emit("start");
55
  } catch (err) {
56
- console.error("Error starting audio recording:", err);
57
- this.emit("error", err);
58
  }
59
  }
 
 
60
 
61
- stop(): void {
62
- if (!this.recording) return;
63
-
64
- this.recording = false;
65
- this.emit("stop");
66
-
67
- if (this.stream) {
68
- this.stream.getTracks().forEach((track) => track.stop());
69
- }
70
- if (this.microphone) {
71
- this.microphone.disconnect();
72
- }
73
- if (this.processor) {
74
- this.processor.disconnect();
75
- }
76
- if (this.worklet) {
77
- this.worklet.disconnect();
78
- }
79
  }
80
 
81
- private getVolume(data: Float32Array): number {
82
- let sum = 0;
83
- for (let i = 0; i < data.length; i++) {
84
- sum += data[i] * data[i];
85
  }
86
- const rms = Math.sqrt(sum / data.length);
87
- // نرمال‌سازی ولوم به یک مقدار بین 0 و 1
88
- return Math.min(1, rms * 5); // ضریب 5 برای محسوس‌تر کردن
89
- }
90
 
91
- private convertToPCM16(input: Float32Array): Int16Array {
92
- const output = new Int16Array(input.length);
93
- for (let i = 0; i < input.length; i++) {
94
- const s = Math.max(-1, Math.min(1, input[i]));
95
- output[i] = s < 0 ? s * 0x8000 : s * 0x7fff;
96
- }
97
- return output;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
98
  }
99
 
100
- private toBase64(pcm16Data: Int16Array): string {
101
- const bytes = new Uint8Array(pcm16Data.buffer);
102
- let binary = "";
103
- for (let i = 0; i < bytes.byteLength; i++) {
104
- binary += String.fromCharCode(bytes[i]);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
105
  }
106
- return btoa(binary);
 
107
  }
108
  }
 
1
  // src/lib/audio-recorder.ts
2
 
 
3
  import { audioContext } from "./utils";
4
+ import AudioRecordingWorklet from "./worklets/audio-processing";
5
+ import SafariAudioRecordingWorklet from "./worklets/safari-audio-processing";
6
  import VolMeterWorket from "./worklets/vol-meter";
7
 
8
+ import { createWorketFromSrc } from "./audioworklet-registry";
9
+ import EventEmitter from "eventemitter3";
 
 
 
 
 
 
 
 
 
 
10
 
11
+ function arrayBufferToBase64(buffer: ArrayBuffer) {
12
+ var binary = "";
13
+ var bytes = new Uint8Array(buffer);
14
+ var len = bytes.byteLength;
15
+ for (var i = 0; i < len; i++) {
16
+ binary += String.fromCharCode(bytes[i]);
17
+ }
18
+ return window.btoa(binary);
19
+ }
20
+
21
+ async function createSafariAudioContext(sampleRate: number): Promise<AudioContext> {
22
+ console.log('Creating Safari audio context with options:', { sampleRate });
23
+ const AudioContextClass = (window as any).webkitAudioContext || window.AudioContext;
24
+ console.log('Using AudioContext class:', AudioContextClass.name);
25
+ const ctx = new AudioContextClass({ sampleRate, latencyHint: 'interactive' });
26
+ console.log('Safari AudioContext initial state:', { state: ctx.state, sampleRate: ctx.sampleRate, baseLatency: ctx.baseLatency, destination: ctx.destination });
27
+ if (ctx.state === 'suspended') {
28
+ console.log('Attempting to resume suspended Safari audio context...');
29
  try {
30
+ await ctx.resume();
31
+ console.log('Successfully resumed Safari audio context:', ctx.state);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
32
  } catch (err) {
33
+ console.error('Failed to resume Safari audio context:', err);
34
+ throw err;
35
  }
36
  }
37
+ return ctx;
38
+ }
39
 
40
+ export class AudioRecorder extends EventEmitter {
41
+ stream: MediaStream | undefined;
42
+ audioContext: AudioContext | undefined;
43
+ source: MediaStreamAudioSourceNode | undefined;
44
+ recording: boolean = false;
45
+ recordingWorklet: AudioWorkletNode | undefined;
46
+ vuWorklet: AudioWorkletNode | undefined;
47
+
48
+ private starting: Promise<void> | null = null;
49
+
50
+ isSafari: boolean;
51
+ isIOS: boolean;
52
+
53
+ constructor(public sampleRate = 16000) {
54
+ super();
55
+ this.isSafari = /^((?!chrome|android).)*safari/i.test(navigator.userAgent);
56
+ this.isIOS = /iPad|iPhone|iPod/.test(navigator.userAgent) && !(window as any).MSStream;
57
+ console.log('AudioRecorder initialized:', { isSafari: this.isSafari, isIOS: this.isIOS, sampleRate: this.sampleRate, userAgent: navigator.userAgent, webAudioSupport: !!(window.AudioContext || (window as any).webkitAudioContext), mediaDevicesSupport: !!navigator.mediaDevices });
58
  }
59
 
60
+ async start() {
61
+ if (!navigator.mediaDevices?.getUserMedia) {
62
+ console.error('MediaDevices API not available:', { mediaDevices: !!navigator.mediaDevices, getUserMedia: !!(navigator.mediaDevices && navigator.mediaDevices.getUserMedia) });
63
+ throw new Error("Could not request user media");
64
  }
 
 
 
 
65
 
66
+ console.log('Starting AudioRecorder with full environment info:', { userAgent: navigator.userAgent, platform: navigator.platform, vendor: navigator.vendor, audioWorkletSupport: !!(window.AudioWorklet), sampleRate: this.sampleRate, existingAudioContext: !!this.audioContext, existingStream: !!this.stream, isSafari: this.isSafari });
67
+
68
+ this.starting = new Promise(async (resolve, reject) => {
69
+ try {
70
+ const constraints = {
71
+ audio: {
72
+ echoCancellation: !this.isSafari, // Safari handles this differently
73
+ noiseSuppression: !this.isSafari,
74
+ autoGainControl: !this.isSafari,
75
+ sampleRate: this.sampleRate,
76
+ channelCount: 1
77
+ }
78
+ };
79
+ console.log('Using audio constraints:', constraints);
80
+
81
+ // 1. Get User Media
82
+ try {
83
+ this.stream = await navigator.mediaDevices.getUserMedia(constraints);
84
+ const track = this.stream.getAudioTracks()[0];
85
+ console.log('Audio permissions granted:', { track: track.label, settings: track.getSettings() });
86
+ } catch (err) {
87
+ console.error('Failed to get audio permissions:', err);
88
+ throw err;
89
+ }
90
+
91
+ // 2. Create Audio Context
92
+ try {
93
+ this.audioContext = this.isSafari
94
+ ? await createSafariAudioContext(this.sampleRate)
95
+ : await audioContext({ sampleRate: this.sampleRate });
96
+ console.log('Audio context ready:', { state: this.audioContext.state, sampleRate: this.audioContext.sampleRate });
97
+ } catch (err) {
98
+ console.error('Failed to initialize audio context:', err);
99
+ throw err;
100
+ }
101
+
102
+ // 3. Create Audio Source
103
+ try {
104
+ this.source = this.audioContext.createMediaStreamSource(this.stream);
105
+ console.log('Audio source created.');
106
+ } catch (err) {
107
+ console.error('Failed to create audio source:', err);
108
+ throw err;
109
+ }
110
+
111
+ // 4. Load and Create Worklets (Recording and VU Meter for ALL browsers)
112
+ try {
113
+ const recordingWorkletName = "audio-recorder-worklet";
114
+ const recordingWorkletSrc = createWorketFromSrc(recordingWorkletName, this.isSafari ? SafariAudioRecordingWorklet : AudioRecordingWorklet);
115
+ await this.audioContext.audioWorklet.addModule(recordingWorkletSrc);
116
+ console.log('Recording worklet module loaded.');
117
+
118
+ this.recordingWorklet = new AudioWorkletNode(this.audioContext, recordingWorkletName, { processorOptions: { sampleRate: this.sampleRate } });
119
+
120
+ this.recordingWorklet.port.onmessage = (ev: MessageEvent) => {
121
+ const arrayBuffer = ev.data.data?.int16arrayBuffer;
122
+ if (arrayBuffer) {
123
+ // *** تغییر اصلی: حالا data و volume را با هم می‌فرستیم ***
124
+ // توجه: در worklet ها محاسبه حجم صدا را اضافه خواهیم کرد
125
+ const arrayBufferString = arrayBufferToBase64(arrayBuffer);
126
+ // ما به جای رویداد مجزا، حجم را همراه با داده‌ها می‌فرستیم
127
+ // این کار را در ControlTray.tsx مدیریت خواهیم کرد
128
+ // this.emit("data", arrayBufferString);
129
+ } else {
130
+ console.warn('Invalid audio chunk received:', ev.data);
131
+ }
132
+ };
133
+ this.recordingWorklet.onprocessorerror = (event) => console.error('AudioWorklet processor error:', event);
134
+ console.log('Recording worklet node created.');
135
+
136
+ // --- 👇 بخش حیاتی: افزودن VU Meter برای همه مرورگرها 👇 ---
137
+ const vuWorkletName = "vu-meter";
138
+ await this.audioContext.audioWorklet.addModule(createWorketFromSrc(vuWorkletName, VolMeterWorket));
139
+ this.vuWorklet = new AudioWorkletNode(this.audioContext, vuWorkletName);
140
+
141
+ // *** تغییر اصلی: رویداد data را با حجم صدا ترکیب می‌کنیم ***
142
+ this.vuWorklet.port.onmessage = (ev: MessageEvent) => {
143
+ if (ev.data.volume && this.recordingWorklet) {
144
+ // حالا هم داده خام صدا و هم حجم صدا را داریم
145
+ // یک پیام به worklet ضبط می‌فرستیم تا داده‌های بافر شده را بگیریم
146
+ this.recordingWorklet.port.postMessage({ getBuffer: true });
147
+ }
148
+ };
149
+
150
+ // دریافت پاسخ از worklet ضبط و ارسال نهایی
151
+ this.recordingWorklet.port.onmessage = (ev: MessageEvent) => {
152
+ const arrayBuffer = ev.data.buffer;
153
+ const volume = ev.data.volume; // فرض می‌کنیم worklet حجم را هم برمی‌گرداند
154
+
155
+ if (arrayBuffer && typeof volume !== 'undefined') {
156
+ const arrayBufferString = arrayBufferToBase64(arrayBuffer);
157
+ // *** حالا هر دو مقدار را با هم emit می‌کنیم ***
158
+ this.emit("data", arrayBufferString, volume);
159
+ }
160
+ };
161
+
162
+ console.log('VU meter worklet created and configured.');
163
+
164
+ } catch (err) {
165
+ console.error('Failed to setup audio worklets:', err);
166
+ throw err;
167
+ }
168
+
169
+ // 5. Connect Nodes
170
+ try {
171
+ this.source.connect(this.recordingWorklet);
172
+ this.source.connect(this.vuWorklet); // اتصال VU متر به منبع
173
+ console.log('Audio nodes connected successfully.');
174
+ } catch (err) {
175
+ console.error('Failed to connect audio nodes:', err);
176
+ throw err;
177
+ }
178
+
179
+ this.recording = true;
180
+ console.log('Recording started successfully');
181
+ resolve();
182
+ this.starting = null;
183
+
184
+ } catch (error) {
185
+ console.error('Failed to start recording:', error);
186
+ this.stop();
187
+ reject(error);
188
+ this.starting = null;
189
+ }
190
+ });
191
+ return this.starting;
192
  }
193
 
194
+ stop() {
195
+ console.log('Stopping audio recorder...');
196
+ const handleStop = () => {
197
+ try {
198
+ if (this.source) {
199
+ console.log('Disconnecting audio source...');
200
+ this.source.disconnect();
201
+ this.source = undefined;
202
+ }
203
+ if (this.stream) {
204
+ console.log('Stopping media stream tracks...');
205
+ this.stream.getTracks().forEach(track => {
206
+ track.stop();
207
+ console.log('Stopped track:', track.label);
208
+ });
209
+ this.stream = undefined;
210
+ }
211
+ // بستن audioContext فقط در سافاری و در صورت نیاز
212
+ if (this.audioContext && this.isSafari && this.audioContext.state !== 'closed') {
213
+ console.log('Closing Safari audio context...');
214
+ this.audioContext.close();
215
+ this.audioContext = undefined;
216
+ }
217
+
218
+ this.recordingWorklet = undefined;
219
+ this.vuWorklet = undefined;
220
+ this.recording = false;
221
+ this.emit("stop");
222
+ console.log('Audio recorder stopped successfully');
223
+ } catch (err) {
224
+ console.error('Error while stopping audio recorder:', err);
225
+ }
226
+ };
227
+
228
+ if (this.starting) {
229
+ console.log('Stop called while starting - waiting for start to complete...');
230
+ this.starting.then(handleStop).catch(handleStop);
231
+ this.starting = null;
232
+ return;
233
  }
234
+
235
+ handleStop();
236
  }
237
  }