// src/lib/audio-recorder.ts
import { audioContext } from "./utils";
import AudioRecordingWorklet from "./worklets/audio-processing";
import SafariAudioRecordingWorklet from "./worklets/safari-audio-processing";
import VolMeterWorket from "./worklets/vol-meter";
import { createWorketFromSrc } from "./audioworklet-registry";
import EventEmitter from "eventemitter3";
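// Note: the worklet imports above are raw source strings; createWorketFromSrc
// is expected to turn a (name, source) pair into a URL that
// audioWorklet.addModule() can load (typically a Blob URL).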
function arrayBufferToBase64(buffer: ArrayBuffer) {
  let binary = "";
  const bytes = new Uint8Array(buffer);
  const len = bytes.byteLength;
  for (let i = 0; i < len; i++) {
    binary += String.fromCharCode(bytes[i]);
  }
  return window.btoa(binary);
}
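// Performance note: byte-by-byte concatenation is adequate for the small PCM
// chunks the worklets emit; for large buffers, calling String.fromCharCode on
// subarray chunks is usually faster.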
// Safari-specific audio context creation
async function createSafariAudioContext(sampleRate: number): Promise<AudioContext> {
  console.log('Creating Safari audio context with options:', { sampleRate });

  // Older Safari versions only expose AudioContext under the webkit prefix
  const AudioContextClass = (window as any).webkitAudioContext || window.AudioContext;
  console.log('Using AudioContext class:', AudioContextClass.name);

  const ctx = new AudioContextClass({
    sampleRate,
    latencyHint: 'interactive'
  });
  console.log('Safari AudioContext initial state:', {
    state: ctx.state,
    sampleRate: ctx.sampleRate,
    baseLatency: ctx.baseLatency,
    destination: ctx.destination,
  });

  // Safari requires a user interaction before an audio context may start
  if (ctx.state === 'suspended') {
    console.log('Attempting to resume suspended Safari audio context...');
    try {
      await ctx.resume();
      console.log('Successfully resumed Safari audio context:', ctx.state);
    } catch (err) {
      console.error('Failed to resume Safari audio context:', err);
      throw err;
    }
  }
  return ctx;
}
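// Note: resume() only succeeds inside a user gesture on iOS/Safari, so
// callers should invoke AudioRecorder.start() from a click or touch handler.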
export class AudioRecorder extends EventEmitter {
  stream: MediaStream | undefined;
  audioContext: AudioContext | undefined;
  source: MediaStreamAudioSourceNode | undefined;
  recording: boolean = false;
  recordingWorklet: AudioWorkletNode | undefined;
  vuWorklet: AudioWorkletNode | undefined;
  private starting: Promise<void> | null = null;
  isSafari: boolean;
  isIOS: boolean;

  constructor(public sampleRate = 16000) {
    super();
    this.isSafari = /^((?!chrome|android).)*safari/i.test(navigator.userAgent);
    this.isIOS = /iPad|iPhone|iPod/.test(navigator.userAgent) && !(window as any).MSStream;
    console.log('AudioRecorder initialized:', {
      isSafari: this.isSafari,
      isIOS: this.isIOS,
      sampleRate: this.sampleRate,
      userAgent: navigator.userAgent,
      webAudioSupport: !!(window.AudioContext || (window as any).webkitAudioContext),
      mediaDevicesSupport: !!navigator.mediaDevices
    });
  }

  async start() {
    if (!navigator.mediaDevices?.getUserMedia) {
      console.error('MediaDevices API not available:', {
        mediaDevices: !!navigator.mediaDevices,
        getUserMedia: !!(navigator.mediaDevices && navigator.mediaDevices.getUserMedia)
      });
      throw new Error("Could not request user media");
    }
    console.log('Starting AudioRecorder with full environment info:', {
      userAgent: navigator.userAgent,
      platform: navigator.platform,
      vendor: navigator.vendor,
      audioWorkletSupport: !!window.AudioWorklet,
      sampleRate: this.sampleRate,
      existingAudioContext: !!this.audioContext,
      existingStream: !!this.stream,
      isSafari: this.isSafari
    });
    this.starting = new Promise(async (resolve, reject) => {
      try {
        if (this.isSafari) {
          // Safari implementation
          console.log('Safari detected - using Safari-specific audio initialization');

          // 1. Get audio permissions first
          console.log('Requesting audio permissions first for Safari...');
          const constraints = {
            audio: {
              echoCancellation: false,
              noiseSuppression: false,
              autoGainControl: false,
              sampleRate: this.sampleRate,
              channelCount: 1
            }
          };
          console.log('Safari audio constraints:', constraints);
          try {
            this.stream = await navigator.mediaDevices.getUserMedia(constraints);
            const track = this.stream.getAudioTracks()[0];
            console.log('Safari audio permissions granted:', {
              track: track.label,
              settings: track.getSettings(),
              constraints: track.getConstraints(),
              enabled: track.enabled,
              muted: track.muted,
              readyState: track.readyState
            });
          } catch (err) {
            console.error('Failed to get Safari audio permissions:', err);
            throw err;
          }

          // 2. Create and initialize the audio context
          try {
            this.audioContext = await createSafariAudioContext(this.sampleRate);
            console.log('Safari audio context ready:', {
              state: this.audioContext.state,
              currentTime: this.audioContext.currentTime
            });
          } catch (err) {
            console.error('Failed to initialize Safari audio context:', err);
            throw err;
          }

          // 3. Create the audio source (connected to the worklets in step 5)
          try {
            console.log('Creating Safari audio source...');
            this.source = this.audioContext.createMediaStreamSource(this.stream);
            console.log('Safari audio source created successfully:', {
              numberOfInputs: this.source.numberOfInputs,
              numberOfOutputs: this.source.numberOfOutputs,
              channelCount: this.source.channelCount
            });
          } catch (err) {
            console.error('Failed to create Safari audio source:', err);
            throw err;
          }

          // 4. Load and create worklets
          try {
            const recordingWorkletName = "audio-recorder-worklet";
            const vuWorkletName = "vu-meter";
            console.log('Loading Safari audio worklets...');
            await Promise.all([
              this.audioContext.audioWorklet.addModule(createWorketFromSrc(recordingWorkletName, SafariAudioRecordingWorklet)),
              this.audioContext.audioWorklet.addModule(createWorketFromSrc(vuWorkletName, VolMeterWorket))
            ]);
            console.log('Safari audio worklet modules loaded');

            // Create the recording worklet
            this.recordingWorklet = new AudioWorkletNode(this.audioContext, recordingWorkletName, {
              processorOptions: { sampleRate: this.sampleRate }
            });
            this.recordingWorklet.onprocessorerror = (event) =>
              console.error('Safari AudioWorklet processor error:', event);
            this.recordingWorklet.port.onmessageerror = (event) =>
              console.error('Safari AudioWorklet message error:', event);
            this.recordingWorklet.port.onmessage = (ev: MessageEvent) => {
              const data = ev.data.data;
              if (data?.int16arrayBuffer) {
                // Data event: base64-encoded PCM chunk destined for the server
                this.emit("data", arrayBufferToBase64(data.int16arrayBuffer));
              }
            };
            console.log('Safari Recording WorkletNode created successfully');

            // --- Main change: also wire up the VU meter for Safari ---
            this.vuWorklet = new AudioWorkletNode(this.audioContext, vuWorkletName);
            this.vuWorklet.port.onmessage = (ev: MessageEvent) => {
              if (ev.data.volume) {
                // Volume event that drives the UI animation; the audio payload
                // is intentionally empty here
                this.emit("data", '', ev.data.volume);
              }
            };
            console.log('Safari VU Meter WorkletNode created successfully');
            // --- End of main change ---
          } catch (err) {
            console.error('Failed to setup Safari audio worklets:', err);
            throw err;
          }

          // 5. Connect nodes
          try {
            console.log('Connecting Safari audio nodes...');
            this.source.connect(this.recordingWorklet);
            this.source.connect(this.vuWorklet); // connect the VU meter to the source as well
            console.log('Safari audio nodes connected successfully');
          } catch (err) {
            console.error('Failed to connect Safari audio nodes:', err);
            throw err;
          }
        } else {
          // Chrome/other browsers implementation
          console.log('Non-Safari browser detected - using standard audio initialization');
          const constraints = {
            audio: {
              echoCancellation: true,
              noiseSuppression: true,
              autoGainControl: true,
              sampleRate: this.sampleRate
            }
          };
          console.log('Chrome audio constraints:', constraints);
          try {
            this.stream = await navigator.mediaDevices.getUserMedia(constraints);
            const track = this.stream.getAudioTracks()[0];
            console.log('Chrome audio permissions granted:', { track: track.label, settings: track.getSettings() });
          } catch (err) {
            console.error('Failed to get Chrome audio permissions:', err);
            throw err;
          }
          try {
            this.audioContext = await audioContext({ sampleRate: this.sampleRate });
            console.log('Chrome audio context created:', { state: this.audioContext.state, sampleRate: this.audioContext.sampleRate });
          } catch (err) {
            console.error('Failed to create Chrome audio context:', err);
            throw err;
          }
          try {
            this.source = this.audioContext.createMediaStreamSource(this.stream);
            console.log('Chrome audio source created');
          } catch (err) {
            console.error('Failed to create Chrome audio source:', err);
            throw err;
          }
          try {
            const recordingWorkletName = "audio-recorder-worklet";
            const vuWorkletName = "vu-meter";
            await Promise.all([
              this.audioContext.audioWorklet.addModule(createWorketFromSrc(recordingWorkletName, AudioRecordingWorklet)),
              this.audioContext.audioWorklet.addModule(createWorketFromSrc(vuWorkletName, VolMeterWorket))
            ]);
            console.log('Chrome audio worklets loaded');
            this.recordingWorklet = new AudioWorkletNode(this.audioContext, recordingWorkletName, {
              processorOptions: { sampleRate: this.sampleRate }
            });
            this.recordingWorklet.onprocessorerror = (event) =>
              console.error('Chrome AudioWorklet processor error:', event);
            this.recordingWorklet.port.onmessageerror = (event) =>
              console.error('Chrome AudioWorklet message error:', event);
            this.vuWorklet = new AudioWorkletNode(this.audioContext, vuWorkletName);
            // A single emit path that coalesces both worklet messages
            let lastBase64 = '';
            let lastVolume = 0;
            const handleMessage = () => {
              this.emit("data", lastBase64, lastVolume);
            };
            this.recordingWorklet.port.onmessage = (ev: MessageEvent) => {
              const arrayBuffer = ev.data.data?.int16arrayBuffer;
              if (arrayBuffer) {
                lastBase64 = arrayBufferToBase64(arrayBuffer);
                handleMessage();
              }
            };
            this.vuWorklet.port.onmessage = (ev: MessageEvent) => {
              if (ev.data.volume !== undefined) {
                lastVolume = ev.data.volume;
                handleMessage();
              }
            };
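            // Design note: the Chrome path coalesces both worklet messages into a
            // single "data" emit so listeners always see the latest chunk together
            // with the latest volume; the Safari path above emits them separately.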
            console.log('Chrome AudioWorkletNodes created');
            this.source.connect(this.recordingWorklet);
            this.source.connect(this.vuWorklet);
            console.log('Chrome audio nodes connected');
          } catch (err) {
            console.error('Failed to setup/connect Chrome audio nodes:', err);
            throw err;
          }
        }
        this.recording = true;
        console.log('Recording started successfully');
        resolve();
        this.starting = null;
      } catch (error) {
        console.error('Failed to start recording:', error);
        this.stop();
        reject(error);
        this.starting = null;
      }
    });
    return this.starting;
  }

  stop() {
    console.log('Stopping audio recorder...');
    const handleStop = () => {
      try {
        this.recording = false;
        if (this.source) {
          console.log('Disconnecting audio source...');
          this.source.disconnect();
        }
        if (this.stream) {
          console.log('Stopping media stream tracks...');
          this.stream.getTracks().forEach(track => {
            track.stop();
            console.log('Stopped track:', track.label);
          });
        }
        if (this.audioContext && (this.audioContext.state === 'running' || this.isSafari)) {
          console.log('Closing audio context...');
          this.audioContext.close();
        }
        this.stream = undefined;
        this.recordingWorklet = undefined;
        this.vuWorklet = undefined;
        this.source = undefined;
        this.audioContext = undefined;
        this.emit("stop");
        console.log('Audio recorder stopped successfully');
      } catch (err) {
        console.error('Error while stopping audio recorder:', err);
      }
    };
    if (this.starting) {
      console.log('Stop called while starting - waiting for start to settle...');
      // Run cleanup whether start() resolved or rejected; a bare .then()
      // would skip cleanup when start() fails.
      this.starting.then(handleStop, handleStop);
      return;
    }
    handleStop();
  }
}
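// Usage sketch (illustrative; `socket` and `updateMicAnimation` are
// hypothetical stand-ins for the consuming app, not part of this module):
//
//   const recorder = new AudioRecorder(16000);
//   recorder.on("data", (base64: string, volume?: number) => {
//     if (base64) socket.send(JSON.stringify({ audio: base64 }));
//     if (volume !== undefined) updateMicAnimation(volume);
//   });
//   await recorder.start(); // call from a user gesture for Safari/iOS
//   // ...later:
//   recorder.stop();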