code | docstring | func_name | language | repo | path | url | license
---|---|---|---|---|---|---|---|
function clearRecordedDataCB() {
arrayOfBlobs = [];
mediaRecorder = null;
self.timestamps = [];
}
|
This method resets currently recorded data.
@method
@memberof MediaStreamRecorder
@example
recorder.clearRecordedData();
|
clearRecordedDataCB
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function isMediaStreamActive() {
if ('active' in mediaStream) {
if (!mediaStream.active) {
return false;
}
} else if ('ended' in mediaStream) { // old hack
if (mediaStream.ended) {
return false;
}
}
return true;
}
|
Access to native MediaRecorder API
@method
@memberof MediaStreamRecorder
@instance
@example
var internal = recorder.getInternalRecorder();
internal.ondataavailable = function() {}; // override
internal.stream, internal.onpause, internal.onstop, etc.
@returns {Object} Returns internal recording object.
|
isMediaStreamActive
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function isMediaStreamActive() {
if (config.checkForInactiveTracks === false) {
// always return "true"
return true;
}
if ('active' in mediaStream) {
if (!mediaStream.active) {
return false;
}
} else if ('ended' in mediaStream) { // old hack
if (mediaStream.ended) {
return false;
}
}
return true;
}
|
Set sample rates such as 8K or 16K. Reference: http://stackoverflow.com/a/28977136/552182
@property {number} desiredSampRate - Desired sample rate (in Hz), e.g. 16 * 1000 for 16 kHz
@memberof StereoAudioRecorder
@instance
@example
var recorder = new StereoAudioRecorder(mediaStream, {
desiredSampRate: 16 * 1000 // 16 kHz
});
|
isMediaStreamActive
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function mergeLeftRightBuffers(config, callback) {
function mergeAudioBuffers(config, cb) {
var numberOfAudioChannels = config.numberOfAudioChannels;
// todo: "slice(0)" --- is it causes loop? Should be removed?
var leftBuffers = config.leftBuffers.slice(0);
var rightBuffers = config.rightBuffers.slice(0);
var sampleRate = config.sampleRate;
var internalInterleavedLength = config.internalInterleavedLength;
var desiredSampRate = config.desiredSampRate;
if (numberOfAudioChannels === 2) {
leftBuffers = mergeBuffers(leftBuffers, internalInterleavedLength);
rightBuffers = mergeBuffers(rightBuffers, internalInterleavedLength);
if (desiredSampRate) {
leftBuffers = interpolateArray(leftBuffers, desiredSampRate, sampleRate);
rightBuffers = interpolateArray(rightBuffers, desiredSampRate, sampleRate);
}
}
if (numberOfAudioChannels === 1) {
leftBuffers = mergeBuffers(leftBuffers, internalInterleavedLength);
if (desiredSampRate) {
leftBuffers = interpolateArray(leftBuffers, desiredSampRate, sampleRate);
}
}
// set sample rate as desired sample rate
if (desiredSampRate) {
sampleRate = desiredSampRate;
}
// for changing the sampling rate, reference:
// http://stackoverflow.com/a/28977136/552182
function interpolateArray(data, newSampleRate, oldSampleRate) {
var fitCount = Math.round(data.length * (newSampleRate / oldSampleRate));
var newData = [];
var springFactor = Number((data.length - 1) / (fitCount - 1));
newData[0] = data[0];
for (var i = 1; i < fitCount - 1; i++) {
var tmp = i * springFactor;
var before = Math.floor(tmp);
var after = Math.ceil(tmp);
var atPoint = tmp - before;
newData[i] = linearInterpolate(data[before], data[after], atPoint);
}
newData[fitCount - 1] = data[data.length - 1];
return newData;
}
function linearInterpolate(before, after, atPoint) {
return before + (after - before) * atPoint;
}
function mergeBuffers(channelBuffer, rLength) {
var result = new Float64Array(rLength);
var offset = 0;
var lng = channelBuffer.length;
for (var i = 0; i < lng; i++) {
var buffer = channelBuffer[i];
result.set(buffer, offset);
offset += buffer.length;
}
return result;
}
function interleave(leftChannel, rightChannel) {
var length = leftChannel.length + rightChannel.length;
var result = new Float64Array(length);
var inputIndex = 0;
for (var index = 0; index < length;) {
result[index++] = leftChannel[inputIndex];
result[index++] = rightChannel[inputIndex];
inputIndex++;
}
return result;
}
function writeUTFBytes(view, offset, string) {
var lng = string.length;
for (var i = 0; i < lng; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
// interleave both channels together
var interleaved;
if (numberOfAudioChannels === 2) {
interleaved = interleave(leftBuffers, rightBuffers);
}
if (numberOfAudioChannels === 1) {
interleaved = leftBuffers;
}
var interleavedLength = interleaved.length;
// create wav file
var resultingBufferLength = 44 + interleavedLength * 2;
var buffer = new ArrayBuffer(resultingBufferLength);
var view = new DataView(buffer);
// RIFF chunk descriptor/identifier
writeUTFBytes(view, 0, 'RIFF');
// RIFF chunk length
// changed "44" to "36" via #401
view.setUint32(4, 36 + interleavedLength * 2, true);
// RIFF type
writeUTFBytes(view, 8, 'WAVE');
// format chunk identifier
// FMT sub-chunk
writeUTFBytes(view, 12, 'fmt ');
// format chunk length
view.setUint32(16, 16, true);
// sample format (raw)
view.setUint16(20, 1, true);
// channel count (1 = mono, 2 = stereo)
view.setUint16(22, numberOfAudioChannels, true);
// sample rate
view.setUint32(24, sampleRate, true);
// byte rate (sample rate * block align)
view.setUint32(28, sampleRate * numberOfAudioChannels * 2, true);
// block align (channel count * bytes per sample)
view.setUint16(32, numberOfAudioChannels * 2, true);
// bits per sample
view.setUint16(34, 16, true);
// data sub-chunk
// data chunk identifier
writeUTFBytes(view, 36, 'data');
// data chunk length
view.setUint32(40, interleavedLength * 2, true);
// write the PCM samples
var lng = interleavedLength;
var index = 44;
var volume = 1;
for (var i = 0; i < lng; i++) {
view.setInt16(index, interleaved[i] * (0x7FFF * volume), true);
index += 2;
}
if (cb) {
return cb({
buffer: buffer,
view: view
});
}
postMessage({
buffer: buffer,
view: view
});
}
if (config.noWorker) {
mergeAudioBuffers(config, function(data) {
callback(data.buffer, data.view);
});
return;
}
var webWorker = processInWebWorker(mergeAudioBuffers);
webWorker.onmessage = function(event) {
callback(event.data.buffer, event.data.view);
// release memory
URL.revokeObjectURL(webWorker.workerURL);
// kill webworker (or Chrome will kill your page after ~25 calls)
webWorker.terminate();
};
webWorker.postMessage(config);
}
|
This method merges the recorded left/right channel buffers into a single WAV file, optionally inside a web worker.
@method
@memberof StereoAudioRecorder
@example
mergeLeftRightBuffers(config, function(buffer, view) {});
|
mergeLeftRightBuffers
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
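A minimal usage sketch of the function above, assuming mergeLeftRightBuffers is in scope: it synthesizes one second of a 440 Hz mono tone and encodes it as WAV via the noWorker path, so it also runs where Blob-URL workers are unavailable.

var sampleRate = 44100;
var samples = new Float32Array(sampleRate); // 1 second, mono
for (var i = 0; i < samples.length; i++) {
    samples[i] = Math.sin(2 * Math.PI * 440 * i / sampleRate) * 0.5;
}
mergeLeftRightBuffers({
    noWorker: true, // run synchronously instead of inside a web worker
    numberOfAudioChannels: 1,
    sampleRate: sampleRate,
    internalInterleavedLength: samples.length,
    leftBuffers: [samples],
    rightBuffers: []
}, function(buffer, view) {
    var wav = new Blob([view], {
        type: 'audio/wav'
    });
    console.log(wav.size); // 88244 = 44-byte header + 44100 samples * 2 bytes
});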
function mergeAudioBuffers(config, cb) {
var numberOfAudioChannels = config.numberOfAudioChannels;
// todo: "slice(0)" --- is it causes loop? Should be removed?
var leftBuffers = config.leftBuffers.slice(0);
var rightBuffers = config.rightBuffers.slice(0);
var sampleRate = config.sampleRate;
var internalInterleavedLength = config.internalInterleavedLength;
var desiredSampRate = config.desiredSampRate;
if (numberOfAudioChannels === 2) {
leftBuffers = mergeBuffers(leftBuffers, internalInterleavedLength);
rightBuffers = mergeBuffers(rightBuffers, internalInterleavedLength);
if (desiredSampRate) {
leftBuffers = interpolateArray(leftBuffers, desiredSampRate, sampleRate);
rightBuffers = interpolateArray(rightBuffers, desiredSampRate, sampleRate);
}
}
if (numberOfAudioChannels === 1) {
leftBuffers = mergeBuffers(leftBuffers, internalInterleavedLength);
if (desiredSampRate) {
leftBuffers = interpolateArray(leftBuffers, desiredSampRate, sampleRate);
}
}
// set sample rate as desired sample rate
if (desiredSampRate) {
sampleRate = desiredSampRate;
}
// for changing the sampling rate, reference:
// http://stackoverflow.com/a/28977136/552182
function interpolateArray(data, newSampleRate, oldSampleRate) {
var fitCount = Math.round(data.length * (newSampleRate / oldSampleRate));
var newData = [];
var springFactor = Number((data.length - 1) / (fitCount - 1));
newData[0] = data[0];
for (var i = 1; i < fitCount - 1; i++) {
var tmp = i * springFactor;
var before = Math.floor(tmp);
var after = Math.ceil(tmp);
var atPoint = tmp - before;
newData[i] = linearInterpolate(data[before], data[after], atPoint);
}
newData[fitCount - 1] = data[data.length - 1];
return newData;
}
function linearInterpolate(before, after, atPoint) {
return before + (after - before) * atPoint;
}
function mergeBuffers(channelBuffer, rLength) {
var result = new Float64Array(rLength);
var offset = 0;
var lng = channelBuffer.length;
for (var i = 0; i < lng; i++) {
var buffer = channelBuffer[i];
result.set(buffer, offset);
offset += buffer.length;
}
return result;
}
function interleave(leftChannel, rightChannel) {
var length = leftChannel.length + rightChannel.length;
var result = new Float64Array(length);
var inputIndex = 0;
for (var index = 0; index < length;) {
result[index++] = leftChannel[inputIndex];
result[index++] = rightChannel[inputIndex];
inputIndex++;
}
return result;
}
function writeUTFBytes(view, offset, string) {
var lng = string.length;
for (var i = 0; i < lng; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
// interleave both channels together
var interleaved;
if (numberOfAudioChannels === 2) {
interleaved = interleave(leftBuffers, rightBuffers);
}
if (numberOfAudioChannels === 1) {
interleaved = leftBuffers;
}
var interleavedLength = interleaved.length;
// create wav file
var resultingBufferLength = 44 + interleavedLength * 2;
var buffer = new ArrayBuffer(resultingBufferLength);
var view = new DataView(buffer);
// RIFF chunk descriptor/identifier
writeUTFBytes(view, 0, 'RIFF');
// RIFF chunk length
// changed "44" to "36" via #401
view.setUint32(4, 36 + interleavedLength * 2, true);
// RIFF type
writeUTFBytes(view, 8, 'WAVE');
// format chunk identifier
// FMT sub-chunk
writeUTFBytes(view, 12, 'fmt ');
// format chunk length
view.setUint32(16, 16, true);
// sample format (raw)
view.setUint16(20, 1, true);
// channel count (1 = mono, 2 = stereo)
view.setUint16(22, numberOfAudioChannels, true);
// sample rate
view.setUint32(24, sampleRate, true);
// byte rate (sample rate * block align)
view.setUint32(28, sampleRate * numberOfAudioChannels * 2, true);
// block align (channel count * bytes per sample)
view.setUint16(32, numberOfAudioChannels * 2, true);
// bits per sample
view.setUint16(34, 16, true);
// data sub-chunk
// data chunk identifier
writeUTFBytes(view, 36, 'data');
// data chunk length
view.setUint32(40, interleavedLength * 2, true);
// write the PCM samples
var lng = interleavedLength;
var index = 44;
var volume = 1;
for (var i = 0; i < lng; i++) {
view.setInt16(index, interleaved[i] * (0x7FFF * volume), true);
index += 2;
}
if (cb) {
return cb({
buffer: buffer,
view: view
});
}
postMessage({
buffer: buffer,
view: view
});
}
|
This method merges, optionally resamples, and interleaves the recorded channel buffers, then writes them into a WAV container.
@method
@memberof StereoAudioRecorder
@example
mergeAudioBuffers(config, function(data) {});
|
mergeAudioBuffers
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function interpolateArray(data, newSampleRate, oldSampleRate) {
var fitCount = Math.round(data.length * (newSampleRate / oldSampleRate));
var newData = [];
var springFactor = Number((data.length - 1) / (fitCount - 1));
newData[0] = data[0];
for (var i = 1; i < fitCount - 1; i++) {
var tmp = i * springFactor;
var before = Math.floor(tmp);
var after = Math.ceil(tmp);
var atPoint = tmp - before;
newData[i] = linearInterpolate(data[before], data[after], atPoint);
}
newData[fitCount - 1] = data[data.length - 1];
return newData;
}
|
This method resamples an audio buffer to a new sample rate using linear interpolation.
@method
@memberof StereoAudioRecorder
@example
var resampled = interpolateArray(data, 16000, 44100);
|
interpolateArray
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
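A worked example of the resampling above: halving the sample rate halves the sample count (fitCount = round(8 * 22050 / 44100) = 4), with interior points linearly interpolated.

var data = [0, 0.5, 1, 0.5, 0, -0.5, -1, -0.5]; // 8 samples at 44.1 kHz
var resampled = interpolateArray(data, 22050, 44100);
console.log(resampled.length); // 4
console.log(resampled); // [0, ~0.833, ~-0.333, -0.5]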
function linearInterpolate(before, after, atPoint) {
return before + (after - before) * atPoint;
}
|
This method linearly interpolates between two adjacent samples.
@method
@memberof StereoAudioRecorder
@example
linearInterpolate(0, 1, 0.5); // 0.5
|
linearInterpolate
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function mergeBuffers(channelBuffer, rLength) {
var result = new Float64Array(rLength);
var offset = 0;
var lng = channelBuffer.length;
for (var i = 0; i < lng; i++) {
var buffer = channelBuffer[i];
result.set(buffer, offset);
offset += buffer.length;
}
return result;
}
|
This method flattens an array of per-callback channel buffers into one Float64Array.
@method
@memberof StereoAudioRecorder
@example
var flat = mergeBuffers(channelBuffer, recordingLength);
|
mergeBuffers
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function interleave(leftChannel, rightChannel) {
var length = leftChannel.length + rightChannel.length;
var result = new Float64Array(length);
var inputIndex = 0;
for (var index = 0; index < length;) {
result[index++] = leftChannel[inputIndex];
result[index++] = rightChannel[inputIndex];
inputIndex++;
}
return result;
}
|
This method interleaves left and right channel samples into a single buffer: [L0, R0, L1, R1, ...].
@method
@memberof StereoAudioRecorder
@example
var interleaved = interleave(leftChannel, rightChannel);
|
interleave
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
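A tiny illustration of the interleaving order (the real inputs are the merged Float32Array channels, but plain arrays behave the same here):

var mixed = interleave([1, 2], [3, 4]);
console.log(mixed); // Float64Array [1, 3, 2, 4] -- L0, R0, L1, R1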
function writeUTFBytes(view, offset, string) {
var lng = string.length;
for (var i = 0; i < lng; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
|
This method writes an ASCII string into a DataView, one byte per character.
@method
@memberof StereoAudioRecorder
@example
writeUTFBytes(view, 0, 'RIFF');
|
writeUTFBytes
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function processInWebWorker(_function) {
var workerURL = URL.createObjectURL(new Blob([_function.toString(),
';this.onmessage = function (eee) {' + _function.name + '(eee.data);}'
], {
type: 'application/javascript'
}));
var worker = new Worker(workerURL);
worker.workerURL = workerURL;
return worker;
}
|
This method runs the given named function inside a web worker created from a Blob URL.
@method
@memberof StereoAudioRecorder
@example
var worker = processInWebWorker(mergeAudioBuffers);
|
processInWebWorker
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
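A usage sketch under the constraints the code above imposes: the looped function must be named and self-contained (it is serialized with toString()), and it must reply via postMessage. The double function here is hypothetical.

function double(n) {
    postMessage(n * 2); // workers reply through postMessage
}
var worker = processInWebWorker(double);
worker.onmessage = function(event) {
    console.log(event.data); // 8
    URL.revokeObjectURL(worker.workerURL); // release the Blob URL
    worker.terminate(); // see the note in mergeLeftRightBuffers above
};
worker.postMessage(4);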
function resetVariables() {
leftchannel = [];
rightchannel = [];
recordingLength = 0;
isAudioProcessStarted = false;
recording = false;
isPaused = false;
context = null;
self.leftchannel = leftchannel;
self.rightchannel = rightchannel;
self.numberOfAudioChannels = numberOfAudioChannels;
self.desiredSampRate = desiredSampRate;
self.sampleRate = sampleRate;
self.recordingLength = recordingLength;
intervalsBasedBuffers = {
left: [],
right: [],
recordingLength: 0
};
}
|
This method resets currently recorded data.
@method
@memberof StereoAudioRecorder
@example
recorder.clearRecordedData();
|
resetVariables
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function clearRecordedDataCB() {
if (jsAudioNode) {
jsAudioNode.onaudioprocess = null;
jsAudioNode.disconnect();
jsAudioNode = null;
}
if (audioInput) {
audioInput.disconnect();
audioInput = null;
}
resetVariables();
}
|
This method resets currently recorded data.
@method
@memberof StereoAudioRecorder
@example
recorder.clearRecordedData();
|
clearRecordedDataCB
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function onAudioProcessDataAvailable(e) {
if (isPaused) {
return;
}
if (isMediaStreamActive() === false) {
if (!config.disableLogs) {
console.log('MediaStream seems stopped.');
}
jsAudioNode.disconnect();
recording = false;
}
if (!recording) {
if (audioInput) {
audioInput.disconnect();
audioInput = null;
}
return;
}
/**
* This method is called on "onaudioprocess" event's first invocation.
* @method {function} onAudioProcessStarted
* @memberof StereoAudioRecorder
* @example
* recorder.onAudioProcessStarted = function() { };
*/
if (!isAudioProcessStarted) {
isAudioProcessStarted = true;
if (config.onAudioProcessStarted) {
config.onAudioProcessStarted();
}
if (config.initCallback) {
config.initCallback();
}
}
var left = e.inputBuffer.getChannelData(0);
// we clone the samples
var chLeft = new Float32Array(left);
leftchannel.push(chLeft);
if (numberOfAudioChannels === 2) {
var right = e.inputBuffer.getChannelData(1);
var chRight = new Float32Array(right);
rightchannel.push(chRight);
}
recordingLength += bufferSize;
// export raw PCM
self.recordingLength = recordingLength;
if (typeof config.timeSlice !== 'undefined') {
intervalsBasedBuffers.recordingLength += bufferSize;
intervalsBasedBuffers.left.push(chLeft);
if (numberOfAudioChannels === 2) {
intervalsBasedBuffers.right.push(chRight);
}
}
}
|
This method handles each "audioprocess" event: it clones the channel samples and appends them to the recording buffers.
@method
@memberof StereoAudioRecorder
|
onAudioProcessDataAvailable
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
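For context, a minimal wiring sketch of how a handler like this gets attached (StereoAudioRecorder does this internally; the mediaStream variable and the buffer size here are assumptions):

var context = new AudioContext();
var audioInput = context.createMediaStreamSource(mediaStream);
var bufferSize = 4096; // must match the recordingLength accounting above
var jsAudioNode = context.createScriptProcessor(bufferSize, 2, 2);
jsAudioNode.onaudioprocess = onAudioProcessDataAvailable;
audioInput.connect(jsAudioNode);
jsAudioNode.connect(context.destination); // ScriptProcessorNode generally must be connected for onaudioprocess to fire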
function looper() {
if (!recording || typeof config.ondataavailable !== 'function' || typeof config.timeSlice === 'undefined') {
return;
}
if (intervalsBasedBuffers.left.length) {
mergeLeftRightBuffers({
desiredSampRate: desiredSampRate,
sampleRate: sampleRate,
numberOfAudioChannels: numberOfAudioChannels,
internalInterleavedLength: intervalsBasedBuffers.recordingLength,
leftBuffers: intervalsBasedBuffers.left,
rightBuffers: numberOfAudioChannels === 1 ? [] : intervalsBasedBuffers.right
}, function(buffer, view) {
var blob = new Blob([view], {
type: 'audio/wav'
});
config.ondataavailable(blob);
setTimeout(looper, config.timeSlice);
});
intervalsBasedBuffers = {
left: [],
right: [],
recordingLength: 0
};
} else {
setTimeout(looper, config.timeSlice);
}
}
|
This method emits intermediate WAV blobs via config.ondataavailable every config.timeSlice milliseconds.
@method
@memberof StereoAudioRecorder
|
looper
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
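The looper above is what powers the timeSlice option; a hedged configuration sketch (assuming a live mediaStream):

var recorder = new StereoAudioRecorder(mediaStream, {
    timeSlice: 1000, // emit an intermediate WAV blob roughly every second
    ondataavailable: function(blob) {
        console.log('intermediate WAV blob:', blob.size, 'bytes');
    }
});
recorder.record();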
function clearRecordedDataCB() {
whammy.frames = [];
isRecording = false;
isPausedRecording = false;
}
|
This method resets currently recorded data.
@method
@memberof CanvasRecorder
@example
recorder.clearRecordedData();
|
clearRecordedDataCB
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function cloneCanvas() {
//create a new canvas
var newCanvas = document.createElement('canvas');
var context = newCanvas.getContext('2d');
//set dimensions
newCanvas.width = htmlElement.width;
newCanvas.height = htmlElement.height;
//apply the old canvas to the new one
context.drawImage(htmlElement, 0, 0);
//return the new canvas
return newCanvas;
}
|
This method clones the target canvas (dimensions and current pixels) into a new canvas element.
@method
@memberof CanvasRecorder
@example
var snapshot = cloneCanvas();
|
cloneCanvas
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function drawCanvasFrame() {
if (isPausedRecording) {
lastTime = new Date().getTime();
return setTimeout(drawCanvasFrame, 500);
}
if (htmlElement.nodeName.toLowerCase() === 'canvas') {
var duration = new Date().getTime() - lastTime;
// via #206, by Jack i.e. @Seymourr
lastTime = new Date().getTime();
whammy.frames.push({
image: cloneCanvas(),
duration: duration
});
if (isRecording) {
setTimeout(drawCanvasFrame, config.frameInterval);
}
return;
}
html2canvas(htmlElement, {
grabMouse: typeof config.showMousePointer === 'undefined' || config.showMousePointer,
onrendered: function(canvas) {
var duration = new Date().getTime() - lastTime;
if (!duration) {
return setTimeout(drawCanvasFrame, config.frameInterval);
}
// via #206, by Jack i.e. @Seymourr
lastTime = new Date().getTime();
whammy.frames.push({
image: canvas.toDataURL('image/webp', 1),
duration: duration
});
if (isRecording) {
setTimeout(drawCanvasFrame, config.frameInterval);
}
}
});
}
|
This method captures the current canvas (or the html2canvas-rendered element) as a frame and pushes it to Whammy.
@method
@memberof CanvasRecorder
|
drawCanvasFrame
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function drawFrames(frameInterval) {
frameInterval = typeof frameInterval !== 'undefined' ? frameInterval : 10;
var duration = new Date().getTime() - lastTime;
if (!duration) {
return setTimeout(drawFrames, frameInterval, frameInterval);
}
if (isPausedRecording) {
lastTime = new Date().getTime();
return setTimeout(drawFrames, 100);
}
// via #206, by Jack i.e. @Seymourr
lastTime = new Date().getTime();
if (video.paused) {
// via: https://github.com/muaz-khan/WebRTC-Experiment/pull/316
// Tweak for Android Chrome
video.play();
}
context.drawImage(video, 0, 0, canvas.width, canvas.height);
whammy.frames.push({
duration: duration,
image: canvas.toDataURL('image/webp')
});
if (!isStopDrawing) {
setTimeout(drawFrames, frameInterval, frameInterval);
}
}
|
Draw and push frames to Whammy
@param {integer} frameInterval - minimum interval (in milliseconds) between frames pushed to Whammy
|
drawFrames
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function asyncLoop(o) {
var i = -1,
length = o.length;
(function loop() {
i++;
if (i === length) {
o.callback();
return;
}
// "setTimeout" added by Jim McLeod
setTimeout(function() {
o.functionToLoop(loop, i);
}, 1);
})();
}
|
Runs an asynchronous loop: calls o.functionToLoop(loop, i) o.length times, then o.callback().
@param {object} o - object with length, functionToLoop and callback properties
|
asyncLoop
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
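A minimal call illustrating the contract above: functionToLoop receives the loop continuation plus the index, and callback fires once the loop ends.

asyncLoop({
    length: 3,
    functionToLoop: function(loop, i) {
        console.log('processing frame', i);
        loop(); // continue (each step is deferred ~1ms via setTimeout)
    },
    callback: function() {
        console.log('all frames processed');
    }
});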
function dropBlackFrames(_frames, _framesToCheck, _pixTolerance, _frameTolerance, callback) {
var localCanvas = document.createElement('canvas');
localCanvas.width = canvas.width;
localCanvas.height = canvas.height;
var context2d = localCanvas.getContext('2d');
var resultFrames = [];
var checkUntilNotBlack = _framesToCheck === -1;
var endCheckFrame = (_framesToCheck && _framesToCheck > 0 && _framesToCheck <= _frames.length) ?
_framesToCheck : _frames.length;
var sampleColor = {
r: 0,
g: 0,
b: 0
};
var maxColorDifference = Math.sqrt(
Math.pow(255, 2) +
Math.pow(255, 2) +
Math.pow(255, 2)
);
var pixTolerance = _pixTolerance && _pixTolerance >= 0 && _pixTolerance <= 1 ? _pixTolerance : 0;
var frameTolerance = _frameTolerance && _frameTolerance >= 0 && _frameTolerance <= 1 ? _frameTolerance : 0;
var doNotCheckNext = false;
asyncLoop({
length: endCheckFrame,
functionToLoop: function(loop, f) {
var matchPixCount, endPixCheck, maxPixCount;
var finishImage = function() {
if (!doNotCheckNext && maxPixCount - matchPixCount <= maxPixCount * frameTolerance) {
// console.log('removed black frame : ' + f + ' ; frame duration ' + _frames[f].duration);
} else {
// console.log('frame is passed : ' + f);
if (checkUntilNotBlack) {
doNotCheckNext = true;
}
resultFrames.push(_frames[f]);
}
loop();
};
if (!doNotCheckNext) {
var image = new Image();
image.onload = function() {
context2d.drawImage(image, 0, 0, canvas.width, canvas.height);
var imageData = context2d.getImageData(0, 0, canvas.width, canvas.height);
matchPixCount = 0;
endPixCheck = imageData.data.length;
maxPixCount = imageData.data.length / 4;
for (var pix = 0; pix < endPixCheck; pix += 4) {
var currentColor = {
r: imageData.data[pix],
g: imageData.data[pix + 1],
b: imageData.data[pix + 2]
};
var colorDifference = Math.sqrt(
Math.pow(currentColor.r - sampleColor.r, 2) +
Math.pow(currentColor.g - sampleColor.g, 2) +
Math.pow(currentColor.b - sampleColor.b, 2)
);
// the color difference is the Euclidean distance between the color vectors (r1,g1,b1) and (r2,g2,b2)
if (colorDifference <= maxColorDifference * pixTolerance) {
matchPixCount++;
}
}
finishImage();
};
image.src = _frames[f].image;
} else {
finishImage();
}
},
callback: function() {
resultFrames = resultFrames.concat(_frames.slice(endCheckFrame));
if (resultFrames.length <= 0) {
// keep at least the last frame for the next processing step;
// if the total duration of all frames is < 1000ms, ffmpeg doesn't work well...
resultFrames.push(_frames[_frames.length - 1]);
}
callback(resultFrames);
}
});
}
|
remove black frames from the beginning to the specified frame
@param {Array} _frames - array of frames to be checked
@param {number} _framesToCheck - number of frames to check (-1 means drop frames until the first non-black frame is found)
@param {number} _pixTolerance - 0 - very strict (only black pixel color) ; 1 - all
@param {number} _frameTolerance - 0 - very strict (only black frame color) ; 1 - all
@returns {Array} - array of frames
|
dropBlackFrames
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
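A usage sketch matching the signature above (it relies on the surrounding WhammyRecorder scope for the canvas dimensions): strict tolerances of 0 drop only perfectly black leading frames.

dropBlackFrames(whammy.frames, -1, 0, 0, function(frames) {
    whammy.frames = frames; // encode only the non-black frames
});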
finishImage = function() {
if (!doNotCheckNext && maxPixCount - matchPixCount <= maxPixCount * frameTolerance) {
// console.log('removed black frame : ' + f + ' ; frame duration ' + _frames[f].duration);
} else {
// console.log('frame is passed : ' + f);
if (checkUntilNotBlack) {
doNotCheckNext = true;
}
resultFrames.push(_frames[f]);
}
loop();
}
|
Inner helper of dropBlackFrames: keeps the current frame unless it is (almost) entirely black, then advances the async loop.
|
finishImage
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function clearRecordedDataCB() {
whammy.frames = [];
isStopDrawing = true;
isPausedRecording = false;
}
|
This method resets currently recorded data.
@method
@memberof WhammyRecorder
@example
recorder.clearRecordedData();
|
clearRecordedDataCB
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function processInWebWorker(_function) {
var blob = URL.createObjectURL(new Blob([_function.toString(),
'this.onmessage = function (eee) {' + _function.name + '(eee.data);}'
], {
type: 'application/javascript'
}));
var worker = new Worker(blob);
URL.revokeObjectURL(blob);
return worker;
}
|
This method runs the given named function inside a web worker created from a Blob URL; the URL is revoked as soon as the worker is constructed.
@method
@memberof Whammy
@example
var worker = processInWebWorker(whammyInWebWorker);
|
processInWebWorker
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function whammyInWebWorker(frames) {
function ArrayToWebM(frames) {
var info = checkFrames(frames);
if (!info) {
return [];
}
var clusterMaxDuration = 30000;
var EBML = [{
'id': 0x1a45dfa3, // EBML
'data': [{
'data': 1,
'id': 0x4286 // EBMLVersion
}, {
'data': 1,
'id': 0x42f7 // EBMLReadVersion
}, {
'data': 4,
'id': 0x42f2 // EBMLMaxIDLength
}, {
'data': 8,
'id': 0x42f3 // EBMLMaxSizeLength
}, {
'data': 'webm',
'id': 0x4282 // DocType
}, {
'data': 2,
'id': 0x4287 // DocTypeVersion
}, {
'data': 2,
'id': 0x4285 // DocTypeReadVersion
}]
}, {
'id': 0x18538067, // Segment
'data': [{
'id': 0x1549a966, // Info
'data': [{
'data': 1e6, //do things in millisecs (num of nanosecs for duration scale)
'id': 0x2ad7b1 // TimecodeScale
}, {
'data': 'whammy',
'id': 0x4d80 // MuxingApp
}, {
'data': 'whammy',
'id': 0x5741 // WritingApp
}, {
'data': doubleToString(info.duration),
'id': 0x4489 // Duration
}]
}, {
'id': 0x1654ae6b, // Tracks
'data': [{
'id': 0xae, // TrackEntry
'data': [{
'data': 1,
'id': 0xd7 // TrackNumber
}, {
'data': 1,
'id': 0x73c5 // TrackUID
}, {
'data': 0,
'id': 0x9c // FlagLacing
}, {
'data': 'und',
'id': 0x22b59c // Language
}, {
'data': 'V_VP8',
'id': 0x86 // CodecID
}, {
'data': 'VP8',
'id': 0x258688 // CodecName
}, {
'data': 1,
'id': 0x83 // TrackType
}, {
'id': 0xe0, // Video
'data': [{
'data': info.width,
'id': 0xb0 // PixelWidth
}, {
'data': info.height,
'id': 0xba // PixelHeight
}]
}]
}]
}]
}];
//Generate clusters (max duration)
var frameNumber = 0;
var clusterTimecode = 0;
while (frameNumber < frames.length) {
var clusterFrames = [];
var clusterDuration = 0;
do {
clusterFrames.push(frames[frameNumber]);
clusterDuration += frames[frameNumber].duration;
frameNumber++;
} while (frameNumber < frames.length && clusterDuration < clusterMaxDuration);
var clusterCounter = 0;
var cluster = {
'id': 0x1f43b675, // Cluster
'data': getClusterData(clusterTimecode, clusterCounter, clusterFrames)
}; //Add cluster to segment
EBML[1].data.push(cluster);
clusterTimecode += clusterDuration;
}
return generateEBML(EBML);
}
function getClusterData(clusterTimecode, clusterCounter, clusterFrames) {
return [{
'data': clusterTimecode,
'id': 0xe7 // Timecode
}].concat(clusterFrames.map(function(webp) {
var block = makeSimpleBlock({
discardable: 0,
frame: webp.data.slice(4),
invisible: 0,
keyframe: 1,
lacing: 0,
trackNum: 1,
timecode: Math.round(clusterCounter)
});
clusterCounter += webp.duration;
return {
data: block,
id: 0xa3
};
}));
}
// sums the lengths of all the frames and gets the duration
function checkFrames(frames) {
if (!frames[0]) {
postMessage({
error: 'Something went wrong. Maybe WebP format is not supported in the current browser.'
});
return;
}
var width = frames[0].width,
height = frames[0].height,
duration = frames[0].duration;
for (var i = 1; i < frames.length; i++) {
duration += frames[i].duration;
}
return {
duration: duration,
width: width,
height: height
};
}
function numToBuffer(num) {
var parts = [];
while (num > 0) {
parts.push(num & 0xff);
num = num >> 8;
}
return new Uint8Array(parts.reverse());
}
function strToBuffer(str) {
return new Uint8Array(str.split('').map(function(e) {
return e.charCodeAt(0);
}));
}
function bitsToBuffer(bits) {
var data = [];
var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
bits = pad + bits;
for (var i = 0; i < bits.length; i += 8) {
data.push(parseInt(bits.substr(i, 8), 2));
}
return new Uint8Array(data);
}
function generateEBML(json) {
var ebml = [];
for (var i = 0; i < json.length; i++) {
var data = json[i].data;
if (typeof data === 'object') {
data = generateEBML(data);
}
if (typeof data === 'number') {
data = bitsToBuffer(data.toString(2));
}
if (typeof data === 'string') {
data = strToBuffer(data);
}
var len = data.size || data.byteLength || data.length;
var zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8);
var sizeToString = len.toString(2);
var padded = (new Array((zeroes * 7 + 7 + 1) - sizeToString.length)).join('0') + sizeToString;
var size = (new Array(zeroes)).join('0') + '1' + padded;
ebml.push(numToBuffer(json[i].id));
ebml.push(bitsToBuffer(size));
ebml.push(data);
}
return new Blob(ebml, {
type: 'video/webm'
});
}
function toBinStrOld(bits) {
var data = '';
var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
bits = pad + bits;
for (var i = 0; i < bits.length; i += 8) {
data += String.fromCharCode(parseInt(bits.substr(i, 8), 2));
}
return data;
}
function makeSimpleBlock(data) {
var flags = 0;
if (data.keyframe) {
flags |= 128;
}
if (data.invisible) {
flags |= 8;
}
if (data.lacing) {
flags |= (data.lacing << 1);
}
if (data.discardable) {
flags |= 1;
}
if (data.trackNum > 127) {
throw 'TrackNumber > 127 not supported';
}
var out = [data.trackNum | 0x80, data.timecode >> 8, data.timecode & 0xff, flags].map(function(e) {
return String.fromCharCode(e);
}).join('') + data.frame;
return out;
}
function parseWebP(riff) {
var VP8 = riff.RIFF[0].WEBP[0];
var frameStart = VP8.indexOf('\x9d\x01\x2a'); // A VP8 keyframe starts with the 0x9d012a header
for (var i = 0, c = []; i < 4; i++) {
c[i] = VP8.charCodeAt(frameStart + 3 + i);
}
var width, height, tmp;
//the code below is literally copied verbatim from the bitstream spec
tmp = (c[1] << 8) | c[0];
width = tmp & 0x3FFF;
tmp = (c[3] << 8) | c[2];
height = tmp & 0x3FFF;
return {
width: width,
height: height,
data: VP8,
riff: riff
};
}
function getStrLength(string, offset) {
return parseInt(string.substr(offset + 4, 4).split('').map(function(i) {
var unpadded = i.charCodeAt(0).toString(2);
return (new Array(8 - unpadded.length + 1)).join('0') + unpadded;
}).join(''), 2);
}
function parseRIFF(string) {
var offset = 0;
var chunks = {};
while (offset < string.length) {
var id = string.substr(offset, 4);
var len = getStrLength(string, offset);
var data = string.substr(offset + 4 + 4, len);
offset += 4 + 4 + len;
chunks[id] = chunks[id] || [];
if (id === 'RIFF' || id === 'LIST') {
chunks[id].push(parseRIFF(data));
} else {
chunks[id].push(data);
}
}
return chunks;
}
function doubleToString(num) {
return [].slice.call(
new Uint8Array((new Float64Array([num])).buffer), 0).map(function(e) {
return String.fromCharCode(e);
}).reverse().join('');
}
var webm = ArrayToWebM(frames.map(function(frame) {
var webp = parseWebP(parseRIFF(atob(frame.image.slice(23))));
webp.duration = frame.duration;
return webp;
}));
postMessage(webm);
}
|
This function runs inside the web worker: it converts an array of {image: dataURL, duration: ms} WebP frames into a WebM blob and posts it back.
@method
@memberof Whammy
@param {Array} frames - frames to encode
|
whammyInWebWorker
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
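The shape each input frame must have, and why slice(23) works: the 23-character 'data:image/webp;base64,' prefix is stripped before atob() decodes the raw RIFF bytes. A sketch, assuming a canvas in scope:

var frame = {
    image: canvas.toDataURL('image/webp', 0.8), // 'data:image/webp;base64,....'
    duration: 100 // milliseconds
};
console.log(frame.image.slice(0, 23)); // 'data:image/webp;base64,'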
function ArrayToWebM(frames) {
var info = checkFrames(frames);
if (!info) {
return [];
}
var clusterMaxDuration = 30000;
var EBML = [{
'id': 0x1a45dfa3, // EBML
'data': [{
'data': 1,
'id': 0x4286 // EBMLVersion
}, {
'data': 1,
'id': 0x42f7 // EBMLReadVersion
}, {
'data': 4,
'id': 0x42f2 // EBMLMaxIDLength
}, {
'data': 8,
'id': 0x42f3 // EBMLMaxSizeLength
}, {
'data': 'webm',
'id': 0x4282 // DocType
}, {
'data': 2,
'id': 0x4287 // DocTypeVersion
}, {
'data': 2,
'id': 0x4285 // DocTypeReadVersion
}]
}, {
'id': 0x18538067, // Segment
'data': [{
'id': 0x1549a966, // Info
'data': [{
'data': 1e6, //do things in millisecs (num of nanosecs for duration scale)
'id': 0x2ad7b1 // TimecodeScale
}, {
'data': 'whammy',
'id': 0x4d80 // MuxingApp
}, {
'data': 'whammy',
'id': 0x5741 // WritingApp
}, {
'data': doubleToString(info.duration),
'id': 0x4489 // Duration
}]
}, {
'id': 0x1654ae6b, // Tracks
'data': [{
'id': 0xae, // TrackEntry
'data': [{
'data': 1,
'id': 0xd7 // TrackNumber
}, {
'data': 1,
'id': 0x73c5 // TrackUID
}, {
'data': 0,
'id': 0x9c // FlagLacing
}, {
'data': 'und',
'id': 0x22b59c // Language
}, {
'data': 'V_VP8',
'id': 0x86 // CodecID
}, {
'data': 'VP8',
'id': 0x258688 // CodecName
}, {
'data': 1,
'id': 0x83 // TrackType
}, {
'id': 0xe0, // Video
'data': [{
'data': info.width,
'id': 0xb0 // PixelWidth
}, {
'data': info.height,
'id': 0xba // PixelHeight
}]
}]
}]
}]
}];
//Generate clusters (max duration)
var frameNumber = 0;
var clusterTimecode = 0;
while (frameNumber < frames.length) {
var clusterFrames = [];
var clusterDuration = 0;
do {
clusterFrames.push(frames[frameNumber]);
clusterDuration += frames[frameNumber].duration;
frameNumber++;
} while (frameNumber < frames.length && clusterDuration < clusterMaxDuration);
var clusterCounter = 0;
var cluster = {
'id': 0x1f43b675, // Cluster
'data': getClusterData(clusterTimecode, clusterCounter, clusterFrames)
}; //Add cluster to segment
EBML[1].data.push(cluster);
clusterTimecode += clusterDuration;
}
return generateEBML(EBML);
}
|
This method builds the EBML/WebM document (EBML header, segment info, track entry, and clusters) from the parsed WebP frames.
@method
@memberof Whammy
@returns {Blob} - the encoded WebM blob
|
ArrayToWebM
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function getClusterData(clusterTimecode, clusterCounter, clusterFrames) {
return [{
'data': clusterTimecode,
'id': 0xe7 // Timecode
}].concat(clusterFrames.map(function(webp) {
var block = makeSimpleBlock({
discardable: 0,
frame: webp.data.slice(4),
invisible: 0,
keyframe: 1,
lacing: 0,
trackNum: 1,
timecode: Math.round(clusterCounter)
});
clusterCounter += webp.duration;
return {
data: block,
id: 0xa3
};
}));
}
|
This method builds a cluster's Timecode element plus one SimpleBlock per frame.
@method
@memberof Whammy
|
getClusterData
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function checkFrames(frames) {
if (!frames[0]) {
postMessage({
error: 'Something went wrong. Maybe WebP format is not supported in the current browser.'
});
return;
}
var width = frames[0].width,
height = frames[0].height,
duration = frames[0].duration;
for (var i = 1; i < frames.length; i++) {
duration += frames[i].duration;
}
return {
duration: duration,
width: width,
height: height
};
}
|
This method validates the frames and sums their durations.
@method
@memberof Whammy
@returns {Object} - {duration, width, height} of the clip, or undefined on error
|
checkFrames
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function numToBuffer(num) {
var parts = [];
while (num > 0) {
parts.push(num & 0xff);
num = num >> 8;
}
return new Uint8Array(parts.reverse());
}
|
This method serializes a number into its big-endian bytes.
@method
@memberof Whammy
|
numToBuffer
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
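The element IDs above already include their EBML marker bits, so numToBuffer only needs to emit the literal's big-endian bytes:

console.log(numToBuffer(0x1a45dfa3)); // Uint8Array [0x1a, 0x45, 0xdf, 0xa3]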
function strToBuffer(str) {
return new Uint8Array(str.split('').map(function(e) {
return e.charCodeAt(0);
}));
}
|
This method converts a binary string into a Uint8Array, one byte per character.
@method
@memberof Whammy
|
strToBuffer
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function bitsToBuffer(bits) {
var data = [];
var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
bits = pad + bits;
for (var i = 0; i < bits.length; i += 8) {
data.push(parseInt(bits.substr(i, 8), 2));
}
return new Uint8Array(data);
}
|
This method packs a bit string (left-padded with zeroes to whole bytes) into a Uint8Array.
@method
@memberof Whammy
|
bitsToBuffer
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function generateEBML(json) {
var ebml = [];
for (var i = 0; i < json.length; i++) {
var data = json[i].data;
if (typeof data === 'object') {
data = generateEBML(data);
}
if (typeof data === 'number') {
data = bitsToBuffer(data.toString(2));
}
if (typeof data === 'string') {
data = strToBuffer(data);
}
var len = data.size || data.byteLength || data.length;
var zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8);
var sizeToString = len.toString(2);
var padded = (new Array((zeroes * 7 + 7 + 1) - sizeToString.length)).join('0') + sizeToString;
var size = (new Array(zeroes)).join('0') + '1' + padded;
ebml.push(numToBuffer(json[i].id));
ebml.push(bitsToBuffer(size));
ebml.push(data);
}
return new Blob(ebml, {
type: 'video/webm'
});
}
|
This method recursively serializes the EBML JSON tree (id, size, data) into a WebM blob.
@method
@memberof Whammy
|
generateEBML
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
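A worked example of the size computation above for a 5-byte payload: len = 5 gives sizeToString = '101' and zeroes = ceil(ceil(log2(5)) / 8) = 1, so the size becomes a 2-byte EBML variable-length integer (a '1' marker bit followed by the 14-bit padded length):

console.log(bitsToBuffer('100000000000101')); // Uint8Array [0x40, 0x05]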
function toBinStrOld(bits) {
var data = '';
var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
bits = pad + bits;
for (var i = 0; i < bits.length; i += 8) {
data += String.fromCharCode(parseInt(bits.substr(i, 8), 2));
}
return data;
}
|
This method packs a bit string into a binary string (legacy string-based variant of bitsToBuffer).
@method
@memberof Whammy
|
toBinStrOld
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function makeSimpleBlock(data) {
var flags = 0;
if (data.keyframe) {
flags |= 128;
}
if (data.invisible) {
flags |= 8;
}
if (data.lacing) {
flags |= (data.lacing << 1);
}
if (data.discardable) {
flags |= 1;
}
if (data.trackNum > 127) {
throw 'TrackNumber > 127 not supported';
}
var out = [data.trackNum | 0x80, data.timecode >> 8, data.timecode & 0xff, flags].map(function(e) {
return String.fromCharCode(e);
}).join('') + data.frame;
return out;
}
|
This method builds an EBML SimpleBlock: track number, 16-bit relative timecode and flags, followed by the raw frame data.
@method
@memberof Whammy
|
makeSimpleBlock
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function parseWebP(riff) {
var VP8 = riff.RIFF[0].WEBP[0];
var frameStart = VP8.indexOf('\x9d\x01\x2a'); // A VP8 keyframe starts with the 0x9d012a header
for (var i = 0, c = []; i < 4; i++) {
c[i] = VP8.charCodeAt(frameStart + 3 + i);
}
var width, height, tmp;
//the code below is literally copied verbatim from the bitstream spec
tmp = (c[1] << 8) | c[0];
width = tmp & 0x3FFF;
tmp = (c[3] << 8) | c[2];
height = tmp & 0x3FFF;
return {
width: width,
height: height,
data: VP8,
riff: riff
};
}
|
This method extracts the VP8 keyframe data plus its width/height from a parsed WebP RIFF container.
@method
@memberof Whammy
|
parseWebP
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function getStrLength(string, offset) {
return parseInt(string.substr(offset + 4, 4).split('').map(function(i) {
var unpadded = i.charCodeAt(0).toString(2);
return (new Array(8 - unpadded.length + 1)).join('0') + unpadded;
}).join(''), 2);
}
|
This method reads a chunk's 4-byte length field from a RIFF binary string.
@method
@memberof Whammy
|
getStrLength
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function parseRIFF(string) {
var offset = 0;
var chunks = {};
while (offset < string.length) {
var id = string.substr(offset, 4);
var len = getStrLength(string, offset);
var data = string.substr(offset + 4 + 4, len);
offset += 4 + 4 + len;
chunks[id] = chunks[id] || [];
if (id === 'RIFF' || id === 'LIST') {
chunks[id].push(parseRIFF(data));
} else {
chunks[id].push(data);
}
}
return chunks;
}
|
This method recursively parses a RIFF binary string into a {chunkId: [contents]} map.
@method
@memberof Whammy
|
parseRIFF
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function doubleToString(num) {
return [].slice.call(
new Uint8Array((new Float64Array([num])).buffer), 0).map(function(e) {
return String.fromCharCode(e);
}).reverse().join('');
}
|
This method serializes a 64-bit float into its 8 IEEE-754 bytes as a (byte-reversed) binary string.
@method
@memberof Whammy
|
doubleToString
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function putInDB() {
var transaction = db.transaction([self.dataStoreName], 'readwrite');
if (self.videoBlob) {
transaction.objectStore(self.dataStoreName).put(self.videoBlob, 'videoBlob');
}
if (self.gifBlob) {
transaction.objectStore(self.dataStoreName).put(self.gifBlob, 'gifBlob');
}
if (self.audioBlob) {
transaction.objectStore(self.dataStoreName).put(self.audioBlob, 'audioBlob');
}
function getFromStore(portionName) {
transaction.objectStore(self.dataStoreName).get(portionName).onsuccess = function(event) {
if (self.callback) {
self.callback(event.target.result, portionName);
}
};
}
getFromStore('audioBlob');
getFromStore('videoBlob');
getFromStore('gifBlob');
}
|
This method writes the video/gif/audio blobs into the IndexedDB object store, then reads each portion back through self.callback.
@method
@memberof DiskStorage
@internal
|
putInDB
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function getFromStore(portionName) {
transaction.objectStore(self.dataStoreName).get(portionName).onsuccess = function(event) {
if (self.callback) {
self.callback(event.target.result, portionName);
}
};
}
|
This method reads one stored blob from the IndexedDB object store and passes it to self.callback.
@method
@memberof DiskStorage
@internal
|
getFromStore
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
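putInDB and getFromStore back RecordRTC's DiskStorage helper; a hedged usage sketch of its public Store/Fetch wrappers (recordedBlob is assumed to be a previously recorded Blob, and the callback arguments mirror getFromStore above):

DiskStorage.Store({
    audioBlob: recordedBlob // stored under the 'audioBlob' key
});
DiskStorage.Fetch(function(item, portionName) {
    if (portionName === 'audioBlob' && item) {
        console.log('restored from IndexedDB:', item);
    }
});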
function drawVideoFrame(time) {
if (self.clearedRecordedData === true) {
return;
}
if (isPausedRecording) {
return setTimeout(function() {
drawVideoFrame(time);
}, 100);
}
lastAnimationFrame = requestAnimationFrame(drawVideoFrame);
if (typeof lastFrameTime === 'undefined') {
lastFrameTime = time;
}
// ~10 fps
if (time - lastFrameTime < 90) {
return;
}
if (!isHTMLObject && video.paused) {
// via: https://github.com/muaz-khan/WebRTC-Experiment/pull/316
// Tweak for Android Chrome
video.play();
}
if (!isHTMLObject) {
context.drawImage(video, 0, 0, canvas.width, canvas.height);
}
if (config.onGifPreview) {
config.onGifPreview(canvas.toDataURL('image/png'));
}
gifEncoder.addFrame(context);
lastFrameTime = time;
}
|
requestAnimationFrame loop: draws the current video frame into the canvas and adds it to the GIF encoder (~10 fps).
@method
@memberof GifRecorder
|
drawVideoFrame
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function clearRecordedDataCB() {
if (gifEncoder) {
gifEncoder.stream().bin = [];
}
}
|
This method resets currently recorded data.
@method
@memberof GifRecorder
@example
recorder.clearRecordedData();
|
clearRecordedDataCB
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function MultiStreamsMixer(arrayOfMediaStreams, elementClass) {
var browserFakeUserAgent = 'Fake/5.0 (FakeOS) AppleWebKit/123 (KHTML, like Gecko) Fake/12.3.4567.89 Fake/123.45';
(function(that) {
if (typeof RecordRTC !== 'undefined') {
return;
}
if (!that) {
return;
}
if (typeof window !== 'undefined') {
return;
}
if (typeof global === 'undefined') {
return;
}
global.navigator = {
userAgent: browserFakeUserAgent,
getUserMedia: function() {}
};
if (!global.console) {
global.console = {};
}
if (typeof global.console.log === 'undefined' || typeof global.console.error === 'undefined') {
global.console.error = global.console.log = global.console.log || function() {
console.log(arguments);
};
}
if (typeof document === 'undefined') {
/*global document:true */
that.document = {
documentElement: {
appendChild: function() {
return '';
}
}
};
document.createElement = document.captureStream = document.mozCaptureStream = function() {
var obj = {
getContext: function() {
return obj;
},
play: function() {},
pause: function() {},
drawImage: function() {},
toDataURL: function() {
return '';
},
style: {}
};
return obj;
};
that.HTMLVideoElement = function() {};
}
if (typeof location === 'undefined') {
/*global location:true */
that.location = {
protocol: 'file:',
href: '',
hash: ''
};
}
if (typeof screen === 'undefined') {
/*global screen:true */
that.screen = {
width: 0,
height: 0
};
}
if (typeof URL === 'undefined') {
/*global URL:true */
that.URL = {
createObjectURL: function() {
return '';
},
revokeObjectURL: function() {
return '';
}
};
}
/*global window:true */
that.window = global;
})(typeof global !== 'undefined' ? global : null);
// requires: chrome://flags/#enable-experimental-web-platform-features
elementClass = elementClass || 'multi-streams-mixer';
var videos = [];
var isStopDrawingFrames = false;
var canvas = document.createElement('canvas');
var context = canvas.getContext('2d');
canvas.style.opacity = 0;
canvas.style.position = 'absolute';
canvas.style.zIndex = -1;
canvas.style.top = '-1000em';
canvas.style.left = '-1000em';
canvas.className = elementClass;
(document.body || document.documentElement).appendChild(canvas);
this.disableLogs = false;
this.frameInterval = 10;
this.width = 360;
this.height = 240;
// use gain node to prevent echo
this.useGainNode = true;
var self = this;
// _____________________________
// Cross-Browser-Declarations.js
// WebAudio API representer
var AudioContext = window.AudioContext;
if (typeof AudioContext === 'undefined') {
if (typeof webkitAudioContext !== 'undefined') {
/*global AudioContext:true */
AudioContext = webkitAudioContext;
}
if (typeof mozAudioContext !== 'undefined') {
/*global AudioContext:true */
AudioContext = mozAudioContext;
}
}
/*jshint -W079 */
var URL = window.URL;
if (typeof URL === 'undefined' && typeof webkitURL !== 'undefined') {
/*global URL:true */
URL = webkitURL;
}
if (typeof navigator !== 'undefined' && typeof navigator.getUserMedia === 'undefined') { // maybe window.navigator?
if (typeof navigator.webkitGetUserMedia !== 'undefined') {
navigator.getUserMedia = navigator.webkitGetUserMedia;
}
if (typeof navigator.mozGetUserMedia !== 'undefined') {
navigator.getUserMedia = navigator.mozGetUserMedia;
}
}
var MediaStream = window.MediaStream;
if (typeof MediaStream === 'undefined' && typeof webkitMediaStream !== 'undefined') {
MediaStream = webkitMediaStream;
}
/*global MediaStream:true */
if (typeof MediaStream !== 'undefined') {
// override "stop" method for all browsers
if (typeof MediaStream.prototype.stop === 'undefined') {
MediaStream.prototype.stop = function() {
this.getTracks().forEach(function(track) {
track.stop();
});
};
}
}
var Storage = {};
if (typeof AudioContext !== 'undefined') {
Storage.AudioContext = AudioContext;
} else if (typeof webkitAudioContext !== 'undefined') {
Storage.AudioContext = webkitAudioContext;
}
function setSrcObject(stream, element) {
if ('srcObject' in element) {
element.srcObject = stream;
} else if ('mozSrcObject' in element) {
element.mozSrcObject = stream;
} else {
element.srcObject = stream;
}
}
this.startDrawingFrames = function() {
drawVideosToCanvas();
};
function drawVideosToCanvas() {
if (isStopDrawingFrames) {
return;
}
var videosLength = videos.length;
var fullcanvas = false;
var remaining = [];
videos.forEach(function(video) {
if (!video.stream) {
video.stream = {};
}
if (video.stream.fullcanvas) {
fullcanvas = video;
} else {
// todo: video.stream.active or video.stream.live to fix blank frames issues?
remaining.push(video);
}
});
if (fullcanvas) {
canvas.width = fullcanvas.stream.width;
canvas.height = fullcanvas.stream.height;
} else if (remaining.length) {
canvas.width = videosLength > 1 ? remaining[0].width * 2 : remaining[0].width;
var height = 1;
if (videosLength === 3 || videosLength === 4) {
height = 2;
}
if (videosLength === 5 || videosLength === 6) {
height = 3;
}
if (videosLength === 7 || videosLength === 8) {
height = 4;
}
if (videosLength === 9 || videosLength === 10) {
height = 5;
}
canvas.height = remaining[0].height * height;
} else {
canvas.width = self.width || 360;
canvas.height = self.height || 240;
}
if (fullcanvas && fullcanvas instanceof HTMLVideoElement) {
drawImage(fullcanvas);
}
remaining.forEach(function(video, idx) {
drawImage(video, idx);
});
setTimeout(drawVideosToCanvas, self.frameInterval);
}
function drawImage(video, idx) {
if (isStopDrawingFrames) {
return;
}
var x = 0;
var y = 0;
var width = video.width;
var height = video.height;
if (idx === 1) {
x = video.width;
}
if (idx === 2) {
y = video.height;
}
if (idx === 3) {
x = video.width;
y = video.height;
}
if (idx === 4) {
y = video.height * 2;
}
if (idx === 5) {
x = video.width;
y = video.height * 2;
}
if (idx === 6) {
y = video.height * 3;
}
if (idx === 7) {
x = video.width;
y = video.height * 3;
}
if (typeof video.stream.left !== 'undefined') {
x = video.stream.left;
}
if (typeof video.stream.top !== 'undefined') {
y = video.stream.top;
}
if (typeof video.stream.width !== 'undefined') {
width = video.stream.width;
}
if (typeof video.stream.height !== 'undefined') {
height = video.stream.height;
}
context.drawImage(video, x, y, width, height);
if (typeof video.stream.onRender === 'function') {
video.stream.onRender(context, x, y, width, height, idx);
}
}
function getMixedStream() {
isStopDrawingFrames = false;
var mixedVideoStream = getMixedVideoStream();
var mixedAudioStream = getMixedAudioStream();
if (mixedAudioStream) {
mixedAudioStream.getTracks().filter(function(t) {
return t.kind === 'audio';
}).forEach(function(track) {
mixedVideoStream.addTrack(track);
});
}
var fullcanvas;
arrayOfMediaStreams.forEach(function(stream) {
if (stream.fullcanvas) {
fullcanvas = true;
}
});
// mixedVideoStream.prototype.appendStreams = appendStreams;
// mixedVideoStream.prototype.resetVideoStreams = resetVideoStreams;
// mixedVideoStream.prototype.clearRecordedData = clearRecordedData;
return mixedVideoStream;
}
function getMixedVideoStream() {
resetVideoStreams();
var capturedStream;
if ('captureStream' in canvas) {
capturedStream = canvas.captureStream();
} else if ('mozCaptureStream' in canvas) {
capturedStream = canvas.mozCaptureStream();
} else if (!self.disableLogs) {
console.error('Upgrade to latest Chrome or otherwise enable this flag: chrome://flags/#enable-experimental-web-platform-features');
}
var videoStream = new MediaStream();
capturedStream.getTracks().filter(function(t) {
return t.kind === 'video';
}).forEach(function(track) {
videoStream.addTrack(track);
});
canvas.stream = videoStream;
return videoStream;
}
function getMixedAudioStream() {
// via: @pehrsons
if (!Storage.AudioContextConstructor) {
Storage.AudioContextConstructor = new Storage.AudioContext();
}
self.audioContext = Storage.AudioContextConstructor;
self.audioSources = [];
if (self.useGainNode === true) {
self.gainNode = self.audioContext.createGain();
self.gainNode.connect(self.audioContext.destination);
self.gainNode.gain.value = 0; // don't hear self
}
var audioTracksLength = 0;
arrayOfMediaStreams.forEach(function(stream) {
if (!stream.getTracks().filter(function(t) {
return t.kind === 'audio';
}).length) {
return;
}
audioTracksLength++;
var audioSource = self.audioContext.createMediaStreamSource(stream);
if (self.useGainNode === true) {
audioSource.connect(self.gainNode);
}
self.audioSources.push(audioSource);
});
if (!audioTracksLength) {
            // no audio tracks were found, so there is nothing to mix;
            // skip the rest of the code
return;
}
self.audioDestination = self.audioContext.createMediaStreamDestination();
self.audioSources.forEach(function(audioSource) {
audioSource.connect(self.audioDestination);
});
return self.audioDestination.stream;
}
function getVideo(stream) {
var video = document.createElement('video');
setSrcObject(stream, video);
video.className = elementClass;
video.muted = true;
video.volume = 0;
video.width = stream.width || self.width || 360;
video.height = stream.height || self.height || 240;
video.play();
return video;
}
this.appendStreams = function(streams) {
if (!streams) {
throw 'First parameter is required.';
}
if (!(streams instanceof Array)) {
streams = [streams];
}
streams.forEach(function(stream) {
var newStream = new MediaStream();
if (stream.getTracks().filter(function(t) {
return t.kind === 'video';
}).length) {
var video = getVideo(stream);
video.stream = stream;
videos.push(video);
newStream.addTrack(stream.getTracks().filter(function(t) {
return t.kind === 'video';
})[0]);
}
if (stream.getTracks().filter(function(t) {
return t.kind === 'audio';
}).length) {
var audioSource = self.audioContext.createMediaStreamSource(stream);
self.audioDestination = self.audioContext.createMediaStreamDestination();
audioSource.connect(self.audioDestination);
newStream.addTrack(self.audioDestination.stream.getTracks().filter(function(t) {
return t.kind === 'audio';
})[0]);
}
arrayOfMediaStreams.push(newStream);
});
};
this.releaseStreams = function() {
videos = [];
isStopDrawingFrames = true;
if (self.gainNode) {
self.gainNode.disconnect();
self.gainNode = null;
}
if (self.audioSources.length) {
self.audioSources.forEach(function(source) {
source.disconnect();
});
self.audioSources = [];
}
if (self.audioDestination) {
self.audioDestination.disconnect();
self.audioDestination = null;
}
if (self.audioContext) {
self.audioContext.close();
}
self.audioContext = null;
context.clearRect(0, 0, canvas.width, canvas.height);
if (canvas.stream) {
canvas.stream.stop();
canvas.stream = null;
}
};
this.resetVideoStreams = function(streams) {
if (streams && !(streams instanceof Array)) {
streams = [streams];
}
resetVideoStreams(streams);
};
function resetVideoStreams(streams) {
videos = [];
streams = streams || arrayOfMediaStreams;
// via: @adrian-ber
streams.forEach(function(stream) {
if (!stream.getTracks().filter(function(t) {
return t.kind === 'video';
}).length) {
return;
}
var video = getVideo(stream);
video.stream = stream;
videos.push(video);
});
}
// for debugging
this.name = 'MultiStreamsMixer';
this.toString = function() {
return this.name;
};
this.getMixedStream = getMixedStream;
}
|
MultiStreamsMixer mixes multiple MediaStreams (video via a shared canvas, audio via the Web Audio API) into a single stream.
@constructor
@example
var mixer = new MultiStreamsMixer(arrayOfMediaStreams);
var mixedStream = mixer.getMixedStream();
|
MultiStreamsMixer
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
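A minimal usage sketch for the mixer above; "screenStream" and "cameraStream" are illustrative names for streams obtained elsewhere (e.g. via getUserMedia/getDisplayMedia):
var mixer = new MultiStreamsMixer([screenStream, cameraStream]);
mixer.frameInterval = 10; // canvas redraw interval, in milliseconds
mixer.startDrawingFrames();
var mixedStream = mixer.getMixedStream(); // mixed video + mixed audio
// ...record or transmit mixedStream...
mixer.releaseStreams(); // stop drawing and close the audio graph when done
|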
function setSrcObject(stream, element) {
if ('srcObject' in element) {
element.srcObject = stream;
} else if ('mozSrcObject' in element) {
element.mozSrcObject = stream;
} else {
element.srcObject = stream;
}
}
|
Cross-browser helper that attaches a MediaStream to a media element.
@method
@memberof MultiStreamsMixer
@example
setSrcObject(stream, videoElement);
|
setSrcObject
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
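A short usage sketch for the helper above, assuming "mediaStream" is a live MediaStream:
var video = document.createElement('video');
setSrcObject(mediaStream, video);
video.muted = true; // avoid feedback when previewing local audio
video.play();
|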
function drawVideosToCanvas() {
if (isStopDrawingFrames) {
return;
}
var videosLength = videos.length;
var fullcanvas = false;
var remaining = [];
videos.forEach(function(video) {
if (!video.stream) {
video.stream = {};
}
if (video.stream.fullcanvas) {
fullcanvas = video;
} else {
            // todo: check video.stream.active or video.stream.live to fix blank-frame issues?
remaining.push(video);
}
});
if (fullcanvas) {
canvas.width = fullcanvas.stream.width;
canvas.height = fullcanvas.stream.height;
} else if (remaining.length) {
canvas.width = videosLength > 1 ? remaining[0].width * 2 : remaining[0].width;
var height = 1;
if (videosLength === 3 || videosLength === 4) {
height = 2;
}
if (videosLength === 5 || videosLength === 6) {
height = 3;
}
if (videosLength === 7 || videosLength === 8) {
height = 4;
}
if (videosLength === 9 || videosLength === 10) {
height = 5;
}
canvas.height = remaining[0].height * height;
} else {
canvas.width = self.width || 360;
canvas.height = self.height || 240;
}
if (fullcanvas && fullcanvas instanceof HTMLVideoElement) {
drawImage(fullcanvas);
}
remaining.forEach(function(video, idx) {
drawImage(video, idx);
});
setTimeout(drawVideosToCanvas, self.frameInterval);
}
|
Draws every attached video onto the mixer canvas in a grid layout, then re-schedules itself using frameInterval.
@method
@memberof MultiStreamsMixer
@example
mixer.startDrawingFrames();
|
drawVideosToCanvas
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function drawImage(video, idx) {
if (isStopDrawingFrames) {
return;
}
var x = 0;
var y = 0;
var width = video.width;
var height = video.height;
if (idx === 1) {
x = video.width;
}
if (idx === 2) {
y = video.height;
}
if (idx === 3) {
x = video.width;
y = video.height;
}
if (idx === 4) {
y = video.height * 2;
}
if (idx === 5) {
x = video.width;
y = video.height * 2;
}
if (idx === 6) {
y = video.height * 3;
}
if (idx === 7) {
x = video.width;
y = video.height * 3;
}
if (typeof video.stream.left !== 'undefined') {
x = video.stream.left;
}
if (typeof video.stream.top !== 'undefined') {
y = video.stream.top;
}
if (typeof video.stream.width !== 'undefined') {
width = video.stream.width;
}
if (typeof video.stream.height !== 'undefined') {
height = video.stream.height;
}
context.drawImage(video, x, y, width, height);
if (typeof video.stream.onRender === 'function') {
video.stream.onRender(context, x, y, width, height, idx);
}
}
|
Draws a single video onto the canvas, honoring optional per-stream left/top/width/height overrides and the onRender hook.
@method
@memberof MultiStreamsMixer
@example
drawImage(video, idx);
|
drawImage
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
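Because drawImage honors per-stream overrides, a stream can be placed manually; a sketch with illustrative values (the camera feed is drawn as picture-in-picture):
cameraStream.left = 10;
cameraStream.top = 10;
cameraStream.width = 160;
cameraStream.height = 120;
cameraStream.onRender = function(context, x, y, width, height) {
    // draw a simple border around the overlaid camera feed
    context.strokeStyle = 'white';
    context.strokeRect(x, y, width, height);
};
|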
function getMixedStream() {
isStopDrawingFrames = false;
var mixedVideoStream = getMixedVideoStream();
var mixedAudioStream = getMixedAudioStream();
if (mixedAudioStream) {
mixedAudioStream.getTracks().filter(function(t) {
return t.kind === 'audio';
}).forEach(function(track) {
mixedVideoStream.addTrack(track);
});
}
var fullcanvas;
arrayOfMediaStreams.forEach(function(stream) {
if (stream.fullcanvas) {
fullcanvas = true;
}
});
// mixedVideoStream.prototype.appendStreams = appendStreams;
// mixedVideoStream.prototype.resetVideoStreams = resetVideoStreams;
// mixedVideoStream.prototype.clearRecordedData = clearRecordedData;
return mixedVideoStream;
}
|
Returns a single MediaStream carrying the mixed canvas video plus the mixed audio tracks.
@method
@memberof MultiStreamsMixer
@example
var mixedStream = mixer.getMixedStream();
|
getMixedStream
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function getMixedVideoStream() {
resetVideoStreams();
var capturedStream;
if ('captureStream' in canvas) {
capturedStream = canvas.captureStream();
} else if ('mozCaptureStream' in canvas) {
capturedStream = canvas.mozCaptureStream();
} else if (!self.disableLogs) {
console.error('Upgrade to latest Chrome or otherwise enable this flag: chrome://flags/#enable-experimental-web-platform-features');
}
var videoStream = new MediaStream();
capturedStream.getTracks().filter(function(t) {
return t.kind === 'video';
}).forEach(function(track) {
videoStream.addTrack(track);
});
canvas.stream = videoStream;
return videoStream;
}
|
Captures the mixer canvas as a video-only MediaStream via captureStream/mozCaptureStream.
@method
@memberof MultiStreamsMixer
@example
var videoStream = getMixedVideoStream();
|
getMixedVideoStream
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function getMixedAudioStream() {
// via: @pehrsons
if (!Storage.AudioContextConstructor) {
Storage.AudioContextConstructor = new Storage.AudioContext();
}
self.audioContext = Storage.AudioContextConstructor;
self.audioSources = [];
if (self.useGainNode === true) {
self.gainNode = self.audioContext.createGain();
self.gainNode.connect(self.audioContext.destination);
self.gainNode.gain.value = 0; // don't hear self
}
var audioTracksLength = 0;
arrayOfMediaStreams.forEach(function(stream) {
if (!stream.getTracks().filter(function(t) {
return t.kind === 'audio';
}).length) {
return;
}
audioTracksLength++;
var audioSource = self.audioContext.createMediaStreamSource(stream);
if (self.useGainNode === true) {
audioSource.connect(self.gainNode);
}
self.audioSources.push(audioSource);
});
if (!audioTracksLength) {
        // no audio tracks were found, so there is nothing to mix;
        // skip the rest of the code
return;
}
self.audioDestination = self.audioContext.createMediaStreamDestination();
self.audioSources.forEach(function(audioSource) {
audioSource.connect(self.audioDestination);
});
return self.audioDestination.stream;
}
|
Mixes the audio tracks of all attached streams through a shared AudioContext and returns the destination stream.
@method
@memberof MultiStreamsMixer
@example
var audioStream = getMixedAudioStream();
|
getMixedAudioStream
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function getVideo(stream) {
var video = document.createElement('video');
setSrcObject(stream, video);
video.className = elementClass;
video.muted = true;
video.volume = 0;
video.width = stream.width || self.width || 360;
video.height = stream.height || self.height || 240;
video.play();
return video;
}
|
Creates a muted, auto-playing video element for the given stream so its frames can be drawn to the canvas.
@method
@memberof MultiStreamsMixer
@example
var video = getVideo(stream);
|
getVideo
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function resetVideoStreams(streams) {
videos = [];
streams = streams || arrayOfMediaStreams;
// via: @adrian-ber
streams.forEach(function(stream) {
if (!stream.getTracks().filter(function(t) {
return t.kind === 'video';
}).length) {
return;
}
var video = getVideo(stream);
video.stream = stream;
videos.push(video);
});
}
|
Rebuilds the internal list of video elements from the given streams (or from all attached streams).
@method
@memberof MultiStreamsMixer
@example
mixer.resetVideoStreams();
|
resetVideoStreams
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function getAllVideoTracks() {
var tracks = [];
arrayOfMediaStreams.forEach(function(stream) {
getTracks(stream, 'video').forEach(function(track) {
tracks.push(track);
});
});
return tracks;
}
|
Collects the video tracks from all attached MediaStreams.
@method
@memberof MultiStreamRecorder
@example
var tracks = getAllVideoTracks();
|
getAllVideoTracks
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function terminate() {
if (!worker) {
return;
}
worker.postMessage(null);
worker.terminate();
worker = null;
}
|
Posts a final null message to the background worker, then terminates and releases it.
@method
@memberof WebAssemblyRecorder
@example
terminate();
|
terminate
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/RecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/RecordRTC.js
|
MIT
|
function clearRecordedDataCB() {
whammy.frames = [];
isRecording = false;
isPausedRecording = false;
}
|
This method resets currently recorded data.
@method
@memberof CanvasRecorder
@example
recorder.clearRecordedData();
|
clearRecordedDataCB
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/CanvasRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/CanvasRecorder.js
|
MIT
|
function cloneCanvas() {
//create a new canvas
var newCanvas = document.createElement('canvas');
var context = newCanvas.getContext('2d');
//set dimensions
newCanvas.width = htmlElement.width;
newCanvas.height = htmlElement.height;
//apply the old canvas to the new one
context.drawImage(htmlElement, 0, 0);
//return the new canvas
return newCanvas;
}
|
Clones the current frame of the source canvas into a new canvas element.
@method
@memberof CanvasRecorder
@example
var snapshot = cloneCanvas();
|
cloneCanvas
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/CanvasRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/CanvasRecorder.js
|
MIT
|
function drawCanvasFrame() {
if (isPausedRecording) {
lastTime = new Date().getTime();
return setTimeout(drawCanvasFrame, 500);
}
if (htmlElement.nodeName.toLowerCase() === 'canvas') {
var duration = new Date().getTime() - lastTime;
// via #206, by Jack i.e. @Seymourr
lastTime = new Date().getTime();
whammy.frames.push({
image: cloneCanvas(),
duration: duration
});
if (isRecording) {
setTimeout(drawCanvasFrame, config.frameInterval);
}
return;
}
html2canvas(htmlElement, {
grabMouse: typeof config.showMousePointer === 'undefined' || config.showMousePointer,
onrendered: function(canvas) {
var duration = new Date().getTime() - lastTime;
if (!duration) {
return setTimeout(drawCanvasFrame, config.frameInterval);
}
// via #206, by Jack i.e. @Seymourr
lastTime = new Date().getTime();
whammy.frames.push({
image: canvas.toDataURL('image/webp', 1),
duration: duration
});
if (isRecording) {
setTimeout(drawCanvasFrame, config.frameInterval);
}
}
});
}
|
Captures the next frame (directly from a canvas, or via html2canvas for other elements) and appends it to whammy.frames.
@method
@memberof CanvasRecorder
@example
drawCanvasFrame();
|
drawCanvasFrame
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/CanvasRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/CanvasRecorder.js
|
MIT
|
function bytesToSize(bytes) {
var k = 1000;
var sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
if (bytes === 0) {
return '0 Bytes';
}
var i = parseInt(Math.floor(Math.log(bytes) / Math.log(k)), 10);
return (bytes / Math.pow(k, i)).toPrecision(3) + ' ' + sizes[i];
}
|
Return human-readable file size.
@param {number} bytes - Pass bytes and get formatted string.
@returns {string} - formatted string
@example
bytesToSize(1024 * 1024 * 5) === '5.24 MB'
@see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
|
bytesToSize
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/Cross-Browser-Declarations.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/Cross-Browser-Declarations.js
|
MIT
|
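Sample outputs of the helper above (note it uses k = 1000, not 1024):
bytesToSize(0);               // '0 Bytes'
bytesToSize(1000);            // '1.00 KB'
bytesToSize(1024 * 1024 * 5); // '5.24 MB'
|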
function invokeSaveAsDialog(file, fileName) {
if (!file) {
throw 'Blob object is required.';
}
if (!file.type) {
try {
file.type = 'video/webm';
} catch (e) {}
}
var fileExtension = (file.type || 'video/webm').split('/')[1];
if (fileName && fileName.indexOf('.') !== -1) {
var splitted = fileName.split('.');
fileName = splitted[0];
fileExtension = splitted[1];
}
var fileFullName = (fileName || (Math.round(Math.random() * 9999999999) + 888888888)) + '.' + fileExtension;
if (typeof navigator.msSaveOrOpenBlob !== 'undefined') {
return navigator.msSaveOrOpenBlob(file, fileFullName);
} else if (typeof navigator.msSaveBlob !== 'undefined') {
return navigator.msSaveBlob(file, fileFullName);
}
var hyperlink = document.createElement('a');
hyperlink.href = URL.createObjectURL(file);
hyperlink.download = fileFullName;
hyperlink.style = 'display:none;opacity:0;color:transparent;';
(document.body || document.documentElement).appendChild(hyperlink);
if (typeof hyperlink.click === 'function') {
hyperlink.click();
} else {
hyperlink.target = '_blank';
hyperlink.dispatchEvent(new MouseEvent('click', {
view: window,
bubbles: true,
cancelable: true
}));
}
URL.revokeObjectURL(hyperlink.href);
}
|
@param {Blob} file - File or Blob object. This parameter is required.
@param {string} fileName - Optional file name e.g. "Recorded-Video.webm"
@example
invokeSaveAsDialog(blob or file, [optional] fileName);
@see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
|
invokeSaveAsDialog
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/Cross-Browser-Declarations.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/Cross-Browser-Declarations.js
|
MIT
|
function isElectron() {
// Renderer process
if (typeof window !== 'undefined' && typeof window.process === 'object' && window.process.type === 'renderer') {
return true;
}
// Main process
if (typeof process !== 'undefined' && typeof process.versions === 'object' && !!process.versions.electron) {
return true;
}
// Detect the user agent when the `nodeIntegration` option is set to true
if (typeof navigator === 'object' && typeof navigator.userAgent === 'string' && navigator.userAgent.indexOf('Electron') >= 0) {
return true;
}
return false;
}
|
from: https://github.com/cheton/is-electron/blob/master/index.js
|
isElectron
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/Cross-Browser-Declarations.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/Cross-Browser-Declarations.js
|
MIT
|
function getTracks(stream, kind) {
if (!stream || !stream.getTracks) {
return [];
}
return stream.getTracks().filter(function(t) {
return t.kind === (kind || 'audio');
});
}
|
Returns the tracks of the given kind ('audio' by default) from a MediaStream, or an empty array.
|
getTracks
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/Cross-Browser-Declarations.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/Cross-Browser-Declarations.js
|
MIT
|
function setSrcObject(stream, element) {
if ('srcObject' in element) {
element.srcObject = stream;
} else if ('mozSrcObject' in element) {
element.mozSrcObject = stream;
} else {
element.srcObject = stream;
}
}
|
Cross-browser helper that attaches a MediaStream to a media element.
|
setSrcObject
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/Cross-Browser-Declarations.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/Cross-Browser-Declarations.js
|
MIT
|
function getSeekableBlob(inputBlob, callback) {
// EBML.js copyrights goes to: https://github.com/legokichi/ts-ebml
if (typeof EBML === 'undefined') {
throw new Error('Please link: https://www.webrtc-experiment.com/EBML.js');
}
var reader = new EBML.Reader();
var decoder = new EBML.Decoder();
var tools = EBML.tools;
var fileReader = new FileReader();
fileReader.onload = function(e) {
var ebmlElms = decoder.decode(this.result);
ebmlElms.forEach(function(element) {
reader.read(element);
});
reader.stop();
var refinedMetadataBuf = tools.makeMetadataSeekable(reader.metadatas, reader.duration, reader.cues);
var body = this.result.slice(reader.metadataSize);
var newBlob = new Blob([refinedMetadataBuf, body], {
type: 'video/webm'
});
callback(newBlob);
};
fileReader.readAsArrayBuffer(inputBlob);
}
|
@param {Blob} file - File or Blob object.
@param {function} callback - Callback function.
@example
getSeekableBlob(blob or file, callback);
@see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
|
getSeekableBlob
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/Cross-Browser-Declarations.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/Cross-Browser-Declarations.js
|
MIT
|
function putInDB() {
var transaction = db.transaction([self.dataStoreName], 'readwrite');
if (self.videoBlob) {
transaction.objectStore(self.dataStoreName).put(self.videoBlob, 'videoBlob');
}
if (self.gifBlob) {
transaction.objectStore(self.dataStoreName).put(self.gifBlob, 'gifBlob');
}
if (self.audioBlob) {
transaction.objectStore(self.dataStoreName).put(self.audioBlob, 'audioBlob');
}
function getFromStore(portionName) {
transaction.objectStore(self.dataStoreName).get(portionName).onsuccess = function(event) {
if (self.callback) {
self.callback(event.target.result, portionName);
}
};
}
getFromStore('audioBlob');
getFromStore('videoBlob');
getFromStore('gifBlob');
}
|
Writes the currently held blobs into the IndexedDB object store, then reads each portion back through the registered callback.
@method
@memberof DiskStorage
@internal
@example
putInDB();
|
putInDB
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/DiskStorage.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/DiskStorage.js
|
MIT
|
function getFromStore(portionName) {
transaction.objectStore(self.dataStoreName).get(portionName).onsuccess = function(event) {
if (self.callback) {
self.callback(event.target.result, portionName);
}
};
}
|
Reads a single stored portion (audioBlob, videoBlob, or gifBlob) and forwards it to the registered callback.
@method
@memberof DiskStorage
@internal
@example
getFromStore('audioBlob');
|
getFromStore
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/DiskStorage.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/DiskStorage.js
|
MIT
|
function drawVideoFrame(time) {
if (self.clearedRecordedData === true) {
return;
}
if (isPausedRecording) {
return setTimeout(function() {
drawVideoFrame(time);
}, 100);
}
lastAnimationFrame = requestAnimationFrame(drawVideoFrame);
    if (typeof lastFrameTime === 'undefined') {
lastFrameTime = time;
}
// ~10 fps
if (time - lastFrameTime < 90) {
return;
}
if (!isHTMLObject && video.paused) {
// via: https://github.com/muaz-khan/WebRTC-Experiment/pull/316
// Tweak for Android Chrome
video.play();
}
if (!isHTMLObject) {
context.drawImage(video, 0, 0, canvas.width, canvas.height);
}
if (config.onGifPreview) {
config.onGifPreview(canvas.toDataURL('image/png'));
}
gifEncoder.addFrame(context);
lastFrameTime = time;
}
|
Draws one video frame per tick (~10 fps) to the canvas and feeds it to the GIF encoder.
@method
@memberof GifRecorder
@example
lastAnimationFrame = requestAnimationFrame(drawVideoFrame);
|
drawVideoFrame
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/GifRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/GifRecorder.js
|
MIT
|
function clearRecordedDataCB() {
if (gifEncoder) {
gifEncoder.stream().bin = [];
}
}
|
This method resets currently recorded data.
@method
@memberof GifRecorder
@example
recorder.clearRecordedData();
|
clearRecordedDataCB
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/GifRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/GifRecorder.js
|
MIT
|
function updateTimeStamp() {
self.timestamps.push(new Date().getTime());
if (typeof config.onTimeStamp === 'function') {
config.onTimeStamp(self.timestamps[self.timestamps.length - 1], self.timestamps);
}
}
|
Pushes the current time onto recorder.timestamps and invokes the optional config.onTimeStamp hook.
@method
@memberof MediaStreamRecorder
@example
updateTimeStamp();
|
updateTimeStamp
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/MediaStreamRecorder.js
|
MIT
|
function getMimeType(secondObject) {
if (mediaRecorder && mediaRecorder.mimeType) {
return mediaRecorder.mimeType;
}
return secondObject.mimeType || 'video/webm';
}
|
Returns the mimeType reported by the internal MediaRecorder, falling back to the configured (or default) type.
@method
@memberof MediaStreamRecorder
@example
var mimeType = getMimeType(config);
|
getMimeType
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/MediaStreamRecorder.js
|
MIT
|
function clearRecordedDataCB() {
arrayOfBlobs = [];
mediaRecorder = null;
self.timestamps = [];
}
|
This method resets currently recorded data.
@method
@memberof MediaStreamRecorder
@example
recorder.clearRecordedData();
|
clearRecordedDataCB
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/MediaStreamRecorder.js
|
MIT
|
function isMediaStreamActive() {
if ('active' in mediaStream) {
if (!mediaStream.active) {
return false;
}
} else if ('ended' in mediaStream) { // old hack
if (mediaStream.ended) {
return false;
}
}
return true;
}
|
Checks whether the MediaStream is still active ("ended" on older implementations).
@method
@memberof MediaStreamRecorder
@example
if (isMediaStreamActive() === false) {
    console.log('MediaStream seems stopped.');
}
@returns {boolean} Returns true if the stream is still active.
|
isMediaStreamActive
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/MediaStreamRecorder.js
|
MIT
|
function getDataURL(blob, callback00) {
if (typeof Worker !== 'undefined') {
var webWorker = processInWebWorker(function readFile(_blob) {
postMessage(new FileReaderSync().readAsDataURL(_blob));
});
webWorker.onmessage = function(event) {
callback00(event.data);
};
webWorker.postMessage(blob);
} else {
var reader = new FileReader();
reader.readAsDataURL(blob);
reader.onload = function(event) {
callback00(event.target.result);
};
}
}
|
This method can be used to manually get all recorded blobs' DataURLs.
@param {function} callback - All recorded blobs' DataURLs are passed back to the "callback" function.
@method
@memberof MRecordRTC
@example
recorder.getDataURL(function(recording){
var audioDataURL = recording.audio;
var videoDataURL = recording.video;
var gifDataURL = recording.gif;
});
|
getDataURL
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/MRecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/MRecordRTC.js
|
MIT
|
function processInWebWorker(_function) {
var blob = URL.createObjectURL(new Blob([_function.toString(),
'this.onmessage = function (eee) {' + _function.name + '(eee.data);}'
], {
type: 'application/javascript'
}));
var worker = new Worker(blob);
var url;
if (typeof URL !== 'undefined') {
url = URL;
} else if (typeof webkitURL !== 'undefined') {
url = webkitURL;
} else {
throw 'Neither URL nor webkitURL detected.';
}
url.revokeObjectURL(blob);
return worker;
}
|
Serializes a named function into a Blob URL and spawns an inline Web Worker that dispatches incoming messages to it.
@param {function} _function - Named function to run inside the worker.
@method
@memberof MRecordRTC
@example
var worker = processInWebWorker(readFile);
|
processInWebWorker
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/MRecordRTC.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/MRecordRTC.js
|
MIT
|
function getAllVideoTracks() {
var tracks = [];
arrayOfMediaStreams.forEach(function(stream) {
getTracks(stream, 'video').forEach(function(track) {
tracks.push(track);
});
});
return tracks;
}
|
Collects the video tracks from all attached MediaStreams.
@method
@memberof MultiStreamRecorder
@example
var tracks = getAllVideoTracks();
|
getAllVideoTracks
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/MultiStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/MultiStreamRecorder.js
|
MIT
|
function isMediaStreamActive() {
if (config.checkForInactiveTracks === false) {
// always return "true"
return true;
}
if ('active' in mediaStream) {
if (!mediaStream.active) {
return false;
}
} else if ('ended' in mediaStream) { // old hack
if (mediaStream.ended) {
return false;
}
}
return true;
}
|
Checks whether the MediaStream is still active; always returns true when config.checkForInactiveTracks is disabled.
@method
@memberof StereoAudioRecorder
@example
if (isMediaStreamActive() === false) {
    recording = false;
}
@returns {boolean} Returns true if the stream is still active.
|
isMediaStreamActive
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/StereoAudioRecorder.js
|
MIT
|
function mergeLeftRightBuffers(config, callback) {
function mergeAudioBuffers(config, cb) {
var numberOfAudioChannels = config.numberOfAudioChannels;
        // todo: "slice(0)" makes an extra copy --- does it add a needless loop? Should it be removed?
var leftBuffers = config.leftBuffers.slice(0);
var rightBuffers = config.rightBuffers.slice(0);
var sampleRate = config.sampleRate;
var internalInterleavedLength = config.internalInterleavedLength;
var desiredSampRate = config.desiredSampRate;
if (numberOfAudioChannels === 2) {
leftBuffers = mergeBuffers(leftBuffers, internalInterleavedLength);
rightBuffers = mergeBuffers(rightBuffers, internalInterleavedLength);
if (desiredSampRate) {
leftBuffers = interpolateArray(leftBuffers, desiredSampRate, sampleRate);
rightBuffers = interpolateArray(rightBuffers, desiredSampRate, sampleRate);
}
}
if (numberOfAudioChannels === 1) {
leftBuffers = mergeBuffers(leftBuffers, internalInterleavedLength);
if (desiredSampRate) {
leftBuffers = interpolateArray(leftBuffers, desiredSampRate, sampleRate);
}
}
// set sample rate as desired sample rate
if (desiredSampRate) {
sampleRate = desiredSampRate;
}
// for changing the sampling rate, reference:
// http://stackoverflow.com/a/28977136/552182
function interpolateArray(data, newSampleRate, oldSampleRate) {
var fitCount = Math.round(data.length * (newSampleRate / oldSampleRate));
var newData = [];
var springFactor = Number((data.length - 1) / (fitCount - 1));
newData[0] = data[0];
for (var i = 1; i < fitCount - 1; i++) {
var tmp = i * springFactor;
var before = Number(Math.floor(tmp)).toFixed();
var after = Number(Math.ceil(tmp)).toFixed();
var atPoint = tmp - before;
newData[i] = linearInterpolate(data[before], data[after], atPoint);
}
newData[fitCount - 1] = data[data.length - 1];
return newData;
}
function linearInterpolate(before, after, atPoint) {
return before + (after - before) * atPoint;
}
function mergeBuffers(channelBuffer, rLength) {
var result = new Float64Array(rLength);
var offset = 0;
var lng = channelBuffer.length;
for (var i = 0; i < lng; i++) {
var buffer = channelBuffer[i];
result.set(buffer, offset);
offset += buffer.length;
}
return result;
}
function interleave(leftChannel, rightChannel) {
var length = leftChannel.length + rightChannel.length;
var result = new Float64Array(length);
var inputIndex = 0;
for (var index = 0; index < length;) {
result[index++] = leftChannel[inputIndex];
result[index++] = rightChannel[inputIndex];
inputIndex++;
}
return result;
}
function writeUTFBytes(view, offset, string) {
var lng = string.length;
for (var i = 0; i < lng; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
// interleave both channels together
var interleaved;
if (numberOfAudioChannels === 2) {
interleaved = interleave(leftBuffers, rightBuffers);
}
if (numberOfAudioChannels === 1) {
interleaved = leftBuffers;
}
var interleavedLength = interleaved.length;
// create wav file
var resultingBufferLength = 44 + interleavedLength * 2;
var buffer = new ArrayBuffer(resultingBufferLength);
var view = new DataView(buffer);
// RIFF chunk descriptor/identifier
writeUTFBytes(view, 0, 'RIFF');
// RIFF chunk length
// changed "44" to "36" via #401
view.setUint32(4, 36 + interleavedLength * 2, true);
// RIFF type
writeUTFBytes(view, 8, 'WAVE');
// format chunk identifier
// FMT sub-chunk
writeUTFBytes(view, 12, 'fmt ');
// format chunk length
view.setUint32(16, 16, true);
// sample format (raw)
view.setUint16(20, 1, true);
// stereo (2 channels)
view.setUint16(22, numberOfAudioChannels, true);
// sample rate
view.setUint32(24, sampleRate, true);
// byte rate (sample rate * block align)
view.setUint32(28, sampleRate * 2, true);
// block align (channel count * bytes per sample)
view.setUint16(32, numberOfAudioChannels * 2, true);
// bits per sample
view.setUint16(34, 16, true);
// data sub-chunk
// data chunk identifier
writeUTFBytes(view, 36, 'data');
// data chunk length
view.setUint32(40, interleavedLength * 2, true);
// write the PCM samples
var lng = interleavedLength;
var index = 44;
var volume = 1;
for (var i = 0; i < lng; i++) {
view.setInt16(index, interleaved[i] * (0x7FFF * volume), true);
index += 2;
}
if (cb) {
return cb({
buffer: buffer,
view: view
});
}
postMessage({
buffer: buffer,
view: view
});
}
if (config.noWorker) {
mergeAudioBuffers(config, function(data) {
callback(data.buffer, data.view);
});
return;
}
var webWorker = processInWebWorker(mergeAudioBuffers);
webWorker.onmessage = function(event) {
callback(event.data.buffer, event.data.view);
// release memory
URL.revokeObjectURL(webWorker.workerURL);
// kill webworker (or Chrome will kill your page after ~25 calls)
webWorker.terminate();
};
webWorker.postMessage(config);
}
|
Merges the recorded left/right channel buffers into a 16-bit PCM WAV file (optionally resampled), in a Web Worker by default.
@method
@memberof StereoAudioRecorder
@example
mergeLeftRightBuffers(config, callback);
|
mergeLeftRightBuffers
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/StereoAudioRecorder.js
|
MIT
|
function mergeAudioBuffers(config, cb) {
var numberOfAudioChannels = config.numberOfAudioChannels;
    // todo: "slice(0)" makes an extra copy --- does it add a needless loop? Should it be removed?
var leftBuffers = config.leftBuffers.slice(0);
var rightBuffers = config.rightBuffers.slice(0);
var sampleRate = config.sampleRate;
var internalInterleavedLength = config.internalInterleavedLength;
var desiredSampRate = config.desiredSampRate;
if (numberOfAudioChannels === 2) {
leftBuffers = mergeBuffers(leftBuffers, internalInterleavedLength);
rightBuffers = mergeBuffers(rightBuffers, internalInterleavedLength);
if (desiredSampRate) {
leftBuffers = interpolateArray(leftBuffers, desiredSampRate, sampleRate);
rightBuffers = interpolateArray(rightBuffers, desiredSampRate, sampleRate);
}
}
if (numberOfAudioChannels === 1) {
leftBuffers = mergeBuffers(leftBuffers, internalInterleavedLength);
if (desiredSampRate) {
leftBuffers = interpolateArray(leftBuffers, desiredSampRate, sampleRate);
}
}
// set sample rate as desired sample rate
if (desiredSampRate) {
sampleRate = desiredSampRate;
}
// for changing the sampling rate, reference:
// http://stackoverflow.com/a/28977136/552182
function interpolateArray(data, newSampleRate, oldSampleRate) {
var fitCount = Math.round(data.length * (newSampleRate / oldSampleRate));
var newData = [];
var springFactor = Number((data.length - 1) / (fitCount - 1));
newData[0] = data[0];
for (var i = 1; i < fitCount - 1; i++) {
var tmp = i * springFactor;
var before = Number(Math.floor(tmp)).toFixed();
var after = Number(Math.ceil(tmp)).toFixed();
var atPoint = tmp - before;
newData[i] = linearInterpolate(data[before], data[after], atPoint);
}
newData[fitCount - 1] = data[data.length - 1];
return newData;
}
function linearInterpolate(before, after, atPoint) {
return before + (after - before) * atPoint;
}
function mergeBuffers(channelBuffer, rLength) {
var result = new Float64Array(rLength);
var offset = 0;
var lng = channelBuffer.length;
for (var i = 0; i < lng; i++) {
var buffer = channelBuffer[i];
result.set(buffer, offset);
offset += buffer.length;
}
return result;
}
function interleave(leftChannel, rightChannel) {
var length = leftChannel.length + rightChannel.length;
var result = new Float64Array(length);
var inputIndex = 0;
for (var index = 0; index < length;) {
result[index++] = leftChannel[inputIndex];
result[index++] = rightChannel[inputIndex];
inputIndex++;
}
return result;
}
function writeUTFBytes(view, offset, string) {
var lng = string.length;
for (var i = 0; i < lng; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
// interleave both channels together
var interleaved;
if (numberOfAudioChannels === 2) {
interleaved = interleave(leftBuffers, rightBuffers);
}
if (numberOfAudioChannels === 1) {
interleaved = leftBuffers;
}
var interleavedLength = interleaved.length;
// create wav file
var resultingBufferLength = 44 + interleavedLength * 2;
var buffer = new ArrayBuffer(resultingBufferLength);
var view = new DataView(buffer);
// RIFF chunk descriptor/identifier
writeUTFBytes(view, 0, 'RIFF');
// RIFF chunk length
// changed "44" to "36" via #401
view.setUint32(4, 36 + interleavedLength * 2, true);
// RIFF type
writeUTFBytes(view, 8, 'WAVE');
// format chunk identifier
// FMT sub-chunk
writeUTFBytes(view, 12, 'fmt ');
// format chunk length
view.setUint32(16, 16, true);
// sample format (raw)
view.setUint16(20, 1, true);
// stereo (2 channels)
view.setUint16(22, numberOfAudioChannels, true);
// sample rate
view.setUint32(24, sampleRate, true);
// byte rate (sample rate * block align)
view.setUint32(28, sampleRate * 2, true);
// block align (channel count * bytes per sample)
view.setUint16(32, numberOfAudioChannels * 2, true);
// bits per sample
view.setUint16(34, 16, true);
// data sub-chunk
// data chunk identifier
writeUTFBytes(view, 36, 'data');
// data chunk length
view.setUint32(40, interleavedLength * 2, true);
// write the PCM samples
var lng = interleavedLength;
var index = 44;
var volume = 1;
for (var i = 0; i < lng; i++) {
view.setInt16(index, interleaved[i] * (0x7FFF * volume), true);
index += 2;
}
if (cb) {
return cb({
buffer: buffer,
view: view
});
}
postMessage({
buffer: buffer,
view: view
});
}
|
Flattens, optionally resamples, and interleaves the channel buffers, then writes a complete WAV (RIFF) file into an ArrayBuffer.
@method
@memberof StereoAudioRecorder
@example
mergeAudioBuffers(config, cb);
|
mergeAudioBuffers
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/StereoAudioRecorder.js
|
MIT
|
function interpolateArray(data, newSampleRate, oldSampleRate) {
var fitCount = Math.round(data.length * (newSampleRate / oldSampleRate));
var newData = [];
var springFactor = Number((data.length - 1) / (fitCount - 1));
newData[0] = data[0];
for (var i = 1; i < fitCount - 1; i++) {
var tmp = i * springFactor;
var before = Number(Math.floor(tmp)).toFixed();
var after = Number(Math.ceil(tmp)).toFixed();
var atPoint = tmp - before;
newData[i] = linearInterpolate(data[before], data[after], atPoint);
}
newData[fitCount - 1] = data[data.length - 1];
return newData;
}
|
Resamples an array of samples from oldSampleRate to newSampleRate using linear interpolation.
@method
@memberof StereoAudioRecorder
@example
interpolateArray(data, 16000, 44100);
|
interpolateArray
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/StereoAudioRecorder.js
|
MIT
|
function linearInterpolate(before, after, atPoint) {
return before + (after - before) * atPoint;
}
|
Linearly interpolates between two samples.
@method
@memberof StereoAudioRecorder
@example
linearInterpolate(before, after, atPoint);
|
linearInterpolate
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/StereoAudioRecorder.js
|
MIT
|
function mergeBuffers(channelBuffer, rLength) {
var result = new Float64Array(rLength);
var offset = 0;
var lng = channelBuffer.length;
for (var i = 0; i < lng; i++) {
var buffer = channelBuffer[i];
result.set(buffer, offset);
offset += buffer.length;
}
return result;
}
|
Flattens an array of channel buffers into a single Float64Array of the given length.
@method
@memberof StereoAudioRecorder
@example
mergeBuffers(channelBuffer, recordingLength);
|
mergeBuffers
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/StereoAudioRecorder.js
|
MIT
|
function interleave(leftChannel, rightChannel) {
var length = leftChannel.length + rightChannel.length;
var result = new Float64Array(length);
var inputIndex = 0;
for (var index = 0; index < length;) {
result[index++] = leftChannel[inputIndex];
result[index++] = rightChannel[inputIndex];
inputIndex++;
}
return result;
}
|
Interleaves left and right channel samples (L, R, L, R, ...) into a single buffer.
@method
@memberof StereoAudioRecorder
@example
interleave(leftChannel, rightChannel);
|
interleave
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/StereoAudioRecorder.js
|
MIT
|
function writeUTFBytes(view, offset, string) {
var lng = string.length;
for (var i = 0; i < lng; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
|
Writes an ASCII string byte-by-byte into a DataView at the given offset (used for the RIFF/WAVE header fields).
@method
@memberof StereoAudioRecorder
@example
writeUTFBytes(view, 0, 'RIFF');
|
writeUTFBytes
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/StereoAudioRecorder.js
|
MIT
|
function processInWebWorker(_function) {
var workerURL = URL.createObjectURL(new Blob([_function.toString(),
';this.onmessage = function (eee) {' + _function.name + '(eee.data);}'
], {
type: 'application/javascript'
}));
var worker = new Worker(workerURL);
worker.workerURL = workerURL;
return worker;
}
|
Serializes a named function into a Blob URL and spawns an inline Web Worker; the URL is kept on the worker for later revocation.
@method
@memberof StereoAudioRecorder
@example
var webWorker = processInWebWorker(mergeAudioBuffers);
|
processInWebWorker
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/StereoAudioRecorder.js
|
MIT
|
function resetVariables() {
leftchannel = [];
rightchannel = [];
recordingLength = 0;
isAudioProcessStarted = false;
recording = false;
isPaused = false;
context = null;
self.leftchannel = leftchannel;
self.rightchannel = rightchannel;
self.numberOfAudioChannels = numberOfAudioChannels;
self.desiredSampRate = desiredSampRate;
self.sampleRate = sampleRate;
self.recordingLength = recordingLength;
intervalsBasedBuffers = {
left: [],
right: [],
recordingLength: 0
};
}
|
This method resets currently recorded data.
@method
@memberof StereoAudioRecorder
@example
recorder.clearRecordedData();
|
resetVariables
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/StereoAudioRecorder.js
|
MIT
|
function clearRecordedDataCB() {
if (jsAudioNode) {
jsAudioNode.onaudioprocess = null;
jsAudioNode.disconnect();
jsAudioNode = null;
}
if (audioInput) {
audioInput.disconnect();
audioInput = null;
}
resetVariables();
}
|
This method resets currently recorded data.
@method
@memberof StereoAudioRecorder
@example
recorder.clearRecordedData();
|
clearRecordedDataCB
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/StereoAudioRecorder.js
|
MIT
|
function onAudioProcessDataAvailable(e) {
if (isPaused) {
return;
}
if (isMediaStreamActive() === false) {
if (!config.disableLogs) {
console.log('MediaStream seems stopped.');
}
jsAudioNode.disconnect();
recording = false;
}
if (!recording) {
if (audioInput) {
audioInput.disconnect();
audioInput = null;
}
return;
}
/**
* This method is called on "onaudioprocess" event's first invocation.
* @method {function} onAudioProcessStarted
* @memberof StereoAudioRecorder
* @example
     * recorder.onAudioProcessStarted = function() { };
*/
if (!isAudioProcessStarted) {
isAudioProcessStarted = true;
if (config.onAudioProcessStarted) {
config.onAudioProcessStarted();
}
if (config.initCallback) {
config.initCallback();
}
}
var left = e.inputBuffer.getChannelData(0);
// we clone the samples
var chLeft = new Float32Array(left);
leftchannel.push(chLeft);
if (numberOfAudioChannels === 2) {
var right = e.inputBuffer.getChannelData(1);
var chRight = new Float32Array(right);
rightchannel.push(chRight);
}
recordingLength += bufferSize;
// export raw PCM
self.recordingLength = recordingLength;
if (typeof config.timeSlice !== 'undefined') {
intervalsBasedBuffers.recordingLength += bufferSize;
intervalsBasedBuffers.left.push(chLeft);
if (numberOfAudioChannels === 2) {
intervalsBasedBuffers.right.push(chRight);
}
}
}
|
Handles each "onaudioprocess" event: clones the channel samples and appends them to the recording (and interval) buffers.
@method
@memberof StereoAudioRecorder
@example
jsAudioNode.onaudioprocess = onAudioProcessDataAvailable;
|
onAudioProcessDataAvailable
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/StereoAudioRecorder.js
|
MIT
|
function looper() {
if (!recording || typeof config.ondataavailable !== 'function' || typeof config.timeSlice === 'undefined') {
return;
}
if (intervalsBasedBuffers.left.length) {
mergeLeftRightBuffers({
desiredSampRate: desiredSampRate,
sampleRate: sampleRate,
numberOfAudioChannels: numberOfAudioChannels,
internalInterleavedLength: intervalsBasedBuffers.recordingLength,
leftBuffers: intervalsBasedBuffers.left,
rightBuffers: numberOfAudioChannels === 1 ? [] : intervalsBasedBuffers.right
}, function(buffer, view) {
var blob = new Blob([view], {
type: 'audio/wav'
});
config.ondataavailable(blob);
setTimeout(looper, config.timeSlice);
});
intervalsBasedBuffers = {
left: [],
right: [],
recordingLength: 0
};
} else {
setTimeout(looper, config.timeSlice);
}
}
|
When config.timeSlice is set, periodically merges the interval buffers into WAV blobs and emits them via config.ondataavailable.
@method
@memberof StereoAudioRecorder
@example
setTimeout(looper, config.timeSlice);
|
looper
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/StereoAudioRecorder.js
|
MIT
|
function terminate() {
if (!worker) {
return;
}
worker.postMessage(null);
worker.terminate();
worker = null;
}
|
Posts a final null message to the background worker, then terminates and releases it.
@method
@memberof WebAssemblyRecorder
@example
terminate();
|
terminate
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/WebAssemblyRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/WebAssemblyRecorder.js
|
MIT
|
function processInWebWorker(_function) {
var blob = URL.createObjectURL(new Blob([_function.toString(),
'this.onmessage = function (eee) {' + _function.name + '(eee.data);}'
], {
type: 'application/javascript'
}));
var worker = new Worker(blob);
URL.revokeObjectURL(blob);
return worker;
}
|
Serializes a named function into a Blob URL, spawns an inline Web Worker, and revokes the URL once the worker is created.
@method
@memberof Whammy
@example
var webWorker = processInWebWorker(whammyInWebWorker);
|
processInWebWorker
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/Whammy.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/Whammy.js
|
MIT
|
function whammyInWebWorker(frames) {
function ArrayToWebM(frames) {
var info = checkFrames(frames);
if (!info) {
return [];
}
var clusterMaxDuration = 30000;
var EBML = [{
'id': 0x1a45dfa3, // EBML
'data': [{
'data': 1,
'id': 0x4286 // EBMLVersion
}, {
'data': 1,
'id': 0x42f7 // EBMLReadVersion
}, {
'data': 4,
'id': 0x42f2 // EBMLMaxIDLength
}, {
'data': 8,
'id': 0x42f3 // EBMLMaxSizeLength
}, {
'data': 'webm',
'id': 0x4282 // DocType
}, {
'data': 2,
'id': 0x4287 // DocTypeVersion
}, {
'data': 2,
'id': 0x4285 // DocTypeReadVersion
}]
}, {
'id': 0x18538067, // Segment
'data': [{
'id': 0x1549a966, // Info
'data': [{
'data': 1e6, //do things in millisecs (num of nanosecs for duration scale)
'id': 0x2ad7b1 // TimecodeScale
}, {
'data': 'whammy',
'id': 0x4d80 // MuxingApp
}, {
'data': 'whammy',
'id': 0x5741 // WritingApp
}, {
'data': doubleToString(info.duration),
'id': 0x4489 // Duration
}]
}, {
'id': 0x1654ae6b, // Tracks
'data': [{
'id': 0xae, // TrackEntry
'data': [{
'data': 1,
'id': 0xd7 // TrackNumber
}, {
'data': 1,
'id': 0x73c5 // TrackUID
}, {
'data': 0,
'id': 0x9c // FlagLacing
}, {
'data': 'und',
'id': 0x22b59c // Language
}, {
'data': 'V_VP8',
'id': 0x86 // CodecID
}, {
'data': 'VP8',
'id': 0x258688 // CodecName
}, {
'data': 1,
'id': 0x83 // TrackType
}, {
'id': 0xe0, // Video
'data': [{
'data': info.width,
'id': 0xb0 // PixelWidth
}, {
'data': info.height,
'id': 0xba // PixelHeight
}]
}]
}]
}]
}];
//Generate clusters (max duration)
var frameNumber = 0;
var clusterTimecode = 0;
while (frameNumber < frames.length) {
var clusterFrames = [];
var clusterDuration = 0;
do {
clusterFrames.push(frames[frameNumber]);
clusterDuration += frames[frameNumber].duration;
frameNumber++;
} while (frameNumber < frames.length && clusterDuration < clusterMaxDuration);
var clusterCounter = 0;
var cluster = {
'id': 0x1f43b675, // Cluster
'data': getClusterData(clusterTimecode, clusterCounter, clusterFrames)
}; //Add cluster to segment
EBML[1].data.push(cluster);
clusterTimecode += clusterDuration;
}
return generateEBML(EBML);
}
function getClusterData(clusterTimecode, clusterCounter, clusterFrames) {
return [{
'data': clusterTimecode,
'id': 0xe7 // Timecode
}].concat(clusterFrames.map(function(webp) {
var block = makeSimpleBlock({
discardable: 0,
frame: webp.data.slice(4),
invisible: 0,
keyframe: 1,
lacing: 0,
trackNum: 1,
timecode: Math.round(clusterCounter)
});
clusterCounter += webp.duration;
return {
data: block,
id: 0xa3
};
}));
}
    // validates the frames and sums their durations
function checkFrames(frames) {
if (!frames[0]) {
postMessage({
error: 'Something went wrong. Maybe WebP format is not supported in the current browser.'
});
return;
}
var width = frames[0].width,
height = frames[0].height,
duration = frames[0].duration;
for (var i = 1; i < frames.length; i++) {
duration += frames[i].duration;
}
return {
duration: duration,
width: width,
height: height
};
}
function numToBuffer(num) {
var parts = [];
while (num > 0) {
parts.push(num & 0xff);
num = num >> 8;
}
return new Uint8Array(parts.reverse());
}
function strToBuffer(str) {
return new Uint8Array(str.split('').map(function(e) {
return e.charCodeAt(0);
}));
}
function bitsToBuffer(bits) {
var data = [];
var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
bits = pad + bits;
for (var i = 0; i < bits.length; i += 8) {
data.push(parseInt(bits.substr(i, 8), 2));
}
return new Uint8Array(data);
}
function generateEBML(json) {
var ebml = [];
for (var i = 0; i < json.length; i++) {
var data = json[i].data;
if (typeof data === 'object') {
data = generateEBML(data);
}
if (typeof data === 'number') {
data = bitsToBuffer(data.toString(2));
}
if (typeof data === 'string') {
data = strToBuffer(data);
}
var len = data.size || data.byteLength || data.length;
var zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8);
var sizeToString = len.toString(2);
var padded = (new Array((zeroes * 7 + 7 + 1) - sizeToString.length)).join('0') + sizeToString;
var size = (new Array(zeroes)).join('0') + '1' + padded;
ebml.push(numToBuffer(json[i].id));
ebml.push(bitsToBuffer(size));
ebml.push(data);
}
return new Blob(ebml, {
type: 'video/webm'
});
}
function toBinStrOld(bits) {
var data = '';
var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
bits = pad + bits;
for (var i = 0; i < bits.length; i += 8) {
data += String.fromCharCode(parseInt(bits.substr(i, 8), 2));
}
return data;
}
function makeSimpleBlock(data) {
var flags = 0;
if (data.keyframe) {
flags |= 128;
}
if (data.invisible) {
flags |= 8;
}
if (data.lacing) {
flags |= (data.lacing << 1);
}
if (data.discardable) {
flags |= 1;
}
if (data.trackNum > 127) {
throw 'TrackNumber > 127 not supported';
}
var out = [data.trackNum | 0x80, data.timecode >> 8, data.timecode & 0xff, flags].map(function(e) {
return String.fromCharCode(e);
}).join('') + data.frame;
return out;
}
function parseWebP(riff) {
var VP8 = riff.RIFF[0].WEBP[0];
var frameStart = VP8.indexOf('\x9d\x01\x2a'); // A VP8 keyframe starts with the 0x9d012a header
for (var i = 0, c = []; i < 4; i++) {
c[i] = VP8.charCodeAt(frameStart + 3 + i);
}
var width, height, tmp;
//the code below is literally copied verbatim from the bitstream spec
tmp = (c[1] << 8) | c[0];
width = tmp & 0x3FFF;
tmp = (c[3] << 8) | c[2];
height = tmp & 0x3FFF;
return {
width: width,
height: height,
data: VP8,
riff: riff
};
}
function getStrLength(string, offset) {
return parseInt(string.substr(offset + 4, 4).split('').map(function(i) {
var unpadded = i.charCodeAt(0).toString(2);
return (new Array(8 - unpadded.length + 1)).join('0') + unpadded;
}).join(''), 2);
}
function parseRIFF(string) {
var offset = 0;
var chunks = {};
while (offset < string.length) {
var id = string.substr(offset, 4);
var len = getStrLength(string, offset);
var data = string.substr(offset + 4 + 4, len);
offset += 4 + 4 + len;
chunks[id] = chunks[id] || [];
if (id === 'RIFF' || id === 'LIST') {
chunks[id].push(parseRIFF(data));
} else {
chunks[id].push(data);
}
}
return chunks;
}
function doubleToString(num) {
return [].slice.call(
new Uint8Array((new Float64Array([num])).buffer), 0).map(function(e) {
return String.fromCharCode(e);
}).reverse().join('');
}
var webm = new ArrayToWebM(frames.map(function(frame) {
var webp = parseWebP(parseRIFF(atob(frame.image.slice(23))));
webp.duration = frame.duration;
return webp;
}));
postMessage(webm);
}
|
Runs inside the worker: parses each WebP frame, assembles EBML clusters, and posts back a complete WebM blob.
@method
@memberof Whammy
@example
webWorker.postMessage(frames);
|
whammyInWebWorker
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/Whammy.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/Whammy.js
|
MIT
|
function ArrayToWebM(frames) {
var info = checkFrames(frames);
if (!info) {
return [];
}
var clusterMaxDuration = 30000;
var EBML = [{
'id': 0x1a45dfa3, // EBML
'data': [{
'data': 1,
'id': 0x4286 // EBMLVersion
}, {
'data': 1,
'id': 0x42f7 // EBMLReadVersion
}, {
'data': 4,
'id': 0x42f2 // EBMLMaxIDLength
}, {
'data': 8,
'id': 0x42f3 // EBMLMaxSizeLength
}, {
'data': 'webm',
'id': 0x4282 // DocType
}, {
'data': 2,
'id': 0x4287 // DocTypeVersion
}, {
'data': 2,
'id': 0x4285 // DocTypeReadVersion
}]
}, {
'id': 0x18538067, // Segment
'data': [{
'id': 0x1549a966, // Info
'data': [{
'data': 1e6, //do things in millisecs (num of nanosecs for duration scale)
'id': 0x2ad7b1 // TimecodeScale
}, {
'data': 'whammy',
'id': 0x4d80 // MuxingApp
}, {
'data': 'whammy',
'id': 0x5741 // WritingApp
}, {
'data': doubleToString(info.duration),
'id': 0x4489 // Duration
}]
}, {
'id': 0x1654ae6b, // Tracks
'data': [{
'id': 0xae, // TrackEntry
'data': [{
'data': 1,
'id': 0xd7 // TrackNumber
}, {
'data': 1,
'id': 0x73c5 // TrackUID
}, {
'data': 0,
'id': 0x9c // FlagLacing
}, {
'data': 'und',
'id': 0x22b59c // Language
}, {
'data': 'V_VP8',
'id': 0x86 // CodecID
}, {
'data': 'VP8',
'id': 0x258688 // CodecName
}, {
'data': 1,
'id': 0x83 // TrackType
}, {
'id': 0xe0, // Video
'data': [{
'data': info.width,
'id': 0xb0 // PixelWidth
}, {
'data': info.height,
'id': 0xba // PixelHeight
}]
}]
}]
}]
}];
//Generate clusters (max duration)
var frameNumber = 0;
var clusterTimecode = 0;
while (frameNumber < frames.length) {
var clusterFrames = [];
var clusterDuration = 0;
do {
clusterFrames.push(frames[frameNumber]);
clusterDuration += frames[frameNumber].duration;
frameNumber++;
} while (frameNumber < frames.length && clusterDuration < clusterMaxDuration);
var clusterCounter = 0;
var cluster = {
'id': 0x1f43b675, // Cluster
'data': getClusterData(clusterTimecode, clusterCounter, clusterFrames)
}; //Add cluster to segment
EBML[1].data.push(cluster);
clusterTimecode += clusterDuration;
}
return generateEBML(EBML);
}
|
Muxes an array of parsed webp frames into an EBML tree (EBML header, segment info, tracks, and clusters capped at 30 seconds each) and serializes it to a WebM blob.
@method
@memberof Whammy
@param {Array} frames - Frames returned by parseWebP, each carrying width, height, duration and raw VP8 data.
@returns {Blob} The assembled video/webm blob.
|
ArrayToWebM
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/Whammy.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/Whammy.js
|
MIT
|
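Roughly how the muxer is invoked inside the worker; parsedFrames is a hypothetical name for frames that have already been run through parseWebP:
// Each parsed frame carries width, height, duration and the raw VP8 payload.
var webmBlob = new ArrayToWebM(parsedFrames);
// ArrayToWebM returns the Blob built by generateEBML; a constructor that
// returns an object hands that object through, so webmBlob is a video/webm Blob.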
function getClusterData(clusterTimecode, clusterCounter, clusterFrames) {
return [{
'data': clusterTimecode,
'id': 0xe7 // Timecode
}].concat(clusterFrames.map(function(webp) {
var block = makeSimpleBlock({
discardable: 0,
frame: webp.data.slice(4),
invisible: 0,
keyframe: 1,
lacing: 0,
trackNum: 1,
timecode: Math.round(clusterCounter)
});
clusterCounter += webp.duration;
return {
data: block,
id: 0xa3
};
}));
}
|
Builds the contents of a single WebM cluster: a Timecode element followed by one SimpleBlock per frame, with block timecodes counted relative to the cluster's start.
@method
@memberof Whammy
@param {number} clusterTimecode - Absolute start time of the cluster in milliseconds.
@param {number} clusterCounter - Running relative timecode, starting at 0.
@param {Array} clusterFrames - Parsed webp frames belonging to this cluster.
|
getClusterData
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/Whammy.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/Whammy.js
|
MIT
|
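Block timecodes are relative to their cluster's own Timecode element, which is why ArrayToWebM resets clusterCounter to 0 for each cluster. An illustration of the resulting layout:
// A cluster starting at t = 30000ms holding three 100ms frames:
//   Cluster Timecode (0xe7) = 30000
//   SimpleBlock timecodes   = 0, 100, 200   (relative to the cluster)
//   absolute frame time     = cluster timecode + block timecode
var blocks = getClusterData(30000, 0, clusterFrames); // clusterFrames: parseWebP results with .duration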
function checkFrames(frames) {
if (!frames[0]) {
postMessage({
error: 'Something went wrong. Maybe WebP format is not supported in the current browser.'
});
return;
}
var width = frames[0].width,
height = frames[0].height,
duration = frames[0].duration;
for (var i = 1; i < frames.length; i++) {
duration += frames[i].duration;
}
return {
duration: duration,
width: width,
height: height
};
}
|
Validates the frame list and aggregates its metadata: width and height are taken from the first frame, durations are summed across all frames. Posts an error (e.g. when webp is unsupported by the browser) and returns undefined if the first frame is missing.
@method
@memberof Whammy
@param {Array} frames - Parsed webp frames.
@returns {Object} {duration, width, height} for the whole clip.
|
checkFrames
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/Whammy.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/Whammy.js
|
MIT
|
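Width and height are read from the first frame only, so every frame is implicitly assumed to share one resolution; durations are summed across the whole list:
// Illustrative values: three 100ms frames at 640x480
// checkFrames(frames) -> {duration: 300, width: 640, height: 480}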
function numToBuffer(num) {
var parts = [];
while (num > 0) {
parts.push(num & 0xff);
num = num >> 8;
}
return new Uint8Array(parts.reverse());
}
|
Encodes a number as big-endian bytes with no leading zero bytes; used to emit EBML element ids verbatim.
@method
@memberof Whammy
@param {number} num - The value to encode.
@returns {Uint8Array} Big-endian byte representation.
|
numToBuffer
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/Whammy.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/Whammy.js
|
MIT
|
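EBML element ids carry their length in their own leading bits, so they are emitted verbatim as big-endian bytes with no extra size prefix, which is exactly what numToBuffer produces:
// numToBuffer(0x1a45dfa3) -> Uint8Array [0x1a, 0x45, 0xdf, 0xa3]
// numToBuffer(0xe7)       -> Uint8Array [0xe7]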
function strToBuffer(str) {
return new Uint8Array(str.split('').map(function(e) {
return e.charCodeAt(0);
}));
}
|
Converts a binary string into a Uint8Array of its character codes.
@method
@memberof Whammy
@param {string} str - Binary string (one byte per character).
@returns {Uint8Array} The corresponding bytes.
|
strToBuffer
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/Whammy.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/Whammy.js
|
MIT
|
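For completeness, one worked value:
// strToBuffer('webm') -> Uint8Array [119, 101, 98, 109]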
function bitsToBuffer(bits) {
var data = [];
var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
bits = pad + bits;
for (var i = 0; i < bits.length; i += 8) {
data.push(parseInt(bits.substr(i, 8), 2));
}
return new Uint8Array(data);
}
|
Left-pads a bit string with zeros to a whole number of bytes and packs it into a Uint8Array.
@method
@memberof Whammy
@param {string} bits - A string of '0'/'1' characters.
@returns {Uint8Array} The packed bytes.
|
bitsToBuffer
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/Whammy.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/Whammy.js
|
MIT
|
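The left-padding rounds the bit string up to whole bytes, which generateEBML relies on when it emits size fields:
// bitsToBuffer('101')       -> '00000101'         -> Uint8Array [5]
// bitsToBuffer('100000001') -> '0000000100000001' -> Uint8Array [1, 1]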
function generateEBML(json) {
var ebml = [];
for (var i = 0; i < json.length; i++) {
var data = json[i].data;
if (typeof data === 'object') {
data = generateEBML(data);
}
if (typeof data === 'number') {
data = bitsToBuffer(data.toString(2));
}
if (typeof data === 'string') {
data = strToBuffer(data);
}
var len = data.size || data.byteLength || data.length;
var zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8);
var sizeToString = len.toString(2);
var padded = (new Array((zeroes * 7 + 7 + 1) - sizeToString.length)).join('0') + sizeToString;
var size = (new Array(zeroes)).join('0') + '1' + padded;
ebml.push(numToBuffer(json[i].id));
ebml.push(bitsToBuffer(size));
ebml.push(data);
}
return new Blob(ebml, {
type: 'video/webm'
});
}
|
Recursively serializes a JSON description of EBML elements ({id, data} pairs, where data may be a number, string, buffer, or nested element array) into a WebM blob, prefixing each payload with its size encoded as an EBML variable-length integer.
@method
@memberof Whammy
@param {Array} json - Tree of EBML element descriptors.
@returns {Blob} Blob of type video/webm.
|
generateEBML
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/Whammy.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/Whammy.js
|
MIT
|
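The size field is an EBML variable-length integer: the count of leading zero bits before the first '1' marker tells a reader how many bytes the field occupies. A worked example following the arithmetic above:
// len = 300: Math.ceil(Math.log(300) / Math.log(2)) = 9 bits -> zeroes = 2
// the size bits become '001' + 21 data bits once bitsToBuffer left-pads them,
// giving the 3-byte size field 0x20 0x01 0x2c, i.e. the value 300 (0x012c)
// behind a VINT length marker.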
function toBinStrOld(bits) {
var data = '';
var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
bits = pad + bits;
for (var i = 0; i < bits.length; i += 8) {
data += String.fromCharCode(parseInt(bits.substr(i, 8), 2));
}
return data;
}
|
Legacy helper that packs a padded bit string into a binary string (one character per byte); superseded by bitsToBuffer.
@method
@memberof Whammy
@param {string} bits - A string of '0'/'1' characters.
@returns {string} Binary string representation.
|
toBinStrOld
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/Whammy.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/Whammy.js
|
MIT
|
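Legacy counterpart of bitsToBuffer that packs into a binary string rather than a Uint8Array; it appears unused by the current encoder path. Same padding rule:
// toBinStrOld('100101100') -> '\x01\x2c' (two characters, codes 1 and 44)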
function makeSimpleBlock(data) {
var flags = 0;
if (data.keyframe) {
flags |= 128;
}
if (data.invisible) {
flags |= 8;
}
if (data.lacing) {
flags |= (data.lacing << 1);
}
if (data.discardable) {
flags |= 1;
}
if (data.trackNum > 127) {
throw 'TrackNumber > 127 not supported';
}
var out = [data.trackNum | 0x80, data.timecode >> 8, data.timecode & 0xff, flags].map(function(e) {
return String.fromCharCode(e);
}).join('') + data.frame;
return out;
}
|
Builds a WebM SimpleBlock as a binary string: a one-byte track number (VINT-coded, hence at most 127), a 16-bit big-endian timecode relative to the cluster, a flags byte (keyframe, invisible, lacing, discardable), then the raw frame data.
@method
@memberof Whammy
@param {Object} data - {trackNum, timecode, keyframe, invisible, lacing, discardable, frame}.
@returns {string} The serialized SimpleBlock.
|
makeSimpleBlock
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/Whammy.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/Whammy.js
|
MIT
|
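The four header bytes precede the frame payload; a sketch of the layout the code above emits, with vp8Data as a placeholder for a raw VP8 keyframe:
// byte 0:    trackNum | 0x80  (track number as a 1-byte VINT, hence the 127 cap)
// bytes 1-2: 16-bit big-endian timecode, relative to the enclosing cluster
// byte 3:    flags (0x80 keyframe, 0x08 invisible, lacing << 1, 0x01 discardable)
// bytes 4+:  the frame payload (getClusterData passes webp.data.slice(4))
var block = makeSimpleBlock({
    discardable: 0, frame: vp8Data, invisible: 0,
    keyframe: 1, lacing: 0, trackNum: 1, timecode: 100
});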
function parseWebP(riff) {
var VP8 = riff.RIFF[0].WEBP[0];
var frameStart = VP8.indexOf('\x9d\x01\x2a'); // A VP8 keyframe starts with the 0x9d012a header
for (var i = 0, c = []; i < 4; i++) {
c[i] = VP8.charCodeAt(frameStart + 3 + i);
}
var width, height, tmp;
//the code below is literally copied verbatim from the bitstream spec
tmp = (c[1] << 8) | c[0];
width = tmp & 0x3FFF;
tmp = (c[3] << 8) | c[2];
height = tmp & 0x3FFF;
return {
width: width,
height: height,
data: VP8,
riff: riff
};
}
|
Extracts the VP8 stream from a parsed RIFF container and reads the frame dimensions from the four bytes following the 0x9d 0x01 0x2a keyframe start code (two 16-bit little-endian values whose low 14 bits hold width and height).
@method
@memberof Whammy
@param {Object} riff - Output of parseRIFF for a webp file.
@returns {Object} {width, height, data, riff}.
|
parseWebP
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/Whammy.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/Whammy.js
|
MIT
|
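The code locates the 3-byte start code 0x9d 0x01 0x2a and reads the next four bytes: two 16-bit little-endian values whose low 14 bits are the width and height (so 16383px is the maximum in either dimension). A sketch of feeding it a canvas capture:
var dataURI = canvas.toDataURL('image/webp', 0.8); // 'data:image/webp;base64,...'
var webp = parseWebP(parseRIFF(atob(dataURI.slice(23)))); // 23-char prefix stripped
console.log(webp.width, webp.height);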
function getStrLength(string, offset) {
return parseInt(string.substr(offset + 4, 4).split('').map(function(i) {
var unpadded = i.charCodeAt(0).toString(2);
return (new Array(8 - unpadded.length + 1)).join('0') + unpadded;
}).join(''), 2);
}
|
Reads the four-byte chunk length that follows a RIFF chunk id at the given offset, assembling the bytes big-endian.
@method
@memberof Whammy
@param {string} string - The RIFF data as a binary string.
@param {number} offset - Offset of the chunk id.
@returns {number} The parsed chunk length.
|
getStrLength
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/dev/Whammy.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/dev/Whammy.js
|
MIT
|
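A quirk worth flagging: RIFF sizes are little-endian on disk, but this helper assembles the four bytes big-endian. On my reading of parseRIFF (not a documented guarantee), the misread value is typically so large that substr simply grabs the remainder of the string, which suffices for the single-chunk webp payloads handled here:
// getStrLength('RIFF\x00\x00\x01\x2c' + payload, 0) -> 300
// i.e. the four bytes after the chunk id, read as one big-endian 32-bit integer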