code
stringlengths 24
2.07M
| docstring
stringlengths 25
85.3k
| func_name
stringlengths 1
92
| language
stringclasses 1
value | repo
stringlengths 5
64
| path
stringlengths 4
172
| url
stringlengths 44
218
| license
stringclasses 7
values |
---|---|---|---|---|---|---|---|
// Builds one 64-bit integer class (e.g. Uint64BE / Int64BE) over an 8-byte
// buffer. `bigendian` picks the byte order and `unsigned` picks whether the
// high 32-bit word is reinterpreted as signed in toNumber(). The generated
// constructor is exported under `name` and also returned. (This is the
// int64-buffer library bundled into RecordRTC's EBML.js.)
function factory(name, bigendian, unsigned) {
// byte offset of the high / low 32-bit word inside the 8-byte value
var posH = bigendian ? 0 : 4;
var posL = bigendian ? 4 : 0;
// physical position of each logical byte (0 = most significant) in a word
var pos0 = bigendian ? 0 : 3;
var pos1 = bigendian ? 1 : 2;
var pos2 = bigendian ? 2 : 1;
var pos3 = bigendian ? 3 : 0;
// number -> bytes writers, chosen once per endianness
var fromPositive = bigendian ? fromPositiveBE : fromPositiveLE;
var fromNegative = bigendian ? fromNegativeBE : fromNegativeLE;
var proto = Int64.prototype;
// brand property name, e.g. "_isUint64BE", used by the isX() type test
var isName = "is" + name;
var _isInt64 = "_" + isName;
// properties
proto.buffer = void 0;
proto.offset = 0;
proto[_isInt64] = true;
// methods
proto.toNumber = toNumber;
proto.toString = toString;
proto.toJSON = toNumber;
proto.toArray = toArray;
// add .toBuffer() method only when Buffer available
if (BUFFER) proto.toBuffer = toBuffer;
// add .toArrayBuffer() method only when Uint8Array available
if (UINT8ARRAY) proto.toArrayBuffer = toArrayBuffer;
// isUint64BE, isInt64BE
Int64[isName] = isInt64;
// CommonJS
exports[name] = Int64;
return Int64;
// constructor
// NOTE: callable with or without `new`; both paths delegate to init().
function Int64(buffer, offset, value, raddix) {
if (!(this instanceof Int64)) return new Int64(buffer, offset, value, raddix);
return init(this, buffer, offset, value, raddix);
}
// isUint64BE, isInt64BE
// True when `b` carries this class's brand property (see _isInt64 above).
function isInt64(b) {
return !!(b && b[_isInt64]);
}
// initializer
// Dispatches on the several supported argument shapes (see inline labels).
function init(that, buffer, offset, value, raddix) {
if (UINT8ARRAY && ARRAYBUFFER) {
// accept raw ArrayBuffers by wrapping them in a Uint8Array view
if (buffer instanceof ARRAYBUFFER) buffer = new UINT8ARRAY(buffer);
if (value instanceof ARRAYBUFFER) value = new UINT8ARRAY(value);
}
// Int64BE() style
if (!buffer && !offset && !value && !storage) {
// shortcut to initialize with zero
that.buffer = newArray(ZERO, 0);
return;
}
// Int64BE(value, raddix) style
if (!isValidBuffer(buffer, offset)) {
// no usable backing buffer was given: allocate one and shift arguments
var _storage = storage || Array;
raddix = offset;
value = buffer;
offset = 0;
buffer = new _storage(8);
}
that.buffer = buffer;
that.offset = offset |= 0;
// Int64BE(buffer, offset) style
if (UNDEFINED === typeof value) return;
// Int64BE(buffer, offset, value, raddix) style
if ("string" === typeof value) {
fromString(buffer, offset, value, raddix || 10);
} else if (isValidBuffer(value, raddix)) {
fromArray(buffer, offset, value, raddix);
} else if ("number" === typeof raddix) {
// two-number form: value is the high word, raddix the low word
writeInt32(buffer, offset + posH, value); // high
writeInt32(buffer, offset + posL, raddix); // low
} else if (value > 0) {
fromPositive(buffer, offset, value); // positive
} else if (value < 0) {
fromNegative(buffer, offset, value); // negative
} else {
fromArray(buffer, offset, ZERO, 0); // zero, NaN and others
}
}
// Parses a (possibly "-"-prefixed) numeric string in `raddix` into the two
// 32-bit words, carrying per-digit overflow from the low into the high word.
function fromString(buffer, offset, str, raddix) {
var pos = 0;
var len = str.length;
var high = 0;
var low = 0;
if (str[0] === "-") pos++;
var sign = pos;
while (pos < len) {
var chr = parseInt(str[pos++], raddix);
if (!(chr >= 0)) break; // NaN
low = low * raddix + chr;
high = high * raddix + Math.floor(low / BIT32);
low %= BIT32;
}
// two's-complement negate the 64-bit magnitude for negative input
if (sign) {
high = ~high;
if (low) {
low = BIT32 - low;
} else {
high++;
}
}
writeInt32(buffer, offset + posH, high);
writeInt32(buffer, offset + posL, low);
}
// Combines both 32-bit words into one JS number; values beyond 2^53 lose
// precision (inherent to IEEE-754 doubles).
function toNumber() {
var buffer = this.buffer;
var offset = this.offset;
var high = readInt32(buffer, offset + posH);
var low = readInt32(buffer, offset + posL);
if (!unsigned) high |= 0; // a trick to get signed
return high ? (high * BIT32 + low) : low;
}
// Renders the value in `radix` (default 10) by repeated long division
// spread across the two 32-bit words.
function toString(radix) {
var buffer = this.buffer;
var offset = this.offset;
var high = readInt32(buffer, offset + posH);
var low = readInt32(buffer, offset + posL);
var str = "";
// negative only for the signed variants with the sign bit set
var sign = !unsigned && (high & 0x80000000);
if (sign) {
// magnitude via two's complement; when low === 0 this makes low === 2^32,
// which the division below still folds correctly into `mod`
high = ~high;
low = BIT32 - low;
}
radix = radix || 10;
while (1) {
// divide the 64-bit magnitude by radix, carrying the high remainder
var mod = (high % radix) * BIT32 + low;
high = Math.floor(high / radix);
low = Math.floor(mod / radix);
str = (mod % radix).toString(radix) + str;
if (!high && !low) break;
}
if (sign) {
str = "-" + str;
}
return str;
}
// Stores a 32-bit word as 4 bytes, mapped through pos0..pos3 for endianness.
function writeInt32(buffer, offset, value) {
buffer[offset + pos3] = value & 255;
value = value >> 8;
buffer[offset + pos2] = value & 255;
value = value >> 8;
buffer[offset + pos1] = value & 255;
value = value >> 8;
buffer[offset + pos0] = value & 255;
}
// Reads 4 bytes back into an unsigned 32-bit word; the top byte uses `*`
// rather than `<<` so the result stays non-negative.
function readInt32(buffer, offset) {
return (buffer[offset + pos0] * BIT24) +
(buffer[offset + pos1] << 16) +
(buffer[offset + pos2] << 8) +
buffer[offset + pos3];
}
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
factory
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
MIT
|
// Constructor for the generated 64-bit class. Callable with or without
// `new`; either way the instance is handed to init() for setup.
function Int64(buffer, offset, value, raddix) {
  if (this instanceof Int64) {
    return init(this, buffer, offset, value, raddix);
  }
  return new Int64(buffer, offset, value, raddix);
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
Int64
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
MIT
|
// Type test: a value qualifies only when it is truthy and carries the
// private brand property installed on the class prototype.
function isInt64(b) {
  if (!b) return false;
  return Boolean(b[_isInt64]);
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
isInt64
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
MIT
|
// Initializes an Int64-style instance. Supports several call shapes
// (labelled inline): no args, (value [, raddix]), (buffer [, offset]),
// and (buffer, offset, value [, raddix]).
function init(that, buffer, offset, value, raddix) {
if (UINT8ARRAY && ARRAYBUFFER) {
// accept raw ArrayBuffers by wrapping them in a Uint8Array view
if (buffer instanceof ARRAYBUFFER) buffer = new UINT8ARRAY(buffer);
if (value instanceof ARRAYBUFFER) value = new UINT8ARRAY(value);
}
// Int64BE() style
if (!buffer && !offset && !value && !storage) {
// shortcut to initialize with zero
that.buffer = newArray(ZERO, 0);
return;
}
// Int64BE(value, raddix) style
if (!isValidBuffer(buffer, offset)) {
// no usable backing buffer was given: allocate one and shift arguments
var _storage = storage || Array;
raddix = offset;
value = buffer;
offset = 0;
buffer = new _storage(8);
}
that.buffer = buffer;
that.offset = offset |= 0;
// Int64BE(buffer, offset) style
if (UNDEFINED === typeof value) return;
// Int64BE(buffer, offset, value, raddix) style
if ("string" === typeof value) {
fromString(buffer, offset, value, raddix || 10);
} else if (isValidBuffer(value, raddix)) {
fromArray(buffer, offset, value, raddix);
} else if ("number" === typeof raddix) {
// two-number form: value is the high word, raddix the low word
writeInt32(buffer, offset + posH, value); // high
writeInt32(buffer, offset + posL, raddix); // low
} else if (value > 0) {
fromPositive(buffer, offset, value); // positive
} else if (value < 0) {
fromNegative(buffer, offset, value); // negative
} else {
fromArray(buffer, offset, ZERO, 0); // zero, NaN and others
}
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
init
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
MIT
|
// Parse a (possibly "-"-prefixed) numeric string in the given radix into
// two 32-bit words, carrying per-digit overflow from low into high, then
// write both words into the buffer.
function fromString(buffer, offset, str, raddix) {
  var high = 0;
  var low = 0;
  var pos = 0;
  if (str[0] === "-") pos++;
  var sign = pos;
  for (var len = str.length; pos < len; pos++) {
    var digit = parseInt(str[pos], raddix);
    if (!(digit >= 0)) break; // parseInt returned NaN: stop at first non-digit
    low = low * raddix + digit;
    high = high * raddix + Math.floor(low / BIT32);
    low %= BIT32;
  }
  if (sign) {
    // two's-complement negate the 64-bit magnitude
    high = ~high;
    if (low) {
      low = BIT32 - low;
    } else {
      high++;
    }
  }
  writeInt32(buffer, offset + posH, high);
  writeInt32(buffer, offset + posL, low);
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
fromString
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
MIT
|
// Combine the two 32-bit halves of this value into a JS number.
// Magnitudes beyond 2^53 lose precision (inherent to IEEE-754 doubles).
function toNumber() {
  var high = readInt32(this.buffer, this.offset + posH);
  var low = readInt32(this.buffer, this.offset + posL);
  if (!unsigned) {
    high |= 0; // reinterpret the high word as signed via int32 coercion
  }
  return high ? high * BIT32 + low : low;
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
toNumber
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
MIT
|
// Render the 64-bit value in `radix` (default 10) by repeated long
// division spread across the two 32-bit words.
function toString(radix) {
var buffer = this.buffer;
var offset = this.offset;
var high = readInt32(buffer, offset + posH);
var low = readInt32(buffer, offset + posL);
var str = "";
// negative only for the signed variants with the sign bit set
var sign = !unsigned && (high & 0x80000000);
if (sign) {
// magnitude via two's complement; when low === 0 this makes low === 2^32,
// which the division below still folds correctly into `mod`
high = ~high;
low = BIT32 - low;
}
radix = radix || 10;
while (1) {
// divide the magnitude by radix, carrying the high remainder into low
var mod = (high % radix) * BIT32 + low;
high = Math.floor(high / radix);
low = Math.floor(mod / radix);
str = (mod % radix).toString(radix) + str;
if (!high && !low) break;
}
if (sign) {
str = "-" + str;
}
return str;
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
toString
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
MIT
|
// Store a 32-bit word as 4 bytes; pos0..pos3 map logical byte order
// (0 = most significant) to physical order for the configured endianness.
function writeInt32(buffer, offset, value) {
  buffer[offset + pos3] = value & 255;
  buffer[offset + pos2] = (value >> 8) & 255;
  buffer[offset + pos1] = (value >> 16) & 255;
  buffer[offset + pos0] = (value >> 24) & 255;
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
writeInt32
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
MIT
|
// Assemble 4 bytes into an unsigned 32-bit word. The top byte is combined
// with `*` (not `<<`) so the result stays non-negative.
function readInt32(buffer, offset) {
  var top = buffer[offset + pos0] * BIT24;
  var mid = buffer[offset + pos1] << 16;
  var sub = buffer[offset + pos2] << 8;
  var bot = buffer[offset + pos3];
  return top + mid + sub + bot;
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
readInt32
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
MIT
|
// Return the value as a plain 8-element Array. Also switches the module's
// preferred allocation storage to plain Arrays.
function toArray(raw) {
  storage = null; // Array
  var buffer = this.buffer;
  var offset = this.offset;
  // Fast path: hand back the backing array itself when it is exactly the
  // 8-element Array (unless raw === false forces a copy).
  if (raw !== false && offset === 0 && buffer.length === 8 && isArray(buffer)) {
    return buffer;
  }
  return newArray(buffer, offset);
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
toArray
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
MIT
|
// Return the value as an 8-byte Node Buffer. Also switches the module's
// preferred allocation storage to Buffer.
function toBuffer(raw) {
  storage = BUFFER;
  var buffer = this.buffer;
  var offset = this.offset;
  // Fast path: return the backing Buffer itself when it is exactly 8 bytes
  // (unless raw === false forces a copy).
  if (raw !== false && offset === 0 && buffer.length === 8 && Buffer.isBuffer(buffer)) {
    return buffer;
  }
  var dest = new BUFFER(8);
  fromArray(dest, 0, buffer, offset);
  return dest;
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
toBuffer
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
MIT
|
// Return the value as an 8-byte ArrayBuffer. Also switches the module's
// preferred allocation storage to Uint8Array.
function toArrayBuffer(raw) {
  storage = UINT8ARRAY;
  var buffer = this.buffer;
  var offset = this.offset;
  var arrbuf = buffer.buffer;
  // Fast path: expose the underlying ArrayBuffer directly when it is
  // exactly 8 bytes long and no copy was requested.
  if (raw !== false && offset === 0 && arrbuf instanceof ARRAYBUFFER && arrbuf.byteLength === 8) {
    return arrbuf;
  }
  var dest = new UINT8ARRAY(8);
  fromArray(dest, 0, buffer, offset);
  return dest.buffer;
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
toArrayBuffer
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
MIT
|
// True when `buffer` is an array-like with at least 8 elements readable at
// `offset`, and is not a string (string indexing would yield characters).
function isValidBuffer(buffer, offset) {
  offset |= 0;
  var len = buffer && buffer.length;
  if (!len) return len;
  return (offset + 8 <= len) && ("string" !== typeof buffer[offset]);
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
isValidBuffer
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
MIT
|
// Copy 8 bytes from src to dest, masking each element to an octet.
// Offsets are coerced with |0 so undefined/null behave as 0.
function fromArray(destbuf, destoff, srcbuf, srcoff) {
  destoff |= 0;
  srcoff |= 0;
  for (var i = 0; i < 8; i += 1) {
    destbuf[destoff + i] = srcbuf[srcoff + i] & 255;
  }
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
fromArray
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
MIT
|
// Copy 8 elements starting at `offset` into a fresh plain Array.
// Works on any array-like (Array, typed array, Buffer, arguments).
function newArray(buffer, offset) {
  return [].slice.call(buffer, offset, offset + 8);
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
newArray
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
MIT
|
// Write a non-negative number as 8 big-endian bytes, filling from the
// least-significant byte (highest index) toward the most-significant.
function fromPositiveBE(buffer, offset, value) {
  for (var i = offset + 7; i >= offset; i--) {
    buffer[i] = value & 255;
    value = value / 256;
  }
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
fromPositiveBE
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
MIT
|
// Write a negative number as 8 big-endian two's-complement bytes.
// Works on (value + 1) so each byte is the bitwise complement of the
// corresponding magnitude byte: ~x === -(x) - 1.
function fromNegativeBE(buffer, offset, value) {
  value++;
  for (var i = offset + 7; i >= offset; i--) {
    buffer[i] = ((-value) & 255) ^ 255;
    value = value / 256;
  }
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
fromNegativeBE
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
MIT
|
// Write a non-negative number as 8 little-endian bytes,
// least-significant byte first.
function fromPositiveLE(buffer, offset, value) {
  for (var i = 0; i < 8; i++) {
    buffer[offset + i] = value & 255;
    value = value / 256;
  }
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
fromPositiveLE
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
MIT
|
// Write a negative number as 8 little-endian two's-complement bytes.
// Works on (value + 1) so each byte is the bitwise complement of the
// corresponding magnitude byte: ~x === -(x) - 1.
function fromNegativeLE(buffer, offset, value) {
  value++;
  for (var i = 0; i < 8; i++) {
    buffer[offset + i] = ((-value) & 255) ^ 255;
    value = value / 256;
  }
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
fromNegativeLE
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
MIT
|
// Array check via Object.prototype.toString tagging (kept instead of
// Array.isArray to preserve the original's exact semantics, including
// objects with a custom Symbol.toStringTag).
function _isArray(val) {
  if (!val) return false;
  return "[object Array]" == Object.prototype.toString.call(val);
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
_isArray
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/EBML.js
|
MIT
|
// Record the current wall-clock time and, when configured, notify the
// onTimeStamp callback with the newest stamp plus the full history.
function updateTimeStamp() {
  var now = new Date().getTime();
  self.timestamps.push(now);
  if (typeof config.onTimeStamp === 'function') {
    config.onTimeStamp(now, self.timestamps);
  }
}
|
@property {Array} timestamps - Array of time stamps
@memberof MediaStreamRecorder
@example
console.log(recorder.timestamps);
|
updateTimeStamp
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/MediaStreamRecorder.js
|
MIT
|
// Resolve the effective mime type: the live MediaRecorder knows the real
// one; otherwise fall back to the configured value, defaulting to webm.
function getMimeType(secondObject) {
  var recorderType = mediaRecorder && mediaRecorder.mimeType;
  if (recorderType) {
    return recorderType;
  }
  return secondObject.mimeType || 'video/webm';
}
|
@property {Array} timestamps - Array of time stamps
@memberof MediaStreamRecorder
@example
console.log(recorder.timestamps);
|
getMimeType
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/MediaStreamRecorder.js
|
MIT
|
// Drop the recorder reference, collected chunks and timestamps so the
// next recording starts from a clean slate.
function clearRecordedDataCB() {
  mediaRecorder = null;
  arrayOfBlobs = [];
  self.timestamps = [];
}
|
This method resets currently recorded data.
@method
@memberof MediaStreamRecorder
@example
recorder.clearRecordedData();
|
clearRecordedDataCB
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/MediaStreamRecorder.js
|
MIT
|
// Report whether the captured stream is still live, using the modern
// `active` flag when present, falling back to the legacy `ended` flag.
function isMediaStreamActive() {
  if ('active' in mediaStream) {
    return !!mediaStream.active;
  }
  if ('ended' in mediaStream) { // old hack
    return !mediaStream.ended;
  }
  return true;
}
|
Access to native MediaRecorder API
@method
@memberof MediaStreamRecorder
@instance
@example
var internal = recorder.getInternalRecorder();
internal.ondataavailable = function() {}; // override
internal.stream, internal.onpause, internal.onstop, etc.
@returns {Object} Returns internal recording object.
|
isMediaStreamActive
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/MediaStreamRecorder.js
|
MIT
|
// Collect every video track across all input streams, preserving
// stream order.
function getAllVideoTracks() {
  var tracks = [];
  arrayOfMediaStreams.forEach(function(stream) {
    tracks = tracks.concat(getTracks(stream, 'video'));
  });
  return tracks;
}
|
This method records all MediaStreams.
@method
@memberof MultiStreamRecorder
@example
recorder.record();
|
getAllVideoTracks
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/MultiStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/MultiStreamRecorder.js
|
MIT
|
// Report whether the captured stream is still live. Checking can be
// disabled via config, in which case the stream is always treated as live.
function isMediaStreamActive() {
  if (config.checkForInactiveTracks === false) {
    // always return "true"
    return true;
  }
  if ('active' in mediaStream) {
    return !!mediaStream.active;
  }
  if ('ended' in mediaStream) { // old hack
    return !mediaStream.ended;
  }
  return true;
}
|
Set sample rates such as 8K or 16K. Reference: http://stackoverflow.com/a/28977136/552182
@property {number} desiredSampRate - Desired Bits per sample * 1000
@memberof StereoAudioRecorder
@instance
@example
var recorder = StereoAudioRecorder(mediaStream, {
desiredSampRate: 16 * 1000 // bits-per-sample * 1000
});
|
isMediaStreamActive
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
MIT
|
// Merges the per-channel Float32 chunks recorded by the ScriptProcessor
// into a complete 16-bit PCM WAV file. Runs the heavy work either inline
// (config.noWorker) or inside a throwaway Web Worker, then hands the
// resulting ArrayBuffer + DataView to `callback`.
function mergeLeftRightBuffers(config, callback) {
function mergeAudioBuffers(config, cb) {
var numberOfAudioChannels = config.numberOfAudioChannels;
// todo: is "slice(0)" really needed here, or can it be removed?
var leftBuffers = config.leftBuffers.slice(0);
var rightBuffers = config.rightBuffers.slice(0);
var sampleRate = config.sampleRate;
var internalInterleavedLength = config.internalInterleavedLength;
var desiredSampRate = config.desiredSampRate;
if (numberOfAudioChannels === 2) {
leftBuffers = mergeBuffers(leftBuffers, internalInterleavedLength);
rightBuffers = mergeBuffers(rightBuffers, internalInterleavedLength);
if (desiredSampRate) {
// resample both channels to the requested rate
leftBuffers = interpolateArray(leftBuffers, desiredSampRate, sampleRate);
rightBuffers = interpolateArray(rightBuffers, desiredSampRate, sampleRate);
}
}
if (numberOfAudioChannels === 1) {
leftBuffers = mergeBuffers(leftBuffers, internalInterleavedLength);
if (desiredSampRate) {
leftBuffers = interpolateArray(leftBuffers, desiredSampRate, sampleRate);
}
}
// set sample rate as desired sample rate
if (desiredSampRate) {
sampleRate = desiredSampRate;
}
// for changing the sampling rate, reference:
// http://stackoverflow.com/a/28977136/552182
// Linear resampler: maps the data onto fitCount points.
function interpolateArray(data, newSampleRate, oldSampleRate) {
var fitCount = Math.round(data.length * (newSampleRate / oldSampleRate));
var newData = [];
var springFactor = Number((data.length - 1) / (fitCount - 1));
newData[0] = data[0];
for (var i = 1; i < fitCount - 1; i++) {
var tmp = i * springFactor;
// NOTE(review): before/after are strings produced by toFixed(); array
// indexing and the subtraction below coerce them back — works, but subtle
var before = Number(Math.floor(tmp)).toFixed();
var after = Number(Math.ceil(tmp)).toFixed();
var atPoint = tmp - before;
newData[i] = linearInterpolate(data[before], data[after], atPoint);
}
newData[fitCount - 1] = data[data.length - 1];
return newData;
}
function linearInterpolate(before, after, atPoint) {
return before + (after - before) * atPoint;
}
// Concatenates a list of sample chunks into one Float64Array of rLength samples.
function mergeBuffers(channelBuffer, rLength) {
var result = new Float64Array(rLength);
var offset = 0;
var lng = channelBuffer.length;
for (var i = 0; i < lng; i++) {
var buffer = channelBuffer[i];
result.set(buffer, offset);
offset += buffer.length;
}
return result;
}
// Produces [L0, R0, L1, R1, ...] from the two channel arrays.
function interleave(leftChannel, rightChannel) {
var length = leftChannel.length + rightChannel.length;
var result = new Float64Array(length);
var inputIndex = 0;
for (var index = 0; index < length;) {
result[index++] = leftChannel[inputIndex];
result[index++] = rightChannel[inputIndex];
inputIndex++;
}
return result;
}
// Writes an ASCII tag (e.g. 'RIFF') byte-by-byte into the WAV header.
function writeUTFBytes(view, offset, string) {
var lng = string.length;
for (var i = 0; i < lng; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
// interleave both channels together
var interleaved;
if (numberOfAudioChannels === 2) {
interleaved = interleave(leftBuffers, rightBuffers);
}
if (numberOfAudioChannels === 1) {
interleaved = leftBuffers;
}
var interleavedLength = interleaved.length;
// create wav file: 44-byte RIFF/WAVE header followed by 16-bit samples
var resultingBufferLength = 44 + interleavedLength * 2;
var buffer = new ArrayBuffer(resultingBufferLength);
var view = new DataView(buffer);
// RIFF chunk descriptor/identifier
writeUTFBytes(view, 0, 'RIFF');
// RIFF chunk length
// changed "44" to "36" via #401
view.setUint32(4, 36 + interleavedLength * 2, true);
// RIFF type
writeUTFBytes(view, 8, 'WAVE');
// format chunk identifier
// FMT sub-chunk
writeUTFBytes(view, 12, 'fmt ');
// format chunk length
view.setUint32(16, 16, true);
// sample format (raw)
view.setUint16(20, 1, true);
// stereo (2 channels)
view.setUint16(22, numberOfAudioChannels, true);
// sample rate
view.setUint32(24, sampleRate, true);
// byte rate (sample rate * block align)
view.setUint32(28, sampleRate * 2, true);
// block align (channel count * bytes per sample)
view.setUint16(32, numberOfAudioChannels * 2, true);
// bits per sample
view.setUint16(34, 16, true);
// data sub-chunk
// data chunk identifier
writeUTFBytes(view, 36, 'data');
// data chunk length
view.setUint32(40, interleavedLength * 2, true);
// write the PCM samples, scaling floats in [-1, 1] to signed 16-bit
var lng = interleavedLength;
var index = 44;
var volume = 1;
for (var i = 0; i < lng; i++) {
view.setInt16(index, interleaved[i] * (0x7FFF * volume), true);
index += 2;
}
// inline path: hand results straight to the supplied callback
if (cb) {
return cb({
buffer: buffer,
view: view
});
}
// worker path: post the result back to the page
postMessage({
buffer: buffer,
view: view
});
}
if (config.noWorker) {
// synchronous path: no worker, invoke the callback directly
mergeAudioBuffers(config, function(data) {
callback(data.buffer, data.view);
});
return;
}
// worker path: serialize mergeAudioBuffers into a one-shot Web Worker
var webWorker = processInWebWorker(mergeAudioBuffers);
webWorker.onmessage = function(event) {
callback(event.data.buffer, event.data.view);
// release memory
URL.revokeObjectURL(webWorker.workerURL);
// kill webworker (or Chrome will kill your page after ~25 calls)
webWorker.terminate();
};
webWorker.postMessage(config);
}
|
This method records MediaStream.
@method
@memberof StereoAudioRecorder
@example
recorder.record();
|
mergeLeftRightBuffers
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
MIT
|
// Merges recorded per-channel Float32 chunks into a 16-bit PCM WAV file.
// Delivers {buffer, view} to `cb` when given, otherwise posts it back to
// the page (this function also runs inside a Web Worker).
function mergeAudioBuffers(config, cb) {
var numberOfAudioChannels = config.numberOfAudioChannels;
// todo: is "slice(0)" really needed here, or can it be removed?
var leftBuffers = config.leftBuffers.slice(0);
var rightBuffers = config.rightBuffers.slice(0);
var sampleRate = config.sampleRate;
var internalInterleavedLength = config.internalInterleavedLength;
var desiredSampRate = config.desiredSampRate;
if (numberOfAudioChannels === 2) {
leftBuffers = mergeBuffers(leftBuffers, internalInterleavedLength);
rightBuffers = mergeBuffers(rightBuffers, internalInterleavedLength);
if (desiredSampRate) {
// resample both channels to the requested rate
leftBuffers = interpolateArray(leftBuffers, desiredSampRate, sampleRate);
rightBuffers = interpolateArray(rightBuffers, desiredSampRate, sampleRate);
}
}
if (numberOfAudioChannels === 1) {
leftBuffers = mergeBuffers(leftBuffers, internalInterleavedLength);
if (desiredSampRate) {
leftBuffers = interpolateArray(leftBuffers, desiredSampRate, sampleRate);
}
}
// set sample rate as desired sample rate
if (desiredSampRate) {
sampleRate = desiredSampRate;
}
// for changing the sampling rate, reference:
// http://stackoverflow.com/a/28977136/552182
// Linear resampler: maps the data onto fitCount points.
function interpolateArray(data, newSampleRate, oldSampleRate) {
var fitCount = Math.round(data.length * (newSampleRate / oldSampleRate));
var newData = [];
var springFactor = Number((data.length - 1) / (fitCount - 1));
newData[0] = data[0];
for (var i = 1; i < fitCount - 1; i++) {
var tmp = i * springFactor;
// NOTE(review): before/after are strings produced by toFixed(); array
// indexing and the subtraction below coerce them back — works, but subtle
var before = Number(Math.floor(tmp)).toFixed();
var after = Number(Math.ceil(tmp)).toFixed();
var atPoint = tmp - before;
newData[i] = linearInterpolate(data[before], data[after], atPoint);
}
newData[fitCount - 1] = data[data.length - 1];
return newData;
}
function linearInterpolate(before, after, atPoint) {
return before + (after - before) * atPoint;
}
// Concatenates a list of sample chunks into one Float64Array of rLength samples.
function mergeBuffers(channelBuffer, rLength) {
var result = new Float64Array(rLength);
var offset = 0;
var lng = channelBuffer.length;
for (var i = 0; i < lng; i++) {
var buffer = channelBuffer[i];
result.set(buffer, offset);
offset += buffer.length;
}
return result;
}
// Produces [L0, R0, L1, R1, ...] from the two channel arrays.
function interleave(leftChannel, rightChannel) {
var length = leftChannel.length + rightChannel.length;
var result = new Float64Array(length);
var inputIndex = 0;
for (var index = 0; index < length;) {
result[index++] = leftChannel[inputIndex];
result[index++] = rightChannel[inputIndex];
inputIndex++;
}
return result;
}
// Writes an ASCII tag (e.g. 'RIFF') byte-by-byte into the WAV header.
function writeUTFBytes(view, offset, string) {
var lng = string.length;
for (var i = 0; i < lng; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
// interleave both channels together
var interleaved;
if (numberOfAudioChannels === 2) {
interleaved = interleave(leftBuffers, rightBuffers);
}
if (numberOfAudioChannels === 1) {
interleaved = leftBuffers;
}
var interleavedLength = interleaved.length;
// create wav file: 44-byte RIFF/WAVE header followed by 16-bit samples
var resultingBufferLength = 44 + interleavedLength * 2;
var buffer = new ArrayBuffer(resultingBufferLength);
var view = new DataView(buffer);
// RIFF chunk descriptor/identifier
writeUTFBytes(view, 0, 'RIFF');
// RIFF chunk length
// changed "44" to "36" via #401
view.setUint32(4, 36 + interleavedLength * 2, true);
// RIFF type
writeUTFBytes(view, 8, 'WAVE');
// format chunk identifier
// FMT sub-chunk
writeUTFBytes(view, 12, 'fmt ');
// format chunk length
view.setUint32(16, 16, true);
// sample format (raw)
view.setUint16(20, 1, true);
// stereo (2 channels)
view.setUint16(22, numberOfAudioChannels, true);
// sample rate
view.setUint32(24, sampleRate, true);
// byte rate (sample rate * block align)
view.setUint32(28, sampleRate * 2, true);
// block align (channel count * bytes per sample)
view.setUint16(32, numberOfAudioChannels * 2, true);
// bits per sample
view.setUint16(34, 16, true);
// data sub-chunk
// data chunk identifier
writeUTFBytes(view, 36, 'data');
// data chunk length
view.setUint32(40, interleavedLength * 2, true);
// write the PCM samples, scaling floats in [-1, 1] to signed 16-bit
var lng = interleavedLength;
var index = 44;
var volume = 1;
for (var i = 0; i < lng; i++) {
view.setInt16(index, interleaved[i] * (0x7FFF * volume), true);
index += 2;
}
// inline path: hand results straight to the supplied callback
if (cb) {
return cb({
buffer: buffer,
view: view
});
}
// worker path: post the result back to the page
postMessage({
buffer: buffer,
view: view
});
}
|
This method records MediaStream.
@method
@memberof StereoAudioRecorder
@example
recorder.record();
|
mergeAudioBuffers
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
MIT
|
// Resample `data` to a length proportional to newSampleRate/oldSampleRate
// using linear interpolation between neighbouring source samples.
// Reference: http://stackoverflow.com/a/28977136/552182
// First and last samples are always copied through unchanged.
function interpolateArray(data, newSampleRate, oldSampleRate) {
  var fitCount = Math.round(data.length * (newSampleRate / oldSampleRate));
  var newData = [];
  var springFactor = (data.length - 1) / (fitCount - 1);
  newData[0] = data[0];
  for (var i = 1; i < fitCount - 1; i++) {
    var tmp = i * springFactor;
    // Fix: the original converted these indices to strings via
    // Number(...).toFixed() and relied on implicit coercion for both the
    // array lookups and the subtraction; plain numeric indices give the
    // identical result without the string round-trip.
    var before = Math.floor(tmp);
    var after = Math.ceil(tmp);
    var atPoint = tmp - before;
    // inline linear interpolation between the two neighbouring samples
    newData[i] = data[before] + (data[after] - data[before]) * atPoint;
  }
  newData[fitCount - 1] = data[data.length - 1];
  return newData;
}
|
This method records MediaStream.
@method
@memberof StereoAudioRecorder
@example
recorder.record();
|
interpolateArray
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
MIT
|
// Standard lerp: blend `before` toward `after` by the fraction `atPoint`.
function linearInterpolate(before, after, atPoint) {
  var delta = after - before;
  return before + delta * atPoint;
}
|
This method records MediaStream.
@method
@memberof StereoAudioRecorder
@example
recorder.record();
|
linearInterpolate
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
MIT
|
// Flatten a list of sample chunks into one Float64Array holding `rLength`
// samples in total, preserving chunk order.
function mergeBuffers(channelBuffer, rLength) {
  var result = new Float64Array(rLength);
  var offset = 0;
  channelBuffer.forEach(function(chunk) {
    result.set(chunk, offset);
    offset += chunk.length;
  });
  return result;
}
|
This method records MediaStream.
@method
@memberof StereoAudioRecorder
@example
recorder.record();
|
mergeBuffers
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
MIT
|
// Produce [L0, R0, L1, R1, ...] from two equal-length channel arrays.
function interleave(leftChannel, rightChannel) {
  var total = leftChannel.length + rightChannel.length;
  var result = new Float64Array(total);
  for (var i = 0; 2 * i < total; i++) {
    result[2 * i] = leftChannel[i];
    result[2 * i + 1] = rightChannel[i];
  }
  return result;
}
|
This method records MediaStream.
@method
@memberof StereoAudioRecorder
@example
recorder.record();
|
interleave
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
MIT
|
/**
 * Writes each character of `string` as one byte into the DataView starting
 * at `offset`. Used for ASCII chunk identifiers in the WAV header
 * ('RIFF', 'WAVE', 'fmt ', 'data').
 * @param {DataView} view   target view over the output buffer
 * @param {number} offset   byte offset at which to start writing
 * @param {string} string   ASCII string to write
 */
function writeUTFBytes(view, offset, string) {
    for (var i = 0, len = string.length; i < len; i++) {
        view.setUint8(offset + i, string.charCodeAt(i));
    }
}
|
This method records MediaStream.
@method
@memberof StereoAudioRecorder
@example
recorder.record();
|
writeUTFBytes
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
MIT
|
/**
 * Spins up an inline Web Worker from a named function: the worker script is
 * the function's own source plus an onmessage shim that forwards posted
 * data to it by name.
 * @param {Function} _function named function to run inside the worker
 * @returns {Worker} worker with its blob URL attached as `workerURL`
 *                   so the caller can revoke it later
 */
function processInWebWorker(_function) {
    var source = _function.toString() +
        ';this.onmessage = function (eee) {' + _function.name + '(eee.data);}';
    var blob = new Blob([source], {
        type: 'application/javascript'
    });
    var workerURL = URL.createObjectURL(blob);
    var worker = new Worker(workerURL);
    worker.workerURL = workerURL;
    return worker;
}
|
This method records MediaStream.
@method
@memberof StereoAudioRecorder
@example
recorder.record();
|
processInWebWorker
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
MIT
|
// Resets every piece of recorder state (channel buffers, counters, flags)
// back to its initial value, and mirrors the relevant fields onto `self`
// so external readers of the recorder instance see the cleared state.
// All names assigned here are closure variables of the enclosing recorder.
function resetVariables() {
    leftchannel = [];
    rightchannel = [];
    recordingLength = 0;
    isAudioProcessStarted = false;
    recording = false;
    isPaused = false;
    context = null;
    // expose the fresh state on the recorder instance
    self.leftchannel = leftchannel;
    self.rightchannel = rightchannel;
    self.numberOfAudioChannels = numberOfAudioChannels;
    self.desiredSampRate = desiredSampRate;
    self.sampleRate = sampleRate;
    self.recordingLength = recordingLength;
    // per-timeSlice accumulation buffers used by looper()
    intervalsBasedBuffers = {
        left: [],
        right: [],
        recordingLength: 0
    };
}
|
This method resets currently recorded data.
@method
@memberof StereoAudioRecorder
@example
recorder.clearRecordedData();
|
resetVariables
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
MIT
|
// Tears down the audio processing graph (detaches the ScriptProcessorNode
// handler and disconnects both nodes) and then clears all recorder state
// via resetVariables(). Safe to call when nodes are already null.
function clearRecordedDataCB() {
    if (jsAudioNode) {
        jsAudioNode.onaudioprocess = null;
        jsAudioNode.disconnect();
        jsAudioNode = null;
    }
    if (audioInput) {
        audioInput.disconnect();
        audioInput = null;
    }
    resetVariables();
}
|
This method resets currently recorded data.
@method
@memberof StereoAudioRecorder
@example
recorder.clearRecordedData();
|
clearRecordedDataCB
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
MIT
|
// "audioprocess" event handler: clones the incoming PCM samples from the
// event's input buffer into the left/right channel arrays while recording
// is active. Also stops capture when the underlying MediaStream has ended,
// and fires the one-time onAudioProcessStarted/initCallback hooks.
function onAudioProcessDataAvailable(e) {
    if (isPaused) {
        return;
    }
    if (isMediaStreamActive() === false) {
        if (!config.disableLogs) {
            console.log('MediaStream seems stopped.');
        }
        jsAudioNode.disconnect();
        recording = false;
    }
    if (!recording) {
        if (audioInput) {
            audioInput.disconnect();
            audioInput = null;
        }
        return;
    }
    /**
     * This method is called on "onaudioprocess" event's first invocation.
     * @method {function} onAudioProcessStarted
     * @memberof StereoAudioRecorder
     * @example
     * recorder.onAudioProcessStarted: function() { };
     */
    if (!isAudioProcessStarted) {
        isAudioProcessStarted = true;
        if (config.onAudioProcessStarted) {
            config.onAudioProcessStarted();
        }
        if (config.initCallback) {
            config.initCallback();
        }
    }
    var left = e.inputBuffer.getChannelData(0);
    // we clone the samples (getChannelData returns a view that the audio
    // thread will overwrite on the next callback)
    var chLeft = new Float32Array(left);
    leftchannel.push(chLeft);
    if (numberOfAudioChannels === 2) {
        var right = e.inputBuffer.getChannelData(1);
        var chRight = new Float32Array(right);
        rightchannel.push(chRight);
    }
    recordingLength += bufferSize;
    // export raw PCM
    self.recordingLength = recordingLength;
    if (typeof config.timeSlice !== 'undefined') {
        // NOTE(review): chLeft/chRight pushed here are the same Float32Array
        // objects already pushed to the main buffers (shared references), and
        // chRight relies on var hoisting — it is only assigned in the stereo
        // branch above.
        intervalsBasedBuffers.recordingLength += bufferSize;
        intervalsBasedBuffers.left.push(chLeft);
        if (numberOfAudioChannels === 2) {
            intervalsBasedBuffers.right.push(chRight);
        }
    }
}
|
This method resets currently recorded data.
@method
@memberof StereoAudioRecorder
@example
recorder.clearRecordedData();
|
onAudioProcessDataAvailable
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
MIT
|
// timeSlice pump: WAV-encodes whatever samples accumulated since the last
// tick and hands the resulting blob to config.ondataavailable, then
// re-schedules itself. Stops silently once recording ends or when the
// required config options are missing.
function looper() {
    if (!recording || typeof config.ondataavailable !== 'function' || typeof config.timeSlice === 'undefined') {
        return;
    }
    if (intervalsBasedBuffers.left.length) {
        mergeLeftRightBuffers({
            desiredSampRate: desiredSampRate,
            sampleRate: sampleRate,
            numberOfAudioChannels: numberOfAudioChannels,
            internalInterleavedLength: intervalsBasedBuffers.recordingLength,
            leftBuffers: intervalsBasedBuffers.left,
            rightBuffers: numberOfAudioChannels === 1 ? [] : intervalsBasedBuffers.right
        }, function(buffer, view) {
            var blob = new Blob([view], {
                type: 'audio/wav'
            });
            config.ondataavailable(blob);
            // only reschedule after the (asynchronous) merge completes
            setTimeout(looper, config.timeSlice);
        });
        // reset immediately; the merge callback above operates on the
        // buffer arrays captured before this reassignment
        intervalsBasedBuffers = {
            left: [],
            right: [],
            recordingLength: 0
        };
    } else {
        // nothing buffered yet — just poll again after timeSlice ms
        setTimeout(looper, config.timeSlice);
    }
}
|
This method is called on "onaudioprocess" event's first invocation.
@method {function} onAudioProcessStarted
@memberof StereoAudioRecorder
@example
recorder.onAudioProcessStarted: function() { };
|
looper
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Chrome-Extensions/screen-recording/RecordRTC/StereoAudioRecorder.js
|
MIT
|
// Packs a serialized element list (as produced by serialize()) into one
// binary ArrayBuffer and returns a DataView over it. serialized[0] is the
// root element and carries the TOTAL byte/header size of the whole tree,
// which is why the buffer is sized from the first element alone.
// Free variables (BIG_ENDIAN, Types, TypeNames, Length, TYPE_LENGTH,
// LENGTH_LENGTH, BYTES_LENGTH, root, debug, binary_dump) come from the
// enclosing module scope.
pack = function(serialized) {
    var cursor = 0,
        i = 0,
        j = 0,
        endianness = BIG_ENDIAN;
    var ab = new ArrayBuffer(serialized[0].byte_length + serialized[0].header_size);
    var view = new DataView(ab);
    for (i = 0; i < serialized.length; i++) {
        var start = cursor,
            header_size = serialized[i].header_size,
            type = serialized[i].type,
            length = serialized[i].length,
            value = serialized[i].value,
            byte_length = serialized[i].byte_length,
            type_name = Length[type],
            unit = type_name === null ? 0 : root[type_name + 'Array'].BYTES_PER_ELEMENT;
        // Set type
        // (note: setUint8/getUint8 ignore the endianness argument)
        if (type === Types.BUFFER) {
            // on node.js Blob is emulated using Buffer type
            view.setUint8(cursor, Types.BLOB, endianness);
        } else {
            view.setUint8(cursor, type, endianness);
        }
        cursor += TYPE_LENGTH;
        if (debug) {
            console.info('Packing', type, TypeNames[type]);
        }
        // Set length if required
        if (type === Types.ARRAY || type === Types.OBJECT) {
            view.setUint16(cursor, length, endianness);
            cursor += LENGTH_LENGTH;
            if (debug) {
                console.info('Content Length', length);
            }
        }
        // Set byte length
        view.setUint32(cursor, byte_length, endianness);
        cursor += BYTES_LENGTH;
        if (debug) {
            console.info('Header Size', header_size, 'bytes');
            console.info('Byte Length', byte_length, 'bytes');
        }
        switch (type) {
            case Types.NULL:
            case Types.UNDEFINED:
                // NULL and UNDEFINED doesn't have any payload
                break;
            case Types.STRING:
                // strings are written as UTF-16 code units, 2 bytes each
                if (debug) {
                    console.info('Actual Content %c"' + value + '"', 'font-weight:bold;');
                }
                for (j = 0; j < length; j++, cursor += unit) {
                    view.setUint16(cursor, value.charCodeAt(j), endianness);
                }
                break;
            case Types.NUMBER:
            case Types.BOOLEAN:
                if (debug) {
                    console.info('%c' + value.toString(), 'font-weight:bold;');
                }
                view['set' + type_name](cursor, value, endianness);
                cursor += unit;
                break;
            case Types.INT8ARRAY:
            case Types.INT16ARRAY:
            case Types.INT32ARRAY:
            case Types.UINT8ARRAY:
            case Types.UINT16ARRAY:
            case Types.UINT32ARRAY:
            case Types.FLOAT32ARRAY:
            case Types.FLOAT64ARRAY:
                // raw byte copy of the typed array's backing buffer
                var _view = new Uint8Array(view.buffer, cursor, byte_length);
                _view.set(new Uint8Array(value.buffer));
                cursor += byte_length;
                break;
            case Types.ARRAYBUFFER:
            case Types.BUFFER:
                var _view = new Uint8Array(view.buffer, cursor, byte_length);
                _view.set(new Uint8Array(value));
                cursor += byte_length;
                break;
            case Types.BLOB:
            case Types.ARRAY:
            case Types.OBJECT:
                // container types carry no payload of their own;
                // their children follow as subsequent serialized elements
                break;
            default:
                throw 'TypeError: Unexpected type found.';
        }
        if (debug) {
            binary_dump(view, start, cursor - start);
        }
    }
    return view;
}
|
packs serialized elements array into a packed ArrayBuffer
@param {Array} serialized Serialized array of elements.
@return {DataView} view of packed binary
|
pack
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Conversation.js/AndroidRTC/scripts/FileBufferReader.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Conversation.js/AndroidRTC/scripts/FileBufferReader.js
|
MIT
|
// Recursively unpacks one element (and, for containers, its children) from
// a packed DataView starting at `cursor`. Returns { value, cursor } where
// `cursor` is the offset just past the consumed bytes so callers can chain
// reads. Mirrors the wire format written by pack().
unpack = function(view, cursor) {
    var i = 0,
        endianness = BIG_ENDIAN,
        start = cursor;
    var type, length, byte_length, value, elem;
    // Retrieve "type"
    type = view.getUint8(cursor, endianness);
    cursor += TYPE_LENGTH;
    if (debug) {
        console.info('Unpacking', type, TypeNames[type]);
    }
    // Retrieve "length" (element count — only containers carry it)
    if (type === Types.ARRAY || type === Types.OBJECT) {
        length = view.getUint16(cursor, endianness);
        cursor += LENGTH_LENGTH;
        if (debug) {
            console.info('Content Length', length);
        }
    }
    // Retrieve "byte_length"
    byte_length = view.getUint32(cursor, endianness);
    cursor += BYTES_LENGTH;
    if (debug) {
        console.info('Byte Length', byte_length, 'bytes');
    }
    var type_name = Length[type];
    var unit = type_name === null ? 0 : root[type_name + 'Array'].BYTES_PER_ELEMENT;
    switch (type) {
        case Types.NULL:
        case Types.UNDEFINED:
            if (debug) {
                binary_dump(view, start, cursor - start);
            }
            // NULL and UNDEFINED doesn't have any octet
            // NOTE(review): both deserialize to null — undefined is not
            // round-tripped distinctly.
            value = null;
            break;
        case Types.STRING:
            // stored as UTF-16 code units, 2 bytes per character
            length = byte_length / unit;
            var string = [];
            for (i = 0; i < length; i++) {
                var code = view.getUint16(cursor, endianness);
                cursor += unit;
                string.push(String.fromCharCode(code));
            }
            value = string.join('');
            if (debug) {
                console.info('Actual Content %c"' + value + '"', 'font-weight:bold;');
                binary_dump(view, start, cursor - start);
            }
            break;
        case Types.NUMBER:
            value = view.getFloat64(cursor, endianness);
            cursor += unit;
            if (debug) {
                console.info('Actual Content %c"' + value.toString() + '"', 'font-weight:bold;');
                binary_dump(view, start, cursor - start);
            }
            break;
        case Types.BOOLEAN:
            value = view.getUint8(cursor, endianness) === 1 ? true : false;
            cursor += unit;
            if (debug) {
                console.info('Actual Content %c"' + value.toString() + '"', 'font-weight:bold;');
                binary_dump(view, start, cursor - start);
            }
            break;
        case Types.INT8ARRAY:
        case Types.INT16ARRAY:
        case Types.INT32ARRAY:
        case Types.UINT8ARRAY:
        case Types.UINT16ARRAY:
        case Types.UINT32ARRAY:
        case Types.FLOAT32ARRAY:
        case Types.FLOAT64ARRAY:
        case Types.ARRAYBUFFER:
            // slice() copies the bytes, so the result does not alias `view`
            elem = view.buffer.slice(cursor, cursor + byte_length);
            cursor += byte_length;
            // If ArrayBuffer
            if (type === Types.ARRAYBUFFER) {
                value = elem;
                // If other TypedArray
            } else {
                value = new root[type_name + 'Array'](elem);
            }
            if (debug) {
                binary_dump(view, start, cursor - start);
            }
            break;
        case Types.BLOB:
            if (debug) {
                binary_dump(view, start, cursor - start);
            }
            // If Blob is available (on browser)
            if (root.Blob) {
                // blobs are encoded as two child elements: mime type, then bytes
                var mime = unpack(view, cursor);
                var buffer = unpack(view, mime.cursor);
                cursor = buffer.cursor;
                value = new Blob([buffer.value], {
                    type: mime.value
                });
            } else {
                // node.js implementation goes here
                elem = view.buffer.slice(cursor, cursor + byte_length);
                cursor += byte_length;
                // node.js implementation uses Buffer to help Blob
                // NOTE(review): new Buffer() is deprecated in modern Node —
                // Buffer.from(elem) is the current equivalent.
                value = new Buffer(elem);
            }
            break;
        case Types.ARRAY:
            if (debug) {
                binary_dump(view, start, cursor - start);
            }
            value = [];
            for (i = 0; i < length; i++) {
                // Retrieve array element
                elem = unpack(view, cursor);
                cursor = elem.cursor;
                value.push(elem.value);
            }
            break;
        case Types.OBJECT:
            if (debug) {
                binary_dump(view, start, cursor - start);
            }
            value = {};
            for (i = 0; i < length; i++) {
                // Retrieve object key and value in sequence
                var key = unpack(view, cursor);
                var val = unpack(view, key.cursor);
                cursor = val.cursor;
                value[key.value] = val.value;
            }
            break;
        default:
            throw 'TypeError: Type not supported.';
    }
    return {
        value: value,
        cursor: cursor
    };
}
|
Unpack binary data into an object with value and cursor
@param {DataView} view [description]
@param {Number} cursor [description]
@return {Object}
|
unpack
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Conversation.js/AndroidRTC/scripts/FileBufferReader.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Conversation.js/AndroidRTC/scripts/FileBufferReader.js
|
MIT
|
/**
 * Serializes every element of `array` (potentially asynchronously — Blob
 * elements go through FileReader) and invokes `callback` once with the
 * concatenated results in original order plus the total byte length.
 *
 * Fixes: the original never invoked `callback` for an empty input array
 * (the completion counter could never reach `length`), silently hanging
 * any caller serializing an empty []/{};  also the inner accumulator
 * shadowed the `array` parameter, which was confusing though harmless.
 *
 * @param {Array} array elements to serialize
 * @param {Function} callback receives (serializedElements, byteLength)
 * @return {void} no return value
 */
deferredSerialize = function(array, callback) {
    var length = array.length,
        results = [],
        count = 0,
        byte_length = 0;
    if (length === 0) {
        // nothing to serialize — complete immediately instead of hanging
        callback([], 0);
        return;
    }
    for (var i = 0; i < array.length; i++) {
        (function(index) {
            serialize(array[index], function(result) {
                // store results in order
                results[index] = result;
                // count byte length (result[0] is the element's root header)
                byte_length += result[0].header_size + result[0].byte_length;
                // when all results are on the table
                if (++count === length) {
                    // finally concatenate all results into a single array in order
                    var flattened = [];
                    for (var j = 0; j < results.length; j++) {
                        flattened = flattened.concat(results[j]);
                    }
                    callback(flattened, byte_length);
                }
            });
        })(i);
    }
}
|
deferred function to process multiple serialization in order
@param {array} array [description]
@param {Function} callback [description]
@return {void} no return value
|
deferredSerialize
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Conversation.js/AndroidRTC/scripts/FileBufferReader.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Conversation.js/AndroidRTC/scripts/FileBufferReader.js
|
MIT
|
// Serializes a JavaScript value into an array of header descriptors
// ({type, length, header_size, byte_length, value}) consumed by pack().
// Asynchronous for containers and Blobs (delegates to deferredSerialize /
// FileReader), so the result is always delivered via `callback`.
serialize = function(obj, callback) {
    var subarray = [],
        unit = 1,
        header_size = TYPE_LENGTH + BYTES_LENGTH,
        type, byte_length = 0,
        length = 0,
        value = obj;
    type = find_type(obj);
    // bytes per element for the detected type (0 for containers/null)
    unit = Length[type] === undefined || Length[type] === null ? 0 :
        root[Length[type] + 'Array'].BYTES_PER_ELEMENT;
    switch (type) {
        case Types.UNDEFINED:
        case Types.NULL:
            break;
        case Types.NUMBER:
        case Types.BOOLEAN:
            byte_length = unit;
            break;
        case Types.STRING:
            length = obj.length;
            byte_length += length * unit;
            break;
        case Types.INT8ARRAY:
        case Types.INT16ARRAY:
        case Types.INT32ARRAY:
        case Types.UINT8ARRAY:
        case Types.UINT16ARRAY:
        case Types.UINT32ARRAY:
        case Types.FLOAT32ARRAY:
        case Types.FLOAT64ARRAY:
            length = obj.length;
            byte_length += length * unit;
            break;
        case Types.ARRAY:
            // serialize children first; byte_length comes back aggregated
            deferredSerialize(obj, function(subarray, byte_length) {
                callback([{
                    type: type,
                    length: obj.length,
                    header_size: header_size + LENGTH_LENGTH,
                    byte_length: byte_length,
                    value: null
                }].concat(subarray));
            });
            return;
        case Types.OBJECT:
            // flatten own enumerable properties into [key, value, ...] pairs
            var deferred = [];
            for (var key in obj) {
                if (obj.hasOwnProperty(key)) {
                    deferred.push(key);
                    deferred.push(obj[key]);
                    length++;
                }
            }
            deferredSerialize(deferred, function(subarray, byte_length) {
                callback([{
                    type: type,
                    length: length,
                    header_size: header_size + LENGTH_LENGTH,
                    byte_length: byte_length,
                    value: null
                }].concat(subarray));
            });
            return;
        case Types.ARRAYBUFFER:
            byte_length += obj.byteLength;
            break;
        case Types.BLOB:
            // Blob becomes two serialized children: its mime type string,
            // then its bytes read as an ArrayBuffer
            var mime_type = obj.type;
            var reader = new FileReader();
            reader.onload = function(e) {
                deferredSerialize([mime_type, e.target.result], function(subarray, byte_length) {
                    callback([{
                        type: type,
                        length: length,
                        header_size: header_size,
                        byte_length: byte_length,
                        value: null
                    }].concat(subarray));
                });
            };
            reader.onerror = function(e) {
                throw 'FileReader Error: ' + e;
            };
            reader.readAsArrayBuffer(obj);
            return;
        case Types.BUFFER:
            byte_length += obj.length;
            break;
        default:
            throw 'TypeError: Type "' + obj.constructor.name + '" not supported.';
    }
    // scalar / typed-array case: single descriptor carrying the value itself
    callback([{
        type: type,
        length: length,
        header_size: header_size,
        byte_length: byte_length,
        value: value
    }].concat(subarray));
}
|
Serializes object and returns byte_length
@param {mixed} obj JavaScript object you want to serialize
@return {Array} Serialized array object
|
serialize
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Conversation.js/AndroidRTC/scripts/FileBufferReader.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Conversation.js/AndroidRTC/scripts/FileBufferReader.js
|
MIT
|
// Synchronously deserializes a packed ArrayBuffer (or DataView) back into
// the original JavaScript value via unpack().
// NOTE(review): the `callback` parameter is accepted but never used — the
// result is returned directly; confirm no caller relies on a callback.
deserialize = function(buffer, callback) {
    var view = buffer instanceof DataView ? buffer : new DataView(buffer);
    var result = unpack(view, 0);
    return result.value;
}
|
Deserialize binary and return JavaScript object
@param ArrayBuffer buffer ArrayBuffer you want to deserialize
@return mixed Retrieved JavaScript object
|
deserialize
|
javascript
|
muaz-khan/WebRTC-Experiment
|
Conversation.js/AndroidRTC/scripts/FileBufferReader.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/Conversation.js/AndroidRTC/scripts/FileBufferReader.js
|
MIT
|
// Packs a serialized element list (as produced by serialize()) into one
// binary ArrayBuffer and returns a DataView over it. serialized[0] is the
// root element and carries the TOTAL byte/header size of the whole tree,
// which is why the buffer is sized from the first element alone.
// (Browser-global variant of the same routine in binarize.js: looks up
// typed-array constructors on `window` instead of `root`.)
pack = function(serialized) {
    var cursor = 0,
        i = 0,
        j = 0,
        endianness = BIG_ENDIAN;
    var ab = new ArrayBuffer(serialized[0].byte_length + serialized[0].header_size);
    var view = new DataView(ab);
    for (i = 0; i < serialized.length; i++) {
        var start = cursor,
            header_size = serialized[i].header_size,
            type = serialized[i].type,
            length = serialized[i].length,
            value = serialized[i].value,
            byte_length = serialized[i].byte_length,
            type_name = Length[type],
            unit = type_name === null ? 0 : window[type_name + 'Array'].BYTES_PER_ELEMENT;
        // Set type
        // (note: setUint8/getUint8 ignore the endianness argument)
        if (type === Types.BUFFER) {
            // on node.js Blob is emulated using Buffer type
            view.setUint8(cursor, Types.BLOB, endianness);
        } else {
            view.setUint8(cursor, type, endianness);
        }
        cursor += TYPE_LENGTH;
        if (debug) {
            console.info('Packing', type, TypeNames[type]);
        }
        // Set length if required
        if (type === Types.ARRAY || type === Types.OBJECT) {
            view.setUint16(cursor, length, endianness);
            cursor += LENGTH_LENGTH;
            if (debug) {
                console.info('Content Length', length);
            }
        }
        // Set byte length
        view.setUint32(cursor, byte_length, endianness);
        cursor += BYTES_LENGTH;
        if (debug) {
            console.info('Header Size', header_size, 'bytes');
            console.info('Byte Length', byte_length, 'bytes');
        }
        switch (type) {
            case Types.NULL:
            case Types.UNDEFINED:
                // NULL and UNDEFINED doesn't have any payload
                break;
            case Types.STRING:
                // strings are written as UTF-16 code units, 2 bytes each
                if (debug) {
                    console.info('Actual Content %c"' + value + '"', 'font-weight:bold;');
                }
                for (j = 0; j < length; j++, cursor += unit) {
                    view.setUint16(cursor, value.charCodeAt(j), endianness);
                }
                break;
            case Types.NUMBER:
            case Types.BOOLEAN:
                if (debug) {
                    console.info('%c' + value.toString(), 'font-weight:bold;');
                }
                view['set' + type_name](cursor, value, endianness);
                cursor += unit;
                break;
            case Types.INT8ARRAY:
            case Types.INT16ARRAY:
            case Types.INT32ARRAY:
            case Types.UINT8ARRAY:
            case Types.UINT16ARRAY:
            case Types.UINT32ARRAY:
            case Types.FLOAT32ARRAY:
            case Types.FLOAT64ARRAY:
                // raw byte copy of the typed array's backing buffer
                var _view = new Uint8Array(view.buffer, cursor, byte_length);
                _view.set(new Uint8Array(value.buffer));
                cursor += byte_length;
                break;
            case Types.ARRAYBUFFER:
            case Types.BUFFER:
                var _view = new Uint8Array(view.buffer, cursor, byte_length);
                _view.set(new Uint8Array(value));
                cursor += byte_length;
                break;
            case Types.BLOB:
            case Types.ARRAY:
            case Types.OBJECT:
                // container types carry no payload of their own;
                // their children follow as subsequent serialized elements
                break;
            default:
                throw 'TypeError: Unexpected type found.';
        }
        if (debug) {
            binary_dump(view, start, cursor - start);
        }
    }
    return view;
}
|
packs serialized elements array into a packed ArrayBuffer
@param {Array} serialized Serialized array of elements.
@return {DataView} view of packed binary
|
pack
|
javascript
|
muaz-khan/WebRTC-Experiment
|
FileBufferReader/FileBufferReader.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/FileBufferReader/FileBufferReader.js
|
MIT
|
// Recursively unpacks one element (and, for containers, its children) from
// a packed DataView starting at `cursor`. Returns { value, cursor } where
// `cursor` is the offset just past the consumed bytes so callers can chain
// reads. Browser-global (`window`) variant of the binarize.js routine.
unpack = function(view, cursor) {
    var i = 0,
        endianness = BIG_ENDIAN,
        start = cursor;
    var type, length, byte_length, value, elem;
    // Retrieve "type"
    type = view.getUint8(cursor, endianness);
    cursor += TYPE_LENGTH;
    if (debug) {
        console.info('Unpacking', type, TypeNames[type]);
    }
    // Retrieve "length" (element count — only containers carry it)
    if (type === Types.ARRAY || type === Types.OBJECT) {
        length = view.getUint16(cursor, endianness);
        cursor += LENGTH_LENGTH;
        if (debug) {
            console.info('Content Length', length);
        }
    }
    // Retrieve "byte_length"
    byte_length = view.getUint32(cursor, endianness);
    cursor += BYTES_LENGTH;
    if (debug) {
        console.info('Byte Length', byte_length, 'bytes');
    }
    var type_name = Length[type];
    var unit = type_name === null ? 0 : window[type_name + 'Array'].BYTES_PER_ELEMENT;
    switch (type) {
        case Types.NULL:
        case Types.UNDEFINED:
            if (debug) {
                binary_dump(view, start, cursor - start);
            }
            // NULL and UNDEFINED doesn't have any octet
            // NOTE(review): both deserialize to null — undefined is not
            // round-tripped distinctly.
            value = null;
            break;
        case Types.STRING:
            // stored as UTF-16 code units, 2 bytes per character
            length = byte_length / unit;
            var string = [];
            for (i = 0; i < length; i++) {
                var code = view.getUint16(cursor, endianness);
                cursor += unit;
                string.push(String.fromCharCode(code));
            }
            value = string.join('');
            if (debug) {
                console.info('Actual Content %c"' + value + '"', 'font-weight:bold;');
                binary_dump(view, start, cursor - start);
            }
            break;
        case Types.NUMBER:
            value = view.getFloat64(cursor, endianness);
            cursor += unit;
            if (debug) {
                console.info('Actual Content %c"' + value.toString() + '"', 'font-weight:bold;');
                binary_dump(view, start, cursor - start);
            }
            break;
        case Types.BOOLEAN:
            value = view.getUint8(cursor, endianness) === 1 ? true : false;
            cursor += unit;
            if (debug) {
                console.info('Actual Content %c"' + value.toString() + '"', 'font-weight:bold;');
                binary_dump(view, start, cursor - start);
            }
            break;
        case Types.INT8ARRAY:
        case Types.INT16ARRAY:
        case Types.INT32ARRAY:
        case Types.UINT8ARRAY:
        case Types.UINT16ARRAY:
        case Types.UINT32ARRAY:
        case Types.FLOAT32ARRAY:
        case Types.FLOAT64ARRAY:
        case Types.ARRAYBUFFER:
            // slice() copies the bytes, so the result does not alias `view`
            elem = view.buffer.slice(cursor, cursor + byte_length);
            cursor += byte_length;
            // If ArrayBuffer
            if (type === Types.ARRAYBUFFER) {
                value = elem;
                // If other TypedArray
            } else {
                value = new window[type_name + 'Array'](elem);
            }
            if (debug) {
                binary_dump(view, start, cursor - start);
            }
            break;
        case Types.BLOB:
            if (debug) {
                binary_dump(view, start, cursor - start);
            }
            // If Blob is available (on browser)
            if (window.Blob) {
                // blobs are encoded as two child elements: mime type, then bytes
                var mime = unpack(view, cursor);
                var buffer = unpack(view, mime.cursor);
                cursor = buffer.cursor;
                value = new Blob([buffer.value], {
                    type: mime.value
                });
            } else {
                // node.js implementation goes here
                elem = view.buffer.slice(cursor, cursor + byte_length);
                cursor += byte_length;
                // node.js implementation uses Buffer to help Blob
                // NOTE(review): new Buffer() is deprecated in modern Node —
                // Buffer.from(elem) is the current equivalent.
                value = new Buffer(elem);
            }
            break;
        case Types.ARRAY:
            if (debug) {
                binary_dump(view, start, cursor - start);
            }
            value = [];
            for (i = 0; i < length; i++) {
                // Retrieve array element
                elem = unpack(view, cursor);
                cursor = elem.cursor;
                value.push(elem.value);
            }
            break;
        case Types.OBJECT:
            if (debug) {
                binary_dump(view, start, cursor - start);
            }
            value = {};
            for (i = 0; i < length; i++) {
                // Retrieve object key and value in sequence
                var key = unpack(view, cursor);
                var val = unpack(view, key.cursor);
                cursor = val.cursor;
                value[key.value] = val.value;
            }
            break;
        default:
            throw 'TypeError: Type not supported.';
    }
    return {
        value: value,
        cursor: cursor
    };
}
|
Unpack binary data into an object with value and cursor
@param {DataView} view [description]
@param {Number} cursor [description]
@return {Object}
|
unpack
|
javascript
|
muaz-khan/WebRTC-Experiment
|
FileBufferReader/FileBufferReader.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/FileBufferReader/FileBufferReader.js
|
MIT
|
/**
 * Serializes every element of `array` (potentially asynchronously — Blob
 * elements go through FileReader) and invokes `callback` once with the
 * concatenated results in original order plus the total byte length.
 *
 * Fixes: the original never invoked `callback` for an empty input array
 * (the completion counter could never reach `length`), silently hanging
 * any caller serializing an empty []/{};  also the inner accumulator
 * shadowed the `array` parameter, which was confusing though harmless.
 *
 * @param {Array} array elements to serialize
 * @param {Function} callback receives (serializedElements, byteLength)
 * @return {void} no return value
 */
deferredSerialize = function(array, callback) {
    var length = array.length,
        results = [],
        count = 0,
        byte_length = 0;
    if (length === 0) {
        // nothing to serialize — complete immediately instead of hanging
        callback([], 0);
        return;
    }
    for (var i = 0; i < array.length; i++) {
        (function(index) {
            serialize(array[index], function(result) {
                // store results in order
                results[index] = result;
                // count byte length (result[0] is the element's root header)
                byte_length += result[0].header_size + result[0].byte_length;
                // when all results are on the table
                if (++count === length) {
                    // finally concatenate all results into a single array in order
                    var flattened = [];
                    for (var j = 0; j < results.length; j++) {
                        flattened = flattened.concat(results[j]);
                    }
                    callback(flattened, byte_length);
                }
            });
        })(i);
    }
}
|
deferred function to process multiple serialization in order
@param {array} array [description]
@param {Function} callback [description]
@return {void} no return value
|
deferredSerialize
|
javascript
|
muaz-khan/WebRTC-Experiment
|
FileBufferReader/FileBufferReader.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/FileBufferReader/FileBufferReader.js
|
MIT
|
// Serializes a JavaScript value into an array of header descriptors
// ({type, length, header_size, byte_length, value}) consumed by pack().
// Asynchronous for containers and Blobs (delegates to deferredSerialize /
// FileReader), so the result is always delivered via `callback`.
// Browser-global (`window`) variant of the binarize.js routine.
serialize = function(obj, callback) {
    var subarray = [],
        unit = 1,
        header_size = TYPE_LENGTH + BYTES_LENGTH,
        type, byte_length = 0,
        length = 0,
        value = obj;
    type = find_type(obj);
    // bytes per element for the detected type (0 for containers/null)
    unit = Length[type] === undefined || Length[type] === null ? 0 :
        window[Length[type] + 'Array'].BYTES_PER_ELEMENT;
    switch (type) {
        case Types.UNDEFINED:
        case Types.NULL:
            break;
        case Types.NUMBER:
        case Types.BOOLEAN:
            byte_length = unit;
            break;
        case Types.STRING:
            length = obj.length;
            byte_length += length * unit;
            break;
        case Types.INT8ARRAY:
        case Types.INT16ARRAY:
        case Types.INT32ARRAY:
        case Types.UINT8ARRAY:
        case Types.UINT16ARRAY:
        case Types.UINT32ARRAY:
        case Types.FLOAT32ARRAY:
        case Types.FLOAT64ARRAY:
            length = obj.length;
            byte_length += length * unit;
            break;
        case Types.ARRAY:
            // serialize children first; byte_length comes back aggregated
            deferredSerialize(obj, function(subarray, byte_length) {
                callback([{
                    type: type,
                    length: obj.length,
                    header_size: header_size + LENGTH_LENGTH,
                    byte_length: byte_length,
                    value: null
                }].concat(subarray));
            });
            return;
        case Types.OBJECT:
            // flatten own enumerable properties into [key, value, ...] pairs
            var deferred = [];
            for (var key in obj) {
                if (obj.hasOwnProperty(key)) {
                    deferred.push(key);
                    deferred.push(obj[key]);
                    length++;
                }
            }
            deferredSerialize(deferred, function(subarray, byte_length) {
                callback([{
                    type: type,
                    length: length,
                    header_size: header_size + LENGTH_LENGTH,
                    byte_length: byte_length,
                    value: null
                }].concat(subarray));
            });
            return;
        case Types.ARRAYBUFFER:
            byte_length += obj.byteLength;
            break;
        case Types.BLOB:
            // Blob becomes two serialized children: its mime type string,
            // then its bytes read as an ArrayBuffer
            var mime_type = obj.type;
            var reader = new FileReader();
            reader.onload = function(e) {
                deferredSerialize([mime_type, e.target.result], function(subarray, byte_length) {
                    callback([{
                        type: type,
                        length: length,
                        header_size: header_size,
                        byte_length: byte_length,
                        value: null
                    }].concat(subarray));
                });
            };
            reader.onerror = function(e) {
                throw 'FileReader Error: ' + e;
            };
            reader.readAsArrayBuffer(obj);
            return;
        case Types.BUFFER:
            byte_length += obj.length;
            break;
        default:
            throw 'TypeError: Type "' + obj.constructor.name + '" not supported.';
    }
    // scalar / typed-array case: single descriptor carrying the value itself
    callback([{
        type: type,
        length: length,
        header_size: header_size,
        byte_length: byte_length,
        value: value
    }].concat(subarray));
}
|
Serializes object and returns byte_length
@param {mixed} obj JavaScript object you want to serialize
@return {Array} Serialized array object
|
serialize
|
javascript
|
muaz-khan/WebRTC-Experiment
|
FileBufferReader/FileBufferReader.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/FileBufferReader/FileBufferReader.js
|
MIT
|
// Synchronously deserializes a packed ArrayBuffer (or DataView) back into
// the original JavaScript value via unpack().
// NOTE(review): the `callback` parameter is accepted but never used — the
// result is returned directly; confirm no caller relies on a callback.
deserialize = function(buffer, callback) {
    var view = buffer instanceof DataView ? buffer : new DataView(buffer);
    var result = unpack(view, 0);
    return result.value;
}
|
Deserialize binary and return JavaScript object
@param ArrayBuffer buffer ArrayBuffer you want to deserialize
@return mixed Retrieved JavaScript object
|
deserialize
|
javascript
|
muaz-khan/WebRTC-Experiment
|
FileBufferReader/FileBufferReader.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/FileBufferReader/FileBufferReader.js
|
MIT
|
/**
 * Extracts a browser version number from a user agent string.
 * @param {string} uastring userAgent string.
 * @param {RegExp} expr regular expression with the version in a capture group.
 * @param {number} pos index of the capture group holding the version.
 * @returns {number|null|boolean} parsed integer version; null when the
 *     expression does not match, false when the match lacks group `pos`
 *     (same falsy results as the original short-circuit chain).
 */
function extractVersion(uastring, expr, pos) {
    var match = uastring.match(expr);
    if (!match) {
        return match;
    }
    if (match.length < pos) {
        return false;
    }
    return parseInt(match[pos], 10);
}
|
Extract browser version out of the provided user agent string.
@param {!string} uastring userAgent string.
@param {!string} expr Regular expression used as match criteria.
@param {!number} pos position in the version string to be returned.
@return {!number} browser version.
|
extractVersion
|
javascript
|
muaz-khan/WebRTC-Experiment
|
FileBufferReader/demo/adapter-latest.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/FileBufferReader/demo/adapter-latest.js
|
MIT
|
// Patches RTCPeerConnection.prototype so that listeners for
// `eventNameToWrap` (added via addEventListener or the on<event> property)
// receive the event transformed by `wrapper`. If `wrapper` returns a falsy
// value the event is suppressed. Other event names pass through untouched.
// No-op when RTCPeerConnection is absent from `window`.
function wrapPeerConnectionEvent(window, eventNameToWrap, wrapper) {
    if (!window.RTCPeerConnection) {
        return;
    }
    var proto = window.RTCPeerConnection.prototype;
    var nativeAddEventListener = proto.addEventListener;
    proto.addEventListener = function(nativeEventName, cb) {
        if (nativeEventName !== eventNameToWrap) {
            return nativeAddEventListener.apply(this, arguments);
        }
        var wrappedCallback = function(e) {
            var modifiedEvent = wrapper(e);
            if (modifiedEvent) {
                cb(modifiedEvent);
            }
        };
        // remember the wrapper per original callback so removeEventListener
        // can later detach the right function
        // NOTE(review): object keys are stringified, so two distinct
        // callbacks with identical source could collide here.
        this._eventMap = this._eventMap || {};
        this._eventMap[cb] = wrappedCallback;
        return nativeAddEventListener.apply(this, [nativeEventName,
            wrappedCallback]);
    };
    var nativeRemoveEventListener = proto.removeEventListener;
    proto.removeEventListener = function(nativeEventName, cb) {
        if (nativeEventName !== eventNameToWrap || !this._eventMap
            || !this._eventMap[cb]) {
            return nativeRemoveEventListener.apply(this, arguments);
        }
        var unwrappedCb = this._eventMap[cb];
        delete this._eventMap[cb];
        return nativeRemoveEventListener.apply(this, [nativeEventName,
            unwrappedCb]);
    };
    // route the on<event> handler property through the patched
    // add/removeEventListener so it also gets the wrapper treatment
    Object.defineProperty(proto, 'on' + eventNameToWrap, {
        get: function() {
            return this['_on' + eventNameToWrap];
        },
        set: function(cb) {
            if (this['_on' + eventNameToWrap]) {
                this.removeEventListener(eventNameToWrap,
                    this['_on' + eventNameToWrap]);
                delete this['_on' + eventNameToWrap];
            }
            if (cb) {
                this.addEventListener(eventNameToWrap,
                    this['_on' + eventNameToWrap] = cb);
            }
        },
        enumerable: true,
        configurable: true
    });
}
|
Extract browser version out of the provided user agent string.
@param {!string} uastring userAgent string.
@param {!string} expr Regular expression used as match criteria.
@param {!number} pos position in the version string to be returned.
@return {!number} browser version.
|
wrapPeerConnectionEvent
|
javascript
|
muaz-khan/WebRTC-Experiment
|
FileBufferReader/demo/adapter-latest.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/FileBufferReader/demo/adapter-latest.js
|
MIT
|
// Listener shim: passes the event through the enclosing scope's `wrapper`
// and forwards the transformed event to the original callback `cb`.
// A falsy return from `wrapper` suppresses the event entirely.
wrappedCallback = function(e) {
    var modifiedEvent = wrapper(e);
    if (modifiedEvent) {
        cb(modifiedEvent);
    }
}
|
Extract browser version out of the provided user agent string.
@param {!string} uastring userAgent string.
@param {!string} expr Regular expression used as match criteria.
@param {!number} pos position in the version string to be returned.
@return {!number} browser version.
|
wrappedCallback
|
javascript
|
muaz-khan/WebRTC-Experiment
|
FileBufferReader/demo/adapter-latest.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/FileBufferReader/demo/adapter-latest.js
|
MIT
|
// Packs a serialized element list (as produced by serialize()) into one
// binary ArrayBuffer and returns a DataView over it. serialized[0] is the
// root element and carries the TOTAL byte/header size of the whole tree,
// which is why the buffer is sized from the first element alone.
// (Browser-global `window` variant of the same routine.)
pack = function(serialized) {
    var cursor = 0,
        i = 0,
        j = 0,
        endianness = BIG_ENDIAN;
    var ab = new ArrayBuffer(serialized[0].byte_length + serialized[0].header_size);
    var view = new DataView(ab);
    for (i = 0; i < serialized.length; i++) {
        var start = cursor,
            header_size = serialized[i].header_size,
            type = serialized[i].type,
            length = serialized[i].length,
            value = serialized[i].value,
            byte_length = serialized[i].byte_length,
            type_name = Length[type],
            unit = type_name === null ? 0 : window[type_name + 'Array'].BYTES_PER_ELEMENT;
        // Set type
        // (note: setUint8/getUint8 ignore the endianness argument)
        if (type === Types.BUFFER) {
            // on node.js Blob is emulated using Buffer type
            view.setUint8(cursor, Types.BLOB, endianness);
        } else {
            view.setUint8(cursor, type, endianness);
        }
        cursor += TYPE_LENGTH;
        if (debug) {
            console.info('Packing', type, TypeNames[type]);
        }
        // Set length if required
        if (type === Types.ARRAY || type === Types.OBJECT) {
            view.setUint16(cursor, length, endianness);
            cursor += LENGTH_LENGTH;
            if (debug) {
                console.info('Content Length', length);
            }
        }
        // Set byte length
        view.setUint32(cursor, byte_length, endianness);
        cursor += BYTES_LENGTH;
        if (debug) {
            console.info('Header Size', header_size, 'bytes');
            console.info('Byte Length', byte_length, 'bytes');
        }
        switch (type) {
            case Types.NULL:
            case Types.UNDEFINED:
                // NULL and UNDEFINED doesn't have any payload
                break;
            case Types.STRING:
                // strings are written as UTF-16 code units, 2 bytes each
                if (debug) {
                    console.info('Actual Content %c"' + value + '"', 'font-weight:bold;');
                }
                for (j = 0; j < length; j++, cursor += unit) {
                    view.setUint16(cursor, value.charCodeAt(j), endianness);
                }
                break;
            case Types.NUMBER:
            case Types.BOOLEAN:
                if (debug) {
                    console.info('%c' + value.toString(), 'font-weight:bold;');
                }
                view['set' + type_name](cursor, value, endianness);
                cursor += unit;
                break;
            case Types.INT8ARRAY:
            case Types.INT16ARRAY:
            case Types.INT32ARRAY:
            case Types.UINT8ARRAY:
            case Types.UINT16ARRAY:
            case Types.UINT32ARRAY:
            case Types.FLOAT32ARRAY:
            case Types.FLOAT64ARRAY:
                // raw byte copy of the typed array's backing buffer
                var _view = new Uint8Array(view.buffer, cursor, byte_length);
                _view.set(new Uint8Array(value.buffer));
                cursor += byte_length;
                break;
            case Types.ARRAYBUFFER:
            case Types.BUFFER:
                var _view = new Uint8Array(view.buffer, cursor, byte_length);
                _view.set(new Uint8Array(value));
                cursor += byte_length;
                break;
            case Types.BLOB:
            case Types.ARRAY:
            case Types.OBJECT:
                // container types carry no payload of their own;
                // their children follow as subsequent serialized elements
                break;
            default:
                throw 'TypeError: Unexpected type found.';
        }
        if (debug) {
            binary_dump(view, start, cursor - start);
        }
    }
    return view;
}
|
packs serialized elements array into a packed ArrayBuffer
@param {Array} serialized Serialized array of elements.
@return {DataView} view of packed binary
|
pack
|
javascript
|
muaz-khan/WebRTC-Experiment
|
FileBufferReader/dev/binarize.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/FileBufferReader/dev/binarize.js
|
MIT
|
/**
 * Unpacks one serialized element from a DataView, starting at `cursor`.
 * Mirrors pack(): reads the header (1-byte type, optional 2-byte content
 * length for ARRAY/OBJECT, 4-byte payload byte length), then decodes the
 * payload, recursing for container types (ARRAY, OBJECT, BLOB).
 * @param {DataView} view packed binary produced by pack()
 * @param {Number} cursor byte offset at which this element's header starts
 * @return {Object} {value: decoded JavaScript value, cursor: offset just past the element}
 */
unpack = function(view, cursor) {
    var i = 0,
        endianness = BIG_ENDIAN,
        start = cursor;
    var type, length, byte_length, value, elem;
    // Retrieve "type" (getUint8 reads a single byte; the endianness
    // argument has no effect for one-byte accesses)
    type = view.getUint8(cursor, endianness);
    cursor += TYPE_LENGTH;
    if (debug) {
        console.info('Unpacking', type, TypeNames[type]);
    }
    // Retrieve "length" (element count; only container types carry it)
    if (type === Types.ARRAY || type === Types.OBJECT) {
        length = view.getUint16(cursor, endianness);
        cursor += LENGTH_LENGTH;
        if (debug) {
            console.info('Content Length', length);
        }
    }
    // Retrieve "byte_length" (payload size in bytes, excluding the header)
    byte_length = view.getUint32(cursor, endianness);
    cursor += BYTES_LENGTH;
    if (debug) {
        console.info('Byte Length', byte_length, 'bytes');
    }
    // Per-element size of the payload's unit type (0 for unit-less types)
    var type_name = Length[type];
    var unit = type_name === null ? 0 : window[type_name + 'Array'].BYTES_PER_ELEMENT;
    switch (type) {
        case Types.NULL:
        case Types.UNDEFINED:
            if (debug) {
                binary_dump(view, start, cursor - start);
            }
            // NULL and UNDEFINED don't carry any payload octets
            value = null;
            break;
        case Types.STRING:
            // Strings are stored as 2-byte UTF-16 code units
            length = byte_length / unit;
            var string = [];
            for (i = 0; i < length; i++) {
                var code = view.getUint16(cursor, endianness);
                cursor += unit;
                string.push(String.fromCharCode(code));
            }
            value = string.join('');
            if (debug) {
                console.info('Actual Content %c"' + value + '"', 'font-weight:bold;');
                binary_dump(view, start, cursor - start);
            }
            break;
        case Types.NUMBER:
            // Numbers are always stored as 8-byte IEEE-754 doubles
            value = view.getFloat64(cursor, endianness);
            cursor += unit;
            if (debug) {
                console.info('Actual Content %c"' + value.toString() + '"', 'font-weight:bold;');
                binary_dump(view, start, cursor - start);
            }
            break;
        case Types.BOOLEAN:
            // Booleans are a single byte: 1 => true, anything else => false
            value = view.getUint8(cursor, endianness) === 1 ? true : false;
            cursor += unit;
            if (debug) {
                console.info('Actual Content %c"' + value.toString() + '"', 'font-weight:bold;');
                binary_dump(view, start, cursor - start);
            }
            break;
        case Types.INT8ARRAY:
        case Types.INT16ARRAY:
        case Types.INT32ARRAY:
        case Types.UINT8ARRAY:
        case Types.UINT16ARRAY:
        case Types.UINT32ARRAY:
        case Types.FLOAT32ARRAY:
        case Types.FLOAT64ARRAY:
        case Types.ARRAYBUFFER:
            // Copy the raw payload bytes out of the backing buffer
            elem = view.buffer.slice(cursor, cursor + byte_length);
            cursor += byte_length;
            // If ArrayBuffer
            if (type === Types.ARRAYBUFFER) {
                value = elem;
                // If other TypedArray
            } else {
                value = new window[type_name + 'Array'](elem);
            }
            if (debug) {
                binary_dump(view, start, cursor - start);
            }
            break;
        case Types.BLOB:
            if (debug) {
                binary_dump(view, start, cursor - start);
            }
            // If Blob is available (on browser)
            if (window.Blob) {
                // A Blob is packed as two nested elements: mime type, then bytes
                var mime = unpack(view, cursor);
                var buffer = unpack(view, mime.cursor);
                cursor = buffer.cursor;
                value = new Blob([buffer.value], {
                    type: mime.value
                });
            } else {
                // node.js implementation goes here
                elem = view.buffer.slice(cursor, cursor + byte_length);
                cursor += byte_length;
                // node.js implementation uses Buffer to emulate Blob
                // NOTE(review): the Buffer constructor is deprecated in modern
                // Node; Buffer.from(elem) is the safer equivalent — confirm the
                // targeted Node versions before changing.
                value = new Buffer(elem);
            }
            break;
        case Types.ARRAY:
            if (debug) {
                binary_dump(view, start, cursor - start);
            }
            value = [];
            for (i = 0; i < length; i++) {
                // Retrieve array element
                elem = unpack(view, cursor);
                cursor = elem.cursor;
                value.push(elem.value);
            }
            break;
        case Types.OBJECT:
            if (debug) {
                binary_dump(view, start, cursor - start);
            }
            value = {};
            for (i = 0; i < length; i++) {
                // Retrieve object key and value in sequence
                var key = unpack(view, cursor);
                var val = unpack(view, key.cursor);
                cursor = val.cursor;
                value[key.value] = val.value;
            }
            break;
        default:
            throw 'TypeError: Type not supported.';
    }
    return {
        value: value,
        cursor: cursor
    };
}
|
Unpack binary data into an object with value and cursor
@param {DataView} view packed binary data produced by pack()
@param {Number} cursor byte offset at which to start reading
@return {Object} {value: decoded value, cursor: offset just past the element}
|
unpack
|
javascript
|
muaz-khan/WebRTC-Experiment
|
FileBufferReader/dev/binarize.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/FileBufferReader/dev/binarize.js
|
MIT
|
/**
 * Deferred helper that serializes multiple values, preserving their order.
 * Each element is serialized asynchronously; once all results are in, they
 * are flattened into a single array and handed to `callback` together with
 * the accumulated byte length.
 * @param {Array} array values to serialize
 * @param {Function} callback receives (serialized elements array, total byte length)
 * @return {void} no return value
 */
deferredSerialize = function(array, callback) {
    var length = array.length,
        results = [],
        count = 0,
        byte_length = 0;
    // With no elements the completion check below can never fire
    // (count never reaches length), which would leave the caller
    // waiting forever — invoke the callback immediately instead.
    if (length === 0) {
        callback(results, byte_length);
        return;
    }
    for (var i = 0; i < array.length; i++) {
        (function(index) {
            serialize(array[index], function(result) {
                // store results in order
                results[index] = result;
                // count byte length
                byte_length += result[0].header_size + result[0].byte_length;
                // when all results are on table
                if (++count === length) {
                    // finally concatenate all results into a single array in order
                    // (named `flattened` to avoid shadowing the `array` parameter)
                    var flattened = [];
                    for (var j = 0; j < results.length; j++) {
                        flattened = flattened.concat(results[j]);
                    }
                    callback(flattened, byte_length);
                }
            });
        })(i);
    }
}
|
Deferred helper that serializes multiple values while preserving their order
@param {Array} array values to serialize
@param {Function} callback receives (serialized elements array, total byte length)
@return {void} no return value
|
deferredSerialize
|
javascript
|
muaz-khan/WebRTC-Experiment
|
FileBufferReader/dev/binarize.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/FileBufferReader/dev/binarize.js
|
MIT
|
/**
 * Serializes a JavaScript value into an array of header descriptors that
 * pack() later writes into a single ArrayBuffer. Results are delivered
 * through `callback`: container types (ARRAY, OBJECT) and BLOB resolve
 * asynchronously (BLOB needs a FileReader round-trip); every other type
 * invokes the callback synchronously.
 * @param {mixed} obj JavaScript value to serialize
 * @param {Function} callback receives the serialized element-descriptor array
 */
serialize = function(obj, callback) {
    var subarray = [],
        unit = 1,
        header_size = TYPE_LENGTH + BYTES_LENGTH,
        type, byte_length = 0,
        length = 0,
        value = obj;
    type = find_type(obj);
    // Per-element payload size for this type (0 when the type has no unit)
    unit = Length[type] === undefined || Length[type] === null ? 0 :
        window[Length[type] + 'Array'].BYTES_PER_ELEMENT;
    switch (type) {
        case Types.UNDEFINED:
        case Types.NULL:
            // no payload: header only
            break;
        case Types.NUMBER:
        case Types.BOOLEAN:
            // fixed-size scalar payload
            byte_length = unit;
            break;
        case Types.STRING:
            // stored as UTF-16 code units, `unit` bytes each
            length = obj.length;
            byte_length += length * unit;
            break;
        case Types.INT8ARRAY:
        case Types.INT16ARRAY:
        case Types.INT32ARRAY:
        case Types.UINT8ARRAY:
        case Types.UINT16ARRAY:
        case Types.UINT32ARRAY:
        case Types.FLOAT32ARRAY:
        case Types.FLOAT64ARRAY:
            length = obj.length;
            byte_length += length * unit;
            break;
        case Types.ARRAY:
            // serialize children asynchronously; this element's descriptor
            // carries only the header (value: null), children follow it
            deferredSerialize(obj, function(subarray, byte_length) {
                callback([{
                    type: type,
                    length: obj.length,
                    header_size: header_size + LENGTH_LENGTH,
                    byte_length: byte_length,
                    value: null
                }].concat(subarray));
            });
            return;
        case Types.OBJECT:
            // flatten own enumerable properties into [key, value, ...] pairs
            var deferred = [];
            for (var key in obj) {
                if (obj.hasOwnProperty(key)) {
                    deferred.push(key);
                    deferred.push(obj[key]);
                    length++;
                }
            }
            deferredSerialize(deferred, function(subarray, byte_length) {
                callback([{
                    type: type,
                    length: length,
                    header_size: header_size + LENGTH_LENGTH,
                    byte_length: byte_length,
                    value: null
                }].concat(subarray));
            });
            return;
        case Types.ARRAYBUFFER:
            byte_length += obj.byteLength;
            break;
        case Types.BLOB:
            // a Blob is serialized as two nested elements: mime type, then bytes
            var mime_type = obj.type;
            var reader = new FileReader();
            reader.onload = function(e) {
                deferredSerialize([mime_type, e.target.result], function(subarray, byte_length) {
                    callback([{
                        type: type,
                        length: length,
                        header_size: header_size,
                        byte_length: byte_length,
                        value: null
                    }].concat(subarray));
                });
            };
            reader.onerror = function(e) {
                throw 'FileReader Error: ' + e;
            };
            reader.readAsArrayBuffer(obj);
            return;
        case Types.BUFFER:
            byte_length += obj.length;
            break;
        default:
            throw 'TypeError: Type "' + obj.constructor.name + '" not supported.';
    }
    // synchronous types fall through to a single-descriptor result
    callback([{
        type: type,
        length: length,
        header_size: header_size,
        byte_length: byte_length,
        value: value
    }].concat(subarray));
}
|
Serializes a JavaScript value and delivers the serialized element array via callback
@param {mixed} obj JavaScript object you want to serialize
@return {void} result (serialized array of element descriptors) is passed to the callback
|
serialize
|
javascript
|
muaz-khan/WebRTC-Experiment
|
FileBufferReader/dev/binarize.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/FileBufferReader/dev/binarize.js
|
MIT
|
/**
 * Deserialize packed binary back into a JavaScript value.
 * @param {ArrayBuffer|DataView} buffer packed binary you want to deserialize
 * @param {Function} [callback] optional; receives the deserialized value
 * @return {mixed} the deserialized JavaScript value
 */
deserialize = function(buffer, callback) {
    // Accept either a raw ArrayBuffer or an already-created DataView.
    var view = buffer instanceof DataView ? buffer : new DataView(buffer);
    var result = unpack(view, 0);
    // The callback parameter was historically accepted but ignored; invoke
    // it when provided so the callback-style API works, while still
    // returning the value for synchronous callers.
    if (typeof callback === 'function') {
        callback(result.value);
    }
    return result.value;
}
|
Deserialize binary and return JavaScript object
@param ArrayBuffer buffer ArrayBuffer you want to deserialize
@return mixed Retrieved JavaScript object
|
deserialize
|
javascript
|
muaz-khan/WebRTC-Experiment
|
FileBufferReader/dev/binarize.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/FileBufferReader/dev/binarize.js
|
MIT
|
/**
 * Opens a "Save As" dialog for a Blob/File, using the IE/Edge
 * msSaveOrOpenBlob/msSaveBlob APIs when present, otherwise a synthetic
 * click on a temporary download anchor.
 * @param {Blob} file - File or Blob object. This parameter is required.
 * @param {string} fileName - Optional file name e.g. "Recorded-Video.webm"
 * @example
 * invokeSaveAsDialog(blob or file, [optional] fileName);
 */
function invokeSaveAsDialog(file, fileName) {
    if (!file) {
        throw 'Blob object is required.';
    }
    if (!file.type) {
        // Blob.type is read-only on real Blobs; swallow the failure.
        try {
            file.type = 'video/webm';
        } catch (e) {}
    }
    var fileExtension = (file.type || 'video/webm').split('/')[1];
    if (fileName && fileName.indexOf('.') !== -1) {
        // Take the LAST dot-separated token as the extension so names
        // containing dots ("my.recorded.video.webm") keep their full base
        // name (previously splitted[1] grabbed the first inner token).
        var splitted = fileName.split('.');
        fileExtension = splitted.pop();
        fileName = splitted.join('.');
    }
    var fileFullName = (fileName || (Math.round(Math.random() * 9999999999) + 888888888)) + '.' + fileExtension;
    if (typeof navigator.msSaveOrOpenBlob !== 'undefined') {
        return navigator.msSaveOrOpenBlob(file, fileFullName);
    } else if (typeof navigator.msSaveBlob !== 'undefined') {
        return navigator.msSaveBlob(file, fileFullName);
    }
    var hyperlink = document.createElement('a');
    hyperlink.href = URL.createObjectURL(file);
    hyperlink.target = '_blank';
    hyperlink.download = fileFullName;
    if (!!navigator.mozGetUserMedia) {
        // Firefox requires the anchor to be in the DOM for the click to work.
        hyperlink.onclick = function() {
            (document.body || document.documentElement).removeChild(hyperlink);
        };
        (document.body || document.documentElement).appendChild(hyperlink);
    }
    var evt = new MouseEvent('click', {
        view: window,
        bubbles: true,
        cancelable: true
    });
    hyperlink.dispatchEvent(evt);
    if (!navigator.mozGetUserMedia) {
        URL.revokeObjectURL(hyperlink.href);
    }
}
|
@param {Blob} file - File or Blob object. This parameter is required.
@param {string} fileName - Optional file name e.g. "Recorded-Video.webm"
@example
invokeSaveAsDialog(blob or file, [optional] fileName);
@see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
|
invokeSaveAsDialog
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
/**
 * Formats a byte count as a human-readable size string.
 * @param {number} bytes - Byte count (non-negative). This parameter is required.
 * @returns {string} e.g. "1.50 KB"; sizes are decimal (1 KB = 1000 bytes)
 */
function bytesToSize(bytes) {
    var k = 1000;
    var sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
    if (bytes === 0) {
        return '0 Bytes';
    }
    // Clamp the magnitude index so values beyond the largest unit (TB)
    // report in TB instead of "undefined" (previously sizes[5] was read
    // for bytes >= 1000^5).
    var i = Math.min(Math.floor(Math.log(bytes) / Math.log(k)), sizes.length - 1);
    return (bytes / Math.pow(k, i)).toPrecision(3) + ' ' + sizes[i];
}
|
Formats a byte count as a human-readable size string.
@param {number} bytes - Byte count to format. This parameter is required.
@returns {string} e.g. "1.50 KB"
@example
bytesToSize(1500); // "1.50 KB"
@see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
|
bytesToSize
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
/**
 * Detects whether the current browser can use the MediaRecorder API:
 * Firefox always qualifies; Chrome qualifies from version 49 onward;
 * Opera and every other browser are rejected.
 * @returns {boolean} true when MediaRecorder is considered usable
 */
function isMediaRecorderCompatible() {
    // Firefox ships MediaRecorder in all supported versions.
    var isFirefox = typeof window.InstallTrigger !== 'undefined';
    if (isFirefox) {
        return true;
    }
    // Only real Chrome (not Opera, which also exposes window.chrome)
    // is considered from here on.
    var isOpera = !!window.opera || navigator.userAgent.indexOf(' OPR/') >= 0;
    var isChrome = !!window.chrome && !isOpera;
    if (!isChrome) {
        return false;
    }
    // Extract the Chrome major version from the user-agent string.
    var agent = navigator.userAgent;
    var versionText = '' + parseFloat(navigator.appVersion);
    var chromeOffset = agent.indexOf('Chrome');
    versionText = agent.substring(chromeOffset + 7);
    // Trim the version string at the first semicolon or space, if present.
    var cut = versionText.indexOf(';');
    if (cut !== -1) {
        versionText = versionText.substring(0, cut);
    }
    cut = versionText.indexOf(' ');
    if (cut !== -1) {
        versionText = versionText.substring(0, cut);
    }
    var majorVersion = parseInt('' + versionText, 10);
    if (isNaN(majorVersion)) {
        // Fall back to the appVersion-derived number when parsing failed.
        majorVersion = parseInt(navigator.appVersion, 10);
    }
    // MediaRecorder landed in stable Chrome 49.
    return majorVersion >= 49;
}
|
Detects whether the current browser supports the MediaRecorder API:
Firefox always qualifies; Chrome qualifies from version 49 onward.
@returns {boolean} true when MediaRecorder is considered usable
@example
if (isMediaRecorderCompatible()) { /* safe to use MediaRecorder */ }
@see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
|
isMediaRecorderCompatible
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
/**
 * Wrapper around the native MediaRecorder API that records a MediaStream
 * in timeSlice-sized chunks, restarting the recorder after each chunk so
 * every emitted blob is independently playable.
 * @param {MediaStream} mediaStream stream to record
 */
function MediaRecorderWrapper(mediaStream) {
    var self = this;
    /**
     * This method records MediaStream.
     * @method
     * @memberof MediaStreamRecorder
     * @example
     * recorder.start(5000);
     */
    this.start = function(timeSlice, __disableLogs) {
        this.timeSlice = timeSlice || 5000;
        if (!self.mimeType) {
            self.mimeType = 'video/webm';
        }
        if (self.mimeType.indexOf('audio') !== -1) {
            if (mediaStream.getVideoTracks().length && mediaStream.getAudioTracks().length) {
                // Audio-only recording of a mixed stream: strip video tracks.
                var stream;
                if (!!navigator.mozGetUserMedia) {
                    stream = new MediaStream();
                    stream.addTrack(mediaStream.getAudioTracks()[0]);
                } else {
                    // webkitMediaStream
                    stream = new MediaStream(mediaStream.getAudioTracks());
                }
                mediaStream = stream;
            }
        }
        if (self.mimeType.indexOf('audio') !== -1) {
            self.mimeType = IsChrome ? 'audio/webm' : 'audio/ogg';
        }
        self.dontFireOnDataAvailableEvent = false;
        var recorderHints = {
            mimeType: self.mimeType
        };
        if (!self.disableLogs && !__disableLogs) {
            console.log('Passing following params over MediaRecorder API.', recorderHints);
        }
        if (mediaRecorder) {
            // mandatory to make sure Firefox doesn't fail to record streams 3-4 times without reloading the page.
            mediaRecorder = null;
        }
        if (IsChrome && !isMediaRecorderCompatible()) {
            // to support video-only recording on stable
            recorderHints = 'video/vp8';
        }
        // http://dxr.mozilla.org/mozilla-central/source/content/media/MediaRecorder.cpp
        // https://wiki.mozilla.org/Gecko:MediaRecorder
        // https://dvcs.w3.org/hg/dap/raw-file/default/media-stream-capture/MediaRecorder.html
        // starting a recording session; which will initiate "Reading Thread"
        // "Reading Thread" are used to prevent main-thread blocking scenarios
        try {
            mediaRecorder = new MediaRecorder(mediaStream, recorderHints);
        } catch (e) {
            // if someone passed NON_supported mimeType
            // or if Firefox on Android
            mediaRecorder = new MediaRecorder(mediaStream);
        }
        if ('canRecordMimeType' in mediaRecorder && mediaRecorder.canRecordMimeType(self.mimeType) === false) {
            if (!self.disableLogs) {
                console.warn('MediaRecorder API seems unable to record mimeType:', self.mimeType);
            }
        }
        // i.e. stop recording when <video> is paused by the user; and auto restart recording
        // when video is resumed. E.g. yourStream.getVideoTracks()[0].muted = true; // it will auto-stop recording.
        if (self.ignoreMutedMedia === true) {
            mediaRecorder.ignoreMutedMedia = true;
        }
        var firedOnDataAvailableOnce = false;
        // Dispatching OnDataAvailable Handler
        mediaRecorder.ondataavailable = function(e) {
            // how to fix FF-corrupt-webm issues?
            // should we leave this? e.data.size < 26800
            if (!e.data || !e.data.size || e.data.size < 26800 || firedOnDataAvailableOnce) {
                return;
            }
            firedOnDataAvailableOnce = true;
            var blob = self.getNativeBlob ? e.data : new Blob([e.data], {
                type: self.mimeType || 'video/webm'
            });
            self.ondataavailable(blob);
            // self.dontFireOnDataAvailableEvent = true;
            if (!!mediaRecorder && mediaRecorder.state === 'recording') {
                mediaRecorder.stop();
            }
            mediaRecorder = null;
            if (self.dontFireOnDataAvailableEvent) {
                return;
            }
            // record next interval
            self.start(timeSlice, '__disableLogs');
        };
        mediaRecorder.onerror = function(error) {
            if (!self.disableLogs) {
                if (error.name === 'InvalidState') {
                    console.error('The MediaRecorder is not in a state in which the proposed operation is allowed to be executed.');
                } else if (error.name === 'OutOfMemory') {
                    console.error('The UA has exhaused the available memory. User agents SHOULD provide as much additional information as possible in the message attribute.');
                } else if (error.name === 'IllegalStreamModification') {
                    console.error('A modification to the stream has occurred that makes it impossible to continue recording. An example would be the addition of a Track while recording is occurring. User agents SHOULD provide as much additional information as possible in the message attribute.');
                } else if (error.name === 'OtherRecordingError') {
                    console.error('Used for an fatal error other than those listed above. User agents SHOULD provide as much additional information as possible in the message attribute.');
                } else if (error.name === 'GenericError') {
                    console.error('The UA cannot provide the codec or recording option that has been requested.', error);
                } else {
                    console.error('MediaRecorder Error', error);
                }
            }
            // When the stream is "ended" set recording to 'inactive'
            // and stop gathering data. Callers should not rely on
            // exactness of the timeSlice value, especially
            // if the timeSlice value is small. Callers should
            // consider timeSlice as a minimum value
            if (!!mediaRecorder && mediaRecorder.state !== 'inactive' && mediaRecorder.state !== 'stopped') {
                mediaRecorder.stop();
            }
        };
        // void start(optional long mTimeSlice)
        // The interval of passing encoded data from EncodedBufferCache to onDataAvailable
        // handler. "mTimeSlice < 0" means Session object does not push encoded data to
        // onDataAvailable, instead, it passive wait the client side pull encoded data
        // by calling requestData API.
        try {
            mediaRecorder.start(3.6e+6);
        } catch (e) {
            mediaRecorder = null;
        }
        setTimeout(function() {
            if (!mediaRecorder) {
                return;
            }
            if (mediaRecorder.state === 'recording') {
                // "stop" method auto invokes "requestData"!
                mediaRecorder.requestData();
                // mediaRecorder.stop();
            }
        }, timeSlice);
        // Start recording. If timeSlice has been provided, mediaRecorder will
        // raise a dataavailable event containing the Blob of collected data on every timeSlice milliseconds.
        // If timeSlice isn't provided, UA should call the RequestData to obtain the Blob data, also set the mTimeSlice to zero.
    };
    /**
     * This method stops recording MediaStream.
     * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
     * @method
     * @memberof MediaStreamRecorder
     * @example
     * recorder.stop(function(blob) {
     *     video.src = URL.createObjectURL(blob);
     * });
     */
    this.stop = function(callback) {
        if (!mediaRecorder) {
            return;
        }
        // mediaRecorder.state === 'recording' means that media recorder is associated with "session"
        // mediaRecorder.state === 'stopped' means that media recorder is detached from the "session" ... in this case; "session" will also be deleted.
        if (mediaRecorder.state === 'recording') {
            // "stop" method auto invokes "requestData"!
            mediaRecorder.requestData();
            setTimeout(function() {
                self.dontFireOnDataAvailableEvent = true;
                if (!!mediaRecorder && mediaRecorder.state === 'recording') {
                    mediaRecorder.stop();
                }
                mediaRecorder = null;
                self.onstop();
            }, 2000);
        }
    };
    /**
     * This method pauses the recording process.
     * @method
     * @memberof MediaStreamRecorder
     * @example
     * recorder.pause();
     */
    this.pause = function() {
        if (!mediaRecorder) {
            return;
        }
        if (mediaRecorder.state === 'recording') {
            mediaRecorder.pause();
        }
        this.dontFireOnDataAvailableEvent = true;
    };
    /**
     * The recorded blobs are passed over this event.
     * @event
     * @memberof MediaStreamRecorder
     * @example
     * recorder.ondataavailable = function(data) {};
     */
    this.ondataavailable = function(blob) {
        console.log('recorded-blob', blob);
    };
    /**
     * This method resumes the recording process.
     * @method
     * @memberof MediaStreamRecorder
     * @example
     * recorder.resume();
     */
    this.resume = function() {
        if (this.dontFireOnDataAvailableEvent) {
            this.dontFireOnDataAvailableEvent = false;
            var disableLogs = self.disableLogs;
            self.disableLogs = true;
            // BUGFIX: start() stores the interval as `this.timeSlice`
            // (capital S); the previous `this.timeslice` was always
            // undefined, silently resetting the interval to 5000ms.
            this.start(this.timeSlice || 5000);
            self.disableLogs = disableLogs;
            return;
        }
        if (!mediaRecorder) {
            return;
        }
        if (mediaRecorder.state === 'paused') {
            mediaRecorder.resume();
        }
    };
    /**
     * This method resets currently recorded data.
     * @method
     * @memberof MediaStreamRecorder
     * @example
     * recorder.clearRecordedData();
     */
    this.clearRecordedData = function() {
        if (!mediaRecorder) {
            return;
        }
        this.pause();
        this.dontFireOnDataAvailableEvent = true;
        this.stop();
    };
    this.onstop = function() {};
    // Reference to "MediaRecorder" object
    var mediaRecorder;

    // Returns whether the captured stream is still usable, via the modern
    // MediaStream.active flag or the legacy MediaStream.ended flag.
    function isMediaStreamActive() {
        if ('active' in mediaStream) {
            if (!mediaStream.active) {
                return false;
            }
        } else if ('ended' in mediaStream) { // old hack
            if (mediaStream.ended) {
                return false;
            }
        }
        return true;
    }
    // this method checks if media stream is stopped
    // or any track is ended.
    (function looper() {
        if (!mediaRecorder) {
            return;
        }
        if (isMediaStreamActive() === false) {
            self.stop();
            return;
        }
        setTimeout(looper, 1000); // check every second
    })();
}
|
Implementation of https://dvcs.w3.org/hg/dap/raw-file/default/media-stream-capture/MediaRecorder.html
The MediaRecorder accepts a mediaStream as input source passed from UA. When recorder starts,
a MediaEncoder will be created and accept the mediaStream as input source.
Encoder will get the raw data by track data changes, encode it by selected MIME Type, then store the encoded in EncodedBufferCache object.
The encoded data will be extracted on every timeslice passed from Start function call or by RequestData function.
Thread model:
When the recorder starts, it creates a "Media Encoder" thread to read data from MediaEncoder object and store buffer in EncodedBufferCache object.
Also extract the encoded data and create blobs on every timeslice passed from start function or RequestData function called by UA.
|
MediaRecorderWrapper
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
// Reports whether the captured MediaStream is still live: prefers the
// modern MediaStream.active flag, falls back to the legacy .ended flag,
// and assumes "live" when neither property exists.
function isMediaStreamActive() {
    if ('active' in mediaStream) {
        return !!mediaStream.active;
    }
    if ('ended' in mediaStream) { // legacy fallback
        return !mediaStream.ended;
    }
    // No liveness information available: assume the stream is usable.
    return true;
}
|
Checks whether the captured MediaStream is still live, using the modern
MediaStream.active flag or the legacy MediaStream.ended flag.
@method
@memberof MediaStreamRecorder
@returns {boolean} true while the stream is usable
|
isMediaStreamActive
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
/**
 * Records stereo audio from a MediaStream via StereoAudioRecorderHelper,
 * polling it for encoded data every `timeSlice` milliseconds.
 * @param {MediaStream} mediaStream stream whose audio is recorded
 */
function StereoAudioRecorder(mediaStream) {
    // void start(optional long timeSlice)
    // timestamp to fire "ondataavailable"
    this.start = function(timeSlice) {
        timeSlice = timeSlice || 1000;
        mediaRecorder = new StereoAudioRecorderHelper(mediaStream, this);
        mediaRecorder.record();
        // Poll the helper for recorded data on a fixed interval.
        timeout = setInterval(function() {
            mediaRecorder.requestData();
        }, timeSlice);
    };
    this.stop = function() {
        if (mediaRecorder) {
            mediaRecorder.stop();
            // The polling handle comes from setInterval, so cancel it with
            // clearInterval (clearTimeout only worked by browser leniency).
            clearInterval(timeout);
            this.onstop();
        }
    };
    this.pause = function() {
        if (!mediaRecorder) {
            return;
        }
        mediaRecorder.pause();
    };
    this.resume = function() {
        if (!mediaRecorder) {
            return;
        }
        mediaRecorder.resume();
    };
    this.ondataavailable = function() {};
    this.onstop = function() {};
    // Reference to "StereoAudioRecorderHelper" object
    var mediaRecorder;
    // Handle returned by setInterval for the requestData polling loop
    var timeout;
}
|
Records stereo audio from a MediaStream via StereoAudioRecorderHelper,
emitting WAV/PCM blobs through ondataavailable on a polling interval.
@method
@memberof MediaStreamRecorder
@example
recorder.start(1000);
|
StereoAudioRecorder
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
/**
 * Web Audio capture backend for StereoAudioRecorder: taps the incoming
 * MediaStream through a ScriptProcessorNode, buffers raw Float32 samples
 * per channel, and encodes WAV (or raw PCM) blobs on demand.
 * @param {MediaStream} mediaStream audio source
 * @param {Object} root owning recorder (supplies sampleRate, mimeType,
 *                      audioChannels, bufferSize and receives ondataavailable)
 */
function StereoAudioRecorderHelper(mediaStream, root) {
    // variables
    var deviceSampleRate = 44100; // range: 22050 to 96000
    if (!ObjectStore.AudioContextConstructor) {
        ObjectStore.AudioContextConstructor = new ObjectStore.AudioContext();
    }
    // check device sample rate
    deviceSampleRate = ObjectStore.AudioContextConstructor.sampleRate;
    var leftchannel = [];
    var rightchannel = [];
    var scriptprocessornode;
    var recording = false;
    var recordingLength = 0;
    var volume;
    var audioInput;
    var sampleRate = root.sampleRate || deviceSampleRate;
    var mimeType = root.mimeType || 'audio/wav';
    var isPCM = mimeType.indexOf('audio/pcm') > -1;
    var context;
    var numChannels = root.audioChannels || 2;
    this.record = function() {
        recording = true;
        // reset the buffers for the new recording
        leftchannel.length = rightchannel.length = 0;
        recordingLength = 0;
    };
    this.requestData = function() {
        if (isPaused) {
            return;
        }
        if (recordingLength === 0) {
            requestDataInvoked = false;
            return;
        }
        requestDataInvoked = true;
        // clone stuff
        var internalLeftChannel = leftchannel.slice(0);
        var internalRightChannel = rightchannel.slice(0);
        var internalRecordingLength = recordingLength;
        // reset the buffers for the new recording
        // (was `= []`, which only worked because ToUint32([]) coerces to 0;
        // use the intended numeric 0, matching record() above)
        leftchannel.length = rightchannel.length = 0;
        recordingLength = 0;
        requestDataInvoked = false;
        // we flat the left and right channels down
        var leftBuffer = mergeBuffers(internalLeftChannel, internalRecordingLength);
        var interleaved = leftBuffer;
        // we interleave both channels together
        if (numChannels === 2) {
            var rightBuffer = mergeBuffers(internalRightChannel, internalRecordingLength); // bug fixed via #70,#71
            interleaved = interleave(leftBuffer, rightBuffer);
        }
        if (isPCM) {
            // our final binary blob
            var blob = new Blob([convertoFloat32ToInt16(interleaved)], {
                type: 'audio/pcm'
            });
            console.debug('audio recorded blob size:', bytesToSize(blob.size));
            root.ondataavailable(blob);
            return;
        }
        // we create our wav file (44-byte RIFF/WAVE header + 16-bit samples)
        var buffer = new ArrayBuffer(44 + interleaved.length * 2);
        var view = new DataView(buffer);
        // RIFF chunk descriptor
        writeUTFBytes(view, 0, 'RIFF');
        // -8 (via #97)
        view.setUint32(4, 44 + interleaved.length * 2 - 8, true);
        writeUTFBytes(view, 8, 'WAVE');
        // FMT sub-chunk
        writeUTFBytes(view, 12, 'fmt ');
        view.setUint32(16, 16, true);
        view.setUint16(20, 1, true);
        // stereo (2 channels)
        view.setUint16(22, numChannels, true);
        view.setUint32(24, sampleRate, true);
        view.setUint32(28, sampleRate * numChannels * 2, true); // numChannels * 2 (via #71)
        view.setUint16(32, numChannels * 2, true);
        view.setUint16(34, 16, true);
        // data sub-chunk
        writeUTFBytes(view, 36, 'data');
        view.setUint32(40, interleaved.length * 2, true);
        // write the PCM samples
        var lng = interleaved.length;
        var index = 44;
        var volume = 1;
        for (var i = 0; i < lng; i++) {
            view.setInt16(index, interleaved[i] * (0x7FFF * volume), true);
            index += 2;
        }
        // our final binary blob
        var blob = new Blob([view], {
            type: 'audio/wav'
        });
        console.debug('audio recorded blob size:', bytesToSize(blob.size));
        root.ondataavailable(blob);
    };
    this.stop = function() {
        // we stop recording
        recording = false;
        this.requestData();
        audioInput.disconnect();
        this.onstop();
    };
    // Weave left/right samples into one L,R,L,R... Float32Array.
    function interleave(leftChannel, rightChannel) {
        var length = leftChannel.length + rightChannel.length;
        var result = new Float32Array(length);
        var inputIndex = 0;
        for (var index = 0; index < length;) {
            result[index++] = leftChannel[inputIndex];
            result[index++] = rightChannel[inputIndex];
            inputIndex++;
        }
        return result;
    }
    // Flatten an array of per-callback sample chunks into one contiguous buffer.
    function mergeBuffers(channelBuffer, recordingLength) {
        var result = new Float32Array(recordingLength);
        var offset = 0;
        var lng = channelBuffer.length;
        for (var i = 0; i < lng; i++) {
            var buffer = channelBuffer[i];
            result.set(buffer, offset);
            offset += buffer.length;
        }
        return result;
    }
    // Write an ASCII tag (e.g. 'RIFF') byte-by-byte into the DataView.
    function writeUTFBytes(view, offset, string) {
        var lng = string.length;
        for (var i = 0; i < lng; i++) {
            view.setUint8(offset + i, string.charCodeAt(i));
        }
    }
    function convertoFloat32ToInt16(buffer) {
        var l = buffer.length;
        var buf = new Int16Array(l)
        while (l--) {
            // Scale normalized [-1, 1] floats by 0x7FFF (32767) — the
            // previous 0xFFFF factor overflowed the Int16 range for any
            // |sample| > 0.5 and wrapped, producing distorted audio; this
            // also matches the WAV path above, which scales by 0x7FFF.
            buf[l] = buffer[l] * 0x7FFF; //convert to 16 bit
        }
        return buf.buffer
    }
    // creates the audio context
    var context = ObjectStore.AudioContextConstructor;
    // creates a gain node
    ObjectStore.VolumeGainNode = context.createGain();
    var volume = ObjectStore.VolumeGainNode;
    // creates an audio node from the microphone incoming stream
    ObjectStore.AudioInput = context.createMediaStreamSource(mediaStream);
    // creates an audio node from the microphone incoming stream
    var audioInput = ObjectStore.AudioInput;
    // connect the stream to the gain node
    audioInput.connect(volume);
    /* From the spec: This value controls how frequently the audioprocess event is
    dispatched and how many sample-frames need to be processed each call.
    Lower values for buffer size will result in a lower (better) latency.
    Higher values will be necessary to avoid audio breakup and glitches
    Legal values are 256, 512, 1024, 2048, 4096, 8192, and 16384.*/
    var bufferSize = root.bufferSize || 2048;
    if (root.bufferSize === 0) {
        bufferSize = 0;
    }
    if (context.createJavaScriptNode) {
        scriptprocessornode = context.createJavaScriptNode(bufferSize, numChannels, numChannels);
    } else if (context.createScriptProcessor) {
        scriptprocessornode = context.createScriptProcessor(bufferSize, numChannels, numChannels);
    } else {
        throw 'WebAudio API has no support on this browser.';
    }
    bufferSize = scriptprocessornode.bufferSize;
    console.debug('using audio buffer-size:', bufferSize);
    var requestDataInvoked = false;
    // sometimes "scriptprocessornode" disconnects from the destination-node
    // and there is no exception thrown in this case.
    // and obviously no further "ondataavailable" events will be emitted.
    // below global-scope variable is added to debug such unexpected but "rare" cases.
    window.scriptprocessornode = scriptprocessornode;
    if (numChannels === 1) {
        console.debug('All right-channels are skipped.');
    }
    var isPaused = false;
    this.pause = function() {
        isPaused = true;
    };
    this.resume = function() {
        isPaused = false;
    };
    this.onstop = function() {};
    // http://webaudio.github.io/web-audio-api/#the-scriptprocessornode-interface
    scriptprocessornode.onaudioprocess = function(e) {
        if (!recording || requestDataInvoked || isPaused) {
            return;
        }
        var left = e.inputBuffer.getChannelData(0);
        leftchannel.push(new Float32Array(left));
        if (numChannels === 2) {
            var right = e.inputBuffer.getChannelData(1);
            rightchannel.push(new Float32Array(right));
        }
        recordingLength += bufferSize;
    };
    volume.connect(scriptprocessornode);
    scriptprocessornode.connect(context.destination);
}
|
Web Audio implementation that captures raw PCM from a MediaStream and
encodes it as WAV (or raw PCM) blobs for StereoAudioRecorder.
@method
@memberof MediaStreamRecorder
@example
new StereoAudioRecorderHelper(stream, recorder).record();
|
StereoAudioRecorderHelper
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
// Weaves left/right sample buffers into a single L,R,L,R... sequence.
// Both channels are expected to hold the same number of samples.
function interleave(leftChannel, rightChannel) {
    var total = leftChannel.length + rightChannel.length;
    var merged = new Float32Array(total);
    var sample = 0;
    var write = 0;
    while (write < total) {
        merged[write++] = leftChannel[sample];
        merged[write++] = rightChannel[sample];
        sample += 1;
    }
    return merged;
}
|
Interleaves left/right sample buffers into a single L,R,L,R... sequence.
@param {Float32Array} leftChannel left-channel samples
@param {Float32Array} rightChannel right-channel samples
@returns {Float32Array} interleaved samples
|
interleave
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
// Flattens an array of per-callback sample chunks into one contiguous
// Float32Array of `recordingLength` samples.
function mergeBuffers(channelBuffer, recordingLength) {
    var merged = new Float32Array(recordingLength);
    var writeOffset = 0;
    channelBuffer.forEach(function(chunk) {
        merged.set(chunk, writeOffset);
        writeOffset += chunk.length;
    });
    return merged;
}
|
Flattens an array of per-callback sample chunks into one contiguous buffer.
@param {Array} channelBuffer array of Float32Array chunks
@param {number} recordingLength total number of samples
@returns {Float32Array} merged samples
|
mergeBuffers
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
// Writes each character's code unit as one byte into the DataView,
// used for the ASCII chunk tags of a WAV header ('RIFF', 'WAVE', ...).
function writeUTFBytes(view, offset, string) {
    var i = 0;
    while (i < string.length) {
        view.setUint8(offset + i, string.charCodeAt(i));
        i += 1;
    }
}
|
Writes an ASCII string byte-by-byte into a DataView (used for WAV chunk tags).
@param {DataView} view destination view
@param {number} offset byte offset to start writing
@param {string} string ASCII text such as "RIFF" or "WAVE"
|
writeUTFBytes
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
// Converts normalized Float32 PCM samples (expected range [-1, 1]) into a
// 16-bit signed PCM ArrayBuffer.
// Fix: the original multiplied by 0xFFFF (65535), so a full-scale sample of
// 1.0 became 65535, which wraps to -1 when stored in an Int16Array — severe
// distortion on loud audio. 16-bit signed PCM scales by 0x7FFF (32767);
// samples are clamped first so out-of-range input cannot wrap either.
function convertoFloat32ToInt16(buffer) {
    var l = buffer.length;
    var buf = new Int16Array(l);
    while (l--) {
        // clamp to [-1, 1], then scale to the signed 16-bit range
        var sample = Math.max(-1, Math.min(1, buffer[l]));
        buf[l] = sample * 0x7FFF;
    }
    return buf.buffer;
}
|
This method converts normalized Float32 PCM samples into a 16-bit signed PCM ArrayBuffer.
@method
@memberof MediaStreamRecorder
@example
var pcm16 = convertoFloat32ToInt16(float32Samples);
|
convertoFloat32ToInt16
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
// Records a video MediaStream to webm by delegating frame capture and
// encoding to WhammyRecorderHelper; fires ondataavailable every timeSlice ms.
function WhammyRecorder(mediaStream) {
    // void start(optional long timeSlice)
    // timestamp to fire "ondataavailable"
    this.start = function(timeSlice) {
        timeSlice = timeSlice || 1000;
        mediaRecorder = new WhammyRecorderHelper(mediaStream, this);
        // Forward user-set options (width, height, video, canvas, speed,
        // quality, ...) onto the helper before recording begins.
        for (var prop in this) {
            if (typeof this[prop] !== 'function') {
                mediaRecorder[prop] = this[prop];
            }
        }
        mediaRecorder.record();
        interval = setInterval(function() {
            mediaRecorder.requestData();
        }, timeSlice);
    };
    this.stop = function() {
        if (mediaRecorder) {
            mediaRecorder.stop();
            // Fix: the handle comes from setInterval, so clear it with
            // clearInterval (clearTimeout happens to work in browsers because
            // the handle pools are shared, but it is misleading).
            clearInterval(interval);
            this.onstop();
        }
    };
    this.onstop = function() {};
    this.clearOldRecordedFrames = function() {
        if (mediaRecorder) {
            mediaRecorder.clearOldRecordedFrames();
        }
    };
    this.pause = function() {
        if (!mediaRecorder) {
            return;
        }
        mediaRecorder.pause();
    };
    this.resume = function() {
        if (!mediaRecorder) {
            return;
        }
        mediaRecorder.resume();
    };
    this.ondataavailable = function() {};
    // Reference to the "WhammyRecorderHelper" instance doing the real work
    var mediaRecorder;
    var interval;
}
|
This constructor records a video MediaStream to webm via a canvas plus the Whammy encoder, firing ondataavailable on every timeSlice.
@method
@memberof MediaStreamRecorder
@example
var recorder = new WhammyRecorder(mediaStream); recorder.start(5000);
|
WhammyRecorder
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
// Worker object behind WhammyRecorder: paints the MediaStream (or a supplied
// HTMLVideoElement) onto a hidden canvas roughly every 10ms and pushes each
// snapshot ({duration, image: WebP data-URL}) into a Whammy.Video encoder.
// `root` is the owning WhammyRecorder; it copies user options (width, height,
// video, canvas, speed, quality) onto this instance before calling record().
function WhammyRecorderHelper(mediaStream, root) {
    // Starts capture. (timeSlice is unused here; slicing is driven by the
    // owner calling requestData() on an interval.)
    this.record = function(timeSlice) {
        // Default capture resolution.
        if (!this.width) {
            this.width = 320;
        }
        if (!this.height) {
            this.height = 240;
        }
        // NOTE(review): width/height were just defaulted above, so these inner
        // guards can never be true and the videoWidth/clientWidth fallbacks
        // (which also reference a bare `video` before it is assigned) are dead
        // code — confirm the intended ordering.
        if (this.video && this.video instanceof HTMLVideoElement) {
            if (!this.width) {
                this.width = video.videoWidth || video.clientWidth || 320;
            }
            if (!this.height) {
                this.height = video.videoHeight || video.clientHeight || 240;
            }
        }
        if (!this.video) {
            this.video = {
                width: this.width,
                height: this.height
            };
        }
        if (!this.canvas || !this.canvas.width || !this.canvas.height) {
            this.canvas = {
                width: this.width,
                height: this.height
            };
        }
        canvas.width = this.canvas.width;
        canvas.height = this.canvas.height;
        // setting defaults
        if (this.video && this.video instanceof HTMLVideoElement) {
            this.isHTMLObject = true;
            video = this.video.cloneNode();
        } else {
            video = document.createElement('video');
            video.src = URL.createObjectURL(mediaStream);
            video.width = this.video.width;
            video.height = this.video.height;
        }
        video.muted = true;
        video.play();
        lastTime = new Date().getTime();
        whammy = new Whammy.Video(root.speed, root.quality);
        console.log('canvas resolutions', canvas.width, '*', canvas.height);
        console.log('video width/height', video.width || canvas.width, '*', video.height || canvas.height);
        // Kick off the frame-grabbing loop.
        drawFrames();
    };
    // Drops everything captured so far without stopping the capture loop.
    this.clearOldRecordedFrames = function() {
        whammy.frames = [];
    };
    var requestDataInvoked = false;
    // Compiles the frames captured so far into a webm blob and hands it to
    // root.ondataavailable; called on every timeSlice tick and on stop().
    this.requestData = function() {
        if (isPaused) {
            return;
        }
        if (!whammy.frames.length) {
            requestDataInvoked = false;
            return;
        }
        requestDataInvoked = true;
        // clone stuff
        var internalFrames = whammy.frames.slice(0);
        // reset the frames for the new recording
        whammy.frames = dropBlackFrames(internalFrames, -1);
        whammy.compile(function(whammyBlob) {
            root.ondataavailable(whammyBlob);
            console.debug('video recorded blob size:', bytesToSize(whammyBlob.size));
        });
        whammy.frames = [];
        requestDataInvoked = false;
    };
    var isOnStartedDrawingNonBlankFramesInvoked = false;

    // Main capture loop: samples the <video> onto the canvas and records each
    // snapshot along with the elapsed wall-clock duration since the last one.
    function drawFrames() {
        if (isPaused) {
            // Keep the clock fresh so frames after resume() get sane durations.
            lastTime = new Date().getTime();
            setTimeout(drawFrames, 500);
            return;
        }
        if (isStopDrawing) {
            return;
        }
        // Back off while requestData() is compiling the captured frames.
        if (requestDataInvoked) {
            return setTimeout(drawFrames, 100);
        }
        var duration = new Date().getTime() - lastTime;
        if (!duration) {
            // Zero elapsed ms — retry rather than record a 0-length frame.
            return drawFrames();
        }
        // via webrtc-experiment#206, by Jack i.e. @Seymourr
        lastTime = new Date().getTime();
        if (!self.isHTMLObject && video.paused) {
            video.play(); // Android
        }
        context.drawImage(video, 0, 0, canvas.width, canvas.height);
        if (!isStopDrawing) {
            whammy.frames.push({
                duration: duration,
                image: canvas.toDataURL('image/webp')
            });
        }
        // NOTE(review): isBlankFrame() below returns false when the frame is
        // all-black (see its final condition), so this one-time callback fires
        // on the first *blank* frame — verify against dropBlackFrames, which
        // treats the identical condition as "frame is black".
        if (!isOnStartedDrawingNonBlankFramesInvoked && !isBlankFrame(whammy.frames[whammy.frames.length - 1])) {
            isOnStartedDrawingNonBlankFramesInvoked = true;
            root.onStartedDrawingNonBlankFrames();
        }
        setTimeout(drawFrames, 10);
    }
    var isStopDrawing = false;
    this.stop = function() {
        isStopDrawing = true;
        this.requestData();
        this.onstop();
    };
    // Offscreen work surface shared by the capture loop and frame analysis.
    var canvas = document.createElement('canvas');
    var context = canvas.getContext('2d');
    var video;
    var lastTime;
    var whammy;
    var self = this;

    // Heuristic blank-frame test: counts pixels whose RGB distance from black
    // falls within pixTolerance, then compares the remainder against
    // frameTolerance. Both tolerances default to 0 (exact-black match only).
    function isBlankFrame(frame, _pixTolerance, _frameTolerance) {
        var localCanvas = document.createElement('canvas');
        localCanvas.width = canvas.width;
        localCanvas.height = canvas.height;
        var context2d = localCanvas.getContext('2d');
        var sampleColor = {
            r: 0,
            g: 0,
            b: 0
        };
        // Length of the RGB-cube diagonal: the largest possible distance.
        var maxColorDifference = Math.sqrt(
            Math.pow(255, 2) +
            Math.pow(255, 2) +
            Math.pow(255, 2)
        );
        var pixTolerance = _pixTolerance && _pixTolerance >= 0 && _pixTolerance <= 1 ? _pixTolerance : 0;
        var frameTolerance = _frameTolerance && _frameTolerance >= 0 && _frameTolerance <= 1 ? _frameTolerance : 0;
        var matchPixCount, endPixCheck, maxPixCount;
        // NOTE(review): the Image is drawn synchronously right after setting
        // src; data-URL decoding is not guaranteed to be complete here.
        var image = new Image();
        image.src = frame.image;
        context2d.drawImage(image, 0, 0, canvas.width, canvas.height);
        var imageData = context2d.getImageData(0, 0, canvas.width, canvas.height);
        matchPixCount = 0;
        endPixCheck = imageData.data.length;
        maxPixCount = imageData.data.length / 4;
        for (var pix = 0; pix < endPixCheck; pix += 4) {
            var currentColor = {
                r: imageData.data[pix],
                g: imageData.data[pix + 1],
                b: imageData.data[pix + 2]
            };
            var colorDifference = Math.sqrt(
                Math.pow(currentColor.r - sampleColor.r, 2) +
                Math.pow(currentColor.g - sampleColor.g, 2) +
                Math.pow(currentColor.b - sampleColor.b, 2)
            );
            // difference in color it is difference in color vectors (r1,g1,b1) <=> (r2,g2,b2)
            if (colorDifference <= maxColorDifference * pixTolerance) {
                matchPixCount++;
            }
        }
        // NOTE(review): the true branch means the frame matched black, yet it
        // returns false — the result looks inverted relative to the name.
        if (maxPixCount - matchPixCount <= maxPixCount * frameTolerance) {
            return false;
        } else {
            return true;
        }
    }

    // Removes leading black frames from `_frames`. _framesToCheck === -1 means
    // "drop only until the first non-black frame"; a positive value limits how
    // many leading frames are examined. Tolerances as in isBlankFrame.
    // Always returns at least one frame.
    function dropBlackFrames(_frames, _framesToCheck, _pixTolerance, _frameTolerance) {
        var localCanvas = document.createElement('canvas');
        localCanvas.width = canvas.width;
        localCanvas.height = canvas.height;
        var context2d = localCanvas.getContext('2d');
        var resultFrames = [];
        var checkUntilNotBlack = _framesToCheck === -1;
        var endCheckFrame = (_framesToCheck && _framesToCheck > 0 && _framesToCheck <= _frames.length) ?
            _framesToCheck : _frames.length;
        var sampleColor = {
            r: 0,
            g: 0,
            b: 0
        };
        var maxColorDifference = Math.sqrt(
            Math.pow(255, 2) +
            Math.pow(255, 2) +
            Math.pow(255, 2)
        );
        var pixTolerance = _pixTolerance && _pixTolerance >= 0 && _pixTolerance <= 1 ? _pixTolerance : 0;
        var frameTolerance = _frameTolerance && _frameTolerance >= 0 && _frameTolerance <= 1 ? _frameTolerance : 0;
        var doNotCheckNext = false;
        for (var f = 0; f < endCheckFrame; f++) {
            var matchPixCount, endPixCheck, maxPixCount;
            if (!doNotCheckNext) {
                var image = new Image();
                image.src = _frames[f].image;
                context2d.drawImage(image, 0, 0, canvas.width, canvas.height);
                var imageData = context2d.getImageData(0, 0, canvas.width, canvas.height);
                matchPixCount = 0;
                endPixCheck = imageData.data.length;
                maxPixCount = imageData.data.length / 4;
                for (var pix = 0; pix < endPixCheck; pix += 4) {
                    var currentColor = {
                        r: imageData.data[pix],
                        g: imageData.data[pix + 1],
                        b: imageData.data[pix + 2]
                    };
                    var colorDifference = Math.sqrt(
                        Math.pow(currentColor.r - sampleColor.r, 2) +
                        Math.pow(currentColor.g - sampleColor.g, 2) +
                        Math.pow(currentColor.b - sampleColor.b, 2)
                    );
                    // difference in color it is difference in color vectors (r1,g1,b1) <=> (r2,g2,b2)
                    if (colorDifference <= maxColorDifference * pixTolerance) {
                        matchPixCount++;
                    }
                }
            }
            // The && short-circuit keeps the undefined pix counts from being
            // read once doNotCheckNext is set.
            if (!doNotCheckNext && maxPixCount - matchPixCount <= maxPixCount * frameTolerance) {
                // console.log('removed black frame : ' + f + ' ; frame duration ' + _frames[f].duration);
            } else {
                // console.log('frame is passed : ' + f);
                if (checkUntilNotBlack) {
                    doNotCheckNext = true;
                }
                resultFrames.push(_frames[f]);
            }
        }
        resultFrames = resultFrames.concat(_frames.slice(endCheckFrame));
        if (resultFrames.length <= 0) {
            // at least one last frame should be available for next manipulation
            // if total duration of all frames will be < 1000 than ffmpeg doesn't work well...
            resultFrames.push(_frames[_frames.length - 1]);
        }
        return resultFrames;
    }
    var isPaused = false;
    this.pause = function() {
        isPaused = true;
    };
    this.resume = function() {
        isPaused = false;
    };
    this.onstop = function() {};
}
|
This method resets currently recorded data.
@method
@memberof MediaStreamRecorder
@example
recorder.clearRecordedData();
|
WhammyRecorderHelper
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
// Capture-loop callback used by WhammyRecorderHelper: samples the <video>
// onto the canvas roughly every 10ms and appends {duration, image} entries to
// whammy.frames. Relies on closure state: isPaused, isStopDrawing,
// requestDataInvoked, lastTime, self, video, context, canvas, whammy, root,
// isOnStartedDrawingNonBlankFramesInvoked.
function drawFrames() {
    if (isPaused) {
        // Keep the clock fresh so frames after resume() get sane durations.
        lastTime = new Date().getTime();
        setTimeout(drawFrames, 500);
        return;
    }
    if (isStopDrawing) {
        return;
    }
    // Back off while requestData() is compiling the captured frames.
    if (requestDataInvoked) {
        return setTimeout(drawFrames, 100);
    }
    var duration = new Date().getTime() - lastTime;
    if (!duration) {
        // Zero elapsed ms — retry rather than record a 0-length frame.
        return drawFrames();
    }
    // via webrtc-experiment#206, by Jack i.e. @Seymourr
    lastTime = new Date().getTime();
    if (!self.isHTMLObject && video.paused) {
        video.play(); // Android
    }
    context.drawImage(video, 0, 0, canvas.width, canvas.height);
    if (!isStopDrawing) {
        whammy.frames.push({
            duration: duration,
            image: canvas.toDataURL('image/webp')
        });
    }
    // One-time notification, gated on isBlankFrame's verdict for the newest frame.
    if (!isOnStartedDrawingNonBlankFramesInvoked && !isBlankFrame(whammy.frames[whammy.frames.length - 1])) {
        isOnStartedDrawingNonBlankFramesInvoked = true;
        root.onStartedDrawingNonBlankFrames();
    }
    setTimeout(drawFrames, 10);
}
|
This method resets currently recorded data.
@method
@memberof MediaStreamRecorder
@example
recorder.clearRecordedData();
|
drawFrames
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
// Decides whether a captured frame is visually blank (effectively all black).
// `frame` is a {duration, image} entry whose `image` is a WebP data-URL; it is
// rasterized at the shared canvas dimensions and each pixel's RGB distance
// from black is compared against the tolerances.
// @param {object} frame - {image: dataURL, duration: ms}
// @param {number} _pixTolerance - 0..1 fraction of the max RGB distance a
//     pixel may differ from black and still count as matching (default 0).
// @param {number} _frameTolerance - 0..1 fraction of non-matching pixels the
//     frame may contain and still be considered blank (default 0).
// @returns {boolean} true when the frame is blank.
function isBlankFrame(frame, _pixTolerance, _frameTolerance) {
    var localCanvas = document.createElement('canvas');
    localCanvas.width = canvas.width;
    localCanvas.height = canvas.height;
    var context2d = localCanvas.getContext('2d');
    var sampleColor = {
        r: 0,
        g: 0,
        b: 0
    };
    // Length of the RGB-cube diagonal: the largest possible color distance.
    var maxColorDifference = Math.sqrt(
        Math.pow(255, 2) +
        Math.pow(255, 2) +
        Math.pow(255, 2)
    );
    var pixTolerance = _pixTolerance && _pixTolerance >= 0 && _pixTolerance <= 1 ? _pixTolerance : 0;
    var frameTolerance = _frameTolerance && _frameTolerance >= 0 && _frameTolerance <= 1 ? _frameTolerance : 0;
    var matchPixCount, endPixCheck, maxPixCount;
    var image = new Image();
    image.src = frame.image;
    context2d.drawImage(image, 0, 0, canvas.width, canvas.height);
    var imageData = context2d.getImageData(0, 0, canvas.width, canvas.height);
    matchPixCount = 0;
    endPixCheck = imageData.data.length;
    maxPixCount = imageData.data.length / 4;
    for (var pix = 0; pix < endPixCheck; pix += 4) {
        var currentColor = {
            r: imageData.data[pix],
            g: imageData.data[pix + 1],
            b: imageData.data[pix + 2]
        };
        var colorDifference = Math.sqrt(
            Math.pow(currentColor.r - sampleColor.r, 2) +
            Math.pow(currentColor.g - sampleColor.g, 2) +
            Math.pow(currentColor.b - sampleColor.b, 2)
        );
        // difference in color it is difference in color vectors (r1,g1,b1) <=> (r2,g2,b2)
        if (colorDifference <= maxColorDifference * pixTolerance) {
            matchPixCount++;
        }
    }
    // Fix: the original returned false when nearly every pixel matched black
    // and true otherwise, inverting the function's meaning. dropBlackFrames
    // treats this exact condition as "frame is black", and the caller uses
    // !isBlankFrame(...) to fire onStartedDrawingNonBlankFrames, which with
    // the inverted result fired on the first *blank* frame. Return true when
    // the frame is blank.
    return maxPixCount - matchPixCount <= maxPixCount * frameTolerance;
}
|
This method resets currently recorded data.
@method
@memberof MediaStreamRecorder
@example
recorder.clearRecordedData();
|
isBlankFrame
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
// Removes leading black frames from `_frames`. _framesToCheck === -1 means
// "drop only until the first non-black frame is found"; a positive value
// limits how many leading frames are examined. _pixTolerance (0..1) is the
// fraction of the max RGB distance a pixel may differ from black and still
// count as matching; _frameTolerance (0..1) is the fraction of non-matching
// pixels a frame may contain and still be dropped as black. Always returns at
// least one frame. Uses the enclosing scope's shared `canvas` for dimensions.
function dropBlackFrames(_frames, _framesToCheck, _pixTolerance, _frameTolerance) {
    var localCanvas = document.createElement('canvas');
    localCanvas.width = canvas.width;
    localCanvas.height = canvas.height;
    var context2d = localCanvas.getContext('2d');
    var resultFrames = [];
    var checkUntilNotBlack = _framesToCheck === -1;
    var endCheckFrame = (_framesToCheck && _framesToCheck > 0 && _framesToCheck <= _frames.length) ?
        _framesToCheck : _frames.length;
    var sampleColor = {
        r: 0,
        g: 0,
        b: 0
    };
    // Length of the RGB-cube diagonal: the largest possible color distance.
    var maxColorDifference = Math.sqrt(
        Math.pow(255, 2) +
        Math.pow(255, 2) +
        Math.pow(255, 2)
    );
    var pixTolerance = _pixTolerance && _pixTolerance >= 0 && _pixTolerance <= 1 ? _pixTolerance : 0;
    var frameTolerance = _frameTolerance && _frameTolerance >= 0 && _frameTolerance <= 1 ? _frameTolerance : 0;
    var doNotCheckNext = false;
    for (var f = 0; f < endCheckFrame; f++) {
        var matchPixCount, endPixCheck, maxPixCount;
        if (!doNotCheckNext) {
            var image = new Image();
            image.src = _frames[f].image;
            context2d.drawImage(image, 0, 0, canvas.width, canvas.height);
            var imageData = context2d.getImageData(0, 0, canvas.width, canvas.height);
            matchPixCount = 0;
            endPixCheck = imageData.data.length;
            maxPixCount = imageData.data.length / 4;
            for (var pix = 0; pix < endPixCheck; pix += 4) {
                var currentColor = {
                    r: imageData.data[pix],
                    g: imageData.data[pix + 1],
                    b: imageData.data[pix + 2]
                };
                var colorDifference = Math.sqrt(
                    Math.pow(currentColor.r - sampleColor.r, 2) +
                    Math.pow(currentColor.g - sampleColor.g, 2) +
                    Math.pow(currentColor.b - sampleColor.b, 2)
                );
                // difference in color it is difference in color vectors (r1,g1,b1) <=> (r2,g2,b2)
                if (colorDifference <= maxColorDifference * pixTolerance) {
                    matchPixCount++;
                }
            }
        }
        // The && short-circuit keeps the undefined pix counts from being read
        // once doNotCheckNext has been set.
        if (!doNotCheckNext && maxPixCount - matchPixCount <= maxPixCount * frameTolerance) {
            // console.log('removed black frame : ' + f + ' ; frame duration ' + _frames[f].duration);
        } else {
            // console.log('frame is passed : ' + f);
            if (checkUntilNotBlack) {
                doNotCheckNext = true;
            }
            resultFrames.push(_frames[f]);
        }
    }
    // Frames beyond the checked prefix are kept unconditionally.
    resultFrames = resultFrames.concat(_frames.slice(endCheckFrame));
    if (resultFrames.length <= 0) {
        // at least one last frame should be available for next manipulation
        // if total duration of all frames will be < 1000 than ffmpeg doesn't work well...
        resultFrames.push(_frames[_frames.length - 1]);
    }
    return resultFrames;
}
|
This method resets currently recorded data.
@method
@memberof MediaStreamRecorder
@example
recorder.clearRecordedData();
|
dropBlackFrames
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
// Records a video MediaStream as an animated GIF via the external GIFEncoder
// library (https://cdn.webrtc-experiment.com/gif-recorder.js), sampling the
// stream through a hidden <video> + <canvas> at roughly 10 fps.
function GifRecorder(mediaStream) {
    if (typeof GIFEncoder === 'undefined') {
        throw 'Please link: https://cdn.webrtc-experiment.com/gif-recorder.js';
    }
    // void start(optional long timeSlice)
    // timestamp to fire "ondataavailable"
    this.start = function(timeSlice) {
        timeSlice = timeSlice || 1000;
        var imageWidth = this.videoWidth || 320;
        var imageHeight = this.videoHeight || 240;
        canvas.width = video.width = imageWidth;
        canvas.height = video.height = imageHeight;
        // external library to record as GIF images
        gifEncoder = new GIFEncoder();
        // void setRepeat(int iter)
        // Sets the number of times the set of GIF frames should be played.
        // Default is 1; 0 means play indefinitely.
        gifEncoder.setRepeat(0);
        // void setFrameRate(Number fps)
        // Sets frame rate in frames per second.
        // Equivalent to setDelay(1000/fps).
        // Using "setDelay" instead of "setFrameRate"
        gifEncoder.setDelay(this.frameRate || this.speed || 200);
        // void setQuality(int quality)
        // Sets quality of color quantization (conversion of images to the
        // maximum 256 colors allowed by the GIF specification).
        // Lower values (minimum = 1) produce better colors,
        // but slow processing significantly. 10 is the default,
        // and produces good color mapping at reasonable speeds.
        // Values greater than 20 do not yield significant improvements in speed.
        gifEncoder.setQuality(this.quality || 1);
        // Boolean start()
        // This writes the GIF Header and returns false if it fails.
        gifEncoder.start();
        startTime = Date.now();

        // rAF callback that throttles GIF capture to roughly 10 fps.
        function drawVideoFrame(time) {
            if (isPaused) {
                setTimeout(drawVideoFrame, 500, time);
                return;
            }
            lastAnimationFrame = requestAnimationFrame(drawVideoFrame);
            // Fix: the original compared `typeof lastFrameTime === undefined`;
            // typeof always yields a string, so the check was always false.
            // Compare against the string 'undefined' so the first tick seeds
            // lastFrameTime.
            if (typeof lastFrameTime === 'undefined') {
                lastFrameTime = time;
            }
            // ~10 fps
            if (time - lastFrameTime < 90) {
                return;
            }
            if (video.paused) {
                video.play(); // Android
            }
            context.drawImage(video, 0, 0, imageWidth, imageHeight);
            gifEncoder.addFrame(context);
            lastFrameTime = time;
        }
        lastAnimationFrame = requestAnimationFrame(drawVideoFrame);
        timeout = setTimeout(doneRecording, timeSlice);
    };
    // Finalizes the GIF: wraps the encoder's byte stream in an image/gif Blob,
    // emits it via ondataavailable, then empties the encoder's buffer.
    function doneRecording() {
        endTime = Date.now();
        var gifBlob = new Blob([new Uint8Array(gifEncoder.stream().bin)], {
            type: 'image/gif'
        });
        self.ondataavailable(gifBlob);
        // todo: find a way to clear old recorded blobs
        gifEncoder.stream().bin = [];
    }
    this.stop = function() {
        if (lastAnimationFrame) {
            cancelAnimationFrame(lastAnimationFrame);
            clearTimeout(timeout);
            doneRecording();
            this.onstop();
        }
    };
    // (the original assigned this.onstop twice; a single assignment suffices)
    this.onstop = function() {};
    var isPaused = false;
    this.pause = function() {
        isPaused = true;
    };
    this.resume = function() {
        isPaused = false;
    };
    this.ondataavailable = function() {};
    // Reference to itself
    var self = this;
    var canvas = document.createElement('canvas');
    var context = canvas.getContext('2d');
    var video = document.createElement('video');
    video.muted = true;
    video.autoplay = true;
    video.src = URL.createObjectURL(mediaStream);
    video.play();
    var lastAnimationFrame = null;
    var startTime, endTime, lastFrameTime;
    var gifEncoder;
    var timeout;
}
|
This constructor records a video MediaStream as an animated GIF using the external GIFEncoder library.
@method
@memberof MediaStreamRecorder
@example
var recorder = new GifRecorder(mediaStream); recorder.start(5000);
|
GifRecorder
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
// rAF callback that throttles GIF capture to roughly 10 fps and feeds canvas
// snapshots of the <video> to the encoder. Relies on closure state: isPaused,
// lastAnimationFrame, lastFrameTime, video, context, gifEncoder, imageWidth,
// imageHeight.
function drawVideoFrame(time) {
    if (isPaused) {
        setTimeout(drawVideoFrame, 500, time);
        return;
    }
    lastAnimationFrame = requestAnimationFrame(drawVideoFrame);
    // Fix: the original compared `typeof lastFrameTime === undefined`; typeof
    // always yields a string, so the check was always false. Compare against
    // the string 'undefined' so the first tick seeds lastFrameTime.
    if (typeof lastFrameTime === 'undefined') {
        lastFrameTime = time;
    }
    // ~10 fps
    if (time - lastFrameTime < 90) {
        return;
    }
    if (video.paused) {
        video.play(); // Android
    }
    context.drawImage(video, 0, 0, imageWidth, imageHeight);
    gifEncoder.addFrame(context);
    lastFrameTime = time;
}
|
This callback captures one video frame per animation tick (throttled to roughly 10 fps) and adds it to the GIF encoder.
@method
@memberof MediaStreamRecorder
@example
requestAnimationFrame(drawVideoFrame);
|
drawVideoFrame
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
// Finalizes the GIF: snapshots the end time, wraps the encoder's byte stream
// in an image/gif Blob, emits it through ondataavailable, then empties the
// encoder's buffer so the same bytes are not re-emitted. Relies on closure
// state: endTime, gifEncoder, self.
function doneRecording() {
    endTime = Date.now();
    var recordedBytes = new Uint8Array(gifEncoder.stream().bin);
    var recordedGif = new Blob([recordedBytes], {
        type: 'image/gif'
    });
    self.ondataavailable(recordedGif);
    // todo: find a way to clear old recorded blobs
    gifEncoder.stream().bin = [];
}
|
This method resets currently recorded data.
@method
@memberof MediaStreamRecorder
@example
recorder.clearRecordedData();
|
doneRecording
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
// Whammy encoder state holder. `duration` is a frame rate (frames per
// second); it is converted to a per-frame display time in milliseconds.
// @param {number} duration - frames per second (treated as 1 when falsy).
// @param {number} quality - WebP quality; defaults to 0.8 when falsy.
function WhammyVideo(duration, quality) {
    this.frames = [];
    this.duration = 1000 / (duration || 1);
    this.quality = quality || 0.8;
}
|
Whammy is a standalone class used by {@link RecordRTC} to bring video recording in Chrome. It is written by {@link https://github.com/antimatter15|antimatter15}
@summary A real time javascript webm encoder based on a canvas hack.
@typedef Whammy
@class
@example
var recorder = new Whammy().Video(15);
recorder.add(context || canvas || dataURL);
var output = recorder.compile();
|
WhammyVideo
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
// Spawns a Web Worker whose body is the given named function's source plus an
// onmessage trampoline that invokes it with each received message's data.
// The Blob URL is revoked immediately after the Worker is constructed.
function processInWebWorker(_function) {
    var workerSource = _function.toString() +
        'this.onmessage = function (e) {' + _function.name + '(e.data);}';
    var blobURL = URL.createObjectURL(new Blob([workerSource], {
        type: 'application/javascript'
    }));
    var webWorker = new Worker(blobURL);
    URL.revokeObjectURL(blobURL);
    return webWorker;
}
|
Spawns a Web Worker from the given function's source code, wiring the worker's onmessage handler to invoke that function with the received data.
@method
@memberof Whammy
@example
var worker = processInWebWorker(whammyInWebWorker);
worker.postMessage(frames);
@param {Function} _function - Named function to run inside the worker.
@returns {Worker} The spawned Web Worker.
|
processInWebWorker
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
// Runs inside a Web Worker (see processInWebWorker): converts an array of
// {image: <WebP data-URL>, duration} frames into a webm Blob and posts it
// back to the page via postMessage.
function whammyInWebWorker(frames) {
    // Builds the complete EBML/webm document from parsed WebP frames.
    function ArrayToWebM(frames) {
        var info = checkFrames(frames);
        if (!info) {
            return [];
        }
        // Upper bound on a single cluster's duration, in milliseconds.
        var clusterMaxDuration = 30000;
        var EBML = [{
            'id': 0x1a45dfa3, // EBML
            'data': [{
                'data': 1,
                'id': 0x4286 // EBMLVersion
            }, {
                'data': 1,
                'id': 0x42f7 // EBMLReadVersion
            }, {
                'data': 4,
                'id': 0x42f2 // EBMLMaxIDLength
            }, {
                'data': 8,
                'id': 0x42f3 // EBMLMaxSizeLength
            }, {
                'data': 'webm',
                'id': 0x4282 // DocType
            }, {
                'data': 2,
                'id': 0x4287 // DocTypeVersion
            }, {
                'data': 2,
                'id': 0x4285 // DocTypeReadVersion
            }]
        }, {
            'id': 0x18538067, // Segment
            'data': [{
                'id': 0x1549a966, // Info
                'data': [{
                    'data': 1e6, //do things in millisecs (num of nanosecs for duration scale)
                    'id': 0x2ad7b1 // TimecodeScale
                }, {
                    'data': 'whammy',
                    'id': 0x4d80 // MuxingApp
                }, {
                    'data': 'whammy',
                    'id': 0x5741 // WritingApp
                }, {
                    'data': doubleToString(info.duration),
                    'id': 0x4489 // Duration
                }]
            }, {
                'id': 0x1654ae6b, // Tracks
                'data': [{
                    'id': 0xae, // TrackEntry
                    'data': [{
                        'data': 1,
                        'id': 0xd7 // TrackNumber
                    }, {
                        'data': 1,
                        'id': 0x73c5 // TrackUID
                    }, {
                        'data': 0,
                        'id': 0x9c // FlagLacing
                    }, {
                        'data': 'und',
                        'id': 0x22b59c // Language
                    }, {
                        'data': 'V_VP8',
                        'id': 0x86 // CodecID
                    }, {
                        'data': 'VP8',
                        'id': 0x258688 // CodecName
                    }, {
                        'data': 1,
                        'id': 0x83 // TrackType
                    }, {
                        'id': 0xe0, // Video
                        'data': [{
                            'data': info.width,
                            'id': 0xb0 // PixelWidth
                        }, {
                            'data': info.height,
                            'id': 0xba // PixelHeight
                        }]
                    }]
                }]
            }]
        }];
        //Generate clusters (max duration)
        var frameNumber = 0;
        var clusterTimecode = 0;
        while (frameNumber < frames.length) {
            var clusterFrames = [];
            var clusterDuration = 0;
            do {
                clusterFrames.push(frames[frameNumber]);
                clusterDuration += frames[frameNumber].duration;
                frameNumber++;
            } while (frameNumber < frames.length && clusterDuration < clusterMaxDuration);
            var clusterCounter = 0;
            var cluster = {
                'id': 0x1f43b675, // Cluster
                'data': getClusterData(clusterTimecode, clusterCounter, clusterFrames)
            }; //Add cluster to segment
            EBML[1].data.push(cluster);
            clusterTimecode += clusterDuration;
        }
        return generateEBML(EBML);
    }
    // Cluster payload: a Timecode element followed by one SimpleBlock per
    // frame; clusterCounter accumulates each block's timecode within the cluster.
    function getClusterData(clusterTimecode, clusterCounter, clusterFrames) {
        return [{
            'data': clusterTimecode,
            'id': 0xe7 // Timecode
        }].concat(clusterFrames.map(function(webp) {
            var block = makeSimpleBlock({
                discardable: 0,
                frame: webp.data.slice(4),
                invisible: 0,
                keyframe: 1,
                lacing: 0,
                trackNum: 1,
                timecode: Math.round(clusterCounter)
            });
            clusterCounter += webp.duration;
            return {
                data: block,
                id: 0xa3
            };
        }));
    }
    // sums the lengths of all the frames and gets the duration; returns
    // undefined (after posting an error message) when there are no frames.
    function checkFrames(frames) {
        if (!frames[0]) {
            postMessage({
                error: 'Something went wrong. Maybe WebP format is not supported in the current browser.'
            });
            return;
        }
        var width = frames[0].width,
            height = frames[0].height,
            duration = frames[0].duration;
        for (var i = 1; i < frames.length; i++) {
            duration += frames[i].duration;
        }
        return {
            duration: duration,
            width: width,
            height: height
        };
    }
    // Minimal big-endian byte encoding of a non-negative integer
    // (0 yields an empty buffer).
    function numToBuffer(num) {
        var parts = [];
        while (num > 0) {
            parts.push(num & 0xff);
            num = num >> 8;
        }
        return new Uint8Array(parts.reverse());
    }
    // One byte per UTF-16 code unit (inputs here are binary strings).
    function strToBuffer(str) {
        return new Uint8Array(str.split('').map(function(e) {
            return e.charCodeAt(0);
        }));
    }
    // Packs a binary-digit string into bytes, left-padding to a multiple of 8.
    function bitsToBuffer(bits) {
        var data = [];
        var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
        bits = pad + bits;
        for (var i = 0; i < bits.length; i += 8) {
            data.push(parseInt(bits.substr(i, 8), 2));
        }
        return new Uint8Array(data);
    }
    // Serializes the nested {id, data} tree into EBML: for each element emits
    // the id bytes, a variable-length size field, then the payload.
    function generateEBML(json) {
        var ebml = [];
        for (var i = 0; i < json.length; i++) {
            var data = json[i].data;
            if (typeof data === 'object') {
                data = generateEBML(data);
            }
            if (typeof data === 'number') {
                data = bitsToBuffer(data.toString(2));
            }
            if (typeof data === 'string') {
                data = strToBuffer(data);
            }
            var len = data.size || data.byteLength || data.length;
            var zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8);
            var sizeToString = len.toString(2);
            var padded = (new Array((zeroes * 7 + 7 + 1) - sizeToString.length)).join('0') + sizeToString;
            var size = (new Array(zeroes)).join('0') + '1' + padded;
            ebml.push(numToBuffer(json[i].id));
            ebml.push(bitsToBuffer(size));
            ebml.push(data);
        }
        return new Blob(ebml, {
            type: 'video/webm'
        });
    }
    // Legacy string-returning variant of bitsToBuffer; appears unused within
    // this worker.
    function toBinStrOld(bits) {
        var data = '';
        var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
        bits = pad + bits;
        for (var i = 0; i < bits.length; i += 8) {
            data += String.fromCharCode(parseInt(bits.substr(i, 8), 2));
        }
        return data;
    }
    // Builds a Matroska SimpleBlock: 1-byte track vint, 2-byte big-endian
    // relative timecode, flags byte, then the raw VP8 frame data.
    function makeSimpleBlock(data) {
        var flags = 0;
        if (data.keyframe) {
            flags |= 128;
        }
        if (data.invisible) {
            flags |= 8;
        }
        if (data.lacing) {
            flags |= (data.lacing << 1);
        }
        if (data.discardable) {
            flags |= 1;
        }
        if (data.trackNum > 127) {
            throw 'TrackNumber > 127 not supported';
        }
        var out = [data.trackNum | 0x80, data.timecode >> 8, data.timecode & 0xff, flags].map(function(e) {
            return String.fromCharCode(e);
        }).join('') + data.frame;
        return out;
    }
    // Extracts VP8 dimensions and payload from a parsed RIFF/WebP container.
    function parseWebP(riff) {
        var VP8 = riff.RIFF[0].WEBP[0];
        var frameStart = VP8.indexOf('\x9d\x01\x2a'); // A VP8 keyframe starts with the 0x9d012a header
        for (var i = 0, c = []; i < 4; i++) {
            c[i] = VP8.charCodeAt(frameStart + 3 + i);
        }
        var width, height, tmp;
        //the code below is literally copied verbatim from the bitstream spec
        tmp = (c[1] << 8) | c[0];
        width = tmp & 0x3FFF;
        tmp = (c[3] << 8) | c[2];
        height = tmp & 0x3FFF;
        return {
            width: width,
            height: height,
            data: VP8,
            riff: riff
        };
    }
    // Reads a 32-bit big-endian chunk length from a binary string at `offset`.
    function getStrLength(string, offset) {
        return parseInt(string.substr(offset + 4, 4).split('').map(function(i) {
            var unpadded = i.charCodeAt(0).toString(2);
            return (new Array(8 - unpadded.length + 1)).join('0') + unpadded;
        }).join(''), 2);
    }
    // Recursive RIFF parser over a binary string; container chunks (RIFF/LIST)
    // are parsed recursively, leaf chunks stored as raw strings.
    function parseRIFF(string) {
        var offset = 0;
        var chunks = {};
        while (offset < string.length) {
            var id = string.substr(offset, 4);
            var len = getStrLength(string, offset);
            var data = string.substr(offset + 4 + 4, len);
            offset += 4 + 4 + len;
            chunks[id] = chunks[id] || [];
            if (id === 'RIFF' || id === 'LIST') {
                chunks[id].push(parseRIFF(data));
            } else {
                chunks[id].push(data);
            }
        }
        return chunks;
    }
    // IEEE-754 float64 bytes as an 8-character big-endian binary string.
    function doubleToString(num) {
        return [].slice.call(
            new Uint8Array((new Float64Array([num])).buffer), 0).map(function(e) {
            return String.fromCharCode(e);
        }).reverse().join('');
    }
    // frame.image.slice(23) strips the 23-character 'data:image/webp;base64,'
    // prefix before base64-decoding.
    // NOTE(review): `new` before ArrayToWebM is spurious — the function
    // returns an object (a Blob), so `new` just discards the fresh `this`.
    var webm = new ArrayToWebM(frames.map(function(frame) {
        var webp = parseWebP(parseRIFF(atob(frame.image.slice(23))));
        webp.duration = frame.duration;
        return webp;
    }));
    postMessage(webm);
}
|
Worker-side entry point: converts an array of {image, duration} WebP frames into a webm Blob and posts it back to the page via postMessage.
@method
@memberof Whammy
@example
worker.postMessage(frames); // frames: [{image: 'data:image/webp;base64,...', duration: 100}]
@param {Array} frames - Captured frames (WebP data-URLs with durations in milliseconds).
|
whammyInWebWorker
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
// Builds a complete EBML/webm document (as a Blob, via generateEBML) from an
// array of parsed WebP frames ({width, height, duration, data}); frames are
// grouped into Clusters capped at clusterMaxDuration milliseconds each.
function ArrayToWebM(frames) {
    var info = checkFrames(frames);
    if (!info) {
        return [];
    }
    // Upper bound on a single cluster's duration, in milliseconds.
    var clusterMaxDuration = 30000;
    var EBML = [{
        'id': 0x1a45dfa3, // EBML
        'data': [{
            'data': 1,
            'id': 0x4286 // EBMLVersion
        }, {
            'data': 1,
            'id': 0x42f7 // EBMLReadVersion
        }, {
            'data': 4,
            'id': 0x42f2 // EBMLMaxIDLength
        }, {
            'data': 8,
            'id': 0x42f3 // EBMLMaxSizeLength
        }, {
            'data': 'webm',
            'id': 0x4282 // DocType
        }, {
            'data': 2,
            'id': 0x4287 // DocTypeVersion
        }, {
            'data': 2,
            'id': 0x4285 // DocTypeReadVersion
        }]
    }, {
        'id': 0x18538067, // Segment
        'data': [{
            'id': 0x1549a966, // Info
            'data': [{
                'data': 1e6, //do things in millisecs (num of nanosecs for duration scale)
                'id': 0x2ad7b1 // TimecodeScale
            }, {
                'data': 'whammy',
                'id': 0x4d80 // MuxingApp
            }, {
                'data': 'whammy',
                'id': 0x5741 // WritingApp
            }, {
                'data': doubleToString(info.duration),
                'id': 0x4489 // Duration
            }]
        }, {
            'id': 0x1654ae6b, // Tracks
            'data': [{
                'id': 0xae, // TrackEntry
                'data': [{
                    'data': 1,
                    'id': 0xd7 // TrackNumber
                }, {
                    'data': 1,
                    'id': 0x73c5 // TrackUID
                }, {
                    'data': 0,
                    'id': 0x9c // FlagLacing
                }, {
                    'data': 'und',
                    'id': 0x22b59c // Language
                }, {
                    'data': 'V_VP8',
                    'id': 0x86 // CodecID
                }, {
                    'data': 'VP8',
                    'id': 0x258688 // CodecName
                }, {
                    'data': 1,
                    'id': 0x83 // TrackType
                }, {
                    'id': 0xe0, // Video
                    'data': [{
                        'data': info.width,
                        'id': 0xb0 // PixelWidth
                    }, {
                        'data': info.height,
                        'id': 0xba // PixelHeight
                    }]
                }]
            }]
        }]
    }];
    //Generate clusters (max duration)
    var frameNumber = 0;
    var clusterTimecode = 0;
    while (frameNumber < frames.length) {
        var clusterFrames = [];
        var clusterDuration = 0;
        do {
            clusterFrames.push(frames[frameNumber]);
            clusterDuration += frames[frameNumber].duration;
            frameNumber++;
        } while (frameNumber < frames.length && clusterDuration < clusterMaxDuration);
        // Per-cluster relative timecode accumulator, consumed by getClusterData.
        var clusterCounter = 0;
        var cluster = {
            'id': 0x1f43b675, // Cluster
            'data': getClusterData(clusterTimecode, clusterCounter, clusterFrames)
        }; //Add cluster to segment
        EBML[1].data.push(cluster);
        clusterTimecode += clusterDuration;
    }
    return generateEBML(EBML);
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
ArrayToWebM
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
// Builds a webm Cluster's payload: a Timecode element followed by one
// SimpleBlock (id 0xa3) per frame. `clusterCounter` accumulates each frame's
// duration so every block carries its timecode relative to the cluster start.
function getClusterData(clusterTimecode, clusterCounter, clusterFrames) {
    var timecodeElement = {
        'data': clusterTimecode,
        'id': 0xe7 // Timecode
    };
    var blockElements = clusterFrames.map(function(webp) {
        var simpleBlock = makeSimpleBlock({
            discardable: 0,
            frame: webp.data.slice(4),
            invisible: 0,
            keyframe: 1,
            lacing: 0,
            trackNum: 1,
            timecode: Math.round(clusterCounter)
        });
        clusterCounter += webp.duration;
        return {
            data: simpleBlock,
            id: 0xa3
        };
    });
    return [timecodeElement].concat(blockElements);
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
getClusterData
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
// Validates the frame list and summarizes it: total duration plus the width
// and height taken from the first frame. When there are no frames, reports
// the failure to the page via postMessage and returns undefined.
function checkFrames(frames) {
    var first = frames[0];
    if (!first) {
        postMessage({
            error: 'Something went wrong. Maybe WebP format is not supported in the current browser.'
        });
        return;
    }
    var totalDuration = first.duration;
    for (var i = 1; i < frames.length; i++) {
        totalDuration += frames[i].duration;
    }
    return {
        duration: totalDuration,
        width: first.width,
        height: first.height
    };
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
checkFrames
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
function numToBuffer(num) {
    // Encodes a positive integer as big-endian bytes with no leading zeros:
    // collect the bytes little-endian, then reverse.
    var bytes = [];
    while (num > 0) {
        bytes.push(num & 0xff);
        num = num >> 8;
    }
    bytes.reverse();
    return new Uint8Array(bytes);
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
numToBuffer
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
function strToBuffer(str) {
    // One byte per character; char codes above 255 are truncated by the
    // Uint8Array store, matching the original behavior.
    var codes = new Uint8Array(str.length);
    for (var i = 0; i < str.length; i++) {
        codes[i] = str.charCodeAt(i);
    }
    return codes;
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
strToBuffer
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
function bitsToBuffer(bits) {
    // Left-pads the bit string with '0' to a whole number of octets, then
    // parses each 8-bit group into one byte.
    var remainder = bits.length % 8;
    if (remainder) {
        bits = (new Array(1 + 8 - remainder)).join('0') + bits;
    }
    var octets = [];
    for (var pos = 0; pos < bits.length; pos += 8) {
        octets.push(parseInt(bits.substr(pos, 8), 2));
    }
    return new Uint8Array(octets);
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
bitsToBuffer
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
// Serializes a list of {id, data} EBML elements into a webm Blob.
// Each element is written as: raw id bytes, VINT-encoded payload length,
// then the payload itself. Nested element arrays are serialized recursively.
function generateEBML(json) {
    var ebml = [];
    for (var i = 0; i < json.length; i++) {
        var data = json[i].data;
        // Nested element list (or Blob) -> recurse into a Blob payload.
        if (typeof data === 'object') {
            data = generateEBML(data);
        }
        // Unsigned integer payload -> big-endian bytes.
        if (typeof data === 'number') {
            data = bitsToBuffer(data.toString(2));
        }
        // String payload -> raw bytes, one per character.
        if (typeof data === 'string') {
            data = strToBuffer(data);
        }
        // Payload length works for Blob (.size), ArrayBuffer (.byteLength)
        // and typed arrays / strings (.length).
        var len = data.size || data.byteLength || data.length;
        // Number of extra length octets beyond the first.
        // NOTE(review): Math.log(len) is -Infinity for len === 0 and 0 for
        // len === 1 — presumably empty payloads never occur here; confirm
        // before reusing this routine elsewhere.
        var zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8);
        var sizeToString = len.toString(2);
        // EBML VINT: leading zero bits, a '1' marker bit, then the length
        // bits zero-padded to fill the remaining octets.
        // NOTE(review): (new Array(zeroes)).join('0') yields zeroes-1 zero
        // bits — this matches upstream Whammy; verify against the EBML spec
        // if element sizes ever look off.
        var padded = (new Array((zeroes * 7 + 7 + 1) - sizeToString.length)).join('0') + sizeToString;
        var size = (new Array(zeroes)).join('0') + '1' + padded;
        ebml.push(numToBuffer(json[i].id));
        ebml.push(bitsToBuffer(size));
        ebml.push(data);
    }
    return new Blob(ebml, {
        type: 'video/webm'
    });
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
generateEBML
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
function toBinStrOld(bits) {
    // Legacy variant of bitsToBuffer: pads the bit string to a multiple of 8,
    // then encodes each octet as a single character of the result string.
    var remainder = bits.length % 8;
    if (remainder) {
        bits = (new Array(1 + 8 - remainder)).join('0') + bits;
    }
    var out = '';
    for (var pos = 0; pos < bits.length; pos += 8) {
        out += String.fromCharCode(parseInt(bits.substr(pos, 8), 2));
    }
    return out;
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
toBinStrOld
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
function makeSimpleBlock(data) {
    // Flag byte layout: keyframe(0x80) | lacing(<<1) | invisible(0x08) | discardable(0x01).
    var flags = 0;
    if (data.keyframe) {
        flags |= 128;
    }
    if (data.invisible) {
        flags |= 8;
    }
    if (data.lacing) {
        flags |= (data.lacing << 1);
    }
    if (data.discardable) {
        flags |= 1;
    }
    // A track number above 127 would need a multi-byte VINT, which this
    // encoder does not implement.
    if (data.trackNum > 127) {
        throw 'TrackNumber > 127 not supported';
    }
    // Header: 1-byte track VINT, 2-byte big-endian relative timecode, flag byte.
    var header = String.fromCharCode(data.trackNum | 0x80) +
        String.fromCharCode(data.timecode >> 8) +
        String.fromCharCode(data.timecode & 0xff) +
        String.fromCharCode(flags);
    return header + data.frame;
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
makeSimpleBlock
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
function parseWebP(riff) {
    var bitstream = riff.RIFF[0].WEBP[0];
    // A VP8 keyframe starts with the 0x9d012a sync code.
    var frameStart = bitstream.indexOf('\x9d\x01\x2a');
    // The four bytes following the sync code carry 14-bit width and height,
    // each stored little-endian (per the VP8 bitstream spec).
    var c = [];
    for (var i = 0; i < 4; i++) {
        c[i] = bitstream.charCodeAt(frameStart + 3 + i);
    }
    var width = ((c[1] << 8) | c[0]) & 0x3FFF;
    var height = ((c[3] << 8) | c[2]) & 0x3FFF;
    return {
        width: width,
        height: height,
        data: bitstream,
        riff: riff
    };
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
parseWebP
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
function getStrLength(string, offset) {
    // Reads the 4 length bytes that follow a chunk id as one 32-bit
    // big-endian value: each character becomes 8 zero-padded bits, all bits
    // are concatenated, then parsed base 2.
    var lengthBytes = string.substr(offset + 4, 4);
    var bitString = '';
    for (var i = 0; i < lengthBytes.length; i++) {
        var bits = lengthBytes.charCodeAt(i).toString(2);
        bitString += (new Array(8 - bits.length + 1)).join('0') + bits;
    }
    return parseInt(bitString, 2);
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
getStrLength
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
function parseRIFF(string) {
    // Walks a chunk stream: 4-byte id, 4-byte length, then the payload.
    // Returns a map of chunk id -> array of payloads (or nested chunk maps).
    var chunks = {};
    var offset = 0;
    while (offset < string.length) {
        var id = string.substr(offset, 4);
        var len = getStrLength(string, offset);
        var payload = string.substr(offset + 8, len);
        offset += 8 + len;
        if (!chunks[id]) {
            chunks[id] = [];
        }
        // Container chunks (RIFF/LIST) hold nested chunk streams; recurse.
        if (id === 'RIFF' || id === 'LIST') {
            chunks[id].push(parseRIFF(payload));
        } else {
            chunks[id].push(payload);
        }
    }
    return chunks;
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
parseRIFF
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
function doubleToString(num) {
    // Writes the number into an IEEE-754 double buffer, then emits its
    // 8 bytes as characters in reversed order (host little-endian assumed,
    // so the output is big-endian — as the original did).
    var bytes = new Uint8Array(new Float64Array([num]).buffer);
    var chars = [];
    for (var i = bytes.length - 1; i >= 0; i--) {
        chars.push(String.fromCharCode(bytes[i]));
    }
    return chars.join('');
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
doubleToString
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
// Recursively reads blobs[index] into `buffers` via FileReader; once the
// index runs past the last blob, hands off to concatenateBuffers().
// NOTE(review): `blobs`, `index`, `buffers` and `concatenateBuffers` come
// from the enclosing closure (not visible in this chunk) — confirm they are
// initialized before the first call.
function readAsArrayBuffer() {
    if (!blobs[index]) {
        // All blobs consumed: merge the collected ArrayBuffers.
        return concatenateBuffers();
    }
    var reader = new FileReader();
    reader.onload = function(event) {
        // Collect this blob's bytes, then advance to the next blob.
        buffers.push(event.target.result);
        index++;
        readAsArrayBuffer();
    };
    reader.readAsArrayBuffer(blobs[index]);
}
|
A more abstract-ish API.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
@param {?number} speed - 0.8
@param {?number} quality - 100
|
readAsArrayBuffer
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
// Merges all collected ArrayBuffers into a single Blob of the given `type`
// and passes it to `callback`. Buffers are copied through Uint16Array views
// (16-bit samples), so any buffer with an odd byte length is trimmed by one
// trailing byte.
// NOTE(review): `buffers`, `type` and `callback` come from the enclosing
// closure (not visible in this chunk).
function concatenateBuffers() {
    // Total output size in 16-bit elements (odd trailing bytes are dropped).
    var totalElements = 0;
    buffers.forEach(function(buffer) {
        totalElements += Math.floor(buffer.byteLength / 2);
    });
    var tmp = new Uint16Array(totalElements);
    // BYTES_PER_ELEMENT == 2 for Uint16Array: Uint16Array#set takes an
    // ELEMENT offset, so we must advance by byteLength / 2, not byteLength.
    // (The previous code passed byte offsets and over-allocated, producing a
    // blob twice the needed size with zero gaps between chunks.)
    var elementOffset = 0;
    buffers.forEach(function(buffer) {
        var usableByteLength = buffer.byteLength;
        if (usableByteLength % 2 !== 0) {
            // A Uint16Array view requires an even byte length.
            usableByteLength -= 1;
            buffer = buffer.slice(0, usableByteLength);
        }
        tmp.set(new Uint16Array(buffer), elementOffset);
        elementOffset += usableByteLength / 2;
    });
    var blob = new Blob([tmp.buffer], {
        type: type
    });
    callback(blob);
}
|
A more abstract-ish API.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
@param {?number} speed - 0.8
@param {?number} quality - 100
|
concatenateBuffers
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/MediaStreamRecorder.js
|
MIT
|
/**
 * Thin wrapper around the native MediaRecorder API that records the given
 * MediaStream in repeated slices (the recorder is torn down and restarted
 * for each slice) and hands every recorded Blob to `ondataavailable`.
 * NOTE(review): relies on `IsChrome` and `isMediaRecorderCompatible` from
 * the enclosing scope (not visible in this chunk) — confirm both exist.
 * @param {MediaStream} mediaStream - stream to record.
 */
function MediaRecorderWrapper(mediaStream) {
    var self = this;
    /**
     * This method records MediaStream.
     * @method
     * @memberof MediaStreamRecorder
     * @example
     * recorder.start(5000);
     */
    this.start = function(timeSlice, __disableLogs) {
        this.timeSlice = timeSlice || 5000;
        if (!self.mimeType) {
            self.mimeType = 'video/webm';
        }
        // Audio-only recording from an A/V stream: strip the video track(s).
        if (self.mimeType.indexOf('audio') !== -1) {
            if (mediaStream.getVideoTracks().length && mediaStream.getAudioTracks().length) {
                var stream;
                if (!!navigator.mozGetUserMedia) {
                    stream = new MediaStream();
                    stream.addTrack(mediaStream.getAudioTracks()[0]);
                } else {
                    // webkitMediaStream
                    stream = new MediaStream(mediaStream.getAudioTracks());
                }
                mediaStream = stream;
            }
        }
        // Pick the container each browser can actually encode audio into.
        if (self.mimeType.indexOf('audio') !== -1) {
            self.mimeType = IsChrome ? 'audio/webm' : 'audio/ogg';
        }
        self.dontFireOnDataAvailableEvent = false;
        var recorderHints = {
            mimeType: self.mimeType
        };
        if (!self.disableLogs && !__disableLogs) {
            console.log('Passing following params over MediaRecorder API.', recorderHints);
        }
        if (mediaRecorder) {
            // mandatory to make sure Firefox doesn't fails to record streams 3-4 times without reloading the page.
            mediaRecorder = null;
        }
        if (IsChrome && !isMediaRecorderCompatible()) {
            // to support video-only recording on stable
            recorderHints = 'video/vp8';
        }
        // http://dxr.mozilla.org/mozilla-central/source/content/media/MediaRecorder.cpp
        // https://wiki.mozilla.org/Gecko:MediaRecorder
        // https://dvcs.w3.org/hg/dap/raw-file/default/media-stream-capture/MediaRecorder.html
        // starting a recording session; which will initiate "Reading Thread"
        // "Reading Thread" are used to prevent main-thread blocking scenarios
        try {
            mediaRecorder = new MediaRecorder(mediaStream, recorderHints);
        } catch (e) {
            // if someone passed NON_supported mimeType
            // or if Firefox on Android
            mediaRecorder = new MediaRecorder(mediaStream);
        }
        if ('canRecordMimeType' in mediaRecorder && mediaRecorder.canRecordMimeType(self.mimeType) === false) {
            if (!self.disableLogs) {
                console.warn('MediaRecorder API seems unable to record mimeType:', self.mimeType);
            }
        }
        // i.e. stop recording when <video> is paused by the user; and auto restart recording
        // when video is resumed. E.g. yourStream.getVideoTracks()[0].muted = true; // it will auto-stop recording.
        if (self.ignoreMutedMedia === true) {
            mediaRecorder.ignoreMutedMedia = true;
        }
        var firedOnDataAvailableOnce = false;
        // Dispatching OnDataAvailable Handler
        mediaRecorder.ondataavailable = function(e) {
            // how to fix FF-corrupt-webm issues?
            // should we leave this? e.data.size < 26800
            if (!e.data || !e.data.size || e.data.size < 26800 || firedOnDataAvailableOnce) {
                return;
            }
            firedOnDataAvailableOnce = true;
            var blob = self.getNativeBlob ? e.data : new Blob([e.data], {
                type: self.mimeType || 'video/webm'
            });
            self.ondataavailable(blob);
            // self.dontFireOnDataAvailableEvent = true;
            // Tear down the recorder after each slice; a fresh one is created
            // by the recursive self.start() below.
            if (!!mediaRecorder && mediaRecorder.state === 'recording') {
                mediaRecorder.stop();
            }
            mediaRecorder = null;
            if (self.dontFireOnDataAvailableEvent) {
                return;
            }
            // record next interval
            self.start(timeSlice, '__disableLogs');
        };
        mediaRecorder.onerror = function(error) {
            if (!self.disableLogs) {
                if (error.name === 'InvalidState') {
                    console.error('The MediaRecorder is not in a state in which the proposed operation is allowed to be executed.');
                } else if (error.name === 'OutOfMemory') {
                    console.error('The UA has exhaused the available memory. User agents SHOULD provide as much additional information as possible in the message attribute.');
                } else if (error.name === 'IllegalStreamModification') {
                    console.error('A modification to the stream has occurred that makes it impossible to continue recording. An example would be the addition of a Track while recording is occurring. User agents SHOULD provide as much additional information as possible in the message attribute.');
                } else if (error.name === 'OtherRecordingError') {
                    console.error('Used for an fatal error other than those listed above. User agents SHOULD provide as much additional information as possible in the message attribute.');
                } else if (error.name === 'GenericError') {
                    console.error('The UA cannot provide the codec or recording option that has been requested.', error);
                } else {
                    console.error('MediaRecorder Error', error);
                }
            }
            // When the stream is "ended" set recording to 'inactive'
            // and stop gathering data. Callers should not rely on
            // exactness of the timeSlice value, especially
            // if the timeSlice value is small. Callers should
            // consider timeSlice as a minimum value
            if (!!mediaRecorder && mediaRecorder.state !== 'inactive' && mediaRecorder.state !== 'stopped') {
                mediaRecorder.stop();
            }
        };
        // void start(optional long mTimeSlice)
        // The interval of passing encoded data from EncodedBufferCache to onDataAvailable
        // handler. "mTimeSlice < 0" means Session object does not push encoded data to
        // onDataAvailable, instead, it passive wait the client side pull encoded data
        // by calling requestData API.
        try {
            mediaRecorder.start(3.6e+6);
        } catch (e) {
            mediaRecorder = null;
        }
        // Slicing is driven manually: requestData() after `timeSlice` ms
        // triggers ondataavailable above, which restarts the recorder.
        setTimeout(function() {
            if (!mediaRecorder) {
                return;
            }
            if (mediaRecorder.state === 'recording') {
                // "stop" method auto invokes "requestData"!
                mediaRecorder.requestData();
                // mediaRecorder.stop();
            }
        }, timeSlice);
        // Start recording. If timeSlice has been provided, mediaRecorder will
        // raise a dataavailable event containing the Blob of collected data on every timeSlice milliseconds.
        // If timeSlice isn't provided, UA should call the RequestData to obtain the Blob data, also set the mTimeSlice to zero.
    };
    /**
     * This method stops recording MediaStream.
     * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
     * @method
     * @memberof MediaStreamRecorder
     * @example
     * recorder.stop(function(blob) {
     *     video.src = URL.createObjectURL(blob);
     * });
     */
    this.stop = function(callback) {
        if (!mediaRecorder) {
            return;
        }
        // mediaRecorder.state === 'recording' means that media recorder is associated with "session"
        // mediaRecorder.state === 'stopped' means that media recorder is detached from the "session" ... in this case; "session" will also be deleted.
        if (mediaRecorder.state === 'recording') {
            // "stop" method auto invokes "requestData"!
            mediaRecorder.requestData();
            // Give ondataavailable a chance to fire before tearing down.
            setTimeout(function() {
                self.dontFireOnDataAvailableEvent = true;
                if (!!mediaRecorder && mediaRecorder.state === 'recording') {
                    mediaRecorder.stop();
                }
                mediaRecorder = null;
                self.onstop();
            }, 2000);
        }
    };
    /**
     * This method pauses the recording process.
     * @method
     * @memberof MediaStreamRecorder
     * @example
     * recorder.pause();
     */
    this.pause = function() {
        if (!mediaRecorder) {
            return;
        }
        if (mediaRecorder.state === 'recording') {
            mediaRecorder.pause();
        }
        // Suppress the slice-restart cycle while paused.
        this.dontFireOnDataAvailableEvent = true;
    };
    /**
     * The recorded blobs are passed over this event.
     * @event
     * @memberof MediaStreamRecorder
     * @example
     * recorder.ondataavailable = function(data) {};
     */
    this.ondataavailable = function(blob) {
        console.log('recorded-blob', blob);
    };
    /**
     * This method resumes the recording process.
     * @method
     * @memberof MediaStreamRecorder
     * @example
     * recorder.resume();
     */
    this.resume = function() {
        // If recording was suppressed (paused/stopped slice cycle), restart
        // a fresh slice quietly instead of resuming the native recorder.
        if (this.dontFireOnDataAvailableEvent) {
            this.dontFireOnDataAvailableEvent = false;
            var disableLogs = self.disableLogs;
            self.disableLogs = true;
            // NOTE(review): reads `this.timeslice` (lower-case s) while start()
            // stores `this.timeSlice` — presumably falls back to 5000 always;
            // confirm intended.
            this.start(this.timeslice || 5000);
            self.disableLogs = disableLogs;
            return;
        }
        if (!mediaRecorder) {
            return;
        }
        if (mediaRecorder.state === 'paused') {
            mediaRecorder.resume();
        }
    };
    /**
     * This method resets currently recorded data.
     * @method
     * @memberof MediaStreamRecorder
     * @example
     * recorder.clearRecordedData();
     */
    this.clearRecordedData = function() {
        if (!mediaRecorder) {
            return;
        }
        this.pause();
        this.dontFireOnDataAvailableEvent = true;
        this.stop();
    };
    // Fired when the recorder has fully stopped (after stop()'s teardown).
    this.onstop = function() {};
    // Reference to "MediaRecorder" object
    var mediaRecorder;
    // Returns false once the stream reports itself inactive/ended.
    function isMediaStreamActive() {
        if ('active' in mediaStream) {
            if (!mediaStream.active) {
                return false;
            }
        } else if ('ended' in mediaStream) { // old hack
            if (mediaStream.ended) {
                return false;
            }
        }
        return true;
    }
    // this method checks if media stream is stopped
    // or any track is ended.
    (function looper() {
        if (!mediaRecorder) {
            return;
        }
        if (isMediaStreamActive() === false) {
            self.stop();
            return;
        }
        setTimeout(looper, 1000); // check every second
    })();
}
|
Implementation of https://dvcs.w3.org/hg/dap/raw-file/default/media-stream-capture/MediaRecorder.html
The MediaRecorder accepts a mediaStream as input source passed from UA. When recorder starts,
a MediaEncoder will be created and accept the mediaStream as input source.
Encoder will get the raw data by track data changes, encode it by selected MIME Type, then store the encoded data in the EncodedBufferCache object.
The encoded data will be extracted on every timeslice passed from Start function call or by RequestData function.
Thread model:
When the recorder starts, it creates a "Media Encoder" thread to read data from MediaEncoder object and store buffer in EncodedBufferCache object.
Also extract the encoded data and create blobs on every timeslice passed from start function or RequestData function called by UA.
|
MediaRecorderWrapper
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/AudioStreamRecorder/MediaRecorderWrapper.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/AudioStreamRecorder/MediaRecorderWrapper.js
|
MIT
|
// Returns false once the stream reports itself inactive (modern `active`
// flag) or ended (legacy `ended` flag); true otherwise.
// NOTE(review): `mediaStream` comes from the enclosing closure.
function isMediaStreamActive() {
    if ('active' in mediaStream) {
        return mediaStream.active ? true : false;
    }
    if ('ended' in mediaStream) { // old hack
        return mediaStream.ended ? false : true;
    }
    return true;
}
|
This method resets currently recorded data.
@method
@memberof MediaStreamRecorder
@example
recorder.clearRecordedData();
|
isMediaStreamActive
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/AudioStreamRecorder/MediaRecorderWrapper.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/AudioStreamRecorder/MediaRecorderWrapper.js
|
MIT
|
// Exposes `methodName` on the Flash object as a plain JS function that
// forwards its arguments through ExternalInterface XML, avoiding eval().
// The invoke wrapper closes over `methodName` (the original used an extra
// IIFE re-closure as an IE memory-leak workaround).
function makeCallable(flashObj, methodName) {
    var invoke = function() {
        this.CallFunction(
            '<invoke name="' + methodName + '" returntype="javascript">'
            + __flash__argumentsToXML(arguments, 0)
            + '</invoke>');
    };
    flashObj[methodName] = invoke;
}
|
Exposes the given methodName on the given flashObj to make it callable, without
using an eval() to speed things up. Note that only strings therefore can be passed
_back_ from Flash, though complex types can be passed from JavaScript to Flash.
|
makeCallable
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/AudioStreamRecorder/lib/recorder.js/recorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/AudioStreamRecorder/lib/recorder.js/recorder.js
|
MIT
|
/**
 * Opens a "Save As" dialog for the given Blob/File.
 * @param {Blob} file - File or Blob object. Required.
 * @param {string} fileName - Optional name, e.g. "Recorded-Video.webm".
 */
function invokeSaveAsDialog(file, fileName) {
    if (!file) {
        throw 'Blob object is required.';
    }
    // Default the MIME type when missing; some Blob implementations make
    // `type` read-only, hence the try/catch.
    if (!file.type) {
        try {
            file.type = 'video/webm';
        } catch (e) {}
    }
    var fileExtension = (file.type || 'video/webm').split('/')[1];
    // An explicit "name.ext" argument overrides both the name and extension.
    if (fileName && fileName.indexOf('.') !== -1) {
        var splitted = fileName.split('.');
        fileName = splitted[0];
        fileExtension = splitted[1];
    }
    var fileFullName = (fileName || (Math.round(Math.random() * 9999999999) + 888888888)) + '.' + fileExtension;
    // IE/Edge expose their own save dialogs.
    if (typeof navigator.msSaveOrOpenBlob !== 'undefined') {
        return navigator.msSaveOrOpenBlob(file, fileFullName);
    }
    if (typeof navigator.msSaveBlob !== 'undefined') {
        return navigator.msSaveBlob(file, fileFullName);
    }
    // Everyone else: synthesize a click on a temporary download anchor.
    var hyperlink = document.createElement('a');
    hyperlink.href = URL.createObjectURL(file);
    hyperlink.target = '_blank';
    hyperlink.download = fileFullName;
    if (navigator.mozGetUserMedia) {
        // Firefox needs the anchor attached to the DOM before clicking;
        // remove it again once the click has fired.
        hyperlink.onclick = function() {
            (document.body || document.documentElement).removeChild(hyperlink);
        };
        (document.body || document.documentElement).appendChild(hyperlink);
    }
    hyperlink.dispatchEvent(new MouseEvent('click', {
        view: window,
        bubbles: true,
        cancelable: true
    }));
    if (!navigator.mozGetUserMedia) {
        URL.revokeObjectURL(hyperlink.href);
    }
}
|
@param {Blob} file - File or Blob object. This parameter is required.
@param {string} fileName - Optional file name e.g. "Recorded-Video.webm"
@example
invokeSaveAsDialog(blob or file, [optional] fileName);
@see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
|
invokeSaveAsDialog
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/common/Cross-Browser-Declarations.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/common/Cross-Browser-Declarations.js
|
MIT
|
// Formats a byte count using decimal (SI) units with 3 significant digits,
// e.g. 1536000 -> "1.54 MB".
function bytesToSize(bytes) {
    if (bytes === 0) {
        return '0 Bytes';
    }
    var units = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
    var base = 1000;
    var exponent = parseInt(Math.floor(Math.log(bytes) / Math.log(base)), 10);
    return (bytes / Math.pow(base, exponent)).toPrecision(3) + ' ' + units[exponent];
}
|
@param {Blob} file - File or Blob object. This parameter is required.
@param {string} fileName - Optional file name e.g. "Recorded-Video.webm"
@example
invokeSaveAsDialog(blob or file, [optional] fileName);
@see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
|
bytesToSize
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/common/Cross-Browser-Declarations.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/common/Cross-Browser-Declarations.js
|
MIT
|
// Returns true when the current browser is expected to support the
// MediaRecorder API: any Firefox, or Chrome 49+. All other browsers
// (including Opera, which also exposes window.chrome) return false.
function isMediaRecorderCompatible() {
    // Opera ships window.opera or an " OPR/" UA token and must not be
    // mistaken for Chrome.
    var isOpera = !!window.opera || navigator.userAgent.indexOf(' OPR/') >= 0;
    var isChrome = !!window.chrome && !isOpera;
    var isFirefox = typeof window.InstallTrigger !== 'undefined';
    if (isFirefox) {
        return true;
    }
    if (!isChrome) {
        return false;
    }
    // Extract the version that follows the "Chrome/" UA token.
    var nAgt = navigator.userAgent;
    var verOffset = nAgt.indexOf('Chrome');
    var fullVersion = nAgt.substring(verOffset + 7);
    // Trim the version string at a semicolon or space if present.
    var ix = fullVersion.indexOf(';');
    if (ix !== -1) {
        fullVersion = fullVersion.substring(0, ix);
    }
    ix = fullVersion.indexOf(' ');
    if (ix !== -1) {
        fullVersion = fullVersion.substring(0, ix);
    }
    var majorVersion = parseInt('' + fullVersion, 10);
    if (isNaN(majorVersion)) {
        // Fall back to appVersion when the UA token could not be parsed.
        fullVersion = '' + parseFloat(navigator.appVersion);
        majorVersion = parseInt(navigator.appVersion, 10);
    }
    return majorVersion >= 49;
}
|
@param {Blob} file - File or Blob object. This parameter is required.
@param {string} fileName - Optional file name e.g. "Recorded-Video.webm"
@example
invokeSaveAsDialog(blob or file, [optional] fileName);
@see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
|
isMediaRecorderCompatible
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/common/Cross-Browser-Declarations.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/common/Cross-Browser-Declarations.js
|
MIT
|
/**
 * Opens a "Save As" dialog for the given Blob/File by clicking a synthetic
 * download link (or the IE/Edge msSaveBlob APIs where available).
 * @param {Blob} file - File or Blob object. Required.
 * @param {string} fileName - Optional name, e.g. "Recorded-Video.webm".
 */
function invokeSaveAsDialog(file, fileName) {
    if (!file) {
        throw 'Blob object is required.';
    }
    // Default the MIME type when missing; some Blob implementations make
    // `type` read-only, hence the try/catch.
    if (!file.type) {
        try {
            file.type = 'video/webm';
        } catch (e) {}
    }
    var fileExtension = (file.type || 'video/webm').split('/')[1];
    // An explicit "name.ext" argument overrides both the name and extension.
    if (fileName && fileName.indexOf('.') !== -1) {
        var splitted = fileName.split('.');
        fileName = splitted[0];
        fileExtension = splitted[1];
    }
    // Fall back to a random numeric file name when none was supplied.
    var fileFullName = (fileName || (Math.round(Math.random() * 9999999999) + 888888888)) + '.' + fileExtension;
    // IE/Edge expose their own save dialogs.
    if (typeof navigator.msSaveOrOpenBlob !== 'undefined') {
        return navigator.msSaveOrOpenBlob(file, fileFullName);
    } else if (typeof navigator.msSaveBlob !== 'undefined') {
        return navigator.msSaveBlob(file, fileFullName);
    }
    // Everyone else: synthesize a click on a temporary download anchor.
    var hyperlink = document.createElement('a');
    hyperlink.href = URL.createObjectURL(file);
    hyperlink.target = '_blank';
    hyperlink.download = fileFullName;
    if (!!navigator.mozGetUserMedia) {
        // Firefox needs the anchor attached to the DOM before clicking;
        // remove it again once the click has fired.
        hyperlink.onclick = function() {
            (document.body || document.documentElement).removeChild(hyperlink);
        };
        (document.body || document.documentElement).appendChild(hyperlink);
    }
    var evt = new MouseEvent('click', {
        view: window,
        bubbles: true,
        cancelable: true
    });
    hyperlink.dispatchEvent(evt);
    if (!navigator.mozGetUserMedia) {
        URL.revokeObjectURL(hyperlink.href);
    }
}
|
@param {Blob} file - File or Blob object. This parameter is required.
@param {string} fileName - Optional file name e.g. "Recorded-Video.webm"
@example
invokeSaveAsDialog(blob or file, [optional] fileName);
@see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
|
invokeSaveAsDialog
|
javascript
|
muaz-khan/WebRTC-Experiment
|
MediaStreamRecorder/common/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/MediaStreamRecorder/common/MediaStreamRecorder.js
|
MIT
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.