code
stringlengths 24
2.07M
| docstring
stringlengths 25
85.3k
| func_name
stringlengths 1
92
| language
stringclasses 1
value | repo
stringlengths 5
64
| path
stringlengths 4
172
| url
stringlengths 44
218
| license
stringclasses 7
values |
---|---|---|---|---|---|---|---|
function base64ToBytes (str) {
  // Normalise the base64 text first, then decode it into a byte array.
  // `base64clean` and `base64` are defined elsewhere in this file.
  var cleaned = base64clean(str)
  return base64.toByteArray(cleaned)
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
base64ToBytes
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function blitBuffer (src, dst, offset, length) {
  // Copy up to `length` bytes from `src` into `dst` starting at `offset`,
  // stopping early when either buffer runs out. Returns the count copied.
  var copied = 0
  while (copied < length) {
    var dstPos = copied + offset
    if (dstPos >= dst.length || copied >= src.length) break
    dst[dstPos] = src[copied]
    copied++
  }
  return copied
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
blitBuffer
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function isInstance (obj, type) {
  // True when `obj` is an instance of `type`, or when its constructor
  // merely shares the same name (handles objects from another realm/iframe).
  if (obj instanceof type) return true
  if (obj == null) return false
  var ctor = obj.constructor
  return ctor != null && ctor.name != null && ctor.name === type.name
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
isInstance
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function numberIsNaN (obj) {
  // NaN is the only JS value that is not equal to itself; this self-compare
  // avoids Number.isNaN, which IE11 lacks.
  var equalsItself = obj === obj // eslint-disable-line no-self-compare
  return !equalsItself
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
numberIsNaN
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
// Splits a Matroska/WebM block payload into its laced frames.
// `lacing` selects the lacing mode via constants defined elsewhere in the
// file (FIXED_SIZE_LACING, XIPH_LACING, EBML_LACING — TODO confirm their
// values against the full file); a falsy `lacing` means the remaining
// buffer is one single frame. Returns an array of frame buffers.
function readLacedData (reader, lacing) {
if (!lacing) return [reader.nextBuffer()]
var i, frameSize
var frames = []
// the byte stored in the stream is the frame count minus one
var framesNum = reader.nextUInt8() + 1 // number of frames
if (lacing === FIXED_SIZE_LACING) {
// remaining data should be divisible by the number of frames
if (reader.length % framesNum !== 0) throw new Error('Fixed-Size Lacing Error')
frameSize = reader.length / framesNum
for (i = 0; i < framesNum; i++) {
frames.push(reader.nextBuffer(frameSize))
}
return frames
}
// variable-size modes: collect explicit sizes for all frames but the last
var frameSizes = []
if (lacing === XIPH_LACING) {
// Xiph style: each size is a run of bytes summed together; a 0xff byte
// means "continue", any other byte terminates the size
for (i = 0; i < framesNum - 1; i++) {
var val
frameSize = 0
do {
val = reader.nextUInt8()
frameSize += val
} while (val === 0xff)
frameSizes.push(frameSize)
}
} else if (lacing === EBML_LACING) {
// first frame
frameSize = reader.nextUIntV()
frameSizes.push(frameSize)
// middle frames
// each subsequent size is encoded as a signed delta from the previous one
for (i = 1; i < framesNum - 1; i++) {
frameSize += reader.nextIntV()
frameSizes.push(frameSize)
}
}
for (i = 0; i < framesNum - 1; i++) {
frames.push(reader.nextBuffer(frameSizes[i]))
}
// last frame (remaining buffer)
frames.push(reader.nextBuffer())
return frames
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
readLacedData
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function BufferReader (buffer) {
  // Wrap `buffer` for sequential reads, starting at the first byte.
  this.offset = 0
  this.buffer = buffer
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
BufferReader
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function EventEmitter() {
  // Preserve any state already present (e.g. set up by a subclass);
  // otherwise start with an empty listener map and no listener cap.
  var events = this._events;
  var max = this._maxListeners;
  this._events = events ? events : {};
  this._maxListeners = max ? max : undefined;
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
EventEmitter
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
// One-shot listener wrapper (the closure EventEmitter#once installs —
// TODO confirm against the full file). `type`, `fired` and `listener`
// come from the enclosing scope. Detaches itself on the first emit, then
// invokes the real listener exactly once; `fired` guards against
// re-entrant emits firing the listener twice.
function g() {
this.removeListener(type, g);
if (!fired) {
fired = true;
listener.apply(this, arguments);
}
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
g
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function isFunction(arg) {
  // Callable check via typeof.
  return 'function' === typeof arg;
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
isFunction
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function isNumber(arg) {
  // Primitive-number check (NaN included, boxed Numbers excluded).
  return 'number' === typeof arg;
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
isNumber
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function isObject(arg) {
  // Non-null object check; guards against typeof null === 'object'.
  if (arg === null) return false;
  return typeof arg === 'object';
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
isObject
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function isUndefined(arg) {
  // Strictly undefined (not null, not falsy).
  return typeof arg === 'undefined';
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
isUndefined
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
// Factory that builds a 64-bit integer class for the given `name`
// (presumably names like "Uint64BE"/"Int64BE" — TODO confirm against the
// full file). `bigendian` picks the byte order of the 8-byte storage,
// `unsigned` the interpretation of the high word. Relies on module-level
// helpers defined elsewhere: BUFFER, UINT8ARRAY, ARRAYBUFFER, storage,
// ZERO, BIT24, BIT32, UNDEFINED, newArray, fromArray, isValidBuffer, and
// the fromPositive*/fromNegative* writers.
function factory(name, bigendian, unsigned) {
// byte offsets of the high and low 32-bit words inside the 8-byte buffer
var posH = bigendian ? 0 : 4;
var posL = bigendian ? 4 : 0;
// byte positions within one 32-bit word, flipped for little-endian
var pos0 = bigendian ? 0 : 3;
var pos1 = bigendian ? 1 : 2;
var pos2 = bigendian ? 2 : 1;
var pos3 = bigendian ? 3 : 0;
var fromPositive = bigendian ? fromPositiveBE : fromPositiveLE;
var fromNegative = bigendian ? fromNegativeBE : fromNegativeLE;
var proto = Int64.prototype;
var isName = "is" + name;
// hidden brand property name, e.g. "_isUint64BE"
var _isInt64 = "_" + isName;
// properties
proto.buffer = void 0;
proto.offset = 0;
proto[_isInt64] = true; // brand consulted by isInt64() below
// methods
proto.toNumber = toNumber;
proto.toString = toString;
proto.toJSON = toNumber;
proto.toArray = toArray;
// add .toBuffer() method only when Buffer available
if (BUFFER) proto.toBuffer = toBuffer;
// add .toArrayBuffer() method only when Uint8Array available
if (UINT8ARRAY) proto.toArrayBuffer = toArrayBuffer;
// isUint64BE, isInt64BE
Int64[isName] = isInt64;
// CommonJS
exports[name] = Int64;
return Int64;
// constructor
function Int64(buffer, offset, value, raddix) {
// allow calling without `new`
if (!(this instanceof Int64)) return new Int64(buffer, offset, value, raddix);
return init(this, buffer, offset, value, raddix);
}
// isUint64BE, isInt64BE
function isInt64(b) {
return !!(b && b[_isInt64]);
}
// initializer
// Supports several call shapes; arguments are shuffled when the first one
// is not a valid 8-byte buffer (Int64BE(value, raddix) style).
function init(that, buffer, offset, value, raddix) {
if (UINT8ARRAY && ARRAYBUFFER) {
if (buffer instanceof ARRAYBUFFER) buffer = new UINT8ARRAY(buffer);
if (value instanceof ARRAYBUFFER) value = new UINT8ARRAY(value);
}
// Int64BE() style
if (!buffer && !offset && !value && !storage) {
// shortcut to initialize with zero
that.buffer = newArray(ZERO, 0);
return;
}
// Int64BE(value, raddix) style
if (!isValidBuffer(buffer, offset)) {
var _storage = storage || Array;
raddix = offset;
value = buffer;
offset = 0;
buffer = new _storage(8);
}
that.buffer = buffer;
that.offset = offset |= 0;
// Int64BE(buffer, offset) style
if (UNDEFINED === typeof value) return;
// Int64BE(buffer, offset, value, raddix) style
if ("string" === typeof value) {
fromString(buffer, offset, value, raddix || 10);
} else if (isValidBuffer(value, raddix)) {
fromArray(buffer, offset, value, raddix);
} else if ("number" === typeof raddix) {
writeInt32(buffer, offset + posH, value); // high
writeInt32(buffer, offset + posL, raddix); // low
} else if (value > 0) {
fromPositive(buffer, offset, value); // positive
} else if (value < 0) {
fromNegative(buffer, offset, value); // negative
} else {
fromArray(buffer, offset, ZERO, 0); // zero, NaN and others
}
}
// Parses a signed numeric string in the given radix into the (high, low)
// word pair and writes it into the buffer.
function fromString(buffer, offset, str, raddix) {
var pos = 0;
var len = str.length;
var high = 0;
var low = 0;
if (str[0] === "-") pos++;
var sign = pos;
while (pos < len) {
var chr = parseInt(str[pos++], raddix);
if (!(chr >= 0)) break; // NaN
// 64-bit multiply-accumulate done as two 32-bit halves with carry
low = low * raddix + chr;
high = high * raddix + Math.floor(low / BIT32);
low %= BIT32;
}
if (sign) {
// two's-complement negation of the (high, low) pair
high = ~high;
if (low) {
low = BIT32 - low;
} else {
high++;
}
}
writeInt32(buffer, offset + posH, high);
writeInt32(buffer, offset + posL, low);
}
// Converts the stored 8 bytes back to a JS number (may lose precision
// beyond 2^53).
function toNumber() {
var buffer = this.buffer;
var offset = this.offset;
var high = readInt32(buffer, offset + posH);
var low = readInt32(buffer, offset + posL);
if (!unsigned) high |= 0; // a trick to get signed
return high ? (high * BIT32 + low) : low;
}
// Full-precision string conversion via repeated division by the radix.
function toString(radix) {
var buffer = this.buffer;
var offset = this.offset;
var high = readInt32(buffer, offset + posH);
var low = readInt32(buffer, offset + posL);
var str = "";
var sign = !unsigned && (high & 0x80000000);
if (sign) {
// negate so the digit loop works on the magnitude
high = ~high;
low = BIT32 - low;
}
radix = radix || 10;
while (1) {
var mod = (high % radix) * BIT32 + low;
high = Math.floor(high / radix);
low = Math.floor(mod / radix);
str = (mod % radix).toString(radix) + str;
if (!high && !low) break;
}
if (sign) {
str = "-" + str;
}
return str;
}
// Writes a 32-bit value into 4 bytes at `offset`, least significant first.
function writeInt32(buffer, offset, value) {
buffer[offset + pos3] = value & 255;
value = value >> 8;
buffer[offset + pos2] = value & 255;
value = value >> 8;
buffer[offset + pos1] = value & 255;
value = value >> 8;
buffer[offset + pos0] = value & 255;
}
// Reads 4 bytes as an unsigned 32-bit value; the top byte is multiplied
// rather than shifted so results >= 2^31 stay positive.
function readInt32(buffer, offset) {
return (buffer[offset + pos0] * BIT24) +
(buffer[offset + pos1] << 16) +
(buffer[offset + pos2] << 8) +
buffer[offset + pos3];
}
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
factory
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function Int64(buffer, offset, value, raddix) {
  // Constructor: delegate to init() when invoked with `new`,
  // otherwise re-invoke ourselves properly constructed.
  if (this instanceof Int64) {
    return init(this, buffer, offset, value, raddix);
  }
  return new Int64(buffer, offset, value, raddix);
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
Int64
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function isInt64(b) {
  // Type check via the hidden brand property set on the prototype.
  if (!b) return false;
  return b[_isInt64] ? true : false;
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
isInt64
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
// Initialises an Int64 instance from several supported call shapes.
// Arguments are shuffled when the first one is not a valid 8-byte buffer
// (the Int64BE(value, raddix) style). Relies on module-level helpers
// defined elsewhere: UINT8ARRAY, ARRAYBUFFER, storage, ZERO, UNDEFINED,
// posH/posL, newArray, isValidBuffer, fromString, fromArray, writeInt32,
// fromPositive, fromNegative.
function init(that, buffer, offset, value, raddix) {
if (UINT8ARRAY && ARRAYBUFFER) {
// accept raw ArrayBuffers by wrapping them in byte views
if (buffer instanceof ARRAYBUFFER) buffer = new UINT8ARRAY(buffer);
if (value instanceof ARRAYBUFFER) value = new UINT8ARRAY(value);
}
// Int64BE() style
if (!buffer && !offset && !value && !storage) {
// shortcut to initialize with zero
that.buffer = newArray(ZERO, 0);
return;
}
// Int64BE(value, raddix) style
if (!isValidBuffer(buffer, offset)) {
// allocate fresh 8-byte storage and reinterpret the arguments
var _storage = storage || Array;
raddix = offset;
value = buffer;
offset = 0;
buffer = new _storage(8);
}
that.buffer = buffer;
that.offset = offset |= 0;
// Int64BE(buffer, offset) style
if (UNDEFINED === typeof value) return;
// Int64BE(buffer, offset, value, raddix) style
if ("string" === typeof value) {
fromString(buffer, offset, value, raddix || 10);
} else if (isValidBuffer(value, raddix)) {
fromArray(buffer, offset, value, raddix);
} else if ("number" === typeof raddix) {
writeInt32(buffer, offset + posH, value); // high
writeInt32(buffer, offset + posL, raddix); // low
} else if (value > 0) {
fromPositive(buffer, offset, value); // positive
} else if (value < 0) {
fromNegative(buffer, offset, value); // negative
} else {
fromArray(buffer, offset, ZERO, 0); // zero, NaN and others
}
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
init
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function fromString(buffer, offset, str, raddix) {
  // Parse a signed numeric string in the given radix into a 64-bit value
  // held as two 32-bit words, then store it via writeInt32.
  var high = 0;
  var low = 0;
  var negative = str[0] === "-";
  var count = str.length;
  for (var idx = negative ? 1 : 0; idx < count; idx++) {
    var digit = parseInt(str[idx], raddix);
    if (!(digit >= 0)) break; // stop at the first non-digit (NaN)
    // multiply-accumulate across the two 32-bit halves, carrying upward
    low = low * raddix + digit;
    high = high * raddix + Math.floor(low / BIT32);
    low %= BIT32;
  }
  if (negative) {
    // two's-complement negation of the (high, low) pair
    high = ~high;
    if (low) {
      low = BIT32 - low;
    } else {
      high++;
    }
  }
  writeInt32(buffer, offset + posH, high);
  writeInt32(buffer, offset + posL, low);
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
fromString
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function toNumber() {
  // Recombine the stored high/low 32-bit words into a JS number
  // (precision is lost beyond 2^53).
  var hi = readInt32(this.buffer, this.offset + posH);
  var lo = readInt32(this.buffer, this.offset + posL);
  if (!unsigned) hi |= 0; // reinterpret the high word as signed
  if (!hi) return lo;
  return hi * BIT32 + lo;
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
toNumber
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function toString(radix) {
  // Full-precision string conversion: repeatedly divide the (high, low)
  // pair by the radix, collecting remainders as digits.
  var hi = readInt32(this.buffer, this.offset + posH);
  var lo = readInt32(this.buffer, this.offset + posL);
  var negative = !unsigned && (hi & 0x80000000);
  if (negative) {
    // negate so the digit loop operates on the magnitude
    hi = ~hi;
    lo = BIT32 - lo;
  }
  radix = radix || 10;
  var digits = "";
  do {
    var chunk = (hi % radix) * BIT32 + lo;
    hi = Math.floor(hi / radix);
    lo = Math.floor(chunk / radix);
    digits = (chunk % radix).toString(radix) + digits;
  } while (hi || lo);
  if (negative) digits = "-" + digits;
  return digits;
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
toString
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function writeInt32(buffer, offset, value) {
  // Store the 32-bit value one byte at a time, least significant byte
  // first; pos3..pos0 map byte significance to endian-specific positions.
  var slots = [pos3, pos2, pos1, pos0];
  for (var k = 0; k < 4; k++) {
    buffer[offset + slots[k]] = value & 255;
    value >>= 8;
  }
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
writeInt32
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function readInt32(buffer, offset) {
  var b0 = buffer[offset + pos0];
  var b1 = buffer[offset + pos1];
  var b2 = buffer[offset + pos2];
  var b3 = buffer[offset + pos3];
  // multiply (not shift) the top byte so values >= 2^31 stay positive
  return b0 * BIT24 + (b1 << 16) + (b2 << 8) + b3;
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
readInt32
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function toArray(raw) {
  var buf = this.buffer;
  var off = this.offset;
  storage = null; // Array becomes the preferred backing store from now on
  // reuse the existing storage when it is already an exact 8-byte Array
  var reusable = raw !== false && off === 0 && buf.length === 8 && isArray(buf);
  if (reusable) return buf;
  return newArray(buf, off);
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
toArray
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function toBuffer(raw) {
  var buf = this.buffer;
  var off = this.offset;
  storage = BUFFER; // Buffer becomes the preferred backing store
  // hand back the existing storage when it is already an exact 8-byte Buffer
  if (raw !== false && off === 0 && buf.length === 8 && Buffer.isBuffer(buf)) {
    return buf;
  }
  var copy = new BUFFER(8);
  fromArray(copy, 0, buf, off);
  return copy;
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
toBuffer
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function toArrayBuffer(raw) {
  var buf = this.buffer;
  var off = this.offset;
  var backing = buf.buffer;
  storage = UINT8ARRAY; // Uint8Array becomes the preferred backing store
  // reuse the underlying ArrayBuffer when it is exactly 8 bytes from 0
  var reusable = raw !== false && off === 0 &&
    (backing instanceof ARRAYBUFFER) && backing.byteLength === 8;
  if (reusable) return backing;
  var copy = new UINT8ARRAY(8);
  fromArray(copy, 0, buf, off);
  return copy.buffer;
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
toArrayBuffer
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function isValidBuffer(buffer, offset) {
  // A buffer is usable when at least 8 indexable non-string elements exist
  // starting at `offset` (strings would yield characters, not bytes).
  var len = buffer && buffer.length;
  offset |= 0;
  if (!len) return len; // preserve the original falsy return value
  if (offset + 8 > len) return false;
  return "string" !== typeof buffer[offset];
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
isValidBuffer
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function fromArray(destbuf, destoff, srcbuf, srcoff) {
  // Copy exactly eight bytes, masking each source value to 0..255.
  destoff |= 0;
  srcoff |= 0;
  for (var remaining = 8; remaining > 0; remaining--) {
    destbuf[destoff++] = srcbuf[srcoff++] & 255;
  }
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
fromArray
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function newArray(buffer, offset) {
  // Materialise the eight bytes starting at `offset` as a plain Array.
  var slice = Array.prototype.slice;
  return slice.call(buffer, offset, offset + 8);
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
newArray
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function fromPositiveBE(buffer, offset, value) {
  // Write a non-negative number as 8 big-endian bytes: fill from the
  // least significant end (highest index) backwards, dividing by 256.
  for (var idx = offset + 7; idx >= offset; idx--) {
    buffer[idx] = value & 255;
    value /= 256;
  }
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
fromPositiveBE
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function fromNegativeBE(buffer, offset, value) {
  // Write a negative number as 8 big-endian two's-complement bytes:
  // increment first, then store the complemented magnitude bytes.
  value++;
  for (var idx = offset + 7; idx >= offset; idx--) {
    buffer[idx] = ((-value) & 255) ^ 255;
    value /= 256;
  }
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
fromNegativeBE
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function fromPositiveLE(buffer, offset, value) {
  // Write a non-negative number as 8 little-endian bytes, least
  // significant byte at `offset`.
  var limit = offset + 8;
  for (var idx = offset; idx < limit; idx++) {
    buffer[idx] = value & 255;
    value /= 256;
  }
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
fromPositiveLE
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function fromNegativeLE(buffer, offset, value) {
  // Write a negative number as 8 little-endian two's-complement bytes.
  value++;
  var limit = offset + 8;
  for (var idx = offset; idx < limit; idx++) {
    buffer[idx] = ((-value) & 255) ^ 255;
    value /= 256;
  }
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
fromNegativeLE
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
function _isArray(val) {
  // Array.isArray fallback via the Object.prototype.toString tag.
  if (!val) return false;
  var tag = Object.prototype.toString.call(val);
  return "[object Array]" == tag;
}
|
Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
_isArray
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/libs/EBML.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/libs/EBML.js
|
MIT
|
// Self-test for the GLGE math helpers (vectors, 4x4 matrices).
// Exercises identity multiply, matrix-matrix multiply against known
// values, and inverse round-trips; throws a descriptive string on any
// mismatch. Depends entirely on the GLGE library object defined elsewhere.
function GLGE_mathUnitTest() {
var a=GLGE.Vec([1,2,3,4]);
// build b with a's components reversed using the 0-based and 1-based getters
var b=GLGE.Vec4(GLGE.getVec4(a,3),
GLGE.get1basedVec4(a,3),
GLGE.getVec4(a,1),
GLGE.getVec4(a,0));
var c=GLGE.identMatrix();
var d=GLGE.mulMat4Vec4(c,b);
// identity * b must reproduce b, i.e. [4,3,2,1]
if (GLGE.getVec4(d,0)!=4||
GLGE.getVec4(d,1)!=3||
GLGE.getVec4(d,2)!=2||
GLGE.getVec4(d,3)!=1) {
throw "Unit Test 1 failed MatVecMul "+d;
}
var m=GLGE.Mat4([3,4,5,0,.5,.75,0,0,.75,.5,0,0,.25,.25,1,1]);
var m1=GLGE.Mat4([2,1,8,2,1,4,3,2,1,.5,6.5,2,8,3,1,.25]);
var mm1=GLGE.mulMat4(m,m1);
// am1 holds the precomputed expected product m * m1
var am1=GLGE.Mat4([15,21.5,68.5,24,
1.75,3.5,6.25,2.5,
2,2.75,7.5,2.5,
9.75,4.75,10.25,3.25]);
// element-wise comparison with a 1e-6 tolerance
for (var i=0;i<4;++i) {
for (var j=0;j<4;++j) {
var diff=GLGE.getMat4(mm1,i,j)-GLGE.getMat4(am1,i,j);
if (diff<.000001&&diff>-.000001) {
}else {
throw "Unit Test 1 failed Multiplication "+GLGE.getMat4(mm1,i,j)+" != "+GLGE.getMat4(am1,i,j);
}
}
}
// m * inverse(m) should be the identity (checked against c, tol 1e-4)
var inv = GLGE.inverseMat4(m);
var k = GLGE.mulMat4(m,inv);
var l = GLGE.mulMat4(inv,m);
for (var i=0;i<4;++i) {
for (var j=0;j<4;++j) {
var diff=GLGE.getMat4(k,i,j)-GLGE.getMat4(c,i,j);
if (diff<.0001&&diff>-.0001) {
}else {
throw "Unit Test 1 failed Inverse "+GLGE.getMat4(k,i,j)+" != "+GLGE.getMat4(c,i,j);
}
}
}
}
|
Creates a rotation matrix
@returns {Array} value an array GLGE.Vec or 3 paramters
@returns {GLGE.Mat} the rotation matrix
|
GLGE_mathUnitTest
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/WebGL-Recording/vendor/glge-compiled.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/WebGL-Recording/vendor/glge-compiled.js
|
MIT
|
parseFloat2=function(val){
    // Pass numbers through untouched; coerce anything else via parseFloat.
    if (typeof val === "number") return val;
    return parseFloat(val);
}
|
@namespace Holds the functionality of the library
|
parseFloat2
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/WebGL-Recording/vendor/glge-compiled.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/WebGL-Recording/vendor/glge-compiled.js
|
MIT
|
matfunc=function(surfaceIdx){
    // Build a material setter permanently bound to surface `surfaceIdx`.
    var setter = function(material){
        this.setMaterial(material, surfaceIdx);
    };
    return setter;
}
|
Sets the Material to use
@param {GLGE.Material} material the material to use
@param {number} surface the surface to attach the material to
|
matfunc
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/WebGL-Recording/vendor/glge-compiled.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/WebGL-Recording/vendor/glge-compiled.js
|
MIT
|
function min(a,b){
    // Smaller of the two values; `a` wins ties (matches `a>b?b:a`).
    if (a > b) return b;
    return a;
}
|
Creates a new object and added the meshes parse in the geomertry
@param {string} id id of the geomerty to parse
@private
|
min
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/WebGL-Recording/vendor/glge-compiled.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/WebGL-Recording/vendor/glge-compiled.js
|
MIT
|
function getLastNumber(str){
    // Collect every digit character of `str` in order (despite the name,
    // digits anywhere in the string are kept, not only a trailing run).
    // Returns "0" when the string contains no digits.
    var digits = "";
    for (var idx = 0; idx < str.length; ++idx) {
        var ch = str[idx];
        if (ch >= "0" && ch <= "9") digits += ch;
    }
    return digits.length ? digits : "0";
}
|
gets the material alpha from the transparent color
@param {color} the transparent color
@param {opaque} the transparent color opaque attribute value
@param {transparency} the transparency value
@private
|
getLastNumber
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/WebGL-Recording/vendor/glge-compiled.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/WebGL-Recording/vendor/glge-compiled.js
|
MIT
|
findChild = function(root) {
    // Depth-first search for the first node flagged `hasCamera`; the hit is
    // recorded in the shared `tempCamera` variable (defined elsewhere).
    if (root.hasCamera) {
        tempCamera = root;
        return;
    }
    var kids = root.children;
    if (!kids) {
        return;
    }
    for (var n = 0; n < kids.length; n++) {
        if (tempCamera) break; // stop descending once a camera is found
        findChild(kids[n]);
    }
}
|
Initializes the Object/Scene when the collada document has been loaded
@private
|
findChild
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RecordRTC/WebGL-Recording/vendor/glge-compiled.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RecordRTC/WebGL-Recording/vendor/glge-compiled.js
|
MIT
|
/**
 * Opens the browser's "Save As" dialog for a Blob/File.
 * @param {Blob} file - Blob to save; required.
 * @param {string} [fileName] - Desired name, e.g. "clip.webm"; when it
 *   contains a dot, its extension overrides the one derived from file.type.
 *   Without a name, a random numeric name is generated.
 */
function invokeSaveAsDialog(file, fileName) {
if (!file) {
throw 'Blob object is required.';
}
if (!file.type) {
try {
// Blob.type is read-only in most browsers; the try/catch swallows the
// failure and the fallback below covers it anyway
file.type = 'video/webm';
} catch (e) {}
}
var fileExtension = (file.type || 'video/webm').split('/')[1];
if (fileName && fileName.indexOf('.') !== -1) {
// explicit "name.ext" wins over the MIME-derived extension
var splitted = fileName.split('.');
fileName = splitted[0];
fileExtension = splitted[1];
}
var fileFullName = (fileName || (Math.round(Math.random() * 9999999999) + 888888888)) + '.' + fileExtension;
// IE/Edge path: use the proprietary msSave* APIs when present
if (typeof navigator.msSaveOrOpenBlob !== 'undefined') {
return navigator.msSaveOrOpenBlob(file, fileFullName);
} else if (typeof navigator.msSaveBlob !== 'undefined') {
return navigator.msSaveBlob(file, fileFullName);
}
// everywhere else: synthesize a click on a temporary download link
var hyperlink = document.createElement('a');
hyperlink.href = URL.createObjectURL(file);
hyperlink.target = '_blank';
hyperlink.download = fileFullName;
if (!!navigator.mozGetUserMedia) {
// Firefox requires the anchor to be in the DOM before the click works;
// it removes itself once clicked
hyperlink.onclick = function() {
(document.body || document.documentElement).removeChild(hyperlink);
};
(document.body || document.documentElement).appendChild(hyperlink);
}
var evt = new MouseEvent('click', {
view: window,
bubbles: true,
cancelable: true
});
hyperlink.dispatchEvent(evt);
if (!navigator.mozGetUserMedia) {
// non-Firefox: the object URL can be released immediately after the click
URL.revokeObjectURL(hyperlink.href);
}
}
|
@param {Blob} file - File or Blob object. This parameter is required.
@param {string} fileName - Optional file name e.g. "Recorded-Video.webm"
@example
invokeSaveAsDialog(blob or file, [optional] fileName);
@see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
|
invokeSaveAsDialog
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
/**
 * Formats a byte count as a human-readable string using decimal (SI)
 * units, e.g. 1500000 -> "1.50 MB".
 * @param {number} bytes - Non-negative byte count.
 * @returns {string} Value with 3 significant digits plus a unit label.
 */
function bytesToSize(bytes) {
    var k = 1000;
    var sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
    if (bytes === 0) {
        return '0 Bytes';
    }
    // Math.floor alone suffices here — the old parseInt(Math.floor(...), 10)
    // pointlessly round-tripped a number through a string. Clamp the index
    // so values beyond the TB range no longer produce an "undefined" label.
    var i = Math.min(Math.floor(Math.log(bytes) / Math.log(k)), sizes.length - 1);
    return (bytes / Math.pow(k, i)).toPrecision(3) + ' ' + sizes[i];
}
|
@param {Blob} file - File or Blob object. This parameter is required.
@param {string} fileName - Optional file name e.g. "Recorded-Video.webm"
@example
invokeSaveAsDialog(blob or file, [optional] fileName);
@see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
|
bytesToSize
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Detects whether the current browser is expected to support the
// MediaRecorder API via user-agent sniffing: Firefox always qualifies,
// non-Chrome browsers never do, and Chrome qualifies from major version 49.
// Depends on browser globals (window, navigator).
function isMediaRecorderCompatible() {
var isOpera = !!window.opera || navigator.userAgent.indexOf(' OPR/') >= 0;
// window.chrome is also set in Opera, hence the !isOpera guard
var isChrome = !!window.chrome && !isOpera;
var isFirefox = typeof window.InstallTrigger !== 'undefined';
if (isFirefox) {
return true;
}
if (!isChrome) {
return false;
}
var nVer = navigator.appVersion;
var nAgt = navigator.userAgent;
var fullVersion = '' + parseFloat(navigator.appVersion);
var majorVersion = parseInt(navigator.appVersion, 10);
var nameOffset, verOffset, ix;
if (isChrome) {
// version string follows "Chrome" (7 chars including the trailing slash)
verOffset = nAgt.indexOf('Chrome');
fullVersion = nAgt.substring(verOffset + 7);
}
// trim the fullVersion string at semicolon/space if present
if ((ix = fullVersion.indexOf(';')) !== -1) {
fullVersion = fullVersion.substring(0, ix);
}
if ((ix = fullVersion.indexOf(' ')) !== -1) {
fullVersion = fullVersion.substring(0, ix);
}
majorVersion = parseInt('' + fullVersion, 10);
if (isNaN(majorVersion)) {
// fall back to appVersion when the UA string could not be parsed
fullVersion = '' + parseFloat(navigator.appVersion);
majorVersion = parseInt(navigator.appVersion, 10);
}
return majorVersion >= 49;
}
|
@param {Blob} file - File or Blob object. This parameter is required.
@param {string} fileName - Optional file name e.g. "Recorded-Video.webm"
@example
invokeSaveAsDialog(blob or file, [optional] fileName);
@see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
|
isMediaRecorderCompatible
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
function MediaRecorderWrapper(mediaStream) {
var self = this;
/**
* This method records MediaStream.
* @method
* @memberof MediaStreamRecorder
* @example
* recorder.start(5000);
*/
this.start = function(timeSlice, __disableLogs) {
this.timeSlice = timeSlice || 5000;
if (!self.mimeType) {
self.mimeType = 'video/webm';
}
if (self.mimeType.indexOf('audio') !== -1) {
if (mediaStream.getVideoTracks().length && mediaStream.getAudioTracks().length) {
var stream;
if (!!navigator.mozGetUserMedia) {
stream = new MediaStream();
stream.addTrack(mediaStream.getAudioTracks()[0]);
} else {
// webkitMediaStream
stream = new MediaStream(mediaStream.getAudioTracks());
}
mediaStream = stream;
}
}
if (self.mimeType.indexOf('audio') !== -1) {
self.mimeType = IsChrome ? 'audio/webm' : 'audio/ogg';
}
self.dontFireOnDataAvailableEvent = false;
var recorderHints = {
mimeType: self.mimeType
};
if (!self.disableLogs && !__disableLogs) {
console.log('Passing following params over MediaRecorder API.', recorderHints);
}
if (mediaRecorder) {
// mandatory to make sure Firefox doesn't fails to record streams 3-4 times without reloading the page.
mediaRecorder = null;
}
if (IsChrome && !isMediaRecorderCompatible()) {
// to support video-only recording on stable
recorderHints = 'video/vp8';
}
// http://dxr.mozilla.org/mozilla-central/source/content/media/MediaRecorder.cpp
// https://wiki.mozilla.org/Gecko:MediaRecorder
// https://dvcs.w3.org/hg/dap/raw-file/default/media-stream-capture/MediaRecorder.html
// starting a recording session; which will initiate "Reading Thread"
// "Reading Thread" are used to prevent main-thread blocking scenarios
try {
mediaRecorder = new MediaRecorder(mediaStream, recorderHints);
} catch (e) {
// if someone passed NON_supported mimeType
// or if Firefox on Android
mediaRecorder = new MediaRecorder(mediaStream);
}
if ('canRecordMimeType' in mediaRecorder && mediaRecorder.canRecordMimeType(self.mimeType) === false) {
if (!self.disableLogs) {
console.warn('MediaRecorder API seems unable to record mimeType:', self.mimeType);
}
}
// i.e. stop recording when <video> is paused by the user; and auto restart recording
// when video is resumed. E.g. yourStream.getVideoTracks()[0].muted = true; // it will auto-stop recording.
if (self.ignoreMutedMedia === true) {
mediaRecorder.ignoreMutedMedia = true;
}
var firedOnDataAvailableOnce = false;
// Dispatching OnDataAvailable Handler
mediaRecorder.ondataavailable = function(e) {
// how to fix FF-corrupt-webm issues?
// should we leave this? e.data.size < 26800
if (!e.data || !e.data.size || e.data.size < 26800 || firedOnDataAvailableOnce) {
return;
}
firedOnDataAvailableOnce = true;
var blob = self.getNativeBlob ? e.data : new Blob([e.data], {
type: self.mimeType || 'video/webm'
});
self.ondataavailable(blob);
// self.dontFireOnDataAvailableEvent = true;
if (!!mediaRecorder && mediaRecorder.state === 'recording') {
mediaRecorder.stop();
}
mediaRecorder = null;
if (self.dontFireOnDataAvailableEvent) {
return;
}
// record next interval
self.start(timeSlice, '__disableLogs');
};
mediaRecorder.onerror = function(error) {
if (!self.disableLogs) {
if (error.name === 'InvalidState') {
console.error('The MediaRecorder is not in a state in which the proposed operation is allowed to be executed.');
} else if (error.name === 'OutOfMemory') {
console.error('The UA has exhaused the available memory. User agents SHOULD provide as much additional information as possible in the message attribute.');
} else if (error.name === 'IllegalStreamModification') {
console.error('A modification to the stream has occurred that makes it impossible to continue recording. An example would be the addition of a Track while recording is occurring. User agents SHOULD provide as much additional information as possible in the message attribute.');
} else if (error.name === 'OtherRecordingError') {
console.error('Used for an fatal error other than those listed above. User agents SHOULD provide as much additional information as possible in the message attribute.');
} else if (error.name === 'GenericError') {
console.error('The UA cannot provide the codec or recording option that has been requested.', error);
} else {
console.error('MediaRecorder Error', error);
}
}
// When the stream is "ended" set recording to 'inactive'
// and stop gathering data. Callers should not rely on
// exactness of the timeSlice value, especially
// if the timeSlice value is small. Callers should
// consider timeSlice as a minimum value
if (!!mediaRecorder && mediaRecorder.state !== 'inactive' && mediaRecorder.state !== 'stopped') {
mediaRecorder.stop();
}
};
// void start(optional long mTimeSlice)
// The interval of passing encoded data from EncodedBufferCache to onDataAvailable
// handler. "mTimeSlice < 0" means Session object does not push encoded data to
// onDataAvailable, instead, it passive wait the client side pull encoded data
// by calling requestData API.
try {
mediaRecorder.start(3.6e+6);
} catch (e) {
mediaRecorder = null;
}
setTimeout(function() {
if (!mediaRecorder) {
return;
}
if (mediaRecorder.state === 'recording') {
// "stop" method auto invokes "requestData"!
mediaRecorder.requestData();
// mediaRecorder.stop();
}
}, timeSlice);
// Start recording. If timeSlice has been provided, mediaRecorder will
// raise a dataavailable event containing the Blob of collected data on every timeSlice milliseconds.
// If timeSlice isn't provided, UA should call the RequestData to obtain the Blob data, also set the mTimeSlice to zero.
};
/**
* This method stops recording MediaStream.
* @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
* @method
* @memberof MediaStreamRecorder
* @example
* recorder.stop(function(blob) {
* video.src = URL.createObjectURL(blob);
* });
*/
this.stop = function(callback) {
if (!mediaRecorder) {
return;
}
// mediaRecorder.state === 'recording' means that media recorder is associated with "session"
// mediaRecorder.state === 'stopped' means that media recorder is detached from the "session" ... in this case; "session" will also be deleted.
if (mediaRecorder.state === 'recording') {
// "stop" method auto invokes "requestData"!
mediaRecorder.requestData();
setTimeout(function() {
self.dontFireOnDataAvailableEvent = true;
if (!!mediaRecorder && mediaRecorder.state === 'recording') {
mediaRecorder.stop();
}
mediaRecorder = null;
self.onstop();
}, 2000);
}
};
/**
* This method pauses the recording process.
* @method
* @memberof MediaStreamRecorder
* @example
* recorder.pause();
*/
this.pause = function() {
if (!mediaRecorder) {
return;
}
if (mediaRecorder.state === 'recording') {
mediaRecorder.pause();
}
this.dontFireOnDataAvailableEvent = true;
};
/**
* The recorded blobs are passed over this event.
* @event
* @memberof MediaStreamRecorder
* @example
* recorder.ondataavailable = function(data) {};
*/
this.ondataavailable = function(blob) {
console.log('recorded-blob', blob);
};
/**
* This method resumes the recording process.
* @method
* @memberof MediaStreamRecorder
* @example
* recorder.resume();
*/
this.resume = function() {
if (this.dontFireOnDataAvailableEvent) {
this.dontFireOnDataAvailableEvent = false;
var disableLogs = self.disableLogs;
self.disableLogs = true;
this.start(this.timeslice || 5000);
self.disableLogs = disableLogs;
return;
}
if (!mediaRecorder) {
return;
}
if (mediaRecorder.state === 'paused') {
mediaRecorder.resume();
}
};
/**
* This method resets currently recorded data.
* @method
* @memberof MediaStreamRecorder
* @example
* recorder.clearRecordedData();
*/
this.clearRecordedData = function() {
if (!mediaRecorder) {
return;
}
this.pause();
this.dontFireOnDataAvailableEvent = true;
this.stop();
};
this.onstop = function() {};
// Reference to "MediaRecorder" object
var mediaRecorder;
function isMediaStreamActive() {
if ('active' in mediaStream) {
if (!mediaStream.active) {
return false;
}
} else if ('ended' in mediaStream) { // old hack
if (mediaStream.ended) {
return false;
}
}
return true;
}
// this method checks if media stream is stopped
// or any track is ended.
(function looper() {
if (!mediaRecorder) {
return;
}
if (isMediaStreamActive() === false) {
self.stop();
return;
}
setTimeout(looper, 1000); // check every second
})();
}
|
Implementation of https://dvcs.w3.org/hg/dap/raw-file/default/media-stream-capture/MediaRecorder.html
The MediaRecorder accepts a mediaStream as input source passed from UA. When recorder starts,
a MediaEncoder will be created and accept the mediaStream as input source.
Encoder will get the raw data from track data changes, encode it with the selected MIME type, then store the encoded data in the EncodedBufferCache object.
The encoded data will be extracted on every timeslice passed from Start function call or by RequestData function.
Thread model:
When the recorder starts, it creates a "Media Encoder" thread to read data from the MediaEncoder object and store the buffer in the EncodedBufferCache object.
Also extract the encoded data and create blobs on every timeslice passed from start function or RequestData function called by UA.
|
MediaRecorderWrapper
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Reports whether the captured MediaStream is still live. A stream counts as
// active unless one of the status flags says otherwise: modern browsers expose
// "active", older implementations exposed "ended".
function isMediaStreamActive() {
    if ('active' in mediaStream) {
        if (mediaStream.active) {
            return true;
        }
        return false;
    }
    if ('ended' in mediaStream) { // legacy fallback
        return mediaStream.ended ? false : true;
    }
    return true;
}
|
This method resets currently recorded data.
@method
@memberof MediaStreamRecorder
@example
recorder.clearRecordedData();
|
isMediaStreamActive
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Records the audio of a MediaStream as WAV/PCM by delegating capture to
// StereoAudioRecorderHelper and periodically flushing data to "ondataavailable".
function StereoAudioRecorder(mediaStream) {
    // void start(optional long timeSlice)
    // timestamp to fire "ondataavailable"
    this.start = function(timeSlice) {
        timeSlice = timeSlice || 1000;
        mediaRecorder = new StereoAudioRecorderHelper(mediaStream, this);
        mediaRecorder.record();
        // periodically flush the captured samples to "ondataavailable"
        timeout = setInterval(function() {
            mediaRecorder.requestData();
        }, timeSlice);
    };
    this.stop = function() {
        if (mediaRecorder) {
            mediaRecorder.stop();
            // fix: the id comes from setInterval, so cancel it with clearInterval.
            // (clearTimeout happened to work because browsers keep timer ids in
            // a shared pool, but clearInterval states the intent correctly.)
            clearInterval(timeout);
            this.onstop();
        }
    };
    this.pause = function() {
        if (!mediaRecorder) {
            return;
        }
        mediaRecorder.pause();
    };
    this.resume = function() {
        if (!mediaRecorder) {
            return;
        }
        mediaRecorder.resume();
    };
    // Overridden by consumers to receive recorded blobs.
    this.ondataavailable = function() {};
    // Invoked after recording has stopped.
    this.onstop = function() {};
    // Reference to "StereoAudioRecorderHelper" object
    var mediaRecorder;
    var timeout;
}
|
This method resets currently recorded data.
@method
@memberof MediaStreamRecorder
@example
recorder.clearRecordedData();
|
StereoAudioRecorder
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Captures raw PCM from a MediaStream through the Web Audio API
// (MediaStreamSource -> GainNode -> ScriptProcessorNode) and packages the
// samples as WAV (or raw PCM) blobs delivered via root.ondataavailable.
function StereoAudioRecorderHelper(mediaStream, root) {
    // variables
    var deviceSampleRate = 44100; // range: 22050 to 96000
    if (!ObjectStore.AudioContextConstructor) {
        ObjectStore.AudioContextConstructor = new ObjectStore.AudioContext();
    }
    // check device sample rate
    deviceSampleRate = ObjectStore.AudioContextConstructor.sampleRate;
    var leftchannel = [];
    var rightchannel = [];
    var scriptprocessornode;
    var recording = false;
    var recordingLength = 0; // total captured sample count per channel
    var volume;
    var audioInput;
    // NOTE(review): writing a sampleRate different from the device rate into
    // the WAV header changes playback speed; no resampling happens here.
    var sampleRate = root.sampleRate || deviceSampleRate;
    var mimeType = root.mimeType || 'audio/wav';
    var isPCM = mimeType.indexOf('audio/pcm') > -1;
    var context;
    var numChannels = root.audioChannels || 2;
    // Starts (or restarts) capture; any previously captured buffers are dropped.
    this.record = function() {
        recording = true;
        // reset the buffers for the new recording
        leftchannel.length = rightchannel.length = 0;
        recordingLength = 0;
    };
    // Builds a blob from everything captured so far, emits it through
    // root.ondataavailable, and resets the capture buffers.
    this.requestData = function() {
        if (isPaused) {
            return;
        }
        if (recordingLength === 0) {
            requestDataInvoked = false;
            return;
        }
        requestDataInvoked = true;
        // clone stuff
        var internalLeftChannel = leftchannel.slice(0);
        var internalRightChannel = rightchannel.slice(0);
        var internalRecordingLength = recordingLength;
        // reset the buffers for the new recording
        // (assigning [] to .length coerces to the number 0, so this clears both)
        leftchannel.length = rightchannel.length = [];
        recordingLength = 0;
        requestDataInvoked = false;
        // we flat the left and right channels down
        var leftBuffer = mergeBuffers(internalLeftChannel, internalRecordingLength);
        var interleaved = leftBuffer;
        // we interleave both channels together
        if (numChannels === 2) {
            var rightBuffer = mergeBuffers(internalRightChannel, internalRecordingLength); // bug fixed via #70,#71
            interleaved = interleave(leftBuffer, rightBuffer);
        }
        if (isPCM) {
            // our final binary blob
            var blob = new Blob([convertoFloat32ToInt16(interleaved)], {
                type: 'audio/pcm'
            });
            console.debug('audio recorded blob size:', bytesToSize(blob.size));
            root.ondataavailable(blob);
            return;
        }
        // we create our wav file
        var buffer = new ArrayBuffer(44 + interleaved.length * 2);
        var view = new DataView(buffer);
        // RIFF chunk descriptor
        writeUTFBytes(view, 0, 'RIFF');
        // -8 (via #97)
        view.setUint32(4, 44 + interleaved.length * 2 - 8, true);
        writeUTFBytes(view, 8, 'WAVE');
        // FMT sub-chunk
        writeUTFBytes(view, 12, 'fmt ');
        view.setUint32(16, 16, true);
        view.setUint16(20, 1, true); // PCM format tag
        // stereo (2 channels)
        view.setUint16(22, numChannels, true);
        view.setUint32(24, sampleRate, true);
        view.setUint32(28, sampleRate * numChannels * 2, true); // numChannels * 2 (via #71)
        view.setUint16(32, numChannels * 2, true); // block align
        view.setUint16(34, 16, true); // bits per sample
        // data sub-chunk
        writeUTFBytes(view, 36, 'data');
        view.setUint32(40, interleaved.length * 2, true);
        // write the PCM samples
        var lng = interleaved.length;
        var index = 44;
        var volume = 1;
        for (var i = 0; i < lng; i++) {
            view.setInt16(index, interleaved[i] * (0x7FFF * volume), true);
            index += 2;
        }
        // our final binary blob
        var blob = new Blob([view], {
            type: 'audio/wav'
        });
        console.debug('audio recorded blob size:', bytesToSize(blob.size));
        root.ondataavailable(blob);
    };
    // Stops capture, flushes remaining samples and disconnects the audio graph.
    this.stop = function() {
        // we stop recording
        recording = false;
        this.requestData();
        audioInput.disconnect();
        this.onstop();
    };
    // Interleaves L/R samples as [L0, R0, L1, R1, ...].
    function interleave(leftChannel, rightChannel) {
        var length = leftChannel.length + rightChannel.length;
        var result = new Float32Array(length);
        var inputIndex = 0;
        for (var index = 0; index < length;) {
            result[index++] = leftChannel[inputIndex];
            result[index++] = rightChannel[inputIndex];
            inputIndex++;
        }
        return result;
    }
    // Concatenates an array of Float32Array chunks into one buffer of
    // recordingLength samples.
    function mergeBuffers(channelBuffer, recordingLength) {
        var result = new Float32Array(recordingLength);
        var offset = 0;
        var lng = channelBuffer.length;
        for (var i = 0; i < lng; i++) {
            var buffer = channelBuffer[i];
            result.set(buffer, offset);
            offset += buffer.length;
        }
        return result;
    }
    // Writes an ASCII string byte-by-byte into the DataView (WAV header tags).
    function writeUTFBytes(view, offset, string) {
        var lng = string.length;
        for (var i = 0; i < lng; i++) {
            view.setUint8(offset + i, string.charCodeAt(i));
        }
    }
    // Converts normalized float samples to 16-bit integers.
    // NOTE(review): scaling by 0xFFFF overflows Int16 for |sample| > 0.5 and
    // wraps around; the conventional full-scale factor is 0x7FFF — confirm
    // consumers of 'audio/pcm' expect this behavior before changing it.
    function convertoFloat32ToInt16(buffer) {
        var l = buffer.length;
        var buf = new Int16Array(l)
        while (l--) {
            buf[l] = buffer[l] * 0xFFFF; //convert to 16 bit
        }
        return buf.buffer
    }
    // creates the audio context
    var context = ObjectStore.AudioContextConstructor;
    // creates a gain node
    ObjectStore.VolumeGainNode = context.createGain();
    var volume = ObjectStore.VolumeGainNode;
    // creates an audio node from the microphone incoming stream
    ObjectStore.AudioInput = context.createMediaStreamSource(mediaStream);
    // creates an audio node from the microphone incoming stream
    var audioInput = ObjectStore.AudioInput;
    // connect the stream to the gain node
    audioInput.connect(volume);
    /* From the spec: This value controls how frequently the audioprocess event is
    dispatched and how many sample-frames need to be processed each call.
    Lower values for buffer size will result in a lower (better) latency.
    Higher values will be necessary to avoid audio breakup and glitches
    Legal values are 256, 512, 1024, 2048, 4096, 8192, and 16384.*/
    var bufferSize = root.bufferSize || 2048;
    if (root.bufferSize === 0) {
        // 0 lets the implementation pick an appropriate buffer size
        bufferSize = 0;
    }
    if (context.createJavaScriptNode) {
        // deprecated pre-standard name for createScriptProcessor
        scriptprocessornode = context.createJavaScriptNode(bufferSize, numChannels, numChannels);
    } else if (context.createScriptProcessor) {
        scriptprocessornode = context.createScriptProcessor(bufferSize, numChannels, numChannels);
    } else {
        throw 'WebAudio API has no support on this browser.';
    }
    bufferSize = scriptprocessornode.bufferSize;
    console.debug('using audio buffer-size:', bufferSize);
    var requestDataInvoked = false;
    // sometimes "scriptprocessornode" disconnects from he destination-node
    // and there is no exception thrown in this case.
    // and obviously no further "ondataavailable" events will be emitted.
    // below global-scope variable is added to debug such unexpected but "rare" cases.
    window.scriptprocessornode = scriptprocessornode;
    if (numChannels === 1) {
        console.debug('All right-channels are skipped.');
    }
    var isPaused = false;
    this.pause = function() {
        isPaused = true;
    };
    this.resume = function() {
        isPaused = false;
    };
    this.onstop = function() {};
    // http://webaudio.github.io/web-audio-api/#the-scriptprocessornode-interface
    scriptprocessornode.onaudioprocess = function(e) {
        if (!recording || requestDataInvoked || isPaused) {
            return;
        }
        // copy each channel's samples; the event's buffers are reused by the UA
        var left = e.inputBuffer.getChannelData(0);
        leftchannel.push(new Float32Array(left));
        if (numChannels === 2) {
            var right = e.inputBuffer.getChannelData(1);
            rightchannel.push(new Float32Array(right));
        }
        recordingLength += bufferSize;
    };
    volume.connect(scriptprocessornode);
    // the processor must be connected to a destination to keep firing events
    scriptprocessornode.connect(context.destination);
}
|
This method resets currently recorded data.
@method
@memberof MediaStreamRecorder
@example
recorder.clearRecordedData();
|
StereoAudioRecorderHelper
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Interleaves two per-channel sample buffers into one stereo buffer laid out
// as [L0, R0, L1, R1, ...]. Both inputs are expected to have equal length.
function interleave(leftChannel, rightChannel) {
    var total = leftChannel.length + rightChannel.length;
    var output = new Float32Array(total);
    var sampleIndex = 0;
    var writeIndex = 0;
    while (writeIndex < total) {
        output[writeIndex++] = leftChannel[sampleIndex];
        output[writeIndex++] = rightChannel[sampleIndex];
        sampleIndex++;
    }
    return output;
}
|
This method resets currently recorded data.
@method
@memberof MediaStreamRecorder
@example
recorder.clearRecordedData();
|
interleave
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Flattens an array of Float32Array chunks into a single contiguous buffer of
// exactly recordingLength samples (the caller tracks the combined length).
function mergeBuffers(channelBuffer, recordingLength) {
    var merged = new Float32Array(recordingLength);
    var position = 0;
    channelBuffer.forEach(function(chunk) {
        merged.set(chunk, position);
        position += chunk.length;
    });
    return merged;
}
|
This method resets currently recorded data.
@method
@memberof MediaStreamRecorder
@example
recorder.clearRecordedData();
|
mergeBuffers
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Writes the characters of an ASCII string into the DataView one byte at a
// time, starting at the given offset (used for WAV header tags like "RIFF").
function writeUTFBytes(view, offset, string) {
    string.split('').forEach(function(character, index) {
        view.setUint8(offset + index, character.charCodeAt(0));
    });
}
|
This method resets currently recorded data.
@method
@memberof MediaStreamRecorder
@example
recorder.clearRecordedData();
|
writeUTFBytes
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Converts normalized [-1, 1] float samples to signed 16-bit PCM and returns
// the underlying ArrayBuffer.
// fix: the previous scale factor (0xFFFF) overflowed Int16 for |sample| > 0.5,
// wrapping around and producing audible distortion. 0x7FFF is the correct
// full-scale value for signed 16-bit audio; samples are clamped first so
// out-of-range input cannot wrap either.
function convertoFloat32ToInt16(buffer) {
    var l = buffer.length;
    var buf = new Int16Array(l);
    while (l--) {
        var sample = Math.max(-1, Math.min(1, buffer[l]));
        buf[l] = sample * 0x7FFF; //convert to 16 bit
    }
    return buf.buffer;
}
|
This method resets currently recorded data.
@method
@memberof MediaStreamRecorder
@example
recorder.clearRecordedData();
|
convertoFloat32ToInt16
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Records a MediaStream as WebM by delegating frame capture to
// WhammyRecorderHelper and periodically flushing encoded data through
// "ondataavailable".
function WhammyRecorder(mediaStream) {
    // void start(optional long timeSlice)
    // timestamp to fire "ondataavailable"
    this.start = function(timeSlice) {
        timeSlice = timeSlice || 1000;
        mediaRecorder = new WhammyRecorderHelper(mediaStream, this);
        // forward configuration values (width, height, quality, ...) to the helper
        for (var prop in this) {
            if (typeof this[prop] !== 'function') {
                mediaRecorder[prop] = this[prop];
            }
        }
        mediaRecorder.record();
        timeout = setInterval(function() {
            mediaRecorder.requestData();
        }, timeSlice);
    };
    this.stop = function() {
        if (mediaRecorder) {
            mediaRecorder.stop();
            // fix: the id comes from setInterval, so cancel it with clearInterval.
            // (clearTimeout happened to work because browsers keep timer ids in
            // a shared pool, but clearInterval states the intent correctly.)
            clearInterval(timeout);
            this.onstop();
        }
    };
    // Invoked after recording has stopped.
    this.onstop = function() {};
    // Drops all frames captured so far without stopping the recording.
    this.clearOldRecordedFrames = function() {
        if (mediaRecorder) {
            mediaRecorder.clearOldRecordedFrames();
        }
    };
    this.pause = function() {
        if (!mediaRecorder) {
            return;
        }
        mediaRecorder.pause();
    };
    this.resume = function() {
        if (!mediaRecorder) {
            return;
        }
        mediaRecorder.resume();
    };
    // Overridden by consumers to receive recorded blobs.
    this.ondataavailable = function() {};
    // Reference to "WhammyRecorderHelper" object
    var mediaRecorder;
    var timeout;
}
|
This method resets currently recorded data.
@method
@memberof MediaStreamRecorder
@example
recorder.clearRecordedData();
|
WhammyRecorder
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Captures video frames from a MediaStream (or an existing <video> element)
// onto a canvas, collects them as WebP data-URLs and compiles them into WebM
// blobs via Whammy; blobs are delivered through root.ondataavailable.
function WhammyRecorderHelper(mediaStream, root) {
    this.record = function(timeSlice) {
        if (!this.width) {
            this.width = 320;
        }
        if (!this.height) {
            this.height = 240;
        }
        if (this.video && this.video instanceof HTMLVideoElement) {
            // NOTE(review): these two branches are unreachable — width/height
            // were just defaulted above, so the "videoWidth" fallbacks never run.
            if (!this.width) {
                this.width = video.videoWidth || video.clientWidth || 320;
            }
            if (!this.height) {
                this.height = video.videoHeight || video.clientHeight || 240;
            }
        }
        if (!this.video) {
            this.video = {
                width: this.width,
                height: this.height
            };
        }
        if (!this.canvas || !this.canvas.width || !this.canvas.height) {
            this.canvas = {
                width: this.width,
                height: this.height
            };
        }
        canvas.width = this.canvas.width;
        canvas.height = this.canvas.height;
        // setting defaults
        if (this.video && this.video instanceof HTMLVideoElement) {
            // capture from a caller-supplied <video> element
            this.isHTMLObject = true;
            video = this.video.cloneNode();
        } else {
            // otherwise build a hidden <video> playing the MediaStream
            video = document.createElement('video');
            video.src = URL.createObjectURL(mediaStream);
            video.width = this.video.width;
            video.height = this.video.height;
        }
        video.muted = true;
        video.play();
        lastTime = new Date().getTime();
        whammy = new Whammy.Video(root.speed, root.quality);
        console.log('canvas resolutions', canvas.width, '*', canvas.height);
        console.log('video width/height', video.width || canvas.width, '*', video.height || canvas.height);
        drawFrames();
    };
    // Drops all frames captured so far without stopping the capture loop.
    this.clearOldRecordedFrames = function() {
        whammy.frames = [];
    };
    var requestDataInvoked = false;
    // Compiles the frames captured so far into a WebM blob and emits it via
    // root.ondataavailable; leading black frames are dropped first.
    this.requestData = function() {
        if (isPaused) {
            return;
        }
        if (!whammy.frames.length) {
            requestDataInvoked = false;
            return;
        }
        requestDataInvoked = true;
        // clone stuff
        var internalFrames = whammy.frames.slice(0);
        // reset the frames for the new recording
        whammy.frames = dropBlackFrames(internalFrames, -1);
        whammy.compile(function(whammyBlob) {
            root.ondataavailable(whammyBlob);
            console.debug('video recorded blob size:', bytesToSize(whammyBlob.size));
        });
        whammy.frames = [];
        requestDataInvoked = false;
    };
    var isOnStartedDrawingNonBlankFramesInvoked = false;
    // Capture loop: grabs one frame per tick onto the canvas and stores it
    // (with its elapsed duration) in the Whammy frame list.
    function drawFrames() {
        if (isPaused) {
            // while paused, keep the clock fresh so resumed frames get sane durations
            lastTime = new Date().getTime();
            setTimeout(drawFrames, 500);
            return;
        }
        if (isStopDrawing) {
            return;
        }
        if (requestDataInvoked) {
            // a blob is being assembled; retry shortly without touching the frames
            return setTimeout(drawFrames, 100);
        }
        var duration = new Date().getTime() - lastTime;
        if (!duration) {
            // zero elapsed time would create a 0-duration frame; try again
            return drawFrames();
        }
        // via webrtc-experiment#206, by Jack i.e. @Seymourr
        lastTime = new Date().getTime();
        if (!self.isHTMLObject && video.paused) {
            video.play(); // Android
        }
        context.drawImage(video, 0, 0, canvas.width, canvas.height);
        if (!isStopDrawing) {
            whammy.frames.push({
                duration: duration,
                image: canvas.toDataURL('image/webp')
            });
        }
        if (!isOnStartedDrawingNonBlankFramesInvoked && !isBlankFrame(whammy.frames[whammy.frames.length - 1])) {
            // fire root.onStartedDrawingNonBlankFrames exactly once
            isOnStartedDrawingNonBlankFramesInvoked = true;
            root.onStartedDrawingNonBlankFrames();
        }
        setTimeout(drawFrames, 10);
    }
    var isStopDrawing = false;
    // Halts the capture loop and flushes the remaining frames.
    this.stop = function() {
        isStopDrawing = true;
        this.requestData();
        this.onstop();
    };
    var canvas = document.createElement('canvas');
    var context = canvas.getContext('2d');
    var video;
    var lastTime;
    var whammy;
    var self = this;
    // Heuristic: returns true when the frame is (near-)uniformly black.
    // _pixTolerance: per-pixel color distance tolerance in [0, 1];
    // _frameTolerance: fraction of non-matching pixels allowed in [0, 1].
    // NOTE(review): the data-URL is assigned to Image.src and drawn
    // synchronously — decoding may not be complete at draw time; confirm.
    function isBlankFrame(frame, _pixTolerance, _frameTolerance) {
        var localCanvas = document.createElement('canvas');
        localCanvas.width = canvas.width;
        localCanvas.height = canvas.height;
        var context2d = localCanvas.getContext('2d');
        var sampleColor = {
            r: 0,
            g: 0,
            b: 0
        };
        var maxColorDifference = Math.sqrt(
            Math.pow(255, 2) +
            Math.pow(255, 2) +
            Math.pow(255, 2)
        );
        var pixTolerance = _pixTolerance && _pixTolerance >= 0 && _pixTolerance <= 1 ? _pixTolerance : 0;
        var frameTolerance = _frameTolerance && _frameTolerance >= 0 && _frameTolerance <= 1 ? _frameTolerance : 0;
        var matchPixCount, endPixCheck, maxPixCount;
        var image = new Image();
        image.src = frame.image;
        context2d.drawImage(image, 0, 0, canvas.width, canvas.height);
        var imageData = context2d.getImageData(0, 0, canvas.width, canvas.height);
        matchPixCount = 0;
        endPixCheck = imageData.data.length;
        maxPixCount = imageData.data.length / 4;
        for (var pix = 0; pix < endPixCheck; pix += 4) {
            var currentColor = {
                r: imageData.data[pix],
                g: imageData.data[pix + 1],
                b: imageData.data[pix + 2]
            };
            var colorDifference = Math.sqrt(
                Math.pow(currentColor.r - sampleColor.r, 2) +
                Math.pow(currentColor.g - sampleColor.g, 2) +
                Math.pow(currentColor.b - sampleColor.b, 2)
            );
            // difference in color it is difference in color vectors (r1,g1,b1) <=> (r2,g2,b2)
            if (colorDifference <= maxColorDifference * pixTolerance) {
                matchPixCount++;
            }
        }
        if (maxPixCount - matchPixCount <= maxPixCount * frameTolerance) {
            return false;
        } else {
            return true;
        }
    }
    // Removes leading black frames from the list. _framesToCheck === -1 means
    // "check until the first non-black frame"; otherwise at most that many
    // frames are examined. Tolerances behave as in isBlankFrame.
    function dropBlackFrames(_frames, _framesToCheck, _pixTolerance, _frameTolerance) {
        var localCanvas = document.createElement('canvas');
        localCanvas.width = canvas.width;
        localCanvas.height = canvas.height;
        var context2d = localCanvas.getContext('2d');
        var resultFrames = [];
        var checkUntilNotBlack = _framesToCheck === -1;
        var endCheckFrame = (_framesToCheck && _framesToCheck > 0 && _framesToCheck <= _frames.length) ?
            _framesToCheck : _frames.length;
        var sampleColor = {
            r: 0,
            g: 0,
            b: 0
        };
        var maxColorDifference = Math.sqrt(
            Math.pow(255, 2) +
            Math.pow(255, 2) +
            Math.pow(255, 2)
        );
        var pixTolerance = _pixTolerance && _pixTolerance >= 0 && _pixTolerance <= 1 ? _pixTolerance : 0;
        var frameTolerance = _frameTolerance && _frameTolerance >= 0 && _frameTolerance <= 1 ? _frameTolerance : 0;
        var doNotCheckNext = false;
        for (var f = 0; f < endCheckFrame; f++) {
            var matchPixCount, endPixCheck, maxPixCount;
            if (!doNotCheckNext) {
                var image = new Image();
                image.src = _frames[f].image;
                context2d.drawImage(image, 0, 0, canvas.width, canvas.height);
                var imageData = context2d.getImageData(0, 0, canvas.width, canvas.height);
                matchPixCount = 0;
                endPixCheck = imageData.data.length;
                maxPixCount = imageData.data.length / 4;
                for (var pix = 0; pix < endPixCheck; pix += 4) {
                    var currentColor = {
                        r: imageData.data[pix],
                        g: imageData.data[pix + 1],
                        b: imageData.data[pix + 2]
                    };
                    var colorDifference = Math.sqrt(
                        Math.pow(currentColor.r - sampleColor.r, 2) +
                        Math.pow(currentColor.g - sampleColor.g, 2) +
                        Math.pow(currentColor.b - sampleColor.b, 2)
                    );
                    // difference in color it is difference in color vectors (r1,g1,b1) <=> (r2,g2,b2)
                    if (colorDifference <= maxColorDifference * pixTolerance) {
                        matchPixCount++;
                    }
                }
            }
            if (!doNotCheckNext && maxPixCount - matchPixCount <= maxPixCount * frameTolerance) {
                // console.log('removed black frame : ' + f + ' ; frame duration ' + _frames[f].duration);
            } else {
                // console.log('frame is passed : ' + f);
                if (checkUntilNotBlack) {
                    doNotCheckNext = true;
                }
                resultFrames.push(_frames[f]);
            }
        }
        resultFrames = resultFrames.concat(_frames.slice(endCheckFrame));
        if (resultFrames.length <= 0) {
            // at least one last frame should be available for next manipulation
            // if total duration of all frames will be < 1000 than ffmpeg doesn't work well...
            resultFrames.push(_frames[_frames.length - 1]);
        }
        return resultFrames;
    }
    var isPaused = false;
    this.pause = function() {
        isPaused = true;
    };
    this.resume = function() {
        isPaused = false;
    };
    this.onstop = function() {};
}
|
This method resets currently recorded data.
@method
@memberof MediaStreamRecorder
@example
recorder.clearRecordedData();
|
WhammyRecorderHelper
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Capture loop: grabs one frame from the <video> element onto the canvas and
// appends it (as a WebP data-URL plus its elapsed duration) to the Whammy
// frame list, then re-schedules itself. Relies on closure state: isPaused,
// isStopDrawing, requestDataInvoked, lastTime, video, context, canvas,
// whammy, self, root, isOnStartedDrawingNonBlankFramesInvoked.
function drawFrames() {
    if (isPaused) {
        // while paused, keep the clock fresh so resumed frames get sane durations
        lastTime = new Date().getTime();
        setTimeout(drawFrames, 500);
        return;
    }
    if (isStopDrawing) {
        return;
    }
    if (requestDataInvoked) {
        // a blob is being assembled; retry shortly without touching the frames
        return setTimeout(drawFrames, 100);
    }
    var duration = new Date().getTime() - lastTime;
    if (!duration) {
        // zero elapsed time would create a 0-duration frame; try again
        return drawFrames();
    }
    // via webrtc-experiment#206, by Jack i.e. @Seymourr
    lastTime = new Date().getTime();
    if (!self.isHTMLObject && video.paused) {
        video.play(); // Android
    }
    context.drawImage(video, 0, 0, canvas.width, canvas.height);
    if (!isStopDrawing) {
        whammy.frames.push({
            duration: duration,
            image: canvas.toDataURL('image/webp')
        });
    }
    if (!isOnStartedDrawingNonBlankFramesInvoked && !isBlankFrame(whammy.frames[whammy.frames.length - 1])) {
        // fire root.onStartedDrawingNonBlankFrames exactly once
        isOnStartedDrawingNonBlankFramesInvoked = true;
        root.onStartedDrawingNonBlankFrames();
    }
    setTimeout(drawFrames, 10);
}
|
This method resets currently recorded data.
@method
@memberof MediaStreamRecorder
@example
recorder.clearRecordedData();
|
drawFrames
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Heuristic: returns true when the frame is (near-)uniformly black.
// frame: { image: <data-URL>, duration: ms } as produced by the capture loop.
// _pixTolerance: per-pixel color distance tolerance in [0, 1] (default 0);
// _frameTolerance: fraction of non-matching pixels allowed in [0, 1] (default 0).
// Uses the closure's "canvas" only for its dimensions.
// NOTE(review): the data-URL is assigned to Image.src and drawn immediately —
// decoding may not be complete at draw time; confirm this is acceptable.
function isBlankFrame(frame, _pixTolerance, _frameTolerance) {
    var localCanvas = document.createElement('canvas');
    localCanvas.width = canvas.width;
    localCanvas.height = canvas.height;
    var context2d = localCanvas.getContext('2d');
    // reference color: pure black
    var sampleColor = {
        r: 0,
        g: 0,
        b: 0
    };
    // maximum possible Euclidean distance between two RGB colors
    var maxColorDifference = Math.sqrt(
        Math.pow(255, 2) +
        Math.pow(255, 2) +
        Math.pow(255, 2)
    );
    var pixTolerance = _pixTolerance && _pixTolerance >= 0 && _pixTolerance <= 1 ? _pixTolerance : 0;
    var frameTolerance = _frameTolerance && _frameTolerance >= 0 && _frameTolerance <= 1 ? _frameTolerance : 0;
    var matchPixCount, endPixCheck, maxPixCount;
    var image = new Image();
    image.src = frame.image;
    context2d.drawImage(image, 0, 0, canvas.width, canvas.height);
    var imageData = context2d.getImageData(0, 0, canvas.width, canvas.height);
    matchPixCount = 0;
    endPixCheck = imageData.data.length;
    maxPixCount = imageData.data.length / 4;
    // RGBA layout: step 4 bytes per pixel, alpha channel ignored
    for (var pix = 0; pix < endPixCheck; pix += 4) {
        var currentColor = {
            r: imageData.data[pix],
            g: imageData.data[pix + 1],
            b: imageData.data[pix + 2]
        };
        var colorDifference = Math.sqrt(
            Math.pow(currentColor.r - sampleColor.r, 2) +
            Math.pow(currentColor.g - sampleColor.g, 2) +
            Math.pow(currentColor.b - sampleColor.b, 2)
        );
        // difference in color it is difference in color vectors (r1,g1,b1) <=> (r2,g2,b2)
        if (colorDifference <= maxColorDifference * pixTolerance) {
            matchPixCount++;
        }
    }
    // "blank" here actually means "NOT black": few enough non-black pixels
    // returns false (frame is black), otherwise true (frame has content)
    if (maxPixCount - matchPixCount <= maxPixCount * frameTolerance) {
        return false;
    } else {
        return true;
    }
}
|
This method resets currently recorded data.
@method
@memberof MediaStreamRecorder
@example
recorder.clearRecordedData();
|
isBlankFrame
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Removes leading black frames from a captured frame list.
// _frames: array of { image: <data-URL>, duration: ms }.
// _framesToCheck: -1 means "drop frames until the first non-black one";
//   a positive count limits how many leading frames are examined.
// _pixTolerance / _frameTolerance: see isBlankFrame; both default to 0.
// Always returns at least one frame.
function dropBlackFrames(_frames, _framesToCheck, _pixTolerance, _frameTolerance) {
    var localCanvas = document.createElement('canvas');
    localCanvas.width = canvas.width;
    localCanvas.height = canvas.height;
    var context2d = localCanvas.getContext('2d');
    var resultFrames = [];
    var checkUntilNotBlack = _framesToCheck === -1;
    var endCheckFrame = (_framesToCheck && _framesToCheck > 0 && _framesToCheck <= _frames.length) ?
        _framesToCheck : _frames.length;
    // reference color: pure black
    var sampleColor = {
        r: 0,
        g: 0,
        b: 0
    };
    // maximum possible Euclidean distance between two RGB colors
    var maxColorDifference = Math.sqrt(
        Math.pow(255, 2) +
        Math.pow(255, 2) +
        Math.pow(255, 2)
    );
    var pixTolerance = _pixTolerance && _pixTolerance >= 0 && _pixTolerance <= 1 ? _pixTolerance : 0;
    var frameTolerance = _frameTolerance && _frameTolerance >= 0 && _frameTolerance <= 1 ? _frameTolerance : 0;
    // once a non-black frame is seen (in -1 mode), later frames are kept unchecked
    var doNotCheckNext = false;
    for (var f = 0; f < endCheckFrame; f++) {
        var matchPixCount, endPixCheck, maxPixCount;
        if (!doNotCheckNext) {
            var image = new Image();
            image.src = _frames[f].image;
            context2d.drawImage(image, 0, 0, canvas.width, canvas.height);
            var imageData = context2d.getImageData(0, 0, canvas.width, canvas.height);
            matchPixCount = 0;
            endPixCheck = imageData.data.length;
            maxPixCount = imageData.data.length / 4;
            // RGBA layout: step 4 bytes per pixel, alpha channel ignored
            for (var pix = 0; pix < endPixCheck; pix += 4) {
                var currentColor = {
                    r: imageData.data[pix],
                    g: imageData.data[pix + 1],
                    b: imageData.data[pix + 2]
                };
                var colorDifference = Math.sqrt(
                    Math.pow(currentColor.r - sampleColor.r, 2) +
                    Math.pow(currentColor.g - sampleColor.g, 2) +
                    Math.pow(currentColor.b - sampleColor.b, 2)
                );
                // difference in color it is difference in color vectors (r1,g1,b1) <=> (r2,g2,b2)
                if (colorDifference <= maxColorDifference * pixTolerance) {
                    matchPixCount++;
                }
            }
        }
        if (!doNotCheckNext && maxPixCount - matchPixCount <= maxPixCount * frameTolerance) {
            // console.log('removed black frame : ' + f + ' ; frame duration ' + _frames[f].duration);
        } else {
            // console.log('frame is passed : ' + f);
            if (checkUntilNotBlack) {
                doNotCheckNext = true;
            }
            resultFrames.push(_frames[f]);
        }
    }
    // frames beyond the checked prefix are kept as-is
    resultFrames = resultFrames.concat(_frames.slice(endCheckFrame));
    if (resultFrames.length <= 0) {
        // at least one last frame should be available for next manipulation
        // if total duration of all frames will be < 1000 than ffmpeg doesn't work well...
        resultFrames.push(_frames[_frames.length - 1]);
    }
    return resultFrames;
}
|
This method resets currently recorded data.
@method
@memberof MediaStreamRecorder
@example
recorder.clearRecordedData();
|
dropBlackFrames
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Records a MediaStream as an animated GIF using the external GIFEncoder
// library: frames are sampled from a hidden <video> element via a canvas at
// roughly 10 fps and the finished GIF is emitted through "ondataavailable".
function GifRecorder(mediaStream) {
    if (typeof GIFEncoder === 'undefined') {
        throw 'Please link: https://cdn.webrtc-experiment.com/gif-recorder.js';
    }
    // void start(optional long timeSlice)
    // timestamp to fire "ondataavailable"
    this.start = function(timeSlice) {
        timeSlice = timeSlice || 1000;
        var imageWidth = this.videoWidth || 320;
        var imageHeight = this.videoHeight || 240;
        canvas.width = video.width = imageWidth;
        canvas.height = video.height = imageHeight;
        // external library to record as GIF images
        gifEncoder = new GIFEncoder();
        // void setRepeat(int iter)
        // Sets the number of times the set of GIF frames should be played.
        // Default is 1; 0 means play indefinitely.
        gifEncoder.setRepeat(0);
        // void setFrameRate(Number fps)
        // Sets frame rate in frames per second.
        // Equivalent to setDelay(1000/fps).
        // Using "setDelay" instead of "setFrameRate"
        gifEncoder.setDelay(this.frameRate || this.speed || 200);
        // void setQuality(int quality)
        // Sets quality of color quantization (conversion of images to the
        // maximum 256 colors allowed by the GIF specification).
        // Lower values (minimum = 1) produce better colors,
        // but slow processing significantly. 10 is the default,
        // and produces good color mapping at reasonable speeds.
        // Values greater than 20 do not yield significant improvements in speed.
        gifEncoder.setQuality(this.quality || 1);
        // Boolean start()
        // This writes the GIF Header and returns false if it fails.
        gifEncoder.start();
        startTime = Date.now();
        function drawVideoFrame(time) {
            if (isPaused) {
                setTimeout(drawVideoFrame, 500, time);
                return;
            }
            lastAnimationFrame = requestAnimationFrame(drawVideoFrame);
            // fix: the previous check "typeof lastFrameTime === undefined"
            // compared the string from typeof against the value undefined and
            // was therefore always false; compare against the string
            // 'undefined' so the throttle baseline gets initialized.
            if (typeof lastFrameTime === 'undefined') {
                lastFrameTime = time;
            }
            // ~10 fps
            if (time - lastFrameTime < 90) {
                return;
            }
            if (video.paused) {
                video.play(); // Android
            }
            context.drawImage(video, 0, 0, imageWidth, imageHeight);
            gifEncoder.addFrame(context);
            // console.log('Recording...' + Math.round((Date.now() - startTime) / 1000) + 's');
            // console.log("fps: ", 1000 / (time - lastFrameTime));
            lastFrameTime = time;
        }
        lastAnimationFrame = requestAnimationFrame(drawVideoFrame);
        timeout = setTimeout(doneRecording, timeSlice);
    };
    // Emits the encoded GIF through "ondataavailable" and clears the encoder buffer.
    function doneRecording() {
        endTime = Date.now();
        var gifBlob = new Blob([new Uint8Array(gifEncoder.stream().bin)], {
            type: 'image/gif'
        });
        self.ondataavailable(gifBlob);
        // todo: find a way to clear old recorded blobs
        gifEncoder.stream().bin = [];
    }
    this.stop = function() {
        if (lastAnimationFrame) {
            cancelAnimationFrame(lastAnimationFrame);
            clearTimeout(timeout);
            doneRecording();
            this.onstop();
        }
    };
    var isPaused = false;
    this.pause = function() {
        isPaused = true;
    };
    this.resume = function() {
        isPaused = false;
    };
    // Overridden by consumers to receive the GIF blob.
    this.ondataavailable = function() {};
    // fix: "onstop" was assigned twice with identical no-ops; one default kept.
    this.onstop = function() {};
    // Reference to itself
    var self = this;
    var canvas = document.createElement('canvas');
    var context = canvas.getContext('2d');
    var video = document.createElement('video');
    video.muted = true;
    video.autoplay = true;
    video.src = URL.createObjectURL(mediaStream);
    video.play();
    var lastAnimationFrame = null;
    var startTime, endTime, lastFrameTime;
    var gifEncoder;
    var timeout;
}
|
This method resets currently recorded data.
@method
@memberof MediaStreamRecorder
@example
recorder.clearRecordedData();
|
GifRecorder
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// requestAnimationFrame callback: samples the <video> element onto the
// canvas at roughly 10fps and feeds each sampled frame to the GIF
// encoder. While paused it polls every 500ms instead of scheduling rAFs.
// NOTE(review): relies on closure variables (isPaused, video, context,
// gifEncoder, imageWidth/Height, lastFrameTime, lastAnimationFrame).
function drawVideoFrame(time) {
    if (isPaused) {
        setTimeout(drawVideoFrame, 500, time);
        return;
    }

    lastAnimationFrame = requestAnimationFrame(drawVideoFrame);

    // BUGFIX: `typeof x` yields a string and must be compared with the
    // string 'undefined'; comparing with the value `undefined` was
    // always false, so lastFrameTime never got initialized here.
    if (typeof lastFrameTime === 'undefined') {
        lastFrameTime = time;
    }

    // ~10 fps
    if (time - lastFrameTime < 90) {
        return;
    }

    if (video.paused) {
        video.play(); // Android
    }

    context.drawImage(video, 0, 0, imageWidth, imageHeight);
    gifEncoder.addFrame(context);
    lastFrameTime = time;
}
|
This method resets currently recorded data.
@method
@memberof MediaStreamRecorder
@example
recorder.clearRecordedData();
|
drawVideoFrame
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Finalizes the GIF: wraps the encoder's byte stream in an image/gif
// Blob, hands it to the consumer, and drops the buffered bytes.
function doneRecording() {
    endTime = Date.now();

    var encodedBytes = new Uint8Array(gifEncoder.stream().bin);
    var gifBlob = new Blob([encodedBytes], {
        type: 'image/gif'
    });

    self.ondataavailable(gifBlob);

    // todo: find a way to clear old recorded blobs
    gifEncoder.stream().bin = [];
}
|
This method resets currently recorded data.
@method
@memberof MediaStreamRecorder
@example
recorder.clearRecordedData();
|
doneRecording
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
/**
 * Whammy encoder state: collected frames plus the per-frame duration in
 * milliseconds (derived from the requested fps) and the WebP quality.
 * @param {?number} duration - frames per second; falsy defaults to 1.
 * @param {?number} quality - WebP quality; falsy defaults to 0.8.
 */
function WhammyVideo(duration, quality) {
    this.frames = [];
    this.duration = 1000 / (duration || 1);
    this.quality = quality || 0.8;
}
|
Whammy is a standalone class used by {@link RecordRTC} to bring video recording in Chrome. It is written by {@link https://github.com/antimatter15|antimatter15}
@summary A real time javascript webm encoder based on a canvas hack.
@typedef Whammy
@class
@example
var recorder = new Whammy().Video(15);
recorder.add(context || canvas || dataURL);
var output = recorder.compile();
|
WhammyVideo
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Spins up a Web Worker that runs the given named function on every
// posted message. The worker source is the function's own text plus a
// tiny onmessage dispatcher that forwards e.data to it.
function processInWebWorker(_function) {
    var dispatcher = 'this.onmessage = function (e) {' + _function.name + '(e.data);}';
    var blobURL = URL.createObjectURL(new Blob([_function.toString(), dispatcher], {
        type: 'application/javascript'
    }));

    var worker = new Worker(blobURL);
    // the worker holds its own reference; the URL can be released now
    URL.revokeObjectURL(blobURL);
    return worker;
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
processInWebWorker
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Worker entry point: decodes each base64 WebP frame, extracts its VP8
// payload and pixel dimensions, muxes everything into a WebM Blob, and
// posts the result back to the page. All helpers are nested so the whole
// function can be serialized into a Web Worker (see processInWebWorker).
function whammyInWebWorker(frames) {
    // Builds the complete EBML/WebM document from parsed WebP frames.
    function ArrayToWebM(frames) {
        var info = checkFrames(frames);
        if (!info) {
            return [];
        }
        // max cluster duration in (millisecond-scaled) timecode units
        var clusterMaxDuration = 30000;
        var EBML = [{
            'id': 0x1a45dfa3, // EBML
            'data': [{
                'data': 1,
                'id': 0x4286 // EBMLVersion
            }, {
                'data': 1,
                'id': 0x42f7 // EBMLReadVersion
            }, {
                'data': 4,
                'id': 0x42f2 // EBMLMaxIDLength
            }, {
                'data': 8,
                'id': 0x42f3 // EBMLMaxSizeLength
            }, {
                'data': 'webm',
                'id': 0x4282 // DocType
            }, {
                'data': 2,
                'id': 0x4287 // DocTypeVersion
            }, {
                'data': 2,
                'id': 0x4285 // DocTypeReadVersion
            }]
        }, {
            'id': 0x18538067, // Segment
            'data': [{
                'id': 0x1549a966, // Info
                'data': [{
                    'data': 1e6, //do things in millisecs (num of nanosecs for duration scale)
                    'id': 0x2ad7b1 // TimecodeScale
                }, {
                    'data': 'whammy',
                    'id': 0x4d80 // MuxingApp
                }, {
                    'data': 'whammy',
                    'id': 0x5741 // WritingApp
                }, {
                    'data': doubleToString(info.duration),
                    'id': 0x4489 // Duration
                }]
            }, {
                'id': 0x1654ae6b, // Tracks
                'data': [{
                    'id': 0xae, // TrackEntry
                    'data': [{
                        'data': 1,
                        'id': 0xd7 // TrackNumber
                    }, {
                        'data': 1,
                        'id': 0x73c5 // TrackUID
                    }, {
                        'data': 0,
                        'id': 0x9c // FlagLacing
                    }, {
                        'data': 'und',
                        'id': 0x22b59c // Language
                    }, {
                        'data': 'V_VP8',
                        'id': 0x86 // CodecID
                    }, {
                        'data': 'VP8',
                        'id': 0x258688 // CodecName
                    }, {
                        'data': 1,
                        'id': 0x83 // TrackType
                    }, {
                        'id': 0xe0, // Video
                        'data': [{
                            'data': info.width,
                            'id': 0xb0 // PixelWidth
                        }, {
                            'data': info.height,
                            'id': 0xba // PixelHeight
                        }]
                    }]
                }]
            }]
        }];
        //Generate clusters (max duration)
        var frameNumber = 0;
        var clusterTimecode = 0;
        while (frameNumber < frames.length) {
            var clusterFrames = [];
            var clusterDuration = 0;
            // greedily pack frames until the cluster duration cap is hit
            do {
                clusterFrames.push(frames[frameNumber]);
                clusterDuration += frames[frameNumber].duration;
                frameNumber++;
            } while (frameNumber < frames.length && clusterDuration < clusterMaxDuration);
            var clusterCounter = 0;
            var cluster = {
                'id': 0x1f43b675, // Cluster
                'data': getClusterData(clusterTimecode, clusterCounter, clusterFrames)
            }; //Add cluster to segment
            EBML[1].data.push(cluster);
            clusterTimecode += clusterDuration;
        }
        return generateEBML(EBML);
    }
    // Cluster contents: a Timecode element then one SimpleBlock per frame,
    // block timecodes being relative to the cluster.
    function getClusterData(clusterTimecode, clusterCounter, clusterFrames) {
        return [{
            'data': clusterTimecode,
            'id': 0xe7 // Timecode
        }].concat(clusterFrames.map(function(webp) {
            var block = makeSimpleBlock({
                discardable: 0,
                frame: webp.data.slice(4),
                invisible: 0,
                keyframe: 1,
                lacing: 0,
                trackNum: 1,
                timecode: Math.round(clusterCounter)
            });
            clusterCounter += webp.duration;
            return {
                data: block,
                id: 0xa3
            };
        }));
    }
    // sums the lengths of all the frames and gets the duration
    // (width/height are taken from the first frame)
    function checkFrames(frames) {
        if (!frames[0]) {
            postMessage({
                error: 'Something went wrong. Maybe WebP format is not supported in the current browser.'
            });
            return;
        }
        var width = frames[0].width,
            height = frames[0].height,
            duration = frames[0].duration;
        for (var i = 1; i < frames.length; i++) {
            duration += frames[i].duration;
        }
        return {
            duration: duration,
            width: width,
            height: height
        };
    }
    // integer -> big-endian byte sequence (no leading zero bytes)
    function numToBuffer(num) {
        var parts = [];
        while (num > 0) {
            parts.push(num & 0xff);
            num = num >> 8;
        }
        return new Uint8Array(parts.reverse());
    }
    // binary string (one char per byte) -> Uint8Array of char codes
    function strToBuffer(str) {
        return new Uint8Array(str.split('').map(function(e) {
            return e.charCodeAt(0);
        }));
    }
    // binary-digit string -> bytes, left-padded to a byte boundary
    function bitsToBuffer(bits) {
        var data = [];
        var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
        bits = pad + bits;
        for (var i = 0; i < bits.length; i += 8) {
            data.push(parseInt(bits.substr(i, 8), 2));
        }
        return new Uint8Array(data);
    }
    // Recursively serializes the EBML tree: id bytes, vint-coded size,
    // then the payload (itself a Blob for container nodes).
    function generateEBML(json) {
        var ebml = [];
        for (var i = 0; i < json.length; i++) {
            var data = json[i].data;
            if (typeof data === 'object') {
                data = generateEBML(data);
            }
            if (typeof data === 'number') {
                data = bitsToBuffer(data.toString(2));
            }
            if (typeof data === 'string') {
                data = strToBuffer(data);
            }
            // Blob exposes .size; typed arrays expose .byteLength/.length
            var len = data.size || data.byteLength || data.length;
            var zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8);
            var sizeToString = len.toString(2);
            var padded = (new Array((zeroes * 7 + 7 + 1) - sizeToString.length)).join('0') + sizeToString;
            var size = (new Array(zeroes)).join('0') + '1' + padded;
            ebml.push(numToBuffer(json[i].id));
            ebml.push(bitsToBuffer(size));
            ebml.push(data);
        }
        return new Blob(ebml, {
            type: 'video/webm'
        });
    }
    // legacy variant of bitsToBuffer returning a binary string (unused here)
    function toBinStrOld(bits) {
        var data = '';
        var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
        bits = pad + bits;
        for (var i = 0; i < bits.length; i += 8) {
            data += String.fromCharCode(parseInt(bits.substr(i, 8), 2));
        }
        return data;
    }
    // Encodes one EBML SimpleBlock: track number (with length-marker bit),
    // 16-bit relative timecode, flags byte, then the raw frame payload.
    function makeSimpleBlock(data) {
        var flags = 0;
        if (data.keyframe) {
            flags |= 128;
        }
        if (data.invisible) {
            flags |= 8;
        }
        if (data.lacing) {
            flags |= (data.lacing << 1);
        }
        if (data.discardable) {
            flags |= 1;
        }
        if (data.trackNum > 127) {
            throw 'TrackNumber > 127 not supported';
        }
        var out = [data.trackNum | 0x80, data.timecode >> 8, data.timecode & 0xff, flags].map(function(e) {
            return String.fromCharCode(e);
        }).join('') + data.frame;
        return out;
    }
    // Extracts pixel dimensions from a parsed-RIFF WebP payload.
    function parseWebP(riff) {
        var VP8 = riff.RIFF[0].WEBP[0];
        var frameStart = VP8.indexOf('\x9d\x01\x2a'); // A VP8 keyframe starts with the 0x9d012a header
        for (var i = 0, c = []; i < 4; i++) {
            c[i] = VP8.charCodeAt(frameStart + 3 + i);
        }
        var width, height, tmp;
        //the code below is literally copied verbatim from the bitstream spec
        tmp = (c[1] << 8) | c[0];
        width = tmp & 0x3FFF;
        tmp = (c[3] << 8) | c[2];
        height = tmp & 0x3FFF;
        return {
            width: width,
            height: height,
            data: VP8,
            riff: riff
        };
    }
    // 4-byte chunk length at string[offset+4..7], read big-endian
    function getStrLength(string, offset) {
        return parseInt(string.substr(offset + 4, 4).split('').map(function(i) {
            var unpadded = i.charCodeAt(0).toString(2);
            return (new Array(8 - unpadded.length + 1)).join('0') + unpadded;
        }).join(''), 2);
    }
    // Recursive RIFF-style parser: chunk-id -> list of payloads
    function parseRIFF(string) {
        var offset = 0;
        var chunks = {};
        while (offset < string.length) {
            var id = string.substr(offset, 4);
            var len = getStrLength(string, offset);
            var data = string.substr(offset + 4 + 4, len);
            offset += 4 + 4 + len;
            chunks[id] = chunks[id] || [];
            if (id === 'RIFF' || id === 'LIST') {
                chunks[id].push(parseRIFF(data));
            } else {
                chunks[id].push(data);
            }
        }
        return chunks;
    }
    // number -> its 8 IEEE-754 double bytes, most-significant first
    function doubleToString(num) {
        return [].slice.call(
            new Uint8Array((new Float64Array([num])).buffer), 0).map(function(e) {
            return String.fromCharCode(e);
        }).reverse().join('');
    }
    // NOTE(review): `new` on ArrayToWebM is unnecessary (the function
    // returns an object explicitly, so the return value wins) but harmless.
    var webm = new ArrayToWebM(frames.map(function(frame) {
        // strip the "data:image/webp;base64," data-URL prefix (23 chars)
        var webp = parseWebP(parseRIFF(atob(frame.image.slice(23))));
        webp.duration = frame.duration;
        return webp;
    }));
    postMessage(webm);
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
whammyInWebWorker
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Builds a complete EBML/WebM document (as a Blob, via generateEBML)
// from parsed WebP frames: a fixed EBML header, Segment Info, a single
// VP8 video track sized from the first frame, and Clusters of frames
// packed up to clusterMaxDuration each.
// Relies on sibling helpers: checkFrames, doubleToString, getClusterData,
// generateEBML.
function ArrayToWebM(frames) {
    var info = checkFrames(frames);
    if (!info) {
        return [];
    }
    // max cluster duration in (millisecond-scaled) timecode units
    var clusterMaxDuration = 30000;
    var EBML = [{
        'id': 0x1a45dfa3, // EBML
        'data': [{
            'data': 1,
            'id': 0x4286 // EBMLVersion
        }, {
            'data': 1,
            'id': 0x42f7 // EBMLReadVersion
        }, {
            'data': 4,
            'id': 0x42f2 // EBMLMaxIDLength
        }, {
            'data': 8,
            'id': 0x42f3 // EBMLMaxSizeLength
        }, {
            'data': 'webm',
            'id': 0x4282 // DocType
        }, {
            'data': 2,
            'id': 0x4287 // DocTypeVersion
        }, {
            'data': 2,
            'id': 0x4285 // DocTypeReadVersion
        }]
    }, {
        'id': 0x18538067, // Segment
        'data': [{
            'id': 0x1549a966, // Info
            'data': [{
                'data': 1e6, //do things in millisecs (num of nanosecs for duration scale)
                'id': 0x2ad7b1 // TimecodeScale
            }, {
                'data': 'whammy',
                'id': 0x4d80 // MuxingApp
            }, {
                'data': 'whammy',
                'id': 0x5741 // WritingApp
            }, {
                'data': doubleToString(info.duration),
                'id': 0x4489 // Duration
            }]
        }, {
            'id': 0x1654ae6b, // Tracks
            'data': [{
                'id': 0xae, // TrackEntry
                'data': [{
                    'data': 1,
                    'id': 0xd7 // TrackNumber
                }, {
                    'data': 1,
                    'id': 0x73c5 // TrackUID
                }, {
                    'data': 0,
                    'id': 0x9c // FlagLacing
                }, {
                    'data': 'und',
                    'id': 0x22b59c // Language
                }, {
                    'data': 'V_VP8',
                    'id': 0x86 // CodecID
                }, {
                    'data': 'VP8',
                    'id': 0x258688 // CodecName
                }, {
                    'data': 1,
                    'id': 0x83 // TrackType
                }, {
                    'id': 0xe0, // Video
                    'data': [{
                        'data': info.width,
                        'id': 0xb0 // PixelWidth
                    }, {
                        'data': info.height,
                        'id': 0xba // PixelHeight
                    }]
                }]
            }]
        }]
    }];
    //Generate clusters (max duration)
    var frameNumber = 0;
    var clusterTimecode = 0;
    while (frameNumber < frames.length) {
        var clusterFrames = [];
        var clusterDuration = 0;
        // greedily pack frames until the cluster duration cap is hit
        do {
            clusterFrames.push(frames[frameNumber]);
            clusterDuration += frames[frameNumber].duration;
            frameNumber++;
        } while (frameNumber < frames.length && clusterDuration < clusterMaxDuration);
        var clusterCounter = 0;
        var cluster = {
            'id': 0x1f43b675, // Cluster
            'data': getClusterData(clusterTimecode, clusterCounter, clusterFrames)
        }; //Add cluster to segment
        EBML[1].data.push(cluster);
        clusterTimecode += clusterDuration;
    }
    return generateEBML(EBML);
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
ArrayToWebM
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Builds the contents of one WebM Cluster: a Timecode element followed
// by a SimpleBlock (id 0xa3) per frame. Block timecodes are relative to
// the cluster and advance by each frame's duration.
function getClusterData(clusterTimecode, clusterCounter, clusterFrames) {
    var elements = [{
        'data': clusterTimecode,
        'id': 0xe7 // Timecode
    }];

    clusterFrames.forEach(function(webp) {
        var simpleBlock = makeSimpleBlock({
            discardable: 0,
            frame: webp.data.slice(4),
            invisible: 0,
            keyframe: 1,
            lacing: 0,
            trackNum: 1,
            timecode: Math.round(clusterCounter)
        });
        clusterCounter += webp.duration;

        elements.push({
            data: simpleBlock,
            id: 0xa3
        });
    });

    return elements;
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
getClusterData
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Validates the frame list and sums every frame's duration; width and
// height are taken from the first frame. When there are no frames, an
// error is posted back to the page (this runs inside a worker) and
// undefined is returned.
function checkFrames(frames) {
    if (!frames[0]) {
        postMessage({
            error: 'Something went wrong. Maybe WebP format is not supported in the current browser.'
        });
        return;
    }

    var totalDuration = frames[0].duration;
    for (var i = 1; i < frames.length; i++) {
        totalDuration += frames[i].duration;
    }

    return {
        duration: totalDuration,
        width: frames[0].width,
        height: frames[0].height
    };
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
checkFrames
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Serializes a non-negative integer into its big-endian byte sequence
// with no leading zero bytes (zero yields an empty buffer).
function numToBuffer(num) {
    var bytes = [];
    while (num > 0) {
        bytes.unshift(num & 0xff);
        num = num >> 8;
    }
    return new Uint8Array(bytes);
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
numToBuffer
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Converts a binary string (one character per byte) into a Uint8Array
// of its char codes. Deliberately not UTF-8 aware: Whammy treats these
// strings as raw bytes.
function strToBuffer(str) {
    var bytes = new Uint8Array(str.length);
    for (var i = 0; i < str.length; i++) {
        bytes[i] = str.charCodeAt(i);
    }
    return bytes;
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
strToBuffer
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Packs a string of binary digits into bytes, left-padding with zeros
// so the total bit count is a multiple of 8.
function bitsToBuffer(bits) {
    var remainder = bits.length % 8;
    if (remainder) {
        bits = (new Array(1 + 8 - remainder)).join('0') + bits;
    }

    var bytes = new Uint8Array(bits.length / 8);
    for (var i = 0; i < bits.length; i += 8) {
        bytes[i / 8] = parseInt(bits.substr(i, 8), 2);
    }
    return bytes;
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
bitsToBuffer
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Recursively serializes the JSON-ish EBML tree into a video/webm Blob.
// Each node is emitted as: element id bytes, a variable-length (vint)
// size field, then the payload. Container nodes are serialized first so
// their payload is itself a Blob.
// Relies on sibling helpers: numToBuffer, bitsToBuffer, strToBuffer.
function generateEBML(json) {
    var ebml = [];
    for (var i = 0; i < json.length; i++) {
        var data = json[i].data;
        // container node: children become a nested Blob
        if (typeof data === 'object') {
            data = generateEBML(data);
        }
        if (typeof data === 'number') {
            data = bitsToBuffer(data.toString(2));
        }
        if (typeof data === 'string') {
            data = strToBuffer(data);
        }
        // Blob exposes .size; typed arrays expose .byteLength; strings .length
        var len = data.size || data.byteLength || data.length;
        // extra size bytes needed beyond the first, from bit-length of len
        var zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8);
        var sizeToString = len.toString(2);
        var padded = (new Array((zeroes * 7 + 7 + 1) - sizeToString.length)).join('0') + sizeToString;
        // EBML vint: `zeroes` leading 0 bits, a marker '1', then the length
        var size = (new Array(zeroes)).join('0') + '1' + padded;
        ebml.push(numToBuffer(json[i].id));
        ebml.push(bitsToBuffer(size));
        ebml.push(data);
    }
    return new Blob(ebml, {
        type: 'video/webm'
    });
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
generateEBML
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Legacy variant of bitsToBuffer: packs a string of binary digits into
// a binary string (one char per byte), left-padding with zeros to a
// byte boundary.
function toBinStrOld(bits) {
    var remainder = bits.length % 8;
    var padded = remainder ? (new Array(1 + 8 - remainder)).join('0') + bits : bits;

    var out = '';
    for (var i = 0; i < padded.length; i += 8) {
        out += String.fromCharCode(parseInt(padded.substr(i, 8), 2));
    }
    return out;
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
toBinStrOld
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Encodes one EBML SimpleBlock as a binary string: the track number
// with its length-marker bit (0x80) set, a 16-bit big-endian relative
// timecode, a flags byte, then the raw frame payload.
// Throws (a string, kept for compatibility) for track numbers that
// would need a multi-byte vint.
function makeSimpleBlock(data) {
    if (data.trackNum > 127) {
        throw 'TrackNumber > 127 not supported';
    }

    var flags = 0;
    if (data.keyframe) {
        flags |= 128;
    }
    if (data.invisible) {
        flags |= 8;
    }
    if (data.lacing) {
        flags |= (data.lacing << 1);
    }
    if (data.discardable) {
        flags |= 1;
    }

    var headerBytes = [
        data.trackNum | 0x80,
        data.timecode >> 8,
        data.timecode & 0xff,
        flags
    ];
    var header = headerBytes.map(function(code) {
        return String.fromCharCode(code);
    }).join('');

    return header + data.frame;
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
makeSimpleBlock
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Extracts pixel dimensions from a parsed-RIFF WebP payload: locates
// the VP8 keyframe start code (0x9d 0x01 0x2a) and decodes the 14-bit
// width/height fields from the four bytes that follow it, per the VP8
// bitstream spec.
function parseWebP(riff) {
    var VP8 = riff.RIFF[0].WEBP[0];

    // A VP8 keyframe starts with the 0x9d012a header
    var frameStart = VP8.indexOf('\x9d\x01\x2a');

    var c = [];
    for (var i = 0; i < 4; i++) {
        c[i] = VP8.charCodeAt(frameStart + 3 + i);
    }

    // each dimension is the low 14 bits of a little-endian 16-bit value
    var width = ((c[1] << 8) | c[0]) & 0x3FFF;
    var height = ((c[3] << 8) | c[2]) & 0x3FFF;

    return {
        width: width,
        height: height,
        data: VP8,
        riff: riff
    };
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
parseWebP
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Reads the 4-byte chunk length stored at string[offset+4 .. offset+7],
// interpreting the char codes as one big-endian 32-bit integer.
// Returns NaN when fewer than one byte is available (parseInt('') is
// NaN), which parseRIFF relies on to stop scanning truncated input.
function getStrLength(string, offset) {
    var binaryDigits = string.substr(offset + 4, 4).split('').map(function(ch) {
        var bits = ch.charCodeAt(0).toString(2);
        while (bits.length < 8) {
            bits = '0' + bits;
        }
        return bits;
    }).join('');
    return parseInt(binaryDigits, 2);
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
getStrLength
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Recursively parses a RIFF-style binary string into a map of
// chunk-id -> list of payloads. RIFF and LIST payloads are parsed as
// nested chunk maps; all other payloads are kept as raw strings.
function parseRIFF(string) {
    var chunks = {};
    var offset = 0;

    while (offset < string.length) {
        var id = string.substr(offset, 4);
        var len = getStrLength(string, offset);
        var payload = string.substr(offset + 8, len);
        offset += 8 + len;

        if (!chunks[id]) {
            chunks[id] = [];
        }
        chunks[id].push(id === 'RIFF' || id === 'LIST' ? parseRIFF(payload) : payload);
    }

    return chunks;
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
parseRIFF
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Serializes a JS number as the 8 bytes of its IEEE-754 double
// representation, reversed from the host's native byte order (i.e.
// most-significant byte first on little-endian machines), as a
// binary string.
function doubleToString(num) {
    var bytes = new Uint8Array((new Float64Array([num])).buffer);
    var out = '';
    for (var i = bytes.length - 1; i >= 0; i--) {
        out += String.fromCharCode(bytes[i]);
    }
    return out;
}
|
Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
recorder.add(canvas || context || 'image/webp');
@param {string} frame - Canvas || Context || image/webp
@param {number} duration - Stick a duration (in milliseconds)
|
doubleToString
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Reads blobs[index] into an ArrayBuffer, appends the result to the
// shared `buffers` list, then recurses for the next blob; once every
// blob is consumed, hands off to concatenateBuffers().
function readAsArrayBuffer() {
    if (!blobs[index]) {
        return concatenateBuffers();
    }

    var fileReader = new FileReader();
    fileReader.onload = function(event) {
        buffers.push(event.target.result);
        index++;
        readAsArrayBuffer();
    };
    fileReader.readAsArrayBuffer(blobs[index]);
}
|
A more abstract-ish API.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
@param {?number} speed - 0.8
@param {?number} quality - 100
|
readAsArrayBuffer
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// Concatenates the previously-read ArrayBuffers into one Blob of the
// given `type` and passes it to `callback`.
//
// BUGFIX: the original allocated `new Uint16Array(byteLength)` — that
// argument is an ELEMENT count, so the array was twice the needed size —
// and then advanced the `set` offset (also measured in elements) by
// BYTE counts, leaving zero-filled gaps between chunks; it also dropped
// the final byte of odd-length chunks. Copying byte-wise with Uint8Array
// preserves every byte in order.
function concatenateBuffers() {
    var byteLength = 0;
    buffers.forEach(function(buffer) {
        byteLength += buffer.byteLength;
    });

    var merged = new Uint8Array(byteLength);
    var lastOffset = 0;
    buffers.forEach(function(buffer) {
        merged.set(new Uint8Array(buffer), lastOffset);
        lastOffset += buffer.byteLength;
    });

    var blob = new Blob([merged.buffer], {
        type: type
    });
    callback(blob);
}
|
A more abstract-ish API.
@method
@memberof Whammy
@example
recorder = new Whammy().Video(0.8, 100);
@param {?number} speed - 0.8
@param {?number} quality - 100
|
concatenateBuffers
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/MediaStreamRecorder.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/MediaStreamRecorder.js
|
MIT
|
// getUserMedia success handler: forwards the captured stream to the
// optional session callback, then renegotiates with the remote peer.
function gumCallback(stream) {
    session.streamCallback && session.streamCallback(stream);
    connection.renegotiate(remoteUserId);
}
|
iOS 11 doesn't allow automatic play and rejects it.
|
gumCallback
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/RTCMultiConnection.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/RTCMultiConnection.js
|
MIT
|
// Applies the given audio/video MediaTrackConstraints to every matching
// track of the stream. Logs (when logging is enabled) and bails out if
// no stream was provided.
function applyConstraints(stream, mediaConstraints) {
    if (!stream) {
        if (!!connection.enableLogs) {
            console.error('No stream to applyConstraints.');
        }
        return;
    }

    ['audio', 'video'].forEach(function(kind) {
        var constraints = mediaConstraints[kind];
        if (!constraints) {
            return;
        }
        getTracks(stream, kind).forEach(function(track) {
            track.applyConstraints(constraints);
        });
    });
}
|
iOS 11 doesn't allow automatic play and rejects it.
|
applyConstraints
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/RTCMultiConnection.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/RTCMultiConnection.js
|
MIT
|
// Replaces the outgoing track either for one specific remote user or,
// when no user id is given, for every connected participant.
function replaceTrack(track, remoteUserId, isVideoTrack) {
    var targets = remoteUserId ? [remoteUserId] : connection.peers.getAllParticipants();
    targets.forEach(function(participant) {
        mPeer.replaceTrack(track, participant, isVideoTrack);
    });
}
|
iOS 11 doesn't allow automatic play and rejects it.
|
replaceTrack
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/RTCMultiConnection.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/RTCMultiConnection.js
|
MIT
|
// getUserMedia success handler: swaps the freshly captured stream in
// for the current one; treated as a video replacement when the session
// includes video or screen sharing.
function gumCallback(stream) {
    var replacingVideo = isVideoTrack || session.video || session.screen;
    connection.replaceTrack(stream, remoteUserId, replacingVideo);
}
|
iOS 11 doesn't allow automatic play and rejects it.
|
gumCallback
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dev/RTCMultiConnection.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dev/RTCMultiConnection.js
|
MIT
|
// getUserMedia success handler: forwards the captured stream to the
// optional session callback, then renegotiates with the remote peer.
function gumCallback(stream) {
    session.streamCallback && session.streamCallback(stream);
    connection.renegotiate(remoteUserId);
}
|
iOS 11 doesn't allow automatic play and rejects it.
|
gumCallback
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dist/RTCMultiConnection.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dist/RTCMultiConnection.js
|
MIT
|
// Applies the given audio/video MediaTrackConstraints to every matching
// track of the stream. Logs (when logging is enabled) and bails out if
// no stream was provided.
function applyConstraints(stream, mediaConstraints) {
    if (!stream) {
        if (!!connection.enableLogs) {
            console.error('No stream to applyConstraints.');
        }
        return;
    }

    ['audio', 'video'].forEach(function(kind) {
        var constraints = mediaConstraints[kind];
        if (!constraints) {
            return;
        }
        getTracks(stream, kind).forEach(function(track) {
            track.applyConstraints(constraints);
        });
    });
}
|
iOS 11 doesn't allow automatic play and rejects it.
|
applyConstraints
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dist/RTCMultiConnection.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dist/RTCMultiConnection.js
|
MIT
|
// Replaces the outgoing track either for one specific remote user or,
// when no user id is given, for every connected participant.
function replaceTrack(track, remoteUserId, isVideoTrack) {
    var targets = remoteUserId ? [remoteUserId] : connection.peers.getAllParticipants();
    targets.forEach(function(participant) {
        mPeer.replaceTrack(track, participant, isVideoTrack);
    });
}
|
iOS 11 doesn't allow automatic play and rejects it.
|
replaceTrack
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dist/RTCMultiConnection.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dist/RTCMultiConnection.js
|
MIT
|
// getUserMedia success handler: swaps the freshly captured stream in
// for the current one; treated as a video replacement when the session
// includes video or screen sharing.
function gumCallback(stream) {
    var replacingVideo = isVideoTrack || session.video || session.screen;
    connection.replaceTrack(stream, remoteUserId, replacingVideo);
}
|
iOS 11 doesn't allow automatic play and rejects it.
|
gumCallback
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection/dist/RTCMultiConnection.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/dist/RTCMultiConnection.js
|
MIT
|
/**
 * Mark a function for special use by Sizzle
 * @param {Function} fn The function to mark
 * @returns {Function} the same function, now stamped with the unique
 *  `expando` key so Sizzle can recognize it later
 */
function markFunction( fn ) {
    fn[ expando ] = true;
    return fn;
}
|
Mark a function for special use by Sizzle
@param {Function} fn The function to mark
|
markFunction
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
MIT
|
/**
 * Support testing using an element
 * @param {Function} fn Passed the created div and expects a boolean result
 * @returns {Boolean} true only if the feature test ran and returned truthy
 */
function assert( fn ) {
    var div = document.createElement("div");

    try {
        return !!fn( div );
    } catch (e) {
        // any exception from the feature test counts as "unsupported"
        return false;
    } finally {
        // Remove from its parent by default
        if ( div.parentNode ) {
            div.parentNode.removeChild( div );
        }
        // release memory in IE
        div = null;
    }
}
|
Support testing using an element
@param {Function} fn Passed the created div and expects a boolean result
|
assert
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
MIT
|
/**
 * Adds the same handler for all of the specified attrs
 * @param {String} attrs Pipe-separated list of attributes
 * @param {Function} handler The method that will be applied
 */
function addHandle( attrs, handler ) {
    var arr = attrs.split("|"),
        // BUGFIX: iterate over the split list, not the raw string.
        // `attrs.length` counted characters, so every index past the
        // attribute count registered the handler under the bogus key
        // "undefined" (fixed upstream in later Sizzle releases).
        i = arr.length;

    while ( i-- ) {
        Expr.attrHandle[ arr[i] ] = handler;
    }
}
|
Adds the same handler for all of the specified attrs
@param {String} attrs Pipe-separated list of attributes
@param {Function} handler The method that will be applied
|
addHandle
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
MIT
|
// Checks document order of two sibling nodes.
// @param {Element} a
// @param {Element} b
// @returns {Number} less than 0 if a precedes b, greater than 0 if a follows b
function siblingCheck( a, b ) {
	// `cur` is truthy only when both nodes were supplied.
	var cur = b && a,
		// Old-IE fast path: subtract numeric sourceIndex values.
		// NOTE(review): `~x || MAX_NEGATIVE` substitutes MAX_NEGATIVE when
		// sourceIndex is -1 (since ~(-1) === 0, which is falsy), so
		// disconnected nodes sort to the extreme — confirm against IE docs.
		diff = cur && a.nodeType === 1 && b.nodeType === 1 &&
			( ~b.sourceIndex || MAX_NEGATIVE ) -
			( ~a.sourceIndex || MAX_NEGATIVE );

	// Use IE sourceIndex if available on both nodes
	if ( diff ) {
		return diff;
	}

	// Check if b follows a: walk forward from `a`; reaching `b` means a
	// precedes b in the sibling list.
	if ( cur ) {
		while ( (cur = cur.nextSibling) ) {
			if ( cur === b ) {
				return -1;
			}
		}
	}

	// Fall back: a sorts first when it exists, otherwise b does.
	return a ? 1 : -1;
}
|
Checks document order of two siblings
@param {Element} a
@param {Element} b
@returns {Number} Returns less than 0 if a precedes b, greater than 0 if a follows b
|
siblingCheck
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
MIT
|
// Builds a pseudo-selector matcher for <input> elements of a given type
// (e.g. :radio, :checkbox).
// @param {String} type value the element's `type` property must equal
// @returns {Function} matcher taking an element and returning a Boolean
function createInputPseudo( type ) {
	return function( elem ) {
		// Tag names are compared case-insensitively; the type property
		// must match exactly.
		return elem.nodeName.toLowerCase() === "input" && elem.type === type;
	};
}
|
Returns a function to use in pseudos for input types
@param {String} type
|
createInputPseudo
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
MIT
|
// Builds a pseudo-selector matcher for button-like elements of a given
// type: either <button> elements or <input> elements (e.g. :submit, :reset).
// @param {String} type value the element's `type` property must equal
// @returns {Function} matcher taking an element and returning a Boolean
function createButtonPseudo( type ) {
	return function( elem ) {
		var tag = elem.nodeName.toLowerCase();
		return ( tag === "input" || tag === "button" ) && elem.type === type;
	};
}
|
Returns a function to use in pseudos for buttons
@param {String} type
|
createButtonPseudo
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
MIT
|
// Returns a function to use in pseudos for positionals (e.g. :eq, :lt, :gt).
// The result is double-wrapped with markFunction so Sizzle's compiler
// recognizes it as a set-level (positional) matcher.
// @param {Function} fn receives ([], seedLength, argument) and returns the
//   list of seed indexes that should match
function createPositionalPseudo( fn ) {
	return markFunction(function( argument ) {
		// Coerce the pseudo argument (the "2" in ":eq(2)") to a number.
		argument = +argument;
		return markFunction(function( seed, matches ) {
			var j,
				matchIndexes = fn( [], seed.length, argument ),
				i = matchIndexes.length;

			// Match elements found at the specified indexes: move each one
			// from `seed` into `matches` (seed slot becomes false).
			while ( i-- ) {
				if ( seed[ (j = matchIndexes[i]) ] ) {
					seed[j] = !(matches[j] = seed[j]);
				}
			}
		});
	});
}
|
Returns a function to use in pseudos for positionals
@param {Function} fn
|
createPositionalPseudo
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
MIT
|
// Tokenizes a CSS selector string into comma-separated groups of token
// objects ({ value, type, matches }), memoizing results in tokenCache.
// @param {String} selector
// @param {Boolean} parseOnly when true, only validates: returns the length
//   of any invalid trailing excess (0 means fully valid) instead of tokens
function tokenize( selector, parseOnly ) {
	var matched, match, tokens, type,
		soFar, groups, preFilters,
		cached = tokenCache[ selector + " " ];

	// Serve a defensive copy of a previously tokenized selector.
	if ( cached ) {
		return parseOnly ? 0 : cached.slice( 0 );
	}

	soFar = selector;
	groups = [];
	preFilters = Expr.preFilter;

	while ( soFar ) {

		// Comma and first run
		if ( !matched || (match = rcomma.exec( soFar )) ) {
			if ( match ) {
				// Don't consume trailing commas as valid
				soFar = soFar.slice( match[0].length ) || soFar;
			}
			// Start a new token group for the next comma-separated part.
			groups.push( tokens = [] );
		}

		matched = false;

		// Combinators
		if ( (match = rcombinators.exec( soFar )) ) {
			matched = match.shift();
			tokens.push({
				value: matched,
				// Cast descendant combinators to space
				type: match[0].replace( rtrim, " " )
			});
			soFar = soFar.slice( matched.length );
		}

		// Filters: try each registered filter type (ID, CLASS, TAG, ATTR,
		// PSEUDO, ...) against the remaining selector text.
		for ( type in Expr.filter ) {
			if ( (match = matchExpr[ type ].exec( soFar )) && (!preFilters[ type ] ||
				(match = preFilters[ type ]( match ))) ) {
				matched = match.shift();
				tokens.push({
					value: matched,
					type: type,
					matches: match
				});
				soFar = soFar.slice( matched.length );
			}
		}

		// Nothing consumed this iteration: the remainder is invalid.
		if ( !matched ) {
			break;
		}
	}

	// Return the length of the invalid excess
	// if we're just parsing
	// Otherwise, throw an error or return tokens
	return parseOnly ?
		soFar.length :
		soFar ?
			Sizzle.error( selector ) :
			// Cache the tokens
			tokenCache( selector, groups ).slice( 0 );
}
|
Tokenizes a CSS selector string into groups of tokens, memoizing the result.
@param {String} selector
@param {Boolean} parseOnly When true, returns the length of invalid trailing text instead of the tokens
|
tokenize
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
MIT
|
// Reassembles a selector string by concatenating the `value` of every token.
// @param {Array} tokens token list produced by tokenize()
// @returns {String} the reassembled selector text
function toSelector( tokens ) {
	var i = tokens.length,
		parts = [];
	while ( i-- ) {
		parts[ i ] = tokens[ i ].value;
	}
	return parts.join("");
}
|
Reassembles a selector string from a list of tokens produced by tokenize().
@param {Array} tokens
|
toSelector
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
MIT
|
// Wraps `matcher` so it is evaluated across a combinator relationship
// (child ">", descendant " ", adjacent "+", general sibling "~").
// @param {Function} matcher matcher to apply at each node reached via `dir`
// @param {Object} combinator descriptor with .dir (traversal property name,
//   e.g. "parentNode") and .first (true when only the nearest node counts)
// @param {Boolean} base when true and dir is "parentNode", non-element
//   nodes are also checked
function addCombinator( matcher, combinator, base ) {
	var dir = combinator.dir,
		checkNonElements = base && dir === "parentNode",
		// Unique id for this combinator instance, used in the cache key.
		doneName = done++;

	return combinator.first ?
		// Check against closest ancestor/preceding element
		function( elem, context, xml ) {
			while ( (elem = elem[ dir ]) ) {
				if ( elem.nodeType === 1 || checkNonElements ) {
					return matcher( elem, context, xml );
				}
			}
		} :

		// Check against all ancestor/preceding elements
		function( elem, context, xml ) {
			var data, cache, outerCache,
				// Cache key combining the current run and this combinator.
				dirkey = dirruns + " " + doneName;

			// We can't set arbitrary data on XML nodes, so they don't benefit from dir caching
			if ( xml ) {
				while ( (elem = elem[ dir ]) ) {
					if ( elem.nodeType === 1 || checkNonElements ) {
						if ( matcher( elem, context, xml ) ) {
							return true;
						}
					}
				}
			} else {
				while ( (elem = elem[ dir ]) ) {
					if ( elem.nodeType === 1 || checkNonElements ) {
						// Per-element cache object stored under the expando.
						outerCache = elem[ expando ] || (elem[ expando ] = {});
						if ( (cache = outerCache[ dir ]) && cache[0] === dirkey ) {
							// Cached verdict from the same pass: reuse it.
							if ( (data = cache[1]) === true || data === cachedruns ) {
								return data === true;
							}
						} else {
							cache = outerCache[ dir ] = [ dirkey ];
							// Store either `true` or the run marker so a
							// repeat visit in this run short-circuits.
							cache[1] = matcher( elem, context, xml ) || cachedruns;
							if ( cache[1] === true ) {
								return true;
							}
						}
					}
				}
			}
		};
}
|
Wraps a matcher so it is applied across a combinator relationship (ancestor/sibling traversal), with per-element caching on non-XML documents.
@param {Function} matcher
@param {Object} combinator
@param {Boolean} base
|
addCombinator
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
MIT
|
// Combines several element-level matchers into one. A single matcher is
// returned unchanged; otherwise the combined matcher succeeds only when
// every individual matcher accepts the element (checked last-to-first,
// bailing out on the first failure).
// @param {Array} matchers
// @returns {Function} combined matcher
function elementMatcher( matchers ) {
	if ( matchers.length <= 1 ) {
		return matchers[0];
	}

	return function( elem, context, xml ) {
		var idx = matchers.length;
		while ( idx-- ) {
			if ( !matchers[idx]( elem, context, xml ) ) {
				return false;
			}
		}
		return true;
	};
}
|
Combines a list of element-level matchers into a single matcher that requires all of them to pass.
@param {Array} matchers
|
elementMatcher
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
MIT
|
// Condenses an array down to its truthy elements that also pass `filter`
// (when one is supplied), optionally recording each kept element's original
// index in `map` so callers can re-synchronize with the source array.
// @param {Array} unmatched candidate elements (may contain falsy slots)
// @param {Array} [map] receives the original index of each kept element
// @param {Function} [filter] optional predicate (elem, context, xml)
// @returns {Array} the kept elements, in original order
function condense( unmatched, map, filter, context, xml ) {
	var elem, i,
		len = unmatched.length,
		recordIndexes = map != null,
		kept = [];

	for ( i = 0; i < len; i++ ) {
		elem = unmatched[ i ];
		if ( !elem ) {
			continue;
		}
		if ( filter && !filter( elem, context, xml ) ) {
			continue;
		}
		kept.push( elem );
		if ( recordIndexes ) {
			map.push( i );
		}
	}

	return kept;
}
|
Filters an array down to its truthy (and optionally predicate-passing) elements, optionally recording the kept indexes in map.
@param {Array} unmatched
@param {Array} map
@param {Function} filter
|
condense
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
MIT
|
// Builds a set-level matcher around a positional pseudo: `preFilter` trims
// the candidate set, `matcher` runs the positional matching, `postFilter`
// prunes the output, and `postFinder`/`postSelector` continue matching the
// selector remainder after the positional token. The returned function is
// markFunction()ed so the compiler treats it as set-level.
function setMatcher( preFilter, selector, matcher, postFilter, postFinder, postSelector ) {
	// Normalize unmarked post-processors into set matchers themselves.
	if ( postFilter && !postFilter[ expando ] ) {
		postFilter = setMatcher( postFilter );
	}
	if ( postFinder && !postFinder[ expando ] ) {
		postFinder = setMatcher( postFinder, postSelector );
	}
	return markFunction(function( seed, results, context, xml ) {
		var temp, i, elem,
			preMap = [],
			postMap = [],
			preexisting = results.length,

			// Get initial elements from seed or context
			elems = seed || multipleContexts( selector || "*", context.nodeType ? [ context ] : context, [] ),

			// Prefilter to get matcher input, preserving a map for seed-results synchronization
			matcherIn = preFilter && ( seed || !selector ) ?
				condense( elems, preMap, preFilter, context, xml ) :
				elems,

			matcherOut = matcher ?
				// If we have a postFinder, or filtered seed, or non-seed postFilter or preexisting results,
				postFinder || ( seed ? preFilter : preexisting || postFilter ) ?

					// ...intermediate processing is necessary
					[] :

					// ...otherwise use results directly
					results :
				matcherIn;

		// Find primary matches
		if ( matcher ) {
			matcher( matcherIn, matcherOut, context, xml );
		}

		// Apply postFilter
		if ( postFilter ) {
			temp = condense( matcherOut, postMap );
			postFilter( temp, [], context, xml );

			// Un-match failing elements by moving them back to matcherIn
			i = temp.length;
			while ( i-- ) {
				if ( (elem = temp[i]) ) {
					matcherOut[ postMap[i] ] = !(matcherIn[ postMap[i] ] = elem);
				}
			}
		}

		if ( seed ) {
			if ( postFinder || preFilter ) {
				if ( postFinder ) {
					// Get the final matcherOut by condensing this intermediate into postFinder contexts
					temp = [];
					i = matcherOut.length;
					while ( i-- ) {
						if ( (elem = matcherOut[i]) ) {
							// Restore matcherIn since elem is not yet a final match
							temp.push( (matcherIn[i] = elem) );
						}
					}
					postFinder( null, (matcherOut = []), temp, xml );
				}

				// Move matched elements from seed to results to keep them synchronized
				i = matcherOut.length;
				while ( i-- ) {
					if ( (elem = matcherOut[i]) &&
						(temp = postFinder ? indexOf.call( seed, elem ) : preMap[i]) > -1 ) {

						seed[temp] = !(results[temp] = elem);
					}
				}
			}

		// Add elements to results, through postFinder if defined
		} else {
			matcherOut = condense(
				matcherOut === results ?
					matcherOut.splice( preexisting, matcherOut.length ) :
					matcherOut
			);
			if ( postFinder ) {
				postFinder( null, results, matcherOut, xml );
			} else {
				push.apply( results, matcherOut );
			}
		}
	});
}
|
Builds a set-level matcher for positional pseudos, coordinating pre-filters, post-filters and post-finders over seed/result sets.
|
setMatcher
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
MIT
|
// Compiles one token group (from tokenize) into a single matcher function,
// chaining element-level matchers through combinators and delegating to
// setMatcher when a positional (set-level) pseudo is encountered.
// @param {Array} tokens a single comma-group of selector tokens
function matcherFromTokens( tokens ) {
	var checkContext, matcher, j,
		len = tokens.length,
		leadingRelative = Expr.relative[ tokens[0].type ],
		implicitRelative = leadingRelative || Expr.relative[" "],
		i = leadingRelative ? 1 : 0,

		// The foundational matcher ensures that elements are reachable from top-level context(s)
		matchContext = addCombinator( function( elem ) {
			return elem === checkContext;
		}, implicitRelative, true ),
		matchAnyContext = addCombinator( function( elem ) {
			return indexOf.call( checkContext, elem ) > -1;
		}, implicitRelative, true ),
		matchers = [ function( elem, context, xml ) {
			return ( !leadingRelative && ( xml || context !== outermostContext ) ) || (
				(checkContext = context).nodeType ?
					matchContext( elem, context, xml ) :
					matchAnyContext( elem, context, xml ) );
		} ];

	for ( ; i < len; i++ ) {
		if ( (matcher = Expr.relative[ tokens[i].type ]) ) {
			// A combinator: fold everything so far through it.
			matchers = [ addCombinator(elementMatcher( matchers ), matcher) ];
		} else {
			matcher = Expr.filter[ tokens[i].type ].apply( null, tokens[i].matches );

			// Return special upon seeing a positional matcher
			if ( matcher[ expando ] ) {
				// Find the next relative operator (if any) for proper handling
				j = ++i;
				for ( ; j < len; j++ ) {
					if ( Expr.relative[ tokens[j].type ] ) {
						break;
					}
				}
				// Split the group: everything before the positional becomes
				// the pre-filter, everything after becomes post matchers.
				return setMatcher(
					i > 1 && elementMatcher( matchers ),
					i > 1 && toSelector(
						// If the preceding token was a descendant combinator, insert an implicit any-element `*`
						tokens.slice( 0, i - 1 ).concat({ value: tokens[ i - 2 ].type === " " ? "*" : "" })
					).replace( rtrim, "$1" ),
					matcher,
					i < j && matcherFromTokens( tokens.slice( i, j ) ),
					j < len && matcherFromTokens( (tokens = tokens.slice( j )) ),
					j < len && toSelector( tokens )
				);
			}
			matchers.push( matcher );
		}
	}

	return elementMatcher( matchers );
}
|
Compiles a token group produced by tokenize() into a single matcher function.
@param {Array} tokens
|
matcherFromTokens
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
MIT
|
// Combines compiled per-group element matchers and set matchers into the
// final "superMatcher" that executes a whole selector against a context.
// @param {Array} elementMatchers matchers usable element-by-element
// @param {Array} setMatchers matchers that operate on whole candidate sets
function matcherFromGroupMatchers( elementMatchers, setMatchers ) {
	// A counter to specify which element is currently being matched
	var matcherCachedRuns = 0,
		bySet = setMatchers.length > 0,
		byElement = elementMatchers.length > 0,
		superMatcher = function( seed, context, xml, results, expandContext ) {
			var elem, j, matcher,
				setMatched = [],
				matchedCount = 0,
				// String "0" so the no-elements case yields "00" below.
				i = "0",
				unmatched = seed && [],
				outermost = expandContext != null,
				contextBackup = outermostContext,
				// We must always have either seed elements or context
				elems = seed || byElement && Expr.find["TAG"]( "*", expandContext && context.parentNode || context ),
				// Use integer dirruns iff this is the outermost matcher
				dirrunsUnique = (dirruns += contextBackup == null ? 1 : Math.random() || 0.1);

			if ( outermost ) {
				outermostContext = context !== document && context;
				cachedruns = matcherCachedRuns;
			}

			// Add elements passing elementMatchers directly to results
			// Keep `i` a string if there are no elements so `matchedCount` will be "00" below
			for ( ; (elem = elems[i]) != null; i++ ) {
				if ( byElement && elem ) {
					j = 0;
					while ( (matcher = elementMatchers[j++]) ) {
						if ( matcher( elem, context, xml ) ) {
							results.push( elem );
							break;
						}
					}
					if ( outermost ) {
						dirruns = dirrunsUnique;
						cachedruns = ++matcherCachedRuns;
					}
				}

				// Track unmatched elements for set filters
				if ( bySet ) {
					// They will have gone through all possible matchers
					if ( (elem = !matcher && elem) ) {
						matchedCount--;
					}

					// Lengthen the array for every element, matched or not
					if ( seed ) {
						unmatched.push( elem );
					}
				}
			}

			// Apply set filters to unmatched elements
			matchedCount += i;
			if ( bySet && i !== matchedCount ) {
				j = 0;
				while ( (matcher = setMatchers[j++]) ) {
					matcher( unmatched, setMatched, context, xml );
				}

				if ( seed ) {
					// Reintegrate element matches to eliminate the need for sorting
					if ( matchedCount > 0 ) {
						while ( i-- ) {
							if ( !(unmatched[i] || setMatched[i]) ) {
								setMatched[i] = pop.call( results );
							}
						}
					}

					// Discard index placeholder values to get only actual matches
					setMatched = condense( setMatched );
				}

				// Add matches to results
				push.apply( results, setMatched );

				// Seedless set matches succeeding multiple successful matchers stipulate sorting
				if ( outermost && !seed && setMatched.length > 0 &&
					( matchedCount + setMatchers.length ) > 1 ) {

					Sizzle.uniqueSort( results );
				}
			}

			// Override manipulation of globals by nested matchers
			if ( outermost ) {
				dirruns = dirrunsUnique;
				outermostContext = contextBackup;
			}

			return unmatched;
		};

	// Only set-level matchers need the "special" mark.
	return bySet ?
		markFunction( superMatcher ) :
		superMatcher;
}
|
Combines compiled element matchers and set matchers into the top-level superMatcher used to run a whole selector.
|
matcherFromGroupMatchers
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
MIT
|
// Top-level selector executor: runs element matchers over the seed/context
// elements and applies set matchers to the unmatched remainder.
// NOTE(review): this is the inner closure of matcherFromGroupMatchers shown
// detached from its enclosing scope; it relies on the outer variables
// bySet, byElement, elementMatchers, setMatchers and matcherCachedRuns, as
// well as module globals (dirruns, cachedruns, outermostContext, document).
superMatcher = function( seed, context, xml, results, expandContext ) {
	var elem, j, matcher,
		setMatched = [],
		matchedCount = 0,
		// String "0" so the no-elements case yields "00" below.
		i = "0",
		unmatched = seed && [],
		outermost = expandContext != null,
		contextBackup = outermostContext,
		// We must always have either seed elements or context
		elems = seed || byElement && Expr.find["TAG"]( "*", expandContext && context.parentNode || context ),
		// Use integer dirruns iff this is the outermost matcher
		dirrunsUnique = (dirruns += contextBackup == null ? 1 : Math.random() || 0.1);

	if ( outermost ) {
		outermostContext = context !== document && context;
		cachedruns = matcherCachedRuns;
	}

	// Add elements passing elementMatchers directly to results
	// Keep `i` a string if there are no elements so `matchedCount` will be "00" below
	for ( ; (elem = elems[i]) != null; i++ ) {
		if ( byElement && elem ) {
			j = 0;
			while ( (matcher = elementMatchers[j++]) ) {
				if ( matcher( elem, context, xml ) ) {
					results.push( elem );
					break;
				}
			}
			if ( outermost ) {
				dirruns = dirrunsUnique;
				cachedruns = ++matcherCachedRuns;
			}
		}

		// Track unmatched elements for set filters
		if ( bySet ) {
			// They will have gone through all possible matchers
			if ( (elem = !matcher && elem) ) {
				matchedCount--;
			}

			// Lengthen the array for every element, matched or not
			if ( seed ) {
				unmatched.push( elem );
			}
		}
	}

	// Apply set filters to unmatched elements
	matchedCount += i;
	if ( bySet && i !== matchedCount ) {
		j = 0;
		while ( (matcher = setMatchers[j++]) ) {
			matcher( unmatched, setMatched, context, xml );
		}

		if ( seed ) {
			// Reintegrate element matches to eliminate the need for sorting
			if ( matchedCount > 0 ) {
				while ( i-- ) {
					if ( !(unmatched[i] || setMatched[i]) ) {
						setMatched[i] = pop.call( results );
					}
				}
			}

			// Discard index placeholder values to get only actual matches
			setMatched = condense( setMatched );
		}

		// Add matches to results
		push.apply( results, setMatched );

		// Seedless set matches succeeding multiple successful matchers stipulate sorting
		if ( outermost && !seed && setMatched.length > 0 &&
			( matchedCount + setMatchers.length ) > 1 ) {

			Sizzle.uniqueSort( results );
		}
	}

	// Override manipulation of globals by nested matchers
	if ( outermost ) {
		dirruns = dirrunsUnique;
		outermostContext = contextBackup;
	}

	return unmatched;
}
|
Top-level matcher closure: runs element matchers over the seed/context elements and applies set matchers to the unmatched remainder.
|
superMatcher
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
MIT
|
// Runs the same selector against several context nodes in order, letting
// Sizzle append every match to the shared `results` array.
// @param {String} selector
// @param {Array} contexts context nodes to query, in order
// @param {Array} results shared accumulator, also returned
function multipleContexts( selector, contexts, results ) {
	var index,
		total = contexts.length;
	for ( index = 0; index < total; index++ ) {
		Sizzle( selector, contexts[ index ], results );
	}
	return results;
}
|
Runs the selector against each context node and accumulates matches into results.
@param {String} selector
@param {Array} contexts
@param {Array} results
|
multipleContexts
|
javascript
|
muaz-khan/WebRTC-Experiment
|
RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection-SignalR/RTCMultiConnection/Scripts/jquery-1.10.2.js
|
MIT
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.