id: string (lengths 1–8)
text: string (lengths 6–1.05M)
dataset_id: string (1 distinct value)
/mis_modulos-0.1.tar.gz/mis_modulos-0.1/tensorflow/python/grappler/tf_optimizer.py
"""Provides a proper python API for the symbols exported through swig."""

import threading

from tensorflow.core.framework import graph_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.grappler import _pywrap_tf_optimizer as tf_opt
from tensorflow.python.grappler import cluster as gcluster

_OPTIMIZE_GRAPH_CLUSTER_LOCK = threading.Lock()


def OptimizeGraph(config_proto,
                  metagraph,
                  verbose=True,
                  graph_id=b'graph_to_optimize',
                  cluster=None,
                  strip_default_attributes=False):
  """Optimize the provided metagraph.

  For best results, the signature_def field in `metagraph` should be populated
  with information about input (feed) and output (fetch) tensors.

  Args:
    config_proto: a ConfigProto protobuf.
    metagraph: a MetagraphDef protobuf.
    verbose: whether to log optimization results.
    graph_id: a string identifying this graph.
    cluster: a grappler cluster object representing hardware resources
      available to run this graph.
    strip_default_attributes: whether graph node attributes having default
      values should be removed after all the optimization passes. This option
      is useful if the resulting graph will be executed by an older process
      that might not know some of the recently added attributes.
  """
  if not isinstance(config_proto, config_pb2.ConfigProto):
    raise TypeError('Argument `config_proto` should be a tf.ConfigProto, '
                    f'received type: {type(config_proto).__name__}')
  if cluster is not None:
    out_graph = tf_opt.TF_OptimizeGraph(cluster.tf_cluster,
                                        config_proto.SerializeToString(),
                                        metagraph.SerializeToString(),
                                        verbose, graph_id,
                                        strip_default_attributes)
  else:
    # Currently Grappler assumes no more than 1 sessions alive globally.
    # See comments on SingleMachine::Provision(), hence we use the following
    # lock to prevent concurrent access to the following code.
    with _OPTIMIZE_GRAPH_CLUSTER_LOCK:
      cluster = gcluster.Cluster()
      try:
        out_graph = tf_opt.TF_OptimizeGraph(cluster.tf_cluster,
                                            config_proto.SerializeToString(),
                                            metagraph.SerializeToString(),
                                            verbose, graph_id,
                                            strip_default_attributes)
      finally:
        # Force the cleanup instead of waiting on python GC to cleanup the
        # temporary cluster we've created. Otherwise subsequent calls might
        # not have a clean slate because GC may not have run yet.
        cluster.Shutdown()
  return graph_pb2.GraphDef().FromString(out_graph)
PypiClean
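As context for the tf_optimizer.py row above, here is a minimal sketch of how its OptimizeGraph API might be driven from user code. tf_optimizer is an internal TensorFlow module, so its import path and availability can vary between builds; the toy graph, the tensor names, and the 'serving_default' signature key are assumptions made for this illustration, not part of the original file.

# Sketch only: exercises OptimizeGraph from the internal grappler module.
import tensorflow as tf
from tensorflow.python.grappler import tf_optimizer  # internal API, may move between versions

# Build a trivial v1-style graph and export it as a MetaGraphDef.
with tf.Graph().as_default() as g:
  x = tf.compat.v1.placeholder(tf.float32, shape=[None, 4], name='x')  # hypothetical input
  y = tf.identity(tf.nn.relu(x) + 1.0, name='y')                       # hypothetical output
  metagraph = tf.compat.v1.train.export_meta_graph(graph=g)

# Per the docstring, populate signature_def with the feed/fetch tensors.
# The 'serving_default' key and tensor names are illustrative choices.
sig = metagraph.signature_def['serving_default']
sig.inputs['x'].name = 'x:0'
sig.outputs['y'].name = 'y:0'

config = tf.compat.v1.ConfigProto()
optimized_graph_def = tf_optimizer.OptimizeGraph(config, metagraph, verbose=False)
print(len(optimized_graph_def.node))  # node count of the Grappler-optimized GraphDef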
/flask-bluelogin-0.2.7.tar.gz/flask-bluelogin-0.2.7/flask_bluelogin/swagger-ui/lib/sanitize-html.min.js
(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.sanitizeHtml=f()}})(function(){var define,module,exports;return function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s}({1:[function(require,module,exports){var htmlparser=require("htmlparser2");var extend=require("xtend");var quoteRegexp=require("regexp-quote");function each(obj,cb){if(obj)Object.keys(obj).forEach(function(key){cb(obj[key],key)})}function has(obj,key){return{}.hasOwnProperty.call(obj,key)}module.exports=sanitizeHtml;function sanitizeHtml(html,options,_recursing){var result="";function Frame(tag,attribs){var that=this;this.tag=tag;this.attribs=attribs||{};this.tagPosition=result.length;this.text="";this.updateParentNodeText=function(){if(stack.length){var parentFrame=stack[stack.length-1];parentFrame.text+=that.text}}}if(!options){options=sanitizeHtml.defaults;options.parser=htmlParserDefaults}else{options=extend(sanitizeHtml.defaults,options);if(options.parser){options.parser=extend(htmlParserDefaults,options.parser)}else{options.parser=htmlParserDefaults}}var nonTextTagsArray=options.nonTextTags||["script","style","textarea"];var allowedAttributesMap;var allowedAttributesGlobMap;if(options.allowedAttributes){allowedAttributesMap={};allowedAttributesGlobMap={};each(options.allowedAttributes,function(attributes,tag){allowedAttributesMap[tag]=[];var globRegex=[];attributes.forEach(function(name){if(name.indexOf("*")>=0){globRegex.push(quoteRegexp(name).replace(/\\\*/g,".*"))}else{allowedAttributesMap[tag].push(name)}});allowedAttributesGlobMap[tag]=new RegExp("^("+globRegex.join("|")+")$")})}var allowedClassesMap={};each(options.allowedClasses,function(classes,tag){if(allowedAttributesMap){if(!has(allowedAttributesMap,tag)){allowedAttributesMap[tag]=[]}allowedAttributesMap[tag].push("class")}allowedClassesMap[tag]=classes});var transformTagsMap={};var transformTagsAll;each(options.transformTags,function(transform,tag){var transFun;if(typeof transform==="function"){transFun=transform}else if(typeof transform==="string"){transFun=sanitizeHtml.simpleTransform(transform)}if(tag==="*"){transformTagsAll=transFun}else{transformTagsMap[tag]=transFun}});var depth=0;var stack=[];var skipMap={};var transformMap={};var skipText=false;var skipTextDepth=0;var parser=new htmlparser.Parser({onopentag:function(name,attribs){if(skipText){skipTextDepth++;return}var frame=new Frame(name,attribs);stack.push(frame);var skip=false;var hasText=frame.text?true:false;var 
transformedTag;if(has(transformTagsMap,name)){transformedTag=transformTagsMap[name](name,attribs);frame.attribs=attribs=transformedTag.attribs;if(transformedTag.text!==undefined){frame.innerText=transformedTag.text}if(name!==transformedTag.tagName){frame.name=name=transformedTag.tagName;transformMap[depth]=transformedTag.tagName}}if(transformTagsAll){transformedTag=transformTagsAll(name,attribs);frame.attribs=attribs=transformedTag.attribs;if(name!==transformedTag.tagName){frame.name=name=transformedTag.tagName;transformMap[depth]=transformedTag.tagName}}if(options.allowedTags&&options.allowedTags.indexOf(name)===-1){skip=true;if(nonTextTagsArray.indexOf(name)!==-1){skipText=true;skipTextDepth=1}skipMap[depth]=true}depth++;if(skip){return}result+="<"+name;if(!allowedAttributesMap||has(allowedAttributesMap,name)||allowedAttributesMap["*"]){each(attribs,function(value,a){if(!allowedAttributesMap||has(allowedAttributesMap,name)&&allowedAttributesMap[name].indexOf(a)!==-1||allowedAttributesMap["*"]&&allowedAttributesMap["*"].indexOf(a)!==-1||has(allowedAttributesGlobMap,name)&&allowedAttributesGlobMap[name].test(a)||allowedAttributesGlobMap["*"]&&allowedAttributesGlobMap["*"].test(a)){if(a==="href"||a==="src"){if(naughtyHref(name,value)){delete frame.attribs[a];return}}if(a==="class"){value=filterClasses(value,allowedClassesMap[name]);if(!value.length){delete frame.attribs[a];return}}result+=" "+a;if(value.length){result+='="'+escapeHtml(value)+'"'}}else{delete frame.attribs[a]}})}if(options.selfClosing.indexOf(name)!==-1){result+=" />"}else{result+=">";if(frame.innerText&&!hasText&&!options.textFilter){result+=frame.innerText}}},ontext:function(text){if(skipText){return}var lastFrame=stack[stack.length-1];var tag;if(lastFrame){tag=lastFrame.tag;text=lastFrame.innerText!==undefined?lastFrame.innerText:text}if(tag==="script"||tag==="style"){result+=text}else{var escaped=escapeHtml(text);if(options.textFilter){result+=options.textFilter(escaped)}else{result+=escaped}}if(stack.length){var frame=stack[stack.length-1];frame.text+=text}},onclosetag:function(name){if(skipText){skipTextDepth--;if(!skipTextDepth){skipText=false}else{return}}var frame=stack.pop();if(!frame){return}skipText=false;depth--;if(skipMap[depth]){delete skipMap[depth];frame.updateParentNodeText();return}if(transformMap[depth]){name=transformMap[depth];delete transformMap[depth]}if(options.exclusiveFilter&&options.exclusiveFilter(frame)){result=result.substr(0,frame.tagPosition);return}frame.updateParentNodeText();if(options.selfClosing.indexOf(name)!==-1){return}result+="</"+name+">"}},options.parser);parser.write(html);parser.end();return result;function escapeHtml(s){if(typeof s!=="string"){s=s+""}return s.replace(/\&/g,"&amp;").replace(/</g,"&lt;").replace(/\>/g,"&gt;").replace(/\"/g,"&quot;")}function naughtyHref(name,href){href=href.replace(/[\x00-\x20]+/g,"");href=href.replace(/<\!\-\-.*?\-\-\>/g,"");var matches=href.match(/^([a-zA-Z]+)\:/);if(!matches){return false}var scheme=matches[1].toLowerCase();if(has(options.allowedSchemesByTag,name)){return options.allowedSchemesByTag[name].indexOf(scheme)===-1}return!options.allowedSchemes||options.allowedSchemes.indexOf(scheme)===-1}function filterClasses(classes,allowed){if(!allowed){return classes}classes=classes.split(/\s+/);return classes.filter(function(clss){return allowed.indexOf(clss)!==-1}).join(" ")}}var 
htmlParserDefaults={decodeEntities:true};sanitizeHtml.defaults={allowedTags:["h3","h4","h5","h6","blockquote","p","a","ul","ol","nl","li","b","i","strong","em","strike","code","hr","br","div","table","thead","caption","tbody","tr","th","td","pre"],allowedAttributes:{a:["href","name","target"],img:["src"]},selfClosing:["img","br","hr","area","base","basefont","input","link","meta"],allowedSchemes:["http","https","ftp","mailto"],allowedSchemesByTag:{}};sanitizeHtml.simpleTransform=function(newTagName,newAttribs,merge){merge=merge===undefined?true:merge;newAttribs=newAttribs||{};return function(tagName,attribs){var attrib;if(merge){for(attrib in newAttribs){attribs[attrib]=newAttribs[attrib]}}else{attribs=newAttribs}return{tagName:newTagName,attribs:attribs}}}},{htmlparser2:36,"regexp-quote":54,xtend:58}],2:[function(require,module,exports){"use strict";exports.toByteArray=toByteArray;exports.fromByteArray=fromByteArray;var lookup=[];var revLookup=[];var Arr=typeof Uint8Array!=="undefined"?Uint8Array:Array;function init(){var code="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";for(var i=0,len=code.length;i<len;++i){lookup[i]=code[i];revLookup[code.charCodeAt(i)]=i}revLookup["-".charCodeAt(0)]=62;revLookup["_".charCodeAt(0)]=63}init();function toByteArray(b64){var i,j,l,tmp,placeHolders,arr;var len=b64.length;if(len%4>0){throw new Error("Invalid string. Length must be a multiple of 4")}placeHolders=b64[len-2]==="="?2:b64[len-1]==="="?1:0;arr=new Arr(len*3/4-placeHolders);l=placeHolders>0?len-4:len;var L=0;for(i=0,j=0;i<l;i+=4,j+=3){tmp=revLookup[b64.charCodeAt(i)]<<18|revLookup[b64.charCodeAt(i+1)]<<12|revLookup[b64.charCodeAt(i+2)]<<6|revLookup[b64.charCodeAt(i+3)];arr[L++]=tmp>>16&255;arr[L++]=tmp>>8&255;arr[L++]=tmp&255}if(placeHolders===2){tmp=revLookup[b64.charCodeAt(i)]<<2|revLookup[b64.charCodeAt(i+1)]>>4;arr[L++]=tmp&255}else if(placeHolders===1){tmp=revLookup[b64.charCodeAt(i)]<<10|revLookup[b64.charCodeAt(i+1)]<<4|revLookup[b64.charCodeAt(i+2)]>>2;arr[L++]=tmp>>8&255;arr[L++]=tmp&255}return arr}function tripletToBase64(num){return lookup[num>>18&63]+lookup[num>>12&63]+lookup[num>>6&63]+lookup[num&63]}function encodeChunk(uint8,start,end){var tmp;var output=[];for(var i=start;i<end;i+=3){tmp=(uint8[i]<<16)+(uint8[i+1]<<8)+uint8[i+2];output.push(tripletToBase64(tmp))}return output.join("")}function fromByteArray(uint8){var tmp;var len=uint8.length;var extraBytes=len%3;var output="";var parts=[];var maxChunkLength=16383;for(var i=0,len2=len-extraBytes;i<len2;i+=maxChunkLength){parts.push(encodeChunk(uint8,i,i+maxChunkLength>len2?len2:i+maxChunkLength))}if(extraBytes===1){tmp=uint8[len-1];output+=lookup[tmp>>2];output+=lookup[tmp<<4&63];output+="=="}else if(extraBytes===2){tmp=(uint8[len-2]<<8)+uint8[len-1];output+=lookup[tmp>>10];output+=lookup[tmp>>4&63];output+=lookup[tmp<<2&63];output+="="}parts.push(output);return parts.join("")}},{}],3:[function(require,module,exports){},{}],4:[function(require,module,exports){(function(global){"use strict";var buffer=require("buffer");var Buffer=buffer.Buffer;var SlowBuffer=buffer.SlowBuffer;var MAX_LEN=buffer.kMaxLength||2147483647;exports.alloc=function alloc(size,fill,encoding){if(typeof Buffer.alloc==="function"){return Buffer.alloc(size,fill,encoding)}if(typeof encoding==="number"){throw new TypeError("encoding must not be number")}if(typeof size!=="number"){throw new TypeError("size must be a number")}if(size>MAX_LEN){throw new RangeError("size is too large")}var enc=encoding;var 
_fill=fill;if(_fill===undefined){enc=undefined;_fill=0}var buf=new Buffer(size);if(typeof _fill==="string"){var fillBuf=new Buffer(_fill,enc);var flen=fillBuf.length;var i=-1;while(++i<size){buf[i]=fillBuf[i%flen]}}else{buf.fill(_fill)}return buf};exports.allocUnsafe=function allocUnsafe(size){if(typeof Buffer.allocUnsafe==="function"){return Buffer.allocUnsafe(size)}if(typeof size!=="number"){throw new TypeError("size must be a number")}if(size>MAX_LEN){throw new RangeError("size is too large")}return new Buffer(size)};exports.from=function from(value,encodingOrOffset,length){if(typeof Buffer.from==="function"&&(!global.Uint8Array||Uint8Array.from!==Buffer.from)){return Buffer.from(value,encodingOrOffset,length)}if(typeof value==="number"){throw new TypeError('"value" argument must not be a number')}if(typeof value==="string"){return new Buffer(value,encodingOrOffset)}if(typeof ArrayBuffer!=="undefined"&&value instanceof ArrayBuffer){var offset=encodingOrOffset;if(arguments.length===1){return new Buffer(value)}if(typeof offset==="undefined"){offset=0}var len=length;if(typeof len==="undefined"){len=value.byteLength-offset}if(offset>=value.byteLength){throw new RangeError("'offset' is out of bounds")}if(len>value.byteLength-offset){throw new RangeError("'length' is out of bounds")}return new Buffer(value.slice(offset,offset+len))}if(Buffer.isBuffer(value)){var out=new Buffer(value.length);value.copy(out,0,0,value.length);return out}if(value){if(Array.isArray(value)||typeof ArrayBuffer!=="undefined"&&value.buffer instanceof ArrayBuffer||"length"in value){return new Buffer(value)}if(value.type==="Buffer"&&Array.isArray(value.data)){return new Buffer(value.data)}}throw new TypeError("First argument must be a string, Buffer, "+"ArrayBuffer, Array, or array-like object.")};exports.allocUnsafeSlow=function allocUnsafeSlow(size){if(typeof Buffer.allocUnsafeSlow==="function"){return Buffer.allocUnsafeSlow(size)}if(typeof size!=="number"){throw new TypeError("size must be a number")}if(size>=MAX_LEN){throw new RangeError("size is too large")}return new SlowBuffer(size)}}).call(this,typeof global!=="undefined"?global:typeof self!=="undefined"?self:typeof window!=="undefined"?window:{})},{buffer:5}],5:[function(require,module,exports){(function(global){"use strict";var base64=require("base64-js");var ieee754=require("ieee754");var isArray=require("isarray");exports.Buffer=Buffer;exports.SlowBuffer=SlowBuffer;exports.INSPECT_MAX_BYTES=50;Buffer.TYPED_ARRAY_SUPPORT=global.TYPED_ARRAY_SUPPORT!==undefined?global.TYPED_ARRAY_SUPPORT:typedArraySupport();exports.kMaxLength=kMaxLength();function typedArraySupport(){try{var arr=new Uint8Array(1);arr.__proto__={__proto__:Uint8Array.prototype,foo:function(){return 42}};return arr.foo()===42&&typeof arr.subarray==="function"&&arr.subarray(1,1).byteLength===0}catch(e){return false}}function kMaxLength(){return Buffer.TYPED_ARRAY_SUPPORT?2147483647:1073741823}function createBuffer(that,length){if(kMaxLength()<length){throw new RangeError("Invalid typed array length")}if(Buffer.TYPED_ARRAY_SUPPORT){that=new Uint8Array(length);that.__proto__=Buffer.prototype}else{if(that===null){that=new Buffer(length)}that.length=length}return that}function Buffer(arg,encodingOrOffset,length){if(!Buffer.TYPED_ARRAY_SUPPORT&&!(this instanceof Buffer)){return new Buffer(arg,encodingOrOffset,length)}if(typeof arg==="number"){if(typeof encodingOrOffset==="string"){throw new Error("If encoding is specified then the first argument must be a string")}return allocUnsafe(this,arg)}return 
from(this,arg,encodingOrOffset,length)}Buffer.poolSize=8192;Buffer._augment=function(arr){arr.__proto__=Buffer.prototype;return arr};function from(that,value,encodingOrOffset,length){if(typeof value==="number"){throw new TypeError('"value" argument must not be a number')}if(typeof ArrayBuffer!=="undefined"&&value instanceof ArrayBuffer){return fromArrayBuffer(that,value,encodingOrOffset,length)}if(typeof value==="string"){return fromString(that,value,encodingOrOffset)}return fromObject(that,value)}Buffer.from=function(value,encodingOrOffset,length){return from(null,value,encodingOrOffset,length)};if(Buffer.TYPED_ARRAY_SUPPORT){Buffer.prototype.__proto__=Uint8Array.prototype;Buffer.__proto__=Uint8Array;if(typeof Symbol!=="undefined"&&Symbol.species&&Buffer[Symbol.species]===Buffer){Object.defineProperty(Buffer,Symbol.species,{value:null,configurable:true})}}function assertSize(size){if(typeof size!=="number"){throw new TypeError('"size" argument must be a number')}else if(size<0){throw new RangeError('"size" argument must not be negative')}}function alloc(that,size,fill,encoding){assertSize(size);if(size<=0){return createBuffer(that,size)}if(fill!==undefined){return typeof encoding==="string"?createBuffer(that,size).fill(fill,encoding):createBuffer(that,size).fill(fill)}return createBuffer(that,size)}Buffer.alloc=function(size,fill,encoding){return alloc(null,size,fill,encoding)};function allocUnsafe(that,size){assertSize(size);that=createBuffer(that,size<0?0:checked(size)|0);if(!Buffer.TYPED_ARRAY_SUPPORT){for(var i=0;i<size;++i){that[i]=0}}return that}Buffer.allocUnsafe=function(size){return allocUnsafe(null,size)};Buffer.allocUnsafeSlow=function(size){return allocUnsafe(null,size)};function fromString(that,string,encoding){if(typeof encoding!=="string"||encoding===""){encoding="utf8"}if(!Buffer.isEncoding(encoding)){throw new TypeError('"encoding" must be a valid string encoding')}var length=byteLength(string,encoding)|0;that=createBuffer(that,length);var actual=that.write(string,encoding);if(actual!==length){that=that.slice(0,actual)}return that}function fromArrayLike(that,array){var length=array.length<0?0:checked(array.length)|0;that=createBuffer(that,length);for(var i=0;i<length;i+=1){that[i]=array[i]&255}return that}function fromArrayBuffer(that,array,byteOffset,length){array.byteLength;if(byteOffset<0||array.byteLength<byteOffset){throw new RangeError("'offset' is out of bounds")}if(array.byteLength<byteOffset+(length||0)){throw new RangeError("'length' is out of bounds")}if(byteOffset===undefined&&length===undefined){array=new Uint8Array(array)}else if(length===undefined){array=new Uint8Array(array,byteOffset)}else{array=new Uint8Array(array,byteOffset,length)}if(Buffer.TYPED_ARRAY_SUPPORT){that=array;that.__proto__=Buffer.prototype}else{that=fromArrayLike(that,array)}return that}function fromObject(that,obj){if(Buffer.isBuffer(obj)){var len=checked(obj.length)|0;that=createBuffer(that,len);if(that.length===0){return that}obj.copy(that,0,0,len);return that}if(obj){if(typeof ArrayBuffer!=="undefined"&&obj.buffer instanceof ArrayBuffer||"length"in obj){if(typeof obj.length!=="number"||isnan(obj.length)){return createBuffer(that,0)}return fromArrayLike(that,obj)}if(obj.type==="Buffer"&&isArray(obj.data)){return fromArrayLike(that,obj.data)}}throw new TypeError("First argument must be a string, Buffer, ArrayBuffer, Array, or array-like object.")}function checked(length){if(length>=kMaxLength()){throw new RangeError("Attempt to allocate Buffer larger than maximum "+"size: 
0x"+kMaxLength().toString(16)+" bytes")}return length|0}function SlowBuffer(length){if(+length!=length){length=0}return Buffer.alloc(+length)}Buffer.isBuffer=function isBuffer(b){return!!(b!=null&&b._isBuffer)};Buffer.compare=function compare(a,b){if(!Buffer.isBuffer(a)||!Buffer.isBuffer(b)){throw new TypeError("Arguments must be Buffers")}if(a===b)return 0;var x=a.length;var y=b.length;for(var i=0,len=Math.min(x,y);i<len;++i){if(a[i]!==b[i]){x=a[i];y=b[i];break}}if(x<y)return-1;if(y<x)return 1;return 0};Buffer.isEncoding=function isEncoding(encoding){switch(String(encoding).toLowerCase()){case"hex":case"utf8":case"utf-8":case"ascii":case"latin1":case"binary":case"base64":case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return true;default:return false}};Buffer.concat=function concat(list,length){if(!isArray(list)){throw new TypeError('"list" argument must be an Array of Buffers')}if(list.length===0){return Buffer.alloc(0)}var i;if(length===undefined){length=0;for(i=0;i<list.length;++i){length+=list[i].length}}var buffer=Buffer.allocUnsafe(length);var pos=0;for(i=0;i<list.length;++i){var buf=list[i];if(!Buffer.isBuffer(buf)){throw new TypeError('"list" argument must be an Array of Buffers')}buf.copy(buffer,pos);pos+=buf.length}return buffer};function byteLength(string,encoding){if(Buffer.isBuffer(string)){return string.length}if(typeof ArrayBuffer!=="undefined"&&typeof ArrayBuffer.isView==="function"&&(ArrayBuffer.isView(string)||string instanceof ArrayBuffer)){return string.byteLength}if(typeof string!=="string"){string=""+string}var len=string.length;if(len===0)return 0;var loweredCase=false;for(;;){switch(encoding){case"ascii":case"latin1":case"binary":return len;case"utf8":case"utf-8":case undefined:return utf8ToBytes(string).length;case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return len*2;case"hex":return len>>>1;case"base64":return base64ToBytes(string).length;default:if(loweredCase)return utf8ToBytes(string).length;encoding=(""+encoding).toLowerCase();loweredCase=true}}}Buffer.byteLength=byteLength;function slowToString(encoding,start,end){var loweredCase=false;if(start===undefined||start<0){start=0}if(start>this.length){return""}if(end===undefined||end>this.length){end=this.length}if(end<=0){return""}end>>>=0;start>>>=0;if(end<=start){return""}if(!encoding)encoding="utf8";while(true){switch(encoding){case"hex":return hexSlice(this,start,end);case"utf8":case"utf-8":return utf8Slice(this,start,end);case"ascii":return asciiSlice(this,start,end);case"latin1":case"binary":return latin1Slice(this,start,end);case"base64":return base64Slice(this,start,end);case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return utf16leSlice(this,start,end);default:if(loweredCase)throw new TypeError("Unknown encoding: "+encoding);encoding=(encoding+"").toLowerCase();loweredCase=true}}}Buffer.prototype._isBuffer=true;function swap(b,n,m){var i=b[n];b[n]=b[m];b[m]=i}Buffer.prototype.swap16=function swap16(){var len=this.length;if(len%2!==0){throw new RangeError("Buffer size must be a multiple of 16-bits")}for(var i=0;i<len;i+=2){swap(this,i,i+1)}return this};Buffer.prototype.swap32=function swap32(){var len=this.length;if(len%4!==0){throw new RangeError("Buffer size must be a multiple of 32-bits")}for(var i=0;i<len;i+=4){swap(this,i,i+3);swap(this,i+1,i+2)}return this};Buffer.prototype.swap64=function swap64(){var len=this.length;if(len%8!==0){throw new RangeError("Buffer size must be a multiple of 64-bits")}for(var 
i=0;i<len;i+=8){swap(this,i,i+7);swap(this,i+1,i+6);swap(this,i+2,i+5);swap(this,i+3,i+4)}return this};Buffer.prototype.toString=function toString(){var length=this.length|0;if(length===0)return"";if(arguments.length===0)return utf8Slice(this,0,length);return slowToString.apply(this,arguments)};Buffer.prototype.equals=function equals(b){if(!Buffer.isBuffer(b))throw new TypeError("Argument must be a Buffer");if(this===b)return true;return Buffer.compare(this,b)===0};Buffer.prototype.inspect=function inspect(){var str="";var max=exports.INSPECT_MAX_BYTES;if(this.length>0){str=this.toString("hex",0,max).match(/.{2}/g).join(" ");if(this.length>max)str+=" ... "}return"<Buffer "+str+">"};Buffer.prototype.compare=function compare(target,start,end,thisStart,thisEnd){if(!Buffer.isBuffer(target)){throw new TypeError("Argument must be a Buffer")}if(start===undefined){start=0}if(end===undefined){end=target?target.length:0}if(thisStart===undefined){thisStart=0}if(thisEnd===undefined){thisEnd=this.length}if(start<0||end>target.length||thisStart<0||thisEnd>this.length){throw new RangeError("out of range index")}if(thisStart>=thisEnd&&start>=end){return 0}if(thisStart>=thisEnd){return-1}if(start>=end){return 1}start>>>=0;end>>>=0;thisStart>>>=0;thisEnd>>>=0;if(this===target)return 0;var x=thisEnd-thisStart;var y=end-start;var len=Math.min(x,y);var thisCopy=this.slice(thisStart,thisEnd);var targetCopy=target.slice(start,end);for(var i=0;i<len;++i){if(thisCopy[i]!==targetCopy[i]){x=thisCopy[i];y=targetCopy[i];break}}if(x<y)return-1;if(y<x)return 1;return 0};function bidirectionalIndexOf(buffer,val,byteOffset,encoding,dir){if(buffer.length===0)return-1;if(typeof byteOffset==="string"){encoding=byteOffset;byteOffset=0}else if(byteOffset>2147483647){byteOffset=2147483647}else if(byteOffset<-2147483648){byteOffset=-2147483648}byteOffset=+byteOffset;if(isNaN(byteOffset)){byteOffset=dir?0:buffer.length-1}if(byteOffset<0)byteOffset=buffer.length+byteOffset;if(byteOffset>=buffer.length){if(dir)return-1;else byteOffset=buffer.length-1}else if(byteOffset<0){if(dir)byteOffset=0;else return-1}if(typeof val==="string"){val=Buffer.from(val,encoding)}if(Buffer.isBuffer(val)){if(val.length===0){return-1}return arrayIndexOf(buffer,val,byteOffset,encoding,dir)}else if(typeof val==="number"){val=val&255;if(Buffer.TYPED_ARRAY_SUPPORT&&typeof Uint8Array.prototype.indexOf==="function"){if(dir){return Uint8Array.prototype.indexOf.call(buffer,val,byteOffset)}else{return Uint8Array.prototype.lastIndexOf.call(buffer,val,byteOffset)}}return arrayIndexOf(buffer,[val],byteOffset,encoding,dir)}throw new TypeError("val must be string, number or Buffer")}function arrayIndexOf(arr,val,byteOffset,encoding,dir){var indexSize=1;var arrLength=arr.length;var valLength=val.length;if(encoding!==undefined){encoding=String(encoding).toLowerCase();if(encoding==="ucs2"||encoding==="ucs-2"||encoding==="utf16le"||encoding==="utf-16le"){if(arr.length<2||val.length<2){return-1}indexSize=2;arrLength/=2;valLength/=2;byteOffset/=2}}function read(buf,i){if(indexSize===1){return buf[i]}else{return buf.readUInt16BE(i*indexSize)}}var i;if(dir){var foundIndex=-1;for(i=byteOffset;i<arrLength;i++){if(read(arr,i)===read(val,foundIndex===-1?0:i-foundIndex)){if(foundIndex===-1)foundIndex=i;if(i-foundIndex+1===valLength)return foundIndex*indexSize}else{if(foundIndex!==-1)i-=i-foundIndex;foundIndex=-1}}}else{if(byteOffset+valLength>arrLength)byteOffset=arrLength-valLength;for(i=byteOffset;i>=0;i--){var found=true;for(var 
j=0;j<valLength;j++){if(read(arr,i+j)!==read(val,j)){found=false;break}}if(found)return i}}return-1}Buffer.prototype.includes=function includes(val,byteOffset,encoding){return this.indexOf(val,byteOffset,encoding)!==-1};Buffer.prototype.indexOf=function indexOf(val,byteOffset,encoding){return bidirectionalIndexOf(this,val,byteOffset,encoding,true)};Buffer.prototype.lastIndexOf=function lastIndexOf(val,byteOffset,encoding){return bidirectionalIndexOf(this,val,byteOffset,encoding,false)};function hexWrite(buf,string,offset,length){offset=Number(offset)||0;var remaining=buf.length-offset;if(!length){length=remaining}else{length=Number(length);if(length>remaining){length=remaining}}var strLen=string.length;if(strLen%2!==0)throw new TypeError("Invalid hex string");if(length>strLen/2){length=strLen/2}for(var i=0;i<length;++i){var parsed=parseInt(string.substr(i*2,2),16);if(isNaN(parsed))return i;buf[offset+i]=parsed}return i}function utf8Write(buf,string,offset,length){return blitBuffer(utf8ToBytes(string,buf.length-offset),buf,offset,length)}function asciiWrite(buf,string,offset,length){return blitBuffer(asciiToBytes(string),buf,offset,length)}function latin1Write(buf,string,offset,length){return asciiWrite(buf,string,offset,length)}function base64Write(buf,string,offset,length){return blitBuffer(base64ToBytes(string),buf,offset,length)}function ucs2Write(buf,string,offset,length){return blitBuffer(utf16leToBytes(string,buf.length-offset),buf,offset,length)}Buffer.prototype.write=function write(string,offset,length,encoding){if(offset===undefined){encoding="utf8";length=this.length;offset=0}else if(length===undefined&&typeof offset==="string"){encoding=offset;length=this.length;offset=0}else if(isFinite(offset)){offset=offset|0;if(isFinite(length)){length=length|0;if(encoding===undefined)encoding="utf8"}else{encoding=length;length=undefined}}else{throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported")}var remaining=this.length-offset;if(length===undefined||length>remaining)length=remaining;if(string.length>0&&(length<0||offset<0)||offset>this.length){throw new RangeError("Attempt to write outside buffer bounds")}if(!encoding)encoding="utf8";var loweredCase=false;for(;;){switch(encoding){case"hex":return hexWrite(this,string,offset,length);case"utf8":case"utf-8":return utf8Write(this,string,offset,length);case"ascii":return asciiWrite(this,string,offset,length);case"latin1":case"binary":return latin1Write(this,string,offset,length);case"base64":return base64Write(this,string,offset,length);case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return ucs2Write(this,string,offset,length);default:if(loweredCase)throw new TypeError("Unknown encoding: "+encoding);encoding=(""+encoding).toLowerCase();loweredCase=true}}};Buffer.prototype.toJSON=function toJSON(){return{type:"Buffer",data:Array.prototype.slice.call(this._arr||this,0)}};function base64Slice(buf,start,end){if(start===0&&end===buf.length){return base64.fromByteArray(buf)}else{return base64.fromByteArray(buf.slice(start,end))}}function utf8Slice(buf,start,end){end=Math.min(buf.length,end);var res=[];var i=start;while(i<end){var firstByte=buf[i];var codePoint=null;var bytesPerSequence=firstByte>239?4:firstByte>223?3:firstByte>191?2:1;if(i+bytesPerSequence<=end){var secondByte,thirdByte,fourthByte,tempCodePoint;switch(bytesPerSequence){case 1:if(firstByte<128){codePoint=firstByte}break;case 
2:secondByte=buf[i+1];if((secondByte&192)===128){tempCodePoint=(firstByte&31)<<6|secondByte&63;if(tempCodePoint>127){codePoint=tempCodePoint}}break;case 3:secondByte=buf[i+1];thirdByte=buf[i+2];if((secondByte&192)===128&&(thirdByte&192)===128){tempCodePoint=(firstByte&15)<<12|(secondByte&63)<<6|thirdByte&63;if(tempCodePoint>2047&&(tempCodePoint<55296||tempCodePoint>57343)){codePoint=tempCodePoint}}break;case 4:secondByte=buf[i+1];thirdByte=buf[i+2];fourthByte=buf[i+3];if((secondByte&192)===128&&(thirdByte&192)===128&&(fourthByte&192)===128){tempCodePoint=(firstByte&15)<<18|(secondByte&63)<<12|(thirdByte&63)<<6|fourthByte&63;if(tempCodePoint>65535&&tempCodePoint<1114112){codePoint=tempCodePoint}}}}if(codePoint===null){codePoint=65533;bytesPerSequence=1}else if(codePoint>65535){codePoint-=65536;res.push(codePoint>>>10&1023|55296);codePoint=56320|codePoint&1023}res.push(codePoint);i+=bytesPerSequence}return decodeCodePointsArray(res)}var MAX_ARGUMENTS_LENGTH=4096;function decodeCodePointsArray(codePoints){var len=codePoints.length;if(len<=MAX_ARGUMENTS_LENGTH){return String.fromCharCode.apply(String,codePoints)}var res="";var i=0;while(i<len){res+=String.fromCharCode.apply(String,codePoints.slice(i,i+=MAX_ARGUMENTS_LENGTH))}return res}function asciiSlice(buf,start,end){var ret="";end=Math.min(buf.length,end);for(var i=start;i<end;++i){ret+=String.fromCharCode(buf[i]&127)}return ret}function latin1Slice(buf,start,end){var ret="";end=Math.min(buf.length,end);for(var i=start;i<end;++i){ret+=String.fromCharCode(buf[i])}return ret}function hexSlice(buf,start,end){var len=buf.length;if(!start||start<0)start=0;if(!end||end<0||end>len)end=len;var out="";for(var i=start;i<end;++i){out+=toHex(buf[i])}return out}function utf16leSlice(buf,start,end){var bytes=buf.slice(start,end);var res="";for(var i=0;i<bytes.length;i+=2){res+=String.fromCharCode(bytes[i]+bytes[i+1]*256)}return res}Buffer.prototype.slice=function slice(start,end){var len=this.length;start=~~start;end=end===undefined?len:~~end;if(start<0){start+=len;if(start<0)start=0}else if(start>len){start=len}if(end<0){end+=len;if(end<0)end=0}else if(end>len){end=len}if(end<start)end=start;var newBuf;if(Buffer.TYPED_ARRAY_SUPPORT){newBuf=this.subarray(start,end);newBuf.__proto__=Buffer.prototype}else{var sliceLen=end-start;newBuf=new Buffer(sliceLen,undefined);for(var i=0;i<sliceLen;++i){newBuf[i]=this[i+start]}}return newBuf};function checkOffset(offset,ext,length){if(offset%1!==0||offset<0)throw new RangeError("offset is not uint");if(offset+ext>length)throw new RangeError("Trying to access beyond buffer length")}Buffer.prototype.readUIntLE=function readUIntLE(offset,byteLength,noAssert){offset=offset|0;byteLength=byteLength|0;if(!noAssert)checkOffset(offset,byteLength,this.length);var val=this[offset];var mul=1;var i=0;while(++i<byteLength&&(mul*=256)){val+=this[offset+i]*mul}return val};Buffer.prototype.readUIntBE=function readUIntBE(offset,byteLength,noAssert){offset=offset|0;byteLength=byteLength|0;if(!noAssert){checkOffset(offset,byteLength,this.length)}var val=this[offset+--byteLength];var mul=1;while(byteLength>0&&(mul*=256)){val+=this[offset+--byteLength]*mul}return val};Buffer.prototype.readUInt8=function readUInt8(offset,noAssert){if(!noAssert)checkOffset(offset,1,this.length);return this[offset]};Buffer.prototype.readUInt16LE=function readUInt16LE(offset,noAssert){if(!noAssert)checkOffset(offset,2,this.length);return this[offset]|this[offset+1]<<8};Buffer.prototype.readUInt16BE=function 
readUInt16BE(offset,noAssert){if(!noAssert)checkOffset(offset,2,this.length);return this[offset]<<8|this[offset+1]};Buffer.prototype.readUInt32LE=function readUInt32LE(offset,noAssert){if(!noAssert)checkOffset(offset,4,this.length);return(this[offset]|this[offset+1]<<8|this[offset+2]<<16)+this[offset+3]*16777216};Buffer.prototype.readUInt32BE=function readUInt32BE(offset,noAssert){if(!noAssert)checkOffset(offset,4,this.length);return this[offset]*16777216+(this[offset+1]<<16|this[offset+2]<<8|this[offset+3])};Buffer.prototype.readIntLE=function readIntLE(offset,byteLength,noAssert){offset=offset|0;byteLength=byteLength|0;if(!noAssert)checkOffset(offset,byteLength,this.length);var val=this[offset];var mul=1;var i=0;while(++i<byteLength&&(mul*=256)){val+=this[offset+i]*mul}mul*=128;if(val>=mul)val-=Math.pow(2,8*byteLength);return val};Buffer.prototype.readIntBE=function readIntBE(offset,byteLength,noAssert){offset=offset|0;byteLength=byteLength|0;if(!noAssert)checkOffset(offset,byteLength,this.length); var i=byteLength;var mul=1;var val=this[offset+--i];while(i>0&&(mul*=256)){val+=this[offset+--i]*mul}mul*=128;if(val>=mul)val-=Math.pow(2,8*byteLength);return val};Buffer.prototype.readInt8=function readInt8(offset,noAssert){if(!noAssert)checkOffset(offset,1,this.length);if(!(this[offset]&128))return this[offset];return(255-this[offset]+1)*-1};Buffer.prototype.readInt16LE=function readInt16LE(offset,noAssert){if(!noAssert)checkOffset(offset,2,this.length);var val=this[offset]|this[offset+1]<<8;return val&32768?val|4294901760:val};Buffer.prototype.readInt16BE=function readInt16BE(offset,noAssert){if(!noAssert)checkOffset(offset,2,this.length);var val=this[offset+1]|this[offset]<<8;return val&32768?val|4294901760:val};Buffer.prototype.readInt32LE=function readInt32LE(offset,noAssert){if(!noAssert)checkOffset(offset,4,this.length);return this[offset]|this[offset+1]<<8|this[offset+2]<<16|this[offset+3]<<24};Buffer.prototype.readInt32BE=function readInt32BE(offset,noAssert){if(!noAssert)checkOffset(offset,4,this.length);return this[offset]<<24|this[offset+1]<<16|this[offset+2]<<8|this[offset+3]};Buffer.prototype.readFloatLE=function readFloatLE(offset,noAssert){if(!noAssert)checkOffset(offset,4,this.length);return ieee754.read(this,offset,true,23,4)};Buffer.prototype.readFloatBE=function readFloatBE(offset,noAssert){if(!noAssert)checkOffset(offset,4,this.length);return ieee754.read(this,offset,false,23,4)};Buffer.prototype.readDoubleLE=function readDoubleLE(offset,noAssert){if(!noAssert)checkOffset(offset,8,this.length);return ieee754.read(this,offset,true,52,8)};Buffer.prototype.readDoubleBE=function readDoubleBE(offset,noAssert){if(!noAssert)checkOffset(offset,8,this.length);return ieee754.read(this,offset,false,52,8)};function checkInt(buf,value,offset,ext,max,min){if(!Buffer.isBuffer(buf))throw new TypeError('"buffer" argument must be a Buffer instance');if(value>max||value<min)throw new RangeError('"value" argument is out of bounds');if(offset+ext>buf.length)throw new RangeError("Index out of range")}Buffer.prototype.writeUIntLE=function writeUIntLE(value,offset,byteLength,noAssert){value=+value;offset=offset|0;byteLength=byteLength|0;if(!noAssert){var maxBytes=Math.pow(2,8*byteLength)-1;checkInt(this,value,offset,byteLength,maxBytes,0)}var mul=1;var i=0;this[offset]=value&255;while(++i<byteLength&&(mul*=256)){this[offset+i]=value/mul&255}return offset+byteLength};Buffer.prototype.writeUIntBE=function 
writeUIntBE(value,offset,byteLength,noAssert){value=+value;offset=offset|0;byteLength=byteLength|0;if(!noAssert){var maxBytes=Math.pow(2,8*byteLength)-1;checkInt(this,value,offset,byteLength,maxBytes,0)}var i=byteLength-1;var mul=1;this[offset+i]=value&255;while(--i>=0&&(mul*=256)){this[offset+i]=value/mul&255}return offset+byteLength};Buffer.prototype.writeUInt8=function writeUInt8(value,offset,noAssert){value=+value;offset=offset|0;if(!noAssert)checkInt(this,value,offset,1,255,0);if(!Buffer.TYPED_ARRAY_SUPPORT)value=Math.floor(value);this[offset]=value&255;return offset+1};function objectWriteUInt16(buf,value,offset,littleEndian){if(value<0)value=65535+value+1;for(var i=0,j=Math.min(buf.length-offset,2);i<j;++i){buf[offset+i]=(value&255<<8*(littleEndian?i:1-i))>>>(littleEndian?i:1-i)*8}}Buffer.prototype.writeUInt16LE=function writeUInt16LE(value,offset,noAssert){value=+value;offset=offset|0;if(!noAssert)checkInt(this,value,offset,2,65535,0);if(Buffer.TYPED_ARRAY_SUPPORT){this[offset]=value&255;this[offset+1]=value>>>8}else{objectWriteUInt16(this,value,offset,true)}return offset+2};Buffer.prototype.writeUInt16BE=function writeUInt16BE(value,offset,noAssert){value=+value;offset=offset|0;if(!noAssert)checkInt(this,value,offset,2,65535,0);if(Buffer.TYPED_ARRAY_SUPPORT){this[offset]=value>>>8;this[offset+1]=value&255}else{objectWriteUInt16(this,value,offset,false)}return offset+2};function objectWriteUInt32(buf,value,offset,littleEndian){if(value<0)value=4294967295+value+1;for(var i=0,j=Math.min(buf.length-offset,4);i<j;++i){buf[offset+i]=value>>>(littleEndian?i:3-i)*8&255}}Buffer.prototype.writeUInt32LE=function writeUInt32LE(value,offset,noAssert){value=+value;offset=offset|0;if(!noAssert)checkInt(this,value,offset,4,4294967295,0);if(Buffer.TYPED_ARRAY_SUPPORT){this[offset+3]=value>>>24;this[offset+2]=value>>>16;this[offset+1]=value>>>8;this[offset]=value&255}else{objectWriteUInt32(this,value,offset,true)}return offset+4};Buffer.prototype.writeUInt32BE=function writeUInt32BE(value,offset,noAssert){value=+value;offset=offset|0;if(!noAssert)checkInt(this,value,offset,4,4294967295,0);if(Buffer.TYPED_ARRAY_SUPPORT){this[offset]=value>>>24;this[offset+1]=value>>>16;this[offset+2]=value>>>8;this[offset+3]=value&255}else{objectWriteUInt32(this,value,offset,false)}return offset+4};Buffer.prototype.writeIntLE=function writeIntLE(value,offset,byteLength,noAssert){value=+value;offset=offset|0;if(!noAssert){var limit=Math.pow(2,8*byteLength-1);checkInt(this,value,offset,byteLength,limit-1,-limit)}var i=0;var mul=1;var sub=0;this[offset]=value&255;while(++i<byteLength&&(mul*=256)){if(value<0&&sub===0&&this[offset+i-1]!==0){sub=1}this[offset+i]=(value/mul>>0)-sub&255}return offset+byteLength};Buffer.prototype.writeIntBE=function writeIntBE(value,offset,byteLength,noAssert){value=+value;offset=offset|0;if(!noAssert){var limit=Math.pow(2,8*byteLength-1);checkInt(this,value,offset,byteLength,limit-1,-limit)}var i=byteLength-1;var mul=1;var sub=0;this[offset+i]=value&255;while(--i>=0&&(mul*=256)){if(value<0&&sub===0&&this[offset+i+1]!==0){sub=1}this[offset+i]=(value/mul>>0)-sub&255}return offset+byteLength};Buffer.prototype.writeInt8=function writeInt8(value,offset,noAssert){value=+value;offset=offset|0;if(!noAssert)checkInt(this,value,offset,1,127,-128);if(!Buffer.TYPED_ARRAY_SUPPORT)value=Math.floor(value);if(value<0)value=255+value+1;this[offset]=value&255;return offset+1};Buffer.prototype.writeInt16LE=function 
writeInt16LE(value,offset,noAssert){value=+value;offset=offset|0;if(!noAssert)checkInt(this,value,offset,2,32767,-32768);if(Buffer.TYPED_ARRAY_SUPPORT){this[offset]=value&255;this[offset+1]=value>>>8}else{objectWriteUInt16(this,value,offset,true)}return offset+2};Buffer.prototype.writeInt16BE=function writeInt16BE(value,offset,noAssert){value=+value;offset=offset|0;if(!noAssert)checkInt(this,value,offset,2,32767,-32768);if(Buffer.TYPED_ARRAY_SUPPORT){this[offset]=value>>>8;this[offset+1]=value&255}else{objectWriteUInt16(this,value,offset,false)}return offset+2};Buffer.prototype.writeInt32LE=function writeInt32LE(value,offset,noAssert){value=+value;offset=offset|0;if(!noAssert)checkInt(this,value,offset,4,2147483647,-2147483648);if(Buffer.TYPED_ARRAY_SUPPORT){this[offset]=value&255;this[offset+1]=value>>>8;this[offset+2]=value>>>16;this[offset+3]=value>>>24}else{objectWriteUInt32(this,value,offset,true)}return offset+4};Buffer.prototype.writeInt32BE=function writeInt32BE(value,offset,noAssert){value=+value;offset=offset|0;if(!noAssert)checkInt(this,value,offset,4,2147483647,-2147483648);if(value<0)value=4294967295+value+1;if(Buffer.TYPED_ARRAY_SUPPORT){this[offset]=value>>>24;this[offset+1]=value>>>16;this[offset+2]=value>>>8;this[offset+3]=value&255}else{objectWriteUInt32(this,value,offset,false)}return offset+4};function checkIEEE754(buf,value,offset,ext,max,min){if(offset+ext>buf.length)throw new RangeError("Index out of range");if(offset<0)throw new RangeError("Index out of range")}function writeFloat(buf,value,offset,littleEndian,noAssert){if(!noAssert){checkIEEE754(buf,value,offset,4,3.4028234663852886e38,-3.4028234663852886e38)}ieee754.write(buf,value,offset,littleEndian,23,4);return offset+4}Buffer.prototype.writeFloatLE=function writeFloatLE(value,offset,noAssert){return writeFloat(this,value,offset,true,noAssert)};Buffer.prototype.writeFloatBE=function writeFloatBE(value,offset,noAssert){return writeFloat(this,value,offset,false,noAssert)};function writeDouble(buf,value,offset,littleEndian,noAssert){if(!noAssert){checkIEEE754(buf,value,offset,8,1.7976931348623157e308,-1.7976931348623157e308)}ieee754.write(buf,value,offset,littleEndian,52,8);return offset+8}Buffer.prototype.writeDoubleLE=function writeDoubleLE(value,offset,noAssert){return writeDouble(this,value,offset,true,noAssert)};Buffer.prototype.writeDoubleBE=function writeDoubleBE(value,offset,noAssert){return writeDouble(this,value,offset,false,noAssert)};Buffer.prototype.copy=function copy(target,targetStart,start,end){if(!start)start=0;if(!end&&end!==0)end=this.length;if(targetStart>=target.length)targetStart=target.length;if(!targetStart)targetStart=0;if(end>0&&end<start)end=start;if(end===start)return 0;if(target.length===0||this.length===0)return 0;if(targetStart<0){throw new RangeError("targetStart out of bounds")}if(start<0||start>=this.length)throw new RangeError("sourceStart out of bounds");if(end<0)throw new RangeError("sourceEnd out of bounds");if(end>this.length)end=this.length;if(target.length-targetStart<end-start){end=target.length-targetStart+start}var len=end-start;var i;if(this===target&&start<targetStart&&targetStart<end){for(i=len-1;i>=0;--i){target[i+targetStart]=this[i+start]}}else if(len<1e3||!Buffer.TYPED_ARRAY_SUPPORT){for(i=0;i<len;++i){target[i+targetStart]=this[i+start]}}else{Uint8Array.prototype.set.call(target,this.subarray(start,start+len),targetStart)}return len};Buffer.prototype.fill=function fill(val,start,end,encoding){if(typeof val==="string"){if(typeof 
start==="string"){encoding=start;start=0;end=this.length}else if(typeof end==="string"){encoding=end;end=this.length}if(val.length===1){var code=val.charCodeAt(0);if(code<256){val=code}}if(encoding!==undefined&&typeof encoding!=="string"){throw new TypeError("encoding must be a string")}if(typeof encoding==="string"&&!Buffer.isEncoding(encoding)){throw new TypeError("Unknown encoding: "+encoding)}}else if(typeof val==="number"){val=val&255}if(start<0||this.length<start||this.length<end){throw new RangeError("Out of range index")}if(end<=start){return this}start=start>>>0;end=end===undefined?this.length:end>>>0;if(!val)val=0;var i;if(typeof val==="number"){for(i=start;i<end;++i){this[i]=val}}else{var bytes=Buffer.isBuffer(val)?val:utf8ToBytes(new Buffer(val,encoding).toString());var len=bytes.length;for(i=0;i<end-start;++i){this[i+start]=bytes[i%len]}}return this};var INVALID_BASE64_RE=/[^+\/0-9A-Za-z-_]/g;function base64clean(str){str=stringtrim(str).replace(INVALID_BASE64_RE,"");if(str.length<2)return"";while(str.length%4!==0){str=str+"="}return str}function stringtrim(str){if(str.trim)return str.trim();return str.replace(/^\s+|\s+$/g,"")}function toHex(n){if(n<16)return"0"+n.toString(16);return n.toString(16)}function utf8ToBytes(string,units){units=units||Infinity;var codePoint;var length=string.length;var leadSurrogate=null;var bytes=[];for(var i=0;i<length;++i){codePoint=string.charCodeAt(i);if(codePoint>55295&&codePoint<57344){if(!leadSurrogate){if(codePoint>56319){if((units-=3)>-1)bytes.push(239,191,189);continue}else if(i+1===length){if((units-=3)>-1)bytes.push(239,191,189);continue}leadSurrogate=codePoint;continue}if(codePoint<56320){if((units-=3)>-1)bytes.push(239,191,189);leadSurrogate=codePoint;continue}codePoint=(leadSurrogate-55296<<10|codePoint-56320)+65536}else if(leadSurrogate){if((units-=3)>-1)bytes.push(239,191,189)}leadSurrogate=null;if(codePoint<128){if((units-=1)<0)break;bytes.push(codePoint)}else if(codePoint<2048){if((units-=2)<0)break;bytes.push(codePoint>>6|192,codePoint&63|128)}else if(codePoint<65536){if((units-=3)<0)break;bytes.push(codePoint>>12|224,codePoint>>6&63|128,codePoint&63|128)}else if(codePoint<1114112){if((units-=4)<0)break;bytes.push(codePoint>>18|240,codePoint>>12&63|128,codePoint>>6&63|128,codePoint&63|128)}else{throw new Error("Invalid code point")}}return bytes}function asciiToBytes(str){var byteArray=[];for(var i=0;i<str.length;++i){byteArray.push(str.charCodeAt(i)&255)}return byteArray}function utf16leToBytes(str,units){var c,hi,lo;var byteArray=[];for(var i=0;i<str.length;++i){if((units-=2)<0)break;c=str.charCodeAt(i);hi=c>>8;lo=c%256;byteArray.push(lo);byteArray.push(hi)}return byteArray}function base64ToBytes(str){return base64.toByteArray(base64clean(str))}function blitBuffer(src,dst,offset,length){for(var i=0;i<length;++i){if(i+offset>=dst.length||i>=src.length)break;dst[i+offset]=src[i]}return i}function isnan(val){return val!==val}}).call(this,typeof global!=="undefined"?global:typeof self!=="undefined"?self:typeof window!=="undefined"?window:{})},{"base64-js":2,ieee754:37,isarray:40}],6:[function(require,module,exports){(function(Buffer){function isArray(arg){if(Array.isArray){return Array.isArray(arg)}return objectToString(arg)==="[object Array]"}exports.isArray=isArray;function isBoolean(arg){return typeof arg==="boolean"}exports.isBoolean=isBoolean;function isNull(arg){return arg===null}exports.isNull=isNull;function isNullOrUndefined(arg){return arg==null}exports.isNullOrUndefined=isNullOrUndefined;function isNumber(arg){return 
typeof arg==="number"}exports.isNumber=isNumber;function isString(arg){return typeof arg==="string"}exports.isString=isString;function isSymbol(arg){return typeof arg==="symbol"}exports.isSymbol=isSymbol;function isUndefined(arg){return arg===void 0}exports.isUndefined=isUndefined;function isRegExp(re){return objectToString(re)==="[object RegExp]"}exports.isRegExp=isRegExp;function isObject(arg){return typeof arg==="object"&&arg!==null}exports.isObject=isObject;function isDate(d){return objectToString(d)==="[object Date]"}exports.isDate=isDate;function isError(e){return objectToString(e)==="[object Error]"||e instanceof Error}exports.isError=isError;function isFunction(arg){return typeof arg==="function"}exports.isFunction=isFunction;function isPrimitive(arg){return arg===null||typeof arg==="boolean"||typeof arg==="number"||typeof arg==="string"||typeof arg==="symbol"||typeof arg==="undefined"}exports.isPrimitive=isPrimitive;exports.isBuffer=Buffer.isBuffer;function objectToString(o){return Object.prototype.toString.call(o)}}).call(this,{isBuffer:require("../../is-buffer/index.js")})},{"../../is-buffer/index.js":39}],7:[function(require,module,exports){var ElementType=require("domelementtype");var entities=require("entities");var booleanAttributes={__proto__:null,allowfullscreen:true,async:true,autofocus:true,autoplay:true,checked:true,controls:true,default:true,defer:true,disabled:true,hidden:true,ismap:true,loop:true,multiple:true,muted:true,open:true,readonly:true,required:true,reversed:true,scoped:true,seamless:true,selected:true,typemustmatch:true};var unencodedElements={__proto__:null,style:true,script:true,xmp:true,iframe:true,noembed:true,noframes:true,plaintext:true,noscript:true};function formatAttrs(attributes,opts){if(!attributes)return;var output="",value;for(var key in attributes){value=attributes[key];if(output){output+=" "}if(!value&&booleanAttributes[key]){output+=key}else{output+=key+'="'+(opts.decodeEntities?entities.encodeXML(value):value)+'"'}}return output}var singleTag={__proto__:null,area:true,base:true,basefont:true,br:true,col:true,command:true,embed:true,frame:true,hr:true,img:true,input:true,isindex:true,keygen:true,link:true,meta:true,param:true,source:true,track:true,wbr:true};var render=module.exports=function(dom,opts){if(!Array.isArray(dom)&&!dom.cheerio)dom=[dom];opts=opts||{};var output="";for(var i=0;i<dom.length;i++){var elem=dom[i];if(elem.type==="root")output+=render(elem.children,opts);else if(ElementType.isTag(elem))output+=renderTag(elem,opts);else if(elem.type===ElementType.Directive)output+=renderDirective(elem);else if(elem.type===ElementType.Comment)output+=renderComment(elem);else if(elem.type===ElementType.CDATA)output+=renderCdata(elem);else output+=renderText(elem,opts)}return output};function renderTag(elem,opts){if(elem.name==="svg")opts={decodeEntities:opts.decodeEntities,xmlMode:true};var tag="<"+elem.name,attribs=formatAttrs(elem.attribs,opts);if(attribs){tag+=" "+attribs}if(opts.xmlMode&&(!elem.children||elem.children.length===0)){tag+="/>"}else{tag+=">";if(elem.children){tag+=render(elem.children,opts)}if(!singleTag[elem.name]||opts.xmlMode){tag+="</"+elem.name+">"}}return tag}function renderDirective(elem){return"<"+elem.data+">"}function renderText(elem,opts){var data=elem.data||"";if(opts.decodeEntities&&!(elem.parent&&elem.parent.name in unencodedElements)){data=entities.encodeXML(data)}return data}function renderCdata(elem){return"<![CDATA["+elem.children[0].data+"]]>"}function 
renderComment(elem){return"<!--"+elem.data+"-->"}},{domelementtype:8,entities:20}],8:[function(require,module,exports){module.exports={Text:"text",Directive:"directive",Comment:"comment",Script:"script",Style:"style",Tag:"tag",CDATA:"cdata",isTag:function(elem){return elem.type==="tag"||elem.type==="script"||elem.type==="style"}}},{}],9:[function(require,module,exports){module.exports={Text:"text",Directive:"directive",Comment:"comment",Script:"script",Style:"style",Tag:"tag",CDATA:"cdata",Doctype:"doctype",isTag:function(elem){return elem.type==="tag"||elem.type==="script"||elem.type==="style"}}},{}],10:[function(require,module,exports){var ElementType=require("domelementtype");var re_whitespace=/\s+/g;var NodePrototype=require("./lib/node");var ElementPrototype=require("./lib/element");function DomHandler(callback,options,elementCB){if(typeof callback==="object"){elementCB=options;options=callback;callback=null}else if(typeof options==="function"){elementCB=options;options=defaultOpts}this._callback=callback;this._options=options||defaultOpts;this._elementCB=elementCB;this.dom=[];this._done=false;this._tagStack=[];this._parser=this._parser||null}var defaultOpts={normalizeWhitespace:false,withStartIndices:false};DomHandler.prototype.onparserinit=function(parser){this._parser=parser};DomHandler.prototype.onreset=function(){DomHandler.call(this,this._callback,this._options,this._elementCB)};DomHandler.prototype.onend=function(){if(this._done)return;this._done=true;this._parser=null;this._handleCallback(null)};DomHandler.prototype._handleCallback=DomHandler.prototype.onerror=function(error){if(typeof this._callback==="function"){this._callback(error,this.dom)}else{if(error)throw error}};DomHandler.prototype.onclosetag=function(){var elem=this._tagStack.pop();if(this._elementCB)this._elementCB(elem)};DomHandler.prototype._addDomElement=function(element){var parent=this._tagStack[this._tagStack.length-1];var siblings=parent?parent.children:this.dom;var previousSibling=siblings[siblings.length-1];element.next=null;if(this._options.withStartIndices){element.startIndex=this._parser.startIndex}if(this._options.withDomLvl1){element.__proto__=element.type==="tag"?ElementPrototype:NodePrototype}if(previousSibling){element.prev=previousSibling;previousSibling.next=element}else{element.prev=null}siblings.push(element);element.parent=parent||null};DomHandler.prototype.onopentag=function(name,attribs){var element={type:name==="script"?ElementType.Script:name==="style"?ElementType.Style:ElementType.Tag,name:name,attribs:attribs,children:[]};this._addDomElement(element);this._tagStack.push(element)};DomHandler.prototype.ontext=function(data){var normalize=this._options.normalizeWhitespace||this._options.ignoreWhitespace;var lastTag;if(!this._tagStack.length&&this.dom.length&&(lastTag=this.dom[this.dom.length-1]).type===ElementType.Text){if(normalize){lastTag.data=(lastTag.data+data).replace(re_whitespace," ")}else{lastTag.data+=data}}else{if(this._tagStack.length&&(lastTag=this._tagStack[this._tagStack.length-1])&&(lastTag=lastTag.children[lastTag.children.length-1])&&lastTag.type===ElementType.Text){if(normalize){lastTag.data=(lastTag.data+data).replace(re_whitespace," ")}else{lastTag.data+=data}}else{if(normalize){data=data.replace(re_whitespace," ")}this._addDomElement({data:data,type:ElementType.Text})}}};DomHandler.prototype.oncomment=function(data){var lastTag=this._tagStack[this._tagStack.length-1];if(lastTag&&lastTag.type===ElementType.Comment){lastTag.data+=data;return}var 
element={data:data,type:ElementType.Comment};this._addDomElement(element);this._tagStack.push(element)};DomHandler.prototype.oncdatastart=function(){var element={children:[{data:"",type:ElementType.Text}],type:ElementType.CDATA};this._addDomElement(element);this._tagStack.push(element)};DomHandler.prototype.oncommentend=DomHandler.prototype.oncdataend=function(){this._tagStack.pop()};DomHandler.prototype.onprocessinginstruction=function(name,data){this._addDomElement({name:name,data:data,type:ElementType.Directive})};module.exports=DomHandler},{"./lib/element":11,"./lib/node":12,domelementtype:9}],11:[function(require,module,exports){var NodePrototype=require("./node");var ElementPrototype=module.exports=Object.create(NodePrototype);var domLvl1={tagName:"name"};Object.keys(domLvl1).forEach(function(key){var shorthand=domLvl1[key];Object.defineProperty(ElementPrototype,key,{get:function(){return this[shorthand]||null},set:function(val){this[shorthand]=val;return val}})})},{"./node":12}],12:[function(require,module,exports){var NodePrototype=module.exports={get firstChild(){var children=this.children;return children&&children[0]||null},get lastChild(){var children=this.children;return children&&children[children.length-1]||null},get nodeType(){return nodeTypes[this.type]||nodeTypes.element}};var domLvl1={tagName:"name",childNodes:"children",parentNode:"parent",previousSibling:"prev",nextSibling:"next",nodeValue:"data"};var nodeTypes={element:1,text:3,cdata:4,comment:8};Object.keys(domLvl1).forEach(function(key){var shorthand=domLvl1[key];Object.defineProperty(NodePrototype,key,{get:function(){return this[shorthand]||null},set:function(val){this[shorthand]=val;return val}})})},{}],13:[function(require,module,exports){var DomUtils=module.exports;[require("./lib/stringify"),require("./lib/traversal"),require("./lib/manipulation"),require("./lib/querying"),require("./lib/legacy"),require("./lib/helpers")].forEach(function(ext){Object.keys(ext).forEach(function(key){DomUtils[key]=ext[key].bind(DomUtils)})})},{"./lib/helpers":14,"./lib/legacy":15,"./lib/manipulation":16,"./lib/querying":17,"./lib/stringify":18,"./lib/traversal":19}],14:[function(require,module,exports){exports.removeSubsets=function(nodes){var idx=nodes.length,node,ancestor,replace;while(--idx>-1){node=ancestor=nodes[idx];nodes[idx]=null;replace=true;while(ancestor){if(nodes.indexOf(ancestor)>-1){replace=false;nodes.splice(idx,1);break}ancestor=ancestor.parent}if(replace){nodes[idx]=node}}return nodes};var POSITION={DISCONNECTED:1,PRECEDING:2,FOLLOWING:4,CONTAINS:8,CONTAINED_BY:16};var comparePos=exports.compareDocumentPosition=function(nodeA,nodeB){var aParents=[];var bParents=[];var current,sharedParent,siblings,aSibling,bSibling,idx;if(nodeA===nodeB){return 0}current=nodeA;while(current){aParents.unshift(current);current=current.parent}current=nodeB;while(current){bParents.unshift(current);current=current.parent}idx=0;while(aParents[idx]===bParents[idx]){idx++}if(idx===0){return POSITION.DISCONNECTED}sharedParent=aParents[idx-1];siblings=sharedParent.children;aSibling=aParents[idx];bSibling=bParents[idx];if(siblings.indexOf(aSibling)>siblings.indexOf(bSibling)){if(sharedParent===nodeB){return POSITION.FOLLOWING|POSITION.CONTAINED_BY}return POSITION.FOLLOWING}else{if(sharedParent===nodeA){return POSITION.PRECEDING|POSITION.CONTAINS}return POSITION.PRECEDING}};exports.uniqueSort=function(nodes){var 
idx=nodes.length,node,position;nodes=nodes.slice();while(--idx>-1){node=nodes[idx];position=nodes.indexOf(node);if(position>-1&&position<idx){nodes.splice(idx,1)}}nodes.sort(function(a,b){var relative=comparePos(a,b);if(relative&POSITION.PRECEDING){return-1}else if(relative&POSITION.FOLLOWING){return 1}return 0});return nodes}},{}],15:[function(require,module,exports){var ElementType=require("domelementtype");var isTag=exports.isTag=ElementType.isTag;exports.testElement=function(options,element){for(var key in options){if(!options.hasOwnProperty(key));else if(key==="tag_name"){if(!isTag(element)||!options.tag_name(element.name)){return false}}else if(key==="tag_type"){if(!options.tag_type(element.type))return false}else if(key==="tag_contains"){if(isTag(element)||!options.tag_contains(element.data)){return false}}else if(!element.attribs||!options[key](element.attribs[key])){return false}}return true};var Checks={tag_name:function(name){if(typeof name==="function"){return function(elem){return isTag(elem)&&name(elem.name)}}else if(name==="*"){return isTag}else{return function(elem){return isTag(elem)&&elem.name===name}}},tag_type:function(type){if(typeof type==="function"){return function(elem){return type(elem.type)}}else{return function(elem){return elem.type===type}}},tag_contains:function(data){if(typeof data==="function"){return function(elem){return!isTag(elem)&&data(elem.data)}}else{return function(elem){return!isTag(elem)&&elem.data===data}}}};function getAttribCheck(attrib,value){if(typeof value==="function"){return function(elem){return elem.attribs&&value(elem.attribs[attrib])}}else{return function(elem){return elem.attribs&&elem.attribs[attrib]===value}}}function combineFuncs(a,b){return function(elem){return a(elem)||b(elem)}}exports.getElements=function(options,element,recurse,limit){var funcs=Object.keys(options).map(function(key){var value=options[key];return key in Checks?Checks[key](value):getAttribCheck(key,value)});return funcs.length===0?[]:this.filter(funcs.reduce(combineFuncs),element,recurse,limit)};exports.getElementById=function(id,element,recurse){if(!Array.isArray(element))element=[element];return this.findOne(getAttribCheck("id",id),element,recurse!==false)};exports.getElementsByTagName=function(name,element,recurse,limit){return this.filter(Checks.tag_name(name),element,recurse,limit)};exports.getElementsByTagType=function(type,element,recurse,limit){return this.filter(Checks.tag_type(type),element,recurse,limit)}},{domelementtype:9}],16:[function(require,module,exports){exports.removeElement=function(elem){if(elem.prev)elem.prev.next=elem.next;if(elem.next)elem.next.prev=elem.prev;if(elem.parent){var childs=elem.parent.children;childs.splice(childs.lastIndexOf(elem),1)}};exports.replaceElement=function(elem,replacement){var prev=replacement.prev=elem.prev;if(prev){prev.next=replacement}var next=replacement.next=elem.next;if(next){next.prev=replacement}var parent=replacement.parent=elem.parent;if(parent){var childs=parent.children;childs[childs.lastIndexOf(elem)]=replacement}};exports.appendChild=function(elem,child){child.parent=elem;if(elem.children.push(child)!==1){var sibling=elem.children[elem.children.length-2];sibling.next=child;child.prev=sibling;child.next=null}};exports.append=function(elem,next){var parent=elem.parent,currNext=elem.next;next.next=currNext;next.prev=elem;elem.next=next;next.parent=parent;if(currNext){currNext.prev=next;if(parent){var childs=parent.children;childs.splice(childs.lastIndexOf(currNext),0,next)}}else 
if(parent){parent.children.push(next)}};exports.prepend=function(elem,prev){var parent=elem.parent;if(parent){var childs=parent.children;childs.splice(childs.lastIndexOf(elem),0,prev)}if(elem.prev){elem.prev.next=prev}prev.parent=parent;prev.prev=elem.prev;prev.next=elem;elem.prev=prev}},{}],17:[function(require,module,exports){var isTag=require("domelementtype").isTag;module.exports={filter:filter,find:find,findOneChild:findOneChild,findOne:findOne,existsOne:existsOne,findAll:findAll};function filter(test,element,recurse,limit){if(!Array.isArray(element))element=[element];if(typeof limit!=="number"||!isFinite(limit)){limit=Infinity}return find(test,element,recurse!==false,limit)}function find(test,elems,recurse,limit){var result=[],childs;for(var i=0,j=elems.length;i<j;i++){if(test(elems[i])){result.push(elems[i]);if(--limit<=0)break}childs=elems[i].children;if(recurse&&childs&&childs.length>0){childs=find(test,childs,recurse,limit);result=result.concat(childs);limit-=childs.length;if(limit<=0)break}}return result}function findOneChild(test,elems){for(var i=0,l=elems.length;i<l;i++){if(test(elems[i]))return elems[i]}return null}function findOne(test,elems){var elem=null;for(var i=0,l=elems.length;i<l&&!elem;i++){if(!isTag(elems[i])){continue}else if(test(elems[i])){elem=elems[i]}else if(elems[i].children.length>0){elem=findOne(test,elems[i].children)}}return elem}function existsOne(test,elems){for(var i=0,l=elems.length;i<l;i++){if(isTag(elems[i])&&(test(elems[i])||elems[i].children.length>0&&existsOne(test,elems[i].children))){return true}}return false}function findAll(test,elems){var result=[];for(var i=0,j=elems.length;i<j;i++){if(!isTag(elems[i]))continue;if(test(elems[i]))result.push(elems[i]);if(elems[i].children.length>0){result=result.concat(findAll(test,elems[i].children))}}return result}},{domelementtype:9}],18:[function(require,module,exports){var ElementType=require("domelementtype"),getOuterHTML=require("dom-serializer"),isTag=ElementType.isTag;module.exports={getInnerHTML:getInnerHTML,getOuterHTML:getOuterHTML,getText:getText};function getInnerHTML(elem,opts){return elem.children?elem.children.map(function(elem){return getOuterHTML(elem,opts)}).join(""):""}function getText(elem){if(Array.isArray(elem))return elem.map(getText).join("");if(isTag(elem)||elem.type===ElementType.CDATA)return getText(elem.children);if(elem.type===ElementType.Text)return elem.data;return""}},{"dom-serializer":7,domelementtype:9}],19:[function(require,module,exports){var getChildren=exports.getChildren=function(elem){return elem.children};var getParent=exports.getParent=function(elem){return elem.parent};exports.getSiblings=function(elem){var parent=getParent(elem);return parent?getChildren(parent):[elem]};exports.getAttributeValue=function(elem,name){return elem.attribs&&elem.attribs[name]};exports.hasAttrib=function(elem,name){return!!elem.attribs&&hasOwnProperty.call(elem.attribs,name)};exports.getName=function(elem){return elem.name}},{}],20:[function(require,module,exports){var 
encode=require("./lib/encode.js"),decode=require("./lib/decode.js");exports.decode=function(data,level){return(!level||level<=0?decode.XML:decode.HTML)(data)};exports.decodeStrict=function(data,level){return(!level||level<=0?decode.XML:decode.HTMLStrict)(data)};exports.encode=function(data,level){return(!level||level<=0?encode.XML:encode.HTML)(data)};exports.encodeXML=encode.XML;exports.encodeHTML4=exports.encodeHTML5=exports.encodeHTML=encode.HTML;exports.decodeXML=exports.decodeXMLStrict=decode.XML;exports.decodeHTML4=exports.decodeHTML5=exports.decodeHTML=decode.HTML;exports.decodeHTML4Strict=exports.decodeHTML5Strict=exports.decodeHTMLStrict=decode.HTMLStrict;exports.escape=encode.escape},{"./lib/decode.js":21,"./lib/encode.js":23}],21:[function(require,module,exports){var entityMap=require("../maps/entities.json"),legacyMap=require("../maps/legacy.json"),xmlMap=require("../maps/xml.json"),decodeCodePoint=require("./decode_codepoint.js");var decodeXMLStrict=getStrictDecoder(xmlMap),decodeHTMLStrict=getStrictDecoder(entityMap);function getStrictDecoder(map){var keys=Object.keys(map).join("|"),replace=getReplacer(map);keys+="|#[xX][\\da-fA-F]+|#\\d+";var re=new RegExp("&(?:"+keys+");","g");return function(str){return String(str).replace(re,replace)}}var decodeHTML=function(){var legacy=Object.keys(legacyMap).sort(sorter);var keys=Object.keys(entityMap).sort(sorter);for(var i=0,j=0;i<keys.length;i++){if(legacy[j]===keys[i]){keys[i]+=";?";j++}else{keys[i]+=";"}}var re=new RegExp("&(?:"+keys.join("|")+"|#[xX][\\da-fA-F]+;?|#\\d+;?)","g"),replace=getReplacer(entityMap);function replacer(str){if(str.substr(-1)!==";")str+=";";return replace(str)}return function(str){return String(str).replace(re,replacer)}}();function sorter(a,b){return a<b?1:-1}function getReplacer(map){return function replace(str){if(str.charAt(1)==="#"){if(str.charAt(2)==="X"||str.charAt(2)==="x"){return decodeCodePoint(parseInt(str.substr(3),16))}return decodeCodePoint(parseInt(str.substr(2),10))}return map[str.slice(1,-1)]; }}module.exports={XML:decodeXMLStrict,HTML:decodeHTML,HTMLStrict:decodeHTMLStrict}},{"../maps/entities.json":25,"../maps/legacy.json":26,"../maps/xml.json":27,"./decode_codepoint.js":22}],22:[function(require,module,exports){var decodeMap=require("../maps/decode.json");module.exports=decodeCodePoint;function decodeCodePoint(codePoint){if(codePoint>=55296&&codePoint<=57343||codePoint>1114111){return"�"}if(codePoint in decodeMap){codePoint=decodeMap[codePoint]}var output="";if(codePoint>65535){codePoint-=65536;output+=String.fromCharCode(codePoint>>>10&1023|55296);codePoint=56320|codePoint&1023}output+=String.fromCharCode(codePoint);return output}},{"../maps/decode.json":24}],23:[function(require,module,exports){var inverseXML=getInverseObj(require("../maps/xml.json")),xmlReplacer=getInverseReplacer(inverseXML);exports.XML=getInverse(inverseXML,xmlReplacer);var inverseHTML=getInverseObj(require("../maps/entities.json")),htmlReplacer=getInverseReplacer(inverseHTML);exports.HTML=getInverse(inverseHTML,htmlReplacer);function getInverseObj(obj){return Object.keys(obj).sort().reduce(function(inverse,name){inverse[obj[name]]="&"+name+";";return inverse},{})}function getInverseReplacer(inverse){var single=[],multiple=[];Object.keys(inverse).forEach(function(k){if(k.length===1){single.push("\\"+k)}else{multiple.push(k)}});multiple.unshift("["+single.join("")+"]");return new RegExp(multiple.join("|"),"g")}var re_nonASCII=/[^\0-\x7F]/g,re_astralSymbols=/[\uD800-\uDBFF][\uDC00-\uDFFF]/g;function 
singleCharReplacer(c){return"&#x"+c.charCodeAt(0).toString(16).toUpperCase()+";"}function astralReplacer(c){var high=c.charCodeAt(0);var low=c.charCodeAt(1);var codePoint=(high-55296)*1024+low-56320+65536;return"&#x"+codePoint.toString(16).toUpperCase()+";"}function getInverse(inverse,re){function func(name){return inverse[name]}return function(data){return data.replace(re,func).replace(re_astralSymbols,astralReplacer).replace(re_nonASCII,singleCharReplacer)}}var re_xmlChars=getInverseReplacer(inverseXML);function escapeXML(data){return data.replace(re_xmlChars,singleCharReplacer).replace(re_astralSymbols,astralReplacer).replace(re_nonASCII,singleCharReplacer)}exports.escape=escapeXML},{"../maps/entities.json":25,"../maps/xml.json":27}],24:[function(require,module,exports){module.exports={0:65533,128:8364,130:8218,131:402,132:8222,133:8230,134:8224,135:8225,136:710,137:8240,138:352,139:8249,140:338,142:381,145:8216,146:8217,147:8220,148:8221,149:8226,150:8211,151:8212,152:732,153:8482,154:353,155:8250,156:339,158:382,159:376}},{}],25:[function(require,module,exports){module.exports={Aacute:"Á",aacute:"á",Abreve:"Ă",abreve:"ă",ac:"∾",acd:"∿",acE:"∾̳",Acirc:"Â",acirc:"â",acute:"´",Acy:"А",acy:"а",AElig:"Æ",aelig:"æ",af:"⁡",Afr:"𝔄",afr:"𝔞",Agrave:"À",agrave:"à",alefsym:"ℵ",aleph:"ℵ",Alpha:"Α",alpha:"α",Amacr:"Ā",amacr:"ā",amalg:"⨿",amp:"&",AMP:"&",andand:"⩕",And:"⩓",and:"∧",andd:"⩜",andslope:"⩘",andv:"⩚",ang:"∠",ange:"⦤",angle:"∠",angmsdaa:"⦨",angmsdab:"⦩",angmsdac:"⦪",angmsdad:"⦫",angmsdae:"⦬",angmsdaf:"⦭",angmsdag:"⦮",angmsdah:"⦯",angmsd:"∡",angrt:"∟",angrtvb:"⊾",angrtvbd:"⦝",angsph:"∢",angst:"Å",angzarr:"⍼",Aogon:"Ą",aogon:"ą",Aopf:"𝔸",aopf:"𝕒",apacir:"⩯",ap:"≈",apE:"⩰",ape:"≊",apid:"≋",apos:"'",ApplyFunction:"⁡",approx:"≈",approxeq:"≊",Aring:"Å",aring:"å",Ascr:"𝒜",ascr:"𝒶",Assign:"≔",ast:"*",asymp:"≈",asympeq:"≍",Atilde:"Ã",atilde:"ã",Auml:"Ä",auml:"ä",awconint:"∳",awint:"⨑",backcong:"≌",backepsilon:"϶",backprime:"‵",backsim:"∽",backsimeq:"⋍",Backslash:"∖",Barv:"⫧",barvee:"⊽",barwed:"⌅",Barwed:"⌆",barwedge:"⌅",bbrk:"⎵",bbrktbrk:"⎶",bcong:"≌",Bcy:"Б",bcy:"б",bdquo:"„",becaus:"∵",because:"∵",Because:"∵",bemptyv:"⦰",bepsi:"϶",bernou:"ℬ",Bernoullis:"ℬ",Beta:"Β",beta:"β",beth:"ℶ",between:"≬",Bfr:"𝔅",bfr:"𝔟",bigcap:"⋂",bigcirc:"◯",bigcup:"⋃",bigodot:"⨀",bigoplus:"⨁",bigotimes:"⨂",bigsqcup:"⨆",bigstar:"★",bigtriangledown:"▽",bigtriangleup:"△",biguplus:"⨄",bigvee:"⋁",bigwedge:"⋀",bkarow:"⤍",blacklozenge:"⧫",blacksquare:"▪",blacktriangle:"▴",blacktriangledown:"▾",blacktriangleleft:"◂",blacktriangleright:"▸",blank:"␣",blk12:"▒",blk14:"░",blk34:"▓",block:"█",bne:"=⃥",bnequiv:"≡⃥",bNot:"⫭",bnot:"⌐",Bopf:"𝔹",bopf:"𝕓",bot:"⊥",bottom:"⊥",bowtie:"⋈",boxbox:"⧉",boxdl:"┐",boxdL:"╕",boxDl:"╖",boxDL:"╗",boxdr:"┌",boxdR:"╒",boxDr:"╓",boxDR:"╔",boxh:"─",boxH:"═",boxhd:"┬",boxHd:"╤",boxhD:"╥",boxHD:"╦",boxhu:"┴",boxHu:"╧",boxhU:"╨",boxHU:"╩",boxminus:"⊟",boxplus:"⊞",boxtimes:"⊠",boxul:"┘",boxuL:"╛",boxUl:"╜",boxUL:"╝",boxur:"└",boxuR:"╘",boxUr:"╙",boxUR:"╚",boxv:"│",boxV:"║",boxvh:"┼",boxvH:"╪",boxVh:"╫",boxVH:"╬",boxvl:"┤",boxvL:"╡",boxVl:"╢",boxVL:"╣",boxvr:"├",boxvR:"╞",boxVr:"╟",boxVR:"╠",bprime:"‵",breve:"˘",Breve:"˘",brvbar:"¦",bscr:"𝒷",Bscr:"ℬ",bsemi:"⁏",bsim:"∽",bsime:"⋍",bsolb:"⧅",bsol:"\\",bsolhsub:"⟈",bull:"•",bullet:"•",bump:"≎",bumpE:"⪮",bumpe:"≏",Bumpeq:"≎",bumpeq:"≏",Cacute:"Ć",cacute:"ć",capand:"⩄",capbrcup:"⩉",capcap:"⩋",cap:"∩",Cap:"⋒",capcup:"⩇",capdot:"⩀",CapitalDifferentialD:"ⅅ",caps:"∩︀",caret:"⁁",caron:"ˇ",Cayleys:"ℭ",ccaps:"⩍",Ccaron:"Č",ccaron:"č",Ccedil:"Ç",ccedil:"ç",Ccirc:"Ĉ",ccirc:"ĉ
",Cconint:"∰",ccups:"⩌",ccupssm:"⩐",Cdot:"Ċ",cdot:"ċ",cedil:"¸",Cedilla:"¸",cemptyv:"⦲",cent:"¢",centerdot:"·",CenterDot:"·",cfr:"𝔠",Cfr:"ℭ",CHcy:"Ч",chcy:"ч",check:"✓",checkmark:"✓",Chi:"Χ",chi:"χ",circ:"ˆ",circeq:"≗",circlearrowleft:"↺",circlearrowright:"↻",circledast:"⊛",circledcirc:"⊚",circleddash:"⊝",CircleDot:"⊙",circledR:"®",circledS:"Ⓢ",CircleMinus:"⊖",CirclePlus:"⊕",CircleTimes:"⊗",cir:"○",cirE:"⧃",cire:"≗",cirfnint:"⨐",cirmid:"⫯",cirscir:"⧂",ClockwiseContourIntegral:"∲",CloseCurlyDoubleQuote:"”",CloseCurlyQuote:"’",clubs:"♣",clubsuit:"♣",colon:":",Colon:"∷",Colone:"⩴",colone:"≔",coloneq:"≔",comma:",",commat:"@",comp:"∁",compfn:"∘",complement:"∁",complexes:"ℂ",cong:"≅",congdot:"⩭",Congruent:"≡",conint:"∮",Conint:"∯",ContourIntegral:"∮",copf:"𝕔",Copf:"ℂ",coprod:"∐",Coproduct:"∐",copy:"©",COPY:"©",copysr:"℗",CounterClockwiseContourIntegral:"∳",crarr:"↵",cross:"✗",Cross:"⨯",Cscr:"𝒞",cscr:"𝒸",csub:"⫏",csube:"⫑",csup:"⫐",csupe:"⫒",ctdot:"⋯",cudarrl:"⤸",cudarrr:"⤵",cuepr:"⋞",cuesc:"⋟",cularr:"↶",cularrp:"⤽",cupbrcap:"⩈",cupcap:"⩆",CupCap:"≍",cup:"∪",Cup:"⋓",cupcup:"⩊",cupdot:"⊍",cupor:"⩅",cups:"∪︀",curarr:"↷",curarrm:"⤼",curlyeqprec:"⋞",curlyeqsucc:"⋟",curlyvee:"⋎",curlywedge:"⋏",curren:"¤",curvearrowleft:"↶",curvearrowright:"↷",cuvee:"⋎",cuwed:"⋏",cwconint:"∲",cwint:"∱",cylcty:"⌭",dagger:"†",Dagger:"‡",daleth:"ℸ",darr:"↓",Darr:"↡",dArr:"⇓",dash:"‐",Dashv:"⫤",dashv:"⊣",dbkarow:"⤏",dblac:"˝",Dcaron:"Ď",dcaron:"ď",Dcy:"Д",dcy:"д",ddagger:"‡",ddarr:"⇊",DD:"ⅅ",dd:"ⅆ",DDotrahd:"⤑",ddotseq:"⩷",deg:"°",Del:"∇",Delta:"Δ",delta:"δ",demptyv:"⦱",dfisht:"⥿",Dfr:"𝔇",dfr:"𝔡",dHar:"⥥",dharl:"⇃",dharr:"⇂",DiacriticalAcute:"´",DiacriticalDot:"˙",DiacriticalDoubleAcute:"˝",DiacriticalGrave:"`",DiacriticalTilde:"˜",diam:"⋄",diamond:"⋄",Diamond:"⋄",diamondsuit:"♦",diams:"♦",die:"¨",DifferentialD:"ⅆ",digamma:"ϝ",disin:"⋲",div:"÷",divide:"÷",divideontimes:"⋇",divonx:"⋇",DJcy:"Ђ",djcy:"ђ",dlcorn:"⌞",dlcrop:"⌍",dollar:"$",Dopf:"𝔻",dopf:"𝕕",Dot:"¨",dot:"˙",DotDot:"⃜",doteq:"≐",doteqdot:"≑",DotEqual:"≐",dotminus:"∸",dotplus:"∔",dotsquare:"⊡",doublebarwedge:"⌆",DoubleContourIntegral:"∯",DoubleDot:"¨",DoubleDownArrow:"⇓",DoubleLeftArrow:"⇐",DoubleLeftRightArrow:"⇔",DoubleLeftTee:"⫤",DoubleLongLeftArrow:"⟸",DoubleLongLeftRightArrow:"⟺",DoubleLongRightArrow:"⟹",DoubleRightArrow:"⇒",DoubleRightTee:"⊨",DoubleUpArrow:"⇑",DoubleUpDownArrow:"⇕",DoubleVerticalBar:"∥",DownArrowBar:"⤓",downarrow:"↓",DownArrow:"↓",Downarrow:"⇓",DownArrowUpArrow:"⇵",DownBreve:"̑",downdownarrows:"⇊",downharpoonleft:"⇃",downharpoonright:"⇂",DownLeftRightVector:"⥐",DownLeftTeeVector:"⥞",DownLeftVectorBar:"⥖",DownLeftVector:"↽",DownRightTeeVector:"⥟",DownRightVectorBar:"⥗",DownRightVector:"⇁",DownTeeArrow:"↧",DownTee:"⊤",drbkarow:"⤐",drcorn:"⌟",drcrop:"⌌",Dscr:"𝒟",dscr:"𝒹",DScy:"Ѕ",dscy:"ѕ",dsol:"⧶",Dstrok:"Đ",dstrok:"đ",dtdot:"⋱",dtri:"▿",dtrif:"▾",duarr:"⇵",duhar:"⥯",dwangle:"⦦",DZcy:"Џ",dzcy:"џ",dzigrarr:"⟿",Eacute:"É",eacute:"é",easter:"⩮",Ecaron:"Ě",ecaron:"ě",Ecirc:"Ê",ecirc:"ê",ecir:"≖",ecolon:"≕",Ecy:"Э",ecy:"э",eDDot:"⩷",Edot:"Ė",edot:"ė",eDot:"≑",ee:"ⅇ",efDot:"≒",Efr:"𝔈",efr:"𝔢",eg:"⪚",Egrave:"È",egrave:"è",egs:"⪖",egsdot:"⪘",el:"⪙",Element:"∈",elinters:"⏧",ell:"ℓ",els:"⪕",elsdot:"⪗",Emacr:"Ē",emacr:"ē",empty:"∅",emptyset:"∅",EmptySmallSquare:"◻",emptyv:"∅",EmptyVerySmallSquare:"▫",emsp13:" ",emsp14:" ",emsp:" ",ENG:"Ŋ",eng:"ŋ",ensp:" 
",Eogon:"Ę",eogon:"ę",Eopf:"𝔼",eopf:"𝕖",epar:"⋕",eparsl:"⧣",eplus:"⩱",epsi:"ε",Epsilon:"Ε",epsilon:"ε",epsiv:"ϵ",eqcirc:"≖",eqcolon:"≕",eqsim:"≂",eqslantgtr:"⪖",eqslantless:"⪕",Equal:"⩵",equals:"=",EqualTilde:"≂",equest:"≟",Equilibrium:"⇌",equiv:"≡",equivDD:"⩸",eqvparsl:"⧥",erarr:"⥱",erDot:"≓",escr:"ℯ",Escr:"ℰ",esdot:"≐",Esim:"⩳",esim:"≂",Eta:"Η",eta:"η",ETH:"Ð",eth:"ð",Euml:"Ë",euml:"ë",euro:"€",excl:"!",exist:"∃",Exists:"∃",expectation:"ℰ",exponentiale:"ⅇ",ExponentialE:"ⅇ",fallingdotseq:"≒",Fcy:"Ф",fcy:"ф",female:"♀",ffilig:"ffi",fflig:"ff",ffllig:"ffl",Ffr:"𝔉",ffr:"𝔣",filig:"fi",FilledSmallSquare:"◼",FilledVerySmallSquare:"▪",fjlig:"fj",flat:"♭",fllig:"fl",fltns:"▱",fnof:"ƒ",Fopf:"𝔽",fopf:"𝕗",forall:"∀",ForAll:"∀",fork:"⋔",forkv:"⫙",Fouriertrf:"ℱ",fpartint:"⨍",frac12:"½",frac13:"⅓",frac14:"¼",frac15:"⅕",frac16:"⅙",frac18:"⅛",frac23:"⅔",frac25:"⅖",frac34:"¾",frac35:"⅗",frac38:"⅜",frac45:"⅘",frac56:"⅚",frac58:"⅝",frac78:"⅞",frasl:"⁄",frown:"⌢",fscr:"𝒻",Fscr:"ℱ",gacute:"ǵ",Gamma:"Γ",gamma:"γ",Gammad:"Ϝ",gammad:"ϝ",gap:"⪆",Gbreve:"Ğ",gbreve:"ğ",Gcedil:"Ģ",Gcirc:"Ĝ",gcirc:"ĝ",Gcy:"Г",gcy:"г",Gdot:"Ġ",gdot:"ġ",ge:"≥",gE:"≧",gEl:"⪌",gel:"⋛",geq:"≥",geqq:"≧",geqslant:"⩾",gescc:"⪩",ges:"⩾",gesdot:"⪀",gesdoto:"⪂",gesdotol:"⪄",gesl:"⋛︀",gesles:"⪔",Gfr:"𝔊",gfr:"𝔤",gg:"≫",Gg:"⋙",ggg:"⋙",gimel:"ℷ",GJcy:"Ѓ",gjcy:"ѓ",gla:"⪥",gl:"≷",glE:"⪒",glj:"⪤",gnap:"⪊",gnapprox:"⪊",gne:"⪈",gnE:"≩",gneq:"⪈",gneqq:"≩",gnsim:"⋧",Gopf:"𝔾",gopf:"𝕘",grave:"`",GreaterEqual:"≥",GreaterEqualLess:"⋛",GreaterFullEqual:"≧",GreaterGreater:"⪢",GreaterLess:"≷",GreaterSlantEqual:"⩾",GreaterTilde:"≳",Gscr:"𝒢",gscr:"ℊ",gsim:"≳",gsime:"⪎",gsiml:"⪐",gtcc:"⪧",gtcir:"⩺",gt:">",GT:">",Gt:"≫",gtdot:"⋗",gtlPar:"⦕",gtquest:"⩼",gtrapprox:"⪆",gtrarr:"⥸",gtrdot:"⋗",gtreqless:"⋛",gtreqqless:"⪌",gtrless:"≷",gtrsim:"≳",gvertneqq:"≩︀",gvnE:"≩︀",Hacek:"ˇ",hairsp:" 
",half:"½",hamilt:"ℋ",HARDcy:"Ъ",hardcy:"ъ",harrcir:"⥈",harr:"↔",hArr:"⇔",harrw:"↭",Hat:"^",hbar:"ℏ",Hcirc:"Ĥ",hcirc:"ĥ",hearts:"♥",heartsuit:"♥",hellip:"…",hercon:"⊹",hfr:"𝔥",Hfr:"ℌ",HilbertSpace:"ℋ",hksearow:"⤥",hkswarow:"⤦",hoarr:"⇿",homtht:"∻",hookleftarrow:"↩",hookrightarrow:"↪",hopf:"𝕙",Hopf:"ℍ",horbar:"―",HorizontalLine:"─",hscr:"𝒽",Hscr:"ℋ",hslash:"ℏ",Hstrok:"Ħ",hstrok:"ħ",HumpDownHump:"≎",HumpEqual:"≏",hybull:"⁃",hyphen:"‐",Iacute:"Í",iacute:"í",ic:"⁣",Icirc:"Î",icirc:"î",Icy:"И",icy:"и",Idot:"İ",IEcy:"Е",iecy:"е",iexcl:"¡",iff:"⇔",ifr:"𝔦",Ifr:"ℑ",Igrave:"Ì",igrave:"ì",ii:"ⅈ",iiiint:"⨌",iiint:"∭",iinfin:"⧜",iiota:"℩",IJlig:"IJ",ijlig:"ij",Imacr:"Ī",imacr:"ī",image:"ℑ",ImaginaryI:"ⅈ",imagline:"ℐ",imagpart:"ℑ",imath:"ı",Im:"ℑ",imof:"⊷",imped:"Ƶ",Implies:"⇒",incare:"℅",in:"∈",infin:"∞",infintie:"⧝",inodot:"ı",intcal:"⊺",int:"∫",Int:"∬",integers:"ℤ",Integral:"∫",intercal:"⊺",Intersection:"⋂",intlarhk:"⨗",intprod:"⨼",InvisibleComma:"⁣",InvisibleTimes:"⁢",IOcy:"Ё",iocy:"ё",Iogon:"Į",iogon:"į",Iopf:"𝕀",iopf:"𝕚",Iota:"Ι",iota:"ι",iprod:"⨼",iquest:"¿",iscr:"𝒾",Iscr:"ℐ",isin:"∈",isindot:"⋵",isinE:"⋹",isins:"⋴",isinsv:"⋳",isinv:"∈",it:"⁢",Itilde:"Ĩ",itilde:"ĩ",Iukcy:"І",iukcy:"і",Iuml:"Ï",iuml:"ï",Jcirc:"Ĵ",jcirc:"ĵ",Jcy:"Й",jcy:"й",Jfr:"𝔍",jfr:"𝔧",jmath:"ȷ",Jopf:"𝕁",jopf:"𝕛",Jscr:"𝒥",jscr:"𝒿",Jsercy:"Ј",jsercy:"ј",Jukcy:"Є",jukcy:"є",Kappa:"Κ",kappa:"κ",kappav:"ϰ",Kcedil:"Ķ",kcedil:"ķ",Kcy:"К",kcy:"к",Kfr:"𝔎",kfr:"𝔨",kgreen:"ĸ",KHcy:"Х",khcy:"х",KJcy:"Ќ",kjcy:"ќ",Kopf:"𝕂",kopf:"𝕜",Kscr:"𝒦",kscr:"𝓀",lAarr:"⇚",Lacute:"Ĺ",lacute:"ĺ",laemptyv:"⦴",lagran:"ℒ",Lambda:"Λ",lambda:"λ",lang:"⟨",Lang:"⟪",langd:"⦑",langle:"⟨",lap:"⪅",Laplacetrf:"ℒ",laquo:"«",larrb:"⇤",larrbfs:"⤟",larr:"←",Larr:"↞",lArr:"⇐",larrfs:"⤝",larrhk:"↩",larrlp:"↫",larrpl:"⤹",larrsim:"⥳",larrtl:"↢",latail:"⤙",lAtail:"⤛",lat:"⪫",late:"⪭",lates:"⪭︀",lbarr:"⤌",lBarr:"⤎",lbbrk:"❲",lbrace:"{",lbrack:"[",lbrke:"⦋",lbrksld:"⦏",lbrkslu:"⦍",Lcaron:"Ľ",lcaron:"ľ",Lcedil:"Ļ",lcedil:"ļ",lceil:"⌈",lcub:"{",Lcy:"Л",lcy:"л",ldca:"⤶",ldquo:"“",ldquor:"„",ldrdhar:"⥧",ldrushar:"⥋",ldsh:"↲",le:"≤",lE:"≦",LeftAngleBracket:"⟨",LeftArrowBar:"⇤",leftarrow:"←",LeftArrow:"←",Leftarrow:"⇐",LeftArrowRightArrow:"⇆",leftarrowtail:"↢",LeftCeiling:"⌈",LeftDoubleBracket:"⟦",LeftDownTeeVector:"⥡",LeftDownVectorBar:"⥙",LeftDownVector:"⇃",LeftFloor:"⌊",leftharpoondown:"↽",leftharpoonup:"↼",leftleftarrows:"⇇",leftrightarrow:"↔",LeftRightArrow:"↔",Leftrightarrow:"⇔",leftrightarrows:"⇆",leftrightharpoons:"⇋",leftrightsquigarrow:"↭",LeftRightVector:"⥎",LeftTeeArrow:"↤",LeftTee:"⊣",LeftTeeVector:"⥚",leftthreetimes:"⋋",LeftTriangleBar:"⧏",LeftTriangle:"⊲",LeftTriangleEqual:"⊴",LeftUpDownVector:"⥑",LeftUpTeeVector:"⥠",LeftUpVectorBar:"⥘",LeftUpVector:"↿",LeftVectorBar:"⥒",LeftVector:"↼",lEg:"⪋",leg:"⋚",leq:"≤",leqq:"≦",leqslant:"⩽",lescc:"⪨",les:"⩽",lesdot:"⩿",lesdoto:"⪁",lesdotor:"⪃",lesg:"⋚︀",lesges:"⪓",lessapprox:"⪅",lessdot:"⋖",lesseqgtr:"⋚",lesseqqgtr:"⪋",LessEqualGreater:"⋚",LessFullEqual:"≦",LessGreater:"≶",lessgtr:"≶",LessLess:"⪡",lesssim:"≲",LessSlantEqual:"⩽",LessTilde:"≲",lfisht:"⥼",lfloor:"⌊",Lfr:"𝔏",lfr:"𝔩",lg:"≶",lgE:"⪑",lHar:"⥢",lhard:"↽",lharu:"↼",lharul:"⥪",lhblk:"▄",LJcy:"Љ",ljcy:"љ",llarr:"⇇",ll:"≪",Ll:"⋘",llcorner:"⌞",Lleftarrow:"⇚",llhard:"⥫",lltri:"◺",Lmidot:"Ŀ",lmidot:"ŀ",lmoustache:"⎰",lmoust:"⎰",lnap:"⪉",lnapprox:"⪉",lne:"⪇",lnE:"≨",lneq:"⪇",lneqq:"≨",lnsim:"⋦",loang:"⟬",loarr:"⇽",lobrk:"⟦",longleftarrow:"⟵",LongLeftArrow:"⟵",Longleftarrow:"⟸",longleftrightarrow:"⟷",LongLeftRightArrow:"⟷",Longleftrightarrow:"⟺",longmapsto:"⟼",longrightarrow:"⟶",Lo
ngRightArrow:"⟶",Longrightarrow:"⟹",looparrowleft:"↫",looparrowright:"↬",lopar:"⦅",Lopf:"𝕃",lopf:"𝕝",loplus:"⨭",lotimes:"⨴",lowast:"∗",lowbar:"_",LowerLeftArrow:"↙",LowerRightArrow:"↘",loz:"◊",lozenge:"◊",lozf:"⧫",lpar:"(",lparlt:"⦓",lrarr:"⇆",lrcorner:"⌟",lrhar:"⇋",lrhard:"⥭",lrm:"‎",lrtri:"⊿",lsaquo:"‹",lscr:"𝓁",Lscr:"ℒ",lsh:"↰",Lsh:"↰",lsim:"≲",lsime:"⪍",lsimg:"⪏",lsqb:"[",lsquo:"‘",lsquor:"‚",Lstrok:"Ł",lstrok:"ł",ltcc:"⪦",ltcir:"⩹",lt:"<",LT:"<",Lt:"≪",ltdot:"⋖",lthree:"⋋",ltimes:"⋉",ltlarr:"⥶",ltquest:"⩻",ltri:"◃",ltrie:"⊴",ltrif:"◂",ltrPar:"⦖",lurdshar:"⥊",luruhar:"⥦",lvertneqq:"≨︀",lvnE:"≨︀",macr:"¯",male:"♂",malt:"✠",maltese:"✠",Map:"⤅",map:"↦",mapsto:"↦",mapstodown:"↧",mapstoleft:"↤",mapstoup:"↥",marker:"▮",mcomma:"⨩",Mcy:"М",mcy:"м",mdash:"—",mDDot:"∺",measuredangle:"∡",MediumSpace:" ",Mellintrf:"ℳ",Mfr:"𝔐",mfr:"𝔪",mho:"℧",micro:"µ",midast:"*",midcir:"⫰",mid:"∣",middot:"·",minusb:"⊟",minus:"−",minusd:"∸",minusdu:"⨪",MinusPlus:"∓",mlcp:"⫛",mldr:"…",mnplus:"∓",models:"⊧",Mopf:"𝕄",mopf:"𝕞",mp:"∓",mscr:"𝓂",Mscr:"ℳ",mstpos:"∾",Mu:"Μ",mu:"μ",multimap:"⊸",mumap:"⊸",nabla:"∇",Nacute:"Ń",nacute:"ń",nang:"∠⃒",nap:"≉",napE:"⩰̸",napid:"≋̸",napos:"ʼn",napprox:"≉",natural:"♮",naturals:"ℕ",natur:"♮",nbsp:" ",nbump:"≎̸",nbumpe:"≏̸",ncap:"⩃",Ncaron:"Ň",ncaron:"ň",Ncedil:"Ņ",ncedil:"ņ",ncong:"≇",ncongdot:"⩭̸",ncup:"⩂",Ncy:"Н",ncy:"н",ndash:"–",nearhk:"⤤",nearr:"↗",neArr:"⇗",nearrow:"↗",ne:"≠",nedot:"≐̸",NegativeMediumSpace:"​",NegativeThickSpace:"​",NegativeThinSpace:"​",NegativeVeryThinSpace:"​",nequiv:"≢",nesear:"⤨",nesim:"≂̸",NestedGreaterGreater:"≫",NestedLessLess:"≪",NewLine:"\n",nexist:"∄",nexists:"∄",Nfr:"𝔑",nfr:"𝔫",ngE:"≧̸",nge:"≱",ngeq:"≱",ngeqq:"≧̸",ngeqslant:"⩾̸",nges:"⩾̸",nGg:"⋙̸",ngsim:"≵",nGt:"≫⃒",ngt:"≯",ngtr:"≯",nGtv:"≫̸",nharr:"↮",nhArr:"⇎",nhpar:"⫲",ni:"∋",nis:"⋼",nisd:"⋺",niv:"∋",NJcy:"Њ",njcy:"њ",nlarr:"↚",nlArr:"⇍",nldr:"‥",nlE:"≦̸",nle:"≰",nleftarrow:"↚",nLeftarrow:"⇍",nleftrightarrow:"↮",nLeftrightarrow:"⇎",nleq:"≰",nleqq:"≦̸",nleqslant:"⩽̸",nles:"⩽̸",nless:"≮",nLl:"⋘̸",nlsim:"≴",nLt:"≪⃒",nlt:"≮",nltri:"⋪",nltrie:"⋬",nLtv:"≪̸",nmid:"∤",NoBreak:"⁠",NonBreakingSpace:" 
",nopf:"𝕟",Nopf:"ℕ",Not:"⫬",not:"¬",NotCongruent:"≢",NotCupCap:"≭",NotDoubleVerticalBar:"∦",NotElement:"∉",NotEqual:"≠",NotEqualTilde:"≂̸",NotExists:"∄",NotGreater:"≯",NotGreaterEqual:"≱",NotGreaterFullEqual:"≧̸",NotGreaterGreater:"≫̸",NotGreaterLess:"≹",NotGreaterSlantEqual:"⩾̸",NotGreaterTilde:"≵",NotHumpDownHump:"≎̸",NotHumpEqual:"≏̸",notin:"∉",notindot:"⋵̸",notinE:"⋹̸",notinva:"∉",notinvb:"⋷",notinvc:"⋶",NotLeftTriangleBar:"⧏̸",NotLeftTriangle:"⋪",NotLeftTriangleEqual:"⋬",NotLess:"≮",NotLessEqual:"≰",NotLessGreater:"≸",NotLessLess:"≪̸",NotLessSlantEqual:"⩽̸",NotLessTilde:"≴",NotNestedGreaterGreater:"⪢̸",NotNestedLessLess:"⪡̸",notni:"∌",notniva:"∌",notnivb:"⋾",notnivc:"⋽",NotPrecedes:"⊀",NotPrecedesEqual:"⪯̸",NotPrecedesSlantEqual:"⋠",NotReverseElement:"∌",NotRightTriangleBar:"⧐̸",NotRightTriangle:"⋫",NotRightTriangleEqual:"⋭",NotSquareSubset:"⊏̸",NotSquareSubsetEqual:"⋢",NotSquareSuperset:"⊐̸",NotSquareSupersetEqual:"⋣",NotSubset:"⊂⃒",NotSubsetEqual:"⊈",NotSucceeds:"⊁",NotSucceedsEqual:"⪰̸",NotSucceedsSlantEqual:"⋡",NotSucceedsTilde:"≿̸",NotSuperset:"⊃⃒",NotSupersetEqual:"⊉",NotTilde:"≁",NotTildeEqual:"≄",NotTildeFullEqual:"≇",NotTildeTilde:"≉",NotVerticalBar:"∤",nparallel:"∦",npar:"∦",nparsl:"⫽⃥",npart:"∂̸",npolint:"⨔",npr:"⊀",nprcue:"⋠",nprec:"⊀",npreceq:"⪯̸",npre:"⪯̸",nrarrc:"⤳̸",nrarr:"↛",nrArr:"⇏",nrarrw:"↝̸",nrightarrow:"↛",nRightarrow:"⇏",nrtri:"⋫",nrtrie:"⋭",nsc:"⊁",nsccue:"⋡",nsce:"⪰̸",Nscr:"𝒩",nscr:"𝓃",nshortmid:"∤",nshortparallel:"∦",nsim:"≁",nsime:"≄",nsimeq:"≄",nsmid:"∤",nspar:"∦",nsqsube:"⋢",nsqsupe:"⋣",nsub:"⊄",nsubE:"⫅̸",nsube:"⊈",nsubset:"⊂⃒",nsubseteq:"⊈",nsubseteqq:"⫅̸",nsucc:"⊁",nsucceq:"⪰̸",nsup:"⊅",nsupE:"⫆̸",nsupe:"⊉",nsupset:"⊃⃒",nsupseteq:"⊉",nsupseteqq:"⫆̸",ntgl:"≹",Ntilde:"Ñ",ntilde:"ñ",ntlg:"≸",ntriangleleft:"⋪",ntrianglelefteq:"⋬",ntriangleright:"⋫",ntrianglerighteq:"⋭",Nu:"Ν",nu:"ν",num:"#",numero:"№",numsp:" 
",nvap:"≍⃒",nvdash:"⊬",nvDash:"⊭",nVdash:"⊮",nVDash:"⊯",nvge:"≥⃒",nvgt:">⃒",nvHarr:"⤄",nvinfin:"⧞",nvlArr:"⤂",nvle:"≤⃒",nvlt:"<⃒",nvltrie:"⊴⃒",nvrArr:"⤃",nvrtrie:"⊵⃒",nvsim:"∼⃒",nwarhk:"⤣",nwarr:"↖",nwArr:"⇖",nwarrow:"↖",nwnear:"⤧",Oacute:"Ó",oacute:"ó",oast:"⊛",Ocirc:"Ô",ocirc:"ô",ocir:"⊚",Ocy:"О",ocy:"о",odash:"⊝",Odblac:"Ő",odblac:"ő",odiv:"⨸",odot:"⊙",odsold:"⦼",OElig:"Œ",oelig:"œ",ofcir:"⦿",Ofr:"𝔒",ofr:"𝔬",ogon:"˛",Ograve:"Ò",ograve:"ò",ogt:"⧁",ohbar:"⦵",ohm:"Ω",oint:"∮",olarr:"↺",olcir:"⦾",olcross:"⦻",oline:"‾",olt:"⧀",Omacr:"Ō",omacr:"ō",Omega:"Ω",omega:"ω",Omicron:"Ο",omicron:"ο",omid:"⦶",ominus:"⊖",Oopf:"𝕆",oopf:"𝕠",opar:"⦷",OpenCurlyDoubleQuote:"“",OpenCurlyQuote:"‘",operp:"⦹",oplus:"⊕",orarr:"↻",Or:"⩔",or:"∨",ord:"⩝",order:"ℴ",orderof:"ℴ",ordf:"ª",ordm:"º",origof:"⊶",oror:"⩖",orslope:"⩗",orv:"⩛",oS:"Ⓢ",Oscr:"𝒪",oscr:"ℴ",Oslash:"Ø",oslash:"ø",osol:"⊘",Otilde:"Õ",otilde:"õ",otimesas:"⨶",Otimes:"⨷",otimes:"⊗",Ouml:"Ö",ouml:"ö",ovbar:"⌽",OverBar:"‾",OverBrace:"⏞",OverBracket:"⎴",OverParenthesis:"⏜",para:"¶",parallel:"∥",par:"∥",parsim:"⫳",parsl:"⫽",part:"∂",PartialD:"∂",Pcy:"П",pcy:"п",percnt:"%",period:".",permil:"‰",perp:"⊥",pertenk:"‱",Pfr:"𝔓",pfr:"𝔭",Phi:"Φ",phi:"φ",phiv:"ϕ",phmmat:"ℳ",phone:"☎",Pi:"Π",pi:"π",pitchfork:"⋔",piv:"ϖ",planck:"ℏ",planckh:"ℎ",plankv:"ℏ",plusacir:"⨣",plusb:"⊞",pluscir:"⨢",plus:"+",plusdo:"∔",plusdu:"⨥",pluse:"⩲",PlusMinus:"±",plusmn:"±",plussim:"⨦",plustwo:"⨧",pm:"±",Poincareplane:"ℌ",pointint:"⨕",popf:"𝕡",Popf:"ℙ",pound:"£",prap:"⪷",Pr:"⪻",pr:"≺",prcue:"≼",precapprox:"⪷",prec:"≺",preccurlyeq:"≼",Precedes:"≺",PrecedesEqual:"⪯",PrecedesSlantEqual:"≼",PrecedesTilde:"≾",preceq:"⪯",precnapprox:"⪹",precneqq:"⪵",precnsim:"⋨",pre:"⪯",prE:"⪳",precsim:"≾",prime:"′",Prime:"″",primes:"ℙ",prnap:"⪹",prnE:"⪵",prnsim:"⋨",prod:"∏",Product:"∏",profalar:"⌮",profline:"⌒",profsurf:"⌓",prop:"∝",Proportional:"∝",Proportion:"∷",propto:"∝",prsim:"≾",prurel:"⊰",Pscr:"𝒫",pscr:"𝓅",Psi:"Ψ",psi:"ψ",puncsp:" 
",Qfr:"𝔔",qfr:"𝔮",qint:"⨌",qopf:"𝕢",Qopf:"ℚ",qprime:"⁗",Qscr:"𝒬",qscr:"𝓆",quaternions:"ℍ",quatint:"⨖",quest:"?",questeq:"≟",quot:'"',QUOT:'"',rAarr:"⇛",race:"∽̱",Racute:"Ŕ",racute:"ŕ",radic:"√",raemptyv:"⦳",rang:"⟩",Rang:"⟫",rangd:"⦒",range:"⦥",rangle:"⟩",raquo:"»",rarrap:"⥵",rarrb:"⇥",rarrbfs:"⤠",rarrc:"⤳",rarr:"→",Rarr:"↠",rArr:"⇒",rarrfs:"⤞",rarrhk:"↪",rarrlp:"↬",rarrpl:"⥅",rarrsim:"⥴",Rarrtl:"⤖",rarrtl:"↣",rarrw:"↝",ratail:"⤚",rAtail:"⤜",ratio:"∶",rationals:"ℚ",rbarr:"⤍",rBarr:"⤏",RBarr:"⤐",rbbrk:"❳",rbrace:"}",rbrack:"]",rbrke:"⦌",rbrksld:"⦎",rbrkslu:"⦐",Rcaron:"Ř",rcaron:"ř",Rcedil:"Ŗ",rcedil:"ŗ",rceil:"⌉",rcub:"}",Rcy:"Р",rcy:"р",rdca:"⤷",rdldhar:"⥩",rdquo:"”",rdquor:"”",rdsh:"↳",real:"ℜ",realine:"ℛ",realpart:"ℜ",reals:"ℝ",Re:"ℜ",rect:"▭",reg:"®",REG:"®",ReverseElement:"∋",ReverseEquilibrium:"⇋",ReverseUpEquilibrium:"⥯",rfisht:"⥽",rfloor:"⌋",rfr:"𝔯",Rfr:"ℜ",rHar:"⥤",rhard:"⇁",rharu:"⇀",rharul:"⥬",Rho:"Ρ",rho:"ρ",rhov:"ϱ",RightAngleBracket:"⟩",RightArrowBar:"⇥",rightarrow:"→",RightArrow:"→",Rightarrow:"⇒",RightArrowLeftArrow:"⇄",rightarrowtail:"↣",RightCeiling:"⌉",RightDoubleBracket:"⟧",RightDownTeeVector:"⥝",RightDownVectorBar:"⥕",RightDownVector:"⇂",RightFloor:"⌋",rightharpoondown:"⇁",rightharpoonup:"⇀",rightleftarrows:"⇄",rightleftharpoons:"⇌",rightrightarrows:"⇉",rightsquigarrow:"↝",RightTeeArrow:"↦",RightTee:"⊢",RightTeeVector:"⥛",rightthreetimes:"⋌",RightTriangleBar:"⧐",RightTriangle:"⊳",RightTriangleEqual:"⊵",RightUpDownVector:"⥏",RightUpTeeVector:"⥜",RightUpVectorBar:"⥔",RightUpVector:"↾",RightVectorBar:"⥓",RightVector:"⇀",ring:"˚",risingdotseq:"≓",rlarr:"⇄",rlhar:"⇌",rlm:"‏",rmoustache:"⎱",rmoust:"⎱",rnmid:"⫮",roang:"⟭",roarr:"⇾",robrk:"⟧",ropar:"⦆",ropf:"𝕣",Ropf:"ℝ",roplus:"⨮",rotimes:"⨵",RoundImplies:"⥰",rpar:")",rpargt:"⦔",rppolint:"⨒",rrarr:"⇉",Rrightarrow:"⇛",rsaquo:"›",rscr:"𝓇",Rscr:"ℛ",rsh:"↱",Rsh:"↱",rsqb:"]",rsquo:"’",rsquor:"’",rthree:"⋌",rtimes:"⋊",rtri:"▹",rtrie:"⊵",rtrif:"▸",rtriltri:"⧎",RuleDelayed:"⧴",ruluhar:"⥨",rx:"℞",Sacute:"Ś",sacute:"ś",sbquo:"‚",scap:"⪸",Scaron:"Š",scaron:"š",Sc:"⪼",sc:"≻",sccue:"≽",sce:"⪰",scE:"⪴",Scedil:"Ş",scedil:"ş",Scirc:"Ŝ",scirc:"ŝ",scnap:"⪺",scnE:"⪶",scnsim:"⋩",scpolint:"⨓",scsim:"≿",Scy:"С",scy:"с",sdotb:"⊡",sdot:"⋅",sdote:"⩦",searhk:"⤥",searr:"↘",seArr:"⇘",searrow:"↘",sect:"§",semi:";",seswar:"⤩",setminus:"∖",setmn:"∖",sext:"✶",Sfr:"𝔖",sfr:"𝔰",sfrown:"⌢",sharp:"♯",SHCHcy:"Щ",shchcy:"щ",SHcy:"Ш",shcy:"ш",ShortDownArrow:"↓",ShortLeftArrow:"←",shortmid:"∣",shortparallel:"∥",ShortRightArrow:"→",ShortUpArrow:"↑",shy:"­",Sigma:"Σ",sigma:"σ",sigmaf:"ς",sigmav:"ς",sim:"∼",simdot:"⩪",sime:"≃",simeq:"≃",simg:"⪞",simgE:"⪠",siml:"⪝",simlE:"⪟",simne:"≆",simplus:"⨤",simrarr:"⥲",slarr:"←",SmallCircle:"∘",smallsetminus:"∖",smashp:"⨳",smeparsl:"⧤",smid:"∣",smile:"⌣",smt:"⪪",smte:"⪬",smtes:"⪬︀",SOFTcy:"Ь",softcy:"ь",solbar:"⌿",solb:"⧄",sol:"/",Sopf:"𝕊",sopf:"𝕤",spades:"♠",spadesuit:"♠",spar:"∥",sqcap:"⊓",sqcaps:"⊓︀",sqcup:"⊔",sqcups:"⊔︀",Sqrt:"√",sqsub:"⊏",sqsube:"⊑",sqsubset:"⊏",sqsubseteq:"⊑",sqsup:"⊐",sqsupe:"⊒",sqsupset:"⊐",sqsupseteq:"⊒",square:"□",Square:"□",SquareIntersection:"⊓",SquareSubset:"⊏",SquareSubsetEqual:"⊑",SquareSuperset:"⊐",SquareSupersetEqual:"⊒",SquareUnion:"⊔",squarf:"▪",squ:"□",squf:"▪",srarr:"→",Sscr:"𝒮",sscr:"𝓈",ssetmn:"∖",ssmile:"⌣",sstarf:"⋆",Star:"⋆",star:"☆",starf:"★",straightepsilon:"ϵ",straightphi:"ϕ",strns:"¯",sub:"⊂",Sub:"⋐",subdot:"⪽",subE:"⫅",sube:"⊆",subedot:"⫃",submult:"⫁",subnE:"⫋",subne:"⊊",subplus:"⪿",subrarr:"⥹",subset:"⊂",Subset:"⋐",subseteq:"⊆",subseteqq:"⫅",SubsetEqual:"⊆",subsetneq:"⊊",subsetneqq:"⫋",
subsim:"⫇",subsub:"⫕",subsup:"⫓",succapprox:"⪸",succ:"≻",succcurlyeq:"≽",Succeeds:"≻",SucceedsEqual:"⪰",SucceedsSlantEqual:"≽",SucceedsTilde:"≿",succeq:"⪰",succnapprox:"⪺",succneqq:"⪶",succnsim:"⋩",succsim:"≿",SuchThat:"∋",sum:"∑",Sum:"∑",sung:"♪",sup1:"¹",sup2:"²",sup3:"³",sup:"⊃",Sup:"⋑",supdot:"⪾",supdsub:"⫘",supE:"⫆",supe:"⊇",supedot:"⫄",Superset:"⊃",SupersetEqual:"⊇",suphsol:"⟉",suphsub:"⫗",suplarr:"⥻",supmult:"⫂",supnE:"⫌",supne:"⊋",supplus:"⫀",supset:"⊃",Supset:"⋑",supseteq:"⊇",supseteqq:"⫆",supsetneq:"⊋",supsetneqq:"⫌",supsim:"⫈",supsub:"⫔",supsup:"⫖",swarhk:"⤦",swarr:"↙",swArr:"⇙",swarrow:"↙",swnwar:"⤪",szlig:"ß",Tab:"\t",target:"⌖",Tau:"Τ",tau:"τ",tbrk:"⎴",Tcaron:"Ť",tcaron:"ť",Tcedil:"Ţ",tcedil:"ţ",Tcy:"Т",tcy:"т",tdot:"⃛",telrec:"⌕",Tfr:"𝔗",tfr:"𝔱",there4:"∴",therefore:"∴",Therefore:"∴",Theta:"Θ",theta:"θ",thetasym:"ϑ",thetav:"ϑ",thickapprox:"≈",thicksim:"∼",ThickSpace:"  ",ThinSpace:" ",thinsp:" ",thkap:"≈",thksim:"∼",THORN:"Þ",thorn:"þ",tilde:"˜",Tilde:"∼",TildeEqual:"≃",TildeFullEqual:"≅",TildeTilde:"≈",timesbar:"⨱",timesb:"⊠",times:"×",timesd:"⨰",tint:"∭",toea:"⤨",topbot:"⌶",topcir:"⫱",top:"⊤",Topf:"𝕋",topf:"𝕥",topfork:"⫚",tosa:"⤩",tprime:"‴",trade:"™",TRADE:"™",triangle:"▵",triangledown:"▿",triangleleft:"◃",trianglelefteq:"⊴",triangleq:"≜",triangleright:"▹",trianglerighteq:"⊵",tridot:"◬",trie:"≜",triminus:"⨺",TripleDot:"⃛",triplus:"⨹",trisb:"⧍",tritime:"⨻",trpezium:"⏢",Tscr:"𝒯",tscr:"𝓉",TScy:"Ц",tscy:"ц",TSHcy:"Ћ",tshcy:"ћ",Tstrok:"Ŧ",tstrok:"ŧ",twixt:"≬",twoheadleftarrow:"↞",twoheadrightarrow:"↠",Uacute:"Ú",uacute:"ú",uarr:"↑",Uarr:"↟",uArr:"⇑",Uarrocir:"⥉",Ubrcy:"Ў",ubrcy:"ў",Ubreve:"Ŭ",ubreve:"ŭ",Ucirc:"Û",ucirc:"û",Ucy:"У",ucy:"у",udarr:"⇅",Udblac:"Ű",udblac:"ű",udhar:"⥮",ufisht:"⥾",Ufr:"𝔘",ufr:"𝔲",Ugrave:"Ù",ugrave:"ù",uHar:"⥣",uharl:"↿",uharr:"↾",uhblk:"▀",ulcorn:"⌜",ulcorner:"⌜",ulcrop:"⌏",ultri:"◸",Umacr:"Ū",umacr:"ū",uml:"¨",UnderBar:"_",UnderBrace:"⏟",UnderBracket:"⎵",UnderParenthesis:"⏝",Union:"⋃",UnionPlus:"⊎",Uogon:"Ų",uogon:"ų",Uopf:"𝕌",uopf:"𝕦",UpArrowBar:"⤒",uparrow:"↑",UpArrow:"↑",Uparrow:"⇑",UpArrowDownArrow:"⇅",updownarrow:"↕",UpDownArrow:"↕",Updownarrow:"⇕",UpEquilibrium:"⥮",upharpoonleft:"↿",upharpoonright:"↾",uplus:"⊎",UpperLeftArrow:"↖",UpperRightArrow:"↗",upsi:"υ",Upsi:"ϒ",upsih:"ϒ",Upsilon:"Υ",upsilon:"υ",UpTeeArrow:"↥",UpTee:"⊥",upuparrows:"⇈",urcorn:"⌝",urcorner:"⌝",urcrop:"⌎",Uring:"Ů",uring:"ů",urtri:"◹",Uscr:"𝒰",uscr:"𝓊",utdot:"⋰",Utilde:"Ũ",utilde:"ũ",utri:"▵",utrif:"▴",uuarr:"⇈",Uuml:"Ü",uuml:"ü",uwangle:"⦧",vangrt:"⦜",varepsilon:"ϵ",varkappa:"ϰ",varnothing:"∅",varphi:"ϕ",varpi:"ϖ",varpropto:"∝",varr:"↕",vArr:"⇕",varrho:"ϱ",varsigma:"ς",varsubsetneq:"⊊︀",varsubsetneqq:"⫋︀",varsupsetneq:"⊋︀",varsupsetneqq:"⫌︀",vartheta:"ϑ",vartriangleleft:"⊲",vartriangleright:"⊳",vBar:"⫨",Vbar:"⫫",vBarv:"⫩",Vcy:"В",vcy:"в",vdash:"⊢",vDash:"⊨",Vdash:"⊩",VDash:"⊫",Vdashl:"⫦",veebar:"⊻",vee:"∨",Vee:"⋁",veeeq:"≚",vellip:"⋮",verbar:"|",Verbar:"‖",vert:"|",Vert:"‖",VerticalBar:"∣",VerticalLine:"|",VerticalSeparator:"❘",VerticalTilde:"≀",VeryThinSpace:" 
",Vfr:"𝔙",vfr:"𝔳",vltri:"⊲",vnsub:"⊂⃒",vnsup:"⊃⃒",Vopf:"𝕍",vopf:"𝕧",vprop:"∝",vrtri:"⊳",Vscr:"𝒱",vscr:"𝓋",vsubnE:"⫋︀",vsubne:"⊊︀",vsupnE:"⫌︀",vsupne:"⊋︀",Vvdash:"⊪",vzigzag:"⦚",Wcirc:"Ŵ",wcirc:"ŵ",wedbar:"⩟",wedge:"∧",Wedge:"⋀",wedgeq:"≙",weierp:"℘",Wfr:"𝔚",wfr:"𝔴",Wopf:"𝕎",wopf:"𝕨",wp:"℘",wr:"≀",wreath:"≀",Wscr:"𝒲",wscr:"𝓌",xcap:"⋂",xcirc:"◯",xcup:"⋃",xdtri:"▽",Xfr:"𝔛",xfr:"𝔵",xharr:"⟷",xhArr:"⟺",Xi:"Ξ",xi:"ξ",xlarr:"⟵",xlArr:"⟸",xmap:"⟼",xnis:"⋻",xodot:"⨀",Xopf:"𝕏",xopf:"𝕩",xoplus:"⨁",xotime:"⨂",xrarr:"⟶",xrArr:"⟹",Xscr:"𝒳",xscr:"𝓍",xsqcup:"⨆",xuplus:"⨄",xutri:"△",xvee:"⋁",xwedge:"⋀",Yacute:"Ý",yacute:"ý",YAcy:"Я",yacy:"я",Ycirc:"Ŷ",ycirc:"ŷ",Ycy:"Ы",ycy:"ы",yen:"¥",Yfr:"𝔜",yfr:"𝔶",YIcy:"Ї",yicy:"ї",Yopf:"𝕐",yopf:"𝕪",Yscr:"𝒴",yscr:"𝓎",YUcy:"Ю",yucy:"ю",yuml:"ÿ",Yuml:"Ÿ",Zacute:"Ź",zacute:"ź",Zcaron:"Ž",zcaron:"ž",Zcy:"З",zcy:"з",Zdot:"Ż",zdot:"ż",zeetrf:"ℨ",ZeroWidthSpace:"​",Zeta:"Ζ",zeta:"ζ",zfr:"𝔷",Zfr:"ℨ",ZHcy:"Ж",zhcy:"ж",zigrarr:"⇝",zopf:"𝕫",Zopf:"ℤ",Zscr:"𝒵",zscr:"𝓏",zwj:"‍",zwnj:"‌"}},{}],26:[function(require,module,exports){module.exports={Aacute:"Á",aacute:"á",Acirc:"Â",acirc:"â",acute:"´",AElig:"Æ",aelig:"æ",Agrave:"À",agrave:"à",amp:"&",AMP:"&",Aring:"Å",aring:"å",Atilde:"Ã",atilde:"ã",Auml:"Ä",auml:"ä",brvbar:"¦",Ccedil:"Ç",ccedil:"ç",cedil:"¸",cent:"¢",copy:"©",COPY:"©",curren:"¤",deg:"°",divide:"÷",Eacute:"É",eacute:"é",Ecirc:"Ê",ecirc:"ê",Egrave:"È",egrave:"è",ETH:"Ð",eth:"ð",Euml:"Ë",euml:"ë",frac12:"½",frac14:"¼",frac34:"¾",gt:">",GT:">",Iacute:"Í",iacute:"í",Icirc:"Î",icirc:"î",iexcl:"¡",Igrave:"Ì",igrave:"ì",iquest:"¿",Iuml:"Ï",iuml:"ï",laquo:"«",lt:"<",LT:"<",macr:"¯",micro:"µ",middot:"·",nbsp:" ",not:"¬",Ntilde:"Ñ",ntilde:"ñ",Oacute:"Ó",oacute:"ó",Ocirc:"Ô",ocirc:"ô",Ograve:"Ò",ograve:"ò",ordf:"ª",ordm:"º",Oslash:"Ø",oslash:"ø",Otilde:"Õ",otilde:"õ",Ouml:"Ö",ouml:"ö",para:"¶",plusmn:"±",pound:"£",quot:'"',QUOT:'"',raquo:"»",reg:"®",REG:"®",sect:"§",shy:"­",sup1:"¹",sup2:"²",sup3:"³",szlig:"ß",THORN:"Þ",thorn:"þ",times:"×",Uacute:"Ú",uacute:"ú",Ucirc:"Û",ucirc:"û",Ugrave:"Ù",ugrave:"ù",uml:"¨",Uuml:"Ü",uuml:"ü",Yacute:"Ý",yacute:"ý",yen:"¥",yuml:"ÿ"}},{}],27:[function(require,module,exports){module.exports={amp:"&",apos:"'",gt:">",lt:"<",quot:'"'}},{}],28:[function(require,module,exports){function EventEmitter(){this._events=this._events||{};this._maxListeners=this._maxListeners||undefined}module.exports=EventEmitter;EventEmitter.EventEmitter=EventEmitter;EventEmitter.prototype._events=undefined;EventEmitter.prototype._maxListeners=undefined;EventEmitter.defaultMaxListeners=10;EventEmitter.prototype.setMaxListeners=function(n){if(!isNumber(n)||n<0||isNaN(n))throw TypeError("n must be a positive number");this._maxListeners=n;return this};EventEmitter.prototype.emit=function(type){var er,handler,len,args,i,listeners;if(!this._events)this._events={};if(type==="error"){if(!this._events.error||isObject(this._events.error)&&!this._events.error.length){er=arguments[1];if(er instanceof Error){throw er}else{var err=new Error('Uncaught, unspecified "error" event. 
('+er+")");err.context=er;throw err}}}handler=this._events[type];if(isUndefined(handler))return false;if(isFunction(handler)){switch(arguments.length){case 1:handler.call(this);break;case 2:handler.call(this,arguments[1]);break;case 3:handler.call(this,arguments[1],arguments[2]);break;default:args=Array.prototype.slice.call(arguments,1);handler.apply(this,args)}}else if(isObject(handler)){args=Array.prototype.slice.call(arguments,1);listeners=handler.slice();len=listeners.length;for(i=0;i<len;i++)listeners[i].apply(this,args)}return true};EventEmitter.prototype.addListener=function(type,listener){var m;if(!isFunction(listener))throw TypeError("listener must be a function");if(!this._events)this._events={};if(this._events.newListener)this.emit("newListener",type,isFunction(listener.listener)?listener.listener:listener);if(!this._events[type])this._events[type]=listener;else if(isObject(this._events[type]))this._events[type].push(listener);else this._events[type]=[this._events[type],listener];if(isObject(this._events[type])&&!this._events[type].warned){if(!isUndefined(this._maxListeners)){m=this._maxListeners}else{m=EventEmitter.defaultMaxListeners}if(m&&m>0&&this._events[type].length>m){this._events[type].warned=true;console.error("(node) warning: possible EventEmitter memory "+"leak detected. %d listeners added. "+"Use emitter.setMaxListeners() to increase limit.",this._events[type].length);if(typeof console.trace==="function"){console.trace()}}}return this};EventEmitter.prototype.on=EventEmitter.prototype.addListener;EventEmitter.prototype.once=function(type,listener){if(!isFunction(listener))throw TypeError("listener must be a function");var fired=false;function g(){this.removeListener(type,g);if(!fired){fired=true;listener.apply(this,arguments)}}g.listener=listener;this.on(type,g);return this};EventEmitter.prototype.removeListener=function(type,listener){var list,position,length,i;if(!isFunction(listener))throw TypeError("listener must be a function");if(!this._events||!this._events[type])return this;list=this._events[type];length=list.length;position=-1;if(list===listener||isFunction(list.listener)&&list.listener===listener){delete this._events[type];if(this._events.removeListener)this.emit("removeListener",type,listener)}else if(isObject(list)){for(i=length;i-- >0;){if(list[i]===listener||list[i].listener&&list[i].listener===listener){position=i;break}}if(position<0)return this;if(list.length===1){list.length=0;delete this._events[type]}else{list.splice(position,1); }if(this._events.removeListener)this.emit("removeListener",type,listener)}return this};EventEmitter.prototype.removeAllListeners=function(type){var key,listeners;if(!this._events)return this;if(!this._events.removeListener){if(arguments.length===0)this._events={};else if(this._events[type])delete this._events[type];return this}if(arguments.length===0){for(key in this._events){if(key==="removeListener")continue;this.removeAllListeners(key)}this.removeAllListeners("removeListener");this._events={};return this}listeners=this._events[type];if(isFunction(listeners)){this.removeListener(type,listeners)}else if(listeners){while(listeners.length)this.removeListener(type,listeners[listeners.length-1])}delete this._events[type];return this};EventEmitter.prototype.listeners=function(type){var ret;if(!this._events||!this._events[type])ret=[];else if(isFunction(this._events[type]))ret=[this._events[type]];else ret=this._events[type].slice();return ret};EventEmitter.prototype.listenerCount=function(type){if(this._events){var 
evlistener=this._events[type];if(isFunction(evlistener))return 1;else if(evlistener)return evlistener.length}return 0};EventEmitter.listenerCount=function(emitter,type){return emitter.listenerCount(type)};function isFunction(arg){return typeof arg==="function"}function isNumber(arg){return typeof arg==="number"}function isObject(arg){return typeof arg==="object"&&arg!==null}function isUndefined(arg){return arg===void 0}},{}],29:[function(require,module,exports){module.exports=CollectingHandler;function CollectingHandler(cbs){this._cbs=cbs||{};this.events=[]}var EVENTS=require("./").EVENTS;Object.keys(EVENTS).forEach(function(name){if(EVENTS[name]===0){name="on"+name;CollectingHandler.prototype[name]=function(){this.events.push([name]);if(this._cbs[name])this._cbs[name]()}}else if(EVENTS[name]===1){name="on"+name;CollectingHandler.prototype[name]=function(a){this.events.push([name,a]);if(this._cbs[name])this._cbs[name](a)}}else if(EVENTS[name]===2){name="on"+name;CollectingHandler.prototype[name]=function(a,b){this.events.push([name,a,b]);if(this._cbs[name])this._cbs[name](a,b)}}else{throw Error("wrong number of arguments")}});CollectingHandler.prototype.onreset=function(){this.events=[];if(this._cbs.onreset)this._cbs.onreset()};CollectingHandler.prototype.restart=function(){if(this._cbs.onreset)this._cbs.onreset();for(var i=0,len=this.events.length;i<len;i++){if(this._cbs[this.events[i][0]]){var num=this.events[i].length;if(num===1){this._cbs[this.events[i][0]]()}else if(num===2){this._cbs[this.events[i][0]](this.events[i][1])}else{this._cbs[this.events[i][0]](this.events[i][1],this.events[i][2])}}}}},{"./":36}],30:[function(require,module,exports){var index=require("./index.js"),DomHandler=index.DomHandler,DomUtils=index.DomUtils;function FeedHandler(callback,options){this.init(callback,options)}require("inherits")(FeedHandler,DomHandler);FeedHandler.prototype.init=DomHandler;function getElements(what,where){return DomUtils.getElementsByTagName(what,where,true)}function getOneElement(what,where){return DomUtils.getElementsByTagName(what,where,true,1)[0]}function fetch(what,where,recurse){return DomUtils.getText(DomUtils.getElementsByTagName(what,where,recurse,1)).trim()}function addConditionally(obj,prop,what,where,recurse){var tmp=fetch(what,where,recurse);if(tmp)obj[prop]=tmp}var isValidFeed=function(value){return value==="rss"||value==="feed"||value==="rdf:RDF"};FeedHandler.prototype.onend=function(){var feed={},feedRoot=getOneElement(isValidFeed,this.dom),tmp,childs;if(feedRoot){if(feedRoot.name==="feed"){childs=feedRoot.children;feed.type="atom";addConditionally(feed,"id","id",childs);addConditionally(feed,"title","title",childs);if((tmp=getOneElement("link",childs))&&(tmp=tmp.attribs)&&(tmp=tmp.href))feed.link=tmp;addConditionally(feed,"description","subtitle",childs);if(tmp=fetch("updated",childs))feed.updated=new Date(tmp);addConditionally(feed,"author","email",childs,true);feed.items=getElements("entry",childs).map(function(item){var entry={},tmp;item=item.children;addConditionally(entry,"id","id",item);addConditionally(entry,"title","title",item);if((tmp=getOneElement("link",item))&&(tmp=tmp.attribs)&&(tmp=tmp.href))entry.link=tmp;if(tmp=fetch("summary",item)||fetch("content",item))entry.description=tmp;if(tmp=fetch("updated",item))entry.pubDate=new Date(tmp);return 
entry})}else{childs=getOneElement("channel",feedRoot.children).children;feed.type=feedRoot.name.substr(0,3);feed.id="";addConditionally(feed,"title","title",childs);addConditionally(feed,"link","link",childs);addConditionally(feed,"description","description",childs);if(tmp=fetch("lastBuildDate",childs))feed.updated=new Date(tmp);addConditionally(feed,"author","managingEditor",childs,true);feed.items=getElements("item",feedRoot.children).map(function(item){var entry={},tmp;item=item.children;addConditionally(entry,"id","guid",item);addConditionally(entry,"title","title",item);addConditionally(entry,"link","link",item);addConditionally(entry,"description","description",item);if(tmp=fetch("pubDate",item))entry.pubDate=new Date(tmp);return entry})}}this.dom=feed;DomHandler.prototype._handleCallback.call(this,feedRoot?null:Error("couldn't find root of feed"))};module.exports=FeedHandler},{"./index.js":36,inherits:38}],31:[function(require,module,exports){var Tokenizer=require("./Tokenizer.js");var formTags={input:true,option:true,optgroup:true,select:true,button:true,datalist:true,textarea:true};var openImpliesClose={tr:{tr:true,th:true,td:true},th:{th:true},td:{thead:true,th:true,td:true},body:{head:true,link:true,script:true},li:{li:true},p:{p:true},h1:{p:true},h2:{p:true},h3:{p:true},h4:{p:true},h5:{p:true},h6:{p:true},select:formTags,input:formTags,output:formTags,button:formTags,datalist:formTags,textarea:formTags,option:{option:true},optgroup:{optgroup:true}};var voidElements={__proto__:null,area:true,base:true,basefont:true,br:true,col:true,command:true,embed:true,frame:true,hr:true,img:true,input:true,isindex:true,keygen:true,link:true,meta:true,param:true,source:true,track:true,wbr:true,path:true,circle:true,ellipse:true,line:true,rect:true,use:true,stop:true,polyline:true,polygon:true};var re_nameEnd=/\s|\//;function Parser(cbs,options){this._options=options||{};this._cbs=cbs||{};this._tagname="";this._attribname="";this._attribvalue="";this._attribs=null;this._stack=[];this.startIndex=0;this.endIndex=null;this._lowerCaseTagNames="lowerCaseTags"in this._options?!!this._options.lowerCaseTags:!this._options.xmlMode;this._lowerCaseAttributeNames="lowerCaseAttributeNames"in this._options?!!this._options.lowerCaseAttributeNames:!this._options.xmlMode;if(this._options.Tokenizer){Tokenizer=this._options.Tokenizer}this._tokenizer=new Tokenizer(this._options,this);if(this._cbs.onparserinit)this._cbs.onparserinit(this)}require("inherits")(Parser,require("events").EventEmitter);Parser.prototype._updatePosition=function(initialOffset){if(this.endIndex===null){if(this._tokenizer._sectionStart<=initialOffset){this.startIndex=0}else{this.startIndex=this._tokenizer._sectionStart-initialOffset}}else this.startIndex=this.endIndex+1;this.endIndex=this._tokenizer.getAbsoluteIndex()};Parser.prototype.ontext=function(data){this._updatePosition(1);this.endIndex--;if(this._cbs.ontext)this._cbs.ontext(data)};Parser.prototype.onopentagname=function(name){if(this._lowerCaseTagNames){name=name.toLowerCase()}this._tagname=name;if(!this._options.xmlMode&&name in openImpliesClose){for(var el;(el=this._stack[this._stack.length-1])in openImpliesClose[name];this.onclosetag(el));}if(this._options.xmlMode||!(name in 
voidElements)){this._stack.push(name)}if(this._cbs.onopentagname)this._cbs.onopentagname(name);if(this._cbs.onopentag)this._attribs={}};Parser.prototype.onopentagend=function(){this._updatePosition(1);if(this._attribs){if(this._cbs.onopentag)this._cbs.onopentag(this._tagname,this._attribs);this._attribs=null}if(!this._options.xmlMode&&this._cbs.onclosetag&&this._tagname in voidElements){this._cbs.onclosetag(this._tagname)}this._tagname=""};Parser.prototype.onclosetag=function(name){this._updatePosition(1);if(this._lowerCaseTagNames){name=name.toLowerCase()}if(this._stack.length&&(!(name in voidElements)||this._options.xmlMode)){var pos=this._stack.lastIndexOf(name);if(pos!==-1){if(this._cbs.onclosetag){pos=this._stack.length-pos;while(pos--)this._cbs.onclosetag(this._stack.pop())}else this._stack.length=pos}else if(name==="p"&&!this._options.xmlMode){this.onopentagname(name);this._closeCurrentTag()}}else if(!this._options.xmlMode&&(name==="br"||name==="p")){this.onopentagname(name);this._closeCurrentTag()}};Parser.prototype.onselfclosingtag=function(){if(this._options.xmlMode||this._options.recognizeSelfClosing){this._closeCurrentTag()}else{this.onopentagend()}};Parser.prototype._closeCurrentTag=function(){var name=this._tagname;this.onopentagend();if(this._stack[this._stack.length-1]===name){if(this._cbs.onclosetag){this._cbs.onclosetag(name)}this._stack.pop()}};Parser.prototype.onattribname=function(name){if(this._lowerCaseAttributeNames){name=name.toLowerCase()}this._attribname=name};Parser.prototype.onattribdata=function(value){this._attribvalue+=value};Parser.prototype.onattribend=function(){if(this._cbs.onattribute)this._cbs.onattribute(this._attribname,this._attribvalue);if(this._attribs&&!Object.prototype.hasOwnProperty.call(this._attribs,this._attribname)){this._attribs[this._attribname]=this._attribvalue}this._attribname="";this._attribvalue=""};Parser.prototype._getInstructionName=function(value){var idx=value.search(re_nameEnd),name=idx<0?value:value.substr(0,idx);if(this._lowerCaseTagNames){name=name.toLowerCase()}return name};Parser.prototype.ondeclaration=function(value){if(this._cbs.onprocessinginstruction){var name=this._getInstructionName(value);this._cbs.onprocessinginstruction("!"+name,"!"+value)}};Parser.prototype.onprocessinginstruction=function(value){if(this._cbs.onprocessinginstruction){var name=this._getInstructionName(value);this._cbs.onprocessinginstruction("?"+name,"?"+value)}};Parser.prototype.oncomment=function(value){this._updatePosition(4);if(this._cbs.oncomment)this._cbs.oncomment(value);if(this._cbs.oncommentend)this._cbs.oncommentend()};Parser.prototype.oncdata=function(value){this._updatePosition(1);if(this._options.xmlMode||this._options.recognizeCDATA){if(this._cbs.oncdatastart)this._cbs.oncdatastart();if(this._cbs.ontext)this._cbs.ontext(value);if(this._cbs.oncdataend)this._cbs.oncdataend()}else{this.oncomment("[CDATA["+value+"]]")}};Parser.prototype.onerror=function(err){if(this._cbs.onerror)this._cbs.onerror(err)};Parser.prototype.onend=function(){if(this._cbs.onclosetag){for(var 
i=this._stack.length;i>0;this._cbs.onclosetag(this._stack[--i]));}if(this._cbs.onend)this._cbs.onend()};Parser.prototype.reset=function(){if(this._cbs.onreset)this._cbs.onreset();this._tokenizer.reset();this._tagname="";this._attribname="";this._attribs=null;this._stack=[];if(this._cbs.onparserinit)this._cbs.onparserinit(this)};Parser.prototype.parseComplete=function(data){this.reset();this.end(data)};Parser.prototype.write=function(chunk){this._tokenizer.write(chunk)};Parser.prototype.end=function(chunk){this._tokenizer.end(chunk)};Parser.prototype.pause=function(){this._tokenizer.pause()};Parser.prototype.resume=function(){this._tokenizer.resume()};Parser.prototype.parseChunk=Parser.prototype.write;Parser.prototype.done=Parser.prototype.end;module.exports=Parser},{"./Tokenizer.js":34,events:28,inherits:38}],32:[function(require,module,exports){module.exports=ProxyHandler;function ProxyHandler(cbs){this._cbs=cbs||{}}var EVENTS=require("./").EVENTS;Object.keys(EVENTS).forEach(function(name){if(EVENTS[name]===0){name="on"+name;ProxyHandler.prototype[name]=function(){if(this._cbs[name])this._cbs[name]()}}else if(EVENTS[name]===1){name="on"+name;ProxyHandler.prototype[name]=function(a){if(this._cbs[name])this._cbs[name](a)}}else if(EVENTS[name]===2){name="on"+name;ProxyHandler.prototype[name]=function(a,b){if(this._cbs[name])this._cbs[name](a,b)}}else{throw Error("wrong number of arguments")}})},{"./":36}],33:[function(require,module,exports){module.exports=Stream;var Parser=require("./WritableStream.js");function Stream(options){Parser.call(this,new Cbs(this),options)}require("inherits")(Stream,Parser);Stream.prototype.readable=true;function Cbs(scope){this.scope=scope}var EVENTS=require("../").EVENTS;Object.keys(EVENTS).forEach(function(name){if(EVENTS[name]===0){Cbs.prototype["on"+name]=function(){this.scope.emit(name)}}else if(EVENTS[name]===1){Cbs.prototype["on"+name]=function(a){this.scope.emit(name,a)}}else if(EVENTS[name]===2){Cbs.prototype["on"+name]=function(a,b){this.scope.emit(name,a,b)}}else{throw Error("wrong number of arguments!")}})},{"../":36,"./WritableStream.js":35,inherits:38}],34:[function(require,module,exports){module.exports=Tokenizer;var 
decodeCodePoint=require("entities/lib/decode_codepoint.js"),entityMap=require("entities/maps/entities.json"),legacyMap=require("entities/maps/legacy.json"),xmlMap=require("entities/maps/xml.json"),i=0,TEXT=i++,BEFORE_TAG_NAME=i++,IN_TAG_NAME=i++,IN_SELF_CLOSING_TAG=i++,BEFORE_CLOSING_TAG_NAME=i++,IN_CLOSING_TAG_NAME=i++,AFTER_CLOSING_TAG_NAME=i++,BEFORE_ATTRIBUTE_NAME=i++,IN_ATTRIBUTE_NAME=i++,AFTER_ATTRIBUTE_NAME=i++,BEFORE_ATTRIBUTE_VALUE=i++,IN_ATTRIBUTE_VALUE_DQ=i++,IN_ATTRIBUTE_VALUE_SQ=i++,IN_ATTRIBUTE_VALUE_NQ=i++,BEFORE_DECLARATION=i++,IN_DECLARATION=i++,IN_PROCESSING_INSTRUCTION=i++,BEFORE_COMMENT=i++,IN_COMMENT=i++,AFTER_COMMENT_1=i++,AFTER_COMMENT_2=i++,BEFORE_CDATA_1=i++,BEFORE_CDATA_2=i++,BEFORE_CDATA_3=i++,BEFORE_CDATA_4=i++,BEFORE_CDATA_5=i++,BEFORE_CDATA_6=i++,IN_CDATA=i++,AFTER_CDATA_1=i++,AFTER_CDATA_2=i++,BEFORE_SPECIAL=i++,BEFORE_SPECIAL_END=i++,BEFORE_SCRIPT_1=i++,BEFORE_SCRIPT_2=i++,BEFORE_SCRIPT_3=i++,BEFORE_SCRIPT_4=i++,BEFORE_SCRIPT_5=i++,AFTER_SCRIPT_1=i++,AFTER_SCRIPT_2=i++,AFTER_SCRIPT_3=i++,AFTER_SCRIPT_4=i++,AFTER_SCRIPT_5=i++,BEFORE_STYLE_1=i++,BEFORE_STYLE_2=i++,BEFORE_STYLE_3=i++,BEFORE_STYLE_4=i++,AFTER_STYLE_1=i++,AFTER_STYLE_2=i++,AFTER_STYLE_3=i++,AFTER_STYLE_4=i++,BEFORE_ENTITY=i++,BEFORE_NUMERIC_ENTITY=i++,IN_NAMED_ENTITY=i++,IN_NUMERIC_ENTITY=i++,IN_HEX_ENTITY=i++,j=0,SPECIAL_NONE=j++,SPECIAL_SCRIPT=j++,SPECIAL_STYLE=j++;function whitespace(c){return c===" "||c==="\n"||c==="\t"||c==="\f"||c==="\r"}function characterState(char,SUCCESS){return function(c){if(c===char)this._state=SUCCESS}}function ifElseState(upper,SUCCESS,FAILURE){var lower=upper.toLowerCase();if(upper===lower){return function(c){if(c===lower){this._state=SUCCESS}else{this._state=FAILURE;this._index--}}}else{return function(c){if(c===lower||c===upper){this._state=SUCCESS}else{this._state=FAILURE;this._index--}}}}function consumeSpecialNameChar(upper,NEXT_STATE){var lower=upper.toLowerCase();return function(c){if(c===lower||c===upper){this._state=NEXT_STATE}else{this._state=IN_TAG_NAME;this._index--}}}function Tokenizer(options,cbs){this._state=TEXT;this._buffer="";this._sectionStart=0;this._index=0;this._bufferOffset=0;this._baseState=TEXT;this._special=SPECIAL_NONE;this._cbs=cbs;this._running=true;this._ended=false;this._xmlMode=!!(options&&options.xmlMode);this._decodeEntities=!!(options&&options.decodeEntities)}Tokenizer.prototype._stateText=function(c){if(c==="<"){if(this._index>this._sectionStart){this._cbs.ontext(this._getSection())}this._state=BEFORE_TAG_NAME;this._sectionStart=this._index}else if(this._decodeEntities&&this._special===SPECIAL_NONE&&c==="&"){if(this._index>this._sectionStart){this._cbs.ontext(this._getSection())}this._baseState=TEXT;this._state=BEFORE_ENTITY;this._sectionStart=this._index}};Tokenizer.prototype._stateBeforeTagName=function(c){if(c==="/"){this._state=BEFORE_CLOSING_TAG_NAME}else if(c==="<"){this._cbs.ontext(this._getSection());this._sectionStart=this._index}else if(c===">"||this._special!==SPECIAL_NONE||whitespace(c)){this._state=TEXT}else if(c==="!"){this._state=BEFORE_DECLARATION;this._sectionStart=this._index+1}else 
if(c==="?"){this._state=IN_PROCESSING_INSTRUCTION;this._sectionStart=this._index+1}else{this._state=!this._xmlMode&&(c==="s"||c==="S")?BEFORE_SPECIAL:IN_TAG_NAME;this._sectionStart=this._index}};Tokenizer.prototype._stateInTagName=function(c){if(c==="/"||c===">"||whitespace(c)){this._emitToken("onopentagname");this._state=BEFORE_ATTRIBUTE_NAME;this._index--}};Tokenizer.prototype._stateBeforeCloseingTagName=function(c){if(whitespace(c));else if(c===">"){this._state=TEXT}else if(this._special!==SPECIAL_NONE){if(c==="s"||c==="S"){this._state=BEFORE_SPECIAL_END}else{this._state=TEXT;this._index--}}else{this._state=IN_CLOSING_TAG_NAME;this._sectionStart=this._index}};Tokenizer.prototype._stateInCloseingTagName=function(c){if(c===">"||whitespace(c)){this._emitToken("onclosetag");this._state=AFTER_CLOSING_TAG_NAME;this._index--}};Tokenizer.prototype._stateAfterCloseingTagName=function(c){if(c===">"){this._state=TEXT;this._sectionStart=this._index+1}};Tokenizer.prototype._stateBeforeAttributeName=function(c){if(c===">"){this._cbs.onopentagend();this._state=TEXT;this._sectionStart=this._index+1}else if(c==="/"){this._state=IN_SELF_CLOSING_TAG}else if(!whitespace(c)){this._state=IN_ATTRIBUTE_NAME;this._sectionStart=this._index}};Tokenizer.prototype._stateInSelfClosingTag=function(c){if(c===">"){this._cbs.onselfclosingtag();this._state=TEXT;this._sectionStart=this._index+1}else if(!whitespace(c)){this._state=BEFORE_ATTRIBUTE_NAME;this._index--}};Tokenizer.prototype._stateInAttributeName=function(c){if(c==="="||c==="/"||c===">"||whitespace(c)){this._cbs.onattribname(this._getSection());this._sectionStart=-1;this._state=AFTER_ATTRIBUTE_NAME;this._index--}};Tokenizer.prototype._stateAfterAttributeName=function(c){if(c==="="){this._state=BEFORE_ATTRIBUTE_VALUE}else if(c==="/"||c===">"){this._cbs.onattribend();this._state=BEFORE_ATTRIBUTE_NAME;this._index--}else if(!whitespace(c)){this._cbs.onattribend();this._state=IN_ATTRIBUTE_NAME;this._sectionStart=this._index}};Tokenizer.prototype._stateBeforeAttributeValue=function(c){if(c==='"'){this._state=IN_ATTRIBUTE_VALUE_DQ;this._sectionStart=this._index+1}else if(c==="'"){this._state=IN_ATTRIBUTE_VALUE_SQ;this._sectionStart=this._index+1}else if(!whitespace(c)){this._state=IN_ATTRIBUTE_VALUE_NQ;this._sectionStart=this._index;this._index--}};Tokenizer.prototype._stateInAttributeValueDoubleQuotes=function(c){if(c==='"'){this._emitToken("onattribdata");this._cbs.onattribend();this._state=BEFORE_ATTRIBUTE_NAME}else if(this._decodeEntities&&c==="&"){this._emitToken("onattribdata");this._baseState=this._state;this._state=BEFORE_ENTITY;this._sectionStart=this._index}};Tokenizer.prototype._stateInAttributeValueSingleQuotes=function(c){if(c==="'"){this._emitToken("onattribdata");this._cbs.onattribend();this._state=BEFORE_ATTRIBUTE_NAME}else if(this._decodeEntities&&c==="&"){this._emitToken("onattribdata");this._baseState=this._state;this._state=BEFORE_ENTITY;this._sectionStart=this._index}};Tokenizer.prototype._stateInAttributeValueNoQuotes=function(c){if(whitespace(c)||c===">"){this._emitToken("onattribdata");this._cbs.onattribend();this._state=BEFORE_ATTRIBUTE_NAME;this._index--}else 
if(this._decodeEntities&&c==="&"){this._emitToken("onattribdata");this._baseState=this._state;this._state=BEFORE_ENTITY;this._sectionStart=this._index}};Tokenizer.prototype._stateBeforeDeclaration=function(c){this._state=c==="["?BEFORE_CDATA_1:c==="-"?BEFORE_COMMENT:IN_DECLARATION};Tokenizer.prototype._stateInDeclaration=function(c){if(c===">"){this._cbs.ondeclaration(this._getSection());this._state=TEXT;this._sectionStart=this._index+1}};Tokenizer.prototype._stateInProcessingInstruction=function(c){if(c===">"){this._cbs.onprocessinginstruction(this._getSection());this._state=TEXT;this._sectionStart=this._index+1}};Tokenizer.prototype._stateBeforeComment=function(c){if(c==="-"){this._state=IN_COMMENT;this._sectionStart=this._index+1}else{this._state=IN_DECLARATION}};Tokenizer.prototype._stateInComment=function(c){if(c==="-")this._state=AFTER_COMMENT_1};Tokenizer.prototype._stateAfterComment1=function(c){if(c==="-"){this._state=AFTER_COMMENT_2}else{this._state=IN_COMMENT}};Tokenizer.prototype._stateAfterComment2=function(c){if(c===">"){this._cbs.oncomment(this._buffer.substring(this._sectionStart,this._index-2));this._state=TEXT;this._sectionStart=this._index+1}else if(c!=="-"){this._state=IN_COMMENT}};Tokenizer.prototype._stateBeforeCdata1=ifElseState("C",BEFORE_CDATA_2,IN_DECLARATION);Tokenizer.prototype._stateBeforeCdata2=ifElseState("D",BEFORE_CDATA_3,IN_DECLARATION);Tokenizer.prototype._stateBeforeCdata3=ifElseState("A",BEFORE_CDATA_4,IN_DECLARATION);Tokenizer.prototype._stateBeforeCdata4=ifElseState("T",BEFORE_CDATA_5,IN_DECLARATION);Tokenizer.prototype._stateBeforeCdata5=ifElseState("A",BEFORE_CDATA_6,IN_DECLARATION);Tokenizer.prototype._stateBeforeCdata6=function(c){if(c==="["){this._state=IN_CDATA;this._sectionStart=this._index+1}else{this._state=IN_DECLARATION;this._index--}};Tokenizer.prototype._stateInCdata=function(c){if(c==="]")this._state=AFTER_CDATA_1};Tokenizer.prototype._stateAfterCdata1=characterState("]",AFTER_CDATA_2);Tokenizer.prototype._stateAfterCdata2=function(c){if(c===">"){this._cbs.oncdata(this._buffer.substring(this._sectionStart,this._index-2));this._state=TEXT;this._sectionStart=this._index+1}else if(c!=="]"){this._state=IN_CDATA}};Tokenizer.prototype._stateBeforeSpecial=function(c){if(c==="c"||c==="C"){this._state=BEFORE_SCRIPT_1}else if(c==="t"||c==="T"){this._state=BEFORE_STYLE_1}else{this._state=IN_TAG_NAME;this._index--}};Tokenizer.prototype._stateBeforeSpecialEnd=function(c){if(this._special===SPECIAL_SCRIPT&&(c==="c"||c==="C")){this._state=AFTER_SCRIPT_1}else if(this._special===SPECIAL_STYLE&&(c==="t"||c==="T")){this._state=AFTER_STYLE_1}else 
this._state=TEXT};Tokenizer.prototype._stateBeforeScript1=consumeSpecialNameChar("R",BEFORE_SCRIPT_2);Tokenizer.prototype._stateBeforeScript2=consumeSpecialNameChar("I",BEFORE_SCRIPT_3);Tokenizer.prototype._stateBeforeScript3=consumeSpecialNameChar("P",BEFORE_SCRIPT_4);Tokenizer.prototype._stateBeforeScript4=consumeSpecialNameChar("T",BEFORE_SCRIPT_5);Tokenizer.prototype._stateBeforeScript5=function(c){if(c==="/"||c===">"||whitespace(c)){this._special=SPECIAL_SCRIPT}this._state=IN_TAG_NAME;this._index--};Tokenizer.prototype._stateAfterScript1=ifElseState("R",AFTER_SCRIPT_2,TEXT);Tokenizer.prototype._stateAfterScript2=ifElseState("I",AFTER_SCRIPT_3,TEXT);Tokenizer.prototype._stateAfterScript3=ifElseState("P",AFTER_SCRIPT_4,TEXT);Tokenizer.prototype._stateAfterScript4=ifElseState("T",AFTER_SCRIPT_5,TEXT);Tokenizer.prototype._stateAfterScript5=function(c){if(c===">"||whitespace(c)){this._special=SPECIAL_NONE;this._state=IN_CLOSING_TAG_NAME;this._sectionStart=this._index-6;this._index--}else this._state=TEXT};Tokenizer.prototype._stateBeforeStyle1=consumeSpecialNameChar("Y",BEFORE_STYLE_2);Tokenizer.prototype._stateBeforeStyle2=consumeSpecialNameChar("L",BEFORE_STYLE_3);Tokenizer.prototype._stateBeforeStyle3=consumeSpecialNameChar("E",BEFORE_STYLE_4);Tokenizer.prototype._stateBeforeStyle4=function(c){if(c==="/"||c===">"||whitespace(c)){this._special=SPECIAL_STYLE}this._state=IN_TAG_NAME;this._index--};Tokenizer.prototype._stateAfterStyle1=ifElseState("Y",AFTER_STYLE_2,TEXT);Tokenizer.prototype._stateAfterStyle2=ifElseState("L",AFTER_STYLE_3,TEXT);Tokenizer.prototype._stateAfterStyle3=ifElseState("E",AFTER_STYLE_4,TEXT);Tokenizer.prototype._stateAfterStyle4=function(c){if(c===">"||whitespace(c)){this._special=SPECIAL_NONE;this._state=IN_CLOSING_TAG_NAME;this._sectionStart=this._index-5;this._index--}else this._state=TEXT};Tokenizer.prototype._stateBeforeEntity=ifElseState("#",BEFORE_NUMERIC_ENTITY,IN_NAMED_ENTITY);Tokenizer.prototype._stateBeforeNumericEntity=ifElseState("X",IN_HEX_ENTITY,IN_NUMERIC_ENTITY);Tokenizer.prototype._parseNamedEntityStrict=function(){if(this._sectionStart+1<this._index){var entity=this._buffer.substring(this._sectionStart+1,this._index),map=this._xmlMode?xmlMap:entityMap;if(map.hasOwnProperty(entity)){this._emitPartial(map[entity]);this._sectionStart=this._index+1}}};Tokenizer.prototype._parseLegacyEntity=function(){var start=this._sectionStart+1,limit=this._index-start;if(limit>6)limit=6;while(limit>=2){var entity=this._buffer.substr(start,limit);if(legacyMap.hasOwnProperty(entity)){this._emitPartial(legacyMap[entity]);this._sectionStart+=limit+1;return}else{limit--}}};Tokenizer.prototype._stateInNamedEntity=function(c){if(c===";"){this._parseNamedEntityStrict();if(this._sectionStart+1<this._index&&!this._xmlMode){this._parseLegacyEntity()}this._state=this._baseState}else if((c<"a"||c>"z")&&(c<"A"||c>"Z")&&(c<"0"||c>"9")){if(this._xmlMode);else if(this._sectionStart+1===this._index);else if(this._baseState!==TEXT){if(c!=="="){this._parseNamedEntityStrict()}}else{this._parseLegacyEntity()}this._state=this._baseState;this._index--}};Tokenizer.prototype._decodeNumericEntity=function(offset,base){var sectionStart=this._sectionStart+offset;if(sectionStart!==this._index){var entity=this._buffer.substring(sectionStart,this._index);var 
parsed=parseInt(entity,base);this._emitPartial(decodeCodePoint(parsed));this._sectionStart=this._index}else{this._sectionStart--}this._state=this._baseState};Tokenizer.prototype._stateInNumericEntity=function(c){if(c===";"){this._decodeNumericEntity(2,10);this._sectionStart++}else if(c<"0"||c>"9"){if(!this._xmlMode){this._decodeNumericEntity(2,10)}else{this._state=this._baseState}this._index--}};Tokenizer.prototype._stateInHexEntity=function(c){if(c===";"){this._decodeNumericEntity(3,16);this._sectionStart++}else if((c<"a"||c>"f")&&(c<"A"||c>"F")&&(c<"0"||c>"9")){if(!this._xmlMode){this._decodeNumericEntity(3,16)}else{this._state=this._baseState}this._index--}};Tokenizer.prototype._cleanup=function(){if(this._sectionStart<0){this._buffer="";this._index=0;this._bufferOffset+=this._index}else if(this._running){if(this._state===TEXT){if(this._sectionStart!==this._index){this._cbs.ontext(this._buffer.substr(this._sectionStart))}this._buffer="";this._bufferOffset+=this._index;this._index=0}else if(this._sectionStart===this._index){this._buffer="";this._bufferOffset+=this._index;this._index=0}else{this._buffer=this._buffer.substr(this._sectionStart);this._index-=this._sectionStart;this._bufferOffset+=this._sectionStart}this._sectionStart=0}};Tokenizer.prototype.write=function(chunk){if(this._ended)this._cbs.onerror(Error(".write() after done!"));this._buffer+=chunk;this._parse()};Tokenizer.prototype._parse=function(){while(this._index<this._buffer.length&&this._running){var c=this._buffer.charAt(this._index);if(this._state===TEXT){this._stateText(c)}else if(this._state===BEFORE_TAG_NAME){this._stateBeforeTagName(c)}else if(this._state===IN_TAG_NAME){this._stateInTagName(c)}else if(this._state===BEFORE_CLOSING_TAG_NAME){this._stateBeforeCloseingTagName(c)}else if(this._state===IN_CLOSING_TAG_NAME){this._stateInCloseingTagName(c)}else if(this._state===AFTER_CLOSING_TAG_NAME){this._stateAfterCloseingTagName(c)}else if(this._state===IN_SELF_CLOSING_TAG){this._stateInSelfClosingTag(c)}else if(this._state===BEFORE_ATTRIBUTE_NAME){this._stateBeforeAttributeName(c)}else if(this._state===IN_ATTRIBUTE_NAME){this._stateInAttributeName(c)}else if(this._state===AFTER_ATTRIBUTE_NAME){this._stateAfterAttributeName(c)}else if(this._state===BEFORE_ATTRIBUTE_VALUE){this._stateBeforeAttributeValue(c)}else if(this._state===IN_ATTRIBUTE_VALUE_DQ){this._stateInAttributeValueDoubleQuotes(c)}else if(this._state===IN_ATTRIBUTE_VALUE_SQ){this._stateInAttributeValueSingleQuotes(c)}else if(this._state===IN_ATTRIBUTE_VALUE_NQ){this._stateInAttributeValueNoQuotes(c)}else if(this._state===BEFORE_DECLARATION){this._stateBeforeDeclaration(c)}else if(this._state===IN_DECLARATION){this._stateInDeclaration(c)}else if(this._state===IN_PROCESSING_INSTRUCTION){this._stateInProcessingInstruction(c)}else if(this._state===BEFORE_COMMENT){this._stateBeforeComment(c)}else if(this._state===IN_COMMENT){this._stateInComment(c)}else if(this._state===AFTER_COMMENT_1){this._stateAfterComment1(c)}else if(this._state===AFTER_COMMENT_2){this._stateAfterComment2(c)}else if(this._state===BEFORE_CDATA_1){this._stateBeforeCdata1(c)}else if(this._state===BEFORE_CDATA_2){this._stateBeforeCdata2(c)}else if(this._state===BEFORE_CDATA_3){this._stateBeforeCdata3(c)}else if(this._state===BEFORE_CDATA_4){this._stateBeforeCdata4(c)}else if(this._state===BEFORE_CDATA_5){this._stateBeforeCdata5(c)}else if(this._state===BEFORE_CDATA_6){this._stateBeforeCdata6(c)}else if(this._state===IN_CDATA){this._stateInCdata(c)}else 
if(this._state===AFTER_CDATA_1){this._stateAfterCdata1(c)}else if(this._state===AFTER_CDATA_2){this._stateAfterCdata2(c)}else if(this._state===BEFORE_SPECIAL){this._stateBeforeSpecial(c)}else if(this._state===BEFORE_SPECIAL_END){this._stateBeforeSpecialEnd(c)}else if(this._state===BEFORE_SCRIPT_1){this._stateBeforeScript1(c)}else if(this._state===BEFORE_SCRIPT_2){this._stateBeforeScript2(c)}else if(this._state===BEFORE_SCRIPT_3){this._stateBeforeScript3(c)}else if(this._state===BEFORE_SCRIPT_4){this._stateBeforeScript4(c)}else if(this._state===BEFORE_SCRIPT_5){this._stateBeforeScript5(c)}else if(this._state===AFTER_SCRIPT_1){this._stateAfterScript1(c)}else if(this._state===AFTER_SCRIPT_2){this._stateAfterScript2(c)}else if(this._state===AFTER_SCRIPT_3){this._stateAfterScript3(c)}else if(this._state===AFTER_SCRIPT_4){this._stateAfterScript4(c)}else if(this._state===AFTER_SCRIPT_5){this._stateAfterScript5(c)}else if(this._state===BEFORE_STYLE_1){this._stateBeforeStyle1(c)}else if(this._state===BEFORE_STYLE_2){this._stateBeforeStyle2(c)}else if(this._state===BEFORE_STYLE_3){this._stateBeforeStyle3(c)}else if(this._state===BEFORE_STYLE_4){this._stateBeforeStyle4(c)}else if(this._state===AFTER_STYLE_1){this._stateAfterStyle1(c)}else if(this._state===AFTER_STYLE_2){this._stateAfterStyle2(c)}else if(this._state===AFTER_STYLE_3){this._stateAfterStyle3(c)}else if(this._state===AFTER_STYLE_4){this._stateAfterStyle4(c)}else if(this._state===BEFORE_ENTITY){this._stateBeforeEntity(c)}else if(this._state===BEFORE_NUMERIC_ENTITY){this._stateBeforeNumericEntity(c)}else if(this._state===IN_NAMED_ENTITY){this._stateInNamedEntity(c)}else if(this._state===IN_NUMERIC_ENTITY){this._stateInNumericEntity(c)}else if(this._state===IN_HEX_ENTITY){this._stateInHexEntity(c)}else{this._cbs.onerror(Error("unknown _state"),this._state)}this._index++}this._cleanup()};Tokenizer.prototype.pause=function(){this._running=false};Tokenizer.prototype.resume=function(){this._running=true;if(this._index<this._buffer.length){this._parse()}if(this._ended){this._finish()}};Tokenizer.prototype.end=function(chunk){if(this._ended)this._cbs.onerror(Error(".end() after done!"));if(chunk)this.write(chunk);this._ended=true;if(this._running)this._finish()};Tokenizer.prototype._finish=function(){if(this._sectionStart<this._index){this._handleTrailingData()}this._cbs.onend()};Tokenizer.prototype._handleTrailingData=function(){var data=this._buffer.substr(this._sectionStart);if(this._state===IN_CDATA||this._state===AFTER_CDATA_1||this._state===AFTER_CDATA_2){this._cbs.oncdata(data)}else if(this._state===IN_COMMENT||this._state===AFTER_COMMENT_1||this._state===AFTER_COMMENT_2){this._cbs.oncomment(data)}else if(this._state===IN_NAMED_ENTITY&&!this._xmlMode){this._parseLegacyEntity();if(this._sectionStart<this._index){this._state=this._baseState;this._handleTrailingData()}}else if(this._state===IN_NUMERIC_ENTITY&&!this._xmlMode){this._decodeNumericEntity(2,10);if(this._sectionStart<this._index){this._state=this._baseState;this._handleTrailingData()}}else if(this._state===IN_HEX_ENTITY&&!this._xmlMode){this._decodeNumericEntity(3,16);if(this._sectionStart<this._index){this._state=this._baseState;this._handleTrailingData()}}else if(this._state!==IN_TAG_NAME&&this._state!==BEFORE_ATTRIBUTE_NAME&&this._state!==BEFORE_ATTRIBUTE_VALUE&&this._state!==AFTER_ATTRIBUTE_NAME&&this._state!==IN_ATTRIBUTE_NAME&&this._state!==IN_ATTRIBUTE_VALUE_SQ&&this._state!==IN_ATTRIBUTE_VALUE_DQ&&this._state!==IN_ATTRIBUTE_VALUE_NQ&&this._state!==IN_CLOSING_TAG_NAME){ 
this._cbs.ontext(data)}};Tokenizer.prototype.reset=function(){Tokenizer.call(this,{xmlMode:this._xmlMode,decodeEntities:this._decodeEntities},this._cbs)};Tokenizer.prototype.getAbsoluteIndex=function(){return this._bufferOffset+this._index};Tokenizer.prototype._getSection=function(){return this._buffer.substring(this._sectionStart,this._index)};Tokenizer.prototype._emitToken=function(name){this._cbs[name](this._getSection());this._sectionStart=-1};Tokenizer.prototype._emitPartial=function(value){if(this._baseState!==TEXT){this._cbs.onattribdata(value)}else{this._cbs.ontext(value)}}},{"entities/lib/decode_codepoint.js":22,"entities/maps/entities.json":25,"entities/maps/legacy.json":26,"entities/maps/xml.json":27}],35:[function(require,module,exports){module.exports=Stream;var Parser=require("./Parser.js"),WritableStream=require("stream").Writable||require("readable-stream").Writable,StringDecoder=require("string_decoder").StringDecoder,Buffer=require("buffer").Buffer;function Stream(cbs,options){var parser=this._parser=new Parser(cbs,options);var decoder=this._decoder=new StringDecoder;WritableStream.call(this,{decodeStrings:false});this.once("finish",function(){parser.end(decoder.end())})}require("inherits")(Stream,WritableStream);WritableStream.prototype._write=function(chunk,encoding,cb){if(chunk instanceof Buffer)chunk=this._decoder.write(chunk);this._parser.write(chunk);cb()}},{"./Parser.js":31,buffer:5,inherits:38,"readable-stream":3,stream:55,string_decoder:56}],36:[function(require,module,exports){var Parser=require("./Parser.js"),DomHandler=require("domhandler");function defineProp(name,value){delete module.exports[name];module.exports[name]=value;return value}module.exports={Parser:Parser,Tokenizer:require("./Tokenizer.js"),ElementType:require("domelementtype"),DomHandler:DomHandler,get FeedHandler(){return defineProp("FeedHandler",require("./FeedHandler.js"))},get Stream(){return defineProp("Stream",require("./Stream.js"))},get WritableStream(){return defineProp("WritableStream",require("./WritableStream.js"))},get ProxyHandler(){return defineProp("ProxyHandler",require("./ProxyHandler.js"))},get DomUtils(){return defineProp("DomUtils",require("domutils"))},get CollectingHandler(){return defineProp("CollectingHandler",require("./CollectingHandler.js"))},DefaultHandler:DomHandler,get RssHandler(){return defineProp("RssHandler",this.FeedHandler)},parseDOM:function(data,options){var handler=new DomHandler(options);new Parser(handler,options).end(data);return handler.dom},parseFeed:function(feed,options){var handler=new module.exports.FeedHandler(options);new Parser(handler,options).end(feed);return handler.dom},createDomStream:function(cb,options,elementCb){var handler=new DomHandler(cb,options,elementCb);return new Parser(handler,options)},EVENTS:{attribute:2,cdatastart:0,cdataend:0,text:1,processinginstruction:2,comment:1,commentend:0,closetag:1,opentag:2,opentagname:1,error:1,end:0}}},{"./CollectingHandler.js":29,"./FeedHandler.js":30,"./Parser.js":31,"./ProxyHandler.js":32,"./Stream.js":33,"./Tokenizer.js":34,"./WritableStream.js":35,domelementtype:9,domhandler:10,domutils:13}],37:[function(require,module,exports){exports.read=function(buffer,offset,isLE,mLen,nBytes){var e,m;var eLen=nBytes*8-mLen-1;var eMax=(1<<eLen)-1;var eBias=eMax>>1;var nBits=-7;var i=isLE?nBytes-1:0;var d=isLE?-1:1;var 
s=buffer[offset+i];i+=d;e=s&(1<<-nBits)-1;s>>=-nBits;nBits+=eLen;for(;nBits>0;e=e*256+buffer[offset+i],i+=d,nBits-=8){}m=e&(1<<-nBits)-1;e>>=-nBits;nBits+=mLen;for(;nBits>0;m=m*256+buffer[offset+i],i+=d,nBits-=8){}if(e===0){e=1-eBias}else if(e===eMax){return m?NaN:(s?-1:1)*Infinity}else{m=m+Math.pow(2,mLen);e=e-eBias}return(s?-1:1)*m*Math.pow(2,e-mLen)};exports.write=function(buffer,value,offset,isLE,mLen,nBytes){var e,m,c;var eLen=nBytes*8-mLen-1;var eMax=(1<<eLen)-1;var eBias=eMax>>1;var rt=mLen===23?Math.pow(2,-24)-Math.pow(2,-77):0;var i=isLE?0:nBytes-1;var d=isLE?1:-1;var s=value<0||value===0&&1/value<0?1:0;value=Math.abs(value);if(isNaN(value)||value===Infinity){m=isNaN(value)?1:0;e=eMax}else{e=Math.floor(Math.log(value)/Math.LN2);if(value*(c=Math.pow(2,-e))<1){e--;c*=2}if(e+eBias>=1){value+=rt/c}else{value+=rt*Math.pow(2,1-eBias)}if(value*c>=2){e++;c/=2}if(e+eBias>=eMax){m=0;e=eMax}else if(e+eBias>=1){m=(value*c-1)*Math.pow(2,mLen);e=e+eBias}else{m=value*Math.pow(2,eBias-1)*Math.pow(2,mLen);e=0}}for(;mLen>=8;buffer[offset+i]=m&255,i+=d,m/=256,mLen-=8){}e=e<<mLen|m;eLen+=mLen;for(;eLen>0;buffer[offset+i]=e&255,i+=d,e/=256,eLen-=8){}buffer[offset+i-d]|=s*128}},{}],38:[function(require,module,exports){if(typeof Object.create==="function"){module.exports=function inherits(ctor,superCtor){ctor.super_=superCtor;ctor.prototype=Object.create(superCtor.prototype,{constructor:{value:ctor,enumerable:false,writable:true,configurable:true}})}}else{module.exports=function inherits(ctor,superCtor){ctor.super_=superCtor;var TempCtor=function(){};TempCtor.prototype=superCtor.prototype;ctor.prototype=new TempCtor;ctor.prototype.constructor=ctor}}},{}],39:[function(require,module,exports){module.exports=function(obj){return obj!=null&&(isBuffer(obj)||isSlowBuffer(obj)||!!obj._isBuffer)};function isBuffer(obj){return!!obj.constructor&&typeof obj.constructor.isBuffer==="function"&&obj.constructor.isBuffer(obj)}function isSlowBuffer(obj){return typeof obj.readFloatLE==="function"&&typeof obj.slice==="function"&&isBuffer(obj.slice(0,0))}},{}],40:[function(require,module,exports){var toString={}.toString;module.exports=Array.isArray||function(arr){return toString.call(arr)=="[object Array]"}},{}],41:[function(require,module,exports){(function(process){"use strict";if(!process.version||process.version.indexOf("v0.")===0||process.version.indexOf("v1.")===0&&process.version.indexOf("v1.8.")!==0){module.exports=nextTick}else{module.exports=process.nextTick}function nextTick(fn,arg1,arg2,arg3){if(typeof fn!=="function"){throw new TypeError('"callback" argument must be a function')}var len=arguments.length;var args,i;switch(len){case 0:case 1:return process.nextTick(fn);case 2:return process.nextTick(function afterTickOne(){fn.call(null,arg1)});case 3:return process.nextTick(function afterTickTwo(){fn.call(null,arg1,arg2)});case 4:return process.nextTick(function afterTickThree(){fn.call(null,arg1,arg2,arg3)});default:args=new Array(len-1);i=0;while(i<args.length){args[i++]=arguments[i]}return process.nextTick(function afterTick(){fn.apply(null,args)})}}}).call(this,require("_process"))},{_process:42}],42:[function(require,module,exports){var process=module.exports={};var cachedSetTimeout;var cachedClearTimeout;function defaultSetTimout(){throw new Error("setTimeout has not been defined")}function defaultClearTimeout(){throw new Error("clearTimeout has not been defined")}(function(){try{if(typeof 
setTimeout==="function"){cachedSetTimeout=setTimeout}else{cachedSetTimeout=defaultSetTimout}}catch(e){cachedSetTimeout=defaultSetTimout}try{if(typeof clearTimeout==="function"){cachedClearTimeout=clearTimeout}else{cachedClearTimeout=defaultClearTimeout}}catch(e){cachedClearTimeout=defaultClearTimeout}})();function runTimeout(fun){if(cachedSetTimeout===setTimeout){return setTimeout(fun,0)}if((cachedSetTimeout===defaultSetTimout||!cachedSetTimeout)&&setTimeout){cachedSetTimeout=setTimeout;return setTimeout(fun,0)}try{return cachedSetTimeout(fun,0)}catch(e){try{return cachedSetTimeout.call(null,fun,0)}catch(e){return cachedSetTimeout.call(this,fun,0)}}}function runClearTimeout(marker){if(cachedClearTimeout===clearTimeout){return clearTimeout(marker)}if((cachedClearTimeout===defaultClearTimeout||!cachedClearTimeout)&&clearTimeout){cachedClearTimeout=clearTimeout;return clearTimeout(marker)}try{return cachedClearTimeout(marker)}catch(e){try{return cachedClearTimeout.call(null,marker)}catch(e){return cachedClearTimeout.call(this,marker)}}}var queue=[];var draining=false;var currentQueue;var queueIndex=-1;function cleanUpNextTick(){if(!draining||!currentQueue){return}draining=false;if(currentQueue.length){queue=currentQueue.concat(queue)}else{queueIndex=-1}if(queue.length){drainQueue()}}function drainQueue(){if(draining){return}var timeout=runTimeout(cleanUpNextTick);draining=true;var len=queue.length;while(len){currentQueue=queue;queue=[];while(++queueIndex<len){if(currentQueue){currentQueue[queueIndex].run()}}queueIndex=-1;len=queue.length}currentQueue=null;draining=false;runClearTimeout(timeout)}process.nextTick=function(fun){var args=new Array(arguments.length-1);if(arguments.length>1){for(var i=1;i<arguments.length;i++){args[i-1]=arguments[i]}}queue.push(new Item(fun,args));if(queue.length===1&&!draining){runTimeout(drainQueue)}};function Item(fun,array){this.fun=fun;this.array=array}Item.prototype.run=function(){this.fun.apply(null,this.array)};process.title="browser";process.browser=true;process.env={};process.argv=[];process.version="";process.versions={};function noop(){}process.on=noop;process.addListener=noop;process.once=noop;process.off=noop;process.removeListener=noop;process.removeAllListeners=noop;process.emit=noop;process.binding=function(name){throw new Error("process.binding is not supported")};process.cwd=function(){return"/"};process.chdir=function(dir){throw new Error("process.chdir is not supported")};process.umask=function(){return 0}},{}],43:[function(require,module,exports){module.exports=require("./lib/_stream_duplex.js")},{"./lib/_stream_duplex.js":44}],44:[function(require,module,exports){"use strict";var objectKeys=Object.keys||function(obj){var keys=[];for(var key in obj){keys.push(key)}return keys};module.exports=Duplex;var processNextTick=require("process-nextick-args");var util=require("core-util-is");util.inherits=require("inherits");var Readable=require("./_stream_readable");var Writable=require("./_stream_writable");util.inherits(Duplex,Readable);var keys=objectKeys(Writable.prototype);for(var v=0;v<keys.length;v++){var method=keys[v];if(!Duplex.prototype[method])Duplex.prototype[method]=Writable.prototype[method]}function Duplex(options){if(!(this instanceof Duplex))return new 
Duplex(options);Readable.call(this,options);Writable.call(this,options);if(options&&options.readable===false)this.readable=false;if(options&&options.writable===false)this.writable=false;this.allowHalfOpen=true;if(options&&options.allowHalfOpen===false)this.allowHalfOpen=false;this.once("end",onend)}function onend(){if(this.allowHalfOpen||this._writableState.ended)return;processNextTick(onEndNT,this)}function onEndNT(self){self.end()}function forEach(xs,f){for(var i=0,l=xs.length;i<l;i++){f(xs[i],i)}}},{"./_stream_readable":46,"./_stream_writable":48,"core-util-is":6,inherits:38,"process-nextick-args":41}],45:[function(require,module,exports){"use strict";module.exports=PassThrough;var Transform=require("./_stream_transform");var util=require("core-util-is");util.inherits=require("inherits");util.inherits(PassThrough,Transform);function PassThrough(options){if(!(this instanceof PassThrough))return new PassThrough(options);Transform.call(this,options)}PassThrough.prototype._transform=function(chunk,encoding,cb){cb(null,chunk)}},{"./_stream_transform":47,"core-util-is":6,inherits:38}],46:[function(require,module,exports){(function(process){"use strict";module.exports=Readable;var processNextTick=require("process-nextick-args");var isArray=require("isarray");Readable.ReadableState=ReadableState;var EE=require("events").EventEmitter;var EElistenerCount=function(emitter,type){return emitter.listeners(type).length};var Stream;(function(){try{Stream=require("st"+"ream")}catch(_){}finally{if(!Stream)Stream=require("events").EventEmitter}})();var Buffer=require("buffer").Buffer;var bufferShim=require("buffer-shims");var util=require("core-util-is");util.inherits=require("inherits");var debugUtil=require("util");var debug=void 0;if(debugUtil&&debugUtil.debuglog){debug=debugUtil.debuglog("stream")}else{debug=function(){}}var BufferList=require("./internal/streams/BufferList");var StringDecoder;util.inherits(Readable,Stream);function prependListener(emitter,event,fn){if(typeof emitter.prependListener==="function"){return emitter.prependListener(event,fn)}else{if(!emitter._events||!emitter._events[event])emitter.on(event,fn);else if(isArray(emitter._events[event]))emitter._events[event].unshift(fn);else emitter._events[event]=[fn,emitter._events[event]]}}var Duplex;function ReadableState(options,stream){Duplex=Duplex||require("./_stream_duplex");options=options||{};this.objectMode=!!options.objectMode;if(stream instanceof Duplex)this.objectMode=this.objectMode||!!options.readableObjectMode;var hwm=options.highWaterMark;var defaultHwm=this.objectMode?16:16*1024;this.highWaterMark=hwm||hwm===0?hwm:defaultHwm;this.highWaterMark=~~this.highWaterMark;this.buffer=new BufferList;this.length=0;this.pipes=null;this.pipesCount=0;this.flowing=null;this.ended=false;this.endEmitted=false;this.reading=false;this.sync=true;this.needReadable=false;this.emittedReadable=false;this.readableListening=false;this.resumeScheduled=false;this.defaultEncoding=options.defaultEncoding||"utf8";this.ranOut=false;this.awaitDrain=0;this.readingMore=false;this.decoder=null;this.encoding=null;if(options.encoding){if(!StringDecoder)StringDecoder=require("string_decoder/").StringDecoder;this.decoder=new StringDecoder(options.encoding);this.encoding=options.encoding}}var Duplex;function Readable(options){Duplex=Duplex||require("./_stream_duplex");if(!(this instanceof Readable))return new Readable(options);this._readableState=new ReadableState(options,this);this.readable=true;if(options&&typeof 
options.read==="function")this._read=options.read;Stream.call(this)}Readable.prototype.push=function(chunk,encoding){var state=this._readableState;if(!state.objectMode&&typeof chunk==="string"){encoding=encoding||state.defaultEncoding;if(encoding!==state.encoding){chunk=bufferShim.from(chunk,encoding);encoding=""}}return readableAddChunk(this,state,chunk,encoding,false)};Readable.prototype.unshift=function(chunk){var state=this._readableState;return readableAddChunk(this,state,chunk,"",true)};Readable.prototype.isPaused=function(){return this._readableState.flowing===false};function readableAddChunk(stream,state,chunk,encoding,addToFront){var er=chunkInvalid(state,chunk);if(er){stream.emit("error",er)}else if(chunk===null){state.reading=false;onEofChunk(stream,state)}else if(state.objectMode||chunk&&chunk.length>0){if(state.ended&&!addToFront){var e=new Error("stream.push() after EOF");stream.emit("error",e)}else if(state.endEmitted&&addToFront){var _e=new Error("stream.unshift() after end event");stream.emit("error",_e)}else{var skipAdd;if(state.decoder&&!addToFront&&!encoding){chunk=state.decoder.write(chunk);skipAdd=!state.objectMode&&chunk.length===0}if(!addToFront)state.reading=false;if(!skipAdd){if(state.flowing&&state.length===0&&!state.sync){stream.emit("data",chunk);stream.read(0)}else{state.length+=state.objectMode?1:chunk.length;if(addToFront)state.buffer.unshift(chunk);else state.buffer.push(chunk);if(state.needReadable)emitReadable(stream)}}maybeReadMore(stream,state)}}else if(!addToFront){state.reading=false}return needMoreData(state)}function needMoreData(state){return!state.ended&&(state.needReadable||state.length<state.highWaterMark||state.length===0)}Readable.prototype.setEncoding=function(enc){if(!StringDecoder)StringDecoder=require("string_decoder/").StringDecoder;this._readableState.decoder=new StringDecoder(enc);this._readableState.encoding=enc;return this};var MAX_HWM=8388608;function computeNewHighWaterMark(n){if(n>=MAX_HWM){n=MAX_HWM}else{n--;n|=n>>>1;n|=n>>>2;n|=n>>>4;n|=n>>>8;n|=n>>>16;n++}return n}function howMuchToRead(n,state){if(n<=0||state.length===0&&state.ended)return 0;if(state.objectMode)return 1;if(n!==n){if(state.flowing&&state.length)return state.buffer.head.data.length;else return state.length}if(n>state.highWaterMark)state.highWaterMark=computeNewHighWaterMark(n);if(n<=state.length)return n;if(!state.ended){state.needReadable=true;return 0}return state.length}Readable.prototype.read=function(n){debug("read",n);n=parseInt(n,10);var state=this._readableState;var nOrig=n;if(n!==0)state.emittedReadable=false;if(n===0&&state.needReadable&&(state.length>=state.highWaterMark||state.ended)){debug("read: emitReadable",state.length,state.ended);if(state.length===0&&state.ended)endReadable(this);else emitReadable(this);return null}n=howMuchToRead(n,state);if(n===0&&state.ended){if(state.length===0)endReadable(this);return null}var doRead=state.needReadable;debug("need readable",doRead);if(state.length===0||state.length-n<state.highWaterMark){doRead=true;debug("length less than watermark",doRead)}if(state.ended||state.reading){doRead=false;debug("reading or ended",doRead)}else if(doRead){debug("do read");state.reading=true;state.sync=true;if(state.length===0)state.needReadable=true;this._read(state.highWaterMark);state.sync=false;if(!state.reading)n=howMuchToRead(nOrig,state)}var ret;if(n>0)ret=fromList(n,state);else 
ret=null;if(ret===null){state.needReadable=true;n=0}else{state.length-=n}if(state.length===0){if(!state.ended)state.needReadable=true;if(nOrig!==n&&state.ended)endReadable(this)}if(ret!==null)this.emit("data",ret);return ret};function chunkInvalid(state,chunk){var er=null;if(!Buffer.isBuffer(chunk)&&typeof chunk!=="string"&&chunk!==null&&chunk!==undefined&&!state.objectMode){er=new TypeError("Invalid non-string/buffer chunk")}return er}function onEofChunk(stream,state){if(state.ended)return;if(state.decoder){var chunk=state.decoder.end();if(chunk&&chunk.length){state.buffer.push(chunk);state.length+=state.objectMode?1:chunk.length}}state.ended=true;emitReadable(stream)}function emitReadable(stream){var state=stream._readableState;state.needReadable=false;if(!state.emittedReadable){debug("emitReadable",state.flowing);state.emittedReadable=true;if(state.sync)processNextTick(emitReadable_,stream);else emitReadable_(stream)}}function emitReadable_(stream){debug("emit readable");stream.emit("readable");flow(stream)}function maybeReadMore(stream,state){if(!state.readingMore){state.readingMore=true;processNextTick(maybeReadMore_,stream,state)}}function maybeReadMore_(stream,state){var len=state.length;while(!state.reading&&!state.flowing&&!state.ended&&state.length<state.highWaterMark){debug("maybeReadMore read 0");stream.read(0);if(len===state.length)break;else len=state.length}state.readingMore=false}Readable.prototype._read=function(n){this.emit("error",new Error("not implemented"))};Readable.prototype.pipe=function(dest,pipeOpts){var src=this;var state=this._readableState;switch(state.pipesCount){case 0:state.pipes=dest;break;case 1:state.pipes=[state.pipes,dest];break;default:state.pipes.push(dest);break}state.pipesCount+=1;debug("pipe count=%d opts=%j",state.pipesCount,pipeOpts);var doEnd=(!pipeOpts||pipeOpts.end!==false)&&dest!==process.stdout&&dest!==process.stderr;var endFn=doEnd?onend:cleanup;if(state.endEmitted)processNextTick(endFn);else src.once("end",endFn);dest.on("unpipe",onunpipe);function onunpipe(readable){debug("onunpipe");if(readable===src){cleanup()}}function onend(){debug("onend");dest.end()}var ondrain=pipeOnDrain(src);dest.on("drain",ondrain);var cleanedUp=false;function cleanup(){debug("cleanup");dest.removeListener("close",onclose);dest.removeListener("finish",onfinish);dest.removeListener("drain",ondrain);dest.removeListener("error",onerror);dest.removeListener("unpipe",onunpipe);src.removeListener("end",onend);src.removeListener("end",cleanup);src.removeListener("data",ondata);cleanedUp=true;if(state.awaitDrain&&(!dest._writableState||dest._writableState.needDrain))ondrain()}var increasedAwaitDrain=false;src.on("data",ondata);function ondata(chunk){debug("ondata");increasedAwaitDrain=false;var ret=dest.write(chunk);if(false===ret&&!increasedAwaitDrain){if((state.pipesCount===1&&state.pipes===dest||state.pipesCount>1&&indexOf(state.pipes,dest)!==-1)&&!cleanedUp){debug("false write response, pause",src._readableState.awaitDrain);src._readableState.awaitDrain++;increasedAwaitDrain=true}src.pause()}}function onerror(er){debug("onerror",er);unpipe();dest.removeListener("error",onerror);if(EElistenerCount(dest,"error")===0)dest.emit("error",er)}prependListener(dest,"error",onerror);function onclose(){dest.removeListener("finish",onfinish);unpipe()}dest.once("close",onclose);function onfinish(){debug("onfinish");dest.removeListener("close",onclose);unpipe()}dest.once("finish",onfinish);function 
unpipe(){debug("unpipe");src.unpipe(dest)}dest.emit("pipe",src);if(!state.flowing){debug("pipe resume");src.resume()}return dest};function pipeOnDrain(src){return function(){var state=src._readableState;debug("pipeOnDrain",state.awaitDrain);if(state.awaitDrain)state.awaitDrain--;if(state.awaitDrain===0&&EElistenerCount(src,"data")){state.flowing=true;flow(src)}}}Readable.prototype.unpipe=function(dest){var state=this._readableState;if(state.pipesCount===0)return this;if(state.pipesCount===1){if(dest&&dest!==state.pipes)return this;if(!dest)dest=state.pipes;state.pipes=null;state.pipesCount=0;state.flowing=false;if(dest)dest.emit("unpipe",this);return this}if(!dest){var dests=state.pipes;var len=state.pipesCount;state.pipes=null;state.pipesCount=0;state.flowing=false;for(var _i=0;_i<len;_i++){dests[_i].emit("unpipe",this)}return this}var i=indexOf(state.pipes,dest);if(i===-1)return this;state.pipes.splice(i,1);state.pipesCount-=1;if(state.pipesCount===1)state.pipes=state.pipes[0];dest.emit("unpipe",this);return this};Readable.prototype.on=function(ev,fn){var res=Stream.prototype.on.call(this,ev,fn);if(ev==="data"){if(this._readableState.flowing!==false)this.resume()}else if(ev==="readable"){var state=this._readableState;if(!state.endEmitted&&!state.readableListening){state.readableListening=state.needReadable=true;state.emittedReadable=false;if(!state.reading){processNextTick(nReadingNextTick,this)}else if(state.length){emitReadable(this,state)}}}return res};Readable.prototype.addListener=Readable.prototype.on;function nReadingNextTick(self){debug("readable nexttick read 0");self.read(0)}Readable.prototype.resume=function(){var state=this._readableState;if(!state.flowing){debug("resume");state.flowing=true;resume(this,state)}return this};function resume(stream,state){if(!state.resumeScheduled){state.resumeScheduled=true;processNextTick(resume_,stream,state)}}function resume_(stream,state){if(!state.reading){debug("resume read 0");stream.read(0)}state.resumeScheduled=false;state.awaitDrain=0;stream.emit("resume");flow(stream);if(state.flowing&&!state.reading)stream.read(0)}Readable.prototype.pause=function(){debug("call pause flowing=%j",this._readableState.flowing);if(false!==this._readableState.flowing){debug("pause");this._readableState.flowing=false;this.emit("pause")}return this};function flow(stream){var state=stream._readableState;debug("flow",state.flowing);while(state.flowing&&stream.read()!==null){}}Readable.prototype.wrap=function(stream){var state=this._readableState;var paused=false;var self=this;stream.on("end",function(){debug("wrapped end");if(state.decoder&&!state.ended){var chunk=state.decoder.end();if(chunk&&chunk.length)self.push(chunk)}self.push(null)});stream.on("data",function(chunk){debug("wrapped data");if(state.decoder)chunk=state.decoder.write(chunk);if(state.objectMode&&(chunk===null||chunk===undefined))return;else if(!state.objectMode&&(!chunk||!chunk.length))return;var ret=self.push(chunk);if(!ret){paused=true;stream.pause()}});for(var i in stream){if(this[i]===undefined&&typeof stream[i]==="function"){this[i]=function(method){return function(){return stream[method].apply(stream,arguments)}}(i)}}var events=["error","close","destroy","pause","resume"];forEach(events,function(ev){stream.on(ev,self.emit.bind(self,ev))});self._read=function(n){debug("wrapped _read",n);if(paused){paused=false;stream.resume()}};return self};Readable._fromList=fromList;function fromList(n,state){if(state.length===0)return null;var ret;if(state.objectMode)ret=state.buffer.shift();else 
if(!n||n>=state.length){if(state.decoder)ret=state.buffer.join("");else if(state.buffer.length===1)ret=state.buffer.head.data;else ret=state.buffer.concat(state.length);state.buffer.clear()}else{ret=fromListPartial(n,state.buffer,state.decoder)}return ret}function fromListPartial(n,list,hasStrings){var ret;if(n<list.head.data.length){ret=list.head.data.slice(0,n);list.head.data=list.head.data.slice(n)}else if(n===list.head.data.length){ret=list.shift()}else{ret=hasStrings?copyFromBufferString(n,list):copyFromBuffer(n,list)}return ret}function copyFromBufferString(n,list){var p=list.head;var c=1;var ret=p.data;n-=ret.length;while(p=p.next){var str=p.data;var nb=n>str.length?str.length:n;if(nb===str.length)ret+=str;else ret+=str.slice(0,n);n-=nb;if(n===0){if(nb===str.length){++c;if(p.next)list.head=p.next;else list.head=list.tail=null}else{list.head=p;p.data=str.slice(nb)}break}++c}list.length-=c;return ret}function copyFromBuffer(n,list){var ret=bufferShim.allocUnsafe(n);var p=list.head;var c=1;p.data.copy(ret);n-=p.data.length;while(p=p.next){var buf=p.data;var nb=n>buf.length?buf.length:n;buf.copy(ret,ret.length-n,0,nb);n-=nb;if(n===0){if(nb===buf.length){++c;if(p.next)list.head=p.next;else list.head=list.tail=null}else{list.head=p;p.data=buf.slice(nb)}break}++c}list.length-=c;return ret}function endReadable(stream){var state=stream._readableState;if(state.length>0)throw new Error('"endReadable()" called on non-empty stream');if(!state.endEmitted){state.ended=true;processNextTick(endReadableNT,state,stream)}}function endReadableNT(state,stream){if(!state.endEmitted&&state.length===0){state.endEmitted=true;stream.readable=false;stream.emit("end")}}function forEach(xs,f){for(var i=0,l=xs.length;i<l;i++){f(xs[i],i)}}function indexOf(xs,x){for(var i=0,l=xs.length;i<l;i++){if(xs[i]===x)return i}return-1}}).call(this,require("_process"))},{"./_stream_duplex":44,"./internal/streams/BufferList":49,_process:42,buffer:5,"buffer-shims":4,"core-util-is":6,events:28,inherits:38,isarray:40,"process-nextick-args":41,"string_decoder/":56,util:3}],47:[function(require,module,exports){"use strict";module.exports=Transform;var Duplex=require("./_stream_duplex");var util=require("core-util-is");util.inherits=require("inherits");util.inherits(Transform,Duplex);function TransformState(stream){this.afterTransform=function(er,data){return afterTransform(stream,er,data)};this.needTransform=false;this.transforming=false;this.writecb=null;this.writechunk=null;this.writeencoding=null}function afterTransform(stream,er,data){var ts=stream._transformState;ts.transforming=false;var cb=ts.writecb;if(!cb)return stream.emit("error",new Error("no writecb in Transform class"));ts.writechunk=null;ts.writecb=null;if(data!==null&&data!==undefined)stream.push(data);cb(er);var rs=stream._readableState;rs.reading=false;if(rs.needReadable||rs.length<rs.highWaterMark){stream._read(rs.highWaterMark)}}function Transform(options){if(!(this instanceof Transform))return new Transform(options);Duplex.call(this,options);this._transformState=new TransformState(this);var stream=this;this._readableState.needReadable=true;this._readableState.sync=false;if(options){if(typeof options.transform==="function")this._transform=options.transform;if(typeof options.flush==="function")this._flush=options.flush}this.once("prefinish",function(){if(typeof this._flush==="function")this._flush(function(er){done(stream,er)});else done(stream)})}Transform.prototype.push=function(chunk,encoding){this._transformState.needTransform=false;return 
Duplex.prototype.push.call(this,chunk,encoding)};Transform.prototype._transform=function(chunk,encoding,cb){throw new Error("Not implemented")};Transform.prototype._write=function(chunk,encoding,cb){var ts=this._transformState;ts.writecb=cb;ts.writechunk=chunk;ts.writeencoding=encoding;if(!ts.transforming){var rs=this._readableState;if(ts.needTransform||rs.needReadable||rs.length<rs.highWaterMark)this._read(rs.highWaterMark)}};Transform.prototype._read=function(n){var ts=this._transformState;if(ts.writechunk!==null&&ts.writecb&&!ts.transforming){ts.transforming=true;this._transform(ts.writechunk,ts.writeencoding,ts.afterTransform)}else{ts.needTransform=true}};function done(stream,er){if(er)return stream.emit("error",er);var ws=stream._writableState;var ts=stream._transformState;if(ws.length)throw new Error("Calling transform done when ws.length != 0");if(ts.transforming)throw new Error("Calling transform done when still transforming");return stream.push(null)}},{"./_stream_duplex":44,"core-util-is":6,inherits:38}],48:[function(require,module,exports){(function(process){"use strict";module.exports=Writable;var processNextTick=require("process-nextick-args");var asyncWrite=!process.browser&&["v0.10","v0.9."].indexOf(process.version.slice(0,5))>-1?setImmediate:processNextTick;Writable.WritableState=WritableState;var util=require("core-util-is");util.inherits=require("inherits");var internalUtil={deprecate:require("util-deprecate")};var Stream;(function(){try{Stream=require("st"+"ream")}catch(_){}finally{if(!Stream)Stream=require("events").EventEmitter}})();var Buffer=require("buffer").Buffer;var bufferShim=require("buffer-shims");util.inherits(Writable,Stream);function nop(){}function WriteReq(chunk,encoding,cb){this.chunk=chunk;this.encoding=encoding;this.callback=cb;this.next=null}var Duplex;function WritableState(options,stream){Duplex=Duplex||require("./_stream_duplex");options=options||{};this.objectMode=!!options.objectMode;if(stream instanceof Duplex)this.objectMode=this.objectMode||!!options.writableObjectMode;var hwm=options.highWaterMark;var defaultHwm=this.objectMode?16:16*1024;this.highWaterMark=hwm||hwm===0?hwm:defaultHwm;this.highWaterMark=~~this.highWaterMark;this.needDrain=false;this.ending=false;this.ended=false;this.finished=false;var noDecode=options.decodeStrings===false;this.decodeStrings=!noDecode;this.defaultEncoding=options.defaultEncoding||"utf8";this.length=0;this.writing=false;this.corked=0;this.sync=true;this.bufferProcessing=false;this.onwrite=function(er){onwrite(stream,er)};this.writecb=null;this.writelen=0;this.bufferedRequest=null;this.lastBufferedRequest=null;this.pendingcb=0;this.prefinished=false;this.errorEmitted=false;this.bufferedRequestCount=0;this.corkedRequestsFree=new CorkedRequest(this)}WritableState.prototype.getBuffer=function writableStateGetBuffer(){var current=this.bufferedRequest;var out=[];while(current){out.push(current);current=current.next}return out};(function(){try{Object.defineProperty(WritableState.prototype,"buffer",{get:internalUtil.deprecate(function(){return this.getBuffer()},"_writableState.buffer is deprecated. 
Use _writableState.getBuffer "+"instead.")})}catch(_){}})();var Duplex;function Writable(options){Duplex=Duplex||require("./_stream_duplex");if(!(this instanceof Writable)&&!(this instanceof Duplex))return new Writable(options);this._writableState=new WritableState(options,this);this.writable=true;if(options){if(typeof options.write==="function")this._write=options.write;if(typeof options.writev==="function")this._writev=options.writev}Stream.call(this)}Writable.prototype.pipe=function(){this.emit("error",new Error("Cannot pipe, not readable"))};function writeAfterEnd(stream,cb){var er=new Error("write after end");stream.emit("error",er);processNextTick(cb,er)}function validChunk(stream,state,chunk,cb){var valid=true;var er=false;if(chunk===null){er=new TypeError("May not write null values to stream")}else if(!Buffer.isBuffer(chunk)&&typeof chunk!=="string"&&chunk!==undefined&&!state.objectMode){er=new TypeError("Invalid non-string/buffer chunk")}if(er){stream.emit("error",er);processNextTick(cb,er);valid=false}return valid}Writable.prototype.write=function(chunk,encoding,cb){var state=this._writableState;var ret=false;if(typeof encoding==="function"){cb=encoding;encoding=null}if(Buffer.isBuffer(chunk))encoding="buffer";else if(!encoding)encoding=state.defaultEncoding;if(typeof cb!=="function")cb=nop;if(state.ended)writeAfterEnd(this,cb);else if(validChunk(this,state,chunk,cb)){ state.pendingcb++;ret=writeOrBuffer(this,state,chunk,encoding,cb)}return ret};Writable.prototype.cork=function(){var state=this._writableState;state.corked++};Writable.prototype.uncork=function(){var state=this._writableState;if(state.corked){state.corked--;if(!state.writing&&!state.corked&&!state.finished&&!state.bufferProcessing&&state.bufferedRequest)clearBuffer(this,state)}};Writable.prototype.setDefaultEncoding=function setDefaultEncoding(encoding){if(typeof encoding==="string")encoding=encoding.toLowerCase();if(!(["hex","utf8","utf-8","ascii","binary","base64","ucs2","ucs-2","utf16le","utf-16le","raw"].indexOf((encoding+"").toLowerCase())>-1))throw new TypeError("Unknown encoding: "+encoding);this._writableState.defaultEncoding=encoding;return this};function decodeChunk(state,chunk,encoding){if(!state.objectMode&&state.decodeStrings!==false&&typeof chunk==="string"){chunk=bufferShim.from(chunk,encoding)}return chunk}function writeOrBuffer(stream,state,chunk,encoding,cb){chunk=decodeChunk(state,chunk,encoding);if(Buffer.isBuffer(chunk))encoding="buffer";var len=state.objectMode?1:chunk.length;state.length+=len;var ret=state.length<state.highWaterMark;if(!ret)state.needDrain=true;if(state.writing||state.corked){var last=state.lastBufferedRequest;state.lastBufferedRequest=new WriteReq(chunk,encoding,cb);if(last){last.next=state.lastBufferedRequest}else{state.bufferedRequest=state.lastBufferedRequest}state.bufferedRequestCount+=1}else{doWrite(stream,state,false,len,chunk,encoding,cb)}return ret}function doWrite(stream,state,writev,len,chunk,encoding,cb){state.writelen=len;state.writecb=cb;state.writing=true;state.sync=true;if(writev)stream._writev(chunk,state.onwrite);else stream._write(chunk,encoding,state.onwrite);state.sync=false}function onwriteError(stream,state,sync,er,cb){--state.pendingcb;if(sync)processNextTick(cb,er);else cb(er);stream._writableState.errorEmitted=true;stream.emit("error",er)}function onwriteStateUpdate(state){state.writing=false;state.writecb=null;state.length-=state.writelen;state.writelen=0}function onwrite(stream,er){var state=stream._writableState;var sync=state.sync;var 
cb=state.writecb;onwriteStateUpdate(state);if(er)onwriteError(stream,state,sync,er,cb);else{var finished=needFinish(state);if(!finished&&!state.corked&&!state.bufferProcessing&&state.bufferedRequest){clearBuffer(stream,state)}if(sync){asyncWrite(afterWrite,stream,state,finished,cb)}else{afterWrite(stream,state,finished,cb)}}}function afterWrite(stream,state,finished,cb){if(!finished)onwriteDrain(stream,state);state.pendingcb--;cb();finishMaybe(stream,state)}function onwriteDrain(stream,state){if(state.length===0&&state.needDrain){state.needDrain=false;stream.emit("drain")}}function clearBuffer(stream,state){state.bufferProcessing=true;var entry=state.bufferedRequest;if(stream._writev&&entry&&entry.next){var l=state.bufferedRequestCount;var buffer=new Array(l);var holder=state.corkedRequestsFree;holder.entry=entry;var count=0;while(entry){buffer[count]=entry;entry=entry.next;count+=1}doWrite(stream,state,true,state.length,buffer,"",holder.finish);state.pendingcb++;state.lastBufferedRequest=null;if(holder.next){state.corkedRequestsFree=holder.next;holder.next=null}else{state.corkedRequestsFree=new CorkedRequest(state)}}else{while(entry){var chunk=entry.chunk;var encoding=entry.encoding;var cb=entry.callback;var len=state.objectMode?1:chunk.length;doWrite(stream,state,false,len,chunk,encoding,cb);entry=entry.next;if(state.writing){break}}if(entry===null)state.lastBufferedRequest=null}state.bufferedRequestCount=0;state.bufferedRequest=entry;state.bufferProcessing=false}Writable.prototype._write=function(chunk,encoding,cb){cb(new Error("not implemented"))};Writable.prototype._writev=null;Writable.prototype.end=function(chunk,encoding,cb){var state=this._writableState;if(typeof chunk==="function"){cb=chunk;chunk=null;encoding=null}else if(typeof encoding==="function"){cb=encoding;encoding=null}if(chunk!==null&&chunk!==undefined)this.write(chunk,encoding);if(state.corked){state.corked=1;this.uncork()}if(!state.ending&&!state.finished)endWritable(this,state,cb)};function needFinish(state){return state.ending&&state.length===0&&state.bufferedRequest===null&&!state.finished&&!state.writing}function prefinish(stream,state){if(!state.prefinished){state.prefinished=true;stream.emit("prefinish")}}function finishMaybe(stream,state){var need=needFinish(state);if(need){if(state.pendingcb===0){prefinish(stream,state);state.finished=true;stream.emit("finish")}else{prefinish(stream,state)}}return need}function endWritable(stream,state,cb){state.ending=true;finishMaybe(stream,state);if(cb){if(state.finished)processNextTick(cb);else stream.once("finish",cb)}state.ended=true;stream.writable=false}function CorkedRequest(state){var _this=this;this.next=null;this.entry=null;this.finish=function(err){var entry=_this.entry;_this.entry=null;while(entry){var cb=entry.callback;state.pendingcb--;cb(err);entry=entry.next}if(state.corkedRequestsFree){state.corkedRequestsFree.next=_this}else{state.corkedRequestsFree=_this}}}}).call(this,require("_process"))},{"./_stream_duplex":44,_process:42,buffer:5,"buffer-shims":4,"core-util-is":6,events:28,inherits:38,"process-nextick-args":41,"util-deprecate":57}],49:[function(require,module,exports){"use strict";var Buffer=require("buffer").Buffer;var bufferShim=require("buffer-shims");module.exports=BufferList;function BufferList(){this.head=null;this.tail=null;this.length=0}BufferList.prototype.push=function(v){var entry={data:v,next:null};if(this.length>0)this.tail.next=entry;else this.head=entry;this.tail=entry;++this.length};BufferList.prototype.unshift=function(v){var 
entry={data:v,next:this.head};if(this.length===0)this.tail=entry;this.head=entry;++this.length};BufferList.prototype.shift=function(){if(this.length===0)return;var ret=this.head.data;if(this.length===1)this.head=this.tail=null;else this.head=this.head.next;--this.length;return ret};BufferList.prototype.clear=function(){this.head=this.tail=null;this.length=0};BufferList.prototype.join=function(s){if(this.length===0)return"";var p=this.head;var ret=""+p.data;while(p=p.next){ret+=s+p.data}return ret};BufferList.prototype.concat=function(n){if(this.length===0)return bufferShim.alloc(0);if(this.length===1)return this.head.data;var ret=bufferShim.allocUnsafe(n>>>0);var p=this.head;var i=0;while(p){p.data.copy(ret,i);i+=p.data.length;p=p.next}return ret}},{buffer:5,"buffer-shims":4}],50:[function(require,module,exports){module.exports=require("./lib/_stream_passthrough.js")},{"./lib/_stream_passthrough.js":45}],51:[function(require,module,exports){(function(process){var Stream=function(){try{return require("st"+"ream")}catch(_){}}();exports=module.exports=require("./lib/_stream_readable.js");exports.Stream=Stream||exports;exports.Readable=exports;exports.Writable=require("./lib/_stream_writable.js");exports.Duplex=require("./lib/_stream_duplex.js");exports.Transform=require("./lib/_stream_transform.js");exports.PassThrough=require("./lib/_stream_passthrough.js");if(!process.browser&&process.env.READABLE_STREAM==="disable"&&Stream){module.exports=Stream}}).call(this,require("_process"))},{"./lib/_stream_duplex.js":44,"./lib/_stream_passthrough.js":45,"./lib/_stream_readable.js":46,"./lib/_stream_transform.js":47,"./lib/_stream_writable.js":48,_process:42}],52:[function(require,module,exports){module.exports=require("./lib/_stream_transform.js")},{"./lib/_stream_transform.js":47}],53:[function(require,module,exports){module.exports=require("./lib/_stream_writable.js")},{"./lib/_stream_writable.js":48}],54:[function(require,module,exports){module.exports=function(string){return string.replace(/[-\\^$*+?.()|[\]{}]/g,"\\$&")}},{}],55:[function(require,module,exports){module.exports=Stream;var EE=require("events").EventEmitter;var inherits=require("inherits");inherits(Stream,EE);Stream.Readable=require("readable-stream/readable.js");Stream.Writable=require("readable-stream/writable.js");Stream.Duplex=require("readable-stream/duplex.js");Stream.Transform=require("readable-stream/transform.js");Stream.PassThrough=require("readable-stream/passthrough.js");Stream.Stream=Stream;function Stream(){EE.call(this)}Stream.prototype.pipe=function(dest,options){var source=this;function ondata(chunk){if(dest.writable){if(false===dest.write(chunk)&&source.pause){source.pause()}}}source.on("data",ondata);function ondrain(){if(source.readable&&source.resume){source.resume()}}dest.on("drain",ondrain);if(!dest._isStdio&&(!options||options.end!==false)){source.on("end",onend);source.on("close",onclose)}var didOnEnd=false;function onend(){if(didOnEnd)return;didOnEnd=true;dest.end()}function onclose(){if(didOnEnd)return;didOnEnd=true;if(typeof dest.destroy==="function")dest.destroy()}function onerror(er){cleanup();if(EE.listenerCount(this,"error")===0){throw er}}source.on("error",onerror);dest.on("error",onerror);function 
cleanup(){source.removeListener("data",ondata);dest.removeListener("drain",ondrain);source.removeListener("end",onend);source.removeListener("close",onclose);source.removeListener("error",onerror);dest.removeListener("error",onerror);source.removeListener("end",cleanup);source.removeListener("close",cleanup);dest.removeListener("close",cleanup)}source.on("end",cleanup);source.on("close",cleanup);dest.on("close",cleanup);dest.emit("pipe",source);return dest}},{events:28,inherits:38,"readable-stream/duplex.js":43,"readable-stream/passthrough.js":50,"readable-stream/readable.js":51,"readable-stream/transform.js":52,"readable-stream/writable.js":53}],56:[function(require,module,exports){var Buffer=require("buffer").Buffer;var isBufferEncoding=Buffer.isEncoding||function(encoding){switch(encoding&&encoding.toLowerCase()){case"hex":case"utf8":case"utf-8":case"ascii":case"binary":case"base64":case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":case"raw":return true;default:return false}};function assertEncoding(encoding){if(encoding&&!isBufferEncoding(encoding)){throw new Error("Unknown encoding: "+encoding)}}var StringDecoder=exports.StringDecoder=function(encoding){this.encoding=(encoding||"utf8").toLowerCase().replace(/[-_]/,"");assertEncoding(encoding);switch(this.encoding){case"utf8":this.surrogateSize=3;break;case"ucs2":case"utf16le":this.surrogateSize=2;this.detectIncompleteChar=utf16DetectIncompleteChar;break;case"base64":this.surrogateSize=3;this.detectIncompleteChar=base64DetectIncompleteChar;break;default:this.write=passThroughWrite;return}this.charBuffer=new Buffer(6);this.charReceived=0;this.charLength=0};StringDecoder.prototype.write=function(buffer){var charStr="";while(this.charLength){var available=buffer.length>=this.charLength-this.charReceived?this.charLength-this.charReceived:buffer.length;buffer.copy(this.charBuffer,this.charReceived,0,available);this.charReceived+=available;if(this.charReceived<this.charLength){return""}buffer=buffer.slice(available,buffer.length);charStr=this.charBuffer.slice(0,this.charLength).toString(this.encoding);var charCode=charStr.charCodeAt(charStr.length-1);if(charCode>=55296&&charCode<=56319){this.charLength+=this.surrogateSize;charStr="";continue}this.charReceived=this.charLength=0;if(buffer.length===0){return charStr}break}this.detectIncompleteChar(buffer);var end=buffer.length;if(this.charLength){buffer.copy(this.charBuffer,0,buffer.length-this.charReceived,end);end-=this.charReceived}charStr+=buffer.toString(this.encoding,0,end);var end=charStr.length-1;var charCode=charStr.charCodeAt(end);if(charCode>=55296&&charCode<=56319){var size=this.surrogateSize;this.charLength+=size;this.charReceived+=size;this.charBuffer.copy(this.charBuffer,size,0,size);buffer.copy(this.charBuffer,0,0,size);return charStr.substring(0,end)}return charStr};StringDecoder.prototype.detectIncompleteChar=function(buffer){var i=buffer.length>=3?3:buffer.length;for(;i>0;i--){var c=buffer[buffer.length-i];if(i==1&&c>>5==6){this.charLength=2;break}if(i<=2&&c>>4==14){this.charLength=3;break}if(i<=3&&c>>3==30){this.charLength=4;break}}this.charReceived=i};StringDecoder.prototype.end=function(buffer){var res="";if(buffer&&buffer.length)res=this.write(buffer);if(this.charReceived){var cr=this.charReceived;var buf=this.charBuffer;var enc=this.encoding;res+=buf.slice(0,cr).toString(enc)}return res};function passThroughWrite(buffer){return buffer.toString(this.encoding)}function 
utf16DetectIncompleteChar(buffer){this.charReceived=buffer.length%2;this.charLength=this.charReceived?2:0}function base64DetectIncompleteChar(buffer){this.charReceived=buffer.length%3;this.charLength=this.charReceived?3:0}},{buffer:5}],57:[function(require,module,exports){(function(global){module.exports=deprecate;function deprecate(fn,msg){if(config("noDeprecation")){return fn}var warned=false;function deprecated(){if(!warned){if(config("throwDeprecation")){throw new Error(msg)}else if(config("traceDeprecation")){console.trace(msg)}else{console.warn(msg)}warned=true}return fn.apply(this,arguments)}return deprecated}function config(name){try{if(!global.localStorage)return false}catch(_){return false}var val=global.localStorage[name];if(null==val)return false;return String(val).toLowerCase()==="true"}}).call(this,typeof global!=="undefined"?global:typeof self!=="undefined"?self:typeof window!=="undefined"?window:{})},{}],58:[function(require,module,exports){module.exports=extend;var hasOwnProperty=Object.prototype.hasOwnProperty;function extend(){var target={};for(var i=0;i<arguments.length;i++){var source=arguments[i];for(var key in source){if(hasOwnProperty.call(source,key)){target[key]=source[key]}}}return target}},{}]},{},[1])(1)});
PypiClean
/swh.web-0.2.37.tar.gz/swh.web-0.2.37/assets/config/webpack-plugins/generate-weblabels-webpack-plugin/index.js
const ejs = require('ejs'); const fs = require('fs'); const log = require('webpack-log'); const path = require('path'); const schema = require('./plugin-options-schema.json'); const spdxParse = require('spdx-expression-parse'); const spdxLicensesMapping = require('./spdx-licenses-mapping'); const {validate} = require('schema-utils'); const pluginName = 'GenerateWebLabelsPlugin'; class GenerateWebLabelsPlugin { constructor(opts) { // check that provided options match JSON schema validate(schema, opts, pluginName); this.options = opts || {}; this.weblabelsDirName = this.options['outputDir'] || 'jssources'; this.outputType = this.options['outputType'] || 'html'; // source file extension handled by webpack and compiled to js this.srcExts = ['js', 'ts', 'coffee', 'lua']; this.srcExtsRegexp = new RegExp('^.*.(' + this.srcExts.join('|') + ')$'); this.chunkIdToName = {}; this.chunkNameToJsAsset = {}; this.chunkJsAssetToSrcFiles = {}; this.srcIdsInChunkJsAsset = {}; this.packageJsonCache = {}; this.packageLicenseFile = {}; this.exclude = []; this.copiedFiles = new Set(); this.logger = log({name: pluginName}); // populate module prefix patterns to exclude if (Array.isArray(this.options['exclude'])) { this.options['exclude'].forEach(toExclude => { if (!toExclude.startsWith('.')) { this.exclude.push('./' + path.join('node_modules', toExclude)); } else { this.exclude.push(toExclude); } }); } } apply(compiler) { compiler.hooks.done.tap(pluginName, statsObj => { // get the stats object in JSON format const stats = statsObj.toJson(); this.stats = stats; // set output folder this.weblabelsOutputDir = path.join(stats.outputPath, this.weblabelsDirName); this.recursiveMkdir(this.weblabelsOutputDir); stats.assets.forEach(asset => { for (let i = 0; i < asset.chunks.length; ++i) { this.chunkIdToName[asset.chunks[i]] = asset.chunkNames[i]; } }); // map each generated webpack chunk to its js asset Object.keys(stats.assetsByChunkName).forEach((chunkName, i) => { if (Array.isArray(stats.assetsByChunkName[chunkName])) { for (const asset of stats.assetsByChunkName[chunkName]) { if (asset.endsWith('.js')) { this.chunkNameToJsAsset[chunkName] = asset; this.chunkNameToJsAsset[i] = asset; break; } } } else if (stats.assetsByChunkName[chunkName].endsWith('.js')) { this.chunkNameToJsAsset[chunkName] = stats.assetsByChunkName[chunkName]; this.chunkNameToJsAsset[i] = stats.assetsByChunkName[chunkName]; } }); // iterate on all bundled webpack modules stats.modules.forEach(mod => { let srcFilePath = mod.name; // do not process non js related modules if (!this.srcExtsRegexp.test(srcFilePath)) { return; } // do not process modules unrelated to a source file if (!srcFilePath.startsWith('./')) { return; } // do not process modules in the exclusion list for (const toExclude of this.exclude) { if (srcFilePath.startsWith(toExclude)) { return; } } // remove webpack loader call if any const loaderEndPos = srcFilePath.indexOf('!'); if (loaderEndPos !== -1) { srcFilePath = srcFilePath.slice(loaderEndPos + 1); } // iterate on all chunks containing the module mod.chunks.forEach(chunk => { const chunkName = this.chunkIdToName[chunk]; const chunkJsAsset = stats.publicPath + this.chunkNameToJsAsset[chunkName]; // init the chunk to source files mapping if needed if (!this.chunkJsAssetToSrcFiles.hasOwnProperty(chunkJsAsset)) { this.chunkJsAssetToSrcFiles[chunkJsAsset] = []; this.srcIdsInChunkJsAsset[chunkJsAsset] = new Set(); } // check if the source file needs to be replaces if (this.options['srcReplace'] && 
this.options['srcReplace'].hasOwnProperty(srcFilePath)) { srcFilePath = this.options['srcReplace'][srcFilePath]; } // init source file metadata const srcFileData = {'id': this.cleanupPath(srcFilePath)}; // extract license information, overriding it if needed let licenseOverridden = false; let licenseFilePath; if (this.options['licenseOverride']) { for (const srcFilePrefixKey of Object.keys(this.options['licenseOverride'])) { let srcFilePrefix = srcFilePrefixKey; if (!srcFilePrefixKey.startsWith('.')) { srcFilePrefix = './' + path.join('node_modules', srcFilePrefixKey); } if (srcFilePath.startsWith(srcFilePrefix)) { const spdxLicenseExpression = this.options['licenseOverride'][srcFilePrefixKey]['spdxLicenseExpression']; licenseFilePath = this.options['licenseOverride'][srcFilePrefixKey]['licenseFilePath']; const parsedSpdxLicenses = this.parseSpdxLicenseExpression(spdxLicenseExpression, `file ${srcFilePath}`); srcFileData['licenses'] = this.spdxToWebLabelsLicenses(parsedSpdxLicenses); licenseOverridden = true; break; } } } if (!licenseOverridden) { // find and parse the corresponding package.json file let packageJsonPath; const nodeModule = srcFilePath.startsWith('./node_modules/'); if (nodeModule) { packageJsonPath = this.findPackageJsonPath(srcFilePath); } else { packageJsonPath = './package.json'; } const packageJson = this.parsePackageJson(packageJsonPath); srcFileData['licenses'] = this.extractLicenseInformation(packageJson); const licenseDir = path.join(...packageJsonPath.split('/').slice(0, -1)); licenseFilePath = this.findLicenseFile(licenseDir); } // copy original license file and get its url const licenseCopyUrl = this.copyLicenseFile(licenseFilePath); srcFileData['licenses'].forEach(license => { license['copy_url'] = licenseCopyUrl; }); // generate url for downloading non-minified source code srcFileData['src_url'] = stats.publicPath + path.join(this.weblabelsDirName, srcFileData['id']); // add source file metadata to the webpack chunk this.addSrcFileDataToJsChunkAsset(chunkJsAsset, srcFileData); // copy non-minified source to output folder this.copyFileToOutputPath(srcFilePath); }); }); // process additional scripts if needed if (this.options['additionalScripts']) { for (let script of Object.keys(this.options['additionalScripts'])) { const scriptFilesData = this.options['additionalScripts'][script]; if (script.indexOf('://') === -1 && !script.startsWith('/')) { script = stats.publicPath + script; } this.chunkJsAssetToSrcFiles[script] = []; this.srcIdsInChunkJsAsset[script] = new Set(); for (const scriptSrc of scriptFilesData) { const scriptSrcData = {'id': scriptSrc['id']}; const licenceFilePath = scriptSrc['licenseFilePath']; const parsedSpdxLicenses = this.parseSpdxLicenseExpression(scriptSrc['spdxLicenseExpression'], `file ${scriptSrc['path']}`); scriptSrcData['licenses'] = this.spdxToWebLabelsLicenses(parsedSpdxLicenses); if (licenceFilePath.indexOf('://') === -1 && !licenceFilePath.startsWith('/')) { const licenseCopyUrl = this.copyLicenseFile(licenceFilePath); scriptSrcData['licenses'].forEach(license => { license['copy_url'] = licenseCopyUrl; }); } else { scriptSrcData['licenses'].forEach(license => { license['copy_url'] = licenceFilePath; }); } if (scriptSrc['path'].indexOf('://') === -1 && !scriptSrc['path'].startsWith('/')) { scriptSrcData['src_url'] = stats.publicPath + path.join(this.weblabelsDirName, scriptSrc['id']); } else { scriptSrcData['src_url'] = scriptSrc['path']; } this.addSrcFileDataToJsChunkAsset(script, scriptSrcData); 
this.copyFileToOutputPath(scriptSrc['path']); } } } for (const srcFiles of Object.values(this.chunkJsAssetToSrcFiles)) { srcFiles.sort((a, b) => a.id.localeCompare(b.id)); } if (this.outputType === 'json') { // generate the jslicenses.json file const weblabelsData = JSON.stringify(this.chunkJsAssetToSrcFiles); const weblabelsJsonFile = path.join(this.weblabelsOutputDir, 'jslicenses.json'); fs.writeFileSync(weblabelsJsonFile, weblabelsData); } else { // generate the jslicenses.html file const weblabelsPageFile = path.join(this.weblabelsOutputDir, 'jslicenses.html'); ejs.renderFile(path.join(__dirname, 'jslicenses.ejs'), {'jslicenses_data': this.chunkJsAssetToSrcFiles}, {'rmWhitespace': true}, (e, str) => { fs.writeFileSync(weblabelsPageFile, str); }); } }); } addSrcFileDataToJsChunkAsset(chunkJsAsset, srcFileData) { if (!this.srcIdsInChunkJsAsset[chunkJsAsset].has(srcFileData['id'])) { this.chunkJsAssetToSrcFiles[chunkJsAsset].push(srcFileData); this.srcIdsInChunkJsAsset[chunkJsAsset].add(srcFileData['id']); } } cleanupPath(moduleFilePath) { return moduleFilePath.replace(/^[./]*node_modules\//, '').replace(/^.\//, ''); } findPackageJsonPath(srcFilePath) { const pathSplit = srcFilePath.split('/'); let packageJsonPath; for (let i = 3; i < pathSplit.length; ++i) { packageJsonPath = path.join(...pathSplit.slice(0, i), 'package.json'); if (fs.existsSync(packageJsonPath)) { break; } } return packageJsonPath; } findLicenseFile(packageJsonDir) { if (!this.packageLicenseFile.hasOwnProperty(packageJsonDir)) { let foundLicenseFile; fs.readdirSync(packageJsonDir).forEach(file => { if (foundLicenseFile) { return; } if (file.toLowerCase().startsWith('license')) { foundLicenseFile = path.join(packageJsonDir, file); } }); this.packageLicenseFile[packageJsonDir] = foundLicenseFile; } return this.packageLicenseFile[packageJsonDir]; } copyLicenseFile(licenseFilePath) { let licenseCopyPath = ''; if (licenseFilePath && fs.existsSync(licenseFilePath)) { let ext = ''; // add a .txt extension in order to serve license file with text/plain // content type to client browsers if (licenseFilePath.toLowerCase().indexOf('license.') === -1) { ext = '.txt'; } this.copyFileToOutputPath(licenseFilePath, ext); licenseFilePath = this.cleanupPath(licenseFilePath); licenseCopyPath = this.stats.publicPath + path.join(this.weblabelsDirName, licenseFilePath + ext); } return licenseCopyPath; } parsePackageJson(packageJsonPath) { if (!this.packageJsonCache.hasOwnProperty(packageJsonPath)) { const packageJsonStr = fs.readFileSync(packageJsonPath).toString('utf8'); this.packageJsonCache[packageJsonPath] = JSON.parse(packageJsonStr); } return this.packageJsonCache[packageJsonPath]; } parseSpdxLicenseExpression(spdxLicenseExpression, context) { let parsedLicense; try { parsedLicense = spdxParse(spdxLicenseExpression); if (spdxLicenseExpression.indexOf('AND') !== -1) { this.logger.warn(`The SPDX license expression '${spdxLicenseExpression}' associated to ${context} ` + 'contains an AND operator, this is currently not properly handled and erroneous ' + 'licenses information may be provided to LibreJS'); } } catch (e) { this.logger.warn(`Unable to parse the SPDX license expression '${spdxLicenseExpression}' associated to ${context}.`); this.logger.warn('Some generated JavaScript assets may be blocked by LibreJS due to missing license information.'); parsedLicense = {'license': spdxLicenseExpression}; } return parsedLicense; } spdxToWebLabelsLicense(spdxLicenceId) { for (let i = 0; i < spdxLicensesMapping.length; ++i) { if 
(spdxLicensesMapping[i]['spdx_ids'].indexOf(spdxLicenceId) !== -1) { const licenseData = Object.assign({}, spdxLicensesMapping[i]); delete licenseData['spdx_ids']; delete licenseData['magnet_link']; licenseData['copy_url'] = ''; return licenseData; } } this.logger.warn(`Unable to associate the SPDX license identifier '${spdxLicenceId}' to a LibreJS supported license.`); this.logger.warn('Some generated JavaScript assets may be blocked by LibreJS due to missing license information.'); return { 'name': spdxLicenceId, 'url': '', 'copy_url': '' }; } spdxToWebLabelsLicenses(spdxLicenses) { // This method simply extracts all referenced licenses in the SPDX expression // regardless of their combinations. // TODO: Handle licenses combination properly once LibreJS has a spec for it. let ret = []; if (spdxLicenses.hasOwnProperty('license')) { ret.push(this.spdxToWebLabelsLicense(spdxLicenses['license'])); } else if (spdxLicenses.hasOwnProperty('left')) { if (spdxLicenses['left'].hasOwnProperty('license')) { const licenseData = this.spdxToWebLabelsLicense(spdxLicenses['left']['license']); ret.push(licenseData); } else { ret = ret.concat(this.spdxToWebLabelsLicenses(spdxLicenses['left'])); } ret = ret.concat(this.spdxToWebLabelsLicenses(spdxLicenses['right'])); } return ret; } extractLicenseInformation(packageJson) { let spdxLicenseExpression; if (packageJson.hasOwnProperty('license')) { spdxLicenseExpression = packageJson['license']; } else if (packageJson.hasOwnProperty('licenses')) { // for node packages using deprecated licenses property const licenses = packageJson['licenses']; if (Array.isArray(licenses)) { const l = []; licenses.forEach(license => { l.push(license['type']); }); spdxLicenseExpression = l.join(' OR '); } else { spdxLicenseExpression = licenses['type']; } } const parsedSpdxLicenses = this.parseSpdxLicenseExpression(spdxLicenseExpression, `module ${packageJson['name']}`); return this.spdxToWebLabelsLicenses(parsedSpdxLicenses); } copyFileToOutputPath(srcFilePath, ext = '') { if (this.copiedFiles.has(srcFilePath) || srcFilePath.indexOf('://') !== -1 || !fs.existsSync(srcFilePath)) { return; } let destPath = this.cleanupPath(srcFilePath); const destDir = path.join(this.weblabelsOutputDir, ...destPath.split('/').slice(0, -1)); this.recursiveMkdir(destDir); destPath = path.join(this.weblabelsOutputDir, destPath + ext); fs.copyFileSync(srcFilePath, destPath); this.copiedFiles.add(srcFilePath); } recursiveMkdir(destPath) { const destPathSplit = destPath.split('/'); for (let i = 1; i < destPathSplit.length; ++i) { const currentPath = path.join('/', ...destPathSplit.slice(0, i + 1)); if (!fs.existsSync(currentPath)) { fs.mkdirSync(currentPath); } } } }; module.exports = GenerateWebLabelsPlugin;
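A minimal sketch of how this plugin could be registered in a webpack configuration. Only the option names (outputDir, outputType, exclude, licenseOverride and its spdxLicenseExpression/licenseFilePath keys) come from the option handling above; the require path, the package names and the license values are illustrative assumptions, not part of this repository.

// webpack.config.js (illustrative sketch; paths and package names are assumptions)
const GenerateWebLabelsPlugin = require('./config/webpack-plugins/generate-weblabels-webpack-plugin');

module.exports = {
  // ... entry, output, module.rules ...
  plugins: [
    new GenerateWebLabelsPlugin({
      outputDir: 'jssources',   // folder receiving jslicenses.html/json and copied sources (default: 'jssources')
      outputType: 'json',       // 'html' (default) or 'json'
      exclude: ['some-private-package'],
      licenseOverride: {
        'vendored-lib': {
          spdxLicenseExpression: 'MIT',
          licenseFilePath: './vendor/vendored-lib/LICENSE'
        }
      }
    })
  ]
};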
PypiClean
/tb-rest-client-3.5.tar.gz/tb-rest-client-3.5/tb_rest_client/models/models_pe/edge_info.py
# Copyright 2023. ThingsBoard # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import pprint import re # noqa: F401 import six class EdgeInfo(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'additional_info': 'JsonNode', 'owner_id': 'EntityId', 'id': 'EdgeId', 'created_time': 'int', 'tenant_id': 'TenantId', 'customer_id': 'CustomerId', 'root_rule_chain_id': 'RuleChainId', 'name': 'str', 'type': 'str', 'label': 'str', 'routing_key': 'str', 'secret': 'str', 'edge_license_key': 'str', 'cloud_endpoint': 'str', 'owner_name': 'str', 'groups': 'list[EntityInfo]' } attribute_map = { 'additional_info': 'additionalInfo', 'owner_id': 'ownerId', 'id': 'id', 'created_time': 'createdTime', 'tenant_id': 'tenantId', 'customer_id': 'customerId', 'root_rule_chain_id': 'rootRuleChainId', 'name': 'name', 'type': 'type', 'label': 'label', 'routing_key': 'routingKey', 'secret': 'secret', 'edge_license_key': 'edgeLicenseKey', 'cloud_endpoint': 'cloudEndpoint', 'owner_name': 'ownerName', 'groups': 'groups' } def __init__(self, additional_info=None, owner_id=None, id=None, created_time=None, tenant_id=None, customer_id=None, root_rule_chain_id=None, name=None, type=None, label=None, routing_key=None, secret=None, edge_license_key=None, cloud_endpoint=None, owner_name=None, groups=None): # noqa: E501 """EdgeInfo - a model defined in Swagger""" # noqa: E501 self._additional_info = None self._owner_id = None self._id = None self._created_time = None self._tenant_id = None self._customer_id = None self._root_rule_chain_id = None self._name = None self._type = None self._label = None self._routing_key = None self._secret = None self._edge_license_key = None self._cloud_endpoint = None self._owner_name = None self._groups = None self.discriminator = None if additional_info is not None: self.additional_info = additional_info if owner_id is not None: self.owner_id = owner_id if id is not None: self.id = id if created_time is not None: self.created_time = created_time if tenant_id is not None: self.tenant_id = tenant_id if customer_id is not None: self.customer_id = customer_id if root_rule_chain_id is not None: self.root_rule_chain_id = root_rule_chain_id self.name = name self.type = type if label is not None: self.label = label self.routing_key = routing_key self.secret = secret self.edge_license_key = edge_license_key self.cloud_endpoint = cloud_endpoint if owner_name is not None: self.owner_name = owner_name if groups is not None: self.groups = groups @property def additional_info(self): """Gets the additional_info of this EdgeInfo. # noqa: E501 :return: The additional_info of this EdgeInfo. 
# noqa: E501 :rtype: JsonNode """ return self._additional_info @additional_info.setter def additional_info(self, additional_info): """Sets the additional_info of this EdgeInfo. :param additional_info: The additional_info of this EdgeInfo. # noqa: E501 :type: JsonNode """ self._additional_info = additional_info @property def owner_id(self): """Gets the owner_id of this EdgeInfo. # noqa: E501 :return: The owner_id of this EdgeInfo. # noqa: E501 :rtype: EntityId """ return self._owner_id @owner_id.setter def owner_id(self, owner_id): """Sets the owner_id of this EdgeInfo. :param owner_id: The owner_id of this EdgeInfo. # noqa: E501 :type: EntityId """ self._owner_id = owner_id @property def id(self): """Gets the id of this EdgeInfo. # noqa: E501 :return: The id of this EdgeInfo. # noqa: E501 :rtype: EdgeId """ return self._id @id.setter def id(self, id): """Sets the id of this EdgeInfo. :param id: The id of this EdgeInfo. # noqa: E501 :type: EdgeId """ self._id = id @property def created_time(self): """Gets the created_time of this EdgeInfo. # noqa: E501 Timestamp of the edge creation, in milliseconds # noqa: E501 :return: The created_time of this EdgeInfo. # noqa: E501 :rtype: int """ return self._created_time @created_time.setter def created_time(self, created_time): """Sets the created_time of this EdgeInfo. Timestamp of the edge creation, in milliseconds # noqa: E501 :param created_time: The created_time of this EdgeInfo. # noqa: E501 :type: int """ self._created_time = created_time @property def tenant_id(self): """Gets the tenant_id of this EdgeInfo. # noqa: E501 :return: The tenant_id of this EdgeInfo. # noqa: E501 :rtype: TenantId """ return self._tenant_id @tenant_id.setter def tenant_id(self, tenant_id): """Sets the tenant_id of this EdgeInfo. :param tenant_id: The tenant_id of this EdgeInfo. # noqa: E501 :type: TenantId """ self._tenant_id = tenant_id @property def customer_id(self): """Gets the customer_id of this EdgeInfo. # noqa: E501 :return: The customer_id of this EdgeInfo. # noqa: E501 :rtype: CustomerId """ return self._customer_id @customer_id.setter def customer_id(self, customer_id): """Sets the customer_id of this EdgeInfo. :param customer_id: The customer_id of this EdgeInfo. # noqa: E501 :type: CustomerId """ self._customer_id = customer_id @property def root_rule_chain_id(self): """Gets the root_rule_chain_id of this EdgeInfo. # noqa: E501 :return: The root_rule_chain_id of this EdgeInfo. # noqa: E501 :rtype: RuleChainId """ return self._root_rule_chain_id @root_rule_chain_id.setter def root_rule_chain_id(self, root_rule_chain_id): """Sets the root_rule_chain_id of this EdgeInfo. :param root_rule_chain_id: The root_rule_chain_id of this EdgeInfo. # noqa: E501 :type: RuleChainId """ self._root_rule_chain_id = root_rule_chain_id @property def name(self): """Gets the name of this EdgeInfo. # noqa: E501 Unique Edge Name in scope of Tenant # noqa: E501 :return: The name of this EdgeInfo. # noqa: E501 :rtype: str """ return self._name @name.setter def name(self, name): """Sets the name of this EdgeInfo. Unique Edge Name in scope of Tenant # noqa: E501 :param name: The name of this EdgeInfo. # noqa: E501 :type: str """ if name is None: raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 self._name = name @property def type(self): """Gets the type of this EdgeInfo. # noqa: E501 Edge type # noqa: E501 :return: The type of this EdgeInfo. 
# noqa: E501 :rtype: str """ return self._type @type.setter def type(self, type): """Sets the type of this EdgeInfo. Edge type # noqa: E501 :param type: The type of this EdgeInfo. # noqa: E501 :type: str """ if type is None: raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 self._type = type @property def label(self): """Gets the label of this EdgeInfo. # noqa: E501 Label that may be used in widgets # noqa: E501 :return: The label of this EdgeInfo. # noqa: E501 :rtype: str """ return self._label @label.setter def label(self, label): """Sets the label of this EdgeInfo. Label that may be used in widgets # noqa: E501 :param label: The label of this EdgeInfo. # noqa: E501 :type: str """ self._label = label @property def routing_key(self): """Gets the routing_key of this EdgeInfo. # noqa: E501 Edge routing key ('username') to authorize on cloud # noqa: E501 :return: The routing_key of this EdgeInfo. # noqa: E501 :rtype: str """ return self._routing_key @routing_key.setter def routing_key(self, routing_key): """Sets the routing_key of this EdgeInfo. Edge routing key ('username') to authorize on cloud # noqa: E501 :param routing_key: The routing_key of this EdgeInfo. # noqa: E501 :type: str """ if routing_key is None: raise ValueError("Invalid value for `routing_key`, must not be `None`") # noqa: E501 self._routing_key = routing_key @property def secret(self): """Gets the secret of this EdgeInfo. # noqa: E501 Edge secret ('password') to authorize on cloud # noqa: E501 :return: The secret of this EdgeInfo. # noqa: E501 :rtype: str """ return self._secret @secret.setter def secret(self, secret): """Sets the secret of this EdgeInfo. Edge secret ('password') to authorize on cloud # noqa: E501 :param secret: The secret of this EdgeInfo. # noqa: E501 :type: str """ if secret is None: raise ValueError("Invalid value for `secret`, must not be `None`") # noqa: E501 self._secret = secret @property def edge_license_key(self): """Gets the edge_license_key of this EdgeInfo. # noqa: E501 Edge license key obtained from license portal # noqa: E501 :return: The edge_license_key of this EdgeInfo. # noqa: E501 :rtype: str """ return self._edge_license_key @edge_license_key.setter def edge_license_key(self, edge_license_key): """Sets the edge_license_key of this EdgeInfo. Edge license key obtained from license portal # noqa: E501 :param edge_license_key: The edge_license_key of this EdgeInfo. # noqa: E501 :type: str """ if edge_license_key is None: raise ValueError("Invalid value for `edge_license_key`, must not be `None`") # noqa: E501 self._edge_license_key = edge_license_key @property def cloud_endpoint(self): """Gets the cloud_endpoint of this EdgeInfo. # noqa: E501 Edge uses this cloud URL to activate and periodically check it's license # noqa: E501 :return: The cloud_endpoint of this EdgeInfo. # noqa: E501 :rtype: str """ return self._cloud_endpoint @cloud_endpoint.setter def cloud_endpoint(self, cloud_endpoint): """Sets the cloud_endpoint of this EdgeInfo. Edge uses this cloud URL to activate and periodically check it's license # noqa: E501 :param cloud_endpoint: The cloud_endpoint of this EdgeInfo. # noqa: E501 :type: str """ if cloud_endpoint is None: raise ValueError("Invalid value for `cloud_endpoint`, must not be `None`") # noqa: E501 self._cloud_endpoint = cloud_endpoint @property def owner_name(self): """Gets the owner_name of this EdgeInfo. # noqa: E501 Owner name # noqa: E501 :return: The owner_name of this EdgeInfo. 
# noqa: E501 :rtype: str """ return self._owner_name @owner_name.setter def owner_name(self, owner_name): """Sets the owner_name of this EdgeInfo. Owner name # noqa: E501 :param owner_name: The owner_name of this EdgeInfo. # noqa: E501 :type: str """ self._owner_name = owner_name @property def groups(self): """Gets the groups of this EdgeInfo. # noqa: E501 Groups # noqa: E501 :return: The groups of this EdgeInfo. # noqa: E501 :rtype: list[EntityInfo] """ return self._groups @groups.setter def groups(self, groups): """Sets the groups of this EdgeInfo. Groups # noqa: E501 :param groups: The groups of this EdgeInfo. # noqa: E501 :type: list[EntityInfo] """ self._groups = groups def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value if issubclass(EdgeInfo, dict): for key, value in self.items(): result[key] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, EdgeInfo): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """Returns true if both objects are not equal""" return not self == other
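A minimal usage sketch for this generated model. The import path follows this file's location in the package; every field value below is made up for illustration and is not tied to a real ThingsBoard deployment.

# Illustrative sketch; values are invented, only the EdgeInfo API above is real.
from tb_rest_client.models.models_pe.edge_info import EdgeInfo

edge = EdgeInfo(
    name='Warehouse edge',
    type='default',
    label='warehouse-01',
    routing_key='f3a6e3de-0000-0000-0000-000000000000',  # edge 'username' on the cloud
    secret='s3cr3t',                                     # edge 'password' on the cloud
    edge_license_key='<edge license key>',
    cloud_endpoint='https://thingsboard.example.com',
)

# to_dict() returns a plain dict keyed by the python attribute names,
# and __repr__ pretty-prints that dict via pprint.
print(edge.to_dict()['routing_key'])
print(edge)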
PypiClean
/aiddl_core-0.3.1-py3-none-any.whl/aiddl_core/function/function_registry.py
from aiddl_core.representation.sym import Sym from aiddl_core.representation.var import Var from aiddl_core.representation.tuple import Tuple from aiddl_core.representation.list import List from aiddl_core.function.eval.type import TypeCheckFunction, GenericTypeConstructor from aiddl_core.function.function import InterfaceImplementationMixin, NamedFunction, LambdaFunction import aiddl_core.function as fun_uri DEF = Sym("#def") class FunctionRegistry: def __init__(self): self.functions = {} self.interfaces = {} self.interface_implementations = {} def add_function(self, name, f): self.functions[name] = f for k in self.interface_implementations.keys(): self.interface_implementations[k] = filter( name.__ne__, self.interface_implementations[k]) if isinstance(f, InterfaceImplementationMixin): uri = f.get_interface_uri() if uri not in self.interface_implementations.keys(): self.interface_implementations[uri] = [] self.interface_implementations[uri].append(name) def get_function(self, name): if isinstance(name, Tuple) \ and len(name) == 3 \ and name[0] == fun_uri.LAMBDA: return self.lambda_factory(name) if name in self.functions.keys(): return self.functions[name] return None def get_function_or_panic(self, name): if isinstance(name, Tuple) \ and len(name) == 3 \ and name[0] == fun_uri.LAMBDA: return self.lambda_factory(name) if name in self.functions.keys(): return self.functions[name] raise ValueError("Function not registered:", str(name)) def get_function_or_default(self, name, f_def): if isinstance(name, Tuple) \ and len(name) == 3 \ and name[0] == fun_uri.LAMBDA: return self.lambda_factory(name) if name in self.functions.keys(): return self.functions[name] raise ValueError(f_def) def get_interface_implementations(self, uri): if uri in self.interface_implementations: return List(self.interface_implementations[uri]) return List() def lambda_factory(self, n): x = n.get(1) f = n.get(2) e = self.functions[fun_uri.EVAL] return LambdaFunction(x, f, e) def has_function(self, name): return name in self.functions.keys() def get_registered_names(self): return list(self.functions.keys()) def load_container_interfaces(self, C): evaluator = self.get_function(fun_uri.EVAL) for m in C.get_module_names(): for e in C.get_matching_entries(m, Sym("#interface"), Var()): uri = evaluator(e.value[Sym("uri")]) interface_term = evaluator(e.value) self.interfaces[uri] = interface_term # def get_function_list(self, m, C): # L = [] # for e in C.get_matching_entries(m, Symbolic("#functions"), Variable()): # for t in e.get_value(): # L.append(t) # return List(L) # # def load_req_python_functions(self, C): # for m in C.get_module_names(): # functions = self.get_function_list(m, C) # C.get_entry(Symbolic("functions"), module=m) # # print("Functions:", functions) # if len(functions) > 0: # missing = False # for f in functions: # if not self.has_function(f[0]): # missing = True # break # if missing: # loader_mod = m + Symbolic("python") # if parser.is_known_module(loader_mod): # lu = parser.get_mod_file_lookup(parser.collect_aiddl_paths([])) # # evalutaor = self.get_function(EVAL) # parser.parse_internal(lu[loader_mod], C, self, ".") # # for e in C.get_matching_entries(loader_mod, Symbolic("#on-load"), Variable()): # load = e.get_value() # # if isinstance(load, Tuple): # evalutaor(load) # else: # for call in load: # evalutaor(call) # # # load_request = C.get_entry(Symbolic("load"), module=loader_mod) # # if load_request is not None: # # rHandler = RequestHandler(C, self) # # # rHandler.verbose = True # # 
rHandler.satisfy_request(load_request.get_value(), Symbolic("NIL")) # # else: # # print("Could not find loader module:", loader_mod) # for f in functions: # if not self.has_function(f[0]): # print("[Warning]", f[0], ": Missing python implementation") def load_type_functions(self, C): evaluator = self.get_function(fun_uri.EVAL) for m in C.get_module_names(): for e in C.get_matching_entries(m, Sym("#type"), Var()): if isinstance(e.name, Sym): uri = m + e.name evaluator.set_follow_references(True) #evaluator.set_verbose(True) type_def = evaluator(e.value) evaluator.set_follow_references(False) #evaluator.set_verbose(False) type_fun = TypeCheckFunction(type_def, evaluator) self.add_function(uri, type_fun) interface_term = evaluator(e.value) self.interfaces[uri] = interface_term # print("Loaded type:", uri, "with def", type_def) elif isinstance(e.name, Tuple): base_uri = m + e.name[0] evaluator.set_follow_references(True) type_def = evaluator(e.value) evaluator.set_follow_references(False) arg_list = [] for i in range(1, len(e.name)): arg_list.append(e.name[i]) if len(arg_list) == 1: gen_args = arg_list[0] else: gen_args = Tuple(arg_list) type_fun = GenericTypeConstructor(base_uri, gen_args, type_def, evaluator, self) self.add_function(base_uri, type_fun) def load_def(self, C): for m in C.get_module_names(): for e in C.get_matching_entries(m, DEF, Var()): if isinstance(e.name, Sym): uri = m + e.name f = NamedFunction(uri, e.value, self.functions[fun_uri.EVAL]) # print("Loading:", uri) else: uri = m + e.name[0] args = None if len(e.name) == 2: args = e.name[1] else: arg_list = [] for i in range(1, len(e.name)): arg_list.append(e.name[i]) args = Tuple(arg_list) # print("Loading:", uri) f = NamedFunction(uri, e.value, self.functions[fun_uri.EVAL], args=args) self.add_function(uri, f)
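A small, self-contained sketch of how the registry above is typically driven: register a callable under a Sym URI, then resolve it again. The URI and the toy callable are invented for illustration; only add_function, has_function and get_function_or_panic come from the class above.

# Illustrative sketch; the URI and the callable are made up.
from aiddl_core.representation.sym import Sym
from aiddl_core.function.function_registry import FunctionRegistry


class AddOne:
    """Toy function object standing in for a real AIDDL function implementation."""
    def __call__(self, x):
        return x + 1


freg = FunctionRegistry()
uri = Sym('org.example.eval.add-one')
freg.add_function(uri, AddOne())

assert freg.has_function(uri)
add_one = freg.get_function_or_panic(uri)
print(add_one(41))  # -> 42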
PypiClean
/makeapp-1.9.1.tar.gz/makeapp-1.9.1/docs/source/userconf.rst
User defined configuration
==========================

User defined configuration should be stored in the ``.makeapp`` (the leading dot is required) directory under the user's HOME directory::

    /home/librarian/.makeapp/

Thus the user can configure:

1. `makeapp` default settings that are used on rollouts;
2. application layouts, by providing skeleton templates.

.. note:: User defined configuration is automatically loaded on every ``makeapp`` command call unless overridden by command line switches.


User defined settings
---------------------

Settings are read by `makeapp` from the ``makeapp.conf`` file. This is a simple configuration file::

    [settings]
    author = The Librarian
    author_email = [email protected]
    license = bsd3cl
    url = https://github.discworld.wrld/librarian/{{ app_name }}
    vcs = git
    year = 2010-2013

Such a configuration simplifies application rollouts by sparing you redundant juggling of command line switches, so::

    makeapp new my_new_app /home/librarian/dev/my_new_app_env/ -d "My application." --author "The Librarian" --year "2010-2013"

could be::

    makeapp new my_new_app /home/librarian/dev/my_new_app_env/ -d "My application."

.. note:: You can also define other settings (even your own) to be used in skeleton templates.


User defined application layouts
--------------------------------

User defined application layouts are searched for in the ``app_templates`` directory under ``.makeapp``.

Let's create a skeleton template named ``cool``:

1. Create the ``cool`` directory::

    /home/librarian/.makeapp/app_templates/cool/

2. In the ``cool`` directory create a ``COOL.txt`` file with the desired contents::

    echo "You'd better be cool." > /home/librarian/.makeapp/app_templates/cool/COOL.txt

Now you can use this skeleton template to roll out your application (`-t`)::

    makeapp new my_new_app /home/librarian/dev/my_new_app_env/ -d "My application." -t cool

After such a call you'll have the default application structure provided by `makeapp`, extended with the files from ``cool``.

.. note:: You can provide more application layout flavors by combining templates: the `-t` switch accepts several comma-separated template names. Order matters.
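Settings from ``makeapp.conf`` can also appear inside skeleton template files as template variables (the configuration above already uses ``{{ app_name }}`` this way). As an illustration (the exact set of available variables depends on your `makeapp` version, so treat every name below except ``app_name`` as an assumption)::

    echo "{{ app_name }} by {{ author }}, {{ year }}" > /home/librarian/.makeapp/app_templates/cool/ABOUT.txt

Rolled out with ``-t cool``, the resulting ``ABOUT.txt`` would then contain the application name and the author settings substituted in.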
PypiClean
/azure_percept-0.0.13-cp38-cp38-manylinux_2_24_aarch64.whl/azure/iot/percept/extensions/ops/priorbox_clustered.py
import numpy as np

from mo.front.common.layout import get_width_dim, get_height_dim
from mo.front.extractor import attr_getter, bool_to_str
from mo.graph.graph import Node, Graph
from mo.ops.op import Op


class PriorBoxClusteredOp(Op):
    op = 'PriorBoxClustered'

    def __init__(self, graph: Graph, attrs: dict):
        mandatory_props = {
            'type': self.op,
            'op': self.op,
            'version': 'opset1',

            'in_ports_count': 2,
            'out_ports_count': 1,

            'infer': self.priorbox_clustered_infer,
            'type_infer': self.type_infer,

            'clip': True,
        }
        super().__init__(graph, mandatory_props, attrs)

    def supported_attrs(self):
        return [
            'width',
            'height',
            'flip',
            'clip',
            'variance',
            'img_size',
            'img_h',
            'img_w',
            'step',
            'step_h',
            'step_w',
            'offset'
        ]

    def backend_attrs(self):
        return [
            ('clip', lambda node: int(node.clip)),  # We need to convert this boolean attribute value to int to keep
            # forward compatibility with IE 2021.2
            'img_h',
            'img_w',
            'step',
            'step_h',
            'step_w',
            'offset',
            ('variance', lambda node: attr_getter(node, 'variance')),
            ('width', lambda node: attr_getter(node, 'width')),
            ('height', lambda node: attr_getter(node, 'height'))
        ]

    @staticmethod
    def type_infer(node):
        node.out_port(0).set_data_type(np.float32)

    @staticmethod
    def priorbox_clustered_infer(node: Node):
        layout = node.graph.graph['layout']
        data_shape = node.in_node(0).shape
        num_ratios = len(node.width)

        if node.has_and_set('V10_infer'):
            assert node.in_node(0).value is not None
            node.out_node(0).shape = np.array([2, np.prod(node.in_node(0).value) * num_ratios * 4], dtype=np.int64)
        else:
            res_prod = data_shape[get_height_dim(layout, 4)] * data_shape[get_width_dim(layout, 4)] * num_ratios * 4
            node.out_node(0).shape = np.array([1, 2, res_prod], dtype=np.int64)
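To make the shape inference above concrete, here is a stand-alone sketch of the arithmetic performed in the non-V10 branch for an NCHW input. The 19x19 feature map size and the three clustered widths are example values, not defaults of the operation.

import numpy as np

# Example values (assumptions): a 19x19 feature map and three clustered prior widths.
feature_h, feature_w = 19, 19
widths = [9.4, 25.1, 14.7]      # corresponds to node.width
num_ratios = len(widths)        # as in priorbox_clustered_infer

# Mirrors the non-V10 case: output shape is [1, 2, H * W * num_ratios * 4],
# with 4 coordinate values per generated prior box.
res_prod = feature_h * feature_w * num_ratios * 4
out_shape = np.array([1, 2, res_prod], dtype=np.int64)
print(out_shape)  # [   1    2 4332]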
PypiClean
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/utils/pipe_utils.py
import logging from sparknlp.annotator import * import nlu from nlu import Licenses from nlu.pipe.nlu_component import NluComponent from nlu.pipe.pipeline import NLUPipeline from nlu.pipe.utils.resolution.storage_ref_utils import StorageRefUtils from nlu.universe.atoms import JslAnnoId from nlu.universe.component_universes import ComponentUniverse, jsl_id_to_empty_component from nlu.universe.feature_node_ids import NLP_NODE_IDS, NLP_HC_NODE_IDS, OCR_NODE_IDS from nlu.universe.feature_universes import NLP_FEATURES from nlu.universe.logic_universes import NLP_LEVELS, AnnoTypes logger = logging.getLogger('nlu') from nlu.pipe.utils.component_utils import ComponentUtils from typing import List, Union from nlu.universe.annotator_class_universe import AnnoClassRef from nlu.utils.environment.env_utils import is_running_in_databricks import os import glob import json class PipeUtils: """Pipe Level logic operations and utils""" @staticmethod def update_bad_storage_refs(pipe: NLUPipeline): """ Some models have bad storage refs. The list of these bad models is defined by nlu.spellbook.Spellbook.bad_storage_refs. The correct storage ref is given by the resolving models storage ref defined by nlu.Spellbook.licensed_storage_ref_2_nlu_ref[pipe.lang][storage_ref]. Once the resolving model_anno_obj is loaded in the pipe, this method will take its storage ref and write it to the bad_storage_ref model_anno_obj defined by nlu.spellbook.Spellbook.bad_storage_refs. If storage ref is already updated, this method will leave the pipe unchanged. We only check for healthcare storage refs :param pipe: Pipe to update bad storage refs on :return: Pipe where each component_to_resolve has storage ref updated, if it was not already updated """ for bad_storage_ref_component in pipe.components: if not bad_storage_ref_component.loaded_from_pretrained_pipe and bad_storage_ref_component.has_storage_ref: storage_ref = StorageRefUtils.extract_storage_ref(bad_storage_ref_component) # After updating the storage ref it will not be in the licensed_storage_ref_2_nlu_ref mapping anymore, if storage_ref in nlu.spellbook.Spellbook.bad_storage_refs: # since its a bad storage ref, we can resolve its storage ref by checking licensed_storage_ref_2_nlu_ref if pipe.lang in nlu.Spellbook.licensed_storage_ref_2_nlu_ref.keys(): if storage_ref in nlu.Spellbook.licensed_storage_ref_2_nlu_ref[pipe.lang].keys(): storage_ref_resolver_nlu_ref = nlu.Spellbook.licensed_storage_ref_2_nlu_ref[pipe.lang][ storage_ref] for storage_resolver in pipe.components: if storage_resolver.nlu_ref == storage_ref_resolver_nlu_ref: # Update the storage ref of the bad_component to the storage ref of the resolving model_anno_obj according to licensed_storage_ref_2_nlu_ref resolving_storage_ref = StorageRefUtils.extract_storage_ref(storage_resolver) bad_storage_ref_component.model.set(bad_storage_ref_component.model.storageRef, resolving_storage_ref) return pipe @staticmethod def update_relation_extractor_models_storage_ref(pipe: NLUPipeline): # if provided, because the sometimes have unresolvable storage refs # we can find the actual storage ref only after its mapped is sresolved to an model_anno_obj defined by an nlp ref # If RelationExtractor is not loaded from a pretrained pipe we update its storage ref to the resolving models storage ref for relation_extractor_component in pipe.components: if relation_extractor_component.jsl_anno_class_id == NLP_HC_NODE_IDS.RELATION_EXTRACTION and not relation_extractor_component.loaded_from_pretrained_pipe: storage_ref = 
StorageRefUtils.extract_storage_ref(relation_extractor_component) # After updating the storage ref it will not be in the licensed_storage_ref_2_nlu_ref mapping anymore, # so we have to check here if it exists in the mapping before accessing it if pipe.lang in nlu.Spellbook.licensed_storage_ref_2_nlu_ref.keys(): if storage_ref in nlu.Spellbook.licensed_storage_ref_2_nlu_ref[pipe.lang].keys(): # We need to find a component_to_resolve in the pipeline which has this storage ref storage_ref_resolver_nlu_ref = nlu.Spellbook.licensed_storage_ref_2_nlu_ref[pipe.lang][ storage_ref] for storage_resolver in pipe.components: if storage_resolver.nlu_ref == storage_ref_resolver_nlu_ref: # Update the storage ref of the RL-Extractor to the storage ref of the resolving model_anno_obj according to licensed_storage_ref_2_nlu_ref resolving_storage_ref = StorageRefUtils.extract_storage_ref(storage_resolver) relation_extractor_component.model.set(relation_extractor_component.model.storageRef, resolving_storage_ref) return pipe @staticmethod def get_json_data_for_pipe_model_at_stage_number(pipe_path, stage_number_as_string): """Gets the json metadata from a model_anno_obj for a given base path at a specific stage index""" c_metadata_path = f'{pipe_path}/stages/{stage_number_as_string}_*/metadata/part-00000' c_metadata_path = glob.glob(f'{c_metadata_path}*')[0] with open(c_metadata_path, "r", encoding="utf8") as f: data = json.load(f) return data @staticmethod def get_json_data_for_pipe_model_at_stage_number_on_databricks(nlp_ref, lang, digit_str): """Gets the json metadata from a model_anno_obj for a given base path at a specific stage index on databricks""" import sparknlp spark = sparknlp.start() pipe_df = spark.read.json( f'dbfs:/root/cache_pretrained/{nlp_ref}_{lang}*/stages/{digit_str}_*/metadata/part-00000') data = pipe_df.toPandas().to_dict() data = {k: v[0] for k, v in data.items()} if 'inputCols' in data['paramMap'].keys(): data['paramMap']['inputCols'] = data['paramMap']['inputCols'].tolist() data return data @staticmethod def set_column_values_on_components_from_pretrained_pipe(component_list: List[NluComponent], nlp_ref, lang, path): """Since output/input cols cannot be fetched from Annotators via get input/output col reliably, we must check annotator data to find them Expects a list of NLU Component objects which all stem from the same pipeline defined by nlp_ref """ if path: pipe_path = path else: pipe_path = os.path.expanduser('~') + '/cache_pretrained/' + f'{nlp_ref}_{lang}' # We do not need to check for Spark Version, since cols should match across versions pipe_path = glob.glob(f'{pipe_path}*') if len(pipe_path) == 0: # try databricks env path if is_running_in_databricks(): pipe_path = [f'dbfs:/root/cache_pretrained/{nlp_ref}_{lang}'] else: raise FileNotFoundError(f"Could not find downloaded Pipeline at path={pipe_path}") pipe_path = pipe_path[0] if not os.path.exists(pipe_path) and not is_running_in_databricks(): raise FileNotFoundError(f"Could not find downloaded Pipeline at path={pipe_path}") # Find HDD location of component_list and read out input/output cols digits_num = len(str(len(component_list))) digit_str = '0' * digits_num digit_cur = 0 for c in component_list: model_name = c.model.uid.split('_')[0] if is_running_in_databricks(): data = PipeUtils.get_json_data_for_pipe_model_at_stage_number_on_databricks(nlp_ref, lang, digit_str) else: data = PipeUtils.get_json_data_for_pipe_model_at_stage_number(pipe_path, digit_str) if 'inputCols' in data['paramMap'].keys(): inp = 
data['paramMap']['inputCols'] c.model.setInputCols(inp) else: inp = data['paramMap']['inputCol'] c.model.setInputCol(inp) if 'outputCol' in data['paramMap'].keys(): out = data['paramMap']['outputCol'] else: # Sometimes paramMap is missing outputCol, so we have to use this hack if model_name == 'DocumentAssembler': out = 'document' elif model_name == 'Finisher': out = 'finished' else: out = c.model.uid.split('_')[0] + '_out' c.spark_input_column_names = inp if isinstance(inp, List) else [inp] c.spark_output_column_names = [out] if model_name != 'Finisher': # finisher dynamically generates cols from input cols 4 c.model.setOutputCol(out) if hasattr(c.model, 'setOutputCol') else c.model.setOutputCols(out) digit_cur += 1 digit_str = str(digit_cur) while len(digit_str) < digits_num: digit_str = '0' + digit_str return component_list @staticmethod def is_trainable_pipe(pipe: NLUPipeline): """Check if component_list is trainable""" for c in pipe.components: if c.trainable: return True return False @staticmethod def enforece_AT_embedding_provider_output_col_name_schema_for_list_of_components(pipe_list: List[NluComponent]): """For every embedding provider, enforce that their output col is named <pipe_prediction_output_level>@storage_ref for output_levels word,chunk,sentence aka document , TODO update the classifier models swell i.e. word_embed@elmo or sentence_embed@elmo etc. """ for c in pipe_list: if ComponentUtils.is_embedding_provider(c): level_AT_ref = ComponentUtils.extract_storage_ref_AT_notation_for_embeds(c, 'output') c.out_types = [level_AT_ref] c.info.spark_output_column_names = [level_AT_ref] c.model.setOutputCol(level_AT_ref[0]) return pipe_list @staticmethod def enforce_AT_schema_on_pipeline_and_add_NER_converter(pipe: NLUPipeline): """Enforces the AT naming schema on all column names and add missing NER converters""" return PipeUtils.enforce_AT_schema_on_NER_processors_and_add_missing_NER_converters( PipeUtils.enforce_AT_schema_on_embedding_processors(pipe)) @staticmethod def enforce_AT_schema_on_NER_processors_and_add_missing_NER_converters(pipe: NLUPipeline): """For every NER provider and consumer, enforce that their output col is named <pipe_prediction_output_level>@storage_ref for output_levels word,chunk,sentence aka document , i.e. word_embed@elmo or sentence_embed@elmo etc. We also add NER converters for every NER model_anno_obj that no Converter converting its inputs In addition, returns the pipeline with missing NER converters added, for every NER model_anno_obj. The converters transform the IOB schema in a merged and more usable form for downstream tasks 1. Find a NER model_anno_obj in component_list 2. Find a NER converter feeding from it, if there is None, create one. 3. Generate name with Identifier <ner-iob>@<nlu_ref_identifier> and <entities>@<nlu_ref_identifier> 3.1 Update NER Models output to <ner-iob>@<nlu_ref_identifier> 3.2 Update NER Converter input to <ner-iob>@<nlu_ref_identifier> 3.3 Update NER Converter output to <entities>@<nlu_ref_identifier> 4. Update every Component that feeds from the NER converter (i.e. Resolver etc.) includes TOKEN-CLASSIFIER-TRANSFORMER models which usually output NER format """ new_converters = [] for c in pipe.components: if c.loaded_from_pretrained_pipe: # Leave pretrained component_list models untouched new_converters.append(c) continue # TRANSFORMER_TOKEN_CLASSIFIER might be a NER provider. 
Regardless, No ner-Conversion will be performed # because it will not return NER IOB if ComponentUtils.is_NER_provider(c): if c.type == AnnoTypes.TRANSFORMER_TOKEN_CLASSIFIER and not ComponentUtils.is_NER_IOB_token_classifier( c): continue output_NER_col = ComponentUtils.extract_NER_col(c, 'output') converter_to_update = None for other_c in pipe.components: if output_NER_col in other_c.spark_input_column_names and ComponentUtils.is_NER_converter(other_c): converter_to_update = other_c ner_identifier = ComponentUtils.get_nlu_ref_identifier(c) if converter_to_update is None: if c.license == Licenses.hc: converter_to_update = jsl_id_to_empty_component(NLP_HC_NODE_IDS.NER_CONVERTER_INTERNAL) converter_to_update.set_metadata(converter_to_update.get_default_model(), NLP_HC_NODE_IDS.NER_CONVERTER_INTERNAL, NLP_HC_NODE_IDS.NER_CONVERTER_INTERNAL, 'xx', False, Licenses.hc) else: converter_to_update = jsl_id_to_empty_component(NLP_NODE_IDS.NER_CONVERTER) converter_to_update.set_metadata(converter_to_update.get_default_model(), NLP_NODE_IDS.NER_CONVERTER, NLP_NODE_IDS.NER_CONVERTER, 'xx', False, Licenses.open_source) new_converters.append(converter_to_update) converter_to_update.nlu_ref = f'ner_converter.{c.nlu_ref}' # 3. generate new col names new_NER_AT_ref = output_NER_col if '@' not in output_NER_col: new_NER_AT_ref = output_NER_col + '@' + ner_identifier new_NER_converter_AT_ref = 'entities' + '@' + ner_identifier # 3.1 upate NER model_anno_obj outputs c.spark_output_column_names = [new_NER_AT_ref] c.model.setOutputCol(new_NER_AT_ref) # 3.2 update converter inputs old_ner_input_col = ComponentUtils.extract_NER_converter_col(converter_to_update, 'input') if old_ner_input_col in converter_to_update.spark_input_column_names: converter_to_update.spark_input_column_names.remove(old_ner_input_col) else: converter_to_update.spark_input_column_names.pop() # if old_ner_input_col in converter_to_update.spark_input_column_names: # converter_to_update.spark_input_column_names.remove(old_ner_input_col) # else: # converter_to_update.spark_input_column_names.pop() converter_to_update.spark_input_column_names.append(new_NER_AT_ref) converter_to_update.model.setInputCols(converter_to_update.spark_input_column_names) # 3.3 update converter outputs converter_to_update.spark_output_column_names = [new_NER_converter_AT_ref] converter_to_update.model.setOutputCol(new_NER_converter_AT_ref) ## todo improve, this causes the first ner producer to feed to all ner-cosnuners. All other ner-producers will be ignored by ner-consumers,w ithouth special syntax or manual configs --> Chunk merger ##4. Update all NER consumers input columns, i.e. Resolver, Relation, etc.. for conversion_consumer in pipe.components: if NLP_FEATURES.NAMED_ENTITY_CONVERTED in conversion_consumer.in_types: conversion_consumer.spark_input_column_names.remove(NLP_FEATURES.NAMED_ENTITY_CONVERTED) conversion_consumer.spark_input_column_names.append(new_NER_converter_AT_ref) # Add new converters to component_list for conv in new_converters: if conv.license == Licenses.hc: pipe.add(conv, name_to_add=f'chunk_converter_licensed@{conv.spark_output_column_names[0].split("@")[0]}') else: pipe.add(conv, name_to_add=f'chunk_converter@{conv.spark_output_column_names[0].split("@")[0]}') return pipe @staticmethod def enforce_AT_schema_on_embedding_processors(pipe: NLUPipeline): """For every embedding provider and consumer, enforce that their output col is named <pipe_prediction_output_level>@storage_ref for output_levels word,chunk,sentence aka document , i.e. 
word_embed@elmo or sentence_embed@elmo etc. """ for c in pipe.components: # Leave pretrained component_list models untouched if c.loaded_from_pretrained_pipe: continue if ComponentUtils.is_embedding_provider(c): if '@' not in c.spark_output_column_names[0]: new_embed_AT_ref = ComponentUtils.extract_storage_ref_AT_notation_for_embeds(c, 'output') c.spark_output_column_names = [new_embed_AT_ref] c.model.setOutputCol(new_embed_AT_ref) if ComponentUtils.is_embedding_consumer(c): input_embed_col = ComponentUtils.extract_embed_col(c) if '@' not in input_embed_col: # TODO set storage ref for traianble model_anno_obj? new_embed_AT_ref = ComponentUtils.extract_storage_ref_AT_notation_for_embeds(c, 'input') c.spark_input_column_names.remove(input_embed_col) c.spark_input_column_names.append(new_embed_AT_ref) c.model.setInputCols(c.spark_input_column_names) return pipe @staticmethod def enforce_NLU_columns_to_NLP_columns(pipe: NLUPipeline): """for every component_to_resolve, set its inputs and outputs to the ones configured on the NLU component_to_resolve.""" # These anno have no standardized setInputCol or it should not be configured blacklisted = [NLP_NODE_IDS.DOCUMENT_ASSEMBLER] for c in pipe.components: if c.name == OCR_NODE_IDS.VISUAL_DOCUMENT_CLASSIFIER: c.model.setLabelCol(c.spark_output_column_names[0]) c.model.setConfidenceCol(c.spark_output_column_names[1]) continue if c.loaded_from_pretrained_pipe: continue if c.name in blacklisted: continue if hasattr(c.model, 'setOutputCol'): c.model.setOutputCol(c.spark_output_column_names[0]) else: c.model.setOutputCols(c.spark_output_column_names) if hasattr(c.model, 'setInputCols'): c.model.setInputCols(c.spark_input_column_names) else: # Some OCR Annotators only have one input and thus only setInputCol method but not setInputCols c.model.setInputCol(c.spark_input_column_names[0]) return pipe @staticmethod def is_converter_component_resolution_reference(reference: str) -> bool: if 'chunk_emb' in reference: return True @staticmethod def configure_component_output_levels_to_sentence(pipe: NLUPipeline): ''' Configure component_list components to output level document. Substitute every occurrence of <document> to <sentence> for every component_to_resolve that feeds from <document :param pipe: component_list to be configured :return: configured component_list ''' logger.info('Configuring components to sentence level') for c in pipe.components: # update in/out spark cols if c.loaded_from_pretrained_pipe: continue if NLP_FEATURES.DOCUMENT in c.spark_input_column_names and NLP_FEATURES.SENTENCE not in c.spark_input_column_names and NLP_FEATURES.SENTENCE not in c.spark_output_column_names: logger.info(f"Configuring C={c.name} of Type={type(c.model)} to Sentence Level") c.spark_input_column_names.remove(NLP_FEATURES.DOCUMENT) c.spark_input_column_names.append(NLP_FEATURES.SENTENCE) c.model.setInputCols(c.spark_input_column_names) if 'input_dependent' in c.output_level: c.output_level = NLP_LEVELS.SENTENCE # update in/out col types if NLP_FEATURES.DOCUMENT in c.in_types and NLP_FEATURES.SENTENCE not in c.in_types and NLP_FEATURES.SENTENCE not in c.out_types: c.in_types.remove(NLP_FEATURES.DOCUMENT) c.in_types.append(NLP_FEATURES.SENTENCE) return pipe.components @staticmethod def configure_component_output_levels_to_document(pipe: NLUPipeline): ''' Configure component_list components to output level document. 
Substitute every occurence of <sentence> to <document> for every component_to_resolve that feeds from <sentence> :param pipe: component_list to be configured :return: configured component_list coonents only ''' logger.info('Configuring components to document level') for c in pipe.components: if c.loaded_from_pretrained_pipe: continue # Update in/out spark cols if NLP_FEATURES.SENTENCE in c.spark_input_column_names and NLP_FEATURES.DOCUMENT not in c.spark_input_column_names and NLP_FEATURES.DOCUMENT not in c.spark_output_column_names: logger.info(f"Configuring C={c.name} to document output level") c.spark_input_column_names.remove(NLP_FEATURES.SENTENCE) c.spark_input_column_names.append(NLP_FEATURES.DOCUMENT) c.model.setInputCols(c.spark_input_column_names) if 'input_dependent' in c.output_level: c.output_level = NLP_LEVELS.DOCUMENT # Update in/out col types if NLP_FEATURES.SENTENCE in c.in_types and NLP_FEATURES.DOCUMENT not in c.in_types and NLP_FEATURES.DOCUMENT not in c.out_types: c.in_types.remove(NLP_FEATURES.SENTENCE) c.in_types.append(NLP_FEATURES.DOCUMENT) return pipe.components @staticmethod def has_component_with_id(pipe: NLUPipeline, ids: Union[JslAnnoId, List[JslAnnoId]]): """Check for NLUPipeline if it contains component with id """ ids = ids if isinstance(ids, list) else [ids] for c in pipe.components: if c.name in ids: return True return False @staticmethod def has_sentence_detector(pipe: NLUPipeline): """Check for NLUPipeline if it contains sentence detector""" for c in pipe.components: if isinstance(c.model, (SentenceDetectorDLModel, SentenceDetector, SentenceDetectorDLApproach)): return True return False @staticmethod def has_document_assembler(pipe: NLUPipeline): return PipeUtils.has_component_with_id(pipe, NLP_NODE_IDS.DOCUMENT_ASSEMBLER) @staticmethod def has_table_extractor(pipe: NLUPipeline): """Check for NLUPipieline if it contains any table extracting OCR component""" return PipeUtils.has_component_with_id(pipe, [OCR_NODE_IDS.PDF2TEXT_TABLE, OCR_NODE_IDS.PPT2TEXT_TABLE, OCR_NODE_IDS.DOC2TEXT_TABLE, OCR_NODE_IDS.IMAGE_TABLE_DETECTOR, ]) @staticmethod def get_component_idx_by_id(pipe: NLUPipeline, node_id: JslAnnoId): """Find first occurrence of component in pipe by ID and returns index """ for i, c in enumerate(pipe.components): if c.name == node_id: return i raise Exception(f'Could not find component {node_id} in pipe {pipe}') @staticmethod def add_tokenizer_to_pipe_if_missing(pipe: NLUPipeline): """add tokenizer to pipe if it is missing :param pipe: pipe :return: Pipe with tokenizer if missing """ if PipeUtils.has_component_with_id(pipe, [NLP_NODE_IDS.TOKENIZER, NLP_NODE_IDS.TOKEN_ASSEMBLER, NLP_NODE_IDS.REGEX_TOKENIZER, NLP_NODE_IDS.RECURISVE_TOKENIZER, NLP_NODE_IDS.WORD_SEGMENTER]): return pipe from nlu.pipe.component_resolution import resolve_feature tokenizer = resolve_feature(NLP_FEATURES.TOKEN) tokenizer.spark_input_column_names = [pipe.component_output_level] tokenizer.spark_output_column_names = [NLP_FEATURES.TOKEN] tokenizer.model.setInputCols(pipe.component_output_level) tokenizer.model.setOutputCol(NLP_FEATURES.TOKEN) # Find the document/sentence component and add tokenizer right after that for i, c in enumerate(pipe.components): if pipe.component_output_level in c.spark_output_column_names: pipe.components.insert(i + 1, tokenizer) return pipe @staticmethod def configure_component_output_levels(pipe: NLUPipeline, new_output_level=''): ''' This method configures sentenceEmbeddings and Classifier components to output at a specific level. 
        Generally this substitutes all `sentence` columns with `document` and vice versa.
        Adds a SentenceDetector to the pipeline if none exists.
        This method is called the first time .predict() is called and every time the pipe_prediction_output_level changes.
        If pipe_prediction_output_level == document, sentence embeddings are fed the document col and classifiers receive the doc_embeds/doc_raw column, depending on whether the classifier works with or without embeddings.
        If pipe_prediction_output_level == sentence, sentence embeddings are fed the sentence col and classifiers receive the sentence_embeds/sentence_raw column, depending on whether the classifier works with or without embeddings. If the sentence detector is missing, one will be added.
        :param pipe: NLU pipeline
        :param new_output_level: The new output level to apply, either sentence or document
        :return: NLU pipeline, with all components' output levels configured to new_output_level
        '''
        if not PipeUtils.has_document_assembler(pipe):
            # When loaded from OCR, we might not have a DocumentAssembler in the pipe
            pipe.is_fitted = False
            document_assembler = ComponentUniverse.components[NLP_NODE_IDS.DOCUMENT_ASSEMBLER]()
            document_assembler.set_metadata(document_assembler.get_default_model(), 'document_assembler',
                                            'document_assembler', 'xx', False, Licenses.open_source)
            pipe.components.insert(0, document_assembler)

        if new_output_level == 'sentence':
            if not PipeUtils.has_sentence_detector(pipe):
                logger.info("Adding missing Sentence Detector")
                pipe.is_fitted = False
                sentence_detector = ComponentUniverse.components[NLP_NODE_IDS.SENTENCE_DETECTOR_DL]()
                sentence_detector.set_metadata(sentence_detector.get_default_model(), 'detect_sentence',
                                               'sentence_detector_dl', 'en', False, Licenses.open_source)
                insert_idx = PipeUtils.get_component_idx_by_id(pipe, NLP_NODE_IDS.DOCUMENT_ASSEMBLER)
                # insert right after the DocumentAssembler
                pipe.components.insert(insert_idx + 1, sentence_detector)
            return PipeUtils.configure_component_output_levels_to_sentence(pipe)

        elif new_output_level == 'document':
            return PipeUtils.configure_component_output_levels_to_document(pipe)

    @staticmethod
    def check_if_component_is_in_pipe(pipe: NLUPipeline, component_name_to_check, check_strong=True):
        """Check if a component_to_resolve with a given name is already in a component_list."""
        for c in pipe.components:
            if check_strong and component_name_to_check == c.info.name:
                return True
            elif not check_strong and component_name_to_check in c.info.name:
                return True
        return False

    @staticmethod
    def check_if_there_component_with_col_in_components(component_list, features, except_component):
        """For a given list of features and a list of components, check whether any component provides one of these features.
        If yes, return True, otherwise False.
        """
        for c in component_list:
            if c.out_types[0] != except_component.out_types[0]:
                for f in ComponentUtils.clean_irrelevant_features(c.info.spark_output_column_names, True):
                    if f in features:
                        return True
        return False

    @staticmethod
    def is_leaf_node(c, pipe: NLUPipeline) -> bool:
        """Check if a component_to_resolve is a leaf in the DAG.
        We verify by checking if any other_c is feeding from c.
        If yes, it is not a leaf. If nobody feeds from c, it's a leaf.
        """
        outputs = c.info.spark_output_column_names
        for other_c in pipe.components:
            if c is not other_c:
                for f in other_c.info.inputs:
                    if f in outputs:
                        # another component consumes one of c's output columns, so c is not a leaf
                        return False
        return True

    @staticmethod
    def clean_AT_storage_refs(pipe: NLUPipeline):
        """Removes AT notation from all columns.
        Useful to reset a component_list back to its default state."""
        for c in pipe.components:
            if c.info.loaded_from_pretrained_pipe:
                continue
            c.info.inputs = [f.split('@')[0] for f in c.info.inputs]
            c.out_types = [f.split('@')[0] for f in c.out_types]
            c.info.spark_input_column_names = [f.split('@')[0] for f in c.info.spark_input_column_names]
            c.info.spark_output_column_names = [f.split('@')[0] for f in c.info.spark_output_column_names]
            c.info.spark_input_column_names = c.info.inputs.copy()
            c.info.spark_output_column_names = c.out_types.copy()
        return pipe

    @staticmethod
    def rename_duplicate_cols(pipe: NLUPipeline):
        """Rename cols with duplicate names"""
        for i, c in enumerate(pipe.components):
            for other_c in pipe.components:
                if c is other_c:
                    continue
                if c.loaded_from_pretrained_pipe:
                    continue
                if c.spark_output_column_names[0] == other_c.spark_output_column_names[0]:
                    c.spark_output_column_names[0] = f'{c.spark_output_column_names[0]}_{str(i)}'
        return pipe

    @staticmethod
    def find_trainable_embed_consumer(pipe: NLUPipeline):
        """Find a trainable component_to_resolve which consumes embeddings.
        Returns the index of the component_to_resolve and the type of embedding if found, otherwise returns -1 and None."""
        for i, c in enumerate(pipe.components):
            if c.trainable and c.has_storage_ref:
                return pipe.components.index(c), ComponentUtils.extract_embed_col(c, 'input')
        return -1, None

    @staticmethod
    def remove_convertable_storage_refs(required_features_ref, conversion_candidates, provided_features_ref):
        """Remove required storage ref features if a conversion candidate already provides them, so that the
        storage ref provider will not be downloaded twice.
        """
        if len(conversion_candidates) == 0:
            return required_features_ref, conversion_candidates
        # ComponentUtils.extract_storage_ref_AT_notation_for_embeds
        for candidate in conversion_candidates:
            # candidate_at_storage_ref_feature = ComponentUtils.extract_storage_ref_AT_notation_for_embeds(
            #     candidate.component_candidate, 'output')
            if candidate.component_candidate is None:
                continue
            for feature in required_features_ref:
                # if feature not in provided_features_ref:  # TODO revisit this after deep test
                #     # Feature not yet manifested by creating the corresponding anno
                #     # Unless it is also a storage ref candidate. In this scenario, the Feature is manifested but the Converter is missing.
                #     # Remove the feature from requirements, since it is already there and will otherwise cause storage ref resolution to manifest again
                #     continue
                required_storage_ref = feature.split('@')[-1]
                if required_storage_ref == candidate.storage_ref:  # or candidate_at_storage_ref_feature == feature
                    # The feature is already provided, but not converted. We can remove it
                    required_features_ref.remove(feature)
        return required_features_ref, conversion_candidates

    @staticmethod
    def update_converter_storage_refs_and_cols(pipe: NLUPipeline, provided_features_ref, required_features_ref):
        """Storage ref of converters is initially an empty string, i.e. ''.
        This method checks if any convertable embeddings are provided; if yes, it updates the storage ref of the converter,
        updates the input/output columns with colname@storage_ref notation and marks it as resolved by removing it from the
        corresponding lists."""
        for c in pipe.components:
            if c.name in [NLP_NODE_IDS.SENTENCE_EMBEDDINGS_CONVERTER, NLP_NODE_IDS.CHUNK_EMBEDDINGS_CONVERTER]:
                # Check if there are candidates that feed the converter, any word embedding will work
                if c.storage_ref != '':
                    # If storage_ref is not '' then this converter is already fixed, nothing to do
                    continue
                for other_c in pipe.components:
                    if other_c.has_storage_ref and other_c.type == AnnoTypes.TOKEN_EMBEDDING:
                        # Get original embed cols
                        in_embed = ComponentUtils.extract_embed_col(c, 'input')
                        out_embed = ComponentUtils.extract_embed_col(c, 'output')
                        if len(in_embed.split('@')) == 2:
                            # Storage ref is already on the annotator, we don't need to fix this
                            continue
                        c.spark_output_column_names.remove(out_embed)
                        c.spark_input_column_names.remove(in_embed)
                        provided_features_ref.remove(out_embed + '@')
                        required_features_ref.remove(in_embed + '@')
                        storage_ref = StorageRefUtils.extract_storage_ref(other_c)
                        in_embed = in_embed + '@' + storage_ref
                        out_embed = out_embed + '@' + storage_ref
                        c.spark_output_column_names.append(out_embed)
                        c.spark_input_column_names.append(in_embed)
                        provided_features_ref.append(out_embed)
                        required_features_ref.append(in_embed)
                        c.storage_ref = storage_ref
        return provided_features_ref, required_features_ref

    @staticmethod
    def add_metadata_to_pipe(pipe: NLUPipeline):
        """Write metadata fields to the pipeline, for now only whether it contains OCR components or not.
        To be extended in the future.
        """
        py_class_to_anno_id = AnnoClassRef.get_ocr_pyclass_2_anno_id_dict()
        for c in pipe.components:
            # Check for OCR components
            if c.jsl_anno_py_class in py_class_to_anno_id.keys():
                pipe.contains_ocr_components = True
            # Check for licensed components
            if c.license in [Licenses.ocr, Licenses.hc]:
                pipe.has_licensed_components = True
            # Check for NLP components, i.e. any open source annotator
            if c.license == Licenses.open_source \
                    and c.name != NLP_NODE_IDS.WAV2VEC_FOR_CTC \
                    and c.name != NLP_NODE_IDS.HUBERT_FOR_CTC \
                    and c.name != NLP_NODE_IDS.AUDIO_ASSEMBLER:
                # TODO Table Assembler/VIT/ other non-text open source
                pipe.has_nlp_components = True
            if c.type == AnnoTypes.QUESTION_TABLE_ANSWERER:
                pipe.has_table_qa_models = True
            if c.type == AnnoTypes.CHUNK_MAPPER:
                pipe.prefer_light = True
            if c.type == AnnoTypes.QUESTION_SPAN_CLASSIFIER:
                pipe.has_span_classifiers = True
            if c.type == AnnoTypes.SPEECH_RECOGNIZER:
                pipe.contains_audio_components = True
            if c.type == AnnoTypes.IMAGE_CLASSIFICATION:
                pipe.contains_ocr_components = True
                pipe.has_nlp_components = False
            if c.jsl_anno_py_class == 'ImageAssembler':
                pipe.contains_ocr_components = True
        return pipe

    @staticmethod
    def replace_untrained_component_with_trained(nlu_pipe: NLUPipeline, spark_transformer_pipe):
        """Replace the untrained component in an NLU pipeline with its trained counterpart from the fitted Spark pipeline.
        :param nlu_pipe: NLU pipeline, which contains one untrained component
        :param spark_transformer_pipe: Spark Pipeline which contains the fitted version of the untrained component
        :return: NLU pipeline component list, where the untrained component is replaced with a trained one
        """
        # Go through the NLU pipe, find the untrained component and replace it with the trained one
        for i, trainable_c in enumerate(nlu_pipe.components):
            if trainable_c.trainable:
                # Construct a trained NLU component with the trained Spark model
                if trainable_c.license == Licenses.open_source:
                    trained_class_name = AnnoClassRef.JSL_anno2_py_class[trainable_c.trained_mirror_anno]
                    untrained_class_name = AnnoClassRef.JSL_anno2_py_class[trainable_c.jsl_anno_class_id]
                    trained_model = PipeUtils.get_model_of_class_from_spark_pipe(spark_transformer_pipe,
                                                                                 trained_class_name)
                    trained_component = jsl_id_to_empty_component(trainable_c.trained_mirror_anno).set_metadata(
                        trained_model, trainable_c.trained_mirror_anno, trainable_c.trained_mirror_anno,
                        nlu_pipe.lang, False, Licenses.open_source)
                elif trainable_c.license == Licenses.hc:
                    trained_class_name = AnnoClassRef.JSL_anno_HC_ref_2_py_class[trainable_c.trained_mirror_anno]
                    untrained_class_name = AnnoClassRef.JSL_anno_HC_ref_2_py_class[trainable_c.jsl_anno_class_id]
                    trained_model = PipeUtils.get_model_of_class_from_spark_pipe(spark_transformer_pipe,
                                                                                 trained_class_name)
                    trained_component = jsl_id_to_empty_component(trainable_c.trained_mirror_anno).set_metadata(
                        trained_model, trainable_c.trained_mirror_anno, trainable_c.trained_mirror_anno,
                        nlu_pipe.lang, False, Licenses.hc)
                # update col names on the new model_anno_obj
                trained_component.spark_input_column_names = trainable_c.spark_input_column_names
                trained_component.spark_output_column_names = trainable_c.spark_output_column_names
                trained_component.model.setInputCols(trained_component.spark_input_column_names)
                trained_component.model.setOutputCol(trained_component.spark_output_column_names[0])
                # Replace the component in the pipe
                nlu_pipe.components.remove(trainable_c)
                # nlu_pipe.components.insert(i, trained_component)
                # remove the component from the NLUPipeline dict keys and add the trained one
                pipe_key_to_delete = None
                for k in nlu_pipe.keys():
                    if nlu_pipe[k].__class__.__name__ == untrained_class_name:
                        pipe_key_to_delete = k
                del nlu_pipe[pipe_key_to_delete]
                # TODO for NER or other trainables, make sure we add at the right place!
                nlu_pipe.add(trained_component, idx=i)
        return nlu_pipe.components

    @staticmethod
    def get_model_of_class_from_spark_pipe(spark_transformer_pipe, class_name):
        for model in spark_transformer_pipe.stages:
            if model.__class__.__name__ == class_name:
                return model
        raise ValueError(f"Could not find model_anno_obj of requested class = {class_name}")

    @staticmethod
    def contains_t5_or_gpt(pipe: NLUPipeline):
        return PipeUtils.has_component_with_id(pipe, [NLP_NODE_IDS.GPT2, NLP_NODE_IDS.T5_TRANSFORMER])

    @staticmethod
    def add_sentence_detector_to_pipe_if_required(pipe: NLUPipeline):
        """
        1. For Table-QA the Question TAPAS col should originate from a doc type ->
           doc_question -> sent_question | (Context/Questions) -> Multi-Doc - => TAPAS -> doc_context -> assembled_table |
           Right after the Multi-Doc assembler we add a sentence detector.
        The Sentence Detector's input is doc_question, and we update TAPAS to take sent_question instead of doc_question
        :param pipe:
        """
        if not pipe.has_table_qa_models:
            return pipe
        PipeUtils.has_sentence_detector(pipe)
        # Create a Sentence Detector & set its input to document_question
        sent_detector = ComponentUniverse.components[NLP_NODE_IDS.SENTENCE_DETECTOR_DL]()
        sent_detector.set_metadata(sent_detector.get_default_model(), 'detect_sentence', 'sentence_detector_dl',
                                   'en', False, Licenses.open_source)
        sent_detector.set_input(str(NLP_FEATURES.DOCUMENT_QUESTION))
        # Insert the Sentence Detector right after the Multi-Doc assembler
        multi_doc_idx = PipeUtils.get_component_idx_by_id(pipe, NLP_NODE_IDS.MULTI_DOCUMENT_ASSEMBLER)
        pipe.components.insert(multi_doc_idx + 1, sent_detector)
        # Update TAPAS to use the sentence-detected question instead of doc_question
        pipe.components[PipeUtils.get_component_idx_by_id(pipe, NLP_NODE_IDS.TAPAS_FOR_QA)].set_input(
            [str(NLP_FEATURES.ASSEMBLED_TABULAR_DATA), str(NLP_FEATURES.SENTENCE)])
        return pipe
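
# --- Illustrative usage sketch (not part of the original nlu module) ---
# A minimal, hypothetical example of how the PipeUtils helpers above are typically
# chained when an NLUPipeline is prepared for prediction. The `nlu.load('sentiment')`
# call and the exact chaining order are assumptions for illustration; the library's
# real prediction path may differ.
if __name__ == '__main__':
    import nlu
    example_pipe = nlu.load('sentiment')  # assumed to return an NLUPipeline-like object
    # Make sure a tokenizer is present before any token-consuming annotator runs
    example_pipe = PipeUtils.add_tokenizer_to_pipe_if_missing(example_pipe)
    # Re-route document-level consumers to sentence level; a sentence detector is
    # inserted right after the DocumentAssembler if none exists yet
    example_pipe.components = PipeUtils.configure_component_output_levels(example_pipe, 'sentence')
    # Finally, push the NLU-side column names down onto the underlying Spark NLP annotators
    example_pipe = PipeUtils.enforce_NLU_columns_to_NLP_columns(example_pipe)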
PypiClean
/taskcc-alipay-sdk-python-3.3.398.tar.gz/taskcc-alipay-sdk-python-3.3.398/alipay/aop/api/domain/AlipayMarketingCashitemvoucherTemplateCreateModel.py
import json from alipay.aop.api.constant.ParamConstants import * class AlipayMarketingCashitemvoucherTemplateCreateModel(object): def __init__(self): self._amount = None self._brand_name = None self._discount = None self._floor_amount = None self._fund_account = None self._goods_ceiling_quantity = None self._goods_cover_image_id = None self._goods_detail_image_ids = None self._goods_id = None self._goods_info = None self._goods_name = None self._goods_origin_price = None self._notify_uri = None self._out_biz_no = None self._publish_end_time = None self._publish_start_time = None self._redirect_uri = None self._rule_conf = None self._special_price = None self._voucher_available_time = None self._voucher_description = None self._voucher_quantity = None self._voucher_type = None self._voucher_valid_period = None @property def amount(self): return self._amount @amount.setter def amount(self, value): self._amount = value @property def brand_name(self): return self._brand_name @brand_name.setter def brand_name(self, value): self._brand_name = value @property def discount(self): return self._discount @discount.setter def discount(self, value): self._discount = value @property def floor_amount(self): return self._floor_amount @floor_amount.setter def floor_amount(self, value): self._floor_amount = value @property def fund_account(self): return self._fund_account @fund_account.setter def fund_account(self, value): self._fund_account = value @property def goods_ceiling_quantity(self): return self._goods_ceiling_quantity @goods_ceiling_quantity.setter def goods_ceiling_quantity(self, value): self._goods_ceiling_quantity = value @property def goods_cover_image_id(self): return self._goods_cover_image_id @goods_cover_image_id.setter def goods_cover_image_id(self, value): self._goods_cover_image_id = value @property def goods_detail_image_ids(self): return self._goods_detail_image_ids @goods_detail_image_ids.setter def goods_detail_image_ids(self, value): self._goods_detail_image_ids = value @property def goods_id(self): return self._goods_id @goods_id.setter def goods_id(self, value): self._goods_id = value @property def goods_info(self): return self._goods_info @goods_info.setter def goods_info(self, value): self._goods_info = value @property def goods_name(self): return self._goods_name @goods_name.setter def goods_name(self, value): self._goods_name = value @property def goods_origin_price(self): return self._goods_origin_price @goods_origin_price.setter def goods_origin_price(self, value): self._goods_origin_price = value @property def notify_uri(self): return self._notify_uri @notify_uri.setter def notify_uri(self, value): self._notify_uri = value @property def out_biz_no(self): return self._out_biz_no @out_biz_no.setter def out_biz_no(self, value): self._out_biz_no = value @property def publish_end_time(self): return self._publish_end_time @publish_end_time.setter def publish_end_time(self, value): self._publish_end_time = value @property def publish_start_time(self): return self._publish_start_time @publish_start_time.setter def publish_start_time(self, value): self._publish_start_time = value @property def redirect_uri(self): return self._redirect_uri @redirect_uri.setter def redirect_uri(self, value): self._redirect_uri = value @property def rule_conf(self): return self._rule_conf @rule_conf.setter def rule_conf(self, value): self._rule_conf = value @property def special_price(self): return self._special_price @special_price.setter def special_price(self, value): self._special_price = value 
@property def voucher_available_time(self): return self._voucher_available_time @voucher_available_time.setter def voucher_available_time(self, value): self._voucher_available_time = value @property def voucher_description(self): return self._voucher_description @voucher_description.setter def voucher_description(self, value): self._voucher_description = value @property def voucher_quantity(self): return self._voucher_quantity @voucher_quantity.setter def voucher_quantity(self, value): self._voucher_quantity = value @property def voucher_type(self): return self._voucher_type @voucher_type.setter def voucher_type(self, value): self._voucher_type = value @property def voucher_valid_period(self): return self._voucher_valid_period @voucher_valid_period.setter def voucher_valid_period(self, value): self._voucher_valid_period = value def to_alipay_dict(self): params = dict() if self.amount: if hasattr(self.amount, 'to_alipay_dict'): params['amount'] = self.amount.to_alipay_dict() else: params['amount'] = self.amount if self.brand_name: if hasattr(self.brand_name, 'to_alipay_dict'): params['brand_name'] = self.brand_name.to_alipay_dict() else: params['brand_name'] = self.brand_name if self.discount: if hasattr(self.discount, 'to_alipay_dict'): params['discount'] = self.discount.to_alipay_dict() else: params['discount'] = self.discount if self.floor_amount: if hasattr(self.floor_amount, 'to_alipay_dict'): params['floor_amount'] = self.floor_amount.to_alipay_dict() else: params['floor_amount'] = self.floor_amount if self.fund_account: if hasattr(self.fund_account, 'to_alipay_dict'): params['fund_account'] = self.fund_account.to_alipay_dict() else: params['fund_account'] = self.fund_account if self.goods_ceiling_quantity: if hasattr(self.goods_ceiling_quantity, 'to_alipay_dict'): params['goods_ceiling_quantity'] = self.goods_ceiling_quantity.to_alipay_dict() else: params['goods_ceiling_quantity'] = self.goods_ceiling_quantity if self.goods_cover_image_id: if hasattr(self.goods_cover_image_id, 'to_alipay_dict'): params['goods_cover_image_id'] = self.goods_cover_image_id.to_alipay_dict() else: params['goods_cover_image_id'] = self.goods_cover_image_id if self.goods_detail_image_ids: if hasattr(self.goods_detail_image_ids, 'to_alipay_dict'): params['goods_detail_image_ids'] = self.goods_detail_image_ids.to_alipay_dict() else: params['goods_detail_image_ids'] = self.goods_detail_image_ids if self.goods_id: if hasattr(self.goods_id, 'to_alipay_dict'): params['goods_id'] = self.goods_id.to_alipay_dict() else: params['goods_id'] = self.goods_id if self.goods_info: if hasattr(self.goods_info, 'to_alipay_dict'): params['goods_info'] = self.goods_info.to_alipay_dict() else: params['goods_info'] = self.goods_info if self.goods_name: if hasattr(self.goods_name, 'to_alipay_dict'): params['goods_name'] = self.goods_name.to_alipay_dict() else: params['goods_name'] = self.goods_name if self.goods_origin_price: if hasattr(self.goods_origin_price, 'to_alipay_dict'): params['goods_origin_price'] = self.goods_origin_price.to_alipay_dict() else: params['goods_origin_price'] = self.goods_origin_price if self.notify_uri: if hasattr(self.notify_uri, 'to_alipay_dict'): params['notify_uri'] = self.notify_uri.to_alipay_dict() else: params['notify_uri'] = self.notify_uri if self.out_biz_no: if hasattr(self.out_biz_no, 'to_alipay_dict'): params['out_biz_no'] = self.out_biz_no.to_alipay_dict() else: params['out_biz_no'] = self.out_biz_no if self.publish_end_time: if hasattr(self.publish_end_time, 'to_alipay_dict'): 
params['publish_end_time'] = self.publish_end_time.to_alipay_dict() else: params['publish_end_time'] = self.publish_end_time if self.publish_start_time: if hasattr(self.publish_start_time, 'to_alipay_dict'): params['publish_start_time'] = self.publish_start_time.to_alipay_dict() else: params['publish_start_time'] = self.publish_start_time if self.redirect_uri: if hasattr(self.redirect_uri, 'to_alipay_dict'): params['redirect_uri'] = self.redirect_uri.to_alipay_dict() else: params['redirect_uri'] = self.redirect_uri if self.rule_conf: if hasattr(self.rule_conf, 'to_alipay_dict'): params['rule_conf'] = self.rule_conf.to_alipay_dict() else: params['rule_conf'] = self.rule_conf if self.special_price: if hasattr(self.special_price, 'to_alipay_dict'): params['special_price'] = self.special_price.to_alipay_dict() else: params['special_price'] = self.special_price if self.voucher_available_time: if hasattr(self.voucher_available_time, 'to_alipay_dict'): params['voucher_available_time'] = self.voucher_available_time.to_alipay_dict() else: params['voucher_available_time'] = self.voucher_available_time if self.voucher_description: if hasattr(self.voucher_description, 'to_alipay_dict'): params['voucher_description'] = self.voucher_description.to_alipay_dict() else: params['voucher_description'] = self.voucher_description if self.voucher_quantity: if hasattr(self.voucher_quantity, 'to_alipay_dict'): params['voucher_quantity'] = self.voucher_quantity.to_alipay_dict() else: params['voucher_quantity'] = self.voucher_quantity if self.voucher_type: if hasattr(self.voucher_type, 'to_alipay_dict'): params['voucher_type'] = self.voucher_type.to_alipay_dict() else: params['voucher_type'] = self.voucher_type if self.voucher_valid_period: if hasattr(self.voucher_valid_period, 'to_alipay_dict'): params['voucher_valid_period'] = self.voucher_valid_period.to_alipay_dict() else: params['voucher_valid_period'] = self.voucher_valid_period return params @staticmethod def from_alipay_dict(d): if not d: return None o = AlipayMarketingCashitemvoucherTemplateCreateModel() if 'amount' in d: o.amount = d['amount'] if 'brand_name' in d: o.brand_name = d['brand_name'] if 'discount' in d: o.discount = d['discount'] if 'floor_amount' in d: o.floor_amount = d['floor_amount'] if 'fund_account' in d: o.fund_account = d['fund_account'] if 'goods_ceiling_quantity' in d: o.goods_ceiling_quantity = d['goods_ceiling_quantity'] if 'goods_cover_image_id' in d: o.goods_cover_image_id = d['goods_cover_image_id'] if 'goods_detail_image_ids' in d: o.goods_detail_image_ids = d['goods_detail_image_ids'] if 'goods_id' in d: o.goods_id = d['goods_id'] if 'goods_info' in d: o.goods_info = d['goods_info'] if 'goods_name' in d: o.goods_name = d['goods_name'] if 'goods_origin_price' in d: o.goods_origin_price = d['goods_origin_price'] if 'notify_uri' in d: o.notify_uri = d['notify_uri'] if 'out_biz_no' in d: o.out_biz_no = d['out_biz_no'] if 'publish_end_time' in d: o.publish_end_time = d['publish_end_time'] if 'publish_start_time' in d: o.publish_start_time = d['publish_start_time'] if 'redirect_uri' in d: o.redirect_uri = d['redirect_uri'] if 'rule_conf' in d: o.rule_conf = d['rule_conf'] if 'special_price' in d: o.special_price = d['special_price'] if 'voucher_available_time' in d: o.voucher_available_time = d['voucher_available_time'] if 'voucher_description' in d: o.voucher_description = d['voucher_description'] if 'voucher_quantity' in d: o.voucher_quantity = d['voucher_quantity'] if 'voucher_type' in d: o.voucher_type = d['voucher_type'] if 
'voucher_valid_period' in d: o.voucher_valid_period = d['voucher_valid_period'] return o
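
# --- Illustrative usage sketch (not part of the original SDK file) ---
# A minimal round-trip example for the request model defined above: populate a few
# fields, serialize them with to_alipay_dict(), and rebuild an instance with
# from_alipay_dict(). The concrete field values are made up for illustration only.
if __name__ == '__main__':
    model = AlipayMarketingCashitemvoucherTemplateCreateModel()
    model.amount = '10.00'
    model.voucher_quantity = 100
    model.voucher_type = 'CASH_ITEM'
    params = model.to_alipay_dict()  # plain dict, ready to be JSON-encoded for the API call
    restored = AlipayMarketingCashitemvoucherTemplateCreateModel.from_alipay_dict(params)
    print(json.dumps(params), restored.voucher_quantity)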
PypiClean
/burp-ui-1.1.1.tar.gz/burp-ui-1.1.1/burpui/static/vendor/angular/angular.min.js
(function(w){'use strict';function oe(a){if(B(a))u(a.objectMaxDepth)&&(Mc.objectMaxDepth=Wb(a.objectMaxDepth)?a.objectMaxDepth:NaN);else return Mc}function Wb(a){return Y(a)&&0<a}function K(a,b){b=b||Error;return function(){var d=arguments[0],c;c="["+(a?a+":":"")+d+"] http://errors.angularjs.org/1.6.9/"+(a?a+"/":"")+d;for(d=1;d<arguments.length;d++){c=c+(1==d?"?":"&")+"p"+(d-1)+"=";var e=encodeURIComponent,f;f=arguments[d];f="function"==typeof f?f.toString().replace(/ \{[\s\S]*$/,""):"undefined"== typeof f?"undefined":"string"!=typeof f?JSON.stringify(f):f;c+=e(f)}return new b(c)}}function wa(a){if(null==a||Za(a))return!1;if(I(a)||E(a)||z&&a instanceof z)return!0;var b="length"in Object(a)&&a.length;return Y(b)&&(0<=b&&(b-1 in a||a instanceof Array)||"function"===typeof a.item)}function r(a,b,d){var c,e;if(a)if(C(a))for(c in a)"prototype"!==c&&"length"!==c&&"name"!==c&&a.hasOwnProperty(c)&&b.call(d,a[c],c,a);else if(I(a)||wa(a)){var f="object"!==typeof a;c=0;for(e=a.length;c<e;c++)(f||c in a)&&b.call(d,a[c],c,a)}else if(a.forEach&&a.forEach!==r)a.forEach(b,d,a);else if(Nc(a))for(c in a)b.call(d,a[c],c,a);else if("function"===typeof a.hasOwnProperty)for(c in a)a.hasOwnProperty(c)&&b.call(d,a[c],c,a);else for(c in a)ra.call(a,c)&&b.call(d,a[c],c,a);return a}function Oc(a,b,d){for(var c=Object.keys(a).sort(),e=0;e<c.length;e++)b.call(d,a[c[e]],c[e]);return c}function Xb(a){return function(b,d){a(d,b)}}function pe(){return++qb}function Yb(a,b,d){for(var c=a.$$hashKey,e=0,f=b.length;e<f;++e){var g= b[e];if(B(g)||C(g))for(var h=Object.keys(g),k=0,l=h.length;k<l;k++){var m=h[k],p=g[m];d&&B(p)?fa(p)?a[m]=new Date(p.valueOf()):$a(p)?a[m]=new RegExp(p):p.nodeName?a[m]=p.cloneNode(!0):Zb(p)?a[m]=p.clone():(B(a[m])||(a[m]=I(p)?[]:{}),Yb(a[m],[p],!0)):a[m]=p}}c?a.$$hashKey=c:delete a.$$hashKey;return a}function O(a){return Yb(a,xa.call(arguments,1),!1)}function qe(a){return Yb(a,xa.call(arguments,1),!0)}function Z(a){return parseInt(a,10)}function $b(a,b){return O(Object.create(a),b)}function D(){} function ab(a){return a}function la(a){return function(){return a}}function ac(a){return C(a.toString)&&a.toString!==ia}function x(a){return"undefined"===typeof a}function u(a){return"undefined"!==typeof a}function B(a){return null!==a&&"object"===typeof a}function Nc(a){return null!==a&&"object"===typeof a&&!Pc(a)}function E(a){return"string"===typeof a}function Y(a){return"number"===typeof a}function fa(a){return"[object Date]"===ia.call(a)}function bc(a){switch(ia.call(a)){case "[object Error]":return!0; case "[object Exception]":return!0;case "[object DOMException]":return!0;default:return a instanceof Error}}function C(a){return"function"===typeof a}function $a(a){return"[object RegExp]"===ia.call(a)}function Za(a){return a&&a.window===a}function bb(a){return a&&a.$evalAsync&&a.$watch}function Na(a){return"boolean"===typeof a}function re(a){return a&&Y(a.length)&&se.test(ia.call(a))}function Zb(a){return!(!a||!(a.nodeName||a.prop&&a.attr&&a.find))}function te(a){var b={};a=a.split(",");var d;for(d= 0;d<a.length;d++)b[a[d]]=!0;return b}function ya(a){return L(a.nodeName||a[0]&&a[0].nodeName)}function cb(a,b){var d=a.indexOf(b);0<=d&&a.splice(d,1);return d}function pa(a,b,d){function c(a,b,c){c--;if(0>c)return"...";var d=b.$$hashKey,g;if(I(a)){g=0;for(var f=a.length;g<f;g++)b.push(e(a[g],c))}else if(Nc(a))for(g in a)b[g]=e(a[g],c);else if(a&&"function"===typeof a.hasOwnProperty)for(g in a)a.hasOwnProperty(g)&&(b[g]=e(a[g],c));else for(g in a)ra.call(a,g)&&(b[g]=e(a[g],c));d?b.$$hashKey=d:delete 
b.$$hashKey; return b}function e(a,b){if(!B(a))return a;var d=g.indexOf(a);if(-1!==d)return h[d];if(Za(a)||bb(a))throw qa("cpws");var d=!1,e=f(a);void 0===e&&(e=I(a)?[]:Object.create(Pc(a)),d=!0);g.push(a);h.push(e);return d?c(a,e,b):e}function f(a){switch(ia.call(a)){case "[object Int8Array]":case "[object Int16Array]":case "[object Int32Array]":case "[object Float32Array]":case "[object Float64Array]":case "[object Uint8Array]":case "[object Uint8ClampedArray]":case "[object Uint16Array]":case "[object Uint32Array]":return new a.constructor(e(a.buffer), a.byteOffset,a.length);case "[object ArrayBuffer]":if(!a.slice){var b=new ArrayBuffer(a.byteLength);(new Uint8Array(b)).set(new Uint8Array(a));return b}return a.slice(0);case "[object Boolean]":case "[object Number]":case "[object String]":case "[object Date]":return new a.constructor(a.valueOf());case "[object RegExp]":return b=new RegExp(a.source,a.toString().match(/[^/]*$/)[0]),b.lastIndex=a.lastIndex,b;case "[object Blob]":return new a.constructor([a],{type:a.type})}if(C(a.cloneNode))return a.cloneNode(!0)} var g=[],h=[];d=Wb(d)?d:NaN;if(b){if(re(b)||"[object ArrayBuffer]"===ia.call(b))throw qa("cpta");if(a===b)throw qa("cpi");I(b)?b.length=0:r(b,function(a,c){"$$hashKey"!==c&&delete b[c]});g.push(a);h.push(b);return c(a,b,d)}return e(a,d)}function cc(a,b){return a===b||a!==a&&b!==b}function sa(a,b){if(a===b)return!0;if(null===a||null===b)return!1;if(a!==a&&b!==b)return!0;var d=typeof a,c;if(d===typeof b&&"object"===d)if(I(a)){if(!I(b))return!1;if((d=a.length)===b.length){for(c=0;c<d;c++)if(!sa(a[c], b[c]))return!1;return!0}}else{if(fa(a))return fa(b)?cc(a.getTime(),b.getTime()):!1;if($a(a))return $a(b)?a.toString()===b.toString():!1;if(bb(a)||bb(b)||Za(a)||Za(b)||I(b)||fa(b)||$a(b))return!1;d=S();for(c in a)if("$"!==c.charAt(0)&&!C(a[c])){if(!sa(a[c],b[c]))return!1;d[c]=!0}for(c in b)if(!(c in d)&&"$"!==c.charAt(0)&&u(b[c])&&!C(b[c]))return!1;return!0}return!1}function db(a,b,d){return a.concat(xa.call(b,d))}function Ra(a,b){var d=2<arguments.length?xa.call(arguments,2):[];return!C(b)||b instanceof RegExp?b:d.length?function(){return arguments.length?b.apply(a,db(d,arguments,0)):b.apply(a,d)}:function(){return arguments.length?b.apply(a,arguments):b.call(a)}}function Qc(a,b){var d=b;"string"===typeof a&&"$"===a.charAt(0)&&"$"===a.charAt(1)?d=void 0:Za(b)?d="$WINDOW":b&&w.document===b?d="$DOCUMENT":bb(b)&&(d="$SCOPE");return d}function eb(a,b){if(!x(a))return Y(b)||(b=b?2:null),JSON.stringify(a,Qc,b)}function Rc(a){return E(a)?JSON.parse(a):a}function Sc(a,b){a=a.replace(ue,"");var d=Date.parse("Jan 01, 1970 00:00:00 "+ a)/6E4;return U(d)?b:d}function dc(a,b,d){d=d?-1:1;var c=a.getTimezoneOffset();b=Sc(b,c);d*=b-c;a=new Date(a.getTime());a.setMinutes(a.getMinutes()+d);return a}function za(a){a=z(a).clone().empty();var b=z("<div>").append(a).html();try{return a[0].nodeType===Oa?L(b):b.match(/^(<[^>]+>)/)[1].replace(/^<([\w-]+)/,function(a,b){return"<"+L(b)})}catch(d){return L(b)}}function Tc(a){try{return decodeURIComponent(a)}catch(b){}}function ec(a){var b={};r((a||"").split("&"),function(a){var c,e,f;a&&(e=a=a.replace(/\+/g, "%20"),c=a.indexOf("="),-1!==c&&(e=a.substring(0,c),f=a.substring(c+1)),e=Tc(e),u(e)&&(f=u(f)?Tc(f):!0,ra.call(b,e)?I(b[e])?b[e].push(f):b[e]=[b[e],f]:b[e]=f))});return b}function fc(a){var b=[];r(a,function(a,c){I(a)?r(a,function(a){b.push(ja(c,!0)+(!0===a?"":"="+ja(a,!0)))}):b.push(ja(c,!0)+(!0===a?"":"="+ja(a,!0)))});return b.length?b.join("&"):""}function fb(a){return 
ja(a,!0).replace(/%26/gi,"&").replace(/%3D/gi,"=").replace(/%2B/gi,"+")}function ja(a,b){return encodeURIComponent(a).replace(/%40/gi, "@").replace(/%3A/gi,":").replace(/%24/g,"$").replace(/%2C/gi,",").replace(/%3B/gi,";").replace(/%20/g,b?"%20":"+")}function ve(a,b){var d,c,e=Ha.length;for(c=0;c<e;++c)if(d=Ha[c]+b,E(d=a.getAttribute(d)))return d;return null}function we(a,b){var d,c,e={};r(Ha,function(b){b+="app";!d&&a.hasAttribute&&a.hasAttribute(b)&&(d=a,c=a.getAttribute(b))});r(Ha,function(b){b+="app";var e;!d&&(e=a.querySelector("["+b.replace(":","\\:")+"]"))&&(d=e,c=e.getAttribute(b))});d&&(xe?(e.strictDi=null!==ve(d,"strict-di"), b(d,c?[c]:[],e)):w.console.error("AngularJS: disabling automatic bootstrap. <script> protocol indicates an extension, document.location.href does not match."))}function Uc(a,b,d){B(d)||(d={});d=O({strictDi:!1},d);var c=function(){a=z(a);if(a.injector()){var c=a[0]===w.document?"document":za(a);throw qa("btstrpd",c.replace(/</,"&lt;").replace(/>/,"&gt;"));}b=b||[];b.unshift(["$provide",function(b){b.value("$rootElement",a)}]);d.debugInfoEnabled&&b.push(["$compileProvider",function(a){a.debugInfoEnabled(!0)}]); b.unshift("ng");c=gb(b,d.strictDi);c.invoke(["$rootScope","$rootElement","$compile","$injector",function(a,b,c,d){a.$apply(function(){b.data("$injector",d);c(b)(a)})}]);return c},e=/^NG_ENABLE_DEBUG_INFO!/,f=/^NG_DEFER_BOOTSTRAP!/;w&&e.test(w.name)&&(d.debugInfoEnabled=!0,w.name=w.name.replace(e,""));if(w&&!f.test(w.name))return c();w.name=w.name.replace(f,"");$.resumeBootstrap=function(a){r(a,function(a){b.push(a)});return c()};C($.resumeDeferredBootstrap)&&$.resumeDeferredBootstrap()}function ye(){w.name= "NG_ENABLE_DEBUG_INFO!"+w.name;w.location.reload()}function ze(a){a=$.element(a).injector();if(!a)throw qa("test");return a.get("$$testability")}function Vc(a,b){b=b||"_";return a.replace(Ae,function(a,c){return(c?b:"")+a.toLowerCase()})}function Be(){var a;if(!Wc){var b=rb();(ma=x(b)?w.jQuery:b?w[b]:void 0)&&ma.fn.on?(z=ma,O(ma.fn,{scope:Sa.scope,isolateScope:Sa.isolateScope,controller:Sa.controller,injector:Sa.injector,inheritedData:Sa.inheritedData}),a=ma.cleanData,ma.cleanData=function(b){for(var c, e=0,f;null!=(f=b[e]);e++)(c=ma._data(f,"events"))&&c.$destroy&&ma(f).triggerHandler("$destroy");a(b)}):z=V;$.element=z;Wc=!0}}function hb(a,b,d){if(!a)throw qa("areq",b||"?",d||"required");return a}function sb(a,b,d){d&&I(a)&&(a=a[a.length-1]);hb(C(a),b,"not a function, got "+(a&&"object"===typeof a?a.constructor.name||"Object":typeof a));return a}function Ia(a,b){if("hasOwnProperty"===a)throw qa("badname",b);}function Xc(a,b,d){if(!b)return a;b=b.split(".");for(var c,e=a,f=b.length,g=0;g<f;g++)c= b[g],a&&(a=(e=a)[c]);return!d&&C(a)?Ra(e,a):a}function tb(a){for(var b=a[0],d=a[a.length-1],c,e=1;b!==d&&(b=b.nextSibling);e++)if(c||a[e]!==b)c||(c=z(xa.call(a,0,e))),c.push(b);return c||a}function S(){return Object.create(null)}function gc(a){if(null==a)return"";switch(typeof a){case "string":break;case "number":a=""+a;break;default:a=!ac(a)||I(a)||fa(a)?eb(a):a.toString()}return a}function Ce(a){function b(a,b,c){return a[b]||(a[b]=c())}var d=K("$injector"),c=K("ng");a=b(a,"angular",Object);a.$$minErr= a.$$minErr||K;return b(a,"module",function(){var a={};return function(f,g,h){var k={};if("hasOwnProperty"===f)throw c("badname","module");g&&a.hasOwnProperty(f)&&(a[f]=null);return b(a,f,function(){function a(b,c,d,g){g||(g=e);return function(){g[d||"push"]([b,c,arguments]);return v}}function b(a,c,d){d||(d=e);return 
function(b,e){e&&C(e)&&(e.$$moduleName=f);d.push([a,c,arguments]);return v}}if(!g)throw d("nomod",f);var e=[],n=[],F=[],s=a("$injector","invoke","push",n),v={_invokeQueue:e,_configBlocks:n, _runBlocks:F,info:function(a){if(u(a)){if(!B(a))throw c("aobj","value");k=a;return this}return k},requires:g,name:f,provider:b("$provide","provider"),factory:b("$provide","factory"),service:b("$provide","service"),value:a("$provide","value"),constant:a("$provide","constant","unshift"),decorator:b("$provide","decorator",n),animation:b("$animateProvider","register"),filter:b("$filterProvider","register"),controller:b("$controllerProvider","register"),directive:b("$compileProvider","directive"),component:b("$compileProvider", "component"),config:s,run:function(a){F.push(a);return this}};h&&s(h);return v})}})}function ka(a,b){if(I(a)){b=b||[];for(var d=0,c=a.length;d<c;d++)b[d]=a[d]}else if(B(a))for(d in b=b||{},a)if("$"!==d.charAt(0)||"$"!==d.charAt(1))b[d]=a[d];return b||a}function De(a,b){var d=[];Wb(b)&&(a=$.copy(a,null,b));return JSON.stringify(a,function(a,b){b=Qc(a,b);if(B(b)){if(0<=d.indexOf(b))return"...";d.push(b)}return b})}function Ee(a){O(a,{errorHandlingConfig:oe,bootstrap:Uc,copy:pa,extend:O,merge:qe,equals:sa, element:z,forEach:r,injector:gb,noop:D,bind:Ra,toJson:eb,fromJson:Rc,identity:ab,isUndefined:x,isDefined:u,isString:E,isFunction:C,isObject:B,isNumber:Y,isElement:Zb,isArray:I,version:Fe,isDate:fa,lowercase:L,uppercase:ub,callbacks:{$$counter:0},getTestability:ze,reloadWithDebugInfo:ye,$$minErr:K,$$csp:Ja,$$encodeUriSegment:fb,$$encodeUriQuery:ja,$$stringify:gc});ic=Ce(w);ic("ng",["ngLocale"],["$provide",function(a){a.provider({$$sanitizeUri:Ge});a.provider("$compile",Yc).directive({a:He,input:Zc, textarea:Zc,form:Ie,script:Je,select:Ke,option:Le,ngBind:Me,ngBindHtml:Ne,ngBindTemplate:Oe,ngClass:Pe,ngClassEven:Qe,ngClassOdd:Re,ngCloak:Se,ngController:Te,ngForm:Ue,ngHide:Ve,ngIf:We,ngInclude:Xe,ngInit:Ye,ngNonBindable:Ze,ngPluralize:$e,ngRepeat:af,ngShow:bf,ngStyle:cf,ngSwitch:df,ngSwitchWhen:ef,ngSwitchDefault:ff,ngOptions:gf,ngTransclude:hf,ngModel:jf,ngList:kf,ngChange:lf,pattern:$c,ngPattern:$c,required:ad,ngRequired:ad,minlength:bd,ngMinlength:bd,maxlength:cd,ngMaxlength:cd,ngValue:mf, ngModelOptions:nf}).directive({ngInclude:of}).directive(vb).directive(dd);a.provider({$anchorScroll:pf,$animate:qf,$animateCss:rf,$$animateJs:sf,$$animateQueue:tf,$$AnimateRunner:uf,$$animateAsyncRun:vf,$browser:wf,$cacheFactory:xf,$controller:yf,$document:zf,$$isDocumentHidden:Af,$exceptionHandler:Bf,$filter:ed,$$forceReflow:Cf,$interpolate:Df,$interval:Ef,$http:Ff,$httpParamSerializer:Gf,$httpParamSerializerJQLike:Hf,$httpBackend:If,$xhrFactory:Jf,$jsonpCallbacks:Kf,$location:Lf,$log:Mf,$parse:Nf, $rootScope:Of,$q:Pf,$$q:Qf,$sce:Rf,$sceDelegate:Sf,$sniffer:Tf,$templateCache:Uf,$templateRequest:Vf,$$testability:Wf,$timeout:Xf,$window:Yf,$$rAF:Zf,$$jqLite:$f,$$Map:ag,$$cookieReader:bg})}]).info({angularVersion:"1.6.9"})}function wb(a,b){return b.toUpperCase()}function xb(a){return a.replace(cg,wb)}function jc(a){a=a.nodeType;return 1===a||!a||9===a}function fd(a,b){var d,c,e=b.createDocumentFragment(),f=[];if(kc.test(a)){d=e.appendChild(b.createElement("div"));c=(dg.exec(a)||["",""])[1].toLowerCase(); c=aa[c]||aa._default;d.innerHTML=c[1]+a.replace(eg,"<$1></$2>")+c[2];for(c=c[0];c--;)d=d.lastChild;f=db(f,d.childNodes);d=e.firstChild;d.textContent=""}else f.push(b.createTextNode(a));e.textContent="";e.innerHTML="";r(f,function(a){e.appendChild(a)});return e}function V(a){if(a instanceof V)return 
a;var b;E(a)&&(a=Q(a),b=!0);if(!(this instanceof V)){if(b&&"<"!==a.charAt(0))throw lc("nosel");return new V(a)}if(b){b=w.document;var d;a=(d=fg.exec(a))?[b.createElement(d[1])]:(d=fd(a,b))?d.childNodes: [];mc(this,a)}else C(a)?gd(a):mc(this,a)}function nc(a){return a.cloneNode(!0)}function yb(a,b){!b&&jc(a)&&z.cleanData([a]);a.querySelectorAll&&z.cleanData(a.querySelectorAll("*"))}function hd(a,b,d,c){if(u(c))throw lc("offargs");var e=(c=zb(a))&&c.events,f=c&&c.handle;if(f)if(b){var g=function(b){var c=e[b];u(d)&&cb(c||[],d);u(d)&&c&&0<c.length||(a.removeEventListener(b,f),delete e[b])};r(b.split(" "),function(a){g(a);Ab[a]&&g(Ab[a])})}else for(b in e)"$destroy"!==b&&a.removeEventListener(b,f),delete e[b]} function oc(a,b){var d=a.ng339,c=d&&ib[d];c&&(b?delete c.data[b]:(c.handle&&(c.events.$destroy&&c.handle({},"$destroy"),hd(a)),delete ib[d],a.ng339=void 0))}function zb(a,b){var d=a.ng339,d=d&&ib[d];b&&!d&&(a.ng339=d=++gg,d=ib[d]={events:{},data:{},handle:void 0});return d}function pc(a,b,d){if(jc(a)){var c,e=u(d),f=!e&&b&&!B(b),g=!b;a=(a=zb(a,!f))&&a.data;if(e)a[xb(b)]=d;else{if(g)return a;if(f)return a&&a[xb(b)];for(c in b)a[xb(c)]=b[c]}}}function Bb(a,b){return a.getAttribute?-1<(" "+(a.getAttribute("class")|| "")+" ").replace(/[\n\t]/g," ").indexOf(" "+b+" "):!1}function Cb(a,b){if(b&&a.setAttribute){var d=(" "+(a.getAttribute("class")||"")+" ").replace(/[\n\t]/g," "),c=d;r(b.split(" "),function(a){a=Q(a);c=c.replace(" "+a+" "," ")});c!==d&&a.setAttribute("class",Q(c))}}function Db(a,b){if(b&&a.setAttribute){var d=(" "+(a.getAttribute("class")||"")+" ").replace(/[\n\t]/g," "),c=d;r(b.split(" "),function(a){a=Q(a);-1===c.indexOf(" "+a+" ")&&(c+=a+" ")});c!==d&&a.setAttribute("class",Q(c))}}function mc(a, b){if(b)if(b.nodeType)a[a.length++]=b;else{var d=b.length;if("number"===typeof d&&b.window!==b){if(d)for(var c=0;c<d;c++)a[a.length++]=b[c]}else a[a.length++]=b}}function id(a,b){return Eb(a,"$"+(b||"ngController")+"Controller")}function Eb(a,b,d){9===a.nodeType&&(a=a.documentElement);for(b=I(b)?b:[b];a;){for(var c=0,e=b.length;c<e;c++)if(u(d=z.data(a,b[c])))return d;a=a.parentNode||11===a.nodeType&&a.host}}function jd(a){for(yb(a,!0);a.firstChild;)a.removeChild(a.firstChild)}function Fb(a,b){b|| yb(a);var d=a.parentNode;d&&d.removeChild(a)}function hg(a,b){b=b||w;if("complete"===b.document.readyState)b.setTimeout(a);else z(b).on("load",a)}function gd(a){function b(){w.document.removeEventListener("DOMContentLoaded",b);w.removeEventListener("load",b);a()}"complete"===w.document.readyState?w.setTimeout(a):(w.document.addEventListener("DOMContentLoaded",b),w.addEventListener("load",b))}function kd(a,b){var d=Gb[b.toLowerCase()];return d&&ld[ya(a)]&&d}function ig(a,b){var d=function(c,d){c.isDefaultPrevented= function(){return c.defaultPrevented};var f=b[d||c.type],g=f?f.length:0;if(g){if(x(c.immediatePropagationStopped)){var h=c.stopImmediatePropagation;c.stopImmediatePropagation=function(){c.immediatePropagationStopped=!0;c.stopPropagation&&c.stopPropagation();h&&h.call(c)}}c.isImmediatePropagationStopped=function(){return!0===c.immediatePropagationStopped};var k=f.specialHandlerWrapper||jg;1<g&&(f=ka(f));for(var l=0;l<g;l++)c.isImmediatePropagationStopped()||k(a,c,f[l])}};d.elem=a;return d}function jg(a, b,d){d.call(a,b)}function kg(a,b,d){var c=b.relatedTarget;c&&(c===a||lg.call(a,c))||d.call(a,b)}function $f(){this.$get=function(){return O(V,{hasClass:function(a,b){a.attr&&(a=a[0]);return Bb(a,b)},addClass:function(a,b){a.attr&&(a=a[0]);return 
Db(a,b)},removeClass:function(a,b){a.attr&&(a=a[0]);return Cb(a,b)}})}}function Pa(a,b){var d=a&&a.$$hashKey;if(d)return"function"===typeof d&&(d=a.$$hashKey()),d;d=typeof a;return d="function"===d||"object"===d&&null!==a?a.$$hashKey=d+":"+(b||pe)():d+":"+ a}function md(){this._keys=[];this._values=[];this._lastKey=NaN;this._lastIndex=-1}function nd(a){a=Function.prototype.toString.call(a).replace(mg,"");return a.match(ng)||a.match(og)}function pg(a){return(a=nd(a))?"function("+(a[1]||"").replace(/[\s\r\n]+/," ")+")":"fn"}function gb(a,b){function d(a){return function(b,c){if(B(b))r(b,Xb(a));else return a(b,c)}}function c(a,b){Ia(a,"service");if(C(b)||I(b))b=n.instantiate(b);if(!b.$get)throw Ba("pget",a);return p[a+"Provider"]=b}function e(a,b){return function(){var c= v.invoke(b,this);if(x(c))throw Ba("undef",a);return c}}function f(a,b,d){return c(a,{$get:!1!==d?e(a,b):b})}function g(a){hb(x(a)||I(a),"modulesToLoad","not an array");var b=[],c;r(a,function(a){function d(a){var b,c;b=0;for(c=a.length;b<c;b++){var e=a[b],g=n.get(e[0]);g[e[1]].apply(g,e[2])}}if(!m.get(a)){m.set(a,!0);try{E(a)?(c=ic(a),v.modules[a]=c,b=b.concat(g(c.requires)).concat(c._runBlocks),d(c._invokeQueue),d(c._configBlocks)):C(a)?b.push(n.invoke(a)):I(a)?b.push(n.invoke(a)):sb(a,"module")}catch(e){throw I(a)&& (a=a[a.length-1]),e.message&&e.stack&&-1===e.stack.indexOf(e.message)&&(e=e.message+"\n"+e.stack),Ba("modulerr",a,e.stack||e.message||e);}}});return b}function h(a,c){function d(b,e){if(a.hasOwnProperty(b)){if(a[b]===k)throw Ba("cdep",b+" <- "+l.join(" <- "));return a[b]}try{return l.unshift(b),a[b]=k,a[b]=c(b,e),a[b]}catch(g){throw a[b]===k&&delete a[b],g;}finally{l.shift()}}function e(a,c,g){var f=[];a=gb.$$annotate(a,b,g);for(var k=0,h=a.length;k<h;k++){var l=a[k];if("string"!==typeof l)throw Ba("itkn", l);f.push(c&&c.hasOwnProperty(l)?c[l]:d(l,g))}return f}return{invoke:function(a,b,c,d){"string"===typeof c&&(d=c,c=null);c=e(a,c,d);I(a)&&(a=a[a.length-1]);d=a;if(Ca||"function"!==typeof d)d=!1;else{var g=d.$$ngIsClass;Na(g)||(g=d.$$ngIsClass=/^(?:class\b|constructor\()/.test(Function.prototype.toString.call(d)));d=g}return d?(c.unshift(null),new (Function.prototype.bind.apply(a,c))):a.apply(b,c)},instantiate:function(a,b,c){var d=I(a)?a[a.length-1]:a;a=e(a,b,c);a.unshift(null);return new (Function.prototype.bind.apply(d, a))},get:d,annotate:gb.$$annotate,has:function(b){return p.hasOwnProperty(b+"Provider")||a.hasOwnProperty(b)}}}b=!0===b;var k={},l=[],m=new Hb,p={$provide:{provider:d(c),factory:d(f),service:d(function(a,b){return f(a,["$injector",function(a){return a.instantiate(b)}])}),value:d(function(a,b){return f(a,la(b),!1)}),constant:d(function(a,b){Ia(a,"constant");p[a]=b;F[a]=b}),decorator:function(a,b){var c=n.get(a+"Provider"),d=c.$get;c.$get=function(){var a=v.invoke(d,c);return v.invoke(b,null,{$delegate:a})}}}}, n=p.$injector=h(p,function(a,b){$.isString(b)&&l.push(b);throw Ba("unpr",l.join(" <- "));}),F={},s=h(F,function(a,b){var c=n.get(a+"Provider",b);return v.invoke(c.$get,c,void 0,a)}),v=s;p.$injectorProvider={$get:la(s)};v.modules=n.modules=S();var y=g(a),v=s.get("$injector");v.strictDi=b;r(y,function(a){a&&v.invoke(a)});v.loadNewModules=function(a){r(g(a),function(a){a&&v.invoke(a)})};return v}function pf(){var a=!0;this.disableAutoScrolling=function(){a=!1};this.$get=["$window","$location","$rootScope", function(b,d,c){function e(a){var b=null;Array.prototype.some.call(a,function(a){if("a"===ya(a))return b=a,!0});return b}function f(a){if(a){a.scrollIntoView();var 
c;c=g.yOffset;C(c)?c=c():Zb(c)?(c=c[0],c="fixed"!==b.getComputedStyle(c).position?0:c.getBoundingClientRect().bottom):Y(c)||(c=0);c&&(a=a.getBoundingClientRect().top,b.scrollBy(0,a-c))}else b.scrollTo(0,0)}function g(a){a=E(a)?a:Y(a)?a.toString():d.hash();var b;a?(b=h.getElementById(a))?f(b):(b=e(h.getElementsByName(a)))?f(b):"top"===a&& f(null):f(null)}var h=b.document;a&&c.$watch(function(){return d.hash()},function(a,b){a===b&&""===a||hg(function(){c.$evalAsync(g)})});return g}]}function jb(a,b){if(!a&&!b)return"";if(!a)return b;if(!b)return a;I(a)&&(a=a.join(" "));I(b)&&(b=b.join(" "));return a+" "+b}function qg(a){E(a)&&(a=a.split(" "));var b=S();r(a,function(a){a.length&&(b[a]=!0)});return b}function Ka(a){return B(a)?a:{}}function rg(a,b,d,c){function e(a){try{a.apply(null,xa.call(arguments,1))}finally{if(s--,0===s)for(;v.length;)try{v.pop()()}catch(b){d.error(b)}}} function f(){A=null;h()}function g(){y=H();y=x(y)?null:y;sa(y,J)&&(y=J);t=J=y}function h(){var a=t;g();if(Aa!==k.url()||a!==y)Aa=k.url(),t=y,r(G,function(a){a(k.url(),y)})}var k=this,l=a.location,m=a.history,p=a.setTimeout,n=a.clearTimeout,F={};k.isMock=!1;var s=0,v=[];k.$$completeOutstandingRequest=e;k.$$incOutstandingRequestCount=function(){s++};k.notifyWhenNoOutstandingRequests=function(a){0===s?a():v.push(a)};var y,t,Aa=l.href,hc=b.find("base"),A=null,H=c.history?function(){try{return m.state}catch(a){}}: D;g();k.url=function(b,d,e){x(e)&&(e=null);l!==a.location&&(l=a.location);m!==a.history&&(m=a.history);if(b){var f=t===e;if(Aa===b&&(!c.history||f))return k;var h=Aa&&La(Aa)===La(b);Aa=b;t=e;!c.history||h&&f?(h||(A=b),d?l.replace(b):h?(d=l,e=b.indexOf("#"),e=-1===e?"":b.substr(e),d.hash=e):l.href=b,l.href!==b&&(A=b)):(m[d?"replaceState":"pushState"](e,"",b),g());A&&(A=b);return k}return A||l.href.replace(/%27/g,"'")};k.state=function(){return y};var G=[],ba=!1,J=null;k.onUrlChange=function(b){if(!ba){if(c.history)z(a).on("popstate", f);z(a).on("hashchange",f);ba=!0}G.push(b);return b};k.$$applicationDestroyed=function(){z(a).off("hashchange popstate",f)};k.$$checkUrlChange=h;k.baseHref=function(){var a=hc.attr("href");return a?a.replace(/^(https?:)?\/\/[^/]*/,""):""};k.defer=function(a,b){var c;s++;c=p(function(){delete F[c];e(a)},b||0);F[c]=!0;return c};k.defer.cancel=function(a){return F[a]?(delete F[a],n(a),e(D),!0):!1}}function wf(){this.$get=["$window","$log","$sniffer","$document",function(a,b,d,c){return new rg(a,c,b, d)}]}function xf(){this.$get=function(){function a(a,c){function e(a){a!==p&&(n?n===a&&(n=a.n):n=a,f(a.n,a.p),f(a,p),p=a,p.n=null)}function f(a,b){a!==b&&(a&&(a.p=b),b&&(b.n=a))}if(a in b)throw K("$cacheFactory")("iid",a);var g=0,h=O({},c,{id:a}),k=S(),l=c&&c.capacity||Number.MAX_VALUE,m=S(),p=null,n=null;return b[a]={put:function(a,b){if(!x(b)){if(l<Number.MAX_VALUE){var c=m[a]||(m[a]={key:a});e(c)}a in k||g++;k[a]=b;g>l&&this.remove(n.key);return b}},get:function(a){if(l<Number.MAX_VALUE){var b= m[a];if(!b)return;e(b)}return k[a]},remove:function(a){if(l<Number.MAX_VALUE){var b=m[a];if(!b)return;b===p&&(p=b.p);b===n&&(n=b.n);f(b.n,b.p);delete m[a]}a in k&&(delete k[a],g--)},removeAll:function(){k=S();g=0;m=S();p=n=null},destroy:function(){m=h=k=null;delete b[a]},info:function(){return O({},h,{size:g})}}}var b={};a.info=function(){var a={};r(b,function(b,e){a[e]=b.info()});return a};a.get=function(a){return b[a]};return a}}function Uf(){this.$get=["$cacheFactory",function(a){return a("templates")}]} function Yc(a,b){function d(a,b,c){var 
d=/^\s*([@&<]|=(\*?))(\??)\s*([\w$]*)\s*$/,e=S();r(a,function(a,g){if(a in p)e[g]=p[a];else{var f=a.match(d);if(!f)throw ca("iscp",b,g,a,c?"controller bindings definition":"isolate scope definition");e[g]={mode:f[1][0],collection:"*"===f[2],optional:"?"===f[3],attrName:f[4]||g};f[4]&&(p[a]=e[g])}});return e}function c(a){var b=a.charAt(0);if(!b||b!==L(b))throw ca("baddir",a);if(a!==a.trim())throw ca("baddir",a);}function e(a){var b=a.require||a.controller&&a.name; !I(b)&&B(b)&&r(b,function(a,c){var d=a.match(l);a.substring(d[0].length)||(b[c]=d[0]+c)});return b}var f={},g=/^\s*directive:\s*([\w-]+)\s+(.*)$/,h=/(([\w-]+)(?::([^;]+))?;?)/,k=te("ngSrc,ngSrcset,src,srcset"),l=/^(?:(\^\^?)?(\?)?(\^\^?)?)?/,m=/^(on[a-z]+|formaction)$/,p=S();this.directive=function hc(b,d){hb(b,"name");Ia(b,"directive");E(b)?(c(b),hb(d,"directiveFactory"),f.hasOwnProperty(b)||(f[b]=[],a.factory(b+"Directive",["$injector","$exceptionHandler",function(a,c){var d=[];r(f[b],function(g, f){try{var h=a.invoke(g);C(h)?h={compile:la(h)}:!h.compile&&h.link&&(h.compile=la(h.link));h.priority=h.priority||0;h.index=f;h.name=h.name||b;h.require=e(h);var k=h,l=h.restrict;if(l&&(!E(l)||!/[EACM]/.test(l)))throw ca("badrestrict",l,b);k.restrict=l||"EA";h.$$moduleName=g.$$moduleName;d.push(h)}catch(m){c(m)}});return d}])),f[b].push(d)):r(b,Xb(hc));return this};this.component=function A(a,b){function c(a){function e(b){return C(b)||I(b)?function(c,d){return a.invoke(b,this,{$element:c,$attrs:d})}: b}var g=b.template||b.templateUrl?b.template:"",f={controller:d,controllerAs:sg(b.controller)||b.controllerAs||"$ctrl",template:e(g),templateUrl:e(b.templateUrl),transclude:b.transclude,scope:{},bindToController:b.bindings||{},restrict:"E",require:b.require};r(b,function(a,b){"$"===b.charAt(0)&&(f[b]=a)});return f}if(!E(a))return r(a,Xb(Ra(this,A))),this;var d=b.controller||function(){};r(b,function(a,b){"$"===b.charAt(0)&&(c[b]=a,C(d)&&(d[b]=a))});c.$inject=["$injector"];return this.directive(a, c)};this.aHrefSanitizationWhitelist=function(a){return u(a)?(b.aHrefSanitizationWhitelist(a),this):b.aHrefSanitizationWhitelist()};this.imgSrcSanitizationWhitelist=function(a){return u(a)?(b.imgSrcSanitizationWhitelist(a),this):b.imgSrcSanitizationWhitelist()};var n=!0;this.debugInfoEnabled=function(a){return u(a)?(n=a,this):n};var F=!1;this.preAssignBindingsEnabled=function(a){return u(a)?(F=a,this):F};var s=!1;this.strictComponentBindingsEnabled=function(a){return u(a)?(s=a,this):s};var v=10;this.onChangesTtl= function(a){return arguments.length?(v=a,this):v};var y=!0;this.commentDirectivesEnabled=function(a){return arguments.length?(y=a,this):y};var t=!0;this.cssClassDirectivesEnabled=function(a){return arguments.length?(t=a,this):t};this.$get=["$injector","$interpolate","$exceptionHandler","$templateRequest","$parse","$controller","$rootScope","$sce","$animate","$$sanitizeUri",function(a,b,c,e,p,R,M,T,P,q){function N(){try{if(!--Fa)throw ha=void 0,ca("infchng",v);M.$apply(function(){for(var a=[],b=0, c=ha.length;b<c;++b)try{ha[b]()}catch(d){a.push(d)}ha=void 0;if(a.length)throw a;})}finally{Fa++}}function qc(a,b){if(b){var c=Object.keys(b),d,e,g;d=0;for(e=c.length;d<e;d++)g=c[d],this[g]=b[g]}else this.$attr={};this.$$element=a}function Ta(a,b,c){Ba.innerHTML="<span "+b+">";b=Ba.firstChild.attributes;var d=b[0];b.removeNamedItem(d.name);d.value=c;a.attributes.setNamedItem(d)}function na(a,b){try{a.addClass(b)}catch(c){}}function da(a,b,c,d,e){a instanceof z||(a=z(a));var g=Ua(a,b,a,c,d,e);da.$$addScopeClass(a); var f=null;return 
function(b,c,d){if(!a)throw ca("multilink");hb(b,"scope");e&&e.needsNewScope&&(b=b.$parent.$new());d=d||{};var h=d.parentBoundTranscludeFn,k=d.transcludeControllers;d=d.futureParentElement;h&&h.$$boundTransclude&&(h=h.$$boundTransclude);f||(f=(d=d&&d[0])?"foreignobject"!==ya(d)&&ia.call(d).match(/SVG/)?"svg":"html":"html");d="html"!==f?z(ka(f,z("<div>").append(a).html())):c?Sa.clone.call(a):a;if(k)for(var l in k)d.data("$"+l+"Controller",k[l].instance);da.$$addScopeInfo(d,b);c&& c(d,b);g&&g(b,d,d,h);c||(a=g=null);return d}}function Ua(a,b,c,d,e,g){function f(a,c,d,e){var g,k,l,m,p,n,G;if(t)for(G=Array(c.length),m=0;m<h.length;m+=3)g=h[m],G[g]=c[g];else G=c;m=0;for(p=h.length;m<p;)k=G[h[m++]],c=h[m++],g=h[m++],c?(c.scope?(l=a.$new(),da.$$addScopeInfo(z(k),l)):l=a,n=c.transcludeOnThisElement?Ma(a,c.transclude,e):!c.templateOnThisElement&&e?e:!e&&b?Ma(a,b):null,c(g,l,k,d,n)):g&&g(a,k.childNodes,void 0,e)}for(var h=[],k=I(a)||a instanceof z,l,m,p,n,t,G=0;G<a.length;G++){l=new qc; 11===Ca&&Da(a,G,k);m=K(a[G],[],l,0===G?d:void 0,e);(g=m.length?Y(m,a[G],l,b,c,null,[],[],g):null)&&g.scope&&da.$$addScopeClass(l.$$element);l=g&&g.terminal||!(p=a[G].childNodes)||!p.length?null:Ua(p,g?(g.transcludeOnThisElement||!g.templateOnThisElement)&&g.transclude:b);if(g||l)h.push(G,g,l),n=!0,t=t||g;g=null}return n?f:null}function Da(a,b,c){var d=a[b],e=d.parentNode,g;if(d.nodeType===Oa)for(;;){g=e?d.nextSibling:a[b+1];if(!g||g.nodeType!==Oa)break;d.nodeValue+=g.nodeValue;g.parentNode&&g.parentNode.removeChild(g); c&&g===a[b+1]&&a.splice(b+1,1)}}function Ma(a,b,c){function d(e,g,f,h,k){e||(e=a.$new(!1,k),e.$$transcluded=!0);return b(e,g,{parentBoundTranscludeFn:c,transcludeControllers:f,futureParentElement:h})}var e=d.$$slots=S(),g;for(g in b.$$slots)e[g]=b.$$slots[g]?Ma(a,b.$$slots[g],c):null;return d}function K(a,b,c,d,e){var g=c.$attr,f;switch(a.nodeType){case 1:f=ya(a);U(b,Ea(f),"E",d,e);for(var k,l,m,p,n=a.attributes,t=0,G=n&&n.length;t<G;t++){var H=!1,F=!1;k=n[t];l=k.name;m=k.value;k=Ea(l);(p=Pa.test(k))&& (l=l.replace(od,"").substr(8).replace(/_(.)/g,function(a,b){return b.toUpperCase()}));(k=k.match(Qa))&&$(k[1])&&(H=l,F=l.substr(0,l.length-5)+"end",l=l.substr(0,l.length-6));k=Ea(l.toLowerCase());g[k]=l;if(p||!c.hasOwnProperty(k))c[k]=m,kd(a,k)&&(c[k]=!0);wa(a,b,m,k,p);U(b,k,"A",d,e,H,F)}"input"===f&&"hidden"===a.getAttribute("type")&&a.setAttribute("autocomplete","off");if(!La)break;g=a.className;B(g)&&(g=g.animVal);if(E(g)&&""!==g)for(;a=h.exec(g);)k=Ea(a[2]),U(b,k,"C",d,e)&&(c[k]=Q(a[3])),g=g.substr(a.index+ a[0].length);break;case Oa:oa(b,a.nodeValue);break;case 8:if(!Ka)break;rc(a,b,c,d,e)}b.sort(la);return b}function rc(a,b,c,d,e){try{var f=g.exec(a.nodeValue);if(f){var h=Ea(f[1]);U(b,h,"M",d,e)&&(c[h]=Q(f[2]))}}catch(k){}}function pd(a,b,c){var d=[],e=0;if(b&&a.hasAttribute&&a.hasAttribute(b)){do{if(!a)throw ca("uterdir",b,c);1===a.nodeType&&(a.hasAttribute(b)&&e++,a.hasAttribute(c)&&e--);d.push(a);a=a.nextSibling}while(0<e)}else d.push(a);return z(d)}function V(a,b,c){return function(d,e,g,f,h){e= pd(e[0],b,c);return a(d,e,g,f,h)}}function W(a,b,c,d,e,g){var f;return a?da(b,c,d,e,g):function(){f||(f=da(b,c,d,e,g),b=c=g=null);return f.apply(this,arguments)}}function Y(a,b,d,e,g,f,h,k,l){function m(a,b,c,d){if(a){c&&(a=V(a,c,d));a.require=s.require;a.directiveName=R;if(J===s||s.$$isolateScope)a=ta(a,{isolateScope:!0});h.push(a)}if(b){c&&(b=V(b,c,d));b.require=s.require;b.directiveName=R;if(J===s||s.$$isolateScope)b=ta(b,{isolateScope:!0});k.push(b)}}function p(a,e,g,f,l){function m(a,b,c,d){var e; 
bb(a)||(d=c,c=b,b=a,a=void 0);T&&(e=M);c||(c=T?ga.parent():ga);if(d){var g=l.$$slots[d];if(g)return g(a,b,e,c,N);if(x(g))throw ca("noslot",d,za(ga));}else return l(a,b,e,c,N)}var n,s,v,y,ba,M,R,ga;b===g?(f=d,ga=d.$$element):(ga=z(g),f=new qc(ga,d));ba=e;J?y=e.$new(!0):t&&(ba=e.$parent);l&&(R=m,R.$$boundTransclude=l,R.isSlotFilled=function(a){return!!l.$$slots[a]});H&&(M=ea(ga,f,R,H,y,e,J));J&&(da.$$addScopeInfo(ga,y,!0,!(A&&(A===J||A===J.$$originalDirective))),da.$$addScopeClass(ga,!0),y.$$isolateBindings= J.$$isolateBindings,s=qa(e,f,y,y.$$isolateBindings,J),s.removeWatches&&y.$on("$destroy",s.removeWatches));for(n in M){s=H[n];v=M[n];var P=s.$$bindings.bindToController;if(F){v.bindingInfo=P?qa(ba,f,v.instance,P,s):{};var q=v();q!==v.instance&&(v.instance=q,ga.data("$"+s.name+"Controller",q),v.bindingInfo.removeWatches&&v.bindingInfo.removeWatches(),v.bindingInfo=qa(ba,f,v.instance,P,s))}else v.instance=v(),ga.data("$"+s.name+"Controller",v.instance),v.bindingInfo=qa(ba,f,v.instance,P,s)}r(H,function(a, b){var c=a.require;a.bindToController&&!I(c)&&B(c)&&O(M[b].instance,X(b,c,ga,M))});r(M,function(a){var b=a.instance;if(C(b.$onChanges))try{b.$onChanges(a.bindingInfo.initialChanges)}catch(d){c(d)}if(C(b.$onInit))try{b.$onInit()}catch(e){c(e)}C(b.$doCheck)&&(ba.$watch(function(){b.$doCheck()}),b.$doCheck());C(b.$onDestroy)&&ba.$on("$destroy",function(){b.$onDestroy()})});n=0;for(s=h.length;n<s;n++)v=h[n],va(v,v.isolateScope?y:e,ga,f,v.require&&X(v.directiveName,v.require,ga,M),R);var N=e;J&&(J.template|| null===J.templateUrl)&&(N=y);a&&a(N,g.childNodes,void 0,l);for(n=k.length-1;0<=n;n--)v=k[n],va(v,v.isolateScope?y:e,ga,f,v.require&&X(v.directiveName,v.require,ga,M),R);r(M,function(a){a=a.instance;C(a.$postLink)&&a.$postLink()})}l=l||{};for(var n=-Number.MAX_VALUE,t=l.newScopeDirective,H=l.controllerDirectives,J=l.newIsolateScopeDirective,A=l.templateDirective,y=l.nonTlbTranscludeDirective,ba=!1,M=!1,T=l.hasElementTranscludeDirective,v=d.$$element=z(b),s,R,P,q=e,N,u=!1,Ib=!1,w,Da=0,D=a.length;Da< D;Da++){s=a[Da];var Ta=s.$$start,E=s.$$end;Ta&&(v=pd(b,Ta,E));P=void 0;if(n>s.priority)break;if(w=s.scope)s.templateUrl||(B(w)?(aa("new/isolated scope",J||t,s,v),J=s):aa("new/isolated scope",J,s,v)),t=t||s;R=s.name;if(!u&&(s.replace&&(s.templateUrl||s.template)||s.transclude&&!s.$$tlb)){for(w=Da+1;u=a[w++];)if(u.transclude&&!u.$$tlb||u.replace&&(u.templateUrl||u.template)){Ib=!0;break}u=!0}!s.templateUrl&&s.controller&&(H=H||S(),aa("'"+R+"' controller",H[R],s,v),H[R]=s);if(w=s.transclude)if(ba=!0, s.$$tlb||(aa("transclusion",y,s,v),y=s),"element"===w)T=!0,n=s.priority,P=v,v=d.$$element=z(da.$$createComment(R,d[R])),b=v[0],ma(g,xa.call(P,0),b),P[0].$$parentNode=P[0].parentNode,q=W(Ib,P,e,n,f&&f.name,{nonTlbTranscludeDirective:y});else{var na=S();if(B(w)){P=[];var Ua=S(),Ma=S();r(w,function(a,b){var c="?"===a.charAt(0);a=c?a.substring(1):a;Ua[a]=b;na[b]=null;Ma[b]=c});r(v.contents(),function(a){var b=Ua[Ea(ya(a))];b?(Ma[b]=!0,na[b]=na[b]||[],na[b].push(a)):P.push(a)});r(Ma,function(a,b){if(!a)throw ca("reqslot", b);});for(var L in na)na[L]&&(na[L]=W(Ib,na[L],e))}else P=z(nc(b)).contents();v.empty();q=W(Ib,P,e,void 0,void 0,{needsNewScope:s.$$isolateScope||s.$$newScope});q.$$slots=na}if(s.template)if(M=!0,aa("template",A,s,v),A=s,w=C(s.template)?s.template(v,d):s.template,w=Ia(w),s.replace){f=s;P=kc.test(w)?qd(ka(s.templateNamespace,Q(w))):[];b=P[0];if(1!==P.length||1!==b.nodeType)throw ca("tplrt",R,"");ma(g,v,b);D={$attr:{}};w=K(b,[],D);var 
rc=a.splice(Da+1,a.length-(Da+1));(J||t)&&Z(w,J,t);a=a.concat(w).concat(rc); fa(d,D);D=a.length}else v.html(w);if(s.templateUrl)M=!0,aa("template",A,s,v),A=s,s.replace&&(f=s),p=ja(a.splice(Da,a.length-Da),v,d,g,ba&&q,h,k,{controllerDirectives:H,newScopeDirective:t!==s&&t,newIsolateScopeDirective:J,templateDirective:A,nonTlbTranscludeDirective:y}),D=a.length;else if(s.compile)try{N=s.compile(v,d,q);var U=s.$$originalDirective||s;C(N)?m(null,Ra(U,N),Ta,E):N&&m(Ra(U,N.pre),Ra(U,N.post),Ta,E)}catch($){c($,za(v))}s.terminal&&(p.terminal=!0,n=Math.max(n,s.priority))}p.scope=t&& !0===t.scope;p.transcludeOnThisElement=ba;p.templateOnThisElement=M;p.transclude=q;l.hasElementTranscludeDirective=T;return p}function X(a,b,c,d){var e;if(E(b)){var g=b.match(l);b=b.substring(g[0].length);var f=g[1]||g[3],g="?"===g[2];"^^"===f?c=c.parent():e=(e=d&&d[b])&&e.instance;if(!e){var h="$"+b+"Controller";e=f?c.inheritedData(h):c.data(h)}if(!e&&!g)throw ca("ctreq",b,a);}else if(I(b))for(e=[],f=0,g=b.length;f<g;f++)e[f]=X(a,b[f],c,d);else B(b)&&(e={},r(b,function(b,g){e[g]=X(a,b,c,d)}));return e|| null}function ea(a,b,c,d,e,g,f){var h=S(),k;for(k in d){var l=d[k],m={$scope:l===f||l.$$isolateScope?e:g,$element:a,$attrs:b,$transclude:c},p=l.controller;"@"===p&&(p=b[l.name]);m=R(p,m,!0,l.controllerAs);h[l.name]=m;a.data("$"+l.name+"Controller",m.instance)}return h}function Z(a,b,c){for(var d=0,e=a.length;d<e;d++)a[d]=$b(a[d],{$$isolateScope:b,$$newScope:c})}function U(b,c,e,g,h,k,l){if(c===h)return null;var m=null;if(f.hasOwnProperty(c)){h=a.get(c+"Directive");for(var p=0,n=h.length;p<n;p++)if(c= h[p],(x(g)||g>c.priority)&&-1!==c.restrict.indexOf(e)){k&&(c=$b(c,{$$start:k,$$end:l}));if(!c.$$bindings){var t=m=c,G=c.name,H={isolateScope:null,bindToController:null};B(t.scope)&&(!0===t.bindToController?(H.bindToController=d(t.scope,G,!0),H.isolateScope={}):H.isolateScope=d(t.scope,G,!1));B(t.bindToController)&&(H.bindToController=d(t.bindToController,G,!0));if(H.bindToController&&!t.controller)throw ca("noctrl",G);m=m.$$bindings=H;B(m.isolateScope)&&(c.$$isolateBindings=m.isolateScope)}b.push(c); m=c}}return m}function $(b){if(f.hasOwnProperty(b))for(var c=a.get(b+"Directive"),d=0,e=c.length;d<e;d++)if(b=c[d],b.multiElement)return!0;return!1}function fa(a,b){var c=b.$attr,d=a.$attr;r(a,function(d,e){"$"!==e.charAt(0)&&(b[e]&&b[e]!==d&&(d=d.length?d+(("style"===e?";":" ")+b[e]):b[e]),a.$set(e,d,!0,c[e]))});r(b,function(b,e){a.hasOwnProperty(e)||"$"===e.charAt(0)||(a[e]=b,"class"!==e&&"style"!==e&&(d[e]=c[e]))})}function ja(a,b,d,g,f,h,k,l){var m=[],p,n,t=b[0],H=a.shift(),s=$b(H,{templateUrl:null, transclude:null,replace:null,$$originalDirective:H}),F=C(H.templateUrl)?H.templateUrl(b,d):H.templateUrl,v=H.templateNamespace;b.empty();e(F).then(function(c){var e,G;c=Ia(c);if(H.replace){c=kc.test(c)?qd(ka(v,Q(c))):[];e=c[0];if(1!==c.length||1!==e.nodeType)throw ca("tplrt",H.name,F);c={$attr:{}};ma(g,b,e);var J=K(e,[],c);B(H.scope)&&Z(J,!0);a=J.concat(a);fa(d,c)}else e=t,b.html(c);a.unshift(s);p=Y(a,e,d,f,b,H,h,k,l);r(g,function(a,c){a===e&&(g[c]=b[0])});for(n=Ua(b[0].childNodes,f);m.length;){c= m.shift();G=m.shift();var y=m.shift(),A=m.shift(),J=b[0];if(!c.$$destroyed){if(G!==t){var M=G.className;l.hasElementTranscludeDirective&&H.replace||(J=nc(e));ma(y,z(G),J);na(z(J),M)}G=p.transcludeOnThisElement?Ma(c,p.transclude,A):A;p(n,c,J,g,G)}}m=null}).catch(function(a){bc(a)&&c(a)});return function(a,b,c,d,e){a=e;b.$$destroyed||(m?m.push(b,c,d,a):(p.transcludeOnThisElement&&(a=Ma(b,p.transclude,e)),p(n,b,c,d,a)))}}function 
la(a,b){var c=b.priority-a.priority;return 0!==c?c:a.name!==b.name?a.name< b.name?-1:1:a.index-b.index}function aa(a,b,c,d){function e(a){return a?" (module: "+a+")":""}if(b)throw ca("multidir",b.name,e(b.$$moduleName),c.name,e(c.$$moduleName),a,za(d));}function oa(a,c){var d=b(c,!0);d&&a.push({priority:0,compile:function(a){a=a.parent();var b=!!a.length;b&&da.$$addBindingClass(a);return function(a,c){var e=c.parent();b||da.$$addBindingClass(e);da.$$addBindingInfo(e,d.expressions);a.$watch(d,function(a){c[0].nodeValue=a})}}})}function ka(a,b){a=L(a||"html");switch(a){case "svg":case "math":var c= w.document.createElement("div");c.innerHTML="<"+a+">"+b+"</"+a+">";return c.childNodes[0].childNodes;default:return b}}function ua(a,b){if("srcdoc"===b)return T.HTML;var c=ya(a);if("src"===b||"ngSrc"===b){if(-1===["img","video","audio","source","track"].indexOf(c))return T.RESOURCE_URL}else if("xlinkHref"===b||"form"===c&&"action"===b||"link"===c&&"href"===b)return T.RESOURCE_URL}function wa(a,c,d,e,g){var f=ua(a,e),h=k[e]||g,l=b(d,!g,f,h);if(l){if("multiple"===e&&"select"===ya(a))throw ca("selmulti", za(a));if(m.test(e))throw ca("nodomevents");c.push({priority:100,compile:function(){return{pre:function(a,c,g){c=g.$$observers||(g.$$observers=S());var k=g[e];k!==d&&(l=k&&b(k,!0,f,h),d=k);l&&(g[e]=l(a),(c[e]||(c[e]=[])).$$inter=!0,(g.$$observers&&g.$$observers[e].$$scope||a).$watch(l,function(a,b){"class"===e&&a!==b?g.$updateClass(a,b):g.$set(e,a)}))}}}})}}function ma(a,b,c){var d=b[0],e=b.length,g=d.parentNode,f,h;if(a)for(f=0,h=a.length;f<h;f++)if(a[f]===d){a[f++]=c;h=f+e-1;for(var k=a.length;f< k;f++,h++)h<k?a[f]=a[h]:delete a[f];a.length-=e-1;a.context===d&&(a.context=c);break}g&&g.replaceChild(c,d);a=w.document.createDocumentFragment();for(f=0;f<e;f++)a.appendChild(b[f]);z.hasData(d)&&(z.data(c,z.data(d)),z(d).off("$destroy"));z.cleanData(a.querySelectorAll("*"));for(f=1;f<e;f++)delete b[f];b[0]=c;b.length=1}function ta(a,b){return O(function(){return a.apply(null,arguments)},a,b)}function va(a,b,d,e,g,f){try{a(b,d,e,g,f)}catch(h){c(h,za(d))}}function pa(a,b){if(s)throw ca("missingattr", a,b);}function qa(a,c,d,e,g){function f(b,c,e){C(d.$onChanges)&&!cc(c,e)&&(ha||(a.$$postDigest(N),ha=[]),m||(m={},ha.push(h)),m[b]&&(e=m[b].previousValue),m[b]=new Jb(e,c))}function h(){d.$onChanges(m);m=void 0}var k=[],l={},m;r(e,function(e,h){var m=e.attrName,n=e.optional,t,G,s,F;switch(e.mode){case "@":n||ra.call(c,m)||(pa(m,g.name),d[h]=c[m]=void 0);n=c.$observe(m,function(a){if(E(a)||Na(a))f(h,a,d[h]),d[h]=a});c.$$observers[m].$$scope=a;t=c[m];E(t)?d[h]=b(t)(a):Na(t)&&(d[h]=t);l[h]=new Jb(sc, d[h]);k.push(n);break;case "=":if(!ra.call(c,m)){if(n)break;pa(m,g.name);c[m]=void 0}if(n&&!c[m])break;G=p(c[m]);F=G.literal?sa:cc;s=G.assign||function(){t=d[h]=G(a);throw ca("nonassign",c[m],m,g.name);};t=d[h]=G(a);n=function(b){F(b,d[h])||(F(b,t)?s(a,b=d[h]):d[h]=b);return t=b};n.$stateful=!0;n=e.collection?a.$watchCollection(c[m],n):a.$watch(p(c[m],n),null,G.literal);k.push(n);break;case "<":if(!ra.call(c,m)){if(n)break;pa(m,g.name);c[m]=void 0}if(n&&!c[m])break;G=p(c[m]);var v=G.literal,y=d[h]= G(a);l[h]=new Jb(sc,d[h]);n=a.$watch(G,function(a,b){if(b===a){if(b===y||v&&sa(b,y))return;b=y}f(h,a,b);d[h]=a},v);k.push(n);break;case "&":n||ra.call(c,m)||pa(m,g.name);G=c.hasOwnProperty(m)?p(c[m]):D;if(G===D&&n)break;d[h]=function(b){return G(a,b)}}});return{initialChanges:l,removeWatches:k.length&&function(){for(var a=0,b=k.length;a<b;++a)k[a]()}}}var 
Ja=/^\w/,Ba=w.document.createElement("div"),Ka=y,La=t,Fa=v,ha;qc.prototype={$normalize:Ea,$addClass:function(a){a&&0<a.length&&P.addClass(this.$$element, a)},$removeClass:function(a){a&&0<a.length&&P.removeClass(this.$$element,a)},$updateClass:function(a,b){var c=rd(a,b);c&&c.length&&P.addClass(this.$$element,c);(c=rd(b,a))&&c.length&&P.removeClass(this.$$element,c)},$set:function(a,b,d,e){var g=kd(this.$$element[0],a),f=sd[a],h=a;g?(this.$$element.prop(a,b),e=g):f&&(this[f]=b,h=f);this[a]=b;e?this.$attr[a]=e:(e=this.$attr[a])||(this.$attr[a]=e=Vc(a,"-"));g=ya(this.$$element);if("a"===g&&("href"===a||"xlinkHref"===a)||"img"===g&&"src"===a)this[a]= b=q(b,"src"===a);else if("img"===g&&"srcset"===a&&u(b)){for(var g="",f=Q(b),k=/(\s+\d+x\s*,|\s+\d+w\s*,|\s+,|,\s+)/,k=/\s/.test(f)?k:/(,)/,f=f.split(k),k=Math.floor(f.length/2),l=0;l<k;l++)var m=2*l,g=g+q(Q(f[m]),!0),g=g+(" "+Q(f[m+1]));f=Q(f[2*l]).split(/\s/);g+=q(Q(f[0]),!0);2===f.length&&(g+=" "+Q(f[1]));this[a]=b=g}!1!==d&&(null===b||x(b)?this.$$element.removeAttr(e):Ja.test(e)?this.$$element.attr(e,b):Ta(this.$$element[0],e,b));(a=this.$$observers)&&r(a[h],function(a){try{a(b)}catch(d){c(d)}})}, $observe:function(a,b){var c=this,d=c.$$observers||(c.$$observers=S()),e=d[a]||(d[a]=[]);e.push(b);M.$evalAsync(function(){e.$$inter||!c.hasOwnProperty(a)||x(c[a])||b(c[a])});return function(){cb(e,b)}}};var Ga=b.startSymbol(),Ha=b.endSymbol(),Ia="{{"===Ga&&"}}"===Ha?ab:function(a){return a.replace(/\{\{/g,Ga).replace(/}}/g,Ha)},Pa=/^ngAttr[A-Z]/,Qa=/^(.+)Start$/;da.$$addBindingInfo=n?function(a,b){var c=a.data("$binding")||[];I(b)?c=c.concat(b):c.push(b);a.data("$binding",c)}:D;da.$$addBindingClass= n?function(a){na(a,"ng-binding")}:D;da.$$addScopeInfo=n?function(a,b,c,d){a.data(c?d?"$isolateScopeNoTemplate":"$isolateScope":"$scope",b)}:D;da.$$addScopeClass=n?function(a,b){na(a,b?"ng-isolate-scope":"ng-scope")}:D;da.$$createComment=function(a,b){var c="";n&&(c=" "+(a||"")+": ",b&&(c+=b+" "));return w.document.createComment(c)};return da}]}function Jb(a,b){this.previousValue=a;this.currentValue=b}function Ea(a){return a.replace(od,"").replace(tg,function(a,d,c){return c?d.toUpperCase():d})}function rd(a, b){var d="",c=a.split(/\s+/),e=b.split(/\s+/),f=0;a:for(;f<c.length;f++){for(var g=c[f],h=0;h<e.length;h++)if(g===e[h])continue a;d+=(0<d.length?" 
":"")+g}return d}function qd(a){a=z(a);var b=a.length;if(1>=b)return a;for(;b--;){var d=a[b];(8===d.nodeType||d.nodeType===Oa&&""===d.nodeValue.trim())&&ug.call(a,b,1)}return a}function sg(a,b){if(b&&E(b))return b;if(E(a)){var d=td.exec(a);if(d)return d[3]}}function yf(){var a={},b=!1;this.has=function(b){return a.hasOwnProperty(b)};this.register=function(b, c){Ia(b,"controller");B(b)?O(a,b):a[b]=c};this.allowGlobals=function(){b=!0};this.$get=["$injector","$window",function(d,c){function e(a,b,c,d){if(!a||!B(a.$scope))throw K("$controller")("noscp",d,b);a.$scope[b]=c}return function(f,g,h,k){var l,m,p;h=!0===h;k&&E(k)&&(p=k);if(E(f)){k=f.match(td);if(!k)throw ud("ctrlfmt",f);m=k[1];p=p||k[3];f=a.hasOwnProperty(m)?a[m]:Xc(g.$scope,m,!0)||(b?Xc(c,m,!0):void 0);if(!f)throw ud("ctrlreg",m);sb(f,m,!0)}if(h)return h=(I(f)?f[f.length-1]:f).prototype,l=Object.create(h|| null),p&&e(g,p,l,m||f.name),O(function(){var a=d.invoke(f,l,g,m);a!==l&&(B(a)||C(a))&&(l=a,p&&e(g,p,l,m||f.name));return l},{instance:l,identifier:p});l=d.instantiate(f,g,m);p&&e(g,p,l,m||f.name);return l}}]}function zf(){this.$get=["$window",function(a){return z(a.document)}]}function Af(){this.$get=["$document","$rootScope",function(a,b){function d(){e=c.hidden}var c=a[0],e=c&&c.hidden;a.on("visibilitychange",d);b.$on("$destroy",function(){a.off("visibilitychange",d)});return function(){return e}}]} function Bf(){this.$get=["$log",function(a){return function(b,d){a.error.apply(a,arguments)}}]}function tc(a){return B(a)?fa(a)?a.toISOString():eb(a):a}function Gf(){this.$get=function(){return function(a){if(!a)return"";var b=[];Oc(a,function(a,c){null===a||x(a)||C(a)||(I(a)?r(a,function(a){b.push(ja(c)+"="+ja(tc(a)))}):b.push(ja(c)+"="+ja(tc(a))))});return b.join("&")}}}function Hf(){this.$get=function(){return function(a){function b(a,e,f){null===a||x(a)||(I(a)?r(a,function(a,c){b(a,e+"["+(B(a)? c:"")+"]")}):B(a)&&!fa(a)?Oc(a,function(a,c){b(a,e+(f?"":"[")+c+(f?"":"]"))}):d.push(ja(e)+"="+ja(tc(a))))}if(!a)return"";var d=[];b(a,"",!0);return d.join("&")}}}function uc(a,b){if(E(a)){var d=a.replace(vg,"").trim();if(d){var c=b("Content-Type"),c=c&&0===c.indexOf(vd),e;(e=c)||(e=(e=d.match(wg))&&xg[e[0]].test(d));if(e)try{a=Rc(d)}catch(f){if(!c)return a;throw Kb("baddata",a,f);}}}return a}function wd(a){var b=S(),d;E(a)?r(a.split("\n"),function(a){d=a.indexOf(":");var e=L(Q(a.substr(0,d)));a= Q(a.substr(d+1));e&&(b[e]=b[e]?b[e]+", "+a:a)}):B(a)&&r(a,function(a,d){var f=L(d),g=Q(a);f&&(b[f]=b[f]?b[f]+", "+g:g)});return b}function xd(a){var b;return function(d){b||(b=wd(a));return d?(d=b[L(d)],void 0===d&&(d=null),d):b}}function yd(a,b,d,c){if(C(c))return c(a,b,d);r(c,function(c){a=c(a,b,d)});return a}function Ff(){var a=this.defaults={transformResponse:[uc],transformRequest:[function(a){return B(a)&&"[object File]"!==ia.call(a)&&"[object Blob]"!==ia.call(a)&&"[object FormData]"!==ia.call(a)? 
eb(a):a}],headers:{common:{Accept:"application/json, text/plain, */*"},post:ka(vc),put:ka(vc),patch:ka(vc)},xsrfCookieName:"XSRF-TOKEN",xsrfHeaderName:"X-XSRF-TOKEN",paramSerializer:"$httpParamSerializer",jsonpCallbackParam:"callback"},b=!1;this.useApplyAsync=function(a){return u(a)?(b=!!a,this):b};var d=this.interceptors=[];this.$get=["$browser","$httpBackend","$$cookieReader","$cacheFactory","$rootScope","$q","$injector","$sce",function(c,e,f,g,h,k,l,m){function p(b){function d(a,b){for(var c=0, e=b.length;c<e;){var g=b[c++],f=b[c++];a=a.then(g,f)}b.length=0;return a}function e(a,b){var c,d={};r(a,function(a,e){C(a)?(c=a(b),null!=c&&(d[e]=c)):d[e]=a});return d}function g(a){var b=O({},a);b.data=yd(a.data,a.headers,a.status,f.transformResponse);a=a.status;return 200<=a&&300>a?b:k.reject(b)}if(!B(b))throw K("$http")("badreq",b);if(!E(m.valueOf(b.url)))throw K("$http")("badreq",b.url);var f=O({method:"get",transformRequest:a.transformRequest,transformResponse:a.transformResponse,paramSerializer:a.paramSerializer, jsonpCallbackParam:a.jsonpCallbackParam},b);f.headers=function(b){var c=a.headers,d=O({},b.headers),g,f,h,c=O({},c.common,c[L(b.method)]);a:for(g in c){f=L(g);for(h in d)if(L(h)===f)continue a;d[g]=c[g]}return e(d,ka(b))}(b);f.method=ub(f.method);f.paramSerializer=E(f.paramSerializer)?l.get(f.paramSerializer):f.paramSerializer;c.$$incOutstandingRequestCount();var h=[],p=[];b=k.resolve(f);r(y,function(a){(a.request||a.requestError)&&h.unshift(a.request,a.requestError);(a.response||a.responseError)&& p.push(a.response,a.responseError)});b=d(b,h);b=b.then(function(b){var c=b.headers,d=yd(b.data,xd(c),void 0,b.transformRequest);x(d)&&r(c,function(a,b){"content-type"===L(b)&&delete c[b]});x(b.withCredentials)&&!x(a.withCredentials)&&(b.withCredentials=a.withCredentials);return n(b,d).then(g,g)});b=d(b,p);return b=b.finally(function(){c.$$completeOutstandingRequest(D)})}function n(c,d){function g(a){if(a){var c={};r(a,function(a,d){c[d]=function(c){function d(){a(c)}b?h.$applyAsync(d):h.$$phase?d(): h.$apply(d)}});return c}}function l(a,c,d,e,g){function f(){n(c,a,d,e,g)}M&&(200<=a&&300>a?M.put(N,[a,c,wd(d),e,g]):M.remove(N));b?h.$applyAsync(f):(f(),h.$$phase||h.$apply())}function n(a,b,d,e,g){b=-1<=b?b:0;(200<=b&&300>b?J.resolve:J.reject)({data:a,status:b,headers:xd(d),config:c,statusText:e,xhrStatus:g})}function G(a){n(a.data,a.status,ka(a.headers()),a.statusText,a.xhrStatus)}function y(){var a=p.pendingRequests.indexOf(c);-1!==a&&p.pendingRequests.splice(a,1)}var J=k.defer(),R=J.promise,M, T,P=c.headers,q="jsonp"===L(c.method),N=c.url;q?N=m.getTrustedResourceUrl(N):E(N)||(N=m.valueOf(N));N=F(N,c.paramSerializer(c.params));q&&(N=s(N,c.jsonpCallbackParam));p.pendingRequests.push(c);R.then(y,y);!c.cache&&!a.cache||!1===c.cache||"GET"!==c.method&&"JSONP"!==c.method||(M=B(c.cache)?c.cache:B(a.cache)?a.cache:v);M&&(T=M.get(N),u(T)?T&&C(T.then)?T.then(G,G):I(T)?n(T[1],T[0],ka(T[2]),T[3],T[4]):n(T,200,{},"OK","complete"):M.put(N,R));x(T)&&((T=zd(c.url)?f()[c.xsrfCookieName||a.xsrfCookieName]: void 0)&&(P[c.xsrfHeaderName||a.xsrfHeaderName]=T),e(c.method,N,d,l,P,c.timeout,c.withCredentials,c.responseType,g(c.eventHandlers),g(c.uploadEventHandlers)));return R}function F(a,b){0<b.length&&(a+=(-1===a.indexOf("?")?"?":"&")+b);return a}function s(a,b){var c=a.split("?");if(2<c.length)throw Kb("badjsonp",a);c=ec(c[1]);r(c,function(c,d){if("JSON_CALLBACK"===c)throw Kb("badjsonp",a);if(d===b)throw Kb("badjsonp",b,a);});return a+=(-1===a.indexOf("?")?"?":"&")+b+"=JSON_CALLBACK"}var v=g("$http"); 
a.paramSerializer=E(a.paramSerializer)?l.get(a.paramSerializer):a.paramSerializer;var y=[];r(d,function(a){y.unshift(E(a)?l.get(a):l.invoke(a))});p.pendingRequests=[];(function(a){r(arguments,function(a){p[a]=function(b,c){return p(O({},c||{},{method:a,url:b}))}})})("get","delete","head","jsonp");(function(a){r(arguments,function(a){p[a]=function(b,c,d){return p(O({},d||{},{method:a,url:b,data:c}))}})})("post","put","patch");p.defaults=a;return p}]}function Jf(){this.$get=function(){return function(){return new w.XMLHttpRequest}}} function If(){this.$get=["$browser","$jsonpCallbacks","$document","$xhrFactory",function(a,b,d,c){return yg(a,c,a.defer,b,d[0])}]}function yg(a,b,d,c,e){function f(a,b,d){a=a.replace("JSON_CALLBACK",b);var f=e.createElement("script"),m=null;f.type="text/javascript";f.src=a;f.async=!0;m=function(a){f.removeEventListener("load",m);f.removeEventListener("error",m);e.body.removeChild(f);f=null;var g=-1,F="unknown";a&&("load"!==a.type||c.wasCalled(b)||(a={type:"error"}),F=a.type,g="error"===a.type?404: 200);d&&d(g,F)};f.addEventListener("load",m);f.addEventListener("error",m);e.body.appendChild(f);return m}return function(e,h,k,l,m,p,n,F,s,v){function y(){q&&q();A&&A.abort()}function t(a,b,c,e,g,f){u(G)&&d.cancel(G);q=A=null;a(b,c,e,g,f)}h=h||a.url();if("jsonp"===L(e))var Aa=c.createCallback(h),q=f(h,Aa,function(a,b){var d=200===a&&c.getResponse(Aa);t(l,a,d,"",b,"complete");c.removeCallback(Aa)});else{var A=b(e,h);A.open(e,h,!0);r(m,function(a,b){u(a)&&A.setRequestHeader(b,a)});A.onload=function(){var a= A.statusText||"",b="response"in A?A.response:A.responseText,c=1223===A.status?204:A.status;0===c&&(c=b?200:"file"===ta(h).protocol?404:0);t(l,c,b,A.getAllResponseHeaders(),a,"complete")};A.onerror=function(){t(l,-1,null,null,"","error")};A.onabort=function(){t(l,-1,null,null,"","abort")};A.ontimeout=function(){t(l,-1,null,null,"","timeout")};r(s,function(a,b){A.addEventListener(b,a)});r(v,function(a,b){A.upload.addEventListener(b,a)});n&&(A.withCredentials=!0);if(F)try{A.responseType=F}catch(H){if("json"!== F)throw H;}A.send(x(k)?null:k)}if(0<p)var G=d(y,p);else p&&C(p.then)&&p.then(y)}}function Df(){var a="{{",b="}}";this.startSymbol=function(b){return b?(a=b,this):a};this.endSymbol=function(a){return a?(b=a,this):b};this.$get=["$parse","$exceptionHandler","$sce",function(d,c,e){function f(a){return"\\\\\\"+a}function g(c){return c.replace(p,a).replace(n,b)}function h(a,b,c,d){var e=a.$watch(function(a){e();return d(a)},b,c);return e}function k(f,k,p,n){function t(a){try{var b=a;a=p?e.getTrusted(p, b):e.valueOf(b);return n&&!u(a)?a:gc(a)}catch(d){c(Fa.interr(f,d))}}if(!f.length||-1===f.indexOf(a)){var r;k||(k=g(f),r=la(k),r.exp=f,r.expressions=[],r.$$watchDelegate=h);return r}n=!!n;var q,A,H=0,G=[],ba=[];r=f.length;for(var J=[],R=[];H<r;)if(-1!==(q=f.indexOf(a,H))&&-1!==(A=f.indexOf(b,q+l)))H!==q&&J.push(g(f.substring(H,q))),H=f.substring(q+l,A),G.push(H),ba.push(d(H,t)),H=A+m,R.push(J.length),J.push("");else{H!==r&&J.push(g(f.substring(H)));break}p&&1<J.length&&Fa.throwNoconcat(f);if(!k||G.length){var M= function(a){for(var b=0,c=G.length;b<c;b++){if(n&&x(a[b]))return;J[R[b]]=a[b]}return J.join("")};return O(function(a){var b=0,d=G.length,e=Array(d);try{for(;b<d;b++)e[b]=ba[b](a);return M(e)}catch(g){c(Fa.interr(f,g))}},{exp:f,expressions:G,$$watchDelegate:function(a,b){var c;return a.$watchGroup(ba,function(d,e){var g=M(d);b.call(this,g,d!==e?c:g,a);c=g})}})}}var l=a.length,m=b.length,p=new RegExp(a.replace(/./g,f),"g"),n=new 
RegExp(b.replace(/./g,f),"g");k.startSymbol=function(){return a};k.endSymbol= function(){return b};return k}]}function Ef(){this.$get=["$rootScope","$window","$q","$$q","$browser",function(a,b,d,c,e){function f(f,k,l,m){function p(){n?f.apply(null,F):f(y)}var n=4<arguments.length,F=n?xa.call(arguments,4):[],s=b.setInterval,v=b.clearInterval,y=0,t=u(m)&&!m,r=(t?c:d).defer(),q=r.promise;l=u(l)?l:0;q.$$intervalId=s(function(){t?e.defer(p):a.$evalAsync(p);r.notify(y++);0<l&&y>=l&&(r.resolve(y),v(q.$$intervalId),delete g[q.$$intervalId]);t||a.$apply()},k);g[q.$$intervalId]=r;return q} var g={};f.cancel=function(a){return a&&a.$$intervalId in g?(g[a.$$intervalId].promise.$$state.pur=!0,g[a.$$intervalId].reject("canceled"),b.clearInterval(a.$$intervalId),delete g[a.$$intervalId],!0):!1};return f}]}function wc(a){a=a.split("/");for(var b=a.length;b--;)a[b]=fb(a[b].replace(/%2F/g,"/"));return a.join("/")}function Ad(a,b){var d=ta(a);b.$$protocol=d.protocol;b.$$host=d.hostname;b.$$port=Z(d.port)||zg[d.protocol]||null}function Bd(a,b,d){if(Ag.test(a))throw kb("badpath",a);var c="/"!== a.charAt(0);c&&(a="/"+a);a=ta(a);for(var c=(c&&"/"===a.pathname.charAt(0)?a.pathname.substring(1):a.pathname).split("/"),e=c.length;e--;)c[e]=decodeURIComponent(c[e]),d&&(c[e]=c[e].replace(/\//g,"%2F"));d=c.join("/");b.$$path=d;b.$$search=ec(a.search);b.$$hash=decodeURIComponent(a.hash);b.$$path&&"/"!==b.$$path.charAt(0)&&(b.$$path="/"+b.$$path)}function xc(a,b){return a.slice(0,b.length)===b}function ua(a,b){if(xc(b,a))return b.substr(a.length)}function La(a){var b=a.indexOf("#");return-1===b?a: a.substr(0,b)}function lb(a){return a.replace(/(#.+)|#$/,"$1")}function yc(a,b,d){this.$$html5=!0;d=d||"";Ad(a,this);this.$$parse=function(a){var d=ua(b,a);if(!E(d))throw kb("ipthprfx",a,b);Bd(d,this,!0);this.$$path||(this.$$path="/");this.$$compose()};this.$$compose=function(){var a=fc(this.$$search),d=this.$$hash?"#"+fb(this.$$hash):"";this.$$url=wc(this.$$path)+(a?"?"+a:"")+d;this.$$absUrl=b+this.$$url.substr(1);this.$$urlUpdatedByLocation=!0};this.$$parseLinkUrl=function(c,e){if(e&&"#"===e[0])return this.hash(e.slice(1)), !0;var f,g;u(f=ua(a,c))?(g=f,g=d&&u(f=ua(d,f))?b+(ua("/",f)||f):a+g):u(f=ua(b,c))?g=b+f:b===c+"/"&&(g=b);g&&this.$$parse(g);return!!g}}function zc(a,b,d){Ad(a,this);this.$$parse=function(c){var e=ua(a,c)||ua(b,c),f;x(e)||"#"!==e.charAt(0)?this.$$html5?f=e:(f="",x(e)&&(a=c,this.replace())):(f=ua(d,e),x(f)&&(f=e));Bd(f,this,!1);c=this.$$path;var e=a,g=/^\/[A-Z]:(\/.*)/;xc(f,e)&&(f=f.replace(e,""));g.exec(f)||(c=(f=g.exec(c))?f[1]:c);this.$$path=c;this.$$compose()};this.$$compose=function(){var b=fc(this.$$search), e=this.$$hash?"#"+fb(this.$$hash):"";this.$$url=wc(this.$$path)+(b?"?"+b:"")+e;this.$$absUrl=a+(this.$$url?d+this.$$url:"");this.$$urlUpdatedByLocation=!0};this.$$parseLinkUrl=function(b,d){return La(a)===La(b)?(this.$$parse(b),!0):!1}}function Cd(a,b,d){this.$$html5=!0;zc.apply(this,arguments);this.$$parseLinkUrl=function(c,e){if(e&&"#"===e[0])return this.hash(e.slice(1)),!0;var f,g;a===La(c)?f=c:(g=ua(b,c))?f=a+d+g:b===c+"/"&&(f=b);f&&this.$$parse(f);return!!f};this.$$compose=function(){var b=fc(this.$$search), e=this.$$hash?"#"+fb(this.$$hash):"";this.$$url=wc(this.$$path)+(b?"?"+b:"")+e;this.$$absUrl=a+d+this.$$url;this.$$urlUpdatedByLocation=!0}}function Lb(a){return function(){return this[a]}}function Dd(a,b){return function(d){if(x(d))return this[a];this[a]=b(d);this.$$compose();return this}}function Lf(){var 
a="!",b={enabled:!1,requireBase:!0,rewriteLinks:!0};this.hashPrefix=function(b){return u(b)?(a=b,this):a};this.html5Mode=function(a){if(Na(a))return b.enabled=a,this;if(B(a)){Na(a.enabled)&&(b.enabled= a.enabled);Na(a.requireBase)&&(b.requireBase=a.requireBase);if(Na(a.rewriteLinks)||E(a.rewriteLinks))b.rewriteLinks=a.rewriteLinks;return this}return b};this.$get=["$rootScope","$browser","$sniffer","$rootElement","$window",function(d,c,e,f,g){function h(a,b,d){var e=l.url(),g=l.$$state;try{c.url(a,b,d),l.$$state=c.state()}catch(f){throw l.url(e),l.$$state=g,f;}}function k(a,b){d.$broadcast("$locationChangeSuccess",l.absUrl(),a,l.$$state,b)}var l,m;m=c.baseHref();var p=c.url(),n;if(b.enabled){if(!m&& b.requireBase)throw kb("nobase");n=p.substring(0,p.indexOf("/",p.indexOf("//")+2))+(m||"/");m=e.history?yc:Cd}else n=La(p),m=zc;var F=n.substr(0,La(n).lastIndexOf("/")+1);l=new m(n,F,"#"+a);l.$$parseLinkUrl(p,p);l.$$state=c.state();var s=/^\s*(javascript|mailto):/i;f.on("click",function(a){var e=b.rewriteLinks;if(e&&!a.ctrlKey&&!a.metaKey&&!a.shiftKey&&2!==a.which&&2!==a.button){for(var h=z(a.target);"a"!==ya(h[0]);)if(h[0]===f[0]||!(h=h.parent())[0])return;if(!E(e)||!x(h.attr(e))){var e=h.prop("href"), k=h.attr("href")||h.attr("xlink:href");B(e)&&"[object SVGAnimatedString]"===e.toString()&&(e=ta(e.animVal).href);s.test(e)||!e||h.attr("target")||a.isDefaultPrevented()||!l.$$parseLinkUrl(e,k)||(a.preventDefault(),l.absUrl()!==c.url()&&(d.$apply(),g.angular["ff-684208-preventDefault"]=!0))}}});lb(l.absUrl())!==lb(p)&&c.url(l.absUrl(),!0);var v=!0;c.onUrlChange(function(a,b){xc(a,F)?(d.$evalAsync(function(){var c=l.absUrl(),e=l.$$state,g;a=lb(a);l.$$parse(a);l.$$state=b;g=d.$broadcast("$locationChangeStart", a,c,b,e).defaultPrevented;l.absUrl()===a&&(g?(l.$$parse(c),l.$$state=e,h(c,!1,e)):(v=!1,k(c,e)))}),d.$$phase||d.$digest()):g.location.href=a});d.$watch(function(){if(v||l.$$urlUpdatedByLocation){l.$$urlUpdatedByLocation=!1;var a=lb(c.url()),b=lb(l.absUrl()),g=c.state(),f=l.$$replace,m=a!==b||l.$$html5&&e.history&&g!==l.$$state;if(v||m)v=!1,d.$evalAsync(function(){var b=l.absUrl(),c=d.$broadcast("$locationChangeStart",b,a,l.$$state,g).defaultPrevented;l.absUrl()===b&&(c?(l.$$parse(a),l.$$state=g): (m&&h(b,f,g===l.$$state?null:l.$$state),k(a,g)))})}l.$$replace=!1});return l}]}function Mf(){var a=!0,b=this;this.debugEnabled=function(b){return u(b)?(a=b,this):a};this.$get=["$window",function(d){function c(a){bc(a)&&(a.stack&&f?a=a.message&&-1===a.stack.indexOf(a.message)?"Error: "+a.message+"\n"+a.stack:a.stack:a.sourceURL&&(a=a.message+"\n"+a.sourceURL+":"+a.line));return a}function e(a){var b=d.console||{},e=b[a]||b.log||D;return function(){var a=[];r(arguments,function(b){a.push(c(b))});return Function.prototype.apply.call(e, b,a)}}var f=Ca||/\bEdge\//.test(d.navigator&&d.navigator.userAgent);return{log:e("log"),info:e("info"),warn:e("warn"),error:e("error"),debug:function(){var c=e("debug");return function(){a&&c.apply(b,arguments)}}()}}]}function Bg(a){return a+""}function Cg(a,b){return"undefined"!==typeof a?a:b}function Ed(a,b){return"undefined"===typeof a?b:"undefined"===typeof b?a:a+b}function Dg(a,b){switch(a.type){case q.MemberExpression:if(a.computed)return!1;break;case q.UnaryExpression:return 1;case q.BinaryExpression:return"+"!== a.operator?1:!1;case q.CallExpression:return!1}return void 0===b?Fd:b}function W(a,b,d){var c,e,f=a.isPure=Dg(a,d);switch(a.type){case q.Program:c=!0;r(a.body,function(a){W(a.expression,b,f);c=c&&a.expression.constant});a.constant=c;break;case 
q.Literal:a.constant=!0;a.toWatch=[];break;case q.UnaryExpression:W(a.argument,b,f);a.constant=a.argument.constant;a.toWatch=a.argument.toWatch;break;case q.BinaryExpression:W(a.left,b,f);W(a.right,b,f);a.constant=a.left.constant&&a.right.constant;a.toWatch= a.left.toWatch.concat(a.right.toWatch);break;case q.LogicalExpression:W(a.left,b,f);W(a.right,b,f);a.constant=a.left.constant&&a.right.constant;a.toWatch=a.constant?[]:[a];break;case q.ConditionalExpression:W(a.test,b,f);W(a.alternate,b,f);W(a.consequent,b,f);a.constant=a.test.constant&&a.alternate.constant&&a.consequent.constant;a.toWatch=a.constant?[]:[a];break;case q.Identifier:a.constant=!1;a.toWatch=[a];break;case q.MemberExpression:W(a.object,b,f);a.computed&&W(a.property,b,f);a.constant=a.object.constant&& (!a.computed||a.property.constant);a.toWatch=a.constant?[]:[a];break;case q.CallExpression:c=d=a.filter?!b(a.callee.name).$stateful:!1;e=[];r(a.arguments,function(a){W(a,b,f);c=c&&a.constant;e.push.apply(e,a.toWatch)});a.constant=c;a.toWatch=d?e:[a];break;case q.AssignmentExpression:W(a.left,b,f);W(a.right,b,f);a.constant=a.left.constant&&a.right.constant;a.toWatch=[a];break;case q.ArrayExpression:c=!0;e=[];r(a.elements,function(a){W(a,b,f);c=c&&a.constant;e.push.apply(e,a.toWatch)});a.constant=c; a.toWatch=e;break;case q.ObjectExpression:c=!0;e=[];r(a.properties,function(a){W(a.value,b,f);c=c&&a.value.constant;e.push.apply(e,a.value.toWatch);a.computed&&(W(a.key,b,!1),c=c&&a.key.constant,e.push.apply(e,a.key.toWatch))});a.constant=c;a.toWatch=e;break;case q.ThisExpression:a.constant=!1;a.toWatch=[];break;case q.LocalsExpression:a.constant=!1,a.toWatch=[]}}function Gd(a){if(1===a.length){a=a[0].expression;var b=a.toWatch;return 1!==b.length?b:b[0]!==a?b:void 0}}function Hd(a){return a.type=== q.Identifier||a.type===q.MemberExpression}function Id(a){if(1===a.body.length&&Hd(a.body[0].expression))return{type:q.AssignmentExpression,left:a.body[0].expression,right:{type:q.NGValueParameter},operator:"="}}function Jd(a){this.$filter=a}function Kd(a){this.$filter=a}function Mb(a,b,d){this.ast=new q(a,d);this.astCompiler=d.csp?new Kd(b):new Jd(b)}function Ac(a){return C(a.valueOf)?a.valueOf():Eg.call(a)}function Nf(){var a=S(),b={"true":!0,"false":!1,"null":null,undefined:void 0},d,c;this.addLiteral= function(a,c){b[a]=c};this.setIdentifierFns=function(a,b){d=a;c=b;return this};this.$get=["$filter",function(e){function f(b,c){var d,g;switch(typeof b){case "string":return g=b=b.trim(),d=a[g],d||(d=new Nb(n),d=(new Mb(d,e,n)).parse(b),d.constant?d.$$watchDelegate=m:d.oneTime?d.$$watchDelegate=d.literal?l:k:d.inputs&&(d.$$watchDelegate=h),a[g]=d),p(d,c);case "function":return p(b,c);default:return p(D,c)}}function g(a,b,c){return null==a||null==b?a===b:"object"!==typeof a||(a=Ac(a),"object"!==typeof a|| c)?a===b||a!==a&&b!==b:!1}function h(a,b,c,d,e){var f=d.inputs,h;if(1===f.length){var k=g,f=f[0];return a.$watch(function(a){var b=f(a);g(b,k,f.isPure)||(h=d(a,void 0,void 0,[b]),k=b&&Ac(b));return h},b,c,e)}for(var l=[],m=[],p=0,n=f.length;p<n;p++)l[p]=g,m[p]=null;return a.$watch(function(a){for(var b=!1,c=0,e=f.length;c<e;c++){var k=f[c](a);if(b||(b=!g(k,l[c],f[c].isPure)))m[c]=k,l[c]=k&&Ac(k)}b&&(h=d(a,void 0,void 0,m));return h},b,c,e)}function k(a,b,c,d,e){function g(a){return d(a)}function f(a, c,d){l=a;C(b)&&b(a,c,d);u(a)&&d.$$postDigest(function(){u(l)&&k()})}var k,l;return k=d.inputs?h(a,f,c,d,e):a.$watch(g,f,c)}function l(a,b,c,d){function e(a){var b=!0;r(a,function(a){u(a)||(b=!1)});return b}var g,f;return 
g=a.$watch(function(a){return d(a)},function(a,c,d){f=a;C(b)&&b(a,c,d);e(a)&&d.$$postDigest(function(){e(f)&&g()})},c)}function m(a,b,c,d){var e=a.$watch(function(a){e();return d(a)},b,c);return e}function p(a,b){if(!b)return a;var c=a.$$watchDelegate,d=!1,e=c!==l&&c!==k?function(c, e,g,f){g=d&&f?f[0]:a(c,e,g,f);return b(g,c,e)}:function(c,d,e,g){e=a(c,d,e,g);c=b(e,c,d);return u(e)?c:e},d=!a.inputs;c&&c!==h?(e.$$watchDelegate=c,e.inputs=a.inputs):b.$stateful||(e.$$watchDelegate=h,e.inputs=a.inputs?a.inputs:[a]);e.inputs&&(e.inputs=e.inputs.map(function(a){return a.isPure===Fd?function(b){return a(b)}:a}));return e}var n={csp:Ja().noUnsafeEval,literals:pa(b),isIdentifierStart:C(d)&&d,isIdentifierContinue:C(c)&&c};f.$$getAst=function(a){var b=new Nb(n);return(new Mb(b,e,n)).getAst(a).ast}; return f}]}function Pf(){var a=!0;this.$get=["$rootScope","$exceptionHandler",function(b,d){return Ld(function(a){b.$evalAsync(a)},d,a)}];this.errorOnUnhandledRejections=function(b){return u(b)?(a=b,this):a}}function Qf(){var a=!0;this.$get=["$browser","$exceptionHandler",function(b,d){return Ld(function(a){b.defer(a)},d,a)}];this.errorOnUnhandledRejections=function(b){return u(b)?(a=b,this):a}}function Ld(a,b,d){function c(){return new e}function e(){var a=this.promise=new f;this.resolve=function(b){k(a, b)};this.reject=function(b){m(a,b)};this.notify=function(b){n(a,b)}}function f(){this.$$state={status:0}}function g(){for(;!u&&w.length;){var a=w.shift();if(!a.pur){a.pur=!0;var c=a.value,c="Possibly unhandled rejection: "+("function"===typeof c?c.toString().replace(/ \{[\s\S]*$/,""):x(c)?"undefined":"string"!==typeof c?De(c,void 0):c);bc(a.value)?b(a.value,c):b(c)}}}function h(c){!d||c.pending||2!==c.status||c.pur||(0===u&&0===w.length&&a(g),w.push(c));!c.processScheduled&&c.pending&&(c.processScheduled= !0,++u,a(function(){var e,f,h;h=c.pending;c.processScheduled=!1;c.pending=void 0;try{for(var l=0,p=h.length;l<p;++l){c.pur=!0;f=h[l][0];e=h[l][c.status];try{C(e)?k(f,e(c.value)):1===c.status?k(f,c.value):m(f,c.value)}catch(n){m(f,n),n&&!0===n.$$passToExceptionHandler&&b(n)}}}finally{--u,d&&0===u&&a(g)}}))}function k(a,b){a.$$state.status||(b===a?p(a,t("qcycle",b)):l(a,b))}function l(a,b){function c(b){g||(g=!0,l(a,b))}function d(b){g||(g=!0,p(a,b))}function e(b){n(a,b)}var f,g=!1;try{if(B(b)||C(b))f= b.then;C(f)?(a.$$state.status=-1,f.call(b,c,d,e)):(a.$$state.value=b,a.$$state.status=1,h(a.$$state))}catch(k){d(k)}}function m(a,b){a.$$state.status||p(a,b)}function p(a,b){a.$$state.value=b;a.$$state.status=2;h(a.$$state)}function n(c,d){var e=c.$$state.pending;0>=c.$$state.status&&e&&e.length&&a(function(){for(var a,c,g=0,f=e.length;g<f;g++){c=e[g][0];a=e[g][3];try{n(c,C(a)?a(d):d)}catch(h){b(h)}}})}function F(a){var b=new f;m(b,a);return b}function s(a,b,c){var d=null;try{C(c)&&(d=c())}catch(e){return F(e)}return d&& C(d.then)?d.then(function(){return b(a)},F):b(a)}function v(a,b,c,d){var e=new f;k(e,a);return e.then(b,c,d)}function q(a){if(!C(a))throw t("norslvr",a);var b=new f;a(function(a){k(b,a)},function(a){m(b,a)});return b}var t=K("$q",TypeError),u=0,w=[];O(f.prototype,{then:function(a,b,c){if(x(a)&&x(b)&&x(c))return this;var d=new f;this.$$state.pending=this.$$state.pending||[];this.$$state.pending.push([d,a,b,c]);0<this.$$state.status&&h(this.$$state);return d},"catch":function(a){return this.then(null, a)},"finally":function(a,b){return this.then(function(b){return s(b,A,a)},function(b){return s(b,F,a)},b)}});var 
A=v;q.prototype=f.prototype;q.defer=c;q.reject=F;q.when=v;q.resolve=A;q.all=function(a){var b=new f,c=0,d=I(a)?[]:{};r(a,function(a,e){c++;v(a).then(function(a){d[e]=a;--c||k(b,d)},function(a){m(b,a)})});0===c&&k(b,d);return b};q.race=function(a){var b=c();r(a,function(a){v(a).then(b.resolve,b.reject)});return b.promise};return q}function Zf(){this.$get=["$window","$timeout",function(a, b){var d=a.requestAnimationFrame||a.webkitRequestAnimationFrame,c=a.cancelAnimationFrame||a.webkitCancelAnimationFrame||a.webkitCancelRequestAnimationFrame,e=!!d,f=e?function(a){var b=d(a);return function(){c(b)}}:function(a){var c=b(a,16.66,!1);return function(){b.cancel(c)}};f.supported=e;return f}]}function Of(){function a(a){function b(){this.$$watchers=this.$$nextSibling=this.$$childHead=this.$$childTail=null;this.$$listeners={};this.$$listenerCount={};this.$$watchersCount=0;this.$id=++qb;this.$$ChildScope= null}b.prototype=a;return b}var b=10,d=K("$rootScope"),c=null,e=null;this.digestTtl=function(a){arguments.length&&(b=a);return b};this.$get=["$exceptionHandler","$parse","$browser",function(f,g,h){function k(a){a.currentScope.$$destroyed=!0}function l(a){9===Ca&&(a.$$childHead&&l(a.$$childHead),a.$$nextSibling&&l(a.$$nextSibling));a.$parent=a.$$nextSibling=a.$$prevSibling=a.$$childHead=a.$$childTail=a.$root=a.$$watchers=null}function m(){this.$id=++qb;this.$$phase=this.$parent=this.$$watchers=this.$$nextSibling= this.$$prevSibling=this.$$childHead=this.$$childTail=null;this.$root=this;this.$$destroyed=!1;this.$$listeners={};this.$$listenerCount={};this.$$watchersCount=0;this.$$isolateBindings=null}function p(a){if(t.$$phase)throw d("inprog",t.$$phase);t.$$phase=a}function n(a,b){do a.$$watchersCount+=b;while(a=a.$parent)}function F(a,b,c){do a.$$listenerCount[c]-=b,0===a.$$listenerCount[c]&&delete a.$$listenerCount[c];while(a=a.$parent)}function s(){}function v(){for(;A.length;)try{A.shift()()}catch(a){f(a)}e= null}function q(){null===e&&(e=h.defer(function(){t.$apply(v)}))}m.prototype={constructor:m,$new:function(b,c){var d;c=c||this;b?(d=new m,d.$root=this.$root):(this.$$ChildScope||(this.$$ChildScope=a(this)),d=new this.$$ChildScope);d.$parent=c;d.$$prevSibling=c.$$childTail;c.$$childHead?(c.$$childTail.$$nextSibling=d,c.$$childTail=d):c.$$childHead=c.$$childTail=d;(b||c!==this)&&d.$on("$destroy",k);return d},$watch:function(a,b,d,e){var f=g(a);b=C(b)?b:D;if(f.$$watchDelegate)return f.$$watchDelegate(this, b,d,f,a);var h=this,k=h.$$watchers,l={fn:b,last:s,get:f,exp:e||a,eq:!!d};c=null;k||(k=h.$$watchers=[],k.$$digestWatchIndex=-1);k.unshift(l);k.$$digestWatchIndex++;n(this,1);return function(){var a=cb(k,l);0<=a&&(n(h,-1),a<k.$$digestWatchIndex&&k.$$digestWatchIndex--);c=null}},$watchGroup:function(a,b){function c(){h=!1;k?(k=!1,b(e,e,g)):b(e,d,g)}var d=Array(a.length),e=Array(a.length),f=[],g=this,h=!1,k=!0;if(!a.length){var l=!0;g.$evalAsync(function(){l&&b(e,e,g)});return function(){l=!1}}if(1=== a.length)return this.$watch(a[0],function(a,c,f){e[0]=a;d[0]=c;b(e,a===c?e:d,f)});r(a,function(a,b){var k=g.$watch(a,function(a,f){e[b]=a;d[b]=f;h||(h=!0,g.$evalAsync(c))});f.push(k)});return function(){for(;f.length;)f.shift()()}},$watchCollection:function(a,b){function c(a){e=a;var b,d,g,h;if(!x(e)){if(B(e))if(wa(e))for(f!==p&&(f=p,t=f.length=0,l++),a=e.length,t!==a&&(l++,f.length=t=a),b=0;b<a;b++)h=f[b],g=e[b],d=h!==h&&g!==g,d||h===g||(l++,f[b]=g);else{f!==n&&(f=n={},t=0,l++);a=0;for(b in e)ra.call(e, b)&&(a++,g=e[b],h=f[b],b in 
f?(d=h!==h&&g!==g,d||h===g||(l++,f[b]=g)):(t++,f[b]=g,l++));if(t>a)for(b in l++,f)ra.call(e,b)||(t--,delete f[b])}else f!==e&&(f=e,l++);return l}}c.$stateful=!0;var d=this,e,f,h,k=1<b.length,l=0,m=g(a,c),p=[],n={},s=!0,t=0;return this.$watch(m,function(){s?(s=!1,b(e,e,d)):b(e,h,d);if(k)if(B(e))if(wa(e)){h=Array(e.length);for(var a=0;a<e.length;a++)h[a]=e[a]}else for(a in h={},e)ra.call(e,a)&&(h[a]=e[a]);else h=e})},$digest:function(){var a,g,k,l,m,n,r,F=b,q,A=[],y,x;p("$digest"); h.$$checkUrlChange();this===t&&null!==e&&(h.defer.cancel(e),v());c=null;do{r=!1;q=this;for(n=0;n<u.length;n++){try{x=u[n],l=x.fn,l(x.scope,x.locals)}catch(z){f(z)}c=null}u.length=0;a:do{if(n=q.$$watchers)for(n.$$digestWatchIndex=n.length;n.$$digestWatchIndex--;)try{if(a=n[n.$$digestWatchIndex])if(m=a.get,(g=m(q))!==(k=a.last)&&!(a.eq?sa(g,k):U(g)&&U(k)))r=!0,c=a,a.last=a.eq?pa(g,null):g,l=a.fn,l(g,k===s?g:k,q),5>F&&(y=4-F,A[y]||(A[y]=[]),A[y].push({msg:C(a.exp)?"fn: "+(a.exp.name||a.exp.toString()): a.exp,newVal:g,oldVal:k}));else if(a===c){r=!1;break a}}catch(D){f(D)}if(!(n=q.$$watchersCount&&q.$$childHead||q!==this&&q.$$nextSibling))for(;q!==this&&!(n=q.$$nextSibling);)q=q.$parent}while(q=n);if((r||u.length)&&!F--)throw t.$$phase=null,d("infdig",b,A);}while(r||u.length);for(t.$$phase=null;H<w.length;)try{w[H++]()}catch(B){f(B)}w.length=H=0;h.$$checkUrlChange()},$destroy:function(){if(!this.$$destroyed){var a=this.$parent;this.$broadcast("$destroy");this.$$destroyed=!0;this===t&&h.$$applicationDestroyed(); n(this,-this.$$watchersCount);for(var b in this.$$listenerCount)F(this,this.$$listenerCount[b],b);a&&a.$$childHead===this&&(a.$$childHead=this.$$nextSibling);a&&a.$$childTail===this&&(a.$$childTail=this.$$prevSibling);this.$$prevSibling&&(this.$$prevSibling.$$nextSibling=this.$$nextSibling);this.$$nextSibling&&(this.$$nextSibling.$$prevSibling=this.$$prevSibling);this.$destroy=this.$digest=this.$apply=this.$evalAsync=this.$applyAsync=D;this.$on=this.$watch=this.$watchGroup=function(){return D};this.$$listeners= {};this.$$nextSibling=null;l(this)}},$eval:function(a,b){return g(a)(this,b)},$evalAsync:function(a,b){t.$$phase||u.length||h.defer(function(){u.length&&t.$digest()});u.push({scope:this,fn:g(a),locals:b})},$$postDigest:function(a){w.push(a)},$apply:function(a){try{p("$apply");try{return this.$eval(a)}finally{t.$$phase=null}}catch(b){f(b)}finally{try{t.$digest()}catch(c){throw f(c),c;}}},$applyAsync:function(a){function b(){c.$eval(a)}var c=this;a&&A.push(b);a=g(a);q()},$on:function(a,b){var c=this.$$listeners[a]; c||(this.$$listeners[a]=c=[]);c.push(b);var d=this;do d.$$listenerCount[a]||(d.$$listenerCount[a]=0),d.$$listenerCount[a]++;while(d=d.$parent);var e=this;return function(){var d=c.indexOf(b);-1!==d&&(delete c[d],F(e,1,a))}},$emit:function(a,b){var c=[],d,e=this,g=!1,h={name:a,targetScope:e,stopPropagation:function(){g=!0},preventDefault:function(){h.defaultPrevented=!0},defaultPrevented:!1},k=db([h],arguments,1),l,m;do{d=e.$$listeners[a]||c;h.currentScope=e;l=0;for(m=d.length;l<m;l++)if(d[l])try{d[l].apply(null, k)}catch(n){f(n)}else d.splice(l,1),l--,m--;if(g)break;e=e.$parent}while(e);h.currentScope=null;return h},$broadcast:function(a,b){var c=this,d=this,e={name:a,targetScope:this,preventDefault:function(){e.defaultPrevented=!0},defaultPrevented:!1};if(!this.$$listenerCount[a])return e;for(var g=db([e],arguments,1),h,k;c=d;){e.currentScope=c;d=c.$$listeners[a]||[];h=0;for(k=d.length;h<k;h++)if(d[h])try{d[h].apply(null,g)}catch(l){f(l)}else 
d.splice(h,1),h--,k--;if(!(d=c.$$listenerCount[a]&&c.$$childHead|| c!==this&&c.$$nextSibling))for(;c!==this&&!(d=c.$$nextSibling);)c=c.$parent}e.currentScope=null;return e}};var t=new m,u=t.$$asyncQueue=[],w=t.$$postDigestQueue=[],A=t.$$applyAsyncQueue=[],H=0;return t}]}function Ge(){var a=/^\s*(https?|s?ftp|mailto|tel|file):/,b=/^\s*((https?|ftp|file|blob):|data:image\/)/;this.aHrefSanitizationWhitelist=function(b){return u(b)?(a=b,this):a};this.imgSrcSanitizationWhitelist=function(a){return u(a)?(b=a,this):b};this.$get=function(){return function(d,c){var e=c?b: a,f;f=ta(d&&d.trim()).href;return""===f||f.match(e)?d:"unsafe:"+f}}}function Fg(a){if("self"===a)return a;if(E(a)){if(-1<a.indexOf("***"))throw va("iwcard",a);a=Md(a).replace(/\\\*\\\*/g,".*").replace(/\\\*/g,"[^:/.?&;]*");return new RegExp("^"+a+"$")}if($a(a))return new RegExp("^"+a.source+"$");throw va("imatcher");}function Nd(a){var b=[];u(a)&&r(a,function(a){b.push(Fg(a))});return b}function Sf(){this.SCE_CONTEXTS=oa;var a=["self"],b=[];this.resourceUrlWhitelist=function(b){arguments.length&& (a=Nd(b));return a};this.resourceUrlBlacklist=function(a){arguments.length&&(b=Nd(a));return b};this.$get=["$injector",function(d){function c(a,b){return"self"===a?zd(b):!!a.exec(b.href)}function e(a){var b=function(a){this.$$unwrapTrustedValue=function(){return a}};a&&(b.prototype=new a);b.prototype.valueOf=function(){return this.$$unwrapTrustedValue()};b.prototype.toString=function(){return this.$$unwrapTrustedValue().toString()};return b}var f=function(a){throw va("unsafe");};d.has("$sanitize")&& (f=d.get("$sanitize"));var g=e(),h={};h[oa.HTML]=e(g);h[oa.CSS]=e(g);h[oa.URL]=e(g);h[oa.JS]=e(g);h[oa.RESOURCE_URL]=e(h[oa.URL]);return{trustAs:function(a,b){var c=h.hasOwnProperty(a)?h[a]:null;if(!c)throw va("icontext",a,b);if(null===b||x(b)||""===b)return b;if("string"!==typeof b)throw va("itype",a);return new c(b)},getTrusted:function(d,e){if(null===e||x(e)||""===e)return e;var g=h.hasOwnProperty(d)?h[d]:null;if(g&&e instanceof g)return e.$$unwrapTrustedValue();if(d===oa.RESOURCE_URL){var g=ta(e.toString()), p,n,r=!1;p=0;for(n=a.length;p<n;p++)if(c(a[p],g)){r=!0;break}if(r)for(p=0,n=b.length;p<n;p++)if(c(b[p],g)){r=!1;break}if(r)return e;throw va("insecurl",e.toString());}if(d===oa.HTML)return f(e);throw va("unsafe");},valueOf:function(a){return a instanceof g?a.$$unwrapTrustedValue():a}}}]}function Rf(){var a=!0;this.enabled=function(b){arguments.length&&(a=!!b);return a};this.$get=["$parse","$sceDelegate",function(b,d){if(a&&8>Ca)throw va("iequirks");var c=ka(oa);c.isEnabled=function(){return a};c.trustAs= d.trustAs;c.getTrusted=d.getTrusted;c.valueOf=d.valueOf;a||(c.trustAs=c.getTrusted=function(a,b){return b},c.valueOf=ab);c.parseAs=function(a,d){var e=b(d);return e.literal&&e.constant?e:b(d,function(b){return c.getTrusted(a,b)})};var e=c.parseAs,f=c.getTrusted,g=c.trustAs;r(oa,function(a,b){var d=L(b);c[("parse_as_"+d).replace(Bc,wb)]=function(b){return e(a,b)};c[("get_trusted_"+d).replace(Bc,wb)]=function(b){return f(a,b)};c[("trust_as_"+d).replace(Bc,wb)]=function(b){return g(a,b)}});return c}]} function Tf(){this.$get=["$window","$document",function(a,b){var d={},c=!((!a.nw||!a.nw.process)&&a.chrome&&(a.chrome.app&&a.chrome.app.runtime||!a.chrome.app&&a.chrome.runtime&&a.chrome.runtime.id))&&a.history&&a.history.pushState,e=Z((/android (\d+)/.exec(L((a.navigator||{}).userAgent))||[])[1]),f=/Boxee/i.test((a.navigator||{}).userAgent),g=b[0]||{},h=g.body&&g.body.style,k=!1,l=!1;h&&(k=!!("transition"in h||"webkitTransition"in 
h),l=!!("animation"in h||"webkitAnimation"in h));return{history:!(!c|| 4>e||f),hasEvent:function(a){if("input"===a&&Ca)return!1;if(x(d[a])){var b=g.createElement("div");d[a]="on"+a in b}return d[a]},csp:Ja(),transitions:k,animations:l,android:e}}]}function Vf(){var a;this.httpOptions=function(b){return b?(a=b,this):a};this.$get=["$exceptionHandler","$templateCache","$http","$q","$sce",function(b,d,c,e,f){function g(h,k){g.totalPendingRequests++;if(!E(h)||x(d.get(h)))h=f.getTrustedResourceUrl(h);var l=c.defaults&&c.defaults.transformResponse;I(l)?l=l.filter(function(a){return a!== uc}):l===uc&&(l=null);return c.get(h,O({cache:d,transformResponse:l},a)).finally(function(){g.totalPendingRequests--}).then(function(a){d.put(h,a.data);return a.data},function(a){k||(a=Gg("tpload",h,a.status,a.statusText),b(a));return e.reject(a)})}g.totalPendingRequests=0;return g}]}function Wf(){this.$get=["$rootScope","$browser","$location",function(a,b,d){return{findBindings:function(a,b,d){a=a.getElementsByClassName("ng-binding");var g=[];r(a,function(a){var c=$.element(a).data("$binding");c&& r(c,function(c){d?(new RegExp("(^|\\s)"+Md(b)+"(\\s|\\||$)")).test(c)&&g.push(a):-1!==c.indexOf(b)&&g.push(a)})});return g},findModels:function(a,b,d){for(var g=["ng-","data-ng-","ng\\:"],h=0;h<g.length;++h){var k=a.querySelectorAll("["+g[h]+"model"+(d?"=":"*=")+'"'+b+'"]');if(k.length)return k}},getLocation:function(){return d.url()},setLocation:function(b){b!==d.url()&&(d.url(b),a.$digest())},whenStable:function(a){b.notifyWhenNoOutstandingRequests(a)}}}]}function Xf(){this.$get=["$rootScope","$browser", "$q","$$q","$exceptionHandler",function(a,b,d,c,e){function f(f,k,l){C(f)||(l=k,k=f,f=D);var m=xa.call(arguments,3),p=u(l)&&!l,n=(p?c:d).defer(),r=n.promise,s;s=b.defer(function(){try{n.resolve(f.apply(null,m))}catch(b){n.reject(b),e(b)}finally{delete g[r.$$timeoutId]}p||a.$apply()},k);r.$$timeoutId=s;g[s]=n;return r}var g={};f.cancel=function(a){return a&&a.$$timeoutId in g?(g[a.$$timeoutId].promise.$$state.pur=!0,g[a.$$timeoutId].reject("canceled"),delete g[a.$$timeoutId],b.defer.cancel(a.$$timeoutId)): !1};return f}]}function ta(a){Ca&&(X.setAttribute("href",a),a=X.href);X.setAttribute("href",a);return{href:X.href,protocol:X.protocol?X.protocol.replace(/:$/,""):"",host:X.host,search:X.search?X.search.replace(/^\?/,""):"",hash:X.hash?X.hash.replace(/^#/,""):"",hostname:X.hostname,port:X.port,pathname:"/"===X.pathname.charAt(0)?X.pathname:"/"+X.pathname}}function zd(a){a=E(a)?ta(a):a;return a.protocol===Od.protocol&&a.host===Od.host}function Yf(){this.$get=la(w)}function Pd(a){function b(a){try{return decodeURIComponent(a)}catch(b){return a}} var d=a[0]||{},c={},e="";return function(){var a,g,h,k,l;try{a=d.cookie||""}catch(m){a=""}if(a!==e)for(e=a,a=e.split("; "),c={},h=0;h<a.length;h++)g=a[h],k=g.indexOf("="),0<k&&(l=b(g.substring(0,k)),x(c[l])&&(c[l]=b(g.substring(k+1))));return c}}function bg(){this.$get=Pd}function ed(a){function b(d,c){if(B(d)){var e={};r(d,function(a,c){e[c]=b(c,a)});return e}return a.factory(d+"Filter",c)}this.register=b;this.$get=["$injector",function(a){return function(b){return a.get(b+"Filter")}}];b("currency", Qd);b("date",Rd);b("filter",Hg);b("json",Ig);b("limitTo",Jg);b("lowercase",Kg);b("number",Sd);b("orderBy",Td);b("uppercase",Lg)}function Hg(){return function(a,b,d,c){if(!wa(a)){if(null==a)return a;throw K("filter")("notarray",a);}c=c||"$";var e;switch(Cc(b)){case "function":break;case "boolean":case "null":case "number":case "string":e=!0;case 
"object":b=Mg(b,d,c,e);break;default:return a}return Array.prototype.filter.call(a,b)}}function Mg(a,b,d,c){var e=B(a)&&d in a;!0===b?b=sa:C(b)||(b=function(a, b){if(x(a))return!1;if(null===a||null===b)return a===b;if(B(b)||B(a)&&!ac(a))return!1;a=L(""+a);b=L(""+b);return-1!==a.indexOf(b)});return function(f){return e&&!B(f)?ha(f,a[d],b,d,!1):ha(f,a,b,d,c)}}function ha(a,b,d,c,e,f){var g=Cc(a),h=Cc(b);if("string"===h&&"!"===b.charAt(0))return!ha(a,b.substring(1),d,c,e);if(I(a))return a.some(function(a){return ha(a,b,d,c,e)});switch(g){case "object":var k;if(e){for(k in a)if(k.charAt&&"$"!==k.charAt(0)&&ha(a[k],b,d,c,!0))return!0;return f?!1:ha(a,b,d,c,!1)}if("object"=== h){for(k in b)if(f=b[k],!C(f)&&!x(f)&&(g=k===c,!ha(g?a:a[k],f,d,c,g,g)))return!1;return!0}return d(a,b);case "function":return!1;default:return d(a,b)}}function Cc(a){return null===a?"null":typeof a}function Qd(a){var b=a.NUMBER_FORMATS;return function(a,c,e){x(c)&&(c=b.CURRENCY_SYM);x(e)&&(e=b.PATTERNS[1].maxFrac);var f=c?/\u00A4/g:/\s*\u00A4\s*/g;return null==a?a:Ud(a,b.PATTERNS[1],b.GROUP_SEP,b.DECIMAL_SEP,e).replace(f,c)}}function Sd(a){var b=a.NUMBER_FORMATS;return function(a,c){return null== a?a:Ud(a,b.PATTERNS[0],b.GROUP_SEP,b.DECIMAL_SEP,c)}}function Ng(a){var b=0,d,c,e,f,g;-1<(c=a.indexOf(Vd))&&(a=a.replace(Vd,""));0<(e=a.search(/e/i))?(0>c&&(c=e),c+=+a.slice(e+1),a=a.substring(0,e)):0>c&&(c=a.length);for(e=0;a.charAt(e)===Dc;e++);if(e===(g=a.length))d=[0],c=1;else{for(g--;a.charAt(g)===Dc;)g--;c-=e;d=[];for(f=0;e<=g;e++,f++)d[f]=+a.charAt(e)}c>Wd&&(d=d.splice(0,Wd-1),b=c-1,c=1);return{d:d,e:b,i:c}}function Og(a,b,d,c){var e=a.d,f=e.length-a.i;b=x(b)?Math.min(Math.max(d,f),c):+b;d= b+a.i;c=e[d];if(0<d){e.splice(Math.max(a.i,d));for(var g=d;g<e.length;g++)e[g]=0}else for(f=Math.max(0,f),a.i=1,e.length=Math.max(1,d=b+1),e[0]=0,g=1;g<d;g++)e[g]=0;if(5<=c)if(0>d-1){for(c=0;c>d;c--)e.unshift(0),a.i++;e.unshift(1);a.i++}else e[d-1]++;for(;f<Math.max(0,b);f++)e.push(0);if(b=e.reduceRight(function(a,b,c,d){b+=a;d[c]=b%10;return Math.floor(b/10)},0))e.unshift(b),a.i++}function Ud(a,b,d,c,e){if(!E(a)&&!Y(a)||isNaN(a))return"";var f=!isFinite(a),g=!1,h=Math.abs(a)+"",k="";if(f)k="\u221e"; else{g=Ng(h);Og(g,e,b.minFrac,b.maxFrac);k=g.d;h=g.i;e=g.e;f=[];for(g=k.reduce(function(a,b){return a&&!b},!0);0>h;)k.unshift(0),h++;0<h?f=k.splice(h,k.length):(f=k,k=[0]);h=[];for(k.length>=b.lgSize&&h.unshift(k.splice(-b.lgSize,k.length).join(""));k.length>b.gSize;)h.unshift(k.splice(-b.gSize,k.length).join(""));k.length&&h.unshift(k.join(""));k=h.join(d);f.length&&(k+=c+f.join(""));e&&(k+="e+"+e)}return 0>a&&!g?b.negPre+k+b.negSuf:b.posPre+k+b.posSuf}function Ob(a,b,d,c){var e="";if(0>a||c&&0>= a)c?a=-a+1:(a=-a,e="-");for(a=""+a;a.length<b;)a=Dc+a;d&&(a=a.substr(a.length-b));return e+a}function ea(a,b,d,c,e){d=d||0;return function(f){f=f["get"+a]();if(0<d||f>-d)f+=d;0===f&&-12===d&&(f=12);return Ob(f,b,c,e)}}function mb(a,b,d){return function(c,e){var f=c["get"+a](),g=ub((d?"STANDALONE":"")+(b?"SHORT":"")+a);return e[g][f]}}function Xd(a){var b=(new Date(a,0,1)).getDay();return new Date(a,0,(4>=b?5:12)-b)}function Yd(a){return function(b){var d=Xd(b.getFullYear());b=+new Date(b.getFullYear(), b.getMonth(),b.getDate()+(4-b.getDay()))-+d;b=1+Math.round(b/6048E5);return Ob(b,a)}}function Ec(a,b){return 0>=a.getFullYear()?b.ERAS[0]:b.ERAS[1]}function Rd(a){function b(a){var b;if(b=a.match(d)){a=new Date(0);var 
f=0,g=0,h=b[8]?a.setUTCFullYear:a.setFullYear,k=b[8]?a.setUTCHours:a.setHours;b[9]&&(f=Z(b[9]+b[10]),g=Z(b[9]+b[11]));h.call(a,Z(b[1]),Z(b[2])-1,Z(b[3]));f=Z(b[4]||0)-f;g=Z(b[5]||0)-g;h=Z(b[6]||0);b=Math.round(1E3*parseFloat("0."+(b[7]||0)));k.call(a,f,g,h,b)}return a}var d=/^(\d{4})-?(\d\d)-?(\d\d)(?:T(\d\d)(?::?(\d\d)(?::?(\d\d)(?:\.(\d+))?)?)?(Z|([+-])(\d\d):?(\d\d))?)?$/; return function(c,d,f){var g="",h=[],k,l;d=d||"mediumDate";d=a.DATETIME_FORMATS[d]||d;E(c)&&(c=Pg.test(c)?Z(c):b(c));Y(c)&&(c=new Date(c));if(!fa(c)||!isFinite(c.getTime()))return c;for(;d;)(l=Qg.exec(d))?(h=db(h,l,1),d=h.pop()):(h.push(d),d=null);var m=c.getTimezoneOffset();f&&(m=Sc(f,m),c=dc(c,f,!0));r(h,function(b){k=Rg[b];g+=k?k(c,a.DATETIME_FORMATS,m):"''"===b?"'":b.replace(/(^'|'$)/g,"").replace(/''/g,"'")});return g}}function Ig(){return function(a,b){x(b)&&(b=2);return eb(a,b)}}function Jg(){return function(a, b,d){b=Infinity===Math.abs(Number(b))?Number(b):Z(b);if(U(b))return a;Y(a)&&(a=a.toString());if(!wa(a))return a;d=!d||isNaN(d)?0:Z(d);d=0>d?Math.max(0,a.length+d):d;return 0<=b?Fc(a,d,d+b):0===d?Fc(a,b,a.length):Fc(a,Math.max(0,d+b),d)}}function Fc(a,b,d){return E(a)?a.slice(b,d):xa.call(a,b,d)}function Td(a){function b(b){return b.map(function(b){var c=1,d=ab;if(C(b))d=b;else if(E(b)){if("+"===b.charAt(0)||"-"===b.charAt(0))c="-"===b.charAt(0)?-1:1,b=b.substring(1);if(""!==b&&(d=a(b),d.constant))var e= d(),d=function(a){return a[e]}}return{get:d,descending:c}})}function d(a){switch(typeof a){case "number":case "boolean":case "string":return!0;default:return!1}}function c(a,b){var c=0,d=a.type,k=b.type;if(d===k){var k=a.value,l=b.value;"string"===d?(k=k.toLowerCase(),l=l.toLowerCase()):"object"===d&&(B(k)&&(k=a.index),B(l)&&(l=b.index));k!==l&&(c=k<l?-1:1)}else c=d<k?-1:1;return c}return function(a,f,g,h){if(null==a)return a;if(!wa(a))throw K("orderBy")("notarray",a);I(f)||(f=[f]);0===f.length&& (f=["+"]);var k=b(f),l=g?-1:1,m=C(h)?h:c;a=Array.prototype.map.call(a,function(a,b){return{value:a,tieBreaker:{value:b,type:"number",index:b},predicateValues:k.map(function(c){var e=c.get(a);c=typeof e;if(null===e)c="string",e="null";else if("object"===c)a:{if(C(e.valueOf)&&(e=e.valueOf(),d(e)))break a;ac(e)&&(e=e.toString(),d(e))}return{value:e,type:c,index:b}})}});a.sort(function(a,b){for(var d=0,e=k.length;d<e;d++){var g=m(a.predicateValues[d],b.predicateValues[d]);if(g)return g*k[d].descending* l}return(m(a.tieBreaker,b.tieBreaker)||c(a.tieBreaker,b.tieBreaker))*l});return a=a.map(function(a){return a.value})}}function Qa(a){C(a)&&(a={link:a});a.restrict=a.restrict||"AC";return la(a)}function Pb(a,b,d,c,e){this.$$controls=[];this.$error={};this.$$success={};this.$pending=void 0;this.$name=e(b.name||b.ngForm||"")(d);this.$dirty=!1;this.$valid=this.$pristine=!0;this.$submitted=this.$invalid=!1;this.$$parentForm=Qb;this.$$element=a;this.$$animate=c;Zd(this)}function Zd(a){a.$$classCache={}; a.$$classCache[$d]=!(a.$$classCache[nb]=a.$$element.hasClass(nb))}function ae(a){function b(a,b,c){c&&!a.$$classCache[b]?(a.$$animate.addClass(a.$$element,b),a.$$classCache[b]=!0):!c&&a.$$classCache[b]&&(a.$$animate.removeClass(a.$$element,b),a.$$classCache[b]=!1)}function d(a,c,d){c=c?"-"+Vc(c,"-"):"";b(a,nb+c,!0===d);b(a,$d+c,!1===d)}var c=a.set,e=a.unset;a.clazz.prototype.$setValidity=function(a,g,h){x(g)?(this.$pending||(this.$pending={}),c(this.$pending,a,h)):(this.$pending&&e(this.$pending, a,h),be(this.$pending)&&(this.$pending=void 
0));Na(g)?g?(e(this.$error,a,h),c(this.$$success,a,h)):(c(this.$error,a,h),e(this.$$success,a,h)):(e(this.$error,a,h),e(this.$$success,a,h));this.$pending?(b(this,"ng-pending",!0),this.$valid=this.$invalid=void 0,d(this,"",null)):(b(this,"ng-pending",!1),this.$valid=be(this.$error),this.$invalid=!this.$valid,d(this,"",this.$valid));g=this.$pending&&this.$pending[a]?void 0:this.$error[a]?!1:this.$$success[a]?!0:null;d(this,a,g);this.$$parentForm.$setValidity(a, g,this)}}function be(a){if(a)for(var b in a)if(a.hasOwnProperty(b))return!1;return!0}function Gc(a){a.$formatters.push(function(b){return a.$isEmpty(b)?b:b.toString()})}function Va(a,b,d,c,e,f){var g=L(b[0].type);if(!e.android){var h=!1;b.on("compositionstart",function(){h=!0});b.on("compositionend",function(){h=!1;l()})}var k,l=function(a){k&&(f.defer.cancel(k),k=null);if(!h){var e=b.val();a=a&&a.type;"password"===g||d.ngTrim&&"false"===d.ngTrim||(e=Q(e));(c.$viewValue!==e||""===e&&c.$$hasNativeValidators)&& c.$setViewValue(e,a)}};if(e.hasEvent("input"))b.on("input",l);else{var m=function(a,b,c){k||(k=f.defer(function(){k=null;b&&b.value===c||l(a)}))};b.on("keydown",function(a){var b=a.keyCode;91===b||15<b&&19>b||37<=b&&40>=b||m(a,this,this.value)});if(e.hasEvent("paste"))b.on("paste cut drop",m)}b.on("change",l);if(ce[g]&&c.$$hasNativeValidators&&g===d.type)b.on("keydown wheel mousedown",function(a){if(!k){var b=this.validity,c=b.badInput,d=b.typeMismatch;k=f.defer(function(){k=null;b.badInput===c&& b.typeMismatch===d||l(a)})}});c.$render=function(){var a=c.$isEmpty(c.$viewValue)?"":c.$viewValue;b.val()!==a&&b.val(a)}}function Rb(a,b){return function(d,c){var e,f;if(fa(d))return d;if(E(d)){'"'===d.charAt(0)&&'"'===d.charAt(d.length-1)&&(d=d.substring(1,d.length-1));if(Sg.test(d))return new Date(d);a.lastIndex=0;if(e=a.exec(d))return e.shift(),f=c?{yyyy:c.getFullYear(),MM:c.getMonth()+1,dd:c.getDate(),HH:c.getHours(),mm:c.getMinutes(),ss:c.getSeconds(),sss:c.getMilliseconds()/1E3}:{yyyy:1970, MM:1,dd:1,HH:0,mm:0,ss:0,sss:0},r(e,function(a,c){c<b.length&&(f[b[c]]=+a)}),new Date(f.yyyy,f.MM-1,f.dd,f.HH,f.mm,f.ss||0,1E3*f.sss||0)}return NaN}}function ob(a,b,d,c){return function(e,f,g,h,k,l,m){function p(a){return a&&!(a.getTime&&a.getTime()!==a.getTime())}function n(a){return u(a)&&!fa(a)?d(a)||void 0:a}Hc(e,f,g,h);Va(e,f,g,h,k,l);var r=h&&h.$options.getOption("timezone"),s;h.$$parserName=a;h.$parsers.push(function(a){if(h.$isEmpty(a))return null;if(b.test(a))return a=d(a,s),r&&(a=dc(a,r)), a});h.$formatters.push(function(a){if(a&&!fa(a))throw pb("datefmt",a);if(p(a))return(s=a)&&r&&(s=dc(s,r,!0)),m("date")(a,c,r);s=null;return""});if(u(g.min)||g.ngMin){var q;h.$validators.min=function(a){return!p(a)||x(q)||d(a)>=q};g.$observe("min",function(a){q=n(a);h.$validate()})}if(u(g.max)||g.ngMax){var y;h.$validators.max=function(a){return!p(a)||x(y)||d(a)<=y};g.$observe("max",function(a){y=n(a);h.$validate()})}}}function Hc(a,b,d,c){(c.$$hasNativeValidators=B(b[0].validity))&&c.$parsers.push(function(a){var c= b.prop("validity")||{};return c.badInput||c.typeMismatch?void 0:a})}function de(a){a.$$parserName="number";a.$parsers.push(function(b){if(a.$isEmpty(b))return null;if(Tg.test(b))return parseFloat(b)});a.$formatters.push(function(b){if(!a.$isEmpty(b)){if(!Y(b))throw pb("numfmt",b);b=b.toString()}return b})}function Wa(a){u(a)&&!Y(a)&&(a=parseFloat(a));return U(a)?void 0:a}function Ic(a){var b=a.toString(),d=b.indexOf(".");return-1===d?-1<a&&1>a&&(a=/e-(\d+)$/.exec(b))?Number(a[1]):0:b.length-d-1}function ee(a, b,d){a=Number(a);var 
c=(a|0)!==a,e=(b|0)!==b,f=(d|0)!==d;if(c||e||f){var g=c?Ic(a):0,h=e?Ic(b):0,k=f?Ic(d):0,g=Math.max(g,h,k),g=Math.pow(10,g);a*=g;b*=g;d*=g;c&&(a=Math.round(a));e&&(b=Math.round(b));f&&(d=Math.round(d))}return 0===(a-b)%d}function fe(a,b,d,c,e){if(u(c)){a=a(c);if(!a.constant)throw pb("constexpr",d,c);return a(b)}return e}function Jc(a,b){function d(a,b){if(!a||!a.length)return[];if(!b||!b.length)return a;var c=[],d=0;a:for(;d<a.length;d++){for(var e=a[d],f=0;f<b.length;f++)if(e=== b[f])continue a;c.push(e)}return c}function c(a){var b=a;I(a)?b=a.map(c).join(" "):B(a)&&(b=Object.keys(a).filter(function(b){return a[b]}).join(" "));return b}function e(a){var b=a;if(I(a))b=a.map(e);else if(B(a)){var c=!1,b=Object.keys(a).filter(function(b){b=a[b];!c&&x(b)&&(c=!0);return b});c&&b.push(void 0)}return b}a="ngClass"+a;var f;return["$parse",function(g){return{restrict:"AC",link:function(h,k,l){function m(a,b){var c=[];r(a,function(a){if(0<b||t[a])t[a]=(t[a]||0)+b,t[a]===+(0<b)&&c.push(a)}); return c.join(" ")}function p(a){if(a===b){var c=w,c=m(c&&c.split(" "),1);l.$addClass(c)}else c=w,c=m(c&&c.split(" "),-1),l.$removeClass(c);u=a}function n(a){a=c(a);a!==w&&q(a)}function q(a){if(u===b){var c=w&&w.split(" "),e=a&&a.split(" "),g=d(c,e),c=d(e,c),g=m(g,-1),c=m(c,1);l.$addClass(c);l.$removeClass(g)}w=a}var s=l[a].trim(),v=":"===s.charAt(0)&&":"===s.charAt(1),s=g(s,v?e:c),y=v?n:q,t=k.data("$classCounts"),u=!0,w;t||(t=S(),k.data("$classCounts",t));"ngClass"!==a&&(f||(f=g("$index",function(a){return a& 1})),h.$watch(f,p));h.$watch(s,y,v)}}}]}function Sb(a,b,d,c,e,f,g,h,k){this.$modelValue=this.$viewValue=Number.NaN;this.$$rawModelValue=void 0;this.$validators={};this.$asyncValidators={};this.$parsers=[];this.$formatters=[];this.$viewChangeListeners=[];this.$untouched=!0;this.$touched=!1;this.$pristine=!0;this.$dirty=!1;this.$valid=!0;this.$invalid=!1;this.$error={};this.$$success={};this.$pending=void 0;this.$name=k(d.name||"",!1)(a);this.$$parentForm=Qb;this.$options=Tb;this.$$updateEvents=""; this.$$updateEventHandler=this.$$updateEventHandler.bind(this);this.$$parsedNgModel=e(d.ngModel);this.$$parsedNgModelAssign=this.$$parsedNgModel.assign;this.$$ngModelGet=this.$$parsedNgModel;this.$$ngModelSet=this.$$parsedNgModelAssign;this.$$pendingDebounce=null;this.$$parserValid=void 0;this.$$currentValidationRunId=0;Object.defineProperty(this,"$$scope",{value:a});this.$$attr=d;this.$$element=c;this.$$animate=f;this.$$timeout=g;this.$$parse=e;this.$$q=h;this.$$exceptionHandler=b;Zd(this);Ug(this)} function Ug(a){a.$$scope.$watch(function(b){b=a.$$ngModelGet(b);b===a.$modelValue||a.$modelValue!==a.$modelValue&&b!==b||a.$$setModelValue(b);return b})}function Kc(a){this.$$options=a}function ge(a,b){r(b,function(b,c){u(a[c])||(a[c]=b)})}function Ga(a,b){a.prop("selected",b);a.attr("selected",b)}var Mc={objectMaxDepth:5},Vg=/^\/(.+)\/([a-z]*)$/,ra=Object.prototype.hasOwnProperty,L=function(a){return E(a)?a.toLowerCase():a},ub=function(a){return E(a)?a.toUpperCase():a},Ca,z,ma,xa=[].slice,ug=[].splice, Wg=[].push,ia=Object.prototype.toString,Pc=Object.getPrototypeOf,qa=K("ng"),$=w.angular||(w.angular={}),ic,qb=0;Ca=w.document.documentMode;var U=Number.isNaN||function(a){return a!==a};D.$inject=[];ab.$inject=[];var I=Array.isArray,se=/^\[object (?:Uint8|Uint8Clamped|Uint16|Uint32|Int8|Int16|Int32|Float32|Float64)Array]$/,Q=function(a){return E(a)?a.trim():a},Md=function(a){return a.replace(/([-()[\]{}+?*.$^|,:#<!\\])/g,"\\$1").replace(/\x08/g,"\\x08")},Ja=function(){if(!u(Ja.rules)){var 
a=w.document.querySelector("[ng-csp]")|| w.document.querySelector("[data-ng-csp]");if(a){var b=a.getAttribute("ng-csp")||a.getAttribute("data-ng-csp");Ja.rules={noUnsafeEval:!b||-1!==b.indexOf("no-unsafe-eval"),noInlineStyle:!b||-1!==b.indexOf("no-inline-style")}}else{a=Ja;try{new Function(""),b=!1}catch(d){b=!0}a.rules={noUnsafeEval:b,noInlineStyle:!1}}}return Ja.rules},rb=function(){if(u(rb.name_))return rb.name_;var a,b,d=Ha.length,c,e;for(b=0;b<d;++b)if(c=Ha[b],a=w.document.querySelector("["+c.replace(":","\\:")+"jq]")){e=a.getAttribute(c+ "jq");break}return rb.name_=e},ue=/:/g,Ha=["ng-","data-ng-","ng:","x-ng-"],xe=function(a){var b=a.currentScript;if(!b)return!0;if(!(b instanceof w.HTMLScriptElement||b instanceof w.SVGScriptElement))return!1;b=b.attributes;return[b.getNamedItem("src"),b.getNamedItem("href"),b.getNamedItem("xlink:href")].every(function(b){if(!b)return!0;if(!b.value)return!1;var c=a.createElement("a");c.href=b.value;if(a.location.origin===c.origin)return!0;switch(c.protocol){case "http:":case "https:":case "ftp:":case "blob:":case "file:":case "data:":return!0; default:return!1}})}(w.document),Ae=/[A-Z]/g,Wc=!1,Oa=3,Fe={full:"1.6.9",major:1,minor:6,dot:9,codeName:"fiery-basilisk"};V.expando="ng339";var ib=V.cache={},gg=1;V._data=function(a){return this.cache[a[this.expando]]||{}};var cg=/-([a-z])/g,Xg=/^-ms-/,Ab={mouseleave:"mouseout",mouseenter:"mouseover"},lc=K("jqLite"),fg=/^<([\w-]+)\s*\/?>(?:<\/\1>|)$/,kc=/<|&#?\w+;/,dg=/<([\w:-]+)/,eg=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:-]+)[^>]*)\/>/gi,aa={option:[1,'<select multiple="multiple">', "</select>"],thead:[1,"<table>","</table>"],col:[2,"<table><colgroup>","</colgroup></table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],_default:[0,"",""]};aa.optgroup=aa.option;aa.tbody=aa.tfoot=aa.colgroup=aa.caption=aa.thead;aa.th=aa.td;var lg=w.Node.prototype.contains||function(a){return!!(this.compareDocumentPosition(a)&16)},Sa=V.prototype={ready:gd,toString:function(){var a=[];r(this,function(b){a.push(""+b)});return"["+a.join(", ")+"]"}, eq:function(a){return 0<=a?z(this[a]):z(this[this.length+a])},length:0,push:Wg,sort:[].sort,splice:[].splice},Gb={};r("multiple selected checked disabled readOnly required open".split(" "),function(a){Gb[L(a)]=a});var ld={};r("input select option textarea button form details".split(" "),function(a){ld[a]=!0});var sd={ngMinlength:"minlength",ngMaxlength:"maxlength",ngMin:"min",ngMax:"max",ngPattern:"pattern",ngStep:"step"};r({data:pc,removeData:oc,hasData:function(a){for(var b in ib[a.ng339])return!0; return!1},cleanData:function(a){for(var b=0,d=a.length;b<d;b++)oc(a[b])}},function(a,b){V[b]=a});r({data:pc,inheritedData:Eb,scope:function(a){return z.data(a,"$scope")||Eb(a.parentNode||a,["$isolateScope","$scope"])},isolateScope:function(a){return z.data(a,"$isolateScope")||z.data(a,"$isolateScopeNoTemplate")},controller:id,injector:function(a){return Eb(a,"$injector")},removeAttr:function(a,b){a.removeAttribute(b)},hasClass:Bb,css:function(a,b,d){b=xb(b.replace(Xg,"ms-"));if(u(d))a.style[b]=d; else return a.style[b]},attr:function(a,b,d){var c=a.nodeType;if(c!==Oa&&2!==c&&8!==c&&a.getAttribute){var c=L(b),e=Gb[c];if(u(d))null===d||!1===d&&e?a.removeAttribute(b):a.setAttribute(b,e?c:d);else return a=a.getAttribute(b),e&&null!==a&&(a=c),null===a?void 0:a}},prop:function(a,b,d){if(u(d))a[b]=d;else return a[b]},text:function(){function a(a,d){if(x(d)){var c=a.nodeType;return 
1===c||c===Oa?a.textContent:""}a.textContent=d}a.$dv="";return a}(),val:function(a,b){if(x(b)){if(a.multiple&&"select"=== ya(a)){var d=[];r(a.options,function(a){a.selected&&d.push(a.value||a.text)});return d}return a.value}a.value=b},html:function(a,b){if(x(b))return a.innerHTML;yb(a,!0);a.innerHTML=b},empty:jd},function(a,b){V.prototype[b]=function(b,c){var e,f,g=this.length;if(a!==jd&&x(2===a.length&&a!==Bb&&a!==id?b:c)){if(B(b)){for(e=0;e<g;e++)if(a===pc)a(this[e],b);else for(f in b)a(this[e],f,b[f]);return this}e=a.$dv;g=x(e)?Math.min(g,1):g;for(f=0;f<g;f++){var h=a(this[f],b,c);e=e?e+h:h}return e}for(e=0;e<g;e++)a(this[e], b,c);return this}});r({removeData:oc,on:function(a,b,d,c){if(u(c))throw lc("onargs");if(jc(a)){c=zb(a,!0);var e=c.events,f=c.handle;f||(f=c.handle=ig(a,e));c=0<=b.indexOf(" ")?b.split(" "):[b];for(var g=c.length,h=function(b,c,g){var h=e[b];h||(h=e[b]=[],h.specialHandlerWrapper=c,"$destroy"===b||g||a.addEventListener(b,f));h.push(d)};g--;)b=c[g],Ab[b]?(h(Ab[b],kg),h(b,void 0,!0)):h(b)}},off:hd,one:function(a,b,d){a=z(a);a.on(b,function e(){a.off(b,d);a.off(b,e)});a.on(b,d)},replaceWith:function(a, b){var d,c=a.parentNode;yb(a);r(new V(b),function(b){d?c.insertBefore(b,d.nextSibling):c.replaceChild(b,a);d=b})},children:function(a){var b=[];r(a.childNodes,function(a){1===a.nodeType&&b.push(a)});return b},contents:function(a){return a.contentDocument||a.childNodes||[]},append:function(a,b){var d=a.nodeType;if(1===d||11===d){b=new V(b);for(var d=0,c=b.length;d<c;d++)a.appendChild(b[d])}},prepend:function(a,b){if(1===a.nodeType){var d=a.firstChild;r(new V(b),function(b){a.insertBefore(b,d)})}}, wrap:function(a,b){var d=z(b).eq(0).clone()[0],c=a.parentNode;c&&c.replaceChild(d,a);d.appendChild(a)},remove:Fb,detach:function(a){Fb(a,!0)},after:function(a,b){var d=a,c=a.parentNode;if(c){b=new V(b);for(var e=0,f=b.length;e<f;e++){var g=b[e];c.insertBefore(g,d.nextSibling);d=g}}},addClass:Db,removeClass:Cb,toggleClass:function(a,b,d){b&&r(b.split(" "),function(b){var e=d;x(e)&&(e=!Bb(a,b));(e?Db:Cb)(a,b)})},parent:function(a){return(a=a.parentNode)&&11!==a.nodeType?a:null},next:function(a){return a.nextElementSibling}, find:function(a,b){return a.getElementsByTagName?a.getElementsByTagName(b):[]},clone:nc,triggerHandler:function(a,b,d){var c,e,f=b.type||b,g=zb(a);if(g=(g=g&&g.events)&&g[f])c={preventDefault:function(){this.defaultPrevented=!0},isDefaultPrevented:function(){return!0===this.defaultPrevented},stopImmediatePropagation:function(){this.immediatePropagationStopped=!0},isImmediatePropagationStopped:function(){return!0===this.immediatePropagationStopped},stopPropagation:D,type:f,target:a},b.type&&(c=O(c, b)),b=ka(g),e=d?[c].concat(d):[c],r(b,function(b){c.isImmediatePropagationStopped()||b.apply(a,e)})}},function(a,b){V.prototype[b]=function(b,c,e){for(var f,g=0,h=this.length;g<h;g++)x(f)?(f=a(this[g],b,c,e),u(f)&&(f=z(f))):mc(f,a(this[g],b,c,e));return u(f)?f:this}});V.prototype.bind=V.prototype.on;V.prototype.unbind=V.prototype.off;var Yg=Object.create(null);md.prototype={_idx:function(a){if(a===this._lastKey)return this._lastIndex;this._lastKey=a;return this._lastIndex=this._keys.indexOf(a)},_transformKey:function(a){return U(a)? 
Yg:a},get:function(a){a=this._transformKey(a);a=this._idx(a);if(-1!==a)return this._values[a]},set:function(a,b){a=this._transformKey(a);var d=this._idx(a);-1===d&&(d=this._lastIndex=this._keys.length);this._keys[d]=a;this._values[d]=b},delete:function(a){a=this._transformKey(a);a=this._idx(a);if(-1===a)return!1;this._keys.splice(a,1);this._values.splice(a,1);this._lastKey=NaN;this._lastIndex=-1;return!0}};var Hb=md,ag=[function(){this.$get=[function(){return Hb}]}],ng=/^([^(]+?)=>/,og=/^[^(]*\(\s*([^)]*)\)/m, Zg=/,/,$g=/^\s*(_?)(\S+?)\1\s*$/,mg=/((\/\/.*$)|(\/\*[\s\S]*?\*\/))/mg,Ba=K("$injector");gb.$$annotate=function(a,b,d){var c;if("function"===typeof a){if(!(c=a.$inject)){c=[];if(a.length){if(b)throw E(d)&&d||(d=a.name||pg(a)),Ba("strictdi",d);b=nd(a);r(b[1].split(Zg),function(a){a.replace($g,function(a,b,d){c.push(d)})})}a.$inject=c}}else I(a)?(b=a.length-1,sb(a[b],"fn"),c=a.slice(0,b)):sb(a,"fn",!0);return c};var he=K("$animate"),sf=function(){this.$get=D},tf=function(){var a=new Hb,b=[];this.$get= ["$$AnimateRunner","$rootScope",function(d,c){function e(a,b,c){var d=!1;b&&(b=E(b)?b.split(" "):I(b)?b:[],r(b,function(b){b&&(d=!0,a[b]=c)}));return d}function f(){r(b,function(b){var c=a.get(b);if(c){var d=qg(b.attr("class")),e="",f="";r(c,function(a,b){a!==!!d[b]&&(a?e+=(e.length?" ":"")+b:f+=(f.length?" ":"")+b)});r(b,function(a){e&&Db(a,e);f&&Cb(a,f)});a.delete(b)}});b.length=0}return{enabled:D,on:D,off:D,pin:D,push:function(g,h,k,l){l&&l();k=k||{};k.from&&g.css(k.from);k.to&&g.css(k.to);if(k.addClass|| k.removeClass)if(h=k.addClass,l=k.removeClass,k=a.get(g)||{},h=e(k,h,!0),l=e(k,l,!1),h||l)a.set(g,k),b.push(g),1===b.length&&c.$$postDigest(f);g=new d;g.complete();return g}}}]},qf=["$provide",function(a){var b=this,d=null,c=null;this.$$registeredAnimations=Object.create(null);this.register=function(c,d){if(c&&"."!==c.charAt(0))throw he("notcsel",c);var g=c+"-animation";b.$$registeredAnimations[c.substr(1)]=g;a.factory(g,d)};this.customFilter=function(a){1===arguments.length&&(c=C(a)?a:null);return c}; this.classNameFilter=function(a){if(1===arguments.length&&(d=a instanceof RegExp?a:null)&&/[(\s|\/)]ng-animate[(\s|\/)]/.test(d.toString()))throw d=null,he("nongcls","ng-animate");return d};this.$get=["$$animateQueue",function(a){function b(a,c,d){if(d){var e;a:{for(e=0;e<d.length;e++){var f=d[e];if(1===f.nodeType){e=f;break a}}e=void 0}!e||e.parentNode||e.previousElementSibling||(d=null)}d?d.after(a):c.prepend(a)}return{on:a.on,off:a.off,pin:a.pin,enabled:a.enabled,cancel:function(a){a.end&&a.end()}, enter:function(c,d,k,l){d=d&&z(d);k=k&&z(k);d=d||k.parent();b(c,d,k);return a.push(c,"enter",Ka(l))},move:function(c,d,k,l){d=d&&z(d);k=k&&z(k);d=d||k.parent();b(c,d,k);return a.push(c,"move",Ka(l))},leave:function(b,c){return a.push(b,"leave",Ka(c),function(){b.remove()})},addClass:function(b,c,d){d=Ka(d);d.addClass=jb(d.addclass,c);return a.push(b,"addClass",d)},removeClass:function(b,c,d){d=Ka(d);d.removeClass=jb(d.removeClass,c);return a.push(b,"removeClass",d)},setClass:function(b,c,d,f){f=Ka(f); f.addClass=jb(f.addClass,c);f.removeClass=jb(f.removeClass,d);return a.push(b,"setClass",f)},animate:function(b,c,d,f,m){m=Ka(m);m.from=m.from?O(m.from,c):c;m.to=m.to?O(m.to,d):d;m.tempClasses=jb(m.tempClasses,f||"ng-inline-animate");return a.push(b,"animate",m)}}}]}],vf=function(){this.$get=["$$rAF",function(a){function b(b){d.push(b);1<d.length||a(function(){for(var a=0;a<d.length;a++)d[a]();d=[]})}var d=[];return function(){var a=!1;b(function(){a=!0});return 
function(d){a?d():b(d)}}}]},uf=function(){this.$get= ["$q","$sniffer","$$animateAsyncRun","$$isDocumentHidden","$timeout",function(a,b,d,c,e){function f(a){this.setHost(a);var b=d();this._doneCallbacks=[];this._tick=function(a){c()?e(a,0,!1):b(a)};this._state=0}f.chain=function(a,b){function c(){if(d===a.length)b(!0);else a[d](function(a){!1===a?b(!1):(d++,c())})}var d=0;c()};f.all=function(a,b){function c(f){e=e&&f;++d===a.length&&b(e)}var d=0,e=!0;r(a,function(a){a.done(c)})};f.prototype={setHost:function(a){this.host=a||{}},done:function(a){2=== this._state?a():this._doneCallbacks.push(a)},progress:D,getPromise:function(){if(!this.promise){var b=this;this.promise=a(function(a,c){b.done(function(b){!1===b?c():a()})})}return this.promise},then:function(a,b){return this.getPromise().then(a,b)},"catch":function(a){return this.getPromise()["catch"](a)},"finally":function(a){return this.getPromise()["finally"](a)},pause:function(){this.host.pause&&this.host.pause()},resume:function(){this.host.resume&&this.host.resume()},end:function(){this.host.end&& this.host.end();this._resolve(!0)},cancel:function(){this.host.cancel&&this.host.cancel();this._resolve(!1)},complete:function(a){var b=this;0===b._state&&(b._state=1,b._tick(function(){b._resolve(a)}))},_resolve:function(a){2!==this._state&&(r(this._doneCallbacks,function(b){b(a)}),this._doneCallbacks.length=0,this._state=2)}};return f}]},rf=function(){this.$get=["$$rAF","$q","$$AnimateRunner",function(a,b,d){return function(b,e){function f(){a(function(){g.addClass&&(b.addClass(g.addClass),g.addClass= null);g.removeClass&&(b.removeClass(g.removeClass),g.removeClass=null);g.to&&(b.css(g.to),g.to=null);h||k.complete();h=!0});return k}var g=e||{};g.$$prepared||(g=pa(g));g.cleanupStyles&&(g.from=g.to=null);g.from&&(b.css(g.from),g.from=null);var h,k=new d;return{start:f,end:f}}}]},ca=K("$compile"),sc=new function(){};Yc.$inject=["$provide","$$sanitizeUriProvider"];Jb.prototype.isFirstChange=function(){return this.previousValue===sc};var od=/^((?:x|data)[:\-_])/i,tg=/[:\-_]+(.)/g,ud=K("$controller"), td=/^(\S+)(\s+as\s+([\w$]+))?$/,Cf=function(){this.$get=["$document",function(a){return function(b){b?!b.nodeType&&b instanceof z&&(b=b[0]):b=a[0].body;return b.offsetWidth+1}}]},vd="application/json",vc={"Content-Type":vd+";charset=utf-8"},wg=/^\[|^\{(?!\{)/,xg={"[":/]$/,"{":/}$/},vg=/^\)]\}',?\n/,Kb=K("$http"),Fa=$.$interpolateMinErr=K("$interpolate");Fa.throwNoconcat=function(a){throw Fa("noconcat",a);};Fa.interr=function(a,b){return Fa("interr",a,b.toString())};var Kf=function(){this.$get=function(){function a(a){var b= function(a){b.data=a;b.called=!0};b.id=a;return b}var b=$.callbacks,d={};return{createCallback:function(c){c="_"+(b.$$counter++).toString(36);var e="angular.callbacks."+c,f=a(c);d[e]=b[c]=f;return e},wasCalled:function(a){return d[a].called},getResponse:function(a){return d[a].data},removeCallback:function(a){delete b[d[a].id];delete d[a]}}}},ah=/^([^?#]*)(\?([^#]*))?(#(.*))?$/,zg={http:80,https:443,ftp:21},kb=K("$location"),Ag=/^\s*[\\/]{2,}/,bh={$$absUrl:"",$$html5:!1,$$replace:!1,absUrl:Lb("$$absUrl"), url:function(a){if(x(a))return this.$$url;var b=ah.exec(a);(b[1]||""===a)&&this.path(decodeURIComponent(b[1]));(b[2]||b[1]||""===a)&&this.search(b[3]||"");this.hash(b[5]||"");return this},protocol:Lb("$$protocol"),host:Lb("$$host"),port:Lb("$$port"),path:Dd("$$path",function(a){a=null!==a?a.toString():"";return"/"===a.charAt(0)?a:"/"+a}),search:function(a,b){switch(arguments.length){case 0:return this.$$search;case 
1:if(E(a)||Y(a))a=a.toString(),this.$$search=ec(a);else if(B(a))a=pa(a,{}),r(a,function(b, c){null==b&&delete a[c]}),this.$$search=a;else throw kb("isrcharg");break;default:x(b)||null===b?delete this.$$search[a]:this.$$search[a]=b}this.$$compose();return this},hash:Dd("$$hash",function(a){return null!==a?a.toString():""}),replace:function(){this.$$replace=!0;return this}};r([Cd,zc,yc],function(a){a.prototype=Object.create(bh);a.prototype.state=function(b){if(!arguments.length)return this.$$state;if(a!==yc||!this.$$html5)throw kb("nostate");this.$$state=x(b)?null:b;this.$$urlUpdatedByLocation= !0;return this}});var Xa=K("$parse"),Eg={}.constructor.prototype.valueOf,Ub=S();r("+ - * / % === !== == != < > <= >= && || ! = |".split(" "),function(a){Ub[a]=!0});var ch={n:"\n",f:"\f",r:"\r",t:"\t",v:"\v","'":"'",'"':'"'},Nb=function(a){this.options=a};Nb.prototype={constructor:Nb,lex:function(a){this.text=a;this.index=0;for(this.tokens=[];this.index<this.text.length;)if(a=this.text.charAt(this.index),'"'===a||"'"===a)this.readString(a);else if(this.isNumber(a)||"."===a&&this.isNumber(this.peek()))this.readNumber(); else if(this.isIdentifierStart(this.peekMultichar()))this.readIdent();else if(this.is(a,"(){}[].,;:?"))this.tokens.push({index:this.index,text:a}),this.index++;else if(this.isWhitespace(a))this.index++;else{var b=a+this.peek(),d=b+this.peek(2),c=Ub[b],e=Ub[d];Ub[a]||c||e?(a=e?d:c?b:a,this.tokens.push({index:this.index,text:a,operator:!0}),this.index+=a.length):this.throwError("Unexpected next character ",this.index,this.index+1)}return this.tokens},is:function(a,b){return-1!==b.indexOf(a)},peek:function(a){a= a||1;return this.index+a<this.text.length?this.text.charAt(this.index+a):!1},isNumber:function(a){return"0"<=a&&"9">=a&&"string"===typeof a},isWhitespace:function(a){return" "===a||"\r"===a||"\t"===a||"\n"===a||"\v"===a||"\u00a0"===a},isIdentifierStart:function(a){return this.options.isIdentifierStart?this.options.isIdentifierStart(a,this.codePointAt(a)):this.isValidIdentifierStart(a)},isValidIdentifierStart:function(a){return"a"<=a&&"z">=a||"A"<=a&&"Z">=a||"_"===a||"$"===a},isIdentifierContinue:function(a){return this.options.isIdentifierContinue? 
this.options.isIdentifierContinue(a,this.codePointAt(a)):this.isValidIdentifierContinue(a)},isValidIdentifierContinue:function(a,b){return this.isValidIdentifierStart(a,b)||this.isNumber(a)},codePointAt:function(a){return 1===a.length?a.charCodeAt(0):(a.charCodeAt(0)<<10)+a.charCodeAt(1)-56613888},peekMultichar:function(){var a=this.text.charAt(this.index),b=this.peek();if(!b)return a;var d=a.charCodeAt(0),c=b.charCodeAt(0);return 55296<=d&&56319>=d&&56320<=c&&57343>=c?a+b:a},isExpOperator:function(a){return"-"=== a||"+"===a||this.isNumber(a)},throwError:function(a,b,d){d=d||this.index;b=u(b)?"s "+b+"-"+this.index+" ["+this.text.substring(b,d)+"]":" "+d;throw Xa("lexerr",a,b,this.text);},readNumber:function(){for(var a="",b=this.index;this.index<this.text.length;){var d=L(this.text.charAt(this.index));if("."===d||this.isNumber(d))a+=d;else{var c=this.peek();if("e"===d&&this.isExpOperator(c))a+=d;else if(this.isExpOperator(d)&&c&&this.isNumber(c)&&"e"===a.charAt(a.length-1))a+=d;else if(!this.isExpOperator(d)|| c&&this.isNumber(c)||"e"!==a.charAt(a.length-1))break;else this.throwError("Invalid exponent")}this.index++}this.tokens.push({index:b,text:a,constant:!0,value:Number(a)})},readIdent:function(){var a=this.index;for(this.index+=this.peekMultichar().length;this.index<this.text.length;){var b=this.peekMultichar();if(!this.isIdentifierContinue(b))break;this.index+=b.length}this.tokens.push({index:a,text:this.text.slice(a,this.index),identifier:!0})},readString:function(a){var b=this.index;this.index++; for(var d="",c=a,e=!1;this.index<this.text.length;){var f=this.text.charAt(this.index),c=c+f;if(e)"u"===f?(e=this.text.substring(this.index+1,this.index+5),e.match(/[\da-f]{4}/i)||this.throwError("Invalid unicode escape [\\u"+e+"]"),this.index+=4,d+=String.fromCharCode(parseInt(e,16))):d+=ch[f]||f,e=!1;else if("\\"===f)e=!0;else{if(f===a){this.index++;this.tokens.push({index:b,text:c,constant:!0,value:d});return}d+=f}this.index++}this.throwError("Unterminated quote",b)}};var q=function(a,b){this.lexer= a;this.options=b};q.Program="Program";q.ExpressionStatement="ExpressionStatement";q.AssignmentExpression="AssignmentExpression";q.ConditionalExpression="ConditionalExpression";q.LogicalExpression="LogicalExpression";q.BinaryExpression="BinaryExpression";q.UnaryExpression="UnaryExpression";q.CallExpression="CallExpression";q.MemberExpression="MemberExpression";q.Identifier="Identifier";q.Literal="Literal";q.ArrayExpression="ArrayExpression";q.Property="Property";q.ObjectExpression="ObjectExpression"; q.ThisExpression="ThisExpression";q.LocalsExpression="LocalsExpression";q.NGValueParameter="NGValueParameter";q.prototype={ast:function(a){this.text=a;this.tokens=this.lexer.lex(a);a=this.program();0!==this.tokens.length&&this.throwError("is an unexpected token",this.tokens[0]);return a},program:function(){for(var a=[];;)if(0<this.tokens.length&&!this.peek("}",")",";","]")&&a.push(this.expressionStatement()),!this.expect(";"))return{type:q.Program,body:a}},expressionStatement:function(){return{type:q.ExpressionStatement, expression:this.filterChain()}},filterChain:function(){for(var a=this.expression();this.expect("|");)a=this.filter(a);return a},expression:function(){return this.assignment()},assignment:function(){var a=this.ternary();if(this.expect("=")){if(!Hd(a))throw Xa("lval");a={type:q.AssignmentExpression,left:a,right:this.assignment(),operator:"="}}return a},ternary:function(){var a=this.logicalOR(),b,d;return 
this.expect("?")&&(b=this.expression(),this.consume(":"))?(d=this.expression(),{type:q.ConditionalExpression, test:a,alternate:b,consequent:d}):a},logicalOR:function(){for(var a=this.logicalAND();this.expect("||");)a={type:q.LogicalExpression,operator:"||",left:a,right:this.logicalAND()};return a},logicalAND:function(){for(var a=this.equality();this.expect("&&");)a={type:q.LogicalExpression,operator:"&&",left:a,right:this.equality()};return a},equality:function(){for(var a=this.relational(),b;b=this.expect("==","!=","===","!==");)a={type:q.BinaryExpression,operator:b.text,left:a,right:this.relational()}; return a},relational:function(){for(var a=this.additive(),b;b=this.expect("<",">","<=",">=");)a={type:q.BinaryExpression,operator:b.text,left:a,right:this.additive()};return a},additive:function(){for(var a=this.multiplicative(),b;b=this.expect("+","-");)a={type:q.BinaryExpression,operator:b.text,left:a,right:this.multiplicative()};return a},multiplicative:function(){for(var a=this.unary(),b;b=this.expect("*","/","%");)a={type:q.BinaryExpression,operator:b.text,left:a,right:this.unary()};return a}, unary:function(){var a;return(a=this.expect("+","-","!"))?{type:q.UnaryExpression,operator:a.text,prefix:!0,argument:this.unary()}:this.primary()},primary:function(){var a;this.expect("(")?(a=this.filterChain(),this.consume(")")):this.expect("[")?a=this.arrayDeclaration():this.expect("{")?a=this.object():this.selfReferential.hasOwnProperty(this.peek().text)?a=pa(this.selfReferential[this.consume().text]):this.options.literals.hasOwnProperty(this.peek().text)?a={type:q.Literal,value:this.options.literals[this.consume().text]}: this.peek().identifier?a=this.identifier():this.peek().constant?a=this.constant():this.throwError("not a primary expression",this.peek());for(var b;b=this.expect("(","[",".");)"("===b.text?(a={type:q.CallExpression,callee:a,arguments:this.parseArguments()},this.consume(")")):"["===b.text?(a={type:q.MemberExpression,object:a,property:this.expression(),computed:!0},this.consume("]")):"."===b.text?a={type:q.MemberExpression,object:a,property:this.identifier(),computed:!1}:this.throwError("IMPOSSIBLE"); return a},filter:function(a){a=[a];for(var b={type:q.CallExpression,callee:this.identifier(),arguments:a,filter:!0};this.expect(":");)a.push(this.expression());return b},parseArguments:function(){var a=[];if(")"!==this.peekToken().text){do a.push(this.filterChain());while(this.expect(","))}return a},identifier:function(){var a=this.consume();a.identifier||this.throwError("is not a valid identifier",a);return{type:q.Identifier,name:a.text}},constant:function(){return{type:q.Literal,value:this.consume().value}}, arrayDeclaration:function(){var a=[];if("]"!==this.peekToken().text){do{if(this.peek("]"))break;a.push(this.expression())}while(this.expect(","))}this.consume("]");return{type:q.ArrayExpression,elements:a}},object:function(){var a=[],b;if("}"!==this.peekToken().text){do{if(this.peek("}"))break;b={type:q.Property,kind:"init"};this.peek().constant?(b.key=this.constant(),b.computed=!1,this.consume(":"),b.value=this.expression()):this.peek().identifier?(b.key=this.identifier(),b.computed=!1,this.peek(":")? 
(this.consume(":"),b.value=this.expression()):b.value=b.key):this.peek("[")?(this.consume("["),b.key=this.expression(),this.consume("]"),b.computed=!0,this.consume(":"),b.value=this.expression()):this.throwError("invalid key",this.peek());a.push(b)}while(this.expect(","))}this.consume("}");return{type:q.ObjectExpression,properties:a}},throwError:function(a,b){throw Xa("syntax",b.text,a,b.index+1,this.text,this.text.substring(b.index));},consume:function(a){if(0===this.tokens.length)throw Xa("ueoe", this.text);var b=this.expect(a);b||this.throwError("is unexpected, expecting ["+a+"]",this.peek());return b},peekToken:function(){if(0===this.tokens.length)throw Xa("ueoe",this.text);return this.tokens[0]},peek:function(a,b,d,c){return this.peekAhead(0,a,b,d,c)},peekAhead:function(a,b,d,c,e){if(this.tokens.length>a){a=this.tokens[a];var f=a.text;if(f===b||f===d||f===c||f===e||!(b||d||c||e))return a}return!1},expect:function(a,b,d,c){return(a=this.peek(a,b,d,c))?(this.tokens.shift(),a):!1},selfReferential:{"this":{type:q.ThisExpression}, $locals:{type:q.LocalsExpression}}};var Fd=2;Jd.prototype={compile:function(a){var b=this;this.state={nextId:0,filters:{},fn:{vars:[],body:[],own:{}},assign:{vars:[],body:[],own:{}},inputs:[]};W(a,b.$filter);var d="",c;this.stage="assign";if(c=Id(a))this.state.computing="assign",d=this.nextId(),this.recurse(c,d),this.return_(d),d="fn.assign="+this.generateFunction("assign","s,v,l");c=Gd(a.body);b.stage="inputs";r(c,function(a,c){var d="fn"+c;b.state[d]={vars:[],body:[],own:{}};b.state.computing=d; var h=b.nextId();b.recurse(a,h);b.return_(h);b.state.inputs.push({name:d,isPure:a.isPure});a.watchId=c});this.state.computing="fn";this.stage="main";this.recurse(a);a='"'+this.USE+" "+this.STRICT+'";\n'+this.filterPrefix()+"var fn="+this.generateFunction("fn","s,l,a,i")+d+this.watchFns()+"return fn;";a=(new Function("$filter","getStringValue","ifDefined","plus",a))(this.$filter,Bg,Cg,Ed);this.state=this.stage=void 0;return a},USE:"use",STRICT:"strict",watchFns:function(){var a=[],b=this.state.inputs, d=this;r(b,function(b){a.push("var "+b.name+"="+d.generateFunction(b.name,"s"));b.isPure&&a.push(b.name,".isPure="+JSON.stringify(b.isPure)+";")});b.length&&a.push("fn.inputs=["+b.map(function(a){return a.name}).join(",")+"];");return a.join("")},generateFunction:function(a,b){return"function("+b+"){"+this.varsPrefix(a)+this.body(a)+"};"},filterPrefix:function(){var a=[],b=this;r(this.state.filters,function(d,c){a.push(d+"=$filter("+b.escape(c)+")")});return a.length?"var "+a.join(",")+";":""},varsPrefix:function(a){return this.state[a].vars.length? 
"var "+this.state[a].vars.join(",")+";":""},body:function(a){return this.state[a].body.join("")},recurse:function(a,b,d,c,e,f){var g,h,k=this,l,m,p;c=c||D;if(!f&&u(a.watchId))b=b||this.nextId(),this.if_("i",this.lazyAssign(b,this.computedMember("i",a.watchId)),this.lazyRecurse(a,b,d,c,e,!0));else switch(a.type){case q.Program:r(a.body,function(b,c){k.recurse(b.expression,void 0,void 0,function(a){h=a});c!==a.body.length-1?k.current().body.push(h,";"):k.return_(h)});break;case q.Literal:m=this.escape(a.value); this.assign(b,m);c(b||m);break;case q.UnaryExpression:this.recurse(a.argument,void 0,void 0,function(a){h=a});m=a.operator+"("+this.ifDefined(h,0)+")";this.assign(b,m);c(m);break;case q.BinaryExpression:this.recurse(a.left,void 0,void 0,function(a){g=a});this.recurse(a.right,void 0,void 0,function(a){h=a});m="+"===a.operator?this.plus(g,h):"-"===a.operator?this.ifDefined(g,0)+a.operator+this.ifDefined(h,0):"("+g+")"+a.operator+"("+h+")";this.assign(b,m);c(m);break;case q.LogicalExpression:b=b||this.nextId(); k.recurse(a.left,b);k.if_("&&"===a.operator?b:k.not(b),k.lazyRecurse(a.right,b));c(b);break;case q.ConditionalExpression:b=b||this.nextId();k.recurse(a.test,b);k.if_(b,k.lazyRecurse(a.alternate,b),k.lazyRecurse(a.consequent,b));c(b);break;case q.Identifier:b=b||this.nextId();d&&(d.context="inputs"===k.stage?"s":this.assign(this.nextId(),this.getHasOwnProperty("l",a.name)+"?l:s"),d.computed=!1,d.name=a.name);k.if_("inputs"===k.stage||k.not(k.getHasOwnProperty("l",a.name)),function(){k.if_("inputs"=== k.stage||"s",function(){e&&1!==e&&k.if_(k.isNull(k.nonComputedMember("s",a.name)),k.lazyAssign(k.nonComputedMember("s",a.name),"{}"));k.assign(b,k.nonComputedMember("s",a.name))})},b&&k.lazyAssign(b,k.nonComputedMember("l",a.name)));c(b);break;case q.MemberExpression:g=d&&(d.context=this.nextId())||this.nextId();b=b||this.nextId();k.recurse(a.object,g,void 0,function(){k.if_(k.notNull(g),function(){a.computed?(h=k.nextId(),k.recurse(a.property,h),k.getStringValue(h),e&&1!==e&&k.if_(k.not(k.computedMember(g, h)),k.lazyAssign(k.computedMember(g,h),"{}")),m=k.computedMember(g,h),k.assign(b,m),d&&(d.computed=!0,d.name=h)):(e&&1!==e&&k.if_(k.isNull(k.nonComputedMember(g,a.property.name)),k.lazyAssign(k.nonComputedMember(g,a.property.name),"{}")),m=k.nonComputedMember(g,a.property.name),k.assign(b,m),d&&(d.computed=!1,d.name=a.property.name))},function(){k.assign(b,"undefined")});c(b)},!!e);break;case q.CallExpression:b=b||this.nextId();a.filter?(h=k.filter(a.callee.name),l=[],r(a.arguments,function(a){var b= k.nextId();k.recurse(a,b);l.push(b)}),m=h+"("+l.join(",")+")",k.assign(b,m),c(b)):(h=k.nextId(),g={},l=[],k.recurse(a.callee,h,g,function(){k.if_(k.notNull(h),function(){r(a.arguments,function(b){k.recurse(b,a.constant?void 0:k.nextId(),void 0,function(a){l.push(a)})});m=g.name?k.member(g.context,g.name,g.computed)+"("+l.join(",")+")":h+"("+l.join(",")+")";k.assign(b,m)},function(){k.assign(b,"undefined")});c(b)}));break;case q.AssignmentExpression:h=this.nextId();g={};this.recurse(a.left,void 0, g,function(){k.if_(k.notNull(g.context),function(){k.recurse(a.right,h);m=k.member(g.context,g.name,g.computed)+a.operator+h;k.assign(b,m);c(b||m)})},1);break;case q.ArrayExpression:l=[];r(a.elements,function(b){k.recurse(b,a.constant?void 0:k.nextId(),void 0,function(a){l.push(a)})});m="["+l.join(",")+"]";this.assign(b,m);c(b||m);break;case 
q.ObjectExpression:l=[];p=!1;r(a.properties,function(a){a.computed&&(p=!0)});p?(b=b||this.nextId(),this.assign(b,"{}"),r(a.properties,function(a){a.computed? (g=k.nextId(),k.recurse(a.key,g)):g=a.key.type===q.Identifier?a.key.name:""+a.key.value;h=k.nextId();k.recurse(a.value,h);k.assign(k.member(b,g,a.computed),h)})):(r(a.properties,function(b){k.recurse(b.value,a.constant?void 0:k.nextId(),void 0,function(a){l.push(k.escape(b.key.type===q.Identifier?b.key.name:""+b.key.value)+":"+a)})}),m="{"+l.join(",")+"}",this.assign(b,m));c(b||m);break;case q.ThisExpression:this.assign(b,"s");c(b||"s");break;case q.LocalsExpression:this.assign(b,"l");c(b||"l");break; case q.NGValueParameter:this.assign(b,"v"),c(b||"v")}},getHasOwnProperty:function(a,b){var d=a+"."+b,c=this.current().own;c.hasOwnProperty(d)||(c[d]=this.nextId(!1,a+"&&("+this.escape(b)+" in "+a+")"));return c[d]},assign:function(a,b){if(a)return this.current().body.push(a,"=",b,";"),a},filter:function(a){this.state.filters.hasOwnProperty(a)||(this.state.filters[a]=this.nextId(!0));return this.state.filters[a]},ifDefined:function(a,b){return"ifDefined("+a+","+this.escape(b)+")"},plus:function(a, b){return"plus("+a+","+b+")"},return_:function(a){this.current().body.push("return ",a,";")},if_:function(a,b,d){if(!0===a)b();else{var c=this.current().body;c.push("if(",a,"){");b();c.push("}");d&&(c.push("else{"),d(),c.push("}"))}},not:function(a){return"!("+a+")"},isNull:function(a){return a+"==null"},notNull:function(a){return a+"!=null"},nonComputedMember:function(a,b){var d=/[^$_a-zA-Z0-9]/g;return/^[$_a-zA-Z][$_a-zA-Z0-9]*$/.test(b)?a+"."+b:a+'["'+b.replace(d,this.stringEscapeFn)+'"]'},computedMember:function(a, b){return a+"["+b+"]"},member:function(a,b,d){return d?this.computedMember(a,b):this.nonComputedMember(a,b)},getStringValue:function(a){this.assign(a,"getStringValue("+a+")")},lazyRecurse:function(a,b,d,c,e,f){var g=this;return function(){g.recurse(a,b,d,c,e,f)}},lazyAssign:function(a,b){var d=this;return function(){d.assign(a,b)}},stringEscapeRegex:/[^ a-zA-Z0-9]/g,stringEscapeFn:function(a){return"\\u"+("0000"+a.charCodeAt(0).toString(16)).slice(-4)},escape:function(a){if(E(a))return"'"+a.replace(this.stringEscapeRegex, this.stringEscapeFn)+"'";if(Y(a))return a.toString();if(!0===a)return"true";if(!1===a)return"false";if(null===a)return"null";if("undefined"===typeof a)return"undefined";throw Xa("esc");},nextId:function(a,b){var d="v"+this.state.nextId++;a||this.current().vars.push(d+(b?"="+b:""));return d},current:function(){return this.state[this.state.computing]}};Kd.prototype={compile:function(a){var b=this;W(a,b.$filter);var d,c;if(d=Id(a))c=this.recurse(d);d=Gd(a.body);var e;d&&(e=[],r(d,function(a,c){var d= b.recurse(a);d.isPure=a.isPure;a.input=d;e.push(d);a.watchId=c}));var f=[];r(a.body,function(a){f.push(b.recurse(a.expression))});a=0===a.body.length?D:1===a.body.length?f[0]:function(a,b){var c;r(f,function(d){c=d(a,b)});return c};c&&(a.assign=function(a,b,d){return c(a,d,b)});e&&(a.inputs=e);return a},recurse:function(a,b,d){var c,e,f=this,g;if(a.input)return this.inputs(a.input,a.watchId);switch(a.type){case q.Literal:return this.value(a.value,b);case q.UnaryExpression:return e=this.recurse(a.argument), this["unary"+a.operator](e,b);case q.BinaryExpression:return c=this.recurse(a.left),e=this.recurse(a.right),this["binary"+a.operator](c,e,b);case q.LogicalExpression:return c=this.recurse(a.left),e=this.recurse(a.right),this["binary"+a.operator](c,e,b);case q.ConditionalExpression:return 
this["ternary?:"](this.recurse(a.test),this.recurse(a.alternate),this.recurse(a.consequent),b);case q.Identifier:return f.identifier(a.name,b,d);case q.MemberExpression:return c=this.recurse(a.object,!1,!!d),a.computed|| (e=a.property.name),a.computed&&(e=this.recurse(a.property)),a.computed?this.computedMember(c,e,b,d):this.nonComputedMember(c,e,b,d);case q.CallExpression:return g=[],r(a.arguments,function(a){g.push(f.recurse(a))}),a.filter&&(e=this.$filter(a.callee.name)),a.filter||(e=this.recurse(a.callee,!0)),a.filter?function(a,c,d,f){for(var p=[],n=0;n<g.length;++n)p.push(g[n](a,c,d,f));a=e.apply(void 0,p,f);return b?{context:void 0,name:void 0,value:a}:a}:function(a,c,d,f){var p=e(a,c,d,f),n;if(null!=p.value){n= [];for(var r=0;r<g.length;++r)n.push(g[r](a,c,d,f));n=p.value.apply(p.context,n)}return b?{value:n}:n};case q.AssignmentExpression:return c=this.recurse(a.left,!0,1),e=this.recurse(a.right),function(a,d,f,g){var p=c(a,d,f,g);a=e(a,d,f,g);p.context[p.name]=a;return b?{value:a}:a};case q.ArrayExpression:return g=[],r(a.elements,function(a){g.push(f.recurse(a))}),function(a,c,d,e){for(var f=[],n=0;n<g.length;++n)f.push(g[n](a,c,d,e));return b?{value:f}:f};case q.ObjectExpression:return g=[],r(a.properties, function(a){a.computed?g.push({key:f.recurse(a.key),computed:!0,value:f.recurse(a.value)}):g.push({key:a.key.type===q.Identifier?a.key.name:""+a.key.value,computed:!1,value:f.recurse(a.value)})}),function(a,c,d,e){for(var f={},n=0;n<g.length;++n)g[n].computed?f[g[n].key(a,c,d,e)]=g[n].value(a,c,d,e):f[g[n].key]=g[n].value(a,c,d,e);return b?{value:f}:f};case q.ThisExpression:return function(a){return b?{value:a}:a};case q.LocalsExpression:return function(a,c){return b?{value:c}:c};case q.NGValueParameter:return function(a, c,d){return b?{value:d}:d}}},"unary+":function(a,b){return function(d,c,e,f){d=a(d,c,e,f);d=u(d)?+d:0;return b?{value:d}:d}},"unary-":function(a,b){return function(d,c,e,f){d=a(d,c,e,f);d=u(d)?-d:-0;return b?{value:d}:d}},"unary!":function(a,b){return function(d,c,e,f){d=!a(d,c,e,f);return b?{value:d}:d}},"binary+":function(a,b,d){return function(c,e,f,g){var h=a(c,e,f,g);c=b(c,e,f,g);h=Ed(h,c);return d?{value:h}:h}},"binary-":function(a,b,d){return function(c,e,f,g){var h=a(c,e,f,g);c=b(c,e,f,g); h=(u(h)?h:0)-(u(c)?c:0);return d?{value:h}:h}},"binary*":function(a,b,d){return function(c,e,f,g){c=a(c,e,f,g)*b(c,e,f,g);return d?{value:c}:c}},"binary/":function(a,b,d){return function(c,e,f,g){c=a(c,e,f,g)/b(c,e,f,g);return d?{value:c}:c}},"binary%":function(a,b,d){return function(c,e,f,g){c=a(c,e,f,g)%b(c,e,f,g);return d?{value:c}:c}},"binary===":function(a,b,d){return function(c,e,f,g){c=a(c,e,f,g)===b(c,e,f,g);return d?{value:c}:c}},"binary!==":function(a,b,d){return function(c,e,f,g){c=a(c, e,f,g)!==b(c,e,f,g);return d?{value:c}:c}},"binary==":function(a,b,d){return function(c,e,f,g){c=a(c,e,f,g)==b(c,e,f,g);return d?{value:c}:c}},"binary!=":function(a,b,d){return function(c,e,f,g){c=a(c,e,f,g)!=b(c,e,f,g);return d?{value:c}:c}},"binary<":function(a,b,d){return function(c,e,f,g){c=a(c,e,f,g)<b(c,e,f,g);return d?{value:c}:c}},"binary>":function(a,b,d){return function(c,e,f,g){c=a(c,e,f,g)>b(c,e,f,g);return d?{value:c}:c}},"binary<=":function(a,b,d){return function(c,e,f,g){c=a(c,e,f, g)<=b(c,e,f,g);return d?{value:c}:c}},"binary>=":function(a,b,d){return function(c,e,f,g){c=a(c,e,f,g)>=b(c,e,f,g);return d?{value:c}:c}},"binary&&":function(a,b,d){return function(c,e,f,g){c=a(c,e,f,g)&&b(c,e,f,g);return d?{value:c}:c}},"binary||":function(a,b,d){return 
function(c,e,f,g){c=a(c,e,f,g)||b(c,e,f,g);return d?{value:c}:c}},"ternary?:":function(a,b,d,c){return function(e,f,g,h){e=a(e,f,g,h)?b(e,f,g,h):d(e,f,g,h);return c?{value:e}:e}},value:function(a,b){return function(){return b?{context:void 0, name:void 0,value:a}:a}},identifier:function(a,b,d){return function(c,e,f,g){c=e&&a in e?e:c;d&&1!==d&&c&&null==c[a]&&(c[a]={});e=c?c[a]:void 0;return b?{context:c,name:a,value:e}:e}},computedMember:function(a,b,d,c){return function(e,f,g,h){var k=a(e,f,g,h),l,m;null!=k&&(l=b(e,f,g,h),l+="",c&&1!==c&&k&&!k[l]&&(k[l]={}),m=k[l]);return d?{context:k,name:l,value:m}:m}},nonComputedMember:function(a,b,d,c){return function(e,f,g,h){e=a(e,f,g,h);c&&1!==c&&e&&null==e[b]&&(e[b]={});f=null!=e?e[b]:void 0; return d?{context:e,name:b,value:f}:f}},inputs:function(a,b){return function(d,c,e,f){return f?f[b]:a(d,c,e)}}};Mb.prototype={constructor:Mb,parse:function(a){a=this.getAst(a);var b=this.astCompiler.compile(a.ast),d=a.ast;b.literal=0===d.body.length||1===d.body.length&&(d.body[0].expression.type===q.Literal||d.body[0].expression.type===q.ArrayExpression||d.body[0].expression.type===q.ObjectExpression);b.constant=a.ast.constant;b.oneTime=a.oneTime;return b},getAst:function(a){var b=!1;a=a.trim();":"=== a.charAt(0)&&":"===a.charAt(1)&&(b=!0,a=a.substring(2));return{ast:this.ast.ast(a),oneTime:b}}};var va=K("$sce"),oa={HTML:"html",CSS:"css",URL:"url",RESOURCE_URL:"resourceUrl",JS:"js"},Bc=/_([a-z])/g,Gg=K("$compile"),X=w.document.createElement("a"),Od=ta(w.location.href);Pd.$inject=["$document"];ed.$inject=["$provide"];var Wd=22,Vd=".",Dc="0";Qd.$inject=["$locale"];Sd.$inject=["$locale"];var Rg={yyyy:ea("FullYear",4,0,!1,!0),yy:ea("FullYear",2,0,!0,!0),y:ea("FullYear",1,0,!1,!0),MMMM:mb("Month"), MMM:mb("Month",!0),MM:ea("Month",2,1),M:ea("Month",1,1),LLLL:mb("Month",!1,!0),dd:ea("Date",2),d:ea("Date",1),HH:ea("Hours",2),H:ea("Hours",1),hh:ea("Hours",2,-12),h:ea("Hours",1,-12),mm:ea("Minutes",2),m:ea("Minutes",1),ss:ea("Seconds",2),s:ea("Seconds",1),sss:ea("Milliseconds",3),EEEE:mb("Day"),EEE:mb("Day",!0),a:function(a,b){return 12>a.getHours()?b.AMPMS[0]:b.AMPMS[1]},Z:function(a,b,d){a=-1*d;return a=(0<=a?"+":"")+(Ob(Math[0<a?"floor":"ceil"](a/60),2)+Ob(Math.abs(a%60),2))},ww:Yd(2),w:Yd(1), G:Ec,GG:Ec,GGG:Ec,GGGG:function(a,b){return 0>=a.getFullYear()?b.ERANAMES[0]:b.ERANAMES[1]}},Qg=/((?:[^yMLdHhmsaZEwG']+)|(?:'(?:[^']|'')*')|(?:E+|y+|M+|L+|d+|H+|h+|m+|s+|a|Z|G+|w+))([\s\S]*)/,Pg=/^-?\d+$/;Rd.$inject=["$locale"];var Kg=la(L),Lg=la(ub);Td.$inject=["$parse"];var He=la({restrict:"E",compile:function(a,b){if(!b.href&&!b.xlinkHref)return function(a,b){if("a"===b[0].nodeName.toLowerCase()){var e="[object SVGAnimatedString]"===ia.call(b.prop("href"))?"xlink:href":"href";b.on("click",function(a){b.attr(e)|| a.preventDefault()})}}}}),vb={};r(Gb,function(a,b){function d(a,d,e){a.$watch(e[c],function(a){e.$set(b,!!a)})}if("multiple"!==a){var c=Ea("ng-"+b),e=d;"checked"===a&&(e=function(a,b,e){e.ngModel!==e[c]&&d(a,b,e)});vb[c]=function(){return{restrict:"A",priority:100,link:e}}}});r(sd,function(a,b){vb[b]=function(){return{priority:100,link:function(a,c,e){if("ngPattern"===b&&"/"===e.ngPattern.charAt(0)&&(c=e.ngPattern.match(Vg))){e.$set("ngPattern",new RegExp(c[1],c[2]));return}a.$watch(e[b],function(a){e.$set(b, a)})}}}});r(["src","srcset","href"],function(a){var b=Ea("ng-"+a);vb[b]=function(){return{priority:99,link:function(d,c,e){var f=a,g=a;"href"===a&&"[object 
SVGAnimatedString]"===ia.call(c.prop("href"))&&(g="xlinkHref",e.$attr[g]="xlink:href",f=null);e.$observe(b,function(b){b?(e.$set(g,b),Ca&&f&&c.prop(f,e[g])):"href"===a&&e.$set(g,null)})}}}});var Qb={$addControl:D,$$renameControl:function(a,b){a.$name=b},$removeControl:D,$setValidity:D,$setDirty:D,$setPristine:D,$setSubmitted:D};Pb.$inject=["$element", "$attrs","$scope","$animate","$interpolate"];Pb.prototype={$rollbackViewValue:function(){r(this.$$controls,function(a){a.$rollbackViewValue()})},$commitViewValue:function(){r(this.$$controls,function(a){a.$commitViewValue()})},$addControl:function(a){Ia(a.$name,"input");this.$$controls.push(a);a.$name&&(this[a.$name]=a);a.$$parentForm=this},$$renameControl:function(a,b){var d=a.$name;this[d]===a&&delete this[d];this[b]=a;a.$name=b},$removeControl:function(a){a.$name&&this[a.$name]===a&&delete this[a.$name]; r(this.$pending,function(b,d){this.$setValidity(d,null,a)},this);r(this.$error,function(b,d){this.$setValidity(d,null,a)},this);r(this.$$success,function(b,d){this.$setValidity(d,null,a)},this);cb(this.$$controls,a);a.$$parentForm=Qb},$setDirty:function(){this.$$animate.removeClass(this.$$element,Ya);this.$$animate.addClass(this.$$element,Vb);this.$dirty=!0;this.$pristine=!1;this.$$parentForm.$setDirty()},$setPristine:function(){this.$$animate.setClass(this.$$element,Ya,Vb+" ng-submitted");this.$dirty= !1;this.$pristine=!0;this.$submitted=!1;r(this.$$controls,function(a){a.$setPristine()})},$setUntouched:function(){r(this.$$controls,function(a){a.$setUntouched()})},$setSubmitted:function(){this.$$animate.addClass(this.$$element,"ng-submitted");this.$submitted=!0;this.$$parentForm.$setSubmitted()}};ae({clazz:Pb,set:function(a,b,d){var c=a[b];c?-1===c.indexOf(d)&&c.push(d):a[b]=[d]},unset:function(a,b,d){var c=a[b];c&&(cb(c,d),0===c.length&&delete a[b])}});var ie=function(a){return["$timeout","$parse", function(b,d){function c(a){return""===a?d('this[""]').assign:d(a).assign||D}return{name:"form",restrict:a?"EAC":"E",require:["form","^^?form"],controller:Pb,compile:function(d,f){d.addClass(Ya).addClass(nb);var g=f.name?"name":a&&f.ngForm?"ngForm":!1;return{pre:function(a,d,e,f){var p=f[0];if(!("action"in e)){var n=function(b){a.$apply(function(){p.$commitViewValue();p.$setSubmitted()});b.preventDefault()};d[0].addEventListener("submit",n);d.on("$destroy",function(){b(function(){d[0].removeEventListener("submit", n)},0,!1)})}(f[1]||p.$$parentForm).$addControl(p);var r=g?c(p.$name):D;g&&(r(a,p),e.$observe(g,function(b){p.$name!==b&&(r(a,void 0),p.$$parentForm.$$renameControl(p,b),r=c(p.$name),r(a,p))}));d.on("$destroy",function(){p.$$parentForm.$removeControl(p);r(a,void 0);O(p,Qb)})}}}}}]},Ie=ie(),Ue=ie(!0),Sg=/^\d{4,}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-5]\d\.\d+(?:[+-][0-2]\d:[0-5]\d|Z)$/,dh=/^[a-z][a-z\d.+-]*:\/*(?:[^:@]+(?::[^@]+)?@)?(?:[^\s:/?#]+|\[[a-f\d:]+])(?::\d+)?(?:\/[^?#]*)?(?:\?[^#]*)?(?:#.*)?$/i, eh=/^(?=.{1,254}$)(?=.{1,64}@)[-!#$%&'*+/0-9=?A-Z^_`a-z{|}~]+(\.[-!#$%&'*+/0-9=?A-Z^_`a-z{|}~]+)*@[A-Za-z0-9]([A-Za-z0-9-]{0,61}[A-Za-z0-9])?(\.[A-Za-z0-9]([A-Za-z0-9-]{0,61}[A-Za-z0-9])?)*$/,Tg=/^\s*(-|\+)?(\d+|(\d*(\.\d*)))([eE][+-]?\d+)?\s*$/,je=/^(\d{4,})-(\d{2})-(\d{2})$/,ke=/^(\d{4,})-(\d\d)-(\d\d)T(\d\d):(\d\d)(?::(\d\d)(\.\d{1,3})?)?$/,Lc=/^(\d{4,})-W(\d\d)$/,le=/^(\d{4,})-(\d\d)$/,me=/^(\d\d):(\d\d)(?::(\d\d)(\.\d{1,3})?)?$/,ce=S();r(["date","datetime-local","month","time","week"],function(a){ce[a]= !0});var 
ne={text:function(a,b,d,c,e,f){Va(a,b,d,c,e,f);Gc(c)},date:ob("date",je,Rb(je,["yyyy","MM","dd"]),"yyyy-MM-dd"),"datetime-local":ob("datetimelocal",ke,Rb(ke,"yyyy MM dd HH mm ss sss".split(" ")),"yyyy-MM-ddTHH:mm:ss.sss"),time:ob("time",me,Rb(me,["HH","mm","ss","sss"]),"HH:mm:ss.sss"),week:ob("week",Lc,function(a,b){if(fa(a))return a;if(E(a)){Lc.lastIndex=0;var d=Lc.exec(a);if(d){var c=+d[1],e=+d[2],f=d=0,g=0,h=0,k=Xd(c),e=7*(e-1);b&&(d=b.getHours(),f=b.getMinutes(),g=b.getSeconds(),h=b.getMilliseconds()); return new Date(c,0,k.getDate()+e,d,f,g,h)}}return NaN},"yyyy-Www"),month:ob("month",le,Rb(le,["yyyy","MM"]),"yyyy-MM"),number:function(a,b,d,c,e,f){Hc(a,b,d,c);de(c);Va(a,b,d,c,e,f);var g,h;if(u(d.min)||d.ngMin)c.$validators.min=function(a){return c.$isEmpty(a)||x(g)||a>=g},d.$observe("min",function(a){g=Wa(a);c.$validate()});if(u(d.max)||d.ngMax)c.$validators.max=function(a){return c.$isEmpty(a)||x(h)||a<=h},d.$observe("max",function(a){h=Wa(a);c.$validate()});if(u(d.step)||d.ngStep){var k;c.$validators.step= function(a,b){return c.$isEmpty(b)||x(k)||ee(b,g||0,k)};d.$observe("step",function(a){k=Wa(a);c.$validate()})}},url:function(a,b,d,c,e,f){Va(a,b,d,c,e,f);Gc(c);c.$$parserName="url";c.$validators.url=function(a,b){var d=a||b;return c.$isEmpty(d)||dh.test(d)}},email:function(a,b,d,c,e,f){Va(a,b,d,c,e,f);Gc(c);c.$$parserName="email";c.$validators.email=function(a,b){var d=a||b;return c.$isEmpty(d)||eh.test(d)}},radio:function(a,b,d,c){var e=!d.ngTrim||"false"!==Q(d.ngTrim);x(d.name)&&b.attr("name",++qb); b.on("click",function(a){var g;b[0].checked&&(g=d.value,e&&(g=Q(g)),c.$setViewValue(g,a&&a.type))});c.$render=function(){var a=d.value;e&&(a=Q(a));b[0].checked=a===c.$viewValue};d.$observe("value",c.$render)},range:function(a,b,d,c,e,f){function g(a,c){b.attr(a,d[a]);d.$observe(a,c)}function h(a){p=Wa(a);U(c.$modelValue)||(m?(a=b.val(),p>a&&(a=p,b.val(a)),c.$setViewValue(a)):c.$validate())}function k(a){n=Wa(a);U(c.$modelValue)||(m?(a=b.val(),n<a&&(b.val(n),a=n<p?p:n),c.$setViewValue(a)):c.$validate())} function l(a){r=Wa(a);U(c.$modelValue)||(m&&c.$viewValue!==b.val()?c.$setViewValue(b.val()):c.$validate())}Hc(a,b,d,c);de(c);Va(a,b,d,c,e,f);var m=c.$$hasNativeValidators&&"range"===b[0].type,p=m?0:void 0,n=m?100:void 0,r=m?1:void 0,q=b[0].validity;a=u(d.min);e=u(d.max);f=u(d.step);var v=c.$render;c.$render=m&&u(q.rangeUnderflow)&&u(q.rangeOverflow)?function(){v();c.$setViewValue(b.val())}:v;a&&(c.$validators.min=m?function(){return!0}:function(a,b){return c.$isEmpty(b)||x(p)||b>=p},g("min",h));e&& (c.$validators.max=m?function(){return!0}:function(a,b){return c.$isEmpty(b)||x(n)||b<=n},g("max",k));f&&(c.$validators.step=m?function(){return!q.stepMismatch}:function(a,b){return c.$isEmpty(b)||x(r)||ee(b,p||0,r)},g("step",l))},checkbox:function(a,b,d,c,e,f,g,h){var k=fe(h,a,"ngTrueValue",d.ngTrueValue,!0),l=fe(h,a,"ngFalseValue",d.ngFalseValue,!1);b.on("click",function(a){c.$setViewValue(b[0].checked,a&&a.type)});c.$render=function(){b[0].checked=c.$viewValue};c.$isEmpty=function(a){return!1=== a};c.$formatters.push(function(a){return sa(a,k)});c.$parsers.push(function(a){return a?k:l})},hidden:D,button:D,submit:D,reset:D,file:D},Zc=["$browser","$sniffer","$filter","$parse",function(a,b,d,c){return{restrict:"E",require:["?ngModel"],link:{pre:function(e,f,g,h){h[0]&&(ne[L(g.type)]||ne.text)(e,f,g,h[0],b,a,d,c)}}}}],fh=/^(true|false|\d+)$/,mf=function(){function a(a,d,c){var 
e=u(c)?c:9===Ca?"":null;a.prop("value",e);d.$set("value",c)}return{restrict:"A",priority:100,compile:function(b,d){return fh.test(d.ngValue)? function(b,d,f){b=b.$eval(f.ngValue);a(d,f,b)}:function(b,d,f){b.$watch(f.ngValue,function(b){a(d,f,b)})}}}},Me=["$compile",function(a){return{restrict:"AC",compile:function(b){a.$$addBindingClass(b);return function(b,c,e){a.$$addBindingInfo(c,e.ngBind);c=c[0];b.$watch(e.ngBind,function(a){c.textContent=gc(a)})}}}}],Oe=["$interpolate","$compile",function(a,b){return{compile:function(d){b.$$addBindingClass(d);return function(c,d,f){c=a(d.attr(f.$attr.ngBindTemplate));b.$$addBindingInfo(d,c.expressions); d=d[0];f.$observe("ngBindTemplate",function(a){d.textContent=x(a)?"":a})}}}}],Ne=["$sce","$parse","$compile",function(a,b,d){return{restrict:"A",compile:function(c,e){var f=b(e.ngBindHtml),g=b(e.ngBindHtml,function(b){return a.valueOf(b)});d.$$addBindingClass(c);return function(b,c,e){d.$$addBindingInfo(c,e.ngBindHtml);b.$watch(g,function(){var d=f(b);c.html(a.getTrustedHtml(d)||"")})}}}}],lf=la({restrict:"A",require:"ngModel",link:function(a,b,d,c){c.$viewChangeListeners.push(function(){a.$eval(d.ngChange)})}}), Pe=Jc("",!0),Re=Jc("Odd",0),Qe=Jc("Even",1),Se=Qa({compile:function(a,b){b.$set("ngCloak",void 0);a.removeClass("ng-cloak")}}),Te=[function(){return{restrict:"A",scope:!0,controller:"@",priority:500}}],dd={},gh={blur:!0,focus:!0};r("click dblclick mousedown mouseup mouseover mouseout mousemove mouseenter mouseleave keydown keyup keypress submit focus blur copy cut paste".split(" "),function(a){var b=Ea("ng-"+a);dd[b]=["$parse","$rootScope",function(d,c){return{restrict:"A",compile:function(e,f){var g= d(f[b]);return function(b,d){d.on(a,function(d){var e=function(){g(b,{$event:d})};gh[a]&&c.$$phase?b.$evalAsync(e):b.$apply(e)})}}}}]});var We=["$animate","$compile",function(a,b){return{multiElement:!0,transclude:"element",priority:600,terminal:!0,restrict:"A",$$tlb:!0,link:function(d,c,e,f,g){var h,k,l;d.$watch(e.ngIf,function(d){d?k||g(function(d,f){k=f;d[d.length++]=b.$$createComment("end ngIf",e.ngIf);h={clone:d};a.enter(d,c.parent(),c)}):(l&&(l.remove(),l=null),k&&(k.$destroy(),k=null),h&&(l= tb(h.clone),a.leave(l).done(function(a){!1!==a&&(l=null)}),h=null))})}}}],Xe=["$templateRequest","$anchorScroll","$animate",function(a,b,d){return{restrict:"ECA",priority:400,terminal:!0,transclude:"element",controller:$.noop,compile:function(c,e){var f=e.ngInclude||e.src,g=e.onload||"",h=e.autoscroll;return function(c,e,m,p,n){var r=0,q,v,y,t=function(){v&&(v.remove(),v=null);q&&(q.$destroy(),q=null);y&&(d.leave(y).done(function(a){!1!==a&&(v=null)}),v=y,y=null)};c.$watch(f,function(f){var m=function(a){!1=== a||!u(h)||h&&!c.$eval(h)||b()},v=++r;f?(a(f,!0).then(function(a){if(!c.$$destroyed&&v===r){var b=c.$new();p.template=a;a=n(b,function(a){t();d.enter(a,null,e).done(m)});q=b;y=a;q.$emit("$includeContentLoaded",f);c.$eval(g)}},function(){c.$$destroyed||v!==r||(t(),c.$emit("$includeContentError",f))}),c.$emit("$includeContentRequested",f)):(t(),p.template=null)})}}}}],of=["$compile",function(a){return{restrict:"ECA",priority:-400,require:"ngInclude",link:function(b,d,c,e){ia.call(d[0]).match(/SVG/)? 
(d.empty(),a(fd(e.template,w.document).childNodes)(b,function(a){d.append(a)},{futureParentElement:d})):(d.html(e.template),a(d.contents())(b))}}}],Ye=Qa({priority:450,compile:function(){return{pre:function(a,b,d){a.$eval(d.ngInit)}}}}),kf=function(){return{restrict:"A",priority:100,require:"ngModel",link:function(a,b,d,c){var e=d.ngList||", ",f="false"!==d.ngTrim,g=f?Q(e):e;c.$parsers.push(function(a){if(!x(a)){var b=[];a&&r(a.split(g),function(a){a&&b.push(f?Q(a):a)});return b}});c.$formatters.push(function(a){if(I(a))return a.join(e)}); c.$isEmpty=function(a){return!a||!a.length}}}},nb="ng-valid",$d="ng-invalid",Ya="ng-pristine",Vb="ng-dirty",pb=K("ngModel");Sb.$inject="$scope $exceptionHandler $attrs $element $parse $animate $timeout $q $interpolate".split(" ");Sb.prototype={$$initGetterSetters:function(){if(this.$options.getOption("getterSetter")){var a=this.$$parse(this.$$attr.ngModel+"()"),b=this.$$parse(this.$$attr.ngModel+"($$$p)");this.$$ngModelGet=function(b){var c=this.$$parsedNgModel(b);C(c)&&(c=a(b));return c};this.$$ngModelSet= function(a,c){C(this.$$parsedNgModel(a))?b(a,{$$$p:c}):this.$$parsedNgModelAssign(a,c)}}else if(!this.$$parsedNgModel.assign)throw pb("nonassign",this.$$attr.ngModel,za(this.$$element));},$render:D,$isEmpty:function(a){return x(a)||""===a||null===a||a!==a},$$updateEmptyClasses:function(a){this.$isEmpty(a)?(this.$$animate.removeClass(this.$$element,"ng-not-empty"),this.$$animate.addClass(this.$$element,"ng-empty")):(this.$$animate.removeClass(this.$$element,"ng-empty"),this.$$animate.addClass(this.$$element, "ng-not-empty"))},$setPristine:function(){this.$dirty=!1;this.$pristine=!0;this.$$animate.removeClass(this.$$element,Vb);this.$$animate.addClass(this.$$element,Ya)},$setDirty:function(){this.$dirty=!0;this.$pristine=!1;this.$$animate.removeClass(this.$$element,Ya);this.$$animate.addClass(this.$$element,Vb);this.$$parentForm.$setDirty()},$setUntouched:function(){this.$touched=!1;this.$untouched=!0;this.$$animate.setClass(this.$$element,"ng-untouched","ng-touched")},$setTouched:function(){this.$touched= !0;this.$untouched=!1;this.$$animate.setClass(this.$$element,"ng-touched","ng-untouched")},$rollbackViewValue:function(){this.$$timeout.cancel(this.$$pendingDebounce);this.$viewValue=this.$$lastCommittedViewValue;this.$render()},$validate:function(){if(!U(this.$modelValue)){var a=this.$$lastCommittedViewValue,b=this.$$rawModelValue,d=this.$valid,c=this.$modelValue,e=this.$options.getOption("allowInvalid"),f=this;this.$$runValidators(b,a,function(a){e||d===a||(f.$modelValue=a?b:void 0,f.$modelValue!== c&&f.$$writeModelToScope())})}},$$runValidators:function(a,b,d){function c(){var c=!0;r(k.$validators,function(d,e){var g=Boolean(d(a,b));c=c&&g;f(e,g)});return c?!0:(r(k.$asyncValidators,function(a,b){f(b,null)}),!1)}function e(){var c=[],d=!0;r(k.$asyncValidators,function(e,g){var k=e(a,b);if(!k||!C(k.then))throw pb("nopromise",k);f(g,void 0);c.push(k.then(function(){f(g,!0)},function(){d=!1;f(g,!1)}))});c.length?k.$$q.all(c).then(function(){g(d)},D):g(!0)}function f(a,b){h===k.$$currentValidationRunId&& k.$setValidity(a,b)}function g(a){h===k.$$currentValidationRunId&&d(a)}this.$$currentValidationRunId++;var h=this.$$currentValidationRunId,k=this;(function(){var a=k.$$parserName||"parse";if(x(k.$$parserValid))f(a,null);else return k.$$parserValid||(r(k.$validators,function(a,b){f(b,null)}),r(k.$asyncValidators,function(a,b){f(b,null)})),f(a,k.$$parserValid),k.$$parserValid;return!0})()?c()?e():g(!1):g(!1)},$commitViewValue:function(){var 
a=this.$viewValue;this.$$timeout.cancel(this.$$pendingDebounce); if(this.$$lastCommittedViewValue!==a||""===a&&this.$$hasNativeValidators)this.$$updateEmptyClasses(a),this.$$lastCommittedViewValue=a,this.$pristine&&this.$setDirty(),this.$$parseAndValidate()},$$parseAndValidate:function(){var a=this.$$lastCommittedViewValue,b=this;if(this.$$parserValid=x(a)?void 0:!0)for(var d=0;d<this.$parsers.length;d++)if(a=this.$parsers[d](a),x(a)){this.$$parserValid=!1;break}U(this.$modelValue)&&(this.$modelValue=this.$$ngModelGet(this.$$scope));var c=this.$modelValue,e=this.$options.getOption("allowInvalid"); this.$$rawModelValue=a;e&&(this.$modelValue=a,b.$modelValue!==c&&b.$$writeModelToScope());this.$$runValidators(a,this.$$lastCommittedViewValue,function(d){e||(b.$modelValue=d?a:void 0,b.$modelValue!==c&&b.$$writeModelToScope())})},$$writeModelToScope:function(){this.$$ngModelSet(this.$$scope,this.$modelValue);r(this.$viewChangeListeners,function(a){try{a()}catch(b){this.$$exceptionHandler(b)}},this)},$setViewValue:function(a,b){this.$viewValue=a;this.$options.getOption("updateOnDefault")&&this.$$debounceViewValueCommit(b)}, $$debounceViewValueCommit:function(a){var b=this.$options.getOption("debounce");Y(b[a])?b=b[a]:Y(b["default"])&&(b=b["default"]);this.$$timeout.cancel(this.$$pendingDebounce);var d=this;0<b?this.$$pendingDebounce=this.$$timeout(function(){d.$commitViewValue()},b):this.$$scope.$root.$$phase?this.$commitViewValue():this.$$scope.$apply(function(){d.$commitViewValue()})},$overrideModelOptions:function(a){this.$options=this.$options.createChild(a);this.$$setUpdateOnEvents()},$processModelValue:function(){var a= this.$$format();this.$viewValue!==a&&(this.$$updateEmptyClasses(a),this.$viewValue=this.$$lastCommittedViewValue=a,this.$render(),this.$$runValidators(this.$modelValue,this.$viewValue,D))},$$format:function(){for(var a=this.$formatters,b=a.length,d=this.$modelValue;b--;)d=a[b](d);return d},$$setModelValue:function(a){this.$modelValue=this.$$rawModelValue=a;this.$$parserValid=void 0;this.$processModelValue()},$$setUpdateOnEvents:function(){this.$$updateEvents&&this.$$element.off(this.$$updateEvents, this.$$updateEventHandler);if(this.$$updateEvents=this.$options.getOption("updateOn"))this.$$element.on(this.$$updateEvents,this.$$updateEventHandler)},$$updateEventHandler:function(a){this.$$debounceViewValueCommit(a&&a.type)}};ae({clazz:Sb,set:function(a,b){a[b]=!0},unset:function(a,b){delete a[b]}});var jf=["$rootScope",function(a){return{restrict:"A",require:["ngModel","^?form","^?ngModelOptions"],controller:Sb,priority:1,compile:function(b){b.addClass(Ya).addClass("ng-untouched").addClass(nb); return{pre:function(a,b,e,f){var g=f[0];b=f[1]||g.$$parentForm;if(f=f[2])g.$options=f.$options;g.$$initGetterSetters();b.$addControl(g);e.$observe("name",function(a){g.$name!==a&&g.$$parentForm.$$renameControl(g,a)});a.$on("$destroy",function(){g.$$parentForm.$removeControl(g)})},post:function(b,c,e,f){function g(){h.$setTouched()}var h=f[0];h.$$setUpdateOnEvents();c.on("blur",function(){h.$touched||(a.$$phase?b.$evalAsync(g):b.$apply(g))})}}}}}],Tb,hh=/(\s+|^)default(\s+|$)/;Kc.prototype={getOption:function(a){return this.$$options[a]}, createChild:function(a){var b=!1;a=O({},a);r(a,function(d,c){"$inherit"===d?"*"===c?b=!0:(a[c]=this.$$options[c],"updateOn"===c&&(a.updateOnDefault=this.$$options.updateOnDefault)):"updateOn"===c&&(a.updateOnDefault=!1,a[c]=Q(d.replace(hh,function(){a.updateOnDefault=!0;return" "})))},this);b&&(delete 
a["*"],ge(a,this.$$options));ge(a,Tb.$$options);return new Kc(a)}};Tb=new Kc({updateOn:"",updateOnDefault:!0,debounce:0,getterSetter:!1,allowInvalid:!1,timezone:null});var nf=function(){function a(a, d){this.$$attrs=a;this.$$scope=d}a.$inject=["$attrs","$scope"];a.prototype={$onInit:function(){var a=this.parentCtrl?this.parentCtrl.$options:Tb,d=this.$$scope.$eval(this.$$attrs.ngModelOptions);this.$options=a.createChild(d)}};return{restrict:"A",priority:10,require:{parentCtrl:"?^^ngModelOptions"},bindToController:!0,controller:a}},Ze=Qa({terminal:!0,priority:1E3}),ih=K("ngOptions"),jh=/^\s*([\s\S]+?)(?:\s+as\s+([\s\S]+?))?(?:\s+group\s+by\s+([\s\S]+?))?(?:\s+disable\s+when\s+([\s\S]+?))?\s+for\s+(?:([$\w][$\w]*)|(?:\(\s*([$\w][$\w]*)\s*,\s*([$\w][$\w]*)\s*\)))\s+in\s+([\s\S]+?)(?:\s+track\s+by\s+([\s\S]+?))?$/, gf=["$compile","$document","$parse",function(a,b,d){function c(a,b,c){function e(a,b,c,d,f){this.selectValue=a;this.viewValue=b;this.label=c;this.group=d;this.disabled=f}function f(a){var b;if(!r&&wa(a))b=a;else{b=[];for(var c in a)a.hasOwnProperty(c)&&"$"!==c.charAt(0)&&b.push(c)}return b}var p=a.match(jh);if(!p)throw ih("iexp",a,za(b));var n=p[5]||p[7],r=p[6];a=/ as /.test(p[0])&&p[1];var q=p[9];b=d(p[2]?p[1]:n);var v=a&&d(a)||b,u=q&&d(q),t=q?function(a,b){return u(c,b)}:function(a){return Pa(a)}, w=function(a,b){return t(a,C(a,b))},x=d(p[2]||p[1]),A=d(p[3]||""),H=d(p[4]||""),G=d(p[8]),z={},C=r?function(a,b){z[r]=b;z[n]=a;return z}:function(a){z[n]=a;return z};return{trackBy:q,getTrackByValue:w,getWatchables:d(G,function(a){var b=[];a=a||[];for(var d=f(a),e=d.length,g=0;g<e;g++){var h=a===d?g:d[g],l=a[h],h=C(l,h),l=t(l,h);b.push(l);if(p[2]||p[1])l=x(c,h),b.push(l);p[4]&&(h=H(c,h),b.push(h))}return b}),getOptions:function(){for(var a=[],b={},d=G(c)||[],g=f(d),h=g.length,n=0;n<h;n++){var p=d=== g?n:g[n],r=C(d[p],p),u=v(c,r),p=t(u,r),y=x(c,r),F=A(c,r),r=H(c,r),u=new e(p,u,y,F,r);a.push(u);b[p]=u}return{items:a,selectValueMap:b,getOptionFromViewValue:function(a){return b[w(a)]},getViewValueFromOption:function(a){return q?pa(a.viewValue):a.viewValue}}}}}var e=w.document.createElement("option"),f=w.document.createElement("optgroup");return{restrict:"A",terminal:!0,require:["select","ngModel"],link:{pre:function(a,b,c,d){d[0].registerOption=D},post:function(d,h,k,l){function m(a){var b=(a=t.getOptionFromViewValue(a))&& a.element;b&&!b.selected&&(b.selected=!0);return a}function p(a,b){a.element=b;b.disabled=a.disabled;a.label!==b.label&&(b.label=a.label,b.textContent=a.label);b.value=a.selectValue}var n=l[0],q=l[1],s=k.multiple;l=0;for(var v=h.children(),y=v.length;l<y;l++)if(""===v[l].value){n.hasEmptyOption=!0;n.emptyOption=v.eq(l);break}h.empty();l=!!n.emptyOption;z(e.cloneNode(!1)).val("?");var t,w=c(k.ngOptions,h,d),x=b[0].createDocumentFragment();n.generateUnknownOptionValue=function(a){return"?"};s?(n.writeValue= function(a){if(t){var b=a&&a.map(m)||[];t.items.forEach(function(a){a.element.selected&&-1===Array.prototype.indexOf.call(b,a)&&(a.element.selected=!1)})}},n.readValue=function(){var a=h.val()||[],b=[];r(a,function(a){(a=t.selectValueMap[a])&&!a.disabled&&b.push(t.getViewValueFromOption(a))});return b},w.trackBy&&d.$watchCollection(function(){if(I(q.$viewValue))return q.$viewValue.map(function(a){return w.getTrackByValue(a)})},function(){q.$render()})):(n.writeValue=function(a){if(t){var b=h[0].options[h[0].selectedIndex], 
c=t.getOptionFromViewValue(a);b&&b.removeAttribute("selected");c?(h[0].value!==c.selectValue&&(n.removeUnknownOption(),h[0].value=c.selectValue,c.element.selected=!0),c.element.setAttribute("selected","selected")):n.selectUnknownOrEmptyOption(a)}},n.readValue=function(){var a=t.selectValueMap[h.val()];return a&&!a.disabled?(n.unselectEmptyOption(),n.removeUnknownOption(),t.getViewValueFromOption(a)):null},w.trackBy&&d.$watch(function(){return w.getTrackByValue(q.$viewValue)},function(){q.$render()})); l&&(a(n.emptyOption)(d),h.prepend(n.emptyOption),8===n.emptyOption[0].nodeType?(n.hasEmptyOption=!1,n.registerOption=function(a,b){""===b.val()&&(n.hasEmptyOption=!0,n.emptyOption=b,n.emptyOption.removeClass("ng-scope"),q.$render(),b.on("$destroy",function(){var a=n.$isEmptyOptionSelected();n.hasEmptyOption=!1;n.emptyOption=void 0;a&&q.$render()}))}):n.emptyOption.removeClass("ng-scope"));d.$watchCollection(w.getWatchables,function(){var a=t&&n.readValue();if(t)for(var b=t.items.length-1;0<=b;b--){var c= t.items[b];u(c.group)?Fb(c.element.parentNode):Fb(c.element)}t=w.getOptions();var d={};t.items.forEach(function(a){var b;if(u(a.group)){b=d[a.group];b||(b=f.cloneNode(!1),x.appendChild(b),b.label=null===a.group?"null":a.group,d[a.group]=b);var c=e.cloneNode(!1);b.appendChild(c);p(a,c)}else b=e.cloneNode(!1),x.appendChild(b),p(a,b)});h[0].appendChild(x);q.$render();q.$isEmpty(a)||(b=n.readValue(),(w.trackBy||s?sa(a,b):a===b)||(q.$setViewValue(b),q.$render()))})}}}}],$e=["$locale","$interpolate","$log", function(a,b,d){var c=/{}/g,e=/^when(Minus)?(.+)$/;return{link:function(f,g,h){function k(a){g.text(a||"")}var l=h.count,m=h.$attr.when&&g.attr(h.$attr.when),p=h.offset||0,n=f.$eval(m)||{},q={},s=b.startSymbol(),v=b.endSymbol(),u=s+l+"-"+p+v,t=$.noop,w;r(h,function(a,b){var c=e.exec(b);c&&(c=(c[1]?"-":"")+L(c[2]),n[c]=g.attr(h.$attr[b]))});r(n,function(a,d){q[d]=b(a.replace(c,u))});f.$watch(l,function(b){var c=parseFloat(b),e=U(c);e||c in n||(c=a.pluralCat(c-p));c===w||e&&U(w)||(t(),e=q[c],x(e)?(null!= b&&d.debug("ngPluralize: no rule defined for '"+c+"' in "+m),t=D,k()):t=f.$watch(e,k),w=c)})}}}],af=["$parse","$animate","$compile",function(a,b,d){var c=K("ngRepeat"),e=function(a,b,c,d,e,m,p){a[c]=d;e&&(a[e]=m);a.$index=b;a.$first=0===b;a.$last=b===p-1;a.$middle=!(a.$first||a.$last);a.$odd=!(a.$even=0===(b&1))};return{restrict:"A",multiElement:!0,transclude:"element",priority:1E3,terminal:!0,$$tlb:!0,compile:function(f,g){var h=g.ngRepeat,k=d.$$createComment("end ngRepeat",h),l=h.match(/^\s*([\s\S]+?)\s+in\s+([\s\S]+?)(?:\s+as\s+([\s\S]+?))?(?:\s+track\s+by\s+([\s\S]+?))?\s*$/); if(!l)throw c("iexp",h);var m=l[1],p=l[2],n=l[3],q=l[4],l=m.match(/^(?:(\s*[$\w]+)|\(\s*([$\w]+)\s*,\s*([$\w]+)\s*\))$/);if(!l)throw c("iidexp",m);var s=l[3]||l[1],v=l[2];if(n&&(!/^[$a-zA-Z_][$a-zA-Z0-9_]*$/.test(n)||/^(null|undefined|this|\$index|\$first|\$middle|\$last|\$even|\$odd|\$parent|\$root|\$id)$/.test(n)))throw c("badident",n);var u,t,w,x,z={$id:Pa};q?u=a(q):(w=function(a,b){return Pa(b)},x=function(a){return a});return function(a,d,f,g,l){u&&(t=function(b,c,d){v&&(z[v]=b);z[s]=c;z.$index= d;return u(a,z)});var m=S();a.$watchCollection(p,function(f){var g,p,q=d[0],u,y=S(),z,F,C,A,D,B,E;n&&(a[n]=f);if(wa(f))D=f,p=t||w;else for(E in p=t||x,D=[],f)ra.call(f,E)&&"$"!==E.charAt(0)&&D.push(E);z=D.length;E=Array(z);for(g=0;g<z;g++)if(F=f===D?g:D[g],C=f[F],A=p(F,C,g),m[A])B=m[A],delete m[A],y[A]=B,E[g]=B;else{if(y[A])throw r(E,function(a){a&&a.scope&&(m[a.id]=a)}),c("dupes",h,A,C);E[g]={id:A,scope:void 0,clone:void 
0};y[A]=!0}for(u in m){B=m[u];A=tb(B.clone);b.leave(A);if(A[0].parentNode)for(g= 0,p=A.length;g<p;g++)A[g].$$NG_REMOVED=!0;B.scope.$destroy()}for(g=0;g<z;g++)if(F=f===D?g:D[g],C=f[F],B=E[g],B.scope){u=q;do u=u.nextSibling;while(u&&u.$$NG_REMOVED);B.clone[0]!==u&&b.move(tb(B.clone),null,q);q=B.clone[B.clone.length-1];e(B.scope,g,s,C,v,F,z)}else l(function(a,c){B.scope=c;var d=k.cloneNode(!1);a[a.length++]=d;b.enter(a,null,q);q=d;B.clone=a;y[B.id]=B;e(B.scope,g,s,C,v,F,z)});m=y})}}}}],bf=["$animate",function(a){return{restrict:"A",multiElement:!0,link:function(b,d,c){b.$watch(c.ngShow, function(b){a[b?"removeClass":"addClass"](d,"ng-hide",{tempClasses:"ng-hide-animate"})})}}}],Ve=["$animate",function(a){return{restrict:"A",multiElement:!0,link:function(b,d,c){b.$watch(c.ngHide,function(b){a[b?"addClass":"removeClass"](d,"ng-hide",{tempClasses:"ng-hide-animate"})})}}}],cf=Qa(function(a,b,d){a.$watch(d.ngStyle,function(a,d){d&&a!==d&&r(d,function(a,c){b.css(c,"")});a&&b.css(a)},!0)}),df=["$animate","$compile",function(a,b){return{require:"ngSwitch",controller:["$scope",function(){this.cases= {}}],link:function(d,c,e,f){var g=[],h=[],k=[],l=[],m=function(a,b){return function(c){!1!==c&&a.splice(b,1)}};d.$watch(e.ngSwitch||e.on,function(c){for(var d,e;k.length;)a.cancel(k.pop());d=0;for(e=l.length;d<e;++d){var q=tb(h[d].clone);l[d].$destroy();(k[d]=a.leave(q)).done(m(k,d))}h.length=0;l.length=0;(g=f.cases["!"+c]||f.cases["?"])&&r(g,function(c){c.transclude(function(d,e){l.push(e);var f=c.element;d[d.length++]=b.$$createComment("end ngSwitchWhen");h.push({clone:d});a.enter(d,f.parent(), f)})})})}}}],ef=Qa({transclude:"element",priority:1200,require:"^ngSwitch",multiElement:!0,link:function(a,b,d,c,e){a=d.ngSwitchWhen.split(d.ngSwitchWhenSeparator).sort().filter(function(a,b,c){return c[b-1]!==a});r(a,function(a){c.cases["!"+a]=c.cases["!"+a]||[];c.cases["!"+a].push({transclude:e,element:b})})}}),ff=Qa({transclude:"element",priority:1200,require:"^ngSwitch",multiElement:!0,link:function(a,b,d,c,e){c.cases["?"]=c.cases["?"]||[];c.cases["?"].push({transclude:e,element:b})}}),kh=K("ngTransclude"), hf=["$compile",function(a){return{restrict:"EAC",compile:function(b){var d=a(b.contents());b.empty();return function(a,b,f,g,h){function k(){d(a,function(a){b.append(a)})}if(!h)throw kh("orphan",za(b));f.ngTransclude===f.$attr.ngTransclude&&(f.ngTransclude="");f=f.ngTransclude||f.ngTranscludeSlot;h(function(a,c){var d;if(d=a.length)a:{d=0;for(var f=a.length;d<f;d++){var g=a[d];if(g.nodeType!==Oa||g.nodeValue.trim()){d=!0;break a}}d=void 0}d?b.append(a):(k(),c.$destroy())},null,f);f&&!h.isSlotFilled(f)&& k()}}}}],Je=["$templateCache",function(a){return{restrict:"E",terminal:!0,compile:function(b,d){"text/ng-template"===d.type&&a.put(d.id,b[0].text)}}}],lh={$setViewValue:D,$render:D},mh=["$element","$scope",function(a,b){function d(){g||(g=!0,b.$$postDigest(function(){g=!1;e.ngModelCtrl.$render()}))}function c(a){h||(h=!0,b.$$postDigest(function(){b.$$destroyed||(h=!1,e.ngModelCtrl.$setViewValue(e.readValue()),a&&e.ngModelCtrl.$render())}))}var e=this,f=new Hb;e.selectValueMap={};e.ngModelCtrl=lh; e.multiple=!1;e.unknownOption=z(w.document.createElement("option"));e.hasEmptyOption=!1;e.emptyOption=void 
0;e.renderUnknownOption=function(b){b=e.generateUnknownOptionValue(b);e.unknownOption.val(b);a.prepend(e.unknownOption);Ga(e.unknownOption,!0);a.val(b)};e.updateUnknownOption=function(b){b=e.generateUnknownOptionValue(b);e.unknownOption.val(b);Ga(e.unknownOption,!0);a.val(b)};e.generateUnknownOptionValue=function(a){return"? "+Pa(a)+" ?"};e.removeUnknownOption=function(){e.unknownOption.parent()&& e.unknownOption.remove()};e.selectEmptyOption=function(){e.emptyOption&&(a.val(""),Ga(e.emptyOption,!0))};e.unselectEmptyOption=function(){e.hasEmptyOption&&Ga(e.emptyOption,!1)};b.$on("$destroy",function(){e.renderUnknownOption=D});e.readValue=function(){var b=a.val(),b=b in e.selectValueMap?e.selectValueMap[b]:b;return e.hasOption(b)?b:null};e.writeValue=function(b){var c=a[0].options[a[0].selectedIndex];c&&Ga(z(c),!1);e.hasOption(b)?(e.removeUnknownOption(),c=Pa(b),a.val(c in e.selectValueMap? c:b),Ga(z(a[0].options[a[0].selectedIndex]),!0)):e.selectUnknownOrEmptyOption(b)};e.addOption=function(a,b){if(8!==b[0].nodeType){Ia(a,'"option value"');""===a&&(e.hasEmptyOption=!0,e.emptyOption=b);var c=f.get(a)||0;f.set(a,c+1);d()}};e.removeOption=function(a){var b=f.get(a);b&&(1===b?(f.delete(a),""===a&&(e.hasEmptyOption=!1,e.emptyOption=void 0)):f.set(a,b-1))};e.hasOption=function(a){return!!f.get(a)};e.$hasEmptyOption=function(){return e.hasEmptyOption};e.$isUnknownOptionSelected=function(){return a[0].options[0]=== e.unknownOption[0]};e.$isEmptyOptionSelected=function(){return e.hasEmptyOption&&a[0].options[a[0].selectedIndex]===e.emptyOption[0]};e.selectUnknownOrEmptyOption=function(a){null==a&&e.emptyOption?(e.removeUnknownOption(),e.selectEmptyOption()):e.unknownOption.parent().length?e.updateUnknownOption(a):e.renderUnknownOption(a)};var g=!1,h=!1;e.registerOption=function(a,b,f,g,h){if(f.$attr.ngValue){var q,r=NaN;f.$observe("value",function(a){var d,f=b.prop("selected");u(r)&&(e.removeOption(q),delete e.selectValueMap[r], d=!0);r=Pa(a);q=a;e.selectValueMap[r]=a;e.addOption(a,b);b.attr("value",r);d&&f&&c()})}else g?f.$observe("value",function(a){e.readValue();var d,f=b.prop("selected");u(q)&&(e.removeOption(q),d=!0);q=a;e.addOption(a,b);d&&f&&c()}):h?a.$watch(h,function(a,d){f.$set("value",a);var g=b.prop("selected");d!==a&&e.removeOption(d);e.addOption(a,b);d&&g&&c()}):e.addOption(f.value,b);f.$observe("disabled",function(a){if("true"===a||a&&b.prop("selected"))e.multiple?c(!0):(e.ngModelCtrl.$setViewValue(null),e.ngModelCtrl.$render())}); b.on("$destroy",function(){var a=e.readValue(),b=f.value;e.removeOption(b);d();(e.multiple&&a&&-1!==a.indexOf(b)||a===b)&&c(!0)})}}],Ke=function(){return{restrict:"E",require:["select","?ngModel"],controller:mh,priority:1,link:{pre:function(a,b,d,c){var e=c[0],f=c[1];if(f){if(e.ngModelCtrl=f,b.on("change",function(){e.removeUnknownOption();a.$apply(function(){f.$setViewValue(e.readValue())})}),d.multiple){e.multiple=!0;e.readValue=function(){var a=[];r(b.find("option"),function(b){b.selected&&!b.disabled&& (b=b.value,a.push(b in e.selectValueMap?e.selectValueMap[b]:b))});return a};e.writeValue=function(a){r(b.find("option"),function(b){var c=!!a&&(-1!==Array.prototype.indexOf.call(a,b.value)||-1!==Array.prototype.indexOf.call(a,e.selectValueMap[b.value]));c!==b.selected&&Ga(z(b),c)})};var g,h=NaN;a.$watch(function(){h!==f.$viewValue||sa(g,f.$viewValue)||(g=ka(f.$viewValue),f.$render());h=f.$viewValue});f.$isEmpty=function(a){return!a||0===a.length}}}else e.registerOption=D},post:function(a,b,d,c){var e= c[1];if(e){var 
f=c[0];e.$render=function(){f.writeValue(e.$viewValue)}}}}}},Le=["$interpolate",function(a){return{restrict:"E",priority:100,compile:function(b,d){var c,e;u(d.ngValue)||(u(d.value)?c=a(d.value,!0):(e=a(b.text(),!0))||d.$set("value",b.text()));return function(a,b,d){var k=b.parent();(k=k.data("$selectController")||k.parent().data("$selectController"))&&k.registerOption(a,b,d,c,e)}}}}],ad=function(){return{restrict:"A",require:"?ngModel",link:function(a,b,d,c){c&&(d.required=!0,c.$validators.required= function(a,b){return!d.required||!c.$isEmpty(b)},d.$observe("required",function(){c.$validate()}))}}},$c=function(){return{restrict:"A",require:"?ngModel",link:function(a,b,d,c){if(c){var e,f=d.ngPattern||d.pattern;d.$observe("pattern",function(a){E(a)&&0<a.length&&(a=new RegExp("^"+a+"$"));if(a&&!a.test)throw K("ngPattern")("noregexp",f,a,za(b));e=a||void 0;c.$validate()});c.$validators.pattern=function(a,b){return c.$isEmpty(b)||x(e)||e.test(b)}}}}},cd=function(){return{restrict:"A",require:"?ngModel", link:function(a,b,d,c){if(c){var e=-1;d.$observe("maxlength",function(a){a=Z(a);e=U(a)?-1:a;c.$validate()});c.$validators.maxlength=function(a,b){return 0>e||c.$isEmpty(b)||b.length<=e}}}}},bd=function(){return{restrict:"A",require:"?ngModel",link:function(a,b,d,c){if(c){var e=0;d.$observe("minlength",function(a){e=Z(a)||0;c.$validate()});c.$validators.minlength=function(a,b){return c.$isEmpty(b)||b.length>=e}}}}};w.angular.bootstrap?w.console&&console.log("WARNING: Tried to load AngularJS more than once."): (Be(),Ee($),$.module("ngLocale",[],["$provide",function(a){function b(a){a+="";var b=a.indexOf(".");return-1==b?0:a.length-b-1}a.value("$locale",{DATETIME_FORMATS:{AMPMS:["AM","PM"],DAY:"Sunday Monday Tuesday Wednesday Thursday Friday Saturday".split(" "),ERANAMES:["Before Christ","Anno Domini"],ERAS:["BC","AD"],FIRSTDAYOFWEEK:6,MONTH:"January February March April May June July August September October November December".split(" "),SHORTDAY:"Sun Mon Tue Wed Thu Fri Sat".split(" "),SHORTMONTH:"Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec".split(" "), STANDALONEMONTH:"January February March April May June July August September October November December".split(" "),WEEKENDRANGE:[5,6],fullDate:"EEEE, MMMM d, y",longDate:"MMMM d, y",medium:"MMM d, y h:mm:ss a",mediumDate:"MMM d, y",mediumTime:"h:mm:ss a","short":"M/d/yy h:mm a",shortDate:"M/d/yy",shortTime:"h:mm a"},NUMBER_FORMATS:{CURRENCY_SYM:"$",DECIMAL_SEP:".",GROUP_SEP:",",PATTERNS:[{gSize:3,lgSize:3,maxFrac:3,minFrac:0,minInt:1,negPre:"-",negSuf:"",posPre:"",posSuf:""},{gSize:3,lgSize:3,maxFrac:2, minFrac:2,minInt:1,negPre:"-\u00a4",negSuf:"",posPre:"\u00a4",posSuf:""}]},id:"en-us",localeID:"en_US",pluralCat:function(a,c){var e=a|0,f=c;void 0===f&&(f=Math.min(b(a),3));Math.pow(10,f);return 1==e&&0==f?"one":"other"}})}]),z(function(){we(w.document,Uc)}))})(window);!window.angular.$$csp().noInlineStyle&&window.angular.element(document.head).prepend('<style type="text/css">@charset "UTF-8";[ng\\:cloak],[ng-cloak],[data-ng-cloak],[x-ng-cloak],.ng-cloak,.x-ng-cloak,.ng-hide:not(.ng-hide-animate){display:none !important;}ng\\:form{display:block;}.ng-animate-shim{visibility:hidden;}.ng-anchor{position:absolute;}</style>'); //# sourceMappingURL=angular.min.js.map
PypiClean
/garbas.forum-0.1b4.tar.gz/garbas.forum-0.1b4/garbas/forum/browser/topic.py
from datetime import datetime from zope.i18n import translate from zope.component import getUtility from zope.component import getMultiAdapter from plone.memoize.instance import memoize from collective.captcha.browser.captcha import Captcha from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile from Products.CMFCore.utils import getToolByName from Products.CMFPlone import Batch from Products.CMFCore.interfaces import ISiteRoot from email.MIMEMultipart import MIMEMultipart from email.MIMEBase import MIMEBase from email.MIMEText import MIMEText from email.Utils import COMMASPACE, formatdate from email import Encoders from garbas.forum.browser import BrowserContentView from garbas.forum.interfaces import IForumPost from garbas.forum.interfaces import IForumNotification from garbas.forum import ForumMessageFactory as _ POST_SUCCESS = _(u'post_success', default='Post was successfuly added.') POST_TITLE_ERROR = _(u'post_title_error', default='Subject is required. Please correct.') POST_TEXT_ERROR = _(u'post_text_error', default='Text is required. Please correct.') CAPTCHA_ERROR = _(u'captcha_error', default=u'Captcha is required. Please correct.') CAPTCHA_ERROR2 = _(u'captcha_error2', default=u'Captcha is incorrect.') class ForumTopicView(BrowserContentView): """ topic view """ errors = dict() render = ViewPageTemplateFile('templates/topic.pt') def __init__(self, context, request, view=None): BrowserContentView.__init__(self, context, request, view) self.request.set('disable_border', 1) def render_template(self): return 'garbas.forum.topic_view' def is_manager(self): membership = getToolByName(self.context, 'portal_membership') member = membership.getAuthenticatedMember() return 'Manager' in member.getRolesInContext(self.context) def has_permissions_to_addposts(self): membership = getToolByName(self.context, 'portal_membership') return membership.checkPermission("Forum: Add ForumPost", self.context) def has_posts(self): return len(self.posts()) > 0 @memoize def posts(self): catalog = getToolByName(self.context, 'portal_catalog') membership = getToolByName(self.context, 'portal_membership') b_size = getattr(self.request, 'b_size', 20) b_start = getattr(self.request, 'b_start', 0) result = [ dict( title = 'RE: ' + self.context.title, UID = item.UID, author = item.Creator and item.Creator or 'Anon', author_url = self.site_url+'/author/'+str(item.Creator), portrait_url = membership.getPersonalPortrait(item.Creator).absolute_url(), created = item.created, text = item.getObject().text ) for item in catalog( object_provides = IForumPost.__identifier__, path = dict(query='/'.join(self.context.getPhysicalPath()), depth=1), sort_on = 'modified',)] return Batch([dict( title = self.context.title, UID = self.context.UID(), author = self.context.Creator(), author_url = self.site_url+'/author/'+str(self.context.Creator()), portrait_url = membership.getPersonalPortrait(self.context.Creator()).absolute_url(), created = self.context.CreationDate(), text = self.context.text )] + result, b_size, b_start, orphan=1) class ForumAddPost(BrowserContentView): """ add post form """ errors = dict() render = ViewPageTemplateFile('templates/add_post.pt') def __init__(self, context, request, view=None): BrowserContentView.__init__(self, context, request, view) self.request.set('disable_border', 1) def render_template(self): return 'garbas.forum.add_post' def update(self): """ all action happens here """ if getattr(self.request, 'unsubscribe', None): member = '' self.unsubscribe(member) elif 
getattr(self.request, 'form.button.submit', None): # VALIDATE FIELDS self.errors = {} for method in dir(self): if method[:9] == 'validate_': method = getattr(self, method, False) if method: errors = method() if errors: self.errors.update(errors) if not self.errors: transforms = getToolByName(self.context, 'portal_transforms') text = getattr(self.request, 'post_text', None) text = transforms('text_to_html', text) now = datetime.today() post_id = self.context.invokeFactory('ForumPost', id=now.strftime('forumpost_%Y-%m-%d.')+str(now.microsecond), text=text) post = getattr(self.context, post_id) post._renameAfterCreation() portal = getUtility(ISiteRoot) language = getToolByName(self.context, 'portal_languages') membership = getToolByName(self.context, 'portal_membership') localization_code = language.getLanguageBindings()[0] mail_encoding = portal.getProperty('email_charset') portal_from = str(portal.getProperty('email_from_address')) + \ '<' + str(portal.getProperty('email_from_name ')) + '>' member = str(membership.getAuthenticatedMember()) if member: IForumNotification(self.context).subscribe(member) mail = MIMEMultipart() mail.set_charset(mail_encoding) mail['From'] = portal_from mail['Date'] = formatdate(localtime=True) mail['Subject'] = translate('notify', domain = 'garbas.forum', target_language = localization_code, context = self.context).encode(mail_encoding) mail.attach( MIMEText( translate( 'mail_add_new_member_text', domain = 'garbas.forum', mapping = dict(), target_language = localization_code, context = self.context).encode(mail_encoding))) host = self.context.MailHost emails = IForumNotification(self.context).notification_emails() for email in emails: mail['To'] = email host.send(mail.as_string().decode(mail_encoding).encode(mail_encoding)) # FIXME :: portal_status_message si not working self.request.response.redirect( self.site_url + '/' + \ post.absolute_url() + '/' + \ '?portal_status_messages=' + POST_SUCCESS) def validate_text(self): if 'post_text' not in self.request or \ not self.request['post_text']: self.errors['post_text'] = POST_TEXT_ERROR def validate_captcha(self): membership = getToolByName(self.context, 'portal_membership') if not membership.isAnonymousUser(): return if 'captcha' not in self.request or \ not self.request['captcha']: self.errors['captcha'] = CAPTCHA_ERROR elif not Captcha(self.context, self.request).verify(self.request['captcha']): self.errors['captcha'] = CAPTCHA_ERROR2 def is_anonymous(self): membership = getToolByName(self.context, 'portal_membership') return membership.isAnonymousUser() def captcha(self): return Captcha(self.context, self.request) def unsubscribe(self, member): IForumNotification(self.context).unsubscribe(member)
PypiClean
/django-geoexplorer-worldmap-4.0.72.tar.gz/django-geoexplorer-worldmap-4.0.72/geoexplorer-worldmap/static/worldmap_client/externals/gxp/src/script/plugins/Legend.js
* @requires plugins/Tool.js * @requires GeoExt/widgets/LegendPanel.js * @requires GeoExt/widgets/WMSLegend.js */ /** api: (define) * module = gxp.plugins * class = Legend */ /** api: (extends) * plugins/Tool.js */ Ext.namespace("gxp.plugins"); /** api: constructor * .. class:: Legend(config) * * Provides an action to display a legend in a new window. */ gxp.plugins.Legend = Ext.extend(gxp.plugins.Tool, { /** api: ptype = gxp_legend */ ptype: "gxp_legend", /** api: config[menuText] * ``String`` * Text for legend menu item (i18n). */ menuText: "Legend", /** api: config[tooltip] * ``String`` * Text for legend action tooltip (i18n). */ tooltip: "Show Legend", /** api: config[actionTarget] * ``Object`` or ``String`` or ``Array`` Where to place the tool's actions * (e.g. buttons or menus)? Use null as the default since our tool has both * output and action(s). */ actionTarget: null, /** private: method[constructor] */ constructor: function(config) { gxp.plugins.Legend.superclass.constructor.apply(this, arguments); if (!this.outputConfig) { this.outputConfig = { width: 300, height: 400 }; } Ext.applyIf(this.outputConfig, {title: this.menuText}); }, /** api: method[addActions] */ addActions: function() { var actions = [{ menuText: this.menuText, iconCls: "gxp-icon-legend", tooltip: this.tooltip, handler: function() { this.removeOutput(); this.addOutput(); }, scope: this }]; return gxp.plugins.Legend.superclass.addActions.apply(this, [actions]); }, /** api: method[getLegendPanel] * :returns: ``GeoExt.LegendPanel`` * * Get the legend panel associated with this legend plugin. */ getLegendPanel: function() { return this.output[0]; }, /** private: method[addOutput] * :arg config: ``Object`` */ addOutput: function(config) { return gxp.plugins.Legend.superclass.addOutput.call(this, Ext.apply({ xtype: 'gx_legendpanel', ascending: false, border: false, hideMode: "offsets", layerStore: this.target.mapPanel.layers, defaults: {cls: 'gxp-legend-item'} }, config)); } }); Ext.preg(gxp.plugins.Legend.prototype.ptype, gxp.plugins.Legend);
PypiClean
/azure-ai-language-conversations-1.1.0.zip/azure-ai-language-conversations-1.1.0/azure/ai/language/conversations/authoring/aio/_client.py
from copy import deepcopy from typing import Any, Awaitable, TYPE_CHECKING from azure.core import AsyncPipelineClient from azure.core.rest import AsyncHttpResponse, HttpRequest from .._serialization import Deserializer, Serializer from ._configuration import ConversationAuthoringClientConfiguration from ._operations import ConversationAuthoringClientOperationsMixin if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential class ConversationAuthoringClient( ConversationAuthoringClientOperationsMixin ): # pylint: disable=client-accepts-api-version-keyword """The language service conversations API is a suite of natural language processing (NLP) skills that can be used to analyze structured conversations (textual or spoken). Further documentation can be found in https://docs.microsoft.com/azure/cognitive-services/language-service/overview. :param endpoint: Supported Cognitive Services endpoint (e.g., https://:code:`<resource-name>`.cognitiveservices.azure.com). Required. :type endpoint: str :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :keyword api_version: Api Version. Default value is "2023-04-01". Note that overriding this default value may result in unsupported behavior. :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. """ def __init__(self, endpoint: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: _endpoint = "{Endpoint}/language" self._config = ConversationAuthoringClientConfiguration(endpoint=endpoint, credential=credential, **kwargs) self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=_endpoint, config=self._config, **kwargs) self._serialize = Serializer() self._deserialize = Deserializer() self._serialize.client_side_validation = False def send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]: """Runs the network request through the client's chained policies. >>> from azure.core.rest import HttpRequest >>> request = HttpRequest("GET", "https://www.example.org/") <HttpRequest [GET], url: 'https://www.example.org/'> >>> response = await client.send_request(request) <AsyncHttpResponse: 200 OK> For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request :param request: The network request you want to make. Required. :type request: ~azure.core.rest.HttpRequest :keyword bool stream: Whether the response payload will be streamed. Defaults to False. :return: The response of your network call. Does not do error handling on your response. :rtype: ~azure.core.rest.AsyncHttpResponse """ request_copy = deepcopy(request) path_format_arguments = { "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) return self._client.send_request(request_copy, **kwargs) async def close(self) -> None: await self._client.close() async def __aenter__(self) -> "ConversationAuthoringClient": await self._client.__aenter__() return self async def __aexit__(self, *exc_details: Any) -> None: await self._client.__aexit__(*exc_details)
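As a quick illustration of the calling pattern described in the class docstring, here is a minimal async sketch. The public import path, the use of DefaultAzureCredential from azure.identity.aio, and the request route are assumptions made for the example; any AsyncTokenCredential and any valid service route work the same way.

# Minimal sketch, not part of this module: build the client and send a raw request.
import asyncio

from azure.core.rest import HttpRequest
from azure.identity.aio import DefaultAzureCredential  # assumed credential type
from azure.ai.language.conversations.authoring.aio import ConversationAuthoringClient


async def main():
    credential = DefaultAzureCredential()
    client = ConversationAuthoringClient(
        endpoint="https://<resource-name>.cognitiveservices.azure.com",
        credential=credential,
    )
    # send_request() runs the request through the client's chained policies and
    # returns the raw response; it does no error handling on the payload.
    request = HttpRequest("GET", "/authoring/analyze-conversations/projects")  # hypothetical route
    response = await client.send_request(request)
    print(response.status_code)
    await client.close()
    await credential.close()


asyncio.run(main())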
PypiClean
/python-rainwave-client-0.10.0.tar.gz/python-rainwave-client-0.10.0/src/rainwaveclient/listener.py
class RainwaveListener(dict):
    """A :class:`RainwaveListener` object represents a radio listener."""

    #: The :class:`RainwaveChannel` the listener belongs to.
    channel = None

    def __init__(self, channel, raw_info):
        self.channel = channel
        super(RainwaveListener, self).__init__(raw_info)

    def __repr__(self):
        return f'<RainwaveListener [{self}]>'

    def __str__(self):
        return self.name

    @property
    def avatar(self):
        """The URL of the listener's avatar."""
        return self['avatar']

    @property
    def color(self):
        """See :attr:`colour`."""
        return self.colour

    @property
    def colour(self):
        """A hexadecimal string representing the listener's colour on the forums."""
        return self['colour']

    @property
    def id(self):
        """The ID of the listener."""
        if 'id' in self:
            return self['id']
        return self['user_id']

    @property
    def losing_requests(self):
        """The number of requests made by the listener that lost their election."""
        return self['losing_requests']

    @property
    def losing_votes(self):
        """The number of votes the listener has given to a song that lost an election."""
        return self['losing_votes']

    @property
    def mind_changes(self):
        """The total number of times the listener changed a song rating."""
        return self['mind_changes']

    @property
    def name(self):
        """The name of the listener."""
        return self['name']

    @property
    def rank(self):
        """A string representing the listener's title on the forums."""
        return self['rank']

    @property
    def total_ratings(self):
        """The total number of songs the listener has rated."""
        return self['total_ratings']

    @property
    def total_requests(self):
        """The total number of requests the listener has made."""
        return self['total_requests']

    @property
    def total_votes(self):
        """The number of votes the listener has cast in the last two weeks."""
        return self['total_votes']

    @property
    def user_id(self):
        """See :attr:`id`."""
        return self.id

    @property
    def winning_requests(self):
        """The number of requests made by the listener that won their election."""
        return self['winning_requests']

    @property
    def winning_votes(self):
        """The number of votes the listener has given to a song that won an election."""
        return self['winning_votes']
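A small usage sketch for the wrapper above; the raw_info dict and the channel=None argument are made up for illustration, whereas the real client passes a RainwaveChannel and data returned by the Rainwave API.

# Illustrative only: drive RainwaveListener with a hand-built raw_info dict.
raw_info = {
    'user_id': 42,
    'name': 'example_listener',
    'colour': 'FF0000',
    'total_votes': 7,
}
listener = RainwaveListener(channel=None, raw_info=raw_info)
print(listener.name)         # example_listener
print(listener.id)           # 42 -- falls back to 'user_id' because 'id' is absent
print(listener.total_votes)  # 7
print(repr(listener))        # <RainwaveListener [example_listener]>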
PypiClean
/auto_augment-1.0.0-cp38-cp38-manylinux1_x86_64.whl/auto_augment/third_party/PaddleClas/ppcls/modeling/architectures/resnet_vd.py
from __future__ import absolute_import from __future__ import division from __future__ import print_function import math import paddle import paddle.fluid as fluid from paddle.fluid.param_attr import ParamAttr __all__ = [ "ResNet", "ResNet18_vd", "ResNet34_vd", "ResNet50_vd", "ResNet101_vd", "ResNet152_vd", "ResNet200_vd" ] class ResNet(): def __init__(self, layers=50, is_3x3=False, postfix_name="", lr_mult_list=[1.0, 1.0, 1.0, 1.0, 1.0]): self.layers = layers self.is_3x3 = is_3x3 self.postfix_name = "" if postfix_name is None else postfix_name self.lr_mult_list = lr_mult_list assert len( self.lr_mult_list ) == 5, "lr_mult_list length in ResNet must be 5 but got {}!!".format( len(self.lr_mult_list)) self.curr_stage = 0 def net(self, input, class_dim=1000): is_3x3 = self.is_3x3 layers = self.layers supported_layers = [18, 34, 50, 101, 152, 200] assert layers in supported_layers, \ "supported layers are {} but input layer is {}".format(supported_layers, layers) if layers == 18: depth = [2, 2, 2, 2] elif layers == 34 or layers == 50: depth = [3, 4, 6, 3] elif layers == 101: depth = [3, 4, 23, 3] elif layers == 152: depth = [3, 8, 36, 3] elif layers == 200: depth = [3, 12, 48, 3] num_filters = [64, 128, 256, 512] if is_3x3 == False: conv = self.conv_bn_layer( input=input, num_filters=64, filter_size=7, stride=2, act='relu') else: conv = self.conv_bn_layer( input=input, num_filters=32, filter_size=3, stride=2, act='relu', name='conv1_1') conv = self.conv_bn_layer( input=conv, num_filters=32, filter_size=3, stride=1, act='relu', name='conv1_2') conv = self.conv_bn_layer( input=conv, num_filters=64, filter_size=3, stride=1, act='relu', name='conv1_3') conv = fluid.layers.pool2d( input=conv, pool_size=3, pool_stride=2, pool_padding=1, pool_type='max') if layers >= 50: for block in range(len(depth)): self.curr_stage += 1 for i in range(depth[block]): if layers in [101, 152, 200] and block == 2: if i == 0: conv_name = "res" + str(block + 2) + "a" else: conv_name = "res" + str(block + 2) + "b" + str(i) else: conv_name = "res" + str(block + 2) + chr(97 + i) conv = self.bottleneck_block( input=conv, num_filters=num_filters[block], stride=2 if i == 0 and block != 0 else 1, if_first=block == i == 0, name=conv_name) else: for block in range(len(depth)): self.curr_stage += 1 for i in range(depth[block]): conv_name = "res" + str(block + 2) + chr(97 + i) conv = self.basic_block( input=conv, num_filters=num_filters[block], stride=2 if i == 0 and block != 0 else 1, if_first=block == i == 0, name=conv_name) pool = fluid.layers.pool2d( input=conv, pool_type='avg', global_pooling=True) stdv = 1.0 / math.sqrt(pool.shape[1] * 1.0) out = fluid.layers.fc( input=pool, size=class_dim, param_attr=fluid.param_attr.ParamAttr( name="fc_0.w_0" + self.postfix_name, initializer=fluid.initializer.Uniform(-stdv, stdv)), bias_attr=ParamAttr(name="fc_0.b_0" + self.postfix_name)) return out def conv_bn_layer(self, input, num_filters, filter_size, stride=1, groups=1, act=None, name=None): lr_mult = self.lr_mult_list[self.curr_stage] conv = fluid.layers.conv2d( input=input, num_filters=num_filters, filter_size=filter_size, stride=stride, padding=(filter_size - 1) // 2, groups=groups, act=None, param_attr=ParamAttr(name=name + "_weights" + self.postfix_name), bias_attr=False) if name == "conv1": bn_name = "bn_" + name else: bn_name = "bn" + name[3:] return fluid.layers.batch_norm( input=conv, act=act, param_attr=ParamAttr(name=bn_name + '_scale' + self.postfix_name), bias_attr=ParamAttr(bn_name + '_offset' + self.postfix_name), 
moving_mean_name=bn_name + '_mean' + self.postfix_name, moving_variance_name=bn_name + '_variance' + self.postfix_name) def conv_bn_layer_new(self, input, num_filters, filter_size, stride=1, groups=1, act=None, name=None): lr_mult = self.lr_mult_list[self.curr_stage] pool = fluid.layers.pool2d( input=input, pool_size=2, pool_stride=2, pool_padding=0, pool_type='avg', ceil_mode=True) conv = fluid.layers.conv2d( input=pool, num_filters=num_filters, filter_size=filter_size, stride=1, padding=(filter_size - 1) // 2, groups=groups, act=None, param_attr=ParamAttr( name=name + "_weights" + self.postfix_name, learning_rate=lr_mult), bias_attr=False) if name == "conv1": bn_name = "bn_" + name else: bn_name = "bn" + name[3:] return fluid.layers.batch_norm( input=conv, act=act, param_attr=ParamAttr( name=bn_name + '_scale' + self.postfix_name, learning_rate=lr_mult), bias_attr=ParamAttr( bn_name + '_offset' + self.postfix_name, learning_rate=lr_mult), moving_mean_name=bn_name + '_mean' + self.postfix_name, moving_variance_name=bn_name + '_variance' + self.postfix_name) def shortcut(self, input, ch_out, stride, name, if_first=False): ch_in = input.shape[1] if ch_in != ch_out or stride != 1: if if_first: return self.conv_bn_layer(input, ch_out, 1, stride, name=name) else: return self.conv_bn_layer_new( input, ch_out, 1, stride, name=name) elif if_first: return self.conv_bn_layer(input, ch_out, 1, stride, name=name) else: return input def bottleneck_block(self, input, num_filters, stride, name, if_first): conv0 = self.conv_bn_layer( input=input, num_filters=num_filters, filter_size=1, act='relu', name=name + "_branch2a") conv1 = self.conv_bn_layer( input=conv0, num_filters=num_filters, filter_size=3, stride=stride, act='relu', name=name + "_branch2b") conv2 = self.conv_bn_layer( input=conv1, num_filters=num_filters * 4, filter_size=1, act=None, name=name + "_branch2c") short = self.shortcut( input, num_filters * 4, stride, if_first=if_first, name=name + "_branch1") return fluid.layers.elementwise_add(x=short, y=conv2, act='relu') def basic_block(self, input, num_filters, stride, name, if_first): conv0 = self.conv_bn_layer( input=input, num_filters=num_filters, filter_size=3, act='relu', stride=stride, name=name + "_branch2a") conv1 = self.conv_bn_layer( input=conv0, num_filters=num_filters, filter_size=3, act=None, name=name + "_branch2b") short = self.shortcut( input, num_filters, stride, if_first=if_first, name=name + "_branch1") return fluid.layers.elementwise_add(x=short, y=conv1, act='relu') def ResNet18_vd(): model = ResNet(layers=18, is_3x3=True) return model def ResNet34_vd(): model = ResNet(layers=34, is_3x3=True) return model def ResNet50_vd(**args): model = ResNet(layers=50, is_3x3=True, **args) return model def ResNet101_vd(): model = ResNet(layers=101, is_3x3=True) return model def ResNet152_vd(): model = ResNet(layers=152, is_3x3=True) return model def ResNet200_vd(): model = ResNet(layers=200, is_3x3=True) return model
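A short sketch of how these factory functions are normally wired into a static-graph program. The input layer below relies on the PaddlePaddle 1.x fluid API and is an assumption for the example; only ResNet50_vd() and net() come from this module.

# Sketch only: build a ResNet50-vd classification head on a 224x224 RGB input.
import paddle.fluid as fluid

image = fluid.layers.data(name='image', shape=[3, 224, 224], dtype='float32')
model = ResNet50_vd()                    # same as ResNet(layers=50, is_3x3=True)
logits = model.net(image, class_dim=1000)
print(logits.shape)                      # expected: (-1, 1000)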
PypiClean
/unifix-0.1.1-py3-none-any.whl/UniFix/Uni/option.py
import re import collections import logging tokenstate = collections.namedtuple('tokenstate', 'startpos endpos token') logger = logging.getLogger(__name__) class paragraph(object): '''a paragraph inside a man page is text that ends with two new lines''' def __init__(self, idx, text, section, is_option): self.idx = idx self.text = text self.section = section self.is_option = is_option def cleantext(self): t = re.sub(r'<[^>]+>', '', self.text) t = re.sub('&lt;', '<', t) t = re.sub('&gt;', '>', t) return t @staticmethod def from_store(d): p = paragraph(d.get('idx', 0), d['text'].encode( 'utf8'), d['section'], d['is_option']) return p def to_store(self): return {'idx': self.idx, 'text': self.text, 'section': self.section, 'is_option': self.is_option} def __repr__(self): t = self.cleantext() t = t[:min(20, t.find('\n'))].lstrip() return '<paragraph %d, %s: %r>' % (self.idx, self.section, t) def __eq__(self, other): if not other: return False return self.__dict__ == other.__dict__ class option(paragraph): '''a paragraph that contains extracted options short - a list of short options (-a, -b, ..) long - a list of long options (--a, --b) expectsarg - specifies if one of the short/long options expects an additional argument argument - specifies if to consider this as positional arguments nestedcommand - specifies if the arguments to this option can start a nested command ''' def __init__(self, p, short, long, expectsargname, argument=None, nestedcommand=False): paragraph.__init__(self, p.idx, p.text, p.section, p.is_option) self.short = short self.long = long self._opts = self.short + self.long self.argument = argument self.expectsargname = expectsargname self.nestedcommand = nestedcommand if nestedcommand: assert expectsargname, 'an option that can nest commands must expect an argument' @property def opts(self): return self._opts ''' @classmethod def from_store(cls, d): p = paragraph.from_store(d) return cls(p, d['short'], d['long'], d['expectsarg'], d['argument'], d.get('nestedcommand')) def to_store(self): d = paragraph.to_store(self) assert d['is_option'] d['short'] = self.short d['long'] = self.long d['expectsarg'] = self.expectsarg d['argument'] = self.argument d['nestedcommand'] = self.nestedcommand return d ''' def __str__(self): return '(%s)' % ', '.join([str(x.flag) for x in self.opts]) def __repr__(self): return '<options for paragraph %d: %s>' % (self.idx, str(self)) def extract(manpage): '''extract options from all paragraphs that have been classified as containing options''' for i, p in enumerate(manpage.paragraphs): # print("%d"%i) # print("%r"%p) if p.is_option: # print("%r"%p) s, l, endpos= extract_option(p.cleantext()) if s or l: expectsarg = any(x.expectsarg for x in s + l) expectargname = None if expectsarg: for x in s + l: #有的会出现 -d, --delimiter=DELIM的 if x.expectsarg: expectargname = x.expectsarg ''' for x in s + l: if not x.expectsarg: x.expectsarg = expectargname ?不能赋值 ''' s = [x.flag for x in s] l = [x.flag for x in l] p.text = ' '.join(p.text[endpos:].split('.')[0].split('\n')) + ". " manpage.paragraphs[i] = option(p, s, l, expectargname) else: logger.info( "no options could be extracted from paragraph %r", p) opt_regex = re.compile(r''' (?P<opt>--?(?:\?|\#|(?:\w+-)*\w+)) # option starts with - or -- and can have - in the middle but not at the end, also allow '-?' (?: (?:[^\S\r\n]?(=)?[^\S\r\n]?) # -a= (?P<argoptional>[<\[])? # -a=< or -a=[ (?:[^\S\r\n]?(=)?[^\S\r\n]?) 
# or maybe -a<= (?P<arg> (?(argoptional) # if we think we have an arg (we saw [ or <) [^\]>]+ # either read everything until the closing ] or > | (?(2) [-a-zA-Z]+ # or if we didn't see [ or < but just saw =, read all letters, e.g. -a=abc | [-a-z]+ # but if we didn't have =, only allow uppercase letters, e.g. -a FOO????? ) ) ) (?(argoptional)(?P<argoptionalc>[\]>])) # read closing ] or > if we have an arg )? # the whole arg thing is optional (?P<ending>,\s*|\s+|\Z|/|\|)''', re.X) # read any trailing whitespace or the end of the string opt2_regex = re.compile(r''' (?P<opt>\w+) # an option that doesn't start with any of the usual characters, e.g. options from 'dd' like bs=BYTES (?: (?:\s*=\s*) # an optional arg, e.g. bs=BYTES (?P<arg>\w+) ) (?:,\s*|\s+|\Z)''', re.X) # end with , or whitespace or the end of the string def _flag(s, pos=0): ''' >>> _flag('a=b').groupdict() {'opt': 'a', 'arg': 'b'} >>> bool(_flag('---c-d')) False >>> bool(_flag('foobar')) False ''' m = opt2_regex.match(s, pos) return m def _option(s, pos=0): ''' >>> bool(_option('-')) False >>> bool(_option('--')) False >>> bool(_option('---')) False >>> bool(_option('-a-')) False >>> bool(_option('--a-')) False >>> bool(_option('--a-b-')) False >>> sorted(_option('-a').groupdict().iteritems()) [('arg', None), ('argoptional', None), ('argoptionalc', None), ('ending', ''), ('opt', '-a')] >>> sorted(_option('--a').groupdict().iteritems()) [('arg', None), ('argoptional', None), ('argoptionalc', None), ('ending', ''), ('opt', '--a')] >>> sorted(_option('-a<b>').groupdict().iteritems()) [('arg', 'b'), ('argoptional', '<'), ('argoptionalc', '>'), ('ending', ''), ('opt', '-a')] >>> sorted(_option('-a=[foo]').groupdict().iteritems()) [('arg', 'foo'), ('argoptional', '['), ('argoptionalc', ']'), ('ending', ''), ('opt', '-a')] >>> sorted(_option('-a=<foo>').groupdict().iteritems()) [('arg', 'foo'), ('argoptional', '<'), ('argoptionalc', '>'), ('ending', ''), ('opt', '-a')] >>> sorted(_option('-a=<foo bar>').groupdict().iteritems()) [('arg', 'foo bar'), ('argoptional', '<'), ('argoptionalc', '>'), ('ending', ''), ('opt', '-a')] >>> sorted(_option('-a=foo').groupdict().iteritems()) [('arg', 'foo'), ('argoptional', None), ('argoptionalc', None), ('ending', ''), ('opt', '-a')] >>> bool(_option('-a=[foo>')) False >>> bool(_option('-a=[foo bar')) False >>> _option('-a foo').end(0) 3 ''' m = opt_regex.match(s, pos) if m: if m.group('argoptional'): c = m.group('argoptional') cc = m.group('argoptionalc') if (c == '[' and cc == ']') or (c == '<' and cc == '>'): return m else: return return m _eatbetweenregex = re.compile(r'\s*(?:or|,|\|)\s*') def _eatbetween(s, pos): ''' >>> _eatbetween('foo', 0) 0 >>> _eatbetween('a, b', 1) 3 >>> _eatbetween('a|b', 1) 2 >>> _eatbetween('a or b', 1) 5 ''' m = _eatbetweenregex.match(s, pos) if m: return m.end(0) return pos class extractedoption(collections.namedtuple('extractedoption', 'flag expectsarg')): def __eq__(self, other): if isinstance(other, str): return self.flag == other else: return super(extractedoption, self).__eq__(other) def __str__(self): return self.flag def extract_option(txt): '''this is where the magic is (suppose) to happen. 
try and find options using a regex''' prepos = endpos = startpos = currpos = len(txt) - len(txt.lstrip()) short, long = [], [] m = _option(txt, currpos) # keep going as long as options are found while m: s = m.group('opt') po = extractedoption(s, m.group('arg')) # print("here%r",po.expectsarg) if s.startswith('--'): long.append(po) else: short.append(po) currpos = m.end(0) # print(m) # print(s) endpos = prepos + len(s) currpos = _eatbetween(txt, currpos) if m.group('ending') == '|': m = _option(txt, currpos) if not m: startpos = currpos while currpos < len(txt) and not txt[currpos].isspace(): if txt[currpos] == '|': short.append(extractedoption( txt[startpos:currpos], None)) startpos = currpos currpos += 1 leftover = txt[startpos:currpos] if leftover: short.append(extractedoption(leftover, None)) endpos = currpos else: m = _option(txt, currpos) prepos = currpos if currpos == startpos: m = _flag(txt, currpos) while m: s = m.group('opt') # print(s) po = extractedoption(s, m.group('arg')) long.append(po) endpos = prepos + len(s) currpos = _eatbetween(txt, currpos) m = _flag(txt, currpos) prepos = currpos return short, long, currpos
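To make the regex-driven extraction concrete, a small sketch of calling extract_option on a typical man-page option line; the sample text and the commented results are illustrative assumptions, not output captured from the package.

# Illustrative only: extract flags from one option paragraph line.
short, long, endpos = extract_option('  -o, --output=FILE  write result to FILE')
print([str(o) for o in short])   # expected: ['-o']
print([str(o) for o in long])    # expected: ['--output']
print(long[0].expectsarg)        # expected: 'FILE'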
PypiClean
/mess_server_dec-0.0.2.tar.gz/mess_server_dec-0.0.2/server/server/main_window.py
from PyQt5.QtWidgets import QMainWindow, QAction, qApp, QApplication, QLabel, QTableView
from PyQt5.QtGui import QStandardItemModel, QStandardItem
from PyQt5.QtCore import QTimer

from server.stat_window import StatWindow
from server.config_window import ConfigWindow
from server.add_user import RegisterUser
from server.remove_user import DelUserDialog


class MainWindow(QMainWindow):
    '''Class - the server's main window.'''

    def __init__(self, database, server, config):
        # Parent constructor
        super().__init__()
        # Server database
        self.database = database

        self.server_thread = server
        self.config = config

        # Exit shortcut
        self.exitAction = QAction('Выход', self)
        self.exitAction.setShortcut('Ctrl+Q')
        self.exitAction.triggered.connect(qApp.quit)

        # Button to refresh the client list
        self.refresh_button = QAction('Обновить список', self)

        # Server settings button
        self.config_btn = QAction('Настройки сервера', self)

        # User registration button
        self.register_btn = QAction('Регистрация пользователя', self)

        # User removal button
        self.remove_btn = QAction('Удаление пользователя', self)

        # Button to show the message history
        self.show_history_button = QAction('История клиентов', self)

        # Status bar
        self.statusBar()
        self.statusBar().showMessage('Server Working')

        # Toolbar
        self.toolbar = self.addToolBar('MainBar')
        self.toolbar.addAction(self.exitAction)
        self.toolbar.addAction(self.refresh_button)
        self.toolbar.addAction(self.show_history_button)
        self.toolbar.addAction(self.config_btn)
        self.toolbar.addAction(self.register_btn)
        self.toolbar.addAction(self.remove_btn)

        # Main window geometry settings.
        # Since we do not know how to work with dynamic sizes (and there is
        # little time to learn), the window size is fixed.
        self.setFixedSize(800, 600)
        self.setWindowTitle('Messaging Server alpha release')

        # Label saying that the list of connected clients follows below
        self.label = QLabel('Список подключённых клиентов:', self)
        self.label.setFixedSize(240, 15)
        self.label.move(10, 25)

        # Table with the list of connected clients.
        self.active_clients_table = QTableView(self)
        self.active_clients_table.move(10, 45)
        self.active_clients_table.setFixedSize(780, 400)

        # Timer that refreshes the client list once per second
        self.timer = QTimer()
        self.timer.timeout.connect(self.create_users_model)
        self.timer.start(1000)

        # Bind buttons to their handlers
        self.refresh_button.triggered.connect(self.create_users_model)
        self.show_history_button.triggered.connect(self.show_statistics)
        self.config_btn.triggered.connect(self.server_config)
        self.register_btn.triggered.connect(self.reg_user)
        self.remove_btn.triggered.connect(self.rem_user)

        # Finally, show the window.
        self.show()

    def create_users_model(self):
        '''Method that fills the table of active users.'''
        list_users = self.database.active_users_list()
        list = QStandardItemModel()
        list.setHorizontalHeaderLabels(
            ['Имя Клиента', 'IP Адрес', 'Порт', 'Время подключения'])
        for row in list_users:
            user, ip, port, time = row
            user = QStandardItem(user)
            user.setEditable(False)
            ip = QStandardItem(ip)
            ip.setEditable(False)
            port = QStandardItem(str(port))
            port.setEditable(False)
            # Drop the milliseconds from the time value, since that much
            # precision is not needed.
            time = QStandardItem(str(time.replace(microsecond=0)))
            time.setEditable(False)
            list.appendRow([user, ip, port, time])
        self.active_clients_table.setModel(list)
        self.active_clients_table.resizeColumnsToContents()
        self.active_clients_table.resizeRowsToContents()

    def show_statistics(self):
        '''Method that creates the client statistics window.'''
        global stat_window
        stat_window = StatWindow(self.database)
        stat_window.show()

    def server_config(self):
        '''Method that creates the server settings window.'''
        global config_window
        # Create the window and load the current settings into it
        config_window = ConfigWindow(self.config)

    def reg_user(self):
        '''Method that creates the user registration window.'''
        global reg_window
        reg_window = RegisterUser(self.database, self.server_thread)
        reg_window.show()

    def rem_user(self):
        '''Method that creates the user removal window.'''
        global rem_window
        rem_window = DelUserDialog(self.database, self.server_thread)
        rem_window.show()
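A minimal sketch of bringing this window up on its own; the fake database class and the None stand-ins for the server thread and config are placeholders invented for the example, while a real deployment passes the server's actual objects.

# Sketch only: run MainWindow with a throwaway database stand-in.
import sys
from datetime import datetime
from PyQt5.QtWidgets import QApplication


class _FakeDatabase:
    # Hypothetical stand-in so the refresh timer has something to query.
    def active_users_list(self):
        return [('test_user', '127.0.0.1', 7777, datetime.now())]


app = QApplication(sys.argv)
window = MainWindow(_FakeDatabase(), server=None, config=None)
sys.exit(app.exec_())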
PypiClean
/novel_dl-1.6.2-py3-none-any.whl/novel_dl/themes/narou/static/jquery.min.js
!function(a,b){"object"==typeof module&&"object"==typeof module.exports?module.exports=a.document?b(a,!0):function(a){if(!a.document)throw new Error("jQuery requires a window with a document");return b(a)}:b(a)}("undefined"!=typeof window?window:this,function(a,b){var c=[],d=a.document,e=c.slice,f=c.concat,g=c.push,h=c.indexOf,i={},j=i.toString,k=i.hasOwnProperty,l={},m="1.12.4",n=function(a,b){return new n.fn.init(a,b)},o=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,p=/^-ms-/,q=/-([\da-z])/gi,r=function(a,b){return b.toUpperCase()};n.fn=n.prototype={jquery:m,constructor:n,selector:"",length:0,toArray:function(){return e.call(this)},get:function(a){return null!=a?0>a?this[a+this.length]:this[a]:e.call(this)},pushStack:function(a){var b=n.merge(this.constructor(),a);return b.prevObject=this,b.context=this.context,b},each:function(a){return n.each(this,a)},map:function(a){return this.pushStack(n.map(this,function(b,c){return a.call(b,c,b)}))},slice:function(){return this.pushStack(e.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(a){var b=this.length,c=+a+(0>a?b:0);return this.pushStack(c>=0&&b>c?[this[c]]:[])},end:function(){return this.prevObject||this.constructor()},push:g,sort:c.sort,splice:c.splice},n.extend=n.fn.extend=function(){var a,b,c,d,e,f,g=arguments[0]||{},h=1,i=arguments.length,j=!1;for("boolean"==typeof g&&(j=g,g=arguments[h]||{},h++),"object"==typeof g||n.isFunction(g)||(g={}),h===i&&(g=this,h--);i>h;h++)if(null!=(e=arguments[h]))for(d in e)a=g[d],c=e[d],g!==c&&(j&&c&&(n.isPlainObject(c)||(b=n.isArray(c)))?(b?(b=!1,f=a&&n.isArray(a)?a:[]):f=a&&n.isPlainObject(a)?a:{},g[d]=n.extend(j,f,c)):void 0!==c&&(g[d]=c));return g},n.extend({expando:"jQuery"+(m+Math.random()).replace(/\D/g,""),isReady:!0,error:function(a){throw new Error(a)},noop:function(){},isFunction:function(a){return"function"===n.type(a)},isArray:Array.isArray||function(a){return"array"===n.type(a)},isWindow:function(a){return null!=a&&a==a.window},isNumeric:function(a){var b=a&&a.toString();return!n.isArray(a)&&b-parseFloat(b)+1>=0},isEmptyObject:function(a){var b;for(b in a)return!1;return!0},isPlainObject:function(a){var b;if(!a||"object"!==n.type(a)||a.nodeType||n.isWindow(a))return!1;try{if(a.constructor&&!k.call(a,"constructor")&&!k.call(a.constructor.prototype,"isPrototypeOf"))return!1}catch(c){return!1}if(!l.ownFirst)for(b in a)return k.call(a,b);for(b in a);return void 0===b||k.call(a,b)},type:function(a){return null==a?a+"":"object"==typeof a||"function"==typeof a?i[j.call(a)]||"object":typeof a},globalEval:function(b){b&&n.trim(b)&&(a.execScript||function(b){a.eval.call(a,b)})(b)},camelCase:function(a){return a.replace(p,"ms-").replace(q,r)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toLowerCase()===b.toLowerCase()},each:function(a,b){var c,d=0;if(s(a)){for(c=a.length;c>d;d++)if(b.call(a[d],d,a[d])===!1)break}else for(d in a)if(b.call(a[d],d,a[d])===!1)break;return a},trim:function(a){return null==a?"":(a+"").replace(o,"")},makeArray:function(a,b){var c=b||[];return null!=a&&(s(Object(a))?n.merge(c,"string"==typeof a?[a]:a):g.call(c,a)),c},inArray:function(a,b,c){var d;if(b){if(h)return h.call(b,a,c);for(d=b.length,c=c?0>c?Math.max(0,d+c):c:0;d>c;c++)if(c in b&&b[c]===a)return c}return-1},merge:function(a,b){var c=+b.length,d=0,e=a.length;while(c>d)a[e++]=b[d++];if(c!==c)while(void 0!==b[d])a[e++]=b[d++];return a.length=e,a},grep:function(a,b,c){for(var d,e=[],f=0,g=a.length,h=!c;g>f;f++)d=!b(a[f],f),d!==h&&e.push(a[f]);return 
e},map:function(a,b,c){var d,e,g=0,h=[];if(s(a))for(d=a.length;d>g;g++)e=b(a[g],g,c),null!=e&&h.push(e);else for(g in a)e=b(a[g],g,c),null!=e&&h.push(e);return f.apply([],h)},guid:1,proxy:function(a,b){var c,d,f;return"string"==typeof b&&(f=a[b],b=a,a=f),n.isFunction(a)?(c=e.call(arguments,2),d=function(){return a.apply(b||this,c.concat(e.call(arguments)))},d.guid=a.guid=a.guid||n.guid++,d):void 0},now:function(){return+new Date},support:l}),"function"==typeof Symbol&&(n.fn[Symbol.iterator]=c[Symbol.iterator]),n.each("Boolean Number String Function Array Date RegExp Object Error Symbol".split(" "),function(a,b){i["[object "+b+"]"]=b.toLowerCase()});function s(a){var b=!!a&&"length"in a&&a.length,c=n.type(a);return"function"===c||n.isWindow(a)?!1:"array"===c||0===b||"number"==typeof b&&b>0&&b-1 in a}var t=function(a){var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u="sizzle"+1*new Date,v=a.document,w=0,x=0,y=ga(),z=ga(),A=ga(),B=function(a,b){return a===b&&(l=!0),0},C=1<<31,D={}.hasOwnProperty,E=[],F=E.pop,G=E.push,H=E.push,I=E.slice,J=function(a,b){for(var c=0,d=a.length;d>c;c++)if(a[c]===b)return c;return-1},K="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",L="[\\x20\\t\\r\\n\\f]",M="(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+",N="\\["+L+"*("+M+")(?:"+L+"*([*^$|!~]?=)"+L+"*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|("+M+"))|)"+L+"*\\]",O=":("+M+")(?:\\((('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|((?:\\\\.|[^\\\\()[\\]]|"+N+")*)|.*)\\)|)",P=new RegExp(L+"+","g"),Q=new RegExp("^"+L+"+|((?:^|[^\\\\])(?:\\\\.)*)"+L+"+$","g"),R=new RegExp("^"+L+"*,"+L+"*"),S=new RegExp("^"+L+"*([>+~]|"+L+")"+L+"*"),T=new RegExp("="+L+"*([^\\]'\"]*?)"+L+"*\\]","g"),U=new RegExp(O),V=new RegExp("^"+M+"$"),W={ID:new RegExp("^#("+M+")"),CLASS:new RegExp("^\\.("+M+")"),TAG:new RegExp("^("+M+"|[*])"),ATTR:new RegExp("^"+N),PSEUDO:new RegExp("^"+O),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+L+"*(even|odd|(([+-]|)(\\d*)n|)"+L+"*(?:([+-]|)"+L+"*(\\d+)|))"+L+"*\\)|)","i"),bool:new RegExp("^(?:"+K+")$","i"),needsContext:new RegExp("^"+L+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+L+"*((?:-\\d)?\\d*)"+L+"*\\)|)(?=[^-]|$)","i")},X=/^(?:input|select|textarea|button)$/i,Y=/^h\d$/i,Z=/^[^{]+\{\s*\[native \w/,$=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,_=/[+~]/,aa=/'|\\/g,ba=new RegExp("\\\\([\\da-f]{1,6}"+L+"?|("+L+")|.)","ig"),ca=function(a,b,c){var d="0x"+b-65536;return d!==d||c?b:0>d?String.fromCharCode(d+65536):String.fromCharCode(d>>10|55296,1023&d|56320)},da=function(){m()};try{H.apply(E=I.call(v.childNodes),v.childNodes),E[v.childNodes.length].nodeType}catch(ea){H={apply:E.length?function(a,b){G.apply(a,I.call(b))}:function(a,b){var c=a.length,d=0;while(a[c++]=b[d++]);a.length=c-1}}}function fa(a,b,d,e){var f,h,j,k,l,o,r,s,w=b&&b.ownerDocument,x=b?b.nodeType:9;if(d=d||[],"string"!=typeof a||!a||1!==x&&9!==x&&11!==x)return d;if(!e&&((b?b.ownerDocument||b:v)!==n&&m(b),b=b||n,p)){if(11!==x&&(o=$.exec(a)))if(f=o[1]){if(9===x){if(!(j=b.getElementById(f)))return d;if(j.id===f)return d.push(j),d}else if(w&&(j=w.getElementById(f))&&t(b,j)&&j.id===f)return d.push(j),d}else{if(o[2])return H.apply(d,b.getElementsByTagName(a)),d;if((f=o[3])&&c.getElementsByClassName&&b.getElementsByClassName)return H.apply(d,b.getElementsByClassName(f)),d}if(c.qsa&&!A[a+" "]&&(!q||!q.test(a))){if(1!==x)w=b,s=a;else 
if("object"!==b.nodeName.toLowerCase()){(k=b.getAttribute("id"))?k=k.replace(aa,"\\$&"):b.setAttribute("id",k=u),r=g(a),h=r.length,l=V.test(k)?"#"+k:"[id='"+k+"']";while(h--)r[h]=l+" "+qa(r[h]);s=r.join(","),w=_.test(a)&&oa(b.parentNode)||b}if(s)try{return H.apply(d,w.querySelectorAll(s)),d}catch(y){}finally{k===u&&b.removeAttribute("id")}}}return i(a.replace(Q,"$1"),b,d,e)}function ga(){var a=[];function b(c,e){return a.push(c+" ")>d.cacheLength&&delete b[a.shift()],b[c+" "]=e}return b}function ha(a){return a[u]=!0,a}function ia(a){var b=n.createElement("div");try{return!!a(b)}catch(c){return!1}finally{b.parentNode&&b.parentNode.removeChild(b),b=null}}function ja(a,b){var c=a.split("|"),e=c.length;while(e--)d.attrHandle[c[e]]=b}function ka(a,b){var c=b&&a,d=c&&1===a.nodeType&&1===b.nodeType&&(~b.sourceIndex||C)-(~a.sourceIndex||C);if(d)return d;if(c)while(c=c.nextSibling)if(c===b)return-1;return a?1:-1}function la(a){return function(b){var c=b.nodeName.toLowerCase();return"input"===c&&b.type===a}}function ma(a){return function(b){var c=b.nodeName.toLowerCase();return("input"===c||"button"===c)&&b.type===a}}function na(a){return ha(function(b){return b=+b,ha(function(c,d){var e,f=a([],c.length,b),g=f.length;while(g--)c[e=f[g]]&&(c[e]=!(d[e]=c[e]))})})}function oa(a){return a&&"undefined"!=typeof a.getElementsByTagName&&a}c=fa.support={},f=fa.isXML=function(a){var b=a&&(a.ownerDocument||a).documentElement;return b?"HTML"!==b.nodeName:!1},m=fa.setDocument=function(a){var b,e,g=a?a.ownerDocument||a:v;return g!==n&&9===g.nodeType&&g.documentElement?(n=g,o=n.documentElement,p=!f(n),(e=n.defaultView)&&e.top!==e&&(e.addEventListener?e.addEventListener("unload",da,!1):e.attachEvent&&e.attachEvent("onunload",da)),c.attributes=ia(function(a){return a.className="i",!a.getAttribute("className")}),c.getElementsByTagName=ia(function(a){return a.appendChild(n.createComment("")),!a.getElementsByTagName("*").length}),c.getElementsByClassName=Z.test(n.getElementsByClassName),c.getById=ia(function(a){return o.appendChild(a).id=u,!n.getElementsByName||!n.getElementsByName(u).length}),c.getById?(d.find.ID=function(a,b){if("undefined"!=typeof b.getElementById&&p){var c=b.getElementById(a);return c?[c]:[]}},d.filter.ID=function(a){var b=a.replace(ba,ca);return function(a){return a.getAttribute("id")===b}}):(delete d.find.ID,d.filter.ID=function(a){var b=a.replace(ba,ca);return function(a){var c="undefined"!=typeof a.getAttributeNode&&a.getAttributeNode("id");return c&&c.value===b}}),d.find.TAG=c.getElementsByTagName?function(a,b){return"undefined"!=typeof b.getElementsByTagName?b.getElementsByTagName(a):c.qsa?b.querySelectorAll(a):void 0}:function(a,b){var c,d=[],e=0,f=b.getElementsByTagName(a);if("*"===a){while(c=f[e++])1===c.nodeType&&d.push(c);return d}return f},d.find.CLASS=c.getElementsByClassName&&function(a,b){return"undefined"!=typeof b.getElementsByClassName&&p?b.getElementsByClassName(a):void 0},r=[],q=[],(c.qsa=Z.test(n.querySelectorAll))&&(ia(function(a){o.appendChild(a).innerHTML="<a id='"+u+"'></a><select id='"+u+"-\r\\' msallowcapture=''><option selected=''></option></select>",a.querySelectorAll("[msallowcapture^='']").length&&q.push("[*^$]="+L+"*(?:''|\"\")"),a.querySelectorAll("[selected]").length||q.push("\\["+L+"*(?:value|"+K+")"),a.querySelectorAll("[id~="+u+"-]").length||q.push("~="),a.querySelectorAll(":checked").length||q.push(":checked"),a.querySelectorAll("a#"+u+"+*").length||q.push(".#.+[+~]")}),ia(function(a){var 
b=n.createElement("input");b.setAttribute("type","hidden"),a.appendChild(b).setAttribute("name","D"),a.querySelectorAll("[name=d]").length&&q.push("name"+L+"*[*^$|!~]?="),a.querySelectorAll(":enabled").length||q.push(":enabled",":disabled"),a.querySelectorAll("*,:x"),q.push(",.*:")})),(c.matchesSelector=Z.test(s=o.matches||o.webkitMatchesSelector||o.mozMatchesSelector||o.oMatchesSelector||o.msMatchesSelector))&&ia(function(a){c.disconnectedMatch=s.call(a,"div"),s.call(a,"[s!='']:x"),r.push("!=",O)}),q=q.length&&new RegExp(q.join("|")),r=r.length&&new RegExp(r.join("|")),b=Z.test(o.compareDocumentPosition),t=b||Z.test(o.contains)?function(a,b){var c=9===a.nodeType?a.documentElement:a,d=b&&b.parentNode;return a===d||!(!d||1!==d.nodeType||!(c.contains?c.contains(d):a.compareDocumentPosition&&16&a.compareDocumentPosition(d)))}:function(a,b){if(b)while(b=b.parentNode)if(b===a)return!0;return!1},B=b?function(a,b){if(a===b)return l=!0,0;var d=!a.compareDocumentPosition-!b.compareDocumentPosition;return d?d:(d=(a.ownerDocument||a)===(b.ownerDocument||b)?a.compareDocumentPosition(b):1,1&d||!c.sortDetached&&b.compareDocumentPosition(a)===d?a===n||a.ownerDocument===v&&t(v,a)?-1:b===n||b.ownerDocument===v&&t(v,b)?1:k?J(k,a)-J(k,b):0:4&d?-1:1)}:function(a,b){if(a===b)return l=!0,0;var c,d=0,e=a.parentNode,f=b.parentNode,g=[a],h=[b];if(!e||!f)return a===n?-1:b===n?1:e?-1:f?1:k?J(k,a)-J(k,b):0;if(e===f)return ka(a,b);c=a;while(c=c.parentNode)g.unshift(c);c=b;while(c=c.parentNode)h.unshift(c);while(g[d]===h[d])d++;return d?ka(g[d],h[d]):g[d]===v?-1:h[d]===v?1:0},n):n},fa.matches=function(a,b){return fa(a,null,null,b)},fa.matchesSelector=function(a,b){if((a.ownerDocument||a)!==n&&m(a),b=b.replace(T,"='$1']"),c.matchesSelector&&p&&!A[b+" "]&&(!r||!r.test(b))&&(!q||!q.test(b)))try{var d=s.call(a,b);if(d||c.disconnectedMatch||a.document&&11!==a.document.nodeType)return d}catch(e){}return fa(b,n,null,[a]).length>0},fa.contains=function(a,b){return(a.ownerDocument||a)!==n&&m(a),t(a,b)},fa.attr=function(a,b){(a.ownerDocument||a)!==n&&m(a);var e=d.attrHandle[b.toLowerCase()],f=e&&D.call(d.attrHandle,b.toLowerCase())?e(a,b,!p):void 0;return void 0!==f?f:c.attributes||!p?a.getAttribute(b):(f=a.getAttributeNode(b))&&f.specified?f.value:null},fa.error=function(a){throw new Error("Syntax error, unrecognized expression: "+a)},fa.uniqueSort=function(a){var b,d=[],e=0,f=0;if(l=!c.detectDuplicates,k=!c.sortStable&&a.slice(0),a.sort(B),l){while(b=a[f++])b===a[f]&&(e=d.push(f));while(e--)a.splice(d[e],1)}return k=null,a},e=fa.getText=function(a){var b,c="",d=0,f=a.nodeType;if(f){if(1===f||9===f||11===f){if("string"==typeof a.textContent)return a.textContent;for(a=a.firstChild;a;a=a.nextSibling)c+=e(a)}else if(3===f||4===f)return a.nodeValue}else while(b=a[d++])c+=e(b);return c},d=fa.selectors={cacheLength:50,createPseudo:ha,match:W,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(a){return a[1]=a[1].replace(ba,ca),a[3]=(a[3]||a[4]||a[5]||"").replace(ba,ca),"~="===a[2]&&(a[3]=" "+a[3]+" "),a.slice(0,4)},CHILD:function(a){return a[1]=a[1].toLowerCase(),"nth"===a[1].slice(0,3)?(a[3]||fa.error(a[0]),a[4]=+(a[4]?a[5]+(a[6]||1):2*("even"===a[3]||"odd"===a[3])),a[5]=+(a[7]+a[8]||"odd"===a[3])):a[3]&&fa.error(a[0]),a},PSEUDO:function(a){var b,c=!a[6]&&a[2];return 
W.CHILD.test(a[0])?null:(a[3]?a[2]=a[4]||a[5]||"":c&&U.test(c)&&(b=g(c,!0))&&(b=c.indexOf(")",c.length-b)-c.length)&&(a[0]=a[0].slice(0,b),a[2]=c.slice(0,b)),a.slice(0,3))}},filter:{TAG:function(a){var b=a.replace(ba,ca).toLowerCase();return"*"===a?function(){return!0}:function(a){return a.nodeName&&a.nodeName.toLowerCase()===b}},CLASS:function(a){var b=y[a+" "];return b||(b=new RegExp("(^|"+L+")"+a+"("+L+"|$)"))&&y(a,function(a){return b.test("string"==typeof a.className&&a.className||"undefined"!=typeof a.getAttribute&&a.getAttribute("class")||"")})},ATTR:function(a,b,c){return function(d){var e=fa.attr(d,a);return null==e?"!="===b:b?(e+="","="===b?e===c:"!="===b?e!==c:"^="===b?c&&0===e.indexOf(c):"*="===b?c&&e.indexOf(c)>-1:"$="===b?c&&e.slice(-c.length)===c:"~="===b?(" "+e.replace(P," ")+" ").indexOf(c)>-1:"|="===b?e===c||e.slice(0,c.length+1)===c+"-":!1):!0}},CHILD:function(a,b,c,d,e){var f="nth"!==a.slice(0,3),g="last"!==a.slice(-4),h="of-type"===b;return 1===d&&0===e?function(a){return!!a.parentNode}:function(b,c,i){var j,k,l,m,n,o,p=f!==g?"nextSibling":"previousSibling",q=b.parentNode,r=h&&b.nodeName.toLowerCase(),s=!i&&!h,t=!1;if(q){if(f){while(p){m=b;while(m=m[p])if(h?m.nodeName.toLowerCase()===r:1===m.nodeType)return!1;o=p="only"===a&&!o&&"nextSibling"}return!0}if(o=[g?q.firstChild:q.lastChild],g&&s){m=q,l=m[u]||(m[u]={}),k=l[m.uniqueID]||(l[m.uniqueID]={}),j=k[a]||[],n=j[0]===w&&j[1],t=n&&j[2],m=n&&q.childNodes[n];while(m=++n&&m&&m[p]||(t=n=0)||o.pop())if(1===m.nodeType&&++t&&m===b){k[a]=[w,n,t];break}}else if(s&&(m=b,l=m[u]||(m[u]={}),k=l[m.uniqueID]||(l[m.uniqueID]={}),j=k[a]||[],n=j[0]===w&&j[1],t=n),t===!1)while(m=++n&&m&&m[p]||(t=n=0)||o.pop())if((h?m.nodeName.toLowerCase()===r:1===m.nodeType)&&++t&&(s&&(l=m[u]||(m[u]={}),k=l[m.uniqueID]||(l[m.uniqueID]={}),k[a]=[w,t]),m===b))break;return t-=e,t===d||t%d===0&&t/d>=0}}},PSEUDO:function(a,b){var c,e=d.pseudos[a]||d.setFilters[a.toLowerCase()]||fa.error("unsupported pseudo: "+a);return e[u]?e(b):e.length>1?(c=[a,a,"",b],d.setFilters.hasOwnProperty(a.toLowerCase())?ha(function(a,c){var d,f=e(a,b),g=f.length;while(g--)d=J(a,f[g]),a[d]=!(c[d]=f[g])}):function(a){return e(a,0,c)}):e}},pseudos:{not:ha(function(a){var b=[],c=[],d=h(a.replace(Q,"$1"));return d[u]?ha(function(a,b,c,e){var f,g=d(a,null,e,[]),h=a.length;while(h--)(f=g[h])&&(a[h]=!(b[h]=f))}):function(a,e,f){return b[0]=a,d(b,null,f,c),b[0]=null,!c.pop()}}),has:ha(function(a){return function(b){return fa(a,b).length>0}}),contains:ha(function(a){return a=a.replace(ba,ca),function(b){return(b.textContent||b.innerText||e(b)).indexOf(a)>-1}}),lang:ha(function(a){return V.test(a||"")||fa.error("unsupported lang: "+a),a=a.replace(ba,ca).toLowerCase(),function(b){var c;do if(c=p?b.lang:b.getAttribute("xml:lang")||b.getAttribute("lang"))return c=c.toLowerCase(),c===a||0===c.indexOf(a+"-");while((b=b.parentNode)&&1===b.nodeType);return!1}}),target:function(b){var c=a.location&&a.location.hash;return c&&c.slice(1)===b.id},root:function(a){return a===o},focus:function(a){return a===n.activeElement&&(!n.hasFocus||n.hasFocus())&&!!(a.type||a.href||~a.tabIndex)},enabled:function(a){return a.disabled===!1},disabled:function(a){return a.disabled===!0},checked:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&!!a.checked||"option"===b&&!!a.selected},selected:function(a){return 
a.parentNode&&a.parentNode.selectedIndex,a.selected===!0},empty:function(a){for(a=a.firstChild;a;a=a.nextSibling)if(a.nodeType<6)return!1;return!0},parent:function(a){return!d.pseudos.empty(a)},header:function(a){return Y.test(a.nodeName)},input:function(a){return X.test(a.nodeName)},button:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&"button"===a.type||"button"===b},text:function(a){var b;return"input"===a.nodeName.toLowerCase()&&"text"===a.type&&(null==(b=a.getAttribute("type"))||"text"===b.toLowerCase())},first:na(function(){return[0]}),last:na(function(a,b){return[b-1]}),eq:na(function(a,b,c){return[0>c?c+b:c]}),even:na(function(a,b){for(var c=0;b>c;c+=2)a.push(c);return a}),odd:na(function(a,b){for(var c=1;b>c;c+=2)a.push(c);return a}),lt:na(function(a,b,c){for(var d=0>c?c+b:c;--d>=0;)a.push(d);return a}),gt:na(function(a,b,c){for(var d=0>c?c+b:c;++d<b;)a.push(d);return a})}},d.pseudos.nth=d.pseudos.eq;for(b in{radio:!0,checkbox:!0,file:!0,password:!0,image:!0})d.pseudos[b]=la(b);for(b in{submit:!0,reset:!0})d.pseudos[b]=ma(b);function pa(){}pa.prototype=d.filters=d.pseudos,d.setFilters=new pa,g=fa.tokenize=function(a,b){var c,e,f,g,h,i,j,k=z[a+" "];if(k)return b?0:k.slice(0);h=a,i=[],j=d.preFilter;while(h){c&&!(e=R.exec(h))||(e&&(h=h.slice(e[0].length)||h),i.push(f=[])),c=!1,(e=S.exec(h))&&(c=e.shift(),f.push({value:c,type:e[0].replace(Q," ")}),h=h.slice(c.length));for(g in d.filter)!(e=W[g].exec(h))||j[g]&&!(e=j[g](e))||(c=e.shift(),f.push({value:c,type:g,matches:e}),h=h.slice(c.length));if(!c)break}return b?h.length:h?fa.error(a):z(a,i).slice(0)};function qa(a){for(var b=0,c=a.length,d="";c>b;b++)d+=a[b].value;return d}function ra(a,b,c){var d=b.dir,e=c&&"parentNode"===d,f=x++;return b.first?function(b,c,f){while(b=b[d])if(1===b.nodeType||e)return a(b,c,f)}:function(b,c,g){var h,i,j,k=[w,f];if(g){while(b=b[d])if((1===b.nodeType||e)&&a(b,c,g))return!0}else while(b=b[d])if(1===b.nodeType||e){if(j=b[u]||(b[u]={}),i=j[b.uniqueID]||(j[b.uniqueID]={}),(h=i[d])&&h[0]===w&&h[1]===f)return k[2]=h[2];if(i[d]=k,k[2]=a(b,c,g))return!0}}}function sa(a){return a.length>1?function(b,c,d){var e=a.length;while(e--)if(!a[e](b,c,d))return!1;return!0}:a[0]}function ta(a,b,c){for(var d=0,e=b.length;e>d;d++)fa(a,b[d],c);return c}function ua(a,b,c,d,e){for(var f,g=[],h=0,i=a.length,j=null!=b;i>h;h++)(f=a[h])&&(c&&!c(f,d,e)||(g.push(f),j&&b.push(h)));return g}function va(a,b,c,d,e,f){return d&&!d[u]&&(d=va(d)),e&&!e[u]&&(e=va(e,f)),ha(function(f,g,h,i){var j,k,l,m=[],n=[],o=g.length,p=f||ta(b||"*",h.nodeType?[h]:h,[]),q=!a||!f&&b?p:ua(p,m,a,h,i),r=c?e||(f?a:o||d)?[]:g:q;if(c&&c(q,r,h,i),d){j=ua(r,n),d(j,[],h,i),k=j.length;while(k--)(l=j[k])&&(r[n[k]]=!(q[n[k]]=l))}if(f){if(e||a){if(e){j=[],k=r.length;while(k--)(l=r[k])&&j.push(q[k]=l);e(null,r=[],j,i)}k=r.length;while(k--)(l=r[k])&&(j=e?J(f,l):m[k])>-1&&(f[j]=!(g[j]=l))}}else r=ua(r===g?r.splice(o,r.length):r),e?e(null,g,r,i):H.apply(g,r)})}function wa(a){for(var b,c,e,f=a.length,g=d.relative[a[0].type],h=g||d.relative[" "],i=g?1:0,k=ra(function(a){return a===b},h,!0),l=ra(function(a){return J(b,a)>-1},h,!0),m=[function(a,c,d){var e=!g&&(d||c!==j)||((b=c).nodeType?k(a,c,d):l(a,c,d));return b=null,e}];f>i;i++)if(c=d.relative[a[i].type])m=[ra(sa(m),c)];else{if(c=d.filter[a[i].type].apply(null,a[i].matches),c[u]){for(e=++i;f>e;e++)if(d.relative[a[e].type])break;return va(i>1&&sa(m),i>1&&qa(a.slice(0,i-1).concat({value:" "===a[i-2].type?"*":""})).replace(Q,"$1"),c,e>i&&wa(a.slice(i,e)),f>e&&wa(a=a.slice(e)),f>e&&qa(a))}m.push(c)}return 
sa(m)}function xa(a,b){var c=b.length>0,e=a.length>0,f=function(f,g,h,i,k){var l,o,q,r=0,s="0",t=f&&[],u=[],v=j,x=f||e&&d.find.TAG("*",k),y=w+=null==v?1:Math.random()||.1,z=x.length;for(k&&(j=g===n||g||k);s!==z&&null!=(l=x[s]);s++){if(e&&l){o=0,g||l.ownerDocument===n||(m(l),h=!p);while(q=a[o++])if(q(l,g||n,h)){i.push(l);break}k&&(w=y)}c&&((l=!q&&l)&&r--,f&&t.push(l))}if(r+=s,c&&s!==r){o=0;while(q=b[o++])q(t,u,g,h);if(f){if(r>0)while(s--)t[s]||u[s]||(u[s]=F.call(i));u=ua(u)}H.apply(i,u),k&&!f&&u.length>0&&r+b.length>1&&fa.uniqueSort(i)}return k&&(w=y,j=v),t};return c?ha(f):f}return h=fa.compile=function(a,b){var c,d=[],e=[],f=A[a+" "];if(!f){b||(b=g(a)),c=b.length;while(c--)f=wa(b[c]),f[u]?d.push(f):e.push(f);f=A(a,xa(e,d)),f.selector=a}return f},i=fa.select=function(a,b,e,f){var i,j,k,l,m,n="function"==typeof a&&a,o=!f&&g(a=n.selector||a);if(e=e||[],1===o.length){if(j=o[0]=o[0].slice(0),j.length>2&&"ID"===(k=j[0]).type&&c.getById&&9===b.nodeType&&p&&d.relative[j[1].type]){if(b=(d.find.ID(k.matches[0].replace(ba,ca),b)||[])[0],!b)return e;n&&(b=b.parentNode),a=a.slice(j.shift().value.length)}i=W.needsContext.test(a)?0:j.length;while(i--){if(k=j[i],d.relative[l=k.type])break;if((m=d.find[l])&&(f=m(k.matches[0].replace(ba,ca),_.test(j[0].type)&&oa(b.parentNode)||b))){if(j.splice(i,1),a=f.length&&qa(j),!a)return H.apply(e,f),e;break}}}return(n||h(a,o))(f,b,!p,e,!b||_.test(a)&&oa(b.parentNode)||b),e},c.sortStable=u.split("").sort(B).join("")===u,c.detectDuplicates=!!l,m(),c.sortDetached=ia(function(a){return 1&a.compareDocumentPosition(n.createElement("div"))}),ia(function(a){return a.innerHTML="<a href='#'></a>","#"===a.firstChild.getAttribute("href")})||ja("type|href|height|width",function(a,b,c){return c?void 0:a.getAttribute(b,"type"===b.toLowerCase()?1:2)}),c.attributes&&ia(function(a){return a.innerHTML="<input/>",a.firstChild.setAttribute("value",""),""===a.firstChild.getAttribute("value")})||ja("value",function(a,b,c){return c||"input"!==a.nodeName.toLowerCase()?void 0:a.defaultValue}),ia(function(a){return null==a.getAttribute("disabled")})||ja(K,function(a,b,c){var d;return c?void 0:a[b]===!0?b.toLowerCase():(d=a.getAttributeNode(b))&&d.specified?d.value:null}),fa}(a);n.find=t,n.expr=t.selectors,n.expr[":"]=n.expr.pseudos,n.uniqueSort=n.unique=t.uniqueSort,n.text=t.getText,n.isXMLDoc=t.isXML,n.contains=t.contains;var u=function(a,b,c){var d=[],e=void 0!==c;while((a=a[b])&&9!==a.nodeType)if(1===a.nodeType){if(e&&n(a).is(c))break;d.push(a)}return d},v=function(a,b){for(var c=[];a;a=a.nextSibling)1===a.nodeType&&a!==b&&c.push(a);return c},w=n.expr.match.needsContext,x=/^<([\w-]+)\s*\/?>(?:<\/\1>|)$/,y=/^.[^:#\[\.,]*$/;function z(a,b,c){if(n.isFunction(b))return n.grep(a,function(a,d){return!!b.call(a,d,a)!==c});if(b.nodeType)return n.grep(a,function(a){return a===b!==c});if("string"==typeof b){if(y.test(b))return n.filter(b,a,c);b=n.filter(b,a)}return n.grep(a,function(a){return n.inArray(a,b)>-1!==c})}n.filter=function(a,b,c){var d=b[0];return c&&(a=":not("+a+")"),1===b.length&&1===d.nodeType?n.find.matchesSelector(d,a)?[d]:[]:n.find.matches(a,n.grep(b,function(a){return 1===a.nodeType}))},n.fn.extend({find:function(a){var b,c=[],d=this,e=d.length;if("string"!=typeof a)return this.pushStack(n(a).filter(function(){for(b=0;e>b;b++)if(n.contains(d[b],this))return!0}));for(b=0;e>b;b++)n.find(a,d[b],c);return c=this.pushStack(e>1?n.unique(c):c),c.selector=this.selector?this.selector+" "+a:a,c},filter:function(a){return this.pushStack(z(this,a||[],!1))},not:function(a){return 
this.pushStack(z(this,a||[],!0))},is:function(a){return!!z(this,"string"==typeof a&&w.test(a)?n(a):a||[],!1).length}});var A,B=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]*))$/,C=n.fn.init=function(a,b,c){var e,f;if(!a)return this;if(c=c||A,"string"==typeof a){if(e="<"===a.charAt(0)&&">"===a.charAt(a.length-1)&&a.length>=3?[null,a,null]:B.exec(a),!e||!e[1]&&b)return!b||b.jquery?(b||c).find(a):this.constructor(b).find(a);if(e[1]){if(b=b instanceof n?b[0]:b,n.merge(this,n.parseHTML(e[1],b&&b.nodeType?b.ownerDocument||b:d,!0)),x.test(e[1])&&n.isPlainObject(b))for(e in b)n.isFunction(this[e])?this[e](b[e]):this.attr(e,b[e]);return this}if(f=d.getElementById(e[2]),f&&f.parentNode){if(f.id!==e[2])return A.find(a);this.length=1,this[0]=f}return this.context=d,this.selector=a,this}return a.nodeType?(this.context=this[0]=a,this.length=1,this):n.isFunction(a)?"undefined"!=typeof c.ready?c.ready(a):a(n):(void 0!==a.selector&&(this.selector=a.selector,this.context=a.context),n.makeArray(a,this))};C.prototype=n.fn,A=n(d);var D=/^(?:parents|prev(?:Until|All))/,E={children:!0,contents:!0,next:!0,prev:!0};n.fn.extend({has:function(a){var b,c=n(a,this),d=c.length;return this.filter(function(){for(b=0;d>b;b++)if(n.contains(this,c[b]))return!0})},closest:function(a,b){for(var c,d=0,e=this.length,f=[],g=w.test(a)||"string"!=typeof a?n(a,b||this.context):0;e>d;d++)for(c=this[d];c&&c!==b;c=c.parentNode)if(c.nodeType<11&&(g?g.index(c)>-1:1===c.nodeType&&n.find.matchesSelector(c,a))){f.push(c);break}return this.pushStack(f.length>1?n.uniqueSort(f):f)},index:function(a){return a?"string"==typeof a?n.inArray(this[0],n(a)):n.inArray(a.jquery?a[0]:a,this):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(a,b){return this.pushStack(n.uniqueSort(n.merge(this.get(),n(a,b))))},addBack:function(a){return this.add(null==a?this.prevObject:this.prevObject.filter(a))}});function F(a,b){do a=a[b];while(a&&1!==a.nodeType);return a}n.each({parent:function(a){var b=a.parentNode;return b&&11!==b.nodeType?b:null},parents:function(a){return u(a,"parentNode")},parentsUntil:function(a,b,c){return u(a,"parentNode",c)},next:function(a){return F(a,"nextSibling")},prev:function(a){return F(a,"previousSibling")},nextAll:function(a){return u(a,"nextSibling")},prevAll:function(a){return u(a,"previousSibling")},nextUntil:function(a,b,c){return u(a,"nextSibling",c)},prevUntil:function(a,b,c){return u(a,"previousSibling",c)},siblings:function(a){return v((a.parentNode||{}).firstChild,a)},children:function(a){return v(a.firstChild)},contents:function(a){return n.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:n.merge([],a.childNodes)}},function(a,b){n.fn[a]=function(c,d){var e=n.map(this,b,c);return"Until"!==a.slice(-5)&&(d=c),d&&"string"==typeof d&&(e=n.filter(d,e)),this.length>1&&(E[a]||(e=n.uniqueSort(e)),D.test(a)&&(e=e.reverse())),this.pushStack(e)}});var G=/\S+/g;function H(a){var b={};return n.each(a.match(G)||[],function(a,c){b[c]=!0}),b}n.Callbacks=function(a){a="string"==typeof a?H(a):n.extend({},a);var b,c,d,e,f=[],g=[],h=-1,i=function(){for(e=a.once,d=b=!0;g.length;h=-1){c=g.shift();while(++h<f.length)f[h].apply(c[0],c[1])===!1&&a.stopOnFalse&&(h=f.length,c=!1)}a.memory||(c=!1),b=!1,e&&(f=c?[]:"")},j={add:function(){return f&&(c&&!b&&(h=f.length-1,g.push(c)),function d(b){n.each(b,function(b,c){n.isFunction(c)?a.unique&&j.has(c)||f.push(c):c&&c.length&&"string"!==n.type(c)&&d(c)})}(arguments),c&&!b&&i()),this},remove:function(){return n.each(arguments,function(a,b){var 
c;while((c=n.inArray(b,f,c))>-1)f.splice(c,1),h>=c&&h--}),this},has:function(a){return a?n.inArray(a,f)>-1:f.length>0},empty:function(){return f&&(f=[]),this},disable:function(){return e=g=[],f=c="",this},disabled:function(){return!f},lock:function(){return e=!0,c||j.disable(),this},locked:function(){return!!e},fireWith:function(a,c){return e||(c=c||[],c=[a,c.slice?c.slice():c],g.push(c),b||i()),this},fire:function(){return j.fireWith(this,arguments),this},fired:function(){return!!d}};return j},n.extend({Deferred:function(a){var b=[["resolve","done",n.Callbacks("once memory"),"resolved"],["reject","fail",n.Callbacks("once memory"),"rejected"],["notify","progress",n.Callbacks("memory")]],c="pending",d={state:function(){return c},always:function(){return e.done(arguments).fail(arguments),this},then:function(){var a=arguments;return n.Deferred(function(c){n.each(b,function(b,f){var g=n.isFunction(a[b])&&a[b];e[f[1]](function(){var a=g&&g.apply(this,arguments);a&&n.isFunction(a.promise)?a.promise().progress(c.notify).done(c.resolve).fail(c.reject):c[f[0]+"With"](this===d?c.promise():this,g?[a]:arguments)})}),a=null}).promise()},promise:function(a){return null!=a?n.extend(a,d):d}},e={};return d.pipe=d.then,n.each(b,function(a,f){var g=f[2],h=f[3];d[f[1]]=g.add,h&&g.add(function(){c=h},b[1^a][2].disable,b[2][2].lock),e[f[0]]=function(){return e[f[0]+"With"](this===e?d:this,arguments),this},e[f[0]+"With"]=g.fireWith}),d.promise(e),a&&a.call(e,e),e},when:function(a){var b=0,c=e.call(arguments),d=c.length,f=1!==d||a&&n.isFunction(a.promise)?d:0,g=1===f?a:n.Deferred(),h=function(a,b,c){return function(d){b[a]=this,c[a]=arguments.length>1?e.call(arguments):d,c===i?g.notifyWith(b,c):--f||g.resolveWith(b,c)}},i,j,k;if(d>1)for(i=new Array(d),j=new Array(d),k=new Array(d);d>b;b++)c[b]&&n.isFunction(c[b].promise)?c[b].promise().progress(h(b,j,i)).done(h(b,k,c)).fail(g.reject):--f;return f||g.resolveWith(k,c),g.promise()}});var I;n.fn.ready=function(a){return n.ready.promise().done(a),this},n.extend({isReady:!1,readyWait:1,holdReady:function(a){a?n.readyWait++:n.ready(!0)},ready:function(a){(a===!0?--n.readyWait:n.isReady)||(n.isReady=!0,a!==!0&&--n.readyWait>0||(I.resolveWith(d,[n]),n.fn.triggerHandler&&(n(d).triggerHandler("ready"),n(d).off("ready"))))}});function J(){d.addEventListener?(d.removeEventListener("DOMContentLoaded",K),a.removeEventListener("load",K)):(d.detachEvent("onreadystatechange",K),a.detachEvent("onload",K))}function K(){(d.addEventListener||"load"===a.event.type||"complete"===d.readyState)&&(J(),n.ready())}n.ready.promise=function(b){if(!I)if(I=n.Deferred(),"complete"===d.readyState||"loading"!==d.readyState&&!d.documentElement.doScroll)a.setTimeout(n.ready);else if(d.addEventListener)d.addEventListener("DOMContentLoaded",K),a.addEventListener("load",K);else{d.attachEvent("onreadystatechange",K),a.attachEvent("onload",K);var c=!1;try{c=null==a.frameElement&&d.documentElement}catch(e){}c&&c.doScroll&&!function f(){if(!n.isReady){try{c.doScroll("left")}catch(b){return a.setTimeout(f,50)}J(),n.ready()}}()}return I.promise(b)},n.ready.promise();var L;for(L in n(l))break;l.ownFirst="0"===L,l.inlineBlockNeedsLayout=!1,n(function(){var a,b,c,e;c=d.getElementsByTagName("body")[0],c&&c.style&&(b=d.createElement("div"),e=d.createElement("div"),e.style.cssText="position:absolute;border:0;width:0;height:0;top:0;left:-9999px",c.appendChild(e).appendChild(b),"undefined"!=typeof 
b.style.zoom&&(b.style.cssText="display:inline;margin:0;border:0;padding:1px;width:1px;zoom:1",l.inlineBlockNeedsLayout=a=3===b.offsetWidth,a&&(c.style.zoom=1)),c.removeChild(e))}),function(){var a=d.createElement("div");l.deleteExpando=!0;try{delete a.test}catch(b){l.deleteExpando=!1}a=null}();var M=function(a){var b=n.noData[(a.nodeName+" ").toLowerCase()],c=+a.nodeType||1;return 1!==c&&9!==c?!1:!b||b!==!0&&a.getAttribute("classid")===b},N=/^(?:\{[\w\W]*\}|\[[\w\W]*\])$/,O=/([A-Z])/g;function P(a,b,c){if(void 0===c&&1===a.nodeType){var d="data-"+b.replace(O,"-$1").toLowerCase();if(c=a.getAttribute(d),"string"==typeof c){try{c="true"===c?!0:"false"===c?!1:"null"===c?null:+c+""===c?+c:N.test(c)?n.parseJSON(c):c}catch(e){}n.data(a,b,c)}else c=void 0; }return c}function Q(a){var b;for(b in a)if(("data"!==b||!n.isEmptyObject(a[b]))&&"toJSON"!==b)return!1;return!0}function R(a,b,d,e){if(M(a)){var f,g,h=n.expando,i=a.nodeType,j=i?n.cache:a,k=i?a[h]:a[h]&&h;if(k&&j[k]&&(e||j[k].data)||void 0!==d||"string"!=typeof b)return k||(k=i?a[h]=c.pop()||n.guid++:h),j[k]||(j[k]=i?{}:{toJSON:n.noop}),"object"!=typeof b&&"function"!=typeof b||(e?j[k]=n.extend(j[k],b):j[k].data=n.extend(j[k].data,b)),g=j[k],e||(g.data||(g.data={}),g=g.data),void 0!==d&&(g[n.camelCase(b)]=d),"string"==typeof b?(f=g[b],null==f&&(f=g[n.camelCase(b)])):f=g,f}}function S(a,b,c){if(M(a)){var d,e,f=a.nodeType,g=f?n.cache:a,h=f?a[n.expando]:n.expando;if(g[h]){if(b&&(d=c?g[h]:g[h].data)){n.isArray(b)?b=b.concat(n.map(b,n.camelCase)):b in d?b=[b]:(b=n.camelCase(b),b=b in d?[b]:b.split(" ")),e=b.length;while(e--)delete d[b[e]];if(c?!Q(d):!n.isEmptyObject(d))return}(c||(delete g[h].data,Q(g[h])))&&(f?n.cleanData([a],!0):l.deleteExpando||g!=g.window?delete g[h]:g[h]=void 0)}}}n.extend({cache:{},noData:{"applet ":!0,"embed ":!0,"object ":"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000"},hasData:function(a){return a=a.nodeType?n.cache[a[n.expando]]:a[n.expando],!!a&&!Q(a)},data:function(a,b,c){return R(a,b,c)},removeData:function(a,b){return S(a,b)},_data:function(a,b,c){return R(a,b,c,!0)},_removeData:function(a,b){return S(a,b,!0)}}),n.fn.extend({data:function(a,b){var c,d,e,f=this[0],g=f&&f.attributes;if(void 0===a){if(this.length&&(e=n.data(f),1===f.nodeType&&!n._data(f,"parsedAttrs"))){c=g.length;while(c--)g[c]&&(d=g[c].name,0===d.indexOf("data-")&&(d=n.camelCase(d.slice(5)),P(f,d,e[d])));n._data(f,"parsedAttrs",!0)}return e}return"object"==typeof a?this.each(function(){n.data(this,a)}):arguments.length>1?this.each(function(){n.data(this,a,b)}):f?P(f,a,n.data(f,a)):void 0},removeData:function(a){return this.each(function(){n.removeData(this,a)})}}),n.extend({queue:function(a,b,c){var d;return a?(b=(b||"fx")+"queue",d=n._data(a,b),c&&(!d||n.isArray(c)?d=n._data(a,b,n.makeArray(c)):d.push(c)),d||[]):void 0},dequeue:function(a,b){b=b||"fx";var c=n.queue(a,b),d=c.length,e=c.shift(),f=n._queueHooks(a,b),g=function(){n.dequeue(a,b)};"inprogress"===e&&(e=c.shift(),d--),e&&("fx"===b&&c.unshift("inprogress"),delete f.stop,e.call(a,g,f)),!d&&f&&f.empty.fire()},_queueHooks:function(a,b){var c=b+"queueHooks";return n._data(a,c)||n._data(a,c,{empty:n.Callbacks("once memory").add(function(){n._removeData(a,b+"queue"),n._removeData(a,c)})})}}),n.fn.extend({queue:function(a,b){var c=2;return"string"!=typeof a&&(b=a,a="fx",c--),arguments.length<c?n.queue(this[0],a):void 0===b?this:this.each(function(){var c=n.queue(this,a,b);n._queueHooks(this,a),"fx"===a&&"inprogress"!==c[0]&&n.dequeue(this,a)})},dequeue:function(a){return 
this.each(function(){n.dequeue(this,a)})},clearQueue:function(a){return this.queue(a||"fx",[])},promise:function(a,b){var c,d=1,e=n.Deferred(),f=this,g=this.length,h=function(){--d||e.resolveWith(f,[f])};"string"!=typeof a&&(b=a,a=void 0),a=a||"fx";while(g--)c=n._data(f[g],a+"queueHooks"),c&&c.empty&&(d++,c.empty.add(h));return h(),e.promise(b)}}),function(){var a;l.shrinkWrapBlocks=function(){if(null!=a)return a;a=!1;var b,c,e;return c=d.getElementsByTagName("body")[0],c&&c.style?(b=d.createElement("div"),e=d.createElement("div"),e.style.cssText="position:absolute;border:0;width:0;height:0;top:0;left:-9999px",c.appendChild(e).appendChild(b),"undefined"!=typeof b.style.zoom&&(b.style.cssText="-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;display:block;margin:0;border:0;padding:1px;width:1px;zoom:1",b.appendChild(d.createElement("div")).style.width="5px",a=3!==b.offsetWidth),c.removeChild(e),a):void 0}}();var T=/[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source,U=new RegExp("^(?:([+-])=|)("+T+")([a-z%]*)$","i"),V=["Top","Right","Bottom","Left"],W=function(a,b){return a=b||a,"none"===n.css(a,"display")||!n.contains(a.ownerDocument,a)};function X(a,b,c,d){var e,f=1,g=20,h=d?function(){return d.cur()}:function(){return n.css(a,b,"")},i=h(),j=c&&c[3]||(n.cssNumber[b]?"":"px"),k=(n.cssNumber[b]||"px"!==j&&+i)&&U.exec(n.css(a,b));if(k&&k[3]!==j){j=j||k[3],c=c||[],k=+i||1;do f=f||".5",k/=f,n.style(a,b,k+j);while(f!==(f=h()/i)&&1!==f&&--g)}return c&&(k=+k||+i||0,e=c[1]?k+(c[1]+1)*c[2]:+c[2],d&&(d.unit=j,d.start=k,d.end=e)),e}var Y=function(a,b,c,d,e,f,g){var h=0,i=a.length,j=null==c;if("object"===n.type(c)){e=!0;for(h in c)Y(a,b,h,c[h],!0,f,g)}else if(void 0!==d&&(e=!0,n.isFunction(d)||(g=!0),j&&(g?(b.call(a,d),b=null):(j=b,b=function(a,b,c){return j.call(n(a),c)})),b))for(;i>h;h++)b(a[h],c,g?d:d.call(a[h],h,b(a[h],c)));return e?a:j?b.call(a):i?b(a[0],c):f},Z=/^(?:checkbox|radio)$/i,$=/<([\w:-]+)/,_=/^$|\/(?:java|ecma)script/i,aa=/^\s+/,ba="abbr|article|aside|audio|bdi|canvas|data|datalist|details|dialog|figcaption|figure|footer|header|hgroup|main|mark|meter|nav|output|picture|progress|section|summary|template|time|video";function ca(a){var b=ba.split("|"),c=a.createDocumentFragment();if(c.createElement)while(b.length)c.createElement(b.pop());return c}!function(){var a=d.createElement("div"),b=d.createDocumentFragment(),c=d.createElement("input");a.innerHTML=" <link/><table></table><a href='/a'>a</a><input type='checkbox'/>",l.leadingWhitespace=3===a.firstChild.nodeType,l.tbody=!a.getElementsByTagName("tbody").length,l.htmlSerialize=!!a.getElementsByTagName("link").length,l.html5Clone="<:nav></:nav>"!==d.createElement("nav").cloneNode(!0).outerHTML,c.type="checkbox",c.checked=!0,b.appendChild(c),l.appendChecked=c.checked,a.innerHTML="<textarea>x</textarea>",l.noCloneChecked=!!a.cloneNode(!0).lastChild.defaultValue,b.appendChild(a),c=d.createElement("input"),c.setAttribute("type","radio"),c.setAttribute("checked","checked"),c.setAttribute("name","t"),a.appendChild(c),l.checkClone=a.cloneNode(!0).cloneNode(!0).lastChild.checked,l.noCloneEvent=!!a.addEventListener,a[n.expando]=1,l.attributes=!a.getAttribute(n.expando)}();var da={option:[1,"<select 
multiple='multiple'>","</select>"],legend:[1,"<fieldset>","</fieldset>"],area:[1,"<map>","</map>"],param:[1,"<object>","</object>"],thead:[1,"<table>","</table>"],tr:[2,"<table><tbody>","</tbody></table>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],_default:l.htmlSerialize?[0,"",""]:[1,"X<div>","</div>"]};da.optgroup=da.option,da.tbody=da.tfoot=da.colgroup=da.caption=da.thead,da.th=da.td;function ea(a,b){var c,d,e=0,f="undefined"!=typeof a.getElementsByTagName?a.getElementsByTagName(b||"*"):"undefined"!=typeof a.querySelectorAll?a.querySelectorAll(b||"*"):void 0;if(!f)for(f=[],c=a.childNodes||a;null!=(d=c[e]);e++)!b||n.nodeName(d,b)?f.push(d):n.merge(f,ea(d,b));return void 0===b||b&&n.nodeName(a,b)?n.merge([a],f):f}function fa(a,b){for(var c,d=0;null!=(c=a[d]);d++)n._data(c,"globalEval",!b||n._data(b[d],"globalEval"))}var ga=/<|&#?\w+;/,ha=/<tbody/i;function ia(a){Z.test(a.type)&&(a.defaultChecked=a.checked)}function ja(a,b,c,d,e){for(var f,g,h,i,j,k,m,o=a.length,p=ca(b),q=[],r=0;o>r;r++)if(g=a[r],g||0===g)if("object"===n.type(g))n.merge(q,g.nodeType?[g]:g);else if(ga.test(g)){i=i||p.appendChild(b.createElement("div")),j=($.exec(g)||["",""])[1].toLowerCase(),m=da[j]||da._default,i.innerHTML=m[1]+n.htmlPrefilter(g)+m[2],f=m[0];while(f--)i=i.lastChild;if(!l.leadingWhitespace&&aa.test(g)&&q.push(b.createTextNode(aa.exec(g)[0])),!l.tbody){g="table"!==j||ha.test(g)?"<table>"!==m[1]||ha.test(g)?0:i:i.firstChild,f=g&&g.childNodes.length;while(f--)n.nodeName(k=g.childNodes[f],"tbody")&&!k.childNodes.length&&g.removeChild(k)}n.merge(q,i.childNodes),i.textContent="";while(i.firstChild)i.removeChild(i.firstChild);i=p.lastChild}else q.push(b.createTextNode(g));i&&p.removeChild(i),l.appendChecked||n.grep(ea(q,"input"),ia),r=0;while(g=q[r++])if(d&&n.inArray(g,d)>-1)e&&e.push(g);else if(h=n.contains(g.ownerDocument,g),i=ea(p.appendChild(g),"script"),h&&fa(i),c){f=0;while(g=i[f++])_.test(g.type||"")&&c.push(g)}return i=null,p}!function(){var b,c,e=d.createElement("div");for(b in{submit:!0,change:!0,focusin:!0})c="on"+b,(l[b]=c in a)||(e.setAttribute(c,"t"),l[b]=e.attributes[c].expando===!1);e=null}();var ka=/^(?:input|select|textarea)$/i,la=/^key/,ma=/^(?:mouse|pointer|contextmenu|drag|drop)|click/,na=/^(?:focusinfocus|focusoutblur)$/,oa=/^([^.]*)(?:\.(.+)|)/;function pa(){return!0}function qa(){return!1}function ra(){try{return d.activeElement}catch(a){}}function sa(a,b,c,d,e,f){var g,h;if("object"==typeof b){"string"!=typeof c&&(d=d||c,c=void 0);for(h in b)sa(a,h,c,d,b[h],f);return a}if(null==d&&null==e?(e=c,d=c=void 0):null==e&&("string"==typeof c?(e=d,d=void 0):(e=d,d=c,c=void 0)),e===!1)e=qa;else if(!e)return a;return 1===f&&(g=e,e=function(a){return n().off(a),g.apply(this,arguments)},e.guid=g.guid||(g.guid=n.guid++)),a.each(function(){n.event.add(this,b,e,d,c)})}n.event={global:{},add:function(a,b,c,d,e){var f,g,h,i,j,k,l,m,o,p,q,r=n._data(a);if(r){c.handler&&(i=c,c=i.handler,e=i.selector),c.guid||(c.guid=n.guid++),(g=r.events)||(g=r.events={}),(k=r.handle)||(k=r.handle=function(a){return"undefined"==typeof n||a&&n.event.triggered===a.type?void 
0:n.event.dispatch.apply(k.elem,arguments)},k.elem=a),b=(b||"").match(G)||[""],h=b.length;while(h--)f=oa.exec(b[h])||[],o=q=f[1],p=(f[2]||"").split(".").sort(),o&&(j=n.event.special[o]||{},o=(e?j.delegateType:j.bindType)||o,j=n.event.special[o]||{},l=n.extend({type:o,origType:q,data:d,handler:c,guid:c.guid,selector:e,needsContext:e&&n.expr.match.needsContext.test(e),namespace:p.join(".")},i),(m=g[o])||(m=g[o]=[],m.delegateCount=0,j.setup&&j.setup.call(a,d,p,k)!==!1||(a.addEventListener?a.addEventListener(o,k,!1):a.attachEvent&&a.attachEvent("on"+o,k))),j.add&&(j.add.call(a,l),l.handler.guid||(l.handler.guid=c.guid)),e?m.splice(m.delegateCount++,0,l):m.push(l),n.event.global[o]=!0);a=null}},remove:function(a,b,c,d,e){var f,g,h,i,j,k,l,m,o,p,q,r=n.hasData(a)&&n._data(a);if(r&&(k=r.events)){b=(b||"").match(G)||[""],j=b.length;while(j--)if(h=oa.exec(b[j])||[],o=q=h[1],p=(h[2]||"").split(".").sort(),o){l=n.event.special[o]||{},o=(d?l.delegateType:l.bindType)||o,m=k[o]||[],h=h[2]&&new RegExp("(^|\\.)"+p.join("\\.(?:.*\\.|)")+"(\\.|$)"),i=f=m.length;while(f--)g=m[f],!e&&q!==g.origType||c&&c.guid!==g.guid||h&&!h.test(g.namespace)||d&&d!==g.selector&&("**"!==d||!g.selector)||(m.splice(f,1),g.selector&&m.delegateCount--,l.remove&&l.remove.call(a,g));i&&!m.length&&(l.teardown&&l.teardown.call(a,p,r.handle)!==!1||n.removeEvent(a,o,r.handle),delete k[o])}else for(o in k)n.event.remove(a,o+b[j],c,d,!0);n.isEmptyObject(k)&&(delete r.handle,n._removeData(a,"events"))}},trigger:function(b,c,e,f){var g,h,i,j,l,m,o,p=[e||d],q=k.call(b,"type")?b.type:b,r=k.call(b,"namespace")?b.namespace.split("."):[];if(i=m=e=e||d,3!==e.nodeType&&8!==e.nodeType&&!na.test(q+n.event.triggered)&&(q.indexOf(".")>-1&&(r=q.split("."),q=r.shift(),r.sort()),h=q.indexOf(":")<0&&"on"+q,b=b[n.expando]?b:new n.Event(q,"object"==typeof b&&b),b.isTrigger=f?2:3,b.namespace=r.join("."),b.rnamespace=b.namespace?new RegExp("(^|\\.)"+r.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,b.result=void 0,b.target||(b.target=e),c=null==c?[b]:n.makeArray(c,[b]),l=n.event.special[q]||{},f||!l.trigger||l.trigger.apply(e,c)!==!1)){if(!f&&!l.noBubble&&!n.isWindow(e)){for(j=l.delegateType||q,na.test(j+q)||(i=i.parentNode);i;i=i.parentNode)p.push(i),m=i;m===(e.ownerDocument||d)&&p.push(m.defaultView||m.parentWindow||a)}o=0;while((i=p[o++])&&!b.isPropagationStopped())b.type=o>1?j:l.bindType||q,g=(n._data(i,"events")||{})[b.type]&&n._data(i,"handle"),g&&g.apply(i,c),g=h&&i[h],g&&g.apply&&M(i)&&(b.result=g.apply(i,c),b.result===!1&&b.preventDefault());if(b.type=q,!f&&!b.isDefaultPrevented()&&(!l._default||l._default.apply(p.pop(),c)===!1)&&M(e)&&h&&e[q]&&!n.isWindow(e)){m=e[h],m&&(e[h]=null),n.event.triggered=q;try{e[q]()}catch(s){}n.event.triggered=void 0,m&&(e[h]=m)}return b.result}},dispatch:function(a){a=n.event.fix(a);var b,c,d,f,g,h=[],i=e.call(arguments),j=(n._data(this,"events")||{})[a.type]||[],k=n.event.special[a.type]||{};if(i[0]=a,a.delegateTarget=this,!k.preDispatch||k.preDispatch.call(this,a)!==!1){h=n.event.handlers.call(this,a,j),b=0;while((f=h[b++])&&!a.isPropagationStopped()){a.currentTarget=f.elem,c=0;while((g=f.handlers[c++])&&!a.isImmediatePropagationStopped())a.rnamespace&&!a.rnamespace.test(g.namespace)||(a.handleObj=g,a.data=g.data,d=((n.event.special[g.origType]||{}).handle||g.handler).apply(f.elem,i),void 0!==d&&(a.result=d)===!1&&(a.preventDefault(),a.stopPropagation()))}return k.postDispatch&&k.postDispatch.call(this,a),a.result}},handlers:function(a,b){var 
c,d,e,f,g=[],h=b.delegateCount,i=a.target;if(h&&i.nodeType&&("click"!==a.type||isNaN(a.button)||a.button<1))for(;i!=this;i=i.parentNode||this)if(1===i.nodeType&&(i.disabled!==!0||"click"!==a.type)){for(d=[],c=0;h>c;c++)f=b[c],e=f.selector+" ",void 0===d[e]&&(d[e]=f.needsContext?n(e,this).index(i)>-1:n.find(e,this,null,[i]).length),d[e]&&d.push(f);d.length&&g.push({elem:i,handlers:d})}return h<b.length&&g.push({elem:this,handlers:b.slice(h)}),g},fix:function(a){if(a[n.expando])return a;var b,c,e,f=a.type,g=a,h=this.fixHooks[f];h||(this.fixHooks[f]=h=ma.test(f)?this.mouseHooks:la.test(f)?this.keyHooks:{}),e=h.props?this.props.concat(h.props):this.props,a=new n.Event(g),b=e.length;while(b--)c=e[b],a[c]=g[c];return a.target||(a.target=g.srcElement||d),3===a.target.nodeType&&(a.target=a.target.parentNode),a.metaKey=!!a.metaKey,h.filter?h.filter(a,g):a},props:"altKey bubbles cancelable ctrlKey currentTarget detail eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "),fixHooks:{},keyHooks:{props:"char charCode key keyCode".split(" "),filter:function(a,b){return null==a.which&&(a.which=null!=b.charCode?b.charCode:b.keyCode),a}},mouseHooks:{props:"button buttons clientX clientY fromElement offsetX offsetY pageX pageY screenX screenY toElement".split(" "),filter:function(a,b){var c,e,f,g=b.button,h=b.fromElement;return null==a.pageX&&null!=b.clientX&&(e=a.target.ownerDocument||d,f=e.documentElement,c=e.body,a.pageX=b.clientX+(f&&f.scrollLeft||c&&c.scrollLeft||0)-(f&&f.clientLeft||c&&c.clientLeft||0),a.pageY=b.clientY+(f&&f.scrollTop||c&&c.scrollTop||0)-(f&&f.clientTop||c&&c.clientTop||0)),!a.relatedTarget&&h&&(a.relatedTarget=h===a.target?b.toElement:h),a.which||void 0===g||(a.which=1&g?1:2&g?3:4&g?2:0),a}},special:{load:{noBubble:!0},focus:{trigger:function(){if(this!==ra()&&this.focus)try{return this.focus(),!1}catch(a){}},delegateType:"focusin"},blur:{trigger:function(){return this===ra()&&this.blur?(this.blur(),!1):void 0},delegateType:"focusout"},click:{trigger:function(){return n.nodeName(this,"input")&&"checkbox"===this.type&&this.click?(this.click(),!1):void 0},_default:function(a){return n.nodeName(a.target,"a")}},beforeunload:{postDispatch:function(a){void 0!==a.result&&a.originalEvent&&(a.originalEvent.returnValue=a.result)}}},simulate:function(a,b,c){var d=n.extend(new n.Event,c,{type:a,isSimulated:!0});n.event.trigger(d,null,b),d.isDefaultPrevented()&&c.preventDefault()}},n.removeEvent=d.removeEventListener?function(a,b,c){a.removeEventListener&&a.removeEventListener(b,c)}:function(a,b,c){var d="on"+b;a.detachEvent&&("undefined"==typeof a[d]&&(a[d]=null),a.detachEvent(d,c))},n.Event=function(a,b){return this instanceof n.Event?(a&&a.type?(this.originalEvent=a,this.type=a.type,this.isDefaultPrevented=a.defaultPrevented||void 0===a.defaultPrevented&&a.returnValue===!1?pa:qa):this.type=a,b&&n.extend(this,b),this.timeStamp=a&&a.timeStamp||n.now(),void(this[n.expando]=!0)):new n.Event(a,b)},n.Event.prototype={constructor:n.Event,isDefaultPrevented:qa,isPropagationStopped:qa,isImmediatePropagationStopped:qa,preventDefault:function(){var a=this.originalEvent;this.isDefaultPrevented=pa,a&&(a.preventDefault?a.preventDefault():a.returnValue=!1)},stopPropagation:function(){var a=this.originalEvent;this.isPropagationStopped=pa,a&&!this.isSimulated&&(a.stopPropagation&&a.stopPropagation(),a.cancelBubble=!0)},stopImmediatePropagation:function(){var 
a=this.originalEvent;this.isImmediatePropagationStopped=pa,a&&a.stopImmediatePropagation&&a.stopImmediatePropagation(),this.stopPropagation()}},n.each({mouseenter:"mouseover",mouseleave:"mouseout",pointerenter:"pointerover",pointerleave:"pointerout"},function(a,b){n.event.special[a]={delegateType:b,bindType:b,handle:function(a){var c,d=this,e=a.relatedTarget,f=a.handleObj;return e&&(e===d||n.contains(d,e))||(a.type=f.origType,c=f.handler.apply(this,arguments),a.type=b),c}}}),l.submit||(n.event.special.submit={setup:function(){return n.nodeName(this,"form")?!1:void n.event.add(this,"click._submit keypress._submit",function(a){var b=a.target,c=n.nodeName(b,"input")||n.nodeName(b,"button")?n.prop(b,"form"):void 0;c&&!n._data(c,"submit")&&(n.event.add(c,"submit._submit",function(a){a._submitBubble=!0}),n._data(c,"submit",!0))})},postDispatch:function(a){a._submitBubble&&(delete a._submitBubble,this.parentNode&&!a.isTrigger&&n.event.simulate("submit",this.parentNode,a))},teardown:function(){return n.nodeName(this,"form")?!1:void n.event.remove(this,"._submit")}}),l.change||(n.event.special.change={setup:function(){return ka.test(this.nodeName)?("checkbox"!==this.type&&"radio"!==this.type||(n.event.add(this,"propertychange._change",function(a){"checked"===a.originalEvent.propertyName&&(this._justChanged=!0)}),n.event.add(this,"click._change",function(a){this._justChanged&&!a.isTrigger&&(this._justChanged=!1),n.event.simulate("change",this,a)})),!1):void n.event.add(this,"beforeactivate._change",function(a){var b=a.target;ka.test(b.nodeName)&&!n._data(b,"change")&&(n.event.add(b,"change._change",function(a){!this.parentNode||a.isSimulated||a.isTrigger||n.event.simulate("change",this.parentNode,a)}),n._data(b,"change",!0))})},handle:function(a){var b=a.target;return this!==b||a.isSimulated||a.isTrigger||"radio"!==b.type&&"checkbox"!==b.type?a.handleObj.handler.apply(this,arguments):void 0},teardown:function(){return n.event.remove(this,"._change"),!ka.test(this.nodeName)}}),l.focusin||n.each({focus:"focusin",blur:"focusout"},function(a,b){var c=function(a){n.event.simulate(b,a.target,n.event.fix(a))};n.event.special[b]={setup:function(){var d=this.ownerDocument||this,e=n._data(d,b);e||d.addEventListener(a,c,!0),n._data(d,b,(e||0)+1)},teardown:function(){var d=this.ownerDocument||this,e=n._data(d,b)-1;e?n._data(d,b,e):(d.removeEventListener(a,c,!0),n._removeData(d,b))}}}),n.fn.extend({on:function(a,b,c,d){return sa(this,a,b,c,d)},one:function(a,b,c,d){return sa(this,a,b,c,d,1)},off:function(a,b,c){var d,e;if(a&&a.preventDefault&&a.handleObj)return d=a.handleObj,n(a.delegateTarget).off(d.namespace?d.origType+"."+d.namespace:d.origType,d.selector,d.handler),this;if("object"==typeof a){for(e in a)this.off(e,b,a[e]);return this}return b!==!1&&"function"!=typeof b||(c=b,b=void 0),c===!1&&(c=qa),this.each(function(){n.event.remove(this,a,c,b)})},trigger:function(a,b){return this.each(function(){n.event.trigger(a,b,this)})},triggerHandler:function(a,b){var c=this[0];return c?n.event.trigger(a,b,c,!0):void 0}});var ta=/ jQuery\d+="(?:null|\d+)"/g,ua=new RegExp("<(?:"+ba+")[\\s/>]","i"),va=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:-]+)[^>]*)\/>/gi,wa=/<script|<style|<link/i,xa=/checked\s*(?:[^=]|=\s*.checked.)/i,ya=/^true\/(.*)/,za=/^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g,Aa=ca(d),Ba=Aa.appendChild(d.createElement("div"));function Ca(a,b){return 
n.nodeName(a,"table")&&n.nodeName(11!==b.nodeType?b:b.firstChild,"tr")?a.getElementsByTagName("tbody")[0]||a.appendChild(a.ownerDocument.createElement("tbody")):a}function Da(a){return a.type=(null!==n.find.attr(a,"type"))+"/"+a.type,a}function Ea(a){var b=ya.exec(a.type);return b?a.type=b[1]:a.removeAttribute("type"),a}function Fa(a,b){if(1===b.nodeType&&n.hasData(a)){var c,d,e,f=n._data(a),g=n._data(b,f),h=f.events;if(h){delete g.handle,g.events={};for(c in h)for(d=0,e=h[c].length;e>d;d++)n.event.add(b,c,h[c][d])}g.data&&(g.data=n.extend({},g.data))}}function Ga(a,b){var c,d,e;if(1===b.nodeType){if(c=b.nodeName.toLowerCase(),!l.noCloneEvent&&b[n.expando]){e=n._data(b);for(d in e.events)n.removeEvent(b,d,e.handle);b.removeAttribute(n.expando)}"script"===c&&b.text!==a.text?(Da(b).text=a.text,Ea(b)):"object"===c?(b.parentNode&&(b.outerHTML=a.outerHTML),l.html5Clone&&a.innerHTML&&!n.trim(b.innerHTML)&&(b.innerHTML=a.innerHTML)):"input"===c&&Z.test(a.type)?(b.defaultChecked=b.checked=a.checked,b.value!==a.value&&(b.value=a.value)):"option"===c?b.defaultSelected=b.selected=a.defaultSelected:"input"!==c&&"textarea"!==c||(b.defaultValue=a.defaultValue)}}function Ha(a,b,c,d){b=f.apply([],b);var e,g,h,i,j,k,m=0,o=a.length,p=o-1,q=b[0],r=n.isFunction(q);if(r||o>1&&"string"==typeof q&&!l.checkClone&&xa.test(q))return a.each(function(e){var f=a.eq(e);r&&(b[0]=q.call(this,e,f.html())),Ha(f,b,c,d)});if(o&&(k=ja(b,a[0].ownerDocument,!1,a,d),e=k.firstChild,1===k.childNodes.length&&(k=e),e||d)){for(i=n.map(ea(k,"script"),Da),h=i.length;o>m;m++)g=k,m!==p&&(g=n.clone(g,!0,!0),h&&n.merge(i,ea(g,"script"))),c.call(a[m],g,m);if(h)for(j=i[i.length-1].ownerDocument,n.map(i,Ea),m=0;h>m;m++)g=i[m],_.test(g.type||"")&&!n._data(g,"globalEval")&&n.contains(j,g)&&(g.src?n._evalUrl&&n._evalUrl(g.src):n.globalEval((g.text||g.textContent||g.innerHTML||"").replace(za,"")));k=e=null}return a}function Ia(a,b,c){for(var d,e=b?n.filter(b,a):a,f=0;null!=(d=e[f]);f++)c||1!==d.nodeType||n.cleanData(ea(d)),d.parentNode&&(c&&n.contains(d.ownerDocument,d)&&fa(ea(d,"script")),d.parentNode.removeChild(d));return a}n.extend({htmlPrefilter:function(a){return a.replace(va,"<$1></$2>")},clone:function(a,b,c){var d,e,f,g,h,i=n.contains(a.ownerDocument,a);if(l.html5Clone||n.isXMLDoc(a)||!ua.test("<"+a.nodeName+">")?f=a.cloneNode(!0):(Ba.innerHTML=a.outerHTML,Ba.removeChild(f=Ba.firstChild)),!(l.noCloneEvent&&l.noCloneChecked||1!==a.nodeType&&11!==a.nodeType||n.isXMLDoc(a)))for(d=ea(f),h=ea(a),g=0;null!=(e=h[g]);++g)d[g]&&Ga(e,d[g]);if(b)if(c)for(h=h||ea(a),d=d||ea(f),g=0;null!=(e=h[g]);g++)Fa(e,d[g]);else Fa(a,f);return d=ea(f,"script"),d.length>0&&fa(d,!i&&ea(a,"script")),d=h=e=null,f},cleanData:function(a,b){for(var d,e,f,g,h=0,i=n.expando,j=n.cache,k=l.attributes,m=n.event.special;null!=(d=a[h]);h++)if((b||M(d))&&(f=d[i],g=f&&j[f])){if(g.events)for(e in g.events)m[e]?n.event.remove(d,e):n.removeEvent(d,e,g.handle);j[f]&&(delete j[f],k||"undefined"==typeof d.removeAttribute?d[i]=void 0:d.removeAttribute(i),c.push(f))}}}),n.fn.extend({domManip:Ha,detach:function(a){return Ia(this,a,!0)},remove:function(a){return Ia(this,a)},text:function(a){return Y(this,function(a){return void 0===a?n.text(this):this.empty().append((this[0]&&this[0].ownerDocument||d).createTextNode(a))},null,a,arguments.length)},append:function(){return Ha(this,arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=Ca(this,a);b.appendChild(a)}})},prepend:function(){return 
Ha(this,arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=Ca(this,a);b.insertBefore(a,b.firstChild)}})},before:function(){return Ha(this,arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this)})},after:function(){return Ha(this,arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this.nextSibling)})},empty:function(){for(var a,b=0;null!=(a=this[b]);b++){1===a.nodeType&&n.cleanData(ea(a,!1));while(a.firstChild)a.removeChild(a.firstChild);a.options&&n.nodeName(a,"select")&&(a.options.length=0)}return this},clone:function(a,b){return a=null==a?!1:a,b=null==b?a:b,this.map(function(){return n.clone(this,a,b)})},html:function(a){return Y(this,function(a){var b=this[0]||{},c=0,d=this.length;if(void 0===a)return 1===b.nodeType?b.innerHTML.replace(ta,""):void 0;if("string"==typeof a&&!wa.test(a)&&(l.htmlSerialize||!ua.test(a))&&(l.leadingWhitespace||!aa.test(a))&&!da[($.exec(a)||["",""])[1].toLowerCase()]){a=n.htmlPrefilter(a);try{for(;d>c;c++)b=this[c]||{},1===b.nodeType&&(n.cleanData(ea(b,!1)),b.innerHTML=a);b=0}catch(e){}}b&&this.empty().append(a)},null,a,arguments.length)},replaceWith:function(){var a=[];return Ha(this,arguments,function(b){var c=this.parentNode;n.inArray(this,a)<0&&(n.cleanData(ea(this)),c&&c.replaceChild(b,this))},a)}}),n.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){n.fn[a]=function(a){for(var c,d=0,e=[],f=n(a),h=f.length-1;h>=d;d++)c=d===h?this:this.clone(!0),n(f[d])[b](c),g.apply(e,c.get());return this.pushStack(e)}});var Ja,Ka={HTML:"block",BODY:"block"};function La(a,b){var c=n(b.createElement(a)).appendTo(b.body),d=n.css(c[0],"display");return c.detach(),d}function Ma(a){var b=d,c=Ka[a];return c||(c=La(a,b),"none"!==c&&c||(Ja=(Ja||n("<iframe frameborder='0' width='0' height='0'/>")).appendTo(b.documentElement),b=(Ja[0].contentWindow||Ja[0].contentDocument).document,b.write(),b.close(),c=La(a,b),Ja.detach()),Ka[a]=c),c}var Na=/^margin/,Oa=new RegExp("^("+T+")(?!px)[a-z%]+$","i"),Pa=function(a,b,c,d){var e,f,g={};for(f in b)g[f]=a.style[f],a.style[f]=b[f];e=c.apply(a,d||[]);for(f in b)a.style[f]=g[f];return e},Qa=d.documentElement;!function(){var b,c,e,f,g,h,i=d.createElement("div"),j=d.createElement("div");if(j.style){j.style.cssText="float:left;opacity:.5",l.opacity="0.5"===j.style.opacity,l.cssFloat=!!j.style.cssFloat,j.style.backgroundClip="content-box",j.cloneNode(!0).style.backgroundClip="",l.clearCloneStyle="content-box"===j.style.backgroundClip,i=d.createElement("div"),i.style.cssText="border:0;width:8px;height:0;top:0;left:-9999px;padding:0;margin-top:1px;position:absolute",j.innerHTML="",i.appendChild(j),l.boxSizing=""===j.style.boxSizing||""===j.style.MozBoxSizing||""===j.style.WebkitBoxSizing,n.extend(l,{reliableHiddenOffsets:function(){return null==b&&k(),f},boxSizingReliable:function(){return null==b&&k(),e},pixelMarginRight:function(){return null==b&&k(),c},pixelPosition:function(){return null==b&&k(),b},reliableMarginRight:function(){return null==b&&k(),g},reliableMarginLeft:function(){return null==b&&k(),h}});function k(){var 
k,l,m=d.documentElement;m.appendChild(i),j.style.cssText="-webkit-box-sizing:border-box;box-sizing:border-box;position:relative;display:block;margin:auto;border:1px;padding:1px;top:1%;width:50%",b=e=h=!1,c=g=!0,a.getComputedStyle&&(l=a.getComputedStyle(j),b="1%"!==(l||{}).top,h="2px"===(l||{}).marginLeft,e="4px"===(l||{width:"4px"}).width,j.style.marginRight="50%",c="4px"===(l||{marginRight:"4px"}).marginRight,k=j.appendChild(d.createElement("div")),k.style.cssText=j.style.cssText="-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;display:block;margin:0;border:0;padding:0",k.style.marginRight=k.style.width="0",j.style.width="1px",g=!parseFloat((a.getComputedStyle(k)||{}).marginRight),j.removeChild(k)),j.style.display="none",f=0===j.getClientRects().length,f&&(j.style.display="",j.innerHTML="<table><tr><td></td><td>t</td></tr></table>",j.childNodes[0].style.borderCollapse="separate",k=j.getElementsByTagName("td"),k[0].style.cssText="margin:0;border:0;padding:0;display:none",f=0===k[0].offsetHeight,f&&(k[0].style.display="",k[1].style.display="none",f=0===k[0].offsetHeight)),m.removeChild(i)}}}();var Ra,Sa,Ta=/^(top|right|bottom|left)$/;a.getComputedStyle?(Ra=function(b){var c=b.ownerDocument.defaultView;return c&&c.opener||(c=a),c.getComputedStyle(b)},Sa=function(a,b,c){var d,e,f,g,h=a.style;return c=c||Ra(a),g=c?c.getPropertyValue(b)||c[b]:void 0,""!==g&&void 0!==g||n.contains(a.ownerDocument,a)||(g=n.style(a,b)),c&&!l.pixelMarginRight()&&Oa.test(g)&&Na.test(b)&&(d=h.width,e=h.minWidth,f=h.maxWidth,h.minWidth=h.maxWidth=h.width=g,g=c.width,h.width=d,h.minWidth=e,h.maxWidth=f),void 0===g?g:g+""}):Qa.currentStyle&&(Ra=function(a){return a.currentStyle},Sa=function(a,b,c){var d,e,f,g,h=a.style;return c=c||Ra(a),g=c?c[b]:void 0,null==g&&h&&h[b]&&(g=h[b]),Oa.test(g)&&!Ta.test(b)&&(d=h.left,e=a.runtimeStyle,f=e&&e.left,f&&(e.left=a.currentStyle.left),h.left="fontSize"===b?"1em":g,g=h.pixelLeft+"px",h.left=d,f&&(e.left=f)),void 0===g?g:g+""||"auto"});function Ua(a,b){return{get:function(){return a()?void delete this.get:(this.get=b).apply(this,arguments)}}}var Va=/alpha\([^)]*\)/i,Wa=/opacity\s*=\s*([^)]*)/i,Xa=/^(none|table(?!-c[ea]).+)/,Ya=new RegExp("^("+T+")(.*)$","i"),Za={position:"absolute",visibility:"hidden",display:"block"},$a={letterSpacing:"0",fontWeight:"400"},_a=["Webkit","O","Moz","ms"],ab=d.createElement("div").style;function bb(a){if(a in ab)return a;var b=a.charAt(0).toUpperCase()+a.slice(1),c=_a.length;while(c--)if(a=_a[c]+b,a in ab)return a}function cb(a,b){for(var c,d,e,f=[],g=0,h=a.length;h>g;g++)d=a[g],d.style&&(f[g]=n._data(d,"olddisplay"),c=d.style.display,b?(f[g]||"none"!==c||(d.style.display=""),""===d.style.display&&W(d)&&(f[g]=n._data(d,"olddisplay",Ma(d.nodeName)))):(e=W(d),(c&&"none"!==c||!e)&&n._data(d,"olddisplay",e?c:n.css(d,"display"))));for(g=0;h>g;g++)d=a[g],d.style&&(b&&"none"!==d.style.display&&""!==d.style.display||(d.style.display=b?f[g]||"":"none"));return a}function db(a,b,c){var d=Ya.exec(b);return d?Math.max(0,d[1]-(c||0))+(d[2]||"px"):b}function eb(a,b,c,d,e){for(var f=c===(d?"border":"content")?4:"width"===b?1:0,g=0;4>f;f+=2)"margin"===c&&(g+=n.css(a,c+V[f],!0,e)),d?("content"===c&&(g-=n.css(a,"padding"+V[f],!0,e)),"margin"!==c&&(g-=n.css(a,"border"+V[f]+"Width",!0,e))):(g+=n.css(a,"padding"+V[f],!0,e),"padding"!==c&&(g+=n.css(a,"border"+V[f]+"Width",!0,e)));return g}function fb(a,b,c){var 
d=!0,e="width"===b?a.offsetWidth:a.offsetHeight,f=Ra(a),g=l.boxSizing&&"border-box"===n.css(a,"boxSizing",!1,f);if(0>=e||null==e){if(e=Sa(a,b,f),(0>e||null==e)&&(e=a.style[b]),Oa.test(e))return e;d=g&&(l.boxSizingReliable()||e===a.style[b]),e=parseFloat(e)||0}return e+eb(a,b,c||(g?"border":"content"),d,f)+"px"}n.extend({cssHooks:{opacity:{get:function(a,b){if(b){var c=Sa(a,"opacity");return""===c?"1":c}}}},cssNumber:{animationIterationCount:!0,columnCount:!0,fillOpacity:!0,flexGrow:!0,flexShrink:!0,fontWeight:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":l.cssFloat?"cssFloat":"styleFloat"},style:function(a,b,c,d){if(a&&3!==a.nodeType&&8!==a.nodeType&&a.style){var e,f,g,h=n.camelCase(b),i=a.style;if(b=n.cssProps[h]||(n.cssProps[h]=bb(h)||h),g=n.cssHooks[b]||n.cssHooks[h],void 0===c)return g&&"get"in g&&void 0!==(e=g.get(a,!1,d))?e:i[b];if(f=typeof c,"string"===f&&(e=U.exec(c))&&e[1]&&(c=X(a,b,e),f="number"),null!=c&&c===c&&("number"===f&&(c+=e&&e[3]||(n.cssNumber[h]?"":"px")),l.clearCloneStyle||""!==c||0!==b.indexOf("background")||(i[b]="inherit"),!(g&&"set"in g&&void 0===(c=g.set(a,c,d)))))try{i[b]=c}catch(j){}}},css:function(a,b,c,d){var e,f,g,h=n.camelCase(b);return b=n.cssProps[h]||(n.cssProps[h]=bb(h)||h),g=n.cssHooks[b]||n.cssHooks[h],g&&"get"in g&&(f=g.get(a,!0,c)),void 0===f&&(f=Sa(a,b,d)),"normal"===f&&b in $a&&(f=$a[b]),""===c||c?(e=parseFloat(f),c===!0||isFinite(e)?e||0:f):f}}),n.each(["height","width"],function(a,b){n.cssHooks[b]={get:function(a,c,d){return c?Xa.test(n.css(a,"display"))&&0===a.offsetWidth?Pa(a,Za,function(){return fb(a,b,d)}):fb(a,b,d):void 0},set:function(a,c,d){var e=d&&Ra(a);return db(a,c,d?eb(a,b,d,l.boxSizing&&"border-box"===n.css(a,"boxSizing",!1,e),e):0)}}}),l.opacity||(n.cssHooks.opacity={get:function(a,b){return Wa.test((b&&a.currentStyle?a.currentStyle.filter:a.style.filter)||"")?.01*parseFloat(RegExp.$1)+"":b?"1":""},set:function(a,b){var c=a.style,d=a.currentStyle,e=n.isNumeric(b)?"alpha(opacity="+100*b+")":"",f=d&&d.filter||c.filter||"";c.zoom=1,(b>=1||""===b)&&""===n.trim(f.replace(Va,""))&&c.removeAttribute&&(c.removeAttribute("filter"),""===b||d&&!d.filter)||(c.filter=Va.test(f)?f.replace(Va,e):f+" "+e)}}),n.cssHooks.marginRight=Ua(l.reliableMarginRight,function(a,b){return b?Pa(a,{display:"inline-block"},Sa,[a,"marginRight"]):void 0}),n.cssHooks.marginLeft=Ua(l.reliableMarginLeft,function(a,b){return b?(parseFloat(Sa(a,"marginLeft"))||(n.contains(a.ownerDocument,a)?a.getBoundingClientRect().left-Pa(a,{ marginLeft:0},function(){return a.getBoundingClientRect().left}):0))+"px":void 0}),n.each({margin:"",padding:"",border:"Width"},function(a,b){n.cssHooks[a+b]={expand:function(c){for(var d=0,e={},f="string"==typeof c?c.split(" "):[c];4>d;d++)e[a+V[d]+b]=f[d]||f[d-2]||f[0];return e}},Na.test(a)||(n.cssHooks[a+b].set=db)}),n.fn.extend({css:function(a,b){return Y(this,function(a,b,c){var d,e,f={},g=0;if(n.isArray(b)){for(d=Ra(a),e=b.length;e>g;g++)f[b[g]]=n.css(a,b[g],!1,d);return f}return void 0!==c?n.style(a,b,c):n.css(a,b)},a,b,arguments.length>1)},show:function(){return cb(this,!0)},hide:function(){return cb(this)},toggle:function(a){return"boolean"==typeof a?a?this.show():this.hide():this.each(function(){W(this)?n(this).show():n(this).hide()})}});function gb(a,b,c,d,e){return new 
gb.prototype.init(a,b,c,d,e)}n.Tween=gb,gb.prototype={constructor:gb,init:function(a,b,c,d,e,f){this.elem=a,this.prop=c,this.easing=e||n.easing._default,this.options=b,this.start=this.now=this.cur(),this.end=d,this.unit=f||(n.cssNumber[c]?"":"px")},cur:function(){var a=gb.propHooks[this.prop];return a&&a.get?a.get(this):gb.propHooks._default.get(this)},run:function(a){var b,c=gb.propHooks[this.prop];return this.options.duration?this.pos=b=n.easing[this.easing](a,this.options.duration*a,0,1,this.options.duration):this.pos=b=a,this.now=(this.end-this.start)*b+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),c&&c.set?c.set(this):gb.propHooks._default.set(this),this}},gb.prototype.init.prototype=gb.prototype,gb.propHooks={_default:{get:function(a){var b;return 1!==a.elem.nodeType||null!=a.elem[a.prop]&&null==a.elem.style[a.prop]?a.elem[a.prop]:(b=n.css(a.elem,a.prop,""),b&&"auto"!==b?b:0)},set:function(a){n.fx.step[a.prop]?n.fx.step[a.prop](a):1!==a.elem.nodeType||null==a.elem.style[n.cssProps[a.prop]]&&!n.cssHooks[a.prop]?a.elem[a.prop]=a.now:n.style(a.elem,a.prop,a.now+a.unit)}}},gb.propHooks.scrollTop=gb.propHooks.scrollLeft={set:function(a){a.elem.nodeType&&a.elem.parentNode&&(a.elem[a.prop]=a.now)}},n.easing={linear:function(a){return a},swing:function(a){return.5-Math.cos(a*Math.PI)/2},_default:"swing"},n.fx=gb.prototype.init,n.fx.step={};var hb,ib,jb=/^(?:toggle|show|hide)$/,kb=/queueHooks$/;function lb(){return a.setTimeout(function(){hb=void 0}),hb=n.now()}function mb(a,b){var c,d={height:a},e=0;for(b=b?1:0;4>e;e+=2-b)c=V[e],d["margin"+c]=d["padding"+c]=a;return b&&(d.opacity=d.width=a),d}function nb(a,b,c){for(var d,e=(qb.tweeners[b]||[]).concat(qb.tweeners["*"]),f=0,g=e.length;g>f;f++)if(d=e[f].call(c,b,a))return d}function ob(a,b,c){var d,e,f,g,h,i,j,k,m=this,o={},p=a.style,q=a.nodeType&&W(a),r=n._data(a,"fxshow");c.queue||(h=n._queueHooks(a,"fx"),null==h.unqueued&&(h.unqueued=0,i=h.empty.fire,h.empty.fire=function(){h.unqueued||i()}),h.unqueued++,m.always(function(){m.always(function(){h.unqueued--,n.queue(a,"fx").length||h.empty.fire()})})),1===a.nodeType&&("height"in b||"width"in b)&&(c.overflow=[p.overflow,p.overflowX,p.overflowY],j=n.css(a,"display"),k="none"===j?n._data(a,"olddisplay")||Ma(a.nodeName):j,"inline"===k&&"none"===n.css(a,"float")&&(l.inlineBlockNeedsLayout&&"inline"!==Ma(a.nodeName)?p.zoom=1:p.display="inline-block")),c.overflow&&(p.overflow="hidden",l.shrinkWrapBlocks()||m.always(function(){p.overflow=c.overflow[0],p.overflowX=c.overflow[1],p.overflowY=c.overflow[2]}));for(d in b)if(e=b[d],jb.exec(e)){if(delete b[d],f=f||"toggle"===e,e===(q?"hide":"show")){if("show"!==e||!r||void 0===r[d])continue;q=!0}o[d]=r&&r[d]||n.style(a,d)}else j=void 0;if(n.isEmptyObject(o))"inline"===("none"===j?Ma(a.nodeName):j)&&(p.display=j);else{r?"hidden"in r&&(q=r.hidden):r=n._data(a,"fxshow",{}),f&&(r.hidden=!q),q?n(a).show():m.done(function(){n(a).hide()}),m.done(function(){var b;n._removeData(a,"fxshow");for(b in o)n.style(a,b,o[b])});for(d in o)g=nb(q?r[d]:0,d,m),d in r||(r[d]=g.start,q&&(g.end=g.start,g.start="width"===d||"height"===d?1:0))}}function pb(a,b){var c,d,e,f,g;for(c in a)if(d=n.camelCase(c),e=b[d],f=a[c],n.isArray(f)&&(e=f[1],f=a[c]=f[0]),c!==d&&(a[d]=f,delete a[c]),g=n.cssHooks[d],g&&"expand"in g){f=g.expand(f),delete a[d];for(c in f)c in a||(a[c]=f[c],b[c]=e)}else b[d]=e}function qb(a,b,c){var d,e,f=0,g=qb.prefilters.length,h=n.Deferred().always(function(){delete i.elem}),i=function(){if(e)return!1;for(var 
b=hb||lb(),c=Math.max(0,j.startTime+j.duration-b),d=c/j.duration||0,f=1-d,g=0,i=j.tweens.length;i>g;g++)j.tweens[g].run(f);return h.notifyWith(a,[j,f,c]),1>f&&i?c:(h.resolveWith(a,[j]),!1)},j=h.promise({elem:a,props:n.extend({},b),opts:n.extend(!0,{specialEasing:{},easing:n.easing._default},c),originalProperties:b,originalOptions:c,startTime:hb||lb(),duration:c.duration,tweens:[],createTween:function(b,c){var d=n.Tween(a,j.opts,b,c,j.opts.specialEasing[b]||j.opts.easing);return j.tweens.push(d),d},stop:function(b){var c=0,d=b?j.tweens.length:0;if(e)return this;for(e=!0;d>c;c++)j.tweens[c].run(1);return b?(h.notifyWith(a,[j,1,0]),h.resolveWith(a,[j,b])):h.rejectWith(a,[j,b]),this}}),k=j.props;for(pb(k,j.opts.specialEasing);g>f;f++)if(d=qb.prefilters[f].call(j,a,k,j.opts))return n.isFunction(d.stop)&&(n._queueHooks(j.elem,j.opts.queue).stop=n.proxy(d.stop,d)),d;return n.map(k,nb,j),n.isFunction(j.opts.start)&&j.opts.start.call(a,j),n.fx.timer(n.extend(i,{elem:a,anim:j,queue:j.opts.queue})),j.progress(j.opts.progress).done(j.opts.done,j.opts.complete).fail(j.opts.fail).always(j.opts.always)}n.Animation=n.extend(qb,{tweeners:{"*":[function(a,b){var c=this.createTween(a,b);return X(c.elem,a,U.exec(b),c),c}]},tweener:function(a,b){n.isFunction(a)?(b=a,a=["*"]):a=a.match(G);for(var c,d=0,e=a.length;e>d;d++)c=a[d],qb.tweeners[c]=qb.tweeners[c]||[],qb.tweeners[c].unshift(b)},prefilters:[ob],prefilter:function(a,b){b?qb.prefilters.unshift(a):qb.prefilters.push(a)}}),n.speed=function(a,b,c){var d=a&&"object"==typeof a?n.extend({},a):{complete:c||!c&&b||n.isFunction(a)&&a,duration:a,easing:c&&b||b&&!n.isFunction(b)&&b};return d.duration=n.fx.off?0:"number"==typeof d.duration?d.duration:d.duration in n.fx.speeds?n.fx.speeds[d.duration]:n.fx.speeds._default,null!=d.queue&&d.queue!==!0||(d.queue="fx"),d.old=d.complete,d.complete=function(){n.isFunction(d.old)&&d.old.call(this),d.queue&&n.dequeue(this,d.queue)},d},n.fn.extend({fadeTo:function(a,b,c,d){return this.filter(W).css("opacity",0).show().end().animate({opacity:b},a,c,d)},animate:function(a,b,c,d){var e=n.isEmptyObject(a),f=n.speed(b,c,d),g=function(){var b=qb(this,n.extend({},a),f);(e||n._data(this,"finish"))&&b.stop(!0)};return g.finish=g,e||f.queue===!1?this.each(g):this.queue(f.queue,g)},stop:function(a,b,c){var d=function(a){var b=a.stop;delete a.stop,b(c)};return"string"!=typeof a&&(c=b,b=a,a=void 0),b&&a!==!1&&this.queue(a||"fx",[]),this.each(function(){var b=!0,e=null!=a&&a+"queueHooks",f=n.timers,g=n._data(this);if(e)g[e]&&g[e].stop&&d(g[e]);else for(e in g)g[e]&&g[e].stop&&kb.test(e)&&d(g[e]);for(e=f.length;e--;)f[e].elem!==this||null!=a&&f[e].queue!==a||(f[e].anim.stop(c),b=!1,f.splice(e,1));!b&&c||n.dequeue(this,a)})},finish:function(a){return a!==!1&&(a=a||"fx"),this.each(function(){var b,c=n._data(this),d=c[a+"queue"],e=c[a+"queueHooks"],f=n.timers,g=d?d.length:0;for(c.finish=!0,n.queue(this,a,[]),e&&e.stop&&e.stop.call(this,!0),b=f.length;b--;)f[b].elem===this&&f[b].queue===a&&(f[b].anim.stop(!0),f.splice(b,1));for(b=0;g>b;b++)d[b]&&d[b].finish&&d[b].finish.call(this);delete c.finish})}}),n.each(["toggle","show","hide"],function(a,b){var c=n.fn[b];n.fn[b]=function(a,d,e){return null==a||"boolean"==typeof a?c.apply(this,arguments):this.animate(mb(b,!0),a,d,e)}}),n.each({slideDown:mb("show"),slideUp:mb("hide"),slideToggle:mb("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(a,b){n.fn[a]=function(a,c,d){return this.animate(b,a,c,d)}}),n.timers=[],n.fx.tick=function(){var 
a,b=n.timers,c=0;for(hb=n.now();c<b.length;c++)a=b[c],a()||b[c]!==a||b.splice(c--,1);b.length||n.fx.stop(),hb=void 0},n.fx.timer=function(a){n.timers.push(a),a()?n.fx.start():n.timers.pop()},n.fx.interval=13,n.fx.start=function(){ib||(ib=a.setInterval(n.fx.tick,n.fx.interval))},n.fx.stop=function(){a.clearInterval(ib),ib=null},n.fx.speeds={slow:600,fast:200,_default:400},n.fn.delay=function(b,c){return b=n.fx?n.fx.speeds[b]||b:b,c=c||"fx",this.queue(c,function(c,d){var e=a.setTimeout(c,b);d.stop=function(){a.clearTimeout(e)}})},function(){var a,b=d.createElement("input"),c=d.createElement("div"),e=d.createElement("select"),f=e.appendChild(d.createElement("option"));c=d.createElement("div"),c.setAttribute("className","t"),c.innerHTML=" <link/><table></table><a href='/a'>a</a><input type='checkbox'/>",a=c.getElementsByTagName("a")[0],b.setAttribute("type","checkbox"),c.appendChild(b),a=c.getElementsByTagName("a")[0],a.style.cssText="top:1px",l.getSetAttribute="t"!==c.className,l.style=/top/.test(a.getAttribute("style")),l.hrefNormalized="/a"===a.getAttribute("href"),l.checkOn=!!b.value,l.optSelected=f.selected,l.enctype=!!d.createElement("form").enctype,e.disabled=!0,l.optDisabled=!f.disabled,b=d.createElement("input"),b.setAttribute("value",""),l.input=""===b.getAttribute("value"),b.value="t",b.setAttribute("type","radio"),l.radioValue="t"===b.value}();var rb=/\r/g,sb=/[\x20\t\r\n\f]+/g;n.fn.extend({val:function(a){var b,c,d,e=this[0];{if(arguments.length)return d=n.isFunction(a),this.each(function(c){var e;1===this.nodeType&&(e=d?a.call(this,c,n(this).val()):a,null==e?e="":"number"==typeof e?e+="":n.isArray(e)&&(e=n.map(e,function(a){return null==a?"":a+""})),b=n.valHooks[this.type]||n.valHooks[this.nodeName.toLowerCase()],b&&"set"in b&&void 0!==b.set(this,e,"value")||(this.value=e))});if(e)return b=n.valHooks[e.type]||n.valHooks[e.nodeName.toLowerCase()],b&&"get"in b&&void 0!==(c=b.get(e,"value"))?c:(c=e.value,"string"==typeof c?c.replace(rb,""):null==c?"":c)}}}),n.extend({valHooks:{option:{get:function(a){var b=n.find.attr(a,"value");return null!=b?b:n.trim(n.text(a)).replace(sb," ")}},select:{get:function(a){for(var b,c,d=a.options,e=a.selectedIndex,f="select-one"===a.type||0>e,g=f?null:[],h=f?e+1:d.length,i=0>e?h:f?e:0;h>i;i++)if(c=d[i],(c.selected||i===e)&&(l.optDisabled?!c.disabled:null===c.getAttribute("disabled"))&&(!c.parentNode.disabled||!n.nodeName(c.parentNode,"optgroup"))){if(b=n(c).val(),f)return b;g.push(b)}return g},set:function(a,b){var c,d,e=a.options,f=n.makeArray(b),g=e.length;while(g--)if(d=e[g],n.inArray(n.valHooks.option.get(d),f)>-1)try{d.selected=c=!0}catch(h){d.scrollHeight}else d.selected=!1;return c||(a.selectedIndex=-1),e}}}}),n.each(["radio","checkbox"],function(){n.valHooks[this]={set:function(a,b){return n.isArray(b)?a.checked=n.inArray(n(a).val(),b)>-1:void 0}},l.checkOn||(n.valHooks[this].get=function(a){return null===a.getAttribute("value")?"on":a.value})});var tb,ub,vb=n.expr.attrHandle,wb=/^(?:checked|selected)$/i,xb=l.getSetAttribute,yb=l.input;n.fn.extend({attr:function(a,b){return Y(this,n.attr,a,b,arguments.length>1)},removeAttr:function(a){return this.each(function(){n.removeAttr(this,a)})}}),n.extend({attr:function(a,b,c){var d,e,f=a.nodeType;if(3!==f&&8!==f&&2!==f)return"undefined"==typeof a.getAttribute?n.prop(a,b,c):(1===f&&n.isXMLDoc(a)||(b=b.toLowerCase(),e=n.attrHooks[b]||(n.expr.match.bool.test(b)?ub:tb)),void 0!==c?null===c?void n.removeAttr(a,b):e&&"set"in e&&void 0!==(d=e.set(a,c,b))?d:(a.setAttribute(b,c+""),c):e&&"get"in 
e&&null!==(d=e.get(a,b))?d:(d=n.find.attr(a,b),null==d?void 0:d))},attrHooks:{type:{set:function(a,b){if(!l.radioValue&&"radio"===b&&n.nodeName(a,"input")){var c=a.value;return a.setAttribute("type",b),c&&(a.value=c),b}}}},removeAttr:function(a,b){var c,d,e=0,f=b&&b.match(G);if(f&&1===a.nodeType)while(c=f[e++])d=n.propFix[c]||c,n.expr.match.bool.test(c)?yb&&xb||!wb.test(c)?a[d]=!1:a[n.camelCase("default-"+c)]=a[d]=!1:n.attr(a,c,""),a.removeAttribute(xb?c:d)}}),ub={set:function(a,b,c){return b===!1?n.removeAttr(a,c):yb&&xb||!wb.test(c)?a.setAttribute(!xb&&n.propFix[c]||c,c):a[n.camelCase("default-"+c)]=a[c]=!0,c}},n.each(n.expr.match.bool.source.match(/\w+/g),function(a,b){var c=vb[b]||n.find.attr;yb&&xb||!wb.test(b)?vb[b]=function(a,b,d){var e,f;return d||(f=vb[b],vb[b]=e,e=null!=c(a,b,d)?b.toLowerCase():null,vb[b]=f),e}:vb[b]=function(a,b,c){return c?void 0:a[n.camelCase("default-"+b)]?b.toLowerCase():null}}),yb&&xb||(n.attrHooks.value={set:function(a,b,c){return n.nodeName(a,"input")?void(a.defaultValue=b):tb&&tb.set(a,b,c)}}),xb||(tb={set:function(a,b,c){var d=a.getAttributeNode(c);return d||a.setAttributeNode(d=a.ownerDocument.createAttribute(c)),d.value=b+="","value"===c||b===a.getAttribute(c)?b:void 0}},vb.id=vb.name=vb.coords=function(a,b,c){var d;return c?void 0:(d=a.getAttributeNode(b))&&""!==d.value?d.value:null},n.valHooks.button={get:function(a,b){var c=a.getAttributeNode(b);return c&&c.specified?c.value:void 0},set:tb.set},n.attrHooks.contenteditable={set:function(a,b,c){tb.set(a,""===b?!1:b,c)}},n.each(["width","height"],function(a,b){n.attrHooks[b]={set:function(a,c){return""===c?(a.setAttribute(b,"auto"),c):void 0}}})),l.style||(n.attrHooks.style={get:function(a){return a.style.cssText||void 0},set:function(a,b){return a.style.cssText=b+""}});var zb=/^(?:input|select|textarea|button|object)$/i,Ab=/^(?:a|area)$/i;n.fn.extend({prop:function(a,b){return Y(this,n.prop,a,b,arguments.length>1)},removeProp:function(a){return a=n.propFix[a]||a,this.each(function(){try{this[a]=void 0,delete this[a]}catch(b){}})}}),n.extend({prop:function(a,b,c){var d,e,f=a.nodeType;if(3!==f&&8!==f&&2!==f)return 1===f&&n.isXMLDoc(a)||(b=n.propFix[b]||b,e=n.propHooks[b]),void 0!==c?e&&"set"in e&&void 0!==(d=e.set(a,c,b))?d:a[b]=c:e&&"get"in e&&null!==(d=e.get(a,b))?d:a[b]},propHooks:{tabIndex:{get:function(a){var b=n.find.attr(a,"tabindex");return b?parseInt(b,10):zb.test(a.nodeName)||Ab.test(a.nodeName)&&a.href?0:-1}}},propFix:{"for":"htmlFor","class":"className"}}),l.hrefNormalized||n.each(["href","src"],function(a,b){n.propHooks[b]={get:function(a){return a.getAttribute(b,4)}}}),l.optSelected||(n.propHooks.selected={get:function(a){var b=a.parentNode;return b&&(b.selectedIndex,b.parentNode&&b.parentNode.selectedIndex),null},set:function(a){var b=a.parentNode;b&&(b.selectedIndex,b.parentNode&&b.parentNode.selectedIndex)}}),n.each(["tabIndex","readOnly","maxLength","cellSpacing","cellPadding","rowSpan","colSpan","useMap","frameBorder","contentEditable"],function(){n.propFix[this.toLowerCase()]=this}),l.enctype||(n.propFix.enctype="encoding");var Bb=/[\t\r\n\f]/g;function Cb(a){return n.attr(a,"class")||""}n.fn.extend({addClass:function(a){var b,c,d,e,f,g,h,i=0;if(n.isFunction(a))return this.each(function(b){n(this).addClass(a.call(this,b,Cb(this)))});if("string"==typeof a&&a){b=a.match(G)||[];while(c=this[i++])if(e=Cb(c),d=1===c.nodeType&&(" "+e+" ").replace(Bb," ")){g=0;while(f=b[g++])d.indexOf(" "+f+" ")<0&&(d+=f+" ");h=n.trim(d),e!==h&&n.attr(c,"class",h)}}return this},removeClass:function(a){var 
b,c,d,e,f,g,h,i=0;if(n.isFunction(a))return this.each(function(b){n(this).removeClass(a.call(this,b,Cb(this)))});if(!arguments.length)return this.attr("class","");if("string"==typeof a&&a){b=a.match(G)||[];while(c=this[i++])if(e=Cb(c),d=1===c.nodeType&&(" "+e+" ").replace(Bb," ")){g=0;while(f=b[g++])while(d.indexOf(" "+f+" ")>-1)d=d.replace(" "+f+" "," ");h=n.trim(d),e!==h&&n.attr(c,"class",h)}}return this},toggleClass:function(a,b){var c=typeof a;return"boolean"==typeof b&&"string"===c?b?this.addClass(a):this.removeClass(a):n.isFunction(a)?this.each(function(c){n(this).toggleClass(a.call(this,c,Cb(this),b),b)}):this.each(function(){var b,d,e,f;if("string"===c){d=0,e=n(this),f=a.match(G)||[];while(b=f[d++])e.hasClass(b)?e.removeClass(b):e.addClass(b)}else void 0!==a&&"boolean"!==c||(b=Cb(this),b&&n._data(this,"__className__",b),n.attr(this,"class",b||a===!1?"":n._data(this,"__className__")||""))})},hasClass:function(a){var b,c,d=0;b=" "+a+" ";while(c=this[d++])if(1===c.nodeType&&(" "+Cb(c)+" ").replace(Bb," ").indexOf(b)>-1)return!0;return!1}}),n.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error contextmenu".split(" "),function(a,b){n.fn[b]=function(a,c){return arguments.length>0?this.on(b,null,a,c):this.trigger(b)}}),n.fn.extend({hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)}});var Db=a.location,Eb=n.now(),Fb=/\?/,Gb=/(,)|(\[|{)|(}|])|"(?:[^"\\\r\n]|\\["\\\/bfnrt]|\\u[\da-fA-F]{4})*"\s*:?|true|false|null|-?(?!0\d)\d+(?:\.\d+|)(?:[eE][+-]?\d+|)/g;n.parseJSON=function(b){if(a.JSON&&a.JSON.parse)return a.JSON.parse(b+"");var c,d=null,e=n.trim(b+"");return e&&!n.trim(e.replace(Gb,function(a,b,e,f){return c&&b&&(d=0),0===d?a:(c=e||b,d+=!f-!e,"")}))?Function("return "+e)():n.error("Invalid JSON: "+b)},n.parseXML=function(b){var c,d;if(!b||"string"!=typeof b)return null;try{a.DOMParser?(d=new a.DOMParser,c=d.parseFromString(b,"text/xml")):(c=new a.ActiveXObject("Microsoft.XMLDOM"),c.async="false",c.loadXML(b))}catch(e){c=void 0}return c&&c.documentElement&&!c.getElementsByTagName("parsererror").length||n.error("Invalid XML: "+b),c};var Hb=/#.*$/,Ib=/([?&])_=[^&]*/,Jb=/^(.*?):[ \t]*([^\r\n]*)\r?$/gm,Kb=/^(?:about|app|app-storage|.+-extension|file|res|widget):$/,Lb=/^(?:GET|HEAD)$/,Mb=/^\/\//,Nb=/^([\w.+-]+:)(?:\/\/(?:[^\/?#]*@|)([^\/?#:]*)(?::(\d+)|)|)/,Ob={},Pb={},Qb="*/".concat("*"),Rb=Db.href,Sb=Nb.exec(Rb.toLowerCase())||[];function Tb(a){return function(b,c){"string"!=typeof b&&(c=b,b="*");var d,e=0,f=b.toLowerCase().match(G)||[];if(n.isFunction(c))while(d=f[e++])"+"===d.charAt(0)?(d=d.slice(1)||"*",(a[d]=a[d]||[]).unshift(c)):(a[d]=a[d]||[]).push(c)}}function Ub(a,b,c,d){var e={},f=a===Pb;function g(h){var i;return e[h]=!0,n.each(a[h]||[],function(a,h){var j=h(b,c,d);return"string"!=typeof j||f||e[j]?f?!(i=j):void 0:(b.dataTypes.unshift(j),g(j),!1)}),i}return g(b.dataTypes[0])||!e["*"]&&g("*")}function Vb(a,b){var c,d,e=n.ajaxSettings.flatOptions||{};for(d in b)void 0!==b[d]&&((e[d]?a:c||(c={}))[d]=b[d]);return c&&n.extend(!0,a,c),a}function Wb(a,b,c){var d,e,f,g,h=a.contents,i=a.dataTypes;while("*"===i[0])i.shift(),void 0===e&&(e=a.mimeType||b.getResponseHeader("Content-Type"));if(e)for(g in h)if(h[g]&&h[g].test(e)){i.unshift(g);break}if(i[0]in c)f=i[0];else{for(g in c){if(!i[0]||a.converters[g+" "+i[0]]){f=g;break}d||(d=g)}f=f||d}return f?(f!==i[0]&&i.unshift(f),c[f]):void 0}function Xb(a,b,c,d){var 
e,f,g,h,i,j={},k=a.dataTypes.slice();if(k[1])for(g in a.converters)j[g.toLowerCase()]=a.converters[g];f=k.shift();while(f)if(a.responseFields[f]&&(c[a.responseFields[f]]=b),!i&&d&&a.dataFilter&&(b=a.dataFilter(b,a.dataType)),i=f,f=k.shift())if("*"===f)f=i;else if("*"!==i&&i!==f){if(g=j[i+" "+f]||j["* "+f],!g)for(e in j)if(h=e.split(" "),h[1]===f&&(g=j[i+" "+h[0]]||j["* "+h[0]])){g===!0?g=j[e]:j[e]!==!0&&(f=h[0],k.unshift(h[1]));break}if(g!==!0)if(g&&a["throws"])b=g(b);else try{b=g(b)}catch(l){return{state:"parsererror",error:g?l:"No conversion from "+i+" to "+f}}}return{state:"success",data:b}}n.extend({active:0,lastModified:{},etag:{},ajaxSettings:{url:Rb,type:"GET",isLocal:Kb.test(Sb[1]),global:!0,processData:!0,async:!0,contentType:"application/x-www-form-urlencoded; charset=UTF-8",accepts:{"*":Qb,text:"text/plain",html:"text/html",xml:"application/xml, text/xml",json:"application/json, text/javascript"},contents:{xml:/\bxml\b/,html:/\bhtml/,json:/\bjson\b/},responseFields:{xml:"responseXML",text:"responseText",json:"responseJSON"},converters:{"* text":String,"text html":!0,"text json":n.parseJSON,"text xml":n.parseXML},flatOptions:{url:!0,context:!0}},ajaxSetup:function(a,b){return b?Vb(Vb(a,n.ajaxSettings),b):Vb(n.ajaxSettings,a)},ajaxPrefilter:Tb(Ob),ajaxTransport:Tb(Pb),ajax:function(b,c){"object"==typeof b&&(c=b,b=void 0),c=c||{};var d,e,f,g,h,i,j,k,l=n.ajaxSetup({},c),m=l.context||l,o=l.context&&(m.nodeType||m.jquery)?n(m):n.event,p=n.Deferred(),q=n.Callbacks("once memory"),r=l.statusCode||{},s={},t={},u=0,v="canceled",w={readyState:0,getResponseHeader:function(a){var b;if(2===u){if(!k){k={};while(b=Jb.exec(g))k[b[1].toLowerCase()]=b[2]}b=k[a.toLowerCase()]}return null==b?null:b},getAllResponseHeaders:function(){return 2===u?g:null},setRequestHeader:function(a,b){var c=a.toLowerCase();return u||(a=t[c]=t[c]||a,s[a]=b),this},overrideMimeType:function(a){return u||(l.mimeType=a),this},statusCode:function(a){var b;if(a)if(2>u)for(b in a)r[b]=[r[b],a[b]];else w.always(a[w.status]);return this},abort:function(a){var b=a||v;return j&&j.abort(b),y(0,b),this}};if(p.promise(w).complete=q.add,w.success=w.done,w.error=w.fail,l.url=((b||l.url||Rb)+"").replace(Hb,"").replace(Mb,Sb[1]+"//"),l.type=c.method||c.type||l.method||l.type,l.dataTypes=n.trim(l.dataType||"*").toLowerCase().match(G)||[""],null==l.crossDomain&&(d=Nb.exec(l.url.toLowerCase()),l.crossDomain=!(!d||d[1]===Sb[1]&&d[2]===Sb[2]&&(d[3]||("http:"===d[1]?"80":"443"))===(Sb[3]||("http:"===Sb[1]?"80":"443")))),l.data&&l.processData&&"string"!=typeof l.data&&(l.data=n.param(l.data,l.traditional)),Ub(Ob,l,c,w),2===u)return w;i=n.event&&l.global,i&&0===n.active++&&n.event.trigger("ajaxStart"),l.type=l.type.toUpperCase(),l.hasContent=!Lb.test(l.type),f=l.url,l.hasContent||(l.data&&(f=l.url+=(Fb.test(f)?"&":"?")+l.data,delete l.data),l.cache===!1&&(l.url=Ib.test(f)?f.replace(Ib,"$1_="+Eb++):f+(Fb.test(f)?"&":"?")+"_="+Eb++)),l.ifModified&&(n.lastModified[f]&&w.setRequestHeader("If-Modified-Since",n.lastModified[f]),n.etag[f]&&w.setRequestHeader("If-None-Match",n.etag[f])),(l.data&&l.hasContent&&l.contentType!==!1||c.contentType)&&w.setRequestHeader("Content-Type",l.contentType),w.setRequestHeader("Accept",l.dataTypes[0]&&l.accepts[l.dataTypes[0]]?l.accepts[l.dataTypes[0]]+("*"!==l.dataTypes[0]?", "+Qb+"; q=0.01":""):l.accepts["*"]);for(e in l.headers)w.setRequestHeader(e,l.headers[e]);if(l.beforeSend&&(l.beforeSend.call(m,w,l)===!1||2===u))return w.abort();v="abort";for(e 
in{success:1,error:1,complete:1})w[e](l[e]);if(j=Ub(Pb,l,c,w)){if(w.readyState=1,i&&o.trigger("ajaxSend",[w,l]),2===u)return w;l.async&&l.timeout>0&&(h=a.setTimeout(function(){w.abort("timeout")},l.timeout));try{u=1,j.send(s,y)}catch(x){if(!(2>u))throw x;y(-1,x)}}else y(-1,"No Transport");function y(b,c,d,e){var k,s,t,v,x,y=c;2!==u&&(u=2,h&&a.clearTimeout(h),j=void 0,g=e||"",w.readyState=b>0?4:0,k=b>=200&&300>b||304===b,d&&(v=Wb(l,w,d)),v=Xb(l,v,w,k),k?(l.ifModified&&(x=w.getResponseHeader("Last-Modified"),x&&(n.lastModified[f]=x),x=w.getResponseHeader("etag"),x&&(n.etag[f]=x)),204===b||"HEAD"===l.type?y="nocontent":304===b?y="notmodified":(y=v.state,s=v.data,t=v.error,k=!t)):(t=y,!b&&y||(y="error",0>b&&(b=0))),w.status=b,w.statusText=(c||y)+"",k?p.resolveWith(m,[s,y,w]):p.rejectWith(m,[w,y,t]),w.statusCode(r),r=void 0,i&&o.trigger(k?"ajaxSuccess":"ajaxError",[w,l,k?s:t]),q.fireWith(m,[w,y]),i&&(o.trigger("ajaxComplete",[w,l]),--n.active||n.event.trigger("ajaxStop")))}return w},getJSON:function(a,b,c){return n.get(a,b,c,"json")},getScript:function(a,b){return n.get(a,void 0,b,"script")}}),n.each(["get","post"],function(a,b){n[b]=function(a,c,d,e){return n.isFunction(c)&&(e=e||d,d=c,c=void 0),n.ajax(n.extend({url:a,type:b,dataType:e,data:c,success:d},n.isPlainObject(a)&&a))}}),n._evalUrl=function(a){return n.ajax({url:a,type:"GET",dataType:"script",cache:!0,async:!1,global:!1,"throws":!0})},n.fn.extend({wrapAll:function(a){if(n.isFunction(a))return this.each(function(b){n(this).wrapAll(a.call(this,b))});if(this[0]){var b=n(a,this[0].ownerDocument).eq(0).clone(!0);this[0].parentNode&&b.insertBefore(this[0]),b.map(function(){var a=this;while(a.firstChild&&1===a.firstChild.nodeType)a=a.firstChild;return a}).append(this)}return this},wrapInner:function(a){return n.isFunction(a)?this.each(function(b){n(this).wrapInner(a.call(this,b))}):this.each(function(){var b=n(this),c=b.contents();c.length?c.wrapAll(a):b.append(a)})},wrap:function(a){var b=n.isFunction(a);return this.each(function(c){n(this).wrapAll(b?a.call(this,c):a)})},unwrap:function(){return this.parent().each(function(){n.nodeName(this,"body")||n(this).replaceWith(this.childNodes)}).end()}});function Yb(a){return a.style&&a.style.display||n.css(a,"display")}function Zb(a){if(!n.contains(a.ownerDocument||d,a))return!0;while(a&&1===a.nodeType){if("none"===Yb(a)||"hidden"===a.type)return!0;a=a.parentNode}return!1}n.expr.filters.hidden=function(a){return l.reliableHiddenOffsets()?a.offsetWidth<=0&&a.offsetHeight<=0&&!a.getClientRects().length:Zb(a)},n.expr.filters.visible=function(a){return!n.expr.filters.hidden(a)};var $b=/%20/g,_b=/\[\]$/,ac=/\r?\n/g,bc=/^(?:submit|button|image|reset|file)$/i,cc=/^(?:input|select|textarea|keygen)/i;function dc(a,b,c,d){var e;if(n.isArray(b))n.each(b,function(b,e){c||_b.test(a)?d(a,e):dc(a+"["+("object"==typeof e&&null!=e?b:"")+"]",e,c,d)});else if(c||"object"!==n.type(b))d(a,b);else for(e in b)dc(a+"["+e+"]",b[e],c,d)}n.param=function(a,b){var c,d=[],e=function(a,b){b=n.isFunction(b)?b():null==b?"":b,d[d.length]=encodeURIComponent(a)+"="+encodeURIComponent(b)};if(void 0===b&&(b=n.ajaxSettings&&n.ajaxSettings.traditional),n.isArray(a)||a.jquery&&!n.isPlainObject(a))n.each(a,function(){e(this.name,this.value)});else for(c in a)dc(c,a[c],b,e);return d.join("&").replace($b,"+")},n.fn.extend({serialize:function(){return n.param(this.serializeArray())},serializeArray:function(){return this.map(function(){var a=n.prop(this,"elements");return a?n.makeArray(a):this}).filter(function(){var a=this.type;return 
this.name&&!n(this).is(":disabled")&&cc.test(this.nodeName)&&!bc.test(a)&&(this.checked||!Z.test(a))}).map(function(a,b){var c=n(this).val();return null==c?null:n.isArray(c)?n.map(c,function(a){return{name:b.name,value:a.replace(ac,"\r\n")}}):{name:b.name,value:c.replace(ac,"\r\n")}}).get()}}),n.ajaxSettings.xhr=void 0!==a.ActiveXObject?function(){return this.isLocal?ic():d.documentMode>8?hc():/^(get|post|head|put|delete|options)$/i.test(this.type)&&hc()||ic()}:hc;var ec=0,fc={},gc=n.ajaxSettings.xhr();a.attachEvent&&a.attachEvent("onunload",function(){for(var a in fc)fc[a](void 0,!0)}),l.cors=!!gc&&"withCredentials"in gc,gc=l.ajax=!!gc,gc&&n.ajaxTransport(function(b){if(!b.crossDomain||l.cors){var c;return{send:function(d,e){var f,g=b.xhr(),h=++ec;if(g.open(b.type,b.url,b.async,b.username,b.password),b.xhrFields)for(f in b.xhrFields)g[f]=b.xhrFields[f];b.mimeType&&g.overrideMimeType&&g.overrideMimeType(b.mimeType),b.crossDomain||d["X-Requested-With"]||(d["X-Requested-With"]="XMLHttpRequest");for(f in d)void 0!==d[f]&&g.setRequestHeader(f,d[f]+"");g.send(b.hasContent&&b.data||null),c=function(a,d){var f,i,j;if(c&&(d||4===g.readyState))if(delete fc[h],c=void 0,g.onreadystatechange=n.noop,d)4!==g.readyState&&g.abort();else{j={},f=g.status,"string"==typeof g.responseText&&(j.text=g.responseText);try{i=g.statusText}catch(k){i=""}f||!b.isLocal||b.crossDomain?1223===f&&(f=204):f=j.text?200:404}j&&e(f,i,j,g.getAllResponseHeaders())},b.async?4===g.readyState?a.setTimeout(c):g.onreadystatechange=fc[h]=c:c()},abort:function(){c&&c(void 0,!0)}}}});function hc(){try{return new a.XMLHttpRequest}catch(b){}}function ic(){try{return new a.ActiveXObject("Microsoft.XMLHTTP")}catch(b){}}n.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/\b(?:java|ecma)script\b/},converters:{"text script":function(a){return n.globalEval(a),a}}}),n.ajaxPrefilter("script",function(a){void 0===a.cache&&(a.cache=!1),a.crossDomain&&(a.type="GET",a.global=!1)}),n.ajaxTransport("script",function(a){if(a.crossDomain){var b,c=d.head||n("head")[0]||d.documentElement;return{send:function(e,f){b=d.createElement("script"),b.async=!0,a.scriptCharset&&(b.charset=a.scriptCharset),b.src=a.url,b.onload=b.onreadystatechange=function(a,c){(c||!b.readyState||/loaded|complete/.test(b.readyState))&&(b.onload=b.onreadystatechange=null,b.parentNode&&b.parentNode.removeChild(b),b=null,c||f(200,"success"))},c.insertBefore(b,c.firstChild)},abort:function(){b&&b.onload(void 0,!0)}}}});var jc=[],kc=/(=)\?(?=&|$)|\?\?/;n.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var a=jc.pop()||n.expando+"_"+Eb++;return this[a]=!0,a}}),n.ajaxPrefilter("json jsonp",function(b,c,d){var e,f,g,h=b.jsonp!==!1&&(kc.test(b.url)?"url":"string"==typeof b.data&&0===(b.contentType||"").indexOf("application/x-www-form-urlencoded")&&kc.test(b.data)&&"data");return h||"jsonp"===b.dataTypes[0]?(e=b.jsonpCallback=n.isFunction(b.jsonpCallback)?b.jsonpCallback():b.jsonpCallback,h?b[h]=b[h].replace(kc,"$1"+e):b.jsonp!==!1&&(b.url+=(Fb.test(b.url)?"&":"?")+b.jsonp+"="+e),b.converters["script json"]=function(){return g||n.error(e+" was not called"),g[0]},b.dataTypes[0]="json",f=a[e],a[e]=function(){g=arguments},d.always(function(){void 0===f?n(a).removeProp(e):a[e]=f,b[e]&&(b.jsonpCallback=c.jsonpCallback,jc.push(e)),g&&n.isFunction(f)&&f(g[0]),g=f=void 0}),"script"):void 0}),n.parseHTML=function(a,b,c){if(!a||"string"!=typeof a)return null;"boolean"==typeof b&&(c=b,b=!1),b=b||d;var 
e=x.exec(a),f=!c&&[];return e?[b.createElement(e[1])]:(e=ja([a],b,f),f&&f.length&&n(f).remove(),n.merge([],e.childNodes))};var lc=n.fn.load;n.fn.load=function(a,b,c){if("string"!=typeof a&&lc)return lc.apply(this,arguments);var d,e,f,g=this,h=a.indexOf(" ");return h>-1&&(d=n.trim(a.slice(h,a.length)),a=a.slice(0,h)),n.isFunction(b)?(c=b,b=void 0):b&&"object"==typeof b&&(e="POST"),g.length>0&&n.ajax({url:a,type:e||"GET",dataType:"html",data:b}).done(function(a){f=arguments,g.html(d?n("<div>").append(n.parseHTML(a)).find(d):a)}).always(c&&function(a,b){g.each(function(){c.apply(this,f||[a.responseText,b,a])})}),this},n.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(a,b){n.fn[b]=function(a){return this.on(b,a)}}),n.expr.filters.animated=function(a){return n.grep(n.timers,function(b){return a===b.elem}).length};function mc(a){return n.isWindow(a)?a:9===a.nodeType?a.defaultView||a.parentWindow:!1}n.offset={setOffset:function(a,b,c){var d,e,f,g,h,i,j,k=n.css(a,"position"),l=n(a),m={};"static"===k&&(a.style.position="relative"),h=l.offset(),f=n.css(a,"top"),i=n.css(a,"left"),j=("absolute"===k||"fixed"===k)&&n.inArray("auto",[f,i])>-1,j?(d=l.position(),g=d.top,e=d.left):(g=parseFloat(f)||0,e=parseFloat(i)||0),n.isFunction(b)&&(b=b.call(a,c,n.extend({},h))),null!=b.top&&(m.top=b.top-h.top+g),null!=b.left&&(m.left=b.left-h.left+e),"using"in b?b.using.call(a,m):l.css(m)}},n.fn.extend({offset:function(a){if(arguments.length)return void 0===a?this:this.each(function(b){n.offset.setOffset(this,a,b)});var b,c,d={top:0,left:0},e=this[0],f=e&&e.ownerDocument;if(f)return b=f.documentElement,n.contains(b,e)?("undefined"!=typeof e.getBoundingClientRect&&(d=e.getBoundingClientRect()),c=mc(f),{top:d.top+(c.pageYOffset||b.scrollTop)-(b.clientTop||0),left:d.left+(c.pageXOffset||b.scrollLeft)-(b.clientLeft||0)}):d},position:function(){if(this[0]){var a,b,c={top:0,left:0},d=this[0];return"fixed"===n.css(d,"position")?b=d.getBoundingClientRect():(a=this.offsetParent(),b=this.offset(),n.nodeName(a[0],"html")||(c=a.offset()),c.top+=n.css(a[0],"borderTopWidth",!0),c.left+=n.css(a[0],"borderLeftWidth",!0)),{top:b.top-c.top-n.css(d,"marginTop",!0),left:b.left-c.left-n.css(d,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var a=this.offsetParent;while(a&&!n.nodeName(a,"html")&&"static"===n.css(a,"position"))a=a.offsetParent;return a||Qa})}}),n.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(a,b){var c=/Y/.test(b);n.fn[a]=function(d){return Y(this,function(a,d,e){var f=mc(a);return void 0===e?f?b in f?f[b]:f.document.documentElement[d]:a[d]:void(f?f.scrollTo(c?n(f).scrollLeft():e,c?e:n(f).scrollTop()):a[d]=e)},a,d,arguments.length,null)}}),n.each(["top","left"],function(a,b){n.cssHooks[b]=Ua(l.pixelPosition,function(a,c){return c?(c=Sa(a,b),Oa.test(c)?n(a).position()[b]+"px":c):void 0})}),n.each({Height:"height",Width:"width"},function(a,b){n.each({ padding:"inner"+a,content:b,"":"outer"+a},function(c,d){n.fn[d]=function(d,e){var f=arguments.length&&(c||"boolean"!=typeof d),g=c||(d===!0||e===!0?"margin":"border");return Y(this,function(b,c,d){var e;return n.isWindow(b)?b.document.documentElement["client"+a]:9===b.nodeType?(e=b.documentElement,Math.max(b.body["scroll"+a],e["scroll"+a],b.body["offset"+a],e["offset"+a],e["client"+a])):void 0===d?n.css(b,c,g):n.style(b,c,d,g)},b,f?d:void 0,f,null)}})}),n.fn.extend({bind:function(a,b,c){return this.on(a,null,b,c)},unbind:function(a,b){return this.off(a,null,b)},delegate:function(a,b,c,d){return 
this.on(b,a,c,d)},undelegate:function(a,b,c){return 1===arguments.length?this.off(a,"**"):this.off(b,a||"**",c)}}),n.fn.size=function(){return this.length},n.fn.andSelf=n.fn.addBack,"function"==typeof define&&define.amd&&define("jquery",[],function(){return n});var nc=a.jQuery,oc=a.$;return n.noConflict=function(b){return a.$===n&&(a.$=oc),b&&a.jQuery===n&&(a.jQuery=nc),n},b||(a.jQuery=a.$=n),n});
PypiClean
/windmill_api-1.165.0-py3-none-any.whl/windmill_api/models/open_flow_value_failure_module_suspend_resume_form.py
from typing import Any, Dict, List, Type, TypeVar, Union

import attr

from ..models.open_flow_value_failure_module_suspend_resume_form_schema import (
    OpenFlowValueFailureModuleSuspendResumeFormSchema,
)
from ..types import UNSET, Unset

T = TypeVar("T", bound="OpenFlowValueFailureModuleSuspendResumeForm")


@attr.s(auto_attribs=True)
class OpenFlowValueFailureModuleSuspendResumeForm:
    """
    Attributes:
        schema (Union[Unset, OpenFlowValueFailureModuleSuspendResumeFormSchema]):
    """

    schema: Union[Unset, OpenFlowValueFailureModuleSuspendResumeFormSchema] = UNSET
    additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        schema: Union[Unset, Dict[str, Any]] = UNSET
        if not isinstance(self.schema, Unset):
            schema = self.schema.to_dict()

        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update({})
        if schema is not UNSET:
            field_dict["schema"] = schema

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        d = src_dict.copy()
        _schema = d.pop("schema", UNSET)
        schema: Union[Unset, OpenFlowValueFailureModuleSuspendResumeFormSchema]
        if isinstance(_schema, Unset):
            schema = UNSET
        else:
            schema = OpenFlowValueFailureModuleSuspendResumeFormSchema.from_dict(_schema)

        open_flow_value_failure_module_suspend_resume_form = cls(
            schema=schema,
        )

        open_flow_value_failure_module_suspend_resume_form.additional_properties = d
        return open_flow_value_failure_module_suspend_resume_form

    @property
    def additional_keys(self) -> List[str]:
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
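A minimal round-trip sketch of how a generated model like this is typically used. The import path follows the file location above, and the payload is made up for illustration; the unknown key is only there to demonstrate `additional_properties`.

from windmill_api.models.open_flow_value_failure_module_suspend_resume_form import (
    OpenFlowValueFailureModuleSuspendResumeForm,
)

payload = {"extra_field": 42}  # hypothetical payload; "schema" is omitted so it stays UNSET
form = OpenFlowValueFailureModuleSuspendResumeForm.from_dict(payload)
assert form["extra_field"] == 42   # unknown keys are kept in additional_properties
assert form.to_dict() == payload   # and round-trip back into the output dict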
PypiClean
/rezup-api-2.6.0.tar.gz/rezup-api-2.6.0/src/rezup/_vendor/toml/encoder.py
import datetime import re import sys from decimal import Decimal from rezup._vendor.toml.decoder import InlineTableDict if sys.version_info >= (3,): unicode = str def dump(o, f, encoder=None): """Writes out dict as toml to a file Args: o: Object to dump into toml f: File descriptor where the toml should be stored encoder: The ``TomlEncoder`` to use for constructing the output string Returns: String containing the toml corresponding to dictionary Raises: TypeError: When anything other than file descriptor is passed """ if not f.write: raise TypeError("You can only dump an object to a file descriptor") d = dumps(o, encoder=encoder) f.write(d) return d def dumps(o, encoder=None): """Stringifies input dict as toml Args: o: Object to dump into toml encoder: The ``TomlEncoder`` to use for constructing the output string Returns: String containing the toml corresponding to dict Examples: ```python >>> import toml >>> output = { ... 'a': "I'm a string", ... 'b': ["I'm", "a", "list"], ... 'c': 2400 ... } >>> toml.dumps(output) 'a = "I\'m a string"\nb = [ "I\'m", "a", "list",]\nc = 2400\n' ``` """ retval = "" if encoder is None: encoder = TomlEncoder(o.__class__) addtoretval, sections = encoder.dump_sections(o, "") retval += addtoretval outer_objs = [id(o)] while sections: section_ids = [id(section) for section in sections] for outer_obj in outer_objs: if outer_obj in section_ids: raise ValueError("Circular reference detected") outer_objs += section_ids newsections = encoder.get_empty_table() for section in sections: addtoretval, addtosections = encoder.dump_sections( sections[section], section) if addtoretval or (not addtoretval and not addtosections): if retval and retval[-2:] != "\n\n": retval += "\n" retval += "[" + section + "]\n" if addtoretval: retval += addtoretval for s in addtosections: newsections[section + "." + s] = addtosections[s] sections = newsections return retval def _dump_str(v): if sys.version_info < (3,) and hasattr(v, 'decode') and isinstance(v, str): v = v.decode('utf-8') v = "%r" % v if v[0] == 'u': v = v[1:] singlequote = v.startswith("'") if singlequote or v.startswith('"'): v = v[1:-1] if singlequote: v = v.replace("\\'", "'") v = v.replace('"', '\\"') v = v.split("\\x") while len(v) > 1: i = -1 if not v[0]: v = v[1:] v[0] = v[0].replace("\\\\", "\\") # No, I don't know why != works and == breaks joinx = v[0][i] != "\\" while v[0][:i] and v[0][i] == "\\": joinx = not joinx i -= 1 if joinx: joiner = "x" else: joiner = "u00" v = [v[0] + joiner + v[1]] + v[2:] return unicode('"' + v[0] + '"') def _dump_float(v): return "{}".format(v).replace("e+0", "e+").replace("e-0", "e-") def _dump_time(v): utcoffset = v.utcoffset() if utcoffset is None: return v.isoformat() # The TOML norm specifies that it's local time thus we drop the offset return v.isoformat()[:-6] class TomlEncoder(object): def __init__(self, _dict=dict, preserve=False): self._dict = _dict self.preserve = preserve self.dump_funcs = { str: _dump_str, unicode: _dump_str, list: self.dump_list, bool: lambda v: unicode(v).lower(), int: lambda v: v, float: _dump_float, Decimal: _dump_float, datetime.datetime: lambda v: v.isoformat().replace('+00:00', 'Z'), datetime.time: _dump_time, datetime.date: lambda v: v.isoformat() } def get_empty_table(self): return self._dict() def dump_list(self, v): retval = "[" for u in v: retval += " " + unicode(self.dump_value(u)) + "," retval += "]" return retval def dump_inline_table(self, section): """Preserve inline table in its compact syntax instead of expanding into subsection. 
https://github.com/toml-lang/toml#user-content-inline-table """ retval = "" if isinstance(section, dict): val_list = [] for k, v in section.items(): val = self.dump_inline_table(v) val_list.append(k + " = " + val) retval += "{ " + ", ".join(val_list) + " }\n" return retval else: return unicode(self.dump_value(section)) def dump_value(self, v): # Lookup function corresponding to v's type dump_fn = self.dump_funcs.get(type(v)) if dump_fn is None and hasattr(v, '__iter__'): dump_fn = self.dump_funcs[list] # Evaluate function (if it exists) else return v return dump_fn(v) if dump_fn is not None else self.dump_funcs[str](v) def dump_sections(self, o, sup): retstr = "" if sup != "" and sup[-1] != ".": sup += '.' retdict = self._dict() arraystr = "" for section in o: section = unicode(section) qsection = section if not re.match(r'^[A-Za-z0-9_-]+$', section): qsection = _dump_str(section) if not isinstance(o[section], dict): arrayoftables = False if isinstance(o[section], list): for a in o[section]: if isinstance(a, dict): arrayoftables = True if arrayoftables: for a in o[section]: arraytabstr = "\n" arraystr += "[[" + sup + qsection + "]]\n" s, d = self.dump_sections(a, sup + qsection) if s: if s[0] == "[": arraytabstr += s else: arraystr += s while d: newd = self._dict() for dsec in d: s1, d1 = self.dump_sections(d[dsec], sup + qsection + "." + dsec) if s1: arraytabstr += ("[" + sup + qsection + "." + dsec + "]\n") arraytabstr += s1 for s1 in d1: newd[dsec + "." + s1] = d1[s1] d = newd arraystr += arraytabstr else: if o[section] is not None: retstr += (qsection + " = " + unicode(self.dump_value(o[section])) + '\n') elif self.preserve and isinstance(o[section], InlineTableDict): retstr += (qsection + " = " + self.dump_inline_table(o[section])) else: retdict[qsection] = o[section] retstr += arraystr return (retstr, retdict) class TomlPreserveInlineDictEncoder(TomlEncoder): def __init__(self, _dict=dict): super(TomlPreserveInlineDictEncoder, self).__init__(_dict, True) class TomlArraySeparatorEncoder(TomlEncoder): def __init__(self, _dict=dict, preserve=False, separator=","): super(TomlArraySeparatorEncoder, self).__init__(_dict, preserve) if separator.strip() == "": separator = "," + separator elif separator.strip(' \t\n\r,'): raise ValueError("Invalid separator for arrays") self.separator = separator def dump_list(self, v): t = [] retval = "[" for u in v: t.append(self.dump_value(u)) while t != []: s = [] for u in t: if isinstance(u, list): for r in u: s.append(r) else: retval += " " + unicode(u) + self.separator t = s retval += "]" return retval class TomlNumpyEncoder(TomlEncoder): def __init__(self, _dict=dict, preserve=False): import numpy as np super(TomlNumpyEncoder, self).__init__(_dict, preserve) self.dump_funcs[np.float16] = _dump_float self.dump_funcs[np.float32] = _dump_float self.dump_funcs[np.float64] = _dump_float self.dump_funcs[np.int16] = self._dump_int self.dump_funcs[np.int32] = self._dump_int self.dump_funcs[np.int64] = self._dump_int def _dump_int(self, v): return "{}".format(int(v)) class TomlPreserveCommentEncoder(TomlEncoder): def __init__(self, _dict=dict, preserve=False): from rezup._vendor.toml.decoder import CommentValue super(TomlPreserveCommentEncoder, self).__init__(_dict, preserve) self.dump_funcs[CommentValue] = lambda v: v.dump(self.dump_value) class TomlPathlibEncoder(TomlEncoder): def _dump_pathlib_path(self, v): return _dump_str(str(v)) def dump_value(self, v): if (3, 4) <= sys.version_info: import pathlib if isinstance(v, pathlib.PurePath): v = str(v) return 
super(TomlPathlibEncoder, self).dump_value(v)
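A small usage sketch for the encoder module above. It assumes the vendored module is importable under the path shown in the file header; the config dict is invented for the example.

from rezup._vendor.toml.encoder import TomlArraySeparatorEncoder, dumps

config = {"server": {"host": "127.0.0.1", "ports": [8001, 8002], "debug": True}}

# Default TomlEncoder: emits a [server] section with plain key/value pairs.
print(dumps(config))

# Custom array separator: each array element ends up on its own line.
print(dumps(config, encoder=TomlArraySeparatorEncoder(separator=",\n   ")))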
PypiClean
/modislock-0.2.1.tar.gz/modislock-0.2.1/modislock_webservice/static/plugins/DataTables-1.10.15/extensions/Scroller/Readme.md
# Scroller

Scroller is a virtual rendering plug-in for DataTables which allows large datasets to be drawn on screen very quickly. What virtual rendering means is that only the visible portion of the table (and a bit to either side to make the scrolling smooth) is drawn, while the scrolling container gives the visual impression that the whole table is visible. This is done by making use of the pagination abilities of DataTables and moving the table around in the scrolling container DataTables adds to the page. The scrolling container is forced to the height it would be for the full table display using an extra element.

Key features include:

* Speed! The aim of Scroller for DataTables is to make rendering large data sets fast
* Full compatibility with DataTables' deferred rendering for maximum speed
* Integration with state saving in DataTables (scrolling position is saved)
* Support for scrolling with millions of rows
* Easy to use

# Installation

To use Scroller the primary way to obtain the software is to use the [DataTables downloader](//datatables.net/download). You can also include the individual files from the [DataTables CDN](//cdn.datatables.net). See the [documentation](http://datatables.net/extensions/scroller/) for full details.

## NPM and Bower

If you prefer to use a package manager such as NPM or Bower, distribution repositories are available with software built from this repository under the name `datatables.net-scroller`. Styling packages for Bootstrap, Foundation and other styling libraries are also available by adding a suffix to the package name.

Please see the DataTables [NPM](//datatables.net/download/npm) and [Bower](//datatables.net/download/bower) installation pages for further information. The [DataTables installation manual](//datatables.net/manual/installation) also has details on how to use package managers with DataTables.

# Basic usage

Scroller is initialised using the `scroller` option in the DataTables constructor - a simple boolean `true` will enable the feature. Further options can be specified using this option as an object - see the documentation for details.

```js
$(document).ready( function () {
    $('#example').DataTable( {
        scroller: true
    } );
} );
```

Note that rows in the table must all be the same height. Information in a cell which expands on to multiple lines will cause some odd behaviour in the scrolling. Additionally, the table's `cellspacing` parameter must be set to 0, again to ensure the information display is correct.

# Documentation / support

* [Documentation](https://datatables.net/extensions/scroller/)
* [DataTables support forums](http://datatables.net/forums)

# GitHub

If you fancy getting involved with the development of Scroller and helping make it better, please refer to its [GitHub repo](https://github.com/DataTables/Scroller)
PypiClean
/boil-0.2.0.tar.gz/boil-0.2.0/README.rst
Boil
==============================================

.. image:: https://img.shields.io/travis/bzurkowski/boil.svg
    :target: https://travis-ci.org/bzurkowski/boil

.. image:: https://api.codeclimate.com/v1/badges/b56e0c5a0856da0c35ba/maintainability
    :target: https://codeclimate.com/github/bzurkowski/boil/maintainability

.. image:: https://img.shields.io/pypi/v/boil.svg
    :target: https://pypi.python.org/pypi/boil

.. image:: https://img.shields.io/pypi/l/boil.svg
    :target: https://pypi.python.org/pypi/boil

.. image:: https://img.shields.io/pypi/pyversions/boil.svg
    :target: https://pypi.python.org/pypi/boil

.. image:: https://img.shields.io/codecov/c/github/bzurkowski/boil.svg
    :target: https://codecov.io/gh/bzurkowski/boil

---------------

**Initialization of new software projects should be quick and easy. Period.**

Boil's mission is to build a centralized, pluggable and community-driven repository of project templates for various technologies, managed via a single API. These are the problems it aims to solve:

- **Wasting time on initializing new software projects from scratch**

  In particular: creating a file structure, checking naming conventions, determining dependencies and completing basic metadata. No matter how many times we have initialized a project before, doing so almost always requires a documentation lookup. This time can be usefully spent on something else.

- **Poor quality and lack of consistency between projects**

  Most developers initialize projects in their own way, without complying with generally accepted standards. Often, due to lack of time, projects are initialized carelessly, have no maintenance-friendly structure and are poorly documented.

- **Burden of bootstrapping tools**

  A separate app generator for Ansible, Django, Rails, and so on. Their advantage is greater flexibility and extended functionality, but that is not always what you need. Switching between one and the other can be troublesome given the variety of APIs and the different configuration options of each tool.

Installation
------------

Use pip or easy_install::

    $ pip install boil

Usage
-----

::

    $ boil

    Usage:
      boil list
      boil search <phrase>
      boil new <plate_name> [--target-dir=<dir>]
      boil -h | --help

    Options:
      --target-dir=<dir>  Target directory where project files should be populated.

List all available plates::

    $ boil list

Search for plates::

    $ boil search <phrase>

Initialize new project from selected plate::

    $ boil new <plate_name>

Examples
////////

List all available plates::

    $ boil list

Search for Python-related plates::

    $ boil search python

Initialize new Python package::

    $ boil new python_package

Initialize new Django app::

    $ boil new django_app

Initialize new Rails app::

    $ boil new rails_app

Initialize new Ruby gem::

    $ boil new gem

Initialize new Bash command-line tool::

    $ boil new bash_cli

Initialize new Ansible role::

    $ boil new ansible_role

Initialize new plate::

    $ boil new plate

Changelog
---------

All notable changes to this project are documented in the CHANGELOG_.

.. _CHANGELOG: CHANGELOG.rst
PypiClean
/SRF-0.2.2.tar.gz/SRF-0.2.2/src/python/srf/psf/gaussFunction.py
import numpy as np
from numpy import matlib
from scipy.optimize import curve_fit

__all__ = ['gaussian1d', 'gaussian2d', 'gaussFit1d', 'gaussFit2d',
           'gaussFit1d_solver', 'gaussFit2d_solver']


def gaussian1d(x, a, sigma):
    if isinstance(x, list):
        x = np.array(x)
    return a * np.exp(-x ** 2 / (2 * sigma ** 2))


def gaussian2d(data, a, sigma):
    x = data[0]
    y = data[1]
    if isinstance(x, list):
        x = np.array(x)
    if isinstance(y, list):
        y = np.array(y)
    r = np.sqrt(x ** 2 + y ** 2)
    return gaussian1d(r, a, sigma)


def gaussFit1d(x, yn):
    if isinstance(x, list):
        x = np.array(x)
    if isinstance(yn, list):
        yn = np.array(yn)
    popt, _ = curve_fit(gaussian1d, x, yn)
    return float(popt[0]), float(popt[1])
    # print(popt)


def gaussFit2d(data, zn):
    x = data[0]
    y = data[1]
    if isinstance(x, list):
        x = np.array(x)
    if isinstance(y, list):
        y = np.array(y)
    if isinstance(zn, list):
        zn = np.array(zn)
    r = np.sqrt(x ** 2 + y ** 2)
    return gaussFit1d(r, zn)


def gaussFit1d_solver(x, yn):
    if isinstance(x, list):
        x = np.array(x)
    if isinstance(yn, list):
        yn = np.array(yn)

    def func(r2, alpha, beta):
        return alpha + beta * r2

    r2, out = [], []
    for i in range(len(x)):
        if yn[i] > 1e-7:
            r2 = r2 + [x[i] ** 2]
            out = out + [np.log(yn[i])]
    r2 = np.array(r2)
    out = np.array(out)
    popt, _ = curve_fit(func, r2, out)
    return float(np.exp(popt[0])), float(np.sqrt(-0.5 / popt[1]))


def gaussFit2d_solver(xy, zn):
    x = xy[0]
    y = xy[1]
    if isinstance(x, list):
        x = np.array(x)
    if isinstance(y, list):
        y = np.array(y)
    if isinstance(zn, list):
        zn = np.array(zn)

    def func(r2, alpha, beta):
        return alpha + beta * r2

    r2, out = [], []
    for i in range(len(x)):
        if zn[i] > 1e-7:
            r2 = r2 + [x[i] ** 2 + y[i] ** 2]
            out = out + [np.log(zn[i])]
    r2 = np.array(r2)
    out = np.array(out)
    popt, _ = curve_fit(func, r2, out)
    return float(np.exp(popt[0])), float(np.sqrt(-0.5 / popt[1]))
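A quick self-check sketch for the fitting helpers above, using synthetic noisy samples. The amplitude, sigma and noise level are arbitrary, and the import path is assumed from the package layout.

import numpy as np
from srf.psf.gaussFunction import gaussFit1d, gaussFit1d_solver, gaussian1d

rng = np.random.default_rng(0)
x = np.linspace(-5, 5, 101)
y = gaussian1d(x, a=2.0, sigma=1.5) + rng.normal(0, 0.01, x.size)

a_fit, sigma_fit = gaussFit1d(x, y)         # non-linear least squares
a_log, sigma_log = gaussFit1d_solver(x, y)  # log-linear variant
# Both pairs should come out roughly (2.0, 1.5); sigma may be returned with
# either sign by curve_fit since it only enters the model squared.
print(a_fit, sigma_fit, a_log, sigma_log)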
PypiClean
/snowflake_ml_python-1.0.6-py3-none-any.whl/snowflake/ml/_internal/utils/parallelize.py
import math from contextlib import contextmanager from timeit import default_timer from typing import Any, Callable, Dict, Generator, Iterable, List, Optional import snowflake.snowpark.functions as F from snowflake import snowpark @contextmanager def timer() -> Generator[Callable[[], float], None, None]: start: float = default_timer() def elapser() -> float: return default_timer() - start yield lambda: elapser() def _flatten(L: Iterable[List[Any]]) -> List[Any]: return [val for sublist in L for val in sublist] def map_dataframe_by_column( df: snowpark.DataFrame, cols: List[str], map_func: Callable[[snowpark.DataFrame, List[str]], snowpark.DataFrame], partition_size: int, statement_params: Optional[Dict[str, Any]] = None, ) -> List[List[Any]]: """Applies the `map_func` to the input DataFrame by parallelizing it over subsets of the column. Because the return results are materialized as Python lists *in memory*, this method should not be used on operations that are expected to return many rows. The `map_func` must satisfy the property that for an input `df` with columns C, then for any partition [*c1, *c2, ..., *cp] on C, then `map_func(df,C) == [*map_func(df,c1), ..., *map_func(df,cp)]`, where * is the list unpacking operator. This means that `map_func(df, col_subset)` should return r rows and c*|col_subset| columns, for constants r and c. Args: df: Input dataset to operate on. cols: List of column names to compute on. Must index into `df`. map_func: The map function applied on each partition of the DataFrame. partition_size: The number of columns to include in each partition. Must be a positive integer. statement_params: Statement parameters for query telemetry. Returns: A Python list representation of the output of the query. Raises: Exception: If the pre-conditions above are not met. """ partition_id_col = "_PARTITION_ID" n_output_cols = 0 unioned_df: Optional[snowpark.DataFrame] = None last_partition_df = None if partition_size < 1: raise Exception(f"Partition size must be a positive integer, but got {partition_size}.") try: n_partitions = math.ceil(len(df[cols].columns) / partition_size) except Exception: raise Exception(f"Provided column names {cols} does not index into the dataset.") # This should never happen if n_partitions == 0: return [[]] # Create one DataFrame for the first n-1 partitions, and one for the last partition. 
for partition_id in range(n_partitions): cols_subset = cols[(partition_id * partition_size) : ((partition_id + 1) * partition_size)] mapped_df = map_func(df, cols_subset) if partition_id == 0: n_output_cols = len(mapped_df.columns) if partition_id == n_partitions - 1: last_partition_df = mapped_df else: if n_output_cols != len(mapped_df.columns): raise Exception("All partitions must contain the same number of columns.") mapped_df = mapped_df.with_column(partition_id_col, F.lit(partition_id)) unioned_df = mapped_df if unioned_df is None else unioned_df.union_all(mapped_df) # Store results in a list of size |n_partitions| x |n_rows| x |n_output_cols| all_results: List[List[List[Any]]] = [[] for _ in range(n_partitions - 1)] # Collect the results of the first n-1 partitions, removing the partition_id column unioned_result = unioned_df.collect(statement_params=statement_params) if unioned_df is not None else [] for row in unioned_result: row_dict = row.as_dict() partition_id = row_dict.pop(partition_id_col, None) if partition_id is None: raise Exception(f"Found unknown partition id {partition_id}.") all_results[partition_id].append(list(row_dict.values())) # Collect the results of the last partition last_partition_result = ( [[]] if last_partition_df is None else [list(row) for row in last_partition_df.collect(statement_params=statement_params)] ) all_results.append(last_partition_result) row_counts = {len(res) for res in all_results} if len(row_counts) > 1: raise Exception( f"All partitions must return the same number of rows, but found multiple row counts: {row_counts}." ) return [_flatten(row) for row in list(zip(*all_results))]
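A hedged sketch of how `map_dataframe_by_column` might be called. The Snowpark session, table name, column names and the per-column aggregate are all assumptions made for the example; only the helper itself comes from the module above.

import snowflake.snowpark.functions as F
from snowflake.ml._internal.utils.parallelize import map_dataframe_by_column

def column_means(frame, cols):
    # One output row and one output column per input column, which satisfies
    # the partition contract described in the docstring above.
    return frame.select(*[F.mean(F.col(c)).alias(c) for c in cols])

# `session` is assumed to be an existing snowflake.snowpark.Session.
df = session.table("MY_FEATURES")
means = map_dataframe_by_column(df, ["A", "B", "C", "D"], column_means, partition_size=2)
# -> [[mean_of_A, mean_of_B, mean_of_C, mean_of_D]]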
PypiClean
/zope.copypastemove-5.0.tar.gz/zope.copypastemove-5.0/src/zope/copypastemove/interfaces.py
"""Copy and Move support """ __docformat__ = 'restructuredtext' from zope.interface import Interface from zope.interface import implementer class IObjectMover(Interface): """Use `IObjectMover(obj)` to move an object somewhere.""" def moveTo(target, new_name=None): """Move this object to the target given. Returns the new name within the target. """ def moveable(): """Returns ``True`` if the object is moveable, otherwise ``False``.""" def moveableTo(target, name=None): """Say whether the object can be moved to the given `target`. Returns ``True`` if it can be moved there. Otherwise, returns ``False``. """ class IObjectCopier(Interface): def copyTo(target, new_name=None): """Copy this object to the `target` given. Returns the new name within the `target`. After the copy is created and before adding it to the target container, an `IObjectCopied` event is published. """ def copyable(): """Returns ``True`` if the object is copyable, otherwise ``False``.""" def copyableTo(target, name=None): """Say whether the object can be copied to the given `target`. Returns ``True`` if it can be copied there. Otherwise, returns ``False``. """ class IContainerItemRenamer(Interface): def renameItem(oldName, newName): """Renames an object in the container from oldName to newName. Raises ItemNotFoundError if oldName doesn't exist in the container. Raises DuplicationError if newName is already used in the container. """ class IPrincipalClipboard(Interface): """Interface for adapters that store/retrieve clipboard information for a principal. Clipboard information consists of mappings of ``{'action':action, 'target':target}``. """ def clearContents(): """Clear the contents of the clipboard""" def addItems(action, targets): """Add new items to the clipboard""" def setContents(clipboard): """Replace the contents of the clipboard by the given value""" def getContents(): """Return the contents of the clipboard""" class IItemNotFoundError(Interface): pass @implementer(IItemNotFoundError) class ItemNotFoundError(LookupError): pass
PypiClean
/invenio_app_ils-1.0.0a60.tar.gz/invenio_app_ils-1.0.0a60/invenio_app_ils/facets.py
import arrow
from elasticsearch_dsl.query import Bool, Q, Range
from flask import current_app


def keyed_range_filter(field, range_query, **kwargs):
    """Create a range filter.

    :param field: Field name.
    :param range_query: Dictionary with available keys and their range options.
    """

    def inner(values):
        args = {}
        for range_key, mappings in range_query.items():
            if range_key in values:
                for key, value in mappings.items():
                    args[key] = value
        args.update(kwargs.copy())
        return Range(**{field: args})

    return inner


def default_value_when_missing_filter(field, missing_val):
    """Create a custom exists filter.

    :param field: Field name.
    :param missing_val: Value that represents documents where the field is missing.
    :returns: Function that returns the Terms query.
    """

    def inner(values):
        if missing_val in values:
            return Bool(**{"must_not": {"exists": {"field": field}}})
        else:
            return Q("terms", **{field: values})

    return inner


def exists_value_filter(field, filter_value):
    """Create a custom filter that filters by existing value.

    :param field: Field name.
    :param filter_value: Filter value.
    """

    def inner(values):
        if filter_value in values:
            return Bool(**{"must": {"exists": {"field": field}}})
        else:
            return Bool(**{"must_not": {"exists": {"field": field}}})

    return inner


def overdue_loans_filter(field):
    """Create a custom filter for ongoing loans.

    :param field: Field to filter.
    """

    def inner(values):
        range_query = {
            "Overdue": {"lt": str(arrow.utcnow().date())},
            "Upcoming return": {
                "lte": str(
                    current_app.config["CIRCULATION_POLICIES"][
                        "upcoming_return_range"
                    ]().date()
                ),
                "gte": str(arrow.utcnow().date()),
            },
        }
        args = {}
        for range_key, mappings in range_query.items():
            if range_key in values:
                for key, value in mappings.items():
                    args[key] = value
        return Range(**{field: args}) & Q(
            "terms",
            **{"state": current_app.config["CIRCULATION_STATES_LOAN_ACTIVE"]}
        )

    return inner


def overdue_agg():
    """Create a custom aggregation with dynamic dates."""
    return dict(
        filter=dict(
            terms=dict(
                state=current_app.config["CIRCULATION_STATES_LOAN_ACTIVE"]
            )
        ),
        aggs=dict(
            end_date=dict(
                range=dict(
                    field="end_date",
                    ranges=[
                        {"key": "Overdue", "to": str((arrow.utcnow()).date())},
                        {
                            "key": "Upcoming return",
                            "from": str(arrow.utcnow().date()),
                            "to": str(
                                current_app.config["CIRCULATION_POLICIES"][
                                    "upcoming_return_range"
                                ]().date()
                            ),
                        },
                    ],
                )
            )
        ),
    )


def date_range_filter(field, comparator):
    """Create a range filter.

    :param field: Field name.
    :param comparator: Comparison we want with the supplied date.
    """

    def inner(values):
        try:
            input_date = str(arrow.get(values[0]).date())
        except arrow.parser.ParserError:
            raise ValueError("Input should be a date")
        return Range(**{field: {comparator: input_date}})

    return inner
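A small sketch showing how the filter factories above are used: each factory returns a closure that turns the selected facet values into an elasticsearch-dsl query. The field names and ranges are invented for the example.

from invenio_app_ils.facets import date_range_filter, keyed_range_filter

price_filter = keyed_range_filter(
    "price", {"cheap": {"lt": 10}, "expensive": {"gte": 100}}
)
print(price_filter(["cheap"]).to_dict())     # {'range': {'price': {'lt': 10}}}

due_before = date_range_filter("end_date", "lte")
print(due_before(["2023-01-31"]).to_dict())  # {'range': {'end_date': {'lte': '2023-01-31'}}}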
PypiClean
/smc-python-0.6.2.tar.gz/smc-python-0.6.2/smc/elements/situations.py
from smc.base.model import Element, ElementRef, ElementList, SubElement,\ ElementCache, ElementCreator from smc.api.exceptions import CreateElementFailed, ElementNotFound from smc.elements.other import SituationTag SEVERITY = {10: 'critical', 7: 'high', 4: 'low', 1: 'information'} def _severity_by_name(name): """ Return the severity integer value by it's name. If not found, return 'information'. :rtype: int """ for intvalue, sevname in SEVERITY.items(): if name.lower() == sevname: return intvalue return 1 class SituationParameter(SubElement): """ A situation parameter defines the parameter type used to define the inspection situation context. For example, Regular Expression would be a situation parameter. """ @property def type(self): """ The type of this situation parameter in textual format. For example, integer, regexp, etc. :rtype: str """ return self.data.get('type') @property def display_name(self): """ The display name as shown in the SMC :rtype: str """ return self.data.get('display_name') @property def order(self): """ The order placement for this parameter. This is only relevant when there are multiple parameters in an inspection context definition. :rtype: int """ return self.data.get('order', 0) class SituationParameterValue(SubElement): """ The situation parameter value is associated with a situation parameter and as the name implies, provides the value payload for the given parameter. """ class SituationContextGroup(Element): """ A situation context group is simply a top level group for organizing individual situation contexts. This is a top level element that can be retrieved directly:: >>> from smc.elements.situations import SituationContextGroup >>> for group in SituationContextGroup.objects.all(): ... group ... SituationContextGroup(name=DoS Detection) SituationContextGroup(name=FINGER) SituationContextGroup(name=SMTP Deprecated) SituationContextGroup(name=PPTP) SituationContextGroup(name=IPv6) SituationContextGroup(name=NETBIOS) SituationContextGroup(name=SIP) SituationContextGroup(name=SNMP) ... :ivar list(InspectionContext, InspectionContextGroup) sub_elements: the members of this inspection context group """ typeof = 'situation_context_group' sub_elements = ElementList('sub_elements') class SituationContext(Element): """ A situation context can be used by an inspection situation or by a correlated situation. The context defines the situation parameters used to define a pattern match and how that match is made. :ivar str name: name of this situation context :ivar str comment: comment for the context """ @property def description(self): """ Description for this context :rtype: str """ return self.data.get('description', '') @property def situation_parameters(self): """ Situation parameters defining detection logic for the context. This will return a list of SituationParameter indicating how the detection is made, i.e. regular expression, integer value, etc. :rtype: list(SituationParameter) """ for param in self.data.get('situation_parameters', []): cache = ElementCache(data=self.make_request(href=param)) yield type('SituationParameter', (SituationParameter,), { 'data': cache})(name=cache.name, type=cache.type, href=param) class InspectionSituationContext(SituationContext): """ Represents groups of situation contexts that can be characterized by a common technique used for identifying the situation. Contexts also typically have in common the type of situation they apply to, i.e. 
`File Text Stream` would be an inspection context, and encapsulates inspection situations such as ActiveX in text file stream detection, etc. """ typeof = 'inspection_situation_context' class CorrelationSituationContext(SituationContext): """ Correlation Contexts define the patterns for matching groups of related events in traffic. Examples of correlation contexts are Count, Compress, Group, Match and Sequence. See SMC documentation for more details on each context type and meaning. """ typeof = 'correlation_situation_context' class Situation(Element): """ Situation defines a common interface for inspection and correlated situations. """ situation_context = ElementRef('situation_context_ref') @property def severity(self): """ The severity of this inspection situation, critical, high, low, information :rtype: int """ return SEVERITY.get(self.data.get('severity')) @property def description(self): """ The description for this situation :rtype: str """ return self.data.get('description', '') @property def attacker(self): """ How the Attacker is determined when the Situation matches. This information is used for blacklisting and in log entries and may be None :rtype: str or None """ return self.data.get('attacker') @property def target(self): """ How the Target is determined when the Situation matches. This information is used for blacklisting and in log entries and may be None :rtype: str or None """ return self.data.get('target') @property def parameter_values(self): """ Parameter values for this inspection situation. This correlate to the the situation_context. :rtype: list(SituationParameterValue) """ for param in self.data.get('parameter_values', []): cache = ElementCache(data=self.make_request(href=param)) name = '{}'.format(cache.type.title()).replace('_', '') yield type(name, (SituationParameterValue,), { 'data': cache})(name=cache.name, type=cache.type, href=param) class InspectionSituation(Situation): """ It is an element that identifies and describes detected events in the traffic or in the operation of the system. Situations contain the Context information, i.e., a pattern that the system is to look for in the inspected traffic. """ typeof = 'inspection_situation' @classmethod def create(cls, name, situation_context, attacker=None, target=None, severity='information', situation_type=None, description=None, comment=None): """ Create an inspection situation. :param str name: name of the situation :param InspectionSituationContext situation_context: The situation context type used to define this situation. Identifies the proper parameter that identifies how the situation is defined (i.e. regex, etc). :param str attacker: Attacker information, used to identify last packet the triggers attack and is only used for blacklisting. Values can be packet_source, packet_destination, connection_source, or connection_destination :param str target: Target information, used to identify the last packet that triggers the attack and is only used for blacklisting. Values can be packet_source, packet_destination, connection_source, or connection_destination :param str severity: severity for this situation. 
Valid values are critical, high, low, information :param str description: optional description :param str comment: optional comment """ try: json = { 'name': name, 'comment': comment, 'description': description, 'situation_context_ref': situation_context.href, 'attacker': attacker, 'victim': target, 'severity': _severity_by_name(severity)} element = ElementCreator(cls, json) tag = situation_type or SituationTag('User Defined Situations') tag.add_element(element) return element except ElementNotFound as e: raise CreateElementFailed('{}. Inspection Situation Contexts require SMC ' 'version 6.5 and above.'.format(str(e))) def create_regular_expression(self, regexp): """ Create a regular expression for this inspection situation context. The inspection situation must be using an inspection context that supports regex. :param str regexp: regular expression string :raises CreateElementFailed: failed to modify the situation """ for parameter in self.situation_context.situation_parameters: if parameter.type == 'regexp': return self.add_parameter_value( 'reg_exp_situation_parameter_values', **{'parameter_ref': parameter.href, 'reg_exp': regexp}) # Treat as raw string raise CreateElementFailed('The situation does not support a regular ' 'expression as a context value.') def add_parameter_value(self, resource, **value): print(resource, value) return self.make_request( CreateElementFailed, method='create', resource=resource, json=value) def find_vulnerabilities(self): pass @property def vulnerability_references(self): """ If this inspection situation has associated CVE, OSVDB, BID, etc references, this will return those reference IDs :rtype: list(str) """ return self.data.get('vulnerability_references', []) class CorrelationSituation(Situation): """ Correlation Situations are used by NGFW Engines and Log Servers to conduct further analysis of detected events. Correlation Situations do not handle traffic directly. Instead they analyze the events generated by matches to Situations found in traffic. Correlation Situations use Event Binding elements to define the log events that bind together different types of events in traffic. """ typeof = 'correlation_situation' @classmethod def create(cls): pass
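
# A minimal usage sketch, assuming an authenticated smc-python session and an
# SMC version >= 6.5; the context and situation names below are illustrative
# placeholders, and the chosen context must expose a regular-expression
# parameter for create_regular_expression() to succeed.
def _example_create_inspection_situation():
    context = InspectionSituationContext('File Text Stream')
    situation = InspectionSituation.create(
        name='my_custom_situation',
        situation_context=context,
        severity='high',
        description='Match a suspicious token in a text stream')
    situation.create_regular_expression(r'suspicious_token')
    return situation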
PypiClean
/saltext.sap_car-1.0.2.tar.gz/saltext.sap_car-1.0.2/src/saltext/sap_car/_states/sap_car.py
import logging import salt.utils.files # Globals log = logging.getLogger(__name__) __virtualname__ = "sap_car" def __virtual__(): """ Only works on Linux and if SAPCAR is available in PATH """ if "sap_car.list" not in __salt__: return ( False, "The sap_car execution module failed to load.", ) return __virtualname__ def extracted(name, options=None, output_dir=None, user=None, group=None): """ Extracts a SAPCAR archive if necessary. name Path to the sar file to be extracted options Additional options to SAPCAR command output_dir Directory where archive will be extracted. It creates the dir if the path doesn't exist. If it's not set the current dir is used user User to execute the SAPCAR command group Group to execute """ log.debug(f"Running function with name={name}") ret = { "name": name, "changes": {"old": [], "new": []}, "result": True if not __opts__["test"] else None, "comment": "", } log.debug("Listing files of archive") archive_file_list = __salt__["sap_car.list"](path=name, user=user, group=group) if not isinstance(archive_file_list, list): log.error("An error occured during list of files") ret["comment"] = "An error occured during list of files, check the log files" ret["result"] = False return ret log.debug("Listing files of target dir") disk_file_list = salt.utils.files.list_files(output_dir) # returns full paths disk_file_list.remove(output_dir) # is part of list by default disk_file_list = [x.replace(output_dir, "") for x in disk_file_list] log.debug("Checking if files need to be extracted") files_to_extract = [] for archive_file in archive_file_list: if archive_file not in disk_file_list: files_to_extract.append(archive_file) else: disk_file_list.remove(archive_file) # should increase performance for large archives if not files_to_extract: log.debug("All files are already extracted") ret["comment"] = "All files are already extracted" ret["result"] = True ret["changes"] = {} return ret if files_to_extract == archive_file_list: log.debug("All files need to be extracted") files_to_extract = None log.debug("Extracting files") if __opts__["test"]: ret["comment"] = f"Extracted archive {name} to {output_dir}" if files_to_extract: ret["changes"]["new"] = f"Would extract the following files:\n{files_to_extract}" else: ret["changes"]["new"] = f"Would extract all files from {name}" ret["result"] = None else: result = __salt__["sap_car.extract"]( path=name, files=files_to_extract, options=options, output_dir=output_dir, user=user, group=group, ) if not isinstance(result, bool): log.error(f"An error occured during execution:\n{result}") ret["comment"] = "An error occured during execution, check the log files" ret["result"] = False return ret if not result: log.error(f"Could not extract archive {name}") ret["comment"] = f"Could not extract archive {name}" ret["result"] = False else: ret["comment"] = f"Extracted archive {name} to {output_dir}" if files_to_extract: ret["changes"]["new"] = files_to_extract else: ret["changes"]["new"] = f"Extracted all files from {name}" ret["result"] = True if not ret["changes"]["new"]: del ret["changes"]["new"] if not ret["changes"]["old"]: del ret["changes"]["old"] log.debug(f"Returning:\n{ret}") return ret
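
# A minimal sketch of how this state is typically referenced from an SLS file;
# the archive path, target directory and user/group below are illustrative
# placeholders.
#
#   extract_sap_kernel:
#     sap_car.extracted:
#       - name: /mnt/downloads/SAPEXE.SAR
#       - output_dir: /usr/sap/SID/kernel
#       - user: sidadm
#       - group: sapsys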
PypiClean
/pulumi_azure_nextgen-0.6.2a1613157620.tar.gz/pulumi_azure_nextgen-0.6.2a1613157620/pulumi_azure_nextgen/network/v20200301/load_balancer.py
import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union from ... import _utilities, _tables from . import outputs from ._enums import * from ._inputs import * __all__ = ['LoadBalancer'] class LoadBalancer(pulumi.CustomResource): def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, backend_address_pools: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BackendAddressPoolArgs']]]]] = None, frontend_ip_configurations: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['FrontendIPConfigurationArgs']]]]] = None, id: Optional[pulumi.Input[str]] = None, inbound_nat_pools: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InboundNatPoolArgs']]]]] = None, inbound_nat_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InboundNatRuleArgs']]]]] = None, load_balancer_name: Optional[pulumi.Input[str]] = None, load_balancing_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['LoadBalancingRuleArgs']]]]] = None, location: Optional[pulumi.Input[str]] = None, outbound_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['OutboundRuleArgs']]]]] = None, probes: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ProbeArgs']]]]] = None, resource_group_name: Optional[pulumi.Input[str]] = None, sku: Optional[pulumi.Input[pulumi.InputType['LoadBalancerSkuArgs']]] = None, tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, __props__=None, __name__=None, __opts__=None): """ LoadBalancer resource. :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BackendAddressPoolArgs']]]] backend_address_pools: Collection of backend address pools used by a load balancer. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['FrontendIPConfigurationArgs']]]] frontend_ip_configurations: Object representing the frontend IPs to be used for the load balancer. :param pulumi.Input[str] id: Resource ID. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InboundNatPoolArgs']]]] inbound_nat_pools: Defines an external port range for inbound NAT to a single backend port on NICs associated with a load balancer. Inbound NAT rules are created automatically for each NIC associated with the Load Balancer using an external port from this range. Defining an Inbound NAT pool on your Load Balancer is mutually exclusive with defining inbound Nat rules. Inbound NAT pools are referenced from virtual machine scale sets. NICs that are associated with individual virtual machines cannot reference an inbound NAT pool. They have to reference individual inbound NAT rules. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InboundNatRuleArgs']]]] inbound_nat_rules: Collection of inbound NAT Rules used by a load balancer. Defining inbound NAT rules on your load balancer is mutually exclusive with defining an inbound NAT pool. Inbound NAT pools are referenced from virtual machine scale sets. NICs that are associated with individual virtual machines cannot reference an Inbound NAT pool. They have to reference individual inbound NAT rules. :param pulumi.Input[str] load_balancer_name: The name of the load balancer. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['LoadBalancingRuleArgs']]]] load_balancing_rules: Object collection representing the load balancing rules Gets the provisioning. 
:param pulumi.Input[str] location: Resource location. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['OutboundRuleArgs']]]] outbound_rules: The outbound rules. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ProbeArgs']]]] probes: Collection of probe objects used in the load balancer. :param pulumi.Input[str] resource_group_name: The name of the resource group. :param pulumi.Input[pulumi.InputType['LoadBalancerSkuArgs']] sku: The load balancer SKU. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags. """ if __name__ is not None: warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning) resource_name = __name__ if __opts__ is not None: warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning) opts = __opts__ if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version() if opts.id is None: if __props__ is not None: raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource') __props__ = dict() __props__['backend_address_pools'] = backend_address_pools __props__['frontend_ip_configurations'] = frontend_ip_configurations __props__['id'] = id __props__['inbound_nat_pools'] = inbound_nat_pools __props__['inbound_nat_rules'] = inbound_nat_rules if load_balancer_name is None and not opts.urn: raise TypeError("Missing required property 'load_balancer_name'") __props__['load_balancer_name'] = load_balancer_name __props__['load_balancing_rules'] = load_balancing_rules __props__['location'] = location __props__['outbound_rules'] = outbound_rules __props__['probes'] = probes if resource_group_name is None and not opts.urn: raise TypeError("Missing required property 'resource_group_name'") __props__['resource_group_name'] = resource_group_name __props__['sku'] = sku __props__['tags'] = tags __props__['etag'] = None __props__['name'] = None __props__['provisioning_state'] = None __props__['resource_guid'] = None __props__['type'] = None alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:network:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/latest:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20150501preview:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20150615:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20160330:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20160601:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20160901:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20161201:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20170301:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20170601:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20170801:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20170901:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20171001:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20171101:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20180101:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20180201:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20180401:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20180601:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20180701:LoadBalancer"), 
pulumi.Alias(type_="azure-nextgen:network/v20180801:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20181001:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20181101:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20181201:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20190201:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20190401:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20190601:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20190701:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20190801:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20190901:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20191101:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20191201:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20200401:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20200501:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20200601:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20200701:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20200801:LoadBalancer")]) opts = pulumi.ResourceOptions.merge(opts, alias_opts) super(LoadBalancer, __self__).__init__( 'azure-nextgen:network/v20200301:LoadBalancer', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None) -> 'LoadBalancer': """ Get an existing LoadBalancer resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. """ opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = dict() return LoadBalancer(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name="backendAddressPools") def backend_address_pools(self) -> pulumi.Output[Optional[Sequence['outputs.BackendAddressPoolResponse']]]: """ Collection of backend address pools used by a load balancer. """ return pulumi.get(self, "backend_address_pools") @property @pulumi.getter def etag(self) -> pulumi.Output[str]: """ A unique read-only string that changes whenever the resource is updated. """ return pulumi.get(self, "etag") @property @pulumi.getter(name="frontendIPConfigurations") def frontend_ip_configurations(self) -> pulumi.Output[Optional[Sequence['outputs.FrontendIPConfigurationResponse']]]: """ Object representing the frontend IPs to be used for the load balancer. """ return pulumi.get(self, "frontend_ip_configurations") @property @pulumi.getter(name="inboundNatPools") def inbound_nat_pools(self) -> pulumi.Output[Optional[Sequence['outputs.InboundNatPoolResponse']]]: """ Defines an external port range for inbound NAT to a single backend port on NICs associated with a load balancer. Inbound NAT rules are created automatically for each NIC associated with the Load Balancer using an external port from this range. Defining an Inbound NAT pool on your Load Balancer is mutually exclusive with defining inbound Nat rules. Inbound NAT pools are referenced from virtual machine scale sets. NICs that are associated with individual virtual machines cannot reference an inbound NAT pool. They have to reference individual inbound NAT rules. 
""" return pulumi.get(self, "inbound_nat_pools") @property @pulumi.getter(name="inboundNatRules") def inbound_nat_rules(self) -> pulumi.Output[Optional[Sequence['outputs.InboundNatRuleResponse']]]: """ Collection of inbound NAT Rules used by a load balancer. Defining inbound NAT rules on your load balancer is mutually exclusive with defining an inbound NAT pool. Inbound NAT pools are referenced from virtual machine scale sets. NICs that are associated with individual virtual machines cannot reference an Inbound NAT pool. They have to reference individual inbound NAT rules. """ return pulumi.get(self, "inbound_nat_rules") @property @pulumi.getter(name="loadBalancingRules") def load_balancing_rules(self) -> pulumi.Output[Optional[Sequence['outputs.LoadBalancingRuleResponse']]]: """ Object collection representing the load balancing rules Gets the provisioning. """ return pulumi.get(self, "load_balancing_rules") @property @pulumi.getter def location(self) -> pulumi.Output[Optional[str]]: """ Resource location. """ return pulumi.get(self, "location") @property @pulumi.getter def name(self) -> pulumi.Output[str]: """ Resource name. """ return pulumi.get(self, "name") @property @pulumi.getter(name="outboundRules") def outbound_rules(self) -> pulumi.Output[Optional[Sequence['outputs.OutboundRuleResponse']]]: """ The outbound rules. """ return pulumi.get(self, "outbound_rules") @property @pulumi.getter def probes(self) -> pulumi.Output[Optional[Sequence['outputs.ProbeResponse']]]: """ Collection of probe objects used in the load balancer. """ return pulumi.get(self, "probes") @property @pulumi.getter(name="provisioningState") def provisioning_state(self) -> pulumi.Output[str]: """ The provisioning state of the load balancer resource. """ return pulumi.get(self, "provisioning_state") @property @pulumi.getter(name="resourceGuid") def resource_guid(self) -> pulumi.Output[str]: """ The resource GUID property of the load balancer resource. """ return pulumi.get(self, "resource_guid") @property @pulumi.getter def sku(self) -> pulumi.Output[Optional['outputs.LoadBalancerSkuResponse']]: """ The load balancer SKU. """ return pulumi.get(self, "sku") @property @pulumi.getter def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]: """ Resource tags. """ return pulumi.get(self, "tags") @property @pulumi.getter def type(self) -> pulumi.Output[str]: """ Resource type. """ return pulumi.get(self, "type") def translate_output_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop def translate_input_property(self, prop): return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
PypiClean
/isedit-0.3.0.tar.gz/isedit-0.3.0/js/node_modules/moment/locale/en-ie.js
;(function (global, factory) { typeof exports === 'object' && typeof module !== 'undefined' && typeof require === 'function' ? factory(require('../moment')) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, (function (moment) { 'use strict'; //! moment.js locale configuration var enIe = moment.defineLocale('en-ie', { months: 'January_February_March_April_May_June_July_August_September_October_November_December'.split( '_' ), monthsShort: 'Jan_Feb_Mar_Apr_May_Jun_Jul_Aug_Sep_Oct_Nov_Dec'.split('_'), weekdays: 'Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday'.split( '_' ), weekdaysShort: 'Sun_Mon_Tue_Wed_Thu_Fri_Sat'.split('_'), weekdaysMin: 'Su_Mo_Tu_We_Th_Fr_Sa'.split('_'), longDateFormat: { LT: 'HH:mm', LTS: 'HH:mm:ss', L: 'DD/MM/YYYY', LL: 'D MMMM YYYY', LLL: 'D MMMM YYYY HH:mm', LLLL: 'dddd D MMMM YYYY HH:mm', }, calendar: { sameDay: '[Today at] LT', nextDay: '[Tomorrow at] LT', nextWeek: 'dddd [at] LT', lastDay: '[Yesterday at] LT', lastWeek: '[Last] dddd [at] LT', sameElse: 'L', }, relativeTime: { future: 'in %s', past: '%s ago', s: 'a few seconds', ss: '%d seconds', m: 'a minute', mm: '%d minutes', h: 'an hour', hh: '%d hours', d: 'a day', dd: '%d days', M: 'a month', MM: '%d months', y: 'a year', yy: '%d years', }, dayOfMonthOrdinalParse: /\d{1,2}(st|nd|rd|th)/, ordinal: function (number) { var b = number % 10, output = ~~((number % 100) / 10) === 1 ? 'th' : b === 1 ? 'st' : b === 2 ? 'nd' : b === 3 ? 'rd' : 'th'; return number + output; }, week: { dow: 1, // Monday is the first day of the week. doy: 4, // The week that contains Jan 4th is the first week of the year. }, }); return enIe; })));
PypiClean
/watchmen_lineage-16.5.26.tar.gz/watchmen_lineage-16.5.26/src/watchmen_lineage/utils/constant_utils.py
from enum import Enum from typing import List from watchmen_lineage.model.ast import ConstantAST, FuncParameter, ASTObject, FuncAst class AstContext(str, Enum): START_PARAMETER = "start_parameter" START_OBJECT_FUNC = "start_object_func" START_FUNCTION_PARAMETER = "start_function_parameter" START_FUNCTION = "start_function" END_AST = "end_ast" END_PARAM = "end parameter" CONTINUE = "continue" grammar_dict = { 'word': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', "+", "-", "_"], # list of words 'punctuation': ['&', '{', '}', '.', '(', ')', ','], # list of punctuation marks } function_dictionary = {'nextSeq', 'dateDiff', 'now', 'sum', 'length', 'old', 'count', 'monthDiff', 'yearDiff', 'fmtDate', 'moveDate', "dayDiff"} def ask_function(curr_word): result = [] for func in function_dictionary: if func == curr_word: result.append(func) if len(result) == 1: return result[0] else: return "continue" def need_ask_function(context: AstContext): return context == AstContext.START_FUNCTION or AstContext.START_FUNCTION_PARAMETER def find_function(func_result: str): return func_result != CONTINUE def process_ast_function(context: AstContext, current_ast: ConstantAST, current_param: FuncParameter, func_result: str) -> ASTObject: if context == AstContext.START_FUNCTION: current_ast.funcAst = FuncAst(name=func_result) return ASTObject.function elif context == AstContext.START_FUNCTION_PARAMETER: ast = ConstantAST() ast.funcAst = FuncAst(name=func_result) current_ast.funcAst.params.append(ast) return ASTObject.function elif context == AstContext.START_OBJECT_FUNC: ast = ConstantAST() ast.funcAst = FuncAst(name=func_result) current_param.value.append(ast) current_ast.params.append(current_param) return ASTObject.method def parse_constant_parameter(constant: str): # Initialize variables previous_ch = None result: List = [] context = None current: ASTObject = None constant_value = '' current_ast = None current_param = None for ch in constant: # Check if the character matches any of the rules in the grammar dictionary for key, val in grammar_dict.items(): if ch in val: if is_word(key): constant_value += ch if need_ask_function(context): func_result = ask_function(constant_value) if find_function(func_result): current = process_ast_function(context, current_ast, current_param, func_result) elif is_punctuation(key): constant_value, context, current_ast, current_param, current = process_punctuation(ch, constant_value, context, current_ast, current_param, previous_ch, result, current) previous_ch = ch # Add the last word to the result return result def process_punctuation(ch, constant_value: str, context: AstContext, current_ast: ConstantAST, current_param: FuncParameter, previous_ch, result: List, current: ASTObject): if ch == "{": current_ast = ConstantAST() elif ch == "&": context = start_func_or_method(context, previous_ch) elif ch == "}": current_ast, current_param = end_ast(constant_value, context, current_ast, current_param, result, current) context = AstContext.END_AST elif ch == "(": ## start parameter current_param = FuncParameter() context = AstContext.START_PARAMETER elif ch == ")": current_param = end_param(constant_value, context, current_ast, current_param, current) context = AstContext.END_PARAM elif "." 
== ch: current_param, current_ast = call_method(constant_value, current_ast, context, current_param) current = ASTObject.method elif "," == ch: current_param, context = split_param(constant_value, current_ast, current_param, current, context) return '', context, current_ast, current_param, current def split_param(constant_value: str, current_ast: ConstantAST, current_param: FuncParameter, current: ASTObject, context: AstContext): if current == ASTObject.method and context == AstContext.START_PARAMETER: current_param.method = constant_value else: current_param.value.append(constant_value) current_ast.funcAst.params.append(current_param) current_param = None if context == AstContext.START_FUNCTION_PARAMETER: context = AstContext.START_PARAMETER return current_param, context def call_method(constant_value, current_ast: ConstantAST, context: AstContext, current_param: FuncParameter): if context == AstContext.START_FUNCTION: current_ast.funcAst = FuncAst(name=constant_value) else: if current_param is None: current_param = FuncParameter() current_param.value.append(constant_value) return current_param, current_ast def end_param(constant_value: str, context: AstContext, current_ast: ConstantAST, current_param: FuncParameter, current: ASTObject): if current_param: if context == AstContext.START_FUNCTION_PARAMETER: current_ast.funcAst.params.append(current_param) if current == ASTObject.method and context == AstContext.START_PARAMETER: current_param.method = constant_value current_ast.funcAst.params.append(current_param) else: current_param.value.append(constant_value) else: if current != ASTObject.function: current_ast.funcAst.params.append(constant_value) current_param = None return current_param def empty_context(context: AstContext): return context is None or context == AstContext.END_AST def end_ast(constant_value: str, context: AstContext, current_ast: ConstantAST, current_param: FuncParameter, result: List, current: ASTObject): if current_param: current_ast.params.append(current_param) if current == ASTObject.method and context == AstContext.START_FUNCTION: current_ast.funcAst.method = constant_value elif context == AstContext.START_OBJECT_FUNC and current == ASTObject.method and current_param: current_param.method = constant_value elif empty_context(context) and current == ASTObject.method and current_param: current_param.method = constant_value if constant_value and current_param is None: parameter = FuncParameter() parameter.value.append(constant_value) current_ast.params.append(parameter) current_param = None result.append(current_ast) current_ast = None return current_ast, current_param def start_func_or_method(context, previous_ch): if context == AstContext.START_PARAMETER and previous_ch != ".": context = AstContext.START_FUNCTION_PARAMETER elif previous_ch == '.': context = AstContext.START_OBJECT_FUNC else: context = AstContext.START_FUNCTION return context def is_punctuation(key: str): return key == "punctuation" def is_word(key: str): return key == 'word' # # # print(parse_constant_parameter("{&dayDiff(&now,eb_policy_listing.effective_dt)}")) # # # # print(parse_constant_parameter("{x.t} {b.s}")) # print(parse_constant_parameter("{da.b.c} {b.s}")) # print(parse_constant_parameter("{&nextSeq}")) # print(parse_constant_parameter("{&daydiff(dataset_clm_case.settle_approve_date,dataset_clm_case.report_time)}")) # print(parse_constant_parameter("{&x.daa}")) # print(parse_constant_parameter("{&dateDiff(test.date,test2.date2)}"))
PypiClean
/PyOblv-0.2.2.tar.gz/PyOblv-0.2.2/cli/service.py
import json import typer from . import utils app = typer.Typer() @app.command(help="To create a new service") def create(repo_owner: str = typer.Argument(..., help=("Repository Owner")), repo_name: str = typer.Argument(...,help=("Repository Name")), ref: str = typer.Argument(..., help=("Service ref name")), tag : bool = typer.Option(False, "--tag",help="If provided, the ref is considered to be a tag on repository"), service_file : str = typer.Option(None,help="Service yaml file in json format. Not allowed with --tag")): try: client = utils.read_credentials() if tag: client.add_service(repo_owner=repo_owner,repo_name=repo_name,ref=ref,ref_type="tag") else: args = {} if service_file!=None: with open(service_file,"r") as f: args = json.load(f) client.add_service(repo_owner=repo_owner,repo_name=repo_name,ref=ref, data=args) except FileNotFoundError as e: print("Kindly login before performing this action") except Exception as e: print(e) else: print("Successfully added the service") @app.command(help="To remove an existing service") def remove(repo_owner: str = typer.Argument(..., help=("Repository Owner")), repo_name: str = typer.Argument(...,help=("Repository Name")), ref: str = typer.Argument(..., help=("Service ref name")), tag : bool = typer.Option(False, "--tag", help="If provided, the ref is considered to be a tag on repository")): try: client = utils.read_credentials() if tag: client.remove_service(repo_owner=repo_owner,repo_name=repo_name,ref=ref,ref_type="tag") else: client.remove_service(repo_owner=repo_owner,repo_name=repo_name,ref=ref) except FileNotFoundError as e: print("Kindly login before performing this action") except Exception as e: print(e) else: print("Successfully removed the service") @app.command(help="To fetch user's services list") def fetch(): try: client = utils.read_credentials() print(client.user_services()) except FileNotFoundError as e: print("Kindly login before performing this action") except Exception as e: print(e) @app.command(help="To updates user's service") def update(repo_owner: str = typer.Argument(..., help=("Repository Owner")), repo_name: str = typer.Argument(...,help=("Repository Name")), ref: str = typer.Argument(..., help=("Service ref name")), tag : bool = typer.Option(False, "--tag", help="If provided, the ref is considered to be a tag on repository"), service_file : str = typer.Option(None,help="Service yaml file in json format. Not allowed with --tag")): try: client = utils.read_credentials() if tag: client.update_service(repo_owner=repo_owner,repo_name=repo_name,ref=ref,ref_type="tag") else: args = {} if service_file!=None: with open(service_file,"r") as f: args = json.load(f) client.update_service(repo_owner=repo_owner,repo_name=repo_name,ref=ref, data=args) except FileNotFoundError as e: print("Kindly login before performing this action") except Exception as e: print(e) else: print("Successfully updated the service")
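
# A minimal sketch of exercising the commands above programmatically with
# Typer's test runner; the repository owner/name, ref and service file below
# are illustrative placeholders, and a prior login is assumed so that
# credentials can be read.
def _example_invoke_commands():
    from typer.testing import CliRunner

    runner = CliRunner()
    runner.invoke(app, ["fetch"])  # list the user's services
    return runner.invoke(
        app,
        ["create", "my-org", "my-repo", "v1.0.0", "--service-file", "service.json"],
    )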
PypiClean
/dueros_bot-3.2.0-py3-none-any.whl/dueros/directive/VideoPlayer/Play.py
# description:
# author:jack
# create_time: 2018/5/31

"""
VideoPlayer video playback directive.
See the documentation: https://dueros.baidu.com/didp/doc/dueros-bot-platform/dbp-custom/videoplayer_markdown#VideoPlayer.Play%E6%8C%87%E4%BB%A4
"""

from dueros.directive.BaseDirective import BaseDirective
from dueros.directive.AudioPlayer.PlayBehaviorEnum import PlayBehaviorEnum
from dueros.Utils import Utils


class VideoPlayer(BaseDirective):

    def __init__(self, url, play_behavior=PlayBehaviorEnum.REPLACE_ALL):
        super(VideoPlayer, self).__init__('VideoPlayer.Play')
        self.data['playBehavior'] = play_behavior.value
        self.data['videoItem'] = {
            'stream': {
                'url': url,
                'offsetInMilliseconds': 0,
                'token': self.gen_token()
            }
        }

    def set_token(self, token):
        """
        Set the video token. Note: this token is associated with the token of every item
        in the playlist; when the tokens are identical, the item is marked as currently playing.
        :param token:
        :return:
        """
        if token:
            self.data['videoItem']['stream']['token'] = token

    def get_token(self):
        """
        Get the token.
        :return:
        """
        return self.data['videoItem']['stream']['token']

    def set_url(self, url):
        """
        Set the video URL.
        :param url:
        :return:
        """
        if isinstance(url, str):
            self.data['videoItem']['stream']['url'] = url

    def set_offset_in_milliseconds(self, milliseconds):
        """
        Play the video starting from the given offset.
        :param milliseconds:
        :return:
        """
        milliseconds = Utils.convert_number(milliseconds)
        if milliseconds:
            self.data['videoItem']['stream']['offsetInMilliseconds'] = milliseconds

    def set_expiry_time(self, expiry_time):
        """
        Expiry time of the stream, in ISO8601 format.
        :param expiry_time:
        :return:
        """
        if isinstance(expiry_time, str):
            self.data['videoItem']['stream']['expiryTime'] = expiry_time

    def set_report_delay_in_ms(self, report_delay_ms):
        """
        Set a directive attribute. If this field is present, the device should report the
        ProgressReportDelayElapsed event once playback of this video item passes the
        specified time; if it is absent, the device does not need to report
        ProgressReportDelayElapsed.
        :param report_delay_ms:
        :return:
        """
        report_delay_ms = Utils.convert_number(report_delay_ms)
        if report_delay_ms:
            if 'progressReport' not in self.data['videoItem']['stream']:
                self.data['videoItem']['stream']['progressReport'] = {}
            self.data['videoItem']['stream']['progressReport']['progressReportDelayInMilliseconds'] = int(report_delay_ms)

    def set_report_interval_in_ms(self, interval_ms):
        """
        Set a directive attribute. Interval for periodic progress reports: if this field is
        present, the device should report the ProgressReportIntervalElapsed event at the
        specified interval while playing this video item; if it is absent, the device does
        not need to report ProgressReportIntervalElapsed.
        :param interval_ms:
        :return:
        """
        interval_ms = Utils.convert_number(interval_ms)
        if interval_ms:
            if 'progressReport' not in self.data['videoItem']['stream']:
                self.data['videoItem']['stream']['progressReport'] = {}
            self.data['videoItem']['stream']['progressReport']['progressReportIntervalInMilliseconds'] = int(interval_ms)

    def set_expected_previous_token(self, previous_token):
        """
        Set a directive attribute. If this field is present, it should match the token of the
        previous video item; if it does not match, this Play directive is not executed.
        :param previous_token:
        :return:
        """
        self.data['videoItem']['stream']['expectedPreviousToken'] = previous_token

    def set_title(self, title):
        """
        Set the video title.
        :param title:
        :return:
        """
        if isinstance(title, str):
            self.data['videoItem']['title'] = title


if __name__ == '__main__':
    pass
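
# A minimal usage sketch showing how the directive above is typically built
# before being attached to a bot response; the media URL, title and report
# interval below are illustrative placeholders.
def _example_video_play_directive():
    directive = VideoPlayer('http://example.com/videos/intro.mp4')
    directive.set_title('Intro video')
    directive.set_offset_in_milliseconds(0)
    directive.set_report_interval_in_ms(5000)
    return directive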
PypiClean
/msgraph_beta_sdk-1.0.0a9-py3-none-any.whl/msgraph/generated/users/item/events/item/instances/item/extensions/item/extension_item_request_builder.py
from __future__ import annotations from dataclasses import dataclass from kiota_abstractions.get_path_parameters import get_path_parameters from kiota_abstractions.method import Method from kiota_abstractions.request_adapter import RequestAdapter from kiota_abstractions.request_information import RequestInformation from kiota_abstractions.request_option import RequestOption from kiota_abstractions.response_handler import ResponseHandler from kiota_abstractions.serialization import Parsable, ParsableFactory from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union if TYPE_CHECKING: from .........models import extension from .........models.o_data_errors import o_data_error class ExtensionItemRequestBuilder(): """ Provides operations to manage the extensions property of the microsoft.graph.event entity. """ def __init__(self,request_adapter: RequestAdapter, path_parameters: Optional[Union[Dict[str, Any], str]] = None) -> None: """ Instantiates a new ExtensionItemRequestBuilder and sets the default values. Args: pathParameters: The raw url or the Url template parameters for the request. requestAdapter: The request adapter to use to execute the requests. """ if path_parameters is None: raise Exception("path_parameters cannot be undefined") if request_adapter is None: raise Exception("request_adapter cannot be undefined") # Url template to use to build the URL for the current request builder self.url_template: str = "{+baseurl}/users/{user%2Did}/events/{event%2Did}/instances/{event%2Did1}/extensions/{extension%2Did}{?%24select,%24expand}" url_tpl_params = get_path_parameters(path_parameters) self.path_parameters = url_tpl_params self.request_adapter = request_adapter async def delete(self,request_configuration: Optional[ExtensionItemRequestBuilderDeleteRequestConfiguration] = None) -> None: """ Delete navigation property extensions for users Args: requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options. """ request_info = self.to_delete_request_information( request_configuration ) from .........models.o_data_errors import o_data_error error_mapping: Dict[str, ParsableFactory] = { "4XX": o_data_error.ODataError, "5XX": o_data_error.ODataError, } if not self.request_adapter: raise Exception("Http core is null") return await self.request_adapter.send_no_response_content_async(request_info, error_mapping) async def get(self,request_configuration: Optional[ExtensionItemRequestBuilderGetRequestConfiguration] = None) -> Optional[extension.Extension]: """ The collection of open extensions defined for the event. Nullable. Args: requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options. 
Returns: Optional[extension.Extension] """ request_info = self.to_get_request_information( request_configuration ) from .........models.o_data_errors import o_data_error error_mapping: Dict[str, ParsableFactory] = { "4XX": o_data_error.ODataError, "5XX": o_data_error.ODataError, } if not self.request_adapter: raise Exception("Http core is null") from .........models import extension return await self.request_adapter.send_async(request_info, extension.Extension, error_mapping) async def patch(self,body: Optional[extension.Extension] = None, request_configuration: Optional[ExtensionItemRequestBuilderPatchRequestConfiguration] = None) -> Optional[extension.Extension]: """ Update the navigation property extensions in users Args: body: The request body requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options. Returns: Optional[extension.Extension] """ if body is None: raise Exception("body cannot be undefined") request_info = self.to_patch_request_information( body, request_configuration ) from .........models.o_data_errors import o_data_error error_mapping: Dict[str, ParsableFactory] = { "4XX": o_data_error.ODataError, "5XX": o_data_error.ODataError, } if not self.request_adapter: raise Exception("Http core is null") from .........models import extension return await self.request_adapter.send_async(request_info, extension.Extension, error_mapping) def to_delete_request_information(self,request_configuration: Optional[ExtensionItemRequestBuilderDeleteRequestConfiguration] = None) -> RequestInformation: """ Delete navigation property extensions for users Args: requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options. Returns: RequestInformation """ request_info = RequestInformation() request_info.url_template = self.url_template request_info.path_parameters = self.path_parameters request_info.http_method = Method.DELETE if request_configuration: request_info.add_request_headers(request_configuration.headers) request_info.add_request_options(request_configuration.options) return request_info def to_get_request_information(self,request_configuration: Optional[ExtensionItemRequestBuilderGetRequestConfiguration] = None) -> RequestInformation: """ The collection of open extensions defined for the event. Nullable. Args: requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options. Returns: RequestInformation """ request_info = RequestInformation() request_info.url_template = self.url_template request_info.path_parameters = self.path_parameters request_info.http_method = Method.GET request_info.headers["Accept"] = ["application/json"] if request_configuration: request_info.add_request_headers(request_configuration.headers) request_info.set_query_string_parameters_from_raw_object(request_configuration.query_parameters) request_info.add_request_options(request_configuration.options) return request_info def to_patch_request_information(self,body: Optional[extension.Extension] = None, request_configuration: Optional[ExtensionItemRequestBuilderPatchRequestConfiguration] = None) -> RequestInformation: """ Update the navigation property extensions in users Args: body: The request body requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options. 
Returns: RequestInformation """ if body is None: raise Exception("body cannot be undefined") request_info = RequestInformation() request_info.url_template = self.url_template request_info.path_parameters = self.path_parameters request_info.http_method = Method.PATCH request_info.headers["Accept"] = ["application/json"] if request_configuration: request_info.add_request_headers(request_configuration.headers) request_info.add_request_options(request_configuration.options) request_info.set_content_from_parsable(self.request_adapter, "application/json", body) return request_info @dataclass class ExtensionItemRequestBuilderDeleteRequestConfiguration(): """ Configuration for the request such as headers, query parameters, and middleware options. """ # Request headers headers: Optional[Dict[str, Union[str, List[str]]]] = None # Request options options: Optional[List[RequestOption]] = None @dataclass class ExtensionItemRequestBuilderGetQueryParameters(): """ The collection of open extensions defined for the event. Nullable. """ def get_query_parameter(self,original_name: Optional[str] = None) -> str: """ Maps the query parameters names to their encoded names for the URI template parsing. Args: originalName: The original query parameter name in the class. Returns: str """ if original_name is None: raise Exception("original_name cannot be undefined") if original_name == "expand": return "%24expand" if original_name == "select": return "%24select" return original_name # Expand related entities expand: Optional[List[str]] = None # Select properties to be returned select: Optional[List[str]] = None @dataclass class ExtensionItemRequestBuilderGetRequestConfiguration(): """ Configuration for the request such as headers, query parameters, and middleware options. """ # Request headers headers: Optional[Dict[str, Union[str, List[str]]]] = None # Request options options: Optional[List[RequestOption]] = None # Request query parameters query_parameters: Optional[ExtensionItemRequestBuilder.ExtensionItemRequestBuilderGetQueryParameters] = None @dataclass class ExtensionItemRequestBuilderPatchRequestConfiguration(): """ Configuration for the request such as headers, query parameters, and middleware options. """ # Request headers headers: Optional[Dict[str, Union[str, List[str]]]] = None # Request options options: Optional[List[RequestOption]] = None
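
# A minimal usage sketch; in application code this builder is normally reached
# through the fluent request-builder chain of the Graph client, but it can also
# be constructed directly as shown here. The request adapter and the user,
# event and extension ids below are illustrative placeholders, and the call is
# meant to be awaited inside an asyncio event loop.
async def _example_get_extension(request_adapter: RequestAdapter):
    builder = ExtensionItemRequestBuilder(request_adapter, {
        "user%2Did": "<user-id>",
        "event%2Did": "<event-id>",
        "event%2Did1": "<event-instance-id>",
        "extension%2Did": "<extension-id>",
    })
    return await builder.get()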
PypiClean
/google-api-python-client-uritemplate-1.4.2.tar.gz/google-api-python-client-uritemplate-1.4.2/googleapiclient/schema.py
from __future__ import absolute_import import six # TODO(jcgregorio) support format, enum, minimum, maximum __author__ = '[email protected] (Joe Gregorio)' import copy from oauth2client import util class Schemas(object): """Schemas for an API.""" def __init__(self, discovery): """Constructor. Args: discovery: object, Deserialized discovery document from which we pull out the named schema. """ self.schemas = discovery.get('schemas', {}) # Cache of pretty printed schemas. self.pretty = {} @util.positional(2) def _prettyPrintByName(self, name, seen=None, dent=0): """Get pretty printed object prototype from the schema name. Args: name: string, Name of schema in the discovery document. seen: list of string, Names of schema already seen. Used to handle recursive definitions. Returns: string, A string that contains a prototype object with comments that conforms to the given schema. """ if seen is None: seen = [] if name in seen: # Do not fall into an infinite loop over recursive definitions. return '# Object with schema name: %s' % name seen.append(name) if name not in self.pretty: self.pretty[name] = _SchemaToStruct(self.schemas[name], seen, dent=dent).to_str(self._prettyPrintByName) seen.pop() return self.pretty[name] def prettyPrintByName(self, name): """Get pretty printed object prototype from the schema name. Args: name: string, Name of schema in the discovery document. Returns: string, A string that contains a prototype object with comments that conforms to the given schema. """ # Return with trailing comma and newline removed. return self._prettyPrintByName(name, seen=[], dent=1)[:-2] @util.positional(2) def _prettyPrintSchema(self, schema, seen=None, dent=0): """Get pretty printed object prototype of schema. Args: schema: object, Parsed JSON schema. seen: list of string, Names of schema already seen. Used to handle recursive definitions. Returns: string, A string that contains a prototype object with comments that conforms to the given schema. """ if seen is None: seen = [] return _SchemaToStruct(schema, seen, dent=dent).to_str(self._prettyPrintByName) def prettyPrintSchema(self, schema): """Get pretty printed object prototype of schema. Args: schema: object, Parsed JSON schema. Returns: string, A string that contains a prototype object with comments that conforms to the given schema. """ # Return with trailing comma and newline removed. return self._prettyPrintSchema(schema, dent=1)[:-2] def get(self, name): """Get deserialized JSON schema from the schema name. Args: name: string, Schema name. """ return self.schemas[name] class _SchemaToStruct(object): """Convert schema to a prototype object.""" @util.positional(3) def __init__(self, schema, seen, dent=0): """Constructor. Args: schema: object, Parsed JSON schema. seen: list, List of names of schema already seen while parsing. Used to handle recursive definitions. dent: int, Initial indentation depth. """ # The result of this parsing kept as list of strings. self.value = [] # The final value of the parsing. self.string = None # The parsed JSON schema. self.schema = schema # Indentation level. self.dent = dent # Method that when called returns a prototype object for the schema with # the given name. self.from_cache = None # List of names of schema already seen while parsing. self.seen = seen def emit(self, text): """Add text as a line to the output. Args: text: string, Text to output. """ self.value.extend([" " * self.dent, text, '\n']) def emitBegin(self, text): """Add text to the output, but with no line terminator. 
Args: text: string, Text to output. """ self.value.extend([" " * self.dent, text]) def emitEnd(self, text, comment): """Add text and comment to the output with line terminator. Args: text: string, Text to output. comment: string, Python comment. """ if comment: divider = '\n' + ' ' * (self.dent + 2) + '# ' lines = comment.splitlines() lines = [x.rstrip() for x in lines] comment = divider.join(lines) self.value.extend([text, ' # ', comment, '\n']) else: self.value.extend([text, '\n']) def indent(self): """Increase indentation level.""" self.dent += 1 def undent(self): """Decrease indentation level.""" self.dent -= 1 def _to_str_impl(self, schema): """Prototype object based on the schema, in Python code with comments. Args: schema: object, Parsed JSON schema file. Returns: Prototype object based on the schema, in Python code with comments. """ stype = schema.get('type') if stype == 'object': self.emitEnd('{', schema.get('description', '')) self.indent() if 'properties' in schema: for pname, pschema in six.iteritems(schema.get('properties', {})): self.emitBegin('"%s": ' % pname) self._to_str_impl(pschema) elif 'additionalProperties' in schema: self.emitBegin('"a_key": ') self._to_str_impl(schema['additionalProperties']) self.undent() self.emit('},') elif '$ref' in schema: schemaName = schema['$ref'] description = schema.get('description', '') s = self.from_cache(schemaName, seen=self.seen) parts = s.splitlines() self.emitEnd(parts[0], description) for line in parts[1:]: self.emit(line.rstrip()) elif stype == 'boolean': value = schema.get('default', 'True or False') self.emitEnd('%s,' % str(value), schema.get('description', '')) elif stype == 'string': value = schema.get('default', 'A String') self.emitEnd('"%s",' % str(value), schema.get('description', '')) elif stype == 'integer': value = schema.get('default', '42') self.emitEnd('%s,' % str(value), schema.get('description', '')) elif stype == 'number': value = schema.get('default', '3.14') self.emitEnd('%s,' % str(value), schema.get('description', '')) elif stype == 'null': self.emitEnd('None,', schema.get('description', '')) elif stype == 'any': self.emitEnd('"",', schema.get('description', '')) elif stype == 'array': self.emitEnd('[', schema.get('description')) self.indent() self.emitBegin('') self._to_str_impl(schema['items']) self.undent() self.emit('],') else: self.emit('Unknown type! %s' % stype) self.emitEnd('', '') self.string = ''.join(self.value) return self.string def to_str(self, from_cache): """Prototype object based on the schema, in Python code with comments. Args: from_cache: callable(name, seen), Callable that retrieves an object prototype for a schema with the given name. Seen is a list of schema names already seen as we recursively descend the schema definition. Returns: Prototype object based on the schema, in Python code with comments. The lines of the code will all be properly indented. """ self.from_cache = from_cache return self._to_str_impl(self.schema)
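
# A minimal sketch of pretty-printing a schema from a tiny, hand-written
# discovery document; real discovery documents are fetched from the API's
# discovery endpoint and are far larger, and the schema below is purely
# illustrative.
def _example_pretty_print():
    discovery = {
        'schemas': {
            'Animal': {
                'type': 'object',
                'description': 'A simple example schema.',
                'properties': {
                    'name': {'type': 'string', 'description': 'Display name.'},
                    'legs': {'type': 'integer'},
                },
            }
        }
    }
    schemas = Schemas(discovery)
    return schemas.prettyPrintByName('Animal')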
PypiClean
/qiling-1.4.6.tar.gz/qiling-1.4.6/README.md
[![Documentation Status](https://readthedocs.org/projects/qilingframework/badge/?version=latest)](https://docs.qiling.io) [![Downloads](https://pepy.tech/badge/qiling)](https://pepy.tech/project/qiling) [![Chat on Telegram](https://img.shields.io/badge/Chat%20on-Telegram-brightgreen.svg)](https://t.me/qilingframework) --- <p align="center"> <img width="150" height="150" src="https://raw.githubusercontent.com/qilingframework/qiling/master/docs/qiling2_logo_small.png"> </p> [Qiling's usecase, blog and related work](https://github.com/qilingframework/qiling/issues/134) Qiling is an advanced binary emulation framework, with the following features: - Emulate multi-platforms: Windows, MacOS, Linux, Android, BSD, UEFI, DOS, MBR, Ethereum Virtual Machine - Emulate multi-architectures: 8086, X86, X86_64, ARM, ARM64, MIPS, RISCV, PowerPC - Support multiple file formats: PE, MachO, ELF, COM, MBR - Support Windows Driver (.sys), Linux Kernel Module (.ko) & MacOS Kernel (.kext) via [Demigod](https://groundx.io/demigod/) - Emulates & sandbox code in an isolated environment - Provides a fully configurable sandbox - Provides in-depth memory, register, OS level and filesystem level API - Fine-grain instrumentation: allows hooks at various levels (instruction/basic-block/memory-access/exception/syscall/IO/etc) - Provides virtual machine level API such as save and restore current execution state - Supports cross architecture and platform debugging capabilities - Built-in debugger with reverse debugging capability - Allows dynamic hotpatch on-the-fly running code, including the loaded library - True framework in Python, making it easy to build customized security analysis tools on top Qiling also made its way to various international conferences. 2022: - [Black Hat, EU](https://www.blackhat.com/eu-22/arsenal/schedule/#reversing-mcu-with-firmware-emulation-29553) - [Black Hat, MEA](https://blackhatmea.com/node/724) 2021: - [Black Hat, USA](https://www.blackhat.com/us-21/arsenal/schedule/index.html#bringing-the-x-complete-re-experience-to-smart-contract-24119) - [Hack In The Box, Amsterdam](https://conference.hitb.org/hitbsecconf2021ams/sessions/when-qiling-framework-meets-symbolic-execution/) - [Black Hat, Asia](https://www.blackhat.com/asia-21/arsenal/schedule/index.html#qiling-smart-analysis-for-smart-contract-22643) 2020: - [Black Hat, Europe](https://www.blackhat.com/eu-20/arsenal/schedule/index.html#qiling-framework-deep-dive-into-obfuscated-binary-analysis-21781) - [Black Hat, USA](https://www.blackhat.com/us-20/arsenal/schedule/index.html#qiling-framework-from-dark-to-dawn-----enlightening-the-analysis-of-the-most-mysterious-iot-firmware--21062) - [Black Hat, USA (Demigod)](https://www.blackhat.com/us-20/briefings/schedule/#demigod-the-art-of-emulating-kernel-rootkits-20009) - [Black Hat, Asia](https://www.blackhat.com/asia-20/arsenal/schedule/index.html#qiling-lightweight-advanced-binary-analyzer-19245) - [Hack In The Box, Lockdown 001](https://conference.hitb.org/lockdown-livestream/) - [Hack In The Box, Lockdown 002](https://conference.hitb.org/hitb-lockdown002/virtual-labs/virtual-lab-qiling-framework-learn-how-to-build-a-fuzzer-based-on-a-1day-bug/) - [Hack In The Box, Cyberweek](https://cyberweek.ae/2020/lab-qiling-framework/) - [Nullcon](https://nullcon.net/website/goa-2020/speakers/kaijern-lau.php) 2019: - [Defcon, USA](https://www.defcon.org/html/defcon-27/dc-27-demolabs.html#QiLing) - [Hitcon](https://hitcon.org/2019/CMT/agenda) - 
Qiling is backed by [Unicorn engine](http://www.unicorn-engine.org).

Visit our website https://www.qiling.io for more information.

---

#### License

This project is released and distributed under the [free software license GPLv2](https://github.com/qilingframework/qiling/blob/master/COPYING) and later versions.

---

#### Qiling vs other Emulators

There are many open source emulators, but the two projects closest to Qiling are [Unicorn](http://www.unicorn-engine.org) & [Qemu usermode](https://qemu.org). This section explains the main differences between Qiling and them.

##### Qiling vs Unicorn engine

Qiling is built on top of Unicorn, but Qiling & Unicorn are two different animals.

- Unicorn is just a CPU emulator: it focuses on emulating CPU instructions and understands emulator memory. Beyond that, Unicorn is not aware of higher-level concepts such as dynamic libraries, system calls, I/O handling or executable formats like PE, MachO or ELF. As a result, Unicorn can only emulate raw machine instructions, without Operating System (OS) context
- Qiling is designed as a higher-level framework that leverages Unicorn to emulate CPU instructions, but understands the OS: it has executable format loaders (for PE, MachO & ELF at the moment), dynamic linkers (so we can load & relocate shared libraries), and syscall & IO handlers. For this reason, Qiling can run an executable binary without requiring its native OS

##### Qiling vs Qemu usermode

Qemu usermode does something similar to our emulator: it emulates whole executable binaries in a cross-architecture way. However, Qiling offers some important differences against Qemu usermode.

- Qiling is a true analysis framework that allows you to build your own dynamic analysis tools on top (in friendly Python). Meanwhile, Qemu is just a tool, not a framework
- Qiling can perform dynamic instrumentation, and can even hotpatch code at runtime. Qemu does neither
- Not only does Qiling work cross-architecture, it is also cross-platform, so for example you can run a Linux ELF file on top of Windows. In contrast, Qemu usermode only runs binaries of the same OS, such as a Linux ELF on Linux, due to the way it forwards syscalls from emulated code to the native OS
- Qiling supports more platforms, including Windows, MacOS, Linux & BSD. Qemu usermode can only handle Linux & BSD

---

#### Installation

Please see the [setup guide](https://docs.qiling.io/en/latest/install/) for how to install Qiling Framework.

---

#### Examples

- The example below shows how to use the Qiling framework in the most straightforward way to emulate a Windows executable.

```python
from qiling import Qiling

if __name__ == "__main__":
    # initialize Qiling instance, specifying the executable to emulate and the emulated system root.
    # note that the current working directory is assumed to be Qiling home
    ql = Qiling([r'examples/rootfs/x86_windows/bin/x86_hello.exe'], r'examples/rootfs/x86_windows')

    # start emulation
    ql.run()
```

- The following example shows how a Windows crackme may be patched dynamically to make it always display the "Congratulation" dialog.
```python
from qiling import Qiling

def force_call_dialog_func(ql: Qiling):
    # get DialogFunc address from current stack frame
    lpDialogFunc = ql.stack_read(-8)

    # setup stack memory for DialogFunc
    ql.stack_push(0)
    ql.stack_push(1001)   # IDS_APPNAME
    ql.stack_push(0x111)  # WM_COMMAND
    ql.stack_push(0)

    # push return address
    ql.stack_push(0x0401018)

    # resume emulation from DialogFunc address
    ql.arch.regs.eip = lpDialogFunc

if __name__ == "__main__":
    # initialize Qiling instance
    ql = Qiling([r'rootfs/x86_windows/bin/Easy_CrackMe.exe'], r'rootfs/x86_windows')

    # NOP out some code
    ql.patch(0x004010B5, b'\x90\x90')
    ql.patch(0x004010CD, b'\x90\x90')
    ql.patch(0x0040110B, b'\x90\x90')
    ql.patch(0x00401112, b'\x90\x90')

    # hook at an address with a callback
    ql.hook_address(force_call_dialog_func, 0x00401016)

    ql.run()
```

The YouTube video below shows how the above example works.

#### Emulating ARM router firmware on an Ubuntu x64 machine

- Qiling Framework hot-patches and emulates an ARM router's /usr/bin/httpd on an x86_64 Ubuntu machine

[![qiling Tutorial: Emulating and Fuzz ARM router firmware](https://github.com/qilingframework/theme.qiling.io/blob/master/source/img/fuzzer.jpg?raw=true)](https://www.youtube.com/watch?v=e3_T3KLh2NU " Demo #3 Emulating and Fuzz ARM router firmware")

#### Qiling's IDAPro Plugin: Instrument and Decrypt Mirai's Secret

- This video demonstrates how Qiling's IDAPro plugin is able to make IDAPro run with the Qiling instrumentation engine

[![](http://img.youtube.com/vi/ZWMWTq2WTXk/0.jpg)](http://www.youtube.com/watch?v=ZWMWTq2WTXk "Qiling's IDAPro Plugin: Instrument and Decrypt Mirai's Secret")

#### GDBserver with IDAPro demo

- Solving a simple CTF challenge with Qiling Framework and IDAPro

[![Solving a simple CTF challenge with Qiling Framework and IDAPro](https://i.ytimg.com/vi/SPjVAt2FkKA/0.jpg)](https://www.youtube.com/watch?v=SPjVAt2FkKA "Video DEMO 2")

#### Emulating MBR

- Qiling Framework emulates MBR

[![qiling DEMO: Emulating MBR](https://github.com/qilingframework/theme.qiling.io/blob/master/source/img/mbr.png?raw=true)](https://github.com/qilingframework/theme.qiling.io/blob/master/source/img/mbr.png?raw=true "Demo #4 Emulating UEFI")

---

#### Qltool

Qiling also provides a friendly tool named `qltool` to quickly emulate shellcode & executable binaries. With qltool, emulation can be performed easily:

With shellcode:

```
$ ./qltool code --os linux --arch arm --format hex -f examples/shellcodes/linarm32_tcp_reverse_shell.hex
```

With a binary file:

```
$ ./qltool run -f examples/rootfs/x8664_linux/bin/x8664_hello --rootfs examples/rootfs/x8664_linux/
```

With a binary and the GDB debugger enabled:

```
$ ./qltool run -f examples/rootfs/x8664_linux/bin/x8664_hello --gdb 127.0.0.1:9999 --rootfs examples/rootfs/x8664_linux
```

With code coverage collection (UEFI only for now):

```
$ ./qltool run -f examples/rootfs/x8664_efi/bin/TcgPlatformSetupPolicy --rootfs examples/rootfs/x8664_efi --coverage-format drcov --coverage-file TcgPlatformSetupPolicy.cov
```

With JSON output (Windows mainly):

```
$ ./qltool run -f examples/rootfs/x86_windows/bin/x86_hello.exe --rootfs examples/rootfs/x86_windows/ --console False --json
```

---

#### Contact

Get the latest info from our website https://www.qiling.io

Contact us at email [email protected], or via Twitter [@qiling_io](https://twitter.com/qiling_io) or [Weibo](https://www.weibo.com/sgniwx)

---

#### Core developers, Key Contributors and more

Please refer to [CREDITS.md](https://github.com/qilingframework/qiling/blob/dev/CREDITS.md)
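
---

#### Instrumentation hook sketch

The fine-grained instrumentation mentioned in the feature list can be driven entirely from Python. The sketch below is a minimal, unverified example: it assumes the same `examples/rootfs/x8664_linux` tree used by the qltool examples above and uses `ql.hook_code`, whose exact signature may vary slightly between Qiling releases.

```python
from qiling import Qiling

# mutable counter updated by the hook callback
counter = {'instructions': 0}

def count_instructions(ql: Qiling, address: int, size: int) -> None:
    # called for every instruction the engine emulates
    counter['instructions'] += 1

if __name__ == "__main__":
    # current working directory is assumed to be the Qiling checkout
    ql = Qiling([r'examples/rootfs/x8664_linux/bin/x8664_hello'],
                r'examples/rootfs/x8664_linux')

    # instruction-level hook; similar hook_* methods cover the other
    # levels listed in the features (basic block, memory access, etc.)
    ql.hook_code(count_instructions)

    ql.run()
    print('emulated %d instructions' % counter['instructions'])
```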
PypiClean
/geo_data_br-0.3.5.tar.gz/geo_data_br-0.3.5/prototype_notebooks/00_get_data_with_python.ipynb
```
root_dir = !git rev-parse --show-toplevel
import os; os.chdir(root_dir[0])

BASE_URL = 'http://www.atlasbrasil.org.br'

HEADERS = '''
-H 'Connection: keep-alive'
-H 'Cache-Control: max-age=0'
-H 'Upgrade-Insecure-Requests: 1'
-H 'User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36'
-H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3'
-H 'Referer: http://www.atlasbrasil.org.br/2013/pt/consulta/'
-H 'Accept-Language: en-US,en;q=0.9,pt-BR;q=0.8,pt;q=0.7'
'''.replace('\n', ' ')

!curl '{BASE_URL}/2013/pt/download/' {HEADERS} --insecure > /tmp/a.html

import bs4
soup = bs4.BeautifulSoup(open('/tmp/a.html').read())

BASE_PATH = soup.find('head').find('base').attrs['href']
BASE_PATH

downloads = soup.find_all(class_='blue_button')
links = [b.parent.attrs['href'] for b in downloads]
assert links[0] == 'data/rawData/Indicadores Atlas - RADAR IDHM.xlsx'

!mkdir -p data/raw

links_already_have = [
    "Indicadores Atlas - RADAR IDHM.xlsx",
    "DADOS_DO_ATLAS_DESAGREGADOS_POR_COR__SEXO_E_DOMICILIO_-_2000_E_2010_-_FINAL.xlsx",
    "agrupamento_UDHs_para_calculo_no_IBGE_2000_2010.xlsx",
    "atlas2013_dadosbrutos_pt.xlsx",
    "dados_belem.zip",
    "dados_bh.zip",
    "dados_bs.zip",
    "dados_campinas.zip",
    "dados_curitiba.zip",
    "dados_df.zip",
    "dados_florianopolis.zip",
    "dados_fortaleza.zip",
    "dados_goiania.zip",
    "dados_maceio.zip",
    "dados_manaus.zip",
    "dados_natal.zip",
    "dados_pa.zip",
    "dados_petrolina_juazeiro.zip",
    "dados_recife.zip",
    "dados_sl.zip",
    "dados_teresina_timon.zip",
    "dados_vitoria.zip"
]

links_remaining = set(links) - set(links_already_have)
links_remaining = list(links_remaining)

import urllib
for l in links_remaining[:]:
    url = BASE_URL + BASE_PATH + urllib.parse.quote(l)
    print(url)
    file = l.split('/')[-1]
    !cd data/raw/; curl "{url}" {HEADERS} --insecure -o '{file}'

print("Hey, listen")

# !head data/raw/Indicadores\ Atlas\ -\ RADAR\ IDHM.xlsx

!pwd

lines = !ls -1 ../data/raw/*.zip
size = !du -s ../data/raw/
assert int(size[0].split('\t')[0]) > 10000, size
assert len(lines) == 24
```

## Unzippin all this crap

```
import rarfile
import zipfile

from os import listdir, remove, rmdir, rename
from os.path import isfile, isdir, join


def get_pattern_files(base_path, pattern):
    return [
        join(base_path, file) for file in listdir(base_path)
        if isfile(join(base_path, file)) and pattern in file
    ]

def get_all_generated_folders(base_path):
    return [
        join(base_path, folder) for folder in listdir(base_path)
        if isdir(join(base_path, folder))]

def extract_all(path_to_files, extract_to_path, extension):
    extractor = zipfile.ZipFile if extension == '.zip' else rarfile.RarFile
    for path_to_file in path_to_files:
        with extractor(path_to_file, 'r') as ref:
            ref.extractall(extract_to_path)

def move_all_files_from_dir_to_target(target_path, dir_path, extension="."):
    files_names = [join(dir_path, filename) for filename in listdir(dir_path)]
    move_all_files_to_target(target_path, files_names)

def move_all_files_to_target(target_path, files_paths):
    files_names = [file_path.split('/')[-1] for file_path in files_paths]
    for filename, original_path in zip(files_names, files_paths):
        rename(original_path, join(target_path, filename))

def flat_list(l):
    return [item for sublist in l for item in sublist]
```

Preparing data folder

```
%%bash
rm -rf ../data/preprocessed
mkdir -p ../data/preprocessed
mkdir -p ../data/preprocessed/others
cp ../data/raw/* ../data/preprocessed/
shopt -s extglob
cd ../data/preprocessed/ && mv !(dados_*.zip|others) others
```
Extracting everything and removing unnecessary files

```
data_path = '../data/preprocessed'

zip_files = get_pattern_files(data_path, '.zip')
extract_all(zip_files, data_path, '.zip')

for folder_path in get_all_generated_folders(data_path):
    move_all_files_from_dir_to_target(data_path, folder_path)

rar_files = get_pattern_files(data_path, '.rar')
extract_all(rar_files, data_path, '.rar')

for folder_path in get_all_generated_folders(data_path):
    move_all_files_from_dir_to_target(data_path, folder_path)

for file in zip_files + rar_files:
    remove(file)

directories_to_remove = get_all_generated_folders(data_path)
for folder in directories_to_remove:
    rmdir(folder)
```

Moving each file type to its specific folder

```
%%bash
set -e
cd ../data/preprocessed/
mkdir udh; mkdir regional; mkdir regiao_metro;
```

```
udh_files = get_pattern_files(data_path, "UDH")
move_all_files_to_target(join(data_path, "udh"), udh_files)

regional_files = get_pattern_files(data_path, "Regional") + get_pattern_files(data_path, "REGIO")
move_all_files_to_target(join(data_path, "regional"), regional_files)

metro_files = get_pattern_files(data_path, "RM")
move_all_files_to_target(join(data_path, "regiao_metro"), metro_files)
```

## Check

```
lines = !ls -1 ../data/preprocessed/udh/*.shp
size = !du -s ../data/raw/
assert int(size[0].split('\t')[0]) > 10000, size
assert len(lines) == 24
```

# Convert Excels to parquets

```
import glob

excels = glob.glob('../data/preprocessed/udh/*.xlsx')
excels

excels.remove('../data/preprocessed/udh/agrupamento_UDHs_para_calculo_no_IBGE_2000_2010.xlsx')

import multiprocessing, glob, pandas as pd
import pyarrow.lib

def convert_to_parquet(path):
    print(f'converting {path}')
    dfs = pd.read_excel(path, sheet_name=None)
    for sheet_name, df in dfs.items():
        try:
            if df.empty:
                continue
            df.to_parquet(path.replace('.xlsx', '__' + sheet_name + '.parquet'), index=False)
        except (Exception, pyarrow.lib.ArrowTypeError) as e:
            print('error in:')
            print((path, sheet_name, e))
            return False
    return True

res = map(convert_to_parquet, excels)
all(res)

!du -sh ../data/preprocessed/udh/*parquet
```
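
A side note on the last cell: in Python 3, `map` is lazy, so it is the `all(res)` call that actually drives the conversions. Since `multiprocessing` is imported but never used, a parallel variant of that cell could look like the sketch below. It reuses `convert_to_parquet` and `excels` from above; the pool size is an arbitrary choice, and from a notebook this relies on the fork start method available on Linux.

```
from multiprocessing import Pool

# parallel version of the conversion loop above
with Pool(processes=4) as pool:
    results = pool.map(convert_to_parquet, excels)

assert all(results), 'some spreadsheets failed to convert'
```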
PypiClean
/django_bpp-1.0.9-py3-none-any.whl/django_bpp/staticroot/grappelli/tinymce/jscripts/tiny_mce/plugins/media/js/media.js
(function() { var url; if (url = tinyMCEPopup.getParam("media_external_list_url")) document.write('<script language="javascript" type="text/javascript" src="' + tinyMCEPopup.editor.documentBaseURI.toAbsolute(url) + '"></script>'); function get(id) { return document.getElementById(id); } function clone(obj) { var i, len, copy, attr; if (null == obj || "object" != typeof obj) return obj; // Handle Array if ('length' in obj) { copy = []; for (i = 0, len = obj.length; i < len; ++i) { copy[i] = clone(obj[i]); } return copy; } // Handle Object copy = {}; for (attr in obj) { if (obj.hasOwnProperty(attr)) copy[attr] = clone(obj[attr]); } return copy; } function getVal(id) { var elm = get(id); if (elm.nodeName == "SELECT") return elm.options[elm.selectedIndex].value; if (elm.type == "checkbox") return elm.checked; return elm.value; } function setVal(id, value, name) { if (typeof(value) != 'undefined' && value != null) { var elm = get(id); if (elm.nodeName == "SELECT") selectByValue(document.forms[0], id, value); else if (elm.type == "checkbox") { if (typeof(value) == 'string') { value = value.toLowerCase(); value = (!name && value === 'true') || (name && value === name.toLowerCase()); } elm.checked = !!value; } else elm.value = value; } } window.Media = { init : function() { var html, editor, self = this; self.editor = editor = tinyMCEPopup.editor; // Setup file browsers and color pickers get('filebrowsercontainer').innerHTML = getBrowserHTML('filebrowser','src','media','media'); get('qtsrcfilebrowsercontainer').innerHTML = getBrowserHTML('qtsrcfilebrowser','quicktime_qtsrc','media','media'); get('bgcolor_pickcontainer').innerHTML = getColorPickerHTML('bgcolor_pick','bgcolor'); get('video_altsource1_filebrowser').innerHTML = getBrowserHTML('video_filebrowser_altsource1','video_altsource1','media','media'); get('video_altsource2_filebrowser').innerHTML = getBrowserHTML('video_filebrowser_altsource2','video_altsource2','media','media'); get('audio_altsource1_filebrowser').innerHTML = getBrowserHTML('audio_filebrowser_altsource1','audio_altsource1','media','media'); get('audio_altsource2_filebrowser').innerHTML = getBrowserHTML('audio_filebrowser_altsource2','audio_altsource2','media','media'); get('video_poster_filebrowser').innerHTML = getBrowserHTML('filebrowser_poster','video_poster','image','media'); html = self.getMediaListHTML('medialist', 'src', 'media', 'media'); if (html == "") get("linklistrow").style.display = 'none'; else get("linklistcontainer").innerHTML = html; if (isVisible('filebrowser')) get('src').style.width = '230px'; if (isVisible('video_filebrowser_altsource1')) get('video_altsource1').style.width = '220px'; if (isVisible('video_filebrowser_altsource2')) get('video_altsource2').style.width = '220px'; if (isVisible('audio_filebrowser_altsource1')) get('audio_altsource1').style.width = '220px'; if (isVisible('audio_filebrowser_altsource2')) get('audio_altsource2').style.width = '220px'; if (isVisible('filebrowser_poster')) get('video_poster').style.width = '220px'; editor.dom.setOuterHTML(get('media_type'), self.getMediaTypeHTML(editor)); self.setDefaultDialogSettings(editor); self.data = clone(tinyMCEPopup.getWindowArg('data')); self.dataToForm(); self.preview(); updateColor('bgcolor_pick', 'bgcolor'); }, insert : function() { var editor = tinyMCEPopup.editor; this.formToData(); editor.execCommand('mceRepaint'); tinyMCEPopup.restoreSelection(); editor.selection.setNode(editor.plugins.media.dataToImg(this.data)); tinyMCEPopup.close(); }, preview : function() { 
get('prev').innerHTML = this.editor.plugins.media.dataToHtml(this.data, true); }, moveStates : function(to_form, field) { var data = this.data, editor = this.editor, mediaPlugin = editor.plugins.media, ext, src, typeInfo, defaultStates, src; defaultStates = { // QuickTime quicktime_autoplay : true, quicktime_controller : true, // Flash flash_play : true, flash_loop : true, flash_menu : true, // WindowsMedia windowsmedia_autostart : true, windowsmedia_enablecontextmenu : true, windowsmedia_invokeurls : true, // RealMedia realmedia_autogotourl : true, realmedia_imagestatus : true }; function parseQueryParams(str) { var out = {}; if (str) { tinymce.each(str.split('&'), function(item) { var parts = item.split('='); out[unescape(parts[0])] = unescape(parts[1]); }); } return out; }; function setOptions(type, names) { var i, name, formItemName, value, list; if (type == data.type || type == 'global') { names = tinymce.explode(names); for (i = 0; i < names.length; i++) { name = names[i]; formItemName = type == 'global' ? name : type + '_' + name; if (type == 'global') list = data; else if (type == 'video' || type == 'audio') { list = data.video.attrs; if (!list && !to_form) data.video.attrs = list = {}; } else list = data.params; if (list) { if (to_form) { setVal(formItemName, list[name], type == 'video' || type == 'audio' ? name : ''); } else { delete list[name]; value = getVal(formItemName); if ((type == 'video' || type == 'audio') && value === true) value = name; if (defaultStates[formItemName]) { if (value !== defaultStates[formItemName]) { value = "" + value; list[name] = value; } } else if (value) { value = "" + value; list[name] = value; } } } } } } if (!to_form) { data.type = get('media_type').options[get('media_type').selectedIndex].value; data.width = getVal('width'); data.height = getVal('height'); // Switch type based on extension src = getVal('src'); if (field == 'src') { ext = src.replace(/^.*\.([^.]+)$/, '$1'); if (typeInfo = mediaPlugin.getType(ext)) data.type = typeInfo.name.toLowerCase(); setVal('media_type', data.type); } if (data.type == "video" || data.type == "audio") { if (!data.video.sources) data.video.sources = []; data.video.sources[0] = {src: getVal('src')}; } } // Hide all fieldsets and show the one active get('video_options').style.display = 'none'; get('audio_options').style.display = 'none'; get('flash_options').style.display = 'none'; get('quicktime_options').style.display = 'none'; get('shockwave_options').style.display = 'none'; get('windowsmedia_options').style.display = 'none'; get('realmedia_options').style.display = 'none'; get('embeddedaudio_options').style.display = 'none'; if (get(data.type + '_options')) get(data.type + '_options').style.display = 'block'; setVal('media_type', data.type); setOptions('flash', 'play,loop,menu,swliveconnect,quality,scale,salign,wmode,base,flashvars'); setOptions('quicktime', 'loop,autoplay,cache,controller,correction,enablejavascript,kioskmode,autohref,playeveryframe,targetcache,scale,starttime,endtime,target,qtsrcchokespeed,volume,qtsrc'); setOptions('shockwave', 'sound,progress,autostart,swliveconnect,swvolume,swstretchstyle,swstretchhalign,swstretchvalign'); setOptions('windowsmedia', 'autostart,enabled,enablecontextmenu,fullscreen,invokeurls,mute,stretchtofit,windowlessvideo,balance,baseurl,captioningid,currentmarker,currentposition,defaultframe,playcount,rate,uimode,volume'); setOptions('realmedia', 
'autostart,loop,autogotourl,center,imagestatus,maintainaspect,nojava,prefetch,shuffle,console,controls,numloop,scriptcallbacks'); setOptions('video', 'poster,autoplay,loop,muted,preload,controls'); setOptions('audio', 'autoplay,loop,preload,controls'); setOptions('embeddedaudio', 'autoplay,loop,controls'); setOptions('global', 'id,name,vspace,hspace,bgcolor,align,width,height'); if (to_form) { if (data.type == 'video') { if (data.video.sources[0]) setVal('src', data.video.sources[0].src); src = data.video.sources[1]; if (src) setVal('video_altsource1', src.src); src = data.video.sources[2]; if (src) setVal('video_altsource2', src.src); } else if (data.type == 'audio') { if (data.video.sources[0]) setVal('src', data.video.sources[0].src); src = data.video.sources[1]; if (src) setVal('audio_altsource1', src.src); src = data.video.sources[2]; if (src) setVal('audio_altsource2', src.src); } else { // Check flash vars if (data.type == 'flash') { tinymce.each(editor.getParam('flash_video_player_flashvars', {url : '$url', poster : '$poster'}), function(value, name) { if (value == '$url') data.params.src = parseQueryParams(data.params.flashvars)[name] || data.params.src || ''; }); } setVal('src', data.params.src); } } else { src = getVal("src"); // YouTube Embed if (src.match(/youtube\.com\/embed\/\w+/)) { data.width = 425; data.height = 350; data.params.frameborder = '0'; data.type = 'iframe'; setVal('src', src); setVal('media_type', data.type); } else { // YouTube *NEW* if (src.match(/youtu\.be\/[a-z1-9.-_]+/)) { data.width = 425; data.height = 350; data.params.frameborder = '0'; data.type = 'iframe'; src = 'http://www.youtube.com/embed/' + src.match(/youtu.be\/([a-z1-9.-_]+)/)[1]; setVal('src', src); setVal('media_type', data.type); } // YouTube if (src.match(/youtube\.com(.+)v=([^&]+)/)) { data.width = 425; data.height = 350; data.params.frameborder = '0'; data.type = 'iframe'; src = 'http://www.youtube.com/embed/' + src.match(/v=([^&]+)/)[1]; setVal('src', src); setVal('media_type', data.type); } } // Google video if (src.match(/video\.google\.com(.+)docid=([^&]+)/)) { data.width = 425; data.height = 326; data.type = 'flash'; src = 'http://video.google.com/googleplayer.swf?docId=' + src.match(/docid=([^&]+)/)[1] + '&hl=en'; setVal('src', src); setVal('media_type', data.type); } // Vimeo if (src.match(/vimeo\.com\/([0-9]+)/)) { data.width = 425; data.height = 350; data.params.frameborder = '0'; data.type = 'iframe'; src = 'http://player.vimeo.com/video/' + src.match(/vimeo.com\/([0-9]+)/)[1]; setVal('src', src); setVal('media_type', data.type); } // stream.cz if (src.match(/stream\.cz\/((?!object).)*\/([0-9]+)/)) { data.width = 425; data.height = 350; data.params.frameborder = '0'; data.type = 'iframe'; src = 'http://www.stream.cz/object/' + src.match(/stream.cz\/[^/]+\/([0-9]+)/)[1]; setVal('src', src); setVal('media_type', data.type); } // Google maps if (src.match(/maps\.google\.([a-z]{2,3})\/maps\/(.+)msid=(.+)/)) { data.width = 425; data.height = 350; data.params.frameborder = '0'; data.type = 'iframe'; src = 'http://maps.google.com/maps/ms?msid=' + src.match(/msid=(.+)/)[1] + "&output=embed"; setVal('src', src); setVal('media_type', data.type); } if (data.type == 'video') { if (!data.video.sources) data.video.sources = []; data.video.sources[0] = {src : src}; src = getVal("video_altsource1"); if (src) data.video.sources[1] = {src : src}; src = getVal("video_altsource2"); if (src) data.video.sources[2] = {src : src}; } else if (data.type == 'audio') { if (!data.video.sources) 
data.video.sources = []; data.video.sources[0] = {src : src}; src = getVal("audio_altsource1"); if (src) data.video.sources[1] = {src : src}; src = getVal("audio_altsource2"); if (src) data.video.sources[2] = {src : src}; } else data.params.src = src; // Set default size setVal('width', data.width || (data.type == 'audio' ? 300 : 320)); setVal('height', data.height || (data.type == 'audio' ? 32 : 240)); } }, dataToForm : function() { this.moveStates(true); }, formToData : function(field) { if (field == "width" || field == "height") this.changeSize(field); if (field == 'source') { this.moveStates(false, field); setVal('source', this.editor.plugins.media.dataToHtml(this.data)); this.panel = 'source'; } else { if (this.panel == 'source') { this.data = clone(this.editor.plugins.media.htmlToData(getVal('source'))); this.dataToForm(); this.panel = ''; } this.moveStates(false, field); this.preview(); } }, beforeResize : function() { this.width = parseInt(getVal('width') || (this.data.type == 'audio' ? "300" : "320"), 10); this.height = parseInt(getVal('height') || (this.data.type == 'audio' ? "32" : "240"), 10); }, changeSize : function(type) { var width, height, scale, size; if (get('constrain').checked) { width = parseInt(getVal('width') || (this.data.type == 'audio' ? "300" : "320"), 10); height = parseInt(getVal('height') || (this.data.type == 'audio' ? "32" : "240"), 10); if (type == 'width') { this.height = Math.round((width / this.width) * height); setVal('height', this.height); } else { this.width = Math.round((height / this.height) * width); setVal('width', this.width); } } }, getMediaListHTML : function() { if (typeof(tinyMCEMediaList) != "undefined" && tinyMCEMediaList.length > 0) { var html = ""; html += '<select id="linklist" name="linklist" style="width: 250px" onchange="this.form.src.value=this.options[this.selectedIndex].value;Media.formToData(\'src\');">'; html += '<option value="">---</option>'; for (var i=0; i<tinyMCEMediaList.length; i++) html += '<option value="' + tinyMCEMediaList[i][1] + '">' + tinyMCEMediaList[i][0] + '</option>'; html += '</select>'; return html; } return ""; }, getMediaTypeHTML : function(editor) { function option(media_type, element) { if (!editor.schema.getElementRule(element || media_type)) { return ''; } return '<option value="'+media_type+'">'+tinyMCEPopup.editor.translate("media_dlg."+media_type)+'</option>' } var html = ""; html += '<select id="media_type" name="media_type" onchange="Media.formToData(\'type\');">'; html += option("video"); html += option("audio"); html += option("flash", "object"); html += option("quicktime", "object"); html += option("shockwave", "object"); html += option("windowsmedia", "object"); html += option("realmedia", "object"); html += option("iframe"); if (editor.getParam('media_embedded_audio', false)) { html += option('embeddedaudio', "object"); } html += '</select>'; return html; }, setDefaultDialogSettings : function(editor) { var defaultDialogSettings = editor.getParam("media_dialog_defaults", {}); tinymce.each(defaultDialogSettings, function(v, k) { setVal(k, v); }); } }; tinyMCEPopup.requireLangPack(); tinyMCEPopup.onInit.add(function() { Media.init(); }); })();
PypiClean
/pagure-messages-1.1.0.tar.gz/pagure-messages-1.1.0/pagure_messages/issue_schema.py
from .base import ISSUE, PROJECT, PagureMessage, SCHEMA_URL class IssueAssignedAddedV1(PagureMessage): """ A sub-class of a Fedora message that defines a message schema for messages published by pagure when an issue is deleted. """ topic = "pagure.issue.assigned.added" body_schema = { "id": SCHEMA_URL + topic, "$schema": "http://json-schema.org/draft-04/schema#", "description": "Schema for messages sent when a new project is created", "type": "object", "properties": { "agent": {"type": "string"}, "project": PROJECT, "issue": ISSUE, }, "required": ["agent", "project", "issue"], } def __str__(self): """Return a complete human-readable representation of the message.""" return "Issue: {fullname}#{id} assigned to {assignee}\nBy: {agent_name}".format( fullname=self.body["project"]["fullname"], id=self.body["issue"]["id"], agent_name=self.agent_name, assignee=self.body["issue"]["assignee"]["name"], ) @property def summary(self): """Return a summary of the message.""" return "{agent_name} assigned issue {name}#{id} to {assignee}".format( agent_name=self.agent_name, name=self.body["project"]["fullname"], id=self.body["issue"]["id"], assignee=self.body["issue"]["assignee"]["name"], ) @property def url(self): return self.body["issue"]["full_url"] class IssueAssignedResetV1(PagureMessage): """ A sub-class of a Fedora message that defines a message schema for messages published by pagure when an issue is deleted. """ topic = "pagure.issue.assigned.reset" body_schema = { "id": SCHEMA_URL + topic, "$schema": "http://json-schema.org/draft-04/schema#", "description": "Schema for messages sent when a new project is created", "type": "object", "properties": { "agent": {"type": "string"}, "project": PROJECT, "issue": ISSUE, }, "required": ["agent", "project", "issue"], } def __str__(self): """Return a complete human-readable representation of the message.""" return "Issue un-assigned: {fullname}#{id}\nBy: {agent_name}".format( fullname=self.body["project"]["fullname"], id=self.body["issue"]["id"], agent_name=self.agent_name, ) @property def summary(self): """Return a summary of the message.""" return "{agent_name} reset the assignee on issue {name}#{id}".format( agent_name=self.agent_name, name=self.body["project"]["fullname"], id=self.body["issue"]["id"], ) @property def url(self): return self.body["issue"]["full_url"] class IssueCommentAddedV1(PagureMessage): """ A sub-class of a Fedora message that defines a message schema for messages published by pagure when an issue is deleted. 
""" topic = "pagure.issue.comment.added" body_schema = { "id": SCHEMA_URL + topic, "$schema": "http://json-schema.org/draft-04/schema#", "description": "Schema for messages sent when a new project is created", "type": "object", "properties": { "agent": {"type": "string"}, "project": PROJECT, "issue": ISSUE, }, "required": ["agent", "project"], } def __str__(self): """Return a complete human-readable representation of the message.""" return "Issue: {fullname}#{id} has a new comment\nBy: {agent_name}".format( fullname=self.body["project"]["fullname"], id=self.body["issue"]["id"], agent_name=self.agent_name, ) @property def summary(self): """Return a summary of the message.""" return "{agent_name} commented on the issue {name}#{id}".format( agent_name=self.agent_name, name=self.body["project"]["fullname"], id=self.body["issue"]["id"], ) @property def url(self): issue_url = self.body["issue"]["full_url"] commentid = self.body["issue"]["comments"][-1]["id"] return "{issue_url}#comment-{commentid}".format( issue_url=issue_url, commentid=commentid ) class IssueDependencyAddedV1(PagureMessage): """ A sub-class of a Fedora message that defines a message schema for messages published by pagure when an issue is deleted. """ topic = "pagure.issue.dependency.added" body_schema = { "id": SCHEMA_URL + topic, "$schema": "http://json-schema.org/draft-04/schema#", "description": "Schema for messages sent when a new project is created", "type": "object", "properties": { "agent": {"type": "string"}, "project": PROJECT, "issue": ISSUE, "added_dependency": {"type": "number"}, }, "required": ["agent", "project", "added_dependency"], } def __str__(self): """Return a complete human-readable representation of the message.""" return ( "Issue: {fullname}#{id} depends on #{depissueid}\nBy: {agent_name}".format( fullname=self.body["project"]["fullname"], id=self.body["issue"]["id"], agent_name=self.agent_name, depissueid=self.body["added_dependency"], ) ) @property def summary(self): """Return a summary of the message.""" return "{agent_name} set the issue {name}#{id} as depending on #{depissueid}".format( agent_name=self.agent_name, name=self.body["project"]["fullname"], id=self.body["issue"]["id"], depissueid=self.body["added_dependency"], ) @property def url(self): return self.body["issue"]["full_url"] class IssueDependencyRemovedV1(PagureMessage): """ A sub-class of a Fedora message that defines a message schema for messages published by pagure when an issue is deleted. 
""" topic = "pagure.issue.dependency.removed" body_schema = { "id": SCHEMA_URL + topic, "$schema": "http://json-schema.org/draft-04/schema#", "description": "Schema for messages sent when a new project is created", "type": "object", "properties": { "agent": {"type": "string"}, "project": PROJECT, "issue": ISSUE, "removed_dependency": {"type": "array", "items": {"type": "number"}}, }, "required": ["agent", "project", "removed_dependency"], } def __str__(self): """Return a complete human-readable representation of the message.""" return ( "Issue: {fullname}#{id} no longer depending" " on #{depissueid}\nBy: {agent_name}".format( fullname=self.body["project"]["fullname"], id=self.body["issue"]["id"], agent_name=self.agent_name, depissueid=", #".join( [str(i) for i in self.body["removed_dependency"]] ), ) ) @property def summary(self): """Return a summary of the message.""" return ( "{agent_name} removed the dependency" " on #{depissueid} on the issue {name}#{id}".format( agent_name=self.agent_name, name=self.body["project"]["fullname"], id=self.body["issue"]["id"], depissueid=", #".join( [str(i) for i in self.body["removed_dependency"]] ), ) ) @property def url(self): return self.body["issue"]["full_url"] class IssueDropV1(PagureMessage): """ A sub-class of a Fedora message that defines a message schema for messages published by pagure when an issue is deleted. """ topic = "pagure.issue.drop" body_schema = { "id": SCHEMA_URL + topic, "$schema": "http://json-schema.org/draft-04/schema#", "description": "Schema for messages sent when a new project is created", "type": "object", "properties": { "agent": {"type": "string"}, "project": PROJECT, "issue": ISSUE, }, "required": ["agent", "project", "issue"], } def __str__(self): """Return a complete human-readable representation of the message.""" return "Issue deleted: {fullname}#{id}\nBy: {agent_name}".format( fullname=self.body["project"]["fullname"], id=self.body["issue"]["id"], agent_name=self.agent_name, ) @property def summary(self): """Return a summary of the message.""" return "{agent_name} deleted issue {name}#{id}: {title}".format( agent_name=self.agent_name, name=self.body["project"]["fullname"], id=self.body["issue"]["id"], title=self.body["issue"]["title"], ) @property def url(self): full_url = self.body["project"]["full_url"] return "{full_url}/issues".format(full_url=full_url) class IssueEditV1(PagureMessage): """ A sub-class of a Fedora message that defines a message schema for messages published by pagure when an issue is updated. 
""" topic = "pagure.issue.edit" body_schema = { "id": SCHEMA_URL + topic, "$schema": "http://json-schema.org/draft-04/schema#", "description": "Schema for messages sent when a new project is created", "type": "object", "properties": { "agent": {"type": "string"}, "project": PROJECT, "issue": ISSUE, "fields": {"type": "array", "items": {"type": ["string", "null"]}}, }, "required": ["agent", "project", "issue", "fields"], } def __str__(self): """Return a complete human-readable representation of the message.""" return "Edited Issue: {fullname}#{id}\nBy: {agent_name}".format( fullname=self.body["project"]["fullname"], id=self.body["issue"]["id"], agent_name=self.agent_name, ) @property def summary(self): """Return a summary of the message.""" return ( "{agent_name} edited fields {fields} of issue {name}#{id}: {title}".format( agent_name=self.agent_name, name=self.body["project"]["fullname"], id=self.body["issue"]["id"], title=self.body["issue"]["title"], fields=", ".join(self.body["fields"]), ) ) @property def url(self): return self.body["issue"]["full_url"] class IssueNewV1(PagureMessage): """ A sub-class of a Fedora message that defines a message schema for messages published by pagure when a new thing is created. """ topic = "pagure.issue.new" body_schema = { "id": SCHEMA_URL + topic, "$schema": "http://json-schema.org/draft-04/schema#", "description": "Schema for messages sent when a new project is created", "type": "object", "properties": { "agent": {"type": "string"}, "project": PROJECT, "issue": ISSUE, }, "required": ["agent", "project", "issue"], } def __str__(self): """Return a complete human-readable representation of the message.""" return "New Issue: {fullname}#{id}\nBy: {agent_name}".format( fullname=self.body["project"]["fullname"], id=self.body["issue"]["id"], agent_name=self.agent_name, ) @property def summary(self): """Return a summary of the message.""" return "{agent_name} created issue {name}#{id}: {title}".format( agent_name=self.agent_name, name=self.body["project"]["fullname"], id=self.body["issue"]["id"], title=self.body["issue"]["title"], ) @property def url(self): return self.body["issue"]["full_url"] class IssueTagAddedV1(PagureMessage): """ A sub-class of a Fedora message that defines a message schema for messages published by pagure when an issue is deleted. """ topic = "pagure.issue.tag.added" body_schema = { "id": SCHEMA_URL + topic, "$schema": "http://json-schema.org/draft-04/schema#", "description": "Schema for messages sent when a new project is created", "type": "object", "properties": { "agent": {"type": "string"}, "project": PROJECT, "issue": ISSUE, "tags": {"type": "array", "items": {"type": "string"}}, }, "required": ["agent", "project"], } def __str__(self): """Return a complete human-readable representation of the message.""" return "Issue: {fullname}#{id} tagged with {tags}\nBy: {agent_name}".format( fullname=self.body["project"]["fullname"], id=self.body["issue"]["id"], agent_name=self.agent_name, tags=", ".join(self.body["tags"]), ) @property def summary(self): """Return a summary of the message.""" return "{agent_name} tagged the issue {name}#{id} with: {tags}".format( agent_name=self.agent_name, name=self.body["project"]["fullname"], id=self.body["issue"]["id"], tags=", ".join(self.body["tags"]), ) @property def url(self): return self.body["issue"]["full_url"] class IssueTagRemovedV1(PagureMessage): """ A sub-class of a Fedora message that defines a message schema for messages published by pagure when an issue is deleted. 
""" topic = "pagure.issue.tag.removed" body_schema = { "id": SCHEMA_URL + topic, "$schema": "http://json-schema.org/draft-04/schema#", "description": "Schema for messages sent when a new project is created", "type": "object", "properties": { "agent": {"type": "string"}, "project": PROJECT, "issue": ISSUE, "tags": {"type": "array", "items": {"type": "string"}}, }, "required": ["agent", "project"], } def __str__(self): """Return a complete human-readable representation of the message.""" return "Issue: {fullname}#{id} un-tagged with {tags}\nBy: {agent_name}".format( fullname=self.body["project"]["fullname"], id=self.body["issue"]["id"], agent_name=self.agent_name, tags=", ".join(self.body["tags"]), ) @property def summary(self): """Return a summary of the message.""" return "{agent_name} removed tags {tags} from issue {name}#{id}".format( agent_name=self.agent_name, name=self.body["project"]["fullname"], id=self.body["issue"]["id"], tags=", ".join(self.body["tags"]), ) @property def url(self): return self.body["issue"]["full_url"]
PypiClean
/instagram_private_api-1.6.0.0.tar.gz/instagram_private_api-1.6.0.0/instagram_private_api/constants.py
class Constants(object): """Constants holder class that stores the bulk of the fixed strings used in the library.""" IG_SIG_KEY = '19ce5f445dbfd9d29c59dc2a78c616a7fc090a8e018b9267bc4240a30244c53b' IG_CAPABILITIES = '3brTvw==' SIG_KEY_VERSION = '4' APP_VERSION = '76.0.0.15.395' APPLICATION_ID = '567067343352427' FB_HTTP_ENGINE = 'Liger' ANDROID_VERSION = 24 ANDROID_RELEASE = '7.0' PHONE_MANUFACTURER = 'samsung' PHONE_DEVICE = 'SM-G930F' PHONE_MODEL = 'herolte' PHONE_DPI = '640dpi' PHONE_RESOLUTION = '1440x2560' PHONE_CHIPSET = 'samsungexynos8890' VERSION_CODE = '138226743' USER_AGENT_FORMAT = \ 'Instagram {app_version} Android ({android_version:d}/{android_release}; ' \ '{dpi}; {resolution}; {brand}; {device}; {model}; {chipset}; en_US; {version_code})' USER_AGENT_EXPRESSION = \ r'Instagram\s(?P<app_version>[^\s]+)\sAndroid\s\((?P<android_version>[0-9]+)/(?P<android_release>[0-9\.]+);\s' \ r'(?P<dpi>\d+dpi);\s(?P<resolution>\d+x\d+);\s(?P<manufacturer>[^;]+);\s(?P<device>[^;]+);\s' \ r'(?P<model>[^;]+);\s(?P<chipset>[^;]+);\s[a-z]+_[A-Z]+;\s(?P<version_code>\d+)' USER_AGENT = USER_AGENT_FORMAT.format(**{ 'app_version': APP_VERSION, 'android_version': ANDROID_VERSION, 'android_release': ANDROID_RELEASE, 'brand': PHONE_MANUFACTURER, 'device': PHONE_DEVICE, 'model': PHONE_MODEL, 'dpi': PHONE_DPI, 'resolution': PHONE_RESOLUTION, 'chipset': PHONE_CHIPSET, 'version_code': VERSION_CODE}) LOGIN_EXPERIMENTS = 'ig_growth_android_profile_pic_prefill_with_fb_pic_2,ig_android_icon_perf2,ig_android_autosubmit_password_recovery_universe,ig_android_background_voice_phone_confirmation_prefilled_phone_number_only,ig_android_report_nux_completed_device,ig_account_recovery_via_whatsapp_universe,ig_android_stories_reels_tray_media_count_check,ig_android_background_voice_confirmation_block_argentinian_numbers,ig_android_device_verification_fb_signup,ig_android_reg_nux_headers_cleanup_universe,ig_android_reg_omnibox,ig_android_background_voice_phone_confirmation,ig_android_gmail_autocomplete_account_over_one_tap,ig_android_phone_reg_redesign_universe,ig_android_skip_signup_from_one_tap_if_no_fb_sso,ig_android_reg_login_profile_photo_universe,ig_android_access_flow_prefill,ig_android_email_suggestions_universe,ig_android_contact_import_placement_universe,ig_android_ask_for_permissions_on_reg,ig_android_onboarding_skip_fb_connect,ig_account_identity_logged_out_signals_global_holdout_universe,ig_android_account_switch_infra_universe,ig_android_hide_fb_connect_for_signup,ig_restore_focus_on_reg_textbox_universe,ig_android_login_identifier_fuzzy_match,ig_android_suma_biz_account,ig_android_session_scoping_facebook_account,ig_android_security_intent_switchoff,ig_android_do_not_show_back_button_in_nux_user_list,ig_android_aymh_signal_collecting_kill_switch,ig_android_nux_add_email_device,ig_android_multi_tap_login_new,ig_android_persistent_duplicate_notif_checker,ig_android_login_safetynet,ig_android_fci_onboarding_friend_search,ig_android_editable_username_in_reg,ig_android_phone_auto_login_during_reg,ig_android_one_tap_fallback_auto_login,ig_android_device_detection_info_upload,ig_android_updated_copy_user_lookup_failed,ig_fb_invite_entry_points,ig_android_hsite_prefill_new_carrier,ig_android_gmail_oauth_in_reg,ig_two_fac_login_screen,ig_android_reg_modularization_universe,ig_android_passwordless_auth,ig_android_sim_info_upload,ig_android_universe_noticiation_channels,ig_android_realtime_manager_cleanup_universe,ig_android_analytics_accessibility_event,ig_android_direct_main_tab_universe,ig_android_email_one_tap_auto_l
ogin_during_reg,ig_android_prefill_full_name_from_fb,ig_android_directapp_camera_open_and_reset_universe,ig_challenge_kill_switch,ig_android_video_bug_report_universe,ig_account_recovery_with_code_android_universe,ig_prioritize_user_input_on_switch_to_signup,ig_android_modularized_nux_universe_device,ig_android_account_recovery_auto_login,ig_android_hide_typeahead_for_logged_users,ig_android_targeted_one_tap_upsell_universe,ig_android_caption_typeahead_fix_on_o_universe,ig_android_retry_create_account_universe,ig_android_crosshare_feed_post,ig_android_abandoned_reg_flow,ig_android_remember_password_at_login,ig_android_smartlock_hints_universe,ig_android_2fac_auto_fill_sms_universe,ig_type_ahead_recover_account,ig_android_onetaplogin_optimization,ig_android_family_apps_user_values_provider_universe,ig_android_smart_prefill_killswitch,ig_android_direct_inbox_account_switching,ig_android_exoplayer_settings,ig_android_bottom_sheet,ig_android_publisher_integration,ig_sem_resurrection_logging,ig_android_login_forgot_password_universe,ig_android_hindi,ig_android_hide_fb_flow_in_add_account_flow,ig_android_dialog_email_reg_error_universe,ig_android_low_priority_notifications_universe,ig_android_device_sms_retriever_plugin_universe,ig_android_device_verification_separate_endpoint' # noqa EXPERIMENTS = 'ig_camera_android_badge_face_effects_universe,ig_android_dash_lazy_load_audio,ig_android_stories_landscape_mode,ig_android_whitehat_options_universe,ig_android_fb_profile_integration_fbnc_universe,ig_android_vc_create_thread_upon_call_initiation_universe,ig_android_stories_seen_state_swipe_forward_universe,ig_android_realtime_mqtt_logging,ig_branded_content_show_settings_universe,ig_android_stories_server_coverframe,ig_android_direct_mutation_manager_handler_thread_universe,ig_android_ad_async_ads_universe,ig_android_camera_arengine_shader_caching_universe,ig_android_live_audiomanager_leak,ig_feed_lockdown,ig_android_interactions_preview_comment_impression_universe,ig_android_enable_igrtc_module,ig_android_direct_vm_activity_sheet,ig_android_appstate_logger,ig_android_direct_breeze_sheet,ig_android_camera_new_post_smile_universe,ig_android_live_video_position_source_universe,mi_viewpoint_feed_switchover,ig_feed_ranking_report_issue,ig_camera_android_areffect_photo_capture_universe,ig_rtc_use_dtls_srtp,ig_android_video_prefetch_feed_fix,ig_android_direct_remove_in_composer_camera_button_animation_universe,ig_android_live_fault_tolerance_universe,ig_android_igtv_autoplay_on_prepare,ig_android_main_feed_new_posts_indicator_universe,ig_android_audience_control,ig_android_stories_gif_upload_fix,ig_android_one_tap_fbshare,ig_android_startup_thread_priority,ig_android_stories_question_sticker_new_formats_universe,ig_android_business_transaction_in_stories_consumer,ig_android_search_impression_logging,ig_android_rtl_api28_textlayout_crash_universe,ig_android_direct_thread_sidebar_send_states,ig_fbns_push,ig_face_effect_ranking,ig_android_direct_albums,ig_search_null_state_universe,ig_android_stories_music_sticker_default_variation,ig_android_direct_update_thread_metadata_universe,ig_android_codec_high_profile,ig_android_inline_appeal,ig_rti_inapp_notifications_universe,ig_promote_last_used_destination_universe,ig_android_vc_directapp_integration_universe,allow_publish_page_universe,ig_android_maintabfragment,ig_android_skip_get_fbupload_universe,ig_android_low_data_mode,ig_android_enable_zero_rating,ig_android_main_feed_refresh_style_universe,ig_android_reverse_audio,ig_background_prefetch,ig_android_request_
verification_badge,ig_android_http_stack_experiment_2017,ig_direct_android_24h_visual_perf,ig_android_live_thread_delay_for_mute_universe,ig_android_fb_topsearch_sgp_fork_request,ig_hashtag_display_universe,ig_android_banyan_migration,ig_android_heap_uploads,ig_android_cookie_serialization_optimization_universe,ig_android_mqtt_cookie_auth_memcache_universe,ig_android_stories_feeback_message_composer_entry_point,ig_android_instacrash_detection,ig_explore_2018_h2_account_rec_deduplication_android,ig_android_photo_hashing,ig_android_increase_fd_limit,ig_android_log_failed_image_download_retries,ig_android_live_use_timestamp_normalizer,ig_android_direct_async_thread_store,ig_android_persistent_nux,ig_android_story_accidentally_click_investigation_universe,ig_android_live_capture_translucent_navigation_bar,ig_android_churned_find_friends_redirect_to_discover_people,ig_android_vc_capture_universe,ig_android_story_reactions,ig_android_video_playback_retry_time_threshold,ig_android_mi_netego_long_event_removal_universe,ig_android_global_scheduler_infra,ig_end_of_feed_ranking_universe,ig_android_live_emoji_easter_egg_universe,ig_stories_in_feed_unit_design_universe,ig_android_ads_manager_pause_resume_ads_universe,ig_android_show_welcome_card_self_post_universe,ig_android_hashtag_header_display,ig_android_delay_coldstart_logging,ig_android_explore_grid_icon_removal_universe,ig_android_hashtag_contextual_feed_follow_button,ig_internal_research_settings,ig_smb_ads_basket_of_values_universe,ig_android_shopping_pdp_universe,ig_android_anr,ig_close_friends_v4,ig_android_feed_seen_state_with_view_info,ig_android_direct_visual_previews_in_thread,ig_promote_budget_warning_view_universe,ig_android_vc_camera_zoom_universe,ig_promote_daily_budget_multiplier_universe,ig_android_interactions_direct_share_comment_universe,ig_camera_android_supported_capabilities_api_universe,ig_android_post_recs_hide_from_author_universe,ig_android_biz_conversion_editable_profile_review_universe,ig_android_ad_increase_story_adpreload_priority_universe,ig_android_cache_video_autoplay_checker,ig_android_photo_fbuploader_config,ig_android_ad_watchlead_universe,ig_android_live_viewer_single_tap_invite_universe,ig_android_cold_start_json_delivery_improvement,ig_stories_suggestions_for_small_tray_universe,ig_shopping_catalog_selection_done_button,ig_inventory_connections,ig_android_fb_profile_integration_universe,ig_android_stories_weblink_creation,ig_android_live_start_broadcast_optimized_universe,ig_android_netgo_cta,ig_android_histogram_reporter,ig_android_vc_universe,ig_fb_graph_differentiation_no_fb_data,ig_android_network_cancellation,ig_android_live_presence_universe,ig_android_search_normalization_recipients,ig_android_connect_owned_page_universe,ig_android_downloaded_image_decode_universe,ig_android_realtime_stories_fetching,ig_android_hashtag_following,ig_android_felix_release_all_players_on_pause,ig_android_low_data_mode_backup_1,ig_android_share_claim_page_universe,ig_direct_holdout_h2_2018,ig_android_reactive_feed_like_count,ig_android_redirect_to_web_on_oembed_fail_universe,ig_camera_android_facetracker_v12_universe,ig_android_biz_qp_suggest_page,ig_android_direct_mutation_manager_job_scheduler,ig_android_continuous_video_capture,ig_android_live_skin_smooth,ig_promote_net_promoter_score_universe,ig_android_qp_features,ig_android_reel_raven_video_segmented_upload_universe,ig_android_biz_new_choose_category,ig_android_rate_limit_mediafeedviewablehelper,ig_android_shopping_post_tagging_redesign,ig_android_invite_xout_universe
,ig_android_direct_permanent_video_upload_length,ig_android_sso_use_trustedapp_universe,ig_mi_impression_mainfeed_switchover,ig_android_remove_follow_all_fb_list,ig_android_save_all,ig_android_vc_call_screen_universe,ig_android_vc_join_timeout_universe,felix_android_video_quality,ig_eof_demarcator_style_universe,ig_shopping_post_insights,ig_android_shopping_more_from_business,ig_android_igtv_feed_trailer,ig_android_skip_video_render,ig_android_highlight_stickers_universe,ig_android_gap_rule_enforcer_universe,ig_android_interactive_listview_during_refresh,ig_android_ffmpeg_muxer_write_retry_universe,ig_android_main_feed_carousels_universe,ig_android_post_recs_show_more_button_universe,ig_android_live_suggested_live_expansion,ig_android_direct_inbox_cache_inbox_row_qes_universe,ig_android_video_cover_frame_universe,ig_android_abr_settings,ig_android_direct_app_hide_recents_header_in_recipient_picker_universe,ig_android_disk_usage_logging_universe,ig_android_story_sharing_universe,ig_android_optic_camera_warmup,ig_android_video_refactor_logger,ig_promote_lotus_universe,ig_stories_engagement_team_holdout_universe,ig_android_stories_gallery_video_segmentation,ig_promote_review_screen_title_universe,ig_android_direct_replace_inbox_camera_with_stories_camera,ig_explore_2018_post_chaining_account_recs_dedupe_universe,ig_android_igtv_save,ig_android_direct_presence_indicator,ig_android_asset_picker_improvements,ig_android_react_native_universe_kill_switch,ig_android_fs_new_gallery,android_ig_live_blacklisting,ig_android_qp_kill_switch,ig_android_new_contact_invites_entry_points_universe,ig_android_optic_feature_testing,ig_android_ad_leadgen_single_screen_universe,ig_android_stories_highlights_fast_navigation_universe,ig_android_vc_add_users_universe,ig_android_react_native_email_sms_settings_universe,ig_android_sticker_search_explorations,ig_android_business_id_conversion_universe,ig_android_business_promote_refresh_fb_access_token_universe,ig_android_selfupdate_jobscheduler,ig_android_fb_url_universe,ig_camera_android_profile_ar_notification_universe,ig_android_story_viewer_linear_preloading_count,ig_live_holdout_h2_2018,ig_android_vc_missed_call_notification_action_call_back,ig_android_stories_tray_in_viewer,ig_android_betamap_universe,ig_android_feed_video_mute_button_position,instagram_aat,ig_login_activity,ig_video_experimental_encoding_consumption_universe,ig_android_stories_share_extension_video_segmentation,ig_camera_android_black_feed_sticker_fix_universe,ig_android_camera_post_smile_low_end_universe,ig_android_import_page_post_after_biz_conversion,ig_android_direct_inbox_rv_configuration_universe,ig_android_feed_upload_progress,ig_vc_h2_2018_holdout_universe,ig_camera_android_superzoom_icon_position_universe,ig_android_live_dash_latency_manager,instagram_interests_holdout,ig_android_user_detail_endpoint,ig_android_click_to_direct_story_reaction_universe,ig_android_shopping_sidecar_editing,ig_android_interactions_new_comment_like_pos_universe,ig_android_reel_tray_item_impression_logging_viewpoint,ig_android_gif_framerate_throttling,ig_android_shopping_checkout_mvp,ig_android_live_save_to_camera_roll_limit_by_screen_size_universe,ig_end_of_feed_universe,ig_android_live_use_all_preview_sizes,ig_promote_post_insights_entry_universe,ig_hero_player,ig_stories_music_themes,ig_android_video_ffmpeg_muxer_universe,ig_android_live_follow_from_comments_universe,ig_android_profile_phone_autoconfirm_universe,ig_android_inline_notifications_recommended_user,ig_android_live_ama_universe,ig_android_camera_
use_gl_for_postcapture_type,ig_android_insights_media_hashtag_insight_universe,ig_account_recs_in_chaining,ig_android_igtv_whitelisted_for_web,ig_fb_cross_posting_sender_side_holdout,ig_android_felix_feed_badging_tooltip_universe,ig_camera_gallery_button_thumbnail_universe,ag_family_bridges_2018_h2_holdout,ig_android_arengine_separate_prepare,ig_android_direct_visual_history,ig_android_employee_options_override,ig_android_share_product_universe,ig_camera_android_ar_platform_universe,ig_android_nametag,ig_android_netego_scroll_perf,ig_fbns_preload_default,ig_android_cover_frame_blacklist,android_cameracore_ard_ig_integration,ig_android_use_iterative_box_blur,ig_android_direct_inbox_recyclerview_pool_size,ig_android_clear_inflight_image_request,ig_android_audio_ingestion_params,ig_android_native_logcat_interceptor,ig_android_stories_separate_overlay_creation,ig_android_enable_liger_preconnect_universe,ig_android_hacked_account_reporting,ig_android_high_res_gif_stickers,ig_android_direct_remove_permanent_reactions_bar,ig_android_vod_abr_universe,ig_payments_paypal,ig_android_hashtag_feed_tabbed,ig_android_vc_participants_grid_universe,ig_android_video_decoder_retry,ig_android_enable_main_feed_reel_tray_preloading,ig_android_camera_upsell_dialog,ig_account_identity_2018_h2_lockdown_phone_global_holdout,ig_android_one_tap_sharesheet_fb_extensions,ig_android_country_code_fix_universe,ig_android_optic_fast_preview_restart_listener,ig_android_inline_appeal_show_new_content,ig_android_show_su_in_other_users_follow_list,ig_android_fb_family_navigation_badging_user,ig_android_video_scrubber_thumbnail_universe,ig_lockdown_feed_caption_length_universe,ig_camera_android_optimizations_2018_h2_universe,ig_stories_music_sticker,ig_android_optic_disable_post_capture_preview_restart,ig_android_vc_minimized_viewer_universe,ig_android_share_others_post_reorder,ig_android_low_data_mode_backup_5,ig_school_community_v2_universe,ig_android_post_live_expanded_comments_view_universe,ig_android_story_ad_cta_context_universe,ig_android_save_auto_sharing_to_fb_option_on_server,ig_android_igtv_chaining,ig_android_profile_private_banner,ig_android_stories_video_prefetch_kb,ig_android_direct_stories_in_direct_inbox,android_cameracore_preview_frame_listener2_ig_universe,ig_android_live_stop_broadcast_on_404,ig_android_live_skip_live_encoder_pts_correction,ig_android_show_twitter_name_universe,ig_android_direct_new_message_ranking,ig_android_render_iframe_interval,ig_android_direct_allow_multiline_composition,ig_android_place_search_profile_image,live_with_request_to_join_button_universe,ig_story_camera_reverse_video_experiment,ig_android_file_descriptor_limit,ig_android_stories_tray_fast_scroll_universe,ig_android_story_ad_text_limitation_universe,ig_android_cameracore_ar_text_plugin_universe,ig_android_direct_audience_upgrade_in_thread_camera,ig_android_felix,ig_android_media_share_icon,ig_android_archive_features_holdout_universe,ig_share_to_story_toggle_include_shopping_product,ig_two_fac_totp_enable,ig_android_camera_universe,ig_android_insights_creative_tutorials_universe,ig_android_qp_slot_cooldown_enabled_universe,ig_android_photos_qpl,ig_android_video_call_finish_universe,ig_hashtag_following_holdout_universe,ig_android_facebook_global_state_sync_frequency_universe,ig_android_global_scheduler_direct,ig_android_unify_video_player,ig_android_webrtc_icerestart_universe,ig_android_scroll_stories_tray_to_front_when_stories_ready,ig_android_mi_holdout_h1_2019,ig_android_interactions_permalink_replace_single_media_universe
,ig_android_ttcp_improvements,ig_android_live_comment_fetch_frequency_universe,ig_android_directapp_instagram_deeplinking,ig_android_direct_inbox_recyclerview,ig_shopping_viewer_share_action,ig_android_hashtag_row_preparer,ig_eof_caboose_universe,ig_android_optic_new_features_implementation,ig_android_optic_new_zoom_controller,ig_android_direct_log_badge_count_inconsistent,ig_android_qp_clash_management_enabled_v4_universe,ig_android_hide_button_for_invite_facebook_friends,ig_android_activity_feed_impression_logger,ig_android_visualcomposer_inapp_notification_universe,ig_android_direct_sticker_gifs_in_thread,ig_android_optic_surface_texture_cleanup,ig_android_live_align_by_2_universe,ig_android_mobile_boost_universe,ig_android_network_util_cache_info,ig_android_camera_new_early_show_smile_icon_universe,ig_android_ads_profile_cta_feed_universe,ig_android_viewpoint_netego_universe,ig_android_direct_remix_visual_messages,ig_android_camera_new_tray_behavior_universe,ig_android_auto_advance_su_unit_when_scrolled_off_screen,ig_android_business_ix_universe,ig_vp9_hd_blacklist,ig_android_new_one_tap_nux_universe,ig_feed_experience,ig_android_business_new_navigation_universe,ig_stories_injection_tool_enabled_universe,ig_android_direct_import_google_photos2,ig_android_stories_text_format_emphasis,ig_android_direct_app_invites,ig_android_promote_fbauth_universe,ig_android_video_resize_operation,ig_android_stories_loading_automatic_retry,ig_android_live_end_redirect_universe,ig_android_following_hashtags_tooltip,ig_direct_max_participants,ig_android_stories_whatsapp_share,ig_android_low_data_mode_backup_2,ig_android_bitmap_attribution_check,ig_android_contact_invites_nux_universe,ig_android_search_page_v2,ig_android_direct_share_story_to_facebook,ig_android_stories_music_overlay,ig_android_direct_null_state_activation_cards,ig_android_fbupload_sidecar_video_universe,ig_android_tagging_combined_indicator,ig_android_direct_app_thread_presence_header,ig_android_react_native_restart_after_error_universe,ig_android_camera_attribution_in_direct,ig_android_contact_point_upload_rate_limit_killswitch,ig_android_profile,ig_android_additional_contact_in_nux,ig_android_profile_activation_cards_expanded,ig_android_view_and_likes_cta_universe,ig_android_story_reactions_producer_holdout,ig_android_live_use_rtc_upload_universe,ig_android_live_replay_highlights_universe,ig_main_activity_cold_start,ig_android_direct_double_tap_like_everything,ig_android_direct_character_limit,ig_business_dynamic_conversion_universe,ig_android_shopping_channel_in_explore,ig_stories_holdout_h1_2018,ig_android_scroll_perf_qpl_killswitch,ig_android_fbns_optimization_universe,ig_camera_ar_effect_attribution_position,ig_android_video_ta_universe,ig_android_live_view_profile_from_comments_universe,ig_android_interactions_threaded_comments_in_feed_universe,ig_fbns_blocked,ig_android_sso_kototoro_app_universe,ig_android_stories_question_sticker_music_format,ig_android_biz_auto_slide_props,ig_media_account_rollout_universe,ig_android_show_fbunlink_button_based_on_server_data,ig_android_fs_creation_flow_tweaks,ig_android_recommend_accounts_killswitch,ig_android_shopping_save_product_collection_cell_redesign_universe,ig_android_direct_inbox_background_view_models,ig_android_page_claim_deeplink_qe,ig_android_profile_edit_phone_universe,ig_android_switch_back_option,ig_android_new_orders_entrypoint,ig_android_media_rows_async_inflate,ig_android_direct_story_chaining_v2,ig_android_ad_show_full_name_universe,ig_android_private_highlights_universe,ig_an
droid_igtv_audio_always_on,ig_android_interactions_inline_composer_extensions_universe,ig_android_scroll_main_feed,ig_business_integrity_ipc_universe,ig_android_location_page_info_page_upsell,ig_camera_android_bg_processor,ig_android_stories_viewer_prefetch_improvements,ig_android_rate_limit_feed_item_viewable_helper,ig_android_fci_empty_feed_friend_search,ig_feed_requests_logs_universe,ig_android_video_qp_logger_universe,ig_nametag_data_collection,ig_discovery_holdout_universe,ig_android_recyclerview_binder_group_enabled_universe,ig_android_direct_create_shortcut,ig_android_ar_effects_button_display_timing,ig_vc_holdout_universe_h2,ig_android_stories_sampled_progress,ig_android_qpl_queue_time_universe,ig_android_downloadable_vp8_module,ig_android_ccu_jobscheduler_outer,ig_android_stories_viewer_modal_activity,ig_android_direct_thread_composer,ig_android_fbns_preload_direct_universe,ig_android_direct_24h_replayability_nux_killswitch_universe,ig_android_activity_feed_row_click,ig_android_time_spent_dashboard,ig_android_loom_v2,ig_android_ad_pbia_header_click_universe,ig_android_direct_quick_replies,ig_android_handle_username_in_media_urls_universe,ig_android_request_compression_universe,ig_android_live_pip_minimize_universe,ig_android_usersession_leak_patching_universe,ig_android_stories_viewer_tall_android_cap_media_universe,ig_android_growth_fci_team_holdout_universe,ig_android_insights_holdout,ig_feed_engagement_holdout_2018_h1,ig_fb_graph_differentiation_only_fb_candidates,ig_pacing_overriding_universe,ig_android_direct_app_multi_account_badging,ig_android_direct_persisted_text_drafts_universe,ig_android_felix_prefetch_thumbnail_sprite_sheet,ig_camera_android_segmentation_async_universe,ig_android_category_search_in_sign_up,ig_android_separate_network_executor,ig_android_interactions_comment_like_for_all_feed_universe,ig_android_remove_push_notifications,ig_android_video_segment_ffmpeg_muxer_universe,ig_android_downgrade_viewport_exit_behavior,ig_android_vc_call_ended_cleanup_universe,ig_android_universe_video_production,ig_android_intialization_chunk_410,ig_android_live_analytics,ig_android_stories_music_filters,ig_android_camera_gallery_upload_we_universe,ig_android_video_exoplayer_2,ig_android_stories_music_precapture,ig_android_bitmap_compress_retry_universe,ig_android_verified_comments_universe,ig_android_dash_script,ig_android_igtv_feed_banner_redesign,ig_shopping_viewer_intent_actions,ig_android_gallery_order_by_date_taken,ig_android_location_plugin_leak_detection,ig_android_custom_story_import_intent,ig_lockdown_feed_perf,ig_android_camera_ar_platform_profile_universe,ig_stories_allow_camera_actions_while_recording,ig_android_optic_new_architecture,ig_android_ig_to_fb_sync_universe,ig_android_fbc_upsell_on_dp_first_load,ig_android_video_watermark_universe_qe2,ig_android_shopping_video_product_tag_consumption,ig_android_share_others_post_share_sheet,ig_biz_growth_entry_value,ig_android_stories_alignment_guides_universe,ig_android_livewith_guest_adaptive_camera_universe,ig_android_business_transaction_in_stories_creator,ig_android_optic_thread_priorities,ig_android_delayed_comments,ig_profile_company_holdout_h2_2018,ig_android_feed_coldstart_universe,ig_android_felix_pager_center_buffer_bias,ig_android_edit_metadata,ig_android_user_url_deeplink_fbpage_endpoint,ig_android_direct_face_filter_button_in_composer,ig_android_stories_helium_balloon_badging_universe,ig_android_rate_limit_feed_video_module,ig_android_ad_watchbrowse_universe,ig_android_stories_private_mention_sharing_univers
e,ig_direct_raven_search_universe,ig_android_live_pivot_to_reshare_universe,ig_company_profile_holdout,ig_android_invite_list_button_redesign_universe,ig_android_log_mediacodec_info,ig_android_fb_follow_server_linkage_universe,ig_android_direct_expiring_media_loading_errors,ig_android_direct_remove_blurred_profile_photo_for_thread_camera_universe,ig_camera_regiontracking_use_similarity_tracker_for_scaling,ig_android_not_modified_cache_universe,ig_android_direct_thread_green_dot_presence_universe,ig_igds_snackbar_android_universe,ig_android_insights_relay_optimization_universe,ig_android_stories_viewer_bitmap_holder,ig_android_shopping_catalogsearch,ig_android_location_page_intent_survey,ig_android_reel_zoom_universe,ig_android_biz_suggested_category,ig_android_cpu_frame_rendering_universe,ig_android_stories_create_flow_favorites_tooltip,ig_android_q3lc_transparency_control_settings,ig_android_stories_music_broadcast_receiver,ig_android_direct_send_new_combined_reshare,ig_android_resuming_failed_image_downloads_universe,ig_android_push_notifications_settings_redesign_universe,ig_android_enable_request_compression_ccu,ig_android_vc_ongoing_call_notification_universe,ig_android_stories_helium_long_press_universe,ig_fb_notification_universe,ig_branded_content_paid_branded_content,ig_android_downloadable_igrtc_module,ig_android_hide_reset_with_fb_universe,ig_android_direct_newer_single_line_composer_universe,ig_android_story_decor_image_fbupload_universe,ig_android_hashtag_creation_development,ig_android_ad_view_ads_native_universe,ig_android_hero_player_settings,ig_promote_ppe_v2_universe,ig_android_stories_archive_calendar,ig_android_ad_watchbrowse_cta_universe,ig_android_player_crash_report,ig_business_signup_biz_id_universe,ig_android_video_render_device_tiers,ig_android_payload_based_scheduling,ig_android_realtime_iris,ig_android_direct_gifs_in_thread,ig_android_main_feed_fragment_scroll_timing_histogram_uni,ig_android_direct_inbox_recyclerview_fixedsize_universe,ig_android_qp_batch_fetch_caching_enabled_v1_universe,ig_android_inline_editing_local_prefill,ig_android_location_feed_related_business,ig_promote_audience_selection_universe,ig_android_direct_low_contrast_inbox,ig_android_media_rows_prepare_10_31,ig_android_stories_fix_current_active_item_bound_crash,ig_family_bridges_holdout_universe,ig_android_push_notification_settings_universe,ig_android_updatelistview_on_loadmore,ig_promote_no_create_ads_check_universe,ig_android_business_ix_self_serve,ig_direct_raven_sharesheet_ranking,ig_android_insta_video_consumption_infra,ig_android_api_urlencode_universe,ig_android_concurrent_cold_start_universe,ig_android_direct_inbox_custom_rv_prefetch,ig_android_vc_missed_call_notification_action_reply,ig_android_multi_capture_camera,ig_android_stories_cross_sharing_to_fb_holdout_universe,ig_smb_ads_holdout_2018_h2_universe,instagram_android_stories_sticker_tray_redesign,ig_android_edit_location_page_info,ig_android_felix_video_upload_length,ig_android_video_segment_resume_policy_universe,ig_android_igsystrace_universe,ig_android_direct_split_reshares,ig_android_igtv_banner_changes,ig_android_dash_for_vod_universe,ig_android_new_highlight_button_text,ig_android_video_call_participant_state_caller_universe,ig_android_story_ads_default_long_video_duration,ig_android_stories_camera_enhancements,ig_android_feed_stale_check_interval,ig_find_loaded_classes,ig_android_interactions_realtime_typing_indicator_and_live_comments,ig_android_video_live_trace_universe,ig_android_stories_gallery_improvements,ig_clos
e_friends_v4_global,ig_android_stories_large_reel_navigation,ig_android_prefetch_notification_data,ig_android_3pspp,ig_android_direct_new_intro_card,ig_android_direct_pending_media,ig_camera_ar_image_transform_library,ig_android_live_share_post_live_universe,ig_android_comments_composer_newline_universe,ig_android_direct_mutation_manager_iris,ig_android_stories_gif_sticker,ig_android_interactions_feed_dwell_universe,ig_camera_android_superzoomv3_attribution_universe,ig_android_stories_posting_offline_ui,ig_camera_android_superzoomv3_universe,ig_android_account_hierarchy_account_association_signal_upload_kill_switch,ig_android_offline_mode_holdout,ig_android_comments_direct_reply_to_author,ig_android_video_streaming_upload_universe,ig_direct_holdout_h1_2019,ig_android_stepper_header,ig_android_family_bridge_discover,ig_direct_report_conversation_universe,igds_android_listrow_migration_universe,ig_android_camera_sdk_check_gl_surface_r2,ig_promote_story_insights_entry_universe,ig_android_http_service_same_thread,ig_challenge_general_v2,ig_android_expired_build_lockout,ig_android_felix_keep_video_view,ig_feed_video_autoplay_tap_threshold,ig_android_vpvd_impressions_universe,ig_android_stories_reel_interactive_tap_target_size,ig_android_rendering_controls,ig_android_os_version_blocking,ig_promote_fix_expired_fb_accesstoken_android_universe,ig_android_stories_combined_asset_search,ig_android_interactions_emoji_extension_followup_universe,ig_android_shopping_native_catalog_selection,ig_android_profile_unified_follow_view,ig_android_igtv_no_badge,ig_android_unfollow_from_main_feed_v2,ig_android_livewith_liveswap_optimization_universe,ig_promote_video_retry_universe,ig_android_vc_participant_state_callee_universe,ig_helium_v1,ig_android_buffered_analytics_logger_thread_safe,ig_android_fb_connect_follow_invite_flow,ig_android_video_stitch_after_segmenting_universe,ig_android_enable_swipe_to_dismiss_for_all_dialogs,ig_android_business_cross_post_with_biz_id_infra,ig_android_paid_branded_content_rendering,ig_android_rage_shake_whitelist,ig_android_low_data_mode_backup_4,ig_mi_analytics_uploader_diagnostics,ig_android_shopping_pdp_craft,ig_android_ad_connection_manager_universe,ig_android_skip_button_content_on_connect_fb_universe,ig_android_reset_to_feed_from_background,ig_android_ad_watchbrowse_carousel_universe,android_cameracore_ig_gl_oom_fixes_universe,ig_android_video_feed_universe,ig_android_hybrid_bitmap_version_2,ig_android_update_items_checks,ig_android_interactions_mention_search_presence_dot_universe,ig_android_direct_app_reel_grid_search,ig_android_live_disable_speed_test_ui_timeout_universe,ig_android_hashtag_page_reduced_related_items,ig_android_direct_mutation_manager_media_2,ig_direct_reshare_sharesheet_ranking,ig_android_image_fail_callback_fix_universe,ig_android_igtv_reshare,ig_direct_reshare_search_universe,ig_android_shopping_pdp_platformization,ig_branded_content_share_to_facebook,ig_android_building_aymf_universe,ig_android_stories_viewer_as_modal_high_end_launch,ig_android_collect_os_usage_events_universe,ig_android_shopping_product_appeals_universe,ig_android_direct_mqtt_send,ig_android_business_profile_share_link_universe,ig_android_reliability_leak_fixes_h2_2018,ig_promote_unified_insights_universe,ig_android_global_prefetch_scheduler,ig_fbns_shared,ig_android_stories_reel_media_item_automatic_retry,ig_android_interactions_composer_extensions_universe,ig_android_cache_timespan_objects,ig_android_rn_ads_manager_universe,ig_smb_ads_click_to_direct,ig_android_foreground_location
_collection,ig_kill_connectivity_change_receiver,ig_android_pending_actions_serialization,ig_android_2018_h1_hashtag_report_universe,ig_android_new_camera_design_universe,ig_android_prefetch_carousels_on_swipe_universe,ig_android_ads_history_universe,ig_fb_graph_differentiation_top_k_fb_coefficients,ig_explore_2018_topic_channel_navigation_android_universe,ig_android_shopping_profile_tab_universe,ig_android_hashtag_unfollow_from_main_feed,ig_android_ad_watchmore_entry_point_universe,ig_android_stories_feedback_badging_universe,ig_android_low_latency_consumption_universe,ig_android_graphql_survey_new_proxy_universe,ig_android_resumable_downloads_logging_universe,ig_direct_recipients_search_universe,ig_android_scheduled_executor,ig_android_fblocation_universe,ig_promote_rename_to_boost_universe,ig_android_early_storyrequest,ig_android_ad_holdout_watchandmore_universe,ig_android_felix_insights,ig_android_interests_netego_dismiss,ig_android_realtime_always_start_connection_on_condition_universe,ig_android_split_contacts_list,ig_android_igtv_always_show_browse_ui,ig_android_always_use_server_recents,ig_android_carousel_prefetch_bumping,ig_fbns_kill_switch,ig_android_direct_send_thread_summary_fix_universe,ig_android_video_fix_logger,ig_stories_question_sticker_music_format_prompt,ig_mi_extra_bundle_investigation_universe,ig_camera_android_segmentation_qe2_universe,ig_android_direct_media_forwarding,ig_android_stories_close_friends_disable_first_time_badge,ig_android_reel_viewer_fetch_missing_reels_universe,ig_android_fb_link_ui_polish_universe,ig_android_signup_error_test,ig_android_video_webrtc_textureview,ig_android_business_promote_tooltip,mi_viewpoint_viewability_universe,ig_android_volume_controls,ig_xplat_shopping_cataloglist,ig_android_interactions_in_feed_comment_view_universe,ig_android_biz_category_prefill_universe,ig_android_pigeon_sampling,ig_android_gallery_high_quality_photo_thumbnails,ig_android_show_weekly_ci_upsell_limit,ig_android_tagging_video_preview,ig_direct_android_reply_modal_universe,ig_ei_option_setting_universe,ig_perf_android_holdout,ig_direct_core_holdout_q1_2018,ig_promote_insights_video_views_universe,ig_android_list_redesign,ig_android_claim_location_page,ig_android_search_normalization,ig_android_not_decoding_prefetch,ig_smb_review_screen_content_update_universe,ig_android_category_search_edit_profile,ig_android_direct_forward_messages_universe,ig_android_pbia_proxy_profile_universe,ig_android_cover_frame_rendering,ig_android_feed_post_sticker_alt,ig_camera_android_segmentation_enabled_universe,ig_android_shopping_profile_shop_redesign,ig_android_upload_retry_job_service,ig_android_stories_better_error_state_handling,ig_android_vc_in_app_notification_universe,ig_android_persistent_duplicate_notif_checker_user_based,ig_android_react_native_ota,ig_android_profile_memories_universe,ig_fb_graph_differentiation_control,ig_android_low_data_mode_backup_3,android_ig_camera_ar_asset_manager_improvements_universe,ig_android_explore_discover_people_entry_point_universe,ig_android_qcc_perf,ig_android_video_cache_evictor_universe,ig_android_limit_ashmem_cleanup_thread,ig_android_direct_business_holdout,ig_android_promote_feed_to_stories_universe,ig_media_geo_gating,ig_music_dash,ig_android_media_as_sticker,ig_android_internal_sticker_universe,ig_android_video_watermark_universe,ig_android_live_ama_viewer_universe,ig_android_live_streaming_experimental_abr_universe,ig_android_cronet_stack,ig_android_mention_sharing_from_reel_viewer_universe,ig_android_warm_headline_text,ig_and
roid_new_block_flow,ig_android_story_landscape_ad_new_layout_universe,ig_android_long_form_video,ig_android_network_trace_migration,ig_android_story_ads_direct_cta_universe,ig_android_live_subscribe_user_level_universe,ig_android_ad_iab_qpl_kill_switch_universe,ig_android_fb_sync_options_universe,ig_android_saved_product_store,ig_android_stories_reappearing_tray_universe,ig_android_new_camera_design_container_animations_universe,ig_android_stories_disable_highlights_media_preloading,ig_fb_graph_differentiation,ig_android_logging_metric_universe_v2,ig_android_stories_persistent_tray_universe,ig_android_screen_recording_bugreport_universe,ig_android_friends_sticker,ig_android_whats_app_contact_invite_universe,ig_android_feed_auto_share_to_facebook_dialog,ig_android_felix_creation_enabled,ig_direct_android_larger_media_reshare_style,ig_android_stories_auto_retry_reels_media_and_segments,ig_android_image_mem_cache_strong_ref_universe,ig_direct_android_inbox_filter_for_all_universe,ig_android_suggested_highlights,ig_direct_giphy_gifs_rating,ig_stories_holdout_h2_2017,ig_android_fbpage_on_profile_side_tray,ig_android_video_server_coverframe,ig_android_video_controls_universe,ig_camera_holdout_h1_2018_performance,ig_android_stories_music_search_typeahead,ig_android_inappnotification_rootactivity_tweak,ig_android_local_info_page,ig_camera_holdout_h1_2018_product,ig_shopping_checkout_mvp_experiment,ig_android_hide_type_mode_camera_button,ig_timestamp_public_test,ig_android_webrtc_renderer_reuse_universe,ig_android_business_conversion_value_prop_v2,ig_android_live_wave_production_universe,ig_android_share_publish_page_universe,ig_android_question_sticker_replied_state,ig_android_early_feedrequest,ig_android_hashtag_search_suggestions,ig_android_hashtag_discover_tab,ig_android_leak_detector_upload_universe,ig_android_hashtag_page_support_places_tab,ig_android_cover_frame_retrieval,ig_android_live_bg_download_face_filter_assets_universe,ig_android_direct_continuous_capture,ig_android_search_hashtag_badges,ig_android_direct_tabbed_media_picker,ig_android_video_ssim_report_universe,ig_android_direct_view_more_qe,ig_camera_android_effect_info_bottom_sheet_universe,ig_promote_add_payment_navigation_universe,ig_android_direct_voice_messaging,ig_android_signup_refactor_santity,ig_android_profile_lazy_load_carousel_media,ig_android_reel_dashboard_camera_entry_point,ig_android_su_follow_back,ig_android_direct_reel_options_entry_point_2_universe,ig_android_ad_redesign_iab_universe,ig_android_universe_reel_video_production,ig_android_power_metrics,ig_android_modal_activity_no_animation_fix_universe,ig_android_bitmap_cache_executor_size,ig_android_direct_log_badge_count,ig_android_direct_remove_visual_messages_nuxs,ig_android_creation_new_post_title,ig_camera_fast_tti_universe,ig_android_non_square_first,ig_promote_media_picker_universe,ig_android_direct_thread_content_picker,ig_android_vc_fix_joining_other_call_with_new_intent,ig_android_drawable_usage_logging_universe,ig_android_reel_viewer_data_buffer_size,ig_android_hashtag_contextual_feed_account_recs,ig_traffic_routing_universe,ig_promote_political_ads_universe,ig_android_clarify_invite_options,ig_android_igtv_aspect_ratio_limits,ig_android_effect_tray_background,ig_android_disable_scroll_listeners,ig_android_profile_neue_universe,ig_android_create_page_on_top_universe,ig_stories_selfie_sticker,ig_android_video_upload_quality_qe1,ig_android_mobile_http_flow_sampling_weight_universe,ig_android_stories_music_awareness_universe,ig_android_live_nerd_stats_unive
rse,ig_android_video_cache_size_universe,ig_camera_android_focus_attribution_universe,ig_android_promote_story_to_story_universe,ig_android_igds_edit_profile_fields,ig_android_reel_impresssion_cache_key_qe_universe,ig_video_holdout_h2_2017,ig_android_immersive_viewer_follow,ig_android_sso_family_key_universe,ig_android_direct_share_sheet_custom_fast_scroller,ig_android_external_gallery_import_affordance,ufi_share,ig_android_sonar_prober_universe,ig_android_swipe_up_area_universe,ig_android_video_segmented_upload_universe,ig_perf_android_holdout_2018_h1,ig_android_live_special_codec_size_list,ig_android_view_info_universe,ig_android_shopping_combined_tagging_universe,ig_android_cold_start_cool_off_universe,ig_android_shopping_video_product_tag_creation,ig_android_startup_sampling_rate_universe,ig_android_igtv_new_browse,ig_android_story_import_intent,ig_android_direct_inbox_typing_indicator,ig_android_edit_highlight_redesign,ig_android_insta_video_broadcaster_infra_perf,ig_android_live_webrtc_livewith_params,ig_android_show_fb_name_universe,ig_android_fix_prepare_direct_push,ig_android_stories_viewer_responsiveness_universe,ig_android_interactions_show_verified_badge_for_preview_comments_universe,ig_android_stories_skip_preload_to_launch_viewer,ig_android_live_start_live_button_universe,ig_android_direct_speed_cam_univ,ig_android_profile_menu_reorder_universe,ig_android_acra_double_oom_reservation,ig_android_live_viewer_tap_to_hide_chrome_universe,ig_android_vc_sounds_universe,ig_android_igtv_native_pip,ig_android_igtv_refresh_tv_guide_interval,ig_direct_inbox_search_universe,ig_android_experimental_onetap_dialogs_universe,ig_android_pendingmedia_retry,ig_android_settings_redesign,ig_android_direct_search_story_recipients_universe,ig_android_fb_sharing_shortcut,ig_android_direct_segmented_video,ig_android_grid_cell_count,ig_android_ad_watchinstall_universe,ig_android_realtime_manager_optimization,ig_android_shortcuts,ig_android_comments_notifications_universe,ig_android_vc_webrtc_params,ig_android_critical_path_manager_universe,ig_android_canvas_tilt_to_pan_universe,ig_android_feed_sharing_memory_leak,ig_android_ad_account_top_followers_universe,ig_android_offline_reel_feed,ig_promote_review_screen_universe,ig_android_vc_end_screen_user_feedback_universe,ig_android_vc_use_timestamp_normalizer,native_contact_invites_universe,ig_android_feed_post_sticker,ig_android_facebook_crosspost,ig_android_local_2018_h2_holdout,ig_android_stories_tray_refresh_universe,ig_android_viewer_tapback_size_universe,ig_android_nametag_save_experiment_universe,ig_promote_estimated_clicks_universe,ig_business_profile_18h1_holdout_universe,ig_android_nearby_venues_location_timeout_fallback,ig_android_category_clickable_rows_ui,ig_android_photo_invites,ig_interactions_h2_2018_team_holdout_universe,ig_branded_content_tagging_upsell,ig_android_ccu_jobscheduler_inner,ig_android_story_ads_instant_sub_impression_universe,ig_explore_2018_finite_chain_android_universe,ig_android_gqls_typing_indicator,ig_android_direct_visual_message_prefetch_count_universe,ig_android_webrtc_encoder_factory_universe,ig_ads_increase_connection_step2_v2,ig_scroll_by_two_cards_for_suggested_invite_universe,ig_android_internal_collab_save' # noqa
PypiClean
/dq-notebook-6.0.2.tar.gz/dq-notebook-6.0.2/docs/source/frontend_config.rst
.. _frontend_config: Configuring the notebook frontend ================================= .. note:: The ability to configure the notebook frontend UI and preferences is still a work in progress. This document is a rough explanation of how you can persist some configuration options for the notebook JavaScript. There is no exhaustive list of all the configuration options as most options are passed down to other libraries, which means that invalid configuration can be ignored without any error messages. How front end configuration works --------------------------------- The frontend configuration system works as follows: - get a handle on a configurable JavaScript object. - access its configuration attribute. - update its configuration attribute with a JSON patch. Example - Changing the notebook's default indentation ----------------------------------------------------- This example explains how to change the default setting ``indentUnit`` for CodeMirror Code Cells:: var cell = Jupyter.notebook.get_selected_cell(); var config = cell.config; var patch = { CodeCell:{ cm_config:{indentUnit:2} } } config.update(patch) You can enter the previous snippet in your browser's JavaScript console once. Then reload the notebook page in your browser. Now, the preferred indent unit should be equal to two spaces. The custom setting persists and you do not need to reissue the patch on new notebooks. ``indentUnit``, used in this example, is one of the many `CodeMirror options <https://codemirror.net/doc/manual.html#option_indentUnit>`_ which are available for configuration. Example - Restoring the notebook's default indentation ------------------------------------------------------ If you want to restore a notebook frontend preference to its default value, you will enter a JSON patch with a ``null`` value for the preference setting. For example, let's restore the indent setting ``indentUnit`` to its default of four spaces. Enter the following code snippet in your JavaScript console:: var cell = Jupyter.notebook.get_selected_cell(); var config = cell.config; var patch = { CodeCell:{ cm_config:{indentUnit: null} // only change here. } } config.update(patch) Reload the notebook in your browser and the default indent should again be four spaces. Persisting configuration settings --------------------------------- Under the hood, Jupyter will persist the preferred configuration settings in ``~/.jupyter/nbconfig/<section>.json``, with ``<section>`` taking various values depending on the page where the configuration is issued. ``<section>`` can take various values like ``notebook``, ``tree``, and ``editor``. A ``common`` section contains configuration settings shared by all pages.
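The same patch can also be written from Python instead of the browser console. As a rough, hypothetical sketch (it assumes the classic notebook's ``ConfigManager`` is importable from ``notebook.services.config`` and that code cells read the ``notebook`` section), the following should persist the two-space indentation preference to ``~/.jupyter/nbconfig/notebook.json``::

    from notebook.services.config import ConfigManager

    cm = ConfigManager()
    # Same JSON patch as the JavaScript example above:
    # two-space indentation for CodeMirror code cells.
    cm.update('notebook', {'CodeCell': {'cm_config': {'indentUnit': 2}}})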
PypiClean
/perfume-0.2.tar.gz/perfume-0.2/README.md
Perfume === Perfume aims to make Flask apps more object-oriented-friendly by providing a base class to create them. It's BSD licensed. Usage: - inherit from Perfume - decorate your methods with route(path) Perfume is Easy --- ```python from perfume import Perfume, route class Hello(Perfume): @route('/') def hello(self): return "Hello World !" if __name__ == "__main__": Hello().run() ``` And Easy to Setup --- ```bash $ pip install Perfume $ python hello.py * Running on http://localhost:5000/ ```
PypiClean
/bicycle_bell_seds_cli-0.0.3-py3-none-any.whl/seds_cli/seds_lib/data/time/delay.py
from dataclasses import dataclass @dataclass(frozen=True) class ReceiverDelay: """Data about the delay concerning the receiver worker. Attributes callback_offset_estimated_time (float) in seconds, needed to process the callback """ callback_offset_estimated_time: float @property def delay(self): """Time that is relevant for the delay between input and output of the overall system. Notes: callback_offset_estimated_time: Processed asynchronously, thus only relevant once for the latest input element in the chunk Returns: callback_offset_estimated_time """ return self.callback_offset_estimated_time @dataclass(frozen=True) class ChunkDelay: """Data about the delay concerning the chunk. Attributes processing_time (float) in seconds, needed mainly for the concatenation of the elements max_in_buffer_waiting_time (float) in seconds, time the oldest element in the buffer waited until chunk processing """ processing_time: float max_in_buffer_waiting_time: float @property def delay(self): """Time that is relevant for the delay between input and output of the overall system. Notes: processing_time: Elements are prepared by the Predictor thread to form an AudioChunk max_in_buffer_waiting_time: Should be used for more detailed interval specification of the delay. Including this time is only valid the worst-case. This margin can be lowered by parallel Predictor processes (NotYetImplemented). Returns: processing_time + max_in_buffer_waiting_time """ return self.processing_time + self.max_in_buffer_waiting_time @dataclass(frozen=True) class PredictorDelay: """Data about the delay within the Predictor Thread. Attributes chunk_delay (ChunkDelay) delay object concerning the chunk inference_time (float) in seconds, time needed for running an inference_time step on the model, including preprocessing """ chunk_delay: ChunkDelay inference_time: float @property def delay(self): """Time that is relevant for the delay between input and output of the overall system. Notes: chunk_delay.delay: Overall relevant chunk delay inference_time: Time needed of the Predictor Thread for getting a result for the current chunk/window. Returns: relevant_delay_of(chunk_delay) + inference_time """ return self.chunk_delay.delay + self.inference_time @dataclass(frozen=True) class Delay: """Data about the delay of all parts of the system. Attributes receiving_delay (ReceiverDelay) delay object concerning the Receiver (Thread) predicting_delay (PredictorDelay) delay object concerning the Predictor (Thread) """ receiving_delay: ReceiverDelay predicting_delay: PredictorDelay @property def delay(self): """Overall time that is relevant for the delay between input and output of the overall system. Notes: receiving_delay.delay: Overall relevant receiving_delay delay predicting_delay.delay: Overall relevant predicting_delay delay Returns: relevant_delay_of(receiving_delay) + relevant_delay_of(predicting_delay) """ return self.receiving_delay.delay + self.predicting_delay.delay
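# --- Usage sketch (illustrative only, not part of the original module) ------
# Shows how the frozen dataclasses above compose into the overall system
# delay; the numeric values below are invented for illustration.
if __name__ == '__main__':
    _receiver = ReceiverDelay(callback_offset_estimated_time=0.010)
    _chunk = ChunkDelay(processing_time=0.005, max_in_buffer_waiting_time=0.020)
    _predictor = PredictorDelay(chunk_delay=_chunk, inference_time=0.030)
    _total = Delay(receiving_delay=_receiver, predicting_delay=_predictor)
    # receiver (0.010) + chunk (0.005 + 0.020) + inference (0.030) = 0.065 seconds
    print(_total.delay)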
PypiClean
/e-fonenana-frontend-20190305.1.tar.gz/e-fonenana-frontend-20190305.1/hass_frontend_es5/workbox-v3.6.3/workbox-google-analytics.prod.js
this.workbox=this.workbox||{},this.workbox.googleAnalytics=function(e,n,t,o,r,c,s){"use strict";try{self.workbox.v["workbox:google-analytics:3.6.3"]=1}catch(e){}const l=/^\/(\w+\/)?collect/,i=(a=babelHelpers.asyncToGenerator(function*(e){return yield new Promise(function(n,t){const o=new FileReader;o.onloadend=function(){return n(o.result)},o.onerror=function(){return t(o.error)},o.readAsText(e)})}),function(e){return a.apply(this,arguments)});var a;const w=e=>(u=babelHelpers.asyncToGenerator(function*(n){let t,{url:o,requestInit:r,timestamp:c}=n;if(o=new URL(o),r.body){const e=r.body instanceof Blob?yield i(r.body):r.body;t=new URLSearchParams(e)}else t=o.searchParams;const s=c-(Number(t.get("qt"))||0),l=Date.now()-s;if(t.set("qt",l),e.parameterOverrides)for(const n of Object.keys(e.parameterOverrides)){const o=e.parameterOverrides[n];t.set(n,o)}"function"==typeof e.hitFilter&&e.hitFilter.call(null,t),r.body=t.toString(),r.method="POST",r.mode="cors",r.credentials="omit",r.headers={"Content-Type":"text/plain"},n.url=`${o.origin}${o.pathname}`}),function(e){return u.apply(this,arguments)});var u;return e.initialize=((e={})=>{const i=t.cacheNames.getGoogleAnalyticsName(e.cacheName),a=new n.Plugin("workbox-google-analytics",{maxRetentionTime:2880,callbacks:{requestWillReplay:w(e)}}),u=[(e=>{const n=new c.NetworkFirst({cacheName:e});return new o.Route(({url:e})=>"www.google-analytics.com"===e.hostname&&"/analytics.js"===e.pathname,n,"GET")})(i),(e=>{const n=new c.NetworkFirst({cacheName:e});return new o.Route(({url:e})=>"www.googletagmanager.com"===e.hostname&&"/gtag/js"===e.pathname,n,"GET")})(i),...(e=>{const n=({url:e})=>"www.google-analytics.com"===e.hostname&&l.test(e.pathname),t=new s.NetworkOnly({plugins:[e]});return[new o.Route(n,t,"GET"),new o.Route(n,t,"POST")]})(a)],f=new r.Router;for(const e of u)f.registerRoute(e);self.addEventListener("fetch",e=>{const n=f.handleRequest(e);n&&e.respondWith(n)})}),e}({},workbox.backgroundSync,workbox.core._private,workbox.routing,workbox.routing,workbox.strategies,workbox.strategies); //# sourceMappingURL=workbox-google-analytics.prod.js.map
PypiClean
/widgetastic.patternfly5-23.8.28.0-py3-none-any.whl/widgetastic_patternfly5/components/pagination.py
import math from contextlib import contextmanager from selenium.webdriver.common.keys import Keys from widgetastic.utils import ParametrizedLocator from widgetastic.widget import GenericLocatorWidget from widgetastic.widget import Text from widgetastic.widget import TextInput from widgetastic.widget import View from .menus.options_menu import OptionsMenu class PaginationNavDisabled(Exception): pass class BasePagination: """Represents the Patternfly pagination. https://www.patternfly.org/components/pagination """ DEFAULT_LOCATOR = ( ".//div[contains(@class, '-c-pagination') and not(contains(@class, 'pf-m-compact'))]" ) _first = GenericLocatorWidget(".//button[contains(@data-action, 'first')]") _previous = GenericLocatorWidget(".//button[contains(@data-action, 'previous')]") _next = GenericLocatorWidget(".//button[contains(@data-action, 'next')]") _last = GenericLocatorWidget(".//button[contains(@data-action, 'last')]") _options = OptionsMenu() _items = Text( ".//span[contains(@class, '-c-menu-toggle__text') or " "contains(@class, '-c-options-menu__toggle-text')]" ) _current_page = TextInput(locator=".//input[@aria-label='Current page']") _total_pages = Text( ".//div[contains(@class, '-c-pagination__nav-page-select')]/span[text()='of']" ) @property def is_enabled(self): """Overriding is_enabled property. Returns ``True`` when pagination dropdown button is enabled along with next & last button. """ el = self.browser.element(self._last) last_flag = el.is_enabled() if el.is_displayed() else True return ( self.browser.element(self._options.BUTTON_LOCATOR).is_enabled() and self.browser.element(self._next).is_enabled() and last_flag ) @property def cached_per_page_value(self): return getattr(self, "_cached_per_page_value", None) @cached_per_page_value.setter def cached_per_page_value(self, value): self._cached_per_page_value = value @property def is_first_disabled(self): """Returns boolean detailing if the first page button is disabled.""" return not self.browser.element(self._first).is_enabled() def first_page(self): """Clicks on the first page button.""" if self.no_items or self.is_first_disabled: raise PaginationNavDisabled("first") self._first.click() @property def is_previous_disabled(self): """Returns boolean detailing if the previous page button is disabled.""" return not self.browser.element(self._previous).is_enabled() def previous_page(self): """Clicks the previous page button.""" if self.no_items or self.is_previous_disabled: raise PaginationNavDisabled("previous") self._previous.click() @property def is_next_disabled(self): """Returns boolean detailing if the next page button is disabled.""" return not self.browser.element(self._next).is_enabled() def next_page(self): """Clicks the next page button.""" if self.is_next_disabled: raise PaginationNavDisabled("next") self._next.click() @property def is_last_disabled(self): """Returns boolean detailing if the last page button is disabled.""" return not self.browser.element(self._last).is_enabled() def last_page(self): """Clicks the last page button.""" if self.is_last_disabled: raise PaginationNavDisabled("last") self._last.click() @property def current_page(self): """Returns an int of the current page number.""" return int(self._current_page.value) @property def total_pages(self): """Returns int detailing the total number of pages.""" return int(self._total_pages.text.strip().split()[1]) @property def displayed_items(self): """Returns a string detailing the number of displayed items information. 
example "1 - 20 of 523 items" """ items_string = self._items.text first_num, last_num = items_string.split("of")[0].split("-") return int(first_num.strip()), int(last_num.strip()) @property def total_items(self): """Returns an int of the total number of items.""" items_string = self._items.text return int(items_string.split("of")[1].split()[0]) @property def per_page_options(self): """Returns an iterable of the available pagination options.""" return self._options.items @property def no_items(self): """Returns True if the pagination has no items.""" return not self.total_items @property def current_per_page(self): """Returns an integer detailing how many items are shown per page.""" if self.cached_per_page_value: return self.cached_per_page_value if self.no_items: return 0 else: return int(self._options.selected_items[0].split()[0]) @contextmanager def cache_per_page_value(self): """ A context manager that can be used to prevent looking up the 'per page' value. This adds some efficiencies when iterating over pages or in cases where it is safe to assume that the "per page" setting is not going to change and it's not necessary to re-read it from the browser repeatedly. """ self.cached_per_page_value = None self.cached_per_page_value = self.current_per_page yield self.cached_per_page_value = None def set_per_page(self, count): """Sets the number of items per page. (Will cast to str)""" value = str(count) value_per_page = "{} per page".format(value) items = self._options.items if value_per_page in items: self._options.item_select(value_per_page) elif value in items: self._options.item_select(value) else: raise ValueError( "count '{}' is not a valid option in the pagination dropdown".format(count) ) def go_to_page(self, value): """Navigate to a custom page number.""" self._current_page.fill(value) self.browser.send_keys(Keys.RETURN, self._current_page) def __iter__(self): if self.current_page > 1: self.first_page() self._page_counter = 0 return self def __next__(self): if self._page_counter < self.total_pages: self._page_counter += 1 if self._page_counter > 1: self.next_page() return self._page_counter else: raise StopIteration class Pagination(BasePagination, View): ROOT = ParametrizedLocator("{@locator}") def __init__(self, parent, locator=None, logger=None): super().__init__(parent=parent, logger=logger) if not locator: locator = self.DEFAULT_LOCATOR self.locator = locator class BaseCompactPagination: @property def is_first_disabled(self): """Compact paginator has no 'first' button.""" return self.is_previous_disabled def first_page(self): while not self.is_previous_disabled: self.previous_page() @property def is_last_disabled(self): """Compact paginator has no 'last' button.""" return self.is_next_disabled def last_page(self): """Compact paginator has no "last" button, so iterates until last is reached.""" while not self.is_next_disabled: self.next_page() @property def current_page(self): """ Calculate the current page we are on. Compact pagination does not explicitly show this, so use some math. For example, if "per page" is set to '20', we know that a page displaying items: 1-20 is on 20/20 = page 1 21-40 is on page 40/20 = page 2 41-60 is on page 60/20 = page 3 and so on. """ if self.no_items: return 0 else: _, last_num = self.displayed_items return math.ceil(last_num / self.current_per_page) @property def total_pages(self): """ Calculate total page count. Compact pagination does not explicitly show the page count, so use some math. 
""" if self.no_items: return 0 else: return math.ceil(self.total_items / self.current_per_page) class CompactPagination(BaseCompactPagination, Pagination): DEFAULT_LOCATOR = ( ".//div[contains(@class, '-c-pagination') and contains(@class, 'pf-m-compact')]" )
PypiClean
/jupyterlab_remote_contents-0.1.1.tar.gz/jupyterlab_remote_contents-0.1.1/node_modules/load-json-file/readme.md
# load-json-file [![Build Status](https://travis-ci.org/sindresorhus/load-json-file.svg?branch=master)](https://travis-ci.org/sindresorhus/load-json-file) > Read and parse a JSON file [Strips UTF-8 BOM](https://github.com/sindresorhus/strip-bom), uses [`graceful-fs`](https://github.com/isaacs/node-graceful-fs), and throws more [helpful JSON errors](https://github.com/sindresorhus/parse-json). ## Install ``` $ npm install --save load-json-file ``` ## Usage ```js const loadJsonFile = require('load-json-file'); loadJsonFile('foo.json').then(json => { console.log(json); //=> {foo: true} }); ``` ## API ### loadJsonFile(filepath) Returns a promise for the parsed JSON. ### loadJsonFile.sync(filepath) Returns the parsed JSON. ## Related - [write-json-file](https://github.com/sindresorhus/write-json-file) - Stringify and write JSON to a file atomically ## License MIT © [Sindre Sorhus](https://sindresorhus.com)
PypiClean
/cli-bdd-0.0.4.tar.gz/cli-bdd-0.0.4/cli_bdd/core/steps/file.py
import os import shutil from hamcrest import assert_that, equal_to from cli_bdd.core.steps.base import StepBase class CopyFileOrDirectory(StepBase): """Copies a file or directory. Examples: ```gherkin Given I copy a file from "/tmp/old.txt" to "/var/new.txt" Given I copy the file named "hello.txt" to "/var/" Given I copy a directory from "/tmp/hello/" to "/var/" ``` """ type_ = 'given' sentence = ( 'I copy (a|the) (?P<file_or_directory>(file|directory))' '( (named|from))? "(?P<source>[^"]*)" to "(?P<destination>[^"]*)"' ) def step(self, file_or_directory, source, destination): if file_or_directory == 'file': shutil.copyfile(source, destination) else: shutil.copytree(source, destination) class MoveFileOrDirectory(StepBase): """Moves a file or directory. Examples: ```gherkin Given I move a file from "/tmp/old.txt" to "/var/new.txt" Given I move the file named "hello.txt" to "/var/" Given I move a directory from "/tmp/hello/" to "/var/" ``` """ type_ = 'given' sentence = ( 'I move (a|the) (?P<file_or_directory>(file|directory))' '( (named|from))? "(?P<source>[^"]*)" to "(?P<destination>[^"]*)"' ) def step(self, file_or_directory, source, destination): shutil.move(source, destination) class CreateDirectory(StepBase): """Creates directory. Examples: ```gherkin Given a directory "/tmp/test/" Given the directory named "/tmp/test/" ``` """ type_ = 'given' sentence = ( '(a|the) directory' '( named)? "(?P<dir_path>[^"]*)"' ) def step(self, dir_path): if not os.path.exists(dir_path): os.makedirs(dir_path) class ChangeDirectory(StepBase): """Change directory. Examples: ```gherkin Given I cd to "/tmp/test/" ``` """ type_ = 'given' sentence = 'I cd to "(?P<dir_path>[^"]*)"' def step(self, dir_path): os.chdir(dir_path) class CreateFileWithContent(StepBase): """Creates a file. Examples: ```gherkin Given a file "/tmp/test/" with "some content" Given the file named "/tmp/test/" with "another content" ``` """ type_ = 'given' sentence = ( '(a|the) file' '( named)? "(?P<file_path>[^"]*)" with "(?P<file_content>[^"]*)"' ) def step(self, file_path, file_content): with open(file_path, 'wt') as ff: ff.write(file_content) class CreateFileWithMultilineContent(StepBase): '''Creates a file with multiline content. Examples: ```gherkin Given a file "/tmp/test/" with: """ line one line two line three """ Given a file named "/tmp/test/" with: """ line one line two line three """ ``` ''' type_ = 'given' sentence = ( '(a|the) file' '( named)? "(?P<file_path>[^"]*)" with' ) def step(self, file_path): with open(file_path, 'wt') as ff: ff.write(self.get_text()) class CheckFileOrDirectoryExist(StepBase): """Checks whether file or directory exist. Examples: ```gherkin Then a file "/var/new.txt" should exist Then the file named "/var/new.txt" should not exist Then the directory "/var/" should not exist ``` """ type_ = 'then' sentence = ( '(a|the) (?P<file_or_directory>(file|directory))' '( (named|from))? "(?P<path>[^"]*)" ' 'should( (?P<should_not>not))? 
exist' ) def step(self, file_or_directory, path, should_not=None): assert_that( os.path.exists(path), equal_to(not should_not) ) base_steps = [ { 'func_name': 'copy_file_or_directory', 'class': CopyFileOrDirectory }, { 'func_name': 'move_file_or_directory', 'class': MoveFileOrDirectory }, { 'func_name': 'create_directory', 'class': CreateDirectory }, { 'func_name': 'change_directory', 'class': ChangeDirectory }, { 'func_name': 'create_file_with_content', 'class': CreateFileWithContent }, { 'func_name': 'create_file_with_multiline_content', 'class': CreateFileWithMultilineContent }, { 'func_name': 'check_file_or_directory_exist', 'class': CheckFileOrDirectoryExist } ]
PypiClean
/xs_transformers-1.0.7-py3-none-any.whl/xs_transformers/models/nezha/__init__.py
# Copyright 2022 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import TYPE_CHECKING # rely on isort to merge the imports from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_available, is_torch_available, ) _import_structure = { "configuration_nezha": ["NEZHA_PRETRAINED_CONFIG_ARCHIVE_MAP", "NezhaConfig"], } try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: _import_structure["modeling_nezha"] = [ "NEZHA_PRETRAINED_MODEL_ARCHIVE_LIST", "NezhaForNextSentencePrediction", "NezhaForMaskedLM", "NezhaForPreTraining", "NezhaForMultipleChoice", "NezhaForQuestionAnswering", "NezhaForSequenceClassification", "NezhaForTokenClassification", "NezhaModel", "NezhaPreTrainedModel", ] if TYPE_CHECKING: from .configuration_nezha import NEZHA_PRETRAINED_CONFIG_ARCHIVE_MAP, NezhaConfig try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .modeling_nezha import ( NEZHA_PRETRAINED_MODEL_ARCHIVE_LIST, NezhaForMaskedLM, NezhaForMultipleChoice, NezhaForNextSentencePrediction, NezhaForPreTraining, NezhaForQuestionAnswering, NezhaForSequenceClassification, NezhaForTokenClassification, NezhaModel, NezhaPreTrainedModel, ) else: import sys sys.modules[__name__] = _LazyModule( __name__, globals()["__file__"], _import_structure, module_spec=__spec__ )
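# --- Usage sketch (illustrative only, not part of the original module) ------
# Because of the ``_LazyModule`` indirection above, the public names can be
# imported directly from this subpackage, and the torch-backed modules are
# only loaded on first access.  Sketch (assumes torch is installed):
#
#     from xs_transformers.models.nezha import NezhaConfig, NezhaModel
#
#     config = NezhaConfig()
#     model = NezhaModel(config)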
PypiClean
/ipyvuetify-1.8.10.tar.gz/ipyvuetify-1.8.10/generate_source/node_modules/widget-gen/src/parsers/base.ts
import { Signal, ISignal } from '@phosphor/signaling'; import { IWidget, getWidgetRefs, Attributes } from '../core'; import { MSet } from '../setMethods'; export interface IParserConstructor { new (filename: string): Parser; } /** * The base parser class. * * A parser is something that generates widget definitions. * The pattern of use is as follows: * - Instantiate the parser with a filename. * - Connect consumers of widget definitions to the newWidget * signal. * - Call the start() method. Its promise will resolve once * a signal has been emitted for all found widgets. Note * that this does not entail that all consumers have finished * processing *if they perform async processing*. */ export abstract class Parser { /** * Initialize the parser. * * @param input String input to the parser, typically a filename */ constructor(protected input: string) { } /** * Start generating widget definitions. * * @returns {Promise<void>} A promise that will resolve once a * signal has been emitted for all found widgets. Note * that this does not entail that all consumers have finished * processing *if they perform async processing*. */ abstract start(): Promise<void>; /** * Find the names of all other *internal* widgets referenced by the * passed definition. Internal here means another widget that has/will * be parser by the parser. It should be valid to call before the * first time a newWidget signal is emitted. * * @param {IWidget} data The widget definition to inspect * @returns {MSet<string>} A set of widget names referenced */ resolveInternalRefs(properties: Attributes.Properties | undefined): MSet<string> { if (!properties) { return new MSet(); } let refs: MSet<string> = new MSet(); for (let propName of Object.keys(properties)) { let prop = properties[propName]; refs = refs.union(getWidgetRefs(prop)); } return this.widgetNames.intersection(refs); } /** * Signal emitted by parser when it finds a new widget definition. * * @readonly * @type {ISignal<this, IWidget>} */ get newWidget(): ISignal<this, IWidget> { return this._newWidget; } /** * A set of all widget names this parser finds. * * Note: This should be set (completed) before the * first time a newWidget signal is emitted! * * @type {MSet<string>} */ abstract readonly widgetNames: MSet<string>; protected _newWidget = new Signal<this, IWidget>(this); }
PypiClean
/django_cradmin-10.4.1.tar.gz/django_cradmin-10.4.1/django_cradmin/uicontainer/container.py
from django.conf import settings from django.forms.utils import flatatt from django_cradmin import renderable class NotBootsrappedError(Exception): """ Raised when trying to use features of :class:`.AbstractContainerRenderable` that requires is to have been bootstrapped. """ class AlreadyBootsrappedError(Exception): """ Raised when trying to :meth:`~.AbstractContainerRenderable.bootstrap` and already bootstrapped :class:`.AbstractContainerRenderable`. """ class NotAllowedToAddChildrenError(Exception): """ Raised when trying to add children to a :class:`.AbstractContainerRenderable` where :meth:`~.AbstractContainerRenderable.html_tag_supports_children` returns ``False``. """ class UnsupportedHtmlTagError(ValueError): """ Raised when providing an invalid ``html_tag`` kwarg to :class:`.AbstractContainerRenderable`. See :obj:`.AbstractContainerRenderable.supported_html_tags`. """ class InvalidBemError(ValueError): """ Raised when invalid BEM is supplied. """ class InvalidDomIdError(ValueError): """ Raised when invalid dom_id is supplied. """ class AbstractContainerRenderable(renderable.AbstractRenderableWithCss): """ Base class for all renderables in the uicontainer framework. This can not be used directly. You extend it, and at least override :meth:`.get_default_html_tag`, or use one of the subclasses. The most basic subclass is :class:`django_cradmin.uicontainer.div.Div`. .. attribute:: parent The parent AbstractContainerRenderable. Set in :meth:`.bootstrap`. The attribute does not exist if :meth:`.bootstrap` has not been run. Is ``None`` if this is the root of the container tree. .. attribute:: properties A dict of properties. These properties is copied down to the ``properties`` attribute of children (with the update-method, not full replace) in :meth:`.bootstrap`. This means that you can add properties in ``__init__()``, and make them available to any children recursively. """ template_name = 'django_cradmin/uicontainer/container.django.html' #: You can override this to specify a set of supported HTML tags #: for the ``html_tag`` attribute for :meth:`~.AbstractContainerRenderable.__init__`. #: This is useful to avoid typing errors. It should not be a big problem if you #: forget a tag that should be supported - developers can just create a subclass. #: #: If the value of this field is None, or any other value that is considered False by #: ``bool()``, we do not validate the ``html_tag`` kwarg. supported_html_tags = None def __init__(self, children=None, bem_block=None, bem_element=None, bem_variant_list=None, html_tag=None, css_classes_list=None, extra_css_classes_list=None, test_css_class_suffixes_list=None, role=False, dom_id=False, html_element_attributes=None, **kwargs): """ Args: children: List of children. Children must be objects of subclasses of :class:`.AbstractContainerRenderable`. css_classes_list (list): Override the :meth:`default css classes <.get_default_css_classes_list>` with your own list of css classes. extra_css_classes_list (list): Add extra css classes. This is appended to the css classes in the ``css_classes_list`` kwarg if that is specified, or appended to the css classes returned by :meth:`.get_default_css_classes_list`. role (str): The value of the role attribute. If this is not specified, we fall back on the value returned by :meth:`.get_default_role`. If both is ``False``, we do not render the role attribute. dom_id (str): The value of the id attribute. If this is not specified, we fall back on the value returned by :meth:`.get_default_dom_id`. 
If both is ``False``, we do not render the id attribute. html_element_attributes (dict): HTML element attributes to add to the HTML element. This adds attributes returned by :meth:`.get_html_element_attributes`. If this dict includes attributes returned by :meth:`.get_html_element_attributes`, the attributes specified in this kwarg takes presedense. The format of the dict is specified in :meth:`.get_html_element_attributes`. """ self.kwargs = kwargs self.validate_dom_id(dom_id=dom_id) self.validate_bem(bem_block=bem_block, bem_element=bem_element) self.validate_html_tag(html_tag=html_tag) self._childrenlist = [] self._virtual_childrenlist = [] self._is_bootstrapped = False self.properties = {} self._overridden_bem_block_or_element = bem_block or bem_element self._overridden_bem_variant_list = bem_variant_list self._overridden_role = role self._overridden_dom_id = dom_id self._overridden_html_tag = html_tag self._html_element_attributes = html_element_attributes self._overridden_css_classes_list = css_classes_list self._overridden_test_css_class_suffixes_list = test_css_class_suffixes_list self._extra_css_classes_list = extra_css_classes_list self.add_children(*self.prepopulate_children_list()) self.add_virtual_children(*self.prepopulate_virtual_children_list()) if children: self.add_children(*children) def should_validate_dom_id(self): """ Should we raise :class:`.InvalidDomIdError` exception when the ``dom_id`` kwarg is malformed. Returns the value of the :setting:`DJANGO_CRADMIN_UICONTAINER_VALIDATE_DOM_ID` setting, falling back to ``True`` if it is not defined. The validator requires the dom_id to start with ``id_``, be lowercase, and not contain ``-``. We recommend to not override this to ensure uniform DOM id naming. You should disable this validation in production using the :setting:`DJANGO_CRADMIN_UICONTAINER_VALIDATE_DOM_ID` setting. """ return getattr(settings, 'DJANGO_CRADMIN_UICONTAINER_VALIDATE_DOM_ID', True) def should_validate_bem(self): """ Should we raise :class:`.InvalidBemIdError` exception when the ``bem_block`` or ``bem_element`` kwarg is malformed? Returns the value of the :setting:`DJANGO_CRADMIN_UICONTAINER_VALIDATE_BEM` setting, falling back to ``True`` if it is not defined. The validator requires the bem_block to not contain ``__`` (double underscore), and the bem_element to comtain ``__`` (double underscore). We recommend to not chanding this to ensure BEM elements and blocks are used correctly. You should disable this validation in production using the :setting:`DJANGO_CRADMIN_UICONTAINER_VALIDATE_BEM` setting. """ return getattr(settings, 'DJANGO_CRADMIN_UICONTAINER_VALIDATE_BEM', True) def validate_dom_id(self, dom_id): if dom_id is False: return if not self.should_validate_dom_id(): return normalized_dom_id = dom_id.replace('-', '').lower() if not dom_id.startswith('id_') or dom_id != normalized_dom_id: raise InvalidDomIdError( 'dom_id must begin with "id_", be all lowercase, and can not contain "-". ' '{dom_id!r} does not match this requirement.'.format( dom_id=dom_id)) def validate_bem(self, bem_block, bem_element): if not self.should_validate_bem(): return if bem_block and bem_element: raise InvalidBemError( 'Can not specify both bem_element or bem_block. An ' 'HTML element is eighter a BEM block or a BEM element.') if bem_block: if '__' in bem_block: raise InvalidBemError( '{bem_block} is not a valid BEM block name. ' 'BEM blocks do not contain "__". 
Are you sure you ' 'did not mean to use the bem_element kwarg?'.format( bem_block=bem_block )) elif bem_element: if '__' not in bem_element: raise InvalidBemError( '{bem_element} is not a valid BEM element name. ' 'BEM elements must contain "__". Are you sure you ' 'did not mean to use the bem_block kwarg?'.format( bem_element=bem_element )) def get_full_class_path_as_string(self): """ Get full class path as string. Useful for providing some extra information in exceptions. Normally this will be in a traceback, but when dealing with things rendered by a Django template, this information is not always included. """ return '{}.{}'.format(self.__class__.__module__, self.__class__.__name__) def validate_html_tag(self, html_tag): if html_tag and self.supported_html_tags and html_tag not in self.supported_html_tags: raise UnsupportedHtmlTagError('Unsupported HTML tag for {classpath}: {html_tag}'.format( classpath=self.get_full_class_path_as_string(), html_tag=self._overridden_html_tag )) def get_default_html_tag(self): """ Get the default HTML tag to wrap renderable in. Can be overriden by the ``html_tag`` kwarg for :meth:`.__init__`. Returns ``"div"`` by default. """ return 'div' @property def html_tag(self): """ Get the HTML tag for this container. """ return self._overridden_html_tag or self.get_default_html_tag() @property def html_tag_supports_children(self): """ Does the html tag support children? If this returns ``False``, we: - Do not render an end tag for the wrapper element. - Do not allow children to be added to the container. Should be overridden to return ``False`` if the :meth:`.get_default_html_tag` does not allow for children. Examples of this case is if the wrapper html tag i ``input`` or ``hr``. See also :meth:`.can_have_children`, which should be used if the HTML tag should have and end tag, but not children. Returns: boolean: True by default. """ return True @property def can_have_children(self): """ Can this container have children? If this returns ``False``, :meth:`.add_child` will raise :class:`.NotAllowedToAddChildrenError`. Returns: boolean: The return value from :meth:`.html_tag_supports_children` by default. """ return self.html_tag_supports_children def get_default_role(self): """ Get the default value for the role attribute of the html element. Defaults to ``False``. """ return False @property def role(self): """ Get the value for the role attribute of the html element. You should not override this. Override :meth:`.get_default_role` instead. """ return self._overridden_role or self.get_default_role() def get_default_dom_id(self): """ Get the default value for the id attribute of the html element. Defaults to ``False``. """ return False @property def dom_id(self): """ Get the value for the id attribute of the html element. You should not override this. Override :meth:`.get_default_dom_id` instead. """ return self._overridden_dom_id or self.get_default_dom_id() def get_html_element_attributes(self): """ Get HTML element attributes as a dict. The dict is parsed by :func:`django.forms.utils.flatatt`, so: - ``{'myattribute': True}`` results in ``myattribute`` (no value). - ``{'myattribute': False}`` results in the attribute beeing ignored (not included in the output). - ``{'myattribute': 'Some value'}`` results in the ``myattribute="Some value"``. If you override this method, *remember to call super* to get the attributes set in the superclass. 
""" html_element_attributes = { 'role': self.role, 'id': self.dom_id, 'class': self.css_classes or False, # Fall back to false to avoid class="" } if self._html_element_attributes: html_element_attributes.update(self._html_element_attributes) return html_element_attributes @property def html_element_attributes_string(self): """ Get :meth:`.get_html_element_attributes` + any attributes in the ``html_element_attributes`` kwarg for :meth:`.__init__` encoded as a string using :func:`django.forms.utils.flatatt`. """ return flatatt(self.get_html_element_attributes()) def get_default_css_classes_list(self): """ Override this to provide a default list of css classes. The css classes specified here can be overridden using the ``css_classes_list`` kwarg for :meth:`.__init__`. """ return [] def get_default_bem_block_or_element(self): """ Get the default BEM block or element. A HTML element is eighter a BEM block or a BEM element, so we have joined this into a single method. """ return None def get_bem_block_or_element(self): """ Get the BEM block or element. DO NOT OVERRIDE THIS METHOD. Override :meth:`.get_default_bem_block_or_element` instead. """ return (self._overridden_bem_block_or_element or self.get_default_bem_block_or_element()) def get_default_bem_variant_list(self): """ Get the default BEM variants. The full CSS class of any variant in the list will be :meth:`.get_bem_block_or_element` with ``--`` and the variant appended, so if the bem block/element is ``"menu"``, and the variant is ``"expanded"``, the resulting css class will be ``"menu--expanded"``. """ return [] def get_bem_variant_list(self): """ Get the list of BEM variants. DO NOT OVERRIDE THIS METHOD. Override :meth:`.get_default_bem_variant_list` instead. """ return self._overridden_bem_variant_list or self.get_default_bem_variant_list() def get_bem_css_classes_list(self): """ Get the BEM css classes as list. DO NOT OVERRIDE THIS METHOD. Override :meth:`.get_default_bem_block_or_element` and :meth:`.get_default_bem_variant_list` instead. """ bem_block_or_element = self.get_bem_block_or_element() bem_css_classes = [] if bem_block_or_element: bem_css_classes.append(bem_block_or_element) for variant in self.get_bem_variant_list(): css_class = '{}--{}'.format(bem_block_or_element, variant) bem_css_classes.append(css_class) return bem_css_classes def get_css_classes_list(self): """ DO NOT OVERRIDE THIS METHOD. Unlike with :class:`django_cradmin.renderable.AbstractRenderableWithCss`, you do not override this class to add your own css classes. Override :meth:`.get_default_css_classes_list`. This is because this method respects the ``css_classes_list`` kwarg for :meth:`.__init__`, and just falls back to :meth:`.get_default_css_classes_list`. So if you override this method, the ``css_classes_list`` kwarg will be useless. """ css_classes_list = self.get_bem_css_classes_list() if self._overridden_css_classes_list: css_classes_list.extend(self._overridden_css_classes_list) else: css_classes_list.extend(self.get_default_css_classes_list()) if self._extra_css_classes_list: css_classes_list.extend(self._extra_css_classes_list) return css_classes_list def get_default_test_css_class_suffixes_list(self): """ Override this to provide a default list of css classes for unit tests. The css classes specified here can be overridden using the ``test_css_class_suffixes_list`` kwarg for :meth:`.__init__`. """ return ['uicontainer-{}'.format(self.__class__.__name__.lower())] def get_test_css_class_suffixes_list(self): """ DO NOT OVERRIDE THIS METHOD. 
Unlike with :class:`django_cradmin.renderable.AbstractRenderableWithCss`, you do not override this class to add your own test css classes. Override :meth:`.get_default_test_css_class_suffixes_list`. This is because this method respects the ``test_css_class_suffixes_list`` kwarg for :meth:`.__init__`, and just falls back to :meth:`.get_default_test_css_class_suffixes_list`. So if you override this method, the ``test_css_class_suffixes_list`` kwarg will be useless. """ if self._overridden_test_css_class_suffixes_list: test_css_class_suffixes_list = self._overridden_test_css_class_suffixes_list else: test_css_class_suffixes_list = self.get_default_test_css_class_suffixes_list() return test_css_class_suffixes_list def bootstrap(self, parent=None): """ Bootstrap the container. Must be called once on the top-level container in the tree of containers. Sets the provided parent as :attr:`.parent`. Updates the properties of all children (using dict update()) with :attr:`.properties`. """ if self._is_bootstrapped: raise AlreadyBootsrappedError('The container is already bootstrapped. Can not bootstrap ' 'the same container twice.') self.parent = parent if self.parent: self.properties.update(self.parent.properties) for child in self._virtual_childrenlist: child.bootstrap(parent=self) for child in self._childrenlist: child.bootstrap(parent=self) self._is_bootstrapped = True return self def prepopulate_children_list(self): """ Pre-polulate the children list. This is called in :meth:`.__init__` before any children from the kwargs is added. Returns: list: An empty list by default, but you can override this in subclasses. """ return [] def prepopulate_virtual_children_list(self): """ Pre-polulate the virtual children list. This is called in :meth:`.__init__` before any children from the kwargs is added, and before any children is :meth:`.prepopulate_children_list` is added. Returns: list: An empty list by default, but you can override this in subclasses. """ return [] def add_child(self, childcontainer): """ Add a child to the container. Args: childcontainer: A :class:`.AbstractContainerRenderable` object. Returns: A reference to self. This means that you can chain calls to this method. """ if self.can_have_children: self._childrenlist.append(childcontainer) if self._is_bootstrapped and not childcontainer._is_bootstrapped: childcontainer.bootstrap(parent=self) else: raise NotAllowedToAddChildrenError('{modulename}.{classname} can not have children'.format( modulename=self.__class__.__module__, classname=self.__class__.__name__ )) return self def add_virtual_child(self, childcontainer): """ Add a "virtual" child to the container. This child is not rendered as a child of the container automatically (that is left to the template rendering the container). But it inherits properties and is automatically bootstrapped just like a regular child. Args: childcontainer: A :class:`.AbstractContainerRenderable` object. Returns: A reference to self. This means that you can chain calls to this method. """ if self.can_have_children: self._virtual_childrenlist.append(childcontainer) if self._is_bootstrapped and not childcontainer._is_bootstrapped: childcontainer.bootstrap(parent=self) return self def add_children(self, *childcontainers): """ Add children to the container. Args: *childcontainers: Zero or more :class:`.AbstractContainerRenderable` objects. Returns: A reference to self. This means that you can chain calls to this method. 
""" for childcontainer in childcontainers: self.add_child(childcontainer) return self def add_virtual_children(self, *childcontainers): """ Add virtual children to the container. Args: *childcontainers: Zero or more :class:`.AbstractContainerRenderable` objects. Returns: A reference to self. This means that you can chain calls to this method. """ for childcontainer in childcontainers: self.add_virtual_child(childcontainer) return self def iter_children(self): """ Returns an iterator over the children of this container. The yielded children will be objects of :class:`.AbstractContainerRenderable` subclasses. """ return iter(self._childrenlist) def iter_virtual_children(self): """ Returns an iterator over the virtual children of this container. The yielded children will be objects of :class:`.AbstractContainerRenderable` subclasses. """ return iter(self._virtual_childrenlist) def get_childcount(self): """ Get the number of children in the container. """ return len(self._childrenlist) def get_virtual_childcount(self): """ Get the number of virtual children in the container. """ return len(self._virtual_childrenlist) @property def should_render(self): """ Should we render anything? Override this to make the :meth:`.render` to control if the container is rendered. If this returns ``False``, :meth:`.render` returns an empty string instead of rendering the template. Returns: bool: ``True`` by default, but subclasses can override this behavior. """ return True def render(self, **kwargs): """ Overrides :meth:`django_cradmin.renderable.AbstractRenderable.render`. The only change is that we return an empty string if :meth:`.should_render` returns ``False``. If it returns ``True``, we call the overriden method and returns the result. Args: **kwargs: Forwarded to the overridden method if it is called. """ if not self._is_bootstrapped: raise NotBootsrappedError( 'Can not render an AbstractContainerRenderable that has not been bootstrapped. ' 'Ensure you call bootsrap() on the top-level container in the container ' 'hierarchy before rendering. Class causing this issue: {classpath}'.format( classpath=self.get_full_class_path_as_string() )) if self.should_render: return super(AbstractContainerRenderable, self).render(**kwargs) else: return '' class Div(AbstractContainerRenderable): """ Renders a ``<div>``. The only thing this class does is to override :meth:`django_cradmin.uicontainer.container.AbstractContainerRenderable.get_default_html_tag` and return ``"div"``. """ def get_default_html_tag(self): return 'div' class NoWrapperElement(AbstractContainerRenderable): """ Renders children, but no wrapper HTML element. """ template_name = 'django_cradmin/uicontainer/no_wrapper_element.django.html'
PypiClean
/wax-ml-0.6.4.tar.gz/wax-ml-0.6.4/wax/modules/buffer.py
"""Implement buffering mechanism.""" from typing import Any, Callable, NamedTuple, Optional import haiku as hk import jax.numpy as jnp class BufferState(NamedTuple): buffer: Any len_buffer: int i_start: int class BufferFun(NamedTuple): init: Callable apply: Callable def buffer_fn(maxlen: int, fill_value=jnp.nan): def init(shape, dtype): buffer = jnp.full((maxlen,) + shape, fill_value, dtype=dtype) len_buffer = 0 i_start = maxlen return BufferState(buffer, len_buffer, i_start) def apply(x, state): buffer, len_buffer, i_start = state buffer = jnp.roll(buffer, -1, axis=0) buffer = buffer.at[-1].set(x) len_buffer = jnp.minimum(len_buffer + 1, maxlen) i_start = maxlen - len_buffer return buffer, BufferState(buffer, len_buffer, i_start) return BufferFun(init, apply) class Buffer(hk.Module): """Implement buffering mechanism.""" def __init__( self, maxlen: int, fill_value=jnp.nan, return_state: bool = False, name: Optional[str] = None, ): """Initialize the module/ Args: maxlen : length of the buffer fill_value : value to use to fill buffer while no data has been append. return_state : if true, the module returns a tuple (buffer, state) where state is the full buffer state (buffer, len_buffer, i_start). If false, the buffer is returned. name : name of the module. """ super().__init__(name=name) self.maxlen = maxlen self.fill_value = fill_value self.return_state = return_state def __call__(self, input: jnp.ndarray): """Record input data in the buffer. Args: input: data to record. """ fun = buffer_fn(self.maxlen, self.fill_value) buffer_state = hk.get_state( "buffer_state", input.shape, input.dtype, init=fun.init, ) buffer, buffer_state = fun.apply(input, buffer_state) hk.set_state("buffer_state", buffer_state) if self.return_state: return buffer, buffer_state else: return buffer
PypiClean
/125softNLP-0.0.1-py3-none-any.whl/bert/extract_feature.py
from bert.graph import import_tf from bert import modeling from bert import tokenization from bert.graph import optimize_graph from bert import args from queue import Queue from threading import Thread tf = import_tf(0, True) class InputExample(object): def __init__(self, unique_id, text_a, text_b): self.unique_id = unique_id self.text_a = text_a self.text_b = text_b class InputFeatures(object): """A single set of features of data.""" def __init__(self, unique_id, tokens, input_ids, input_mask, input_type_ids): self.unique_id = unique_id self.tokens = tokens self.input_ids = input_ids self.input_mask = input_mask self.input_type_ids = input_type_ids class BertVector: def __init__(self, batch_size=32, pooling_strategy="REDUCE_MEAN", max_seq_len=40): """ init BertVector :param batch_size: Depending on your memory default is 32 """ self.max_seq_length = max_seq_len self.layer_indexes = args.layer_indexes self.gpu_memory_fraction = 1 if pooling_strategy == "NONE": pooling_strategy = args.PoolingStrategy.NONE elif pooling_strategy == "REDUCE_MAX": pooling_strategy = args.PoolingStrategy.REDUCE_MAX elif pooling_strategy == "REDUCE_MEAN": pooling_strategy = args.PoolingStrategy.REDUCE_MEAN elif pooling_strategy == "REDUCE_MEAN_MAX": pooling_strategy = args.PoolingStrategy.REDUCE_MEAN_MAX self.graph_path = optimize_graph(pooling_strategy=pooling_strategy, max_seq_len=self.max_seq_length) self.tokenizer = tokenization.FullTokenizer(vocab_file=args.vocab_file, do_lower_case=True) self.batch_size = batch_size self.estimator = self.get_estimator() self.input_queue = Queue(maxsize=1) self.output_queue = Queue(maxsize=1) self.predict_thread = Thread(target=self.predict_from_queue, daemon=True) self.predict_thread.start() def get_estimator(self): from tensorflow.python.estimator.estimator import Estimator from tensorflow.python.estimator.run_config import RunConfig from tensorflow.python.estimator.model_fn import EstimatorSpec def model_fn(features, labels, mode, params): with tf.gfile.GFile(self.graph_path, 'rb') as f: graph_def = tf.GraphDef() graph_def.ParseFromString(f.read()) input_names = ['input_ids', 'input_mask', 'input_type_ids'] output = tf.import_graph_def(graph_def, input_map={k + ':0': features[k] for k in input_names}, return_elements=['final_encodes:0']) return EstimatorSpec(mode=mode, predictions={ 'encodes': output[0] }) config = tf.ConfigProto() config.gpu_options.allow_growth = True config.gpu_options.per_process_gpu_memory_fraction = self.gpu_memory_fraction config.log_device_placement = False config.graph_options.optimizer_options.global_jit_level = tf.OptimizerOptions.ON_1 return Estimator(model_fn=model_fn, config=RunConfig(session_config=config), params={'batch_size': self.batch_size}) def predict_from_queue(self): prediction = self.estimator.predict(input_fn=self.queue_predict_input_fn, yield_single_examples=False) for i in prediction: self.output_queue.put(i) def encode(self, sentence): self.input_queue.put(sentence) prediction = self.output_queue.get() return prediction def queue_predict_input_fn(self): return (tf.data.Dataset.from_generator( self.generate_from_queue, output_types={'unique_ids': tf.int32, 'input_ids': tf.int32, 'input_mask': tf.int32, 'input_type_ids': tf.int32}, output_shapes={ 'unique_ids': (1,), 'input_ids': (None, self.max_seq_length), 'input_mask': (None, self.max_seq_length), 'input_type_ids': (None, self.max_seq_length)})) def generate_from_queue(self): while True: features = list(self.convert_examples_to_features(seq_length=self.max_seq_length, 
tokenizer=self.tokenizer)) yield { 'unique_ids': [f.unique_id for f in features], 'input_ids': [f.input_ids for f in features], 'input_mask': [f.input_mask for f in features], 'input_type_ids': [f.input_type_ids for f in features] } def input_fn_builder(self, features, seq_length): """Creates an `input_fn` closure to be passed to Estimator.""" all_unique_ids = [] all_input_ids = [] all_input_mask = [] all_input_type_ids = [] for feature in features: all_unique_ids.append(feature.unique_id) all_input_ids.append(feature.input_ids) all_input_mask.append(feature.input_mask) all_input_type_ids.append(feature.input_type_ids) def input_fn(params): """The actual input function.""" batch_size = params["batch_size"] num_examples = len(features) # This is for demo purposes and does NOT scale to large data sets. We do # not use Dataset.from_generator() because that uses tf.py_func which is # not TPU compatible. The right way to load data is with TFRecordReader. d = tf.data.Dataset.from_tensor_slices({ "unique_ids": tf.constant(all_unique_ids, shape=[num_examples], dtype=tf.int32), "input_ids": tf.constant( all_input_ids, shape=[num_examples, seq_length], dtype=tf.int32), "input_mask": tf.constant( all_input_mask, shape=[num_examples, seq_length], dtype=tf.int32), "input_type_ids": tf.constant( all_input_type_ids, shape=[num_examples, seq_length], dtype=tf.int32), }) d = d.batch(batch_size=batch_size, drop_remainder=False) return d return input_fn def model_fn_builder(self, bert_config, init_checkpoint, layer_indexes): """Returns `model_fn` closure for TPUEstimator.""" def model_fn(features, labels, mode, params): # pylint: disable=unused-argument """The `model_fn` for TPUEstimator.""" unique_ids = features["unique_ids"] input_ids = features["input_ids"] input_mask = features["input_mask"] input_type_ids = features["input_type_ids"] jit_scope = tf.contrib.compiler.jit.experimental_jit_scope with jit_scope(): model = modeling.BertModel( config=bert_config, is_training=False, input_ids=input_ids, input_mask=input_mask, token_type_ids=input_type_ids) if mode != tf.estimator.ModeKeys.PREDICT: raise ValueError("Only PREDICT modes are supported: %s" % (mode)) tvars = tf.trainable_variables() (assignment_map, initialized_variable_names) = modeling.get_assignment_map_from_checkpoint(tvars, init_checkpoint) tf.logging.info("**** Trainable Variables ****") for var in tvars: init_string = "" if var.name in initialized_variable_names: init_string = ", *INIT_FROM_CKPT*" tf.logging.info(" name = %s, shape = %s%s", var.name, var.shape, init_string) all_layers = model.get_all_encoder_layers() predictions = { "unique_id": unique_ids, } for (i, layer_index) in enumerate(layer_indexes): predictions["layer_output_%d" % i] = all_layers[layer_index] from tensorflow.python.estimator.model_fn import EstimatorSpec output_spec = EstimatorSpec(mode=mode, predictions=predictions) return output_spec return model_fn def convert_examples_to_features(self, seq_length, tokenizer): """Loads a data file into a list of `InputBatch`s.""" features = [] input_masks = [] examples = self._to_example(self.input_queue.get()) for (ex_index, example) in enumerate(examples): tokens_a = tokenizer.tokenize(example.text_a) # if the sentences's length is more than seq_length, only use sentence's left part if len(tokens_a) > seq_length - 2: tokens_a = tokens_a[0:(seq_length - 2)] # The convention in BERT is: # (a) For sequence pairs: # tokens: [CLS] is this jack ##son ##ville ? [SEP] no it is not . 
[SEP] # type_ids: 0 0 0 0 0 0 0 0 1 1 1 1 1 1 # (b) For single sequences: # tokens: [CLS] the dog is hairy . [SEP] # type_ids: 0 0 0 0 0 0 0 # # Where "type_ids" are used to indicate whether this is the first # sequence or the second sequence. The embedding vectors for `type=0` and # `type=1` were learned during pre-training and are added to the wordpiece # embedding vector (and position vector). This is not *strictly* necessary # since the [SEP] token unambiguously separates the sequences, but it makes # it easier for the model to learn the concept of sequences. # # For classification tasks, the first vector (corresponding to [CLS]) is # used as as the "sentence vector". Note that this only makes sense because # the entire model is fine-tuned. tokens = [] input_type_ids = [] tokens.append("[CLS]") input_type_ids.append(0) for token in tokens_a: tokens.append(token) input_type_ids.append(0) tokens.append("[SEP]") input_type_ids.append(0) # Where "input_ids" are tokens's index in vocabulary input_ids = tokenizer.convert_tokens_to_ids(tokens) # The mask has 1 for real tokens and 0 for padding tokens. Only real # tokens are attended to. input_mask = [1] * len(input_ids) input_masks.append(input_mask) # Zero-pad up to the sequence length. while len(input_ids) < seq_length: input_ids.append(0) input_mask.append(0) input_type_ids.append(0) assert len(input_ids) == seq_length assert len(input_mask) == seq_length assert len(input_type_ids) == seq_length if ex_index < 5: tf.logging.info("*** Example ***") tf.logging.info("unique_id: %s" % (example.unique_id)) tf.logging.info("tokens: %s" % " ".join( [tokenization.printable_text(x) for x in tokens])) tf.logging.info("input_ids: %s" % " ".join([str(x) for x in input_ids])) tf.logging.info("input_mask: %s" % " ".join([str(x) for x in input_mask])) tf.logging.info( "input_type_ids: %s" % " ".join([str(x) for x in input_type_ids])) yield InputFeatures( unique_id=example.unique_id, tokens=tokens, input_ids=input_ids, input_mask=input_mask, input_type_ids=input_type_ids) def _truncate_seq_pair(self, tokens_a, tokens_b, max_length): """Truncates a sequence pair in place to the maximum length.""" # This is a simple heuristic which will always truncate the longer sequence # one token at a time. This makes more sense than truncating an equal percent # of tokens from each, since if one sequence is very short then each token # that's truncated likely contains more information than a longer sequence. while True: total_length = len(tokens_a) + len(tokens_b) if total_length <= max_length: break if len(tokens_a) > len(tokens_b): tokens_a.pop() else: tokens_b.pop() @staticmethod def _to_example(sentences): import re """ sentences to InputExample :param sentences: list of strings :return: list of InputExample """ unique_id = 0 for ss in sentences: line = tokenization.convert_to_unicode(ss) if not line: continue line = line.strip() text_a = None text_b = None m = re.match(r"^(.*) \|\|\| (.*)$", line) if m is None: text_a = line else: text_a = m.group(1) text_b = m.group(2) yield InputExample(unique_id=unique_id, text_a=text_a, text_b=text_b) unique_id += 1 if __name__ == "__main__": import time bert = BertVector() while True: question = input('question: ') start = time.time() vectors = bert.encode([question]) print(str(vectors)) #print(f'predict time:----------{time.time() - start}')
PypiClean
/trojanzoo-2.0.2.tar.gz/trojanzoo-2.0.2/trojanvision/defenses/abstract.py
from trojanvision.environ import env from trojanzoo.defenses import Defense from trojanzoo.utils.logger import MetricLogger from trojanzoo.utils.metric import mask_jaccard, normalize_mad from trojanzoo.utils.output import output_iter, prints from trojanzoo.utils.tensor import tanh_func from trojanzoo.utils.data import TensorListDataset, sample_batch import torch import torch.optim as optim import numpy as np from sklearn import metrics import os from abc import abstractmethod from typing import TYPE_CHECKING from trojanvision.datasets import ImageSet from trojanvision.models import ImageModel from trojanvision.attacks.backdoor import BadNet import argparse from collections.abc import Iterable if TYPE_CHECKING: import torch.utils.data # TODO: python 3.10 def format_list(_list: list, _format: str = ':8.3f') -> str: return '[' + ', '.join(['{{{}}}'.format(_format).format(a) for a in _list]) + ']' class BackdoorDefense(Defense): r"""Backdoor defense abstract class. It inherits :class:`trojanzoo.defenses.Defense`. Args: original (bool): Whether to load original clean model. If ``False``, load attack poisoned model by calling ``self.attack.load()``. Attributes: real_mark (torch.Tensor): Watermark that the attacker uses with shape ``(C+1, H, W)``. real_mask (torch.Tensor): Mask of the watermark by calling :meth:`trojanvision.marks.Watermark.get_mask()`. """ name: str = 'backdoor_defense' @classmethod def add_argument(cls, group: argparse._ArgumentGroup): super().add_argument(group) group.add_argument('--original', action='store_true', help='whether to load original clean model ' '(default: False)') return group def __init__(self, attack: BadNet, original: bool = False, **kwargs): self.original: bool = original if not self.original: attack.load(**kwargs) super().__init__(attack=attack, **kwargs) self.dataset: ImageSet self.model: ImageModel self.attack: BadNet self.real_mark = self.attack.mark.mark.clone() self.real_mask = self.attack.mark.get_mask() @abstractmethod def detect(self, **kwargs): self.attack.validate_fn() def get_filename(self, **kwargs): r"""Get filenames for current defense settings.""" return self.attack.name + '_' + self.attack.get_filename(**kwargs) class InputFiltering(BackdoorDefense): r"""Backdoor defense abstract class of input filtering. It inherits :class:`trojanvision.defenses.BackdoorDefense`. It detects whether a test input is poisoned. The defense tests :attr:`defense_input_num` clean test inputs and their corresponding poison version (``2 * defense_input_num`` in total). Args: defense_input_num (int): Number of test inputs. Defaults to ``100``. Attributes: test_set (torch.utils.data.Dataset): Test dataset with length :attr:`defense_input_num`. 
""" name: str = 'input_filtering' @classmethod def add_argument(cls, group: argparse._ArgumentGroup): super().add_argument(group) group.add_argument('--defense_input_num', type=int, help='number of test inputs (default: 100)') return group def __init__(self, defense_input_num: int = 100, **kwargs): super().__init__(**kwargs) self.param_list['input_filtering'] = ['defense_input_num'] self.defense_input_num = defense_input_num self.test_input, self.test_label = self.get_test_data() def detect(self, **kwargs): super().detect(**kwargs) y_true = self.get_true_labels() y_pred = self.get_pred_labels() tn, fp, fn, tp = metrics.confusion_matrix(y_true, y_pred).ravel() print() print(f'{tn=:d} {fp=:d} {fn=:d} {tp=:d}') print(f'f1_score : {metrics.f1_score(y_true, y_pred):8.3f}') print(f'precision_score : {metrics.precision_score(y_true, y_pred):8.3f}') print(f'recall_score : {metrics.recall_score(y_true, y_pred):8.3f}') print(f'accuracy_score : {metrics.accuracy_score(y_true, y_pred):8.3f}') print(f'roc_auc_score : {metrics.roc_auc_score(y_true, y_pred):8.3f}') def get_test_data(self) -> tuple[torch.Tensor, torch.Tensor]: r"""Get test data. Returns: (torch.Tensor, torch.Tensor): Input and label tensors with length ``defense_input_num``. """ input_list = [] label_list = [] remain_counter = self.defense_input_num for data in self.dataset.loader['valid']: _input, _label = self.model.remove_misclassify(data) if len(_label) == 0: continue trigger_input = self.attack.add_mark(_input) trigger_label = self.attack.target_class * torch.ones_like(_label) _classification = self.model.get_class(trigger_input) repeat_idx = _classification.eq(trigger_label) _input, _label = _input[repeat_idx], _label[repeat_idx] if len(_label) == 0: continue if len(_input) < remain_counter: remain_counter -= len(_input) else: _input = _input[:remain_counter] _label = _label[:remain_counter] remain_counter = 0 input_list.append(_input.cpu()) label_list.extend(_label.cpu().tolist()) if remain_counter == 0: break else: raise Exception('No enough test data') return torch.cat(input_list), label_list def get_true_labels(self) -> torch.Tensor: r"""Get ground-truth labels for test inputs. Defaults to return ``[False] * defense_input_num + [True] * defense_input_num``. Returns: torch.Tensor: ``torch.BoolTensor`` with shape ``(2 * defense_input_num)``. """ zeros = torch.zeros(self.defense_input_num, dtype=torch.bool) ones = torch.ones_like(zeros) return torch.cat([zeros, ones]) def get_pred_labels(self) -> torch.Tensor: r"""Get predicted labels for test inputs (need overriding). Returns: torch.Tensor: ``torch.BoolTensor`` with shape ``(2 * defense_input_num)``. """ ... class TrainingFiltering(BackdoorDefense): r"""Backdoor defense abstract class of training data filtering. It inherits :class:`trojanvision.defenses.BackdoorDefense`. Provided :attr:`defense_input_num` training data, it detects which training data is poisoned. The defense evaluates clean and poison training inputs. - If :attr:`defense_input_num` is ``None``, use full training data. - Else, sample ``defense_input_num * poison_percent`` poison training data and ``defense_input_num * (1 - poison_percent)`` clean training data. If dataset is not using ``train_mode == 'dataset'``, construct poison dataset using all clean data with watermark attached. (If :attr:`defense_input_num` is ``None`` as well, the defense will evaluate the whole clean training set and its poisoned version.) Args: defense_input_num (int): Number of training inputs to evaluate. 
Defaults to ``None`` (all training set). Attributes: clean_set (torch.utils.data.Dataset): Clean training data to evaluate. poison_set (torch.utils.data.Dataset): Poison training data to evaluate. """ name: str = 'training_filtering' @classmethod def add_argument(cls, group: argparse._ArgumentGroup): super().add_argument(group) group.add_argument('--defense_input_num', type=int, help='the number of training inputs to evaluate ' '(default: None)') return group def __init__(self, defense_input_num: int = None, **kwargs): super().__init__(**kwargs) self.defense_input_num = defense_input_num self.clean_set, self.poison_set = self.get_datasets() def get_datasets(self) -> tuple[torch.utils.data.Dataset, torch.utils.data.Dataset]: r"""Get clean and poison datasets. Returns: (torch.utils.data.Dataset, torch.utils.data.Dataset): Clean training dataset and poison training dataset. """ if self.attack.poison_set is None: self.attack.poison_set = self.attack.get_poison_dataset( poison_num=len(self.dataset.loader['train'].dataset)) if not self.defense_input_num: return self.dataset.loader['train'].dataset, self.attack.poison_set if self.attack.train_mode != 'dataset': poison_num = int(self.defense_input_num * self.attack.poison_percent) clean_num = self.defense_input_num - poison_num clean_input, clean_label = sample_batch(self.dataset.loader['train'].dataset, batch_size=clean_num) trigger_input, trigger_label = sample_batch(self.attack.poison_set, batch_size=poison_num) clean_set = TensorListDataset(clean_input, clean_label.tolist()) poison_set = TensorListDataset(trigger_input, trigger_label.tolist()) return clean_set, poison_set def detect(self, **kwargs): super().detect(**kwargs) y_pred = self.get_pred_labels() y_true = self.get_true_labels() print(f'f1_score : {metrics.f1_score(y_true, y_pred):8.3f}') print(f'precision_score : {metrics.precision_score(y_true, y_pred):8.3f}') print(f'recall_score : {metrics.recall_score(y_true, y_pred):8.3f}') print(f'accuracy_score : {metrics.accuracy_score(y_true, y_pred):8.3f}') def get_true_labels(self) -> torch.Tensor: r"""Get ground-truth labels for training inputs. Defaults to return ``[False] * len(self.clean_set) + [True] * len(self.poison_set)``. Returns: torch.Tensor: ``torch.BoolTensor`` with shape ``(defense_input_num)``. """ return torch.cat([torch.zeros(len(self.clean_set), dtype=torch.bool), torch.ones(len(self.poison_set), dtype=torch.bool)]) @abstractmethod def get_pred_labels(self) -> torch.Tensor: r"""Get predicted labels for training inputs (need overriding). Returns: torch.Tensor: ``torch.BoolTensor`` with shape ``(defense_input_num)``. """ ... class ModelInspection(BackdoorDefense): r"""Backdoor defense abstract class of model inspection. It inherits :class:`trojanvision.defenses.BackdoorDefense`. Provided a model, it tries to search for a trigger. If trigger exists, that means the model is poisoned. Args: defense_remask_epoch (int): Defense watermark optimizing epochs. Defaults to ``10``. defense_remask_lr (float): Defense watermark optimizing learning rate. Defaults to ``0.1``. cost (float): Cost of mask norm loss. Defaults to ``1e-3``. Attributes: cost (float): Cost of mask norm loss. clean_set (torch.utils.data.Dataset): Clean training data to evaluate. poison_set (torch.utils.data.Dataset): Poison training data to evaluate. 
""" name: str = 'model_inspection' @classmethod def add_argument(cls, group: argparse._ArgumentGroup): super().add_argument(group) group.add_argument('--defense_remask_epoch', type=int, help='defense watermark optimizing epochs ' '(default: 10)') group.add_argument('--defense_remask_lr', type=float, help='defense watermark optimizing learning rate ' '(default: 0.1)') group.add_argument('--cost', type=float, help='cost of mask norm loss ' '(default: 1e-3)') return group def __init__(self, defense_remask_epoch: int = 10, defense_remask_lr: float = 0.1, cost: float = 1e-3, **kwargs): super().__init__(**kwargs) self.param_list['model_inspection'] = ['defense_remask_epoch', 'defense_remask_lr', 'cost'] self.defense_remask_epoch = defense_remask_epoch self.defense_remask_lr = defense_remask_lr self.cost = cost def detect(self, **kwargs): super().detect(**kwargs) self.mark_random_pos = self.attack.mark.mark_random_pos mark_keys = ['mark', 'mark_height', 'mark_width', 'mark_height_offset', 'mark_width_offset', 'mark_random_pos', ] self.mark_dict = {key: getattr(self.attack.mark, key) for key in mark_keys} self.new_dict = {'mark': torch.zeros(self.attack.mark.mark.size(0), self.attack.mark.data_shape[-2], self.attack.mark.data_shape[-1], device=self.attack.mark.mark.device), 'mark_height': self.attack.mark.data_shape[-2], 'mark_width': self.attack.mark.data_shape[-1], 'mark_height_offset': 0, 'mark_width_offset': 0, 'mark_random_pos': False, } for k, v in self.new_dict.items(): setattr(self.attack.mark, k, v) self.attack.mark.mark.zero_() mark_list, loss_list, asr_list = self.get_mark_loss_list() mask_norms: torch.Tensor = mark_list[:, -1].flatten(start_dim=1).norm(p=1, dim=1) mask_norm_list: list[float] = mask_norms.tolist() print() print('asr : ' + format_list(asr_list)) print('mask norms : ' + format_list(mask_norm_list)) print('loss : ' + format_list(loss_list)) print() print('asr MAD : ' + format_list(normalize_mad(asr_list).tolist())) print('mask norm MAD : ' + format_list(normalize_mad(mask_norms).tolist())) print('loss MAD : ' + format_list(normalize_mad(loss_list).tolist())) if not self.mark_random_pos: self.attack.mark.mark = mark_list[self.attack.target_class] select_num = self.attack.mark.mark_height * self.attack.mark.mark_width overlap = mask_jaccard(self.attack.mark.get_mask(), self.real_mask, select_num=select_num) print(f'Jaccard index: {overlap:.3f}') def get_mark_loss_list(self, verbose: bool = True, **kwargs) -> tuple[torch.Tensor, list[float], list[float]]: r"""Get list of mark, loss, asr of recovered trigger for each class. Args: verbose (bool): Whether to output jaccard index for each trigger. It's also passed to :meth:`optimize_mark()`. **kwargs: Keyword arguments passed to :meth:`optimize_mark()`. Returns: (torch.Tensor, list[float], list[float]): list of mark, loss, asr with length ``num_classes``. 
""" mark_list: list[torch.Tensor] = [] loss_list: list[float] = [] asr_list: list[float] = [] # todo: parallel to avoid for loop file_path = os.path.normpath(os.path.join( self.folder_path, self.get_filename() + '.npz')) org_target_class = self.attack.target_class for label in range(self.model.num_classes): print('Class: ', output_iter(label, self.model.num_classes)) self.attack.target_class = label mark, loss = self.optimize_mark(label, verbose=verbose, **kwargs) if verbose: asr, _ = self.attack.validate_fn(indent=4) if not self.mark_random_pos: select_num = self.attack.mark.mark_height * self.attack.mark.mark_width overlap = mask_jaccard(self.attack.mark.get_mask(), self.real_mask, select_num=select_num) prints(f'Jaccard index: {overlap:.3f}', indent=4) else: asr, _ = self.model._validate(get_data_fn=self.attack.get_data, keep_org=False, poison_label=True, verbose=False) mark_list.append(mark) loss_list.append(loss) asr_list.append(asr) np.savez(file_path, mark_list=np.stack([mark.detach().cpu().numpy() for mark in mark_list]), loss_list=np.array(loss_list)) self.attack.target_class = org_target_class print() print('Defense results saved at: ' + file_path) mark_list_tensor = torch.stack(mark_list) return mark_list_tensor, loss_list, asr_list def loss(self, _input: torch.Tensor, _label: torch.Tensor, target: int, trigger_output: None | torch.Tensor = None, **kwargs) -> torch.Tensor: r"""Loss function to optimize recovered trigger. Args: _input (torch.Tensor): Clean input tensor with shape ``(N, C, H, W)``. _label (torch.Tensor): Clean label tensor with shape ``(N)``. target (int): Target class. trigger_output (torch.Tensor): Output tensor of input tensor with trigger. Defaults to ``None``. Returns: torch.Tensor: Scalar loss tensor. """ trigger_input = self.attack.add_mark(_input) trigger_label = target * torch.ones_like(_label) if trigger_output is None: trigger_output = self.model(trigger_input, **kwargs) return self.model.loss(trigger_input, trigger_label, _output=trigger_output) def optimize_mark(self, label: int, loader: Iterable = None, logger_header: str = '', verbose: bool = True, **kwargs) -> tuple[torch.Tensor, float]: r""" Args: label (int): The class label to optimize. loader (collections.abc.Iterable): Data loader to optimize trigger. Defaults to ``self.dataset.loader['train']``. logger_header (str): Header string of logger. Defaults to ``''``. verbose (bool): Whether to use logger for output. Defaults to ``True``. **kwargs: Keyword arguments passed to :meth:`loss()`. Returns: (torch.Tensor, torch.Tensor): Optimized mark tensor with shape ``(C + 1, H, W)`` and loss tensor. 
""" atanh_mark = torch.randn_like(self.attack.mark.mark, requires_grad=True) optimizer = optim.Adam([atanh_mark], lr=self.defense_remask_lr, betas=(0.5, 0.9)) lr_scheduler = optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=self.defense_remask_epoch) optimizer.zero_grad() loader = loader or self.dataset.loader['train'] # best optimization results norm_best: float = float('inf') mark_best: torch.Tensor = None loss_best: float = None logger = MetricLogger(indent=4) logger.create_meters(loss='{last_value:.3f}', acc='{last_value:.3f}', norm='{last_value:.3f}', entropy='{last_value:.3f}',) batch_logger = MetricLogger() logger.create_meters(loss=None, acc=None, entropy=None) iterator = range(self.defense_remask_epoch) if verbose: iterator = logger.log_every(iterator, header=logger_header) for _ in iterator: batch_logger.reset() for data in loader: self.attack.mark.mark = tanh_func(atanh_mark) # (c+1, h, w) _input, _label = self.model.get_data(data) trigger_input = self.attack.add_mark(_input) trigger_label = label * torch.ones_like(_label) trigger_output = self.model(trigger_input) batch_acc = trigger_label.eq(trigger_output.argmax(1)).float().mean() batch_entropy = self.loss(_input, _label, target=label, trigger_output=trigger_output, **kwargs) batch_norm: torch.Tensor = self.attack.mark.mark[-1].norm(p=1) batch_loss = batch_entropy + self.cost * batch_norm batch_loss.backward() optimizer.step() optimizer.zero_grad() batch_size = _label.size(0) batch_logger.update(n=batch_size, loss=batch_loss.item(), acc=batch_acc.item(), entropy=batch_entropy.item()) lr_scheduler.step() self.attack.mark.mark = tanh_func(atanh_mark) # (c+1, h, w) # check to save best mask or not loss = batch_logger.meters['loss'].global_avg acc = batch_logger.meters['acc'].global_avg norm = float(self.attack.mark.mark[-1].norm(p=1)) entropy = batch_logger.meters['entropy'].global_avg if norm < norm_best: mark_best = self.attack.mark.mark.detach().clone() loss_best = loss logger.update(loss=loss, acc=acc, norm=norm, entropy=entropy) if self.check_early_stop(loss=loss, acc=acc, norm=norm, entropy=entropy): print('early stop') break atanh_mark.requires_grad_(False) self.attack.mark.mark = mark_best return mark_best, loss_best def check_early_stop(self, *args, **kwargs) -> bool: r"""Check whether to early stop at the end of each remask epoch. Returns: bool: Whether to early stop. Defaults to ``False``. """ return False def load(self, path: None | str = None): r"""Load recovered mark from :attr:`path`. Args: path (str): npz path of recovered mark. Defaults to ``'{folder_path}/{self.get_filename()}.npz'``. """ if path is None: path = os.path.join(self.folder_path, self.get_filename() + '.npz') _dict = np.load(path) for k, v in self.new_dict.items(): setattr(self.attack.mark, k, v) self.attack.mark.mark = torch.from_numpy(_dict['mark_list'][self.attack.target_class]).to(device=env['device']) print('defense results loaded from:', path)
PypiClean
/async_metrics-0.1.0.tar.gz/async_metrics-0.1.0/async_metrics/ext/flask/flask.py
import os from flask import Blueprint, jsonify, make_response, render_template, request import async_metrics def setup_async_metrics(app, name: str = "async_metrics"): async_metrics = Blueprint( "async_metrics", "async_metrics", name, template_folder=os.path.join(os.getcwd(), "async_metrics/ext/flask/templates"), ) configure_routes(async_metrics, name) app.register_blueprint(async_metrics) def configure_routes(bp, name): @bp.route(f"/{name}/dashboard", methods=["GET", "HEAD"]) def summary(): return render_template( "dashboard.html", version=async_metrics.__version__, url=f"{request.scheme}://{request.host}/async_metrics/routes", ) @bp.route(f"/{name}/all", methods=["GET", "HEAD"]) def all(): return jsonify( { "system": async_metrics.sys.all(), } ) @bp.route(f"/{name}/asyncio", methods=["GET", "HEAD"]) def asyncio(): return jsonify({"asyncio": async_metrics.asyncio.all()}) @bp.route(f"/{name}/system", methods=["GET", "HEAD"]) def system(): return jsonify({"system": async_metrics.sys.all()}) @bp.route(f"/{name}/system/dependencies", methods=["GET", "HEAD"]) def dependencies(): return jsonify(async_metrics.sys.packages()) @bp.route(f"/{name}/system/python", methods=["GET", "HEAD"]) def python(): return jsonify(async_metrics.sys.python()) @bp.route(f"/{name}/system/partitions", methods=["GET", "HEAD"]) def partitions(): return jsonify(async_metrics.sys.partitions()) @bp.route(f"/{name}/system/process", methods=["GET", "HEAD"]) def process(): return jsonify(async_metrics.sys.process()) @bp.route(f"/{name}/about", methods=["GET", "HEAD"]) def about(): return jsonify( { "async_metrics_version": async_metrics.__version__, "project_url": "https://github.com/amenezes/async_metrics", "issues": "https://github.com/amenezes/async_metrics/issues", } ) @bp.route(f"/{name}/routes", methods=["GET", "HEAD"]) def routes(): routes = [ { "name": "async_metrics_summary", "method": "HEAD", "path": "/async_metrics", "description": "Show async_metrics available.", }, { "name": "async_metrics_summary", "method": "GET", "path": "/async_metrics", "description": "Show async_metrics available.", }, { "name": "async_metrics_summary", "method": "HEAD", "path": "/async_metrics/all", "description": "Show information about system environment.", }, { "name": "async_metrics_summary", "method": "GET", "path": "/async_metrics/all", "description": "Show information about system environment.", }, { "name": "async_metrics_summary", "method": "HEAD", "path": "/async_metrics/asyncio", "description": "Show information about async and system environment.", }, { "name": "async_metrics_summary", "method": "GET", "path": "/async_metrics/asyncio", "description": "Show information about async and system environment.", }, { "name": "async_metrics_system", "method": "HEAD", "path": "/async_metrics/system", "description": "Show information about system environment.", }, { "name": "async_metrics_system", "method": "GET", "path": "/async_metrics/system", "description": "Show information about system environment.", }, { "name": "async_metrics_dependencies", "method": "HEAD", "path": "/async_metrics/system/dependencies", "description": "Show applications dependencies.", }, { "name": "async_metrics_dependencies", "method": "GET", "path": "/async_metrics/system/dependencies", "description": "Show applications dependencies.", }, { "name": "async_metrics_python", "method": "HEAD", "path": "/async_metrics/system/python", "description": "Show information about current python environment.", }, { "name": "async_metrics_python", "method": "GET", "path": 
"/async_metrics/system/python", "description": "Show information about current python environment.", }, { "name": "async_metrics_process", "method": "HEAD", "path": "/async_metrics/system/process", "description": "Show summary information about application process. ", }, { "name": "async_metrics_process", "method": "GET", "path": "/async_metrics/system/process", "description": "Show summary information about application process.", }, { "name": "async_metrics_partitions", "method": "HEAD", "path": "/async_metrics/system/partitions", "description": "Show summary information about disk partition. ", }, { "name": "async_metrics_partitions", "method": "GET", "path": "/async_metrics/system/partitions", "description": "Show summary information about disk partition.", }, { "name": "async_metrics_about", "method": "HEAD", "path": "/async_metrics/system/about", "description": "Show information about async_metrics. ", }, { "name": "async_metrics_about", "method": "GET", "path": "/async_metrics/system/about", "description": "Show information about async_metrics.", }, ] resp = make_response(jsonify(routes), 200) resp.headers["Access-Control-Allow-Origin"] = "*" return resp
PypiClean
/ticketguardian_python-1.3.0-py3-none-any.whl/ticketguardian/policy/policy.py
from ticketguardian.abstract import ( PutResourceMixin, RetrieveResourceMixin, ListResourceMixin, ) from ticketguardian.item import Item from ticketguardian.policy.constants import UPGRADED from ticketguardian.policy.exceptions import NoBillingAddressException from ticketguardian._project import _validate class Policy(RetrieveResourceMixin, PutResourceMixin, ListResourceMixin): resource = 'policies' @property def id(self): return self.policy_number @property def item(self): if not hasattr(self._item, 'resource'): self._item = Item._construct(obj=self._item) return self._item @property def customer(self): if not hasattr(self._customer, 'resource'): self._item = Item._construct(obj=self._item) return self._customer def upgrade(self, item, currency='USD', **params): """ Upgrade a policy item. Keyword Arguments: item (dict): a dictionary containing the following values. name (str): The name of the item. reference_number (str): The unique number of the item. cost (float): The cost of the item. customer (dict): An optional customer object. Defaults to null. event (dict): An optional event object. Defaults to null. Optional Keyword Arguments: currency (str): The currency of the Items. Defaults to USD. card (dict): Card must contain the 'number', 'expire_month', and 'expire_year'. billing_address (dict): The order's billing address. Required if card is given. Must include address1, address2, city, state, country, zip_code. Returns: An instance of the new policy that was created from upgrading. This object is updated to reflect the changes made. """ if params.get('card'): _validate._validate_card(params['card']) if params.get('billing_address'): _validate._validate_address(params['billing_address']) else: raise NoBillingAddressException upgrade = self.update( 'upgrade', item=item, currency=currency, raw_data=True, **params) self.status = UPGRADED # The new policy is returned. return Policy.retrieve(upgrade.get("policy_number")) def exchange(self, item, currency='USD'): """ Exchange a policy item. Keyword Arguments: item (dict): a dictionary containing the following values. name (str): The name of the item. reference_number (str): The unique number of the item. cost (float): The cost of the item. customer (dict): An optional customer object. Defaults to null. event (dict): An optional event object. Defaults to null. Optional Keyword Arguments: currency (str): The currency of the Items. Defaults to USD. Returns: Nothing is returned. The object is updated to reflect the changes made to the policy. """ self.update( 'exchange', item=item, currency=currency )
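# --- Illustrative usage sketch (added for clarity; not part of the original
# module). The policy number and all field values below are invented; the
# card/billing_address keys follow the docstrings above.
def _example_upgrade():
    policy = Policy.retrieve('EXAMPLE-POLICY-NUMBER')
    new_policy = policy.upgrade(
        item={'name': 'VIP Ticket', 'reference_number': 'REF-001', 'cost': 250.0},
        currency='USD',
        card={'number': '4111111111111111', 'expire_month': '12', 'expire_year': '2030'},
        billing_address={'address1': '1 Main St', 'address2': '', 'city': 'Phoenix',
                         'state': 'AZ', 'country': 'US', 'zip_code': '85001'},
    )
    return new_policy  # the original policy's status is now UPGRADED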
PypiClean
/chronotco-0.0.1.tar.gz/chronotco-0.0.1/README.rst
ChronoTCO
=========

**Version**: Experimental 0.0.1

This is an experimental decorator implementation of tail call optimization in
Python3 via bytecode injection, reducing the space complexity of recursion to
**O(1)** (rather than **O(n)**) by manipulating the function structure itself.
If a function is tail-call recursive and you want to ensure you won't blow the
stack, use chronotco!

Installation
------------

::

    pip install chronotco

Usage
-----

Import the decorator::

    from chronotco import chronotco

And decorate your tail-recursive function!

::

    @chronotco
    def tail_factorial(n, accumulator=1):
        if n == 0:
            return accumulator
        else:
            return tail_factorial(n-1, accumulator * n)

Support
-------

I do not provide any support, and I am not responsible for any melted faces
from raw performance!

License
-------

The license is copyleft, just keep it free forever and do what you will.
PypiClean
/tencentcloud-sdk-python-oceanus-3.0.973.tar.gz/tencentcloud-sdk-python-oceanus-3.0.973/tencentcloud/oceanus/v20190422/oceanus_client.py
import json from tencentcloud.common.exception.tencent_cloud_sdk_exception import TencentCloudSDKException from tencentcloud.common.abstract_client import AbstractClient from tencentcloud.oceanus.v20190422 import models class OceanusClient(AbstractClient): _apiVersion = '2019-04-22' _endpoint = 'oceanus.tencentcloudapi.com' _service = 'oceanus' def CheckSavepoint(self, request): """检查快照是否可用 :param request: Request instance for CheckSavepoint. :type request: :class:`tencentcloud.oceanus.v20190422.models.CheckSavepointRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.CheckSavepointResponse` """ try: params = request._serialize() headers = request.headers body = self.call("CheckSavepoint", params, headers=headers) response = json.loads(body) model = models.CheckSavepointResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def CopyJobs(self, request): """单条和批量复制作业 :param request: Request instance for CopyJobs. :type request: :class:`tencentcloud.oceanus.v20190422.models.CopyJobsRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.CopyJobsResponse` """ try: params = request._serialize() headers = request.headers body = self.call("CopyJobs", params, headers=headers) response = json.loads(body) model = models.CopyJobsResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def CreateFolder(self, request): """作业列表页面新建文件夹请求 :param request: Request instance for CreateFolder. :type request: :class:`tencentcloud.oceanus.v20190422.models.CreateFolderRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.CreateFolderResponse` """ try: params = request._serialize() headers = request.headers body = self.call("CreateFolder", params, headers=headers) response = json.loads(body) model = models.CreateFolderResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def CreateJob(self, request): """新建作业接口,一个 AppId 最多允许创建1000个作业 :param request: Request instance for CreateJob. :type request: :class:`tencentcloud.oceanus.v20190422.models.CreateJobRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.CreateJobResponse` """ try: params = request._serialize() headers = request.headers body = self.call("CreateJob", params, headers=headers) response = json.loads(body) model = models.CreateJobResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def CreateJobConfig(self, request): """创建作业配置,一个作业最多有100个配置版本 :param request: Request instance for CreateJobConfig. 
:type request: :class:`tencentcloud.oceanus.v20190422.models.CreateJobConfigRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.CreateJobConfigResponse` """ try: params = request._serialize() headers = request.headers body = self.call("CreateJobConfig", params, headers=headers) response = json.loads(body) model = models.CreateJobConfigResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def CreateResource(self, request): """创建资源接口 :param request: Request instance for CreateResource. :type request: :class:`tencentcloud.oceanus.v20190422.models.CreateResourceRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.CreateResourceResponse` """ try: params = request._serialize() headers = request.headers body = self.call("CreateResource", params, headers=headers) response = json.loads(body) model = models.CreateResourceResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def CreateResourceConfig(self, request): """创建资源配置接口 :param request: Request instance for CreateResourceConfig. :type request: :class:`tencentcloud.oceanus.v20190422.models.CreateResourceConfigRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.CreateResourceConfigResponse` """ try: params = request._serialize() headers = request.headers body = self.call("CreateResourceConfig", params, headers=headers) response = json.loads(body) model = models.CreateResourceConfigResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def DeleteJobs(self, request): """批量删除作业接口,批量操作数量上限20 :param request: Request instance for DeleteJobs. :type request: :class:`tencentcloud.oceanus.v20190422.models.DeleteJobsRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.DeleteJobsResponse` """ try: params = request._serialize() headers = request.headers body = self.call("DeleteJobs", params, headers=headers) response = json.loads(body) model = models.DeleteJobsResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def DeleteResourceConfigs(self, request): """删除资源版本 :param request: Request instance for DeleteResourceConfigs. :type request: :class:`tencentcloud.oceanus.v20190422.models.DeleteResourceConfigsRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.DeleteResourceConfigsResponse` """ try: params = request._serialize() headers = request.headers body = self.call("DeleteResourceConfigs", params, headers=headers) response = json.loads(body) model = models.DeleteResourceConfigsResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def DeleteResources(self, request): """删除资源接口 :param request: Request instance for DeleteResources. 
:type request: :class:`tencentcloud.oceanus.v20190422.models.DeleteResourcesRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.DeleteResourcesResponse` """ try: params = request._serialize() headers = request.headers body = self.call("DeleteResources", params, headers=headers) response = json.loads(body) model = models.DeleteResourcesResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def DeleteTableConfig(self, request): """删除作业表配置 :param request: Request instance for DeleteTableConfig. :type request: :class:`tencentcloud.oceanus.v20190422.models.DeleteTableConfigRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.DeleteTableConfigResponse` """ try: params = request._serialize() headers = request.headers body = self.call("DeleteTableConfig", params, headers=headers) response = json.loads(body) model = models.DeleteTableConfigResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def DescribeClusters(self, request): """查询集群 :param request: Request instance for DescribeClusters. :type request: :class:`tencentcloud.oceanus.v20190422.models.DescribeClustersRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.DescribeClustersResponse` """ try: params = request._serialize() headers = request.headers body = self.call("DescribeClusters", params, headers=headers) response = json.loads(body) model = models.DescribeClustersResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def DescribeJobConfigs(self, request): """查询作业配置列表,一次最多查询100个 :param request: Request instance for DescribeJobConfigs. :type request: :class:`tencentcloud.oceanus.v20190422.models.DescribeJobConfigsRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.DescribeJobConfigsResponse` """ try: params = request._serialize() headers = request.headers body = self.call("DescribeJobConfigs", params, headers=headers) response = json.loads(body) model = models.DescribeJobConfigsResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def DescribeJobSavepoint(self, request): """查找Savepoint列表 :param request: Request instance for DescribeJobSavepoint. :type request: :class:`tencentcloud.oceanus.v20190422.models.DescribeJobSavepointRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.DescribeJobSavepointResponse` """ try: params = request._serialize() headers = request.headers body = self.call("DescribeJobSavepoint", params, headers=headers) response = json.loads(body) model = models.DescribeJobSavepointResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def DescribeJobSubmissionLog(self, request): """查询作业实例启动日志 :param request: Request instance for DescribeJobSubmissionLog. 
:type request: :class:`tencentcloud.oceanus.v20190422.models.DescribeJobSubmissionLogRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.DescribeJobSubmissionLogResponse` """ try: params = request._serialize() headers = request.headers body = self.call("DescribeJobSubmissionLog", params, headers=headers) response = json.loads(body) model = models.DescribeJobSubmissionLogResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def DescribeJobs(self, request): """查询作业 :param request: Request instance for DescribeJobs. :type request: :class:`tencentcloud.oceanus.v20190422.models.DescribeJobsRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.DescribeJobsResponse` """ try: params = request._serialize() headers = request.headers body = self.call("DescribeJobs", params, headers=headers) response = json.loads(body) model = models.DescribeJobsResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def DescribeResourceConfigs(self, request): """描述资源配置接口 :param request: Request instance for DescribeResourceConfigs. :type request: :class:`tencentcloud.oceanus.v20190422.models.DescribeResourceConfigsRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.DescribeResourceConfigsResponse` """ try: params = request._serialize() headers = request.headers body = self.call("DescribeResourceConfigs", params, headers=headers) response = json.loads(body) model = models.DescribeResourceConfigsResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def DescribeResourceRelatedJobs(self, request): """获取资源关联作业信息 :param request: Request instance for DescribeResourceRelatedJobs. :type request: :class:`tencentcloud.oceanus.v20190422.models.DescribeResourceRelatedJobsRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.DescribeResourceRelatedJobsResponse` """ try: params = request._serialize() headers = request.headers body = self.call("DescribeResourceRelatedJobs", params, headers=headers) response = json.loads(body) model = models.DescribeResourceRelatedJobsResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def DescribeResources(self, request): """描述资源接口 :param request: Request instance for DescribeResources. :type request: :class:`tencentcloud.oceanus.v20190422.models.DescribeResourcesRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.DescribeResourcesResponse` """ try: params = request._serialize() headers = request.headers body = self.call("DescribeResources", params, headers=headers) response = json.loads(body) model = models.DescribeResourcesResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def DescribeSystemResources(self, request): """描述系统资源接口 :param request: Request instance for DescribeSystemResources. 
:type request: :class:`tencentcloud.oceanus.v20190422.models.DescribeSystemResourcesRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.DescribeSystemResourcesResponse` """ try: params = request._serialize() headers = request.headers body = self.call("DescribeSystemResources", params, headers=headers) response = json.loads(body) model = models.DescribeSystemResourcesResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def DescribeTreeJobs(self, request): """生成树状作业显示结构 :param request: Request instance for DescribeTreeJobs. :type request: :class:`tencentcloud.oceanus.v20190422.models.DescribeTreeJobsRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.DescribeTreeJobsResponse` """ try: params = request._serialize() headers = request.headers body = self.call("DescribeTreeJobs", params, headers=headers) response = json.loads(body) model = models.DescribeTreeJobsResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def DescribeTreeResources(self, request): """查询树状结构资源列表 :param request: Request instance for DescribeTreeResources. :type request: :class:`tencentcloud.oceanus.v20190422.models.DescribeTreeResourcesRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.DescribeTreeResourcesResponse` """ try: params = request._serialize() headers = request.headers body = self.call("DescribeTreeResources", params, headers=headers) response = json.loads(body) model = models.DescribeTreeResourcesResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def DescribeWorkSpaces(self, request): """授权工作空间列表 :param request: Request instance for DescribeWorkSpaces. :type request: :class:`tencentcloud.oceanus.v20190422.models.DescribeWorkSpacesRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.DescribeWorkSpacesResponse` """ try: params = request._serialize() headers = request.headers body = self.call("DescribeWorkSpaces", params, headers=headers) response = json.loads(body) model = models.DescribeWorkSpacesResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def ModifyJob(self, request): """更新作业属性,仅允许以下3种操作,不支持组合操作: (1) 更新作业名称 (2) 更新作业备注 (3) 更新作业最大并行度 变更前提:WorkerCuNum<=MaxParallelism 如果MaxParallelism变小,不重启作业,待下一次重启生效 如果MaxParallelism变大,则要求入参RestartAllowed必须为True 假设作业运行状态,则先停止作业,再启动作业,中间状态丢失 假设作业暂停状态,则将作业更改为停止状态,中间状态丢失 :param request: Request instance for ModifyJob. :type request: :class:`tencentcloud.oceanus.v20190422.models.ModifyJobRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.ModifyJobResponse` """ try: params = request._serialize() headers = request.headers body = self.call("ModifyJob", params, headers=headers) response = json.loads(body) model = models.ModifyJobResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def RunJobs(self, request): """批量启动或者恢复作业,批量操作数量上限20 :param request: Request instance for RunJobs. 
:type request: :class:`tencentcloud.oceanus.v20190422.models.RunJobsRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.RunJobsResponse` """ try: params = request._serialize() headers = request.headers body = self.call("RunJobs", params, headers=headers) response = json.loads(body) model = models.RunJobsResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def StopJobs(self, request): """批量停止作业,批量操作数量上限为20 :param request: Request instance for StopJobs. :type request: :class:`tencentcloud.oceanus.v20190422.models.StopJobsRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.StopJobsResponse` """ try: params = request._serialize() headers = request.headers body = self.call("StopJobs", params, headers=headers) response = json.loads(body) model = models.StopJobsResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e)) def TriggerJobSavepoint(self, request): """触发Savepoint :param request: Request instance for TriggerJobSavepoint. :type request: :class:`tencentcloud.oceanus.v20190422.models.TriggerJobSavepointRequest` :rtype: :class:`tencentcloud.oceanus.v20190422.models.TriggerJobSavepointResponse` """ try: params = request._serialize() headers = request.headers body = self.call("TriggerJobSavepoint", params, headers=headers) response = json.loads(body) model = models.TriggerJobSavepointResponse() model._deserialize(response["Response"]) return model except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(type(e).__name__, str(e))
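
# --- Illustrative usage sketch (editor's addition, not part of the original SDK module) ---
# Every method above follows the same pattern: serialize a *Request model, send it with
# self.call(...), and deserialize the JSON "Response" payload into the matching *Response
# model. The module path and class name used below (oceanus_client.OceanusClient) follow
# the SDK's usual layout but are assumptions here, as are the placeholder credentials and
# region.
if __name__ == "__main__":
    from tencentcloud.common import credential
    from tencentcloud.oceanus.v20190422 import oceanus_client, models as oceanus_models

    cred = credential.Credential("YOUR_SECRET_ID", "YOUR_SECRET_KEY")  # placeholders
    client = oceanus_client.OceanusClient(cred, "ap-guangzhou")

    req = oceanus_models.DescribeJobsRequest()
    resp = client.DescribeJobs(req)  # raises TencentCloudSDKException on API errors
    print(resp.to_json_string())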
PypiClean
/alignak_webui-0.12.2-py3-none-any.whl/alignak_webui/plugins/worldmap/static/geocoder/Control.OSMGeocoder.js
if (typeof console == "undefined") { this.console = { log: function (msg) { /* do nothing since it would otherwise break IE */} }; } L.Control.OSMGeocoder = L.Control.extend({ options: { collapsed: true, position: 'topright', text: 'Locate', placeholder: '', bounds: null, // L.LatLngBounds email: null, // String callback: function (results) { if (results.length == 0) { console.log("ERROR: didn't find a result"); return; } var bbox = results[0].boundingbox, first = new L.LatLng(bbox[0], bbox[2]), second = new L.LatLng(bbox[1], bbox[3]), bounds = new L.LatLngBounds([first, second]); this._map.fitBounds(bounds); } }, _callbackId: 0, initialize: function (options) { L.Util.setOptions(this, options); }, onAdd: function (map) { this._map = map; var className = 'leaflet-control-geocoder', container = this._container = L.DomUtil.create('div', className); L.DomEvent.disableClickPropagation(container); var form = this._form = L.DomUtil.create('form', className + '-form'); var input = this._input = document.createElement('input'); input.type = "text"; input.placeholder = this.options.placeholder || ''; var submit = document.createElement('input'); submit.type = "submit"; submit.value = this.options.text; form.appendChild(input); form.appendChild(submit); L.DomEvent.addListener(form, 'submit', this._geocode, this); if (this.options.collapsed) { L.DomEvent.addListener(container, 'mouseover', this._expand, this); L.DomEvent.addListener(container, 'mouseout', this._collapse, this); var link = this._layersLink = L.DomUtil.create('a', className + '-toggle', container); link.href = '#'; link.title = 'Nominatim Geocoder'; L.DomEvent.addListener(link, L.Browser.touch ? 'click' : 'focus', this._expand, this); this._map.on('movestart', this._collapse, this); } else { this._expand(); } container.appendChild(form); return container; }, /* helper functions for cordinate extraction */ _createSearchResult : function(lat, lon) { //creates an position description similar to the result of a Nominatim search var diff = 0.005; var result = []; result[0] = {}; result[0]["boundingbox"] = [parseFloat(lat)-diff,parseFloat(lat)+diff,parseFloat(lon)-diff,parseFloat(lon)+diff]; result[0]["class"]="boundary"; result[0]["display_name"]="Position: "+lat+" "+lon; result[0]["lat"] = lat; result[0]["lon"] = lon; return result; }, _isLatLon : function (q) { //"lon lat" => xx.xxx x.xxxxx var re = /(-?\d+\.\d+)\s(-?\d+\.\d+)/; var m = re.exec(q); if (m != undefined) return m; //lat...xx.xxx...lon...x.xxxxx re = /lat\D*(-?\d+\.\d+)\D*lon\D*(-?\d+\.\d+)/; m = re.exec(q); //showRegExpResult(m); if (m != undefined) return m; else return null; }, _isLatLon_decMin : function (q) { //N 53° 13.785' E 010° 23.887' //re = /[NS]\s*(\d+)\D*(\d+\.\d+).?\s*[EW]\s*(\d+)\D*(\d+\.\d+)\D*/; re = /([ns])\s*(\d+)\D*(\d+\.\d+).?\s*([ew])\s*(\d+)\D*(\d+\.\d+)/i; m = re.exec(q.toLowerCase()); //showRegExpResult(m); if ((m != undefined)) return m; else return null; // +- dec min +- dec min }, _geocode : function (event) { wait_message('Searching...', true) L.DomEvent.preventDefault(event); var q = this._input.value; //try to find coordinates if (this._isLatLon(q) != null) { var m = this._isLatLon(q); //m = {lon, lat} this.options.callback.call(this, this._createSearchResult(m[1],m[2])); return; } else if (this._isLatLon_decMin(q) != null) { var m = this._isLatLon_decMin(q); //m: [ns, lat dec, lat min, ew, lon dec, lon min] var temp = new Array(); temp['n'] = 1; temp['s'] = -1; temp['e'] = 1; temp['w'] = -1; 
this.options.callback.call(this,this._createSearchResult( temp[m[1]]*(Number(m[2]) + m[3]/60), temp[m[4]]*(Number(m[5]) + m[6]/60) )); return; } //and now Nominatim //http://wiki.openstreetmap.org/wiki/Nominatim window[("_l_osmgeocoder_"+this._callbackId)] = L.Util.bind(this.options.callback, this); /* Set up params to send to Nominatim */ var params = { // Defaults q: this._input.value, json_callback : ("_l_osmgeocoder_"+this._callbackId++), format: 'json' }; if (this.options.bounds && this.options.bounds != null) { if( this.options.bounds instanceof L.LatLngBounds ) { params.viewbox = this.options.bounds.toBBoxString(); params.bounded = 1; } else { console.log('bounds must be of type L.LatLngBounds'); return; } } if (this.options.email && this.options.email != null) { if (typeof this.options.email == 'string') { params.email = this.options.email; } else{ console.log('email must be a string'); } } var protocol = location.protocol; if (protocol == "file:") protocol = "https:"; var url = protocol + "//nominatim.openstreetmap.org/search" + L.Util.getParamString(params), script = document.createElement("script"); script.type = "text/javascript"; script.src = url; script.id = this._callbackId; document.getElementsByTagName("head")[0].appendChild(script); }, _expand: function () { L.DomUtil.addClass(this._container, 'leaflet-control-geocoder-expanded'); }, _collapse: function () { this._container.className = this._container.className.replace(' leaflet-control-geocoder-expanded', ''); } });
PypiClean
/exactly-0.15.0-py3-none-any.whl/exactly_lib/type_val_prims/string_source/string_source.py
from abc import ABC, abstractmethod from typing import Tuple from exactly_lib.type_val_prims.description.tree_structured import WithNodeDescription, StructureRenderer from exactly_lib.type_val_prims.string_source.contents import StringSourceContents from exactly_lib.type_val_prims.string_source.structure_builder import StringSourceStructureBuilder from exactly_lib.util.description_tree.renderer import NodeRenderer from exactly_lib.util.description_tree.tree import Node from exactly_lib.util.str_ import read_lines class StringSource(WithNodeDescription, ABC): """Access to a string in various forms. The string is backed by a constant "string source". The string is constant, unless the "string source" may give different result at different times - e.g. an external program. The public methods are just different kind of access to the same string. Maybe a "freeze" method should be added to store the string in a file (if needed), to guarantee that the string is constant, even over time. """ @abstractmethod def new_structure_builder(self) -> StringSourceStructureBuilder: """"Gives a new object, for each invokation""" pass def structure(self) -> StructureRenderer: """Should (probably) not be overloaded.""" return _StructureRendererOfStringSource(self) @abstractmethod def freeze(self): """Freezes the contents of the model, to the contents generated the first invocations of any of the contents getters. For example, freezing an object guarantees that external processes (influencing the contents) will only be invoked once after the call to this method. The method should not generate contents by itself - to avoid generating contents that is not needed. The freezing just assures that the contents will be generated once, and then "shared" by all of the contents getters. The implementation should not involve any "external" resources (such as files and processes). This method is an opportunity for optimizations, since it signals that the object will be used multiple times. The method can be invoked arbitrary number of times. But usually only the first invocation should have any effect. """ pass @abstractmethod def contents(self) -> StringSourceContents: """Gives the contents that this object represents. The returned object may change over time - especially before and after freezing. Thus the returned object must not be stored as a constant representation of the StringSource's contents. Invoking this method should not take part in generating the actual contents (accessed via the returned object's methods). Especially should no Hard Error exceptions occur. This allows this method to be used while cheaply constructing variants of the contents, before it is known whether the actual contents will ever be used or not. """ pass class _StructureRendererOfStringSource(NodeRenderer[None]): def __init__(self, string_source: StringSource): self._string_source = string_source def render(self) -> Node[None]: return self._string_source.new_structure_builder().build().render() def read_lines_as_str__w_minimum_num_chars(min_num_chars_to_read: int, source: StringSourceContents) -> Tuple[str, bool]: """ :return: string read, source-may-have-more-contents """ with source.as_lines as lines: return read_lines.read_lines_as_str__w_minimum_num_chars(min_num_chars_to_read, lines)
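
# --- Illustrative sketch (editor's addition, not part of the original module) ---
# `read_lines_as_str__w_minimum_num_chars` only needs the `as_lines` context manager of
# the contents object, so a tiny hypothetical stand-in is enough to show the call; real
# code would use a concrete StringSourceContents implementation from exactly_lib.
if __name__ == "__main__":
    from contextlib import contextmanager

    class _IllustrativeContents:
        """Hypothetical stub exposing only the `as_lines` part of the interface."""

        @property
        @contextmanager
        def as_lines(self):
            yield iter(['first line\n', 'second line\n', 'third line\n'])

    text, may_have_more = read_lines_as_str__w_minimum_num_chars(
        min_num_chars_to_read=15,
        source=_IllustrativeContents(),  # type: ignore[arg-type]
    )
    print(repr(text), may_have_more)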
PypiClean
/reddit_detective-0.1.4-py3-none-any.whl/reddit_detective/analytics/metrics.py
from neo4j import BoltDriver

from reddit_detective.analytics.utils import (get_redditors, get_user_comments_times,
                                              get_submission_comments_times, get_subreddit_comments_times)


def interaction_score(driver: BoltDriver, username):
    """
    For a user in the Graph, shows
        # comments received / (# comments received + # comments made)
    Best practice is to use it in networks whose nodes were created with limit=None

    Inspired by "Analyzing behavioral trends in community driven discussion platforms like Reddit"
    DOI: 10.1109/ASONAM.2018.8508687

    Score close to 1: User is a "starter"
    Score close to 0: User is a "consumer"
    """
    s = driver.session()
    comments_received = list(s.run("""
MATCH (:Redditor {username: "%s"})-[:AUTHORED]-(:Submission)-[:UNDER]-(c:Comment)
WITH c
RETURN count(c)
""" % username))[0][0]  # Converted Result object to integer
    comments_made = list(s.run("""
MATCH (:Redditor {username: "%s"})-[:AUTHORED]-(c:Comment)
WITH c
RETURN count(c)
""" % username))[0][0]
    return comments_received / (comments_received + comments_made)


def interaction_score_normalized(driver: BoltDriver, username):
    users_score = interaction_score(driver, username)
    total_score = sum([interaction_score(driver, user) for user in get_redditors(driver)])
    return users_score / total_score


def _cyborg_score(driver: BoltDriver, name, util_func) -> tuple:
    """
    Calculates the ratio of cyborg-like comments to all comments of the user.
    The tuple's first element is the score, the second element is a list of ids
    of the cyborg-like comments.

    Inspired by "Analyzing behavioral trends in community driven discussion platforms like Reddit"
    DOI: 10.1109/ASONAM.2018.8508687

    At a subreddit, 17%-20% of the people exhibit such cyborg-like behaviors.
    If a post's first comment is made within 6 seconds, the chance of it being
    cyborg-like is 79%-83.9% according to the paper. This information is extracted
    by looking at the character sizes of those comments.

    A cyborg-like comment can also be an advertisement, an AutoModerator post or a copy-paste.
    """
    cyborg_comms = []
    ids, times = util_func(driver, name)
    for i in range(len(ids)):
        if times[i] <= 6:
            cyborg_comms.append(ids[i])
    return len(cyborg_comms) / len(ids), cyborg_comms


def cyborg_score_user(driver: BoltDriver, username):
    return _cyborg_score(driver, username, util_func=get_user_comments_times)


def cyborg_score_submission(driver: BoltDriver, submission_id):
    return _cyborg_score(driver, submission_id, util_func=get_submission_comments_times)


def cyborg_score_subreddit(driver: BoltDriver, subreddit_name):
    return _cyborg_score(driver, subreddit_name, util_func=get_subreddit_comments_times)
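
# --- Illustrative usage sketch (editor's addition, not part of the original module) ---
# All metrics take an open neo4j driver plus an identifier; the bolt URI, credentials
# and username below are placeholders.
if __name__ == "__main__":
    from neo4j import GraphDatabase

    driver = GraphDatabase.driver("bolt://localhost:7687", auth=("neo4j", "password"))
    try:
        print("interaction score:", interaction_score(driver, "some_redditor"))
        score, cyborg_comment_ids = cyborg_score_user(driver, "some_redditor")
        print("cyborg score:", score, "cyborg-like comments:", cyborg_comment_ids)
    finally:
        driver.close()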
PypiClean
/Orange3_Bioinformatics-4.8.0-py3-none-any.whl/orangecontrib/bioinformatics/ncbi/gene/__init__.py
import json import sqlite3 import contextlib from typing import Dict, List, Tuple, Optional from Orange.data import Table, Domain, StringVariable from Orange.data.util import get_unique_names_domain from orangecontrib.bioinformatics.utils import serverfiles from orangecontrib.bioinformatics.ncbi.taxonomy import species_name_to_taxid from orangecontrib.bioinformatics.ncbi.gene.config import ( DOMAIN, ENTREZ_ID, query, query_exact, gene_info_attributes, ) from orangecontrib.bioinformatics.widgets.utils.data import TableAnnotation class Gene: """Representation of gene summary.""" __slots__ = gene_info_attributes + ('input_identifier',) def __init__(self, input_identifier: Optional[str] = None): """ If we want to match gene to it's corresponding Entrez ID we must, upon class initialization, provide some `input identifier`. This way :class:`GeneMatcher` will know what to match it against in Gene Database. Parameters ---------- input_identifier : str This can be any of the following: symbol, synonym, locus tag, other database id, ... """ self.input_identifier = input_identifier def __getattr__(self, attribute): if attribute not in self.__slots__: return None def __repr__(self): return ( f'<Gene symbol={self.symbol}, tax_id={self.tax_id}, gene_id={self.gene_id}>' ) def load_attributes( self, values: Tuple[str, ...], attributes: Tuple[str, ...] = gene_info_attributes, ): for attr, val in zip(attributes, values): setattr( self, attr, json.loads(val) if attr in ('synonyms', 'db_refs', 'homologs') else val, ) def homolog_gene(self, taxonomy_id: str) -> Optional[str]: """Returns gene homolog for given organism. Parameters ---------- taxonomy_id: str Taxonomy id of target organism. Returns ------- str Entrez ID (if available). """ return self.homologs.get(taxonomy_id, None) class GeneMatcher: """Gene name matching interface.""" def __init__(self, tax_id: str, progress_callback=None, auto_start=True): """ Parameters ---------- tax_id:: str Taxonomy id of target organism. """ self._tax_id: str = tax_id self._genes: List[Gene] = [] self._progress_callback = progress_callback self._auto_start = auto_start self.gene_db_path = self._gene_db_path() @property def tax_id(self): return self._tax_id @tax_id.setter def tax_id(self, tax_id: str) -> None: self._tax_id = tax_id self.gene_db_path = self._gene_db_path() @property def genes(self) -> List[Gene]: return self._genes @genes.setter def genes(self, genes: List[str]) -> None: self._genes = [Gene(input_identifier=gene) for gene in genes] if self._auto_start: self._match() def get_known_genes(self) -> List[Gene]: """Return Genes with known Entrez ID Returns ------- :class:`list` of :class:`Gene` instances Genes with unique match """ return [gene for gene in self.genes if gene.gene_id] def to_data_table(self, selected_genes: Optional[List[str]] = None) -> Table: """Transform GeneMatcher results to Orange data table. Optionally we can provide a list of genes (Entrez Ids). The table on the output will be populated only with provided genes. 
Parameters ---------- selected_genes: list List of Entrez Ids Returns ------- Orange.data.Table Summary of Gene info in tabular format """ data_x = [] metas = [ StringVariable('Input gene ID'), StringVariable(ENTREZ_ID), StringVariable('Symbol'), StringVariable('Synonyms'), StringVariable('Description'), StringVariable('Other IDs'), StringVariable('Type of gene'), StringVariable('Chromosome'), StringVariable('Map location'), StringVariable('Locus tag'), StringVariable('Symbol from nomenclature authority'), StringVariable('Full name from nomenclature authority'), StringVariable('Nomenclature status'), StringVariable('Other designations'), StringVariable('Species'), StringVariable('Taxonomy ID'), ] domain = Domain([], metas=metas) genes: List[Gene] = self.genes if selected_genes is not None: selected_genes_set = set(selected_genes) genes = [ gene for gene in self.genes if str(gene.gene_id) in selected_genes_set ] for gene in genes: db_refs = ( ', '.join( '{}: {}'.format(key, val) for (key, val) in gene.db_refs.items() ) if gene.db_refs else '' ) synonyms = ', '.join(gene.synonyms) if gene.synonyms else '' line = [ gene.input_identifier, gene.gene_id, gene.symbol, synonyms, gene.description, db_refs, gene.type_of_gene, gene.chromosome, gene.map_location, gene.locus_tag, gene.symbol_from_nomenclature_authority, gene.full_name_from_nomenclature_authority, gene.nomenclature_status, gene.other_designations, species_name_to_taxid(gene.species), gene.tax_id, ] data_x.append(line) table = Table.from_list(domain, data_x) table.name = 'Gene Matcher Results' table.attributes[TableAnnotation.tax_id] = self.tax_id table.attributes[TableAnnotation.gene_as_attr_name] = False table.attributes[TableAnnotation.gene_id_column] = ENTREZ_ID return table def match_table_column( self, data_table: Table, column_name: str, target_column: Optional[StringVariable] = None, ) -> Table: """Helper function for gene name matching with :class:`Orange.data.Table`. Give a column of genes, GeneMatcher will try to map genes to their corresponding Entrez Ids. Parameters ---------- data_table: :class:`Orange.data.Table` Data table column_name: str Name of the column where gene symbols are located target_column: :class:`StringVariable` Column where we store Entrez Ids. Defaults to StringVariable(ncbi.gene.config.NCBI_ID) Returns ------- :class:`Orange.data.Table` Data table with a column of Gene Ids """ if column_name in data_table.domain: self.genes = data_table.get_column(column_name) if target_column is None: target_column = StringVariable(ENTREZ_ID) new_domain = Domain( data_table.domain.attributes, data_table.domain.class_vars, data_table.domain.metas + (target_column,), ) new_data = data_table.transform(new_domain) with new_data.unlocked(new_data.metas): new_data[:, target_column] = [ [str(gene.gene_id) if gene.gene_id else '?'] for gene in self.genes ] return new_data def match_table_attributes( self, data_table, run=True, rename=False, source_name='Source ID' ) -> Table: """Helper function for gene name matching with :class:`Orange.data.Table`. Match table attributes and if a unique match is found create a new column attribute for Entrez Id. 
Attribute name is defined here: `orangecontrib.bioinformatics.ncbi.gene.config.NCBI_ID` Parameters ---------- data_table: :class:`Orange.data.Table` Data table Returns ------- :class:`Orange.data.Table` Data table column attributes are populated with Entrez Ids """ # run gene matcher if run: self.genes = [var.name for var in data_table.domain.attributes] def helper(gene, attribute): if gene.gene_id: if rename: attribute = attribute.renamed(gene.symbol) attribute.attributes[source_name] = gene.input_identifier attribute.attributes[ENTREZ_ID] = gene.gene_id return attribute attributes = [ helper(gene, attr) for gene, attr in zip(self.genes, data_table.domain.attributes) ] metas = data_table.domain.metas (attr_deduplicated, _, metas_deduplicated), renamed = get_unique_names_domain( [a.name for a in attributes], metas=[m.name for m in metas] ) if len(renamed): attributes = [ attr.renamed(new_name) for attr, new_name in zip(attributes, attr_deduplicated) ] metas = [ meta.renamed(new_name) for meta, new_name in zip(metas, metas_deduplicated) ] domain = Domain(attributes, data_table.domain.class_vars, metas) return data_table.transform(domain) def match_genes(self): self._match() def _gene_db_path(self): return serverfiles.localpath_download(DOMAIN, f'{self.tax_id}.sqlite') def _match(self): synonyms, db_refs = 4, 5 with contextlib.closing(sqlite3.connect(self.gene_db_path)) as con: with con as cursor: for gene in self.genes: if self._progress_callback: self._progress_callback() search_param = gene.input_identifier.lower() if search_param: match_statement = ( '{gene_id symbol locus_tag symbol_from_nomenclature_authority}:^"' + search_param + '"' ) match = cursor.execute( query_exact, (match_statement,) + tuple([search_param] * 4) ).fetchall() # if unique match if len(match) == 1: gene.load_attributes(match[0]) continue match = cursor.execute( query, (f'synonyms:"{search_param}"',) ).fetchall() synonym_matched_rows = [ m for m in match if search_param in (x.lower() for x in json.loads(m[synonyms])) ] # if unique match if len(synonym_matched_rows) == 1: gene.load_attributes(synonym_matched_rows[0]) continue match = cursor.execute( query, (f'db_refs:"{search_param}"',) ).fetchall() db_ref_matched_rows = [ m for m in match if search_param in (x.lower() for x in json.loads(m[db_refs]).values()) ] # if unique match if len(db_ref_matched_rows) == 1: gene.load_attributes(db_ref_matched_rows[0]) continue class GeneInfo(dict): def __init__(self, tax_id: str): """Loads genes for given organism in a dict. Each instance of :class:`Gene` is mapped to corresponding Entrez ID Parameters ---------- tax_id: str Taxonomy id of target organism. 
""" super().__init__() self.tax_id: str = tax_id self.gene_db_path: str = self._gene_db_path() connection = sqlite3.connect(self.gene_db_path) cursor = connection.cursor() for gene_info in cursor.execute('SELECT * FROM gene_info').fetchall(): gene = Gene() gene.load_attributes(gene_info) self[gene.gene_id] = gene cursor.close() connection.close() def _gene_db_path(self): return serverfiles.localpath_download(DOMAIN, f'{self.tax_id}.sqlite') def load_gene_summary(tax_d: str, genes: List[Optional[str]]) -> List[Optional[Gene]]: gene_db_path = serverfiles.localpath_download(DOMAIN, f'{tax_d}.sqlite') # filter NoneTypes _genes = [g for g in genes if g] with contextlib.closing(sqlite3.connect(gene_db_path)) as con: with con as cur: gene_map: Dict[str, Gene] = {} for gene_info in cur.execute( f'SELECT * FROM gene_info WHERE gene_id in ({",".join(_genes)})' ).fetchall(): gene = Gene() gene.load_attributes(gene_info) gene_map[gene.gene_id] = gene return [gene_map.get(gid, None) if gid else None for gid in genes] if __name__ == "__main__": gm = GeneMatcher('9606') gm.genes = ['CD4', '614535', 'ENSG00000205426', "2'-PDE", 'HB-1Y'] print(list(zip(gm.genes, [g.input_identifier for g in gm.genes]))) _homologs = load_gene_summary( '10090', [g.homolog_gene(taxonomy_id='10090') for g in gm.genes] ) print(_homologs)
PypiClean
/one_gadget-1.1.0.tar.gz/one_gadget-1.1.0/README.md
# one-gadget-lib

[![Build Status](https://travis-ci.org/szk3y/one-gadget-lib.svg?branch=master)](https://travis-ci.org/szk3y/one-gadget-lib)
[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)

One-gadget is code that invokes "/bin/sh" without any arguments, so all you need to do is jump to its address. This library provides a function that finds the offset to the one-gadget in libc.

One-gadget-lib works with both Python 2 and Python 3.

## Install

```
pip install one_gadget
```

or

```
pip3 install one_gadget
```

## Dependencies

- capstone
- pyelftools

However, you don't have to install them explicitly.

## Usage

```python
from one_gadget import generate_one_gadget

path_to_libc = '/lib/x86_64-linux-gnu/libc.so.6'

for offset in generate_one_gadget(path_to_libc):
    print(offset)
```

## Future work

- Support ARM
- Support complex cases like this one:

```
   45216:       48 8d 35 43 13 38 00    lea    rsi,[rip+0x381343]        # 3c6560 <__abort_msg@@GLIBC_PRIVATE+0x980>
   4521d:       31 d2                   xor    edx,edx
   4521f:       bf 02 00 00 00          mov    edi,0x2
   45224:       48 89 5c 24 40          mov    QWORD PTR [rsp+0x40],rbx
   45229:       48 c7 44 24 48 00 00    mov    QWORD PTR [rsp+0x48],0x0
   45230:       00 00
   45232:       48 89 44 24 30          mov    QWORD PTR [rsp+0x30],rax
   45237:       48 8d 05 16 7b 14 00    lea    rax,[rip+0x147b16]        # 18cd54 <_libc_intl_domainname@@GLIBC_2.2.5+0x194>
   4523e:       48 89 44 24 38          mov    QWORD PTR [rsp+0x38],rax
   45243:       e8 a8 04 ff ff          call   356f0 <__sigaction@@GLIBC_2.2.5>
   45248:       48 8d 35 71 12 38 00    lea    rsi,[rip+0x381271]        # 3c64c0 <__abort_msg@@GLIBC_PRIVATE+0x8e0>
   4524f:       31 d2                   xor    edx,edx
   45251:       bf 03 00 00 00          mov    edi,0x3
   45256:       e8 95 04 ff ff          call   356f0 <__sigaction@@GLIBC_2.2.5>
   4525b:       31 d2                   xor    edx,edx
   4525d:       4c 89 e6                mov    rsi,r12
   45260:       bf 02 00 00 00          mov    edi,0x2
   45265:       e8 b6 04 ff ff          call   35720 <sigprocmask@@GLIBC_2.2.5>
   4526a:       48 8b 05 47 ec 37 00    mov    rax,QWORD PTR [rip+0x37ec47]        # 3c3eb8 <_IO_file_jumps@@GLIBC_2.2.5+0x7d8>
   45271:       48 8d 3d df 7a 14 00    lea    rdi,[rip+0x147adf]        # 18cd57 <_libc_intl_domainname@@GLIBC_2.2.5+0x197>
   45278:       48 8d 74 24 30          lea    rsi,[rsp+0x30]
   4527d:       c7 05 19 12 38 00 00    mov    DWORD PTR [rip+0x381219],0x0        # 3c64a0 <__abort_msg@@GLIBC_PRIVATE+0x8c0>
   45284:       00 00 00
   45287:       c7 05 13 12 38 00 00    mov    DWORD PTR [rip+0x381213],0x0        # 3c64a4 <__abort_msg@@GLIBC_PRIVATE+0x8c4>
   4528e:       00 00 00
   45291:       48 8b 10                mov    rdx,QWORD PTR [rax]
   45294:       e8 d7 74 08 00          call   cc770 <execve@@GLIBC_2.2.5>
```

## Reference

- [one_gadget](https://github.com/david942j/one_gadget)
- [The one-gadget in glibc](https://david942j.blogspot.com/2017/02/project-one-gadget-in-glibc.html) (blog post by the author of [one_gadget](https://github.com/david942j/one_gadget))
PypiClean
/jupyter_declarativewidgets-0.7.0.tar.gz/jupyter_declarativewidgets-0.7.0/declarativewidgets/static/urth_components/moment/locale/cs.js
;(function (global, factory) { typeof exports === 'object' && typeof module !== 'undefined' && typeof require === 'function' ? factory(require('../moment')) : typeof define === 'function' && define.amd ? define(['../moment'], factory) : factory(global.moment) }(this, function (moment) { 'use strict'; var months = 'leden_únor_březen_duben_květen_červen_červenec_srpen_září_říjen_listopad_prosinec'.split('_'), monthsShort = 'led_úno_bře_dub_kvě_čvn_čvc_srp_zář_říj_lis_pro'.split('_'); function plural(n) { return (n > 1) && (n < 5) && (~~(n / 10) !== 1); } function translate(number, withoutSuffix, key, isFuture) { var result = number + ' '; switch (key) { case 's': // a few seconds / in a few seconds / a few seconds ago return (withoutSuffix || isFuture) ? 'pár sekund' : 'pár sekundami'; case 'm': // a minute / in a minute / a minute ago return withoutSuffix ? 'minuta' : (isFuture ? 'minutu' : 'minutou'); case 'mm': // 9 minutes / in 9 minutes / 9 minutes ago if (withoutSuffix || isFuture) { return result + (plural(number) ? 'minuty' : 'minut'); } else { return result + 'minutami'; } break; case 'h': // an hour / in an hour / an hour ago return withoutSuffix ? 'hodina' : (isFuture ? 'hodinu' : 'hodinou'); case 'hh': // 9 hours / in 9 hours / 9 hours ago if (withoutSuffix || isFuture) { return result + (plural(number) ? 'hodiny' : 'hodin'); } else { return result + 'hodinami'; } break; case 'd': // a day / in a day / a day ago return (withoutSuffix || isFuture) ? 'den' : 'dnem'; case 'dd': // 9 days / in 9 days / 9 days ago if (withoutSuffix || isFuture) { return result + (plural(number) ? 'dny' : 'dní'); } else { return result + 'dny'; } break; case 'M': // a month / in a month / a month ago return (withoutSuffix || isFuture) ? 'měsíc' : 'měsícem'; case 'MM': // 9 months / in 9 months / 9 months ago if (withoutSuffix || isFuture) { return result + (plural(number) ? 'měsíce' : 'měsíců'); } else { return result + 'měsíci'; } break; case 'y': // a year / in a year / a year ago return (withoutSuffix || isFuture) ? 'rok' : 'rokem'; case 'yy': // 9 years / in 9 years / 9 years ago if (withoutSuffix || isFuture) { return result + (plural(number) ? 'roky' : 'let'); } else { return result + 'lety'; } break; } } var cs = moment.defineLocale('cs', { months : months, monthsShort : monthsShort, monthsParse : (function (months, monthsShort) { var i, _monthsParse = []; for (i = 0; i < 12; i++) { // use custom parser to solve problem with July (červenec) _monthsParse[i] = new RegExp('^' + months[i] + '$|^' + monthsShort[i] + '$', 'i'); } return _monthsParse; }(months, monthsShort)), shortMonthsParse : (function (monthsShort) { var i, _shortMonthsParse = []; for (i = 0; i < 12; i++) { _shortMonthsParse[i] = new RegExp('^' + monthsShort[i] + '$', 'i'); } return _shortMonthsParse; }(monthsShort)), longMonthsParse : (function (months) { var i, _longMonthsParse = []; for (i = 0; i < 12; i++) { _longMonthsParse[i] = new RegExp('^' + months[i] + '$', 'i'); } return _longMonthsParse; }(months)), weekdays : 'neděle_pondělí_úterý_středa_čtvrtek_pátek_sobota'.split('_'), weekdaysShort : 'ne_po_út_st_čt_pá_so'.split('_'), weekdaysMin : 'ne_po_út_st_čt_pá_so'.split('_'), longDateFormat : { LT: 'H:mm', LTS : 'H:mm:ss', L : 'DD.MM.YYYY', LL : 'D. MMMM YYYY', LLL : 'D. MMMM YYYY H:mm', LLLL : 'dddd D. MMMM YYYY H:mm', l : 'D. M. 
YYYY' }, calendar : { sameDay: '[dnes v] LT', nextDay: '[zítra v] LT', nextWeek: function () { switch (this.day()) { case 0: return '[v neděli v] LT'; case 1: case 2: return '[v] dddd [v] LT'; case 3: return '[ve středu v] LT'; case 4: return '[ve čtvrtek v] LT'; case 5: return '[v pátek v] LT'; case 6: return '[v sobotu v] LT'; } }, lastDay: '[včera v] LT', lastWeek: function () { switch (this.day()) { case 0: return '[minulou neděli v] LT'; case 1: case 2: return '[minulé] dddd [v] LT'; case 3: return '[minulou středu v] LT'; case 4: case 5: return '[minulý] dddd [v] LT'; case 6: return '[minulou sobotu v] LT'; } }, sameElse: 'L' }, relativeTime : { future : 'za %s', past : 'před %s', s : translate, m : translate, mm : translate, h : translate, hh : translate, d : translate, dd : translate, M : translate, MM : translate, y : translate, yy : translate }, ordinalParse : /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year. } }); return cs; }));
PypiClean
/boto3_type_annotations_with_docs-0.3.1.tar.gz/boto3_type_annotations_with_docs-0.3.1/boto3_type_annotations/neptune/client.py
from typing import Optional from botocore.client import BaseClient from typing import Dict from botocore.paginate import Paginator from datetime import datetime from botocore.waiter import Waiter from typing import Union from typing import List class Client(BaseClient): def add_role_to_db_cluster(self, DBClusterIdentifier: str, RoleArn: str): """ Associates an Identity and Access Management (IAM) role from an Neptune DB cluster. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/AddRoleToDBCluster>`_ **Request Syntax** :: response = client.add_role_to_db_cluster( DBClusterIdentifier='string', RoleArn='string' ) :type DBClusterIdentifier: string :param DBClusterIdentifier: **[REQUIRED]** The name of the DB cluster to associate the IAM role with. :type RoleArn: string :param RoleArn: **[REQUIRED]** The Amazon Resource Name (ARN) of the IAM role to associate with the Neptune DB cluster, for example ``arn:aws:iam::123456789012:role/NeptuneAccessRole`` . :returns: None """ pass def add_source_identifier_to_subscription(self, SubscriptionName: str, SourceIdentifier: str) -> Dict: """ Adds a source identifier to an existing event notification subscription. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/AddSourceIdentifierToSubscription>`_ **Request Syntax** :: response = client.add_source_identifier_to_subscription( SubscriptionName='string', SourceIdentifier='string' ) **Response Syntax** :: { 'EventSubscription': { 'CustomerAwsId': 'string', 'CustSubscriptionId': 'string', 'SnsTopicArn': 'string', 'Status': 'string', 'SubscriptionCreationTime': 'string', 'SourceType': 'string', 'SourceIdsList': [ 'string', ], 'EventCategoriesList': [ 'string', ], 'Enabled': True|False, 'EventSubscriptionArn': 'string' } } **Response Structure** - *(dict) --* - **EventSubscription** *(dict) --* Contains the results of a successful invocation of the DescribeEventSubscriptions action. - **CustomerAwsId** *(string) --* The AWS customer account associated with the event notification subscription. - **CustSubscriptionId** *(string) --* The event notification subscription Id. - **SnsTopicArn** *(string) --* The topic ARN of the event notification subscription. - **Status** *(string) --* The status of the event notification subscription. Constraints: Can be one of the following: creating | modifying | deleting | active | no-permission | topic-not-exist The status "no-permission" indicates that Neptune no longer has permission to post to the SNS topic. The status "topic-not-exist" indicates that the topic was deleted after the subscription was created. - **SubscriptionCreationTime** *(string) --* The time the event notification subscription was created. - **SourceType** *(string) --* The source type for the event notification subscription. - **SourceIdsList** *(list) --* A list of source IDs for the event notification subscription. - *(string) --* - **EventCategoriesList** *(list) --* A list of event categories for the event notification subscription. - *(string) --* - **Enabled** *(boolean) --* A Boolean value indicating if the subscription is enabled. True indicates the subscription is enabled. - **EventSubscriptionArn** *(string) --* The Amazon Resource Name (ARN) for the event subscription. :type SubscriptionName: string :param SubscriptionName: **[REQUIRED]** The name of the event notification subscription you want to add a source identifier to. 
:type SourceIdentifier: string :param SourceIdentifier: **[REQUIRED]** The identifier of the event source to be added. Constraints: * If the source type is a DB instance, then a ``DBInstanceIdentifier`` must be supplied. * If the source type is a DB security group, a ``DBSecurityGroupName`` must be supplied. * If the source type is a DB parameter group, a ``DBParameterGroupName`` must be supplied. * If the source type is a DB snapshot, a ``DBSnapshotIdentifier`` must be supplied. :rtype: dict :returns: """ pass def add_tags_to_resource(self, ResourceName: str, Tags: List): """ Adds metadata tags to an Amazon Neptune resource. These tags can also be used with cost allocation reporting to track cost associated with Amazon Neptune resources, or used in a Condition statement in an IAM policy for Amazon Neptune. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/AddTagsToResource>`_ **Request Syntax** :: response = client.add_tags_to_resource( ResourceName='string', Tags=[ { 'Key': 'string', 'Value': 'string' }, ] ) :type ResourceName: string :param ResourceName: **[REQUIRED]** The Amazon Neptune resource that the tags are added to. This value is an Amazon Resource Name (ARN). For information about creating an ARN, see `Constructing an Amazon Resource Name (ARN) <https://docs.aws.amazon.com/neptune/latest/UserGuide/tagging.ARN.html#tagging.ARN.Constructing>`__ . :type Tags: list :param Tags: **[REQUIRED]** The tags to be assigned to the Amazon Neptune resource. - *(dict) --* Metadata assigned to an Amazon Neptune resource consisting of a key-value pair. - **Key** *(string) --* A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). - **Value** *(string) --* A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). :returns: None """ pass def apply_pending_maintenance_action(self, ResourceIdentifier: str, ApplyAction: str, OptInType: str) -> Dict: """ Applies a pending maintenance action to a resource (for example, to a DB instance). See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/ApplyPendingMaintenanceAction>`_ **Request Syntax** :: response = client.apply_pending_maintenance_action( ResourceIdentifier='string', ApplyAction='string', OptInType='string' ) **Response Syntax** :: { 'ResourcePendingMaintenanceActions': { 'ResourceIdentifier': 'string', 'PendingMaintenanceActionDetails': [ { 'Action': 'string', 'AutoAppliedAfterDate': datetime(2015, 1, 1), 'ForcedApplyDate': datetime(2015, 1, 1), 'OptInStatus': 'string', 'CurrentApplyDate': datetime(2015, 1, 1), 'Description': 'string' }, ] } } **Response Structure** - *(dict) --* - **ResourcePendingMaintenanceActions** *(dict) --* Describes the pending maintenance actions for a resource. - **ResourceIdentifier** *(string) --* The ARN of the resource that has pending maintenance actions. 
- **PendingMaintenanceActionDetails** *(list) --* A list that provides details about the pending maintenance actions for the resource. - *(dict) --* Provides information about a pending maintenance action for a resource. - **Action** *(string) --* The type of pending maintenance action that is available for the resource. - **AutoAppliedAfterDate** *(datetime) --* The date of the maintenance window when the action is applied. The maintenance action is applied to the resource during its first maintenance window after this date. If this date is specified, any ``next-maintenance`` opt-in requests are ignored. - **ForcedApplyDate** *(datetime) --* The date when the maintenance action is automatically applied. The maintenance action is applied to the resource on this date regardless of the maintenance window for the resource. If this date is specified, any ``immediate`` opt-in requests are ignored. - **OptInStatus** *(string) --* Indicates the type of opt-in request that has been received for the resource. - **CurrentApplyDate** *(datetime) --* The effective date when the pending maintenance action is applied to the resource. This date takes into account opt-in requests received from the ApplyPendingMaintenanceAction API, the ``AutoAppliedAfterDate`` , and the ``ForcedApplyDate`` . This value is blank if an opt-in request has not been received and nothing has been specified as ``AutoAppliedAfterDate`` or ``ForcedApplyDate`` . - **Description** *(string) --* A description providing more detail about the maintenance action. :type ResourceIdentifier: string :param ResourceIdentifier: **[REQUIRED]** The Amazon Resource Name (ARN) of the resource that the pending maintenance action applies to. For information about creating an ARN, see `Constructing an Amazon Resource Name (ARN) <https://docs.aws.amazon.com/neptune/latest/UserGuide/tagging.ARN.html#tagging.ARN.Constructing>`__ . :type ApplyAction: string :param ApplyAction: **[REQUIRED]** The pending maintenance action to apply to this resource. Valid values: ``system-update`` , ``db-upgrade`` :type OptInType: string :param OptInType: **[REQUIRED]** A value that specifies the type of opt-in request, or undoes an opt-in request. An opt-in request of type ``immediate`` can\'t be undone. Valid values: * ``immediate`` - Apply the maintenance action immediately. * ``next-maintenance`` - Apply the maintenance action during the next maintenance window for the resource. * ``undo-opt-in`` - Cancel any existing ``next-maintenance`` opt-in requests. :rtype: dict :returns: """ pass def can_paginate(self, operation_name: str = None): """ Check if an operation can be paginated. :type operation_name: string :param operation_name: The operation name. This is the same name as the method name on the client. For example, if the method name is ``create_foo``, and you\'d normally invoke the operation as ``client.create_foo(**kwargs)``, if the ``create_foo`` operation can be paginated, you can use the call ``client.get_paginator(\"create_foo\")``. :return: ``True`` if the operation can be paginated, ``False`` otherwise. """ pass def copy_db_cluster_parameter_group(self, SourceDBClusterParameterGroupIdentifier: str, TargetDBClusterParameterGroupIdentifier: str, TargetDBClusterParameterGroupDescription: str, Tags: List = None) -> Dict: """ Copies the specified DB cluster parameter group. 
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/CopyDBClusterParameterGroup>`_ **Request Syntax** :: response = client.copy_db_cluster_parameter_group( SourceDBClusterParameterGroupIdentifier='string', TargetDBClusterParameterGroupIdentifier='string', TargetDBClusterParameterGroupDescription='string', Tags=[ { 'Key': 'string', 'Value': 'string' }, ] ) **Response Syntax** :: { 'DBClusterParameterGroup': { 'DBClusterParameterGroupName': 'string', 'DBParameterGroupFamily': 'string', 'Description': 'string', 'DBClusterParameterGroupArn': 'string' } } **Response Structure** - *(dict) --* - **DBClusterParameterGroup** *(dict) --* Contains the details of an Amazon Neptune DB cluster parameter group. This data type is used as a response element in the DescribeDBClusterParameterGroups action. - **DBClusterParameterGroupName** *(string) --* Provides the name of the DB cluster parameter group. - **DBParameterGroupFamily** *(string) --* Provides the name of the DB parameter group family that this DB cluster parameter group is compatible with. - **Description** *(string) --* Provides the customer-specified description for this DB cluster parameter group. - **DBClusterParameterGroupArn** *(string) --* The Amazon Resource Name (ARN) for the DB cluster parameter group. :type SourceDBClusterParameterGroupIdentifier: string :param SourceDBClusterParameterGroupIdentifier: **[REQUIRED]** The identifier or Amazon Resource Name (ARN) for the source DB cluster parameter group. For information about creating an ARN, see `Constructing an Amazon Resource Name (ARN) <https://docs.aws.amazon.com/neptune/latest/UserGuide/tagging.ARN.html#tagging.ARN.Constructing>`__ . Constraints: * Must specify a valid DB cluster parameter group. * If the source DB cluster parameter group is in the same AWS Region as the copy, specify a valid DB parameter group identifier, for example ``my-db-cluster-param-group`` , or a valid ARN. * If the source DB parameter group is in a different AWS Region than the copy, specify a valid DB cluster parameter group ARN, for example ``arn:aws:rds:us-east-1:123456789012:cluster-pg:custom-cluster-group1`` . :type TargetDBClusterParameterGroupIdentifier: string :param TargetDBClusterParameterGroupIdentifier: **[REQUIRED]** The identifier for the copied DB cluster parameter group. Constraints: * Cannot be null, empty, or blank * Must contain from 1 to 255 letters, numbers, or hyphens * First character must be a letter * Cannot end with a hyphen or contain two consecutive hyphens Example: ``my-cluster-param-group1`` :type TargetDBClusterParameterGroupDescription: string :param TargetDBClusterParameterGroupDescription: **[REQUIRED]** A description for the copied DB cluster parameter group. :type Tags: list :param Tags: The tags to be assigned to the copied DB cluster parameter group. - *(dict) --* Metadata assigned to an Amazon Neptune resource consisting of a key-value pair. - **Key** *(string) --* A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). - **Value** *(string) --* A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". 
The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). :rtype: dict :returns: """ pass def copy_db_cluster_snapshot(self, SourceDBClusterSnapshotIdentifier: str, TargetDBClusterSnapshotIdentifier: str, KmsKeyId: str = None, PreSignedUrl: str = None, CopyTags: bool = None, Tags: List = None, SourceRegion: str = None) -> Dict: """ Copies a snapshot of a DB cluster. To copy a DB cluster snapshot from a shared manual DB cluster snapshot, ``SourceDBClusterSnapshotIdentifier`` must be the Amazon Resource Name (ARN) of the shared DB cluster snapshot. You can't copy from one AWS Region to another. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/CopyDBClusterSnapshot>`_ **Request Syntax** :: response = client.copy_db_cluster_snapshot( SourceDBClusterSnapshotIdentifier='string', TargetDBClusterSnapshotIdentifier='string', KmsKeyId='string', CopyTags=True|False, Tags=[ { 'Key': 'string', 'Value': 'string' }, ], SourceRegion='string' ) **Response Syntax** :: { 'DBClusterSnapshot': { 'AvailabilityZones': [ 'string', ], 'DBClusterSnapshotIdentifier': 'string', 'DBClusterIdentifier': 'string', 'SnapshotCreateTime': datetime(2015, 1, 1), 'Engine': 'string', 'AllocatedStorage': 123, 'Status': 'string', 'Port': 123, 'VpcId': 'string', 'ClusterCreateTime': datetime(2015, 1, 1), 'MasterUsername': 'string', 'EngineVersion': 'string', 'LicenseModel': 'string', 'SnapshotType': 'string', 'PercentProgress': 123, 'StorageEncrypted': True|False, 'KmsKeyId': 'string', 'DBClusterSnapshotArn': 'string', 'SourceDBClusterSnapshotArn': 'string', 'IAMDatabaseAuthenticationEnabled': True|False } } **Response Structure** - *(dict) --* - **DBClusterSnapshot** *(dict) --* Contains the details for an Amazon Neptune DB cluster snapshot This data type is used as a response element in the DescribeDBClusterSnapshots action. - **AvailabilityZones** *(list) --* Provides the list of EC2 Availability Zones that instances in the DB cluster snapshot can be restored in. - *(string) --* - **DBClusterSnapshotIdentifier** *(string) --* Specifies the identifier for the DB cluster snapshot. - **DBClusterIdentifier** *(string) --* Specifies the DB cluster identifier of the DB cluster that this DB cluster snapshot was created from. - **SnapshotCreateTime** *(datetime) --* Provides the time when the snapshot was taken, in Universal Coordinated Time (UTC). - **Engine** *(string) --* Specifies the name of the database engine. - **AllocatedStorage** *(integer) --* Specifies the allocated storage size in gibibytes (GiB). - **Status** *(string) --* Specifies the status of this DB cluster snapshot. - **Port** *(integer) --* Specifies the port that the DB cluster was listening on at the time of the snapshot. - **VpcId** *(string) --* Provides the VPC ID associated with the DB cluster snapshot. - **ClusterCreateTime** *(datetime) --* Specifies the time when the DB cluster was created, in Universal Coordinated Time (UTC). - **MasterUsername** *(string) --* Provides the master username for the DB cluster snapshot. - **EngineVersion** *(string) --* Provides the version of the database engine for this DB cluster snapshot. - **LicenseModel** *(string) --* Provides the license model information for this DB cluster snapshot. - **SnapshotType** *(string) --* Provides the type of the DB cluster snapshot. 
- **PercentProgress** *(integer) --* Specifies the percentage of the estimated data that has been transferred. - **StorageEncrypted** *(boolean) --* Specifies whether the DB cluster snapshot is encrypted. - **KmsKeyId** *(string) --* If ``StorageEncrypted`` is true, the AWS KMS key identifier for the encrypted DB cluster snapshot. - **DBClusterSnapshotArn** *(string) --* The Amazon Resource Name (ARN) for the DB cluster snapshot. - **SourceDBClusterSnapshotArn** *(string) --* If the DB cluster snapshot was copied from a source DB cluster snapshot, the Amazon Resource Name (ARN) for the source DB cluster snapshot, otherwise, a null value. - **IAMDatabaseAuthenticationEnabled** *(boolean) --* True if mapping of AWS Identity and Access Management (IAM) accounts to database accounts is enabled, and otherwise false. :type SourceDBClusterSnapshotIdentifier: string :param SourceDBClusterSnapshotIdentifier: **[REQUIRED]** The identifier of the DB cluster snapshot to copy. This parameter is not case-sensitive. You can\'t copy from one AWS Region to another. Constraints: * Must specify a valid system snapshot in the \"available\" state. * Specify a valid DB snapshot identifier. Example: ``my-cluster-snapshot1`` :type TargetDBClusterSnapshotIdentifier: string :param TargetDBClusterSnapshotIdentifier: **[REQUIRED]** The identifier of the new DB cluster snapshot to create from the source DB cluster snapshot. This parameter is not case-sensitive. Constraints: * Must contain from 1 to 63 letters, numbers, or hyphens. * First character must be a letter. * Cannot end with a hyphen or contain two consecutive hyphens. Example: ``my-cluster-snapshot2`` :type KmsKeyId: string :param KmsKeyId: The AWS AWS KMS key ID for an encrypted DB cluster snapshot. The KMS key ID is the Amazon Resource Name (ARN), KMS key identifier, or the KMS key alias for the KMS encryption key. If you copy an unencrypted DB cluster snapshot and specify a value for the ``KmsKeyId`` parameter, Amazon Neptune encrypts the target DB cluster snapshot using the specified KMS encryption key. If you copy an encrypted DB cluster snapshot from your AWS account, you can specify a value for ``KmsKeyId`` to encrypt the copy with a new KMS encryption key. If you don\'t specify a value for ``KmsKeyId`` , then the copy of the DB cluster snapshot is encrypted with the same KMS key as the source DB cluster snapshot. If you copy an encrypted DB cluster snapshot that is shared from another AWS account, then you must specify a value for ``KmsKeyId`` . KMS encryption keys are specific to the AWS Region that they are created in, and you can\'t use encryption keys from one AWS Region in another AWS Region. :type PreSignedUrl: string :param PreSignedUrl: Not currently supported. Please note that this parameter is automatically populated if it is not provided. Including this parameter is not required :type CopyTags: boolean :param CopyTags: True to copy all tags from the source DB cluster snapshot to the target DB cluster snapshot, and otherwise false. The default is false. :type Tags: list :param Tags: The tags to assign to the new DB cluster snapshot copy. - *(dict) --* Metadata assigned to an Amazon Neptune resource consisting of a key-value pair. - **Key** *(string) --* A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". 
The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). - **Value** *(string) --* A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). :type SourceRegion: string :param SourceRegion: The ID of the region that contains the snapshot to be copied. :rtype: dict :returns: """ pass def copy_db_parameter_group(self, SourceDBParameterGroupIdentifier: str, TargetDBParameterGroupIdentifier: str, TargetDBParameterGroupDescription: str, Tags: List = None) -> Dict: """ Copies the specified DB parameter group. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/CopyDBParameterGroup>`_ **Request Syntax** :: response = client.copy_db_parameter_group( SourceDBParameterGroupIdentifier='string', TargetDBParameterGroupIdentifier='string', TargetDBParameterGroupDescription='string', Tags=[ { 'Key': 'string', 'Value': 'string' }, ] ) **Response Syntax** :: { 'DBParameterGroup': { 'DBParameterGroupName': 'string', 'DBParameterGroupFamily': 'string', 'Description': 'string', 'DBParameterGroupArn': 'string' } } **Response Structure** - *(dict) --* - **DBParameterGroup** *(dict) --* Contains the details of an Amazon Neptune DB parameter group. This data type is used as a response element in the DescribeDBParameterGroups action. - **DBParameterGroupName** *(string) --* Provides the name of the DB parameter group. - **DBParameterGroupFamily** *(string) --* Provides the name of the DB parameter group family that this DB parameter group is compatible with. - **Description** *(string) --* Provides the customer-specified description for this DB parameter group. - **DBParameterGroupArn** *(string) --* The Amazon Resource Name (ARN) for the DB parameter group. :type SourceDBParameterGroupIdentifier: string :param SourceDBParameterGroupIdentifier: **[REQUIRED]** The identifier or ARN for the source DB parameter group. For information about creating an ARN, see `Constructing an Amazon Resource Name (ARN) <https://docs.aws.amazon.com/neptune/latest/UserGuide/tagging.ARN.html#tagging.ARN.Constructing>`__ . Constraints: * Must specify a valid DB parameter group. * Must specify a valid DB parameter group identifier, for example ``my-db-param-group`` , or a valid ARN. :type TargetDBParameterGroupIdentifier: string :param TargetDBParameterGroupIdentifier: **[REQUIRED]** The identifier for the copied DB parameter group. Constraints: * Cannot be null, empty, or blank. * Must contain from 1 to 255 letters, numbers, or hyphens. * First character must be a letter. * Cannot end with a hyphen or contain two consecutive hyphens. Example: ``my-db-parameter-group`` :type TargetDBParameterGroupDescription: string :param TargetDBParameterGroupDescription: **[REQUIRED]** A description for the copied DB parameter group. :type Tags: list :param Tags: The tags to be assigned to the copied DB parameter group. - *(dict) --* Metadata assigned to an Amazon Neptune resource consisting of a key-value pair. - **Key** *(string) --* A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". 
The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). - **Value** *(string) --* A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). :rtype: dict :returns: """ pass def create_db_cluster(self, DBClusterIdentifier: str, Engine: str, AvailabilityZones: List = None, BackupRetentionPeriod: int = None, CharacterSetName: str = None, DatabaseName: str = None, DBClusterParameterGroupName: str = None, VpcSecurityGroupIds: List = None, DBSubnetGroupName: str = None, EngineVersion: str = None, Port: int = None, MasterUsername: str = None, MasterUserPassword: str = None, OptionGroupName: str = None, PreferredBackupWindow: str = None, PreferredMaintenanceWindow: str = None, ReplicationSourceIdentifier: str = None, Tags: List = None, StorageEncrypted: bool = None, KmsKeyId: str = None, PreSignedUrl: str = None, EnableIAMDatabaseAuthentication: bool = None, SourceRegion: str = None) -> Dict: """ Creates a new Amazon Neptune DB cluster. You can use the ``ReplicationSourceIdentifier`` parameter to create the DB cluster as a Read Replica of another DB cluster or Amazon Neptune DB instance. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/CreateDBCluster>`_ **Request Syntax** :: response = client.create_db_cluster( AvailabilityZones=[ 'string', ], BackupRetentionPeriod=123, CharacterSetName='string', DatabaseName='string', DBClusterIdentifier='string', DBClusterParameterGroupName='string', VpcSecurityGroupIds=[ 'string', ], DBSubnetGroupName='string', Engine='string', EngineVersion='string', Port=123, MasterUsername='string', MasterUserPassword='string', OptionGroupName='string', PreferredBackupWindow='string', PreferredMaintenanceWindow='string', ReplicationSourceIdentifier='string', Tags=[ { 'Key': 'string', 'Value': 'string' }, ], StorageEncrypted=True|False, KmsKeyId='string', EnableIAMDatabaseAuthentication=True|False, SourceRegion='string' ) **Response Syntax** :: { 'DBCluster': { 'AllocatedStorage': 123, 'AvailabilityZones': [ 'string', ], 'BackupRetentionPeriod': 123, 'CharacterSetName': 'string', 'DatabaseName': 'string', 'DBClusterIdentifier': 'string', 'DBClusterParameterGroup': 'string', 'DBSubnetGroup': 'string', 'Status': 'string', 'PercentProgress': 'string', 'EarliestRestorableTime': datetime(2015, 1, 1), 'Endpoint': 'string', 'ReaderEndpoint': 'string', 'MultiAZ': True|False, 'Engine': 'string', 'EngineVersion': 'string', 'LatestRestorableTime': datetime(2015, 1, 1), 'Port': 123, 'MasterUsername': 'string', 'DBClusterOptionGroupMemberships': [ { 'DBClusterOptionGroupName': 'string', 'Status': 'string' }, ], 'PreferredBackupWindow': 'string', 'PreferredMaintenanceWindow': 'string', 'ReplicationSourceIdentifier': 'string', 'ReadReplicaIdentifiers': [ 'string', ], 'DBClusterMembers': [ { 'DBInstanceIdentifier': 'string', 'IsClusterWriter': True|False, 'DBClusterParameterGroupStatus': 'string', 'PromotionTier': 123 }, ], 'VpcSecurityGroups': [ { 'VpcSecurityGroupId': 'string', 'Status': 'string' }, ], 'HostedZoneId': 'string', 'StorageEncrypted': True|False, 'KmsKeyId': 'string', 'DbClusterResourceId': 'string', 'DBClusterArn': 'string', 
'AssociatedRoles': [ { 'RoleArn': 'string', 'Status': 'string' }, ], 'IAMDatabaseAuthenticationEnabled': True|False, 'CloneGroupId': 'string', 'ClusterCreateTime': datetime(2015, 1, 1) } } **Response Structure** - *(dict) --* - **DBCluster** *(dict) --* Contains the details of an Amazon Neptune DB cluster. This data type is used as a response element in the DescribeDBClusters action. - **AllocatedStorage** *(integer) --* ``AllocatedStorage`` always returns 1, because Neptune DB cluster storage size is not fixed, but instead automatically adjusts as needed. - **AvailabilityZones** *(list) --* Provides the list of EC2 Availability Zones that instances in the DB cluster can be created in. - *(string) --* - **BackupRetentionPeriod** *(integer) --* Specifies the number of days for which automatic DB snapshots are retained. - **CharacterSetName** *(string) --* If present, specifies the name of the character set that this cluster is associated with. - **DatabaseName** *(string) --* Contains the name of the initial database of this DB cluster that was provided at create time, if one was specified when the DB cluster was created. This same name is returned for the life of the DB cluster. - **DBClusterIdentifier** *(string) --* Contains a user-supplied DB cluster identifier. This identifier is the unique key that identifies a DB cluster. - **DBClusterParameterGroup** *(string) --* Specifies the name of the DB cluster parameter group for the DB cluster. - **DBSubnetGroup** *(string) --* Specifies information on the subnet group associated with the DB cluster, including the name, description, and subnets in the subnet group. - **Status** *(string) --* Specifies the current state of this DB cluster. - **PercentProgress** *(string) --* Specifies the progress of the operation as a percentage. - **EarliestRestorableTime** *(datetime) --* Specifies the earliest time to which a database can be restored with point-in-time restore. - **Endpoint** *(string) --* Specifies the connection endpoint for the primary instance of the DB cluster. - **ReaderEndpoint** *(string) --* The reader endpoint for the DB cluster. The reader endpoint for a DB cluster load-balances connections across the Read Replicas that are available in a DB cluster. As clients request new connections to the reader endpoint, Neptune distributes the connection requests among the Read Replicas in the DB cluster. This functionality can help balance your read workload across multiple Read Replicas in your DB cluster. If a failover occurs, and the Read Replica that you are connected to is promoted to be the primary instance, your connection is dropped. To continue sending your read workload to other Read Replicas in the cluster, you can then reconnect to the reader endpoint. - **MultiAZ** *(boolean) --* Specifies whether the DB cluster has instances in multiple Availability Zones. - **Engine** *(string) --* Provides the name of the database engine to be used for this DB cluster. - **EngineVersion** *(string) --* Indicates the database engine version. - **LatestRestorableTime** *(datetime) --* Specifies the latest time to which a database can be restored with point-in-time restore. - **Port** *(integer) --* Specifies the port that the database engine is listening on. - **MasterUsername** *(string) --* Contains the master username for the DB cluster. - **DBClusterOptionGroupMemberships** *(list) --* Provides the list of option group memberships for this DB cluster. - *(dict) --* Contains status information for a DB cluster option group. 
- **DBClusterOptionGroupName** *(string) --* Specifies the name of the DB cluster option group. - **Status** *(string) --* Specifies the status of the DB cluster option group. - **PreferredBackupWindow** *(string) --* Specifies the daily time range during which automated backups are created if automated backups are enabled, as determined by the ``BackupRetentionPeriod`` . - **PreferredMaintenanceWindow** *(string) --* Specifies the weekly time range during which system maintenance can occur, in Universal Coordinated Time (UTC). - **ReplicationSourceIdentifier** *(string) --* Not supported by Neptune. - **ReadReplicaIdentifiers** *(list) --* Contains one or more identifiers of the Read Replicas associated with this DB cluster. - *(string) --* - **DBClusterMembers** *(list) --* Provides the list of instances that make up the DB cluster. - *(dict) --* Contains information about an instance that is part of a DB cluster. - **DBInstanceIdentifier** *(string) --* Specifies the instance identifier for this member of the DB cluster. - **IsClusterWriter** *(boolean) --* Value that is ``true`` if the cluster member is the primary instance for the DB cluster and ``false`` otherwise. - **DBClusterParameterGroupStatus** *(string) --* Specifies the status of the DB cluster parameter group for this member of the DB cluster. - **PromotionTier** *(integer) --* A value that specifies the order in which a Read Replica is promoted to the primary instance after a failure of the existing primary instance. - **VpcSecurityGroups** *(list) --* Provides a list of VPC security groups that the DB cluster belongs to. - *(dict) --* This data type is used as a response element for queries on VPC security group membership. - **VpcSecurityGroupId** *(string) --* The name of the VPC security group. - **Status** *(string) --* The status of the VPC security group. - **HostedZoneId** *(string) --* Specifies the ID that Amazon Route 53 assigns when you create a hosted zone. - **StorageEncrypted** *(boolean) --* Specifies whether the DB cluster is encrypted. - **KmsKeyId** *(string) --* If ``StorageEncrypted`` is true, the AWS KMS key identifier for the encrypted DB cluster. - **DbClusterResourceId** *(string) --* The AWS Region-unique, immutable identifier for the DB cluster. This identifier is found in AWS CloudTrail log entries whenever the AWS KMS key for the DB cluster is accessed. - **DBClusterArn** *(string) --* The Amazon Resource Name (ARN) for the DB cluster. - **AssociatedRoles** *(list) --* Provides a list of the AWS Identity and Access Management (IAM) roles that are associated with the DB cluster. IAM roles that are associated with a DB cluster grant permission for the DB cluster to access other AWS services on your behalf. - *(dict) --* Describes an AWS Identity and Access Management (IAM) role that is associated with a DB cluster. - **RoleArn** *(string) --* The Amazon Resource Name (ARN) of the IAM role that is associated with the DB cluster. - **Status** *(string) --* Describes the state of association between the IAM role and the DB cluster. The Status property returns one of the following values: * ``ACTIVE`` - the IAM role ARN is associated with the DB cluster and can be used to access other AWS services on your behalf. * ``PENDING`` - the IAM role ARN is being associated with the DB cluster. * ``INVALID`` - the IAM role ARN is associated with the DB cluster, but the DB cluster is unable to assume the IAM role in order to access other AWS services on your behalf. 
- **IAMDatabaseAuthenticationEnabled** *(boolean) --* True if mapping of AWS Identity and Access Management (IAM) accounts to database accounts is enabled, and otherwise false. - **CloneGroupId** *(string) --* Identifies the clone group to which the DB cluster is associated. - **ClusterCreateTime** *(datetime) --* Specifies the time when the DB cluster was created, in Universal Coordinated Time (UTC). :type AvailabilityZones: list :param AvailabilityZones: A list of EC2 Availability Zones that instances in the DB cluster can be created in. - *(string) --* :type BackupRetentionPeriod: integer :param BackupRetentionPeriod: The number of days for which automated backups are retained. You must specify a minimum value of 1. Default: 1 Constraints: * Must be a value from 1 to 35 :type CharacterSetName: string :param CharacterSetName: A value that indicates that the DB cluster should be associated with the specified CharacterSet. :type DatabaseName: string :param DatabaseName: The name for your database of up to 64 alpha-numeric characters. If you do not provide a name, Amazon Neptune will not create a database in the DB cluster you are creating. :type DBClusterIdentifier: string :param DBClusterIdentifier: **[REQUIRED]** The DB cluster identifier. This parameter is stored as a lowercase string. Constraints: * Must contain from 1 to 63 letters, numbers, or hyphens. * First character must be a letter. * Cannot end with a hyphen or contain two consecutive hyphens. Example: ``my-cluster1`` :type DBClusterParameterGroupName: string :param DBClusterParameterGroupName: The name of the DB cluster parameter group to associate with this DB cluster. If this argument is omitted, the default is used. Constraints: * If supplied, must match the name of an existing DBClusterParameterGroup. :type VpcSecurityGroupIds: list :param VpcSecurityGroupIds: A list of EC2 VPC security groups to associate with this DB cluster. - *(string) --* :type DBSubnetGroupName: string :param DBSubnetGroupName: A DB subnet group to associate with this DB cluster. Constraints: Must match the name of an existing DBSubnetGroup. Must not be default. Example: ``mySubnetgroup`` :type Engine: string :param Engine: **[REQUIRED]** The name of the database engine to be used for this DB cluster. Valid Values: ``neptune`` :type EngineVersion: string :param EngineVersion: The version number of the database engine to use. Example: ``1.0.1`` :type Port: integer :param Port: The port number on which the instances in the DB cluster accept connections. Default: ``8182`` :type MasterUsername: string :param MasterUsername: The name of the master user for the DB cluster. Constraints: * Must be 1 to 16 letters or numbers. * First character must be a letter. * Cannot be a reserved word for the chosen database engine. :type MasterUserPassword: string :param MasterUserPassword: The password for the master database user. This password can contain any printable ASCII character except \"/\", \"\"\", or \"@\". Constraints: Must contain from 8 to 41 characters. :type OptionGroupName: string :param OptionGroupName: A value that indicates that the DB cluster should be associated with the specified option group. Permanent options can\'t be removed from an option group. The option group can\'t be removed from a DB cluster once it is associated with a DB cluster. 
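A minimal end-to-end sketch of this call, for orientation only (hedged: only ``DBClusterIdentifier`` and ``Engine`` are required; ``my-cluster1`` and ``mySubnetgroup`` simply reuse the example names from the parameter descriptions in this docstring, and the tag values are illustrative placeholders, not values mandated by the API): ::

        import boto3

        # Illustrative sketch only; the identifiers below must correspond to
        # resources that exist (or are valid to create) in your own account.
        client = boto3.client('neptune')

        response = client.create_db_cluster(
            DBClusterIdentifier='my-cluster1',
            Engine='neptune',
            Port=8182,
            DBSubnetGroupName='mySubnetgroup',
            StorageEncrypted=True,
            Tags=[{'Key': 'environment', 'Value': 'test'}],
        )

        # The response carries the DBCluster structure described above.
        print(response['DBCluster']['Status'])
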
:type PreferredBackupWindow: string :param PreferredBackupWindow: The daily time range during which automated backups are created if automated backups are enabled using the ``BackupRetentionPeriod`` parameter. The default is a 30-minute window selected at random from an 8-hour block of time for each AWS Region. To see the time blocks available, see `Adjusting the Preferred Maintenance Window <https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/AdjustingTheMaintenanceWindow.html>`__ in the *Amazon Neptune User Guide.* Constraints: * Must be in the format ``hh24:mi-hh24:mi`` . * Must be in Universal Coordinated Time (UTC). * Must not conflict with the preferred maintenance window. * Must be at least 30 minutes. :type PreferredMaintenanceWindow: string :param PreferredMaintenanceWindow: The weekly time range during which system maintenance can occur, in Universal Coordinated Time (UTC). Format: ``ddd:hh24:mi-ddd:hh24:mi`` The default is a 30-minute window selected at random from an 8-hour block of time for each AWS Region, occurring on a random day of the week. To see the time blocks available, see `Adjusting the Preferred Maintenance Window <https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/AdjustingTheMaintenanceWindow.html>`__ in the *Amazon Neptune User Guide.* Valid Days: Mon, Tue, Wed, Thu, Fri, Sat, Sun. Constraints: Minimum 30-minute window. :type ReplicationSourceIdentifier: string :param ReplicationSourceIdentifier: The Amazon Resource Name (ARN) of the source DB instance or DB cluster if this DB cluster is created as a Read Replica. :type Tags: list :param Tags: The tags to assign to the new DB cluster. - *(dict) --* Metadata assigned to an Amazon Neptune resource consisting of a key-value pair. - **Key** *(string) --* A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). - **Value** *(string) --* A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). :type StorageEncrypted: boolean :param StorageEncrypted: Specifies whether the DB cluster is encrypted. :type KmsKeyId: string :param KmsKeyId: The AWS KMS key identifier for an encrypted DB cluster. The KMS key identifier is the Amazon Resource Name (ARN) for the KMS encryption key. If you are creating a DB cluster with the same AWS account that owns the KMS encryption key used to encrypt the new DB cluster, then you can use the KMS key alias instead of the ARN for the KMS encryption key. If an encryption key is not specified in ``KmsKeyId`` : * If ``ReplicationSourceIdentifier`` identifies an encrypted source, then Amazon Neptune will use the encryption key used to encrypt the source. Otherwise, Amazon Neptune will use your default encryption key. * If the ``StorageEncrypted`` parameter is true and ``ReplicationSourceIdentifier`` is not specified, then Amazon Neptune will use your default encryption key. AWS KMS creates the default encryption key for your AWS account. Your AWS account has a different default encryption key for each AWS Region. 
If you create a Read Replica of an encrypted DB cluster in another AWS Region, you must set ``KmsKeyId`` to a KMS key ID that is valid in the destination AWS Region. This key is used to encrypt the Read Replica in that AWS Region. :type PreSignedUrl: string :param PreSignedUrl: This parameter is not currently supported. Please note that this parameter is automatically populated if it is not provided. Including this parameter is not required :type EnableIAMDatabaseAuthentication: boolean :param EnableIAMDatabaseAuthentication: True to enable mapping of AWS Identity and Access Management (IAM) accounts to database accounts, and otherwise false. Default: ``false`` :type SourceRegion: string :param SourceRegion: The ID of the region that contains the source for the db cluster. :rtype: dict :returns: """ pass def create_db_cluster_parameter_group(self, DBClusterParameterGroupName: str, DBParameterGroupFamily: str, Description: str, Tags: List = None) -> Dict: """ Creates a new DB cluster parameter group. Parameters in a DB cluster parameter group apply to all of the instances in a DB cluster. A DB cluster parameter group is initially created with the default parameters for the database engine used by instances in the DB cluster. To provide custom values for any of the parameters, you must modify the group after creating it using ModifyDBClusterParameterGroup . Once you've created a DB cluster parameter group, you need to associate it with your DB cluster using ModifyDBCluster . When you associate a new DB cluster parameter group with a running DB cluster, you need to reboot the DB instances in the DB cluster without failover for the new DB cluster parameter group and associated settings to take effect. .. warning:: After you create a DB cluster parameter group, you should wait at least 5 minutes before creating your first DB cluster that uses that DB cluster parameter group as the default parameter group. This allows Amazon Neptune to fully complete the create action before the DB cluster parameter group is used as the default for a new DB cluster. This is especially important for parameters that are critical when creating the default database for a DB cluster, such as the character set for the default database defined by the ``character_set_database`` parameter. You can use the *Parameter Groups* option of the `Amazon Neptune console <https://console.aws.amazon.com/rds/>`__ or the DescribeDBClusterParameters command to verify that your DB cluster parameter group has been created or modified. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/CreateDBClusterParameterGroup>`_ **Request Syntax** :: response = client.create_db_cluster_parameter_group( DBClusterParameterGroupName='string', DBParameterGroupFamily='string', Description='string', Tags=[ { 'Key': 'string', 'Value': 'string' }, ] ) **Response Syntax** :: { 'DBClusterParameterGroup': { 'DBClusterParameterGroupName': 'string', 'DBParameterGroupFamily': 'string', 'Description': 'string', 'DBClusterParameterGroupArn': 'string' } } **Response Structure** - *(dict) --* - **DBClusterParameterGroup** *(dict) --* Contains the details of an Amazon Neptune DB cluster parameter group. This data type is used as a response element in the DescribeDBClusterParameterGroups action. - **DBClusterParameterGroupName** *(string) --* Provides the name of the DB cluster parameter group. 
- **DBParameterGroupFamily** *(string) --* Provides the name of the DB parameter group family that this DB cluster parameter group is compatible with. - **Description** *(string) --* Provides the customer-specified description for this DB cluster parameter group. - **DBClusterParameterGroupArn** *(string) --* The Amazon Resource Name (ARN) for the DB cluster parameter group. :type DBClusterParameterGroupName: string :param DBClusterParameterGroupName: **[REQUIRED]** The name of the DB cluster parameter group. Constraints: * Must match the name of an existing DBClusterParameterGroup. .. note:: This value is stored as a lowercase string. :type DBParameterGroupFamily: string :param DBParameterGroupFamily: **[REQUIRED]** The DB cluster parameter group family name. A DB cluster parameter group can be associated with one and only one DB cluster parameter group family, and can be applied only to a DB cluster running a database engine and engine version compatible with that DB cluster parameter group family. :type Description: string :param Description: **[REQUIRED]** The description for the DB cluster parameter group. :type Tags: list :param Tags: The tags to be assigned to the new DB cluster parameter group. - *(dict) --* Metadata assigned to an Amazon Neptune resource consisting of a key-value pair. - **Key** *(string) --* A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). - **Value** *(string) --* A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). :rtype: dict :returns: """ pass def create_db_cluster_snapshot(self, DBClusterSnapshotIdentifier: str, DBClusterIdentifier: str, Tags: List = None) -> Dict: """ Creates a snapshot of a DB cluster. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/CreateDBClusterSnapshot>`_ **Request Syntax** :: response = client.create_db_cluster_snapshot( DBClusterSnapshotIdentifier='string', DBClusterIdentifier='string', Tags=[ { 'Key': 'string', 'Value': 'string' }, ] ) **Response Syntax** :: { 'DBClusterSnapshot': { 'AvailabilityZones': [ 'string', ], 'DBClusterSnapshotIdentifier': 'string', 'DBClusterIdentifier': 'string', 'SnapshotCreateTime': datetime(2015, 1, 1), 'Engine': 'string', 'AllocatedStorage': 123, 'Status': 'string', 'Port': 123, 'VpcId': 'string', 'ClusterCreateTime': datetime(2015, 1, 1), 'MasterUsername': 'string', 'EngineVersion': 'string', 'LicenseModel': 'string', 'SnapshotType': 'string', 'PercentProgress': 123, 'StorageEncrypted': True|False, 'KmsKeyId': 'string', 'DBClusterSnapshotArn': 'string', 'SourceDBClusterSnapshotArn': 'string', 'IAMDatabaseAuthenticationEnabled': True|False } } **Response Structure** - *(dict) --* - **DBClusterSnapshot** *(dict) --* Contains the details for an Amazon Neptune DB cluster snapshot This data type is used as a response element in the DescribeDBClusterSnapshots action. 
- **AvailabilityZones** *(list) --* Provides the list of EC2 Availability Zones that instances in the DB cluster snapshot can be restored in. - *(string) --* - **DBClusterSnapshotIdentifier** *(string) --* Specifies the identifier for the DB cluster snapshot. - **DBClusterIdentifier** *(string) --* Specifies the DB cluster identifier of the DB cluster that this DB cluster snapshot was created from. - **SnapshotCreateTime** *(datetime) --* Provides the time when the snapshot was taken, in Universal Coordinated Time (UTC). - **Engine** *(string) --* Specifies the name of the database engine. - **AllocatedStorage** *(integer) --* Specifies the allocated storage size in gibibytes (GiB). - **Status** *(string) --* Specifies the status of this DB cluster snapshot. - **Port** *(integer) --* Specifies the port that the DB cluster was listening on at the time of the snapshot. - **VpcId** *(string) --* Provides the VPC ID associated with the DB cluster snapshot. - **ClusterCreateTime** *(datetime) --* Specifies the time when the DB cluster was created, in Universal Coordinated Time (UTC). - **MasterUsername** *(string) --* Provides the master username for the DB cluster snapshot. - **EngineVersion** *(string) --* Provides the version of the database engine for this DB cluster snapshot. - **LicenseModel** *(string) --* Provides the license model information for this DB cluster snapshot. - **SnapshotType** *(string) --* Provides the type of the DB cluster snapshot. - **PercentProgress** *(integer) --* Specifies the percentage of the estimated data that has been transferred. - **StorageEncrypted** *(boolean) --* Specifies whether the DB cluster snapshot is encrypted. - **KmsKeyId** *(string) --* If ``StorageEncrypted`` is true, the AWS KMS key identifier for the encrypted DB cluster snapshot. - **DBClusterSnapshotArn** *(string) --* The Amazon Resource Name (ARN) for the DB cluster snapshot. - **SourceDBClusterSnapshotArn** *(string) --* If the DB cluster snapshot was copied from a source DB cluster snapshot, the Amazon Resource Name (ARN) for the source DB cluster snapshot, otherwise, a null value. - **IAMDatabaseAuthenticationEnabled** *(boolean) --* True if mapping of AWS Identity and Access Management (IAM) accounts to database accounts is enabled, and otherwise false. :type DBClusterSnapshotIdentifier: string :param DBClusterSnapshotIdentifier: **[REQUIRED]** The identifier of the DB cluster snapshot. This parameter is stored as a lowercase string. Constraints: * Must contain from 1 to 63 letters, numbers, or hyphens. * First character must be a letter. * Cannot end with a hyphen or contain two consecutive hyphens. Example: ``my-cluster1-snapshot1`` :type DBClusterIdentifier: string :param DBClusterIdentifier: **[REQUIRED]** The identifier of the DB cluster to create a snapshot for. This parameter is not case-sensitive. Constraints: * Must match the identifier of an existing DBCluster. Example: ``my-cluster1`` :type Tags: list :param Tags: The tags to be assigned to the DB cluster snapshot. - *(dict) --* Metadata assigned to an Amazon Neptune resource consisting of a key-value pair. - **Key** *(string) --* A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). 
- **Value** *(string) --* A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). :rtype: dict :returns: """ pass def create_db_instance(self, DBInstanceIdentifier: str, DBInstanceClass: str, Engine: str, DBName: str = None, AllocatedStorage: int = None, MasterUsername: str = None, MasterUserPassword: str = None, DBSecurityGroups: List = None, VpcSecurityGroupIds: List = None, AvailabilityZone: str = None, DBSubnetGroupName: str = None, PreferredMaintenanceWindow: str = None, DBParameterGroupName: str = None, BackupRetentionPeriod: int = None, PreferredBackupWindow: str = None, Port: int = None, MultiAZ: bool = None, EngineVersion: str = None, AutoMinorVersionUpgrade: bool = None, LicenseModel: str = None, Iops: int = None, OptionGroupName: str = None, CharacterSetName: str = None, PubliclyAccessible: bool = None, Tags: List = None, DBClusterIdentifier: str = None, StorageType: str = None, TdeCredentialArn: str = None, TdeCredentialPassword: str = None, StorageEncrypted: bool = None, KmsKeyId: str = None, Domain: str = None, CopyTagsToSnapshot: bool = None, MonitoringInterval: int = None, MonitoringRoleArn: str = None, DomainIAMRoleName: str = None, PromotionTier: int = None, Timezone: str = None, EnableIAMDatabaseAuthentication: bool = None, EnablePerformanceInsights: bool = None, PerformanceInsightsKMSKeyId: str = None, EnableCloudwatchLogsExports: List = None) -> Dict: """ Creates a new DB instance. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/CreateDBInstance>`_ **Request Syntax** :: response = client.create_db_instance( DBName='string', DBInstanceIdentifier='string', AllocatedStorage=123, DBInstanceClass='string', Engine='string', MasterUsername='string', MasterUserPassword='string', DBSecurityGroups=[ 'string', ], VpcSecurityGroupIds=[ 'string', ], AvailabilityZone='string', DBSubnetGroupName='string', PreferredMaintenanceWindow='string', DBParameterGroupName='string', BackupRetentionPeriod=123, PreferredBackupWindow='string', Port=123, MultiAZ=True|False, EngineVersion='string', AutoMinorVersionUpgrade=True|False, LicenseModel='string', Iops=123, OptionGroupName='string', CharacterSetName='string', PubliclyAccessible=True|False, Tags=[ { 'Key': 'string', 'Value': 'string' }, ], DBClusterIdentifier='string', StorageType='string', TdeCredentialArn='string', TdeCredentialPassword='string', StorageEncrypted=True|False, KmsKeyId='string', Domain='string', CopyTagsToSnapshot=True|False, MonitoringInterval=123, MonitoringRoleArn='string', DomainIAMRoleName='string', PromotionTier=123, Timezone='string', EnableIAMDatabaseAuthentication=True|False, EnablePerformanceInsights=True|False, PerformanceInsightsKMSKeyId='string', EnableCloudwatchLogsExports=[ 'string', ] ) **Response Syntax** :: { 'DBInstance': { 'DBInstanceIdentifier': 'string', 'DBInstanceClass': 'string', 'Engine': 'string', 'DBInstanceStatus': 'string', 'MasterUsername': 'string', 'DBName': 'string', 'Endpoint': { 'Address': 'string', 'Port': 123, 'HostedZoneId': 'string' }, 'AllocatedStorage': 123, 'InstanceCreateTime': datetime(2015, 1, 1), 'PreferredBackupWindow': 'string', 'BackupRetentionPeriod': 123, 'DBSecurityGroups': [ { 'DBSecurityGroupName': 'string', 'Status': 'string' }, ], 
'VpcSecurityGroups': [ { 'VpcSecurityGroupId': 'string', 'Status': 'string' }, ], 'DBParameterGroups': [ { 'DBParameterGroupName': 'string', 'ParameterApplyStatus': 'string' }, ], 'AvailabilityZone': 'string', 'DBSubnetGroup': { 'DBSubnetGroupName': 'string', 'DBSubnetGroupDescription': 'string', 'VpcId': 'string', 'SubnetGroupStatus': 'string', 'Subnets': [ { 'SubnetIdentifier': 'string', 'SubnetAvailabilityZone': { 'Name': 'string' }, 'SubnetStatus': 'string' }, ], 'DBSubnetGroupArn': 'string' }, 'PreferredMaintenanceWindow': 'string', 'PendingModifiedValues': { 'DBInstanceClass': 'string', 'AllocatedStorage': 123, 'MasterUserPassword': 'string', 'Port': 123, 'BackupRetentionPeriod': 123, 'MultiAZ': True|False, 'EngineVersion': 'string', 'LicenseModel': 'string', 'Iops': 123, 'DBInstanceIdentifier': 'string', 'StorageType': 'string', 'CACertificateIdentifier': 'string', 'DBSubnetGroupName': 'string', 'PendingCloudwatchLogsExports': { 'LogTypesToEnable': [ 'string', ], 'LogTypesToDisable': [ 'string', ] } }, 'LatestRestorableTime': datetime(2015, 1, 1), 'MultiAZ': True|False, 'EngineVersion': 'string', 'AutoMinorVersionUpgrade': True|False, 'ReadReplicaSourceDBInstanceIdentifier': 'string', 'ReadReplicaDBInstanceIdentifiers': [ 'string', ], 'ReadReplicaDBClusterIdentifiers': [ 'string', ], 'LicenseModel': 'string', 'Iops': 123, 'OptionGroupMemberships': [ { 'OptionGroupName': 'string', 'Status': 'string' }, ], 'CharacterSetName': 'string', 'SecondaryAvailabilityZone': 'string', 'PubliclyAccessible': True|False, 'StatusInfos': [ { 'StatusType': 'string', 'Normal': True|False, 'Status': 'string', 'Message': 'string' }, ], 'StorageType': 'string', 'TdeCredentialArn': 'string', 'DbInstancePort': 123, 'DBClusterIdentifier': 'string', 'StorageEncrypted': True|False, 'KmsKeyId': 'string', 'DbiResourceId': 'string', 'CACertificateIdentifier': 'string', 'DomainMemberships': [ { 'Domain': 'string', 'Status': 'string', 'FQDN': 'string', 'IAMRoleName': 'string' }, ], 'CopyTagsToSnapshot': True|False, 'MonitoringInterval': 123, 'EnhancedMonitoringResourceArn': 'string', 'MonitoringRoleArn': 'string', 'PromotionTier': 123, 'DBInstanceArn': 'string', 'Timezone': 'string', 'IAMDatabaseAuthenticationEnabled': True|False, 'PerformanceInsightsEnabled': True|False, 'PerformanceInsightsKMSKeyId': 'string', 'EnabledCloudwatchLogsExports': [ 'string', ] } } **Response Structure** - *(dict) --* - **DBInstance** *(dict) --* Contains the details of an Amazon Neptune DB instance. This data type is used as a response element in the DescribeDBInstances action. - **DBInstanceIdentifier** *(string) --* Contains a user-supplied database identifier. This identifier is the unique key that identifies a DB instance. - **DBInstanceClass** *(string) --* Contains the name of the compute and memory capacity class of the DB instance. - **Engine** *(string) --* Provides the name of the database engine to be used for this DB instance. - **DBInstanceStatus** *(string) --* Specifies the current state of this database. - **MasterUsername** *(string) --* Contains the master username for the DB instance. - **DBName** *(string) --* The database name. - **Endpoint** *(dict) --* Specifies the connection endpoint. - **Address** *(string) --* Specifies the DNS address of the DB instance. - **Port** *(integer) --* Specifies the port that the database engine is listening on. - **HostedZoneId** *(string) --* Specifies the ID that Amazon Route 53 assigns when you create a hosted zone. 
- **AllocatedStorage** *(integer) --* Specifies the allocated storage size specified in gibibytes. - **InstanceCreateTime** *(datetime) --* Provides the date and time the DB instance was created. - **PreferredBackupWindow** *(string) --* Specifies the daily time range during which automated backups are created if automated backups are enabled, as determined by the ``BackupRetentionPeriod`` . - **BackupRetentionPeriod** *(integer) --* Specifies the number of days for which automatic DB snapshots are retained. - **DBSecurityGroups** *(list) --* Provides List of DB security group elements containing only ``DBSecurityGroup.Name`` and ``DBSecurityGroup.Status`` subelements. - *(dict) --* Specifies membership in a designated DB security group. - **DBSecurityGroupName** *(string) --* The name of the DB security group. - **Status** *(string) --* The status of the DB security group. - **VpcSecurityGroups** *(list) --* Provides a list of VPC security group elements that the DB instance belongs to. - *(dict) --* This data type is used as a response element for queries on VPC security group membership. - **VpcSecurityGroupId** *(string) --* The name of the VPC security group. - **Status** *(string) --* The status of the VPC security group. - **DBParameterGroups** *(list) --* Provides the list of DB parameter groups applied to this DB instance. - *(dict) --* The status of the DB parameter group. This data type is used as a response element in the following actions: * CreateDBInstance * DeleteDBInstance * ModifyDBInstance * RebootDBInstance - **DBParameterGroupName** *(string) --* The name of the DP parameter group. - **ParameterApplyStatus** *(string) --* The status of parameter updates. - **AvailabilityZone** *(string) --* Specifies the name of the Availability Zone the DB instance is located in. - **DBSubnetGroup** *(dict) --* Specifies information on the subnet group associated with the DB instance, including the name, description, and subnets in the subnet group. - **DBSubnetGroupName** *(string) --* The name of the DB subnet group. - **DBSubnetGroupDescription** *(string) --* Provides the description of the DB subnet group. - **VpcId** *(string) --* Provides the VpcId of the DB subnet group. - **SubnetGroupStatus** *(string) --* Provides the status of the DB subnet group. - **Subnets** *(list) --* Contains a list of Subnet elements. - *(dict) --* Specifies a subnet. This data type is used as a response element in the DescribeDBSubnetGroups action. - **SubnetIdentifier** *(string) --* Specifies the identifier of the subnet. - **SubnetAvailabilityZone** *(dict) --* Specifies the EC2 Availability Zone that the subnet is in. - **Name** *(string) --* The name of the availability zone. - **SubnetStatus** *(string) --* Specifies the status of the subnet. - **DBSubnetGroupArn** *(string) --* The Amazon Resource Name (ARN) for the DB subnet group. - **PreferredMaintenanceWindow** *(string) --* Specifies the weekly time range during which system maintenance can occur, in Universal Coordinated Time (UTC). - **PendingModifiedValues** *(dict) --* Specifies that changes to the DB instance are pending. This element is only included when changes are pending. Specific changes are identified by subelements. - **DBInstanceClass** *(string) --* Contains the new ``DBInstanceClass`` for the DB instance that will be applied or is currently being applied. - **AllocatedStorage** *(integer) --* Contains the new ``AllocatedStorage`` size for the DB instance that will be applied or is currently being applied. 
- **MasterUserPassword** *(string) --* Contains the pending or currently-in-progress change of the master credentials for the DB instance. - **Port** *(integer) --* Specifies the pending port for the DB instance. - **BackupRetentionPeriod** *(integer) --* Specifies the pending number of days for which automated backups are retained. - **MultiAZ** *(boolean) --* Indicates that the Single-AZ DB instance is to change to a Multi-AZ deployment. - **EngineVersion** *(string) --* Indicates the database engine version. - **LicenseModel** *(string) --* The license model for the DB instance. Valid values: ``license-included`` | ``bring-your-own-license`` | ``general-public-license`` - **Iops** *(integer) --* Specifies the new Provisioned IOPS value for the DB instance that will be applied or is currently being applied. - **DBInstanceIdentifier** *(string) --* Contains the new ``DBInstanceIdentifier`` for the DB instance that will be applied or is currently being applied. - **StorageType** *(string) --* Specifies the storage type to be associated with the DB instance. - **CACertificateIdentifier** *(string) --* Specifies the identifier of the CA certificate for the DB instance. - **DBSubnetGroupName** *(string) --* The new DB subnet group for the DB instance. - **PendingCloudwatchLogsExports** *(dict) --* Specifies the CloudWatch logs to be exported. - **LogTypesToEnable** *(list) --* Log types that are in the process of being deactivated. After they are deactivated, these log types aren't exported to CloudWatch Logs. - *(string) --* - **LogTypesToDisable** *(list) --* Log types that are in the process of being enabled. After they are enabled, these log types are exported to CloudWatch Logs. - *(string) --* - **LatestRestorableTime** *(datetime) --* Specifies the latest time to which a database can be restored with point-in-time restore. - **MultiAZ** *(boolean) --* Specifies if the DB instance is a Multi-AZ deployment. - **EngineVersion** *(string) --* Indicates the database engine version. - **AutoMinorVersionUpgrade** *(boolean) --* Indicates that minor version patches are applied automatically. - **ReadReplicaSourceDBInstanceIdentifier** *(string) --* Contains the identifier of the source DB instance if this DB instance is a Read Replica. - **ReadReplicaDBInstanceIdentifiers** *(list) --* Contains one or more identifiers of the Read Replicas associated with this DB instance. - *(string) --* - **ReadReplicaDBClusterIdentifiers** *(list) --* Contains one or more identifiers of DB clusters that are Read Replicas of this DB instance. - *(string) --* - **LicenseModel** *(string) --* License model information for this DB instance. - **Iops** *(integer) --* Specifies the Provisioned IOPS (I/O operations per second) value. - **OptionGroupMemberships** *(list) --* Provides the list of option group memberships for this DB instance. - *(dict) --* Provides information on the option groups the DB instance is a member of. - **OptionGroupName** *(string) --* The name of the option group that the instance belongs to. - **Status** *(string) --* The status of the DB instance's option group membership. Valid values are: ``in-sync`` , ``pending-apply`` , ``pending-removal`` , ``pending-maintenance-apply`` , ``pending-maintenance-removal`` , ``applying`` , ``removing`` , and ``failed`` . - **CharacterSetName** *(string) --* If present, specifies the name of the character set that this instance is associated with. 
- **SecondaryAvailabilityZone** *(string) --* If present, specifies the name of the secondary Availability Zone for a DB instance with multi-AZ support. - **PubliclyAccessible** *(boolean) --* This flag should no longer be used. - **StatusInfos** *(list) --* The status of a Read Replica. If the instance is not a Read Replica, this is blank. - *(dict) --* Provides a list of status information for a DB instance. - **StatusType** *(string) --* This value is currently "read replication." - **Normal** *(boolean) --* Boolean value that is true if the instance is operating normally, or false if the instance is in an error state. - **Status** *(string) --* Status of the DB instance. For a StatusType of read replica, the values can be replicating, error, stopped, or terminated. - **Message** *(string) --* Details of the error if there is an error for the instance. If the instance is not in an error state, this value is blank. - **StorageType** *(string) --* Specifies the storage type associated with DB instance. - **TdeCredentialArn** *(string) --* The ARN from the key store with which the instance is associated for TDE encryption. - **DbInstancePort** *(integer) --* Specifies the port that the DB instance listens on. If the DB instance is part of a DB cluster, this can be a different port than the DB cluster port. - **DBClusterIdentifier** *(string) --* If the DB instance is a member of a DB cluster, contains the name of the DB cluster that the DB instance is a member of. - **StorageEncrypted** *(boolean) --* Not supported: The encryption for DB instances is managed by the DB cluster. - **KmsKeyId** *(string) --* Not supported: The encryption for DB instances is managed by the DB cluster. - **DbiResourceId** *(string) --* The AWS Region-unique, immutable identifier for the DB instance. This identifier is found in AWS CloudTrail log entries whenever the AWS KMS key for the DB instance is accessed. - **CACertificateIdentifier** *(string) --* The identifier of the CA certificate for this DB instance. - **DomainMemberships** *(list) --* Not supported - *(dict) --* An Active Directory Domain membership record associated with a DB instance. - **Domain** *(string) --* The identifier of the Active Directory Domain. - **Status** *(string) --* The status of the DB instance's Active Directory Domain membership, such as joined, pending-join, failed etc). - **FQDN** *(string) --* The fully qualified domain name of the Active Directory Domain. - **IAMRoleName** *(string) --* The name of the IAM role to be used when making API calls to the Directory Service. - **CopyTagsToSnapshot** *(boolean) --* Specifies whether tags are copied from the DB instance to snapshots of the DB instance. - **MonitoringInterval** *(integer) --* The interval, in seconds, between points when Enhanced Monitoring metrics are collected for the DB instance. - **EnhancedMonitoringResourceArn** *(string) --* The Amazon Resource Name (ARN) of the Amazon CloudWatch Logs log stream that receives the Enhanced Monitoring metrics data for the DB instance. - **MonitoringRoleArn** *(string) --* The ARN for the IAM role that permits Neptune to send Enhanced Monitoring metrics to Amazon CloudWatch Logs. - **PromotionTier** *(integer) --* A value that specifies the order in which a Read Replica is promoted to the primary instance after a failure of the existing primary instance. - **DBInstanceArn** *(string) --* The Amazon Resource Name (ARN) for the DB instance. - **Timezone** *(string) --* Not supported. 
- **IAMDatabaseAuthenticationEnabled** *(boolean) --* True if AWS Identity and Access Management (IAM) authentication is enabled, and otherwise false. - **PerformanceInsightsEnabled** *(boolean) --* True if Performance Insights is enabled for the DB instance, and otherwise false. - **PerformanceInsightsKMSKeyId** *(string) --* The AWS KMS key identifier for encryption of Performance Insights data. The KMS key ID is the Amazon Resource Name (ARN), KMS key identifier, or the KMS key alias for the KMS encryption key. - **EnabledCloudwatchLogsExports** *(list) --* A list of log types that this DB instance is configured to export to CloudWatch Logs. - *(string) --* :type DBName: string :param DBName: Not supported. :type DBInstanceIdentifier: string :param DBInstanceIdentifier: **[REQUIRED]** The DB instance identifier. This parameter is stored as a lowercase string. Constraints: * Must contain from 1 to 63 letters, numbers, or hyphens. * First character must be a letter. * Cannot end with a hyphen or contain two consecutive hyphens. Example: ``mydbinstance`` :type AllocatedStorage: integer :param AllocatedStorage: The amount of storage (in gibibytes) to allocate for the DB instance. Type: Integer Not applicable. Neptune cluster volumes automatically grow as the amount of data in your database increases, though you are only charged for the space that you use in a Neptune cluster volume. :type DBInstanceClass: string :param DBInstanceClass: **[REQUIRED]** The compute and memory capacity of the DB instance, for example, ``db.m4.large`` . Not all DB instance classes are available in all AWS Regions. :type Engine: string :param Engine: **[REQUIRED]** The name of the database engine to be used for this instance. Valid Values: ``neptune`` :type MasterUsername: string :param MasterUsername: The name for the master user. Not used. :type MasterUserPassword: string :param MasterUserPassword: The password for the master user. The password can include any printable ASCII character except \"/\", \"\"\", or \"@\". Not used. :type DBSecurityGroups: list :param DBSecurityGroups: A list of DB security groups to associate with this DB instance. Default: The default DB security group for the database engine. - *(string) --* :type VpcSecurityGroupIds: list :param VpcSecurityGroupIds: A list of EC2 VPC security groups to associate with this DB instance. Not applicable. The associated list of EC2 VPC security groups is managed by the DB cluster. For more information, see CreateDBCluster . Default: The default EC2 VPC security group for the DB subnet group\'s VPC. - *(string) --* :type AvailabilityZone: string :param AvailabilityZone: The EC2 Availability Zone that the DB instance is created in Default: A random, system-chosen Availability Zone in the endpoint\'s AWS Region. Example: ``us-east-1d`` Constraint: The AvailabilityZone parameter can\'t be specified if the MultiAZ parameter is set to ``true`` . The specified Availability Zone must be in the same AWS Region as the current endpoint. :type DBSubnetGroupName: string :param DBSubnetGroupName: A DB subnet group to associate with this DB instance. If there is no DB subnet group, then it is a non-VPC DB instance. :type PreferredMaintenanceWindow: string :param PreferredMaintenanceWindow: The time range each week during which system maintenance can occur, in Universal Coordinated Time (UTC). 
Format: ``ddd:hh24:mi-ddd:hh24:mi`` The default is a 30-minute window selected at random from an 8-hour block of time for each AWS Region, occurring on a random day of the week. Valid Days: Mon, Tue, Wed, Thu, Fri, Sat, Sun. Constraints: Minimum 30-minute window. :type DBParameterGroupName: string :param DBParameterGroupName: The name of the DB parameter group to associate with this DB instance. If this argument is omitted, the default DBParameterGroup for the specified engine is used. Constraints: * Must be 1 to 255 letters, numbers, or hyphens. * First character must be a letter * Cannot end with a hyphen or contain two consecutive hyphens :type BackupRetentionPeriod: integer :param BackupRetentionPeriod: The number of days for which automated backups are retained. Not applicable. The retention period for automated backups is managed by the DB cluster. For more information, see CreateDBCluster . Default: 1 Constraints: * Must be a value from 0 to 35 * Cannot be set to 0 if the DB instance is a source to Read Replicas :type PreferredBackupWindow: string :param PreferredBackupWindow: The daily time range during which automated backups are created. Not applicable. The daily time range for creating automated backups is managed by the DB cluster. For more information, see CreateDBCluster . :type Port: integer :param Port: The port number on which the database accepts connections. Not applicable. The port is managed by the DB cluster. For more information, see CreateDBCluster . Default: ``8182`` Type: Integer :type MultiAZ: boolean :param MultiAZ: Specifies if the DB instance is a Multi-AZ deployment. You can\'t set the AvailabilityZone parameter if the MultiAZ parameter is set to true. :type EngineVersion: string :param EngineVersion: The version number of the database engine to use. :type AutoMinorVersionUpgrade: boolean :param AutoMinorVersionUpgrade: Indicates that minor engine upgrades are applied automatically to the DB instance during the maintenance window. Default: ``true`` :type LicenseModel: string :param LicenseModel: License model information for this DB instance. Valid values: ``license-included`` | ``bring-your-own-license`` | ``general-public-license`` :type Iops: integer :param Iops: The amount of Provisioned IOPS (input/output operations per second) to be initially allocated for the DB instance. :type OptionGroupName: string :param OptionGroupName: Indicates that the DB instance should be associated with the specified option group. Permanent options, such as the TDE option for Oracle Advanced Security TDE, can\'t be removed from an option group, and that option group can\'t be removed from a DB instance once it is associated with a DB instance :type CharacterSetName: string :param CharacterSetName: Indicates that the DB instance should be associated with the specified CharacterSet. Not applicable. The character set is managed by the DB cluster. For more information, see CreateDBCluster . :type PubliclyAccessible: boolean :param PubliclyAccessible: This flag should no longer be used. :type Tags: list :param Tags: The tags to assign to the new instance. - *(dict) --* Metadata assigned to an Amazon Neptune resource consisting of a key-value pair. - **Key** *(string) --* A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". 
The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). - **Value** *(string) --* A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). :type DBClusterIdentifier: string :param DBClusterIdentifier: The identifier of the DB cluster that the instance will belong to. For information on creating a DB cluster, see CreateDBCluster . Type: String :type StorageType: string :param StorageType: Specifies the storage type to be associated with the DB instance. Not applicable. Storage is managed by the DB Cluster. :type TdeCredentialArn: string :param TdeCredentialArn: The ARN from the key store with which to associate the instance for TDE encryption. :type TdeCredentialPassword: string :param TdeCredentialPassword: The password for the given ARN from the key store in order to access the device. :type StorageEncrypted: boolean :param StorageEncrypted: Specifies whether the DB instance is encrypted. Not applicable. The encryption for DB instances is managed by the DB cluster. For more information, see CreateDBCluster . Default: false :type KmsKeyId: string :param KmsKeyId: The AWS KMS key identifier for an encrypted DB instance. The KMS key identifier is the Amazon Resource Name (ARN) for the KMS encryption key. If you are creating a DB instance with the same AWS account that owns the KMS encryption key used to encrypt the new DB instance, then you can use the KMS key alias instead of the ARN for the KM encryption key. Not applicable. The KMS key identifier is managed by the DB cluster. For more information, see CreateDBCluster . If the ``StorageEncrypted`` parameter is true, and you do not specify a value for the ``KmsKeyId`` parameter, then Amazon Neptune will use your default encryption key. AWS KMS creates the default encryption key for your AWS account. Your AWS account has a different default encryption key for each AWS Region. :type Domain: string :param Domain: Specify the Active Directory Domain to create the instance in. :type CopyTagsToSnapshot: boolean :param CopyTagsToSnapshot: True to copy all tags from the DB instance to snapshots of the DB instance, and otherwise false. The default is false. :type MonitoringInterval: integer :param MonitoringInterval: The interval, in seconds, between points when Enhanced Monitoring metrics are collected for the DB instance. To disable collecting Enhanced Monitoring metrics, specify 0. The default is 0. If ``MonitoringRoleArn`` is specified, then you must also set ``MonitoringInterval`` to a value other than 0. Valid Values: ``0, 1, 5, 10, 15, 30, 60`` :type MonitoringRoleArn: string :param MonitoringRoleArn: The ARN for the IAM role that permits Neptune to send enhanced monitoring metrics to Amazon CloudWatch Logs. For example, ``arn:aws:iam:123456789012:role/emaccess`` . If ``MonitoringInterval`` is set to a value other than 0, then you must supply a ``MonitoringRoleArn`` value. :type DomainIAMRoleName: string :param DomainIAMRoleName: Specify the name of the IAM role to be used when making API calls to the Directory Service. 
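For orientation, a minimal hedged sketch of creating a Neptune instance inside an existing cluster (the identifiers are hypothetical and the ``db.m4.large`` class is the example given for ``DBInstanceClass`` above; only ``DBInstanceIdentifier``, ``DBInstanceClass``, and ``Engine`` are required, and the cluster named in ``DBClusterIdentifier`` must already exist): ::

        import boto3

        client = boto3.client('neptune')

        # Hypothetical names; replace with identifiers from your own account.
        response = client.create_db_instance(
            DBInstanceIdentifier='mydbinstance',
            DBInstanceClass='db.m4.large',
            Engine='neptune',
            DBClusterIdentifier='my-cluster1',
        )

        # The response carries the DBInstance structure described above.
        print(response['DBInstance']['DBInstanceStatus'])
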
:type PromotionTier: integer :param PromotionTier: A value that specifies the order in which an Read Replica is promoted to the primary instance after a failure of the existing primary instance. Default: 1 Valid Values: 0 - 15 :type Timezone: string :param Timezone: The time zone of the DB instance. :type EnableIAMDatabaseAuthentication: boolean :param EnableIAMDatabaseAuthentication: True to enable AWS Identity and Access Management (IAM) authentication for Neptune. Default: ``false`` :type EnablePerformanceInsights: boolean :param EnablePerformanceInsights: True to enable Performance Insights for the DB instance, and otherwise false. :type PerformanceInsightsKMSKeyId: string :param PerformanceInsightsKMSKeyId: The AWS KMS key identifier for encryption of Performance Insights data. The KMS key ID is the Amazon Resource Name (ARN), KMS key identifier, or the KMS key alias for the KMS encryption key. :type EnableCloudwatchLogsExports: list :param EnableCloudwatchLogsExports: The list of log types that need to be enabled for exporting to CloudWatch Logs. - *(string) --* :rtype: dict :returns: """ pass def create_db_parameter_group(self, DBParameterGroupName: str, DBParameterGroupFamily: str, Description: str, Tags: List = None) -> Dict: """ Creates a new DB parameter group. A DB parameter group is initially created with the default parameters for the database engine used by the DB instance. To provide custom values for any of the parameters, you must modify the group after creating it using *ModifyDBParameterGroup* . Once you've created a DB parameter group, you need to associate it with your DB instance using *ModifyDBInstance* . When you associate a new DB parameter group with a running DB instance, you need to reboot the DB instance without failover for the new DB parameter group and associated settings to take effect. .. warning:: After you create a DB parameter group, you should wait at least 5 minutes before creating your first DB instance that uses that DB parameter group as the default parameter group. This allows Amazon Neptune to fully complete the create action before the parameter group is used as the default for a new DB instance. This is especially important for parameters that are critical when creating the default database for a DB instance, such as the character set for the default database defined by the ``character_set_database`` parameter. You can use the *Parameter Groups* option of the Amazon Neptune console or the *DescribeDBParameters* command to verify that your DB parameter group has been created or modified. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/CreateDBParameterGroup>`_ **Request Syntax** :: response = client.create_db_parameter_group( DBParameterGroupName='string', DBParameterGroupFamily='string', Description='string', Tags=[ { 'Key': 'string', 'Value': 'string' }, ] ) **Response Syntax** :: { 'DBParameterGroup': { 'DBParameterGroupName': 'string', 'DBParameterGroupFamily': 'string', 'Description': 'string', 'DBParameterGroupArn': 'string' } } **Response Structure** - *(dict) --* - **DBParameterGroup** *(dict) --* Contains the details of an Amazon Neptune DB parameter group. This data type is used as a response element in the DescribeDBParameterGroups action. - **DBParameterGroupName** *(string) --* Provides the name of the DB parameter group. - **DBParameterGroupFamily** *(string) --* Provides the name of the DB parameter group family that this DB parameter group is compatible with. 
- **Description** *(string) --* Provides the customer-specified description for this DB parameter group. - **DBParameterGroupArn** *(string) --* The Amazon Resource Name (ARN) for the DB parameter group. :type DBParameterGroupName: string :param DBParameterGroupName: **[REQUIRED]** The name of the DB parameter group. Constraints: * Must be 1 to 255 letters, numbers, or hyphens. * First character must be a letter * Cannot end with a hyphen or contain two consecutive hyphens .. note:: This value is stored as a lowercase string. :type DBParameterGroupFamily: string :param DBParameterGroupFamily: **[REQUIRED]** The DB parameter group family name. A DB parameter group can be associated with one and only one DB parameter group family, and can be applied only to a DB instance running a database engine and engine version compatible with that DB parameter group family. :type Description: string :param Description: **[REQUIRED]** The description for the DB parameter group. :type Tags: list :param Tags: The tags to be assigned to the new DB parameter group. - *(dict) --* Metadata assigned to an Amazon Neptune resource consisting of a key-value pair. - **Key** *(string) --* A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). - **Value** *(string) --* A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). :rtype: dict :returns: """ pass def create_db_subnet_group(self, DBSubnetGroupName: str, DBSubnetGroupDescription: str, SubnetIds: List, Tags: List = None) -> Dict: """ Creates a new DB subnet group. DB subnet groups must contain at least one subnet in at least two AZs in the AWS Region. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/CreateDBSubnetGroup>`_ **Request Syntax** :: response = client.create_db_subnet_group( DBSubnetGroupName='string', DBSubnetGroupDescription='string', SubnetIds=[ 'string', ], Tags=[ { 'Key': 'string', 'Value': 'string' }, ] ) **Response Syntax** :: { 'DBSubnetGroup': { 'DBSubnetGroupName': 'string', 'DBSubnetGroupDescription': 'string', 'VpcId': 'string', 'SubnetGroupStatus': 'string', 'Subnets': [ { 'SubnetIdentifier': 'string', 'SubnetAvailabilityZone': { 'Name': 'string' }, 'SubnetStatus': 'string' }, ], 'DBSubnetGroupArn': 'string' } } **Response Structure** - *(dict) --* - **DBSubnetGroup** *(dict) --* Contains the details of an Amazon Neptune DB subnet group. This data type is used as a response element in the DescribeDBSubnetGroups action. - **DBSubnetGroupName** *(string) --* The name of the DB subnet group. - **DBSubnetGroupDescription** *(string) --* Provides the description of the DB subnet group. - **VpcId** *(string) --* Provides the VpcId of the DB subnet group. - **SubnetGroupStatus** *(string) --* Provides the status of the DB subnet group. - **Subnets** *(list) --* Contains a list of Subnet elements. - *(dict) --* Specifies a subnet. This data type is used as a response element in the DescribeDBSubnetGroups action. 
- **SubnetIdentifier** *(string) --* Specifies the identifier of the subnet. - **SubnetAvailabilityZone** *(dict) --* Specifies the EC2 Availability Zone that the subnet is in. - **Name** *(string) --* The name of the availability zone. - **SubnetStatus** *(string) --* Specifies the status of the subnet. - **DBSubnetGroupArn** *(string) --* The Amazon Resource Name (ARN) for the DB subnet group. :type DBSubnetGroupName: string :param DBSubnetGroupName: **[REQUIRED]** The name for the DB subnet group. This value is stored as a lowercase string. Constraints: Must contain no more than 255 letters, numbers, periods, underscores, spaces, or hyphens. Must not be default. Example: ``mySubnetgroup`` :type DBSubnetGroupDescription: string :param DBSubnetGroupDescription: **[REQUIRED]** The description for the DB subnet group. :type SubnetIds: list :param SubnetIds: **[REQUIRED]** The EC2 Subnet IDs for the DB subnet group. - *(string) --* :type Tags: list :param Tags: The tags to be assigned to the new DB subnet group. - *(dict) --* Metadata assigned to an Amazon Neptune resource consisting of a key-value pair. - **Key** *(string) --* A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). - **Value** *(string) --* A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). :rtype: dict :returns: """ pass def create_event_subscription(self, SubscriptionName: str, SnsTopicArn: str, SourceType: str = None, EventCategories: List = None, SourceIds: List = None, Enabled: bool = None, Tags: List = None) -> Dict: """ Creates an event notification subscription. This action requires a topic ARN (Amazon Resource Name) created by either the Neptune console, the SNS console, or the SNS API. To obtain an ARN with SNS, you must create a topic in Amazon SNS and subscribe to the topic. The ARN is displayed in the SNS console. You can specify the type of source (SourceType) you want to be notified of, provide a list of Neptune sources (SourceIds) that triggers the events, and provide a list of event categories (EventCategories) for events you want to be notified of. For example, you can specify SourceType = db-instance, SourceIds = mydbinstance1, mydbinstance2 and EventCategories = Availability, Backup. If you specify both the SourceType and SourceIds, such as SourceType = db-instance and SourceIdentifier = myDBInstance1, you are notified of all the db-instance events for the specified source. If you specify a SourceType but do not specify a SourceIdentifier, you receive notice of the events for that source type for all your Neptune sources. If you do not specify either the SourceType nor the SourceIdentifier, you are notified of events generated from all Neptune sources belonging to your customer account. 
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/CreateEventSubscription>`_ **Request Syntax** :: response = client.create_event_subscription( SubscriptionName='string', SnsTopicArn='string', SourceType='string', EventCategories=[ 'string', ], SourceIds=[ 'string', ], Enabled=True|False, Tags=[ { 'Key': 'string', 'Value': 'string' }, ] ) **Response Syntax** :: { 'EventSubscription': { 'CustomerAwsId': 'string', 'CustSubscriptionId': 'string', 'SnsTopicArn': 'string', 'Status': 'string', 'SubscriptionCreationTime': 'string', 'SourceType': 'string', 'SourceIdsList': [ 'string', ], 'EventCategoriesList': [ 'string', ], 'Enabled': True|False, 'EventSubscriptionArn': 'string' } } **Response Structure** - *(dict) --* - **EventSubscription** *(dict) --* Contains the results of a successful invocation of the DescribeEventSubscriptions action. - **CustomerAwsId** *(string) --* The AWS customer account associated with the event notification subscription. - **CustSubscriptionId** *(string) --* The event notification subscription Id. - **SnsTopicArn** *(string) --* The topic ARN of the event notification subscription. - **Status** *(string) --* The status of the event notification subscription. Constraints: Can be one of the following: creating | modifying | deleting | active | no-permission | topic-not-exist The status "no-permission" indicates that Neptune no longer has permission to post to the SNS topic. The status "topic-not-exist" indicates that the topic was deleted after the subscription was created. - **SubscriptionCreationTime** *(string) --* The time the event notification subscription was created. - **SourceType** *(string) --* The source type for the event notification subscription. - **SourceIdsList** *(list) --* A list of source IDs for the event notification subscription. - *(string) --* - **EventCategoriesList** *(list) --* A list of event categories for the event notification subscription. - *(string) --* - **Enabled** *(boolean) --* A Boolean value indicating if the subscription is enabled. True indicates the subscription is enabled. - **EventSubscriptionArn** *(string) --* The Amazon Resource Name (ARN) for the event subscription. :type SubscriptionName: string :param SubscriptionName: **[REQUIRED]** The name of the subscription. Constraints: The name must be less than 255 characters. :type SnsTopicArn: string :param SnsTopicArn: **[REQUIRED]** The Amazon Resource Name (ARN) of the SNS topic created for event notification. The ARN is created by Amazon SNS when you create a topic and subscribe to it. :type SourceType: string :param SourceType: The type of source that is generating the events. For example, if you want to be notified of events generated by a DB instance, you would set this parameter to db-instance. if this value is not specified, all events are returned. Valid values: ``db-instance`` | ``db-cluster`` | ``db-parameter-group`` | ``db-security-group`` | ``db-snapshot`` | ``db-cluster-snapshot`` :type EventCategories: list :param EventCategories: A list of event categories for a SourceType that you want to subscribe to. You can see a list of the categories for a given SourceType by using the **DescribeEventCategories** action. - *(string) --* :type SourceIds: list :param SourceIds: The list of identifiers of the event sources for which events are returned. If not specified, then all sources are included in the response. 
An identifier must begin with a letter and must contain only ASCII letters, digits, and hyphens; it can\'t end with a hyphen or contain two consecutive hyphens. Constraints: * If SourceIds are supplied, SourceType must also be provided. * If the source type is a DB instance, then a ``DBInstanceIdentifier`` must be supplied. * If the source type is a DB security group, a ``DBSecurityGroupName`` must be supplied. * If the source type is a DB parameter group, a ``DBParameterGroupName`` must be supplied. * If the source type is a DB snapshot, a ``DBSnapshotIdentifier`` must be supplied. - *(string) --* :type Enabled: boolean :param Enabled: A Boolean value; set to **true** to activate the subscription, set to **false** to create the subscription but not active it. :type Tags: list :param Tags: The tags to be applied to the new event subscription. - *(dict) --* Metadata assigned to an Amazon Neptune resource consisting of a key-value pair. - **Key** *(string) --* A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). - **Value** *(string) --* A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). :rtype: dict :returns: """ pass def delete_db_cluster(self, DBClusterIdentifier: str, SkipFinalSnapshot: bool = None, FinalDBSnapshotIdentifier: str = None) -> Dict: """ The DeleteDBCluster action deletes a previously provisioned DB cluster. When you delete a DB cluster, all automated backups for that DB cluster are deleted and can't be recovered. Manual DB cluster snapshots of the specified DB cluster are not deleted. 
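For illustration, a sketch of deleting a DB cluster while keeping a final snapshot. The cluster and snapshot identifiers are placeholder assumptions::

    import boto3

    client = boto3.client('neptune')

    # Because SkipFinalSnapshot is False, FinalDBSnapshotIdentifier must be supplied
    # (see the parameter notes below); a final cluster snapshot is created first.
    response = client.delete_db_cluster(
        DBClusterIdentifier='my-neptune-cluster',
        SkipFinalSnapshot=False,
        FinalDBSnapshotIdentifier='my-neptune-cluster-final'
    )
    print(response['DBCluster']['Status'])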
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DeleteDBCluster>`_ **Request Syntax** :: response = client.delete_db_cluster( DBClusterIdentifier='string', SkipFinalSnapshot=True|False, FinalDBSnapshotIdentifier='string' ) **Response Syntax** :: { 'DBCluster': { 'AllocatedStorage': 123, 'AvailabilityZones': [ 'string', ], 'BackupRetentionPeriod': 123, 'CharacterSetName': 'string', 'DatabaseName': 'string', 'DBClusterIdentifier': 'string', 'DBClusterParameterGroup': 'string', 'DBSubnetGroup': 'string', 'Status': 'string', 'PercentProgress': 'string', 'EarliestRestorableTime': datetime(2015, 1, 1), 'Endpoint': 'string', 'ReaderEndpoint': 'string', 'MultiAZ': True|False, 'Engine': 'string', 'EngineVersion': 'string', 'LatestRestorableTime': datetime(2015, 1, 1), 'Port': 123, 'MasterUsername': 'string', 'DBClusterOptionGroupMemberships': [ { 'DBClusterOptionGroupName': 'string', 'Status': 'string' }, ], 'PreferredBackupWindow': 'string', 'PreferredMaintenanceWindow': 'string', 'ReplicationSourceIdentifier': 'string', 'ReadReplicaIdentifiers': [ 'string', ], 'DBClusterMembers': [ { 'DBInstanceIdentifier': 'string', 'IsClusterWriter': True|False, 'DBClusterParameterGroupStatus': 'string', 'PromotionTier': 123 }, ], 'VpcSecurityGroups': [ { 'VpcSecurityGroupId': 'string', 'Status': 'string' }, ], 'HostedZoneId': 'string', 'StorageEncrypted': True|False, 'KmsKeyId': 'string', 'DbClusterResourceId': 'string', 'DBClusterArn': 'string', 'AssociatedRoles': [ { 'RoleArn': 'string', 'Status': 'string' }, ], 'IAMDatabaseAuthenticationEnabled': True|False, 'CloneGroupId': 'string', 'ClusterCreateTime': datetime(2015, 1, 1) } } **Response Structure** - *(dict) --* - **DBCluster** *(dict) --* Contains the details of an Amazon Neptune DB cluster. This data type is used as a response element in the DescribeDBClusters action. - **AllocatedStorage** *(integer) --* ``AllocatedStorage`` always returns 1, because Neptune DB cluster storage size is not fixed, but instead automatically adjusts as needed. - **AvailabilityZones** *(list) --* Provides the list of EC2 Availability Zones that instances in the DB cluster can be created in. - *(string) --* - **BackupRetentionPeriod** *(integer) --* Specifies the number of days for which automatic DB snapshots are retained. - **CharacterSetName** *(string) --* If present, specifies the name of the character set that this cluster is associated with. - **DatabaseName** *(string) --* Contains the name of the initial database of this DB cluster that was provided at create time, if one was specified when the DB cluster was created. This same name is returned for the life of the DB cluster. - **DBClusterIdentifier** *(string) --* Contains a user-supplied DB cluster identifier. This identifier is the unique key that identifies a DB cluster. - **DBClusterParameterGroup** *(string) --* Specifies the name of the DB cluster parameter group for the DB cluster. - **DBSubnetGroup** *(string) --* Specifies information on the subnet group associated with the DB cluster, including the name, description, and subnets in the subnet group. - **Status** *(string) --* Specifies the current state of this DB cluster. - **PercentProgress** *(string) --* Specifies the progress of the operation as a percentage. - **EarliestRestorableTime** *(datetime) --* Specifies the earliest time to which a database can be restored with point-in-time restore. - **Endpoint** *(string) --* Specifies the connection endpoint for the primary instance of the DB cluster. 
- **ReaderEndpoint** *(string) --* The reader endpoint for the DB cluster. The reader endpoint for a DB cluster load-balances connections across the Read Replicas that are available in a DB cluster. As clients request new connections to the reader endpoint, Neptune distributes the connection requests among the Read Replicas in the DB cluster. This functionality can help balance your read workload across multiple Read Replicas in your DB cluster. If a failover occurs, and the Read Replica that you are connected to is promoted to be the primary instance, your connection is dropped. To continue sending your read workload to other Read Replicas in the cluster, you can then reconnect to the reader endpoint. - **MultiAZ** *(boolean) --* Specifies whether the DB cluster has instances in multiple Availability Zones. - **Engine** *(string) --* Provides the name of the database engine to be used for this DB cluster. - **EngineVersion** *(string) --* Indicates the database engine version. - **LatestRestorableTime** *(datetime) --* Specifies the latest time to which a database can be restored with point-in-time restore. - **Port** *(integer) --* Specifies the port that the database engine is listening on. - **MasterUsername** *(string) --* Contains the master username for the DB cluster. - **DBClusterOptionGroupMemberships** *(list) --* Provides the list of option group memberships for this DB cluster. - *(dict) --* Contains status information for a DB cluster option group. - **DBClusterOptionGroupName** *(string) --* Specifies the name of the DB cluster option group. - **Status** *(string) --* Specifies the status of the DB cluster option group. - **PreferredBackupWindow** *(string) --* Specifies the daily time range during which automated backups are created if automated backups are enabled, as determined by the ``BackupRetentionPeriod`` . - **PreferredMaintenanceWindow** *(string) --* Specifies the weekly time range during which system maintenance can occur, in Universal Coordinated Time (UTC). - **ReplicationSourceIdentifier** *(string) --* Not supported by Neptune. - **ReadReplicaIdentifiers** *(list) --* Contains one or more identifiers of the Read Replicas associated with this DB cluster. - *(string) --* - **DBClusterMembers** *(list) --* Provides the list of instances that make up the DB cluster. - *(dict) --* Contains information about an instance that is part of a DB cluster. - **DBInstanceIdentifier** *(string) --* Specifies the instance identifier for this member of the DB cluster. - **IsClusterWriter** *(boolean) --* Value that is ``true`` if the cluster member is the primary instance for the DB cluster and ``false`` otherwise. - **DBClusterParameterGroupStatus** *(string) --* Specifies the status of the DB cluster parameter group for this member of the DB cluster. - **PromotionTier** *(integer) --* A value that specifies the order in which a Read Replica is promoted to the primary instance after a failure of the existing primary instance. - **VpcSecurityGroups** *(list) --* Provides a list of VPC security groups that the DB cluster belongs to. - *(dict) --* This data type is used as a response element for queries on VPC security group membership. - **VpcSecurityGroupId** *(string) --* The name of the VPC security group. - **Status** *(string) --* The status of the VPC security group. - **HostedZoneId** *(string) --* Specifies the ID that Amazon Route 53 assigns when you create a hosted zone. - **StorageEncrypted** *(boolean) --* Specifies whether the DB cluster is encrypted. 
- **KmsKeyId** *(string) --* If ``StorageEncrypted`` is true, the AWS KMS key identifier for the encrypted DB cluster. - **DbClusterResourceId** *(string) --* The AWS Region-unique, immutable identifier for the DB cluster. This identifier is found in AWS CloudTrail log entries whenever the AWS KMS key for the DB cluster is accessed. - **DBClusterArn** *(string) --* The Amazon Resource Name (ARN) for the DB cluster. - **AssociatedRoles** *(list) --* Provides a list of the AWS Identity and Access Management (IAM) roles that are associated with the DB cluster. IAM roles that are associated with a DB cluster grant permission for the DB cluster to access other AWS services on your behalf. - *(dict) --* Describes an AWS Identity and Access Management (IAM) role that is associated with a DB cluster. - **RoleArn** *(string) --* The Amazon Resource Name (ARN) of the IAM role that is associated with the DB cluster. - **Status** *(string) --* Describes the state of association between the IAM role and the DB cluster. The Status property returns one of the following values: * ``ACTIVE`` - the IAM role ARN is associated with the DB cluster and can be used to access other AWS services on your behalf. * ``PENDING`` - the IAM role ARN is being associated with the DB cluster. * ``INVALID`` - the IAM role ARN is associated with the DB cluster, but the DB cluster is unable to assume the IAM role in order to access other AWS services on your behalf. - **IAMDatabaseAuthenticationEnabled** *(boolean) --* True if mapping of AWS Identity and Access Management (IAM) accounts to database accounts is enabled, and otherwise false. - **CloneGroupId** *(string) --* Identifies the clone group to which the DB cluster is associated. - **ClusterCreateTime** *(datetime) --* Specifies the time when the DB cluster was created, in Universal Coordinated Time (UTC). :type DBClusterIdentifier: string :param DBClusterIdentifier: **[REQUIRED]** The DB cluster identifier for the DB cluster to be deleted. This parameter isn\'t case-sensitive. Constraints: * Must match an existing DBClusterIdentifier. :type SkipFinalSnapshot: boolean :param SkipFinalSnapshot: Determines whether a final DB cluster snapshot is created before the DB cluster is deleted. If ``true`` is specified, no DB cluster snapshot is created. If ``false`` is specified, a DB cluster snapshot is created before the DB cluster is deleted. .. note:: You must specify a ``FinalDBSnapshotIdentifier`` parameter if ``SkipFinalSnapshot`` is ``false`` . Default: ``false`` :type FinalDBSnapshotIdentifier: string :param FinalDBSnapshotIdentifier: The DB cluster snapshot identifier of the new DB cluster snapshot created when ``SkipFinalSnapshot`` is set to ``false`` . .. note:: Specifying this parameter and also setting the ``SkipFinalShapshot`` parameter to true results in an error. Constraints: * Must be 1 to 255 letters, numbers, or hyphens. * First character must be a letter * Cannot end with a hyphen or contain two consecutive hyphens :rtype: dict :returns: """ pass def delete_db_cluster_parameter_group(self, DBClusterParameterGroupName: str): """ Deletes a specified DB cluster parameter group. The DB cluster parameter group to be deleted can't be associated with any DB clusters. 
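For illustration, a minimal deletion sketch; the action returns no data. The group name is a placeholder assumption::

    import boto3

    client = boto3.client('neptune')

    # The group must not be a default group and must not be associated with any DB cluster.
    client.delete_db_cluster_parameter_group(
        DBClusterParameterGroupName='my-cluster-params'
    )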
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DeleteDBClusterParameterGroup>`_ **Request Syntax** :: response = client.delete_db_cluster_parameter_group( DBClusterParameterGroupName='string' ) :type DBClusterParameterGroupName: string :param DBClusterParameterGroupName: **[REQUIRED]** The name of the DB cluster parameter group. Constraints: * Must be the name of an existing DB cluster parameter group. * You can\'t delete a default DB cluster parameter group. * Cannot be associated with any DB clusters. :returns: None """ pass def delete_db_cluster_snapshot(self, DBClusterSnapshotIdentifier: str) -> Dict: """ Deletes a DB cluster snapshot. If the snapshot is being copied, the copy operation is terminated. .. note:: The DB cluster snapshot must be in the ``available`` state to be deleted. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DeleteDBClusterSnapshot>`_ **Request Syntax** :: response = client.delete_db_cluster_snapshot( DBClusterSnapshotIdentifier='string' ) **Response Syntax** :: { 'DBClusterSnapshot': { 'AvailabilityZones': [ 'string', ], 'DBClusterSnapshotIdentifier': 'string', 'DBClusterIdentifier': 'string', 'SnapshotCreateTime': datetime(2015, 1, 1), 'Engine': 'string', 'AllocatedStorage': 123, 'Status': 'string', 'Port': 123, 'VpcId': 'string', 'ClusterCreateTime': datetime(2015, 1, 1), 'MasterUsername': 'string', 'EngineVersion': 'string', 'LicenseModel': 'string', 'SnapshotType': 'string', 'PercentProgress': 123, 'StorageEncrypted': True|False, 'KmsKeyId': 'string', 'DBClusterSnapshotArn': 'string', 'SourceDBClusterSnapshotArn': 'string', 'IAMDatabaseAuthenticationEnabled': True|False } } **Response Structure** - *(dict) --* - **DBClusterSnapshot** *(dict) --* Contains the details for an Amazon Neptune DB cluster snapshot This data type is used as a response element in the DescribeDBClusterSnapshots action. - **AvailabilityZones** *(list) --* Provides the list of EC2 Availability Zones that instances in the DB cluster snapshot can be restored in. - *(string) --* - **DBClusterSnapshotIdentifier** *(string) --* Specifies the identifier for the DB cluster snapshot. - **DBClusterIdentifier** *(string) --* Specifies the DB cluster identifier of the DB cluster that this DB cluster snapshot was created from. - **SnapshotCreateTime** *(datetime) --* Provides the time when the snapshot was taken, in Universal Coordinated Time (UTC). - **Engine** *(string) --* Specifies the name of the database engine. - **AllocatedStorage** *(integer) --* Specifies the allocated storage size in gibibytes (GiB). - **Status** *(string) --* Specifies the status of this DB cluster snapshot. - **Port** *(integer) --* Specifies the port that the DB cluster was listening on at the time of the snapshot. - **VpcId** *(string) --* Provides the VPC ID associated with the DB cluster snapshot. - **ClusterCreateTime** *(datetime) --* Specifies the time when the DB cluster was created, in Universal Coordinated Time (UTC). - **MasterUsername** *(string) --* Provides the master username for the DB cluster snapshot. - **EngineVersion** *(string) --* Provides the version of the database engine for this DB cluster snapshot. - **LicenseModel** *(string) --* Provides the license model information for this DB cluster snapshot. - **SnapshotType** *(string) --* Provides the type of the DB cluster snapshot. - **PercentProgress** *(integer) --* Specifies the percentage of the estimated data that has been transferred. 
- **StorageEncrypted** *(boolean) --* Specifies whether the DB cluster snapshot is encrypted. - **KmsKeyId** *(string) --* If ``StorageEncrypted`` is true, the AWS KMS key identifier for the encrypted DB cluster snapshot. - **DBClusterSnapshotArn** *(string) --* The Amazon Resource Name (ARN) for the DB cluster snapshot. - **SourceDBClusterSnapshotArn** *(string) --* If the DB cluster snapshot was copied from a source DB cluster snapshot, the Amazon Resource Name (ARN) for the source DB cluster snapshot, otherwise, a null value. - **IAMDatabaseAuthenticationEnabled** *(boolean) --* True if mapping of AWS Identity and Access Management (IAM) accounts to database accounts is enabled, and otherwise false. :type DBClusterSnapshotIdentifier: string :param DBClusterSnapshotIdentifier: **[REQUIRED]** The identifier of the DB cluster snapshot to delete. Constraints: Must be the name of an existing DB cluster snapshot in the ``available`` state. :rtype: dict :returns: """ pass def delete_db_instance(self, DBInstanceIdentifier: str, SkipFinalSnapshot: bool = None, FinalDBSnapshotIdentifier: str = None) -> Dict: """ The DeleteDBInstance action deletes a previously provisioned DB instance. When you delete a DB instance, all automated backups for that instance are deleted and can't be recovered. Manual DB snapshots of the DB instance to be deleted by ``DeleteDBInstance`` are not deleted. If you request a final DB snapshot the status of the Amazon Neptune DB instance is ``deleting`` until the DB snapshot is created. The API action ``DescribeDBInstance`` is used to monitor the status of this operation. The action can't be canceled or reverted once submitted. Note that when a DB instance is in a failure state and has a status of ``failed`` , ``incompatible-restore`` , or ``incompatible-network`` , you can only delete it when the ``SkipFinalSnapshot`` parameter is set to ``true`` . If the specified DB instance is part of a DB cluster, you can't delete the DB instance if both of the following conditions are true: * The DB instance is the only instance in the DB cluster. 
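For illustration, a sketch of the final-snapshot behavior described above. The instance and snapshot identifiers are placeholder assumptions::

    import boto3

    client = boto3.client('neptune')

    # With SkipFinalSnapshot=False a FinalDBSnapshotIdentifier is required, and the
    # instance status remains 'deleting' until the DB snapshot has been created.
    response = client.delete_db_instance(
        DBInstanceIdentifier='my-neptune-instance',
        SkipFinalSnapshot=False,
        FinalDBSnapshotIdentifier='my-neptune-instance-final'
    )
    print(response['DBInstance']['DBInstanceStatus'])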
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DeleteDBInstance>`_ **Request Syntax** :: response = client.delete_db_instance( DBInstanceIdentifier='string', SkipFinalSnapshot=True|False, FinalDBSnapshotIdentifier='string' ) **Response Syntax** :: { 'DBInstance': { 'DBInstanceIdentifier': 'string', 'DBInstanceClass': 'string', 'Engine': 'string', 'DBInstanceStatus': 'string', 'MasterUsername': 'string', 'DBName': 'string', 'Endpoint': { 'Address': 'string', 'Port': 123, 'HostedZoneId': 'string' }, 'AllocatedStorage': 123, 'InstanceCreateTime': datetime(2015, 1, 1), 'PreferredBackupWindow': 'string', 'BackupRetentionPeriod': 123, 'DBSecurityGroups': [ { 'DBSecurityGroupName': 'string', 'Status': 'string' }, ], 'VpcSecurityGroups': [ { 'VpcSecurityGroupId': 'string', 'Status': 'string' }, ], 'DBParameterGroups': [ { 'DBParameterGroupName': 'string', 'ParameterApplyStatus': 'string' }, ], 'AvailabilityZone': 'string', 'DBSubnetGroup': { 'DBSubnetGroupName': 'string', 'DBSubnetGroupDescription': 'string', 'VpcId': 'string', 'SubnetGroupStatus': 'string', 'Subnets': [ { 'SubnetIdentifier': 'string', 'SubnetAvailabilityZone': { 'Name': 'string' }, 'SubnetStatus': 'string' }, ], 'DBSubnetGroupArn': 'string' }, 'PreferredMaintenanceWindow': 'string', 'PendingModifiedValues': { 'DBInstanceClass': 'string', 'AllocatedStorage': 123, 'MasterUserPassword': 'string', 'Port': 123, 'BackupRetentionPeriod': 123, 'MultiAZ': True|False, 'EngineVersion': 'string', 'LicenseModel': 'string', 'Iops': 123, 'DBInstanceIdentifier': 'string', 'StorageType': 'string', 'CACertificateIdentifier': 'string', 'DBSubnetGroupName': 'string', 'PendingCloudwatchLogsExports': { 'LogTypesToEnable': [ 'string', ], 'LogTypesToDisable': [ 'string', ] } }, 'LatestRestorableTime': datetime(2015, 1, 1), 'MultiAZ': True|False, 'EngineVersion': 'string', 'AutoMinorVersionUpgrade': True|False, 'ReadReplicaSourceDBInstanceIdentifier': 'string', 'ReadReplicaDBInstanceIdentifiers': [ 'string', ], 'ReadReplicaDBClusterIdentifiers': [ 'string', ], 'LicenseModel': 'string', 'Iops': 123, 'OptionGroupMemberships': [ { 'OptionGroupName': 'string', 'Status': 'string' }, ], 'CharacterSetName': 'string', 'SecondaryAvailabilityZone': 'string', 'PubliclyAccessible': True|False, 'StatusInfos': [ { 'StatusType': 'string', 'Normal': True|False, 'Status': 'string', 'Message': 'string' }, ], 'StorageType': 'string', 'TdeCredentialArn': 'string', 'DbInstancePort': 123, 'DBClusterIdentifier': 'string', 'StorageEncrypted': True|False, 'KmsKeyId': 'string', 'DbiResourceId': 'string', 'CACertificateIdentifier': 'string', 'DomainMemberships': [ { 'Domain': 'string', 'Status': 'string', 'FQDN': 'string', 'IAMRoleName': 'string' }, ], 'CopyTagsToSnapshot': True|False, 'MonitoringInterval': 123, 'EnhancedMonitoringResourceArn': 'string', 'MonitoringRoleArn': 'string', 'PromotionTier': 123, 'DBInstanceArn': 'string', 'Timezone': 'string', 'IAMDatabaseAuthenticationEnabled': True|False, 'PerformanceInsightsEnabled': True|False, 'PerformanceInsightsKMSKeyId': 'string', 'EnabledCloudwatchLogsExports': [ 'string', ] } } **Response Structure** - *(dict) --* - **DBInstance** *(dict) --* Contains the details of an Amazon Neptune DB instance. This data type is used as a response element in the DescribeDBInstances action. - **DBInstanceIdentifier** *(string) --* Contains a user-supplied database identifier. This identifier is the unique key that identifies a DB instance. 
- **DBInstanceClass** *(string) --* Contains the name of the compute and memory capacity class of the DB instance. - **Engine** *(string) --* Provides the name of the database engine to be used for this DB instance. - **DBInstanceStatus** *(string) --* Specifies the current state of this database. - **MasterUsername** *(string) --* Contains the master username for the DB instance. - **DBName** *(string) --* The database name. - **Endpoint** *(dict) --* Specifies the connection endpoint. - **Address** *(string) --* Specifies the DNS address of the DB instance. - **Port** *(integer) --* Specifies the port that the database engine is listening on. - **HostedZoneId** *(string) --* Specifies the ID that Amazon Route 53 assigns when you create a hosted zone. - **AllocatedStorage** *(integer) --* Specifies the allocated storage size specified in gibibytes. - **InstanceCreateTime** *(datetime) --* Provides the date and time the DB instance was created. - **PreferredBackupWindow** *(string) --* Specifies the daily time range during which automated backups are created if automated backups are enabled, as determined by the ``BackupRetentionPeriod`` . - **BackupRetentionPeriod** *(integer) --* Specifies the number of days for which automatic DB snapshots are retained. - **DBSecurityGroups** *(list) --* Provides List of DB security group elements containing only ``DBSecurityGroup.Name`` and ``DBSecurityGroup.Status`` subelements. - *(dict) --* Specifies membership in a designated DB security group. - **DBSecurityGroupName** *(string) --* The name of the DB security group. - **Status** *(string) --* The status of the DB security group. - **VpcSecurityGroups** *(list) --* Provides a list of VPC security group elements that the DB instance belongs to. - *(dict) --* This data type is used as a response element for queries on VPC security group membership. - **VpcSecurityGroupId** *(string) --* The name of the VPC security group. - **Status** *(string) --* The status of the VPC security group. - **DBParameterGroups** *(list) --* Provides the list of DB parameter groups applied to this DB instance. - *(dict) --* The status of the DB parameter group. This data type is used as a response element in the following actions: * CreateDBInstance * DeleteDBInstance * ModifyDBInstance * RebootDBInstance - **DBParameterGroupName** *(string) --* The name of the DP parameter group. - **ParameterApplyStatus** *(string) --* The status of parameter updates. - **AvailabilityZone** *(string) --* Specifies the name of the Availability Zone the DB instance is located in. - **DBSubnetGroup** *(dict) --* Specifies information on the subnet group associated with the DB instance, including the name, description, and subnets in the subnet group. - **DBSubnetGroupName** *(string) --* The name of the DB subnet group. - **DBSubnetGroupDescription** *(string) --* Provides the description of the DB subnet group. - **VpcId** *(string) --* Provides the VpcId of the DB subnet group. - **SubnetGroupStatus** *(string) --* Provides the status of the DB subnet group. - **Subnets** *(list) --* Contains a list of Subnet elements. - *(dict) --* Specifies a subnet. This data type is used as a response element in the DescribeDBSubnetGroups action. - **SubnetIdentifier** *(string) --* Specifies the identifier of the subnet. - **SubnetAvailabilityZone** *(dict) --* Specifies the EC2 Availability Zone that the subnet is in. - **Name** *(string) --* The name of the availability zone. - **SubnetStatus** *(string) --* Specifies the status of the subnet. 
- **DBSubnetGroupArn** *(string) --* The Amazon Resource Name (ARN) for the DB subnet group. - **PreferredMaintenanceWindow** *(string) --* Specifies the weekly time range during which system maintenance can occur, in Universal Coordinated Time (UTC). - **PendingModifiedValues** *(dict) --* Specifies that changes to the DB instance are pending. This element is only included when changes are pending. Specific changes are identified by subelements. - **DBInstanceClass** *(string) --* Contains the new ``DBInstanceClass`` for the DB instance that will be applied or is currently being applied. - **AllocatedStorage** *(integer) --* Contains the new ``AllocatedStorage`` size for the DB instance that will be applied or is currently being applied. - **MasterUserPassword** *(string) --* Contains the pending or currently-in-progress change of the master credentials for the DB instance. - **Port** *(integer) --* Specifies the pending port for the DB instance. - **BackupRetentionPeriod** *(integer) --* Specifies the pending number of days for which automated backups are retained. - **MultiAZ** *(boolean) --* Indicates that the Single-AZ DB instance is to change to a Multi-AZ deployment. - **EngineVersion** *(string) --* Indicates the database engine version. - **LicenseModel** *(string) --* The license model for the DB instance. Valid values: ``license-included`` | ``bring-your-own-license`` | ``general-public-license`` - **Iops** *(integer) --* Specifies the new Provisioned IOPS value for the DB instance that will be applied or is currently being applied. - **DBInstanceIdentifier** *(string) --* Contains the new ``DBInstanceIdentifier`` for the DB instance that will be applied or is currently being applied. - **StorageType** *(string) --* Specifies the storage type to be associated with the DB instance. - **CACertificateIdentifier** *(string) --* Specifies the identifier of the CA certificate for the DB instance. - **DBSubnetGroupName** *(string) --* The new DB subnet group for the DB instance. - **PendingCloudwatchLogsExports** *(dict) --* Specifies the CloudWatch logs to be exported. - **LogTypesToEnable** *(list) --* Log types that are in the process of being deactivated. After they are deactivated, these log types aren't exported to CloudWatch Logs. - *(string) --* - **LogTypesToDisable** *(list) --* Log types that are in the process of being enabled. After they are enabled, these log types are exported to CloudWatch Logs. - *(string) --* - **LatestRestorableTime** *(datetime) --* Specifies the latest time to which a database can be restored with point-in-time restore. - **MultiAZ** *(boolean) --* Specifies if the DB instance is a Multi-AZ deployment. - **EngineVersion** *(string) --* Indicates the database engine version. - **AutoMinorVersionUpgrade** *(boolean) --* Indicates that minor version patches are applied automatically. - **ReadReplicaSourceDBInstanceIdentifier** *(string) --* Contains the identifier of the source DB instance if this DB instance is a Read Replica. - **ReadReplicaDBInstanceIdentifiers** *(list) --* Contains one or more identifiers of the Read Replicas associated with this DB instance. - *(string) --* - **ReadReplicaDBClusterIdentifiers** *(list) --* Contains one or more identifiers of DB clusters that are Read Replicas of this DB instance. - *(string) --* - **LicenseModel** *(string) --* License model information for this DB instance. - **Iops** *(integer) --* Specifies the Provisioned IOPS (I/O operations per second) value. 
- **OptionGroupMemberships** *(list) --* Provides the list of option group memberships for this DB instance. - *(dict) --* Provides information on the option groups the DB instance is a member of. - **OptionGroupName** *(string) --* The name of the option group that the instance belongs to. - **Status** *(string) --* The status of the DB instance's option group membership. Valid values are: ``in-sync`` , ``pending-apply`` , ``pending-removal`` , ``pending-maintenance-apply`` , ``pending-maintenance-removal`` , ``applying`` , ``removing`` , and ``failed`` . - **CharacterSetName** *(string) --* If present, specifies the name of the character set that this instance is associated with. - **SecondaryAvailabilityZone** *(string) --* If present, specifies the name of the secondary Availability Zone for a DB instance with multi-AZ support. - **PubliclyAccessible** *(boolean) --* This flag should no longer be used. - **StatusInfos** *(list) --* The status of a Read Replica. If the instance is not a Read Replica, this is blank. - *(dict) --* Provides a list of status information for a DB instance. - **StatusType** *(string) --* This value is currently "read replication." - **Normal** *(boolean) --* Boolean value that is true if the instance is operating normally, or false if the instance is in an error state. - **Status** *(string) --* Status of the DB instance. For a StatusType of read replica, the values can be replicating, error, stopped, or terminated. - **Message** *(string) --* Details of the error if there is an error for the instance. If the instance is not in an error state, this value is blank. - **StorageType** *(string) --* Specifies the storage type associated with DB instance. - **TdeCredentialArn** *(string) --* The ARN from the key store with which the instance is associated for TDE encryption. - **DbInstancePort** *(integer) --* Specifies the port that the DB instance listens on. If the DB instance is part of a DB cluster, this can be a different port than the DB cluster port. - **DBClusterIdentifier** *(string) --* If the DB instance is a member of a DB cluster, contains the name of the DB cluster that the DB instance is a member of. - **StorageEncrypted** *(boolean) --* Not supported: The encryption for DB instances is managed by the DB cluster. - **KmsKeyId** *(string) --* Not supported: The encryption for DB instances is managed by the DB cluster. - **DbiResourceId** *(string) --* The AWS Region-unique, immutable identifier for the DB instance. This identifier is found in AWS CloudTrail log entries whenever the AWS KMS key for the DB instance is accessed. - **CACertificateIdentifier** *(string) --* The identifier of the CA certificate for this DB instance. - **DomainMemberships** *(list) --* Not supported - *(dict) --* An Active Directory Domain membership record associated with a DB instance. - **Domain** *(string) --* The identifier of the Active Directory Domain. - **Status** *(string) --* The status of the DB instance's Active Directory Domain membership, such as joined, pending-join, failed etc). - **FQDN** *(string) --* The fully qualified domain name of the Active Directory Domain. - **IAMRoleName** *(string) --* The name of the IAM role to be used when making API calls to the Directory Service. - **CopyTagsToSnapshot** *(boolean) --* Specifies whether tags are copied from the DB instance to snapshots of the DB instance. - **MonitoringInterval** *(integer) --* The interval, in seconds, between points when Enhanced Monitoring metrics are collected for the DB instance. 
- **EnhancedMonitoringResourceArn** *(string) --* The Amazon Resource Name (ARN) of the Amazon CloudWatch Logs log stream that receives the Enhanced Monitoring metrics data for the DB instance. - **MonitoringRoleArn** *(string) --* The ARN for the IAM role that permits Neptune to send Enhanced Monitoring metrics to Amazon CloudWatch Logs. - **PromotionTier** *(integer) --* A value that specifies the order in which a Read Replica is promoted to the primary instance after a failure of the existing primary instance. - **DBInstanceArn** *(string) --* The Amazon Resource Name (ARN) for the DB instance. - **Timezone** *(string) --* Not supported. - **IAMDatabaseAuthenticationEnabled** *(boolean) --* True if AWS Identity and Access Management (IAM) authentication is enabled, and otherwise false. - **PerformanceInsightsEnabled** *(boolean) --* True if Performance Insights is enabled for the DB instance, and otherwise false. - **PerformanceInsightsKMSKeyId** *(string) --* The AWS KMS key identifier for encryption of Performance Insights data. The KMS key ID is the Amazon Resource Name (ARN), KMS key identifier, or the KMS key alias for the KMS encryption key. - **EnabledCloudwatchLogsExports** *(list) --* A list of log types that this DB instance is configured to export to CloudWatch Logs. - *(string) --* :type DBInstanceIdentifier: string :param DBInstanceIdentifier: **[REQUIRED]** The DB instance identifier for the DB instance to be deleted. This parameter isn\'t case-sensitive. Constraints: * Must match the name of an existing DB instance. :type SkipFinalSnapshot: boolean :param SkipFinalSnapshot: Determines whether a final DB snapshot is created before the DB instance is deleted. If ``true`` is specified, no DBSnapshot is created. If ``false`` is specified, a DB snapshot is created before the DB instance is deleted. Note that when a DB instance is in a failure state and has a status of \'failed\', \'incompatible-restore\', or \'incompatible-network\', it can only be deleted when the SkipFinalSnapshot parameter is set to \"true\". Specify ``true`` when deleting a Read Replica. .. note:: The FinalDBSnapshotIdentifier parameter must be specified if SkipFinalSnapshot is ``false`` . Default: ``false`` :type FinalDBSnapshotIdentifier: string :param FinalDBSnapshotIdentifier: The DBSnapshotIdentifier of the new DBSnapshot created when SkipFinalSnapshot is set to ``false`` . .. note:: Specifying this parameter and also setting the SkipFinalShapshot parameter to true results in an error. Constraints: * Must be 1 to 255 letters or numbers. * First character must be a letter * Cannot end with a hyphen or contain two consecutive hyphens * Cannot be specified when deleting a Read Replica. :rtype: dict :returns: """ pass def delete_db_parameter_group(self, DBParameterGroupName: str): """ Deletes a specified DBParameterGroup. The DBParameterGroup to be deleted can't be associated with any DB instances. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DeleteDBParameterGroup>`_ **Request Syntax** :: response = client.delete_db_parameter_group( DBParameterGroupName='string' ) :type DBParameterGroupName: string :param DBParameterGroupName: **[REQUIRED]** The name of the DB parameter group. Constraints: * Must be the name of an existing DB parameter group * You can\'t delete a default DB parameter group * Cannot be associated with any DB instances :returns: None """ pass def delete_db_subnet_group(self, DBSubnetGroupName: str): """ Deletes a DB subnet group. .. 
note:: The specified database subnet group must not be associated with any DB instances. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DeleteDBSubnetGroup>`_ **Request Syntax** :: response = client.delete_db_subnet_group( DBSubnetGroupName='string' ) :type DBSubnetGroupName: string :param DBSubnetGroupName: **[REQUIRED]** The name of the database subnet group to delete. .. note:: You can\'t delete the default subnet group. Constraints: Constraints: Must match the name of an existing DBSubnetGroup. Must not be default. Example: ``mySubnetgroup`` :returns: None """ pass def delete_event_subscription(self, SubscriptionName: str) -> Dict: """ Deletes an event notification subscription. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DeleteEventSubscription>`_ **Request Syntax** :: response = client.delete_event_subscription( SubscriptionName='string' ) **Response Syntax** :: { 'EventSubscription': { 'CustomerAwsId': 'string', 'CustSubscriptionId': 'string', 'SnsTopicArn': 'string', 'Status': 'string', 'SubscriptionCreationTime': 'string', 'SourceType': 'string', 'SourceIdsList': [ 'string', ], 'EventCategoriesList': [ 'string', ], 'Enabled': True|False, 'EventSubscriptionArn': 'string' } } **Response Structure** - *(dict) --* - **EventSubscription** *(dict) --* Contains the results of a successful invocation of the DescribeEventSubscriptions action. - **CustomerAwsId** *(string) --* The AWS customer account associated with the event notification subscription. - **CustSubscriptionId** *(string) --* The event notification subscription Id. - **SnsTopicArn** *(string) --* The topic ARN of the event notification subscription. - **Status** *(string) --* The status of the event notification subscription. Constraints: Can be one of the following: creating | modifying | deleting | active | no-permission | topic-not-exist The status "no-permission" indicates that Neptune no longer has permission to post to the SNS topic. The status "topic-not-exist" indicates that the topic was deleted after the subscription was created. - **SubscriptionCreationTime** *(string) --* The time the event notification subscription was created. - **SourceType** *(string) --* The source type for the event notification subscription. - **SourceIdsList** *(list) --* A list of source IDs for the event notification subscription. - *(string) --* - **EventCategoriesList** *(list) --* A list of event categories for the event notification subscription. - *(string) --* - **Enabled** *(boolean) --* A Boolean value indicating if the subscription is enabled. True indicates the subscription is enabled. - **EventSubscriptionArn** *(string) --* The Amazon Resource Name (ARN) for the event subscription. :type SubscriptionName: string :param SubscriptionName: **[REQUIRED]** The name of the event notification subscription you want to delete. :rtype: dict :returns: """ pass def describe_db_cluster_parameter_groups(self, DBClusterParameterGroupName: str = None, Filters: List = None, MaxRecords: int = None, Marker: str = None) -> Dict: """ Returns a list of ``DBClusterParameterGroup`` descriptions. If a ``DBClusterParameterGroupName`` parameter is specified, the list will contain only the description of the specified DB cluster parameter group. 
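For illustration, a sketch that pages through every DB cluster parameter group using the ``Marker`` token returned when more than ``MaxRecords`` results exist. No identifiers are assumed::

    import boto3

    client = boto3.client('neptune')

    marker = None
    while True:
        kwargs = {'MaxRecords': 100}
        if marker:
            kwargs['Marker'] = marker
        page = client.describe_db_cluster_parameter_groups(**kwargs)
        for group in page['DBClusterParameterGroups']:
            print(group['DBClusterParameterGroupName'])
        # 'Marker' is present only when more records remain.
        marker = page.get('Marker')
        if not marker:
            break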
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DescribeDBClusterParameterGroups>`_ **Request Syntax** :: response = client.describe_db_cluster_parameter_groups( DBClusterParameterGroupName='string', Filters=[ { 'Name': 'string', 'Values': [ 'string', ] }, ], MaxRecords=123, Marker='string' ) **Response Syntax** :: { 'Marker': 'string', 'DBClusterParameterGroups': [ { 'DBClusterParameterGroupName': 'string', 'DBParameterGroupFamily': 'string', 'Description': 'string', 'DBClusterParameterGroupArn': 'string' }, ] } **Response Structure** - *(dict) --* - **Marker** *(string) --* An optional pagination token provided by a previous ``DescribeDBClusterParameterGroups`` request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . - **DBClusterParameterGroups** *(list) --* A list of DB cluster parameter groups. - *(dict) --* Contains the details of an Amazon Neptune DB cluster parameter group. This data type is used as a response element in the DescribeDBClusterParameterGroups action. - **DBClusterParameterGroupName** *(string) --* Provides the name of the DB cluster parameter group. - **DBParameterGroupFamily** *(string) --* Provides the name of the DB parameter group family that this DB cluster parameter group is compatible with. - **Description** *(string) --* Provides the customer-specified description for this DB cluster parameter group. - **DBClusterParameterGroupArn** *(string) --* The Amazon Resource Name (ARN) for the DB cluster parameter group. :type DBClusterParameterGroupName: string :param DBClusterParameterGroupName: The name of a specific DB cluster parameter group to return details for. Constraints: * If supplied, must match the name of an existing DBClusterParameterGroup. :type Filters: list :param Filters: This parameter is not currently supported. - *(dict) --* This type is not currently supported. - **Name** *(string) --* **[REQUIRED]** This parameter is not currently supported. - **Values** *(list) --* **[REQUIRED]** This parameter is not currently supported. - *(string) --* :type MaxRecords: integer :param MaxRecords: The maximum number of records to include in the response. If more records exist than the specified ``MaxRecords`` value, a pagination token called a marker is included in the response so that the remaining results can be retrieved. Default: 100 Constraints: Minimum 20, maximum 100. :type Marker: string :param Marker: An optional pagination token provided by a previous ``DescribeDBClusterParameterGroups`` request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . :rtype: dict :returns: """ pass def describe_db_cluster_parameters(self, DBClusterParameterGroupName: str, Source: str = None, Filters: List = None, MaxRecords: int = None, Marker: str = None) -> Dict: """ Returns the detailed parameter list for a particular DB cluster parameter group. 
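For illustration, a sketch that lists the modifiable engine parameters of one group. The group name is a placeholder assumption; ``Source`` accepts ``engine``, ``service``, or ``customer`` as noted below::

    import boto3

    client = boto3.client('neptune')

    response = client.describe_db_cluster_parameters(
        DBClusterParameterGroupName='my-cluster-params',   # assumed group name
        Source='engine'
    )
    for param in response['Parameters']:
        if param.get('IsModifiable'):
            print(param['ParameterName'], param.get('ParameterValue'))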
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DescribeDBClusterParameters>`_ **Request Syntax** :: response = client.describe_db_cluster_parameters( DBClusterParameterGroupName='string', Source='string', Filters=[ { 'Name': 'string', 'Values': [ 'string', ] }, ], MaxRecords=123, Marker='string' ) **Response Syntax** :: { 'Parameters': [ { 'ParameterName': 'string', 'ParameterValue': 'string', 'Description': 'string', 'Source': 'string', 'ApplyType': 'string', 'DataType': 'string', 'AllowedValues': 'string', 'IsModifiable': True|False, 'MinimumEngineVersion': 'string', 'ApplyMethod': 'immediate'|'pending-reboot' }, ], 'Marker': 'string' } **Response Structure** - *(dict) --* - **Parameters** *(list) --* Provides a list of parameters for the DB cluster parameter group. - *(dict) --* Specifies a parameter. - **ParameterName** *(string) --* Specifies the name of the parameter. - **ParameterValue** *(string) --* Specifies the value of the parameter. - **Description** *(string) --* Provides a description of the parameter. - **Source** *(string) --* Indicates the source of the parameter value. - **ApplyType** *(string) --* Specifies the engine specific parameters type. - **DataType** *(string) --* Specifies the valid data type for the parameter. - **AllowedValues** *(string) --* Specifies the valid range of values for the parameter. - **IsModifiable** *(boolean) --* Indicates whether (``true`` ) or not (``false`` ) the parameter can be modified. Some parameters have security or operational implications that prevent them from being changed. - **MinimumEngineVersion** *(string) --* The earliest engine version to which the parameter can apply. - **ApplyMethod** *(string) --* Indicates when to apply parameter updates. - **Marker** *(string) --* An optional pagination token provided by a previous DescribeDBClusterParameters request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . :type DBClusterParameterGroupName: string :param DBClusterParameterGroupName: **[REQUIRED]** The name of a specific DB cluster parameter group to return parameter details for. Constraints: * If supplied, must match the name of an existing DBClusterParameterGroup. :type Source: string :param Source: A value that indicates to return only parameters for a specific source. Parameter sources can be ``engine`` , ``service`` , or ``customer`` . :type Filters: list :param Filters: This parameter is not currently supported. - *(dict) --* This type is not currently supported. - **Name** *(string) --* **[REQUIRED]** This parameter is not currently supported. - **Values** *(list) --* **[REQUIRED]** This parameter is not currently supported. - *(string) --* :type MaxRecords: integer :param MaxRecords: The maximum number of records to include in the response. If more records exist than the specified ``MaxRecords`` value, a pagination token called a marker is included in the response so that the remaining results can be retrieved. Default: 100 Constraints: Minimum 20, maximum 100. :type Marker: string :param Marker: An optional pagination token provided by a previous ``DescribeDBClusterParameters`` request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . 
:rtype: dict :returns: """ pass def describe_db_cluster_snapshot_attributes(self, DBClusterSnapshotIdentifier: str) -> Dict: """ Returns a list of DB cluster snapshot attribute names and values for a manual DB cluster snapshot. When sharing snapshots with other AWS accounts, ``DescribeDBClusterSnapshotAttributes`` returns the ``restore`` attribute and a list of IDs for the AWS accounts that are authorized to copy or restore the manual DB cluster snapshot. If ``all`` is included in the list of values for the ``restore`` attribute, then the manual DB cluster snapshot is public and can be copied or restored by all AWS accounts. To add or remove access for an AWS account to copy or restore a manual DB cluster snapshot, or to make the manual DB cluster snapshot public or private, use the ModifyDBClusterSnapshotAttribute API action. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DescribeDBClusterSnapshotAttributes>`_ **Request Syntax** :: response = client.describe_db_cluster_snapshot_attributes( DBClusterSnapshotIdentifier='string' ) **Response Syntax** :: { 'DBClusterSnapshotAttributesResult': { 'DBClusterSnapshotIdentifier': 'string', 'DBClusterSnapshotAttributes': [ { 'AttributeName': 'string', 'AttributeValues': [ 'string', ] }, ] } } **Response Structure** - *(dict) --* - **DBClusterSnapshotAttributesResult** *(dict) --* Contains the results of a successful call to the DescribeDBClusterSnapshotAttributes API action. Manual DB cluster snapshot attributes are used to authorize other AWS accounts to copy or restore a manual DB cluster snapshot. For more information, see the ModifyDBClusterSnapshotAttribute API action. - **DBClusterSnapshotIdentifier** *(string) --* The identifier of the manual DB cluster snapshot that the attributes apply to. - **DBClusterSnapshotAttributes** *(list) --* The list of attributes and values for the manual DB cluster snapshot. - *(dict) --* Contains the name and values of a manual DB cluster snapshot attribute. Manual DB cluster snapshot attributes are used to authorize other AWS accounts to restore a manual DB cluster snapshot. For more information, see the ModifyDBClusterSnapshotAttribute API action. - **AttributeName** *(string) --* The name of the manual DB cluster snapshot attribute. The attribute named ``restore`` refers to the list of AWS accounts that have permission to copy or restore the manual DB cluster snapshot. For more information, see the ModifyDBClusterSnapshotAttribute API action. - **AttributeValues** *(list) --* The value(s) for the manual DB cluster snapshot attribute. If the ``AttributeName`` field is set to ``restore`` , then this element returns a list of IDs of the AWS accounts that are authorized to copy or restore the manual DB cluster snapshot. If a value of ``all`` is in the list, then the manual DB cluster snapshot is public and available for any AWS account to copy or restore. - *(string) --* :type DBClusterSnapshotIdentifier: string :param DBClusterSnapshotIdentifier: **[REQUIRED]** The identifier for the DB cluster snapshot to describe the attributes for. :rtype: dict :returns: """ pass def describe_db_cluster_snapshots(self, DBClusterIdentifier: str = None, DBClusterSnapshotIdentifier: str = None, SnapshotType: str = None, Filters: List = None, MaxRecords: int = None, Marker: str = None, IncludeShared: bool = None, IncludePublic: bool = None) -> Dict: """ Returns information about DB cluster snapshots. This API action supports pagination. 
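**Example** (an illustrative sketch, not part of the AWS API reference; it assumes ``client`` is a configured Neptune client and the cluster identifier is hypothetical) ::

    response = client.describe_db_cluster_snapshots(
        DBClusterIdentifier='my-neptune-cluster',  # hypothetical identifier
        SnapshotType='manual'
    )
    for snapshot in response['DBClusterSnapshots']:
        print(snapshot['DBClusterSnapshotIdentifier'],
              snapshot['Status'],
              snapshot['SnapshotCreateTime'])

For result sets larger than ``MaxRecords``, pass the returned ``Marker`` into a subsequent call, as with the other ``Describe*`` operations.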
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DescribeDBClusterSnapshots>`_ **Request Syntax** :: response = client.describe_db_cluster_snapshots( DBClusterIdentifier='string', DBClusterSnapshotIdentifier='string', SnapshotType='string', Filters=[ { 'Name': 'string', 'Values': [ 'string', ] }, ], MaxRecords=123, Marker='string', IncludeShared=True|False, IncludePublic=True|False ) **Response Syntax** :: { 'Marker': 'string', 'DBClusterSnapshots': [ { 'AvailabilityZones': [ 'string', ], 'DBClusterSnapshotIdentifier': 'string', 'DBClusterIdentifier': 'string', 'SnapshotCreateTime': datetime(2015, 1, 1), 'Engine': 'string', 'AllocatedStorage': 123, 'Status': 'string', 'Port': 123, 'VpcId': 'string', 'ClusterCreateTime': datetime(2015, 1, 1), 'MasterUsername': 'string', 'EngineVersion': 'string', 'LicenseModel': 'string', 'SnapshotType': 'string', 'PercentProgress': 123, 'StorageEncrypted': True|False, 'KmsKeyId': 'string', 'DBClusterSnapshotArn': 'string', 'SourceDBClusterSnapshotArn': 'string', 'IAMDatabaseAuthenticationEnabled': True|False }, ] } **Response Structure** - *(dict) --* - **Marker** *(string) --* An optional pagination token provided by a previous DescribeDBClusterSnapshots request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . - **DBClusterSnapshots** *(list) --* Provides a list of DB cluster snapshots for the user. - *(dict) --* Contains the details for an Amazon Neptune DB cluster snapshot This data type is used as a response element in the DescribeDBClusterSnapshots action. - **AvailabilityZones** *(list) --* Provides the list of EC2 Availability Zones that instances in the DB cluster snapshot can be restored in. - *(string) --* - **DBClusterSnapshotIdentifier** *(string) --* Specifies the identifier for the DB cluster snapshot. - **DBClusterIdentifier** *(string) --* Specifies the DB cluster identifier of the DB cluster that this DB cluster snapshot was created from. - **SnapshotCreateTime** *(datetime) --* Provides the time when the snapshot was taken, in Universal Coordinated Time (UTC). - **Engine** *(string) --* Specifies the name of the database engine. - **AllocatedStorage** *(integer) --* Specifies the allocated storage size in gibibytes (GiB). - **Status** *(string) --* Specifies the status of this DB cluster snapshot. - **Port** *(integer) --* Specifies the port that the DB cluster was listening on at the time of the snapshot. - **VpcId** *(string) --* Provides the VPC ID associated with the DB cluster snapshot. - **ClusterCreateTime** *(datetime) --* Specifies the time when the DB cluster was created, in Universal Coordinated Time (UTC). - **MasterUsername** *(string) --* Provides the master username for the DB cluster snapshot. - **EngineVersion** *(string) --* Provides the version of the database engine for this DB cluster snapshot. - **LicenseModel** *(string) --* Provides the license model information for this DB cluster snapshot. - **SnapshotType** *(string) --* Provides the type of the DB cluster snapshot. - **PercentProgress** *(integer) --* Specifies the percentage of the estimated data that has been transferred. - **StorageEncrypted** *(boolean) --* Specifies whether the DB cluster snapshot is encrypted. - **KmsKeyId** *(string) --* If ``StorageEncrypted`` is true, the AWS KMS key identifier for the encrypted DB cluster snapshot. 
- **DBClusterSnapshotArn** *(string) --* The Amazon Resource Name (ARN) for the DB cluster snapshot. - **SourceDBClusterSnapshotArn** *(string) --* If the DB cluster snapshot was copied from a source DB cluster snapshot, the Amazon Resource Name (ARN) for the source DB cluster snapshot, otherwise, a null value. - **IAMDatabaseAuthenticationEnabled** *(boolean) --* True if mapping of AWS Identity and Access Management (IAM) accounts to database accounts is enabled, and otherwise false. :type DBClusterIdentifier: string :param DBClusterIdentifier: The ID of the DB cluster to retrieve the list of DB cluster snapshots for. This parameter can\'t be used in conjunction with the ``DBClusterSnapshotIdentifier`` parameter. This parameter is not case-sensitive. Constraints: * If supplied, must match the identifier of an existing DBCluster. :type DBClusterSnapshotIdentifier: string :param DBClusterSnapshotIdentifier: A specific DB cluster snapshot identifier to describe. This parameter can\'t be used in conjunction with the ``DBClusterIdentifier`` parameter. This value is stored as a lowercase string. Constraints: * If supplied, must match the identifier of an existing DBClusterSnapshot. * If this identifier is for an automated snapshot, the ``SnapshotType`` parameter must also be specified. :type SnapshotType: string :param SnapshotType: The type of DB cluster snapshots to be returned. You can specify one of the following values: * ``automated`` - Return all DB cluster snapshots that have been automatically taken by Amazon Neptune for my AWS account. * ``manual`` - Return all DB cluster snapshots that have been taken by my AWS account. * ``shared`` - Return all manual DB cluster snapshots that have been shared to my AWS account. * ``public`` - Return all DB cluster snapshots that have been marked as public. If you don\'t specify a ``SnapshotType`` value, then both automated and manual DB cluster snapshots are returned. You can include shared DB cluster snapshots with these results by setting the ``IncludeShared`` parameter to ``true`` . You can include public DB cluster snapshots with these results by setting the ``IncludePublic`` parameter to ``true`` . The ``IncludeShared`` and ``IncludePublic`` parameters don\'t apply for ``SnapshotType`` values of ``manual`` or ``automated`` . The ``IncludePublic`` parameter doesn\'t apply when ``SnapshotType`` is set to ``shared`` . The ``IncludeShared`` parameter doesn\'t apply when ``SnapshotType`` is set to ``public`` . :type Filters: list :param Filters: This parameter is not currently supported. - *(dict) --* This type is not currently supported. - **Name** *(string) --* **[REQUIRED]** This parameter is not currently supported. - **Values** *(list) --* **[REQUIRED]** This parameter is not currently supported. - *(string) --* :type MaxRecords: integer :param MaxRecords: The maximum number of records to include in the response. If more records exist than the specified ``MaxRecords`` value, a pagination token called a marker is included in the response so that the remaining results can be retrieved. Default: 100 Constraints: Minimum 20, maximum 100. :type Marker: string :param Marker: An optional pagination token provided by a previous ``DescribeDBClusterSnapshots`` request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . 
:type IncludeShared: boolean :param IncludeShared: True to include shared manual DB cluster snapshots from other AWS accounts that this AWS account has been given permission to copy or restore, and otherwise false. The default is ``false`` . You can give an AWS account permission to restore a manual DB cluster snapshot from another AWS account by the ModifyDBClusterSnapshotAttribute API action. :type IncludePublic: boolean :param IncludePublic: True to include manual DB cluster snapshots that are public and can be copied or restored by any AWS account, and otherwise false. The default is ``false`` . You can share a manual DB cluster snapshot as public by using the ModifyDBClusterSnapshotAttribute API action. :rtype: dict :returns: """ pass def describe_db_clusters(self, DBClusterIdentifier: str = None, Filters: List = None, MaxRecords: int = None, Marker: str = None) -> Dict: """ Returns information about provisioned DB clusters. This API supports pagination. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DescribeDBClusters>`_ **Request Syntax** :: response = client.describe_db_clusters( DBClusterIdentifier='string', Filters=[ { 'Name': 'string', 'Values': [ 'string', ] }, ], MaxRecords=123, Marker='string' ) **Response Syntax** :: { 'Marker': 'string', 'DBClusters': [ { 'AllocatedStorage': 123, 'AvailabilityZones': [ 'string', ], 'BackupRetentionPeriod': 123, 'CharacterSetName': 'string', 'DatabaseName': 'string', 'DBClusterIdentifier': 'string', 'DBClusterParameterGroup': 'string', 'DBSubnetGroup': 'string', 'Status': 'string', 'PercentProgress': 'string', 'EarliestRestorableTime': datetime(2015, 1, 1), 'Endpoint': 'string', 'ReaderEndpoint': 'string', 'MultiAZ': True|False, 'Engine': 'string', 'EngineVersion': 'string', 'LatestRestorableTime': datetime(2015, 1, 1), 'Port': 123, 'MasterUsername': 'string', 'DBClusterOptionGroupMemberships': [ { 'DBClusterOptionGroupName': 'string', 'Status': 'string' }, ], 'PreferredBackupWindow': 'string', 'PreferredMaintenanceWindow': 'string', 'ReplicationSourceIdentifier': 'string', 'ReadReplicaIdentifiers': [ 'string', ], 'DBClusterMembers': [ { 'DBInstanceIdentifier': 'string', 'IsClusterWriter': True|False, 'DBClusterParameterGroupStatus': 'string', 'PromotionTier': 123 }, ], 'VpcSecurityGroups': [ { 'VpcSecurityGroupId': 'string', 'Status': 'string' }, ], 'HostedZoneId': 'string', 'StorageEncrypted': True|False, 'KmsKeyId': 'string', 'DbClusterResourceId': 'string', 'DBClusterArn': 'string', 'AssociatedRoles': [ { 'RoleArn': 'string', 'Status': 'string' }, ], 'IAMDatabaseAuthenticationEnabled': True|False, 'CloneGroupId': 'string', 'ClusterCreateTime': datetime(2015, 1, 1) }, ] } **Response Structure** - *(dict) --* - **Marker** *(string) --* A pagination token that can be used in a subsequent DescribeDBClusters request. - **DBClusters** *(list) --* Contains a list of DB clusters for the user. - *(dict) --* Contains the details of an Amazon Neptune DB cluster. This data type is used as a response element in the DescribeDBClusters action. - **AllocatedStorage** *(integer) --* ``AllocatedStorage`` always returns 1, because Neptune DB cluster storage size is not fixed, but instead automatically adjusts as needed. - **AvailabilityZones** *(list) --* Provides the list of EC2 Availability Zones that instances in the DB cluster can be created in. - *(string) --* - **BackupRetentionPeriod** *(integer) --* Specifies the number of days for which automatic DB snapshots are retained. 
- **CharacterSetName** *(string) --* If present, specifies the name of the character set that this cluster is associated with. - **DatabaseName** *(string) --* Contains the name of the initial database of this DB cluster that was provided at create time, if one was specified when the DB cluster was created. This same name is returned for the life of the DB cluster. - **DBClusterIdentifier** *(string) --* Contains a user-supplied DB cluster identifier. This identifier is the unique key that identifies a DB cluster. - **DBClusterParameterGroup** *(string) --* Specifies the name of the DB cluster parameter group for the DB cluster. - **DBSubnetGroup** *(string) --* Specifies information on the subnet group associated with the DB cluster, including the name, description, and subnets in the subnet group. - **Status** *(string) --* Specifies the current state of this DB cluster. - **PercentProgress** *(string) --* Specifies the progress of the operation as a percentage. - **EarliestRestorableTime** *(datetime) --* Specifies the earliest time to which a database can be restored with point-in-time restore. - **Endpoint** *(string) --* Specifies the connection endpoint for the primary instance of the DB cluster. - **ReaderEndpoint** *(string) --* The reader endpoint for the DB cluster. The reader endpoint for a DB cluster load-balances connections across the Read Replicas that are available in a DB cluster. As clients request new connections to the reader endpoint, Neptune distributes the connection requests among the Read Replicas in the DB cluster. This functionality can help balance your read workload across multiple Read Replicas in your DB cluster. If a failover occurs, and the Read Replica that you are connected to is promoted to be the primary instance, your connection is dropped. To continue sending your read workload to other Read Replicas in the cluster, you can then reconnect to the reader endpoint. - **MultiAZ** *(boolean) --* Specifies whether the DB cluster has instances in multiple Availability Zones. - **Engine** *(string) --* Provides the name of the database engine to be used for this DB cluster. - **EngineVersion** *(string) --* Indicates the database engine version. - **LatestRestorableTime** *(datetime) --* Specifies the latest time to which a database can be restored with point-in-time restore. - **Port** *(integer) --* Specifies the port that the database engine is listening on. - **MasterUsername** *(string) --* Contains the master username for the DB cluster. - **DBClusterOptionGroupMemberships** *(list) --* Provides the list of option group memberships for this DB cluster. - *(dict) --* Contains status information for a DB cluster option group. - **DBClusterOptionGroupName** *(string) --* Specifies the name of the DB cluster option group. - **Status** *(string) --* Specifies the status of the DB cluster option group. - **PreferredBackupWindow** *(string) --* Specifies the daily time range during which automated backups are created if automated backups are enabled, as determined by the ``BackupRetentionPeriod`` . - **PreferredMaintenanceWindow** *(string) --* Specifies the weekly time range during which system maintenance can occur, in Universal Coordinated Time (UTC). - **ReplicationSourceIdentifier** *(string) --* Not supported by Neptune. - **ReadReplicaIdentifiers** *(list) --* Contains one or more identifiers of the Read Replicas associated with this DB cluster. 
- *(string) --* - **DBClusterMembers** *(list) --* Provides the list of instances that make up the DB cluster. - *(dict) --* Contains information about an instance that is part of a DB cluster. - **DBInstanceIdentifier** *(string) --* Specifies the instance identifier for this member of the DB cluster. - **IsClusterWriter** *(boolean) --* Value that is ``true`` if the cluster member is the primary instance for the DB cluster and ``false`` otherwise. - **DBClusterParameterGroupStatus** *(string) --* Specifies the status of the DB cluster parameter group for this member of the DB cluster. - **PromotionTier** *(integer) --* A value that specifies the order in which a Read Replica is promoted to the primary instance after a failure of the existing primary instance. - **VpcSecurityGroups** *(list) --* Provides a list of VPC security groups that the DB cluster belongs to. - *(dict) --* This data type is used as a response element for queries on VPC security group membership. - **VpcSecurityGroupId** *(string) --* The name of the VPC security group. - **Status** *(string) --* The status of the VPC security group. - **HostedZoneId** *(string) --* Specifies the ID that Amazon Route 53 assigns when you create a hosted zone. - **StorageEncrypted** *(boolean) --* Specifies whether the DB cluster is encrypted. - **KmsKeyId** *(string) --* If ``StorageEncrypted`` is true, the AWS KMS key identifier for the encrypted DB cluster. - **DbClusterResourceId** *(string) --* The AWS Region-unique, immutable identifier for the DB cluster. This identifier is found in AWS CloudTrail log entries whenever the AWS KMS key for the DB cluster is accessed. - **DBClusterArn** *(string) --* The Amazon Resource Name (ARN) for the DB cluster. - **AssociatedRoles** *(list) --* Provides a list of the AWS Identity and Access Management (IAM) roles that are associated with the DB cluster. IAM roles that are associated with a DB cluster grant permission for the DB cluster to access other AWS services on your behalf. - *(dict) --* Describes an AWS Identity and Access Management (IAM) role that is associated with a DB cluster. - **RoleArn** *(string) --* The Amazon Resource Name (ARN) of the IAM role that is associated with the DB cluster. - **Status** *(string) --* Describes the state of association between the IAM role and the DB cluster. The Status property returns one of the following values: * ``ACTIVE`` - the IAM role ARN is associated with the DB cluster and can be used to access other AWS services on your behalf. * ``PENDING`` - the IAM role ARN is being associated with the DB cluster. * ``INVALID`` - the IAM role ARN is associated with the DB cluster, but the DB cluster is unable to assume the IAM role in order to access other AWS services on your behalf. - **IAMDatabaseAuthenticationEnabled** *(boolean) --* True if mapping of AWS Identity and Access Management (IAM) accounts to database accounts is enabled, and otherwise false. - **CloneGroupId** *(string) --* Identifies the clone group to which the DB cluster is associated. - **ClusterCreateTime** *(datetime) --* Specifies the time when the DB cluster was created, in Universal Coordinated Time (UTC). :type DBClusterIdentifier: string :param DBClusterIdentifier: The user-supplied DB cluster identifier. If this parameter is specified, information from only the specific DB cluster is returned. This parameter isn\'t case-sensitive. Constraints: * If supplied, must match an existing DBClusterIdentifier. 
:type Filters: list :param Filters: A filter that specifies one or more DB clusters to describe. Supported filters: * ``db-cluster-id`` - Accepts DB cluster identifiers and DB cluster Amazon Resource Names (ARNs). The results list will only include information about the DB clusters identified by these ARNs. - *(dict) --* This type is not currently supported. - **Name** *(string) --* **[REQUIRED]** This parameter is not currently supported. - **Values** *(list) --* **[REQUIRED]** This parameter is not currently supported. - *(string) --* :type MaxRecords: integer :param MaxRecords: The maximum number of records to include in the response. If more records exist than the specified ``MaxRecords`` value, a pagination token called a marker is included in the response so that the remaining results can be retrieved. Default: 100 Constraints: Minimum 20, maximum 100. :type Marker: string :param Marker: An optional pagination token provided by a previous DescribeDBClusters request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . :rtype: dict :returns: """ pass def describe_db_engine_versions(self, Engine: str = None, EngineVersion: str = None, DBParameterGroupFamily: str = None, Filters: List = None, MaxRecords: int = None, Marker: str = None, DefaultOnly: bool = None, ListSupportedCharacterSets: bool = None, ListSupportedTimezones: bool = None) -> Dict: """ Returns a list of the available DB engines. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DescribeDBEngineVersions>`_ **Request Syntax** :: response = client.describe_db_engine_versions( Engine='string', EngineVersion='string', DBParameterGroupFamily='string', Filters=[ { 'Name': 'string', 'Values': [ 'string', ] }, ], MaxRecords=123, Marker='string', DefaultOnly=True|False, ListSupportedCharacterSets=True|False, ListSupportedTimezones=True|False ) **Response Syntax** :: { 'Marker': 'string', 'DBEngineVersions': [ { 'Engine': 'string', 'EngineVersion': 'string', 'DBParameterGroupFamily': 'string', 'DBEngineDescription': 'string', 'DBEngineVersionDescription': 'string', 'DefaultCharacterSet': { 'CharacterSetName': 'string', 'CharacterSetDescription': 'string' }, 'SupportedCharacterSets': [ { 'CharacterSetName': 'string', 'CharacterSetDescription': 'string' }, ], 'ValidUpgradeTarget': [ { 'Engine': 'string', 'EngineVersion': 'string', 'Description': 'string', 'AutoUpgrade': True|False, 'IsMajorVersionUpgrade': True|False }, ], 'SupportedTimezones': [ { 'TimezoneName': 'string' }, ], 'ExportableLogTypes': [ 'string', ], 'SupportsLogExportsToCloudwatchLogs': True|False, 'SupportsReadReplica': True|False }, ] } **Response Structure** - *(dict) --* - **Marker** *(string) --* An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . - **DBEngineVersions** *(list) --* A list of ``DBEngineVersion`` elements. - *(dict) --* This data type is used as a response element in the action DescribeDBEngineVersions . - **Engine** *(string) --* The name of the database engine. - **EngineVersion** *(string) --* The version number of the database engine. - **DBParameterGroupFamily** *(string) --* The name of the DB parameter group family for the database engine. - **DBEngineDescription** *(string) --* The description of the database engine. 
- **DBEngineVersionDescription** *(string) --* The description of the database engine version. - **DefaultCharacterSet** *(dict) --* The default character set for new instances of this engine version, if the ``CharacterSetName`` parameter of the CreateDBInstance API is not specified. - **CharacterSetName** *(string) --* The name of the character set. - **CharacterSetDescription** *(string) --* The description of the character set. - **SupportedCharacterSets** *(list) --* A list of the character sets supported by this engine for the ``CharacterSetName`` parameter of the ``CreateDBInstance`` action. - *(dict) --* Specifies a character set. - **CharacterSetName** *(string) --* The name of the character set. - **CharacterSetDescription** *(string) --* The description of the character set. - **ValidUpgradeTarget** *(list) --* A list of engine versions that this database engine version can be upgraded to. - *(dict) --* The version of the database engine that a DB instance can be upgraded to. - **Engine** *(string) --* The name of the upgrade target database engine. - **EngineVersion** *(string) --* The version number of the upgrade target database engine. - **Description** *(string) --* The version of the database engine that a DB instance can be upgraded to. - **AutoUpgrade** *(boolean) --* A value that indicates whether the target version is applied to any source DB instances that have AutoMinorVersionUpgrade set to true. - **IsMajorVersionUpgrade** *(boolean) --* A value that indicates whether a database engine is upgraded to a major version. - **SupportedTimezones** *(list) --* A list of the time zones supported by this engine for the ``Timezone`` parameter of the ``CreateDBInstance`` action. - *(dict) --* A time zone associated with a DBInstance . - **TimezoneName** *(string) --* The name of the time zone. - **ExportableLogTypes** *(list) --* The types of logs that the database engine has available for export to CloudWatch Logs. - *(string) --* - **SupportsLogExportsToCloudwatchLogs** *(boolean) --* A value that indicates whether the engine version supports exporting the log types specified by ExportableLogTypes to CloudWatch Logs. - **SupportsReadReplica** *(boolean) --* Indicates whether the database engine version supports read replicas. :type Engine: string :param Engine: The database engine to return. :type EngineVersion: string :param EngineVersion: The database engine version to return. Example: ``5.1.49`` :type DBParameterGroupFamily: string :param DBParameterGroupFamily: The name of a specific DB parameter group family to return details for. Constraints: * If supplied, must match an existing DBParameterGroupFamily. :type Filters: list :param Filters: Not currently supported. - *(dict) --* This type is not currently supported. - **Name** *(string) --* **[REQUIRED]** This parameter is not currently supported. - **Values** *(list) --* **[REQUIRED]** This parameter is not currently supported. - *(string) --* :type MaxRecords: integer :param MaxRecords: The maximum number of records to include in the response. If more than the ``MaxRecords`` value is available, a pagination token called a marker is included in the response so that the following results can be retrieved. Default: 100 Constraints: Minimum 20, maximum 100. :type Marker: string :param Marker: An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . 
:type DefaultOnly: boolean :param DefaultOnly: Indicates that only the default version of the specified engine or engine and major version combination is returned. :type ListSupportedCharacterSets: boolean :param ListSupportedCharacterSets: If this parameter is specified and the requested engine supports the ``CharacterSetName`` parameter for ``CreateDBInstance`` , the response includes a list of supported character sets for each engine version. :type ListSupportedTimezones: boolean :param ListSupportedTimezones: If this parameter is specified and the requested engine supports the ``TimeZone`` parameter for ``CreateDBInstance`` , the response includes a list of supported time zones for each engine version. :rtype: dict :returns: """ pass def describe_db_instances(self, DBInstanceIdentifier: str = None, Filters: List = None, MaxRecords: int = None, Marker: str = None) -> Dict: """ Returns information about provisioned instances. This API supports pagination. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DescribeDBInstances>`_ **Request Syntax** :: response = client.describe_db_instances( DBInstanceIdentifier='string', Filters=[ { 'Name': 'string', 'Values': [ 'string', ] }, ], MaxRecords=123, Marker='string' ) **Response Syntax** :: { 'Marker': 'string', 'DBInstances': [ { 'DBInstanceIdentifier': 'string', 'DBInstanceClass': 'string', 'Engine': 'string', 'DBInstanceStatus': 'string', 'MasterUsername': 'string', 'DBName': 'string', 'Endpoint': { 'Address': 'string', 'Port': 123, 'HostedZoneId': 'string' }, 'AllocatedStorage': 123, 'InstanceCreateTime': datetime(2015, 1, 1), 'PreferredBackupWindow': 'string', 'BackupRetentionPeriod': 123, 'DBSecurityGroups': [ { 'DBSecurityGroupName': 'string', 'Status': 'string' }, ], 'VpcSecurityGroups': [ { 'VpcSecurityGroupId': 'string', 'Status': 'string' }, ], 'DBParameterGroups': [ { 'DBParameterGroupName': 'string', 'ParameterApplyStatus': 'string' }, ], 'AvailabilityZone': 'string', 'DBSubnetGroup': { 'DBSubnetGroupName': 'string', 'DBSubnetGroupDescription': 'string', 'VpcId': 'string', 'SubnetGroupStatus': 'string', 'Subnets': [ { 'SubnetIdentifier': 'string', 'SubnetAvailabilityZone': { 'Name': 'string' }, 'SubnetStatus': 'string' }, ], 'DBSubnetGroupArn': 'string' }, 'PreferredMaintenanceWindow': 'string', 'PendingModifiedValues': { 'DBInstanceClass': 'string', 'AllocatedStorage': 123, 'MasterUserPassword': 'string', 'Port': 123, 'BackupRetentionPeriod': 123, 'MultiAZ': True|False, 'EngineVersion': 'string', 'LicenseModel': 'string', 'Iops': 123, 'DBInstanceIdentifier': 'string', 'StorageType': 'string', 'CACertificateIdentifier': 'string', 'DBSubnetGroupName': 'string', 'PendingCloudwatchLogsExports': { 'LogTypesToEnable': [ 'string', ], 'LogTypesToDisable': [ 'string', ] } }, 'LatestRestorableTime': datetime(2015, 1, 1), 'MultiAZ': True|False, 'EngineVersion': 'string', 'AutoMinorVersionUpgrade': True|False, 'ReadReplicaSourceDBInstanceIdentifier': 'string', 'ReadReplicaDBInstanceIdentifiers': [ 'string', ], 'ReadReplicaDBClusterIdentifiers': [ 'string', ], 'LicenseModel': 'string', 'Iops': 123, 'OptionGroupMemberships': [ { 'OptionGroupName': 'string', 'Status': 'string' }, ], 'CharacterSetName': 'string', 'SecondaryAvailabilityZone': 'string', 'PubliclyAccessible': True|False, 'StatusInfos': [ { 'StatusType': 'string', 'Normal': True|False, 'Status': 'string', 'Message': 'string' }, ], 'StorageType': 'string', 'TdeCredentialArn': 'string', 'DbInstancePort': 123, 'DBClusterIdentifier': 'string', 
'StorageEncrypted': True|False, 'KmsKeyId': 'string', 'DbiResourceId': 'string', 'CACertificateIdentifier': 'string', 'DomainMemberships': [ { 'Domain': 'string', 'Status': 'string', 'FQDN': 'string', 'IAMRoleName': 'string' }, ], 'CopyTagsToSnapshot': True|False, 'MonitoringInterval': 123, 'EnhancedMonitoringResourceArn': 'string', 'MonitoringRoleArn': 'string', 'PromotionTier': 123, 'DBInstanceArn': 'string', 'Timezone': 'string', 'IAMDatabaseAuthenticationEnabled': True|False, 'PerformanceInsightsEnabled': True|False, 'PerformanceInsightsKMSKeyId': 'string', 'EnabledCloudwatchLogsExports': [ 'string', ] }, ] } **Response Structure** - *(dict) --* - **Marker** *(string) --* An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . - **DBInstances** *(list) --* A list of DBInstance instances. - *(dict) --* Contains the details of an Amazon Neptune DB instance. This data type is used as a response element in the DescribeDBInstances action. - **DBInstanceIdentifier** *(string) --* Contains a user-supplied database identifier. This identifier is the unique key that identifies a DB instance. - **DBInstanceClass** *(string) --* Contains the name of the compute and memory capacity class of the DB instance. - **Engine** *(string) --* Provides the name of the database engine to be used for this DB instance. - **DBInstanceStatus** *(string) --* Specifies the current state of this database. - **MasterUsername** *(string) --* Contains the master username for the DB instance. - **DBName** *(string) --* The database name. - **Endpoint** *(dict) --* Specifies the connection endpoint. - **Address** *(string) --* Specifies the DNS address of the DB instance. - **Port** *(integer) --* Specifies the port that the database engine is listening on. - **HostedZoneId** *(string) --* Specifies the ID that Amazon Route 53 assigns when you create a hosted zone. - **AllocatedStorage** *(integer) --* Specifies the allocated storage size specified in gibibytes. - **InstanceCreateTime** *(datetime) --* Provides the date and time the DB instance was created. - **PreferredBackupWindow** *(string) --* Specifies the daily time range during which automated backups are created if automated backups are enabled, as determined by the ``BackupRetentionPeriod`` . - **BackupRetentionPeriod** *(integer) --* Specifies the number of days for which automatic DB snapshots are retained. - **DBSecurityGroups** *(list) --* Provides List of DB security group elements containing only ``DBSecurityGroup.Name`` and ``DBSecurityGroup.Status`` subelements. - *(dict) --* Specifies membership in a designated DB security group. - **DBSecurityGroupName** *(string) --* The name of the DB security group. - **Status** *(string) --* The status of the DB security group. - **VpcSecurityGroups** *(list) --* Provides a list of VPC security group elements that the DB instance belongs to. - *(dict) --* This data type is used as a response element for queries on VPC security group membership. - **VpcSecurityGroupId** *(string) --* The name of the VPC security group. - **Status** *(string) --* The status of the VPC security group. - **DBParameterGroups** *(list) --* Provides the list of DB parameter groups applied to this DB instance. - *(dict) --* The status of the DB parameter group. 
This data type is used as a response element in the following actions: * CreateDBInstance * DeleteDBInstance * ModifyDBInstance * RebootDBInstance - **DBParameterGroupName** *(string) --* The name of the DB parameter group. - **ParameterApplyStatus** *(string) --* The status of parameter updates. - **AvailabilityZone** *(string) --* Specifies the name of the Availability Zone the DB instance is located in. - **DBSubnetGroup** *(dict) --* Specifies information on the subnet group associated with the DB instance, including the name, description, and subnets in the subnet group. - **DBSubnetGroupName** *(string) --* The name of the DB subnet group. - **DBSubnetGroupDescription** *(string) --* Provides the description of the DB subnet group. - **VpcId** *(string) --* Provides the VpcId of the DB subnet group. - **SubnetGroupStatus** *(string) --* Provides the status of the DB subnet group. - **Subnets** *(list) --* Contains a list of Subnet elements. - *(dict) --* Specifies a subnet. This data type is used as a response element in the DescribeDBSubnetGroups action. - **SubnetIdentifier** *(string) --* Specifies the identifier of the subnet. - **SubnetAvailabilityZone** *(dict) --* Specifies the EC2 Availability Zone that the subnet is in. - **Name** *(string) --* The name of the availability zone. - **SubnetStatus** *(string) --* Specifies the status of the subnet. - **DBSubnetGroupArn** *(string) --* The Amazon Resource Name (ARN) for the DB subnet group. - **PreferredMaintenanceWindow** *(string) --* Specifies the weekly time range during which system maintenance can occur, in Universal Coordinated Time (UTC). - **PendingModifiedValues** *(dict) --* Specifies that changes to the DB instance are pending. This element is only included when changes are pending. Specific changes are identified by subelements. - **DBInstanceClass** *(string) --* Contains the new ``DBInstanceClass`` for the DB instance that will be applied or is currently being applied. - **AllocatedStorage** *(integer) --* Contains the new ``AllocatedStorage`` size for the DB instance that will be applied or is currently being applied. - **MasterUserPassword** *(string) --* Contains the pending or currently-in-progress change of the master credentials for the DB instance. - **Port** *(integer) --* Specifies the pending port for the DB instance. - **BackupRetentionPeriod** *(integer) --* Specifies the pending number of days for which automated backups are retained. - **MultiAZ** *(boolean) --* Indicates that the Single-AZ DB instance is to change to a Multi-AZ deployment. - **EngineVersion** *(string) --* Indicates the database engine version. - **LicenseModel** *(string) --* The license model for the DB instance. Valid values: ``license-included`` | ``bring-your-own-license`` | ``general-public-license`` - **Iops** *(integer) --* Specifies the new Provisioned IOPS value for the DB instance that will be applied or is currently being applied. - **DBInstanceIdentifier** *(string) --* Contains the new ``DBInstanceIdentifier`` for the DB instance that will be applied or is currently being applied. - **StorageType** *(string) --* Specifies the storage type to be associated with the DB instance. - **CACertificateIdentifier** *(string) --* Specifies the identifier of the CA certificate for the DB instance. - **DBSubnetGroupName** *(string) --* The new DB subnet group for the DB instance. - **PendingCloudwatchLogsExports** *(dict) --* Specifies the CloudWatch logs to be exported. 
- **LogTypesToEnable** *(list) --* Log types that are in the process of being activated. After they are activated, these log types are exported to CloudWatch Logs. - *(string) --* - **LogTypesToDisable** *(list) --* Log types that are in the process of being deactivated. After they are deactivated, these log types aren't exported to CloudWatch Logs. - *(string) --* - **LatestRestorableTime** *(datetime) --* Specifies the latest time to which a database can be restored with point-in-time restore. - **MultiAZ** *(boolean) --* Specifies if the DB instance is a Multi-AZ deployment. - **EngineVersion** *(string) --* Indicates the database engine version. - **AutoMinorVersionUpgrade** *(boolean) --* Indicates that minor version patches are applied automatically. - **ReadReplicaSourceDBInstanceIdentifier** *(string) --* Contains the identifier of the source DB instance if this DB instance is a Read Replica. - **ReadReplicaDBInstanceIdentifiers** *(list) --* Contains one or more identifiers of the Read Replicas associated with this DB instance. - *(string) --* - **ReadReplicaDBClusterIdentifiers** *(list) --* Contains one or more identifiers of DB clusters that are Read Replicas of this DB instance. - *(string) --* - **LicenseModel** *(string) --* License model information for this DB instance. - **Iops** *(integer) --* Specifies the Provisioned IOPS (I/O operations per second) value. - **OptionGroupMemberships** *(list) --* Provides the list of option group memberships for this DB instance. - *(dict) --* Provides information on the option groups the DB instance is a member of. - **OptionGroupName** *(string) --* The name of the option group that the instance belongs to. - **Status** *(string) --* The status of the DB instance's option group membership. Valid values are: ``in-sync`` , ``pending-apply`` , ``pending-removal`` , ``pending-maintenance-apply`` , ``pending-maintenance-removal`` , ``applying`` , ``removing`` , and ``failed`` . - **CharacterSetName** *(string) --* If present, specifies the name of the character set that this instance is associated with. - **SecondaryAvailabilityZone** *(string) --* If present, specifies the name of the secondary Availability Zone for a DB instance with multi-AZ support. - **PubliclyAccessible** *(boolean) --* This flag should no longer be used. - **StatusInfos** *(list) --* The status of a Read Replica. If the instance is not a Read Replica, this is blank. - *(dict) --* Provides a list of status information for a DB instance. - **StatusType** *(string) --* This value is currently "read replication." - **Normal** *(boolean) --* Boolean value that is true if the instance is operating normally, or false if the instance is in an error state. - **Status** *(string) --* Status of the DB instance. For a StatusType of read replica, the values can be replicating, error, stopped, or terminated. - **Message** *(string) --* Details of the error if there is an error for the instance. If the instance is not in an error state, this value is blank. - **StorageType** *(string) --* Specifies the storage type associated with the DB instance. - **TdeCredentialArn** *(string) --* The ARN from the key store with which the instance is associated for TDE encryption. - **DbInstancePort** *(integer) --* Specifies the port that the DB instance listens on. If the DB instance is part of a DB cluster, this can be a different port than the DB cluster port. 
- **DBClusterIdentifier** *(string) --* If the DB instance is a member of a DB cluster, contains the name of the DB cluster that the DB instance is a member of. - **StorageEncrypted** *(boolean) --* Not supported: The encryption for DB instances is managed by the DB cluster. - **KmsKeyId** *(string) --* Not supported: The encryption for DB instances is managed by the DB cluster. - **DbiResourceId** *(string) --* The AWS Region-unique, immutable identifier for the DB instance. This identifier is found in AWS CloudTrail log entries whenever the AWS KMS key for the DB instance is accessed. - **CACertificateIdentifier** *(string) --* The identifier of the CA certificate for this DB instance. - **DomainMemberships** *(list) --* Not supported. - *(dict) --* An Active Directory Domain membership record associated with a DB instance. - **Domain** *(string) --* The identifier of the Active Directory Domain. - **Status** *(string) --* The status of the DB instance's Active Directory Domain membership (such as joined, pending-join, or failed). - **FQDN** *(string) --* The fully qualified domain name of the Active Directory Domain. - **IAMRoleName** *(string) --* The name of the IAM role to be used when making API calls to the Directory Service. - **CopyTagsToSnapshot** *(boolean) --* Specifies whether tags are copied from the DB instance to snapshots of the DB instance. - **MonitoringInterval** *(integer) --* The interval, in seconds, between points when Enhanced Monitoring metrics are collected for the DB instance. - **EnhancedMonitoringResourceArn** *(string) --* The Amazon Resource Name (ARN) of the Amazon CloudWatch Logs log stream that receives the Enhanced Monitoring metrics data for the DB instance. - **MonitoringRoleArn** *(string) --* The ARN for the IAM role that permits Neptune to send Enhanced Monitoring metrics to Amazon CloudWatch Logs. - **PromotionTier** *(integer) --* A value that specifies the order in which a Read Replica is promoted to the primary instance after a failure of the existing primary instance. - **DBInstanceArn** *(string) --* The Amazon Resource Name (ARN) for the DB instance. - **Timezone** *(string) --* Not supported. - **IAMDatabaseAuthenticationEnabled** *(boolean) --* True if AWS Identity and Access Management (IAM) authentication is enabled, and otherwise false. - **PerformanceInsightsEnabled** *(boolean) --* True if Performance Insights is enabled for the DB instance, and otherwise false. - **PerformanceInsightsKMSKeyId** *(string) --* The AWS KMS key identifier for encryption of Performance Insights data. The KMS key ID is the Amazon Resource Name (ARN), KMS key identifier, or the KMS key alias for the KMS encryption key. - **EnabledCloudwatchLogsExports** *(list) --* A list of log types that this DB instance is configured to export to CloudWatch Logs. - *(string) --* :type DBInstanceIdentifier: string :param DBInstanceIdentifier: The user-supplied instance identifier. If this parameter is specified, information from only the specific DB instance is returned. This parameter isn\'t case-sensitive. Constraints: * If supplied, must match the identifier of an existing DBInstance. :type Filters: list :param Filters: A filter that specifies one or more DB instances to describe. Supported filters: * ``db-cluster-id`` - Accepts DB cluster identifiers and DB cluster Amazon Resource Names (ARNs). The results list will only include information about the DB instances associated with the DB clusters identified by these ARNs. 
* ``db-instance-id`` - Accepts DB instance identifiers and DB instance Amazon Resource Names (ARNs). The results list will only include information about the DB instances identified by these ARNs. - *(dict) --* This type is not currently supported. - **Name** *(string) --* **[REQUIRED]** This parameter is not currently supported. - **Values** *(list) --* **[REQUIRED]** This parameter is not currently supported. - *(string) --* :type MaxRecords: integer :param MaxRecords: The maximum number of records to include in the response. If more records exist than the specified ``MaxRecords`` value, a pagination token called a marker is included in the response so that the remaining results can be retrieved. Default: 100 Constraints: Minimum 20, maximum 100. :type Marker: string :param Marker: An optional pagination token provided by a previous ``DescribeDBInstances`` request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . :rtype: dict :returns: """ pass def describe_db_parameter_groups(self, DBParameterGroupName: str = None, Filters: List = None, MaxRecords: int = None, Marker: str = None) -> Dict: """ Returns a list of ``DBParameterGroup`` descriptions. If a ``DBParameterGroupName`` is specified, the list will contain only the description of the specified DB parameter group. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DescribeDBParameterGroups>`_ **Request Syntax** :: response = client.describe_db_parameter_groups( DBParameterGroupName='string', Filters=[ { 'Name': 'string', 'Values': [ 'string', ] }, ], MaxRecords=123, Marker='string' ) **Response Syntax** :: { 'Marker': 'string', 'DBParameterGroups': [ { 'DBParameterGroupName': 'string', 'DBParameterGroupFamily': 'string', 'Description': 'string', 'DBParameterGroupArn': 'string' }, ] } **Response Structure** - *(dict) --* - **Marker** *(string) --* An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . - **DBParameterGroups** *(list) --* A list of DBParameterGroup instances. - *(dict) --* Contains the details of an Amazon Neptune DB parameter group. This data type is used as a response element in the DescribeDBParameterGroups action. - **DBParameterGroupName** *(string) --* Provides the name of the DB parameter group. - **DBParameterGroupFamily** *(string) --* Provides the name of the DB parameter group family that this DB parameter group is compatible with. - **Description** *(string) --* Provides the customer-specified description for this DB parameter group. - **DBParameterGroupArn** *(string) --* The Amazon Resource Name (ARN) for the DB parameter group. :type DBParameterGroupName: string :param DBParameterGroupName: The name of a specific DB parameter group to return details for. Constraints: * If supplied, must match the name of an existing DBParameterGroup. :type Filters: list :param Filters: This parameter is not currently supported. - *(dict) --* This type is not currently supported. - **Name** *(string) --* **[REQUIRED]** This parameter is not currently supported. - **Values** *(list) --* **[REQUIRED]** This parameter is not currently supported. - *(string) --* :type MaxRecords: integer :param MaxRecords: The maximum number of records to include in the response. 
If more records exist than the specified ``MaxRecords`` value, a pagination token called a marker is included in the response so that the remaining results can be retrieved. Default: 100 Constraints: Minimum 20, maximum 100. :type Marker: string :param Marker: An optional pagination token provided by a previous ``DescribeDBParameterGroups`` request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . :rtype: dict :returns: """ pass def describe_db_parameters(self, DBParameterGroupName: str, Source: str = None, Filters: List = None, MaxRecords: int = None, Marker: str = None) -> Dict: """ Returns the detailed parameter list for a particular DB parameter group. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DescribeDBParameters>`_ **Request Syntax** :: response = client.describe_db_parameters( DBParameterGroupName='string', Source='string', Filters=[ { 'Name': 'string', 'Values': [ 'string', ] }, ], MaxRecords=123, Marker='string' ) **Response Syntax** :: { 'Parameters': [ { 'ParameterName': 'string', 'ParameterValue': 'string', 'Description': 'string', 'Source': 'string', 'ApplyType': 'string', 'DataType': 'string', 'AllowedValues': 'string', 'IsModifiable': True|False, 'MinimumEngineVersion': 'string', 'ApplyMethod': 'immediate'|'pending-reboot' }, ], 'Marker': 'string' } **Response Structure** - *(dict) --* - **Parameters** *(list) --* A list of Parameter values. - *(dict) --* Specifies a parameter. - **ParameterName** *(string) --* Specifies the name of the parameter. - **ParameterValue** *(string) --* Specifies the value of the parameter. - **Description** *(string) --* Provides a description of the parameter. - **Source** *(string) --* Indicates the source of the parameter value. - **ApplyType** *(string) --* Specifies the engine specific parameters type. - **DataType** *(string) --* Specifies the valid data type for the parameter. - **AllowedValues** *(string) --* Specifies the valid range of values for the parameter. - **IsModifiable** *(boolean) --* Indicates whether (``true`` ) or not (``false`` ) the parameter can be modified. Some parameters have security or operational implications that prevent them from being changed. - **MinimumEngineVersion** *(string) --* The earliest engine version to which the parameter can apply. - **ApplyMethod** *(string) --* Indicates when to apply parameter updates. - **Marker** *(string) --* An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . :type DBParameterGroupName: string :param DBParameterGroupName: **[REQUIRED]** The name of a specific DB parameter group to return details for. Constraints: * If supplied, must match the name of an existing DBParameterGroup. :type Source: string :param Source: The parameter types to return. Default: All parameter types returned Valid Values: ``user | system | engine-default`` :type Filters: list :param Filters: This parameter is not currently supported. - *(dict) --* This type is not currently supported. - **Name** *(string) --* **[REQUIRED]** This parameter is not currently supported. - **Values** *(list) --* **[REQUIRED]** This parameter is not currently supported. - *(string) --* :type MaxRecords: integer :param MaxRecords: The maximum number of records to include in the response. 
If more records exist than the specified ``MaxRecords`` value, a pagination token called a marker is included in the response so that the remaining results can be retrieved. Default: 100 Constraints: Minimum 20, maximum 100. :type Marker: string :param Marker: An optional pagination token provided by a previous ``DescribeDBParameters`` request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . :rtype: dict :returns: """ pass def describe_db_subnet_groups(self, DBSubnetGroupName: str = None, Filters: List = None, MaxRecords: int = None, Marker: str = None) -> Dict: """ Returns a list of DBSubnetGroup descriptions. If a DBSubnetGroupName is specified, the list will contain only the descriptions of the specified DBSubnetGroup. For an overview of CIDR ranges, go to the `Wikipedia Tutorial <http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing>`__ . See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DescribeDBSubnetGroups>`_ **Request Syntax** :: response = client.describe_db_subnet_groups( DBSubnetGroupName='string', Filters=[ { 'Name': 'string', 'Values': [ 'string', ] }, ], MaxRecords=123, Marker='string' ) **Response Syntax** :: { 'Marker': 'string', 'DBSubnetGroups': [ { 'DBSubnetGroupName': 'string', 'DBSubnetGroupDescription': 'string', 'VpcId': 'string', 'SubnetGroupStatus': 'string', 'Subnets': [ { 'SubnetIdentifier': 'string', 'SubnetAvailabilityZone': { 'Name': 'string' }, 'SubnetStatus': 'string' }, ], 'DBSubnetGroupArn': 'string' }, ] } **Response Structure** - *(dict) --* - **Marker** *(string) --* An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . - **DBSubnetGroups** *(list) --* A list of DBSubnetGroup instances. - *(dict) --* Contains the details of an Amazon Neptune DB subnet group. This data type is used as a response element in the DescribeDBSubnetGroups action. - **DBSubnetGroupName** *(string) --* The name of the DB subnet group. - **DBSubnetGroupDescription** *(string) --* Provides the description of the DB subnet group. - **VpcId** *(string) --* Provides the VpcId of the DB subnet group. - **SubnetGroupStatus** *(string) --* Provides the status of the DB subnet group. - **Subnets** *(list) --* Contains a list of Subnet elements. - *(dict) --* Specifies a subnet. This data type is used as a response element in the DescribeDBSubnetGroups action. - **SubnetIdentifier** *(string) --* Specifies the identifier of the subnet. - **SubnetAvailabilityZone** *(dict) --* Specifies the EC2 Availability Zone that the subnet is in. - **Name** *(string) --* The name of the availability zone. - **SubnetStatus** *(string) --* Specifies the status of the subnet. - **DBSubnetGroupArn** *(string) --* The Amazon Resource Name (ARN) for the DB subnet group. :type DBSubnetGroupName: string :param DBSubnetGroupName: The name of the DB subnet group to return details for. :type Filters: list :param Filters: This parameter is not currently supported. - *(dict) --* This type is not currently supported. - **Name** *(string) --* **[REQUIRED]** This parameter is not currently supported. - **Values** *(list) --* **[REQUIRED]** This parameter is not currently supported. - *(string) --* :type MaxRecords: integer :param MaxRecords: The maximum number of records to include in the response. 
If more records exist than the specified ``MaxRecords`` value, a pagination token called a marker is included in the response so that the remaining results can be retrieved. Default: 100 Constraints: Minimum 20, maximum 100. :type Marker: string :param Marker: An optional pagination token provided by a previous DescribeDBSubnetGroups request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . :rtype: dict :returns: """ pass def describe_engine_default_cluster_parameters(self, DBParameterGroupFamily: str, Filters: List = None, MaxRecords: int = None, Marker: str = None) -> Dict: """ Returns the default engine and system parameter information for the cluster database engine. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DescribeEngineDefaultClusterParameters>`_ **Request Syntax** :: response = client.describe_engine_default_cluster_parameters( DBParameterGroupFamily='string', Filters=[ { 'Name': 'string', 'Values': [ 'string', ] }, ], MaxRecords=123, Marker='string' ) **Response Syntax** :: { 'EngineDefaults': { 'DBParameterGroupFamily': 'string', 'Marker': 'string', 'Parameters': [ { 'ParameterName': 'string', 'ParameterValue': 'string', 'Description': 'string', 'Source': 'string', 'ApplyType': 'string', 'DataType': 'string', 'AllowedValues': 'string', 'IsModifiable': True|False, 'MinimumEngineVersion': 'string', 'ApplyMethod': 'immediate'|'pending-reboot' }, ] } } **Response Structure** - *(dict) --* - **EngineDefaults** *(dict) --* Contains the result of a successful invocation of the DescribeEngineDefaultParameters action. - **DBParameterGroupFamily** *(string) --* Specifies the name of the DB parameter group family that the engine default parameters apply to. - **Marker** *(string) --* An optional pagination token provided by a previous EngineDefaults request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . - **Parameters** *(list) --* Contains a list of engine default parameters. - *(dict) --* Specifies a parameter. - **ParameterName** *(string) --* Specifies the name of the parameter. - **ParameterValue** *(string) --* Specifies the value of the parameter. - **Description** *(string) --* Provides a description of the parameter. - **Source** *(string) --* Indicates the source of the parameter value. - **ApplyType** *(string) --* Specifies the engine specific parameters type. - **DataType** *(string) --* Specifies the valid data type for the parameter. - **AllowedValues** *(string) --* Specifies the valid range of values for the parameter. - **IsModifiable** *(boolean) --* Indicates whether (``true`` ) or not (``false`` ) the parameter can be modified. Some parameters have security or operational implications that prevent them from being changed. - **MinimumEngineVersion** *(string) --* The earliest engine version to which the parameter can apply. - **ApplyMethod** *(string) --* Indicates when to apply parameter updates. :type DBParameterGroupFamily: string :param DBParameterGroupFamily: **[REQUIRED]** The name of the DB cluster parameter group family to return engine parameter information for. :type Filters: list :param Filters: This parameter is not currently supported. - *(dict) --* This type is not currently supported. - **Name** *(string) --* **[REQUIRED]** This parameter is not currently supported. 
- **Values** *(list) --* **[REQUIRED]** This parameter is not currently supported. - *(string) --* :type MaxRecords: integer :param MaxRecords: The maximum number of records to include in the response. If more records exist than the specified ``MaxRecords`` value, a pagination token called a marker is included in the response so that the remaining results can be retrieved. Default: 100 Constraints: Minimum 20, maximum 100. :type Marker: string :param Marker: An optional pagination token provided by a previous ``DescribeEngineDefaultClusterParameters`` request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . :rtype: dict :returns: """ pass def describe_engine_default_parameters(self, DBParameterGroupFamily: str, Filters: List = None, MaxRecords: int = None, Marker: str = None) -> Dict: """ Returns the default engine and system parameter information for the specified database engine. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DescribeEngineDefaultParameters>`_ **Request Syntax** :: response = client.describe_engine_default_parameters( DBParameterGroupFamily='string', Filters=[ { 'Name': 'string', 'Values': [ 'string', ] }, ], MaxRecords=123, Marker='string' ) **Response Syntax** :: { 'EngineDefaults': { 'DBParameterGroupFamily': 'string', 'Marker': 'string', 'Parameters': [ { 'ParameterName': 'string', 'ParameterValue': 'string', 'Description': 'string', 'Source': 'string', 'ApplyType': 'string', 'DataType': 'string', 'AllowedValues': 'string', 'IsModifiable': True|False, 'MinimumEngineVersion': 'string', 'ApplyMethod': 'immediate'|'pending-reboot' }, ] } } **Response Structure** - *(dict) --* - **EngineDefaults** *(dict) --* Contains the result of a successful invocation of the DescribeEngineDefaultParameters action. - **DBParameterGroupFamily** *(string) --* Specifies the name of the DB parameter group family that the engine default parameters apply to. - **Marker** *(string) --* An optional pagination token provided by a previous EngineDefaults request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . - **Parameters** *(list) --* Contains a list of engine default parameters. - *(dict) --* Specifies a parameter. - **ParameterName** *(string) --* Specifies the name of the parameter. - **ParameterValue** *(string) --* Specifies the value of the parameter. - **Description** *(string) --* Provides a description of the parameter. - **Source** *(string) --* Indicates the source of the parameter value. - **ApplyType** *(string) --* Specifies the engine specific parameters type. - **DataType** *(string) --* Specifies the valid data type for the parameter. - **AllowedValues** *(string) --* Specifies the valid range of values for the parameter. - **IsModifiable** *(boolean) --* Indicates whether (``true`` ) or not (``false`` ) the parameter can be modified. Some parameters have security or operational implications that prevent them from being changed. - **MinimumEngineVersion** *(string) --* The earliest engine version to which the parameter can apply. - **ApplyMethod** *(string) --* Indicates when to apply parameter updates. :type DBParameterGroupFamily: string :param DBParameterGroupFamily: **[REQUIRED]** The name of the DB parameter group family. :type Filters: list :param Filters: Not currently supported. - *(dict) --* This type is not currently supported. 
- **Name** *(string) --* **[REQUIRED]** This parameter is not currently supported. - **Values** *(list) --* **[REQUIRED]** This parameter is not currently supported. - *(string) --* :type MaxRecords: integer :param MaxRecords: The maximum number of records to include in the response. If more records exist than the specified ``MaxRecords`` value, a pagination token called a marker is included in the response so that the remaining results can be retrieved. Default: 100 Constraints: Minimum 20, maximum 100. :type Marker: string :param Marker: An optional pagination token provided by a previous ``DescribeEngineDefaultParameters`` request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . :rtype: dict :returns: """ pass def describe_event_categories(self, SourceType: str = None, Filters: List = None) -> Dict: """ Displays a list of categories for all event source types, or, if specified, for a specified source type. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DescribeEventCategories>`_ **Request Syntax** :: response = client.describe_event_categories( SourceType='string', Filters=[ { 'Name': 'string', 'Values': [ 'string', ] }, ] ) **Response Syntax** :: { 'EventCategoriesMapList': [ { 'SourceType': 'string', 'EventCategories': [ 'string', ] }, ] } **Response Structure** - *(dict) --* - **EventCategoriesMapList** *(list) --* A list of EventCategoriesMap data types. - *(dict) --* Contains the results of a successful invocation of the DescribeEventCategories action. - **SourceType** *(string) --* The source type that the returned categories belong to - **EventCategories** *(list) --* The event categories for the specified source type - *(string) --* :type SourceType: string :param SourceType: The type of source that is generating the events. Valid values: db-instance | db-parameter-group | db-security-group | db-snapshot :type Filters: list :param Filters: This parameter is not currently supported. - *(dict) --* This type is not currently supported. - **Name** *(string) --* **[REQUIRED]** This parameter is not currently supported. - **Values** *(list) --* **[REQUIRED]** This parameter is not currently supported. - *(string) --* :rtype: dict :returns: """ pass def describe_event_subscriptions(self, SubscriptionName: str = None, Filters: List = None, MaxRecords: int = None, Marker: str = None) -> Dict: """ Lists all the subscription descriptions for a customer account. The description for a subscription includes SubscriptionName, SNSTopicARN, CustomerID, SourceType, SourceID, CreationTime, and Status. If you specify a SubscriptionName, lists the description for that subscription. 
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DescribeEventSubscriptions>`_ **Request Syntax** :: response = client.describe_event_subscriptions( SubscriptionName='string', Filters=[ { 'Name': 'string', 'Values': [ 'string', ] }, ], MaxRecords=123, Marker='string' ) **Response Syntax** :: { 'Marker': 'string', 'EventSubscriptionsList': [ { 'CustomerAwsId': 'string', 'CustSubscriptionId': 'string', 'SnsTopicArn': 'string', 'Status': 'string', 'SubscriptionCreationTime': 'string', 'SourceType': 'string', 'SourceIdsList': [ 'string', ], 'EventCategoriesList': [ 'string', ], 'Enabled': True|False, 'EventSubscriptionArn': 'string' }, ] } **Response Structure** - *(dict) --* - **Marker** *(string) --* An optional pagination token provided by a previous DescribeOrderableDBInstanceOptions request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . - **EventSubscriptionsList** *(list) --* A list of EventSubscriptions data types. - *(dict) --* Contains the results of a successful invocation of the DescribeEventSubscriptions action. - **CustomerAwsId** *(string) --* The AWS customer account associated with the event notification subscription. - **CustSubscriptionId** *(string) --* The event notification subscription Id. - **SnsTopicArn** *(string) --* The topic ARN of the event notification subscription. - **Status** *(string) --* The status of the event notification subscription. Constraints: Can be one of the following: creating | modifying | deleting | active | no-permission | topic-not-exist The status "no-permission" indicates that Neptune no longer has permission to post to the SNS topic. The status "topic-not-exist" indicates that the topic was deleted after the subscription was created. - **SubscriptionCreationTime** *(string) --* The time the event notification subscription was created. - **SourceType** *(string) --* The source type for the event notification subscription. - **SourceIdsList** *(list) --* A list of source IDs for the event notification subscription. - *(string) --* - **EventCategoriesList** *(list) --* A list of event categories for the event notification subscription. - *(string) --* - **Enabled** *(boolean) --* A Boolean value indicating if the subscription is enabled. True indicates the subscription is enabled. - **EventSubscriptionArn** *(string) --* The Amazon Resource Name (ARN) for the event subscription. :type SubscriptionName: string :param SubscriptionName: The name of the event notification subscription you want to describe. :type Filters: list :param Filters: This parameter is not currently supported. - *(dict) --* This type is not currently supported. - **Name** *(string) --* **[REQUIRED]** This parameter is not currently supported. - **Values** *(list) --* **[REQUIRED]** This parameter is not currently supported. - *(string) --* :type MaxRecords: integer :param MaxRecords: The maximum number of records to include in the response. If more records exist than the specified ``MaxRecords`` value, a pagination token called a marker is included in the response so that the remaining results can be retrieved. Default: 100 Constraints: Minimum 20, maximum 100. :type Marker: string :param Marker: An optional pagination token provided by a previous DescribeOrderableDBInstanceOptions request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . 
:rtype: dict :returns: """ pass def describe_events(self, SourceIdentifier: str = None, SourceType: str = None, StartTime: datetime = None, EndTime: datetime = None, Duration: int = None, EventCategories: List = None, Filters: List = None, MaxRecords: int = None, Marker: str = None) -> Dict: """ Returns events related to DB instances, DB security groups, DB snapshots, and DB parameter groups for the past 14 days. Events specific to a particular DB instance, DB security group, database snapshot, or DB parameter group can be obtained by providing the name as a parameter. By default, the past hour of events are returned. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DescribeEvents>`_ **Request Syntax** :: response = client.describe_events( SourceIdentifier='string', SourceType='db-instance'|'db-parameter-group'|'db-security-group'|'db-snapshot'|'db-cluster'|'db-cluster-snapshot', StartTime=datetime(2015, 1, 1), EndTime=datetime(2015, 1, 1), Duration=123, EventCategories=[ 'string', ], Filters=[ { 'Name': 'string', 'Values': [ 'string', ] }, ], MaxRecords=123, Marker='string' ) **Response Syntax** :: { 'Marker': 'string', 'Events': [ { 'SourceIdentifier': 'string', 'SourceType': 'db-instance'|'db-parameter-group'|'db-security-group'|'db-snapshot'|'db-cluster'|'db-cluster-snapshot', 'Message': 'string', 'EventCategories': [ 'string', ], 'Date': datetime(2015, 1, 1), 'SourceArn': 'string' }, ] } **Response Structure** - *(dict) --* - **Marker** *(string) --* An optional pagination token provided by a previous Events request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . - **Events** *(list) --* A list of Event instances. - *(dict) --* This data type is used as a response element in the DescribeEvents action. - **SourceIdentifier** *(string) --* Provides the identifier for the source of the event. - **SourceType** *(string) --* Specifies the source type for this event. - **Message** *(string) --* Provides the text of this event. - **EventCategories** *(list) --* Specifies the category for the event. - *(string) --* - **Date** *(datetime) --* Specifies the date and time of the event. - **SourceArn** *(string) --* The Amazon Resource Name (ARN) for the event. :type SourceIdentifier: string :param SourceIdentifier: The identifier of the event source for which events are returned. If not specified, then all sources are included in the response. Constraints: * If SourceIdentifier is supplied, SourceType must also be provided. * If the source type is ``DBInstance`` , then a ``DBInstanceIdentifier`` must be supplied. * If the source type is ``DBSecurityGroup`` , a ``DBSecurityGroupName`` must be supplied. * If the source type is ``DBParameterGroup`` , a ``DBParameterGroupName`` must be supplied. * If the source type is ``DBSnapshot`` , a ``DBSnapshotIdentifier`` must be supplied. * Cannot end with a hyphen or contain two consecutive hyphens. :type SourceType: string :param SourceType: The event source to retrieve events for. If no value is specified, all events are returned. :type StartTime: datetime :param StartTime: The beginning of the time interval to retrieve events for, specified in ISO 8601 format. For more information about ISO 8601, go to the `ISO8601 Wikipedia page. <http://en.wikipedia.org/wiki/ISO_8601>`__ Example: 2009-07-08T18:00Z :type EndTime: datetime :param EndTime: The end of the time interval for which to retrieve events, specified in ISO 8601 format. 
For more information about ISO 8601, go to the `ISO8601 Wikipedia page. <http://en.wikipedia.org/wiki/ISO_8601>`__ Example: 2009-07-08T18:00Z :type Duration: integer :param Duration: The number of minutes to retrieve events for. Default: 60 :type EventCategories: list :param EventCategories: A list of event categories that trigger notifications for a event notification subscription. - *(string) --* :type Filters: list :param Filters: This parameter is not currently supported. - *(dict) --* This type is not currently supported. - **Name** *(string) --* **[REQUIRED]** This parameter is not currently supported. - **Values** *(list) --* **[REQUIRED]** This parameter is not currently supported. - *(string) --* :type MaxRecords: integer :param MaxRecords: The maximum number of records to include in the response. If more records exist than the specified ``MaxRecords`` value, a pagination token called a marker is included in the response so that the remaining results can be retrieved. Default: 100 Constraints: Minimum 20, maximum 100. :type Marker: string :param Marker: An optional pagination token provided by a previous DescribeEvents request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . :rtype: dict :returns: """ pass def describe_orderable_db_instance_options(self, Engine: str, EngineVersion: str = None, DBInstanceClass: str = None, LicenseModel: str = None, Vpc: bool = None, Filters: List = None, MaxRecords: int = None, Marker: str = None) -> Dict: """ Returns a list of orderable DB instance options for the specified engine. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DescribeOrderableDBInstanceOptions>`_ **Request Syntax** :: response = client.describe_orderable_db_instance_options( Engine='string', EngineVersion='string', DBInstanceClass='string', LicenseModel='string', Vpc=True|False, Filters=[ { 'Name': 'string', 'Values': [ 'string', ] }, ], MaxRecords=123, Marker='string' ) **Response Syntax** :: { 'OrderableDBInstanceOptions': [ { 'Engine': 'string', 'EngineVersion': 'string', 'DBInstanceClass': 'string', 'LicenseModel': 'string', 'AvailabilityZones': [ { 'Name': 'string' }, ], 'MultiAZCapable': True|False, 'ReadReplicaCapable': True|False, 'Vpc': True|False, 'SupportsStorageEncryption': True|False, 'StorageType': 'string', 'SupportsIops': True|False, 'SupportsEnhancedMonitoring': True|False, 'SupportsIAMDatabaseAuthentication': True|False, 'SupportsPerformanceInsights': True|False, 'MinStorageSize': 123, 'MaxStorageSize': 123, 'MinIopsPerDbInstance': 123, 'MaxIopsPerDbInstance': 123, 'MinIopsPerGib': 123.0, 'MaxIopsPerGib': 123.0 }, ], 'Marker': 'string' } **Response Structure** - *(dict) --* - **OrderableDBInstanceOptions** *(list) --* An OrderableDBInstanceOption structure containing information about orderable options for the DB instance. - *(dict) --* Contains a list of available options for a DB instance. This data type is used as a response element in the DescribeOrderableDBInstanceOptions action. - **Engine** *(string) --* The engine type of a DB instance. - **EngineVersion** *(string) --* The engine version of a DB instance. - **DBInstanceClass** *(string) --* The DB instance class for a DB instance. - **LicenseModel** *(string) --* The license model for a DB instance. - **AvailabilityZones** *(list) --* A list of Availability Zones for a DB instance. - *(dict) --* Specifies an Availability Zone. 
- **Name** *(string) --* The name of the availability zone. - **MultiAZCapable** *(boolean) --* Indicates whether a DB instance is Multi-AZ capable. - **ReadReplicaCapable** *(boolean) --* Indicates whether a DB instance can have a Read Replica. - **Vpc** *(boolean) --* Indicates whether a DB instance is in a VPC. - **SupportsStorageEncryption** *(boolean) --* Indicates whether a DB instance supports encrypted storage. - **StorageType** *(string) --* Indicates the storage type for a DB instance. - **SupportsIops** *(boolean) --* Indicates whether a DB instance supports provisioned IOPS. - **SupportsEnhancedMonitoring** *(boolean) --* Indicates whether a DB instance supports Enhanced Monitoring at intervals from 1 to 60 seconds. - **SupportsIAMDatabaseAuthentication** *(boolean) --* Indicates whether a DB instance supports IAM database authentication. - **SupportsPerformanceInsights** *(boolean) --* True if a DB instance supports Performance Insights, otherwise false. - **MinStorageSize** *(integer) --* Minimum storage size for a DB instance. - **MaxStorageSize** *(integer) --* Maximum storage size for a DB instance. - **MinIopsPerDbInstance** *(integer) --* Minimum total provisioned IOPS for a DB instance. - **MaxIopsPerDbInstance** *(integer) --* Maximum total provisioned IOPS for a DB instance. - **MinIopsPerGib** *(float) --* Minimum provisioned IOPS per GiB for a DB instance. - **MaxIopsPerGib** *(float) --* Maximum provisioned IOPS per GiB for a DB instance. - **Marker** *(string) --* An optional pagination token provided by a previous OrderableDBInstanceOptions request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . :type Engine: string :param Engine: **[REQUIRED]** The name of the engine to retrieve DB instance options for. :type EngineVersion: string :param EngineVersion: The engine version filter value. Specify this parameter to show only the available offerings matching the specified engine version. :type DBInstanceClass: string :param DBInstanceClass: The DB instance class filter value. Specify this parameter to show only the available offerings matching the specified DB instance class. :type LicenseModel: string :param LicenseModel: The license model filter value. Specify this parameter to show only the available offerings matching the specified license model. :type Vpc: boolean :param Vpc: The VPC filter value. Specify this parameter to show only the available VPC or non-VPC offerings. :type Filters: list :param Filters: This parameter is not currently supported. - *(dict) --* This type is not currently supported. - **Name** *(string) --* **[REQUIRED]** This parameter is not currently supported. - **Values** *(list) --* **[REQUIRED]** This parameter is not currently supported. - *(string) --* :type MaxRecords: integer :param MaxRecords: The maximum number of records to include in the response. If more records exist than the specified ``MaxRecords`` value, a pagination token called a marker is included in the response so that the remaining results can be retrieved. Default: 100 Constraints: Minimum 20, maximum 100. :type Marker: string :param Marker: An optional pagination token provided by a previous DescribeOrderableDBInstanceOptions request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by ``MaxRecords`` . 
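An illustrative alternative to manual ``Marker`` handling, assuming ``client`` is a Neptune client (``boto3.client('neptune')``) and that this operation is pageable in the installed SDK version, which ``can_paginate`` checks before use::

  if client.can_paginate('describe_orderable_db_instance_options'):
      paginator = client.get_paginator('describe_orderable_db_instance_options')
      # The paginator handles Marker/MaxRecords internally.
      for page in paginator.paginate(Engine='neptune'):
          for option in page['OrderableDBInstanceOptions']:
              print(option['DBInstanceClass'], option['EngineVersion'])
  else:
      # Fall back to a single request when pagination is not available.
      response = client.describe_orderable_db_instance_options(Engine='neptune')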
:rtype: dict :returns: """ pass def describe_pending_maintenance_actions(self, ResourceIdentifier: str = None, Filters: List = None, Marker: str = None, MaxRecords: int = None) -> Dict: """ Returns a list of resources (for example, DB instances) that have at least one pending maintenance action. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DescribePendingMaintenanceActions>`_ **Request Syntax** :: response = client.describe_pending_maintenance_actions( ResourceIdentifier='string', Filters=[ { 'Name': 'string', 'Values': [ 'string', ] }, ], Marker='string', MaxRecords=123 ) **Response Syntax** :: { 'PendingMaintenanceActions': [ { 'ResourceIdentifier': 'string', 'PendingMaintenanceActionDetails': [ { 'Action': 'string', 'AutoAppliedAfterDate': datetime(2015, 1, 1), 'ForcedApplyDate': datetime(2015, 1, 1), 'OptInStatus': 'string', 'CurrentApplyDate': datetime(2015, 1, 1), 'Description': 'string' }, ] }, ], 'Marker': 'string' } **Response Structure** - *(dict) --* - **PendingMaintenanceActions** *(list) --* A list of the pending maintenance actions for the resource. - *(dict) --* Describes the pending maintenance actions for a resource. - **ResourceIdentifier** *(string) --* The ARN of the resource that has pending maintenance actions. - **PendingMaintenanceActionDetails** *(list) --* A list that provides details about the pending maintenance actions for the resource. - *(dict) --* Provides information about a pending maintenance action for a resource. - **Action** *(string) --* The type of pending maintenance action that is available for the resource. - **AutoAppliedAfterDate** *(datetime) --* The date of the maintenance window when the action is applied. The maintenance action is applied to the resource during its first maintenance window after this date. If this date is specified, any ``next-maintenance`` opt-in requests are ignored. - **ForcedApplyDate** *(datetime) --* The date when the maintenance action is automatically applied. The maintenance action is applied to the resource on this date regardless of the maintenance window for the resource. If this date is specified, any ``immediate`` opt-in requests are ignored. - **OptInStatus** *(string) --* Indicates the type of opt-in request that has been received for the resource. - **CurrentApplyDate** *(datetime) --* The effective date when the pending maintenance action is applied to the resource. This date takes into account opt-in requests received from the ApplyPendingMaintenanceAction API, the ``AutoAppliedAfterDate`` , and the ``ForcedApplyDate`` . This value is blank if an opt-in request has not been received and nothing has been specified as ``AutoAppliedAfterDate`` or ``ForcedApplyDate`` . - **Description** *(string) --* A description providing more detail about the maintenance action. - **Marker** *(string) --* An optional pagination token provided by a previous ``DescribePendingMaintenanceActions`` request. If this parameter is specified, the response includes only records beyond the marker, up to a number of records specified by ``MaxRecords`` . :type ResourceIdentifier: string :param ResourceIdentifier: The ARN of a resource to return pending maintenance actions for. :type Filters: list :param Filters: A filter that specifies one or more resources to return pending maintenance actions for. Supported filters: * ``db-cluster-id`` - Accepts DB cluster identifiers and DB cluster Amazon Resource Names (ARNs). 
The results list will only include pending maintenance actions for the DB clusters identified by these ARNs. * ``db-instance-id`` - Accepts DB instance identifiers and DB instance ARNs. The results list will only include pending maintenance actions for the DB instances identified by these ARNs. - *(dict) --* This type is not currently supported. - **Name** *(string) --* **[REQUIRED]** This parameter is not currently supported. - **Values** *(list) --* **[REQUIRED]** This parameter is not currently supported. - *(string) --* :type Marker: string :param Marker: An optional pagination token provided by a previous ``DescribePendingMaintenanceActions`` request. If this parameter is specified, the response includes only records beyond the marker, up to a number of records specified by ``MaxRecords`` . :type MaxRecords: integer :param MaxRecords: The maximum number of records to include in the response. If more records exist than the specified ``MaxRecords`` value, a pagination token called a marker is included in the response so that the remaining results can be retrieved. Default: 100 Constraints: Minimum 20, maximum 100. :rtype: dict :returns: """ pass def describe_valid_db_instance_modifications(self, DBInstanceIdentifier: str) -> Dict: """ You can call DescribeValidDBInstanceModifications to learn what modifications you can make to your DB instance. You can use this information when you call ModifyDBInstance . See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/DescribeValidDBInstanceModifications>`_ **Request Syntax** :: response = client.describe_valid_db_instance_modifications( DBInstanceIdentifier='string' ) **Response Syntax** :: { 'ValidDBInstanceModificationsMessage': { 'Storage': [ { 'StorageType': 'string', 'StorageSize': [ { 'From': 123, 'To': 123, 'Step': 123 }, ], 'ProvisionedIops': [ { 'From': 123, 'To': 123, 'Step': 123 }, ], 'IopsToStorageRatio': [ { 'From': 123.0, 'To': 123.0 }, ] }, ] } } **Response Structure** - *(dict) --* - **ValidDBInstanceModificationsMessage** *(dict) --* Information about valid modifications that you can make to your DB instance. Contains the result of a successful call to the DescribeValidDBInstanceModifications action. You can use this information when you call ModifyDBInstance . - **Storage** *(list) --* Valid storage options for your DB instance. - *(dict) --* Information about valid modifications that you can make to your DB instance. Contains the result of a successful call to the DescribeValidDBInstanceModifications action. - **StorageType** *(string) --* The valid storage types for your DB instance. For example, gp2, io1. - **StorageSize** *(list) --* The valid range of storage in gibibytes. For example, 100 to 16384. - *(dict) --* A range of integer values. - **From** *(integer) --* The minimum value in the range. - **To** *(integer) --* The maximum value in the range. - **Step** *(integer) --* The step value for the range. For example, if you have a range of 5,000 to 10,000, with a step value of 1,000, the valid values start at 5,000 and step up by 1,000. Even though 7,500 is within the range, it isn't a valid value for the range. The valid values are 5,000, 6,000, 7,000, 8,000... - **ProvisionedIops** *(list) --* The valid range of provisioned IOPS. For example, 1000-20000. - *(dict) --* A range of integer values. - **From** *(integer) --* The minimum value in the range. - **To** *(integer) --* The maximum value in the range. - **Step** *(integer) --* The step value for the range. 
For example, if you have a range of 5,000 to 10,000, with a step value of 1,000, the valid values start at 5,000 and step up by 1,000. Even though 7,500 is within the range, it isn't a valid value for the range. The valid values are 5,000, 6,000, 7,000, 8,000... - **IopsToStorageRatio** *(list) --* The valid range of Provisioned IOPS to gibibytes of storage multiplier. For example, 3-10, which means that provisioned IOPS can be between 3 and 10 times storage. - *(dict) --* A range of double values. - **From** *(float) --* The minimum value in the range. - **To** *(float) --* The maximum value in the range. :type DBInstanceIdentifier: string :param DBInstanceIdentifier: **[REQUIRED]** The customer identifier or the ARN of your DB instance. :rtype: dict :returns: """ pass def failover_db_cluster(self, DBClusterIdentifier: str = None, TargetDBInstanceIdentifier: str = None) -> Dict: """ Forces a failover for a DB cluster. A failover for a DB cluster promotes one of the Read Replicas (read-only instances) in the DB cluster to be the primary instance (the cluster writer). Amazon Neptune will automatically fail over to a Read Replica, if one exists, when the primary instance fails. You can force a failover when you want to simulate a failure of a primary instance for testing. Because each instance in a DB cluster has its own endpoint address, you will need to clean up and re-establish any existing connections that use those endpoint addresses when the failover is complete. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/FailoverDBCluster>`_ **Request Syntax** :: response = client.failover_db_cluster( DBClusterIdentifier='string', TargetDBInstanceIdentifier='string' ) **Response Syntax** :: { 'DBCluster': { 'AllocatedStorage': 123, 'AvailabilityZones': [ 'string', ], 'BackupRetentionPeriod': 123, 'CharacterSetName': 'string', 'DatabaseName': 'string', 'DBClusterIdentifier': 'string', 'DBClusterParameterGroup': 'string', 'DBSubnetGroup': 'string', 'Status': 'string', 'PercentProgress': 'string', 'EarliestRestorableTime': datetime(2015, 1, 1), 'Endpoint': 'string', 'ReaderEndpoint': 'string', 'MultiAZ': True|False, 'Engine': 'string', 'EngineVersion': 'string', 'LatestRestorableTime': datetime(2015, 1, 1), 'Port': 123, 'MasterUsername': 'string', 'DBClusterOptionGroupMemberships': [ { 'DBClusterOptionGroupName': 'string', 'Status': 'string' }, ], 'PreferredBackupWindow': 'string', 'PreferredMaintenanceWindow': 'string', 'ReplicationSourceIdentifier': 'string', 'ReadReplicaIdentifiers': [ 'string', ], 'DBClusterMembers': [ { 'DBInstanceIdentifier': 'string', 'IsClusterWriter': True|False, 'DBClusterParameterGroupStatus': 'string', 'PromotionTier': 123 }, ], 'VpcSecurityGroups': [ { 'VpcSecurityGroupId': 'string', 'Status': 'string' }, ], 'HostedZoneId': 'string', 'StorageEncrypted': True|False, 'KmsKeyId': 'string', 'DbClusterResourceId': 'string', 'DBClusterArn': 'string', 'AssociatedRoles': [ { 'RoleArn': 'string', 'Status': 'string' }, ], 'IAMDatabaseAuthenticationEnabled': True|False, 'CloneGroupId': 'string', 'ClusterCreateTime': datetime(2015, 1, 1) } } **Response Structure** - *(dict) --* - **DBCluster** *(dict) --* Contains the details of an Amazon Neptune DB cluster. This data type is used as a response element in the DescribeDBClusters action. - **AllocatedStorage** *(integer) --* ``AllocatedStorage`` always returns 1, because Neptune DB cluster storage size is not fixed, but instead automatically adjusts as needed. 
- **AvailabilityZones** *(list) --* Provides the list of EC2 Availability Zones that instances in the DB cluster can be created in. - *(string) --* - **BackupRetentionPeriod** *(integer) --* Specifies the number of days for which automatic DB snapshots are retained. - **CharacterSetName** *(string) --* If present, specifies the name of the character set that this cluster is associated with. - **DatabaseName** *(string) --* Contains the name of the initial database of this DB cluster that was provided at create time, if one was specified when the DB cluster was created. This same name is returned for the life of the DB cluster. - **DBClusterIdentifier** *(string) --* Contains a user-supplied DB cluster identifier. This identifier is the unique key that identifies a DB cluster. - **DBClusterParameterGroup** *(string) --* Specifies the name of the DB cluster parameter group for the DB cluster. - **DBSubnetGroup** *(string) --* Specifies information on the subnet group associated with the DB cluster, including the name, description, and subnets in the subnet group. - **Status** *(string) --* Specifies the current state of this DB cluster. - **PercentProgress** *(string) --* Specifies the progress of the operation as a percentage. - **EarliestRestorableTime** *(datetime) --* Specifies the earliest time to which a database can be restored with point-in-time restore. - **Endpoint** *(string) --* Specifies the connection endpoint for the primary instance of the DB cluster. - **ReaderEndpoint** *(string) --* The reader endpoint for the DB cluster. The reader endpoint for a DB cluster load-balances connections across the Read Replicas that are available in a DB cluster. As clients request new connections to the reader endpoint, Neptune distributes the connection requests among the Read Replicas in the DB cluster. This functionality can help balance your read workload across multiple Read Replicas in your DB cluster. If a failover occurs, and the Read Replica that you are connected to is promoted to be the primary instance, your connection is dropped. To continue sending your read workload to other Read Replicas in the cluster, you can then reconnect to the reader endpoint. - **MultiAZ** *(boolean) --* Specifies whether the DB cluster has instances in multiple Availability Zones. - **Engine** *(string) --* Provides the name of the database engine to be used for this DB cluster. - **EngineVersion** *(string) --* Indicates the database engine version. - **LatestRestorableTime** *(datetime) --* Specifies the latest time to which a database can be restored with point-in-time restore. - **Port** *(integer) --* Specifies the port that the database engine is listening on. - **MasterUsername** *(string) --* Contains the master username for the DB cluster. - **DBClusterOptionGroupMemberships** *(list) --* Provides the list of option group memberships for this DB cluster. - *(dict) --* Contains status information for a DB cluster option group. - **DBClusterOptionGroupName** *(string) --* Specifies the name of the DB cluster option group. - **Status** *(string) --* Specifies the status of the DB cluster option group. - **PreferredBackupWindow** *(string) --* Specifies the daily time range during which automated backups are created if automated backups are enabled, as determined by the ``BackupRetentionPeriod`` . - **PreferredMaintenanceWindow** *(string) --* Specifies the weekly time range during which system maintenance can occur, in Universal Coordinated Time (UTC). 
- **ReplicationSourceIdentifier** *(string) --* Not supported by Neptune. - **ReadReplicaIdentifiers** *(list) --* Contains one or more identifiers of the Read Replicas associated with this DB cluster. - *(string) --* - **DBClusterMembers** *(list) --* Provides the list of instances that make up the DB cluster. - *(dict) --* Contains information about an instance that is part of a DB cluster. - **DBInstanceIdentifier** *(string) --* Specifies the instance identifier for this member of the DB cluster. - **IsClusterWriter** *(boolean) --* Value that is ``true`` if the cluster member is the primary instance for the DB cluster and ``false`` otherwise. - **DBClusterParameterGroupStatus** *(string) --* Specifies the status of the DB cluster parameter group for this member of the DB cluster. - **PromotionTier** *(integer) --* A value that specifies the order in which a Read Replica is promoted to the primary instance after a failure of the existing primary instance. - **VpcSecurityGroups** *(list) --* Provides a list of VPC security groups that the DB cluster belongs to. - *(dict) --* This data type is used as a response element for queries on VPC security group membership. - **VpcSecurityGroupId** *(string) --* The name of the VPC security group. - **Status** *(string) --* The status of the VPC security group. - **HostedZoneId** *(string) --* Specifies the ID that Amazon Route 53 assigns when you create a hosted zone. - **StorageEncrypted** *(boolean) --* Specifies whether the DB cluster is encrypted. - **KmsKeyId** *(string) --* If ``StorageEncrypted`` is true, the AWS KMS key identifier for the encrypted DB cluster. - **DbClusterResourceId** *(string) --* The AWS Region-unique, immutable identifier for the DB cluster. This identifier is found in AWS CloudTrail log entries whenever the AWS KMS key for the DB cluster is accessed. - **DBClusterArn** *(string) --* The Amazon Resource Name (ARN) for the DB cluster. - **AssociatedRoles** *(list) --* Provides a list of the AWS Identity and Access Management (IAM) roles that are associated with the DB cluster. IAM roles that are associated with a DB cluster grant permission for the DB cluster to access other AWS services on your behalf. - *(dict) --* Describes an AWS Identity and Access Management (IAM) role that is associated with a DB cluster. - **RoleArn** *(string) --* The Amazon Resource Name (ARN) of the IAM role that is associated with the DB cluster. - **Status** *(string) --* Describes the state of association between the IAM role and the DB cluster. The Status property returns one of the following values: * ``ACTIVE`` - the IAM role ARN is associated with the DB cluster and can be used to access other AWS services on your behalf. * ``PENDING`` - the IAM role ARN is being associated with the DB cluster. * ``INVALID`` - the IAM role ARN is associated with the DB cluster, but the DB cluster is unable to assume the IAM role in order to access other AWS services on your behalf. - **IAMDatabaseAuthenticationEnabled** *(boolean) --* True if mapping of AWS Identity and Access Management (IAM) accounts to database accounts is enabled, and otherwise false. - **CloneGroupId** *(string) --* Identifies the clone group to which the DB cluster is associated. - **ClusterCreateTime** *(datetime) --* Specifies the time when the DB cluster was created, in Universal Coordinated Time (UTC). :type DBClusterIdentifier: string :param DBClusterIdentifier: A DB cluster identifier to force a failover for. This parameter is not case-sensitive. 
Constraints: * Must match the identifier of an existing DBCluster. :type TargetDBInstanceIdentifier: string :param TargetDBInstanceIdentifier: The name of the instance to promote to the primary instance. You must specify the instance identifier for an Read Replica in the DB cluster. For example, ``mydbcluster-replica1`` . :rtype: dict :returns: """ pass def generate_presigned_url(self, ClientMethod: str = None, Params: Dict = None, ExpiresIn: int = None, HttpMethod: str = None): """ Generate a presigned url given a client, its method, and arguments :type ClientMethod: string :param ClientMethod: The client method to presign for :type Params: dict :param Params: The parameters normally passed to ``ClientMethod``. :type ExpiresIn: int :param ExpiresIn: The number of seconds the presigned url is valid for. By default it expires in an hour (3600 seconds) :type HttpMethod: string :param HttpMethod: The http method to use on the generated url. By default, the http method is whatever is used in the method\'s model. :returns: The presigned url """ pass def get_paginator(self, operation_name: str = None) -> Paginator: """ Create a paginator for an operation. :type operation_name: string :param operation_name: The operation name. This is the same name as the method name on the client. For example, if the method name is ``create_foo``, and you\'d normally invoke the operation as ``client.create_foo(**kwargs)``, if the ``create_foo`` operation can be paginated, you can use the call ``client.get_paginator(\"create_foo\")``. :raise OperationNotPageableError: Raised if the operation is not pageable. You can use the ``client.can_paginate`` method to check if an operation is pageable. :rtype: L{botocore.paginate.Paginator} :return: A paginator object. """ pass def get_waiter(self, waiter_name: str = None) -> Waiter: """ Returns an object that can wait for some condition. :type waiter_name: str :param waiter_name: The name of the waiter to get. See the waiters section of the service docs for a list of available waiters. :returns: The specified waiter object. :rtype: botocore.waiter.Waiter """ pass def list_tags_for_resource(self, ResourceName: str, Filters: List = None) -> Dict: """ Lists all tags on an Amazon Neptune resource. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/ListTagsForResource>`_ **Request Syntax** :: response = client.list_tags_for_resource( ResourceName='string', Filters=[ { 'Name': 'string', 'Values': [ 'string', ] }, ] ) **Response Syntax** :: { 'TagList': [ { 'Key': 'string', 'Value': 'string' }, ] } **Response Structure** - *(dict) --* - **TagList** *(list) --* List of tags returned by the ListTagsForResource operation. - *(dict) --* Metadata assigned to an Amazon Neptune resource consisting of a key-value pair. - **Key** *(string) --* A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and can't be prefixed with "aws:" or "rds:". The string can only contain only the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: "^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$"). - **Value** *(string) --* A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and can't be prefixed with "aws:" or "rds:". The string can only contain only the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: "^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$"). 
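A short usage sketch; the ARN is a placeholder (Neptune resources use the ``rds`` ARN namespace) and ``client`` is assumed to be an existing Neptune client::

  response = client.list_tags_for_resource(
      ResourceName='arn:aws:rds:us-east-1:123456789012:cluster:my-neptune-cluster'
  )
  # Collapse the TagList into a plain dict for convenient lookups.
  tags = {tag['Key']: tag['Value'] for tag in response['TagList']}
  print(tags)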
:type ResourceName: string :param ResourceName: **[REQUIRED]** The Amazon Neptune resource with tags to be listed. This value is an Amazon Resource Name (ARN). For information about creating an ARN, see `Constructing an Amazon Resource Name (ARN) <https://docs.aws.amazon.com/neptune/latest/UserGuide/tagging.ARN.html#tagging.ARN.Constructing>`__ . :type Filters: list :param Filters: This parameter is not currently supported. - *(dict) --* This type is not currently supported. - **Name** *(string) --* **[REQUIRED]** This parameter is not currently supported. - **Values** *(list) --* **[REQUIRED]** This parameter is not currently supported. - *(string) --* :rtype: dict :returns: """ pass def modify_db_cluster(self, DBClusterIdentifier: str, NewDBClusterIdentifier: str = None, ApplyImmediately: bool = None, BackupRetentionPeriod: int = None, DBClusterParameterGroupName: str = None, VpcSecurityGroupIds: List = None, Port: int = None, MasterUserPassword: str = None, OptionGroupName: str = None, PreferredBackupWindow: str = None, PreferredMaintenanceWindow: str = None, EnableIAMDatabaseAuthentication: bool = None, EngineVersion: str = None) -> Dict: """ Modify a setting for a DB cluster. You can change one or more database configuration parameters by specifying these parameters and the new values in the request. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/ModifyDBCluster>`_ **Request Syntax** :: response = client.modify_db_cluster( DBClusterIdentifier='string', NewDBClusterIdentifier='string', ApplyImmediately=True|False, BackupRetentionPeriod=123, DBClusterParameterGroupName='string', VpcSecurityGroupIds=[ 'string', ], Port=123, MasterUserPassword='string', OptionGroupName='string', PreferredBackupWindow='string', PreferredMaintenanceWindow='string', EnableIAMDatabaseAuthentication=True|False, EngineVersion='string' ) **Response Syntax** :: { 'DBCluster': { 'AllocatedStorage': 123, 'AvailabilityZones': [ 'string', ], 'BackupRetentionPeriod': 123, 'CharacterSetName': 'string', 'DatabaseName': 'string', 'DBClusterIdentifier': 'string', 'DBClusterParameterGroup': 'string', 'DBSubnetGroup': 'string', 'Status': 'string', 'PercentProgress': 'string', 'EarliestRestorableTime': datetime(2015, 1, 1), 'Endpoint': 'string', 'ReaderEndpoint': 'string', 'MultiAZ': True|False, 'Engine': 'string', 'EngineVersion': 'string', 'LatestRestorableTime': datetime(2015, 1, 1), 'Port': 123, 'MasterUsername': 'string', 'DBClusterOptionGroupMemberships': [ { 'DBClusterOptionGroupName': 'string', 'Status': 'string' }, ], 'PreferredBackupWindow': 'string', 'PreferredMaintenanceWindow': 'string', 'ReplicationSourceIdentifier': 'string', 'ReadReplicaIdentifiers': [ 'string', ], 'DBClusterMembers': [ { 'DBInstanceIdentifier': 'string', 'IsClusterWriter': True|False, 'DBClusterParameterGroupStatus': 'string', 'PromotionTier': 123 }, ], 'VpcSecurityGroups': [ { 'VpcSecurityGroupId': 'string', 'Status': 'string' }, ], 'HostedZoneId': 'string', 'StorageEncrypted': True|False, 'KmsKeyId': 'string', 'DbClusterResourceId': 'string', 'DBClusterArn': 'string', 'AssociatedRoles': [ { 'RoleArn': 'string', 'Status': 'string' }, ], 'IAMDatabaseAuthenticationEnabled': True|False, 'CloneGroupId': 'string', 'ClusterCreateTime': datetime(2015, 1, 1) } } **Response Structure** - *(dict) --* - **DBCluster** *(dict) --* Contains the details of an Amazon Neptune DB cluster. This data type is used as a response element in the DescribeDBClusters action. 
- **AllocatedStorage** *(integer) --* ``AllocatedStorage`` always returns 1, because Neptune DB cluster storage size is not fixed, but instead automatically adjusts as needed. - **AvailabilityZones** *(list) --* Provides the list of EC2 Availability Zones that instances in the DB cluster can be created in. - *(string) --* - **BackupRetentionPeriod** *(integer) --* Specifies the number of days for which automatic DB snapshots are retained. - **CharacterSetName** *(string) --* If present, specifies the name of the character set that this cluster is associated with. - **DatabaseName** *(string) --* Contains the name of the initial database of this DB cluster that was provided at create time, if one was specified when the DB cluster was created. This same name is returned for the life of the DB cluster. - **DBClusterIdentifier** *(string) --* Contains a user-supplied DB cluster identifier. This identifier is the unique key that identifies a DB cluster. - **DBClusterParameterGroup** *(string) --* Specifies the name of the DB cluster parameter group for the DB cluster. - **DBSubnetGroup** *(string) --* Specifies information on the subnet group associated with the DB cluster, including the name, description, and subnets in the subnet group. - **Status** *(string) --* Specifies the current state of this DB cluster. - **PercentProgress** *(string) --* Specifies the progress of the operation as a percentage. - **EarliestRestorableTime** *(datetime) --* Specifies the earliest time to which a database can be restored with point-in-time restore. - **Endpoint** *(string) --* Specifies the connection endpoint for the primary instance of the DB cluster. - **ReaderEndpoint** *(string) --* The reader endpoint for the DB cluster. The reader endpoint for a DB cluster load-balances connections across the Read Replicas that are available in a DB cluster. As clients request new connections to the reader endpoint, Neptune distributes the connection requests among the Read Replicas in the DB cluster. This functionality can help balance your read workload across multiple Read Replicas in your DB cluster. If a failover occurs, and the Read Replica that you are connected to is promoted to be the primary instance, your connection is dropped. To continue sending your read workload to other Read Replicas in the cluster, you can then reconnect to the reader endpoint. - **MultiAZ** *(boolean) --* Specifies whether the DB cluster has instances in multiple Availability Zones. - **Engine** *(string) --* Provides the name of the database engine to be used for this DB cluster. - **EngineVersion** *(string) --* Indicates the database engine version. - **LatestRestorableTime** *(datetime) --* Specifies the latest time to which a database can be restored with point-in-time restore. - **Port** *(integer) --* Specifies the port that the database engine is listening on. - **MasterUsername** *(string) --* Contains the master username for the DB cluster. - **DBClusterOptionGroupMemberships** *(list) --* Provides the list of option group memberships for this DB cluster. - *(dict) --* Contains status information for a DB cluster option group. - **DBClusterOptionGroupName** *(string) --* Specifies the name of the DB cluster option group. - **Status** *(string) --* Specifies the status of the DB cluster option group. - **PreferredBackupWindow** *(string) --* Specifies the daily time range during which automated backups are created if automated backups are enabled, as determined by the ``BackupRetentionPeriod`` . 
- **PreferredMaintenanceWindow** *(string) --* Specifies the weekly time range during which system maintenance can occur, in Universal Coordinated Time (UTC). - **ReplicationSourceIdentifier** *(string) --* Not supported by Neptune. - **ReadReplicaIdentifiers** *(list) --* Contains one or more identifiers of the Read Replicas associated with this DB cluster. - *(string) --* - **DBClusterMembers** *(list) --* Provides the list of instances that make up the DB cluster. - *(dict) --* Contains information about an instance that is part of a DB cluster. - **DBInstanceIdentifier** *(string) --* Specifies the instance identifier for this member of the DB cluster. - **IsClusterWriter** *(boolean) --* Value that is ``true`` if the cluster member is the primary instance for the DB cluster and ``false`` otherwise. - **DBClusterParameterGroupStatus** *(string) --* Specifies the status of the DB cluster parameter group for this member of the DB cluster. - **PromotionTier** *(integer) --* A value that specifies the order in which a Read Replica is promoted to the primary instance after a failure of the existing primary instance. - **VpcSecurityGroups** *(list) --* Provides a list of VPC security groups that the DB cluster belongs to. - *(dict) --* This data type is used as a response element for queries on VPC security group membership. - **VpcSecurityGroupId** *(string) --* The name of the VPC security group. - **Status** *(string) --* The status of the VPC security group. - **HostedZoneId** *(string) --* Specifies the ID that Amazon Route 53 assigns when you create a hosted zone. - **StorageEncrypted** *(boolean) --* Specifies whether the DB cluster is encrypted. - **KmsKeyId** *(string) --* If ``StorageEncrypted`` is true, the AWS KMS key identifier for the encrypted DB cluster. - **DbClusterResourceId** *(string) --* The AWS Region-unique, immutable identifier for the DB cluster. This identifier is found in AWS CloudTrail log entries whenever the AWS KMS key for the DB cluster is accessed. - **DBClusterArn** *(string) --* The Amazon Resource Name (ARN) for the DB cluster. - **AssociatedRoles** *(list) --* Provides a list of the AWS Identity and Access Management (IAM) roles that are associated with the DB cluster. IAM roles that are associated with a DB cluster grant permission for the DB cluster to access other AWS services on your behalf. - *(dict) --* Describes an AWS Identity and Access Management (IAM) role that is associated with a DB cluster. - **RoleArn** *(string) --* The Amazon Resource Name (ARN) of the IAM role that is associated with the DB cluster. - **Status** *(string) --* Describes the state of association between the IAM role and the DB cluster. The Status property returns one of the following values: * ``ACTIVE`` - the IAM role ARN is associated with the DB cluster and can be used to access other AWS services on your behalf. * ``PENDING`` - the IAM role ARN is being associated with the DB cluster. * ``INVALID`` - the IAM role ARN is associated with the DB cluster, but the DB cluster is unable to assume the IAM role in order to access other AWS services on your behalf. - **IAMDatabaseAuthenticationEnabled** *(boolean) --* True if mapping of AWS Identity and Access Management (IAM) accounts to database accounts is enabled, and otherwise false. - **CloneGroupId** *(string) --* Identifies the clone group to which the DB cluster is associated. - **ClusterCreateTime** *(datetime) --* Specifies the time when the DB cluster was created, in Universal Coordinated Time (UTC). 
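A hedged example of a common modification; the cluster identifier and values are illustrative only, and ``client`` is assumed to be an existing Neptune client::

  response = client.modify_db_cluster(
      DBClusterIdentifier='my-neptune-cluster',   # placeholder identifier
      BackupRetentionPeriod=7,                    # keep automated backups for 7 days
      PreferredBackupWindow='03:00-03:30'         # 30-minute UTC window
  )
  # The returned DBCluster reflects the requested (possibly pending) settings.
  print(response['DBCluster']['Status'])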
:type DBClusterIdentifier: string :param DBClusterIdentifier: **[REQUIRED]** The DB cluster identifier for the cluster being modified. This parameter is not case-sensitive. Constraints: * Must match the identifier of an existing DBCluster. :type NewDBClusterIdentifier: string :param NewDBClusterIdentifier: The new DB cluster identifier for the DB cluster when renaming a DB cluster. This value is stored as a lowercase string. Constraints: * Must contain from 1 to 63 letters, numbers, or hyphens * The first character must be a letter * Cannot end with a hyphen or contain two consecutive hyphens Example: ``my-cluster2`` :type ApplyImmediately: boolean :param ApplyImmediately: A value that specifies whether the modifications in this request and any pending modifications are asynchronously applied as soon as possible, regardless of the ``PreferredMaintenanceWindow`` setting for the DB cluster. If this parameter is set to ``false`` , changes to the DB cluster are applied during the next maintenance window. The ``ApplyImmediately`` parameter only affects the ``NewDBClusterIdentifier`` and ``MasterUserPassword`` values. If you set the ``ApplyImmediately`` parameter value to false, then changes to the ``NewDBClusterIdentifier`` and ``MasterUserPassword`` values are applied during the next maintenance window. All other changes are applied immediately, regardless of the value of the ``ApplyImmediately`` parameter. Default: ``false`` :type BackupRetentionPeriod: integer :param BackupRetentionPeriod: The number of days for which automated backups are retained. You must specify a minimum value of 1. Default: 1 Constraints: * Must be a value from 1 to 35 :type DBClusterParameterGroupName: string :param DBClusterParameterGroupName: The name of the DB cluster parameter group to use for the DB cluster. :type VpcSecurityGroupIds: list :param VpcSecurityGroupIds: A list of VPC security groups that the DB cluster will belong to. - *(string) --* :type Port: integer :param Port: The port number on which the DB cluster accepts connections. Constraints: Value must be ``1150-65535`` Default: The same port as the original DB cluster. :type MasterUserPassword: string :param MasterUserPassword: The new password for the master database user. This password can contain any printable ASCII character except \"/\", \"\"\", or \"@\". Constraints: Must contain from 8 to 41 characters. :type OptionGroupName: string :param OptionGroupName: A value that indicates that the DB cluster should be associated with the specified option group. Changing this parameter doesn\'t result in an outage except in the following case, and the change is applied during the next maintenance window unless the ``ApplyImmediately`` parameter is set to ``true`` for this request. If the parameter change results in an option group that enables OEM, this change can cause a brief (sub-second) period during which new connections are rejected but existing connections are not interrupted. Permanent options can\'t be removed from an option group. The option group can\'t be removed from a DB cluster once it is associated with a DB cluster. :type PreferredBackupWindow: string :param PreferredBackupWindow: The daily time range during which automated backups are created if automated backups are enabled, using the ``BackupRetentionPeriod`` parameter. The default is a 30-minute window selected at random from an 8-hour block of time for each AWS Region. Constraints: * Must be in the format ``hh24:mi-hh24:mi`` . * Must be in Universal Coordinated Time (UTC). 
* Must not conflict with the preferred maintenance window. * Must be at least 30 minutes. :type PreferredMaintenanceWindow: string :param PreferredMaintenanceWindow: The weekly time range during which system maintenance can occur, in Universal Coordinated Time (UTC). Format: ``ddd:hh24:mi-ddd:hh24:mi`` The default is a 30-minute window selected at random from an 8-hour block of time for each AWS Region, occurring on a random day of the week. Valid Days: Mon, Tue, Wed, Thu, Fri, Sat, Sun. Constraints: Minimum 30-minute window. :type EnableIAMDatabaseAuthentication: boolean :param EnableIAMDatabaseAuthentication: True to enable mapping of AWS Identity and Access Management (IAM) accounts to database accounts, and otherwise false. Default: ``false`` :type EngineVersion: string :param EngineVersion: The version number of the database engine to which you want to upgrade. Changing this parameter results in an outage. The change is applied during the next maintenance window unless the ApplyImmediately parameter is set to true. For a list of valid engine versions, see CreateDBInstance , or call DescribeDBEngineVersions . :rtype: dict :returns: """ pass def modify_db_cluster_parameter_group(self, DBClusterParameterGroupName: str, Parameters: List) -> Dict: """ Modifies the parameters of a DB cluster parameter group. To modify more than one parameter, submit a list of the following: ``ParameterName`` , ``ParameterValue`` , and ``ApplyMethod`` . A maximum of 20 parameters can be modified in a single request. .. note:: Changes to dynamic parameters are applied immediately. Changes to static parameters require a reboot without failover to the DB cluster associated with the parameter group before the change can take effect. .. warning:: After you create a DB cluster parameter group, you should wait at least 5 minutes before creating your first DB cluster that uses that DB cluster parameter group as the default parameter group. This allows Amazon Neptune to fully complete the create action before the parameter group is used as the default for a new DB cluster. This is especially important for parameters that are critical when creating the default database for a DB cluster, such as the character set for the default database defined by the ``character_set_database`` parameter. You can use the *Parameter Groups* option of the Amazon Neptune console or the DescribeDBClusterParameters command to verify that your DB cluster parameter group has been created or modified. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/ModifyDBClusterParameterGroup>`_ **Request Syntax** :: response = client.modify_db_cluster_parameter_group( DBClusterParameterGroupName='string', Parameters=[ { 'ParameterName': 'string', 'ParameterValue': 'string', 'Description': 'string', 'Source': 'string', 'ApplyType': 'string', 'DataType': 'string', 'AllowedValues': 'string', 'IsModifiable': True|False, 'MinimumEngineVersion': 'string', 'ApplyMethod': 'immediate'|'pending-reboot' }, ] ) **Response Syntax** :: { 'DBClusterParameterGroupName': 'string' } **Response Structure** - *(dict) --* - **DBClusterParameterGroupName** *(string) --* The name of the DB cluster parameter group. Constraints: * Must be 1 to 255 letters or numbers. * First character must be a letter * Cannot end with a hyphen or contain two consecutive hyphens .. note:: This value is stored as a lowercase string. 
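A brief sketch of changing a single dynamic parameter; the group name and parameter shown are placeholders, and ``client`` is assumed to be an existing Neptune client::

  response = client.modify_db_cluster_parameter_group(
      DBClusterParameterGroupName='my-cluster-params',
      Parameters=[
          {
              'ParameterName': 'neptune_query_timeout',  # hypothetical example parameter
              'ParameterValue': '60000',
              'ApplyMethod': 'immediate'                 # dynamic parameters apply immediately
          },
      ]
  )
  print(response['DBClusterParameterGroupName'])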
:type DBClusterParameterGroupName: string :param DBClusterParameterGroupName: **[REQUIRED]** The name of the DB cluster parameter group to modify. :type Parameters: list :param Parameters: **[REQUIRED]** A list of parameters in the DB cluster parameter group to modify. - *(dict) --* Specifies a parameter. - **ParameterName** *(string) --* Specifies the name of the parameter. - **ParameterValue** *(string) --* Specifies the value of the parameter. - **Description** *(string) --* Provides a description of the parameter. - **Source** *(string) --* Indicates the source of the parameter value. - **ApplyType** *(string) --* Specifies the engine specific parameters type. - **DataType** *(string) --* Specifies the valid data type for the parameter. - **AllowedValues** *(string) --* Specifies the valid range of values for the parameter. - **IsModifiable** *(boolean) --* Indicates whether (``true`` ) or not (``false`` ) the parameter can be modified. Some parameters have security or operational implications that prevent them from being changed. - **MinimumEngineVersion** *(string) --* The earliest engine version to which the parameter can apply. - **ApplyMethod** *(string) --* Indicates when to apply parameter updates. :rtype: dict :returns: """ pass def modify_db_cluster_snapshot_attribute(self, DBClusterSnapshotIdentifier: str, AttributeName: str, ValuesToAdd: List = None, ValuesToRemove: List = None) -> Dict: """ Adds an attribute and values to, or removes an attribute and values from, a manual DB cluster snapshot. To share a manual DB cluster snapshot with other AWS accounts, specify ``restore`` as the ``AttributeName`` and use the ``ValuesToAdd`` parameter to add a list of IDs of the AWS accounts that are authorized to restore the manual DB cluster snapshot. Use the value ``all`` to make the manual DB cluster snapshot public, which means that it can be copied or restored by all AWS accounts. Do not add the ``all`` value for any manual DB cluster snapshots that contain private information that you don't want available to all AWS accounts. If a manual DB cluster snapshot is encrypted, it can be shared, but only by specifying a list of authorized AWS account IDs for the ``ValuesToAdd`` parameter. You can't use ``all`` as a value for that parameter in this case. To view which AWS accounts have access to copy or restore a manual DB cluster snapshot, or whether a manual DB cluster snapshot public or private, use the DescribeDBClusterSnapshotAttributes API action. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/ModifyDBClusterSnapshotAttribute>`_ **Request Syntax** :: response = client.modify_db_cluster_snapshot_attribute( DBClusterSnapshotIdentifier='string', AttributeName='string', ValuesToAdd=[ 'string', ], ValuesToRemove=[ 'string', ] ) **Response Syntax** :: { 'DBClusterSnapshotAttributesResult': { 'DBClusterSnapshotIdentifier': 'string', 'DBClusterSnapshotAttributes': [ { 'AttributeName': 'string', 'AttributeValues': [ 'string', ] }, ] } } **Response Structure** - *(dict) --* - **DBClusterSnapshotAttributesResult** *(dict) --* Contains the results of a successful call to the DescribeDBClusterSnapshotAttributes API action. Manual DB cluster snapshot attributes are used to authorize other AWS accounts to copy or restore a manual DB cluster snapshot. For more information, see the ModifyDBClusterSnapshotAttribute API action. - **DBClusterSnapshotIdentifier** *(string) --* The identifier of the manual DB cluster snapshot that the attributes apply to. 
- **DBClusterSnapshotAttributes** *(list) --* The list of attributes and values for the manual DB cluster snapshot. - *(dict) --* Contains the name and values of a manual DB cluster snapshot attribute. Manual DB cluster snapshot attributes are used to authorize other AWS accounts to restore a manual DB cluster snapshot. For more information, see the ModifyDBClusterSnapshotAttribute API action. - **AttributeName** *(string) --* The name of the manual DB cluster snapshot attribute. The attribute named ``restore`` refers to the list of AWS accounts that have permission to copy or restore the manual DB cluster snapshot. For more information, see the ModifyDBClusterSnapshotAttribute API action. - **AttributeValues** *(list) --* The value(s) for the manual DB cluster snapshot attribute. If the ``AttributeName`` field is set to ``restore`` , then this element returns a list of IDs of the AWS accounts that are authorized to copy or restore the manual DB cluster snapshot. If a value of ``all`` is in the list, then the manual DB cluster snapshot is public and available for any AWS account to copy or restore. - *(string) --* :type DBClusterSnapshotIdentifier: string :param DBClusterSnapshotIdentifier: **[REQUIRED]** The identifier for the DB cluster snapshot to modify the attributes for. :type AttributeName: string :param AttributeName: **[REQUIRED]** The name of the DB cluster snapshot attribute to modify. To manage authorization for other AWS accounts to copy or restore a manual DB cluster snapshot, set this value to ``restore`` . :type ValuesToAdd: list :param ValuesToAdd: A list of DB cluster snapshot attributes to add to the attribute specified by ``AttributeName`` . To authorize other AWS accounts to copy or restore a manual DB cluster snapshot, set this list to include one or more AWS account IDs, or ``all`` to make the manual DB cluster snapshot restorable by any AWS account. Do not add the ``all`` value for any manual DB cluster snapshots that contain private information that you don\'t want available to all AWS accounts. - *(string) --* :type ValuesToRemove: list :param ValuesToRemove: A list of DB cluster snapshot attributes to remove from the attribute specified by ``AttributeName`` . To remove authorization for other AWS accounts to copy or restore a manual DB cluster snapshot, set this list to include one or more AWS account identifiers, or ``all`` to remove authorization for any AWS account to copy or restore the DB cluster snapshot. If you specify ``all`` , an AWS account whose account ID is explicitly added to the ``restore`` attribute can still copy or restore a manual DB cluster snapshot. 
- *(string) --* :rtype: dict :returns: """ pass def modify_db_instance(self, DBInstanceIdentifier: str, AllocatedStorage: int = None, DBInstanceClass: str = None, DBSubnetGroupName: str = None, DBSecurityGroups: List = None, VpcSecurityGroupIds: List = None, ApplyImmediately: bool = None, MasterUserPassword: str = None, DBParameterGroupName: str = None, BackupRetentionPeriod: int = None, PreferredBackupWindow: str = None, PreferredMaintenanceWindow: str = None, MultiAZ: bool = None, EngineVersion: str = None, AllowMajorVersionUpgrade: bool = None, AutoMinorVersionUpgrade: bool = None, LicenseModel: str = None, Iops: int = None, OptionGroupName: str = None, NewDBInstanceIdentifier: str = None, StorageType: str = None, TdeCredentialArn: str = None, TdeCredentialPassword: str = None, CACertificateIdentifier: str = None, Domain: str = None, CopyTagsToSnapshot: bool = None, MonitoringInterval: int = None, DBPortNumber: int = None, PubliclyAccessible: bool = None, MonitoringRoleArn: str = None, DomainIAMRoleName: str = None, PromotionTier: int = None, EnableIAMDatabaseAuthentication: bool = None, EnablePerformanceInsights: bool = None, PerformanceInsightsKMSKeyId: str = None, CloudwatchLogsExportConfiguration: Dict = None) -> Dict: """ Modifies settings for a DB instance. You can change one or more database configuration parameters by specifying these parameters and the new values in the request. To learn what modifications you can make to your DB instance, call DescribeValidDBInstanceModifications before you call ModifyDBInstance . See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/ModifyDBInstance>`_ **Request Syntax** :: response = client.modify_db_instance( DBInstanceIdentifier='string', AllocatedStorage=123, DBInstanceClass='string', DBSubnetGroupName='string', DBSecurityGroups=[ 'string', ], VpcSecurityGroupIds=[ 'string', ], ApplyImmediately=True|False, MasterUserPassword='string', DBParameterGroupName='string', BackupRetentionPeriod=123, PreferredBackupWindow='string', PreferredMaintenanceWindow='string', MultiAZ=True|False, EngineVersion='string', AllowMajorVersionUpgrade=True|False, AutoMinorVersionUpgrade=True|False, LicenseModel='string', Iops=123, OptionGroupName='string', NewDBInstanceIdentifier='string', StorageType='string', TdeCredentialArn='string', TdeCredentialPassword='string', CACertificateIdentifier='string', Domain='string', CopyTagsToSnapshot=True|False, MonitoringInterval=123, DBPortNumber=123, PubliclyAccessible=True|False, MonitoringRoleArn='string', DomainIAMRoleName='string', PromotionTier=123, EnableIAMDatabaseAuthentication=True|False, EnablePerformanceInsights=True|False, PerformanceInsightsKMSKeyId='string', CloudwatchLogsExportConfiguration={ 'EnableLogTypes': [ 'string', ], 'DisableLogTypes': [ 'string', ] } ) **Response Syntax** :: { 'DBInstance': { 'DBInstanceIdentifier': 'string', 'DBInstanceClass': 'string', 'Engine': 'string', 'DBInstanceStatus': 'string', 'MasterUsername': 'string', 'DBName': 'string', 'Endpoint': { 'Address': 'string', 'Port': 123, 'HostedZoneId': 'string' }, 'AllocatedStorage': 123, 'InstanceCreateTime': datetime(2015, 1, 1), 'PreferredBackupWindow': 'string', 'BackupRetentionPeriod': 123, 'DBSecurityGroups': [ { 'DBSecurityGroupName': 'string', 'Status': 'string' }, ], 'VpcSecurityGroups': [ { 'VpcSecurityGroupId': 'string', 'Status': 'string' }, ], 'DBParameterGroups': [ { 'DBParameterGroupName': 'string', 'ParameterApplyStatus': 'string' }, ], 'AvailabilityZone': 'string', 'DBSubnetGroup': { 
'DBSubnetGroupName': 'string', 'DBSubnetGroupDescription': 'string', 'VpcId': 'string', 'SubnetGroupStatus': 'string', 'Subnets': [ { 'SubnetIdentifier': 'string', 'SubnetAvailabilityZone': { 'Name': 'string' }, 'SubnetStatus': 'string' }, ], 'DBSubnetGroupArn': 'string' }, 'PreferredMaintenanceWindow': 'string', 'PendingModifiedValues': { 'DBInstanceClass': 'string', 'AllocatedStorage': 123, 'MasterUserPassword': 'string', 'Port': 123, 'BackupRetentionPeriod': 123, 'MultiAZ': True|False, 'EngineVersion': 'string', 'LicenseModel': 'string', 'Iops': 123, 'DBInstanceIdentifier': 'string', 'StorageType': 'string', 'CACertificateIdentifier': 'string', 'DBSubnetGroupName': 'string', 'PendingCloudwatchLogsExports': { 'LogTypesToEnable': [ 'string', ], 'LogTypesToDisable': [ 'string', ] } }, 'LatestRestorableTime': datetime(2015, 1, 1), 'MultiAZ': True|False, 'EngineVersion': 'string', 'AutoMinorVersionUpgrade': True|False, 'ReadReplicaSourceDBInstanceIdentifier': 'string', 'ReadReplicaDBInstanceIdentifiers': [ 'string', ], 'ReadReplicaDBClusterIdentifiers': [ 'string', ], 'LicenseModel': 'string', 'Iops': 123, 'OptionGroupMemberships': [ { 'OptionGroupName': 'string', 'Status': 'string' }, ], 'CharacterSetName': 'string', 'SecondaryAvailabilityZone': 'string', 'PubliclyAccessible': True|False, 'StatusInfos': [ { 'StatusType': 'string', 'Normal': True|False, 'Status': 'string', 'Message': 'string' }, ], 'StorageType': 'string', 'TdeCredentialArn': 'string', 'DbInstancePort': 123, 'DBClusterIdentifier': 'string', 'StorageEncrypted': True|False, 'KmsKeyId': 'string', 'DbiResourceId': 'string', 'CACertificateIdentifier': 'string', 'DomainMemberships': [ { 'Domain': 'string', 'Status': 'string', 'FQDN': 'string', 'IAMRoleName': 'string' }, ], 'CopyTagsToSnapshot': True|False, 'MonitoringInterval': 123, 'EnhancedMonitoringResourceArn': 'string', 'MonitoringRoleArn': 'string', 'PromotionTier': 123, 'DBInstanceArn': 'string', 'Timezone': 'string', 'IAMDatabaseAuthenticationEnabled': True|False, 'PerformanceInsightsEnabled': True|False, 'PerformanceInsightsKMSKeyId': 'string', 'EnabledCloudwatchLogsExports': [ 'string', ] } } **Response Structure** - *(dict) --* - **DBInstance** *(dict) --* Contains the details of an Amazon Neptune DB instance. This data type is used as a response element in the DescribeDBInstances action. - **DBInstanceIdentifier** *(string) --* Contains a user-supplied database identifier. This identifier is the unique key that identifies a DB instance. - **DBInstanceClass** *(string) --* Contains the name of the compute and memory capacity class of the DB instance. - **Engine** *(string) --* Provides the name of the database engine to be used for this DB instance. - **DBInstanceStatus** *(string) --* Specifies the current state of this database. - **MasterUsername** *(string) --* Contains the master username for the DB instance. - **DBName** *(string) --* The database name. - **Endpoint** *(dict) --* Specifies the connection endpoint. - **Address** *(string) --* Specifies the DNS address of the DB instance. - **Port** *(integer) --* Specifies the port that the database engine is listening on. - **HostedZoneId** *(string) --* Specifies the ID that Amazon Route 53 assigns when you create a hosted zone. - **AllocatedStorage** *(integer) --* Specifies the allocated storage size specified in gibibytes. - **InstanceCreateTime** *(datetime) --* Provides the date and time the DB instance was created. 
- **PreferredBackupWindow** *(string) --* Specifies the daily time range during which automated backups are created if automated backups are enabled, as determined by the ``BackupRetentionPeriod`` . - **BackupRetentionPeriod** *(integer) --* Specifies the number of days for which automatic DB snapshots are retained. - **DBSecurityGroups** *(list) --* Provides List of DB security group elements containing only ``DBSecurityGroup.Name`` and ``DBSecurityGroup.Status`` subelements. - *(dict) --* Specifies membership in a designated DB security group. - **DBSecurityGroupName** *(string) --* The name of the DB security group. - **Status** *(string) --* The status of the DB security group. - **VpcSecurityGroups** *(list) --* Provides a list of VPC security group elements that the DB instance belongs to. - *(dict) --* This data type is used as a response element for queries on VPC security group membership. - **VpcSecurityGroupId** *(string) --* The name of the VPC security group. - **Status** *(string) --* The status of the VPC security group. - **DBParameterGroups** *(list) --* Provides the list of DB parameter groups applied to this DB instance. - *(dict) --* The status of the DB parameter group. This data type is used as a response element in the following actions: * CreateDBInstance * DeleteDBInstance * ModifyDBInstance * RebootDBInstance - **DBParameterGroupName** *(string) --* The name of the DP parameter group. - **ParameterApplyStatus** *(string) --* The status of parameter updates. - **AvailabilityZone** *(string) --* Specifies the name of the Availability Zone the DB instance is located in. - **DBSubnetGroup** *(dict) --* Specifies information on the subnet group associated with the DB instance, including the name, description, and subnets in the subnet group. - **DBSubnetGroupName** *(string) --* The name of the DB subnet group. - **DBSubnetGroupDescription** *(string) --* Provides the description of the DB subnet group. - **VpcId** *(string) --* Provides the VpcId of the DB subnet group. - **SubnetGroupStatus** *(string) --* Provides the status of the DB subnet group. - **Subnets** *(list) --* Contains a list of Subnet elements. - *(dict) --* Specifies a subnet. This data type is used as a response element in the DescribeDBSubnetGroups action. - **SubnetIdentifier** *(string) --* Specifies the identifier of the subnet. - **SubnetAvailabilityZone** *(dict) --* Specifies the EC2 Availability Zone that the subnet is in. - **Name** *(string) --* The name of the availability zone. - **SubnetStatus** *(string) --* Specifies the status of the subnet. - **DBSubnetGroupArn** *(string) --* The Amazon Resource Name (ARN) for the DB subnet group. - **PreferredMaintenanceWindow** *(string) --* Specifies the weekly time range during which system maintenance can occur, in Universal Coordinated Time (UTC). - **PendingModifiedValues** *(dict) --* Specifies that changes to the DB instance are pending. This element is only included when changes are pending. Specific changes are identified by subelements. - **DBInstanceClass** *(string) --* Contains the new ``DBInstanceClass`` for the DB instance that will be applied or is currently being applied. - **AllocatedStorage** *(integer) --* Contains the new ``AllocatedStorage`` size for the DB instance that will be applied or is currently being applied. - **MasterUserPassword** *(string) --* Contains the pending or currently-in-progress change of the master credentials for the DB instance. 
- **Port** *(integer) --* Specifies the pending port for the DB instance. - **BackupRetentionPeriod** *(integer) --* Specifies the pending number of days for which automated backups are retained. - **MultiAZ** *(boolean) --* Indicates that the Single-AZ DB instance is to change to a Multi-AZ deployment. - **EngineVersion** *(string) --* Indicates the database engine version. - **LicenseModel** *(string) --* The license model for the DB instance. Valid values: ``license-included`` | ``bring-your-own-license`` | ``general-public-license`` - **Iops** *(integer) --* Specifies the new Provisioned IOPS value for the DB instance that will be applied or is currently being applied. - **DBInstanceIdentifier** *(string) --* Contains the new ``DBInstanceIdentifier`` for the DB instance that will be applied or is currently being applied. - **StorageType** *(string) --* Specifies the storage type to be associated with the DB instance. - **CACertificateIdentifier** *(string) --* Specifies the identifier of the CA certificate for the DB instance. - **DBSubnetGroupName** *(string) --* The new DB subnet group for the DB instance. - **PendingCloudwatchLogsExports** *(dict) --* Specifies the CloudWatch logs to be exported. - **LogTypesToEnable** *(list) --* Log types that are in the process of being enabled. After they are enabled, these log types are exported to CloudWatch Logs. - *(string) --* - **LogTypesToDisable** *(list) --* Log types that are in the process of being deactivated. After they are deactivated, these log types aren't exported to CloudWatch Logs. - *(string) --* - **LatestRestorableTime** *(datetime) --* Specifies the latest time to which a database can be restored with point-in-time restore. - **MultiAZ** *(boolean) --* Specifies if the DB instance is a Multi-AZ deployment. - **EngineVersion** *(string) --* Indicates the database engine version. - **AutoMinorVersionUpgrade** *(boolean) --* Indicates that minor version patches are applied automatically. - **ReadReplicaSourceDBInstanceIdentifier** *(string) --* Contains the identifier of the source DB instance if this DB instance is a Read Replica. - **ReadReplicaDBInstanceIdentifiers** *(list) --* Contains one or more identifiers of the Read Replicas associated with this DB instance. - *(string) --* - **ReadReplicaDBClusterIdentifiers** *(list) --* Contains one or more identifiers of DB clusters that are Read Replicas of this DB instance. - *(string) --* - **LicenseModel** *(string) --* License model information for this DB instance. - **Iops** *(integer) --* Specifies the Provisioned IOPS (I/O operations per second) value. - **OptionGroupMemberships** *(list) --* Provides the list of option group memberships for this DB instance. - *(dict) --* Provides information on the option groups the DB instance is a member of. - **OptionGroupName** *(string) --* The name of the option group that the instance belongs to. - **Status** *(string) --* The status of the DB instance's option group membership. Valid values are: ``in-sync`` , ``pending-apply`` , ``pending-removal`` , ``pending-maintenance-apply`` , ``pending-maintenance-removal`` , ``applying`` , ``removing`` , and ``failed`` . - **CharacterSetName** *(string) --* If present, specifies the name of the character set that this instance is associated with. - **SecondaryAvailabilityZone** *(string) --* If present, specifies the name of the secondary Availability Zone for a DB instance with multi-AZ support. - **PubliclyAccessible** *(boolean) --* This flag should no longer be used. 
- **StatusInfos** *(list) --* The status of a Read Replica. If the instance is not a Read Replica, this is blank. - *(dict) --* Provides a list of status information for a DB instance. - **StatusType** *(string) --* This value is currently "read replication." - **Normal** *(boolean) --* Boolean value that is true if the instance is operating normally, or false if the instance is in an error state. - **Status** *(string) --* Status of the DB instance. For a StatusType of read replica, the values can be replicating, error, stopped, or terminated. - **Message** *(string) --* Details of the error if there is an error for the instance. If the instance is not in an error state, this value is blank. - **StorageType** *(string) --* Specifies the storage type associated with DB instance. - **TdeCredentialArn** *(string) --* The ARN from the key store with which the instance is associated for TDE encryption. - **DbInstancePort** *(integer) --* Specifies the port that the DB instance listens on. If the DB instance is part of a DB cluster, this can be a different port than the DB cluster port. - **DBClusterIdentifier** *(string) --* If the DB instance is a member of a DB cluster, contains the name of the DB cluster that the DB instance is a member of. - **StorageEncrypted** *(boolean) --* Not supported: The encryption for DB instances is managed by the DB cluster. - **KmsKeyId** *(string) --* Not supported: The encryption for DB instances is managed by the DB cluster. - **DbiResourceId** *(string) --* The AWS Region-unique, immutable identifier for the DB instance. This identifier is found in AWS CloudTrail log entries whenever the AWS KMS key for the DB instance is accessed. - **CACertificateIdentifier** *(string) --* The identifier of the CA certificate for this DB instance. - **DomainMemberships** *(list) --* Not supported - *(dict) --* An Active Directory Domain membership record associated with a DB instance. - **Domain** *(string) --* The identifier of the Active Directory Domain. - **Status** *(string) --* The status of the DB instance's Active Directory Domain membership, such as joined, pending-join, failed etc). - **FQDN** *(string) --* The fully qualified domain name of the Active Directory Domain. - **IAMRoleName** *(string) --* The name of the IAM role to be used when making API calls to the Directory Service. - **CopyTagsToSnapshot** *(boolean) --* Specifies whether tags are copied from the DB instance to snapshots of the DB instance. - **MonitoringInterval** *(integer) --* The interval, in seconds, between points when Enhanced Monitoring metrics are collected for the DB instance. - **EnhancedMonitoringResourceArn** *(string) --* The Amazon Resource Name (ARN) of the Amazon CloudWatch Logs log stream that receives the Enhanced Monitoring metrics data for the DB instance. - **MonitoringRoleArn** *(string) --* The ARN for the IAM role that permits Neptune to send Enhanced Monitoring metrics to Amazon CloudWatch Logs. - **PromotionTier** *(integer) --* A value that specifies the order in which a Read Replica is promoted to the primary instance after a failure of the existing primary instance. - **DBInstanceArn** *(string) --* The Amazon Resource Name (ARN) for the DB instance. - **Timezone** *(string) --* Not supported. - **IAMDatabaseAuthenticationEnabled** *(boolean) --* True if AWS Identity and Access Management (IAM) authentication is enabled, and otherwise false. 
- **PerformanceInsightsEnabled** *(boolean) --* True if Performance Insights is enabled for the DB instance, and otherwise false. - **PerformanceInsightsKMSKeyId** *(string) --* The AWS KMS key identifier for encryption of Performance Insights data. The KMS key ID is the Amazon Resource Name (ARN), KMS key identifier, or the KMS key alias for the KMS encryption key. - **EnabledCloudwatchLogsExports** *(list) --* A list of log types that this DB instance is configured to export to CloudWatch Logs. - *(string) --* :type DBInstanceIdentifier: string :param DBInstanceIdentifier: **[REQUIRED]** The DB instance identifier. This value is stored as a lowercase string. Constraints: * Must match the identifier of an existing DBInstance. :type AllocatedStorage: integer :param AllocatedStorage: The new amount of storage (in gibibytes) to allocate for the DB instance. Not applicable. Storage is managed by the DB Cluster. :type DBInstanceClass: string :param DBInstanceClass: The new compute and memory capacity of the DB instance, for example, ``db.m4.large`` . Not all DB instance classes are available in all AWS Regions. If you modify the DB instance class, an outage occurs during the change. The change is applied during the next maintenance window, unless ``ApplyImmediately`` is specified as ``true`` for this request. Default: Uses existing setting :type DBSubnetGroupName: string :param DBSubnetGroupName: The new DB subnet group for the DB instance. You can use this parameter to move your DB instance to a different VPC. Changing the subnet group causes an outage during the change. The change is applied during the next maintenance window, unless you specify ``true`` for the ``ApplyImmediately`` parameter. Constraints: If supplied, must match the name of an existing DBSubnetGroup. Example: ``mySubnetGroup`` :type DBSecurityGroups: list :param DBSecurityGroups: A list of DB security groups to authorize on this DB instance. Changing this setting doesn\'t result in an outage and the change is asynchronously applied as soon as possible. Constraints: * If supplied, must match existing DBSecurityGroups. - *(string) --* :type VpcSecurityGroupIds: list :param VpcSecurityGroupIds: A list of EC2 VPC security groups to authorize on this DB instance. This change is asynchronously applied as soon as possible. Not applicable. The associated list of EC2 VPC security groups is managed by the DB cluster. For more information, see ModifyDBCluster . Constraints: * If supplied, must match existing VpcSecurityGroupIds. - *(string) --* :type ApplyImmediately: boolean :param ApplyImmediately: Specifies whether the modifications in this request and any pending modifications are asynchronously applied as soon as possible, regardless of the ``PreferredMaintenanceWindow`` setting for the DB instance. If this parameter is set to ``false`` , changes to the DB instance are applied during the next maintenance window. Some parameter changes can cause an outage and are applied on the next call to RebootDBInstance , or the next failure reboot. Default: ``false`` :type MasterUserPassword: string :param MasterUserPassword: Not applicable. :type DBParameterGroupName: string :param DBParameterGroupName: The name of the DB parameter group to apply to the DB instance. Changing this setting doesn\'t result in an outage. The parameter group name itself is changed immediately, but the actual parameter changes are not applied until you reboot the instance without failover. 
The db instance will NOT be rebooted automatically and the parameter changes will NOT be applied during the next maintenance window. Default: Uses existing setting Constraints: The DB parameter group must be in the same DB parameter group family as this DB instance. :type BackupRetentionPeriod: integer :param BackupRetentionPeriod: Not applicable. The retention period for automated backups is managed by the DB cluster. For more information, see ModifyDBCluster . Default: Uses existing setting :type PreferredBackupWindow: string :param PreferredBackupWindow: The daily time range during which automated backups are created if automated backups are enabled. Not applicable. The daily time range for creating automated backups is managed by the DB cluster. For more information, see ModifyDBCluster . Constraints: * Must be in the format hh24:mi-hh24:mi * Must be in Universal Time Coordinated (UTC) * Must not conflict with the preferred maintenance window * Must be at least 30 minutes :type PreferredMaintenanceWindow: string :param PreferredMaintenanceWindow: The weekly time range (in UTC) during which system maintenance can occur, which might result in an outage. Changing this parameter doesn\'t result in an outage, except in the following situation, and the change is asynchronously applied as soon as possible. If there are pending actions that cause a reboot, and the maintenance window is changed to include the current time, then changing this parameter will cause a reboot of the DB instance. If moving this window to the current time, there must be at least 30 minutes between the current time and end of the window to ensure pending changes are applied. Default: Uses existing setting Format: ddd:hh24:mi-ddd:hh24:mi Valid Days: Mon | Tue | Wed | Thu | Fri | Sat | Sun Constraints: Must be at least 30 minutes :type MultiAZ: boolean :param MultiAZ: Specifies if the DB instance is a Multi-AZ deployment. Changing this parameter doesn\'t result in an outage and the change is applied during the next maintenance window unless the ``ApplyImmediately`` parameter is set to ``true`` for this request. :type EngineVersion: string :param EngineVersion: The version number of the database engine to upgrade to. Changing this parameter results in an outage and the change is applied during the next maintenance window unless the ``ApplyImmediately`` parameter is set to ``true`` for this request. For major version upgrades, if a nondefault DB parameter group is currently in use, a new DB parameter group in the DB parameter group family for the new engine version must be specified. The new DB parameter group can be the default for that DB parameter group family. :type AllowMajorVersionUpgrade: boolean :param AllowMajorVersionUpgrade: Indicates that major version upgrades are allowed. Changing this parameter doesn\'t result in an outage and the change is asynchronously applied as soon as possible. Constraints: This parameter must be set to true when specifying a value for the EngineVersion parameter that is a different major version than the DB instance\'s current version. :type AutoMinorVersionUpgrade: boolean :param AutoMinorVersionUpgrade: Indicates that minor version upgrades are applied automatically to the DB instance during the maintenance window. Changing this parameter doesn\'t result in an outage except in the following case and the change is asynchronously applied as soon as possible. 
An outage will result if this parameter is set to ``true`` during the maintenance window, and a newer minor version is available, and Neptune has enabled auto patching for that engine version. :type LicenseModel: string :param LicenseModel: Not supported. :type Iops: integer :param Iops: The new Provisioned IOPS (I/O operations per second) value for the instance. Changing this setting doesn\'t result in an outage and the change is applied during the next maintenance window unless the ``ApplyImmediately`` parameter is set to ``true`` for this request. Default: Uses existing setting :type OptionGroupName: string :param OptionGroupName: Indicates that the DB instance should be associated with the specified option group. Changing this parameter doesn\'t result in an outage except in the following case and the change is applied during the next maintenance window unless the ``ApplyImmediately`` parameter is set to ``true`` for this request. If the parameter change results in an option group that enables OEM, this change can cause a brief (sub-second) period during which new connections are rejected but existing connections are not interrupted. Permanent options, such as the TDE option for Oracle Advanced Security TDE, can\'t be removed from an option group, and that option group can\'t be removed from a DB instance once it is associated with a DB instance :type NewDBInstanceIdentifier: string :param NewDBInstanceIdentifier: The new DB instance identifier for the DB instance when renaming a DB instance. When you change the DB instance identifier, an instance reboot will occur immediately if you set ``Apply Immediately`` to true, or will occur during the next maintenance window if ``Apply Immediately`` to false. This value is stored as a lowercase string. Constraints: * Must contain from 1 to 63 letters, numbers, or hyphens. * The first character must be a letter. * Cannot end with a hyphen or contain two consecutive hyphens. Example: ``mydbinstance`` :type StorageType: string :param StorageType: Not supported. :type TdeCredentialArn: string :param TdeCredentialArn: The ARN from the key store with which to associate the instance for TDE encryption. :type TdeCredentialPassword: string :param TdeCredentialPassword: The password for the given ARN from the key store in order to access the device. :type CACertificateIdentifier: string :param CACertificateIdentifier: Indicates the certificate that needs to be associated with the instance. :type Domain: string :param Domain: Not supported. :type CopyTagsToSnapshot: boolean :param CopyTagsToSnapshot: True to copy all tags from the DB instance to snapshots of the DB instance, and otherwise false. The default is false. :type MonitoringInterval: integer :param MonitoringInterval: The interval, in seconds, between points when Enhanced Monitoring metrics are collected for the DB instance. To disable collecting Enhanced Monitoring metrics, specify 0. The default is 0. If ``MonitoringRoleArn`` is specified, then you must also set ``MonitoringInterval`` to a value other than 0. Valid Values: ``0, 1, 5, 10, 15, 30, 60`` :type DBPortNumber: integer :param DBPortNumber: The port number on which the database accepts connections. The value of the ``DBPortNumber`` parameter must not match any of the port values specified for options in the option group for the DB instance. Your database will restart when you change the ``DBPortNumber`` value regardless of the value of the ``ApplyImmediately`` parameter. 
Default: ``8182`` :type PubliclyAccessible: boolean :param PubliclyAccessible: This flag should no longer be used. :type MonitoringRoleArn: string :param MonitoringRoleArn: The ARN for the IAM role that permits Neptune to send enhanced monitoring metrics to Amazon CloudWatch Logs. For example, ``arn:aws:iam:123456789012:role/emaccess`` . If ``MonitoringInterval`` is set to a value other than 0, then you must supply a ``MonitoringRoleArn`` value. :type DomainIAMRoleName: string :param DomainIAMRoleName: Not supported :type PromotionTier: integer :param PromotionTier: A value that specifies the order in which a Read Replica is promoted to the primary instance after a failure of the existing primary instance. Default: 1 Valid Values: 0 - 15 :type EnableIAMDatabaseAuthentication: boolean :param EnableIAMDatabaseAuthentication: True to enable mapping of AWS Identity and Access Management (IAM) accounts to database accounts, and otherwise false. You can enable IAM database authentication for the following database engines Not applicable. Mapping AWS IAM accounts to database accounts is managed by the DB cluster. For more information, see ModifyDBCluster . Default: ``false`` :type EnablePerformanceInsights: boolean :param EnablePerformanceInsights: Not supported. :type PerformanceInsightsKMSKeyId: string :param PerformanceInsightsKMSKeyId: Not supported. :type CloudwatchLogsExportConfiguration: dict :param CloudwatchLogsExportConfiguration: The configuration setting for the log types to be enabled for export to CloudWatch Logs for a specific DB instance or DB cluster. - **EnableLogTypes** *(list) --* The list of log types to enable. - *(string) --* - **DisableLogTypes** *(list) --* The list of log types to disable. - *(string) --* :rtype: dict :returns: """ pass def modify_db_parameter_group(self, DBParameterGroupName: str, Parameters: List) -> Dict: """ Modifies the parameters of a DB parameter group. To modify more than one parameter, submit a list of the following: ``ParameterName`` , ``ParameterValue`` , and ``ApplyMethod`` . A maximum of 20 parameters can be modified in a single request. .. note:: Changes to dynamic parameters are applied immediately. Changes to static parameters require a reboot without failover to the DB instance associated with the parameter group before the change can take effect. .. warning:: After you modify a DB parameter group, you should wait at least 5 minutes before creating your first DB instance that uses that DB parameter group as the default parameter group. This allows Amazon Neptune to fully complete the modify action before the parameter group is used as the default for a new DB instance. This is especially important for parameters that are critical when creating the default database for a DB instance, such as the character set for the default database defined by the ``character_set_database`` parameter. You can use the *Parameter Groups* option of the Amazon Neptune console or the *DescribeDBParameters* command to verify that your DB parameter group has been created or modified. 
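A minimal sketch of staging a static parameter change (the group and parameter names here are hypothetical placeholders, not values from this reference); per the notes below, a static parameter has to use the ``pending-reboot`` apply method and takes effect after the associated DB instance is rebooted without failover::

    import boto3

    client = boto3.client('neptune')

    # Hypothetical names; 'pending-reboot' works for both static and dynamic
    # parameters, while 'immediate' is limited to dynamic parameters.
    response = client.modify_db_parameter_group(
        DBParameterGroupName='my-instance-params',
        Parameters=[
            {
                'ParameterName': 'example_static_parameter',
                'ParameterValue': '1',
                'ApplyMethod': 'pending-reboot'
            },
        ]
    )
    print(response['DBParameterGroupName'])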
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/ModifyDBParameterGroup>`_ **Request Syntax** :: response = client.modify_db_parameter_group( DBParameterGroupName='string', Parameters=[ { 'ParameterName': 'string', 'ParameterValue': 'string', 'Description': 'string', 'Source': 'string', 'ApplyType': 'string', 'DataType': 'string', 'AllowedValues': 'string', 'IsModifiable': True|False, 'MinimumEngineVersion': 'string', 'ApplyMethod': 'immediate'|'pending-reboot' }, ] ) **Response Syntax** :: { 'DBParameterGroupName': 'string' } **Response Structure** - *(dict) --* - **DBParameterGroupName** *(string) --* Provides the name of the DB parameter group. :type DBParameterGroupName: string :param DBParameterGroupName: **[REQUIRED]** The name of the DB parameter group. Constraints: * If supplied, must match the name of an existing DBParameterGroup. :type Parameters: list :param Parameters: **[REQUIRED]** An array of parameter names, values, and the apply method for the parameter update. At least one parameter name, value, and apply method must be supplied; subsequent arguments are optional. A maximum of 20 parameters can be modified in a single request. Valid Values (for the application method): ``immediate | pending-reboot`` .. note:: You can use the immediate value with dynamic parameters only. You can use the pending-reboot value for both dynamic and static parameters, and changes are applied when you reboot the DB instance without failover. - *(dict) --* Specifies a parameter. - **ParameterName** *(string) --* Specifies the name of the parameter. - **ParameterValue** *(string) --* Specifies the value of the parameter. - **Description** *(string) --* Provides a description of the parameter. - **Source** *(string) --* Indicates the source of the parameter value. - **ApplyType** *(string) --* Specifies the engine specific parameters type. - **DataType** *(string) --* Specifies the valid data type for the parameter. - **AllowedValues** *(string) --* Specifies the valid range of values for the parameter. - **IsModifiable** *(boolean) --* Indicates whether (``true`` ) or not (``false`` ) the parameter can be modified. Some parameters have security or operational implications that prevent them from being changed. - **MinimumEngineVersion** *(string) --* The earliest engine version to which the parameter can apply. - **ApplyMethod** *(string) --* Indicates when to apply parameter updates. :rtype: dict :returns: """ pass def modify_db_subnet_group(self, DBSubnetGroupName: str, SubnetIds: List, DBSubnetGroupDescription: str = None) -> Dict: """ Modifies an existing DB subnet group. DB subnet groups must contain at least one subnet in at least two AZs in the AWS Region. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/ModifyDBSubnetGroup>`_ **Request Syntax** :: response = client.modify_db_subnet_group( DBSubnetGroupName='string', DBSubnetGroupDescription='string', SubnetIds=[ 'string', ] ) **Response Syntax** :: { 'DBSubnetGroup': { 'DBSubnetGroupName': 'string', 'DBSubnetGroupDescription': 'string', 'VpcId': 'string', 'SubnetGroupStatus': 'string', 'Subnets': [ { 'SubnetIdentifier': 'string', 'SubnetAvailabilityZone': { 'Name': 'string' }, 'SubnetStatus': 'string' }, ], 'DBSubnetGroupArn': 'string' } } **Response Structure** - *(dict) --* - **DBSubnetGroup** *(dict) --* Contains the details of an Amazon Neptune DB subnet group. This data type is used as a response element in the DescribeDBSubnetGroups action. 
- **DBSubnetGroupName** *(string) --* The name of the DB subnet group. - **DBSubnetGroupDescription** *(string) --* Provides the description of the DB subnet group. - **VpcId** *(string) --* Provides the VpcId of the DB subnet group. - **SubnetGroupStatus** *(string) --* Provides the status of the DB subnet group. - **Subnets** *(list) --* Contains a list of Subnet elements. - *(dict) --* Specifies a subnet. This data type is used as a response element in the DescribeDBSubnetGroups action. - **SubnetIdentifier** *(string) --* Specifies the identifier of the subnet. - **SubnetAvailabilityZone** *(dict) --* Specifies the EC2 Availability Zone that the subnet is in. - **Name** *(string) --* The name of the availability zone. - **SubnetStatus** *(string) --* Specifies the status of the subnet. - **DBSubnetGroupArn** *(string) --* The Amazon Resource Name (ARN) for the DB subnet group. :type DBSubnetGroupName: string :param DBSubnetGroupName: **[REQUIRED]** The name for the DB subnet group. This value is stored as a lowercase string. You can\'t modify the default subnet group. Constraints: Must match the name of an existing DBSubnetGroup. Must not be default. Example: ``mySubnetgroup`` :type DBSubnetGroupDescription: string :param DBSubnetGroupDescription: The description for the DB subnet group. :type SubnetIds: list :param SubnetIds: **[REQUIRED]** The EC2 subnet IDs for the DB subnet group. - *(string) --* :rtype: dict :returns: """ pass def modify_event_subscription(self, SubscriptionName: str, SnsTopicArn: str = None, SourceType: str = None, EventCategories: List = None, Enabled: bool = None) -> Dict: """ Modifies an existing event notification subscription. Note that you can't modify the source identifiers using this call; to change source identifiers for a subscription, use the AddSourceIdentifierToSubscription and RemoveSourceIdentifierFromSubscription calls. You can see a list of the event categories for a given SourceType by using the **DescribeEventCategories** action. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/ModifyEventSubscription>`_ **Request Syntax** :: response = client.modify_event_subscription( SubscriptionName='string', SnsTopicArn='string', SourceType='string', EventCategories=[ 'string', ], Enabled=True|False ) **Response Syntax** :: { 'EventSubscription': { 'CustomerAwsId': 'string', 'CustSubscriptionId': 'string', 'SnsTopicArn': 'string', 'Status': 'string', 'SubscriptionCreationTime': 'string', 'SourceType': 'string', 'SourceIdsList': [ 'string', ], 'EventCategoriesList': [ 'string', ], 'Enabled': True|False, 'EventSubscriptionArn': 'string' } } **Response Structure** - *(dict) --* - **EventSubscription** *(dict) --* Contains the results of a successful invocation of the DescribeEventSubscriptions action. - **CustomerAwsId** *(string) --* The AWS customer account associated with the event notification subscription. - **CustSubscriptionId** *(string) --* The event notification subscription Id. - **SnsTopicArn** *(string) --* The topic ARN of the event notification subscription. - **Status** *(string) --* The status of the event notification subscription. Constraints: Can be one of the following: creating | modifying | deleting | active | no-permission | topic-not-exist The status "no-permission" indicates that Neptune no longer has permission to post to the SNS topic. The status "topic-not-exist" indicates that the topic was deleted after the subscription was created. 
- **SubscriptionCreationTime** *(string) --* The time the event notification subscription was created. - **SourceType** *(string) --* The source type for the event notification subscription. - **SourceIdsList** *(list) --* A list of source IDs for the event notification subscription. - *(string) --* - **EventCategoriesList** *(list) --* A list of event categories for the event notification subscription. - *(string) --* - **Enabled** *(boolean) --* A Boolean value indicating if the subscription is enabled. True indicates the subscription is enabled. - **EventSubscriptionArn** *(string) --* The Amazon Resource Name (ARN) for the event subscription. :type SubscriptionName: string :param SubscriptionName: **[REQUIRED]** The name of the event notification subscription. :type SnsTopicArn: string :param SnsTopicArn: The Amazon Resource Name (ARN) of the SNS topic created for event notification. The ARN is created by Amazon SNS when you create a topic and subscribe to it. :type SourceType: string :param SourceType: The type of source that is generating the events. For example, if you want to be notified of events generated by a DB instance, you would set this parameter to db-instance. if this value is not specified, all events are returned. Valid values: db-instance | db-parameter-group | db-security-group | db-snapshot :type EventCategories: list :param EventCategories: A list of event categories for a SourceType that you want to subscribe to. You can see a list of the categories for a given SourceType by using the **DescribeEventCategories** action. - *(string) --* :type Enabled: boolean :param Enabled: A Boolean value; set to **true** to activate the subscription. :rtype: dict :returns: """ pass def promote_read_replica_db_cluster(self, DBClusterIdentifier: str) -> Dict: """ Not supported. 
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/PromoteReadReplicaDBCluster>`_ **Request Syntax** :: response = client.promote_read_replica_db_cluster( DBClusterIdentifier='string' ) **Response Syntax** :: { 'DBCluster': { 'AllocatedStorage': 123, 'AvailabilityZones': [ 'string', ], 'BackupRetentionPeriod': 123, 'CharacterSetName': 'string', 'DatabaseName': 'string', 'DBClusterIdentifier': 'string', 'DBClusterParameterGroup': 'string', 'DBSubnetGroup': 'string', 'Status': 'string', 'PercentProgress': 'string', 'EarliestRestorableTime': datetime(2015, 1, 1), 'Endpoint': 'string', 'ReaderEndpoint': 'string', 'MultiAZ': True|False, 'Engine': 'string', 'EngineVersion': 'string', 'LatestRestorableTime': datetime(2015, 1, 1), 'Port': 123, 'MasterUsername': 'string', 'DBClusterOptionGroupMemberships': [ { 'DBClusterOptionGroupName': 'string', 'Status': 'string' }, ], 'PreferredBackupWindow': 'string', 'PreferredMaintenanceWindow': 'string', 'ReplicationSourceIdentifier': 'string', 'ReadReplicaIdentifiers': [ 'string', ], 'DBClusterMembers': [ { 'DBInstanceIdentifier': 'string', 'IsClusterWriter': True|False, 'DBClusterParameterGroupStatus': 'string', 'PromotionTier': 123 }, ], 'VpcSecurityGroups': [ { 'VpcSecurityGroupId': 'string', 'Status': 'string' }, ], 'HostedZoneId': 'string', 'StorageEncrypted': True|False, 'KmsKeyId': 'string', 'DbClusterResourceId': 'string', 'DBClusterArn': 'string', 'AssociatedRoles': [ { 'RoleArn': 'string', 'Status': 'string' }, ], 'IAMDatabaseAuthenticationEnabled': True|False, 'CloneGroupId': 'string', 'ClusterCreateTime': datetime(2015, 1, 1) } } **Response Structure** - *(dict) --* - **DBCluster** *(dict) --* Contains the details of an Amazon Neptune DB cluster. This data type is used as a response element in the DescribeDBClusters action. - **AllocatedStorage** *(integer) --* ``AllocatedStorage`` always returns 1, because Neptune DB cluster storage size is not fixed, but instead automatically adjusts as needed. - **AvailabilityZones** *(list) --* Provides the list of EC2 Availability Zones that instances in the DB cluster can be created in. - *(string) --* - **BackupRetentionPeriod** *(integer) --* Specifies the number of days for which automatic DB snapshots are retained. - **CharacterSetName** *(string) --* If present, specifies the name of the character set that this cluster is associated with. - **DatabaseName** *(string) --* Contains the name of the initial database of this DB cluster that was provided at create time, if one was specified when the DB cluster was created. This same name is returned for the life of the DB cluster. - **DBClusterIdentifier** *(string) --* Contains a user-supplied DB cluster identifier. This identifier is the unique key that identifies a DB cluster. - **DBClusterParameterGroup** *(string) --* Specifies the name of the DB cluster parameter group for the DB cluster. - **DBSubnetGroup** *(string) --* Specifies information on the subnet group associated with the DB cluster, including the name, description, and subnets in the subnet group. - **Status** *(string) --* Specifies the current state of this DB cluster. - **PercentProgress** *(string) --* Specifies the progress of the operation as a percentage. - **EarliestRestorableTime** *(datetime) --* Specifies the earliest time to which a database can be restored with point-in-time restore. - **Endpoint** *(string) --* Specifies the connection endpoint for the primary instance of the DB cluster. 
- **ReaderEndpoint** *(string) --* The reader endpoint for the DB cluster. The reader endpoint for a DB cluster load-balances connections across the Read Replicas that are available in a DB cluster. As clients request new connections to the reader endpoint, Neptune distributes the connection requests among the Read Replicas in the DB cluster. This functionality can help balance your read workload across multiple Read Replicas in your DB cluster. If a failover occurs, and the Read Replica that you are connected to is promoted to be the primary instance, your connection is dropped. To continue sending your read workload to other Read Replicas in the cluster, you can then reconnect to the reader endpoint. - **MultiAZ** *(boolean) --* Specifies whether the DB cluster has instances in multiple Availability Zones. - **Engine** *(string) --* Provides the name of the database engine to be used for this DB cluster. - **EngineVersion** *(string) --* Indicates the database engine version. - **LatestRestorableTime** *(datetime) --* Specifies the latest time to which a database can be restored with point-in-time restore. - **Port** *(integer) --* Specifies the port that the database engine is listening on. - **MasterUsername** *(string) --* Contains the master username for the DB cluster. - **DBClusterOptionGroupMemberships** *(list) --* Provides the list of option group memberships for this DB cluster. - *(dict) --* Contains status information for a DB cluster option group. - **DBClusterOptionGroupName** *(string) --* Specifies the name of the DB cluster option group. - **Status** *(string) --* Specifies the status of the DB cluster option group. - **PreferredBackupWindow** *(string) --* Specifies the daily time range during which automated backups are created if automated backups are enabled, as determined by the ``BackupRetentionPeriod`` . - **PreferredMaintenanceWindow** *(string) --* Specifies the weekly time range during which system maintenance can occur, in Universal Coordinated Time (UTC). - **ReplicationSourceIdentifier** *(string) --* Not supported by Neptune. - **ReadReplicaIdentifiers** *(list) --* Contains one or more identifiers of the Read Replicas associated with this DB cluster. - *(string) --* - **DBClusterMembers** *(list) --* Provides the list of instances that make up the DB cluster. - *(dict) --* Contains information about an instance that is part of a DB cluster. - **DBInstanceIdentifier** *(string) --* Specifies the instance identifier for this member of the DB cluster. - **IsClusterWriter** *(boolean) --* Value that is ``true`` if the cluster member is the primary instance for the DB cluster and ``false`` otherwise. - **DBClusterParameterGroupStatus** *(string) --* Specifies the status of the DB cluster parameter group for this member of the DB cluster. - **PromotionTier** *(integer) --* A value that specifies the order in which a Read Replica is promoted to the primary instance after a failure of the existing primary instance. - **VpcSecurityGroups** *(list) --* Provides a list of VPC security groups that the DB cluster belongs to. - *(dict) --* This data type is used as a response element for queries on VPC security group membership. - **VpcSecurityGroupId** *(string) --* The name of the VPC security group. - **Status** *(string) --* The status of the VPC security group. - **HostedZoneId** *(string) --* Specifies the ID that Amazon Route 53 assigns when you create a hosted zone. - **StorageEncrypted** *(boolean) --* Specifies whether the DB cluster is encrypted. 
- **KmsKeyId** *(string) --* If ``StorageEncrypted`` is true, the AWS KMS key identifier for the encrypted DB cluster. - **DbClusterResourceId** *(string) --* The AWS Region-unique, immutable identifier for the DB cluster. This identifier is found in AWS CloudTrail log entries whenever the AWS KMS key for the DB cluster is accessed. - **DBClusterArn** *(string) --* The Amazon Resource Name (ARN) for the DB cluster. - **AssociatedRoles** *(list) --* Provides a list of the AWS Identity and Access Management (IAM) roles that are associated with the DB cluster. IAM roles that are associated with a DB cluster grant permission for the DB cluster to access other AWS services on your behalf. - *(dict) --* Describes an AWS Identity and Access Management (IAM) role that is associated with a DB cluster. - **RoleArn** *(string) --* The Amazon Resource Name (ARN) of the IAM role that is associated with the DB cluster. - **Status** *(string) --* Describes the state of association between the IAM role and the DB cluster. The Status property returns one of the following values: * ``ACTIVE`` - the IAM role ARN is associated with the DB cluster and can be used to access other AWS services on your behalf. * ``PENDING`` - the IAM role ARN is being associated with the DB cluster. * ``INVALID`` - the IAM role ARN is associated with the DB cluster, but the DB cluster is unable to assume the IAM role in order to access other AWS services on your behalf. - **IAMDatabaseAuthenticationEnabled** *(boolean) --* True if mapping of AWS Identity and Access Management (IAM) accounts to database accounts is enabled, and otherwise false. - **CloneGroupId** *(string) --* Identifies the clone group to which the DB cluster is associated. - **ClusterCreateTime** *(datetime) --* Specifies the time when the DB cluster was created, in Universal Coordinated Time (UTC). :type DBClusterIdentifier: string :param DBClusterIdentifier: **[REQUIRED]** Not supported. :rtype: dict :returns: """ pass def reboot_db_instance(self, DBInstanceIdentifier: str, ForceFailover: bool = None) -> Dict: """ You might need to reboot your DB instance, usually for maintenance reasons. For example, if you make certain modifications, or if you change the DB parameter group associated with the DB instance, you must reboot the instance for the changes to take effect. Rebooting a DB instance restarts the database engine service. Rebooting a DB instance results in a momentary outage, during which the DB instance status is set to rebooting. 
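A minimal sketch of rebooting an instance (the instance identifier is a placeholder), reading the transitional status back from the response::

    import boto3

    client = boto3.client('neptune')

    # Hypothetical identifier; ForceFailover=False requests a plain reboot
    # rather than a reboot that is conducted through a Multi-AZ failover.
    response = client.reboot_db_instance(
        DBInstanceIdentifier='my-neptune-instance',
        ForceFailover=False
    )

    # The instance status is reported as 'rebooting' while the reboot is in progress.
    print(response['DBInstance']['DBInstanceStatus'])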
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/RebootDBInstance>`_ **Request Syntax** :: response = client.reboot_db_instance( DBInstanceIdentifier='string', ForceFailover=True|False ) **Response Syntax** :: { 'DBInstance': { 'DBInstanceIdentifier': 'string', 'DBInstanceClass': 'string', 'Engine': 'string', 'DBInstanceStatus': 'string', 'MasterUsername': 'string', 'DBName': 'string', 'Endpoint': { 'Address': 'string', 'Port': 123, 'HostedZoneId': 'string' }, 'AllocatedStorage': 123, 'InstanceCreateTime': datetime(2015, 1, 1), 'PreferredBackupWindow': 'string', 'BackupRetentionPeriod': 123, 'DBSecurityGroups': [ { 'DBSecurityGroupName': 'string', 'Status': 'string' }, ], 'VpcSecurityGroups': [ { 'VpcSecurityGroupId': 'string', 'Status': 'string' }, ], 'DBParameterGroups': [ { 'DBParameterGroupName': 'string', 'ParameterApplyStatus': 'string' }, ], 'AvailabilityZone': 'string', 'DBSubnetGroup': { 'DBSubnetGroupName': 'string', 'DBSubnetGroupDescription': 'string', 'VpcId': 'string', 'SubnetGroupStatus': 'string', 'Subnets': [ { 'SubnetIdentifier': 'string', 'SubnetAvailabilityZone': { 'Name': 'string' }, 'SubnetStatus': 'string' }, ], 'DBSubnetGroupArn': 'string' }, 'PreferredMaintenanceWindow': 'string', 'PendingModifiedValues': { 'DBInstanceClass': 'string', 'AllocatedStorage': 123, 'MasterUserPassword': 'string', 'Port': 123, 'BackupRetentionPeriod': 123, 'MultiAZ': True|False, 'EngineVersion': 'string', 'LicenseModel': 'string', 'Iops': 123, 'DBInstanceIdentifier': 'string', 'StorageType': 'string', 'CACertificateIdentifier': 'string', 'DBSubnetGroupName': 'string', 'PendingCloudwatchLogsExports': { 'LogTypesToEnable': [ 'string', ], 'LogTypesToDisable': [ 'string', ] } }, 'LatestRestorableTime': datetime(2015, 1, 1), 'MultiAZ': True|False, 'EngineVersion': 'string', 'AutoMinorVersionUpgrade': True|False, 'ReadReplicaSourceDBInstanceIdentifier': 'string', 'ReadReplicaDBInstanceIdentifiers': [ 'string', ], 'ReadReplicaDBClusterIdentifiers': [ 'string', ], 'LicenseModel': 'string', 'Iops': 123, 'OptionGroupMemberships': [ { 'OptionGroupName': 'string', 'Status': 'string' }, ], 'CharacterSetName': 'string', 'SecondaryAvailabilityZone': 'string', 'PubliclyAccessible': True|False, 'StatusInfos': [ { 'StatusType': 'string', 'Normal': True|False, 'Status': 'string', 'Message': 'string' }, ], 'StorageType': 'string', 'TdeCredentialArn': 'string', 'DbInstancePort': 123, 'DBClusterIdentifier': 'string', 'StorageEncrypted': True|False, 'KmsKeyId': 'string', 'DbiResourceId': 'string', 'CACertificateIdentifier': 'string', 'DomainMemberships': [ { 'Domain': 'string', 'Status': 'string', 'FQDN': 'string', 'IAMRoleName': 'string' }, ], 'CopyTagsToSnapshot': True|False, 'MonitoringInterval': 123, 'EnhancedMonitoringResourceArn': 'string', 'MonitoringRoleArn': 'string', 'PromotionTier': 123, 'DBInstanceArn': 'string', 'Timezone': 'string', 'IAMDatabaseAuthenticationEnabled': True|False, 'PerformanceInsightsEnabled': True|False, 'PerformanceInsightsKMSKeyId': 'string', 'EnabledCloudwatchLogsExports': [ 'string', ] } } **Response Structure** - *(dict) --* - **DBInstance** *(dict) --* Contains the details of an Amazon Neptune DB instance. This data type is used as a response element in the DescribeDBInstances action. - **DBInstanceIdentifier** *(string) --* Contains a user-supplied database identifier. This identifier is the unique key that identifies a DB instance. 
- **DBInstanceClass** *(string) --* Contains the name of the compute and memory capacity class of the DB instance. - **Engine** *(string) --* Provides the name of the database engine to be used for this DB instance. - **DBInstanceStatus** *(string) --* Specifies the current state of this database. - **MasterUsername** *(string) --* Contains the master username for the DB instance. - **DBName** *(string) --* The database name. - **Endpoint** *(dict) --* Specifies the connection endpoint. - **Address** *(string) --* Specifies the DNS address of the DB instance. - **Port** *(integer) --* Specifies the port that the database engine is listening on. - **HostedZoneId** *(string) --* Specifies the ID that Amazon Route 53 assigns when you create a hosted zone. - **AllocatedStorage** *(integer) --* Specifies the allocated storage size specified in gibibytes. - **InstanceCreateTime** *(datetime) --* Provides the date and time the DB instance was created. - **PreferredBackupWindow** *(string) --* Specifies the daily time range during which automated backups are created if automated backups are enabled, as determined by the ``BackupRetentionPeriod`` . - **BackupRetentionPeriod** *(integer) --* Specifies the number of days for which automatic DB snapshots are retained. - **DBSecurityGroups** *(list) --* Provides a list of DB security group elements containing only ``DBSecurityGroup.Name`` and ``DBSecurityGroup.Status`` subelements. - *(dict) --* Specifies membership in a designated DB security group. - **DBSecurityGroupName** *(string) --* The name of the DB security group. - **Status** *(string) --* The status of the DB security group. - **VpcSecurityGroups** *(list) --* Provides a list of VPC security group elements that the DB instance belongs to. - *(dict) --* This data type is used as a response element for queries on VPC security group membership. - **VpcSecurityGroupId** *(string) --* The name of the VPC security group. - **Status** *(string) --* The status of the VPC security group. - **DBParameterGroups** *(list) --* Provides the list of DB parameter groups applied to this DB instance. - *(dict) --* The status of the DB parameter group. This data type is used as a response element in the following actions: * CreateDBInstance * DeleteDBInstance * ModifyDBInstance * RebootDBInstance - **DBParameterGroupName** *(string) --* The name of the DB parameter group. - **ParameterApplyStatus** *(string) --* The status of parameter updates. - **AvailabilityZone** *(string) --* Specifies the name of the Availability Zone the DB instance is located in. - **DBSubnetGroup** *(dict) --* Specifies information on the subnet group associated with the DB instance, including the name, description, and subnets in the subnet group. - **DBSubnetGroupName** *(string) --* The name of the DB subnet group. - **DBSubnetGroupDescription** *(string) --* Provides the description of the DB subnet group. - **VpcId** *(string) --* Provides the VpcId of the DB subnet group. - **SubnetGroupStatus** *(string) --* Provides the status of the DB subnet group. - **Subnets** *(list) --* Contains a list of Subnet elements. - *(dict) --* Specifies a subnet. This data type is used as a response element in the DescribeDBSubnetGroups action. - **SubnetIdentifier** *(string) --* Specifies the identifier of the subnet. - **SubnetAvailabilityZone** *(dict) --* Specifies the EC2 Availability Zone that the subnet is in. - **Name** *(string) --* The name of the availability zone. - **SubnetStatus** *(string) --* Specifies the status of the subnet. 
- **DBSubnetGroupArn** *(string) --* The Amazon Resource Name (ARN) for the DB subnet group. - **PreferredMaintenanceWindow** *(string) --* Specifies the weekly time range during which system maintenance can occur, in Universal Coordinated Time (UTC). - **PendingModifiedValues** *(dict) --* Specifies that changes to the DB instance are pending. This element is only included when changes are pending. Specific changes are identified by subelements. - **DBInstanceClass** *(string) --* Contains the new ``DBInstanceClass`` for the DB instance that will be applied or is currently being applied. - **AllocatedStorage** *(integer) --* Contains the new ``AllocatedStorage`` size for the DB instance that will be applied or is currently being applied. - **MasterUserPassword** *(string) --* Contains the pending or currently-in-progress change of the master credentials for the DB instance. - **Port** *(integer) --* Specifies the pending port for the DB instance. - **BackupRetentionPeriod** *(integer) --* Specifies the pending number of days for which automated backups are retained. - **MultiAZ** *(boolean) --* Indicates that the Single-AZ DB instance is to change to a Multi-AZ deployment. - **EngineVersion** *(string) --* Indicates the database engine version. - **LicenseModel** *(string) --* The license model for the DB instance. Valid values: ``license-included`` | ``bring-your-own-license`` | ``general-public-license`` - **Iops** *(integer) --* Specifies the new Provisioned IOPS value for the DB instance that will be applied or is currently being applied. - **DBInstanceIdentifier** *(string) --* Contains the new ``DBInstanceIdentifier`` for the DB instance that will be applied or is currently being applied. - **StorageType** *(string) --* Specifies the storage type to be associated with the DB instance. - **CACertificateIdentifier** *(string) --* Specifies the identifier of the CA certificate for the DB instance. - **DBSubnetGroupName** *(string) --* The new DB subnet group for the DB instance. - **PendingCloudwatchLogsExports** *(dict) --* Specifies the CloudWatch logs to be exported. - **LogTypesToEnable** *(list) --* Log types that are in the process of being enabled. After they are enabled, these log types are exported to CloudWatch Logs. - *(string) --* - **LogTypesToDisable** *(list) --* Log types that are in the process of being deactivated. After they are deactivated, these log types aren't exported to CloudWatch Logs. - *(string) --* - **LatestRestorableTime** *(datetime) --* Specifies the latest time to which a database can be restored with point-in-time restore. - **MultiAZ** *(boolean) --* Specifies if the DB instance is a Multi-AZ deployment. - **EngineVersion** *(string) --* Indicates the database engine version. - **AutoMinorVersionUpgrade** *(boolean) --* Indicates that minor version patches are applied automatically. - **ReadReplicaSourceDBInstanceIdentifier** *(string) --* Contains the identifier of the source DB instance if this DB instance is a Read Replica. - **ReadReplicaDBInstanceIdentifiers** *(list) --* Contains one or more identifiers of the Read Replicas associated with this DB instance. - *(string) --* - **ReadReplicaDBClusterIdentifiers** *(list) --* Contains one or more identifiers of DB clusters that are Read Replicas of this DB instance. - *(string) --* - **LicenseModel** *(string) --* License model information for this DB instance. - **Iops** *(integer) --* Specifies the Provisioned IOPS (I/O operations per second) value. 
- **OptionGroupMemberships** *(list) --* Provides the list of option group memberships for this DB instance. - *(dict) --* Provides information on the option groups the DB instance is a member of. - **OptionGroupName** *(string) --* The name of the option group that the instance belongs to. - **Status** *(string) --* The status of the DB instance's option group membership. Valid values are: ``in-sync`` , ``pending-apply`` , ``pending-removal`` , ``pending-maintenance-apply`` , ``pending-maintenance-removal`` , ``applying`` , ``removing`` , and ``failed`` . - **CharacterSetName** *(string) --* If present, specifies the name of the character set that this instance is associated with. - **SecondaryAvailabilityZone** *(string) --* If present, specifies the name of the secondary Availability Zone for a DB instance with multi-AZ support. - **PubliclyAccessible** *(boolean) --* This flag should no longer be used. - **StatusInfos** *(list) --* The status of a Read Replica. If the instance is not a Read Replica, this is blank. - *(dict) --* Provides a list of status information for a DB instance. - **StatusType** *(string) --* This value is currently "read replication." - **Normal** *(boolean) --* Boolean value that is true if the instance is operating normally, or false if the instance is in an error state. - **Status** *(string) --* Status of the DB instance. For a StatusType of read replica, the values can be replicating, error, stopped, or terminated. - **Message** *(string) --* Details of the error if there is an error for the instance. If the instance is not in an error state, this value is blank. - **StorageType** *(string) --* Specifies the storage type associated with the DB instance. - **TdeCredentialArn** *(string) --* The ARN from the key store with which the instance is associated for TDE encryption. - **DbInstancePort** *(integer) --* Specifies the port that the DB instance listens on. If the DB instance is part of a DB cluster, this can be a different port than the DB cluster port. - **DBClusterIdentifier** *(string) --* If the DB instance is a member of a DB cluster, contains the name of the DB cluster that the DB instance is a member of. - **StorageEncrypted** *(boolean) --* Not supported: The encryption for DB instances is managed by the DB cluster. - **KmsKeyId** *(string) --* Not supported: The encryption for DB instances is managed by the DB cluster. - **DbiResourceId** *(string) --* The AWS Region-unique, immutable identifier for the DB instance. This identifier is found in AWS CloudTrail log entries whenever the AWS KMS key for the DB instance is accessed. - **CACertificateIdentifier** *(string) --* The identifier of the CA certificate for this DB instance. - **DomainMemberships** *(list) --* Not supported. - *(dict) --* An Active Directory Domain membership record associated with a DB instance. - **Domain** *(string) --* The identifier of the Active Directory Domain. - **Status** *(string) --* The status of the DB instance's Active Directory Domain membership, such as joined, pending-join, or failed. - **FQDN** *(string) --* The fully qualified domain name of the Active Directory Domain. - **IAMRoleName** *(string) --* The name of the IAM role to be used when making API calls to the Directory Service. - **CopyTagsToSnapshot** *(boolean) --* Specifies whether tags are copied from the DB instance to snapshots of the DB instance. - **MonitoringInterval** *(integer) --* The interval, in seconds, between points when Enhanced Monitoring metrics are collected for the DB instance. 
- **EnhancedMonitoringResourceArn** *(string) --* The Amazon Resource Name (ARN) of the Amazon CloudWatch Logs log stream that receives the Enhanced Monitoring metrics data for the DB instance. - **MonitoringRoleArn** *(string) --* The ARN for the IAM role that permits Neptune to send Enhanced Monitoring metrics to Amazon CloudWatch Logs. - **PromotionTier** *(integer) --* A value that specifies the order in which a Read Replica is promoted to the primary instance after a failure of the existing primary instance. - **DBInstanceArn** *(string) --* The Amazon Resource Name (ARN) for the DB instance. - **Timezone** *(string) --* Not supported. - **IAMDatabaseAuthenticationEnabled** *(boolean) --* True if AWS Identity and Access Management (IAM) authentication is enabled, and otherwise false. - **PerformanceInsightsEnabled** *(boolean) --* True if Performance Insights is enabled for the DB instance, and otherwise false. - **PerformanceInsightsKMSKeyId** *(string) --* The AWS KMS key identifier for encryption of Performance Insights data. The KMS key ID is the Amazon Resource Name (ARN), KMS key identifier, or the KMS key alias for the KMS encryption key. - **EnabledCloudwatchLogsExports** *(list) --* A list of log types that this DB instance is configured to export to CloudWatch Logs. - *(string) --* :type DBInstanceIdentifier: string :param DBInstanceIdentifier: **[REQUIRED]** The DB instance identifier. This parameter is stored as a lowercase string. Constraints: * Must match the identifier of an existing DBInstance. :type ForceFailover: boolean :param ForceFailover: When ``true`` , the reboot is conducted through a MultiAZ failover. Constraint: You can\'t specify ``true`` if the instance is not configured for MultiAZ. :rtype: dict :returns: """ pass def remove_role_from_db_cluster(self, DBClusterIdentifier: str, RoleArn: str): """ Disassociates an Identity and Access Management (IAM) role from a DB cluster. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/RemoveRoleFromDBCluster>`_ **Request Syntax** :: response = client.remove_role_from_db_cluster( DBClusterIdentifier='string', RoleArn='string' ) :type DBClusterIdentifier: string :param DBClusterIdentifier: **[REQUIRED]** The name of the DB cluster to disassociate the IAM role from. :type RoleArn: string :param RoleArn: **[REQUIRED]** The Amazon Resource Name (ARN) of the IAM role to disassociate from the DB cluster, for example ``arn:aws:iam::123456789012:role/NeptuneAccessRole`` . :returns: None """ pass def remove_source_identifier_from_subscription(self, SubscriptionName: str, SourceIdentifier: str) -> Dict: """ Removes a source identifier from an existing event notification subscription. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/RemoveSourceIdentifierFromSubscription>`_ **Request Syntax** :: response = client.remove_source_identifier_from_subscription( SubscriptionName='string', SourceIdentifier='string' ) **Response Syntax** :: { 'EventSubscription': { 'CustomerAwsId': 'string', 'CustSubscriptionId': 'string', 'SnsTopicArn': 'string', 'Status': 'string', 'SubscriptionCreationTime': 'string', 'SourceType': 'string', 'SourceIdsList': [ 'string', ], 'EventCategoriesList': [ 'string', ], 'Enabled': True|False, 'EventSubscriptionArn': 'string' } } **Response Structure** - *(dict) --* - **EventSubscription** *(dict) --* Contains the results of a successful invocation of the DescribeEventSubscriptions action. 
- **CustomerAwsId** *(string) --* The AWS customer account associated with the event notification subscription. - **CustSubscriptionId** *(string) --* The event notification subscription Id. - **SnsTopicArn** *(string) --* The topic ARN of the event notification subscription. - **Status** *(string) --* The status of the event notification subscription. Constraints: Can be one of the following: creating | modifying | deleting | active | no-permission | topic-not-exist The status "no-permission" indicates that Neptune no longer has permission to post to the SNS topic. The status "topic-not-exist" indicates that the topic was deleted after the subscription was created. - **SubscriptionCreationTime** *(string) --* The time the event notification subscription was created. - **SourceType** *(string) --* The source type for the event notification subscription. - **SourceIdsList** *(list) --* A list of source IDs for the event notification subscription. - *(string) --* - **EventCategoriesList** *(list) --* A list of event categories for the event notification subscription. - *(string) --* - **Enabled** *(boolean) --* A Boolean value indicating if the subscription is enabled. True indicates the subscription is enabled. - **EventSubscriptionArn** *(string) --* The Amazon Resource Name (ARN) for the event subscription. :type SubscriptionName: string :param SubscriptionName: **[REQUIRED]** The name of the event notification subscription you want to remove a source identifier from. :type SourceIdentifier: string :param SourceIdentifier: **[REQUIRED]** The source identifier to be removed from the subscription, such as the **DB instance identifier** for a DB instance or the name of a security group. :rtype: dict :returns: """ pass def remove_tags_from_resource(self, ResourceName: str, TagKeys: List): """ Removes metadata tags from an Amazon Neptune resource. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/RemoveTagsFromResource>`_ **Request Syntax** :: response = client.remove_tags_from_resource( ResourceName='string', TagKeys=[ 'string', ] ) :type ResourceName: string :param ResourceName: **[REQUIRED]** The Amazon Neptune resource that the tags are removed from. This value is an Amazon Resource Name (ARN). For information about creating an ARN, see `Constructing an Amazon Resource Name (ARN) <https://docs.aws.amazon.com/neptune/latest/UserGuide/tagging.ARN.html#tagging.ARN.Constructing>`__ . :type TagKeys: list :param TagKeys: **[REQUIRED]** The tag key (name) of the tag to be removed. - *(string) --* :returns: None """ pass def reset_db_cluster_parameter_group(self, DBClusterParameterGroupName: str, ResetAllParameters: bool = None, Parameters: List = None) -> Dict: """ Modifies the parameters of a DB cluster parameter group to the default value. To reset specific parameters submit a list of the following: ``ParameterName`` and ``ApplyMethod`` . To reset the entire DB cluster parameter group, specify the ``DBClusterParameterGroupName`` and ``ResetAllParameters`` parameters. When resetting the entire group, dynamic parameters are updated immediately and static parameters are set to ``pending-reboot`` to take effect on the next DB instance restart or RebootDBInstance request. You must call RebootDBInstance for every DB instance in your DB cluster that you want the updated static parameter to apply to. 
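For illustration, resetting every parameter in a cluster parameter group might look like the following sketch; the group name is an assumption, not a value from this API. ::

    import boto3

    client = boto3.client('neptune')

    # Reset all parameters in the (hypothetical) group to their default values.
    response = client.reset_db_cluster_parameter_group(
        DBClusterParameterGroupName='my-cluster-parameter-group',
        ResetAllParameters=True
    )

    print(response['DBClusterParameterGroupName'])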
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/ResetDBClusterParameterGroup>`_ **Request Syntax** :: response = client.reset_db_cluster_parameter_group( DBClusterParameterGroupName='string', ResetAllParameters=True|False, Parameters=[ { 'ParameterName': 'string', 'ParameterValue': 'string', 'Description': 'string', 'Source': 'string', 'ApplyType': 'string', 'DataType': 'string', 'AllowedValues': 'string', 'IsModifiable': True|False, 'MinimumEngineVersion': 'string', 'ApplyMethod': 'immediate'|'pending-reboot' }, ] ) **Response Syntax** :: { 'DBClusterParameterGroupName': 'string' } **Response Structure** - *(dict) --* - **DBClusterParameterGroupName** *(string) --* The name of the DB cluster parameter group. Constraints: * Must be 1 to 255 letters or numbers. * First character must be a letter * Cannot end with a hyphen or contain two consecutive hyphens .. note:: This value is stored as a lowercase string. :type DBClusterParameterGroupName: string :param DBClusterParameterGroupName: **[REQUIRED]** The name of the DB cluster parameter group to reset. :type ResetAllParameters: boolean :param ResetAllParameters: A value that is set to ``true`` to reset all parameters in the DB cluster parameter group to their default values, and ``false`` otherwise. You can\'t use this parameter if there is a list of parameter names specified for the ``Parameters`` parameter. :type Parameters: list :param Parameters: A list of parameter names in the DB cluster parameter group to reset to the default values. You can\'t use this parameter if the ``ResetAllParameters`` parameter is set to ``true`` . - *(dict) --* Specifies a parameter. - **ParameterName** *(string) --* Specifies the name of the parameter. - **ParameterValue** *(string) --* Specifies the value of the parameter. - **Description** *(string) --* Provides a description of the parameter. - **Source** *(string) --* Indicates the source of the parameter value. - **ApplyType** *(string) --* Specifies the engine specific parameters type. - **DataType** *(string) --* Specifies the valid data type for the parameter. - **AllowedValues** *(string) --* Specifies the valid range of values for the parameter. - **IsModifiable** *(boolean) --* Indicates whether (``true`` ) or not (``false`` ) the parameter can be modified. Some parameters have security or operational implications that prevent them from being changed. - **MinimumEngineVersion** *(string) --* The earliest engine version to which the parameter can apply. - **ApplyMethod** *(string) --* Indicates when to apply parameter updates. :rtype: dict :returns: """ pass def reset_db_parameter_group(self, DBParameterGroupName: str, ResetAllParameters: bool = None, Parameters: List = None) -> Dict: """ Modifies the parameters of a DB parameter group to the engine/system default value. To reset specific parameters, provide a list of the following: ``ParameterName`` and ``ApplyMethod`` . To reset the entire DB parameter group, specify the ``DBParameterGroup`` name and ``ResetAllParameters`` parameters. When resetting the entire group, dynamic parameters are updated immediately and static parameters are set to ``pending-reboot`` to take effect on the next DB instance restart or ``RebootDBInstance`` request. 
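For illustration, resetting a single parameter might look like the following sketch; the group and parameter names are assumptions. ::

    import boto3

    client = boto3.client('neptune')

    # Reset one named parameter; the static change takes effect on the next reboot.
    response = client.reset_db_parameter_group(
        DBParameterGroupName='my-db-parameter-group',
        Parameters=[
            {
                'ParameterName': 'neptune_query_timeout',
                'ApplyMethod': 'pending-reboot'
            }
        ]
    )

    print(response['DBParameterGroupName'])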
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/ResetDBParameterGroup>`_ **Request Syntax** :: response = client.reset_db_parameter_group( DBParameterGroupName='string', ResetAllParameters=True|False, Parameters=[ { 'ParameterName': 'string', 'ParameterValue': 'string', 'Description': 'string', 'Source': 'string', 'ApplyType': 'string', 'DataType': 'string', 'AllowedValues': 'string', 'IsModifiable': True|False, 'MinimumEngineVersion': 'string', 'ApplyMethod': 'immediate'|'pending-reboot' }, ] ) **Response Syntax** :: { 'DBParameterGroupName': 'string' } **Response Structure** - *(dict) --* - **DBParameterGroupName** *(string) --* Provides the name of the DB parameter group. :type DBParameterGroupName: string :param DBParameterGroupName: **[REQUIRED]** The name of the DB parameter group. Constraints: * Must match the name of an existing DBParameterGroup. :type ResetAllParameters: boolean :param ResetAllParameters: Specifies whether (``true`` ) or not (``false`` ) to reset all parameters in the DB parameter group to default values. Default: ``true`` :type Parameters: list :param Parameters: To reset the entire DB parameter group, specify the ``DBParameterGroup`` name and ``ResetAllParameters`` parameters. To reset specific parameters, provide a list of the following: ``ParameterName`` and ``ApplyMethod`` . A maximum of 20 parameters can be modified in a single request. Valid Values (for Apply method): ``pending-reboot`` - *(dict) --* Specifies a parameter. - **ParameterName** *(string) --* Specifies the name of the parameter. - **ParameterValue** *(string) --* Specifies the value of the parameter. - **Description** *(string) --* Provides a description of the parameter. - **Source** *(string) --* Indicates the source of the parameter value. - **ApplyType** *(string) --* Specifies the engine specific parameters type. - **DataType** *(string) --* Specifies the valid data type for the parameter. - **AllowedValues** *(string) --* Specifies the valid range of values for the parameter. - **IsModifiable** *(boolean) --* Indicates whether (``true`` ) or not (``false`` ) the parameter can be modified. Some parameters have security or operational implications that prevent them from being changed. - **MinimumEngineVersion** *(string) --* The earliest engine version to which the parameter can apply. - **ApplyMethod** *(string) --* Indicates when to apply parameter updates. :rtype: dict :returns: """ pass def restore_db_cluster_from_snapshot(self, DBClusterIdentifier: str, SnapshotIdentifier: str, Engine: str, AvailabilityZones: List = None, EngineVersion: str = None, Port: int = None, DBSubnetGroupName: str = None, DatabaseName: str = None, OptionGroupName: str = None, VpcSecurityGroupIds: List = None, Tags: List = None, KmsKeyId: str = None, EnableIAMDatabaseAuthentication: bool = None, DBClusterParameterGroupName: str = None) -> Dict: """ Creates a new DB cluster from a DB snapshot or DB cluster snapshot. If a DB snapshot is specified, the target DB cluster is created from the source DB snapshot with a default configuration and default security group. If a DB cluster snapshot is specified, the target DB cluster is created from the source DB cluster restore point with the same configuration as the original source DB cluster, except that the new DB cluster is created with the default security group. 
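For illustration, restoring a new DB cluster from an existing cluster snapshot might look like the following sketch; the identifiers are assumptions. ::

    import boto3

    client = boto3.client('neptune')

    # Create a new cluster from an existing DB cluster snapshot.
    response = client.restore_db_cluster_from_snapshot(
        DBClusterIdentifier='my-restored-cluster',
        SnapshotIdentifier='my-cluster-snapshot',
        Engine='neptune'
    )

    print(response['DBCluster']['Status'])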
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/RestoreDBClusterFromSnapshot>`_ **Request Syntax** :: response = client.restore_db_cluster_from_snapshot( AvailabilityZones=[ 'string', ], DBClusterIdentifier='string', SnapshotIdentifier='string', Engine='string', EngineVersion='string', Port=123, DBSubnetGroupName='string', DatabaseName='string', OptionGroupName='string', VpcSecurityGroupIds=[ 'string', ], Tags=[ { 'Key': 'string', 'Value': 'string' }, ], KmsKeyId='string', EnableIAMDatabaseAuthentication=True|False, DBClusterParameterGroupName='string' ) **Response Syntax** :: { 'DBCluster': { 'AllocatedStorage': 123, 'AvailabilityZones': [ 'string', ], 'BackupRetentionPeriod': 123, 'CharacterSetName': 'string', 'DatabaseName': 'string', 'DBClusterIdentifier': 'string', 'DBClusterParameterGroup': 'string', 'DBSubnetGroup': 'string', 'Status': 'string', 'PercentProgress': 'string', 'EarliestRestorableTime': datetime(2015, 1, 1), 'Endpoint': 'string', 'ReaderEndpoint': 'string', 'MultiAZ': True|False, 'Engine': 'string', 'EngineVersion': 'string', 'LatestRestorableTime': datetime(2015, 1, 1), 'Port': 123, 'MasterUsername': 'string', 'DBClusterOptionGroupMemberships': [ { 'DBClusterOptionGroupName': 'string', 'Status': 'string' }, ], 'PreferredBackupWindow': 'string', 'PreferredMaintenanceWindow': 'string', 'ReplicationSourceIdentifier': 'string', 'ReadReplicaIdentifiers': [ 'string', ], 'DBClusterMembers': [ { 'DBInstanceIdentifier': 'string', 'IsClusterWriter': True|False, 'DBClusterParameterGroupStatus': 'string', 'PromotionTier': 123 }, ], 'VpcSecurityGroups': [ { 'VpcSecurityGroupId': 'string', 'Status': 'string' }, ], 'HostedZoneId': 'string', 'StorageEncrypted': True|False, 'KmsKeyId': 'string', 'DbClusterResourceId': 'string', 'DBClusterArn': 'string', 'AssociatedRoles': [ { 'RoleArn': 'string', 'Status': 'string' }, ], 'IAMDatabaseAuthenticationEnabled': True|False, 'CloneGroupId': 'string', 'ClusterCreateTime': datetime(2015, 1, 1) } } **Response Structure** - *(dict) --* - **DBCluster** *(dict) --* Contains the details of an Amazon Neptune DB cluster. This data type is used as a response element in the DescribeDBClusters action. - **AllocatedStorage** *(integer) --* ``AllocatedStorage`` always returns 1, because Neptune DB cluster storage size is not fixed, but instead automatically adjusts as needed. - **AvailabilityZones** *(list) --* Provides the list of EC2 Availability Zones that instances in the DB cluster can be created in. - *(string) --* - **BackupRetentionPeriod** *(integer) --* Specifies the number of days for which automatic DB snapshots are retained. - **CharacterSetName** *(string) --* If present, specifies the name of the character set that this cluster is associated with. - **DatabaseName** *(string) --* Contains the name of the initial database of this DB cluster that was provided at create time, if one was specified when the DB cluster was created. This same name is returned for the life of the DB cluster. - **DBClusterIdentifier** *(string) --* Contains a user-supplied DB cluster identifier. This identifier is the unique key that identifies a DB cluster. - **DBClusterParameterGroup** *(string) --* Specifies the name of the DB cluster parameter group for the DB cluster. - **DBSubnetGroup** *(string) --* Specifies information on the subnet group associated with the DB cluster, including the name, description, and subnets in the subnet group. - **Status** *(string) --* Specifies the current state of this DB cluster. 
- **PercentProgress** *(string) --* Specifies the progress of the operation as a percentage. - **EarliestRestorableTime** *(datetime) --* Specifies the earliest time to which a database can be restored with point-in-time restore. - **Endpoint** *(string) --* Specifies the connection endpoint for the primary instance of the DB cluster. - **ReaderEndpoint** *(string) --* The reader endpoint for the DB cluster. The reader endpoint for a DB cluster load-balances connections across the Read Replicas that are available in a DB cluster. As clients request new connections to the reader endpoint, Neptune distributes the connection requests among the Read Replicas in the DB cluster. This functionality can help balance your read workload across multiple Read Replicas in your DB cluster. If a failover occurs, and the Read Replica that you are connected to is promoted to be the primary instance, your connection is dropped. To continue sending your read workload to other Read Replicas in the cluster, you can then reconnect to the reader endpoint. - **MultiAZ** *(boolean) --* Specifies whether the DB cluster has instances in multiple Availability Zones. - **Engine** *(string) --* Provides the name of the database engine to be used for this DB cluster. - **EngineVersion** *(string) --* Indicates the database engine version. - **LatestRestorableTime** *(datetime) --* Specifies the latest time to which a database can be restored with point-in-time restore. - **Port** *(integer) --* Specifies the port that the database engine is listening on. - **MasterUsername** *(string) --* Contains the master username for the DB cluster. - **DBClusterOptionGroupMemberships** *(list) --* Provides the list of option group memberships for this DB cluster. - *(dict) --* Contains status information for a DB cluster option group. - **DBClusterOptionGroupName** *(string) --* Specifies the name of the DB cluster option group. - **Status** *(string) --* Specifies the status of the DB cluster option group. - **PreferredBackupWindow** *(string) --* Specifies the daily time range during which automated backups are created if automated backups are enabled, as determined by the ``BackupRetentionPeriod`` . - **PreferredMaintenanceWindow** *(string) --* Specifies the weekly time range during which system maintenance can occur, in Universal Coordinated Time (UTC). - **ReplicationSourceIdentifier** *(string) --* Not supported by Neptune. - **ReadReplicaIdentifiers** *(list) --* Contains one or more identifiers of the Read Replicas associated with this DB cluster. - *(string) --* - **DBClusterMembers** *(list) --* Provides the list of instances that make up the DB cluster. - *(dict) --* Contains information about an instance that is part of a DB cluster. - **DBInstanceIdentifier** *(string) --* Specifies the instance identifier for this member of the DB cluster. - **IsClusterWriter** *(boolean) --* Value that is ``true`` if the cluster member is the primary instance for the DB cluster and ``false`` otherwise. - **DBClusterParameterGroupStatus** *(string) --* Specifies the status of the DB cluster parameter group for this member of the DB cluster. - **PromotionTier** *(integer) --* A value that specifies the order in which a Read Replica is promoted to the primary instance after a failure of the existing primary instance. - **VpcSecurityGroups** *(list) --* Provides a list of VPC security groups that the DB cluster belongs to. - *(dict) --* This data type is used as a response element for queries on VPC security group membership. 
- **VpcSecurityGroupId** *(string) --* The name of the VPC security group. - **Status** *(string) --* The status of the VPC security group. - **HostedZoneId** *(string) --* Specifies the ID that Amazon Route 53 assigns when you create a hosted zone. - **StorageEncrypted** *(boolean) --* Specifies whether the DB cluster is encrypted. - **KmsKeyId** *(string) --* If ``StorageEncrypted`` is true, the AWS KMS key identifier for the encrypted DB cluster. - **DbClusterResourceId** *(string) --* The AWS Region-unique, immutable identifier for the DB cluster. This identifier is found in AWS CloudTrail log entries whenever the AWS KMS key for the DB cluster is accessed. - **DBClusterArn** *(string) --* The Amazon Resource Name (ARN) for the DB cluster. - **AssociatedRoles** *(list) --* Provides a list of the AWS Identity and Access Management (IAM) roles that are associated with the DB cluster. IAM roles that are associated with a DB cluster grant permission for the DB cluster to access other AWS services on your behalf. - *(dict) --* Describes an AWS Identity and Access Management (IAM) role that is associated with a DB cluster. - **RoleArn** *(string) --* The Amazon Resource Name (ARN) of the IAM role that is associated with the DB cluster. - **Status** *(string) --* Describes the state of association between the IAM role and the DB cluster. The Status property returns one of the following values: * ``ACTIVE`` - the IAM role ARN is associated with the DB cluster and can be used to access other AWS services on your behalf. * ``PENDING`` - the IAM role ARN is being associated with the DB cluster. * ``INVALID`` - the IAM role ARN is associated with the DB cluster, but the DB cluster is unable to assume the IAM role in order to access other AWS services on your behalf. - **IAMDatabaseAuthenticationEnabled** *(boolean) --* True if mapping of AWS Identity and Access Management (IAM) accounts to database accounts is enabled, and otherwise false. - **CloneGroupId** *(string) --* Identifies the clone group to which the DB cluster is associated. - **ClusterCreateTime** *(datetime) --* Specifies the time when the DB cluster was created, in Universal Coordinated Time (UTC). :type AvailabilityZones: list :param AvailabilityZones: Provides the list of EC2 Availability Zones that instances in the restored DB cluster can be created in. - *(string) --* :type DBClusterIdentifier: string :param DBClusterIdentifier: **[REQUIRED]** The name of the DB cluster to create from the DB snapshot or DB cluster snapshot. This parameter isn\'t case-sensitive. Constraints: * Must contain from 1 to 63 letters, numbers, or hyphens * First character must be a letter * Cannot end with a hyphen or contain two consecutive hyphens Example: ``my-snapshot-id`` :type SnapshotIdentifier: string :param SnapshotIdentifier: **[REQUIRED]** The identifier for the DB snapshot or DB cluster snapshot to restore from. You can use either the name or the Amazon Resource Name (ARN) to specify a DB cluster snapshot. However, you can use only the ARN to specify a DB snapshot. Constraints: * Must match the identifier of an existing Snapshot. :type Engine: string :param Engine: **[REQUIRED]** The database engine to use for the new DB cluster. Default: The same as source Constraint: Must be compatible with the engine of the source :type EngineVersion: string :param EngineVersion: The version of the database engine to use for the new DB cluster. :type Port: integer :param Port: The port number on which the new DB cluster accepts connections. 
Constraints: Value must be ``1150-65535`` Default: The same port as the original DB cluster. :type DBSubnetGroupName: string :param DBSubnetGroupName: The name of the DB subnet group to use for the new DB cluster. Constraints: If supplied, must match the name of an existing DBSubnetGroup. Example: ``mySubnetgroup`` :type DatabaseName: string :param DatabaseName: Not supported. :type OptionGroupName: string :param OptionGroupName: The name of the option group to use for the restored DB cluster. :type VpcSecurityGroupIds: list :param VpcSecurityGroupIds: A list of VPC security groups that the new DB cluster will belong to. - *(string) --* :type Tags: list :param Tags: The tags to be assigned to the restored DB cluster. - *(dict) --* Metadata assigned to an Amazon Neptune resource consisting of a key-value pair. - **Key** *(string) --* A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). - **Value** *(string) --* A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). :type KmsKeyId: string :param KmsKeyId: The AWS KMS key identifier to use when restoring an encrypted DB cluster from a DB snapshot or DB cluster snapshot. The KMS key identifier is the Amazon Resource Name (ARN) for the KMS encryption key. If you are restoring a DB cluster with the same AWS account that owns the KMS encryption key used to encrypt the new DB cluster, then you can use the KMS key alias instead of the ARN for the KMS encryption key. If you do not specify a value for the ``KmsKeyId`` parameter, then the following will occur: * If the DB snapshot or DB cluster snapshot in ``SnapshotIdentifier`` is encrypted, then the restored DB cluster is encrypted using the KMS key that was used to encrypt the DB snapshot or DB cluster snapshot. * If the DB snapshot or DB cluster snapshot in ``SnapshotIdentifier`` is not encrypted, then the restored DB cluster is not encrypted. :type EnableIAMDatabaseAuthentication: boolean :param EnableIAMDatabaseAuthentication: True to enable mapping of AWS Identity and Access Management (IAM) accounts to database accounts, and otherwise false. Default: ``false`` :type DBClusterParameterGroupName: string :param DBClusterParameterGroupName: The name of the DB cluster parameter group to associate with the new DB cluster. Constraints: * If supplied, must match the name of an existing DBClusterParameterGroup. :rtype: dict :returns: """ pass def restore_db_cluster_to_point_in_time(self, DBClusterIdentifier: str, SourceDBClusterIdentifier: str, RestoreType: str = None, RestoreToTime: datetime = None, UseLatestRestorableTime: bool = None, Port: int = None, DBSubnetGroupName: str = None, OptionGroupName: str = None, VpcSecurityGroupIds: List = None, Tags: List = None, KmsKeyId: str = None, EnableIAMDatabaseAuthentication: bool = None, DBClusterParameterGroupName: str = None) -> Dict: """ Restores a DB cluster to an arbitrary point in time. Users can restore to any point in time before ``LatestRestorableTime`` for up to ``BackupRetentionPeriod`` days. 
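For illustration, restoring to the latest restorable time might look like the following sketch; the identifiers are assumptions. ::

    import boto3

    client = boto3.client('neptune')

    # Restore a new cluster from the source cluster's latest restorable time.
    response = client.restore_db_cluster_to_point_in_time(
        DBClusterIdentifier='my-restored-cluster',
        SourceDBClusterIdentifier='my-source-cluster',
        UseLatestRestorableTime=True
    )

    print(response['DBCluster']['Status'])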
The target DB cluster is created from the source DB cluster with the same configuration as the original DB cluster, except that the new DB cluster is created with the default DB security group. .. note:: This action only restores the DB cluster, not the DB instances for that DB cluster. You must invoke the CreateDBInstance action to create DB instances for the restored DB cluster, specifying the identifier of the restored DB cluster in ``DBClusterIdentifier`` . You can create DB instances only after the ``RestoreDBClusterToPointInTime`` action has completed and the DB cluster is available. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/neptune-2014-10-31/RestoreDBClusterToPointInTime>`_ **Request Syntax** :: response = client.restore_db_cluster_to_point_in_time( DBClusterIdentifier='string', RestoreType='string', SourceDBClusterIdentifier='string', RestoreToTime=datetime(2015, 1, 1), UseLatestRestorableTime=True|False, Port=123, DBSubnetGroupName='string', OptionGroupName='string', VpcSecurityGroupIds=[ 'string', ], Tags=[ { 'Key': 'string', 'Value': 'string' }, ], KmsKeyId='string', EnableIAMDatabaseAuthentication=True|False, DBClusterParameterGroupName='string' ) **Response Syntax** :: { 'DBCluster': { 'AllocatedStorage': 123, 'AvailabilityZones': [ 'string', ], 'BackupRetentionPeriod': 123, 'CharacterSetName': 'string', 'DatabaseName': 'string', 'DBClusterIdentifier': 'string', 'DBClusterParameterGroup': 'string', 'DBSubnetGroup': 'string', 'Status': 'string', 'PercentProgress': 'string', 'EarliestRestorableTime': datetime(2015, 1, 1), 'Endpoint': 'string', 'ReaderEndpoint': 'string', 'MultiAZ': True|False, 'Engine': 'string', 'EngineVersion': 'string', 'LatestRestorableTime': datetime(2015, 1, 1), 'Port': 123, 'MasterUsername': 'string', 'DBClusterOptionGroupMemberships': [ { 'DBClusterOptionGroupName': 'string', 'Status': 'string' }, ], 'PreferredBackupWindow': 'string', 'PreferredMaintenanceWindow': 'string', 'ReplicationSourceIdentifier': 'string', 'ReadReplicaIdentifiers': [ 'string', ], 'DBClusterMembers': [ { 'DBInstanceIdentifier': 'string', 'IsClusterWriter': True|False, 'DBClusterParameterGroupStatus': 'string', 'PromotionTier': 123 }, ], 'VpcSecurityGroups': [ { 'VpcSecurityGroupId': 'string', 'Status': 'string' }, ], 'HostedZoneId': 'string', 'StorageEncrypted': True|False, 'KmsKeyId': 'string', 'DbClusterResourceId': 'string', 'DBClusterArn': 'string', 'AssociatedRoles': [ { 'RoleArn': 'string', 'Status': 'string' }, ], 'IAMDatabaseAuthenticationEnabled': True|False, 'CloneGroupId': 'string', 'ClusterCreateTime': datetime(2015, 1, 1) } } **Response Structure** - *(dict) --* - **DBCluster** *(dict) --* Contains the details of an Amazon Neptune DB cluster. This data type is used as a response element in the DescribeDBClusters action. - **AllocatedStorage** *(integer) --* ``AllocatedStorage`` always returns 1, because Neptune DB cluster storage size is not fixed, but instead automatically adjusts as needed. - **AvailabilityZones** *(list) --* Provides the list of EC2 Availability Zones that instances in the DB cluster can be created in. - *(string) --* - **BackupRetentionPeriod** *(integer) --* Specifies the number of days for which automatic DB snapshots are retained. - **CharacterSetName** *(string) --* If present, specifies the name of the character set that this cluster is associated with. 
- **DatabaseName** *(string) --* Contains the name of the initial database of this DB cluster that was provided at create time, if one was specified when the DB cluster was created. This same name is returned for the life of the DB cluster. - **DBClusterIdentifier** *(string) --* Contains a user-supplied DB cluster identifier. This identifier is the unique key that identifies a DB cluster. - **DBClusterParameterGroup** *(string) --* Specifies the name of the DB cluster parameter group for the DB cluster. - **DBSubnetGroup** *(string) --* Specifies information on the subnet group associated with the DB cluster, including the name, description, and subnets in the subnet group. - **Status** *(string) --* Specifies the current state of this DB cluster. - **PercentProgress** *(string) --* Specifies the progress of the operation as a percentage. - **EarliestRestorableTime** *(datetime) --* Specifies the earliest time to which a database can be restored with point-in-time restore. - **Endpoint** *(string) --* Specifies the connection endpoint for the primary instance of the DB cluster. - **ReaderEndpoint** *(string) --* The reader endpoint for the DB cluster. The reader endpoint for a DB cluster load-balances connections across the Read Replicas that are available in a DB cluster. As clients request new connections to the reader endpoint, Neptune distributes the connection requests among the Read Replicas in the DB cluster. This functionality can help balance your read workload across multiple Read Replicas in your DB cluster. If a failover occurs, and the Read Replica that you are connected to is promoted to be the primary instance, your connection is dropped. To continue sending your read workload to other Read Replicas in the cluster, you can then reconnect to the reader endpoint. - **MultiAZ** *(boolean) --* Specifies whether the DB cluster has instances in multiple Availability Zones. - **Engine** *(string) --* Provides the name of the database engine to be used for this DB cluster. - **EngineVersion** *(string) --* Indicates the database engine version. - **LatestRestorableTime** *(datetime) --* Specifies the latest time to which a database can be restored with point-in-time restore. - **Port** *(integer) --* Specifies the port that the database engine is listening on. - **MasterUsername** *(string) --* Contains the master username for the DB cluster. - **DBClusterOptionGroupMemberships** *(list) --* Provides the list of option group memberships for this DB cluster. - *(dict) --* Contains status information for a DB cluster option group. - **DBClusterOptionGroupName** *(string) --* Specifies the name of the DB cluster option group. - **Status** *(string) --* Specifies the status of the DB cluster option group. - **PreferredBackupWindow** *(string) --* Specifies the daily time range during which automated backups are created if automated backups are enabled, as determined by the ``BackupRetentionPeriod`` . - **PreferredMaintenanceWindow** *(string) --* Specifies the weekly time range during which system maintenance can occur, in Universal Coordinated Time (UTC). - **ReplicationSourceIdentifier** *(string) --* Not supported by Neptune. - **ReadReplicaIdentifiers** *(list) --* Contains one or more identifiers of the Read Replicas associated with this DB cluster. - *(string) --* - **DBClusterMembers** *(list) --* Provides the list of instances that make up the DB cluster. - *(dict) --* Contains information about an instance that is part of a DB cluster. 
- **DBInstanceIdentifier** *(string) --* Specifies the instance identifier for this member of the DB cluster. - **IsClusterWriter** *(boolean) --* Value that is ``true`` if the cluster member is the primary instance for the DB cluster and ``false`` otherwise. - **DBClusterParameterGroupStatus** *(string) --* Specifies the status of the DB cluster parameter group for this member of the DB cluster. - **PromotionTier** *(integer) --* A value that specifies the order in which a Read Replica is promoted to the primary instance after a failure of the existing primary instance. - **VpcSecurityGroups** *(list) --* Provides a list of VPC security groups that the DB cluster belongs to. - *(dict) --* This data type is used as a response element for queries on VPC security group membership. - **VpcSecurityGroupId** *(string) --* The name of the VPC security group. - **Status** *(string) --* The status of the VPC security group. - **HostedZoneId** *(string) --* Specifies the ID that Amazon Route 53 assigns when you create a hosted zone. - **StorageEncrypted** *(boolean) --* Specifies whether the DB cluster is encrypted. - **KmsKeyId** *(string) --* If ``StorageEncrypted`` is true, the AWS KMS key identifier for the encrypted DB cluster. - **DbClusterResourceId** *(string) --* The AWS Region-unique, immutable identifier for the DB cluster. This identifier is found in AWS CloudTrail log entries whenever the AWS KMS key for the DB cluster is accessed. - **DBClusterArn** *(string) --* The Amazon Resource Name (ARN) for the DB cluster. - **AssociatedRoles** *(list) --* Provides a list of the AWS Identity and Access Management (IAM) roles that are associated with the DB cluster. IAM roles that are associated with a DB cluster grant permission for the DB cluster to access other AWS services on your behalf. - *(dict) --* Describes an AWS Identity and Access Management (IAM) role that is associated with a DB cluster. - **RoleArn** *(string) --* The Amazon Resource Name (ARN) of the IAM role that is associated with the DB cluster. - **Status** *(string) --* Describes the state of association between the IAM role and the DB cluster. The Status property returns one of the following values: * ``ACTIVE`` - the IAM role ARN is associated with the DB cluster and can be used to access other AWS services on your behalf. * ``PENDING`` - the IAM role ARN is being associated with the DB cluster. * ``INVALID`` - the IAM role ARN is associated with the DB cluster, but the DB cluster is unable to assume the IAM role in order to access other AWS services on your behalf. - **IAMDatabaseAuthenticationEnabled** *(boolean) --* True if mapping of AWS Identity and Access Management (IAM) accounts to database accounts is enabled, and otherwise false. - **CloneGroupId** *(string) --* Identifies the clone group to which the DB cluster is associated. - **ClusterCreateTime** *(datetime) --* Specifies the time when the DB cluster was created, in Universal Coordinated Time (UTC). :type DBClusterIdentifier: string :param DBClusterIdentifier: **[REQUIRED]** The name of the new DB cluster to be created. Constraints: * Must contain from 1 to 63 letters, numbers, or hyphens * First character must be a letter * Cannot end with a hyphen or contain two consecutive hyphens :type RestoreType: string :param RestoreType: The type of restore to be performed. The only type of restore currently supported is ``full-copy`` (the default). 
:type SourceDBClusterIdentifier: string :param SourceDBClusterIdentifier: **[REQUIRED]** The identifier of the source DB cluster from which to restore. Constraints: * Must match the identifier of an existing DBCluster. :type RestoreToTime: datetime :param RestoreToTime: The date and time to restore the DB cluster to. Valid Values: Value must be a time in Universal Coordinated Time (UTC) format Constraints: * Must be before the latest restorable time for the DB instance * Must be specified if ``UseLatestRestorableTime`` parameter is not provided * Cannot be specified if ``UseLatestRestorableTime`` parameter is true * Cannot be specified if ``RestoreType`` parameter is ``copy-on-write`` Example: ``2015-03-07T23:45:00Z`` :type UseLatestRestorableTime: boolean :param UseLatestRestorableTime: A value that is set to ``true`` to restore the DB cluster to the latest restorable backup time, and ``false`` otherwise. Default: ``false`` Constraints: Cannot be specified if ``RestoreToTime`` parameter is provided. :type Port: integer :param Port: The port number on which the new DB cluster accepts connections. Constraints: Value must be ``1150-65535`` Default: The same port as the original DB cluster. :type DBSubnetGroupName: string :param DBSubnetGroupName: The DB subnet group name to use for the new DB cluster. Constraints: If supplied, must match the name of an existing DBSubnetGroup. Example: ``mySubnetgroup`` :type OptionGroupName: string :param OptionGroupName: The name of the option group for the new DB cluster. :type VpcSecurityGroupIds: list :param VpcSecurityGroupIds: A list of VPC security groups that the new DB cluster belongs to. - *(string) --* :type Tags: list :param Tags: The tags to be applied to the restored DB cluster. - *(dict) --* Metadata assigned to an Amazon Neptune resource consisting of a key-value pair. - **Key** *(string) --* A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). - **Value** *(string) --* A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and can\'t be prefixed with \"aws:\" or \"rds:\". The string can only contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). :type KmsKeyId: string :param KmsKeyId: The AWS KMS key identifier to use when restoring an encrypted DB cluster from an encrypted DB cluster. The KMS key identifier is the Amazon Resource Name (ARN) for the KMS encryption key. If you are restoring a DB cluster with the same AWS account that owns the KMS encryption key used to encrypt the new DB cluster, then you can use the KMS key alias instead of the ARN for the KMS encryption key. You can restore to a new DB cluster and encrypt the new DB cluster with a KMS key that is different than the KMS key used to encrypt the source DB cluster. The new DB cluster is encrypted with the KMS key identified by the ``KmsKeyId`` parameter. If you do not specify a value for the ``KmsKeyId`` parameter, then the following will occur: * If the DB cluster is encrypted, then the restored DB cluster is encrypted using the KMS key that was used to encrypt the source DB cluster. 
* If the DB cluster is not encrypted, then the restored DB cluster is not encrypted. If ``DBClusterIdentifier`` refers to a DB cluster that is not encrypted, then the restore request is rejected. :type EnableIAMDatabaseAuthentication: boolean :param EnableIAMDatabaseAuthentication: True to enable mapping of AWS Identity and Access Management (IAM) accounts to database accounts, and otherwise false. Default: ``false`` :type DBClusterParameterGroupName: string :param DBClusterParameterGroupName: The name of the DB cluster parameter group to associate with the new DB cluster. Constraints: * If supplied, must match the name of an existing DBClusterParameterGroup. :rtype: dict :returns: """ pass
/sIBL_GUI-4.0.8.tar.gz/sIBL_GUI-4.0.8/sibl_gui/components/core/iblSetsOutliner/iblSetsOutliner.py
#********************************************************************************************************************** #*** Future imports. #********************************************************************************************************************** from __future__ import unicode_literals #********************************************************************************************************************** #*** External imports. #********************************************************************************************************************** import functools import os import platform import re import sys if sys.version_info[:2] <= (2, 6): from ordereddict import OrderedDict else: from collections import OrderedDict from PyQt4.QtCore import QString from PyQt4.QtCore import Qt from PyQt4.QtCore import pyqtSignal from PyQt4.QtGui import QAction from PyQt4.QtGui import QFileDialog from PyQt4.QtGui import QIcon from PyQt4.QtGui import QMenu from PyQt4.QtGui import QMessageBox from PyQt4.QtGui import QPixmap from PyQt4.QtGui import QSizePolicy from PyQt4.QtGui import QStackedWidget from PyQt4.QtGui import QStringListModel #********************************************************************************************************************** #*** Internal imports. #********************************************************************************************************************** import foundations.common import foundations.exceptions import foundations.strings import foundations.verbose import foundations.walkers import sibl_gui.components.core.database.exceptions import sibl_gui.components.core.database.operations import sibl_gui.ui.common import umbra.engine import umbra.exceptions import umbra.ui.common import umbra.ui.nodes import umbra.ui.widgets.messageBox as messageBox from manager.qwidgetComponent import QWidgetComponentFactory from sibl_gui.components.core.database.nodes import IblSetNode from sibl_gui.components.core.iblSetsOutliner.models import IblSetsModel from sibl_gui.components.core.iblSetsOutliner.views import Details_QTreeView from sibl_gui.components.core.iblSetsOutliner.views import Thumbnails_QListView from umbra.globals.uiConstants import UiConstants from umbra.globals.runtimeGlobals import RuntimeGlobals from umbra.ui.widgets.search_QLineEdit import Search_QLineEdit #********************************************************************************************************************** #*** Module attributes. #********************************************************************************************************************** __author__ = "Thomas Mansencal" __copyright__ = "Copyright (C) 2008 - 2014 - Thomas Mansencal" __license__ = "GPL V3.0 - http://www.gnu.org/licenses/" __maintainer__ = "Thomas Mansencal" __email__ = "[email protected]" __status__ = "Production" __all__ = ["LOGGER", "COMPONENT_UI_FILE", "IblSetsOutliner"] LOGGER = foundations.verbose.installLogger() COMPONENT_UI_FILE = os.path.join(os.path.dirname(__file__), "ui", "Ibl_Sets_Outliner.ui") #********************************************************************************************************************** #*** Module classes and definitions. #********************************************************************************************************************** class IblSetsOutliner(QWidgetComponentFactory(uiFile=COMPONENT_UI_FILE)): """ | Defines the :mod:`sibl_gui.components.core.iblSetsOutliner.iblSetsOutliner` Component Interface class. 
| It defines methods for Database Ibl Sets management. """ # Custom signals definitions. refreshNodes = pyqtSignal() """ This signal is emited by the :class:`IblSetsOutliner` class when :obj:`IblSetsOutliner.model` class property model nodes needs to be refreshed. ( pyqtSignal ) """ activeViewChanged = pyqtSignal(int) """ This signal is emited by the :class:`IblSetsOutliner` class when the current active View is changed. ( pyqtSignal ) :return: Current active view index. :rtype: int """ def __init__(self, parent=None, name=None, *args, **kwargs): """ Initializes the class. :param parent: Object parent. :type parent: QObject :param name: Component name. :type name: unicode :param \*args: Arguments. :type \*args: \* :param \*\*kwargs: Keywords arguments. :type \*\*kwargs: \*\* """ LOGGER.debug("> Initializing '{0}()' class.".format(self.__class__.__name__)) super(IblSetsOutliner, self).__init__(parent, name, *args, **kwargs) # --- Setting class attributes. --- self.deactivatable = False self.__uiResourcesDirectory = "resources" self.__uiThumbnailsViewImage = "Thumbnails_View.png" self.__uiColumnsViewImage = "Columns_View.png" self.__uiDetailsViewImage = "Details_View.png" self.__uiLargestSizeImage = "Largest_Size.png" self.__uiSmallestSizeImage = "Smallest_Size.png" self.__uiPanoramicLoadingImage = "Panoramic_Loading.png" self.__uiSquareLoadingImage = "Square_Loading.png" self.__uiSwitchThumbnailsTypeImage = "Switch_Thumbnails_Type.png" self.__dockArea = 8 self.__engine = None self.__settings = None self.__settingsSection = None self.__settingsSeparator = "," self.__extension = "ibl" self.__inspectLayout = "inspectCentric" self.__scriptEditor = None self.__collectionsOutliner = None self.__model = None self.__views = None self.__viewsPushButtons = None self.__thumbnailsView = None self.__detailsView = None self.__detailsHeaders = OrderedDict([("Ibl Set", "title"), ("Author", "author"), ("Shot Location", "location"), ("Latitude", "latitude"), ("Longitude", "longitude"), ("Shot Date", "date"), ("Shot Time", "time"), ("Comment", "comment")]) self.__panoramicThumbnails = "True" self.__panoramicThumbnailsSize = "XLarge" self.__squareThumbnailsSize = "Medium" self.__thumbnailsMinimumSize = "XSmall" self.__searchContexts = OrderedDict([("Search In Names", "title"), ("Search In Authors", "author"), ("Search In Links", "link"), ("Search In Locations", "location"), ("Search In Comments", "comment")]) self.__activeSearchContext = "Search In Names" self.__searchContextsMenu = None self.__iconPlaceHolder = None #****************************************************************************************************************** #*** Attributes properties. #****************************************************************************************************************** @property def uiResourcesDirectory(self): """ Property for **self.__uiResourcesDirectory** attribute. :return: self.__uiResourcesDirectory. :rtype: unicode """ return self.__uiResourcesDirectory @uiResourcesDirectory.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def uiResourcesDirectory(self, value): """ Setter for **self.__uiResourcesDirectory** attribute. :param value: Attribute value. 
:type value: unicode """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "uiResourcesDirectory")) @uiResourcesDirectory.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def uiResourcesDirectory(self): """ Deleter for **self.__uiResourcesDirectory** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "uiResourcesDirectory")) @property def uiThumbnailsViewImage(self): """ Property for **self.__uiThumbnailsViewImage** attribute. :return: self.__uiThumbnailsViewImage. :rtype: unicode """ return self.__uiThumbnailsViewImage @uiThumbnailsViewImage.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def uiThumbnailsViewImage(self, value): """ Setter for **self.__uiThumbnailsViewImage** attribute. :param value: Attribute value. :type value: unicode """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "uiThumbnailsViewImage")) @uiThumbnailsViewImage.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def uiThumbnailsViewImage(self): """ Deleter for **self.__uiThumbnailsViewImage** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "uiThumbnailsViewImage")) @property def uiColumnsViewImage(self): """ Property for **self.__uiColumnsViewImage** attribute. :return: self.__uiColumnsViewImage. :rtype: unicode """ return self.__uiColumnsViewImage @uiColumnsViewImage.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def uiColumnsViewImage(self, value): """ Setter for **self.__uiColumnsViewImage** attribute. :param value: Attribute value. :type value: unicode """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "uiColumnsViewImage")) @uiColumnsViewImage.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def uiColumnsViewImage(self): """ Deleter for **self.__uiColumnsViewImage** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "uiColumnsViewImage")) @property def uiDetailsViewImage(self): """ Property for **self.__uiDetailsViewImage** attribute. :return: self.__uiDetailsViewImage. :rtype: unicode """ return self.__uiDetailsViewImage @uiDetailsViewImage.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def uiDetailsViewImage(self, value): """ Setter for **self.__uiDetailsViewImage** attribute. :param value: Attribute value. :type value: unicode """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "uiDetailsViewImage")) @uiDetailsViewImage.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def uiDetailsViewImage(self): """ Deleter for **self.__uiDetailsViewImage** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "uiDetailsViewImage")) @property def uiLargestSizeImage(self): """ Property for **self.__uiLargestSizeImage** attribute. :return: self.__uiLargestSizeImage. 
:rtype: unicode """ return self.__uiLargestSizeImage @uiLargestSizeImage.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def uiLargestSizeImage(self, value): """ Setter for **self.__uiLargestSizeImage** attribute. :param value: Attribute value. :type value: unicode """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "uiLargestSizeImage")) @uiLargestSizeImage.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def uiLargestSizeImage(self): """ Deleter for **self.__uiLargestSizeImage** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "uiLargestSizeImage")) @property def uiSmallestSizeImage(self): """ Property for **self.__uiSmallestSizeImage** attribute. :return: self.__uiSmallestSizeImage. :rtype: unicode """ return self.__uiSmallestSizeImage @uiSmallestSizeImage.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def uiSmallestSizeImage(self, value): """ Setter for **self.__uiSmallestSizeImage** attribute. :param value: Attribute value. :type value: unicode """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "uiSmallestSizeImage")) @uiSmallestSizeImage.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def uiSmallestSizeImage(self): """ Deleter for **self.__uiSmallestSizeImage** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "uiSmallestSizeImage")) @property def uiPanoramicLoadingImage(self): """ Property for **self.__uiPanoramicLoadingImage** attribute. :return: self.__uiPanoramicLoadingImage. :rtype: unicode """ return self.__uiPanoramicLoadingImage @uiPanoramicLoadingImage.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def uiPanoramicLoadingImage(self, value): """ Setter for **self.__uiPanoramicLoadingImage** attribute. :param value: Attribute value. :type value: unicode """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "uiPanoramicLoadingImage")) @uiPanoramicLoadingImage.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def uiPanoramicLoadingImage(self): """ Deleter for **self.__uiPanoramicLoadingImage** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "uiPanoramicLoadingImage")) @property def uiSquareLoadingImage(self): """ Property for **self.__uiSquareLoadingImage** attribute. :return: self.__uiSquareLoadingImage. :rtype: unicode """ return self.__uiSquareLoadingImage @uiSquareLoadingImage.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def uiSquareLoadingImage(self, value): """ Setter for **self.__uiSquareLoadingImage** attribute. :param value: Attribute value. :type value: unicode """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "uiSquareLoadingImage")) @uiSquareLoadingImage.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def uiSquareLoadingImage(self): """ Deleter for **self.__uiSquareLoadingImage** attribute. 
""" raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "uiSquareLoadingImage")) @property def uiSwitchThumbnailsTypeImage(self): """ Property for **self.__uiSwitchThumbnailsTypeImage** attribute. :return: self.__uiSwitchThumbnailsTypeImage. :rtype: unicode """ return self.__uiSwitchThumbnailsTypeImage @uiSwitchThumbnailsTypeImage.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def uiSwitchThumbnailsTypeImage(self, value): """ Setter for **self.__uiSwitchThumbnailsTypeImage** attribute. :param value: Attribute value. :type value: unicode """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "uiSwitchThumbnailsTypeImage")) @uiSwitchThumbnailsTypeImage.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def uiSwitchThumbnailsTypeImage(self): """ Deleter for **self.__uiSwitchThumbnailsTypeImage** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "uiSwitchThumbnailsTypeImage")) @property def dockArea(self): """ Property for **self.__dockArea** attribute. :return: self.__dockArea. :rtype: int """ return self.__dockArea @dockArea.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def dockArea(self, value): """ Setter for **self.__dockArea** attribute. :param value: Attribute value. :type value: int """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "dockArea")) @dockArea.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def dockArea(self): """ Deleter for **self.__dockArea** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "dockArea")) @property def engine(self): """ Property for **self.__engine** attribute. :return: self.__engine. :rtype: QObject """ return self.__engine @engine.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def engine(self, value): """ Setter for **self.__engine** attribute. :param value: Attribute value. :type value: QObject """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "engine")) @engine.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def engine(self): """ Deleter for **self.__engine** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "engine")) @property def settings(self): """ Property for **self.__settings** attribute. :return: self.__settings. :rtype: QSettings """ return self.__settings @settings.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def settings(self, value): """ Setter for **self.__settings** attribute. :param value: Attribute value. :type value: QSettings """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "settings")) @settings.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def settings(self): """ Deleter for **self.__settings** attribute. 
""" raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "settings")) @property def settingsSection(self): """ Property for **self.__settingsSection** attribute. :return: self.__settingsSection. :rtype: unicode """ return self.__settingsSection @settingsSection.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def settingsSection(self, value): """ Setter for **self.__settingsSection** attribute. :param value: Attribute value. :type value: unicode """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "settingsSection")) @settingsSection.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def settingsSection(self): """ Deleter for **self.__settingsSection** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "settingsSection")) @property def settingsSeparator(self): """ Property for **self.__settingsSeparator** attribute. :return: self.__settingsSeparator. :rtype: unicode """ return self.__settingsSeparator @settingsSeparator.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def settingsSeparator(self, value): """ Setter for **self.__settingsSeparator** attribute. :param value: Attribute value. :type value: unicode """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "settingsSeparator")) @settingsSeparator.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def settingsSeparator(self): """ Deleter for **self.__settingsSeparator** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "settingsSeparator")) @property def extension(self): """ Property for **self.__extension** attribute. :return: self.__extension. :rtype: unicode """ return self.__extension @extension.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def extension(self, value): """ Setter for **self.__extension** attribute. :param value: Attribute value. :type value: unicode """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "extension")) @extension.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def extension(self): """ Deleter for **self.__extension** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "extension")) @property def inspectLayout(self): """ Property for **self.__inspectLayout** attribute. :return: self.__inspectLayout. :rtype: unicode """ return self.__inspectLayout @inspectLayout.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def inspectLayout(self, value): """ Setter for **self.__inspectLayout** attribute. :param value: Attribute value. :type value: unicode """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "inspectLayout")) @inspectLayout.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def inspectLayout(self): """ Deleter for **self.__inspectLayout** attribute. 
""" raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "inspectLayout")) @property def scriptEditor(self): """ Property for **self.__scriptEditor** attribute. :return: self.__scriptEditor. :rtype: QWidget """ return self.__scriptEditor @scriptEditor.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def scriptEditor(self, value): """ Setter for **self.__scriptEditor** attribute. :param value: Attribute value. :type value: QWidget """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "scriptEditor")) @scriptEditor.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def scriptEditor(self): """ Deleter for **self.__scriptEditor** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "scriptEditor")) @property def collectionsOutliner(self): """ Property for **self.__collectionsOutliner** attribute. :return: self.__collectionsOutliner. :rtype: QWidget """ return self.__collectionsOutliner @collectionsOutliner.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def collectionsOutliner(self, value): """ Setter for **self.__collectionsOutliner** attribute. :param value: Attribute value. :type value: QWidget """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "collectionsOutliner")) @collectionsOutliner.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def collectionsOutliner(self): """ Deleter for **self.__collectionsOutliner** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "collectionsOutliner")) @property def model(self): """ Property for **self.__model** attribute. :return: self.__model. :rtype: IblSetsModel """ return self.__model @model.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def model(self, value): """ Setter for **self.__model** attribute. :param value: Attribute value. :type value: IblSetsModel """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "model")) @model.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def model(self): """ Deleter for **self.__model** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "model")) @property def views(self): """ Property for **self.__views** attribute. :return: self.__views. :rtype: tuple """ return self.__views @views.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def views(self, value): """ Setter for **self.__views** attribute. :param value: Attribute value. :type value: tuple """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "views")) @views.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def views(self): """ Deleter for **self.__views** attribute. 
""" raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "views")) @property def viewsPushButtons(self): """ Property for **self.__viewsPushButtons** attribute. :return: self.__viewsPushButtons. :rtype: dict """ return self.__viewsPushButtons @viewsPushButtons.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def viewsPushButtons(self, value): """ Setter for **self.__viewsPushButtons** attribute. :param value: Attribute value. :type value: dict """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "viewsPushButtons")) @viewsPushButtons.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def viewsPushButtons(self): """ Deleter for **self.__viewsPushButtons** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "viewsPushButtons")) @property def thumbnailsView(self): """ Property for **self.__thumbnailsView** attribute. :return: self.__thumbnailsView. :rtype: QListView """ return self.__thumbnailsView @thumbnailsView.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def thumbnailsView(self, value): """ Setter for **self.__thumbnailsView** attribute. :param value: Attribute value. :type value: QListView """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "thumbnailsView")) @thumbnailsView.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def thumbnailsView(self): """ Deleter for **self.__thumbnailsView** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "view")) @property def detailsView(self): """ Property for **self.__detailsView** attribute. :return: self.__detailsView. :rtype: QTreeView """ return self.__detailsView @detailsView.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def detailsView(self, value): """ Setter for **self.__detailsView** attribute. :param value: Attribute value. :type value: QTreeView """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "detailsView")) @detailsView.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def detailsView(self): """ Deleter for **self.__detailsView** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "view")) @property def detailsViewHeaders(self): """ Property for **self.__detailsViewHeaders** attribute. :return: self.__detailsViewHeaders. :rtype: OrderedDict """ return self.__detailsViewHeaders @detailsViewHeaders.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def detailsViewHeaders(self, value): """ Setter for **self.__detailsViewHeaders** attribute. :param value: Attribute value. :type value: OrderedDict """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "detailsViewHeaders")) @detailsViewHeaders.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def detailsViewHeaders(self): """ Deleter for **self.__detailsViewHeaders** attribute. 
""" raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "view")) @property def panoramicThumbnails(self): """ Property for **self.__panoramicThumbnails** attribute. :return: self.__panoramicThumbnails. :rtype: bool """ return self.__panoramicThumbnails @panoramicThumbnails.setter @foundations.exceptions.handleExceptions(AssertionError) def panoramicThumbnails(self, value): """ Setter for **self.__panoramicThumbnails** attribute. :param value: Attribute value. :type value: bool """ if value is not None: assert type(value) is bool, "'{0}' attribute: '{1}' type is not 'bool'!".format("panoramicThumbnails", value) self.setPanoramicThumbnails(value) @panoramicThumbnails.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def panoramicThumbnails(self): """ Deleter for **self.__panoramicThumbnails** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "panoramicThumbnails")) @property def panoramicThumbnailsSize(self): """ Property for **self.__panoramicThumbnailsSize** attribute. :return: self.__panoramicThumbnailsSize. :rtype: unicode """ return self.__panoramicThumbnailsSize @panoramicThumbnailsSize.setter @foundations.exceptions.handleExceptions(AssertionError) def panoramicThumbnailsSize(self, value): """ Setter for **self.__panoramicThumbnailsSize** attribute. :param value: Attribute value. :type value: unicode """ if value is not None: assert type(value) is unicode, "'{0}' attribute: '{1}' type is not 'unicode'!".format("panoramicThumbnailsSize", value) self.__panoramicThumbnailsSize = value @panoramicThumbnailsSize.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def panoramicThumbnailsSize(self): """ Deleter for **self.__panoramicThumbnailsSize** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "panoramicThumbnailsSize")) @property def squareThumbnailsSize(self): """ Property for **self.__squareThumbnailsSize** attribute. :return: self.__squareThumbnailsSize. :rtype: unicode """ return self.__squareThumbnailsSize @squareThumbnailsSize.setter @foundations.exceptions.handleExceptions(AssertionError) def squareThumbnailsSize(self, value): """ Setter for **self.__squareThumbnailsSize** attribute. :param value: Attribute value. :type value: unicode """ if value is not None: assert type(value) is unicode, "'{0}' attribute: '{1}' type is not 'unicode'!".format("squareThumbnailsSize", value) self.__squareThumbnailsSize = value @squareThumbnailsSize.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def squareThumbnailsSize(self): """ Deleter for **self.__squareThumbnailsSize** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "squareThumbnailsSize")) @property def thumbnailsMinimumSize(self): """ Property for **self.__thumbnailsMinimumSize** attribute. :return: self.__thumbnailsMinimumSize. :rtype: dict """ return self.__thumbnailsMinimumSize @thumbnailsMinimumSize.setter @foundations.exceptions.handleExceptions(AssertionError) def thumbnailsMinimumSize(self, value): """ Setter for **self.__thumbnailsMinimumSize** attribute. :param value: Attribute value. 
:type value: dict """ if value is not None: assert type(value) is unicode, "'{0}' attribute: '{1}' type is not 'unicode'!".format("thumbnailsMinimumSize", value) self.__thumbnailsMinimumSize = value @thumbnailsMinimumSize.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def thumbnailsMinimumSize(self): """ Deleter for **self.__thumbnailsMinimumSize** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "thumbnailsMinimumSize")) @property def searchContexts(self): """ Property for **self.__searchContexts** attribute. :return: self.__searchContexts. :rtype: OrderedDict """ return self.__searchContexts @searchContexts.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def searchContexts(self, value): """ Setter for **self.__searchContexts** attribute. :param value: Attribute value. :type value: OrderedDict """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "searchContexts")) @searchContexts.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def searchContexts(self): """ Deleter for **self.__searchContexts** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "searchContexts")) @property def activeSearchContext(self): """ Property for **self.__activeSearchContext** attribute. :return: self.__activeSearchContext. :rtype: OrderedDict """ return self.__activeSearchContext @activeSearchContext.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def activeSearchContext(self, value): """ Setter for **self.__activeSearchContext** attribute. :param value: Attribute value. :type value: OrderedDict """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "activeSearchContext")) @activeSearchContext.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def activeSearchContext(self): """ Deleter for **self.__activeSearchContext** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "activeSearchContext")) @property def searchContextMenu(self): """ Property for **self.__searchContextMenu** attribute. :return: self.__searchContextMenu. :rtype: QMenu """ return self.__searchContextMenu @searchContextMenu.setter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def searchContextMenu(self, value): """ Setter for **self.__searchContextMenu** attribute. :param value: Attribute value. ( self.__searchContextsMenu ) """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "searchContextMenu")) @searchContextMenu.deleter @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def searchContextMenu(self): """ Deleter for **self.__searchContextMenu** attribute. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' attribute is not deletable!".format(self.__class__.__name__, "searchContextMenu")) #****************************************************************************************************************** #*** Class methods. 
#****************************************************************************************************************** def activate(self, engine): """ Activates the Component. :param engine: Engine to attach the Component to. :type engine: QObject :return: Method success. :rtype: bool """ LOGGER.debug("> Activating '{0}' Component.".format(self.__class__.__name__)) self.__uiResourcesDirectory = os.path.join(os.path.dirname(__file__), self.__uiResourcesDirectory) self.__engine = engine self.__settings = self.__engine.settings self.__settingsSection = self.name self.__scriptEditor = self.__engine.componentsManager["factory.scriptEditor"] self.__collectionsOutliner = self.__engine.componentsManager["core.collectionsOutliner"] self.activated = True return True @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def deactivate(self): """ Deactivates the Component. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' Component cannot be deactivated!".format(self.__class__.__name__, self.__name)) def initializeUi(self): """ Initializes the Component ui. :return: Method success. :rtype: bool """ LOGGER.debug("> Initializing '{0}' Component ui.".format(self.__class__.__name__)) self.__engine.parameters.databaseReadOnly and \ LOGGER.info("{0} | Model edition deactivated by '{1}' command line parameter value!".format(self.__class__.__name__, "databaseReadOnly")) self.__model = IblSetsModel(self, horizontalHeaders=self.__detailsHeaders) self.Ibl_Sets_Outliner_stackedWidget = QStackedWidget(self) self.Ibl_Sets_Outliner_gridLayout.addWidget(self.Ibl_Sets_Outliner_stackedWidget) self.__thumbnailsView = Thumbnails_QListView(self, self.__model, self.__engine.parameters.databaseReadOnly, "No Ibl Set to view!") self.__thumbnailsView.setObjectName("Thumbnails_listView") self.__thumbnailsView.setContextMenuPolicy(Qt.ActionsContextMenu) self.Ibl_Sets_Outliner_stackedWidget.addWidget(self.__thumbnailsView) self.__detailsView = Details_QTreeView(self, self.__model, self.__engine.parameters.databaseReadOnly, "No Ibl Set to view!") self.__detailsView.setObjectName("Details_treeView") self.__detailsView.setContextMenuPolicy(Qt.ActionsContextMenu) self.Ibl_Sets_Outliner_stackedWidget.addWidget(self.__detailsView) self.__views = (self.__thumbnailsView, self.__detailsView) self.__views_addActions() self.__viewsPushButtons = {0 : (self.Thumbnails_View_pushButton, self.__uiThumbnailsViewImage), 1 : (self.Details_View_pushButton, self.__uiDetailsViewImage)} for index, data in self.__viewsPushButtons.iteritems(): viewPushButton, image = data viewPushButton.setIcon(QIcon(os.path.join(self.__uiResourcesDirectory, image))) self.Switch_Thumbnails_Type_pushButton.setIcon(QIcon(os.path.join(self.__uiResourcesDirectory, self.__uiSwitchThumbnailsTypeImage))) self.Search_Database_lineEdit = Search_QLineEdit(self) self.Search_Database_horizontalLayout.addWidget(self.Search_Database_lineEdit) self.Search_Database_lineEdit.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Fixed) self.__searchContextsMenu = QMenu() for context in self.__searchContexts.iterkeys(): self.__searchContextsMenu.addAction(self.__engine.actionsManager.registerAction( "Actions|Umbra|Components|core.iblSetsOutliner|Search|Set '{0}' Context ...".format(context), text="{0} ...".format(context), checkable=True, slot=functools.partial(self.setActiveSearchContext, context))) self.Search_Database_lineEdit.searchActiveLabel.setMenu(self.__searchContextsMenu) self.setActiveSearchContext(self.__activeSearchContext) 
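        # Thumbnails size slider icons and stored thumbnails type / size settings restoration.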
self.Largest_Size_label.setPixmap(QPixmap(os.path.join(self.__uiResourcesDirectory, self.__uiLargestSizeImage))) self.Smallest_Size_label.setPixmap(QPixmap(os.path.join(self.__uiResourcesDirectory, self.__uiSmallestSizeImage))) if self.__settings.keyExists(self.__settingsSection, "panoramicThumbnails"): self.__panoramicThumbnails = self.__settings.getKey(self.__settingsSection, "panoramicThumbnails").toBool() self.__views_setUi( foundations.common.getFirstItem(self.__settings.getKey(self.__settingsSection, "listViewIconSize").toInt())) # Signals / Slots. for view in self.__views: self.__engine.imagesCaches.QIcon.contentAdded.connect(view.viewport().update) view.doubleClicked.connect(self.__views__doubleClicked) self.activeViewChanged.connect(self.__views__activeViewChanged) for index, data in self.__viewsPushButtons.iteritems(): viewPushButton, image = data viewPushButton.clicked.connect(functools.partial(self.__views_pushButtons__clicked, index)) self.Switch_Thumbnails_Type_pushButton.clicked.connect(self.__Switch_Thumbnails_Type_pushButton__clicked) self.Search_Database_lineEdit.textChanged.connect(self.__Search_Database_lineEdit__textChanged) self.Thumbnails_Size_horizontalSlider.valueChanged.connect(self.__Thumbnails_Size_horizontalSlider__changed) self.refreshNodes.connect(self.__model__refreshNodes) self.__model.modelReset.connect(self.__collectionsOutliner._CollectionsOutliner__model__refreshAttributes) if not self.__engine.parameters.databaseReadOnly: self.__engine.fileSystemEventsManager.fileChanged.connect(self.__engine_fileSystemEventsManager__fileChanged) self.__engine.contentDropped.connect(self.__engine__contentDropped) else: LOGGER.info("{0} | Ibl Sets file system events ignored by '{1}' command line parameter value!".format( self.__class__.__name__, "databaseReadOnly")) self.initializedUi = True return True @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def uninitializeUi(self): """ Uninitializes the Component ui. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' Component ui cannot be uninitialized!".format(self.__class__.__name__, self.name)) def addWidget(self): """ Adds the Component Widget to the engine. :return: Method success. :rtype: bool """ LOGGER.debug("> Adding '{0}' Component Widget.".format(self.__class__.__name__)) self.__engine.setCentralWidget(self) return True @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError) def removeWidget(self): """ Removes the Component Widget from the engine. """ raise foundations.exceptions.ProgrammingError( "{0} | '{1}' Component Widget cannot be removed!".format(self.__class__.__name__, self.name)) @foundations.exceptions.handleExceptions(umbra.exceptions.notifyExceptionHandler, Exception) def onStartup(self): """ Defines the slot triggered on Framework startup. :return: Method success. :rtype: bool """ LOGGER.debug("> Calling '{0}' Component Framework 'onStartup' method.".format(self.__class__.__name__)) if not self.__engine.parameters.databaseReadOnly: # Wizard if Ibl Sets table is empty. 
if not self.getIblSets(): if messageBox.messageBox("Question", "Question", "The Database has no Ibl Sets, would you like to add some?", buttons=QMessageBox.Yes | QMessageBox.No) == QMessageBox.Yes: directory = umbra.ui.common.storeLastBrowsedPath((QFileDialog.getExistingDirectory(self, "Add Content:", RuntimeGlobals.lastBrowsedPath))) if directory: if not self.addDirectory(directory): raise Exception( "{0} | Exception raised while adding '{1}' directory content to the Database!".format( self.__class__.__name__, directory)) # Ibl Sets table integrity checking. erroneousIblSets = sibl_gui.components.core.database.operations.checkIblSetsTableIntegrity() for iblSet, exceptions in erroneousIblSets.iteritems(): if sibl_gui.components.core.database.exceptions.MissingIblSetFileError in exceptions: choice = messageBox.messageBox("Question", "Error", "{0} | '{1}' Ibl Set file is missing, would you like to update it's location?".format( self.__class__.__name__, iblSet.name), QMessageBox.Critical, QMessageBox.Yes | QMessageBox.No, customButtons=((QString("No To All"), QMessageBox.RejectRole),)) if choice == 0: break if choice == QMessageBox.Yes: if self.updateIblSetLocationUi(iblSet): # TODO: Check updated Ibl Set file integrity. continue for exception in exceptions: self.__engine.notificationsManager.warnify( "{0} | '{1}' {2}".format(self.__class__.__name__, iblSet.name, sibl_gui.components.core.database.operations.DATABASE_EXCEPTIONS[exception])) else: LOGGER.info("{0} | Database Ibl Sets wizard and Ibl Sets integrity checking method deactivated\ by '{1}' command line parameter value!".format(self.__class__.__name__, "databaseReadOnly")) activeView, state = self.__settings.getKey(self.__settingsSection, "activeView").toInt() state and self.setActiveViewIndex(activeView) for view in self.__views: viewName = view.objectName() viewSelectedIblSetsIdentities = foundations.strings.toString(self.__settings.getKey(self.__settingsSection, "{0}_viewSelecteIblSets".format(viewName)).toString()) LOGGER.debug("> '{0}' View stored selected Ibl Sets identities: '{1}'.".format(viewName, viewSelectedIblSetsIdentities)) view.modelSelection["Default"] = viewSelectedIblSetsIdentities and \ [int(identity) for identity in viewSelectedIblSetsIdentities.split(self.__settingsSeparator)] or [] view.restoreModelSelection() return True def onClose(self): """ Defines the slot triggered on Framework close. :return: Method success. :rtype: bool """ LOGGER.debug("> Calling '{0}' Component Framework 'onClose' method.".format(self.__class__.__name__)) for view in self.__views: view.storeModelSelection() self.__settings.setKey(self.__settingsSection, "{0}_viewSelecteIblSets".format(view.objectName()), self.__settingsSeparator.join(foundations.strings.toString(identity) \ for identity in view.modelSelection["Default"])) self.__settings.setKey(self.__settingsSection, "activeView", self.getActiveViewIndex()) return True def __views_setUi(self, thumbnailsSize=None): """ Sets the Views ui. :param thumbnailsSize: Thumbnails size. 
:type thumbnailsSize: int """ if not thumbnailsSize: thumbnailsSize = UiConstants.thumbnailsSizes.get(self.__panoramicThumbnailsSize \ if self.__panoramicThumbnails else self.__squareThumbnailsSize) self.__iconPlaceHolder = \ sibl_gui.ui.common.getIcon(os.path.join(self.__uiResourcesDirectory, self.__uiPanoramicLoadingImage if self.__panoramicThumbnails else \ self.__uiSquareLoadingImage), asynchronousLoading=False) self.__thumbnailsView._Thumbnails_QListView__setDefaultUiState(thumbnailsSize, 2 if self.__panoramicThumbnails else 1) self.Thumbnails_Size_horizontalSlider.setMinimum(UiConstants.thumbnailsSizes.get(self.__thumbnailsMinimumSize)) self.Thumbnails_Size_horizontalSlider.setMaximum(UiConstants.thumbnailsSizes.get(self.__panoramicThumbnailsSize \ if self.__panoramicThumbnails else self.__squareThumbnailsSize)) self.Thumbnails_Size_horizontalSlider.setValue(thumbnailsSize) def __views_refreshUi(self, thumbnailsSize=None): """ Refreshes the Views ui. :param thumbnailsSize: Thumbnails size. :type thumbnailsSize: int """ self.__views_setUi(thumbnailsSize) def __model__refreshNodes(self): """ Defines the slot triggered by the Model when Nodes need refresh. """ self.setIblSets() def __views_addActions(self): """ Sets the Views actions. """ if not self.__engine.parameters.databaseReadOnly: addContentAction = self.__engine.actionsManager.registerAction( "Actions|Umbra|Components|core.iblSetsOutliner|Add Content ...", slot=self.__views_addContentAction__triggered) addIblSetAction = self.__engine.actionsManager.registerAction( "Actions|Umbra|Components|core.iblSetsOutliner|Add Ibl Set ...", slot=self.__views_addIblSetAction__triggered) removeIblSetsAction = self.__engine.actionsManager.registerAction( "Actions|Umbra|Components|core.iblSetsOutliner|Remove Ibl Set(s) ...", slot=self.__views_removeIblSetsAction__triggered) updateIblSetsLocationsAction = self.__engine.actionsManager.registerAction( "Actions|Umbra|Components|core.iblSetsOutliner|Update Ibl Set(s) Location(s) ...", slot=self.__views_updateIblSetsLocationsAction__triggered) for view in self.__views: separatorAction = QAction(view) separatorAction.setSeparator(True) for action in (addContentAction, addIblSetAction, removeIblSetsAction, updateIblSetsLocationsAction, separatorAction): view.addAction(action) else: LOGGER.info( "{0} | Ibl Sets Database alteration capabilities deactivated by '{1}' command line parameter value!".format( self.__class__.__name__, "databaseReadOnly")) def __views_addContentAction__triggered(self, checked): """ Defines the slot triggered by **'Actions|Umbra|Components|core.iblSetsOutliner|Add Content ...'** action. :param checked: Action checked state. :type checked: bool :return: Method success. :rtype: bool """ return self.addContentUi() def __views_addIblSetAction__triggered(self, checked): """ Defines the slot triggered by **'Actions|Umbra|Components|core.iblSetsOutliner|Add Ibl Set ...'** action. :param checked: Action checked state. :type checked: bool :return: Method success. :rtype: bool """ return self.addIblSetUi() def __views_removeIblSetsAction__triggered(self, checked): """ Defines the slot triggered by **'Actions|Umbra|Components|core.iblSetsOutliner|Remove Ibl Set(s) ...'** action. :param checked: Action checked state. :type checked: bool :return: Method success. 
:rtype: bool """ return self.removeIblSetsUi() def __views_updateIblSetsLocationsAction__triggered(self, checked): """ Defines the slot triggered by **'Actions|Umbra|Components|core.iblSetsOutliner|Update Ibl Set(s) Location(s) ...'** action. :param checked: Action checked state. :type checked: bool :return: Method success. :rtype: bool """ return self.updateSelectedIblSetsLocationUi() def __views_pushButtons__clicked(self, index, checked): """ Defines the slot triggered by **\*_View_pushButton** Widget when clicked. :param index: Button index. :type index: int :param checked: Checked state. :type checked: bool """ self.setActiveViewIndex(index) def __views__doubleClicked(self, index): """ Defines the slot triggered by a **\*_View** Widget when double clicked. :param index: Clicked item index. :type index: QModelIndex """ self.__engine.layoutsManager.restoreLayout(self.__inspectLayout) def __views__activeViewChanged(self, index): """ Defines the slot triggered by the active View changed. :param index: Current active View. :type index: int """ self.Ibl_Sets_Outliner_Thumbnails_Slider_frame.setVisible(not index) for viewIndex, data in self.__viewsPushButtons.iteritems(): viewPushButton, image = data viewPushButton.setChecked(True if viewIndex == index else False) def __Switch_Thumbnails_Type_pushButton__clicked(self, checked): """ Defines the slot triggered by **Switch_Thumbnails_Type_pushButton** Widget when clicked. :param checked: Checked state. :type checked: bool """ self.setPanoramicThumbnails(not self.__panoramicThumbnails) def __Search_Database_lineEdit__textChanged(self, text): """ Defines the slot triggered by **Search_Database_lineEdit** Widget when text changed. :param text: Current text value. :type text: QString """ self.setIblSets(self.__searchIblSets(foundations.strings.toString(self.Search_Database_lineEdit.text()), self.__searchContexts[self.__activeSearchContext], re.IGNORECASE if self.Case_Sensitive_Matching_pushButton.isChecked() else 0)) def __Thumbnails_Size_horizontalSlider__changed(self, value): """ Scales the View icons. :param value: Thumbnails size. :type value: int """ self.__thumbnailsView._Thumbnails_QListView__setDefaultUiState(value, 2 if self.__panoramicThumbnails else 1) # Storing settings key. LOGGER.debug("> Setting '{0}' with value '{1}'.".format("listViewIconSize", value)) self.__settings.setKey(self.__settingsSection, "listViewIconSize", value) @foundations.exceptions.handleExceptions(umbra.exceptions.notifyExceptionHandler, foundations.exceptions.UserError) @umbra.engine.showProcessing("Retrieving Ibl Sets ...") def __engine__contentDropped(self, event): """ Defines the slot triggered by content when dropped into the engine. :param event: Event. 
:type event: QEvent """ if not event.mimeData().hasUrls(): return LOGGER.debug("> Drag event urls list: '{0}'!".format(event.mimeData().urls())) if not self.__engine.parameters.databaseReadOnly: for url in event.mimeData().urls(): path = foundations.strings.toString(url.path()) LOGGER.debug("> Handling dropped '{0}' file.".format(path)) path = (platform.system() == "Windows" or platform.system() == "Microsoft") and \ re.search(r"^\/[A-Z]:", path) and path[1:] or path if re.search(r"\.{0}$".format(self.__extension), path): name = foundations.strings.getSplitextBasename(path) choice = messageBox.messageBox("Question", "Question", "'{0}' Ibl Set file has been dropped, would you like to 'Add' it to the Database or \ 'Edit' it in the Script Editor?".format(name), buttons=QMessageBox.Cancel, customButtons=((QString("Add"), QMessageBox.AcceptRole), (QString("Edit"), QMessageBox.AcceptRole))) if choice == 0: self.addIblSet(name, path) elif choice == 1: self.__scriptEditor.loadFile(path) and self.__scriptEditor.restoreDevelopmentLayout() else: if not os.path.isdir(path): return if not list(foundations.walkers.filesWalker(path, ("\.{0}$".format(self.__extension),), ("\._",))): return if messageBox.messageBox("Question", "Question", "Would you like to add '{0}' directory Ibl Set(s) file(s) to the Database?".format(path), buttons=QMessageBox.Yes | QMessageBox.No) == QMessageBox.Yes: self.addDirectory(path) self.__engine.processEvents() else: raise foundations.exceptions.UserError("{0} | Cannot perform action, Database has been set read only!".format( self.__class__.__name__)) def __engine_fileSystemEventsManager__fileChanged(self, file): """ Defines the slot triggered by the **fileSystemEventsManager** when a file is changed. :param file: File changed. :type file: unicode """ iblSet = foundations.common.getFirstItem(filter(lambda x: x.path == file, self.getIblSets())) if not iblSet: return if sibl_gui.components.core.database.operations.updateIblSetContent(iblSet): self.__engine.notificationsManager.notify( "{0} | '{1}' Ibl Set file has been reparsed and associated database object updated!".format( self.__class__.__name__, iblSet.title)) self.refreshNodes.emit() def __getCandidateCollectionId(self): """ Returns a Collection id. :return: Collection id. :rtype: int """ collections = self.__collectionsOutliner.getSelectedCollections() collection = foundations.common.getFirstItem(collections) identity = collection and collection.id or None return identity and identity or self.__collectionsOutliner.getCollectionId( self.__collectionsOutliner.defaultCollection) def __searchIblSets(self, pattern, attribute, flags=re.IGNORECASE): """ Filters the current Collection Ibl Sets. :param pattern: Ibl Sets filter pattern. :type pattern: unicode :param attribute: Attribute to filter Ibl Sets on. :type attribute: unicode :param flags: Regex filtering flags. :type flags: int :return: Filtered Ibl Sets. 
        :rtype: list
        """

        try:
            pattern = re.compile(pattern, flags)
        except Exception:
            return list()

        iblSets = [iblSet for iblSet in set(self.__collectionsOutliner.getCollectionsIblSets(
            self.__collectionsOutliner.getSelectedCollections() or \
            self.__collectionsOutliner.getCollections())).intersection(
            sibl_gui.components.core.database.operations.filterIblSets(
            "{0}".format(foundations.strings.toString(pattern.pattern)), attribute, flags))]

        self.Search_Database_lineEdit.completer.setModel(QStringListModel(sorted((value
            for value in set((getattr(iblSetNode, attribute)
            for iblSetNode in iblSets if getattr(iblSetNode, attribute)))))))

        return iblSets

    def getActiveView(self):
        """
        Returns the current active View.

        :return: Current active View.
        :rtype: QWidget
        """

        return self.Ibl_Sets_Outliner_stackedWidget.currentWidget()

    def getActiveViewIndex(self):
        """
        Returns the current active View index.

        :return: Current active View index.
        :rtype: int
        """

        return self.Ibl_Sets_Outliner_stackedWidget.currentIndex()

    def setActiveView(self, view):
        """
        Sets the active View to given View.

        :param view: View.
        :type view: QWidget
        :return: Method success.
        :rtype: bool
        """

        index = self.Ibl_Sets_Outliner_stackedWidget.indexOf(view)
        self.Ibl_Sets_Outliner_stackedWidget.setCurrentIndex(index)
        self.activeViewChanged.emit(index)
        return True

    def setActiveViewIndex(self, index):
        """
        Sets the active View to given index.

        :param index: Index.
        :type index: int
        :return: Method success.
        :rtype: bool
        """

        self.Ibl_Sets_Outliner_stackedWidget.setCurrentIndex(index)
        self.activeViewChanged.emit(index)
        return True

    def setActiveSearchContext(self, context, *args):
        """
        Sets the active search context.

        :param context: Search context.
        :type context: unicode
        :param \*args: Arguments.
        :type \*args: \*
        :return: Method success.
        :rtype: bool
        """

        text = "{0} ...".format(context)
        for action in self.__engine.actionsManager.getCategory(
                "Actions|Umbra|Components|core.iblSetsOutliner|Search").itervalues():
            action.setChecked(action.text() == text and True or False)

        self.__activeSearchContext = context

        self.Search_Database_lineEdit.setPlaceholderText(text)
        return True

    def setPanoramicThumbnails(self, state):
        """
        Sets the panoramic thumbnails.

        :param state: Panoramic thumbnails.
        :type state: bool
        :return: Method success.
        :rtype: bool
        """

        oldIn, oldOut = UiConstants.thumbnailsSizes.get(self.__thumbnailsMinimumSize), UiConstants.thumbnailsSizes.get(
            self.__panoramicThumbnailsSize if self.__panoramicThumbnails else self.__squareThumbnailsSize)
        newIn, newOut = UiConstants.thumbnailsSizes.get(self.__thumbnailsMinimumSize), UiConstants.thumbnailsSizes.get(
            self.__panoramicThumbnailsSize if state else self.__squareThumbnailsSize)
        thumbnailsSize = (((self.Thumbnails_Size_horizontalSlider.value() - oldIn) * (newOut - newIn)) \
            / (oldOut - oldIn)) + newIn

        self.__panoramicThumbnails = state
        self.__settings.setKey(self.__settingsSection, "panoramicThumbnails", self.__panoramicThumbnails)

        self.__views_refreshUi(thumbnailsSize)

        self.setIblSets()
        return True

    @foundations.exceptions.handleExceptions(umbra.exceptions.notifyExceptionHandler, Exception)
    @umbra.engine.showProcessing("Adding Content ...")
    def addContentUi(self):
        """
        Adds user defined content to the Database.

        :return: Method success.
        :rtype: bool

        :note: May require user interaction.
""" directory = umbra.ui.common.storeLastBrowsedPath((QFileDialog.getExistingDirectory(self, "Add Content:", RuntimeGlobals.lastBrowsedPath))) if not directory: return False LOGGER.debug("> Chosen directory path: '{0}'.".format(directory)) if self.addDirectory(directory): return True else: raise Exception("{0} | Exception raised while adding '{1}' directory content to the Database!".format( self.__class__.__name__, directory)) @foundations.exceptions.handleExceptions(umbra.exceptions.notifyExceptionHandler, Exception) @umbra.engine.showProcessing("Adding Ibl Set ...") def addIblSetUi(self): """ Adds an user defined Ibl Set to the Database. :return: Method success. :rtype: bool :note: May require user interaction. """ path = umbra.ui.common.storeLastBrowsedPath((QFileDialog.getOpenFileName(self, "Add Ibl Set:", RuntimeGlobals.lastBrowsedPath, "Ibls files (*{0})".format(self.__extension)))) if not path: return False if not self.iblSetExists(path): LOGGER.debug("> Chosen Ibl Set path: '{0}'.".format(path)) if self.addIblSet(foundations.strings.getSplitextBasename(path), path): return True else: raise Exception("{0} | Exception raised while adding '{1}' Ibl Set to the Database!".format( self.__class__.__name__, path)) else: self.__engine.notificationsManager.warnify( "{0} | '{1}' Ibl Set already exists in Database!".format(self.__class__.__name__, path)) @foundations.exceptions.handleExceptions(umbra.exceptions.notifyExceptionHandler, Exception) @umbra.engine.encapsulateProcessing def removeIblSetsUi(self): """ Removes user selected Ibl Sets from the Database. :return: Method success. :rtype: bool :note: May require user interaction. """ selectedIblSets = self.getSelectedIblSets() if not selectedIblSets: return False if messageBox.messageBox("Question", "Question", "Are you sure you want to remove '{0}' sets(s)?".format( ", ".join((iblSet.title for iblSet in selectedIblSets))), buttons=QMessageBox.Yes | QMessageBox.No) == QMessageBox.Yes: self.__engine.startProcessing("Removing Ibl Sets ...", len(selectedIblSets)) success = True for iblSet in selectedIblSets: success *= umbra.ui.common.signalsBlocker(self, self.removeIblSet, iblSet) or False self.__engine.stepProcessing() self.__engine.stopProcessing() self.refreshNodes.emit() if success: return True else: raise Exception("{0} | Exception raised while removing '{1}' Ibls sets from the Database!".format( self.__class__.__name__, ", ". join((iblSet.title for iblSet in selectedIblSets)))) @foundations.exceptions.handleExceptions(umbra.exceptions.notifyExceptionHandler, sibl_gui.components.core.database.exceptions.DatabaseOperationError) def updateIblSetLocationUi(self, iblSet): """ Updates given Ibl Set location. :param iblSet: Ibl Set to update. :type iblSet: IblSet :return: Method success. :rtype: bool :note: May require user interaction. 
""" file = umbra.ui.common.storeLastBrowsedPath((QFileDialog.getOpenFileName(self, "Updating '{0}' Template Location:".format(iblSet.name), RuntimeGlobals.lastBrowsedPath, "Ibl Set files (*{0})".format(self.__extension)))) if not file: return False LOGGER.info("{0} | Updating '{1}' Ibl Set with new location '{2}'!".format(self.__class__.__name__, iblSet.name, file)) if sibl_gui.components.core.database.operations.updateIblSetLocation(iblSet, file): self.refreshNodes.emit() return True else: raise sibl_gui.components.core.database.exceptions.DatabaseOperationError( "{0} | Exception raised while updating '{1}' Ibl Set location!".format(self.__class__.__name__, iblSet.name)) @foundations.exceptions.handleExceptions(umbra.exceptions.notifyExceptionHandler, Exception) @umbra.engine.encapsulateProcessing def updateSelectedIblSetsLocationUi(self): """ Updates user selected Ibl Sets locations. :return: Method success. :rtype: bool :note: May require user interaction. """ selectedIblSets = self.getSelectedIblSets() if not selectedIblSets: return False self.__engine.startProcessing("Update Ibl Sets Locations ...", len(selectedIblSets)) success = True for iblSet in selectedIblSets: success *= self.updateIblSetLocationUi(iblSet) self.__engine.stepProcessing() self.__engine.stopProcessing() self.refreshNodes.emit() if success: return True else: raise Exception("{0} | Exception raised while updating '{1}' Ibls sets locations!".format( self.__class__.__name__, ", ". join((iblSet.title for iblSet in selectedIblSets)))) @foundations.exceptions.handleExceptions(foundations.exceptions.ProgrammingError, sibl_gui.components.core.database.exceptions.DatabaseOperationError) def addIblSet(self, name, path, collectionId=None): """ Adds an Ibl Set to the Database. :param name: Ibl Set name. :type name: unicode :param path: Ibl Set path. :type path: unicode :param collectionId: Target Collection id. :type collectionId: int :return: Method success. :rtype: bool """ if not self.iblSetExists(path): LOGGER.info("{0} | Adding '{1}' Ibl Set to the Database!".format(self.__class__.__name__, name)) if sibl_gui.components.core.database.operations.addIblSet( name, path, collectionId or self.__getCandidateCollectionId()): self.refreshNodes.emit() return True else: raise sibl_gui.components.core.database.exceptions.DatabaseOperationError( "{0} | Exception raised while adding '{1}' Ibl Set to the Database!".format(self.__class__.__name__, name)) else: raise foundations.exceptions.ProgrammingError( "{0} | '{1}' Ibl Set already exists in Database!".format(self.__class__.__name__, name)) @foundations.exceptions.handleExceptions(umbra.exceptions.notifyExceptionHandler, Exception) @umbra.engine.encapsulateProcessing def addDirectory(self, directory, collectionId=None): """ Adds directory Ibl Sets to the Database. :param directory: Directory to add. :type directory: unicode :param collectionId: Target Collection id. :type collectionId: int :return: Method success. 
:rtype: bool """ LOGGER.debug("> Initializing directory '{0}' filesWalker.".format(directory)) files = list(foundations.walkers.filesWalker(directory, ("\.{0}$".format(self.__extension),), ("\._",))) self.__engine.startProcessing("Adding Directory Ibl Sets ...", len(files)) success = True for path in files: if not self.iblSetExists(path): success *= umbra.ui.common.signalsBlocker(self, self.addIblSet, foundations.strings.getSplitextBasename(path), path, collectionId or self.__getCandidateCollectionId()) or False self.__engine.stepProcessing() self.__engine.stopProcessing() self.refreshNodes.emit() if success: return True else: raise Exception("{0} | Exception raised while adding '{1}' directory content to the Database!".format( self.__class__.__name__, directory)) @foundations.exceptions.handleExceptions(sibl_gui.components.core.database.exceptions.DatabaseOperationError) def removeIblSet(self, iblSet): """ Removes given Ibl Set from the Database. :param iblSet: Ibl Set to remove. :type iblSet: IblSet :return: Method success. :rtype: bool """ LOGGER.info("{0} | Removing '{1}' Ibl Set from the Database!".format(self.__class__.__name__, iblSet.title)) if sibl_gui.components.core.database.operations.removeIblSet(iblSet.id): self.refreshNodes.emit() return True else: raise sibl_gui.components.core.database.exceptions.DatabaseOperationError( "{0} | Exception raised while removing '{1}' Ibl Set from the Database!".format(self.__class__.__name__, iblSet.title)) @foundations.exceptions.handleExceptions(sibl_gui.components.core.database.exceptions.DatabaseOperationError) def updateIblSetLocation(self, iblSet, file): """ Updates given Ibl Set location. :param iblSet: Ibl Set to update. :type iblSet: IblSet :param iblSet: New Ibl Set file. :type iblSet: unicode :return: Method success. :rtype: bool """ LOGGER.info("{0} | Updating '{1}' Ibl Set with new location: '{2}'!".format(self.__class__.__name__, iblSet.title, file)) if sibl_gui.components.core.database.operations.updateIblSetLocation(iblSet, file): self.refreshNodes.emit() return True else: raise sibl_gui.components.core.database.exceptions.DatabaseOperationError( "{0} | Exception raised while updating '{1}' Ibl Set location!".format(self.__class__.__name__, iblSet.title)) def getIblSets(self): """ Returns Database Ibl Sets. :return: Database Ibl Sets. :rtype: list """ return [iblSet for iblSet in sibl_gui.components.core.database.operations.getIblSets()] def filterIblSets(self, pattern, attribute, flags=re.IGNORECASE): """ Filters the Database Ibl Sets on given attribute using given pattern. :param pattern: Filter pattern. :type pattern: unicode :param attribute: Attribute to filter on. :type attribute: unicode :param flags: Regex filtering flags. :type flags: int :return: Filtered Database Ibl Sets. :rtype: list """ try: pattern = re.compile(pattern, flags) except Exception: return list() return list(set(self.getIblSets()).intersection( sibl_gui.components.core.database.operations.filterIblSets( "{0}".format(foundations.strings.toString(pattern.pattern)), attribute, flags))) def iblSetExists(self, path): """ Returns if given Ibl Set path exists in the Database. :param path: Collection path. :type path: unicode :return: Collection exists. :rtype: bool """ return sibl_gui.components.core.database.operations.iblSetExists(path) def listIblSets(self): """ Lists Database Ibl Sets names. :return: Database Ibl Sets names. 
:rtype: list :note: The list is actually returned using 'title' attributes instead of 'name' attributes """ return [iblSet.title for iblSet in self.getIblSets()] def setIblSets(self, iblSets=None): """ Sets the Ibl Sets Model nodes. :param iblSets: Ibl Sets to set. :type iblSets: list :return: Method success. :rtype: bool """ nodeFlags = self.__engine.parameters.databaseReadOnly and int(Qt.ItemIsSelectable | Qt.ItemIsEnabled) or \ int(Qt.ItemIsSelectable | Qt.ItemIsEnabled | Qt.ItemIsDragEnabled) iblSets = iblSets or self.__collectionsOutliner.getCollectionsIblSets( self.__collectionsOutliner.getSelectedCollections() or self.__collectionsOutliner.getCollections()) rootNode = umbra.ui.nodes.DefaultNode(name="InvisibleRootNode") for iblSet in iblSets: if self.__panoramicThumbnails: iconPath = foundations.common.getFirstItem(filter(foundations.common.pathExists, [iblSet.backgroundImage, iblSet.previewImage])) iconSize = self.__panoramicThumbnailsSize else: iconPath = iblSet.icon iconSize = self.__squareThumbnailsSize iblSetNode = IblSetNode(iblSet, name=iblSet.title, parent=rootNode, nodeFlags=nodeFlags, attributesFlags=int(Qt.ItemIsSelectable | Qt.ItemIsEnabled), iconPath=iconPath, iconSize=iconSize, iconPlaceholder=self.__iconPlaceHolder) path = foundations.strings.toString(iblSet.path) if not foundations.common.pathExists(path): continue not self.__engine.fileSystemEventsManager.isPathRegistered(path) and \ self.__engine.fileSystemEventsManager.registerPath(path, modifiedTime=float(iblSet.osStats.split(",")[8])) rootNode.sortChildren(attribute="title") self.__model.initializeModel(rootNode) return True def getIblSetByName(self, name): """ Returns Database Ibl Set with given name. :param name: Ibl Set name. :type name: unicode :return: Database Ibl Set. :rtype: IblSet :note: The filtering is actually performed on 'title' attributes instead of 'name' attributes. """ iblSets = self.filterIblSets(r"^{0}$".format(name), "title") return foundations.common.getFirstItem(iblSets) def getSelectedNodes(self): """ Returns the current active View selected nodes. :return: View selected nodes. :rtype: dict """ return self.getActiveView().getSelectedNodes() def getSelectedIblSetsNodes(self): """ Returns the current active View selected Ibl Sets nodes. :return: View selected Ibl Sets nodes. :rtype: list """ return [node for node in self.getSelectedNodes() if node.family == "IblSet"] def getSelectedIblSets(self): """ Returns the current active View selected Ibl Sets. :return: View selected Ibl Sets. :rtype: list """ return [node.databaseItem for node in self.getSelectedIblSetsNodes()]
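
# Hedged usage sketch (not part of the component above): the regex-on-attribute
# filtering pattern used by filterIblSets() and getIblSetByName(), rewritten on plain
# objects so it runs standalone. The _ExampleIblSet namedtuple and the sample data
# are illustrative assumptions, not sibl_gui API.
import re
from collections import namedtuple

_ExampleIblSet = namedtuple("_ExampleIblSet", ("title", "path"))


def _filter_example_ibl_sets(ibl_sets, pattern, attribute, flags=re.IGNORECASE):
    # Mirror the component behaviour: an invalid pattern yields an empty list.
    try:
        compiled = re.compile(pattern, flags)
    except Exception:
        return list()
    return [ibl_set for ibl_set in ibl_sets if compiled.search(getattr(ibl_set, attribute) or "")]


if __name__ == "__main__":
    sets = [_ExampleIblSet("Studio 01", "/sets/studio01.ibl"),
            _ExampleIblSet("Beach", "/sets/beach.ibl")]
    # Exact-title lookup, as getIblSetByName() does with its "^{0}$" pattern.
    print(_filter_example_ibl_sets(sets, r"^Beach$", "title"))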
PypiClean
/ray_for_mars-1.12.1-cp38-cp38-manylinux2014_x86_64.whl/ray_for_mars-1.12.1.data/purelib/ray/_private/async_compat.py
import asyncio import inspect try: import uvloop except ImportError: uvloop = None try: # This function has been added in Python 3.7. Prior to Python 3.7, # the low-level asyncio.ensure_future() function can be used instead. from asyncio import create_task # noqa: F401 except ImportError: from asyncio import ensure_future as create_task # noqa: F401 try: from asyncio import get_running_loop # noqa: F401 except ImportError: from asyncio import _get_running_loop as get_running_loop # noqa: F401 def get_new_event_loop(): """Construct a new event loop. Ray will use uvloop if it exists""" if uvloop: return uvloop.new_event_loop() else: return asyncio.new_event_loop() def sync_to_async(func): """Convert a blocking function to async function""" if inspect.iscoroutinefunction(func): return func async def wrapper(*args, **kwargs): return func(*args, **kwargs) return wrapper try: from contextlib import asynccontextmanager except ImportError: # Copy from https://github.com/python-trio/async_generator # for compatible with Python 3.6 import sys from functools import wraps from inspect import isasyncgenfunction class _aclosing: def __init__(self, aiter): self._aiter = aiter async def __aenter__(self): return self._aiter async def __aexit__(self, *args): await self._aiter.aclose() # Very much derived from the one in contextlib, by copy/pasting and then # asyncifying everything. (Also I dropped the obscure support for using # context managers as function decorators. It could be re-added; I just # couldn't be bothered.) # So this is a derivative work licensed under the PSF License, which requires # the following notice: # # Copyright © 2001-2017 Python Software Foundation; All Rights Reserved class _AsyncGeneratorContextManager: def __init__(self, func, args, kwds): self._func_name = func.__name__ self._agen = func(*args, **kwds).__aiter__() async def __aenter__(self): if sys.version_info < (3, 5, 2): self._agen = await self._agen try: return await self._agen.asend(None) except StopAsyncIteration: raise RuntimeError("async generator didn't yield") from None async def __aexit__(self, type, value, traceback): async with _aclosing(self._agen): if type is None: try: await self._agen.asend(None) except StopAsyncIteration: return False else: raise RuntimeError("async generator didn't stop") else: # It used to be possible to have type != None, value == None: # https://bugs.python.org/issue1705170 # but AFAICT this can't happen anymore. assert value is not None try: await self._agen.athrow(type, value, traceback) raise RuntimeError("async generator didn't stop after athrow()") except StopAsyncIteration as exc: # Suppress StopIteration *unless* it's the same exception # that was passed to throw(). This prevents a # StopIteration raised inside the "with" statement from # being suppressed. return exc is not value except RuntimeError as exc: # Don't re-raise the passed in exception. (issue27112) if exc is value: return False # Likewise, avoid suppressing if a StopIteration exception # was passed to throw() and later wrapped into a # RuntimeError (see PEP 479). if ( isinstance(value, (StopIteration, StopAsyncIteration)) and exc.__cause__ is value ): return False raise except: # noqa: E722 # only re-raise if it's *not* the exception that was # passed to throw(), because __exit__() must not raise an # exception unless __exit__() itself failed. But throw() # has to raise the exception to signal propagation, so # this fixes the impedance mismatch between the throw() # protocol and the __exit__() protocol. 
# if sys.exc_info()[1] is value: return False raise def __enter__(self): raise RuntimeError( "use 'async with {func_name}(...)', not 'with {func_name}(...)'".format( func_name=self._func_name ) ) def __exit__(self): # pragma: no cover assert False, """Never called, but should be defined""" def asynccontextmanager(func): """Like @contextmanager, but async.""" if not isasyncgenfunction(func): raise TypeError( "must be an async generator (native or from async_generator; " "if using @async_generator then @acontextmanager must be on top." ) @wraps(func) def helper(*args, **kwds): return _AsyncGeneratorContextManager(func, args, kwds) # A hint for sphinxcontrib-trio: helper.__returns_acontextmanager__ = True return helper
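
# Hedged usage sketch (not part of the module above): combining sync_to_async() and
# get_new_event_loop() defined here. The blocking_add() function is an illustrative
# assumption, not Ray API.
if __name__ == "__main__":

    def blocking_add(a, b):
        return a + b

    async def _demo():
        add = sync_to_async(blocking_add)  # plain callable -> awaitable wrapper
        return await add(1, 2)

    loop = get_new_event_loop()  # uvloop event loop if available, else asyncio
    print(loop.run_until_complete(_demo()))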
PypiClean
/flux-python-0.51.0.tar.gz/flux-python-0.51.0/flux/constraint/parser.py
import argparse import json import re import ply.yacc as yacc from ply import lex class ConstraintSyntaxError(Exception): """ Specialized SyntaxError exception to allow ConstraintParser to throw a SyntaxError without PLY trying to force recovery. """ pass class ConstraintLexer(object): """ Simple constraint query syntax lexical analyzer based on RFC 35. Used mainly as the lexer for BaseConstraintParser """ # Different quoting states for single vs double quotes: states = ( ("squoting", "exclusive"), ("dquoting", "exclusive"), ) tokens = ( "NOT", "AND", "OR", "NEGATE", "LPAREN", "RPAREN", "TOKEN", "QUOTE", ) # Ignore whitespace in default state t_ignore = " \t\r\n\f\v" # Tokens in 'quoting' state t_squoting_ignore = "" t_dquoting_ignore = "" def __init__(self, **kw_args): super().__init__() self.lexer = lex.lex(module=self, **kw_args) self.parens_level = 0 self.last_lparens = 0 self.last_rparens = 0 self.last_quote = None self.quote_start = None self.pending_token = None def input(self, data): self.lexer.push_state("INITIAL") self.parens_level = 0 self.last_lparens = 0 self.last_rparens = 0 self.last_quote = None self.quote_start = None self.lexer.input(data) def __getattr__(self, attr): return getattr(self.lexer, attr) def t_ANY_error(self, t): raise ConstraintSyntaxError( f"Illegal character '{t.value[0]}' at position {t.lexer.lexpos}" ) # Define special tokens as functions before t_TOKEN so they are # guaranteed to take precedence. # c.f. http://www.dabeaz.com/ply/ply.html#ply_nn6 def t_NEGATE(self, t): r"-" return t def t_NOT(self, t): r"not\b" return t def t_AND(self, t): r"&{1,2}|and\b" return t def t_OR(self, t): r"\|{1,2}|or\b" return t def t_LPAREN(self, t): r"\(" self.parens_level += 1 self.last_lparens = t.lexer.lexpos - 1 return t def t_RPAREN(self, t): r"\)" self.parens_level -= 1 self.last_rparens = t.lexer.lexpos - 1 return t def t_TOKEN(self, t): r"[^()|&\s\"\']+" if self.pending_token is not None: t.value = self.pending_token.value + t.value self.pending_token = None elif t.value.endswith(":"): # Save a token that ends with ':' to possibly combine with # any following token. This allows op:"quoted string" self.pending_token = t return None return t def t_eof(self, t): if self.pending_token is not None: val = self.pending_token.value raise ConstraintSyntaxError(f"Missing argument to token '{val}'") return None def t_QUOTE(self, t): r"'|\"" # fmt: skip self.quote_start = t.lexer.lexpos self.last_quote = t.lexer.lexpos - 1 if t.value == "'": t.lexer.begin("squoting") else: t.lexer.begin("dquoting") # quoting state: def t_squoting_TOKEN(self, t): r"([^'])+" return self.t_TOKEN(t) def t_squoting_QUOTE(self, t): r"'" self.last_quote = None t.lexer.begin("INITIAL") def t_squoting_eof(self, t): pos = self.quote_start raise ConstraintSyntaxError(f'Unclosed quote "\'" at position {pos}') def t_dquoting_TOKEN(self, t): r"([^\"])+" return self.t_TOKEN(t) def t_dquoting_QUOTE(self, t): r"\"" # fmt: skip self.last_quote = None t.lexer.begin("INITIAL") def t_dquoting_eof(self, t): pos = self.quote_start raise ConstraintSyntaxError(f"Unclosed quote '\"' at position {pos}") class ConstraintParser: r""" Base constraint query syntax parser class. This class implements an RFC 35 compliant constraint query syntax parser with the following simplified grammar: :: expr : expr expr | expr and expr | expr or expr | not expr | '(' expr ')' | '-' term | term and : &{1,2}|and|AND or : \|{1,2}|or|OR not : not|NOT term : \w*:?.+ # i.e. 
[operator:]operand Where a term is a constraint operation which has the form '[operator:]operand'. If the token does not include a `:`, then a class default operator may optionally be substituted, e.g. "operand" becomes "default:operand". By default, ``operand`` is included as a single entry in the RFC 31 values array for the operator, i.e. ``{"operator":["operand"]}``. However, if ``operator`` appears in the self.split_values dict of the parser object, then the corresponding string will be used to split ``value`` into multiple entries. E.g. if :: split_values = { "op": "," } Then ``op:foo,bar`` would result in ``{"op":["foo","bar"]}``. Terms are joined by AND unless OR is specified, such that ``a b c`` is the same as ``a && b && c``. A term can be negated with ``-`` (e.g. ``-a b c`` is equivlant to ``(not a)&b&c``), but to negate a whole expression, NOT must be used (e.g. ``-(a|b)`` is a syntax error, use ``not (a|b)`` instead). As a result of parsing, an RFC 31 constraint object is returned as a Python dict. Attributes: operator_map (dict): A mapping of operator names, used to specify default and shorthand operator names. To configura a default operator, specify ``None`` as a key, e.g. :: operator_map = { None: "name" } split_values (dict): A mapping of operator name to string on which to split values of that operator. For instance ``{"op": ","}`` would autosplit operator ``op`` values on comma. combined_terms (set): A set of operator terms whose values can be combined when joined with the AND logical operator. E.g. if "test" is in ``combined_terms``, then :: test:a test:b would produce :: {"test": ["a", "b"]} instead of :: {"and": [{"test": ["a"]}, {"test": ["b"]}]} Subclasses can set the values of the above attributes to create a custom parser with default operators, split value handling, and set of combined terms. E.g.: :: class MyConstraintParser(ConstraintParser): operator_map = { None: "default", "foo": "long_foo" } split_values = { "default": "," } combined_terms = set("default") By default there is no mapping for ``None`` which means each term requires an operator. """ precedence = ( ("left", "OR"), ("left", "AND"), ("right", "NOT"), ("right", "NEGATE"), ) # Mapping of operator shorthand names to full names # Subclasses should provide this mapping operator_map = {} # Mapping of operator to a string on which to split the operator's # value. The value is typically stored as one entry in an array, # but if set, the split string can be used to return multiple entries. split_values = {} # Combined terms combined_terms = set() def __init__( self, lexer=None, optimize=True, debug=False, write_tables=False, **kw_args ): super().__init__() self.lexer = ConstraintLexer() if lexer is None else lexer self.tokens = self.lexer.tokens self.query = None self.parser = yacc.yacc( module=self, optimize=optimize, debug=debug, write_tables=write_tables, **kw_args, ) def parse(self, query, **kw_args): self.query = query return self.parser.parse(query, lexer=self.lexer, debug=0, **kw_args) def p_error(self, p): if p is None: if self.lexer.parens_level > 0: pos = self.lexer.last_lparens raise ConstraintSyntaxError(f"Unclosed parenthesis in position {pos}") raise ConstraintSyntaxError(f"Unexpected end of input in '{self.query}'") if self.lexer.parens_level < 0: raise ConstraintSyntaxError( "Mismatched parentheses starting at position {self.lexer.last_rparens}." 
) raise ConstraintSyntaxError(f"Invalid token '{p.value}' at position {p.lexpos}") def combine_like_terms(self, p1, p2): combined_terms = {} terms = [] entries = [p1, p2] # First, attempt to combine any "and" terms if "and" in p1: p1["and"].append(p2) entries = [p1] # Then, combine any requested combined terms for entry in entries: key = list(entry)[0] if key not in self.combined_terms: terms.append(entry) elif key not in combined_terms: combined_terms[key] = entry[key] terms.append(entry) else: combined_terms[key].extend(entry[key]) if len(terms) == 1: return terms[0] else: return {"and": terms} @staticmethod def invalid_operator(op): match = re.search(r"[^\w.+@-]", op) if not match: return None return match[0] def p_expression_space(self, p): """ expression : expression expression %prec AND """ p[0] = self.combine_like_terms(p[1], p[2]) def p_expression_and(self, p): """ expression : expression AND expression """ p[0] = self.combine_like_terms(p[1], p[3]) def p_expression_or(self, p): """ expression : expression OR expression """ if "or" in p[1]: # Combine this `or` with a previous one p[1]["or"].append(p[3]) p[0] = p[1] else: p[0] = {"or": [p[1], p[3]]} def p_expression_unot(self, p): """ expression : NOT expression | NEGATE token """ p[0] = {"not": [p[2]]} def p_expression_parens(self, p): """ expression : LPAREN expression RPAREN """ p[0] = p[2] def p_token(self, p): """ expression : token """ p[0] = p[1] def p_expression_token(self, p): """ token : TOKEN """ op, colon, value = p[1].partition(":") if not colon: if None not in self.operator_map: raise ConstraintSyntaxError(f'Missing required operator in "{p[1]}"') op = self.operator_map[None] value = p[1] elif op in self.operator_map: op = self.operator_map[op] invalid = self.invalid_operator(op) if invalid: raise ConstraintSyntaxError( f"invalid character '{invalid}' in operator '{op}:'" ) if op in self.split_values: p[0] = {op: value.split(self.split_values[op])} else: p[0] = {op: [value]} def p_quoted_token(self, p): """ token : QUOTE TOKEN QUOTE """ p[0] = p[2] if __name__ == "__main__": """ Test command which can be run as flux python -m flux.constraint.parser Also used to generate ply's parsetab.py in defined outputdir. """ argparser = argparse.ArgumentParser(prog="constraint.parser") argparser.add_argument( "--outputdir", metavar="DIR", type=str, help="Set outputdir for parsetab.py generation", ) argparser.add_argument( "--default-op", metavar="NAME", type=str, help="Set a default operator to substitute for bare terms", ) argparser.add_argument( "--debug", action="store_true", help="Emit lexer debug information before parsing expression", ) argparser.add_argument( "expression", metavar="EXPRESSION", type=str, nargs="*", help="Expression to parse", ) args = argparser.parse_args() if args.outputdir: print(f"Generating constraint parsetab.py in {args.outputdir}") class TConstraintParser(ConstraintParser): if args.default_op: operator_map = {None: args.default_op} parser = TConstraintParser( optimize=False, debug=True, write_tables=True, outputdir=args.outputdir ) if args.expression: s = " ".join(args.expression) if args.debug: print(f"parsing expression `{s}'") if args.debug: lexer = ConstraintLexer() lexer.input(s) while True: tok = lexer.token() if not tok: break print(tok) print(json.dumps(parser.parse(s)))
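
# Hedged usage sketch (not part of the module above): a minimal ConstraintParser
# subclass along the lines described in the class docstring. The operator names
# "name" and "host" and the sample query are illustrative assumptions.
class _ExampleJobConstraintParser(ConstraintParser):
    operator_map = {None: "name"}   # bare terms become name:<term>
    split_values = {"host": ","}    # host:a,b -> {"host": ["a", "b"]}
    combined_terms = {"name"}


def _example_parse():
    parser = _ExampleJobConstraintParser()
    # Returns an RFC 31 constraint object as a Python dict, e.g. combining the
    # "name" and "host" terms with AND and OR-ing in the negated term.
    return parser.parse("foo host:node1,node2 or not bar")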
PypiClean
/differentiable_robot_model-0.2.3-py3-none-any.whl/differentiable_robot_model/rigid_body_params.py
import torch import numpy as np import math from . import se3_so3_util, utils class UnconstrainedScalar(torch.nn.Module): def __init__(self, init_val=None): super(UnconstrainedScalar, self).__init__() if init_val is None: self.param = torch.nn.Parameter(torch.rand(1)) else: self.param = torch.nn.Parameter(init_val) def forward(self): return self.param class PositiveScalar(torch.nn.Module): def __init__( self, min_val=0.0, init_param_std=1.0, init_param=None, ): super().__init__() self._min_val = min_val if init_param is None: init_param_value = torch.empty(1, 1).normal_(mean=0.0, std=init_param_std) else: init_param_value = torch.sqrt(init_param - self._min_val) self.l = torch.nn.Parameter(init_param_value.squeeze()) def forward(self): positive_value = ((self.l * self.l) + self._min_val).squeeze() return positive_value class UnconstrainedTensor(torch.nn.Module): def __init__(self, dim1, dim2, init_tensor=None, init_std=0.1): super().__init__() self._dim1 = dim1 self._dim2 = dim2 if init_tensor is None: init_tensor = torch.empty(dim1, dim2).normal_(mean=0.0, std=init_std) self.param = torch.nn.Parameter(init_tensor) def forward(self): return self.param class SymmMatNet(torch.nn.Module): """ Symmetric Matrix Networks """ def __init__(self, qdim): self._qdim = qdim super().__init__() def forward(self, l): """ :param l: vector containing lower triangular and diagonal components of the output symmetric matrix SM :return: Symmetric matrix SM """ batch_size = l.size(0) SM = l.new_zeros(batch_size, self._qdim, self._qdim) L_tril = l.new_zeros(batch_size, self._qdim, self._qdim) if self._qdim > 1: l_tril = l[:, self._qdim :] L_tril = utils.bfill_lowertriangle(L_tril, l_tril) l_diag = l[:, : self._qdim] SM = utils.bfill_diagonal(SM, l_diag) SM += L_tril + L_tril.transpose(-2, -1) return SM class CholeskyNet(torch.nn.Module): """ Symmetric Positive Definite Matrix Networks via Cholesky Decomposition """ def __init__(self, qdim, bias): self._qdim = qdim self._bias = bias super().__init__() def get_raw_l(self, raw_l_input): """ Return vector raw_l, which is the non-zero elements of lower-triangular matrix L in Cholesky decomposition, WITHOUT adding positive bias (yet) to the components of raw_l that corresponds to the diagonal components of L. 
""" return raw_l_input # identity mapping def get_l(self, raw_l_input): raw_l = self.get_raw_l(raw_l_input) l = raw_l.new_zeros(raw_l.shape) l[ :, : self._qdim ] += ( self._bias ) # add bias to ensure positive definiteness of the resulting inertia matrix l += raw_l return l def get_L(self, l): batch_size = l.size(0) L = l.new_zeros(batch_size, self._qdim, self._qdim) if self._qdim > 1: l_tril = l[:, self._qdim :] L = utils.bfill_lowertriangle(L, l_tril) l_diag = l[:, : self._qdim] L = utils.bfill_diagonal(L, l_diag) return L def get_symm_pos_semi_def_matrix_and_l(self, raw_l_input): """ :param raw_l_input: please see definition of get_raw_l() :return: Symmetric positive semi-definite matrix SPSD and the vector l (please see definition of get_l()) """ l = self.get_l(raw_l_input) L = self.get_L(l) SPSD = L @ L.transpose(-2, -1) return SPSD, l class TriangParam3DInertiaMatrixNet(torch.nn.Module): """ 3D inertia matrix with triangular parameterized principal moments of inertia """ def __init__( self, bias, init_param_std=0.01, init_param=None, is_initializing_params=True ): self._qdim = 3 self._bias = bias super().__init__() if (init_param is None) or (not is_initializing_params): init_inertia_ori_axis_angle_param_value = torch.empty(1, 3).normal_( mean=0.0, std=init_param_std ) init_J1_param_value = None init_J2_param_value = None init_alpha_param_param_value = None else: init_param = init_param.squeeze().numpy() [R, J_diag, _] = np.linalg.svd(init_param, full_matrices=True) if ( np.linalg.det(R) < 0.0 ): # make sure this is really a member of SO(3), not just O(3) R[:, 0] = -R[:, 0] init_inertia_ori_axis_angle_param_value = ( se3_so3_util.getVec3FromSkewSymMat(se3_so3_util.logMapSO3(R)) ) init_J1_param_value = J_diag[0] init_J2_param_value = J_diag[1] init_alpha_param_value = np.arccos( ( (J_diag[0] * J_diag[0]) + (J_diag[1] * J_diag[1]) - (J_diag[2] * J_diag[2]) ) / (2.0 * J_diag[0] * J_diag[1]) ) init_alpha_div_pi_param_value = init_alpha_param_value / math.pi # inverse sigmoid: init_alpha_param_param_value = np.log( init_alpha_div_pi_param_value / (1.0 - init_alpha_div_pi_param_value) ) init_inertia_ori_axis_angle_param_value = torch.tensor( init_inertia_ori_axis_angle_param_value, dtype=torch.float32 ) init_J1_param_value = torch.tensor(init_J1_param_value, dtype=torch.float32) init_J2_param_value = torch.tensor(init_J2_param_value, dtype=torch.float32) assert ( init_J1_param_value > bias ), "Please set bias value smaller, such that this condition is satisfied!" assert ( init_J2_param_value > bias ), "Please set bias value smaller, such that this condition is satisfied!" 
init_alpha_param_param_value = torch.tensor( init_alpha_param_param_value, dtype=torch.float32 ) self.inertia_ori_axis_angle = torch.nn.Parameter( init_inertia_ori_axis_angle_param_value.squeeze() ) self.inertia_ori_axis_angle.requires_grad = True self.J1net = PositiveScalar( min_val=bias, init_param_std=0.1, init_param=init_J1_param_value, ) self.J2net = PositiveScalar( min_val=bias, init_param_std=0.1, init_param=init_J2_param_value, ) self.alpha_param_net = UnconstrainedTensor( dim1=1, dim2=1, init_std=init_param_std, init_param=init_alpha_param_param_value, ) self.J = None self.R = None self.inertia_mat = None def forward(self): alpha = math.pi * torch.sigmoid( self.alpha_param_net().squeeze() ) # 0 < alpha < pi J1 = self.J1net().squeeze() J2 = self.J2net().squeeze() J3 = torch.sqrt((J1 * J1) + (J2 * J2) - (2.0 * J1 * J2 * torch.cos(alpha))) self.J = torch.zeros((3, 3), device=alpha.device) self.J[0, 0] = J1 self.J[1, 1] = J2 self.J[2, 2] = J3 self.R = utils.exp_map_so3(self.inertia_ori_axis_angle) self.inertia_mat = self.R @ (self.J @ self.R.t()) # if (np.isnan(self.inertia_mat.detach().numpy()).any()): # print(self.inertia_mat) return self.inertia_mat class CovParameterized3DInertiaMatrixNet(CholeskyNet): """ Inertia matrix parameterized by density-weighted covariance of a rigid body (please see the paper "Linear Matrix Inequalities for Physically-Consistent Inertial Parameter Identification: A Statistical Perspective on the Mass Distribution" by Wensing et al. (2017), section IV.A and IV.B) """ def __init__( self, bias=1.0e-7, init_param_std=0.01, init_param=None, is_initializing_params=True, ): super().__init__(qdim=3, bias=0) self.spd_3d_cov_inertia_mat_diag_bias = bias if (init_param is None) or (not is_initializing_params): init_param_value = torch.empty(1, 6).normal_(mean=0.0, std=init_param_std) else: init_inertia_matrix = init_param.squeeze() init_spd_3d_cov_inertia_matrix = init_param.new_zeros((3, 3)) init_spd_3d_cov_inertia_matrix[0, 0] = 0.5 * ( -init_inertia_matrix[0, 0] + init_inertia_matrix[1, 1] + init_inertia_matrix[2, 2] ) init_spd_3d_cov_inertia_matrix[1, 1] = 0.5 * ( init_inertia_matrix[0, 0] - init_inertia_matrix[1, 1] + init_inertia_matrix[2, 2] ) init_spd_3d_cov_inertia_matrix[2, 2] = 0.5 * ( init_inertia_matrix[0, 0] + init_inertia_matrix[1, 1] - init_inertia_matrix[2, 2] ) init_spd_3d_cov_inertia_matrix[1, 0] = -init_inertia_matrix[1, 0] init_spd_3d_cov_inertia_matrix[2, 0] = -init_inertia_matrix[2, 0] init_spd_3d_cov_inertia_matrix[2, 1] = -init_inertia_matrix[2, 1] init_spd_3d_cov_inertia_matrix[0, 1] = init_spd_3d_cov_inertia_matrix[1, 0] init_spd_3d_cov_inertia_matrix[0, 2] = init_spd_3d_cov_inertia_matrix[2, 0] init_spd_3d_cov_inertia_matrix[1, 2] = init_spd_3d_cov_inertia_matrix[2, 1] L = torch.tensor( np.linalg.cholesky( init_spd_3d_cov_inertia_matrix.numpy() - (self.spd_3d_cov_inertia_mat_diag_bias * np.eye(3)) ), dtype=torch.float32, ) diag_indices = np.diag_indices( min( init_spd_3d_cov_inertia_matrix.size(-2), init_spd_3d_cov_inertia_matrix.size(-1), ) ) tril_indices = np.tril_indices( init_spd_3d_cov_inertia_matrix.size(-2), k=-1, m=init_spd_3d_cov_inertia_matrix.size(-1), ) dim0_indices = np.hstack([diag_indices[0], tril_indices[0]]) dim1_indices = np.hstack([diag_indices[1], tril_indices[1]]) init_param_value = L[dim0_indices, dim1_indices].reshape( (1, dim0_indices.shape[0]) ) self.l = torch.nn.Parameter(init_param_value.squeeze()) self.l.requires_grad = True def forward(self): raw_l_input = self.l.unsqueeze(0) [spsd_3d_cov_inertia_matrix, _] = 
super().get_symm_pos_semi_def_matrix_and_l( raw_l_input=raw_l_input ) spsd_3d_cov_inertia_matrix = spsd_3d_cov_inertia_matrix.squeeze() spd_3d_cov_inertia_matrix = spsd_3d_cov_inertia_matrix + ( self.spd_3d_cov_inertia_mat_diag_bias * torch.eye(3, device=self.l.device) ) inertia_matrix = spd_3d_cov_inertia_matrix.new_zeros((3, 3)) inertia_matrix[0, 0] = ( spd_3d_cov_inertia_matrix[1, 1] + spd_3d_cov_inertia_matrix[2, 2] ) inertia_matrix[1, 1] = ( spd_3d_cov_inertia_matrix[0, 0] + spd_3d_cov_inertia_matrix[2, 2] ) inertia_matrix[2, 2] = ( spd_3d_cov_inertia_matrix[0, 0] + spd_3d_cov_inertia_matrix[1, 1] ) inertia_matrix[1, 0] = -spd_3d_cov_inertia_matrix[1, 0] inertia_matrix[2, 0] = -spd_3d_cov_inertia_matrix[2, 0] inertia_matrix[2, 1] = -spd_3d_cov_inertia_matrix[2, 1] inertia_matrix[0, 1] = inertia_matrix[1, 0] inertia_matrix[0, 2] = inertia_matrix[2, 0] inertia_matrix[1, 2] = inertia_matrix[2, 1] return inertia_matrix class SymmPosDef3DInertiaMatrixNet(CholeskyNet): def __init__( self, bias=1e-7, init_param_std=0.01, init_param=None, is_initializing_params=True, ): super().__init__(qdim=3, bias=0) self.spd_3d_inertia_mat_diag_bias = bias if (init_param is None) or (not is_initializing_params): init_param_value = torch.empty(1, 6).normal_(mean=0.0, std=init_param_std) else: L = torch.tensor( np.linalg.cholesky( init_param.squeeze().numpy() - (self.spd_3d_inertia_mat_diag_bias * np.eye(3)) ), dtype=torch.float32, ) diag_indices = np.diag_indices( min(init_param.size(-2), init_param.size(-1)) ) tril_indices = np.tril_indices( init_param.size(-2), k=-1, m=init_param.size(-1) ) dim0_indices = np.hstack([diag_indices[0], tril_indices[0]]) dim1_indices = np.hstack([diag_indices[1], tril_indices[1]]) init_param_value = L[dim0_indices, dim1_indices].reshape( (1, dim0_indices.shape[0]) ) self.l = torch.nn.Parameter(init_param_value.squeeze()) self.l.requires_grad = True def forward(self): raw_l_input = self.l.unsqueeze(0) [spsd_3d_inertia_matrix, _] = super().get_symm_pos_semi_def_matrix_and_l( raw_l_input=raw_l_input ) spd_3d_inertia_matrix = spsd_3d_inertia_matrix.squeeze() + ( self.spd_3d_inertia_mat_diag_bias * torch.eye(3, device=self.l.device) ) return spd_3d_inertia_matrix class Symm3DInertiaMatrixNet(SymmMatNet): def __init__( self, init_param_std=0.01, init_param=None, is_initializing_params=True ): super().__init__(qdim=3) if (init_param is None) or (not is_initializing_params): init_param_value = torch.empty(1, 6).normal_(mean=0.0, std=init_param_std) else: diag_indices = np.diag_indices(3) tril_indices = np.tril_indices(3, k=-1, m=3) dim0_indices = np.hstack([diag_indices[0], tril_indices[0]]) dim1_indices = np.hstack([diag_indices[1], tril_indices[1]]) init_param_value = init_param[0, dim0_indices, dim1_indices].reshape((1, 6)) self.l = torch.nn.Parameter(init_param_value.squeeze()) self.l.requires_grad = True def forward(self): return super().forward(self.l.unsqueeze(0)).squeeze()
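
# Hedged usage sketch (not part of the module above): querying two of the
# parameterizations defined here. The bias, std, and min_val numbers are
# illustrative assumptions.
if __name__ == "__main__":
    # 3x3 rotational inertia from the density-weighted covariance parameterization;
    # symmetric and physically consistent by construction.
    inertia_net = CovParameterized3DInertiaMatrixNet(bias=1.0e-7, init_param_std=0.01)
    inertia = inertia_net()
    print(inertia.shape, torch.allclose(inertia, inertia.t()))

    # Strictly positive scalar (e.g. a link mass) with a lower bound.
    mass_net = PositiveScalar(min_val=1.0e-3)
    print(float(mass_net()) >= 1.0e-3)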
PypiClean
/DLTA-AI-1.1.tar.gz/DLTA-AI-1.1/DLTA_AI_app/mmdetection/mmdet/models/dense_heads/lad_head.py
import torch from mmcv.runner import force_fp32 from mmdet.core import bbox_overlaps, multi_apply from ..builder import HEADS from .paa_head import PAAHead, levels_to_images @HEADS.register_module() class LADHead(PAAHead): """Label Assignment Head from the paper: `Improving Object Detection by Label Assignment Distillation <https://arxiv.org/pdf/2108.10520.pdf>`_""" @force_fp32(apply_to=('cls_scores', 'bbox_preds', 'iou_preds')) def get_label_assignment(self, cls_scores, bbox_preds, iou_preds, gt_bboxes, gt_labels, img_metas, gt_bboxes_ignore=None): """Get label assignment (from teacher). Args: cls_scores (list[Tensor]): Box scores for each scale level. Has shape (N, num_anchors * num_classes, H, W) bbox_preds (list[Tensor]): Box energies / deltas for each scale level with shape (N, num_anchors * 4, H, W) iou_preds (list[Tensor]): iou_preds for each scale level with shape (N, num_anchors * 1, H, W) gt_bboxes (list[Tensor]): Ground truth bboxes for each image with shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. gt_labels (list[Tensor]): class indices corresponding to each box img_metas (list[dict]): Meta information of each image, e.g., image size, scaling factor, etc. gt_bboxes_ignore (list[Tensor] | None): Specify which bounding boxes can be ignored when are computing the loss. Returns: tuple: Returns a tuple containing label assignment variables. - labels (Tensor): Labels of all anchors, each with shape (num_anchors,). - labels_weight (Tensor): Label weights of all anchor. each with shape (num_anchors,). - bboxes_target (Tensor): BBox targets of all anchors. each with shape (num_anchors, 4). - bboxes_weight (Tensor): BBox weights of all anchors. each with shape (num_anchors, 4). - pos_inds_flatten (Tensor): Contains all index of positive sample in all anchor. - pos_anchors (Tensor): Positive anchors. - num_pos (int): Number of positive anchors. 
""" featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] assert len(featmap_sizes) == self.prior_generator.num_levels device = cls_scores[0].device anchor_list, valid_flag_list = self.get_anchors( featmap_sizes, img_metas, device=device) label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 cls_reg_targets = self.get_targets( anchor_list, valid_flag_list, gt_bboxes, img_metas, gt_bboxes_ignore_list=gt_bboxes_ignore, gt_labels_list=gt_labels, label_channels=label_channels, ) (labels, labels_weight, bboxes_target, bboxes_weight, pos_inds, pos_gt_index) = cls_reg_targets cls_scores = levels_to_images(cls_scores) cls_scores = [ item.reshape(-1, self.cls_out_channels) for item in cls_scores ] bbox_preds = levels_to_images(bbox_preds) bbox_preds = [item.reshape(-1, 4) for item in bbox_preds] pos_losses_list, = multi_apply(self.get_pos_loss, anchor_list, cls_scores, bbox_preds, labels, labels_weight, bboxes_target, bboxes_weight, pos_inds) with torch.no_grad(): reassign_labels, reassign_label_weight, \ reassign_bbox_weights, num_pos = multi_apply( self.paa_reassign, pos_losses_list, labels, labels_weight, bboxes_weight, pos_inds, pos_gt_index, anchor_list) num_pos = sum(num_pos) # convert all tensor list to a flatten tensor labels = torch.cat(reassign_labels, 0).view(-1) flatten_anchors = torch.cat( [torch.cat(item, 0) for item in anchor_list]) labels_weight = torch.cat(reassign_label_weight, 0).view(-1) bboxes_target = torch.cat(bboxes_target, 0).view(-1, bboxes_target[0].size(-1)) pos_inds_flatten = ((labels >= 0) & (labels < self.num_classes)).nonzero().reshape(-1) if num_pos: pos_anchors = flatten_anchors[pos_inds_flatten] else: pos_anchors = None label_assignment_results = (labels, labels_weight, bboxes_target, bboxes_weight, pos_inds_flatten, pos_anchors, num_pos) return label_assignment_results def forward_train(self, x, label_assignment_results, img_metas, gt_bboxes, gt_labels=None, gt_bboxes_ignore=None, **kwargs): """Forward train with the available label assignment (student receives from teacher). Args: x (list[Tensor]): Features from FPN. label_assignment_results (tuple): As the outputs defined in the function `self.get_label_assignment`. img_metas (list[dict]): Meta information of each image, e.g., image size, scaling factor, etc. gt_bboxes (Tensor): Ground truth bboxes of the image, shape (num_gts, 4). gt_labels (Tensor): Ground truth labels of each box, shape (num_gts,). gt_bboxes_ignore (Tensor): Ground truth bboxes to be ignored, shape (num_ignored_gts, 4). Returns: losses: (dict[str, Tensor]): A dictionary of loss components. """ outs = self(x) if gt_labels is None: loss_inputs = outs + (gt_bboxes, img_metas) else: loss_inputs = outs + (gt_bboxes, gt_labels, img_metas) losses = self.loss( *loss_inputs, gt_bboxes_ignore=gt_bboxes_ignore, label_assignment_results=label_assignment_results) return losses @force_fp32(apply_to=('cls_scores', 'bbox_preds', 'iou_preds')) def loss(self, cls_scores, bbox_preds, iou_preds, gt_bboxes, gt_labels, img_metas, gt_bboxes_ignore=None, label_assignment_results=None): """Compute losses of the head. 
Args: cls_scores (list[Tensor]): Box scores for each scale level Has shape (N, num_anchors * num_classes, H, W) bbox_preds (list[Tensor]): Box energies / deltas for each scale level with shape (N, num_anchors * 4, H, W) iou_preds (list[Tensor]): iou_preds for each scale level with shape (N, num_anchors * 1, H, W) gt_bboxes (list[Tensor]): Ground truth bboxes for each image with shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. gt_labels (list[Tensor]): class indices corresponding to each box img_metas (list[dict]): Meta information of each image, e.g., image size, scaling factor, etc. gt_bboxes_ignore (list[Tensor] | None): Specify which bounding boxes can be ignored when are computing the loss. label_assignment_results (tuple): As the outputs defined in the function `self.get_label_assignment`. Returns: dict[str, Tensor]: A dictionary of loss gmm_assignment. """ (labels, labels_weight, bboxes_target, bboxes_weight, pos_inds_flatten, pos_anchors, num_pos) = label_assignment_results cls_scores = levels_to_images(cls_scores) cls_scores = [ item.reshape(-1, self.cls_out_channels) for item in cls_scores ] bbox_preds = levels_to_images(bbox_preds) bbox_preds = [item.reshape(-1, 4) for item in bbox_preds] iou_preds = levels_to_images(iou_preds) iou_preds = [item.reshape(-1, 1) for item in iou_preds] # convert all tensor list to a flatten tensor cls_scores = torch.cat(cls_scores, 0).view(-1, cls_scores[0].size(-1)) bbox_preds = torch.cat(bbox_preds, 0).view(-1, bbox_preds[0].size(-1)) iou_preds = torch.cat(iou_preds, 0).view(-1, iou_preds[0].size(-1)) losses_cls = self.loss_cls( cls_scores, labels, labels_weight, avg_factor=max(num_pos, len(img_metas))) # avoid num_pos=0 if num_pos: pos_bbox_pred = self.bbox_coder.decode( pos_anchors, bbox_preds[pos_inds_flatten]) pos_bbox_target = bboxes_target[pos_inds_flatten] iou_target = bbox_overlaps( pos_bbox_pred.detach(), pos_bbox_target, is_aligned=True) losses_iou = self.loss_centerness( iou_preds[pos_inds_flatten], iou_target.unsqueeze(-1), avg_factor=num_pos) losses_bbox = self.loss_bbox( pos_bbox_pred, pos_bbox_target, avg_factor=num_pos) else: losses_iou = iou_preds.sum() * 0 losses_bbox = bbox_preds.sum() * 0 return dict( loss_cls=losses_cls, loss_bbox=losses_bbox, loss_iou=losses_iou)
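
# Hedged configuration sketch (not part of the module above): roughly how this head
# might appear in an MMDetection config dict for a LAD student/teacher detector.
# Every value below (num_classes, channels, anchor and coder settings, losses) is an
# illustrative assumption borrowed from typical PAA-style heads, not from this file.
example_lad_bbox_head = dict(
    type='LADHead',
    num_classes=80,
    in_channels=256,
    stacked_convs=4,
    feat_channels=256,
    anchor_generator=dict(
        type='AnchorGenerator',
        ratios=[1.0],
        octave_base_scale=8,
        scales_per_octave=1,
        strides=[8, 16, 32, 64, 128]),
    bbox_coder=dict(
        type='DeltaXYWHBBoxCoder',
        target_means=[0.0, 0.0, 0.0, 0.0],
        target_stds=[0.1, 0.1, 0.2, 0.2]),
    loss_cls=dict(
        type='FocalLoss', use_sigmoid=True, gamma=2.0, alpha=0.25, loss_weight=1.0),
    loss_bbox=dict(type='GIoULoss', loss_weight=1.3),
    loss_centerness=dict(
        type='CrossEntropyLoss', use_sigmoid=True, loss_weight=0.5))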
PypiClean
/msgraph_beta_sdk-1.0.0a9-py3-none-any.whl/msgraph/generated/groups/item/team/schedule/open_shift_change_requests/open_shift_change_requests_request_builder.py
from __future__ import annotations from dataclasses import dataclass from kiota_abstractions.get_path_parameters import get_path_parameters from kiota_abstractions.method import Method from kiota_abstractions.request_adapter import RequestAdapter from kiota_abstractions.request_information import RequestInformation from kiota_abstractions.request_option import RequestOption from kiota_abstractions.response_handler import ResponseHandler from kiota_abstractions.serialization import Parsable, ParsableFactory from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union if TYPE_CHECKING: from ......models import open_shift_change_request, open_shift_change_request_collection_response from ......models.o_data_errors import o_data_error from .count import count_request_builder from .item import open_shift_change_request_item_request_builder class OpenShiftChangeRequestsRequestBuilder(): """ Provides operations to manage the openShiftChangeRequests property of the microsoft.graph.schedule entity. """ def __init__(self,request_adapter: RequestAdapter, path_parameters: Optional[Union[Dict[str, Any], str]] = None) -> None: """ Instantiates a new OpenShiftChangeRequestsRequestBuilder and sets the default values. Args: pathParameters: The raw url or the Url template parameters for the request. requestAdapter: The request adapter to use to execute the requests. """ if path_parameters is None: raise Exception("path_parameters cannot be undefined") if request_adapter is None: raise Exception("request_adapter cannot be undefined") # Url template to use to build the URL for the current request builder self.url_template: str = "{+baseurl}/groups/{group%2Did}/team/schedule/openShiftChangeRequests{?%24top,%24skip,%24search,%24filter,%24count,%24orderby,%24select,%24expand}" url_tpl_params = get_path_parameters(path_parameters) self.path_parameters = url_tpl_params self.request_adapter = request_adapter def by_open_shift_change_request_id(self,open_shift_change_request_id: str) -> open_shift_change_request_item_request_builder.OpenShiftChangeRequestItemRequestBuilder: """ Provides operations to manage the openShiftChangeRequests property of the microsoft.graph.schedule entity. Args: open_shift_change_request_id: Unique identifier of the item Returns: open_shift_change_request_item_request_builder.OpenShiftChangeRequestItemRequestBuilder """ if open_shift_change_request_id is None: raise Exception("open_shift_change_request_id cannot be undefined") from .item import open_shift_change_request_item_request_builder url_tpl_params = get_path_parameters(self.path_parameters) url_tpl_params["openShiftChangeRequest%2Did"] = open_shift_change_request_id return open_shift_change_request_item_request_builder.OpenShiftChangeRequestItemRequestBuilder(self.request_adapter, url_tpl_params) async def get(self,request_configuration: Optional[OpenShiftChangeRequestsRequestBuilderGetRequestConfiguration] = None) -> Optional[open_shift_change_request_collection_response.OpenShiftChangeRequestCollectionResponse]: """ The open shift requests in the schedule. Args: requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options. 
Returns: Optional[open_shift_change_request_collection_response.OpenShiftChangeRequestCollectionResponse] """ request_info = self.to_get_request_information( request_configuration ) from ......models.o_data_errors import o_data_error error_mapping: Dict[str, ParsableFactory] = { "4XX": o_data_error.ODataError, "5XX": o_data_error.ODataError, } if not self.request_adapter: raise Exception("Http core is null") from ......models import open_shift_change_request_collection_response return await self.request_adapter.send_async(request_info, open_shift_change_request_collection_response.OpenShiftChangeRequestCollectionResponse, error_mapping) async def post(self,body: Optional[open_shift_change_request.OpenShiftChangeRequest] = None, request_configuration: Optional[OpenShiftChangeRequestsRequestBuilderPostRequestConfiguration] = None) -> Optional[open_shift_change_request.OpenShiftChangeRequest]: """ Create new navigation property to openShiftChangeRequests for groups Args: body: The request body requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options. Returns: Optional[open_shift_change_request.OpenShiftChangeRequest] """ if body is None: raise Exception("body cannot be undefined") request_info = self.to_post_request_information( body, request_configuration ) from ......models.o_data_errors import o_data_error error_mapping: Dict[str, ParsableFactory] = { "4XX": o_data_error.ODataError, "5XX": o_data_error.ODataError, } if not self.request_adapter: raise Exception("Http core is null") from ......models import open_shift_change_request return await self.request_adapter.send_async(request_info, open_shift_change_request.OpenShiftChangeRequest, error_mapping) def to_get_request_information(self,request_configuration: Optional[OpenShiftChangeRequestsRequestBuilderGetRequestConfiguration] = None) -> RequestInformation: """ The open shift requests in the schedule. Args: requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options. Returns: RequestInformation """ request_info = RequestInformation() request_info.url_template = self.url_template request_info.path_parameters = self.path_parameters request_info.http_method = Method.GET request_info.headers["Accept"] = ["application/json"] if request_configuration: request_info.add_request_headers(request_configuration.headers) request_info.set_query_string_parameters_from_raw_object(request_configuration.query_parameters) request_info.add_request_options(request_configuration.options) return request_info def to_post_request_information(self,body: Optional[open_shift_change_request.OpenShiftChangeRequest] = None, request_configuration: Optional[OpenShiftChangeRequestsRequestBuilderPostRequestConfiguration] = None) -> RequestInformation: """ Create new navigation property to openShiftChangeRequests for groups Args: body: The request body requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options. 
Returns: RequestInformation """ if body is None: raise Exception("body cannot be undefined") request_info = RequestInformation() request_info.url_template = self.url_template request_info.path_parameters = self.path_parameters request_info.http_method = Method.POST request_info.headers["Accept"] = ["application/json"] if request_configuration: request_info.add_request_headers(request_configuration.headers) request_info.add_request_options(request_configuration.options) request_info.set_content_from_parsable(self.request_adapter, "application/json", body) return request_info @property def count(self) -> count_request_builder.CountRequestBuilder: """ Provides operations to count the resources in the collection. """ from .count import count_request_builder return count_request_builder.CountRequestBuilder(self.request_adapter, self.path_parameters) @dataclass class OpenShiftChangeRequestsRequestBuilderGetQueryParameters(): """ The open shift requests in the schedule. """ def get_query_parameter(self,original_name: Optional[str] = None) -> str: """ Maps the query parameters names to their encoded names for the URI template parsing. Args: originalName: The original query parameter name in the class. Returns: str """ if original_name is None: raise Exception("original_name cannot be undefined") if original_name == "count": return "%24count" if original_name == "expand": return "%24expand" if original_name == "filter": return "%24filter" if original_name == "orderby": return "%24orderby" if original_name == "search": return "%24search" if original_name == "select": return "%24select" if original_name == "skip": return "%24skip" if original_name == "top": return "%24top" return original_name # Include count of items count: Optional[bool] = None # Expand related entities expand: Optional[List[str]] = None # Filter items by property values filter: Optional[str] = None # Order items by property values orderby: Optional[List[str]] = None # Search items by search phrases search: Optional[str] = None # Select properties to be returned select: Optional[List[str]] = None # Skip the first n items skip: Optional[int] = None # Show only the first n items top: Optional[int] = None @dataclass class OpenShiftChangeRequestsRequestBuilderGetRequestConfiguration(): """ Configuration for the request such as headers, query parameters, and middleware options. """ # Request headers headers: Optional[Dict[str, Union[str, List[str]]]] = None # Request options options: Optional[List[RequestOption]] = None # Request query parameters query_parameters: Optional[OpenShiftChangeRequestsRequestBuilder.OpenShiftChangeRequestsRequestBuilderGetQueryParameters] = None @dataclass class OpenShiftChangeRequestsRequestBuilderPostRequestConfiguration(): """ Configuration for the request such as headers, query parameters, and middleware options. """ # Request headers headers: Optional[Dict[str, Union[str, List[str]]]] = None # Request options options: Optional[List[RequestOption]] = None
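
# Hedged usage sketch (not part of the module above): calling the request builder
# defined here. The group id, page size, and the request_adapter argument are
# illustrative assumptions; in practice the adapter comes from an authenticated
# Microsoft Graph client, and collection items are typically found on the
# response's `value` attribute.
async def _example_list_open_shift_change_requests(request_adapter, group_id: str):
    builder = OpenShiftChangeRequestsRequestBuilder(
        request_adapter, {"group%2Did": group_id})
    config = OpenShiftChangeRequestsRequestBuilder.OpenShiftChangeRequestsRequestBuilderGetRequestConfiguration(
        query_parameters=OpenShiftChangeRequestsRequestBuilder.OpenShiftChangeRequestsRequestBuilderGetQueryParameters(
            top=10))
    return await builder.get(request_configuration=config)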
PypiClean
/tumbo-server-0.4.12.tar.gz/tumbo-server-0.4.12/tumbo/core/static/bower_components/jquery/src/ajax/jsonp.js
define([ "../core", "./var/nonce", "./var/rquery", "../ajax" ], function( jQuery, nonce, rquery ) { var oldCallbacks = [], rjsonp = /(=)\?(?=&|$)|\?\?/; // Default jsonp settings jQuery.ajaxSetup({ jsonp: "callback", jsonpCallback: function() { var callback = oldCallbacks.pop() || ( jQuery.expando + "_" + ( nonce++ ) ); this[ callback ] = true; return callback; } }); // Detect, normalize options and install callbacks for jsonp requests jQuery.ajaxPrefilter( "json jsonp", function( s, originalSettings, jqXHR ) { var callbackName, overwritten, responseContainer, jsonProp = s.jsonp !== false && ( rjsonp.test( s.url ) ? "url" : typeof s.data === "string" && !( s.contentType || "" ).indexOf("application/x-www-form-urlencoded") && rjsonp.test( s.data ) && "data" ); // Handle iff the expected data type is "jsonp" or we have a parameter to set if ( jsonProp || s.dataTypes[ 0 ] === "jsonp" ) { // Get callback name, remembering preexisting value associated with it callbackName = s.jsonpCallback = jQuery.isFunction( s.jsonpCallback ) ? s.jsonpCallback() : s.jsonpCallback; // Insert callback into url or form data if ( jsonProp ) { s[ jsonProp ] = s[ jsonProp ].replace( rjsonp, "$1" + callbackName ); } else if ( s.jsonp !== false ) { s.url += ( rquery.test( s.url ) ? "&" : "?" ) + s.jsonp + "=" + callbackName; } // Use data converter to retrieve json after script execution s.converters["script json"] = function() { if ( !responseContainer ) { jQuery.error( callbackName + " was not called" ); } return responseContainer[ 0 ]; }; // force json dataType s.dataTypes[ 0 ] = "json"; // Install callback overwritten = window[ callbackName ]; window[ callbackName ] = function() { responseContainer = arguments; }; // Clean-up function (fires after converters) jqXHR.always(function() { // Restore preexisting value window[ callbackName ] = overwritten; // Save back as free if ( s[ callbackName ] ) { // make sure that re-using the options doesn't screw things around s.jsonpCallback = originalSettings.jsonpCallback; // save the callback name for future use oldCallbacks.push( callbackName ); } // Call if it was a function and we have a response if ( responseContainer && jQuery.isFunction( overwritten ) ) { overwritten( responseContainer[ 0 ] ); } responseContainer = overwritten = undefined; }); // Delegate to script return "script"; } }); });
PypiClean
/tackle-box-0.4.1.tar.gz/tackle-box-0.4.1/tackle/providers/tackle/hooks/debug.py
import sys from InquirerPy import prompt from pprint import pprint from rich import print from tackle import BaseHook, Field class DebugHook(BaseHook): """Hook for debugging that prints the output context and pauses runtime.""" hook_type: str = 'debug' key: str = Field(None, description="A path to a key to debug") context: str = Field( None, description="Which context to examine. One of `public`, `private`, " "`temporary`, or `existing`. Omit for all.", ) _contexts: list = ['public', 'private', 'temporary', 'existing'] def print_key(self, print_context): if self.key is not None: if self.key in print_context: pprint(print_context[self.key]) return True else: return False else: pprint(print_context[self.key]) def print_context(self, print_context, context_name: str): print(f"[bold magenta]{context_name.title()} Context[/bold magenta]") # TODO: Improve this -> The builtin pprint is better now since it does the # first level as the top level keys. # pretty = Pretty(dict(print_context)) # panel = Panel(pretty) # print(panel) pprint(print_context) def exec(self) -> None: if self.context is not None: if self.context in self._contexts: output = getattr(self, f'{self.context}_context') if output is not None: self.print_context(output, self.context) else: print(f"Debugging {self.context} not possible because it is empty.") else: print( f"Input context in debug hook `{self.context}` must be one of " f"`public`, `private`, `temporary`, or `existing`" ) else: printed = None for i in self._contexts: output = getattr(self, f'{i}_context') if output is not None and output != {}: if self.key is not None: if self.key in output: self.print_context(output[self.key], i) printed = True else: self.print_context(output, i) else: continue if self.key is not None and printed is None: print(f"Key={self.key} not found in ") if not self.no_input: question = { 'type': 'confirm', 'name': 'tmp', 'message': 'CONTINUE', } try: response = prompt([question]) except KeyboardInterrupt: print("Exiting...") sys.exit(0) # Catch keyboard exits with return an empty dict if response == {}: sys.exit(0)
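
# Hedged sketch (not part of the hook above): the pretty-print-then-confirm pattern
# the debug hook implements, shown standalone. The sample context dict passed by the
# caller is an illustrative assumption; InquirerPy is already imported by this module.
def _example_pause_with_context(context: dict, message: str = "CONTINUE"):
    pprint(context)
    try:
        prompt([{"type": "confirm", "name": "tmp", "message": message}])
    except KeyboardInterrupt:
        print("Exiting...")
        sys.exit(0)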
PypiClean
/h2o_pysparkling_3.1-3.42.0.2.post1.tar.gz/h2o_pysparkling_3.1-3.42.0.2.post1/ai/h2o/sparkling/ml/algos/H2ORuleFit.py
from warnings import warn from pyspark import keyword_only from ai.h2o.sparkling.Initializer import Initializer from ai.h2o.sparkling.ml.Utils import Utils from ai.h2o.sparkling.ml.algos.H2OSupervisedAlgorithm import H2OSupervisedAlgorithm from ai.h2o.sparkling.ml.models.H2ORuleFitMOJOModel import H2ORuleFitMOJOModel from ai.h2o.sparkling.ml.params.H2ORuleFitParams import H2ORuleFitParams from ai.h2o.sparkling.ml.algos.DistributionBasedH2OTrainFramePreparation import DistributionBasedH2OTrainFramePreparation class H2ORuleFit(H2ORuleFitParams, H2OSupervisedAlgorithm, DistributionBasedH2OTrainFramePreparation): @keyword_only def __init__(self, offsetCol=None, ignoredCols=None, columnsToCategorical=[], keepBinaryModels=False, withContributions=False, dataFrameSerializer="ai.h2o.sparkling.utils.JSONDataFrameSerializer", withLeafNodeAssignments=False, convertInvalidNumbersToNa=False, detailedPredictionCol="detailed_prediction", validationDataFrame=None, featuresCols=[], predictionCol="prediction", convertUnknownCategoricalLevelsToNa=False, splitRatio=1.0, withStageResults=False, seed=-1, algorithm="AUTO", minRuleLength=3, maxRuleLength=3, maxNumRules=-1, modelType="RULES_AND_LINEAR", ruleGenerationNtrees=50, removeDuplicates=True, lambdaValue=None, modelId=None, distribution="AUTO", labelCol="label", weightCol=None, maxCategoricalLevels=10, aucType="AUTO"): Initializer.load_sparkling_jar() super(H2ORuleFit, self).__init__() self._java_obj = self._new_java_obj("ai.h2o.sparkling.ml.algos.H2ORuleFit", self.uid) self._setDefaultValuesFromJava() kwargs = Utils.getInputKwargs(self) kwargs = self._updateInitKwargs(kwargs) if 'interactionPairs' in kwargs: warn("Interaction pairs are not supported!") self._set(**kwargs) self._transfer_params_to_java() def _create_model(self, javaModel): return H2ORuleFitMOJOModel(javaModel)
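
# Hedged usage sketch (not part of the module above): constructing the estimator and
# fitting it on a Spark DataFrame. The data path and column names are illustrative
# assumptions; fit()/transform() are the standard Spark ML Estimator/Model entry
# points inherited from the algorithm base classes, and H2OContext.getOrCreate() is
# assumed to be available in this PySparkling distribution.
def _example_train_rulefit(spark):  # expects an active SparkSession
    from ai.h2o.sparkling import H2OContext

    H2OContext.getOrCreate()
    train = spark.read.csv("train.csv", header=True, inferSchema=True)
    rulefit = H2ORuleFit(
        labelCol="label",
        featuresCols=[c for c in train.columns if c != "label"],
        maxRuleLength=3,
        seed=42,
    )
    model = rulefit.fit(train)       # returns an H2ORuleFitMOJOModel
    return model.transform(train)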
PypiClean
/ansys_fluent_core-0.17.0-py3-none-any.whl/ansys/fluent/core/solver/settings_231/root.py
from ansys.fluent.core.solver.flobject import * from ansys.fluent.core.solver.flobject import _ChildNamedObjectAccessorMixin from ansys.fluent.core.solver.flobject import _CreatableNamedObjectMixin from ansys.fluent.core.solver.flobject import _NonCreatableNamedObjectMixin from ansys.fluent.core.solver.flobject import _HasAllowedValuesMixin from .file import file as file_cls from .mesh import mesh as mesh_cls from .server import server as server_cls from .setup import setup as setup_cls from .solution import solution as solution_cls from .results import results as results_cls from .parametric_studies import parametric_studies as parametric_studies_cls from .current_parametric_study import current_parametric_study as current_parametric_study_cls from .parallel_1 import parallel as parallel_cls from .report_1 import report as report_cls from .exit import exit as exit_cls class root(Group): """ 'root' object. """ fluent_name = "" child_names = \ ['file', 'mesh', 'server', 'setup', 'solution', 'results', 'parametric_studies', 'current_parametric_study', 'parallel', 'report'] file: file_cls = file_cls """ file child of root. """ mesh: mesh_cls = mesh_cls """ mesh child of root. """ server: server_cls = server_cls """ server child of root. """ setup: setup_cls = setup_cls """ setup child of root. """ solution: solution_cls = solution_cls """ solution child of root. """ results: results_cls = results_cls """ results child of root. """ parametric_studies: parametric_studies_cls = parametric_studies_cls """ parametric_studies child of root. """ current_parametric_study: current_parametric_study_cls = current_parametric_study_cls """ current_parametric_study child of root. """ parallel: parallel_cls = parallel_cls """ parallel child of root. """ report: report_cls = report_cls """ report child of root. """ command_names = \ ['exit'] exit: exit_cls = exit_cls """ exit command of root. """
PypiClean
/jupyter-omnicm-0.0.5.tar.gz/jupyter-omnicm-0.0.5/docs/usage.rst
=====
Usage
=====

Behind the scenes, the HDFS support of jupyter-omnicm relies on pyarrow (https://arrow.apache.org/).

To use jupyter-omnicm HDFS in Jupyter, add the following line in your jupyter_notebook_config.py::

    c.NotebookApp.contents_manager_class = 'jupyter_omnicm.hdfs.hdfs_manager.HDFSContentsManager'

*Running Jupyter as a YARN container*

You can run a Jupyter notebook as a YARN container using, for example, Skein: https://github.com/jcrist/skein

If the cluster is kerberized, you don't need any more configuration, as the HDFS delegation token will be used automatically.
If the cluster is not kerberized, you can add the following in jupyter_notebook_config.py::

    c.HDFSContentsManager.user = 'username'

*Running Jupyter outside of a YARN container*

In the case where you run the Jupyter notebook outside of a YARN container (laptop, server, other orchestrator, ...), you need to do a bit more configuration. Depending on your setup, you can add any of the following lines in your jupyter_notebook_config.py::

    c.HDFSContentsManager.host = 'namenode DNS (str). Default 'default' (detect from *-site.xml).'
    c.HDFSContentsManager.port = 'namenode client RPC port (int). Default 0 (detect from *-site.xml).'
    c.HDFSContentsManager.user = 'username. Default None.'
    c.HDFSContentsManager.kerb_ticket = 'Path to Kerberos ticket cache. Default None.'
    c.HDFSContentsManager.driver = 'Connect using libhdfs (JNI-based) or libhdfs3 (3rd-party C++ library). Default libhdfs.'
    c.HDFSContentsManager.extra_conf = 'dict of {key: value} pairs; extra config that will override any hdfs-site.xml properties.'

There is no need to add all of them, however! Keep in mind that pyarrow is used under the hood, and it is able to read the HDFS configuration *-site.xml files automatically, given that variables like HADOOP_CONF_DIR, HADOOP_HOME, ... are properly set up. Refer to the pyarrow documentation for details.

*Configuring the root directory*

An absolute path in Jupyter's default notebook_dir is used for that. If you don't specify anything for this parameter, the notebook dir will be the user's home directory on HDFS::

    c.NotebookApp.notebook_dir
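
For example, a minimal configuration for a notebook running outside YARN against a non-kerberized cluster could look like the following sketch; the host name, port, user name, and notebook directory are placeholders to adapt to your cluster::

    c.NotebookApp.contents_manager_class = 'jupyter_omnicm.hdfs.hdfs_manager.HDFSContentsManager'
    c.NotebookApp.notebook_dir = '/user/alice'
    c.HDFSContentsManager.host = 'namenode.example.com'
    c.HDFSContentsManager.port = 8020
    c.HDFSContentsManager.user = 'alice'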
PypiClean
/antares-0.3.23.1-py3-none-manylinux1_x86_64.whl/antares_core/3rdparty/tvm/python/tvm/relay/backend/te_compiler.py
"""TE compiler engine (replacing legacy compile_engine).""" from __future__ import absolute_import import logging import tvm from tvm import te, autotvm from tvm.ir.transform import PassContext from tvm.runtime import Object from tvm.support import libinfo from tvm.target import Target from ..backend.utils import mangle_module_name from .. import function as _function from .. import ty as _ty from . import _backend logger = logging.getLogger("te_compiler") autotvm_logger = logging.getLogger("autotvm") _first_warning = True @tvm._ffi.register_object("relay.LoweredOutput") class LoweredOutput(Object): """Lowered output""" def __init__(self, outputs, implement): self.__init_handle_by_constructor__(_backend._make_LoweredOutput, outputs, implement) @tvm._ffi.register_object("relay.CCacheKey") class CCacheKey(Object): """Key in the TE Compiler. Parameters ---------- source_func : tvm.relay.Function The source function. target : tvm.Target The target we want to run the function on. """ def __init__(self, source_func, target): self.__init_handle_by_constructor__(_backend._make_CCacheKey, source_func, target) @tvm._ffi.register_object("relay.CCacheValue") class CCacheValue(Object): """Value in the TE Compiler, including usage statistics.""" def _get_cache_key(source_func, target): if isinstance(source_func, _function.Function): if isinstance(target, str): target = Target(target) if not target: raise ValueError("Need target when source_func is a Function") return CCacheKey(source_func, target) if not isinstance(source_func, CCacheKey): raise TypeError("Expect source_func to be CCacheKey") return source_func def get_valid_implementations(op, attrs, inputs, out_type, target): """Get all valid implementations from the op strategy. Note that this function doesn't support op with symbolic input shapes. Parameters ---------- op : tvm.ir.Op Relay operator. attrs : object The op attribute. inputs : List[tvm.te.Tensor] Input tensors to the op. out_type : relay.Type The output type. target : tvm.target.Target The target to compile the op. Returns ------- ret : List[relay.op.OpImplementation] The list of all valid op implementations. """ fstrategy = op.get_attr("FTVMStrategy") assert fstrategy is not None, ( "%s doesn't have an FTVMStrategy registered. You can register " "one in python with `tvm.relay.op.register_strategy`." % op.name ) with target: strategy = fstrategy(attrs, inputs, out_type, target) analyzer = tvm.arith.Analyzer() ret = [] for spec in strategy.specializations: if spec.condition: # check if all the clauses in the specialized condition are true flag = True for clause in spec.condition.clauses: clause = analyzer.canonical_simplify(clause) if isinstance(clause, tvm.tir.IntImm) and clause.value: continue flag = False break if flag: for impl in spec.implementations: ret.append(impl) else: for impl in spec.implementations: ret.append(impl) return ret def select_implementation(op, attrs, inputs, out_type, target, use_autotvm=True): """Select the best implementation from the op strategy. If use_autotvm is True, it'll first try to find the best implementation based on AutoTVM profile results. If no AutoTVM profile result is found, it'll choose the implementation with highest plevel. If use_autotvm is False, it'll directly choose the implementation with highest plevel. Note that this function doesn't support op with symbolic input shapes. Parameters ---------- op : tvm.ir.Op Relay operator. attrs : object The op attribute. inputs : List[tvm.te.Tensor] Input tensors to the op. 
out_type : relay.Type The output type. target : tvm.target.Target The target to compile the op. use_autotvm : bool Whether query AutoTVM to pick the best. Returns ------- ret : tuple(relay.op.OpImplementation, List[tvm.te.Tensor]) The best op implementation and the corresponding output tensors. """ all_impls = get_valid_implementations(op, attrs, inputs, out_type, target) best_plevel_impl = max(all_impls, key=lambda x: x.plevel) # Disable autotvm if auto_scheduler is enabled. # (i.e., always return the implementation with the highest priority for auto-scheduler). if PassContext.current().config.get("relay.backend.use_auto_scheduler", False): use_autotvm = False # If not use autotvm, always return the implementation with the highest priority if not use_autotvm: logger.info( "Using %s for %s based on highest priority (%d)", best_plevel_impl.name, op.name, best_plevel_impl.plevel, ) outs = best_plevel_impl.compute(attrs, inputs, out_type) return best_plevel_impl, outs # Otherwise, try autotvm templates outputs = {} workloads = {} best_autotvm_impl = None best_cfg = None dispatch_ctx = autotvm.task.DispatchContext.current old_silent = autotvm.GLOBAL_SCOPE.silent autotvm.GLOBAL_SCOPE.silent = True for impl in all_impls: outs = impl.compute(attrs, inputs, out_type) outputs[impl] = outs workload = autotvm.task.get_workload(outs) workloads[impl] = workload if workload is None: # Not an AutoTVM tunable implementation continue cfg = dispatch_ctx.query(target, workload) if cfg.is_fallback: # Skip fallback config continue logger.info("Implementation %s for %s has cost %.2e", impl.name, op.name, cfg.cost) if best_cfg is None or best_cfg.cost > cfg.cost: best_autotvm_impl = impl best_cfg = cfg autotvm.GLOBAL_SCOPE.silent = old_silent if best_autotvm_impl: # The best autotvm implementation definitely doesn't use fallback config logger.info( "Using %s for %s based on lowest cost (%.2e)", best_autotvm_impl.name, op.name, best_cfg.cost, ) return best_autotvm_impl, outputs[best_autotvm_impl] # Use the implementation with highest plevel if workloads[best_plevel_impl] is not None: msg = ( "Cannot find tuning records for:\n target=%s\n key=%s\n" "TVM will apply a default schedule which may negatively impact performance." % (target, workloads[best_plevel_impl]) ) if ( not autotvm.env.GLOBAL_SCOPE.silent and msg not in autotvm.task.DispatchContext.warning_messages ): autotvm.task.DispatchContext.warning_messages.add(msg) global _first_warning if _first_warning: _first_warning = False info_msg = ( "One or more operators have not been tuned. Please tune your model " "for better performance. Use DEBUG logging level to see more details." ) autotvm_logger.warning(info_msg) autotvm_logger.debug(msg) logger.info( "Using %s for %s based on highest priority (%s)", best_plevel_impl.name, op.name, best_plevel_impl.plevel, ) return best_plevel_impl, outputs[best_plevel_impl] def get_shape(shape): """Convert the shape to correct dtype and vars.""" ret = [] for dim in shape: if isinstance(dim, tvm.tir.IntImm): if libinfo()["INDEX_DEFAULT_I64"] == "ON": ret.append(dim) else: val = int(dim) assert val <= np.iinfo(np.int32).max ret.append(tvm.tir.IntImm("int32", val)) elif isinstance(dim, tvm.tir.Any): ret.append(te.var("any_dim", "int32")) else: ret.append(dim) return ret @tvm._ffi.register_func("relay.backend.lower_call") def lower_call(call, inputs, target): """Lower the call expression to op implementation and tensor outputs.""" assert isinstance(call.op, tvm.ir.Op) op = call.op # Prepare the call_node->checked_type(). 
For the call node inputs, we ensure that # the shape is Int32. Following code ensures the same for the output as well. # TODO(@icemelon9): Support recursive tuple ret_type = call.checked_type if isinstance(ret_type, _ty.TensorType): ret_type = _ty.TensorType(get_shape(ret_type.shape), ret_type.dtype) elif isinstance(ret_type, _ty.TupleType): new_fields = [] for field in ret_type.fields: if isinstance(field, _ty.TensorType): new_fields.append(_ty.TensorType(get_shape(field.shape), field.dtype)) else: new_fields.append(field) ret_type = _ty.TupleType(new_fields) is_dyn = _ty.is_dynamic(call.checked_type) for arg in call.args: is_dyn = is_dyn or _ty.is_dynamic(arg.checked_type) # check if in the AutoTVM tracing mode, and disable if op is not in wanted list env = autotvm.task.TaskExtractEnv.current reenable_tracing = False if env is not None and env.tracing: if env.wanted_relay_ops is not None and op not in env.wanted_relay_ops: env.tracing = False reenable_tracing = True if not is_dyn: best_impl, outputs = select_implementation(op, call.attrs, inputs, ret_type, target) else: # TODO(@icemelon9): Allow tvm to generate multiple kernels for dynamic shapes. best_impl, outputs = select_implementation( op, call.attrs, inputs, ret_type, target, use_autotvm=False ) # re-enable AutoTVM tracing if reenable_tracing: env.tracing = True return LoweredOutput(outputs, best_impl) @tvm._ffi.register_object("relay.TECompiler") class TECompiler(Object): """TECompiler to get lowered code.""" def __init__(self): raise RuntimeError("Cannot construct a TECompiler") def lower(self, source_func, target=None, mod_name="default"): """Lower a source_func to a CachedFunc. Parameters ---------- source_func : Union[tvm.relay.Function, CCacheKey] The source relay function. target : tvm.Target The target platform. Returns ------- cached_func: CachedFunc The result of lowering. """ # pylint: disable=broad-except, import-outside-toplevel try: mod_name = mangle_module_name(mod_name) key = _get_cache_key(source_func, target) return _backend._TECompilerLower(self, key, mod_name) except Exception: import traceback msg = traceback.format_exc() msg += "Error during compile func\n" msg += "--------------------------\n" msg += source_func.astext(show_meta_data=False) msg += "--------------------------\n" raise RuntimeError(msg) def jit(self, source_func, target=None): """JIT a source_func to a tvm.runtime.PackedFunc. Parameters ---------- source_func : Union[tvm.relay.Function, CCacheKey] The source relay function. target : tvm.Target The target platform. Returns ------- jited_func: tvm.runtime.PackedFunc The result of jited function. """ key = _get_cache_key(source_func, target) return _backend._TECompilerJIT(self, key) def clear(self): """clear the existing cached functions""" _backend._TECompilerClear(self) def items(self): """List items in the cache. Returns ------- item_list : List[Tuple[CCacheKey, CCacheValue]] The list of items. """ res = _backend._TECompilerListItems(self) assert len(res) % 2 == 0 return [(res[2 * i], res[2 * i + 1]) for i in range(len(res) // 2)] def get(): """Get the global TE Compiler. Returns ------- engine : tvm.relay.backend.TECompiler The TE Compiler. """ return _backend._TECompilerGlobal()
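The selection policy described in the `select_implementation` docstring above (prefer a tuned AutoTVM record with the lowest cost, otherwise fall back to the implementation with the highest priority level) can be summarized by the standalone sketch below. The classes and names are illustrative stand-ins, not TVM APIs.

```python
# Standalone sketch of the selection policy in select_implementation above.
# The classes and records here are illustrative stand-ins, not TVM APIs.
from dataclasses import dataclass
from typing import Optional


@dataclass
class Impl:
    name: str
    plevel: int                         # static priority assigned by the op strategy
    tuned_cost: Optional[float] = None  # cost from a tuning record, if one exists


def pick_implementation(impls, use_autotvm=True):
    """Prefer the tuned implementation with the lowest cost; otherwise fall back
    to the implementation with the highest priority level."""
    best_plevel = max(impls, key=lambda i: i.plevel)
    if not use_autotvm:
        return best_plevel
    tuned = [i for i in impls if i.tuned_cost is not None]
    if tuned:
        return min(tuned, key=lambda i: i.tuned_cost)
    return best_plevel


if __name__ == "__main__":
    impls = [
        Impl("conv2d.generic", plevel=10),
        Impl("conv2d.winograd", plevel=5, tuned_cost=1.2e-4),
    ]
    print(pick_implementation(impls).name)          # conv2d.winograd (tuned, lowest cost)
    print(pick_implementation(impls, False).name)   # conv2d.generic (highest plevel)
```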
PypiClean
/gamification-engine-0.4.0.tar.gz/gamification-engine-0.4.0/gengine/app/jsscripts/node_modules/autoprefixer/lib/hacks/grid-template.js
'use strict'; function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } var parser = require('postcss-value-parser'); var Declaration = require('../declaration'); var GridTemplate = function (_Declaration) { _inherits(GridTemplate, _Declaration); function GridTemplate() { _classCallCheck(this, GridTemplate); return _possibleConstructorReturn(this, _Declaration.apply(this, arguments)); } /** * Change property name for IE */ GridTemplate.prototype.prefixed = function prefixed(prop, prefix) { if (prefix === '-ms-') { return prefix + prop.replace('template-', ''); } else { return _Declaration.prototype.prefixed.call(this, prop, prefix); } }; /** * Change IE property back */ GridTemplate.prototype.normalize = function normalize(prop) { return prop.replace(/^grid-(rows|columns)/, 'grid-template-$1'); }; /** * Recursive part of changeRepeat */ GridTemplate.prototype.walkRepeat = function walkRepeat(node) { var fixed = []; for (var _iterator = node.nodes, _isArray = Array.isArray(_iterator), _i = 0, _iterator = _isArray ? _iterator : _iterator[Symbol.iterator]();;) { var _ref; if (_isArray) { if (_i >= _iterator.length) break; _ref = _iterator[_i++]; } else { _i = _iterator.next(); if (_i.done) break; _ref = _i.value; } var i = _ref; if (i.nodes) { this.walkRepeat(i); } fixed.push(i); if (i.type === 'function' && i.value === 'repeat') { var first = i.nodes.shift(); if (first) { var count = first.value; i.nodes.shift(); i.value = ''; fixed.push({ type: 'word', value: '[' + count + ']' }); } } } node.nodes = fixed; }; /** * IE repeating syntax */ GridTemplate.prototype.changeRepeat = function changeRepeat(value) { var ast = parser(value); this.walkRepeat(ast); return ast.toString(); }; /** * Change repeating syntax for IE */ GridTemplate.prototype.set = function set(decl, prefix) { if (prefix === '-ms-' && decl.value.indexOf('repeat(') !== -1) { decl.value = this.changeRepeat(decl.value); } return _Declaration.prototype.set.call(this, decl, prefix); }; return GridTemplate; }(Declaration); Object.defineProperty(GridTemplate, 'names', { enumerable: true, writable: true, value: ['grid-template-rows', 'grid-template-columns', 'grid-rows', 'grid-columns'] }); module.exports = GridTemplate;
PypiClean
/pymcxray-0.1.4.tar.gz/pymcxray-0.1.4/README.rst
===============================
pymcxray
===============================

.. image:: https://img.shields.io/pypi/v/pymcxray.svg
        :target: https://pypi.python.org/pypi/pymcxray

.. image:: https://img.shields.io/travis/drix00/pymcxray.svg
        :target: https://travis-ci.org/drix00/pymcxray

.. image:: https://readthedocs.org/projects/pymcxray/badge/?version=latest
        :target: https://pymcxray.readthedocs.io/en/latest/?badge=latest
        :alt: Documentation Status

.. image:: https://pyup.io/repos/github/drix00/pymcxray/shield.svg
        :target: https://pyup.io/repos/github/drix00/pymcxray/
        :alt: Updates

Python scripts for using mcxray software

* Free software: Apache Software License 2.0
* Documentation: https://pymcxray.readthedocs.io.

Features
--------

* TODO

Credits
---------

This package was created with Cookiecutter_ and the `audreyr/cookiecutter-pypackage`_ project template.

.. _Cookiecutter: https://github.com/audreyr/cookiecutter
.. _`audreyr/cookiecutter-pypackage`: https://github.com/audreyr/cookiecutter-pypackage
PypiClean
/strawberry_django_auth-0.376.2-py3-none-any.whl/gqlauth/core/utils.py
import contextlib import inspect import typing from typing import Dict, Iterable from django.conf import settings from django.contrib.auth import get_user_model from django.contrib.auth.base_user import AbstractBaseUser from django.contrib.auth.models import AnonymousUser from django.core import signing from strawberry.field import StrawberryField from strawberry.types import Info from strawberry.utils.str_converters import to_camel_case from gqlauth.core.exceptions import TokenScopeError if typing.TYPE_CHECKING: # pragma: no cover from gqlauth.models import UserStatus from gqlauth.settings_type import GqlAuthSettings class UserProto(AbstractBaseUser): status: UserStatus UserProto: "UserProto" = "UserProto" # type: ignore # noqa: F811 USER_MODEL = get_user_model() USER_UNION = typing.Union["UserProto", AnonymousUser, AbstractBaseUser] app_settings: "GqlAuthSettings" = settings.GQL_AUTH def hide_args_kwargs(field): sig = inspect.signature(field) cleared = tuple(p for p in sig.parameters.values() if p.name not in ("kwargs", "args")) field.__signature__ = inspect.signature(field).replace(parameters=(cleared)) return field def isiterable(value): try: iter(value) except TypeError: # pragma: no cover return False return True def camelize(data): if isinstance(data, dict): return {to_camel_case(k): camelize(v) for k, v in data.items()} if isiterable(data) and not isinstance(data, str): return [camelize(d) for d in data] return data def get_user(info: Info) -> USER_UNION: try: return info.context.request.user # type: ignore except AttributeError: return info.context["request"].user def cast_to_status_user(user: USER_UNION) -> UserProto: user.status # type: ignore # raise attribute error return user # type: ignore def get_user_by_email(email: str) -> "UserProto": user = USER_MODEL.objects.get(**{USER_MODEL.EMAIL_FIELD: email}) assert hasattr(user, "status") return user # type: ignore def get_token(user, action, **kwargs): username = user.get_username() if hasattr(username, "pk"): username = username.pk payload = {user.USERNAME_FIELD: username, "action": action} if kwargs: payload.update(**kwargs) token = signing.dumps(payload) return token def get_payload_from_token(token, action, exp=None): payload = signing.loads(token, max_age=exp) _action = payload.pop("action") if _action != action: raise TokenScopeError return payload def revoke_user_refresh_token(user): refresh_tokens = user.refresh_tokens.all() for refresh_token in refresh_tokens: with contextlib.suppress(Exception): refresh_token.revoke() def fields_names(strawberry_fields: Iterable[StrawberryField]): return [field.python_name for field in strawberry_fields] def inject_fields(fields: typing.Iterable[StrawberryField], annotations_only=False): def wrapped(cls: type): # python 3.8 compat: if not hasattr(cls, "__annotations__"): # pragma: no cover cls.__annotations__ = {} for field in fields: if not field.name: continue if not annotations_only: setattr(cls, field.name, field) assert field.type_annotation cls.__annotations__[field.name] = field.type_annotation.annotation return cls return wrapped def inject_arguments(args: Dict[str, type]): """Injects arguments to the decorated resolver. 
:param args:`dict[name, type]` of arguments to be injected., """ def wrapped(fn): sig = inspect.signature(fn) params = { inspect.Parameter(name, inspect.Parameter.POSITIONAL_OR_KEYWORD, annotation=type_) for name, type_ in args.items() } params.update(sig.parameters.values()) fn.__signature__ = inspect.signature(fn).replace(parameters=params) return fn return wrapped
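The `inject_arguments` decorator above rewrites a resolver's signature so that extra typed parameters appear to callers. Below is a self-contained sketch of the same idea; it is simplified to use a list so the injected parameters keep a deterministic order, and it avoids importing `gqlauth.core.utils` directly, since that module expects a configured Django settings object at import time.

```python
# Self-contained sketch of the signature-injection idea used by inject_arguments
# above (simplified: a list keeps the injected parameters in a deterministic order).
import inspect


def inject_arguments_sketch(args):
    def wrapped(fn):
        sig = inspect.signature(fn)
        params = [
            inspect.Parameter(name, inspect.Parameter.POSITIONAL_OR_KEYWORD, annotation=type_)
            for name, type_ in args.items()
        ]
        params.extend(sig.parameters.values())
        fn.__signature__ = sig.replace(parameters=params)
        return fn
    return wrapped


@inject_arguments_sketch({"token": str, "verbose": bool})
def resolver(info):
    return info


print(inspect.signature(resolver))  # (token: str, verbose: bool, info)
```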
PypiClean
/jupyterlab_remote_contents-0.1.1.tar.gz/jupyterlab_remote_contents-0.1.1/node_modules/minipass-flush/README.md
# minipass-flush

A Minipass stream that calls a flush function before emitting 'end'

## USAGE

```js
const Flush = require('minipass-flush')
const f = new Flush({
  flush (cb) {
    // call the cb when done, or return a promise
    // the 'end' event will wait for it, along with
    // close, finish, and prefinish.
    // call the cb with an error, or return a rejecting
    // promise to emit 'error' instead of doing the 'end'
    return rerouteAllEncryptions().then(() => clearAllChannels())
  },
  // all other minipass options accepted as well
})

someDataSource.pipe(f).on('end', () => {
  // proper flushing has been accomplished
})

// Or as a subclass implementing a 'flush' method:
class MyFlush extends Flush {
  flush (cb) {
    // old fashioned callback style!
    rerouteAllEncryptions(er => {
      if (er) return cb(er)
      clearAllChannels(er => {
        if (er) return cb(er)
        cb()
      })
    })
  }
}
```

That's about it.

If your `flush` method doesn't have to do anything asynchronous, then it's better to call the callback right away in this tick, rather than returning `Promise.resolve()`, so that the `end` event can happen as soon as possible.
PypiClean
/mostpopularnewscnn-0.0.3.tar.gz/mostpopularnewscnn-0.0.3/README.md
# Most Popular News CNN Indonesia

This package will get the most popular news from CNN Indonesia.

## HOW IT WORKS

This package will scrape [CNN Indonesia](https://www.cnnindonesia.com/) to get the most popular news on CNN Indonesia.

It uses beautifulsoup4 and requests, then produces output in the form of JSON which can be used in web and mobile applications. A rough sketch of this flow is shown after the usage example below.

```python
if __name__ == '__main__':
    most_popular_news_cnn_id = Mostpopularnewscnn('https://www.cnnindonesia.com/')
    print('Description class news cnn id', most_popular_news_cnn_id.description)
    most_popular_news_cnn_id.run()
```
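The README describes the flow as: fetch the page with requests, parse it with beautifulsoup4, and emit JSON. The sketch below illustrates that flow only; the CSS selector is an assumption for illustration and the real markup on cnnindonesia.com may differ, as may the package's own internals.

```python
# Rough sketch of the fetch-and-parse flow described above. The selector below is
# an assumption for illustration; the actual markup on cnnindonesia.com may differ.
import json

import requests
from bs4 import BeautifulSoup


def most_popular_sketch(url="https://www.cnnindonesia.com/"):
    html = requests.get(url, timeout=10).text
    soup = BeautifulSoup(html, "html.parser")
    # Hypothetical selector for the "terpopuler" (most popular) box.
    links = soup.select(".terpopuler a")
    items = [{"title": a.get_text(strip=True), "url": a.get("href")} for a in links]
    return json.dumps(items, ensure_ascii=False)


if __name__ == "__main__":
    print(most_popular_sketch())
```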
PypiClean
/visionlib-1.2.0.tar.gz/visionlib-1.2.0/README.md
# Visionlib

A simple high-level API made for assisting in CV-related projects.

## Features

- Track faces using
  - MTCNN module
  - Dlib HOG-based detector
  - OpenCV Haar cascades
  - DNN-based model
- Predict Gender
- Detect Objects
  - Yolo v3
  - tiny-yolo

### Installation

#### Dependencies

`sudo apt-get install build-essential cmake pkg-config`

`sudo apt-get install libx11-dev libatlas-base-dev`

`sudo apt-get install libgtk-3-dev libboost-python-dev`

This should install the dependencies required by dlib.

`pip install visionlib`

This will install visionlib.

##### Optional

If you want to install from source:

`git clone https://github.com/ashwinvin/Visionlib.git`

`cd visionlib`

`pip install .`

### Face Detection

Detecting a face in an image is easy. This will return the image with bounding boxes and box coordinates.

`from visionlib.face.detection import FDetector`

`detector = FDetector()`

`detector.detect_face(img, show=True)`

This will detect faces and display them automatically.

Don't like the default detector? Change it like this:

`detector.set_detector("mtcnn")`

#### Examples

![Detection](docs/images/face_detected.jpg)
![Detection](docs/images/face_detected_group.jpg)

### Gender Detection

Once a face is detected, it can be passed to the detect_gender() function to recognize gender. It will return the labels (man, woman) and the associated probabilities, like this. A combined face and gender detection sketch is shown further below.

`from visionlib.gender.detection import GDetector`

`Gdetector = GDetector()`

`pred, confidence = Gdetector.detect_gender(c_img)`

##### Example

![Gender Detection](docs/images/gender_detected_single.jpg)

### Object Detection

Detecting common objects in the scene is enabled through a single function call, detect_objects(). It will return the labeled image for the detected objects in the image. By default it uses the yolov3-tiny model.

`from visionlib.object.detection import Detection`

`import cv2`

`detector = Detection()`

`d_img = detector.detect_objects(img)`

You can enable GPU usage with

`detector.detect_objects(img, enable_gpu=True)`

#### Example

![object Detection](docs/images/object_detected_objects.jpg)

For more examples, look in the examples directory.
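Below is a hedged sketch chaining face detection and gender prediction as described in the README above. It assumes `detect_face` returns the annotated image together with box coordinates (as the README states) and that each cropped box can be passed to `detect_gender`; both assumptions are flagged in the comments and may need adjusting to the library's actual return types.

```python
# Hedged sketch chaining the two detectors described above. It assumes, per the
# README, that detect_face returns the annotated image and the box coordinates,
# and that a cropped face region can be passed to detect_gender.
import cv2

from visionlib.face.detection import FDetector
from visionlib.gender.detection import GDetector

img = cv2.imread("people.jpg")  # path is illustrative

f_detector = FDetector()
g_detector = GDetector()

annotated, boxes = f_detector.detect_face(img)   # assumed return shape
for (x1, y1, x2, y2) in boxes:                   # assumed box format
    face = img[y1:y2, x1:x2]
    label, confidence = g_detector.detect_gender(face)
    print(label, confidence)
```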
PypiClean
/murano-repository-0.4.a28.g0c584bc.tar.gz/murano-repository-0.4.a28.g0c584bc/muranorepository/utils/archiver.py
import os import tarfile import tempfile import shutil import hashlib import logging as log from oslo.config import cfg from muranorepository.consts import DATA_TYPES, ARCHIVE_PKG_NAME CONF = cfg.CONF CHUNK_SIZE = 1 << 20 # 1MB class Archiver(object): def _copy_data(self, file_lists, src, dst): if not os.path.exists(dst): os.makedirs(dst) for path in file_lists: source = os.path.join(src, path) destination = os.path.join(dst, path) base_dir = os.path.dirname(destination) if (base_dir != dst) and (not os.path.exists(base_dir)): os.makedirs(os.path.dirname(destination)) try: shutil.copyfile(source, destination) except IOError: log.error("Unable to copy file " "{0}".format(file)) def _get_hash(self, archive_path): """Calculate SHA1-hash of archive file. SHA-1 take a bit more time than MD5 (see http://tinyurl.com/kpj5jy7), but is more secure. """ # Copy-pasted from muranodashboard/panel/services/metadata.py if os.path.exists(archive_path): sha1 = hashlib.sha1() with open(archive_path) as f: buf = f.read(CHUNK_SIZE) while buf: sha1.update(buf) buf = f.read(CHUNK_SIZE) hsum = sha1.hexdigest() log.debug("Archive '{0}' has hash-sum {1}".format( archive_path, hsum)) return hsum else: log.info( "Archive '{0}' doesn't exist, no hash to calculate".format( archive_path)) return None def _compose_archive(self, path, cache_dir): with tarfile.open(ARCHIVE_PKG_NAME, "w:gz") as tar: for item in os.listdir(path): tar.add(os.path.join(path, item), item) try: shutil.rmtree(path, ignore_errors=True) except Exception as e: log.error("Unable to delete temp directory: {0}".format(e)) hash_folder = self.create_hash(cache_dir) return os.path.abspath(os.path.join(hash_folder, ARCHIVE_PKG_NAME)) def get_existing_hash(self, cache_dir): existing_caches = os.listdir(cache_dir) log.debug('Assert there is just one archive in cache folder. 
Clear ' 'folder {0} in case of Assertion Error'.format(cache_dir)) assert len(existing_caches) < 2 if not len(existing_caches): return None else: path = os.path.join(cache_dir, existing_caches[0], ARCHIVE_PKG_NAME) if not os.path.exists(path): raise RuntimeError( 'Archive package is missing at dir {0}'.format( os.path.join(cache_dir))) return existing_caches[0] def _hashes_match(self, cache_dir, existing_hash, hash_to_check): if hash_to_check is None or existing_hash is None: return False if existing_hash == hash_to_check: log.debug('Archive package matches hash-sum {0}.'.format( hash_to_check)) return True else: self.remove_existing_hash(cache_dir, existing_hash) return False def create(self, cache_dir, manifests, types): """ manifests -- list of Manifest objects types -- desired data types to be added to archive return: absolute path to created archive """ #TODO: temporary hack for mockfs try: temp_dir = tempfile.mkdtemp() except: temp_dir = '/tmp' for data_type in types: if data_type not in DATA_TYPES: raise Exception("Please, specify one of the supported data " "types: {0}".format(DATA_TYPES)) for manifest in manifests: if not manifest.enabled and not manifest.valid: continue if hasattr(manifest, data_type): file_list = getattr(manifest, data_type) dst_directory = os.path.join(temp_dir, getattr(CONF.output, data_type)) scr_directory = os.path.join(CONF.manifests, getattr(CONF, data_type)) self._copy_data(file_list, scr_directory, dst_directory) else: log.info( "Manifest for {0} service has no file definitions for " "{1}".format(manifest.service_display_name, data_type)) return self._compose_archive(temp_dir, cache_dir) def remove_existing_hash(self, cache_dir, hash): path = os.path.join(cache_dir, hash) log.info('Deleting archive package from {0}.'.format(path)) shutil.rmtree(path, ignore_errors=True) def create_hash(self, cache_dir): """ Creates folder with data archive inside that has name equals to hash calculated from archive Return path to created hash folder """ hash_sum = self._get_hash(ARCHIVE_PKG_NAME) pkg_dir = os.path.join(cache_dir, hash_sum) if not os.path.exists(pkg_dir): os.mkdir(pkg_dir) shutil.move(ARCHIVE_PKG_NAME, os.path.join(pkg_dir, ARCHIVE_PKG_NAME)) return pkg_dir
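`Archiver._get_hash` above reads the archive in 1 MB chunks and feeds them to SHA-1. The standalone sketch below shows that chunked-hashing pattern; unlike the original it opens the file in binary mode and uses a context manager, which is the safer choice for a tarball.

```python
# Standalone sketch of the chunked SHA-1 hashing used by Archiver._get_hash above.
# Unlike the original, it opens the file in binary mode and uses a context manager.
import hashlib

CHUNK_SIZE = 1 << 20  # 1 MB, same as the module above


def sha1_of_file(path):
    sha1 = hashlib.sha1()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(CHUNK_SIZE), b""):
            sha1.update(chunk)
    return sha1.hexdigest()


if __name__ == "__main__":
    print(sha1_of_file("archive.tar.gz"))  # path is illustrative
```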
PypiClean
/jigna-0.10.1.tar.gz/jigna-0.10.1/examples/ex18_embedding_chaco.py
from chaco.api import Plot, ArrayPlotData from jigna.api import HTMLWidget, Template from jigna.qt import QtGui from numpy import linspace, sin from traits.api import ( HasTraits, CInt, Instance, Array, Property, on_trait_change ) #### Domain model #### class DomainModel(HasTraits): """ The algorithmic domain model which specifies the mathematical relationship between x and y. """ #: Independent variable of the domain equation x = Array def _x_default(self): return linspace(-14, 14, 1000) #: Dependent variable of the domain equation y = Property(Array, depends_on=['x', 'scaling_factor']) def _get_y(self): return sin(self.scaling_factor * self.x) * self.x**3 #: A scaling factor to tune the output scaling_factor = CInt #### Controller layer #### class PlotController(HasTraits): """ A Controller class which creates a Chaco plot object for the given domain model. """ #: Instance of the domain model which is being displayed by this controller domain_model = Instance(DomainModel) # The Chaco Plot object. This is the object which is usually visualized via # traitsui using the enable ComponentEditor plot = Instance(Plot) def _plot_default(self): plot = Plot(ArrayPlotData(x=self.domain_model.x, y=self.domain_model.y)) plot.plot(("x", "y"), type="line", color="blue") return plot @on_trait_change('domain_model.scaling_factor') def update_plot(self): self.plot.data.set_data('x', self.domain_model.x) self.plot.data.set_data('y', self.domain_model.y) def create_plot_widget(self): """ This method is used as a factory to create the QWidget for the Chaco 'plot' component. This is the standard way to obtain the QWidget out of a Chaco Plot object. We create a hidden traitsui view and return it's 'control' to obtain the required QWidget. """ from traitsui.api import View, Item from enable.api import ComponentEditor view = View(Item('plot', editor=ComponentEditor(), show_label=False)) ui = self.edit_traits(view=view, parent=None, kind='subpanel') return ui.control #### UI layer #### body_html = """ <div> Scaling factor: <input type="range" ng-model="domain_model.scaling_factor" min=0 max=100><br> Plot:<br> <!-- Specify type='application/x-qwidget' to embed a QWidget in the jigna view. Note that 'widget-factory' here refers to the factory method in Python which will generate that QWidget. --> <div> <object type="application/x-qwidget" widget-factory="plot_controller.create_plot_widget" width="500" height="400"> </object> </div> </div> """ template = Template(body_html=body_html, recommended_size=(600, 600)) #### Entry point #### def main(): # Start the Qt application app = QtGui.QApplication.instance() or QtGui.QApplication([]) # Instantiate the domain model and the plot controller domain_model = DomainModel(scaling_factor=50) plot_controller = PlotController(domain_model=domain_model) # Create the jigna based HTML widget which renders the given HTML template # with the given context. # # The widget contains an embedded Chaco QWidget showing a 2D plot of # the domain model. Moving the slider on the UI changes the domain model # and hence the Chaco plot. widget = HTMLWidget( template=template, context={ 'domain_model': domain_model, 'plot_controller': plot_controller } ) widget.show() # Start the event loop app.exec_() if __name__ == "__main__": main() #### EOF ######################################################################
PypiClean
/xformers-0.0.21.tar.gz/xformers-0.0.21/third_party/flash-attention/csrc/cutlass/docs/search/typedefs_a.js
var searchData= [ ['lanelayout',['LaneLayout',['../structcutlass_1_1gemm_1_1warp_1_1MmaSimtPolicy.html#abbddbec34fec9b85d8421a49f112f937',1,'cutlass::gemm::warp::MmaSimtPolicy']]], ['lanemmashape',['LaneMmaShape',['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmShafafd5c61db86cbfe90863578ddd11092.html#a830c544f4fed1d21e6cb9d9e39b2f55e',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 1 &gt;, ElementA_, layout::ColumnMajor, ElementB_, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LaneMmaShape()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha8da7a0cfbbe859b701fdd9f2b8566aa7.html#a70bd709536f3452656041c40f0732fc0',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 1 &gt;, ElementA_, layout::RowMajor, ElementB_, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LaneMmaShape()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha84e9f8afb6a4ca9f5dcd219b182d16e7.html#a5bc6621f63ea24c3bc98c297b8d7603f',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 1 &gt;, ElementA_, layout::RowMajor, ElementB_, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LaneMmaShape()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmShab94a11a77dd0565102710907089acee0.html#a1fa217acb8da1bc6973b882e6e3fefe5',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 1 &gt;, ElementA_, layout::ColumnMajor, ElementB_, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LaneMmaShape()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha34a52cc7b2942e8c290f0032b6779b52.html#a030d4d758adeae583d0d80871773fec0',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 4 &gt;, int8_t, layout::ColumnMajor, int8_t, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LaneMmaShape()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmShaaf312aafe9da92ea9d417bcc12a8e7dc.html#ae491f8ca4cf60e4ec48e14c364ba5346',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 4 &gt;, int8_t, layout::RowMajor, int8_t, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LaneMmaShape()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha863d4139ccaa713bc4bde32c425f4067.html#a29c891204ddc7ff86d296dfab985a731',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 4 &gt;, int8_t, layout::RowMajor, int8_t, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LaneMmaShape()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha2c0d0b7cdb5c4bcb11e83c058eb65345.html#a154a5c15eeed1313f05522a9e4474791',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 4 &gt;, int8_t, layout::ColumnMajor, int8_t, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ 
&gt;::LaneMmaShape()'],['../structcutlass_1_1gemm_1_1warp_1_1MmaSimtPolicy.html#ac0da8df27cc497e2c72240c5a8ef4424',1,'cutlass::gemm::warp::MmaSimtPolicy::LaneMmaShape()']]], ['layout',['Layout',['../classcutlass_1_1epilogue_1_1threadblock_1_1DirectEpilogueTensorOp.html#affbcb5b699be059628d3d37a589b2667',1,'cutlass::epilogue::threadblock::DirectEpilogueTensorOp::Layout()'],['../classcutlass_1_1epilogue_1_1threadblock_1_1Epilogue.html#a7589e814463281fbf6397a7597e3fac1',1,'cutlass::epilogue::threadblock::Epilogue::Layout()'],['../classcutlass_1_1epilogue_1_1threadblock_1_1EpilogueBase.html#ac3b3149663228d308b5cec15950b652e',1,'cutlass::epilogue::threadblock::EpilogueBase::Layout()'],['../structcutlass_1_1epilogue_1_1threadblock_1_1EpilogueBase_1_1SharedStorage.html#a702ca51bb3a780cefa51fd28028b65b1',1,'cutlass::epilogue::threadblock::EpilogueBase::SharedStorage::Layout()'],['../classcutlass_1_1epilogue_1_1threadblock_1_1InterleavedEpilogue.html#a879c34cca329147f6a081492a4c5f103',1,'cutlass::epilogue::threadblock::InterleavedEpilogue::Layout()'],['../classcutlass_1_1epilogue_1_1threadblock_1_1PredicatedTileIterator.html#a00c57a2cc53aa5aacd6672cf9af16e26',1,'cutlass::epilogue::threadblock::PredicatedTileIterator::Layout()'],['../classcutlass_1_1epilogue_1_1threadblock_1_1InterleavedPredicatedTileIterator.html#a160a6628f34c37e13d13a9f87b0090a3',1,'cutlass::epilogue::threadblock::InterleavedPredicatedTileIterator::Layout()'],['../classcutlass_1_1epilogue_1_1threadblock_1_1SharedLoadIterator.html#abc1ac65c12315e325e02c5c2e313e0a7',1,'cutlass::epilogue::threadblock::SharedLoadIterator::Layout()'],['../classcutlass_1_1epilogue_1_1warp_1_1FragmentIteratorComplexTensorOp_3_01WarpShape___00_01Operato8cf03c624cf3210c71b7cbd580b080f8.html#a2c2b69f22e14ea9230d90b797b21e842',1,'cutlass::epilogue::warp::FragmentIteratorComplexTensorOp&lt; WarpShape_, OperatorShape_, OperatorElementC_, OperatorFragmentC_, layout::RowMajor &gt;::Layout()'],['../classcutlass_1_1epilogue_1_1warp_1_1FragmentIteratorSimt_3_01WarpShape___00_01Operator___00_01la3f2abc523201c1b0228df99119ab88e1.html#abdb233c59e0c5657e91d737a715e817b',1,'cutlass::epilogue::warp::FragmentIteratorSimt&lt; WarpShape_, Operator_, layout::RowMajor, MmaSimtPolicy_ &gt;::Layout()'],['../classcutlass_1_1epilogue_1_1warp_1_1FragmentIteratorTensorOp_3_01WarpShape___00_01OperatorShape_5e78dabe303f20d76b00c600aab61eda.html#a58f1b1e6cdba518d44a4ce5035d943cb',1,'cutlass::epilogue::warp::FragmentIteratorTensorOp&lt; WarpShape_, OperatorShape_, OperatorElementC_, OperatorFragmentC_, layout::RowMajor &gt;::Layout()'],['../classcutlass_1_1epilogue_1_1warp_1_1FragmentIteratorTensorOp_3_01WarpShape___00_01OperatorShape_e459aab140a2ce78336e584f95886726.html#a7178bbf806b6bf01af88d1b551b3745c',1,'cutlass::epilogue::warp::FragmentIteratorTensorOp&lt; WarpShape_, OperatorShape_, OperatorElementC_, OperatorFragmentC_, layout::ColumnMajorInterleaved&lt; InterleavedK &gt; &gt;::Layout()'],['../classcutlass_1_1epilogue_1_1warp_1_1FragmentIteratorVoltaTensorOp_3_01WarpShape___00_01gemm_1_1G16e08718cffa0989cce3fe8dbc4b075b.html#aeae5ca1155296bf2eb1cb54cf860f4f2',1,'cutlass::epilogue::warp::FragmentIteratorVoltaTensorOp&lt; WarpShape_, gemm::GemmShape&lt; 32, 32, 4 &gt;, half_t, layout::RowMajor &gt;::Layout()'],['../classcutlass_1_1epilogue_1_1warp_1_1FragmentIteratorVoltaTensorOp_3_01WarpShape___00_01gemm_1_1Gdb805a2dc5571ac3b66e0fe6ffdcede2.html#a7602a39bc56a4f7ec66085093bab92ca',1,'cutlass::epilogue::warp::FragmentIteratorVoltaTensorOp&lt; WarpShape_, gemm::GemmShape&lt; 32, 
32, 4 &gt;, float, layout::RowMajor &gt;::Layout()'],['../classcutlass_1_1epilogue_1_1warp_1_1FragmentIteratorWmmaTensorOp_3_01WarpShape___00_01OperatorShfdb1f120c6797383663f9fd11d0fc599.html#a63ac51b72e212f5b7a880b409c8f0223',1,'cutlass::epilogue::warp::FragmentIteratorWmmaTensorOp&lt; WarpShape_, OperatorShape_, OperatorElementC_, OperatorFragmentC_, layout::RowMajor &gt;::Layout()'],['../classcutlass_1_1epilogue_1_1warp_1_1TileIteratorSimt_3_01WarpShape___00_01Operator___00_01Elemenf2bd262ed3e202b25d5802d83965bf3b.html#a9928349624ec760485d8921255edf9e4',1,'cutlass::epilogue::warp::TileIteratorSimt&lt; WarpShape_, Operator_, Element_, layout::RowMajor, MmaSimtPolicy_ &gt;::Layout()'],['../classcutlass_1_1epilogue_1_1warp_1_1TileIteratorTensorOp_3_01WarpShape___00_01OperatorShape___003cbb32beb84b4984cb7853662096d289.html#aeaef96491169d1e8f2e831a3d858382f',1,'cutlass::epilogue::warp::TileIteratorTensorOp&lt; WarpShape_, OperatorShape_, Element_, layout::RowMajor &gt;::Layout()'],['../classcutlass_1_1epilogue_1_1warp_1_1TileIteratorVoltaTensorOp_3_01WarpShape___00_01gemm_1_1GemmSa0ceeeddc22575876eb977da7f5416a8.html#a4a162c8119b397c927535a1d985e3e06',1,'cutlass::epilogue::warp::TileIteratorVoltaTensorOp&lt; WarpShape_, gemm::GemmShape&lt; 32, 32, 4 &gt;, half_t, layout::RowMajor &gt;::Layout()'],['../classcutlass_1_1epilogue_1_1warp_1_1TileIteratorVoltaTensorOp_3_01WarpShape___00_01gemm_1_1GemmS2fe0c60b727c738c622c18fc3dd76644.html#ae9f78285e55b5b2ff53fe69b24852660',1,'cutlass::epilogue::warp::TileIteratorVoltaTensorOp&lt; WarpShape_, gemm::GemmShape&lt; 32, 32, 4 &gt;, float, layout::RowMajor &gt;::Layout()'],['../classcutlass_1_1epilogue_1_1warp_1_1TileIteratorWmmaTensorOp_3_01WarpShape___00_01OperatorShape_fd6a91cd8bbd07ecd1344326b830e3a4.html#adc20ef932ebacdc3db1d73399d3c0b4a',1,'cutlass::epilogue::warp::TileIteratorWmmaTensorOp&lt; WarpShape_, OperatorShape_, OperatorFragment_, layout::RowMajor &gt;::Layout()'],['../structcutlass_1_1epilogue_1_1warp_1_1VoltaTensorOpPolicy_3_01WarpShape___00_01gemm_1_1GemmShape_1d48185f49e4d066f8e9327bf0856b7f.html#a4bbecd66d0179c7c9c75addaecd17529',1,'cutlass::epilogue::warp::VoltaTensorOpPolicy&lt; WarpShape_, gemm::GemmShape&lt; 32, 32, 4 &gt;, half_t, layout::RowMajor &gt;::Layout()'],['../structcutlass_1_1epilogue_1_1warp_1_1VoltaTensorOpPolicy_3_01WarpShape___00_01gemm_1_1GemmShape_136ce744d4c1c6e8707f5a9785196194.html#a55da51a0f629e4770da71bd7238cd6c8',1,'cutlass::epilogue::warp::VoltaTensorOpPolicy&lt; WarpShape_, gemm::GemmShape&lt; 32, 32, 4 &gt;, float, layout::RowMajor &gt;::Layout()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaSimtTileIterator_3_01Shape___00_01Operand_1_1kA_00_01Element_67ca7e11a38e38f2c51b84767654a90f.html#aef68dbd3b3964f1de5eddc07c9c08b61',1,'cutlass::gemm::warp::MmaSimtTileIterator&lt; Shape_, Operand::kA, Element_, layout::ColumnMajor, Policy_, PartitionsK, PartitionGroupSize &gt;::Layout()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaSimtTileIterator_3_01Shape___00_01Operand_1_1kB_00_01Element_ea0a4e7ce3cd5d25cabf79383efdf4d9.html#ad402b2a984d3464f70675919e1701e34',1,'cutlass::gemm::warp::MmaSimtTileIterator&lt; Shape_, Operand::kB, Element_, layout::RowMajor, Policy_, PartitionsK, PartitionGroupSize &gt;::Layout()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaSimtTileIterator_3_01Shape___00_01Operand_1_1kC_00_01Element_4ccafbc821b3a55cd532602442a74031.html#a16ea3fc3bc328859cf6a7c0b9784cddb',1,'cutlass::gemm::warp::MmaSimtTileIterator&lt; Shape_, Operand::kC, Element_, layout::ColumnMajor, Policy_ 
&gt;::Layout()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaSimtTileIterator_3_01Shape___00_01Operand_1_1kC_00_01Element_8f92ea79e85febb67169c4b2d94b1b20.html#a67267fe681599b2b234a78caae863bb4',1,'cutlass::gemm::warp::MmaSimtTileIterator&lt; Shape_, Operand::kC, Element_, layout::RowMajor, Policy_ &gt;::Layout()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaSimtTileIterator_3_01Shape___00_01Operand_1_1kA_00_01Element_f0ce904a9294556f15e1cc9cf7c99a93.html#a58f69b7de1925ee11711aca12735a460',1,'cutlass::gemm::warp::MmaSimtTileIterator&lt; Shape_, Operand::kA, Element_, layout::ColumnMajorInterleaved&lt; 4 &gt;, Policy_, PartitionsK, PartitionGroupSize &gt;::Layout()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaSimtTileIterator_3_01Shape___00_01Operand_1_1kB_00_01Element_ada156b62fcbdce47009c5bf1321c92c.html#a821db88e04e5c8ef45df93d01db1a800',1,'cutlass::gemm::warp::MmaSimtTileIterator&lt; Shape_, Operand::kB, Element_, layout::RowMajorInterleaved&lt; 4 &gt;, Policy_, PartitionsK, PartitionGroupSize &gt;::Layout()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operand___0ed7daaeba1c095e77f68533d4d2c475c.html#a15f54eea90d76efb8051333f883dd67c',1,'cutlass::gemm::warp::MmaTensorOpMultiplicandTileIterator&lt; Shape_, Operand_, Element_, cutlass::layout::TensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value, 64 &gt;, InstructionShape_, OpDelta_, 32, PartitionsK_ &gt;::Layout()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operand___0b84f53cd44b339eccc12067c9f86e11c.html#ae93f560012e11ed9d4aaad6dd0b3d2f1',1,'cutlass::gemm::warp::MmaTensorOpMultiplicandTileIterator&lt; Shape_, Operand_, Element_, cutlass::layout::ColumnMajorTensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value, int(128/sizeof(Element_))&gt;, InstructionShape_, OpDelta_, 32, PartitionsK_ &gt;::Layout()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operand___039819fb3ccd43786d556c2c9669508ef.html#ab2818c6132241f008d412a47e5b65e76',1,'cutlass::gemm::warp::MmaTensorOpMultiplicandTileIterator&lt; Shape_, Operand_, Element_, cutlass::layout::RowMajorTensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value, int(128/sizeof(Element_))&gt;, InstructionShape_, OpDelta_, 32, PartitionsK_ &gt;::Layout()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operand___0c7d419c589d601ce4eb603be566fea21.html#a2f70d5b508aa97478efb818412f385ff',1,'cutlass::gemm::warp::MmaTensorOpMultiplicandTileIterator&lt; Shape_, Operand_, Element_, cutlass::layout::TensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Crosswise &gt;, InstructionShape_, OpDelta_, 32, PartitionsK_ &gt;::Layout()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operand___0e52ad425e1ee3e68544873f66733237b.html#add223eafdccc3e8557a1b797179ab645',1,'cutlass::gemm::warp::MmaTensorOpMultiplicandTileIterator&lt; Shape_, Operand_, Element_, cutlass::layout::ColumnMajorTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Crosswise &gt;, InstructionShape_, OpDelta_, 32, PartitionsK_ &gt;::Layout()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operand___0352e0dcab42bc8360606874e00173556.html#a130187b87f2d284d9f3f9d1bcacd5a47',1,'cutlass::gemm::warp::MmaTensorOpMultiplicandTileIterator&lt; Shape_, Operand_, Element_, 
cutlass::layout::RowMajorTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Crosswise &gt;, InstructionShape_, OpDelta_, 32, PartitionsK_ &gt;::Layout()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaTensorOpAccumulatorTileIterator_3_01Shape___00_01Element___006c39f57875e0aa9d0ad82c8043ed8b98.html#a6c8cd9e5eb2068157e3a757637cb4ac1',1,'cutlass::gemm::warp::MmaTensorOpAccumulatorTileIterator&lt; Shape_, Element_, cutlass::layout::RowMajor, InstructionShape_, OpDelta_ &gt;::Layout()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaTensorOpAccumulatorTileIterator_3_01Shape___00_01Element___008f607b871a2b3d854eb4def64712c042.html#a61620a099fbc4ab7f1e2f02db47c5680',1,'cutlass::gemm::warp::MmaTensorOpAccumulatorTileIterator&lt; Shape_, Element_, cutlass::layout::ColumnMajor, InstructionShape_, OpDelta_ &gt;::Layout()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaTensorOpAccumulatorTileIterator_3_01Shape___00_01Element___00027dabdc144edd6276f664ca74088510.html#ab376a9049849d37e9f68cf8ffe19d338',1,'cutlass::gemm::warp::MmaTensorOpAccumulatorTileIterator&lt; Shape_, Element_, cutlass::layout::ColumnMajorInterleaved&lt; InterleavedN &gt;, InstructionShape_, OpDelta_ &gt;::Layout()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operan34be8e21a40af3ebd2dc3dff460dca72.html#ae9ca07324bb631109c44088d039e28eb',1,'cutlass::gemm::warp::MmaVoltaTensorOpMultiplicandTileIterator&lt; Shape_, Operand::kA, Element_, cutlass::layout::VoltaTensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value &gt;, InstructionShape_, OpDelta_, 32 &gt;::Layout()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operan16c56cdc2dda5eeb996af8ec0242d501.html#a84d8ebb28373aa1d56b6ed9a872a4c39',1,'cutlass::gemm::warp::MmaVoltaTensorOpMultiplicandTileIterator&lt; Shape_, Operand::kB, Element_, cutlass::layout::VoltaTensorOpMultiplicandBCongruous&lt; sizeof_bits&lt; Element_ &gt;::value &gt;, InstructionShape_, OpDelta_, 32 &gt;::Layout()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operan0d3248553e52cd61ed8a2b3b12a20343.html#aa4e9fc7dc5c05b19dd8d2af7764d057c',1,'cutlass::gemm::warp::MmaVoltaTensorOpMultiplicandTileIterator&lt; Shape_, Operand::kA, Element_, cutlass::layout::ColumnMajorVoltaTensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value &gt;, InstructionShape_, OpDelta_, 32 &gt;::Layout()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operand734577b7e54a074d143aba59828c2f2.html#a7826b08e571f45ada4b56ddd1076572a',1,'cutlass::gemm::warp::MmaVoltaTensorOpMultiplicandTileIterator&lt; Shape_, Operand::kB, Element_, cutlass::layout::RowMajorVoltaTensorOpMultiplicandBCongruous&lt; sizeof_bits&lt; Element_ &gt;::value &gt;, InstructionShape_, OpDelta_, 32 &gt;::Layout()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOpAccumulatorTileIterator.html#afaee932bcd7c0e32352e4857903debcb',1,'cutlass::gemm::warp::MmaVoltaTensorOpAccumulatorTileIterator::Layout()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operana2f40b28f0d2286b84d86f7238d67b52.html#aad2cda3df238417d24b17a7826723be8',1,'cutlass::gemm::warp::MmaVoltaTensorOpMultiplicandTileIterator&lt; Shape_, Operand_, Element_, cutlass::layout::VoltaTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, KBlock &gt;, InstructionShape_, OpDelta_, 32 
&gt;::Layout()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operan5a221944f4a0e16ccab77ba684856942.html#a91e8c28b2e72b1de20bc80eafd8df966',1,'cutlass::gemm::warp::MmaVoltaTensorOpMultiplicandTileIterator&lt; Shape_, Operand_, Element_, cutlass::layout::ColumnMajorVoltaTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, KBlock &gt;, InstructionShape_, OpDelta_, 32 &gt;::Layout()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operandcc9821c435540895138bc9af495f321.html#a8033f46ad0858c1f27dec25622fff990',1,'cutlass::gemm::warp::MmaVoltaTensorOpMultiplicandTileIterator&lt; Shape_, Operand_, Element_, cutlass::layout::RowMajorVoltaTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, KBlock &gt;, InstructionShape_, OpDelta_, 32 &gt;::Layout()'],['../classcutlass_1_1TensorRef.html#ae0e372b28e665820e6a2d17fc9f68d2b',1,'cutlass::TensorRef::Layout()'],['../classcutlass_1_1TensorView.html#ae5168523626a13d6e7391a64bca8caf0',1,'cutlass::TensorView::Layout()'],['../classcutlass_1_1thread_1_1Matrix.html#ac4212d7ab194569009ef64699101d3a9',1,'cutlass::thread::Matrix::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileAccessIterator_3_01Shape___00_01Elemen784a0e9da3f55064c47e5613791f51f7.html#a12a2e76e4234253777cf0ed2d4436370',1,'cutlass::transform::threadblock::PredicatedTileAccessIterator&lt; Shape_, Element_, layout::PitchLinear, AdvanceRank, ThreadMap_, AccessType_ &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileAccessIterator_3_01Shape___00_01Elemen89c687c583745a73cb485041911a4c4e.html#a17f6d1bf692e90209a451b68060e38e0',1,'cutlass::transform::threadblock::PredicatedTileAccessIterator&lt; Shape_, Element_, layout::ColumnMajor, AdvanceRank, ThreadMap_, AccessType_ &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileAccessIterator_3_01Shape___00_01Elemen9838736ad62fae54213fbaf722a989ab.html#a6ffbed57ffb6e886d40ba0f51a0efa42',1,'cutlass::transform::threadblock::PredicatedTileAccessIterator&lt; Shape_, Element_, layout::RowMajor, AdvanceRank, ThreadMap_, AccessType_ &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileAccessIterator_3_01Shape___00_01Elemenab63a1e105bf37f6371516cb9e2c5a7a.html#a54198cb8aa550d134e19f225349699a8',1,'cutlass::transform::threadblock::PredicatedTileAccessIterator&lt; Shape_, Element_, layout::ColumnMajorInterleaved&lt; InterleavedK &gt;, AdvanceRank, ThreadMap_, AccessType_ &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileAccessIterator_3_01Shape___00_01Elemen809793e785fb4211888c6b4e5dcfcb39.html#a98ca86656e2498f1fb40f03ae4828814',1,'cutlass::transform::threadblock::PredicatedTileAccessIterator&lt; Shape_, Element_, layout::RowMajorInterleaved&lt; InterleavedK &gt;, AdvanceRank, ThreadMap_, AccessType_ &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileAccessIterator2dThreadTile_3_01Shape__1790abaa54a01f277d75766d5882fec8.html#a932730641df3febdafa6f22b17272d0c',1,'cutlass::transform::threadblock::PredicatedTileAccessIterator2dThreadTile&lt; Shape_, Element_, layout::PitchLinear, AdvanceRank, ThreadMap_, AccessType_ 
&gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileAccessIterator2dThreadTile_3_01Shape__da632779aba661c0f4cfaaa78126b771.html#a83321d65fe175a65a43bc5431a70deea',1,'cutlass::transform::threadblock::PredicatedTileAccessIterator2dThreadTile&lt; Shape_, Element_, layout::ColumnMajor, AdvanceRank, ThreadMap_, AccessType_ &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileAccessIterator2dThreadTile_3_01Shape__7327fa15996bcb8502cdfcc192350fe1.html#a02967da59cb489f75bdb919a3dab6588',1,'cutlass::transform::threadblock::PredicatedTileAccessIterator2dThreadTile&lt; Shape_, Element_, layout::RowMajor, AdvanceRank, ThreadMap_, AccessType_ &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileIterator_3_01Shape___00_01Element___00e7c2c404e7aedfe60ad56bb5571306a1.html#a274c38e6b7a428643379438969243d2b',1,'cutlass::transform::threadblock::PredicatedTileIterator&lt; Shape_, Element_, layout::PitchLinear, AdvanceRank, ThreadMap_, AccessSize &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileIterator_3_01Shape___00_01Element___0068b3e874b5d93d11f0fa902c7f1d11d9.html#af156b766930c841133c2edaee29bab16',1,'cutlass::transform::threadblock::PredicatedTileIterator&lt; Shape_, Element_, layout::ColumnMajor, AdvanceRank, ThreadMap_, AccessSize &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileIterator_3_01Shape___00_01Element___0041ea81994f8af0d4d071fdb9e66b5ff0.html#aa2f5d30f2316367c6deef3d6e1811132',1,'cutlass::transform::threadblock::PredicatedTileIterator&lt; Shape_, Element_, layout::RowMajor, AdvanceRank, ThreadMap_, AccessSize &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileIterator_3_01Shape___00_01Element___00f6b3a9dfab5e7c72d5233f7e5e6e3b9b.html#a53196148230c2f8933616f736b1ba704',1,'cutlass::transform::threadblock::PredicatedTileIterator&lt; Shape_, Element_, layout::ColumnMajorInterleaved&lt; InterleavedK &gt;, AdvanceRank, ThreadMap_, AccessSize &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileIterator_3_01Shape___00_01Element___00d670f969180a8d182dffb356ebcc957e.html#a08dc4dc5f67cf7255ad3b4757646f6d8',1,'cutlass::transform::threadblock::PredicatedTileIterator&lt; Shape_, Element_, layout::RowMajorInterleaved&lt; InterleavedK &gt;, AdvanceRank, ThreadMap_, AccessSize &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileIterator2dThreadTile_3_01Shape___00_017a517f3c73efd795ab05059cc9b111e1.html#a412ae2569a2339172a1facddd9c9fcb3',1,'cutlass::transform::threadblock::PredicatedTileIterator2dThreadTile&lt; Shape_, Element_, layout::PitchLinear, AdvanceRank, ThreadMap_, Transpose_ &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileIterator2dThreadTile_3_01Shape___00_0165b39a630d10785a3558406f9adb99b9.html#ada60020d427455a3e3aaedfe7fbdd7e4',1,'cutlass::transform::threadblock::PredicatedTileIterator2dThreadTile&lt; Shape_, Element_, layout::ColumnMajor, AdvanceRank, ThreadMap_, Transpose_ &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileIterator2dThreadTile_3_01Shape___00_013671177d6219bfeb0e1b4dc4c1b5bf11.html#a2f676f0cc3fa9698a1c59ea02f65aca7',1,'cutlass::transform::threadblock::PredicatedTileIterator2dThreadTile&lt; Shape_, Element_, layout::RowMajor, AdvanceRank, ThreadMap_, Transpose_ 
&gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileAccessIterator_3_01Shape___00_01Element__0855e9d9ab619202d2397180c1e4c4a5.html#a0aad5710e7639b984ab63a92d6b24808',1,'cutlass::transform::threadblock::RegularTileAccessIterator&lt; Shape_, Element_, layout::PitchLinear, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileAccessIterator_3_01Shape___00_01Element__eb7d20f8b9d69e0ae5e7ef51dc480867.html#a210a883012e95a30672bf5a2adc67d84',1,'cutlass::transform::threadblock::RegularTileAccessIterator&lt; Shape_, Element_, layout::ColumnMajor, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileAccessIterator_3_01Shape___00_01Element__f04332958a49a47d6fb2b25201764630.html#a2192d7893944e1f444d78328b432436c',1,'cutlass::transform::threadblock::RegularTileAccessIterator&lt; Shape_, Element_, layout::RowMajor, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileAccessIterator_3_01Shape___00_01Element__ebf4714349612673e8b6609b763eeb6f.html#ab7cfff85a366f0132c3f242e88a7570a',1,'cutlass::transform::threadblock::RegularTileAccessIterator&lt; Shape_, Element_, layout::TensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value, int(128/sizeof(Element_))&gt;, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileAccessIterator_3_01Shape___00_01Element__2c1476eaf582bfe972793e17babfe985.html#a03235e88ac2d0697c9259169fc42e7e8',1,'cutlass::transform::threadblock::RegularTileAccessIterator&lt; Shape_, Element_, layout::ColumnMajorTensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value, int(128/sizeof(Element_))&gt;, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileAccessIterator_3_01Shape___00_01Element__6baada077236f1a368c61c5e11b45b72.html#a5958d05653fca54158d052d3cdba42ac',1,'cutlass::transform::threadblock::RegularTileAccessIterator&lt; Shape_, Element_, layout::RowMajorTensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value, int(128/sizeof(Element_))&gt;, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileAccessIterator_3_01Shape___00_01Element__e9a9e0f4286f652f55eb9b863b21effe.html#a15f26bb9cabd05984ef2cce8ee0e41fb',1,'cutlass::transform::threadblock::RegularTileAccessIterator&lt; Shape_, Element_, layout::TensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Crosswise &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileAccessIterator_3_01Shape___00_01Element__a3c11cf1f00ef7a1efb8389ac6e4c6e0.html#a1e68bd5d902141ecd4281fcbbcf855be',1,'cutlass::transform::threadblock::RegularTileAccessIterator&lt; Shape_, Element_, layout::ColumnMajorTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Crosswise &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileAccessIterator_3_01Shape___00_01Element__0184b7188941788a96624510a4b2f876.html#a0b3842ebd2d0ded853ed99d800e37f6d',1,'cutlass::transform::threadblock::RegularTileAccessIterator&lt; Shape_, Element_, layout::RowMajorTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Crosswise &gt;, AdvanceRank, ThreadMap_, Alignment 
&gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_0184a89653916f5d51ab59d1b386989a17.html#a802d26c95029a3eab7632a845e04b6f0',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::PitchLinear, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_0149454d361ea5885cf5166a920b5145df.html#a4e2e59ccbd6d1098913f2669cd69b6ea',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::RowMajor, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_011d3637dbd8bc58bcb020b51bf57fbfc0.html#aa38acb9ec06003a9f1d0b727576fca33',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::ColumnMajor, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator2dThreadTile_3_01Shape___00_01Ele76ed82829532ae1c17f4c78158f036c7.html#a81fbdca6c1e906c90600afc8c30e83bd',1,'cutlass::transform::threadblock::RegularTileIterator2dThreadTile&lt; Shape_, Element_, layout::PitchLinear, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator2dThreadTile_3_01Shape___00_01Ele654c8f6161ae5340f040397a4e2e045c.html#a6669caee6afc9026118e8621632d6244',1,'cutlass::transform::threadblock::RegularTileIterator2dThreadTile&lt; Shape_, Element_, layout::RowMajorInterleaved&lt; 4 &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator2dThreadTile_3_01Shape___00_01Eleb60d066756d1c18f05fceee6a27bdb8a.html#a7475661629ef97ceb430442b20a369af',1,'cutlass::transform::threadblock::RegularTileIterator2dThreadTile&lt; Shape_, Element_, layout::ColumnMajorInterleaved&lt; 4 &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_01efd5013a2503d6567e2bf6b40c97360c.html#a6e717e77e6e781c76b0c47bbe09c8098',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::TensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value, int(128/sizeof(Element_))&gt;, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_017982f81d4ef592e19c8427de2ea933a3.html#a25a1f7a57d49ecfe4834f80cd0bc2d78',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::ColumnMajorTensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value, int(128/sizeof(Element_))&gt;, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_01c20d35180520077a5a09b1e33543c1a5.html#a504c8c39fa4c30fe8bcdf234b1522557',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::RowMajorTensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value, int(128/sizeof(Element_))&gt;, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_0197fef2242a3454a7d1cebe61aee28b43.html#afd642156ffcdad62413eefd68e8fb783',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, 
Element_, layout::TensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Crosswise &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_010889a732373c350de9b9a9f6c13cd761.html#a0129cbff33d0d4ab7069b35518fca30f',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::ColumnMajorTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Crosswise &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_01a31b454d9c930525c1e9ca406a514f40.html#a8937ddf46d646bf0656d266a42c33860',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::RowMajorTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Crosswise &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_01f96bbeb63e6d4ce4a2551279de3a9f0e.html#ab8e8585f9bf1619c1642278fc002011a',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::VoltaTensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_01793f74bfd8f116a827948ab01a37349a.html#a19fbc5db6a9793730cf7b6ca230f9120',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::ColumnMajorVoltaTensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_01f6f6511b5033cad31083644ac69c54d8.html#abed24036a2294ef781e9f9bb01f80882',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::RowMajorVoltaTensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_01a75d2cd74e722d6ad6a3b41aabfd432d.html#a206a652e720a119d51b7037e3b9b9d11',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::VoltaTensorOpMultiplicandBCongruous&lt; sizeof_bits&lt; Element_ &gt;::value &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_01187f8574e1fe9d7d5e8fbf09bd834bf0.html#a9cdfcfd7346aa4a1a6c9161943c98b01',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::ColumnMajorVoltaTensorOpMultiplicandBCongruous&lt; sizeof_bits&lt; Element_ &gt;::value &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_0104ad31bd559a88cc418ae1cab7492ed5.html#acd1c21d7b8ccb22547cace4950a0bbfd',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::RowMajorVoltaTensorOpMultiplicandBCongruous&lt; sizeof_bits&lt; Element_ &gt;::value &gt;, AdvanceRank, ThreadMap_, Alignment 
&gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_01dbd6b8468d5bd787308d2f615a24d123.html#acdf54b7d86527c290d5461428f84a4a0',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::VoltaTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Shape_::kContiguous &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_01bd31b3810c1fedf2e7e5959ff92b5d3d.html#a44405a9b5c408774bab7f64df5a8ec0d',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::ColumnMajorVoltaTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Shape_::kRow &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_01b3fa5720e807697de61b9f937b269cd0.html#ac016e571bdec3872b2479b848419d396',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::RowMajorVoltaTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Shape_::kColumn &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::Layout()'],['../classcutlass_1_1HostTensor.html#a49c87d7b2438c8350b736ab6ea31c38a',1,'cutlass::HostTensor::Layout()']]], ['layouta',['LayoutA',['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_014_01_4_00_018_00_01half__t_c7f88bfd32a544fba8111d2dcadeab11.html#a603cf0d1ef5f61bfcbc81c89c6af63ca',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 4 &gt;, 8, half_t, layout::ColumnMajor, half_t, layout::ColumnMajor, half_t, layout::RowMajor, OpMultiplyAdd &gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_014_01_4_00_018_00_01half__t_4b7308177b308a272c1889fbe9670275.html#a787be30e695a6cd38d318ff3b9a7e414',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 4 &gt;, 8, half_t, layout::ColumnMajor, half_t, layout::RowMajor, half_t, layout::RowMajor, OpMultiplyAdd &gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_014_01_4_00_018_00_01half__t_31defda8ea2b7d855642ffd77da1a411.html#afff92f809be9ee9ffbe1465b7bdc9831',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 4 &gt;, 8, half_t, layout::RowMajor, half_t, layout::ColumnMajor, half_t, layout::RowMajor, OpMultiplyAdd &gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_014_01_4_00_018_00_01half__t_73d9802d6b944a5299bc255887db6bbc.html#a60ad279594343116b8993fc1dea98f18',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 4 &gt;, 8, half_t, layout::RowMajor, half_t, layout::RowMajor, half_t, layout::RowMajor, OpMultiplyAdd &gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_014_01_4_00_018_00_01half__t_b0242d7a01097510effbc4718040d3e5.html#a7f8b5cefb08e57b96309e41e08e1f232',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 4 &gt;, 8, half_t, layout::ColumnMajor, half_t, layout::ColumnMajor, float, layout::RowMajor, OpMultiplyAdd &gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_014_01_4_00_018_00_01half__t_44a3b2a8df88a2b067f1284515cb5371.html#a311665140e34cb9383301466bda45753',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 4 &gt;, 8, half_t, layout::ColumnMajor, half_t, layout::RowMajor, float, layout::RowMajor, OpMultiplyAdd 
&gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_014_01_4_00_018_00_01half__t_5a9888862cebd333ecaf11f7262f77d4.html#a44ca88ec6a930bd2e303245cf030d042',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 4 &gt;, 8, half_t, layout::RowMajor, half_t, layout::ColumnMajor, float, layout::RowMajor, OpMultiplyAdd &gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_014_01_4_00_018_00_01half__t_839a7c8bb938d1661f4611e68f85d8cb.html#a16c04d720e42f4feb91a370b527abc77',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 4 &gt;, 8, half_t, layout::RowMajor, half_t, layout::RowMajor, float, layout::RowMajor, OpMultiplyAdd &gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_0116_00_018_00_018_01_4_00_0132_00_01half__96363097c47b056f0ca1911afd7f8b7a.html#a232a8519c5619137690151780fd08b55',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 16, 8, 8 &gt;, 32, half_t, layout::RowMajor, half_t, layout::ColumnMajor, half_t, layout::RowMajor, OpMultiplyAdd &gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_0116_00_018_00_018_01_4_00_0132_00_01half__02a3f19a78995f97d793a668e0e4d4f0.html#a46c0b9a6c68ad975d8ed897467f943db',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 16, 8, 8 &gt;, 32, half_t, layout::RowMajor, half_t, layout::ColumnMajor, float, layout::RowMajor, OpMultiplyAdd &gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0116_01_4_00_0132_00_01int8__927179f46017ea5f58f859f1196c4829.html#a3d9f6acfee4b31a35eb6b415a26c75c0',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 16 &gt;, 32, int8_t, layout::RowMajor, int8_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAdd &gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0116_01_4_00_0132_00_01uint8_a62aa63a212985df306fb27e8a50aeae.html#adaed38fb2a70b8515613965a91e883db',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 16 &gt;, 32, uint8_t, layout::RowMajor, int8_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAdd &gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0116_01_4_00_0132_00_01int8__5299c9c90c8f2f521be0c8cec1c3eb08.html#ab21b750163e0a30d0e64a9ae58436f40',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 16 &gt;, 32, int8_t, layout::RowMajor, uint8_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAdd &gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0116_01_4_00_0132_00_01uint8_5221708cec5828d35db1d1c47cb4964e.html#ae00efe15be75aa09452febc90328fb92',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 16 &gt;, 32, uint8_t, layout::RowMajor, uint8_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAdd &gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0116_01_4_00_0132_00_01int8__8ebae0cbdf333fddfe5c24d35ebe8e02.html#ab89f8f7b43d94c518012b79e156e7903',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 16 &gt;, 32, int8_t, layout::RowMajor, int8_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAddSaturate &gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0116_01_4_00_0132_00_01uint8_ab741d81fdc991345cb9e43c29fca573.html#acff9b38c35ce613ecd5cba23857306ef',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 16 &gt;, 32, uint8_t, layout::RowMajor, int8_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAddSaturate 
&gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0116_01_4_00_0132_00_01int8__f083347e265b1e9eea5572d86ddb6bf9.html#a183902aaf411a80d9716b9499b3bf9e3',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 16 &gt;, 32, int8_t, layout::RowMajor, uint8_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAddSaturate &gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0116_01_4_00_0132_00_01uint8_bef0c048bc0f8ba2d875cb7ab26d363b.html#a213e6953b39cd3ce1079541e2e9937b2',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 16 &gt;, 32, uint8_t, layout::RowMajor, uint8_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAddSaturate &gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0132_01_4_00_0132_00_01int4b_6e513ccbc44ae7909a60d93b9b5435b3.html#af73260f1928688306a5b3b73fc89b7f3',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 32 &gt;, 32, int4b_t, layout::RowMajor, int4b_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAdd &gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0132_01_4_00_0132_00_01uint4b03e3b50dbcb30d0d1ac062f3a9d5abef.html#a4db3ab8cf6a7699aa72fc8a23735284e',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 32 &gt;, 32, uint4b_t, layout::RowMajor, int4b_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAdd &gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0132_01_4_00_0132_00_01int4b_4746fc55e614df0016c518d3fda2677e.html#a3b2bd84cca60d0445b2ca537ffba656f',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 32 &gt;, 32, int4b_t, layout::RowMajor, uint4b_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAdd &gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0132_01_4_00_0132_00_01uint4bc4b6ba004e25c44bfd9266c61f937dfb.html#af35d4d5def4092a468571da91f4d8889',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 32 &gt;, 32, uint4b_t, layout::RowMajor, uint4b_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAdd &gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0132_01_4_00_0132_00_01int4b_0ee08a4520882d24ba9026879265e892.html#a51d34d49eafc1263b17e379d26a92164',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 32 &gt;, 32, int4b_t, layout::RowMajor, int4b_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAddSaturate &gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0132_01_4_00_0132_00_01uint4b6d968039dde5c9f062ab15f90a8049fe.html#a8eee7edc5eed11b184d6cf9b26aeafe3',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 32 &gt;, 32, uint4b_t, layout::RowMajor, int4b_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAddSaturate &gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0132_01_4_00_0132_00_01int4b_546e9ec6de6a5970b326da6f6280f1d4.html#a8b946d8e8e48a76794481d8626c89522',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 32 &gt;, 32, int4b_t, layout::RowMajor, uint4b_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAddSaturate &gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0132_01_4_00_0132_00_01uint4b451d5cf5d7e8cbbe476afe3dab5c09b2.html#a9fb116f4747407dc93db7e3264ac0c91',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 32 &gt;, 32, uint4b_t, layout::RowMajor, uint4b_t, layout::ColumnMajor, int, layout::RowMajor, 
OpMultiplyAddSaturate &gt;::LayoutA()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_01128_01_4_00_0132_00_01uint15918972b95027764b3a849b03075ed2b.html#a9b36cb6bd0c15c32da7fe128901748f6',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 128 &gt;, 32, uint1b_t, layout::RowMajor, uint1b_t, layout::ColumnMajor, int, layout::RowMajor, OpXorPopc &gt;::LayoutA()'],['../classcutlass_1_1gemm_1_1device_1_1Gemm.html#a6bbefc44e1ed6822dc8a4e2edc2b276b',1,'cutlass::gemm::device::Gemm::LayoutA()'],['../classcutlass_1_1gemm_1_1device_1_1Gemm_3_01ElementA___00_01LayoutA___00_01ElementB___00_01Layout4d0960ae6b1d1bf19e6239dbd002249c.html#a5212eb5b3af32e5bc43cc4179bb346ef',1,'cutlass::gemm::device::Gemm&lt; ElementA_, LayoutA_, ElementB_, LayoutB_, ElementC_, layout::ColumnMajor, ElementAccumulator_, OperatorClass_, ArchTag_, ThreadblockShape_, WarpShape_, InstructionShape_, EpilogueOutputOp_, ThreadblockSwizzle_, Stages, AlignmentA, AlignmentB, SplitKSerial, Operator_, IsBetaZero &gt;::LayoutA()'],['../classcutlass_1_1gemm_1_1device_1_1GemmBatched.html#a9f9d11529b28ced91c4b05c2530b7a70',1,'cutlass::gemm::device::GemmBatched::LayoutA()'],['../classcutlass_1_1gemm_1_1device_1_1GemmBatched_3_01ElementA___00_01LayoutA___00_01ElementB___00_0c9bb6f4463ab6085e6008b5d5ad6abfd.html#af623ca54d9554cdfafc09af7a22cdd62',1,'cutlass::gemm::device::GemmBatched&lt; ElementA_, LayoutA_, ElementB_, LayoutB_, ElementC_, layout::ColumnMajor, ElementAccumulator_, OperatorClass_, ArchTag_, ThreadblockShape_, WarpShape_, InstructionShape_, EpilogueOutputOp_, ThreadblockSwizzle_, Stages, AlignmentA, AlignmentB, Operator_ &gt;::LayoutA()'],['../classcutlass_1_1gemm_1_1device_1_1GemmComplex.html#aa37dddae3eda85464fdded62d4f12728',1,'cutlass::gemm::device::GemmComplex::LayoutA()'],['../classcutlass_1_1gemm_1_1device_1_1GemmComplex_3_01ElementA___00_01LayoutA___00_01ElementB___00_07c56401b4df75709ae636675d9980a9a.html#a9b48f3a933f3b37814f9b70503b7684a',1,'cutlass::gemm::device::GemmComplex&lt; ElementA_, LayoutA_, ElementB_, LayoutB_, ElementC_, layout::ColumnMajor, ElementAccumulator_, OperatorClass_, ArchTag_, ThreadblockShape_, WarpShape_, InstructionShape_, EpilogueOutputOp_, ThreadblockSwizzle_, Stages, TransformA, TransformB, SplitKSerial &gt;::LayoutA()'],['../classcutlass_1_1gemm_1_1device_1_1GemmSplitKParallel.html#ae59bd456dc03a51e64a655367382535d',1,'cutlass::gemm::device::GemmSplitKParallel::LayoutA()'],['../classcutlass_1_1gemm_1_1device_1_1GemmSplitKParallel_3_01ElementA___00_01LayoutA___00_01ElementBbe7c1f7154ad5b5bf9d4d28301e2b457.html#ab0f19b729484a5d7e384af1a310f3f8c',1,'cutlass::gemm::device::GemmSplitKParallel&lt; ElementA_, LayoutA_, ElementB_, LayoutB_, ElementC_, layout::ColumnMajor, ElementAccumulator_, OperatorClass_, ArchTag_, ThreadblockShape_, WarpShape_, InstructionShape_, EpilogueOutputOp_, ConvertScaledOp_, ReductionOp_, ThreadblockSwizzle_, Stages, kAlignmentA, kAlignmentB, Operator_ &gt;::LayoutA()'],['../structcutlass_1_1gemm_1_1kernel_1_1DefaultGemm_3_01ElementA_00_01layout_1_1ColumnMajorInterleave661fe54d13cc2c9153dcdf31e4beaa30.html#a191985619d1f91de3261c6fcb13a7756',1,'cutlass::gemm::kernel::DefaultGemm&lt; ElementA, layout::ColumnMajorInterleaved&lt; InterleavedK &gt;, kAlignmentA, ElementB, layout::RowMajorInterleaved&lt; InterleavedK &gt;, kAlignmentB, ElementC, layout::ColumnMajorInterleaved&lt; InterleavedK &gt;, int32_t, arch::OpClassTensorOp, arch::Sm75, ThreadblockShape, WarpShape, InstructionShape, EpilogueOutputOp, ThreadblockSwizzle, 2, SplitKSerial, 
Operator, IsBetaZero &gt;::LayoutA()'],['../structcutlass_1_1gemm_1_1kernel_1_1DefaultGemv.html#a7bc6441557a4376eb9f34eebc593a6a6',1,'cutlass::gemm::kernel::DefaultGemv::LayoutA()'],['../structcutlass_1_1gemm_1_1thread_1_1MmaGeneric.html#abba82524c11d44ef3d9efbcc7288414a',1,'cutlass::gemm::thread::MmaGeneric::LayoutA()'],['../structcutlass_1_1gemm_1_1thread_1_1Mma_3_01Shape___00_01ElementA___00_01LayoutA___00_01ElementB_e41c1cd6078b6d1347fac239b0639d56.html#af1c453f655d29855f026ab6dfc8f7ae9',1,'cutlass::gemm::thread::Mma&lt; Shape_, ElementA_, LayoutA_, ElementB_, LayoutB_, ElementC_, LayoutC_, arch::OpMultiplyAdd, bool &gt;::LayoutA()'],['../structcutlass_1_1gemm_1_1thread_1_1Mma_3_01Shape___00_01half__t_00_01LayoutA___00_01half__t_00_088f0e99e501b6012297eb30b4e89bcea.html#a0975c18cc4a9d376011858c6dbf740d0',1,'cutlass::gemm::thread::Mma&lt; Shape_, half_t, LayoutA_, half_t, LayoutB_, half_t, layout::RowMajor, arch::OpMultiplyAdd, typename platform::enable_if&lt; detail::EnableMma_Crow_SM60&lt; LayoutA_, LayoutB_ &gt;::value &gt;::type &gt;::LayoutA()'],['../structcutlass_1_1gemm_1_1thread_1_1Mma_3_01Shape___00_01int8__t_00_01layout_1_1RowMajor_00_01int89c659e7faf47264972bdba6cd80f42b.html#ae97a267ff5cb731e4c396cf6f7524e01',1,'cutlass::gemm::thread::Mma&lt; Shape_, int8_t, layout::RowMajor, int8_t, layout::ColumnMajor, int32_t, LayoutC_, arch::OpMultiplyAdd, bool &gt;::LayoutA()'],['../structcutlass_1_1gemm_1_1thread_1_1Mma_3_01Shape___00_01int8__t_00_01layout_1_1ColumnMajor_00_013f3785e722edc6e9aab6f866309b8623.html#a285e7f7a777dcc1b553e3647bcf3833a',1,'cutlass::gemm::thread::Mma&lt; Shape_, int8_t, layout::ColumnMajor, int8_t, layout::RowMajor, int32_t, LayoutC_, arch::OpMultiplyAdd, int8_t &gt;::LayoutA()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultGemvCore.html#a78fe6df9a97b578844e143d27015138f',1,'cutlass::gemm::threadblock::DefaultGemvCore::LayoutA()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmShafafd5c61db86cbfe90863578ddd11092.html#accc6d889c070c36baf7d471d6982ccef',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 1 &gt;, ElementA_, layout::ColumnMajor, ElementB_, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LayoutA()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha8da7a0cfbbe859b701fdd9f2b8566aa7.html#a8c50192463a45aae57d1d4d6f3a75723',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 1 &gt;, ElementA_, layout::RowMajor, ElementB_, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LayoutA()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha84e9f8afb6a4ca9f5dcd219b182d16e7.html#a6c983b87a5a65a86b4d6360f6a679c2a',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 1 &gt;, ElementA_, layout::RowMajor, ElementB_, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LayoutA()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmShab94a11a77dd0565102710907089acee0.html#a2abba1ee7e00d2cbafa6c2cc113f4523',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 1 &gt;, ElementA_, layout::ColumnMajor, ElementB_, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ 
&gt;::LayoutA()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha34a52cc7b2942e8c290f0032b6779b52.html#acab21f4d5466d09c3c97a3729f27ce96',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 4 &gt;, int8_t, layout::ColumnMajor, int8_t, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LayoutA()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmShaaf312aafe9da92ea9d417bcc12a8e7dc.html#a2f121d87f28c3f69a37813fce5e193be',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 4 &gt;, int8_t, layout::RowMajor, int8_t, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LayoutA()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha863d4139ccaa713bc4bde32c425f4067.html#af884e14f4e9bfb37f6273d7360729524',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 4 &gt;, int8_t, layout::RowMajor, int8_t, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LayoutA()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha2c0d0b7cdb5c4bcb11e83c058eb65345.html#af221f78897173f9b8c641b3423ff9568',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 4 &gt;, int8_t, layout::ColumnMajor, int8_t, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LayoutA()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha46446d1e3871e31d2e728f710d78c8c1.html#ab359d7ee8ae6c06ef59a21af58af79ef',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 1 &gt;, ElementA_, layout::ColumnMajor, ElementB_, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_, &gt;::LayoutA()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha69bef08ea63dd930f99d9788105873dd.html#ae0db22993acc56306d5c595bcbdd1c4e',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 8, 8, 4 &gt;, ElementA_, layout::ColumnMajor, ElementB_, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_ &gt;::LayoutA()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha3adf608332a8c9ee7014fced0da8a9ca.html#aa74f0a7c85f2458dac08ad3198e64311',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 8, 8, 4 &gt;, ElementA_, layout::RowMajor, ElementB_, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_ &gt;::LayoutA()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmShab7edfba3cdf43a07e3c4d719d87565a4.html#a56ddbc559842b8ad20f4815cfa9f25ca',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 8, 8, 4 &gt;, ElementA_, layout::RowMajor, ElementB_, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_ &gt;::LayoutA()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmShaf03a122202ad10acdc96f280106d678b.html#a91c45030b70a2f3b191cde69e3dc8166',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 8, 8, 4 &gt;, ElementA_, layout::ColumnMajor, ElementB_, 
layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_ &gt;::LayoutA()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01Instrucf60fe02fcdd80d28b7fd419133465dcc.html#ac4e95f3f7ceebcd609b9667fbb5a4143',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, InstructionShape_, ElementA_, layout::ColumnMajor, ElementB_, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_ &gt;::LayoutA()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01Instruc24092ddc01fc83dabb7db4c14880fe60.html#a8de78b83d5df23a4f2fc72ba71ab3a9e',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, InstructionShape_, ElementA_, layout::RowMajor, ElementB_, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_ &gt;::LayoutA()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01Instruc4fee9f2965b8468bfb42b94a74527d22.html#a2125dd1d3f438e263f32ee3e946a0f93',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, InstructionShape_, ElementA_, layout::RowMajor, ElementB_, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_ &gt;::LayoutA()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01Instruc803d38bc1e4618c07c47f54c87ae2678.html#a47cfed46c8135fef79d65184f49765e2',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, InstructionShape_, ElementA_, layout::ColumnMajor, ElementB_, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_ &gt;::LayoutA()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01Instruc2bf00737f4ad0a9da9a8be6d3e66c152.html#a7e9a0b2e2b67fc64db7fca326f2fe227',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, InstructionShape_, ElementA_, layout::ColumnMajorInterleaved&lt; InterleavedK &gt;, ElementB_, layout::RowMajorInterleaved&lt; InterleavedK &gt;, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_, AccumulatorsInRowMajor &gt;::LayoutA()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaComplexTensorOp_3_01Shape___00_01complex_3_01RealElementA_01_146441010dad1f40eb51b6dae3ded216.html#af47012d8fd28d82558d2ea39c1bba8f2',1,'cutlass::gemm::warp::MmaComplexTensorOp&lt; Shape_, complex&lt; RealElementA &gt;, LayoutA_, complex&lt; RealElementB &gt;, LayoutB_, complex&lt; RealElementC &gt;, LayoutC_, Policy_, TransformA, TransformB, Enable &gt;::LayoutA()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaSimt.html#a3097af2631be85cb7f7311e7ac84461c',1,'cutlass::gemm::warp::MmaSimt::LayoutA()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaTensorOp.html#a8ea4f03edb1809d4a2a07c05f22b8398',1,'cutlass::gemm::warp::MmaTensorOp::LayoutA()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOp.html#a3d0bf8bf722d8ecf84b00abd9e5be227',1,'cutlass::gemm::warp::MmaVoltaTensorOp::LayoutA()']]], ['layoutaccumulator',['LayoutAccumulator',['../structcutlass_1_1gemm_1_1kernel_1_1DefaultGemv.html#a5a72ada5ea6586abe7e2ddd37c8706e4',1,'cutlass::gemm::kernel::DefaultGemv']]], ['layoutb',['LayoutB',['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_014_01_4_00_018_00_01half__t_c7f88bfd32a544fba8111d2dcadeab11.html#a75e3c9233081a8d0a94b3a5fd8c1f03a',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 4 &gt;, 8, half_t, layout::ColumnMajor, half_t, layout::ColumnMajor, half_t, layout::RowMajor, OpMultiplyAdd 
&gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_014_01_4_00_018_00_01half__t_4b7308177b308a272c1889fbe9670275.html#a0d086392c53e68a98654147ec29aff3b',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 4 &gt;, 8, half_t, layout::ColumnMajor, half_t, layout::RowMajor, half_t, layout::RowMajor, OpMultiplyAdd &gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_014_01_4_00_018_00_01half__t_31defda8ea2b7d855642ffd77da1a411.html#a2b61fee2eabbd2a55a038a6c7cf16127',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 4 &gt;, 8, half_t, layout::RowMajor, half_t, layout::ColumnMajor, half_t, layout::RowMajor, OpMultiplyAdd &gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_014_01_4_00_018_00_01half__t_73d9802d6b944a5299bc255887db6bbc.html#ade40e5bcd4b7fb060fba17f953c1f4e9',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 4 &gt;, 8, half_t, layout::RowMajor, half_t, layout::RowMajor, half_t, layout::RowMajor, OpMultiplyAdd &gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_014_01_4_00_018_00_01half__t_b0242d7a01097510effbc4718040d3e5.html#a07f9c1ea2cbd296f67e06c2c4d21dd9d',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 4 &gt;, 8, half_t, layout::ColumnMajor, half_t, layout::ColumnMajor, float, layout::RowMajor, OpMultiplyAdd &gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_014_01_4_00_018_00_01half__t_44a3b2a8df88a2b067f1284515cb5371.html#a2735fdab5db4822df054551ecd5e0521',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 4 &gt;, 8, half_t, layout::ColumnMajor, half_t, layout::RowMajor, float, layout::RowMajor, OpMultiplyAdd &gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_014_01_4_00_018_00_01half__t_5a9888862cebd333ecaf11f7262f77d4.html#a869b59da79e761da8742c724736c700c',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 4 &gt;, 8, half_t, layout::RowMajor, half_t, layout::ColumnMajor, float, layout::RowMajor, OpMultiplyAdd &gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_014_01_4_00_018_00_01half__t_839a7c8bb938d1661f4611e68f85d8cb.html#a98d94650bf376fc0296333781ee37214',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 4 &gt;, 8, half_t, layout::RowMajor, half_t, layout::RowMajor, float, layout::RowMajor, OpMultiplyAdd &gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_0116_00_018_00_018_01_4_00_0132_00_01half__96363097c47b056f0ca1911afd7f8b7a.html#a50220de83f3f4f10ed5d104bf59692a0',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 16, 8, 8 &gt;, 32, half_t, layout::RowMajor, half_t, layout::ColumnMajor, half_t, layout::RowMajor, OpMultiplyAdd &gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_0116_00_018_00_018_01_4_00_0132_00_01half__02a3f19a78995f97d793a668e0e4d4f0.html#a5b2efd45368dfc24e2e4f8ab2074e233',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 16, 8, 8 &gt;, 32, half_t, layout::RowMajor, half_t, layout::ColumnMajor, float, layout::RowMajor, OpMultiplyAdd &gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0116_01_4_00_0132_00_01int8__927179f46017ea5f58f859f1196c4829.html#a1afd356710b8faa29fbb9a95adf3c4eb',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 16 &gt;, 32, int8_t, layout::RowMajor, int8_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAdd 
&gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0116_01_4_00_0132_00_01uint8_a62aa63a212985df306fb27e8a50aeae.html#a0a4da68cd738577a58d77e879151a5c5',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 16 &gt;, 32, uint8_t, layout::RowMajor, int8_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAdd &gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0116_01_4_00_0132_00_01int8__5299c9c90c8f2f521be0c8cec1c3eb08.html#a11fd1120cf45028b300dc152f0ec7183',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 16 &gt;, 32, int8_t, layout::RowMajor, uint8_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAdd &gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0116_01_4_00_0132_00_01uint8_5221708cec5828d35db1d1c47cb4964e.html#a9fa63e68ec7d202cfec7e322f35f229e',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 16 &gt;, 32, uint8_t, layout::RowMajor, uint8_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAdd &gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0116_01_4_00_0132_00_01int8__8ebae0cbdf333fddfe5c24d35ebe8e02.html#a0b7706ae320c1afb29a00500ce000d73',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 16 &gt;, 32, int8_t, layout::RowMajor, int8_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAddSaturate &gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0116_01_4_00_0132_00_01uint8_ab741d81fdc991345cb9e43c29fca573.html#ad945842d0bb33d68a18e56d0782a4c09',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 16 &gt;, 32, uint8_t, layout::RowMajor, int8_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAddSaturate &gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0116_01_4_00_0132_00_01int8__f083347e265b1e9eea5572d86ddb6bf9.html#af24d458187fe031b34f5ad3a02b773bd',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 16 &gt;, 32, int8_t, layout::RowMajor, uint8_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAddSaturate &gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0116_01_4_00_0132_00_01uint8_bef0c048bc0f8ba2d875cb7ab26d363b.html#acd73dbd50c1ecc90898bfde1175e7c96',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 16 &gt;, 32, uint8_t, layout::RowMajor, uint8_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAddSaturate &gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0132_01_4_00_0132_00_01int4b_6e513ccbc44ae7909a60d93b9b5435b3.html#a722058d8c0d890095d05264de70d2de0',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 32 &gt;, 32, int4b_t, layout::RowMajor, int4b_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAdd &gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0132_01_4_00_0132_00_01uint4b03e3b50dbcb30d0d1ac062f3a9d5abef.html#a3ee5fce9ca3eda767a45c379f389678a',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 32 &gt;, 32, uint4b_t, layout::RowMajor, int4b_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAdd &gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0132_01_4_00_0132_00_01int4b_4746fc55e614df0016c518d3fda2677e.html#a35d3ac13044ce4cdefafb1dd859253b0',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 32 &gt;, 32, int4b_t, layout::RowMajor, uint4b_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAdd 
&gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0132_01_4_00_0132_00_01uint4bc4b6ba004e25c44bfd9266c61f937dfb.html#a24c194a830b30f2c5fb6e27b9c0c972d',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 32 &gt;, 32, uint4b_t, layout::RowMajor, uint4b_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAdd &gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0132_01_4_00_0132_00_01int4b_0ee08a4520882d24ba9026879265e892.html#a98674b2fc7f81fd5352c7d4805c751e5',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 32 &gt;, 32, int4b_t, layout::RowMajor, int4b_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAddSaturate &gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0132_01_4_00_0132_00_01uint4b6d968039dde5c9f062ab15f90a8049fe.html#af7cf911df0e40c0ff6dff72aa423a04f',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 32 &gt;, 32, uint4b_t, layout::RowMajor, int4b_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAddSaturate &gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0132_01_4_00_0132_00_01int4b_546e9ec6de6a5970b326da6f6280f1d4.html#a8247ad4d1e963afcac682b9791b6da47',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 32 &gt;, 32, int4b_t, layout::RowMajor, uint4b_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAddSaturate &gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0132_01_4_00_0132_00_01uint4b451d5cf5d7e8cbbe476afe3dab5c09b2.html#a12e96c632e9b28388bc736e7b1303299',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 32 &gt;, 32, uint4b_t, layout::RowMajor, uint4b_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAddSaturate &gt;::LayoutB()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_01128_01_4_00_0132_00_01uint15918972b95027764b3a849b03075ed2b.html#a19199abbcfff67f4e32fb9246899011e',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 128 &gt;, 32, uint1b_t, layout::RowMajor, uint1b_t, layout::ColumnMajor, int, layout::RowMajor, OpXorPopc &gt;::LayoutB()'],['../classcutlass_1_1gemm_1_1device_1_1Gemm.html#ab389fd311c5763587315c210f40a11f2',1,'cutlass::gemm::device::Gemm::LayoutB()'],['../classcutlass_1_1gemm_1_1device_1_1Gemm_3_01ElementA___00_01LayoutA___00_01ElementB___00_01Layout4d0960ae6b1d1bf19e6239dbd002249c.html#a1841e0e97e59862c7a92fc8d2ab7c9bc',1,'cutlass::gemm::device::Gemm&lt; ElementA_, LayoutA_, ElementB_, LayoutB_, ElementC_, layout::ColumnMajor, ElementAccumulator_, OperatorClass_, ArchTag_, ThreadblockShape_, WarpShape_, InstructionShape_, EpilogueOutputOp_, ThreadblockSwizzle_, Stages, AlignmentA, AlignmentB, SplitKSerial, Operator_, IsBetaZero &gt;::LayoutB()'],['../classcutlass_1_1gemm_1_1device_1_1GemmBatched.html#acb489f1bb4fed9e4314a6b6a3cbd04a9',1,'cutlass::gemm::device::GemmBatched::LayoutB()'],['../classcutlass_1_1gemm_1_1device_1_1GemmBatched_3_01ElementA___00_01LayoutA___00_01ElementB___00_0c9bb6f4463ab6085e6008b5d5ad6abfd.html#a4aaaa6ca0e4b9f983fe37b4105fd058f',1,'cutlass::gemm::device::GemmBatched&lt; ElementA_, LayoutA_, ElementB_, LayoutB_, ElementC_, layout::ColumnMajor, ElementAccumulator_, OperatorClass_, ArchTag_, ThreadblockShape_, WarpShape_, InstructionShape_, EpilogueOutputOp_, ThreadblockSwizzle_, Stages, AlignmentA, AlignmentB, Operator_ 
&gt;::LayoutB()'],['../classcutlass_1_1gemm_1_1device_1_1GemmComplex.html#a28c08233c5cb150326f506e4dd1f638a',1,'cutlass::gemm::device::GemmComplex::LayoutB()'],['../classcutlass_1_1gemm_1_1device_1_1GemmComplex_3_01ElementA___00_01LayoutA___00_01ElementB___00_07c56401b4df75709ae636675d9980a9a.html#a90e18e93d96cd07f03201134d3c1b5a0',1,'cutlass::gemm::device::GemmComplex&lt; ElementA_, LayoutA_, ElementB_, LayoutB_, ElementC_, layout::ColumnMajor, ElementAccumulator_, OperatorClass_, ArchTag_, ThreadblockShape_, WarpShape_, InstructionShape_, EpilogueOutputOp_, ThreadblockSwizzle_, Stages, TransformA, TransformB, SplitKSerial &gt;::LayoutB()'],['../classcutlass_1_1gemm_1_1device_1_1GemmSplitKParallel.html#a08550ee41dd02322c514364035f5d098',1,'cutlass::gemm::device::GemmSplitKParallel::LayoutB()'],['../classcutlass_1_1gemm_1_1device_1_1GemmSplitKParallel_3_01ElementA___00_01LayoutA___00_01ElementBbe7c1f7154ad5b5bf9d4d28301e2b457.html#ad3783855d4101f59892e1af5024288ff',1,'cutlass::gemm::device::GemmSplitKParallel&lt; ElementA_, LayoutA_, ElementB_, LayoutB_, ElementC_, layout::ColumnMajor, ElementAccumulator_, OperatorClass_, ArchTag_, ThreadblockShape_, WarpShape_, InstructionShape_, EpilogueOutputOp_, ConvertScaledOp_, ReductionOp_, ThreadblockSwizzle_, Stages, kAlignmentA, kAlignmentB, Operator_ &gt;::LayoutB()'],['../structcutlass_1_1gemm_1_1kernel_1_1DefaultGemm_3_01ElementA_00_01layout_1_1ColumnMajorInterleave661fe54d13cc2c9153dcdf31e4beaa30.html#a8a5df77ecc67dd02845c94037d1189ab',1,'cutlass::gemm::kernel::DefaultGemm&lt; ElementA, layout::ColumnMajorInterleaved&lt; InterleavedK &gt;, kAlignmentA, ElementB, layout::RowMajorInterleaved&lt; InterleavedK &gt;, kAlignmentB, ElementC, layout::ColumnMajorInterleaved&lt; InterleavedK &gt;, int32_t, arch::OpClassTensorOp, arch::Sm75, ThreadblockShape, WarpShape, InstructionShape, EpilogueOutputOp, ThreadblockSwizzle, 2, SplitKSerial, Operator, IsBetaZero &gt;::LayoutB()'],['../structcutlass_1_1gemm_1_1kernel_1_1DefaultGemv.html#ab745507f3927cdf7c657d4869de7cdc7',1,'cutlass::gemm::kernel::DefaultGemv::LayoutB()'],['../structcutlass_1_1gemm_1_1thread_1_1MmaGeneric.html#a9f0bf5d5870ae9a6524b425fda31a2c6',1,'cutlass::gemm::thread::MmaGeneric::LayoutB()'],['../structcutlass_1_1gemm_1_1thread_1_1Mma_3_01Shape___00_01ElementA___00_01LayoutA___00_01ElementB_e41c1cd6078b6d1347fac239b0639d56.html#a0e71571693f24560bdba20fbd2ea1a77',1,'cutlass::gemm::thread::Mma&lt; Shape_, ElementA_, LayoutA_, ElementB_, LayoutB_, ElementC_, LayoutC_, arch::OpMultiplyAdd, bool &gt;::LayoutB()'],['../structcutlass_1_1gemm_1_1thread_1_1Mma_3_01Shape___00_01half__t_00_01LayoutA___00_01half__t_00_088f0e99e501b6012297eb30b4e89bcea.html#a28b637c1f311310a27b39c44e89e698e',1,'cutlass::gemm::thread::Mma&lt; Shape_, half_t, LayoutA_, half_t, LayoutB_, half_t, layout::RowMajor, arch::OpMultiplyAdd, typename platform::enable_if&lt; detail::EnableMma_Crow_SM60&lt; LayoutA_, LayoutB_ &gt;::value &gt;::type &gt;::LayoutB()'],['../structcutlass_1_1gemm_1_1thread_1_1Mma_3_01Shape___00_01int8__t_00_01layout_1_1RowMajor_00_01int89c659e7faf47264972bdba6cd80f42b.html#a2f2ab8637971d2837beec904d4e1c09f',1,'cutlass::gemm::thread::Mma&lt; Shape_, int8_t, layout::RowMajor, int8_t, layout::ColumnMajor, int32_t, LayoutC_, arch::OpMultiplyAdd, bool &gt;::LayoutB()'],['../structcutlass_1_1gemm_1_1thread_1_1Mma_3_01Shape___00_01int8__t_00_01layout_1_1ColumnMajor_00_013f3785e722edc6e9aab6f866309b8623.html#ab4fdf251bf699a1e5972385d66f8220d',1,'cutlass::gemm::thread::Mma&lt; Shape_, int8_t, 
layout::ColumnMajor, int8_t, layout::RowMajor, int32_t, LayoutC_, arch::OpMultiplyAdd, int8_t &gt;::LayoutB()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultGemvCore.html#a9d8c09c8241b5bbee9cda92bfc2a7783',1,'cutlass::gemm::threadblock::DefaultGemvCore::LayoutB()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmShafafd5c61db86cbfe90863578ddd11092.html#a8ac8547ec04936655980a0e5f270815a',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 1 &gt;, ElementA_, layout::ColumnMajor, ElementB_, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LayoutB()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha8da7a0cfbbe859b701fdd9f2b8566aa7.html#acd277c26115341aa9dd95f682cd98bed',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 1 &gt;, ElementA_, layout::RowMajor, ElementB_, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LayoutB()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha84e9f8afb6a4ca9f5dcd219b182d16e7.html#ac2c4bacb4c37e02bd29db3c5e76232ff',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 1 &gt;, ElementA_, layout::RowMajor, ElementB_, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LayoutB()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmShab94a11a77dd0565102710907089acee0.html#a500021e29b6263b4faf4ac8d9011c5be',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 1 &gt;, ElementA_, layout::ColumnMajor, ElementB_, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LayoutB()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha34a52cc7b2942e8c290f0032b6779b52.html#a034033ba6eb8d4a013536bd6bba4d289',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 4 &gt;, int8_t, layout::ColumnMajor, int8_t, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LayoutB()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmShaaf312aafe9da92ea9d417bcc12a8e7dc.html#a44f471eebd05eb3c24fb6de9bbb56a95',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 4 &gt;, int8_t, layout::RowMajor, int8_t, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LayoutB()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha863d4139ccaa713bc4bde32c425f4067.html#a9dc63d44b2cf0b8f6c7e93f3234e65e9',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 4 &gt;, int8_t, layout::RowMajor, int8_t, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LayoutB()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha2c0d0b7cdb5c4bcb11e83c058eb65345.html#ad74d9da21f4555f6181837e64a7450dc',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 4 &gt;, int8_t, layout::ColumnMajor, int8_t, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ 
&gt;::LayoutB()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha46446d1e3871e31d2e728f710d78c8c1.html#a318ba4d89ec5915d8de4a9685cfa59cf',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 1 &gt;, ElementA_, layout::ColumnMajor, ElementB_, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_, &gt;::LayoutB()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha69bef08ea63dd930f99d9788105873dd.html#abff453e33928281f73a7ee5616b77392',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 8, 8, 4 &gt;, ElementA_, layout::ColumnMajor, ElementB_, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_ &gt;::LayoutB()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha3adf608332a8c9ee7014fced0da8a9ca.html#a8c2b354ec13e3c57afe63ac396b2604e',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 8, 8, 4 &gt;, ElementA_, layout::RowMajor, ElementB_, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_ &gt;::LayoutB()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmShab7edfba3cdf43a07e3c4d719d87565a4.html#a5ab978237d60ced612657219ddd982d1',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 8, 8, 4 &gt;, ElementA_, layout::RowMajor, ElementB_, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_ &gt;::LayoutB()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmShaf03a122202ad10acdc96f280106d678b.html#aefac10886e9f063655176c734491def5',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 8, 8, 4 &gt;, ElementA_, layout::ColumnMajor, ElementB_, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_ &gt;::LayoutB()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01Instrucf60fe02fcdd80d28b7fd419133465dcc.html#a08ef646c5303e2823bf8a63ee43eb58e',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, InstructionShape_, ElementA_, layout::ColumnMajor, ElementB_, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_ &gt;::LayoutB()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01Instruc24092ddc01fc83dabb7db4c14880fe60.html#ac9b019566edb0e60ecec7ad432b59b67',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, InstructionShape_, ElementA_, layout::RowMajor, ElementB_, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_ &gt;::LayoutB()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01Instruc4fee9f2965b8468bfb42b94a74527d22.html#a1cb7bd430843d57f17ecaf76a1eed0fa',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, InstructionShape_, ElementA_, layout::RowMajor, ElementB_, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_ &gt;::LayoutB()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01Instruc803d38bc1e4618c07c47f54c87ae2678.html#a463363f5ddb187622ce3c5c2ec550128',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, InstructionShape_, ElementA_, layout::ColumnMajor, ElementB_, 
layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_ &gt;::LayoutB()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01Instruc2bf00737f4ad0a9da9a8be6d3e66c152.html#a8b2fa2419d5000c4177542907b8aa5cd',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, InstructionShape_, ElementA_, layout::ColumnMajorInterleaved&lt; InterleavedK &gt;, ElementB_, layout::RowMajorInterleaved&lt; InterleavedK &gt;, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_, AccumulatorsInRowMajor &gt;::LayoutB()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaComplexTensorOp_3_01Shape___00_01complex_3_01RealElementA_01_146441010dad1f40eb51b6dae3ded216.html#a7f2b34a8389659ce02fbeeae5a4fc8c3',1,'cutlass::gemm::warp::MmaComplexTensorOp&lt; Shape_, complex&lt; RealElementA &gt;, LayoutA_, complex&lt; RealElementB &gt;, LayoutB_, complex&lt; RealElementC &gt;, LayoutC_, Policy_, TransformA, TransformB, Enable &gt;::LayoutB()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaSimt.html#a0634ec4b52cb6f42c9e549d866985fc1',1,'cutlass::gemm::warp::MmaSimt::LayoutB()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaTensorOp.html#af94b734ca46b1e68a273b35f49155e16',1,'cutlass::gemm::warp::MmaTensorOp::LayoutB()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOp.html#ab4964d7a2cd0d0f77bae1c053a6a0528',1,'cutlass::gemm::warp::MmaVoltaTensorOp::LayoutB()']]], ['layoutc',['LayoutC',['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_014_01_4_00_018_00_01half__t_c7f88bfd32a544fba8111d2dcadeab11.html#a6aae6467d42a09bbac58dfb4811e97b1',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 4 &gt;, 8, half_t, layout::ColumnMajor, half_t, layout::ColumnMajor, half_t, layout::RowMajor, OpMultiplyAdd &gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_014_01_4_00_018_00_01half__t_4b7308177b308a272c1889fbe9670275.html#a24381a0e4dd6c128bcfd58577bc68f95',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 4 &gt;, 8, half_t, layout::ColumnMajor, half_t, layout::RowMajor, half_t, layout::RowMajor, OpMultiplyAdd &gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_014_01_4_00_018_00_01half__t_31defda8ea2b7d855642ffd77da1a411.html#a67b008d0407ce4f2e39c50eee5384431',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 4 &gt;, 8, half_t, layout::RowMajor, half_t, layout::ColumnMajor, half_t, layout::RowMajor, OpMultiplyAdd &gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_014_01_4_00_018_00_01half__t_73d9802d6b944a5299bc255887db6bbc.html#a5041e9e3d302946d7700705fa8697d02',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 4 &gt;, 8, half_t, layout::RowMajor, half_t, layout::RowMajor, half_t, layout::RowMajor, OpMultiplyAdd &gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_014_01_4_00_018_00_01half__t_b0242d7a01097510effbc4718040d3e5.html#af0681dd564da1c3048e6ddf0b5fe607a',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 4 &gt;, 8, half_t, layout::ColumnMajor, half_t, layout::ColumnMajor, float, layout::RowMajor, OpMultiplyAdd &gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_014_01_4_00_018_00_01half__t_44a3b2a8df88a2b067f1284515cb5371.html#aabf6ea4c69e52871d492b95e554daa30',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 4 &gt;, 8, half_t, layout::ColumnMajor, half_t, layout::RowMajor, float, layout::RowMajor, OpMultiplyAdd 
&gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_014_01_4_00_018_00_01half__t_5a9888862cebd333ecaf11f7262f77d4.html#a0c58554cd98046f9b5c080aac90d4514',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 4 &gt;, 8, half_t, layout::RowMajor, half_t, layout::ColumnMajor, float, layout::RowMajor, OpMultiplyAdd &gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_014_01_4_00_018_00_01half__t_839a7c8bb938d1661f4611e68f85d8cb.html#a53e76c274de0899677f740ead0f2a609',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 4 &gt;, 8, half_t, layout::RowMajor, half_t, layout::RowMajor, float, layout::RowMajor, OpMultiplyAdd &gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_0116_00_018_00_018_01_4_00_0132_00_01half__96363097c47b056f0ca1911afd7f8b7a.html#a48fee34b66d29fd4fcfbd7b5756533cc',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 16, 8, 8 &gt;, 32, half_t, layout::RowMajor, half_t, layout::ColumnMajor, half_t, layout::RowMajor, OpMultiplyAdd &gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_0116_00_018_00_018_01_4_00_0132_00_01half__02a3f19a78995f97d793a668e0e4d4f0.html#a23200300c2c395289b0569d530ea4547',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 16, 8, 8 &gt;, 32, half_t, layout::RowMajor, half_t, layout::ColumnMajor, float, layout::RowMajor, OpMultiplyAdd &gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0116_01_4_00_0132_00_01int8__927179f46017ea5f58f859f1196c4829.html#aa5253809da540990f803709b6c808658',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 16 &gt;, 32, int8_t, layout::RowMajor, int8_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAdd &gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0116_01_4_00_0132_00_01uint8_a62aa63a212985df306fb27e8a50aeae.html#a4852419db439763a5a23fe6f31b280de',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 16 &gt;, 32, uint8_t, layout::RowMajor, int8_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAdd &gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0116_01_4_00_0132_00_01int8__5299c9c90c8f2f521be0c8cec1c3eb08.html#ac916725a76608219b33fff205ffc7fd6',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 16 &gt;, 32, int8_t, layout::RowMajor, uint8_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAdd &gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0116_01_4_00_0132_00_01uint8_5221708cec5828d35db1d1c47cb4964e.html#a03cc19274dfa50600ab4d1829d69bd12',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 16 &gt;, 32, uint8_t, layout::RowMajor, uint8_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAdd &gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0116_01_4_00_0132_00_01int8__8ebae0cbdf333fddfe5c24d35ebe8e02.html#a9e24efa27ed21ebbb29f9d812f91b8c5',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 16 &gt;, 32, int8_t, layout::RowMajor, int8_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAddSaturate &gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0116_01_4_00_0132_00_01uint8_ab741d81fdc991345cb9e43c29fca573.html#aabc210827f1b8bd9a7ee03060106a989',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 16 &gt;, 32, uint8_t, layout::RowMajor, int8_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAddSaturate 
&gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0116_01_4_00_0132_00_01int8__f083347e265b1e9eea5572d86ddb6bf9.html#a34b3a513bb412f9c84d4051d3ff8fbd1',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 16 &gt;, 32, int8_t, layout::RowMajor, uint8_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAddSaturate &gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0116_01_4_00_0132_00_01uint8_bef0c048bc0f8ba2d875cb7ab26d363b.html#ad4f20652dacbc5af8553a9985bedef71',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 16 &gt;, 32, uint8_t, layout::RowMajor, uint8_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAddSaturate &gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0132_01_4_00_0132_00_01int4b_6e513ccbc44ae7909a60d93b9b5435b3.html#a07799cfaa15dd4af93e140ae952af86b',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 32 &gt;, 32, int4b_t, layout::RowMajor, int4b_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAdd &gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0132_01_4_00_0132_00_01uint4b03e3b50dbcb30d0d1ac062f3a9d5abef.html#aa31f949152771a58452ce6442fea2a04',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 32 &gt;, 32, uint4b_t, layout::RowMajor, int4b_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAdd &gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0132_01_4_00_0132_00_01int4b_4746fc55e614df0016c518d3fda2677e.html#a5ef6b7ff2696d4cd47fcfbaeb24adc22',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 32 &gt;, 32, int4b_t, layout::RowMajor, uint4b_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAdd &gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0132_01_4_00_0132_00_01uint4bc4b6ba004e25c44bfd9266c61f937dfb.html#a7d41b2e8b9599e0c822a3933949dd1b0',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 32 &gt;, 32, uint4b_t, layout::RowMajor, uint4b_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAdd &gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0132_01_4_00_0132_00_01int4b_0ee08a4520882d24ba9026879265e892.html#a2dae92055d9f39f58de043da90d12b03',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 32 &gt;, 32, int4b_t, layout::RowMajor, int4b_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAddSaturate &gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0132_01_4_00_0132_00_01uint4b6d968039dde5c9f062ab15f90a8049fe.html#abd5c18542cc29c61fbd8cc4a2cb65ac3',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 32 &gt;, 32, uint4b_t, layout::RowMajor, int4b_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAddSaturate &gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0132_01_4_00_0132_00_01int4b_546e9ec6de6a5970b326da6f6280f1d4.html#a7f427c5eeb7461c070494bbc6fbbba0d',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 32 &gt;, 32, int4b_t, layout::RowMajor, uint4b_t, layout::ColumnMajor, int, layout::RowMajor, OpMultiplyAddSaturate &gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_0132_01_4_00_0132_00_01uint4b451d5cf5d7e8cbbe476afe3dab5c09b2.html#a8eee05508c2cb2ef9543d00ab1b18d15',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 32 &gt;, 32, uint4b_t, layout::RowMajor, uint4b_t, layout::ColumnMajor, int, layout::RowMajor, 
OpMultiplyAddSaturate &gt;::LayoutC()'],['../structcutlass_1_1arch_1_1Mma_3_01gemm_1_1GemmShape_3_018_00_018_00_01128_01_4_00_0132_00_01uint15918972b95027764b3a849b03075ed2b.html#a544ceebbea23bbefa1aed667f02ac2c3',1,'cutlass::arch::Mma&lt; gemm::GemmShape&lt; 8, 8, 128 &gt;, 32, uint1b_t, layout::RowMajor, uint1b_t, layout::ColumnMajor, int, layout::RowMajor, OpXorPopc &gt;::LayoutC()'],['../structcutlass_1_1epilogue_1_1threadblock_1_1DefaultEpilogueComplexTensorOp.html#a89a49a6d25f358cffe669ece2abe2f11',1,'cutlass::epilogue::threadblock::DefaultEpilogueComplexTensorOp::LayoutC()'],['../structcutlass_1_1epilogue_1_1threadblock_1_1DefaultEpilogueSimt.html#aa9a4df44226930d4a3865b42ce8f68ad',1,'cutlass::epilogue::threadblock::DefaultEpilogueSimt::LayoutC()'],['../structcutlass_1_1epilogue_1_1threadblock_1_1DefaultEpilogueTensorOp.html#afd0f305199038a1da8197e1e3884c390',1,'cutlass::epilogue::threadblock::DefaultEpilogueTensorOp::LayoutC()'],['../structcutlass_1_1epilogue_1_1threadblock_1_1DefaultInterleavedEpilogueTensorOp.html#a8b1b6d953b952165804d9eb742343b13',1,'cutlass::epilogue::threadblock::DefaultInterleavedEpilogueTensorOp::LayoutC()'],['../structcutlass_1_1epilogue_1_1threadblock_1_1DefaultEpilogueVoltaTensorOp.html#afe6ace14e2e4708042c9be74fce5ee28',1,'cutlass::epilogue::threadblock::DefaultEpilogueVoltaTensorOp::LayoutC()'],['../structcutlass_1_1epilogue_1_1threadblock_1_1DefaultEpilogueWmmaTensorOp.html#a37f9240209d3a0f2b17ea9a4b1f0819c',1,'cutlass::epilogue::threadblock::DefaultEpilogueWmmaTensorOp::LayoutC()'],['../classcutlass_1_1gemm_1_1device_1_1Gemm.html#a93acaff17a5916bc11040a9ddf04d9f2',1,'cutlass::gemm::device::Gemm::LayoutC()'],['../classcutlass_1_1gemm_1_1device_1_1Gemm_3_01ElementA___00_01LayoutA___00_01ElementB___00_01Layout4d0960ae6b1d1bf19e6239dbd002249c.html#afe4685fea6a4603a7459bbe9923c9cb3',1,'cutlass::gemm::device::Gemm&lt; ElementA_, LayoutA_, ElementB_, LayoutB_, ElementC_, layout::ColumnMajor, ElementAccumulator_, OperatorClass_, ArchTag_, ThreadblockShape_, WarpShape_, InstructionShape_, EpilogueOutputOp_, ThreadblockSwizzle_, Stages, AlignmentA, AlignmentB, SplitKSerial, Operator_, IsBetaZero &gt;::LayoutC()'],['../classcutlass_1_1gemm_1_1device_1_1GemmBatched.html#af35efd1f40deeb9d8e295f700fa84dbd',1,'cutlass::gemm::device::GemmBatched::LayoutC()'],['../classcutlass_1_1gemm_1_1device_1_1GemmBatched_3_01ElementA___00_01LayoutA___00_01ElementB___00_0c9bb6f4463ab6085e6008b5d5ad6abfd.html#aed31a68c08cbfe9bf32d788be3f41679',1,'cutlass::gemm::device::GemmBatched&lt; ElementA_, LayoutA_, ElementB_, LayoutB_, ElementC_, layout::ColumnMajor, ElementAccumulator_, OperatorClass_, ArchTag_, ThreadblockShape_, WarpShape_, InstructionShape_, EpilogueOutputOp_, ThreadblockSwizzle_, Stages, AlignmentA, AlignmentB, Operator_ &gt;::LayoutC()'],['../classcutlass_1_1gemm_1_1device_1_1GemmComplex.html#aa1c616831c52a468900df39dca00ede3',1,'cutlass::gemm::device::GemmComplex::LayoutC()'],['../classcutlass_1_1gemm_1_1device_1_1GemmComplex_3_01ElementA___00_01LayoutA___00_01ElementB___00_07c56401b4df75709ae636675d9980a9a.html#af2b903fa011363e7049d5f0807b77731',1,'cutlass::gemm::device::GemmComplex&lt; ElementA_, LayoutA_, ElementB_, LayoutB_, ElementC_, layout::ColumnMajor, ElementAccumulator_, OperatorClass_, ArchTag_, ThreadblockShape_, WarpShape_, InstructionShape_, EpilogueOutputOp_, ThreadblockSwizzle_, Stages, TransformA, TransformB, SplitKSerial 
&gt;::LayoutC()'],['../classcutlass_1_1gemm_1_1device_1_1GemmSplitKParallel.html#ab2572b47bae5f0ae0f06647cfe86ad4e',1,'cutlass::gemm::device::GemmSplitKParallel::LayoutC()'],['../classcutlass_1_1gemm_1_1device_1_1GemmSplitKParallel_3_01ElementA___00_01LayoutA___00_01ElementBbe7c1f7154ad5b5bf9d4d28301e2b457.html#a33d738b2e304c974a9b77be0b176fb59',1,'cutlass::gemm::device::GemmSplitKParallel&lt; ElementA_, LayoutA_, ElementB_, LayoutB_, ElementC_, layout::ColumnMajor, ElementAccumulator_, OperatorClass_, ArchTag_, ThreadblockShape_, WarpShape_, InstructionShape_, EpilogueOutputOp_, ConvertScaledOp_, ReductionOp_, ThreadblockSwizzle_, Stages, kAlignmentA, kAlignmentB, Operator_ &gt;::LayoutC()'],['../structcutlass_1_1gemm_1_1kernel_1_1DefaultGemm_3_01ElementA_00_01layout_1_1ColumnMajorInterleave661fe54d13cc2c9153dcdf31e4beaa30.html#ade9ce4cd8a7c4b66d7c89a57e9cd9dad',1,'cutlass::gemm::kernel::DefaultGemm&lt; ElementA, layout::ColumnMajorInterleaved&lt; InterleavedK &gt;, kAlignmentA, ElementB, layout::RowMajorInterleaved&lt; InterleavedK &gt;, kAlignmentB, ElementC, layout::ColumnMajorInterleaved&lt; InterleavedK &gt;, int32_t, arch::OpClassTensorOp, arch::Sm75, ThreadblockShape, WarpShape, InstructionShape, EpilogueOutputOp, ThreadblockSwizzle, 2, SplitKSerial, Operator, IsBetaZero &gt;::LayoutC()'],['../structcutlass_1_1gemm_1_1thread_1_1MmaGeneric.html#a6982992a7471d2d93532d1b9548328b4',1,'cutlass::gemm::thread::MmaGeneric::LayoutC()'],['../structcutlass_1_1gemm_1_1thread_1_1Mma_3_01Shape___00_01ElementA___00_01LayoutA___00_01ElementB_e41c1cd6078b6d1347fac239b0639d56.html#a08a2137eb47c1caa00adaf3572c706a0',1,'cutlass::gemm::thread::Mma&lt; Shape_, ElementA_, LayoutA_, ElementB_, LayoutB_, ElementC_, LayoutC_, arch::OpMultiplyAdd, bool &gt;::LayoutC()'],['../structcutlass_1_1gemm_1_1thread_1_1Mma_3_01Shape___00_01half__t_00_01LayoutA___00_01half__t_00_088f0e99e501b6012297eb30b4e89bcea.html#a397dfb5a622d1ebe47177825194a03a9',1,'cutlass::gemm::thread::Mma&lt; Shape_, half_t, LayoutA_, half_t, LayoutB_, half_t, layout::RowMajor, arch::OpMultiplyAdd, typename platform::enable_if&lt; detail::EnableMma_Crow_SM60&lt; LayoutA_, LayoutB_ &gt;::value &gt;::type &gt;::LayoutC()'],['../structcutlass_1_1gemm_1_1thread_1_1Mma_3_01Shape___00_01int8__t_00_01layout_1_1RowMajor_00_01int89c659e7faf47264972bdba6cd80f42b.html#abccf798bd97c69887268f62cfabf2aac',1,'cutlass::gemm::thread::Mma&lt; Shape_, int8_t, layout::RowMajor, int8_t, layout::ColumnMajor, int32_t, LayoutC_, arch::OpMultiplyAdd, bool &gt;::LayoutC()'],['../structcutlass_1_1gemm_1_1thread_1_1Mma_3_01Shape___00_01int8__t_00_01layout_1_1ColumnMajor_00_013f3785e722edc6e9aab6f866309b8623.html#a5a8bef91fe385be165980f9e0cd4e453',1,'cutlass::gemm::thread::Mma&lt; Shape_, int8_t, layout::ColumnMajor, int8_t, layout::RowMajor, int32_t, LayoutC_, arch::OpMultiplyAdd, int8_t &gt;::LayoutC()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultGemvCore.html#a5774c05736aeeffee40954748bab926f',1,'cutlass::gemm::threadblock::DefaultGemvCore::LayoutC()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmShafafd5c61db86cbfe90863578ddd11092.html#a926109870af5f4d9120ce5c98bbed2b6',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 1 &gt;, ElementA_, layout::ColumnMajor, ElementB_, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ 
&gt;::LayoutC()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha8da7a0cfbbe859b701fdd9f2b8566aa7.html#abbf4045091090bcaf32905afaefb01c5',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 1 &gt;, ElementA_, layout::RowMajor, ElementB_, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LayoutC()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha84e9f8afb6a4ca9f5dcd219b182d16e7.html#a6a1ad914c597f2ced6ddb356516a1413',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 1 &gt;, ElementA_, layout::RowMajor, ElementB_, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LayoutC()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmShab94a11a77dd0565102710907089acee0.html#aeb7ff4ca2dd952e411f92e8628d6c015',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 1 &gt;, ElementA_, layout::ColumnMajor, ElementB_, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LayoutC()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha34a52cc7b2942e8c290f0032b6779b52.html#acdc66d5a5d190e94e0562eed3d3ddf30',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 4 &gt;, int8_t, layout::ColumnMajor, int8_t, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LayoutC()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmShaaf312aafe9da92ea9d417bcc12a8e7dc.html#ae3de45321d07b5b45e17b1ba7ff136ce',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 4 &gt;, int8_t, layout::RowMajor, int8_t, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LayoutC()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha863d4139ccaa713bc4bde32c425f4067.html#af7b416c742dacf93ab522a40699c6c71',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 4 &gt;, int8_t, layout::RowMajor, int8_t, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LayoutC()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha2c0d0b7cdb5c4bcb11e83c058eb65345.html#acc5cd244b86a03c2b8dd57b01545fbe8',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 4 &gt;, int8_t, layout::ColumnMajor, int8_t, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_ &gt;::LayoutC()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha46446d1e3871e31d2e728f710d78c8c1.html#a1b4266ff74d2c77270c8d51c0153a681',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 1, 1, 1 &gt;, ElementA_, layout::ColumnMajor, ElementB_, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassSimt, 2, Operator_, &gt;::LayoutC()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha69bef08ea63dd930f99d9788105873dd.html#a2fb601e219420bc2a306683811797683',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 8, 8, 4 &gt;, ElementA_, layout::ColumnMajor, ElementB_, 
layout::RowMajor, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_ &gt;::LayoutC()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmSha3adf608332a8c9ee7014fced0da8a9ca.html#a9e58fd89b08ef7ad8faa64e637d035f4',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 8, 8, 4 &gt;, ElementA_, layout::RowMajor, ElementB_, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_ &gt;::LayoutC()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmShab7edfba3cdf43a07e3c4d719d87565a4.html#a8f4be1faae9ea90acdc95f84cb341f75',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 8, 8, 4 &gt;, ElementA_, layout::RowMajor, ElementB_, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_ &gt;::LayoutC()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01GemmShaf03a122202ad10acdc96f280106d678b.html#ad16689bb335e3e45a4e907a2ca8f92ac',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, GemmShape&lt; 8, 8, 4 &gt;, ElementA_, layout::ColumnMajor, ElementB_, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_ &gt;::LayoutC()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01Instrucf60fe02fcdd80d28b7fd419133465dcc.html#a67bc3dd301ab29719b202f4fb3bb521b',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, InstructionShape_, ElementA_, layout::ColumnMajor, ElementB_, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_ &gt;::LayoutC()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01Instruc24092ddc01fc83dabb7db4c14880fe60.html#a38f2b41228f1a22e6b703df3c9be96bc',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, InstructionShape_, ElementA_, layout::RowMajor, ElementB_, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_ &gt;::LayoutC()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01Instruc4fee9f2965b8468bfb42b94a74527d22.html#a1aab3ec6af133dc7b2ecac764551fc6d',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, InstructionShape_, ElementA_, layout::RowMajor, ElementB_, layout::RowMajor, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_ &gt;::LayoutC()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01Instruc803d38bc1e4618c07c47f54c87ae2678.html#a2af3430e9915d3b64d9ed7a42384bdf4',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, InstructionShape_, ElementA_, layout::ColumnMajor, ElementB_, layout::ColumnMajor, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_ &gt;::LayoutC()'],['../structcutlass_1_1gemm_1_1threadblock_1_1DefaultMmaCore_3_01Shape___00_01WarpShape___00_01Instruc2bf00737f4ad0a9da9a8be6d3e66c152.html#a2bb671c086c1f09c235811dd38efd87d',1,'cutlass::gemm::threadblock::DefaultMmaCore&lt; Shape_, WarpShape_, InstructionShape_, ElementA_, layout::ColumnMajorInterleaved&lt; InterleavedK &gt;, ElementB_, layout::RowMajorInterleaved&lt; InterleavedK &gt;, ElementC_, LayoutC_, arch::OpClassTensorOp, 2, Operator_, AccumulatorsInRowMajor 
&gt;::LayoutC()'],['../classcutlass_1_1gemm_1_1threadblock_1_1MmaPipelined.html#a3bfd1d89c66d37d5a81d36262ad333d7',1,'cutlass::gemm::threadblock::MmaPipelined::LayoutC()'],['../classcutlass_1_1gemm_1_1threadblock_1_1MmaSingleStage.html#a2aff65c0191d6cf98b80088d41758090',1,'cutlass::gemm::threadblock::MmaSingleStage::LayoutC()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaComplexTensorOp_3_01Shape___00_01complex_3_01RealElementA_01_146441010dad1f40eb51b6dae3ded216.html#acf4d11de96716652758b60ba06d641cc',1,'cutlass::gemm::warp::MmaComplexTensorOp&lt; Shape_, complex&lt; RealElementA &gt;, LayoutA_, complex&lt; RealElementB &gt;, LayoutB_, complex&lt; RealElementC &gt;, LayoutC_, Policy_, TransformA, TransformB, Enable &gt;::LayoutC()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaSimt.html#a02df31d6419aba3fed4e5aa27260bd53',1,'cutlass::gemm::warp::MmaSimt::LayoutC()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaTensorOp.html#a77dea454b02646c21c7e9d3f340b9bdb',1,'cutlass::gemm::warp::MmaTensorOp::LayoutC()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOp.html#aafda08a1d0480ae202fc3a66203e4a16',1,'cutlass::gemm::warp::MmaVoltaTensorOp::LayoutC()']]], ['layoutcd',['LayoutCD',['../structcutlass_1_1gemm_1_1kernel_1_1DefaultGemv.html#a396e94326248f8819ae3593aa70b40bb',1,'cutlass::gemm::kernel::DefaultGemv']]], ['ldsiterations',['LdsIterations',['../structcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOpMultiplicandTileIterator_3_01Shape___00_01Opera33cdf53848564e894d4407637dc86caf.html#a5a872b0e23893fdf63fdac34ef4145a7',1,'cutlass::gemm::warp::MmaVoltaTensorOpMultiplicandTileIterator&lt; Shape_, Operand::kA, Element_, cutlass::layout::VoltaTensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value &gt;, InstructionShape_, OpDelta_, 32 &gt;::Policy::LdsIterations()'],['../structcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOpMultiplicandTileIterator_3_01Shape___00_01Opera6fa6d2d3725bb3ec613d5c527ea3ffe7.html#a59d5b0e90268306f6c164659c7dc4626',1,'cutlass::gemm::warp::MmaVoltaTensorOpMultiplicandTileIterator&lt; Shape_, Operand::kB, Element_, cutlass::layout::VoltaTensorOpMultiplicandBCongruous&lt; sizeof_bits&lt; Element_ &gt;::value &gt;, InstructionShape_, OpDelta_, 32 &gt;::Policy::LdsIterations()'],['../structcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operafa294175b280756dd8388f9ffe7b72c4.html#ae2e710c2cd5c735062eef6da0a99ab9a',1,'cutlass::gemm::warp::MmaVoltaTensorOpMultiplicandTileIterator&lt; Shape_, Operand_, Element_, cutlass::layout::VoltaTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, KBlock &gt;, InstructionShape_, OpDelta_, 32 &gt;::Policy::LdsIterations()']]], ['ldsmiterations',['LdsmIterations',['../structcutlass_1_1gemm_1_1warp_1_1MmaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operand___07638f8b7761f6e2e2e6918e2c05e739.html#a8672f4cac4739f8c6a1212d5260575d7',1,'cutlass::gemm::warp::MmaTensorOpMultiplicandTileIterator&lt; Shape_, Operand_, Element_, cutlass::layout::TensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value, 64 &gt;, InstructionShape_, OpDelta_, 32, PartitionsK_ &gt;::Policy::LdsmIterations()'],['../structcutlass_1_1gemm_1_1warp_1_1MmaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operand___0784c74bd670999ec23ad8ef9dc55777.html#a87a27eed18b2cdac298d5bb0e9b9b0f2',1,'cutlass::gemm::warp::MmaTensorOpMultiplicandTileIterator&lt; Shape_, Operand_, Element_, cutlass::layout::TensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Crosswise &gt;, InstructionShape_, OpDelta_, 32, 
PartitionsK_ &gt;::Policy::LdsmIterations()']]], ['ldsmshape',['LdsmShape',['../structcutlass_1_1gemm_1_1warp_1_1MmaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operand___07638f8b7761f6e2e2e6918e2c05e739.html#a0008b4520085f127411ea8233de4b9db',1,'cutlass::gemm::warp::MmaTensorOpMultiplicandTileIterator&lt; Shape_, Operand_, Element_, cutlass::layout::TensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value, 64 &gt;, InstructionShape_, OpDelta_, 32, PartitionsK_ &gt;::Policy::LdsmShape()'],['../structcutlass_1_1gemm_1_1warp_1_1MmaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operand___0784c74bd670999ec23ad8ef9dc55777.html#ab918363f3c1f0e516f30b636d1b7650c',1,'cutlass::gemm::warp::MmaTensorOpMultiplicandTileIterator&lt; Shape_, Operand_, Element_, cutlass::layout::TensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Crosswise &gt;, InstructionShape_, OpDelta_, 32, PartitionsK_ &gt;::Policy::LdsmShape()']]], ['ldsshape',['LdsShape',['../structcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOpMultiplicandTileIterator_3_01Shape___00_01Opera33cdf53848564e894d4407637dc86caf.html#a9824e494b5044736123d3e834ab0ccf6',1,'cutlass::gemm::warp::MmaVoltaTensorOpMultiplicandTileIterator&lt; Shape_, Operand::kA, Element_, cutlass::layout::VoltaTensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value &gt;, InstructionShape_, OpDelta_, 32 &gt;::Policy::LdsShape()'],['../structcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOpMultiplicandTileIterator_3_01Shape___00_01Opera6fa6d2d3725bb3ec613d5c527ea3ffe7.html#aa04730d0ce210762957ac3096aaff2b5',1,'cutlass::gemm::warp::MmaVoltaTensorOpMultiplicandTileIterator&lt; Shape_, Operand::kB, Element_, cutlass::layout::VoltaTensorOpMultiplicandBCongruous&lt; sizeof_bits&lt; Element_ &gt;::value &gt;, InstructionShape_, OpDelta_, 32 &gt;::Policy::LdsShape()'],['../structcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operafa294175b280756dd8388f9ffe7b72c4.html#aa4d35de13746a7faeaa527cc5905344f',1,'cutlass::gemm::warp::MmaVoltaTensorOpMultiplicandTileIterator&lt; Shape_, Operand_, Element_, cutlass::layout::VoltaTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, KBlock &gt;, InstructionShape_, OpDelta_, 32 &gt;::Policy::LdsShape()']]], ['longindex',['LongIndex',['../structcutlass_1_1Coord.html#ab61db7c2bfacaf0b7ce465e70d48c44f',1,'cutlass::Coord::LongIndex()'],['../classcutlass_1_1epilogue_1_1threadblock_1_1Epilogue.html#a8a5f856300412017f5c12082af70b82f',1,'cutlass::epilogue::threadblock::Epilogue::LongIndex()'],['../classcutlass_1_1epilogue_1_1threadblock_1_1PredicatedTileIterator.html#a479672c177874980a3ccf436ae7946d5',1,'cutlass::epilogue::threadblock::PredicatedTileIterator::LongIndex()'],['../classcutlass_1_1epilogue_1_1threadblock_1_1InterleavedPredicatedTileIterator.html#ab340cbc86fcd16ac7c380dac35f5c699',1,'cutlass::epilogue::threadblock::InterleavedPredicatedTileIterator::LongIndex()'],['../classcutlass_1_1epilogue_1_1threadblock_1_1SharedLoadIterator.html#ae5daed317dfa5a63b781f4900454bcd3',1,'cutlass::epilogue::threadblock::SharedLoadIterator::LongIndex()'],['../classcutlass_1_1epilogue_1_1warp_1_1TileIteratorSimt_3_01WarpShape___00_01Operator___00_01Elemenf2bd262ed3e202b25d5802d83965bf3b.html#a5c553a18b6215ed1158e863851be631a',1,'cutlass::epilogue::warp::TileIteratorSimt&lt; WarpShape_, Operator_, Element_, layout::RowMajor, MmaSimtPolicy_ 
&gt;::LongIndex()'],['../classcutlass_1_1epilogue_1_1warp_1_1TileIteratorTensorOp_3_01WarpShape___00_01OperatorShape___003cbb32beb84b4984cb7853662096d289.html#a38473d6ddc3a0eab3fde84840611e2d4',1,'cutlass::epilogue::warp::TileIteratorTensorOp&lt; WarpShape_, OperatorShape_, Element_, layout::RowMajor &gt;::LongIndex()'],['../classcutlass_1_1epilogue_1_1warp_1_1TileIteratorVoltaTensorOp_3_01WarpShape___00_01gemm_1_1GemmSa0ceeeddc22575876eb977da7f5416a8.html#acca8fbc04b64161456135dae7f44d40c',1,'cutlass::epilogue::warp::TileIteratorVoltaTensorOp&lt; WarpShape_, gemm::GemmShape&lt; 32, 32, 4 &gt;, half_t, layout::RowMajor &gt;::LongIndex()'],['../classcutlass_1_1epilogue_1_1warp_1_1TileIteratorVoltaTensorOp_3_01WarpShape___00_01gemm_1_1GemmS2fe0c60b727c738c622c18fc3dd76644.html#a5d111ad4fdbb7dfde82edb39b8d90cbc',1,'cutlass::epilogue::warp::TileIteratorVoltaTensorOp&lt; WarpShape_, gemm::GemmShape&lt; 32, 32, 4 &gt;, float, layout::RowMajor &gt;::LongIndex()'],['../classcutlass_1_1epilogue_1_1warp_1_1TileIteratorWmmaTensorOp_3_01WarpShape___00_01OperatorShape_fd6a91cd8bbd07ecd1344326b830e3a4.html#a8dac1c533b728593e7552dfea06c96f4',1,'cutlass::epilogue::warp::TileIteratorWmmaTensorOp&lt; WarpShape_, OperatorShape_, OperatorFragment_, layout::RowMajor &gt;::LongIndex()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaSimtTileIterator_3_01Shape___00_01Operand_1_1kA_00_01Element_67ca7e11a38e38f2c51b84767654a90f.html#a9385fb64b984f44eaae51221bec81d18',1,'cutlass::gemm::warp::MmaSimtTileIterator&lt; Shape_, Operand::kA, Element_, layout::ColumnMajor, Policy_, PartitionsK, PartitionGroupSize &gt;::LongIndex()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaSimtTileIterator_3_01Shape___00_01Operand_1_1kB_00_01Element_ea0a4e7ce3cd5d25cabf79383efdf4d9.html#adbeb76bf10056fe3184e7625aa06637a',1,'cutlass::gemm::warp::MmaSimtTileIterator&lt; Shape_, Operand::kB, Element_, layout::RowMajor, Policy_, PartitionsK, PartitionGroupSize &gt;::LongIndex()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaSimtTileIterator_3_01Shape___00_01Operand_1_1kC_00_01Element_4ccafbc821b3a55cd532602442a74031.html#adf1a4e12a7145fb27d6005ba6383a5a6',1,'cutlass::gemm::warp::MmaSimtTileIterator&lt; Shape_, Operand::kC, Element_, layout::ColumnMajor, Policy_ &gt;::LongIndex()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaSimtTileIterator_3_01Shape___00_01Operand_1_1kC_00_01Element_8f92ea79e85febb67169c4b2d94b1b20.html#a054e64cf098e4b7fca05b3cdd128d9c2',1,'cutlass::gemm::warp::MmaSimtTileIterator&lt; Shape_, Operand::kC, Element_, layout::RowMajor, Policy_ &gt;::LongIndex()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaSimtTileIterator_3_01Shape___00_01Operand_1_1kA_00_01Element_f0ce904a9294556f15e1cc9cf7c99a93.html#a9001b40c9c441108dea96e858b8bf830',1,'cutlass::gemm::warp::MmaSimtTileIterator&lt; Shape_, Operand::kA, Element_, layout::ColumnMajorInterleaved&lt; 4 &gt;, Policy_, PartitionsK, PartitionGroupSize &gt;::LongIndex()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaSimtTileIterator_3_01Shape___00_01Operand_1_1kB_00_01Element_ada156b62fcbdce47009c5bf1321c92c.html#aba90fdcfb884cebce717720a2f2fdbc3',1,'cutlass::gemm::warp::MmaSimtTileIterator&lt; Shape_, Operand::kB, Element_, layout::RowMajorInterleaved&lt; 4 &gt;, Policy_, PartitionsK, PartitionGroupSize &gt;::LongIndex()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operand___0ed7daaeba1c095e77f68533d4d2c475c.html#a6f687d886a3e6eb106bfd59298e6f5c6',1,'cutlass::gemm::warp::MmaTensorOpMultiplicandTileIterator&lt; Shape_, Operand_, Element_, 
cutlass::layout::TensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value, 64 &gt;, InstructionShape_, OpDelta_, 32, PartitionsK_ &gt;::LongIndex()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operand___0b84f53cd44b339eccc12067c9f86e11c.html#a1d168c4b070f87a65bf900e7a63e14f3',1,'cutlass::gemm::warp::MmaTensorOpMultiplicandTileIterator&lt; Shape_, Operand_, Element_, cutlass::layout::ColumnMajorTensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value, int(128/sizeof(Element_))&gt;, InstructionShape_, OpDelta_, 32, PartitionsK_ &gt;::LongIndex()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operand___039819fb3ccd43786d556c2c9669508ef.html#a86cfe634c08dbf97d12b62eeaff1fa2c',1,'cutlass::gemm::warp::MmaTensorOpMultiplicandTileIterator&lt; Shape_, Operand_, Element_, cutlass::layout::RowMajorTensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value, int(128/sizeof(Element_))&gt;, InstructionShape_, OpDelta_, 32, PartitionsK_ &gt;::LongIndex()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operand___0c7d419c589d601ce4eb603be566fea21.html#ab4334bc63da845c9606103e04f389a6d',1,'cutlass::gemm::warp::MmaTensorOpMultiplicandTileIterator&lt; Shape_, Operand_, Element_, cutlass::layout::TensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Crosswise &gt;, InstructionShape_, OpDelta_, 32, PartitionsK_ &gt;::LongIndex()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operand___0e52ad425e1ee3e68544873f66733237b.html#a2deff3a17ae49f6b0c6f7d46f3296144',1,'cutlass::gemm::warp::MmaTensorOpMultiplicandTileIterator&lt; Shape_, Operand_, Element_, cutlass::layout::ColumnMajorTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Crosswise &gt;, InstructionShape_, OpDelta_, 32, PartitionsK_ &gt;::LongIndex()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operand___0352e0dcab42bc8360606874e00173556.html#ad9e19bef825c4f9eb203ad22149a79a5',1,'cutlass::gemm::warp::MmaTensorOpMultiplicandTileIterator&lt; Shape_, Operand_, Element_, cutlass::layout::RowMajorTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Crosswise &gt;, InstructionShape_, OpDelta_, 32, PartitionsK_ &gt;::LongIndex()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaTensorOpAccumulatorTileIterator_3_01Shape___00_01Element___006c39f57875e0aa9d0ad82c8043ed8b98.html#a1bf4e5cf980f73ff62c0df8d6d2e99a9',1,'cutlass::gemm::warp::MmaTensorOpAccumulatorTileIterator&lt; Shape_, Element_, cutlass::layout::RowMajor, InstructionShape_, OpDelta_ &gt;::LongIndex()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaTensorOpAccumulatorTileIterator_3_01Shape___00_01Element___008f607b871a2b3d854eb4def64712c042.html#af415b1bda869f2ef506c6a92dafc5829',1,'cutlass::gemm::warp::MmaTensorOpAccumulatorTileIterator&lt; Shape_, Element_, cutlass::layout::ColumnMajor, InstructionShape_, OpDelta_ &gt;::LongIndex()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaTensorOpAccumulatorTileIterator_3_01Shape___00_01Element___00027dabdc144edd6276f664ca74088510.html#affeae7791be815ff2e0c99c60b079dab',1,'cutlass::gemm::warp::MmaTensorOpAccumulatorTileIterator&lt; Shape_, Element_, cutlass::layout::ColumnMajorInterleaved&lt; InterleavedN &gt;, InstructionShape_, OpDelta_ 
&gt;::LongIndex()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operan34be8e21a40af3ebd2dc3dff460dca72.html#a3d25902aecd1742c8d5bc9229dc80455',1,'cutlass::gemm::warp::MmaVoltaTensorOpMultiplicandTileIterator&lt; Shape_, Operand::kA, Element_, cutlass::layout::VoltaTensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value &gt;, InstructionShape_, OpDelta_, 32 &gt;::LongIndex()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operan16c56cdc2dda5eeb996af8ec0242d501.html#a3cb3d779d8063f1f1e9ccb9a2d8ae7bf',1,'cutlass::gemm::warp::MmaVoltaTensorOpMultiplicandTileIterator&lt; Shape_, Operand::kB, Element_, cutlass::layout::VoltaTensorOpMultiplicandBCongruous&lt; sizeof_bits&lt; Element_ &gt;::value &gt;, InstructionShape_, OpDelta_, 32 &gt;::LongIndex()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operan0d3248553e52cd61ed8a2b3b12a20343.html#ad5070e7b4a207fbc27c2daec3acd5a1f',1,'cutlass::gemm::warp::MmaVoltaTensorOpMultiplicandTileIterator&lt; Shape_, Operand::kA, Element_, cutlass::layout::ColumnMajorVoltaTensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value &gt;, InstructionShape_, OpDelta_, 32 &gt;::LongIndex()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operand734577b7e54a074d143aba59828c2f2.html#a7c27b972639b99fcb4b7865d2294f214',1,'cutlass::gemm::warp::MmaVoltaTensorOpMultiplicandTileIterator&lt; Shape_, Operand::kB, Element_, cutlass::layout::RowMajorVoltaTensorOpMultiplicandBCongruous&lt; sizeof_bits&lt; Element_ &gt;::value &gt;, InstructionShape_, OpDelta_, 32 &gt;::LongIndex()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOpAccumulatorTileIterator.html#ac3e514d9c671bff4053be3a535713ddc',1,'cutlass::gemm::warp::MmaVoltaTensorOpAccumulatorTileIterator::LongIndex()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operana2f40b28f0d2286b84d86f7238d67b52.html#a89a7b481a74de5bc5fa29ee1d1925d27',1,'cutlass::gemm::warp::MmaVoltaTensorOpMultiplicandTileIterator&lt; Shape_, Operand_, Element_, cutlass::layout::VoltaTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, KBlock &gt;, InstructionShape_, OpDelta_, 32 &gt;::LongIndex()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operan5a221944f4a0e16ccab77ba684856942.html#a2408b63b278b7a738c95fa89209ddd11',1,'cutlass::gemm::warp::MmaVoltaTensorOpMultiplicandTileIterator&lt; Shape_, Operand_, Element_, cutlass::layout::ColumnMajorVoltaTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, KBlock &gt;, InstructionShape_, OpDelta_, 32 &gt;::LongIndex()'],['../classcutlass_1_1gemm_1_1warp_1_1MmaVoltaTensorOpMultiplicandTileIterator_3_01Shape___00_01Operandcc9821c435540895138bc9af495f321.html#a2f7d43791644b737de765245eb3d60ef',1,'cutlass::gemm::warp::MmaVoltaTensorOpMultiplicandTileIterator&lt; Shape_, Operand_, Element_, cutlass::layout::RowMajorVoltaTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, KBlock &gt;, InstructionShape_, OpDelta_, 32 
&gt;::LongIndex()'],['../classcutlass_1_1layout_1_1RowMajor.html#a4062a36ab044fdea058504ed52ee60b8',1,'cutlass::layout::RowMajor::LongIndex()'],['../classcutlass_1_1layout_1_1ColumnMajor.html#a4cc90aa67c4692f0a2cd9f59b8a07997',1,'cutlass::layout::ColumnMajor::LongIndex()'],['../structcutlass_1_1layout_1_1RowMajorInterleaved.html#a4b39c8accf5d11d6cc8d33aeb2b2e5fe',1,'cutlass::layout::RowMajorInterleaved::LongIndex()'],['../structcutlass_1_1layout_1_1ColumnMajorInterleaved.html#a85c8727c1bf645ac264dfed8186a1240',1,'cutlass::layout::ColumnMajorInterleaved::LongIndex()'],['../structcutlass_1_1layout_1_1ContiguousMatrix.html#a6f84cee3bc8d376971745b00f2eade68',1,'cutlass::layout::ContiguousMatrix::LongIndex()'],['../structcutlass_1_1layout_1_1ColumnMajorBlockLinear.html#a3560440cff27c7723c731efd75b3e838',1,'cutlass::layout::ColumnMajorBlockLinear::LongIndex()'],['../structcutlass_1_1layout_1_1RowMajorBlockLinear.html#a080409ff8bd04625b959055476271840',1,'cutlass::layout::RowMajorBlockLinear::LongIndex()'],['../structcutlass_1_1layout_1_1GeneralMatrix.html#afe6f769cf227aa2e8f8aedb7012c789b',1,'cutlass::layout::GeneralMatrix::LongIndex()'],['../classcutlass_1_1layout_1_1PitchLinear.html#a1a4b31740e77b3c03925f507650978ea',1,'cutlass::layout::PitchLinear::LongIndex()'],['../classcutlass_1_1layout_1_1TensorNHWC.html#a4e66e7ef2905194af62c8f0a8d3be4c9',1,'cutlass::layout::TensorNHWC::LongIndex()'],['../classcutlass_1_1layout_1_1TensorNCHW.html#a9f0b098e30b11bc2f5aaef336c8ed204',1,'cutlass::layout::TensorNCHW::LongIndex()'],['../classcutlass_1_1layout_1_1TensorNCxHWx.html#a32f02e6a348caf051d2faab503fbc65c',1,'cutlass::layout::TensorNCxHWx::LongIndex()'],['../classcutlass_1_1layout_1_1TensorCxRSKx.html#a782c4ac33be5bb8a934897f2e7f588dd',1,'cutlass::layout::TensorCxRSKx::LongIndex()'],['../structcutlass_1_1layout_1_1VoltaTensorOpMultiplicandCongruous.html#aed6da31b1c9467654006afb9154ef4ca',1,'cutlass::layout::VoltaTensorOpMultiplicandCongruous::LongIndex()'],['../structcutlass_1_1layout_1_1ColumnMajorVoltaTensorOpMultiplicandCongruous.html#ac398ab0e518a8e8ff09ca0ee2d58be8c',1,'cutlass::layout::ColumnMajorVoltaTensorOpMultiplicandCongruous::LongIndex()'],['../structcutlass_1_1layout_1_1RowMajorVoltaTensorOpMultiplicandCongruous.html#afefc0ee1313d3771e6ada7dfe5a1e96c',1,'cutlass::layout::RowMajorVoltaTensorOpMultiplicandCongruous::LongIndex()'],['../structcutlass_1_1layout_1_1VoltaTensorOpMultiplicandBCongruous.html#a635537ca830a3f621ed99ab323b1e7f1',1,'cutlass::layout::VoltaTensorOpMultiplicandBCongruous::LongIndex()'],['../structcutlass_1_1layout_1_1ColumnMajorVoltaTensorOpMultiplicandBCongruous.html#aece8bef8b6b52ecafccb265c439a64b0',1,'cutlass::layout::ColumnMajorVoltaTensorOpMultiplicandBCongruous::LongIndex()'],['../structcutlass_1_1layout_1_1RowMajorVoltaTensorOpMultiplicandBCongruous.html#a25800f82b5edd600b45db3ce69b51ddb',1,'cutlass::layout::RowMajorVoltaTensorOpMultiplicandBCongruous::LongIndex()'],['../structcutlass_1_1layout_1_1VoltaTensorOpMultiplicandCrosswise.html#a61455b7fac8d838b05b8d0c3cf6b5b12',1,'cutlass::layout::VoltaTensorOpMultiplicandCrosswise::LongIndex()'],['../structcutlass_1_1layout_1_1ColumnMajorVoltaTensorOpMultiplicandCrosswise.html#ab0f9e152320c2785c856965e6e3c02fb',1,'cutlass::layout::ColumnMajorVoltaTensorOpMultiplicandCrosswise::LongIndex()'],['../structcutlass_1_1layout_1_1RowMajorVoltaTensorOpMultiplicandCrosswise.html#aea4095c067a1f92f8d65c9f3372de90e',1,'cutlass::layout::RowMajorVoltaTensorOpMultiplicandCrosswise::LongIndex()'],['../structcutlass_1_1layout_1_1TensorOpMu
ltiplicand.html#a28b111ff2701662606d3c69d53c49a84',1,'cutlass::layout::TensorOpMultiplicand::LongIndex()'],['../structcutlass_1_1layout_1_1TensorOpMultiplicandCongruous.html#aa04ef0da8d8a859c2b7bb08cb3752d3d',1,'cutlass::layout::TensorOpMultiplicandCongruous::LongIndex()'],['../structcutlass_1_1layout_1_1TensorOpMultiplicandCongruous_3_0132_00_01Crosswise_01_4.html#a8873becf0049da7289f853021e3beec3',1,'cutlass::layout::TensorOpMultiplicandCongruous&lt; 32, Crosswise &gt;::LongIndex()'],['../structcutlass_1_1layout_1_1ColumnMajorTensorOpMultiplicandCongruous.html#a37fd29e6b74010e9a3ae089d6a777724',1,'cutlass::layout::ColumnMajorTensorOpMultiplicandCongruous::LongIndex()'],['../structcutlass_1_1layout_1_1RowMajorTensorOpMultiplicandCongruous.html#ad49467d9028288b0bb93dbf376ef573a',1,'cutlass::layout::RowMajorTensorOpMultiplicandCongruous::LongIndex()'],['../structcutlass_1_1layout_1_1TensorOpMultiplicandCrosswise.html#a07de92ffadfe255537479c37b04cde6f',1,'cutlass::layout::TensorOpMultiplicandCrosswise::LongIndex()'],['../structcutlass_1_1layout_1_1ColumnMajorTensorOpMultiplicandCrosswise.html#a8e1f0b54a8c1b3f37df9ec0dd3548985',1,'cutlass::layout::ColumnMajorTensorOpMultiplicandCrosswise::LongIndex()'],['../structcutlass_1_1layout_1_1RowMajorTensorOpMultiplicandCrosswise.html#a85b214a913b534fc9cc5366664a414e3',1,'cutlass::layout::RowMajorTensorOpMultiplicandCrosswise::LongIndex()'],['../structcutlass_1_1layout_1_1TensorOpMultiplicandColumnMajorInterleaved.html#a6c2a122f2d16f1ca7957d7b0749248fc',1,'cutlass::layout::TensorOpMultiplicandColumnMajorInterleaved::LongIndex()'],['../structcutlass_1_1layout_1_1TensorOpMultiplicandRowMajorInterleaved.html#ace9bb56ad8e186a09e07a44f96ba8b6e',1,'cutlass::layout::TensorOpMultiplicandRowMajorInterleaved::LongIndex()'],['../classcutlass_1_1layout_1_1PackedVectorLayout.html#ac97c429c4de5e90a57fe14a90cb30f6b',1,'cutlass::layout::PackedVectorLayout::LongIndex()'],['../structcutlass_1_1Tensor4DCoord.html#a779bf9ea896ac4ae9d4def10cd23eb45',1,'cutlass::Tensor4DCoord::LongIndex()'],['../classcutlass_1_1IdentityTensorLayout.html#aea0b83b611144c3f5860712967234ab4',1,'cutlass::IdentityTensorLayout::LongIndex()'],['../classcutlass_1_1TensorRef.html#adeada5e33b231f125a4aaeaf963bd3a3',1,'cutlass::TensorRef::LongIndex()'],['../classcutlass_1_1TensorView.html#a59dc3d93fa2f5132659f768c9d254627',1,'cutlass::TensorView::LongIndex()'],['../classcutlass_1_1thread_1_1Matrix.html#aa1ba37a162d1844a125d5659b3d72fc8',1,'cutlass::thread::Matrix::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileAccessIterator_3_01Shape___00_01Elemen784a0e9da3f55064c47e5613791f51f7.html#a7c23a0eeec23eb0810e41f36e29ca5a3',1,'cutlass::transform::threadblock::PredicatedTileAccessIterator&lt; Shape_, Element_, layout::PitchLinear, AdvanceRank, ThreadMap_, AccessType_ &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileAccessIterator_3_01Shape___00_01Elemen89c687c583745a73cb485041911a4c4e.html#a8818989486db7fea2f97a41fc4c83db0',1,'cutlass::transform::threadblock::PredicatedTileAccessIterator&lt; Shape_, Element_, layout::ColumnMajor, AdvanceRank, ThreadMap_, AccessType_ &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileAccessIterator_3_01Shape___00_01Elemen9838736ad62fae54213fbaf722a989ab.html#adf7eadc025fe96a3e755155a98772638',1,'cutlass::transform::threadblock::PredicatedTileAccessIterator&lt; Shape_, Element_, layout::RowMajor, AdvanceRank, ThreadMap_, AccessType_ 
&gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileAccessIterator_3_01Shape___00_01Elemenab63a1e105bf37f6371516cb9e2c5a7a.html#ac2c0ac3fbe83a7bb1c36bbfc3d50cc35',1,'cutlass::transform::threadblock::PredicatedTileAccessIterator&lt; Shape_, Element_, layout::ColumnMajorInterleaved&lt; InterleavedK &gt;, AdvanceRank, ThreadMap_, AccessType_ &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileAccessIterator_3_01Shape___00_01Elemen809793e785fb4211888c6b4e5dcfcb39.html#afc181d8c438f424b1c611769467bf3a4',1,'cutlass::transform::threadblock::PredicatedTileAccessIterator&lt; Shape_, Element_, layout::RowMajorInterleaved&lt; InterleavedK &gt;, AdvanceRank, ThreadMap_, AccessType_ &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileAccessIterator2dThreadTile_3_01Shape__1790abaa54a01f277d75766d5882fec8.html#aa571875c3582fd7329ceac7dda9ea708',1,'cutlass::transform::threadblock::PredicatedTileAccessIterator2dThreadTile&lt; Shape_, Element_, layout::PitchLinear, AdvanceRank, ThreadMap_, AccessType_ &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileAccessIterator2dThreadTile_3_01Shape__da632779aba661c0f4cfaaa78126b771.html#a394d46875ccc84fa0cd9432c5b14ea5a',1,'cutlass::transform::threadblock::PredicatedTileAccessIterator2dThreadTile&lt; Shape_, Element_, layout::ColumnMajor, AdvanceRank, ThreadMap_, AccessType_ &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileAccessIterator2dThreadTile_3_01Shape__7327fa15996bcb8502cdfcc192350fe1.html#affe1f1064aaf7d829f7b06c4ac933544',1,'cutlass::transform::threadblock::PredicatedTileAccessIterator2dThreadTile&lt; Shape_, Element_, layout::RowMajor, AdvanceRank, ThreadMap_, AccessType_ &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileIterator_3_01Shape___00_01Element___00e7c2c404e7aedfe60ad56bb5571306a1.html#a6994e38394aac156a8bcf006fe0a86dc',1,'cutlass::transform::threadblock::PredicatedTileIterator&lt; Shape_, Element_, layout::PitchLinear, AdvanceRank, ThreadMap_, AccessSize &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileIterator_3_01Shape___00_01Element___0068b3e874b5d93d11f0fa902c7f1d11d9.html#abd7913223c57a050ba28cb3683094384',1,'cutlass::transform::threadblock::PredicatedTileIterator&lt; Shape_, Element_, layout::ColumnMajor, AdvanceRank, ThreadMap_, AccessSize &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileIterator_3_01Shape___00_01Element___0041ea81994f8af0d4d071fdb9e66b5ff0.html#a42dd3be2ba430549488aa4aff56f5296',1,'cutlass::transform::threadblock::PredicatedTileIterator&lt; Shape_, Element_, layout::RowMajor, AdvanceRank, ThreadMap_, AccessSize &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileIterator_3_01Shape___00_01Element___00f6b3a9dfab5e7c72d5233f7e5e6e3b9b.html#af950c284f063cced0e0946d824aff078',1,'cutlass::transform::threadblock::PredicatedTileIterator&lt; Shape_, Element_, layout::ColumnMajorInterleaved&lt; InterleavedK &gt;, AdvanceRank, ThreadMap_, AccessSize &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileIterator_3_01Shape___00_01Element___00d670f969180a8d182dffb356ebcc957e.html#ad4b81297032276616f712800457bded4',1,'cutlass::transform::threadblock::PredicatedTileIterator&lt; Shape_, Element_, layout::RowMajorInterleaved&lt; InterleavedK &gt;, AdvanceRank, ThreadMap_, AccessSize 
&gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileIterator2dThreadTile_3_01Shape___00_017a517f3c73efd795ab05059cc9b111e1.html#a560fc3bce354a657f12b423969eb5b37',1,'cutlass::transform::threadblock::PredicatedTileIterator2dThreadTile&lt; Shape_, Element_, layout::PitchLinear, AdvanceRank, ThreadMap_, Transpose_ &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileIterator2dThreadTile_3_01Shape___00_0165b39a630d10785a3558406f9adb99b9.html#a28c6395acf81c013c8089414a91b66e7',1,'cutlass::transform::threadblock::PredicatedTileIterator2dThreadTile&lt; Shape_, Element_, layout::ColumnMajor, AdvanceRank, ThreadMap_, Transpose_ &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1PredicatedTileIterator2dThreadTile_3_01Shape___00_013671177d6219bfeb0e1b4dc4c1b5bf11.html#a33d57eb89928e9e38a9be7190452928c',1,'cutlass::transform::threadblock::PredicatedTileIterator2dThreadTile&lt; Shape_, Element_, layout::RowMajor, AdvanceRank, ThreadMap_, Transpose_ &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileAccessIterator_3_01Shape___00_01Element__0855e9d9ab619202d2397180c1e4c4a5.html#a77aba5390f22c62a78fb71e90b5cd14a',1,'cutlass::transform::threadblock::RegularTileAccessIterator&lt; Shape_, Element_, layout::PitchLinear, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileAccessIterator_3_01Shape___00_01Element__eb7d20f8b9d69e0ae5e7ef51dc480867.html#ab6a33c0f41c4f171beaf7d1b328c5a22',1,'cutlass::transform::threadblock::RegularTileAccessIterator&lt; Shape_, Element_, layout::ColumnMajor, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileAccessIterator_3_01Shape___00_01Element__f04332958a49a47d6fb2b25201764630.html#acb4bdae1b54e2bc58a504a372ca8b2f5',1,'cutlass::transform::threadblock::RegularTileAccessIterator&lt; Shape_, Element_, layout::RowMajor, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileAccessIterator_3_01Shape___00_01Element__ebf4714349612673e8b6609b763eeb6f.html#a4051bd65d1bf0b1b127bf46ad2044b83',1,'cutlass::transform::threadblock::RegularTileAccessIterator&lt; Shape_, Element_, layout::TensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value, int(128/sizeof(Element_))&gt;, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileAccessIterator_3_01Shape___00_01Element__2c1476eaf582bfe972793e17babfe985.html#acee6b05cbcccab9deba3632b35d2f4e7',1,'cutlass::transform::threadblock::RegularTileAccessIterator&lt; Shape_, Element_, layout::ColumnMajorTensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value, int(128/sizeof(Element_))&gt;, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileAccessIterator_3_01Shape___00_01Element__6baada077236f1a368c61c5e11b45b72.html#a81ddaa615b424060e6442d1328d02e55',1,'cutlass::transform::threadblock::RegularTileAccessIterator&lt; Shape_, Element_, layout::RowMajorTensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value, int(128/sizeof(Element_))&gt;, AdvanceRank, ThreadMap_, Alignment 
&gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileAccessIterator_3_01Shape___00_01Element__e9a9e0f4286f652f55eb9b863b21effe.html#af7654c26faa6bbef9898085f10bceb9f',1,'cutlass::transform::threadblock::RegularTileAccessIterator&lt; Shape_, Element_, layout::TensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Crosswise &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileAccessIterator_3_01Shape___00_01Element__a3c11cf1f00ef7a1efb8389ac6e4c6e0.html#a17f3bf28de19feaafff96d2e0c558047',1,'cutlass::transform::threadblock::RegularTileAccessIterator&lt; Shape_, Element_, layout::ColumnMajorTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Crosswise &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileAccessIterator_3_01Shape___00_01Element__0184b7188941788a96624510a4b2f876.html#a1a58d2ccf23ef435ec977aadb67ac7f7',1,'cutlass::transform::threadblock::RegularTileAccessIterator&lt; Shape_, Element_, layout::RowMajorTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Crosswise &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_0184a89653916f5d51ab59d1b386989a17.html#a83a6797234c779650f57dd7ed54b932a',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::PitchLinear, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_0149454d361ea5885cf5166a920b5145df.html#a589c4fa14fed9f28631a5c291bd10e5e',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::RowMajor, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_011d3637dbd8bc58bcb020b51bf57fbfc0.html#a57a1ee8a67d2b206fb7eec6751a14948',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::ColumnMajor, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator2dThreadTile_3_01Shape___00_01Ele76ed82829532ae1c17f4c78158f036c7.html#a32bb8d248e332079b29ca370839a7ef2',1,'cutlass::transform::threadblock::RegularTileIterator2dThreadTile&lt; Shape_, Element_, layout::PitchLinear, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator2dThreadTile_3_01Shape___00_01Ele654c8f6161ae5340f040397a4e2e045c.html#afff821adbe137bac91d9a6a23436a9b2',1,'cutlass::transform::threadblock::RegularTileIterator2dThreadTile&lt; Shape_, Element_, layout::RowMajorInterleaved&lt; 4 &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator2dThreadTile_3_01Shape___00_01Eleb60d066756d1c18f05fceee6a27bdb8a.html#a67f3bb5d910ed83222456ffc2dc87583',1,'cutlass::transform::threadblock::RegularTileIterator2dThreadTile&lt; Shape_, Element_, layout::ColumnMajorInterleaved&lt; 4 &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_01efd5013a2503d6567e2bf6b40c97360c.html#a5fb0596afba74c34a41db45b7792e38a',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, 
Element_, layout::TensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value, int(128/sizeof(Element_))&gt;, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_017982f81d4ef592e19c8427de2ea933a3.html#a529df837eb7dd3c7a27d4554f9c2e285',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::ColumnMajorTensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value, int(128/sizeof(Element_))&gt;, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_01c20d35180520077a5a09b1e33543c1a5.html#a6f1bb0ed57fa72329fc5730202687986',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::RowMajorTensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value, int(128/sizeof(Element_))&gt;, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_0197fef2242a3454a7d1cebe61aee28b43.html#a3829addb6edfaa8bd45df8788efc36a1',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::TensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Crosswise &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_010889a732373c350de9b9a9f6c13cd761.html#a604ec9e4aa8b5d3664eb8cd0b312027d',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::ColumnMajorTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Crosswise &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_01a31b454d9c930525c1e9ca406a514f40.html#a53457007895b6b8367f2866523a28da2',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::RowMajorTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Crosswise &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_01f96bbeb63e6d4ce4a2551279de3a9f0e.html#ae4dc48ecdcd3b406b2f1cbdb667a2d4e',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::VoltaTensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_01793f74bfd8f116a827948ab01a37349a.html#a64c6d70cc0b40ffd47d84fffb8fc926c',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::ColumnMajorVoltaTensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_01f6f6511b5033cad31083644ac69c54d8.html#aa5226c5b5f90cf8dc54ba015db3ccd9e',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::RowMajorVoltaTensorOpMultiplicandCongruous&lt; sizeof_bits&lt; Element_ &gt;::value &gt;, AdvanceRank, ThreadMap_, Alignment 
&gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_01a75d2cd74e722d6ad6a3b41aabfd432d.html#a9ca411260555a8ca68c99c84926a2eb3',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::VoltaTensorOpMultiplicandBCongruous&lt; sizeof_bits&lt; Element_ &gt;::value &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_01187f8574e1fe9d7d5e8fbf09bd834bf0.html#a283bb753b525a1d2040a2c1649526596',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::ColumnMajorVoltaTensorOpMultiplicandBCongruous&lt; sizeof_bits&lt; Element_ &gt;::value &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_0104ad31bd559a88cc418ae1cab7492ed5.html#a13a59579b70d828e9f0e8335a0ca7a77',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::RowMajorVoltaTensorOpMultiplicandBCongruous&lt; sizeof_bits&lt; Element_ &gt;::value &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_01dbd6b8468d5bd787308d2f615a24d123.html#a78fb04addeb4a04ac8446c52c3c62b59',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::VoltaTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Shape_::kContiguous &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_01bd31b3810c1fedf2e7e5959ff92b5d3d.html#a4c762f53f401653ff9b7e2d2da03149a',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::ColumnMajorVoltaTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Shape_::kRow &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1transform_1_1threadblock_1_1RegularTileIterator_3_01Shape___00_01Element___00_01b3fa5720e807697de61b9f937b269cd0.html#ac7957b4618b201a465ccd825b5736576',1,'cutlass::transform::threadblock::RegularTileIterator&lt; Shape_, Element_, layout::RowMajorVoltaTensorOpMultiplicandCrosswise&lt; sizeof_bits&lt; Element_ &gt;::value, Shape_::kColumn &gt;, AdvanceRank, ThreadMap_, Alignment &gt;::LongIndex()'],['../classcutlass_1_1HostTensor.html#aa42c24683584f21aee1a4333ebaefccc',1,'cutlass::HostTensor::LongIndex()']]] ];
PypiClean
/DI_engine-0.4.9-py3-none-any.whl/dizoo/competitive_rl/envs/competitive_rl_env_wrapper.py
import cv2
import gym
import os.path as osp
import numpy as np
from typing import Union, Optional
from collections import deque
from competitive_rl.pong.builtin_policies import get_builtin_agent_names, single_obs_space, single_act_space, get_random_policy, get_rule_based_policy
from competitive_rl.utils.policy_serving import Policy


def get_compute_action_function_ours(agent_name, num_envs=1):
    resource_dir = osp.join(osp.dirname(__file__), "resources", "pong")
    if agent_name == "STRONG":
        return Policy(
            single_obs_space,
            single_act_space,
            num_envs,
            osp.join(resource_dir, "checkpoint-strong.pkl"),
            use_light_model=False
        )
    if agent_name == "MEDIUM":
        return Policy(
            single_obs_space,
            single_act_space,
            num_envs,
            osp.join(resource_dir, "checkpoint-medium.pkl"),
            use_light_model=True
        )
    if agent_name == "ALPHA_PONG":
        return Policy(
            single_obs_space,
            single_act_space,
            num_envs,
            osp.join(resource_dir, "checkpoint-alphapong.pkl"),
            use_light_model=False
        )
    if agent_name == "WEAK":
        return Policy(
            single_obs_space,
            single_act_space,
            num_envs,
            osp.join(resource_dir, "checkpoint-weak.pkl"),
            use_light_model=True
        )
    if agent_name == "RANDOM":
        return get_random_policy(num_envs)
    if agent_name == "RULE_BASED":
        return get_rule_based_policy(num_envs)
    raise ValueError("Unknown agent name: {}".format(agent_name))


class BuiltinOpponentWrapper(gym.Wrapper):

    def __init__(self, env: 'gym.Env', num_envs: int = 1) -> None:  # noqa
        super().__init__(env)
        self.agents = {
            agent_name: get_compute_action_function_ours(agent_name, num_envs)
            for agent_name in get_builtin_agent_names()
        }
        self.agent_names = list(self.agents)
        self.prev_opponent_obs = None
        self.current_opponent_name = "RULE_BASED"
        self.current_opponent = self.agents[self.current_opponent_name]
        self.observation_space = env.observation_space[0]
        self.action_space = env.action_space[0]
        self.num_envs = num_envs

    def reset_opponent(self, agent_name: str) -> None:
        assert agent_name in self.agent_names, (agent_name, self.agent_names)
        self.current_opponent_name = agent_name
        self.current_opponent = self.agents[self.current_opponent_name]

    def step(self, action):
        tuple_action = (action.item(), self.current_opponent(self.prev_opponent_obs))
        obs, rew, done, info = self.env.step(tuple_action)
        self.prev_opponent_obs = obs[1]
        # if done.ndim == 2:
        #     done = done[:, 0]
        # return obs[0], rew[:, 0].reshape(-1, 1), done.reshape(-1, 1), info
        return obs[0], rew[0], done, info

    def reset(self):
        obs = self.env.reset()
        self.prev_opponent_obs = obs[1]
        return obs[0]

    def seed(self, s):
        self.env.seed(s)


def wrap_env(env_id, builtin_wrap, opponent, frame_stack=4, warp_frame=True, only_info=False):
    """Configure environment for DeepMind-style Atari. The observation is
    channel-first: (c, h, w) instead of (h, w, c).

    :param str env_id: the atari environment id.
    :param bool builtin_wrap: wrap the env so the second player is controlled by a built-in opponent.
    :param str opponent: name of the built-in opponent agent.
    :param int frame_stack: wrap the frame stacking wrapper.
    :param bool warp_frame: wrap the grayscale + resize observation wrapper.
    :param bool only_info: only return the names of the wrappers that would be applied.
    :return: the wrapped atari environment.
    """
    if not only_info:
        env = gym.make(env_id)
        if builtin_wrap:
            env = BuiltinOpponentWrapper(env)
            env.reset_opponent(opponent)

        if warp_frame:
            env = WarpFrameWrapperCompetitveRl(env, builtin_wrap)
        if frame_stack:
            env = FrameStackWrapperCompetitiveRl(env, frame_stack, builtin_wrap)
        return env
    else:
        wrapper_info = ''
        if builtin_wrap:
            wrapper_info += BuiltinOpponentWrapper.__name__ + '\n'
        if warp_frame:
            wrapper_info += WarpFrameWrapperCompetitveRl.__name__ + '\n'
        if frame_stack:
            wrapper_info += FrameStackWrapperCompetitiveRl.__name__ + '\n'
        return wrapper_info


class WarpFrameWrapperCompetitveRl(gym.ObservationWrapper):
    """Warp frames to 84x84 as done in the Nature paper and later work.

    :param gym.Env env: the environment to wrap.
    """

    def __init__(self, env, builtin_wrap):
        super().__init__(env)
        self.size = 84
        obs_space = env.observation_space
        self.builtin_wrap = builtin_wrap
        if builtin_wrap:
            # single player
            self.observation_space = gym.spaces.Box(
                low=np.min(obs_space.low),
                high=np.max(obs_space.high),
                shape=(self.size, self.size),
                dtype=obs_space.dtype
            )
        else:
            # double player
            self.observation_space = gym.spaces.tuple.Tuple(
                [
                    gym.spaces.Box(
                        low=np.min(obs_space[0].low),
                        high=np.max(obs_space[0].high),
                        shape=(self.size, self.size),
                        dtype=obs_space[0].dtype
                    ) for _ in range(len(obs_space))
                ]
            )

    def observation(self, frame):
        """Returns the current observation from a frame."""
        if self.builtin_wrap:
            frame = cv2.cvtColor(frame, cv2.COLOR_RGB2GRAY)
            return cv2.resize(frame, (self.size, self.size), interpolation=cv2.INTER_AREA)
        else:
            frames = []
            for one_frame in frame:
                one_frame = cv2.cvtColor(one_frame, cv2.COLOR_RGB2GRAY)
                one_frame = cv2.resize(one_frame, (self.size, self.size), interpolation=cv2.INTER_AREA)
                frames.append(one_frame)
            return frames


class FrameStackWrapperCompetitiveRl(gym.Wrapper):
    """Stack n_frames last frames.

    :param gym.Env env: the environment to wrap.
    :param int n_frames: the number of frames to stack.
    """

    def __init__(self, env, n_frames, builtin_wrap):
        super().__init__(env)
        self.n_frames = n_frames
        self.builtin_wrap = builtin_wrap
        obs_space = env.observation_space
        if self.builtin_wrap:
            self.frames = deque([], maxlen=n_frames)
            shape = (n_frames, ) + obs_space.shape
            self.observation_space = gym.spaces.Box(
                low=np.min(obs_space.low), high=np.max(obs_space.high), shape=shape, dtype=obs_space.dtype
            )
        else:
            self.frames = [deque([], maxlen=n_frames) for _ in range(len(obs_space))]
            shape = (n_frames, ) + obs_space[0].shape
            self.observation_space = gym.spaces.tuple.Tuple(
                [
                    gym.spaces.Box(
                        low=np.min(obs_space[0].low), high=np.max(obs_space[0].high), shape=shape, dtype=obs_space[0].dtype
                    ) for _ in range(len(obs_space))
                ]
            )

    def reset(self):
        if self.builtin_wrap:
            obs = self.env.reset()
            for _ in range(self.n_frames):
                self.frames.append(obs)
            return self._get_ob(self.frames)
        else:
            obs = self.env.reset()
            for i, one_obs in enumerate(obs):
                for _ in range(self.n_frames):
                    self.frames[i].append(one_obs)
            return np.stack([self._get_ob(self.frames[i]) for i in range(len(obs))])

    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        if self.builtin_wrap:
            self.frames.append(obs)
            return self._get_ob(self.frames), reward, done, info
        else:
            for i, one_obs in enumerate(obs):
                self.frames[i].append(one_obs)
            return np.stack([self._get_ob(self.frames[i]) for i in range(len(obs))], axis=0), reward, done, info

    @staticmethod
    def _get_ob(frames):
        # the original wrapper uses `LazyFrames`, but since we use a np buffer
        # it has no effect
        return np.stack(frames, axis=0)
PypiClean
/ckanext_datajson-0.1.21-py3-none-any.whl/ckanext/datajson/package2pod.py
from sqlalchemy.util import OrderedDict from ckan.lib import helpers as h from logging import getLogger import re from . import helpers log = getLogger(__name__) class Package2Pod(object): def __init__(self): pass seen_identifiers = None @staticmethod def wrap_json_catalog(dataset_dict, json_export_map): catalog_headers = [(x, y) for x, y in json_export_map.get('catalog_headers').items()] catalog = OrderedDict( catalog_headers + [('dataset', dataset_dict)] ) return catalog @staticmethod def filter(content): if not isinstance(content, str): return content content = Package2Pod.strip_redacted_tags(content) content = helpers.strip_if_string(content) return content @staticmethod def strip_redacted_tags(content): if not isinstance(content, str): return content return re.sub(helpers.REDACTED_TAGS_REGEX, '', content) @staticmethod def mask_redacted(content, reason): if not content: content = '' if reason: # check if field is partial redacted masked = content for redact in re.findall(helpers.PARTIAL_REDACTION_REGEX, masked): masked = masked.replace(redact, '') if len(masked) < len(content): return masked return '[[REDACTED-EX ' + reason + ']]' return content @staticmethod def convert_package(package, json_export_map, redaction_enabled=False): import os import sys try: dataset = Package2Pod.export_map_fields(package, json_export_map, redaction_enabled) # skip validation if we export whole /data.json catalog if json_export_map.get('validation_enabled'): return Package2Pod.validate(package, dataset) else: return dataset except Exception as e: exc_type, exc_obj, exc_tb = sys.exc_info() filename = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] log.error("%s : %s : %s : %s", exc_type, filename, exc_tb.tb_lineno, str(e)) raise e @staticmethod def export_map_fields(package, json_export_map, redaction_enabled=False): import os import sys public_access_level = helpers.get_extra(package, 'public_access_level') if not public_access_level or public_access_level not in ['non-public', 'restricted public']: redaction_enabled = False Wrappers.redaction_enabled = redaction_enabled json_fields = json_export_map.get('dataset_fields_map') try: dataset = OrderedDict([("@type", "dcat:Dataset")]) Wrappers.pkg = package Wrappers.full_field_map = json_fields for key, field_map in json_fields.items(): # log.debug('%s => %s', key, field_map) field_type = field_map.get('type', 'direct') is_extra = field_map.get('extra') array_key = field_map.get('array_key') field = field_map.get('field') split = field_map.get('split') wrapper = field_map.get('wrapper') default = field_map.get('default') if redaction_enabled and field and 'publisher' != field and 'direct' != field_type: redaction_reason = helpers.get_extra(package, 'redacted_' + field, False) # keywords(tags) have some UI-related issues with this, so we'll check both versions here if not redaction_reason and 'tags' == field: redaction_reason = helpers.get_extra(package, 'redacted_tag_string', False) if redaction_reason: dataset[key] = '[[REDACTED-EX ' + redaction_reason + ']]' continue if 'direct' == field_type and field: if is_extra: # log.debug('field: %s', field) # log.debug('value: %s', helpers.get_extra(package, field)) dataset[key] = helpers.strip_if_string(helpers.get_extra(package, field, default)) else: dataset[key] = helpers.strip_if_string(package.get(field, default)) if redaction_enabled and 'publisher' != field: redaction_reason = helpers.get_extra(package, 'redacted_' + field, False) # keywords(tags) have some UI-related issues with this, so we'll check 
both versions here if redaction_reason: dataset[key] = Package2Pod.mask_redacted(dataset[key], redaction_reason) continue else: dataset[key] = Package2Pod.filter(dataset[key]) elif 'array' == field_type: if is_extra: found_element = helpers.strip_if_string(helpers.get_extra(package, field)) if found_element: if helpers.is_redacted(found_element): dataset[key] = found_element elif split: dataset[key] = [Package2Pod.filter(x) for x in found_element.split(split)] else: if array_key: dataset[key] = [Package2Pod.filter(t[array_key]) for t in package.get(field, {})] if wrapper: # log.debug('wrapper: %s', wrapper) method = getattr(Wrappers, wrapper) if method: Wrappers.current_field_map = field_map dataset[key] = method(dataset.get(key)) # CKAN doesn't like empty values on harvest, let's get rid of them # Remove entries where value is None, "", or empty list [] dataset = OrderedDict([(x, y) for x, y in dataset.items() if y is not None and y != "" and y != []]) return dataset except Exception as e: exc_type, exc_obj, exc_tb = sys.exc_info() filename = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] log.error("%s : %s : %s : %s", exc_type, filename, exc_tb.tb_lineno, str(e)) raise e @staticmethod def validate(pkg, dataset_dict): import os import sys global currentPackageOrg try: # When saved from UI DataQuality value is stored as "on" instead of True. # Check if value is "on" and replace it with True. dataset_dict = OrderedDict(dataset_dict) if dataset_dict.get('dataQuality') == "on" \ or dataset_dict.get('dataQuality') == "true" \ or dataset_dict.get('dataQuality') == "True": dataset_dict['dataQuality'] = True elif dataset_dict.get('dataQuality') == "false" \ or dataset_dict.get('dataQuality') == "False": dataset_dict['dataQuality'] = False # WARNING: Validation was removed from here # Previously, there was a `do_validation` function that was an # old implementation of DCAT-US Metadata Schema validation. # It was determined that this was not necessary anymore. # It may be necessary to add in the replacement in the future. return dataset_dict except Exception as e: exc_type, exc_obj, exc_tb = sys.exc_info() filename = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] log.error("%s : %s : %s", exc_type, filename, exc_tb.tb_lineno) raise e class Wrappers(object): def __init__(self): pass redaction_enabled = False pkg = None current_field_map = None full_field_map = None bureau_code_list = None resource_formats = None @staticmethod def catalog_publisher(value): publisher = None if value: publisher = helpers.get_responsible_party(value) if not publisher and 'organization' in Wrappers.pkg and 'title' in Wrappers.pkg.get('organization'): publisher = Wrappers.pkg.get('organization').get('title') return OrderedDict([ ("@type", "org:Organization"), ("name", publisher) ]) @staticmethod def inventory_publisher(value): global currentPackageOrg publisher = helpers.strip_if_string(helpers.get_extra(Wrappers.pkg, Wrappers.current_field_map.get('field'))) if publisher is None: return None currentPackageOrg = publisher organization_list = list() organization_list.append([ ('@type', 'org:Organization'), # optional ('name', Package2Pod.filter(publisher)), # required ]) for i in range(1, 6): pub_key = 'publisher_' + str(i) # e.g. publisher_1 if helpers.get_extra(Wrappers.pkg, pub_key): # e.g. 
package.extras.publisher_1 organization_list.append([ ('@type', 'org:Organization'), # optional ('name', Package2Pod.filter(helpers.get_extra(Wrappers.pkg, pub_key))), # required ]) currentPackageOrg = Package2Pod.filter(helpers.get_extra(Wrappers.pkg, pub_key)) # e.g. GSA if Wrappers.redaction_enabled: redaction_mask = helpers.get_extra(Wrappers.pkg, 'redacted_' + Wrappers.current_field_map.get('field'), False) if redaction_mask: return OrderedDict( [ ('@type', 'org:Organization'), # optional ('name', '[[REDACTED-EX ' + redaction_mask + ']]'), # required ] ) # so now we should have list() organization_list e.g. # ( # [('@type', 'org:Org'), ('name','GSA')], # [('@type', 'org:Org'), ('name','OCSIT')] # ) size = len(organization_list) # e.g. 2 tree = organization_list[0] for i in range(1, size): tree = organization_list[i] + [('subOrganizationOf', OrderedDict(tree))] return OrderedDict(tree) # used by get_accrual_periodicity accrual_periodicity_dict = { 'completely irregular': 'irregular', 'decennial': 'R/P10Y', 'quadrennial': 'R/P4Y', 'annual': 'R/P1Y', 'bimonthly': 'R/P2M', # or R/P0.5M 'semiweekly': 'R/P3.5D', 'daily': 'R/P1D', 'biweekly': 'R/P2W', # or R/P0.5W 'semiannual': 'R/P6M', 'biennial': 'R/P2Y', 'triennial': 'R/P3Y', 'three times a week': 'R/P0.33W', 'three times a month': 'R/P0.33M', 'continuously updated': 'R/PT1S', 'monthly': 'R/P1M', 'quarterly': 'R/P3M', 'semimonthly': 'R/P0.5M', 'three times a year': 'R/P4M', 'weekly': 'R/P1W', 'hourly': 'R/PT1H', 'continual': 'R/PT1S', 'fortnightly': 'R/P0.5M', 'annually': 'R/P1Y', 'biannualy': 'R/P0.5Y', 'asneeded': 'irregular', 'irregular': 'irregular', 'notplanned': 'irregular', 'unknown': 'irregular', 'not updated': 'irregular' } @staticmethod def fix_accrual_periodicity(frequency): return Wrappers.accrual_periodicity_dict.get(str(frequency).lower().strip(), frequency) @staticmethod def build_contact_point(someValue): import os import sys try: contact_point_map = Wrappers.full_field_map.get('contactPoint').get('map') if not contact_point_map: return None package = Wrappers.pkg if contact_point_map.get('fn').get('extra'): fn = helpers.get_extra(package, contact_point_map.get('fn').get('field'), helpers.get_extra(package, "Contact Name", package.get('maintainer'))) else: fn = package.get(contact_point_map.get('fn').get('field'), helpers.get_extra(package, "Contact Name", package.get('maintainer'))) fn = helpers.get_responsible_party(fn) if Wrappers.redaction_enabled: redaction_reason = helpers.get_extra(package, 'redacted_' + contact_point_map.get('fn').get('field'), False) if redaction_reason: fn = Package2Pod.mask_redacted(fn, redaction_reason) else: fn = Package2Pod.filter(fn) if contact_point_map.get('hasEmail').get('extra'): email = helpers.get_extra(package, contact_point_map.get('hasEmail').get('field'), package.get('maintainer_email')) else: email = package.get(contact_point_map.get('hasEmail').get('field'), package.get('maintainer_email')) if email and not helpers.is_redacted(email) and '@' in email: email = 'mailto:' + email if Wrappers.redaction_enabled: redaction_reason = helpers.get_extra(package, 'redacted_' + contact_point_map.get('hasEmail').get('field'), False) if redaction_reason: email = Package2Pod.mask_redacted(email, redaction_reason) else: email = Package2Pod.filter(email) contact_point = OrderedDict([('@type', 'vcard:Contact')]) if fn: contact_point['fn'] = fn if email: contact_point['hasEmail'] = email return contact_point except Exception as e: exc_type, exc_obj, exc_tb = sys.exc_info() filename = 
os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] log.error("%s : %s : %s", exc_type, filename, exc_tb.tb_lineno) raise e @staticmethod def inventory_parent_uid(parent_dataset_id): if parent_dataset_id: import ckan.model as model parent = model.Package.get(parent_dataset_id) if parent and parent.extras['unique_id']: parent_dataset_id = parent.extras['unique_id'] return parent_dataset_id @staticmethod def generate_distribution(someValue): arr = [] package = Wrappers.pkg distribution_map = Wrappers.full_field_map.get('distribution').get('map') if not distribution_map or 'resources' not in package: return arr for r in package["resources"]: resource = OrderedDict([('@type', "dcat:Distribution")]) for pod_key, json_map in distribution_map.items(): value = helpers.strip_if_string(r.get(json_map.get('field'), json_map.get('default'))) if Wrappers.redaction_enabled: if 'redacted_' + json_map.get('field') in r and r.get('redacted_' + json_map.get('field')): value = Package2Pod.mask_redacted(value, r.get('redacted_' + json_map.get('field'))) else: value = Package2Pod.filter(value) # filtering/wrapping if defined by export_map wrapper = json_map.get('wrapper') if wrapper: method = getattr(Wrappers, wrapper) if method: value = method(value) if value: resource[pod_key] = value # inventory rules res_url = helpers.strip_if_string(r.get('url')) if Wrappers.redaction_enabled: if 'redacted_url' in r and r.get('redacted_url'): res_url = '[[REDACTED-EX ' + r.get('redacted_url') + ']]' else: res_url = Package2Pod.filter(res_url) if res_url: res_url = res_url.replace('http://[[REDACTED', '[[REDACTED') res_url = res_url.replace('http://http', 'http') if r.get('resource_type') in ['api', 'accessurl']: resource['accessURL'] = res_url if 'mediaType' in resource: resource.pop('mediaType') else: if 'accessURL' in resource: resource.pop('accessURL') resource['downloadURL'] = res_url if 'mediaType' not in resource: log.warn("Missing mediaType for resource in package ['%s']", package.get('id')) else: log.warn("Missing downloadURL for resource in package ['%s']", package.get('id')) striped_resource = OrderedDict( [(x, y) for x, y in resource.items() if y is not None and y != "" and y != []]) arr += [OrderedDict(striped_resource)] return arr @staticmethod def bureau_code(value): if value: return value if not 'organization' not in Wrappers.pkg or 'title' not in Wrappers.pkg.get('organization'): return None org_title = Wrappers.pkg.get('organization').get('title') log.debug("org title: %s", org_title) code_list = Wrappers._get_bureau_code_list() if org_title not in code_list: return None bureau = code_list.get(org_title) log.debug("found match: %s", "[{0}:{1}]".format(bureau.get('OMB Agency Code'), bureau.get('OMB Bureau Code'))) result = "{0}:{1}".format(bureau.get('OMB Agency Code'), bureau.get('OMB Bureau Code')) log.debug("found match: '%s'", result) return [result] @staticmethod def _get_bureau_code_list(): if Wrappers.bureau_code_list: return Wrappers.bureau_code_list import json import os bc_file = open( os.path.join(os.path.dirname(__file__), "resources", "omb-agency-bureau-treasury-codes.json"), "r" ) code_list = json.load(bc_file) Wrappers.bureau_code_list = {} for bureau in code_list: Wrappers.bureau_code_list[bureau['Agency']] = bureau return Wrappers.bureau_code_list @staticmethod def mime_type_it(value): if not value: return value formats = h.resource_formats() formats.update(helpers.get_additional_formats()) format_clean = value.lower() if format_clean in formats: mime_type = formats[format_clean][0] 
mime_type = mime_type if mime_type else 'application/octet-stream' else: mime_type = value msg = value + ' ... BECOMES ... ' + mime_type log.debug(msg) return mime_type
PypiClean
/casai-home-frontend-20220503.0.tar.gz/casai-home-frontend-20220503.0/hass_frontend/frontend_latest/cb45130b.js
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([[29311],{83270:(e,t,o)=>{o.d(t,{_Y:()=>n,VU:()=>i,u_:()=>a,bi:()=>r,_t:()=>s,LI:()=>c,AV:()=>l,Mc:()=>d,dn:()=>p,H9:()=>h,De:()=>u,LV:()=>f,QD:()=>C,A$:()=>g,tW:()=>m});const n=(e,t,o)=>e.callApi("POST","cloud/login",{email:t,password:o}),i=e=>e.callApi("POST","cloud/logout"),a=(e,t)=>e.callApi("POST","cloud/forgot_password",{email:t}),r=(e,t,o)=>e.callApi("POST","cloud/register",{email:t,password:o}),s=(e,t)=>e.callApi("POST","cloud/resend_confirm",{email:t}),c=e=>e.callWS({type:"cloud/status"}),l=(e,t)=>e.callWS({type:"cloud/cloudhook/create",webhook_id:t}),d=(e,t)=>e.callWS({type:"cloud/cloudhook/delete",webhook_id:t}),p=e=>e.callWS({type:"cloud/remote/connect"}),h=e=>e.callWS({type:"cloud/remote/disconnect"}),u=e=>e.callWS({type:"cloud/subscription"}),f=(e,t)=>e.callWS({type:"cloud/update_prefs",...t}),C=(e,t,o)=>e.callWS({type:"cloud/google_assistant/entities/update",entity_id:t,...o}),g=e=>e.callApi("POST","cloud/google_actions/sync"),m=(e,t,o)=>e.callWS({type:"cloud/alexa/entities/update",entity_id:t,...o})},29311:(e,t,o)=>{o.r(t),o.d(t,{configSections:()=>L});var n=o(33310),i=o(7323),a=o(59708),r=o(83270),s=(o(15291),o(18199));function c(){c=function(){return e};var e={elementsDefinitionOrder:[["method"],["field"]],initializeInstanceElements:function(e,t){["method","field"].forEach((function(o){t.forEach((function(t){t.kind===o&&"own"===t.placement&&this.defineClassElement(e,t)}),this)}),this)},initializeClassElements:function(e,t){var o=e.prototype;["method","field"].forEach((function(n){t.forEach((function(t){var i=t.placement;if(t.kind===n&&("static"===i||"prototype"===i)){var a="static"===i?e:o;this.defineClassElement(a,t)}}),this)}),this)},defineClassElement:function(e,t){var o=t.descriptor;if("field"===t.kind){var n=t.initializer;o={enumerable:o.enumerable,writable:o.writable,configurable:o.configurable,value:void 0===n?void 0:n.call(e)}}Object.defineProperty(e,t.key,o)},decorateClass:function(e,t){var o=[],n=[],i={static:[],prototype:[],own:[]};if(e.forEach((function(e){this.addElementPlacement(e,i)}),this),e.forEach((function(e){if(!p(e))return o.push(e);var t=this.decorateElement(e,i);o.push(t.element),o.push.apply(o,t.extras),n.push.apply(n,t.finishers)}),this),!t)return{elements:o,finishers:n};var a=this.decorateConstructor(o,t);return n.push.apply(n,a.finishers),a.finishers=n,a},addElementPlacement:function(e,t,o){var n=t[e.placement];if(!o&&-1!==n.indexOf(e.key))throw new TypeError("Duplicated element ("+e.key+")");n.push(e.key)},decorateElement:function(e,t){for(var o=[],n=[],i=e.decorators,a=i.length-1;a>=0;a--){var r=t[e.placement];r.splice(r.indexOf(e.key),1);var s=this.fromElementDescriptor(e),c=this.toElementFinisherExtras((0,i[a])(s)||s);e=c.element,this.addElementPlacement(e,t),c.finisher&&n.push(c.finisher);var l=c.extras;if(l){for(var d=0;d<l.length;d++)this.addElementPlacement(l[d],t);o.push.apply(o,l)}}return{element:e,finishers:n,extras:o}},decorateConstructor:function(e,t){for(var o=[],n=t.length-1;n>=0;n--){var i=this.fromClassDescriptor(e),a=this.toClassDescriptor((0,t[n])(i)||i);if(void 0!==a.finisher&&o.push(a.finisher),void 0!==a.elements){e=a.elements;for(var r=0;r<e.length-1;r++)for(var s=r+1;s<e.length;s++)if(e[r].key===e[s].key&&e[r].placement===e[s].placement)throw new TypeError("Duplicated element ("+e[r].key+")")}}return{elements:e,finishers:o}},fromElementDescriptor:function(e){var 
t={kind:e.kind,key:e.key,placement:e.placement,descriptor:e.descriptor};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),"field"===e.kind&&(t.initializer=e.initializer),t},toElementDescriptors:function(e){var t;if(void 0!==e)return(t=e,function(e){if(Array.isArray(e))return e}(t)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(t)||function(e,t){if(e){if("string"==typeof e)return C(e,t);var o=Object.prototype.toString.call(e).slice(8,-1);return"Object"===o&&e.constructor&&(o=e.constructor.name),"Map"===o||"Set"===o?Array.from(e):"Arguments"===o||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(o)?C(e,t):void 0}}(t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()).map((function(e){var t=this.toElementDescriptor(e);return this.disallowProperty(e,"finisher","An element descriptor"),this.disallowProperty(e,"extras","An element descriptor"),t}),this)},toElementDescriptor:function(e){var t=String(e.kind);if("method"!==t&&"field"!==t)throw new TypeError('An element descriptor\'s .kind property must be either "method" or "field", but a decorator created an element descriptor with .kind "'+t+'"');var o=f(e.key),n=String(e.placement);if("static"!==n&&"prototype"!==n&&"own"!==n)throw new TypeError('An element descriptor\'s .placement property must be one of "static", "prototype" or "own", but a decorator created an element descriptor with .placement "'+n+'"');var i=e.descriptor;this.disallowProperty(e,"elements","An element descriptor");var a={kind:t,key:o,placement:n,descriptor:Object.assign({},i)};return"field"!==t?this.disallowProperty(e,"initializer","A method descriptor"):(this.disallowProperty(i,"get","The property descriptor of a field descriptor"),this.disallowProperty(i,"set","The property descriptor of a field descriptor"),this.disallowProperty(i,"value","The property descriptor of a field descriptor"),a.initializer=e.initializer),a},toElementFinisherExtras:function(e){return{element:this.toElementDescriptor(e),finisher:u(e,"finisher"),extras:this.toElementDescriptors(e.extras)}},fromClassDescriptor:function(e){var t={kind:"class",elements:e.map(this.fromElementDescriptor,this)};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),t},toClassDescriptor:function(e){var t=String(e.kind);if("class"!==t)throw new TypeError('A class descriptor\'s .kind property must be "class", but a decorator created a class descriptor with .kind "'+t+'"');this.disallowProperty(e,"key","A class descriptor"),this.disallowProperty(e,"placement","A class descriptor"),this.disallowProperty(e,"descriptor","A class descriptor"),this.disallowProperty(e,"initializer","A class descriptor"),this.disallowProperty(e,"extras","A class descriptor");var o=u(e,"finisher");return{elements:this.toElementDescriptors(e.elements),finisher:o}},runClassFinishers:function(e,t){for(var o=0;o<t.length;o++){var n=(0,t[o])(e);if(void 0!==n){if("function"!=typeof n)throw new TypeError("Finishers must return a constructor.");e=n}}return e},disallowProperty:function(e,t,o){if(void 0!==e[t])throw new TypeError(o+" can't have a ."+t+" property.")}};return e}function l(e){var 
t,o=f(e.key);"method"===e.kind?t={value:e.value,writable:!0,configurable:!0,enumerable:!1}:"get"===e.kind?t={get:e.value,configurable:!0,enumerable:!1}:"set"===e.kind?t={set:e.value,configurable:!0,enumerable:!1}:"field"===e.kind&&(t={configurable:!0,writable:!0,enumerable:!0});var n={kind:"field"===e.kind?"field":"method",key:o,placement:e.static?"static":"field"===e.kind?"own":"prototype",descriptor:t};return e.decorators&&(n.decorators=e.decorators),"field"===e.kind&&(n.initializer=e.value),n}function d(e,t){void 0!==e.descriptor.get?t.descriptor.get=e.descriptor.get:t.descriptor.set=e.descriptor.set}function p(e){return e.decorators&&e.decorators.length}function h(e){return void 0!==e&&!(void 0===e.value&&void 0===e.writable)}function u(e,t){var o=e[t];if(void 0!==o&&"function"!=typeof o)throw new TypeError("Expected '"+t+"' to be a function");return o}function f(e){var t=function(e,t){if("object"!=typeof e||null===e)return e;var o=e[Symbol.toPrimitive];if(void 0!==o){var n=o.call(e,t||"default");if("object"!=typeof n)return n;throw new TypeError("@@toPrimitive must return a primitive value.")}return("string"===t?String:Number)(e)}(e,"string");return"symbol"==typeof t?t:String(t)}function C(e,t){(null==t||t>e.length)&&(t=e.length);for(var o=0,n=new Array(t);o<t;o++)n[o]=e[o];return n}function g(e,t,o){return g="undefined"!=typeof Reflect&&Reflect.get?Reflect.get:function(e,t,o){var n=function(e,t){for(;!Object.prototype.hasOwnProperty.call(e,t)&&null!==(e=m(e)););return e}(e,t);if(n){var i=Object.getOwnPropertyDescriptor(n,t);return i.get?i.get.call(o):i.value}},g(e,t,o||e)}function m(e){return m=Object.setPrototypeOf?Object.getPrototypeOf:function(e){return e.__proto__||Object.getPrototypeOf(e)},m(e)}const y="M12,4A4,4 0 0,1 16,8A4,4 0 0,1 12,12A4,4 0 0,1 8,8A4,4 0 0,1 12,4M12,14C16.42,14 20,15.79 20,18V20H4V18C4,15.79 7.58,14 12,14Z",A="M12,3A9,9 0 0,0 3,12H0L4,16L8,12H5A7,7 0 0,1 12,5A7,7 0 0,1 19,12A7,7 0 0,1 12,19C10.5,19 9.09,18.5 7.94,17.7L6.5,19.14C8.04,20.3 9.94,21 12,21A9,9 0 0,0 21,12A9,9 0 0,0 12,3M14,12A2,2 0 0,0 12,10A2,2 0 0,0 10,12A2,2 0 0,0 12,14A2,2 0 0,0 14,12Z",H="M12,15.5A3.5,3.5 0 0,1 8.5,12A3.5,3.5 0 0,1 12,8.5A3.5,3.5 0 0,1 15.5,12A3.5,3.5 0 0,1 12,15.5M19.43,12.97C19.47,12.65 19.5,12.33 19.5,12C19.5,11.67 19.47,11.34 19.43,11L21.54,9.37C21.73,9.22 21.78,8.95 21.66,8.73L19.66,5.27C19.54,5.05 19.27,4.96 19.05,5.05L16.56,6.05C16.04,5.66 15.5,5.32 14.87,5.07L14.5,2.42C14.46,2.18 14.25,2 14,2H10C9.75,2 9.54,2.18 9.5,2.42L9.13,5.07C8.5,5.32 7.96,5.66 7.44,6.05L4.95,5.05C4.73,4.96 4.46,5.05 4.34,5.27L2.34,8.73C2.21,8.95 2.27,9.22 2.46,9.37L4.57,11C4.53,11.34 4.5,11.67 4.5,12C4.5,12.33 4.53,12.65 4.57,12.97L2.46,14.63C2.27,14.78 2.21,15.05 2.34,15.27L4.34,18.73C4.46,18.95 4.73,19.03 4.95,18.95L7.44,17.94C7.96,18.34 8.5,18.68 9.13,18.93L9.5,21.58C9.54,21.82 9.75,22 10,22H14C14.25,22 14.46,21.82 14.5,21.58L14.87,18.93C15.5,18.67 16.04,18.34 16.56,17.94L19.05,18.95C19.27,19.03 19.54,18.95 19.66,18.73L21.66,15.27C21.78,15.05 21.73,14.78 21.54,14.63L19.43,12.97Z",V="M3 6H21V4H3C1.9 4 1 4.9 1 6V18C1 19.1 1.9 20 3 20H7V18H3V6M13 12H9V13.78C8.39 14.33 8 15.11 8 16C8 16.89 8.39 17.67 9 18.22V20H13V18.22C13.61 17.67 14 16.88 14 16S13.61 14.33 13 13.78V12M11 17.5C10.17 17.5 9.5 16.83 9.5 16S10.17 14.5 11 14.5 12.5 15.17 12.5 16 11.83 17.5 11 17.5M22 8H16C15.5 8 15 8.5 15 9V19C15 19.5 15.5 20 16 20H22C22.5 20 23 19.5 23 19V9C23 8.5 22.5 8 22 8M21 18H17V10H21V18Z",b="M18,6H13A2,2 0 0,0 11,8V10.28C10.41,10.62 10,11.26 10,12A2,2 0 0,0 12,14C13.11,14 14,13.1 14,12C14,11.26 13.6,10.62 
13,10.28V8H16V16H8V8H10V6H8L6,6V18H18M20,20H4V4H20M20,2H4A2,2 0 0,0 2,4V20A2,2 0 0,0 4,22H20C21.11,22 22,21.1 22,20V4C22,2.89 21.11,2 20,2Z",v="M20.5,11H19V7C19,5.89 18.1,5 17,5H13V3.5A2.5,2.5 0 0,0 10.5,1A2.5,2.5 0 0,0 8,3.5V5H4A2,2 0 0,0 2,7V10.8H3.5C5,10.8 6.2,12 6.2,13.5C6.2,15 5,16.2 3.5,16.2H2V20A2,2 0 0,0 4,22H7.8V20.5C7.8,19 9,17.8 10.5,17.8C12,17.8 13.2,19 13.2,20.5V22H17A2,2 0 0,0 19,20V16H20.5A2.5,2.5 0 0,0 23,13.5A2.5,2.5 0 0,0 20.5,11Z",P="M12,2A2,2 0 0,1 14,4C14,4.74 13.6,5.39 13,5.73V7H14A7,7 0 0,1 21,14H22A1,1 0 0,1 23,15V18A1,1 0 0,1 22,19H21V20A2,2 0 0,1 19,22H5A2,2 0 0,1 3,20V19H2A1,1 0 0,1 1,18V15A1,1 0 0,1 2,14H3A7,7 0 0,1 10,7H11V5.73C10.4,5.39 10,4.74 10,4A2,2 0 0,1 12,2M7.5,13A2.5,2.5 0 0,0 5,15.5A2.5,2.5 0 0,0 7.5,18A2.5,2.5 0 0,0 10,15.5A2.5,2.5 0 0,0 7.5,13M16.5,13A2.5,2.5 0 0,0 14,15.5A2.5,2.5 0 0,0 16.5,18A2.5,2.5 0 0,0 19,15.5A2.5,2.5 0 0,0 16.5,13Z",k="M11,13.5V21.5H3V13.5H11M12,2L17.5,11H6.5L12,2M17.5,13C20,13 22,15 22,17.5C22,20 20,22 17.5,22C15,22 13,20 13,17.5C13,15 15,13 17.5,13Z",w="M12.5 7C12.5 5.89 13.39 5 14.5 5H18C19.1 5 20 5.9 20 7V9.16C18.84 9.57 18 10.67 18 11.97V14H12.5V7M6 11.96V14H11.5V7C11.5 5.89 10.61 5 9.5 5H6C4.9 5 4 5.9 4 7V9.15C5.16 9.56 6 10.67 6 11.96M20.66 10.03C19.68 10.19 19 11.12 19 12.12V15H5V12C5 10.9 4.11 10 3 10S1 10.9 1 12V17C1 18.1 1.9 19 3 19V21H5V19H19V21H21V19C22.1 19 23 18.1 23 17V12C23 10.79 21.91 9.82 20.66 10.03Z",M="M13,3V9H21V3M13,21H21V11H13M3,21H11V15H3M3,13H11V3H3V13Z",L={dashboard:[{path:"/config/integrations",translationKey:"devices",iconPath:V,iconColor:"#0D47A1",core:!0},{path:"/config/automation",translationKey:"automations",iconPath:P,iconColor:"#518C43",core:!0},{path:"/config/areas",translationKey:"areas",iconPath:w,iconColor:"#E48629",components:["zone"]},{path:"/hassio",translationKey:"supervisor",iconPath:v,iconColor:"#F1C447",component:"hassio"},{path:"/config/lovelace/dashboards",translationKey:"dashboards",iconPath:M,iconColor:"#B1345C",component:"lovelace"},{path:"/config/tags",translationKey:"tags",iconPath:b,iconColor:"#616161",component:"tag"},{path:"/config/person",translationKey:"people",iconPath:y,iconColor:"#5A87FA",components:["person","users"]},{path:"#external-app-configuration",translationKey:"companion",iconPath:"M9.82,12.5C9.84,12.33 9.86,12.17 9.86,12C9.86,11.83 9.84,11.67 9.82,11.5L10.9,10.69C11,10.62 11,10.5 10.96,10.37L9.93,8.64C9.87,8.53 9.73,8.5 9.62,8.53L8.34,9.03C8.07,8.83 7.78,8.67 7.47,8.54L7.27,7.21C7.27,7.09 7.16,7 7.03,7H5C4.85,7 4.74,7.09 4.72,7.21L4.5,8.53C4.21,8.65 3.92,8.83 3.65,9L2.37,8.5C2.25,8.47 2.12,8.5 2.06,8.63L1.03,10.36C0.97,10.5 1,10.61 1.1,10.69L2.18,11.5C2.16,11.67 2.15,11.84 2.15,12C2.15,12.17 2.17,12.33 2.19,12.5L1.1,13.32C1,13.39 1,13.53 1.04,13.64L2.07,15.37C2.13,15.5 2.27,15.5 2.38,15.5L3.66,15C3.93,15.18 4.22,15.34 4.53,15.47L4.73,16.79C4.74,16.91 4.85,17 5,17H7.04C7.17,17 7.28,16.91 7.29,16.79L7.5,15.47C7.8,15.35 8.09,15.17 8.36,15L9.64,15.5C9.76,15.53 9.89,15.5 9.95,15.37L11,13.64C11.04,13.53 11,13.4 10.92,13.32L9.82,12.5M6,13.75C5,13.75 4.2,12.97 4.2,12C4.2,11.03 5,10.25 6,10.25C7,10.25 7.8,11.03 7.8,12C7.8,12.97 7,13.75 6,13.75M17,1H7A2,2 0 0,0 5,3V6H7V4H17V20H7V18H5V21A2,2 0 0,0 7,23H17A2,2 0 0,0 19,21V3A2,2 0 0,0 17,1Z",iconColor:"#8E24AA"},{path:"/config/system",translationKey:"system",iconPath:H,iconColor:"#301ABE",core:!0},{path:"/config/info",translationKey:"about",iconPath:"M11,9H13V7H11M12,20C7.59,20 4,16.41 4,12C4,7.59 7.59,4 12,4C16.41,4 20,7.59 20,12C20,16.41 16.41,20 12,20M12,2A10,10 0 0,0 2,12A10,10 0 0,0 12,22A10,10 0 0,0 22,12A10,10 
0 0,0 12,2M11,17H13V11H11V17Z",iconColor:"#4A5963",core:!0}],backup:[{path:"/config/backup",translationKey:"ui.panel.config.backup.caption",iconPath:A,iconColor:"#4084CD",component:"backup"}],devices:[{component:"integrations",path:"/config/integrations",translationKey:"ui.panel.config.integrations.caption",iconPath:v,iconColor:"#2D338F",core:!0},{component:"devices",path:"/config/devices",translationKey:"ui.panel.config.devices.caption",iconPath:V,iconColor:"#2D338F",core:!0},{component:"entities",path:"/config/entities",translationKey:"ui.panel.config.entities.caption",iconPath:k,iconColor:"#2D338F",core:!0},{component:"helpers",path:"/config/helpers",translationKey:"ui.panel.config.helpers.caption",iconPath:"M21.71 20.29L20.29 21.71A1 1 0 0 1 18.88 21.71L7 9.85A3.81 3.81 0 0 1 6 10A4 4 0 0 1 2.22 4.7L4.76 7.24L5.29 6.71L6.71 5.29L7.24 4.76L4.7 2.22A4 4 0 0 1 10 6A3.81 3.81 0 0 1 9.85 7L21.71 18.88A1 1 0 0 1 21.71 20.29M2.29 18.88A1 1 0 0 0 2.29 20.29L3.71 21.71A1 1 0 0 0 5.12 21.71L10.59 16.25L7.76 13.42M20 2L16 4V6L13.83 8.17L15.83 10.17L18 8H20L22 4Z",iconColor:"#4D2EA4",core:!0}],automations:[{component:"automation",path:"/config/automation",translationKey:"ui.panel.config.automation.caption",iconPath:P,iconColor:"#518C43"},{component:"scene",path:"/config/scene",translationKey:"ui.panel.config.scene.caption",iconPath:"M17.5,12A1.5,1.5 0 0,1 16,10.5A1.5,1.5 0 0,1 17.5,9A1.5,1.5 0 0,1 19,10.5A1.5,1.5 0 0,1 17.5,12M14.5,8A1.5,1.5 0 0,1 13,6.5A1.5,1.5 0 0,1 14.5,5A1.5,1.5 0 0,1 16,6.5A1.5,1.5 0 0,1 14.5,8M9.5,8A1.5,1.5 0 0,1 8,6.5A1.5,1.5 0 0,1 9.5,5A1.5,1.5 0 0,1 11,6.5A1.5,1.5 0 0,1 9.5,8M6.5,12A1.5,1.5 0 0,1 5,10.5A1.5,1.5 0 0,1 6.5,9A1.5,1.5 0 0,1 8,10.5A1.5,1.5 0 0,1 6.5,12M12,3A9,9 0 0,0 3,12A9,9 0 0,0 12,21A1.5,1.5 0 0,0 13.5,19.5C13.5,19.11 13.35,18.76 13.11,18.5C12.88,18.23 12.73,17.88 12.73,17.5A1.5,1.5 0 0,1 14.23,16H16A5,5 0 0,0 21,11C21,6.58 16.97,3 12,3Z",iconColor:"#518C43"},{component:"script",path:"/config/script",translationKey:"ui.panel.config.script.caption",iconPath:"M17.8,20C17.4,21.2 16.3,22 15,22H5C3.3,22 2,20.7 2,19V18H5L14.2,18C14.6,19.2 15.7,20 17,20H17.8M19,2C20.7,2 22,3.3 22,5V6H20V5C20,4.4 19.6,4 19,4C18.4,4 18,4.4 18,5V18H17C16.4,18 16,17.6 16,17V16H5V5C5,3.3 6.3,2 8,2H19M8,6V8H15V6H8M8,10V12H14V10H8Z",iconColor:"#518C43"},{component:"blueprint",path:"/config/blueprint",translationKey:"ui.panel.config.blueprint.caption",iconPath:"M2.53,19.65L3.87,20.21V11.18L1.44,17.04C1.03,18.06 1.5,19.23 2.53,19.65M22.03,15.95L17.07,4C16.76,3.23 16.03,2.77 15.26,2.75C15,2.75 14.73,2.79 14.47,2.9L7.1,5.95C6.35,6.26 5.89,7 5.87,7.75C5.86,8 5.91,8.29 6,8.55L11,20.5C11.29,21.28 12.03,21.74 12.81,21.75C13.07,21.75 13.33,21.7 13.58,21.6L20.94,18.55C21.96,18.13 22.45,16.96 22.03,15.95M7.88,8.75A1,1 0 0,1 6.88,7.75A1,1 0 0,1 7.88,6.75C8.43,6.75 8.88,7.2 8.88,7.75C8.88,8.3 8.43,8.75 7.88,8.75M5.88,19.75A2,2 0 0,0 7.88,21.75H9.33L5.88,13.41V19.75Z",iconColor:"#518C43"}],tags:[{component:"tag",path:"/config/tags",translationKey:"ui.panel.config.tag.caption",iconPath:b,iconColor:"#616161"}],energy:[{component:"energy",path:"/config/energy",translationKey:"ui.panel.config.energy.caption",iconPath:"M11 15H6L13 1V9H18L11 
23V15Z",iconColor:"#F1C447"}],lovelace:[{component:"lovelace",path:"/config/lovelace/dashboards",translationKey:"ui.panel.config.lovelace.caption",iconPath:M,iconColor:"#B1345C"}],persons:[{component:"person",path:"/config/person",translationKey:"ui.panel.config.person.caption",iconPath:y,iconColor:"#5A87FA"},{component:"users",path:"/config/users",translationKey:"ui.panel.config.users.caption",iconPath:"M22,4H14V7H10V4H2A2,2 0 0,0 0,6V20A2,2 0 0,0 2,22H22A2,2 0 0,0 24,20V6A2,2 0 0,0 22,4M8,9A2,2 0 0,1 10,11A2,2 0 0,1 8,13A2,2 0 0,1 6,11A2,2 0 0,1 8,9M12,17H4V16C4,14.67 6.67,14 8,14C9.33,14 12,14.67 12,16V17M20,18H14V16H20V18M20,14H14V12H20V14M20,10H14V8H20V10M13,6H11V2H13V6Z",iconColor:"#5A87FA",core:!0,advancedOnly:!0}],areas:[{component:"areas",path:"/config/areas",translationKey:"ui.panel.config.areas.caption",iconPath:w,iconColor:"#2D338F",core:!0},{component:"zone",path:"/config/zone",translationKey:"ui.panel.config.zone.caption",iconPath:"M12,2C15.31,2 18,4.66 18,7.95C18,12.41 12,19 12,19C12,19 6,12.41 6,7.95C6,4.66 8.69,2 12,2M12,6A2,2 0 0,0 10,8A2,2 0 0,0 12,10A2,2 0 0,0 14,8A2,2 0 0,0 12,6M20,19C20,21.21 16.42,23 12,23C7.58,23 4,21.21 4,19C4,17.71 5.22,16.56 7.11,15.83L7.75,16.74C6.67,17.19 6,17.81 6,18.5C6,19.88 8.69,21 12,21C15.31,21 18,19.88 18,18.5C18,17.81 17.33,17.19 16.25,16.74L16.89,15.83C18.78,16.56 20,17.71 20,19Z",iconColor:"#E48629"}],general:[{path:"/config/general",translationKey:"core",iconPath:H,iconColor:"#653249",core:!0},{path:"/config/updates",translationKey:"updates",iconPath:"M21,10.12H14.22L16.96,7.3C14.23,4.6 9.81,4.5 7.08,7.2C4.35,9.91 4.35,14.28 7.08,17C9.81,19.7 14.23,19.7 16.96,17C18.32,15.65 19,14.08 19,12.1H21C21,14.08 20.12,16.65 18.36,18.39C14.85,21.87 9.15,21.87 5.64,18.39C2.14,14.92 2.11,9.28 5.62,5.81C9.13,2.34 14.76,2.34 18.27,5.81L21,3V10.12M12.5,8V12.25L16,14.33L15.28,15.54L11,13V8H12.5Z",iconColor:"#3B808E"},{component:"logs",path:"/config/logs",translationKey:"logs",iconPath:"M18 7C16.9 7 16 7.9 16 9V15C16 16.1 16.9 17 18 17H20C21.1 17 22 16.1 22 15V11H20V15H18V9H22V7H18M2 7V17H8V15H4V7H2M11 7C9.9 7 9 7.9 9 9V15C9 16.1 9.9 17 11 17H13C14.1 17 15 16.1 15 15V9C15 7.9 14.1 7 13 7H11M11 9H13V15H11V9Z",iconColor:"#C65326",core:!0},{path:"/config/backup",translationKey:"backup",iconPath:A,iconColor:"#0D47A1",component:"backup"},{path:"/hassio/backups",translationKey:"backup",iconPath:A,iconColor:"#0D47A1",component:"hassio"},{path:"/config/analytics",translationKey:"analytics",iconPath:k,iconColor:"#f1c447"},{path:"/config/network",translationKey:"network",iconPath:"M17,3A2,2 0 0,1 19,5V15A2,2 0 0,1 17,17H13V19H14A1,1 0 0,1 15,20H22V22H15A1,1 0 0,1 14,23H10A1,1 0 0,1 9,22H2V20H9A1,1 0 0,1 10,19H11V17H7C5.89,17 5,16.1 5,15V5A2,2 0 0,1 7,3H17Z",iconColor:"#B1345C"},{path:"/config/storage",translationKey:"storage",iconPath:"M12,3C7.58,3 4,4.79 4,7C4,9.21 7.58,11 12,11C16.42,11 20,9.21 20,7C20,4.79 16.42,3 12,3M4,9V12C4,14.21 7.58,16 12,16C16.42,16 20,14.21 20,12V9C20,11.21 16.42,13 12,13C7.58,13 4,11.21 4,9M4,14V17C4,19.21 7.58,21 12,21C16.42,21 20,19.21 20,17V14C20,16.21 16.42,18 12,18C7.58,18 4,16.21 4,14Z",iconColor:"#518C43",component:"hassio"},{path:"/config/hardware",translationKey:"hardware",iconPath:"M17,17H7V7H17M21,11V9H19V7C19,5.89 18.1,5 17,5H15V3H13V5H11V3H9V5H7C5.89,5 5,5.89 5,7V9H3V11H5V13H3V15H5V17A2,2 0 0,0 7,19H9V21H11V19H13V21H15V19H17A2,2 0 0,0 
19,17V15H21V13H19V11M13,13H11V11H13M15,9H9V15H15V9Z",iconColor:"#301A8E",component:"hassio"},{path:"/config/system_health",translationKey:"system_health",iconPath:"M12,21.35L10.55,20.03C5.4,15.36 2,12.27 2,8.5C2,5.41 4.42,3 7.5,3C9.24,3 10.91,3.81 12,5.08C13.09,3.81 14.76,3 16.5,3C19.58,3 22,5.41 22,8.5C22,12.27 18.6,15.36 13.45,20.03L12,21.35Z",iconColor:"#507FfE",components:["system_health","hassio"]}],about:[{component:"info",path:"/config/info",translationKey:"ui.panel.config.info.caption",iconPath:"M13,9H11V7H13M13,17H11V11H13M12,2A10,10 0 0,0 2,12A10,10 0 0,0 12,22A10,10 0 0,0 22,12A10,10 0 0,0 12,2Z",iconColor:"#4A5963",core:!0}]};!function(e,t,o,n){var i=c();if(n)for(var a=0;a<n.length;a++)i=n[a](i);var r=t((function(e){i.initializeInstanceElements(e,s.elements)}),o),s=i.decorateClass(function(e){for(var t=[],o=function(e){return"method"===e.kind&&e.key===a.key&&e.placement===a.placement},n=0;n<e.length;n++){var i,a=e[n];if("method"===a.kind&&(i=t.find(o)))if(h(a.descriptor)||h(i.descriptor)){if(p(a)||p(i))throw new ReferenceError("Duplicated methods ("+a.key+") can't be decorated.");i.descriptor=a.descriptor}else{if(p(a)){if(p(i))throw new ReferenceError("Decorators can't be placed on different accessors with for the same property ("+a.key+").");i.decorators=a.decorators}d(a,i)}else t.push(a)}return t}(r.d.map(l)),e);i.initializeClassElements(r.F,s.elements),i.runClassFinishers(r.F,s.finishers)}([(0,n.Mo)("ha-panel-config")],(function(e,t){class s extends t{constructor(...t){super(...t),e(this)}}return{F:s,d:[{kind:"field",decorators:[(0,n.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"narrow",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"route",value:void 0},{kind:"field",key:"routerOptions",value:()=>({defaultPage:"dashboard",routes:{analytics:{tag:"ha-config-section-analytics",load:()=>Promise.all([o.e(41985),o.e(97142),o.e(33968),o.e(87534)]).then(o.bind(o,62396))},areas:{tag:"ha-config-areas",load:()=>Promise.all([o.e(51644),o.e(95916),o.e(1359),o.e(83567)]).then(o.bind(o,83567))},automation:{tag:"ha-config-automation",load:()=>Promise.all([o.e(29563),o.e(98985),o.e(24103),o.e(88278),o.e(59799),o.e(6294),o.e(41985),o.e(45507),o.e(5906),o.e(80339),o.e(51644),o.e(9874),o.e(95916),o.e(77426),o.e(49842),o.e(1548),o.e(49075),o.e(49540),o.e(42983),o.e(57188),o.e(90086),o.e(11520),o.e(39858),o.e(12545),o.e(13701),o.e(77576),o.e(29925),o.e(74535),o.e(65040),o.e(67065),o.e(93796),o.e(20515),o.e(5664),o.e(93677),o.e(3005),o.e(50148)]).then(o.bind(o,50148))},backup:{tag:"ha-config-backup",load:()=>Promise.all([o.e(29563),o.e(98985),o.e(24103),o.e(59799),o.e(6294),o.e(41985),o.e(9874),o.e(95916),o.e(49540),o.e(59038),o.e(29925),o.e(65040),o.e(1359),o.e(67065),o.e(12519),o.e(48429),o.e(46636)]).then(o.bind(o,46636))},blueprint:{tag:"ha-config-blueprint",load:()=>Promise.all([o.e(29563),o.e(98985),o.e(41985),o.e(9874),o.e(95916),o.e(49540),o.e(57188),o.e(65040),o.e(31742),o.e(1359),o.e(67065),o.e(12519),o.e(32958)]).then(o.bind(o,32958))},tags:{tag:"ha-config-tags",load:()=>Promise.all([o.e(29563),o.e(98985),o.e(41985),o.e(9874),o.e(95916),o.e(49540),o.e(65040),o.e(1359),o.e(67065),o.e(12519),o.e(89704)]).then(o.bind(o,89704))},cloud:{tag:"ha-config-cloud",load:()=>Promise.all([o.e(29563),o.e(98985),o.e(24103),o.e(88278),o.e(59799),o.e(6294),o.e(51644),o.e(99843),o.e(56489),o.e(94627)]).then(o.bind(o,57673))},devices:{tag:"ha-config-devices",load:()=>Promise.all([o.e(29563),o.e(98985),o.e(24103),o.e(59799),o.e(6294),o.e(41985),o.e(51644),
o.e(9874),o.e(95916),o.e(49842),o.e(1548),o.e(49075),o.e(49540),o.e(57188),o.e(90086),o.e(7662),o.e(29925),o.e(65040),o.e(31742),o.e(1359),o.e(3143),o.e(49644),o.e(67065),o.e(12519),o.e(5664),o.e(65403),o.e(87224),o.e(61198),o.e(2365)]).then(o.bind(o,2365))},system:{tag:"ha-config-system-navigation",load:()=>Promise.all([o.e(24103),o.e(37378),o.e(10105),o.e(43827),o.e(99417)]).then(o.bind(o,99417))},logs:{tag:"ha-config-logs",load:()=>Promise.all([o.e(29563),o.e(98985),o.e(24103),o.e(88278),o.e(59799),o.e(6294),o.e(51644),o.e(17175),o.e(65040),o.e(1359),o.e(54909),o.e(94066),o.e(91575)]).then(o.bind(o,46443))},info:{tag:"ha-config-info",load:()=>Promise.all([o.e(10105),o.e(17809)]).then(o.bind(o,17809))},customize:"dashboard",dashboard:{tag:"ha-config-dashboard",load:()=>Promise.all([o.e(24103),o.e(59799),o.e(6294),o.e(42983),o.e(17407),o.e(29925),o.e(31742),o.e(3143),o.e(49644),o.e(43827),o.e(66668),o.e(72182)]).then(o.bind(o,74222))},entities:{tag:"ha-config-entities",load:()=>Promise.all([o.e(29563),o.e(98985),o.e(24103),o.e(59799),o.e(6294),o.e(41985),o.e(9874),o.e(49540),o.e(3123),o.e(65040),o.e(1359),o.e(67065),o.e(12519),o.e(94009)]).then(o.bind(o,94009))},energy:{tag:"ha-config-energy",load:()=>Promise.all([o.e(38322),o.e(29925),o.e(31742),o.e(49644),o.e(55424),o.e(878),o.e(80148)]).then(o.bind(o,74313))},hardware:{tag:"ha-config-hardware",load:()=>Promise.all([o.e(24103),o.e(59799),o.e(6294),o.e(16013),o.e(10105),o.e(44544)]).then(o.bind(o,44544))},integrations:{tag:"ha-config-integrations",load:()=>Promise.all([o.e(29563),o.e(98985),o.e(24103),o.e(59799),o.e(6294),o.e(41985),o.e(51644),o.e(95916),o.e(98433),o.e(81480),o.e(55149),o.e(65040),o.e(1359),o.e(61198),o.e(86466)]).then(o.bind(o,47090))},lovelace:{tag:"ha-config-lovelace",load:()=>o.e(52730).then(o.bind(o,52730))},network:{tag:"ha-config-section-network",load:()=>Promise.all([o.e(29563),o.e(98985),o.e(24103),o.e(41985),o.e(45507),o.e(22001),o.e(25188),o.e(93152),o.e(10105),o.e(16003),o.e(57325)]).then(o.bind(o,83977))},person:{tag:"ha-config-person",load:()=>Promise.all([o.e(51644),o.e(95916),o.e(1359),o.e(77399)]).then(o.bind(o,77399))},script:{tag:"ha-config-script",load:()=>Promise.all([o.e(29563),o.e(98985),o.e(24103),o.e(88278),o.e(59799),o.e(6294),o.e(41985),o.e(45507),o.e(5906),o.e(80339),o.e(51644),o.e(9874),o.e(95916),o.e(77426),o.e(49842),o.e(1548),o.e(49075),o.e(49540),o.e(42983),o.e(57188),o.e(90086),o.e(11520),o.e(39858),o.e(12545),o.e(13701),o.e(77576),o.e(29925),o.e(74535),o.e(65040),o.e(67065),o.e(93796),o.e(20515),o.e(5664),o.e(93677),o.e(3005),o.e(50850)]).then(o.bind(o,50850))},scene:{tag:"ha-config-scene",load:()=>Promise.all([o.e(29563),o.e(98985),o.e(59799),o.e(41985),o.e(5906),o.e(80339),o.e(51644),o.e(9874),o.e(95916),o.e(49540),o.e(11520),o.e(77576),o.e(29925),o.e(74535),o.e(65040),o.e(31742),o.e(1359),o.e(3143),o.e(49644),o.e(67065),o.e(68101),o.e(12519),o.e(93677),o.e(24518)]).then(o.bind(o,38562))},helpers:{tag:"ha-config-helpers",load:()=>Promise.all([o.e(29563),o.e(98985),o.e(24103),o.e(59799),o.e(6294),o.e(41985),o.e(9874),o.e(95916),o.e(49540),o.e(17136),o.e(29925),o.e(65040),o.e(31742),o.e(1359),o.e(49644),o.e(67065),o.e(12519),o.e(48429),o.e(48352)]).then(o.bind(o,7916))},storage:{tag:"ha-config-section-storage",load:()=>Promise.all([o.e(24103),o.e(59799),o.e(6294),o.e(41985),o.e(1995),o.e(97142),o.e(10105),o.e(56880),o.e(33968),o.e(56738)]).then(o.bind(o,56738))},system_health:{tag:"ha-config-system-health",load:()=>Promise.all([o.e(24103),o.e(59799),o.e(6294),o.e(1995),o.e(56880),o.e(462
77)]).then(o.bind(o,46277))},updates:{tag:"ha-config-section-updates",load:()=>Promise.all([o.e(24103),o.e(59799),o.e(6294),o.e(1995),o.e(29925),o.e(31742),o.e(3143),o.e(49644),o.e(56880),o.e(66668),o.e(61775)]).then(o.bind(o,43955))},users:{tag:"ha-config-users",load:()=>Promise.all([o.e(29563),o.e(98985),o.e(41985),o.e(9874),o.e(95916),o.e(49540),o.e(65040),o.e(1359),o.e(67065),o.e(12519),o.e(47204)]).then(o.bind(o,47204))},zone:{tag:"ha-config-zone",load:()=>Promise.all([o.e(51644),o.e(95916),o.e(98433),o.e(46297),o.e(1359),o.e(23956),o.e(48456),o.e(22703)]).then(o.bind(o,10873))},general:{tag:"ha-config-section-general",load:()=>Promise.all([o.e(29563),o.e(98985),o.e(24103),o.e(88278),o.e(59799),o.e(6294),o.e(45507),o.e(63857),o.e(31742),o.e(23956),o.e(48456),o.e(66759)]).then(o.bind(o,19559))},zha:{tag:"zha-config-dashboard-router",load:()=>o.e(86094).then(o.bind(o,86094))},mqtt:{tag:"mqtt-config-panel",load:()=>Promise.all([o.e(49842),o.e(11833),o.e(53822),o.e(91089)]).then(o.bind(o,91089))},zwave_js:{tag:"zwave_js-config-router",load:()=>o.e(17100).then(o.bind(o,17100))}}})},{kind:"field",decorators:[(0,n.SB)()],key:"_wideSidebar",value:()=>!1},{kind:"field",decorators:[(0,n.SB)()],key:"_wide",value:()=>!1},{kind:"field",decorators:[(0,n.SB)()],key:"_cloudStatus",value:void 0},{kind:"field",key:"_listeners",value:()=>[]},{kind:"method",key:"connectedCallback",value:function(){g(m(s.prototype),"connectedCallback",this).call(this),this._listeners.push((0,a.K)("(min-width: 1040px)",(e=>{this._wide=e}))),this._listeners.push((0,a.K)("(min-width: 1296px)",(e=>{this._wideSidebar=e})))}},{kind:"method",key:"disconnectedCallback",value:function(){for(g(m(s.prototype),"disconnectedCallback",this).call(this);this._listeners.length;)this._listeners.pop()()}},{kind:"method",key:"firstUpdated",value:function(e){g(m(s.prototype),"firstUpdated",this).call(this,e),this.hass.loadBackendTranslation("title"),(0,i.p)(this.hass,"cloud")&&(this._updateCloudStatus(),this.addEventListener("connection-status",(e=>{"connected"===e.detail&&this._updateCloudStatus()}))),this.addEventListener("ha-refresh-cloud-status",(()=>this._updateCloudStatus())),this.style.setProperty("--app-header-background-color","var(--sidebar-background-color)"),this.style.setProperty("--app-header-text-color","var(--sidebar-text-color)"),this.style.setProperty("--app-header-border-bottom","1px solid var(--divider-color)"),this.style.setProperty("--ha-card-border-radius","var(--ha-config-card-border-radius, 8px)")}},{kind:"method",key:"updatePageEl",value:function(e){const t="docked"===this.hass.dockedSidebar?this._wideSidebar:this._wide;var o,n;"setProperties"in e?e.setProperties({route:this.routeTail,hass:this.hass,showAdvanced:Boolean(null===(o=this.hass.userData)||void 0===o?void 0:o.showAdvanced),isWide:t,narrow:this.narrow,cloudStatus:this._cloudStatus}):(e.route=this.routeTail,e.hass=this.hass,e.showAdvanced=Boolean(null===(n=this.hass.userData)||void 0===n?void 0:n.showAdvanced),e.isWide=t,e.narrow=this.narrow,e.cloudStatus=this._cloudStatus)}},{kind:"method",key:"_updateCloudStatus",value:async function(){this._cloudStatus=await(0,r.LI)(this.hass),("connecting"===this._cloudStatus.cloud||this._cloudStatus.logged_in&&this._cloudStatus.prefs.remote_enabled&&!this._cloudStatus.remote_connected)&&setTimeout((()=>this._updateCloudStatus()),5e3)}}]}}),s.n)}}]);
PypiClean
/trinket-0.1.5.tar.gz/trinket-0.1.5/docs/HISTORY.rst
*********
Changelog
*********

0.1.5 (2019-12-18)
==================

* Fixed body duplication on raw_body calls, due to the concatenation already
  happening in on_body, triggered by the reader.

0.1.4 (2019-12-16)
==================

* Dependencies maintenance: bumping versions to latest stable.

0.1.3 (2019-01-24)
==================

* Pinned specific package versions
* Updated to use the latest version of wsproto (0.13)
* The Trinket application now inherits from dict to allow arbitrary storage
* Decoupled the TCP serving/socket creation from the actual application

0.1.2 (2019-01-18)
==================

* Updated distribution files

0.1.1 (2019-01-18)
==================

* Added tests and testing utilities
* Fixed missing imports

0.1 (2019-01-17)
================

* Initial release
PypiClean
/cctbx_base-2020.8-0_py38h167b89d-cp38-cp38m-manylinux2010_x86_64.whl/wxtbx/phil_controls/strctrl.py
from __future__ import absolute_import, division, print_function from wxtbx.phil_controls.text_base import ValidatedTextCtrl, TextCtrlValidator from wxtbx import phil_controls import wxtbx from libtbx.phil import tokenizer from libtbx.utils import to_unicode, to_str from libtbx import Auto import libtbx.phil import wx import sys class StrCtrl(ValidatedTextCtrl): def __init__(self, *args, **kwds): kwds = dict(kwds) if (kwds.get("size", wx.DefaultSize) == wx.DefaultSize): kwds['size'] = (200,-1) super(StrCtrl, self).__init__(*args, **kwds) self._min_len = 0 self._max_len = sys.maxunicode def CreateValidator(self): return StrValidator() def SetValue(self, value): if (value in [None, Auto]): ValidatedTextCtrl.SetValue(self, "") else : if isinstance(value, str): if wxtbx.is_unicode_build(): ValidatedTextCtrl.SetValue(self, to_unicode(value)) else : ValidatedTextCtrl.SetValue(self, value) else : if (not isinstance(value, unicode)): raise RuntimeError("Improper value (type: %s) for control %s" % (type(value).__name__, self.GetName())) ValidatedTextCtrl.SetValue(self, value) def GetPhilValue(self): self.Validate() val_str = self.GetValue().strip() if (val_str in ["", "none", "None"]): return self.ReturnNoneIfOptional() return val_str def GetStringValue(self): value = self.GetPhilValue() if (value is None): return "None" else : return parse_str(value) def FormatValue(self, value): if wxtbx.is_unicode_build(): return to_str(value) else : return value def SetMinLength(self, n): assert (n >= 0) self._min_len = n def SetMaxLength(self, n): assert (n >= 1) self._max_len = n def GetMinLength(self): return self._min_len def GetMaxLength(self): return self._max_len class StrValidator(TextCtrlValidator): def CheckFormat(self, value): window = self.GetWindow() if ("$" in value): raise ValueError("The dollar symbol ($) may not be used here.") elif (len(value) > window.GetMaxLength()): raise ValueError("Value must be %d characters or less." % window.GetMaxLength()) elif (len(value) < window.GetMinLength()): raise ValueError("Value must be at least %d characters." % window.GetMinLength()) return value # XXX does anything else need to be done here? 
def parse_str(value): #value = value.decode("utf-8") try : word = tokenizer.word(value, quote_token='"""') phil_string = str(word) except ValueError as e : raise else : return phil_string if (__name__ == "__main__"): app = wx.App(0) frame = wx.Frame(None, -1, "String parameter test") panel = wx.Panel(frame, -1, size=(720,400)) txt1 = wx.StaticText(panel, -1, "Job title:", pos=(10,100)) ctrl1 = StrCtrl(panel, -1, value=None, pos=(160, 100), size=(400,-1), name="Job title") txt2 = wx.StaticText(panel, -1, "Output file prefix:", pos=(10,200)) ctrl2 = StrCtrl(panel, -1, value="refine", pos=(160,200), name="Output file prefix") ctrl2.SetOptional(False) btn = wx.Button(panel, -1, "Process input", pos=(400, 360)) btn2 = wx.Button(panel, -1, "Toggle title", pos=(200, 360)) master_phil = libtbx.phil.parse(""" title = None .type = str prefix = None .type = str""") def OnOkay(evt): print("""title = %s""" % ctrl1.GetStringValue()) title_phil = libtbx.phil.parse("""title = %s""" % ctrl1.GetStringValue()) prefix_phil = libtbx.phil.parse("""prefix = %s""" % ctrl2.GetStringValue()) p = master_phil.fetch(sources=[title_phil, prefix_phil]).extract() print("title recycled via phil:", p.title) print("prefix recycled via phil:", p.prefix) value1 = ctrl1.GetPhilValue() value2 = ctrl2.GetPhilValue() assert (p.title == value1), value1 assert (p.prefix == value2) assert (ctrl1.GetPhilValue() is None) assert (ctrl1.GetStringValue() == "None") inp_str = """This string has bad; characters f\"""" ctrl1.SetValue(inp_str) assert (ctrl1.GetPhilValue() == inp_str) #assert (ctrl1.GetStringValue() == '"This string has bad; characters f\\""') title_phil = libtbx.phil.parse("""title = %s""" % ctrl1.GetStringValue()) p = master_phil.fetch(source=title_phil).extract() assert (p.title == inp_str) assert (ctrl2.GetPhilValue() == "refine") assert (ctrl2.GetStringValue() == '"""refine"""') def OnChange(evt): pass def OnToggle(evt): if (ctrl1.IsEnabled()) : ctrl1.Enable(False) else : ctrl1.Enable() frame.Bind(wx.EVT_BUTTON, OnOkay, btn) frame.Bind(phil_controls.EVT_PHIL_CONTROL, OnChange) frame.Bind(wx.EVT_BUTTON, OnToggle, btn2) frame.Fit() frame.Show() app.MainLoop()
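# Note (illustrative, not part of the original module): parse_str wraps plain values
# in phil triple-quoted words, e.g. parse_str("refine") returns '"""refine"""', which
# is what the GetStringValue() assertion in the demo above relies on. Values containing
# '$' are rejected earlier by StrValidator.CheckFormat, and SetMinLength/SetMaxLength
# bound the accepted length before the value ever reaches parse_str.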
PypiClean
/ais_dom-2023.7.2-py3-none-any.whl/homeassistant/components/lightwave/light.py
from __future__ import annotations

from typing import Any

from homeassistant.components.light import ATTR_BRIGHTNESS, ColorMode, LightEntity
from homeassistant.const import CONF_NAME
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import LIGHTWAVE_LINK

MAX_BRIGHTNESS = 255


async def async_setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    async_add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Find and return LightWave lights."""
    if not discovery_info:
        return

    lights = []
    lwlink = hass.data[LIGHTWAVE_LINK]

    for device_id, device_config in discovery_info.items():
        name = device_config[CONF_NAME]
        lights.append(LWRFLight(name, device_id, lwlink))

    async_add_entities(lights)


class LWRFLight(LightEntity):
    """Representation of a LightWaveRF light."""

    _attr_color_mode = ColorMode.BRIGHTNESS
    _attr_supported_color_modes = {ColorMode.BRIGHTNESS}
    _attr_should_poll = False

    def __init__(self, name, device_id, lwlink):
        """Initialize LWRFLight entity."""
        self._attr_name = name
        self._device_id = device_id
        self._attr_brightness = MAX_BRIGHTNESS
        self._lwlink = lwlink

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn the LightWave light on."""
        self._attr_is_on = True
        if ATTR_BRIGHTNESS in kwargs:
            self._attr_brightness = kwargs[ATTR_BRIGHTNESS]
        if self._attr_brightness != MAX_BRIGHTNESS:
            self._lwlink.turn_on_with_brightness(
                self._device_id, self._attr_name, self._attr_brightness
            )
        else:
            self._lwlink.turn_on_light(self._device_id, self._attr_name)
        self.async_write_ha_state()

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the LightWave light off."""
        self._attr_is_on = False
        self._lwlink.turn_off(self._device_id, self._attr_name)
        self.async_write_ha_state()
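# Illustrative note (not part of the original platform module): the discovery_info
# mapping that async_setup_platform expects, inferred from the loop above. The device
# ids and names below are hypothetical examples.
#
#     discovery_info = {
#         "R1D2": {CONF_NAME: "Living room lamp"},
#         "R1D3": {CONF_NAME: "Hall light"},
#     }
#
# Each entry becomes one LWRFLight entity bound to the shared link object that the
# integration is assumed to have stored under hass.data[LIGHTWAVE_LINK] during setup.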
PypiClean
/release-bot-0.7.1.tar.gz/release-bot-0.7.1/release_bot/github.py
import logging import os import time import re from release_bot.exceptions import ReleaseException, GitException from release_bot.utils import insert_in_changelog, parse_changelog, look_for_version_files import jwt import requests logger = logging.getLogger('release-bot') # github app auth code "stolen" from https://github.com/swinton/github-app-demo.py class JWTAuth(requests.auth.AuthBase): def __init__(self, iss, key, expiration=10 * 60): self.iss = iss self.key = key self.expiration = expiration def generate_token(self): # Generate the JWT payload = { # issued at time 'iat': int(time.time()), # JWT expiration time (10 minute maximum) 'exp': int(time.time()) + self.expiration, # GitHub App's identifier 'iss': self.iss } tok = jwt.encode(payload, self.key, algorithm='RS256') return tok.decode('utf-8') def __call__(self, r): r.headers['Authorization'] = 'bearer {}'.format(self.generate_token()) return r class GitHubApp: def __init__(self, app_id, private_key_path): self.session = requests.Session() self.session.headers.update(dict( accept='application/vnd.github.machine-man-preview+json')) self.private_key = None self.private_key_path = private_key_path self.session.auth = JWTAuth(iss=app_id, key=self.read_private_key()) self.domain = 'api.github.com' # not sure if it makes sense to make this configurable def _request(self, method, path): response = self.session.request(method, 'https://{}/{}'.format(self.domain, path)) return response.json() def _get(self, path): return self._request('GET', path) def _post(self, path): return self._request('POST', path) def read_private_key(self): if self.private_key is None: with open(self.private_key_path) as fp: self.private_key = fp.read() return self.private_key def get_app(self): return self._get('app') def get_installations(self): return self._get('app/installations') def get_installation_access_token(self, installation_id): return self._post('installations/{}/access_tokens'.format(installation_id))["token"] class Github: API_ENDPOINT = "https://api.github.com/graphql" API3_ENDPOINT = "https://api.github.com/" def __init__(self, configuration, git): """ :param configuration: instance of Configuration :param git: instance of Git """ self.conf = configuration self.logger = configuration.logger self.session = requests.Session() self.session.headers.update({'Authorization': f'token {configuration.github_token}'}) self.github_app_session = None if self.conf.github_app_installation_id and self.conf.github_app_id and self.conf.github_app_cert_path: self.github_app_session = requests.Session() self.github_app = GitHubApp(self.conf.github_app_id, self.conf.github_app_cert_path) self.update_github_app_token() self.comment = [] self.git = git def update_github_app_token(self): token = self.github_app.get_installation_access_token(self.conf.github_app_installation_id) self.logger.debug("github app token obtained") self.github_app_session.headers.update({'Authorization': f'token {token}'}) def do_request(self, query=None, method=None, json_payload=None, url=None, use_github_auth=False): """ a single wrapper to make any type of request: * query using graphql * a request with selected method and json payload * utilizing both tokens: github app and user token this method returns requests.Response so that methods can play with return code :param query: :param method: :param json_payload: :param url: :param use_github_auth: auth as github app, not as user (default is user) :return: requests.Response """ if query: self.logger.debug(f'query = {query}') if 
use_github_auth and self.github_app_session: response = self.github_app_session.post(url=self.API_ENDPOINT, json={'query': query}) else: response = self.session.post(url=self.API_ENDPOINT, json={'query': query}) if response.status_code == 401 and self.github_app_session: self.update_github_app_token() response = self.github_app_session.post(url=self.API_ENDPOINT, json={'query': query}) elif method and url: self.logger.debug(f'{method} {url}') if use_github_auth and self.github_app_session: response = self.github_app_session.request(method=method, url=url, json=json_payload) else: response = self.session.request(method=method, url=url, json=json_payload) if response.status_code == 401 and self.github_app_session: self.update_github_app_token() response = self.github_app_session.request(method=method, url=url, json=json_payload) if not response.ok: self.logger.error(f"error message: {response.content}") else: raise RuntimeError("please specify query or both method and url") return response def query_repository(self, query): """ Query a Github repo using GraphQL API :param query: str :return: requests.Response """ repo_query = (f'query {{repository(owner: "{self.conf.repository_owner}", ' f'name: "{self.conf.repository_name}") {{{query}}}}}') return self.do_request(query=repo_query) def add_comment(self, subject_id): """Add self.comment to subject_id issue/PR""" if not subject_id or not self.comment: return if self.conf.dry_run: self.logger.info("I would add a comment to the pull request created.") return None comment = '\n'.join(self.comment) mutation = (f'mutation {{addComment(input:' f'{{subjectId: "{subject_id}", body: "{comment}"}})' + '''{ subject { id } }}''') response = self.do_request(query=mutation, use_github_auth=True).json() self.detect_api_errors(response) self.logger.debug(f'Comment added to PR: {comment}') self.comment = [] # clean up return response @staticmethod def detect_api_errors(response): """This function looks for errors in API response""" msg = '\n'.join((err['message'] for err in response.get('errors', []))) if msg: raise ReleaseException(msg) def latest_release(self, cursor=''): """ Get the latest project release number on Github. Ignores drafts and pre releases :return: Release number or 0.0.0 """ query = (f"releases(last: 1 " + (f'before:"{cursor}"' if cursor else '') + '''){ edges{ cursor node { isPrerelease isDraft tagName } } } ''') response = self.query_repository(query).json() self.detect_api_errors(response) # check for empty response edges = response['data']['repository']['releases']['edges'] if not edges: self.logger.debug("There is no github release") return '0.0.0' release = edges[0]['node'] # check for pre-release / draft if release['isPrerelease'] or release['isDraft']: self.logger.debug("Latest github release is a Prerelease/Draft") return self.latest_release(cursor=edges[0]['cursor']) return release["tagName"] def walk_through_prs(self, start='', direction='after', which="last", closed=True): """ Searches merged pull requests :param start: A cursor to start at :param direction: Direction to go from cursor, can be 'after' or 'before' :param which: Indicates which part of the result list should be returned, can be 'first' or 'last' :param closed: filters PRs by state (closed/open). 
True by default :return: edges from API query response """ state = 'MERGED' if closed else 'OPEN' while True: query = (f"pullRequests(states: {state} {which}: 5 " + (f'{direction}: "{start}"' if start else '') + '''){ edges { cursor node { id title number mergeCommit { oid author { name email } } } } }''') response = self.query_repository(query).json() self.detect_api_errors(response) return response['data']['repository']['pullRequests']['edges'] def walk_through_open_issues(self, start='', direction='after', which="last"): """ Searches open issues for a release trigger :return: edges from API query response """ while True: query = (f"issues(states: OPEN {which}: 5 " + (f'{direction}: "{start}"' if start else '') + '''){ edges { cursor node { id number title authorAssociation } } }''') response = self.query_repository(query).json() self.detect_api_errors(response) return response['data']['repository']['issues']['edges'] def make_new_release(self, new_release): """ Makes new release to Github. This has to be done using github api v3 because v4 (GraphQL) doesn't support this yet :param new_release: version number of the new release :return: tuple (released, new_release) - released is bool, new_release contains info about the new release """ payload = {"tag_name": new_release.version, "target_commitish": new_release.commitish, "name": new_release.version, "prerelease": False, "draft": False} url = (f"{self.API3_ENDPOINT}repos/{self.conf.repository_owner}/" f"{self.conf.repository_name}/releases") self.logger.debug(f"About to release {new_release.version} on Github") response = self.do_request(method="POST", url=url, json_payload=payload, use_github_auth=True) if response.status_code != 201: msg = f"Failed to create new release on github:\n{response.text}" raise ReleaseException(msg) return True, new_release def update_changelog(self, new_version): self.git.fetch_tags() self.git.checkout(new_version) # FIXME: make the file name configurable p = os.path.join(self.git.repo_path, "CHANGELOG.md") try: with open(p, "r") as fd: changelog_content = fd.read() except FileNotFoundError: logger.info("CHANGELOG.md not found") return finally: self.git.checkout('master') # get latest release changelog = parse_changelog(new_version, changelog_content) url = (f"{self.API3_ENDPOINT}repos/{self.conf.repository_owner}/" f"{self.conf.repository_name}/releases/latest") latest_release = self.do_request(method="GET", url=url, use_github_auth=True).json() # check if the changelog needs updating if latest_release["body"] == changelog: return url = (f"{self.API3_ENDPOINT}repos/{self.conf.repository_owner}/" f"{self.conf.repository_name}/releases/{latest_release['id']}") response = self.do_request(method="POST", url=url, json_payload={'body': changelog}, use_github_auth=True) if response.status_code != 200: self.logger.error((f"Something went wrong during changelog " f"update for {new_version}:\n{response.text}")) def branch_exists(self, branch): """ Makes a call to github api to check if branch already exists :param branch: name of the branch :return: True if exists, False if not """ url = (f"{self.API3_ENDPOINT}repos/{self.conf.repository_owner}/" f"{self.conf.repository_name}/branches/{branch}") response = self.do_request(method="GET", url=url) if response.status_code == 200: return True elif response.status_code == 404: self.logger.debug(response.text) return False else: msg = f"Unexpected response code from Github:\n{response.text}" raise ReleaseException(msg) def make_pr(self, branch, version, log, 
changed_version_files, base='master', labels=None): """ Makes a pull request with info on the new release :param branch: name of the branch to make PR from :param version: version that is being released :param log: changelog :param changed_version_files: list of files that have been changed in order to update version :param base: base of the PR. 'master' by default :param labels: list of str, labels to be put on PR :return: url of the PR """ message = (f'Hi,\n you have requested a release PR from me. Here it is!\n' f'This is the changelog I created:\n' f'### Changes\n{log}\n\nYou can change it by editing `CHANGELOG.md` ' f'in the root of this repository and pushing to `{branch}` branch' f' before merging this PR.\n') if len(changed_version_files) == 1: message += 'I have also updated the `__version__ ` in file:\n' elif len(changed_version_files) > 1: message += ('There were multiple files where `__version__ ` was set, ' 'so I left updating them up to you. These are the files:\n') elif not changed_version_files: message += "I didn't find any files where `__version__` is set." for file in changed_version_files: message += f'* {file}\n' payload = {'title': f'{version} release', 'head': branch, 'base': base, 'body': message, 'maintainer_can_modify': True} url = (f"{self.API3_ENDPOINT}repos/{self.conf.repository_owner}/" f"{self.conf.repository_name}/pulls") self.logger.debug(f'Attempting a PR for {branch} branch') response = self.do_request(method="POST", url=url, json_payload=payload, use_github_auth=True) if response.status_code == 201: parsed = response.json() self.logger.info(f"Created PR: {parsed['html_url']}") # put labels on PR if labels is not None: self.put_labels_on_issue(parsed['number'], labels) return parsed['html_url'] else: msg = (f"Something went wrong with creating " f"PR on github:\n{response.text}") raise ReleaseException(msg) def make_release_pr(self, new_pr): """ Makes the steps to prepare new branch for the release PR, like generating changelog and updating version :param new_pr: dict with info about the new release :return: True on success, False on fail """ repo = new_pr.repo version = new_pr.version branch = f'{version}-release' if self.branch_exists(branch): self.logger.warning(f'Branch {branch} already exists, aborting creating PR.') return False if self.conf.dry_run: msg = (f"I would make a new PR for release of version " f"{version} based on the issue.") self.logger.info(msg) return False try: name, email = self.get_user_contact() repo.set_credentials(name, email) repo.set_credential_store() # The bot first checks out the master branch and from master # it creates the new branch, checks out to it and then perform the release # This makes sure that the new release_pr branch has all the commits # from the master branch for the lastest release. 
repo.checkout('master') changelog = repo.get_log_since_last_release(new_pr.previous_version) repo.checkout_new_branch(branch) changed = look_for_version_files(repo.repo_path, new_pr.version) if insert_in_changelog(f'{repo.repo_path}/CHANGELOG.md', new_pr.version, changelog): repo.add(['CHANGELOG.md']) if changed: repo.add(changed) repo.commit(f'{version} release', allow_empty=True) repo.push(branch) if not self.pr_exists(f'{version} release'): new_pr.pr_url = self.make_pr(branch, f'{version}', changelog, changed, labels=new_pr.labels) return True except GitException as exc: raise ReleaseException(exc) finally: repo.checkout('master') return False def pr_exists(self, name): """ Makes a call to github api to check if PR already exists :param name: name of the PR :return: PR number if exists, False if not """ cursor = '' while True: edges = self.walk_through_prs(start=cursor, direction='before', closed=False) if not edges: self.logger.debug(f"No open PR's found") return False for edge in reversed(edges): cursor = edge['cursor'] match = re.match(name, edge['node']['title'].lower()) if match: return edge['node']['number'] def get_user_contact(self): """ Makes a call to github api to get user's contact details :return: name and email """ query = (f'query {{user(login: "{self.conf.github_username}")' ''' { email name } }''') response = self.do_request(query=query).json() self.detect_api_errors(response) name = response['data']['user']['name'] email = response['data']['user']['email'] if not name: name = 'Release bot' if not email: email = '[email protected]' return name, email def close_issue(self, number): """ Close an github issue :param number: number of the issue in repository :return: True on success, False on fail """ payload = {'state': 'closed'} url = (f"{self.API3_ENDPOINT}repos/{self.conf.repository_owner}/" f"{self.conf.repository_name}/issues/{number}") self.logger.debug(f'Attempting to close issue #{number}') response = self.do_request(method='PATCH', url=url, json_payload=payload, use_github_auth=True) if response.status_code == 200: self.logger.debug(f'Closed issue #{number}') return True self.logger.error(f'Failed to close issue #{number}') return False def put_labels_on_issue(self, number, labels): """ Put labels on Github issue or PR :param number: number of issue/PR :param labels: list of str :return: True on success, False on fail """ if self.conf.dry_run: self.logger.info("I would add labels to issue #%s", number) return False payload = {'labels': labels} url = (f"{self.API3_ENDPOINT}repos/{self.conf.repository_owner}/" f"{self.conf.repository_name}/issues/{number}") self.logger.debug(f'Attempting to put labels on issue/PR #{number}') response = self.do_request(method='PATCH', url=url, json_payload=payload, use_github_auth=True) if response.status_code == 200: self.logger.debug(f'Following labels: #{",".join(labels)} put on issue #{number}:') return True self.logger.error(f'Failed to put labels on issue #{number}') return False def get_file(self, name): """ Fetches a specific file via Github API :return: file content or None in case of error """ url = (f"{self.API3_ENDPOINT}repos/{self.conf.repository_owner}/" f"{self.conf.repository_name}/contents/{name}") self.logger.debug(f'Fetching {name}') response = self.do_request(url=url, method='GET') if response.status_code != 200: self.logger.error(f'Failed to fetch {name}') return None parsed = response.json() download_url = parsed['download_url'] response = requests.get(url=download_url) if response.status_code != 200: 
self.logger.error(f'Failed to fetch {name}') return None return response.text
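# --- Illustrative usage sketch (not part of the release-bot package) ---
# The app id, key path and the follow-up session handling are placeholder
# assumptions; only methods defined above are called.
if __name__ == "__main__":
    app = GitHubApp(app_id="12345", private_key_path="/path/to/private-key.pem")
    installations = app.get_installations()
    if installations:
        # GitHub returns a list of installation objects, each carrying an "id"
        token = app.get_installation_access_token(installations[0]["id"])
        # the installation token can then be used for ordinary REST calls
        session = requests.Session()
        session.headers.update({'Authorization': f'token {token}'})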
PypiClean
/zepben.evolve-0.35.0b2-py3-none-any.whl/zepben/evolve/examples/simple_node_breaker_feeder.py
from zepben.evolve import NetworkService, DiagramService, Diagram, DiagramStyle, BaseVoltage, PositionPoint, Location, Feeder, EnergySource, \ PowerTransformerInfo, DiagramObject, AcLineSegment, ConductingEquipment, Junction, EnergyConsumer, PowerTransformer, DiagramObjectPoint, ConnectivityNode __all__ = ["SimpleNodeBreakerFeeder"] class SimpleNodeBreakerFeeder: def __init__(self, breaker_is_open=False): self.breaker_is_open = breaker_is_open self.network_service: NetworkService = NetworkService() self.diagram_service: DiagramService = DiagramService() self.diagram: Diagram = Diagram(diagram_style=DiagramStyle.GEOGRAPHIC) self._create_network_service() self._create_diagram_service() def _create_network_service(self): # Create BaseVoltages # noinspection PyArgumentList bv_hv: BaseVoltage = BaseVoltage(mrid="20kV", nominal_voltage=20000, name="20kV") # noinspection PyArgumentList bv_lv: BaseVoltage = BaseVoltage(mrid="415V", nominal_voltage=3000, name="415V") self.network_service.add(bv_hv) self.network_service.add(bv_lv) # Create Locations for buses # noinspection PyArgumentList point1 = PositionPoint(x_position=149.12791965570293, y_position=-35.277592101000934) # noinspection PyArgumentList point2 = PositionPoint(x_position=149.12779472660375, y_position=-35.278183862759285) loc1 = Location().add_point(point1) loc2 = Location().add_point(point2) # Create connectivity_nodes cn1 = ConnectivityNode(name="cn1") cn2 = ConnectivityNode(name="cn2") cn3 = ConnectivityNode(name="cn3") cn4 = ConnectivityNode(name="cn4") # Create EnergySource # noinspection PyUnresolvedReferences energy_source: EnergySource = self.network_service.create_energy_source(cn=cn1, base_voltage=bv_hv, voltage_magnitude=1.02 * bv_hv.nominal_voltage, name="Grid Connection", location=loc1) # TODO: Replace createEnergySource with creation of EquivalentInjection # Create Feeder fdr = Feeder(normal_head_terminal=energy_source.get_terminal_by_sn(1)) self.network_service.add(fdr) # Create Transformer # noinspection PyUnresolvedReferences self.network_service.create_two_winding_power_transformer(cn1=cn1, cn2=cn2, name="Trafo", location=loc1, asset_info=PowerTransformerInfo()) # TODO: Associate the PowerTransformerInfo() to th PowerTransformer instance # TODO: Add ptInfo= self.network_service.getAvailablePowerTransformerInfo("0.4 MVA 20/0.4 kV") # Create Breaker # noinspection PyUnresolvedReferences breaker = self.network_service.create_breaker(cn1=cn2, cn2=cn3, base_voltage=bv_lv) breaker.set_open(self.breaker_is_open) # Create location for the Line line_location = Location().add_point(point1).add_point(point2) self.network_service.add(line_location) # Create Line # noinspection PyUnresolvedReferences self.network_service.create_ac_line_segment(cn1=cn3, cn2=cn4, name="Line", length=100., base_voltage=bv_lv, location=line_location) # Create EnergyConsumer # noinspection PyUnresolvedReferences self.network_service.create_energy_consumer(cn=cn3, p=100000., q=50000., name="Load", location=loc2, base_voltage=bv_lv) def _create_diagram_service(self): self.diagram_service.add(self.diagram) self._add_diagram_objects() # TODO: In ?voltages geo view the acls does not appear. 
def _add_diagram_objects(self): # Add DiagramObject for ConductingEquipments ce_list = self.network_service.objects(ConductingEquipment) for ce in ce_list: # diagram_object_mapping = defaultdict( # lambda: DiagramObject(identified_object_mrid=ce.mrid, style="JUNCTION", # diagram=self.diagram)) if isinstance(ce, Junction): diagram_object = DiagramObject(identified_object_mrid=ce.mrid, style="JUNCTION", diagram=self.diagram) elif isinstance(ce, EnergySource): diagram_object = DiagramObject(identified_object_mrid=ce.mrid, style="ENERGY_SOURCE", diagram=self.diagram) elif isinstance(ce, EnergyConsumer): diagram_object = DiagramObject(identified_object_mrid=ce.mrid, style="USAGE_POINT", diagram=self.diagram) elif isinstance(ce, PowerTransformer): diagram_object = DiagramObject(identified_object_mrid=ce.mrid, style="DIST_TRANSFORMER", diagram=self.diagram) elif isinstance(ce, AcLineSegment): diagram_object = self._add_diagram_objects_to_ac_line_segment(ce) else: diagram_object = DiagramObject(identified_object_mrid=ce.mrid, style="JUNCTION", diagram=self.diagram) self.diagram.add_diagram_object(diagram_object) self.diagram_service.add(diagram_object) def _add_diagram_objects_to_ac_line_segment(self, ac_line_segment: AcLineSegment): # Create DiagramObject for AcLineSegments diagram_object = DiagramObject(diagram=self.diagram) diagram_object.mrid = ac_line_segment.mrid + "-do" diagram_object.style = "CONDUCTOR_LV" diagram_object.diagram = self.diagram for position_point in ac_line_segment.location.points: # noinspection PyArgumentList diagram_point = DiagramObjectPoint(x_position=position_point.x_position, y_position=position_point.y_position) diagram_object.add_point(diagram_point) return diagram_object
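# --- Illustrative usage sketch (not part of the module); it only touches
# --- attributes and calls that appear in the class above.
if __name__ == "__main__":
    feeder = SimpleNodeBreakerFeeder(breaker_is_open=True)
    network = feeder.network_service      # populated NetworkService
    diagrams = feeder.diagram_service     # populated DiagramService
    print(len(list(network.objects(ConductingEquipment))), "conducting equipment objects")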
PypiClean
/ObjectListView2-1.0.0.tar.gz/ObjectListView2-1.0.0/docs/groupListView.rst
.. -*- coding: UTF-8 -*-
.. include:: _substitutions.txt

.. _using-grouplistview:

Using a GroupListView
=====================

A flat list is enough in many cases, but sometimes it would be really nice to be able to put the model objects into groups, making it easier for users to see where something belongs.

It's nice that our database of songs can be sorted by "Album," but it would be even nicer if the control was able to put all the tracks for an album together under their own title.

This is what a `GroupListView` does. And it looks like this:

.. image:: images/grouplist-example1.png

Understanding the process
-------------------------

To make a GroupListView work, the control needs to collect the model objects into different groups. This is done in the following steps:

1. Calculate a "group key" for each object.
2. All objects with the same group key are placed into the same group.
3. The group key is converted into a string, which becomes the title of the group.

Understanding this simple process is the key to working with a `GroupListView`. Remember this, and you will have conquered the `GroupListView`.

Getting the "group key"
-----------------------

The "group key" for an object is normally whatever value the object has in the "group by" column. So if a list of tracks is being grouped by the "Artist" column, the group key for a track will be, for example, "Coldplay" or "Nickelback".

However, that isn't always the best group key. For example, without any other configuration, if we group our tracks by the "Last Played" column, every track ends up in its own group (bonus points if you can explain why).

If we want the tracks to be more usefully grouped, we will need to install a group key getter for the "Last Played" column. In this case, we want all tracks that were played in the same month to be placed into the same group. So for each track, we want to calculate the month it was last played and return that as the group key. We would do this by creating a function and then installing it as the group key getter for the column::

    def lastPlayedGroupKey(track):
        # We only want to group tracks by the month in which they were played
        return datetime.date(track.lastPlayed.year, track.lastPlayed.month, 1)
    ...
    ColumnDefn("Last Played", "left", 100, "lastPlayed", groupKeyGetter=lastPlayedGroupKey)

The *groupKeyGetter* can be specified in the same ways that a *valueGetter* can be specified:

1. a callable that accepts the model whose group key should be calculated.
2. a string which will be treated as:

   - the name of a parameter-less instance method
   - the name of an instance variable
   - an index into a dictionary-like object

3. an integer, used as an index into an indexable collection

Grouping by initial letter
^^^^^^^^^^^^^^^^^^^^^^^^^^

One common pattern is for objects to be grouped by the first letter of a string value. For example on the "Title" column, all tracks starting with "A" would be grouped together. This is so common that there is a built-in way to do it: set useInitialLetterForGroupKey to True for a column::

    ColumnDefn("Title", "left", 120, "title", imageGetter=musicImage, useInitialLetterForGroupKey=True)

Converting the "group key" to title
-----------------------------------

Once the group keys have been calculated for each model object, and all the model objects with the same group key have been collected into their respective groups, we are almost ready to present the groups to the user. The final remaining step is to decide what to call the group.
The name of a group is normally its group key converted to a string. This works well when the group key is a string, and reasonably well for other data types, but sometimes you need something different. In those cases, you can install a *groupKeyConverter* on the column::

    def lastPlayedGroupKeyConverter(groupKey):
        # Convert the given group key (which is a date) into a representation string
        return groupKey.strftime("%B %Y")
    ...
    ColumnDefn("Last Played", "left", 100, "lastPlayed", groupKeyGetter=lastPlayedGroupKey, groupKeyConverter=lastPlayedGroupKeyConverter)

Here our group key is the first of the month in which the track was last played. Without a *groupKeyConverter*, the title of the groups would look like "2008/05/01". But with our *groupKeyConverter*, the title of the groups ends up like "May 2008", which is nicer.

The *groupKeyConverter* operates in the same way that a *stringConverter* operates.

Using `SetGroups()` directly
----------------------------

All of the above steps are used when you give the `GroupListView` a straight list of model objects, leaving the `GroupListView` to convert the model objects into groups.

It is also possible for the programmer to manually create the groups and then tell the GroupListView to show the groups that the programmer has created.

Each group is represented by a `ListGroup` object. A `ListGroup` basically consists of a title and a list of model objects that are to be shown in the group.

Once the programmer has created a list of `ListGroup` objects, they should be given to the `SetGroups()` method (a short sketch appears at the end of this page). The order of the groups in the list, and the order of the model objects in the group, are the order in which they will be presented to the user.

If you manually create the groups, you will need to handle sorting yourself, or turn off sorting altogether. This is necessary since the `GroupListView` will not know how to recalculate the groups.

Customizing using events
------------------------

A `GroupListView` triggers several events which allow the programmer to change key behaviours of the control.

EVT_GROUP_CREATING
    Triggered when a new collection of groups has been created but not yet
    displayed to the user. The handler of this event can make any changes
    they like to the groups, including the names of the group and the
    members.

EVT_GROUP_SORT
    Triggered when the groups need to be sorted. Both the groups themselves
    and the model objects within each group should be sorted. The handler of
    this event should call `Handled()` on the event, otherwise normal sort
    processing will occur.

EVT_EXPANDING
    Triggered when one or more groups is being expanded. The handler of this
    event can call `Veto()` to prevent the groups from being expanded.

EVT_EXPANDED
    Triggered after one or more groups have been expanded. This is a
    notification event only.

EVT_COLLAPSING
    Triggered when one or more groups is being collapsed. The handler of
    this event can call `Veto()` to prevent the groups from being collapsed.

EVT_COLLAPSED
    Triggered after one or more groups have been collapsed. This is a
    notification event only.

Other capabilities
------------------

A `GroupListView` can stop showing groups and revert to a straight `ObjectListView` by calling *SetShowGroups(False)*.
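Example: setting groups manually
----------------------------------

This is a minimal sketch of the *Using SetGroups() directly* approach described above. The exact `ListGroup` constructor arguments, the ``olv`` control and the ``track*`` model objects are illustrative assumptions; check the `ListGroup` class for the real signature::

    # Build the groups by hand (constructor arguments are assumed here)
    coldplay = ListGroup("Coldplay", "Coldplay")
    coldplay.modelObjects = [track1, track2]

    nickelback = ListGroup("Nickelback", "Nickelback")
    nickelback.modelObjects = [track3]

    # Hand the finished groups to the control; the order given here is
    # the order in which they will be shown
    olv.SetShowGroups(True)
    olv.SetGroups([coldplay, nickelback])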
PypiClean
/smartsheet-python-sdk-3.0.2.tar.gz/smartsheet-python-sdk-3.0.2/ADVANCED.md
# Advanced Topics for the Smartsheet SDK for Python

## Manual install

The following packages are required.

* [setuptools](https://pypi.org/project/setuptools/)
* [six](https://pypi.python.org/pypi/six)
* [requests](https://pypi.python.org/pypi/requests)

To install this SDK manually:

1. Clone the source code from this repo [GitHub](https://github.com/smartsheet-python-sdk)
2. Install the required packages:

   ```bash
   pip install setuptools six requests
   ```

3. Ensure you are in the `smartsheet-python-sdk` directory
4. Install it using setup.py:

   ```bash
   python setup.py install
   ```

## Logging

There are three log levels currently supported by the Smartsheet Python SDK (in increasing order of verbosity):

**ERROR** - messages related to API or JSON serialization errors

**INFO** - messages about the API resources being requested

**DEBUG** - API request and response bodies and messages regarding object attributes that are changed by the SDK due to the nature of the API call being made

Use the logging facility's [basicConfig](https://docs.python.org/2/library/logging.html#logging.basicConfig) method to set your logging properties:

```python
import logging
logging.basicConfig(filename='mylog.log', level=logging.DEBUG)
```

## Passthrough Option

If there is an API feature that is not yet supported by the Python SDK, there is a passthrough option that allows you to pass and receive raw JSON objects.

To invoke the passthrough, your code can call one of the following four methods:

`response = client.Passthrough.get(endpoint, query_params)`

`response = client.Passthrough.post(endpoint, payload, query_params)`

`response = client.Passthrough.put(endpoint, payload, query_params)`

`response = client.Passthrough.delete(endpoint)`

* `endpoint`: The specific API endpoint you wish to invoke. The client object base URL gets prepended to the caller’s endpoint URL argument, so in the above `get` example, if endpoint is `'/sheets'` an HTTP GET is requested from the URL `https://api.smartsheet.com/2.0/sheets`
* `payload`: The data to be passed through, can be either a dictionary or string.
* `query_params`: An optional dictionary of query parameters.

All calls to passthrough methods return a JSON result. The `data` attribute contains the JSON result as a dictionary. For example, after a PUT operation the API's result message will be contained in `response.data['message']`. If you prefer raw JSON instead of a dictionary, you can use the `to_json()` method, for example `response.to_json()`.

### Passthrough Example

The following example shows how to POST data to `https://api.smartsheet.com/2.0/sheets` using the passthrough method and a dictionary:

```python
payload = {"name": "my new sheet",
           "columns": [
               {"title": "Favorite", "type": "CHECKBOX", "symbol": "STAR"},
               {"title": "Primary Column", "primary": True, "type": "TEXT_NUMBER"}
           ]
           }
response = client.Passthrough.post('/sheets', payload)
```

## Testing

### Integration Tests

1. Follow the instructions [here](tests/integration/README.md)
2. Run `pytest tests/integration`

### Mock API Tests

**NOTE:** the mock API tests will fail unless the mock server is running.

1. Clone the [Smartsheet SDK tests](https://github.com/smartsheet-platform/smartsheet-sdk-tests) repo and follow the instructions from the README to start the mock server
2. Run `pytest tests/mock_api`

## HTTP Proxy

The following example shows how to enable a proxy by providing a `proxies` argument when initializing the Smartsheet client.
```python
# Initialize client
proxies = {
    'https': 'http://127.0.0.1:8888'
}

smartsheet_client = smartsheet.Smartsheet(proxies=proxies)
```

## Event Reporting

The following sample demonstrates best practices for consuming the event stream from the Smartsheet Event Reporting feature.

The sample uses the `smartsheet_client.Events.list_events` method to request a list of events from the stream. The first request sets the `since` parameter with the point in time (i.e. event occurrence datetime) in the stream from which to start consuming events. The `since` parameter can be set with a datetime value that is either formatted as ISO 8601 (e.g. 2010-01-01T00:00:00Z) or as UNIX epoch (in which case the `numeric_dates` parameter must also be set to `True`; by default the `numeric_dates` parameter is set to `False`).

To consume the next list of events after the initial list of events is returned, set the `stream_position` parameter with the `next_stream_position` property obtained from the previous request and don't set the `since` parameter with any values. This is because when using the `list_events` method, either the `since` parameter or the `stream_position` parameter should be set, but never both.

Note that the `more_available` property in a response indicates whether more events are immediately available for consumption. If events are not immediately available, they may still be generating, so subsequent requests should keep using the same `stream_position` value until the next list of events is retrieved.

Many events have additional information available as a part of the event. That information can be accessed using the Python dictionary stored in the `additional_details` property. (Note that values of the `additional_details` dictionary use camelCase/JSON names, e.g. `sheetName` not `sheet_name`.) Information about the additional details provided can be found [here.](https://smartsheet.redoc.ly/tag/eventsDescription)

```python
# imports required by this sample
import smartsheet
from datetime import datetime, timedelta

# this example is looking specifically for new sheet events
def print_new_sheet_events_in_list(events_list):
    # enumerate all events in the list of returned events
    for event in events_list.data:
        # find all created sheets
        if event.object_type == smartsheet.models.enums.EventObjectType.SHEET and event.action == smartsheet.models.enums.EventAction.CREATE:
            # additional details are available for some events, they can be accessed as a Python dictionary
            # in the additional_details attribute
            print(event.additional_details['sheetName'])

smartsheet_client = smartsheet.Smartsheet()
smartsheet_client.errors_as_exceptions()

# begin listing events in the stream starting with the `since` parameter
last_week = datetime.now() - timedelta(days=7)
# this example looks at the previous 7 days of events by providing a `since` argument set to last week's date in ISO format
events_list = smartsheet_client.Events.list_events(since=last_week.isoformat(), max_count=1000)
print_new_sheet_events_in_list(events_list)

# continue listing events in the stream by using the stream_position, if the previous response indicates that more
# data is available.
while events_list.more_available:
    events_list = smartsheet_client.Events.list_events(stream_position=events_list.next_stream_position, max_count=10000, numeric_dates=True)
    print_new_sheet_events_in_list(events_list)
```

## Working with Smartsheetgov.com Accounts

If you need to access Smartsheetgov you will need to specify the Smartsheetgov API URI as the base URI during creation of the Smartsheet client object.
Smartsheetgov uses a base URI of <https://api.smartsheetgov.com/2.0/>. The base URI is defined as a constant (`smartsheet.__gov_base__`).

You can create a client using the Smartsheetgov.com URI using the api_base parameter:

```python
client = smartsheet.Smartsheet(api_base=smartsheet.__gov_base__)
```

## Working With Smartsheet Regions Europe Accounts

If you need to access Smartsheet Regions Europe you will need to specify the Smartsheet.eu API URI as the base URI during creation of the Smartsheet client object.

Smartsheet.eu uses a base URI of <https://api.smartsheet.eu/2.0/>. The base URI is defined as a constant (`smartsheet._eu_base_`).

You can create a client using the Smartsheet.eu URI using the api_base parameter:

```python
client = smartsheet.Smartsheet(api_base=smartsheet._eu_base_)
```
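## Checking a Client Against an Alternate Base URI

A quick way to sanity-check a client created against a non-default base URI is the Passthrough option described earlier. This is an illustrative sketch only; `query_params` is omitted here, which the parameter description above indicates is allowed:

```python
# create a client pointed at the Smartsheet Regions Europe base URI
client = smartsheet.Smartsheet(api_base=smartsheet._eu_base_)

# list sheets through the raw passthrough GET and inspect the parsed result
response = client.Passthrough.get('/sheets')
print(response.data)
```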
PypiClean
/sourtimes-0.0.7.tar.gz/sourtimes-0.0.7/README.md
![PyPI](https://img.shields.io/pypi/v/sourtimes?color=blue) [![GitHub license](https://img.shields.io/github/license/kgbzen/sourtimes)](https://github.com/kgbzen/sourtimes/blob/main/LICENSE) [![GitHub issues](https://img.shields.io/github/issues/kgbzen/sourtimes)](https://github.com/kgbzen/sourtimes/issues)

The Python Eksisozluk API Wrapper

## Features

* Get autocomplete results
* Get ```gündem``` titles
* Get ```debe``` titles
* Search for titles with parameters
* Get entries from a page

## Documentation

https://sourtimes.readthedocs.io/

## PyPI

https://pypi.org/project/sourtimes/

## Installation

```
pip install sourtimes
```

## Quickstart

```
from sourtimes import Sour

eksi = Sour()
q = eksi.autocomplete("uzun a")
print(q.titles)
```
PypiClean
/superset-d1-0.26.3.tar.gz/superset-d1-0.26.3/superset/dataframe.py
from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from datetime import date, datetime import logging import numpy as np import pandas as pd from pandas.core.common import _maybe_box_datetimelike from pandas.core.dtypes.dtypes import ExtensionDtype from past.builtins import basestring from superset.utils import JS_MAX_INTEGER INFER_COL_TYPES_THRESHOLD = 95 INFER_COL_TYPES_SAMPLE_SIZE = 100 def dedup(l, suffix='__'): """De-duplicates a list of string by suffixing a counter Always returns the same number of entries as provided, and always returns unique values. >>> print(','.join(dedup(['foo', 'bar', 'bar', 'bar']))) foo,bar,bar__1,bar__2 """ new_l = [] seen = {} for s in l: if s in seen: seen[s] += 1 s += suffix + str(seen[s]) else: seen[s] = 0 new_l.append(s) return new_l class SupersetDataFrame(object): # Mapping numpy dtype.char to generic database types type_map = { 'b': 'BOOL', # boolean 'i': 'INT', # (signed) integer 'u': 'INT', # unsigned integer 'l': 'INT', # 64bit integer 'f': 'FLOAT', # floating-point 'c': 'FLOAT', # complex-floating point 'm': None, # timedelta 'M': 'DATETIME', # datetime 'O': 'OBJECT', # (Python) objects 'S': 'BYTE', # (byte-)string 'U': 'STRING', # Unicode 'V': None, # raw data (void) } def __init__(self, data, cursor_description, db_engine_spec): column_names = [] if cursor_description: column_names = [col[0] for col in cursor_description] self.column_names = dedup( db_engine_spec.get_normalized_column_names(cursor_description)) data = data or [] self.df = ( pd.DataFrame(list(data), columns=column_names).infer_objects()) self._type_dict = {} try: # The driver may not be passing a cursor.description self._type_dict = { col: db_engine_spec.get_datatype(cursor_description[i][1]) for i, col in enumerate(self.column_names) if cursor_description } except Exception as e: logging.exception(e) @property def size(self): return len(self.df.index) @property def data(self): # work around for https://github.com/pandas-dev/pandas/issues/18372 data = [dict((k, _maybe_box_datetimelike(v)) for k, v in zip(self.df.columns, np.atleast_1d(row))) for row in self.df.values] for d in data: for k, v in list(d.items()): # if an int is too big for Java Script to handle # convert it to a string if isinstance(v, int): if abs(v) > JS_MAX_INTEGER: d[k] = str(v) return data @classmethod def db_type(cls, dtype): """Given a numpy dtype, Returns a generic database type""" if isinstance(dtype, ExtensionDtype): return cls.type_map.get(dtype.kind) elif hasattr(dtype, 'char'): return cls.type_map.get(dtype.char) @classmethod def datetime_conversion_rate(cls, data_series): success = 0 total = 0 for value in data_series: total += 1 try: pd.to_datetime(value) success += 1 except Exception: continue return 100 * success / total @classmethod def is_date(cls, dtype): if dtype.name: return dtype.name.startswith('datetime') @classmethod def is_dimension(cls, dtype, column_name): if cls.is_id(column_name): return False return dtype.name in ('object', 'bool') @classmethod def is_id(cls, column_name): return column_name.startswith('id') or column_name.endswith('id') @classmethod def agg_func(cls, dtype, column_name): # consider checking for key substring too. 
if cls.is_id(column_name): return 'count_distinct' if (hasattr(dtype, 'type') and issubclass(dtype.type, np.generic) and np.issubdtype(dtype, np.number)): return 'sum' return None @property def columns(self): """Provides metadata about columns for data visualization. :return: dict, with the fields name, type, is_date, is_dim and agg. """ if self.df.empty: return None columns = [] sample_size = min(INFER_COL_TYPES_SAMPLE_SIZE, len(self.df.index)) sample = self.df if sample_size: sample = self.df.sample(sample_size) for col in self.df.dtypes.keys(): col_db_type = ( self._type_dict.get(col) or self.db_type(self.df.dtypes[col]) ) column = { 'name': col, 'agg': self.agg_func(self.df.dtypes[col], col), 'type': col_db_type, 'is_date': self.is_date(self.df.dtypes[col]), 'is_dim': self.is_dimension(self.df.dtypes[col], col), } if column['type'] in ('OBJECT', None): v = sample[col].iloc[0] if not sample[col].empty else None if isinstance(v, basestring): column['type'] = 'STRING' elif isinstance(v, int): column['type'] = 'INT' elif isinstance(v, float): column['type'] = 'FLOAT' elif isinstance(v, (datetime, date)): column['type'] = 'DATETIME' column['is_date'] = True column['is_dim'] = False # check if encoded datetime if ( column['type'] == 'STRING' and self.datetime_conversion_rate(sample[col]) > INFER_COL_TYPES_THRESHOLD): column.update({ 'is_date': True, 'is_dim': False, 'agg': None, }) # 'agg' is optional attribute if not column['agg']: column.pop('agg', None) columns.append(column) return columns
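# --- Illustrative snippet (not part of Superset): exercises the standalone
# --- helpers defined above; the expected values follow from type_map, is_id
# --- and agg_func as written.
if __name__ == "__main__":
    print(dedup(['foo', 'bar', 'bar']))                              # ['foo', 'bar', 'bar__1']
    print(SupersetDataFrame.db_type(np.dtype('int64')))              # 'INT'
    print(SupersetDataFrame.is_id('user_id'))                        # True
    print(SupersetDataFrame.agg_func(np.dtype('float64'), 'price'))  # 'sum'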
PypiClean
/quast-5.2.0.tar.gz/quast-5.2.0/quast_libs/site_packages/joblib2/disk.py
import errno import os import shutil import sys import time def disk_used(path): """ Return the disk usage in a directory.""" size = 0 for file in os.listdir(path) + ['.']: stat = os.stat(os.path.join(path, file)) if hasattr(stat, 'st_blocks'): size += stat.st_blocks * 512 else: # on some platform st_blocks is not available (e.g., Windows) # approximate by rounding to next multiple of 512 size += (stat.st_size // 512 + 1) * 512 # We need to convert to int to avoid having longs on some systems (we # don't want longs to avoid problems we SQLite) return int(size / 1024.) def memstr_to_kbytes(text): """ Convert a memory text to it's value in kilobytes. """ kilo = 1024 units = dict(K=1, M=kilo, G=kilo ** 2) try: size = int(units[text[-1]] * float(text[:-1])) except (KeyError, ValueError): raise ValueError( "Invalid literal for size give: %s (type %s) should be " "alike '10G', '500M', '50K'." % (text, type(text)) ) return size def mkdirp(d): """Ensure directory d exists (like mkdir -p on Unix) No guarantee that the directory is writable. """ try: os.makedirs(d) except OSError, e: if e.errno != errno.EEXIST: raise # if a rmtree operation fails in rm_subdirs, wait for this much time (in secs), # then retry once. if it still fails, raise the exception RM_SUBDIRS_RETRY_TIME = 0.1 def rm_subdirs(path, onerror=None): """Remove all subdirectories in this path. The directory indicated by `path` is left in place, and its subdirectories are erased. If onerror is set, it is called to handle the error with arguments (func, path, exc_info) where func is os.listdir, os.remove, or os.rmdir; path is the argument to that function that caused it to fail; and exc_info is a tuple returned by sys.exc_info(). If onerror is None, an exception is raised. """ # NOTE this code is adapted from the one in shutil.rmtree, and is # just as fast names = [] try: names = os.listdir(path) except os.error, err: if onerror is not None: onerror(os.listdir, path, sys.exc_info()) else: raise for name in names: fullname = os.path.join(path, name) if os.path.isdir(fullname): if onerror is not None: shutil.rmtree(fullname, False, onerror) else: # allow the rmtree to fail once, wait and re-try. # if the error is raised again, fail err_count = 0 while True: try: shutil.rmtree(fullname, False, None) break except os.error, err: if err_count > 0: raise err_count += 1 time.sleep(RM_SUBDIRS_RETRY_TIME)
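# --- Illustrative usage (not part of joblib2), kept in the module's Python 2
# --- style. '500M' converts to 500 * 1024 kilobytes per memstr_to_kbytes above;
# --- the directory path is a placeholder.
if __name__ == '__main__':
    print memstr_to_kbytes('500M')          # 512000
    mkdirp('/tmp/joblib2_cache')            # no error if the directory already exists
    print disk_used('/tmp/joblib2_cache')   # disk usage in kilobytes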
PypiClean
/django-bulbs-3.25.0.tar.gz/django-bulbs-3.25.0/bulbs/promotion/migrations/0001_initial.py
from __future__ import unicode_literals from django.db import models, migrations import json_field.fields class Migration(migrations.Migration): dependencies = [ ('contenttypes', '0001_initial'), ('content', '0001_initial'), ] operations = [ migrations.CreateModel( name='ContentList', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('name', models.SlugField(unique=True)), ('length', models.IntegerField(default=10)), ('data', json_field.fields.JSONField(default=[], help_text='Enter a valid JSON object')), ], options={ }, bases=(models.Model,), ), migrations.CreateModel( name='ContentListHistory', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('data', json_field.fields.JSONField(default=[], help_text='Enter a valid JSON object')), ('date', models.DateTimeField(auto_now_add=True)), ('content_list', models.ForeignKey(related_name='history', to='promotion.ContentList')), ], options={ }, bases=(models.Model,), ), migrations.CreateModel( name='ContentListOperation', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('when', models.DateTimeField()), ('applied', models.BooleanField(default=False)), ], options={ 'ordering': ['-when'], }, bases=(models.Model,), ), migrations.CreateModel( name='InsertOperation', fields=[ ('contentlistoperation_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='promotion.ContentListOperation')), ('index', models.IntegerField(default=0)), ('lock', models.BooleanField(default=False)), ('content', models.ForeignKey(related_name='+', to='content.Content')), ], options={ 'abstract': False, }, bases=('promotion.contentlistoperation',), ), migrations.CreateModel( name='LockOperation', fields=[ ('contentlistoperation_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='promotion.ContentListOperation')), ('target', models.ForeignKey(related_name='+', to='content.Content')), ], options={ 'abstract': False, }, bases=('promotion.contentlistoperation',), ), migrations.CreateModel( name='ReplaceOperation', fields=[ ('contentlistoperation_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='promotion.ContentListOperation')), ('lock', models.BooleanField(default=False)), ('content', models.ForeignKey(related_name='+', to='content.Content')), ('target', models.ForeignKey(related_name='+', to='content.Content')), ], options={ 'abstract': False, }, bases=('promotion.contentlistoperation',), ), migrations.CreateModel( name='UnlockOperation', fields=[ ('contentlistoperation_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='promotion.ContentListOperation')), ('target', models.ForeignKey(related_name='+', to='content.Content')), ], options={ 'abstract': False, }, bases=('promotion.contentlistoperation',), ), migrations.AddField( model_name='contentlistoperation', name='content_list', field=models.ForeignKey(related_name='operations', to='promotion.ContentList'), preserve_default=True, ), migrations.AddField( model_name='contentlistoperation', name='polymorphic_ctype', field=models.ForeignKey(related_name='polymorphic_promotion.contentlistoperation_set', editable=False, to='contenttypes.ContentType', null=True), preserve_default=True, ), ]
PypiClean
/ansible-kkvesper-2.3.2.0.tar.gz/ansible-kkvesper-2.3.2.0/lib/ansible/errors/__init__.py
# Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type from ansible.errors.yaml_strings import ( YAML_POSITION_DETAILS, YAML_COMMON_UNQUOTED_VARIABLE_ERROR, YAML_COMMON_DICT_ERROR, YAML_COMMON_UNQUOTED_COLON_ERROR, YAML_COMMON_PARTIALLY_QUOTED_LINE_ERROR, YAML_COMMON_UNBALANCED_QUOTES_ERROR, YAML_COMMON_LEADING_TAB_ERROR) from ansible.module_utils._text import to_native, to_text class AnsibleError(Exception): ''' This is the base class for all errors raised from Ansible code, and can be instantiated with two optional parameters beyond the error message to control whether detailed information is displayed when the error occurred while parsing a data file of some kind. Usage: raise AnsibleError('some message here', obj=obj, show_content=True) Where "obj" is some subclass of ansible.parsing.yaml.objects.AnsibleBaseYAMLObject, which should be returned by the DataLoader() class. ''' def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False): # we import this here to prevent an import loop problem, # since the objects code also imports ansible.errors from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject self._obj = obj self._show_content = show_content if obj and isinstance(obj, AnsibleBaseYAMLObject): extended_error = self._get_extended_error() if extended_error and not suppress_extended_error: self.message = '%s\n\n%s' % (to_native(message), to_native(extended_error)) else: self.message = '%s' % to_native(message) else: self.message = '%s' % to_native(message) def __str__(self): return self.message def __repr__(self): return self.message def _get_error_lines_from_file(self, file_name, line_number): ''' Returns the line in the file which corresponds to the reported error location, as well as the line preceding it (if the error did not occur on the first line), to provide context to the error. ''' target_line = '' prev_line = '' with open(file_name, 'r') as f: lines = f.readlines() target_line = lines[line_number] if line_number > 0: prev_line = lines[line_number - 1] return (target_line, prev_line) def _get_extended_error(self): ''' Given an object reporting the location of the exception in a file, return detailed information regarding it including: * the line which caused the error as well as the one preceding it * causes and suggested remedies for common syntax errors If this error was created with show_content=False, the reporting of content is suppressed, as the file contents may be sensitive (ie. vault data). ''' error_message = '' try: (src_file, line_number, col_number) = self._obj.ansible_pos error_message += YAML_POSITION_DETAILS % (src_file, line_number, col_number) if src_file not in ('<string>', '<unicode>') and self._show_content: (target_line, prev_line) = self._get_error_lines_from_file(src_file, line_number - 1) target_line = to_text(target_line) prev_line = to_text(prev_line) if target_line: stripped_line = target_line.replace(" ","") arrow_line = (" " * (col_number-1)) + "^ here" #header_line = ("=" * 73) error_message += "\nThe offending line appears to be:\n\n%s\n%s\n%s\n" % (prev_line.rstrip(), target_line.rstrip(), arrow_line) # TODO: There may be cases where there is a valid tab in a line that has other errors. 
if '\t' in target_line: error_message += YAML_COMMON_LEADING_TAB_ERROR # common error/remediation checking here: # check for unquoted vars starting lines if ('{{' in target_line and '}}' in target_line) and ('"{{' not in target_line or "'{{" not in target_line): error_message += YAML_COMMON_UNQUOTED_VARIABLE_ERROR # check for common dictionary mistakes elif ":{{" in stripped_line and "}}" in stripped_line: error_message += YAML_COMMON_DICT_ERROR # check for common unquoted colon mistakes elif len(target_line) and len(target_line) > 1 and len(target_line) > col_number and target_line[col_number] == ":" and target_line.count(':') > 1: error_message += YAML_COMMON_UNQUOTED_COLON_ERROR # otherwise, check for some common quoting mistakes else: parts = target_line.split(":") if len(parts) > 1: middle = parts[1].strip() match = False unbalanced = False if middle.startswith("'") and not middle.endswith("'"): match = True elif middle.startswith('"') and not middle.endswith('"'): match = True if len(middle) > 0 and middle[0] in [ '"', "'" ] and middle[-1] in [ '"', "'" ] and target_line.count("'") > 2 or target_line.count('"') > 2: unbalanced = True if match: error_message += YAML_COMMON_PARTIALLY_QUOTED_LINE_ERROR if unbalanced: error_message += YAML_COMMON_UNBALANCED_QUOTES_ERROR except (IOError, TypeError): error_message += '\n(could not open file to display line)' except IndexError: error_message += '\n(specified line no longer in file, maybe it changed?)' return error_message class AnsibleOptionsError(AnsibleError): ''' bad or incomplete options passed ''' pass class AnsibleParserError(AnsibleError): ''' something was detected early that is wrong about a playbook or data file ''' pass class AnsibleInternalError(AnsibleError): ''' internal safeguards tripped, something happened in the code that should never happen ''' pass class AnsibleRuntimeError(AnsibleError): ''' ansible had a problem while running a playbook ''' pass class AnsibleModuleError(AnsibleRuntimeError): ''' a module failed somehow ''' pass class AnsibleConnectionFailure(AnsibleRuntimeError): ''' the transport / connection_plugin had a fatal error ''' pass class AnsibleFilterError(AnsibleRuntimeError): ''' a templating failure ''' pass class AnsibleLookupError(AnsibleRuntimeError): ''' a lookup failure ''' pass class AnsibleCallbackError(AnsibleRuntimeError): ''' a callback failure ''' pass class AnsibleUndefinedVariable(AnsibleRuntimeError): ''' a templating failure ''' pass class AnsibleFileNotFound(AnsibleRuntimeError): ''' a file missing failure ''' pass class AnsibleModuleExit(Exception): ''' local module exit ''' def __init__(self, result): self.result = result class AnsibleActionSkip(AnsibleRuntimeError): ''' an action runtime skip''' pass class AnsibleActionFail(AnsibleRuntimeError): ''' an action runtime failure''' pass
PypiClean