/nef_pipelines-0.1.46.tar.gz/nef_pipelines-0.1.46/src/nef_pipelines/tools/frames/delete.py
import inspect
import sys
from fnmatch import fnmatch
from typing import List

import typer
from pynmrstar import Entry

from nef_pipelines.lib.util import exit_error, running_in_pycharm
from nef_pipelines.tools.frames import frames_app

UNDERSCORE = "_"

parser = None


# noinspection PyUnusedLocal
@frames_app.command()
def delete(
    use_categories: bool = typer.Option(
        False,
        "-c",
        "--category",
        help="if selected, use the category of the frame to select it for deletion rather than its name",
    ),
    exact: bool = typer.Option(
        False, "-e", "--exact", help="don't treat the name as a wildcard"
    ),
    selectors: List[str] = typer.Argument(
        ...,
        help="a list of frames to delete by type or name; names can be wildcards, and have leading _'s and "
        "surrounding back quotes ` removed",
    ),
):
    """- delete frames in the current input by type or name"""

    entry = _create_entry_from_stdin_or_exit(current_function())

    to_delete = []
    # check each frame in the entry against each selector
    for frame in entry:
        frame_full_name = frame.name
        frame_category = frame.category
        frame_name = frame_full_name[len(frame_category) :].lstrip("_").strip("`")

        for selector in selectors:
            if not exact:
                selector = f"*{selector}*"

            if use_categories:
                if fnmatch(frame_category, selector):
                    to_delete.append(frame)
            else:
                if fnmatch(frame_name, selector):
                    to_delete.append(frame)

    entry.remove_saveframe(to_delete)

    print(entry)


def current_function():
    return inspect.stack()[1][3]


def calling_function():
    return inspect.stack()[2][3]


# TODO: This should be a library function
def _create_entry_from_stdin_or_exit(command_name: str):
    try:
        if sys.stdin.isatty():
            exit_error(
                f"the command {command_name} reads from stdin and there is no stream..."
            )

        if running_in_pycharm():
            exit_error("you can't read from stdin in pycharm...")

        lines = list(iter(sys.stdin))

        if len(lines) == 0:
            exit_error(
                f"the command {command_name} reads from stdin and the stream is empty..."
            )

        entry = Entry.from_string("".join(lines))
    except Exception as e:
        exit_error(f"failed to read nef entry from stdin because {e}", e)

    return entry
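# --- Usage sketch (not part of the module) -----------------------------------
# Hypothetical invocation, assuming the usual NEF-Pipelines `nef` entry point
# (the file and frame names are illustrative assumptions):
#
#     cat entry.nef | nef frames delete --category nef_molecular_system
#
# which would delete every frame whose category matches *nef_molecular_system*.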
/smart_home_tng-2023.1.3.tar.gz/smart_home_tng-2023.1.3/nodejs/frontend/src/panels/lovelace/cards/energy/hui-energy-compare-card.ts
import { differenceInDays, endOfDay } from "date-fns";
import { UnsubscribeFunc } from "home-assistant-js-websocket";
import { css, CSSResultGroup, html, LitElement, TemplateResult } from "lit";
import { customElement, property, state } from "lit/decorators";
import { formatDate } from "../../../../common/datetime/format_date";
import { EnergyData, getEnergyDataCollection } from "../../../../data/energy";
import { SubscribeMixin } from "../../../../mixins/subscribe-mixin";
import { HomeAssistant } from "../../../../types";
import { LovelaceCard } from "../../types";
import { EnergyCardBaseConfig } from "../types";

@customElement("hui-energy-compare-card")
export class HuiEnergyCompareCard
  extends SubscribeMixin(LitElement)
  implements LovelaceCard
{
  @property({ attribute: false }) public hass!: HomeAssistant;

  @state() private _config?: EnergyCardBaseConfig;

  @state() private _start?: Date;

  @state() private _end?: Date;

  @state() private _startCompare?: Date;

  @state() private _endCompare?: Date;

  @property({ type: Boolean, reflect: true }) hidden = true;

  public getCardSize(): Promise<number> | number {
    return 1;
  }

  public setConfig(config: EnergyCardBaseConfig): void {
    this._config = config;
  }

  protected hassSubscribeRequiredHostProps = ["_config"];

  public hassSubscribe(): UnsubscribeFunc[] {
    return [
      getEnergyDataCollection(this.hass, {
        key: this._config!.collection_key,
      }).subscribe((data) => this._update(data)),
    ];
  }

  protected render(): TemplateResult {
    if (!this._startCompare || !this._endCompare) {
      return html``;
    }

    const dayDifference = differenceInDays(
      this._endCompare,
      this._startCompare
    );

    return html`
      <ha-alert dismissable @alert-dismissed-clicked=${this._stopCompare}>
        ${this.hass.localize("ui.panel.energy.compare.info", {
          start: html`<b
            >${formatDate(this._start!, this.hass.locale)}${dayDifference > 0
              ? ` -
          ${formatDate(this._end || endOfDay(new Date()), this.hass.locale)}`
              : ""}</b
          >`,
          end: html`<b
            >${formatDate(
              this._startCompare,
              this.hass.locale
            )}${dayDifference > 0
              ? ` -
          ${formatDate(this._endCompare, this.hass.locale)}`
              : ""}</b
          >`,
        })}
      </ha-alert>
    `;
  }

  private _update(data: EnergyData): void {
    this._start = data.start;
    this._end = data.end;
    this._startCompare = data.startCompare;
    this._endCompare = data.endCompare;
    this.hidden = !this._startCompare;
  }

  private _stopCompare(): void {
    const energyCollection = getEnergyDataCollection(this.hass, {
      key: this._config!.collection_key,
    });
    energyCollection.setCompare(false);
    energyCollection.refresh();
  }

  static get styles(): CSSResultGroup {
    return css`
      mwc-button {
        width: max-content;
      }
    `;
  }
}

declare global {
  interface HTMLElementTagNameMap {
    "hui-energy-compare-card": HuiEnergyCompareCard;
  }
}
/django-dedupe-backend-0.0.2.tar.gz/django-dedupe-backend-0.0.2/dedupebackend/fields.py
# NOTE: this module targets Python 2 era Django (it relies on the builtin
# ``basestring``, which does not exist on Python 3).
from django import forms
from django.contrib.admin.widgets import ForeignKeyRawIdWidget, RelatedFieldWidgetWrapper
from django.core.files.base import File
from django.db.models import ForeignKey
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe

from dedupebackend.storage import DedupedStorage
from dedupebackend.models import UniqueFile

__all__ = ('UniqueFileField', 'UniqueImageField')


class UniqueFileAdminWidget(forms.ClearableFileInput):
    template_name = 'dedupebackend/uniquefield.html'

    @property
    def choices(self):
        return self.attrs['limit_choices_to']

    @choices.setter
    def choices(self, value):
        self.attrs['limit_choices_to'] = value

    def render(self, name, value, attrs={}):
        context = {
            'name': name,
            'is_initial': False,
            'initial_text': self.initial_text,
            'input_text': self.input_text,
            'clear_checkbox_label': self.clear_checkbox_label,
            'value': value,
            'is_required': self.is_required,
        }
        context.update(self.attrs)
        context.update(attrs)
        context['lookup_url'] = "%(admin_site)s:dedupebackend_uniquefile_changelist" % context
        if self.is_initial(value):
            context['is_initial'] = True
        return mark_safe(render_to_string(self.template_name, context))

    def raw_id_field_name(self, name):
        return "%s-id" % name

    def value_from_datadict(self, data, files, name):
        val = super(UniqueFileAdminWidget, self).value_from_datadict(data, files, name)
        if val is None:
            return data.get(self.raw_id_field_name(name))
        return val

    class Media:
        css = {
            'all': ('dedupebackend/uniquefield.css',)
        }


class UniqueFileAdminField(forms.FileField):
    widget = UniqueFileAdminWidget

    def __init__(self, limit_choices_to=None, admin_site='admin', queryset=None, to_field_name=None, *args, **kwargs):
        self.limit_choices_to = limit_choices_to
        self.admin_site = admin_site
        super(UniqueFileAdminField, self).__init__(*args, **kwargs)

    def widget_attrs(self, widget):
        base = super(UniqueFileAdminField, self).widget_attrs(widget)
        base.update({
            'limit_choices_to': self.limit_choices_to,
            'admin_site': self.admin_site
        })
        return base

    def prepare_value(self, value):
        # a 40-character string is the primary key (hash) of a UniqueFile
        if isinstance(value, basestring) and len(value) == 40:
            return UniqueFile.objects.get(pk=value)
        return super(UniqueFileAdminField, self).prepare_value(value)

    def to_python(self, data):
        if isinstance(data, basestring) and len(data) == 40:
            return data
        return super(UniqueFileAdminField, self).to_python(data)

    def has_changed(self, initial, data):
        if data is False:  # file is being cleared.
            return True
        return getattr(initial, 'id', None) != self.to_python(data)


class UniqueFileField(ForeignKey):
    form_class = UniqueFileAdminField

    def __init__(self, verbose_name=None, *args, **kwargs):
        self.storage = kwargs.pop('storage', DedupedStorage())
        if 'related_name' not in kwargs:  # do not create backwards accessor by default
            kwargs['related_name'] = '+'
        kwargs['to'] = 'dedupebackend.UniqueFile'
        kwargs['verbose_name'] = verbose_name
        super(UniqueFileField, self).__init__(*args, **kwargs)

    def deconstruct(self):
        name, path, args, kwargs = super(UniqueFileField, self).deconstruct()
        if not self.storage.__class__ is DedupedStorage:
            kwargs['storage'] = self.storage
        return name, path, args, kwargs

    def formfield(self, **kwargs):
        defaults = {
            'form_class': self.form_class,
            'max_length': self.max_length,
        }
        widget = kwargs.pop('widget', None)
        if isinstance(widget, (ForeignKeyRawIdWidget, RelatedFieldWidgetWrapper)):
            defaults['admin_site'] = widget.admin_site.name
        else:
            defaults['widget'] = widget
        # If a file has been provided previously, then the form doesn't require
        # that a new file is provided this time.
        # The code to mark the form field as not required is used by
        # form_for_instance, but can probably be removed once form_for_instance
        # is gone. ModelForm uses a different method to check for an existing file.
        if 'initial' in kwargs:
            defaults['required'] = False
        defaults.update(kwargs)
        return super(UniqueFileField, self).formfield(**defaults)

    def save_form_data(self, instance, data):
        if data is False:  # file is being cleared.
            data = None
        if isinstance(data, File):
            data = self.storage.save(None, data)
        if isinstance(data, basestring) and len(data) == 40:
            data = UniqueFile(pk=data)
        super(UniqueFileField, self).save_form_data(instance, data)

    def value_from_object(self, instance):
        return getattr(instance, self.name)


class UniqueImageAdminWidget(UniqueFileAdminWidget):
    template_name = 'dedupebackend/uniqueimagefield.html'


class UniqueImageAdminField(UniqueFileAdminField):
    widget = UniqueImageAdminWidget


class UniqueImageField(UniqueFileField):
    form_class = UniqueImageAdminField
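# --- Usage sketch (not part of the package) -----------------------------------
# A minimal, hypothetical model using these fields; "Document" and its field
# names are assumptions for illustration only:
#
#     from django.db import models
#     from dedupebackend.fields import UniqueFileField, UniqueImageField
#
#     class Document(models.Model):
#         attachment = UniqueFileField(verbose_name='attachment', null=True, blank=True)
#         cover = UniqueImageField(verbose_name='cover image', null=True, blank=True)
#
# Because both fields are ForeignKeys to dedupebackend.UniqueFile, identical
# uploads resolve to a single stored file (deduplicated by its 40-char hash pk).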
/monaco-qt-0.1.7.tar.gz/monaco-qt-0.1.7/monaco/monaco-editor/min/vs/basic-languages/systemverilog/systemverilog.js
define("vs/basic-languages/systemverilog/systemverilog", ["require","require"],(require)=>{
var moduleExports=(()=>{var r=Object.defineProperty;var s=Object.getOwnPropertyDescriptor;var a=Object.getOwnPropertyNames;var c=Object.prototype.hasOwnProperty;var d=n=>r(n,"__esModule",{value:!0});var l=(n,e)=>{for(var i in e)r(n,i,{get:e[i],enumerable:!0})},p=(n,e,i,o)=>{if(e&&typeof e=="object"||typeof e=="function")for(let t of a(e))!c.call(n,t)&&(i||t!=="default")&&r(n,t,{get:()=>e[t],enumerable:!(o=s(e,t))||o.enumerable});return n};var u=(n=>(e,i)=>n&&n.get(e)||(i=p(d({}),e,1),n&&n.set(e,i),i))(typeof WeakMap!="undefined"?new WeakMap:0);var g={};l(g,{conf:()=>m,language:()=>f});var m={comments:{lineComment:"//",blockComment:["/*","*/"]},brackets:[["{","}"],["[","]"],["(",")"],["begin","end"],["case","endcase"],["casex","endcase"],["casez","endcase"],["checker","endchecker"],["class","endclass"],["clocking","endclocking"],["config","endconfig"],["function","endfunction"],["generate","endgenerate"],["group","endgroup"],["interface","endinterface"],["module","endmodule"],["package","endpackage"],["primitive","endprimitive"],["program","endprogram"],["property","endproperty"],["specify","endspecify"],["sequence","endsequence"],["table","endtable"],["task","endtask"]],autoClosingPairs:[{open:"[",close:"]"},{open:"{",close:"}"},{open:"(",close:")"},{open:"'",close:"'",notIn:["string","comment"]},{open:'"',close:'"',notIn:["string"]}],surroundingPairs:[{open:"{",close:"}"},{open:"[",close:"]"},{open:"(",close:")"},{open:'"',close:'"'},{open:"'",close:"'"}],folding:{offSide:!1,markers:{start:new RegExp("^(?:\\s*|.*(?!\\/[\\/\\*])[^\\w])(?:begin|case(x|z)?|class|clocking|config|covergroup|function|generate|interface|module|package|primitive|property|program|sequence|specify|table|task)\\b"),end:new RegExp("^(?:\\s*|.*(?!\\/[\\/\\*])[^\\w])(?:end|endcase|endclass|endclocking|endconfig|endgroup|endfunction|endgenerate|endinterface|endmodule|endpackage|endprimitive|endproperty|endprogram|endsequence|endspecify|endtable|endtask)\\b")}}},f={defaultToken:"",tokenPostfix:".sv",brackets:[{token:"delimiter.curly",open:"{",close:"}"},{token:"delimiter.parenthesis",open:"(",close:")"},{token:"delimiter.square",open:"[",close:"]"},{token:"delimiter.angle",open:"<",close:">"}],keywords:["accept_on","alias","always","always_comb","always_ff","always_latch","and","assert","assign","assume","automatic","before","begin","bind","bins","binsof","bit","break","buf","bufif0","bufif1","byte","case","casex","casez","cell","chandle","checker","class","clocking","cmos","config","const","constraint","context","continue","cover","covergroup","coverpoint","cross","deassign","default","defparam","design","disable","dist","do","edge","else","end","endcase","endchecker","endclass","endclocking","endconfig","endfunction","endgenerate","endgroup","endinterface","endmodule","endpackage","endprimitive","endprogram","endproperty","endspecify","endsequence","endtable","endtask","enum","event","eventually","expect","export","extends","extern","final","first_match","for","force","foreach","forever","fork","forkjoin","function","generate","genvar","global","highz0","highz1","if","iff","ifnone","ignore_bins","illegal_bins","implements","implies","import","incdir","include","initial","inout","input","inside","instance","int","integer","interconnect","interface","intersect","join","join_any","join_none","large","let","liblist","library","local","localparam","logic","longint","macromodule","matches","medium","modport","module","nand","negedge","nettype","new","nexttime","nmos","nor","noshowcancelled","not","notif0","notif1","null","or"
,"output","package","packed","parameter","pmos","posedge","primitive","priority","program","property","protected","pull0","pull1","pulldown","pullup","pulsestyle_ondetect","pulsestyle_onevent","pure","rand","randc","randcase","randsequence","rcmos","real","realtime","ref","reg","reject_on","release","repeat","restrict","return","rnmos","rpmos","rtran","rtranif0","rtranif1","s_always","s_eventually","s_nexttime","s_until","s_until_with","scalared","sequence","shortint","shortreal","showcancelled","signed","small","soft","solve","specify","specparam","static","string","strong","strong0","strong1","struct","super","supply0","supply1","sync_accept_on","sync_reject_on","table","tagged","task","this","throughout","time","timeprecision","timeunit","tran","tranif0","tranif1","tri","tri0","tri1","triand","trior","trireg","type","typedef","union","unique","unique0","unsigned","until","until_with","untyped","use","uwire","var","vectored","virtual","void","wait","wait_order","wand","weak","weak0","weak1","while","wildcard","wire","with","within","wor","xnor","xor"],builtin_gates:["and","nand","nor","or","xor","xnor","buf","not","bufif0","bufif1","notif1","notif0","cmos","nmos","pmos","rcmos","rnmos","rpmos","tran","tranif1","tranif0","rtran","rtranif1","rtranif0"],operators:["=","+=","-=","*=","/=","%=","&=","|=","^=","<<=",">>+","<<<=",">>>=","?",":","+","-","!","~","&","~&","|","~|","^","~^","^~","+","-","*","/","%","==","!=","===","!==","==?","!=?","&&","||","**","<","<=",">",">=","&","|","^",">>","<<",">>>","<<<","++","--","->","<->","inside","dist","::","+:","-:","*>","&&&","|->","|=>","#=#"],symbols:/[=><!~?:&|+\-*\/\^%#]+/,escapes:/%%|\\(?:[antvf\\"']|x[0-9A-Fa-f]{1,2}|[0-7]{1,3})/,identifier:/(?:[a-zA-Z_][a-zA-Z0-9_$\.]*|\\\S+ )/,systemcall:/[$][a-zA-Z0-9_]+/,timeunits:/s|ms|us|ns|ps|fs/,tokenizer:{root:[[/^(\s*)(@identifier)/,["",{cases:{"@builtin_gates":{token:"keyword.$2",next:"@module_instance"},table:{token:"keyword.$2",next:"@table"},"@keywords":{token:"keyword.$2"},"@default":{token:"identifier",next:"@module_instance"}}}]],[/^\s*`include/,{token:"keyword.directive.include",next:"@include"}],[/^\s*`\s*\w+/,"keyword"],{include:"@identifier_or_keyword"},{include:"@whitespace"},[/\(\*.*\*\)/,"annotation"],[/@systemcall/,"variable.predefined"],[/[{}()\[\]]/,"@brackets"],[/[<>](?!@symbols)/,"@brackets"],[/@symbols/,{cases:{"@operators":"delimiter","@default":""}}],{include:"@numbers"},[/[;,.]/,"delimiter"],{include:"@strings"}],identifier_or_keyword:[[/@identifier/,{cases:{"@keywords":{token:"keyword.$0"},"@default":"identifier"}}]],numbers:[[/\d+?[\d_]*(?:\.[\d_]+)?[eE][\-+]?\d+/,"number.float"],[/\d+?[\d_]*\.[\d_]+(?:\s*@timeunits)?/,"number.float"],[/(?:\d+?[\d_]*\s*)?'[sS]?[dD]\s*[0-9xXzZ?]+?[0-9xXzZ?_]*/,"number"],[/(?:\d+?[\d_]*\s*)?'[sS]?[bB]\s*[0-1xXzZ?]+?[0-1xXzZ?_]*/,"number.binary"],[/(?:\d+?[\d_]*\s*)?'[sS]?[oO]\s*[0-7xXzZ?]+?[0-7xXzZ?_]*/,"number.octal"],[/(?:\d+?[\d_]*\s*)?'[sS]?[hH]\s*[0-9a-fA-FxXzZ?]+?[0-9a-fA-FxXzZ?_]*/,"number.hex"],[/1step/,"number"],[/[\dxXzZ]+?[\dxXzZ_]*(?:\s*@timeunits)?/,"number"],[/'[01xXzZ]+/,"number"]],module_instance:[{include:"@whitespace"},[/(#?)(\()/,["",{token:"@brackets",next:"@port_connection"}]],[/@identifier\s*[;={}\[\],]/,{token:"@rematch",next:"@pop"}],[/@symbols|[;={}\[\],]/,{token:"@rematch",next:"@pop"}],[/@identifier/,"type"],[/;/,"delimiter","@pop"]],port_connection:[{include:"@identifier_or_keyword"},{include:"@whitespace"},[/@systemcall/,"variable.predefined"],{include:"@numbers"},{include:"@strings"},[/[,]/,"delimiter"],[/\(/,"@bra
ckets","@port_connection"],[/\)/,"@brackets","@pop"]],whitespace:[[/[ \t\r\n]+/,""],[/\/\*/,"comment","@comment"],[/\/\/.*$/,"comment"]],comment:[[/[^\/*]+/,"comment"],[/\*\//,"comment","@pop"],[/[\/*]/,"comment"]],strings:[[/"([^"\\]|\\.)*$/,"string.invalid"],[/"/,"string","@string"]],string:[[/[^\\"]+/,"string"],[/@escapes/,"string.escape"],[/\\./,"string.escape.invalid"],[/"/,"string","@pop"]],include:[[/(\s*)(")([\w*\/*]*)(.\w*)(")/,["","string.include.identifier","string.include.identifier","string.include.identifier",{token:"string.include.identifier",next:"@pop"}]],[/(\s*)(<)([\w*\/*]*)(.\w*)(>)/,["","string.include.identifier","string.include.identifier","string.include.identifier",{token:"string.include.identifier",next:"@pop"}]]],table:[{include:"@whitespace"},[/[()]/,"@brackets"],[/[:;]/,"delimiter"],[/[01\-*?xXbBrRfFpPnN]/,"variable.predefined"],["endtable","keyword.endtable","@pop"]]}};return u(g);})();
return moduleExports;
});
/apachecn_sec_zh_pt2-2022.9.27.0-py3-none-any.whl/ApachecnSecZhPt2/docs/real-world-crypto/07.md
# 7 Signatures and zero-knowledge proofs

This chapter covers

* Zero-knowledge proofs and cryptographic signatures
* The existing cryptographic signature standards
* Subtle behaviors of signatures and the pitfalls to avoid

You are about to learn about one of the most ubiquitous and powerful cryptographic primitives—digital signatures. In short, digital signatures resemble the real-life signatures you are familiar with, the ones you write on checks and contracts. Except, of course, that digital signatures are cryptographic and thus provide much stronger guarantees than their pen-and-paper counterparts.

In the world of protocols, digital signatures unlock many different possibilities, and you will run into them again and again in the second part of this book. In this chapter, I introduce what this new primitive is, how it is used in the real world, and what the modern digital signature standards are. Finally, I talk about security considerations and the dangers of using digital signatures.

NOTE Signatures in cryptography are usually referred to as *digital signatures* or *signature schemes*. In this book, I use these terms interchangeably.

For this chapter, you'll need to have read

* Chapter 2 on hash functions
* Chapter 5 on key exchanges
* Chapter 6 on asymmetric encryption

## 7.1 What is a signature?

As I explained in chapter 1, cryptographic signatures are much like real-life signatures. For this reason, they are usually one of the easiest cryptographic primitives to understand:

* Only you can use your signature to sign arbitrary messages.
* Anyone can verify your signature on a message.

Since we are in the realm of asymmetric cryptography, you can probably guess how this asymmetry plays out. A *signature scheme* usually comprises three different algorithms:

* A key pair generation algorithm, which the signer uses to create a new private and public key (the public key can then be shared with anyone).
* A signing algorithm that takes a private key and a message to produce a signature.
* A verification algorithm that takes a public key, a message, and a signature, and returns success or an error.

Sometimes the private key is also called a *signing key*, while the public key is called a *verification key*. Makes sense, right? I summarize these three algorithms in figure 7.1.



Figure 7.1 The interface of a digital signature. Like other public-key cryptographic algorithms, you first generate a key pair via a key generation algorithm that takes a security parameter and some randomness. You can then sign a message with the signing algorithm and the private key, and verify a signature over a message with the verification algorithm and the public key. Without access to the associated private key, it is impossible to forge a signature that verifies under the public key.

What are signatures for? They are useful for verifying the origin of a message as well as its integrity:

* *Origin*—If my signature is on it, I wrote it.
* *Integrity*—If someone modifies the message, the signature becomes invalid.

NOTE While these two properties are both associated with authentication, they are often distinguished as two separate properties: *origin authentication* and *message authentication* (or integrity).

In some sense, signatures are similar to the message authentication codes (MACs) you learned about in chapter 3. But unlike MACs, they allow us to authenticate messages asymmetrically: participants can verify that a message has not been tampered with, without knowing the private key (the signing key). Next, I'll show you how these algorithms are used in practice.

Exercise

As you saw in chapter 3, the authentication tags produced by MACs must be verified in constant time to avoid timing attacks. Do you think we need to do the same when verifying signatures?

### 7.1.1 How to sign and verify signatures in practice

Let's look at a practical example. For this, I use pyca/cryptography (https://cryptography.io), a well-regarded Python library. The following listing simply generates a key pair, signs a message with the private key part, and then verifies the signature with the public key part.

Listing 7.1 Signing and verifying a signature in Python
```
from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.primitives.asymmetric.ed25519 import (
    Ed25519PrivateKey                                  ❶
)

private_key = Ed25519PrivateKey.generate()             ❷
public_key = private_key.public_key()                  ❷

message = b"example.com has the public key 0xab70..."  ❸
signature = private_key.sign(message)                  ❸

try:                                                   ❹
    public_key.verify(signature, message)              ❹
    print("valid signature")                           ❹
except InvalidSignature:                               ❹
    print("invalid signature")                         ❹
```
❶ Uses the Ed25519 signature algorithm, a popular signature scheme

❷ Generates a private key, then derives the public key from it

❸ Signs a message with the private key, obtaining a signature

❹ Verifies the signature over the message with the public key

As I said earlier, digital signatures unlock many use cases in the real world. Let's look at one of them in the next section.

### 7.1.2 A prime use case for signatures: Authenticated key exchanges

Chapters 5 and 6 introduced different ways for two participants to perform a key exchange. In those same chapters, you learned that key exchanges are useful for negotiating a shared secret, which can then be used to secure communications via an authenticated encryption algorithm. However, a key exchange does not completely solve the problem of setting up a secure connection between two participants, since an active man-in-the-middle (MITM) attacker can impersonate both sides of the key exchange. This is where signatures enter the ring.

Imagine that Alice and Bob are trying to set up a secure communication channel between them, and that Bob knows Alice's verification key. Knowing this, Alice can use her signing key to authenticate herself in the key exchange: she generates a key exchange key pair, signs the public key part with her signing key, and then sends the key exchange public key along with the signature. Bob can use the associated verification key he already knows to check that the signature is valid, and then use the key exchange public key to perform the key exchange.

We call such a key exchange an *authenticated key exchange*. If the signature is invalid, Bob knows that someone is actively MITM'ing the key exchange. I illustrate an authenticated key exchange in figure 7.2.



Figure 7.2 The first picture (top) represents an unauthenticated key exchange, which is insecure against an active MITM attacker who can impersonate both sides of the exchange by swapping in their own public keys. The second picture (bottom) represents the beginning of a key exchange authenticated via Alice's signature over her public key. Because Bob (who knows Alice's verification key) cannot verify the signature after the message has been tampered with by a MITM attacker, he aborts the key exchange.

Note that in this example, only one side of the key exchange is authenticated: while Alice cannot be impersonated, Bob can. If both sides are authenticated (Bob would sign his part of the key exchange as well), we call it a *mutually authenticated key exchange*. A signed key exchange might not seem all that useful yet. It looks as though we have merely moved the problem of not knowing Alice's key exchange public key in advance to the problem of not knowing her verification key in advance. The next section covers a practical application of authenticated key exchanges that will make more sense.

### 7.1.3 A real-world usage: Public key infrastructures

Signatures become even more powerful if you assume that trust is *transitive*. By this I mean that if you trust me, and I trust Alice, then you can trust Alice too. She's cool.

The transitivity of trust lets you scale trust in a system to an extreme degree. Suppose you have confidence in some authority and its verification key. Suppose, in addition, that this authority has signed messages stating what Charles's public key is, what David's public key is, and so on. Then you can choose to have confidence in this mapping! Such a mapping is called a *public key infrastructure*. For example, if you are trying to perform a key exchange with Charles and he claims that his public key is some large number that looks like 3848. . ., you can verify the claim by checking whether your "dear" authority signed a message like "Charles's public key is 3848. . ."

A practical application of this concept is the *web public key infrastructure* (web PKI). The web PKI is what your web browser uses to authenticate the key exchanges it performs with the multitude of websites you visit every day. A simplified explanation of the web PKI (illustrated in figure 7.3) goes like this: when you download a browser, it ships with a number of verification keys embedded in the program. Each verification key is linked to an authority whose job is to sign the public keys of thousands upon thousands of websites, so that you can trust those public keys without knowing them in advance. What you don't see is that these websites had to prove to the authority that they really own their domain names before getting their public keys signed. (In practice, your browser trusts many authorities to do this job, not just one.)



Figure 7.3 In the web PKI, a browser trusts an authority to attest that certain domains are linked to certain public keys. When securely visiting a website, your browser can verify that the website's public key is indeed theirs (and not from some MITM) by verifying the authority's signature over it.

In this section, you got a view of signatures from a high-level perspective. Let's now dig deeper into how signatures actually work. But for that, we first need to take a detour and look at something called zero-knowledge proofs (ZKPs).

## 7.2 Zero-knowledge proofs: The origin of signatures

The best way to understand how signatures work in cryptography is to understand where they come from. For this reason, let's take a moment to briefly introduce ZKPs, and then I'll come back to signatures.

Imagine that Peggy wants to prove something to Victor. For example, she wants to prove that she knows the discrete logarithm, in some base, of a group element. In other words, she wants to prove that she knows *x* given *Y* = *g*<sup>*x*</sup>, with *g* a generator of some group.



Of course, the simplest solution is for Peggy to simply send the value *x* (called the *witness*). This would be a simple *proof of knowledge*, and it works fine—except when Peggy does not want Victor to learn the witness.

NOTE In theory, we say that a proof is *complete* if Peggy can use it to prove to Victor that she knows the witness. If she cannot use it to prove what she knows, the scheme is useless, right?

In cryptography, we are mostly interested in proofs of knowledge that do not reveal the witness to the verifier. Such proofs are called *zero-knowledge proofs* (ZKPs).
### 7.2.1 Schnorr identification protocol: An interactive zero-knowledge proof

In the next few pages, I will gradually build a ZKP, starting from a broken protocol, to show you how Peggy can prove that she knows *x* without revealing *x*.

In cryptography, the typical way to deal with this kind of problem is to "hide" a value with some randomness (for example, by encrypting it). But we want to do more than just hide it: we also want to prove that it is in there. For this, we need an algebraic way of hiding it. A straightforward solution is to simply add a randomly generated value *k* to the witness:

*s* = *k* + *x*

Peggy can then send the hidden witness *s* to Victor, along with the random value *k*. At this point, Victor has no reason to believe that Peggy has indeed hidden the witness in *s*. Indeed, if she does not know the witness *x*, then *s* might well be just some random value. What Victor does know is that the witness *x* is hidden in the exponent of *g*, since he knows *Y* = *g*<sup>*x*</sup>.

To see whether Peggy really knows the witness, Victor can check that what she gave him matches what he knows, which must also happen in the exponent of *g* (since that is where the witness lives). In other words, Victor checks that these two numbers are equal:

* *g*<sup>*s*</sup> (= *g*<sup>*k*+*x*</sup>)
* *Y* × *g*<sup>*k*</sup> (= *g*<sup>*x*</sup> × *g*<sup>*k*</sup> = *g*<sup>*x*+*k*</sup>)

The idea is that only someone who knows the witness *x* can construct a "blinded" witness *s* that satisfies this equation. As such, it is a proof of knowledge. I summarize this ZKP system in figure 7.4.



Figure 7.4 To prove to Victor that she knows a witness *x*, Peggy hides it (by adding it to a random value *k*) and sends the hidden witness *s* in its place.

Not so fast. There is a problem with this scheme—it is obviously insecure! Indeed, since the equation hiding the witness *x* has only one unknown (*x* itself), Victor can simply invert the equation to retrieve the witness:

*x* = *s* – *k*

To fix this, Peggy can hide the random value *k* itself! This time, she has to hide the random value in the exponent (rather than adding it to another random value) so that Victor's equation still works out:

*R* = *g*<sup>*k*</sup>

This way, Victor does not learn the value *k* (this is the discrete logarithm problem covered in chapter 5) and, consequently, cannot recover the witness *x*. Yet he still has enough information to verify that Peggy knows *x*! Victor simply checks that *g*<sup>*s*</sup> (= *g*<sup>*k*+*x*</sup> = *g*<sup>*k*</sup> × *g*<sup>*x*</sup>) is equal to *Y* × *R* (= *g*<sup>*x*</sup> × *g*<sup>*k*</sup>). I review this second attempt at a ZKP protocol in figure 7.5.



Figure 7.5 To make a proof of knowledge *zero-knowledge*, the prover can hide the witness *x* with a random value *k*, and then hide the random value itself.

There is one last problem with our scheme—Peggy can cheat. She can convince Victor that she knows *x* without actually knowing *x*! All she has to do is reverse the steps in which she computes her proof. She first generates a random value *s*, and then computes the value *R* from *s*:

*R* = *g*<sup>*s*</sup> × *Y*<sup>–1</sup>

Victor then computes *Y* × *R* = *Y* × *g*<sup>*s*</sup> × *Y*<sup>–1</sup>, which indeed matches *g*<sup>*s*</sup>. (Peggy's trick of computing a value by using inverses is used in many attacks in cryptography.)

NOTE In theory, we say that a scheme is *sound* if Peggy cannot cheat (if she cannot fool Victor when she does not know *x*).

To make the ZKP protocol sound, Victor must make sure that Peggy computes *s* from *R*, and not the other way around. To do this, Victor makes the protocol *interactive*:

1. Peggy must commit to her random value *k* so that she cannot change it later.
2. After obtaining Peggy's commitment, Victor introduces some randomness of his own into the protocol. He generates a random value *c* (called the *challenge*) and sends it to Peggy.
3. Peggy can then compute her hidden witness based on the random value *k* and the challenge *c*.

NOTE You already learned about commitment schemes in chapter 2, where we used a hash function to commit to a value that we could reveal later. But commitment schemes based on hash functions do not let us do interesting operations on the hidden value. Instead, we can simply raise the generator to the value, *g*<sup>*k*</sup>, which we are already doing.

Because Peggy cannot perform the last step without Victor's challenge *c*, and Victor will not send her the challenge without first seeing a commitment to the random value *k*, Peggy is forced to compute *s* based on *k*. The resulting protocol, which I illustrate in figure 7.6, is commonly known as the *Schnorr identification protocol*.



Figure 7.6 The Schnorr identification protocol is an interactive ZKP that is *complete* (Peggy can prove that she knows a witness), *sound* (Peggy cannot prove anything if she does not know the witness), and *zero-knowledge* (Victor learns nothing about the witness).

Such *interactive ZKP systems* follow a three-step pattern (commitment, challenge, and proof) and are often referred to in the literature as *sigma protocols*, sometimes written as Σ-protocols (due to the illustrative shape of the Greek letter). But what does all this have to do with digital signatures?

NOTE The Schnorr identification protocol works in the *honest-verifier zero-knowledge* (HVZK) *model*: if the verifier (Victor) acts dishonestly and does not choose the challenge randomly, they can learn something about the witness. Some stronger ZKP schemes remain zero-knowledge even when the verifier is malicious.
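Before moving on, here is a minimal Python sketch of the three moves. The toy group parameters (p = 2039, q = 1019, g = 2) are my own illustrative assumptions—a real deployment would use a group of roughly 256-bit prime order, a vetted library, and constant-time arithmetic:

```
import secrets

# Toy subgroup of prime order q inside Z_p* (NOT secure; illustration only)
p, q, g = 2039, 1019, 2

x = secrets.randbelow(q - 1) + 1   # Peggy's witness
Y = pow(g, x, p)                   # public value Y = g^x

# 1. Commit: Peggy hides a random nonce k in the exponent
k = secrets.randbelow(q - 1) + 1
R = pow(g, k, p)

# 2. Challenge: Victor picks a random challenge c
c = secrets.randbelow(q)

# 3. Prove: Peggy reveals s = k + c*x mod q
s = (k + c * x) % q

# Victor accepts if g^s == R * Y^c (all mod p)
assert pow(g, s, p) == (R * pow(Y, c, p)) % p
print("proof accepted")
```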
### 7.2.2 Signatures as non-interactive zero-knowledge proofs

The problem with the previous interactive ZKP is that, well, it is *interactive*, and real-world protocols generally dislike interactivity. Unless both participants are online at the same time, an interactive protocol adds non-negligible overhead, as it requires several messages (potentially over a network) and adds unbounded latency. For this reason, interactive ZKPs are mostly absent from the world of applied cryptography.

All this discussion was not pointless, though! In 1986, Amos Fiat and Adi Shamir published a technique that allows one to easily transform an interactive ZKP into a non-interactive one. The trick they introduced (known as the *Fiat-Shamir heuristic* or *Fiat-Shamir transformation*) is to have the prover compute the challenge themselves, in a way that they cannot control.

Here's the trick—compute the challenge as a hash of all the messages sent and received as part of the protocol up to that point (what we call the *transcript*). If we assume that the hash function produces output that is indistinguishable from truly random numbers (in other words, it looks random), then it can successfully simulate the role of the verifier.

Schnorr went one step further. He noticed that anything could be included in that hash! For example, what if we include a message in there? What we obtain is not just a proof that we know some witness *x*, but also a commitment to a message that is cryptographically linked to that proof. In other words, if the proof is correct, only someone who knows the witness (which becomes the signing key) could have committed to that message.

*That's a signature!* Digital signatures are just non-interactive ZKPs. By applying the Fiat-Shamir transformation to the Schnorr identification protocol, we obtain the *Schnorr signature scheme*, which I illustrate in figure 7.7.



Figure 7.7 The protocol on the left is the Schnorr identification protocol discussed earlier, an interactive protocol. The protocol on the right is a Schnorr signature, a non-interactive version of the protocol on the left (where the verifier's message is replaced by a call to a hash function over the transcript).

To recap, a Schnorr signature is essentially two values, *R* and *s*, where *R* is a commitment to some secret random value (usually called a *nonce*, as it needs to be unique per signature), and *s* is a value computed with the help of the commitment *R*, the private key (the witness *x*), and a message. Next, let's look at the modern standards for signature algorithms.
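As a companion to the earlier sketch, here is the same toy group with the verifier's challenge replaced by a hash over the transcript and the message (again, illustrative parameters only—use a vetted library such as the one in listing 7.1 for real signatures):

```
import hashlib
import secrets

p, q, g = 2039, 1019, 2            # same toy group (NOT secure)

x = secrets.randbelow(q - 1) + 1   # signing key (the witness)
Y = pow(g, x, p)                   # verification key

def challenge(R: int, message: bytes) -> int:
    # Fiat-Shamir: a hash of the transcript plays the role of the verifier
    digest = hashlib.sha256(str(R).encode() + message).digest()
    return int.from_bytes(digest, "big") % q

def sign(message: bytes):
    k = secrets.randbelow(q - 1) + 1
    R = pow(g, k, p)               # commitment to the nonce
    s = (k + challenge(R, message) * x) % q
    return R, s

def verify(message: bytes, R: int, s: int) -> bool:
    c = challenge(R, message)
    return pow(g, s, p) == (R * pow(Y, c, p)) % p

R, s = sign(b"example.com has the public key 0xab70...")
assert verify(b"example.com has the public key 0xab70...", R, s)
```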
## 7.3 The signature algorithms you should use (or not)

As with other fields of cryptography, there are many standards for digital signatures, and it can be hard to figure out which one to use. Well, that's what I'm here for! Fortunately, the types of algorithms used for signatures are similar to those used for key exchanges: there are algorithms based on arithmetic modulo large numbers, like Diffie-Hellman (DH) and RSA, and there are algorithms based on elliptic curves, like Elliptic Curve Diffie-Hellman (ECDH).

Make sure you fully understand the algorithms in chapters 5 and 6, because we are now going to build on top of them. Interestingly, the paper that introduced the DH key exchange also proposed the concept of a digital signature (without providing a solution):

*In order to develop a system capable of replacing the current written contract with some purely electronic form of communication, we must discover a digital phenomenon with the same properties as a written signature. It must be easy for anyone to recognize the signature as authentic, but impossible for anyone other than the legitimate signer to produce it. We will call any such technique one-way authentication. Since any digital signal can be copied precisely, a true digital signature must be recognizable without being known.*

—Whitfield Diffie and Martin Hellman ("New Directions in Cryptography," 1976)

A year later (in 1977), the first signature algorithm, RSA, was introduced along with the RSA asymmetric encryption algorithm (covered in chapter 6). RSA signatures are the first algorithm we will look at.

In 1991, NIST proposed the *Digital Signature Algorithm (DSA)* as an attempt to avoid the patent on Schnorr signatures. For this reason, DSA is an odd variant of Schnorr signatures, and it was released without a security proof (although no attack has been found on it so far). The algorithm saw wide adoption but was quickly superseded by an elliptic curve version called *ECDSA* (Elliptic Curve Digital Signature Algorithm), much as Elliptic Curve Diffie-Hellman (ECDH) superseded Diffie-Hellman (DH), thanks to its much smaller keys (see chapter 5). ECDSA is the second signature algorithm I cover in this section.

After the patent on Schnorr signatures expired in 2008, Daniel J. Bernstein—the inventor of ChaCha20-Poly1305 (introduced in chapter 4) and X25519 (introduced in chapter 5)—introduced a new signature scheme based on Schnorr signatures, called *EdDSA* (Edwards-curve Digital Signature Algorithm). Since its invention, EdDSA has quickly gained wide adoption and is today considered the state of the art for digital signatures in real-world applications. EdDSA is the third and last signature algorithm I cover in this section.

### 7.3.1 RSA PKCS#1 v1.5: A bad standard

RSA signatures are used everywhere today, even though they shouldn't be (as you will see in this section, they come with many issues). This is because the algorithm was the first signature scheme to be standardized, and real-world applications have been slow to move to newer, better algorithms. So you are quite likely to run into RSA signatures on your journey, and I cannot avoid explaining how they work and which standards have been adopted. That said, if you understood how RSA encryption works in chapter 6, this section should be straightforward, because using RSA for signatures is exactly the reverse of using RSA for encryption:

* To sign, you *encrypt* the message with the private key (instead of the public key), producing a signature (a random-looking element of the group).
* To verify a signature, you *decrypt* the signature with the public key (instead of the private key). If it gives back the original message, the signature is valid.

NOTE In practice, the message is usually hashed before being signed, as the digest takes up less space (RSA can only sign messages smaller than its modulus). The result is then interpreted as a large number so that it can be used in mathematical operations.

If your private key is the private exponent *d*, and your public key is the public exponent *e* together with the public modulus *N*, you can

* sign a message by computing *signature* = *message*<sup>*d*</sup> mod *N*
* verify a signature by computing *signature*<sup>*e*</sup> mod *N* and checking that it is equal to the message

I illustrate this visually in figure 7.8.



Figure 7.8 To use RSA for signatures, we simply perform the reverse of the RSA encryption algorithm: we exponentiate the message with the private exponent, and to verify, we exponentiate the signature with the public exponent, which gives back the message.

This works because only someone who knows the private exponent *d* can produce a signature over a message. As with RSA encryption, the security is tightly linked to the hardness of the factoring problem.
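To make the sign and verify computations above concrete, here is a "textbook RSA" sketch in Python (toy primes of my own choosing; no hashing or padding—precisely the gaps that the standards below exist to fix):

```
p, q = 61, 53                       # toy primes (illustration only)
N = p * q                           # public modulus
e = 17                              # public exponent
d = pow(e, -1, (p - 1) * (q - 1))   # private exponent (Python 3.8+)

message = 42                        # in practice, a hash of the message

signature = pow(message, d, N)           # sign: message^d mod N
assert pow(signature, e, N) == message   # verify: signature^e mod N
```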
What are the standards for signing with RSA? Fortunately, they follow the same pattern as the ones for RSA encryption:

* RSA for encryption was loosely standardized in the PKCS#1 v1.5 document. The same document contains a specification for RSA signatures (with no security proof).
* RSA was later standardized again, with a better construction (called RSA-OAEP), in the PKCS#1 v2 document. The same thing happened for RSA signatures: RSA-PSS was standardized (with a security proof) in the same document.

I talked about RSA PKCS#1 v1.5 in chapter 6 on asymmetric encryption. The signature scheme standardized in that document is very similar to the encryption scheme. To sign, first hash the message with a hash function of your choice, and then pad it according to PKCS#1 v1.5's signature padding (which is similar to the encryption padding in the same standard). Next, encrypt the padded and hashed message with your private exponent. I illustrate this in figure 7.9.



Figure 7.9 RSA PKCS#1 v1.5 signing. To sign, hash the message and then pad it with the PKCS#1 v1.5 padding scheme. The final step is to exponentiate the padded and hashed message with the private key *d* modulo *N*. To verify, simply exponentiate the signature with the public exponent *e* modulo *N* and check that it matches the padded and hashed message.

The many RSAs

By the way, don't let the different terms surrounding RSA confuse you. There is RSA the *asymmetric encryption primitive* and RSA the *signature primitive*. On top of that, there is RSA the company, founded by the inventors of RSA. When people talk about encrypting with RSA, they mostly mean the RSA PKCS#1 v1.5 and RSA-OAEP schemes. When people talk about signing with RSA, they mostly mean the RSA PKCS#1 v1.5 and RSA-PSS schemes.

I know this can get confusing, especially with the PKCS#1 v1.5 standard. While PKCS#1 v1.5 has official names to distinguish the encryption and signature algorithms (RSAES-PKCS1-v1_5 for encryption and RSASSA-PKCS1-v1_5 for signatures), I have rarely seen them used.

In chapter 6, I mentioned a devastating attack against RSA PKCS#1 v1.5 for encryption; unfortunately, the same goes for RSA PKCS#1 v1.5 signatures. In 1998, after Bleichenbacher found his devastating attack against RSA PKCS#1 v1.5 encryption, he decided to take a look at the signature standard. Bleichenbacher came back in 2006 with a *signature forgery* attack on RSA PKCS#1 v1.5, one of the most catastrophic types of attacks on signatures—an attacker can forge signatures without any knowledge of the private key! Unlike his first attack, which broke the encryption algorithm directly, this second attack is an implementation attack. This means that the attack does not work if the signature scheme is implemented correctly (according to the specification).

An implementation flaw might not sound as bad as an algorithmic flaw—that is, if it is easy to avoid and does not affect many implementations. Unfortunately, 2019 showed that an embarrassing number of open source implementations of RSA PKCS#1 v1.5 signatures actually fell into that trap and implemented the standard incorrectly (see "Analyzing Semantic Correctness with Symbolic Execution: A Case Study on PKCS#1 v1.5 Signature Verification" by Chau et al.). The various implementation flaws led to different variants of Bleichenbacher's forgery attack.

Unfortunately, RSA PKCS#1 v1.5 for signatures is still widely used. If you really *have to* use this algorithm for backward-compatibility reasons, be aware of these issues. That said, this does not mean that RSA for signatures is insecure. The story does not end here.
### 7.3.2 RSA-PSS: A better standard

RSA-PSS was standardized in the updated PKCS#1 v2.1, and it includes a security proof (unlike the signature scheme standardized in the older PKCS#1 v1.5). The newer specification works like this:

* Encode the message using the PSS encoding algorithm
* Sign the encoded message using RSA (as done in the PKCS#1 v1.5 standard)

The PSS encoding is somewhat more involved and resembles OAEP (Optimal Asymmetric Encryption Padding). I illustrate it in figure 7.10.



Figure 7.10 The RSA-PSS signature scheme encodes the message using a mask generation function (MGF), like the RSA-OAEP algorithm you learned about in chapter 6, before signing it the usual RSA way.

To verify a signature produced by RSA-PSS, it is simply a matter of reversing the encoding once the signature has been raised to the public exponent modulo the public modulus.
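In practice you would never implement the encoding yourself. Here is what signing with RSA-PSS looks like with the same pyca/cryptography library as in listing 7.1 (a sketch; the SHA-256 and maximum-length-salt choices are common defaults, shown here as an illustration):

```
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa

private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
public_key = private_key.public_key()

message = b"example.com has the public key 0xab70..."
pss = padding.PSS(mgf=padding.MGF1(hashes.SHA256()),
                  salt_length=padding.PSS.MAX_LENGTH)

signature = private_key.sign(message, pss, hashes.SHA256())
public_key.verify(signature, message, pss, hashes.SHA256())  # raises InvalidSignature if invalid
```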
The provable security of PSS

PSS (for *Probabilistic Signature Scheme*) is provably secure, meaning that no one should be able to forge a signature without knowledge of the private key. Rather than proving that RSA-PSS is secure if RSA is secure, the proof shows the contrapositive: if someone can break RSA-PSS, then someone can also break RSA. This is a common way of proving things in cryptography. Of course, it only works if RSA is secure, which is what we assume in the proof.

If you remember, in chapter 6 I also talked about a third algorithm for RSA encryption (called RSA-KEM), a much simpler algorithm that no one uses but that is also proven secure. Interestingly, RSA for signatures mirrors this part of RSA encryption's history as well: there is a much simpler algorithm that almost no one uses, called *Full Domain Hash* (FDH). FDH works by simply hashing a message and then signing it with RSA (interpreting the digest as a number).

Although RSA-PSS and FDH both have security proofs and are easier to implement correctly, most protocols today still use RSA PKCS#1 v1.5 for signatures. This is just another example of the slowness that usually surrounds the deprecation of cryptographic algorithms. Because old implementations still have to work with newer ones, it is hard to remove or replace algorithms. Think of users who don't update their applications, vendors who don't provide new versions of their software, hardware devices that can't be updated, and so on. Next, let's look at a more modern algorithm.

### 7.3.3 The Elliptic Curve Digital Signature Algorithm (ECDSA)

In this section, we look at ECDSA, the elliptic curve variant of DSA, which was invented solely to circumvent the patent on Schnorr signatures. The signature scheme is specified in many standards, including NIST's FIPS 186-2, ISO 14888-3, ANSI X9.62, IEEE P1363, and more. Not all of these standards are compatible, and applications that want to interoperate must make sure they are using the same one.

Unfortunately, like DSA, ECDSA comes without a security proof, whereas Schnorr signatures have one. Nonetheless, ECDSA has been widely adopted and is one of the most commonly used signature schemes. In this section, I explain how ECDSA works and how to use it. As with all such schemes, the public key is almost always generated by the same formula:

* The private key is a large number *x* generated randomly.
* The public key is obtained by using *x* as an index into the group created by a generator (called a *base point* in elliptic curve cryptography).

More specifically, in ECDSA, the public key is computed as [*x*]*G*, the scalar multiplication of the scalar *x* with the base point *G*.

Additive or multiplicative notation?

Note that I used the *additive notation* (the elliptic curve syntax, with brackets around the scalar), but I could have written *public_key* = *G*<sup>*x*</sup> had I wanted to use the *multiplicative notation*. These differences do not matter in practice. Most of the time, cryptographic protocols that do not care about the underlying properties of the group are written using the multiplicative notation, while protocols defined specifically over groups based on elliptic curves tend to be written using the additive notation.

To compute an ECDSA signature, you need the same inputs as for a Schnorr signature: a hash of the message you are signing, *H*(*m*); your private key *x*; and a random number *k* that is unique per signature. An ECDSA signature is two integers, *r* and *s*, computed as follows:

* *r* is the x-coordinate of [*k*]*G*
* *s* is equal to *k*<sup>–1</sup>(*H*(*m*) + *xr*) mod the order of the group

To verify an ECDSA signature, the verifier needs the same hashed message *H*(*m*), the signer's public key, and the signature values *r* and *s*. The verifier then

1. Computes [*H*(*m*)*s*<sup>–1</sup>]*G* + [*rs*<sup>–1</sup>]*public_key*
2. Verifies that the x-coordinate of the point obtained is the same as the signature's value *r*

You can definitely see some resemblance to Schnorr signatures. The random number *k* is sometimes called a *nonce*, because it is a number that must only be used once, and sometimes called an *ephemeral key*, because it must remain secret.

WARNING Let me repeat this one more time: *k* must never repeat, and it must never be predictable! Without these properties, recovering the private key is trivial.

In general, cryptographic libraries take care of generating this nonce (the *k* value) behind the scenes, but sometimes they don't and instead let the caller provide it. This, of course, is a recipe for disaster. For example, in 2010, Sony's PlayStation 3 was found to be using ECDSA with repeating nonces (which leaked their private key).

WARNING More subtly, if the nonce *k* is not picked uniformly at random (specifically, if you can predict its first few bits), there are still powerful attacks (so-called *lattice attacks*) that can recover the private key in a short amount of time. In theory, we call such key-recovery attacks *total breaks* (because they break everything!). Such total breaks are extremely rare in practice, which makes ECDSA an algorithm that can fail in spectacular ways.

Attempts exist to avoid the nonce problem. For example, RFC 6979 specifies a *deterministic ECDSA* scheme in which the nonce is generated based on the message and the private key. This means that signing the same message twice involves the same nonce twice and thus produces the same signature twice (which is obviously not a problem).
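For completeness, here is ECDSA over NIST's P-256 with the pyca/cryptography library (a sketch; the library generates the nonce *k* internally, so the caller never handles it):

```
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec

private_key = ec.generate_private_key(ec.SECP256R1())   # NIST P-256
public_key = private_key.public_key()

message = b"example.com has the public key 0xab70..."
signature = private_key.sign(message, ec.ECDSA(hashes.SHA256()))
public_key.verify(signature, message, ec.ECDSA(hashes.SHA256()))  # raises InvalidSignature if invalid
```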
The elliptic curves used with ECDSA are mostly the same popular curves used with the Elliptic Curve Diffie-Hellman (ECDH) algorithm (see chapter 5), with one notable exception: *Secp256k1*. The Secp256k1 curve is defined in SEC 2: "Recommended Elliptic Curve Domain Parameters" (https://secg.org/sec2-v2.pdf), written by the Standards for Efficient Cryptography Group (SECG). It gained a lot of traction after Bitcoin decided to use it instead of the more popular NIST curves, due to the lack of trust in the NIST curves that I mentioned in chapter 5.

Secp256k1 is a type of elliptic curve called a *Koblitz curve*. A Koblitz curve is just an elliptic curve with some constraints on its parameters that allow implementations to optimize some of the operations on the curve. The elliptic curve has the following equation:

*y*<sup>2</sup> = *x*<sup>3</sup> + *ax* + *b*

where *a* = 0 and *b* = 7 are constants, and *x* and *y* are defined over the numbers modulo the prime *p*:

*p* = 2<sup>256</sup> – 2<sup>32</sup> – 2<sup>9</sup> – 2<sup>8</sup> – 2<sup>7</sup> – 2<sup>6</sup> – 2<sup>4</sup> – 1

This defines a group of prime order, just like the NIST curves. Today, we have efficient formulas to compute the number of points on an elliptic curve. The following prime is the number of points in the Secp256k1 curve (including the point at infinity):

115792089237316195423570985008687907852837564279074904382605163141518161494337

We use as generator (or base point) a fixed point *G* with coordinates

*x* = 55066263022277343669578718895168534326250603453777594175500187360389116729240

and

*y* = 32670510020758816978083085130507043184471273380659243275938904335757337482424

That said, today ECDSA is mostly used with the NIST curve P-256 (sometimes also called *secp256r1*; notice the difference). Next, let's look at another widely popular signature scheme.
### 7.3.4 The Edwards-curve Digital Signature Algorithm (EdDSA)

Let me introduce the last signature algorithm of this chapter: the *Edwards-curve Digital Signature Algorithm* (EdDSA), published in 2011 by Daniel J. Bernstein in response to the lack of trust in the NIST (and other) curves created by government agencies. The name EdDSA might suggest that it is based on the DSA algorithm, like ECDSA, but that is deceiving. EdDSA is actually based on Schnorr signatures, which became possible once the patent on Schnorr signatures expired in early 2008.

One particularity of EdDSA is that the scheme requires no new randomness per signing operation. EdDSA produces signatures *deterministically*. This has made the algorithm quite attractive, and it has already been adopted by many protocols and standards.

EdDSA is expected to be included in NIST's upcoming update of the FIPS 186-5 standard (still a draft as of early 2021). The current official standard is RFC 8032, which defines two curves, at different security levels, for use with EdDSA. Both of the defined curves are *twisted Edwards curves* (a type of elliptic curve that enables interesting implementation optimizations):

* *Edwards25519 is based on Daniel J. Bernstein's Curve25519 (covered in chapter 5).* Its curve operations can be implemented faster than Curve25519's, thanks to optimizations made possible by the type of elliptic curve. Since it was invented after Curve25519, X25519 (the key exchange based on Curve25519) does not benefit from these speed improvements. Like Curve25519, Edwards25519 provides 128-bit security.
* *Edwards448 is based on Mike Hamburg's Ed448-Goldilocks curve.* It provides 224-bit security.

In practice, EdDSA is mostly instantiated with the Edwards25519 curve, and the combination is called *Ed25519* (while EdDSA with Edwards448 is simply called Ed448). Key generation with EdDSA is slightly different from the other schemes we have seen. Instead of directly generating a signing key, EdDSA generates a secret key, which is then used to derive the actual signing key and another key that we call the nonce key. That nonce key is important! It is used to deterministically generate the nonce required for each signature.

NOTE Depending on which cryptographic library you use, you might store either the secret key or the two derived keys (the signing key and the nonce key). This doesn't matter much, but if you're unaware of it, you might be confused to run into Ed25519 keys stored as 32 bytes or as 64 bytes (depending on the implementation used).

To sign, EdDSA first generates the nonce deterministically by hashing the nonce key together with the message to be signed. After that, a process similar to Schnorr signatures follows:

1. Compute the nonce as *HASH*(*nonce key* || *message*)
2. Compute the commitment *R* as [*nonce*]*G*, where *G* is the base point of the group
3. Compute the challenge as *HASH*(*commitment* || *public key* || *message*)
4. Compute the proof *S* as *nonce* + *challenge* × *signing key*

The signature is (*R*, *S*). I illustrate the important parts of EdDSA in figure 7.11.



Figure 7.11 EdDSA key generation produces a secret key, which is then used to derive two more keys. The first derived key is the actual signing key and can thus be used to derive the public key; the other derived key is the nonce key, which is used to deterministically derive a nonce during each signing operation. EdDSA signing resembles Schnorr signatures, except that (1) the nonce is deterministically generated from the nonce key and the message, and (2) the signer's public key is included as part of the challenge.

Notice how the nonce (or ephemeral key) is derived deterministically, rather than probabilistically, from the nonce key and the given message. This means that signing two different messages involves two different nonces, neatly preventing signers from reusing a nonce and thereby leaking their key (as can happen with ECDSA). Signing the same message twice produces the same nonce twice, and then the same signature twice, which is obviously not a problem. A signature can be verified by computing the following two values:

[*S*]*G*

*R* + [*HASH*(*R* || *public key* || *message*)]*public key*

If the two values match, the signature is valid. This is exactly how Schnorr signatures work, except that we are now in an elliptic-curve group and I am using the additive notation.
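You can observe the determinism directly with the library from listing 7.1 (a small sketch):

```
from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey

private_key = Ed25519PrivateKey.generate()
message = b"example.com has the public key 0xab70..."

# The nonce is derived from the nonce key and the message, so signing the
# same message twice yields byte-for-byte identical signatures.
assert private_key.sign(message) == private_key.sign(message)
```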
Ed25519, the most widely used instantiation of EdDSA, is defined with the Edwards25519 curve and SHA-512 as the hash function. The Edwards25519 curve is defined by all the points that satisfy the equation:

–*x*<sup>2</sup> + *y*<sup>2</sup> = 1 + *d* *x*<sup>2</sup>*y*<sup>2</sup>

where the value *d* is the large number

37095705934669439343138083508754565189542113879843219016388785533085940283555

and the variables *x* and *y* are taken modulo the large prime number *p* = 2<sup>255</sup> – 19 (the same prime used for Curve25519). The base point *G* has the coordinates

*x* = 15112221349535400772501151409588531511454012693041857206046113283949847762202

and

*y* = 46316835694926478169428394003475163141307993866256225615783033603165251855960

RFC 8032 actually defines three variants of EdDSA using the Edwards25519 curve. All three variants follow the same key generation algorithm but use different signing and verification algorithms:

* *Ed25519 (or PureEdDSA)*—This is the algorithm I explained previously.
* *Ed25519ctx*—This algorithm introduces a mandatory customization string; it is rarely implemented, let alone used, in practice. The only difference is that some user-chosen prefix is added to every call of the hash function.
* *Ed25519ph (or HashEdDSA)*—This variant allows applications to pre-hash the message before signing it (hence the *ph* in the name). It is built on top of Ed25519ctx and allows the caller to include an optional customization string.

Adding a *customization string* is quite common in cryptography, as you saw with some hash functions in chapter 2 and will see with key derivation functions in chapter 8. It is a useful addition when participants in a protocol use the same key to sign messages in different contexts. For example, imagine an application that lets you sign transactions with your private key and also lets you sign private messages addressed to the people you're talking to. If you mistakenly sign a message that looks like a transaction and send it to your evil friend Eve, she might try to republish it as a valid transaction if there is no way to distinguish between the two types of payloads you sign.

Ed25519ph was introduced merely to satisfy callers who need to sign large messages. As you saw in chapter 2, hash functions usually provide an "init-update-finalize" interface that lets you hash a stream of data continuously, without having to keep the entire input in memory.

You have now completed the tour of the signature schemes used in real-world applications. Next, let's see how you might shoot yourself in the foot when using these signature algorithms. But first, a recap:

* RSA PKCS#1 v1.5 is still widely used, but it is hard to implement correctly, and many implementations have been found to be broken.
* RSA-PSS has a security proof and is easier to implement, but it has seen poor adoption due to the newer schemes based on elliptic curves.
* ECDSA is the main competitor of RSA PKCS#1 v1.5. It is mostly used with the NIST curve P-256, except in the cryptocurrency world, where Secp256k1 appears to dominate.
* Ed25519 is based on Schnorr signatures, has been widely adopted, is easier to implement than ECDSA, and requires no new randomness per signing operation. If you can, this is the algorithm you should use.
## 7.4 Subtle behaviors of signature schemes

Signature schemes can exhibit a number of subtle properties. While these might not matter in most protocols, being unaware of these "gotchas" can end up biting you when you deal with more complex and unconventional protocols. The end of this chapter focuses on the known issues of digital signatures.

### 7.4.1 Substitution attacks on signatures

*A digital signature does not uniquely identify a key or a message.*

—Andrew Ayer ("Duplicate Signature Key Selection Attack in Let's Encrypt," 2015)

*Substitution attacks*, also called *duplicate signature key selection* (DSKS) attacks, are possible on both RSA PKCS#1 v1.5 and RSA-PSS. There are two DSKS variants:

* *Key substitution attacks*—A different key pair or public key is used to verify a given signature over a given message.
* *Message key substitution attacks*—A different key pair or public key is used to verify a given signature over a *new* message.

One more time: the first attack fixes both the message and the signature; the second one fixes only the signature. I recap this in figure 7.12.



Figure 7.12 Signature algorithms like RSA are susceptible to key substitution attacks, a behavior that is surprising and unexpected to most users of cryptography. A *key substitution* attack allows one to take a signature over a message and craft a new key pair that verifies the original signature. A variant called *message key substitution* allows an attacker to craft a new key pair and a new message that are valid under the original signature.

Existential unforgeability under adaptive chosen-message attacks (EUF-CMA)

Substitution attacks expose a gap between theoretical and applied cryptography. Signatures in cryptography are usually analyzed with the *EUF-CMA model*, which stands for existential unforgeability under adaptive chosen-message attacks. In this model, you generate a key pair, and I then ask you to sign a number of arbitrary messages. As I observe the signatures you produce, I win if, at some point, I can produce a valid signature over a message I haven't requested before. Unfortunately, the EUF-CMA model does not seem to cover every edge case, and dangerous subtleties like substitution are not taken into account.

### 7.4.2 Signature malleability

*In February 2014, MtGox, once the largest Bitcoin exchange, closed down and filed for bankruptcy, claiming that attackers had used malleability attacks to drain its accounts.*

—Christian Decker and Roger Wattenhofer ("Bitcoin Transaction Malleability and MtGox," 2014)

Most signature schemes are *malleable*: if you give me a valid signature, I can tamper with that signature so that it becomes a different but still valid signature. I don't know what the signing key is, yet I manage to create a new valid signature.

Non-malleability does not necessarily mean that signatures are unique: if I am the signer, I can usually create different signatures for the same message, which is generally fine. Some constructions, like verifiable random functions (which you will see later, in chapter 8), rely on the uniqueness of signatures, so they either have to deal with this issue or use a signature scheme with unique signatures (like Boneh-Lynn-Shacham, or BLS, signatures).

Strong EUF-CMA

A newer security model called SUF-CMA (strong EUF-CMA) attempts to include non-malleability (or malleability resistance) in the security definition of signature schemes. Some recent standards, like RFC 8032 (which specifies Ed25519), include mitigations against malleability attacks. Because these mitigations are not always present or common, you should not rely on signatures being non-malleable in your protocols.

What should you do with all this information? Rest assured, signature schemes are definitely not broken, and if your use of signatures is not too out of the ordinary, you probably don't have to worry. But if you are designing cryptographic protocols, or if you are implementing something more complex than everyday cryptography, you may want to keep these subtle properties in the back of your mind.

## Summary

* Digital signatures are similar to pen-and-paper signatures, but they are backed by cryptography, which makes them unforgeable by anyone who does not control the signing (private) key.
* Digital signatures can be used to authenticate origin (for example, one side of a key exchange) and to provide transitive trust (if I trust Alice, and she trusts Bob, then I can trust Bob too).
* Zero-knowledge proofs (ZKPs) allow a prover to prove knowledge of some information (called a witness) without revealing that information. Signatures can be seen as non-interactive ZKPs, as they do not require the verifier to be online during the signing operation.
* There are many standards you can use to sign:
  * RSA PKCS#1 v1.5 is widely used today but is not recommended, as it is hard to implement correctly.
  * RSA-PSS is a better signature scheme, as it is easier to implement and has a security proof. Unfortunately, it is not popular nowadays, since elliptic curve variants support shorter keys and are thus more attractive for network protocols.
  * The most popular signature schemes today are based on elliptic curves: ECDSA and EdDSA. ECDSA is often used with the NIST curve P-256, and EdDSA is often used with the Edwards25519 curve (a combination called Ed25519).
* Some subtle properties of signatures can be dangerous if signatures are used in unconventional ways:
  * Because some signature schemes are vulnerable to key substitution attacks, always avoid ambiguity as to who signed a message. An outside participant could craft a new key pair that validates an existing signature over a message, or craft a new key pair and a new message that validate under a given signature.
  * Don't rely on the uniqueness of signatures. First, in most signature schemes, the signer can create an arbitrary number of signatures for the same message. Second, most signature schemes are *malleable*, meaning that an outside participant can take a signature and create another valid signature for the same message.
/apache_airflow_providers_apache_sqoop-4.0.0rc1-py3-none-any.whl/airflow/providers/apache/sqoop/operators/sqoop.py
"""This module contains a sqoop 1 operator."""
from __future__ import annotations
import os
import signal
from typing import TYPE_CHECKING, Any, Sequence
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.providers.apache.sqoop.hooks.sqoop import SqoopHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class SqoopOperator(BaseOperator):
"""
Execute a Sqoop job.
Documentation for Apache Sqoop can be found here: https://sqoop.apache.org/docs/1.4.2/SqoopUserGuide.html
:param conn_id: str
:param cmd_type: str specify command to execute "export" or "import"
:param schema: Schema name
:param table: Table to read
:param query: Import result of arbitrary SQL query. Instead of using the table,
columns and where arguments, you can specify a SQL statement with the query
argument. Must also specify a destination directory with target_dir.
:param target_dir: HDFS destination directory where the data
from the rdbms will be written
:param append: Append data to an existing dataset in HDFS
:param file_type: "avro", "sequence", "text" Imports data to
into the specified format. Defaults to text.
:param columns: <col,col,col> Columns to import from table
:param num_mappers: Use n mapper tasks to import/export in parallel
:param split_by: Column of the table used to split work units
:param where: WHERE clause to use during import
:param export_dir: HDFS Hive database directory to export to the rdbms
:param input_null_string: The string to be interpreted as null
for string columns
:param input_null_non_string: The string to be interpreted as null
for non-string columns
:param staging_table: The table in which data will be staged before
being inserted into the destination table
:param clear_staging_table: Indicate that any data present in the
staging table can be deleted
:param enclosed_by: Sets a required field enclosing character
:param escaped_by: Sets the escape character
:param input_fields_terminated_by: Sets the input field separator
:param input_lines_terminated_by: Sets the input end-of-line character
:param input_optionally_enclosed_by: Sets a field enclosing character
:param batch: Use batch mode for underlying statement execution
:param direct: Use direct export fast path
:param driver: Manually specify JDBC driver class to use
:param verbose: Switch to more verbose logging for debug purposes
:param relaxed_isolation: use read uncommitted isolation level
:param hcatalog_database: Specifies the database name for the HCatalog table
:param hcatalog_table: The argument value for this option is the HCatalog table
:param create_hcatalog_table: Have sqoop create the hcatalog table passed
in or not
:param properties: additional JVM properties passed to sqoop
:param extra_options: Extra import/export options to pass as dict to the SqoopHook.
If a key doesn't have a value, just pass an empty string to it.
Don't include prefix of -- for sqoop options.
:param libjars: Optional Comma separated jar files to include in the classpath.
"""
template_fields: Sequence[str] = (
"conn_id",
"cmd_type",
"table",
"query",
"target_dir",
"file_type",
"columns",
"split_by",
"where",
"export_dir",
"input_null_string",
"input_null_non_string",
"staging_table",
"enclosed_by",
"escaped_by",
"input_fields_terminated_by",
"input_lines_terminated_by",
"input_optionally_enclosed_by",
"properties",
"extra_options",
"driver",
"hcatalog_database",
"hcatalog_table",
"schema",
)
template_fields_renderers = {"query": "sql"}
ui_color = "#7D8CA4"
def __init__(
self,
*,
conn_id: str = "sqoop_default",
cmd_type: str = "import",
table: str | None = None,
query: str | None = None,
target_dir: str | None = None,
append: bool = False,
file_type: str = "text",
columns: str | None = None,
num_mappers: int | None = None,
split_by: str | None = None,
where: str | None = None,
export_dir: str | None = None,
input_null_string: str | None = None,
input_null_non_string: str | None = None,
staging_table: str | None = None,
clear_staging_table: bool = False,
enclosed_by: str | None = None,
escaped_by: str | None = None,
input_fields_terminated_by: str | None = None,
input_lines_terminated_by: str | None = None,
input_optionally_enclosed_by: str | None = None,
batch: bool = False,
direct: bool = False,
driver: Any | None = None,
verbose: bool = False,
relaxed_isolation: bool = False,
properties: dict[str, Any] | None = None,
hcatalog_database: str | None = None,
hcatalog_table: str | None = None,
create_hcatalog_table: bool = False,
extra_options: dict[str, Any] | None = None,
schema: str | None = None,
libjars: str | None = None,
**kwargs: Any,
) -> None:
super().__init__(**kwargs)
self.conn_id = conn_id
self.cmd_type = cmd_type
self.table = table
self.query = query
self.target_dir = target_dir
self.append = append
self.file_type = file_type
self.columns = columns
self.num_mappers = num_mappers
self.split_by = split_by
self.where = where
self.export_dir = export_dir
self.input_null_string = input_null_string
self.input_null_non_string = input_null_non_string
self.staging_table = staging_table
self.clear_staging_table = clear_staging_table
self.enclosed_by = enclosed_by
self.escaped_by = escaped_by
self.input_fields_terminated_by = input_fields_terminated_by
self.input_lines_terminated_by = input_lines_terminated_by
self.input_optionally_enclosed_by = input_optionally_enclosed_by
self.batch = batch
self.direct = direct
self.driver = driver
self.verbose = verbose
self.relaxed_isolation = relaxed_isolation
self.hcatalog_database = hcatalog_database
self.hcatalog_table = hcatalog_table
self.create_hcatalog_table = create_hcatalog_table
self.properties = properties
self.extra_options = extra_options or {}
self.hook: SqoopHook | None = None
self.schema = schema
self.libjars = libjars
def execute(self, context: Context) -> None:
"""Execute sqoop job."""
if self.hook is None:
self.hook = self._get_hook()
if self.cmd_type == "export":
self.hook.export_table(
table=self.table, # type: ignore
export_dir=self.export_dir,
input_null_string=self.input_null_string,
input_null_non_string=self.input_null_non_string,
staging_table=self.staging_table,
clear_staging_table=self.clear_staging_table,
enclosed_by=self.enclosed_by,
escaped_by=self.escaped_by,
input_fields_terminated_by=self.input_fields_terminated_by,
input_lines_terminated_by=self.input_lines_terminated_by,
input_optionally_enclosed_by=self.input_optionally_enclosed_by,
batch=self.batch,
relaxed_isolation=self.relaxed_isolation,
schema=self.schema,
)
elif self.cmd_type == "import":
if self.table and self.query:
raise AirflowException("Cannot specify query and table together. Need to specify either or.")
if self.table:
self.hook.import_table(
table=self.table,
target_dir=self.target_dir,
append=self.append,
file_type=self.file_type,
columns=self.columns,
split_by=self.split_by,
where=self.where,
direct=self.direct,
driver=self.driver,
schema=self.schema,
)
elif self.query:
self.hook.import_query(
query=self.query,
target_dir=self.target_dir,
append=self.append,
file_type=self.file_type,
split_by=self.split_by,
direct=self.direct,
driver=self.driver,
)
else:
raise AirflowException("Provide query or table parameter to import using Sqoop")
else:
raise AirflowException("cmd_type should be 'import' or 'export'")
def on_kill(self) -> None:
if self.hook is None:
self.hook = self._get_hook()
self.log.info("Sending SIGTERM signal to bash process group")
os.killpg(os.getpgid(self.hook.sub_process_pid), signal.SIGTERM)
def _get_hook(self) -> SqoopHook:
"""Returns a SqoopHook instance."""
# Add `create-hcatalog-table` to extra options if option passed to operator in case of `import`
# command. Similarly, if new parameters are added to the operator, you can pass them to
# `extra_options` so that you don't need to modify `SqoopHook` for each new parameter.
if self.cmd_type == "import" and self.create_hcatalog_table:
self.extra_options["create-hcatalog-table"] = ""
return SqoopHook(
conn_id=self.conn_id,
verbose=self.verbose,
num_mappers=self.num_mappers,
hcatalog_database=self.hcatalog_database,
hcatalog_table=self.hcatalog_table,
properties=self.properties,
libjars=self.libjars,
extra_options=self.extra_options,
)
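# --- Usage sketch (not part of the module) -----------------------------------
# A hypothetical task definition inside a DAG; the connection id, table, and
# paths are assumptions for illustration only:
#
#     sqoop_import = SqoopOperator(
#         task_id="sqoop_import_customers",
#         conn_id="sqoop_default",
#         cmd_type="import",
#         table="customers",
#         target_dir="/data/customers",
#         num_mappers=4,
#         file_type="avro",
#     )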
/XCorVar-0.1.2.tar.gz/XCorVar-0.1.2/sample/toolbox/diffusion/scheme.py
import copy as cpy
import math as mth

import numpy as np

from sample.toolbox.diffusion.utilities import Step, State, SuperState, StoDifEq


class Scheme:
    # public:
    def __init__(self, init, sto_dif_eq, step):
        self.state = cpy.copy(init)
        self.sto_dif_eq = sto_dif_eq
        self.step = step

    @property
    def curr(self):
        return self.state

    def next_time(self):
        self.state.time += self.step.time

    def next_space(self, noise):
        self.state.space = self.get_next_space(self.state.time, self.state.space, noise)
        if isinstance(self.state, SuperState):
            # antithetic path: advance the mirrored state with the negated noise
            self.state.bar_space = self.get_next_space(self.state.time, self.state.bar_space, -noise)

    def next(self, noise):
        self.next_space(noise)
        self.next_time()

    def get_coeff(self, time, space):
        raise NotImplementedError

    def get_next_space(self, time, space, noise):
        raise NotImplementedError


class EulerScheme(Scheme):
    # public:
    def __init__(self, init, sto_dif_eq, step):
        super().__init__(init, sto_dif_eq, step)

    def get_coeff(self, time, space):
        return self.sto_dif_eq.coeff(time, space)

    def get_next_space(self, time, space, noise=None):
        drift, vol = self.get_coeff(time, space)
        return space + self.step.time * drift + self.step.brown * vol * noise


class MultiEulerScheme(Scheme):
    # public:
    def __init__(self, init, multi_sto_dif_eq, step):
        super().__init__(init, multi_sto_dif_eq, step)
        if hasattr(multi_sto_dif_eq, 'corr'):
            self.sqr_corr = np.linalg.cholesky(multi_sto_dif_eq.corr)

    def get_coeff(self, time, space):
        return self.sto_dif_eq.coeff(time, space)

    def get_next_space(self, time, space, noise=None):
        drift, vol = self.get_coeff(time, space)
        normal = self.sqr_corr @ noise if hasattr(self, 'sqr_corr') else noise
        return space + self.step.time * drift + self.step.brown * vol @ normal


class MultiCorrDepEulerScheme(Scheme):
    # public:
    def __init__(self, init, multi_corr_dep_sto_dif_eq, step):
        super().__init__(init, multi_corr_dep_sto_dif_eq, step)

    def get_coeff(self, time, space):
        return self.sto_dif_eq.coeff(time, space)

    def get_next_space(self, time, space, noise=None):
        drift, vol, corr = self.get_coeff(time, space)
        sqr_corr = np.linalg.cholesky(corr)
        return space + self.step.time * drift + self.step.brown * vol @ sqr_corr @ noise


class BlackScholesScheme(Scheme):
    # public:
    def __init__(self, spot, vol, mu, step):
        step = Step(step)
        init = State(0, spot)
        sto_diff_eq = StoDifEq(lambda time, space: (mu, vol))
        super().__init__(init, sto_diff_eq, step)

    def get_coeff(self, time, space):
        return self.sto_dif_eq.coeff(time, space)

    def get_next_space(self, time, space, noise=None):
        drift, vol = self.get_coeff(time, space)
        return space * mth.exp(drift * self.step.time + vol * self.step.brown * noise)


class LocalVolatilityScheme(EulerScheme):
    # public:
    def __init__(self, loc_vol, step):
        init = State(0, 0)
        step = Step(step)

        def get_coeff(time, space):
            sig = loc_vol(time, space)
            return (-0.5 * sig * sig, sig)

        sto_diff_eq = StoDifEq(get_coeff)
        super().__init__(init, sto_diff_eq, step)
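# --- Usage sketch (not part of the package) -----------------------------------
# A hypothetical simulation of one Black-Scholes path, assuming Step(dt)
# exposes `time` (= dt) and `brown` (the Brownian scaling, typically sqrt(dt)):
#
#     import numpy as np
#     scheme = BlackScholesScheme(spot=100.0, vol=0.2, mu=0.05, step=1.0 / 252)
#     for _ in range(252):
#         scheme.next(np.random.standard_normal())
#     print(scheme.curr.time, scheme.curr.space)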
/peachapisec-ci-jira-1.5.41.tar.gz/peachapisec-ci-jira-1.5.41/README.rst
# Peach API Security CI JIRA Integration
This package provides the Peach API Security JIRA Integration
scripts. The integration scripts allow populating found issues
into JIRA when running from a CI system.
## Installation
This integration is available as a pip installable package.
`$ pip install peachapisec-ci-jira`
### Offline Installation
Install required python dependencies using our offline dependencies
folder.
`$ pip install --no-index --find-links ../../deps -r requirements.txt`
## Usage
Please see the Peach API Security User Guide for information on usage.
/oct-firecam-0.1.3.tar.gz/oct-firecam-0.1.3/firecam/lib/img_archive.py
from firecam.lib import goog_helper

import os
import logging
import urllib.request
import time, datetime, dateutil.parser
from html.parser import HTMLParser
import requests
import re

from PIL import Image, ImageMath


def getImgPath(outputDir, cameraID, timestamp, cropCoords=None, diffMinutes=0):
    """Generate properly formatted image filename path following Firecam conventions
    E.g.: lo-s-mobo-c__2018-06-06T11;12;23_Diff1_Crop_627x632x1279x931.jpg

    Args:
        outputDir (str): Output directory
        cameraID (str): ID of camera
        timestamp (int): timestamp
        cropCoords (tuple): (x0, y0, x1, y1) coordinates of the crop rectangle
        diffMinutes (int): number of minutes separating the images (for subtracted images)

    Returns:
        String to full path name
    """
    timeStr = datetime.datetime.fromtimestamp(timestamp).isoformat()
    timeStr = timeStr.replace(':', ';')  # make windows happy
    imgName = '__'.join([cameraID, timeStr])
    if diffMinutes:
        imgName += ('_Diff%d' % diffMinutes)
    if cropCoords:
        imgName += '_Crop_' + 'x'.join(list(map(lambda x: str(x), cropCoords)))
    imgPath = os.path.join(outputDir, imgName + '.jpg')
    return imgPath


def repackFileName(parsedName):
    """Generate properly formatted image filename following Firecam conventions
    based on information from parsedName dictionary
    E.g.: lo-s-mobo-c__2018-06-06T11;12;23_Diff1_Crop_627x632x1279x931.jpg

    Args:
        parsedName (dict): Dictionary containing various attributes of image
                           (likely result from earlier call to parseFilename())

    Returns:
        String to file name
    """
    cropCoords = None
    if 'minX' in parsedName:
        cropCoords = (parsedName['minX'], parsedName['minY'], parsedName['maxX'], parsedName['maxY'])
    return getImgPath('', parsedName['cameraID'], parsedName['unixTime'],
                      cropCoords=cropCoords,
                      diffMinutes=parsedName['diffMinutes'])


def parseFilename(fileName):
    """Parse the image source attributes given the properly formatted image filename

    Args:
        fileName (str):

    Returns:
        Dictionary with parsed out attributes
    """
    # regex to match names like Axis-BaldCA_2018-05-29T16_02_30_129496.jpg
    # and bm-n-mobo-c__2017-06-25z11;53;33.jpg
    regexExpanded = '([A-Za-z0-9-_]+[^_])_+(\d{4}-\d\d-\d\d)T(\d\d)[_;](\d\d)[_;](\d\d)'
    # regex to match diff minutes spec for subtracted images
    regexDiff = '(_Diff(\d+))?'
    # regex to match optional crop information e.g., Axis-Cowles_2019-02-19T16;23;49_Crop_270x521x569x820.jpg
    regexOptionalCrop = '(_Crop_(-?\d+)x(-?\d+)x(\d+)x(\d+))?'
    matchesExp = re.findall(regexExpanded + regexDiff + regexOptionalCrop, fileName)
    # regex to match names like 1499546263.jpg
    regexUnixTime = '(1\d{9})'
    matchesUnix = re.findall(regexUnixTime + regexDiff + regexOptionalCrop, fileName)
    cropInfo = None
    if len(matchesExp) == 1:
        match = matchesExp[0]
        parsed = {
            'cameraID': match[0],
            'date': match[1],
            'hours': match[2],
            'minutes': match[3],
            'seconds': match[4]
        }
        isoStr = '{date}T{hour}:{min}:{sec}'.format(date=parsed['date'], hour=parsed['hours'], min=parsed['minutes'], sec=parsed['seconds'])
        dt = dateutil.parser.parse(isoStr)
        unixTime = time.mktime(dt.timetuple())
        parsed['diffMinutes'] = int(match[6] or 0)
        cropInfo = match[-4:]
    elif len(matchesUnix) == 1:
        match = matchesUnix[0]
        unixTime = int(match[0])
        dt = datetime.datetime.fromtimestamp(unixTime)
        isoStr = datetime.datetime.fromtimestamp(unixTime).isoformat()
        parsed = {
            'cameraID': 'UNKNOWN_' + fileName,
            'date': dt.date().isoformat(),
            'hours': str(dt.hour),
            'minutes': str(dt.minute),
            'seconds': str(dt.second)
        }
        parsed['diffMinutes'] = int(match[2] or 0)
        cropInfo = match[-4:]
    else:
        logging.error('Failed to parse name %s', fileName)
        return None
    if cropInfo[0]:
        parsed['minX'] = int(cropInfo[0])
        parsed['minY'] = int(cropInfo[1])
        parsed['maxX'] = int(cropInfo[2])
        parsed['maxY'] = int(cropInfo[3])
    parsed['isoStr'] = isoStr
    parsed['unixTime'] = unixTime
    return parsed
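# Example (sketch): parsing the sample name from getImgPath's docstring
#
#     parsed = parseFilename('lo-s-mobo-c__2018-06-06T11;12;23_Diff1_Crop_627x632x1279x931.jpg')
#     parsed['cameraID']     -> 'lo-s-mobo-c'
#     parsed['diffMinutes']  -> 1
#     (parsed['minX'], parsed['maxY']) -> (627, 931)
#     repackFileName(parsed) -> the same filename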
class HpwrenHTMLParser(HTMLParser):
"""Dervied class from HTMLParser to pull out file information from HTML directory listing pages
Allows caller to specify fileType (extension) the caller cares about
"""
def __init__(self, fileType):
self.table = []
self.filetype = fileType
super().__init__()
def handle_starttag(self, tag, attrs):
"""Handler for HTML starting tag (<).
If the tag type is <a> and it contains an href link pointing to file of specified type,
then save the name for extraction by getTable()
"""
if (tag == 'a') and len(attrs) > 0:
# print('Found <a> %s', len(attrs), attrs)
for attr in attrs:
# print('Found attr %s', len(attr), attr)
if len(attr) == 2 and attr[0]=='href' and attr[1][-4:] == self.filetype:
self.table.append(attr[1])
def getTable(self):
return self.table
def parseDirHtml(dirHtml, fileType):
"""Wrapper around HpwrenHTMLParser to pull out entries of given fileType
Args:
dirHtml (str): HTML page for directory listing
fileType (str): File extension (e.g.: '.jpg')
Returns:
List of file names matching extension
"""
parser = HpwrenHTMLParser(fileType)
parser.feed(dirHtml)
return parser.getTable()
def fetchImgOrDir(url, verboseLogs):
"""Read the given URL and return the data. Also note if data is an image
Args:
url (str): URL to read
verboseLogs (bool): Write verbose logs for debugging
Returns:
Tuple indicating image or directory and the data
"""
try:
resp = urllib.request.urlopen(url)
except Exception as e:
if verboseLogs:
logging.error('Result of fetch from %s: %s', url, str(e))
return (None, None)
if resp.getheader('content-type') == 'image/jpeg':
return ('img', resp)
else:
return ('dir', resp)
def readUrlDir(urlPartsQ, verboseLogs, fileType):
"""Get the files of given fileType from the given HPWREN Q directory URL
Args:
urlPartsQ (list): HPWREN Q directory URL as list of string parts
verboseLogs (bool): Write verbose logs for debugging
fileType (str): File extension (e.g.: '.jpg')
Returns:
List of file names matching extension
"""
# logging.warning('Dir URLparts %s', urlPartsQ)
url = '/'.join(urlPartsQ)
# logging.warning('Dir URL %s', url)
(imgOrDir, resp) = fetchImgOrDir(url, verboseLogs)
if not imgOrDir:
return None
assert imgOrDir == 'dir'
dirHtml = resp.read().decode('utf-8')
return parseDirHtml(dirHtml, fileType)
def listTimesinQ(urlPartsQ, verboseLogs):
"""Get the timestamps of images from the given HPWREN Q directory URL
Args:
urlPartsQ (list): HPWREN Q directory URL as list of string parts
verboseLogs (bool): Write verbose logs for debugging
Returns:
List of timestamps
"""
files = readUrlDir(urlPartsQ, verboseLogs, '.jpg')
if files:
return list(map(lambda x: {'time': int(x[:-4])}, files))
return None
def downloadHttpFileAtTime(outputDir, urlPartsQ, cameraID, closestTime, verboseLogs):
"""Download HPWREN image from given HPWREN Q directory URL at given time
Args:
outputDir (str): Output directory path
urlPartsQ (list): HPWREN Q directory URL as list of string parts
cameraID (str): ID of camera
closestTime (int): Desired timestamp
verboseLogs (bool): Write verbose logs for debugging
Returns:
Local filesystem path to downloaded image
"""
imgPath = getImgPath(outputDir, cameraID, closestTime)
if verboseLogs:
logging.warning('Local file %s', imgPath)
if os.path.isfile(imgPath):
logging.warning('File %s already downloaded', imgPath)
return imgPath
closestFile = str(closestTime) + '.jpg'
urlParts = urlPartsQ[:] # copy URL parts array
urlParts.append(closestFile)
# logging.warning('File URLparts %s', urlParts)
url = '/'.join(urlParts)
logging.warning('File URL %s', url)
# urllib.request.urlretrieve(url, imgPath)
resp = requests.get(url, stream=True)
with open(imgPath, 'wb') as f:
for chunk in resp.iter_content(chunk_size=8192):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
resp.close()
return imgPath
def downloadGCSFileAtTime(outputDir, closestEntry):
"""Download HPWREN image from GCS folder from ffmpeg Google Cloud Function
Args:
outputDir (str): Output directory path
closestEntry (dict): Desired timestamp and GCS file
Returns:
Local filesystem path to downloaded image
"""
imgPath = os.path.join(outputDir, closestEntry['name'])
logging.warning('Local file %s', imgPath)
if os.path.isfile(imgPath):
logging.warning('File %s already downloaded', imgPath)
return imgPath
parsedPath = goog_helper.parseGCSPath(closestEntry['id'])
goog_helper.downloadBucketFile(parsedPath['bucket'], parsedPath['name'], imgPath)
return imgPath
def getMp4Url(urlPartsDate, qNum, verboseLogs):
"""Get the URL for the MP4 video for given Q
Args:
urlPartsDate (list): HPWREN date directory URL as list of string parts
qNum (int): Q number (1-8) where each Q represents 3 hour period
verboseLogs (bool): Write verbose logs for debugging
Returns:
URL to Q diretory
"""
urlPartsMp4 = urlPartsDate[:] # copy URL
urlPartsMp4.append('MP4')
files = readUrlDir(urlPartsMp4, verboseLogs, '.mp4')
if verboseLogs:
logging.warning('MP4s %s', files)
qMp4Name = 'Q' + str(qNum) + '.mp4'
if files and (qMp4Name in files):
urlPartsMp4.append(qMp4Name)
return '/'.join(urlPartsMp4)
return None
def gcfFfmpeg(gcfUrl, googleServices, hpwrenSource, qNum, folderID):
"""invoke the Google Cloud Function for ffpeg decompression with proper parameters and credentials
Args:
gcfUrl (str): URL for ffmpeg cloud function
googleServices (): Google services and credentials
hpwrenSource (dict): Dictionary containing various HPWREN source information
qNum (int): Q number (1-8) where each Q represents a 3 hour period
folderID (str): GCS path (or drive folder ID) where extracted images are uploaded
Returns:
Cloud function result
"""
token = goog_helper.getIdToken(googleServices, gcfUrl)
headers = {'Authorization': 'bearer {}'.format(token)}
gcfParams = {
'hostName': hpwrenSource['server'],
'cameraID': hpwrenSource['cameraID'],
'archiveCamDir': hpwrenSource['urlParts'][1],
'yearDir': hpwrenSource['year'],
'dateDir': hpwrenSource['dateDirName'],
'qNum': qNum,
'uploadDir': folderID
}
rawResponse = requests.post(gcfUrl, headers=headers, data=gcfParams)
response = rawResponse.content.decode()
if response != 'done':
raise ValueError('Failed to upload to cloud (%s, %s). Please retry' % (response, rawResponse))
return response
def getGCSMp4(googleServices, settings, hpwrenSource, qNum):
"""Extract images from Q MP4 video into GCS folder
Args:
googleServices (): Google services and credentials
settings (): settings module
hpwrenSource (dict): Dictionary containing various HPWREN source information
qNum (int): Q number (1-8) where each Q represents a 3 hour period
Returns:
List of dicts describing the extracted images in GCS (time, GCS id, file name)
"""
ffmpegParsedGCS = goog_helper.parseGCSPath(settings.ffmpegFolder)
folderName = hpwrenSource['cameraID'] + '__' + hpwrenSource['dateDirName'] + 'Q' + str(qNum)
folderPath = ffmpegParsedGCS['name'] + '/' + folderName
files = goog_helper.listBucketEntries(ffmpegParsedGCS['bucket'], prefix=(folderPath + '/'))
logging.warning('Found %d GCS files', len(files))
if not files:
logging.warning('Calling Cloud Function for folder %s', folderName)
uploadDir = goog_helper.repackGCSPath(ffmpegParsedGCS['bucket'],folderPath)
gcfRes = gcfFfmpeg(settings.ffmpegUrl, googleServices, hpwrenSource, qNum, uploadDir)
logging.warning('Cloud function result %s', gcfRes)
files = goog_helper.listBucketEntries(ffmpegParsedGCS['bucket'], prefix=(folderPath + '/'))
# logging.warning('GDM4: files %d %s', len(files), files)
imgTimes = []
for filePath in files:
fileName = filePath.split('/')[-1]
nameParsed = parseFilename(fileName)
imgTimes.append({
'time': nameParsed['unixTime'],
'id': goog_helper.repackGCSPath(ffmpegParsedGCS['bucket'], filePath),
'name': fileName
})
return imgTimes
# Special outputDir value: verify files are retrievable without actually downloading them
outputDirCheckOnly = '/CHECK:WITHOUT:DOWNLOAD'
def downloadFilesForDate(googleServices, settings, outputDir, hpwrenSource, gapMinutes, verboseLogs):
"""Download HPWREN images from given given date time range with specified gaps
If outputDir is special value outputDirCheckOnly, then just check if files are retrievable
Args:
googleServices (): Google services and credentials
settings (): settings module
outputDir (str): Output directory path
hpwrenSource (dict): Dictionary containing various HPWREN source information
gapMinutes (int): Number of minutes of gap between images for downloading
verboseLogs (bool): Write verbose logs for debugging
Returns:
List of local filesystem paths to downloaded images
"""
startTimeDT = hpwrenSource['startTimeDT']
endTimeDT = hpwrenSource['endTimeDT']
dateDirName = '{year}{month:02d}{date:02d}'.format(year=startTimeDT.year, month=startTimeDT.month, date=startTimeDT.day)
hpwrenSource['dateDirName'] = dateDirName
urlPartsDate = hpwrenSource['urlParts'][:] # copy URL
urlPartsDate.append(dateDirName)
hpwrenSource['urlPartsDate'] = urlPartsDate
timeGapDelta = datetime.timedelta(seconds = 60*gapMinutes)
imgTimes = None
lastQNum = 0 # 0 never matches because Q numbers start with 1
curTimeDT = startTimeDT
downloaded_files = []
while curTimeDT <= endTimeDT:
qNum = 1 + int(curTimeDT.hour/3)
urlPartsQ = urlPartsDate[:] # copy URL
urlPartsQ.append('Q' + str(qNum))
if qNum != lastQNum:
# List times of files in Q dir and cache
useHttp = True
imgTimes = listTimesinQ(urlPartsQ, verboseLogs)
if not imgTimes:
if verboseLogs:
logging.error('No images in Q dir %s', '/'.join(urlPartsQ))
mp4Url = getMp4Url(urlPartsDate, qNum, verboseLogs)
if not mp4Url:
return downloaded_files
if outputDir != outputDirCheckOnly:
imgTimes = getGCSMp4(googleServices, settings, hpwrenSource, qNum)
useHttp = False
# logging.warning('imgTimes %d %s', len(imgTimes), imgTimes)
lastQNum = qNum
if outputDir == outputDirCheckOnly:
downloaded_files.append(outputDirCheckOnly)
else:
desiredTime = time.mktime(curTimeDT.timetuple())
closestEntry = min(imgTimes, key=lambda x: abs(x['time']-desiredTime))
closestTime = closestEntry['time']
downloaded = None
if useHttp:
downloaded = downloadHttpFileAtTime(outputDir, urlPartsQ, hpwrenSource['cameraID'], closestTime, verboseLogs)
else:
downloaded = downloadGCSFileAtTime(outputDir, closestEntry)
if downloaded and verboseLogs:
logging.warning('Successful download for time %s', str(datetime.datetime.fromtimestamp(closestTime)))
if downloaded:
downloaded_files.append(downloaded)
curTimeDT += timeGapDelta
return downloaded_files
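# Hedged illustrative sketch (not called anywhere): the closest-timestamp
# selection used inside downloadFilesForDate, shown on hypothetical data.
def _exampleClosestTimeSelection():
    imgTimes = [{'time': 100}, {'time': 160}, {'time': 220}]
    desiredTime = 150
    closestEntry = min(imgTimes, key=lambda x: abs(x['time'] - desiredTime))
    assert closestEntry['time'] == 160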
def downloadFilesHpwren(googleServices, settings, outputDir, hpwrenSource, gapMinutes, verboseLogs):
"""Download HPWREN images from given given date time range with specified gaps
Calls downloadFilesForDate to do the heavy lifting, but first determines the hpwren server.
First tries without year directory in URL path, and if that fails, then retries with year dir
Args:
googleServices (): Google services and credentials
settings (): settings module
outputDir (str): Output directory path
hpwrenSource (dict): Dictionary containing various HPWREN source information
gapMinutes (int): Number of minutes of gap between images for downloading
verboseLogs (bool): Write verbose logs for debugging
Returns:
List of local filesystem paths to downloaded images
"""
regexDir = '(c[12])/([^/]+)/large/?'
matches = re.findall(regexDir, hpwrenSource['dirName'])
if len(matches) != 1:
logging.error('Could not parse dir: %s', hpwrenSource['dirName'])
return None
match = matches[0]
(server, subdir) = match
hpwrenBase = 'http://{server}.hpwren.ucsd.edu/archive'.format(server=server)
hpwrenSource['server'] = server
urlParts = [hpwrenBase, subdir, 'large']
hpwrenSource['urlParts'] = urlParts
# first try without year directory
hpwrenSource['year'] = ''
downloaded_files = downloadFilesForDate(googleServices, settings, outputDir, hpwrenSource, gapMinutes, verboseLogs)
if downloaded_files:
return downloaded_files
# retry with year directory
hpwrenSource['year'] = str(hpwrenSource['startTimeDT'].year)
urlParts.append(hpwrenSource['year'])
hpwrenSource['urlParts'] = urlParts
return downloadFilesForDate(googleServices, settings, outputDir, hpwrenSource, gapMinutes, verboseLogs)
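# Hedged illustrative sketch (not called anywhere): shows how the regex in
# downloadFilesHpwren splits an archive dir name into (server, subdir).  The
# dir name below is hypothetical.
def _exampleParseHpwrenDirName():
    matches = re.findall('(c[12])/([^/]+)/large/?', 'c1/smer-tcs9-mobo-c/large')
    assert matches == [('c1', 'smer-tcs9-mobo-c')]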
def getHpwrenCameraArchives(hpwrenArchivesPath):
"""Get the HPWREN camera archive directories from given file
Args:
hpwrenArchivesPath (str): path (local of GCS) to file with archive info
Returns:
List of archive directories
"""
archiveData = goog_helper.readFile(hpwrenArchivesPath)
camArchives = []
for line in archiveData.split('\n'):
camInfo = line.split(' ')
# logging.warning('info %d, %s', len(camInfo), camInfo)
if len(camInfo) != 2:
logging.warning('Ignoring archive entry without two columns %s', camInfo)
continue
dirInfo = camInfo[1].split('/')
if len(dirInfo) < 2:
logging.warning('Ignoring archive entry without proper ID %s', dirInfo)
continue
cameraID = dirInfo[1]
matchesID = list(filter(lambda x: cameraID == x['id'], camArchives))
if matchesID:
if camInfo[1] not in matchesID[0]['dirs']:
matchesID[0]['dirs'].append(camInfo[1])
# logging.warning('Merging duplicate ID dir %s, %s', camInfo[1], matchesID[0])
continue
preIndex = camInfo[0].find('pre')
if preIndex > 0:
searchName = camInfo[0][:(preIndex-1)]
matchesName = list(filter(lambda x: searchName in x['name'], camArchives))
for match in matchesName:
if camInfo[1] not in match['dirs']:
match['dirs'].append(camInfo[1])
# logging.warning('Merging pre dir %s to %s', camInfo[1], match)
continue
camData = {'id': cameraID, 'name': camInfo[0], 'dirs': [camInfo[1]]}
# logging.warning('data %s', camData)
camArchives.append(camData)
logging.warning('Discovered total %d camera archive dirs', len(camArchives))
return camArchives
def findCameraInArchive(camArchives, cameraID):
"""Find the entries in the camera archive directories for the given camera
Args:
camArchives (list): Result of getHpwrenCameraArchives() above
cameraID (str): ID of camera to fetch images from
Returns:
List of archive dirs matching the given camera
"""
matchingCams = list(filter(lambda x: cameraID == x['id'], camArchives))
# logging.warning('Found %d match(es): %s', len(matchingCams), matchingCams)
if matchingCams:
return matchingCams[0]['dirs']
else:
return []
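# Hedged illustrative sketch (not called anywhere): the expected camArchives
# structure and lookup by camera ID, using hypothetical entries.
def _exampleFindCameraInArchive():
    camArchives = [
        {'id': 'smer-tcs9-mobo-c', 'name': 'smer-tcs9', 'dirs': ['c1/smer-tcs9-mobo-c/large']},
    ]
    assert findCameraInArchive(camArchives, 'smer-tcs9-mobo-c') == ['c1/smer-tcs9-mobo-c/large']
    assert findCameraInArchive(camArchives, 'no-such-camera') == []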
def getHpwrenImages(googleServices, settings, outputDir, camArchives, cameraID, startTimeDT, endTimeDT, gapMinutes):
"""Download HPWREN images from given camera and date time range with specified gaps
Iterates over all directories for given camera in the archives and then downloads the images
by calling downloadFilesHpwren
Args:
googleServices (): Google services and credentials
settings (): settings module
outputDir (str): Output directory path
camArchives (list): Result of getHpwrenCameraArchives() above
cameraID (str): ID of camera to fetch images from
startTimeDT (datetime): starting time of time range
endTimeDT (datetime): ending time of time range
gapMinutes (int): Number of minutes of gap between images for downloading
Returns:
List of local filesystem paths to downloaded images
"""
matchingDirs = findCameraInArchive(camArchives, cameraID)
for matchingDir in matchingDirs:
hpwrenSource = {
'cameraID': cameraID,
'dirName': matchingDir,
'startTimeDT': startTimeDT,
'endTimeDT': endTimeDT
}
logging.warning('Searching for files in dir %s', hpwrenSource['dirName'])
found = downloadFilesHpwren(googleServices, settings, outputDir, hpwrenSource, gapMinutes, False)
if found:
return found
return None
def diffImages(imgA, imgB):
"""Subtract two images (r-r, g-g, b-b). Also add 128 to reduce negative values
If a pixel is exactly same in both images, then the result will be 128,128,128 gray
Out of range values (<0 and > 255) are moved to 0 and 255 by the convert('L') function
Args:
imgA: Pillow image object to subtract from
imgB: Pillow image object to subtract
Returns:
Pillow image object containing the results of the subtraction with 128 mean
"""
bandsImgA = imgA.split()
bandsImgB = imgB.split()
bandsImgOut = []
for bandNum in range(len(bandsImgA)):
# out = ImageMath.eval("convert((128+a/2)-b/2,'L')", a=bandsImgA[bandNum], b=bandsImgB[bandNum])
out = ImageMath.eval("convert(128+a-b,'L')", a=bandsImgA[bandNum], b=bandsImgB[bandNum])
bandsImgOut.append(out)
return Image.merge('RGB', bandsImgOut)
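# Hedged illustrative sketch (not called anywhere): subtracting an image from
# itself yields uniform (128,128,128) gray, per the 128 offset in diffImages.
def _exampleDiffImages():
    imgA = Image.new('RGB', (4, 4), color=(10, 20, 30))
    imgB = Image.new('RGB', (4, 4), color=(10, 20, 30))
    diff = diffImages(imgA, imgB)
    assert diff.getpixel((0, 0)) == (128, 128, 128)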
/cdktf_cdktf_provider_aws-17.0.2-py3-none-any.whl/cdktf_cdktf_provider_aws/data_aws_route53_traffic_policy_document/__init__.py
import abc
import builtins
import datetime
import enum
import typing
import jsii
import publication
import typing_extensions
from typeguard import check_type
from .._jsii import *
import cdktf as _cdktf_9a9027ec
import constructs as _constructs_77d1e7e8
class DataAwsRoute53TrafficPolicyDocument(
_cdktf_9a9027ec.TerraformDataSource,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-aws.dataAwsRoute53TrafficPolicyDocument.DataAwsRoute53TrafficPolicyDocument",
):
'''Represents a {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document aws_route53_traffic_policy_document}.'''
def __init__(
self,
scope: _constructs_77d1e7e8.Construct,
id_: builtins.str,
*,
endpoint: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union["DataAwsRoute53TrafficPolicyDocumentEndpoint", typing.Dict[builtins.str, typing.Any]]]]] = None,
id: typing.Optional[builtins.str] = None,
record_type: typing.Optional[builtins.str] = None,
rule: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union["DataAwsRoute53TrafficPolicyDocumentRule", typing.Dict[builtins.str, typing.Any]]]]] = None,
start_endpoint: typing.Optional[builtins.str] = None,
start_rule: typing.Optional[builtins.str] = None,
version: typing.Optional[builtins.str] = None,
connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
) -> None:
'''Create a new {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document aws_route53_traffic_policy_document} Data Source.
:param scope: The scope in which to define this construct.
:param id_: The scoped construct ID. Must be unique amongst siblings in the same scope
:param endpoint: endpoint block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#endpoint DataAwsRoute53TrafficPolicyDocument#endpoint}
:param id: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#id DataAwsRoute53TrafficPolicyDocument#id}. Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
:param record_type: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#record_type DataAwsRoute53TrafficPolicyDocument#record_type}.
:param rule: rule block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#rule DataAwsRoute53TrafficPolicyDocument#rule}
:param start_endpoint: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#start_endpoint DataAwsRoute53TrafficPolicyDocument#start_endpoint}.
:param start_rule: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#start_rule DataAwsRoute53TrafficPolicyDocument#start_rule}.
:param version: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#version DataAwsRoute53TrafficPolicyDocument#version}.
:param connection:
:param count:
:param depends_on:
:param for_each:
:param lifecycle:
:param provider:
:param provisioners:
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__3f59d1716fb29cedfeedb7d4d0793a9153c7a8e17bd02235e7375fa23200d019)
check_type(argname="argument scope", value=scope, expected_type=type_hints["scope"])
check_type(argname="argument id_", value=id_, expected_type=type_hints["id_"])
config = DataAwsRoute53TrafficPolicyDocumentConfig(
endpoint=endpoint,
id=id,
record_type=record_type,
rule=rule,
start_endpoint=start_endpoint,
start_rule=start_rule,
version=version,
connection=connection,
count=count,
depends_on=depends_on,
for_each=for_each,
lifecycle=lifecycle,
provider=provider,
provisioners=provisioners,
)
jsii.create(self.__class__, self, [scope, id_, config])
@jsii.member(jsii_name="putEndpoint")
def put_endpoint(
self,
value: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union["DataAwsRoute53TrafficPolicyDocumentEndpoint", typing.Dict[builtins.str, typing.Any]]]],
) -> None:
'''
:param value: -
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__2b4bff4d41f1ea6641264d0f86ff1bc568bc556506cbbaf0d4e3842aa250a96e)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
return typing.cast(None, jsii.invoke(self, "putEndpoint", [value]))
@jsii.member(jsii_name="putRule")
def put_rule(
self,
value: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union["DataAwsRoute53TrafficPolicyDocumentRule", typing.Dict[builtins.str, typing.Any]]]],
) -> None:
'''
:param value: -
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__b215284bd29cf4970bee50e662df0f59d06358193a835450077c36bba165c330)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
return typing.cast(None, jsii.invoke(self, "putRule", [value]))
@jsii.member(jsii_name="resetEndpoint")
def reset_endpoint(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetEndpoint", []))
@jsii.member(jsii_name="resetId")
def reset_id(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetId", []))
@jsii.member(jsii_name="resetRecordType")
def reset_record_type(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetRecordType", []))
@jsii.member(jsii_name="resetRule")
def reset_rule(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetRule", []))
@jsii.member(jsii_name="resetStartEndpoint")
def reset_start_endpoint(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetStartEndpoint", []))
@jsii.member(jsii_name="resetStartRule")
def reset_start_rule(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetStartRule", []))
@jsii.member(jsii_name="resetVersion")
def reset_version(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetVersion", []))
@jsii.member(jsii_name="synthesizeAttributes")
def _synthesize_attributes(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "synthesizeAttributes", []))
@jsii.python.classproperty
@jsii.member(jsii_name="tfResourceType")
def TF_RESOURCE_TYPE(cls) -> builtins.str:
return typing.cast(builtins.str, jsii.sget(cls, "tfResourceType"))
@builtins.property
@jsii.member(jsii_name="endpoint")
def endpoint(self) -> "DataAwsRoute53TrafficPolicyDocumentEndpointList":
return typing.cast("DataAwsRoute53TrafficPolicyDocumentEndpointList", jsii.get(self, "endpoint"))
@builtins.property
@jsii.member(jsii_name="json")
def json(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "json"))
@builtins.property
@jsii.member(jsii_name="rule")
def rule(self) -> "DataAwsRoute53TrafficPolicyDocumentRuleList":
return typing.cast("DataAwsRoute53TrafficPolicyDocumentRuleList", jsii.get(self, "rule"))
@builtins.property
@jsii.member(jsii_name="endpointInput")
def endpoint_input(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["DataAwsRoute53TrafficPolicyDocumentEndpoint"]]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["DataAwsRoute53TrafficPolicyDocumentEndpoint"]]], jsii.get(self, "endpointInput"))
@builtins.property
@jsii.member(jsii_name="idInput")
def id_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "idInput"))
@builtins.property
@jsii.member(jsii_name="recordTypeInput")
def record_type_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "recordTypeInput"))
@builtins.property
@jsii.member(jsii_name="ruleInput")
def rule_input(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["DataAwsRoute53TrafficPolicyDocumentRule"]]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["DataAwsRoute53TrafficPolicyDocumentRule"]]], jsii.get(self, "ruleInput"))
@builtins.property
@jsii.member(jsii_name="startEndpointInput")
def start_endpoint_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "startEndpointInput"))
@builtins.property
@jsii.member(jsii_name="startRuleInput")
def start_rule_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "startRuleInput"))
@builtins.property
@jsii.member(jsii_name="versionInput")
def version_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "versionInput"))
@builtins.property
@jsii.member(jsii_name="id")
def id(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "id"))
@id.setter
def id(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__c011e650563d02b814fa371a0004500cf15eefec589f04d8a7327a3831bab4d9)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "id", value)
@builtins.property
@jsii.member(jsii_name="recordType")
def record_type(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "recordType"))
@record_type.setter
def record_type(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__3361581191dfd05ef3b9631340e56c7f3f626820450f0cc757b12d382aeae342)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "recordType", value)
@builtins.property
@jsii.member(jsii_name="startEndpoint")
def start_endpoint(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "startEndpoint"))
@start_endpoint.setter
def start_endpoint(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__05d1f364aaef53beaa3ee1129601dee7fb81976ddd57143d95960b5c728684a7)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "startEndpoint", value)
@builtins.property
@jsii.member(jsii_name="startRule")
def start_rule(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "startRule"))
@start_rule.setter
def start_rule(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__9e72498f571b940f018e236ff4ccf98aae36ad1b4457771b536814cbc498eca8)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "startRule", value)
@builtins.property
@jsii.member(jsii_name="version")
def version(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "version"))
@version.setter
def version(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__f1391014f59ff2fc50cc0ff9eba099c7bc226dd2dc9a7b712b916522a7974174)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "version", value)
@jsii.data_type(
jsii_type="@cdktf/provider-aws.dataAwsRoute53TrafficPolicyDocument.DataAwsRoute53TrafficPolicyDocumentConfig",
jsii_struct_bases=[_cdktf_9a9027ec.TerraformMetaArguments],
name_mapping={
"connection": "connection",
"count": "count",
"depends_on": "dependsOn",
"for_each": "forEach",
"lifecycle": "lifecycle",
"provider": "provider",
"provisioners": "provisioners",
"endpoint": "endpoint",
"id": "id",
"record_type": "recordType",
"rule": "rule",
"start_endpoint": "startEndpoint",
"start_rule": "startRule",
"version": "version",
},
)
class DataAwsRoute53TrafficPolicyDocumentConfig(_cdktf_9a9027ec.TerraformMetaArguments):
def __init__(
self,
*,
connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
endpoint: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union["DataAwsRoute53TrafficPolicyDocumentEndpoint", typing.Dict[builtins.str, typing.Any]]]]] = None,
id: typing.Optional[builtins.str] = None,
record_type: typing.Optional[builtins.str] = None,
rule: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union["DataAwsRoute53TrafficPolicyDocumentRule", typing.Dict[builtins.str, typing.Any]]]]] = None,
start_endpoint: typing.Optional[builtins.str] = None,
start_rule: typing.Optional[builtins.str] = None,
version: typing.Optional[builtins.str] = None,
) -> None:
'''
:param connection:
:param count:
:param depends_on:
:param for_each:
:param lifecycle:
:param provider:
:param provisioners:
:param endpoint: endpoint block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#endpoint DataAwsRoute53TrafficPolicyDocument#endpoint}
:param id: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#id DataAwsRoute53TrafficPolicyDocument#id}. Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
:param record_type: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#record_type DataAwsRoute53TrafficPolicyDocument#record_type}.
:param rule: rule block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#rule DataAwsRoute53TrafficPolicyDocument#rule}
:param start_endpoint: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#start_endpoint DataAwsRoute53TrafficPolicyDocument#start_endpoint}.
:param start_rule: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#start_rule DataAwsRoute53TrafficPolicyDocument#start_rule}.
:param version: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#version DataAwsRoute53TrafficPolicyDocument#version}.
'''
if isinstance(lifecycle, dict):
lifecycle = _cdktf_9a9027ec.TerraformResourceLifecycle(**lifecycle)
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__3753a58cccb193054e80c8b366cfff78394a27fcb5ef11c999568b3177c81712)
check_type(argname="argument connection", value=connection, expected_type=type_hints["connection"])
check_type(argname="argument count", value=count, expected_type=type_hints["count"])
check_type(argname="argument depends_on", value=depends_on, expected_type=type_hints["depends_on"])
check_type(argname="argument for_each", value=for_each, expected_type=type_hints["for_each"])
check_type(argname="argument lifecycle", value=lifecycle, expected_type=type_hints["lifecycle"])
check_type(argname="argument provider", value=provider, expected_type=type_hints["provider"])
check_type(argname="argument provisioners", value=provisioners, expected_type=type_hints["provisioners"])
check_type(argname="argument endpoint", value=endpoint, expected_type=type_hints["endpoint"])
check_type(argname="argument id", value=id, expected_type=type_hints["id"])
check_type(argname="argument record_type", value=record_type, expected_type=type_hints["record_type"])
check_type(argname="argument rule", value=rule, expected_type=type_hints["rule"])
check_type(argname="argument start_endpoint", value=start_endpoint, expected_type=type_hints["start_endpoint"])
check_type(argname="argument start_rule", value=start_rule, expected_type=type_hints["start_rule"])
check_type(argname="argument version", value=version, expected_type=type_hints["version"])
self._values: typing.Dict[builtins.str, typing.Any] = {}
if connection is not None:
self._values["connection"] = connection
if count is not None:
self._values["count"] = count
if depends_on is not None:
self._values["depends_on"] = depends_on
if for_each is not None:
self._values["for_each"] = for_each
if lifecycle is not None:
self._values["lifecycle"] = lifecycle
if provider is not None:
self._values["provider"] = provider
if provisioners is not None:
self._values["provisioners"] = provisioners
if endpoint is not None:
self._values["endpoint"] = endpoint
if id is not None:
self._values["id"] = id
if record_type is not None:
self._values["record_type"] = record_type
if rule is not None:
self._values["rule"] = rule
if start_endpoint is not None:
self._values["start_endpoint"] = start_endpoint
if start_rule is not None:
self._values["start_rule"] = start_rule
if version is not None:
self._values["version"] = version
@builtins.property
def connection(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, _cdktf_9a9027ec.WinrmProvisionerConnection]]:
'''
:stability: experimental
'''
result = self._values.get("connection")
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, _cdktf_9a9027ec.WinrmProvisionerConnection]], result)
@builtins.property
def count(
self,
) -> typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]]:
'''
:stability: experimental
'''
result = self._values.get("count")
return typing.cast(typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]], result)
@builtins.property
def depends_on(
self,
) -> typing.Optional[typing.List[_cdktf_9a9027ec.ITerraformDependable]]:
'''
:stability: experimental
'''
result = self._values.get("depends_on")
return typing.cast(typing.Optional[typing.List[_cdktf_9a9027ec.ITerraformDependable]], result)
@builtins.property
def for_each(self) -> typing.Optional[_cdktf_9a9027ec.ITerraformIterator]:
'''
:stability: experimental
'''
result = self._values.get("for_each")
return typing.cast(typing.Optional[_cdktf_9a9027ec.ITerraformIterator], result)
@builtins.property
def lifecycle(self) -> typing.Optional[_cdktf_9a9027ec.TerraformResourceLifecycle]:
'''
:stability: experimental
'''
result = self._values.get("lifecycle")
return typing.cast(typing.Optional[_cdktf_9a9027ec.TerraformResourceLifecycle], result)
@builtins.property
def provider(self) -> typing.Optional[_cdktf_9a9027ec.TerraformProvider]:
'''
:stability: experimental
'''
result = self._values.get("provider")
return typing.cast(typing.Optional[_cdktf_9a9027ec.TerraformProvider], result)
@builtins.property
def provisioners(
self,
) -> typing.Optional[typing.List[typing.Union[_cdktf_9a9027ec.FileProvisioner, _cdktf_9a9027ec.LocalExecProvisioner, _cdktf_9a9027ec.RemoteExecProvisioner]]]:
'''
:stability: experimental
'''
result = self._values.get("provisioners")
return typing.cast(typing.Optional[typing.List[typing.Union[_cdktf_9a9027ec.FileProvisioner, _cdktf_9a9027ec.LocalExecProvisioner, _cdktf_9a9027ec.RemoteExecProvisioner]]], result)
@builtins.property
def endpoint(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["DataAwsRoute53TrafficPolicyDocumentEndpoint"]]]:
'''endpoint block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#endpoint DataAwsRoute53TrafficPolicyDocument#endpoint}
'''
result = self._values.get("endpoint")
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["DataAwsRoute53TrafficPolicyDocumentEndpoint"]]], result)
@builtins.property
def id(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#id DataAwsRoute53TrafficPolicyDocument#id}.
Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2.
If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
'''
result = self._values.get("id")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def record_type(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#record_type DataAwsRoute53TrafficPolicyDocument#record_type}.'''
result = self._values.get("record_type")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def rule(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["DataAwsRoute53TrafficPolicyDocumentRule"]]]:
'''rule block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#rule DataAwsRoute53TrafficPolicyDocument#rule}
'''
result = self._values.get("rule")
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["DataAwsRoute53TrafficPolicyDocumentRule"]]], result)
@builtins.property
def start_endpoint(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#start_endpoint DataAwsRoute53TrafficPolicyDocument#start_endpoint}.'''
result = self._values.get("start_endpoint")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def start_rule(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#start_rule DataAwsRoute53TrafficPolicyDocument#start_rule}.'''
result = self._values.get("start_rule")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def version(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#version DataAwsRoute53TrafficPolicyDocument#version}.'''
result = self._values.get("version")
return typing.cast(typing.Optional[builtins.str], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "DataAwsRoute53TrafficPolicyDocumentConfig(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
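# Hedged illustrative sketch (not called anywhere): the config struct collects
# the same keyword arguments accepted by the data source constructor; the
# values below are hypothetical.
def _example_config_struct() -> None:
    cfg = DataAwsRoute53TrafficPolicyDocumentConfig(
        record_type="A",
        version="2015-10-01",
    )
    assert cfg.record_type == "A"
    assert cfg.version == "2015-10-01"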
@jsii.data_type(
jsii_type="@cdktf/provider-aws.dataAwsRoute53TrafficPolicyDocument.DataAwsRoute53TrafficPolicyDocumentEndpoint",
jsii_struct_bases=[],
name_mapping={"id": "id", "region": "region", "type": "type", "value": "value"},
)
class DataAwsRoute53TrafficPolicyDocumentEndpoint:
def __init__(
self,
*,
id: builtins.str,
region: typing.Optional[builtins.str] = None,
type: typing.Optional[builtins.str] = None,
value: typing.Optional[builtins.str] = None,
) -> None:
'''
:param id: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#id DataAwsRoute53TrafficPolicyDocument#id}. Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
:param region: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#region DataAwsRoute53TrafficPolicyDocument#region}.
:param type: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#type DataAwsRoute53TrafficPolicyDocument#type}.
:param value: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#value DataAwsRoute53TrafficPolicyDocument#value}.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__34ac11567536e8379ef1f06c89de78b807bcd68d55905b7fed6967293f63545d)
check_type(argname="argument id", value=id, expected_type=type_hints["id"])
check_type(argname="argument region", value=region, expected_type=type_hints["region"])
check_type(argname="argument type", value=type, expected_type=type_hints["type"])
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
self._values: typing.Dict[builtins.str, typing.Any] = {
"id": id,
}
if region is not None:
self._values["region"] = region
if type is not None:
self._values["type"] = type
if value is not None:
self._values["value"] = value
@builtins.property
def id(self) -> builtins.str:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#id DataAwsRoute53TrafficPolicyDocument#id}.
Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2.
If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
'''
result = self._values.get("id")
assert result is not None, "Required property 'id' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def region(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#region DataAwsRoute53TrafficPolicyDocument#region}.'''
result = self._values.get("region")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def type(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#type DataAwsRoute53TrafficPolicyDocument#type}.'''
result = self._values.get("type")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def value(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#value DataAwsRoute53TrafficPolicyDocument#value}.'''
result = self._values.get("value")
return typing.cast(typing.Optional[builtins.str], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "DataAwsRoute53TrafficPolicyDocumentEndpoint(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
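# Hedged illustrative sketch (not called anywhere): endpoint structs are plain
# value objects that compare by field values; the id/type below are
# hypothetical.
def _example_endpoint_struct() -> None:
    a = DataAwsRoute53TrafficPolicyDocumentEndpoint(id="my_elb", type="elastic-load-balancer")
    b = DataAwsRoute53TrafficPolicyDocumentEndpoint(id="my_elb", type="elastic-load-balancer")
    assert a == b
    assert a != DataAwsRoute53TrafficPolicyDocumentEndpoint(id="other")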
class DataAwsRoute53TrafficPolicyDocumentEndpointList(
_cdktf_9a9027ec.ComplexList,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-aws.dataAwsRoute53TrafficPolicyDocument.DataAwsRoute53TrafficPolicyDocumentEndpointList",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
:param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__df678f09f9c9ba33d41d1bc8f04420eb5df59d930de07434d6e9c3ea736a9790)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])
@jsii.member(jsii_name="get")
def get(
self,
index: jsii.Number,
) -> "DataAwsRoute53TrafficPolicyDocumentEndpointOutputReference":
'''
:param index: the index of the item to return.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__fcdd69aa3d676d635647243724d4181b17750bce985e964ed1bc46a1f0c9d9ca)
check_type(argname="argument index", value=index, expected_type=type_hints["index"])
return typing.cast("DataAwsRoute53TrafficPolicyDocumentEndpointOutputReference", jsii.invoke(self, "get", [index]))
@builtins.property
@jsii.member(jsii_name="terraformAttribute")
def _terraform_attribute(self) -> builtins.str:
'''The attribute on the parent resource this class is referencing.'''
return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))
@_terraform_attribute.setter
def _terraform_attribute(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__8b084e8b0d0253431449915dc3e75cc27bd576e374cc11f6f73d344ec71b8fce)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "terraformAttribute", value)
@builtins.property
@jsii.member(jsii_name="terraformResource")
def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
'''The parent resource.'''
return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))
@_terraform_resource.setter
def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__a740f1d6b4e237b09e819f5598cd15e484561dc7a57b1eb54c51e386e239136d)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "terraformResource", value)
@builtins.property
@jsii.member(jsii_name="wrapsSet")
def _wraps_set(self) -> builtins.bool:
'''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))
@_wraps_set.setter
def _wraps_set(self, value: builtins.bool) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__3eebf863eb671ac25b89aaa213197e46f2c44ffcc8173ffca317feca74caad4a)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "wrapsSet", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentEndpoint]]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentEndpoint]]], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentEndpoint]]],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__079bc0cb124512cb2bd10fb7cc7e21e72e4a7df695e6a6ef5a74a59d508764e9)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
class DataAwsRoute53TrafficPolicyDocumentEndpointOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-aws.dataAwsRoute53TrafficPolicyDocument.DataAwsRoute53TrafficPolicyDocumentEndpointOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
:param complex_object_index: the index of this item in the list.
:param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__04e98c71bb37d3d61575f67c7d9b9546315cfa37099ef31be08f32dd03df9209)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])
@jsii.member(jsii_name="resetRegion")
def reset_region(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetRegion", []))
@jsii.member(jsii_name="resetType")
def reset_type(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetType", []))
@jsii.member(jsii_name="resetValue")
def reset_value(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetValue", []))
@builtins.property
@jsii.member(jsii_name="idInput")
def id_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "idInput"))
@builtins.property
@jsii.member(jsii_name="regionInput")
def region_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "regionInput"))
@builtins.property
@jsii.member(jsii_name="typeInput")
def type_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "typeInput"))
@builtins.property
@jsii.member(jsii_name="valueInput")
def value_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "valueInput"))
@builtins.property
@jsii.member(jsii_name="id")
def id(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "id"))
@id.setter
def id(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__1018f43caafeb285e87c31d3d134185afffeba841fa5c29c6206b2b1f0e2b731)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "id", value)
@builtins.property
@jsii.member(jsii_name="region")
def region(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "region"))
@region.setter
def region(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__98cffbb63ec0afd6c38c4e060663576af068bd45d5a5700e087d169b39116750)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "region", value)
@builtins.property
@jsii.member(jsii_name="type")
def type(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "type"))
@type.setter
def type(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__61de518a8440f6ec77c5a3d1017163b93a3ea6f3b228c3537aea59540f6ed1b7)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "type", value)
@builtins.property
@jsii.member(jsii_name="value")
def value(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "value"))
@value.setter
def value(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__6607208319d1a85778dfd068d9148a3f6339d5737f2ed4006715e16e4e78f74f)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "value", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DataAwsRoute53TrafficPolicyDocumentEndpoint]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DataAwsRoute53TrafficPolicyDocumentEndpoint]], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DataAwsRoute53TrafficPolicyDocumentEndpoint]],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__6e36b0de8a48bb4f9d62e0852ef306eeb2e9d18ca04a046785fb9596f0bceaab)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
@jsii.data_type(
jsii_type="@cdktf/provider-aws.dataAwsRoute53TrafficPolicyDocument.DataAwsRoute53TrafficPolicyDocumentRule",
jsii_struct_bases=[],
name_mapping={
"id": "id",
"geo_proximity_location": "geoProximityLocation",
"items": "items",
"location": "location",
"primary": "primary",
"region": "region",
"secondary": "secondary",
"type": "type",
},
)
class DataAwsRoute53TrafficPolicyDocumentRule:
def __init__(
self,
*,
id: builtins.str,
geo_proximity_location: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union["DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation", typing.Dict[builtins.str, typing.Any]]]]] = None,
items: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union["DataAwsRoute53TrafficPolicyDocumentRuleItems", typing.Dict[builtins.str, typing.Any]]]]] = None,
location: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union["DataAwsRoute53TrafficPolicyDocumentRuleLocation", typing.Dict[builtins.str, typing.Any]]]]] = None,
primary: typing.Optional[typing.Union["DataAwsRoute53TrafficPolicyDocumentRulePrimary", typing.Dict[builtins.str, typing.Any]]] = None,
region: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union["DataAwsRoute53TrafficPolicyDocumentRuleRegion", typing.Dict[builtins.str, typing.Any]]]]] = None,
secondary: typing.Optional[typing.Union["DataAwsRoute53TrafficPolicyDocumentRuleSecondary", typing.Dict[builtins.str, typing.Any]]] = None,
type: typing.Optional[builtins.str] = None,
) -> None:
'''
:param id: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#id DataAwsRoute53TrafficPolicyDocument#id}. Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
:param geo_proximity_location: geo_proximity_location block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#geo_proximity_location DataAwsRoute53TrafficPolicyDocument#geo_proximity_location}
:param items: items block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#items DataAwsRoute53TrafficPolicyDocument#items}
:param location: location block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#location DataAwsRoute53TrafficPolicyDocument#location}
:param primary: primary block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#primary DataAwsRoute53TrafficPolicyDocument#primary}
:param region: region block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#region DataAwsRoute53TrafficPolicyDocument#region}
:param secondary: secondary block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#secondary DataAwsRoute53TrafficPolicyDocument#secondary}
:param type: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#type DataAwsRoute53TrafficPolicyDocument#type}.
'''
if isinstance(primary, dict):
primary = DataAwsRoute53TrafficPolicyDocumentRulePrimary(**primary)
if isinstance(secondary, dict):
secondary = DataAwsRoute53TrafficPolicyDocumentRuleSecondary(**secondary)
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__3928430e7935f0fdd0670e4052c4916004706991443006f5fbd986c06ef5ffe2)
check_type(argname="argument id", value=id, expected_type=type_hints["id"])
check_type(argname="argument geo_proximity_location", value=geo_proximity_location, expected_type=type_hints["geo_proximity_location"])
check_type(argname="argument items", value=items, expected_type=type_hints["items"])
check_type(argname="argument location", value=location, expected_type=type_hints["location"])
check_type(argname="argument primary", value=primary, expected_type=type_hints["primary"])
check_type(argname="argument region", value=region, expected_type=type_hints["region"])
check_type(argname="argument secondary", value=secondary, expected_type=type_hints["secondary"])
check_type(argname="argument type", value=type, expected_type=type_hints["type"])
self._values: typing.Dict[builtins.str, typing.Any] = {
"id": id,
}
if geo_proximity_location is not None:
self._values["geo_proximity_location"] = geo_proximity_location
if items is not None:
self._values["items"] = items
if location is not None:
self._values["location"] = location
if primary is not None:
self._values["primary"] = primary
if region is not None:
self._values["region"] = region
if secondary is not None:
self._values["secondary"] = secondary
if type is not None:
self._values["type"] = type
@builtins.property
def id(self) -> builtins.str:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#id DataAwsRoute53TrafficPolicyDocument#id}.
Note that the id field is added automatically to all resources in Terraform providers built with a provider SDK version below 2, so it may not be settable; consult the provider documentation if setting it fails.
'''
result = self._values.get("id")
assert result is not None, "Required property 'id' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def geo_proximity_location(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation"]]]:
'''geo_proximity_location block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#geo_proximity_location DataAwsRoute53TrafficPolicyDocument#geo_proximity_location}
'''
result = self._values.get("geo_proximity_location")
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation"]]], result)
@builtins.property
def items(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["DataAwsRoute53TrafficPolicyDocumentRuleItems"]]]:
'''items block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#items DataAwsRoute53TrafficPolicyDocument#items}
'''
result = self._values.get("items")
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["DataAwsRoute53TrafficPolicyDocumentRuleItems"]]], result)
@builtins.property
def location(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["DataAwsRoute53TrafficPolicyDocumentRuleLocation"]]]:
'''location block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#location DataAwsRoute53TrafficPolicyDocument#location}
'''
result = self._values.get("location")
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["DataAwsRoute53TrafficPolicyDocumentRuleLocation"]]], result)
@builtins.property
def primary(
self,
) -> typing.Optional["DataAwsRoute53TrafficPolicyDocumentRulePrimary"]:
'''primary block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#primary DataAwsRoute53TrafficPolicyDocument#primary}
'''
result = self._values.get("primary")
return typing.cast(typing.Optional["DataAwsRoute53TrafficPolicyDocumentRulePrimary"], result)
@builtins.property
def region(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["DataAwsRoute53TrafficPolicyDocumentRuleRegion"]]]:
'''region block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#region DataAwsRoute53TrafficPolicyDocument#region}
'''
result = self._values.get("region")
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["DataAwsRoute53TrafficPolicyDocumentRuleRegion"]]], result)
@builtins.property
def secondary(
self,
) -> typing.Optional["DataAwsRoute53TrafficPolicyDocumentRuleSecondary"]:
'''secondary block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#secondary DataAwsRoute53TrafficPolicyDocument#secondary}
'''
result = self._values.get("secondary")
return typing.cast(typing.Optional["DataAwsRoute53TrafficPolicyDocumentRuleSecondary"], result)
@builtins.property
def type(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#type DataAwsRoute53TrafficPolicyDocument#type}.'''
result = self._values.get("type")
return typing.cast(typing.Optional[builtins.str], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "DataAwsRoute53TrafficPolicyDocumentRule(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
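# A minimal construction sketch for the struct above (illustrative only; the
# endpoint reference name and rule type are hypothetical, and the rule type
# must match the blocks that are set):
#
#     rule = DataAwsRoute53TrafficPolicyDocumentRule(
#         id="multivalue_rule",
#         type="multivalue",
#         items=[
#             DataAwsRoute53TrafficPolicyDocumentRuleItems(
#                 endpoint_reference="my_endpoint",
#             )
#         ],
#     )
#
# Plain dicts passed for the primary and secondary blocks are coerced to their
# struct types in __init__ above.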


@jsii.data_type(
jsii_type="@cdktf/provider-aws.dataAwsRoute53TrafficPolicyDocument.DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation",
jsii_struct_bases=[],
name_mapping={
"bias": "bias",
"endpoint_reference": "endpointReference",
"evaluate_target_health": "evaluateTargetHealth",
"health_check": "healthCheck",
"latitude": "latitude",
"longitude": "longitude",
"region": "region",
"rule_reference": "ruleReference",
},
)
class DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation:
def __init__(
self,
*,
bias: typing.Optional[builtins.str] = None,
endpoint_reference: typing.Optional[builtins.str] = None,
evaluate_target_health: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
health_check: typing.Optional[builtins.str] = None,
latitude: typing.Optional[builtins.str] = None,
longitude: typing.Optional[builtins.str] = None,
region: typing.Optional[builtins.str] = None,
rule_reference: typing.Optional[builtins.str] = None,
) -> None:
'''
:param bias: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#bias DataAwsRoute53TrafficPolicyDocument#bias}.
:param endpoint_reference: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#endpoint_reference DataAwsRoute53TrafficPolicyDocument#endpoint_reference}.
:param evaluate_target_health: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#evaluate_target_health DataAwsRoute53TrafficPolicyDocument#evaluate_target_health}.
:param health_check: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#health_check DataAwsRoute53TrafficPolicyDocument#health_check}.
:param latitude: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#latitude DataAwsRoute53TrafficPolicyDocument#latitude}.
:param longitude: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#longitude DataAwsRoute53TrafficPolicyDocument#longitude}.
:param region: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#region DataAwsRoute53TrafficPolicyDocument#region}.
:param rule_reference: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#rule_reference DataAwsRoute53TrafficPolicyDocument#rule_reference}.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__8592c0e79e8aae010cdf54020cd566a4f166ef2a31fe195e7ec9ccc86e648318)
check_type(argname="argument bias", value=bias, expected_type=type_hints["bias"])
check_type(argname="argument endpoint_reference", value=endpoint_reference, expected_type=type_hints["endpoint_reference"])
check_type(argname="argument evaluate_target_health", value=evaluate_target_health, expected_type=type_hints["evaluate_target_health"])
check_type(argname="argument health_check", value=health_check, expected_type=type_hints["health_check"])
check_type(argname="argument latitude", value=latitude, expected_type=type_hints["latitude"])
check_type(argname="argument longitude", value=longitude, expected_type=type_hints["longitude"])
check_type(argname="argument region", value=region, expected_type=type_hints["region"])
check_type(argname="argument rule_reference", value=rule_reference, expected_type=type_hints["rule_reference"])
self._values: typing.Dict[builtins.str, typing.Any] = {}
if bias is not None:
self._values["bias"] = bias
if endpoint_reference is not None:
self._values["endpoint_reference"] = endpoint_reference
if evaluate_target_health is not None:
self._values["evaluate_target_health"] = evaluate_target_health
if health_check is not None:
self._values["health_check"] = health_check
if latitude is not None:
self._values["latitude"] = latitude
if longitude is not None:
self._values["longitude"] = longitude
if region is not None:
self._values["region"] = region
if rule_reference is not None:
self._values["rule_reference"] = rule_reference
@builtins.property
def bias(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#bias DataAwsRoute53TrafficPolicyDocument#bias}.'''
result = self._values.get("bias")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def endpoint_reference(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#endpoint_reference DataAwsRoute53TrafficPolicyDocument#endpoint_reference}.'''
result = self._values.get("endpoint_reference")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def evaluate_target_health(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#evaluate_target_health DataAwsRoute53TrafficPolicyDocument#evaluate_target_health}.'''
result = self._values.get("evaluate_target_health")
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], result)
@builtins.property
def health_check(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#health_check DataAwsRoute53TrafficPolicyDocument#health_check}.'''
result = self._values.get("health_check")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def latitude(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#latitude DataAwsRoute53TrafficPolicyDocument#latitude}.'''
result = self._values.get("latitude")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def longitude(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#longitude DataAwsRoute53TrafficPolicyDocument#longitude}.'''
result = self._values.get("longitude")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def region(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#region DataAwsRoute53TrafficPolicyDocument#region}.'''
result = self._values.get("region")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def rule_reference(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#rule_reference DataAwsRoute53TrafficPolicyDocument#rule_reference}.'''
result = self._values.get("rule_reference")
return typing.cast(typing.Optional[builtins.str], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
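# A minimal sketch of a geoproximity entry (illustrative; the endpoint
# reference, region string, and bias value are hypothetical):
#
#     near_us_west = DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation(
#         endpoint_reference="us_west_endpoint",
#         region="aws:route53:us-west-1",
#         bias="10",
#         evaluate_target_health=True,
#     )
#
# Every field is optional; note that bias, latitude, and longitude are strings
# in the Terraform schema, not numbers.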


class DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocationList(
_cdktf_9a9027ec.ComplexList,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-aws.dataAwsRoute53TrafficPolicyDocument.DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocationList",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
:param wraps_set: whether the list wraps a set (tolist() is added so an item can be accessed via an index).
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__3deff978032fcf1dac3caf4c7841c3af274bfdc35d9d50724a56caec879e1bd2)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])
@jsii.member(jsii_name="get")
def get(
self,
index: jsii.Number,
) -> "DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocationOutputReference":
'''
:param index: the index of the item to return.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__57037b72b63ef1d1b02afa482018f3a21b6fa980405f236ae0160830729d15f6)
check_type(argname="argument index", value=index, expected_type=type_hints["index"])
return typing.cast("DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocationOutputReference", jsii.invoke(self, "get", [index]))
@builtins.property
@jsii.member(jsii_name="terraformAttribute")
def _terraform_attribute(self) -> builtins.str:
'''The attribute on the parent resource this class is referencing.'''
return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))
@_terraform_attribute.setter
def _terraform_attribute(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__d2602befda689a690deb91314100b3ee0cc3e67504f5c4ac76f47c8f8a6fcc47)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "terraformAttribute", value)
@builtins.property
@jsii.member(jsii_name="terraformResource")
def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
'''The parent resource.'''
return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))
@_terraform_resource.setter
def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__b68e21286beac230f7523954b36ec60fb90d32c5eeec59f92033aee487d5ee37)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "terraformResource", value)
@builtins.property
@jsii.member(jsii_name="wrapsSet")
def _wraps_set(self) -> builtins.bool:
'''whether the list wraps a set (tolist() is added so an item can be accessed via an index).'''
return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))
@_wraps_set.setter
def _wraps_set(self, value: builtins.bool) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__769c6f746a492d794092086f58dbd41f748e7b2c2ac7a1a74028f9a8214a2f2a)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "wrapsSet", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation]]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation]]], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation]]],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__38089380284196de37265733ec8fd92bf98f9cb16cb3b1d873c8e224b5a443e5)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)


class DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocationOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-aws.dataAwsRoute53TrafficPolicyDocument.DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocationOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
:param complex_object_index: the index of this item in the list.
:param complex_object_is_from_set: whether the list wraps a set (tolist() is added so an item can be accessed via an index).
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__9b8244679531121b5da19c76348ceb483bcb0881316774740d7f51239c7bcee1)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])
@jsii.member(jsii_name="resetBias")
def reset_bias(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetBias", []))
@jsii.member(jsii_name="resetEndpointReference")
def reset_endpoint_reference(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetEndpointReference", []))
@jsii.member(jsii_name="resetEvaluateTargetHealth")
def reset_evaluate_target_health(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetEvaluateTargetHealth", []))
@jsii.member(jsii_name="resetHealthCheck")
def reset_health_check(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetHealthCheck", []))
@jsii.member(jsii_name="resetLatitude")
def reset_latitude(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetLatitude", []))
@jsii.member(jsii_name="resetLongitude")
def reset_longitude(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetLongitude", []))
@jsii.member(jsii_name="resetRegion")
def reset_region(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetRegion", []))
@jsii.member(jsii_name="resetRuleReference")
def reset_rule_reference(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetRuleReference", []))
@builtins.property
@jsii.member(jsii_name="biasInput")
def bias_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "biasInput"))
@builtins.property
@jsii.member(jsii_name="endpointReferenceInput")
def endpoint_reference_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "endpointReferenceInput"))
@builtins.property
@jsii.member(jsii_name="evaluateTargetHealthInput")
def evaluate_target_health_input(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], jsii.get(self, "evaluateTargetHealthInput"))
@builtins.property
@jsii.member(jsii_name="healthCheckInput")
def health_check_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "healthCheckInput"))
@builtins.property
@jsii.member(jsii_name="latitudeInput")
def latitude_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "latitudeInput"))
@builtins.property
@jsii.member(jsii_name="longitudeInput")
def longitude_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "longitudeInput"))
@builtins.property
@jsii.member(jsii_name="regionInput")
def region_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "regionInput"))
@builtins.property
@jsii.member(jsii_name="ruleReferenceInput")
def rule_reference_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "ruleReferenceInput"))
@builtins.property
@jsii.member(jsii_name="bias")
def bias(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "bias"))
@bias.setter
def bias(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__567a1161e230968777613778f9f6301f41bffe14607a85418ec572c01a12fc1e)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "bias", value)
@builtins.property
@jsii.member(jsii_name="endpointReference")
def endpoint_reference(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "endpointReference"))
@endpoint_reference.setter
def endpoint_reference(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__a9839ae127ba6a48af1ee8e06a5b9915acc83610a9457f4d4543212e5c321eda)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "endpointReference", value)
@builtins.property
@jsii.member(jsii_name="evaluateTargetHealth")
def evaluate_target_health(
self,
) -> typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]:
return typing.cast(typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable], jsii.get(self, "evaluateTargetHealth"))
@evaluate_target_health.setter
def evaluate_target_health(
self,
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__7c79328d7781875c2e326067d32f7663a229e09db1111c195bdd3835894a2878)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "evaluateTargetHealth", value)
@builtins.property
@jsii.member(jsii_name="healthCheck")
def health_check(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "healthCheck"))
@health_check.setter
def health_check(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__3371153e57b63afe808b7c55b349dca6a4904295c3949a4135d8ecf24ade54f5)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "healthCheck", value)
@builtins.property
@jsii.member(jsii_name="latitude")
def latitude(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "latitude"))
@latitude.setter
def latitude(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__2b378fa6900b56a4aa79baa6eb95c593a71648b91ba325fd04e0a33e91413ee6)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "latitude", value)
@builtins.property
@jsii.member(jsii_name="longitude")
def longitude(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "longitude"))
@longitude.setter
def longitude(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__af8679673bd736d67538cb85ea85e975121875118975e64503354eb487de2a6a)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "longitude", value)
@builtins.property
@jsii.member(jsii_name="region")
def region(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "region"))
@region.setter
def region(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__3203732b02d3c4961d67817976b7b13ed8d41ded16f34b2d882cae3a6dfb27dc)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "region", value)
@builtins.property
@jsii.member(jsii_name="ruleReference")
def rule_reference(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "ruleReference"))
@rule_reference.setter
def rule_reference(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__3cb3e4932e2d710e690919953a19395fc67abb347d555c329d4c390d0ea406e8)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "ruleReference", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation]], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation]],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__3d776337159b2b4211f4b2ef3a3339705abdb517214b3756ee4881be0abc92ca)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)


@jsii.data_type(
jsii_type="@cdktf/provider-aws.dataAwsRoute53TrafficPolicyDocument.DataAwsRoute53TrafficPolicyDocumentRuleItems",
jsii_struct_bases=[],
name_mapping={
"endpoint_reference": "endpointReference",
"health_check": "healthCheck",
},
)
class DataAwsRoute53TrafficPolicyDocumentRuleItems:
def __init__(
self,
*,
endpoint_reference: typing.Optional[builtins.str] = None,
health_check: typing.Optional[builtins.str] = None,
) -> None:
'''
:param endpoint_reference: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#endpoint_reference DataAwsRoute53TrafficPolicyDocument#endpoint_reference}.
:param health_check: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#health_check DataAwsRoute53TrafficPolicyDocument#health_check}.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__c586cd7e498a09c6b0b02ac58f3830a6a8a61fde1635e125376b7ab0948499cc)
check_type(argname="argument endpoint_reference", value=endpoint_reference, expected_type=type_hints["endpoint_reference"])
check_type(argname="argument health_check", value=health_check, expected_type=type_hints["health_check"])
self._values: typing.Dict[builtins.str, typing.Any] = {}
if endpoint_reference is not None:
self._values["endpoint_reference"] = endpoint_reference
if health_check is not None:
self._values["health_check"] = health_check
@builtins.property
def endpoint_reference(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#endpoint_reference DataAwsRoute53TrafficPolicyDocument#endpoint_reference}.'''
result = self._values.get("endpoint_reference")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def health_check(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#health_check DataAwsRoute53TrafficPolicyDocument#health_check}.'''
result = self._values.get("health_check")
return typing.cast(typing.Optional[builtins.str], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "DataAwsRoute53TrafficPolicyDocumentRuleItems(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
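# A minimal sketch (illustrative; the endpoint reference and health check
# names are hypothetical):
#
#     item = DataAwsRoute53TrafficPolicyDocumentRuleItems(
#         endpoint_reference="my_endpoint",
#         health_check="my_health_check",
#     )
#
# Both fields are optional; omitted keys are simply left out of _values.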


class DataAwsRoute53TrafficPolicyDocumentRuleItemsList(
_cdktf_9a9027ec.ComplexList,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-aws.dataAwsRoute53TrafficPolicyDocument.DataAwsRoute53TrafficPolicyDocumentRuleItemsList",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
:param wraps_set: whether the list wraps a set (tolist() is added so an item can be accessed via an index).
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__44c05729b44afb85f2cb65a212e5abaa9c0f41408e3983aaa2848f6a9bb00257)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])
@jsii.member(jsii_name="get")
def get(
self,
index: jsii.Number,
) -> "DataAwsRoute53TrafficPolicyDocumentRuleItemsOutputReference":
'''
:param index: the index of the item to return.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__b0bd3573866479ff6251014486061c652be31dc060b060ddfbf82e56050fb4b9)
check_type(argname="argument index", value=index, expected_type=type_hints["index"])
return typing.cast("DataAwsRoute53TrafficPolicyDocumentRuleItemsOutputReference", jsii.invoke(self, "get", [index]))
@builtins.property
@jsii.member(jsii_name="terraformAttribute")
def _terraform_attribute(self) -> builtins.str:
'''The attribute on the parent resource this class is referencing.'''
return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))
@_terraform_attribute.setter
def _terraform_attribute(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__8f8dfe10bb03cb3aa1cec0c7d5cac93aaee16794a350741ca909f9ec1c91efdb)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "terraformAttribute", value)
@builtins.property
@jsii.member(jsii_name="terraformResource")
def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
'''The parent resource.'''
return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))
@_terraform_resource.setter
def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__a1cd6f2fe1cdb819b6fed9bdb1a038b98a2280509e77f8be8173160cd7651a22)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "terraformResource", value)
@builtins.property
@jsii.member(jsii_name="wrapsSet")
def _wraps_set(self) -> builtins.bool:
'''whether the list wraps a set (tolist() is added so an item can be accessed via an index).'''
return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))
@_wraps_set.setter
def _wraps_set(self, value: builtins.bool) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__a30e5fcfa568171642be6c78436d876d9fca0edb6ae417f2622ec564436f6c2a)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "wrapsSet", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRuleItems]]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRuleItems]]], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRuleItems]]],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__114f8df995e56e40c547b6f369900e3fcb7ddceb4c127ea5bea5050352ddbb02)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)


class DataAwsRoute53TrafficPolicyDocumentRuleItemsOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-aws.dataAwsRoute53TrafficPolicyDocument.DataAwsRoute53TrafficPolicyDocumentRuleItemsOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
:param complex_object_index: the index of this item in the list.
:param complex_object_is_from_set: whether the list wraps a set (tolist() is added so an item can be accessed via an index).
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__0a617e8045701b0504ff50c44c2ee056e08337e329f22ba9fbc4b9b627da2814)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])
@jsii.member(jsii_name="resetEndpointReference")
def reset_endpoint_reference(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetEndpointReference", []))
@jsii.member(jsii_name="resetHealthCheck")
def reset_health_check(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetHealthCheck", []))
@builtins.property
@jsii.member(jsii_name="endpointReferenceInput")
def endpoint_reference_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "endpointReferenceInput"))
@builtins.property
@jsii.member(jsii_name="healthCheckInput")
def health_check_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "healthCheckInput"))
@builtins.property
@jsii.member(jsii_name="endpointReference")
def endpoint_reference(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "endpointReference"))
@endpoint_reference.setter
def endpoint_reference(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__2af4853d5e7976257ca838e97de7a75a08e11ab37e0bae9cbdd834d63230526b)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "endpointReference", value)
@builtins.property
@jsii.member(jsii_name="healthCheck")
def health_check(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "healthCheck"))
@health_check.setter
def health_check(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__ebf40799afecbbb21a8d6ba8a6edc186005636364d4abdab55f5b1ca0c1a3268)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "healthCheck", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DataAwsRoute53TrafficPolicyDocumentRuleItems]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DataAwsRoute53TrafficPolicyDocumentRuleItems]], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DataAwsRoute53TrafficPolicyDocumentRuleItems]],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__eb34caa8deea3daf67990ed159c871ca46863490e0cc477ee7434535fe59c629)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)


class DataAwsRoute53TrafficPolicyDocumentRuleList(
_cdktf_9a9027ec.ComplexList,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-aws.dataAwsRoute53TrafficPolicyDocument.DataAwsRoute53TrafficPolicyDocumentRuleList",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
:param wraps_set: whether the list wraps a set (tolist() is added so an item can be accessed via an index).
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__4b67f9ee10a2f3dad96d110a7aa4998a55fec1475ac83b850cca3cd0b225e090)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])
@jsii.member(jsii_name="get")
def get(
self,
index: jsii.Number,
) -> "DataAwsRoute53TrafficPolicyDocumentRuleOutputReference":
'''
:param index: the index of the item to return.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__f99b0fabe402c65533b2457cde517bd2d56f2d151144427a1f5217ece1c7e616)
check_type(argname="argument index", value=index, expected_type=type_hints["index"])
return typing.cast("DataAwsRoute53TrafficPolicyDocumentRuleOutputReference", jsii.invoke(self, "get", [index]))
@builtins.property
@jsii.member(jsii_name="terraformAttribute")
def _terraform_attribute(self) -> builtins.str:
'''The attribute on the parent resource this class is referencing.'''
return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))
@_terraform_attribute.setter
def _terraform_attribute(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__6e377211dbc3954ddea0ccded40d5aaf4120f577427fc7bd8943dfcd053975d6)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "terraformAttribute", value)
@builtins.property
@jsii.member(jsii_name="terraformResource")
def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
'''The parent resource.'''
return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))
@_terraform_resource.setter
def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__766495a5402f250f370ec534b8e3687845847c68d36b79455f89cf85772a7442)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "terraformResource", value)
@builtins.property
@jsii.member(jsii_name="wrapsSet")
def _wraps_set(self) -> builtins.bool:
'''whether the list wraps a set (tolist() is added so an item can be accessed via an index).'''
return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))
@_wraps_set.setter
def _wraps_set(self, value: builtins.bool) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__19f20cf8561c4a13589e6d1eb8164e3bc4132819624f6279d7c64438fc355959)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "wrapsSet", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRule]]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRule]]], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRule]]],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__534ad647536b5ba377f26eab10b6c47ef55e6936e384d76c503c7a1fd6154409)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)


@jsii.data_type(
jsii_type="@cdktf/provider-aws.dataAwsRoute53TrafficPolicyDocument.DataAwsRoute53TrafficPolicyDocumentRuleLocation",
jsii_struct_bases=[],
name_mapping={
"continent": "continent",
"country": "country",
"endpoint_reference": "endpointReference",
"evaluate_target_health": "evaluateTargetHealth",
"health_check": "healthCheck",
"is_default": "isDefault",
"rule_reference": "ruleReference",
"subdivision": "subdivision",
},
)
class DataAwsRoute53TrafficPolicyDocumentRuleLocation:
def __init__(
self,
*,
continent: typing.Optional[builtins.str] = None,
country: typing.Optional[builtins.str] = None,
endpoint_reference: typing.Optional[builtins.str] = None,
evaluate_target_health: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
health_check: typing.Optional[builtins.str] = None,
is_default: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
rule_reference: typing.Optional[builtins.str] = None,
subdivision: typing.Optional[builtins.str] = None,
) -> None:
'''
:param continent: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#continent DataAwsRoute53TrafficPolicyDocument#continent}.
:param country: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#country DataAwsRoute53TrafficPolicyDocument#country}.
:param endpoint_reference: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#endpoint_reference DataAwsRoute53TrafficPolicyDocument#endpoint_reference}.
:param evaluate_target_health: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#evaluate_target_health DataAwsRoute53TrafficPolicyDocument#evaluate_target_health}.
:param health_check: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#health_check DataAwsRoute53TrafficPolicyDocument#health_check}.
:param is_default: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#is_default DataAwsRoute53TrafficPolicyDocument#is_default}.
:param rule_reference: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#rule_reference DataAwsRoute53TrafficPolicyDocument#rule_reference}.
:param subdivision: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#subdivision DataAwsRoute53TrafficPolicyDocument#subdivision}.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__5ad466c6218d01cf0ceafa63f53f56535ae71a3c70628a08b22eac81d08b3167)
check_type(argname="argument continent", value=continent, expected_type=type_hints["continent"])
check_type(argname="argument country", value=country, expected_type=type_hints["country"])
check_type(argname="argument endpoint_reference", value=endpoint_reference, expected_type=type_hints["endpoint_reference"])
check_type(argname="argument evaluate_target_health", value=evaluate_target_health, expected_type=type_hints["evaluate_target_health"])
check_type(argname="argument health_check", value=health_check, expected_type=type_hints["health_check"])
check_type(argname="argument is_default", value=is_default, expected_type=type_hints["is_default"])
check_type(argname="argument rule_reference", value=rule_reference, expected_type=type_hints["rule_reference"])
check_type(argname="argument subdivision", value=subdivision, expected_type=type_hints["subdivision"])
self._values: typing.Dict[builtins.str, typing.Any] = {}
if continent is not None:
self._values["continent"] = continent
if country is not None:
self._values["country"] = country
if endpoint_reference is not None:
self._values["endpoint_reference"] = endpoint_reference
if evaluate_target_health is not None:
self._values["evaluate_target_health"] = evaluate_target_health
if health_check is not None:
self._values["health_check"] = health_check
if is_default is not None:
self._values["is_default"] = is_default
if rule_reference is not None:
self._values["rule_reference"] = rule_reference
if subdivision is not None:
self._values["subdivision"] = subdivision
@builtins.property
def continent(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#continent DataAwsRoute53TrafficPolicyDocument#continent}.'''
result = self._values.get("continent")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def country(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#country DataAwsRoute53TrafficPolicyDocument#country}.'''
result = self._values.get("country")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def endpoint_reference(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#endpoint_reference DataAwsRoute53TrafficPolicyDocument#endpoint_reference}.'''
result = self._values.get("endpoint_reference")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def evaluate_target_health(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#evaluate_target_health DataAwsRoute53TrafficPolicyDocument#evaluate_target_health}.'''
result = self._values.get("evaluate_target_health")
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], result)
@builtins.property
def health_check(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#health_check DataAwsRoute53TrafficPolicyDocument#health_check}.'''
result = self._values.get("health_check")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def is_default(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#is_default DataAwsRoute53TrafficPolicyDocument#is_default}.'''
result = self._values.get("is_default")
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], result)
@builtins.property
def rule_reference(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#rule_reference DataAwsRoute53TrafficPolicyDocument#rule_reference}.'''
result = self._values.get("rule_reference")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def subdivision(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#subdivision DataAwsRoute53TrafficPolicyDocument#subdivision}.'''
result = self._values.get("subdivision")
return typing.cast(typing.Optional[builtins.str], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "DataAwsRoute53TrafficPolicyDocumentRuleLocation(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
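# A minimal sketch of a geolocation entry plus a catch-all default
# (illustrative; the endpoint reference names are hypothetical):
#
#     eu_users = DataAwsRoute53TrafficPolicyDocumentRuleLocation(
#         continent="EU",
#         endpoint_reference="eu_endpoint",
#     )
#     everyone_else = DataAwsRoute53TrafficPolicyDocumentRuleLocation(
#         is_default=True,
#         endpoint_reference="default_endpoint",
#     )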


class DataAwsRoute53TrafficPolicyDocumentRuleLocationList(
_cdktf_9a9027ec.ComplexList,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-aws.dataAwsRoute53TrafficPolicyDocument.DataAwsRoute53TrafficPolicyDocumentRuleLocationList",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
:param wraps_set: whether the list wraps a set (tolist() is added so an item can be accessed via an index).
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__66a3383b34cb9e57e03b0ba685f567e4cf674366760b9dd857cdf881149207fe)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])
@jsii.member(jsii_name="get")
def get(
self,
index: jsii.Number,
) -> "DataAwsRoute53TrafficPolicyDocumentRuleLocationOutputReference":
'''
:param index: the index of the item to return.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__af752cf5bf7e50465fb6af95111ac336fc0318b86d6e3a029fb2018c0531364e)
check_type(argname="argument index", value=index, expected_type=type_hints["index"])
return typing.cast("DataAwsRoute53TrafficPolicyDocumentRuleLocationOutputReference", jsii.invoke(self, "get", [index]))
@builtins.property
@jsii.member(jsii_name="terraformAttribute")
def _terraform_attribute(self) -> builtins.str:
'''The attribute on the parent resource this class is referencing.'''
return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))
@_terraform_attribute.setter
def _terraform_attribute(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__30896a3ae51680cc4053d687e198204aed6508633d7cc551f8d5af3909e3f24a)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "terraformAttribute", value)
@builtins.property
@jsii.member(jsii_name="terraformResource")
def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
'''The parent resource.'''
return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))
@_terraform_resource.setter
def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__2030add803792f576044ada800654b5d88b29e9c432312e71873dbf45fb6a677)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "terraformResource", value)
@builtins.property
@jsii.member(jsii_name="wrapsSet")
def _wraps_set(self) -> builtins.bool:
'''whether the list wraps a set (tolist() is added so an item can be accessed via an index).'''
return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))
@_wraps_set.setter
def _wraps_set(self, value: builtins.bool) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__50829cc73d210ef2089b8fdb4568ae5b33867007b4e81cb78657164bd10a864e)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "wrapsSet", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRuleLocation]]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRuleLocation]]], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRuleLocation]]],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__4faac59605e02726ce3d8111d89785aaf20ca38146ea547d14e7526a0333cada)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)


class DataAwsRoute53TrafficPolicyDocumentRuleLocationOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-aws.dataAwsRoute53TrafficPolicyDocument.DataAwsRoute53TrafficPolicyDocumentRuleLocationOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
:param complex_object_index: the index of this item in the list.
:param complex_object_is_from_set: whether the list wraps a set (tolist() is added so an item can be accessed via an index).
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__b0c79999a9b1bc85c480958d549ad91c650b93ce970c5a9bb32ad1c26f30e411)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])
@jsii.member(jsii_name="resetContinent")
def reset_continent(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetContinent", []))
@jsii.member(jsii_name="resetCountry")
def reset_country(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetCountry", []))
@jsii.member(jsii_name="resetEndpointReference")
def reset_endpoint_reference(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetEndpointReference", []))
@jsii.member(jsii_name="resetEvaluateTargetHealth")
def reset_evaluate_target_health(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetEvaluateTargetHealth", []))
@jsii.member(jsii_name="resetHealthCheck")
def reset_health_check(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetHealthCheck", []))
@jsii.member(jsii_name="resetIsDefault")
def reset_is_default(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetIsDefault", []))
@jsii.member(jsii_name="resetRuleReference")
def reset_rule_reference(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetRuleReference", []))
@jsii.member(jsii_name="resetSubdivision")
def reset_subdivision(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetSubdivision", []))
@builtins.property
@jsii.member(jsii_name="continentInput")
def continent_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "continentInput"))
@builtins.property
@jsii.member(jsii_name="countryInput")
def country_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "countryInput"))
@builtins.property
@jsii.member(jsii_name="endpointReferenceInput")
def endpoint_reference_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "endpointReferenceInput"))
@builtins.property
@jsii.member(jsii_name="evaluateTargetHealthInput")
def evaluate_target_health_input(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], jsii.get(self, "evaluateTargetHealthInput"))
@builtins.property
@jsii.member(jsii_name="healthCheckInput")
def health_check_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "healthCheckInput"))
@builtins.property
@jsii.member(jsii_name="isDefaultInput")
def is_default_input(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], jsii.get(self, "isDefaultInput"))
@builtins.property
@jsii.member(jsii_name="ruleReferenceInput")
def rule_reference_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "ruleReferenceInput"))
@builtins.property
@jsii.member(jsii_name="subdivisionInput")
def subdivision_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "subdivisionInput"))
@builtins.property
@jsii.member(jsii_name="continent")
def continent(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "continent"))
@continent.setter
def continent(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__d45b930c192a047287d17089b7b246e152974a2a097b2273d57957ea59b422c8)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "continent", value)
@builtins.property
@jsii.member(jsii_name="country")
def country(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "country"))
@country.setter
def country(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__644a4ee9bbfd73f44526f3f29c05fadedc7dd973a9949212591d26660e06ea01)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "country", value)
@builtins.property
@jsii.member(jsii_name="endpointReference")
def endpoint_reference(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "endpointReference"))
@endpoint_reference.setter
def endpoint_reference(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__2edfd08e19125344b8f5d35d1b1fd18581bb6040ffcc93c4b63870c11f6a4f6f)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "endpointReference", value)
@builtins.property
@jsii.member(jsii_name="evaluateTargetHealth")
def evaluate_target_health(
self,
) -> typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]:
return typing.cast(typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable], jsii.get(self, "evaluateTargetHealth"))
@evaluate_target_health.setter
def evaluate_target_health(
self,
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__0b5dcf76916ed1245835a9f53c3fa3686f0edf01312a2c9d0d249ddc47013409)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "evaluateTargetHealth", value)
@builtins.property
@jsii.member(jsii_name="healthCheck")
def health_check(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "healthCheck"))
@health_check.setter
def health_check(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__0806ccc07f6a016d0eefadf86be45720e22297c55984bdc5081c9bbbedc65822)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "healthCheck", value)
@builtins.property
@jsii.member(jsii_name="isDefault")
def is_default(self) -> typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]:
return typing.cast(typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable], jsii.get(self, "isDefault"))
@is_default.setter
def is_default(
self,
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__a774b1bd2b2472aa7ee7f72486d009c7f499a54b665b80caec07a8e5bbaea0f7)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "isDefault", value)
@builtins.property
@jsii.member(jsii_name="ruleReference")
def rule_reference(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "ruleReference"))
@rule_reference.setter
def rule_reference(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__d1ad64eeac521524a68826399f5129967aeac3e44cf804dc66bff039efac4a27)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "ruleReference", value)
@builtins.property
@jsii.member(jsii_name="subdivision")
def subdivision(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "subdivision"))
@subdivision.setter
def subdivision(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__8593da2edbb710b125f31e3eeedb8dafceb4cb017929bcb25473dbe8c7417158)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "subdivision", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DataAwsRoute53TrafficPolicyDocumentRuleLocation]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DataAwsRoute53TrafficPolicyDocumentRuleLocation]], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DataAwsRoute53TrafficPolicyDocumentRuleLocation]],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__cac0f2227019762719a773b0d45f76025d1fbfec491d99f1f34d7fce75f0b21e)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
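
# Usage sketch (hedged; only methods defined above are assumed): a single
# location element is obtained from the list by index, after which optional
# attributes can be cleared with the reset_* helpers and the configured,
# still-unresolved values inspected through the *_input properties:
#
#     loc = document.rule.get(0).location.get(0)
#     loc.reset_subdivision()   # drop the optional subdivision attribute
#     print(loc.country_input)  # the configured value, or None if unset
#
# ``document`` is a hypothetical DataAwsRoute53TrafficPolicyDocument instance.
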
class DataAwsRoute53TrafficPolicyDocumentRuleOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-aws.dataAwsRoute53TrafficPolicyDocument.DataAwsRoute53TrafficPolicyDocumentRuleOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: The index of this item in the list.
        :param complex_object_is_from_set: Whether the list wraps a set (a tolist() is added so items can be accessed by index).
        '''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__92235ec0bc5e31526817212a0ce358e15af25387b3136d4b58ed4dd9c5f33859)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])
@jsii.member(jsii_name="putGeoProximityLocation")
def put_geo_proximity_location(
self,
value: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation, typing.Dict[builtins.str, typing.Any]]]],
) -> None:
'''
:param value: -
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__a6bb4d52a9ca178acf92984dfc0ff6f5b42d76d46c50c75d94e2cdfab5730729)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
return typing.cast(None, jsii.invoke(self, "putGeoProximityLocation", [value]))
@jsii.member(jsii_name="putItems")
def put_items(
self,
value: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[DataAwsRoute53TrafficPolicyDocumentRuleItems, typing.Dict[builtins.str, typing.Any]]]],
) -> None:
'''
:param value: -
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__079e101d19bf5d04dae328bce13f337a0b1f6bde5a3962f0d96f6fc856f82b3b)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
return typing.cast(None, jsii.invoke(self, "putItems", [value]))
@jsii.member(jsii_name="putLocation")
def put_location(
self,
value: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[DataAwsRoute53TrafficPolicyDocumentRuleLocation, typing.Dict[builtins.str, typing.Any]]]],
) -> None:
'''
:param value: -
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__3ddaba5363ef35624c5321961ec672ea9f6fc92711d5035bb73b86f40a676cd1)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
return typing.cast(None, jsii.invoke(self, "putLocation", [value]))
@jsii.member(jsii_name="putPrimary")
def put_primary(
self,
*,
endpoint_reference: typing.Optional[builtins.str] = None,
evaluate_target_health: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
health_check: typing.Optional[builtins.str] = None,
rule_reference: typing.Optional[builtins.str] = None,
) -> None:
'''
:param endpoint_reference: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#endpoint_reference DataAwsRoute53TrafficPolicyDocument#endpoint_reference}.
:param evaluate_target_health: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#evaluate_target_health DataAwsRoute53TrafficPolicyDocument#evaluate_target_health}.
:param health_check: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#health_check DataAwsRoute53TrafficPolicyDocument#health_check}.
:param rule_reference: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#rule_reference DataAwsRoute53TrafficPolicyDocument#rule_reference}.
'''
value = DataAwsRoute53TrafficPolicyDocumentRulePrimary(
endpoint_reference=endpoint_reference,
evaluate_target_health=evaluate_target_health,
health_check=health_check,
rule_reference=rule_reference,
)
return typing.cast(None, jsii.invoke(self, "putPrimary", [value]))
@jsii.member(jsii_name="putRegion")
def put_region(
self,
value: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union["DataAwsRoute53TrafficPolicyDocumentRuleRegion", typing.Dict[builtins.str, typing.Any]]]],
) -> None:
'''
:param value: -
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__5196d9b61cc38bf3ed1b48b0013a83ff8e92497cc46f090a6eb27d5b98fb78a5)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
return typing.cast(None, jsii.invoke(self, "putRegion", [value]))
@jsii.member(jsii_name="putSecondary")
def put_secondary(
self,
*,
endpoint_reference: typing.Optional[builtins.str] = None,
evaluate_target_health: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
health_check: typing.Optional[builtins.str] = None,
rule_reference: typing.Optional[builtins.str] = None,
) -> None:
'''
:param endpoint_reference: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#endpoint_reference DataAwsRoute53TrafficPolicyDocument#endpoint_reference}.
:param evaluate_target_health: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#evaluate_target_health DataAwsRoute53TrafficPolicyDocument#evaluate_target_health}.
:param health_check: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#health_check DataAwsRoute53TrafficPolicyDocument#health_check}.
:param rule_reference: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#rule_reference DataAwsRoute53TrafficPolicyDocument#rule_reference}.
'''
value = DataAwsRoute53TrafficPolicyDocumentRuleSecondary(
endpoint_reference=endpoint_reference,
evaluate_target_health=evaluate_target_health,
health_check=health_check,
rule_reference=rule_reference,
)
return typing.cast(None, jsii.invoke(self, "putSecondary", [value]))
@jsii.member(jsii_name="resetGeoProximityLocation")
def reset_geo_proximity_location(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetGeoProximityLocation", []))
@jsii.member(jsii_name="resetItems")
def reset_items(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetItems", []))
@jsii.member(jsii_name="resetLocation")
def reset_location(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetLocation", []))
@jsii.member(jsii_name="resetPrimary")
def reset_primary(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetPrimary", []))
@jsii.member(jsii_name="resetRegion")
def reset_region(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetRegion", []))
@jsii.member(jsii_name="resetSecondary")
def reset_secondary(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetSecondary", []))
@jsii.member(jsii_name="resetType")
def reset_type(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetType", []))
@builtins.property
@jsii.member(jsii_name="geoProximityLocation")
def geo_proximity_location(
self,
) -> DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocationList:
return typing.cast(DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocationList, jsii.get(self, "geoProximityLocation"))
@builtins.property
@jsii.member(jsii_name="items")
def items(self) -> DataAwsRoute53TrafficPolicyDocumentRuleItemsList:
return typing.cast(DataAwsRoute53TrafficPolicyDocumentRuleItemsList, jsii.get(self, "items"))
@builtins.property
@jsii.member(jsii_name="location")
def location(self) -> DataAwsRoute53TrafficPolicyDocumentRuleLocationList:
return typing.cast(DataAwsRoute53TrafficPolicyDocumentRuleLocationList, jsii.get(self, "location"))
@builtins.property
@jsii.member(jsii_name="primary")
def primary(
self,
) -> "DataAwsRoute53TrafficPolicyDocumentRulePrimaryOutputReference":
return typing.cast("DataAwsRoute53TrafficPolicyDocumentRulePrimaryOutputReference", jsii.get(self, "primary"))
@builtins.property
@jsii.member(jsii_name="region")
def region(self) -> "DataAwsRoute53TrafficPolicyDocumentRuleRegionList":
return typing.cast("DataAwsRoute53TrafficPolicyDocumentRuleRegionList", jsii.get(self, "region"))
@builtins.property
@jsii.member(jsii_name="secondary")
def secondary(
self,
) -> "DataAwsRoute53TrafficPolicyDocumentRuleSecondaryOutputReference":
return typing.cast("DataAwsRoute53TrafficPolicyDocumentRuleSecondaryOutputReference", jsii.get(self, "secondary"))
@builtins.property
@jsii.member(jsii_name="geoProximityLocationInput")
def geo_proximity_location_input(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation]]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation]]], jsii.get(self, "geoProximityLocationInput"))
@builtins.property
@jsii.member(jsii_name="idInput")
def id_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "idInput"))
@builtins.property
@jsii.member(jsii_name="itemsInput")
def items_input(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRuleItems]]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRuleItems]]], jsii.get(self, "itemsInput"))
@builtins.property
@jsii.member(jsii_name="locationInput")
def location_input(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRuleLocation]]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRuleLocation]]], jsii.get(self, "locationInput"))
@builtins.property
@jsii.member(jsii_name="primaryInput")
def primary_input(
self,
) -> typing.Optional["DataAwsRoute53TrafficPolicyDocumentRulePrimary"]:
return typing.cast(typing.Optional["DataAwsRoute53TrafficPolicyDocumentRulePrimary"], jsii.get(self, "primaryInput"))
@builtins.property
@jsii.member(jsii_name="regionInput")
def region_input(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["DataAwsRoute53TrafficPolicyDocumentRuleRegion"]]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["DataAwsRoute53TrafficPolicyDocumentRuleRegion"]]], jsii.get(self, "regionInput"))
@builtins.property
@jsii.member(jsii_name="secondaryInput")
def secondary_input(
self,
) -> typing.Optional["DataAwsRoute53TrafficPolicyDocumentRuleSecondary"]:
return typing.cast(typing.Optional["DataAwsRoute53TrafficPolicyDocumentRuleSecondary"], jsii.get(self, "secondaryInput"))
@builtins.property
@jsii.member(jsii_name="typeInput")
def type_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "typeInput"))
@builtins.property
@jsii.member(jsii_name="id")
def id(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "id"))
@id.setter
def id(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__d8b92622c80b0d5b51e6d06ac1f0ce37a8b0ddae3c0768600a72a0f509cc8359)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "id", value)
@builtins.property
@jsii.member(jsii_name="type")
def type(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "type"))
@type.setter
def type(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__b9320c43648a6f4003aee864b267aab8447a24a7e70b0f04a4eb818dfee44047)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "type", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DataAwsRoute53TrafficPolicyDocumentRule]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DataAwsRoute53TrafficPolicyDocumentRule]], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DataAwsRoute53TrafficPolicyDocumentRule]],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__6744cfe0e75d98e573e5c767f8c089075c3e22e94d3918218dee70729b0cab35)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
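
# Usage sketch (an illustration, not generated code): for a failover rule the
# put_primary/put_secondary helpers above wrap their keyword arguments in the
# DataAwsRoute53TrafficPolicyDocumentRulePrimary / ...RuleSecondary structs
# defined below and pass them to the underlying jsii object:
#
#     rule = document.rule.get(0)  # ``document`` is hypothetical
#     rule.type = "failover"
#     rule.put_primary(
#         endpoint_reference="primary_endpoint",
#         evaluate_target_health=True,
#     )
#     rule.put_secondary(endpoint_reference="standby_endpoint")
#
# The helpers that take sequences (put_location, put_region, put_items,
# put_geo_proximity_location) accept lists of the matching structs instead.
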
@jsii.data_type(
jsii_type="@cdktf/provider-aws.dataAwsRoute53TrafficPolicyDocument.DataAwsRoute53TrafficPolicyDocumentRulePrimary",
jsii_struct_bases=[],
name_mapping={
"endpoint_reference": "endpointReference",
"evaluate_target_health": "evaluateTargetHealth",
"health_check": "healthCheck",
"rule_reference": "ruleReference",
},
)
class DataAwsRoute53TrafficPolicyDocumentRulePrimary:
def __init__(
self,
*,
endpoint_reference: typing.Optional[builtins.str] = None,
evaluate_target_health: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
health_check: typing.Optional[builtins.str] = None,
rule_reference: typing.Optional[builtins.str] = None,
) -> None:
'''
:param endpoint_reference: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#endpoint_reference DataAwsRoute53TrafficPolicyDocument#endpoint_reference}.
:param evaluate_target_health: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#evaluate_target_health DataAwsRoute53TrafficPolicyDocument#evaluate_target_health}.
:param health_check: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#health_check DataAwsRoute53TrafficPolicyDocument#health_check}.
:param rule_reference: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#rule_reference DataAwsRoute53TrafficPolicyDocument#rule_reference}.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__769abfd2fa555d8221bb4f5232915aa6513aebf01616ed207efb1356a07cc6b4)
check_type(argname="argument endpoint_reference", value=endpoint_reference, expected_type=type_hints["endpoint_reference"])
check_type(argname="argument evaluate_target_health", value=evaluate_target_health, expected_type=type_hints["evaluate_target_health"])
check_type(argname="argument health_check", value=health_check, expected_type=type_hints["health_check"])
check_type(argname="argument rule_reference", value=rule_reference, expected_type=type_hints["rule_reference"])
self._values: typing.Dict[builtins.str, typing.Any] = {}
if endpoint_reference is not None:
self._values["endpoint_reference"] = endpoint_reference
if evaluate_target_health is not None:
self._values["evaluate_target_health"] = evaluate_target_health
if health_check is not None:
self._values["health_check"] = health_check
if rule_reference is not None:
self._values["rule_reference"] = rule_reference
@builtins.property
def endpoint_reference(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#endpoint_reference DataAwsRoute53TrafficPolicyDocument#endpoint_reference}.'''
result = self._values.get("endpoint_reference")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def evaluate_target_health(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#evaluate_target_health DataAwsRoute53TrafficPolicyDocument#evaluate_target_health}.'''
result = self._values.get("evaluate_target_health")
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], result)
@builtins.property
def health_check(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#health_check DataAwsRoute53TrafficPolicyDocument#health_check}.'''
result = self._values.get("health_check")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def rule_reference(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#rule_reference DataAwsRoute53TrafficPolicyDocument#rule_reference}.'''
result = self._values.get("rule_reference")
return typing.cast(typing.Optional[builtins.str], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "DataAwsRoute53TrafficPolicyDocumentRulePrimary(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
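
# Usage sketch (follows directly from the struct definition above): every
# field of DataAwsRoute53TrafficPolicyDocumentRulePrimary is optional, and the
# name_mapping in the decorator translates the snake_case keywords into the
# camelCase names used on the jsii side:
#
#     primary = DataAwsRoute53TrafficPolicyDocumentRulePrimary(
#         endpoint_reference="primary_endpoint",
#         evaluate_target_health=True,
#     )
#     assert primary.health_check is None  # unset fields stay absent
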
class DataAwsRoute53TrafficPolicyDocumentRulePrimaryOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-aws.dataAwsRoute53TrafficPolicyDocument.DataAwsRoute53TrafficPolicyDocumentRulePrimaryOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__662da0a3a5c0933e908e4ea904348c82451a5deaa40ed325a5f36decd9b654c8)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])
@jsii.member(jsii_name="resetEndpointReference")
def reset_endpoint_reference(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetEndpointReference", []))
@jsii.member(jsii_name="resetEvaluateTargetHealth")
def reset_evaluate_target_health(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetEvaluateTargetHealth", []))
@jsii.member(jsii_name="resetHealthCheck")
def reset_health_check(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetHealthCheck", []))
@jsii.member(jsii_name="resetRuleReference")
def reset_rule_reference(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetRuleReference", []))
@builtins.property
@jsii.member(jsii_name="endpointReferenceInput")
def endpoint_reference_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "endpointReferenceInput"))
@builtins.property
@jsii.member(jsii_name="evaluateTargetHealthInput")
def evaluate_target_health_input(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], jsii.get(self, "evaluateTargetHealthInput"))
@builtins.property
@jsii.member(jsii_name="healthCheckInput")
def health_check_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "healthCheckInput"))
@builtins.property
@jsii.member(jsii_name="ruleReferenceInput")
def rule_reference_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "ruleReferenceInput"))
@builtins.property
@jsii.member(jsii_name="endpointReference")
def endpoint_reference(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "endpointReference"))
@endpoint_reference.setter
def endpoint_reference(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__aa5fe2c31f7a28b47b161d61b8fd68ad61cc237dd307c0624a6bda156e1c71cf)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "endpointReference", value)
@builtins.property
@jsii.member(jsii_name="evaluateTargetHealth")
def evaluate_target_health(
self,
) -> typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]:
return typing.cast(typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable], jsii.get(self, "evaluateTargetHealth"))
@evaluate_target_health.setter
def evaluate_target_health(
self,
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__00347552a696dcf70591ff12b5de22c1a11d4c463963ac337ac381245bcbc1c5)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "evaluateTargetHealth", value)
@builtins.property
@jsii.member(jsii_name="healthCheck")
def health_check(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "healthCheck"))
@health_check.setter
def health_check(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__55e11778a439431821e105bf726b85968e7a2924e13170c5d3f08e29ca1a98fc)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "healthCheck", value)
@builtins.property
@jsii.member(jsii_name="ruleReference")
def rule_reference(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "ruleReference"))
@rule_reference.setter
def rule_reference(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__eee6c088ba0ac3958f613f4eb1d2fd5f9f565ba4ff9cd5bf571f4bc0ddfb53ba)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "ruleReference", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[DataAwsRoute53TrafficPolicyDocumentRulePrimary]:
return typing.cast(typing.Optional[DataAwsRoute53TrafficPolicyDocumentRulePrimary], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[DataAwsRoute53TrafficPolicyDocumentRulePrimary],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__a9bb9547b17b1f2ef3d8435ebc437f27efab2bb11e8755705cf803fc12e81481)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)

@jsii.data_type(
jsii_type="@cdktf/provider-aws.dataAwsRoute53TrafficPolicyDocument.DataAwsRoute53TrafficPolicyDocumentRuleRegion",
jsii_struct_bases=[],
name_mapping={
"endpoint_reference": "endpointReference",
"evaluate_target_health": "evaluateTargetHealth",
"health_check": "healthCheck",
"region": "region",
"rule_reference": "ruleReference",
},
)
class DataAwsRoute53TrafficPolicyDocumentRuleRegion:
def __init__(
self,
*,
endpoint_reference: typing.Optional[builtins.str] = None,
evaluate_target_health: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
health_check: typing.Optional[builtins.str] = None,
region: typing.Optional[builtins.str] = None,
rule_reference: typing.Optional[builtins.str] = None,
) -> None:
'''
:param endpoint_reference: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#endpoint_reference DataAwsRoute53TrafficPolicyDocument#endpoint_reference}.
:param evaluate_target_health: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#evaluate_target_health DataAwsRoute53TrafficPolicyDocument#evaluate_target_health}.
:param health_check: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#health_check DataAwsRoute53TrafficPolicyDocument#health_check}.
:param region: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#region DataAwsRoute53TrafficPolicyDocument#region}.
:param rule_reference: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#rule_reference DataAwsRoute53TrafficPolicyDocument#rule_reference}.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__29eb7c2eed3144fa6675e96d0eda18a02a31a76c89d382ce01b5a1042360a44c)
check_type(argname="argument endpoint_reference", value=endpoint_reference, expected_type=type_hints["endpoint_reference"])
check_type(argname="argument evaluate_target_health", value=evaluate_target_health, expected_type=type_hints["evaluate_target_health"])
check_type(argname="argument health_check", value=health_check, expected_type=type_hints["health_check"])
check_type(argname="argument region", value=region, expected_type=type_hints["region"])
check_type(argname="argument rule_reference", value=rule_reference, expected_type=type_hints["rule_reference"])
self._values: typing.Dict[builtins.str, typing.Any] = {}
if endpoint_reference is not None:
self._values["endpoint_reference"] = endpoint_reference
if evaluate_target_health is not None:
self._values["evaluate_target_health"] = evaluate_target_health
if health_check is not None:
self._values["health_check"] = health_check
if region is not None:
self._values["region"] = region
if rule_reference is not None:
self._values["rule_reference"] = rule_reference
@builtins.property
def endpoint_reference(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#endpoint_reference DataAwsRoute53TrafficPolicyDocument#endpoint_reference}.'''
result = self._values.get("endpoint_reference")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def evaluate_target_health(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#evaluate_target_health DataAwsRoute53TrafficPolicyDocument#evaluate_target_health}.'''
result = self._values.get("evaluate_target_health")
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], result)
@builtins.property
def health_check(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#health_check DataAwsRoute53TrafficPolicyDocument#health_check}.'''
result = self._values.get("health_check")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def region(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#region DataAwsRoute53TrafficPolicyDocument#region}.'''
result = self._values.get("region")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def rule_reference(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#rule_reference DataAwsRoute53TrafficPolicyDocument#rule_reference}.'''
result = self._values.get("rule_reference")
return typing.cast(typing.Optional[builtins.str], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "DataAwsRoute53TrafficPolicyDocumentRuleRegion(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
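
# Usage sketch (hedged; field names come from the struct above): for a
# latency-based rule, one DataAwsRoute53TrafficPolicyDocumentRuleRegion entry
# is built per AWS region and the list is handed to put_region on the rule:
#
#     regions = [
#         DataAwsRoute53TrafficPolicyDocumentRuleRegion(
#             region="us-east-1", endpoint_reference="us_endpoint"
#         ),
#         DataAwsRoute53TrafficPolicyDocumentRuleRegion(
#             region="eu-west-1", endpoint_reference="eu_endpoint"
#         ),
#     ]
#     rule.put_region(regions)  # ``rule`` is a hypothetical rule reference
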
class DataAwsRoute53TrafficPolicyDocumentRuleRegionList(
_cdktf_9a9027ec.ComplexList,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-aws.dataAwsRoute53TrafficPolicyDocument.DataAwsRoute53TrafficPolicyDocumentRuleRegionList",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param wraps_set: Whether the list wraps a set (a tolist() is added so items can be accessed by index).
        '''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__8c2f5cfb2fb4ed3f21898f598fdccce17ffce85ff9d266ad715dfc28acb20621)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])
@jsii.member(jsii_name="get")
def get(
self,
index: jsii.Number,
) -> "DataAwsRoute53TrafficPolicyDocumentRuleRegionOutputReference":
        '''
        :param index: The index of the item to return.
        '''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__70aad7cd4d79698dc73b31197a1872d934b71c6bf27338c62cc42db0389450dc)
check_type(argname="argument index", value=index, expected_type=type_hints["index"])
return typing.cast("DataAwsRoute53TrafficPolicyDocumentRuleRegionOutputReference", jsii.invoke(self, "get", [index]))
@builtins.property
@jsii.member(jsii_name="terraformAttribute")
def _terraform_attribute(self) -> builtins.str:
'''The attribute on the parent resource this class is referencing.'''
return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))
@_terraform_attribute.setter
def _terraform_attribute(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__a155d7532b5d6a5a05479ab442e75eb48ff2a2ee6879351836c8414d922a7f37)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "terraformAttribute", value)
@builtins.property
@jsii.member(jsii_name="terraformResource")
def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
'''The parent resource.'''
return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))
@_terraform_resource.setter
def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__3e2879baad75df6b2ec270ee3f769de30ab9b7ffaacc3ca87a5553da6da82e85)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "terraformResource", value)
@builtins.property
@jsii.member(jsii_name="wrapsSet")
def _wraps_set(self) -> builtins.bool:
        '''Whether the list wraps a set (a tolist() is added so items can be accessed by index).'''
return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))
@_wraps_set.setter
def _wraps_set(self, value: builtins.bool) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__479939e8dd0fa8cbe56dd33b7827315c1202e89d3e3238d342dc47de7f5b3ad5)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "wrapsSet", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRuleRegion]]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRuleRegion]]], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRuleRegion]]],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__2aa560b098a2721a484249d50bd0da1ccb0fe44a604d971967ea7a5b9ba85fed)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
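
# Usage sketch (follows the ``get`` signature above): the list class is a
# Terraform-side collection, so elements are addressed through ``get`` rather
# than Python indexing, and each call returns an output reference bound to
# that position:
#
#     first = region_list.get(0)  # ``region_list`` is hypothetical
#     print(first.region)         # resolves the attribute through jsii
#
# When the list wraps a set (wraps_set=True), the generated Terraform
# expression adds a tolist() conversion so the index lookup stays valid.
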
class DataAwsRoute53TrafficPolicyDocumentRuleRegionOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-aws.dataAwsRoute53TrafficPolicyDocument.DataAwsRoute53TrafficPolicyDocumentRuleRegionOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: The index of this item in the list.
        :param complex_object_is_from_set: Whether the list wraps a set (a tolist() is added so items can be accessed by index).
        '''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__ea281861c0aebe10c57a5fe0914c88a6ef73a8853f41770f559f336efd00dc37)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])
@jsii.member(jsii_name="resetEndpointReference")
def reset_endpoint_reference(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetEndpointReference", []))
@jsii.member(jsii_name="resetEvaluateTargetHealth")
def reset_evaluate_target_health(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetEvaluateTargetHealth", []))
@jsii.member(jsii_name="resetHealthCheck")
def reset_health_check(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetHealthCheck", []))
@jsii.member(jsii_name="resetRegion")
def reset_region(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetRegion", []))
@jsii.member(jsii_name="resetRuleReference")
def reset_rule_reference(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetRuleReference", []))
@builtins.property
@jsii.member(jsii_name="endpointReferenceInput")
def endpoint_reference_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "endpointReferenceInput"))
@builtins.property
@jsii.member(jsii_name="evaluateTargetHealthInput")
def evaluate_target_health_input(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], jsii.get(self, "evaluateTargetHealthInput"))
@builtins.property
@jsii.member(jsii_name="healthCheckInput")
def health_check_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "healthCheckInput"))
@builtins.property
@jsii.member(jsii_name="regionInput")
def region_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "regionInput"))
@builtins.property
@jsii.member(jsii_name="ruleReferenceInput")
def rule_reference_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "ruleReferenceInput"))
@builtins.property
@jsii.member(jsii_name="endpointReference")
def endpoint_reference(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "endpointReference"))
@endpoint_reference.setter
def endpoint_reference(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__6a62a039a91975313e9ab9c9467644b911b9a402ab0bfb9f7e20d72221503906)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "endpointReference", value)
@builtins.property
@jsii.member(jsii_name="evaluateTargetHealth")
def evaluate_target_health(
self,
) -> typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]:
return typing.cast(typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable], jsii.get(self, "evaluateTargetHealth"))
@evaluate_target_health.setter
def evaluate_target_health(
self,
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__8a95c542cc4adaf84b97b5e87b895b69899bb15dc38cc9416eb66bae7887481e)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "evaluateTargetHealth", value)
@builtins.property
@jsii.member(jsii_name="healthCheck")
def health_check(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "healthCheck"))
@health_check.setter
def health_check(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__35c54931867f7202fb90d87890362a086d9b2a8e4d39d3f1710ef278b24b6558)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "healthCheck", value)
@builtins.property
@jsii.member(jsii_name="region")
def region(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "region"))
@region.setter
def region(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__0847359fad0d8ed2340826401a7810c3ee11076f7a219541cb8d7877351a2979)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "region", value)
@builtins.property
@jsii.member(jsii_name="ruleReference")
def rule_reference(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "ruleReference"))
@rule_reference.setter
def rule_reference(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__eca422fbf70e2985bf967636a4d707b75bbabca61c88bbc264eacfbffce6b201)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "ruleReference", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DataAwsRoute53TrafficPolicyDocumentRuleRegion]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DataAwsRoute53TrafficPolicyDocumentRuleRegion]], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DataAwsRoute53TrafficPolicyDocumentRuleRegion]],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__fb628e1d48d7f70101d084936c7edb9b696de26f6097a47a9e43061d4487e1d6)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)

@jsii.data_type(
jsii_type="@cdktf/provider-aws.dataAwsRoute53TrafficPolicyDocument.DataAwsRoute53TrafficPolicyDocumentRuleSecondary",
jsii_struct_bases=[],
name_mapping={
"endpoint_reference": "endpointReference",
"evaluate_target_health": "evaluateTargetHealth",
"health_check": "healthCheck",
"rule_reference": "ruleReference",
},
)
class DataAwsRoute53TrafficPolicyDocumentRuleSecondary:
def __init__(
self,
*,
endpoint_reference: typing.Optional[builtins.str] = None,
evaluate_target_health: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
health_check: typing.Optional[builtins.str] = None,
rule_reference: typing.Optional[builtins.str] = None,
) -> None:
'''
:param endpoint_reference: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#endpoint_reference DataAwsRoute53TrafficPolicyDocument#endpoint_reference}.
:param evaluate_target_health: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#evaluate_target_health DataAwsRoute53TrafficPolicyDocument#evaluate_target_health}.
:param health_check: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#health_check DataAwsRoute53TrafficPolicyDocument#health_check}.
:param rule_reference: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#rule_reference DataAwsRoute53TrafficPolicyDocument#rule_reference}.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__c422cc56685b22ee803f7a8456c8e226ff69c32291a87b54a7010097694e66e7)
check_type(argname="argument endpoint_reference", value=endpoint_reference, expected_type=type_hints["endpoint_reference"])
check_type(argname="argument evaluate_target_health", value=evaluate_target_health, expected_type=type_hints["evaluate_target_health"])
check_type(argname="argument health_check", value=health_check, expected_type=type_hints["health_check"])
check_type(argname="argument rule_reference", value=rule_reference, expected_type=type_hints["rule_reference"])
self._values: typing.Dict[builtins.str, typing.Any] = {}
if endpoint_reference is not None:
self._values["endpoint_reference"] = endpoint_reference
if evaluate_target_health is not None:
self._values["evaluate_target_health"] = evaluate_target_health
if health_check is not None:
self._values["health_check"] = health_check
if rule_reference is not None:
self._values["rule_reference"] = rule_reference
@builtins.property
def endpoint_reference(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#endpoint_reference DataAwsRoute53TrafficPolicyDocument#endpoint_reference}.'''
result = self._values.get("endpoint_reference")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def evaluate_target_health(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#evaluate_target_health DataAwsRoute53TrafficPolicyDocument#evaluate_target_health}.'''
result = self._values.get("evaluate_target_health")
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], result)
@builtins.property
def health_check(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#health_check DataAwsRoute53TrafficPolicyDocument#health_check}.'''
result = self._values.get("health_check")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def rule_reference(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/data-sources/route53_traffic_policy_document#rule_reference DataAwsRoute53TrafficPolicyDocument#rule_reference}.'''
result = self._values.get("rule_reference")
return typing.cast(typing.Optional[builtins.str], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "DataAwsRoute53TrafficPolicyDocumentRuleSecondary(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
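
# Usage sketch (derived from the __eq__/__repr__ definitions above): the
# generated structs compare by stored values rather than identity, which makes
# them convenient in tests:
#
#     a = DataAwsRoute53TrafficPolicyDocumentRuleSecondary(health_check="hc-1")
#     b = DataAwsRoute53TrafficPolicyDocumentRuleSecondary(health_check="hc-1")
#     assert a == b  # equal because both share the same _values
#     print(a)  # DataAwsRoute53TrafficPolicyDocumentRuleSecondary(health_check='hc-1')
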
class DataAwsRoute53TrafficPolicyDocumentRuleSecondaryOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-aws.dataAwsRoute53TrafficPolicyDocument.DataAwsRoute53TrafficPolicyDocumentRuleSecondaryOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__1c0bc52ac5f20c9082ed5cbaa5202dd825e60a912578252d209f62213b8ba1a0)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])
@jsii.member(jsii_name="resetEndpointReference")
def reset_endpoint_reference(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetEndpointReference", []))
@jsii.member(jsii_name="resetEvaluateTargetHealth")
def reset_evaluate_target_health(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetEvaluateTargetHealth", []))
@jsii.member(jsii_name="resetHealthCheck")
def reset_health_check(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetHealthCheck", []))
@jsii.member(jsii_name="resetRuleReference")
def reset_rule_reference(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetRuleReference", []))
@builtins.property
@jsii.member(jsii_name="endpointReferenceInput")
def endpoint_reference_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "endpointReferenceInput"))
@builtins.property
@jsii.member(jsii_name="evaluateTargetHealthInput")
def evaluate_target_health_input(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], jsii.get(self, "evaluateTargetHealthInput"))
@builtins.property
@jsii.member(jsii_name="healthCheckInput")
def health_check_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "healthCheckInput"))
@builtins.property
@jsii.member(jsii_name="ruleReferenceInput")
def rule_reference_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "ruleReferenceInput"))
@builtins.property
@jsii.member(jsii_name="endpointReference")
def endpoint_reference(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "endpointReference"))
@endpoint_reference.setter
def endpoint_reference(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__0c8af1e13b1c1c34a27b785503f1709a2e70b3ada04e918910b0aacd0b61c0df)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "endpointReference", value)
@builtins.property
@jsii.member(jsii_name="evaluateTargetHealth")
def evaluate_target_health(
self,
) -> typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]:
return typing.cast(typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable], jsii.get(self, "evaluateTargetHealth"))
@evaluate_target_health.setter
def evaluate_target_health(
self,
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__e85a284b1637a9d6e511bb80322b57fb600cf157e05239e4561f6bdcca9237af)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "evaluateTargetHealth", value)
@builtins.property
@jsii.member(jsii_name="healthCheck")
def health_check(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "healthCheck"))
@health_check.setter
def health_check(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__803f4946b15fe578d06817f4f6978f864d53c42cf9843906db65e00ba11e2c67)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "healthCheck", value)
@builtins.property
@jsii.member(jsii_name="ruleReference")
def rule_reference(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "ruleReference"))
@rule_reference.setter
def rule_reference(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__4714742ad1a8879f64fe1b0fc12f20a80cca8fb0ae55d43e953b0434879d7dab)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "ruleReference", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[DataAwsRoute53TrafficPolicyDocumentRuleSecondary]:
return typing.cast(typing.Optional[DataAwsRoute53TrafficPolicyDocumentRuleSecondary], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[DataAwsRoute53TrafficPolicyDocumentRuleSecondary],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__98fb59bbdb4033c1c98f1147f909d9b376fead60510920f62b9c4da99a0382f8)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
__all__ = [
"DataAwsRoute53TrafficPolicyDocument",
"DataAwsRoute53TrafficPolicyDocumentConfig",
"DataAwsRoute53TrafficPolicyDocumentEndpoint",
"DataAwsRoute53TrafficPolicyDocumentEndpointList",
"DataAwsRoute53TrafficPolicyDocumentEndpointOutputReference",
"DataAwsRoute53TrafficPolicyDocumentRule",
"DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation",
"DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocationList",
"DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocationOutputReference",
"DataAwsRoute53TrafficPolicyDocumentRuleItems",
"DataAwsRoute53TrafficPolicyDocumentRuleItemsList",
"DataAwsRoute53TrafficPolicyDocumentRuleItemsOutputReference",
"DataAwsRoute53TrafficPolicyDocumentRuleList",
"DataAwsRoute53TrafficPolicyDocumentRuleLocation",
"DataAwsRoute53TrafficPolicyDocumentRuleLocationList",
"DataAwsRoute53TrafficPolicyDocumentRuleLocationOutputReference",
"DataAwsRoute53TrafficPolicyDocumentRuleOutputReference",
"DataAwsRoute53TrafficPolicyDocumentRulePrimary",
"DataAwsRoute53TrafficPolicyDocumentRulePrimaryOutputReference",
"DataAwsRoute53TrafficPolicyDocumentRuleRegion",
"DataAwsRoute53TrafficPolicyDocumentRuleRegionList",
"DataAwsRoute53TrafficPolicyDocumentRuleRegionOutputReference",
"DataAwsRoute53TrafficPolicyDocumentRuleSecondary",
"DataAwsRoute53TrafficPolicyDocumentRuleSecondaryOutputReference",
]
publication.publish()
def _typecheckingstub__3f59d1716fb29cedfeedb7d4d0793a9153c7a8e17bd02235e7375fa23200d019(
scope: _constructs_77d1e7e8.Construct,
id_: builtins.str,
*,
endpoint: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[DataAwsRoute53TrafficPolicyDocumentEndpoint, typing.Dict[builtins.str, typing.Any]]]]] = None,
id: typing.Optional[builtins.str] = None,
record_type: typing.Optional[builtins.str] = None,
rule: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[DataAwsRoute53TrafficPolicyDocumentRule, typing.Dict[builtins.str, typing.Any]]]]] = None,
start_endpoint: typing.Optional[builtins.str] = None,
start_rule: typing.Optional[builtins.str] = None,
version: typing.Optional[builtins.str] = None,
connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__2b4bff4d41f1ea6641264d0f86ff1bc568bc556506cbbaf0d4e3842aa250a96e(
value: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[DataAwsRoute53TrafficPolicyDocumentEndpoint, typing.Dict[builtins.str, typing.Any]]]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__b215284bd29cf4970bee50e662df0f59d06358193a835450077c36bba165c330(
value: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[DataAwsRoute53TrafficPolicyDocumentRule, typing.Dict[builtins.str, typing.Any]]]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__c011e650563d02b814fa371a0004500cf15eefec589f04d8a7327a3831bab4d9(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__3361581191dfd05ef3b9631340e56c7f3f626820450f0cc757b12d382aeae342(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__05d1f364aaef53beaa3ee1129601dee7fb81976ddd57143d95960b5c728684a7(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__9e72498f571b940f018e236ff4ccf98aae36ad1b4457771b536814cbc498eca8(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__f1391014f59ff2fc50cc0ff9eba099c7bc226dd2dc9a7b712b916522a7974174(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__3753a58cccb193054e80c8b366cfff78394a27fcb5ef11c999568b3177c81712(
*,
connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
endpoint: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[DataAwsRoute53TrafficPolicyDocumentEndpoint, typing.Dict[builtins.str, typing.Any]]]]] = None,
id: typing.Optional[builtins.str] = None,
record_type: typing.Optional[builtins.str] = None,
rule: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[DataAwsRoute53TrafficPolicyDocumentRule, typing.Dict[builtins.str, typing.Any]]]]] = None,
start_endpoint: typing.Optional[builtins.str] = None,
start_rule: typing.Optional[builtins.str] = None,
version: typing.Optional[builtins.str] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__34ac11567536e8379ef1f06c89de78b807bcd68d55905b7fed6967293f63545d(
*,
id: builtins.str,
region: typing.Optional[builtins.str] = None,
type: typing.Optional[builtins.str] = None,
value: typing.Optional[builtins.str] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__df678f09f9c9ba33d41d1bc8f04420eb5df59d930de07434d6e9c3ea736a9790(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__fcdd69aa3d676d635647243724d4181b17750bce985e964ed1bc46a1f0c9d9ca(
index: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__8b084e8b0d0253431449915dc3e75cc27bd576e374cc11f6f73d344ec71b8fce(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__a740f1d6b4e237b09e819f5598cd15e484561dc7a57b1eb54c51e386e239136d(
value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__3eebf863eb671ac25b89aaa213197e46f2c44ffcc8173ffca317feca74caad4a(
value: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__079bc0cb124512cb2bd10fb7cc7e21e72e4a7df695e6a6ef5a74a59d508764e9(
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentEndpoint]]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__04e98c71bb37d3d61575f67c7d9b9546315cfa37099ef31be08f32dd03df9209(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__1018f43caafeb285e87c31d3d134185afffeba841fa5c29c6206b2b1f0e2b731(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__98cffbb63ec0afd6c38c4e060663576af068bd45d5a5700e087d169b39116750(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__61de518a8440f6ec77c5a3d1017163b93a3ea6f3b228c3537aea59540f6ed1b7(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__6607208319d1a85778dfd068d9148a3f6339d5737f2ed4006715e16e4e78f74f(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__6e36b0de8a48bb4f9d62e0852ef306eeb2e9d18ca04a046785fb9596f0bceaab(
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DataAwsRoute53TrafficPolicyDocumentEndpoint]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__3928430e7935f0fdd0670e4052c4916004706991443006f5fbd986c06ef5ffe2(
*,
id: builtins.str,
geo_proximity_location: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation, typing.Dict[builtins.str, typing.Any]]]]] = None,
items: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[DataAwsRoute53TrafficPolicyDocumentRuleItems, typing.Dict[builtins.str, typing.Any]]]]] = None,
location: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[DataAwsRoute53TrafficPolicyDocumentRuleLocation, typing.Dict[builtins.str, typing.Any]]]]] = None,
primary: typing.Optional[typing.Union[DataAwsRoute53TrafficPolicyDocumentRulePrimary, typing.Dict[builtins.str, typing.Any]]] = None,
region: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[DataAwsRoute53TrafficPolicyDocumentRuleRegion, typing.Dict[builtins.str, typing.Any]]]]] = None,
secondary: typing.Optional[typing.Union[DataAwsRoute53TrafficPolicyDocumentRuleSecondary, typing.Dict[builtins.str, typing.Any]]] = None,
type: typing.Optional[builtins.str] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__8592c0e79e8aae010cdf54020cd566a4f166ef2a31fe195e7ec9ccc86e648318(
*,
bias: typing.Optional[builtins.str] = None,
endpoint_reference: typing.Optional[builtins.str] = None,
evaluate_target_health: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
health_check: typing.Optional[builtins.str] = None,
latitude: typing.Optional[builtins.str] = None,
longitude: typing.Optional[builtins.str] = None,
region: typing.Optional[builtins.str] = None,
rule_reference: typing.Optional[builtins.str] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__3deff978032fcf1dac3caf4c7841c3af274bfdc35d9d50724a56caec879e1bd2(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__57037b72b63ef1d1b02afa482018f3a21b6fa980405f236ae0160830729d15f6(
index: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__d2602befda689a690deb91314100b3ee0cc3e67504f5c4ac76f47c8f8a6fcc47(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__b68e21286beac230f7523954b36ec60fb90d32c5eeec59f92033aee487d5ee37(
value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__769c6f746a492d794092086f58dbd41f748e7b2c2ac7a1a74028f9a8214a2f2a(
value: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__38089380284196de37265733ec8fd92bf98f9cb16cb3b1d873c8e224b5a443e5(
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation]]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__9b8244679531121b5da19c76348ceb483bcb0881316774740d7f51239c7bcee1(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__567a1161e230968777613778f9f6301f41bffe14607a85418ec572c01a12fc1e(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__a9839ae127ba6a48af1ee8e06a5b9915acc83610a9457f4d4543212e5c321eda(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__7c79328d7781875c2e326067d32f7663a229e09db1111c195bdd3835894a2878(
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__3371153e57b63afe808b7c55b349dca6a4904295c3949a4135d8ecf24ade54f5(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__2b378fa6900b56a4aa79baa6eb95c593a71648b91ba325fd04e0a33e91413ee6(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__af8679673bd736d67538cb85ea85e975121875118975e64503354eb487de2a6a(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__3203732b02d3c4961d67817976b7b13ed8d41ded16f34b2d882cae3a6dfb27dc(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__3cb3e4932e2d710e690919953a19395fc67abb347d555c329d4c390d0ea406e8(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__3d776337159b2b4211f4b2ef3a3339705abdb517214b3756ee4881be0abc92ca(
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__c586cd7e498a09c6b0b02ac58f3830a6a8a61fde1635e125376b7ab0948499cc(
*,
endpoint_reference: typing.Optional[builtins.str] = None,
health_check: typing.Optional[builtins.str] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__44c05729b44afb85f2cb65a212e5abaa9c0f41408e3983aaa2848f6a9bb00257(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__b0bd3573866479ff6251014486061c652be31dc060b060ddfbf82e56050fb4b9(
index: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__8f8dfe10bb03cb3aa1cec0c7d5cac93aaee16794a350741ca909f9ec1c91efdb(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__a1cd6f2fe1cdb819b6fed9bdb1a038b98a2280509e77f8be8173160cd7651a22(
value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__a30e5fcfa568171642be6c78436d876d9fca0edb6ae417f2622ec564436f6c2a(
value: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__114f8df995e56e40c547b6f369900e3fcb7ddceb4c127ea5bea5050352ddbb02(
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRuleItems]]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__0a617e8045701b0504ff50c44c2ee056e08337e329f22ba9fbc4b9b627da2814(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__2af4853d5e7976257ca838e97de7a75a08e11ab37e0bae9cbdd834d63230526b(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__ebf40799afecbbb21a8d6ba8a6edc186005636364d4abdab55f5b1ca0c1a3268(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__eb34caa8deea3daf67990ed159c871ca46863490e0cc477ee7434535fe59c629(
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DataAwsRoute53TrafficPolicyDocumentRuleItems]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__4b67f9ee10a2f3dad96d110a7aa4998a55fec1475ac83b850cca3cd0b225e090(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__f99b0fabe402c65533b2457cde517bd2d56f2d151144427a1f5217ece1c7e616(
index: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__6e377211dbc3954ddea0ccded40d5aaf4120f577427fc7bd8943dfcd053975d6(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__766495a5402f250f370ec534b8e3687845847c68d36b79455f89cf85772a7442(
value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__19f20cf8561c4a13589e6d1eb8164e3bc4132819624f6279d7c64438fc355959(
value: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__534ad647536b5ba377f26eab10b6c47ef55e6936e384d76c503c7a1fd6154409(
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRule]]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__5ad466c6218d01cf0ceafa63f53f56535ae71a3c70628a08b22eac81d08b3167(
*,
continent: typing.Optional[builtins.str] = None,
country: typing.Optional[builtins.str] = None,
endpoint_reference: typing.Optional[builtins.str] = None,
evaluate_target_health: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
health_check: typing.Optional[builtins.str] = None,
is_default: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
rule_reference: typing.Optional[builtins.str] = None,
subdivision: typing.Optional[builtins.str] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__66a3383b34cb9e57e03b0ba685f567e4cf674366760b9dd857cdf881149207fe(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__af752cf5bf7e50465fb6af95111ac336fc0318b86d6e3a029fb2018c0531364e(
index: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__30896a3ae51680cc4053d687e198204aed6508633d7cc551f8d5af3909e3f24a(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__2030add803792f576044ada800654b5d88b29e9c432312e71873dbf45fb6a677(
value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__50829cc73d210ef2089b8fdb4568ae5b33867007b4e81cb78657164bd10a864e(
value: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__4faac59605e02726ce3d8111d89785aaf20ca38146ea547d14e7526a0333cada(
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRuleLocation]]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__b0c79999a9b1bc85c480958d549ad91c650b93ce970c5a9bb32ad1c26f30e411(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__d45b930c192a047287d17089b7b246e152974a2a097b2273d57957ea59b422c8(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__644a4ee9bbfd73f44526f3f29c05fadedc7dd973a9949212591d26660e06ea01(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__2edfd08e19125344b8f5d35d1b1fd18581bb6040ffcc93c4b63870c11f6a4f6f(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__0b5dcf76916ed1245835a9f53c3fa3686f0edf01312a2c9d0d249ddc47013409(
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__0806ccc07f6a016d0eefadf86be45720e22297c55984bdc5081c9bbbedc65822(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__a774b1bd2b2472aa7ee7f72486d009c7f499a54b665b80caec07a8e5bbaea0f7(
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__d1ad64eeac521524a68826399f5129967aeac3e44cf804dc66bff039efac4a27(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__8593da2edbb710b125f31e3eeedb8dafceb4cb017929bcb25473dbe8c7417158(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__cac0f2227019762719a773b0d45f76025d1fbfec491d99f1f34d7fce75f0b21e(
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DataAwsRoute53TrafficPolicyDocumentRuleLocation]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__92235ec0bc5e31526817212a0ce358e15af25387b3136d4b58ed4dd9c5f33859(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__a6bb4d52a9ca178acf92984dfc0ff6f5b42d76d46c50c75d94e2cdfab5730729(
value: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation, typing.Dict[builtins.str, typing.Any]]]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__079e101d19bf5d04dae328bce13f337a0b1f6bde5a3962f0d96f6fc856f82b3b(
value: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[DataAwsRoute53TrafficPolicyDocumentRuleItems, typing.Dict[builtins.str, typing.Any]]]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__3ddaba5363ef35624c5321961ec672ea9f6fc92711d5035bb73b86f40a676cd1(
value: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[DataAwsRoute53TrafficPolicyDocumentRuleLocation, typing.Dict[builtins.str, typing.Any]]]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__5196d9b61cc38bf3ed1b48b0013a83ff8e92497cc46f090a6eb27d5b98fb78a5(
value: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[DataAwsRoute53TrafficPolicyDocumentRuleRegion, typing.Dict[builtins.str, typing.Any]]]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__d8b92622c80b0d5b51e6d06ac1f0ce37a8b0ddae3c0768600a72a0f509cc8359(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__b9320c43648a6f4003aee864b267aab8447a24a7e70b0f04a4eb818dfee44047(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__6744cfe0e75d98e573e5c767f8c089075c3e22e94d3918218dee70729b0cab35(
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DataAwsRoute53TrafficPolicyDocumentRule]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__769abfd2fa555d8221bb4f5232915aa6513aebf01616ed207efb1356a07cc6b4(
*,
endpoint_reference: typing.Optional[builtins.str] = None,
evaluate_target_health: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
health_check: typing.Optional[builtins.str] = None,
rule_reference: typing.Optional[builtins.str] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__662da0a3a5c0933e908e4ea904348c82451a5deaa40ed325a5f36decd9b654c8(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__aa5fe2c31f7a28b47b161d61b8fd68ad61cc237dd307c0624a6bda156e1c71cf(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__00347552a696dcf70591ff12b5de22c1a11d4c463963ac337ac381245bcbc1c5(
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__55e11778a439431821e105bf726b85968e7a2924e13170c5d3f08e29ca1a98fc(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__eee6c088ba0ac3958f613f4eb1d2fd5f9f565ba4ff9cd5bf571f4bc0ddfb53ba(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__a9bb9547b17b1f2ef3d8435ebc437f27efab2bb11e8755705cf803fc12e81481(
value: typing.Optional[DataAwsRoute53TrafficPolicyDocumentRulePrimary],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__29eb7c2eed3144fa6675e96d0eda18a02a31a76c89d382ce01b5a1042360a44c(
*,
endpoint_reference: typing.Optional[builtins.str] = None,
evaluate_target_health: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
health_check: typing.Optional[builtins.str] = None,
region: typing.Optional[builtins.str] = None,
rule_reference: typing.Optional[builtins.str] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__8c2f5cfb2fb4ed3f21898f598fdccce17ffce85ff9d266ad715dfc28acb20621(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__70aad7cd4d79698dc73b31197a1872d934b71c6bf27338c62cc42db0389450dc(
index: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__a155d7532b5d6a5a05479ab442e75eb48ff2a2ee6879351836c8414d922a7f37(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__3e2879baad75df6b2ec270ee3f769de30ab9b7ffaacc3ca87a5553da6da82e85(
value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__479939e8dd0fa8cbe56dd33b7827315c1202e89d3e3238d342dc47de7f5b3ad5(
value: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__2aa560b098a2721a484249d50bd0da1ccb0fe44a604d971967ea7a5b9ba85fed(
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[DataAwsRoute53TrafficPolicyDocumentRuleRegion]]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__ea281861c0aebe10c57a5fe0914c88a6ef73a8853f41770f559f336efd00dc37(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__6a62a039a91975313e9ab9c9467644b911b9a402ab0bfb9f7e20d72221503906(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__8a95c542cc4adaf84b97b5e87b895b69899bb15dc38cc9416eb66bae7887481e(
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__35c54931867f7202fb90d87890362a086d9b2a8e4d39d3f1710ef278b24b6558(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__0847359fad0d8ed2340826401a7810c3ee11076f7a219541cb8d7877351a2979(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__eca422fbf70e2985bf967636a4d707b75bbabca61c88bbc264eacfbffce6b201(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__fb628e1d48d7f70101d084936c7edb9b696de26f6097a47a9e43061d4487e1d6(
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, DataAwsRoute53TrafficPolicyDocumentRuleRegion]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__c422cc56685b22ee803f7a8456c8e226ff69c32291a87b54a7010097694e66e7(
*,
endpoint_reference: typing.Optional[builtins.str] = None,
evaluate_target_health: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
health_check: typing.Optional[builtins.str] = None,
rule_reference: typing.Optional[builtins.str] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__1c0bc52ac5f20c9082ed5cbaa5202dd825e60a912578252d209f62213b8ba1a0(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__0c8af1e13b1c1c34a27b785503f1709a2e70b3ada04e918910b0aacd0b61c0df(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__e85a284b1637a9d6e511bb80322b57fb600cf157e05239e4561f6bdcca9237af(
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__803f4946b15fe578d06817f4f6978f864d53c42cf9843906db65e00ba11e2c67(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__4714742ad1a8879f64fe1b0fc12f20a80cca8fb0ae55d43e953b0434879d7dab(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__98fb59bbdb4033c1c98f1147f909d9b376fead60510920f62b9c4da99a0382f8(
value: typing.Optional[DataAwsRoute53TrafficPolicyDocumentRuleSecondary],
) -> None:
"""Type checking stubs"""
pass
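# Usage sketch (illustrative only; the stack name and arguments below are
# placeholders, not values mandated by these generated bindings):
#
#     from cdktf import App, TerraformStack
#
#     app = App()
#     stack = TerraformStack(app, "example-stack")
#     doc = DataAwsRoute53TrafficPolicyDocument(
#         stack,
#         "traffic-policy-doc",
#         record_type="A",
#     )
#     app.synth()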
|
PypiClean
|
/openid-selector-0.2.zip/openid-selector-0.2/js/openid-prototype.js
|
var providers;
var openid = {
version : '1.3', // version constant
demo : false,
demo_text : null,
cookie_expires : 6 * 30, // 6 months.
cookie_name : 'openid_provider',
cookie_path : '/',
img_path : 'images/',
locale : null, // is set in openid-<locale>.js
sprite : null, // usually equals to locale, is set in
// openid-<locale>.js
signin_text : null, // text on submit button on the form
all_small : false, // output large providers w/ small icons
no_sprite : false, // don't use sprite image
image_title : '{provider}', // for image title
input_id : null,
provider_url : null,
provider_id : null,
	/**
	 * Class constructor
	 *
	 * @param {String} input_id id of the form field that receives the chosen OpenID URL
	 * @return {Void}
	 */
init : function(input_id) {
providers = {};
Object.extend(providers, providers_large);
Object.extend(providers, providers_small);
var openid_btns = $('openid_btns');
this.input_id = input_id;
$('openid_choice').setStyle({
display: 'block'
});
$('openid_input_area').innerHTML = "";
var i = 0;
// add box for each provider
var id, box;
for (id in providers_large) {
box = this.getBoxHTML(id, providers_large[id], (this.all_small ? 'small' : 'large'), i++);
openid_btns.insert(box);
}
if (providers_small) {
openid_btns.insert('<br/>');
for (id in providers_small) {
box = this.getBoxHTML(id, providers_small[id], 'small', i++);
openid_btns.insert(box);
}
}
$('openid_form').onsubmit = this.submit;
var box_id = this.readCookie();
if (box_id) {
this.signin(box_id, true);
}
},
/**
* @return {String}
*/
getBoxHTML : function(box_id, provider, box_size, index) {
if (this.no_sprite) {
var image_ext = box_size == 'small' ? '.ico.gif' : '.gif';
return '<a title="' + this.image_title.replace('{provider}', provider.name) + '" href="javascript:openid.signin(\'' + box_id + '\');"'
+ ' style="background: #FFF url(' + this.img_path + '../images.' + box_size + '/' + box_id + image_ext + ') no-repeat center center" '
+ 'class="' + box_id + ' openid_' + box_size + '_btn"></a>';
}
var x = box_size == 'small' ? -index * 24 : -index * 100;
var y = box_size == 'small' ? -60 : 0;
return '<a title="' + this.image_title.replace('{provider}', provider.name) + '" href="javascript:openid.signin(\'' + box_id + '\');"'
+ ' style="background: #FFF url(' + this.img_path + 'openid-providers-' + this.sprite + '.png); background-position: ' + x + 'px ' + y + 'px" '
+ 'class="' + box_id + ' openid_' + box_size + '_btn"></a>';
},
/**
* Provider image click
*
* @return {Void}
*/
signin : function(box_id, onload) {
var provider = providers[box_id];
if (!provider) {
return;
}
this.highlight(box_id);
this.setCookie(box_id);
this.provider_id = box_id;
this.provider_url = provider.url;
// prompt user for input?
if (provider.label) {
this.useInputBox(provider);
} else {
$('openid_input_area').innerHTML = '';
if (!onload) {
if (this.submit()) {
$('openid_form').submit();
}
}
}
},
/**
* Sign-in button click
*
* @return {Boolean}
*/
submit : function() {
var url = openid.provider_url;
var username_field = $('openid_username');
		var username = username_field ? username_field.value : '';
if (url) {
url = url.replace('{username}', username);
openid.setOpenIdUrl(url);
}
if (openid.demo) {
alert(openid.demo_text + "\r\n" + document.getElementById(openid.input_id).value);
return false;
}
if (url && url.indexOf("javascript:") == 0) {
url = url.substr("javascript:".length);
eval(url);
return false;
}
return true;
},
/**
* @return {Void}
*/
setOpenIdUrl : function(url) {
var hidden = document.getElementById(this.input_id);
if (hidden != null) {
hidden.value = url;
} else {
$('openid_form').insert('<input type="hidden" id="' + this.input_id + '" name="' + this.input_id + '" value="' + url + '"/>');
}
},
/**
* @return {Void}
*/
highlight : function(box_id) {
// remove previous highlight.
var highlight = $('openid_highlight');
if (highlight) {
var fc = highlight.firstChild;
highlight.parentNode.replaceChild(fc, highlight);
}
// add new highlight.
var box = $$('.' + box_id)[0];
var wrapper = document.createElement('div');
wrapper.id = 'openid_highlight';
box.parentNode.replaceChild(wrapper, box);
wrapper.appendChild(box);
},
setCookie : function(value) {
var date = new Date();
date.setTime(date.getTime() + (this.cookie_expires * 24 * 60 * 60 * 1000));
var expires = "; expires=" + date.toGMTString();
document.cookie = this.cookie_name + "=" + value + expires + "; path=" + this.cookie_path;
},
readCookie : function() {
var nameEQ = this.cookie_name + "=";
var ca = document.cookie.split(';');
for ( var i = 0; i < ca.length; i++) {
var c = ca[i];
while (c.charAt(0) == ' ')
c = c.substring(1, c.length);
if (c.indexOf(nameEQ) == 0)
return c.substring(nameEQ.length, c.length);
}
return null;
},
/**
* @return {Void}
*/
useInputBox : function(provider) {
var input_area = $('openid_input_area');
var html = '';
var id = 'openid_username';
var value = '';
var label = provider.label;
var style = '';
if (label) {
html = '<p>' + label + '</p>';
}
if (provider.name == 'OpenID') {
id = this.input_id;
value = 'http://';
style = 'background: #FFF url(' + this.img_path + 'openid-inputicon.gif) no-repeat scroll 0 50%; padding-left:18px;';
}
html += '<input id="' + id + '" type="text" style="' + style + '" name="' + id + '" value="' + value + '" />'
+ '<input id="openid_submit" type="submit" value="' + this.signin_text + '"/>';
input_area.innerHTML = html;
$('openid_submit').onclick = this.submit;
$(id).focus();
},
setDemoMode : function(demoMode) {
this.demo = demoMode;
}
};
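// Usage sketch (illustrative only; 'openid_identifier' is a placeholder id for
// the hidden form field, and the page must contain the elements init() expects:
// #openid_btns, #openid_choice, #openid_input_area and #openid_form):
//
//     document.observe('dom:loaded', function() {
//         openid.init('openid_identifier');
//     });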
|
PypiClean
|
/lcr-api-2-0.5.0.tar.gz/lcr-api-2-0.5.0/lcr/__init__.py
|
import logging
import requests
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as ec
from webdriver_manager.chrome import ChromeDriverManager
_LOGGER = logging.getLogger(__name__)
HOST = "churchofjesuschrist.org"
BETA_HOST = f"beta.{HOST}"
LCR_DOMAIN = f"lcr.{HOST}"
CHROME_OPTIONS = webdriver.chrome.options.Options()
CHROME_OPTIONS.add_argument("--headless")
TIMEOUT = 10
if _LOGGER.getEffectiveLevel() <= logging.DEBUG:
import http.client as http_client
http_client.HTTPConnection.debuglevel = 1
class InvalidCredentialsError(Exception):
pass
class API():
    def __init__(self, username, password, unit_number, beta=False, driver=None):
        # Creating the Chrome driver as a default argument would launch a
        # browser at import time (defaults are evaluated once, when the
        # function is defined), so build it lazily when none is supplied.
        if driver is None:
            driver = webdriver.Chrome(ChromeDriverManager().install(), options=CHROME_OPTIONS)
        self.unit_number = unit_number
        self.session = requests.Session()
        self.driver = driver
self.beta = beta
self.host = BETA_HOST if beta else HOST
self._login(username, password)
def _login(self, user, password):
_LOGGER.info("Logging in")
# Navigate to the login page
self.driver.get(f"https://{LCR_DOMAIN}")
# Enter the username
login_input = WebDriverWait(self.driver, TIMEOUT).until(
ec.presence_of_element_located(
(By.CSS_SELECTOR, "input#okta-signin-username")
)
)
login_input.send_keys(user)
login_input.submit()
# Enter password
password_input = WebDriverWait(self.driver, TIMEOUT).until(
ec.presence_of_element_located(
(By.CSS_SELECTOR, "input.password-with-toggle")
)
)
password_input.send_keys(password)
password_input.submit()
# Wait until the page is loaded
WebDriverWait(self.driver, TIMEOUT).until(
ec.presence_of_element_located(
(By.CSS_SELECTOR, "input.eEwVDs")
)
)
        # Copy the ChurchSSO session cookie from the browser into the
        # requests session so later API calls are authenticated.
        cookies = self.driver.get_cookies()
        potential_cookie = [c for c in cookies if "ChurchSSO" in c['name']]
        if not potential_cookie:
            raise InvalidCredentialsError("Login failed: no ChurchSSO cookie found")
        churchcookie = potential_cookie[0]['value']
        self.session.cookies['ChurchSSO'] = churchcookie
self.driver.close()
self.driver.quit()
def _make_request(self, request):
if self.beta:
request['cookies'] = {'clerk-resources-beta-terms': '4.1',
'clerk-resources-beta-eula': '4.2'}
response = self.session.get(**request)
        response.raise_for_status()  # raise on any 4xx/5xx response
return response
def birthday_list(self, month, months=1):
_LOGGER.info("Getting birthday list")
request = {
'url': 'https://{}/services/report/birthday-list'.format(
LCR_DOMAIN
),
'params': {
'lang': 'eng',
'month': month,
'months': months
}
}
result = self._make_request(request)
return result.json()
def members_moved_in(self, months):
_LOGGER.info("Getting members moved in")
request = {'url': 'https://{}/services/report/members-moved-in/unit/{}/{}'.format(LCR_DOMAIN,
self.unit_number,
months),
'params': {'lang': 'eng'}}
result = self._make_request(request)
return result.json()
def members_moved_out(self, months):
_LOGGER.info("Getting members moved out")
request = {'url': 'https://{}/services/report/members-moved-out/unit/{}/{}'.format(LCR_DOMAIN,
self.unit_number,
months),
'params': {'lang': 'eng'}}
result = self._make_request(request)
return result.json()
def member_list(self):
_LOGGER.info("Getting member list")
request = {'url': 'https://{}/services/umlu/report/member-list'.format(LCR_DOMAIN),
'params': {'lang': 'eng',
'unitNumber': self.unit_number}}
result = self._make_request(request)
return result.json()
def individual_photo(self, member_id):
"""
        member_id is not the same as the MRN (membership record number)
"""
_LOGGER.info("Getting photo for {}".format(member_id))
request = {'url': 'https://{}/individual-photo/{}'.format(LCR_DOMAIN, member_id),
'params': {'lang': 'eng',
'status': 'APPROVED'}}
result = self._make_request(request)
scdn_url = result.json()['tokenUrl']
return self._make_request({'url': scdn_url}).content
def callings(self):
_LOGGER.info("Getting callings for all organizations")
request = {'url': 'https://{}/services/orgs/sub-orgs-with-callings'.format(LCR_DOMAIN),
'params': {'lang': 'eng'}}
result = self._make_request(request)
return result.json()
def members_with_callings_list(self):
_LOGGER.info("Getting callings for all organizations")
request = {'url': 'https://{}/services/report/members-with-callings'.format(LCR_DOMAIN),
'params': {'lang': 'eng'}}
result = self._make_request(request)
return result.json()
def ministering(self):
"""
        Known accepted API parameters are: lang, type, unitNumber, and quarter.
"""
_LOGGER.info("Getting ministering data")
request = {'url': 'https://{}/services/umlu/v1/ministering/data-full'.format(LCR_DOMAIN),
'params': {'lang': 'eng',
'unitNumber': self.unit_number}}
result = self._make_request(request)
return result.json()
def access_table(self):
"""
        Once the user's role id is known, this table could be checked to selectively enable or disable methods for API endpoints.
"""
_LOGGER.info("Getting info for data access")
request = {'url': 'https://{}/services/access-table'.format(LCR_DOMAIN),
'params': {'lang': 'eng'}}
result = self._make_request(request)
return result.json()
def recommend_status(self):
"""
Obtain member information on recommend status
"""
_LOGGER.info("Getting recommend status")
request = {
'url': 'https://{}/services/recommend/recommend-status'.format(LCR_DOMAIN),
'params': {
'lang': 'eng',
'unitNumber': self.unit_number
}
}
result = self._make_request(request)
return result.json()
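# Usage sketch (the credentials and unit number below are placeholders; note
# that constructing API logs in immediately via a headless Chrome session):
#
#     api = API("example_user", "example_password", unit_number=123456)
#     for entry in api.birthday_list(month=1, months=2):
#         print(entry)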
|
PypiClean
|
/taskcc-alipay-sdk-python-3.3.398.tar.gz/taskcc-alipay-sdk-python-3.3.398/alipay/aop/api/domain/AlipayUserBenefitCreateModel.py
|
import json
from alipay.aop.api.constant.ParamConstants import *
class AlipayUserBenefitCreateModel(object):
def __init__(self):
self._benefit_area_code = None
self._benefit_icon_url = None
self._benefit_name = None
self._benefit_name_as_area_subtitle = None
self._benefit_page_url = None
self._benefit_point = None
self._benefit_rec_biz_id = None
self._benefit_rec_type = None
self._benefit_subtitle = None
self._camp_id = None
self._eligible_grade = None
self._end_time = None
self._exchange_rule_ids = None
self._grade_discount = None
self._start_time = None
@property
def benefit_area_code(self):
return self._benefit_area_code
@benefit_area_code.setter
def benefit_area_code(self, value):
self._benefit_area_code = value
@property
def benefit_icon_url(self):
return self._benefit_icon_url
@benefit_icon_url.setter
def benefit_icon_url(self, value):
self._benefit_icon_url = value
@property
def benefit_name(self):
return self._benefit_name
@benefit_name.setter
def benefit_name(self, value):
self._benefit_name = value
@property
def benefit_name_as_area_subtitle(self):
return self._benefit_name_as_area_subtitle
@benefit_name_as_area_subtitle.setter
def benefit_name_as_area_subtitle(self, value):
self._benefit_name_as_area_subtitle = value
@property
def benefit_page_url(self):
return self._benefit_page_url
@benefit_page_url.setter
def benefit_page_url(self, value):
self._benefit_page_url = value
@property
def benefit_point(self):
return self._benefit_point
@benefit_point.setter
def benefit_point(self, value):
self._benefit_point = value
@property
def benefit_rec_biz_id(self):
return self._benefit_rec_biz_id
@benefit_rec_biz_id.setter
def benefit_rec_biz_id(self, value):
self._benefit_rec_biz_id = value
@property
def benefit_rec_type(self):
return self._benefit_rec_type
@benefit_rec_type.setter
def benefit_rec_type(self, value):
self._benefit_rec_type = value
@property
def benefit_subtitle(self):
return self._benefit_subtitle
@benefit_subtitle.setter
def benefit_subtitle(self, value):
self._benefit_subtitle = value
@property
def camp_id(self):
return self._camp_id
@camp_id.setter
def camp_id(self, value):
self._camp_id = value
@property
def eligible_grade(self):
return self._eligible_grade
@eligible_grade.setter
def eligible_grade(self, value):
self._eligible_grade = value
@property
def end_time(self):
return self._end_time
@end_time.setter
def end_time(self, value):
self._end_time = value
@property
def exchange_rule_ids(self):
return self._exchange_rule_ids
@exchange_rule_ids.setter
def exchange_rule_ids(self, value):
self._exchange_rule_ids = value
@property
def grade_discount(self):
return self._grade_discount
@grade_discount.setter
def grade_discount(self, value):
self._grade_discount = value
@property
def start_time(self):
return self._start_time
@start_time.setter
def start_time(self, value):
self._start_time = value
def to_alipay_dict(self):
params = dict()
if self.benefit_area_code:
if hasattr(self.benefit_area_code, 'to_alipay_dict'):
params['benefit_area_code'] = self.benefit_area_code.to_alipay_dict()
else:
params['benefit_area_code'] = self.benefit_area_code
if self.benefit_icon_url:
if hasattr(self.benefit_icon_url, 'to_alipay_dict'):
params['benefit_icon_url'] = self.benefit_icon_url.to_alipay_dict()
else:
params['benefit_icon_url'] = self.benefit_icon_url
if self.benefit_name:
if hasattr(self.benefit_name, 'to_alipay_dict'):
params['benefit_name'] = self.benefit_name.to_alipay_dict()
else:
params['benefit_name'] = self.benefit_name
if self.benefit_name_as_area_subtitle:
if hasattr(self.benefit_name_as_area_subtitle, 'to_alipay_dict'):
params['benefit_name_as_area_subtitle'] = self.benefit_name_as_area_subtitle.to_alipay_dict()
else:
params['benefit_name_as_area_subtitle'] = self.benefit_name_as_area_subtitle
if self.benefit_page_url:
if hasattr(self.benefit_page_url, 'to_alipay_dict'):
params['benefit_page_url'] = self.benefit_page_url.to_alipay_dict()
else:
params['benefit_page_url'] = self.benefit_page_url
if self.benefit_point:
if hasattr(self.benefit_point, 'to_alipay_dict'):
params['benefit_point'] = self.benefit_point.to_alipay_dict()
else:
params['benefit_point'] = self.benefit_point
if self.benefit_rec_biz_id:
if hasattr(self.benefit_rec_biz_id, 'to_alipay_dict'):
params['benefit_rec_biz_id'] = self.benefit_rec_biz_id.to_alipay_dict()
else:
params['benefit_rec_biz_id'] = self.benefit_rec_biz_id
if self.benefit_rec_type:
if hasattr(self.benefit_rec_type, 'to_alipay_dict'):
params['benefit_rec_type'] = self.benefit_rec_type.to_alipay_dict()
else:
params['benefit_rec_type'] = self.benefit_rec_type
if self.benefit_subtitle:
if hasattr(self.benefit_subtitle, 'to_alipay_dict'):
params['benefit_subtitle'] = self.benefit_subtitle.to_alipay_dict()
else:
params['benefit_subtitle'] = self.benefit_subtitle
if self.camp_id:
if hasattr(self.camp_id, 'to_alipay_dict'):
params['camp_id'] = self.camp_id.to_alipay_dict()
else:
params['camp_id'] = self.camp_id
if self.eligible_grade:
if hasattr(self.eligible_grade, 'to_alipay_dict'):
params['eligible_grade'] = self.eligible_grade.to_alipay_dict()
else:
params['eligible_grade'] = self.eligible_grade
if self.end_time:
if hasattr(self.end_time, 'to_alipay_dict'):
params['end_time'] = self.end_time.to_alipay_dict()
else:
params['end_time'] = self.end_time
if self.exchange_rule_ids:
if hasattr(self.exchange_rule_ids, 'to_alipay_dict'):
params['exchange_rule_ids'] = self.exchange_rule_ids.to_alipay_dict()
else:
params['exchange_rule_ids'] = self.exchange_rule_ids
if self.grade_discount:
if hasattr(self.grade_discount, 'to_alipay_dict'):
params['grade_discount'] = self.grade_discount.to_alipay_dict()
else:
params['grade_discount'] = self.grade_discount
if self.start_time:
if hasattr(self.start_time, 'to_alipay_dict'):
params['start_time'] = self.start_time.to_alipay_dict()
else:
params['start_time'] = self.start_time
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AlipayUserBenefitCreateModel()
if 'benefit_area_code' in d:
o.benefit_area_code = d['benefit_area_code']
if 'benefit_icon_url' in d:
o.benefit_icon_url = d['benefit_icon_url']
if 'benefit_name' in d:
o.benefit_name = d['benefit_name']
if 'benefit_name_as_area_subtitle' in d:
o.benefit_name_as_area_subtitle = d['benefit_name_as_area_subtitle']
if 'benefit_page_url' in d:
o.benefit_page_url = d['benefit_page_url']
if 'benefit_point' in d:
o.benefit_point = d['benefit_point']
if 'benefit_rec_biz_id' in d:
o.benefit_rec_biz_id = d['benefit_rec_biz_id']
if 'benefit_rec_type' in d:
o.benefit_rec_type = d['benefit_rec_type']
if 'benefit_subtitle' in d:
o.benefit_subtitle = d['benefit_subtitle']
if 'camp_id' in d:
o.camp_id = d['camp_id']
if 'eligible_grade' in d:
o.eligible_grade = d['eligible_grade']
if 'end_time' in d:
o.end_time = d['end_time']
if 'exchange_rule_ids' in d:
o.exchange_rule_ids = d['exchange_rule_ids']
if 'grade_discount' in d:
o.grade_discount = d['grade_discount']
if 'start_time' in d:
o.start_time = d['start_time']
return o
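# Usage sketch (field values below are illustrative placeholders):
#
#     model = AlipayUserBenefitCreateModel()
#     model.benefit_name = "Example benefit"
#     model.benefit_point = "100"
#     params = model.to_alipay_dict()
#     restored = AlipayUserBenefitCreateModel.from_alipay_dict(params)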
|
PypiClean
|
/yandexcloud-0.229.0-py3-none-any.whl/yandex/cloud/compute/v1/placement_group_service_pb2_grpc.py
|
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from yandex.cloud.compute.v1 import placement_group_pb2 as yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__pb2
from yandex.cloud.compute.v1 import placement_group_service_pb2 as yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2
from yandex.cloud.operation import operation_pb2 as yandex_dot_cloud_dot_operation_dot_operation__pb2
class PlacementGroupServiceStub(object):
"""A set of methods for managing placement groups.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Get = channel.unary_unary(
'/yandex.cloud.compute.v1.PlacementGroupService/Get',
request_serializer=yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.GetPlacementGroupRequest.SerializeToString,
response_deserializer=yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__pb2.PlacementGroup.FromString,
)
self.List = channel.unary_unary(
'/yandex.cloud.compute.v1.PlacementGroupService/List',
request_serializer=yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.ListPlacementGroupsRequest.SerializeToString,
response_deserializer=yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.ListPlacementGroupsResponse.FromString,
)
self.Create = channel.unary_unary(
'/yandex.cloud.compute.v1.PlacementGroupService/Create',
request_serializer=yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.CreatePlacementGroupRequest.SerializeToString,
response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
)
self.Update = channel.unary_unary(
'/yandex.cloud.compute.v1.PlacementGroupService/Update',
request_serializer=yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.UpdatePlacementGroupRequest.SerializeToString,
response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
)
self.Delete = channel.unary_unary(
'/yandex.cloud.compute.v1.PlacementGroupService/Delete',
request_serializer=yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.DeletePlacementGroupRequest.SerializeToString,
response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
)
self.ListInstances = channel.unary_unary(
'/yandex.cloud.compute.v1.PlacementGroupService/ListInstances',
request_serializer=yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.ListPlacementGroupInstancesRequest.SerializeToString,
response_deserializer=yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.ListPlacementGroupInstancesResponse.FromString,
)
self.ListOperations = channel.unary_unary(
'/yandex.cloud.compute.v1.PlacementGroupService/ListOperations',
request_serializer=yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.ListPlacementGroupOperationsRequest.SerializeToString,
response_deserializer=yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.ListPlacementGroupOperationsResponse.FromString,
)
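# Usage sketch (the endpoint address and placement group id are placeholders;
# real Yandex Cloud calls also need channel credentials and an auth token):
#
#     channel = grpc.insecure_channel("example.endpoint:443")
#     stub = PlacementGroupServiceStub(channel)
#     request = yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.GetPlacementGroupRequest(
#         placement_group_id="example-id",
#     )
#     placement_group = stub.Get(request)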
class PlacementGroupServiceServicer(object):
"""A set of methods for managing placement groups.
"""
def Get(self, request, context):
"""Returns the specified placement group.
To get the list of all available placement groups, make a [List] request.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def List(self, request, context):
"""Retrieves the list of placement groups in the specified folder.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Create(self, request, context):
"""Creates a placement group in the specified folder.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Update(self, request, context):
"""Updates the specified placement group.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Delete(self, request, context):
"""Deletes the specified placement group.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListInstances(self, request, context):
"""Lists instances for the specified placement group.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListOperations(self, request, context):
"""Lists operations for the specified placement group.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_PlacementGroupServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'Get': grpc.unary_unary_rpc_method_handler(
servicer.Get,
request_deserializer=yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.GetPlacementGroupRequest.FromString,
response_serializer=yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__pb2.PlacementGroup.SerializeToString,
),
'List': grpc.unary_unary_rpc_method_handler(
servicer.List,
request_deserializer=yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.ListPlacementGroupsRequest.FromString,
response_serializer=yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.ListPlacementGroupsResponse.SerializeToString,
),
'Create': grpc.unary_unary_rpc_method_handler(
servicer.Create,
request_deserializer=yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.CreatePlacementGroupRequest.FromString,
response_serializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.SerializeToString,
),
'Update': grpc.unary_unary_rpc_method_handler(
servicer.Update,
request_deserializer=yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.UpdatePlacementGroupRequest.FromString,
response_serializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.SerializeToString,
),
'Delete': grpc.unary_unary_rpc_method_handler(
servicer.Delete,
request_deserializer=yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.DeletePlacementGroupRequest.FromString,
response_serializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.SerializeToString,
),
'ListInstances': grpc.unary_unary_rpc_method_handler(
servicer.ListInstances,
request_deserializer=yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.ListPlacementGroupInstancesRequest.FromString,
response_serializer=yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.ListPlacementGroupInstancesResponse.SerializeToString,
),
'ListOperations': grpc.unary_unary_rpc_method_handler(
servicer.ListOperations,
request_deserializer=yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.ListPlacementGroupOperationsRequest.FromString,
response_serializer=yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.ListPlacementGroupOperationsResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'yandex.cloud.compute.v1.PlacementGroupService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class PlacementGroupService(object):
"""A set of methods for managing placement groups.
"""
@staticmethod
def Get(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/yandex.cloud.compute.v1.PlacementGroupService/Get',
yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.GetPlacementGroupRequest.SerializeToString,
yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__pb2.PlacementGroup.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def List(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/yandex.cloud.compute.v1.PlacementGroupService/List',
yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.ListPlacementGroupsRequest.SerializeToString,
yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.ListPlacementGroupsResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Create(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/yandex.cloud.compute.v1.PlacementGroupService/Create',
yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.CreatePlacementGroupRequest.SerializeToString,
yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Update(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/yandex.cloud.compute.v1.PlacementGroupService/Update',
yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.UpdatePlacementGroupRequest.SerializeToString,
yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Delete(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/yandex.cloud.compute.v1.PlacementGroupService/Delete',
yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.DeletePlacementGroupRequest.SerializeToString,
yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ListInstances(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/yandex.cloud.compute.v1.PlacementGroupService/ListInstances',
yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.ListPlacementGroupInstancesRequest.SerializeToString,
yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.ListPlacementGroupInstancesResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ListOperations(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/yandex.cloud.compute.v1.PlacementGroupService/ListOperations',
yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.ListPlacementGroupOperationsRequest.SerializeToString,
yandex_dot_cloud_dot_compute_dot_v1_dot_placement__group__service__pb2.ListPlacementGroupOperationsResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
|
PypiClean
|
/django-rbac-auth-0.0.2.tar.gz/django-rbac-auth-0.0.2/django_rbac/migrations/0001_initial.py
|
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Permission',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_delete', models.BooleanField(default=False, verbose_name='is deleted')),
('order', models.PositiveIntegerField(verbose_name='order (larger values sort first)')),
('create_time', models.DateTimeField(auto_now_add=True, verbose_name='create time')),
('update_time', models.DateTimeField(auto_now=True, verbose_name='update time')),
('name', models.CharField(max_length=255, verbose_name='name')),
('description', models.CharField(max_length=500, verbose_name='description')),
('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='django_rbac.Permission', verbose_name='parent')),
],
options={
'verbose_name': 'permission',
'verbose_name_plural': 'permission',
},
),
migrations.CreateModel(
name='Role',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_delete', models.BooleanField(default=False, verbose_name='is deleted')),
('order', models.PositiveIntegerField(verbose_name='order (larger values sort first)')),
('create_time', models.DateTimeField(auto_now_add=True, verbose_name='create time')),
('update_time', models.DateTimeField(auto_now=True, verbose_name='update time')),
('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='django_rbac.Role', verbose_name='parent')),
('permissions', models.ManyToManyField(to='django_rbac.Permission', verbose_name='permissions')),
],
options={
'verbose_name': 'role',
'verbose_name_plural': 'role',
},
),
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_delete', models.BooleanField(default=False, verbose_name='is deleted')),
('order', models.PositiveIntegerField(verbose_name='order (larger values sort first)')),
('create_time', models.DateTimeField(auto_now_add=True, verbose_name='create time')),
('update_time', models.DateTimeField(auto_now=True, verbose_name='update time')),
('permissions', models.ManyToManyField(to='django_rbac.Permission', verbose_name='permissions')),
('roles', models.ManyToManyField(to='django_rbac.Role', verbose_name='roles')),
],
options={
'verbose_name': 'user',
'verbose_name_plural': 'user',
},
),
migrations.CreateModel(
name='RouteController',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_delete', models.BooleanField(default=False, verbose_name='is deleted')),
('order', models.PositiveIntegerField(verbose_name='order (larger values sort first)')),
('create_time', models.DateTimeField(auto_now_add=True, verbose_name='create time')),
('update_time', models.DateTimeField(auto_now=True, verbose_name='update time')),
('route', models.CharField(max_length=255, verbose_name='route')),
('permission', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='django_rbac.Permission', verbose_name='permission')),
],
options={
'verbose_name': 'route controller',
'verbose_name_plural': 'route controller',
},
),
migrations.CreateModel(
name='Group',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_delete', models.BooleanField(default=False, verbose_name='is deleted')),
('order', models.PositiveIntegerField(verbose_name='order (larger values sort first)')),
('create_time', models.DateTimeField(auto_now_add=True, verbose_name='create time')),
('update_time', models.DateTimeField(auto_now=True, verbose_name='update time')),
('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='django_rbac.Group', verbose_name='parent')),
('permissions', models.ManyToManyField(to='django_rbac.Permission', verbose_name='permissions')),
('roles', models.ManyToManyField(to='django_rbac.Role', verbose_name='roles')),
('users', models.ManyToManyField(to='django_rbac.User', verbose_name='users')),
],
options={
'verbose_name': 'group',
'verbose_name_plural': 'group',
},
),
]
|
PypiClean
|
/AGouTI-1.0.3.tar.gz/AGouTI-1.0.3/agouti_pkg/agouti_annotate.py
|
import os
import sys
import codecs
from agouti_pkg.eprint import eprint
import agouti_pkg.gffutils
import agouti_pkg.gffutils.inspect as inspect
from operator import attrgetter
from agouti_pkg.database import Database
from agouti_pkg.argument_parser import (parse_arguments,
create_attributes_and_features_dict)
from agouti_pkg.sequence_ontology import *
from agouti_pkg.processing_product import ProcessingProduct
from agouti_pkg.miscallaneous import *
from agouti_pkg.read_input import *
from agouti_pkg.header import *
from agouti_pkg.output_processing import prepare_output
def main(args):
"""Main function
Arguments:
args {argparse.Namespace} -- command line arguments parsed with\
argparse
"""
chromosomes_not_found = set() ## chromosomes in the input file but not found in the reference annotations
chromosomes_found = set() ## chromosomes in the input file and found in the reference annotations
num_of_bed_fields = -1
lengths_dict = {} # stores length of UTRs and CDS of a given transcript
try:
db = Database(agouti_pkg.gffutils.FeatureDB(args.database, keep_order=False),
args.database) # reading the database from file
except ValueError:
eprint("ERROR: the database provided does not exists")
sys.exit()
# list all featuretypes existing in db
featuretypes_from_db = list(db.database.featuretypes())
try:
attr_and_feat = create_attributes_and_features_dict # abbr
attributes_and_features = attr_and_feat(db, args, featuretypes_from_db)
except IndexError:
eprint("ERROR: arguments provided with --combine, --select_attributes or --select_features are incorrect")
sys.exit()
try:
if (args.custom == "BED"):
try:
products, num_of_bed_fields = read_BED_file(args.bed,
num_of_bed_fields,
args.first_base_num, args.header_lines)
except FileNotFoundError:
eprint("ERROR: the input file does not exists")
sys.exit()
else:
try:
products, num_of_bed_fields = read_custom_format(args.bed, args.custom,
args.sep,
args.first_base_num,
num_of_bed_fields, args.header_lines)
except FileNotFoundError:
eprint("ERROR: the input file does not exists")
sys.exit()
except IndexError:
eprint("ERROR: the input file has the wrong format")
sys.exit()
header = prepare_header(db, attributes_and_features, args,
num_of_bed_fields, args.header_lines)
whole_output = f"{header[0].strip()}"
for key, value in products.items():
if (args.transcriptomic):
try:
id = value.coordinates[0]
overlapping_features = [db.database[id]]
g2t, cds_start = value.genomic_to_transcriptomic(
db, featuretypes_from_db)
lengths_dict[value.coordinates[0]] = g2t
out = prepare_output(lengths_dict, header, args,
attributes_and_features, db, value,
overlapping_features, g2t, cds_start)
whole_output = f"{whole_output}\n{out}"
except (agouti_pkg.gffutils.exceptions.FeatureNotFoundError):
eprint("WARNING: Couldn't find transcript {}. Please make sure that it exists in your GTF/GFF3 file.".format(id))
else:
region = value.coordinates
## test whether chromosome is present in annotations
if value.coordinates[0] not in chromosomes_found:
if len(list(db.database.region(seqid=value.coordinates[0]))) == 0:
chromosomes_not_found.add(value.coordinates[0])
else:
chromosomes_found.add(value.coordinates[0])
##
if (not args.strand_specific):
overlapping_features = list(
db.database.region(region=region,
featuretype=attributes_and_features.keys(),
completely_within=False))
elif(args.strand_specific):
overlapping_features = list(db.database.region(region=region,
strand=value.strand,
featuretype=attributes_and_features.keys(),
completely_within=False))
out = prepare_output(lengths_dict, header, args,
attributes_and_features, db, value,
overlapping_features, region)
whole_output = f"{whole_output}\n{out}"
if args.statistics or args.stats_only:
statistics(whole_output)
if not args.stats_only:
print(whole_output)
if len(chromosomes_not_found):
eprint(f"WARNING: The following chromosomes were not found in the annotations: {', '.join(list(chromosomes_not_found))}")
return
if __name__ == "__main__":
sys.exit(main(sys.argv))
|
PypiClean
|
/msaf_test-0.1.70-py3-none-any.whl/msaf/pymf/aa.py
|
import numpy as np
from .dist import vq
from cvxopt import solvers, base
from .svd import pinv
from .nmf import NMF
__all__ = ["AA"]
class AA(NMF):
"""
AA(data, num_bases=4)
Archetypal Analysis. Factorize a data matrix into two matrices s.t.
F = | data - W*H | = | data - data*beta*H| is minimal. H and beta
are restricted to convexity (beta >=0, sum(beta, axis=1) = [1 .. 1]).
Factorization is solved via an alternating least squares optimization
using the quadratic programming solver from cvxopt.
Parameters
----------
data : array_like, shape (_data_dimension, _num_samples)
the input data
num_bases: int, optional
Number of bases to compute (column rank of W and row rank of H).
4 (default)
Attributes
----------
W : "data_dimension x num_bases" matrix of basis vectors
H : "num bases x num_samples" matrix of coefficients
beta : "num_bases x num_samples" matrix of basis vector coefficients
(for constructing W s.t. W = beta * data.T )
ferr : frobenius norm (after calling .factorize())
Example
-------
Applying AA to some rather stupid data set:
>>> import numpy as np
>>> from aa import AA
>>> data = np.array([[1.0, 0.0, 2.0], [0.0, 1.0, 1.0]])
Use 2 basis vectors -> W shape(data_dimension, 2).
>>> aa_mdl = AA(data, num_bases=2)
Set number of iterations to 5 and start computing the factorization.
>>> aa_mdl.factorize(niter=5)
The basis vectors are now stored in aa_mdl.W, the coefficients in aa_mdl.H.
To compute coefficients for an existing set of basis vectors simply copy W
to aa_mdl.W, and set compute_w to False:
>>> data = np.array([[1.5], [1.2]])
>>> W = np.array([[1.0, 0.0], [0.0, 1.0]])
>>> aa_mdl = AA(data, num_bases=2)
>>> aa_mdl.W = W
>>> aa_mdl.factorize(niter=5, compute_w=False)
The result is a set of coefficients aa_mdl.H, s.t. data = W * aa_mdl.H.
"""
# set cvxopt options
solvers.options['show_progress'] = False
def init_h(self):
self.H = np.random.random((self._num_bases, self._num_samples))
self.H /= self.H.sum(axis=0)
def init_w(self):
self.beta = np.random.random((self._num_bases, self._num_samples))
self.beta /= self.beta.sum(axis=0)
self.W = np.dot(self.beta, self.data.T).T
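# NOTE: the next line re-initializes W with random values, discarding the convex combination computed above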
self.W = np.random.random((self._data_dimension, self._num_bases))
def update_h(self):
""" alternating least squares step, update H under the convexity
constraint """
def update_single_h(i):
""" compute single H[:,i] """
# optimize alpha using qp solver from cvxopt
FA = base.matrix(np.float64(np.dot(-self.W.T, self.data[:,i])))
al = solvers.qp(HA, FA, INQa, INQb, EQa, EQb)
self.H[:,i] = np.array(al['x']).reshape((1, self._num_bases))
EQb = base.matrix(1.0, (1,1))
# float64 required for cvxopt
HA = base.matrix(np.float64(np.dot(self.W.T, self.W)))
INQa = base.matrix(-np.eye(self._num_bases))
INQb = base.matrix(0.0, (self._num_bases,1))
EQa = base.matrix(1.0, (1, self._num_bases))
for i in range(self._num_samples):
update_single_h(i)
def update_w(self):
""" alternating least squares step, update W under the convexity
constraint """
def update_single_w(i):
""" compute single W[:,i] """
# optimize beta using qp solver from cvxopt
FB = base.matrix(np.float64(np.dot(-self.data.T, W_hat[:,i])))
be = solvers.qp(HB, FB, INQa, INQb, EQa, EQb)
self.beta[i,:] = np.array(be['x']).reshape((1, self._num_samples))
# float64 required for cvxopt
HB = base.matrix(np.float64(np.dot(self.data[:,:].T, self.data[:,:])))
EQb = base.matrix(1.0, (1, 1))
W_hat = np.dot(self.data, pinv(self.H))
INQa = base.matrix(-np.eye(self._num_samples))
INQb = base.matrix(0.0, (self._num_samples, 1))
EQa = base.matrix(1.0, (1, self._num_samples))
for i in range(self._num_bases):
update_single_w(i)
self.W = np.dot(self.beta, self.data.T).T
if __name__ == "__main__":
import doctest
doctest.testmod()
|
PypiClean
|
/FreePyBX-1.0-RC1.tar.gz/FreePyBX-1.0-RC1/freepybx/public/js/dojox/drawing/tools/Path.js
|
define(["dijit","dojo","dojox"],function(_1,_2,_3){
_2.provide("dojox.drawing.tools.Path");
_3.drawing.tools.Path=_3.drawing.util.oo.declare(_3.drawing.stencil.Path,function(){
this.pathMode="";
this.currentPathMode="";
this._started=false;
this.oddEvenClicks=0;
},{draws:true,onDown:function(_4){
if(!this._started){
this.onStartPath(_4);
}
},makeSubPath:function(_5){
if(_5){
if(this.currentPathMode=="Q"){
this.points.push({x:this.points[0].x,y:this.points[0].y});
}
this.points.push({t:"Z"});
this.render();
}
this.currentPathMode="";
this.pathMode="M";
},onStartPath:function(_6){
this._started=true;
this.revertRenderHit=this.renderHit;
this.renderHit=false;
this.closePath=false;
this.mouse.setEventMode("PathEdit");
this.closePoint={x:_6.x,y:_6.y};
this._kc1=this.connect(this.keys,"onEsc",this,function(){
this.onCompletePath(false);
});
this._kc2=this.connect(this.keys,"onKeyUp",this,function(_7){
switch(_7.letter){
case "c":
this.onCompletePath(true);
break;
case "l":
this.pathMode="L";
break;
case "m":
this.makeSubPath(false);
break;
case "q":
this.pathMode="Q";
break;
case "s":
this.pathMode="S";
break;
case "z":
this.makeSubPath(true);
break;
}
});
},onCompletePath:function(_8){
this.remove(this.closeGuide,this.guide);
var _9=this.getBounds();
if(_9.w<this.minimumSize&&_9.h<this.minimumSize){
this.remove(this.hit,this.shape,this.closeGuide);
this._started=false;
this.mouse.setEventMode("");
this.setPoints([]);
return;
}
if(_8){
if(this.currentPathMode=="Q"){
this.points.push({x:this.points[0].x,y:this.points[0].y});
}
this.closePath=true;
}
this.renderHit=this.revertRenderHit;
this.renderedOnce=true;
this.onRender(this);
this.disconnect([this._kc1,this._kc2]);
this.mouse.setEventMode("");
this.render();
},onUp:function(_a){
if(!this._started||!_a.withinCanvas){
return;
}
if(this.points.length>2&&this.closeRadius>this.util.distance(_a.x,_a.y,this.closePoint.x,this.closePoint.y)){
this.onCompletePath(true);
}else{
var p={x:_a.x,y:_a.y};
this.oddEvenClicks++;
if(this.currentPathMode!=this.pathMode){
if(this.pathMode=="Q"){
p.t="Q";
this.oddEvenClicks=0;
}else{
if(this.pathMode=="L"){
p.t="L";
}else{
if(this.pathMode=="M"){
p.t="M";
this.closePoint={x:_a.x,y:_a.y};
}
}
}
this.currentPathMode=this.pathMode;
}
this.points.push(p);
if(this.points.length>1){
this.remove(this.guide);
this.render();
}
}
},createGuide:function(_b){
if(!this.points.length){
return;
}
var _c=[].concat(this.points);
var pt={x:_b.x,y:_b.y};
if(this.currentPathMode=="Q"&&this.oddEvenClicks%2){
pt.t="L";
}
this.points.push(pt);
this.render();
this.points=_c;
var _d=this.util.distance(_b.x,_b.y,this.closePoint.x,this.closePoint.y);
if(this.points.length>1){
if(_d<this.closeRadius&&!this.closeGuide){
var c={cx:this.closePoint.x,cy:this.closePoint.y,rx:this.closeRadius,ry:this.closeRadius};
this.closeGuide=this.container.createEllipse(c).setFill(this.closeColor);
}else{
if(_d>this.closeRadius&&this.closeGuide){
this.remove(this.closeGuide);
this.closeGuide=null;
}
}
}
},onMove:function(_e){
if(!this._started){
return;
}
this.createGuide(_e);
},onDrag:function(_f){
if(!this._started){
return;
}
this.createGuide(_f);
}});
_3.drawing.tools.Path.setup={name:"dojox.drawing.tools.Path",tooltip:"Path Tool",iconClass:"iconLine"};
_3.drawing.register(_3.drawing.tools.Path.setup,"tool");
});
|
PypiClean
|
/fnal-column-analysis-tools-0.4.23.tar.gz/fnal-column-analysis-tools-0.4.23/fnal_column_analysis_tools/jetmet_tools/JetTransformer.py
|
from .FactorizedJetCorrector import FactorizedJetCorrector
from .JetResolution import JetResolution
from .JetResolutionScaleFactor import JetResolutionScaleFactor
from .JetCorrectionUncertainty import JetCorrectionUncertainty
from ..analysis_objects.JaggedCandidateArray import JaggedCandidateArray
import numpy as np
from uproot_methods import TLorentzVectorArray
from copy import deepcopy
from pdb import set_trace
_signature_map = {'JetPt': 'pt',
'JetEta': 'eta',
'Rho': 'rho',
'JetA': 'area'
}
def _update_jet_ptm(corr, jet, fromRaw=False):
"""
This is a hack to update the jet pt and jet mass in place
as we apply corrections and smearings.
"""
if fromRaw:
jet._content._contents['__fast_pt'] = corr * jet.ptRaw.content
jet._content._contents['__fast_mass'] = corr * jet.massRaw.content
else:
jet._content._contents['__fast_pt'] = corr * jet.pt.content
jet._content._contents['__fast_mass'] = corr * jet.mass.content
# the class below does use hacks of JaggedCandidateArray to achieve the desired behavior
# no monkey patches though
class JetTransformer(object):
"""
This class is a columnar implementation of the standard recipes for applying JECs, and
the various scale factors and uncertainties therein.
- Only the stochastic smearing method is implemented at the moment.
It uses the FactorizedJetCorrector, JetResolution, JetResolutionScaleFactor, and
JetCorrectionUncertainty classes to calculate the ingredients for the final updated jet
object, which will be modified in place.
The jet object must be a "JaggedCandidateArray" and have the additional properties:
- ptRaw
- massRaw
These will be used to reset the jet pT and mass, and then calculate the updated pTs and
masses for various corrections and smearings.
You can use this class like:
xformer = JetTransformer(name1=corrL1,...)
xformer.transform(jet)
"""
def __init__(self, jec=None, junc=None, jer=None, jersf=None):
if jec is None:
raise Exception('JetTransformer must have "jec" specified as an argument!')
if not isinstance(jec, FactorizedJetCorrector):
raise Exception('JetTransformer needs a FactorizedJetCorrector passed as "jec"' +
' got object of type {}'.format(type(jec)))
self._jec = jec
if junc is not None and not isinstance(junc, JetCorrectionUncertainty):
raise Exception('"junc" must be of type "JetCorrectionUncertainty"' +
' got {}'.format(type(junc)))
self._junc = junc
if (jer is None) != (jersf is None):
raise Exception('Cannot apply JER-SF without an input JER, and vice-versa!')
if jer is not None and not isinstance(jer, JetResolution):
raise Exception('"jer" must be of type "JetResolution"' +
' got {}'.format(type(jer)))
self._jer = jer
if jersf is not None and not isinstance(jersf, JetResolutionScaleFactor):
raise Exception('"jersf" must be of type "JetResolutionScaleFactor"' +
' got {}'.format(type(jersf)))
self._jersf = jersf
@property
def uncertainties(self):
return self._junc.levels if self._junc is not None else []
def transform(self, jet, met=None):
"""
precondition - jet is a JaggedCandidateArray with additional attributes:
- 'ptRaw'
- 'massRaw'
xformer = JetTransformer(name1=corrL1,...)
xformer.transform(jet)
postcondition - jet.pt, jet.mass, jet.p4 are updated to represent the corrected jet
based on the input correction set
"""
if not isinstance(jet, JaggedCandidateArray):
raise Exception('Input data must be a JaggedCandidateArray!')
if ('ptRaw' not in jet.columns or 'massRaw' not in jet.columns):
raise Exception('Input JaggedCandidateArray must have "ptRaw" & "massRaw"!')
if met is not None:
if 'p4' not in met.columns:
raise Exception('Input met must have a p4 column!')
if not isinstance(met['p4'], TLorentzVectorArray):
raise Exception('Met p4 must be a TLorentzVectorArray!')
initial_p4 = jet['p4'].copy() # keep a copy for fixing met
# initialize the jet momenta to raw values
_update_jet_ptm(1.0, jet, fromRaw=True)
# below we work in numpy arrays, JaggedCandidateArray knows how to convert them
args = {key: getattr(jet, _signature_map[key]).content for key in self._jec.signature}
jec = self._jec.getCorrection(**args)
_update_jet_ptm(jec, jet, fromRaw=True)
juncs = None
if self._junc is not None:
args = {key: getattr(jet, _signature_map[key]).content for key in self._junc.signature}
juncs = self._junc.getUncertainty(**args)
# if there's a jer and sf to apply we have to update the momentum too
# right now only use stochastic smearing
if self._jer is not None and self._jersf is not None:
args = {key: getattr(jet, _signature_map[key]).content for key in self._jer.signature}
jer = self._jer.getResolution(**args)
args = {key: getattr(jet, _signature_map[key]).content for key in self._jersf.signature}
jersf = self._jersf.getScaleFactor(**args)
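# stochastic smearing: shift by N(0, jer) scaled by sqrt(sf^2 - 1); assumes sf >= 1, otherwise the sqrt is NaN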
jersmear = jer * np.random.normal(size=jer.size)
jsmear_cen = 1. + np.sqrt(jersf[:, 0]**2 - 1.0) * jersmear
jsmear_up = 1. + np.sqrt(jersf[:, 1]**2 - 1.0) * jersmear
jsmear_down = 1. + np.sqrt(jersf[:, -1]**2 - 1.0) * jersmear
# need to apply up and down jer-smear before applying central correction
jet.add_attributes(pt_jer_up=jsmear_up * jet.pt.content,
mass_jer_up=jsmear_up * jet.mass.content,
pt_jer_down=jsmear_down * jet.pt.content,
mass_jer_down=jsmear_down * jet.mass.content)
# finally, update the central value
_update_jet_ptm(jsmear_cen, jet)
# have to apply central jersf before calculating junc
if self._junc is not None:
for name, values in juncs:
jet.add_attributes(**{
'pt_{0}_up'.format(name): values[:, 0] * jet.pt.content,
'mass_{0}_up'.format(name): values[:, 0] * jet.mass.content,
'pt_{0}_down'.format(name): values[:, 1] * jet.pt.content,
'mass_{0}_down'.format(name): values[:, 1] * jet.mass.content
})
# hack to update the jet p4, we have the fully updated pt and mass here
jet._content._contents['p4'] = TLorentzVectorArray.from_ptetaphim(jet.pt.content,
jet.eta.content,
jet.phi.content,
jet.mass.content)
if met is None:
return
# set MET values
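# propagate the jet correction to MET by subtracting the summed change (initial - corrected) in jet px/py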
new_x = met['p4'].x - (initial_p4.x - jet['p4'].x).sum()
new_y = met['p4'].y - (initial_p4.y - jet['p4'].y).sum()
met.base['p4'] = TLorentzVectorArray.from_ptetaphim(
np.sqrt(new_x**2 + new_y**2), 0,
np.arctan2(new_y, new_x), 0
)
if 'MetUnclustEnUpDeltaX' in met.columns:
px_up = met['p4'].x + met['MetUnclustEnUpDeltaX']
py_up = met['p4'].y + met['MetUnclustEnUpDeltaY']
met.base['pt_UnclustEn_up'] = np.sqrt(px_up**2 + py_up**2)
met.base['phi_UnclustEn_up'] = np.arctan2(py_up, px_up)
px_down = met['p4'].x - met['MetUnclustEnUpDeltaX']
py_down = met['p4'].y - met['MetUnclustEnUpDeltaY']
met.base['pt_UnclustEn_down'] = np.sqrt(px_down**2 + py_down**2)
met.base['phi_UnclustEn_down'] = np.arctan2(py_down, px_down)
if self._junc is not None:
jets_sin = np.sin(jet['p4'].phi)
jets_cos = np.cos(jet['p4'].phi)
for name, _ in juncs:
for shift in ['up', 'down']:
px = met['p4'].x - (initial_p4.x - jet['pt_{0}_{1}'.format(name, shift)] * jets_cos).sum()
py = met['p4'].y - (initial_p4.y - jet['pt_{0}_{1}'.format(name, shift)] * jets_sin).sum()
met.base['pt_{0}_{1}'.format(name, shift)] = np.sqrt(px**2 + py**2)
met.base['phi_{0}_{1}'.format(name, shift)] = np.arctan2(py, px)
|
PypiClean
|
/experimaestro-ir-1.0.0.zip/experimaestro-ir-1.0.0/src/xpmir/letor/learner.py
|
import logging
import json
from pathlib import Path
from typing import Dict, Iterator
from datamaestro_text.data.ir import Adhoc
from experimaestro import Param, pathgenerator, Annotated
import numpy as np
from xpmir.utils.utils import easylog
from xpmir.evaluation import evaluate
from xpmir.learning.context import (
TrainState,
TrainerContext,
)
from xpmir.rankers import (
Retriever,
)
from xpmir.learning.learner import LearnerListener, Learner, LearnerListenerStatus
from xpmir.learning.optim import ModuleLoader
logger = easylog()
class ValidationListener(LearnerListener):
"""Learning validation early-stopping
Computes a validation metric and stores the best result. If early_stop is
set (> 0), then it signals to the learner that the learning process can
stop.
"""
metrics: Param[Dict[str, bool]] = {"map": True}
"""Dictionary whose keys are the metrics to record, and boolean
values whether the best performance checkpoint should be kept for
the associated metric ([parseable by ir-measures](https://ir-measur.es/))"""
dataset: Param[Adhoc]
"""The dataset to use"""
retriever: Param[Retriever]
"""The retriever for validation"""
warmup: Param[int] = -1
"""How many epochs before actually computing the metric"""
bestpath: Annotated[Path, pathgenerator("best")]
"""Path to the best checkpoints"""
info: Annotated[Path, pathgenerator("info.json")]
"""Path to the JSON file that contains the metric values at each epoch"""
validation_interval: Param[int] = 1
"""Epochs between each validation"""
early_stop: Param[int] = 0
"""Number of epochs without improvement after which we stop learning.
Should be a multiple of validation_interval or 0 (no early stopping)"""
def __validate__(self):
assert (
self.early_stop % self.validation_interval == 0
), "Early stop should be a multiple of the validation interval"
def initialize(self, learner: Learner, context: TrainerContext):
super().initialize(learner, context)
self.retriever.initialize()
self.bestpath.mkdir(exist_ok=True, parents=True)
# Checkpoint start
try:
with self.info.open("rt") as fp:
self.top = json.load(fp) # type: Dict[str, Dict[str, float]]
except Exception:
self.top = {}
def update_metrics(self, metrics: Dict[str, float]):
if self.top:
# Just use another key
for metric in self.metrics.keys():
metrics[f"{self.id}/final/{metric}"] = self.top[metric]["value"]
def monitored(self) -> Iterator[str]:
return [key for key, monitored in self.metrics.items() if monitored]
def task_outputs(self, learner: "Learner", dep):
"""Experimaestro outputs: returns the best checkpoints for each
metric"""
res = {
key: ModuleLoader.construct(
learner.model, self.bestpath / key / TrainState.MODEL_PATH, dep
)
for key, store in self.metrics.items()
if store
}
return res
def should_stop(self, epoch=0):
if self.early_stop > 0 and self.top:
epochs_since_imp = (epoch or self.context.epoch) - max(
info["epoch"] for key, info in self.top.items() if self.metrics[key]
)
if epochs_since_imp >= self.early_stop:
return LearnerListenerStatus.STOP
return LearnerListenerStatus.DONT_STOP
def __call__(self, state: TrainState):
# Check that we did not stop earlier (when loading from checkpoint / if other
# listeners have not stopped yet)
if self.should_stop(state.epoch - 1) == LearnerListenerStatus.STOP:
return LearnerListenerStatus.STOP
if state.epoch % self.validation_interval == 0:
# Compute validation metrics
means, details = evaluate(
self.retriever, self.dataset, list(self.metrics.keys()), True
)
for metric, keep in self.metrics.items():
value = means[metric]
self.context.writer.add_scalar(
f"{self.id}/{metric}/mean", value, state.step
)
self.context.writer.add_histogram(
f"{self.id}/{metric}",
np.array(list(details[metric].values()), dtype=np.float32),
state.step,
)
# Update the top validation
if state.epoch >= self.warmup:
topstate = self.top.get(metric, None)
if topstate is None or value > topstate["value"]:
# Save the new top JSON
self.top[metric] = {"value": value, "epoch": self.context.epoch}
# Copy in corresponding directory
if keep:
logging.info(
f"Saving the checkpoint {state.epoch}"
f" for metric {metric}"
)
self.context.copy(self.bestpath / metric)
# Update information
with self.info.open("wt") as fp:
json.dump(self.top, fp)
# Early stopping?
return self.should_stop()
|
PypiClean
|
/api-buddy-0.4.0.tar.gz/api-buddy-0.4.0/README.md
|
# API Buddy
[](https://travis-ci.org/fonsecapeter/api-buddy.svg)
[](https://badge.fury.io/py/api-buddy)

> Right now, only OAuth2 authentication is supported. It's the most common, and current gold standard for security best practices. Also most APIs use it. That said, I have no beef with all the APIs out there using something else, so feel free to open a ticket if you want something else supported. 🎟
>
> You can also always manually set headers.
## Installation
As long as you have python 3.7 or higher (I recommend using [pyenv](https://github.com/pyenv/pyenv)), just:
```bash
pip install api-buddy
```
## Usage
First, specify the API you're exploring in your preferences
```yaml
# ~/.api-buddy.yaml
api_url: https://some.api.com
```
Then it's as easy as:
```bash
api get some-endpoint
```
```json
=> 200
{
"look": "I haz data",
"thx": "API Buddy"
}
```
HTTP Method defaults to `get`:
```bash
api some-endpoint # same as the first example
```
You can add query params in key=val format:
```bash
api get \
my/favorite/endpoint \
first_name=cosmo \
last_name=kramer
```
You can also add request body data in JSON format:
```bash
api post \
some-endpoint \
'{"id": 1, "field": "value"}'
```
🤔 Note the single-quotes. You can expand this across multiple lines:
```bash
api post \
some-endpoint \
'{
"id": 1,
"field": "value"
}'
```
### [Preferences 👉](https://github.com/fonsecapeter/api-buddy/blob/master/docs/preferences.md)
### Arguments
- `http_method`: (optional, default=`get`) The HTTP method to use in your request.
- It should be one of:
- `get`
- `post`
- `patch`
- `put`
- `delete`
- `endpoint`: (required) The relative path to an API endpoint.
- AKA you don't need to type the base api url again here.
- `params`: (optional) A list of `key=val` query params
- `data`: (optional) A JSON string of request body data.
- You can't use this with `get` because HTTP.
### Options
- `-h`, `--help`: Show the help message
- `-v`, `--version`: Show the installed version
## Development
Requires:
- [poetry](https://poetry.eustace.io/)
- Python 3.7
- Suggest using: [pyenv](https://github.com/pyenv/pyenv)
Steps to start working:
- Build and create the local venv with `bin/setup`
- Make sure everything works with `bin/test`
- Try the local cli with `poetry run api --help`
- Find other management commands with `bin/list`
|
PypiClean
|
/jupyterlab_remote_contents-0.1.1.tar.gz/jupyterlab_remote_contents-0.1.1/node_modules/@blueprintjs/core/lib/esnext/components/slider/handle.d.ts
|
import * as React from "react";
import { AbstractPureComponent2 } from "../../common";
import { HandleProps } from "./handleProps";
/**
* Props for the internal <Handle> component, which needs some additional info from the parent Slider.
*/
export interface IInternalHandleProps extends HandleProps {
disabled?: boolean;
label: JSX.Element | string | undefined;
max: number;
min: number;
stepSize: number;
tickSize: number;
tickSizeRatio: number;
vertical: boolean;
}
export interface IHandleState {
/** whether slider handle is currently being dragged */
isMoving?: boolean;
}
/** Internal component for a Handle with click/drag/keyboard logic to determine a new value. */
export declare class Handle extends AbstractPureComponent2<IInternalHandleProps, IHandleState> {
static displayName: string;
state: {
isMoving: boolean;
};
private handleElement;
private refHandlers;
componentDidMount(): void;
render(): JSX.Element;
componentWillUnmount(): void;
/** Convert client pixel to value between min and max. */
clientToValue(clientPixel: number): number;
mouseEventClientOffset(event: MouseEvent | React.MouseEvent<HTMLElement>): number;
touchEventClientOffset(event: TouchEvent | React.TouchEvent<HTMLElement>): number;
beginHandleMovement: (event: MouseEvent | React.MouseEvent<HTMLElement>) => void;
beginHandleTouchMovement: (event: TouchEvent | React.TouchEvent<HTMLElement>) => void;
protected validateProps(props: IInternalHandleProps): void;
private getStyleProperties;
private endHandleMovement;
private endHandleTouchMovement;
private handleMoveEndedAt;
private handleHandleMovement;
private handleHandleTouchMovement;
private handleMovedTo;
private handleKeyDown;
private handleKeyUp;
/** Clamp value and invoke callback if it differs from current value */
private changeValue;
/** Clamp value between min and max props */
private clamp;
private getHandleElementCenterPixel;
private getHandleMidpointAndOffset;
private removeDocumentEventListeners;
}
|
PypiClean
|
/django_advanced_password_validation-1.1.1.tar.gz/django_advanced_password_validation-1.1.1/README.md
|
# django-advanced_password_validation
Extends Django password validation options to include minimum uppercase, minimum lowercase, minimum numerical, and minimum special characters. This was created in an attempt to keep up with industry standards for strong user passwords.
This package works for both python 3.x and 2.x versions.
> **_NOTE:_** As of January 01, 2020 python 2.x has been deprecated and will no longer receive continued support. See [Python 2.x EOL](https://www.python.org/doc/sunset-python-2/) for more details.
### Prerequisites
Requires Django 1.11 or later.
You can install the latest version of Django via pip:
```
$ pip install django
```
Alternatively, you can install a specific version of Django via pip:
```
$ pip install django==2.2
```
> **_NOTE:_** See the [django-project](https://docs.djangoproject.com) documentation for information on non-deprecated Django versions.
### Installation
#### Normal installation
Install django-advanced_password_validation via pip:
```
$ pip install django-advanced_password_validation
```
#### Development installation
```
$ git clone https://github.com/ezrajrice/django-advanced_password_validation.git
$ cd django-advanced_password_validation
$ pip install --editable .
```
### Usage
The optional validators must be configured in the settings.py file of your django project. The example below enables four of them.
#### /my-cool-project/settings.py
```python
INSTALLED_APPS = [
...
'django_advanced_password_validation',
...
]
AUTH_PASSWORD_VALIDATORS = [
...
{
'NAME': 'django_advanced_password_validation.advanced_password_validation.ContainsDigitsValidator',
'OPTIONS': {
'min_digits': 1
}
},
{
'NAME': 'django_advanced_password_validation.advanced_password_validation.ContainsUppercaseValidator',
'OPTIONS': {
'min_uppercase': 1
}
},
{
'NAME': 'django_advanced_password_validation.advanced_password_validation.ContainsLowercaseValidator',
'OPTIONS': {
'min_lowercase': 1
}
},
{
'NAME': 'django_advanced_password_validation.advanced_password_validation.ContainsSpecialCharactersValidator',
'OPTIONS': {
'min_characters': 1
}
},
...
]
```
### Options
Here is a list of the available options with their default values.
| Validator | Option | Default |
| --- |:---:| ---:|
| ContainsDigitsValidator | min_digits | 1 |
| ContainsUppercaseValidator | min_uppercase | 1 |
| ContainsLowercaseValidator | min_lowercase | 1 |
| ContainsSpecialCharactersValidator | min_characters | 1 |
| MaximumLengthValidator | max_length | 128 |
| MaxConsecutiveCharactersValidator | max_consecutive | 3 |
| ConsecutivelyIncreasingDigitValidator | max_consecutive | 3 |
| ConsecutivelyDecreasingDigitValidator | max_consecutive | 3 |
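Any of the other validators in the table can be enabled the same way. For example (the option values below are illustrative, not recommendations):

```python
AUTH_PASSWORD_VALIDATORS = [
    ...
    {
        'NAME': 'django_advanced_password_validation.advanced_password_validation.MaximumLengthValidator',
        'OPTIONS': {
            'max_length': 64
        }
    },
    {
        'NAME': 'django_advanced_password_validation.advanced_password_validation.MaxConsecutiveCharactersValidator',
        'OPTIONS': {
            'max_consecutive': 3
        }
    },
    ...
]
```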
## Authors
* **Ezra Rice** - *Initial work* - [ezrajrice](https://github.com/ezrajrice)
## License
This project is licensed under the MIT License - see the [LICENSE.md](LICENSE.md) file for details.
## Acknowledgments
* **Victor Semionov** - *Contributor* - [vsemionov](https://github.com/vsemionov)
|
PypiClean
|
/algokit_client_generator-1.0.2b1-py3-none-any.whl/algokit_client_generator/cli.py
|
import argparse
import logging
import sys
from pathlib import Path
from algokit_client_generator.writer import generate_client
logger = logging.getLogger(__name__)
class ArgumentError(Exception):
def __init__(self, message: str):
self.message = message
def get_args_parser() -> argparse.ArgumentParser:
parser = argparse.ArgumentParser(
description="Generates typed python clients from an Algorand ARC-0032 specification file."
)
parser.add_argument(
"-a",
"--app_spec",
default=".",
type=Path,
help="The path to an application.json or a directory if using --walk. Defaults to current directory",
)
parser.add_argument(
"-o", "--output", default="client_generated.py", type=Path, help="The output filename for the generated client"
)
parser.add_argument(
"-w",
"--walk",
action="store_true",
help="Walk the input path recursively, generating a client for each application.json found",
)
return parser
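# A hedged sketch of driving the parser programmatically (e.g. in tests); the
# argument values below are illustrative:
#   parser = get_args_parser()
#   args = parser.parse_args(["--app_spec", "application.json", "--output", "client.py"])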
def configure_logging() -> None:
logging.basicConfig(level=logging.INFO, format="%(message)s")
def walk_dir(path: Path, output: Path) -> None:
for child in path.iterdir():
if child.is_dir():
walk_dir(child, output)
elif child.name.lower() == "application.json":
generate_client(child, child.parent / output)
def process(parser: argparse.ArgumentParser) -> None:
args = parser.parse_args()
app_spec: Path = args.app_spec
output: Path = args.output
if not app_spec.exists():
raise ArgumentError(f"Application Specification not found: {app_spec}")
if args.walk:
if not app_spec.is_dir():
raise ArgumentError(
f"Application specification must be a path to a directory, when using the --walk option: {app_spec}"
)
if output.is_absolute():
raise ArgumentError(f"Output must be a relative path when using the --walk option: {output}")
walk_dir(args.app_spec, args.output)
elif len(sys.argv) == 1: # if user invokes with no arguments display help
parser.print_usage()
else:
if not app_spec.is_file():
raise ArgumentError(f"Application Specification must be a path to an application.json: {app_spec}")
generate_client(app_spec, output)
def main() -> None:
configure_logging()
parser = get_args_parser()
try:
process(parser)
except ArgumentError as ex:
logger.error(ex.message)
|
PypiClean
|
/datasette-surveys-1.1.8.tar.gz/datasette-surveys-1.1.8/datasette_surveys/static/lib/datatables.net-plugins/Readme.md
|
DataTables Plugins
==================
This repository contains a collection of plug-ins for the jQuery [DataTables](http://datatables.net) table enhancer. These plug-ins enhance the DataTables library, adding extra options to core functionality such as additional sort algorithms, API methods and pagination controls. The plug-ins should not be confused with DataTables "extensions", which are more significant software libraries that add additional features to DataTables.
This repository holds the following plug-in types for DataTables (among others):
* Sorting
* Type based
* Custom data source based
* API
* Filtering
* Type based
* Row based
* Internationalisation translations
* Type detection
* Pagination
* Integration scripts
* Twitter Bootstrap
Please refer to the DataTables [plug-in documentation](http://datatables.net/plug-ins) for details on how to use these plug-ins.
|
PypiClean
|
/jax_dips-0.2.1-py3-none-any.whl/jax_dips/solvers/free_boundary/solver_poisson_advection.py
|
import os
import pickle
import jax
from jax import config, grad, jit
from jax import numpy as jnp
from jax import random, value_and_grad, vmap
from jax_dips.geometry.level_set import geometric_integrations_per_point
config.update("jax_debug_nans", False)
from functools import partial
import haiku as hk
import optax
from jax_dips._jaxmd_modules.util import f32, i32
from jax_dips.geometry import interpolate
from jax_dips.nn.nn_solution_model import DoubleMLP
class PoissonAdvectionTrainer:
"""
This is a completely local point-based Poisson solver.
"""
def __init__(self, gstate, sim_state, sim_state_fn, optimizer, algorithm=0, precondition=1):
"""
algorithm = 0: use regression to evaluate u^\pm
algorithm = 1: use neural network to evaluate u^\pm
"""
self.optimizer = optimizer
self.gstate = gstate
self.sim_state_fn = sim_state_fn
self.sim_state = sim_state
self.algorithm = algorithm
""" Grid Info """
# self.bandwidth_squared = (2.0 * self.dx)*(2.0 * self.dx)
self.xmin = gstate.xmin()
self.xmax = gstate.xmax()
self.ymin = gstate.ymin()
self.ymax = gstate.ymax()
self.zmin = gstate.zmin()
self.zmax = gstate.zmax()
""" functions for the method """
self.dir_bc_fn = self.sim_state_fn.dir_bc_fn
self.f_m_interp_fn = self.sim_state_fn.f_m_fn
self.f_p_interp_fn = self.sim_state_fn.f_p_fn
self.k_m_interp_fn = self.sim_state_fn.k_m_fn
self.k_p_interp_fn = self.sim_state_fn.k_p_fn
self.mu_m_interp_fn = self.sim_state_fn.mu_m_fn
self.mu_p_interp_fn = self.sim_state_fn.mu_p_fn
self.alpha_interp_fn = self.sim_state_fn.alpha_fn
self.beta_interp_fn = self.sim_state_fn.beta_fn
self.mu_m_over_mu_p_interp_fn = lambda r: self.mu_m_interp_fn(r) / self.mu_p_interp_fn(r)
self.beta_over_mu_m_interp_fn = lambda r: self.beta_interp_fn(r) / self.mu_m_interp_fn(r)
self.beta_over_mu_p_interp_fn = lambda r: self.beta_interp_fn(r) / self.mu_p_interp_fn(r)
""" The level set function or its interpolant (if is free boundary) """
# self.phi_cube_ = sim_state.phi.reshape(self.grid_shape)
# x, y, z, phi_cube = interpolate.add_ghost_layer_3d(xo, yo, zo, self.phi_cube_)
# x, y, z, self.phi_cube = interpolate.add_ghost_layer_3d(x, y, z, phi_cube)
# self.phi_flat = self.phi_cube_.reshape(-1)
# self.phi_interp_fn = interpolate.nonoscillatory_quadratic_interpolation(self.sim_state.phi, self.gstate)
self.phi_interp_fn = self.sim_state_fn.phi_fn
""" Geometric operations per point """
(
self.get_vertices_of_cell_intersection_with_interface_at_point,
self.is_cell_crossed_by_interface,
) = geometric_integrations_per_point.get_vertices_of_cell_intersection_with_interface(self.phi_interp_fn)
(
self.beta_integrate_over_interface_at_point,
self.beta_integrate_in_negative_domain,
) = geometric_integrations_per_point.integrate_over_gamma_and_omega_m(
self.get_vertices_of_cell_intersection_with_interface_at_point,
self.is_cell_crossed_by_interface,
self.beta_interp_fn,
)
self.compute_face_centroids_values_plus_minus_at_point = (
geometric_integrations_per_point.compute_cell_faces_areas_values(
self.get_vertices_of_cell_intersection_with_interface_at_point,
self.is_cell_crossed_by_interface,
self.mu_m_interp_fn,
self.mu_p_interp_fn,
)
)
# self.ngbs = jnp.array([ [-1, -1, -1],
# [0, -1, -1],
# [1, -1, -1],
# [-1, 0, -1],
# [0, 0, -1],
# [1, 0, -1],
# [-1, 1, -1],
# [0, 1, -1],
# [1, 1, -1],
# [-1, -1, 0],
# [0, -1, 0],
# [1, -1, 0],
# [-1, 0, 0],
# [0, 0, 0],
# [1, 0, 0],
# [-1, 1, 0],
# [0, 1, 0],
# [1, 1, 0],
# [-1, -1, 1],
# [0, -1, 1],
# [1, -1, 1],
# [-1, 0, 1],
# [0, 0, 1],
# [1, 0, 1],
# [-1, 1, 1],
# [0, 1, 1],
# [1, 1, 1]], dtype=i32)
""" initialize configurated solver """
if self.algorithm == 0:
self.u_mp_fn = self.get_u_mp_by_regression_at_point_fn
elif self.algorithm == 1:
self.initialize_neural_based_algorithm()
self.u_mp_fn = NotImplemented # self.get_u_mp_by_neural_network_at_node_fn
self.compute_normal_gradient_solution_mp_on_interface = (
self.compute_normal_gradient_solution_mp_on_interface_neural_network
)
self.compute_gradient_solution_mp = self.compute_gradient_solution_mp_neural_network
self.compute_normal_gradient_solution_on_interface = (
self.compute_normal_gradient_solution_on_interface_neural_network
)
self.compute_gradient_solution = self.compute_gradient_solution_neural_network
if precondition == 1:
self.compute_Ax_and_b_fn = self.compute_Ax_and_b_preconditioned_fn
elif precondition == 0:
self.compute_Ax_and_b_fn = self.compute_Ax_and_b_vanilla_fn
def fetch_checkpoint(self, checkpoint_dir):
if checkpoint_dir is None or not os.path.exists(checkpoint_dir):
return None
else:
checkpoints = [p for p in os.listdir(checkpoint_dir) if "checkpoint_" in p]
if checkpoints == []:
return None
checkpoint = os.path.join(checkpoint_dir, max(checkpoints, key=lambda p: int(p.rsplit("_", 1)[-1])))  # latest epoch, compared numerically
print(f"Loading checkpoint {checkpoint}")
with open(checkpoint, "rb") as f:
state = pickle.load(f)
return state
def save_checkpoint(self, checkpoint_dir, state):
if checkpoint_dir is None:
print("No checkpoint dir. specified. Skipping checkpoint.")
return
if not os.path.exists(checkpoint_dir):
os.makedirs(checkpoint_dir)
checkpoint = os.path.join(checkpoint_dir, "checkpoint_" + str(state["epoch"]))
print(f"Saving checkpoint {checkpoint}")
with open(checkpoint, "wb") as f:
pickle.dump(state, f)
return checkpoint
def get_Xijk(self, cell_dx, cell_dy, cell_dz):
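# offsets of the 27 points of the 3x3x3 stencil centered on the cell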
Xijk = jnp.array(
[
[-cell_dx, -cell_dy, -cell_dz],
[0.0, -cell_dy, -cell_dz],
[cell_dx, -cell_dy, -cell_dz],
[-cell_dx, 0.0, -cell_dz],
[0.0, 0.0, -cell_dz],
[cell_dx, 0.0, -cell_dz],
[-cell_dx, cell_dy, -cell_dz],
[0.0, cell_dy, -cell_dz],
[cell_dx, cell_dy, -cell_dz],
[-cell_dx, -cell_dy, 0.0],
[0.0, -cell_dy, 0.0],
[cell_dx, -cell_dy, 0.0],
[-cell_dx, 0.0, 0.0],
[0.0, 0.0, 0.0],
[cell_dx, 0.0, 0.0],
[-cell_dx, cell_dy, 0.0],
[0.0, cell_dy, 0.0],
[cell_dx, cell_dy, 0.0],
[-cell_dx, -cell_dy, cell_dz],
[0.0, -cell_dy, cell_dz],
[cell_dx, -cell_dy, cell_dz],
[-cell_dx, 0.0, cell_dz],
[0.0, 0.0, cell_dz],
[cell_dx, 0.0, cell_dz],
[-cell_dx, cell_dy, cell_dz],
[0.0, cell_dy, cell_dz],
[cell_dx, cell_dy, cell_dz],
],
dtype=f32,
)
return Xijk
def normal_point_fn(self, point, dx, dy, dz):
"""
Evaluate normal vector at a given point based on interpolated values
of the level set function at the face-centers of a 3D cell centered at the
point with each side length given by dx, dy, dz.
"""
point_ip1_j_k = jnp.array([[point[0] + dx, point[1], point[2]]])
point_im1_j_k = jnp.array([[point[0] - dx, point[1], point[2]]])
phi_x = (self.phi_interp_fn(point_ip1_j_k) - self.phi_interp_fn(point_im1_j_k)) / (2 * dx)
point_i_jp1_k = jnp.array([[point[0], point[1] + dy, point[2]]])
point_i_jm1_k = jnp.array([[point[0], point[1] - dy, point[2]]])
phi_y = (self.phi_interp_fn(point_i_jp1_k) - self.phi_interp_fn(point_i_jm1_k)) / (2 * dy)
point_i_j_kp1 = jnp.array([[point[0], point[1], point[2] + dz]])
point_i_j_km1 = jnp.array([[point[0], point[1], point[2] - dz]])
phi_z = (self.phi_interp_fn(point_i_j_kp1) - self.phi_interp_fn(point_i_j_km1)) / (2 * dz)
norm = jnp.sqrt(phi_x * phi_x + phi_y * phi_y + phi_z * phi_z)
return jnp.array([phi_x / norm, phi_y / norm, phi_z / norm], dtype=f32)
def initialize_neural_based_algorithm(self):
def sign_p_fn(a):
# returns 1 only if a>0, otherwise is 0
sgn = jnp.sign(a)
return jnp.floor(0.5 * sgn + 0.75)
def sign_m_fn(a):
# returns 1 only if a<0, otherwise is 0
sgn = jnp.sign(a)
return jnp.ceil(0.5 * sgn - 0.75) * (-1.0)
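# NOTE: phi_flat and bandwidth_squared are only defined by the grid-based setup that is commented out above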
self.mask_region_m = sign_m_fn(self.phi_flat)
self.mask_region_p = sign_p_fn(self.phi_flat)
self.mask_interface_bandwidth = sign_m_fn(self.phi_flat**2 - self.bandwidth_squared)
self.mask_non_interface_bandwidth = sign_p_fn(self.phi_flat**2 - self.bandwidth_squared)
def get_regression_coeffs_at_point(self, point, dx, dy, dz):
def sign_p_fn(a):
# returns 1 only if a>0, otherwise is 0
sgn = jnp.sign(a)
return jnp.floor(0.5 * sgn + 0.75)
def sign_m_fn(a):
# returns 1 only if a<0, otherwise is 0
sgn = jnp.sign(a)
return jnp.ceil(0.5 * sgn - 0.75) * (-1.0)
x, y, z = point
Xijk = self.get_Xijk(dx, dy, dz)
curr_vertices = jnp.add(jnp.array([x, y, z]), Xijk)
phi_vertices = self.phi_interp_fn(curr_vertices)
Wijk_p = jnp.diag(vmap(sign_p_fn)(phi_vertices))
Wijk_m = jnp.diag(vmap(sign_m_fn)(phi_vertices))
Dp = jnp.linalg.pinv(Xijk.T @ Wijk_p @ Xijk) @ (Wijk_p @ Xijk).T
Dm = jnp.linalg.pinv(Xijk.T @ Wijk_m @ Xijk) @ (Wijk_m @ Xijk).T
D_m_mat = jnp.nan_to_num(Dm)
D_p_mat = jnp.nan_to_num(Dp)
normal_vec = self.normal_point_fn(point, dx, dy, dz).T
phi_point = self.phi_interp_fn(point[jnp.newaxis])
Cm_ijk_pqm = normal_vec @ D_m_mat
Cp_ijk_pqm = normal_vec @ D_p_mat
zeta_p_ijk_pqm = (
(self.mu_p_interp_fn(point[jnp.newaxis]) - self.mu_m_interp_fn(point[jnp.newaxis]))
/ self.mu_m_interp_fn(point[jnp.newaxis])
) * phi_point
zeta_p_ijk_pqm = zeta_p_ijk_pqm[..., jnp.newaxis] * Cp_ijk_pqm
zeta_m_ijk_pqm = (
(self.mu_p_interp_fn(point[jnp.newaxis]) - self.mu_m_interp_fn(point[jnp.newaxis]))
/ self.mu_p_interp_fn(point[jnp.newaxis])
) * phi_point
zeta_m_ijk_pqm = zeta_m_ijk_pqm[..., jnp.newaxis] * Cm_ijk_pqm
zeta_p_ijk = (zeta_p_ijk_pqm.sum(axis=1) - zeta_p_ijk_pqm[:, 13]) * f32(-1.0)
zeta_m_ijk = (zeta_m_ijk_pqm.sum(axis=1) - zeta_m_ijk_pqm[:, 13]) * f32(-1.0)
gamma_p_ijk_pqm = zeta_p_ijk_pqm / (1.0 + zeta_p_ijk[:, jnp.newaxis])
gamma_m_ijk_pqm = zeta_m_ijk_pqm / (1.0 - zeta_m_ijk[:, jnp.newaxis])
gamma_p_ijk = (gamma_p_ijk_pqm.sum(axis=1) - gamma_p_ijk_pqm[:, 13]) * f32(-1.0)
gamma_m_ijk = (gamma_m_ijk_pqm.sum(axis=1) - gamma_m_ijk_pqm[:, 13]) * f32(-1.0)
return (
normal_vec,
gamma_m_ijk,
gamma_m_ijk_pqm,
gamma_p_ijk,
gamma_p_ijk_pqm,
zeta_m_ijk,
zeta_m_ijk_pqm,
zeta_p_ijk,
zeta_p_ijk_pqm,
)
@staticmethod
@hk.transform
def forward(x, phi_x):
"""
Forward pass of the neural network.
Args:
x: input data
Returns:
output of the neural network
"""
model = DoubleMLP()
return model(x, phi_x)
@partial(jit, static_argnums=0)
def init(self, seed=42):
rng = random.PRNGKey(seed)
params = self.forward.init(rng, x=jnp.array([0.0, 0.0, 0.0]), phi_x=0.1)
opt_state = self.optimizer.init(params)
return opt_state, params
@partial(jit, static_argnums=(0))
def evaluate_solution_fn(self, params, R_flat):
phi_flat = self.phi_interp_fn(R_flat)
sol_fn = partial(self.forward.apply, params, None)
pred_sol = vmap(sol_fn, (0, 0))(R_flat, phi_flat)
return pred_sol
def solution_at_point_fn(self, params, r_point, phi_point):
sol_fn = partial(self.forward.apply, params, None)
return sol_fn(r_point, phi_point).reshape()
def get_sol_grad_sol_fn(self, params):
u_at_point_fn = partial(self.solution_at_point_fn, params)
grad_u_at_point_fn = grad(u_at_point_fn)
return u_at_point_fn, grad_u_at_point_fn
def get_mask_plus(self, points):
"""
For a set of points, returns 1 if in external region
returns 0 if inside the geometry.
"""
phi_points = self.phi_interp_fn(points)
def sign_p_fn(a):
# returns 1 only if a>0, otherwise is 0
sgn = jnp.sign(a)
return jnp.floor(0.5 * sgn + 0.75)
mask_p = sign_p_fn(phi_points)
return mask_p
@partial(jit, static_argnums=(0))
def loss(self, params, points, dx, dy, dz):
"""
Loss function of the neural network
"""
lhs_rhs = vmap(self.compute_Ax_and_b_fn, (None, 0, None, None, None))(params, points, dx, dy, dz)
lhs, rhs = jnp.split(lhs_rhs, [1], axis=1)
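# pointwise residual of the (diagonal-preconditioned) discretized system: minimize ||A u - b||^2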
tot_loss = jnp.mean(optax.l2_loss(lhs, rhs))
# du_xmax = (self.evaluate_solution_fn(params, self.gstate.R_xmax_boundary) - self.dir_bc_fn(self.gstate.R_xmax_boundary)[...,jnp.newaxis])
# du_xmin = (self.evaluate_solution_fn(params, self.gstate.R_xmin_boundary) - self.dir_bc_fn(self.gstate.R_xmin_boundary)[...,jnp.newaxis])
# du_ymax = (self.evaluate_solution_fn(params, self.gstate.R_ymax_boundary) - self.dir_bc_fn(self.gstate.R_ymax_boundary)[...,jnp.newaxis])
# du_ymin = (self.evaluate_solution_fn(params, self.gstate.R_ymin_boundary) - self.dir_bc_fn(self.gstate.R_ymin_boundary)[...,jnp.newaxis])
# du_zmax = (self.evaluate_solution_fn(params, self.gstate.R_zmax_boundary) - self.dir_bc_fn(self.gstate.R_zmax_boundary)[...,jnp.newaxis])
# du_zmin = (self.evaluate_solution_fn(params, self.gstate.R_zmin_boundary) - self.dir_bc_fn(self.gstate.R_zmin_boundary)[...,jnp.newaxis])
# tot_loss += 0.01 * (jnp.mean(jnp.square(du_xmax)) + jnp.mean(jnp.square(du_xmin)) + jnp.mean(jnp.square(du_ymax)) + jnp.mean(jnp.square(du_ymin)) + jnp.mean(jnp.square(du_zmax)) + jnp.mean(jnp.square(du_zmin)))
return tot_loss
@partial(jit, static_argnums=(0))
def update(self, opt_state, params, points, dx, dy, dz):
loss, grads = value_and_grad(self.loss)(params, points, dx, dy, dz)
updates, opt_state = self.optimizer.update(grads, opt_state, params)
params = optax.apply_updates(params, updates)
return opt_state, params, loss
@partial(jit, static_argnums=(0))
def update_multi_gpu(self, opt_state, params, points, dx, dy, dz):
loss, grads = value_and_grad(self.loss)(params, points, dx, dy, dz)
""" Muli-GPU """
grads = jax.lax.pmean(grads, axis_name="devices")
loss = jax.lax.pmean(loss, axis_name="devices")
updates, opt_state = self.optimizer.update(grads, opt_state, params)
params = optax.apply_updates(params, updates)
return opt_state, params, loss
@partial(jit, static_argnums=(0))
def compute_Ax_and_b_preconditioned_fn(self, params, point, dx, dy, dz):
"""
This function calculates A @ u for a given vector of unknowns u.
This evaluates the lhs in Au^k=b given estimate u^k.
The purpose would be to define an optimization problem:
min || A u^k - b ||^2
Using autodiff, we can compute gradients w.r.t. u^k values and optimize for the solution field.
* PROCEDURE:
first compute u = B:u + r for each node
then use the actual cell geometries (face areas and mu coeffs) to
compute the rhs of the linear system given currently passed-in u vector
for solution estimate.
"""
u_mp_at_point = partial(self.u_mp_fn, params, dx, dy, dz)
def is_box_boundary_point(point):
"""
Check if current node is on the boundary of box
"""
x, y, z = point
boundary = jnp.where(abs(x - self.xmin) < 1e-6 * dx, 0, 1) * jnp.where(
abs(x - self.xmax) < 1e-6 * dx, 0, 1
)
boundary *= jnp.where(abs(y - self.ymin) < 1e-6 * dy, 0, 1) * jnp.where(
abs(y - self.ymax) < 1e-6 * dy, 0, 1
)
boundary *= jnp.where(abs(z - self.zmin) < 1e-6 * dz, 0, 1) * jnp.where(
abs(z - self.zmax) < 1e-6 * dz, 0, 1
)
return jnp.where(boundary == 0, True, False)
def evaluate_discretization_lhs_rhs_at_point(point, dx, dy, dz):
# --- LHS
coeffs_ = self.compute_face_centroids_values_plus_minus_at_point(point, dx, dy, dz)
coeffs = coeffs_[:12]
vols = coeffs_[12:14]
V_m_ijk = vols[0]
V_p_ijk = vols[1]
Vol_cell_nominal = dx * dy * dz
def get_lhs_at_interior_point(point):
point_ijk = point
point_imjk = jnp.array([point[0] - dx, point[1], point[2]], dtype=f32)
point_ipjk = jnp.array([point[0] + dx, point[1], point[2]], dtype=f32)
point_ijmk = jnp.array([point[0], point[1] - dy, point[2]], dtype=f32)
point_ijpk = jnp.array([point[0], point[1] + dy, point[2]], dtype=f32)
point_ijkm = jnp.array([point[0], point[1], point[2] - dz], dtype=f32)
point_ijkp = jnp.array([point[0], point[1], point[2] + dz], dtype=f32)
k_m_ijk = self.k_m_interp_fn(point[jnp.newaxis])
k_p_ijk = self.k_p_interp_fn(point[jnp.newaxis])
u_m_ijk, u_p_ijk = u_mp_at_point(point_ijk)
u_m_imjk, u_p_imjk = u_mp_at_point(point_imjk)
u_m_ipjk, u_p_ipjk = u_mp_at_point(point_ipjk)
u_m_ijmk, u_p_ijmk = u_mp_at_point(point_ijmk)
u_m_ijpk, u_p_ijpk = u_mp_at_point(point_ijpk)
u_m_ijkm, u_p_ijkm = u_mp_at_point(point_ijkm)
u_m_ijkp, u_p_ijkp = u_mp_at_point(point_ijkp)
lhs = k_m_ijk * V_m_ijk * u_m_ijk
lhs += k_p_ijk * V_p_ijk * u_p_ijk
lhs += (coeffs[0] + coeffs[2] + coeffs[4] + coeffs[6] + coeffs[8] + coeffs[10]) * u_m_ijk + (
coeffs[1] + coeffs[3] + coeffs[5] + coeffs[7] + coeffs[9] + coeffs[11]
) * u_p_ijk
lhs += -1.0 * coeffs[0] * u_m_imjk - coeffs[1] * u_p_imjk
lhs += -1.0 * coeffs[2] * u_m_ipjk - coeffs[3] * u_p_ipjk
lhs += -1.0 * coeffs[4] * u_m_ijmk - coeffs[5] * u_p_ijmk
lhs += -1.0 * coeffs[6] * u_m_ijpk - coeffs[7] * u_p_ijpk
lhs += -1.0 * coeffs[8] * u_m_ijkm - coeffs[9] * u_p_ijkm
lhs += -1.0 * coeffs[10] * u_m_ijkp - coeffs[11] * u_p_ijkp
diag_coeff = (
k_p_ijk * V_p_ijk
+ k_m_ijk * V_m_ijk
+ (coeffs[0] + coeffs[2] + coeffs[4] + coeffs[6] + coeffs[8] + coeffs[10])
+ (coeffs[1] + coeffs[3] + coeffs[5] + coeffs[7] + coeffs[9] + coeffs[11])
)
return jnp.array([lhs.reshape(), diag_coeff.reshape()])
def get_lhs_on_box_boundary(point):
phi_boundary = self.phi_interp_fn(point[jnp.newaxis])
u_boundary = self.solution_at_point_fn(params, point, phi_boundary)
lhs = u_boundary * Vol_cell_nominal
return jnp.array([lhs, Vol_cell_nominal])
lhs_diagcoeff = jnp.where(
is_box_boundary_point(point),
get_lhs_on_box_boundary(point),
get_lhs_at_interior_point(point),
)
lhs, diagcoeff = jnp.split(lhs_diagcoeff, [1], 0)
# --- RHS
def get_rhs_at_interior_point(point):
rhs = (
self.f_m_interp_fn(point[jnp.newaxis]) * V_m_ijk + self.f_p_interp_fn(point[jnp.newaxis]) * V_p_ijk
)
rhs += self.beta_integrate_over_interface_at_point(point, dx, dy, dz)
return rhs
def get_rhs_on_box_boundary(point):
return self.dir_bc_fn(point[jnp.newaxis]).reshape() * Vol_cell_nominal
rhs = jnp.where(
is_box_boundary_point(point),
get_rhs_on_box_boundary(point),
get_rhs_at_interior_point(point),
)
lhs_over_diag = jnp.nan_to_num(lhs / diagcoeff)
rhs_over_diag = jnp.nan_to_num(rhs / diagcoeff)
return jnp.array([lhs_over_diag, rhs_over_diag])
lhs_rhs = evaluate_discretization_lhs_rhs_at_point(point, dx, dy, dz)
return lhs_rhs
    @partial(jit, static_argnums=(0,))
def get_u_mp_by_regression_at_point_fn(self, params, dx, dy, dz, point):
"""
This function evaluates pairs of u^+ and u^- at each grid point
in the domain, given the neural network models.
BIAS SLOW:
This function evaluates
u_m = B_m : u + r_m
and
u_p = B_p : u + r_p
"""
delta_ijk = self.phi_interp_fn(point[jnp.newaxis])
u_ijk = self.solution_at_point_fn(params, point, delta_ijk)
Xijk = self.get_Xijk(dx, dy, dz)
curr_vertices = jnp.add(point, Xijk)
u_cube_ijk = self.evaluate_solution_fn(params, curr_vertices)
(
normal_ijk,
gamma_m_ijk,
gamma_m_ijk_pqm,
gamma_p_ijk,
gamma_p_ijk_pqm,
zeta_m_ijk,
zeta_m_ijk_pqm,
zeta_p_ijk,
zeta_p_ijk_pqm,
) = self.get_regression_coeffs_at_point(point, dx, dy, dz)
def bulk_point(is_interface_, u_ijk_):
return jnp.array(
[
jnp.where(is_interface_ == -1, u_ijk_, 0.0),
jnp.where(is_interface_ == 1, u_ijk_, 0.0),
]
)
def interface_point(point):
def mu_minus_bigger_fn(point):
def extrapolate_u_m_from_negative_domain(r_ijk):
r_m_proj = r_ijk[jnp.newaxis] - delta_ijk * normal_ijk
u_m = -1.0 * jnp.dot(gamma_m_ijk_pqm, u_cube_ijk)
u_m += (1.0 - gamma_m_ijk + gamma_m_ijk_pqm[:, 13]) * u_ijk
u_m += (
-1.0
* (1.0 - gamma_m_ijk)
* (self.alpha_interp_fn(r_m_proj) + delta_ijk * self.beta_over_mu_p_interp_fn(r_m_proj))
)
return u_m.reshape()
def extrapolate_u_p_from_positive_domain(r_ijk):
r_p_proj = r_ijk[jnp.newaxis] - delta_ijk * normal_ijk[0]
u_p = -1.0 * jnp.dot(zeta_m_ijk_pqm, u_cube_ijk)
u_p += (1.0 - zeta_m_ijk + zeta_m_ijk_pqm[:, 13]) * u_ijk
u_p += self.alpha_interp_fn(r_p_proj) + delta_ijk * self.beta_over_mu_p_interp_fn(r_p_proj)
return u_p.reshape()
u_m = jnp.where(delta_ijk > 0, extrapolate_u_m_from_negative_domain(point), u_ijk)[0]
u_p = jnp.where(delta_ijk > 0, u_ijk, extrapolate_u_p_from_positive_domain(point))[0]
return jnp.array([u_m, u_p])
def mu_plus_bigger_fn(point):
def extrapolate_u_m_from_negative_domain_(r_ijk):
r_m_proj = r_ijk[jnp.newaxis] - delta_ijk * normal_ijk
u_m = -1.0 * jnp.dot(zeta_p_ijk_pqm, u_cube_ijk)
u_m += (1.0 - zeta_p_ijk + zeta_p_ijk_pqm[:, 13]) * u_ijk
u_m += (-1.0) * (
self.alpha_interp_fn(r_m_proj) + delta_ijk * self.beta_over_mu_m_interp_fn(r_m_proj)
)
return u_m.reshape()
def extrapolate_u_p_from_positive_domain_(r_ijk):
r_p_proj = r_ijk[jnp.newaxis] - delta_ijk * normal_ijk
u_p = -1.0 * jnp.dot(gamma_p_ijk_pqm, u_cube_ijk)
u_p += (1.0 - gamma_p_ijk + gamma_p_ijk_pqm[:, 13]) * u_ijk
u_p += (1.0 - gamma_p_ijk) * (
self.alpha_interp_fn(r_p_proj) + delta_ijk * self.beta_over_mu_m_interp_fn(r_p_proj)
)
return u_p.reshape()
u_m = jnp.where(delta_ijk > 0, extrapolate_u_m_from_negative_domain_(point), u_ijk)[0]
u_p = jnp.where(delta_ijk > 0, u_ijk, extrapolate_u_p_from_positive_domain_(point))[0]
return jnp.array([u_m, u_p])
mu_m_ijk = self.mu_m_interp_fn(point[jnp.newaxis])
mu_p_ijk = self.mu_p_interp_fn(point[jnp.newaxis])
return jnp.where(mu_m_ijk > mu_p_ijk, mu_minus_bigger_fn(point), mu_plus_bigger_fn(point))
# 0: crossed by interface, -1: in Omega^-, +1: in Omega^+
is_interface = self.is_cell_crossed_by_interface(point, dx, dy, dz)
# is_interface = jnp.where( delta_ijk*delta_ijk <= self.bandwidth_squared, 0, jnp.sign(delta_ijk))
u_mp = jnp.where(is_interface == 0, interface_point(point), bulk_point(is_interface, u_ijk))
return u_mp
def compute_normal_gradient_solution_mp_on_interface_neural_network(self, params, points, dx, dy, dz):
_, grad_u_at_point_fn = self.get_sol_grad_sol_fn(params)
grad_u_p = vmap(grad_u_at_point_fn, (0, None))(points, 1)
grad_u_m = vmap(grad_u_at_point_fn, (0, None))(points, -1)
normal_vecs = vmap(self.normal_point_fn, (0, None, None, None))(points, dx, dy, dz)
grad_n_u_m = vmap(jnp.dot, (0, 0))(jnp.squeeze(normal_vecs), grad_u_m)
grad_n_u_p = vmap(jnp.dot, (0, 0))(jnp.squeeze(normal_vecs), grad_u_p)
return grad_n_u_m, grad_n_u_p
def compute_gradient_solution_mp_neural_network(self, params, points):
_, grad_u_at_point_fn = self.get_sol_grad_sol_fn(params)
grad_u_p = vmap(grad_u_at_point_fn, (0, None))(points, 1)
grad_u_m = vmap(grad_u_at_point_fn, (0, None))(points, -1)
return grad_u_m, grad_u_p
def compute_normal_gradient_solution_on_interface_neural_network(self, params, points, dx, dy, dz):
phi_flat = self.phi_interp_fn(points)
_, grad_u_at_point_fn = self.get_sol_grad_sol_fn(params)
grad_u = vmap(grad_u_at_point_fn, (0, 0))(points, phi_flat)
normal_vecs = vmap(self.normal_point_fn, (0, None, None, None))(points, dx, dy, dz)
grad_n_u = vmap(jnp.dot, (0, 0))(jnp.squeeze(normal_vecs), grad_u)
return grad_n_u
def compute_gradient_solution_neural_network(self, params, points):
phi_flat = self.phi_interp_fn(points)
_, grad_u_at_point_fn = self.get_sol_grad_sol_fn(params)
grad_u = vmap(grad_u_at_point_fn, (0, 0))(points, phi_flat)
return grad_u
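    # --- Illustrative usage (a minimal sketch, not part of the class) ---
    # Assuming `solver` is an instance of this class with `self.optimizer` set to
    # an optax optimizer, and `init_params`, `grid_points` and `num_epochs` defined
    # elsewhere (all three are hypothetical names), a single-GPU training loop over
    # the grid could look like:
    #
    #   params = init_params
    #   opt_state = solver.optimizer.init(params)
    #   for epoch in range(num_epochs):
    #       opt_state, params, epoch_loss = solver.update(opt_state, params, grid_points, dx, dy, dz)
    #       print(f"epoch {epoch}: loss = {epoch_loss:.3e}")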
|
PypiClean
|
/matrix-47-0.1.2.tar.gz/matrix-47-0.1.2/README.md
|
# matrix
```
__ _
__ _ ___ _/ /_____(_)_ __
/ ' \/ _ `/ __/ __/ /\ \ /
/_/_/_/\_,_/\__/_/ /_//_\_\
```
A python package for matrix operations and manipulations.
## Contents
* [Back-Story](#back-story)
* [Features](#features)
* [Installation](#installation)
* [Usage](#usage)
* [Uninstallation](#uninstallation)
* [Contributing](#contributing)
## Back-Story
I had just completed my "Journey through the Docs", studying the Core Python aspects of the Python Docs (mainly the Library & Language References, wherever else those referred me and whatever else I needed to fully understand things) with interactive sessions, testing things out and making notes.
Then I needed something to put all the things I had learned into (real) practice.
I wanted something purely **Standard** Python, no 3rd-party libraries, and this project seemed like a good place to start.
The project was actually conceived in the course of my "Journey through the Docs" but only stayed on my TODO list till after.
It's been interesting so far and the project actually turned out to incorporate a lot of what I learned... though, definitely not all. :smile:
**NOTE:** This project is not intended to be a "re-invention of any wheel"; it's just me practicing.
I actually didn't test out or go through any similar project in the course of developing this.
## Features
This is just an outline of the major features of the library. For the complete feature list, detailed descriptions and project documentation, see the [documentation](https://anonymoux47.github.io/matrix/).
### Matrix initialization. Via:
* The class constructor
* Utility functions to generate:
* Unit matrices.
* Matrices filled with random integer elements.
* Matrices filled with random floating-point elements.
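For illustration, initialization might look like this (the `Matrix(rows, cols)` constructor is shown under [Usage](#usage) below; the utility function names here are placeholders, not the confirmed API — see the [documentation](https://anonymoux47.github.io/matrix/) for the real names):
```python
from matrix import Matrix

m = Matrix(3, 3)  # 3x3 zero matrix, via the class constructor
# Utility generators (placeholder names):
# u = unit_matrix(3)            # 3x3 unit matrix
# r = randint_matrix(3, 3)      # 3x3 matrix of random integers
```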
### Matrix object interactions
* Intelligent string representation
* Subscription
* Single-element indexing, assignment and deletion
* Block-slice (sub-matrix) subscription and assignment.
* Truthiness
* Membership test for elements
* Iteration over elements
* Per-element rounding
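A rough sketch of these interactions (the subscript convention shown is an assumption; the documentation has the exact forms):
```python
from matrix import Matrix

m = Matrix(2, 2)
for element in m:     # iteration over elements
    print(element)
print(bool(m))        # truthiness
# Single-element indexing/assignment and membership (assumed syntax):
# m[1, 1] = 5
# print(5 in m)
```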
### Matrix object properties
* Size
* Number of rows
* Number of columns
* Determinant
* Principal diagonal
* Trace
* Rank
### Matrix Operations
* Negation
* Equality comparison
* Addition and subtraction
* Scalar multiplication
* Matrix multiplication
* Exponentiation (Repeated matrix multiplication)
* Division (by scalar)
* Inverse
* Transpose
* Augmentation
* Row reduction
* Row Echelon form (Forward elimination)
* Reduced Row Echelon form
* Back substitution
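Most of these presumably map onto Python's operators; a sketch under that assumption (unverified spellings are commented out):
```python
from matrix import Matrix

a = Matrix(2, 2)
b = Matrix(2, 2)
c = a + b    # addition
d = -a       # negation
e = a * 2    # scalar multiplication
# f = a @ b           # matrix multiplication (operator choice is an assumption)
# g = a.transpose()   # transpose (method name is an assumption)
```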
### Tests for matrix properties and special matrices
* Diagonality
* Nullity
* Orthogonality
* Squareness
* Symmetry
* Triangularity
* Identity matrix
* Conformability
### Matrix views
These are views of the matrix object, like `.keys()` and `.values()` are to `dict`.
* Rows and Columns (and their slices). Support:
* Single row/column Indexing
* Slicing of multiple rows/columns (Yes, a slice of rows/columns can still be sliced further! :sunglasses:)
* Row/column assignment and deletion (Rows/Columns slices **DO NOT** support these).
* Length (number of rows/columns "in" the view)
* Iteration over rows/columns
* Row and column. Support:
* String representation
* Single element indexing
* Multiple element slicing
* Equality comparison
* Mathematical operations (Also supports augmented assignment of these operations):
* Addition and subtraction of rows and/or columns (Element-wise)
* Multiplication and Division by scalar
* Multiplication and Division by row/column (i.e inter-operation of two rows/columns element-by-element)
* **NOTE:** Augmented assignments of these operations are performed **in-place** i.e affect the matrix itself.
* Row/column length
* Membership tests
* Iteration over elements
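By analogy with `dict` views, usage might look like the sketch below (all names here are assumptions, not the confirmed API):
```python
# for row in m.rows:          # iterate over row views
#     print(row)
# first = m.rows[1]           # single row indexing
# m.rows[1] += m.rows[2]      # augmented assignment, in-place on the matrix
```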
### Other operations on matrices
* Horizontal and vertical flip
* Clockwise and anti-clockwise rotation
* Matrix copy
* Matrix resize
* Rounded comparison
### Solutions to systems of linear equations
* Gaussian elimination
* Gauss-Jordan elimination
* Inverse method
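As a sketch only (function names are placeholders, not the package's confirmed API):
```python
# Given a coefficient matrix A and a constants column b, solving Ax = b might
# look like one of:
# x = gaussian_elimination(A, b)
# x = gauss_jordan(A, b)
# x = A.inverse() * b   # inverse method
```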
## Installation
### Requirements
- Python >= 3.8
### Install from PYPI
NOTE: You must have the `pip` python package installed (it usually is, by default).
Run
```sh
pip install matrix-47
```
OR
```sh
python -m pip install matrix-47
```
* * *
### Install from source
Download and unzip [this repository](https://github.com/AnonymouX47/matrix/archive/refs/heads/main.zip) or run
```sh
git clone https://github.com/AnonymouX47/matrix
```
Change your Working Directory to the local repository; run
```sh
cd matrix
```
Then, run
```sh
pip install .
```
OR
```sh
python setup.py install
```
to install the package.
* * *
Instead, you might run
```sh
python -i test.py
```
to just test out the library without installing the package (but will be limited to only that interactive session).
**NOTE:** On Windows, the Python executables must've been added to `PATH` (For help, check [here](https://datatofish.com/add-python-to-windows-path/)).
## Usage
Quick example:
```python
>>> from matrix import Matrix
>>> print(Matrix(4, 4))
+―――――――――――――――+
| 0 | 0 | 0 | 0 |
|―――+―――+―――+―――|
| 0 | 0 | 0 | 0 |
|―――+―――+―――+―――|
| 0 | 0 | 0 | 0 |
|―――+―――+―――+―――|
| 0 | 0 | 0 | 0 |
+―――――――――――――――+
```
For more usage examples, check the [samples](https://github.com/AnonymouX47/matrix/tree/main/samples).
For the complete feature list and descriptions, see [Feature Description](https://anonymoux47.github.io/matrix/features/).
## Uninstallation
To uninstall the package, run
```sh
pip uninstall matrix-47
```
## Contributing
If you find any bug, please create an **Issue** in the [Issues section](https://github.com/AnonymouX47/matrix/issues).
Please make sure you check other issues first, to make sure you don't create a duplicate. Thank you :smile:
|
PypiClean
|
/opal-azure-cli-servicebus-0.3.9.tar.gz/opal-azure-cli-servicebus-0.3.9/azure/cli/command_modules/servicebus/_params.py
|
from azure.cli.core.commands.parameters import tags_type, get_enum_type, resource_group_name_type, name_type, get_location_type, get_three_state_flag, get_resource_name_completion_list
from azure.cli.core.commands.validators import get_default_location_from_resource_group
def load_arguments_sb(self, _):
from azure.cli.command_modules.servicebus._completers import get_queue_command_completion_list, \
get_rules_command_completion_list, get_subscriptions_command_completion_list, get_topic_command_completion_list
from azure.cli.command_modules.servicebus._validators import _validate_auto_delete_on_idle, \
_validate_duplicate_detection_history_time_window, \
_validate_default_message_time_to_live, \
_validate_lock_duration, validate_partner_namespace, validate_premiumsku_capacity, validate_target_namespace
from knack.arguments import CLIArgumentType
from azure.mgmt.servicebus.models import SkuName, AccessRights, KeyType, FilterType
rights_arg_type = CLIArgumentType(options_list=['--rights'], nargs='+', arg_type=get_enum_type(AccessRights), help='Space-separated list of Authorization rule rights')
key_arg_type = CLIArgumentType(options_list=['--key'], arg_type=get_enum_type(KeyType), help='specifies Primary or Secondary key needs to be reset')
keyvalue_arg_type = CLIArgumentType(options_list=['--key-value'], help='Optional, if the key value provided, is set for KeyType or autogenerated Key value set for keyType.')
with self.argument_context('servicebus') as c:
c.argument('resource_group_name', arg_type=resource_group_name_type)
c.argument('namespace_name', options_list=['--namespace-name'], id_part='name', help='Name of Namespace')
with self.argument_context('servicebus namespace') as c:
c.argument('namespace_name', id_part='name', arg_type=name_type, completer=get_resource_name_completion_list('Microsoft.ServiceBus/namespaces'), help='Name of Namespace')
c.argument('default_action', help='Default action for network rule set.')
c.argument('tags', arg_type=tags_type)
c.argument('sku', arg_type=get_enum_type(SkuName), help='Namespace SKU.')
c.argument('capacity', type=int, choices=[1, 2, 4], help='Number of message units. This property is only applicable to namespaces of Premium SKU', validator=validate_premiumsku_capacity)
with self.argument_context('servicebus namespace exists') as c:
c.argument('name', arg_type=name_type, help='Namespace name. Name can contain only letters, numbers, and hyphens. The namespace must start with a letter, and it must end with a letter or number.')
with self.argument_context('servicebus namespace create') as c:
c.argument('location', arg_type=get_location_type(self.cli_ctx), validator=get_default_location_from_resource_group)
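    # Illustrative CLI usage enabled by the arguments above (a sketch; resource
    # names are hypothetical):
    #   az servicebus namespace create --resource-group MyRG --name mynamespace \
    #       --location westus --sku Standard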
# region Namespace Authorization Rule
with self.argument_context('servicebus namespace authorization-rule list') as c:
c.argument('namespace_name', options_list=['--namespace-name'], id_part=None, help='Name of the Namespace')
with self.argument_context('servicebus namespace authorization-rule') as c:
c.argument('authorization_rule_name', arg_type=name_type, id_part='child_name_1', help='Name of Namespace Authorization Rule')
c.argument('namespace_name', id_part='name', options_list=['--namespace-name'], help='Name of Namespace')
for scope in ['servicebus namespace authorization-rule create', 'servicebus namespace authorization-rule update', 'servicebus queue authorization-rule create', 'servicebus queue authorization-rule update', 'servicebus topic authorization-rule create', 'servicebus topic authorization-rule update']:
with self.argument_context(scope) as c:
c.argument('rights', arg_type=rights_arg_type)
with self.argument_context('servicebus namespace authorization-rule keys renew') as c:
c.argument('key_type', arg_type=key_arg_type)
c.argument('key', arg_type=keyvalue_arg_type)
with self.argument_context('servicebus namespace authorization-rule keys list') as c:
c.argument('authorization_rule_name', arg_type=name_type, id_part=None, help='Name of Namespace Authorization Rule')
c.argument('namespace_name', id_part=None, options_list=['--namespace-name'], help='Name of Namespace')
# region Queue
with self.argument_context('servicebus queue') as c:
c.argument('queue_name', arg_type=name_type, id_part='child_name_1', completer=get_queue_command_completion_list, help='Name of Queue')
# region - Queue Create
for scope in ['create', 'update']:
with self.argument_context('servicebus queue {}'.format(scope)) as c:
c.argument('queue_name', arg_type=name_type, id_part='child_name_1', help='Name of Queue')
c.argument('lock_duration', validator=_validate_lock_duration, help='String ISO 8601 timespan or duration format for duration of a peek-lock; that is, the amount of time that the message is locked for other receivers. The maximum value for LockDuration is 5 minutes; the default value is 1 minute.')
c.argument('max_size_in_megabytes', options_list=['--max-size'], type=int, choices=[1024, 2048, 3072, 4096, 5120, 10240, 20480, 40960, 81920], help='Maximum size of queue in megabytes, which is the size of the memory allocated for the queue. Default is 1024. Max for Standard SKU is 5120 and for Premium SKU is 81920')
c.argument('requires_duplicate_detection', options_list=['--enable-duplicate-detection'], arg_type=get_three_state_flag(), help='A boolean value indicating if this queue requires duplicate detection.')
c.argument('requires_session', options_list=['--enable-session'], arg_type=get_three_state_flag(), help='A boolean value indicating whether the queue supports the concept of sessions.')
c.argument('default_message_time_to_live', validator=_validate_default_message_time_to_live, help='ISO 8601 timespan or duration time format for default message to live value. This is the duration after which the message expires, starting from when the message is sent to Service Bus. This is the default value used when TimeToLive is not set on a message itself.')
c.argument('dead_lettering_on_message_expiration', options_list=['--enable-dead-lettering-on-message-expiration'], arg_type=get_three_state_flag(), help='A boolean value that indicates whether this queue has dead letter support when a message expires.')
c.argument('duplicate_detection_history_time_window', validator=_validate_duplicate_detection_history_time_window, help='ISO 8601 timeSpan structure that defines the duration of the duplicate detection history. The default value is 10 minutes.')
c.argument('max_delivery_count', type=int, help='The maximum delivery count. A message is automatically deadlettered after this number of deliveries. default value is 10.')
c.argument('status', arg_type=get_enum_type(['Active', 'Disabled', 'SendDisabled', 'ReceiveDisabled']), help='Enumerates the possible values for the status of a messaging entity.')
c.argument('auto_delete_on_idle', validator=_validate_auto_delete_on_idle, help='ISO 8601 timeSpan or duration time format for idle interval after which the queue is automatically deleted. The minimum duration is 5 minutes.')
c.argument('enable_partitioning', arg_type=get_three_state_flag(), help='A boolean value that indicates whether the queue is to be partitioned across multiple message brokers.')
c.argument('enable_express', arg_type=get_three_state_flag(), help='A boolean value that indicates whether Express Entities are enabled. An express queue holds a message in memory temporarily before writing it to persistent storage.')
c.argument('forward_to', help='Queue/Topic name to forward the messages')
c.argument('forward_dead_lettered_messages_to', help='Queue/Topic name to forward the Dead Letter message')
c.argument('enable_batched_operations', arg_type=get_three_state_flag(), help='Allow server-side batched operations.')
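            # Illustrative CLI usage for the queue arguments above (a sketch;
            # resource names are hypothetical):
            #   az servicebus queue create --resource-group MyRG --namespace-name mynamespace \
            #       --name myqueue --max-size 1024 --enable-session true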
with self.argument_context('servicebus queue list') as c:
c.argument('namespace_name', id_part=None, options_list=['--namespace-name'], help='Name of Namespace')
# region Queue Authorization Rule
with self.argument_context('servicebus queue authorization-rule') as c:
c.argument('authorization_rule_name', arg_type=name_type, id_part='child_name_2', help='Name of Queue Authorization Rule')
c.argument('queue_name', id_part='child_name_1', options_list=['--queue-name'], help='Name of Queue')
with self.argument_context('servicebus queue authorization-rule list') as c:
c.argument('namespace_name', id_part=None, options_list=['--namespace-name'], help='Name of Namespace')
c.argument('queue_name', id_part=None, options_list=['--queue-name'], help='Name of Queue')
with self.argument_context('servicebus queue authorization-rule keys renew') as c:
c.argument('key_type', arg_type=key_arg_type)
c.argument('key', arg_type=keyvalue_arg_type)
with self.argument_context('servicebus queue authorization-rule keys list') as c:
c.argument('authorization_rule_name', arg_type=name_type, id_part=None, help='Name of Queue Authorization Rule')
c.argument('queue_name', id_part=None, options_list=['--queue-name'], help='Name of Queue')
c.argument('namespace_name', id_part=None, options_list=['--namespace-name'], help='Name of Namespace')
# region - Topic
for scope in ['servicebus topic show', 'servicebus topic delete']:
with self.argument_context(scope) as c:
c.argument('topic_name', arg_type=name_type, id_part='child_name_1', completer=get_topic_command_completion_list, help='Name of Topic')
# region - Topic Create
for scope in ['create', 'update']:
with self.argument_context('servicebus topic {}'.format(scope)) as c:
c.argument('topic_name', arg_type=name_type, id_part='child_name_1', completer=get_topic_command_completion_list, help='Name of Topic')
c.argument('default_message_time_to_live', validator=_validate_default_message_time_to_live, help='ISO 8601 or duration time format for Default message timespan to live value. This is the duration after which the message expires, starting from when the message is sent to Service Bus. This is the default value used when TimeToLive is not set on a message itself.')
c.argument('max_size_in_megabytes', options_list=['--max-size'], type=int, choices=[1024, 2048, 3072, 4096, 5120, 10240, 20480, 40960, 81920], help='Maximum size of topic in megabytes, which is the size of the memory allocated for the topic. Default is 1024. Max for Standard SKU is 5120 and for Premium SKU is 81920')
c.argument('requires_duplicate_detection', options_list=['--enable-duplicate-detection'], arg_type=get_three_state_flag(), help='A boolean value indicating if this topic requires duplicate detection.')
c.argument('duplicate_detection_history_time_window', validator=_validate_duplicate_detection_history_time_window, help='ISO 8601 timespan or duration time format for structure that defines the duration of the duplicate detection history. The default value is 10 minutes.')
c.argument('enable_batched_operations', arg_type=get_three_state_flag(), help='Allow server-side batched operations.')
c.argument('status', arg_type=get_enum_type(['Active', 'Disabled', 'SendDisabled', 'ReceiveDisabled']), help='Enumerates the possible values for the status of a messaging entity.')
c.argument('support_ordering', options_list=['--enable-ordering'], arg_type=get_three_state_flag(), help='A boolean value that indicates whether the topic supports ordering.')
c.argument('auto_delete_on_idle', validator=_validate_auto_delete_on_idle, help='ISO 8601 timespan or duration time format for idle interval after which the topic is automatically deleted. The minimum duration is 5 minutes.')
c.argument('enable_partitioning', arg_type=get_three_state_flag(), help='A boolean value that indicates whether the topic to be partitioned across multiple message brokers is enabled.')
c.argument('enable_express', arg_type=get_three_state_flag(), help='A boolean value that indicates whether Express Entities are enabled. An express topic holds a message in memory temporarily before writing it to persistent storage.')
for scope in ['servicebus topic show', 'servicebus topic delete']:
with self.argument_context(scope) as c:
c.argument('topic_name', arg_type=name_type, id_part='child_name_1', completer=get_topic_command_completion_list, help='Name of Topic')
with self.argument_context('servicebus topic list') as c:
c.argument('namespace_name', id_part=None, options_list=['--namespace-name'], help='Name of Namespace')
# region Topic Authorization Rule
with self.argument_context('servicebus topic authorization-rule') as c:
c.argument('authorization_rule_name', arg_type=name_type, id_part='child_name_2', help='name of Topic Authorization Rule')
c.argument('topic_name', options_list=['--topic-name'], id_part='child_name_1', help='name of Topic')
with self.argument_context('servicebus topic authorization-rule list') as c:
c.argument('namespace_name', id_part=None, options_list=['--namespace-name'], help='Name of Namespace')
c.argument('topic_name', options_list=['--topic-name'], id_part=None, help='name of Topic')
with self.argument_context('servicebus topic authorization-rule keys renew') as c:
c.argument('key_type', arg_type=key_arg_type)
c.argument('key', arg_type=keyvalue_arg_type)
with self.argument_context('servicebus topic authorization-rule keys list') as c:
c.argument('namespace_name', id_part=None, options_list=['--namespace-name'], help='Name of Namespace')
c.argument('authorization_rule_name', arg_type=name_type, id_part=None, help='name of Topic Authorization Rule')
c.argument('topic_name', options_list=['--topic-name'], id_part=None, help='Name of Topic')
with self.argument_context('servicebus topic subscription') as c:
c.argument('subscription_name', arg_type=name_type, id_part='child_name_2', completer=get_subscriptions_command_completion_list, help='Name of Subscription')
c.argument('topic_name', id_part='child_name_1', options_list=['--topic-name'], help='Name of Topic')
# region - Subscription Create and update
for scope in ['create', 'update']:
with self.argument_context('servicebus topic subscription {}'.format(scope)) as c:
c.argument('lock_duration', validator=_validate_lock_duration, help='ISO 8601 or duration format (day:minute:seconds) for lock duration timespan for the subscription. The default value is 1 minute.')
c.argument('requires_session', options_list=['--enable-session'], arg_type=get_three_state_flag(), help='A boolean value indicating if a subscription supports the concept of sessions.')
c.argument('default_message_time_to_live', validator=_validate_default_message_time_to_live, help='ISO 8601 or duration time format for Default message timespan to live value. This is the duration after which the message expires, starting from when the message is sent to Service Bus. This is the default value used when TimeToLive is not set on a message itself.')
c.argument('dead_lettering_on_message_expiration', options_list=['--enable-dead-lettering-on-message-expiration'], arg_type=get_three_state_flag(), help='A boolean Value that indicates whether a subscription has dead letter support when a message expires.')
c.argument('max_delivery_count', type=int, help='Number of maximum deliveries.')
c.argument('status', arg_type=get_enum_type(['Active', 'Disabled', 'SendDisabled', 'ReceiveDisabled']), help='Enumerates the possible values for the status of a messaging entity.')
c.argument('enable_batched_operations', arg_type=get_three_state_flag(), help='Allow server-side batched operations.')
c.argument('auto_delete_on_idle', validator=_validate_auto_delete_on_idle, options_list=['--auto-delete-on-idle'], help='ISO 8601 timeSpan or duration time format for idle interval after which the topic is automatically deleted. The minimum duration is 5 minutes.')
c.argument('forward_to', help='Queue/Topic name to forward the messages')
c.argument('forward_dead_lettered_messages_to', help='Queue/Topic name to forward the Dead Letter message')
c.argument('dead_lettering_on_filter_evaluation_exceptions', options_list=['--dead-letter-on-filter-exceptions'], arg_type=get_three_state_flag(), help='Allow dead lettering when filter evaluation exceptions occur.')
with self.argument_context('servicebus topic subscription list') as c:
c.argument('namespace_name', options_list=['--namespace-name'], id_part=None, help='Name of Namespace')
c.argument('topic_name', options_list=['--topic-name'], id_part=None, help='Name of Topic')
# Region Subscription Rules
# Rules Create
with self.argument_context('servicebus topic subscription rule') as c:
c.argument('rule_name', arg_type=name_type, id_part='child_name_3', completer=get_rules_command_completion_list, help='Name of Rule')
c.argument('subscription_name', options_list=['--subscription-name'], id_part='child_name_2', help='Name of Subscription')
c.argument('topic_name', options_list=['--topic-name'], id_part='child_name_1', help='Name of Topic')
for scope in ['servicebus topic subscription rule create', 'servicebus topic subscription rule update']:
with self.argument_context(scope, arg_group='Action') as c:
c.argument('filter_type', arg_type=get_enum_type(FilterType), help='Rule Filter types')
c.argument('action_sql_expression', help='Action SQL expression.')
c.argument('action_compatibility_level', type=int, help='This property is reserved for future use. An integer value showing the compatibility level, currently hard-coded to 20.')
c.argument('action_requires_preprocessing', options_list=['--enable-action-preprocessing'], arg_type=get_three_state_flag(), help='A boolean value that indicates whether the rule action requires preprocessing.')
with self.argument_context(scope, arg_group='SQL Filter') as c:
c.argument('filter_sql_expression', help='SQL expression. e.g. myproperty=test')
c.argument('filter_requires_preprocessing', options_list=['--enable-sql-preprocessing'], arg_type=get_three_state_flag(), help='A boolean value that indicates whether the rule action requires preprocessing.')
with self.argument_context(scope, arg_group='Correlation Filter') as c:
c.argument('correlation_id', help='Identifier of correlation.')
c.argument('message_id', help='Identifier of message.')
c.argument('to', help='Address to send to.')
c.argument('reply_to', help='Address of the queue to reply to.')
c.argument('label', help='Application specific label.')
c.argument('session_id', help='Session identifier')
c.argument('reply_to_session_id', help='Session identifier to reply to.')
c.argument('content_type', help='Content type of message.')
c.argument('requires_preprocessing', options_list=['--enable-correlation-preprocessing'], arg_type=get_three_state_flag(), help='A boolean value that indicates whether the rule action requires preprocessing.')
with self.argument_context('servicebus topic subscription rule list') as c:
c.argument('subscription_name', options_list=['--subscription-name'], id_part=None, help='Name of Subscription')
c.argument('topic_name', options_list=['--topic-name'], id_part=None, help='Name of Topic')
c.argument('namespace_name', options_list=['--namespace-name'], id_part=None, help='Name of Namespace')
# Geo DR - Disaster Recovery Configs - Alias : Region
with self.argument_context('servicebus georecovery-alias exists') as c:
c.argument('name', options_list=['--alias', '-a'], arg_type=name_type, help='Name of Geo-Disaster Recovery Configuration Alias to check availability')
c.argument('namespace_name', options_list=['--namespace-name'], id_part=None, help='Name of Namespace')
with self.argument_context('servicebus georecovery-alias') as c:
c.argument('alias', options_list=['--alias', '-a'], id_part='child_name_1', help='Name of the Geo-Disaster Recovery Configuration Alias')
with self.argument_context('servicebus georecovery-alias set') as c:
c.argument('partner_namespace', required=True, options_list=['--partner-namespace'], validator=validate_partner_namespace, help='Name (if within the same resource group) or ARM Id of Primary/Secondary Service Bus namespace name, which is part of GEO DR pairing')
c.argument('alternate_name', help='Alternate Name (Post failover) for Primary Namespace, when Namespace name and Alias name are same')
for scope in ['servicebus georecovery-alias authorization-rule show', 'servicebus georecovery-alias authorization-rule keys list']:
        with self.argument_context(scope) as c:
c.argument('authorization_rule_name', arg_type=name_type, id_part='child_name_2', help='name of Namespace Authorization Rule')
with self.argument_context('servicebus georecovery-alias list') as c:
c.argument('namespace_name', options_list=['--namespace-name'], id_part=None, help='Name of Namespace')
with self.argument_context('servicebus georecovery-alias authorization-rule list') as c:
c.argument('alias', options_list=['--alias', '-a'], help='Name of Geo-Disaster Recovery Configuration Alias')
c.argument('namespace_name', options_list=['--namespace-name'], id_part=None, help='Name of Namespace')
with self.argument_context('servicebus georecovery-alias authorization-rule keys list') as c:
c.argument('alias', options_list=['--alias', '-a'], id_part=None, help='Name of Geo-Disaster Recovery Configuration Alias')
c.argument('namespace_name', options_list=['--namespace-name'], id_part=None, help='Name of Namespace')
c.argument('authorization_rule_name', arg_type=name_type, help='Name of Namespace AuthorizationRule')
# Standard to Premium Migration: Region
with self.argument_context('servicebus migration start') as c:
c.argument('namespace_name', arg_type=name_type, help='Name of Standard Namespace used as source of the migration')
c.argument('target_namespace', options_list=['--target-namespace'], validator=validate_target_namespace, help='Name (if within the same resource group) or ARM Id of empty Premium Service Bus namespace name that will be target of the migration')
c.argument('post_migration_name', options_list=['--post-migration-name'], help='Post migration name is the name that can be used to connect to standard namespace after migration is complete.')
for scope in ['show', 'complete', 'abort']:
with self.argument_context('servicebus migration {}'.format(scope)) as c:
c.argument('namespace_name', arg_type=name_type, help='Name of Standard Namespace')
# Region Namespace NetworkRuleSet
with self.argument_context('servicebus namespace network-rule') as c:
c.argument('namespace_name', options_list=['--namespace-name'], id_part=None, help='Name of the Namespace')
for scope in ['servicebus namespace network-rule add', 'servicebus namespace network-rule remove']:
with self.argument_context(scope) as c:
c.argument('subnet', arg_group='Virtual Network Rule', options_list=['--subnet'], help='Name or ID of subnet. If name is supplied, `--vnet-name` must be supplied.')
c.argument('ip_mask', arg_group='IP Address Rule', options_list=['--ip-address'], help='IPv4 address or CIDR range.')
c.argument('namespace_name', options_list=['--namespace-name'], id_part=None, help='Name of the Namespace')
c.extra('vnet_name', arg_group='Virtual Network Rule', options_list=['--vnet-name'], help='Name of the Virtual Network')
with self.argument_context('servicebus namespace network-rule add') as c:
c.argument('ignore_missing_vnet_service_endpoint', arg_group='Virtual Network Rule', options_list=['--ignore-missing-endpoint'], arg_type=get_three_state_flag(), help='A boolean value that indicates whether to ignore missing vnet Service Endpoint')
c.argument('action', arg_group='IP Address Rule', options_list=['--action'], arg_type=get_enum_type(['Allow']), help='Action of the IP rule')
|
PypiClean
|
/LWTools-1.0.5.tar.gz/LWTools-1.0.5/LWT/lmtanalysis/BuildEventNest3.py
|
'''
Created on 6 sept. 2017
@author: Fab
'''
import sqlite3
from time import *
from lmtanalysis.Chronometer import Chronometer
from lmtanalysis.Animal import *
from lmtanalysis.Detection import *
from lmtanalysis.Measure import *
import matplotlib.pyplot as plt
import numpy as np
from lmtanalysis.Event import *
from lmtanalysis.Measure import *
from lmtanalysis.EventTimeLineCache import EventTimeLineCached
import networkx as nx
def flush( connection ):
''' flush event in database '''
deleteEventTimeLineInBase(connection, "Nest3_" )
def reBuildEvent( connection, file, tmin=None, tmax=None , pool = None ):
'''
Nest 3
'''
print("[NEST 3] : Assume that there is no occlusion, does not work with anonymous animals")
if ( pool == None ):
pool = AnimalPool( )
pool.loadAnimals( connection )
pool.loadDetection( start = tmin, end = tmax , lightLoad=True )
if ( len ( pool.getAnimalList() ) != 4 ):
print( "[NEST3 Cancelled] 4 animals are required to build nest3.")
return
contact = {}
for idAnimalA in range( 1 , 5 ):
for idAnimalB in range( 1 , 5 ):
if idAnimalA != idAnimalB:
contact[idAnimalA, idAnimalB] = EventTimeLineCached( connection, file, "Contact", idAnimalA, idAnimalB, minFrame=tmin, maxFrame=tmax ).getDictionnary()
stopDictionnary = {}
for idAnimalA in range( 1 , 5 ):
stopDictionnary[idAnimalA] = EventTimeLineCached(
connection, file, "Stop", idA=idAnimalA, minFrame=tmin, maxFrame=tmax ).getDictionnary()
nest3TimeLine = {}
for idAnimalA in range( 1 , 5 ):
# the id will be the one excluded from nest.
nest3TimeLine[idAnimalA] = EventTimeLine( None, "Nest3_" , idA = idAnimalA , loadEvent=False )
pool.loadAnonymousDetection()
animalList = pool.getAnimalList()
result = {}
for idAnimalA in range( 1 , 5 ):
result[idAnimalA] = {}
for t in range( tmin, tmax+1 ):
isNest = False
nbAnimalAtT = 0
animalDetectedList = []
anonymousDetectionList = pool.getAnonymousDetection( t )
for animal in animalList:
if t in animal.detectionDictionnary:
nbAnimalAtT+=1
animalDetectedList.append( animal )
#print( str(t) + " : " + str( nbAnimalAtT ) )
#print("TEST")
graph = nx.Graph()
# add nodes
for animal in animalDetectedList:
graph.add_node( animal )
for animalA in animalDetectedList:
for animalB in animalDetectedList:
if animalA != animalB:
# add an edge
if t in contact[animalA.baseId,animalB.baseId]:
graph.add_edge( animalA, animalB )
# check with anonymous detection. Check contact
        if anonymousDetectionList is not None:
# manage anonymous
# print( t , "manage anonymous")
'''
# load all masks
for animal in animalDetectedList:
animal.loadMask( t )
'''
for detectionA in anonymousDetectionList: # anonymous with anonymous
for detectionB in anonymousDetectionList: # anonymous with anonymous
if detectionA != detectionB:
distance = detectionA.getDistanceTo( detectionB )
if distance != None:
if distance < DISTANCE_CONTACT_MASS_CENTER:
graph.add_edge( detectionA, detectionB )
# print("Adding edge with mask (det anonymous to det anonymous)")
for detection in anonymousDetectionList:
for animal in animalDetectedList:
distance = detection.getDistanceTo(animal.getDetectionAt( t ) )
if distance != None:
if distance < DISTANCE_CONTACT_MASS_CENTER:
#if detection.getMask().isInContactWithMask( animal.getDetectionAt ( t ).getMask() ):
graph.add_edge( animal, detection )
# print("Adding edge with mask")
# list of CC from the biggest to the smallest
listCC = sorted(nx.connected_components( graph ), key=len, reverse=True)
if ( len( listCC ) == 2 ): # we have 2 groups
# check if animals in the biggest group are stopped.
allStoppedInBiggestGroup = True
for animal in list( listCC[0] ):
if isinstance( animal , Animal ):
if not ( t in stopDictionnary[animal.baseId] ):
allStoppedInBiggestGroup = False
break
if allStoppedInBiggestGroup:
if ( len( listCC[1] ) == 1 ): # the 2nd group (and the smallest) has only one mouse
animal = list(listCC[1])[0]
if isinstance( animal , Animal ):
result[ animal.baseId ][ t ] = True
for idAnimalA in range( 1 , 5 ):
# the id will be the one excluded from nest.
nest3TimeLine[idAnimalA].reBuildWithDictionnary( result[idAnimalA] )
# remove very small events
nest3TimeLine[idAnimalA].removeEventsBelowLength( 2 )
# merge flashing events
nest3TimeLine[idAnimalA].mergeCloseEvents( 3 )
nest3TimeLine[idAnimalA].endRebuildEventTimeLine(connection)
# log process
from lmtanalysis.TaskLogger import TaskLogger
t = TaskLogger( connection )
t.addLog( "Build Event Nest3" , tmin=tmin, tmax=tmax )
print( "Rebuild event finished." )
|
PypiClean
|
/stem4tob-1.7.1.post1.tar.gz/stem4tob-1.7.1.post1/test/runner.py
|
import logging
import os
import shutil
import stat
import tempfile
import threading
import time
import uuid
import stem.connection
import stem.prereq
import stem.process
import stem.socket
import stem.util.conf
import stem.util.enum
import test
from test.output import println, STATUS, ERROR, SUBSTATUS, NO_NL
CONFIG = stem.util.conf.config_dict('test', {
'integ.torrc': '',
'integ.extra_torrc': '',
'integ.test_directory': './test/data',
'integ.log': './test/data/log',
'target.torrc': {},
})
SOCKS_PORT = 1112
ORPORT = 1113
# singleton Runner instance
INTEG_RUNNER = None
# control authentication options and attributes
CONTROL_PASSWORD = 'pw'
CONTROL_PORT = 1111
CONTROL_SOCKET_PATH = os.path.join(tempfile.gettempdir(), str(uuid.uuid4()), 'socket')
Torrc = stem.util.enum.Enum(
('PORT', 'ControlPort %i' % CONTROL_PORT),
('COOKIE', 'CookieAuthentication 1'),
('PASSWORD', 'HashedControlPassword 16:8C423A41EF4A542C6078985270AE28A4E04D056FB63F9F201505DB8E06'),
('SOCKET', 'ControlSocket %s' % CONTROL_SOCKET_PATH),
('PTRACE', 'DisableDebuggerAttachment 0'),
)
class RunnerStopped(Exception):
"Raised when we try to use a Runner that doesn't have an active tor instance"
class TorInaccessable(Exception):
'Raised when information is needed from tor but the instance we have is inaccessible'
def exercise_controller(test_case, controller):
"""
Checks that we can now use the socket by issuing a 'GETINFO config-file'
query. Controller can be either a :class:`stem.socket.ControlSocket` or
:class:`stem.control.BaseController`.
:param unittest.TestCase test_case: test being ran
:param controller: tor controller connection to be authenticated
"""
runner = get_runner()
torrc_path = runner.get_torrc_path()
if isinstance(controller, stem.socket.ControlSocket):
controller.send('GETINFO config-file')
config_file_response = controller.recv()
else:
config_file_response = controller.msg('GETINFO config-file')
test_case.assertEqual('config-file=%s\nOK' % torrc_path, str(config_file_response))
def get_runner():
"""
Singleton for the runtime context of integration tests.
:returns: :class:`test.runner.Runner` with context for our integration tests
"""
global INTEG_RUNNER
if not INTEG_RUNNER:
INTEG_RUNNER = Runner()
return INTEG_RUNNER
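# Illustrative usage (a sketch): integration tests grab the singleton and open
# an authenticated controller against the test tor instance.
#
#   runner = get_runner()
#   if runner.is_accessible():
#     controller = runner.get_tor_controller()
#     print(controller.get_info('version'))
#     controller.close()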
class _MockChrootFile(object):
"""
Wrapper around a file object that strips given content from readline()
responses. This is used to simulate a chroot setup by removing the prefix
directory from the paths we report.
"""
def __init__(self, wrapped_file, strip_text):
self.wrapped_file = wrapped_file
self.strip_text = strip_text
def readline(self):
return self.wrapped_file.readline().replace(self.strip_text, '')
class Runner(object):
def __init__(self):
self.attribute_targets = []
self._runner_lock = threading.RLock()
# runtime attributes, set by the start method
self._test_dir = ''
self._tor_cmd = None
self._tor_cwd = ''
self._torrc_contents = ''
self._custom_opts = []
self._tor_process = None
self._chroot_path = None
# set if we monkey patch stem.socket.recv_message()
self._original_recv_message = None
# The first controller to attach takes ownership so tor will promptly
# terminate if the tests do. As such we need to ensure that first
# connection is our runner's.
self._owner_controller = None
def start(self, config_target, attribute_targets, tor_cmd):
"""
Makes temporary testing resources and starts tor, blocking until it
completes.
:param str config_target: **Target** for this test run's torrc settings
:param list attribute_targets: **Targets** for our non-configuration attributes
:param str tor_cmd: command to start tor with
:raises: OSError if unable to run test preparations or start tor
"""
with self._runner_lock:
self.attribute_targets = attribute_targets
# if we're holding on to a tor process (running or not) then clean up after
# it so we can start a fresh instance
if self._tor_process:
self.stop()
println('Setting up a test instance...', STATUS)
# if 'test_directory' is unset then we make a new data directory in /tmp
# and clean it up when we're done
config_test_dir = CONFIG['integ.test_directory']
if config_test_dir:
self._test_dir = stem.util.system.expand_path(config_test_dir, test.STEM_BASE)
else:
self._test_dir = tempfile.mktemp('-stem-integ')
original_cwd, data_dir_path = os.getcwd(), self._test_dir
self._tor_cmd = stem.util.system.expand_path(tor_cmd) if os.path.sep in tor_cmd else tor_cmd
if test.Target.RELATIVE in self.attribute_targets:
tor_cwd = os.path.dirname(self._test_dir)
if not os.path.exists(tor_cwd):
os.makedirs(tor_cwd)
os.chdir(tor_cwd)
data_dir_path = './%s' % os.path.basename(self._test_dir)
config_csv = CONFIG['target.torrc'].get(config_target)
target_torrc_opts = []
if config_csv:
for opt in config_csv.split(','):
opt = opt.strip()
if opt in Torrc.keys():
target_torrc_opts.append(Torrc[opt])
else:
raise ValueError("'%s' isn't a test.runner.Torrc enumeration" % opt)
self._custom_opts = target_torrc_opts
self._torrc_contents = CONFIG['integ.torrc']
if target_torrc_opts:
self._torrc_contents += '\n\n# Torrc options for the %s target\n\n' % config_target
self._torrc_contents += '\n'.join(target_torrc_opts)
if CONFIG['integ.extra_torrc']:
self._torrc_contents += '\n\n# Torrc options from %s\n\n' % os.environ['STEM_TEST_CONFIG']
self._torrc_contents += CONFIG['integ.extra_torrc']
self._torrc_contents = self._torrc_contents.replace('[DATA_DIR]', data_dir_path)
self._torrc_contents = self._torrc_contents.replace('[SOCKS_PORT]', str(SOCKS_PORT))
self._torrc_contents = self._torrc_contents.replace('[OR_PORT]', str(ORPORT))
try:
self._tor_cwd = os.getcwd()
self._run_setup()
self._start_tor(self._tor_cmd)
# strip the testing directory from recv_message responses if we're
# simulating a chroot setup
if test.Target.CHROOT in self.attribute_targets and not self._original_recv_message:
# TODO: when we have a function for telling stem the chroot we'll
# need to set that too
self._original_recv_message = stem.socket.recv_message
self._chroot_path = data_dir_path
def _chroot_recv_message(control_file):
return self._original_recv_message(_MockChrootFile(control_file, data_dir_path))
stem.socket.recv_message = _chroot_recv_message
if self.is_accessible():
self._owner_controller = self.get_tor_controller(True)
if test.Target.RELATIVE in self.attribute_targets:
os.chdir(original_cwd) # revert our cwd back to normal
except OSError as exc:
raise exc
def stop(self):
"""
Stops our tor test instance and cleans up any temporary resources.
"""
with self._runner_lock:
println('Shutting down tor... ', STATUS, NO_NL)
if self._owner_controller:
self._owner_controller.close()
self._owner_controller = None
if self._tor_process:
# if the tor process has stopped on its own then the following raises
# an OSError ([Errno 3] No such process)
try:
self._tor_process.kill()
except OSError:
pass
self._tor_process.stdout.close()
self._tor_process.stderr.close()
self._tor_process.wait() # blocks until the process is done
# if we've made a temporary data directory then clean it up
if self._test_dir and CONFIG['integ.test_directory'] == '':
shutil.rmtree(self._test_dir, ignore_errors = True)
# reverts any mocking of stem.socket.recv_message
if self._original_recv_message:
stem.socket.recv_message = self._original_recv_message
self._original_recv_message = None
# clean up our socket directory if we made one
socket_dir = os.path.dirname(CONTROL_SOCKET_PATH)
if os.path.exists(socket_dir):
shutil.rmtree(socket_dir, ignore_errors = True)
self._test_dir = ''
self._tor_cmd = None
self._tor_cwd = ''
self._torrc_contents = ''
self._custom_opts = []
self._tor_process = None
println('done', STATUS)
def is_running(self):
"""
Checks if we're running a tor test instance and that it's alive.
:returns: True if we have a running tor test instance, False otherwise
"""
with self._runner_lock:
# Check for an unexpected shutdown by calling subprocess.Popen.poll(),
# which returns the exit code or None if we're still running.
if self._tor_process and self._tor_process.poll() is not None:
# clean up the temporary resources and note the unexpected shutdown
self.stop()
println('tor shut down unexpectedly', ERROR)
return bool(self._tor_process)
def is_accessible(self):
"""
Checks if our tor instance has a method of being connected to or not.
:returns: True if tor has a control socket or port, False otherwise
"""
return Torrc.PORT in self._custom_opts or Torrc.SOCKET in self._custom_opts
def get_options(self):
"""
Provides the custom torrc options our tor instance is running with.
:returns: list of Torrc enumerations being used by our test instance
"""
return self._custom_opts
def get_test_dir(self, resource = None):
"""
Provides the absolute path for our testing directory or a file within it.
:param str resource: file within our test directory to provide the path for
:returns: str with our test directory's absolute path or that of a file within it
:raises: :class:`test.runner.RunnerStopped` if we aren't running
"""
if resource:
return os.path.join(self._get('_test_dir'), resource)
else:
return self._get('_test_dir')
def get_torrc_path(self, ignore_chroot = False):
"""
Provides the absolute path for where our testing torrc resides.
:param bool ignore_chroot: provides the real path, rather than the one that tor expects if True
:returns: str with our torrc path
:raises: RunnerStopped if we aren't running
"""
test_dir = self._get('_test_dir')
torrc_path = os.path.join(test_dir, 'torrc')
if not ignore_chroot and self._chroot_path and torrc_path.startswith(self._chroot_path):
torrc_path = torrc_path[len(self._chroot_path):]
return torrc_path
def get_torrc_contents(self):
"""
Provides the contents of our torrc.
:returns: str with the contents of our torrc, lines are newline separated
:raises: :class:`test.runner.RunnerStopped` if we aren't running
"""
return self._get('_torrc_contents')
def get_auth_cookie_path(self):
"""
Provides the absolute path for our authentication cookie if we have one.
    If running with an emulated chroot this is unaffected, still providing the
real path.
:returns: str with our auth cookie path
:raises: :class:`test.runner.RunnerStopped` if we aren't running
"""
test_dir = self._get('_test_dir')
return os.path.join(test_dir, 'control_auth_cookie')
def get_tor_cwd(self):
"""
Provides the current working directory of our tor process.
"""
return self._get('_tor_cwd')
def get_chroot(self):
"""
Provides the path we're using to emulate a chroot environment. This is None
if we aren't emulating a chroot setup.
:returns: str with the path of our emulated chroot
"""
return self._chroot_path
def get_pid(self):
"""
Provides the process id of the tor process.
:returns: int pid for the tor process
:raises: :class:`test.runner.RunnerStopped` if we aren't running
"""
tor_process = self._get('_tor_process')
return tor_process.pid
def get_tor_socket(self, authenticate = True):
"""
Provides a socket connected to our tor test instance.
:param bool authenticate: if True then the socket is authenticated
:returns: :class:`stem.socket.ControlSocket` connected with our testing instance
:raises: :class:`test.runner.TorInaccessable` if tor can't be connected to
"""
if Torrc.PORT in self._custom_opts:
control_socket = stem.socket.ControlPort(port = CONTROL_PORT)
elif Torrc.SOCKET in self._custom_opts:
control_socket = stem.socket.ControlSocketFile(CONTROL_SOCKET_PATH)
else:
raise TorInaccessable('Unable to connect to tor')
if authenticate:
stem.connection.authenticate(control_socket, CONTROL_PASSWORD, self.get_chroot())
return control_socket
def get_tor_controller(self, authenticate = True):
"""
Provides a controller connected to our tor test instance.
:param bool authenticate: if True then the socket is authenticated
    :returns: :class:`stem.control.Controller` connected with our testing instance
    :raises: :class:`test.runner.TorInaccessable` if tor can't be connected to
"""
control_socket = self.get_tor_socket(False)
controller = stem.control.Controller(control_socket)
if authenticate:
controller.authenticate(password = CONTROL_PASSWORD, chroot_path = self.get_chroot())
return controller
def get_tor_command(self, base_cmd = False):
"""
Provides the command used to run our tor instance.
:param bool base_cmd: provides just the command name if true rather than
the full '--tor path' argument
"""
return os.path.basename(self._get('_tor_cmd')) if base_cmd else self._get('_tor_cmd')
def assert_tor_is_running(self):
"""
Checks if our tor process is running. If not, this prints an error and
provides **False**.
"""
if not self._tor_process:
println('Tor process failed to initialize', ERROR)
return False
process_status = self._tor_process.poll() # None if running
if process_status is None:
return True
else:
process_output = stem.util.str_tools._to_unicode(self._tor_process.stdout.read() + b'\n\n' + self._tor_process.stderr.read()).strip()
println('\n%s\nOur tor process ended prematurely with exit status %s\n%s\n\n%s' % ('=' * 60, process_status, '=' * 60, process_output), ERROR)
return False
def _get(self, attr):
"""
Fetches one of our attributes in a thread safe manner, raising if we aren't
running.
:param str attr: class variable that we want to fetch
:returns: value of the fetched variable
    :raises: :class:`test.runner.RunnerStopped` if we aren't running
"""
with self._runner_lock:
if self.is_running():
return self.__dict__[attr]
else:
raise RunnerStopped()
def _run_setup(self):
"""
    Makes temporary runtime resources for our integration test instance.
:raises: OSError if unsuccessful
"""
# makes a temporary data directory if needed
try:
println(' making test directory (%s)... ' % self._test_dir, STATUS, NO_NL)
if os.path.exists(self._test_dir):
println('skipped', STATUS)
else:
os.makedirs(self._test_dir)
println('done', STATUS)
except OSError as exc:
println('failed (%s)' % exc, ERROR)
raise exc
# Tor checks during startup that the directory a control socket resides in
# is only accessible by the tor user (and refuses to finish starting if it
# isn't).
if Torrc.SOCKET in self._custom_opts:
try:
socket_dir = os.path.dirname(CONTROL_SOCKET_PATH)
println(' making control socket directory (%s)... ' % socket_dir, STATUS, NO_NL)
if os.path.exists(socket_dir) and stat.S_IMODE(os.stat(socket_dir).st_mode) == 0o700:
println('skipped', STATUS)
else:
if not os.path.exists(socket_dir):
os.makedirs(socket_dir)
os.chmod(socket_dir, 0o700)
println('done', STATUS)
except OSError as exc:
println('failed (%s)' % exc, ERROR)
raise exc
# configures logging
logging_path = CONFIG['integ.log']
if logging_path:
logging_path = stem.util.system.expand_path(logging_path, test.STEM_BASE)
println(' configuring logger (%s)... ' % logging_path, STATUS, NO_NL)
# delete the old log
if os.path.exists(logging_path):
os.remove(logging_path)
logging.basicConfig(
filename = logging_path,
level = logging.DEBUG,
format = '%(asctime)s [%(levelname)s] %(message)s',
datefmt = '%D %H:%M:%S',
)
println('done', STATUS)
else:
println(' configuring logger... skipped', STATUS)
# writes our testing torrc
torrc_dst = os.path.join(self._test_dir, 'torrc')
try:
println(' writing torrc (%s)... ' % torrc_dst, STATUS, NO_NL)
torrc_file = open(torrc_dst, 'w')
torrc_file.write(self._torrc_contents)
torrc_file.close()
println('done', STATUS)
for line in self._torrc_contents.strip().splitlines():
println(' %s' % line.strip(), SUBSTATUS)
println()
except Exception as exc:
println('failed (%s)\n' % exc, ERROR)
raise OSError(exc)
def _start_tor(self, tor_cmd):
"""
Initializes a tor process. This blocks until initialization completes or we
error out.
:param str tor_cmd: command to start tor with
:raises: OSError if we either fail to create the tor process or reached a timeout without success
"""
println('Starting %s...\n' % tor_cmd, STATUS)
start_time = time.time()
try:
self._tor_process = stem.process.launch_tor(
tor_cmd = tor_cmd,
torrc_path = os.path.join(self._test_dir, 'torrc'),
completion_percent = 100 if test.Target.ONLINE in self.attribute_targets else 0,
init_msg_handler = lambda line: println(' %s' % line, SUBSTATUS),
take_ownership = True,
close_output = False,
)
runtime = time.time() - start_time
println(' done (%i seconds)\n' % runtime, STATUS)
except OSError as exc:
println(' failed to start tor: %s\n' % exc, ERROR)
raise exc
|
PypiClean
|
/graphene-django-patch-2.15.0.0.tar.gz/graphene-django-patch-2.15.0.0/graphene_django/converter.py
|
from collections import OrderedDict
from django.db import models
from django.utils.encoding import force_str
from django.utils.module_loading import import_string
from graphene import (
ID,
Boolean,
Dynamic,
Enum,
Field,
Float,
Int,
List,
NonNull,
String,
UUID,
DateTime,
Date,
Time,
Decimal,
)
from graphene.types.json import JSONString
from graphene.utils.str_converters import to_camel_case
from graphql import assert_valid_name
from .settings import graphene_settings
from .compat import ArrayField, HStoreField, JSONField, PGJSONField, RangeField
from .fields import DjangoListField, DjangoConnectionField
from .utils import import_single_dispatch
from .utils.str_converters import to_const
singledispatch = import_single_dispatch()
def convert_choice_name(name):
name = to_const(force_str(name))
try:
assert_valid_name(name)
except AssertionError:
name = "A_%s" % name
return name
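# A small illustration (hypothetical values, behavior inferred from to_const
# and assert_valid_name): values that would not be valid GraphQL names get an
# "A_" prefix.
#
#   convert_choice_name("published")  # -> "PUBLISHED"
#   convert_choice_name("2nd")        # -> "A_2ND"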
def get_choices(choices):
converted_names = []
if isinstance(choices, OrderedDict):
choices = choices.items()
for value, help_text in choices:
if isinstance(help_text, (tuple, list)):
for choice in get_choices(help_text):
yield choice
else:
name = convert_choice_name(value)
while name in converted_names:
name += "_" + str(len(converted_names))
converted_names.append(name)
description = help_text
yield name, value, description
def convert_choices_to_named_enum_with_descriptions(name, choices):
choices = list(get_choices(choices))
named_choices = [(c[0], c[1]) for c in choices]
named_choices_descriptions = {c[0]: c[2] for c in choices}
class EnumWithDescriptionsType(object):
@property
def description(self):
return named_choices_descriptions[self.name]
return Enum(name, list(named_choices), type=EnumWithDescriptionsType)
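# A minimal usage sketch (hypothetical choices): get_choices flattens any
# grouped choices, and each enum member's description comes from the Django
# display label.
#
#   STATUS_CHOICES = [("draft", "Draft"), ("published", "Published")]
#   StatusEnum = convert_choices_to_named_enum_with_descriptions(
#       "ArticleStatus", STATUS_CHOICES
#   )
#   StatusEnum.DRAFT.value        # -> "draft"
#   StatusEnum.DRAFT.description  # -> "Draft"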
def generate_enum_name(django_model_meta, field):
if graphene_settings.DJANGO_CHOICE_FIELD_ENUM_CUSTOM_NAME:
# Try and import custom function
custom_func = import_string(
graphene_settings.DJANGO_CHOICE_FIELD_ENUM_CUSTOM_NAME
)
name = custom_func(field)
elif graphene_settings.DJANGO_CHOICE_FIELD_ENUM_V3_NAMING is True:
name = "{app_label}{object_name}{field_name}Choices".format(
app_label=to_camel_case(django_model_meta.app_label.title()),
object_name=django_model_meta.object_name,
field_name=to_camel_case(field.name.title()),
)
else:
name = to_camel_case("{}_{}".format(django_model_meta.object_name, field.name))
return name
def convert_choice_field_to_enum(field, name=None):
if name is None:
name = generate_enum_name(field.model._meta, field)
choices = field.choices
return convert_choices_to_named_enum_with_descriptions(name, choices)
def convert_django_field_with_choices(
field, registry=None, convert_choices_to_enum=True
):
if registry is not None:
converted = registry.get_converted_field(field)
if converted:
return converted
choices = getattr(field, "choices", None)
if choices and convert_choices_to_enum:
enum = convert_choice_field_to_enum(field)
required = not (field.blank or field.null)
converted = enum(description=field.help_text, required=required)
else:
converted = convert_django_field(field, registry)
if registry is not None:
registry.register_converted_field(field, converted)
return converted
@singledispatch
def convert_django_field(field, registry=None):
raise Exception(
"Don't know how to convert the Django field %s (%s)" % (field, field.__class__)
)
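# The converters below attach to convert_django_field via singledispatch, so
# lookup follows the field's class (including subclasses). A hedged sketch of
# registering a converter for a custom field type (hypothetical MyField):
#
#   @convert_django_field.register(MyField)
#   def convert_my_field(field, registry=None):
#       return String(description=field.help_text, required=not field.null)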
@convert_django_field.register(models.CharField)
@convert_django_field.register(models.TextField)
@convert_django_field.register(models.EmailField)
@convert_django_field.register(models.SlugField)
@convert_django_field.register(models.URLField)
@convert_django_field.register(models.GenericIPAddressField)
@convert_django_field.register(models.FileField)
@convert_django_field.register(models.FilePathField)
def convert_field_to_string(field, registry=None):
return String(description=field.help_text, required=not field.null)
@convert_django_field.register(models.AutoField)
def convert_field_to_id(field, registry=None):
return ID(description=field.help_text, required=not field.null)
@convert_django_field.register(models.UUIDField)
def convert_field_to_uuid(field, registry=None):
return UUID(description=field.help_text, required=not field.null)
@convert_django_field.register(models.PositiveIntegerField)
@convert_django_field.register(models.PositiveSmallIntegerField)
@convert_django_field.register(models.SmallIntegerField)
@convert_django_field.register(models.BigIntegerField)
@convert_django_field.register(models.IntegerField)
def convert_field_to_int(field, registry=None):
return Int(description=field.help_text, required=not field.null)
@convert_django_field.register(models.NullBooleanField)
@convert_django_field.register(models.BooleanField)
def convert_field_to_boolean(field, registry=None):
return Boolean(description=field.help_text, required=not field.null)
@convert_django_field.register(models.DecimalField)
def convert_field_to_decimal(field, registry=None):
return Decimal(description=field.help_text, required=not field.null)
@convert_django_field.register(models.FloatField)
@convert_django_field.register(models.DurationField)
def convert_field_to_float(field, registry=None):
return Float(description=field.help_text, required=not field.null)
@convert_django_field.register(models.DateTimeField)
def convert_datetime_to_string(field, registry=None):
return DateTime(description=field.help_text, required=not field.null)
@convert_django_field.register(models.DateField)
def convert_date_to_string(field, registry=None):
return Date(description=field.help_text, required=not field.null)
@convert_django_field.register(models.TimeField)
def convert_time_to_string(field, registry=None):
return Time(description=field.help_text, required=not field.null)
@convert_django_field.register(models.OneToOneRel)
def convert_onetoone_field_to_djangomodel(field, registry=None):
model = field.related_model
def dynamic_type():
_type = registry.get_type_for_model(model)
if not _type:
return
# We do this because of a bug in Django 1.8, where the null attr
# is not available on the OneToOneRel instance
null = getattr(field, "null", True)
return Field(_type, required=not null)
return Dynamic(dynamic_type)
@convert_django_field.register(models.ManyToManyField)
@convert_django_field.register(models.ManyToManyRel)
@convert_django_field.register(models.ManyToOneRel)
def convert_field_to_list_or_connection(field, registry=None):
model = field.related_model
def dynamic_type():
_type = registry.get_type_for_model(model)
if not _type:
return
description = (
field.help_text
if isinstance(field, models.ManyToManyField)
else field.field.help_text
)
# If there is a connection, we should transform the field
# into a DjangoConnectionField
if _type._meta.connection:
# Use a DjangoFilterConnectionField if there are
# defined filter_fields or a filterset_class in the
# DjangoObjectType Meta
if _type._meta.filter_fields or _type._meta.filterset_class:
from .filter.fields import DjangoFilterConnectionField
return DjangoFilterConnectionField(
_type, required=True, description=description
)
return DjangoConnectionField(_type, required=True, description=description)
return DjangoListField(
_type,
required=True, # A Set is always returned, never None.
description=description,
)
return Dynamic(dynamic_type)
@convert_django_field.register(models.OneToOneField)
@convert_django_field.register(models.ForeignKey)
def convert_field_to_djangomodel(field, registry=None):
model = field.related_model
def dynamic_type():
_type = registry.get_type_for_model(model)
if not _type:
return
return Field(_type, description=field.help_text, required=not field.null)
return Dynamic(dynamic_type)
@convert_django_field.register(ArrayField)
def convert_postgres_array_to_list(field, registry=None):
inner_type = convert_django_field(field.base_field)
if not isinstance(inner_type, (List, NonNull)):
inner_type = (
NonNull(type(inner_type))
if inner_type.kwargs["required"]
else type(inner_type)
)
return List(inner_type, description=field.help_text, required=not field.null)
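# A brief illustration (hypothetical field, assuming Postgres): an
# ArrayField(models.CharField(max_length=50)) converts its base field first,
# wraps it in NonNull because the inner CharField is required, and yields
# List(NonNull(String)) on the schema side.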
@convert_django_field.register(HStoreField)
@convert_django_field.register(PGJSONField)
@convert_django_field.register(JSONField)
def convert_pg_and_json_field_to_string(field, registry=None):
return JSONString(description=field.help_text, required=not field.null)
@convert_django_field.register(RangeField)
def convert_postgres_range_to_string(field, registry=None):
inner_type = convert_django_field(field.base_field)
if not isinstance(inner_type, (List, NonNull)):
inner_type = (
NonNull(type(inner_type))
if inner_type.kwargs["required"]
else type(inner_type)
)
return List(inner_type, description=field.help_text, required=not field.null)
|
PypiClean
|
/herbie_data-2023.3.0-py3-none-any.whl/herbie/accessors.py
|
import functools
import cartopy.crs as ccrs
import metpy # * Needed for metpy accessor
import numpy as np
import pandas as pd
import xarray as xr
import pygrib
from pyproj import CRS
from pathlib import Path
import re
import shapely
from shapely.geometry import Polygon, MultiPoint, Point
_level_units = dict(
adiabaticCondensation="adiabatic condensation",
atmosphere="atmosphere",
atmosphereSingleLayer="atmosphere single layer",
boundaryLayerCloudLayer="boundary layer cloud layer",
cloudBase="cloud base",
cloudCeiling="cloud ceiling",
cloudTop="cloud top",
depthBelowLand="m",
equilibrium="equilibrium",
heightAboveGround="m",
heightAboveGroundLayer="m",
highCloudLayer="high cloud layer",
highestTroposphericFreezing="highest tropospheric freezing",
isobaricInhPa="hPa",
isobaricLayer="hPa",
isothermZero="0 C",
isothermal="K",
level="m",
lowCloudLayer="low cloud layer",
meanSea="MSLP",
middleCloudLayer="middle cloud layer",
nominalTop="nominal top",
pressureFromGroundLayer="Pa",
sigma="sigma",
sigmaLayer="sigmaLayer",
surface="surface",
)
def add_proj_info(ds):
"""Add projection info to a Dataset"""
match = re.search(r'"source": "(.*?)"', ds.history)
FILE = Path(match.group(1))
# Get CF grid projection information with pygrib and pyproj because
# this is something cfgrib doesn't do (https://github.com/ecmwf/cfgrib/issues/251)
# NOTE: Assumes the projection is the same for all variables
with pygrib.open(str(FILE)) as grb:
msg = grb.message(1)
cf_params = CRS(msg.projparams).to_cf()
# Funny stuff with polar stereographic (https://github.com/pyproj4/pyproj/issues/856)
# TODO: Is there a better way to handle this? What about south pole?
if cf_params["grid_mapping_name"] == "polar_stereographic":
cf_params["latitude_of_projection_origin"] = cf_params.get(
"latitude_of_projection_origin", 90
)
# ----------------------
# Attach CF grid mapping
# ----------------------
# http://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#appendix-grid-mappings
ds["gribfile_projection"] = None
ds["gribfile_projection"].attrs = cf_params
ds["gribfile_projection"].attrs["long_name"] = "model grid projection"
# Assign this grid_mapping for all variables
for var in list(ds):
if var == "gribfile_projection":
continue
ds[var].attrs["grid_mapping"] = "gribfile_projection"
@xr.register_dataset_accessor("herbie")
class HerbieAccessor:
"""Accessor for xarray Datasets opened with Herbie."""
def __init__(self, xarray_obj):
self._obj = xarray_obj
self._center = None
@property
def center(self):
"""Return the geographic center point of this dataset."""
if self._center is None:
# we can use a cache on our accessor objects, because accessors
# themselves are cached on instances that access them.
lon = self._obj.longitude
lat = self._obj.latitude
self._center = (float(lon.mean()), float(lat.mean()))
return self._center
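# A minimal usage sketch (hypothetical Herbie-opened dataset `ds`):
#
#   ds.herbie.center  # -> (mean longitude, mean latitude)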
@functools.cached_property
def crs(self):
"""
Cartopy coordinate reference system (crs) from a cfgrib Dataset.
Projection information is from the grib2 message for each variable.
Parameters
----------
ds : xarray.Dataset
An xarray.Dataset from a GRIB2 file opened by the cfgrib engine.
"""
ds = self._obj
# Get variables that have dimensions
# (this filters out the gribfile_projection variable)
variables = [i for i in list(ds) if len(ds[i].dims) > 0]
ds = ds.metpy.parse_cf(varname=variables)
crs = ds.metpy_crs.item().to_cartopy()
return crs
@functools.cached_property
def polygon(self):
"""
Get a polygon of the domain boundary.
"""
ds = self._obj
LON = ds.longitude.data
LAT = ds.latitude.data
# Path of array outside border starting from the lower left corner
# and going around the array counter-clockwise.
outside = (
list(zip(LON[0, :], LAT[0, :]))
+ list(zip(LON[:, -1], LAT[:, -1]))
+ list(zip(LON[-1, ::-1], LAT[-1, ::-1]))
+ list(zip(LON[::-1, 0], LAT[::-1, 0]))
)
outside = np.array(outside)
###############################
# Polygon in Lat/Lon coordinates
x = outside[:, 0]
y = outside[:, 1]
domain_polygon_latlon = Polygon(zip(x, y))
###################################
# Polygon in projection coordinates
transform = self.crs.transform_points(ccrs.PlateCarree(), x, y)
# Remove any points that run off the projection map (i.e., point's value is `inf`).
transform = transform[~np.isinf(transform).any(axis=1)]
x = transform[:, 0]
y = transform[:, 1]
domain_polygon = Polygon(zip(x, y))
return domain_polygon, domain_polygon_latlon
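# A minimal usage sketch (hypothetical dataset `ds`); the projected polygon
# comes first, the lat/lon polygon second:
#
#   domain, domain_latlon = ds.herbie.polygon
#   domain_latlon.contains(Point(-100.0, 40.0))  # point inside the domain?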
def nearest_points(self, points, names=None, verbose=True):
"""
Get the nearest latitude/longitude points from a xarray Dataset.
- Stack Overflow: https://stackoverflow.com/questions/58758480/xarray-select-nearest-lat-lon-with-multi-dimension-coordinates
- MetPy Details: https://unidata.github.io/MetPy/latest/tutorials/xarray_tutorial.html?highlight=assign_y_x
Parameters
----------
ds : xr.Dataset
A Herbie-friendly xarray Dataset
points : tuple, list of tuples, pd.DataFrame
Points to be plucked from the gridded Dataset.
There are multiple objects accepted.
1. Tuple of longitude and latitude (lon, lat) coordinate pair.
1. List of multiple (lon, lat) coordinate pair tuples.
1. Pandas DataFrame with ``longitude`` and ``latitude`` columns. Index will be used as point names, unless ``names`` is specified.
1. Shapely Point or MultiPoint
names : list
A list of names for each point location (i.e., station name).
None will not append any names. names should be the same
length as points.
Notes
-----
This is **much** faster than my old "pluck_points" method.
For matching 1,948 points:
- `nearest_points` completed in 7.5 seconds.
- `pluck_points` completed in 2 minutes.
TODO: Explore alternatives
- Could Shapely nearest_points be used
https://shapely.readthedocs.io/en/latest/manual.html#nearest-points
- Or possibly scipy BallTree method.
"""
ds = self._obj
# Longitude and Latitude point DataFrame
if isinstance(points, pd.DataFrame):
point_df = points[["longitude", "latitude"]]
if names is not None:
point_df.index = names
elif np.shape(points) == (2,):
# points is a tuple (lon, lat) or list [lon, lat]
# and name is given as None or str
point_df = pd.DataFrame(
[points],
columns=["longitude", "latitude"],
index=[names],
)
elif isinstance(points, list):
# points given as a list of coordinate-pair tuples
# and name is given as a list of str
point_df = pd.DataFrame(
points,
columns=["longitude", "latitude"],
index=names,
)
elif isinstance(points, (MultiPoint, Point)):
# points is given as a Shapely object
point_df = pd.DataFrame(
shapely.get_coordinates(points),
columns=["longitude", "latitude"],
index=names,
)
else:
raise ValueError("The points supplied was not understood.")
# Check if MetPy has already parsed the CF metadata grid projection.
# Do that if it hasn't been done yet.
if "metpy_crs" not in ds:
ds = ds.metpy.parse_cf()
# Apply the MetPy method `assign_y_x` to the dataset
# https://unidata.github.io/MetPy/latest/api/generated/metpy.xarray.html?highlight=assign_y_x#metpy.xarray.MetPyDataArrayAccessor.assign_y_x
ds = ds.metpy.assign_y_x()
# Convert the requested [(lon,lat), (lon,lat)] points to map projection.
# Accept a list of point tuples, or Shapely Points object.
# We want to index the dataset at a single point.
# We can do this by transforming a lat/lon point to the grid location
crs = ds.metpy_crs.item().to_cartopy()
transformed_data = crs.transform_points(
ccrs.PlateCarree(), point_df.longitude, point_df.latitude
)
xs = transformed_data[:, 0]
ys = transformed_data[:, 1]
# Select the nearest points from the projection coordinates.
# TODO: Is there a better way?
# There doesn't seem to be a way to get just the points like this
# ds = ds.sel(x=xs, y=ys, method='nearest')
# because it gives a 2D array, and not a point-by-point index.
# Instead, I have to call the ds.sel method in a loop
new_ds = xr.concat(
[ds.sel(x=xi, y=yi, method="nearest") for xi, yi in zip(xs, ys)],
dim="point",
)
new_ds.coords["point"] = ("point", point_df.index.to_list())
new_ds.coords["point_latitude"] = ("point", point_df.latitude)
new_ds.coords["point_longitude"] = ("point", point_df.longitude)
return new_ds
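# A minimal usage sketch (hypothetical station coordinates and names):
#
#   stations = ds.herbie.nearest_points(
#       points=[(-111.97, 40.78), (-104.67, 39.87)],
#       names=["KSLC", "KDEN"],
#   )
#   stations.sel(point="KSLC")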
def plot(self, ax=None, common_features_kw={}, vars=None, **kwargs):
"""Plot data on a map.
Parameters
----------
vars : list
List of variables to plot. Default None will plot all
variables in the DataSet.
"""
# From Carpenter_Workshop:
# https://github.com/blaylockbk/Carpenter_Workshop
import matplotlib.pyplot as plt
try:
from toolbox.cartopy_tools import common_features, pc
from paint.radar import cm_reflectivity
from paint.radar2 import cm_reflectivity
from paint.terrain2 import cm_terrain
from paint.standard2 import cm_dpt, cm_pcp, cm_rh, cm_tmp, cm_wind
except ImportError:
print("The plotting accessor requires my Carpenter Workshop. Try:")
print(
"`pip install git+https://github.com/blaylockbk/Carpenter_Workshop.git`"
)
ds = self._obj
if isinstance(vars, str):
vars = [vars]
if vars is None:
vars = ds.data_vars
for var in vars:
if "longitude" not in ds[var].coords:
# This is the case for the gribfile_projection variable
continue
print("cfgrib variable:", var)
print("GRIB_cfName", ds[var].attrs.get("GRIB_cfName"))
print("GRIB_cfVarName", ds[var].attrs.get("GRIB_cfVarName"))
print("GRIB_name", ds[var].attrs.get("GRIB_name"))
print("GRIB_units", ds[var].attrs.get("GRIB_units"))
print("GRIB_typeOfLevel", ds[var].attrs.get("GRIB_typeOfLevel"))
print()
ds[var].attrs["units"] = (
ds[var]
.attrs["units"]
.replace("**-1", "$^{-1}$")
.replace("**-2", "$^{-2}$")
)
defaults = dict(
scale="50m",
dpi=150,
figsize=(10, 5),
crs=ds.herbie.crs,
ax=ax,
)
common_features_kw = {**defaults, **common_features_kw}
ax = common_features(**common_features_kw).STATES().ax
title = ""
kwargs.setdefault("shading", "auto")
cbar_kwargs = dict(pad=0.01)
if ds[var].GRIB_cfVarName in ["d2m", "dpt"]:
ds[var].attrs["GRIB_cfName"] = "dew_point_temperature"
## Wind
wind_pair = {"u10": "v10", "u80": "v80", "u": "v"}
if ds[var].GRIB_cfName == "air_temperature":
kwargs = {**cm_tmp().cmap_kwargs, **kwargs}
cbar_kwargs = {**cm_tmp().cbar_kwargs, **cbar_kwargs}
if ds[var].GRIB_units == "K":
ds[var] -= 273.15
ds[var].attrs["GRIB_units"] = "C"
ds[var].attrs["units"] = "C"
elif ds[var].GRIB_cfName == "dew_point_temperature":
kwargs = {**cm_dpt().cmap_kwargs, **kwargs}
cbar_kwargs = {**cm_dpt().cbar_kwargs, **cbar_kwargs}
if ds[var].GRIB_units == "K":
ds[var] -= 273.15
ds[var].attrs["GRIB_units"] = "C"
ds[var].attrs["units"] = "C"
elif ds[var].GRIB_name == "Total Precipitation":
title = "-".join(
[f"F{int(i):02d}" for i in ds[var].GRIB_stepRange.split("-")]
)
ds[var] = ds[var].where(ds[var] != 0)
kwargs = {**cm_pcp().cmap_kwargs, **kwargs}
cbar_kwargs = {**cm_pcp().cbar_kwargs, **cbar_kwargs}
elif ds[var].GRIB_name == "Maximum/Composite radar reflectivity":
ds[var] = ds[var].where(ds[var] >= 0)
cbar_kwargs = {**cm_reflectivity().cbar_kwargs, **cbar_kwargs}
kwargs = {**cm_reflectivity().cmap_kwargs, **kwargs}
elif ds[var].GRIB_cfName == "relative_humidity":
cbar_kwargs = {**cm_rh().cbar_kwargs, **cbar_kwargs}
kwargs = {**cm_rh().cmap_kwargs, **kwargs}
elif ds[var].GRIB_name == "Orography":
if "lsm" in ds:
ds["orog"] = ds.orog.where(ds.lsm == 1, -100)
cbar_kwargs = {**cm_terrain().cbar_kwargs, **cbar_kwargs}
kwargs = {**cm_terrain().cmap_kwargs, **kwargs}
elif "wind" in ds[var].GRIB_cfName or "wind" in ds[var].GRIB_name:
cbar_kwargs = {**cm_wind().cbar_kwargs, **cbar_kwargs}
kwargs = {**cm_wind().cmap_kwargs, **kwargs}
if ds[var].GRIB_cfName == "eastward_wind":
cbar_kwargs["label"] = "U " + cbar_kwargs["label"]
elif ds[var].GRIB_cfName == "northward_wind":
cbar_kwargs["label"] = "V " + cbar_kwargs["label"]
else:
cbar_kwargs = {
**dict(
label=f"{ds[var].GRIB_parameterName.strip().title()} ({ds[var].units})"
),
**cbar_kwargs,
}
p = ax.pcolormesh(
ds.longitude, ds.latitude, ds[var], transform=pc, **kwargs
)
plt.colorbar(p, ax=ax, **cbar_kwargs)
VALID = pd.to_datetime(ds.valid_time.data).strftime("%H:%M UTC %d %b %Y")
RUN = pd.to_datetime(ds.time.data).strftime("%H:%M UTC %d %b %Y")
FXX = f"F{pd.to_timedelta(ds.step.data).total_seconds()/3600:02.0f}"
level_type = ds[var].GRIB_typeOfLevel
if level_type in _level_units:
level_units = _level_units[level_type]
else:
level_units = "unknown"
if level_units.lower() in ["surface", "atmosphere"]:
level = f"{title} {level_units}"
else:
level = f"{ds[var][level_type].data:g} {level_units}"
ax.set_title(
f"Run: {RUN} {FXX}",
loc="left",
fontfamily="monospace",
fontsize="x-small",
)
ax.set_title(
f"{ds.model.upper()} {level}\n", loc="center", fontweight="semibold"
)
ax.set_title(
f"Valid: {VALID}",
loc="right",
fontfamily="monospace",
fontsize="x-small",
)
# Set extent so no whitespace shows around pcolormesh area
# TODO: Any better way to do this? With metpy.assign_y_x
# !!!!: The `metpy.assign_y_x` method could be used for pluck_point :)
try:
if "x" in ds.dims:
ds = ds.metpy.parse_cf()
ds = ds.metpy.assign_y_x()
ax.set_extent(
[
ds.x.min().item(),
ds.x.max().item(),
ds.y.min().item(),
ds.y.max().item(),
],
crs=ds.herbie.crs,
)
except Exception:
pass
return ax
|
PypiClean
|
/tw.dojo-0.9.181.tar.gz/tw.dojo-0.9.181/tw/dojo/static/1.8.1/debug/dojox/grid/enhanced/plugins/filter/FilterDefDialog.js.uncompressed.js
|
require({cache:{
'url:dojox/grid/enhanced/templates/FilterDefPane.html':"<div class=\"dojoxGridFDPane\">\n\t<div class=\"dojoxGridFDPaneRelation\">${_relMsgFront}\n\t<span class=\"dojoxGridFDPaneModes\" dojoAttachPoint=\"criteriaModeNode\">\n\t\t<select dojoAttachPoint=\"_relSelect\" dojoType=\"dijit.form.Select\" dojoAttachEvent=\"onChange: _onRelSelectChange\">\n\t\t\t<option value=\"0\">${_relAll}</option>\n\t\t\t<option value=\"1\">${_relAny}</option>\n\t\t</select>\n\t</span>\n\t${_relMsgTail}\n\t</div>\n\t<div dojoAttachPoint=\"criteriaPane\" class=\"dojoxGridFDPaneRulePane\"></div>\n\t<div dojoAttachPoint=\"_addCBoxBtn\" dojoType=\"dijit.form.Button\" \n\t\tclass=\"dojoxGridFDPaneAddCBoxBtn\" iconClass=\"dojoxGridFDPaneAddCBoxBtnIcon\"\n\t\tdojoAttachEvent=\"onClick:_onAddCBox\" label=\"${_addRuleBtnLabel}\" showLabel=\"false\">\n\t</div>\n\t<div class=\"dojoxGridFDPaneBtns\" dojoAttachPoint=\"buttonsPane\">\n\t\t<span dojoAttachPoint=\"_cancelBtn\" dojoType=\"dijit.form.Button\" \n\t\t\tdojoAttachEvent=\"onClick:_onCancel\" label=\"${_cancelBtnLabel}\">\n\t\t</span>\n\t\t<span dojoAttachPoint=\"_clearFilterBtn\" dojoType=\"dijit.form.Button\" \n\t\t\tdojoAttachEvent=\"onClick:_onClearFilter\" label=\"${_clearBtnLabel}\" disabled=\"true\">\n\t\t</span>\n\t\t<span dojoAttachPoint=\"_filterBtn\" dojoType=\"dijit.form.Button\" \n\t\t\tdojoAttachEvent=\"onClick:_onFilter\" label=\"${_filterBtnLabel}\" disabled=\"true\">\n\t\t</span>\n\t</div>\n</div>\n",
'url:dojox/grid/enhanced/templates/CriteriaBox.html':"<div class=\"dojoxGridFCBox\">\n\t<div class=\"dojoxGridFCBoxSelCol\" dojoAttachPoint=\"selColNode\">\n\t\t<span class=\"dojoxGridFCBoxField\">${_colSelectLabel}</span>\n\t\t<select dojoAttachPoint=\"_colSelect\" dojoType=\"dijit.form.Select\" \n\t\t\tclass=\"dojoxGridFCBoxColSelect\"\n\t\t\tdojoAttachEvent=\"onChange:_onChangeColumn\">\n\t\t</select>\n\t</div>\n\t<div class=\"dojoxGridFCBoxCondition\" dojoAttachPoint=\"condNode\">\n\t\t<span class=\"dojoxGridFCBoxField\">${_condSelectLabel}</span>\n\t\t<select dojoAttachPoint=\"_condSelect\" dojoType=\"dijit.form.Select\" \n\t\t\tclass=\"dojoxGridFCBoxCondSelect\"\n\t\t\tdojoAttachEvent=\"onChange:_onChangeCondition\">\n\t\t</select>\n\t\t<div class=\"dojoxGridFCBoxCondSelectAlt\" dojoAttachPoint=\"_condSelectAlt\" style=\"display:none;\"></div>\n\t</div>\n\t<div class=\"dojoxGridFCBoxValue\" dojoAttachPoint=\"valueNode\">\n\t\t<span class=\"dojoxGridFCBoxField\">${_valueBoxLabel}</span>\n\t</div>\n</div>\n",
'url:dojox/grid/enhanced/templates/FilterBoolValueBox.html':"<div class=\"dojoxGridBoolValueBox\">\n\t<div class=\"dojoxGridTrueBox\">\n\t\t<input dojoType=\"dijit.form.RadioButton\" type='radio' name='a1' id='${_baseId}_rbTrue' checked=\"true\" \n\t\t\tdojoAttachPoint=\"rbTrue\" dojoAttachEvent=\"onChange: onChange\"/>\n\t\t<div class=\"dojoxGridTrueLabel\" for='${_baseId}_rbTrue'>${_lblTrue}</div>\n\t</div>\n\t<div class=\"dojoxGridFalseBox\">\n\t\t<input dojoType=\"dijit.form.RadioButton\" dojoAttachPoint=\"rbFalse\" type='radio' name='a1' id='${_baseId}_rbFalse'/>\n\t\t<div class=\"dojoxGridTrueLabel\" for='${_baseId}_rbFalse'>${_lblFalse}</div>\n\t</div>\n</div>\n"}});
define("dojox/grid/enhanced/plugins/filter/FilterDefDialog", [
"dojo/_base/declare",
"dojo/_base/array",
"dojo/_base/connect",
"dojo/_base/lang",
"dojo/_base/event",
"dojo/_base/html",
"dojo/_base/sniff",
"dojo/keys",
"dojo/string",
"dojo/window",
"dojo/date/locale",
"./FilterBuilder",
"../Dialog",
"dijit/form/ComboBox",
"dijit/form/TextBox",
"dijit/form/NumberTextBox",
"dijit/form/DateTextBox",
"dijit/form/TimeTextBox",
"dijit/form/Button",
"dijit/layout/AccordionContainer",
"dijit/layout/ContentPane",
"dijit/_Widget",
"dijit/_TemplatedMixin",
"dijit/_WidgetsInTemplateMixin",
"dijit/focus",
"dojox/html/metrics",
"dijit/a11y",
"dojo/text!../../templates/FilterDefPane.html",
"dojo/text!../../templates/CriteriaBox.html",
"dojo/text!../../templates/FilterBoolValueBox.html",
"dijit/Tooltip",
"dijit/form/Select",
"dijit/form/RadioButton",
"dojox/html/ellipsis",
"../../../cells/dijit"
], function(declare, array, connect, lang, event, html, has, keys, string, win, dateLocale,
FilterBuilder, Dialog, ComboBox, TextBox, NumberTextBox, DateTextBox, TimeTextBox, Button,
AccordionContainer, ContentPane, _Widget, _TemplatedMixin, _WidgetsInTemplateMixin, dijitFocus,
metrics, dijitA11y, defPaneTemplate, criteriaTemplate, boolValueTemplate){
var _tabIdxes = {
// summary:
// Define tabindexes for elements in the filter definition dialog
relSelect: 60,
accordionTitle: 70,
removeCBoxBtn: -1,
colSelect: 90,
condSelect: 95,
valueBox: 10,
addCBoxBtn: 20,
filterBtn: 30,
clearBtn: 40,
cancelBtn: 50
};
var FilterAccordionContainer = declare("dojox.grid.enhanced.plugins.filter.AccordionContainer", AccordionContainer, {
nls: null,
addChild: function(/*dijit._Widget*/ child, /*Integer?*/ insertIndex){
var pane = arguments[0] = child._pane = new ContentPane({
content: child
});
this.inherited(arguments);
this._modifyChild(pane);
},
removeChild: function(child){
var pane = child, isRemoveByUser = false;
if(child._pane){
isRemoveByUser = true;
pane = arguments[0] = child._pane;
}
this.inherited(arguments);
if(isRemoveByUser){
this._hackHeight(false, this._titleHeight);
var children = this.getChildren();
if(children.length === 1){
html.style(children[0]._removeCBoxBtn.domNode, "display", "none");
}
}
pane.destroyRecursive();
},
selectChild: function(child){
if(child._pane){
arguments[0] = child._pane;
}
this.inherited(arguments);
},
resize: function(){
this.inherited(arguments);
array.forEach(this.getChildren(), this._setupTitleDom);
},
startup: function(){
if(this._started){
return;
}
this.inherited(arguments);
if(parseInt(has('ie'), 10) == 7){
//IE7 will fire a lot of "onresize" events during initialization.
array.some(this._connects, function(cnnt){
if((cnnt[0] || {})[1] == "onresize"){
this.disconnect(cnnt);
return true;
}
}, this);
}
array.forEach(this.getChildren(), function(child){
this._modifyChild(child, true);
}, this);
},
_onKeyPress: function(/*Event*/ e, /*dijit._Widget*/ fromTitle){
// summary:
// Overrides base class method, make left/right button do other things.
if(this.disabled || e.altKey || !(fromTitle || e.ctrlKey)){
return;
}
var k = keys, c = e.charOrCode, ltr = html._isBodyLtr(), toNext = null;
if((fromTitle && c == k.UP_ARROW) || (e.ctrlKey && c == k.PAGE_UP)){
toNext = false;
}else if((fromTitle && c == k.DOWN_ARROW) || (e.ctrlKey && (c == k.PAGE_DOWN || c == k.TAB))){
toNext = true;
}else if(c == (ltr ? k.LEFT_ARROW : k.RIGHT_ARROW)){
toNext = this._focusOnRemoveBtn ? null : false;
this._focusOnRemoveBtn = !this._focusOnRemoveBtn;
}else if(c == (ltr ? k.RIGHT_ARROW : k.LEFT_ARROW)){
toNext = this._focusOnRemoveBtn ? true : null;
this._focusOnRemoveBtn = !this._focusOnRemoveBtn;
}else{
return;
}
if(toNext !== null){
this._adjacent(toNext)._buttonWidget._onTitleClick();
}
event.stop(e);
win.scrollIntoView(this.selectedChildWidget._buttonWidget.domNode.parentNode);
if(has('ie')){
//IE will not show focus indicator if tabIndex is -1
this.selectedChildWidget._removeCBoxBtn.focusNode.setAttribute("tabIndex", this._focusOnRemoveBtn ? _tabIdxes.accordionTitle : -1);
}
dijitFocus.focus(this.selectedChildWidget[this._focusOnRemoveBtn ? "_removeCBoxBtn" : "_buttonWidget"].focusNode);
},
_modifyChild: function(child, isFirst){
if(!child || !this._started){
return;
}
html.style(child.domNode, "overflow", "hidden");
child._buttonWidget.connect(child._buttonWidget, "_setSelectedAttr", function(){
this.focusNode.setAttribute("tabIndex", this.selected ? _tabIdxes.accordionTitle : "-1");
});
var _this = this;
child._buttonWidget.connect(child._buttonWidget.domNode, "onclick", function(){
_this._focusOnRemoveBtn = false;
});
(child._removeCBoxBtn = new Button({
label: this.nls.removeRuleButton,
showLabel: false,
iconClass: "dojoxGridFCBoxRemoveCBoxBtnIcon",
tabIndex: _tabIdxes.removeCBoxBtn,
onClick: lang.hitch(child.content, "onRemove"),
onKeyPress: function(e){
_this._onKeyPress(e, child._buttonWidget.contentWidget);
}
})).placeAt(child._buttonWidget.domNode);
var i, children = this.getChildren();
if(children.length === 1){
child._buttonWidget.set("selected", true);
html.style(child._removeCBoxBtn.domNode, "display", "none");
}else{
for(i = 0; i < children.length; ++i){
if(children[i]._removeCBoxBtn){
html.style(children[i]._removeCBoxBtn.domNode, "display", "");
}
}
}
this._setupTitleDom(child);
if(!this._titleHeight){
for(i = 0; i < children.length; ++i){
if(children[i] != this.selectedChildWidget){
this._titleHeight = html.marginBox(children[i]._buttonWidget.domNode.parentNode).h;
break;
}
}
}
if(!isFirst){
this._hackHeight(true, this._titleHeight);
}
},
_hackHeight: function(/* bool */toGrow,/* int */heightDif){
var children = this.getChildren(),
dn = this.domNode, h = html.style(dn, "height");
if(!toGrow){
dn.style.height = (h - heightDif) + 'px';
}else if(children.length > 1){
dn.style.height = (h + heightDif) + 'px';
}else{
//Only one rule, no need to do anything.
return;
}
this.resize();
},
_setupTitleDom: function(child){
var w = html.contentBox(child._buttonWidget.titleNode).w;
if(has('ie') < 8){ w -= 8; }
html.style(child._buttonWidget.titleTextNode, "width", w + "px");
}
});
var FilterDefPane = declare("dojox.grid.enhanced.plugins.filter.FilterDefPane",[_Widget, _TemplatedMixin, _WidgetsInTemplateMixin],{
templateString: defPaneTemplate,
widgetsInTemplate: true,
dlg: null,
postMixInProperties: function(){
this.plugin = this.dlg.plugin;
var nls = this.plugin.nls;
this._addRuleBtnLabel = nls.addRuleButton;
this._cancelBtnLabel = nls.cancelButton;
this._clearBtnLabel = nls.clearButton;
this._filterBtnLabel = nls.filterButton;
this._relAll = nls.relationAll;
this._relAny = nls.relationAny;
this._relMsgFront = nls.relationMsgFront;
this._relMsgTail = nls.relationMsgTail;
},
postCreate: function(){
this.inherited(arguments);
this.connect(this.domNode, "onkeypress", "_onKey");
(this.cboxContainer = new FilterAccordionContainer({
nls: this.plugin.nls
})).placeAt(this.criteriaPane);
this._relSelect.set("tabIndex", _tabIdxes.relSelect);
this._addCBoxBtn.set("tabIndex", _tabIdxes.addCBoxBtn);
this._cancelBtn.set("tabIndex", _tabIdxes.cancelBtn);
this._clearFilterBtn.set("tabIndex", _tabIdxes.clearBtn);
this._filterBtn.set("tabIndex", _tabIdxes.filterBtn);
var nls = this.plugin.nls;
this._relSelect.domNode.setAttribute("aria-label", nls.waiRelAll);
this._addCBoxBtn.domNode.setAttribute("aria-label", nls.waiAddRuleButton);
this._cancelBtn.domNode.setAttribute("aria-label", nls.waiCancelButton);
this._clearFilterBtn.domNode.setAttribute("aria-label", nls.waiClearButton);
this._filterBtn.domNode.setAttribute("aria-label", nls.waiFilterButton);
this._relSelect.set("value", this.dlg._relOpCls === "logicall" ? "0" : "1");
},
uninitialize: function(){
this.cboxContainer.destroyRecursive();
this.plugin = null;
this.dlg = null;
},
_onRelSelectChange: function(val){
this.dlg._relOpCls = val == "0" ? "logicall" : "logicany";
this._relSelect.domNode.setAttribute("aria-label", this.plugin.nls[val == "0" ? "waiRelAll" : "waiRelAny"]);
},
_onAddCBox: function(){
this.dlg.addCriteriaBoxes(1);
},
_onCancel: function(){
this.dlg.onCancel();
},
_onClearFilter: function(){
this.dlg.onClearFilter();
},
_onFilter: function(){
this.dlg.onFilter();
},
_onKey: function(e){
if(e.keyCode == keys.ENTER){
this.dlg.onFilter();
}
}
});
var CriteriaBox = declare("dojox.grid.enhanced.plugins.filter.CriteriaBox",[_Widget, _TemplatedMixin, _WidgetsInTemplateMixin],{
templateString: criteriaTemplate,
widgetsInTemplate: true,
dlg: null,
postMixInProperties: function(){
this.plugin = this.dlg.plugin;
this._curValueBox = null;
var nls = this.plugin.nls;
this._colSelectLabel = nls.columnSelectLabel;
this._condSelectLabel = nls.conditionSelectLabel;
this._valueBoxLabel = nls.valueBoxLabel;
this._anyColumnOption = nls.anyColumnOption;
},
postCreate: function(){
var dlg = this.dlg, g = this.plugin.grid;
//Select Column
this._colSelect.set("tabIndex", _tabIdxes.colSelect);
this._colOptions = this._getColumnOptions();
this._colSelect.addOption([
{label: this.plugin.nls.anyColumnOption, value: "anycolumn", selected: dlg.curColIdx < 0},
{value: ""}
].concat(this._colOptions));
//Select Condition
this._condSelect.set("tabIndex", _tabIdxes.condSelect);
this._condSelect.addOption(this._getUsableConditions(dlg.getColumnType(dlg.curColIdx)));
this._showSelectOrLabel(this._condSelect, this._condSelectAlt);
this.connect(g.layout, "moveColumn", "onMoveColumn");
var _this = this;
setTimeout(function(){
var type = dlg.getColumnType(dlg.curColIdx);
_this._setValueBoxByType(type);
}, 0);
},
_getColumnOptions: function(){
var colIdx = this.dlg.curColIdx >= 0 ? String(this.dlg.curColIdx) : "anycolumn";
return array.map(array.filter(this.plugin.grid.layout.cells, function(cell){
return !(cell.filterable === false || cell.hidden);
}), function(cell){
return {
label: cell.name || cell.field,
value: String(cell.index),
selected: colIdx == String(cell.index)
};
});
},
onMoveColumn: function(){
var tmp = this._onChangeColumn;
this._onChangeColumn = function(){};
var option = this._colSelect.get("selectedOptions");
this._colSelect.removeOption(this._colOptions);
this._colOptions = this._getColumnOptions();
this._colSelect.addOption(this._colOptions);
var i = 0;
for(; i < this._colOptions.length; ++i){
if(this._colOptions[i].label == option.label){
break;
}
}
if(i < this._colOptions.length){
this._colSelect.set("value", this._colOptions[i].value);
}
var _this = this;
setTimeout(function(){
_this._onChangeColumn = tmp;
}, 0);
},
onRemove: function(){
this.dlg.removeCriteriaBoxes(this);
},
uninitialize: function(){
if(this._curValueBox){
this._curValueBox.destroyRecursive();
this._curValueBox = null;
}
this.plugin = null;
this.dlg = null;
},
_showSelectOrLabel: function(sel, alt){
var options = sel.getOptions();
if(options.length == 1){
alt.innerHTML = options[0].label;
html.style(sel.domNode, "display", "none");
html.style(alt, "display", "");
}else{
html.style(sel.domNode, "display", "");
html.style(alt, "display", "none");
}
},
_onChangeColumn: function(val){
this._checkValidCriteria();
var type = this.dlg.getColumnType(val);
this._setConditionsByType(type);
this._setValueBoxByType(type);
this._updateValueBox();
},
_onChangeCondition: function(val){
this._checkValidCriteria();
var f = (val == "range");
if(f ^ this._isRange){
this._isRange = f;
this._setValueBoxByType(this.dlg.getColumnType(this._colSelect.get("value")));
}
this._updateValueBox();
},
_updateValueBox: function(cond){
this._curValueBox.set("disabled", this._condSelect.get("value") == "isempty");
},
_checkValidCriteria: function(){
// summary:
// Check whether the given criteria box is completed. If it is, mark it.
setTimeout(lang.hitch(this, function(){
this.updateRuleTitle();
this.dlg._updatePane();
}),0);
},
_createValueBox: function(/* widget constructor */cls,/* object */arg){
// summary:
// Create a value input box with given class and arguments
var func = lang.hitch(arg.cbox, "_checkValidCriteria");
return new cls(lang.mixin(arg,{
tabIndex: _tabIdxes.valueBox,
onKeyPress: func,
onChange: func,
"class": "dojoxGridFCBoxValueBox"
}));
},
_createRangeBox: function(/* widget constructor */cls,/* object */arg){
// summary:
// Create a DIV containing 2 input widgets, which represents a range, with the given class and arguments
var func = lang.hitch(arg.cbox, "_checkValidCriteria");
lang.mixin(arg,{
tabIndex: _tabIdxes.valueBox,
onKeyPress: func,
onChange: func
});
var div = html.create("div", {"class": "dojoxGridFCBoxValueBox"}),
start = new cls(arg),
txt = html.create("span", {"class": "dojoxGridFCBoxRangeValueTxt", "innerHTML": this.plugin.nls.rangeTo}),
end = new cls(arg);
html.addClass(start.domNode, "dojoxGridFCBoxStartValue");
html.addClass(end.domNode, "dojoxGridFCBoxEndValue");
div.appendChild(start.domNode);
div.appendChild(txt);
div.appendChild(end.domNode);
div.domNode = div;
//Mock functions for set and get (in place of the old attr function)
div.set = function(dummy, args){
if(lang.isObject(args)){
start.set("value", args.start);
end.set("value", args.end);
}
};
div.get = function(){
var s = start.get("value"),
e = end.get("value");
return s && e ? {start: s, end: e} : "";
};
return div;
},
changeCurrentColumn: function(/* bool */selectCurCol){
var colIdx = this.dlg.curColIdx;
//Re-populate the columns in case some of them are set to hidden.
this._colSelect.removeOption(this._colOptions);
this._colOptions = this._getColumnOptions();
this._colSelect.addOption(this._colOptions);
this._colSelect.set('value', colIdx >= 0 ? String(colIdx) : "anycolumn");
this.updateRuleTitle(true);
},
curColumn: function(){
return this._colSelect.getOptions(this._colSelect.get("value")).label;
},
curCondition: function(){
return this._condSelect.getOptions(this._condSelect.get("value")).label;
},
curValue: function(){
var cond = this._condSelect.get("value");
if(cond == "isempty"){return "";}
return this._curValueBox ? this._curValueBox.get("value") : "";
},
save: function(){
if(this.isEmpty()){
return null;
}
var colIdx = this._colSelect.get("value"),
type = this.dlg.getColumnType(colIdx),
value = this.curValue(),
cond = this._condSelect.get("value");
return {
"column": colIdx,
"condition": cond,
"value": value,
"formattedVal": this.formatValue(type, cond, value),
"type": type,
"colTxt": this.curColumn(),
"condTxt": this.curCondition()
};
},
load: function(obj){
var tmp = [
this._onChangeColumn,
this._onChangeCondition
];
this._onChangeColumn = this._onChangeCondition = function(){};
if(obj.column){
this._colSelect.set("value", obj.column);
}
if(obj.type){
this._setConditionsByType(obj.type);
this._setValueBoxByType(obj.type);
}else{
obj.type = this.dlg.getColumnType(this._colSelect.get("value"));
}
if(obj.condition){
this._condSelect.set("value", obj.condition);
}
var value = obj.value || "";
if(value || (obj.type != "date" && obj.type != "time")){
this._curValueBox.set("value", value);
}
this._updateValueBox();
setTimeout(lang.hitch(this, function(){
this._onChangeColumn = tmp[0];
this._onChangeCondition = tmp[1];
}), 0);
},
getExpr: function(){
if(this.isEmpty()){
return null;
}
var colval = this._colSelect.get("value");
return this.dlg.getExprForCriteria({
"type": this.dlg.getColumnType(colval),
"column": colval,
"condition": this._condSelect.get("value"),
"value": this.curValue()
});
},
isEmpty: function(){
var cond = this._condSelect.get("value");
if(cond == "isempty"){return false;}
var v = this.curValue();
return v === "" || v === null || typeof v == "undefined" || (typeof v == "number" && isNaN(v));
},
updateRuleTitle: function(isEmpty){
var node = this._pane._buttonWidget.titleTextNode;
var title = [
"<div class='dojoxEllipsis'>"
];
if(isEmpty || this.isEmpty()){
node.title = string.substitute(this.plugin.nls.ruleTitleTemplate, [this._ruleIndex || 1]);
title.push(node.title);
}else{
var type = this.dlg.getColumnType(this._colSelect.get("value"));
var column = this.curColumn();
var condition = this.curCondition();
var value = this.formatValue(type, this._condSelect.get("value"), this.curValue());
title.push(
column,
" <span class='dojoxGridRuleTitleCondition'>",
condition,
"</span> ",
value
);
node.title = [column, " ", condition, " ", value].join('');
}
node.innerHTML = title.join('');
if(has('mozilla')){
var tt = html.create("div", {
"style": "width: 100%; height: 100%; position: absolute; top: 0; left: 0; z-index: 9999;"
}, node);
tt.title = node.title;
}
},
updateRuleIndex: function(index){
if(this._ruleIndex != index){
this._ruleIndex = index;
if(this.isEmpty()){
this.updateRuleTitle();
}
}
},
setAriaInfo: function(idx){
var dss = string.substitute, nls = this.plugin.nls;
this._colSelect.domNode.setAttribute("aria-label", dss(nls.waiColumnSelectTemplate, [idx]));
this._condSelect.domNode.setAttribute("aria-label", dss(nls.waiConditionSelectTemplate, [idx]));
this._pane._removeCBoxBtn.domNode.setAttribute("aria-label", dss(nls.waiRemoveRuleButtonTemplate, [idx]));
this._index = idx;
},
_getUsableConditions: function(type){
var conditions = lang.clone(this.dlg._dataTypeMap[type].conditions);
var typeDisabledConds = (this.plugin.args.disabledConditions || {})[type];
var colIdx = parseInt(this._colSelect.get("value"), 10);
var colDisabledConds = isNaN(colIdx) ?
(this.plugin.args.disabledConditions || {})["anycolumn"] :
this.plugin.grid.layout.cells[colIdx].disabledConditions;
if(!lang.isArray(typeDisabledConds)){
typeDisabledConds = [];
}
if(!lang.isArray(colDisabledConds)){
colDisabledConds = [];
}
var arr = typeDisabledConds.concat(colDisabledConds);
if(arr.length){
var disabledConds = {};
array.forEach(arr, function(c){
if(lang.isString(c)){
disabledConds[c.toLowerCase()] = true;
}
});
return array.filter(conditions, function(condOption){
return !(condOption.value in disabledConds);
});
}
return conditions;
},
_setConditionsByType: function(/* string */type){
var condSelect = this._condSelect;
condSelect.removeOption(condSelect.options);
condSelect.addOption(this._getUsableConditions(type));
this._showSelectOrLabel(this._condSelect, this._condSelectAlt);
},
_setValueBoxByType: function(/* string */type){
if(this._curValueBox){
this.valueNode.removeChild(this._curValueBox.domNode);
try{
this._curValueBox.destroyRecursive();
}catch(e){}
delete this._curValueBox;
}
//value box class
var vbcls = this.dlg._dataTypeMap[type].valueBoxCls[this._getValueBoxClsInfo(this._colSelect.get("value"), type)],
vboxArg = this._getValueBoxArgByType(type);
this._curValueBox = this[this._isRange ? "_createRangeBox" : "_createValueBox"](vbcls, vboxArg);
this.valueNode.appendChild(this._curValueBox.domNode);
//Cannot move to setAriaInfo, because the value box is created after the defpane is loaded.
this._curValueBox.domNode.setAttribute("aria-label", string.substitute(this.plugin.nls.waiValueBoxTemplate,[this._index]));
//Now our cbox is completely ready
this.dlg.onRendered(this);
},
//--------------------------UI Configuration--------------------------------------
_getValueBoxArgByType: function(/* string */type){
// summary:
// Get the arguments for the value box construction.
var g = this.plugin.grid,
cell = g.layout.cells[parseInt(this._colSelect.get("value"), 10)],
res = {
cbox: this
};
if(type == "string"){
if(cell && (cell.suggestion || cell.autoComplete)){
lang.mixin(res, {
store: g.store,
searchAttr: cell.field || cell.name,
query: g.query || {},
fetchProperties: {
sort: [{"attribute": cell.field || cell.name}],
queryOptions: lang.mixin({
ignoreCase: true,
deep: true
}, g.queryOptions || {})
}
});
}
}else if(type == "boolean"){
lang.mixin(res, this.dlg.builder.defaultArgs["boolean"]);
}
if(cell && cell.dataTypeArgs){
lang.mixin(res, cell.dataTypeArgs);
}
return res;
},
formatValue: function(type, cond, v){
// summary:
// Format the value to be shown in tooltip.
if(cond == "isempty"){return "";}
if(type == "date" || type == "time"){
var opt = {selector: type},
fmt = dateLocale.format;
if(cond == "range"){
return string.substitute(this.plugin.nls.rangeTemplate, [fmt(v.start, opt), fmt(v.end, opt)]);
}
return fmt(v, opt);
}else if(type == "boolean"){
return v ? this._curValueBox._lblTrue : this._curValueBox._lblFalse;
}
return v;
},
_getValueBoxClsInfo: function(/* int|string */colIndex, /* string */type){
// summary:
// Decide which value box to use given data type and column index.
var cell = this.plugin.grid.layout.cells[parseInt(colIndex, 10)];
//Now we only need to handle string. But maybe we need to handle more types here in the future.
if(type == "string"){
return (cell && (cell.suggestion || cell.autoComplete)) ? "ac" : "dft";
}
return "dft";
}
});
var UniqueComboBox = declare("dojox.grid.enhanced.plugins.filter.UniqueComboBox", ComboBox, {
_openResultList: function(results){
var cache = {}, s = this.store, colName = this.searchAttr;
arguments[0] = array.filter(results, function(item){
var key = s.getValue(item, colName), existed = cache[key];
cache[key] = true;
return !existed;
});
this.inherited(arguments);
},
_onKey: function(evt){
if(evt.charOrCode === keys.ENTER && this._opened){
event.stop(evt);
}
this.inherited(arguments);
}
});
var BooleanValueBox = declare("dojox.grid.enhanced.plugins.filter.BooleanValueBox", [_Widget, _TemplatedMixin, _WidgetsInTemplateMixin], {
templateString: boolValueTemplate,
widgetsInTemplate: true,
constructor: function(args){
var nls = args.cbox.plugin.nls;
this._baseId = args.cbox.id;
this._lblTrue = args.trueLabel || nls.trueLabel || "true";
this._lblFalse = args.falseLabel || nls.falseLabel || "false";
this.args = args;
},
postCreate: function(){
this.onChange();
},
onChange: function(){},
get: function(prop){
return this.rbTrue.get("checked");
},
set: function(prop, v){
this.inherited(arguments);
if(prop == "value"){
this.rbTrue.set("checked", !!v);
this.rbFalse.set("checked", !v);
}
}
});
var FilterDefDialog = declare("dojox.grid.enhanced.plugins.filter.FilterDefDialog", null, {
// summary:
// Create the filter definition UI.
curColIdx: -1,
_relOpCls: "logicall",
_savedCriterias: null,
plugin: null,
constructor: function(args){
var plugin = this.plugin = args.plugin;
this.builder = new FilterBuilder();
this._setupData();
this._cboxes = [];
this.defaultType = plugin.args.defaultType || "string";
(this.filterDefPane = new FilterDefPane({
"dlg": this
})).startup();
(this._defPane = new Dialog({
"refNode": this.plugin.grid.domNode,
"title": plugin.nls.filterDefDialogTitle,
"class": "dojoxGridFDTitlePane",
"iconClass": "dojoxGridFDPaneIcon",
"content": this.filterDefPane
})).startup();
this._defPane.connect(plugin.grid.layer('filter'), "filterDef", lang.hitch(this, "_onSetFilter"));
plugin.grid.setFilter = lang.hitch(this, "setFilter");
plugin.grid.getFilter = lang.hitch(this, "getFilter");
plugin.grid.getFilterRelation = lang.hitch(this, function(){
return this._relOpCls;
});
plugin.connect(plugin.grid.layout, "moveColumn", lang.hitch(this, "onMoveColumn"));
},
onMoveColumn: function(sourceViewIndex, destViewIndex, cellIndex, targetIndex, before){
if(this._savedCriterias && cellIndex != targetIndex){
if(before){ --targetIndex; }
var min = cellIndex < targetIndex ? cellIndex : targetIndex;
var max = cellIndex < targetIndex ? targetIndex : cellIndex;
var dir = targetIndex > min ? 1 : -1;
array.forEach(this._savedCriterias, function(sc){
var idx = parseInt(sc.column, 10);
if(!isNaN(idx) && idx >= min && idx <= max){
sc.column = String(idx == cellIndex ? idx + (max - min) * dir : idx - dir);
}
});
}
},
destroy: function(){
this._defPane.destroyRecursive();
this._defPane = null;
this.filterDefPane = null;
this.builder = null;
this._dataTypeMap = null;
this._cboxes = null;
var g = this.plugin.grid;
g.setFilter = null;
g.getFilter = null;
g.getFilterRelation = null;
this.plugin = null;
},
_setupData: function(){
var nls = this.plugin.nls;
this._dataTypeMap = {
// summary:
// All supported data types
"number":{
valueBoxCls: {
dft: NumberTextBox
},
conditions:[
{label: nls.conditionEqual, value: "equalto", selected: true},
{label: nls.conditionNotEqual, value: "notequalto"},
{label: nls.conditionLess, value: "lessthan"},
{label: nls.conditionLessEqual, value: "lessthanorequalto"},
{label: nls.conditionLarger, value: "largerthan"},
{label: nls.conditionLargerEqual, value: "largerthanorequalto"},
{label: nls.conditionIsEmpty, value: "isempty"}
]
},
"string":{
valueBoxCls: {
dft: TextBox,
ac: UniqueComboBox //For autoComplete
},
conditions:[
{label: nls.conditionContains, value: "contains", selected: true},
{label: nls.conditionIs, value: "equalto"},
{label: nls.conditionStartsWith, value: "startswith"},
{label: nls.conditionEndWith, value: "endswith"},
{label: nls.conditionNotContain, value: "notcontains"},
{label: nls.conditionIsNot, value: "notequalto"},
{label: nls.conditionNotStartWith, value: "notstartswith"},
{label: nls.conditionNotEndWith, value: "notendswith"},
{label: nls.conditionIsEmpty, value: "isempty"}
]
},
"date":{
valueBoxCls: {
dft: DateTextBox
},
conditions:[
{label: nls.conditionIs, value: "equalto", selected: true},
{label: nls.conditionBefore, value: "lessthan"},
{label: nls.conditionAfter, value: "largerthan"},
{label: nls.conditionRange, value: "range"},
{label: nls.conditionIsEmpty, value: "isempty"}
]
},
"time":{
valueBoxCls: {
dft: TimeTextBox
},
conditions:[
{label: nls.conditionIs, value: "equalto", selected: true},
{label: nls.conditionBefore, value: "lessthan"},
{label: nls.conditionAfter, value: "largerthan"},
{label: nls.conditionRange, value: "range"},
{label: nls.conditionIsEmpty, value: "isempty"}
]
},
"boolean": {
valueBoxCls: {
dft: BooleanValueBox
},
conditions: [
{label: nls.conditionIs, value: "equalto", selected: true},
{label: nls.conditionIsEmpty, value: "isempty"}
]
}
};
},
setFilter: function(rules, ruleRelation){
rules = rules || [];
if(!lang.isArray(rules)){
rules = [rules];
}
var func = function(){
if(rules.length){
this._savedCriterias = array.map(rules, function(rule){
var type = rule.type || this.defaultType;
return {
"type": type,
"column": String(rule.column),
"condition": rule.condition,
"value": rule.value,
"colTxt": this.getColumnLabelByValue(String(rule.column)),
"condTxt": this.getConditionLabelByValue(type, rule.condition),
"formattedVal": rule.formattedVal || rule.value
};
}, this);
this._criteriasChanged = true;
if(ruleRelation === "logicall" || ruleRelation === "logicany"){
this._relOpCls = ruleRelation;
}
var exprs = array.map(rules, this.getExprForCriteria, this);
exprs = this.builder.buildExpression(exprs.length == 1 ? exprs[0] : {
"op": this._relOpCls,
"data": exprs
});
this.plugin.grid.layer("filter").filterDef(exprs);
this.plugin.filterBar.toggleClearFilterBtn(false);
}
this._closeDlgAndUpdateGrid();
};
if(this._savedCriterias){
this._clearWithoutRefresh = true;
var handle = connect.connect(this, "clearFilter", this, function(){
connect.disconnect(handle);
this._clearWithoutRefresh = false;
func.apply(this);
});
this.onClearFilter();
}else{
func.apply(this);
}
},
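// A minimal usage sketch (hypothetical rule, assuming the filter plugin is
// enabled on `grid`): apply a single string rule, joining rules with AND.
//
//   grid.setFilter([
//     {column: "0", condition: "contains", value: "news", type: "string"}
//   ], "logicall");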
getFilter: function(){
return lang.clone(this._savedCriterias) || [];
},
getColumnLabelByValue: function(v){
var nls = this.plugin.nls;
if(v.toLowerCase() == "anycolumn"){
return nls["anyColumnOption"];
}else{
var cell = this.plugin.grid.layout.cells[parseInt(v, 10)];
return cell ? (cell.name || cell.field) : "";
}
},
getConditionLabelByValue: function(type, c){
var conditions = this._dataTypeMap[type].conditions;
for(var i = conditions.length - 1; i >= 0; --i){
var cond = conditions[i];
if(cond.value == c.toLowerCase()){
return cond.label;
}
}
return "";
},
addCriteriaBoxes: function(/* int */cnt){
// summary:
// Add *cnt* criteria boxes to the filter definition pane.
// Check overflow if necessary.
if(typeof cnt != "number" || cnt <= 0){
return;
}
var cbs = this._cboxes,
cc = this.filterDefPane.cboxContainer,
total = this.plugin.args.ruleCount,
len = cbs.length, cbox;
//If overflow, add to max rule count.
if(total > 0 && len + cnt > total){
cnt = total - len;
}
for(; cnt > 0; --cnt){
cbox = new CriteriaBox({
dlg: this
});
cbs.push(cbox);
cc.addChild(cbox);
}
//If there's no content box in it, AccordionContainer cannot start up.
cc.startup();
this._updatePane();
this._updateCBoxTitles();
cc.selectChild(cbs[cbs.length-1]);
//Assign an impossibly large scrollTop to scroll the criteria pane to the bottom.
this.filterDefPane.criteriaPane.scrollTop = 1000000;
if(cbs.length === 4){
if(has('ie') <= 6 && !this.__alreadyResizedForIE6){
var size = html.position(cc.domNode);
size.w -= metrics.getScrollbar().w;
cc.resize(size);
this.__alreadyResizedForIE6 = true;
}else{
cc.resize();
}
}
},
removeCriteriaBoxes: function(/* int|CriteriaBox|int[] */cnt,/* bool? */isIdx){
// summary:
// Remove criteria boxes from the filter definition pane.
var cbs = this._cboxes, cc = this.filterDefPane.cboxContainer,
len = cbs.length, start = len - cnt,
end = len - 1, cbox,
curIdx = array.indexOf(cbs, cc.selectedChildWidget.content);
if(lang.isArray(cnt)){
var i, idxes = cnt;
idxes.sort();
cnt = idxes.length;
//find a rule that's not deleted.
//must find and focus the last one, or the hack will not work.
for(i = len - 1; i >= 0 && array.indexOf(idxes, i) >= 0; --i){}
if(i >= 0){
//must select before remove
if(i != curIdx){
cc.selectChild(cbs[i]);
}
//idxes is sorted from small to large,
//so traversing in reverse means no index needs adjusting after each removal.
for(i = cnt-1; i >= 0; --i){
if(idxes[i] >= 0 && idxes[i] < len){
cc.removeChild(cbs[idxes[i]]);
cbs.splice(idxes[i],1);
}
}
}
start = cbs.length;
}else{
if(isIdx === true){
if(cnt >= 0 && cnt < len){
start = end = cnt;
cnt = 1;
}else{
return;
}
}else{
if(cnt instanceof CriteriaBox){
cbox = cnt;
cnt = 1;
start = end = array.indexOf(cbs, cbox);
}else if(typeof cnt != "number" || cnt <= 0){
return;
}else if(cnt >= len){
cnt = end;
start = 1;
}
}
if(end < start){
return;
}
//must select before remove
if(curIdx >= start && curIdx <= end){
cc.selectChild(cbs[start ? start-1 : end+1]);
}
for(; end >= start; --end){
cc.removeChild(cbs[end]);
}
cbs.splice(start, cnt);
}
this._updatePane();
this._updateCBoxTitles();
if(cbs.length === 3){
//In IE6, resizing back to the normal width causes the title button to look strange.
cc.resize();
}
},
getCriteria: function(/* int */idx){
// summary:
// Get the *idx*-th criteria.
if(typeof idx != "number"){
return this._savedCriterias ? this._savedCriterias.length : 0;
}
if(this._savedCriterias && this._savedCriterias[idx]){
return lang.mixin({
relation: this._relOpCls == "logicall" ? this.plugin.nls.and : this.plugin.nls.or
},this._savedCriterias[idx]);
}
return null;
},
getExprForCriteria: function(rule){
if(rule.column == "anycolumn"){
var cells = array.filter(this.plugin.grid.layout.cells, function(cell){
return !(cell.filterable === false || cell.hidden);
});
return {
"op": "logicany",
"data": array.map(cells, function(cell){
return this.getExprForColumn(rule.value, cell.index, rule.type, rule.condition);
}, this)
};
}else{
return this.getExprForColumn(rule.value, rule.column, rule.type, rule.condition);
}
},
getExprForColumn: function(value, colIdx, type, condition){
colIdx = parseInt(colIdx, 10);
var cell = this.plugin.grid.layout.cells[colIdx],
colName = cell.field || cell.name,
obj = {
"datatype": type || this.getColumnType(colIdx),
"args": cell.dataTypeArgs,
"isColumn": true
},
operands = [lang.mixin({"data": this.plugin.args.isServerSide ? colName : cell}, obj)];
obj.isColumn = false;
if(condition == "range"){
operands.push(lang.mixin({"data": value.start}, obj),
lang.mixin({"data": value.end}, obj));
}else if(condition != "isempty"){
operands.push(lang.mixin({"data": value}, obj));
}
return {
"op": condition,
"data": operands
};
},
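	// Illustrative example (not from the original source): for value "foo",
	// a string column and condition "contains", the returned expression is
	// shaped like:
	//   { op: "contains", data: [
	//     { datatype: "string", args: ..., isColumn: true,  data: <cell or field name> },
	//     { datatype: "string", args: ..., isColumn: false, data: "foo" }
	//   ] }
	// For "range" the value operand splits into value.start/value.end; for
	// "isempty" only the column operand is included.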
getColumnType: function(/* int */colIndex){
var cell = this.plugin.grid.layout.cells[parseInt(colIndex, 10)];
if(!cell || !cell.datatype){
return this.defaultType;
}
var type = String(cell.datatype).toLowerCase();
return this._dataTypeMap[type] ? type : this.defaultType;
},
//////////////////////////////////////////////////////////////////////////////////////////////////////////
clearFilter: function(noRefresh){
// summary:
// Clear filter definition.
if(!this._savedCriterias){
return;
}
this._savedCriterias = null;
this.plugin.grid.layer("filter").filterDef(null);
try{
this.plugin.filterBar.toggleClearFilterBtn(true);
this.filterDefPane._clearFilterBtn.set("disabled", true);
this.removeCriteriaBoxes(this._cboxes.length-1);
this._cboxes[0].load({});
}catch(e){
//Any error means the filter is defined outside this plugin.
}
if(noRefresh){
this.closeDialog();
}else{
this._closeDlgAndUpdateGrid();
}
},
showDialog: function(/* int */colIndex){
// summary:
		// Show the filter definition dialog.
this._defPane.show();
this.plugin.filterStatusTip.closeDialog();
this._prepareDialog(colIndex);
},
closeDialog: function(){
// summary:
// Close the filter definition dialog.
if(this._defPane.open){
this._defPane.hide();
}
},
onFilter: function(e){
// summary:
// Triggered when the "Filter" button is clicked.
if(this.canFilter()){
this._defineFilter();
this._closeDlgAndUpdateGrid();
this.plugin.filterBar.toggleClearFilterBtn(false);
}
},
onClearFilter: function(e){
// summary:
// Triggered when the "Clear" button is clicked.
if(this._savedCriterias){
if(this._savedCriterias.length >= this.plugin.ruleCountToConfirmClearFilter){
this.plugin.clearFilterDialog.show();
}else{
this.clearFilter(this._clearWithoutRefresh);
}
}
},
onCancel: function(e){
// summary:
// Triggered when the "Cancel" buttton is clicked.
var sc = this._savedCriterias;
var cbs = this._cboxes;
if(sc){
this.addCriteriaBoxes(sc.length - cbs.length);
this.removeCriteriaBoxes(cbs.length - sc.length);
array.forEach(sc, function(c, i){
cbs[i].load(c);
});
}else{
this.removeCriteriaBoxes(cbs.length - 1);
cbs[0].load({});
}
this.closeDialog();
},
onRendered: function(cbox){
// summary:
// Triggered when the rendering of the filter definition dialog is completely finished.
// cbox:
// Current visible criteria box
if(!has('ff')){
var elems = dijitA11y._getTabNavigable(html.byId(cbox.domNode));
dijitFocus.focus(elems.lowest || elems.first);
}else{
var dp = this._defPane;
dp._getFocusItems(dp.domNode);
dijitFocus.focus(dp._firstFocusItem);
}
},
_onSetFilter: function(filterDef){
// summary:
		// If someone clears the filter def in the store directly, we must clear it in the UI.
		// If someone defines a filter directly in the store, we don't know how to handle it!
if(filterDef === null && this._savedCriterias){
this.clearFilter();
}
},
_prepareDialog: function(/* int */colIndex){
var sc = this._savedCriterias,
cbs = this._cboxes, i, cbox;
this.curColIdx = colIndex;
if(!sc){
if(cbs.length === 0){
this.addCriteriaBoxes(1);
}else{
//Re-populate columns anyway, because we don't know when the column is set to hidden.
for(i = 0; (cbox = cbs[i]); ++i){
cbox.changeCurrentColumn();
}
}
}else if(this._criteriasChanged){
this.filterDefPane._relSelect.set("value", this._relOpCls === "logicall" ? "0" : "1");
this._criteriasChanged = false;
var needNewCBox = sc.length > cbs.length ? sc.length - cbs.length : 0;
this.addCriteriaBoxes(needNewCBox);
this.removeCriteriaBoxes(cbs.length - sc.length);
this.filterDefPane._clearFilterBtn.set("disabled", false);
for(i = 0; i < cbs.length - needNewCBox; ++i){
cbs[i].load(sc[i]);
}
if(needNewCBox > 0){
var handled = [], handle = connect.connect(this, "onRendered", function(cbox){
var i = array.indexOf(cbs, cbox);
if(!handled[i]){
handled[i] = true;
if(--needNewCBox === 0){
connect.disconnect(handle);
}
cbox.load(sc[i]);
}
});
}
}
//Since we're allowed to remove cboxes when the definition pane is not shown,
//we have to resize the container to have a correct _verticalSpace.
this.filterDefPane.cboxContainer.resize();
},
_defineFilter: function(){
var cbs = this._cboxes,
filterCboxes = function(method){
return array.filter(array.map(cbs, function(cbox){
return cbox[method]();
}), function(result){
return !!result;
});
},
exprs = filterCboxes("getExpr");
this._savedCriterias = filterCboxes("save");
exprs = exprs.length == 1 ? exprs[0] : {
"op": this._relOpCls,
"data": exprs
};
exprs = this.builder.buildExpression(exprs);
this.plugin.grid.layer("filter").filterDef(exprs);
this.filterDefPane._clearFilterBtn.set("disabled", false);
},
_updateCBoxTitles: function(){
for(var cbs = this._cboxes, i = cbs.length; i > 0; --i){
cbs[i - 1].updateRuleIndex(i);
cbs[i - 1].setAriaInfo(i);
}
},
_updatePane: function(){
var cbs = this._cboxes,
defPane = this.filterDefPane;
defPane._addCBoxBtn.set("disabled", cbs.length == this.plugin.args.ruleCount);
defPane._filterBtn.set("disabled", !this.canFilter());
},
canFilter: function(){
return array.filter(this._cboxes, function(cbox){
return !cbox.isEmpty();
}).length > 0;
},
_closeDlgAndUpdateGrid: function(){
this.closeDialog();
var g = this.plugin.grid;
g.showMessage(g.loadingMessage);
setTimeout(lang.hitch(g, g._refresh), this._defPane.duration + 10);
}
});
return FilterDefDialog;
});
|
PypiClean
|
/compose.db2-0.2.0.2.tar.gz/compose.db2-0.2.0.2/compose/db2/utils/hadr.py
|
import os
import socket
import re
from compose.db2.utils.sqlUtils import Db2SQL
from compose.db2 import configuration, formation
class Db2HADR():
def __init__(self, conf=None):
if conf is None:
conf = configuration.Configuration()
self.conf = conf
def build_hadr_standby_list(self):
peers = self.conf['extended_peers']
if len(peers) == 1:
return "STANDARD"
else:
designated_primary_name = self.build_primary_host()
standby_list = "'"
for peer in peers:
if peer.split(".")[0] != designated_primary_name.split(".")[0]:
standby_list += '"' + peer + '",'
standby_list = standby_list[: -1]
standby_list += "'"
return standby_list
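    # Illustrative result for three extended peers where host-1 is the
    # designated primary (hostnames are placeholders):
    #   '"host-2.example.com","host-3.example.com"'
    # i.e. a single-quoted, comma-separated list of double-quoted standbys.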
def build_primary_host(self):
peers = self.conf['extended_peers']
id = os.environ.get("ID")
if len(peers) == 1:
for peer in peers:
                # note: "==" here; "in" against a plain string would do substring matching
                if peer.split(".")[0] == "c-" + id + "-m-0":
designated_pri = peer
else:
sqls = Db2SQL(self.conf)
try:
                # get the result from a non-DR peer; this reduces the number of times we call sqls.run_desired_select_sql
for peer in peers:
query = sqls.ha_primary
if not re.search('^.*private.db2.databases.appdomain.cloud$', peer):
result = sqls.run_desired_select_sql(peer, query)
break
for peer in peers:
# cannot connect across clusters using local ports
if not re.search('^.*private.db2.databases.appdomain.cloud$', peer):
if result[0].get(0).split("|")[0] == peer:
designated_pri = peer
return designated_pri
else:
designated_pri = None
continue
else:
                        # unlike "result", the DR peer's name does not include "-zone" (e.g. "lon04" for "London"),
                        # so instead of checking whether the result equals the peer's name, we check whether it contains the first part
if peer.split(".")[0] in result[0].get(0).split("|")[0]:
designated_pri = peer
return designated_pri
else:
designated_pri = None
continue
            except Exception:
                # no luck talking to db2; fall through to the defaults below
                designated_pri = None
if designated_pri is None:
            # no connection to db2; assume the DR setup is running for the first time or the -0 node is primary
if self.conf['disaster_recovery_host'] != "":
fmtn = formation.Formation(self.conf['crd_group'],
self.conf['account'],
self.conf['id'])
if fmtn.is_disaster_recovery_site():
designated_pri = self.conf['disaster_recovery_host']
if designated_pri is None:
for peer in peers:
                if peer.split(".")[0] == "c-" + id + "-m-0":
designated_pri = peer
break
return designated_pri
def set_designated_role(self):
hostname = socket.gethostname()
peers = self.conf['extended_peers']
if len(peers) == 1:
designated_role = "STANDARD"
else:
designated_pri = self.build_primary_host().split(".")[0]
for peer in peers:
if hostname == designated_pri:
designated_role = "PRIMARY"
break
else:
designated_role = "STANDBY"
return designated_role
def get_db2_hadr_start_cmd(self):
designated_role = self.set_designated_role()
if designated_role in ("PRIMARY", "STANDBY"):
if designated_role == "PRIMARY":
return "db2 start hadr on db %s as primary by force" % self.conf['db_name']
elif designated_role == "STANDBY":
return "db2 start hadr on db %s as standby" % self.conf['db_name']
else:
return "STANDARD"
|
PypiClean
|
/py-pure-client-1.38.0.tar.gz/py-pure-client-1.38.0/pypureclient/flashblade/FB_2_10/models/support_remote_assist_paths.py
|
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flashblade.FB_2_10 import models
class SupportRemoteAssistPaths(object):
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'component_name': 'str',
'status': 'str'
}
attribute_map = {
'component_name': 'component_name',
'status': 'status'
}
required_args = {
}
def __init__(
self,
component_name=None, # type: str
status=None, # type: str
):
"""
Keyword args:
component_name (str): The name of the FM.
status (str): The status of the remote-assist session on the local FM. Valid values are `reconnecting`, `connected`, `disconnected`, and `unknown`.
"""
if component_name is not None:
self.component_name = component_name
if status is not None:
self.status = status
def __setattr__(self, key, value):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `SupportRemoteAssistPaths`".format(key))
self.__dict__[key] = value
def __getattribute__(self, item):
value = object.__getattribute__(self, item)
if isinstance(value, Property):
return None
else:
return value
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
if hasattr(self, attr):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(SupportRemoteAssistPaths, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, SupportRemoteAssistPaths):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
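# --- Usage sketch (illustrative, not part of the generated model) ---
# Attribute names come from `swagger_types` above; the values are placeholders.
#
#   paths = SupportRemoteAssistPaths(component_name="CH1.FM1", status="connected")
#   print(paths.to_dict())  # {'component_name': 'CH1.FM1', 'status': 'connected'}
#   print(paths)            # pretty-printed via to_str()/__repr__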
|
PypiClean
|
/RPD-0.6.0.tar.gz/RPD-0.6.0/rpd/apps/bot.py
|
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE
"""The bot app"""
import asyncio
import importlib
import logging
import os
from threading import Event
from time import time
from typing import Any, Callable, Dict, List, Literal, Optional, TypeVar, Union
from rpd.api.rest_factory import RESTFactory
from rpd.api.gateway import Gateway
from rpd.audio import VoiceClient, has_nacl
from rpd.implements.core import implements
from rpd.interactions.command import Command
from rpd.internal import dispatcher
from rpd.state import ConnectionState
from rpd.ui import print_banner, start_logging
_log = logging.getLogger(__name__)
__all__: List[str] = ["BotApp"]
CFT = TypeVar("CFT", bound="dispatcher.CoroFunc")
class BotApp:
"""Represents a Discord bot.
.. versionadded:: 0.4.0
Attributes
----------
factory
The instance of RESTFactory
state
The client's connection state
dispatcher
The dispatcher
gateway
The Gateway
p
The presence
cogs
A :class:`dict` of all Cogs.
Parameters
----------
token
The bot token
intents
        The bot intents, defaults to `32509`
status
The bot status, defaults to online
afk
        Whether the bot is AFK, defaults to False
loop
The loop you want to use, defaults to :class:`asyncio.new_event_loop`
module
The module with a `banner.txt` to print
logs
A :class:`int`, :class:`str` or :class:`dict`.
debug
To show debug logs or not.
"""
def __init__(
self,
loop: Optional[asyncio.AbstractEventLoop] = asyncio.new_event_loop(),
intents: Optional[int] = 32509,
module: Optional[str] = "rpd",
shards: Optional[int] = None,
mobile: Optional[bool] = False,
command_prefix: Optional[str] = None,
logs: Optional[Union[None, int, str, Dict[str, Any]]] = None,
debug: Optional[bool] = False,
):
print_banner(module)
start_logging(logs, debug)
self.command_prefix = command_prefix
self.state = ConnectionState(
loop=loop,
intents=intents,
bot=self,
shard_count=shards,
)
self.dispatcher = dispatcher.Dispatcher(state=self.state)
self.factory = RESTFactory(state=self.state)
self.gateway = Gateway(
state=self.state,
dispatcher=self.dispatcher,
factory=self.factory,
mobile=mobile,
)
self.voice = VoiceClient(self.state, self.dispatcher, self.gateway)
self._got_gateway_bot: Event = Event()
self.cogs = {}
if not has_nacl:
_log.warning(
"You don't have PyNaCl, meaning you won't be able to use Voice features."
)
async def login(self, token: str):
"""Starts the bot connection
.. versionadded:: 0.4.0
"""
self.token = token
r = await self.factory.login(token)
self.state._bot_id = r["id"]
return r
async def connect(self, token: str):
"""Starts the WebSocket(Gateway) connection with Discord.
.. versionadded:: 0.4.0
"""
await self.gateway.connect(token=token)
def implements(self, command_name: str, prefixed_command: bool = False):
return implements(
self, self.command_prefix, self.dispatcher, command_name, prefixed_command
)
def run(self, token: str):
"""A blocking function to start your bot"""
async def runner():
await self.login(token=token)
await asyncio.sleep(0.111) # sleep for a bit
await self.connect(token=token)
self.state.loop.create_task(runner())
self.state.loop.run_forever()
@property
async def is_ready(self):
"""Returns if the bot is ready or not."""
return self.state._ready.is_set()
@property
def presence(self) -> list[str]:
return self.state._bot_presences
def change_presence(
self,
name: str,
type: int,
status: Literal["online", "dnd", "idle", "invisible", "offline"] = "online",
stream_url: Optional[str] = None,
afk: bool = False,
):
if type == 1 and stream_url is None:
raise NotImplementedError("Streams need to be provided a url!")
elif type == 1 and stream_url is not None:
ret = {
"name": name,
"type": 1,
"url": stream_url,
}
else:
# another type
ret = {
"name": name,
"type": type,
}
json = {"op": 3, "d": {"activities": [ret]}}
if afk is True:
json["d"]["afk"] = True
json["d"]["since"] = time()
else:
json["d"]["afk"] = False
json["d"]["since"] = None
json["d"]["status"] = status
return self.gateway.send(json)
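    # Illustrative call (Discord activity type 0 is "Playing"; the name is a
    # placeholder): bot.change_presence("with RPD", 0, status="idle") sends an
    # op-3 presence update like {"op": 3, "d": {"activities": [...], ...}}.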
def event(self, coro: dispatcher.Coro) -> dispatcher.Coro:
return self.dispatcher.listen(coro)
    def load_module(self, location, package=None):
        importlib.import_module(location, package)
    def load_modules(self, folder):
        for file in os.listdir(folder):
            # pass the folder as the package so load_module gets both arguments
            self.load_module(file, folder)
def listen(self, name: str = None) -> Callable[[CFT], CFT]:
def decorator(func: CFT) -> CFT:
self.dispatcher.add_listener(func, name)
return func
return decorator
def command(
self,
slash_command: bool = False,
user_command: bool = False,
message_command: bool = False,
):
return Command(
self.state,
self.factory,
slash_command=slash_command,
message_command=message_command,
user_command=user_command,
)
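# --- Usage sketch (illustrative, not part of the module) ---
# Minimal wiring of a BotApp; the token is a placeholder and the event name
# "ready" is an assumption about what the dispatcher emits.
#
#   app = BotApp(command_prefix="!")
#
#   @app.listen("ready")
#   async def on_ready():
#       print("bot is ready")
#
#   app.run("YOUR_BOT_TOKEN")  # blocking: logs in, then opens the gateway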
|
PypiClean
|
/django-widgy-0.9.2.tar.gz/django-widgy-0.9.2/widgy/static/widgy/js/lib/ckeditor/plugins/a11yhelp/dialogs/lang/cy.js
|
/*
Copyright (c) 2003-2014, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
CKEDITOR.plugins.setLang("a11yhelp","cy",{title:"Canllawiau Hygyrchedd",contents:"Cynnwys Cymorth. I gau y deialog hwn, pwyswch ESC.",legend:[{name:"Cyffredinol",items:[{name:"Bar Offer y Golygydd",legend:"Pwyswch $ {toolbarFocus} i fynd at y bar offer. Symudwch i'r grŵp bar offer nesaf a blaenorol gyda TAB a SHIFT-TAB. Symudwch i'r botwm bar offer nesaf a blaenorol gyda SAETH DDE neu SAETH CHWITH. Pwyswch SPACE neu ENTER i wneud botwm y bar offer yn weithredol."},{name:"Deialog y Golygydd",legend:"Tu mewn i'r deialog, pwyswch TAB i fynd i'r maes nesaf ar y deialog, pwyswch SHIFT + TAB i symud i faes blaenorol, pwyswch ENTER i gyflwyno'r deialog, pwyswch ESC i ddiddymu'r deialog. Ar gyfer deialogau sydd â thudalennau aml-tab, pwyswch ALT + F10 i lywio'r tab-restr. Yna symudwch i'r tab nesaf gyda TAB neu SAETH DDE. Symudwch i dab blaenorol gyda SHIFT + TAB neu'r SAETH CHWITH. Pwyswch SPACE neu ENTER i ddewis y dudalen tab."},
{name:"Dewislen Cyd-destun y Golygydd",legend:"Pwyswch $ {contextMenu} neu'r ALLWEDD 'APPLICATION' i agor y ddewislen cyd-destun. Yna symudwch i'r opsiwn ddewislen nesaf gyda'r TAB neu'r SAETH I LAWR. Symudwch i'r opsiwn blaenorol gyda SHIFT + TAB neu'r SAETH I FYNY. Pwyswch SPACE neu ENTER i ddewis yr opsiwn ddewislen. Agorwch is-dewislen yr opsiwn cyfredol gyda SPACE neu ENTER neu SAETH DDE. Ewch yn ôl i'r eitem ar y ddewislen uwch gydag ESC neu SAETH CHWITH. Ceuwch y ddewislen cyd-destun gydag ESC."},
{name:"Blwch Rhestr y Golygydd",legend:"Tu mewn y blwch rhestr, ewch i'r eitem rhestr nesaf gyda TAB neu'r SAETH I LAWR. Symudwch i restr eitem flaenorol gyda SHIFT + TAB neu SAETH I FYNY. Pwyswch SPACE neu ENTER i ddewis yr opsiwn o'r rhestr. Pwyswch ESC i gau'r rhestr."},{name:"Bar Llwybr Elfen y Golygydd",legend:"Pwyswch ${elementsPathFocus} i fynd i'r bar llwybr elfennau. Symudwch i fotwm yr elfen nesaf gyda TAB neu SAETH DDE. Symudwch i fotwm blaenorol gyda SHIFT + TAB neu SAETH CHWITH. Pwyswch SPACE neu ENTER i ddewis yr elfen yn y golygydd."}]},
{name:"Gorchmynion",items:[{name:"Gorchymyn dadwneud",legend:"Pwyswch ${undo}"},{name:"Gorchymyn ailadrodd",legend:"Pwyswch ${redo}"},{name:"Gorchymyn Bras",legend:"Pwyswch ${bold}"},{name:"Gorchymyn italig",legend:"Pwyswch ${italig}"},{name:"Gorchymyn tanlinellu",legend:"Pwyso ${underline}"},{name:"Gorchymyn dolen",legend:"Pwyswch ${link}"},{name:"Gorchymyn Cwympo'r Dewislen",legend:"Pwyswch ${toolbarCollapse}"},{name:"Myned i orchymyn bwlch ffocws blaenorol",legend:"Pwyswch ${accessPreviousSpace} i fyned i'r \"blwch ffocws sydd methu ei gyrraedd\" cyn y caret, er enghraifft: dwy elfen HR drws nesaf i'w gilydd. AIladroddwch y cyfuniad allwedd i gyrraedd bylchau ffocws pell."},
{name:"Ewch i'r gorchymyn blwch ffocws nesaf",legend:"Pwyswch ${accessNextSpace} i fyned i'r blwch ffocws agosaf nad oes modd ei gyrraedd ar ôl y caret, er enghraifft: dwy elfen HR drws nesaf i'w gilydd. Ailadroddwch y cyfuniad allwedd i gyrraedd blychau ffocws pell."},{name:"Cymorth Hygyrchedd",legend:"Pwyswch ${a11yHelp}"}]}],backspace:"Backspace",tab:"Tab",enter:"Enter",shift:"Shift",ctrl:"Ctrl",alt:"Alt",pause:"Pause",capslock:"Caps Lock",escape:"Escape",pageUp:"Page Up",pageDown:"Page Down",
end:"End",home:"Home",leftArrow:"Left Arrow",upArrow:"Up Arrow",rightArrow:"Right Arrow",downArrow:"Down Arrow",insert:"Insert","delete":"Delete",leftWindowKey:"Left Windows key",rightWindowKey:"Right Windows key",selectKey:"Select key",numpad0:"Numpad 0",numpad1:"Numpad 1",numpad2:"Numpad 2",numpad3:"Numpad 3",numpad4:"Numpad 4",numpad5:"Numpad 5",numpad6:"Numpad 6",numpad7:"Numpad 7",numpad8:"Numpad 8",numpad9:"Numpad 9",multiply:"Multiply",add:"Add",subtract:"Subtract",decimalPoint:"Decimal Point",
divide:"Divide",f1:"F1",f2:"F2",f3:"F3",f4:"F4",f5:"F5",f6:"F6",f7:"F7",f8:"F8",f9:"F9",f10:"F10",f11:"F11",f12:"F12",numLock:"Num Lock",scrollLock:"Scroll Lock",semiColon:"Semicolon",equalSign:"Equal Sign",comma:"Comma",dash:"Dash",period:"Period",forwardSlash:"Forward Slash",graveAccent:"Grave Accent",openBracket:"Open Bracket",backSlash:"Backslash",closeBracket:"Close Bracket",singleQuote:"Single Quote"});
|
PypiClean
|
/Stetl-2.1.tar.gz/Stetl-2.1/examples/basics/10_jinja2_templating/readme.txt
|
This example, or rather a series of examples, shows an unconventional but exciting new
way of transforming source data into complex/rich XML/GML. Possibly "transforming"
is not the proper term but "substitution".
Enter the Jinja... Within many web frameworks, the use of "templating" is common:
think of JSP (Java), ASP (.NET), PHP, and even JavaScript (e.g. Mustache).
Within Python there are a zillion templating languages to choose from, ranging from
plain built-in $string substitution up to full-fledged programming: Django Templating, Genshi, Mako.
See https://wiki.python.org/moin/Templating
Within Stetl we choose to support two languages for now, both derived from a single
TemplatingFilter class: String templating (see example 9) and Jinja2 templating.
This example shows several Jinja2 templating examples from basic to very advanced.
All are bundled within the single etl.cfg file as Stetl allows multiple ETL chains
defined in a single config file.
For all examples, the dirs are as follows:
- input/: all input files
- output/: all generated output files
- templates/: the Jinja2 templates and macros
EXAMPLE 1 - simple Jinja2 templating: JSON to XML
input/cities.json is converted to output/cities.xml using the template templates/cities-json2xml.jinja2
EXAMPLE 2 - more advanced Jinja2 templating: JSON to GML
input/cities.json is converted to output/cities.gml (GML FeatureCollection) using the
template templates/cities-json2gml.jinja2
Shows the use of Jinja2 Globals (input/globals.json), global/static values that are
always available in the template, and the use of Jinja2 Macros (templates/macros.jinja2)
for recurring tasks.
EXAMPLE 3 - advanced Jinja2 templating - GeoJSON to GML - and reading from URL
input/cities-gjson.json is converted to output/cities-gjson.gml (GML FeatureCollection) using the
template templates/cities-gjson2gml.jinja2. Shows the use of advanced Jinja2 Filters within Stetl that
can transform GeoJSON Geometries to GML Geometries. Also shows that input and/or globals can be fetched from
a remote URL.
EXAMPLE 4 - data harmonization for INSPIRE: transform local Dutch address data to INSPIRE Annex I Addresses (AD)
input/addresses.csv is converted to output/inspire-addresses.gml (GML Spatial dataset) using the
template templates/addresses2inspire-ad.jinja2. Shows that data transformation for INSPIRE doesn't need to be
hard. Usually the source data is in tabular form, here a CSV file. The required data format
for INSPIRE is often "bloated" with boilerplate encodings. Here templating makes lots of sense.
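As a quick illustration of the idea behind EXAMPLE 1 (a sketch only, not the
bundled template: the "name" field and city values are made up), rendering JSON
to XML with Jinja2 from Python boils down to:

    from jinja2 import Template

    cities = [{"name": "Amsterdam"}, {"name": "Otterlo"}]
    template = Template(
        '<cities>\n'
        '{% for city in cities %}'
        '  <city name="{{ city.name }}"/>\n'
        '{% endfor %}'
        '</cities>\n'
    )
    print(template.render(cities=cities))

Stetl wraps exactly this kind of substitution in its Jinja2 TemplatingFilter, so
the template lives in a file (templates/*.jinja2) and the input JSON arrives via
the ETL chain instead of a hard-coded list.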
|
PypiClean
|
/pulumi_gcp-6.65.0a1693462587.tar.gz/pulumi_gcp-6.65.0a1693462587/pulumi_gcp/gameservices/game_server_deployment_rollout.py
|
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['GameServerDeploymentRolloutArgs', 'GameServerDeploymentRollout']
@pulumi.input_type
class GameServerDeploymentRolloutArgs:
def __init__(__self__, *,
default_game_server_config: pulumi.Input[str],
deployment_id: pulumi.Input[str],
game_server_config_overrides: Optional[pulumi.Input[Sequence[pulumi.Input['GameServerDeploymentRolloutGameServerConfigOverrideArgs']]]] = None,
project: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a GameServerDeploymentRollout resource.
:param pulumi.Input[str] default_game_server_config: This field points to the game server config that is
applied by default to all realms and clusters. For example,
`projects/my-project/locations/global/gameServerDeployments/my-game/configs/my-config`.
- - -
:param pulumi.Input[str] deployment_id: The deployment to rollout the new config to. Only 1 rollout must be associated with each deployment.
:param pulumi.Input[Sequence[pulumi.Input['GameServerDeploymentRolloutGameServerConfigOverrideArgs']]] game_server_config_overrides: The game_server_config_overrides contains the per game server config
overrides. The overrides are processed in the order they are listed. As
soon as a match is found for a cluster, the rest of the list is not
processed.
Structure is documented below.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
pulumi.set(__self__, "default_game_server_config", default_game_server_config)
pulumi.set(__self__, "deployment_id", deployment_id)
if game_server_config_overrides is not None:
pulumi.set(__self__, "game_server_config_overrides", game_server_config_overrides)
if project is not None:
pulumi.set(__self__, "project", project)
@property
@pulumi.getter(name="defaultGameServerConfig")
def default_game_server_config(self) -> pulumi.Input[str]:
"""
This field points to the game server config that is
applied by default to all realms and clusters. For example,
`projects/my-project/locations/global/gameServerDeployments/my-game/configs/my-config`.
- - -
"""
return pulumi.get(self, "default_game_server_config")
@default_game_server_config.setter
def default_game_server_config(self, value: pulumi.Input[str]):
pulumi.set(self, "default_game_server_config", value)
@property
@pulumi.getter(name="deploymentId")
def deployment_id(self) -> pulumi.Input[str]:
"""
The deployment to rollout the new config to. Only 1 rollout must be associated with each deployment.
"""
return pulumi.get(self, "deployment_id")
@deployment_id.setter
def deployment_id(self, value: pulumi.Input[str]):
pulumi.set(self, "deployment_id", value)
@property
@pulumi.getter(name="gameServerConfigOverrides")
def game_server_config_overrides(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['GameServerDeploymentRolloutGameServerConfigOverrideArgs']]]]:
"""
The game_server_config_overrides contains the per game server config
overrides. The overrides are processed in the order they are listed. As
soon as a match is found for a cluster, the rest of the list is not
processed.
Structure is documented below.
"""
return pulumi.get(self, "game_server_config_overrides")
@game_server_config_overrides.setter
def game_server_config_overrides(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['GameServerDeploymentRolloutGameServerConfigOverrideArgs']]]]):
pulumi.set(self, "game_server_config_overrides", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
return pulumi.get(self, "project")
@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project", value)
@pulumi.input_type
class _GameServerDeploymentRolloutState:
def __init__(__self__, *,
default_game_server_config: Optional[pulumi.Input[str]] = None,
deployment_id: Optional[pulumi.Input[str]] = None,
game_server_config_overrides: Optional[pulumi.Input[Sequence[pulumi.Input['GameServerDeploymentRolloutGameServerConfigOverrideArgs']]]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering GameServerDeploymentRollout resources.
:param pulumi.Input[str] default_game_server_config: This field points to the game server config that is
applied by default to all realms and clusters. For example,
`projects/my-project/locations/global/gameServerDeployments/my-game/configs/my-config`.
- - -
:param pulumi.Input[str] deployment_id: The deployment to rollout the new config to. Only 1 rollout must be associated with each deployment.
:param pulumi.Input[Sequence[pulumi.Input['GameServerDeploymentRolloutGameServerConfigOverrideArgs']]] game_server_config_overrides: The game_server_config_overrides contains the per game server config
overrides. The overrides are processed in the order they are listed. As
soon as a match is found for a cluster, the rest of the list is not
processed.
Structure is documented below.
:param pulumi.Input[str] name: The resource id of the game server deployment
eg: `projects/my-project/locations/global/gameServerDeployments/my-deployment/rollout`.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
if default_game_server_config is not None:
pulumi.set(__self__, "default_game_server_config", default_game_server_config)
if deployment_id is not None:
pulumi.set(__self__, "deployment_id", deployment_id)
if game_server_config_overrides is not None:
pulumi.set(__self__, "game_server_config_overrides", game_server_config_overrides)
if name is not None:
pulumi.set(__self__, "name", name)
if project is not None:
pulumi.set(__self__, "project", project)
@property
@pulumi.getter(name="defaultGameServerConfig")
def default_game_server_config(self) -> Optional[pulumi.Input[str]]:
"""
This field points to the game server config that is
applied by default to all realms and clusters. For example,
`projects/my-project/locations/global/gameServerDeployments/my-game/configs/my-config`.
- - -
"""
return pulumi.get(self, "default_game_server_config")
@default_game_server_config.setter
def default_game_server_config(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "default_game_server_config", value)
@property
@pulumi.getter(name="deploymentId")
def deployment_id(self) -> Optional[pulumi.Input[str]]:
"""
The deployment to rollout the new config to. Only 1 rollout must be associated with each deployment.
"""
return pulumi.get(self, "deployment_id")
@deployment_id.setter
def deployment_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "deployment_id", value)
@property
@pulumi.getter(name="gameServerConfigOverrides")
def game_server_config_overrides(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['GameServerDeploymentRolloutGameServerConfigOverrideArgs']]]]:
"""
The game_server_config_overrides contains the per game server config
overrides. The overrides are processed in the order they are listed. As
soon as a match is found for a cluster, the rest of the list is not
processed.
Structure is documented below.
"""
return pulumi.get(self, "game_server_config_overrides")
@game_server_config_overrides.setter
def game_server_config_overrides(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['GameServerDeploymentRolloutGameServerConfigOverrideArgs']]]]):
pulumi.set(self, "game_server_config_overrides", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The resource id of the game server deployment
eg: `projects/my-project/locations/global/gameServerDeployments/my-deployment/rollout`.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
return pulumi.get(self, "project")
@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project", value)
class GameServerDeploymentRollout(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
default_game_server_config: Optional[pulumi.Input[str]] = None,
deployment_id: Optional[pulumi.Input[str]] = None,
game_server_config_overrides: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['GameServerDeploymentRolloutGameServerConfigOverrideArgs']]]]] = None,
project: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
This represents the rollout state. This is part of the game server
deployment.
To get more information about GameServerDeploymentRollout, see:
* [API documentation](https://cloud.google.com/game-servers/docs/reference/rest/v1beta/GameServerDeploymentRollout)
* How-to Guides
* [Official Documentation](https://cloud.google.com/game-servers/docs)
## Example Usage
### Game Service Deployment Rollout Basic
```python
import pulumi
import json
import pulumi_gcp as gcp
default_game_server_deployment = gcp.gameservices.GameServerDeployment("defaultGameServerDeployment",
deployment_id="tf-test-deployment",
description="a deployment description")
default_game_server_config = gcp.gameservices.GameServerConfig("defaultGameServerConfig",
config_id="tf-test-config",
deployment_id=default_game_server_deployment.deployment_id,
description="a config description",
fleet_configs=[gcp.gameservices.GameServerConfigFleetConfigArgs(
name="some-non-guid",
fleet_spec=json.dumps({
"replicas": 1,
"scheduling": "Packed",
"template": {
"metadata": {
"name": "tf-test-game-server-template",
},
"spec": {
"ports": [{
"name": "default",
"portPolicy": "Dynamic",
"containerPort": 7654,
"protocol": "UDP",
}],
"template": {
"spec": {
"containers": [{
"name": "simple-udp-server",
"image": "gcr.io/agones-images/udp-server:0.14",
}],
},
},
},
},
}),
)])
default_game_server_deployment_rollout = gcp.gameservices.GameServerDeploymentRollout("defaultGameServerDeploymentRollout",
deployment_id=default_game_server_deployment.deployment_id,
default_game_server_config=default_game_server_config.name)
```
## Import
GameServerDeploymentRollout can be imported using any of these accepted formats
```sh
$ pulumi import gcp:gameservices/gameServerDeploymentRollout:GameServerDeploymentRollout default projects/{{project}}/locations/global/gameServerDeployments/{{deployment_id}}/rollout
```
```sh
$ pulumi import gcp:gameservices/gameServerDeploymentRollout:GameServerDeploymentRollout default {{project}}/{{deployment_id}}
```
```sh
$ pulumi import gcp:gameservices/gameServerDeploymentRollout:GameServerDeploymentRollout default {{deployment_id}}
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] default_game_server_config: This field points to the game server config that is
applied by default to all realms and clusters. For example,
`projects/my-project/locations/global/gameServerDeployments/my-game/configs/my-config`.
- - -
:param pulumi.Input[str] deployment_id: The deployment to rollout the new config to. Only 1 rollout must be associated with each deployment.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['GameServerDeploymentRolloutGameServerConfigOverrideArgs']]]] game_server_config_overrides: The game_server_config_overrides contains the per game server config
overrides. The overrides are processed in the order they are listed. As
soon as a match is found for a cluster, the rest of the list is not
processed.
Structure is documented below.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: GameServerDeploymentRolloutArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
This represents the rollout state. This is part of the game server
deployment.
To get more information about GameServerDeploymentRollout, see:
* [API documentation](https://cloud.google.com/game-servers/docs/reference/rest/v1beta/GameServerDeploymentRollout)
* How-to Guides
* [Official Documentation](https://cloud.google.com/game-servers/docs)
## Example Usage
### Game Service Deployment Rollout Basic
```python
import pulumi
import json
import pulumi_gcp as gcp
default_game_server_deployment = gcp.gameservices.GameServerDeployment("defaultGameServerDeployment",
deployment_id="tf-test-deployment",
description="a deployment description")
default_game_server_config = gcp.gameservices.GameServerConfig("defaultGameServerConfig",
config_id="tf-test-config",
deployment_id=default_game_server_deployment.deployment_id,
description="a config description",
fleet_configs=[gcp.gameservices.GameServerConfigFleetConfigArgs(
name="some-non-guid",
fleet_spec=json.dumps({
"replicas": 1,
"scheduling": "Packed",
"template": {
"metadata": {
"name": "tf-test-game-server-template",
},
"spec": {
"ports": [{
"name": "default",
"portPolicy": "Dynamic",
"containerPort": 7654,
"protocol": "UDP",
}],
"template": {
"spec": {
"containers": [{
"name": "simple-udp-server",
"image": "gcr.io/agones-images/udp-server:0.14",
}],
},
},
},
},
}),
)])
default_game_server_deployment_rollout = gcp.gameservices.GameServerDeploymentRollout("defaultGameServerDeploymentRollout",
deployment_id=default_game_server_deployment.deployment_id,
default_game_server_config=default_game_server_config.name)
```
## Import
GameServerDeploymentRollout can be imported using any of these accepted formats
```sh
$ pulumi import gcp:gameservices/gameServerDeploymentRollout:GameServerDeploymentRollout default projects/{{project}}/locations/global/gameServerDeployments/{{deployment_id}}/rollout
```
```sh
$ pulumi import gcp:gameservices/gameServerDeploymentRollout:GameServerDeploymentRollout default {{project}}/{{deployment_id}}
```
```sh
$ pulumi import gcp:gameservices/gameServerDeploymentRollout:GameServerDeploymentRollout default {{deployment_id}}
```
:param str resource_name: The name of the resource.
:param GameServerDeploymentRolloutArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(GameServerDeploymentRolloutArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
default_game_server_config: Optional[pulumi.Input[str]] = None,
deployment_id: Optional[pulumi.Input[str]] = None,
game_server_config_overrides: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['GameServerDeploymentRolloutGameServerConfigOverrideArgs']]]]] = None,
project: Optional[pulumi.Input[str]] = None,
__props__=None):
opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = GameServerDeploymentRolloutArgs.__new__(GameServerDeploymentRolloutArgs)
if default_game_server_config is None and not opts.urn:
raise TypeError("Missing required property 'default_game_server_config'")
__props__.__dict__["default_game_server_config"] = default_game_server_config
if deployment_id is None and not opts.urn:
raise TypeError("Missing required property 'deployment_id'")
__props__.__dict__["deployment_id"] = deployment_id
__props__.__dict__["game_server_config_overrides"] = game_server_config_overrides
__props__.__dict__["project"] = project
__props__.__dict__["name"] = None
super(GameServerDeploymentRollout, __self__).__init__(
'gcp:gameservices/gameServerDeploymentRollout:GameServerDeploymentRollout',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
default_game_server_config: Optional[pulumi.Input[str]] = None,
deployment_id: Optional[pulumi.Input[str]] = None,
game_server_config_overrides: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['GameServerDeploymentRolloutGameServerConfigOverrideArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None) -> 'GameServerDeploymentRollout':
"""
Get an existing GameServerDeploymentRollout resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] default_game_server_config: This field points to the game server config that is
applied by default to all realms and clusters. For example,
`projects/my-project/locations/global/gameServerDeployments/my-game/configs/my-config`.
- - -
:param pulumi.Input[str] deployment_id: The deployment to rollout the new config to. Only 1 rollout must be associated with each deployment.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['GameServerDeploymentRolloutGameServerConfigOverrideArgs']]]] game_server_config_overrides: The game_server_config_overrides contains the per game server config
overrides. The overrides are processed in the order they are listed. As
soon as a match is found for a cluster, the rest of the list is not
processed.
Structure is documented below.
:param pulumi.Input[str] name: The resource id of the game server deployment
eg: `projects/my-project/locations/global/gameServerDeployments/my-deployment/rollout`.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _GameServerDeploymentRolloutState.__new__(_GameServerDeploymentRolloutState)
__props__.__dict__["default_game_server_config"] = default_game_server_config
__props__.__dict__["deployment_id"] = deployment_id
__props__.__dict__["game_server_config_overrides"] = game_server_config_overrides
__props__.__dict__["name"] = name
__props__.__dict__["project"] = project
return GameServerDeploymentRollout(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="defaultGameServerConfig")
def default_game_server_config(self) -> pulumi.Output[str]:
"""
This field points to the game server config that is
applied by default to all realms and clusters. For example,
`projects/my-project/locations/global/gameServerDeployments/my-game/configs/my-config`.
- - -
"""
return pulumi.get(self, "default_game_server_config")
@property
@pulumi.getter(name="deploymentId")
def deployment_id(self) -> pulumi.Output[str]:
"""
The deployment to rollout the new config to. Only 1 rollout must be associated with each deployment.
"""
return pulumi.get(self, "deployment_id")
@property
@pulumi.getter(name="gameServerConfigOverrides")
def game_server_config_overrides(self) -> pulumi.Output[Optional[Sequence['outputs.GameServerDeploymentRolloutGameServerConfigOverride']]]:
"""
The game_server_config_overrides contains the per game server config
overrides. The overrides are processed in the order they are listed. As
soon as a match is found for a cluster, the rest of the list is not
processed.
Structure is documented below.
"""
return pulumi.get(self, "game_server_config_overrides")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The resource id of the game server deployment
eg: `projects/my-project/locations/global/gameServerDeployments/my-deployment/rollout`.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def project(self) -> pulumi.Output[str]:
"""
The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
return pulumi.get(self, "project")
|
PypiClean
|
/apache_superset_iteco-2.1.1.4-py3-none-any.whl/superset/static/assets/8989.5a7ea49dc49b9f447815.entry.js
|
"use strict";(globalThis.webpackChunksuperset=globalThis.webpackChunksuperset||[]).push([[8989],{48989:(e,t,o)=>{o.r(t),o.d(t,{COLUMN_NAME_ALIASES:()=>j.DS,COMPARATOR:()=>L.O9,ColumnOption:()=>me.E,ColumnTypeLabel:()=>Re.j,D3_FORMAT_DOCS:()=>Y.YD,D3_FORMAT_OPTIONS:()=>Y.F6,D3_NUMBER_FORMAT_DESCRIPTION_PERCENTAGE_TEXT:()=>Y.u6,D3_NUMBER_FORMAT_DESCRIPTION_VALUES_TEXT:()=>Y.pj,D3_TIME_FORMAT_DOCS:()=>Y.oq,D3_TIME_FORMAT_OPTIONS:()=>Y.HC,DATASET_TIME_COLUMN_OPTION:()=>j.LU,DEFAULT_NUMBER_FORMAT:()=>Y.Ul,DEFAULT_TIME_FORMAT:()=>Y._r,InfoTooltipWithTrigger:()=>de.V,MULTIPLE_VALUE_COMPARATORS:()=>L.vD,MetricOption:()=>Ee.m,QUERY_TIME_COLUMN_OPTION:()=>j.CV,QueryModeLabel:()=>j.cm,TIME_COMPARISON_SEPARATOR:()=>ue.K,TIME_FILTER_LABELS:()=>j.m_,TestDataset:()=>ye,__hack__reexport__:()=>Te.M,boxplotOperator:()=>re.F,columnChoices:()=>q.Z,contributionModeControl:()=>U,contributionOperator:()=>ae.I,datePickerInAdhocFilterMixin:()=>fe.sy,defineSavedMetrics:()=>Q.R,expandControlConfig:()=>G.q,expandControlType:()=>G.$,extractExtraMetrics:()=>ce.T,flattenOperator:()=>ie.k,formatSelectOptions:()=>R.m,formatSelectOptionsForRange:()=>R.B,getColorFormatters:()=>H.ni,getColorFunction:()=>H.E2,getMetricOffsetsMap:()=>le.O,getOpacity:()=>H.Km,getStandardizedControls:()=>X.u,getTemporalColumns:()=>w.X,isColumnMeta:()=>L.xN,isControlPanelSectionConfig:()=>L.D_,isDataset:()=>L.It,isDerivedSeries:()=>_e.m,isQueryResponse:()=>L.JQ,isSavedExpression:()=>L.zy,isStandardizedFormData:()=>L.pp,isTemporalColumn:()=>w.x,isTimeComparison:()=>se.H,mainMetric:()=>W,pivotOperator:()=>ee.H,prophetOperator:()=>ne.D,renameOperator:()=>oe.N,resampleOperator:()=>te.X,rollingWindowOperator:()=>K.V,round:()=>H.NM,sections:()=>Ne,sharedControlComponents:()=>Te.Z,sharedControls:()=>pe.ZP,sortOperator:()=>$.C,temporalColumnMixin:()=>fe.V7,timeCompareOperator:()=>z.N,timeComparePivotOperator:()=>J.O,withDndFallback:()=>Oe.pO,xAxisMixin:()=>fe.P2,xAxisSortAscControl:()=>F,xAxisSortControl:()=>V});var a={};o.r(a),o.d(a,{FORECAST_DEFAULT_DATA:()=>N,TITLE_MARGIN_OPTIONS:()=>C,TITLE_POSITION_OPTIONS:()=>A,advancedAnalyticsControls:()=>p,annotationLayers:()=>O,annotations:()=>d,annotationsAndLayersControls:()=>T,colorScheme:()=>u,datasourceAndVizType:()=>c,echartsTimeSeriesQuery:()=>Z,echartsTimeSeriesQueryWithXAxisSort:()=>B,forecastIntervalControls:()=>S,genericTime:()=>s,legacyRegularTime:()=>_,legacyTimeseriesTime:()=>l,titleControls:()=>g});var n=o(55867),r=o(11146);const i={label:(0,n.t)("Time"),expanded:!0,description:(0,n.t)("Time related form attributes")},l={...i,controlSetRows:[["granularity"],["granularity_sqla"],["time_grain_sqla"],["time_range"]]},s=r.O6?{controlSetRows:[]}:{...i,controlSetRows:[["granularity_sqla"],["time_grain_sqla"],["time_range"]]},_=r.O6?{controlSetRows:[]}:{...i,controlSetRows:[["granularity_sqla"],["time_range"]]},c={label:(0,n.t)("Datasource & Chart Type"),expanded:!0,controlSetRows:[["datasource"],["viz_type"],[{name:"slice_id",config:{type:"HiddenControl",label:(0,n.t)("Chart ID"),hidden:!0,description:(0,n.t)("The id of the active chart")}},{name:"cache_timeout",config:{type:"HiddenControl",label:(0,n.t)("Cache Timeout (seconds)"),hidden:!0,description:(0,n.t)("The number of seconds before expiring the cache")}},{name:"url_params",config:{type:"HiddenControl",label:(0,n.t)("URL Parameters"),hidden:!0,description:(0,n.t)("Extra url parameters for use in Jinja templated queries")}},{name:"custom_params",config:{type:"HiddenControl",label:(0,n.t)("Extra Parameters"),hidden:!0,description:(0,n.t)("Extra 
parameters that any plugins can choose to set for use in Jinja templated queries")}}]]},u={label:(0,n.t)("Color Scheme"),controlSetRows:[["color_scheme"]]},d={label:(0,n.t)("Annotations and Layers"),tabOverride:"data",expanded:!0,controlSetRows:[[{name:"annotation_layers",config:{type:"AnnotationLayerControl",label:"",default:[],description:(0,n.t)("Annotation Layers"),renderTrigger:!0}}]]};o(67294);var m=o(62363),R=o(6126),E=o(11965);const p={label:(0,n.t)("Advanced analytics"),tabOverride:"data",description:(0,n.t)("This section contains options that allow for advanced analytical post processing of query results"),controlSetRows:[[(0,E.tZ)("div",{className:"section-header"},(0,n.t)("Rolling window"))],[{name:"rolling_type",config:{type:"SelectControl",label:(0,n.t)("Rolling function"),default:null,choices:[[null,(0,n.t)("None")]].concat((0,R.m)(Object.values(m.su))),description:(0,n.t)("Defines a rolling window function to apply, works along with the [Periods] text box")}}],[{name:"rolling_periods",config:{type:"TextControl",label:(0,n.t)("Periods"),isInt:!0,description:(0,n.t)("Defines the size of the rolling window function, relative to the time granularity selected"),visibility:(e,t)=>{var o,a;let{controls:n}=e,{name:r}=t;const i=r.endsWith("_b")?"rolling_type_b":"rolling_type";return Boolean(null==(o=n[i])?void 0:o.value)&&(null==(a=n[i])?void 0:a.value)!==m.su.Cumsum}}}],[{name:"min_periods",config:{type:"TextControl",label:(0,n.t)("Min periods"),isInt:!0,description:(0,n.t)('The minimum number of rolling periods required to show a value. For instance if you do a cumulative sum on 7 days you may want your "Min Period" to be 7, so that all data points shown are the total of 7 periods. This will hide the "ramp up" taking place over the first 7 periods'),visibility:(e,t)=>{var o,a;let{controls:n}=e,{name:r}=t;const i=r.endsWith("_b")?"rolling_type_b":"rolling_type";return Boolean(null==(o=n[i])?void 0:o.value)&&(null==(a=n[i])?void 0:a.value)!==m.su.Cumsum}}}],[(0,E.tZ)("div",{className:"section-header"},(0,n.t)("Time comparison"))],[{name:"time_compare",config:{type:"SelectControl",multi:!0,freeForm:!0,label:(0,n.t)("Time shift"),choices:[["1 day ago",(0,n.t)("1 day ago")],["1 week ago",(0,n.t)("1 week ago")],["28 days ago",(0,n.t)("28 days ago")],["30 days ago",(0,n.t)("30 days ago")],["52 weeks ago",(0,n.t)("52 weeks ago")],["1 year ago",(0,n.t)("1 year ago")],["104 weeks ago",(0,n.t)("104 weeks ago")],["2 years ago",(0,n.t)("2 years ago")],["156 weeks ago",(0,n.t)("156 weeks ago")],["3 years ago",(0,n.t)("3 years ago")]],description:(0,n.t)("Overlay one or more timeseries from a relative time period. Expects relative time deltas in natural language (example: 24 hours, 7 days, 52 weeks, 365 days). 
Free text is supported.")}}],[{name:"comparison_type",config:{type:"SelectControl",label:(0,n.t)("Calculation type"),default:"values",choices:[[m.wj.Values,(0,n.t)("Actual values")],[m.wj.Difference,(0,n.t)("Difference")],[m.wj.Percentage,(0,n.t)("Percentage change")],[m.wj.Ratio,(0,n.t)("Ratio")]],description:(0,n.t)("How to display time shifts: as individual lines; as the difference between the main time series and each time shift; as the percentage change; or as the ratio between series and time shifts.")}}],[(0,E.tZ)("div",{className:"section-header"},(0,n.t)("Resample"))],[{name:"resample_rule",config:{type:"SelectControl",freeForm:!0,label:(0,n.t)("Rule"),default:null,choices:[["1T",(0,n.t)("1 minutely frequency")],["1H",(0,n.t)("1 hourly frequency")],["1D",(0,n.t)("1 calendar day frequency")],["7D",(0,n.t)("7 calendar day frequency")],["1MS",(0,n.t)("1 month start frequency")],["1M",(0,n.t)("1 month end frequency")],["1AS",(0,n.t)("1 year start frequency")],["1A",(0,n.t)("1 year end frequency")]],description:(0,n.t)("Pandas resample rule")}}],[{name:"resample_method",config:{type:"SelectControl",label:(0,n.t)("Fill method"),default:null,choices:[["asfreq",(0,n.t)("Null imputation")],["zerofill",(0,n.t)("Zero imputation")],["linear",(0,n.t)("Linear interpolation")],["ffill",(0,n.t)("Forward values")],["bfill",(0,n.t)("Backward values")],["median",(0,n.t)("Median values")],["mean",(0,n.t)("Mean values")],["sum",(0,n.t)("Sum values")]],description:(0,n.t)("Pandas resample method")}}]]},O=[],T={label:(0,n.t)("Annotations and Layers"),expanded:!1,tabOverride:"data",controlSetRows:[[{name:"annotation_layers",config:{type:"AnnotationLayerControl",label:"",default:O,description:(0,n.t)("Annotation Layers"),renderTrigger:!1}}]]};var f=o(75294),y=o(99298);const N={forecastEnabled:!1,forecastInterval:.8,forecastPeriods:10,forecastSeasonalityDaily:null,forecastSeasonalityWeekly:null,forecastSeasonalityYearly:null},S={label:(0,n.t)("Predictive Analytics"),expanded:!1,controlSetRows:[[{name:"forecastEnabled",config:{type:"CheckboxControl",label:(0,n.t)("Enable forecast"),renderTrigger:!1,default:N.forecastEnabled,description:(0,n.t)("Enable forecasting")}}],[{name:"forecastPeriods",config:{type:"TextControl",label:(0,n.t)("Forecast periods"),validators:[f.Z],default:N.forecastPeriods,description:(0,n.t)("How many periods into the future do we want to predict")}}],[{name:"forecastInterval",config:{type:"TextControl",label:(0,n.t)("Confidence interval"),validators:[y.Z],default:N.forecastInterval,description:(0,n.t)("Width of the confidence interval. Should be between 0 and 1")}}],[{name:"forecastSeasonalityYearly",config:{type:"SelectControl",freeForm:!0,label:(0,n.t)("Yearly seasonality"),choices:[[null,(0,n.t)("default")],[!0,(0,n.t)("Yes")],[!1,(0,n.t)("No")]],default:N.forecastSeasonalityYearly,description:(0,n.t)("Should yearly seasonality be applied. An integer value will specify Fourier order of seasonality.")}}],[{name:"forecastSeasonalityWeekly",config:{type:"SelectControl",freeForm:!0,label:(0,n.t)("Weekly seasonality"),choices:[[null,(0,n.t)("default")],[!0,(0,n.t)("Yes")],[!1,(0,n.t)("No")]],default:N.forecastSeasonalityWeekly,description:(0,n.t)("Should weekly seasonality be applied. 
An integer value will specify Fourier order of seasonality.")}}],[{name:"forecastSeasonalityDaily",config:{type:"SelectControl",freeForm:!0,label:(0,n.t)("Daily seasonality"),choices:[[null,(0,n.t)("default")],[!0,(0,n.t)("Yes")],[!1,(0,n.t)("No")]],default:N.forecastSeasonalityDaily,description:(0,n.t)("Should daily seasonality be applied. An integer value will specify Fourier order of seasonality.")}}]]},C=[15,30,50,75,100,125,150,200],A=[["Left",(0,n.t)("Left")],["Top",(0,n.t)("Top")]],g={label:(0,n.t)("Chart Title"),tabOverride:"customize",expanded:!0,controlSetRows:[[(0,E.tZ)("div",{className:"section-header"},(0,n.t)("X Axis"))],[{name:"x_axis_title",config:{type:"TextControl",label:(0,n.t)("X Axis Title"),renderTrigger:!0,default:"",description:(0,n.t)("Changing this control takes effect instantly")}}],[{name:"x_axis_title_margin",config:{type:"SelectControl",freeForm:!0,clearable:!0,label:(0,n.t)("X AXIS TITLE BOTTOM MARGIN"),renderTrigger:!0,default:C[0],choices:(0,R.m)(C),description:(0,n.t)("Changing this control takes effect instantly")}}],[(0,E.tZ)("div",{className:"section-header"},(0,n.t)("Y Axis"))],[{name:"y_axis_title",config:{type:"TextControl",label:(0,n.t)("Y Axis Title"),renderTrigger:!0,default:"",description:(0,n.t)("Changing this control takes effect instantly")}}],[{name:"y_axis_title_margin",config:{type:"SelectControl",freeForm:!0,clearable:!0,label:(0,n.t)("Y AXIS TITLE MARGIN"),renderTrigger:!0,default:C[0],choices:(0,R.m)(C),description:(0,n.t)("Changing this control takes effect instantly")}}],[{name:"y_axis_title_position",config:{type:"SelectControl",freeForm:!0,clearable:!1,label:(0,n.t)("Y AXIS TITLE POSITION"),renderTrigger:!0,default:A[0][0],choices:A,description:(0,n.t)("Changing this control takes effect instantly")}}]]};var I=o(78580),v=o.n(I),h=o(43716),b=o(37731),D=o(10581),M=o(55786),x=o(56652),L=o(38575),w=o(45211);const U={name:"contributionMode",config:{type:"SelectControl",label:(0,n.t)("Contribution Mode"),default:null,choices:[[null,(0,n.t)("None")],[h.YC.Row,(0,n.t)("Row")],[h.YC.Column,(0,n.t)("Series")]],description:(0,n.t)("Calculate contribution per series or row")}},P=e=>{var t,o,a,n;let{controls:r}=e;return(0,b.Z)(null==r||null==(t=r.x_axis)?void 0:t.value)&&!(0,w.x)((0,D.Z)(null==r||null==(o=r.x_axis)?void 0:o.value),null==r||null==(a=r.datasource)?void 0:a.datasource)&&Array.isArray(null==r||null==(n=r.groupby)?void 0:n.value)&&0===r.groupby.value.length},V={name:"x_axis_sort",config:{type:"XAxisSortControl",label:e=>{var t;return"horizontal"===(null==(t=e.form_data)?void 0:t.orientation)?(0,n.t)("Y-Axis Sort By"):(0,n.t)("X-Axis Sort By")},description:(0,n.t)("Decides which column to sort the base axis by."),shouldMapStateToProps:()=>!0,mapStateToProps:(e,t)=>{var o,a,n,r,i,l;const{controls:s,datasource:_}=e,c=(0,L.It)(_)?_:void 0,u=[null==s||null==(o=s.x_axis)?void 0:o.value].filter(Boolean),d=[...(0,M.Z)(null==s||null==(a=s.metrics)?void 0:a.value),null==s||null==(n=s.timeseries_limit_metric)?void 0:n.value].filter(Boolean),m=[...u.map((e=>{var t;const o=(0,D.Z)(e);return{value:o,label:(null==c||null==(t=c.verbose_map)?void 0:t[o])||o}})),...d.map((e=>{var t;const o=(0,x.Z)(e);return{value:o,label:(null==c||null==(t=c.verbose_map)?void 0:t[o])||o}}))];return{shouldReset:!("string"==typeof t.value&&v()(r=m.map((e=>e.value))).call(r,t.value)&&!(0,w.x)((0,D.Z)(null==s||null==(i=s.x_axis)?void 0:i.value),null==s||null==(l=s.datasource)?void 
0:l.datasource)),options:m}},visibility:P}},F={name:"x_axis_sort_asc",config:{type:"CheckboxControl",label:e=>{var t;return"horizontal"===(null==(t=e.form_data)?void 0:t.orientation)?(0,n.t)("Y-Axis Sort Ascending"):(0,n.t)("X-Axis Sort Ascending")},default:!0,description:(0,n.t)("Whether to sort ascending or descending on the base Axis."),visibility:P}},k=[["metrics"],["groupby"],[U],["adhoc_filters"],["limit"],["timeseries_limit_metric"],["order_desc"],["row_limit"],["truncate_metric"],["show_empty_columns"]],Z={label:(0,n.t)("Query"),expanded:!0,controlSetRows:[[r.O6?"x_axis":null],[r.O6?"time_grain_sqla":null],...k]},B={label:(0,n.t)("Query"),expanded:!0,controlSetRows:[[r.O6?"x_axis":null],[r.O6?"time_grain_sqla":null],[r.O6?V:null],[r.O6?F:null],...k]};var Y=o(58842),G=o(89368),H=o(9609);function W(e){let t;return e&&e.length>0&&(e.forEach((e=>{"count"===e.metric_name&&(t="count")})),t||(t=e[0].metric_name)),t}var q=o(75781),Q=o(19835),X=o(20002),j=o(35622),K=o(97490),z=o(60955),J=o(92155),$=o(44928),ee=o(4098),te=o(99343),oe=o(19546),ae=o(69862),ne=o(37701),re=o(20884),ie=o(63038),le=o(6219),se=o(73739),_e=o(27751),ce=o(51689),ue=o(14920),de=o(9882),me=o(65634),Re=o(63351),Ee=o(51384),pe=o(39650),Oe=o(53769),Te=o(356),fe=o(83389);const ye={column_format:{},columns:[{advanced_data_type:void 0,certification_details:null,certified_by:null,column_name:"num",description:null,expression:"",filterable:!0,groupby:!0,id:332,is_certified:!1,is_dttm:!1,python_date_format:null,type:"BIGINT",type_generic:0,verbose_name:null,warning_markdown:null},{advanced_data_type:void 0,certification_details:null,certified_by:null,column_name:"gender",description:null,expression:"",filterable:!0,groupby:!0,id:330,is_certified:!1,is_dttm:!1,python_date_format:null,type:"VARCHAR(16)",type_generic:1,verbose_name:"",warning_markdown:null},{advanced_data_type:void 0,certification_details:null,certified_by:null,column_name:"state",description:null,expression:"",filterable:!0,groupby:!0,id:333,is_certified:!1,is_dttm:!1,python_date_format:null,type:"VARCHAR(10)",type_generic:1,verbose_name:null,warning_markdown:null},{advanced_data_type:void 0,certification_details:null,certified_by:null,column_name:"ds",description:null,expression:"",filterable:!0,groupby:!0,id:329,is_certified:!1,is_dttm:!0,python_date_format:null,type:"TIMESTAMP WITHOUT TIME ZONE",type_generic:2,verbose_name:null,warning_markdown:null},{advanced_data_type:void 0,certification_details:null,certified_by:null,column_name:"name",description:null,expression:"",filterable:!0,groupby:!0,id:331,is_certified:!1,is_dttm:!1,python_date_format:null,type:"VARCHAR(255)",type_generic:1,verbose_name:null,warning_markdown:null}],datasource_name:"birth_names",description:null,granularity_sqla:"ds",id:2,main_dttm_col:"ds",metrics:[{certification_details:null,certified_by:null,d3format:null,description:null,expression:"COUNT(*)",id:7,is_certified:!1,metric_name:"count",verbose_name:"COUNT(*)",warning_markdown:"",warning_text:null}],name:"public.birth_names",order_by_choices:[],owners:[{first_name:"admin",id:1,last_name:"user",username:"admin"}],type:o(46078).i9.Dataset,uid:"2__table",verbose_map:{}},Ne=a},20884:(e,t,o)=>{o.d(t,{F:()=>l});var a=o(55786),n=o(10581),r=o(56652);const i=/(\d+)\/(\d+) percentiles/,l=(e,t)=>{const{groupby:o,whiskerOptions:l}=e;if(l){let e,s;const _=i.exec(l);if("Tukey"!==l&&l)if("Min/max (no outliers)"===l)e="min/max";else{if(!_)throw new Error(`Unsupported whisker type: ${l}`);e="percentile",s=[parseInt(_[1],10),parseInt(_[2],10)]}else 
e="tukey";return{operation:"boxplot",options:{whisker_type:e,percentiles:s,groupby:(0,a.Z)(o).map(n.Z),metrics:(0,a.Z)(t.metrics).map(r.Z)}}}}},69862:(e,t,o)=>{o.d(t,{I:()=>a});const a=(e,t)=>{if(e.contributionMode)return{operation:"contribution",options:{orientation:e.contributionMode}}}},63038:(e,t,o)=>{o.d(t,{k:()=>a});const a=(e,t)=>({operation:"flatten"})},4098:(e,t,o)=>{o.d(t,{H:()=>s});var a=o(55786),n=o(56652),r=o(11146),i=o(10581),l=o(51689);const s=(e,t)=>{const o=[...(0,a.Z)(t.metrics),...(0,l.T)(e)].map(n.Z),s=(0,r.M8)(e),_=t.series_columns||t.columns;if(s&&o.length)return{operation:"pivot",options:{index:[s],columns:(0,a.Z)(_).map(i.Z),aggregates:Object.fromEntries(o.map((e=>[e,{operator:"mean"}]))),drop_missing_columns:!(null!=e&&e.show_empty_columns)}}}},37701:(e,t,o)=>{o.d(t,{D:()=>n});var a=o(11146);const n=(e,t)=>{const o=(0,a.M8)(e);if(e.forecastEnabled&&o)return{operation:"prophet",options:{time_grain:e.time_grain_sqla,periods:parseInt(e.forecastPeriods,10),confidence_interval:parseFloat(e.forecastInterval),yearly_seasonality:e.forecastSeasonalityYearly,weekly_seasonality:e.forecastSeasonalityWeekly,daily_seasonality:e.forecastSeasonalityDaily,index:o}}}},19546:(e,t,o)=>{o.d(t,{N:()=>u});var a=o(78580),n=o.n(a),r=o(55786),i=o(11146),l=o(62363),s=o(56652),_=o(73739),c=o(6219);const u=(e,t)=>{var o;const a=(0,r.Z)(t.metrics),u=(0,r.Z)(t.series_columns||t.columns),{truncate_metric:d}=e,m=(0,i.M8)(e);if(1===a.length&&u.length>0&&m&&(!(0,_.H)(e,t)||!n()(o=[l.wj.Difference,l.wj.Ratio,l.wj.Percentage]).call(o,e.comparison_type))&&void 0!==d&&d){const o=[];if((0,_.H)(e,t)&&e.comparison_type===l.wj.Values){const a=(0,c.O)(e,t),i=(0,r.Z)(e.time_compare);[...a.keys()].forEach((e=>{const t=i.find((t=>n()(e).call(e,t)));o.push([e,t])}))}return o.push([(0,s.Z)(a[0]),null]),{operation:"rename",options:{columns:Object.fromEntries(o),level:0,inplace:!0}}}}},99343:(e,t,o)=>{o.d(t,{X:()=>a});const a=(e,t)=>{const o="zerofill"===e.resample_method,a=o?"asfreq":e.resample_method,n=e.resample_rule;if(a&&n)return{operation:"resample",options:{method:a,rule:n,fill_value:o?0:null}}}},97490:(e,t,o)=>{o.d(t,{V:()=>c});var a=o(78580),n=o.n(a),r=o(55786),i=o(62363),l=o(77675),s=o(73739),_=o(6219);const c=(e,t)=>{var o;let a;if((0,s.H)(e,t)){const o=(0,_.O)(e,t);a=[...Array.from(o.values()),...Array.from(o.keys())]}else a=(0,r.Z)(t.metrics).map((e=>"string"==typeof e?e:e.label));const c=Object.fromEntries(a.map((e=>[e,e])));return e.rolling_type===i.su.Cumsum?{operation:"cum",options:{operator:"sum",columns:c}}:n()(o=[i.su.Sum,i.su.Mean,i.su.Std]).call(o,e.rolling_type)?{operation:"rolling",options:{rolling_type:e.rolling_type,window:(0,l.Z)(e.rolling_periods,1),min_periods:(0,l.Z)(e.min_periods,0),columns:c}}:void 0}},44928:(e,t,o)=>{o.d(t,{C:()=>d});var a=o(41609),n=o.n(a),r=o(78580),i=o.n(r),l=o(11146),s=o(55786),_=o(56652),c=o(37731),u=o(51689);const d=(e,t)=>{const o=[(0,l.M8)(e),...(0,s.Z)(e.metrics).map(_.Z),...(0,u.T)(e).map(_.Z)].filter(Boolean);if(l.O6&&(0,c.Z)(null==e?void 0:e.x_axis_sort)&&(0,c.Z)(null==e?void 0:e.x_axis_sort_asc)&&i()(o).call(o,e.x_axis_sort)&&n()(e.groupby))return e.x_axis_sort===(0,l.M8)(e)?{operation:"sort",options:{is_sort_index:!0,ascending:e.x_axis_sort_asc}}:{operation:"sort",options:{by:e.x_axis_sort,ascending:e.x_axis_sort_asc}}}},60955:(e,t,o)=>{o.d(t,{N:()=>i});var a=o(62363),n=o(6219),r=o(73739);const i=(e,t)=>{const 
o=e.comparison_type,i=(0,n.O)(e,t);if((0,r.H)(e,t)&&o!==a.wj.Values)return{operation:"compare",options:{source_columns:Array.from(i.values()),compare_columns:Array.from(i.keys()),compare_type:o,drop_original_columns:!0}}}},92155:(e,t,o)=>{o.d(t,{O:()=>s});var a=o(11146),n=o(55786),r=o(10581),i=o(6219),l=o(73739);const s=(e,t)=>{const o=(0,i.O)(e,t),s=(0,a.M8)(e),_=t.series_columns||t.columns;if((0,l.H)(e,t)&&s){const t=Object.fromEntries([...o.values(),...o.keys()].map((e=>[e,{operator:"mean"}])));return{operation:"pivot",options:{index:[s],columns:(0,n.Z)(_).map(r.Z),drop_missing_columns:!(null!=e&&e.show_empty_columns),aggregates:t}}}}},14920:(e,t,o)=>{o.d(t,{K:()=>a});const a="__"},51689:(e,t,o)=>{o.d(t,{T:()=>n});var a=o(56652);function n(e){const{groupby:t,timeseries_limit_metric:o,x_axis_sort:n}=e,r=[];return!(t||[]).length&&o&&(0,a.Z)(o)===n&&r.push(o),r}},6219:(e,t,o)=>{o.d(t,{O:()=>i});var a=o(55786),n=o(56652),r=o(14920);const i=(e,t)=>{const o=(0,a.Z)(t.metrics),i=(0,a.Z)(e.time_compare),l=o.map(n.Z),s=new Map;return l.forEach((e=>{i.forEach((t=>{s.set([e,t].join(r.K),e)}))})),s}},27751:(e,t,o)=>{o.d(t,{m:()=>l});var a=o(47037),n=o.n(a),r=o(62363),i=o(55786);const l=(e,t)=>{if(t.comparison_type!==r.wj.Values)return!1;const o=(0,i.Z)(null==t?void 0:t.time_compare);return!!n()(e.name)&&!!o.find((t=>e.name.endsWith(t)))}},73739:(e,t,o)=>{o.d(t,{H:()=>l});var a=o(78580),n=o.n(a),r=o(62363),i=o(6219);const l=(e,t)=>{var o;const a=e.comparison_type,l=(0,i.O)(e,t);return n()(o=Object.values(r.wj)).call(o,a)&&l.size>0}},75781:(e,t,o)=>{o.d(t,{Z:()=>r});var a=o(55786),n=o(38575);function r(e){return(0,n.It)(e)&&(0,n.xN)(e.columns[0])?e.columns.map((e=>[e.column_name,e.verbose_name||e.column_name])).sort(((e,t)=>e[1].toLowerCase()>t[1].toLowerCase()?1:-1)):(0,n.JQ)(e)?(0,a.Z)(e.columns).map((e=>[e.name,e.name])).sort(((e,t)=>e[1].toLowerCase()>t[1].toLowerCase()?1:-1)):[]}},9609:(e,t,o)=>{o.d(t,{NM:()=>s,Km:()=>_,E2:()=>c,ni:()=>u});var a=o(78580),n=o.n(a),r=o(30845),i=o(25130),l=o(38575);const s=function(e,t){return void 0===t&&(t=0),Number(`${Math.round(Number(`${e}e+${t}`))}e-${t}`)},_=function(e,t,o,a,n){return void 0===a&&(a=.05),void 0===n&&(n=1),o===t?n:Math.min(n,s(Math.abs((n-a)/(o-t)*(e-t))+a,2))},c=(e,t)=>{let o,{operator:a,targetValue:r,targetValueLeft:s,targetValueRight:c,colorScheme:u}=e,d=.05;if(void 0===a||void 0===u)return()=>{};if(n()(l.vD).call(l.vD,a)&&(void 0===s||void 0===c))return()=>{};if(a!==l.O9.NONE&&!n()(l.vD).call(l.vD,a)&&void 0===r)return()=>{};switch(a){case l.O9.NONE:d=0,o=(e,t)=>{const o=Math.min(...t),a=Math.max(...t);return e>=o&&e<=a&&{cutoffValue:o,extremeValue:a}};break;case l.O9.GREATER_THAN:o=(e,t)=>e>r&&{cutoffValue:r,extremeValue:Math.max(...t)};break;case l.O9.LESS_THAN:o=(e,t)=>e<r&&{cutoffValue:r,extremeValue:Math.min(...t)};break;case l.O9.GREATER_OR_EQUAL:o=(e,t)=>e>=r&&{cutoffValue:r,extremeValue:Math.max(...t)};break;case l.O9.LESS_OR_EQUAL:o=(e,t)=>e<=r&&{cutoffValue:r,extremeValue:Math.min(...t)};break;case l.O9.EQUAL:o=e=>e===r&&{cutoffValue:r,extremeValue:r};break;case l.O9.NOT_EQUAL:o=(e,t)=>{if(e===r)return!1;const o=Math.max(...t),a=Math.min(...t);return{cutoffValue:r,extremeValue:Math.abs(r-a)>Math.abs(o-r)?a:o}};break;case l.O9.BETWEEN:o=e=>e>s&&e<c&&{cutoffValue:s,extremeValue:c};break;case l.O9.BETWEEN_OR_EQUAL:o=e=>e>=s&&e<=c&&{cutoffValue:s,extremeValue:c};break;case l.O9.BETWEEN_OR_LEFT_EQUAL:o=e=>e>=s&&e<c&&{cutoffValue:s,extremeValue:c};break;case 
l.O9.BETWEEN_OR_RIGHT_EQUAL:o=e=>e>s&&e<=c&&{cutoffValue:s,extremeValue:c};break;default:o=()=>!1}return e=>{const a=o(e,t);if(!1===a)return;const{cutoffValue:n,extremeValue:r}=a;return(0,i.Zf)(u,_(e,n,r,d,1))}},u=(0,r.Z)(((e,t)=>{var o;return null!=(o=null==e?void 0:e.reduce(((e,o)=>(void 0!==(null==o?void 0:o.column)&&((null==o?void 0:o.operator)===l.O9.NONE||void 0!==(null==o?void 0:o.operator)&&(n()(l.vD).call(l.vD,null==o?void 0:o.operator)?void 0!==(null==o?void 0:o.targetValueLeft)&&void 0!==(null==o?void 0:o.targetValueRight):void 0!==(null==o?void 0:o.targetValue)))&&e.push({column:null==o?void 0:o.column,getColorFromValue:c(o,t.map((e=>e[o.column])))}),e)),[]))?o:[]}))},43716:(e,t,o)=>{o.d(t,{C5:()=>n,WN:()=>r,Tb:()=>i,sn:()=>l,yv:()=>s,Qn:()=>_,eh:()=>c,pY:()=>u,YC:()=>d,ZP:()=>m});var a=o(46078);const n={FRONTEND_CSRF_ERROR:"FRONTEND_CSRF_ERROR",FRONTEND_NETWORK_ERROR:"FRONTEND_NETWORK_ERROR",FRONTEND_TIMEOUT_ERROR:"FRONTEND_TIMEOUT_ERROR",GENERIC_DB_ENGINE_ERROR:"GENERIC_DB_ENGINE_ERROR",COLUMN_DOES_NOT_EXIST_ERROR:"COLUMN_DOES_NOT_EXIST_ERROR",TABLE_DOES_NOT_EXIST_ERROR:"TABLE_DOES_NOT_EXIST_ERROR",SCHEMA_DOES_NOT_EXIST_ERROR:"SCHEMA_DOES_NOT_EXIST_ERROR",CONNECTION_INVALID_USERNAME_ERROR:"CONNECTION_INVALID_USERNAME_ERROR",CONNECTION_INVALID_PASSWORD_ERROR:"CONNECTION_INVALID_PASSWORD_ERROR",CONNECTION_INVALID_HOSTNAME_ERROR:"CONNECTION_INVALID_HOSTNAME_ERROR",CONNECTION_PORT_CLOSED_ERROR:"CONNECTION_PORT_CLOSED_ERROR",CONNECTION_INVALID_PORT_ERROR:"CONNECTION_INVALID_PORT_ERROR",CONNECTION_HOST_DOWN_ERROR:"CONNECTION_HOST_DOWN_ERROR",CONNECTION_ACCESS_DENIED_ERROR:"CONNECTION_ACCESS_DENIED_ERROR",CONNECTION_UNKNOWN_DATABASE_ERROR:"CONNECTION_UNKNOWN_DATABASE_ERROR",CONNECTION_DATABASE_PERMISSIONS_ERROR:"CONNECTION_DATABASE_PERMISSIONS_ERROR",CONNECTION_MISSING_PARAMETERS_ERRORS:"CONNECTION_MISSING_PARAMETERS_ERRORS",OBJECT_DOES_NOT_EXIST_ERROR:"OBJECT_DOES_NOT_EXIST_ERROR",SYNTAX_ERROR:"SYNTAX_ERROR",VIZ_GET_DF_ERROR:"VIZ_GET_DF_ERROR",UNKNOWN_DATASOURCE_TYPE_ERROR:"UNKNOWN_DATASOURCE_TYPE_ERROR",FAILED_FETCHING_DATASOURCE_INFO_ERROR:"FAILED_FETCHING_DATASOURCE_INFO_ERROR",TABLE_SECURITY_ACCESS_ERROR:"TABLE_SECURITY_ACCESS_ERROR",DATASOURCE_SECURITY_ACCESS_ERROR:"DATASOURCE_SECURITY_ACCESS_ERROR",DATABASE_SECURITY_ACCESS_ERROR:"DATABASE_SECURITY_ACCESS_ERROR",QUERY_SECURITY_ACCESS_ERROR:"QUERY_SECURITY_ACCESS_ERROR",MISSING_OWNERSHIP_ERROR:"MISSING_OWNERSHIP_ERROR",BACKEND_TIMEOUT_ERROR:"BACKEND_TIMEOUT_ERROR",DATABASE_NOT_FOUND_ERROR:"DATABASE_NOT_FOUND_ERROR",MISSING_TEMPLATE_PARAMS_ERROR:"MISSING_TEMPLATE_PARAMS_ERROR",INVALID_TEMPLATE_PARAMS_ERROR:"INVALID_TEMPLATE_PARAMS_ERROR",RESULTS_BACKEND_NOT_CONFIGURED_ERROR:"RESULTS_BACKEND_NOT_CONFIGURED_ERROR",DML_NOT_ALLOWED_ERROR:"DML_NOT_ALLOWED_ERROR",INVALID_CTAS_QUERY_ERROR:"INVALID_CTAS_QUERY_ERROR",INVALID_CVAS_QUERY_ERROR:"INVALID_CVAS_QUERY_ERROR",SQLLAB_TIMEOUT_ERROR:"SQLLAB_TIMEOUT_ERROR",RESULTS_BACKEND_ERROR:"RESULTS_BACKEND_ERROR",ASYNC_WORKERS_ERROR:"ASYNC_WORKERS_ERROR",GENERIC_COMMAND_ERROR:"GENERIC_COMMAND_ERROR",GENERIC_BACKEND_ERROR:"GENERIC_BACKEND_ERROR",INVALID_PAYLOAD_FORMAT_ERROR:"INVALID_PAYLOAD_FORMAT_ERROR",INVALID_PAYLOAD_SCHEMA_ERROR:"INVALID_PAYLOAD_SCHEMA_ERROR"},r={TABLE:"TABLE",VIEW:"VIEW"};var i;!function(e){e.STARTED="started",e.STOPPED="stopped",e.FAILED="failed",e.PENDING="pending",e.RUNNING="running",e.SCHEDULED="scheduled",e.SUCCESS="success",e.FETCHING="fetching",e.TIMED_OUT="timed_out"}(i||(i={}));const 
l=[i.RUNNING,i.STARTED,i.PENDING,i.FETCHING,i.SCHEDULED],s=[i.STOPPED,i.FAILED,i.SUCCESS,i.TIMED_OUT],_={id:"clientId2353",dbId:1,sql:"SELECT * FROM something",sqlEditorId:"dfsadfs",tab:"unimportant",tempTable:"",ctas:!1,cached:!1,errorMessage:null,extra:{progress:null},isDataPreview:!1,progress:0,resultsKey:null,state:i.SUCCESS,tempSchema:null,trackingUrl:null,templateParams:null,rows:42,queryLimit:100,limitingFactor:"",endDttm:1476910579693,duration:"",startDttm:1476910566092.96,time:{},user:{},userId:1,db:{},started:"",querylink:{},queryId:1,executedSql:"",output:"",actions:{},type:a.i9.Query,columns:[{name:"Column 1",type:"STRING",is_dttm:!1},{name:"Column 3",type:"STRING",is_dttm:!1},{name:"Column 2",type:"TIMESTAMP",is_dttm:!0}]},c={results:{displayLimitReached:!1,columns:[{name:"Column 1",type:"STRING",is_dttm:!1},{name:"Column 3",type:"STRING",is_dttm:!1},{name:"Column 2",type:"TIMESTAMP",is_dttm:!0}],data:[{"Column 1":"a","Column 2":"b","Column 3":"2014-11-11T00:00:00"}],expanded_columns:[],selected_columns:[{name:"Column 1",type:"STRING",is_dttm:!1},{name:"Column 3",type:"STRING",is_dttm:!1},{name:"Column 2",type:"TIMESTAMP",is_dttm:!0}],query:{limit:6}}},u={..._,...c};var d;!function(e){e.Row="row",e.Column="column"}(d||(d={}));const m={}},77675:(e,t,o)=>{function a(e,t){const o=parseInt(String(e),10),a=void 0===t?NaN:t;return Number.isNaN(o)?a:o}o.d(t,{Z:()=>a})}}]);
//# sourceMappingURL=8989.5a7ea49dc49b9f447815.entry.js.map
|
PypiClean
|
/featureform-enterprise-0.10.3.tar.gz/featureform-enterprise-0.10.3/src/featureform/dashboard/out/_next/static/chunks/react-syntax-highlighter_languages_refractor_glsl.aaa801f4d5c6da96.js
|
"use strict";(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[158,8950],{65806:function(a){function b(a){a.languages.c=a.languages.extend("clike",{comment:{pattern:/\/\/(?:[^\r\n\\]|\\(?:\r\n?|\n|(?![\r\n])))*|\/\*[\s\S]*?(?:\*\/|$)/,greedy:!0},string:{pattern:/"(?:\\(?:\r\n|[\s\S])|[^"\\\r\n])*"/,greedy:!0},"class-name":{pattern:/(\b(?:enum|struct)\s+(?:__attribute__\s*\(\([\s\S]*?\)\)\s*)?)\w+|\b[a-z]\w*_t\b/,lookbehind:!0},keyword:/\b(?:_Alignas|_Alignof|_Atomic|_Bool|_Complex|_Generic|_Imaginary|_Noreturn|_Static_assert|_Thread_local|__attribute__|asm|auto|break|case|char|const|continue|default|do|double|else|enum|extern|float|for|goto|if|inline|int|long|register|return|short|signed|sizeof|static|struct|switch|typedef|typeof|union|unsigned|void|volatile|while)\b/,function:/\b[a-z_]\w*(?=\s*\()/i,number:/(?:\b0x(?:[\da-f]+(?:\.[\da-f]*)?|\.[\da-f]+)(?:p[+-]?\d+)?|(?:\b\d+(?:\.\d*)?|\B\.\d+)(?:e[+-]?\d+)?)[ful]{0,4}/i,operator:/>>=?|<<=?|->|([-+&|:])\1|[?:~]|[-+*/%&|^!=<>]=?/}),a.languages.insertBefore("c","string",{char:{pattern:/'(?:\\(?:\r\n|[\s\S])|[^'\\\r\n]){0,32}'/,greedy:!0}}),a.languages.insertBefore("c","string",{macro:{pattern:/(^[\t ]*)#\s*[a-z](?:[^\r\n\\/]|\/(?!\*)|\/\*(?:[^*]|\*(?!\/))*\*\/|\\(?:\r\n|[\s\S]))*/im,lookbehind:!0,greedy:!0,alias:"property",inside:{string:[{pattern:/^(#\s*include\s*)<[^>]+>/,lookbehind:!0},a.languages.c.string],char:a.languages.c.char,comment:a.languages.c.comment,"macro-name":[{pattern:/(^#\s*define\s+)\w+\b(?!\()/i,lookbehind:!0},{pattern:/(^#\s*define\s+)\w+\b(?=\()/i,lookbehind:!0,alias:"function"}],directive:{pattern:/^(#\s*)[a-z]+/,lookbehind:!0,alias:"keyword"},"directive-hash":/^#/,punctuation:/##|\\(?=[\r\n])/,expression:{pattern:/\S[\s\S]*/,inside:a.languages.c}}}}),a.languages.insertBefore("c","function",{constant:/\b(?:EOF|NULL|SEEK_CUR|SEEK_END|SEEK_SET|__DATE__|__FILE__|__LINE__|__TIMESTAMP__|__TIME__|__func__|stderr|stdin|stdout)\b/}),delete a.languages.c.boolean}a.exports=b,b.displayName="c",b.aliases=[]},3322:function(a,b,c){var d=c(65806);function e(a){a.register(d),a.languages.glsl=a.languages.extend("c",{keyword:/\b(?:active|asm|atomic_uint|attribute|[ibdu]?vec[234]|bool|break|buffer|case|cast|centroid|class|coherent|common|const|continue|d?mat[234](?:x[234])?|default|discard|do|double|else|enum|extern|external|false|filter|fixed|flat|float|for|fvec[234]|goto|half|highp|hvec[234]|[iu]?sampler2DMS(?:Array)?|[iu]?sampler2DRect|[iu]?samplerBuffer|[iu]?samplerCube|[iu]?samplerCubeArray|[iu]?sampler[123]D|[iu]?sampler[12]DArray|[iu]?image2DMS(?:Array)?|[iu]?image2DRect|[iu]?imageBuffer|[iu]?imageCube|[iu]?imageCubeArray|[iu]?image[123]D|[iu]?image[12]DArray|if|in|inline|inout|input|int|interface|invariant|layout|long|lowp|mediump|namespace|noinline|noperspective|out|output|partition|patch|precise|precision|public|readonly|resource|restrict|return|sample|sampler[12]DArrayShadow|sampler[12]DShadow|sampler2DRectShadow|sampler3DRect|samplerCubeArrayShadow|samplerCubeShadow|shared|short|sizeof|smooth|static|struct|subroutine|superp|switch|template|this|true|typedef|uint|uniform|union|unsigned|using|varying|void|volatile|while|writeonly)\b/})}a.exports=e,e.displayName="glsl",e.aliases=[]}}])
|
PypiClean
|
/pykit-sci-0.2.1.tar.gz/pykit-sci-0.2.1/pksci/tools/mpltools/_cmaps.py
|
from __future__ import division, absolute_import, print_function
from __future__ import unicode_literals
from builtins import object
__docformat__ = 'restructuredtext'
import matplotlib as mpl
__all__ = ['COLORS', 'CustomColormap']
COLORS = {}
#COLOR_MAPS = {}
#_BlYlGr_dict = {'red': ((0.0, 51 / 255, 51 / 255),
# (0.2, 180 / 255, 180 / 255),
# (0.4, 175 / 255, 175 / 255),
# (0.6, 206 / 255, 206 / 255),
# (0.8, 0 / 255, 0 / 255),
# (1.0, 102 / 255, 102 / 255)),
# 'green': ((0.0, 51 / 255, 51 / 255),
# (0.2, 180 / 255, 180 / 255),
# (0.4, 200 / 255, 200 / 255),
# (0.6, 211 / 255, 211 / 255),
# (0.8, 130 / 255, 130 / 255),
# (1.0, 217 / 255, 217 / 255)),
# 'blue': ((0.0, 51 / 255, 51 / 255),
# (0.2, 180 / 255, 180 / 255),
# (0.4, 7 / 255, 7 / 255),
# (0.6, 106 / 255, 106 / 255),
# (0.8, 195 / 255, 195 / 255),
# (1.0, 237 / 255, 237 / 255))}
_BlYlGr_list = ['#333333', '#B4B4B4', '#AFC807',
'#CED36A', '#0082C3', '#66D9ED']
COLORS['BlYlGr'] = _BlYlGr_list
#_BlYlGr_cmap = \
# mpl.colors.LinearSegmentedColormap.from_list('BlYlGr', _BlYlGr_list)
#
#COLOR_MAPS['BlYlGr'] = _BlYlGr_cmap
_BlGnOr_list = ['#0000FF', '#0055FF', '#00AAFF', '#00FFFF', '#55FFAA',
'#AAFF55', '#FFFF00', '#FFAA00', '#FF5500']
COLORS['BlGnOr'] = _BlGnOr_list
_Jet2_list = ['#0000FF', '#0055FF', '#00AAFF', '#00FFFF', '#55FFAA',
'#AAFF55', '#FFFF00', '#FFAA00', '#FF5500', '#FE1400']
COLORS['Jet2'] = _Jet2_list
#_BlGnOr_cmap = \
# mpl.colors.LinearSegmentedColormap.from_list('BlGnOr', _BlGnOr_list)
#
#COLOR_MAPS['BlGnOr'] = _BlGnOr_cmap
class CustomColormap(object):
def __init__(self, name, reverse=False):
try:
self._color_list = COLORS[name]
except KeyError:
s = 'Invalid color map name: {}\n'.format(name)
s += 'Valid names are: {}'.format(sorted(COLORS.keys()))
raise KeyError(s)
self._name = name
if reverse:
self._color_list = [c for c in reversed(self._color_list)]
def get_mpl_colormap(self, N=256, gamma=1.0):
cmap = mpl.colors.LinearSegmentedColormap.from_list(self._name,
self._color_list,
N=N, gamma=gamma)
return cmap
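if __name__ == '__main__':
    # Illustrative sketch (not part of the library API): build one of the
    # custom colormaps registered in COLORS above and sample it.
    demo_cmap = CustomColormap('BlGnOr', reverse=True).get_mpl_colormap(N=128)
    print(demo_cmap(0.5))  # RGBA tuple at the midpoint of the colormap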
|
PypiClean
|
/Roots-0.0.4.tar.gz/Roots-0.0.4/roots/visualization.py
|
import numpy as np
from mayavi import mlab
import os
from roots.swcToolkit import swcToolkit
class swcVisualizer():
"""
mfile = 'fileonpath.swc'
visualizer = swcVisualizer()
visualizer.mplot_mfile(mfile)
"""
def __init__(self):
self.swcTool = swcToolkit()
def create_cylinders(self,coords,diams,data,num_pts):
x = []
y = []
z = []
connections = []
D = []
offset = 0
for kk in range(len(coords)):
# Define points
C1 = np.array(coords[kk][0])
C2 = np.array(coords[kk][1])
# Define normal plane
p = C1-C2
d = np.dot(p,C1)
# Get normal vectors on plane
z_idx = np.arange(3)[p==0]
nz_idx = np.arange(3)[p!=0]
if len(nz_idx) == 3:
x1 = 1.
y1 = 1.
z1 = (d-(np.dot(p[:2],[x1,y1])))/p[2]
a = np.array([x1,y1,z1])
elif len(nz_idx) == 2:
a = np.zeros(3)
a[z_idx] = 1.
a[nz_idx[0]] = 1.
a[nz_idx[1]] = (d-p[nz_idx[0]])/p[nz_idx[1]]
else:
a = np.zeros(3)
a[z_idx] = 1.
a[nz_idx] = d/p[nz_idx]
a = a-C1
if len(p[p!=0]) == 3:
x2 = 1.
y2 = (a[2]*p[0]/p[2] - a[0]) / (a[1] - a[2]*p[1]/p[2])
z2 = -(p[1]*y2+p[0])/p[2]
b = np.array([x2,y2,z2])
elif len(p[p!=0]) == 2:
b = np.zeros(3)
b[z_idx] = 1.
b[nz_idx[0]] = a[z_idx]/(a[nz_idx[1]]*p[nz_idx[0]]/p[nz_idx[1]] - a[nz_idx[0]])
b[nz_idx[1]] = -p[nz_idx[0]]*b[nz_idx[0]]/p[nz_idx[1]]
else:
b = np.zeros(3)
b[nz_idx] = 0
b[z_idx[0]] = 1.
b[z_idx[1]] = -a[z_idx[0]]/a[z_idx[1]]
# Convert to unit vectors
a = a/np.linalg.norm(a)
b = b/np.linalg.norm(b)
theta_step = np.pi*2/num_pts
# Define set of points at a defined radius around
# the original points, C1 and C2
P1 = np.zeros((num_pts,3))
P2 = np.zeros((num_pts,3))
r1 = diams[kk][0]
r2 = diams[kk][1]
theta = 0
for ii in range(num_pts):
for jj in range(3):
P1[ii][jj] = C1[jj] + r1*np.cos(theta)*a[jj] + r1*np.sin(theta)*b[jj]
P2[ii][jj] = C2[jj] + r2*np.cos(theta)*a[jj] + r2*np.sin(theta)*b[jj]
theta += theta_step
# Define triangles
for ii in range(2*num_pts):
if ii < num_pts:
connections.append((ii+offset,(ii+1)%num_pts+offset,ii+num_pts+offset))
else:
connections.append((ii+offset,(ii+1-num_pts)%num_pts+offset+num_pts,(ii-num_pts+1)%num_pts+offset))
for ii in range(num_pts):
x.append(P1[ii][0])
y.append(P1[ii][1])
z.append(P1[ii][2])
D.append(data[kk])
for ii in range(num_pts):
x.append(P2[ii][0])
y.append(P2[ii][1])
z.append(P2[ii][2])
D.append(data[kk])
offset += 2*num_pts
x = np.array(x)
y = np.array(y)
z = np.array(z)
D = np.array(D)
return x, y, z, connections, D
def segment_branch(self,branch):
segments =[]
for i,seg_end in enumerate(branch[:-1]):
segments.append([branch[i],branch[i+1]])
return(segments)
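    # For example, segment_branch([p0, p1, p2]) returns [[p0, p1], [p1, p2]]:
    # each consecutive pair of branch points becomes one two-point segment.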
def unzip_sectioned_arbor(self,arbor):
if arbor is None:
return({},{},{},{})
x = {}
y = {}
z = {}
r = {}
for branch in arbor.keys():
x[branch] = []
y[branch] = []
z[branch] = []
r[branch] = []
for section in arbor[branch]:
for point in section:
x[branch].append(point[0])
y[branch].append(point[1])
z[branch].append(point[2])
r[branch].append(point[3])
return(x,y,z,r)
def rgb_to_mlabcolor(self,rgb):
return((rgb[0]/255.0,rgb[1]/255.0,rgb[2]/255.0))
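    # e.g. rgb_to_mlabcolor((255, 22, 84)) -> (1.0, 0.0863..., 0.3294...),
    # i.e. 8-bit RGB rescaled to the 0..1 floats that mayavi expects.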
def mplot_sectioned_arbor_simplified(self,arbor,arbor_labels,view=True,DefaultDiameters=True):
fig = mlab.figure(bgcolor=(42/255.0,56/255.0,54/255.0),size=(1280,720))
keys = ['node','paranode1','paranode2','internode','interbouton','bouton']
diams = [0.75,1.54,1.54,1.54,0.2,1.0]
values = [self.rgb_to_mlabcolor(item) for item in [(255, 22, 84),(112, 193, 179),(178, 219, 191),(36, 123, 160),((243, 255, 189)),(255, 22, 84)]]
color_dict = dict(zip(keys,values))
diam_dict = dict(zip(keys,diams))
mobjs = []
for branch in arbor.keys():
# if branch not in [0,1]:
# continue
for s,section in enumerate(arbor[branch]):
if DefaultDiameters:
mobjs.append(mlab.plot3d([sec[0] for sec in section],[sec[1] for sec in section],[sec[2] for sec in section],color=color_dict[arbor_labels[branch][s]],tube_radius=diam_dict[arbor_labels[branch][s]],tube_sides=6,representation='wireframe'))
else:
mobjs.append(mlab.plot3d([sec[0] for sec in section],[sec[1] for sec in section],[sec[2] for sec in section],color=color_dict[arbor_labels[branch][s]],tube_radius=section[-1][-1],tube_sides=6))
mobjs[-1].actor.property.lighting = False
mlab.view(azimuth=0,elevation=0)
if view:
mlab.show()
def plot_electrode(self,arbor,arbor_labels,view=False):
keys = ['contact','noncontact','activecontact']
values = [self.rgb_to_mlabcolor(item) for item in [(42,56,54),(224, 224, 224),(173,42,42)]]
color_dict = dict(zip(keys,values))
electrode_parts = []
electrode_parts.append(mlab.points3d([arbor[1][0][0][0]],[arbor[1][0][0][1]],[arbor[1][0][0][2]],color=color_dict['noncontact'],scale_factor=arbor[1][0][0][3]*1,mode='sphere',resolution=16))
for s,section in enumerate(arbor[0]):
if s in arbor_labels:
col = color_dict['contact']
if s == 3:
col = color_dict['activecontact']
else:
col = color_dict['noncontact']
electrode_parts.append(mlab.plot3d([sec[0] for sec in section],[sec[1] for sec in section],[sec[2] for sec in section],color=col,tube_radius=section[-1][-1]/2.0,tube_sides=16))
for part in electrode_parts:
part.actor.property.backface_culling=True
part.actor.property.frontface_culling=True
part.actor.property.shading=True
if view:
mlab.show()
def mplot_sectioned_arbors(self,arbors,colors = [(0.29, 0.58, 0.67),(0.82, 0.35, 0.24)],view=True):
fig = mlab.figure(bgcolor=(42/255.0,56/255.0,54/255.0),size=(1280,720))
colors = [(item[0]/255.0,item[1]/255.0,item[2]/255.0) for item in [[0,119,187],[51,187,238],[0,153,136],[238,119,51],[204,51,17],[238,51,119],[221,170,51]]]
colors.reverse()
col_index = 0
for arbor in arbors:
myav_coords = []
myav_diams = []
x,y,z,r = self.unzip_sectioned_arbor(arbor)
coords = []
diams = []
for bnum in x:
tcoords = []
tdiams = []
for i,tem in enumerate(x[bnum]):
tcoords.append([x[bnum][i],y[bnum][i],z[bnum][i]])
tdiams.append(r[bnum][i])
tdiams[-1] *= 3.0
coords.extend(self.segment_branch(tcoords))
diams.extend(self.segment_branch(tdiams))
myav_coords.extend(coords)
myav_diams.extend(diams)
myav_vs = [20 for i in range(len(myav_coords)-len(coords))]+[2 for j in range(len(coords))]
num_pts = 20
tx,ty,tz,tconn,tD = self.create_cylinders(myav_coords,myav_diams,myav_vs,num_pts)
tmsh = mlab.triangular_mesh(tx,ty,tz,tconn,scalars=tD,vmin=1,vmax=20,representation='wireframe',color=colors[col_index])
tmsh.actor.property.frontface_culling = True
tmsh.actor.property.backface_culling = True
tmsh.actor.property.lighting = False
col_index+=1
if col_index==len(colors):
col_index=0
mlab.view(azimuth=0,elevation=0)
# for ii in range(D.shape[1]):
# _=mlab.triangular_mesh(x,y,z,connection,scalars = D[:,ii],vmin=Min,vmax=Max)
# _=mlab.view(azimuth=0,elevation=0)
# _=mlab.savefig('pic%.4d.png' % ii, size=(800,600))
# mlab.savefig('pic%.4d.png' % tstep,size=(1200,900))
if view:
mlab.show()
def view(self):
mlab.show()
def close(self):
mlab.close(all=True)
def mplot_sectioned_arbor(self,fig=None,arbor=None,colors = [(0.29, 0.58, 0.67),(0.82, 0.35, 0.24)],view=True):
if fig is None:
fig = mlab.figure(bgcolor=(42/255.0,56/255.0,54/255.0),size=(1280,720))
colorind = 0
myav_coords = []
myav_diams = []
x,y,z,r = self.unzip_sectioned_arbor(arbor)
coords = []
diams = []
for bnum in x:
tcoords = []
tdiams = []
for i,tem in enumerate(x[bnum]):
tcoords.append([x[bnum][i],y[bnum][i],z[bnum][i]])
tdiams.append(r[bnum][i])
# tdiams[-1] = 0.025
coords.extend(self.segment_branch(tcoords))
diams.extend(self.segment_branch(tdiams))
myav_coords.extend(coords)
myav_diams.extend(diams)
myav_vs = [20 for i in range(len(myav_coords)-len(coords))]+[2 for j in range(len(coords))]
num_pts = 20
tx,ty,tz,tconn,tD = self.create_cylinders(myav_coords,myav_diams,myav_vs,num_pts)
mlab.triangular_mesh(tx,ty,tz,tconn,scalars=tD,vmin=1,vmax=20)
colorind+=1
mlab.view(azimuth=0,elevation=0)
# for ii in range(D.shape[1]):
# _=mlab.triangular_mesh(x,y,z,connection,scalars = D[:,ii],vmin=Min,vmax=Max)
# _=mlab.view(azimuth=0,elevation=0)
# _=mlab.savefig('pic%.4d.png' % ii, size=(800,600))
# mlab.savefig('pic%.4d.png' % tstep,size=(1200,900))
if view:
mlab.show()
def mplot_mfile(self,swcfile,colors = [(0.29, 0.58, 0.67),(0.82, 0.35, 0.24)]):
colorind = 0
myav_coords = []
myav_diams = []
x,y,z,r = self.swcTool.load_swc(swcfile,asTree=False)
coords = []
diams = []
for bnum in x:
tcoords = []
tdiams = []
for i,tem in enumerate(x[bnum]):
tcoords.append([x[bnum][i],y[bnum][i],z[bnum][i]])
tdiams.append(r[bnum][i])
# tdiams[-1] = 0.025
coords.extend(self.segment_branch(tcoords))
diams.extend(self.segment_branch(tdiams))
myav_coords.extend(coords)
myav_diams.extend(diams)
myav_vs = [20 for i in range(len(myav_coords)-len(coords))]+[2 for j in range(len(coords))]
num_pts = 6
tx,ty,tz,tconn,tD = self.create_cylinders(myav_coords,myav_diams,myav_vs,num_pts)
mlab.triangular_mesh(tx,ty,tz,tconn,scalars=tD,vmin=1,vmax=20,color=colors[colorind])
colorind+=1
mlab.view(azimuth=0,elevation=0)
# for ii in range(D.shape[1]):
# _=mlab.triangular_mesh(x,y,z,connection,scalars = D[:,ii],vmin=Min,vmax=Max)
# _=mlab.view(azimuth=0,elevation=0)
# _=mlab.savefig('pic%.4d.png' % ii, size=(800,600))
# mlab.savefig('pic%.4d.png' % tstep,size=(1200,900))
mlab.show()
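# Illustrative usage (sketch; 'axon.swc' is a hypothetical input file):
#   visualizer = swcVisualizer()
#   visualizer.mplot_mfile('axon.swc')  # loads the morphology and opens a mayavi window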
|
PypiClean
|
/RAMSTK-1.0.1.tar.gz/RAMSTK-1.0.1/docs/api/source/ramstk.gui.gtk.widgets.rst
|
Widgets Package
===============================================================================
.. toctree::
.. automodule:: ramstk.gui.gtk.ramstk.View
:members:
:undoc-members:
:private-members:
:show-inheritance:
.. automodule:: ramstk.gui.gtk.ramstk.Button
:members:
:undoc-members:
:private-members:
:show-inheritance:
.. automodule:: ramstk.gui.gtk.ramstk.Combo
:members:
:undoc-members:
:private-members:
:show-inheritance:
.. automodule:: ramstk.gui.gtk.ramstk.Dialog
:members:
:undoc-members:
:private-members:
:show-inheritance:
.. automodule:: ramstk.gui.gtk.ramstk.Entry
:members:
:undoc-members:
:private-members:
:show-inheritance:
.. automodule:: ramstk.gui.gtk.ramstk.Frame
:members:
:undoc-members:
:private-members:
:show-inheritance:
.. automodule:: ramstk.gui.gtk.ramstk.Label
:members:
:undoc-members:
:private-members:
:show-inheritance:
.. automodule:: ramstk.gui.gtk.ramstk.Plot
:members:
:undoc-members:
:private-members:
:show-inheritance:
.. automodule:: ramstk.gui.gtk.ramstk.TreeView
:members:
:undoc-members:
:private-members:
:show-inheritance:
.. automodule:: ramstk.gui.gtk.ramstk.Widget
:members:
:undoc-members:
:private-members:
:show-inheritance:
|
PypiClean
|
/energistics/etp/v12/datatypes/supported_protocol.py
|
import typing
from pydantic import validator
from etptypes import ETPModel, Field, Strict
from etptypes.energistics.etp.v12.datatypes.version import Version
from etptypes.energistics.etp.v12.datatypes.data_value import DataValue
avro_schema: typing.Final[
str
] = '{"type": "record", "namespace": "Energistics.Etp.v12.Datatypes", "name": "SupportedProtocol", "fields": [{"name": "protocol", "type": "int"}, {"name": "protocolVersion", "type": {"type": "record", "namespace": "Energistics.Etp.v12.Datatypes", "name": "Version", "fields": [{"name": "major", "type": "int", "default": 0}, {"name": "minor", "type": "int", "default": 0}, {"name": "revision", "type": "int", "default": 0}, {"name": "patch", "type": "int", "default": 0}], "fullName": "Energistics.Etp.v12.Datatypes.Version", "depends": []}}, {"name": "role", "type": "string"}, {"name": "protocolCapabilities", "type": {"type": "map", "values": {"type": "record", "namespace": "Energistics.Etp.v12.Datatypes", "name": "DataValue", "fields": [{"name": "item", "type": ["null", "boolean", "int", "long", "float", "double", "string", {"type": "record", "namespace": "Energistics.Etp.v12.Datatypes", "name": "ArrayOfBoolean", "fields": [{"name": "values", "type": {"type": "array", "items": "boolean"}}], "fullName": "Energistics.Etp.v12.Datatypes.ArrayOfBoolean", "depends": []}, {"type": "record", "namespace": "Energistics.Etp.v12.Datatypes", "name": "ArrayOfNullableBoolean", "fields": [{"name": "values", "type": {"type": "array", "items": ["null", "boolean"]}}], "fullName": "Energistics.Etp.v12.Datatypes.ArrayOfNullableBoolean", "depends": []}, {"type": "record", "namespace": "Energistics.Etp.v12.Datatypes", "name": "ArrayOfInt", "fields": [{"name": "values", "type": {"type": "array", "items": "int"}}], "fullName": "Energistics.Etp.v12.Datatypes.ArrayOfInt", "depends": []}, {"type": "record", "namespace": "Energistics.Etp.v12.Datatypes", "name": "ArrayOfNullableInt", "fields": [{"name": "values", "type": {"type": "array", "items": ["null", "int"]}}], "fullName": "Energistics.Etp.v12.Datatypes.ArrayOfNullableInt", "depends": []}, {"type": "record", "namespace": "Energistics.Etp.v12.Datatypes", "name": "ArrayOfLong", "fields": [{"name": "values", "type": {"type": "array", "items": "long"}}], "fullName": "Energistics.Etp.v12.Datatypes.ArrayOfLong", "depends": []}, {"type": "record", "namespace": "Energistics.Etp.v12.Datatypes", "name": "ArrayOfNullableLong", "fields": [{"name": "values", "type": {"type": "array", "items": ["null", "long"]}}], "fullName": "Energistics.Etp.v12.Datatypes.ArrayOfNullableLong", "depends": []}, {"type": "record", "namespace": "Energistics.Etp.v12.Datatypes", "name": "ArrayOfFloat", "fields": [{"name": "values", "type": {"type": "array", "items": "float"}}], "fullName": "Energistics.Etp.v12.Datatypes.ArrayOfFloat", "depends": []}, {"type": "record", "namespace": "Energistics.Etp.v12.Datatypes", "name": "ArrayOfDouble", "fields": [{"name": "values", "type": {"type": "array", "items": "double"}}], "fullName": "Energistics.Etp.v12.Datatypes.ArrayOfDouble", "depends": []}, {"type": "record", "namespace": "Energistics.Etp.v12.Datatypes", "name": "ArrayOfString", "fields": [{"name": "values", "type": {"type": "array", "items": "string"}}], "fullName": "Energistics.Etp.v12.Datatypes.ArrayOfString", "depends": []}, {"type": "record", "namespace": "Energistics.Etp.v12.Datatypes", "name": "ArrayOfBytes", "fields": [{"name": "values", "type": {"type": "array", "items": "bytes"}}], "fullName": "Energistics.Etp.v12.Datatypes.ArrayOfBytes", "depends": []}, "bytes", {"type": "record", "namespace": "Energistics.Etp.v12.Datatypes", "name": "AnySparseArray", "fields": [{"name": "slices", "type": {"type": "array", "items": {"type": "record", "namespace": "Energistics.Etp.v12.Datatypes", "name": 
"AnySubarray", "fields": [{"name": "start", "type": "long"}, {"name": "slice", "type": {"type": "record", "namespace": "Energistics.Etp.v12.Datatypes", "name": "AnyArray", "fields": [{"name": "item", "type": ["Energistics.Etp.v12.Datatypes.ArrayOfBoolean", "Energistics.Etp.v12.Datatypes.ArrayOfInt", "Energistics.Etp.v12.Datatypes.ArrayOfLong", "Energistics.Etp.v12.Datatypes.ArrayOfFloat", "Energistics.Etp.v12.Datatypes.ArrayOfDouble", "Energistics.Etp.v12.Datatypes.ArrayOfString", "bytes"]}], "fullName": "Energistics.Etp.v12.Datatypes.AnyArray", "depends": ["Energistics.Etp.v12.Datatypes.ArrayOfBoolean", "Energistics.Etp.v12.Datatypes.ArrayOfInt", "Energistics.Etp.v12.Datatypes.ArrayOfLong", "Energistics.Etp.v12.Datatypes.ArrayOfFloat", "Energistics.Etp.v12.Datatypes.ArrayOfDouble", "Energistics.Etp.v12.Datatypes.ArrayOfString"]}}], "fullName": "Energistics.Etp.v12.Datatypes.AnySubarray", "depends": ["Energistics.Etp.v12.Datatypes.AnyArray"]}}}], "fullName": "Energistics.Etp.v12.Datatypes.AnySparseArray", "depends": ["Energistics.Etp.v12.Datatypes.AnySubarray"]}]}], "fullName": "Energistics.Etp.v12.Datatypes.DataValue", "depends": ["Energistics.Etp.v12.Datatypes.ArrayOfBoolean", "Energistics.Etp.v12.Datatypes.ArrayOfNullableBoolean", "Energistics.Etp.v12.Datatypes.ArrayOfInt", "Energistics.Etp.v12.Datatypes.ArrayOfNullableInt", "Energistics.Etp.v12.Datatypes.ArrayOfLong", "Energistics.Etp.v12.Datatypes.ArrayOfNullableLong", "Energistics.Etp.v12.Datatypes.ArrayOfFloat", "Energistics.Etp.v12.Datatypes.ArrayOfDouble", "Energistics.Etp.v12.Datatypes.ArrayOfString", "Energistics.Etp.v12.Datatypes.ArrayOfBytes", "Energistics.Etp.v12.Datatypes.AnySparseArray"]}}, "default": {}}], "fullName": "Energistics.Etp.v12.Datatypes.SupportedProtocol", "depends": ["Energistics.Etp.v12.Datatypes.Version", "Energistics.Etp.v12.Datatypes.DataValue"]}'
class SupportedProtocol(ETPModel):
protocol: int = Field(alias="protocol")
protocol_version: Version = Field(alias="protocolVersion")
role: str = Field(alias="role")
protocol_capabilities: typing.Mapping[str, DataValue] = Field(
alias="protocolCapabilities", default_factory=lambda: {}
)
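if __name__ == "__main__":
    # Illustrative sketch: instantiate the record by field alias. Pydantic
    # aliased fields are normally populated this way; whether plain field
    # names also work depends on the ETPModel base config.
    sp = SupportedProtocol(
        protocol=3,
        protocolVersion=Version(major=1, minor=2, revision=0, patch=0),
        role="store",
        protocolCapabilities={},
    )
    print(sp.protocol_version.major)  # -> 1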
|
PypiClean
|
/django-birdland-0.1a1.tar.gz/django-birdland-0.1a1/src/birdland/models.py
|
import datetime
from django.db import models
from django.db.models import permalink
from django.utils.translation import ugettext_lazy as _
from django.utils.dateformat import format
from django.contrib.auth.models import User
from django.dispatch import dispatcher
from birdland import managers, signals
from birdland.conf import settings
from birdland.markup import get_markups
if settings.TAGGING_ENABLED:
import tagging.fields
class Entry(models.Model):
"""
A weblog entry.
"""
DRAFT_STATUS, PUBLISH_STATUS, REMOVED_STATUS = 1, 2, 3
STATUS_CHOICES = (
(DRAFT_STATUS, _('Draft')),
(PUBLISH_STATUS, _('Publish')),
(REMOVED_STATUS, _('Removed')),
)
# Metadata Fields
author = models.ForeignKey(User, verbose_name=_('author'), related_name='entries')
title = models.CharField(_('title'), max_length=256)
slug = models.SlugField(_('slug'), max_length=64)
created = models.DateTimeField(
_('created on'),
default=datetime.datetime.now, editable=False
)
modified = models.DateTimeField(
_('last modified on'),
editable=False
)
publish = models.DateTimeField(
_('publish on'),
default=datetime.datetime.now
)
status = models.IntegerField(_('status'), choices=STATUS_CHOICES, default=DRAFT_STATUS)
if settings.TAGGING_ENABLED:
tags = tagging.fields.TagField(verbose_name=_('tags'))
if settings.BACKLINKS_ENABLED:
backlinks_enabled = models.BooleanField(_('backlinks enabled'), default=True)
# Content Fields
summary_markup = models.CharField(
_('summary markup language'),
max_length=10,
choices=[(value, display) for value, display, callback in get_markups()],
default=settings.DEFAULT_MARKUP_LANG
)
summary = models.CharField(_('summary'), max_length=1024, blank=True)
summary_html = models.TextField(_('summary html'),
blank=True, editable=False)
body_markup = models.CharField(
_('body markup language'),
max_length=10,
choices=[(value, display) for value, display, callback in get_markups()],
default=settings.DEFAULT_MARKUP_LANG
)
body = models.TextField(_('body text'), blank=True)
body_html = models.TextField(_('body html'), blank=True, editable=False)
# Managers
objects = managers.EntryManager()
class Meta:
verbose_name = _('entry')
verbose_name_plural = _('entries')
ordering = ['-publish',]
get_latest_by = 'publish'
# Methods
def __unicode__(self):
return self.title
def save(self, **kwargs):
# Process the markup, adding signal hooks for any additional processing
# before and after the default markup processing behavior
self.modified = datetime.datetime.now()
signals.entry_markup_preprocess.send(sender=self)
for markup, title, formatter in get_markups():
if self.summary_markup == markup:
self.summary_html = formatter(self.summary)
if self.body_markup == markup:
self.body_html = formatter(self.body)
signals.entry_markup_processed.send(sender=self)
super(Entry, self).save(**kwargs)
def get_absolute_url(self):
return ('birdland.views.public.entry_archive_detail', (),
{'year': format(self.publish, 'Y'),
'month': format(self.publish, 'b'),
'day': format(self.publish, 'd'),
'slug': self.slug})
get_absolute_url = permalink(get_absolute_url)
def _next_previous_helper(self, direction):
return getattr(self, 'get_%s_by_publish' % direction)(
status__exact=self.PUBLISH_STATUS,
publish__lte=datetime.datetime.now())
def next_entry(self):
return self._next_previous_helper('next')
def previous_entry(self):
return self._next_previous_helper('previous')
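# Illustrative usage (sketch; assumes a saved User instance `author` and a
# configured markup formatter):
#   entry = Entry(author=author, title='Hello', slug='hello',
#                 body='*hi*', status=Entry.PUBLISH_STATUS)
#   entry.save()                # renders body -> body_html via get_markups()
#   entry.get_absolute_url()    # /<year>/<month>/<day>/hello/ per the URLconf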
|
PypiClean
|
/tensorflow-gpu-macosx-1.8.1.tar.gz/tensorflow/contrib/distributions/python/ops/bijectors/softmax_centered.py
|
"""SoftmaxCentered bijector."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.distributions.python.ops import distribution_util
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops.distributions import bijector
__all__ = [
"SoftmaxCentered",
]
class SoftmaxCentered(bijector.Bijector):
"""Bijector which computes `Y = g(X) = exp([X 0]) / sum(exp([X 0]))`.
To implement [softmax](https://en.wikipedia.org/wiki/Softmax_function) as a
bijection, the forward transformation appends a value to the input and the
inverse removes this coordinate. The appended coordinate represents a pivot,
e.g., `softmax(x) = exp(x-c) / sum(exp(x-c))` where `c` is the implicit last
coordinate.
Example Use:
```python
bijector.SoftmaxCentered().forward(tf.log([2, 3, 4]))
# Result: [0.2, 0.3, 0.4, 0.1]
# Extra result: 0.1
bijector.SoftmaxCentered().inverse([0.2, 0.3, 0.4, 0.1])
# Result: tf.log([2, 3, 4])
# Extra coordinate removed.
```
At first blush it may seem like the [Invariance of domain](
https://en.wikipedia.org/wiki/Invariance_of_domain) theorem implies this
implementation is not a bijection. However, the appended dimension
makes the (forward) image non-open and the theorem does not directly apply.
"""
def __init__(self,
validate_args=False,
name="softmax_centered"):
self._graph_parents = []
self._name = name
super(SoftmaxCentered, self).__init__(
event_ndims=1,
validate_args=validate_args,
name=name)
def _forward_event_shape(self, input_shape):
if input_shape.ndims is None or input_shape[-1] is None:
return input_shape
return tensor_shape.TensorShape([input_shape[-1] + 1])
def _forward_event_shape_tensor(self, input_shape):
return (input_shape[-1] + 1)[..., array_ops.newaxis]
def _inverse_event_shape(self, output_shape):
if output_shape.ndims is None or output_shape[-1] is None:
return output_shape
if output_shape[-1] <= 1:
raise ValueError("output_shape[-1] = %d <= 1" % output_shape[-1])
return tensor_shape.TensorShape([output_shape[-1] - 1])
def _inverse_event_shape_tensor(self, output_shape):
if self.validate_args:
# A shape dimension cannot be negative, so we need only check for <= 1.
is_greater_one = check_ops.assert_greater(
output_shape[-1], 1, message="Need last dimension greater than 1.")
output_shape = control_flow_ops.with_dependencies(
[is_greater_one], output_shape)
return (output_shape[-1] - 1)[..., array_ops.newaxis]
def _forward(self, x):
# Pad the last dim with a zero vector. This appended pivot coordinate is
# what lets us recover the normalization in the inverse function.
y = distribution_util.pad(x, axis=-1, back=True)
# Set shape hints.
if x.shape.ndims is not None:
shape = x.shape[:-1].concatenate(x.shape[-1] + 1)
y.shape.assert_is_compatible_with(shape)
y.set_shape(shape)
# Since we only support event_ndims in [0, 1] and we do padding, we always
# reduce over the last dimension, i.e., dim=-1 (which is the default).
return nn_ops.softmax(y)
def _inverse(self, y):
# To derive the inverse mapping note that:
# y[i] = exp(x[i]) / normalization
# and
# y[end] = 1 / normalization.
# Thus:
# x[i] = log(exp(x[i])) - log(y[end]) - log(normalization)
# = log(exp(x[i])/normalization) - log(y[end])
# = log(y[i]) - log(y[end])
# Do this first to make sure CSE catches that it'll happen again in
# _inverse_log_det_jacobian.
x = math_ops.log(y)
log_normalization = (-x[..., -1])[..., array_ops.newaxis]
x = x[..., :-1] + log_normalization
# Set shape hints.
if y.shape.ndims is not None:
shape = y.shape[:-1].concatenate(y.shape[-1] - 1)
x.shape.assert_is_compatible_with(shape)
x.set_shape(shape)
return x
def _inverse_log_det_jacobian(self, y):
# WLOG, consider the vector case:
# x = log(y[:-1]) - log(y[-1])
# where,
# y[-1] = 1 - sum(y[:-1]).
# We have:
# det{ dX/dY } = det{ diag(1 ./ y[:-1]) + 1 / y[-1] }
# = det{ inv{ diag(y[:-1]) - y[:-1]' y[:-1] } } (1)
# = 1 / det{ diag(y[:-1]) - y[:-1]' y[:-1] }
# = 1 / { (1 + y[:-1]' inv(diag(y[:-1])) y[:-1]) *
# det(diag(y[:-1])) } (2)
# = 1 / { y[-1] prod(y[:-1]) }
# = 1 / prod(y)
# (1) - https://en.wikipedia.org/wiki/Sherman%E2%80%93Morrison_formula
# or by noting that det{ dX/dY } = 1 / det{ dY/dX } from Bijector
# docstring "Tip".
# (2) - https://en.wikipedia.org/wiki/Matrix_determinant_lemma
return -math_ops.reduce_sum(math_ops.log(y), axis=-1)
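    # Worked check: for y = [0.2, 0.3, 0.4, 0.1] (the docstring example),
    # -sum(log(y)) = -log(0.2 * 0.3 * 0.4 * 0.1) = -log(0.0024) ~= 6.032,
    # matching the closed form log(1 / prod(y)) derived above.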
def _forward_log_det_jacobian(self, x):
# This code is similar to nn_ops.log_softmax but different because we have
# an implicit zero column to handle. I.e., instead of:
# reduce_sum(logits - reduce_sum(exp(logits), dim))
# we must do:
# log_normalization = 1 + reduce_sum(exp(logits))
# -log_normalization + reduce_sum(logits - log_normalization)
log_normalization = nn_ops.softplus(
math_ops.reduce_logsumexp(x, axis=-1, keep_dims=True))
fldj = (-log_normalization +
math_ops.reduce_sum(x - log_normalization,
axis=-1,
keep_dims=True))
return array_ops.squeeze(fldj, squeeze_dims=-1)
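  # Consistency note: for any bijector the two Jacobian terms must satisfy
  # _forward_log_det_jacobian(x) == -_inverse_log_det_jacobian(_forward(x)),
  # e.g. fldj(tf.log([2., 3., 4.])) ~= -6.032 for the docstring example above.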
|
PypiClean
|
/pyBAR-3.1.2.tar.gz/pyBAR-3.1.2/pybar/daq/fei4_record.py
|
from collections import OrderedDict
from basil.utils.BitLogic import BitLogic
from pybar.daq.readout_utils import is_data_header, is_address_record, is_value_record, is_service_record, is_data_record
flavors = ('fei4a', 'fei4b')
class FEI4Record(object):
"""Record Object
"""
def __init__(self, data_word, chip_flavor, tdc_trig_dist=False, trigger_data_mode=0):
self.record_rawdata = int(data_word)
self.record_word = BitLogic.from_value(value=self.record_rawdata, size=32)
self.record_dict = OrderedDict()
if self.record_rawdata & 0x80000000:
self.record_type = "TW"
if trigger_data_mode == 0:
self.record_dict.update([('trigger number', self.record_word[30:0].tovalue())])
elif trigger_data_mode == 1:
self.record_dict.update([('trigger timestamp', self.record_word[30:0].tovalue())])
elif trigger_data_mode == 2:
self.record_dict.update([('trigger timestamp', self.record_word[30:16].tovalue()), ('trigger number', self.record_word[15:0].tovalue())])
else:
raise ValueError("Unknown trigger data mode %d" % trigger_data_mode)
elif self.record_rawdata & 0xF0000000 == 0x40000000:
self.record_type = "TDC"
if tdc_trig_dist:
self.record_dict.update([('tdc distance', self.record_word[27:20].tovalue()), ('tdc counter', self.record_word[19:12].tovalue()), ('tdc value', self.record_word[11:0].tovalue())])
else:
self.record_dict.update([('tdc counter', self.record_word[27:12].tovalue()), ('tdc value', self.record_word[11:0].tovalue())])
elif not self.record_rawdata & 0xF0000000: # FE data
self.record_dict.update([('channel', (self.record_rawdata & 0x0F000000) >> 24)])
self.chip_flavor = chip_flavor
if self.chip_flavor not in flavors:
raise KeyError('Chip flavor is not of type {}'.format(', '.join('\'' + flav + '\'' for flav in flavors)))  # iterate the module-level flavors tuple; self.chip_flavors does not exist
if is_data_header(self.record_rawdata):
self.record_type = "DH"
if self.chip_flavor == "fei4a":
self.record_dict.update([('start', self.record_word[23:19].tovalue()), ('header', self.record_word[18:16].tovalue()), ('flag', self.record_word[15:15].tovalue()), ('lvl1id', self.record_word[14:8].tovalue()), ('bcid', self.record_word[7:0].tovalue())])
elif self.chip_flavor == "fei4b":
self.record_dict.update([('start', self.record_word[23:19].tovalue()), ('header', self.record_word[18:16].tovalue()), ('flag', self.record_word[15:15].tovalue()), ('lvl1id', self.record_word[14:10].tovalue()), ('bcid', self.record_word[9:0].tovalue())])
elif is_address_record(self.record_rawdata):
self.record_type = "AR"
self.record_dict.update([('start', self.record_word[23:19].tovalue()), ('header', self.record_word[18:16].tovalue()), ('type', self.record_word[15:15].tovalue()), ('address', self.record_word[14:0].tovalue())])
elif is_value_record(self.record_rawdata):
self.record_type = "VR"
self.record_dict.update([('start', self.record_word[23:19].tovalue()), ('header', self.record_word[18:16].tovalue()), ('value', self.record_word[15:0].tovalue())])
elif is_service_record(self.record_rawdata):
self.record_type = "SR"
if self.chip_flavor == "fei4a":
self.record_dict.update([('start', self.record_word[23:19].tovalue()), ('header', self.record_word[18:16].tovalue()), ('code', self.record_word[15:10].tovalue()), ('counter', self.record_word[9:0].tovalue())])
elif self.chip_flavor == "fei4b":
if self.record_word[15:10].tovalue() == 14:
self.record_dict.update([('start', self.record_word[23:19].tovalue()), ('header', self.record_word[18:16].tovalue()), ('code', self.record_word[15:10].tovalue()), ('lvl1id[11:5]', self.record_word[9:3].tovalue()), ('bcid[12:10]', self.record_word[2:0].tovalue())])
elif self.record_word[15:10].tovalue() == 15:
self.record_dict.update([('start', self.record_word[23:19].tovalue()), ('header', self.record_word[18:16].tovalue()), ('code', self.record_word[15:10].tovalue()), ('skipped', self.record_word[9:0].tovalue())])
elif self.record_word[15:10].tovalue() == 16:
self.record_dict.update([('start', self.record_word[23:19].tovalue()), ('header', self.record_word[18:16].tovalue()), ('code', self.record_word[15:10].tovalue()), ('truncation flag', self.record_word[9:9].tovalue()), ('truncation counter', self.record_word[8:4].tovalue()), ('l1req', self.record_word[3:0].tovalue())])
else:
self.record_dict.update([('start', self.record_word[23:19].tovalue()), ('header', self.record_word[18:16].tovalue()), ('code', self.record_word[15:10].tovalue()), ('counter', self.record_word[9:0].tovalue())])
elif is_data_record(self.record_rawdata):
self.record_type = "DR"
self.record_dict.update([('column', self.record_word[23:17].tovalue()), ('row', self.record_word[16:8].tovalue()), ('tot1', self.record_word[7:4].tovalue()), ('tot2', self.record_word[3:0].tovalue())])
else:
self.record_type = "UNKNOWN FE WORD"
self.record_dict.update([('word', self.record_word.tovalue())])
# raise ValueError('Unknown data word: ' + str(self.record_word.tovalue()))
else:
self.record_type = "UNKNOWN WORD"
self.record_dict.update([('unknown', self.record_word[31:0].tovalue())])
def __len__(self):
return len(self.record_dict)
def __getitem__(self, key):
if not (isinstance(key, (int, long)) or isinstance(key, basestring)):
raise TypeError()
try:
return self.record_dict[key.lower()]
except TypeError:
return self.record_dict[list(self.record_dict.keys())[int(key)]]  # iterkeys() is not subscriptable; materialize the keys first
def next(self):
return self.record_dict.iteritems().next()
def __iter__(self):
return self.record_dict.iteritems()
def __eq__(self, other):
try:
return self.record_type.lower() == other.lower()
except Exception:
try:
return self.record_type == other.record_type
except Exception:
return False
def __str__(self):
return self.record_type + ' {}'.format(' '.join(key + ':' + str(val) for key, val in self.record_dict.iteritems()))
def __repr__(self):
return repr(self.__str__())
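# Illustrative usage (sketch; `raw_word` is a hypothetical 32-bit value taken
# from the readout stream):
#   rec = FEI4Record(raw_word, chip_flavor='fei4a')
#   if rec == 'DR':  # __eq__ compares against the record type string
#       col, row = rec['column'], rec['row']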
|
PypiClean
|
/nni_daily-1.5.2005180104-py3-none-manylinux1_x86_64.whl/nni_daily-1.5.2005180104.data/data/nni/node_modules/lodash/_createHybrid.js
|
var composeArgs = require('./_composeArgs'),
composeArgsRight = require('./_composeArgsRight'),
countHolders = require('./_countHolders'),
createCtor = require('./_createCtor'),
createRecurry = require('./_createRecurry'),
getHolder = require('./_getHolder'),
reorder = require('./_reorder'),
replaceHolders = require('./_replaceHolders'),
root = require('./_root');
/** Used to compose bitmasks for function metadata. */
var WRAP_BIND_FLAG = 1,
WRAP_BIND_KEY_FLAG = 2,
WRAP_CURRY_FLAG = 8,
WRAP_CURRY_RIGHT_FLAG = 16,
WRAP_ARY_FLAG = 128,
WRAP_FLIP_FLAG = 512;
/**
* Creates a function that wraps `func` to invoke it with optional `this`
* binding of `thisArg`, partial application, and currying.
*
* @private
* @param {Function|string} func The function or method name to wrap.
* @param {number} bitmask The bitmask flags. See `createWrap` for more details.
* @param {*} [thisArg] The `this` binding of `func`.
* @param {Array} [partials] The arguments to prepend to those provided to
* the new function.
* @param {Array} [holders] The `partials` placeholder indexes.
* @param {Array} [partialsRight] The arguments to append to those provided
* to the new function.
* @param {Array} [holdersRight] The `partialsRight` placeholder indexes.
* @param {Array} [argPos] The argument positions of the new function.
* @param {number} [ary] The arity cap of `func`.
* @param {number} [arity] The arity of `func`.
* @returns {Function} Returns the new wrapped function.
*/
function createHybrid(func, bitmask, thisArg, partials, holders, partialsRight, holdersRight, argPos, ary, arity) {
var isAry = bitmask & WRAP_ARY_FLAG,
isBind = bitmask & WRAP_BIND_FLAG,
isBindKey = bitmask & WRAP_BIND_KEY_FLAG,
isCurried = bitmask & (WRAP_CURRY_FLAG | WRAP_CURRY_RIGHT_FLAG),
isFlip = bitmask & WRAP_FLIP_FLAG,
Ctor = isBindKey ? undefined : createCtor(func);
function wrapper() {
var length = arguments.length,
args = Array(length),
index = length;
while (index--) {
args[index] = arguments[index];
}
if (isCurried) {
var placeholder = getHolder(wrapper),
holdersCount = countHolders(args, placeholder);
}
if (partials) {
args = composeArgs(args, partials, holders, isCurried);
}
if (partialsRight) {
args = composeArgsRight(args, partialsRight, holdersRight, isCurried);
}
length -= holdersCount;
if (isCurried && length < arity) {
var newHolders = replaceHolders(args, placeholder);
return createRecurry(
func, bitmask, createHybrid, wrapper.placeholder, thisArg,
args, newHolders, argPos, ary, arity - length
);
}
var thisBinding = isBind ? thisArg : this,
fn = isBindKey ? thisBinding[func] : func;
length = args.length;
if (argPos) {
args = reorder(args, argPos);
} else if (isFlip && length > 1) {
args.reverse();
}
if (isAry && ary < length) {
args.length = ary;
}
if (this && this !== root && this instanceof wrapper) {
fn = Ctor || createCtor(fn);
}
return fn.apply(thisBinding, args);
}
return wrapper;
}
module.exports = createHybrid;
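// Illustrative (sketch): `createHybrid` is internal plumbing behind wrappers
// such as `_.curry` and `_.partial`; a curried call like the one below
// exercises the placeholder/recurry path implemented above.
//   var curried = _.curry(function(a, b, c) { return [a, b, c]; });
//   curried(1)(_, 3)(2); // => [1, 2, 3]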
|
PypiClean
|
/gda-score-code-test-2.5.13.tar.gz/gda-score-code-test-2.5.13/gdascore/gdaAttack.py
|
import re
import subprocess
import coloredlogs, logging
import sqlite3
import simplejson
import psycopg2
import queue
import threading
import sys
import os
import copy
import base64
import time
import pprint
import datetime
import signal
import atexit
import random
import requests
import functools
coloredlogs.DEFAULT_FIELD_STYLES['asctime'] = {}
coloredlogs.DEFAULT_FIELD_STYLES['levelname'] = {'bold': True, 'color': 'white', 'bright': True}
coloredlogs.DEFAULT_LEVEL_STYLES['info'] = {'color': 'cyan', 'bright': True}
coloredlogs.install(
fmt="[%(levelname)s] %(message)s (%(filename)s, %(funcName)s(), line %(lineno)d, %(asctime)s)",
datefmt='%Y-%m-%d %H:%M',
level=logging.INFO,
)
# logging.basicConfig(
# format="[%(levelname)s] %(message)s (%(filename)s, %(funcName)s(), line %(lineno)d, %(asctime)s)",
# datefmt='%Y-%m-%d %H:%M',
# level=logging.INFO,
# )
# for pdoc documentation
__all__ = ["gdaAttack"]
try:
from .gdaTools import getInterpolatedValue, getDatabaseInfo
from .dupCheck import DupCheck
except ImportError:
from gdaTools import getInterpolatedValue, getDatabaseInfo
from dupCheck import DupCheck
theCacheQueue = None
theCacheThreadObject = None
flgCacheThreadStarted = False
atcObject = None
class gdaAttack:
"""Manages a GDA Attack
WARNING: this code is fragile, and can fail ungracefully, or
just hang."""
def __init__(self, params):
""" Everything gets set up with 'gdaAttack(params)'
params is a dictionary containing the following
required parameters: <br/>
`param['name']`: The name of the attack. Make it unique, because
the cache is discovered using this name. <br/>
`param['rawDb']`: The label for the DB to be used as the
raw (non-anonymized) DB. <br/>
Following are the optional parameters: <br/>
`param['criteria']`: The criteria by which the attack should
determined to succeed or fail. Must be one of 'singlingOut',
'inference', or 'linkability'. Default is 'singlingOut'. <br/>
`param['anonDb']`: The label for the DB to be used as the
anonymized DB. (Is automatically set to `param['rawDb']` if
not set.) <br/>
`param['pubDb']`: The label for the DB to be used as the
publicly known DB in linkability attacks. <br/>
`param['table']`: The table to be attacked. Must be present
if the DB has more than one table. <br/>
`param['uid']`: The uid column for the table. Must be present
if the name of the column is other than 'uid'. <br/>
`param['flushCache']`: Set to true if you want the cache of
query answers from a previous run flushed. The purpose of the
cache is to save the work from an aborted attack, which can be
substantial because attacks can have hundreds of queries. <br/>
`param['locCacheDir']`: The directory holding the cache DBs.
Default 'cacheDBs'. <br/>
`param['numRawDbThreads']`: The number of parallel queries
that can be made to the raw DB. Default 3. <br/>
`param['numAnonDbThreads']`: The number of parallel queries
that can be made to the anon DB. Default 3. <br/>
`param['numPubDbThreads']`: The number of parallel queries
that can be made to the public linkability DB. Default 3. <br/>
`param['verbose']`: Set to True for verbose output. <br/>
`param['dp_budget']`: An optional overall privacy budget for the attack. For use with uber_dp. Default 'None'. <br/>
"""
#### gda-score-code version check warning ####
process = subprocess.run([sys.executable, "-m", "pip", "list","--outdated"],stdout=subprocess.PIPE,stderr=subprocess.PIPE,universal_newlines=True)
upgradable_pkgs = process.stdout
if "gda-score-code" in upgradable_pkgs:
pkgs = upgradable_pkgs.split('\n')
potential_gdascore_pkgs = list(filter(lambda x: 'gda-score-code' in x, pkgs))
if len(potential_gdascore_pkgs) == 1:
gdascore_pkg = potential_gdascore_pkgs[0]
pkg_name, curr_ver, latest_ver, ins_type = (re.sub(r'\s+', '|', gdascore_pkg)
.split('|'))
print('\n')
logging.warning(f'WARNING: You have {pkg_name} version {curr_ver} installed; '
f'however, version {latest_ver} is available.')
logging.warning(f'You should consider upgrading via the '
f'"pip install --upgrade {pkg_name}" command.')
print('\n')
########
########### added by frzmohammadali ##########
global theCacheQueue
global theCacheThreadObject
global flgCacheThreadStarted
global atcObject
if not theCacheQueue and not theCacheThreadObject:
theCacheQueue = queue.Queue()
theCacheThreadObject = CacheThread(theCacheQueue, self)
atcObject = self
printTitle('cache thread initialized.')
self.cacheQueue = theCacheQueue
self.cacheThreadObject = theCacheThreadObject
if not flgCacheThreadStarted:
self.cacheThreadObject.start()
flgCacheThreadStarted = True
##############################################
############## parameters and instance variables ###############
# ------------- Class called parameters and configured parameters
self._vb = False
self._cr = '' # short for criteria
self._pp = None # pretty printer (for debugging)
self._sid = None # for uber_dp interface, a session ID over the attack is needed
self._session = None # also session for the uber_dp interface
self._colNamesTypes = []
self._colNames = []
self._p = dict(name='',
rawDb='',
anonDb='',
pubDb='',
criteria='singlingOut',
table='',
uid='uid',
flushCache=False,
verbose=False,
# following not normally set by caller, but can be
locCacheDir="cacheDBs",
numRawDbThreads=3,
numAnonDbThreads=3,
numPubDbThreads=3,
dp_budget=None,
)
self._requiredParams = ['name', 'rawDb']
# ---------- Private internal state
# Threads
self._rawThreads = []
self._anonThreads = []
self._pubThreads = []
# Queues read by database threads _rawThreads and _anonThreads
self._rawQ = None
self._anonQ = None
self._pubQ = None
# Queues read by various caller functions
self._exploreQ = None
self._knowledgeQ = None
self._attackQ = None
self._claimQ = None
self._guessQ = None
# ask/get counters for setting 'stillToCome'
self._exploreCounter = 0
self._knowledgeCounter = 0
self._attackCounter = 0
self._claimCounter = 0
self._guessCounter = 0
# State for duplicate claim detection
self._dupCheck = DupCheck()
# State for computing attack results (see _initAtkRes())
self._atrs = {}
# State for various operational measures (see _initOp())
self._op = {}
##############################################
if self._vb:
print(f"Calling {__name__}.init")
if self._vb:
print(f" {params}")
self._initOp()
self._initCounters()
self._assignGlobalParams(params)
self._doParamChecks()
for param in self._requiredParams:
if len(self._p[param]) == 0:
s = str(f"Error: Need param '{param}' in class parameters")
sys.exit(s)
# extract the type of interface we are interacting with the anonymization
self._type = self._p['anonDb']['type']
if self._type == 'uber_dp':
# cannot run attack on uber dp without specifying the budget
if self._p['dp_budget'] is None:
s = str(f"Error: Needs param dp_budget in class parameters when running uber_dp attacks")
sys.exit(s)
self._initUberDPSession()
# if no session id was set, the attacks cannot be conducted
if self._sid is None:
s = str(f"Failed initializing session with Uber_DP Server")
sys.exit(s)
# create the database directory if it doesn't exist
try:
if not os.path.exists(self._p['locCacheDir']):
os.makedirs(self._p['locCacheDir'])
except OSError:
sys.exit("Error: Creating directory. " + self._p['locCacheDir'])
# Get the table name if not provided by the caller
if len(self._p['table']) == 0:
tables = self.getTableNames()
if len(tables) != 1:
print("Error: gdaAttack(): Must include table name if " +
"there is more than one table in database")
sys.exit()
self._p['table'] = tables[0]
# Get the column names for computing susceptibility later
self._colNamesTypes = self.getColNamesAndTypes()
if self._vb:
print(f"Columns are '{self._colNamesTypes}'")
self._initAtkRes()
# And make a convenient list of column names
for colNameType in self._colNamesTypes:
self._colNames.append(colNameType[0])
# Setup the database which holds already executed queries so we
# don't have to repeat them if we are restarting
self._setupLocalCacheDB()
# Setup the threads and queues
self._setupThreadsAndQueues()
numThreads = threading.active_count()
expectedThreads = (self._p['numRawDbThreads'] +
self._p['numAnonDbThreads'] + 1)
if len(self._p['pubDb']) > 0:
expectedThreads += self._p['numPubDbThreads']
if numThreads < expectedThreads:
print(f"Error: Some thread(s) died "
f"(count {numThreads}, expected {expectedThreads}). "
f"Aborting.")
self.cleanUp(cleanUpCache=False, doExit=True)
def getResults(self):
""" Returns all of the compiled attack results.
This can be input to class `gdaScores()` and method
`gdaScores.addResult()`."""
# Add the operational parameters
self._atrs['operational'] = self.getOpParameters()
self._cleanPasswords()
return self._atrs
def getOpParameters(self):
""" Returns a variety of performance measurements.
Useful for debugging."""
self._op['avQueryDuration'] = 0
if self._op['numQueries'] > 0:
self._op['avQueryDuration'] = (
self._op['timeQueries'] / self._op['numQueries'])
self._op['avCachePutDuration'] = 0
if self._op['numCachePuts'] > 0:
self._op['avCachePutDuration'] = (
self._op['timeCachePuts'] / self._op['numCachePuts'])
self._op['avCacheGetDuration'] = 0
if self._op['numCacheGets'] > 0:
self._op['avCacheGetDuration'] = (
self._op['timeCacheGets'] / self._op['numCacheGets'])
return self._op
def setVerbose(self):
"""Sets Verbose to True"""
self._vb = True
def unsetVerbose(self):
"""Sets Verbose to False"""
self._vb = False
def cleanUp(self, cleanUpCache=True, doExit=False,
exitMsg="Finished cleanUp, exiting"):
""" Garbage collect queues, threads, and cache.
By default, this wipes the cache. The idea being that if the
entire attack finished successfully, then it won't be
repeated and the cache isn't needed. Do `cleanUpCache=False`
if that isn't what you want."""
if self._vb: print(f"Calling {__name__}.cleanUp")
if self._rawQ.empty() != True:
logging.warning("Warning, trying to clean up when raw queue not empty!")
if self._anonQ.empty() != True:
logging.warning("Warning, trying to clean up when anon queue not empty!")
if self.cacheQueue.empty() != True:
logging.warning("Warning, trying to clean up when cache queue not empty!")
# Stuff in end signals for the workers (this is a bit bogus, cause
# if a thread is gone or hanging, not all signals will get read)
for i in range(self._p['numRawDbThreads']):
self._rawQ.put(None)
for i in range(self._p['numAnonDbThreads']):
self._anonQ.put(None)
for i in range(self.cacheQueue.qsize()):
self.cacheQueue.put(None)
cleanBgThreads()
if len(self._p['pubDb']) > 0:
if self._pubQ.empty() != True:
print("Warning, trying to clean up when pub queue not empty!")
for i in range(self._p['numPubDbThreads']):
self._pubQ.put(None)
for t in self._pubThreads:
if t.is_alive(): t.stop()
if cleanUpCache:
self._removeLocalCacheDB()
if self._session: # close the uber session
self._session.close()
if doExit:
sys.exit(exitMsg)
def isClaimed(self, spec):
"""Check if a claim was already fully or partially made.
The `spec` is formatted identical to the `spec` in `gdaAttack.askClaim`."""
return self._dupCheck.is_claimed(spec, verbose=self._vb)
def askClaim(self, spec, cache=True, claim=True):
"""Generate Claim query for raw and optionally pub databases.
Before anything happens, the system uses the `gdaAttack.isClaimed`
method to determine whether a previous claim fully or partially
matches the new claim. Such duplicates are not allowed and an error
will be raised providing additional details about the duplicate.
Making a claim results in a query to the raw database, and if
linkability attack, the pub database, to check
the correctness of the claim. Multiple calls to this method will
cause the corresponding queries to be queued up, so `askClaim()`
returns immediately. `getClaim()` harvests one claim result. <br/>
Set `claim=False` if this claim should not be applied to the
confidence improvement score. In this case, the probability score
will instead be reduced accordingly. <br/>
The `spec` is formatted as follows: <br/>
{'known':[{'col':'colName','val':'value'},...],
'guess':[{'col':'colName','val':'value'},...],
}
`spec['known']` are the columns and values the attacker already knows
(i.e. with prior knowledge). Optional. <br/>
`spec['guess']` are the columns and values the attacker doesn't know,
but rather is trying to predict. Mandatory for 'singling out'
and 'inference'. Optional for 'linkability'. <br/>
Answers are cached <br/>
Returns immediately"""
if self._vb: print(f"Calling {__name__}.askClaim with spec '{spec}', count {self._claimCounter}")
if not self._dupCheck.is_claimed(spec, verbose=self._vb, raise_true=True):
self._dupCheck.claim(spec, verbose=self._vb)
self._claimCounter += 1
sql = self._makeSqlFromSpec(spec)
if self._vb: print(f"Sql is '{sql}'")
sqlConfs = self._makeSqlConfFromSpec(spec)
if self._vb: print(f"SqlConf is '{sqlConfs}'")
# Make a copy of the query for passing around
job = {}
job['q'] = self._claimQ
job['claim'] = claim
job['queries'] = [{'sql': sql, 'cache': cache}]
job['spec'] = spec
for sqlConf in sqlConfs:
job['queries'].append({'sql': sqlConf, 'cache': cache})
self._rawQ.put(job)
def getClaim(self):
""" Wait for and gather results of askClaim() calls
Returns a data structure that contains both the result
of one finished claim, and the claim's input parameters.
Note that the order in which results are returned by
`getClaim()` are not necessarily the same order they were
inserted by `askClaim()`. <br/>
Assuming `result` is returned: <br/>
`result['claim']` is the value supplied in the corresponding
`askClaim()` call <br/>
`result['spec']` is a copy of the `spec` supplied in the
corresponding `askClaim()` call. <br/>
`result['queries']` is a list of the queries generated in order to
validate the claim. <br/>
`result['answers']` are the answers to the queries in
`result['queries']`. <br/>
`result['claimResult']` is 'Correct', 'Wrong', or 'Error', depending
on whether the claim satisfies the criteria or not. <br/>
`result['stillToCome']` is a counter showing how many more
claims are still queued. When `stillToCome` is 0, then all
claims submitted by `askClaim()` have been returned."""
if self._vb:
print(f"Calling {__name__}.getClaim")
if self._claimCounter == 0:
# Caller shouldn't be calling if there are no expected
# answers, but is anyway, so just return
return {'query': {'sql': 'None'}, 'error': 'Nothing to do',
'stillToCome': 0, 'claimResult': 'Error'}
job = self._claimQ.get()
claim = job['claim']
self._claimQ.task_done()
self._claimCounter -= 1
job['stillToCome'] = self._claimCounter
self._addToAtkRes('claimTrials', job['spec'], 1)
# The claim is tested against the first reply
reply = job['replies'][0]
job['claimResult'] = 'Wrong'
if claim:
self._addToAtkRes('claimMade', job['spec'], 1)
if 'error' in reply:
self._addToAtkRes('claimError', job['spec'], 1)
job['claimResult'] = 'Error'
else:
if self._cr == 'singlingOut':
claimIsCorrect = self._checkSinglingOut(reply['answer'])
elif self._cr == 'inference':
claimIsCorrect = self._checkInference(reply['answer'])
elif self._cr == 'linkability':
claimIsCorrect = self._checkLinkability(reply['answer'])
if claim == 1 and claimIsCorrect:
self._addToAtkRes('claimCorrect', job['spec'], 1)
job['claimResult'] = 'Correct'
elif claim == 0 and claimIsCorrect:
self._addToAtkRes('claimPassCorrect', job['spec'], 1)
job['claimResult'] = 'Correct'
if self._cr == 'singlingOut' or self._cr == 'inference':
# Then measure confidence against the second and third replies
if 'answer' in job['replies'][1]:
if job['replies'][1]['answer']:
guessedRows = job['replies'][1]['answer'][0][0]
else:
guessedRows = 0
elif 'error' in job['replies'][1]:
self._pp.pprint(job)
print(f"Error: conf query:\n{job['replies'][1]['error']}")
self.cleanUp(cleanUpCache=False, doExit=True)
if 'answer' in job['replies'][2]:
if job['replies'][2]['answer']:
totalRows = job['replies'][2]['answer'][0][0]
else:
totalRows = 0
elif 'error' in job['replies'][2]:
self._pp.pprint(job)
print(f"Error: conf query:\n{job['replies'][2]['error']}")
self.cleanUp(cleanUpCache=False, doExit=True)
if totalRows:
self._addToAtkRes('sumConfidenceRatios', job['spec'],
guessedRows / totalRows)
self._addToAtkRes('numConfidenceRatios', job['spec'], 1)
self._atrs['tableStats']['totalRows'] = totalRows
else:
# For linkability, the confidence is always 1/2
self._addToAtkRes('sumConfidenceRatios', job['spec'], 0.5)
self._addToAtkRes('numConfidenceRatios', job['spec'], 1)
if 'q' in job:
del job['q']
return (job)
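# A minimal sketch of the ask/get claim pattern (the spec values are
# hypothetical). askClaim() queues work and returns immediately;
# getClaim() blocks until one result is available:
#
#   spec = {'known': [{'col': 'zipcode', 'val': '12345'}],
#           'guess': [{'col': 'salary', 'val': '50000'}]}
#   attack.askClaim(spec)
#   while True:
#       result = attack.getClaim()
#       print(result['claimResult'])
#       if result['stillToCome'] == 0:
#           break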
def askAttack(self, query, cache=True):
""" Generate and queue up an attack query for database.
`query` is a dictionary with (currently) one value: <br/>
`query['sql']` contains the SQL query. <br/>
`query['epsilon']` is optional, and defines how much of the differential privacy budget is used for uber_dp <br/>
"""
self._attackCounter += 1
if self._vb: print(f"Calling {__name__}.askAttack with query '{query}', count {self._attackCounter}")
# Make a copy of the query for passing around
qCopy = copy.copy(query)
job = {}
job['q'] = self._attackQ
qCopy['cache'] = cache
job['queries'] = [qCopy]
self._anonQ.put(job)
def getAttack(self):
""" Returns the result of one askAttack() call
Blocks until the result is available. Note that the order
in which results are received is not necessarily the order
in which `askAttack()` calls were made. <br/>
Assuming `result` is returned: <br/>
`result['answer']` is the answer returned by the DB. The
format is: <br/>
`[(C1,C2...,Cn),(C1,C2...,Cn), ... (C1,C2...,Cn)]` <br/>
where C1 is the first element of the `SELECT`, C2 the second
element, etc. This attribute does not exist in cases of query
error (i.e. bad sql, budget exceeded if uber_dp, etc.) <br/>
`result['cells']` is the number of cells returned in the answer
(used by `gdaAttack()` to compute total attack cells) <br/>
`result['query']['sql']` is the query from the corresponding
`askAttack()`.
`result['error']` contains the error description <br/>
"""
if self._vb:
print(f"Calling {__name__}.getAttack")
if self._attackCounter == 0:
# Caller shouldn't be calling if there are no expected
# answers, but is anyway, so just return
return {'query': {'sql': 'None'}, 'error': 'Nothing to do',
'stillToCome': 0}
job = self._attackQ.get()
self._attackQ.task_done()
self._attackCounter -= 1
reply = job['replies'][0]
reply['stillToCome'] = self._attackCounter
self._atrs['base']['attackGets'] += 1
if 'cells' in reply:
if reply['cells'] == 0:
self._atrs['base']['attackCells'] += 1
else:
self._atrs['base']['attackCells'] += reply['cells']
else:
self._atrs['base']['attackCells'] += 1
return (reply)
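# A minimal sketch of the attack query pattern (the SQL is a
# placeholder; for an uber_dp back end the query dict would also
# carry an 'epsilon' key):
#
#   attack.askAttack({'sql': "select uid from accounts where zipcode = '12345'"})
#   reply = attack.getAttack()
#   if 'answer' in reply:
#       rows = reply['answer']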
def askKnowledge(self, query, cache=True):
""" Generate and queue up a prior knowledge query for database
The class keeps track of how many prior knowledge cells were
returned and uses this to compute a score. <br/>
Input parameters formatted the same as with `askAttack()`"""
self._knowledgeCounter += 1
if self._vb: print(f"Calling {__name__}.askKnowledge with query "
f"'{query}', count {self._knowledgeCounter}")
# Make a copy of the query for passing around
qCopy = copy.copy(query)
job = {}
job['q'] = self._knowledgeQ
qCopy['cache'] = cache
job['queries'] = [qCopy]
self._rawQ.put(job)
def getKnowledge(self):
""" Wait for and gather results of prior askKnowledge() calls
Blocks until the result is available. Note that the order
in which results are received is not necessarily the order
in which `askKnowledge()` calls were made. <br/>
Return parameter formatted the same as with `getAttack()`"""
if self._vb:
print(f"Calling {__name__}.getKnowledge")
if self._knowledgeCounter == 0:
# Caller shouldn't be calling if there are no expected
# answers, but is anyway, so just return
return {'query': {'sql': 'None'}, 'error': 'Nothing to do',
'stillToCome': 0}
job = self._knowledgeQ.get()
self._knowledgeQ.task_done()
self._knowledgeCounter -= 1
reply = job['replies'][0]
reply['stillToCome'] = self._knowledgeCounter
self._atrs['base']['knowledgeGets'] += 1
if 'cells' in reply:
self._atrs['base']['knowledgeCells'] += reply['cells']
return (reply)
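# A minimal sketch of a prior-knowledge query (the SQL is a
# placeholder); cells returned here count against the knowledge
# component of the GDA score:
#
#   attack.askKnowledge({'sql': "select uid, zipcode from accounts"})
#   reply = attack.getKnowledge()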
def askExplore(self, query, cache=True):
""" Generate and queue up an exploritory query for database
No score book-keeping is done here. An analyst may make
any number of queries without impacting the GDA score. <br/>
`query` is a dictionary with two values: <br/>
`query['sql']` contains the SQL query. <br/>
`query['db']` determines which database is queried, and
is one of 'rawDb', 'anonDb', or (if linkability), 'pubDb'."""
self._exploreCounter += 1
if self._vb: print(f"Calling {__name__}.askExplore with "
f"query '{query}', count {self._exploreCounter}")
# Make a copy of the query for passing around
qCopy = copy.copy(query)
job = {}
job['q'] = self._exploreQ
qCopy['cache'] = cache
job['queries'] = [qCopy]
if qCopy['db'] == 'rawDb' or qCopy['db'] == 'raw':
self._rawQ.put(job)
elif qCopy['db'] == 'anonDb' or qCopy['db'] == 'anon':
self._anonQ.put(job)
else:
self._pubQ.put(job)
def getExplore(self):
""" Wait for and gather results of prior askExplore() calls.
Blocks until the result is available. Note that the order
in which results are received is not necessarily the order
in which `askExplore()` calls were made. <br/>
Return parameter formatted the same as with `getAttack()`"""
if self._vb:
print(f"Calling {__name__}.getExplore")
if self._exploreCounter == 0:
# Caller shouldn't be calling if there are no expected
# answers, but is anyway, so just return
return {'query': {'sql': 'None'}, 'error': 'Nothing to do',
'stillToCome': 0}
job = self._exploreQ.get()
self._exploreQ.task_done()
self._exploreCounter -= 1
reply = job['replies'][0]
reply['stillToCome'] = self._exploreCounter
return (reply)
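# A minimal sketch of an exploratory query, which does not affect the
# GDA score (table and SQL are placeholders; 'db' selects the target
# database):
#
#   attack.askExplore({'db': 'rawDb', 'sql': 'select count(*) from accounts'})
#   reply = attack.getExplore()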
def getPublicColValues(self, colName, tableName=''):
"""Return list of "publicly known" column values and counts
Column value has index 0, count of distinct UIDs has index 1
Must specify column name.
"""
if len(colName) == 0:
print(f"Must specify column 'colName'")
return None
if len(tableName) == 0:
# caller didn't supply a table name, so get it from the
# class init
tableName = self._p['table']
# Establish connection to database
db = getDatabaseInfo(self._p['rawDb'])
connStr = str(
f"host={db['host']} port={db['port']} dbname={db['dbname']} user={db['user']} password={db['password']}")
conn = psycopg2.connect(connStr)
cur = conn.cursor()
# First we need to know the total number of distinct users
sql = str(f"""select count(distinct {self._p['uid']})
from {tableName}""")
try:
cur.execute(sql)
except psycopg2.Error as e:
print(f"Error: getPublicColValues() query: '{e}'")
self.cleanUp(cleanUpCache=False, doExit=True)
ans = cur.fetchall()
numUid = ans[0][0]
# Query the raw db for values in the column
sql = str(f"""select {colName}, count(distinct {self._p['uid']})
from {tableName}
group by 1
order by 2 desc
limit 200""")
try:
cur.execute(sql)
except psycopg2.Error as e:
print(f"Error: getPublicColValues() query: '{e}'")
self.cleanUp(cleanUpCache=False, doExit=True)
ans = cur.fetchall()
ret = []
for row in ans:
# row[0] is the value, row[1] is the count
if (((row[1] / numUid) > 0.002) and
(row[1] >= 50)):
ret.append((row[0], row[1]))
conn.close()
return ret
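# For example (the column name is hypothetical):
#
#   for value, uidCount in attack.getPublicColValues('zipcode'):
#       print(value, uidCount)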
def getColNames(self, dbType='rawDb', tableName=''):
"""Return simple list of column names
`dbType` is one of 'rawDb' or 'anonDb'"""
if len(tableName) == 0:
colsAndTypes = self.getColNamesAndTypes(dbType=dbType)
else:
colsAndTypes = self.getColNamesAndTypes(
dbType=dbType, tableName=tableName)
if not colsAndTypes:
return None
cols = []
for tup in colsAndTypes:
cols.append(tup[0])
return cols
def getAttackTableName(self):
"""Returns the name of the table being used in the attack."""
return self._p['table']
def getTableCharacteristics(self, tableName=''):
"""Returns the full contents of the table characteristics
Return value is a dict indexed by column name: <br/>
{ '<colName>':
{
'av_rows_per_vals': 3.93149,
'av_uids_per_val': 0.468698,
'column_label': 'continuous',
'column_name': 'dropoff_latitude',
'column_type': 'real',
'max': '898.29382000000000',
'min': '-0.56333297000000',
'num_distinct_vals': 24216,
'num_rows': 95205,
'num_uids': 11350,
'std_rows_per_val': 10.8547,
'std_uids_per_val': 4.09688},
}
"""
if len(tableName) == 0:
# caller didn't supply a table name, so get it from the
# class init
tableName = self._p['table']
# Modify table name to the default for the characteristics table
tableName += '_char'
# Establish connection to database
db = getDatabaseInfo(self._p['rawDb'])
connStr = str(
f"host={db['host']} port={db['port']} dbname={db['dbname']} user={db['user']} password={db['password']}")
conn = psycopg2.connect(connStr)
cur = conn.cursor()
# Set up return dict
ret = {}
# Query it for column names
sql = str(f"""select column_name, data_type
from information_schema.columns where
table_name='{tableName}'""")
try:
cur.execute(sql)
except psycopg2.Error as e:
print(f"Error: getTableCharacteristics() query: '{e}'")
self.cleanUp(cleanUpCache=False, doExit=True)
cols = cur.fetchall()
# Make index for column name (should be 0, but just to be sure)
for colNameIndex in range(len(cols)):
if cols[colNameIndex][0] == 'column_name':
break
# Query it for table contents
sql = str(f"SELECT * FROM {tableName}")
try:
cur.execute(sql)
except psycopg2.Error as e:
print(f"Error: getTableCharacteristics() query: '{e}'")
self.cleanUp(cleanUpCache=False, doExit=True)
ans = cur.fetchall()
for row in ans:
colName = row[colNameIndex]
ret[colName] = {}
for i in range(len(row)):
ret[colName][cols[i][0]] = row[i]
conn.close()
return ret
def getAnonTableCharacteristics(self, tableName=''):
"""Returns the full contents of the table characteristics
Return value is a dict indexed by column name: <br/>
{ '<colName>':
{
'av_rows_per_vals': 3.93149,
'av_uids_per_val': 0.468698,
'column_label': 'continuous',
'column_name': 'dropoff_latitude',
'column_type': 'real',
'max': '898.29382000000000',
'min': '-0.56333297000000',
'num_distinct_vals': 24216,
'num_rows': 95205,
'num_uids': 11350,
'std_rows_per_val': 10.8547,
'std_uids_per_val': 4.09688},
}
"""
if len(tableName) == 0:
# caller didn't supply a table name, so get it from the
# class init
tableName = self._p['table']
# Modify table name to the default for the characteristics table
# tableName += '_char'
# Establish connection to database
db = getDatabaseInfo(self._p['anonDb'])
connStr = str(
f"host={db['host']} port={db['port']} dbname={db['dbname']} user={db['user']} password={db['password']}")
conn = psycopg2.connect(connStr)
cur = conn.cursor()
# Query it for column names
sql = str(f"""select column_name, data_type
from information_schema.columns
where table_schema NOT IN ('information_schema', 'pg_catalog') and
table_name='{tableName}'""")
try:
cur.execute(sql)
except psycopg2.Error as e:
print(f"Error: getAnonTableCharacteristics() query: '{e}'")
self.cleanUp(cleanUpCache=False, doExit=True)
ans = cur.fetchall()
# Set up return dict
ret = {_row[0]: {'column_name': _row[0], 'column_type': _row[1]} for _row in ans}
conn.close()
return ret
# Note that the following is used internally, but we expose it to the
# caller as well because it is a useful function for exploration
def getColNamesAndTypes(self, dbType='rawDb', tableName=''):
"""Return raw database column names and types (or None if error)
dbType is one of 'rawDb' or 'anonDb' <br/>
return format: [(col,type),(col,type),...]"""
if len(tableName) == 0:
# caller didn't supply a table name, so get it from the
# class init
tableName = self._p['table']
# Establish connection to database
db = getDatabaseInfo(self._p[dbType])
if db['type'] != 'postgres' and db['type'] != 'aircloak':
print(f"DB type '{db['type']}' must be 'postgres' or 'aircloak'")
return None
connStr = str(
f"host={db['host']} port={db['port']} dbname={db['dbname']} user={db['user']} password={db['password']}")
conn = psycopg2.connect(connStr)
cur = conn.cursor()
# Query it for column names
if db['type'] == 'postgres':
sql = str(f"""select column_name, data_type
from information_schema.columns where
table_name='{tableName}'""")
elif db['type'] == 'aircloak':
sql = str(f"show columns from {tableName}")
try:
cur.execute(sql)
except psycopg2.Error as e:
print(f"Error: getColNamesAndTypes() query: '{e}'")
self.cleanUp(cleanUpCache=False, doExit=True)
ans = cur.fetchall()
ret = []
for row in ans:
ret.append((row[0], row[1]))
conn.close()
return ret
def getTableNames(self, dbType='rawDb'):
"""Return database table names
dbType is one of 'rawDb' or 'anonDb' <br/>
Table names returned as list, unless error then return None"""
# Establish connection to database
db = getDatabaseInfo(self._p[dbType])
if db['type'] != 'postgres' and db['type'] != 'aircloak':
print(f"DB type '{db['type']}' must be 'postgres' or 'aircloak'")
return None
connStr = str(
f"host={db['host']} port={db['port']} dbname={db['dbname']} user={db['user']} password={db['password']}")
conn = psycopg2.connect(connStr)
cur = conn.cursor()
# Query it for column names
if db['type'] == 'postgres':
sql = """SELECT tablename
FROM pg_catalog.pg_tables
WHERE schemaname != 'pg_catalog' AND
schemaname != 'information_schema'"""
elif db['type'] == 'aircloak':
sql = "show tables"
try:
cur.execute(sql)
except psycopg2.Error as e:
print(f"Error: getTableNames() query: '{e}'")
self.cleanUp(cleanUpCache=False, doExit=True)
ans = cur.fetchall()
ret = []
for row in ans:
ret.append(row[0])
conn.close()
return ret
def getUidColName(self):
""" Returns the name of the UID column"""
return self._p['uid']
def getPriorKnowledge(self, dataColumns, method,
fraction=None, count=None, selectColumn=None, colRange=[None,None], values=[None]):
""" Returns data from the rawDB according to a specification
This mimics external knowledge that an attacker may have about the data, and
influences the 'knowledge' part of the GDA Score. <br/>
`dataColumns` is a list of column names. The data for these columns is returned <br/>
`method` can be 'rows' or 'users'. If 'rows', then rows are selected
according to the criteria (`fraction`, `count`, `selectColumn`, `colRange`,
or `values`).
If 'users', then all rows for a set of selected users are returned.
The users are selected according to the criteria (`fraction` or `count`). <br/>
If `fraction` is set to 1.0, then all rows are returned (for the
selected column values). One of `fraction`, `count`, or
`selectColumn` must be set. <br/>
`fraction` or `count` are set to obtain a random set of rows or users. If
`fraction`, then an approximate fraction of all rows/users is selected.
`fraction` is a value between 0 and 1.0. If `count`, then exactly `count`
random rows/users are selected. <br/>
`selectColumn` is set to select rows according to the values of the specified
column. `selectColumn` is a column name. If set, then either a range of
values (`colRange`), or a set of values (`values`) must be chosen. <br/>
`colRange` is
a list with two values: `[min,max]`. This selects all values
between min and max inclusive. <br/>
`values` is a list
of one or more values of any type. This selects all values matching those in
the list. <br/>
The return value is a list in this format: <br/>
`[(C1,C2...,Cn),(C1,C2...,Cn), ... (C1,C2...,Cn)]` <br/>
where C1 corresponds to the first column in `dataColumns`, C2 corresponds to
the second column in `dataColumns`, and so on. <br/>
"""
# Check input parameters
if not isinstance(dataColumns, list):
print(f"getPriorKnowledge Error: dataColumns must be a list of one or more column names")
self.cleanUp(cleanUpCache=False, doExit=True)
if method not in ['rows','users']:
print(f"getPriorKnowledge Error: method must be 'rows' or 'users'")
self.cleanUp(cleanUpCache=False, doExit=True)
if fraction is None and count is None and selectColumn is None:
print(f"getPriorKnowledge Error: one of fraction, count, or selectColumn must be set")
self.cleanUp(cleanUpCache=False, doExit=True)
if fraction and not isinstance(fraction, float):
print(f"getPriorKnowledge Error: if set, fraction must be a float")
self.cleanUp(cleanUpCache=False, doExit=True)
if (fraction and (count or selectColumn)) or (count and (fraction or selectColumn)):
print(f"getPriorKnowledge Error: only one of fraction, count, or selectColumn may be set")
self.cleanUp(cleanUpCache=False, doExit=True)
if count and not isinstance(count, int):
print(f"getPriorKnowledge Error: if set, count must be an integer")
self.cleanUp(cleanUpCache=False, doExit=True)
if selectColumn:
if selectColumn not in self._colNames:
print(f"getPriorKnowledge Error: selectColumn '{selectColumn}' is not a valid column")
self.cleanUp(cleanUpCache=False, doExit=True)
if colRange == [None,None] and values == [None]:
print(f"getPriorKnowledge Error: if selectColumn is set, one of colRange or values must be set")
self.cleanUp(cleanUpCache=False, doExit=True)
if not isinstance(colRange, list):
print(f"getPriorKnowledge Error: colRange must be a list with two values")
self.cleanUp(cleanUpCache=False, doExit=True)
if not (isinstance(values, list) or isinstance(values, tuple)) or len(values) == 0:
print(f"getPriorKnowledge Error: values must be a list or tuple with one or more values")
self.cleanUp(cleanUpCache=False, doExit=True)
for col in dataColumns:
if col not in self._colNames:
print(f"getPriorKnowledge Error: column '{col}' is not a valid column")
self.cleanUp(cleanUpCache=False, doExit=True)
# Basic input checks finished
# Establish connection to database
db = getDatabaseInfo(self._p['rawDb'])
connStr = str(
f"host={db['host']} port={db['port']} dbname={db['dbname']} user={db['user']} password={db['password']}")
conn = psycopg2.connect(connStr)
cur = conn.cursor()
table = self._p['table']
uid = self._p['uid']
# Make the SELECT part of the SQL query
initSql = 'SELECT '
for col in dataColumns:
initSql += str(f"{col}, ")
initSql = initSql[0:-2]
if method == 'rows' and fraction:
sql = initSql + str(f" FROM {table} WHERE random() <= {fraction}")
ans = self._doQuery(cur,sql)
self._atrs['base']['knowledgeCells'] += len(dataColumns) * len(ans)
return(ans)
if method == 'users' and fraction:
sql = initSql + str(f" FROM {table} WHERE {uid} IN ")
sql += str(f"(SELECT {uid} from (SELECT DISTINCT {uid} FROM {table}) t WHERE random() < {fraction})")
ans = self._doQuery(cur,sql)
self._atrs['base']['knowledgeCells'] += len(dataColumns) * len(ans)
return(ans)
if method == 'rows' and colRange[0] is not None:
for pair in self._colNamesTypes:
if pair[0] == selectColumn:
colType = pair[1]
break
if 'text' in colType or 'char' in colType or 'date' in colType or 'time' in colType:
sql = initSql + str(f" FROM {table} WHERE {selectColumn} >= '{colRange[0]}' and {selectColumn} <= '{colRange[1]}'")
else:
sql = initSql + str(f" FROM {table} WHERE {selectColumn} >= {colRange[0]} and {selectColumn} <= {colRange[1]}")
ans = self._doQuery(cur,sql)
self._atrs['base']['knowledgeCells'] += len(dataColumns) * len(ans)
return(ans)
if method == 'rows' and values[0] is not None:
sql = initSql + str(f" FROM {table} WHERE {selectColumn} IN (")
for pair in self._colNamesTypes:
if pair[0] == selectColumn:
colType = pair[1]
break
for value in values:
if "text" in colType or "date" in colType or "time" in colType:
sql += str(f"'{value}', ")
else:
sql += str(f"{value}, ")
sql = sql[0:-2]
sql += ")"
ans = self._doQuery(cur,sql)
self._atrs['base']['knowledgeCells'] += len(dataColumns) * len(ans)
return(ans)
if method == 'rows' and count:
# need to know the total number of rows
sql = str(f"select count(*) from {table}")
ans = self._doQuery(cur,sql)
numRows = ans[0][0]
# next we get some random set of rows that is certainly more than we need
frac = (count/numRows)*2
sql = initSql + str(f" FROM {table} WHERE random() <= {frac}")
temp = self._doQuery(cur,sql)
# next we scramble these so that we get a random sampling from the random sampling
random.shuffle(temp)
# finally pick the exact count
ans = temp[0:count]
self._atrs['base']['knowledgeCells'] += len(dataColumns) * len(ans)
return(ans)
if method == 'users' and count:
# get the full list of distinct UIDs
sql = str(f"SELECT DISTINCT {uid} from {table}")
uidList = self._doQuery(cur,sql)
# next we scramble these so that we can get a random sampling
random.shuffle(uidList)
# pick the exact count of UIDs
uidList = uidList[0:count]
sql = initSql + str(f" FROM {table} WHERE {uid} IN (")
for pair in self._colNamesTypes:
if pair[0] == uid:
colType = pair[1]
break
for uidVal in uidList:
if "text" in colType or "date" in colType or "time" in colType:
sql += str(f"'{uidVal[0]}', ")
else:
sql += str(f"{uidVal[0]}, ")
sql = sql[0:-2]
sql += ")"
ans = self._doQuery(cur,sql)
self._atrs['base']['knowledgeCells'] += len(dataColumns) * len(ans)
return(ans)
return None
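# A few illustrative calls (column names are hypothetical):
#
#   # roughly 10% of all rows, returning two columns:
#   rows = attack.getPriorKnowledge(['age', 'zipcode'], 'rows', fraction=0.1)
#   # all rows for exactly 20 randomly chosen users:
#   rows = attack.getPriorKnowledge(['age'], 'users', count=20)
#   # all rows where age is between 30 and 40 inclusive:
#   rows = attack.getPriorKnowledge(['zipcode'], 'rows',
#                                   selectColumn='age', colRange=[30, 40])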
# -------------- Private Methods -------------------
def _doQuery(self,cur,sql):
try:
cur.execute(sql)
except psycopg2.Error as e:
print(f"Error: getPublicColValues() query: '{e}'")
self.cleanUp(cleanUpCache=False, doExit=True)
ans = cur.fetchall()
return ans
def _cleanPasswords(self):
if 'attack' in self._atrs:
if ('anonDb' in self._atrs['attack'] and
'password' in self._atrs['attack']['anonDb']):
self._atrs['attack']['anonDb']['password'] = 'xxxxxxx'
if ('rawDb' in self._atrs['attack'] and
'password' in self._atrs['attack']['rawDb']):
self._atrs['attack']['rawDb']['password'] = 'xxxxxxx'
if ('pubDb' in self._atrs['attack'] and
'password' in self._atrs['attack']['pubDb']):
self._atrs['attack']['pubDb']['password'] = 'xxxxxxx'
return
def _assignGlobalParams(self, params):
self._pp = pprint.PrettyPrinter(indent=4)
for key, val in params.items():
self._p[key] = val
# assign verbose value to a smaller variable name
if key == "verbose":
if val != False:
self._vb = True
# Check criteria
if key == "criteria":
if (val == 'singlingOut' or val == 'inference' or
val == 'linkability'):
self._cr = val
else:
print("""Error: criteria must be one of 'singlingOut',
'inference', or 'linkability'""")
sys.exit('')
def _setupLocalCacheDB(self):
path = self._p['locCacheDir'] + "/" + self._p['name'] + ".db"
conn = sqlite3.connect(path)
cur = conn.cursor()
if self._p['flushCache'] == True:
sql = "DROP TABLE IF EXISTS tab"
if self._vb: print(f" cache DB: {sql}")
cur.execute(sql)
sql = """CREATE TABLE IF NOT EXISTS tab
(qid text primary key, answer text)"""
if self._vb: print(f" cache DB: {sql}")
cur.execute(sql)
# conn.commit()
cur.execute("PRAGMA journal_mode=WAL;")
# conn.commit()
conn.close()
def _removeLocalCacheDB(self):
path = self._p['locCacheDir'] + "/" + self._p['name'] + ".db"
max_attempts = 5
attempt = 0
removeFlag = False
_ex = None
if os.path.exists(path):
while attempt <= max_attempts:
attempt += 1
try:
os.remove(path)
removeFlag = True
break
except Exception as ex:
_ex = ex
removeFlag = False
time.sleep(0.3)
if not removeFlag:
logging.error(f"cache db removing error after {attempt} attempts.\n"
f"ERROR: Failed to remove cache DB {path} => ex: {_ex}")
else:
logging.info(f"cache db removed successfully after {attempt} attempt(s).")
def removeLocalCacheDBWrapper(self):
return self._removeLocalCacheDB()
def _setupThreadsAndQueues(self):
self._anonThreads = []
self._rawThreads = []
self._pubThreads = []
self._exploreQ = queue.Queue()
self._knowledgeQ = queue.Queue()
self._attackQ = queue.Queue()
self._claimQ = queue.Queue()
self._guessQ = queue.Queue()
self._rawQ = queue.Queue()
if self._cr == 'linkability':
self._pubQ = queue.Queue()
self._anonQ = queue.Queue()
backQ = queue.Queue()
for i in range(self._p['numRawDbThreads']):
d = dict(db=self._p['rawDb'], q=self._rawQ,
kind='raw', backQ=backQ)
t = EnhancedThread(target=self._dbWorker, kwargs=d)
t.start()
self._rawThreads.append(t)
for i in range(self._p['numAnonDbThreads']):
d = dict(db=self._p['anonDb'], q=self._anonQ,
kind='anon', backQ=backQ)
t = EnhancedThread(target=self._dbWorker, kwargs=d)
t.start()
self._anonThreads.append(t)
if self._cr == 'linkability':
for i in range(self._p['numPubDbThreads']):
d = dict(db=self._p['pubDb'], q=self._pubQ,
kind='pub', backQ=backQ)
t = EnhancedThread(target=self._dbWorker, kwargs=d)
t.start()
self._pubThreads.append(t)
num = (self._p['numRawDbThreads'] + self._p['numAnonDbThreads'])
if self._cr == 'linkability':
num += self._p['numPubDbThreads']
# Make sure all the worker threads are ready
for i in range(num):
msg = backQ.get()
if self._vb: print(f"{msg} is ready")
backQ.task_done()
def _dbWorker(self, db, q, kind, backQ):
# uber dp has a different interface than aircloak or postgres
if db['type'] == 'uber_dp':
if self._vb: print(f"Starting {__name__}.serverWorker:{db, kind}")
me = threading.current_thread()
backQ.put(me)
while True:
jobOrig = q.get()
q.task_done()
if jobOrig is None:
if self._vb:
print(f" {me}: serverWorker done {db, kind}")
break
# make a copy for passing around
job = copy.copy(jobOrig)
replyQ = job['q']
replies = [] # holds all the reply dicts
for query in job['queries']:
reply = self._processUberQuery(query)
replies.append(reply)
job['replies'] = replies
replyQ.put(job)
elif db['type'] == 'aircloak' or db['type'] == 'postgres':
if self._vb: print(f"Starting {__name__}.dbWorker:{db, kind}")
me = threading.current_thread()
d = getDatabaseInfo(db)
# Establish connection to database
connStr = str(
f"host={d['host']} port={d['port']} dbname={d['dbname']} user={d['user']} password={d['password']}")
if self._vb: print(f" {me}: Connect to DB with DSN '{connStr}'")
conn = psycopg2.connect(connStr)
cur = conn.cursor()
# Establish connection to local cache
path = self._p['locCacheDir'] + "/" + self._p['name'] + ".db"
# Set timeout low so that we don't spend a lot of time inserting
# into the cache in case it gets overloaded
connInsert = sqlite3.connect(path, timeout=0.1)
curInsert = connInsert.cursor()
connRead = sqlite3.connect(path)
curRead = connRead.cursor()
backQ.put(me)
while True:
if isinstance(me, EnhancedThread) and me.stopped():
logging.info(f' > {me.getName()} stopped.')
return
try:
jobOrig = q.get(block=True, timeout=3)
except queue.Empty:
continue
q.task_done()
if jobOrig is None:
if self._vb: print(f" {me}: dbWorker done {db, kind}")
conn.close()
connRead.close()
connInsert.close()
break
# make a copy for passing around
job = copy.copy(jobOrig)
replyQ = job['q']
replies = []
for query in job['queries']:
reply = self._processQuery(query, conn, cur,
connInsert, curInsert, curRead)
replies.append(reply)
job['replies'] = replies
replyQ.put(job)
def _processUberQuery(self, query):
# Once the session ID is defined, we stay in that session
# ONLY `epsilon` and the `query` can be set
# `budget` and `dbname` just have placeholders because they cannot be changed anyway
request = {
'query': query['sql'],
'epsilon': str(query['epsilon']),
'count' : '1', # the interface is designed such that repeated attacks are triggered
# by separate askAttack()/getAttack() calls, so the server's ability to execute the
# same query several times is not used
'budget': 'None',
'dbname': 'None',
'sid': self._sid
}
start = time.perf_counter() # store the time of query execution
url = self._p['anonDb']['host']
headers = {'Content-Type': 'application/json',
'Accept': 'application/json'} # Headers to be sent in the client request
# Client stores the response sent by the simpleServer.py
try:
response = requests.get(url, json=request, headers=headers, timeout=100, verify=True)
resp = response.json() # Convert response sent by server to JSON
if self._vb:
print("Server response for the given query: ")
print(resp)
if 'Error' in resp['Server Response']:
print(f"Uber Server response error: {resp['Server Response']['Error']}")
reply = dict(error=resp['Server Response']['Error'])
else:
# the answer of dp queries is a single value (as it computes the aggregate over several query rows)
# to match the format needed to compute number of cells, we still need two dimensions
# therefore, [[]]
ans = [[float((resp['Server Response']['Result']))]]# record the answer and append it as a 1-element list of float
# for statistics. Only makes sense to count query if it went through
self._op['numQueries'] += 1
# after all for loops find the shape of the resulting answers
numCells = self._computeNumCells(ans)
# format the reply similarly as for aircloak and postgres
reply = dict(answer=ans, cells=numCells,
remaining_dp_budget=float(resp['Server Response']['Remaining Budget']))
except requests.ConnectionError as e:
print("Connection Error. Make sure you are connected to Internet.")
print(str(e))
reply = dict(error=str(e))
except requests.Timeout as e:
print("Timeout Error")
print(str(e))
reply = dict(error=str(e))
except requests.RequestException as e:
print("General Error")
print(str(e))
reply = dict(error=str(e))
except KeyboardInterrupt:
print("Program closed")
reply = dict(error="Program closed")
reply['query'] = query
# calculate the time we needed for the query
end = time.perf_counter()
duration = end - start
self._op['timeQueries'] += duration
return reply
def _processQuery(self, query, conn, cur, connInsert, curInsert, curRead, queryType='db'):
# record and remove the return queue
# queryType specifies if we are asking the queries from a db (aircloak, postgres)
# or from a server, like uber_dp
if queryType == 'server':
pass
elif queryType == 'db':
cache = query['cache']
del query['cache']
# Check the cache for the answer
# Note that at this point query is a dict
# containing the sql, the db (raw, anon, or pub),
# and any tags that the source added
cachedReply = None
if cache:
cachedReply = self._getCache(curRead, query)
if cachedReply:
if self._vb: print(" Answer from cache")
if 'answer' in cachedReply:
numCells = self._computeNumCells(cachedReply['answer'])
cachedReply['cells'] = numCells
return cachedReply
else:
start = time.perf_counter()
try:
cur.execute(query['sql'])
except psycopg2.Error as e:
reply = dict(error=e.pgerror)
else:
ans = cur.fetchall()
numCells = self._computeNumCells(ans)
reply = dict(answer=ans, cells=numCells)
end = time.perf_counter()
duration = end - start
self._op['numQueries'] += 1
self._op['timeQueries'] += duration
reply['query'] = query
# only cache if the native query is slow
if duration > 0.1:
# self._putCache(connInsert, curInsert, query, reply)
self.cacheQueue.put([connInsert, curInsert, query, reply])
return reply
def _checkInference(self, ans):
# column 0 must be UID
# User is inferred if all users in answer have same attributes
# Returns 1 if inference claim correct, else returns 0
if len(ans) == 0:
return 0
numRows = len(ans)
numColumns = len(ans[0])
if numColumns < 2:
# Can't test inference unless there is at least one column
# (other than UID) that is the same
return 0
for c in range(1, numColumns):
val = ans[0][c]
for r in range(1, numRows):
if val != ans[r][c]:
return 0
return 1
def _checkLinkability(self, ans):
# The test is the same as with singling out
# Returns 1 if linkability claim correct, else returns 0
return self._checkSinglingOut(ans)
def _checkSinglingOut(self, ans):
# column 0 must be UID
# User is singled-out if there is only one distinct UID
# Returns 1 if singling out claim correct, else returns 0
if len(ans) == 0:
return 0
uids = {}
for row in ans:
uids[row[0]] = 1
numUids = len(uids)
if numUids == 1:
return 1
else:
return 0
def _computeNumCells(self, ans):
# ans is a list of tuples [(x,y),(x,y),(x,y) ...
# Count the number of columns (in the first row)
if len(ans) == 0:
return 0
numColumns = len(ans[0])
numRows = len(ans)
numCells = numColumns * numRows
return numCells
def _doParamChecks(self):
dbInfoRaw = getDatabaseInfo(self._p['rawDb'])
if not dbInfoRaw:
sys.exit('rawDb not found in database config')
if len(self._p['anonDb']) == 0:
self._p['anonDb'] = self._p['rawDb']
else:
dbInfoAnon = getDatabaseInfo(self._p['anonDb'])
if not dbInfoAnon:
sys.exit('anonDb not found in database config')
if self._cr == 'linkability':
dbInfo = getDatabaseInfo(self._p['pubDb'])
if not dbInfo:
sys.exit('Must specify pubDb if criteria is linkability')
numThreads = self._p['numRawDbThreads'] + self._p['numAnonDbThreads']
if self._cr == 'linkability':
numThreads += self._p['numPubDbThreads']
if numThreads > 50:
sys.exit("Error: Can't have more than 50 threads total")
def _getCache(self, cur, query):
path = self._p['locCacheDir'] + "/" + self._p['name'] + ".db"
my_conn = sqlite3.connect(path, timeout=0.1)
my_cur = my_conn.cursor()
# turn the query (dict) into a string
qStr = self._dict2Str(query)
if qStr is None:
return None
sql = str(f"SELECT answer FROM tab where qid = '{qStr}'")
if self._vb: print(f" cache DB: {sql}")
start = time.perf_counter()
for z in range(1,11):
try:
# cur.execute(sql)
my_cur.execute(sql)
except sqlite3.OperationalError as e:
# database is locked
if self._p['verbose'] or self._vb:
logging.warning(f'>> reading from cache DB: {z} attempt(s). Coming next try '
f'soon...')
err = e
time.sleep(0.5)
continue
except (sqlite3.Error, Exception) as e:
if self._p['verbose'] or self._vb:
logging.warning(f"getCache error '{e.args[0]}' attempt: {z}. Coming next try "
f"soon...")
err = e
time.sleep(0.5)
continue
else:
break
else:
if self._p['verbose'] or self._vb:
logging.error(f'>> could not read from cache DB >> ERROR: {err}')
return None
end = time.perf_counter()
self._op['numCacheGets'] += 1
self._op['timeCacheGets'] += (end - start)
answer = my_cur.fetchone()  # NOTE: must read from my_cur here, not the caller's cur
my_cur.close()
my_conn.close()
if not answer:
return None
rtnDict = self._str2Dict(answer[0])
return rtnDict
def _putCache(self, conn, cur, query, reply):
# turn the query and reply (dict) into a string
# Establish connection to local cache
path = self._p['locCacheDir'] + "/" + self._p['name'] + ".db"
qStr = self._dict2Str(query)
if qStr is None:
return
rStr = self._dict2Str(reply)
if rStr is None:
return
sql = str(f"INSERT INTO tab VALUES ('{qStr}','{rStr}')")
if self._vb: print(f" cache DB: {sql}")
start = time.perf_counter()
err = None
my_conn = None
my_cur = None
for z in range(10):
try:
# cur.execute(sql)
# conn.commit()
my_conn = sqlite3.connect(path, timeout=0.1)
my_cur = my_conn.cursor()
my_cur.execute(sql)
my_conn.commit()
except sqlite3.IntegrityError as e:
if self._p['verbose'] or self._vb:
logging.warning(f"putCache error [qid exists in cached queries] '{e.args[0]}' ")
break
except sqlite3.OperationalError as e:
# database is locked
if self._p['verbose'] or self._vb:
logging.warning(f"putCache attempt: {z}. Coming next try "
f"soon...")
err = e
time.sleep(0.5)
continue
except (sqlite3.Error, Exception) as e:
if self._p['verbose'] or self._vb:
logging.warning(f"putCache error '{e.args[0]}' attempt: {z}. Coming next try "
f"soon...")
err = e
time.sleep(0.5)
continue
else:
break
finally:
try:
if my_cur:
my_cur.close()
if my_conn:
my_conn.close()
except sqlite3.ProgrammingError:
# cursor and connection are already closed
pass
else:
# raise err
if self._p['verbose'] or self._vb:
logging.error(f'>> could not insert into cache DB >> ERROR: {err}')
end = time.perf_counter()
self._op['numCachePuts'] += 1
self._op['timeCachePuts'] += (end - start)
def putCacheWrapper(self, conn, cur, query, reply):
self._putCache(conn, cur, query, reply)
def _dict2Str(self, d):
try:
dStr = simplejson.dumps(d)
except TypeError:
print("simpleJson failed")
return None
dByte = str.encode(dStr)
dByte64 = base64.b64encode(dByte)
try:
dByte64Str = str(dByte64, "utf-8")
except MemoryError:
print("str(dByte64) failed")
return None
return dByte64Str
def _str2Dict(self, dByte64Str):
dByte64 = str.encode(dByte64Str)
dByte = base64.b64decode(dByte64)
dStr = str(dByte, "utf-8")
d = simplejson.loads(dStr)
return d
def _makeSqlFromSpec(self, spec):
sql = "select "
if 'known' in spec:
numKnown = len(spec['known'])
else:
numKnown = 0
if 'guess' in spec:
numGuess = len(spec['guess'])
else:
numGuess = 0
if self._cr == 'inference':
sql += str(f"{self._p['uid']}, ")
for i in range(numGuess):
sql += str(f"{spec['guess'][i]['col']}")
if i == (numGuess - 1):
sql += " "
else:
sql += ", "
sql += str(f"from {self._p['table']} ")
if numKnown:
sql += "where "
for i in range(numKnown):
sql += str(f"{spec['known'][i]['col']} = ")
sql += str(f"'{spec['known'][i]['val']}' ")
if i == (numKnown - 1):
sql += " "
else:
sql += "and "
elif self._cr == 'singlingOut' or self._cr == 'linkability':
sql += str(f"{self._p['uid']} from {self._p['table']} where ")
for i in range(numKnown):
sql += str(f"{spec['known'][i]['col']} = ")
sql += str(f"'{spec['known'][i]['val']}' and ")
for i in range(numGuess):
sql += str(f"{spec['guess'][i]['col']} = ")
sql += str(f"'{spec['guess'][i]['val']}' ")
if i == (numGuess - 1):
sql += " "
else:
sql += "and "
return sql
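# For instance, with criteria 'inference' and the (hypothetical) spec
#   {'known': [{'col': 'zipcode', 'val': '12345'}],
#    'guess': [{'col': 'salary', 'val': '50000'}]}
# this produces roughly:
#   select uid, salary from <table> where zipcode = '12345'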
def _makeSqlConfFromSpec(self, spec):
sqls = []
numGuess = len(spec['guess'])
if self._cr == 'inference' or self._cr == 'singlingOut':
sql = str(f"select count(distinct {self._p['uid']}) from {self._p['table']} where ")
# This first sql learns the number of rows matching the
# guessed values
for i in range(numGuess):
sql += str(f"{spec['guess'][i]['col']} = ")
sql += str(f"'{spec['guess'][i]['val']}'")
if i != (numGuess - 1):
sql += " and "
sqls.append(sql)
# This second sql learns the total number of rows (should
# normally be a cached result)
sql = str(f"select count(distinct {self._p['uid']}) from {self._p['table']}")
sqls.append(sql)
elif self._cr == 'linkability':
# nothing happens for linkability
pass
return sqls
def _addToAtkRes(self, label, spec, val):
"""Adds the value to each column in the guess"""
for tup in spec['guess']:
col = tup['col']
if col not in self._atrs['col']:
print(f"Error: addToAtkRes(): Bad column in spec: '{col}'")
self.cleanUp(cleanUpCache=False, doExit=True)
if label not in self._atrs['col'][col]:
print(f"Error: addToAtkRes(): Bad label '{label}'")
self.cleanUp(cleanUpCache=False, doExit=True)
self._atrs['col'][col][label] += val
def _initAtkRes(self):
self._atrs = {}
self._atrs['attack'] = {}
self._atrs['base'] = {}
self._atrs['tableStats'] = {}
self._atrs['col'] = {}
# ----- Attack parameters
self._atrs['attack']['attackName'] = self._p['name']
self._atrs['attack']['rawDb'] = self._p['rawDb']
self._atrs['attack']['anonDb'] = self._p['anonDb']
if self._cr == 'linkability':
self._atrs['attack']['pubDb'] = self._p['pubDb']
self._atrs['attack']['criteria'] = self._p['criteria']
self._atrs['attack']['table'] = self._p['table']
# add parameters for the database machine itself
db = getDatabaseInfo(self._p['rawDb'])
self._atrs['attack']['rawHost'] = db['host']
self._atrs['attack']['rawDbName'] = db['dbname']
self._atrs['attack']['rawPort'] = db['port']
if self._cr == 'linkability':
db = getDatabaseInfo(self._p['pubDb'])
self._atrs['attack']['pubHost'] = db['host']
self._atrs['attack']['pubDbName'] = db['dbname']
self._atrs['attack']['pubPort'] = db['port']
db = getDatabaseInfo(self._p['anonDb'])
self._atrs['attack']['anonHost'] = db['host']
self._atrs['attack']['anonDbName'] = db['dbname']
self._atrs['attack']['anonPort'] = db['port']
# and a timestamp
self._atrs['attack']['startTime'] = str(datetime.datetime.now())
# ----- Params for computing knowledge:
# number of prior knowledge cells requested
self._atrs['base']['knowledgeCells'] = 0
# number of times knowledge was queried
self._atrs['base']['knowledgeGets'] = 0
# ----- Params for computing how much work needed to attack:
# number of attack cells requested
self._atrs['base']['attackCells'] = 0
# number of times attack was queried
self._atrs['base']['attackGets'] = 0
self._atrs['tableStats']['colNamesAndTypes'] = self._colNamesTypes
self._atrs['tableStats']['numColumns'] = len(self._colNamesTypes)
for tup in self._colNamesTypes:
col = tup[0]
if self._vb: print(f"initAtkRes() init column '{col}'")
self._atrs['col'][col] = {}
# ----- Params for computing claim success rate:
# total possible number of claims
self._atrs['col'][col]['claimTrials'] = 0
# actual number of claims
self._atrs['col'][col]['claimMade'] = 0
# number of correct claims
self._atrs['col'][col]['claimCorrect'] = 0
# number of claims that produced bad SQL answer
self._atrs['col'][col]['claimError'] = 0
# claims where the attacker chose to pass (not make a claim),
# but where the claim would have been correct
self._atrs['col'][col]['claimPassCorrect'] = 0
# ----- Params for computing confidence:
# sum of all known count to full count ratios
self._atrs['col'][col]['sumConfidenceRatios'] = 0
# number of such ratios
self._atrs['col'][col]['numConfidenceRatios'] = 0
# average confidence ratio (division of above two params)
self._atrs['col'][col]['avgConfidenceRatios'] = 0
def _initOp(self):
self._op['numQueries'] = 0
self._op['timeQueries'] = 0
self._op['numCachePuts'] = 0
self._op['timeCachePuts'] = 0
self._op['numCacheGets'] = 0
self._op['timeCacheGets'] = 0
def _initCounters(self):
self._exploreCounter = 0
self._knowledgeCounter = 0
self._attackCounter = 0
self._claimCounter = 0
self._guessCounter = 0
def _initUberDPSession(self):
# Client establishes a session
session = requests.Session()
session.get_orig, session.get = session.get, functools.partial(session.get, timeout=20)
# remember the session to close it if necessary
self._session = session
# function to initialize the session with the dp server
try:
# this is the initial query.
# its only purpose is to obtain a session ID and to define a budget
# The budget is set in the initial request only
# Once the budget is set, no further modification to the budget
# is possible in subsequent requests
request = {
'query': "", # empty query, just serves to get a session ID
'epsilon': '0.0', # nothing used up in the initialization phase
'budget': self._p['dp_budget'], # the numeric values are sent as strings
'dbname': self._p['rawDb']['dbname'], # name of the raw db
'sid': '' # When sid is Null it indicates start of a session
}
# the database for anonymization here is uber
url = self._p['anonDb']['host']
headers = {'Content-Type': 'application/json',
'Accept': 'application/json'}
# Client stores the response sent by the simpleServer.py
response = requests.get(url, json=request, headers=headers, timeout=20, verify=True)
resp = response.json() # Convert response sent by server to JSON
if 'Error' in resp['Server Response']:
pprint.pprint(resp) # Client prints the data returned by the server
else: # if no error was encountered
if self._vb:
pprint.pprint("Setting up connection with Uber_DP Server")
pprint.pprint(resp) # Client prints the data returned by the server
# in case there is no error, but we are at the "dummy query" to get the session ID
self._sid = resp['Server Response']['Session ID'] # Set Session ID to value returned by server
except requests.ConnectionError as e:
print("Connection Error. Make sure you are connected to Internet.")
print(str(e))
except requests.Timeout as e:
print("Timeout Error")
print(str(e))
except requests.RequestException as e:
print("General Error")
print(str(e))
except KeyboardInterrupt:
print("Program closed")
class EnhancedThread(threading.Thread):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.daemon = True
self._stopFlag = threading.Event()
def stop(self):
self._stopFlag.set()
def stopped(self):
        return self._stopFlag.is_set()
class CacheThread(EnhancedThread):
def __init__(self, theQueue, atcObject):
super().__init__()
self.theQueue = theQueue
self.atcObject = atcObject
        self.name += " (cache thread)"
self.dbConnection = None
def run(self):
while True:
if self.stopped():
                logging.info(f' > {self.name} stopped.')
break
try:
data = self.theQueue.get(block=True, timeout=3)
except queue.Empty:
continue
if data is not None:
self.atcObject.putCacheWrapper(*data)
self.dbConnection = data[0] # this is connInsert for closing later
if self.atcObject._p['verbose'] or self.atcObject._vb:
                    printTitle(f'cache insert successful. queue length: {self.theQueue.qsize()}')
self.theQueue.task_done()
def stop(self):
logging.debug("CacheThread received stop signal")
super().stop()
if self.dbConnection:
try:
self.dbConnection.interrupt()
except sqlite3.ProgrammingError:
pass
else:
logging.debug("interrupt signal sent to cacheDb for safe deleting cacheDb file later.")
def cleanBgThreads():
for t in threading.enumerate():
if isinstance(t, EnhancedThread) and (not t.stopped()):
t.stop()
t.join(timeout=1.0)
def printTitle(text):
print(f'\n{" "+text:->46}\n')
def signal_kill_handler(signum, frame):
global atcObject
printTitle("Terminating the program ...")
thread_info = (
(f' >> {set([t.name for t in threading.enumerate() if t != threading.main_thread()])} \n'
f' > sending termination signal to all. please wait ... ') if threading.active_count() > 1
else ''
)
logging.info(f'\n > active background threads: {threading.active_count() - 1} \n'
f'{thread_info}')
cleanBgThreads()
if atcObject:
atcObject.cleanUp(cleanUpCache=False)
sys.exit(-1)
def on_exit():
    if any(isinstance(t, EnhancedThread) and not t.stopped() for t in threading.enumerate()):
        cleanBgThreads()
    # busy-wait until every background thread has exited before the interpreter shuts down
    while threading.active_count() > 1:
        pass
signal.signal(signal.SIGTERM, signal_kill_handler)
signal.signal(signal.SIGINT, signal_kill_handler)
atexit.register(on_exit)
|
PypiClean
|
/ka-lite-static-0.17.6b2.tar.gz/ka-lite-static-0.17.6b2/kalite/packages/bundled/django/contrib/admin/validation.py
|
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.db.models.fields import FieldDoesNotExist
from django.forms.models import (BaseModelForm, BaseModelFormSet, fields_for_model,
_get_foreign_key)
from django.contrib.admin import ListFilter, FieldListFilter
from django.contrib.admin.util import get_fields_from_path, NotRelationField
from django.contrib.admin.options import (flatten_fieldsets, BaseModelAdmin,
ModelAdmin, HORIZONTAL, VERTICAL)
__all__ = ['validate']
def validate(cls, model):
"""
Does basic ModelAdmin option validation. Calls custom validation
classmethod in the end if it is provided in cls. The signature of the
custom validation classmethod should be: def validate(cls, model).
"""
# Before we can introspect models, they need to be fully loaded so that
# inter-relations are set up correctly. We force that here.
models.get_apps()
opts = model._meta
validate_base(cls, model)
# list_display
if hasattr(cls, 'list_display'):
check_isseq(cls, 'list_display', cls.list_display)
for idx, field in enumerate(cls.list_display):
if not callable(field):
if not hasattr(cls, field):
if not hasattr(model, field):
try:
opts.get_field(field)
except models.FieldDoesNotExist:
raise ImproperlyConfigured("%s.list_display[%d], %r is not a callable or an attribute of %r or found in the model %r."
% (cls.__name__, idx, field, cls.__name__, model._meta.object_name))
else:
# getattr(model, field) could be an X_RelatedObjectsDescriptor
f = fetch_attr(cls, model, opts, "list_display[%d]" % idx, field)
if isinstance(f, models.ManyToManyField):
raise ImproperlyConfigured("'%s.list_display[%d]', '%s' is a ManyToManyField which is not supported."
% (cls.__name__, idx, field))
# list_display_links
if hasattr(cls, 'list_display_links'):
check_isseq(cls, 'list_display_links', cls.list_display_links)
for idx, field in enumerate(cls.list_display_links):
if field not in cls.list_display:
raise ImproperlyConfigured("'%s.list_display_links[%d]' "
"refers to '%s' which is not defined in 'list_display'."
% (cls.__name__, idx, field))
# list_filter
if hasattr(cls, 'list_filter'):
check_isseq(cls, 'list_filter', cls.list_filter)
for idx, item in enumerate(cls.list_filter):
# There are three options for specifying a filter:
# 1: 'field' - a basic field filter, possibly w/ relationships (eg, 'field__rel')
# 2: ('field', SomeFieldListFilter) - a field-based list filter class
# 3: SomeListFilter - a non-field list filter class
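            # Illustrative declarations of each option (field and class names
            # below are hypothetical, except DateFieldListFilter):
            #   list_filter = ('status', 'author__name')                # option 1
            #   list_filter = (('pub_date', DateFieldListFilter),)      # option 2
            #   list_filter = (MyCustomListFilter,)                     # option 3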
if callable(item) and not isinstance(item, models.Field):
# If item is option 3, it should be a ListFilter...
if not issubclass(item, ListFilter):
raise ImproperlyConfigured("'%s.list_filter[%d]' is '%s'"
" which is not a descendant of ListFilter."
% (cls.__name__, idx, item.__name__))
# ... but not a FieldListFilter.
if issubclass(item, FieldListFilter):
raise ImproperlyConfigured("'%s.list_filter[%d]' is '%s'"
" which is of type FieldListFilter but is not"
" associated with a field name."
% (cls.__name__, idx, item.__name__))
else:
if isinstance(item, (tuple, list)):
# item is option #2
field, list_filter_class = item
if not issubclass(list_filter_class, FieldListFilter):
raise ImproperlyConfigured("'%s.list_filter[%d][1]'"
" is '%s' which is not of type FieldListFilter."
% (cls.__name__, idx, list_filter_class.__name__))
else:
# item is option #1
field = item
# Validate the field string
try:
get_fields_from_path(model, field)
except (NotRelationField, FieldDoesNotExist):
raise ImproperlyConfigured("'%s.list_filter[%d]' refers to '%s'"
" which does not refer to a Field."
% (cls.__name__, idx, field))
# list_per_page = 100
    if hasattr(cls, 'list_per_page') and not isinstance(cls.list_per_page, int):
        raise ImproperlyConfigured("'%s.list_per_page' should be an integer."
            % cls.__name__)
# list_max_show_all
if hasattr(cls, 'list_max_show_all') and not isinstance(cls.list_max_show_all, int):
raise ImproperlyConfigured("'%s.list_max_show_all' should be an integer."
% cls.__name__)
# list_editable
if hasattr(cls, 'list_editable') and cls.list_editable:
check_isseq(cls, 'list_editable', cls.list_editable)
for idx, field_name in enumerate(cls.list_editable):
try:
field = opts.get_field_by_name(field_name)[0]
except models.FieldDoesNotExist:
raise ImproperlyConfigured("'%s.list_editable[%d]' refers to a "
"field, '%s', not defined on %s.%s."
% (cls.__name__, idx, field_name, model._meta.app_label, model.__name__))
if field_name not in cls.list_display:
raise ImproperlyConfigured("'%s.list_editable[%d]' refers to "
"'%s' which is not defined in 'list_display'."
% (cls.__name__, idx, field_name))
if field_name in cls.list_display_links:
raise ImproperlyConfigured("'%s' cannot be in both '%s.list_editable'"
" and '%s.list_display_links'"
% (field_name, cls.__name__, cls.__name__))
if not cls.list_display_links and cls.list_display[0] in cls.list_editable:
raise ImproperlyConfigured("'%s.list_editable[%d]' refers to"
" the first field in list_display, '%s', which can't be"
" used unless list_display_links is set."
% (cls.__name__, idx, cls.list_display[0]))
if not field.editable:
raise ImproperlyConfigured("'%s.list_editable[%d]' refers to a "
"field, '%s', which isn't editable through the admin."
% (cls.__name__, idx, field_name))
# search_fields = ()
if hasattr(cls, 'search_fields'):
check_isseq(cls, 'search_fields', cls.search_fields)
# date_hierarchy = None
if cls.date_hierarchy:
f = get_field(cls, model, opts, 'date_hierarchy', cls.date_hierarchy)
if not isinstance(f, (models.DateField, models.DateTimeField)):
raise ImproperlyConfigured("'%s.date_hierarchy is "
"neither an instance of DateField nor DateTimeField."
% cls.__name__)
# ordering = None
if cls.ordering:
check_isseq(cls, 'ordering', cls.ordering)
for idx, field in enumerate(cls.ordering):
if field == '?' and len(cls.ordering) != 1:
raise ImproperlyConfigured("'%s.ordering' has the random "
"ordering marker '?', but contains other fields as "
"well. Please either remove '?' or the other fields."
% cls.__name__)
if field == '?':
continue
if field.startswith('-'):
field = field[1:]
# Skip ordering in the format field1__field2 (FIXME: checking
# this format would be nice, but it's a little fiddly).
if '__' in field:
continue
get_field(cls, model, opts, 'ordering[%d]' % idx, field)
if hasattr(cls, "readonly_fields"):
check_readonly_fields(cls, model, opts)
# list_select_related = False
# save_as = False
# save_on_top = False
for attr in ('list_select_related', 'save_as', 'save_on_top'):
if not isinstance(getattr(cls, attr), bool):
raise ImproperlyConfigured("'%s.%s' should be a boolean."
% (cls.__name__, attr))
# inlines = []
if hasattr(cls, 'inlines'):
check_isseq(cls, 'inlines', cls.inlines)
for idx, inline in enumerate(cls.inlines):
if not issubclass(inline, BaseModelAdmin):
raise ImproperlyConfigured("'%s.inlines[%d]' does not inherit "
"from BaseModelAdmin." % (cls.__name__, idx))
if not inline.model:
raise ImproperlyConfigured("'model' is a required attribute "
"of '%s.inlines[%d]'." % (cls.__name__, idx))
if not issubclass(inline.model, models.Model):
raise ImproperlyConfigured("'%s.inlines[%d].model' does not "
"inherit from models.Model." % (cls.__name__, idx))
validate_base(inline, inline.model)
validate_inline(inline, cls, model)
def validate_inline(cls, parent, parent_model):
# model is already verified to exist and be a Model
if cls.fk_name: # default value is None
f = get_field(cls, cls.model, cls.model._meta, 'fk_name', cls.fk_name)
if not isinstance(f, models.ForeignKey):
raise ImproperlyConfigured("'%s.fk_name is not an instance of "
"models.ForeignKey." % cls.__name__)
fk = _get_foreign_key(parent_model, cls.model, fk_name=cls.fk_name, can_fail=True)
# extra = 3
    if not isinstance(cls.extra, int):
        raise ImproperlyConfigured("'%s.extra' should be an integer."
            % cls.__name__)
# max_num = None
max_num = getattr(cls, 'max_num', None)
if max_num is not None and not isinstance(max_num, int):
raise ImproperlyConfigured("'%s.max_num' should be an integer or None (default)."
% cls.__name__)
# formset
if hasattr(cls, 'formset') and not issubclass(cls.formset, BaseModelFormSet):
raise ImproperlyConfigured("'%s.formset' does not inherit from "
"BaseModelFormSet." % cls.__name__)
# exclude
if hasattr(cls, 'exclude') and cls.exclude:
if fk and fk.name in cls.exclude:
raise ImproperlyConfigured("%s cannot exclude the field "
"'%s' - this is the foreign key to the parent model "
"%s.%s." % (cls.__name__, fk.name, parent_model._meta.app_label, parent_model.__name__))
if hasattr(cls, "readonly_fields"):
check_readonly_fields(cls, cls.model, cls.model._meta)
def validate_fields_spec(cls, model, opts, flds, label):
"""
Validate the fields specification in `flds` from a ModelAdmin subclass
`cls` for the `model` model. `opts` is `model`'s Meta inner class.
Use `label` for reporting problems to the user.
The fields specification can be a ``fields`` option or a ``fields``
sub-option from a ``fieldsets`` option component.
"""
for fields in flds:
# The entry in fields might be a tuple. If it is a standalone
# field, make it into a tuple to make processing easier.
if type(fields) != tuple:
fields = (fields,)
for field in fields:
            if field in cls.readonly_fields:
                # Entries in fields don't have to be actual model fields as
                # long as they appear in readonly_fields;
                # check_readonly_fields() validates those entries.
                continue
check_formfield(cls, model, opts, label, field)
try:
f = opts.get_field(field)
except models.FieldDoesNotExist:
# If we can't find a field on the model that matches, it could be an
# extra field on the form; nothing to check so move on to the next field.
continue
if isinstance(f, models.ManyToManyField) and not f.rel.through._meta.auto_created:
raise ImproperlyConfigured("'%s.%s' "
"can't include the ManyToManyField field '%s' because "
"'%s' manually specifies a 'through' model." % (
cls.__name__, label, field, field))
def validate_base(cls, model):
opts = model._meta
# raw_id_fields
if hasattr(cls, 'raw_id_fields'):
check_isseq(cls, 'raw_id_fields', cls.raw_id_fields)
for idx, field in enumerate(cls.raw_id_fields):
f = get_field(cls, model, opts, 'raw_id_fields', field)
if not isinstance(f, (models.ForeignKey, models.ManyToManyField)):
raise ImproperlyConfigured("'%s.raw_id_fields[%d]', '%s' must "
"be either a ForeignKey or ManyToManyField."
% (cls.__name__, idx, field))
# fields
if cls.fields: # default value is None
check_isseq(cls, 'fields', cls.fields)
validate_fields_spec(cls, model, opts, cls.fields, 'fields')
if cls.fieldsets:
raise ImproperlyConfigured('Both fieldsets and fields are specified in %s.' % cls.__name__)
if len(cls.fields) > len(set(cls.fields)):
raise ImproperlyConfigured('There are duplicate field(s) in %s.fields' % cls.__name__)
# fieldsets
if cls.fieldsets: # default value is None
check_isseq(cls, 'fieldsets', cls.fieldsets)
for idx, fieldset in enumerate(cls.fieldsets):
check_isseq(cls, 'fieldsets[%d]' % idx, fieldset)
if len(fieldset) != 2:
raise ImproperlyConfigured("'%s.fieldsets[%d]' does not "
"have exactly two elements." % (cls.__name__, idx))
check_isdict(cls, 'fieldsets[%d][1]' % idx, fieldset[1])
if 'fields' not in fieldset[1]:
raise ImproperlyConfigured("'fields' key is required in "
"%s.fieldsets[%d][1] field options dict."
% (cls.__name__, idx))
validate_fields_spec(cls, model, opts, fieldset[1]['fields'], "fieldsets[%d][1]['fields']" % idx)
flattened_fieldsets = flatten_fieldsets(cls.fieldsets)
if len(flattened_fieldsets) > len(set(flattened_fieldsets)):
raise ImproperlyConfigured('There are duplicate field(s) in %s.fieldsets' % cls.__name__)
# exclude
if cls.exclude: # default value is None
check_isseq(cls, 'exclude', cls.exclude)
for field in cls.exclude:
check_formfield(cls, model, opts, 'exclude', field)
try:
f = opts.get_field(field)
except models.FieldDoesNotExist:
# If we can't find a field on the model that matches,
# it could be an extra field on the form.
continue
if len(cls.exclude) > len(set(cls.exclude)):
raise ImproperlyConfigured('There are duplicate field(s) in %s.exclude' % cls.__name__)
# form
if hasattr(cls, 'form') and not issubclass(cls.form, BaseModelForm):
raise ImproperlyConfigured("%s.form does not inherit from "
"BaseModelForm." % cls.__name__)
# filter_vertical
if hasattr(cls, 'filter_vertical'):
check_isseq(cls, 'filter_vertical', cls.filter_vertical)
for idx, field in enumerate(cls.filter_vertical):
f = get_field(cls, model, opts, 'filter_vertical', field)
if not isinstance(f, models.ManyToManyField):
raise ImproperlyConfigured("'%s.filter_vertical[%d]' must be "
"a ManyToManyField." % (cls.__name__, idx))
# filter_horizontal
if hasattr(cls, 'filter_horizontal'):
check_isseq(cls, 'filter_horizontal', cls.filter_horizontal)
for idx, field in enumerate(cls.filter_horizontal):
f = get_field(cls, model, opts, 'filter_horizontal', field)
if not isinstance(f, models.ManyToManyField):
raise ImproperlyConfigured("'%s.filter_horizontal[%d]' must be "
"a ManyToManyField." % (cls.__name__, idx))
# radio_fields
if hasattr(cls, 'radio_fields'):
check_isdict(cls, 'radio_fields', cls.radio_fields)
for field, val in cls.radio_fields.items():
f = get_field(cls, model, opts, 'radio_fields', field)
if not (isinstance(f, models.ForeignKey) or f.choices):
raise ImproperlyConfigured("'%s.radio_fields['%s']' "
"is neither an instance of ForeignKey nor does "
"have choices set." % (cls.__name__, field))
            if val not in (HORIZONTAL, VERTICAL):
raise ImproperlyConfigured("'%s.radio_fields['%s']' "
"is neither admin.HORIZONTAL nor admin.VERTICAL."
% (cls.__name__, field))
# prepopulated_fields
if hasattr(cls, 'prepopulated_fields'):
check_isdict(cls, 'prepopulated_fields', cls.prepopulated_fields)
for field, val in cls.prepopulated_fields.items():
f = get_field(cls, model, opts, 'prepopulated_fields', field)
if isinstance(f, (models.DateTimeField, models.ForeignKey,
models.ManyToManyField)):
raise ImproperlyConfigured("'%s.prepopulated_fields['%s']' "
"is either a DateTimeField, ForeignKey or "
"ManyToManyField. This isn't allowed."
% (cls.__name__, field))
check_isseq(cls, "prepopulated_fields['%s']" % field, val)
for idx, f in enumerate(val):
get_field(cls, model, opts, "prepopulated_fields['%s'][%d]" % (field, idx), f)
def check_isseq(cls, label, obj):
if not isinstance(obj, (list, tuple)):
raise ImproperlyConfigured("'%s.%s' must be a list or tuple." % (cls.__name__, label))
def check_isdict(cls, label, obj):
if not isinstance(obj, dict):
raise ImproperlyConfigured("'%s.%s' must be a dictionary." % (cls.__name__, label))
def get_field(cls, model, opts, label, field):
try:
return opts.get_field(field)
except models.FieldDoesNotExist:
raise ImproperlyConfigured("'%s.%s' refers to field '%s' that is missing from model '%s.%s'."
% (cls.__name__, label, field, model._meta.app_label, model.__name__))
def check_formfield(cls, model, opts, label, field):
if getattr(cls.form, 'base_fields', None):
try:
cls.form.base_fields[field]
except KeyError:
raise ImproperlyConfigured("'%s.%s' refers to field '%s' that "
"is missing from the form." % (cls.__name__, label, field))
else:
get_form_is_overridden = hasattr(cls, 'get_form') and cls.get_form != ModelAdmin.get_form
if not get_form_is_overridden:
fields = fields_for_model(model)
try:
fields[field]
except KeyError:
raise ImproperlyConfigured("'%s.%s' refers to field '%s' that "
"is missing from the form." % (cls.__name__, label, field))
def fetch_attr(cls, model, opts, label, field):
try:
return opts.get_field(field)
except models.FieldDoesNotExist:
pass
try:
return getattr(model, field)
except AttributeError:
raise ImproperlyConfigured("'%s.%s' refers to '%s' that is neither a field, method or property of model '%s.%s'."
% (cls.__name__, label, field, model._meta.app_label, model.__name__))
def check_readonly_fields(cls, model, opts):
check_isseq(cls, "readonly_fields", cls.readonly_fields)
for idx, field in enumerate(cls.readonly_fields):
if not callable(field):
if not hasattr(cls, field):
if not hasattr(model, field):
try:
opts.get_field(field)
except models.FieldDoesNotExist:
raise ImproperlyConfigured("%s.readonly_fields[%d], %r is not a callable or an attribute of %r or found in the model %r."
% (cls.__name__, idx, field, cls.__name__, model._meta.object_name))
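
# A minimal sketch of the kind of configuration validate() accepts. The model
# and admin class below are hypothetical and exist only to illustrate the
# checks performed above; they are not part of Django.
#
#     from django.contrib import admin
#     from django.db import models
#
#     class Article(models.Model):
#         title = models.CharField(max_length=100)
#         pub_date = models.DateTimeField()
#
#     class ArticleAdmin(admin.ModelAdmin):
#         list_display = ('title', 'pub_date')  # every entry resolves to a field
#         list_display_links = ('title',)       # must appear in list_display
#         list_editable = ('pub_date',)         # in list_display, not in the links
#         date_hierarchy = 'pub_date'           # must be a Date(Time)Field
#
#     # validate(ArticleAdmin, Article) passes; moving 'title' into
#     # list_editable would raise ImproperlyConfigured, because a field cannot
#     # be in both list_editable and list_display_links.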
|
PypiClean
|
/adafruit-circuitpython-tc74-1.0.15.tar.gz/adafruit-circuitpython-tc74-1.0.15/adafruit_tc74.py
|
from adafruit_register.i2c_struct import ROUnaryStruct
from adafruit_register.i2c_bit import RWBit, ROBit
import adafruit_bus_device.i2c_device as i2cdevice
try:
import typing # pylint: disable=unused-import
from busio import I2C
except ImportError:
pass
__version__ = "1.0.15"
__repo__ = "https://github.com/adafruit/Adafruit_CircuitPython_TC74.git"
# pylint: disable=too-few-public-methods
TC74_DEFAULT_ADDRESS = 0x48
TC74_REGISTER_TEMP = 0 # Temperature register (read-only)
TC74_REGISTER_CONFIG = 1 # Configuration register
TC74_SHUTDOWN_BIT = 7 # Shutdown bit in Configuration register
TC74_DATA_READY_BIT = 6 # Data Ready bit in Configuration register
# pylint: enable=too-few-public-methods
class TC74:
"""
Driver for the Microchip TC74 Digital Temperature Sensor.
:param ~busio.I2C i2c_bus: The I2C bus the TC74 is connected to
:param int address: The I2C device address for the sensor. Default is :const:`0x48`
**Quickstart: Importing and using the TC74**
Here is an example of using the :class:`TC74` class.
First you will need to import the libraries to use the sensor
.. code-block:: python
import board
import adafruit_tc74
Once this is done you can define your `board.I2C` object and define your sensor object
.. code-block:: python
i2c = board.I2C() # uses board.SCL and board.SDA
tc = adafruit_tc74.TC74(i2c)
Now you have access to the temperature using :attr:`temperature`.
.. code-block:: python
temperature = tc.temperature
"""
def __init__(self, i2c_bus: I2C, address: int = TC74_DEFAULT_ADDRESS) -> None:
self.i2c_device = i2cdevice.I2CDevice(i2c_bus, address)
_temperature = ROUnaryStruct(TC74_REGISTER_TEMP, "b")
shutdown = RWBit(TC74_REGISTER_CONFIG, TC74_SHUTDOWN_BIT, lsb_first=True)
"""Set to True to turn off the temperature measurement circuitry in
the sensor. While shut down the configurations properties can still
be read or written but the temperature will not be measured."""
data_ready = ROBit(TC74_REGISTER_CONFIG, TC74_DATA_READY_BIT, lsb_first=True)
"""Read-only bit that indicates when the temperature register is
ready to be read from, especially after power-on or when switching
from the shutdown to the normal state."""
@property
def temperature(self) -> int:
"""
        Returns the current temperature in degrees Celsius. Resolution
        is 1 degree Celsius.
"""
return self._temperature
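
# A short usage sketch building on the quickstart in the class docstring. It
# assumes a board exposing an I2C bus via board.SCL/board.SDA and a TC74 at the
# default address 0x48; adjust the address if your part's strapping differs.
#
#     import time
#     import board
#     import adafruit_tc74
#
#     i2c = board.I2C()
#     tc = adafruit_tc74.TC74(i2c)
#
#     tc.shutdown = False           # make sure the measurement circuitry is on
#     while not tc.data_ready:      # wait for the first conversion to finish
#         time.sleep(0.01)
#     print(f"{tc.temperature} C")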
|
PypiClean
|
/apache-superset-3.0.0rc3.tar.gz/apache-superset-3.0.0rc3/superset/static/assets/spa.62a9c98ebce8578c1ce7.entry.js
|
(()=>{var e,t,a,n,r,i,l,o,d={80760:(e,t,a)=>{var n=a(89881);e.exports=function(e,t){var a=[];return n(e,(function(e,n,r){t(e,n,r)&&a.push(e)})),a}},63105:(e,t,a)=>{var n=a(34963),r=a(80760),i=a(67206),l=a(1469);e.exports=function(e,t){return(l(e)?n:r)(e,i(t,3))}},43063:(e,t,a)=>{var n=a(34963),r=a(80760),i=a(67206),l=a(1469),o=a(94885);e.exports=function(e,t){return(l(e)?n:r)(e,o(i(t,3)))}},4756:(e,t,a)=>{"use strict";a.d(t,{J:()=>r});var n=a(67294);const r=e=>{(0,n.useEffect)(e,[])}},19259:(e,t,a)=>{"use strict";a.d(t,{Z:()=>l});var n=a(67294),r=a(17198),i=a(11965);function l(e){let{title:t,description:a,onConfirm:l,children:o}=e;const[d,s]=(0,n.useState)(!1),[c,u]=(0,n.useState)([]),b=()=>{s(!1),u([])};return(0,i.tZ)(n.Fragment,null,null==o?void 0:o((function(){for(var e=arguments.length,t=new Array(e),a=0;a<e;a++)t[a]=arguments[a];t.forEach((e=>{e&&("function"==typeof e.persist&&e.persist(),"function"==typeof e.preventDefault&&e.preventDefault(),"function"==typeof e.stopPropagation&&e.stopPropagation())})),s(!0),u(t)})),(0,i.tZ)(r.Z,{description:a,onConfirm:()=>{l(...c),b()},onHide:b,open:d,title:t}))}},21742:(e,t,a)=>{"use strict";a.d(t,{Z:()=>_});var n=a(11965),r=a(67294),i=a(51995),l=a(4715),o=a(58593),d=a(4144),s=a(79789);const c=i.iK.div`
width: 64px;
display: flex;
justify-content: flex-end;
`,u=(0,i.iK)(l.Ak)`
${e=>{let{theme:t}=e;return`\n border: 1px solid ${t.colors.grayscale.light2};\n border-radius: ${t.gridUnit}px;\n overflow: hidden;\n\n .ant-card-body {\n padding: ${4*t.gridUnit}px\n ${2*t.gridUnit}px;\n }\n .ant-card-meta-detail > div:not(:last-child) {\n margin-bottom: 0;\n }\n .gradient-container {\n position: relative;\n height: 100%;\n }\n &:hover {\n box-shadow: 8px 8px 28px 0px ${t.colors.grayscale.light1};\n transition: box-shadow ${t.transitionTiming}s ease-in-out;\n\n .cover-footer {\n transform: translateY(0);\n }\n }\n `}}
`,b=i.iK.div`
height: 264px;
border-bottom: 1px solid ${e=>{let{theme:t}=e;return t.colors.grayscale.light2}};
overflow: hidden;
.cover-footer {
transform: translateY(${e=>{let{theme:t}=e;return 9*t.gridUnit}}px);
transition: ${e=>{let{theme:t}=e;return t.transitionTiming}}s ease-out;
}
`,h=i.iK.div`
display: flex;
justify-content: flex-start;
flex-direction: column;
.card-actions {
margin-left: auto;
align-self: flex-end;
padding-left: ${e=>{let{theme:t}=e;return t.gridUnit}}px;
span[role='img'] {
display: flex;
align-items: center;
}
}
.titleRow {
display: flex;
justify-content: flex-start;
flex-direction: row;
}
`,f=i.iK.span`
overflow: hidden;
text-overflow: ellipsis;
& a {
color: ${e=>{let{theme:t}=e;return t.colors.grayscale.dark1}} !important;
}
`,m=i.iK.span`
position: absolute;
right: -1px;
bottom: ${e=>{let{theme:t}=e;return t.gridUnit}}px;
`,p=i.iK.div`
display: flex;
flex-wrap: nowrap;
position: relative;
top: -${e=>{let{theme:t}=e;return 9*t.gridUnit}}px;
padding: 0 8px;
`,g=i.iK.div`
flex: 1;
overflow: hidden;
`,v=i.iK.div`
align-self: flex-end;
margin-left: auto;
max-width: 200px;
overflow: hidden;
text-overflow: ellipsis;
`,Z=(0,i.iK)(l.Od)`
h3 {
margin: ${e=>{let{theme:t}=e;return t.gridUnit}}px 0;
}
ul {
margin-bottom: 0;
}
`,y={rows:1,width:150},w=e=>{let{to:t,children:a}=e;return(0,n.tZ)("a",{href:t},a)};function C(e){let{title:t,subtitle:a,url:c,linkComponent:C,titleRight:_,imgURL:x,imgFallbackURL:k,description:S,coverLeft:F,coverRight:T,actions:$,avatar:E,loading:I,imgPosition:N="top",cover:D,certifiedBy:P,certificationDetails:z}=e;const U=c&&C?C:w,R=(0,i.Fg)();return(0,n.tZ)(u,{cover:D||(0,n.tZ)(b,null,(0,n.tZ)(U,{to:c},(0,n.tZ)("div",{className:"gradient-container"},(0,n.tZ)(d.Z,{src:x||"",fallback:k||"",isLoading:I,position:N}))),(0,n.tZ)(p,{className:"cover-footer"},!I&&F&&(0,n.tZ)(g,null,F),!I&&T&&(0,n.tZ)(v,null,T)))},I&&(0,n.tZ)(l.Ak.Meta,{title:(0,n.tZ)(r.Fragment,null,(0,n.tZ)(h,null,(0,n.tZ)(l.Od.Input,{active:!0,size:"small",css:(0,n.iv)({width:Math.trunc(62.5*R.gridUnit)},"","")}),(0,n.tZ)("div",{className:"card-actions"},(0,n.tZ)(l.Od.Button,{active:!0,shape:"circle"})," ",(0,n.tZ)(l.Od.Button,{active:!0,css:(0,n.iv)({width:10*R.gridUnit},"","")})))),description:(0,n.tZ)(Z,{round:!0,active:!0,title:!1,paragraph:y})}),!I&&(0,n.tZ)(l.Ak.Meta,{title:(0,n.tZ)(h,null,a||null,(0,n.tZ)("div",{className:"titleRow"},(0,n.tZ)(o.u,{title:t},(0,n.tZ)(f,null,P&&(0,n.tZ)(r.Fragment,null,(0,n.tZ)(s.Z,{certifiedBy:P,details:z})," "),t)),_&&(0,n.tZ)(m,null,_),(0,n.tZ)("div",{className:"card-actions"},$))),description:S,avatar:E||null}))}C.Actions=c;const _=C},83673:(e,t,a)=>{"use strict";a.d(t,{Z:()=>_});var n=a(67294),r=a(74069),i=a(9875),l=a(35932),o=a(4715),d=a(15926),s=a.n(d),c=a(51995),u=a(55867),b=a(31069),h=a(93185),f=a(98286),m=a(14114),p=a(60718),g=a(48273),v=a(11965);const Z=o.qz.Item,y=(0,c.iK)(o.qz.Item)`
margin-bottom: 0;
`,w=c.iK.span`
margin-bottom: 0;
`;var C={name:"1blj7km",styles:"margin-top:1em"};const _=(0,m.ZP)((function(e){let{slice:t,onHide:a,onSave:d,show:c,addSuccessToast:m}=e;const[_,x]=(0,n.useState)(!1),[k]=o.qz.useForm(),[S,F]=(0,n.useState)(t.slice_name||""),[T,$]=(0,n.useState)(null),[E,I]=(0,n.useState)([]),N=(0,n.useMemo)((()=>E.map((e=>({value:e.name,label:e.name,key:e.name})))),[E.length]);function D(e){let{error:t,statusText:a,message:n}=e,i=t||a||(0,u.t)("An error has occurred");"Forbidden"===n&&(i=(0,u.t)("You do not have permission to edit this chart")),r.Z.error({title:(0,u.t)("Error"),content:i,okButtonProps:{danger:!0,className:"btn-danger"}})}const P=(0,n.useCallback)((async function(){try{var e;const a=(await b.Z.get({endpoint:`/api/v1/chart/${t.slice_id}`})).json.result;$(null==a||null==(e=a.owners)?void 0:e.map((e=>({value:e.id,label:`${e.first_name} ${e.last_name}`}))))}catch(e){D(await(0,f.O$)(e))}}),[t.slice_id]),z=(0,n.useMemo)((()=>function(e,t,a){void 0===e&&(e="");const n=s().encode({filter:e,page:t,page_size:a});return b.Z.get({endpoint:`/api/v1/chart/related/owners?q=${n}`}).then((e=>({data:e.json.result.filter((e=>e.extra.active)).map((e=>({value:e.value,label:e.text}))),totalCount:e.json.count})))}),[]),U=(0,u.t)("Owners");return(0,n.useEffect)((()=>{P()}),[P]),(0,n.useEffect)((()=>{F(t.slice_name||"")}),[t.slice_name]),(0,n.useEffect)((()=>{if((0,h.cr)(h.TT.TAGGING_SYSTEM))try{(0,g.$3)({objectType:g.g.CHART,objectId:t.slice_id,includeTypes:!1},(e=>I(e)),(e=>{D(e)}))}catch(e){D(e)}}),[t.slice_id]),(0,v.tZ)(r.Z,{show:c,onHide:a,title:(0,u.t)("Edit Chart Properties"),footer:(0,v.tZ)(n.Fragment,null,(0,v.tZ)(l.Z,{htmlType:"button",buttonSize:"small",onClick:a,cta:!0},(0,u.t)("Cancel")),(0,v.tZ)(l.Z,{htmlType:"submit",buttonSize:"small",buttonStyle:"primary",onClick:k.submit,disabled:_||!S||t.is_managed_externally,tooltip:t.is_managed_externally?(0,u.t)("This chart is managed externally, and can't be edited in Superset"):"",cta:!0},(0,u.t)("Save"))),responsive:!0,wrapProps:{"data-test":"properties-edit-modal"}},(0,v.tZ)(o.qz,{form:k,onFinish:async e=>{x(!0);const{certified_by:n,certification_details:r,description:i,cache_timeout:l}=e,o={slice_name:S||null,description:i||null,cache_timeout:l||null,certified_by:n||null,certification_details:n&&r?r:null};if(T&&(o.owners=T.map((e=>e.value))),(0,h.cr)(h.TT.TAGGING_SYSTEM))try{(0,g.$3)({objectType:g.g.CHART,objectId:t.slice_id,includeTypes:!1},(e=>{return a=e,(n=E).map((e=>{a.some((t=>t.name===e.name))||(0,g._U)({objectType:g.g.CHART,objectId:t.slice_id,includeTypes:!1},e.name,(()=>{}),(()=>{}))})),void a.map((e=>{n.some((t=>t.name===e.name))||(0,g.OY)({objectType:g.g.CHART,objectId:t.slice_id},e,(()=>{}),(()=>{}))}));var a,n}),(e=>{D(e)}))}catch(e){D(e)}try{const e=await b.Z.put({endpoint:`/api/v1/chart/${t.slice_id}`,headers:{"Content-Type":"application/json"},body:JSON.stringify(o)}),n={...o,...e.json.result,tags:E,id:t.slice_id,owners:T};d(n),m((0,u.t)("Chart properties updated")),a()}catch(e){D(await(0,f.O$)(e))}x(!1)},layout:"vertical",initialValues:{name:t.slice_name||"",description:t.description||"",cache_timeout:null!=t.cache_timeout?t.cache_timeout:"",certified_by:t.certified_by||"",certification_details:t.certified_by&&t.certification_details?t.certification_details:""}},(0,v.tZ)(o.X2,{gutter:16},(0,v.tZ)(o.JX,{xs:24,md:12},(0,v.tZ)("h3",null,(0,u.t)("Basic information")),(0,v.tZ)(Z,{label:(0,u.t)("Name"),required:!0},(0,v.tZ)(i.II,{"aria-label":(0,u.t)("Name"),name:"name",type:"text",value:S,onChange:e=>{var t;return 
F(null!=(t=e.target.value)?t:"")}})),(0,v.tZ)(Z,null,(0,v.tZ)(y,{label:(0,u.t)("Description"),name:"description"},(0,v.tZ)(i.Kx,{rows:3,style:{maxWidth:"100%"}})),(0,v.tZ)(w,{className:"help-block"},(0,u.t)("The description can be displayed as widget headers in the dashboard view. Supports markdown."))),(0,v.tZ)("h3",null,(0,u.t)("Certification")),(0,v.tZ)(Z,null,(0,v.tZ)(y,{label:(0,u.t)("Certified by"),name:"certified_by"},(0,v.tZ)(i.II,{"aria-label":(0,u.t)("Certified by")})),(0,v.tZ)(w,{className:"help-block"},(0,u.t)("Person or group that has certified this chart."))),(0,v.tZ)(Z,null,(0,v.tZ)(y,{label:(0,u.t)("Certification details"),name:"certification_details"},(0,v.tZ)(i.II,{"aria-label":(0,u.t)("Certification details")})),(0,v.tZ)(w,{className:"help-block"},(0,u.t)("Any additional detail to show in the certification tooltip.")))),(0,v.tZ)(o.JX,{xs:24,md:12},(0,v.tZ)("h3",null,(0,u.t)("Configuration")),(0,v.tZ)(Z,null,(0,v.tZ)(y,{label:(0,u.t)("Cache timeout"),name:"cache_timeout"},(0,v.tZ)(i.II,{"aria-label":"Cache timeout"})),(0,v.tZ)(w,{className:"help-block"},(0,u.t)("Duration (in seconds) of the caching timeout for this chart. Set to -1 to bypass the cache. Note this defaults to the dataset's timeout if undefined."))),(0,v.tZ)("h3",{style:{marginTop:"1em"}},(0,u.t)("Access")),(0,v.tZ)(Z,{label:U},(0,v.tZ)(o.qb,{ariaLabel:U,mode:"multiple",name:"owners",value:T||[],onChange:$,options:z,disabled:!T,allowClear:!0}),(0,v.tZ)(w,{className:"help-block"},(0,u.t)("A list of users who can alter the chart. Searchable by name or username."))),(0,h.cr)(h.TT.TAGGING_SYSTEM)&&(0,v.tZ)("h3",{css:C},(0,u.t)("Tags")),(0,h.cr)(h.TT.TAGGING_SYSTEM)&&(0,v.tZ)(Z,null,(0,v.tZ)(o.qb,{ariaLabel:"Tags",mode:"multiple",allowNewOptions:!0,value:N,options:p.m,onChange:e=>{const t=[...new Set(e.map((e=>e.label)))];I([...t.map((e=>({name:e})))])},onClear:()=>{I([])},allowClear:!0}),(0,v.tZ)(w,{className:"help-block"},(0,u.t)("A list of tags that have been applied to this chart.")))))))}))},8494:(e,t,a)=>{"use strict";a.d(t,{Z:()=>Z});var n=a(67294),r=a(93185),i=a(51995),l=a(55867),o=a(16550),d=a(73727),s=a(19259),c=a(70163),u=a(21742),b=a(37921),h=a(4715),f=a(83862),m=a(36674),p=a(34581),g=a(40768),v=a(11965);function Z(e){let{chart:t,hasPerm:a,openChartEditModal:Z,bulkSelectEnabled:y,addDangerToast:w,addSuccessToast:C,refreshData:_,loading:x,showThumbnails:k,saveFavoriteStatus:S,favoriteStatus:F,chartFilter:T,userId:$,handleBulkChartExport:E}=e;const I=(0,o.k6)(),N=a("can_write"),D=a("can_write"),P=a("can_export")&&(0,r.cr)(r.TT.VERSIONED_EXPORT),z=(0,i.Fg)(),U=(0,v.tZ)(f.v,null,D&&(0,v.tZ)(f.v.Item,null,(0,v.tZ)(s.Z,{title:(0,l.t)("Please confirm"),description:(0,v.tZ)(n.Fragment,null,(0,l.t)("Are you sure you want to delete")," ",(0,v.tZ)("b",null,t.slice_name),"?"),onConfirm:()=>(0,g.Gm)(t,C,w,_,T,$)},(e=>(0,v.tZ)("div",{role:"button",tabIndex:0,className:"action-button",onClick:e},(0,v.tZ)(c.Z.Trash,{iconSize:"l"})," ",(0,l.t)("Delete"))))),P&&(0,v.tZ)(f.v.Item,null,(0,v.tZ)("div",{role:"button",tabIndex:0,onClick:()=>E([t])},(0,v.tZ)(c.Z.Share,{iconSize:"l"})," ",(0,l.t)("Export"))),N&&(0,v.tZ)(f.v.Item,null,(0,v.tZ)("div",{role:"button",tabIndex:0,onClick:()=>Z(t)},(0,v.tZ)(c.Z.EditAlt,{iconSize:"l"})," ",(0,l.t)("Edit"))));return(0,v.tZ)(g.ZB,{onClick:()=>{!y&&t.url&&I.push(t.url)}},(0,v.tZ)(u.Z,{loading:x,title:t.slice_name,certifiedBy:t.certified_by,certificationDetails:t.certification_details,cover:(0,r.cr)(r.TT.THUMBNAILS)&&k?null:(0,v.tZ)(n.Fragment,null),url:y?void 
0:t.url,imgURL:t.thumbnail_url||"",imgFallbackURL:"/static/assets/images/chart-card-fallback.svg",description:(0,l.t)("Modified %s",t.changed_on_delta_humanized),coverLeft:(0,v.tZ)(p.Z,{users:t.owners||[]}),coverRight:(0,v.tZ)(b.Z,{type:"secondary"},t.datasource_name_text),linkComponent:d.rU,actions:(0,v.tZ)(u.Z.Actions,{onClick:e=>{e.stopPropagation(),e.preventDefault()}},$&&(0,v.tZ)(m.Z,{itemId:t.id,saveFaveStar:S,isStarred:F}),(0,v.tZ)(h.Gj,{overlay:U},(0,v.tZ)(c.Z.MoreVert,{iconColor:z.colors.grayscale.base})))}))}},65043:(e,t,a)=>{"use strict";a.d(t,{Z:()=>v});var n=a(67294),r=a(16550),i=a(73727),l=a(51995),o=a(55867),d=a(93185),s=a(40768),c=a(4715),u=a(83862),b=a(21742),h=a(70163),f=a(37921),m=a(34581),p=a(36674),g=a(11965);const v=function(e){let{dashboard:t,hasPerm:a,bulkSelectEnabled:v,userId:Z,openDashboardEditModal:y,favoriteStatus:w,saveFavoriteStatus:C,showThumbnails:_,handleBulkDashboardExport:x,onDelete:k}=e;const S=(0,r.k6)(),F=a("can_write"),T=a("can_write"),$=a("can_export"),E=(0,l.Fg)(),I=(0,g.tZ)(u.v,null,F&&y&&(0,g.tZ)(u.v.Item,null,(0,g.tZ)("div",{role:"button",tabIndex:0,className:"action-button",onClick:()=>null==y?void 0:y(t)},(0,g.tZ)(h.Z.EditAlt,{iconSize:"l"})," ",(0,o.t)("Edit"))),$&&(0,g.tZ)(u.v.Item,null,(0,g.tZ)("div",{role:"button",tabIndex:0,onClick:()=>x([t]),className:"action-button"},(0,g.tZ)(h.Z.Share,{iconSize:"l"})," ",(0,o.t)("Export"))),T&&(0,g.tZ)(u.v.Item,null,(0,g.tZ)("div",{role:"button",tabIndex:0,className:"action-button",onClick:()=>k(t)},(0,g.tZ)(h.Z.Trash,{iconSize:"l"})," ",(0,o.t)("Delete"))));return(0,g.tZ)(s.ZB,{onClick:()=>{v||S.push(t.url)}},(0,g.tZ)(b.Z,{loading:t.loading||!1,title:t.dashboard_title,certifiedBy:t.certified_by,certificationDetails:t.certification_details,titleRight:(0,g.tZ)(f.Z,null,t.published?(0,o.t)("published"):(0,o.t)("draft")),cover:(0,d.cr)(d.TT.THUMBNAILS)&&_?null:(0,g.tZ)(n.Fragment,null),url:v?void 0:t.url,linkComponent:i.rU,imgURL:t.thumbnail_url,imgFallbackURL:"/static/assets/images/dashboard-card-fallback.svg",description:(0,o.t)("Modified %s",t.changed_on_delta_humanized),coverLeft:(0,g.tZ)(m.Z,{users:t.owners||[]}),actions:(0,g.tZ)(b.Z.Actions,{onClick:e=>{e.stopPropagation(),e.preventDefault()}},Z&&(0,g.tZ)(p.Z,{itemId:t.id,saveFaveStar:C,isStarred:w}),(0,g.tZ)(c.Gj,{overlay:I},(0,g.tZ)(h.Z.MoreVert,{iconColor:E.colors.grayscale.base})))}))}},86074:(e,t,a)=>{"use strict";a.d(t,{Z:()=>w});var n=a(23279),r=a.n(n),i=a(67294),l=a(16550),o=a(73727),d=a(51995),s=a(11965),c=a(55867),u=a(94184),b=a.n(u),h=a(58593),f=a(4715),m=a(83862),p=a(35932),g=a(70163);const v=d.iK.div`
margin-bottom: ${e=>{let{theme:t}=e;return 4*t.gridUnit}}px;
.header {
font-weight: ${e=>{let{theme:t}=e;return t.typography.weights.bold}};
margin-right: ${e=>{let{theme:t}=e;return 3*t.gridUnit}}px;
text-align: left;
font-size: 18px;
padding: ${e=>{let{theme:t}=e;return 3*t.gridUnit}}px;
display: inline-block;
line-height: ${e=>{let{theme:t}=e;return 9*t.gridUnit}}px;
}
.nav-right {
display: flex;
align-items: center;
padding: ${e=>{let{theme:t}=e;return 3.5*t.gridUnit}}px 0;
margin-right: ${e=>{let{theme:t}=e;return 3*t.gridUnit}}px;
float: right;
position: absolute;
right: 0;
ul.ant-menu-root {
padding: 0px;
}
li[role='menuitem'] {
border: 0;
border-bottom: none;
&:hover {
border-bottom: transparent;
}
}
}
.nav-right-collapse {
display: flex;
align-items: center;
padding: 14px 0;
margin-right: 0;
float: left;
padding-left: 10px;
}
.menu {
background-color: ${e=>{let{theme:t}=e;return t.colors.grayscale.light5}};
.ant-menu-horizontal {
line-height: inherit;
.ant-menu-item {
border-bottom: none;
&:hover {
border-bottom: none;
text-decoration: none;
}
}
}
.ant-menu {
padding: ${e=>{let{theme:t}=e;return 4*t.gridUnit}}px 0px;
}
}
.ant-menu-horizontal:not(.ant-menu-dark) > .ant-menu-item {
margin: 0 ${e=>{let{theme:t}=e;return t.gridUnit+1}}px;
}
.menu .ant-menu-item {
li,
div {
a,
div {
font-size: ${e=>{let{theme:t}=e;return t.typography.sizes.s}}px;
color: ${e=>{let{theme:t}=e;return t.colors.secondary.dark1}};
a {
margin: 0;
padding: ${e=>{let{theme:t}=e;return 2*t.gridUnit}}px
${e=>{let{theme:t}=e;return 4*t.gridUnit}}px;
line-height: ${e=>{let{theme:t}=e;return 5*t.gridUnit}}px;
&:hover {
text-decoration: none;
}
}
}
&.no-router a {
padding: ${e=>{let{theme:t}=e;return 2*t.gridUnit}}px
${e=>{let{theme:t}=e;return 4*t.gridUnit}}px;
}
&.active a {
background: ${e=>{let{theme:t}=e;return t.colors.secondary.light4}};
border-radius: ${e=>{let{theme:t}=e;return t.borderRadius}}px;
}
}
li.active > a,
li.active > div,
div.active > div,
li > a:hover,
li > a:focus,
li > div:hover,
div > div:hover,
div > a:hover {
background: ${e=>{let{theme:t}=e;return t.colors.secondary.light4}};
border-bottom: none;
border-radius: ${e=>{let{theme:t}=e;return t.borderRadius}}px;
margin-bottom: ${e=>{let{theme:t}=e;return 2*t.gridUnit}}px;
text-decoration: none;
}
}
.btn-link {
padding: 10px 0;
}
.ant-menu-horizontal {
border: none;
}
@media (max-width: 767px) {
.header,
.nav-right {
position: relative;
margin-left: ${e=>{let{theme:t}=e;return 2*t.gridUnit}}px;
}
}
.ant-menu-submenu {
span[role='img'] {
position: absolute;
right: ${e=>{let{theme:t}=e;return-t.gridUnit-2}}px;
top: ${e=>{let{theme:t}=e;return t.gridUnit+1}}px !important;
}
}
.dropdown-menu-links > div.ant-menu-submenu-title,
.ant-menu-submenu-open.ant-menu-submenu-active > div.ant-menu-submenu-title {
color: ${e=>{let{theme:t}=e;return t.colors.primary.dark1}};
}
`,Z=e=>s.iv`
color: ${e.colors.grayscale.light1};
.ant-menu-item-active {
color: ${e.colors.grayscale.light1};
cursor: default;
}
`,{SubMenu:y}=m.$,w=e=>{var t,a,n;const[d,u]=(0,i.useState)("horizontal"),[w,C]=(0,i.useState)("nav-right");let _=!0;try{(0,l.k6)()}catch(e){_=!1}return(0,i.useEffect)((()=>{function t(){window.innerWidth<=767?u("inline"):u("horizontal"),e.buttons&&e.buttons.length>=3&&window.innerWidth>=795?C("nav-right"):e.buttons&&e.buttons.length>=3&&window.innerWidth<=795&&C("nav-right-collapse")}t();const a=r()(t,10);return window.addEventListener("resize",a),()=>window.removeEventListener("resize",a)}),[e.buttons]),(0,s.tZ)(v,null,(0,s.tZ)(f.X2,{className:"menu",role:"navigation"},e.name&&(0,s.tZ)("div",{className:"header"},e.name),(0,s.tZ)(m.v,{mode:d,style:{backgroundColor:"transparent"}},null==(t=e.tabs)?void 0:t.map((t=>(e.usesRouter||_)&&t.usesRouter?(0,s.tZ)(m.v.Item,{key:t.label},(0,s.tZ)("div",{role:"tab",className:t.name===e.activeChild?"active":""},(0,s.tZ)("div",null,(0,s.tZ)(o.rU,{to:t.url||""},t.label)))):(0,s.tZ)(m.v.Item,{key:t.label},(0,s.tZ)("div",{className:b()("no-router",{active:t.name===e.activeChild}),role:"tab"},(0,s.tZ)("a",{href:t.url,onClick:t.onClick},t.label)))))),(0,s.tZ)("div",{className:w},(0,s.tZ)(m.v,{mode:"horizontal",triggerSubMenuAction:"click"},null==(a=e.dropDownLinks)?void 0:a.map(((e,t)=>{var a;return(0,s.tZ)(y,{key:t,title:e.label,icon:(0,s.tZ)(g.Z.TriangleDown,null),popupOffset:[10,20],className:"dropdown-menu-links"},null==(a=e.childs)?void 0:a.map((e=>"object"==typeof e?e.disable?(0,s.tZ)(m.$.Item,{key:e.label,css:Z},(0,s.tZ)(h.u,{placement:"top",title:(0,c.t)("Enable 'Allow file uploads to database' in any database's settings")},e.label)):(0,s.tZ)(m.$.Item,{key:e.label},(0,s.tZ)("a",{href:e.url},e.label)):null)))}))),null==(n=e.buttons)?void 0:n.map(((e,t)=>(0,s.tZ)(p.Z,{key:t,buttonStyle:e.buttonStyle,onClick:e.onClick},e.name))))),e.children)}},32228:(e,t,a)=>{"use strict";a.d(t,{Z:()=>d});var n=a(89816),r=a(15926),i=a.n(r),l=a(14670),o=a.n(l);function d(e,t,a,r){void 0===r&&(r=200);const l=o().generate(),d=`/api/v1/${e}/export/?q=${i().encode(t)}&token=${l}`,s=document.createElement("iframe");s.style.display="none",s.src=d,document.body.appendChild(s);const c=window.setInterval((()=>{"done"===(0,n.Z)()[l]&&(window.clearInterval(c),document.body.removeChild(s),a())}),r)}},22562:(e,t,a)=>{"use strict";var n=a(67294),r=a(90731),i=a(5872),l=a.n(i),o=a(16550),d=a(73727),s=a(14890),c=a(31405),u=a(57902),b=a(38703),h=a(85198),f=a(20292),m=a(5667),p=a(38552),g=a(91263),v=a(93185),Z=a(43063),y=a.n(Z),w=a(75049),C=a(51995),_=a(55867),x=a(15926),k=a.n(x),S=a(43700),F=a(61337),T=a(21742),$=a(14114),E=a(40768),I=a(4715),N=a(12),D=a(86074),P=a(22318),z=a(78580),U=a.n(z),R=a(30381),A=a.n(R),O=a(70163),L=a(35932),M=a(39589),q=a(11965);const j={[M.g.Charts]:(0,_.t)("charts"),[M.g.Dashboards]:(0,_.t)("dashboards"),[M.g.Recents]:(0,_.t)("recents"),[M.g.SavedQueries]:(0,_.t)("saved queries")},B={[M.g.Charts]:(0,_.t)("No charts yet"),[M.g.Dashboards]:(0,_.t)("No dashboards yet"),[M.g.Recents]:(0,_.t)("No recents yet"),[M.g.SavedQueries]:(0,_.t)("No saved queries yet")},V={[M.g.Charts]:e=>(0,_.t)("%(other)s charts will appear here",{other:e}),[M.g.Dashboards]:e=>(0,_.t)("%(other)s dashboards will appear here",{other:e}),[M.g.Recents]:e=>(0,_.t)("%(other)s recents will appear here",{other:e}),[M.g.SavedQueries]:e=>(0,_.t)("%(other)s saved queries will appear here",{other:e})},H=C.iK.div`
min-height: 200px;
display: flex;
flex-direction: column;
justify-content: space-around;
`,K=C.iK.div`
Button {
svg {
color: ${e=>{let{theme:t}=e;return t.colors.grayscale.light5}};
}
}
`;function G(e){let{tableName:t,tab:a,otherTabTitle:n}=e;const r={[M.g.Charts]:"/chart/add",[M.g.Dashboards]:"/dashboard/new",[M.g.SavedQueries]:"/superset/sqllab?new=true"},i={[M.g.Charts]:"/chart/list",[M.g.Dashboards]:"/dashboard/list/",[M.g.SavedQueries]:"/savedqueryview/list/"},l={[M.g.Charts]:"empty-charts.svg",[M.g.Dashboards]:"empty-dashboard.svg",[M.g.Recents]:"union.svg",[M.g.SavedQueries]:"empty-queries.svg"},o=(0,q.tZ)("span",null,B[t]),d=(0,q.tZ)("span",{className:"no-recents"},(()=>{if(a===N.F.Viewed)return(0,_.t)("Recently viewed charts, dashboards, and saved queries will appear here");if(a===N.F.Created)return(0,_.t)("Recently created charts, dashboards, and saved queries will appear here");if(a===N.F.Other){const e=n||(0,_.t)("Other");return V[t](e)}return a===N.F.Edited?(0,_.t)("Recently edited charts, dashboards, and saved queries will appear here"):null})());return a===N.F.Mine||t===M.g.Recents||a===N.F.Other?(0,q.tZ)(H,null,(0,q.tZ)(I.HY,{image:`/static/assets/images/${l[t]}`,description:t===M.g.Recents||a===N.F.Other?d:o},t!==M.g.Recents&&(0,q.tZ)(K,null,(0,q.tZ)(L.Z,{buttonStyle:"primary",onClick:()=>{window.location.href=r[t]}},(0,q.tZ)("i",{className:"fa fa-plus"}),t===M.g.SavedQueries?(0,_.t)("SQL query"):t.split("").slice(0,t.length-1).join(""))))):(0,q.tZ)(H,null,(0,q.tZ)(I.HY,{image:"/static/assets/images/star-circle.svg",description:(0,q.tZ)("span",{className:"no-favorites"},(0,_.t)("You don't have any favorites yet!"))},(0,q.tZ)(L.Z,{buttonStyle:"primary",onClick:()=>{window.location.href=i[t]}},(0,_.t)("See all %(tableName)s",{tableName:t===M.g.SavedQueries?(0,_.t)("SQL Lab queries"):j[t]}))))}const Q=C.iK.div`
.recentCards {
max-height: none;
grid-gap: ${e=>{let{theme:t}=e;return 4*t.gridUnit+"px"}};
}
`,Y=(0,_.t)("[Untitled]"),W=(0,_.t)("Unknown"),J=e=>"dashboard_title"in e?e.dashboard_title||Y:"slice_name"in e?e.slice_name||Y:"label"in e?e.label||Y:e.item_title||Y,X=e=>{if("sql"in e)return(0,q.tZ)(O.Z.Sql,null);const t="item_url"in e?e.item_url:e.url;return null!=t&&U()(t).call(t,"dashboard")?(0,q.tZ)(O.Z.NavDashboard,null):null!=t&&U()(t).call(t,"explore")?(0,q.tZ)(O.Z.NavCharts,null):null};function ee(e){var t;let{activeChild:a,setActiveChild:r,activityData:i,user:l,isFetchingActivityData:o}=e;const[s,c]=(0,n.useState)(),[u,b]=(0,n.useState)(!1);(0,n.useEffect)((()=>{a===N.F.Edited&&(b(!0),(0,E.Ld)(l.userId).then((e=>{c([...e.editedChart,...e.editedDash]),b(!1)})))}),[a]);const h=[{name:N.F.Edited,label:(0,_.t)("Edited"),onClick:()=>{r(N.F.Edited),(0,F.LS)(F.dR.homepage_activity_filter,N.F.Edited)}},{name:N.F.Created,label:(0,_.t)("Created"),onClick:()=>{r(N.F.Created),(0,F.LS)(F.dR.homepage_activity_filter,N.F.Created)}}];return null!=i&&i[N.F.Viewed]&&h.unshift({name:N.F.Viewed,label:(0,_.t)("Viewed"),onClick:()=>{r(N.F.Viewed),(0,F.LS)(F.dR.homepage_activity_filter,N.F.Viewed)}}),u&&!s||o?(0,q.tZ)(Se,null):(0,q.tZ)(Q,null,(0,q.tZ)(D.Z,{activeChild:a,tabs:h}),(null==(t=i[a])?void 0:t.length)>0||a===N.F.Edited&&null!=s&&s.length?(0,q.tZ)(E._L,{className:"recentCards"},(a===N.F.Edited?s:i[a]).map((e=>{const t=(e=>"sql"in e?`/superset/sqllab?savedQueryId=${e.id}`:"url"in e?e.url:e.item_url)(e),a=(e=>{if("time"in e)return(0,_.t)("Viewed %s",A()(e.time).fromNow());let t;return"changed_on"in e&&(t=e.changed_on),"changed_on_utc"in e&&(t=e.changed_on_utc),(0,_.t)("Modified %s",null==t?W:A()(t).fromNow())})(e);return(0,q.tZ)(E.ZB,{key:t},(0,q.tZ)(d.rU,{to:t},(0,q.tZ)(T.Z,{cover:(0,q.tZ)(n.Fragment,null),url:t,title:J(e),description:a,avatar:X(e),actions:null})))}))):(0,q.tZ)(G,{tableName:M.g.Recents,tab:a}))}var te=a(63105),ae=a.n(te),ne=a(34858),re=a(83673),ie=a(8494),le=a(32228);const oe=(0,$.ZP)((function(e){let{user:t,addDangerToast:a,addSuccessToast:r,mine:i,showThumbnails:l,otherTabData:d,otherTabFilters:s,otherTabTitle:c}=e;const h=(0,o.k6)(),f=(0,F.rV)(F.dR.homepage_chart_filter,N.F.Other),m=ae()(d,(e=>"viz_type"in e)),{state:{loading:p,resourceCollection:g,bulkSelectEnabled:v},setResourceCollection:Z,hasPerm:y,refreshData:w,fetchData:C}=(0,ne.Yi)("chart",(0,_.t)("chart"),a,!0,f===N.F.Mine?i:m,[],!1),x=(0,n.useMemo)((()=>g.map((e=>e.id))),[g]),[k,S]=(0,ne.NE)("chart",x,a),{sliceCurrentlyEditing:T,openChartEditModal:$,handleChartUpdated:I,closeChartEditModal:P}=(0,ne.fF)(Z,g),[z,U]=(0,n.useState)(f),[R,A]=(0,n.useState)(!1),[O,L]=(0,n.useState)(!1);(0,n.useEffect)((()=>{var e;(O||z===N.F.Favorite)&&(e=z,C({pageIndex:0,pageSize:E.IV,sortBy:[{id:"changed_on_delta_humanized",desc:!0}],filters:(0,E.if)(e,M.g.Charts,t,s)})),L(!0)}),[z]);const j=e=>{const t=e.map((e=>{let{id:t}=e;return t}));(0,le.Z)("chart",t,(()=>{A(!1)})),A(!0)},B=[{name:N.F.Favorite,label:(0,_.t)("Favorite"),onClick:()=>{U(N.F.Favorite),(0,F.LS)(F.dR.homepage_chart_filter,N.F.Favorite)}},{name:N.F.Mine,label:(0,_.t)("Mine"),onClick:()=>{U(N.F.Mine),(0,F.LS)(F.dR.homepage_chart_filter,N.F.Mine)}}];return d&&B.push({name:N.F.Other,label:c,onClick:()=>{U(N.F.Other),(0,F.LS)(F.dR.homepage_chart_filter,N.F.Other)}}),p?(0,q.tZ)(Se,{cover:l}):(0,q.tZ)(u.Z,null,T&&(0,q.tZ)(re.Z,{onHide:P,onSave:I,show:!0,slice:T}),(0,q.tZ)(D.Z,{activeChild:z,tabs:B,buttons:[{name:(0,q.tZ)(n.Fragment,null,(0,q.tZ)("i",{className:"fa 
fa-plus"}),(0,_.t)("Chart")),buttonStyle:"tertiary",onClick:()=>{window.location.assign("/chart/add")}},{name:(0,_.t)("View All »"),buttonStyle:"link",onClick:()=>{const e=z===N.F.Favorite?`/chart/list/?filters=(favorite:(label:${(0,_.t)("Yes")},value:!t))`:"/chart/list/";h.push(e)}}]}),null!=g&&g.length?(0,q.tZ)(E._L,{showThumbnails:l},g.map((e=>(0,q.tZ)(ie.Z,{key:`${e.id}`,openChartEditModal:$,chartFilter:z,chart:e,userId:null==t?void 0:t.userId,hasPerm:y,showThumbnails:l,bulkSelectEnabled:v,refreshData:w,addDangerToast:a,addSuccessToast:r,favoriteStatus:S[e.id],saveFavoriteStatus:k,handleBulkChartExport:j})))):(0,q.tZ)(G,{tableName:M.g.Charts,tab:z,otherTabTitle:c}),R&&(0,q.tZ)(b.Z,null))}));var de=a(31069),se=a(42110),ce=a(33743),ue=a(120),be=a(83862),he=a(17198);se.Z.registerLanguage("sql",ce.Z);const fe=C.iK.div`
cursor: pointer;
a {
text-decoration: none;
}
.ant-card-cover {
border-bottom: 1px solid ${e=>{let{theme:t}=e;return t.colors.grayscale.light2}};
& > div {
height: 171px;
}
}
.gradient-container > div {
background-size: contain;
background-repeat: no-repeat;
background-position: center;
background-color: ${e=>{let{theme:t}=e;return t.colors.secondary.light3}};
display: inline-block;
width: 100%;
height: 179px;
background-repeat: no-repeat;
vertical-align: middle;
}
`,me=C.iK.div`
svg {
margin-left: ${e=>{let{theme:t}=e;return 10*t.gridUnit}}px;
}
.query-title {
padding: ${e=>{let{theme:t}=e;return 2*t.gridUnit+2}}px;
font-size: ${e=>{let{theme:t}=e;return t.typography.sizes.l}}px;
}
`,pe=C.iK.div`
pre {
height: ${e=>{let{theme:t}=e;return 40*t.gridUnit}}px;
border: none !important;
background-color: ${e=>{let{theme:t}=e;return t.colors.grayscale.light5}} !important;
overflow: hidden;
padding: ${e=>{let{theme:t}=e;return 4*t.gridUnit}}px !important;
}
`,ge=(0,$.ZP)((e=>{let{user:t,addDangerToast:a,addSuccessToast:r,mine:i,showThumbnails:l,featureFlag:o}=e;const{state:{loading:d,resourceCollection:s},hasPerm:c,fetchData:u,refreshData:b}=(0,ne.Yi)("saved_query",(0,_.t)("query"),a,!0,i,[],!1),[h,f]=(0,n.useState)(N.F.Mine),[m,p]=(0,n.useState)(!1),[g,v]=(0,n.useState)({}),[Z,y]=(0,n.useState)(!0),w=c("can_edit"),x=c("can_delete"),k=(0,C.Fg)();return d?(0,q.tZ)(Se,{cover:l}):(0,q.tZ)(n.Fragment,null,m&&(0,q.tZ)(he.Z,{description:(0,_.t)("This action will permanently delete the saved query."),onConfirm:()=>{m&&(e=>{let{id:n,label:i}=e;de.Z.delete({endpoint:`/api/v1/saved_query/${n}`}).then((()=>{const e={filters:(0,E.if)(N.F.Created,M.g.SavedQueries,t),pageSize:E.IV,sortBy:[{id:"changed_on_delta_humanized",desc:!0}],pageIndex:0};b(Z?e:void 0),y(!1),p(!1),r((0,_.t)("Deleted: %s",i))}),(0,E.v$)((e=>a((0,_.t)("There was an issue deleting %s: %s",i,e)))))})(g)},onHide:()=>{p(!1)},open:!0,title:(0,_.t)("Delete Query?")}),(0,q.tZ)(D.Z,{activeChild:h,tabs:[{name:N.F.Mine,label:(0,_.t)("Mine"),onClick:()=>{return(e=N.F.Mine,u({pageIndex:0,pageSize:E.IV,sortBy:[{id:"changed_on_delta_humanized",desc:!0}],filters:(0,E.if)(e,M.g.SavedQueries,t)})).then((()=>f(N.F.Mine)));var e}}],buttons:[{name:(0,q.tZ)(n.Fragment,null,(0,q.tZ)("i",{className:"fa fa-plus"}),(0,_.t)("SQL Query")),buttonStyle:"tertiary",onClick:()=>{window.location.href="/superset/sqllab?new=true"}},{name:(0,_.t)("View All »"),buttonStyle:"link",onClick:()=>{window.location.href="/savedqueryview/list"}}]}),s.length>0?(0,q.tZ)(E._L,{showThumbnails:l},s.map((e=>{var t,i,d;return(0,q.tZ)(fe,{onClick:()=>{window.location.href=`/superset/sqllab?savedQueryId=${e.id}`},key:e.id},(0,q.tZ)(T.Z,{imgURL:"",url:`/superset/sqllab?savedQueryId=${e.id}`,title:e.label,imgFallbackURL:"/static/assets/images/empty-query.svg",description:(0,_.t)("Ran %s",e.changed_on_delta_humanized),cover:null!=e&&null!=(t=e.sql)&&t.length&&l&&o?(0,q.tZ)(pe,null,(0,q.tZ)(se.Z,{language:"sql",lineProps:{style:{color:k.colors.grayscale.dark2,wordBreak:"break-all",whiteSpace:"pre-wrap"}},style:ue.Z,wrapLines:!0,lineNumberStyle:{display:"none"},showLineNumbers:!1},(0,E.IB)(e.sql,25))):!(l&&(null==e||null==(i=e.sql)||!i.length))&&(0,q.tZ)(n.Fragment,null),actions:(0,q.tZ)(me,null,(0,q.tZ)(T.Z.Actions,{onClick:e=>{e.stopPropagation(),e.preventDefault()}},(0,q.tZ)(I.Gj,{overlay:(d=e,(0,q.tZ)(be.v,null,w&&(0,q.tZ)(be.v.Item,{onClick:()=>{window.location.href=`/superset/sqllab?savedQueryId=${d.id}`}},(0,_.t)("Edit")),(0,q.tZ)(be.v.Item,{onClick:()=>{d.id&&(0,ne.bR)(d.id,a,r)}},(0,_.t)("Share")),x&&(0,q.tZ)(be.v.Item,{onClick:()=>{p(!0),v(d)}},(0,_.t)("Delete"))))},(0,q.tZ)(O.Z.MoreVert,{iconColor:k.colors.grayscale.base}))))}))}))):(0,q.tZ)(G,{tableName:M.g.SavedQueries,tab:h}))}));var ve=a(20818),Ze=a(65043);const ye=(0,$.ZP)((function(e){let{user:t,addDangerToast:a,addSuccessToast:r,mine:i,showThumbnails:l,otherTabData:d,otherTabFilters:s,otherTabTitle:c}=e;const u=(0,o.k6)(),h=(0,F.rV)(F.dR.homepage_dashboard_filter,N.F.Other),f=ae()(d,(e=>!("viz_type"in e))),{state:{loading:m,resourceCollection:p},setResourceCollection:g,hasPerm:v,refreshData:Z,fetchData:y}=(0,ne.Yi)("dashboard",(0,_.t)("dashboard"),a,!0,h===N.F.Mine?i:f,[],!1),w=(0,n.useMemo)((()=>p.map((e=>e.id))),[p]),[C,x]=(0,ne.NE)("dashboard",w,a),[k,S]=(0,n.useState)(),[T,$]=(0,n.useState)(h),[I,P]=(0,n.useState)(!1),[z,U]=(0,n.useState)(!1),[R,A]=(0,n.useState)(null);(0,n.useEffect)((()=>{var 
e;(z||T===N.F.Favorite)&&(e=T,y({pageIndex:0,pageSize:E.IV,sortBy:[{id:"changed_on_delta_humanized",desc:!0}],filters:(0,E.if)(e,M.g.Dashboards,t,s)})),U(!0)}),[T]);const O=e=>{const t=e.map((e=>{let{id:t}=e;return t}));(0,le.Z)("dashboard",t,(()=>{P(!1)})),P(!0)},L=[{name:N.F.Favorite,label:(0,_.t)("Favorite"),onClick:()=>{$(N.F.Favorite),(0,F.LS)(F.dR.homepage_dashboard_filter,N.F.Favorite)}},{name:N.F.Mine,label:(0,_.t)("Mine"),onClick:()=>{$(N.F.Mine),(0,F.LS)(F.dR.homepage_dashboard_filter,N.F.Mine)}}];return d&&L.push({name:N.F.Other,label:c,onClick:()=>{$(N.F.Other),(0,F.LS)(F.dR.homepage_dashboard_filter,N.F.Other)}}),m?(0,q.tZ)(Se,{cover:l}):(0,q.tZ)(n.Fragment,null,(0,q.tZ)(D.Z,{activeChild:T,tabs:L,buttons:[{name:(0,q.tZ)(n.Fragment,null,(0,q.tZ)("i",{className:"fa fa-plus"}),(0,_.t)("Dashboard")),buttonStyle:"tertiary",onClick:()=>{window.location.assign("/dashboard/new")}},{name:(0,_.t)("View All »"),buttonStyle:"link",onClick:()=>{const e=T===N.F.Favorite?`/dashboard/list/?filters=(favorite:(label:${(0,_.t)("Yes")},value:!t))`:"/dashboard/list/";u.push(e)}}]}),k&&(0,q.tZ)(ve.Z,{dashboardId:null==k?void 0:k.id,show:!0,onHide:()=>S(void 0),onSubmit:e=>de.Z.get({endpoint:`/api/v1/dashboard/${e.id}`}).then((e=>{let{json:t={}}=e;g(p.map((e=>e.id===t.id?t.result:e)))}),(0,E.v$)((e=>a((0,_.t)("An error occurred while fetching dashboards: %s",e)))))}),R&&(0,q.tZ)(he.Z,{description:(0,q.tZ)(n.Fragment,null,(0,_.t)("Are you sure you want to delete")," ",(0,q.tZ)("b",null,R.dashboard_title),"?"),onConfirm:()=>{(0,E.Iu)(R,Z,r,a,T,null==t?void 0:t.userId),A(null)},onHide:()=>A(null),open:!!R,title:(0,_.t)("Please confirm")}),p.length>0&&(0,q.tZ)(E._L,{showThumbnails:l},p.map((e=>(0,q.tZ)(Ze.Z,{key:e.id,dashboard:e,hasPerm:v,bulkSelectEnabled:!1,showThumbnails:l,userId:null==t?void 0:t.userId,loading:m,openDashboardEditModal:e=>S(e),saveFavoriteStatus:C,favoriteStatus:x[e.id],handleBulkDashboardExport:O,onDelete:e=>A(e)})))),0===p.length&&(0,q.tZ)(G,{tableName:M.g.Dashboards,tab:T}),I&&(0,q.tZ)(b.Z,null))})),we=(0,w.I)(),Ce=["2","3"],_e=C.iK.div`
background-color: ${e=>{let{theme:t}=e;return t.colors.grayscale.light4}};
.ant-row.menu {
margin-top: -15px;
background-color: ${e=>{let{theme:t}=e;return t.colors.grayscale.light4}};
&:after {
content: '';
display: block;
border: 1px solid ${e=>{let{theme:t}=e;return t.colors.grayscale.light2}};
margin: 0px ${e=>{let{theme:t}=e;return 6*t.gridUnit}}px;
position: relative;
width: 100%;
${E.mq[1]} {
margin-top: 5px;
margin: 0px 2px;
}
}
.ant-menu.ant-menu-light.ant-menu-root.ant-menu-horizontal {
padding-left: ${e=>{let{theme:t}=e;return 8*t.gridUnit}}px;
}
button {
padding: 3px 21px;
}
}
.ant-card-meta-description {
margin-top: ${e=>{let{theme:t}=e;return t.gridUnit}}px;
}
.ant-card.ant-card-bordered {
border: 1px solid ${e=>{let{theme:t}=e;return t.colors.grayscale.light2}};
}
.ant-collapse-item .ant-collapse-content {
margin-bottom: ${e=>{let{theme:t}=e;return-6*t.gridUnit}}px;
}
div.ant-collapse-item:last-child.ant-collapse-item-active
.ant-collapse-header {
padding-bottom: ${e=>{let{theme:t}=e;return 3*t.gridUnit}}px;
}
div.ant-collapse-item:last-child .ant-collapse-header {
padding-bottom: ${e=>{let{theme:t}=e;return 9*t.gridUnit}}px;
}
.loading-cards {
margin-top: ${e=>{let{theme:t}=e;return 8*t.gridUnit}}px;
.ant-card-cover > div {
height: 168px;
}
}
`,xe=C.iK.div`
${e=>{let{theme:t}=e;return`\n .switch {\n display: flex;\n flex-direction: row;\n margin: ${4*t.gridUnit}px;\n span {\n display: block;\n margin: ${t.gridUnit}px;\n line-height: ${3.5*t.gridUnit}px;\n }\n }\n `}}
`,ke=(0,f.Z)(),Se=e=>{let{cover:t}=e;return(0,q.tZ)(E._L,{showThumbnails:t,className:"loading-cards"},[...new Array(E.iv)].map(((e,a)=>(0,q.tZ)(T.Z,{key:a,cover:!t&&(0,q.tZ)(n.Fragment,null),description:"",loading:!0}))))},Fe=(0,$.ZP)((function(e){let{user:t,addDangerToast:a}=e;const r=(0,P.N$)(t,"SavedQuery","can_read"),i=t.userId.toString(),l=`/api/v1/log/recent_activity/?q=${k().encode({page_size:6})}`,[o,d]=(0,n.useState)("Loading"),s=(0,F.OH)(i,null);let c=!1;const u=(0,v.cr)(v.TT.THUMBNAILS);u&&(c=void 0===(null==s?void 0:s.thumbnails)||(null==s?void 0:s.thumbnails));const[b,h]=(0,n.useState)(c),[f,m]=(0,n.useState)(null),[p,g]=(0,n.useState)(null),[Z,w]=(0,n.useState)(null),[C,x]=(0,n.useState)(null),[T,$]=(0,n.useState)(!0),z=(0,F.rV)(F.dR.homepage_collapse_state,[]),[U,R]=(0,n.useState)(z),A=we.get("welcome.message"),O=we.get("welcome.banner"),L=we.get("welcome.main.replacement"),[M,j]=(0,n.useMemo)((()=>{var e;const t=null==(e=ke.common)?void 0:e.conf.WELCOME_PAGE_LAST_TAB,[a,n]=Array.isArray(t)?t:[void 0,void 0];return a&&n?[(0,_.t)(a),n]:"all"===t?[(0,_.t)("All"),[]]:[(0,_.t)("Examples"),[{col:"created_by",opr:"rel_o_m",value:0}]]}),[]);(0,n.useEffect)((()=>{if(!j)return;const e=(0,F.rV)(F.dR.homepage_activity_filter,null);R(z.length>0?z:Ce),(0,E.xF)(t.userId,l,a,j).then((t=>{const a={};if(a[N.F.Other]=t.other,t.viewed){const n=y()(t.viewed,["item_url",null]).map((e=>e));a[N.F.Viewed]=n,!e&&a[N.F.Viewed]?d(N.F.Viewed):e||a[N.F.Viewed]?d(e||N.F.Created):d(N.F.Created)}else d(e||N.F.Created);m((e=>({...e,...a})))})).catch((0,E.v$)((e=>{m((e=>({...e,[N.F.Viewed]:[]}))),a((0,_.t)("There was an issue fetching your recent activity: %s",e))})));const n=[{col:"created_by",opr:"rel_o_m",value:`${i}`}];Promise.all([(0,E.B1)(i,"dashboard").then((e=>(x(e),Promise.resolve()))).catch((e=>(x([]),a((0,_.t)("There was an issue fetching your dashboards: %s",e)),Promise.resolve()))),(0,E.B1)(i,"chart").then((e=>(g(e),Promise.resolve()))).catch((e=>(g([]),a((0,_.t)("There was an issue fetching your chart: %s",e)),Promise.resolve()))),r?(0,E.B1)(i,"saved_query",n).then((e=>(w(e),Promise.resolve()))).catch((e=>(w([]),a((0,_.t)("There was an issue fetching your saved queries: %s",e)),Promise.resolve()))):Promise.resolve()]).then((()=>{$(!1)}))}),[j]);const B=()=>{h(!b),(0,F.I_)(i,{thumbnails:!b})};(0,n.useEffect)((()=>{!z&&null!=Z&&Z.length&&R((e=>[...e,"4"])),m((e=>({...e,Created:[...(null==p?void 0:p.slice(0,3))||[],...(null==C?void 0:C.slice(0,3))||[],...(null==Z?void 0:Z.slice(0,3))||[]]})))}),[p,Z,C]),(0,n.useEffect)((()=>{var e;!z&&null!=f&&null!=(e=f[N.F.Viewed])&&e.length&&R((e=>["1",...e]))}),[f]);const V=!(null!=f&&f[N.F.Other]||null!=f&&f[N.F.Viewed]),H={activeChild:"Home",name:(0,_.t)("Home")};return 
u&&(H.buttons=[{name:(0,q.tZ)(xe,null,(0,q.tZ)("div",{className:"switch"},(0,q.tZ)(I.KU,{checked:b,onClick:B}),(0,q.tZ)("span",null,(0,_.t)("Thumbnails")))),onClick:B,buttonStyle:"link"}]),(0,q.tZ)(n.Fragment,null,(0,q.tZ)(D.Z,H),(0,q.tZ)(_e,null,A&&(0,q.tZ)(A,null),O&&(0,q.tZ)(O,null),L&&(0,q.tZ)(L,null),(!O||!L)&&(0,q.tZ)(n.Fragment,null,(0,q.tZ)(S.Z,{activeKey:U,onChange:e=>{R(e),(0,F.LS)(F.dR.homepage_collapse_state,e)},ghost:!0,bigger:!0},(0,q.tZ)(S.Z.Panel,{header:(0,_.t)("Recents"),key:"1"},f&&(f[N.F.Viewed]||f[N.F.Other]||f[N.F.Created])&&"Loading"!==o?(0,q.tZ)(ee,{user:{userId:t.userId},activeChild:o,setActiveChild:d,activityData:f,isFetchingActivityData:T}):(0,q.tZ)(Se,null)),(0,q.tZ)(S.Z.Panel,{header:(0,_.t)("Dashboards"),key:"2"},!C||V?(0,q.tZ)(Se,{cover:b}):(0,q.tZ)(ye,{user:t,mine:C,showThumbnails:b,otherTabData:null==f?void 0:f[N.F.Other],otherTabFilters:j,otherTabTitle:M})),(0,q.tZ)(S.Z.Panel,{header:(0,_.t)("Charts"),key:"3"},!p||V?(0,q.tZ)(Se,{cover:b}):(0,q.tZ)(oe,{showThumbnails:b,user:t,mine:p,otherTabData:null==f?void 0:f[N.F.Other],otherTabFilters:j,otherTabTitle:M})),r&&(0,q.tZ)(S.Z.Panel,{header:(0,_.t)("Saved queries"),key:"4"},Z?(0,q.tZ)(ge,{showThumbnails:b,user:t,mine:Z,featureFlag:u}):(0,q.tZ)(Se,{cover:b}))))))})),Te=(0,n.lazy)((()=>a.e(4109).then(a.bind(a,76499)))),$e=(0,n.lazy)((()=>Promise.all([a.e(8782),a.e(906)]).then(a.bind(a,78768)))),Ee=(0,n.lazy)((()=>Promise.all([a.e(1216),a.e(1247),a.e(8),a.e(8782),a.e(95),a.e(9563)]).then(a.bind(a,32635)))),Ie=(0,n.lazy)((()=>Promise.all([a.e(8782),a.e(1611)]).then(a.bind(a,98217)))),Ne=(0,n.lazy)((()=>Promise.all([a.e(1216),a.e(8782),a.e(665)]).then(a.bind(a,33320)))),De=(0,n.lazy)((()=>Promise.all([a.e(8782),a.e(4085)]).then(a.bind(a,36942)))),Pe=(0,n.lazy)((()=>Promise.all([a.e(1216),a.e(8782),a.e(8774)]).then(a.bind(a,18510)))),ze=(0,n.lazy)((()=>Promise.all([a.e(232),a.e(6362)]).then(a.bind(a,90170)))),Ue=(0,n.lazy)((()=>Promise.all([a.e(8782),a.e(4502)]).then(a.bind(a,95731)))),Re=(0,n.lazy)((()=>Promise.all([a.e(8782),a.e(5656)]).then(a.bind(a,52438)))),Ae=(0,n.lazy)((()=>Promise.all([a.e(1216),a.e(527),a.e(8782),a.e(3197),a.e(6217),a.e(8438)]).then(a.bind(a,58298)))),Oe=(0,n.lazy)((()=>Promise.all([a.e(8782),a.e(5336)]).then(a.bind(a,58883)))),Le=(0,n.lazy)((()=>Promise.all([a.e(1216),a.e(527),a.e(1247),a.e(8),a.e(981),a.e(5207),a.e(323),a.e(3197),a.e(95),a.e(232),a.e(868),a.e(9540),a.e(1128)]).then(a.bind(a,4189)))),Me=(0,n.lazy)((()=>Promise.all([a.e(8782),a.e(6284)]).then(a.bind(a,36444)))),qe=(0,n.lazy)((()=>Promise.all([a.e(1216),a.e(8782),a.e(9173)]).then(a.bind(a,7742)))),je=(0,n.lazy)((()=>a.e(7637).then(a.bind(a,7628)))),Be=(0,n.lazy)((()=>Promise.all([a.e(8782),a.e(373)]).then(a.bind(a,87827)))),Ve=[{path:"/superset/welcome/",Component:Fe},{path:"/dashboard/list/",Component:Pe},{path:"/superset/dashboard/:idOrSlug/",Component:ze},{path:"/chart/add",Component:Te},{path:"/chart/list/",Component:Ne},{path:"/tablemodelview/list/",Component:Re},{path:"/databaseview/list/",Component:Ue},{path:"/savedqueryview/list/",Component:qe},{path:"/csstemplatemodelview/list/",Component:De},{path:"/annotationlayer/list/",Component:$e},{path:"/annotationlayer/:annotationLayerId/annotation/",Component:Ie},{path:"/superset/sqllab/history/",Component:Me},{path:"/alert/list/",Component:Ee},{path:"/report/list/",Component:Ee,props:{isReportEnabled:!0}},{path:"/alert/:alertId/log/",Component:Oe},{path:"/report/:alertId/log/",Component:Oe,props:{isReportEnabled:!0}},{path:"/explore/",Component:Le},{path:"/superset/explore
/p",Component:Le},{path:"/dataset/add/",Component:Ae},{path:"/dataset/:datasetId",Component:Ae},{path:"/rowlevelsecurity/list",Component:(0,n.lazy)((()=>Promise.all([a.e(1216),a.e(1247),a.e(8782),a.e(7177)]).then(a.bind(a,6065))))}];(0,v.cr)(v.TT.TAGGING_SYSTEM)&&(Ve.push({path:"/superset/all_entities/",Component:je}),Ve.push({path:"/superset/tags/",Component:Be}));const He=Ve.map((e=>e.path)).reduce(((e,t)=>({...e,[t]:!0})),{});function Ke(e){if(e){const t=e.split(/[?#]/)[0];return!!He[t]}return!1}var Ge=a(3741),Qe=a(63431),Ye=a(97381),We=a(71255),Je=a(68135),Xe=a(28216),et=a(35755),tt=a(38626),at=a(57865),nt=a(4756);const rt={info:"addInfoToast",alert:"addDangerToast",danger:"addDangerToast",warning:"addWarningToast",success:"addSuccessToast"};function it(e){let{children:t,messages:a}=e;const n=(0,$.e1)();return(0,nt.J)((()=>{a.forEach((e=>{const[t,a]=e,r=n[rt[t]];r&&r(a)}))})),t}var lt=a(25076),ot=a(29147),dt=a(14278);const{common:st}=(0,f.Z)(),ct=(0,w.I)(),ut=e=>{let{children:t}=e;const a=ct.get("root.context.provider");return(0,q.tZ)(Je.a,{theme:lt.r},(0,q.tZ)(Xe.zt,{store:We.h},(0,q.tZ)(tt.W,{backend:at.PD},(0,q.tZ)(it,{messages:st.flash_messages},(0,q.tZ)(ot.DG,null,(0,q.tZ)(dt.EM,null,(0,q.tZ)(et.Fz,{ReactRouterRoute:o.AW,stringifyOptions:{encode:!1}},a?(0,q.tZ)(a,null,t):t)))))))},bt=()=>{const{pathname:e}=(0,o.TH)();return(0,n.useEffect)((()=>{window.scrollTo(0,0)}),[e]),null};(0,p.Z)(),(0,g.Z)(),(0,Qe.Z)();const ht=(0,f.Z)();let ft;const mt=(0,s.DE)({logEvent:Ye.logEvent},We.h.dispatch),pt=()=>{const e=(0,o.TH)();return(0,n.useEffect)((()=>{mt.logEvent(Ge.kV,{path:e.pathname}),ft&&ft!==e.pathname&&Ge.Yd.markTimeOrigin(),ft=e.pathname}),[e.pathname]),(0,q.tZ)(n.Fragment,null)};r.render((0,q.tZ)((()=>(0,q.tZ)(d.VK,null,(0,q.tZ)(bt,null),(0,q.tZ)(pt,null),(0,q.tZ)(ut,null,(0,q.tZ)(c.n,null),(0,q.tZ)(h.Z,{data:ht.common.menu_data,isFrontendRoute:Ke}),(0,q.tZ)(o.rs,null,Ve.map((e=>{let{path:t,Component:a,props:r={},Fallback:i=b.Z}=e;return(0,q.tZ)(o.AW,{path:t,key:t},(0,q.tZ)(n.Suspense,{fallback:(0,q.tZ)(i,null)},(0,q.tZ)(u.Z,null,(0,q.tZ)(a,l()({user:ht.user},r)))))}))),(0,q.tZ)(m.Z,null)))),null),document.getElementById("app"))}},s={};function c(e){var t=s[e];if(void 0!==t)return t.exports;var a=s[e]={id:e,loaded:!1,exports:{}};return d[e].call(a.exports,a,a.exports,c),a.loaded=!0,a.exports}c.m=d,c.amdD=function(){throw new Error("define cannot be used indirect")},c.amdO={},e=[],c.O=(t,a,n,r)=>{if(!a){var i=1/0;for(s=0;s<e.length;s++){for(var[a,n,r]=e[s],l=!0,o=0;o<a.length;o++)(!1&r||i>=r)&&Object.keys(c.O).every((e=>c.O[e](a[o])))?a.splice(o--,1):(l=!1,r<i&&(i=r));if(l){e.splice(s--,1);var d=n();void 0!==d&&(t=d)}}return t}r=r||0;for(var s=e.length;s>0&&e[s-1][2]>r;s--)e[s]=e[s-1];e[s]=[a,n,r]},c.H={},c.G=e=>{Object.keys(c.H).map((t=>{c.H[t](e)}))},c.n=e=>{var t=e&&e.__esModule?()=>e.default:()=>e;return c.d(t,{a:t}),t},a=Object.getPrototypeOf?e=>Object.getPrototypeOf(e):e=>e.__proto__,c.t=function(e,n){if(1&n&&(e=this(e)),8&n)return e;if("object"==typeof e&&e){if(4&n&&e.__esModule)return e;if(16&n&&"function"==typeof e.then)return e}var r=Object.create(null);c.r(r);var i={};t=t||[null,a({}),a([]),a(a)];for(var l=2&n&&e;"object"==typeof l&&!~t.indexOf(l);l=a(l))Object.getOwnPropertyNames(l).forEach((t=>i[t]=()=>e[t]));return i.default=()=>e,c.d(r,i),r},c.d=(e,t)=>{for(var a in 
t)c.o(t,a)&&!c.o(e,a)&&Object.defineProperty(e,a,{enumerable:!0,get:t[a]})},c.f={},c.e=e=>Promise.all(Object.keys(c.f).reduce(((t,a)=>(c.f[a](e,t),t)),[])),c.u=e=>6217===e?"6217.cd25427f973c862c7ae4.entry.js":2441===e?"2441.84a87e1b904333670286.entry.js":527===e?"527.c0e551182a264e2f7148.entry.js":3197===e?"3197.1cd8561c0296667b196a.entry.js":323===e?"323.323ae901ba7565bddc59.entry.js":{8:"dca67f420947a30366cc",57:"6a8edc9301437b97a880",95:"0583e8e330caeca98310",112:"ea34548959147c99db4f",137:"b87da57903aa9410028d",158:"7c6c5a6298756fb2e368",177:"854e72ce009097dea6ef",183:"b8a6d32010639756fd5a",215:"2eb952af30b72fbaca47",232:"a732943a7b6a3c9a7a61",310:"1b6d3d5fb379f4ffba0c",312:"de243f762dd3dae25a71",326:"2cb64f3b9637471ab79f",336:"ee514f13799d75351117",347:"01e75abc64cbda8b33ab",349:"84954b79c3bf85863720",357:"b45d9baa85989a83d267",363:"7d5517b5ba77ac664f31",373:"cdf79ec133c460ebf576",440:"f8e2127f15ad8052eb06",444:"50925b57f14af44a0bb4",452:"413790c7ff292148d337",597:"4a61352083b112ee09cf",600:"202c01791d6b0888d41f",616:"84d54d83caf99e4945fa",665:"6686fa1b69f40ed0f56b",704:"25ae9fbedb72d9b3ffe7",741:"fb1021ca22e56391512a",775:"a1add2c6d470e2eed2f3",783:"0fb241b92d4f6fe00a1b",826:"e4564149bcdcfe49a5b2",868:"e401bb1e0331ab8f5723",906:"30817e62f07f1abf294f",954:"296c1383fa71efe1ecc8",981:"8e5330a6c04a3c75cd9c",992:"3bb28bf9de5cfb3a31ea",999:"66d4521869b231631f62",1020:"65e85181c3af20adfaf3",1075:"7686aaddcaaafe1fc65c",1128:"2ef9322205062e596aca",1130:"f94f8bd9251398afb0ee",1161:"949f079381e033adcd80",1174:"30bc5e225c6a879478e8",1185:"51c76e5148ce3638cf4d",1221:"e7c18da35561edd7407c",1247:"519fcdece5fe12d2a370",1256:"e47da8a47ce8421a6c35",1258:"84ffa13048dcd63abcbf",1263:"e3d24b7d629f0c9caa0f",1293:"7ae2db4047e52c1955d1",1351:"315f984040a2731d6cbf",1382:"e2d86ccce3024d1eaeda",1398:"0e8062e0e7e11f86214c",1402:"ef71db83f410e2d77610",1441:"3c51abc9ce0adcb269e5",1458:"b09ea5e1028006cabb5d",1493:"c395f7575cfa71b30738",1568:"ade06aa262190c3f47b4",1573:"1e467de27bd3dba441fd",1605:"09f2d740509950089383",1611:"3b4e34066289dee1857f",1862:"6359f21c2f9b786ce16a",1877:"3926ef13daefbca43225",1899:"42582cd7f6f09fd108b8",1948:"ff5684143fd91d3b8a80",2079:"73fc3ae216623170b69a",2089:"833e1a633dab60aa1e82",2105:"d5553e1dcb0d56e914e2",2112:"fbeea1159ce35af31257",2197:"c3f808e931ea3b315215",2229:"569b70071600746d3b71",2264:"8dca48a562851e445715",2267:"1d613dba270d05e66fd6",2403:"6b0855f12416f000070b",2439:"8379ca70bb023fd2cd2d",2549:"a89b67984e1a91e7bcb7",2646:"c7c6eae3a837335173db",2698:"106e182a75dbf3b70282",2713:"cfb5fefa4663bcc52a9d",2797:"e7d896401450124f316c",2955:"6295f45e8d82a3fd81ce",2983:"77022e8536c5f48eb27d",3036:"ca1ad58a2cb6edbea045",3126:"083b79681eb022297597",3141:"1a3ad71ca8414b1aff4c",3187:"177dfb79b42eb73d7866",3195:"ad7b6a2c969508cd6a29",3208:"48ce4cffe1e01e20fe1f",3240:"18d808107a08c1b56d69",3265:"b70b50f3b438cae549d4",3325:"c74aeba305c67f0de7fc",3496:"e04fa2a488bef0ca4f6f",3544:"7911faad16e31278af77",3558:"836419fc91f20475e280",3567:"d95b5cd0f543a0fa7fcd",3588:"3b7ca92d40664402b992",3606:"54aec15887e18dcf4d6e",3711:"82113b63056436d4d02b",3745:"8f3ee2bd81ca66645cbb",3749:"a8fed202e88865a3c129",3831:"6c76e7ff81d847693bf3",3871:"2ab5d182a330ff8dc633",3955:"ae1e5967d8e165d4c383",3985:"e0db3b71272b791f29f3",4085:"62aab99bb2c5c13f6bd0",4109:"4c06079f17a1bd53ba5d",4139:"c27e96fb57f180a5fea0",4194:"df8caffe35dc7408592b",4237:"04046faa6cfa8c4d495c",4266:"7b50c83b0b6b87ccd33c",4273:"e50674c10f226bc56c96",4421:"f2885780359216344d5f",4458:"b91e10e355934543f970",4474:"9c4d23562042acc4eab2",4494:"60e55d23
e81b8abb6801",4502:"316cfa73cf024a783196",4572:"237ebbb7008e5a538ac0",4579:"03d7163328e8d555df63",4625:"3321aa0059456f094615",4662:"dea8bc3bbe5cae7135cd",4667:"0fda52175c59292cafb9",4672:"ccf213c8a0828ebb7f45",4732:"2a9aac4aa4178ef96636",4757:"53bd0bbe26f32495e698",4758:"225a2cf803122d531ac1",4790:"2828f07d4970a294e924",4794:"77b2d851251414066907",4797:"06a8550688aa97522575",4810:"13d8a4a65f2fa69630df",4817:"e5ec7f93c7b83dbc8e10",4832:"a65fafcbf585f484da44",4851:"e3a5a687922d1f1f61c4",4854:"79ab3b8e6e8ab5897a79",4936:"31dba9d54004fd0b4f6b",4972:"e1d7d70ebc1e8e374eb1",5094:"03c0179c4fe59dfccbe6",5123:"42114fa32b185e95acdf",5181:"cc23598b0cf466b174f6",5201:"b410f0b9661764777ae1",5207:"1b166f826e97b201ae04",5224:"6801df583a7baa6f38df",5226:"164f0157f65b94ac86ea",5249:"960fa56954a3390a45d0",5264:"cfd910d048817219d524",5281:"71fb054b1d5a4969f94e",5330:"6a3f757875b50336fa52",5335:"ffcda709454ee8e00875",5336:"de4ae23774f8ad67b971",5350:"7c476e1bcd37583d1ad2",5359:"b948569636c2e25cf87e",5367:"4eecb7542f1d0f20e390",5378:"97d3f65161ee31afe88b",5422:"4c7b29775067e1b09212",5507:"f9862baff82efddcfed6",5566:"8cb7ec34e1a5d5cdc0e9",5580:"a6fffc2d36e02dfdd558",5592:"34f938a1511d7766860c",5641:"03cee1d1e1120dca640f",5656:"f1e701773c829c37e5eb",5707:"ee1bae94981497a1099b",5771:"e6bd7d04c59062204af7",5772:"5d2c10c1ae63f298a73f",5777:"bd047178347cf48818b5",5802:"10c562415702931e090a",5816:"09a58c174324a1095068",5832:"b4253794b4e4f614fdad",5838:"03b4e5235c29124ba82b",5906:"0816e0f97502443c7233",5962:"8e22959b6677bb2c904e",5972:"e2b002c74e3f3f64bd8b",5998:"3acb25873e499325f00e",6061:"c0d72749bdfdbe1f0675",6126:"b1e9e54700dc790f535e",6150:"2b84bc490340c9762ce6",6167:"a1a753704b31b1574914",6207:"77c62667b74526939ac1",6254:"d679cfabbe67c893078a",6284:"d0d5a85523dcb1a1cd39",6303:"5c84e2437e0384ee0b13",6357:"57f7615e3a9ef917ca82",6362:"977cb591e862cb4d1f88",6371:"8b3a79bdcffc8199f2ce",6377:"68d56cb0512e7003eacd",6420:"c0bab80e31e3cb9d7370",6447:"4982ddd1a2cfc3462d35",6486:"b4e2ce371cfa18faee67",6507:"662e962171154f1f9157",6668:"c9ce3d6771f6b266aa3f",6682:"8d7f828696799beb97ed",6693:"9d63ead3434e1f56167a",6758:"75f6753a49fe647bc674",6819:"bc3027bcb33a6f068f74",6883:"3725a5f79aad6bf56dbd",6981:"37a59d45844d993b5526",7003:"7e2cbc04372fc91eb049",7156:"2dd1002db8b996ecacc5",7177:"8e1c9f2cced189f5d82b",7183:"7ef924adae1d79c59a30",7245:"7919f5dfe4abd96372c7",7249:"26389e9e4698c17823e1",7253:"4896f1032edd95396be8",7405:"ed30936869c11b67b644",7460:"269bcab302a8d01e4b71",7521:"b7b1f7a6bc46d5a0dd8b",7584:"2a0efbe0d8bbb1942c39",7610:"cd111d14a239de6cbf67",7637:"ffa3342484cc118b32f1",7654:"7781029cd7874c62c004",7716:"53ac5c1205f6e1e6e616",7760:"2ac2524484cd50006129",7803:"716853494168af0ba3a9",7832:"2478a72a59cd4d05fdc5",7850:"2e1b1dd1c6ac18288e18",7922:"557ae239cc7e3f8ccead",7984:"d5704e54bf74dfb88424",7989:"f4a6c401a92c88bca1b2",8041:"4697dd9418c10110c66f",8312:"037d8fbe29610cde8fed",8349:"6602c65d3c4cec8b27b2",8370:"16a5bf7a5abd35871cfe",8398:"ea0e18eecdbcd98ba60b",8425:"216064963022a7599339",8438:"c48d630f46962be620b7",8463:"3626549b23225a6e2c95",8464:"5c6a86ce1ce8831458ee",8491:"553928348feb33e7dcbf",8551:"b34ea07f15c98438d8b6",8623:"0dfe5fdeff8c56bcee19",8656:"a0536d45eaee4b70ae6b",8682:"e123504ae9bdae42d7c6",8695:"f5f9d5fad83cad0b90ea",8701:"9c835224d19a8a65cf13",8750:"0b72ee1a02623f83e0ca",8774:"a6580ea28a6741ca22db",8782:"bcae2d9e0a388c644584",8883:"8ba088da5c466b5829ba",8970:"bb1ed3f0e87a0bc74792",8971:"1f51ccb0a3dee1492d37",9013:"8f7d3503973d9608c3f6",9052:"b9ce77b76d5488a36355",9101:"402ea28e6248b9349288",9109:"ef04744
c9e66d5a22e11",9173:"17fe86ac251a467c9684",9207:"bda2f64f1dbce6ae7d7d",9305:"1d76f1f9c53ccf34e596",9322:"8622b05fed2ed11cfb55",9325:"c5562d7456f59804ae5d",9393:"1e925598ffe216e24710",9396:"dbd8d78e8e000c257257",9483:"01cfddf850e70ca35a4f",9510:"d3a9f49bff7461a62ef6",9540:"b0a63257a319b05fde58",9558:"faf680e140cfca20867e",9563:"d6e79a30ac772ea01149",9622:"442e335838b6579b00c4",9681:"de2757c9960ff08f92b9",9767:"afafa15398ea7165cb04",9794:"4e85b8dbced27ec7a2bc",9811:"3c24ba556c37f0468945",9857:"40e39f4283bce96d857f",9873:"634e2618746831f36034",9877:"8988693a046a18f3dbdf"}[e]+".chunk.js",c.miniCssF=e=>(({452:"DashboardContainer",1128:"Chart",7177:"RowLevelSecurityList",9563:"AlertReportList"}[e]||e)+"."+{137:"b87da57903aa9410028d",452:"413790c7ff292148d337",1128:"2ef9322205062e596aca",1441:"3c51abc9ce0adcb269e5",1877:"3926ef13daefbca43225",3036:"ca1ad58a2cb6edbea045",4194:"df8caffe35dc7408592b",4237:"04046faa6cfa8c4d495c",4494:"60e55d23e81b8abb6801",4936:"31dba9d54004fd0b4f6b",5378:"97d3f65161ee31afe88b",5422:"4c7b29775067e1b09212",5566:"8cb7ec34e1a5d5cdc0e9",5772:"5d2c10c1ae63f298a73f",7177:"8e1c9f2cced189f5d82b",8623:"0dfe5fdeff8c56bcee19",9563:"d6e79a30ac772ea01149"}[e]+".chunk.css"),c.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),c.o=(e,t)=>Object.prototype.hasOwnProperty.call(e,t),n={},r="superset:",c.l=(e,t,a,i)=>{if(n[e])n[e].push(t);else{var l,o;if(void 0!==a)for(var d=document.getElementsByTagName("script"),s=0;s<d.length;s++){var u=d[s];if(u.getAttribute("src")==e||u.getAttribute("data-webpack")==r+a){l=u;break}}l||(o=!0,(l=document.createElement("script")).charset="utf-8",l.timeout=120,c.nc&&l.setAttribute("nonce",c.nc),l.setAttribute("data-webpack",r+a),l.src=e),n[e]=[t];var b=(t,a)=>{l.onerror=l.onload=null,clearTimeout(h);var r=n[e];if(delete n[e],l.parentNode&&l.parentNode.removeChild(l),r&&r.forEach((e=>e(a))),t)return t(a)},h=setTimeout(b.bind(null,void 0,{type:"timeout",target:l}),12e4);l.onerror=b.bind(null,l.onerror),l.onload=b.bind(null,l.onload),o&&document.head.appendChild(l)}},c.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},c.nmd=e=>(e.paths=[],e.children||(e.children=[]),e),c.p="/static/assets/",i=e=>new Promise(((t,a)=>{var n=c.miniCssF(e),r=c.p+n;if(((e,t)=>{for(var a=document.getElementsByTagName("link"),n=0;n<a.length;n++){var r=(l=a[n]).getAttribute("data-href")||l.getAttribute("href");if("stylesheet"===l.rel&&(r===e||r===t))return l}var i=document.getElementsByTagName("style");for(n=0;n<i.length;n++){var l;if((r=(l=i[n]).getAttribute("data-href"))===e||r===t)return l}})(n,r))return t();((e,t,a,n)=>{var r=document.createElement("link");r.rel="stylesheet",r.type="text/css",r.onerror=r.onload=i=>{if(r.onerror=r.onload=null,"load"===i.type)a();else{var l=i&&("load"===i.type?"missing":i.type),o=i&&i.target&&i.target.href||t,d=new Error("Loading CSS chunk "+e+" failed.\n("+o+")");d.code="CSS_CHUNK_LOAD_FAILED",d.type=l,d.request=o,r.parentNode.removeChild(r),n(d)}},r.href=t,document.head.appendChild(r)})(e,r,t,a)})),l={7103:0},c.f.miniCss=(e,t)=>{l[e]?t.push(l[e]):0!==l[e]&&{137:1,452:1,1128:1,1441:1,1877:1,3036:1,4194:1,4237:1,4494:1,4936:1,5378:1,5422:1,5566:1,5772:1,7177:1,8623:1,9563:1}[e]&&t.push(l[e]=i(e).then((()=>{l[e]=0}),(t=>{throw delete l[e],t})))},(()=>{var e={7103:0};c.f.j=(t,a)=>{var n=c.o(e,t)?e[t]:void 
0;if(0!==n)if(n)a.push(n[2]);else{var r=new Promise(((a,r)=>n=e[t]=[a,r]));a.push(n[2]=r);var i=c.p+c.u(t),l=new Error;c.l(i,(a=>{if(c.o(e,t)&&(0!==(n=e[t])&&(e[t]=void 0),n)){var r=a&&("load"===a.type?"missing":a.type),i=a&&a.target&&a.target.src;l.message="Loading chunk "+t+" failed.\n("+r+": "+i+")",l.name="ChunkLoadError",l.type=r,l.request=i,n[1](l)}}),"chunk-"+t,t)}},c.H.j=t=>{if(!c.o(e,t)||void 0===e[t]){e[t]=null;var a=document.createElement("link");a.charset="utf-8",c.nc&&a.setAttribute("nonce",c.nc),a.rel="preload",a.as="script",a.href=c.p+c.u(t),document.head.appendChild(a)}},c.O.j=t=>0===e[t];var t=(t,a)=>{var n,r,[i,l,o]=a,d=0;if(i.some((t=>0!==e[t]))){for(n in l)c.o(l,n)&&(c.m[n]=l[n]);if(o)var s=o(c)}for(t&&t(a);d<i.length;d++)r=i[d],c.o(e,r)&&e[r]&&e[r][0](),e[r]=0;return c.O(s)},a=globalThis.webpackChunksuperset=globalThis.webpackChunksuperset||[];a.forEach(t.bind(null,0)),a.push=t.bind(null,a.push.bind(a))})(),o={1128:[1216,527,1247,8,981,5207,5640,3197,95,868,9540,4717,452],6362:[1216,527,1247,8,981,5207,5640,3197,95,868,9540,4717,452]},c.f.preload=e=>{var t=o[e];Array.isArray(t)&&t.map(c.G)},c.O(void 0,[1216,504,2450,2318,5429,4477,3727,7748,2087,5755,845,5640,9602,8047,6209,6579,8989,6839,8795,5296,5198,4717],(()=>c(25076)));var u=c.O(void 0,[1216,504,2450,2318,5429,4477,3727,7748,2087,5755,845,5640,9602,8047,6209,6579,8989,6839,8795,5296,5198,4717],(()=>c(22562)));u=c.O(u)})();
//# sourceMappingURL=spa.62a9c98ebce8578c1ce7.entry.js.map
|
PypiClean
|
/python-opensesame-3.3.15a1.tar.gz/python-opensesame-3.3.15a1/opensesame_plugins/advanced_delay/locale/fr_FR/advanced_delay.md
|
# Advanced_delay
The `advanced_delay` plugin delays the experiment for a pre-specified duration plus a random duration.
- *Duration* is the mean duration of the delay in milliseconds.
- *Variability* is the size of the variation in the delay in milliseconds.
- *Variability mode* is how the variation is calculated:
    - *Standard deviation* will draw the variation from a Gaussian distribution with the Variability as standard deviation.
    - *Uniform* will draw the variation in duration from a uniform distribution.
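As a rough sketch (not the plugin's actual source code; the function and argument names are invented for illustration), the two variability modes could be computed like this:
```python
import random

def compute_delay(duration_ms, variability_ms, mode='standard deviation'):
    """Mean duration plus a random variation (illustrative only)."""
    if mode == 'standard deviation':
        # Gaussian centred on the mean duration, with the variability
        # as its standard deviation.
        return random.gauss(duration_ms, variability_ms)
    # Uniform variation of total width `variability_ms`, centred on the mean.
    return random.uniform(duration_ms - variability_ms / 2,
                          duration_ms + variability_ms / 2)
```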
|
PypiClean
|
/Flask-Minify-0.42.tar.gz/Flask-Minify-0.42/README.md
|
<h1 align='center'> flask_minify </h1>
<p align='center'>
<a href='https://pypi.org/project/Flask-Minify/'>
<img src='https://img.shields.io/github/v/tag/mrf345/flask_minify' alt='Latest Release' />
</a>
<a href='https://github.com/mrf345/flask_minify/actions/workflows/ci.yml'>
<img src='https://github.com/mrf345/flask_minify/workflows/Build/badge.svg'>
</a>
<br />
<img src='https://img.shields.io/pypi/pyversions/flask_minify' alt='Supported versions' />
<br />
<a href='https://github.com/mrf345/flask_minify/actions/workflows/ci.yml'>
<img src='https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/mrf345/bc746d7bfe356b54fbb93b2ea5d0d2a4/raw/flask_minify__heads_master.json' alt='Coverage Percentage' />
</a>
<a href='https://github.com/PyCQA/bandit'>
<img src='https://img.shields.io/badge/security-bandit-yellow.svg' alt='security: bandit' />
</a>
<a href='https://github.com/psf/black'>
<img src='https://img.shields.io/badge/style-black-000000.svg' alt='Code Style Black' />
</a>
<br />
</p>
<h3 align='center'>Flask extension to parse and minify html, javascript, css and less.</h3>
## Install:
With **pip**
- `pip install Flask-Minify`
*Or* from the source
- `git clone https://github.com/mrf345/flask_minify.git`
- `cd flask_minify`
- `python setup.py install`
## Setup:
In this example the extension will minify every HTML request, unless it's explicitly bypassed.
```python
from flask import Flask
from flask_minify import Minify
app = Flask(__name__)
Minify(app=app, html=True, js=True, cssless=True)
```
Another approach is using **decorators**; you can set the extension to be `passive` so it will only minify the decorated routes
```python
from flask import Flask
from flask_minify import Minify, decorators as minify_decorators
app = Flask(__name__)
Minify(app=app, passive=True)
@app.route('/')
@minify_decorators.minify(html=True, js=True, cssless=True)
def example():
return '<h1>Example...</h1>'
```
## Options:
Option | type | Description
-------------------|----------|-------------
app | `object` | `Flask` app instance to be passed (default: `None`)
html | `bool` | minify HTML (default: `True`)
js | `bool` | minify JavaScript output (default: `True`)
cssless | `bool` | minify CSS or Less (default: `True`)
fail_safe | `bool` | avoid raising error while minifying (default: `True`)
bypass | `list` | endpoints to bypass minifying for, supports `Regex` (default: `[]`)
bypass_caching | `list` | endpoints to bypass caching for, supports `Regex` (default: `[]`)
caching_limit | `int` | limit the number of cached response variations (default: `2`)
passive | `bool` | disable active minifying, to use *decorators* instead (default: `False`)
static | `bool` | enable minifying static files css, less and js (default: `True`)
script_types | `list` | script types to limit js minification to (default: `[]`)
parsers | `dict` | parsers to handle minifying specific tags, mainly for advanced customization (default: `{}`)
#### - `bypass` and `bypass_caching`
`endpoint` in this context is the name of the function decorated with `@app.route`
so in the following example the endpoint will be `root`:
```python
@app.route('/root/<id>')
def root(id):
return id
```
both options accept regex patterns as input, so if, for example, you want to bypass all routes on a certain blueprint
you can just pass the pattern as such:
```python
Minify(app, bypass=['blueprint_name.*'])
```
#### - `caching_limit`
if the option is set to `0`, no response will be cached, so if you want to **disable caching** just set the limit to `0`.
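for example, disabling caching entirely looks like this (a minimal sketch using the option documented above):
```python
from flask import Flask
from flask_minify import Minify

app = Flask(__name__)
Minify(app=app, caching_limit=0)  # no response variations are cached
```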
#### - `script_types`
when using the option, include `''` (an empty string) in the list to also match script blocks that are missing the `type` attribute.
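a minimal sketch; the concrete type values below are illustrative assumptions, not a list mandated by the extension:
```python
from flask import Flask
from flask_minify import Minify

app = Flask(__name__)
# '' also matches <script> blocks that have no type attribute
Minify(app=app, script_types=['application/javascript', ''])
```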
#### - `parsers`
allows passing tag-specific options to the module responsible for the minification, as well as replacing the default parser with another bundled option or your own custom one.
The following example replaces the default `style` parser (which handles CSS), `rcssmin`, with `lesscpy`:
```python
from flask_minify import Minify, parsers as minify_parsers
parsers = {'style': minify_parsers.Lesscpy}
Minify(app=app, parsers=parsers)
```
you can override the default parser runtime options as well, as shown in the following example:
```python
from flask_minify import Minify, parsers as minify_parsers
class CustomCssParser(minify_parsers.Lesscpy):
runtime_options = {
**minify_parsers.Lesscpy.runtime_options,
"xminify": False,
}
parsers = {'style': CustomCssParser}
Minify(app=app, parsers=parsers)
```
the **default** parsers are set to `{"html": Html, "script": Jsmin, "style": Rcssmin}`; check out [the code](https://github.com/mrf345/flask_minify/blob/master/flask_minify/parsers.py) for more insight.
## Development:
- *Tests*: `make test`
- *Style check*: `make lint`
- *Format code*: `make format`
## Breaking changes
#### `0.40`
Due to a future deprecation in Flask 2.3, the extension no longer falls back to `Flask._app_ctx_stack`; it will raise an exception instead (`flask_minify.exceptions.MissingApp`)
#### `0.33`
introduces a breaking change to the expected output: in this release `lesscpy` is replaced by `cssmin` as
the default css minifier, so no more `less` compiling by default. In case you don't want that, follow [this example](https://github.com/mrf345/flask_minify#--parsers).
|
PypiClean
|
/opendig-0.1.0.tar.gz/opendig-0.1.0/namecoinrpc/proxy.py
|
try:
import http.client as httplib
except ImportError:
import httplib
import base64
import json
import decimal
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
USER_AGENT = "AuthServiceProxy/0.1"
HTTP_TIMEOUT = 30
class JSONRPCException(Exception):
    """Raised when the JSON-RPC server returns an error object."""

    def __init__(self, rpcError):
        Exception.__init__(self)
        self.error = rpcError
class AuthServiceProxy(object):
    """Minimal JSON-RPC client with HTTP basic authentication support."""

    def __init__(self, serviceURL, serviceName=None):
self.__serviceURL = serviceURL
self.__serviceName = serviceName
self.__url = urlparse.urlparse(serviceURL)
if self.__url.port is None:
port = 80
else:
port = self.__url.port
self.__idcnt = 0
authpair = "%s:%s" % (self.__url.username, self.__url.password)
authpair = authpair.encode('utf8')
self.__authhdr = "Basic ".encode('utf8') + base64.b64encode(authpair)
        # Pass the timeout as a keyword argument: the old positional
        # 'strict' parameter was removed in Python 3, so positional
        # arguments after port would be misinterpreted there.
        if self.__url.scheme == 'https':
            self.__conn = httplib.HTTPSConnection(self.__url.hostname, port,
                                                  timeout=HTTP_TIMEOUT)
        else:
            self.__conn = httplib.HTTPConnection(self.__url.hostname, port,
                                                 timeout=HTTP_TIMEOUT)
    def __getattr__(self, name):
        # Chain attribute lookups into dotted JSON-RPC method names,
        # e.g. proxy.foo.bar calls the remote method "foo.bar".
        if self.__serviceName is not None:
            name = "%s.%s" % (self.__serviceName, name)
        return AuthServiceProxy(self.__serviceURL, name)
def __call__(self, *args):
self.__idcnt += 1
postdata = json.dumps({
'version': '1.1',
'method': self.__serviceName,
'params': args,
'id': self.__idcnt})
self.__conn.request('POST', self.__url.path, postdata,
{ 'Host' : self.__url.hostname,
'User-Agent' : USER_AGENT,
'Authorization' : self.__authhdr,
'Content-type' : 'application/json' })
httpresp = self.__conn.getresponse()
if httpresp is None:
raise JSONRPCException({
'code' : -342, 'message' : 'missing HTTP response from server'})
resp = httpresp.read()
resp = resp.decode('utf8')
resp = json.loads(resp, parse_float=decimal.Decimal)
        if 'error' in resp and resp['error'] is not None:
raise JSONRPCException(resp['error'])
elif 'result' not in resp:
raise JSONRPCException({
'code' : -343, 'message' : 'missing JSON-RPC result'})
else:
return resp['result']
def _batch(self, rpc_call_list):
postdata = json.dumps(list(rpc_call_list))
self.__conn.request('POST', self.__url.path, postdata,
{ 'Host' : self.__url.hostname,
'User-Agent' : USER_AGENT,
'Authorization' : self.__authhdr,
'Content-type' : 'application/json' })
httpresp = self.__conn.getresponse()
if httpresp is None:
raise JSONRPCException({
'code' : -342, 'message' : 'missing HTTP response from server'})
resp = httpresp.read()
resp = resp.decode('utf8')
resp = json.loads(resp, parse_float=decimal.Decimal)
return resp
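
# Example usage (hypothetical URL and credentials, shown for illustration only):
#
#     proxy = AuthServiceProxy('http://rpcuser:rpcpassword@127.0.0.1:8336')
#     info = proxy.getinfo()  # any attribute lookup becomes a JSON-RPC method call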
|
PypiClean
|
/anime-pgen-1.0.1.tar.gz/anime-pgen-1.0.1/README.md
|
# Preview generator for Shikimori

<p align="center"><i>Demo</i></p>
[](https://pypi.org/project/anime-pgen)
## Description
`anime-pgen` is a `cli` utility for generating preview images from Shikimori data (_downloading the data is part of the functionality_). [Typer](https://typer.tiangolo.com) is used as the framework for the `cli` interface
### Requirements and installation
- Python `^3.9`
- `pip`, `poetry`, or any other package manager for Python
- An application on [Shikimori](https://shikimori.one/oauth/applications) (_an_ `APPLICATION_NAME` _is required for the tool to work_)
Installation:
```bash
$> pip install anime-pgen
```
[Optional] Terminal completion hints:
```bash
$> pgen --install-completion
```
## Usage
1. First, create a folder for the configs and content
```bash
$> mkdir previews && cd previews
```
2. Next, add a config file. You can take it [from the repository](https://github.com/shikimori/preview-generator/blob/master/config.example.yaml). The file name is `config.yaml`
```bash
$> cp config.example.yaml config.yaml
$> l
total 16
drwxr-xr-x 4 user staff 128B Jun 28 19:48 .
drwxr-xr-x 23 user staff 736B Jun 28 19:43 ..
-rw-r--r-- 1 user staff 1.1K Jun 28 19:48 config.yaml
```
3. For convenience, create a `content` folder; it will hold the fonts and icons
```bash
$> mkdir content
$> l
total 16
drwxr-xr-x 5 user staff 160B Jun 28 19:52 .
drwxr-xr-x 23 user staff 736B Jun 28 19:49 ..
-rw-r--r-- 1 user staff 1.1K Jun 28 19:48 config.yaml
drwxr-xr-x 2 user staff 64B Jun 28 19:52 content
```
4. Into the newly created `content` folder you can right away [copy from the repository](https://github.com/shikimori/preview-generator/tree/master/content) the two-part Shikimori logo, the rating icon and the background fill tile (or use your own)
```bash
$> cp shikimori-glyph.png content/shikimori-glyph.png
$> cp shikimori-logo.png content/shikimori-logo.png
$> cp star.png content/star.png
$> cp tile.png content/tile.png
$> tree -a
.
└── previews
├── config.yaml
└── content
├── shikimori-glyph.png
├── shikimori-logo.png
├── star.png
└── tile.png
```
5. The fonts also go into `content`. Shikimori uses:
- [🔗](https://fonts.google.com/specimen/Open+Sans) `OpenSans` for the title and description
- [🔗](https://docs.microsoft.com/ru-ru/typography/font-list/tahoma) `Tahoma` for the rating
- [🔗](https://fonts.google.com/noto/specimen/Noto+Serif+JP) `NotoSerif_JP` for Japanese characters
In the end the `previews` folder looks roughly like this:
```bash
$> tree -a -L 4
.
└── previews
├── config.yaml
└── content
├── Noto_Serif_JP
│ ├── NotoSerifJP-Black.otf
│ ├── NotoSerifJP-Bold.otf
│ ├── NotoSerifJP-ExtraLight.otf
│ ├── NotoSerifJP-Light.otf
│ ├── NotoSerifJP-Medium.otf
│ ├── NotoSerifJP-Regular.otf
│ ├── NotoSerifJP-SemiBold.otf
│ └── OFL.txt
├── Open_Sans
│ ├── LICENSE.txt
│ ├── OpenSans-Italic-VariableFont_wdth,wght.ttf
│ ├── OpenSans-VariableFont_wdth,wght.ttf
│ ├── README.txt
│ └── static
├── Tahoma
│ ├── COPYRIGHT.txt
│ └── tahoma.ttf
├── shikimori-glyph.png
├── shikimori-logo.png
├── star.png
└── tile.png
```
### `config.yaml`
Let's walk through the configuration file. By default it looks like this:
```yml
size: 'big'
colors:
background: '#ffffff'
text: '#343434'
year: '#555555'
rating:
active: '#4c86c8'
regular: '#cccccc'
content:
images:
background_tile: content/tile.png
star: content/star.png
logo:
glyph: content/shikimori-glyph.png
text: content/shikimori-logo.png
fonts:
text: content/Open_Sans/OpenSans-VariableFont_wdth,wght.ttf
bold_text: content/Open_Sans/static/OpenSans/OpenSans-Bold.ttf
numbers: content/Tahoma/tahoma.ttf
japanese: content/Noto_Serif_JP/NotoSerifJP-Bold.otf
```
---
```yml
size: 'big'
```
Possible values:
- `big` = 1200 x 630 _(default value)_
- `small` = 600 x 315
This is the size of the final image. The numbers follow the preview format recommendations from Facebook/Twitter/VK.
----
```yml
rating:
active: '#4c86c8'
regular: '#cccccc'
```
The colors of the rating stars: the active ones and the placeholders.
The config shows their default values.
----
```yml
colors:
background: '#ffffff'
text: '#343434'
year: '#555555'
```
Colors of:
- The backdrop (`background`)
- All text (`text`)
- The release year (`year`)
The config shows their default values.
----
**Important!**
`colors` and `size` are optional.
If they are not specified in the file, the default values are used (they match the default config)
`content` fields are required
**Important #2!**
Images cannot be `.svg`, only `.jpeg|.jpg|.png` (a limitation of the imaging library)
----
```yml
content:
images:
background_tile: content/tile.png
```
Path to the background tile file. For example, Shikimori's default:

Recommendations:
- Square (otherwise it will get squashed)
- Seamless
- `.png` with an alpha channel if you want it to blend nicely over the white background
----
```yml
content:
images:
star: content/star.png
```
Path to the rating star file.
Requirements:
- Transparent background
- A black-colored shape
- Square
When overlaid on the preview, the black color is repainted with `rating.active` or `rating.regular`
----
```yml
logo:
glyph: content/shikimori-glyph.png
text: content/shikimori-logo.png
```
The two-part Shikimori logo: the glyph + "SHIKIMORI"
Requirements:
- Equal heights
- `.png` with an alpha channel
----
```yml
fonts:
text: content/Open_Sans/OpenSans-VariableFont_wdth,wght.ttf
bold_text: content/Open_Sans/static/OpenSans/OpenSans-Bold.ttf
numbers: content/Tahoma/tahoma.ttf
japanese: content/Noto_Serif_JP/NotoSerifJP-Bold.otf
```
Paths to the fonts:
- `text`: description and captions
- `bold_text`: title
- `numbers`: rating and year
- `japanese`: for Kanji, Hiragana and Katakana
Requirements:
- `TrueType` fonts
### Usage
- Detailed documentation of the `cli` interface: [DOCS.md](https://github.com/shikimori/preview-generator/blob/master/DOCS.md)
- Usage example: [Makefile](https://github.com/shikimori/preview-generator/blob/master/Makefile)
Usage consists of two steps:
1. Download the data from the Shikimori API by anime or manga `id`
2. Generate previews from the data
Let's download the information for the anime "Cowboy Bebop":
```bash
$> pgen fetch 1 --app-name <APPLICATION_NAME_from_Shikimori>
Successfully saved to .pgen.json
$> l
total 40
drwxr-xr-x 6 vladimirlevin staff 192B Jun 28 20:36 .
drwxr-xr-x 3 vladimirlevin staff 96B Jun 28 19:56 ..
-rw-r--r-- 1 vladimirlevin staff 9.2K Jun 28 20:36 .pgen.json
-rw-r--r-- 1 vladimirlevin staff 1.1K Jun 28 19:48 config.yaml
drwxr-xr-x 9 vladimirlevin staff 288B Jun 28 20:03 content
```
By default the data is saved to `.pgen.json`; the path can be changed by passing the `--save-path 'my_file.json'` flag
```bash
$> pgen fetch 1 --app-name <APPLICATION_NAME_from_Shikimori> --save-path "my_file.json"
Successfully saved to my_file.json
```
Moving on to the generation:
```bash
$> pgen make-preview .pgen.json \
--output-folder "." \
--config "config.yaml" \
--app-name <APPLICATION_NAME_from_Shikimori>
Successfully create previews:
- 1.jpg
```
**Done!** 🥳
### FAQ
**Q**: How do I process many titles at once? <br>
**A**: With the `-M` flag you can download and process many anime/manga in one go:
```bash
$> pgen fetch -M "1,5,8" --app-name <APPLICATION_NAME_from_Shikimori>
Successfully saved to .pgen.json
$> pgen make-preview .pgen.json --output-folder "." --config "config.yaml" --app-name <APPLICATION_NAME_from_Shikimori>
Successfully create previews:
- 1.jpg
- 5.jpg
- 8.jpg
```
**Q**: How do I process manga?<br>
**A**: The `-m` flag downloads manga. Preview creation relies on the downloaded data, so nothing needs to change in the second command
```bash
$> pgen fetch -mM "1,8" --app-name <APPLICATION_NAME_from_Shikimori>
Successfully saved to .pgen.json
$> pgen make-preview .pgen.json --output-folder "." --config "config.yaml" --app-name <APPLICATION_NAME_from_Shikimori>
Successfully create previews:
- 1.jpg
- 8.jpg
```
|
PypiClean
|
/openNPL-0.5.tar.gz/openNPL-0.5/docs/source/installation.rst
|
Installation
=======================
You can install and use the openNPL package in any system that supports *python* **or** *docker*
Installation via Docker
-----------------------
Installation via docker is recommended as it provides a streamlined and fast setup of an openNPL instance. If you do not want to use docker, scroll further down to :ref:`Manual installation from sources`
Install Docker
~~~~~~~~~~~~~~
.. note:: A working docker installation is required! Docker is available for many operating systems and platforms (Windows, MacOS, Linux, running on Intel/AMD or ARM chipsets among others). Follow the installation instructions `here <https://docs.docker.com/engine/install/>`_.
Once the installation is complete, make sure the docker service is running by testing that you can run the docker *'hello-world'* application.
.. code:: bash
sudo service docker start
sudo docker run hello-world
Now we are ready for the next step. You can either pull an image from Docker Hub or build a local image:
Pull the openNPL image from Docker Hub
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
You can pull and run the latest image from Docker Hub (This method is recommended if you do not want to mess at all with the source distribution).
.. note:: We also provide images for the ARM/v7 architecture (Raspberry Pi). Check the root of our docker hub for what is `currently available <https://hub.docker.com/u/openrisk>`_
Start by issuing a docker pull command:
.. code:: bash
docker pull openrisk/opennpl_web:latest
docker run -p 8001:8080 openrisk/opennpl_web:latest
If all went well you have now a running instance of openNPL in your local machine. Access it by pointing your browser to ``http://localhost:8001`` and login with admin/admin credentials.
The API endpoints are accessible at ``http://localhost:8001/api``
.. note:: If you want to work with a different image check what is available at our `docker hub list <https://hub.docker.com/repository/docker/openrisk/opennpl_web>`_
Building a local docker image
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Alternatively you can build your own *local* docker image. After you fetch the distribution from the `github repository <https://github.com/open-risk/openNPL>`_ (as per manual installation instructions below), in the root directory of the distribution issue:
.. code:: bash
cd openNPL
docker build -t opennpl_web:latest .
docker run -p 8001:8080 opennpl_web:latest
Again, access the running instance of openNPL by pointing your browser to ``http://localhost:8001`` and login with the default admin/admin credentials
Manual installation from sources
--------------------------------
The manual installation path is recommended if you want to dig into and inspect the openNPL code base or if you want to contribute to openNPL.
Dependencies / Requirements
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. note:: A Linux based system is recommended, but with minor tweaks it is in principle also possible to deploy on Windows systems
- openNPL requires a working Python 3 installation (including pip)
- Python >= 3.9
- Django >= 4.0
- The precise Python library dependencies are listed in the :doc:`requirements` file (requirements.txt)
- openNPL may work with earlier versions of these packages but this has not been tested
.. note:: The current User Interface (UI) of openNPL is desktop oriented and might not work properly in smaller (mobile) screens
Manual installation procedure
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Step 1. Download the github sources to your preferred directory:
.. code:: bash
git clone https://github.com/open-risk/openNPL
Step 2. Create a virtualenv. It is advisable to install the platform in a virtualenv so as not to interfere with your system's python distribution
.. code:: bash
virtualenv -p python3 venv
source venv/bin/activate
Step 3. Install the required dependencies (The core dependency is Django and its own dependencies, in addition the Grappelli skin as the admin interface)
.. code:: bash
pip3 install -r requirements.txt
Step 4. Make the required Django migrations. The project is set up to use sqlite3. This step will ensure the database has the right tables.
.. code:: bash
cd openNPL
python manage.py makemigrations
python manage.py migrate
Step 5. Create a superuser. Suggestion: Use admin/admin as login/password as a reminder that this instance of openNPL should NOT be used for anything remotely sensitive!
.. code:: bash
python3 manage.py createsuperuser
Step 6. Collect static files (to ensure the interface will render properly)
.. code:: bash
python3 manage.py collectstatic --no-input
Step 7. Insert some dummy data (optional). Without this the database will be empty.
.. code:: bash
bash loadfixtures.sh
Step 8. Run the server. The default port is 8000, but if (by any chance) that port is already in use on your computer another one will be assigned. Be sure to note the assigned port and use it instead.
.. code:: bash
python3 manage.py runserver
Step 9. Login with your browser. Finally, in your favorite browser (e.g. Firefox from Mozilla), enter the url ``http://localhost:8000`` and login with admin/admin credentials.
.. note:: 8000 is the default port, if that is already in use, you can select an alternative one as follows:
.. code:: bash
python3 manage.py runserver localhost:8081
Troubleshooting
~~~~~~~~~~~~~~~~~~~~~~
The above steps are typical Django project installation steps. If you experience trouble at any point, the `Django online FAQ <https://docs.djangoproject.com/en/3.1/faq/>`_ should help you out.
.. Note:: The project uses an sqlite3 database for good reason! If things go pear-shaped with your database simply remove the file and start again.
We welcome your feedback and support. Please raise a `github ticket <https://github.com/open-risk/openNPL/issues>`_ if you want to report a bug or need a new feature. For contributions check our Contribution and Code of Conduct docs.
|
PypiClean
|
/safegate_pro-2021.7.6-py3-none-any.whl/homeassistant/components/numato/switch.py
|
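"""Switch platform integration for Numato USB GPIO expanders."""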
import logging
from numato_gpio import NumatoGpioError
from homeassistant.const import (
CONF_DEVICES,
CONF_ID,
CONF_SWITCHES,
DEVICE_DEFAULT_NAME,
)
from homeassistant.helpers.entity import ToggleEntity
from . import CONF_INVERT_LOGIC, CONF_PORTS, DATA_API, DOMAIN
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the configured Numato USB GPIO switch ports."""
if discovery_info is None:
return
api = hass.data[DOMAIN][DATA_API]
switches = []
devices = hass.data[DOMAIN][CONF_DEVICES]
for device in [d for d in devices if CONF_SWITCHES in d]:
device_id = device[CONF_ID]
platform = device[CONF_SWITCHES]
invert_logic = platform[CONF_INVERT_LOGIC]
ports = platform[CONF_PORTS]
for port, port_name in ports.items():
try:
api.setup_output(device_id, port)
api.write_output(device_id, port, 1 if invert_logic else 0)
except NumatoGpioError as err:
_LOGGER.error(
"Failed to initialize switch '%s' on Numato device %s port %s: %s",
port_name,
device_id,
port,
err,
)
continue
switches.append(
NumatoGpioSwitch(
port_name,
device_id,
port,
invert_logic,
api,
)
)
add_entities(switches, True)
class NumatoGpioSwitch(ToggleEntity):
"""Representation of a Numato USB GPIO switch port."""
def __init__(self, name, device_id, port, invert_logic, api):
"""Initialize the port."""
self._name = name or DEVICE_DEFAULT_NAME
self._device_id = device_id
self._port = port
self._invert_logic = invert_logic
self._state = False
self._api = api
@property
def name(self):
"""Return the name of the switch."""
return self._name
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def is_on(self):
"""Return true if port is turned on."""
return self._state
def turn_on(self, **kwargs):
"""Turn the port on."""
try:
self._api.write_output(
self._device_id, self._port, 0 if self._invert_logic else 1
)
self._state = True
self.schedule_update_ha_state()
except NumatoGpioError as err:
_LOGGER.error(
"Failed to turn on Numato device %s port %s: %s",
self._device_id,
self._port,
err,
)
def turn_off(self, **kwargs):
"""Turn the port off."""
try:
self._api.write_output(
self._device_id, self._port, 1 if self._invert_logic else 0
)
self._state = False
self.schedule_update_ha_state()
except NumatoGpioError as err:
_LOGGER.error(
"Failed to turn off Numato device %s port %s: %s",
self._device_id,
self._port,
err,
)
|
PypiClean
|
/introspection-1.5.2.tar.gz/introspection-1.5.2/docs/build/html/_static/copybutton.js
|
const messages = {
'en': {
'copy': 'Copy',
'copy_to_clipboard': 'Copy to clipboard',
'copy_success': 'Copied!',
'copy_failure': 'Failed to copy',
},
'es' : {
'copy': 'Copiar',
'copy_to_clipboard': 'Copiar al portapapeles',
'copy_success': '¡Copiado!',
'copy_failure': 'Error al copiar',
},
'de' : {
'copy': 'Kopieren',
'copy_to_clipboard': 'In die Zwischenablage kopieren',
'copy_success': 'Kopiert!',
'copy_failure': 'Fehler beim Kopieren',
},
'fr' : {
'copy': 'Copier',
'copy_to_clipboard': 'Copié dans le presse-papier',
'copy_success': 'Copié !',
'copy_failure': 'Échec de la copie',
},
'ru': {
'copy': 'Скопировать',
'copy_to_clipboard': 'Скопировать в буфер',
'copy_success': 'Скопировано!',
'copy_failure': 'Не удалось скопировать',
},
'zh-CN': {
'copy': '复制',
'copy_to_clipboard': '复制到剪贴板',
'copy_success': '复制成功!',
'copy_failure': '复制失败',
}
}
let locale = 'en'
if( document.documentElement.lang !== undefined
&& messages[document.documentElement.lang] !== undefined ) {
locale = document.documentElement.lang
}
let doc_url_root = DOCUMENTATION_OPTIONS.URL_ROOT;
if (doc_url_root == '#') {
doc_url_root = '';
}
const path_static = `${doc_url_root}_static/`;
/**
* Set up copy/paste for code blocks
*/
const runWhenDOMLoaded = cb => {
if (document.readyState != 'loading') {
cb()
} else if (document.addEventListener) {
document.addEventListener('DOMContentLoaded', cb)
} else {
document.attachEvent('onreadystatechange', function() {
if (document.readyState == 'complete') cb()
})
}
}
const codeCellId = index => `codecell${index}`
// Clears selected text since ClipboardJS will select the text when copying
const clearSelection = () => {
if (window.getSelection) {
window.getSelection().removeAllRanges()
} else if (document.selection) {
document.selection.empty()
}
}
// Changes tooltip text for two seconds, then changes it back
const temporarilyChangeTooltip = (el, oldText, newText) => {
el.setAttribute('data-tooltip', newText)
el.classList.add('success')
setTimeout(() => el.setAttribute('data-tooltip', oldText), 2000)
setTimeout(() => el.classList.remove('success'), 2000)
}
// Changes the copy button icon for two seconds, then changes it back
const temporarilyChangeIcon = (el) => {
  const img = el.querySelector("img");
img.setAttribute('src', `${path_static}check-solid.svg`)
setTimeout(() => img.setAttribute('src', `${path_static}copy-button.svg`), 2000)
}
const addCopyButtonToCodeCells = () => {
// If ClipboardJS hasn't loaded, wait a bit and try again. This
// happens because we load ClipboardJS asynchronously.
if (window.ClipboardJS === undefined) {
setTimeout(addCopyButtonToCodeCells, 250)
return
}
// Add copybuttons to all of our code cells
const codeCells = document.querySelectorAll('div.highlight pre')
codeCells.forEach((codeCell, index) => {
const id = codeCellId(index)
codeCell.setAttribute('id', id)
const clipboardButton = id =>
`<button class="copybtn o-tooltip--left" data-tooltip="${messages[locale]['copy']}" data-clipboard-target="#${id}">
<img src="${path_static}copy-button.svg" alt="${messages[locale]['copy_to_clipboard']}">
</button>`
codeCell.insertAdjacentHTML('afterend', clipboardButton(id))
})
function escapeRegExp(string) {
return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string
}
// Callback when a copy button is clicked. Will be passed the node that was clicked
// should then grab the text and replace pieces of text that shouldn't be used in output
function formatCopyText(textContent, copybuttonPromptText, isRegexp = false, onlyCopyPromptLines = true, removePrompts = true, copyEmptyLines = true, lineContinuationChar = "", hereDocDelim = "") {
var regexp;
var match;
// Do we check for line continuation characters and "HERE-documents"?
var useLineCont = !!lineContinuationChar
var useHereDoc = !!hereDocDelim
// create regexp to capture prompt and remaining line
if (isRegexp) {
regexp = new RegExp('^(' + copybuttonPromptText + ')(.*)')
} else {
regexp = new RegExp('^(' + escapeRegExp(copybuttonPromptText) + ')(.*)')
}
const outputLines = [];
var promptFound = false;
var gotLineCont = false;
var gotHereDoc = false;
const lineGotPrompt = [];
for (const line of textContent.split('\n')) {
match = line.match(regexp)
if (match || gotLineCont || gotHereDoc) {
promptFound = regexp.test(line)
lineGotPrompt.push(promptFound)
if (removePrompts && promptFound) {
outputLines.push(match[2])
} else {
outputLines.push(line)
}
gotLineCont = line.endsWith(lineContinuationChar) & useLineCont
if (line.includes(hereDocDelim) & useHereDoc)
gotHereDoc = !gotHereDoc
} else if (!onlyCopyPromptLines) {
outputLines.push(line)
} else if (copyEmptyLines && line.trim() === '') {
outputLines.push(line)
}
}
// If no lines with the prompt were found then just use original lines
if (lineGotPrompt.some(v => v === true)) {
textContent = outputLines.join('\n');
}
// Remove a trailing newline to avoid auto-running when pasting
if (textContent.endsWith("\n")) {
textContent = textContent.slice(0, -1)
}
return textContent
}
var copyTargetText = (trigger) => {
var target = document.querySelector(trigger.attributes['data-clipboard-target'].value);
return formatCopyText(target.innerText, '>>> |\\.\\.\\. ', true, true, true, true, '', '')
}
// Initialize with a callback so we can modify the text before copy
const clipboard = new ClipboardJS('.copybtn', {text: copyTargetText})
// Update UI with error/success messages
clipboard.on('success', event => {
clearSelection()
temporarilyChangeTooltip(event.trigger, messages[locale]['copy'], messages[locale]['copy_success'])
temporarilyChangeIcon(event.trigger)
})
clipboard.on('error', event => {
temporarilyChangeTooltip(event.trigger, messages[locale]['copy'], messages[locale]['copy_failure'])
})
}
runWhenDOMLoaded(addCopyButtonToCodeCells)
|
PypiClean
|
/laskea-2022.11.7.tar.gz/laskea-2022.11.7/gen_licenses.py
|
import functools
import json
import pathlib
import pkg_resources
import string
import subprocess # nosec
from typing import List, Tuple
__all__ = ['dependency_tree_console_text', 'direct_dependencies_table', 'indirect_dependencies_table']
ENCODING = 'utf-8'
TP_PATH = pathlib.Path('docs', 'third-party')
TABLE_KEYS = (('Name', 'URL'), 'Version', 'License', 'Author', 'Description')
HEADER_LABELS = ('Name', 'Version', 'License', 'Author', 'Description (from packaging data)')
FALLBACK_URLS = {
'typing-extensions': 'https://github.com/python/typing/blob/master/typing_extensions/README.rst',
}
TARGET = """\
__version__ = '$version$+parent.$revision$'\
"""
@functools.lru_cache()
def _fetch_direct_dependency_names():
    """Parse requirements.txt into the list of direct dependency requirement strings."""
with pathlib.Path('requirements.txt').open(encoding=ENCODING) as requirements_txt:
install_requires = [
str(requirement)
for requirement
in pkg_resources.parse_requirements(requirements_txt)
]
return install_requires
def _generate_dependency_information() -> None:
"""Use pip-licenses for creation of diverse databases and graphs."""
install_requires = _fetch_direct_dependency_names()
tokens = set(list(string.ascii_letters + '-_'))
direct_names = [''.join(c for c in term if c in tokens) for term in install_requires]
direct_vector = [
'pip-licenses', '--format', 'json', '-p', *direct_names,
'--with-authors', '--with-description', '--with-urls', '--with-license-file', '--with-notice-file',
'--output-file', str(TP_PATH / 'direct-dependency-licenses.json')]
noise = subprocess.run(direct_vector, capture_output=True, encoding=ENCODING, text=True, check=True).stdout.strip() # nosec
if not noise.startswith('created path: ') or not noise.endswith('direct-dependency-licenses.json'):
raise RuntimeError(noise)
indirect_names = [ # TODO(sthagen) these indirect deps may diverge ...
'appdirs',
'attrs',
'cattrs',
'certifi',
'charset-normalizer',
'click',
'deprecated',
'idna',
'oauthlib',
'requests',
'requests-oauthlib',
'six',
'typing-extensions',
'url-normalize',
'urllib3',
'wrapt',
]
full_vector = [
'pip-licenses', '--format', 'json', '-p', *direct_names, *indirect_names,
'--with-authors', '--with-description', '--with-urls', '--with-license-file', '--with-notice-file',
'--output-file', str(TP_PATH / 'all-dependency-licenses.json')]
noise = subprocess.run(full_vector, capture_output=True, encoding=ENCODING, text=True, check=True).stdout.strip() # nosec
if not noise.startswith('created path: ') or not noise.endswith('all-dependency-licenses.json'):
raise RuntimeError(noise)
base_vector = ['pipdeptree', '--packages', ','.join(direct_names)]
jobs = (
(TP_PATH / 'package-dependency-tree.dot.txt', base_vector + ['--graph-output', 'dot']),
(TP_PATH / 'package-dependency-tree.svg', base_vector + ['--graph-output', 'svg']),
(TP_PATH / 'package-dependency-tree.json', base_vector + ['--json-tree', '--warn', 'silence']),
(TP_PATH / 'package-dependency-tree.console.txt', base_vector + ['--warn', 'silence']),
)
for target, vector in jobs:
plot = subprocess.run(vector, capture_output=True, encoding=ENCODING, text=True, check=True).stdout.strip() # nosec
target.write_text(plot, encoding=ENCODING)
@functools.lru_cache()
def _fetch_dependencies(direct_only: bool = True):
    """Load the dependency license database previously produced by pip-licenses."""
db = 'direct-dependency-licenses.json' if direct_only else 'all-dependency-licenses.json'
dep_json_path = pathlib.Path('docs', 'third-party') / db
with open(dep_json_path, 'rt', encoding=ENCODING) as handle:
data = json.load(handle)
return data
def _markdown_table(table: List[Tuple[str, str, str, str, str]], header_labels=HEADER_LABELS) -> str:
"""Create the gfm table as string."""
columns = header_labels
col_wid = {key: len(key) for key in columns}
for record in table:
for key, cell in zip(columns, record):
col_wid[key] = max(len(cell), col_wid[key])
header_cells = [key.ljust(col_wid[key]) for key in columns]
header = f'| {" | ".join(header_cells)} |'
separator_cells = ['-' * (col_wid[key] + 1) for key in columns]
separator = f'|:{"|:".join(separator_cells)}|'
rows = [f'| {" | ".join(str(v).ljust(col_wid[k]) for k, v in zip(columns, line))} |' for line in table]
return '\n'.join([header] + [separator] + rows)
def _extract_rows(data):
    """Convert pip-licenses records into sorted markdown table rows."""
rows = []
for record in data:
nam = record['Name']
url = record.get('URL', '')
if url == 'UNKNOWN':
url = FALLBACK_URLS.get(nam, '')
nam_e = f'[{nam}]({url})' if url else nam
ver = record['Version']
ver_sion = f'[{ver}](https://pypi.org/project/{nam}/{ver}/)'
lic = record['License']
aut = record['Author']
des = record['Description']
rows.append((nam_e, ver_sion, lic, aut, des))
rows.sort()
return rows
def direct_dependencies_table() -> None:
"""Fill in the data from the direct dependencies."""
_generate_dependency_information()
print(_markdown_table(_extract_rows(_fetch_dependencies(direct_only=True))))
def indirect_dependencies_table() -> None:
"""Fill in the data from the indirect dependencies."""
direct_data = _fetch_dependencies(direct_only=True)
direct_names = tuple(record['Name'] for record in direct_data)
indirect_only_data = [rec for rec in _fetch_dependencies(direct_only=False) if rec['Name'] not in direct_names]
print(_markdown_table(_extract_rows(indirect_only_data)))
def dependency_tree_console_text():
"""Fill in the pipdeptree console output minus any warnings."""
console_tree = (TP_PATH / 'package-dependency-tree.console.txt').read_text(encoding=ENCODING).strip()
fence = '````'
print(f'{fence}console')
print(console_tree)
print(fence)
|
PypiClean
|
/manimlib-0.2.0.tar.gz/manimlib-0.2.0/from_3b1b/active/diffyq/part1/pi_scenes.py
|
from manimlib.imports import *
from active_projects.diffyq.part1.shared_constructs import *
class SomeOfYouWatching(TeacherStudentsScene):
CONFIG = {
"camera_config": {
"background_color": DARKER_GREY,
}
}
def construct(self):
screen = self.screen
screen.scale(1.25, about_edge=UL)
screen.set_fill(BLACK, 1)
self.add(screen)
self.teacher.change("raise_right_hand")
for student in self.students:
student.change("pondering", screen)
self.student_says(
"Well...yeah",
target_mode="tease"
)
self.wait(3)
class FormulasAreLies(PiCreatureScene):
def construct(self):
you = self.pi_creature
t2c = {
"{L}": BLUE,
"{g}": YELLOW,
"\\theta_0": WHITE,
"\\sqrt{\\,": WHITE,
}
kwargs = {"tex_to_color_map": t2c}
period_eq = TexMobject(
"\\text{Period} = 2\\pi \\sqrt{\\,{L} / {g}}",
**kwargs
)
theta_eq = TexMobject(
"\\theta(t) = \\theta_0 \\cos\\left("
"\\sqrt{\\,{L} / {g}} \\cdot t"
"\\right)",
**kwargs
)
equations = VGroup(theta_eq, period_eq)
equations.arrange(DOWN, buff=LARGE_BUFF)
for eq in period_eq, theta_eq:
i = eq.index_of_part_by_tex("\\sqrt")
eq.sqrt_part = eq[i:i + 4]
theta0 = theta_eq.get_part_by_tex("\\theta_0")
theta0_words = TextMobject("Starting angle")
theta0_words.next_to(theta0, UL)
theta0_words.shift(UP + 0.5 * RIGHT)
arrow = Arrow(
theta0_words.get_bottom(),
theta0,
color=WHITE,
tip_length=0.25,
)
bubble = SpeechBubble()
bubble.pin_to(you)
bubble.write("Lies!")
bubble.content.scale(2)
bubble.resize_to_content()
self.add(period_eq)
you.change("pondering", period_eq)
self.wait()
theta_eq.remove(*theta_eq.sqrt_part)
self.play(
TransformFromCopy(
period_eq.sqrt_part,
theta_eq.sqrt_part,
),
FadeIn(theta_eq)
)
theta_eq.add(*theta_eq.sqrt_part)
self.play(
FadeInFrom(theta0_words, LEFT),
GrowArrow(arrow),
)
self.wait()
self.play(you.change, "confused")
self.wait()
self.play(
you.change, "angry",
ShowCreation(bubble),
FadeInFromPoint(bubble.content, you.mouth),
equations.to_edge, LEFT,
FadeOut(arrow),
FadeOut(theta0_words),
)
self.wait()
def create_pi_creature(self):
return You().flip().to_corner(DR)
# class TourOfDifferentialEquations(Scene):
# def construct(self):
# pass
class SoWhatIsThetaThen(TeacherStudentsScene):
def construct(self):
ode = get_ode()
ode.to_corner(UL)
self.add(ode)
self.student_says(
"Okay, but then\\\\"
"what \\emph{is} $\\theta(t)$?"
)
self.wait()
self.play(self.teacher.change, "happy")
self.wait(2)
self.teacher_says(
"First, you must appreciate\\\\"
"a deep truth...",
added_anims=[self.get_student_changes(
*3 * ["confused"]
)]
)
self.wait(4)
class ProveTeacherWrong(TeacherStudentsScene):
def construct(self):
tex_config = {
"tex_to_color_map": {
"{\\theta}": BLUE,
"{\\dot\\theta}": YELLOW,
"{\\ddot\\theta}": RED,
}
}
func = TexMobject(
"{\\theta}(t)", "=",
"\\theta_0", "\\cos(\\sqrt{g / L} \\cdot t)",
**tex_config,
)
d_func = TexMobject(
"{\\dot\\theta}(t)", "=",
"-\\left(\\sqrt{g / L}\\right)",
"\\theta_0", "\\sin(\\sqrt{g / L} \\cdot t)",
**tex_config,
)
dd_func = TexMobject(
"{\\ddot\\theta}(t)", "=",
"-\\left(g / L\\right)",
"\\theta_0", "\\cos(\\sqrt{g / L} \\cdot t)",
**tex_config,
)
# ode = TexMobject(
# "\\ddot {\\theta}({t})", "=",
# "-\\mu \\dot {\\theta}({t})",
# "-{g \\over L} \\sin\\big({\\theta}({t})\\big)",
# **tex_config,
# )
ode = get_ode()
arrows = [TexMobject("\\Downarrow") for x in range(2)]
VGroup(func, d_func, dd_func, ode, *arrows).scale(0.7)
teacher = self.teacher
you = self.students[2]
self.student_thinks(ode)
you.add_updater(lambda m: m.look_at(func))
self.teacher_holds_up(func)
self.wait()
group = VGroup(arrows[0], d_func, arrows[1], dd_func)
group.arrange(DOWN)
group.move_to(func, DOWN)
arrow = Arrow(
group.get_corner(UL),
ode.get_top(),
path_arc=PI / 2,
)
q_marks = VGroup(*[
TexMobject("?").scale(1.5).next_to(
arrow.point_from_proportion(a),
UP
)
for a in np.linspace(0.2, 0.8, 5)
])
cycle_animation(VFadeInThenOut(
q_marks,
lag_ratio=0.2,
run_time=4,
rate_func=squish_rate_func(smooth, 0, 0.5)
))
self.play(
func.next_to, group, UP,
LaggedStartMap(
FadeInFrom, group,
lambda m: (m, UP)
),
teacher.change, "guilty",
you.change, "sassy",
)
rect = SurroundingRectangle(
VGroup(group, func)
)
dashed_rect = DashedVMobject(rect, num_dashes=75)
animated_rect = AnimatedBoundary(dashed_rect, cycle_rate=1)
self.wait()
self.add(animated_rect, q_marks)
self.play(
ShowCreation(arrow),
# FadeInFromDown(q_mark),
self.get_student_changes("confused", "confused")
)
self.wait(4)
self.change_student_modes(
*3 * ["pondering"],
self.teacher.change, "maybe"
)
self.wait(8)
class PhysicistPhaseSpace(PiCreatureScene):
def construct(self):
physy = self.pi_creature
name = TextMobject("Physicist")
name.scale(1.5)
name.to_corner(DL, buff=MED_SMALL_BUFF)
physy.next_to(name, UP, SMALL_BUFF)
VGroup(name, physy).shift_onto_screen()
axes = Axes(
x_min=-1,
x_max=10,
y_min=-1,
y_max=7,
)
axes.set_height(6)
axes.next_to(physy, RIGHT)
axes.to_edge(UP)
axes.set_stroke(width=1)
x_label = TextMobject("Position")
x_label.next_to(axes.x_axis.get_right(), UP)
y_label = TextMobject("Momentum")
y_label.next_to(axes.y_axis.get_top(), RIGHT)
title = TextMobject("Phase space")
title.scale(1.5)
title.set_color(YELLOW)
title.move_to(axes)
self.add(name, physy)
self.play(
physy.change, "angry",
Write(axes),
FadeInFromDown(title)
)
self.wait(2)
self.play(
GrowFromPoint(x_label, physy.get_corner(UR)),
physy.change, "raise_right_hand",
axes.x_axis.get_right()
)
self.play(
GrowFromPoint(y_label, physy.get_corner(UR)),
physy.look_at, axes.y_axis.get_top(),
)
self.wait(3)
def create_pi_creature(self):
return PiCreature(color=GREY).to_corner(DL)
class AskAboutActuallySolving(TeacherStudentsScene):
def construct(self):
ode = get_ode()
ode.to_corner(UL)
self.add(ode)
morty = self.teacher
self.student_says(
"Yeah yeah, but how do\\\\"
"you actually \\emph{solve} it?",
student_index=1,
target_mode="sassy",
added_anims=[morty.change, "thinking"],
)
self.change_student_modes(
"confused", "sassy", "confused",
look_at_arg=ode,
)
self.wait()
self.teacher_says(
"What do you mean\\\\ by ``solve''?",
target_mode="speaking",
added_anims=[self.get_student_changes(
*3 * ["erm"]
)]
)
self.play(self.students[1].change, "angry")
self.wait(3)
class HungerForExactness(TeacherStudentsScene):
def construct(self):
students = self.students
you = students[2]
teacher = self.teacher
ode = get_ode()
ode.to_corner(UL)
left_part = ode[:5]
friction_part = ode[5:11]
self.add(ode)
proposed_solution = TexMobject(
"\\theta_0\\cos((\\sqrt{g/L})t)e^{-\\mu t}"
)
proposed_solution.next_to(
you.get_corner(UL), UP, buff=0.7
)
proposed_solution_rect = SurroundingRectangle(
proposed_solution, buff=MED_SMALL_BUFF,
)
proposed_solution_rect.set_color(BLUE)
proposed_solution_rect.round_corners()
solution_p1 = TexMobject(
"""
\\theta(t) = 2\\text{am}\\left(
\\frac{\\sqrt{2g + Lc_1} (t + c_2)}{2\\sqrt{L}},
\\frac{4g}{2g + Lc_1}
\\right)
""",
)
solution_p1.to_corner(UL)
solution_p2 = TexMobject(
"c_1, c_2 = \\text{Constants depending on initial conditions}"
)
solution_p2.set_color(LIGHT_GREY)
solution_p2.scale(0.75)
solution_p3 = TexMobject(
"""
\\text{am}(u, k) =
\\int_0^u \\text{dn}(v, k)\\,dv
"""
)
solution_p3.name = TextMobject(
"(Jacobi amplitude function)"
)
solution_p4 = TexMobject(
"""
\\text{dn}(u, k) =
\\sqrt{1 - k^2 \\sin^2(\\phi)}
"""
)
solution_p4.name = TextMobject(
"(Jacobi elliptic function)"
)
solution_p5 = TextMobject("Where $\\phi$ satisfies")
solution_p6 = TexMobject(
"""
u = \\int_0^\\phi \\frac{dt}{\\sqrt{1 - k^2 \\sin^2(t)}}
"""
)
solution = VGroup(
solution_p1,
solution_p2,
solution_p3,
solution_p4,
solution_p5,
solution_p6,
)
solution.arrange(DOWN)
solution.scale(0.7)
solution.to_corner(UL, buff=MED_SMALL_BUFF)
solution.set_stroke(width=0, background=True)
solution.remove(solution_p2)
solution_p1.add(solution_p2)
solution.remove(solution_p5)
solution_p6.add(solution_p5)
for part in [solution_p3, solution_p4]:
part.name.scale(0.7 * 0.7)
part.name.set_color(LIGHT_GREY)
part.name.next_to(part, RIGHT)
part.add(part.name)
self.student_says(
"Right, but like,\\\\"
"what \\emph{is} $\\theta(t)$?",
target_mode="sassy",
added_anims=[teacher.change, "guilty"],
)
self.wait()
self.play(
FadeInFromDown(proposed_solution),
RemovePiCreatureBubble(
you,
target_mode="raise_left_hand",
look_at_arg=proposed_solution,
),
teacher.change, "pondering",
students[0].change, "pondering",
students[1].change, "hesitant",
)
self.play(ShowCreation(proposed_solution_rect))
self.play(
proposed_solution.shift, 3 * RIGHT,
proposed_solution_rect.shift, 3 * RIGHT,
you.change, "raise_right_hand", teacher.eyes,
)
self.wait(3)
self.play(
FadeOut(proposed_solution),
FadeOut(proposed_solution_rect),
ode.move_to, self.hold_up_spot, DOWN,
ode.shift, LEFT,
teacher.change, "raise_right_hand",
self.get_student_changes(*3 * ["pondering"])
)
self.wait()
ode.save_state()
self.play(
left_part.move_to, friction_part, RIGHT,
left_part.match_y, left_part,
friction_part.to_corner, DR,
friction_part.fade, 0.5,
)
self.wait()
modes = ["erm", "sad", "sad", "horrified"]
for part, mode in zip(solution, modes):
self.play(
FadeInFrom(part, UP),
self.get_student_changes(
*3 * [mode],
look_at_arg=part,
)
)
self.wait()
self.wait(3)
self.change_student_modes("tired", "sad", "concerned_musician")
self.wait(4)
self.look_at(solution)
self.wait(5)
self.play(
FadeOutAndShift(solution, 2 * LEFT),
Restore(ode),
self.get_student_changes(
"sick", "angry", "tired",
)
)
self.wait(3)
mystery = TexMobject(
"\\theta(t) = ???",
tex_to_color_map={"\\theta": BLUE},
)
mystery.scale(2)
mystery.to_edge(UP)
mystery.set_stroke(width=0, background=True)
mystery_boundary = AnimatedBoundary(
mystery, stroke_width=1
)
self.play(
FadeInFromDown(mystery),
self.teacher.change, "pondering"
)
self.add(mystery_boundary, mystery)
self.change_all_student_modes("sad")
self.look_at(mystery)
self.wait(5)
# Define
self.student_says(
"Let $\\text{P}(\\mu, g, L; t)$ be a\\\\"
"function satisfying this ODE.",
student_index=0,
target_mode="speaking",
added_anims=[
FadeOut(mystery),
FadeOut(mystery_boundary),
ode.to_corner, UR
]
)
self.change_student_modes(
"hooray", "sassy", "sassy",
look_at_arg=students[0].eyes.get_corner(UR),
)
self.wait(2)
class ItGetsWorse(TeacherStudentsScene):
def construct(self):
self.teacher_says("It gets\\\\worse")
self.change_student_modes(
"hesitant", "pleading", "erm"
)
self.wait(5)
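# Rendering note (a sketch, not part of the original file): scenes such as
# ItGetsWorse are typically rendered from the command line with something like
#
#   manim pi_scenes.py ItGetsWorse -pl
#
# where -p previews the output and -l selects low quality. The exact entry point
# and flags depend on the installed manim build, so treat this invocation as an
# assumption rather than a documented interface of this package.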
|
PypiClean
|
/fake_bpy_module_3.1-20230117-py3-none-any.whl/mathutils/__init__.py
|
import sys
import typing
from . import kdtree
from . import geometry
from . import noise
from . import bvhtree
from . import interpolate
GenericType = typing.TypeVar("GenericType")
class Color:
''' This object gives access to Colors in Blender.
:param rgb: (r, g, b) color values
:type rgb: 3d vector
'''
b: float = None
''' Blue color channel.
:type: float
'''
g: float = None
''' Green color channel.
:type: float
'''
h: float = None
''' HSV Hue component in [0, 1].
:type: float
'''
hsv: typing.Union[typing.Sequence[float], 'Vector'] = None
''' HSV Values in [0, 1].
:type: typing.Union[typing.Sequence[float], 'Vector']
'''
is_frozen: bool = None
''' True when this object has been frozen (read-only).
:type: bool
'''
is_valid: bool = None
''' True when the owner of this data is valid.
:type: bool
'''
is_wrapped: bool = None
''' True when this object wraps external data (read-only).
:type: bool
'''
owner = None
''' The item this is wrapping or None (read-only).'''
r: float = None
''' Red color channel.
:type: float
'''
s: float = None
''' HSV Saturation component in [0, 1].
:type: float
'''
v: float = None
''' HSV Value component in [0, 1].
:type: float
'''
@staticmethod
def copy() -> 'Color':
''' Returns a copy of this color.
:rtype: 'Color'
:return: A copy of the color.
'''
pass
@staticmethod
def freeze():
''' Make this object immutable. After this the object can be hashed, used in dictionaries & sets.
'''
pass
def __init__(self, rgb=(0.0, 0.0, 0.0)) -> typing.Any:
'''
:rtype: typing.Any
'''
pass
def __add__(self, other: typing.Union[typing.Sequence[float], 'Color']
) -> 'Color':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Color']
:rtype: 'Color'
'''
pass
def __sub__(self, other: typing.Union[typing.Sequence[float], 'Color']
) -> 'Color':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Color']
:rtype: 'Color'
'''
pass
def __mul__(self, other: typing.Union[int, float]) -> 'Color':
'''
:param other:
:type other: typing.Union[int, float]
:rtype: 'Color'
'''
pass
def __truediv__(self, other: typing.Union[int, float]) -> 'Color':
'''
:param other:
:type other: typing.Union[int, float]
:rtype: 'Color'
'''
pass
def __radd__(self, other: typing.Union[typing.Sequence[float], 'Color']
) -> 'Color':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Color']
:rtype: 'Color'
'''
pass
def __rsub__(self, other: typing.Union[typing.Sequence[float], 'Color']
) -> 'Color':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Color']
:rtype: 'Color'
'''
pass
def __rmul__(self, other: typing.Union[int, float]) -> 'Color':
'''
:param other:
:type other: typing.Union[int, float]
:rtype: 'Color'
'''
pass
def __rtruediv__(self, other: typing.Union[int, float]) -> 'Color':
'''
:param other:
:type other: typing.Union[int, float]
:rtype: 'Color'
'''
pass
def __iadd__(self, other: typing.Union[typing.Sequence[float], 'Color']
) -> 'Color':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Color']
:rtype: 'Color'
'''
pass
def __isub__(self, other: typing.Union[typing.Sequence[float], 'Color']
) -> 'Color':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Color']
:rtype: 'Color'
'''
pass
def __imul__(self, other: typing.Union[int, float]) -> 'Color':
'''
:param other:
:type other: typing.Union[int, float]
:rtype: 'Color'
'''
pass
def __itruediv__(self, other: typing.Union[int, float]) -> 'Color':
'''
:param other:
:type other: typing.Union[int, float]
:rtype: 'Color'
'''
pass
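# Usage sketch for Color (illustrative; not part of the generated stub). The RGB
# and HSV channels are two views of the same underlying color:
#
#   from mathutils import Color
#   c = Color((1.0, 0.0, 0.0))   # pure red
#   c.h, c.s, c.v                # -> (0.0, 1.0, 1.0)
#   c.s = 0.5                    # desaturating via HSV also updates c.g and c.b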
class Euler:
''' This object gives access to Eulers in Blender.
:param angles: Three angles, in radians.
:type angles: 3d vector
:param order: Optional order of the angles, a permutation of ``XYZ``.
:type order: str
'''
is_frozen: bool = None
''' True when this object has been frozen (read-only).
:type: bool
'''
is_valid: bool = None
''' True when the owner of this data is valid.
:type: bool
'''
is_wrapped: bool = None
''' True when this object wraps external data (read-only).
:type: bool
'''
order: str = None
''' Euler rotation order.
:type: str
'''
owner = None
''' The item this is wrapping or None (read-only).'''
x: float = None
''' Euler axis angle in radians.
:type: float
'''
y: float = None
''' Euler axis angle in radians.
:type: float
'''
z: float = None
''' Euler axis angle in radians.
:type: float
'''
@staticmethod
def copy() -> 'Euler':
''' Returns a copy of this euler.
:rtype: 'Euler'
:return: A copy of the euler.
'''
pass
@staticmethod
def freeze():
''' Make this object immutable. After this the object can be hashed, used in dictionaries & sets.
'''
pass
def make_compatible(self, other):
''' Make this euler compatible with another, so interpolating between them works as intended.
'''
pass
def rotate(self,
other: typing.Union[typing.Sequence[float], 'Euler', typing.
Sequence[float], 'Quaternion', typing.
Sequence[float], 'Matrix']):
''' Rotates the euler by another mathutils value.
:param other: rotation component of mathutils value
:type other: typing.Union[typing.Sequence[float], 'Euler', typing.Sequence[float], 'Quaternion', typing.Sequence[float], 'Matrix']
'''
pass
def rotate_axis(self, axis: str, angle: float):
''' Rotates the euler a certain amount, returning a unique euler rotation (no 720 degree pitches).
:param axis: single character in ['X', 'Y', 'Z'].
:type axis: str
:param angle: angle in radians.
:type angle: float
'''
pass
def to_matrix(self) -> 'Matrix':
''' Return a matrix representation of the euler.
:rtype: 'Matrix'
:return: A 3x3 rotation matrix representation of the euler.
'''
pass
def to_quaternion(self) -> 'Quaternion':
''' Return a quaternion representation of the euler.
:rtype: 'Quaternion'
:return: Quaternion representation of the euler.
'''
pass
def zero(self):
''' Set all values to zero.
'''
pass
def __init__(self, angles=(0.0, 0.0, 0.0), order='XYZ') -> typing.Any:
'''
:rtype: typing.Any
'''
pass
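# Usage sketch for Euler (illustrative; not part of the generated stub):
#
#   import math
#   from mathutils import Euler
#   eul = Euler((0.0, 0.0, math.radians(90.0)), 'XYZ')  # 90 degrees around Z
#   quat = eul.to_quaternion()                          # the same rotation as a Quaternion
#   mat = eul.to_matrix()                               # ... or as a 3x3 Matrix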
class Matrix:
''' This object gives access to Matrices in Blender, supporting square and rectangular matrices from 2x2 up to 4x4.
:param rows: Sequence of rows. When omitted, a 4x4 identity matrix is constructed.
:type rows: 2d number sequence
'''
col: 'Matrix' = None
''' Access the matrix by columns, 3x3 and 4x4 only, (read-only).
:type: 'Matrix'
'''
is_frozen: bool = None
''' True when this object has been frozen (read-only).
:type: bool
'''
is_negative: bool = None
''' True if this matrix results in a negative scale, 3x3 and 4x4 only, (read-only).
:type: bool
'''
is_orthogonal: bool = None
''' True if this matrix is orthogonal, 3x3 and 4x4 only, (read-only).
:type: bool
'''
is_orthogonal_axis_vectors: bool = None
''' True if this matrix has got orthogonal axis vectors, 3x3 and 4x4 only, (read-only).
:type: bool
'''
is_valid: bool = None
''' True when the owner of this data is valid.
:type: bool
'''
is_wrapped: bool = None
''' True when this object wraps external data (read-only).
:type: bool
'''
median_scale: float = None
''' The average scale applied to each axis (read-only).
:type: float
'''
owner = None
''' The item this is wrapping or None (read-only).'''
row: 'Matrix' = None
''' Access the matrix by rows (default), (read-only).
:type: 'Matrix'
'''
translation: 'Vector' = None
''' The translation component of the matrix.
:type: 'Vector'
'''
@classmethod
def Diagonal(cls, vector: typing.Union[typing.Sequence[float], 'Vector']
) -> 'Matrix':
''' Create a diagonal (scaling) matrix using the values from the vector.
:param vector: The vector of values for the diagonal.
:type vector: typing.Union[typing.Sequence[float], 'Vector']
:rtype: 'Matrix'
:return: A diagonal matrix.
'''
pass
@classmethod
def Identity(cls, size: int) -> 'Matrix':
''' Create an identity matrix.
:param size: The size of the identity matrix to construct [2, 4].
:type size: int
:rtype: 'Matrix'
:return: A new identity matrix.
'''
pass
@classmethod
def LocRotScale(
cls, location: typing.Optional['Vector'],
rotation: typing.Union[typing.Sequence[float], 'Quaternion', typing
.Sequence[float], 'Euler'],
scale: typing.Optional['Vector']) -> 'Matrix':
''' Create a matrix combining translation, rotation and scale, acting as the inverse of the decompose() method. Any of the inputs may be replaced with None if not needed.
:param location: The translation component.
:type location: typing.Optional['Vector']
:param rotation: The rotation component.
:type rotation: typing.Union[typing.Sequence[float], 'Quaternion', typing.Sequence[float], 'Euler']
:param scale: The scale component.
:type scale: typing.Optional['Vector']
:rtype: 'Matrix'
:return: Combined transformation matrix.
'''
pass
@classmethod
def OrthoProjection(
cls, axis: typing.Union[str, typing.Sequence[float], 'Vector'],
size: int) -> 'Matrix':
''' Create a matrix to represent an orthographic projection.
:param axis: ['X', 'Y', 'XY', 'XZ', 'YZ'], where a single axis is for a 2D matrix. Or a vector for an arbitrary axis
:type axis: typing.Union[str, typing.Sequence[float], 'Vector']
:param size: The size of the projection matrix to construct [2, 4].
:type size: int
:rtype: 'Matrix'
:return: A new projection matrix.
'''
pass
@classmethod
def Rotation(cls, angle: float, size: int,
axis: typing.Union[str, typing.Sequence[float], 'Vector']
) -> 'Matrix':
''' Create a matrix representing a rotation.
:param angle: The angle of rotation desired, in radians.
:type angle: float
:param size: The size of the rotation matrix to construct [2, 4].
:type size: int
:param axis: a string in ['X', 'Y', 'Z'] or a 3D Vector Object (optional when size is 2).
:type axis: typing.Union[str, typing.Sequence[float], 'Vector']
:rtype: 'Matrix'
:return: A new rotation matrix.
'''
pass
@classmethod
def Scale(
cls, factor: float, size: int,
axis: typing.Union[typing.Sequence[float], 'Vector']) -> 'Matrix':
''' Create a matrix representing a scaling.
:param factor: The factor of scaling to apply.
:type factor: float
:param size: The size of the scale matrix to construct [2, 4].
:type size: int
:param axis: Direction to influence scale. (optional).
:type axis: typing.Union[typing.Sequence[float], 'Vector']
:rtype: 'Matrix'
:return: A new scale matrix.
'''
pass
@classmethod
def Shear(cls, plane: str, size: int, factor: float) -> 'Matrix':
''' Create a matrix to represent a shear transformation.
:param plane: ['X', 'Y', 'XY', 'XZ', 'YZ'], where a single axis is for a 2D matrix only.
:type plane: str
:param size: The size of the shear matrix to construct [2, 4].
:type size: int
:param factor: The factor of shear to apply. For a 3 or 4 *size* matrix pass a pair of floats corresponding with the *plane* axis.
:type factor: float
:rtype: 'Matrix'
:return: A new shear matrix.
'''
pass
@classmethod
def Translation(cls, vector: typing.Union[typing.Sequence[float], 'Vector']
) -> 'Matrix':
''' Create a matrix representing a translation.
:param vector: The translation vector.
:type vector: typing.Union[typing.Sequence[float], 'Vector']
:rtype: 'Matrix'
:return: An identity matrix with a translation.
'''
pass
def adjugate(self):
''' Set the matrix to its adjugate. :raises ValueError: if the matrix cannot be adjugated.
'''
pass
def adjugated(self) -> 'Matrix':
''' Return an adjugated copy of the matrix. :raises ValueError: if the matrix cannot be adjugated
:rtype: 'Matrix'
:return: the adjugated matrix.
'''
pass
def copy(self) -> 'Matrix':
''' Returns a copy of this matrix.
:rtype: 'Matrix'
:return: an instance of itself
'''
pass
def decompose(self) -> typing.Tuple['Vector', 'Quaternion', 'Vector']:
''' Return the translation, rotation, and scale components of this matrix.
:rtype: typing.Tuple['Vector', 'Quaternion', 'Vector']
:return: tuple of translation, rotation, and scale
'''
pass
def determinant(self) -> float:
''' Return the determinant of a matrix.
:rtype: float
:return: Return the determinant of a matrix.
'''
pass
@staticmethod
def freeze():
''' Make this object immutable. After this the object can be hashed, used in dictionaries & sets.
'''
pass
def identity(self):
''' Set the matrix to the identity matrix.
'''
pass
def invert(
self,
fallback: typing.Union[typing.Sequence[float], 'Matrix'] = None):
''' Set the matrix to its inverse.
:param fallback: Set the matrix to this value when the inverse cannot be calculated (instead of raising a :exc:`ValueError` exception).
:type fallback: typing.Union[typing.Sequence[float], 'Matrix']
'''
pass
def invert_safe(self):
''' Set the matrix to its inverse, will never error. If degenerated (e.g. zero scale on an axis), add some epsilon to its diagonal, to get an invertible one. If tweaked matrix is still degenerated, set to the identity matrix instead.
'''
pass
def inverted(self, fallback: typing.Any = None) -> 'Matrix':
''' Return an inverted copy of the matrix.
:param fallback: return this when the inverse can't be calculated (instead of raising a :exc:`ValueError`).
:type fallback: typing.Any
:rtype: 'Matrix'
:return: the inverted matrix or fallback when given.
'''
pass
def inverted_safe(self) -> 'Matrix':
''' Return an inverted copy of the matrix, will never error. If degenerated (e.g. zero scale on an axis), add some epsilon to its diagonal, to get an invertible one. If tweaked matrix is still degenerated, return the identity matrix instead.
:rtype: 'Matrix'
:return: the inverted matrix.
'''
pass
@staticmethod
def lerp(other: typing.Union[typing.Sequence[float], 'Matrix'],
factor: float) -> 'Matrix':
''' Returns the interpolation of two matrices. Uses polar decomposition, see "Matrix Animation and Polar Decomposition", Shoemake and Duff, 1992.
:param other: value to interpolate with.
:type other: typing.Union[typing.Sequence[float], 'Matrix']
:param factor: The interpolation value in [0.0, 1.0].
:type factor: float
:rtype: 'Matrix'
:return: The interpolated matrix.
'''
pass
def normalize(self):
''' Normalize each of the matrix columns.
'''
pass
def normalized(self) -> 'Matrix':
''' Return a column normalized matrix
:rtype: 'Matrix'
:return: a column normalized matrix
'''
pass
def resize_4x4(self):
''' Resize the matrix to 4x4.
'''
pass
def rotate(self,
other: typing.Union[typing.Sequence[float], 'Euler', typing.
Sequence[float], 'Quaternion', typing.
Sequence[float], 'Matrix']):
''' Rotates the matrix by another mathutils value.
:param other: rotation component of mathutils value
:type other: typing.Union[typing.Sequence[float], 'Euler', typing.Sequence[float], 'Quaternion', typing.Sequence[float], 'Matrix']
'''
pass
def to_2x2(self) -> 'Matrix':
''' Return a 2x2 copy of this matrix.
:rtype: 'Matrix'
:return: a new matrix.
'''
pass
def to_3x3(self) -> 'Matrix':
''' Return a 3x3 copy of this matrix.
:rtype: 'Matrix'
:return: a new matrix.
'''
pass
def to_4x4(self) -> 'Matrix':
''' Return a 4x4 copy of this matrix.
:rtype: 'Matrix'
:return: a new matrix.
'''
pass
def to_euler(self, order: str,
euler_compat: typing.Union[typing.Sequence[float], 'Euler']
) -> 'Euler':
''' Return an Euler representation of the rotation matrix (3x3 or 4x4 matrix only).
:param order: Optional rotation order argument in ['XYZ', 'XZY', 'YXZ', 'YZX', 'ZXY', 'ZYX'].
:type order: str
:param euler_compat: Optional euler argument the new euler will be made compatible with (no axis flipping between them). Useful for converting a series of matrices to animation curves.
:type euler_compat: typing.Union[typing.Sequence[float], 'Euler']
:rtype: 'Euler'
:return: Euler representation of the matrix.
'''
pass
def to_quaternion(self) -> 'Quaternion':
''' Return a quaternion representation of the rotation matrix.
:rtype: 'Quaternion'
:return: Quaternion representation of the rotation matrix.
'''
pass
def to_scale(self) -> 'Vector':
''' Return the scale part of a 3x3 or 4x4 matrix.
:rtype: 'Vector'
:return: Return the scale of a matrix.
'''
pass
def to_translation(self) -> 'Vector':
''' Return the translation part of a 4 row matrix.
:rtype: 'Vector'
:return: Return the translation of a matrix.
'''
pass
def transpose(self):
''' Set the matrix to its transpose.
'''
pass
def transposed(self) -> 'Matrix':
''' Return a new, transposed matrix.
:rtype: 'Matrix'
:return: a transposed matrix
'''
pass
def zero(self):
''' Set all the matrix values to zero.
'''
pass
def __init__(self,
rows=((1.0, 0.0, 0.0, 0.0), (0.0, 1.0, 0.0, 0.0),
(0.0, 0.0, 1.0, 0.0), (0.0, 0.0, 0.0,
1.0))) -> typing.Any:
'''
:rtype: typing.Any
'''
pass
def __len__(self) -> int:
'''
:rtype: int
'''
pass
def __add__(self, other: typing.Union[typing.Sequence[float], 'Matrix']
) -> 'Matrix':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Matrix']
:rtype: 'Matrix'
'''
pass
def __sub__(self, other: typing.Union[typing.Sequence[float], 'Matrix']
) -> 'Matrix':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Matrix']
:rtype: 'Matrix'
'''
pass
def __mul__(self, other: typing.Union[int, float]) -> 'Matrix':
'''
:param other:
:type other: typing.Union[int, float]
:rtype: 'Matrix'
'''
pass
def __matmul__(
self, other: typing.Union[typing.Sequence[float], 'Matrix', typing.
Sequence[float], 'Vector']
) -> typing.Union['Matrix', 'Vector']:
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Matrix', typing.Sequence[float], 'Vector']
:rtype: typing.Union['Matrix', 'Vector']
'''
pass
def __radd__(self, other: typing.Union[typing.Sequence[float], 'Matrix']
) -> 'Matrix':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Matrix']
:rtype: 'Matrix'
'''
pass
def __rsub__(self, other: typing.Union[typing.Sequence[float], 'Matrix']
) -> 'Matrix':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Matrix']
:rtype: 'Matrix'
'''
pass
def __rmul__(self, other: typing.Union[int, float]) -> 'Matrix':
'''
:param other:
:type other: typing.Union[int, float]
:rtype: 'Matrix'
'''
pass
def __rmatmul__(self, other: typing.Union[typing.Sequence[float], 'Matrix']
) -> 'Matrix':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Matrix']
:rtype: 'Matrix'
'''
pass
def __imul__(self, other: typing.Union[int, float]) -> 'Matrix':
'''
:param other:
:type other: typing.Union[int, float]
:rtype: 'Matrix'
'''
pass
def __imatmul__(self, other: typing.Union[typing.Sequence[float], 'Matrix']
) -> 'Matrix':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Matrix']
:rtype: 'Matrix'
'''
pass
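# Usage sketch for Matrix (illustrative; not part of the generated stub). Since
# Blender 2.8 the @ operator composes matrices and transforms vectors:
#
#   import math
#   from mathutils import Matrix, Vector
#   rot = Matrix.Rotation(math.radians(90.0), 4, 'Z')  # 4x4 rotation about Z
#   tra = Matrix.Translation((1.0, 0.0, 0.0))
#   world = tra @ rot                                  # rotate first, then translate
#   p = world @ Vector((1.0, 0.0, 0.0))                # -> Vector((1.0, 1.0, 0.0))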
class Quaternion:
''' This object gives access to Quaternions in Blender.
:param seq: size 3 or 4
:type seq: `Vector`
:param angle: rotation angle, in radians
:type angle: float
The constructor takes arguments in various forms:
(), *no args*: create an identity quaternion.
(*wxyz*): create a quaternion from a ``(w, x, y, z)`` vector.
(*exponential_map*): create a quaternion from a 3d exponential map vector (see :meth:`to_exponential_map`).
(*axis, angle*): create a quaternion representing a rotation of *angle* radians over *axis* (see :meth:`to_axis_angle`).
'''
angle: float = None
''' Angle of the quaternion.
:type: float
'''
axis: typing.Union[typing.Sequence[float], 'Vector'] = None
''' Quaternion axis as a vector.
:type: typing.Union[typing.Sequence[float], 'Vector']
'''
is_frozen: bool = None
''' True when this object has been frozen (read-only).
:type: bool
'''
is_valid: bool = None
''' True when the owner of this data is valid.
:type: bool
'''
is_wrapped: bool = None
''' True when this object wraps external data (read-only).
:type: bool
'''
magnitude: float = None
''' Size of the quaternion (read-only).
:type: float
'''
owner = None
''' The item this is wrapping or None (read-only).'''
w: float = None
''' Quaternion axis value.
:type: float
'''
x: float = None
''' Quaternion axis value.
:type: float
'''
y: float = None
''' Quaternion axis value.
:type: float
'''
z: float = None
''' Quaternion axis value.
:type: float
'''
@staticmethod
def conjugate():
''' Set the quaternion to its conjugate (negate x, y, z).
'''
pass
@staticmethod
def conjugated() -> 'Quaternion':
''' Return a new conjugated quaternion.
:rtype: 'Quaternion'
:return: a new quaternion.
'''
pass
@staticmethod
def copy() -> 'Quaternion':
''' Returns a copy of this quaternion.
:rtype: 'Quaternion'
:return: A copy of the quaternion.
'''
pass
def cross(self, other: typing.Union[typing.Sequence[float], 'Quaternion']
) -> 'Quaternion':
''' Return the cross product of this quaternion and another.
:param other: The other quaternion to perform the cross product with.
:type other: typing.Union[typing.Sequence[float], 'Quaternion']
:rtype: 'Quaternion'
:return: The cross product.
'''
pass
def dot(self, other: typing.Union[typing.Sequence[float], 'Quaternion']
) -> float:
''' Return the dot product of this quaternion and another.
:param other: The other quaternion to perform the dot product with.
:type other: typing.Union[typing.Sequence[float], 'Quaternion']
:rtype: float
:return: The dot product.
'''
pass
@staticmethod
def freeze():
''' Make this object immutable. After this the object can be hashed, used in dictionaries & sets.
'''
pass
@staticmethod
def identity():
''' Set the quaternion to an identity quaternion.
'''
pass
@staticmethod
def invert():
''' Set the quaternion to its inverse.
'''
pass
@staticmethod
def inverted() -> 'Quaternion':
''' Return a new, inverted quaternion.
:rtype: 'Quaternion'
:return: the inverted value.
'''
pass
def make_compatible(self, other):
''' Make this quaternion compatible with another, so interpolating between them works as intended.
'''
pass
@staticmethod
def negate():
''' Set the quaternion to its negative.
'''
pass
@staticmethod
def normalize():
''' Normalize the quaternion.
'''
pass
@staticmethod
def normalized() -> 'Quaternion':
''' Return a new normalized quaternion.
:rtype: 'Quaternion'
:return: a normalized copy.
'''
pass
def rotate(self,
other: typing.Union[typing.Sequence[float], 'Euler', typing.
Sequence[float], 'Quaternion', typing.
Sequence[float], 'Matrix']):
''' Rotates the quaternion by another mathutils value.
:param other: rotation component of mathutils value
:type other: typing.Union[typing.Sequence[float], 'Euler', typing.Sequence[float], 'Quaternion', typing.Sequence[float], 'Matrix']
'''
pass
@staticmethod
def rotation_difference(
other: typing.Union[typing.Sequence[float], 'Quaternion']
) -> 'Quaternion':
''' Returns a quaternion representing the rotational difference.
:param other: second quaternion.
:type other: typing.Union[typing.Sequence[float], 'Quaternion']
:rtype: 'Quaternion'
:return: the rotational difference between the two quat rotations.
'''
pass
@staticmethod
def slerp(other: typing.Union[typing.Sequence[float], 'Quaternion'],
factor: float) -> 'Quaternion':
''' Returns the interpolation of two quaternions.
:param other: value to interpolate with.
:type other: typing.Union[typing.Sequence[float], 'Quaternion']
:param factor: The interpolation value in [0.0, 1.0].
:type factor: float
:rtype: 'Quaternion'
:return: The interpolated rotation.
'''
pass
def to_axis_angle(self) -> typing.Tuple['mathutils.Vector', float]:
''' Return the axis, angle representation of the quaternion.
:rtype: typing.Tuple['mathutils.Vector', float]
:return: axis, angle.
'''
pass
def to_euler(self, order: str,
euler_compat: typing.Union[typing.Sequence[float], 'Euler']
) -> 'Euler':
''' Return Euler representation of the quaternion.
:param order: Optional rotation order argument in ['XYZ', 'XZY', 'YXZ', 'YZX', 'ZXY', 'ZYX'].
:type order: str
:param euler_compat: Optional euler argument the new euler will be made compatible with (no axis flipping between them). Useful for converting a series of matrices to animation curves.
:type euler_compat: typing.Union[typing.Sequence[float], 'Euler']
:rtype: 'Euler'
:return: Euler representation of the quaternion.
'''
pass
def to_exponential_map(self) -> 'Vector':
''' Return the exponential map representation of the quaternion. This representation consists of the rotation axis multiplied by the rotation angle. Such a representation is useful for interpolation between multiple orientations. To convert back to a quaternion, pass it to the `Quaternion` constructor.
:rtype: 'Vector'
:return: exponential map.
'''
pass
def to_matrix(self) -> 'Matrix':
''' Return a matrix representation of the quaternion.
:rtype: 'Matrix'
:return: A 3x3 rotation matrix representation of the quaternion.
'''
pass
def to_swing_twist(self, axis: typing.Any
) -> typing.Tuple['mathutils.Quaternion', float]:
''' Split the rotation into a swing quaternion with the specified axis fixed at zero, and the remaining twist rotation angle.
:param axis: twist axis as a string in ['X', 'Y', 'Z']
:type axis: typing.Any
:rtype: typing.Tuple['mathutils.Quaternion', float]
:return: swing, twist angle.
'''
pass
def __init__(self, seq=(1.0, 0.0, 0.0, 0.0)) -> typing.Any:
'''
:rtype: typing.Any
'''
pass
def __len__(self) -> int:
'''
:rtype: int
'''
pass
def __getitem__(self, key: int) -> float:
'''
:param key:
:type key: int
:rtype: float
'''
pass
def __setitem__(self, key: int, value: float) -> float:
'''
:param key:
:type key: int
:param value:
:type value: float
:rtype: float
'''
pass
def __add__(self, other: typing.Union[typing.Sequence[float], 'Quaternion']
) -> 'Quaternion':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Quaternion']
:rtype: 'Quaternion'
'''
pass
def __sub__(self, other: typing.Union[typing.Sequence[float], 'Quaternion']
) -> 'Quaternion':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Quaternion']
:rtype: 'Quaternion'
'''
pass
def __mul__(self, other: typing.Union[int, float, typing.
Sequence[float], 'Quaternion']
) -> 'Quaternion':
'''
:param other:
:type other: typing.Union[int, float, typing.Sequence[float], 'Quaternion']
:rtype: 'Quaternion'
'''
pass
def __matmul__(
self, other: typing.Union[typing.Sequence[float], 'Vector', typing.
Sequence[float], 'Quaternion']
) -> typing.Union['Vector', 'Quaternion']:
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Vector', typing.Sequence[float], 'Quaternion']
:rtype: typing.Union['Vector', 'Quaternion']
'''
pass
def __radd__(self,
other: typing.Union[typing.Sequence[float], 'Quaternion']
) -> 'Quaternion':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Quaternion']
:rtype: 'Quaternion'
'''
pass
def __rsub__(self,
other: typing.Union[typing.Sequence[float], 'Quaternion']
) -> 'Quaternion':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Quaternion']
:rtype: 'Quaternion'
'''
pass
def __rmul__(self, other: typing.Union[int, float, typing.
Sequence[float], 'Quaternion']
) -> 'Quaternion':
'''
:param other:
:type other: typing.Union[int, float, typing.Sequence[float], 'Quaternion']
:rtype: 'Quaternion'
'''
pass
def __rmatmul__(self, other: typing.Union[typing.Sequence[float], 'Vector']
) -> 'Vector':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Vector']
:rtype: 'Vector'
'''
pass
def __imul__(self, other: typing.Union[int, float, typing.
Sequence[float], 'Quaternion']
) -> 'Quaternion':
'''
:param other:
:type other: typing.Union[int, float, typing.Sequence[float], 'Quaternion']
:rtype: 'Quaternion'
'''
pass
def __imatmul__(self, other: typing.Union[typing.Sequence[float], 'Vector']
) -> 'Vector':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Vector']
:rtype: 'Vector'
'''
pass
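# Usage sketch for Quaternion (illustrative; not part of the generated stub):
#
#   import math
#   from mathutils import Quaternion, Vector
#   q = Quaternion((0.0, 0.0, 1.0), math.radians(90.0))  # (axis, angle) form
#   v = q @ Vector((1.0, 0.0, 0.0))                      # -> Vector((0.0, 1.0, 0.0))
#   half = Quaternion().slerp(q, 0.5)                    # interpolate from identity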
class Vector:
''' This object gives access to Vectors in Blender.
:param seq: Components of the vector, must be a sequence of at least two numbers.
:type seq: sequence of numbers
'''
is_frozen: bool = None
''' True when this object has been frozen (read-only).
:type: bool
'''
is_valid: bool = None
''' True when the owner of this data is valid.
:type: bool
'''
is_wrapped: bool = None
''' True when this object wraps external data (read-only).
:type: bool
'''
length: float = None
''' Vector Length.
:type: float
'''
length_squared: float = None
''' Vector length squared (v.dot(v)).
:type: float
'''
magnitude: float = None
''' Vector Length.
:type: float
'''
owner = None
''' The item this is wrapping or None (read-only).'''
w: float = None
''' Vector W axis (4D Vectors only).
:type: float
'''
ww = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
www = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wwww = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wwwx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wwwy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wwwz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wwx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wwxw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wwxx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wwxy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wwxz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wwy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wwyw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wwyx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wwyy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wwyz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wwz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wwzw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wwzx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wwzy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wwzz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wxw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wxww = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wxwx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wxwy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wxwz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wxx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wxxw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wxxx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wxxy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wxxz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wxy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wxyw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wxyx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wxyy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wxyz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wxz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wxzw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wxzx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wxzy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wxzz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wyw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wyww = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wywx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wywy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wywz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wyx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wyxw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wyxx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wyxy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wyxz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wyy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wyyw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wyyx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wyyy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wyyz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wyz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wyzw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wyzx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wyzy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wyzz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wzw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wzww = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wzwx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wzwy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wzwz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wzx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wzxw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wzxx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wzxy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wzxz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wzy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wzyw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wzyx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wzyy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wzyz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wzz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wzzw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wzzx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wzzy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
wzzz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
x: float = None
''' Vector X axis.
:type: float
'''
xw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xww = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xwww = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xwwx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xwwy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xwwz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xwx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xwxw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xwxx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xwxy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xwxz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xwy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xwyw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xwyx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xwyy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xwyz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xwz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xwzw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xwzx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xwzy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xwzz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xxw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xxww = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xxwx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xxwy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xxwz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xxx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xxxw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xxxx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xxxy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xxxz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xxy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xxyw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xxyx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xxyy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xxyz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xxz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xxzw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xxzx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xxzy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xxzz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xyw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xyww = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xywx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xywy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xywz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xyx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xyxw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xyxx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xyxy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xyxz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xyy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xyyw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xyyx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xyyy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xyyz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xyz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xyzw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xyzx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xyzy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xyzz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xzw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xzww = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xzwx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xzwy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xzwz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xzx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xzxw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xzxx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xzxy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xzxz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xzy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xzyw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xzyx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xzyy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xzyz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xzz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xzzw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xzzx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xzzy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
xzzz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
y: float = None
''' Vector Y axis.
:type: float
'''
yw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yww = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
ywww = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
ywwx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
ywwy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
ywwz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
ywx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
ywxw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
ywxx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
ywxy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
ywxz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
ywy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
ywyw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
ywyx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
ywyy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
ywyz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
ywz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
ywzw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
ywzx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
ywzy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
ywzz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yxw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yxww = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yxwx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yxwy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yxwz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yxx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yxxw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yxxx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yxxy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yxxz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yxy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yxyw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yxyx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yxyy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yxyz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yxz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yxzw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yxzx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yxzy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yxzz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yyw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yyww = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yywx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yywy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yywz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yyx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yyxw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yyxx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yyxy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yyxz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yyy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yyyw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yyyx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yyyy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yyyz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yyz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yyzw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yyzx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yyzy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yyzz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yzw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yzww = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yzwx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yzwy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yzwz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yzx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yzxw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yzxx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yzxy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yzxz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yzy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yzyw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yzyx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yzyy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yzyz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yzz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yzzw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yzzx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yzzy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
yzzz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
z: float = None
''' Vector Z axis (3D Vectors only).
:type: float
'''
zw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zww = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zwww = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zwwx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zwwy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zwwz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zwx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zwxw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zwxx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zwxy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zwxz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zwy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zwyw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zwyx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zwyy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zwyz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zwz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zwzw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zwzx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zwzy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zwzz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zxw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zxww = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zxwx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zxwy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zxwz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zxx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zxxw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zxxx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zxxy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zxxz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zxy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zxyw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zxyx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zxyy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zxyz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zxz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zxzw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zxzx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zxzy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zxzz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zyw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zyww = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zywx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zywy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zywz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zyx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zyxw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zyxx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zyxy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zyxz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zyy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zyyw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zyyx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zyyy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zyyz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zyz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zyzw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zyzx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zyzy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zyzz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zzw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zzww = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zzwx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zzwy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zzwz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zzx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zzxw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zzxx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zzxy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zzxz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zzy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zzyw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zzyx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zzyy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zzyz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zzz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zzzw = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zzzx = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zzzy = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
zzzz = None
''' Undocumented, consider `contributing <https://developer.blender.org/T51061>`__.'''
@classmethod
def Fill(cls, size: int, fill: float = 0.0):
''' Create a vector of length size with all values set to fill.
:param size: The length of the vector to be created.
:type size: int
:param fill: The value used to fill the vector.
:type fill: float
'''
pass
@classmethod
def Linspace(cls, start: int, stop: int, size: int):
''' Create a vector of the specified size which is filled with linearly spaced values between start and stop values.
:param start: The start of the range used to fill the vector.
:type start: int
:param stop: The end of the range used to fill the vector.
:type stop: int
:param size: The size of the vector to be created.
:type size: int
'''
pass
@classmethod
def Range(cls, start: int, stop: int, step: int = 1):
''' Create a vector filled with a range of values.
:param start: The start of the range used to fill the vector.
:type start: int
:param stop: The end of the range used to fill the vector.
:type stop: int
:param step: The step between successive values in the vector.
:type step: int
'''
pass
@classmethod
def Repeat(cls, vector, size: int):
''' Create a vector by repeating the values in vector until the required size is reached.
:param vector: The vector to draw values from.
:type vector: typing.Union[typing.Sequence[float], 'Vector']
:param size: The size of the vector to be created.
:type size: int
'''
pass
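# Usage sketch for the four constructors above. This is a hedged example:
# at runtime the real implementations come from Blender's mathutils, and
# the results shown are inferred from the docstrings, not executed here.
#
#     from mathutils import Vector
#     Vector.Fill(3, 2.0)                    # Vector((2.0, 2.0, 2.0))
#     Vector.Linspace(0, 10, 5)              # 5 evenly spaced values, 0..10
#     Vector.Range(0, 6, 2)                  # values 0, 2, 4
#     Vector.Repeat(Vector((1.0, 2.0)), 5)   # (1.0, 2.0, 1.0, 2.0, 1.0)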
def angle(self, other: typing.Union[typing.Sequence[float], 'Vector'],
fallback: typing.Any = None) -> float:
''' Return the angle between two vectors.
:param other: another vector to compare the angle with
:type other: typing.Union[typing.Sequence[float], 'Vector']
:param fallback: return this when the angle can't be calculated (zero length vector), (instead of raising a :exc:`ValueError`).
:type fallback: typing.Any
:rtype: float
:return: angle in radians or fallback when given
'''
pass
def angle_signed(self, other: typing.Union[typing.Sequence[float], 'Vector'],
fallback: typing.Any) -> float:
''' Return the signed angle between two 2D vectors (clockwise is positive).
:param other: another vector to compare the angle with
:type other: typing.Union[typing.Sequence[float], 'Vector']
:param fallback: return this when the angle can't be calculated (zero length vector), (instead of raising a :exc:`ValueError`).
:type fallback: typing.Any
:rtype: float
:return: angle in radians or fallback when given
'''
pass
def copy(self) -> 'Vector':
''' Returns a copy of this vector.
:rtype: 'Vector'
:return: A copy of the vector.
'''
pass
def cross(self, other: typing.Union[typing.Sequence[float], 'Vector']
) -> 'Vector':
''' Return the cross product of this vector and another.
:param other: The other vector to perform the cross product with.
:type other: typing.Union[typing.Sequence[float], 'Vector']
:rtype: 'Vector'
:return: The cross product.
'''
pass
def dot(self,
other: typing.Union[typing.Sequence[float], 'Vector']) -> float:
''' Return the dot product of this vector and another.
:param other: The other vector to perform the dot product with.
:type other: typing.Union[typing.Sequence[float], 'Vector']
:rtype: float
:return: The dot product.
'''
pass
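# Quick sketch of dot/cross (hedged; assumes runtime mathutils semantics):
#
#     from mathutils import Vector
#     a, b = Vector((1.0, 0.0, 0.0)), Vector((0.0, 1.0, 0.0))
#     a.dot(b)    # 0.0 for perpendicular vectors
#     a.cross(b)  # Vector((0.0, 0.0, 1.0)), following the right-hand rule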
def freeze(self):
''' Make this object immutable. After this the object can be hashed, used in dictionaries & sets.
'''
pass
def lerp(self, other: typing.Union[typing.Sequence[float], 'Vector'],
factor: float) -> 'Vector':
''' Returns the interpolation of two vectors.
:param other: value to interpolate with.
:type other: typing.Union[typing.Sequence[float], 'Vector']
:param factor: The interpolation value in [0.0, 1.0].
:type factor: float
:rtype: 'Vector'
:return: The interpolated vector.
'''
pass
def negate(self):
''' Set all values to their negative.
'''
pass
def normalize(self):
''' Normalize the vector, making the length of the vector always 1.0.
'''
pass
def normalized(self) -> 'Vector':
''' Return a new, normalized vector.
:rtype: 'Vector'
:return: a normalized copy of the vector
'''
pass
def orthogonal(self) -> 'Vector':
''' Return a perpendicular vector.
:rtype: 'Vector'
:return: a new vector 90 degrees from this vector.
'''
pass
def project(
self, other: typing.Union[typing.Sequence[float], 'Vector']) -> 'Vector':
''' Return the projection of this vector onto the *other*.
:param other: second vector.
:type other: typing.Union[typing.Sequence[float], 'Vector']
:rtype: 'Vector'
:return: the parallel projection vector
'''
pass
def reflect(self, mirror: typing.Union[typing.Sequence[float], 'Vector']
) -> 'Vector':
''' Return the reflection vector from the *mirror* argument.
:param mirror: This vector could be a normal from the reflecting surface.
:type mirror: typing.Union[typing.Sequence[float], 'Vector']
:rtype: 'Vector'
:return: The reflected vector matching the size of this vector.
'''
pass
def resize(self, size=3):
''' Resize the vector to have size number of elements.
'''
pass
def resize_2d(self):
''' Resize the vector to 2D (x, y).
'''
pass
def resize_3d(self):
''' Resize the vector to 3D (x, y, z).
'''
pass
def resize_4d(self):
''' Resize the vector to 4D (x, y, z, w).
'''
pass
def resized(self, size=3) -> 'Vector':
''' Return a resized copy of the vector with size number of elements.
:rtype: 'Vector'
:return: a new vector
'''
pass
def rotate(self, other: typing.Union[typing.Sequence[float], 'Euler', typing.
Sequence[float], 'Quaternion', typing.
Sequence[float], 'Matrix']):
''' Rotate the vector by a rotation value.
:param other: rotation component of mathutils value
:type other: typing.Union[typing.Sequence[float], 'Euler', typing.Sequence[float], 'Quaternion', typing.Sequence[float], 'Matrix']
'''
pass
def rotation_difference(
self, other: typing.Union[typing.Sequence[float], 'Vector']
) -> 'Quaternion':
''' Returns a quaternion representing the rotational difference between this vector and another.
:param other: second vector.
:type other: typing.Union[typing.Sequence[float], 'Vector']
:rtype: 'Quaternion'
:return: the rotational difference between the two vectors.
'''
pass
def slerp(self, other: typing.Union[typing.Sequence[float], 'Vector'],
factor: float,
fallback: typing.Any = None) -> 'Vector':
''' Returns the interpolation of two non-zero vectors (spherical coordinates).
:param other: value to interpolate with.
:type other: typing.Union[typing.Sequence[float], 'Vector']
:param factor: The interpolation value typically in [0.0, 1.0].
:type factor: float
:param fallback: return this when the vector can't be calculated (zero length vector or direct opposites), (instead of raising a :exc:`ValueError`).
:type fallback: typing.Any
:rtype: 'Vector'
:return: The interpolated vector.
'''
pass
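# Interpolation sketch (hedged example): lerp blends componentwise, while
# slerp walks along the arc between the two directions.
#
#     from mathutils import Vector
#     a, b = Vector((1.0, 0.0, 0.0)), Vector((0.0, 1.0, 0.0))
#     a.lerp(b, 0.5)   # Vector((0.5, 0.5, 0.0))
#     a.slerp(b, 0.5)  # unit vector halfway along the arc from a to b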
def to_2d(self) -> 'Vector':
''' Return a 2d copy of the vector.
:rtype: 'Vector'
:return: a new vector
'''
pass
def to_3d(self) -> 'Vector':
''' Return a 3d copy of the vector.
:rtype: 'Vector'
:return: a new vector
'''
pass
def to_4d(self) -> 'Vector':
''' Return a 4d copy of the vector.
:rtype: 'Vector'
:return: a new vector
'''
pass
def to_track_quat(self, track: str, up: str) -> 'Quaternion':
''' Return a quaternion rotation from the vector and the track and up axis.
:param track: Track axis in ['X', 'Y', 'Z', '-X', '-Y', '-Z'].
:type track: str
:param up: Up axis in ['X', 'Y', 'Z'].
:type up: str
:rtype: 'Quaternion'
:return: rotation from the vector and the track and up axis.
'''
pass
def to_tuple(self, precision: int = -1) -> typing.Tuple:
''' Return this vector as a tuple, rounding each value to the given precision.
:param precision: The number to round the value to in [-1, 21].
:type precision: int
:rtype: typing.Tuple
:return: the values of the vector rounded by *precision*
'''
pass
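# Rounding sketch (hedged; behaviour inferred from the docstring above):
#
#     from mathutils import Vector
#     Vector((0.123456, 1.0, 2.0)).to_tuple(2)  # (0.12, 1.0, 2.0)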
def zero(self):
''' Set all values to zero.
'''
pass
def __init__(self, seq=(0.0, 0.0, 0.0)) -> None:
'''
:param seq: Components of the vector.
:type seq: typing.Sequence[float]
'''
pass
def __len__(self) -> int:
'''
:rtype: int
'''
pass
def __getitem__(self, key: int) -> float:
'''
:param key:
:type key: int
:rtype: float
'''
pass
def __setitem__(self, key: int, value: float) -> float:
'''
:param key:
:type key: int
:param value:
:type value: float
:rtype: float
'''
pass
def __neg__(self) -> 'Vector':
'''
:rtype: 'Vector'
'''
pass
def __add__(self, other: typing.Union[typing.Sequence[float], 'Vector']
) -> 'Vector':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Vector']
:rtype: 'Vector'
'''
pass
def __sub__(self, other: typing.Union[typing.Sequence[float], 'Vector']
) -> 'Vector':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Vector']
:rtype: 'Vector'
'''
pass
def __mul__(self, other: typing.Union[int, float]) -> 'Vector':
'''
:param other:
:type other: typing.Union[int, float]
:rtype: 'Vector'
'''
pass
def __truediv__(self, other: typing.Union[int, float]) -> 'Vector':
'''
:param other:
:type other: typing.Union[int, float]
:rtype: 'Vector'
'''
pass
def __matmul__(self, other: typing.Union[typing.Sequence[float], 'Matrix']
) -> 'Vector':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Matrix']
:rtype: 'Vector'
'''
pass
def __radd__(self, other: typing.Union[typing.Sequence[float], 'Vector']
) -> 'Vector':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Vector']
:rtype: 'Vector'
'''
pass
def __rsub__(self, other: typing.Union[typing.Sequence[float], 'Vector']
) -> 'Vector':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Vector']
:rtype: 'Vector'
'''
pass
def __rmul__(self, other: typing.Union[int, float]) -> 'Vector':
'''
:param other:
:type other: typing.Union[int, float]
:rtype: 'Vector'
'''
pass
def __rtruediv__(self, other: typing.Union[int, float]) -> 'Vector':
'''
:param other:
:type other: typing.Union[int, float]
:rtype: 'Vector'
'''
pass
def __rmatmul__(self, other: typing.Union[typing.Sequence[float], 'Matrix']
) -> 'Vector':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Matrix']
:rtype: 'Vector'
'''
pass
def __iadd__(self, other: typing.Union[typing.Sequence[float], 'Vector']
) -> 'Vector':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Vector']
:rtype: 'Vector'
'''
pass
def __isub__(self, other: typing.Union[typing.Sequence[float], 'Vector']
) -> 'Vector':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Vector']
:rtype: 'Vector'
'''
pass
def __imul__(self, other: typing.Union[int, float]) -> 'Vector':
'''
:param other:
:type other: typing.Union[int, float]
:rtype: 'Vector'
'''
pass
def __itruediv__(self, other: typing.Union[int, float]) -> 'Vector':
'''
:param other:
:type other: typing.Union[int, float]
:rtype: 'Vector'
'''
pass
def __imatmul__(self, other: typing.Union[typing.Sequence[float], 'Matrix']
) -> 'Vector':
'''
:param other:
:type other: typing.Union[typing.Sequence[float], 'Matrix']
:rtype: 'Vector'
'''
pass
|
PypiClean
|
/pywren_ibm_cloud-1.7.3.tar.gz/pywren_ibm_cloud-1.7.3/pywren_ibm_cloud/storage/backends/infinispan/infinispan.py
|
import logging
import requests
import json
import base64
from requests.auth import HTTPBasicAuth
from pywren_ibm_cloud.utils import is_pywren_function
logger = logging.getLogger(__name__)
class InfinispanBackend:
"""
Infinispan backend
"""
def __init__(self, infinispan_config, **kwargs):
logger.debug("Creating Infinispan client")
self.infinispan_config = infinispan_config
self.is_pywren_function = is_pywren_function()
self.basicAuth = HTTPBasicAuth(infinispan_config.get('username'),
infinispan_config.get('password'))
self.endpoint = infinispan_config.get('endpoint')
self.cache_manager = infinispan_config.get('cache_manager', 'default')
self.cache_name = self.__generate_cache_name(kwargs['bucket'], kwargs['executor_id'])
self.infinispan_client = requests.session()
self.__is_server_version_supported()
res = self.infinispan_client.head(self.endpoint + '/rest/v2/caches/' + self.cache_name,
auth=self.basicAuth)
if res.status_code == 404:
logger.debug('going to create new Infinispan cache {}'.format(self.cache_name))
res = self.infinispan_client.post(self.endpoint + '/rest/v2/caches/' + self.cache_name + '?template=org.infinispan.DIST_SYNC', auth=self.basicAuth)
logger.debug('New Infinispan cache {} created with status {}'.format(self.cache_name, res.status_code))
logger.debug("Infinispan client created successfully")
def __generate_cache_name(self, bucket, executor_id):
if executor_id is None and bucket is None:
raise Exception('at least one of bucket or executor_id should be non-empty')
if executor_id is not None and executor_id.find('/') > 0:
executor_id = executor_id.replace('/','_')
if bucket is not None:
cache_name = bucket + '_' + executor_id
else:
cache_name = executor_id
return cache_name
def __key_url(self, key):
urlSafeEncodedBytes = base64.urlsafe_b64encode(key.encode("utf-8"))
urlSafeEncodedStr = str(urlSafeEncodedBytes, "utf-8")
url = self.endpoint + '/rest/v2/caches/' + self.cache_name + '/' + urlSafeEncodedStr
return url
def __is_server_version_supported(self):
res = self.infinispan_client.get(self.endpoint + '/rest/v2/cache-managers/' + self.cache_manager,
auth=self.basicAuth)
json_resp = json.loads(res.content.decode('utf-8'))
server_version = json_resp['version'].split('.')
if (int(server_version[0]) < 10 or (int(server_version[0]) == 10 and int(server_version[1]) < 1)):
raise Exception('Infinispan versions 10.1 and up supported')
def get_client(self):
"""
Get infinispan client.
:return: infinispan_client
"""
return self.infinispan_client
def put_object(self, bucket_name, key, data):
"""
Put an object in Infinispan. Override the object if the key already exists.
:param key: key of the object.
:param data: data of the object
:type data: str/bytes
:return: None
"""
headers = {"Content-Type": "application/octet-stream",
'Key-Content-Type': "application/octet-stream;encoding=base64"}
resp = self.infinispan_client.put(self.__key_url(key), data = data,
auth=self.basicAuth, headers = headers )
logger.debug('put_object {} returned status {}'.format(key, resp.status_code))
def get_object(self, bucket_name, key, stream=False, extra_get_args={}):
"""
Get object from Infinispan with a key. Throws StorageNoSuchKeyError if the given key does not exist.
:param key: key of the object
:return: Data of the object
:rtype: str/bytes
"""
headers = {"Content-Type": "application/octet-stream",
'Key-Content-Type': "application/octet-stream;encoding=base64"}
res = self.infinispan_client.get(self.__key_url(key), headers = headers,
auth=self.basicAuth)
data = res.content
return data
def head_object(self, bucket_name, key):
"""
Head object from Infinispan with a key. Throws StorageNoSuchKeyError if the given key does not exist.
:param key: key of the object
:return: status code of the HEAD request
:rtype: int
"""
res = self.infinispan_client.head(self.__key_url(key),
auth=self.basicAuth)
return res.status_code
def delete_object(self, bucket_name, key):
"""
Delete an object from storage.
:param bucket: bucket name
:param key: data key
"""
headers = {"Content-Type": "application/octet-stream"
,'Key-Content-Type': "application/octet-stream;encoding=base64"}
return self.infinispan_client.delete(self.__key_url(key), headers = headers,
auth=self.basicAuth)
def delete_objects(self, bucket_name, key_list):
"""
Delete a list of objects from storage.
:param bucket: bucket name
:param key_list: list of keys
"""
result = []
for key in key_list:
self.delete_object(bucket_name, key)
return result
def bucket_exists(self, bucket_name):
"""
Check whether a bucket exists. Throws StorageNoSuchKeyError if the given bucket does not exist.
:param bucket_name: name of the bucket
"""
raise NotImplementedError
def head_bucket(self, bucket_name):
"""
Head bucket with a name. Throws StorageNoSuchKeyError if the given bucket does not exist.
:param bucket_name: name of the bucket
:return: Metadata of the bucket
:rtype: str/bytes
"""
raise NotImplementedError
def list_objects(self, bucket_name, prefix=None):
"""
Return a list of objects for the given bucket and prefix.
:param bucket_name: Name of the bucket.
:param prefix: Prefix to filter object names.
:return: keys in the cache, as returned by the Infinispan REST API (prefix filtering is not applied).
:rtype: bytes
"""
res = self.infinispan_client.get(self.endpoint + '/rest/v2/caches/' + self.cache_name + '?action=keys', auth=self.basicAuth)
data = res.content
return data
def list_keys(self, bucket_name, prefix=None):
"""
Return a list of keys for the given prefix.
:param bucket_name: Name of the bucket.
:param prefix: Prefix to filter object names.
:return: keys in the cache, as returned by the Infinispan REST API (prefix filtering is not applied).
:rtype: bytes
"""
res = self.infinispan_client.get(self.endpoint + '/rest/v2/caches/' + self.cache_name + '?action=keys', auth=self.basicAuth)
data = res.content
return data
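# Minimal usage sketch. Hedged example: the endpoint and credentials are
# placeholders, and a reachable Infinispan 10.1+ server is assumed.
#
#     config = {'endpoint': 'http://localhost:11222',
#               'username': 'user', 'password': 'secret'}
#     backend = InfinispanBackend(config, bucket='bucket', executor_id='exec/1')
#     backend.put_object('bucket', 'key1', b'payload')
#     assert backend.get_object('bucket', 'key1') == b'payload'
#     backend.delete_object('bucket', 'key1')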
|
PypiClean
|
/code-checker-0.2.2.post1.tar.gz/code-checker-0.2.2.post1/codechecker/scripts/runner.py
|
import sys
import fnmatch
import yaml
from codechecker import worker
from codechecker import git
from codechecker.checker.builder import (CheckListBuilder,
TaskCreator)
from codechecker.checkers_spec import (PROJECT_CHECKERS,
FILE_CHECKERS)
def main():
"""Run checkers.
1. Load checkers configuration from precommit-checkers.yml
2. Use :py:class:`codechecker.checker.builder.CheckListBuilder` to create
list of all configured checkers for project and staged files
3. Next call :py:func:`codechecker.worker.execute_checkers` to
execute the created checker tasks and print checker results
4. If :py:func:`codechecker.worker.execute_checkers` returns a non-empty
value the script exits with status 1 so the commit is aborted
"""
with open('precommit-checkers.yml', 'r') as config_file:
checkers_data = yaml.safe_load(config_file)
_validate_checkers_data(checkers_data)
checklist_builder = _init_checkers_builder()
if 'config' in checkers_data:
_set_checkers_config(checklist_builder, checkers_data['config'])
if 'project-checkers' in checkers_data:
_create_project_checkers(checklist_builder,
checkers_data['project-checkers'])
if 'file-checkers' in checkers_data:
_create_file_checkers(checklist_builder,
checkers_data['file-checkers'])
return _execute_checkers(checklist_builder.get_result())
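# A hypothetical precommit-checkers.yml, illustrating the three supported
# top-level options (the checker names are placeholders; the real names
# come from PROJECT_CHECKERS / FILE_CHECKERS):
#
#     config:
#         pylint: {rcfile: .pylintrc}
#     project-checkers: unittest
#     file-checkers:
#         '*.py': pylint
#         'tests/*.py': [pylint]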
def _init_checkers_builder():
project_checkers = {}
for each_checker in PROJECT_CHECKERS:
project_checkers[each_checker] = TaskCreator(
each_checker,
**PROJECT_CHECKERS[each_checker]
)
file_checkers = {}
for each_checker in FILE_CHECKERS:
file_checkers[each_checker] = TaskCreator(
each_checker,
**FILE_CHECKERS[each_checker]
)
checklist_builder = CheckListBuilder(
project_checkers,
file_checkers
)
return checklist_builder
def _validate_checkers_data(checkers_data):
"""Check if precommit-checkers.yml contains valid options only."""
for each_option in checkers_data:
if each_option not in ('config', 'project-checkers', 'file-checkers'):
raise ValueError('precommit-checkers.yml contains'
' invalid option "{}"'.format(each_option))
def _set_checkers_config(checklist_builder, config):
"""Configure checker factories."""
for each_checker, each_conf in config.items():
checklist_builder.configure_checker(each_checker, each_conf)
def _create_project_checkers(checklist_builder, checkers):
"""Create project checkers."""
if isinstance(checkers, str):
checkers = [checkers]
for each_checker in checkers:
checklist_builder.add_project_checker(each_checker)
def _create_file_checkers(checklist_builder, checkers):
"""Create file checkers."""
staged_files = git.get_staged_files()
files_previously_matched = set()
patterns_sorted = _sort_file_patterns(checkers.keys())
for path_pattern in patterns_sorted:
checkers_list = checkers[path_pattern]
if isinstance(checkers_list, str):
checkers_list = [checkers_list]
matched_files = set(fnmatch.filter(staged_files, path_pattern))
# Exclude files that match more specific pattern
files_to_check = matched_files - files_previously_matched
files_previously_matched.update(files_to_check)
for each_file in files_to_check:
checklist_builder.add_checkers_for_file(each_file, checkers_list)
def _execute_checkers(checker_tasks):
if worker.execute_checkers(checker_tasks):
sys.exit(1)
else:
return 0
def _sort_file_patterns(pattern_list):
"""Sort file patterns.
Sort file patterns so that more specific patterns are before more generic
patterns. For example if we have patterns ['*.py', 'tests/*.py'] result
should be ['tests/*.py', '*.py']
"""
patterns_sorted = []
for pattern_to_insert in pattern_list:
for index, pattern_inserted in enumerate(patterns_sorted):
if fnmatch.fnmatch(pattern_to_insert, pattern_inserted):
# more generic pattern is already inserted into result list
# so pattern_to_insert must by inserted before
patterns_sorted.insert(index, pattern_to_insert)
break
else:
# there is not more generic patterns in result list
patterns_sorted.append(pattern_to_insert)
return patterns_sorted
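# Worked example of the ordering rule in the docstring:
#
#     _sort_file_patterns(['*.py', 'tests/*.py'])
#     # -> ['tests/*.py', '*.py']
#
# so staged files under tests/ are claimed by the specific pattern first and
# excluded from the generic '*.py' checkers by _create_file_checkers above.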
|
PypiClean
|
/RestForce-1.0.0.tar.gz/RestForce-1.0.0/restforce/login.py
|
from utensils import getUniqueElementValueFromXmlString
from httplib2 import Http, ServerNotFoundError
def readLoginCredentialsFromFile(credentialsFilePath):
with open(credentialsFilePath, 'r') as f:
username = f.readline().rstrip('\n')
password = f.readline().rstrip('\n')
securityToken = f.readline().rstrip('\n')
return (username, password, securityToken)
def login(username, password, securityToken):
loginResp, loginRespContent = _callSoapLoginService(username, password, securityToken)
if loginResp.status != 200:
raise SalesforceAuthenticationFailedException(loginRespContent)
return _parseSoapLoginServiceResponse(loginRespContent)
def _callSoapLoginService(username, password, securityToken):
'''
Calls out to the soap login service.
@return: a tuple containing (loginResp, loginRespContent)
@rtype: a tuple of size 2
'''
soapUrl = "https://login.salesforce.com/services/Soap/u/23.0"
loginSoapRequestBody = """<?xml version="1.0" encoding="utf-8" ?>
<env:Envelope
xmlns:xsd="http://www.w3.org/2001/XMLSchema"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:env="http://schemas.xmlsoap.org/soap/envelope/">
<env:Body>
<n1:login xmlns:n1="urn:partner.soap.sforce.com">
<n1:username>""" + username + """</n1:username>
<n1:password>""" + password + securityToken + """</n1:password>
</n1:login>
</env:Body>
</env:Envelope>"""
loginSoapRequestHeaders = {
"content-type":"text/xml",
"charset":"UTF-8",
"SOAPAction":"login"
}
h = Http()
try:
return h.request(soapUrl, "POST", body=loginSoapRequestBody, headers=loginSoapRequestHeaders)
except ServerNotFoundError as e:
raise SalesforceAuthenticationFailedException(e)
def _parseSoapLoginServiceResponse(loginRespContent):
'''
Pares the response content from a soap login request, extracting a tuple containing (sessionId, serverUrl, sfInstance).
Example of expected soap login response content:
<?xml version="1.0" encoding="UTF-8"?>
<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns="urn:partner.soap.sforce.com" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<soapenv:Body>
<loginResponse>
<result>
[...]
<serverUrl>https://na10-api.salesforce.com/services/Soap/u/23.0/00DA0000000ZoX3</serverUrl>
<sessionId>00DA0000000ZoX3!ARcAQErwnlE.gpvhb82ogWxRVdCfvIQAoaWoZfDSsgUAvp8Xrk0uUHK_wW5us3a3DOX1TCz1V1knqEbXHDaPyY5TxkD1szBO</sessionId>
[...]
</result>
</loginResponse>
</soapenv:Body>
</soapenv:Envelope>
'''
sessionId = getUniqueElementValueFromXmlString(loginRespContent, 'sessionId')
serverUrl = getUniqueElementValueFromXmlString(loginRespContent, 'serverUrl')
sfInstance = serverUrl.replace('http://', '').replace('https://', '').split('/')[0].replace('-api', '')
return (sessionId, serverUrl, sfInstance)
class SalesforceAuthenticationFailedException(Exception):
'''
Thrown to indicate that authentication with Salesforce failed.
'''
pass
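# End-to-end usage sketch (hedged: the file name below is a placeholder; the
# credentials file must contain username, password and security token on
# three separate lines, as read by readLoginCredentialsFromFile above):
#
#     username, password, token = readLoginCredentialsFromFile('credentials.txt')
#     sessionId, serverUrl, sfInstance = login(username, password, token)
#     # sessionId then authenticates subsequent REST calls against sfInstance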
|
PypiClean
|
/Nevow-0.14.5.tar.gz/Nevow-0.14.5/doc/howto/intro.rst
|
============
Introduction
============
Summary
-------
Nevow is a next-generation web application templating system, based on
the ideas developed in the Twisted Woven package. Its main focus is on
separating the HTML template from both the business logic and the
display logic, while allowing the programmer to write pure Python code
as much as possible. It separates your code into 'data' and 'render'
functions, a simplified implementation of traditional MVC. It has
various parts which can be used individually or as a whole, integrated
web solution:
- XHTML templates: contain no programming logic, only nodes tagged with
nevow attributes
- data/render methods: simplified MVC
- stan: An s-expression-like syntax for expressing xml in pure python
- formless: For describing the types of objects which may be passed to
methods of your classes, validating and coercing string input from
either web or command-line sources, and calling your methods
automatically once validation passes
- formless.webform: For rendering web forms based on formless type
descriptions, accepting form posts and passing them to formless
validators, and rendering error forms in the event validation fails
Disk based templates
--------------------
Nevow includes the ability to load templates off disk. These templates
may have processing directives which cause the execution of python
methods at render time. The attribute technique was inspired by the
attributes used by ZPT. However, no actual code may be embedded in the
HTML template:
.. code-block:: html
<html xmlns:nevow="http://nevow.com/ns/nevow/0.1">
<head>
<title>Greetings!</title>
</head>
<body>
<h1 style="font-size: large">Now I will greet you:</h1>
<span nevow:render="greet" />
</body>
</html>
This template can then be loaded and rendered like so:
.. code-block:: python
import random
from nevow import loaders, rend
class Greeter(rend.Page):
docFactory = loaders.xmlfile("Greeting.html")
def render_greet(self, context, data):
return random.choice(["Hello", "Greetings", "Hi"]), " ", data
Greeter("My name is").renderString()
data/render methods
-------------------
To allow clean isolation between code which fetches data from a data
source and code which renders the data into HTML, nevow allows you to
write both 'data' methods and 'render' methods. These concepts are
inspired by MVC, but simpler, since the framework can handle most of the
controller aspect. An example:
.. code-block:: html
<html xmlns:nevow="http://nevow.com/ns/nevow/0.1">
<body>
<span nevow:data="name" nevow:render="colorful" />
<span nevow:data="fun" nevow:render="colorful" />
</body>
</html>
This template can be loaded and rendered using a class such as this:
.. code-block:: python
class Colorful(rend.Page):
docFactory = loaders.xmlfile("Colorful.html")
def render_colorful(self, context, data):
color = random.choice(['red', 'green', 'blue'])
return context.tag(style="color: %s" % color)
def data_name(self, context, data):
return "Your name here"
def data_fun(self, context, data):
return "Are we having fun yet?"
Stan
----
One of the most powerful things about nevow is stan, an
s-expression-like syntax for producing XML fragments in pure Python
syntax. Stan is not required for using nevow, but it is both a simple
and powerful way to both lay out one's XHTML templates and express one's
display logic. A brief example will illustrate its utility:
.. code-block:: python
import random
from nevow import loaders, rend, tags
class Greeter(rend.Page):
def greet(self, context, data):
return random.choice(["Hello", "Greetings", "Hi"]), " ", data
docFactory = loaders.stan(
tags.html[
tags.head[ tags.title[ "Greetings!" ]],
tags.body[
tags.h1(style="font-size: large")[ "Now I will greet you:" ],
greet
]
])
When the Greeter class is constructed, it is passed a Python object
which will be used as that page's data:
.. code-block:: python
Greeter("Your name here").renderString()
Formless
--------
Python is dynamically typed, which means it has no built-in controls for
enforcing the types of objects which are passed to one's methods. This
is great for programmers, but not necessarily great if you are going to
be passing user-entered input to those methods. Formless is a simple way
to describe the types of objects that can be passed to one's methods, as
well as coerce from string input to those types. Other code can then
accept user input from a command line or from a web form, validate the
input against the types described using formless, and call the method
once validation has passed. A simple example:
.. code-block:: python
from zope.interface import implements
from formless.annotate import TypedInterface, Integer, String
class ISimpleMethod(TypedInterface):
def simple(self,
name=String(description="Your name."),
age=Integer(description="Your age.")):
"""
Simple
Please enter your name and age.
"""
class Implementation(object):
implements(ISimpleMethod)
def simple(self, name, age):
print "Hello, %s, who is %s" % (name, age)
Webform
-------
Webform is a nevow module which will automatically render web forms and
accept form posts based on types described using the classes in
formless. Used in conjunction with the twisted.web HTTP server, the
process is almost automatic:
.. code-block:: python
from nevow import loaders, rend, tags
from formless import webform
class WebForm(rend.Page):
docFactory = loaders.stan(
tags.html[
tags.body[
tags.h1["Here is the form:"],
webform.renderForms('original')
]
])
resource = WebForm(Implementation())
Exposing this resource instance to the web using twisted.web and
visiting it will cause a form with two input boxes to be rendered.
Posting the form will cause form validation to occur. Upon error, the
user will be returned to the original page, with the form annotated with
error messages. Upon success, the "simple" method of the Implementation
instance will be called and passed a string and an integer.
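The exact serving code depends on your deployment, but a minimal sketch
using the Twisted reactor (assuming the classic ``nevow.appserver.NevowSite``
wrapper; the port is arbitrary) looks like this:
.. code-block:: python
from twisted.internet import reactor
from nevow import appserver
reactor.listenTCP(8080, appserver.NevowSite(resource))
reactor.run()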
LivePage
--------
LivePage was a Woven technology which allowed programmers to receive
server- side notification of client-side JavaScript events, and to send
JavaScript to the client in response to a server-side event. New for
Nevow 0.3, LivePage has been updated to support Mozilla, Firefox, IE6
Win, and Safari. Using LivePage is very easy:
.. code-block:: python
from nevow import loaders, rend, tags
from nevow.liveevil import handler
def greeter(client, nodeName):
client.alert("Greetings. You clicked the %s node." % nodeName)
# Any string arguments after the event handler function will be evaluated
# as JavaScript in the context of the web browser and results passed to the
# Python event handler
handler = handler(greeter, 'node.name')
class Live(rend.Page):
docFactory = loaders.stan(
tags.html[
tags.body[
tags.ol[
tags.li(onclick=handler, name="one")["One"],
tags.li(onclick=handler, name="two")["Two"],
tags.li(onclick=handler, name="three")["Three"]
]
]
])
More Information
----------------
The `Nevow website <https://divmod.org/trac/wiki/DivmodNevow>`__ has more
information. Starting with 0.3, it contains a simple WSGI implementation
and can also be used to render CGIs. However, the recommended mode of
operation is using the `Twisted
web <http://twistedmatrix.com/trac/wiki/TwistedWeb>`__ server. Nevow is
an active project, and many new bugfixes and features are committed to
the Nevow Git repository. Information about Nevow commits is available
by subscribing to the `Divmod
commits <http://divmod.net/users/mailman.twistd/listinfo/divmod-commits>`__
mailing list. The Nevow Git repository can be checked out using:
::
git clone git://github.com/twisted/nevow
Discussion of Nevow occurs on the `twisted.web mailing
list <http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-web>`__.
The Nevow developers are also often available for real-time help on the
`#twisted.web channel <irc://irc.freenode.net/#twisted.web>`__ on
irc.freenode.net.
|
PypiClean
|
/intel_optimization_for_horovod-0.28.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl/horovod/ray/elastic.py
|
from typing import Callable, List, Any, Dict, Optional
import logging
import socket
import time
import os
import random
import math
import threading
import warnings
from horovod.runner.common.util import timeout, secret
from horovod.runner.http.http_server import RendezvousServer
from horovod.runner.gloo_run import (create_slot_env_vars, create_run_env_vars,
_get_min_start_hosts)
from horovod.runner.elastic.settings import ElasticSettings
from horovod.runner.elastic.rendezvous import create_rendezvous_handler
from horovod.runner.elastic.discovery import HostDiscovery
from horovod.runner.elastic.driver import ElasticDriver
import ray
import ray.exceptions
from horovod.ray.worker import BaseHorovodWorker
from horovod.ray.utils import detect_nics
logger = logging.getLogger(__name__)
if hasattr(ray.exceptions, "GetTimeoutError"):
GetTimeoutError = ray.exceptions.GetTimeoutError
elif hasattr(ray.exceptions, "RayTimeoutError"):
GetTimeoutError = ray.exceptions.RayTimeoutError
else:
raise ImportError("Unable to find Ray Timeout Error class "
"(GetTimeoutError, RayTimeoutError). "
"This is likely due to the Ray version not "
"compatible with Horovod-Ray.")
class RayHostDiscovery(HostDiscovery):
"""Uses Ray global state to obtain host mapping.
Assumes that the whole global state is available for usage."""
def __init__(self, use_gpu=False, cpus_per_slot=1, gpus_per_slot=1):
self.use_gpu = use_gpu
self.cpus_per_slot = cpus_per_slot
self.gpus_per_slot = gpus_per_slot
logger.debug(f"Discovery started with {cpus_per_slot} CPU / "
f"{gpus_per_slot} GPU per slot.")
def find_available_hosts_and_slots(self) -> Dict[str, int]:
"""Returns a dict mapping <hostname> -> <number of slots>."""
alive_nodes = [k for k in ray.nodes() if k["alive"]]
host_mapping = {}
for node in alive_nodes:
hostname = node["NodeManagerAddress"]
resources = node["Resources"]
slots = resources.get("CPU", 0) // self.cpus_per_slot
if self.use_gpu:
gpu_slots = resources.get("GPU", 0) // self.gpus_per_slot
slots = min(slots, gpu_slots)
slots = int(math.ceil(slots))
if slots:
host_mapping[hostname] = slots
if host_mapping and sum(host_mapping.values()) == 0:
logger.info(f"Detected {len(host_mapping)} hosts, but no hosts "
"have available slots.")
logger.debug(f"Alive nodes: {alive_nodes}")
return host_mapping
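# Worked example of the slot arithmetic above (illustrative numbers): a node
# reporting Resources={'CPU': 8, 'GPU': 2} with cpus_per_slot=2,
# gpus_per_slot=1 and use_gpu=True yields min(8 // 2, 2 // 1) = 2 slots for
# that host; hosts that come out at 0 slots are omitted from the mapping.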
class TestDiscovery(RayHostDiscovery):
def __init__(self,
min_hosts,
max_hosts,
change_frequency_s,
use_gpu=False,
cpus_per_slot=1,
gpus_per_slot=1,
verbose=True,
_graceful=True):
super().__init__(
use_gpu=use_gpu,
cpus_per_slot=cpus_per_slot,
gpus_per_slot=gpus_per_slot)
self._min_hosts = min_hosts
self._graceful = _graceful
self._max_hosts = max_hosts
self._change_frequency_s = change_frequency_s
self._last_reset_t = None
self.verbose = verbose
self._removed_hosts = set()
def add_host(self, hosts):
available_hosts = self._removed_hosts & hosts.keys()
if available_hosts:
host = random.choice(list(available_hosts))
self._removed_hosts.remove(host)
else:
print("No hosts to add.")
def remove_host(self, hosts):
good_hosts = [k for k in hosts if k not in self._removed_hosts]
from ray.autoscaler._private.commands import kill_node
if good_hosts:
if self._graceful:
host = random.choice(good_hosts)
else:
host = kill_node(
os.path.expanduser("~/ray_bootstrap_config.yaml"), True,
False, None)
self._removed_hosts.add(host)
def change_hosts(self, hosts):
for host in list(self._removed_hosts):
if host not in hosts:
self._removed_hosts.remove(host)
current_hosts = len(hosts) - len(self._removed_hosts)
if current_hosts <= self._min_hosts:
self.add_host(hosts)
elif current_hosts >= self._max_hosts:
self.remove_host(hosts)
else:
if random.random() < 0.5:
self.add_host(hosts)
else:
self.remove_host(hosts)
def find_available_hosts_and_slots(self):
t = time.time()
if self._last_reset_t is None:
self._last_reset_t = t
hosts = super().find_available_hosts_and_slots()
if t - self._last_reset_t >= self._change_frequency_s:
self.change_hosts(hosts)
self._last_reset_t = t
if self.verbose:
print(f"Total hosts: {len(hosts)}")
remaining = {
k: v
for k, v in hosts.items() if k not in self._removed_hosts
}
if self.verbose:
print(f"Remaining hosts: {len(remaining)} -- {remaining}")
return remaining
class ElasticRayExecutor:
"""Executor for elastic jobs using Ray.
Leverages the Ray global state to detect available hosts and
slots. Assumes that the entire Ray cluster is available for
the Executor to use.
Args:
settings: Configuration for the elastic job
setup. You can use a standard Horovod ElasticSettings
object or create one directly from
ElasticRayExecutor.create_settings.
use_gpu (bool): Whether to use GPU for allocation.
cpus_per_slot (int): Number of CPU resources to allocate to
each worker.
gpus_per_slot (int): Number of GPU resources to allocate to
each worker.
env_vars (Dict): Environment variables to be set
on the actors (worker processes) before initialization.
override_discovery (bool): Whether for the ElasticRayExecutor to
automatically provide a discovery mechanism for ElasticSettings.
Example:
.. code-block:: python
import ray
ray.init(address="auto")
settings = ElasticRayExecutor.create_settings(verbose=True)
executor = ElasticRayExecutor(
settings, use_gpu=True, cpus_per_slot=2)
executor.start()
executor.run(train_fn)
.. deprecated:: 0.25.0
"""
@staticmethod
def create_settings(min_num_proc: int = 1,
max_num_proc: int = None,
reset_limit: int = None,
elastic_timeout: int = 600,
timeout_s: int = 30,
ssh_identity_file: str = None,
nics: str = None,
# min_np is deprecated, use min_num_proc instead
min_np=None,
# max_np is deprecated, use max_num_proc instead
max_np=None,
**kwargs):
"""Returns a Settings object for ElasticRayExecutor.
Note that the `discovery` property will be set at runtime.
Args:
min_num_proc (int): Minimum number of processes running for
training to continue. If number of available processes dips
below this threshold, then training will wait for
more instances to become available.
max_num_proc (int): Maximum number of training processes,
beyond which no additional processes will be created.
If not specified, then will be unbounded.
reset_limit (int): Maximum number of times that the training
job can scale up or down the number of workers after
which the job is terminated.
elastic_timeout (int): Timeout for elastic initialisation after
re-scaling the cluster. The default value is 600 seconds.
Alternatively, the environment variable
HOROVOD_ELASTIC_TIMEOUT can also be used.
timeout_s (int): Horovod performs all the checks and starts the
processes before the specified timeout.
The default value is 30 seconds.
ssh_identity_file (str): File on the driver from which
the identity (private key) is read.
nics (set): Network interfaces that can be used for communication.
"""
if min_np is not None:
min_num_proc = min_np
warnings.warn('min_np is deprecated, use min_num_proc instead', DeprecationWarning)
if max_np is not None:
max_num_proc = max_np
warnings.warn('max_np is deprecated, use max_num_proc instead', DeprecationWarning)
start_timeout = timeout.Timeout(
timeout_s,
message="Timed out waiting for {activity}. Please "
"check connectivity between servers. You "
"may need to increase the --start-timeout "
"parameter if you have too many servers.")
ssh_identity_file = ssh_identity_file or os.path.expanduser(
"~/ray_bootstrap_key.pem")
settings = ElasticSettings(
discovery=None,
min_num_proc=min_num_proc,
max_num_proc=max_num_proc,
elastic_timeout=elastic_timeout,
reset_limit=reset_limit,
num_proc=min_num_proc,
ssh_identity_file=ssh_identity_file,
nics=nics,
start_timeout=start_timeout,
key=secret.make_secret_key() if secret else None,
**kwargs)
return settings
def __init__(self,
settings: ElasticSettings,
use_gpu: bool = False,
cpus_per_slot: int = 1,
gpus_per_slot: Optional[int] = None,
env_vars: dict = None,
override_discovery=True):
if gpus_per_slot and not use_gpu:
raise ValueError("gpus_per_slot is set, but use_gpu is False. "
"use_gpu must be True if gpus_per_slot is set. ")
gpus_per_slot = gpus_per_slot or int(use_gpu)
if use_gpu and gpus_per_slot < 1:
raise ValueError(
f"gpus_per_slot must be >= 1: Got {gpus_per_slot}.")
if override_discovery:
settings.discovery = RayHostDiscovery(
use_gpu=use_gpu,
cpus_per_slot=cpus_per_slot,
gpus_per_slot=gpus_per_slot)
self.cpus_per_slot = cpus_per_slot
self.gpus_per_slot = gpus_per_slot
self.use_gpu = use_gpu
self.settings = settings
self.driver = None
self.rendezvous = None
self.env_vars = env_vars or {}
def start(self):
"""Starts the Horovod driver and services."""
self.rendezvous = RendezvousServer(self.settings.verbose)
self.driver = ElasticDriver(
rendezvous=self.rendezvous,
discovery=self.settings.discovery,
min_num_proc=self.settings.min_num_proc,
max_num_proc=self.settings.max_num_proc,
timeout=self.settings.elastic_timeout,
reset_limit=self.settings.reset_limit,
verbose=self.settings.verbose)
handler = create_rendezvous_handler(self.driver)
logger.debug("[ray] starting rendezvous")
global_rendezv_port = self.rendezvous.start(handler)
logger.debug(f"[ray] waiting for {self.settings.num_proc} to start.")
self.driver.wait_for_available_slots(self.settings.num_proc)
# Host-to-host common interface detection
# requires at least 2 hosts in an elastic job.
min_hosts = _get_min_start_hosts(self.settings)
current_hosts = self.driver.wait_for_available_slots(
self.settings.num_proc, min_hosts=min_hosts)
logger.debug("[ray] getting common interfaces")
nics = detect_nics(
self.settings,
all_host_names=current_hosts.host_assignment_order,
)
logger.debug("[ray] getting driver IP")
server_ip = socket.gethostbyname(socket.gethostname())
self.run_env_vars = create_run_env_vars(
server_ip, nics, global_rendezv_port, elastic=True)
def _create_resources(self, hostname: str):
resources = dict(
num_cpus=self.cpus_per_slot,
num_gpus=int(self.use_gpu) * self.gpus_per_slot,
resources={f"node:{hostname}": 0.01})
return resources
def _create_remote_worker(self, slot_info, worker_env_vars):
hostname = slot_info.hostname
loaded_worker_cls = self.remote_worker_cls.options(
**self._create_resources(hostname))
worker = loaded_worker_cls.remote()
worker.update_env_vars.remote(worker_env_vars)
worker.update_env_vars.remote(create_slot_env_vars(slot_info))
if self.use_gpu:
visible_devices = ",".join(
[str(i) for i in range(slot_info.local_size)])
worker.update_env_vars.remote({
"CUDA_VISIBLE_DEVICES":
visible_devices
})
return worker
def _create_spawn_worker_fn(self, return_results: List,
worker_fn: Callable,
queue: "ray.util.Queue") -> Callable:
self.remote_worker_cls = ray.remote(BaseHorovodWorker)
# event = register_shutdown_event()
worker_env_vars = {}
worker_env_vars.update(self.run_env_vars.copy())
worker_env_vars.update(self.env_vars.copy())
worker_env_vars.update({"PYTHONUNBUFFERED": "1"})
def worker_loop(slot_info, events):
def ping_worker(worker):
# There is an odd edge case where a node can be removed
# before the remote worker is started, leading to a failure
# in trying to create the horovod mesh.
try:
ping = worker.execute.remote(lambda _: 1)
ray.get(ping, timeout=10)
except Exception as e:
logger.error(f"{slot_info.hostname}: Ping failed - {e}")
return False
return True
worker = self._create_remote_worker(slot_info, worker_env_vars)
if not ping_worker(worker):
return 1, time.time()
ray.get(worker.set_queue.remote(queue))
future = worker.execute.remote(lambda _: worker_fn())
result = None
while result is None:
try:
# TODO: make this event driven at some point.
retval = ray.get(future, timeout=0.1)
return_results.append((slot_info.rank, retval))
# Success
result = 0, time.time()
except GetTimeoutError:
# Timeout
if any(e.is_set() for e in events):
ray.kill(worker)
result = 1, time.time()
except Exception as e:
logger.error(f"{slot_info.hostname}[{slot_info.rank}]:{e}")
ray.kill(worker)
result = 1, time.time()
logger.debug(f"Worker ({slot_info}) routine is done!")
return result
return worker_loop
def run(self,
worker_fn: Callable,
callbacks: Optional[List[Callable]] = None) -> List[Any]:
"""Executes the provided function on all workers.
Args:
worker_fn: Target elastic function that can be executed.
callbacks: List of callables. Each callback must either
be a callable function or a class that implements __call__.
Every callback will be invoked on every value logged
by the rank 0 worker.
Returns:
List of return values from every completed worker.
"""
return_values = []
from ray.util.queue import Queue
import inspect
args = inspect.getfullargspec(Queue).args
if "actor_options" not in args:
# Ray 1.1 and less
_queue = Queue()
else:
_queue = Queue(actor_options={
"num_cpus": 0,
"resources": {
ray.state.current_node_id(): 0.001
}
})
self.driver.start(
self.settings.num_proc,
self._create_spawn_worker_fn(return_values, worker_fn, _queue))
def _process_calls(queue, callbacks, event):
if not callbacks:
return
while queue.actor:
if not queue.empty():
result = queue.get_nowait()
for c in callbacks:
c(result)
# avoid slamming the CI
elif event.is_set():
break
time.sleep(0.1)
try:
event = threading.Event()
_callback_thread = threading.Thread(
target=_process_calls,
args=(_queue, callbacks, event),
daemon=True)
_callback_thread.start()
res = self.driver.get_results()
event.set()
if _callback_thread:
_callback_thread.join(timeout=60)
finally:
if hasattr(_queue, "shutdown"):
_queue.shutdown()
else:
done_ref = _queue.actor.__ray_terminate__.remote()
done, not_done = ray.wait([done_ref], timeout=5)
if not_done:
ray.kill(_queue.actor)
self.driver.stop()
if res.error_message is not None:
raise RuntimeError(res.error_message)
for name, value in sorted(
res.worker_results.items(), key=lambda item: item[1][1]):
exit_code, timestamp = value
if exit_code != 0:
raise RuntimeError(
'Horovod detected that one or more processes '
'exited with non-zero '
'status, thus causing the job to be terminated. '
'The first process '
'to do so was:\nProcess name: {name}\nExit code: {code}\n'
.format(name=name, code=exit_code))
return_values = [
value for k, value in sorted(return_values, key=lambda kv: kv[0])
]
return return_values
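# ---------------------------------------------------------------------------
# Usage sketch (hypothetical): the executor class name, its settings object,
# and the Ray cluster bootstrap below are assumptions; only the start()/run()
# methods above appear in this file, so the sketch is kept as comments.
#
# import ray
#
# def train_fn():
#     # a real worker would initialize Horovod here and run its training loop
#     return "ok"
#
# ray.init(address="auto")                 # connect to an existing Ray cluster
# executor = ElasticRayExecutor(settings)  # assumed wrapper around this driver
# executor.start()                         # start rendezvous + elastic driver
# results = executor.run(train_fn)         # blocks until all workers return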
|
PypiClean
|
/opensesame_extension_osweb-2.0.1.0.tar.gz/opensesame_extension_osweb-2.0.1.0/osweb/src/js/_shared.js
|
function defineErrorAlert() {
if (!alertify.errorAlert) {
//define a new errorAlert base on alert
alertify.dialog('errorAlert', function factory() {
return {
build: function () {
this.setHeader('Application Error')
},
hooks: {
onclose: function() {
if (document.fullscreenElement) {
document.exitFullscreen().catch((error) => {
console.log(error);
});
}
}
}
}
}, true, 'alert')
}
}
// Callback function to handle errors
function errorHandler (event) {
const current_item = runner._itemStack.pop()
const item_name = current_item['item']
const item = runner._experiment.items['_items'][item_name]
const phase = current_item['phase']
let error_msg = `<h1>${event.message}</h1>`
if (item.type === 'inline_javascript') {
error_msg += `<p>This error occurred on <b>line ${event.lineno}</b> in the <b>${phase}</b> phase of item <b>${item_name}</b>.</p>`
// For inline_javascript items, we can actually get the code and
// highlight the offending line
error_msg += '<pre>\n'
const code = item.vars.get(`_${phase}`, null, false)
let code_lines = code.split(/\r?\n/)
const min_lineno = Math.max(0, event.lineno - 4)
const max_lineno = Math.min(code_lines.length, event.lineno + 3)
code_lines = code_lines.slice(min_lineno, max_lineno)
let lineno = min_lineno
for (const code_line of code_lines) {
lineno += 1
if (lineno === event.lineno) {
error_msg += `${lineno} <span class="error-line">${code_line}\n</span>`
} else {
error_msg += `${lineno} ${code_line}\n`
}
}
error_msg += '</pre>\n'
} else {
error_msg += `<p>This error occurred in the <b>${phase}</b> phase of item <b>${item_name}</b>.</p>`
const script = runner._experiment._javascriptWorkspace.current_script
if (script !== null) {
error_msg += `<p>The script below caused the error. This script may be a conditional expression or be embedded in text.</p><pre>\n${script}</pre>`
}
}
alertify.errorAlert(error_msg).showModal()
}
/**
* Function to handle input prompt dialog messages from the runner.
* @param {String} title - The title of the dialog box.
* @param {String} message - The message within the dialog box.
* @param {String} defaultValue - The default value for the input field.
* @param {String} _
* @param {Object} onConfirm - The confirm event.
* @param {Object} onCancel - The cancel event.
*/
function prompt(title, message, defaultValue, _, onConfirm, onCancel) {
alertify.prompt(
title,
message,
defaultValue,
function (_, value) {
onConfirm(value)
},
function () {
onCancel()
}.bind(this)
).showModal()
}
|
PypiClean
|
/omero_py-5.15.0-py3-none-any.whl/omero_model_LaserMediumI.py
|
try:
unicode
except NameError:
# Python 3: "unicode" is built-in
unicode = str
import Ice
import IceImport
import omero
IceImport.load("omero_model_DetailsI")
IceImport.load("omero_model_LaserMedium_ice")
from omero.rtypes import rlong
from collections import namedtuple
_omero = Ice.openModule("omero")
_omero_model = Ice.openModule("omero.model")
__name__ = "omero.model"
class LaserMediumI(_omero_model.LaserMedium):
# Property Metadata
_field_info_data = namedtuple("FieldData", ["wrapper", "nullable"])
_field_info_type = namedtuple("FieldInfo", [
"value",
"details",
])
_field_info = _field_info_type(
value=_field_info_data(wrapper=omero.rtypes.rstring, nullable=False),
details=_field_info_data(wrapper=omero.proxy_to_instance, nullable=True),
) # end _field_info
VALUE = "ome.model.enums.LaserMedium_value"
DETAILS = "ome.model.enums.LaserMedium_details"
def errorIfUnloaded(self):
if not self._loaded:
raise _omero.UnloadedEntityException("Object unloaded:"+str(self))
def throwNullCollectionException(self,propertyName):
raise _omero.UnloadedEntityException(""+
"Error updating collection:" + propertyName +"\n"+
"Collection is currently null. This can be seen\n" +
"by testing \""+ propertyName +"Loaded\". This implies\n"+
"that this collection was unloaded. Please refresh this object\n"+
"in order to update this collection.\n")
def _toggleCollectionsLoaded(self, load):
pass
def __init__(self, id=None, loaded=None):
super(LaserMediumI, self).__init__()
if id is not None and isinstance(id, (str, unicode)) and ":" in id:
parts = id.split(":")
if len(parts) != 2:
raise Exception("Invalid proxy string: %s", id)
if parts[0] != self.__class__.__name__ and \
parts[0]+"I" != self.__class__.__name__:
raise Exception("Proxy class mismatch: %s<>%s" %
(self.__class__.__name__, parts[0]))
self._id = rlong(parts[1])
if loaded is None:
# If no loadedness was requested with
# a proxy string, then assume False.
loaded = False
else:
# Relying on omero.rtypes.rlong's error-handling
self._id = rlong(id)
if loaded is None:
loaded = True # Assume true as previously
self._loaded = loaded
if self._loaded:
self._details = _omero_model.DetailsI()
self._toggleCollectionsLoaded(True)
def unload(self, current = None):
self._loaded = False
self.unloadValue( )
self.unloadDetails( )
def isLoaded(self, current = None):
return self._loaded
def unloadCollections(self, current = None):
self._toggleCollectionsLoaded( False )
def isGlobal(self, current = None):
return True
def isMutable(self, current = None):
return False
def isAnnotated(self, current = None):
return False
def isLink(self, current = None):
return False
def shallowCopy(self, current = None):
if not self._loaded: return self.proxy()
copy = LaserMediumI()
copy._id = self._id;
copy._details = None # Unloading for the moment.
raise omero.ClientError("NYI")
def proxy(self, current = None):
if self._id is None: raise omero.ClientError("Proxies require an id")
return LaserMediumI( self._id.getValue(), False )
def getDetails(self, current = None):
self.errorIfUnloaded()
return self._details
def unloadDetails(self, current = None):
self._details = None
def getId(self, current = None):
return self._id
def setId(self, _id, current = None):
self._id = _id
def checkUnloadedProperty(self, value, loadedField):
if value is None:
self.__dict__[loadedField] = False
else:
self.__dict__[loadedField] = True
def unloadValue(self, ):
self._valueLoaded = False
self._value = None;
def getValue(self, current = None):
self.errorIfUnloaded()
return self._value
def setValue(self, _value, current = None, wrap=False):
self.errorIfUnloaded()
if wrap and self._field_info.value.wrapper is not None:
if _value is not None:
_value = self._field_info.value.wrapper(_value)
self._value = _value
pass
def ice_postUnmarshal(self):
"""
Provides additional initialization once all data loaded
"""
pass # Currently unused
def ice_preMarshal(self):
"""
Provides additional validation before data is sent
"""
pass # Currently unused
def __getattr__(self, name):
"""
Reroutes all access to object.field through object.getField() or object.isField()
"""
if "_" in name: # Ice disallows underscores, so these should be treated normally.
return object.__getattribute__(self, name)
field = "_" + name
capitalized = name[0].capitalize() + name[1:]
getter = "get" + capitalized
questn = "is" + capitalized
try:
self.__dict__[field]
if hasattr(self, getter):
method = getattr(self, getter)
return method()
elif hasattr(self, questn):
method = getattr(self, questn)
return method()
except:
pass
raise AttributeError("'%s' object has no attribute '%s' or '%s'" % (self.__class__.__name__, getter, questn))
def __setattr__(self, name, value):
"""
Reroutes all access to object.field through object.getField(), with the caveat
that all sets on variables starting with "_" are permitted directly.
"""
if name.startswith("_"):
self.__dict__[name] = value
return
else:
field = "_" + name
setter = "set" + name[0].capitalize() + name[1:]
if hasattr(self, field) and hasattr(self, setter):
method = getattr(self, setter)
return method(value)
raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, setter))
_omero_model.LaserMediumI = LaserMediumI
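# Usage sketch for the proxy-string constructor above; it requires a working
# OMERO/Ice runtime, so it is illustrative rather than normative.
if __name__ == "__main__":
    m = LaserMediumI("LaserMedium:1")   # proxy string parsed into class name + id
    assert not m.isLoaded()             # proxy strings default to loaded=False
    p = LaserMediumI(1, loaded=False)   # equivalent explicit construction
    print(p.getId().getValue())         # -> 1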
|
PypiClean
|
/tf-models-no-deps-2.11.2.tar.gz/tf-models-no-deps-2.11.2/official/legacy/xlnet/optimization.py
|
from absl import logging
import tensorflow as tf
from official.nlp import optimization
class WarmUp(tf.keras.optimizers.schedules.LearningRateSchedule):
"""Applys a warmup schedule on a given learning rate decay schedule."""
def __init__(self,
initial_learning_rate,
decay_schedule_fn,
warmup_steps,
power=1.0,
name=None):
super(WarmUp, self).__init__()
self.initial_learning_rate = initial_learning_rate
self.warmup_steps = warmup_steps
self.power = power
self.decay_schedule_fn = decay_schedule_fn
self.name = name
def __call__(self, step):
with tf.name_scope(self.name or "WarmUp") as name:
# Implements polynomial warmup. i.e., if global_step < warmup_steps, the
# learning rate will be `global_step/num_warmup_steps * init_lr`.
global_step_float = tf.cast(step, tf.float32)
warmup_steps_float = tf.cast(self.warmup_steps, tf.float32)
warmup_percent_done = global_step_float / warmup_steps_float
warmup_learning_rate = (
self.initial_learning_rate *
tf.math.pow(warmup_percent_done, self.power))
return tf.cond(
global_step_float < warmup_steps_float,
lambda: warmup_learning_rate,
lambda: self.decay_schedule_fn(step - self.warmup_steps),
name=name)
def get_config(self):
return {
"initial_learning_rate": self.initial_learning_rate,
"decay_schedule_fn": self.decay_schedule_fn,
"warmup_steps": self.warmup_steps,
"power": self.power,
"name": self.name
}
def create_optimizer(init_lr,
num_train_steps,
num_warmup_steps,
min_lr_ratio=0.0,
adam_epsilon=1e-8,
weight_decay_rate=0.0):
"""Creates an optimizer with learning rate schedule."""
# Implements linear decay of the learning rate.
learning_rate_fn = tf.keras.optimizers.schedules.PolynomialDecay(
initial_learning_rate=init_lr,
decay_steps=num_train_steps - num_warmup_steps,
end_learning_rate=init_lr * min_lr_ratio)
if num_warmup_steps:
learning_rate_fn = WarmUp(
initial_learning_rate=init_lr,
decay_schedule_fn=learning_rate_fn,
warmup_steps=num_warmup_steps)
if weight_decay_rate > 0.0:
logging.info(
"Using AdamWeightDecay with adam_epsilon=%.9f weight_decay_rate=%.3f",
adam_epsilon, weight_decay_rate)
optimizer = optimization.AdamWeightDecay(
learning_rate=learning_rate_fn,
weight_decay_rate=weight_decay_rate,
beta_1=0.9,
beta_2=0.999,
epsilon=adam_epsilon,
exclude_from_weight_decay=["LayerNorm", "layer_norm", "bias"],
include_in_weight_decay=["r_s_bias", "r_r_bias", "r_w_bias"])
else:
logging.info("Using Adam with adam_epsilon=%.9f", (adam_epsilon))
optimizer = tf.keras.optimizers.legacy.Adam(
learning_rate=learning_rate_fn, epsilon=adam_epsilon)
return optimizer, learning_rate_fn
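# Usage sketch with illustrative hyperparameters. During warmup the schedule
# is linear (power=1.0), so at step 500 the rate is about 500/1000 * 1e-4 = 5e-5.
if __name__ == "__main__":
  optimizer, lr_fn = create_optimizer(
      init_lr=1e-4,
      num_train_steps=10000,
      num_warmup_steps=1000,
      weight_decay_rate=0.01)
  print(float(lr_fn(500)))  # ~5e-5 in eager mode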
|
PypiClean
|
/msgraph_beta_sdk-1.0.0a9-py3-none-any.whl/msgraph/generated/models/user_training_event_info.py
|
from __future__ import annotations
from kiota_abstractions.serialization import AdditionalDataHolder, Parsable, ParseNode, SerializationWriter
from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union
if TYPE_CHECKING:
from . import training_status, user_training_content_event_info
class UserTrainingEventInfo(AdditionalDataHolder, Parsable):
def __init__(self,) -> None:
"""
Instantiates a new userTrainingEventInfo and sets the default values.
"""
# Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
self._additional_data: Dict[str, Any] = {}
# Display name of the training.
self._display_name: Optional[str] = None
# Latest status of the training assigned to the user. Possible values are: unknown, assigned, inProgress, completed, overdue, unknownFutureValue.
self._latest_training_status: Optional[training_status.TrainingStatus] = None
# The OdataType property
self._odata_type: Optional[str] = None
# Event details of the training when it was assigned to the user.
self._training_assigned_properties: Optional[user_training_content_event_info.UserTrainingContentEventInfo] = None
# Event details of the training when it was completed by the user.
self._training_completed_properties: Optional[user_training_content_event_info.UserTrainingContentEventInfo] = None
# Event details of the training when it was updated/in-progress by the user.
self._training_updated_properties: Optional[user_training_content_event_info.UserTrainingContentEventInfo] = None
@property
def additional_data(self,) -> Dict[str, Any]:
"""
Gets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
Returns: Dict[str, Any]
"""
return self._additional_data
@additional_data.setter
def additional_data(self,value: Dict[str, Any]) -> None:
"""
Sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
Args:
value: Value to set for the AdditionalData property.
"""
self._additional_data = value
@staticmethod
def create_from_discriminator_value(parse_node: Optional[ParseNode] = None) -> UserTrainingEventInfo:
"""
Creates a new instance of the appropriate class based on discriminator value
Args:
parseNode: The parse node to use to read the discriminator value and create the object
Returns: UserTrainingEventInfo
"""
if parse_node is None:
raise Exception("parse_node cannot be undefined")
return UserTrainingEventInfo()
@property
def display_name(self,) -> Optional[str]:
"""
Gets the displayName property value. Display name of the training.
Returns: Optional[str]
"""
return self._display_name
@display_name.setter
def display_name(self,value: Optional[str] = None) -> None:
"""
Sets the displayName property value. Display name of the training.
Args:
value: Value to set for the display_name property.
"""
self._display_name = value
def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:
"""
The deserialization information for the current model
Returns: Dict[str, Callable[[ParseNode], None]]
"""
from . import training_status, user_training_content_event_info
fields: Dict[str, Callable[[Any], None]] = {
"displayName": lambda n : setattr(self, 'display_name', n.get_str_value()),
"latestTrainingStatus": lambda n : setattr(self, 'latest_training_status', n.get_enum_value(training_status.TrainingStatus)),
"@odata.type": lambda n : setattr(self, 'odata_type', n.get_str_value()),
"trainingAssignedProperties": lambda n : setattr(self, 'training_assigned_properties', n.get_object_value(user_training_content_event_info.UserTrainingContentEventInfo)),
"trainingCompletedProperties": lambda n : setattr(self, 'training_completed_properties', n.get_object_value(user_training_content_event_info.UserTrainingContentEventInfo)),
"trainingUpdatedProperties": lambda n : setattr(self, 'training_updated_properties', n.get_object_value(user_training_content_event_info.UserTrainingContentEventInfo)),
}
return fields
@property
def latest_training_status(self,) -> Optional[training_status.TrainingStatus]:
"""
Gets the latestTrainingStatus property value. Latest status of the training assigned to the user. Possible values are: unknown, assigned, inProgress, completed, overdue, unknownFutureValue.
Returns: Optional[training_status.TrainingStatus]
"""
return self._latest_training_status
@latest_training_status.setter
def latest_training_status(self,value: Optional[training_status.TrainingStatus] = None) -> None:
"""
Sets the latestTrainingStatus property value. Latest status of the training assigned to the user. Possible values are: unknown, assigned, inProgress, completed, overdue, unknownFutureValue.
Args:
value: Value to set for the latest_training_status property.
"""
self._latest_training_status = value
@property
def odata_type(self,) -> Optional[str]:
"""
Gets the @odata.type property value. The OdataType property
Returns: Optional[str]
"""
return self._odata_type
@odata_type.setter
def odata_type(self,value: Optional[str] = None) -> None:
"""
Sets the @odata.type property value. The OdataType property
Args:
value: Value to set for the odata_type property.
"""
self._odata_type = value
def serialize(self,writer: SerializationWriter) -> None:
"""
Serializes information the current object
Args:
writer: Serialization writer to use to serialize this model
"""
if writer is None:
raise Exception("writer cannot be undefined")
writer.write_str_value("displayName", self.display_name)
writer.write_enum_value("latestTrainingStatus", self.latest_training_status)
writer.write_str_value("@odata.type", self.odata_type)
writer.write_object_value("trainingAssignedProperties", self.training_assigned_properties)
writer.write_object_value("trainingCompletedProperties", self.training_completed_properties)
writer.write_object_value("trainingUpdatedProperties", self.training_updated_properties)
writer.write_additional_data_value(self.additional_data)
@property
def training_assigned_properties(self,) -> Optional[user_training_content_event_info.UserTrainingContentEventInfo]:
"""
Gets the trainingAssignedProperties property value. Event details of the training when it was assigned to the user.
Returns: Optional[user_training_content_event_info.UserTrainingContentEventInfo]
"""
return self._training_assigned_properties
@training_assigned_properties.setter
def training_assigned_properties(self,value: Optional[user_training_content_event_info.UserTrainingContentEventInfo] = None) -> None:
"""
Sets the trainingAssignedProperties property value. Event details of the training when it was assigned to the user.
Args:
value: Value to set for the training_assigned_properties property.
"""
self._training_assigned_properties = value
@property
def training_completed_properties(self,) -> Optional[user_training_content_event_info.UserTrainingContentEventInfo]:
"""
Gets the trainingCompletedProperties property value. Event details of the training when it was completed by the user.
Returns: Optional[user_training_content_event_info.UserTrainingContentEventInfo]
"""
return self._training_completed_properties
@training_completed_properties.setter
def training_completed_properties(self,value: Optional[user_training_content_event_info.UserTrainingContentEventInfo] = None) -> None:
"""
Sets the trainingCompletedProperties property value. Event details of the training when it was completed by the user.
Args:
value: Value to set for the training_completed_properties property.
"""
self._training_completed_properties = value
@property
def training_updated_properties(self,) -> Optional[user_training_content_event_info.UserTrainingContentEventInfo]:
"""
Gets the trainingUpdatedProperties property value. Event details of the training when it was updated/in-progress by the user.
Returns: Optional[user_training_content_event_info.UserTrainingContentEventInfo]
"""
return self._training_updated_properties
@training_updated_properties.setter
def training_updated_properties(self,value: Optional[user_training_content_event_info.UserTrainingContentEventInfo] = None) -> None:
"""
Sets the trainingUpdatedProperties property value. Event details of the training when it was updated/in-progress by the user.
Args:
value: Value to set for the training_updated_properties property.
"""
self._training_updated_properties = value
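# Usage sketch: populate the generated model, then hand it to any concrete
# kiota SerializationWriter. The writer named below is an assumption and is
# not part of this module.
if __name__ == "__main__":
    info = UserTrainingEventInfo()
    info.display_name = "Phishing awareness training"
    info.odata_type = "#microsoft.graph.userTrainingEventInfo"
    # writer = JsonSerializationWriter()  # hypothetical concrete writer
    # info.serialize(writer)
    print(info.display_name)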
|
PypiClean
|
/second_brain_tools-0.0.4.tar.gz/second_brain_tools-0.0.4/docs/index.md
|
# Welcome to Second-Brain-Tools
Please note that the Second-Brain-Tools project is intended solely as a public archive and is not to be considered a production version. The tools provided within this project are meant for informational and educational purposes only. While efforts have been made to ensure the accuracy and reliability of the information, we cannot guarantee its completeness or suitability for any specific purpose.
Users are advised to exercise caution and discretion when utilizing the tools and content available in Second-Brain-Tools. We do not assume any responsibility for any errors, omissions, or consequences arising from the use of this project.
## Options
* `second-brain-tools --help` - Display help.
* `second-brain-tools --install-completion [bash|zsh|fish|powershell|pwsh]` - Install auto-completion for your desired shell.
* `second-brain-tools --show-completion [bash|zsh|fish|powershell|pwsh]` - Show completion for your desired shell.

## Alias
* `sbt --help` - Display help.
* `sbt --install-completion [bash|zsh|fish|powershell|pwsh]` - Install auto-completion for your desired shell.
* `sbt --show-completion [bash|zsh|fish|powershell|pwsh]` - Show completion for your desired shell.

|
PypiClean
|
/python-pyparts-1.0.0.tar.gz/python-pyparts-1.0.0/src/pyparts/platforms/raspberrypi_platform.py
|
import RPi.GPIO as gpio
from pyparts.platforms import base_platform
from pyparts.platforms.gpio import raspberrypi_gpio as rpi_gpio
from pyparts.platforms.pwm import raspberrypi_pwm as rpi_pwm
from pyparts.platforms.spi import raspberrypi_spi as rpi_spi
# Create local copies of the numbering schemes for conveinence.
BCM = gpio.BCM
BOARD = gpio.BOARD
class RaspberryPiPlatform(base_platform.BasePlatform):
"""Raspberry Pi implementation of a platform.
RaspberryPiPlatform provides peripheral devices that can be used by parts to
interact with the Raspberry Pi computer. Available peripherals:
* DigitalInput
* DigitalOutput
* PWMOutput
* HardwareSPIBus
Attributes:
_pin_numbering: BCM or BOARD. The current pin numbering scheme.
"""
def __init__(self, pin_numbering=gpio.BOARD):
"""Creates a Raspberry Pi platform.
Args:
pin_numbering: BCM or BOARD. Specifies the pin numbering scheme to use.
(default=BOARD)
Raises:
ValueError: The pin numbering scheme was not one of (BCM, BOARD).
"""
super(RaspberryPiPlatform, self).__init__()
if pin_numbering not in (BCM, BOARD):
raise ValueError('Pin numbering must be one of: BCM, BOARD. Got %s'
% str(pin_numbering))
gpio.setmode(pin_numbering)
self._pin_numbering = pin_numbering
def __del__(self):
"""Destructor. Cleans up GPIO pins."""
gpio.cleanup()
@property
def pin_numbering(self):
"""Gets the current pin numbering scheme.
Returns:
The current pin numbering scheme. One of BCM or BOARD.
"""
return self._pin_numbering
def get_digital_input(self, pin):
"""Creates a digital input pin on a Raspberry Pi.
Args:
pin: Integer. Pin number to create the pin on.
Returns:
A RaspberryPiDigitalInput object for the pin.
"""
return rpi_gpio.RaspberryPiDigitalInput(pin)
def get_digital_output(self, pin):
"""Creates a digital output pin on a Raspberry Pi.
Args:
pin: Integer. Pin number to create the pin on.
Returns:
A RaspberryPiDigitalOutput object for the pin.
"""
return rpi_gpio.RaspberryPiDigitalOutput(pin)
def get_pwm_output(self, pin):
"""Creates a PWM outut pin on a Raspberry Pi.
Args:
pin: Integer. Pin number to create the pin on.
Returns:
A RaspberryPiPWMOutput object for the pin.
"""
output = rpi_gpio.RaspberryPiDigitalOutput(pin)
return rpi_pwm.RaspberryPiPWMOutput(output)
def get_hardware_spi_bus(self, port, device):
"""Creates a hardware based SPI bus on a Raspberry Pi.
The Raspberry Pi has an available hardware SPI interface at /dev/spidevX.Y
where X and Y are the port and device number respectively.
Args:
port: Integer. The SPI port number to use.
device: Integer. The SPI device number to use.
Returns:
A RaspberryPiHardwareSPIBus object for the port/device.
"""
return rpi_spi.RaspberryPiHardwareSPIBus(port, device)
def get_software_spi_bus(self, sclk_pin, mosi_pin, miso_pin, ss_pin):
"""Not implemented."""
raise NotImplementedError
def get_i2c_bus(self):
"""Not implemented."""
raise NotImplementedError
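# Blink sketch: assumes real Raspberry Pi hardware, an LED wired to BOARD
# pin 11, and that the returned object exposes set_high()/set_low(). Those
# methods live in raspberrypi_gpio and are an assumption here.
if __name__ == '__main__':
  import time

  platform = RaspberryPiPlatform(pin_numbering=BOARD)
  led = platform.get_digital_output(11)
  for _ in range(5):
    led.set_high()   # assumed RaspberryPiDigitalOutput API
    time.sleep(0.5)
    led.set_low()
    time.sleep(0.5)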
|
PypiClean
|
/featureform-enterprise-0.10.3.tar.gz/featureform-enterprise-0.10.3/src/featureform/local_cache.py
|
import json
import os
from functools import lru_cache
from typing import Callable, Set
import pandas as pd
from featureform import SQLiteMetadata
from featureform.local_utils import get_sql_transformation_sources
from featureform.resources import SourceType  # TODO: import via client.source instead
from pandas.core.generic import NDFrame
from typeguard import typechecked
from .file_utils import absolute_file_paths
class LocalCache:
def __init__(self):
feature_form_dir = os.environ.get("FEATUREFORM_DIR", ".featureform")
self.cache_dir = os.environ.get(
"FEATUREFORM_CACHE_DIR", os.path.join(feature_form_dir, "cache")
)
@typechecked
def get_or_put(
self,
resource_type: str,
resource_name: str,
resource_variant: str,
source_name: str,
source_variant: str,
func: Callable[[], NDFrame],
) -> NDFrame:
"""
Caches the result of a callable to a local file. If the source files have changed, the cache is invalidated.
"""
cache_file_path = self._cache_file_path(
resource_type, resource_name, resource_variant
)
with SQLiteMetadata() as db:
# check db for source files
source_files_from_db = db.get_source_files_for_resource(
resource_type, resource_name, resource_variant
)
if source_files_from_db:
self._invalidate_cache_if_source_files_changed(
source_files_from_db, cache_file_path
)
# get source files from db or compute the sources
source_files: Set[str] = (
set(map(lambda x: x["file_path"], source_files_from_db))
if source_files_from_db
else self.get_source_files_for_source(db, source_name, source_variant)
)
return self._get_or_put(
db,
resource_type,
resource_name,
resource_variant,
cache_file_path,
source_files,
func,
)
@typechecked
def get_or_put_training_set(
self,
training_set_name: str,
training_set_variant: str,
func: Callable[[], NDFrame],
) -> NDFrame:
"""
Caches the result of a training set to a local file. Difference between this one and the one above
is how this needs to fetch all the source files for the training set.
"""
resource_type = "training_set"
file_path = self._cache_file_path(
resource_type, training_set_name, training_set_variant
)
with SQLiteMetadata() as db:
# check db for source files
source_files_from_db = db.get_source_files_for_resource(
resource_type, training_set_name, training_set_variant
)
# Only check to invalidate the cache if we have source files in the db
if source_files_from_db:
self._invalidate_cache_if_source_files_changed(
source_files_from_db, file_path
)
source_files = set()
if source_files_from_db:
source_files.update(
set(map(lambda x: x["file_path"], source_files_from_db))
)
else:
ts_variant = db.get_training_set_variant(
training_set_name, training_set_variant
)
label_variant = db.get_label_variant(
ts_variant["label_name"], ts_variant["label_variant"]
)
source_files.update(
self.get_source_files_for_source(
db,
label_variant["source_name"],
label_variant["source_variant"],
)
)
features = db.get_training_set_features(
training_set_name, training_set_variant
)
for feature in features:
feature_variant = db.get_feature_variant(
feature["feature_name"], feature["feature_variant"]
)
source_files.update(
self.get_source_files_for_source(
db,
feature_variant["source_name"],
feature_variant["source_variant"],
)
)
return self._get_or_put(
db,
resource_type,
training_set_name,
training_set_variant,
file_path,
source_files,
func,
)
def _get_or_put(
self,
db,
resource_type,
resource_name,
resource_variant,
file_path,
source_files,
func,
) -> NDFrame:
if os.path.exists(file_path):
return pd.read_pickle(file_path)
else:
# create the dir if not exists and write the file
df = func()
os.makedirs(self.cache_dir, exist_ok=True)
df.to_pickle(file_path)
for source_file in source_files:
db.insert_or_update(
"resource_source_files",
["resource_type", "name", "variant", "file_path"],
["updated_at"],
resource_type,
resource_name,
resource_variant,
source_file,
str(os.path.getmtime(source_file)),
)
return df
@lru_cache(maxsize=128)
def get_source_files_for_source(self, db, source_name, source_variant) -> Set[str]:
"""
Recursively gets the source files for a given source. Each call is cached.
"""
source = db.get_source_variant(source_name, source_variant)
transform_type = db.is_transformation(source_name, source_variant)
sources = set()
if transform_type == SourceType.PRIMARY_SOURCE.value:
return {source["definition"]}
elif transform_type == SourceType.SQL_TRANSFORMATION.value:
query = source["definition"]
transformation_sources = get_sql_transformation_sources(query)
for source_name, source_variant in transformation_sources:
sources.update(
self.get_source_files_for_source(db, source_name, source_variant)
)
elif transform_type == SourceType.DF_TRANSFORMATION.value:
dependencies = json.loads(source["inputs"])
for name, variant in dependencies:
sources.update(self.get_source_files_for_source(db, name, variant))
elif transform_type == SourceType.DIRECTORY.value:
path = source["definition"]
for absolute_file, _ in absolute_file_paths(path):
sources.add(absolute_file)
else:
raise Exception(f"Unknown source type: {transform_type}")
return sources
def _invalidate_cache_if_source_files_changed(
self, source_files_from_db, cache_file_path
):
if any(
self._file_has_changed(source_file["updated_at"], source_file["file_path"])
for source_file in source_files_from_db
):
if os.path.exists(cache_file_path):
os.remove(cache_file_path)
def _cache_file_path(self, resource_type: str, name: str, variant: str):
key = f"{resource_type}__{name}__{variant}"
return f"{self.cache_dir}/{key}.pkl"
@staticmethod
def _file_has_changed(last_updated_at, file_path):
"""
Currently using last updated at for determining if a file has changed. We can consider using the file hash
if this becomes a performance issue.
"""
os_last_updated = os.path.getmtime(file_path)
return os_last_updated > float(last_updated_at)
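# Usage sketch for get_or_put: the resource names are hypothetical, func only
# runs on a cache miss, and a configured local Featureform workspace is
# assumed because the call consults SQLiteMetadata internally.
if __name__ == "__main__":
    cache = LocalCache()
    df = cache.get_or_put(
        resource_type="feature",
        resource_name="avg_transactions",
        resource_variant="v1",
        source_name="transactions",
        source_variant="v1",
        func=lambda: pd.DataFrame({"user": [1, 2], "avg": [3.5, 7.0]}),
    )
    print(df.head())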
|
PypiClean
|
/love_course_2016_2019-2023.3.1.0-py3-none-any.whl/LoveCourse20162019/docs/mo-ka-xi-lie/魔卡新课《恋爱脊椎框架推拉》:8错误的框架已经形成,还有救吗.md
|
# Moka's New Course "The Spine of Love: Frame Push and Pull": 8. The Wrong Frame Has Already Formed. Is There Still Hope?
Hello. If you have listened this far into the course, perhaps things have suddenly become clear and you finally understand why so many past situations ended the way they did. Or perhaps you are struck that emotion, which looks irrational and ruleless, actually contains this much internal logical structure.
Or perhaps you are still confused: you understand the ideas well enough, but you are in a relationship right now, and because you had no concept of a frame before, the relationship's quality is low, neither of you is happy, and you want to change it but see no way, because the wrong frame has already formed.
Is there still a chance? There is, but the risk is high. So before you read on, a warning: this lesson is strong medicine. If you misjudge the dose, your relationship may head straight for a breakup, so think it over carefully and act prudently according to your own situation.
Ask yourself first whether you can face and bear the worst outcome if things do not go your way. If you cannot, do not attempt this blindly; if the result is bad, Moka takes no responsibility. Alright, let's first watch a clip. (Video dialogue:) "I have something to tell you." "Go ahead."
"This incident made me see our relationship clearly. We don't suit each other and we don't trust each other, so it's better to separate. Here, this is yours." "I really know I was wrong." "Don't be like this; let's part on good terms. We can still be friends."
"Why buy flowers? Such a waste. Be yourself, don't change for me. Goodbye." "Xiaowei! Xiaowei!" "Don't think I have to take it just because you're right. I'm telling you, from today on I'm done with you. We're finished. I'm not afraid of anyone."
"I'm fine on my own. What's wrong with being alone? I can live without you." (The rest of the clip is the pair's drunken, rambling back-and-forth: "Drink with me." "You should go to sleep." "You don't need to look after me.")
Isn't the man in that clip pitiful? Have you had a similar experience: treating someone well in a thousand ways, yet receiving in return not the happy life you imagined,
but instead, for reasons you cannot name, more annoyance and less regard the better you treat them? As I keep saying: if the frame is wrong, all your investment may be wasted effort. Grovel long enough in a relationship and it is like sinking into a swamp, with nothing to lean on and nothing to hold; apart from slowly sinking and dying, there is nothing you can do.
Let me give your relationship a vivid metaphor. You and your partner are a balance scale, one of you on each side. Because you like them so much and love them so much, you keep placing more and more weights on your own pan,
while failing to notice whether their goodwill toward you is growing by the day, standing still, or even retreating. When you pour yourself in headlong, recklessly piling on weights, and the other person is not investing as you are, or their investment is of lower quality, the relationship takes on a state of them high, you low.
If that unbalanced state persists, or deepens, the relationship enters a genuine groveling state. And the truly troublesome thing I observe in real life is this: when the side that invests more
senses the imbalance, senses the discomfort of the other's high posture and their own low one, they usually make the losing move. Far from reducing their investment, they add to it, hoping the extra will buy a little more attention and a little more kindness.
That is, instead of removing weights they keep stacking them on. The result is that the other side rises ever higher, they sink ever lower, and it ends in outright groveling, while the other person only grows more disgusted with them. The beloved floats high in the clouds and becomes a god in their heart; they themselves sink into the dust, lowly as a ghost.
Hearing this, do you see it now? How do you reset a relationship's wrong frame? Exactly: remove weights. Remove a large quantity of love's weights in one stroke, so that the scale returns to its natural balance point. But how much to remove, and how to gauge the degree,
depends on your own emotional intelligence and a careful reading of your and their actual situation. Because your position drops from high to low in an instant, the other person is bound to feel strong discomfort; they may be hurt and upset, and the scale may even snap outright and the relationship dissolve. The operation carries real danger, so be cautious.
At this point you may wonder: if removing everything at once is so dangerous, why not take the weights off little by little? First, because gradual removal demands extremely high emotional intelligence, self-control, emotional maturity, and experience, and people equipped with those would never have let the relationship slide into groveling in the first place.
Second, the scale is only a visual metaphor so you can grasp the concept intuitively. In real emotional life, withdrawing your love and liking bit by bit, over many rounds, across every scene and conversation you share with them, staying consistent from start to finish and leaving no trace, is extremely, extremely difficult. So what exactly do you do? Let's watch a case clip.
(Video dialogue, abridged:) "Did you return the call? If not, then don't bother ever again." "Here you are." "Careful with that; do you know how much it costs? A year of my salary wouldn't cover it." "Thank you, Keke. With you around I'll never go hungry again."
"Take it." "Why?" "Just take it." "I knew you weren't that generous. Come on: one, two, three." "Wait, one more, a closer one." "Are you all right? Let me take you to a hospital." "I'm fine. Here's my card; if anything is wrong, call me."
"Where's Xiaowei?" "Gone." "You have to hurry; something's happened." "What are you doing? Put that down." "He's my man. Who said you could touch my man?"
"Stop making a scene." "Let go." "Not bad, at least you're honest. So tell me, why did you leave me to die?" "Leave you to die? You only dreamed it; how could I know your dream, let alone save you in it?"
"Fine, my fault then. I could have taken the bus home today, but thanks to your scene I rode in someone else's car. Call it five kilometers, at least half a liter of fuel. Pin all of that on me, why don't you. Consideration for the planet comes first. But the planet thinks I'm far too pretty, so you're forgiven. Bye-bye."
In this clip the method the woman uses on the man is exactly weight removal:
she creates a sense of loss in him and re-arouses his pursuit, then wins him back. In effect she deliberately removes her own weights while getting the man to add his, until the scale balances, or even tips up on her side. So concretely, how is it done? First, stage one.
Your relationship must satisfy a precondition: for a long stretch, right up to now, you have treated the other person very well, exceptionally well, even grovelingly, while they take it for granted, feel no gratitude, and even escalate their demands on you. Then you wait for an opening:
they genuinely do something wrong, something that even their own family and friends would call out of line. Then you propose a breakup; phone, WeChat, or face to face all work. Once you have said it, disappear. They will certainly come looking and try to reach you, and the crux at this point
is that they must not find you or reach you. Right now their heart holds regret, self-reproach, sadness, and they want to explain themselves to you. If they cannot reach you, those feelings ferment inside them; they multiply and swell,
making them want to find you all the more. Those feelings work in favor of restoring your relationship to health. But note one thing: how long should you stay unreachable? This is the dangerous, hard-to-gauge part. One person may need to cut contact for three days,
another for a week, another for ten days. You must restore contact before they calm down and give up trying to reach you; otherwise the breakup turns real. And note one more point: you must answer when they reach out, and never reach out first,
or all previous effort is lost. For example, suppose they call you twenty-odd times and you don't pick up, and you judge they will make about twenty-five calls. Then you should answer around the twenty-second or twenty-third call. You must not let the twenty-fourth call pass, wait for a twenty-fifth that never comes,
and then, unable to hold back, call them. The moment that happens your consistency snaps, and the whole frame-reset task fails. Next, stage two.
Once contact resumes, they are sure to ask why you ignored them and never replied. The reproach will not be forceful; it will be wrapped inside apologies and "I'm sorry"s. At this point, how do you dissolve that undercurrent of resentment while preserving and enlarging the guilt in their heart? You display your accumulated costs (刷心肯指数; covered in detail in the Attraction Principles video course, which you can consult). For example you might say:
"Everyone is exhausted, really. My work pressure is huge, and in life I swallow so many grievances. Coming home to my partner, I don't demand understanding or support, but I also cannot keep absorbing your tantrums forever. So I truly wanted to give up. I am truly tired." That is displaying your costs.
Then, stage three: they will apologize hard and try to win you back. Do not agree at once. You can say, "Let me think it over. I'm exhausted; we'll talk in a few days." During those days they will keep contacting you from time to time, showing concern, even trying to please and compensate you.
When you feel you have received their sincerity, tell them, "All right, let's try again." You can also raise some conditions: the habits of theirs you have never been able to stand, such as constant gaming, constant drinking, staying out late, coldness toward your parents, reckless spending, and so on.
Raise only one or two conditions, no more. Too many and they cannot fix them all; they will only grow resentful instead. At that point the whole frame-reset task is complete. Finally, one more reminder: the practical difficulty of this method is high. Before you begin you must be prepared
for the possibility that the scale snaps and you break up. This is placing yourself on deadly ground in order to survive. The operation has two possible outcomes. First, the scale of your relationship swings back toward balance and the relationship begins to develop healthily. Second, you see the true substance of your relationship: it is knocked back to its original shape, every unrealistic fantasy and bubble evaporates,
and you can soberly recognize the real state of your intimate relationship. So if you are trapped in a groveling relationship that makes life feel worse than death, then after thinking it through with a cool head, you can
fight with your back to the river (背水一战).
|
PypiClean
|
/zodbshootout-0.8.0.tar.gz/zodbshootout-0.8.0/doc/index.rst
|
================
ZODB Shoot Out
================
This application measures and compares the performance of various
ZODB storages and configurations. It is derived from the RelStorage
speedtest script, but this version allows arbitrary storage types and
configurations, provides more measurements, and produces numbers that
are easier to interpret.
.. toctree::
install
zodbshootout
results
changelog
=============
Development
=============
.. image:: https://travis-ci.org/zodb/zodbshootout.png?branch=master
:target: https://travis-ci.org/zodb/zodbshootout
.. image:: https://coveralls.io/repos/zodb/zodbshootout/badge.svg?branch=master&service=github
:target: https://coveralls.io/github/zodb/zodbshootout?branch=master
.. image:: https://readthedocs.org/projects/zodbshootout/badge/?version=latest
:target: http://zodbshootout.readthedocs.io/en/latest/?badge=latest
:alt: Documentation Status
zodbshootout is hosted at GitHub:
https://github.com/zodb/zodbshootout
|
PypiClean
|
/smx-client-3.2.14.tar.gz/smx-client-3.2.14/xms_client/api_client.py
|
from __future__ import absolute_import
import datetime
import json
import mimetypes
from multiprocessing.pool import ThreadPool
import os
import re
import tempfile
# python 2 and python 3 compatibility library
import six
from six.moves.urllib.parse import quote
from xms_client.configuration import Configuration
import xms_client.models
from xms_client import rest
class ApiClient(object):
"""Generic API client for Swagger client library builds.
Swagger generic API client. This client handles the client-
server communication, and is invariant across implementations. Specifics of
the methods and models for each application are generated from the Swagger
templates.
NOTE: This class is auto generated by the swagger code generator program.
Ref: https://github.com/swagger-api/swagger-codegen
Do not edit the class manually.
:param configuration: .Configuration object for this client
:param header_name: a header to pass when making calls to the API.
:param header_value: a header value to pass when making calls to
the API.
:param cookie: a cookie to include in the header when making calls
to the API
"""
PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types
NATIVE_TYPES_MAPPING = {
'int': int,
'long': int if six.PY3 else long, # noqa: F821
'float': float,
'str': str,
'bool': bool,
'date': datetime.date,
'datetime': datetime.datetime,
'object': object,
}
def __init__(self, configuration=None, header_name=None, header_value=None,
cookie=None):
if configuration is None:
configuration = Configuration()
self.configuration = configuration
self.pool = ThreadPool()
self.rest_client = rest.RESTClientObject(configuration)
self.default_headers = {}
if header_name is not None:
self.default_headers[header_name] = header_value
self.cookie = cookie
# Set default User-Agent.
self.user_agent = 'Swagger-Codegen/3.2.14/python'
def __del__(self):
self.pool.close()
self.pool.join()
@property
def user_agent(self):
"""User agent for this API client"""
return self.default_headers['User-Agent']
@user_agent.setter
def user_agent(self, value):
self.default_headers['User-Agent'] = value
def set_default_header(self, header_name, header_value):
self.default_headers[header_name] = header_value
def __call_api(
self, resource_path, method, path_params=None,
query_params=None, header_params=None, body=None, post_params=None,
files=None, response_type=None, auth_settings=None,
_return_http_data_only=None, collection_formats=None,
_preload_content=True, _request_timeout=None):
config = self.configuration
# header parameters
header_params = header_params or {}
header_params.update(self.default_headers)
if self.cookie:
header_params['Cookie'] = self.cookie
if header_params:
header_params = self.sanitize_for_serialization(header_params)
header_params = dict(self.parameters_to_tuples(header_params,
collection_formats))
# path parameters
if path_params:
path_params = self.sanitize_for_serialization(path_params)
path_params = self.parameters_to_tuples(path_params,
collection_formats)
for k, v in path_params:
# specified safe chars, encode everything
resource_path = resource_path.replace(
'{%s}' % k,
quote(str(v), safe=config.safe_chars_for_path_param)
)
# query parameters
if query_params:
query_params = self.sanitize_for_serialization(query_params)
query_params = self.parameters_to_tuples(query_params,
collection_formats)
# post parameters
if post_params or files:
post_params = self.prepare_post_parameters(post_params, files)
post_params = self.sanitize_for_serialization(post_params)
post_params = self.parameters_to_tuples(post_params,
collection_formats)
# auth setting
self.update_params_for_auth(header_params, query_params, auth_settings)
# body
if body:
body = self.sanitize_for_serialization(body)
# request url
url = self.configuration.host + resource_path
# perform request and return response
response_data = self.request(
method, url, query_params=query_params, headers=header_params,
post_params=post_params, body=body,
_preload_content=_preload_content,
_request_timeout=_request_timeout)
self.last_response = response_data
return_data = response_data
if _preload_content:
# deserialize response data
if response_type:
return_data = self.deserialize(response_data, response_type)
else:
return_data = None
if _return_http_data_only:
return (return_data)
else:
return (return_data, response_data.status,
response_data.getheaders())
def sanitize_for_serialization(self, obj):
"""Builds a JSON POST object.
If obj is None, return None.
If obj is str, int, long, float, bool, return directly.
If obj is datetime.datetime, datetime.date
convert to string in iso8601 format.
If obj is list, sanitize each element in the list.
If obj is dict, return the dict.
If obj is swagger model, return the properties dict.
:param obj: The data to serialize.
:return: The serialized form of data.
"""
if obj is None:
return None
elif isinstance(obj, self.PRIMITIVE_TYPES):
return obj
elif isinstance(obj, list):
return [self.sanitize_for_serialization(sub_obj)
for sub_obj in obj]
elif isinstance(obj, tuple):
return tuple(self.sanitize_for_serialization(sub_obj)
for sub_obj in obj)
elif isinstance(obj, (datetime.datetime, datetime.date)):
return obj.isoformat()
if isinstance(obj, dict):
obj_dict = obj
else:
# Convert model obj to dict except
# attributes `swagger_types`, `attribute_map`
# and attributes which value is not None.
# Convert attribute name to json key in
# model definition for request.
obj_dict = {obj.attribute_map[attr]: getattr(obj, attr)
for attr, _ in six.iteritems(obj.swagger_types)
if getattr(obj, attr) is not None}
return {key: self.sanitize_for_serialization(val)
for key, val in six.iteritems(obj_dict)}
def deserialize(self, response, response_type):
"""Deserializes response into an object.
:param response: RESTResponse object to be deserialized.
:param response_type: class literal for
deserialized object, or string of class name.
:return: deserialized object.
"""
# handle file downloading
# save response body into a tmp file and return the instance
if response_type == "file":
return self.__deserialize_file(response)
# fetch data from response object
try:
data = json.loads(response.data)
except ValueError:
data = response.data
return self.__deserialize(data, response_type)
def __deserialize(self, data, klass):
"""Deserializes dict, list, str into an object.
:param data: dict, list or str.
:param klass: class literal, or string of class name.
:return: object.
"""
if data is None:
return None
if type(klass) == str:
if klass.startswith('list['):
sub_kls = re.match(r'list\[(.*)\]', klass).group(1)
return [self.__deserialize(sub_data, sub_kls)
for sub_data in data]
if klass.startswith('dict('):
sub_kls = re.match(r'dict\(([^,]*), (.*)\)', klass).group(2)
return {k: self.__deserialize(v, sub_kls)
for k, v in six.iteritems(data)}
# convert str to class
if klass in self.NATIVE_TYPES_MAPPING:
klass = self.NATIVE_TYPES_MAPPING[klass]
else:
klass = getattr(xms_client.models, klass)
if klass in self.PRIMITIVE_TYPES:
return self.__deserialize_primitive(data, klass)
elif klass == object:
return self.__deserialize_object(data)
elif klass == datetime.date:
return self.__deserialize_date(data)
elif klass == datetime.datetime:
return self.__deserialize_datatime(data)
else:
return self.__deserialize_model(data, klass)
def call_api(self, resource_path, method,
path_params=None, query_params=None, header_params=None,
body=None, post_params=None, files=None,
response_type=None, auth_settings=None, async_req=None,
_return_http_data_only=None, collection_formats=None,
_preload_content=True, _request_timeout=None):
"""Makes the HTTP request (synchronous) and returns deserialized data.
To make an async request, set the async_req parameter.
:param resource_path: Path to method endpoint.
:param method: Method to call.
:param path_params: Path parameters in the url.
:param query_params: Query parameters in the url.
:param header_params: Header parameters to be
placed in the request header.
:param body: Request body.
:param post_params dict: Request post form parameters,
for `application/x-www-form-urlencoded`, `multipart/form-data`.
:param auth_settings list: Auth Settings names for the request.
:param response: Response data type.
:param files dict: key -> filename, value -> filepath,
for `multipart/form-data`.
:param async_req bool: execute request asynchronously
:param _return_http_data_only: response data without head status code
and headers
:param collection_formats: dict of collection formats for path, query,
header, and post parameters.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return:
If the async_req parameter is True,
the request will be called asynchronously.
The method will return the request thread.
If async_req is False or missing,
then the method will return the response directly.
"""
if not async_req:
return self.__call_api(resource_path, method,
path_params, query_params, header_params,
body, post_params, files,
response_type, auth_settings,
_return_http_data_only, collection_formats,
_preload_content, _request_timeout)
else:
thread = self.pool.apply_async(self.__call_api, (resource_path,
method, path_params, query_params,
header_params, body,
post_params, files,
response_type, auth_settings,
_return_http_data_only,
collection_formats,
_preload_content, _request_timeout))
return thread
def request(self, method, url, query_params=None, headers=None,
post_params=None, body=None, _preload_content=True,
_request_timeout=None):
"""Makes the HTTP request using RESTClient."""
if method == "GET":
return self.rest_client.GET(url,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
headers=headers)
elif method == "HEAD":
return self.rest_client.HEAD(url,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
headers=headers)
elif method == "OPTIONS":
return self.rest_client.OPTIONS(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "POST":
return self.rest_client.POST(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "PUT":
return self.rest_client.PUT(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "PATCH":
return self.rest_client.PATCH(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "DELETE":
return self.rest_client.DELETE(url,
query_params=query_params,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
else:
raise ValueError(
"http method must be `GET`, `HEAD`, `OPTIONS`,"
" `POST`, `PATCH`, `PUT` or `DELETE`."
)
def parameters_to_tuples(self, params, collection_formats):
"""Get parameters as list of tuples, formatting collections.
:param params: Parameters as dict or list of two-tuples
:param dict collection_formats: Parameter collection formats
:return: Parameters as list of tuples, collections formatted
"""
new_params = []
if collection_formats is None:
collection_formats = {}
for k, v in six.iteritems(params) if isinstance(params, dict) else params: # noqa: E501
if k in collection_formats:
collection_format = collection_formats[k]
if collection_format == 'multi':
new_params.extend((k, value) for value in v)
else:
if collection_format == 'ssv':
delimiter = ' '
elif collection_format == 'tsv':
delimiter = '\t'
elif collection_format == 'pipes':
delimiter = '|'
else: # csv is the default
delimiter = ','
new_params.append(
(k, delimiter.join(str(value) for value in v)))
else:
new_params.append((k, v))
return new_params
def prepare_post_parameters(self, post_params=None, files=None):
"""Builds form parameters.
:param post_params: Normal form parameters.
:param files: File parameters.
:return: Form parameters with files.
"""
params = []
if post_params:
params = post_params
if files:
for k, v in six.iteritems(files):
if not v:
continue
file_names = v if type(v) is list else [v]
for n in file_names:
with open(n, 'rb') as f:
filename = os.path.basename(f.name)
filedata = f.read()
mimetype = (mimetypes.guess_type(filename)[0] or
'application/octet-stream')
params.append(
tuple([k, tuple([filename, filedata, mimetype])]))
return params
def select_header_accept(self, accepts):
"""Returns `Accept` based on an array of accepts provided.
:param accepts: List of headers.
:return: Accept (e.g. application/json).
"""
if not accepts:
return
accepts = [x.lower() for x in accepts]
if 'application/json' in accepts:
return 'application/json'
else:
return ', '.join(accepts)
def select_header_content_type(self, content_types):
"""Returns `Content-Type` based on an array of content_types provided.
:param content_types: List of content-types.
:return: Content-Type (e.g. application/json).
"""
if not content_types:
return 'application/json'
content_types = [x.lower() for x in content_types]
if 'application/json' in content_types or '*/*' in content_types:
return 'application/json'
else:
return content_types[0]
def update_params_for_auth(self, headers, querys, auth_settings):
"""Updates header and query params based on authentication setting.
:param headers: Header parameters dict to be updated.
:param querys: Query parameters tuple list to be updated.
:param auth_settings: Authentication setting identifiers list.
"""
if not auth_settings:
return
for auth in auth_settings:
auth_setting = self.configuration.auth_settings().get(auth)
if auth_setting:
if not auth_setting['value']:
continue
elif auth_setting['in'] == 'header':
headers[auth_setting['key']] = auth_setting['value']
elif auth_setting['in'] == 'query':
querys.append((auth_setting['key'], auth_setting['value']))
else:
raise ValueError(
'Authentication token must be in `query` or `header`'
)
def __deserialize_file(self, response):
"""Deserializes body to file
Saves response body into a file in a temporary folder,
using the filename from the `Content-Disposition` header if provided.
:param response: RESTResponse.
:return: file path.
"""
fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path)
os.close(fd)
os.remove(path)
content_disposition = response.getheader("Content-Disposition")
if content_disposition:
filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?',
content_disposition).group(1)
path = os.path.join(os.path.dirname(path), filename)
with open(path, "wb") as f:
f.write(response.data)
return path
def __deserialize_primitive(self, data, klass):
"""Deserializes string to primitive type.
:param data: str.
:param klass: class literal.
:return: int, long, float, str, bool.
"""
try:
return klass(data)
except UnicodeEncodeError:
return six.u(data)
except TypeError:
return data
def __deserialize_object(self, value):
"""Return a original value.
:return: object.
"""
return value
def __deserialize_date(self, string):
"""Deserializes string to date.
:param string: str.
:return: date.
"""
try:
from dateutil.parser import parse
return parse(string).date()
except ImportError:
return string
except ValueError:
raise rest.ApiException(
status=0,
reason="Failed to parse `{0}` as date object".format(string)
)
def __deserialize_datatime(self, string):
"""Deserializes string to datetime.
The string should be in iso8601 datetime format.
:param string: str.
:return: datetime.
"""
try:
from dateutil.parser import parse
return parse(string)
except ImportError:
return string
except ValueError:
raise rest.ApiException(
status=0,
reason=(
"Failed to parse `{0}` as datetime object"
.format(string)
)
)
def __deserialize_model(self, data, klass):
"""Deserializes list or dict to model.
:param data: dict, list.
:param klass: class literal.
:return: model object.
"""
if not klass.swagger_types and not hasattr(klass,
'get_real_child_model'):
return data
kwargs = {}
if klass.swagger_types is not None:
for attr, attr_type in six.iteritems(klass.swagger_types):
if (data is not None and
klass.attribute_map[attr] in data and
isinstance(data, (list, dict))):
value = data[klass.attribute_map[attr]]
kwargs[attr] = self.__deserialize(value, attr_type)
instance = klass(**kwargs)
if hasattr(instance, 'get_real_child_model'):
klass_name = instance.get_real_child_model(data)
if klass_name:
instance = self.__deserialize(data, klass_name)
return instance
|
PypiClean
|
/alipay_sdk_python-3.6.740-py3-none-any.whl/alipay/aop/api/request/AlipayUserWufufukaAliyunRefundRequest.py
|
import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.AlipayUserWufufukaAliyunRefundModel import AlipayUserWufufukaAliyunRefundModel
class AlipayUserWufufukaAliyunRefundRequest(object):
def __init__(self, biz_model=None):
self._biz_model = biz_model
self._biz_content = None
self._version = "1.0"
self._terminal_type = None
self._terminal_info = None
self._prod_code = None
self._notify_url = None
self._return_url = None
self._udf_params = None
self._need_encrypt = False
@property
def biz_model(self):
return self._biz_model
@biz_model.setter
def biz_model(self, value):
self._biz_model = value
@property
def biz_content(self):
return self._biz_content
@biz_content.setter
def biz_content(self, value):
if isinstance(value, AlipayUserWufufukaAliyunRefundModel):
self._biz_content = value
else:
self._biz_content = AlipayUserWufufukaAliyunRefundModel.from_alipay_dict(value)
@property
def version(self):
return self._version
@version.setter
def version(self, value):
self._version = value
@property
def terminal_type(self):
return self._terminal_type
@terminal_type.setter
def terminal_type(self, value):
self._terminal_type = value
@property
def terminal_info(self):
return self._terminal_info
@terminal_info.setter
def terminal_info(self, value):
self._terminal_info = value
@property
def prod_code(self):
return self._prod_code
@prod_code.setter
def prod_code(self, value):
self._prod_code = value
@property
def notify_url(self):
return self._notify_url
@notify_url.setter
def notify_url(self, value):
self._notify_url = value
@property
def return_url(self):
return self._return_url
@return_url.setter
def return_url(self, value):
self._return_url = value
@property
def udf_params(self):
return self._udf_params
@udf_params.setter
def udf_params(self, value):
if not isinstance(value, dict):
return
self._udf_params = value
@property
def need_encrypt(self):
return self._need_encrypt
@need_encrypt.setter
def need_encrypt(self, value):
self._need_encrypt = value
def add_other_text_param(self, key, value):
if not self.udf_params:
self.udf_params = dict()
self.udf_params[key] = value
def get_params(self):
params = dict()
params[P_METHOD] = 'alipay.user.wufufuka.aliyun.refund'
params[P_VERSION] = self.version
if self.biz_model:
params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
if self.biz_content:
if hasattr(self.biz_content, 'to_alipay_dict'):
params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
else:
params['biz_content'] = self.biz_content
if self.terminal_type:
params['terminal_type'] = self.terminal_type
if self.terminal_info:
params['terminal_info'] = self.terminal_info
if self.prod_code:
params['prod_code'] = self.prod_code
if self.notify_url:
params['notify_url'] = self.notify_url
if self.return_url:
params['return_url'] = self.return_url
if self.udf_params:
params.update(self.udf_params)
return params
def get_multipart_params(self):
multipart_params = dict()
return multipart_params
|
PypiClean
|
/acvt45-1.1.3.tar.gz/acvt45-1.1.3/acv_app/sdp_app.py
|
from acv_app.colors import _colors as colors
import pandas as pd
import numpy as np
import streamlit as st
import matplotlib.pyplot as plt
from acv_explainers.utils import extend_partition
labels = {
'MAIN_EFFECT': "SHAP main effect value for\n%s",
'INTERACTION_VALUE': "SHAP interaction value",
'INTERACTION_EFFECT': "SHAP interaction value for\n%s and %s",
'VALUE': "SHAP value (impact on model output)",
'GLOBAL_VALUE': "mean(|SHAP value|) (average impact on model output magnitude)",
'VALUE_FOR': "SHAP value for\n%s",
'PLOT_FOR': "SHAP plot for %s",
'FEATURE': "Feature %s",
'FEATURE_VALUE': "Feature value",
'FEATURE_VALUE_LOW': "Low",
'FEATURE_VALUE_HIGH': "High",
'JOINT_VALUE': "Joint SHAP value",
'MODEL_OUTPUT': "Model output value"
}
def write_pg(x_train, x_test, y_train, y_test, acvtree):
st.sidebar.title("Parameters")
if y_test.dtype == int or y_test.dtype == bool:
CLASSIFIER = st.sidebar.checkbox('Classifier', value=True)
else:
CLASSIFIER = st.sidebar.checkbox('Classifier', value=False)
col1, col2, col3 = st.columns(3)
with col1:
nb = st.sidebar.number_input(label='SAMPLE SIZE', value=10, min_value=5, max_value=500)
with col2:
        pi_level = st.sidebar.number_input(label='SDP MIN (π)', value=0.9, min_value=0.7, max_value=1.)
with col3:
t = st.sidebar.number_input(label='SDP THRESHOLD FOR REGRESSOR', value=10., min_value=1., max_value=500.)
idx = st.selectbox(
'Choose the observation you want to explain',
list(range(nb))
)
@st.cache(allow_output_mutation=True)
def compute_sdp(nb, x_train, y_train, x_test, y_test, pi_level, t):
sufficient_coal, sdp_coal, sdp_global = acvtree.sufficient_expl_rf(x_test[:nb], y_test[:nb], x_train, y_train,
stop=False, pi_level=pi_level,
t=t)
for i in range(len(sufficient_coal)):
sufficient_coal[i].pop(0)
sdp_coal[i].pop(0)
return sufficient_coal, sdp_coal, sdp_global
@st.cache(allow_output_mutation=True)
def compute_sdp_rule(obs, x_train_np, y_train_np, x_test_np, y_test_np, t, S):
sdp, rules = acvtree.compute_sdp_rule(x_test_np[obs:obs+1], y_test_np[obs:obs+1],
x_train_np, y_train_np, S=[S], t=t)
rule = rules[0]
columns = [x_train.columns[i] for i in range(x_train.shape[1])]
rule_string = ['{} <= {} <= {}'.format(rule[i, 0] if rule[i, 0] > -1e+10 else -np.inf, columns[i],
rule[i, 1] if rule[i, 1] < 1e+10 else +np.inf) for i in S]
rule_string = ' and '.join(rule_string)
return rule_string
@st.cache(allow_output_mutation=True)
def compute_sdp_maxrule(obs, x_train_np, y_train_np, x_test_np, y_test_np, t, S, pi):
sdp, rules, sdp_all, rules_data, w = acvtree.compute_sdp_maxrules(x_test_np[obs:obs + 1], y_test_np[obs:obs + 1],
x_train_np, y_train_np, S=[S], t=t, pi_level=pi)
acvtree.fit_global_rules(x_train_np, y_train_np, rules, [S])
# extend_partition(rules, rules_data, sdp_all, pi=pi, S=[S])
rule = rules[0]
columns = [x_train.columns[i] for i in range(x_train.shape[1])]
rule_string = ['{} <= {} <= {}'.format(rule[i, 0] if rule[i, 0] > -1e+10 else -np.inf, columns[i],
rule[i, 1] if rule[i, 1] < 1e+10 else +np.inf) for i in S]
rule_string = ' and '.join(rule_string)
return rule_string
@st.cache(allow_output_mutation=True)
def transform_scoal_to_col(sufficient_coal, columns_names):
col_byobs = []
for obs in sufficient_coal:
col = []
for S in obs:
name = ''
for i in range(len(S)):
if i != len(S) - 1:
name += columns_names[S[i]] + ' - '
else:
name += columns_names[S[i]]
col.append(name)
col_byobs.append(col)
return col_byobs
@st.cache(allow_output_mutation=True)
def compute_local_sdp(idx, sufficient_coal):
flat = [item for sublist in sufficient_coal[idx] for item in sublist]
flat = pd.Series(flat)
flat = dict(flat.value_counts() / len(sufficient_coal[idx]))
local_sdp = np.zeros(x_train.shape[1])
for key in flat.keys():
local_sdp[key] = flat[key]
return local_sdp
@st.cache(allow_output_mutation=True)
def color_max(data, sdp_index):
color = []
for i in range(x_train.shape[1]+1):
if i in sdp_index:
color.append('background-color: #3e82fc')
else:
color.append('')
color.append('background-color: #ff073a')
return color
@st.cache(allow_output_mutation=True)
def bar_legacy(shap_values, features=None, feature_names=None, max_display=None, show=True):
# unwrap pandas series
fig = plt.figure()
if str(type(features)) == "<class 'pandas.core.series.Series'>":
if feature_names is None:
feature_names = list(features.index)
features = features.values
if feature_names is None:
feature_names = np.array([labels['FEATURE'] % str(i) for i in range(len(shap_values))])
if max_display is None:
max_display = 7
else:
max_display = min(len(feature_names), max_display)
feature_order = np.argsort(-np.abs(shap_values))
#
feature_inds = feature_order[:max_display]
y_pos = np.arange(len(feature_inds), 0, -1)
plt.barh(
y_pos, shap_values[feature_inds],
0.7, align='center',
color=[colors.red_rgb if shap_values[feature_inds[i]] < 0 else colors.blue_rgb for i in range(len(y_pos))]
)
for y in range(len(y_pos)):
plt.text(shap_values[feature_inds][y] + 0.001, y_pos[y] - 0.07, round(shap_values[feature_inds][y], 3))
plt.yticks(y_pos, fontsize=13)
if features is not None:
features = list(features)
# try and round off any trailing zeros after the decimal point in the feature values
for i in range(len(features)):
try:
if round(features[i]) == features[i]:
features[i] = int(features[i])
except TypeError:
pass # features[i] must not be a number
yticklabels = []
for i in feature_inds:
if features is not None:
yticklabels.append(feature_names[i] + " = " + str(features[i]))
else:
yticklabels.append(feature_names[i])
plt.gca().set_yticklabels(yticklabels)
plt.gca().xaxis.set_ticks_position('bottom')
plt.gca().yaxis.set_ticks_position('none')
plt.gca().spines['right'].set_visible(False)
plt.gca().spines['top'].set_visible(False)
# plt.gca().spines['left'].set_visible(False)
plt.xlabel("Frequency of apparition in the Sufficient Coalitions")
return fig
# explantions_load_state = st.text('Computing SDP explanations...')
sufficient_coal, sdp_coal, sdp_global = compute_sdp(nb, x_train.values.astype(np.double),
y_train.astype(np.double), x_test.values.astype(np.double),
y_test.astype(np.double), pi_level=pi_level, t=t)
sufficient_coal_names = transform_scoal_to_col(sufficient_coal, x_train.columns)
# explantions_load_state.text("SDP explanation Done!")
# st.subheader('All sufficient coalitions')
if len(sufficient_coal[idx]) == 0:
st.text('No explanation was found for this observation')
else:
col1, col2 = st.columns(2)
with col1:
st.header('All sufficient explanations')
sufficient_coal_df = {'Sufficient explanations': sufficient_coal_names[idx],
'SDP': sdp_coal[idx]}
sufficient_coal_df = pd.DataFrame(sufficient_coal_df)
# print(sufficient_coal_df.head())
st.dataframe(sufficient_coal_df, 6000, 6000)
with col2:
st.header('Local Explanatory Importance')
local_sdp = compute_local_sdp(idx, sufficient_coal)
# data = {'feature_names': [x_train.columns[i] for i in range(x_train.shape[1])],
# 'feature_importance': local_sdp}
# fi_df = pd.DataFrame(data)
# Sort the DataFrame in order decreasing feature importance
# fi_df.sort_values(by=['feature_importance'], ascending=False, inplace=True)
# sns.set_theme(font='sans-serif')
fig = bar_legacy(local_sdp, x_test.values[idx], x_test.columns)
# sns.set(font_scale=1.5)
# sns.set_theme(font='sans-serif')
# sns.barplot(x=fi_df['feature_importance'], y=fi_df['feature_names'], color='#3e82fc')
            # plt.xlabel('Frequency of appearance in the Sufficient Coalitions')
# for y in range(len(fi_df['feature_importance'].values)):
# plt.text(fi_df['feature_importance'].values[y], y, round(fi_df['feature_importance'].values[y], 3))
#
# plt.ylabel(' ')
st.pyplot(fig)
        st.header('Feature values highlighted by SDP')
        st.text('This observation has {} different explanations, select one below to observe its values'.format(
            len(sufficient_coal[idx])))
exp_idx = st.selectbox(
'Change the explanations',
list(range(len(sufficient_coal[idx])))
)
x_group = pd.DataFrame(x_test.values[idx:idx + 1], columns=x_test.columns)
x_group['Output'] = y_test[idx]
x_group['Same Decision Probability (SDP)'] = sdp_coal[idx][exp_idx]
st.dataframe(x_group.iloc[:1].style.apply(color_max, sdp_index=sufficient_coal[idx][exp_idx], axis=1))
st.header('Local rule explanation')
rule_string = compute_sdp_rule(idx, x_train.values.astype(np.double), y_train.astype(np.double),
x_test.values.astype(np.double), y_test.astype(np.double), t, sufficient_coal[idx][exp_idx])
# st.markdown(rule_string)
st.markdown("<" + 'h3' + " style='text-align: " + \
"; color:" + 'black' + "; '>" + rule_string + "</" + 'h3' + ">",
unsafe_allow_html=True)
st.header('Sufficient local rule explanation (Maximal rule)')
maxrule = st.checkbox('Compute', value=False)
if maxrule:
rule_string = compute_sdp_maxrule(idx, x_train.values.astype(np.double), y_train.astype(np.double),
x_test.values.astype(np.double), y_test.astype(np.double), t,
sufficient_coal[idx][exp_idx], pi_level)
st.markdown("<" + 'h3' + " style='text-align: " + \
"; color:" + 'black' + "; '>" + rule_string + "</" + 'h3' + ">",
unsafe_allow_html=True)
# st.markdown(rule_string)
rule_info = {'Rule coverage': acvtree.rules_coverage,
'Rule accuracy/mse': acvtree.rules_acc}
rule_info = pd.DataFrame(rule_info)
st.dataframe(rule_info, 6000, 6000)
|
PypiClean
|
/Grid2Op-1.9.3-py3-none-any.whl/grid2op/Reward/combinedScaledReward.py
|
import numpy as np
from grid2op.Reward.combinedReward import CombinedReward
from grid2op.dtypes import dt_float
class CombinedScaledReward(CombinedReward):
"""
This class allows to combine multiple rewards.
It will compute a scaled reward of the weighted sum of the registered rewards.
Scaling is done by linearly interpolating the weighted sum,
from the range [min_sum; max_sum] to [reward_min; reward_max]
min_sum and max_sum are computed from the weights and ranges of registered rewards.
See :class:`Reward.BaseReward` for setting the output range.
Examples
--------
.. code-block:: python
import grid2op
from grid2op.Reward import GameplayReward, FlatReward, CombinedScaledReward
env = grid2op.make(..., reward_class=CombinedScaledReward)
        cr = env.get_reward_instance()
cr.addReward("Gameplay", GameplayReward(), 1.0)
cr.addReward("Flat", FlatReward(), 1.0)
        cr.initialize(env)
obs = env.reset()
obs, reward, done, info = env.step(env.action_space())
# reward here is computed by summing the results of what would have
# given `GameplayReward` and the one from `FlatReward`
"""
def __init__(self, logger=None):
super().__init__(logger=logger)
self.reward_min = dt_float(-0.5)
self.reward_max = dt_float(0.5)
self._sum_max = dt_float(0.0)
self._sum_min = dt_float(0.0)
self.rewards = {}
def initialize(self, env):
"""
        Overloaded initialize from `Reward.CombinedReward`.
        This is because it needs to store the ranges internally.
"""
self._sum_max = dt_float(0.0)
self._sum_min = dt_float(0.0)
for key, reward in self.rewards.items():
reward_w = dt_float(reward["weight"])
reward_instance = reward["instance"]
reward_instance.initialize(env)
self._sum_max += dt_float(reward_instance.reward_max * reward_w)
self._sum_min += dt_float(reward_instance.reward_min * reward_w)
def __call__(self, action, env, has_error, is_done, is_illegal, is_ambiguous):
# Get weighted sum from parent
ws = super().__call__(action, env, has_error, is_done, is_illegal, is_ambiguous)
# Scale to range
res = np.interp(
ws, [self._sum_min, self._sum_max], [self.reward_min, self.reward_max]
)
return dt_float(res)
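    # Worked example (illustrative): two rewards with weight 1.0 and output
    # ranges [-1, 1] and [0, 5] give _sum_min = -1.0 and _sum_max = 6.0, so a
    # weighted sum of 2.5 scales to np.interp(2.5, [-1, 6], [-0.5, 0.5]) = 0.0.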
def close(self):
super().close()
|
PypiClean
|
/django_unicorn_gbusby-0.50.3-py3-none-any.whl/django_unicorn/static/unicorn/js/utils.js
|
export function isEmpty(obj) {
return (
typeof obj === "undefined" ||
obj === null ||
(Object.keys(obj).length === 0 && obj.constructor === Object) ||
obj === ""
);
}
/**
* Checks if an object has a value.
*/
export function hasValue(obj) {
return !isEmpty(obj);
}
/**
* Checks if an object is a function.
*/
export function isFunction(obj) {
return obj && typeof obj === "function";
}
/**
* Checks if a string has the search text.
*/
export function contains(str, search) {
if (!str) {
return false;
}
return str.indexOf(search) > -1;
}
/**
* A simple shortcut for querySelector that everyone loves.
*/
export function $(selector, scope) {
if (scope === undefined) {
scope = document;
}
return scope.querySelector(selector);
}
/**
* Get the CSRF token used by Django.
*/
export function getCsrfToken(component) {
// Default to looking for the CSRF in the cookie
const cookieKey = "csrftoken=";
const csrfTokenCookie = component.document.cookie
.split(";")
.filter((item) => item.trim().startsWith(cookieKey));
if (csrfTokenCookie.length > 0) {
return csrfTokenCookie[0].replace(cookieKey, "");
}
// Fall back to check for the CSRF hidden input
const csrfElements = component.document.getElementsByName(
"csrfmiddlewaretoken"
);
if (csrfElements && csrfElements.length > 0) {
return csrfElements[0].getAttribute("value");
}
throw Error("CSRF token is missing. Do you need to add {% csrf_token %}?");
}
/**
* Converts a string to "kebab-case", aka lower-cased with hyphens.
* @param {string} str The string to be converted.
*/
export function toKebabCase(str) {
if (!str) {
return "";
}
const match = str.match(
/[A-Z]{2,}(?=[A-Z][a-z]+[0-9]*|\b)|[A-Z]?[a-z]+[0-9]*|[A-Z]|[0-9]+/g
);
if (!match) {
return str;
}
return match.map((x) => x.toLowerCase()).join("-");
}
/**
* Filter to accept any element (use with walk)
*/
export const FilterAny = {
acceptNode: (node) => NodeFilter.FILTER_ACCEPT,
};
/**
* Filter to skip nested components (use with walk)
*/
export const FilterSkipNested = {
acceptNode: (node) => {
if (node.getAttribute("unicorn:checksum")) {
// with a tree walker, child nodes are also rejected
return NodeFilter.FILTER_REJECT;
}
return NodeFilter.FILTER_ACCEPT;
},
};
/**
* Traverses the DOM looking for child elements.
*/
export function walk(el, callback, filter = FilterAny) {
const walker = document.createTreeWalker(
el,
NodeFilter.SHOW_ELEMENT,
filter,
false
);
while (walker.nextNode()) {
// TODO: Handle sub-components?
callback(walker.currentNode);
}
}
export function args(func) {
func = func.trim();
if (!contains(func, "(") || !func.endsWith(")")) {
return [];
}
// Remove the method name and parenthesis
func = func.slice(func.indexOf("(") + 1, func.length - 1);
const functionArgs = [];
let currentArg = "";
let inSingleQuote = false;
let inDoubleQuote = false;
let parenthesisCount = 0;
let bracketCount = 0;
let curlyCount = 0;
for (let idx = 0; idx < func.length; idx++) {
const c = func.charAt(idx);
currentArg += c;
if (c === "[") {
bracketCount++;
} else if (c === "]") {
bracketCount--;
} else if (c === "(") {
parenthesisCount++;
} else if (c === ")") {
parenthesisCount--;
} else if (c === "{") {
curlyCount++;
} else if (c === "}") {
curlyCount--;
} else if (c === "'") {
inSingleQuote = !inSingleQuote;
} else if (c === '"') {
inDoubleQuote = !inDoubleQuote;
} else if (c === ",") {
      if (
        !inSingleQuote &&
        !inDoubleQuote &&
        bracketCount === 0 &&
        parenthesisCount === 0 &&
        curlyCount === 0
      ) {
// Remove the trailing comma
currentArg = currentArg.slice(0, currentArg.length - 1);
functionArgs.push(currentArg);
currentArg = "";
}
}
if (idx === func.length - 1) {
      if (
        !inSingleQuote &&
        !inDoubleQuote &&
        bracketCount === 0 &&
        parenthesisCount === 0 &&
        curlyCount === 0
      ) {
functionArgs.push(currentArg.trim());
currentArg = "";
}
}
}
return functionArgs;
}
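// Illustrative example (not part of the exported API): commas inside quotes
// or brackets do not split arguments, so
// args('set(name, "a, b", [1, 2])') yields three raw argument strings.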
|
PypiClean
|
/pycxx-0.0.4.tar.gz/pycxx-0.0.4/README.md
|
# Cxx
### simple encryption library
## installation
```bash
pip install pycxx
```
## usage
```python
import pycxx
c = pycxx.Cxx(key="password", expires=0)
"""${expires} is the Time in milliseconds for the
encrypted password to be destroyed
setting ${encrypted} to 0 implies the data would
not be destroyed
"""
# to encrypt;
data = dict(
name="rubbie kelvin",
country="Nigeria"
)
encrypted = c.encrypt(**data) # =>str
# to decrypt
data2 = pycxx.Cxx.decrypt(encrypted, key="password") # => dict
```
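A minimal sketch of the expiry behaviour described above, assuming the same `Cxx` API (the 5000 ms value is only an example):
```python
c = pycxx.Cxx(key="password", expires=5000)
token = c.encrypt(name="rubbie kelvin")
# after ~5 seconds the encrypted data is destroyed and can no longer be decrypted
```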
`made with ❤️ by rubbie`
|
PypiClean
|
/nautobot_ssot_infoblox-0.8.0.tar.gz/nautobot_ssot_infoblox-0.8.0/nautobot_ssot_infoblox/diffsync/adapters/infoblox.py
|
import ipaddress
import re
from diffsync import DiffSync
from diffsync.enum import DiffSyncFlags
from nautobot.extras.plugins.exceptions import PluginImproperlyConfigured
from nautobot_ssot_infoblox.constant import PLUGIN_CFG
from nautobot_ssot_infoblox.utils.client import get_default_ext_attrs, get_dns_name
from nautobot_ssot_infoblox.utils.diffsync import get_ext_attr_dict, build_vlan_map
from nautobot_ssot_infoblox.diffsync.models.infoblox import (
InfobloxAggregate,
InfobloxIPAddress,
InfobloxNetwork,
InfobloxVLANView,
InfobloxVLAN,
)
class InfobloxAdapter(DiffSync):
"""DiffSync adapter using requests to communicate to Infoblox server."""
prefix = InfobloxNetwork
ipaddress = InfobloxIPAddress
vlangroup = InfobloxVLANView
vlan = InfobloxVLAN
top_level = ["vlangroup", "vlan", "prefix", "ipaddress"]
def __init__(self, *args, job=None, sync=None, conn=None, **kwargs):
"""Initialize Infoblox.
Args:
job (object, optional): Infoblox job. Defaults to None.
sync (object, optional): Infoblox DiffSync. Defaults to None.
conn (object, optional): InfobloxAPI connection. Defaults to None.
"""
super().__init__(*args, **kwargs)
self.job = job
self.sync = sync
self.conn = conn
self.subnets = []
if self.conn in [None, False]:
self.job.log_failure(
message="Improperly configured settings for communicating to Infoblox. Please validate accuracy."
)
raise PluginImproperlyConfigured
def load_prefixes(self):
"""Load InfobloxNetwork DiffSync model."""
if PLUGIN_CFG.get("import_subnets"):
subnets = []
for prefix in PLUGIN_CFG["import_subnets"]:
subnets.extend(self.conn.get_all_subnets(prefix=prefix))
all_networks = subnets
else:
# Need to load containers here to prevent duplicates when syncing back to Infoblox
containers = self.conn.get_network_containers()
subnets = self.conn.get_all_subnets()
all_networks = containers + subnets
self.subnets = [(x["network"], x["network_view"]) for x in subnets]
default_ext_attrs = get_default_ext_attrs(review_list=all_networks)
for _pf in all_networks:
pf_ext_attrs = get_ext_attr_dict(extattrs=_pf.get("extattrs", {}))
new_pf = self.prefix(
network=_pf["network"],
description=_pf.get("comment", ""),
status=_pf.get("status", "active"),
ext_attrs={**default_ext_attrs, **pf_ext_attrs},
vlans=build_vlan_map(vlans=_pf["vlans"]) if _pf.get("vlans") else {},
)
self.add(new_pf)
def load_ipaddresses(self):
"""Load InfobloxIPAddress DiffSync model."""
ipaddrs = self.conn.get_all_ipv4address_networks(prefixes=self.subnets)
default_ext_attrs = get_default_ext_attrs(review_list=ipaddrs)
for _ip in ipaddrs:
_, prefix_length = _ip["network"].split("/")
dns_name = ""
if _ip["names"]:
dns_name = get_dns_name(possible_fqdn=_ip["names"][0])
ip_ext_attrs = get_ext_attr_dict(extattrs=_ip.get("extattrs", {}))
new_ip = self.ipaddress(
address=_ip["ip_address"],
prefix=_ip["network"],
prefix_length=prefix_length,
dns_name=dns_name,
status=self.conn.get_ipaddr_status(_ip),
description=_ip["comment"],
ext_attrs={**default_ext_attrs, **ip_ext_attrs},
)
self.add(new_ip)
def load_vlanviews(self):
"""Load InfobloxVLANView DiffSync model."""
vlanviews = self.conn.get_vlanviews()
default_ext_attrs = get_default_ext_attrs(review_list=vlanviews)
for _vv in vlanviews:
vv_ext_attrs = get_ext_attr_dict(extattrs=_vv.get("extattrs", {}))
new_vv = self.vlangroup(
name=_vv["name"],
description=_vv["comment"] if _vv.get("comment") else "",
ext_attrs={**default_ext_attrs, **vv_ext_attrs},
)
self.add(new_vv)
def load_vlans(self):
"""Load InfobloxVlan DiffSync model."""
vlans = self.conn.get_vlans()
default_ext_attrs = get_default_ext_attrs(review_list=vlans)
for _vlan in vlans:
vlan_ext_attrs = get_ext_attr_dict(extattrs=_vlan.get("extattrs", {}))
vlan_group = re.search(r"(?:.+\:)(\S+)(?:\/\S+\/.+)", _vlan["_ref"])
new_vlan = self.vlan(
name=_vlan["name"],
vid=_vlan["id"],
status=_vlan["status"],
vlangroup=vlan_group.group(1) if vlan_group else "",
description=_vlan["comment"] if _vlan.get("comment") else "",
ext_attrs={**default_ext_attrs, **vlan_ext_attrs},
)
self.add(new_vlan)
def load(self):
"""Load all models by calling other methods."""
if "infoblox_import_objects" in PLUGIN_CFG:
if PLUGIN_CFG["infoblox_import_objects"].get("subnets"):
self.load_prefixes()
if PLUGIN_CFG["infoblox_import_objects"].get("ip_addresses"):
self.load_ipaddresses()
if PLUGIN_CFG["infoblox_import_objects"].get("vlan_views"):
self.load_vlanviews()
if PLUGIN_CFG["infoblox_import_objects"].get("vlans"):
self.load_vlans()
else:
self.job.log_info(
message="The `infoblox_import_objects` setting was not found so all objects will be imported."
)
self.load_prefixes()
self.load_ipaddresses()
self.load_vlanviews()
self.load_vlans()
for obj in ["prefix", "ipaddress", "vlangroup", "vlan"]:
if obj in self.dict():
self.job.log(message=f"Loaded {len(self.dict()[obj])} {obj} from Infoblox.")
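    # Illustrative PLUGIN_CFG shape driving the selective import in load();
    # the boolean values are placeholders:
    # PLUGIN_CFG = {
    #     "infoblox_import_objects": {
    #         "subnets": True,
    #         "ip_addresses": True,
    #         "vlan_views": False,
    #         "vlans": False,
    #     },
    # }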
def sync_complete(self, source, diff, flags=DiffSyncFlags.NONE, logger=None):
"""Add tags and custom fields to synced objects."""
source.tag_involved_objects(target=self)
class InfobloxAggregateAdapter(DiffSync):
"""DiffSync adapter using requests to communicate to Infoblox server."""
aggregate = InfobloxAggregate
top_level = ["aggregate"]
def __init__(self, *args, job=None, sync=None, conn=None, **kwargs):
"""Initialize Infoblox.
Args:
job (object, optional): Infoblox job. Defaults to None.
sync (object, optional): Infoblox DiffSync. Defaults to None.
conn (object): InfobloxAPI connection.
"""
super().__init__(*args, **kwargs)
self.job = job
self.sync = sync
self.conn = conn
if self.conn in [None, False]:
self.job.log_failure(
message="Improperly configured settings for communicating to Infoblox. Please validate accuracy."
)
raise PluginImproperlyConfigured
def load(self):
"""Load aggregate models."""
containers = self.conn.get_network_containers()
default_ext_attrs = get_default_ext_attrs(review_list=containers)
for container in containers:
network = ipaddress.ip_network(container["network"])
container_ext_attrs = get_ext_attr_dict(extattrs=container.get("extattrs", {}))
if network.is_private and container["network"] in ["10.0.0.0/8", "172.16.0.0/12", "192.168.0.0/16"]:
new_aggregate = self.aggregate(
network=container["network"],
description=container["comment"] if container.get("comment") else "",
ext_attrs={**default_ext_attrs, **container_ext_attrs},
)
self.add(new_aggregate)
|
PypiClean
|
/allianceauth-blacklist-0.1.1.tar.gz/allianceauth-blacklist-0.1.1/blacklist/tasks.py
|
from allianceauth.services.hooks import get_extension_logger
from celery import shared_task
from allianceauth.authentication.models import State
from allianceauth.eveonline.models import EveCharacter, EveAllianceInfo, EveCorporationInfo
from . import models
from . import app_settings
logger = get_extension_logger(__name__)
@shared_task
def run_blacklist_update(note_id):
blk_state = State.objects.get(name=app_settings.BLACKLIST_STATE_NAME)
instance = models.EveNote.objects.get(pk=note_id)
if instance.eve_catagory == "character":
logger.debug(f"Checking Character for blacklist '{instance.eve_name}'")
eve_char = EveCharacter.objects.get_character_by_id(instance.eve_id)
blacklist = instance.blacklisted
exists = blk_state.member_characters.filter(character_id=instance.eve_id).exists()
if blacklist and exists:
logger.debug(f"'{instance.eve_name}'' is already Blacklisted")
elif blacklist and not exists:
if eve_char is None:
logger.debug(f"Creating new Auth Model for '{instance.eve_name}'")
eve_char = EveCharacter.objects.create_character(instance.eve_id)
logger.debug(f"Blacklisted '{instance.eve_name}'")
blk_state.member_characters.add(eve_char)
elif not blacklist and exists:
logger.debug(f"Removing '{instance.eve_name}' from Blacklist")
blk_state.member_characters.remove(eve_char)
if instance.eve_catagory == "corporation":
logger.debug(f"Checking Corporation for blacklist '{instance.eve_name}'")
eve_corp = EveCorporationInfo.objects.filter(corporation_id=instance.eve_id)
blacklist = instance.blacklisted
exists = blk_state.member_corporations.filter(corporation_id=instance.eve_id).exists()
if blacklist and exists:
logger.debug(f"'{instance.eve_name}'' is already Blacklisted")
elif blacklist and not exists:
if eve_corp.exists() is False:
logger.debug(f"Creating new Auth Model for '{instance.eve_name}'")
eve_corp = EveCorporationInfo.objects.create_corporation(instance.eve_id)
else:
eve_corp = eve_corp.first()
logger.debug(f"Blacklisted '{instance.eve_name}'")
blk_state.member_corporations.add(eve_corp)
elif not blacklist and exists:
logger.debug(f"Removing '{instance.eve_name}' from Blacklist")
blk_state.member_corporations.remove(eve_corp.first())
if instance.eve_catagory == "alliance":
logger.debug(f"Checking Alliance for blacklist '{instance.eve_name}'")
eve_alli = EveAllianceInfo.objects.filter(alliance_id=instance.eve_id)
blacklist = instance.blacklisted
exists = blk_state.member_alliances.filter(alliance_id=instance.eve_id).exists()
if blacklist and exists:
logger.debug(f"'{instance.eve_name}'' is already Blacklisted")
elif blacklist and not exists:
if eve_alli.exists() is False:
logger.debug(f"Creating new Auth Model for '{instance.eve_name}'")
eve_alli = EveAllianceInfo.objects.create_alliance(instance.eve_id)
else:
eve_alli = eve_alli.first()
logger.debug(f"Blacklisted '{instance.eve_name}'")
blk_state.member_alliances.add(eve_alli)
elif not blacklist and exists:
logger.debug(f"Removing '{instance.eve_name}' from Blacklist")
blk_state.member_alliances.remove(eve_alli.first())
|
PypiClean
|
/jupyterlab_remote_contents-0.1.1.tar.gz/jupyterlab_remote_contents-0.1.1/node_modules/@lumino/domutils/types/element.d.ts
|
export declare namespace ElementExt {
/**
* An object which holds the border and padding data for an element.
*/
interface IBoxSizing {
/**
* The top border width, in pixels.
*/
borderTop: number;
/**
* The left border width, in pixels.
*/
borderLeft: number;
/**
* The right border width, in pixels.
*/
borderRight: number;
/**
* The bottom border width, in pixels.
*/
borderBottom: number;
/**
* The top padding width, in pixels.
*/
paddingTop: number;
/**
* The left padding width, in pixels.
*/
paddingLeft: number;
/**
* The right padding width, in pixels.
*/
paddingRight: number;
/**
* The bottom padding width, in pixels.
*/
paddingBottom: number;
/**
* The sum of horizontal border and padding.
*/
horizontalSum: number;
/**
* The sum of vertical border and padding.
*/
verticalSum: number;
}
/**
* Compute the box sizing for an element.
*
* @param element - The element of interest.
*
* @returns The box sizing data for the specified element.
*/
function boxSizing(element: Element): IBoxSizing;
/**
* An object which holds the min and max size data for an element.
*/
interface ISizeLimits {
/**
* The minimum width, in pixels.
*/
minWidth: number;
/**
* The minimum height, in pixels.
*/
minHeight: number;
/**
* The maximum width, in pixels.
*/
maxWidth: number;
/**
* The maximum height, in pixels.
*/
maxHeight: number;
}
/**
* Compute the size limits for an element.
*
* @param element - The element of interest.
*
* @returns The size limit data for the specified element.
*/
function sizeLimits(element: Element): ISizeLimits;
/**
* Test whether a client position lies within an element.
*
* @param element - The DOM element of interest.
*
* @param clientX - The client X coordinate of interest.
*
* @param clientY - The client Y coordinate of interest.
*
* @returns Whether the point is within the given element.
*/
function hitTest(element: Element, clientX: number, clientY: number): boolean;
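    // Illustrative usage sketch (the element id and event are placeholders):
    //   const el = document.getElementById('panel')!;
    //   ElementExt.hitTest(el, event.clientX, event.clientY);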
/**
* Vertically scroll an element into view if needed.
*
* @param area - The scroll area element.
*
* @param element - The element of interest.
*
* #### Notes
* This follows the "nearest" behavior of the native `scrollIntoView`
* method, which is not supported by all browsers.
* https://drafts.csswg.org/cssom-view/#element-scrolling-members
*
* If the element fully covers the visible area or is fully contained
* within the visible area, no scrolling will take place. Otherwise,
* the nearest edges of the area and element are aligned.
*/
function scrollIntoViewIfNeeded(area: Element, element: Element): void;
}
//# sourceMappingURL=element.d.ts.map
|
PypiClean
|
/nni_upload_test-0.7.1904290925-py3-none-win_amd64.whl/nni_upload_test-0.7.1904290925.data/data/nni/node_modules/moment/locale/sd.js
|
;(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined'
&& typeof require === 'function' ? factory(require('../moment')) :
typeof define === 'function' && define.amd ? define(['../moment'], factory) :
factory(global.moment)
}(this, (function (moment) { 'use strict';
var months = [
'جنوري',
'فيبروري',
'مارچ',
'اپريل',
'مئي',
'جون',
'جولاءِ',
'آگسٽ',
'سيپٽمبر',
'آڪٽوبر',
'نومبر',
'ڊسمبر'
];
var days = [
'آچر',
'سومر',
'اڱارو',
'اربع',
'خميس',
'جمع',
'ڇنڇر'
];
var sd = moment.defineLocale('sd', {
months : months,
monthsShort : months,
weekdays : days,
weekdaysShort : days,
weekdaysMin : days,
longDateFormat : {
LT : 'HH:mm',
LTS : 'HH:mm:ss',
L : 'DD/MM/YYYY',
LL : 'D MMMM YYYY',
LLL : 'D MMMM YYYY HH:mm',
LLLL : 'dddd، D MMMM YYYY HH:mm'
},
meridiemParse: /صبح|شام/,
isPM : function (input) {
return 'شام' === input;
},
meridiem : function (hour, minute, isLower) {
if (hour < 12) {
return 'صبح';
}
return 'شام';
},
calendar : {
sameDay : '[اڄ] LT',
nextDay : '[سڀاڻي] LT',
nextWeek : 'dddd [اڳين هفتي تي] LT',
lastDay : '[ڪالهه] LT',
lastWeek : '[گزريل هفتي] dddd [تي] LT',
sameElse : 'L'
},
relativeTime : {
future : '%s پوء',
past : '%s اڳ',
s : 'چند سيڪنڊ',
ss : '%d سيڪنڊ',
m : 'هڪ منٽ',
mm : '%d منٽ',
h : 'هڪ ڪلاڪ',
hh : '%d ڪلاڪ',
d : 'هڪ ڏينهن',
dd : '%d ڏينهن',
M : 'هڪ مهينو',
MM : '%d مهينا',
y : 'هڪ سال',
yy : '%d سال'
},
preparse: function (string) {
return string.replace(/،/g, ',');
},
postformat: function (string) {
return string.replace(/,/g, '،');
},
week : {
dow : 1, // Monday is the first day of the week.
doy : 4 // The week that contains Jan 4th is the first week of the year.
}
});
return sd;
})));
|
PypiClean
|
/moto-improved-cognitoidentity-1.3.tar.gz/moto-improved-cognitoidentity-1.3/moto/cloudformation/utils.py
|
import yaml
import os
import string
from moto.moto_api._internal import mock_random as random
from typing import Any, List
from urllib.parse import urlparse
def generate_stack_id(stack_name: str, region: str, account: str) -> str:
random_id = random.uuid4()
return f"arn:aws:cloudformation:{region}:{account}:stack/{stack_name}/{random_id}"
def generate_changeset_id(
changeset_name: str, region_name: str, account_id: str
) -> str:
random_id = random.uuid4()
return f"arn:aws:cloudformation:{region_name}:{account_id}:changeSet/{changeset_name}/{random_id}"
def generate_stackset_id(stackset_name: str) -> str:
random_id = random.uuid4()
return f"{stackset_name}:{random_id}"
def generate_stackset_arn(stackset_id: str, region_name: str, account_id: str) -> str:
return f"arn:aws:cloudformation:{region_name}:{account_id}:stackset/{stackset_id}"
def random_suffix() -> str:
size = 12
chars = list(range(10)) + list(string.ascii_uppercase)
return "".join(str(random.choice(chars)) for x in range(size))
def yaml_tag_constructor(loader: Any, tag: Any, node: Any) -> Any:
"""convert shorthand intrinsic function to full name"""
def _f(loader: Any, tag: Any, node: Any) -> Any:
if tag == "!GetAtt":
if isinstance(node.value, list):
return node.value
return node.value.split(".")
        elif isinstance(node, yaml.SequenceNode):
return loader.construct_sequence(node)
else:
return node.value
if tag == "!Ref":
key = "Ref"
else:
key = f"Fn::{tag[1:]}"
return {key: _f(loader, tag, node)}
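# Illustrative sketch (the actual registration happens elsewhere in moto):
# attaching the constructor for all tags makes "!Ref foo" parse as
# {"Ref": "foo"} and "!GetAtt a.b" as {"Fn::GetAtt": ["a", "b"]}.
# yaml.add_multi_constructor("", yaml_tag_constructor, Loader=yaml.Loader)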
def validate_template_cfn_lint(template: str) -> List[Any]:
# Importing cfnlint adds a significant overhead, so we keep it local
from cfnlint import decode, core
# Save the template to a temporary file -- cfn-lint requires a file
filename = "file.tmp"
with open(filename, "w") as file:
file.write(template)
abs_filename = os.path.abspath(filename)
# decode handles both yaml and json
try:
template, matches = decode.decode(abs_filename, False)
except TypeError:
# As of cfn-lint 0.39.0, the second argument (ignore_bad_template) was dropped
# https://github.com/aws-cloudformation/cfn-python-lint/pull/1580
template, matches = decode.decode(abs_filename)
# Set cfn-lint to info
core.configure_logging(None)
# Initialize the ruleset to be applied (no overrules, no excludes)
rules = core.get_rules([], [], [])
# Use us-east-1 region (spec file) for validation
regions = ["us-east-1"]
# Process all the rules and gather the errors
matches = core.run_checks(abs_filename, template, rules, regions)
return matches
def get_stack_from_s3_url(template_url: str, account_id: str) -> str:
from moto.s3.models import s3_backends
template_url_parts = urlparse(template_url)
if "localhost" in template_url:
bucket_name, key_name = template_url_parts.path.lstrip("/").split("/", 1)
else:
if template_url_parts.netloc.endswith(
"amazonaws.com"
) and template_url_parts.netloc.startswith("s3"):
# Handle when S3 url uses amazon url with bucket in path
# Also handles getting region as technically s3 is region'd
# region = template_url.netloc.split('.')[1]
bucket_name, key_name = template_url_parts.path.lstrip("/").split("/", 1)
else:
bucket_name = template_url_parts.netloc.split(".")[0]
key_name = template_url_parts.path.lstrip("/")
key = s3_backends[account_id]["global"].get_object(bucket_name, key_name)
return key.value.decode("utf-8")
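# Illustrative URL shapes handled above (bucket and key names are placeholders):
#   path-style:           https://s3.amazonaws.com/my-bucket/template.yml
#   virtual-hosted-style: https://my-bucket.s3.amazonaws.com/template.yml
#   local endpoint:       http://localhost:5000/my-bucket/template.yml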
|
PypiClean
|
/FAST_OAD_CS23-1.2.0-py3-none-any.whl/fastga/models/geometry/geom_components/wing_tank/compute_mfw_simple.py
|
# This file is part of FAST-OAD_CS23 : A framework for rapid Overall Aircraft Design
# Copyright (C) 2022 ONERA & ISAE-SUPAERO
# FAST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import numpy as np
import warnings
from openmdao.core.explicitcomponent import ExplicitComponent
import fastoad.api as oad
from ...constants import SUBMODEL_MFW
oad.RegisterSubmodel.active_models[SUBMODEL_MFW] = "fastga.submodel.geometry.mfw.legacy"
@oad.RegisterSubmodel(SUBMODEL_MFW, "fastga.submodel.geometry.mfw.legacy")
class ComputeMFWSimple(ExplicitComponent):
"""Max fuel weight estimation based o RAYMER table 10.5 p269."""
def setup(self):
self.add_input("data:propulsion:fuel_type", val=np.nan)
self.add_input("data:geometry:wing:area", val=np.nan, units="m**2")
self.add_input("data:geometry:wing:root:chord", val=np.nan, units="m")
self.add_input("data:geometry:wing:tip:chord", val=np.nan, units="m")
self.add_input("data:geometry:wing:root:thickness_ratio", val=np.nan)
self.add_input("data:geometry:wing:tip:thickness_ratio", val=np.nan)
self.add_output("data:weight:aircraft:MFW", units="kg")
self.declare_partials(
"data:weight:aircraft:MFW",
[
"data:geometry:wing:area",
"data:geometry:wing:root:chord",
"data:geometry:wing:tip:chord",
"data:geometry:wing:root:thickness_ratio",
"data:geometry:wing:tip:thickness_ratio",
],
method="fd",
)
def compute(self, inputs, outputs, discrete_inputs=None, discrete_outputs=None):
fuel_type = inputs["data:propulsion:fuel_type"]
wing_area = inputs["data:geometry:wing:area"]
root_chord = inputs["data:geometry:wing:root:chord"]
tip_chord = inputs["data:geometry:wing:tip:chord"]
root_thickness_ratio = inputs["data:geometry:wing:root:thickness_ratio"]
tip_thickness_ratio = inputs["data:geometry:wing:tip:thickness_ratio"]
if fuel_type == 1.0:
m_vol_fuel = 718.9 # gasoline volume-mass [kg/m**3], cold worst case, Avgas
elif fuel_type == 2.0:
m_vol_fuel = 860.0 # Diesel volume-mass [kg/m**3], cold worst case
elif fuel_type == 3.0:
m_vol_fuel = 804.0 # Jet-A1 volume mass [kg/m**3], cold worst case
else:
m_vol_fuel = 718.9
warnings.warn("Fuel type {} does not exist, replaced by type 1!".format(fuel_type))
# Tanks are between 1st (30% MAC) and 3rd (60% MAC) longeron: 30% of the wing
ave_thickness = (
0.7 * (root_chord * root_thickness_ratio + tip_chord * tip_thickness_ratio) / 2.0
)
mfv = 0.3 * wing_area * ave_thickness
mfw = mfv * m_vol_fuel
outputs["data:weight:aircraft:MFW"] = mfw
|
PypiClean
|
/panddas-0.2.14.tar.gz/panddas-0.2.14/lib-python/pandda/inspect/ligands.py
|
import os, shutil, string
import gtk
from bamboo.ccp4_utils import generate_ligand_with_acedrg
def modal_msg(msg):
"""Display an error window - model"""
d = gtk.MessageDialog( type = gtk.MESSAGE_INFO,
buttons = gtk.BUTTONS_CLOSE,
message_format = msg )
d.set_position(gtk.WIN_POS_CENTER)
d.set_keep_above(True)
d.run()
d.destroy()
def post_new_ligand_window(output_directory='.'):
"""Display an error window - model"""
if not os.path.exists(output_directory): os.mkdir(output_directory)
dialog = gtk.Dialog("Create New Ligand",
None,
gtk.DIALOG_MODAL,
(gtk.STOCK_CANCEL, gtk.RESPONSE_DELETE_EVENT, gtk.STOCK_OK, gtk.RESPONSE_ACCEPT))
# Name of the ligand
name_hbox = gtk.HBox(homogeneous=False, spacing=5)
dialog.vbox.pack_start(name_hbox)
label = gtk.Label('Ligand Name:')
label.props.width_chars = 20
label.set_alignment(0.5, 0.0)
name_hbox.pack_start(label, expand=True)
name_entry = gtk.Entry(max=100)
name_entry.set_text('new-compound')
name_hbox.pack_start(name_entry)
# ID for the ligand
id_hbox = gtk.HBox(homogeneous=False, spacing=5)
dialog.vbox.pack_start(id_hbox)
label = gtk.Label('3-letter code:')
label.props.width_chars = 20
label.set_alignment(0.5, 0.0)
id_hbox.pack_start(label)
id_entry = gtk.Entry(max=3)
id_entry.set_text('UNL')
id_hbox.pack_start(id_entry)
# SMILES for the ligand
smiles_hbox = gtk.HBox(homogeneous=False, spacing=5)
dialog.vbox.pack_start(smiles_hbox)
label = gtk.Label('Smiles:')
label.props.width_chars = 20
label.set_alignment(0.5, 0.0)
smiles_hbox.pack_start(label)
smiles_entry = gtk.Entry(max=300)
smiles_entry.set_text('')
smiles_hbox.pack_start(smiles_entry)
dialog.show_all()
disallowed = set(string.punctuation + ' ')
success = False
while success is False:
ligand_pdb = ligand_cif = None
response = dialog.run()
# Delete window/cancel?
if response in [int(gtk.RESPONSE_REJECT), int(gtk.RESPONSE_DELETE_EVENT)]:
print 'close "new-ligand" window'
dialog.destroy()
return None
        assert response == int(gtk.RESPONSE_ACCEPT), 'invalid response received ({} should be {})'.format(response, int(gtk.RESPONSE_ACCEPT))
ligand_name = name_entry.get_text().strip(' ')
ligand_id = id_entry.get_text().strip(' ')
ligand_smiles = smiles_entry.get_text().strip(' ')
if disallowed.difference(['-','_']).intersection(ligand_name):
modal_msg('ligand name cannot contain space or punctuation except for {}'.format(' or '.join(['-','_'])))
continue
if disallowed.difference(['_']).intersection(ligand_id):
modal_msg('ligand name cannot contain spaces or punctuation except for {}'.format(' or '.join(['_'])))
continue
if len(ligand_name) == 0:
modal_msg('No ligand name provided')
continue
if len(ligand_id) == 0:
modal_msg('No ligand id provided')
continue
if len(ligand_smiles) == 0:
modal_msg('No ligand smiles provided')
continue
ligand_prefix = os.path.join(output_directory, ligand_name)
print 'ID: {}\nNAME: {}\nSMILES: {}'.format(ligand_id, ligand_name, ligand_smiles)
ligand_pdb = ligand_prefix+'.pdb'
ligand_cif = ligand_prefix+'.cif'
ligand_dir = ligand_prefix+'_TMP'
if os.path.exists(ligand_prefix+'.pdb') or os.path.exists(ligand_prefix+'.cif'):
            modal_msg('PDB/CIF files called {} already exist.\nPlease select another name.'.format(ligand_name))
continue
try:
ligand_pdb, ligand_cif = generate_ligand_with_acedrg(smiles = ligand_smiles,
name = ligand_id,
prefix = ligand_prefix,
verbose = True)
except Exception as e:
msg = 'Error running acedrg'
msg += '\n--------------->\n'
msg += e.message
if hasattr(e, 'command'):
msg += '\n--------------->\n'
msg += e.command.output
for f in [ligand_pdb,ligand_cif,ligand_dir]:
if os.path.isfile(f):
os.remove(f)
if os.path.exists(ligand_dir):
shutil.rmtree(ligand_dir)
modal_msg(msg)
continue
for f in ['_approx.list']:
f_full = os.path.join(output_directory, f)
if os.path.exists(f_full):
os.remove(f_full)
if os.path.exists(ligand_pdb) and os.path.exists(ligand_cif):
break
dialog.destroy()
return ligand_id, ligand_name, ligand_smiles, ligand_pdb, ligand_cif
|
PypiClean
|
/protean-flask-0.0.11.tar.gz/protean-flask-0.0.11/README.rst
|
========
Overview
========
.. start-badges
.. list-table::
:stub-columns: 1
* - docs
- |docs|
* - package
- | |version| |wheel| |supported-versions| |supported-implementations|
.. |docs| image:: https://readthedocs.org/projects/protean-flask/badge/?style=flat
:target: https://readthedocs.org/projects/protean-flask
:alt: Documentation Status
.. |version| image:: https://img.shields.io/pypi/v/protean-flask.svg
:alt: PyPI Package latest release
:target: https://pypi.org/project/protean-flask
.. |wheel| image:: https://img.shields.io/pypi/wheel/protean-flask.svg
:alt: PyPI Wheel
:target: https://pypi.org/project/protean-flask
.. |supported-versions| image:: https://img.shields.io/pypi/pyversions/protean-flask.svg
:alt: Supported versions
:target: https://pypi.org/project/protean-flask
.. |supported-implementations| image:: https://img.shields.io/pypi/implementation/protean-flask.svg
:alt: Supported implementations
:target: https://pypi.org/project/protean-flask
.. end-badges
Protean Flask Extension
* Free software: BSD 3-Clause License
Installation
============
::
pip install protean-flask
Documentation
=============
https://protean-flask.readthedocs.io/
Development
===========
::
pyenv virtualenv -p python3.7 3.7.2 protean-flask-dev
To run all the tests run::
tox
Note, to combine the coverage data from all the tox environments run:
.. list-table::
:widths: 10 90
:stub-columns: 1
    * - Windows
      - ::
            set PYTEST_ADDOPTS=--cov-append
            tox
    * - Other
      - ::
            PYTEST_ADDOPTS=--cov-append tox
|
PypiClean
|
/tensorflow-gpu-macosx-1.8.1.tar.gz/tensorflow/tools/gcs_test/python/gcs_smoke.py
|
"""Smoke test for reading records from GCS to TensorFlow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import random
import sys
import time
import numpy as np
import tensorflow as tf
from tensorflow.core.example import example_pb2
from tensorflow.python.lib.io import file_io
flags = tf.app.flags
flags.DEFINE_string("gcs_bucket_url", "",
"The URL to the GCS bucket in which the temporary "
"tfrecord file is to be written and read, e.g., "
"gs://my-gcs-bucket/test-directory")
flags.DEFINE_integer("num_examples", 10, "Number of examples to generate")
FLAGS = flags.FLAGS
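# Illustrative invocation (the bucket URL is a placeholder):
#   python gcs_smoke.py --gcs_bucket_url=gs://my-bucket/test-dir --num_examples=10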
def create_examples(num_examples, input_mean):
"""Create ExampleProto's containing data."""
ids = np.arange(num_examples).reshape([num_examples, 1])
inputs = np.random.randn(num_examples, 1) + input_mean
target = inputs - input_mean
examples = []
for row in range(num_examples):
ex = example_pb2.Example()
ex.features.feature["id"].bytes_list.value.append(str(ids[row, 0]))
ex.features.feature["target"].float_list.value.append(target[row, 0])
ex.features.feature["inputs"].float_list.value.append(inputs[row, 0])
examples.append(ex)
return examples
def create_dir_test():
"""Verifies file_io directory handling methods."""
# Test directory creation.
starttime_ms = int(round(time.time() * 1000))
dir_name = "%s/tf_gcs_test_%s" % (FLAGS.gcs_bucket_url, starttime_ms)
print("Creating dir %s" % dir_name)
file_io.create_dir(dir_name)
elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
print("Created directory in: %d milliseconds" % elapsed_ms)
# Check that the directory exists.
dir_exists = file_io.is_directory(dir_name)
assert dir_exists
print("%s directory exists: %s" % (dir_name, dir_exists))
# Test recursive directory creation.
starttime_ms = int(round(time.time() * 1000))
recursive_dir_name = "%s/%s/%s" % (dir_name,
"nested_dir1",
"nested_dir2")
print("Creating recursive dir %s" % recursive_dir_name)
file_io.recursive_create_dir(recursive_dir_name)
elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
print("Created directory recursively in: %d milliseconds" % elapsed_ms)
# Check that the directory exists.
recursive_dir_exists = file_io.is_directory(recursive_dir_name)
assert recursive_dir_exists
print("%s directory exists: %s" % (recursive_dir_name, recursive_dir_exists))
# Create some contents in the just created directory and list the contents.
num_files = 10
files_to_create = ["file_%d.txt" % n for n in range(num_files)]
for file_num in files_to_create:
file_name = "%s/%s" % (dir_name, file_num)
print("Creating file %s." % file_name)
file_io.write_string_to_file(file_name, "test file.")
print("Listing directory %s." % dir_name)
starttime_ms = int(round(time.time() * 1000))
directory_contents = file_io.list_directory(dir_name)
print(directory_contents)
elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
print("Listed directory %s in %s milliseconds" % (dir_name, elapsed_ms))
assert set(directory_contents) == set(files_to_create + ["nested_dir1/"])
# Test directory renaming.
dir_to_rename = "%s/old_dir" % dir_name
new_dir_name = "%s/new_dir" % dir_name
file_io.create_dir(dir_to_rename)
assert file_io.is_directory(dir_to_rename)
assert not file_io.is_directory(new_dir_name)
starttime_ms = int(round(time.time() * 1000))
print("Will try renaming directory %s to %s" % (dir_to_rename, new_dir_name))
file_io.rename(dir_to_rename, new_dir_name)
elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
print("Renamed directory %s to %s in %s milliseconds" % (
dir_to_rename, new_dir_name, elapsed_ms))
assert not file_io.is_directory(dir_to_rename)
assert file_io.is_directory(new_dir_name)
# Test Delete directory recursively.
print("Deleting directory recursively %s." % dir_name)
starttime_ms = int(round(time.time() * 1000))
file_io.delete_recursively(dir_name)
elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
dir_exists = file_io.is_directory(dir_name)
assert not dir_exists
print("Deleted directory recursively %s in %s milliseconds" % (
dir_name, elapsed_ms))
def create_object_test():
"""Verifies file_io's object manipulation methods ."""
starttime_ms = int(round(time.time() * 1000))
dir_name = "%s/tf_gcs_test_%s" % (FLAGS.gcs_bucket_url, starttime_ms)
print("Creating dir %s." % dir_name)
file_io.create_dir(dir_name)
num_files = 5
# Create files of 2 different patterns in this directory.
files_pattern_1 = ["%s/test_file_%d.txt" % (dir_name, n)
for n in range(num_files)]
files_pattern_2 = ["%s/testfile%d.txt" % (dir_name, n)
for n in range(num_files)]
starttime_ms = int(round(time.time() * 1000))
files_to_create = files_pattern_1 + files_pattern_2
for file_name in files_to_create:
print("Creating file %s." % file_name)
file_io.write_string_to_file(file_name, "test file creation.")
elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
print("Created %d files in %s milliseconds" % (
len(files_to_create), elapsed_ms))
# Listing files of pattern1.
list_files_pattern = "%s/test_file*.txt" % dir_name
print("Getting files matching pattern %s." % list_files_pattern)
starttime_ms = int(round(time.time() * 1000))
files_list = file_io.get_matching_files(list_files_pattern)
elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
print("Listed files in %s milliseconds" % elapsed_ms)
print(files_list)
assert set(files_list) == set(files_pattern_1)
# Listing files of pattern2.
list_files_pattern = "%s/testfile*.txt" % dir_name
print("Getting files matching pattern %s." % list_files_pattern)
starttime_ms = int(round(time.time() * 1000))
files_list = file_io.get_matching_files(list_files_pattern)
elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
print("Listed files in %s milliseconds" % elapsed_ms)
print(files_list)
assert set(files_list) == set(files_pattern_2)
# Test renaming file.
file_to_rename = "%s/oldname.txt" % dir_name
file_new_name = "%s/newname.txt" % dir_name
file_io.write_string_to_file(file_to_rename, "test file.")
assert file_io.file_exists(file_to_rename)
assert not file_io.file_exists(file_new_name)
print("Will try renaming file %s to %s" % (file_to_rename, file_new_name))
starttime_ms = int(round(time.time() * 1000))
file_io.rename(file_to_rename, file_new_name)
elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
print("File %s renamed to %s in %s milliseconds" % (
file_to_rename, file_new_name, elapsed_ms))
assert not file_io.file_exists(file_to_rename)
assert file_io.file_exists(file_new_name)
# Delete directory.
print("Deleting directory %s." % dir_name)
file_io.delete_recursively(dir_name)
def main(argv):
del argv # Unused.
# Sanity check on the GCS bucket URL.
if not FLAGS.gcs_bucket_url or not FLAGS.gcs_bucket_url.startswith("gs://"):
print("ERROR: Invalid GCS bucket URL: \"%s\"" % FLAGS.gcs_bucket_url)
sys.exit(1)
# Generate random tfrecord path name.
input_path = FLAGS.gcs_bucket_url + "/"
input_path += "".join(random.choice("0123456789ABCDEF") for i in range(8))
input_path += ".tfrecord"
print("Using input path: %s" % input_path)
# Verify that writing to the records file in GCS works.
print("\n=== Testing writing and reading of GCS record file... ===")
example_data = create_examples(FLAGS.num_examples, 5)
with tf.python_io.TFRecordWriter(input_path) as hf:
for e in example_data:
hf.write(e.SerializeToString())
print("Data written to: %s" % input_path)
# Verify that reading from the tfrecord file works and that
# tf_record_iterator works.
record_iter = tf.python_io.tf_record_iterator(input_path)
read_count = 0
for _ in record_iter:
read_count += 1
print("Read %d records using tf_record_iterator" % read_count)
if read_count != FLAGS.num_examples:
print("FAIL: The number of records read from tf_record_iterator (%d) "
"differs from the expected number (%d)" % (read_count,
FLAGS.num_examples))
sys.exit(1)
# Verify that running the read op in a session works.
print("\n=== Testing TFRecordReader.read op in a session... ===")
with tf.Graph().as_default():
filename_queue = tf.train.string_input_producer([input_path], num_epochs=1)
reader = tf.TFRecordReader()
_, serialized_example = reader.read(filename_queue)
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
sess.run(tf.local_variables_initializer())
tf.train.start_queue_runners()
index = 0
for _ in range(FLAGS.num_examples):
print("Read record: %d" % index)
sess.run(serialized_example)
index += 1
# Reading one more record should trigger an exception.
try:
sess.run(serialized_example)
print("FAIL: Failed to catch the expected OutOfRangeError while "
"reading one more record than is available")
sys.exit(1)
except tf.errors.OutOfRangeError:
print("Successfully caught the expected OutOfRangeError while "
"reading one more record than is available")
create_dir_test()
create_object_test()
if __name__ == "__main__":
tf.app.run(main)
|
PypiClean
|
/dialog_api-1.901.0-py3-none-any.whl/dialog_api/event_bus_pb2_grpc.py
|
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from . import event_bus_pb2 as event__bus__pb2
from . import miscellaneous_pb2 as miscellaneous__pb2
class EventBusStub(object):
"""Missing associated documentation comment in .proto file."""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.JoinEventBus = channel.unary_unary(
'/dialog.EventBus/JoinEventBus',
request_serializer=event__bus__pb2.RequestJoinEventBus.SerializeToString,
response_deserializer=event__bus__pb2.ResponseJoinEventBus.FromString,
)
self.KeepAliveEventBus = channel.unary_unary(
'/dialog.EventBus/KeepAliveEventBus',
request_serializer=event__bus__pb2.RequestKeepAliveEventBus.SerializeToString,
response_deserializer=miscellaneous__pb2.ResponseVoid.FromString,
)
self.PostToEventBus = channel.unary_unary(
'/dialog.EventBus/PostToEventBus',
request_serializer=event__bus__pb2.RequestPostToEventBus.SerializeToString,
response_deserializer=miscellaneous__pb2.ResponseVoid.FromString,
)
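# Illustrative client sketch (the target address and empty request payload are
# placeholders):
#   channel = grpc.insecure_channel("localhost:50051")
#   stub = EventBusStub(channel)
#   reply = stub.JoinEventBus(event__bus__pb2.RequestJoinEventBus())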
class EventBusServicer(object):
"""Missing associated documentation comment in .proto file."""
def JoinEventBus(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def KeepAliveEventBus(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def PostToEventBus(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_EventBusServicer_to_server(servicer, server):
rpc_method_handlers = {
'JoinEventBus': grpc.unary_unary_rpc_method_handler(
servicer.JoinEventBus,
request_deserializer=event__bus__pb2.RequestJoinEventBus.FromString,
response_serializer=event__bus__pb2.ResponseJoinEventBus.SerializeToString,
),
'KeepAliveEventBus': grpc.unary_unary_rpc_method_handler(
servicer.KeepAliveEventBus,
request_deserializer=event__bus__pb2.RequestKeepAliveEventBus.FromString,
response_serializer=miscellaneous__pb2.ResponseVoid.SerializeToString,
),
'PostToEventBus': grpc.unary_unary_rpc_method_handler(
servicer.PostToEventBus,
request_deserializer=event__bus__pb2.RequestPostToEventBus.FromString,
response_serializer=miscellaneous__pb2.ResponseVoid.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'dialog.EventBus', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class EventBus(object):
"""Missing associated documentation comment in .proto file."""
@staticmethod
def JoinEventBus(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/dialog.EventBus/JoinEventBus',
event__bus__pb2.RequestJoinEventBus.SerializeToString,
event__bus__pb2.ResponseJoinEventBus.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def KeepAliveEventBus(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/dialog.EventBus/KeepAliveEventBus',
event__bus__pb2.RequestKeepAliveEventBus.SerializeToString,
miscellaneous__pb2.ResponseVoid.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def PostToEventBus(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/dialog.EventBus/PostToEventBus',
event__bus__pb2.RequestPostToEventBus.SerializeToString,
miscellaneous__pb2.ResponseVoid.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
|
PypiClean
|
/ansible-8.3.0-py3-none-any.whl/ansible_collections/cisco/dnac/plugins/modules/interface_network_device_info.py
|
# Copyright (c) 2021, Cisco Systems
# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
DOCUMENTATION = r"""
---
module: interface_network_device_info
short_description: Information module for Interface Network Device
description:
- Get Interface Network Device by id.
- Returns list of interfaces by specified device.
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
author: Rafael Campos (@racampos)
options:
headers:
description: Additional headers.
type: dict
deviceId:
description:
- DeviceId path parameter. Device ID.
type: str
requirements:
- dnacentersdk >= 2.5.5
- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetInterfaceInfoById
description: Complete reference of the GetInterfaceInfoById API.
link: https://developer.cisco.com/docs/dna-center/#!get-interface-info-by-id
notes:
- SDK Method used are
devices.Devices.get_interface_info_by_id,
- Paths used are
get /dna/intent/api/v1/interface/network-device/{deviceId},
"""
EXAMPLES = r"""
- name: Get Interface Network Device by id
cisco.dnac.interface_network_device_info:
dnac_host: "{{dnac_host}}"
dnac_username: "{{dnac_username}}"
dnac_password: "{{dnac_password}}"
dnac_verify: "{{dnac_verify}}"
dnac_port: "{{dnac_port}}"
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
headers: "{{my_headers | from_json}}"
deviceId: string
register: result
"""
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
returned: always
type: dict
sample: >
{
"response": [
{
"adminStatus": "string",
"className": "string",
"description": "string",
"deviceId": "string",
"duplex": "string",
"id": "string",
"ifIndex": "string",
"instanceTenantId": "string",
"instanceUuid": "string",
"interfaceType": "string",
"ipv4Address": "string",
"ipv4Mask": "string",
"isisSupport": "string",
"lastUpdated": "string",
"macAddress": "string",
"mappedPhysicalInterfaceId": "string",
"mappedPhysicalInterfaceName": "string",
"mediaType": "string",
"nativeVlanId": "string",
"ospfSupport": "string",
"pid": "string",
"portMode": "string",
"portName": "string",
"portType": "string",
"serialNo": "string",
"series": "string",
"speed": "string",
"status": "string",
"vlanId": "string",
"voiceVlan": "string"
}
],
"version": "string"
}
"""
|
PypiClean
|
/terraformlintingcli-0.1.0.tar.gz/terraformlintingcli-0.1.0/docs/terraformlintingcli.rst
|
terraformlintingcli package
===========================
Submodules
----------
terraformlintingcli.terraformlintingcli module
----------------------------------------------
.. automodule:: terraformlintingcli.terraformlintingcli
:members:
:undoc-members:
:show-inheritance:
terraformlintingcli.terraformlintingcliexceptions module
--------------------------------------------------------
.. automodule:: terraformlintingcli.terraformlintingcliexceptions
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: terraformlintingcli
:members:
:undoc-members:
:show-inheritance:
|
PypiClean
|
/neuro_pypes-1.1.2.tar.gz/neuro_pypes-1.1.2/neuro_pypes/pet/utils.py
|
from nipype.interfaces.base import traits
from nipype.interfaces.utility import Merge, Function, IdentityInterface
from nipype.pipeline import Workflow
from neuro_pypes.config import setup_node
from neuro_pypes.interfaces.nilearn import math_img, concat_imgs, resample_to_img
from neuro_pypes.preproc import PETPVC
from neuro_pypes.utils import selectindex, rename
# TODO: add a becquerel/ml normalization function node
# hint: img*slope + intercept
def petpvc_cmd(in_file=traits.Undefined, mask_file=traits.Undefined, out_file=traits.Undefined,
pvc_method='RBV', fwhm_mm=(4, 4, 4)):
""" Return a nipype interface to PETPVC.
Parameters
----------
in_file: str
Path to the PET image file.
out_file: str
Path to the result output file
mask_file: str
Path to the mask image file with tissue maps.
pvc_method: str
Keyword of the method to run.
Run `petpvc --help` for choices.
fwhm_mm: iterable of floats
Iterable with 3 ints or floats to define the full-width at half maximum in mm along
each axis of the point-spread function (PSF) of the scanner.
Returns
-------
pet_pvc: PETPVC(nipype.interfaces.base.CommandLine)
"""
pvc = PETPVC()
pvc.inputs.in_file = in_file
pvc.inputs.out_file = out_file
pvc.inputs.mask_file = mask_file
pvc.inputs.pvc = pvc_method
pvc.inputs.fwhm_x = fwhm_mm[0]
pvc.inputs.fwhm_y = fwhm_mm[1]
pvc.inputs.fwhm_z = fwhm_mm[2]
return pvc
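# A minimal usage sketch for petpvc_cmd (assumptions: PETPVC is installed and on
# PATH, and the file paths below are placeholders):
# pvc = petpvc_cmd(
#     in_file="pet.nii.gz",
#     mask_file="tissues_4d.nii.gz",
#     out_file="pet_pvc.nii.gz",
#     pvc_method="RBV",
#     fwhm_mm=(4, 4, 4),
# )
# result = pvc.run()  # runs the PETPVC command line through nipype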
def petpvc_mask(wf_name="petpvc_mask"):
""" A Workflow that returns a 4D merge of 4 volumes for PETPVC: GM, WM, CSF and background.
Parameters
----------
wf_name: str
The name of the workflow.
Nipype.Inputs
-------------
pvcmask_input.tissues: list of existing files
List of tissue files in anatomical space, the 3 file
paths must be in this order: GM, WM, CSF
Nipype.Outputs
--------------
pvcmask_output.petpvc_mask: existing file
A 4D volume file with these maps in order: GM, WM, CSF, background
pvcmask_output.brain_mask: existing file
A mask that is a binarised sum of the tissues file with fslmaths.
Can be used as brain mask in anatomical space for the PET image.
Returns
-------
wf: nipype Workflow
"""
# define nodes
# specify input and output fields
in_fields = ["tissues"]
out_fields = ["petpvc_mask",
"brain_mask", ]
# input
pvcmask_input = setup_node(IdentityInterface(fields=in_fields, mandatory_inputs=True),
name="pvcmask_input")
tissues = setup_node(IdentityInterface(fields=["gm", "wm", "csf"], mandatory_inputs=True),
name="tissues")
merge_list = setup_node(Merge(4), name="merge_list")
# maths for background
img_bkg = setup_node(Function(function=math_img,
input_names=["formula", "out_file", "gm", "wm", "csf"],
output_names=["out_file"],
imports=['from neuro_pypes.interfaces.nilearn import ni2file']),
name='background')
img_bkg.inputs.out_file = "tissue_bkg.nii.gz"
img_bkg.inputs.formula = "np.maximum((-((gm + wm + csf) - 1)), 0)"
# maths for brain mask
brain_mask = setup_node(Function(function=math_img,
input_names=["formula", "out_file", "gm", "wm", "csf"],
output_names=["out_file"],
imports=['from neuro_pypes.interfaces.nilearn import ni2file']),
name='brain_mask')
brain_mask.inputs.out_file = "tissues_brain_mask.nii.gz"
brain_mask.inputs.formula = "np.abs(gm + wm + csf) > 0"
# concat the tissues images and the background for PETPVC
merge_tissues = setup_node(Function(function=concat_imgs,
input_names=["in_files"],
output_names=["out_file"],
imports=['from neuro_pypes.interfaces.nilearn import ni2file']),
name='merge_tissues')
merge_tissues.inputs.out_file = "petpvc_mask.nii.gz"
# output
pvcmask_output = setup_node(IdentityInterface(fields=out_fields), name="pvcmask_output")
# Create the workflow object
wf = Workflow(name=wf_name)
# Connect the nodes
wf.connect([
# separate [GM, WM, CSF] into [GM] and [WM, CSF]
(pvcmask_input, tissues, [(("tissues", selectindex, 0), "gm"),
(("tissues", selectindex, 1), "wm"),
(("tissues", selectindex, 2), "csf")
]
),
(tissues, img_bkg, [("gm", "gm"), ("wm", "wm"), ("csf", "csf")]),
(tissues, brain_mask, [("gm", "gm"), ("wm", "wm"), ("csf", "csf")]),
(tissues, merge_list, [("gm", "in1"), ("wm", "in2"), ("csf", "in3")]),
# create a list of [GM, WM, CSF, BKG]
(img_bkg, merge_list, [("out_file", "in4")]),
# merge into 4D: [GM, WM, CSF, BKG]
(merge_list, merge_tissues, [("out", "in_files")]),
# output
(merge_tissues, pvcmask_output, [("out_file", "petpvc_mask")]),
(brain_mask, pvcmask_output, [("out_file", "brain_mask")]),
])
return wf
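# A minimal usage sketch for petpvc_mask (assumptions: tissue maps in anatomical
# space exist at the placeholder paths, ordered GM, WM, CSF):
# wf = petpvc_mask(wf_name="petpvc_mask")
# wf.inputs.pvcmask_input.tissues = ["gm.nii.gz", "wm.nii.gz", "csf.nii.gz"]
# wf.base_dir = "/tmp/petpvc_mask_wd"
# wf.run()  # produces pvcmask_output.petpvc_mask and pvcmask_output.brain_mask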
def intensity_norm(wf_name='intensity_norm'):
""" Workflow that uses a mask against a source from where the mean value will be taken.
This mean value will be used to demean the whole source and leave it in out_file.
Parameters
----------
wf_name: str
The name of the workflow.
Nipype Inputs
-------------
intnorm_input.source: existing file
The image from where to extract the signal values and normalize.
intnorm_input.mask: existing file
The mask to specify which voxels to use to calculate the statistics
for normalization.
Nipype Outputs
--------------
intnorm_output.out_file: existing file
Returns
-------
wf: nipype Workflow
"""
# specify input and output fields
in_fields = ["source",
"mask"]
out_fields = ["out_file"]
# input
intnorm_input = setup_node(IdentityInterface(
fields=in_fields,
mandatory_inputs=True),
name="intnorm_input"
)
# fix the affine matrix (it's necessary for some cases)
resample = setup_node(Function(
function=resample_to_img,
input_names=["in_file", "target", "interpolation"],
output_names=["out_file"],
imports=['from neuro_pypes.interfaces.nilearn import ni2file']),
name="resample_mask"
)
resample.inputs.interpolation = "nearest"
# calculate masked mean value
mean_val = setup_node(Function(
function=math_img,
input_names=["formula", "img", "mask"],
output_names=["out_value"],
imports=['from neuro_pypes.interfaces.nilearn import ni2file']),
name='mean_value'
)
# mean intensity over the voxels inside the mask
mean_val.inputs.formula = "np.mean(img[mask > 0])"
# normalize
norm_img = setup_node(Function(
function=math_img,
input_names=["formula", "out_file", "img", "val"],
output_names=["out_file"],
imports=['from neuro_pypes.interfaces.nilearn import ni2file']),
name='norm_img'
)
norm_img.inputs.formula = "img / val"
# output
intnorm_output = setup_node(IdentityInterface(
fields=out_fields),
name="intnorm_output"
)
# Create the workflow object
wf = Workflow(name=wf_name)
wf.connect([
# resample
(intnorm_input, resample, [("source", "target"),
("mask", "in_file")
]
),
# normalize
(intnorm_input, mean_val, [("source", "img")]),
(resample, mean_val, [("out_file", "mask")]),
(intnorm_input, norm_img, [("source", "img"),
(("source", rename, "_intnormed"), "out_file"),
]
),
(mean_val, norm_img, [("out_value", "val")]),
(norm_img, intnorm_output, [("out_file", "out_file")]),
])
return wf
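# A minimal usage sketch for intensity_norm (assumptions: the placeholder PET image
# and brain mask exist; the mask is resampled to the source grid before the masked
# mean is computed, as wired above):
# wf = intensity_norm()
# wf.inputs.intnorm_input.source = "pet.nii.gz"
# wf.inputs.intnorm_input.mask = "brain_mask.nii.gz"
# wf.base_dir = "/tmp/intnorm_wd"
# wf.run()  # writes the scaled image to intnorm_output.out_file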
|
PypiClean
|
/model-driven-inference-its-1.0.3.tar.gz/model-driven-inference-its-1.0.3/docs/_build/html/_static/js/theme.js
|
!function(n){var e={};function t(i){if(e[i])return e[i].exports;var o=e[i]={i:i,l:!1,exports:{}};return n[i].call(o.exports,o,o.exports,t),o.l=!0,o.exports}t.m=n,t.c=e,t.d=function(n,e,i){t.o(n,e)||Object.defineProperty(n,e,{enumerable:!0,get:i})},t.r=function(n){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(n,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(n,"__esModule",{value:!0})},t.t=function(n,e){if(1&e&&(n=t(n)),8&e)return n;if(4&e&&"object"==typeof n&&n&&n.__esModule)return n;var i=Object.create(null);if(t.r(i),Object.defineProperty(i,"default",{enumerable:!0,value:n}),2&e&&"string"!=typeof n)for(var o in n)t.d(i,o,function(e){return n[e]}.bind(null,o));return i},t.n=function(n){var e=n&&n.__esModule?function(){return n.default}:function(){return n};return t.d(e,"a",e),e},t.o=function(n,e){return Object.prototype.hasOwnProperty.call(n,e)},t.p="",t(t.s=0)}([function(n,e,t){t(1),n.exports=t(3)},function(n,e,t){(function(){var e="undefined"!=typeof window?window.jQuery:t(2);n.exports.ThemeNav={navBar:null,win:null,winScroll:!1,winResize:!1,linkScroll:!1,winPosition:0,winHeight:null,docHeight:null,isRunning:!1,enable:function(n){var t=this;void 0===n&&(n=!0),t.isRunning||(t.isRunning=!0,e((function(e){t.init(e),t.reset(),t.win.on("hashchange",t.reset),n&&t.win.on("scroll",(function(){t.linkScroll||t.winScroll||(t.winScroll=!0,requestAnimationFrame((function(){t.onScroll()})))})),t.win.on("resize",(function(){t.winResize||(t.winResize=!0,requestAnimationFrame((function(){t.onResize()})))})),t.onResize()})))},enableSticky:function(){this.enable(!0)},init:function(n){n(document);var e=this;this.navBar=n("div.wy-side-scroll:first"),this.win=n(window),n(document).on("click","[data-toggle='wy-nav-top']",(function(){n("[data-toggle='wy-nav-shift']").toggleClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift")})).on("click",".wy-menu-vertical .current ul li a",(function(){var t=n(this);n("[data-toggle='wy-nav-shift']").removeClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift"),e.toggleCurrent(t),e.hashChange()})).on("click","[data-toggle='rst-current-version']",(function(){n("[data-toggle='rst-versions']").toggleClass("shift-up")})),n("table.docutils:not(.field-list,.footnote,.citation)").wrap("<div class='wy-table-responsive'></div>"),n("table.docutils.footnote").wrap("<div class='wy-table-responsive footnote'></div>"),n("table.docutils.citation").wrap("<div class='wy-table-responsive citation'></div>"),n(".wy-menu-vertical ul").not(".simple").siblings("a").each((function(){var t=n(this);expand=n('<button class="toctree-expand" title="Open/close menu"></button>'),expand.on("click",(function(n){return e.toggleCurrent(t),n.stopPropagation(),!1})),t.prepend(expand)}))},reset:function(){var n=encodeURI(window.location.hash)||"#";try{var e=$(".wy-menu-vertical"),t=e.find('[href="'+n+'"]');if(0===t.length){var i=$('.document [id="'+n.substring(1)+'"]').closest("div.section");0===(t=e.find('[href="#'+i.attr("id")+'"]')).length&&(t=e.find('[href="#"]'))}if(t.length>0){$(".wy-menu-vertical .current").removeClass("current").attr("aria-expanded","false"),t.addClass("current").attr("aria-expanded","true"),t.closest("li.toctree-l1").parent().addClass("current").attr("aria-expanded","true");for(let n=1;n<=10;n++)t.closest("li.toctree-l"+n).addClass("current").attr("aria-expanded","true");t[0].scrollIntoView()}}catch(n){console.log("Error expanding nav for anchor",n)}},onScroll:function(){this.winScroll=!1;var 
n=this.win.scrollTop(),e=n+this.winHeight,t=this.navBar.scrollTop()+(n-this.winPosition);n<0||e>this.docHeight||(this.navBar.scrollTop(t),this.winPosition=n)},onResize:function(){this.winResize=!1,this.winHeight=this.win.height(),this.docHeight=$(document).height()},hashChange:function(){this.linkScroll=!0,this.win.one("hashchange",(function(){this.linkScroll=!1}))},toggleCurrent:function(n){var e=n.closest("li");e.siblings("li.current").removeClass("current").attr("aria-expanded","false"),e.siblings().find("li.current").removeClass("current").attr("aria-expanded","false");var t=e.find("> ul li");t.length&&(t.removeClass("current").attr("aria-expanded","false"),e.toggleClass("current").attr("aria-expanded",(function(n,e){return"true"==e?"false":"true"})))}},"undefined"!=typeof window&&(window.SphinxRtdTheme={Navigation:n.exports.ThemeNav,StickyNav:n.exports.ThemeNav}),function(){for(var n=0,e=["ms","moz","webkit","o"],t=0;t<e.length&&!window.requestAnimationFrame;++t)window.requestAnimationFrame=window[e[t]+"RequestAnimationFrame"],window.cancelAnimationFrame=window[e[t]+"CancelAnimationFrame"]||window[e[t]+"CancelRequestAnimationFrame"];window.requestAnimationFrame||(window.requestAnimationFrame=function(e,t){var i=(new Date).getTime(),o=Math.max(0,16-(i-n)),r=window.setTimeout((function(){e(i+o)}),o);return n=i+o,r}),window.cancelAnimationFrame||(window.cancelAnimationFrame=function(n){clearTimeout(n)})}()}).call(window)},function(n,e){n.exports=jQuery},function(n,e,t){}]);
|
PypiClean
|
/odoo14_addon_ssi_helpdesk-14.0.2.13.0-py3-none-any.whl/odoo/addons/ssi_helpdesk/models/helpdesk_ticket.py
|
from odoo import api, fields, models
class HelpdeskTicket(models.Model):
_name = "helpdesk_ticket"
_inherit = [
"mixin.transaction_open",
"mixin.transaction_confirm",
"mixin.transaction_done",
"mixin.transaction_cancel",
"mixin.transaction_terminate",
]
_description = "Helpdesk Ticket"
# Multiple Approval Attribute
_approval_from_state = "open"
_approval_to_state = "done"
_approval_state = "confirm"
_after_approved_method = "action_done"
# Attributes related to add element on view automatically
_automatically_insert_view_element = True
_automatically_insert_done_policy_fields = False
_automatically_insert_done_button = False
_statusbar_visible_label = "draft,open,confirm,done"
_policy_field_order = [
"open_ok",
"confirm_ok",
"approve_ok",
"reject_ok",
"restart_approval_ok",
"cancel_ok",
"terminate_ok",
"restart_ok",
"manual_number_ok",
]
_header_button_order = [
"action_open",
"action_confirm",
"action_approve_approval",
"action_reject_approval",
"%(ssi_transaction_cancel_mixin.base_select_cancel_reason_action)d",
"%(ssi_transaction_terminate_mixin.base_select_terminate_reason_action)d",
"action_restart",
]
# Attributes related to add element on search view automatically
_state_filter_order = [
"dom_draft",
"dom_open",
"dom_confirm",
"dom_reject",
"dom_done",
"dom_terminate",
"dom_cancel",
]
_create_sequence_state = "open"
title = fields.Char(
required=True,
readonly=True,
states={"draft": [("readonly", False)]},
copy=True,
)
partner_id = fields.Many2one(
string="Contact",
comodel_name="res.partner",
domain=[
("is_company", "=", False),
("parent_id", "!=", False),
],
required=True,
ondelete="restrict",
readonly=True,
states={"draft": [("readonly", False)]},
copy=True,
)
commercial_partner_id = fields.Many2one(
string="Commercial Contact",
comodel_name="res.partner",
related="partner_id.commercial_partner_id",
store=True,
)
contact_group_ids = fields.Many2many(
string="Contact Group",
comodel_name="helpdesk_contact_group",
relation="rel_helpdesk_ticket_2_contact_group",
column1="ticket_id",
column2="group_id",
copy=True,
)
additional_partner_ids = fields.Many2many(
string="CC To",
comodel_name="res.partner",
relation="rel_helpdesk_ticket_2_additional_partner",
column1="ticket_id",
column2="partner_id",
copy=True,
)
update_progress = fields.Boolean(
string="Update Progress",
default=True,
)
type_id = fields.Many2one(
string="Type",
comodel_name="helpdesk_type",
required=False,
ondelete="restrict",
readonly=True,
states={"draft": [("readonly", False)]},
copy=True,
)
type_category_id = fields.Many2one(
string="Category",
related="type_id.category_id",
store=True,
)
@api.model
def _default_date(self):
return fields.Date.today()
date = fields.Date(
string="Date",
required=True,
readonly=True,
states={"draft": [("readonly", False)]},
default=lambda self: self._default_date(),
copy=True,
)
duration_id = fields.Many2one(
string="Duration",
comodel_name="base.duration",
copy=False,
)
date_deadline = fields.Date(
string="Deadline",
required=False,
copy=False,
)
description = fields.Html(
string="Description",
readonly=False,
copy=True,
)
communication_ids = fields.One2many(
string="Communications",
comodel_name="helpdesk_communication",
inverse_name="ticket_id",
copy=False,
)
starting_communication_id = fields.Many2one(
string="# Starting Communication",
comodel_name="helpdesk_communication",
readonly=True,
copy=False,
)
finishing_communication_id = fields.Many2one(
string="# Finishing Communication",
comodel_name="helpdesk_communication",
readonly=True,
copy=False,
)
latest_message_date = fields.Datetime(
string="Latest Message Date",
related="finishing_communication_id.latest_message_date",
store=True,
)
latest_partner_message_date = fields.Datetime(
string="Latest Partner Message Date",
related="finishing_communication_id.latest_partner_message_date",
store=True,
)
finishing_communication_state = fields.Selection(
string="Finishing Communication State",
related="finishing_communication_id.state",
store=True,
)
duplicate_id = fields.Many2one(
string="# Duplicate With",
comodel_name="helpdesk_ticket",
copy=False,
)
duplicate_ids = fields.One2many(
string="Duplicates",
comodel_name="helpdesk_ticket",
inverse_name="duplicate_id",
copy=False,
)
split_id = fields.Many2one(
string="# Original Ticket",
comodel_name="helpdesk_ticket",
copy=False,
)
split_ids = fields.One2many(
string="Split Into",
comodel_name="helpdesk_ticket",
inverse_name="split_id",
copy=False,
)
data_requirement_ids = fields.One2many(
string="Data Requirements",
comodel_name="helpdesk_ticket.data_requirement",
inverse_name="ticket_id",
)
data_requirement_state = fields.Selection(
string="Data Requirement Status",
selection=[
("no_need", "Not Needed"),
("open", "In Progress"),
("done", "Done"),
],
compute="_compute_data_requirement_state",
store=True,
)
resolution_documentation_ids = fields.One2many(
string="Resolution Documentations",
comodel_name="helpdesk_ticket.resolution_documentation",
inverse_name="ticket_id",
)
resolution_documentation_state = fields.Selection(
string="Resolution Documentation Status",
selection=[
("no_need", "Not Needed"),
("open", "In Progress"),
("done", "Done"),
],
compute="_compute_resolution_documentation_state",
store=True,
)
communication_draft_count = fields.Integer(
string="Need Respond Count", store=True, compute="_compute_communication_count"
)
communication_open_count = fields.Integer(
string="Waiting for Respond Count",
store=True,
compute="_compute_communication_count",
)
state = fields.Selection(
string="State",
selection=[
("draft", "Draft"),
("open", "In Progress"),
("confirm", "Waiting for Approval"),
("done", "Done"),
("reject", "Reject"),
("terminate", "Terminate"),
("cancel", "Cancelled"),
],
copy=False,
default="draft",
required=True,
readonly=True,
)
@api.model
def _get_policy_field(self):
res = super(HelpdeskTicket, self)._get_policy_field()
policy_field = [
"open_ok",
"confirm_ok",
"approve_ok",
"reject_ok",
"restart_approval_ok",
"done_ok",
"cancel_ok",
"restart_ok",
"terminate_ok",
"manual_number_ok",
]
res += policy_field
return res
@api.depends(
"data_requirement_ids",
"data_requirement_ids.state",
)
def _compute_data_requirement_state(self):
for record in self:
result = "no_need"
count_req = len(record.data_requirement_ids)
if count_req > 0:
result = "done"
for req in record.data_requirement_ids:
if req.state == "open":
result = "open"
record.data_requirement_state = result
@api.depends(
"resolution_documentation_ids",
"resolution_documentation_ids.state",
)
def _compute_resolution_documentation_state(self):
for record in self:
result = "no_need"
count_req = len(record.resolution_documentation_ids)
if count_req > 0:
result = "done"
for req in record.resolution_documentation_ids:
if req.state == "open":
result = "open"
record.resolution_documentation_state = result
@api.onchange(
"partner_id",
"contact_group_ids",
)
def onchange_additional_partner_ids(self):
self.additional_partner_ids = [(6, 0, [])]
if self.contact_group_ids:
contact_groups = self.contact_group_ids.mapped("contact_ids")
# TODO: This is not working. Why?
contact_groups = contact_groups - self.partner_id
self.additional_partner_ids = [(6, 0, contact_groups.ids)]
@api.onchange(
"partner_id",
)
def onchange_contact_group_ids(self):
self.contact_group_ids = [(6, 0, [])]
@api.onchange(
"type_id",
)
def onchange_duration_id(self):
self.duration_id = False
if self.type_id:
self.duration_id = self.type_id.duration_id
@api.onchange(
"duration_id",
"date",
)
def onchange_date_deadline(self):
self.date_deadline = False
if self.duration_id:
self.date_deadline = self.duration_id.get_duration(self.date)
@api.model_create_multi
def create(self, vals_list):
    results = super(HelpdeskTicket, self).create(vals_list)
    for result in results:
        result._create_sequence()
    return results
def action_create_finishing_communication(self):
for record in self.sudo():
result = record._create_finishing_communication()
return result
def action_create_updating_communication(self):
for record in self.sudo():
result = record._create_updating_communication()
return result
def action_open_communication(self):
for record in self.sudo():
result = record._open_communication()
return result
def action_open_split(self):
for record in self.sudo():
result = record._open_split()
return result
def _open_split(self):
waction = self.env.ref("ssi_helpdesk.helpdesk_ticket_action").read()[0]
new_context = {
"default_split_id": self.id,
}
waction.update(
{
"view_mode": "tree,form",
"domain": [("split_id", "=", self.id)],
"context": new_context,
}
)
return waction
def _open_communication(self):
waction = self.env.ref("ssi_helpdesk.helpdesk_communication_action").read()[0]
waction.update(
{
"view_mode": "tree,form",
"domain": [("ticket_id", "=", self.id)],
}
)
return waction
def _create_finishing_communication(self):
HC = self.env["helpdesk_communication"]
hc = HC.create(self._prepare_create_finishing_communication())
partner_ids = (self.additional_partner_ids + self.partner_id).ids
hc.message_subscribe(partner_ids)
self.write(
{
"finishing_communication_id": hc.id,
}
)
return {
"name": hc.title,
"view_mode": "form",
"res_model": "helpdesk_communication",
"res_id": hc.id,
"type": "ir.actions.act_window",
}
def _create_updating_communication(self):
self.ensure_one()
HC = self.env["helpdesk_communication"]
hc = HC.create(self._prepare_create_updating_communication())
partner_ids = (self.additional_partner_ids + self.partner_id).ids
hc.message_subscribe(partner_ids)
return {
"name": hc.title,
"view_mode": "form",
"res_model": "helpdesk_communication",
"res_id": hc.id,
"type": "ir.actions.act_window",
}
def _prepare_create_finishing_communication(self):
return {
"partner_id": self.partner_id.id,
"title": self.title,
"ticket_id": self.id,
}
def _prepare_create_updating_communication(self):
title = "Ticket status update - %s - %s" % (self.name, fields.Date.today())
return {
"partner_id": self.partner_id.id,
"title": title,
"ticket_id": self.id,
}
@api.depends("communication_ids", "communication_ids.state")
def _compute_communication_count(self):
for record in self:
record.communication_draft_count = len(
record.communication_ids.filtered(lambda x: x.state == "draft")
)
record.communication_open_count = len(
record.communication_ids.filtered(lambda x: x.state == "open")
)
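# A hedged usage sketch, not part of the module (assumptions: run inside an Odoo
# shell where `env` is bound, and 42 is a hypothetical id of a non-company contact
# that satisfies the partner_id domain above; field names come from this model):
# ticket = env["helpdesk_ticket"].create({
#     "title": "Cannot log in to portal",
#     "partner_id": 42,
#     "description": "<p>Steps to reproduce...</p>",
# })
# ticket.action_open()  # mixin.transaction_open moves the ticket out of draft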
|
PypiClean
|
/ciefunctions-1.0.2.tar.gz/ciefunctions-1.0.2/tc1_97/MathJax-2.7.5/unpacked/localization/lki/MathML.js
|
MathJax.Localization.addTranslation("lki","MathML",{
version: "2.7.5",
isLoaded: true,
strings: {
BadMglyph: "mglyph \u0646\u0627\u0645\u0646\u0627\u0633\u0628: %1",
BadMglyphFont: "\u0642\u0644\u0645 \u0646\u0627\u0645\u0646\u0627\u0633\u0628: %1",
MathPlayer: "MathJax \u0646\u062A\u0648\u0627\u0646\u0633\u062A MathPlayer \u0631\u0627 \u0631\u0627\u0647\u200C\u0627\u0646\u062F\u0627\u0632\u06CC \u06A9\u0646\u062F.\n\n\u0627\u06AF\u0631 MathPlayer \u0646\u0635\u0628 \u0646\u06CC\u0633\u062A\u060C \u0634\u0645\u0627 \u0646\u06CC\u0627\u0632 \u062F\u0627\u0631\u06CC\u062F \u06A9\u0647 \u0627\u0628\u062A\u062F\u0627 \u0622\u0646 \u0631\u0627 \u0646\u0635\u0628 \u06A9\u0646\u06CC\u062F.\n\u062F\u0631 \u063A\u06CC\u0631 \u0627\u06CC\u0646 \u0635\u0648\u0631\u062A\u060C \u062A\u0646\u0638\u06CC\u0645\u0627\u062A \u0627\u0645\u0646\u06CC\u062A\u06CC \u0634\u0645\u0627 \u0645\u0645\u06A9\u0646 \u0627\u0633\u062A \u06A9\u0647 \u0627\u0632 \u0627\u062C\u0631\u0627\u06CC\n\u06A9\u0646\u062A\u0631\u0644\u200C\u0647\u0627\u06CC \u0627\u06A9\u062A\u06CC\u0648\u0627\u06A9\u0633 \u062C\u0644\u0648\u06AF\u06CC\u0631\u06CC \u06A9\u0646\u062F. \u0627\u0632 \u06AF\u0632\u06CC\u0646\u0647\u200C\u0647\u0627\u06CC \u0627\u06CC\u0646\u062A\u0631\u0646\u062A\u06CC \u0645\u0648\u062C\u0648\u062F \u0632\u06CC\u0631\n\u0645\u0646\u0648\u06CC \u0627\u0628\u0632\u0627\u0631 \u0627\u0633\u062A\u0641\u0627\u062F\u0647 \u06A9\u0646\u06CC\u062F \u0648 \u0628\u0631\u06AF\u0647\u0654 \u0627\u0645\u0646\u06CC\u062A \u0631\u0627 \u0627\u0646\u062A\u062E\u0627\u0628 \u06A9\u0646\u06CC\u062F\u060C \u0633\u067E\u0633 \u062F\u06A9\u0645\u0647\u0654\n\u0645\u0631\u062C\u0644\u0647\u0654 \u0633\u0641\u0627\u0631\u0634\u06CC \u0631\u0627 \u0641\u0634\u0627\u0631 \u062F\u0647\u06CC\u062F. \u0628\u0631\u0631\u0633\u06CC \u06A9\u0646\u06CC\u062F \u06A9\u0647 \u062A\u0646\u0638\u06CC\u0645\u0627\u062A \u0627\u062C\u0631\u0627\u06CC\n\u00AB\u0627\u062C\u0631\u0627\u06CC \u06A9\u0646\u062A\u0631\u0644\u200C\u0647\u0627\u06CC \u0627\u06A9\u062A\u06CC\u0648\u0627\u06A9\u0633\u00BB \u0648 \u00AB\u0631\u0641\u062A\u0627\u0631\u0647\u0627\u06CC \u062F\u0648\u062F\u0648\u06CC\u06CC \u0648 \u0627\u0633\u06A9\u0631\u06CC\u067E\u062A\u00BB \u0641\u0639\u0627\u0644\n\u0647\u0633\u062A\u0646\u062F.\n\n\u062F\u0631 \u062D\u0627\u0644 \u062D\u0627\u0636\u0631 \u0634\u0645\u0627 \u0628\u0647 \u062C\u0627\u06CC \u062D\u0631\u0648\u0641 \u0631\u06CC\u0627\u0636\u06CC \u067E\u06CC\u0627\u0645\u200C\u0647\u0627\u06CC \u062E\u0637\u0627\u06CC \u062E\u0648\u0627\u0647\u06CC\u062F \u062F\u06CC\u062F.",
CantCreateXMLParser: "MathJax \u0646\u062A\u0648\u0627\u0633\u062A \u06CC\u06A9 \u062A\u062C\u0632\u06CC\u0647\u200C\u06AF\u0631 \u0627\u06A9\u0633\u200C\u0627\u0645\u200C\u0627\u0644 \u0628\u0631\u0627\u06CC MathML \u0627\u06CC\u062C\u0627\u062F \u06A9\u0646\u062F.\n\u0628\u0631\u0631\u0633\u06CC \u06A9\u0646\u06CC\u062F \u06A9\u0647 \u062A\u0646\u0638\u06CC\u0645\u0627\u062A \u0627\u0645\u0646\u06CC\u062A\u06CC \u00AB\u0627\u0633\u06A9\u0631\u06CC\u067E\u062A \u06A9\u0646\u062A\u0631\u0644\u200C\u0647\u0627\u06CC \u0627\u06A9\u062A\u06CC\u0648\u0627\u06A9\u0633\n\u0639\u0644\u0627\u0645\u062A\u200C\u06AF\u0630\u0627\u0631\u06CC\u200C\u0634\u062F\u0647 \u0628\u0647 \u0639\u0646\u0648\u0627\u0646 \u0627\u0645\u0646 \u0628\u0631\u0627\u06CC \u06A9\u062F\u0632\u0646\u06CC\u00BB \u0641\u0639\u0627\u0644 \u0627\u0633\u062A (\u0627\u0632\n\u06AF\u0632\u06CC\u0646\u0647\u0654 \u0627\u06CC\u0646\u062A\u0631\u0646\u062A\u06CC \u06AF\u0632\u06CC\u0646\u0647 \u062F\u0631 \u0645\u0646\u0648\u06CC \u0627\u0628\u0632\u0627\u0631\u0647\u0627 \u0627\u0633\u062A\u0641\u0627\u062F\u0647 \u06A9\u0646\u06CC\u062F \u0648 \u067E\u0646\u0644 \u0627\u0645\u0646\u06CC\u062A \u0631\u0627 \u0627\u0646\u062A\u062E\u0627\u0628\n\u06A9\u0646\u06CC\u062F \u0648 \u062F\u06A9\u0645\u0647\u0654 \u0645\u0631\u062D\u0644\u0647\u0654 \u0633\u0641\u0627\u0631\u0634\u06CC \u062A\u0627 \u0627\u06CC\u0646 \u0631\u0627 \u0628\u0631\u0631\u0633\u06CC \u06A9\u0646\u06CC\u062F).\n\n\u0645\u0639\u0627\u062F\u0644\u0647\u200C\u0647\u0627\u06CC MathML \u0646\u0645\u06CC\u200C\u062A\u0648\u0627\u0646\u0646\u062F \u062A\u0648\u0633\u0637 MathJax \u067E\u0631\u062F\u0627\u0632\u0634 \u06AF\u0631\u062F\u0646\u062F.",
UnknownNodeType: "\u0646\u0648\u0639 \u06AF\u0631\u0647\u0654 \u0646\u0627\u0634\u0646\u0627\u062E\u062A\u0647: %1",
UnexpectedTextNode: "\u06AF\u0631\u0647\u0654 \u0645\u062A\u0646\u06CC \u063A\u06CC\u0631\u0645\u0646\u062A\u0638\u0631\u0647:\u200C %1",
ErrorParsingMathML: "\u062E\u0637\u0627\u06CC \u062A\u062C\u0632\u06CC\u0647\u0654 MathML",
ParsingError: "\u062E\u0637\u0627\u06CC \u062A\u062C\u0632\u06CC\u0647\u0654 MathML\u200F: %1",
MathMLSingleElement: "MathML \u0628\u0627\u06CC\u062F \u0628\u0647 \u062F\u0646\u0628\u0627\u0644 \u06CC\u06A9 \u0639\u0646\u0635\u0631 \u0648\u0627\u062D\u062F \u0628\u06CC\u0627\u06CC\u062F",
MathMLRootElement: "MathML \u0628\u0627\u06CC\u062F \u062A\u0648\u0633\u0637 \u0639\u0646\u0635\u0631 \u003Cmath\u003E \u062A\u0634\u06A9\u06CC\u0644 \u0634\u062F\u0647 \u0628\u0627\u0634\u062F \u0646\u0647 %1"
}
});
MathJax.Ajax.loadComplete("[MathJax]/localization/lki/MathML.js");
|
PypiClean
|
/mxnet_cu101-2.0.0a0-py3-none-manylinux2014_x86_64.whl/mxnet/ndarray/gen_random.py
|
from ._internal import NDArrayBase
from ..base import _Null
def exponential(lam=_Null, shape=_Null, ctx=_Null, dtype=_Null, out=None, name=None, **kwargs):
r"""Draw random samples from an exponential distribution.
Samples are distributed according to an exponential distribution parametrized by *lambda* (rate).
Example::
exponential(lam=4, shape=(2,2)) = [[ 0.0097189 , 0.08999364],
[ 0.04146638, 0.31715935]]
Defined in ../src/operator/random/sample_op.cc:L137
Parameters
----------
lam : float, optional, default=1
Lambda parameter (rate) of the exponential distribution.
shape : Shape(tuple), optional, default=None
Shape of the output.
ctx : string, optional, default=''
Context of output, in format [cpu|gpu|cpu_pinned](n). Only used for imperative calls.
dtype : {'None', 'float16', 'float32', 'float64'},optional, default='None'
DType of the output in case this can't be inferred. Defaults to float32 if not defined (dtype=None).
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return (0,)
def exponential_like(data=None, lam=_Null, out=None, name=None, **kwargs):
r"""Draw random samples from an exponential distribution according to the input array shape.
Samples are distributed according to an exponential distribution parametrized by *lambda* (rate).
Example::
exponential(lam=4, data=ones(2,2)) = [[ 0.0097189 , 0.08999364],
[ 0.04146638, 0.31715935]]
Defined in ../src/operator/random/sample_op.cc:L244
Parameters
----------
lam : float, optional, default=1
Lambda parameter (rate) of the exponential distribution.
data : NDArray
The input
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return (0,)
def gamma(alpha=_Null, beta=_Null, shape=_Null, ctx=_Null, dtype=_Null, out=None, name=None, **kwargs):
r"""Draw random samples from a gamma distribution.
Samples are distributed according to a gamma distribution parametrized by *alpha* (shape) and *beta* (scale).
Example::
gamma(alpha=9, beta=0.5, shape=(2,2)) = [[ 7.10486984, 3.37695289],
[ 3.91697288, 3.65933681]]
Defined in ../src/operator/random/sample_op.cc:L125
Parameters
----------
alpha : float, optional, default=1
Alpha parameter (shape) of the gamma distribution.
beta : float, optional, default=1
Beta parameter (scale) of the gamma distribution.
shape : Shape(tuple), optional, default=None
Shape of the output.
ctx : string, optional, default=''
Context of output, in format [cpu|gpu|cpu_pinned](n). Only used for imperative calls.
dtype : {'None', 'float16', 'float32', 'float64'},optional, default='None'
DType of the output in case this can't be inferred. Defaults to float32 if not defined (dtype=None).
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return (0,)
def gamma_like(data=None, alpha=_Null, beta=_Null, out=None, name=None, **kwargs):
r"""Draw random samples from a gamma distribution according to the input array shape.
Samples are distributed according to a gamma distribution parametrized by *alpha* (shape) and *beta* (scale).
Example::
gamma(alpha=9, beta=0.5, data=ones(2,2)) = [[ 7.10486984, 3.37695289],
[ 3.91697288, 3.65933681]]
Defined in ../src/operator/random/sample_op.cc:L233
Parameters
----------
alpha : float, optional, default=1
Alpha parameter (shape) of the gamma distribution.
beta : float, optional, default=1
Beta parameter (scale) of the gamma distribution.
data : NDArray
The input
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return (0,)
def generalized_negative_binomial(mu=_Null, alpha=_Null, shape=_Null, ctx=_Null, dtype=_Null, out=None, name=None, **kwargs):
r"""Draw random samples from a generalized negative binomial distribution.
Samples are distributed according to a generalized negative binomial distribution parametrized by
*mu* (mean) and *alpha* (dispersion). *alpha* is defined as *1/k* where *k* is the failure limit of the
number of unsuccessful experiments (generalized to real numbers).
Samples will always be returned as a floating point data type.
Example::
generalized_negative_binomial(mu=2.0, alpha=0.3, shape=(2,2)) = [[ 2., 1.],
[ 6., 4.]]
Defined in ../src/operator/random/sample_op.cc:L180
Parameters
----------
mu : float, optional, default=1
Mean of the negative binomial distribution.
alpha : float, optional, default=1
Alpha (dispersion) parameter of the negative binomial distribution.
shape : Shape(tuple), optional, default=None
Shape of the output.
ctx : string, optional, default=''
Context of output, in format [cpu|gpu|cpu_pinned](n). Only used for imperative calls.
dtype : {'None', 'float16', 'float32', 'float64'},optional, default='None'
DType of the output in case this can't be inferred. Defaults to float32 if not defined (dtype=None).
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return (0,)
def generalized_negative_binomial_like(data=None, mu=_Null, alpha=_Null, out=None, name=None, **kwargs):
r"""Draw random samples from a generalized negative binomial distribution according to the
input array shape.
Samples are distributed according to a generalized negative binomial distribution parametrized by
*mu* (mean) and *alpha* (dispersion). *alpha* is defined as *1/k* where *k* is the failure limit of the
number of unsuccessful experiments (generalized to real numbers).
Samples will always be returned as a floating point data type.
Example::
generalized_negative_binomial(mu=2.0, alpha=0.3, data=ones(2,2)) = [[ 2., 1.],
[ 6., 4.]]
Defined in ../src/operator/random/sample_op.cc:L285
Parameters
----------
mu : float, optional, default=1
Mean of the negative binomial distribution.
alpha : float, optional, default=1
Alpha (dispersion) parameter of the negative binomial distribution.
data : NDArray
The input
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return (0,)
def negative_binomial(k=_Null, p=_Null, shape=_Null, ctx=_Null, dtype=_Null, out=None, name=None, **kwargs):
r"""Draw random samples from a negative binomial distribution.
Samples are distributed according to a negative binomial distribution parametrized by
*k* (limit of unsuccessful experiments) and *p* (failure probability in each experiment).
Samples will always be returned as a floating point data type.
Example::
negative_binomial(k=3, p=0.4, shape=(2,2)) = [[ 4., 7.],
[ 2., 5.]]
Defined in ../src/operator/random/sample_op.cc:L165
Parameters
----------
k : int, optional, default='1'
Limit of unsuccessful experiments.
p : float, optional, default=1
Failure probability in each experiment.
shape : Shape(tuple), optional, default=None
Shape of the output.
ctx : string, optional, default=''
Context of output, in format [cpu|gpu|cpu_pinned](n). Only used for imperative calls.
dtype : {'None', 'float16', 'float32', 'float64'},optional, default='None'
DType of the output in case this can't be inferred. Defaults to float32 if not defined (dtype=None).
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return (0,)
def negative_binomial_like(data=None, k=_Null, p=_Null, out=None, name=None, **kwargs):
r"""Draw random samples from a negative binomial distribution according to the input array shape.
Samples are distributed according to a negative binomial distribution parametrized by
*k* (limit of unsuccessful experiments) and *p* (failure probability in each experiment).
Samples will always be returned as a floating point data type.
Example::
negative_binomial(k=3, p=0.4, data=ones(2,2)) = [[ 4., 7.],
[ 2., 5.]]
Defined in ../src/operator/random/sample_op.cc:L269
Parameters
----------
k : int, optional, default='1'
Limit of unsuccessful experiments.
p : float, optional, default=1
Failure probability in each experiment.
data : NDArray
The input
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return (0,)
def normal(loc=_Null, scale=_Null, shape=_Null, ctx=_Null, dtype=_Null, out=None, name=None, **kwargs):
r"""Draw random samples from a normal (Gaussian) distribution.
.. note:: The existing alias ``normal`` is deprecated.
Samples are distributed according to a normal distribution parametrized by *loc* (mean) and *scale*
(standard deviation).
Example::
normal(loc=0, scale=1, shape=(2,2)) = [[ 1.89171135, -1.16881478],
[-1.23474145, 1.55807114]]
Defined in ../src/operator/random/sample_op.cc:L113
Parameters
----------
loc : float, optional, default=0
Mean of the distribution.
scale : float, optional, default=1
Standard deviation of the distribution.
shape : Shape(tuple), optional, default=None
Shape of the output.
ctx : string, optional, default=''
Context of output, in format [cpu|gpu|cpu_pinned](n). Only used for imperative calls.
dtype : {'None', 'float16', 'float32', 'float64'},optional, default='None'
DType of the output in case this can't be inferred. Defaults to float32 if not defined (dtype=None).
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return (0,)
def normal_like(data=None, loc=_Null, scale=_Null, out=None, name=None, **kwargs):
r"""Draw random samples from a normal (Gaussian) distribution according to the input array shape.
Samples are distributed according to a normal distribution parametrized by *loc* (mean) and *scale*
(standard deviation).
Example::
normal(loc=0, scale=1, data=ones(2,2)) = [[ 1.89171135, -1.16881478],
[-1.23474145, 1.55807114]]
Defined in ../src/operator/random/sample_op.cc:L222
Parameters
----------
loc : float, optional, default=0
Mean of the distribution.
scale : float, optional, default=1
Standard deviation of the distribution.
data : NDArray
The input
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return (0,)
def pdf_dirichlet(sample=None, alpha=None, is_log=_Null, out=None, name=None, **kwargs):
r"""Computes the value of the PDF of *sample* of
Dirichlet distributions with parameter *alpha*.
The shape of *alpha* must match the leftmost subshape of *sample*. That is, *sample*
can have the same shape as *alpha*, in which case the output contains one density per
distribution, or *sample* can be a tensor of tensors with that shape, in which case
the output is a tensor of densities such that the densities at index *i* in the output
are given by the samples at index *i* in *sample* parameterized by the value of *alpha*
at index *i*.
Examples::
random_pdf_dirichlet(sample=[[1,2],[2,3],[3,4]], alpha=[2.5, 2.5]) =
[38.413498, 199.60245, 564.56085]
sample = [[[1, 2, 3], [10, 20, 30], [100, 200, 300]],
[[0.1, 0.2, 0.3], [0.01, 0.02, 0.03], [0.001, 0.002, 0.003]]]
random_pdf_dirichlet(sample=sample, alpha=[0.1, 0.4, 0.9]) =
[[2.3257459e-02, 5.8420084e-04, 1.4674458e-05],
[9.2589635e-01, 3.6860607e+01, 1.4674468e+03]]
Defined in ../src/operator/random/pdf_op.cc:L316
Parameters
----------
sample : NDArray
Samples from the distributions.
alpha : NDArray
Concentration parameters of the distributions.
is_log : boolean, optional, default=0
If set, compute the density of the log-probability instead of the probability.
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return (0,)
def pdf_exponential(sample=None, lam=None, is_log=_Null, out=None, name=None, **kwargs):
r"""Computes the value of the PDF of *sample* of
exponential distributions with parameters *lam* (rate).
The shape of *lam* must match the leftmost subshape of *sample*. That is, *sample*
can have the same shape as *lam*, in which case the output contains one density per
distribution, or *sample* can be a tensor of tensors with that shape, in which case
the output is a tensor of densities such that the densities at index *i* in the output
are given by the samples at index *i* in *sample* parameterized by the value of *lam*
at index *i*.
Examples::
random_pdf_exponential(sample=[[1, 2, 3]], lam=[1]) =
[[0.36787945, 0.13533528, 0.04978707]]
sample = [[1,2,3],
[1,2,3],
[1,2,3]]
random_pdf_exponential(sample=sample, lam=[1,0.5,0.25]) =
[[0.36787945, 0.13533528, 0.04978707],
[0.30326533, 0.18393973, 0.11156508],
[0.1947002, 0.15163267, 0.11809164]]
Defined in ../src/operator/random/pdf_op.cc:L305
Parameters
----------
sample : NDArray
Samples from the distributions.
lam : NDArray
Lambda (rate) parameters of the distributions.
is_log : boolean, optional, default=0
If set, compute the density of the log-probability instead of the probability.
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return (0,)
def pdf_gamma(sample=None, alpha=None, beta=None, is_log=_Null, out=None, name=None, **kwargs):
r"""Computes the value of the PDF of *sample* of
gamma distributions with parameters *alpha* (shape) and *beta* (rate).
*alpha* and *beta* must have the same shape, which must match the leftmost subshape
of *sample*. That is, *sample* can have the same shape as *alpha* and *beta*, in which
case the output contains one density per distribution, or *sample* can be a tensor
of tensors with that shape, in which case the output is a tensor of densities such that
the densities at index *i* in the output are given by the samples at index *i* in *sample*
parameterized by the values of *alpha* and *beta* at index *i*.
Examples::
random_pdf_gamma(sample=[[1,2,3,4,5]], alpha=[5], beta=[1]) =
[[0.01532831, 0.09022352, 0.16803136, 0.19536681, 0.17546739]]
sample = [[1, 2, 3, 4, 5],
[2, 3, 4, 5, 6],
[3, 4, 5, 6, 7]]
random_pdf_gamma(sample=sample, alpha=[5,6,7], beta=[1,1,1]) =
[[0.01532831, 0.09022352, 0.16803136, 0.19536681, 0.17546739],
[0.03608941, 0.10081882, 0.15629345, 0.17546739, 0.16062315],
[0.05040941, 0.10419563, 0.14622283, 0.16062315, 0.14900276]]
Defined in ../src/operator/random/pdf_op.cc:L303
Parameters
----------
sample : NDArray
Samples from the distributions.
alpha : NDArray
Alpha (shape) parameters of the distributions.
is_log : boolean, optional, default=0
If set, compute the density of the log-probability instead of the probability.
beta : NDArray
Beta (scale) parameters of the distributions.
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return (0,)
def pdf_generalized_negative_binomial(sample=None, mu=None, alpha=None, is_log=_Null, out=None, name=None, **kwargs):
r"""Computes the value of the PDF of *sample* of
generalized negative binomial distributions with parameters *mu* (mean)
and *alpha* (dispersion). This can be understood as a reparameterization of
the negative binomial, where *k* = *1 / alpha* and *p* = *1 / (mu \* alpha + 1)*.
*mu* and *alpha* must have the same shape, which must match the leftmost subshape
of *sample*. That is, *sample* can have the same shape as *mu* and *alpha*, in which
case the output contains one density per distribution, or *sample* can be a tensor
of tensors with that shape, in which case the output is a tensor of densities such that
the densities at index *i* in the output are given by the samples at index *i* in *sample*
parameterized by the values of *mu* and *alpha* at index *i*.
Examples::
random_pdf_generalized_negative_binomial(sample=[[1, 2, 3, 4]], alpha=[1], mu=[1]) =
[[0.25, 0.125, 0.0625, 0.03125]]
sample = [[1,2,3,4],
[1,2,3,4]]
random_pdf_generalized_negative_binomial(sample=sample, alpha=[1, 0.6666], mu=[1, 1.5]) =
[[0.25, 0.125, 0.0625, 0.03125 ],
[0.26517063, 0.16573331, 0.09667706, 0.05437994]]
Defined in ../src/operator/random/pdf_op.cc:L314
Parameters
----------
sample : NDArray
Samples from the distributions.
mu : NDArray
Means of the distributions.
is_log : boolean, optional, default=0
If set, compute the density of the log-probability instead of the probability.
alpha : NDArray
Alpha (dispersion) parameters of the distributions.
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return (0,)
def pdf_negative_binomial(sample=None, k=None, p=None, is_log=_Null, out=None, name=None, **kwargs):
r"""Computes the value of the PDF of samples of
negative binomial distributions with parameters *k* (failure limit) and *p* (failure probability).
*k* and *p* must have the same shape, which must match the leftmost subshape
of *sample*. That is, *sample* can have the same shape as *k* and *p*, in which
case the output contains one density per distribution, or *sample* can be a tensor
of tensors with that shape, in which case the output is a tensor of densities such that
the densities at index *i* in the output are given by the samples at index *i* in *sample*
parameterized by the values of *k* and *p* at index *i*.
Examples::
random_pdf_negative_binomial(sample=[[1,2,3,4]], k=[1], p=[0.5]) =
[[0.25, 0.125, 0.0625, 0.03125]]
# Note that k may be real-valued
sample = [[1,2,3,4],
[1,2,3,4]]
random_pdf_negative_binomial(sample=sample, k=[1, 1.5], p=[0.5, 0.5]) =
[[0.25, 0.125, 0.0625, 0.03125 ],
[0.26516506, 0.16572815, 0.09667476, 0.05437956]]
Defined in ../src/operator/random/pdf_op.cc:L310
Parameters
----------
sample : NDArray
Samples from the distributions.
k : NDArray
Limits of unsuccessful experiments.
is_log : boolean, optional, default=0
If set, compute the density of the log-probability instead of the probability.
p : NDArray
Failure probabilities in each experiment.
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return (0,)
def pdf_normal(sample=None, mu=None, sigma=None, is_log=_Null, out=None, name=None, **kwargs):
r"""Computes the value of the PDF of *sample* of
normal distributions with parameters *mu* (mean) and *sigma* (standard deviation).
*mu* and *sigma* must have the same shape, which must match the leftmost subshape
of *sample*. That is, *sample* can have the same shape as *mu* and *sigma*, in which
case the output contains one density per distribution, or *sample* can be a tensor
of tensors with that shape, in which case the output is a tensor of densities such that
the densities at index *i* in the output are given by the samples at index *i* in *sample*
parameterized by the values of *mu* and *sigma* at index *i*.
Examples::
sample = [[-2, -1, 0, 1, 2]]
random_pdf_normal(sample=sample, mu=[0], sigma=[1]) =
[[0.05399097, 0.24197073, 0.3989423, 0.24197073, 0.05399097]]
random_pdf_normal(sample=sample*2, mu=[0,0], sigma=[1,2]) =
[[0.05399097, 0.24197073, 0.3989423, 0.24197073, 0.05399097],
[0.12098537, 0.17603266, 0.19947115, 0.17603266, 0.12098537]]
Defined in ../src/operator/random/pdf_op.cc:L300
Parameters
----------
sample : NDArray
Samples from the distributions.
mu : NDArray
Means of the distributions.
is_log : boolean, optional, default=0
If set, compute the density of the log-probability instead of the probability.
sigma : NDArray
Standard deviations of the distributions.
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return (0,)
def pdf_poisson(sample=None, lam=None, is_log=_Null, out=None, name=None, **kwargs):
r"""Computes the value of the PDF of *sample* of
Poisson distributions with parameters *lam* (rate).
The shape of *lam* must match the leftmost subshape of *sample*. That is, *sample*
can have the same shape as *lam*, in which case the output contains one density per
distribution, or *sample* can be a tensor of tensors with that shape, in which case
the output is a tensor of densities such that the densities at index *i* in the output
are given by the samples at index *i* in *sample* parameterized by the value of *lam*
at index *i*.
Examples::
random_pdf_poisson(sample=[[0,1,2,3]], lam=[1]) =
[[0.36787945, 0.36787945, 0.18393973, 0.06131324]]
sample = [[0,1,2,3],
[0,1,2,3],
[0,1,2,3]]
random_pdf_poisson(sample=sample, lam=[1,2,3]) =
[[0.36787945, 0.36787945, 0.18393973, 0.06131324],
[0.13533528, 0.27067056, 0.27067056, 0.18044704],
[0.04978707, 0.14936121, 0.22404182, 0.22404182]]
Defined in ../src/operator/random/pdf_op.cc:L307
Parameters
----------
sample : NDArray
Samples from the distributions.
lam : NDArray
Lambda (rate) parameters of the distributions.
is_log : boolean, optional, default=0
If set, compute the density of the log-probability instead of the probability.
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return (0,)
def pdf_uniform(sample=None, low=None, high=None, is_log=_Null, out=None, name=None, **kwargs):
r"""Computes the value of the PDF of *sample* of
uniform distributions on the intervals given by *[low,high)*.
*low* and *high* must have the same shape, which must match the leftmost subshape
of *sample*. That is, *sample* can have the same shape as *low* and *high*, in which
case the output contains one density per distribution, or *sample* can be a tensor
of tensors with that shape, in which case the output is a tensor of densities such that
the densities at index *i* in the output are given by the samples at index *i* in *sample*
parameterized by the values of *low* and *high* at index *i*.
Examples::
random_pdf_uniform(sample=[[1,2,3,4]], low=[0], high=[10]) = [0.1, 0.1, 0.1, 0.1]
sample = [[[1, 2, 3],
[1, 2, 3]],
[[1, 2, 3],
[1, 2, 3]]]
low = [[0, 0],
[0, 0]]
high = [[ 5, 10],
[15, 20]]
random_pdf_uniform(sample=sample, low=low, high=high) =
[[[0.2, 0.2, 0.2 ],
[0.1, 0.1, 0.1 ]],
[[0.06667, 0.06667, 0.06667],
[0.05, 0.05, 0.05 ]]]
Defined in ../src/operator/random/pdf_op.cc:L298
Parameters
----------
sample : NDArray
Samples from the distributions.
low : NDArray
Lower bounds of the distributions.
is_log : boolean, optional, default=0
If set, compute the density of the log-probability instead of the probability.
high : NDArray
Upper bounds of the distributions.
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return (0,)
def poisson(lam=_Null, shape=_Null, ctx=_Null, dtype=_Null, out=None, name=None, **kwargs):
r"""Draw random samples from a Poisson distribution.
Samples are distributed according to a Poisson distribution parametrized by *lambda* (rate).
Samples will always be returned as a floating point data type.
Example::
poisson(lam=4, shape=(2,2)) = [[ 5., 2.],
[ 4., 6.]]
Defined in ../src/operator/random/sample_op.cc:L151
Parameters
----------
lam : float, optional, default=1
Lambda parameter (rate) of the Poisson distribution.
shape : Shape(tuple), optional, default=None
Shape of the output.
ctx : string, optional, default=''
Context of output, in format [cpu|gpu|cpu_pinned](n). Only used for imperative calls.
dtype : {'None', 'float16', 'float32', 'float64'},optional, default='None'
DType of the output in case this can't be inferred. Defaults to float32 if not defined (dtype=None).
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return (0,)
def poisson_like(data=None, lam=_Null, out=None, name=None, **kwargs):
r"""Draw random samples from a Poisson distribution according to the input array shape.
Samples are distributed according to a Poisson distribution parametrized by *lambda* (rate).
Samples will always be returned as a floating point data type.
Example::
poisson(lam=4, data=ones(2,2)) = [[ 5., 2.],
[ 4., 6.]]
Defined in ../src/operator/random/sample_op.cc:L256
Parameters
----------
lam : float, optional, default=1
Lambda parameter (rate) of the Poisson distribution.
data : NDArray
The input
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return (0,)
def randint(low=_Null, high=_Null, shape=_Null, ctx=_Null, dtype=_Null, out=None, name=None, **kwargs):
r"""Draw random samples from a discrete uniform distribution.
Samples are uniformly distributed over the half-open interval *[low, high)*
(includes *low*, but excludes *high*).
Example::
randint(low=0, high=5, shape=(2,2)) = [[ 0, 2],
[ 3, 1]]
Defined in ../src/operator/random/sample_op.cc:L195
Parameters
----------
low : long, required
Lower bound of the distribution.
high : long, required
Upper bound of the distribution.
shape : Shape(tuple), optional, default=None
Shape of the output.
ctx : string, optional, default=''
Context of output, in format [cpu|gpu|cpu_pinned](n). Only used for imperative calls.
dtype : {'None', 'int32', 'int64'},optional, default='None'
DType of the output in case this can't be inferred. Defaults to int32 if not defined (dtype=None).
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return (0,)
def uniform(low=_Null, high=_Null, shape=_Null, ctx=_Null, dtype=_Null, out=None, name=None, **kwargs):
r"""Draw random samples from a uniform distribution.
.. note:: The existing alias ``uniform`` is deprecated.
Samples are uniformly distributed over the half-open interval *[low, high)*
(includes *low*, but excludes *high*).
Example::
uniform(low=0, high=1, shape=(2,2)) = [[ 0.60276335, 0.85794562],
[ 0.54488319, 0.84725171]]
Defined in ../src/operator/random/sample_op.cc:L96
Parameters
----------
low : float, optional, default=0
Lower bound of the distribution.
high : float, optional, default=1
Upper bound of the distribution.
shape : Shape(tuple), optional, default=None
Shape of the output.
ctx : string, optional, default=''
Context of output, in format [cpu|gpu|cpu_pinned](n). Only used for imperative calls.
dtype : {'None', 'float16', 'float32', 'float64'},optional, default='None'
DType of the output in case this can't be inferred. Defaults to float32 if not defined (dtype=None).
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return (0,)
def uniform_like(data=None, low=_Null, high=_Null, out=None, name=None, **kwargs):
r"""Draw random samples from a uniform distribution according to the input array shape.
Samples are uniformly distributed over the half-open interval *[low, high)*
(includes *low*, but excludes *high*).
Example::
uniform(low=0, high=1, data=ones(2,2)) = [[ 0.60276335, 0.85794562],
[ 0.54488319, 0.84725171]]
Defined in ../src/operator/random/sample_op.cc:L210
Parameters
----------
low : float, optional, default=0
Lower bound of the distribution.
high : float, optional, default=1
Upper bound of the distribution.
data : NDArray
The input
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return (0,)
__all__ = ['exponential', 'exponential_like', 'gamma', 'gamma_like', 'generalized_negative_binomial', 'generalized_negative_binomial_like', 'negative_binomial', 'negative_binomial_like', 'normal', 'normal_like', 'pdf_dirichlet', 'pdf_exponential', 'pdf_gamma', 'pdf_generalized_negative_binomial', 'pdf_negative_binomial', 'pdf_normal', 'pdf_poisson', 'pdf_uniform', 'poisson', 'poisson_like', 'randint', 'uniform', 'uniform_like']
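# A minimal usage sketch, not part of the generated module (assumptions: at import
# time these stubs are backed by the registered backend operators, as in mxnet's
# generated ndarray namespaces, so the legacy mx.nd.random calls below return
# populated NDArrays; API availability may differ in the 2.0 alpha).
if __name__ == "__main__":
    import mxnet as mx
    print(mx.nd.random.uniform(low=0, high=1, shape=(2, 2)))  # draws from [0, 1)
    print(mx.nd.random.normal(loc=0, scale=1, shape=(2, 2)))  # draws from N(0, 1)
    print(mx.nd.random.poisson(lam=4, shape=(2, 2)))          # float-typed counts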
|
PypiClean
|