/robotcode_language_server-0.54.3.tar.gz/robotcode_language_server-0.54.3/README.md
# robotcode-language-server
[PyPI](https://pypi.org/project/robotcode-language-server)
[License](https://github.com/d-biehl/robotcode/blob/master/LICENSE.txt)
-----
## Introduction
The Language Server for [RobotCode](https://robotcode.io).
## Installation
```console
pip install robotcode-language-server
```
## License
`robotcode-language-server` is distributed under the terms of the [Apache-2.0](https://spdx.org/licenses/Apache-2.0.html) license.
/NeodroidAgent-0.4.8-py36-none-any.whl/neodroidagent/common/session_factory/vertical/single_agent_environment_session.py
import inspect
import time
from contextlib import suppress
from typing import Any, Type
import torch
import torchsnooper
from draugr import CaptureEarlyStop, add_early_stopping_key_combination, sprint
from draugr.torch_utilities import TensorBoardPytorchWriter, torch_seed
from neodroidagent import PROJECT_APP_PATH
from neodroidagent.agents import Agent
from neodroidagent.utilities import NoAgent
from warg import GDKC, passes_kws_to
from warg.context_wrapper import ContextWrapper
from warg.decorators.timing import StopWatch
from .environment_session import EnvironmentSession
from .procedures.procedure_specification import Procedure
__author__ = "Christian Heider Nielsen"
__doc__ = r"""
"""
__all__ = ["SingleAgentEnvironmentSession"]
class SingleAgentEnvironmentSession(EnvironmentSession):
@passes_kws_to(
add_early_stopping_key_combination,
Agent.__init__,
Agent.save,
Procedure.__call__,
)
def __call__(
self,
agent: Type[Agent],
*,
load_time: Any,
seed: int,
save_ending_model: bool = False,
continue_training: bool = True,
train_agent: bool = True,
debug: bool = False,
**kwargs
):
"""
Start a session, builds Agent and starts/connect environment(s), and runs Procedure
:param args:
:param kwargs:
:return:
"""
with ContextWrapper(torchsnooper.snoop(), debug):
with ContextWrapper(torch.autograd.detect_anomaly(), debug):
if agent is None:
raise NoAgent
if inspect.isclass(agent):
sprint('Instantiating Agent', color="crimson", bold=True, italic=True)
torch_seed(seed)
self._environment.seed(seed)
agent = agent(load_time=load_time, seed=seed, **kwargs)
agent_class_name = agent.__class__.__name__
total_shape = "_".join(
[
str(i)
for i in (
self._environment.observation_space.shape
+ self._environment.action_space.shape
+ self._environment.signal_space.shape
)
]
)
environment_name = f"{self._environment.environment_name}_{total_shape}"
save_directory = (
PROJECT_APP_PATH.user_data / environment_name / agent_class_name
)
log_directory = (
PROJECT_APP_PATH.user_log / environment_name / agent_class_name / load_time
)
with TensorBoardPytorchWriter(log_directory) as metric_writer:
agent.build(
self._environment.observation_space,
self._environment.action_space,
self._environment.signal_space,
metric_writer=metric_writer
)
kwargs.update(
                        environment_name=self._environment.environment_name,
save_directory=save_directory,
log_directory=log_directory,
load_time=load_time,
seed=seed,
train_agent=train_agent,
)
found = False
if continue_training:
sprint(
"Searching for previously trained models for initialisation for this configuration "
"(Architecture, Action Space, Observation Space, ...)", color="crimson", bold=True, italic=True
)
found = agent.load(
save_directory=save_directory, evaluation=not train_agent
)
if not found:
sprint(
"Did not find any previously trained models for this configuration", color="crimson", bold=True, italic=True
)
if not train_agent:
agent.eval()
else:
agent.train()
if not found:
sprint("Training from new initialisation", color="crimson", bold=True, italic=True)
session_proc = self._procedure(agent, environment=self._environment)
with CaptureEarlyStop(callbacks=self._procedure.stop_procedure, **kwargs):
with StopWatch() as timer:
with suppress(KeyboardInterrupt):
training_resume = session_proc(metric_writer=metric_writer, **kwargs)
if training_resume and "stats" in training_resume:
training_resume.stats.save(**kwargs)
end_message = f"Training ended, time elapsed: {timer // 60:.0f}m {timer % 60:.0f}s"
line_width = 9
sprint(f'\n{"-" * line_width} {end_message} {"-" * line_width}\n', color="crimson", bold=True, italic=True)
if save_ending_model:
agent.save(**kwargs)
try:
self._environment.close()
except BrokenPipeError:
pass
exit(0)
if __name__ == "__main__":
print(SingleAgentEnvironmentSession)
/eng_spacysentiment-2.3.0.tar.gz/eng_spacysentiment-2.3.0/eng_spacysentiment/eng_spacysentiment-2.3.0/README.md
Sentiment analysis using spaCy pipelines.
| Feature | Description |
| --- | --- |
| **Name** | `eng_spacysentiment` |
| **Version** | `2.3.0` |
| **spaCy** | `>=3.5.3,<3.6.0` |
| **Default Pipeline** | `textcat` |
| **Components** | `textcat` |
| **Vectors** | 0 keys, 0 unique vectors (0 dimensions) |
| **Sources** | n/a |
| **License** | `MIT` |
| **Author** | [Vishnu](https://github.com/Vishnunkumar/eng_spacysentiment) |
### Label Scheme
<details>
<summary>View label scheme (3 labels for 1 component)</summary>
| Component | Labels |
| --- | --- |
| **`textcat`** | `positive`, `negative`, `neutral` |
</details>
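A minimal usage sketch, assuming the package has been installed (e.g. via `pip install eng_spacysentiment`); the example sentence and printed scores are illustrative:

```python
import spacy

# Load the packaged pipeline; its `textcat` component assigns the three labels above.
nlp = spacy.load("eng_spacysentiment")

doc = nlp("The staff were friendly and the food arrived quickly.")
print(doc.cats)  # e.g. {'positive': 0.91, 'negative': 0.04, 'neutral': 0.05}
```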
/CatLearn-0.6.2.tar.gz/CatLearn-0.6.2/catlearn/utilities/utilities.py
import numpy as np
import hashlib
from scipy.stats import pearsonr, spearmanr, kendalltau
from catlearn.preprocess.scaling import standardize
def formal_charges(atoms, ion_number=8, ion_charge=-2):
"""Return a list of formal charges on atoms.
Parameters
----------
atoms : object
ase.Atoms object representing a chalcogenide. The default parameters
are relevant for an oxide.
    ion_number : int
        Atomic number of the anion.
    ion_charge : int
        Formal charge of the anion.
Returns
    -------
all_charges : list
Formal charges ordered by atomic index.
"""
cm = atoms.connectivity
anion_charges = np.zeros(len(atoms))
for i, atom in enumerate(atoms):
if atoms.numbers[i] == ion_number:
anion_charges[i] = ion_charge
transfer = cm * np.vstack(anion_charges)
row_sums = transfer.sum(axis=1)
for j, s in enumerate(row_sums):
if s == ion_charge:
row_sums[j] *= abs(ion_charge)
shared = ion_charge * transfer / np.vstack(row_sums)
cation_charges = -np.nansum(shared, axis=0)
all_charges = anion_charges + cation_charges
return all_charges
def holdout_set(data, fraction, target=None, seed=None):
    """Return a dataset split into a hold out set and a training set.

    Parameters
    ----------
    data : array
        n by d data matrix.
    fraction : float
        fraction of data to hold out for testing.
    target : list
        optional list of targets or separate feature.
    seed : int
        optional integer seed for reproducible splits.
    """
    matrix = np.array(data)
    # Randomize order, keeping data rows and targets aligned.
    if seed is not None:
        np.random.seed(seed)
    order = np.random.permutation(len(matrix))
    matrix = matrix[order]
    # Split data.
    index = int(len(matrix) * fraction)
    holdout = matrix[:index, :]
    train = matrix[index:, :]
    if target is None:
        return train, holdout
    target = np.asarray(target)[order]
    test_target = target[:index]
    train_target = target[index:]
    return train, train_target, holdout, test_target
def target_correlation(train, target,
correlation=['pearson', 'spearman', 'kendall']):
"""Return the correlation of all columns of train with a target feature.
Parameters
----------
train : array
n by d training data matrix.
    target : list
        target for correlation.
    correlation : list
        correlation methods to apply; any of 'pearson', 'spearman', 'kendall'.

    Returns
    -------
    metric : array
        len(correlation) by d matrix of correlation coefficients.
"""
# Scale and shape the data.
train_data = standardize(train_matrix=train)['train']
train_target = target
    output = []
    for method in correlation:
        # Find the correlation of each feature column with the target.
        row = []
        for d in train_data.T:
            if method == 'pearson':
                row.append(pearsonr(d, train_target)[0])
            elif method == 'spearman':
                row.append(spearmanr(d, train_target)[0])
            elif method == 'kendall':
                row.append(kendalltau(d, train_target)[0])
        output.append(row)
return output
def geometry_hash(atoms):
"""A hash based strictly on the geometry features of an atoms object.
Uses positions, cell, and symbols.
This is intended for planewave basis set calculations, so pbc is not
considered.
    Each element is sorted in the algorithm to help prevent new hashes for
    identical geometries.
"""
atoms.wrap()
pos = atoms.get_positions()
# Sort the cell array by magnitude of z, y, x coordinates, in that order
cell = np.array(sorted(atoms.get_cell(),
key=lambda x: (x[2], x[1], x[0])))
# Flatten the array and return a string of numbers only
# We only consider position changes up to 3 decimal places
cell_hash = np.array_str(np.ndarray.flatten(cell.round(3)))
cell_hash = ''.join(cell_hash.strip('[]').split()).replace('.', '')
# Sort the atoms positions similarly, but store the sorting order
pos = atoms.get_positions()
srt = [i for i, _ in sorted(enumerate(pos),
key=lambda x: (x[1][2], x[1][1], x[1][0]))]
pos_hash = np.array_str(np.ndarray.flatten(pos[srt].round(3)))
pos_hash = ''.join(pos_hash.strip('[]').split()).replace('.', '')
# Create a symbols hash in the same fashion conserving position sort order
sym = np.array(atoms.get_atomic_numbers())[srt]
sym_hash = np.array_str(np.ndarray.flatten(sym))
sym_hash = ''.join(sym_hash.strip('[]').split())
# Assemble a master hash and convert it through an md5
master_hash = cell_hash + pos_hash + sym_hash
    md5 = hashlib.md5(master_hash.encode('utf-8'))  # hashlib requires bytes in Python 3
_hash = md5.hexdigest()
return _hash
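
if __name__ == "__main__":
    # Minimal sketch of holdout_set and target_correlation on hypothetical data
    # (illustration only; not part of the CatLearn API docs).
    X = np.arange(20).reshape(10, 2)
    y = list(range(10))
    train, train_target, holdout, test_target = holdout_set(
        X, fraction=0.2, target=y, seed=1)
    print(train.shape, holdout.shape)  # (8, 2) (2, 2)
    corr = target_correlation(train, train_target)
    print(np.shape(corr))  # (3, 2): one row per correlation method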
/realms-wiki-0.9.3.tar.gz/realms-wiki-0.9.3/realms/static/vendor/ace-builds/src/theme-textmate.js
define("ace/theme/textmate",["require","exports","module","ace/lib/dom"], function(require, exports, module) {
"use strict";
exports.isDark = false;
exports.cssClass = "ace-tm";
exports.cssText = ".ace-tm .ace_gutter {\
background: #f0f0f0;\
color: #333;\
}\
.ace-tm .ace_print-margin {\
width: 1px;\
background: #e8e8e8;\
}\
.ace-tm .ace_fold {\
background-color: #6B72E6;\
}\
.ace-tm {\
background-color: #FFFFFF;\
color: black;\
}\
.ace-tm .ace_cursor {\
color: black;\
}\
.ace-tm .ace_invisible {\
color: rgb(191, 191, 191);\
}\
.ace-tm .ace_storage,\
.ace-tm .ace_keyword {\
color: blue;\
}\
.ace-tm .ace_constant {\
color: rgb(197, 6, 11);\
}\
.ace-tm .ace_constant.ace_buildin {\
color: rgb(88, 72, 246);\
}\
.ace-tm .ace_constant.ace_language {\
color: rgb(88, 92, 246);\
}\
.ace-tm .ace_constant.ace_library {\
color: rgb(6, 150, 14);\
}\
.ace-tm .ace_invalid {\
background-color: rgba(255, 0, 0, 0.1);\
color: red;\
}\
.ace-tm .ace_support.ace_function {\
color: rgb(60, 76, 114);\
}\
.ace-tm .ace_support.ace_constant {\
color: rgb(6, 150, 14);\
}\
.ace-tm .ace_support.ace_type,\
.ace-tm .ace_support.ace_class {\
color: rgb(109, 121, 222);\
}\
.ace-tm .ace_keyword.ace_operator {\
color: rgb(104, 118, 135);\
}\
.ace-tm .ace_string {\
color: rgb(3, 106, 7);\
}\
.ace-tm .ace_comment {\
color: rgb(76, 136, 107);\
}\
.ace-tm .ace_comment.ace_doc {\
color: rgb(0, 102, 255);\
}\
.ace-tm .ace_comment.ace_doc.ace_tag {\
color: rgb(128, 159, 191);\
}\
.ace-tm .ace_constant.ace_numeric {\
color: rgb(0, 0, 205);\
}\
.ace-tm .ace_variable {\
color: rgb(49, 132, 149);\
}\
.ace-tm .ace_xml-pe {\
color: rgb(104, 104, 91);\
}\
.ace-tm .ace_entity.ace_name.ace_function {\
color: #0000A2;\
}\
.ace-tm .ace_heading {\
color: rgb(12, 7, 255);\
}\
.ace-tm .ace_list {\
color:rgb(185, 6, 144);\
}\
.ace-tm .ace_meta.ace_tag {\
color:rgb(0, 22, 142);\
}\
.ace-tm .ace_string.ace_regex {\
color: rgb(255, 0, 0)\
}\
.ace-tm .ace_marker-layer .ace_selection {\
background: rgb(181, 213, 255);\
}\
.ace-tm.ace_multiselect .ace_selection.ace_start {\
box-shadow: 0 0 3px 0px white;\
}\
.ace-tm .ace_marker-layer .ace_step {\
background: rgb(252, 255, 0);\
}\
.ace-tm .ace_marker-layer .ace_stack {\
background: rgb(164, 229, 101);\
}\
.ace-tm .ace_marker-layer .ace_bracket {\
margin: -1px 0 0 -1px;\
border: 1px solid rgb(192, 192, 192);\
}\
.ace-tm .ace_marker-layer .ace_active-line {\
background: rgba(0, 0, 0, 0.07);\
}\
.ace-tm .ace_gutter-active-line {\
background-color : #dcdcdc;\
}\
.ace-tm .ace_marker-layer .ace_selected-word {\
background: rgb(250, 250, 255);\
border: 1px solid rgb(200, 200, 250);\
}\
.ace-tm .ace_indent-guide {\
background: url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAACCAYAAACZgbYnAAAAE0lEQVQImWP4////f4bLly//BwAmVgd1/w11/gAAAABJRU5ErkJggg==\") right repeat-y;\
}\
";
var dom = require("../lib/dom");
dom.importCssString(exports.cssText, exports.cssClass);
});
/binalyzer_template_provider-1.0.3.tar.gz/binalyzer_template_provider-1.0.3/CHANGELOG.md
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [v1.0.3] - 2022-10-13
- Use Python 3.9 for CI pipeline
- Added tests for relative offset references
## [v1.0.2] - 2021-08-18
- Added tests for auto-sizing using the text attribute
## [v1.0.1] - 2021-07-22
- Added `text` attribute:
- The `text` property has been added to provide a way of storing static
binary data with a template.
## [v1.0.0] - 2021-04-28
- Initial release
[v1.0.0]: https://github.com/denisvasilik/binalyzer-template-provider/tree/v1.0.0
[v1.0.1]: https://github.com/denisvasilik/binalyzer-template-provider/tree/v1.0.1
[v1.0.2]: https://github.com/denisvasilik/binalyzer-template-provider/tree/v1.0.2
[v1.0.3]: https://github.com/denisvasilik/binalyzer-template-provider/tree/v1.0.3
/cookiepress-0.1.5.tar.gz/cookiepress-0.1.5/README.rst
===========
CookiePress
===========
.. image:: https://img.shields.io/pypi/v/cookiepress.svg
:target: https://pypi.python.org/pypi/cookiepress
.. image:: https://img.shields.io/travis/com/elgertam/cookiepress.svg
:target: https://travis-ci.com/elgertam/cookiepress
.. image:: https://readthedocs.org/projects/cookiepress/badge/?version=latest
:target: https://cookiepress.readthedocs.io/en/latest/?badge=latest
:alt: Documentation Status
.. image:: https://img.shields.io/badge/donate-PayPal-blue.svg?logo=paypal
   :target: https://www.paypal.com/cgi-bin/webscr?cmd=_donations&business=CPTWMKWMVXDQW&currency_code=USD&source=url
:alt: Donate to Andrew Elgert
A project scaffold generator that moves beyond mere templates.
* Free software: BSD license
* Documentation: https://cookiepress.readthedocs.io.
Features
--------
* None yet. Very early project.
Credits
-------
This package was created with Cookiecutter_ and the `elgertam/cookiecutter-pipenv`_ project template, based on `audreyr/cookiecutter-pypackage`_.
.. _Cookiecutter: https://github.com/audreyr/cookiecutter
.. _`elgertam/cookiecutter-pipenv`: https://github.com/elgertam/cookiecutter-pipenv
.. _`audreyr/cookiecutter-pypackage`: https://github.com/audreyr/cookiecutter-pypackage
/csle_common-0.3.8.tar.gz/csle_common-0.3.8/src/csle_common/dao/jobs/data_collection_job_config.py
from typing import Dict, Any, List
from csle_common.dao.emulation_action.attacker.emulation_attacker_action import EmulationAttackerAction
from csle_common.dao.emulation_action.defender.emulation_defender_action import EmulationDefenderAction
from csle_common.dao.emulation_config.emulation_trace import EmulationTrace
from csle_base.json_serializable import JSONSerializable
class DataCollectionJobConfig(JSONSerializable):
"""
DTO representing the configuration of a data collection job
"""
def __init__(self, emulation_env_name: str, num_collected_steps: int,
progress_percentage: float, attacker_sequence: List[EmulationAttackerAction], pid: int,
repeat_times: int, emulation_statistic_id: int, num_sequences_completed: int,
traces: List[EmulationTrace], save_emulation_traces_every: int, num_cached_traces: int,
defender_sequence: List[EmulationDefenderAction], log_file_path: str, physical_host_ip: str,
descr: str = ""):
"""
Initializes the DTO
:param emulation_env_name: the emulation environment name
:param num_collected_steps: number of collected steps in the emulation
:param num_sequences_completed: number of sequences completed
:param progress_percentage: the progress of the data collection job in %
:param attacker_sequence: the sequence of actions to emulate the attacker
:param defender_sequence: the sequence of actions to emulate the defender
:param repeat_times: the number of times to repeat the sequences
:param traces: list of collected emulation traces
:param descr: description of the job
:param emulation_statistic_id: the id of the emulation statistic
:param save_emulation_traces_every: the frequency to save emulation traces to the metastore
        :param num_cached_traces: the number of emulation traces to keep with the job metadata
        :param pid: the pid of the process running the job
        :param log_file_path: the path to the job's log file
        :param physical_host_ip: the IP of the physical host where the job is running
        """
        self.emulation_env_name = emulation_env_name
        self.progress_percentage = round(progress_percentage, 3)
        self.pid = pid
        self.num_collected_steps = num_collected_steps
self.attacker_sequence = attacker_sequence
self.defender_sequence = defender_sequence
self.id = -1
self.running = False
self.descr = descr
self.repeat_times = repeat_times
self.emulation_statistic_id = emulation_statistic_id
self.num_sequences_completed = num_sequences_completed
self.traces = traces
self.save_emulation_traces_every = save_emulation_traces_every
self.num_cached_traces = num_cached_traces
self.log_file_path = log_file_path
self.physical_host_ip = physical_host_ip
def to_dict(self) -> Dict[str, Any]:
"""
Converts the object to a dict representation
:return: a dict representation of the object
"""
d: Dict[str, Any] = {}
d["emulation_env_name"] = self.emulation_env_name
d["progress_percentage"] = round(self.progress_percentage, 2)
d["pid"] = self.pid
d["num_collected_steps"] = self.num_collected_steps
d["progress_percentage"] = self.progress_percentage
d["attacker_sequence"] = list(map(lambda x: x.to_dict(), self.attacker_sequence))
d["defender_sequence"] = list(map(lambda x: x.to_dict(), self.defender_sequence))
d["id"] = self.id
d["running"] = self.running
d["descr"] = self.descr
d["repeat_times"] = self.repeat_times
d["emulation_statistic_id"] = self.emulation_statistic_id
d["traces"] = list(map(lambda x: x.to_dict(), self.traces))
d["num_sequences_completed"] = self.num_sequences_completed
d["save_emulation_traces_every"] = self.save_emulation_traces_every
d["num_cached_traces"] = self.num_cached_traces
d["log_file_path"] = self.log_file_path
d["physical_host_ip"] = self.physical_host_ip
return d
@staticmethod
def from_dict(d: Dict[str, Any]) -> "DataCollectionJobConfig":
"""
Converts a dict representation of the object to an instance
:param d: the dict to convert
:return: the created instance
"""
obj = DataCollectionJobConfig(
emulation_env_name=d["emulation_env_name"], pid=d["pid"], num_collected_steps=d["num_collected_steps"],
progress_percentage=d["progress_percentage"],
attacker_sequence=list(map(lambda x: EmulationAttackerAction.from_dict(x), d["attacker_sequence"])),
defender_sequence=list(map(lambda x: EmulationDefenderAction.from_dict(x), d["defender_sequence"])),
descr=d["descr"], repeat_times=d["repeat_times"], emulation_statistic_id=d["emulation_statistic_id"],
traces=list(map(lambda x: EmulationTrace.from_dict(x), d["traces"])),
num_sequences_completed=d["num_sequences_completed"],
save_emulation_traces_every=d["save_emulation_traces_every"], num_cached_traces=d["num_cached_traces"],
log_file_path=d["log_file_path"], physical_host_ip=d["physical_host_ip"])
obj.id = d["id"]
obj.running = d["running"]
return obj
def __str__(self) -> str:
"""
:return: a string representation of the object
"""
return f"emulation_env_name: {self.emulation_env_name}, pid: {self.pid}, " \
f"progress_percentage: {self.progress_percentage}, " \
f"attacker_sequence={list(map(lambda x: str(x), self.attacker_sequence))}," \
f"defender_sequence={list(map(lambda x: str(x), self.defender_sequence))}, id: {self.id}," \
f"running:{self.running}, descr: {self.descr}, repeat_times: {self.repeat_times}," \
f"emulation_statistic_id: {self.emulation_statistic_id}, " \
f"num_sequences_completed: {self.num_sequences_completed}, " \
f"traces: {list(map(lambda x: str(x), self.traces))}, " \
f"save_emulation_traces_every: {self.save_emulation_traces_every}, " \
f"num_cached_traces: {self.num_cached_traces}, log_file_path: {self.log_file_path}, " \
f"physical_host_ip: {self.physical_host_ip}"
@staticmethod
def from_json_file(json_file_path: str) -> "DataCollectionJobConfig":
"""
Reads a json file and converts it to a DTO
:param json_file_path: the json file path
:return: the converted DTO
"""
import io
import json
with io.open(json_file_path, 'r') as f:
json_str = f.read()
return DataCollectionJobConfig.from_dict(json.loads(json_str))
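
if __name__ == "__main__":
    # Minimal round-trip sketch with hypothetical values and empty action and
    # trace lists (illustration only; "csle-level1" is a made-up environment name).
    job = DataCollectionJobConfig(
        emulation_env_name="csle-level1", num_collected_steps=0,
        progress_percentage=0.0, attacker_sequence=[], pid=1234, repeat_times=1,
        emulation_statistic_id=-1, num_sequences_completed=0, traces=[],
        save_emulation_traces_every=10, num_cached_traces=5, defender_sequence=[],
        log_file_path="/tmp/job.log", physical_host_ip="127.0.0.1")
    restored = DataCollectionJobConfig.from_dict(job.to_dict())
    print(restored)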
/nni_daily-1.5.2005180104-py3-none-manylinux1_x86_64.whl/nni_daily-1.5.2005180104.data/data/nni/node_modules/moment/locale/en-ie.js
;(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined'
&& typeof require === 'function' ? factory(require('../moment')) :
typeof define === 'function' && define.amd ? define(['../moment'], factory) :
factory(global.moment)
}(this, (function (moment) { 'use strict';
//! moment.js locale configuration
var enIe = moment.defineLocale('en-ie', {
months: 'January_February_March_April_May_June_July_August_September_October_November_December'.split(
'_'
),
monthsShort: 'Jan_Feb_Mar_Apr_May_Jun_Jul_Aug_Sep_Oct_Nov_Dec'.split('_'),
weekdays: 'Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday'.split(
'_'
),
weekdaysShort: 'Sun_Mon_Tue_Wed_Thu_Fri_Sat'.split('_'),
weekdaysMin: 'Su_Mo_Tu_We_Th_Fr_Sa'.split('_'),
longDateFormat: {
LT: 'HH:mm',
LTS: 'HH:mm:ss',
L: 'DD/MM/YYYY',
LL: 'D MMMM YYYY',
LLL: 'D MMMM YYYY HH:mm',
LLLL: 'dddd D MMMM YYYY HH:mm',
},
calendar: {
sameDay: '[Today at] LT',
nextDay: '[Tomorrow at] LT',
nextWeek: 'dddd [at] LT',
lastDay: '[Yesterday at] LT',
lastWeek: '[Last] dddd [at] LT',
sameElse: 'L',
},
relativeTime: {
future: 'in %s',
past: '%s ago',
s: 'a few seconds',
ss: '%d seconds',
m: 'a minute',
mm: '%d minutes',
h: 'an hour',
hh: '%d hours',
d: 'a day',
dd: '%d days',
M: 'a month',
MM: '%d months',
y: 'a year',
yy: '%d years',
},
dayOfMonthOrdinalParse: /\d{1,2}(st|nd|rd|th)/,
ordinal: function (number) {
var b = number % 10,
output =
~~((number % 100) / 10) === 1
? 'th'
: b === 1
? 'st'
: b === 2
? 'nd'
: b === 3
? 'rd'
: 'th';
return number + output;
},
week: {
dow: 1, // Monday is the first day of the week.
doy: 4, // The week that contains Jan 4th is the first week of the year.
},
});
return enIe;
})));
/chess_tuning_tools-0.9.5-py3-none-any.whl/tune/priors.py
import warnings
from typing import Callable, List
import numpy as np
from scipy.optimize import curve_fit
from scipy.stats import halfnorm, invgamma
from scipy.stats._distn_infrastructure import rv_frozen # noqa
__all__ = ["make_invgamma_prior", "roundflat", "create_priors"]
def roundflat(x, a_low=2.0, a_high=8.0, d_low=0.005, d_high=1.2):
"""Return the log probability of the round flat prior.
The round flat prior is completely uninformative inside the interval bounds
``d_low`` and ``d_high`` while smoothly going to -inf for values outside.
``a_low`` and ``a_high`` specify how quickly the density falls at the boundaries.
Args:
x (float): A parameter value in [0, inf) for which to compute the log
probability
a_low (float): Steepness of the prior at the boundary ``d_low``.
a_high (float): Steepness of the prior at the boundary ``d_high``.
d_low (float): Lower boundary for which the log probability is -2.
d_high (float): Upper boundary for which the log probability is -2.
Returns:
The log probability for x.
"""
if x <= 0:
return -np.inf
return -2 * ((x / d_low) ** (-2 * a_low) + (x / d_high) ** (2 * a_high))
def make_invgamma_prior(
lower_bound: float = 0.1, upper_bound: float = 0.5
) -> rv_frozen:
"""Create an inverse gamma distribution prior with 98% density inside the bounds.
Not all combinations of (lower_bound, upper_bound) are feasible and some of them
could result in a RuntimeError.
Parameters
----------
lower_bound : float, default=0.1
Lower bound at which 1 % of the cumulative density is reached.
upper_bound : float, default=0.5
Upper bound at which 99 % of the cumulative density is reached.
Returns
-------
scipy.stats._distn_infrastructure.rv_frozen
The frozen distribution with shape parameters already set.
Raises
------
ValueError
Either if any of the bounds is 0 or negative, or if the upper bound is equal or
smaller than the lower bound.
"""
if lower_bound <= 0 or upper_bound <= 0:
raise ValueError("The bounds cannot be equal to or smaller than 0.")
if lower_bound >= upper_bound:
raise ValueError(
"Lower bound needs to be strictly smaller than the upper " "bound."
)
with warnings.catch_warnings():
warnings.simplefilter("ignore")
(a_out, scale_out), pcov = curve_fit(
lambda xdata, a, scale: invgamma.ppf(xdata, a=a, scale=scale),
[0.01, 0.99],
[lower_bound, upper_bound],
)
return invgamma(a=a_out, scale=scale_out)
def create_priors(
n_parameters: int,
signal_scale: float = 4.0,
lengthscale_lower_bound: float = 0.1,
lengthscale_upper_bound: float = 0.5,
noise_scale: float = 0.0006,
) -> List[Callable[[float], float]]:
"""Create a list of priors to be used for the hyperparameters of the tuning process.
Parameters
----------
n_parameters : int
Number of parameters to be optimized.
signal_scale : float
Prior scale of the signal (standard deviation) which is used to parametrize a
half-normal distribution.
lengthscale_lower_bound : float
Lower bound of the inverse-gamma lengthscale prior. It marks the point at which
1 % of the cumulative density is reached.
lengthscale_upper_bound : float
Upper bound of the inverse-gamma lengthscale prior. It marks the point at which
99 % of the cumulative density is reached.
noise_scale : float
Prior scale of the noise (standard deviation) which is used to parametrize a
half-normal distribution.
Returns
-------
list of callables
List of priors in the following order:
- signal prior
- lengthscale prior (n_parameters times)
- noise prior
"""
if signal_scale <= 0.0:
raise ValueError(
f"The signal scale needs to be strictly positive. Got {signal_scale}."
)
if noise_scale <= 0.0:
raise ValueError(
f"The noise scale needs to be strictly positive. Got {noise_scale}."
)
signal_prior = halfnorm(scale=signal_scale)
lengthscale_prior = make_invgamma_prior(
lower_bound=lengthscale_lower_bound, upper_bound=lengthscale_upper_bound
)
noise_prior = halfnorm(scale=noise_scale)
priors = [lambda x: signal_prior.logpdf(np.sqrt(np.exp(x))) + x / 2.0 - np.log(2.0)]
for _ in range(n_parameters):
priors.append(lambda x: lengthscale_prior.logpdf(np.exp(x)) + x)
priors.append(
lambda x: noise_prior.logpdf(np.sqrt(np.exp(x))) + x / 2.0 - np.log(2.0)
)
return priors
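
if __name__ == "__main__":
    # Minimal sketch (assumed usage, not from the package docs): the returned
    # priors are evaluated at log-transformed hyperparameters, here log signal
    # variance, two log lengthscales and log noise variance.
    print(roundflat(0.5))  # log-density inside the flat region (close to 0)
    priors = create_priors(n_parameters=2)
    theta = np.log([4.0 ** 2, 0.3, 0.3, 0.0006 ** 2])
    print([float(p(t)) for p, t in zip(priors, theta)])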
/audio.index-1.0.4.tar.gz/audio.index-1.0.4/.lib/setuptools/command/easy_install.py
from glob import glob
from distutils.util import get_platform
from distutils.util import convert_path, subst_vars
from distutils.errors import DistutilsArgError, DistutilsOptionError, \
DistutilsError, DistutilsPlatformError
from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS
from distutils import log, dir_util
from distutils.command.build_scripts import first_line_re
import sys
import os
import zipimport
import shutil
import tempfile
import zipfile
import re
import stat
import random
import platform
import textwrap
import warnings
import site
import struct
from setuptools import Command, _dont_write_bytecode
from setuptools.sandbox import run_setup
from setuptools.py31compat import get_path, get_config_vars
from setuptools.command import setopt
from setuptools.archive_util import unpack_archive
from setuptools.package_index import PackageIndex
from setuptools.package_index import URL_SCHEME
from setuptools.command import bdist_egg, egg_info
from setuptools.compat import (iteritems, maxsize, basestring, unicode,
reraise, PY2, PY3)
from pkg_resources import (
yield_lines, normalize_path, resource_string, ensure_directory,
get_distribution, find_distributions, Environment, Requirement,
Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound,
VersionConflict, DEVELOP_DIST,
)
import pkg_resources
sys_executable = os.environ.get('__PYVENV_LAUNCHER__',
os.path.normpath(sys.executable))
__all__ = [
'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg',
'main', 'get_exe_prefixes',
]
def is_64bit():
return struct.calcsize("P") == 8
def samefile(p1, p2):
both_exist = os.path.exists(p1) and os.path.exists(p2)
use_samefile = hasattr(os.path, 'samefile') and both_exist
if use_samefile:
return os.path.samefile(p1, p2)
norm_p1 = os.path.normpath(os.path.normcase(p1))
norm_p2 = os.path.normpath(os.path.normcase(p2))
return norm_p1 == norm_p2
if PY2:
def _to_ascii(s):
return s
def isascii(s):
try:
unicode(s, 'ascii')
return True
except UnicodeError:
return False
else:
def _to_ascii(s):
return s.encode('ascii')
def isascii(s):
try:
s.encode('ascii')
return True
except UnicodeError:
return False
class easy_install(Command):
"""Manage a download/build/install process"""
description = "Find/get/install Python packages"
command_consumes_arguments = True
user_options = [
('prefix=', None, "installation prefix"),
("zip-ok", "z", "install package as a zipfile"),
("multi-version", "m", "make apps have to require() a version"),
("upgrade", "U", "force upgrade (searches PyPI for latest versions)"),
("install-dir=", "d", "install package to DIR"),
("script-dir=", "s", "install scripts to DIR"),
("exclude-scripts", "x", "Don't install scripts"),
("always-copy", "a", "Copy all needed packages to install dir"),
("index-url=", "i", "base URL of Python Package Index"),
("find-links=", "f", "additional URL(s) to search for packages"),
("build-directory=", "b",
"download/extract/build in DIR; keep the results"),
('optimize=', 'O',
"also compile with optimization: -O1 for \"python -O\", "
"-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
('record=', None,
"filename in which to record list of installed files"),
('always-unzip', 'Z', "don't install as a zipfile, no matter what"),
('site-dirs=', 'S', "list of directories where .pth files work"),
('editable', 'e', "Install specified packages in editable form"),
('no-deps', 'N', "don't install dependencies"),
('allow-hosts=', 'H', "pattern(s) that hostnames must match"),
('local-snapshots-ok', 'l',
"allow building eggs from local checkouts"),
('version', None, "print version information and exit"),
('no-find-links', None,
"Don't load find-links defined in packages being installed")
]
boolean_options = [
'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy',
'editable',
'no-deps', 'local-snapshots-ok', 'version'
]
if site.ENABLE_USER_SITE:
help_msg = "install in user site-package '%s'" % site.USER_SITE
user_options.append(('user', None, help_msg))
boolean_options.append('user')
negative_opt = {'always-unzip': 'zip-ok'}
create_index = PackageIndex
def initialize_options(self):
if site.ENABLE_USER_SITE:
whereami = os.path.abspath(__file__)
self.user = whereami.startswith(site.USER_SITE)
else:
self.user = 0
self.zip_ok = self.local_snapshots_ok = None
self.install_dir = self.script_dir = self.exclude_scripts = None
self.index_url = None
self.find_links = None
self.build_directory = None
self.args = None
self.optimize = self.record = None
self.upgrade = self.always_copy = self.multi_version = None
self.editable = self.no_deps = self.allow_hosts = None
self.root = self.prefix = self.no_report = None
self.version = None
self.install_purelib = None # for pure module distributions
self.install_platlib = None # non-pure (dists w/ extensions)
self.install_headers = None # for C/C++ headers
self.install_lib = None # set to either purelib or platlib
self.install_scripts = None
self.install_data = None
self.install_base = None
self.install_platbase = None
if site.ENABLE_USER_SITE:
self.install_userbase = site.USER_BASE
self.install_usersite = site.USER_SITE
else:
self.install_userbase = None
self.install_usersite = None
self.no_find_links = None
# Options not specifiable via command line
self.package_index = None
self.pth_file = self.always_copy_from = None
self.site_dirs = None
self.installed_projects = {}
self.sitepy_installed = False
# Always read easy_install options, even if we are subclassed, or have
# an independent instance created. This ensures that defaults will
# always come from the standard configuration file(s)' "easy_install"
# section, even if this is a "develop" or "install" command, or some
# other embedding.
self._dry_run = None
self.verbose = self.distribution.verbose
self.distribution._set_command_options(
self, self.distribution.get_option_dict('easy_install')
)
def delete_blockers(self, blockers):
for filename in blockers:
if os.path.exists(filename) or os.path.islink(filename):
log.info("Deleting %s", filename)
if not self.dry_run:
if (os.path.isdir(filename) and
not os.path.islink(filename)):
rmtree(filename)
else:
os.unlink(filename)
def finalize_options(self):
if self.version:
print('setuptools %s' % get_distribution('setuptools').version)
sys.exit()
py_version = sys.version.split()[0]
prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix')
self.config_vars = {
'dist_name': self.distribution.get_name(),
'dist_version': self.distribution.get_version(),
'dist_fullname': self.distribution.get_fullname(),
'py_version': py_version,
'py_version_short': py_version[0:3],
'py_version_nodot': py_version[0] + py_version[2],
'sys_prefix': prefix,
'prefix': prefix,
'sys_exec_prefix': exec_prefix,
'exec_prefix': exec_prefix,
# Only python 3.2+ has abiflags
'abiflags': getattr(sys, 'abiflags', ''),
}
if site.ENABLE_USER_SITE:
self.config_vars['userbase'] = self.install_userbase
self.config_vars['usersite'] = self.install_usersite
# fix the install_dir if "--user" was used
# XXX: duplicate of the code in the setup command
if self.user and site.ENABLE_USER_SITE:
self.create_home_path()
if self.install_userbase is None:
raise DistutilsPlatformError(
"User base directory is not specified")
self.install_base = self.install_platbase = self.install_userbase
if os.name == 'posix':
self.select_scheme("unix_user")
else:
self.select_scheme(os.name + "_user")
self.expand_basedirs()
self.expand_dirs()
self._expand('install_dir', 'script_dir', 'build_directory',
'site_dirs')
# If a non-default installation directory was specified, default the
# script directory to match it.
if self.script_dir is None:
self.script_dir = self.install_dir
if self.no_find_links is None:
self.no_find_links = False
# Let install_dir get set by install_lib command, which in turn
# gets its info from the install command, and takes into account
# --prefix and --home and all that other crud.
self.set_undefined_options(
'install_lib', ('install_dir', 'install_dir')
)
# Likewise, set default script_dir from 'install_scripts.install_dir'
self.set_undefined_options(
'install_scripts', ('install_dir', 'script_dir')
)
if self.user and self.install_purelib:
self.install_dir = self.install_purelib
self.script_dir = self.install_scripts
# default --record from the install command
self.set_undefined_options('install', ('record', 'record'))
# Should this be moved to the if statement below? It's not used
# elsewhere
        normpath = list(map(normalize_path, sys.path))  # list, not iterator: reused in the loop below
self.all_site_dirs = get_site_dirs()
if self.site_dirs is not None:
site_dirs = [
os.path.expanduser(s.strip()) for s in
self.site_dirs.split(',')
]
for d in site_dirs:
if not os.path.isdir(d):
log.warn("%s (in --site-dirs) does not exist", d)
elif normalize_path(d) not in normpath:
raise DistutilsOptionError(
d + " (in --site-dirs) is not on sys.path"
)
else:
self.all_site_dirs.append(normalize_path(d))
if not self.editable:
self.check_site_dir()
self.index_url = self.index_url or "https://pypi.python.org/simple"
self.shadow_path = self.all_site_dirs[:]
for path_item in self.install_dir, normalize_path(self.script_dir):
if path_item not in self.shadow_path:
self.shadow_path.insert(0, path_item)
if self.allow_hosts is not None:
hosts = [s.strip() for s in self.allow_hosts.split(',')]
else:
hosts = ['*']
if self.package_index is None:
self.package_index = self.create_index(
self.index_url, search_path=self.shadow_path, hosts=hosts,
)
self.local_index = Environment(self.shadow_path + sys.path)
if self.find_links is not None:
if isinstance(self.find_links, basestring):
self.find_links = self.find_links.split()
else:
self.find_links = []
if self.local_snapshots_ok:
self.package_index.scan_egg_links(self.shadow_path + sys.path)
if not self.no_find_links:
self.package_index.add_find_links(self.find_links)
self.set_undefined_options('install_lib', ('optimize', 'optimize'))
if not isinstance(self.optimize, int):
try:
self.optimize = int(self.optimize)
if not (0 <= self.optimize <= 2):
raise ValueError
except ValueError:
raise DistutilsOptionError("--optimize must be 0, 1, or 2")
if self.editable and not self.build_directory:
raise DistutilsArgError(
"Must specify a build directory (-b) when using --editable"
)
if not self.args:
raise DistutilsArgError(
"No urls, filenames, or requirements specified (see --help)")
self.outputs = []
def _expand_attrs(self, attrs):
for attr in attrs:
val = getattr(self, attr)
if val is not None:
if os.name == 'posix' or os.name == 'nt':
val = os.path.expanduser(val)
val = subst_vars(val, self.config_vars)
setattr(self, attr, val)
def expand_basedirs(self):
"""Calls `os.path.expanduser` on install_base, install_platbase and
root."""
self._expand_attrs(['install_base', 'install_platbase', 'root'])
def expand_dirs(self):
"""Calls `os.path.expanduser` on install dirs."""
self._expand_attrs(['install_purelib', 'install_platlib',
'install_lib', 'install_headers',
'install_scripts', 'install_data', ])
def run(self):
if self.verbose != self.distribution.verbose:
log.set_verbosity(self.verbose)
try:
for spec in self.args:
self.easy_install(spec, not self.no_deps)
if self.record:
outputs = self.outputs
if self.root: # strip any package prefix
root_len = len(self.root)
for counter in range(len(outputs)):
outputs[counter] = outputs[counter][root_len:]
from distutils import file_util
self.execute(
file_util.write_file, (self.record, outputs),
"writing list of installed files to '%s'" %
self.record
)
self.warn_deprecated_options()
finally:
log.set_verbosity(self.distribution.verbose)
def pseudo_tempname(self):
"""Return a pseudo-tempname base in the install directory.
This code is intentionally naive; if a malicious party can write to
the target directory you're already in deep doodoo.
"""
try:
pid = os.getpid()
except:
pid = random.randint(0, maxsize)
return os.path.join(self.install_dir, "test-easy-install-%s" % pid)
def warn_deprecated_options(self):
pass
def check_site_dir(self):
"""Verify that self.install_dir is .pth-capable dir, if needed"""
instdir = normalize_path(self.install_dir)
pth_file = os.path.join(instdir, 'easy-install.pth')
# Is it a configured, PYTHONPATH, implicit, or explicit site dir?
is_site_dir = instdir in self.all_site_dirs
if not is_site_dir and not self.multi_version:
# No? Then directly test whether it does .pth file processing
is_site_dir = self.check_pth_processing()
else:
# make sure we can write to target dir
testfile = self.pseudo_tempname() + '.write-test'
test_exists = os.path.exists(testfile)
try:
if test_exists:
os.unlink(testfile)
open(testfile, 'w').close()
os.unlink(testfile)
except (OSError, IOError):
self.cant_write_to_target()
if not is_site_dir and not self.multi_version:
# Can't install non-multi to non-site dir
raise DistutilsError(self.no_default_version_msg())
if is_site_dir:
if self.pth_file is None:
self.pth_file = PthDistributions(pth_file, self.all_site_dirs)
else:
self.pth_file = None
PYTHONPATH = os.environ.get('PYTHONPATH', '').split(os.pathsep)
if instdir not in map(normalize_path, [_f for _f in PYTHONPATH if _f]):
# only PYTHONPATH dirs need a site.py, so pretend it's there
self.sitepy_installed = True
elif self.multi_version and not os.path.exists(pth_file):
self.sitepy_installed = True # don't need site.py in this case
self.pth_file = None # and don't create a .pth file
self.install_dir = instdir
def cant_write_to_target(self):
template = """can't create or remove files in install directory
The following error occurred while trying to add or remove files in the
installation directory:
%s
The installation directory you specified (via --install-dir, --prefix, or
the distutils default setting) was:
%s
"""
msg = template % (sys.exc_info()[1], self.install_dir,)
if not os.path.exists(self.install_dir):
msg += """
This directory does not currently exist. Please create it and try again, or
choose a different installation directory (using the -d or --install-dir
option).
"""
else:
msg += """
Perhaps your account does not have write access to this directory? If the
installation directory is a system-owned directory, you may need to sign in
as the administrator or "root" account. If you do not have administrative
access to this machine, you may wish to choose a different installation
directory, preferably one that is listed in your PYTHONPATH environment
variable.
For information on other options, you may wish to consult the
documentation at:
https://pythonhosted.org/setuptools/easy_install.html
Please make the appropriate changes for your system and try again.
"""
raise DistutilsError(msg)
def check_pth_processing(self):
"""Empirically verify whether .pth files are supported in inst. dir"""
instdir = self.install_dir
log.info("Checking .pth file support in %s", instdir)
pth_file = self.pseudo_tempname() + ".pth"
ok_file = pth_file + '.ok'
ok_exists = os.path.exists(ok_file)
try:
if ok_exists:
os.unlink(ok_file)
dirname = os.path.dirname(ok_file)
if not os.path.exists(dirname):
os.makedirs(dirname)
f = open(pth_file, 'w')
except (OSError, IOError):
self.cant_write_to_target()
else:
try:
f.write("import os; f = open(%r, 'w'); f.write('OK'); "
"f.close()\n" % (ok_file,))
f.close()
f = None
executable = sys.executable
if os.name == 'nt':
dirname, basename = os.path.split(executable)
alt = os.path.join(dirname, 'pythonw.exe')
if (basename.lower() == 'python.exe' and
os.path.exists(alt)):
# use pythonw.exe to avoid opening a console window
executable = alt
from distutils.spawn import spawn
spawn([executable, '-E', '-c', 'pass'], 0)
if os.path.exists(ok_file):
log.info(
"TEST PASSED: %s appears to support .pth files",
instdir
)
return True
finally:
if f:
f.close()
if os.path.exists(ok_file):
os.unlink(ok_file)
if os.path.exists(pth_file):
os.unlink(pth_file)
if not self.multi_version:
log.warn("TEST FAILED: %s does NOT support .pth files", instdir)
return False
def install_egg_scripts(self, dist):
"""Write all the scripts for `dist`, unless scripts are excluded"""
if not self.exclude_scripts and dist.metadata_isdir('scripts'):
for script_name in dist.metadata_listdir('scripts'):
if dist.metadata_isdir('scripts/' + script_name):
# The "script" is a directory, likely a Python 3
# __pycache__ directory, so skip it.
continue
self.install_script(
dist, script_name,
dist.get_metadata('scripts/' + script_name)
)
self.install_wrapper_scripts(dist)
def add_output(self, path):
if os.path.isdir(path):
for base, dirs, files in os.walk(path):
for filename in files:
self.outputs.append(os.path.join(base, filename))
else:
self.outputs.append(path)
def not_editable(self, spec):
if self.editable:
raise DistutilsArgError(
"Invalid argument %r: you can't use filenames or URLs "
"with --editable (except via the --find-links option)."
% (spec,)
)
def check_editable(self, spec):
if not self.editable:
return
if os.path.exists(os.path.join(self.build_directory, spec.key)):
raise DistutilsArgError(
"%r already exists in %s; can't do a checkout there" %
(spec.key, self.build_directory)
)
def easy_install(self, spec, deps=False):
tmpdir = tempfile.mkdtemp(prefix="easy_install-")
download = None
if not self.editable:
self.install_site_py()
try:
if not isinstance(spec, Requirement):
if URL_SCHEME(spec):
# It's a url, download it to tmpdir and process
self.not_editable(spec)
download = self.package_index.download(spec, tmpdir)
return self.install_item(None, download, tmpdir, deps,
True)
elif os.path.exists(spec):
# Existing file or directory, just process it directly
self.not_editable(spec)
return self.install_item(None, spec, tmpdir, deps, True)
else:
spec = parse_requirement_arg(spec)
self.check_editable(spec)
dist = self.package_index.fetch_distribution(
spec, tmpdir, self.upgrade, self.editable,
not self.always_copy, self.local_index
)
if dist is None:
msg = "Could not find suitable distribution for %r" % spec
if self.always_copy:
msg += " (--always-copy skips system and development eggs)"
raise DistutilsError(msg)
elif dist.precedence == DEVELOP_DIST:
# .egg-info dists don't need installing, just process deps
self.process_distribution(spec, dist, deps, "Using")
return dist
else:
return self.install_item(spec, dist.location, tmpdir, deps)
finally:
if os.path.exists(tmpdir):
rmtree(tmpdir)
def install_item(self, spec, download, tmpdir, deps, install_needed=False):
# Installation is also needed if file in tmpdir or is not an egg
install_needed = install_needed or self.always_copy
install_needed = install_needed or os.path.dirname(download) == tmpdir
install_needed = install_needed or not download.endswith('.egg')
install_needed = install_needed or (
self.always_copy_from is not None and
os.path.dirname(normalize_path(download)) ==
normalize_path(self.always_copy_from)
)
if spec and not install_needed:
# at this point, we know it's a local .egg, we just don't know if
# it's already installed.
for dist in self.local_index[spec.project_name]:
if dist.location == download:
break
else:
install_needed = True # it's not in the local index
log.info("Processing %s", os.path.basename(download))
if install_needed:
dists = self.install_eggs(spec, download, tmpdir)
for dist in dists:
self.process_distribution(spec, dist, deps)
else:
dists = [self.egg_distribution(download)]
self.process_distribution(spec, dists[0], deps, "Using")
if spec is not None:
for dist in dists:
if dist in spec:
return dist
def select_scheme(self, name):
"""Sets the install directories by applying the install schemes."""
# it's the caller's problem if they supply a bad name!
scheme = INSTALL_SCHEMES[name]
for key in SCHEME_KEYS:
attrname = 'install_' + key
if getattr(self, attrname) is None:
setattr(self, attrname, scheme[key])
def process_distribution(self, requirement, dist, deps=True, *info):
self.update_pth(dist)
self.package_index.add(dist)
if dist in self.local_index[dist.key]:
self.local_index.remove(dist)
self.local_index.add(dist)
self.install_egg_scripts(dist)
self.installed_projects[dist.key] = dist
log.info(self.installation_report(requirement, dist, *info))
if (dist.has_metadata('dependency_links.txt') and
not self.no_find_links):
self.package_index.add_find_links(
dist.get_metadata_lines('dependency_links.txt')
)
if not deps and not self.always_copy:
return
elif requirement is not None and dist.key != requirement.key:
log.warn("Skipping dependencies for %s", dist)
return # XXX this is not the distribution we were looking for
elif requirement is None or dist not in requirement:
# if we wound up with a different version, resolve what we've got
distreq = dist.as_requirement()
requirement = requirement or distreq
requirement = Requirement(
distreq.project_name, distreq.specs, requirement.extras
)
log.info("Processing dependencies for %s", requirement)
try:
distros = WorkingSet([]).resolve(
[requirement], self.local_index, self.easy_install
)
except DistributionNotFound:
e = sys.exc_info()[1]
raise DistutilsError(
"Could not find required distribution %s" % e.args
)
except VersionConflict:
e = sys.exc_info()[1]
raise DistutilsError(
"Installed distribution %s conflicts with requirement %s"
% e.args
)
if self.always_copy or self.always_copy_from:
# Force all the relevant distros to be copied or activated
for dist in distros:
if dist.key not in self.installed_projects:
self.easy_install(dist.as_requirement())
log.info("Finished processing dependencies for %s", requirement)
def should_unzip(self, dist):
if self.zip_ok is not None:
return not self.zip_ok
if dist.has_metadata('not-zip-safe'):
return True
if not dist.has_metadata('zip-safe'):
return True
return False
def maybe_move(self, spec, dist_filename, setup_base):
dst = os.path.join(self.build_directory, spec.key)
if os.path.exists(dst):
msg = ("%r already exists in %s; build directory %s will not be "
"kept")
log.warn(msg, spec.key, self.build_directory, setup_base)
return setup_base
if os.path.isdir(dist_filename):
setup_base = dist_filename
else:
if os.path.dirname(dist_filename) == setup_base:
os.unlink(dist_filename) # get it out of the tmp dir
contents = os.listdir(setup_base)
if len(contents) == 1:
dist_filename = os.path.join(setup_base, contents[0])
if os.path.isdir(dist_filename):
# if the only thing there is a directory, move it instead
setup_base = dist_filename
ensure_directory(dst)
shutil.move(setup_base, dst)
return dst
def install_wrapper_scripts(self, dist):
if not self.exclude_scripts:
for args in get_script_args(dist):
self.write_script(*args)
def install_script(self, dist, script_name, script_text, dev_path=None):
"""Generate a legacy script wrapper and install it"""
spec = str(dist.as_requirement())
is_script = is_python_script(script_text, script_name)
if is_script:
script_text = (get_script_header(script_text) +
self._load_template(dev_path) % locals())
self.write_script(script_name, _to_ascii(script_text), 'b')
@staticmethod
def _load_template(dev_path):
"""
There are a couple of template scripts in the package. This
function loads one of them and prepares it for use.
"""
# See https://bitbucket.org/pypa/setuptools/issue/134 for info
# on script file naming and downstream issues with SVR4
name = 'script.tmpl'
if dev_path:
name = name.replace('.tmpl', ' (dev).tmpl')
raw_bytes = resource_string('setuptools', name)
return raw_bytes.decode('utf-8')
def write_script(self, script_name, contents, mode="t", blockers=()):
"""Write an executable file to the scripts directory"""
self.delete_blockers( # clean up old .py/.pyw w/o a script
[os.path.join(self.script_dir, x) for x in blockers]
)
log.info("Installing %s script to %s", script_name, self.script_dir)
target = os.path.join(self.script_dir, script_name)
self.add_output(target)
mask = current_umask()
if not self.dry_run:
ensure_directory(target)
if os.path.exists(target):
os.unlink(target)
f = open(target, "w" + mode)
f.write(contents)
f.close()
chmod(target, 0o777 - mask)
def install_eggs(self, spec, dist_filename, tmpdir):
# .egg dirs or files are already built, so just return them
if dist_filename.lower().endswith('.egg'):
return [self.install_egg(dist_filename, tmpdir)]
elif dist_filename.lower().endswith('.exe'):
return [self.install_exe(dist_filename, tmpdir)]
# Anything else, try to extract and build
setup_base = tmpdir
if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'):
unpack_archive(dist_filename, tmpdir, self.unpack_progress)
elif os.path.isdir(dist_filename):
setup_base = os.path.abspath(dist_filename)
if (setup_base.startswith(tmpdir) # something we downloaded
and self.build_directory and spec is not None):
setup_base = self.maybe_move(spec, dist_filename, setup_base)
# Find the setup.py file
setup_script = os.path.join(setup_base, 'setup.py')
if not os.path.exists(setup_script):
setups = glob(os.path.join(setup_base, '*', 'setup.py'))
if not setups:
raise DistutilsError(
"Couldn't find a setup script in %s" %
os.path.abspath(dist_filename)
)
if len(setups) > 1:
raise DistutilsError(
"Multiple setup scripts in %s" %
os.path.abspath(dist_filename)
)
setup_script = setups[0]
# Now run it, and return the result
if self.editable:
log.info(self.report_editable(spec, setup_script))
return []
else:
return self.build_and_install(setup_script, setup_base)
def egg_distribution(self, egg_path):
if os.path.isdir(egg_path):
metadata = PathMetadata(egg_path, os.path.join(egg_path,
'EGG-INFO'))
else:
metadata = EggMetadata(zipimport.zipimporter(egg_path))
return Distribution.from_filename(egg_path, metadata=metadata)
def install_egg(self, egg_path, tmpdir):
destination = os.path.join(self.install_dir,
os.path.basename(egg_path))
destination = os.path.abspath(destination)
if not self.dry_run:
ensure_directory(destination)
dist = self.egg_distribution(egg_path)
if not samefile(egg_path, destination):
if os.path.isdir(destination) and not os.path.islink(destination):
dir_util.remove_tree(destination, dry_run=self.dry_run)
elif os.path.exists(destination):
self.execute(os.unlink, (destination,), "Removing " +
destination)
try:
new_dist_is_zipped = False
if os.path.isdir(egg_path):
if egg_path.startswith(tmpdir):
f, m = shutil.move, "Moving"
else:
f, m = shutil.copytree, "Copying"
elif self.should_unzip(dist):
self.mkpath(destination)
f, m = self.unpack_and_compile, "Extracting"
else:
new_dist_is_zipped = True
if egg_path.startswith(tmpdir):
f, m = shutil.move, "Moving"
else:
f, m = shutil.copy2, "Copying"
self.execute(f, (egg_path, destination),
(m + " %s to %s") %
(os.path.basename(egg_path),
os.path.dirname(destination)))
update_dist_caches(destination,
fix_zipimporter_caches=new_dist_is_zipped)
except:
update_dist_caches(destination, fix_zipimporter_caches=False)
raise
self.add_output(destination)
return self.egg_distribution(destination)
def install_exe(self, dist_filename, tmpdir):
# See if it's valid, get data
cfg = extract_wininst_cfg(dist_filename)
if cfg is None:
raise DistutilsError(
"%s is not a valid distutils Windows .exe" % dist_filename
)
# Create a dummy distribution object until we build the real distro
dist = Distribution(
None,
project_name=cfg.get('metadata', 'name'),
version=cfg.get('metadata', 'version'), platform=get_platform(),
)
# Convert the .exe to an unpacked egg
egg_path = dist.location = os.path.join(tmpdir, dist.egg_name() +
'.egg')
egg_tmp = egg_path + '.tmp'
_egg_info = os.path.join(egg_tmp, 'EGG-INFO')
pkg_inf = os.path.join(_egg_info, 'PKG-INFO')
ensure_directory(pkg_inf) # make sure EGG-INFO dir exists
dist._provider = PathMetadata(egg_tmp, _egg_info) # XXX
self.exe_to_egg(dist_filename, egg_tmp)
# Write EGG-INFO/PKG-INFO
if not os.path.exists(pkg_inf):
f = open(pkg_inf, 'w')
f.write('Metadata-Version: 1.0\n')
for k, v in cfg.items('metadata'):
if k != 'target_version':
f.write('%s: %s\n' % (k.replace('_', '-').title(), v))
f.close()
script_dir = os.path.join(_egg_info, 'scripts')
self.delete_blockers( # delete entry-point scripts to avoid duping
[os.path.join(script_dir, args[0]) for args in
get_script_args(dist)]
)
# Build .egg file from tmpdir
bdist_egg.make_zipfile(
egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run
)
# install the .egg
return self.install_egg(egg_path, tmpdir)
def exe_to_egg(self, dist_filename, egg_tmp):
"""Extract a bdist_wininst to the directories an egg would use"""
# Check for .pth file and set up prefix translations
prefixes = get_exe_prefixes(dist_filename)
to_compile = []
native_libs = []
top_level = {}
def process(src, dst):
s = src.lower()
for old, new in prefixes:
if s.startswith(old):
src = new + src[len(old):]
parts = src.split('/')
dst = os.path.join(egg_tmp, *parts)
dl = dst.lower()
if dl.endswith('.pyd') or dl.endswith('.dll'):
parts[-1] = bdist_egg.strip_module(parts[-1])
top_level[os.path.splitext(parts[0])[0]] = 1
native_libs.append(src)
elif dl.endswith('.py') and old != 'SCRIPTS/':
top_level[os.path.splitext(parts[0])[0]] = 1
to_compile.append(dst)
return dst
if not src.endswith('.pth'):
log.warn("WARNING: can't process %s", src)
return None
# extract, tracking .pyd/.dll->native_libs and .py -> to_compile
unpack_archive(dist_filename, egg_tmp, process)
stubs = []
for res in native_libs:
if res.lower().endswith('.pyd'): # create stubs for .pyd's
parts = res.split('/')
resource = parts[-1]
parts[-1] = bdist_egg.strip_module(parts[-1]) + '.py'
pyfile = os.path.join(egg_tmp, *parts)
to_compile.append(pyfile)
stubs.append(pyfile)
bdist_egg.write_stub(resource, pyfile)
self.byte_compile(to_compile) # compile .py's
bdist_egg.write_safety_flag(
os.path.join(egg_tmp, 'EGG-INFO'),
bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flag
for name in 'top_level', 'native_libs':
if locals()[name]:
txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt')
if not os.path.exists(txt):
f = open(txt, 'w')
f.write('\n'.join(locals()[name]) + '\n')
f.close()
def installation_report(self, req, dist, what="Installed"):
"""Helpful installation message for display to package users"""
msg = "\n%(what)s %(eggloc)s%(extras)s"
if self.multi_version and not self.no_report:
msg += """
Because this distribution was installed --multi-version, before you can
import modules from this package in an application, you will need to
'import pkg_resources' and then use a 'require()' call similar to one of
these examples, in order to select the desired version:
pkg_resources.require("%(name)s") # latest installed version
pkg_resources.require("%(name)s==%(version)s") # this exact version
pkg_resources.require("%(name)s>=%(version)s") # this version or higher
"""
if self.install_dir not in map(normalize_path, sys.path):
msg += """
Note also that the installation directory must be on sys.path at runtime for
this to work. (e.g. by being the application's script directory, by being on
PYTHONPATH, or by being added to sys.path by your code.)
"""
eggloc = dist.location
name = dist.project_name
version = dist.version
extras = '' # TODO: self.report_extras(req, dist)
return msg % locals()
def report_editable(self, spec, setup_script):
dirname = os.path.dirname(setup_script)
python = sys.executable
return """\nExtracted editable version of %(spec)s to %(dirname)s
If it uses setuptools in its setup script, you can activate it in
"development" mode by going to that directory and running::
%(python)s setup.py develop
See the setuptools documentation for the "develop" command for more info.
""" % locals()
def run_setup(self, setup_script, setup_base, args):
sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg)
sys.modules.setdefault('distutils.command.egg_info', egg_info)
args = list(args)
if self.verbose > 2:
v = 'v' * (self.verbose - 1)
args.insert(0, '-' + v)
elif self.verbose < 2:
args.insert(0, '-q')
if self.dry_run:
args.insert(0, '-n')
log.info(
"Running %s %s", setup_script[len(setup_base) + 1:], ' '.join(args)
)
try:
run_setup(setup_script, args)
except SystemExit:
v = sys.exc_info()[1]
raise DistutilsError("Setup script exited with %s" % (v.args[0],))
def build_and_install(self, setup_script, setup_base):
args = ['bdist_egg', '--dist-dir']
dist_dir = tempfile.mkdtemp(
prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script)
)
try:
self._set_fetcher_options(os.path.dirname(setup_script))
args.append(dist_dir)
self.run_setup(setup_script, setup_base, args)
all_eggs = Environment([dist_dir])
eggs = []
for key in all_eggs:
for dist in all_eggs[key]:
eggs.append(self.install_egg(dist.location, setup_base))
if not eggs and not self.dry_run:
log.warn("No eggs found in %s (setup script problem?)",
dist_dir)
return eggs
finally:
rmtree(dist_dir)
log.set_verbosity(self.verbose) # restore our log verbosity
def _set_fetcher_options(self, base):
"""
When easy_install is about to run bdist_egg on a source dist, that
source dist might have 'setup_requires' directives, requiring
additional fetching. Ensure the fetcher options given to easy_install
are available to that command as well.
"""
# find the fetch options from easy_install and write them out
# to the setup.cfg file.
ei_opts = self.distribution.get_option_dict('easy_install').copy()
fetch_directives = (
'find_links', 'site_dirs', 'index_url', 'optimize', 'allow_hosts',
)
fetch_options = {}
for key, val in ei_opts.items():
if key not in fetch_directives:
continue
fetch_options[key.replace('_', '-')] = val[1]
# create a settings dictionary suitable for `edit_config`
settings = dict(easy_install=fetch_options)
cfg_filename = os.path.join(base, 'setup.cfg')
setopt.edit_config(cfg_filename, settings)
def update_pth(self, dist):
if self.pth_file is None:
return
for d in self.pth_file[dist.key]: # drop old entries
if self.multi_version or d.location != dist.location:
log.info("Removing %s from easy-install.pth file", d)
self.pth_file.remove(d)
if d.location in self.shadow_path:
self.shadow_path.remove(d.location)
if not self.multi_version:
if dist.location in self.pth_file.paths:
log.info(
"%s is already the active version in easy-install.pth",
dist
)
else:
log.info("Adding %s to easy-install.pth file", dist)
self.pth_file.add(dist) # add new entry
if dist.location not in self.shadow_path:
self.shadow_path.append(dist.location)
if not self.dry_run:
self.pth_file.save()
if dist.key == 'setuptools':
# Ensure that setuptools itself never becomes unavailable!
# XXX should this check for latest version?
filename = os.path.join(self.install_dir, 'setuptools.pth')
if os.path.islink(filename):
os.unlink(filename)
f = open(filename, 'wt')
f.write(self.pth_file.make_relative(dist.location) + '\n')
f.close()
def unpack_progress(self, src, dst):
# Progress filter for unpacking
log.debug("Unpacking %s to %s", src, dst)
return dst # only unpack-and-compile skips files for dry run
def unpack_and_compile(self, egg_path, destination):
to_compile = []
to_chmod = []
def pf(src, dst):
if dst.endswith('.py') and not src.startswith('EGG-INFO/'):
to_compile.append(dst)
elif dst.endswith('.dll') or dst.endswith('.so'):
to_chmod.append(dst)
self.unpack_progress(src, dst)
return not self.dry_run and dst or None
unpack_archive(egg_path, destination, pf)
self.byte_compile(to_compile)
if not self.dry_run:
for f in to_chmod:
mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755
chmod(f, mode)
def byte_compile(self, to_compile):
if _dont_write_bytecode:
self.warn('byte-compiling is disabled, skipping.')
return
from distutils.util import byte_compile
try:
# try to make the byte compile messages quieter
log.set_verbosity(self.verbose - 1)
byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run)
if self.optimize:
byte_compile(
to_compile, optimize=self.optimize, force=1,
dry_run=self.dry_run
)
finally:
log.set_verbosity(self.verbose) # restore original verbosity
def no_default_version_msg(self):
template = """bad install directory or PYTHONPATH
You are attempting to install a package to a directory that is not
on PYTHONPATH and which Python does not read ".pth" files from. The
installation directory you specified (via --install-dir, --prefix, or
the distutils default setting) was:
%s
and your PYTHONPATH environment variable currently contains:
%r
Here are some of your options for correcting the problem:
* You can choose a different installation directory, i.e., one that is
on PYTHONPATH or supports .pth files
* You can add the installation directory to the PYTHONPATH environment
variable. (It must then also be on PYTHONPATH whenever you run
Python and want to use the package(s) you are installing.)
* You can set up the installation directory to support ".pth" files by
using one of the approaches described here:
https://pythonhosted.org/setuptools/easy_install.html#custom-installation-locations
Please make the appropriate changes for your system and try again."""
return template % (self.install_dir, os.environ.get('PYTHONPATH', ''))
def install_site_py(self):
"""Make sure there's a site.py in the target dir, if needed"""
if self.sitepy_installed:
return # already did it, or don't need to
sitepy = os.path.join(self.install_dir, "site.py")
source = resource_string("setuptools", "site-patch.py")
current = ""
if os.path.exists(sitepy):
log.debug("Checking existing site.py in %s", self.install_dir)
f = open(sitepy, 'rb')
current = f.read()
# we want str, not bytes
if PY3:
current = current.decode()
f.close()
if not current.startswith('def __boot():'):
raise DistutilsError(
"%s is not a setuptools-generated site.py; please"
" remove it." % sitepy
)
if current != source:
log.info("Creating %s", sitepy)
if not self.dry_run:
ensure_directory(sitepy)
f = open(sitepy, 'wb')
f.write(source)
f.close()
self.byte_compile([sitepy])
self.sitepy_installed = True
def create_home_path(self):
"""Create directories under ~."""
if not self.user:
return
home = convert_path(os.path.expanduser("~"))
for name, path in iteritems(self.config_vars):
if path.startswith(home) and not os.path.isdir(path):
self.debug_print("os.makedirs('%s', 0o700)" % path)
os.makedirs(path, 0o700)
INSTALL_SCHEMES = dict(
posix=dict(
install_dir='$base/lib/python$py_version_short/site-packages',
script_dir='$base/bin',
),
)
DEFAULT_SCHEME = dict(
install_dir='$base/Lib/site-packages',
script_dir='$base/Scripts',
)
def _expand(self, *attrs):
config_vars = self.get_finalized_command('install').config_vars
if self.prefix:
# Set default install_dir/scripts from --prefix
config_vars = config_vars.copy()
config_vars['base'] = self.prefix
scheme = self.INSTALL_SCHEMES.get(os.name, self.DEFAULT_SCHEME)
for attr, val in scheme.items():
if getattr(self, attr, None) is None:
setattr(self, attr, val)
from distutils.util import subst_vars
for attr in attrs:
val = getattr(self, attr)
if val is not None:
val = subst_vars(val, config_vars)
if os.name == 'posix':
val = os.path.expanduser(val)
setattr(self, attr, val)
def get_site_dirs():
# return a list of 'site' dirs
sitedirs = [_f for _f in os.environ.get('PYTHONPATH',
'').split(os.pathsep) if _f]
prefixes = [sys.prefix]
if sys.exec_prefix != sys.prefix:
prefixes.append(sys.exec_prefix)
for prefix in prefixes:
if prefix:
if sys.platform in ('os2emx', 'riscos'):
sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
elif os.sep == '/':
sitedirs.extend([os.path.join(prefix,
"lib",
"python" + sys.version[:3],
"site-packages"),
os.path.join(prefix, "lib", "site-python")])
else:
sitedirs.extend(
[prefix, os.path.join(prefix, "lib", "site-packages")]
)
if sys.platform == 'darwin':
# for framework builds *only* we add the standard Apple
# locations. Currently only per-user, but /Library and
# /Network/Library could be added too
if 'Python.framework' in prefix:
home = os.environ.get('HOME')
if home:
sitedirs.append(
os.path.join(home,
'Library',
'Python',
sys.version[:3],
'site-packages'))
lib_paths = get_path('purelib'), get_path('platlib')
for site_lib in lib_paths:
if site_lib not in sitedirs:
sitedirs.append(site_lib)
if site.ENABLE_USER_SITE:
sitedirs.append(site.USER_SITE)
sitedirs = list(map(normalize_path, sitedirs))
return sitedirs
def expand_paths(inputs):
"""Yield sys.path directories that might contain "old-style" packages"""
seen = {}
for dirname in inputs:
dirname = normalize_path(dirname)
if dirname in seen:
continue
seen[dirname] = 1
if not os.path.isdir(dirname):
continue
files = os.listdir(dirname)
yield dirname, files
for name in files:
if not name.endswith('.pth'):
# We only care about the .pth files
continue
if name in ('easy-install.pth', 'setuptools.pth'):
# Ignore .pth files that we control
continue
# Read the .pth file
f = open(os.path.join(dirname, name))
lines = list(yield_lines(f))
f.close()
# Yield existing non-dupe, non-import directory lines from it
for line in lines:
if not line.startswith("import"):
line = normalize_path(line.rstrip())
if line not in seen:
seen[line] = 1
if not os.path.isdir(line):
continue
yield line, os.listdir(line)
def extract_wininst_cfg(dist_filename):
"""Extract configuration data from a bdist_wininst .exe
Returns a ConfigParser.RawConfigParser, or None
"""
f = open(dist_filename, 'rb')
try:
endrec = zipfile._EndRecData(f)
if endrec is None:
return None
prepended = (endrec[9] - endrec[5]) - endrec[6]
if prepended < 12: # no wininst data here
return None
f.seek(prepended - 12)
from setuptools.compat import StringIO, ConfigParser
import struct
tag, cfglen, bmlen = struct.unpack("<iii", f.read(12))
if tag not in (0x1234567A, 0x1234567B):
return None # not a valid tag
f.seek(prepended - (12 + cfglen))
cfg = ConfigParser.RawConfigParser(
{'version': '', 'target_version': ''})
try:
part = f.read(cfglen)
# part is in bytes, but we need to read up to the first null
# byte.
            # N.B. bytes([0]) equals b'\x00' only on Python 3; on
            # Python 2, bytes is an alias for str, so use chr(0) there.
            if sys.version_info >= (3,):
null_byte = bytes([0])
else:
null_byte = chr(0)
config = part.split(null_byte, 1)[0]
# Now the config is in bytes, but for RawConfigParser, it should
# be text, so decode it.
config = config.decode(sys.getfilesystemencoding())
cfg.readfp(StringIO(config))
except ConfigParser.Error:
return None
if not cfg.has_section('metadata') or not cfg.has_section('Setup'):
return None
return cfg
finally:
f.close()
def get_exe_prefixes(exe_filename):
"""Get exe->egg path translations for a given .exe file"""
prefixes = [
('PURELIB/', ''), ('PLATLIB/pywin32_system32', ''),
('PLATLIB/', ''),
('SCRIPTS/', 'EGG-INFO/scripts/'),
('DATA/lib/site-packages', ''),
]
z = zipfile.ZipFile(exe_filename)
try:
for info in z.infolist():
name = info.filename
parts = name.split('/')
if len(parts) == 3 and parts[2] == 'PKG-INFO':
if parts[1].endswith('.egg-info'):
prefixes.insert(0, ('/'.join(parts[:2]), 'EGG-INFO/'))
break
if len(parts) != 2 or not name.endswith('.pth'):
continue
if name.endswith('-nspkg.pth'):
continue
if parts[0].upper() in ('PURELIB', 'PLATLIB'):
contents = z.read(name)
if PY3:
contents = contents.decode()
for pth in yield_lines(contents):
pth = pth.strip().replace('\\', '/')
if not pth.startswith('import'):
prefixes.append((('%s/%s/' % (parts[0], pth)), ''))
finally:
z.close()
prefixes = [(x.lower(), y) for x, y in prefixes]
prefixes.sort()
prefixes.reverse()
return prefixes
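# Illustrative translations for a typical bdist_wininst archive (the member
# names below are examples, not taken from a real archive):
#   PURELIB/foo/__init__.py -> foo/__init__.py
#   SCRIPTS/foo-script.py   -> EGG-INFO/scripts/foo-script.py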
def parse_requirement_arg(spec):
try:
return Requirement.parse(spec)
except ValueError:
raise DistutilsError(
"Not a URL, existing file, or requirement spec: %r" % (spec,)
)
class PthDistributions(Environment):
"""A .pth file with Distribution paths in it"""
dirty = False
def __init__(self, filename, sitedirs=()):
self.filename = filename
self.sitedirs = list(map(normalize_path, sitedirs))
self.basedir = normalize_path(os.path.dirname(self.filename))
self._load()
Environment.__init__(self, [], None, None)
for path in yield_lines(self.paths):
list(map(self.add, find_distributions(path, True)))
def _load(self):
self.paths = []
saw_import = False
seen = dict.fromkeys(self.sitedirs)
if os.path.isfile(self.filename):
f = open(self.filename, 'rt')
for line in f:
if line.startswith('import'):
saw_import = True
continue
path = line.rstrip()
self.paths.append(path)
if not path.strip() or path.strip().startswith('#'):
continue
# skip non-existent paths, in case somebody deleted a package
# manually, and duplicate paths as well
path = self.paths[-1] = normalize_path(
os.path.join(self.basedir, path)
)
if not os.path.exists(path) or path in seen:
self.paths.pop() # skip it
self.dirty = True # we cleaned up, so we're dirty now :)
continue
seen[path] = 1
f.close()
if self.paths and not saw_import:
self.dirty = True # ensure anything we touch has import wrappers
while self.paths and not self.paths[-1].strip():
self.paths.pop()
def save(self):
"""Write changed .pth file back to disk"""
if not self.dirty:
return
data = '\n'.join(map(self.make_relative, self.paths))
if data:
log.debug("Saving %s", self.filename)
data = (
"import sys; sys.__plen = len(sys.path)\n"
"%s\n"
"import sys; new=sys.path[sys.__plen:];"
" del sys.path[sys.__plen:];"
" p=getattr(sys,'__egginsert',0); sys.path[p:p]=new;"
" sys.__egginsert = p+len(new)\n"
) % data
if os.path.islink(self.filename):
os.unlink(self.filename)
f = open(self.filename, 'wt')
f.write(data)
f.close()
elif os.path.exists(self.filename):
log.debug("Deleting empty %s", self.filename)
os.unlink(self.filename)
self.dirty = False
def add(self, dist):
"""Add `dist` to the distribution map"""
if (dist.location not in self.paths and (
dist.location not in self.sitedirs or
dist.location == os.getcwd() # account for '.' being in PYTHONPATH
)):
self.paths.append(dist.location)
self.dirty = True
Environment.add(self, dist)
def remove(self, dist):
"""Remove `dist` from the distribution map"""
while dist.location in self.paths:
self.paths.remove(dist.location)
self.dirty = True
Environment.remove(self, dist)
def make_relative(self, path):
npath, last = os.path.split(normalize_path(path))
baselen = len(self.basedir)
parts = [last]
sep = os.altsep == '/' and '/' or os.sep
while len(npath) >= baselen:
if npath == self.basedir:
parts.append(os.curdir)
parts.reverse()
return sep.join(parts)
npath, last = os.path.split(npath)
parts.append(last)
else:
return path
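# Illustrative behaviour (paths are examples only): with
# self.basedir == '/site-packages',
#   make_relative('/site-packages/foo.egg') -> './foo.egg'
# while a path outside basedir, e.g. '/elsewhere/foo.egg', is returned
# unchanged.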
def _first_line_re():
"""
Return a regular expression based on first_line_re suitable for matching
strings.
"""
if isinstance(first_line_re.pattern, str):
return first_line_re
# first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern.
return re.compile(first_line_re.pattern.decode())
def get_script_header(script_text, executable=sys_executable, wininst=False):
"""Create a #! line, getting options (if any) from script_text"""
first = (script_text + '\n').splitlines()[0]
match = _first_line_re().match(first)
options = ''
if match:
options = match.group(1) or ''
if options:
options = ' ' + options
if wininst:
executable = "python.exe"
else:
executable = nt_quote_arg(executable)
hdr = "#!%(executable)s%(options)s\n" % locals()
if not isascii(hdr):
# Non-ascii path to sys.executable, use -x to prevent warnings
if options:
if options.strip().startswith('-'):
options = ' -x' + options.strip()[1:]
# else: punt, we can't do it, let the warning happen anyway
else:
options = ' -x'
executable = fix_jython_executable(executable, options)
hdr = "#!%(executable)s%(options)s\n" % locals()
return hdr
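# Illustrative results (the interpreter path is an example only):
#   get_script_header("print 'x'")      -> '#!/usr/bin/python\n'
#   get_script_header("", wininst=True) -> '#!python.exe\n'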
def auto_chmod(func, arg, exc):
if func is os.remove and os.name == 'nt':
chmod(arg, stat.S_IWRITE)
return func(arg)
et, ev, _ = sys.exc_info()
reraise(et, (ev[0], ev[1] + (" %s %s" % (func, arg))))
def update_dist_caches(dist_path, fix_zipimporter_caches):
"""
Fix any globally cached `dist_path` related data
`dist_path` should be a path of a newly installed egg distribution (zipped
or unzipped).
sys.path_importer_cache contains finder objects that have been cached when
importing data from the original distribution. Any such finders need to be
cleared since the replacement distribution might be packaged differently,
e.g. a zipped egg distribution might get replaced with an unzipped egg
folder or vice versa. Having the old finders cached may then cause Python
to attempt loading modules from the replacement distribution using an
incorrect loader.
zipimport.zipimporter objects are Python loaders charged with importing
    data packaged inside zip archives. If stale loaders referencing the
    original distribution are left behind, they can fail to load modules from
the replacement distribution. E.g. if an old zipimport.zipimporter instance
is used to load data from a new zipped egg archive, it may cause the
operation to attempt to locate the requested data in the wrong location -
one indicated by the original distribution's zip archive directory
information. Such an operation may then fail outright, e.g. report having
read a 'bad local file header', or even worse, it may fail silently &
return invalid data.
zipimport._zip_directory_cache contains cached zip archive directory
information for all existing zipimport.zipimporter instances and all such
instances connected to the same archive share the same cached directory
information.
If asked, and the underlying Python implementation allows it, we can fix
all existing zipimport.zipimporter instances instead of having to track
them down and remove them one by one, by updating their shared cached zip
archive directory information. This, of course, assumes that the
replacement distribution is packaged as a zipped egg.
If not asked to fix existing zipimport.zipimporter instances, we still do
our best to clear any remaining zipimport.zipimporter related cached data
that might somehow later get used when attempting to load data from the new
distribution and thus cause such load operations to fail. Note that when
tracking down such remaining stale data, we can not catch every conceivable
usage from here, and we clear only those that we know of and have found to
cause problems if left alive. Any remaining caches should be updated by
    whoever is in charge of maintaining them, i.e. they should be ready to
handle us replacing their zip archives with new distributions at runtime.
"""
# There are several other known sources of stale zipimport.zipimporter
# instances that we do not clear here, but might if ever given a reason to
# do so:
# * Global setuptools pkg_resources.working_set (a.k.a. 'master working
# set') may contain distributions which may in turn contain their
# zipimport.zipimporter loaders.
# * Several zipimport.zipimporter loaders held by local variables further
# up the function call stack when running the setuptools installation.
# * Already loaded modules may have their __loader__ attribute set to the
# exact loader instance used when importing them. Python 3.4 docs state
# that this information is intended mostly for introspection and so is
# not expected to cause us problems.
normalized_path = normalize_path(dist_path)
_uncache(normalized_path, sys.path_importer_cache)
if fix_zipimporter_caches:
_replace_zip_directory_cache_data(normalized_path)
else:
# Here, even though we do not want to fix existing and now stale
# zipimporter cache information, we still want to remove it. Related to
# Python's zip archive directory information cache, we clear each of
# its stale entries in two phases:
# 1. Clear the entry so attempting to access zip archive information
# via any existing stale zipimport.zipimporter instances fails.
# 2. Remove the entry from the cache so any newly constructed
# zipimport.zipimporter instances do not end up using old stale
# zip archive directory information.
# This whole stale data removal step does not seem strictly necessary,
# but has been left in because it was done before we started replacing
# the zip archive directory information cache content if possible, and
# there are no relevant unit tests that we can depend on to tell us if
# this is really needed.
_remove_and_clear_zip_directory_cache_data(normalized_path)
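# Illustrative call (the path is an example only): after overwriting
# /tmp/foo-1.0-py2.7.egg in place with a new zipped egg, refresh the
# global import caches so stale zipimporter data is not reused:
#
#     update_dist_caches('/tmp/foo-1.0-py2.7.egg',
#                        fix_zipimporter_caches=True)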
def _collect_zipimporter_cache_entries(normalized_path, cache):
"""
Return zipimporter cache entry keys related to a given normalized path.
Alternative path spellings (e.g. those using different character case or
those using alternative path separators) related to the same path are
included. Any sub-path entries are included as well, i.e. those
corresponding to zip archives embedded in other zip archives.
"""
result = []
prefix_len = len(normalized_path)
for p in cache:
np = normalize_path(p)
if (np.startswith(normalized_path) and
np[prefix_len:prefix_len + 1] in (os.sep, '')):
result.append(p)
return result
def _update_zipimporter_cache(normalized_path, cache, updater=None):
"""
Update zipimporter cache data for a given normalized path.
Any sub-path entries are processed as well, i.e. those corresponding to zip
archives embedded in other zip archives.
Given updater is a callable taking a cache entry key and the original entry
(after already removing the entry from the cache), and expected to update
the entry and possibly return a new one to be inserted in its place.
Returning None indicates that the entry should not be replaced with a new
one. If no updater is given, the cache entries are simply removed without
any additional processing, the same as if the updater simply returned None.
"""
for p in _collect_zipimporter_cache_entries(normalized_path, cache):
# N.B. pypy's custom zipimport._zip_directory_cache implementation does
# not support the complete dict interface:
# * Does not support item assignment, thus not allowing this function
# to be used only for removing existing cache entries.
# * Does not support the dict.pop() method, forcing us to use the
# get/del patterns instead. For more detailed information see the
# following links:
# https://bitbucket.org/pypa/setuptools/issue/202/more-robust-zipimporter-cache-invalidation#comment-10495960
# https://bitbucket.org/pypy/pypy/src/dd07756a34a41f674c0cacfbc8ae1d4cc9ea2ae4/pypy/module/zipimport/interp_zipimport.py#cl-99
old_entry = cache[p]
del cache[p]
new_entry = updater and updater(p, old_entry)
if new_entry is not None:
cache[p] = new_entry
def _uncache(normalized_path, cache):
_update_zipimporter_cache(normalized_path, cache)
def _remove_and_clear_zip_directory_cache_data(normalized_path):
def clear_and_remove_cached_zip_archive_directory_data(path, old_entry):
old_entry.clear()
_update_zipimporter_cache(
normalized_path, zipimport._zip_directory_cache,
updater=clear_and_remove_cached_zip_archive_directory_data)
# PyPy Python implementation does not allow directly writing to the
# zipimport._zip_directory_cache and so prevents us from attempting to correct
# its content. The best we can do there is clear the problematic cache content
# and have PyPy repopulate it as needed. The downside is that if there are any
# stale zipimport.zipimporter instances laying around, attempting to use them
# will fail due to not having its zip archive directory information available
# instead of being automatically corrected to use the new correct zip archive
# directory information.
if '__pypy__' in sys.builtin_module_names:
_replace_zip_directory_cache_data = \
_remove_and_clear_zip_directory_cache_data
else:
def _replace_zip_directory_cache_data(normalized_path):
def replace_cached_zip_archive_directory_data(path, old_entry):
# N.B. In theory, we could load the zip directory information just
# once for all updated path spellings, and then copy it locally and
# update its contained path strings to contain the correct
# spelling, but that seems like a way too invasive move (this cache
# structure is not officially documented anywhere and could in
# theory change with new Python releases) for no significant
# benefit.
old_entry.clear()
zipimport.zipimporter(path)
old_entry.update(zipimport._zip_directory_cache[path])
return old_entry
_update_zipimporter_cache(
normalized_path, zipimport._zip_directory_cache,
updater=replace_cached_zip_archive_directory_data)
def is_python(text, filename='<string>'):
"Is this string a valid Python script?"
try:
compile(text, filename, 'exec')
except (SyntaxError, TypeError):
return False
else:
return True
def is_sh(executable):
"""Determine if the specified executable is a .sh (contains a #! line)"""
try:
fp = open(executable)
magic = fp.read(2)
fp.close()
except (OSError, IOError):
return executable
return magic == '#!'
def nt_quote_arg(arg):
"""Quote a command line argument according to Windows parsing rules"""
result = []
needquote = False
nb = 0
needquote = (" " in arg) or ("\t" in arg)
if needquote:
result.append('"')
for c in arg:
if c == '\\':
nb += 1
elif c == '"':
# double preceding backslashes, then add a \"
result.append('\\' * (nb * 2) + '\\"')
nb = 0
else:
if nb:
result.append('\\' * nb)
nb = 0
result.append(c)
if nb:
result.append('\\' * nb)
if needquote:
result.append('\\' * nb) # double the trailing backslashes
result.append('"')
return ''.join(result)
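# Illustrative results (raw values shown, not Python string literals):
#   nt_quote_arg('python.exe')
#       -> python.exe                      (no whitespace: left unquoted)
#   nt_quote_arg(r'C:\Program Files\python.exe')
#       -> "C:\Program Files\python.exe"   (contains a space: quoted)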
def is_python_script(script_text, filename):
"""Is this text, as a whole, a Python script? (as opposed to shell/bat/etc.
"""
if filename.endswith('.py') or filename.endswith('.pyw'):
return True # extension says it's Python
if is_python(script_text, filename):
return True # it's syntactically valid Python
if script_text.startswith('#!'):
# It begins with a '#!' line, so check if 'python' is in it somewhere
return 'python' in script_text.splitlines()[0].lower()
return False # Not any Python I can recognize
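# Illustrative results:
#   is_python_script("#!/usr/bin/env python\nprint 'hi'\n", "hi")
#       -> True   (syntactically valid Python)
#   is_python_script("@echo off\r\necho hi\r\n", "hi.bat")
#       -> False  (not Python, and no python shebang)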
try:
from os import chmod as _chmod
except ImportError:
# Jython compatibility
def _chmod(*args):
pass
def chmod(path, mode):
log.debug("changing mode of %s to %o", path, mode)
try:
_chmod(path, mode)
except os.error:
e = sys.exc_info()[1]
log.debug("chmod failed: %s", e)
def fix_jython_executable(executable, options):
if sys.platform.startswith('java') and is_sh(executable):
        # The workaround for Jython is not needed on Linux systems.
import java
if java.lang.System.getProperty("os.name") == "Linux":
return executable
        # Work around Jython's sys.executable being a .sh (an invalid
        # shebang line interpreter)
if options:
# Can't apply the workaround, leave it broken
log.warn(
"WARNING: Unable to adapt shebang line for Jython,"
" the following script is NOT executable\n"
" see http://bugs.jython.org/issue1112 for"
" more information.")
else:
return '/usr/bin/env %s' % executable
return executable
class ScriptWriter(object):
"""
Encapsulates behavior around writing entry point scripts for console and
gui apps.
"""
template = textwrap.dedent("""
# EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r
__requires__ = %(spec)r
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.exit(
load_entry_point(%(spec)r, %(group)r, %(name)r)()
)
""").lstrip()
@classmethod
def get_script_args(cls, dist, executable=sys_executable, wininst=False):
"""
Yield write_script() argument tuples for a distribution's entrypoints
"""
gen_class = cls.get_writer(wininst)
spec = str(dist.as_requirement())
header = get_script_header("", executable, wininst)
for type_ in 'console', 'gui':
group = type_ + '_scripts'
for name, ep in dist.get_entry_map(group).items():
script_text = gen_class.template % locals()
for res in gen_class._get_script_args(type_, name, header,
script_text):
yield res
@classmethod
def get_writer(cls, force_windows):
if force_windows or sys.platform == 'win32':
return WindowsScriptWriter.get_writer()
return cls
@classmethod
def _get_script_args(cls, type_, name, header, script_text):
# Simply write the stub with no extension.
yield (name, header + script_text)
class WindowsScriptWriter(ScriptWriter):
@classmethod
def get_writer(cls):
"""
Get a script writer suitable for Windows
"""
writer_lookup = dict(
executable=WindowsExecutableLauncherWriter,
natural=cls,
)
# for compatibility, use the executable launcher by default
launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable')
return writer_lookup[launcher]
@classmethod
def _get_script_args(cls, type_, name, header, script_text):
"For Windows, add a .py extension"
ext = dict(console='.pya', gui='.pyw')[type_]
if ext not in os.environ['PATHEXT'].lower().split(';'):
warnings.warn("%s not listed in PATHEXT; scripts will not be "
"recognized as executables." % ext, UserWarning)
old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe']
old.remove(ext)
header = cls._adjust_header(type_, header)
blockers = [name + x for x in old]
yield name + ext, header + script_text, 't', blockers
@staticmethod
def _adjust_header(type_, orig_header):
"""
        Make sure 'pythonw' is used for gui and 'python' is used for
console (regardless of what sys.executable is).
"""
pattern = 'pythonw.exe'
repl = 'python.exe'
if type_ == 'gui':
pattern, repl = repl, pattern
pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE)
new_header = pattern_ob.sub(string=orig_header, repl=repl)
clean_header = new_header[2:-1].strip('"')
if sys.platform == 'win32' and not os.path.exists(clean_header):
# the adjusted version doesn't exist, so return the original
return orig_header
return new_header
class WindowsExecutableLauncherWriter(WindowsScriptWriter):
@classmethod
def _get_script_args(cls, type_, name, header, script_text):
"""
For Windows, add a .py extension and an .exe launcher
"""
if type_ == 'gui':
launcher_type = 'gui'
ext = '-script.pyw'
old = ['.pyw']
else:
launcher_type = 'cli'
ext = '-script.py'
old = ['.py', '.pyc', '.pyo']
hdr = cls._adjust_header(type_, header)
blockers = [name + x for x in old]
yield (name + ext, hdr + script_text, 't', blockers)
yield (
name + '.exe', get_win_launcher(launcher_type),
'b' # write in binary mode
)
if not is_64bit():
# install a manifest for the launcher to prevent Windows
# from detecting it as an installer (which it will for
# launchers like easy_install.exe). Consider only
# adding a manifest for launchers detected as installers.
# See Distribute #143 for details.
m_name = name + '.exe.manifest'
yield (m_name, load_launcher_manifest(name), 't')
# for backward-compatibility
get_script_args = ScriptWriter.get_script_args
def get_win_launcher(type):
"""
Load the Windows launcher (executable) suitable for launching a script.
`type` should be either 'cli' or 'gui'
Returns the executable as a byte string.
"""
launcher_fn = '%s.exe' % type
if platform.machine().lower() == 'arm':
launcher_fn = launcher_fn.replace(".", "-arm.")
if is_64bit():
launcher_fn = launcher_fn.replace(".", "-64.")
else:
launcher_fn = launcher_fn.replace(".", "-32.")
return resource_string('setuptools', launcher_fn)
def load_launcher_manifest(name):
manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml')
if PY2:
return manifest % vars()
else:
return manifest.decode('utf-8') % vars()
def rmtree(path, ignore_errors=False, onerror=auto_chmod):
"""Recursively delete a directory tree.
This code is taken from the Python 2.4 version of 'shutil', because
the 2.3 version doesn't really work right.
"""
if ignore_errors:
def onerror(*args):
pass
elif onerror is None:
def onerror(*args):
raise
names = []
try:
names = os.listdir(path)
except os.error:
onerror(os.listdir, path, sys.exc_info())
for name in names:
fullname = os.path.join(path, name)
try:
mode = os.lstat(fullname).st_mode
except os.error:
mode = 0
if stat.S_ISDIR(mode):
rmtree(fullname, ignore_errors, onerror)
else:
try:
os.remove(fullname)
except os.error:
onerror(os.remove, fullname, sys.exc_info())
try:
os.rmdir(path)
except os.error:
onerror(os.rmdir, path, sys.exc_info())
def current_umask():
tmp = os.umask(0o022)
os.umask(tmp)
return tmp
def bootstrap():
# This function is called when setuptools*.egg is run using /bin/sh
import setuptools
argv0 = os.path.dirname(setuptools.__path__[0])
sys.argv[0] = argv0
sys.argv.append(argv0)
main()
def main(argv=None, **kw):
from setuptools import setup
from setuptools.dist import Distribution
import distutils.core
USAGE = """\
usage: %(script)s [options] requirement_or_url ...
or: %(script)s --help
"""
def gen_usage(script_name):
return USAGE % dict(
script=os.path.basename(script_name),
)
def with_ei_usage(f):
old_gen_usage = distutils.core.gen_usage
try:
distutils.core.gen_usage = gen_usage
return f()
finally:
distutils.core.gen_usage = old_gen_usage
class DistributionWithoutHelpCommands(Distribution):
common_usage = ""
def _show_help(self, *args, **kw):
with_ei_usage(lambda: Distribution._show_help(self, *args, **kw))
if argv is None:
argv = sys.argv[1:]
with_ei_usage(
lambda: setup(
script_args=['-q', 'easy_install', '-v'] + argv,
script_name=sys.argv[0] or 'easy_install',
distclass=DistributionWithoutHelpCommands, **kw
)
)
|
PypiClean
|
/GxSphinx-1.0.0.tar.gz/GxSphinx-1.0.0/CONTRIBUTING.rst
|
======================
Contributing to Sphinx
======================
Interested in contributing to Sphinx? Hurrah! We welcome all forms of
contribution, including code patches, documentation improvements and bug
reports/feature requests.
Our contributing guide can be found online at:
https://www.sphinx-doc.org/en/master/internals/contributing/
You can also browse it in this repository under
``doc/internals/contributing/``.
Sphinx uses GitHub to host source code, track patches and bugs, and more.
Please make an effort to provide as much information as possible when filing bugs.
|
PypiClean
|
/tw.dojo-0.9.181.tar.gz/tw.dojo-0.9.181/tw/dojo/static/1.8.1/debug/dojox/grid/enhanced/nls/sv/Filter.js.uncompressed.js
|
define(
"dojox/grid/enhanced/nls/sv/Filter", ({
"clearFilterDialogTitle": "Rensa filter",
"filterDefDialogTitle": "Filter",
"ruleTitleTemplate": "Regel ${0}",
"conditionEqual": "lika med",
"conditionNotEqual": "inte lika med",
"conditionLess": "mindre än",
"conditionLessEqual": "mindre än eller lika med",
"conditionLarger": "större än",
"conditionLargerEqual": "större än eller lika med",
"conditionContains": "innehåller",
"conditionIs": "är",
"conditionStartsWith": "börjar med",
"conditionEndWith": "slutar med",
"conditionNotContain": "innehåller inte",
"conditionIsNot": "är inte",
"conditionNotStartWith": "börjar inte med",
"conditionNotEndWith": "slutar inte med",
"conditionBefore": "före",
"conditionAfter": "efter",
"conditionRange": "intervall",
"conditionIsEmpty": "är tom",
"all": "alla",
"any": "någon",
"relationAll": "alla regler",
"waiRelAll": "Matcha alla följande regler:",
"relationAny": "någon regel",
"waiRelAny": "Matcha någon av följande regler:",
"relationMsgFront": "Matcha:",
"relationMsgTail": "",
"and": "och",
"or": "eller",
"addRuleButton": "Lägg till regel",
"waiAddRuleButton": "Lägg till ny regel",
"removeRuleButton": "Ta bort regel",
"waiRemoveRuleButtonTemplate": "Ta bort regel ${0}",
"cancelButton": "Avbryt",
"waiCancelButton": "Stäng dialogrutan",
"clearButton": "Rensa",
"waiClearButton": "Rensa filtret",
"filterButton": "Filtrera",
"waiFilterButton": "Filtrera",
"columnSelectLabel": "Kolumn",
"waiColumnSelectTemplate": "Kolumn för regel ${0}",
"conditionSelectLabel": "Villkor",
"waiConditionSelectTemplate": "Villkor för regel ${0}",
"valueBoxLabel": "Värde",
"waiValueBoxTemplate": "Ange värde för filtrering efter regeln ${0}",
"rangeTo": "till",
"rangeTemplate": "från ${0} till ${1}",
"statusTipHeaderColumn": "Kolumn",
"statusTipHeaderCondition": "Regler",
"statusTipTitle": "Filterfält",
"statusTipMsg": "Klicka på filterfältet om du vill filtrera värden i ${0}.",
"anycolumn": "alla kolumner",
"statusTipTitleNoFilter": "Filterfält",
"statusTipTitleHasFilter": "Filter",
"statusTipRelAny": "Matcha någon regel.",
"statusTipRelAll": "Matcha alla regler.",
"defaultItemsName": "objekt",
"filterBarMsgHasFilterTemplate": "${0} av ${1} ${2} visas.",
"filterBarMsgNoFilterTemplate": "Inget filter tillämpat",
"filterBarDefButton": "Definiera filter",
"waiFilterBarDefButton": "Filtrera tabellen",
"a11yFilterBarDefButton": "Filter...",
"filterBarClearButton": "Rensa filter",
"waiFilterBarClearButton": "Rensa filtret",
"closeFilterBarBtn": "Stäng filterfält",
"clearFilterMsg": "Tar bort filtret och visar alla tillgängliga poster.",
"anyColumnOption": "Någon kolumn",
"trueLabel": "Sant",
"falseLabel": "Falskt"
})
);
|
PypiClean
|
/bpi_13_python-0.1a4.tar.gz/bpi_13_python-0.1a4/domain_model/utils.py
|
import xes
# Format: [[(actor name, method name)*]]
action_log = [[]]
skip_log = False
def set_skip_log(val):
global skip_log
skip_log = val
def new_trace():
action_log.append([])
def log_activity(func):
'''
A decorator that logs an activity being performed by some actor.
    :param func: The decorated method; expected to be a method called as part of a process.
:return: A decorated function.
'''
def logged_func(*args, **kwargs):
# For XES stuff:
# 1. agent name comes from arg[0] being the agent the method relates to.
# 2. How do I pick up the trace? Maybe tag each task _object_ with its trace, so I can pick it up here?
global skip_log
if not skip_log:
action_log[-1].append((args[0].actor_name, func.func_name))
skip_log = False
return func(*args, **kwargs)
logged_func.func_name = func.func_name
return logged_func
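# Illustrative usage (the Clerk class is hypothetical, not part of this
# module):
#
#     class Clerk(object):
#         actor_name = 'clerk'
#
#         @log_activity
#         def review_claim(self, claim):
#             pass
#
# Calling Clerk().review_claim(None) appends ('clerk', 'review_claim') to
# the current trace in action_log.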
# 3. Function for outputting an XES log
def generate_XES(traces=None, log_path='log.xes'):
# Borrow from the XES example at https://github.com/maxsumrall/xes
log = xes.Log()
def makeEvent(logged_event):
event = xes.Event()
event.attributes = [xes.Attribute(type="string",
key="concept:name",
value=logged_event[1]),
xes.Attribute(type="string",
key="org:resource",
value=logged_event[0])
]
return event
for event_set in action_log:
trace = xes.Trace()
[trace.add_event(makeEvent(logged_event)) for logged_event in event_set]
log.add_trace(trace)
log.classifiers = [
xes.Classifier(name="org:resource",
keys="org:resource"),
xes.Classifier(name="concept:name",
keys="concept:name")
]
with open(log_path, 'w') as log_file:
log_file.write(str(log))
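# Illustrative end-to-end usage (continuing the hypothetical Clerk example
# above; the log path is an example only):
#
#     new_trace()                        # start a fresh trace
#     Clerk().review_claim(None)         # logged as ('clerk', 'review_claim')
#     generate_XES(log_path='bpi.xes')   # serialize all traces to bpi.xes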
|
PypiClean
|
/git-deps-1.1.0.zip/git-deps-1.1.0/git_deps/html/node_modules/d3-tip/index.js
|
(function (root, factory) {
if (typeof define === 'function' && define.amd) {
// AMD. Register as an anonymous module with d3 as a dependency.
define(['d3'], factory)
} else if (typeof module === 'object' && module.exports) {
// CommonJS
module.exports = function(d3) {
d3.tip = factory(d3)
return d3.tip
}
} else {
// Browser global.
root.d3.tip = factory(root.d3)
}
}(this, function (d3) {
  // Public - constructs a new tooltip
//
// Returns a tip
return function() {
var direction = d3_tip_direction,
offset = d3_tip_offset,
html = d3_tip_html,
node = initNode(),
svg = null,
point = null,
target = null
function tip(vis) {
svg = getSVGNode(vis)
point = svg.createSVGPoint()
document.body.appendChild(node)
}
// Public - show the tooltip on the screen
//
// Returns a tip
tip.show = function() {
var args = Array.prototype.slice.call(arguments)
if(args[args.length - 1] instanceof SVGElement) target = args.pop()
var content = html.apply(this, args),
poffset = offset.apply(this, args),
dir = direction.apply(this, args),
nodel = getNodeEl(),
i = directions.length,
coords,
scrollTop = document.documentElement.scrollTop || document.body.scrollTop,
scrollLeft = document.documentElement.scrollLeft || document.body.scrollLeft
nodel.html(content)
.style({ opacity: 1, 'pointer-events': 'all' })
while(i--) nodel.classed(directions[i], false)
coords = direction_callbacks.get(dir).apply(this)
nodel.classed(dir, true).style({
top: (coords.top + poffset[0]) + scrollTop + 'px',
left: (coords.left + poffset[1]) + scrollLeft + 'px'
})
return tip
}
// Public - hide the tooltip
//
// Returns a tip
tip.hide = function() {
var nodel = getNodeEl()
nodel.style({ opacity: 0, 'pointer-events': 'none' })
return tip
}
// Public: Proxy attr calls to the d3 tip container. Sets or gets attribute value.
//
// n - name of the attribute
// v - value of the attribute
//
// Returns tip or attribute value
tip.attr = function(n, v) {
if (arguments.length < 2 && typeof n === 'string') {
return getNodeEl().attr(n)
} else {
var args = Array.prototype.slice.call(arguments)
d3.selection.prototype.attr.apply(getNodeEl(), args)
}
return tip
}
// Public: Proxy style calls to the d3 tip container. Sets or gets a style value.
//
// n - name of the property
// v - value of the property
//
// Returns tip or style property value
tip.style = function(n, v) {
if (arguments.length < 2 && typeof n === 'string') {
return getNodeEl().style(n)
} else {
var args = Array.prototype.slice.call(arguments)
d3.selection.prototype.style.apply(getNodeEl(), args)
}
return tip
}
// Public: Set or get the direction of the tooltip
//
// v - One of n(north), s(south), e(east), or w(west), nw(northwest),
// sw(southwest), ne(northeast) or se(southeast)
//
// Returns tip or direction
tip.direction = function(v) {
if (!arguments.length) return direction
direction = v == null ? v : d3.functor(v)
return tip
}
// Public: Sets or gets the offset of the tip
//
// v - Array of [x, y] offset
//
    // Returns offset or tip
tip.offset = function(v) {
if (!arguments.length) return offset
offset = v == null ? v : d3.functor(v)
return tip
}
// Public: sets or gets the html value of the tooltip
//
// v - String value of the tip
//
// Returns html value or tip
tip.html = function(v) {
if (!arguments.length) return html
html = v == null ? v : d3.functor(v)
return tip
}
// Public: destroys the tooltip and removes it from the DOM
//
// Returns a tip
tip.destroy = function() {
if(node) {
getNodeEl().remove();
node = null;
}
return tip;
}
function d3_tip_direction() { return 'n' }
function d3_tip_offset() { return [0, 0] }
function d3_tip_html() { return ' ' }
var direction_callbacks = d3.map({
n: direction_n,
s: direction_s,
e: direction_e,
w: direction_w,
nw: direction_nw,
ne: direction_ne,
sw: direction_sw,
se: direction_se
}),
directions = direction_callbacks.keys()
function direction_n() {
var bbox = getScreenBBox()
return {
top: bbox.n.y - node.offsetHeight,
left: bbox.n.x - node.offsetWidth / 2
}
}
function direction_s() {
var bbox = getScreenBBox()
return {
top: bbox.s.y,
left: bbox.s.x - node.offsetWidth / 2
}
}
function direction_e() {
var bbox = getScreenBBox()
return {
top: bbox.e.y - node.offsetHeight / 2,
left: bbox.e.x
}
}
function direction_w() {
var bbox = getScreenBBox()
return {
top: bbox.w.y - node.offsetHeight / 2,
left: bbox.w.x - node.offsetWidth
}
}
function direction_nw() {
var bbox = getScreenBBox()
return {
top: bbox.nw.y - node.offsetHeight,
left: bbox.nw.x - node.offsetWidth
}
}
function direction_ne() {
var bbox = getScreenBBox()
return {
top: bbox.ne.y - node.offsetHeight,
left: bbox.ne.x
}
}
function direction_sw() {
var bbox = getScreenBBox()
return {
top: bbox.sw.y,
left: bbox.sw.x - node.offsetWidth
}
}
function direction_se() {
var bbox = getScreenBBox()
return {
top: bbox.se.y,
        left: bbox.se.x
}
}
function initNode() {
var node = d3.select(document.createElement('div'))
node.style({
position: 'absolute',
top: 0,
opacity: 0,
'pointer-events': 'none',
'box-sizing': 'border-box'
})
return node.node()
}
function getSVGNode(el) {
el = el.node()
if(el.tagName.toLowerCase() === 'svg')
return el
return el.ownerSVGElement
}
function getNodeEl() {
if(node === null) {
node = initNode();
// re-add node to DOM
document.body.appendChild(node);
};
return d3.select(node);
}
// Private - gets the screen coordinates of a shape
//
// Given a shape on the screen, will return an SVGPoint for the directions
// n(north), s(south), e(east), w(west), ne(northeast), se(southeast), nw(northwest),
// sw(southwest).
//
// +-+-+
// | |
// + +
// | |
// +-+-+
//
// Returns an Object {n, s, e, w, nw, sw, ne, se}
function getScreenBBox() {
var targetel = target || d3.event.target;
      while ('undefined' === typeof targetel.getScreenCTM && 'undefined' !== typeof targetel.parentNode) {
targetel = targetel.parentNode;
}
var bbox = {},
matrix = targetel.getScreenCTM(),
tbbox = targetel.getBBox(),
width = tbbox.width,
height = tbbox.height,
x = tbbox.x,
y = tbbox.y
point.x = x
point.y = y
bbox.nw = point.matrixTransform(matrix)
point.x += width
bbox.ne = point.matrixTransform(matrix)
point.y += height
bbox.se = point.matrixTransform(matrix)
point.x -= width
bbox.sw = point.matrixTransform(matrix)
point.y -= height / 2
bbox.w = point.matrixTransform(matrix)
point.x += width
bbox.e = point.matrixTransform(matrix)
point.x -= width / 2
point.y -= height / 2
bbox.n = point.matrixTransform(matrix)
point.y += height
bbox.s = point.matrixTransform(matrix)
return bbox
}
return tip
};
}));
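// Illustrative usage sketch (selection names such as `svg` and `circles`
// are assumptions, not part of this file):
//
//   var tip = d3.tip()
//     .direction('n')
//     .offset([-10, 0])
//     .html(function(d) { return d.value; });
//   svg.call(tip);
//   circles.on('mouseover', tip.show).on('mouseout', tip.hide);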
|
PypiClean
|
/dbpedia_ent-0.1.9-py3-none-any.whl/dbpedia_ent/dto/syn/n2/t/rev_tr.py
|
d_rev_tr = {'tr-1700_submarine': 'tr-1700_class_submarine',
'tr-1a_dragonlady': 'lockheed_u-2',
'tr-85_m1': 'tr-85',
'tr100_list': 'technology_review',
'tr3s_lunas': 'tres_lunas',
'tr_069': 'tr-069',
'tr_1': 'tr1',
'tr_1700': 'tr-1700_class_submarine',
'tr_1736': 'aristolochic_acid',
'tr_1b': 'lockheed_u-2',
'tr_3a': 'tr3a',
'tr_ajayan': 't._r._ajayan',
'tr_allinson': 'thomas_allinson',
'tr_baalu': 't._r._baalu',
'tr_bryden': 't._r._bryden',
'tr_dunn': 't._r._dunn',
'tr_edwards': 't._r._edwards',
'tr_henn': 'thomas_rice_henn',
'tr_knight': 't._r._knight',
'tr_malthus': 'thomas_robert_malthus',
'tr_pearson': 't._r._pearson',
'tr_stockdale': 't._r._stockdale',
'tr_zeliang': 't._r._zeliang',
'tra_1986': 'tax_reform_act_of_1986',
'tra_86': 'tax_reform_act_of_1986',
'tra_jet': 'hyundai_trajet',
'tra_volta': 'john_travolta',
'trabancos_river': 'trabancos',
'trabant_sputnik': 'trabant',
'trabeated_building': 'post_and_lintel',
'trabeated_masonry': 'post_and_lintel',
'trabeculae_carnae': 'trabeculae_carneae',
'trabecular_artery': 'trabecular_arteries',
'trabecular_bone': 'cancellous_bone',
'trabecular_carcinoma': 'merkel_cell_cancer',
'trabecular_mesh': 'trabecular_meshwork',
'trabecular_tissue': 'trabecula',
'trabecular_vein': 'trabecular_veins',
'trabzon_spor': 'trabzonspor',
'tracadie-fort_augustus': 'tracadie-hillsborough_park',
'trace-class_operator': 'trace_class',
'trace_amines': 'trace_amine',
'trace_cache': 'cpu_cache',
'trace_caches': 'cpu_cache',
'trace_decay': 'forgetting',
'trace_diagrams': 'trace_diagram',
'trace_elements': 'trace_element',
'trace_form': 'algebraic_number_field',
'trace_fossils': 'trace_fossil',
'trace_gases': 'trace_gas',
'trace_italienne': 'star_fort',
'trace_language': 'trace_monoid',
'trace_lemma': 'trace_operator',
'trace_memory': 'another_code:_two_memories',
'trace_metals': 'trace_metal',
'trace_mineral': 'dietary_mineral',
'trace_minerals': 'dietary_mineral',
'trace_relation': 'trace_identity',
'trace_route': 'traceroute',
'trace_theorem': 'trace_operator',
'traceable_graph': 'hamiltonian_path',
'traceable_path': 'hamiltonian_path',
'tracee_ross': 'tracee_ellis_ross',
'tracer_bullets': 'tracer_ammunition',
'tracer_dye': 'dye_tracing',
'tracer_element': 'isotopic_labeling',
'tracer_fire': 'tracer_ammunition',
'tracer_milano': 'olimpia_milano',
'tracer_round': 'tracer_ammunition',
'tracer_rounds': 'tracer_ammunition',
'traceroute_program': 'traceroute',
'tracey-ann_oberman': 'tracy-ann_oberman',
'tracey_austin': 'tracy_austin',
'tracey_beaker': 'the_story_of_tracy_beaker',
'tracey_bond': 'tracy_bond',
'tracey_brabin': 'tracy_brabin',
'tracey_bregman': 'tracey_e._bregman',
'tracey_byrd': 'tracy_byrd',
'tracey_caulkins': 'tracy_caulkins',
'tracey_chapman': 'tracy_chapman',
'tracey_connelly': 'death_of_baby_p',
'tracey_cuesta': 'love_wrecked',
'tracey_edwards': 'tracy_edwards',
'tracey_fragments': 'the_tracey_fragments',
'tracey_grimshaw': 'tracy_grimshaw',
'tracey_jacobson': 'tracey_ann_jacobson',
'tracey_jordan': 'tracy_jordan',
'tracey_kelliher': 'tracey_k',
'tracey_kiber': 'tracey_kibre',
'tracey_lawrence': 'tracy_lawrence',
'tracey_lords': 'traci_lords',
'tracey_mann': 'tracy_mann',
'tracey_moffat': 'tracey_moffatt',
'tracey_morgan': 'tracy_morgan',
'tracey_pew': 'tracy_pew',
'tracey_philips': 'tracy_phillips',
'tracey_phillips': 'tracy_phillips',
'tracey_smothers': 'tracy_smothers',
'tracey_tarlton': 'celeste_beard',
'tracey_ulman': 'tracey_ullman',
'tracey_wilson': 'tracy_wilson',
'trach_tube': 'tracheostomy_tube',
'trachea/bronchus/lung_cancers': 'lung_cancer',
'trachea_injury': 'tracheobronchial_injury',
'trachea_malaysia': 'tracheomalacia',
'trachea_shave': 'chondrolaryngoplasty',
'trachea_shaving': 'chondrolaryngoplasty',
'trachea_umbrifacta': 'apamea_spaldingi',
'tracheal_artery': 'tracheal_branches_of_inferior_thyroid_artery',
'tracheal_branches': 'tracheal_branches_of_inferior_thyroid_artery',
'tracheal_disease': 'vertebrate_trachea',
'tracheal_diseases': 'vertebrate_trachea',
'tracheal_fracture': 'tracheobronchial_injury',
'tracheal_injury': 'tracheobronchial_injury',
'tracheal_intubation': 'intubation',
'tracheal_neoplasms': 'head_and_neck_cancer',
'tracheal_ring': 'tracheal_rings',
'tracheal_rupture': 'tracheobronchial_injury',
'tracheal_shave': 'chondrolaryngoplasty',
'tracheal_tear': 'tracheobronchial_injury',
'tracheal_trauma': 'tracheobronchial_injury',
'tracheal_tube': 'endotracheal_tube',
'tracheal_tubes': 'endotracheal_tube',
'tracheitis_bacterial': 'tracheitis',
'trachelochismus_melobesia': 'striped_clingfish',
'trachelochismus_pinnulatus': 'new_zealand_lumpfish',
'trachelophorus_giraffa': 'giraffe_weevil',
'trachemys_decorata': 'haitian_slider',
'trachemys_stejnegeri': 'central_antillean_slider',
'trachemys_terrapen': 'jamaican_slider',
'trachenburg_plan': 'trachenberg_plan',
'tracheo-bronchial_injury': 'tracheobronchial_injury',
'tracheo-bronchial_rupture': 'tracheobronchial_injury',
'tracheo-bronchial_trauma': 'tracheobronchial_injury',
'tracheo-esophageal_fistula': 'tracheoesophageal_fistula',
'tracheo-oesophageal_fistula': 'tracheoesophageal_fistula',
'tracheobroncheal_disruption': 'tracheobronchial_injury',
'tracheobronchial_disruption': 'tracheobronchial_injury',
'tracheobronchial_glands': 'tracheobronchial_lymph_nodes',
'tracheobronchial_laceration': 'tracheobronchial_injury',
'tracheobronchial_rupture': 'tracheobronchial_injury',
'tracheobronchial_tear': 'tracheobronchial_injury',
'tracheobronchial_trauma': 'tracheobronchial_injury',
'tracheoesophageal_ridges': 'tracheoesophageal_septum',
'tracheophorus_giraffa': 'giraffe_weevil',
'trachicaranx_tersus': 'trachicaranx',
'trachinus_araneus': 'spotted_weever',
'trachinus_draco': 'greater_weever',
'trachipterus_altivelis': 'king-of-the-salmon',
'trachodon_amurensis': 'mandschurosaurus',
'trachodon_mirabilis': 'trachodon',
'trachops_cirrhosus': 'fringe-lipped_bat',
'trachselwald_be': 'trachselwald',
'trachtenberg_method': 'trachtenberg_system',
'trachurus_declivis': 'greenback_horse_mackerel',
'trachurus_novaezelandiae': 'yellowtail_horse_mackerel',
'trachurus_symmetricus': 'pacific_jack_mackerel',
'trachurus_trachurus': 'atlantic_horse_mackerel',
'trachycaryon_klotzschii': 'adriana_quadripartita',
'trachycephalus_nigromaculatus': 'black-spotted_casque-headed_treefrog',
'trachycephalus_resinifictrix': 'amazon_milk_frog',
'trachycephalus_venulosus': 'rana_lechera_comun',
'trachycystis_clifdeni': 'dlinza_forest_pinwheel',
'trachydosaurus_rugosus': 'tiliqua_rugosa',
'trachylepis_seychellensis': 'seychelles_skink',
'trachylepis_vittata': 'bridled_mabuya',
'trachyphonus_erythrocephalus': 'red-and-yellow_barbet',
'trachyphonus_margaritatus': 'yellow-breasted_barbet',
'trachyphonus_purpuratus': 'yellow-billed_barbet',
'trachyphonus_usambiro': 'usambiro_barbet',
'trachyphonus_vaillantii': 'crested_barbet',
'trachyphrynus_myersi': 'eleutherodactylus_myersi',
'trachypithecus_auratus': 'javan_lutung',
'trachypithecus_barbei': 'tenasserim_lutung',
'trachypithecus_cristatus': 'silvery_lutung',
'trachypithecus_ebenus': 'indochinese_black_langur',
'trachypithecus_germaini': 'indochinese_lutung',
'trachypithecus_hatinhensis': 'hatinh_langur',
'trachypithecus_johnii': 'nilgiri_langur',
'trachypithecus_laotum': 'laotian_langur',
'trachypithecus_leucocephalus': 'white-headed_langur',
'trachypithecus_obscurus': 'dusky_leaf_monkey',
'trachypithecus_pileatus': 'capped_langur',
'trachypithecus_poliocephalus': 'white-headed_langur',
'trachypithecus_vetulus': 'purple-faced_langur',
'trachypithecus_villosus': 'silvery_lutung',
'trachypoma_macracanthus': 'toadstool_groper',
'trachypoma_marmoratum': 'eremophilus_mutisii',
'trachyrincus_longirostris': 'slender_unicorn_rattail',
'trachyspermum_ammi': 'ajwain',
'trachyte_texture': 'trachyte',
'traci_13': 'traci_thirteen',
'traci_abbott': 'traci_abbott_connolly',
'traci_braxton': 'the_braxtons',
'traci_brooks': 'tracy_brookshaw',
'traci_connelly': 'traci_abbott_connolly',
'traci_danielle': 'pepper_somerset',
'traci_guns': 'tracii_guns',
'tracie_lords': 'traci_lords',
'tracie_mcgrady': 'tracy_mcgrady',
'tracinda_corporation': 'tracinda',
'tracing_lines': 'tracing_lines_/_silent_cry',
'track-and-field_sports': 'track_and_field_athletics',
'track-two_diplomacy': 'track_ii_diplomacy',
'track-type_tractor': 'tracked_vehicle',
'track-type_tractors': 'bulldozer',
'track-while-scan_mode': 'track_while_scan',
'track7_games': 'track7',
'track_0': 'track0',
'track_athlete': 'track_and_field_athletics',
'track_ball': 'trackball',
'track_bar': 'panhard_rod',
'track_bike': 'track_bicycle',
'track_blinds': 'window_blind',
'track_circuits': 'track_circuit',
'track_cycling/keirin': 'keirin',
'track_cyclist': 'track_cycling',
'track_dating': 'fission_track_dating',
'track_duplication': 'double_track',
'track_follower': 'cam_follower',
'track_formation': 'land_grading',
'track_gain': 'replay_gain',
'track_gauge': 'rail_gauge',
'track_hoe': 'excavator',
'track_ii': 'project_fubelt',
'track_machines': 'rail_tracks',
'track_mania': 'trackmania',
'track_marks': 'drug_injection',
'track_marshal': 'motorsport_marshal',
'track_pants': 'sweatpants',
'track_phone': 'tracfone_wireless',
'track_point': 'pointing_stick',
'track_pump': 'bicycle_pump',
'track_repair': 'rail_tracks',
'track_rod': 'panhard_rod',
'track_runner': 'track_and_field_athletics',
'track_running': 'track_and_field_athletics',
'track_shoe': 'track_spikes',
'track_shoes': 'track_spikes',
'track_shorts': 'shorts',
'track_skidder': 'skidder',
'track_suit': 'tracksuit',
'track_tester': 'track_geometry_car',
'track_warrants': 'track_warrant',
'trackback_spam': 'sping',
'trackback_uri': 'trackback',
'trackback_url': 'trackback',
'tracked_vehicles': 'tracked_vehicle',
'tracker_modules': 'module_file',
'tracker_music': 'tracker',
'tracking_bracelet': 'ankle_monitor',
'tracking_bug': 'web_bug',
'tracking_cookie': 'http_cookie',
'tracking_cookies': 'http_cookie',
'tracking_device': 'tracking_system',
'tracking_dots': 'printer_steganography',
'tracking_pixel': 'web_bug',
'tracking_station': 'earth_station',
'trackless_trolley': 'trolleybus',
'trackless_trolleys': 'trolleybus',
'trackmania_nations': 'trackmania',
'trackmania_sunrise': 'trackmania',
'trackmaster_entertainment': 'trackmasters',
'trackmasters_records': 'trackmasters',
'tracksuit_bottoms': 'sweatpants',
'tracky_dacks': 'sweatpants',
'tracor_aerospace': 'tracor',
'tracor_aviation': 'tracor',
'tracor_inc.': 'tracor',
'tract_developements': 'tract_housing',
'tract_home': 'tract_housing',
'tract_house': 'tract_housing',
'tract_indexes': 'tract_index',
'tract_mansion': 'mcmansion',
'tract_xc': 'tract_90',
'tractarian_movement': 'oxford_movement',
'tractate_kiddushin': 'nashim',
'tractatus_armoricani': 'armorica',
'tractatus_theologico-politicus': 'theologico-political_treatise',
'traction_circle': 'circle_of_forces',
'traction_cities': 'traction_city',
'traction_current': 'railway_electrification_system',
'traction_drive': 'traction_motor',
'traction_drives': 'traction_motor',
'traction_kite': 'power_kite',
'traction_motors': 'traction_motor',
'traction_trainee': 'secondman',
'traction_unit': 'tractor_unit',
'tractive_effort': 'tractive_force',
'tractor-feed_paper': 'continuous_stationery',
'tractor-trailer_truck': 'semi-trailer_truck',
'tractor_aircraft': 'tractor_configuration',
'tractor_barn': 'functionally_classified_barn',
'tractor_boys': 'ipswich_town_f.c.',
'tractor_club': 'tractor_sazi_f.c.',
'tractor_crawler': 'tracked_vehicle',
'tractor_eicher': 'eicher_tractor',
'tractor_feed': 'continuous_stationery',
'tractor_ferry': 'sea_tractor',
'tractor_man': 'dwight_watson',
'tractor_plants': 'concern_tractor_plants',
'tractor_poker': 'bashi_fen',
'tractor_propeller': 'tractor_configuration',
'tractor_pull': 'tractor_pulling',
'tractor_safety': 'tractor',
'tractor_station': 'machine_and_tractor_station',
'tractor_supply': 'tractor_supply_company',
'tractor_trailer': 'semi-trailer_truck',
'tractor_trailers': 'semi-trailer_truck',
'tractor_traylor': 'robert_traylor',
'tractorfeed_paper': 'continuous_stationery',
'tractus_corticospinalis': 'corticospinal_tract',
'tractus_iliotibialis': 'iliotibial_tract',
'tractus_olfactorius': 'olfactory_tract',
'tractus_solitarius': 'solitary_tract',
'tractus_spinothalamicus': 'spinothalamic_tract',
'tracy_airport': 'tracy_municipal_airport',
'tracy_beaker': 'the_story_of_tracy_beaker',
'tracy_bingham': 'traci_bingham',
'tracy_bocage': 'tracy-bocage',
'tracy_brooks': 'tracy_brookshaw',
'tracy_cropper': 'tracy_barlow',
'tracy_curry': 'the_d.o.c.',
'tracy_deneault': 'gg_allin',
'tracy_draco': 'tracy_bond',
'tracy_edmonds': 'tracey_edmonds',
'tracy_emin': 'tracey_emin',
'tracy_gold': 'tracey_gold',
'tracy_grijalva': 'tracy_g',
'tracy_gross': 'tracy_grose',
'tracy_heard': 'tracy_maxwell_heard',
'tracy_jacks': 'parklife',
'tracy_kelusky': 'tracey_kelusky',
'tracy_kibre': 'tracey_kibre',
'tracy_langton': 'tracy_barlow',
'tracy_latimer': 'robert_latimer',
'tracy_lett': 'tracy_letts',
'tracy_lewis': 'tracy_r._lewis',
'tracy_lords': 'traci_lords',
'tracy_lynn': 'liz_braswell',
'tracy_marrow': 'ice-t',
'tracy_mcguigan': 'tracy_kavanagh',
'tracy_moffat': 'tracey_moffatt',
'tracy_perkins': 'tracy_e._perkins',
'tracy_preston': 'tracy_barlow',
'tracy_proust': 'tracy_poust',
'tracy_quartermaine-spencer': 'tracy_quartermaine',
'tracy_quartermaine-williams': 'tracy_quartermaine',
'tracy_schwarz': 'ginny_hawker_and_tracy_schwarz',
'tracy_seaward': 'tracey_seaward',
'tracy_seyfert': 'r._tracy_seyfert',
'tracy_spencer': 'tracy_quartermaine',
'tracy_sucato': 'robert_sucato',
'tracy_thorn': 'tracey_thorn',
'tracy_turnblad': 'hairspray',
'tracy_ullman': 'tracey_ullman',
'tracy_ulman': 'tracey_ullman',
'tracy_vorhees': 'tracy_voorhees',
'tracy_wainman': 'tracey_wainman',
'tracy_walker': 'r._tracy_walker',
'tracy_watson': 'tracy_hutson',
'tracy_worcester': 'tracy_louise_ward',
'trad._arr': 'trad._arr.',
'trad._ch.': 'traditional_chinese_characters',
'trad_arr': 'trad._arr.',
'trad_climbing': 'traditional_climbing',
'trad_goth': 'goth_subculture',
'trad_metal': 'traditional_heavy_metal',
'trad_music': 'traditional_music',
'trad_province': 'trat_province',
'tradable_allowance': 'european_union_emission_trading_scheme',
'tradable_good': 'tradable',
'tradable_goods': 'tradable',
'tradable_service': 'tradable',
'tradable_services': 'tradable',
'trade-off_theory': 'trade-off_theory_of_capital_structure',
'trade_act': 'trade_act_of_1974',
'trade_agreement': 'trade_pact',
'trade_agreements': 'trade_pact',
'trade_associations': 'trade_association',
'trade_balance': 'balance_of_trade',
'trade_barriers': 'trade_barrier',
'trade_beads': 'slave_beads',
'trade_block': 'trade_bloc',
'trade_blocks': 'trade_bloc',
'trade_brand': 'brand',
'trade_cards': 'trade_card',
'trade_center': 'world_trade_center',
'trade_certification': 'professional_certification',
'trade_convention': 'trade_fair',
'trade_costs': 'trade_facilitation_and_development',
'trade_currency': 'trade_coin',
'trade_cycle': 'business_cycle',
'trade_deficit': 'balance_of_trade',
'trade_deficits': 'balance_of_trade',
'trade_discount': 'discounts_and_allowances',
'trade_embargo': 'embargo',
'trade_fairs': 'trade_fair',
'trade_financing': 'trade_finance',
'trade_gap': 'balance_of_payments',
'trade_group': 'trade_association',
'trade_guild': 'guild',
'trade_hardcover': 'hardcover',
'trade_ideas': 'trade_idea',
'trade_language': 'pidgin',
'trade_law': 'international_trade_law',
'trade_libel': 'defamation',
'trade_liberalisation': 'free_trade',
'trade_liberalization': 'free_trade',
'trade_loading': 'channel_stuffing',
'trade_magazine': 'trade_journal',
'trade_mark': 'trademark',
'trade_marks': 'trademark',
'trade_me': 'trademe',
'trade_minister': 'minister_of_state_for_trade',
'trade_names': 'trade_name',
'trade_obstacle': 'trade_restriction',
'trade_off': 'trade-off',
'trade_organisation': 'trade_association',
'trade_organization': 'trade_association',
'trade_policy': 'trade',
'trade_post': 'trading_post',
'trade_protectionism': 'protectionism',
'trade_publication': 'trade_journal',
'trade_representative': 'office_of_the_united_states_trade_representative',
'trade_routes': 'trade_route',
'trade_sanction': 'economic_sanctions',
'trade_sanctions': 'economic_sanctions',
'trade_school': 'vocational_school',
'trade_schools': 'vocational_school',
'trade_secrecy': 'trade_secret',
'trade_secrets': 'trade_secret',
'trade_show': 'trade_fair',
'trade_shows': 'trade_fair',
'trade_studies': 'trade_study',
'trade_submarine': 'merchant_submarine',
'trade_submarines': 'merchant_submarine',
'trade_surplus': 'balance_of_trade',
'trade_tech': 'los_angeles_trade-technical_college',
'trade_treaty': 'trade_pact',
'trade_unionism': 'trade_union',
'trade_unionist': 'trade_union',
'trade_unionists': 'trade_union',
'trade_unions': 'trade_union',
'trade_winds': 'trade_wind',
'traded_publicly': 'public_company',
'trademark_agent': 'trademark_attorney',
'trademark_cases': 'trade-mark_cases',
'trademark_examiners': 'trademark_examiner',
'trademark_genericization': 'genericized_trademark',
'trademark_law': 'trademark',
'trademark_properties': 'richard_c._davis',
'trademark_registration': 'trademark',
'trademark_violation': 'trademark_infringement',
'trader_cliff': 'cliff_fletcher',
'trader_ethic': 'jane_jacobs',
'trader_lane': 'frank_lane',
'trader_syndrome': 'jane_jacobs',
'traders_hotel': 'shangri-la_hotels_and_resorts',
'trades_council': 'labour_council',
'trades_union': 'trade_union',
'trades_unions': 'trade_union',
'tradescantia_virginica': 'tradescantia',
'tradestation_software': 'tradestation',
'tradewater_railway': 'western_kentucky_railway',
'tradica_sebifera': 'chinese_tallow',
'tradicionalist_communion': 'carlism',
'tradigital_animation': 'traditional_animation',
'trading_algorithm': 'algorithmic_trading',
'trading_as': 'doing_business_as',
'trading_band': 'currency_band',
'trading_bloc': 'trade_bloc',
'trading_cards': 'trading_card',
'trading_collar': 'trading_curb',
'trading_company': 'joint_stock_company',
'trading_curbs': 'trading_curb',
'trading_deadline': 'trade_deadline',
'trading_efficiency': 'tradewars_2002',
'trading_estate': 'industrial_park',
'trading_figure': 'gashapon',
'trading_floor': 'open_outcry',
'trading_house': 'joint_stock_company',
'trading_idea': 'alpha_capture_system',
'trading_indicator': 'technical_indicator',
'trading_licks': 'trading_blows',
'trading_name': 'trade_name',
'trading_network': 'trade',
'trading_option': 'options_strategies',
'trading_options': 'options_strategies',
'trading_pit': 'stock_market',
'trading_posts': 'trading_post',
'trading_route': 'trade_route',
'trading_routes': 'trade_route',
'trading_scheme': 'emissions_trading',
'trading_shots': 'trading_blows',
'trading_spread': 'options_spread',
'trading_spreads': 'options_spread',
'trading_stamps': 'trading_stamp',
'trading_standards': 'trading_standards_institute',
'trading_strategies': 'trading_strategy',
'trading_swats': 'trading_blows',
'trading_system': 'algorithmic_trading',
'trading_turret': 'electronic_trading_platform',
'trading_yesterday': 'the_age_of_information',
'traditio_apostolica': 'apostolic_tradition',
'traditional-economic_term': 'tradition',
'traditional_adventism': 'historic_adventism',
'traditional_adventist': 'historic_adventism',
'traditional_adventists': 'historic_adventism',
'traditional_airline': 'legacy_carrier',
'traditional_ale': 'ale',
'traditional_anarchism': 'anarchism',
'traditional_architecture': 'vernacular_architecture',
'traditional_art': 'folk_art',
'traditional_catholic': 'traditionalist_catholic',
'traditional_catholicism': 'traditionalist_catholic',
'traditional_catholics': 'traditionalist_catholic',
'traditional_character': 'traditional_chinese_characters',
'traditional_characters': 'traditional_chinese_characters',
'traditional_chinese': 'traditional_chinese_characters',
'traditional_christianity': 'conservative_christianity',
'traditional_clothing': 'national_costume',
'traditional_conservatism': 'traditionalist_conservatism',
'traditional_conservative': 'paleoconservatism',
'traditional_costume': 'national_costume',
'traditional_counties': 'association_of_british_counties',
'traditional_county': 'association_of_british_counties',
'traditional_dance': 'folk_dance',
'traditional_dates': 'japanese_era_name',
'traditional_domination': 'traditional_authority',
'traditional_dress': 'national_costume',
'traditional_football': 'medieval_football',
'traditional_fry-up': 'full_breakfast',
'traditional_games': 'traditional_game',
'traditional_garment': 'national_costume',
'traditional_hanzi': 'traditional_chinese_characters',
'traditional_healer': 'traditional_medicine',
'traditional_history': 'twenty-four_histories',
'traditional_hockey': 'roller_hockey',
'traditional_instrument': 'folk_instrument',
'traditional_instruments': 'folk_instrument',
'traditional_islam': 'cultural_muslim',
'traditional_jazz': 'trad_jazz',
'traditional_jew': 'conservadox_judaism',
'traditional_judaism': 'conservadox_judaism',
'traditional_kendo': 'kendo',
'traditional_leaders': 'traditional_authority',
'traditional_liberalism': 'classical_liberalism',
'traditional_logic': 'term_logic',
'traditional_media': 'old_media',
'traditional_medicines': 'traditional_medicine',
'traditional_metal': 'traditional_heavy_metal',
'traditional_metaphysics': 'traditionalist_school',
'traditional_method': 'sparkling_wine_production',
'traditional_musics': 'folk_music',
'traditional_muslim': 'sunni_islam',
'traditional_planet': 'classical_planet',
'traditional_planets': 'classical_planet',
'traditional_pop': 'traditional_pop_music',
'traditional_publishing': 'publishing',
'traditional_remedy': 'traditional_medicine',
'traditional_rods': 'hot_rod',
'traditional_satanism': 'theistic_satanism',
'traditional_school': 'traditionalist_school',
'traditional_skinhead': 'trojan_skinhead',
'traditional_skinheads': 'trojan_skinhead',
'traditional_song': 'traditional_music',
'traditional_songs': 'traditional_music',
'traditional_state': 'traditional_authority',
'traditional_unix': 'unix',
'traditional_vehicle': 'internal_combustion_engine',
'traditional_wrestling': 'wrestling',
'traditionalist_catholicism': 'traditionalist_catholic',
'traditionalist_catholics': 'traditionalist_catholic',
'traditionalist_hellenismos': 'hellenic_polytheism',
'traditionalist_sunni': 'sunni_islam',
'traditionalists_catholic': 'traditionalist_catholic',
'traditionally_animated': 'traditional_animation',
'traditonal_metal': 'heavy_metal_music',
'traduttore_traditore': 'untranslatability',
'trae_discography': 'trae',
'traena_festival': 'traena_music_festival',
'trafalfar_st': 'trafalgar_st',
'trafalgar_bridge': 'sa1_swansea_waterfront',
'trafalgar_road': 'a206_road',
'trafalgar_way': 'the_trafalgar_way',
'trafalger_square': 'trafalgar_square',
'traffic-flow_security': 'traffic_analysis',
'traffic_accident': 'traffic_collision',
'traffic_accidents': 'traffic_collision',
'traffic_agent': 'bylaw_enforcement_officer',
'traffic_assignment': 'route_assignment',
'traffic_backup': 'traffic_congestion',
'traffic_checking': 'burnout_revenge',
'traffic_circles': 'traffic_circle',
'traffic_citation': 'traffic_ticket',
'traffic_collisions': 'traffic_collision',
'traffic_cones': 'traffic_cone',
'traffic_controller': 'traffic_guard',
'traffic_crash': 'traffic_collision',
'traffic_de-sign': 'traffic_sign_design',
'traffic_death': 'automobile_safety',
'traffic_deaths': 'traffic_collision',
'traffic_department': 'traffic_department_2192',
'traffic_direction': 'right-_and_left-hand_traffic',
'traffic_directionality': 'right-_and_left-hand_traffic',
'traffic_england': 'highways_agency',
'traffic_exchanges': 'traffic_exchange',
'traffic_fatalities': 'traffic_collision',
'traffic_forecast': 'transportation_forecasting',
'traffic_forecasting': 'transportation_forecasting',
'traffic_generation': 'traffic_generation_model',
'traffic_generator': 'traffic_generation_model',
'traffic_information': 'traffic_reporting',
'traffic_infraction': 'traffic_ticket',
'traffic_jam': 'traffic_congestion',
'traffic_jams': 'traffic_congestion',
'traffic_lane': 'lane',
'traffic_law': 'traffic',
'traffic_laws': 'traffic',
'traffic_lights': 'traffic_light',
'traffic_offences': 'regulatory_offences',
'traffic_pattern': 'airfield_traffic_pattern',
'traffic_planning': 'transportation_planning',
'traffic_pole': 'bollard',
'traffic_pulse': 'traffic.com',
'traffic_radar': 'radar_gun',
'traffic_ramasamy': 'traffic_ramaswamy',
'traffic_report': 'traffic_reporting',
'traffic_reporter': 'traffic_reporting',
'traffic_reporters': 'traffic_reporting',
'traffic_reports': 'traffic_reporting',
'traffic_rotary': 'traffic_circle',
'traffic_rules': 'traffic',
'traffic_safety': 'road_traffic_safety',
'traffic_semaphore': 'traffic_light',
'traffic_shock': 'traffic_wave',
'traffic_signal': 'traffic_light',
'traffic_signals': 'traffic_light',
'traffic_signs': 'traffic_sign',
'traffic_speed': 'traffic_flow',
'traffic_sports': 'traffic_sports_marketing',
'traffic_taming': 'traffic_calming',
'traffic_tickets': 'traffic_ticket',
'traffic_totty': 'wake_up_to_wogan',
'traffic_victims': 'roadkill',
'traffic_violations': 'traffic_violations_reciprocity',
'traffic_wardens': 'traffic_warden',
'traffic_waves': 'traffic_wave',
'trafficking_australia': 'human_trafficking_in_australia',
'trafficking_protein': 'transport_protein',
'trafford_borough': 'blackpool_borough',
'trafford_center': 'trafford_centre',
'trafford_fc': 'trafford_f.c.',
'trafic_jam': 'traffic_congestion',
'trafic_musique': 'trafic.musique',
'trafigura_ag': 'trafigura',
'trafigura_beheer': 'trafigura',
'trafigura_ltd.': 'trafigura',
'tragacanth_gum': 'tragacanth',
'tragal_pumping': 'tragal_pressure',
'tragelaphus_angasii': 'nyala',
'tragelaphus_buxtoni': 'mountain_nyala',
'tragelaphus_imberbis': 'lesser_kudu',
'tragelaphus_imberbius': 'lesser_kudu',
'tragelaphus_oryx': 'common_eland',
'tragelaphus_scriptus': 'bushbuck',
'tragelaphus_spekeii': 'sitatunga',
'tragelaphus_spekii': 'sitatunga',
'tragelaphus_strepsiceros': 'greater_kudu',
'tragelphus_scriptus': 'bushbuck',
'tragic_ballad': 'murder_ballad',
'tragic_character': 'tragic_hero',
'tragic_comedy': 'tragicomedy',
'tragic_drama': 'tragedy',
'tragic_flaw': 'hamartia',
'tragic_heroes': 'just_heroes',
'tragic_mulatta': 'tragic_mulatto',
'tragic_play': 'tragedy',
'tragic_pleiad': 'alexandrian_pleiad',
'tragic_theater': 'tragedy',
'tragic_theatre': 'tragedy',
'tragic_villain': 'villain',
'tragically_hip': 'the_tragically_hip',
'tragico_ritorno': 'tragic_return',
'tragopan_dorsigris': 'western_tragopan',
'tragopan_melanocephalus': 'western_tragopan',
'tragopan_occidental': 'western_tragopan',
'tragopan_satiro': 'satyr_tragopan',
'tragopan_satyra': 'satyr_tragopan',
'tragopan_satyre': 'satyr_tragopan',
'tragopogon_pratensis': 'meadow_salsify',
'tragulus_javanicus': 'java_mouse-deer',
'tragulus_kanchil': 'lesser_mouse-deer',
'tragulus_napu': 'greater_mouse-deer',
'tragulus_nigricans': 'philippine_mouse-deer',
'tragulus_versicolor': 'vietnam_mouse-deer',
'tragus_ltd': 'tragus_ltd.',
'tragus_pratensis': 'festuca_pratensis',
'traian_demetrescu-tradem': 'traian_demetrescu',
'traiano_boccalini': 'trajano_boccalini',
'traicho_kostov': 'traycho_kostov',
'trail-blazing_system': 'trail_blazing',
'trail_beyond': 'the_trail_beyond',
'trail_blaze': 'trail_blazing',
'trail_blazer': 'trailblazer',
'trail_blazers': 'portland_trail_blazers',
'trail_blazes': 'trail_blazing',
'trail_camera': 'game_camera',
'trail_construction': 'trail',
'trail_dust': 'operation_ranch_hand',
'trail_engineering': 'trail',
'trail_groomer': 'snow_grooming',
'trail_horse': 'trail_riding',
'trail_lake': 'trail_lakes',
'trail_maintenance': 'trail',
'trail_marker': 'trail_blazing',
'trail_mixes': 'trail_mix',
'trail_motorsports': 'trail_motorsport',
'trail_o': 'trail_orienteering',
'trail_ride': 'trail_riding',
'trail_rushing': 'freestyle_walking',
'trailable_yacht': 'trailer_sailer',
'trailer-train_company': 'ttx_company',
'trailer_1/4-ton': 'jeep_trailer',
'trailer_guy': 'don_lafontaine',
'trailer_home': 'mobile_home',
'trailer_homes': 'mobile_home',
'trailer_parks': 'trailer_park',
'trailer_sailor': 'trailer_sailer',
'trailer_stability': 'trailer_stability_program',
'trailer_tan': 'tan_lines',
'trailer_wheel': 'drive_wheel',
'trailer_yacht': 'trailer_sailer',
'trailerpark_boys': 'trailer_park_boys',
'trailing-throttle_oversteer': 'lift-off_oversteer',
'trailing_abutilon': 'abutilon_megapotamicum',
'trailing_arbutus': 'epigaea_repens',
'trailing_axle': 'trailing_wheel',
'trailing_bellflower': 'campanula_poscharskyana',
'trailing_bittersweet': 'solanum_dulcamara',
'trailing_bittersweets': 'solanum_dulcamara',
'trailing_lantana': 'lantana_montevidensis',
'trailing_lobelia': 'lobelia_erinus',
'trailing_lotus': 'lotus_berthelotii',
'trailing_nightshade': 'solanum_dulcamara',
'trailing_nightshades': 'solanum_dulcamara',
'trailing_p/e': 'p/e_ratio',
'trailing_pole': 'poles_of_astronomical_bodies',
'trailing_shrubverbena': 'lantana_montevidensis',
'trailing_truck': 'trailing_wheel',
'trailing_vortex': 'wingtip_vortices',
'trailing_vortices': 'wingtip_vortices',
'trailing_wheels': 'trailing_wheel',
'trailing_wire': 'trailing_cone',
'trailing_zeroes': 'trailing_zero',
'trailing_zeros': 'trailing_zero',
'trailinga_swami': 'trailanga_swami',
'traill_o': 'traill_island',
'trailor_park': 'trailer_park',
'trailor_trash': 'trailer_trash',
'train-and-equip_program': 'georgia_train_and_equip_program',
'train_accident': 'train_wreck',
'train_bumper': 'buffer_stop',
'train_car': 'railroad_car',
'train_category': 'train_categories_in_europe',
'train_chasing': 'railfan',
'train_collision': 'train_wreck',
'train_companies': 'train_operating_company',
'train_crash': 'train_wreck',
'train_crashes': 'train_wreck',
'train_crossing': 'level_crossing',
'train_depot': 'train_station',
'train_derailment': 'derailment',
'train_disaster': 'train_wreck',
'train_dispatching': 'train_dispatcher',
'train_driver': 'railroad_engineer',
'train_drivers': 'railroad_engineer',
'train_driving': 'railroad_engineer',
'train_electrification': 'railway_electrification_system',
'train_guy': 'densha_otoko',
'train_halt': 'train_station',
'train_headcode': 'train_reporting_number',
'train_hopping': 'freighthopping',
'train_horns': 'train_horn',
'train_job': 'the_train_job',
'train_jumping': 'freighthopping',
'train_man': 'densha_otoko',
'train_oil': 'whale_oil',
'train_operating': 'railroad_engineer',
'train_operation': 'railroad_engineer',
'train_operator': 'railroad_engineer',
'train_operators': 'railroad_engineer',
'train_order': 'train_order_operation',
'train_platform': 'railway_platform',
'train_robber': 'train_robbery',
'train_robberies': 'train_robbery',
'train_schedule': 'public_transport_timetable',
'train_signal': 'railway_signal',
'train_signals': 'railway_signal',
'train_sim': 'train_simulator',
'train_simulation': 'train_simulator',
'train_simulators': 'train_simulator',
'train_spotter': 'railfan',
'train_spotters': 'railfan',
'train_stations': 'train_station',
'train_surf': 'train_surfing',
'train_terminology': 'glossary_of_rail_terminology',
'train_timetable': 'public_transport_timetable',
'train_track': 'rail_tracks',
'train_tracks': 'train_track',
'train_train': 'train-train',
'train_transport': 'rail_transport',
'train_transportation': 'rail_transport',
'train_type': 'train_categories_in_europe',
'train_watching': 'railfan',
'train_wrecks': 'train_wreck',
'train_yard': 'classification_yard',
'trained_animal': 'animal_training',
'trained_band': 'trainband',
'trained_bands': 'trainband',
'trainer_bra': 'training_bra',
'trainer_socks': 'sock',
'trainer_toolkit': 'datel_action_replay',
'traingle_trade': 'triangular_trade',
'training_analyst': 'training_analysis',
'training_area': 'proving_ground',
'training_base': 'military_education_and_training',
'training_boat': 'training_ship',
'training_bras': 'brassiere',
'training_centre': 'training',
'training_cruiser': 'cruiser',
'training_excavations': 'training_excavation',
'training_exercise': 'military_exercise',
'training_facilitator': 'facilitator',
'training_fin': 'swimfin',
'training_group': 'training_group_raf',
'training_manuals': 'training_manual',
'training_medley': 'the_pick_of_destiny',
'training_pant': 'diaper',
'training_pants': 'diaper',
'training_provider': 'training',
'training_stamp': 'test_stamp',
'training_teachers': 'teacher_education',
'training_vessel': 'training_ship',
'training_wheel': 'training_wheels',
'trainmaster_jr.': 'fm_h-16-44',
'trainmaster_junior': 'fm_h-16-44',
'trait_anger': 'charles_spielberger',
'trait_anxiety': 'anxiety',
'trait_class': 'traits_class',
'trait_selection': 'designer_baby',
'traitorous_8': 'traitorous_eight',
'trajan_decius': 'decius',
'trajectory_hermeneutics': 'biblical_hermeneutics',
'trak_auto': 'advance_auto_parts',
'trakai_district': 'trakai_district_municipality',
'trakai_palatinate': 'trakai_voivodeship',
'trakai_voivodship': 'trakai_voivodeship',
'trakhtemyrivskyi_monastery': 'trakhtemyrov_monastery',
'trakia_highway': 'trakiya_motorway',
'trakia_plovdiv': 'pfc_botev_plovdiv',
'trakky_daks': 'tracksuit',
'trakt_krolewski': 'royal_route',
'traktar_stadium': 'traktor_stadium',
'traktor_lyubertsy': 'fc_lyubertsy-korenyovo_lyubertsy',
'traktor_minsk': 'fc_traktor_minsk',
'traktor_pavlodar': 'fc_irtysh_pavlodar',
'traktor_stalingrad': 'fc_rotor_volgograd',
'traktor_vladimir': 'fc_torpedo_vladimir',
'traktor_volgograd': 'fc_rotor_volgograd',
'traktorozavodski_district': 'traktorozavodsky_city_district',
'traktorozavodski_raion': 'traktorozavodsky_city_district',
'traktorozavodskii_district': 'traktorozavodsky_city_district',
'traktorozavodskii_raion': 'traktorozavodsky_city_district',
'traktorozavodskiy_district': 'traktorozavodsky_city_district',
'traktorozavodskiy_raion': 'traktorozavodsky_city_district',
'traktorozavodsky_district': 'traktorozavodsky_city_district',
'traktorozavodsky_raion': 'traktorozavodsky_city_district',
'traku_voke': 'paneriai',
'tralee_dynamos': 'tralee_dynamos_f.c.',
'tralee_f.c.': 'tralee_dynamos_f.c.',
'traleg_rinpoche': 'traleg_kyabgon_rinpoche',
'tralflotovets_murmansk': 'fc_sever_murmansk',
'tram_2000': 'tram_2000_zurich',
'tram_car': 'tram',
'tram_cars': 'tram',
'tram_kak': 'tram_kak_district',
'tram_lane': 'bus_lane',
'tram_network': 'tram',
'tram_shed': 'train_shed',
'tram_spotter': 'railfan',
'tram_station': 'tram_stop',
'tram_stops': 'tram_stop',
'tram_system': 'tram',
'tram_track': 'tramway_track',
'tram_way': 'tramway',
'tramadol_hcl': 'tramadol',
'tramadol_hydrochloride': 'tramadol',
'tramagetic_od': 'tramadol',
'tramagetic_retard': 'tramadol',
'tramar_dillard': 'flo_rida',
'tramayne_thompson': 'shade_sheist',
'tramea_lacerata': 'black_saddlebags',
'tramelan_be': 'tramelan',
'trametes_acuta': 'coriolopsis_floccosa',
'trametes_annosa': 'heterobasidion_annosum',
'trametes_captiosa': 'coriolopsis_floccosa',
'trametes_carnea': 'fomitopsis_rosea',
'trametes_carteri': 'coriolopsis_floccosa',
'trametes_connata': 'oxyporus_populinus',
'trametes_elegans': 'lenzites_elegans',
'trametes_evolutus': 'rigidoporus_microporus',
'trametes_felipponei': 'coriolopsis_floccosa',
'trametes_floccosa': 'coriolopsis_floccosa',
'trametes_fraxinea': 'perenniporia_fraxinea',
'trametes_griseodurus': 'trametes_nivosa',
'trametes_hydnoides': 'hexagonia_hydnoides',
'trametes_lactea': 'irpex_lacteus',
'trametes_latemarginata': 'oxyporus_latemarginatus',
'trametes_lignea': 'fomes_meliae',
'trametes_limitata': 'rigidoporus_microporus',
'trametes_medulla-panis': 'perenniporia_medulla-panis',
'trametes_microstomus': 'rigidoporus_lineatus',
'trametes_ocellata': 'hexagonia_hydnoides',
'trametes_populina': 'oxyporus_populinus',
'trametes_porioides': 'trametes_hirsuta',
'trametes_protea': 'coriolopsis_floccosa',
'trametes_radiciperda': 'heterobasidion_annosum',
'trametes_rigida': 'coriolopsis_floccosa',
'trametes_rosea': 'fomitopsis_rosea',
'trametes_roseozonata': 'fomitopsis_cajanderi',
'trametes_sanguinea': 'pycnoporus_sanguineus',
'trametes_secretanii': 'oxyporus_populinus',
'trametes_semitosta': 'rigidoporus_microporus',
'trametes_sordida': 'gloeophyllum_trabeum',
'trametes_spraguei': 'fomitopsis_spraguei',
'trametes_submurina': 'fomes_meliae',
'trametes_subnivosa': 'fomes_meliae',
'trametes_subrosea': 'fomitopsis_cajanderi',
'trametes_trabea': 'gloeophyllum_trabeum',
'trametes_ungulata': 'trametes_nivosa',
'trametes_unicolor': 'spongipellis_unicolor',
'trametes_ursina': 'hexagonia_hydnoides',
'trametes_verrucosohirtus': 'hexagonia_hydnoides',
'trametes_vincta': 'rigidoporus_vinctus',
'trammel_crow': 'trammell_crow',
'tramp_freighter': 'tramp_trade',
'tramp_freighters': 'tramp_trade',
'tramp_ship': 'tramp_trade',
'tramp_stamp': 'lower_back_tattoo',
'tramp_stamps': 'lower_back_tattoo',
'tramp_steamer': 'tramp_trade',
'tramp_steamers': 'tramp_trade',
'trampled_underfoot': 'trampled_under_foot',
'trampling_fetish': 'crush_fetish',
'trampling_fetishism': 'crush_fetish',
'trampling_pornography': 'crush_fetish',
'trampoline_wrestling': 'backyard_wrestling',
'tramser_lake': 'tramser_see',
'tramtrack_croydon': 'tramlink',
'tramwaje_warszawskie': 'warsaw_tramway',
'tramway_enthusiast': 'railfan',
'tramway_lyon': 'tramways_in_lyon',
'tramway_tracks': 'tramway_track',
'tramways_union': 'australian_rail_tram_and_bus_industry_union',
'tran-duc_thao': 'tran_duc_thao',
'tran-siberian_railway': 'trans-siberian_railway',
'tran_cao': 'tran_cao_rebellion',
'tran_duc-thao': 'tran_duc_thao',
'tran_dung': 'tran_huu_dung',
'trancarpathian_oblast': 'zakarpattia_oblast',
'trance_channelling': 'mediumship',
'trance_medium': 'mediumship',
'trance_team': 'crash_nitro_kart',
'trancelucent_records': 'trancelucent_productions',
'trancendental_meditation': 'transcendental_meditation',
'tranceport_series': 'tranceport',
'trane_inc.': 'trane',
'tranemo_kommun': 'tranemo_municipality',
'tranent_fc': 'tranent_f.c.',
'trang_chinh': 'main_page',
'trang_ho': 'sinedu_tadesse',
'trangia_ab': 'trangia',
'trangia_stove': 'trangia',
'trango_tower': 'trango_towers',
'tranmere_rovers': 'tranmere_rovers_f.c.',
'trannie_chaser': 'transfan',
'tranny_chaser': 'transfan',
'tranny_chasers': 'transfan',
'tranny_town': 'trans_world',
'tranparent_alumina': 'corundum',
'tranquility_principle': 'bell-la_padula_model',
'tranquilizer_dart': 'tranquilliser_gun',
'tranquilizer_drugs': 'sedative',
'tranquilizer_gun': 'tranquilliser_gun',
'tranquilizing_agents': 'sedative',
'tranquilizing_drug': 'sedative',
'tranquillity_base': 'tranquility_base',
'tranquillus_suetonius': 'suetonius',
'tranquilo_cappozzo': 'tranquilo_capozzo',
'tranqulino_luna': 'tranquilino_luna',
'trans-2-butenoic_acid': 'crotonic_acid',
'trans-8-trans-10-cis-12-octadecatrienoic_acid': 'calendic_acid',
'trans-acting_factor': 'trans-acting',
'trans-acting_factors': 'trans-acting',
'trans-adriatic_pipeline': 'trans_adriatic_pipeline',
'trans-african_highway': 'trans-african_highway_network',
'trans-alaska_pipeline': 'trans-alaska_pipeline_system',
'trans-alaskan_pipeline': 'trans-alaska_pipeline_system',
'trans-am_championship': 'trans-am_series',
'trans-amazon_highway': 'trans-amazonian_highway',
'trans-antarctic_mountains': 'transantarctic_mountains',
'trans-appalachian_west': 'trans-appalachia',
'trans-atlantic_cabe': 'transatlantic_telegraph_cable',
'trans-atlantic_cable': 'transatlantic_telegraph_cable',
'trans-atlantic_flight': 'transatlantic_flight',
'trans-atlantic_tunnel': 'transatlantic_tunnel',
'trans-balkan_pipeline': 'balkan_pipeline',
'trans-canada_airlines': 'trans-canada_air_lines',
'trans-canada_microwave': 'trans_canada_microwave',
'trans-canada_pipeline': 'transcanada_pipeline',
'trans-canada_trail': 'trans_canada_trail',
'trans-caprivi_highway': 'transcaprivi_highway',
'trans-caribean_pipeline': 'trans-caribbean_pipeline',
'trans-carpathian_oblast': 'zakarpattia_oblast',
'trans-carpathian_ruthenia': 'carpathian_ruthenia',
'trans-caspian_railroad': 'trans-caspian_railway',
'trans-caucasian_highway': 'transcaucasian_highway',
'trans-caucasian_railway': 'transcaucasian_railway',
'trans-caucasus_highway': 'transcaucasian_highway',
'trans-cinnamic_acid': 'cinnamic_acid',
'trans-cis_isomerization': 'isomerisation',
'trans-danube_sich': 'danubian_sich',
'trans-danubian_sich': 'danubian_sich',
'trans-derivational_search': 'transderivational_search',
'trans-dniester_region': 'transnistria',
'trans-dniester_republic': 'transnistria',
'trans-dniestr_republic': 'transnistria',
'trans-eurasia_express': 'trans-eurasia_logistics',
'trans-europ_express': 'trans_europ_express',
'trans-europa_express': 'trans_europ_express',
'trans-europe_express': 'trans_europ_express',
'trans-europe_race': 'trans_europe_foot_race',
'trans-european_division': 'trans-european_division_of_seventh-day_adventists',
'trans-fatty_acids': 'trans_fat',
'trans-fly_languages': 'eastern_trans-fly_languages',
'trans-france_race': 'herbie_goes_to_monte_carlo',
'trans-gambian_highway': 'trans-gambia_highway',
'trans-global_highway': 'trans_global_highway',
'trans-global_underground': 'transglobal_underground',
'trans-golgi_network': 'golgi_apparatus',
'trans-gomati_area': 'trans-gomti_area',
'trans-hudson_express': 'mass_transit_tunnel',
'trans-hudson_oregon': 'trans-hudson_orogeny',
'trans-hudson_orogen': 'trans-hudson_orogeny',
'trans-hudson_suture': 'trans-hudson_orogeny',
'trans-hudsonian_orogeny': 'trans-hudson_orogeny',
'trans-iranian_railroad': 'trans-iranian_railway',
'trans-load_facility': 'transloading',
'trans-lunar_injection': 'trans_lunar_injection',
'trans-mississippi_department': 'trans-mississippi_theater_of_the_american_civil_war',
'trans-mississippi_district': 'trans-mississippi_theater_of_the_american_civil_war',
'trans-mississippi_theater': 'trans-mississippi_theater_of_the_american_civil_war',
'trans-namib_railway': 'transnamib',
'trans-neptune_object': 'trans-neptunian_object',
'trans-neptunian_moon': 'minor_planet_moon',
'trans-neptunian_objects': 'trans-neptunian_object',
'trans-neptunian_planet': 'planets_beyond_neptune',
'trans-neptunian_planets': 'planets_beyond_neptune',
'trans-neuronal_degeneration': 'transneuronal_degeneration',
'trans-oceanic_highway': 'interoceanic_highway',
'trans-orbital_lobotomy': 'psychosurgery',
'trans-pacific_airlines': 'aloha_airlines',
'trans-pecos_copperhead': 'agkistrodon_contortrix_pictigaster',
'trans-pennine_trail': 'trans_pennine_trail',
'trans-planetary_subway': 'vactrain',
'trans-regulatory_elements': 'trans-regulatory_element',
'trans-sahara_pipeline': 'trans-saharan_gas_pipeline',
'trans-saharan_pipeline': 'trans-saharan_gas_pipeline',
'trans-saharan_route': 'trans-saharan_trade',
'trans-sahel_highway': 'trans-sahelian_highway',
'trans-siberia_express': 'trans-siberian_railway',
'trans-siberia_railway': 'trans-siberian_railway',
'trans-siberian_express': 'trans-siberian_railway',
'trans-siberian_pipeline': 'urengoy-pomary-uzhgorod_pipeline',
'trans-siberian_railroad': 'trans-siberian_railway',
'trans-sibirian_railway': 'trans-siberian_railway',
'trans-tasman_trophy': 'trans_tasman_trophy',
'trans-texas_airways': 'texas_international_airlines',
'trans-texas_corridor-10': 'trans-texas_corridor',
'trans-texas_corridor-35': 'trans-texas_corridor',
'trans-texas_corridor-69': 'interstate_69_in_texas',
'trans-texas_highway': 'trans-texas_corridor',
'trans-traumatic_acid': 'traumatic_acid',
'trans-urethral_resection': 'transurethral_resection_of_the_prostate',
'trans-volga_region': 'transvolga',
'trans-world_airlines': 'trans_world_airlines',
'trans-zeatin_o-b-d-glucosyltransferase': 'trans-zeatin_o-beta-d-glucosyltransferase',
'trans_acting': 'trans-acting',
'trans_alai': 'trans-alay_range',
'trans_america': 'transamerica',
'trans_american': 'transamerica',
'trans_artists': 'trans_artists_foundation',
'trans_b': 'philosophical_transactions_of_the_royal_society_b:_biological_sciences',
'trans_configuration': 'cis-trans_isomerism',
'trans_dniester': 'transnistria',
'trans_face': 'trans',
'trans_fats': 'trans_fat',
'trans_fatty': 'trans_fat',
'trans_goldfields': 'transgoldfields',
'trans_influence': 'trans_effect',
'trans_isomer': 'cis-trans_isomerism',
'trans_link': 'translink',
'trans_lunar': 'trans_lunar_injection',
'trans_male': 'transman',
'trans_man': 'transman',
'trans_men': 'transman',
'trans_narva': 'fc_narva_trans',
'trans_nzoia': 'trans-nzoia_district',
'trans_ocimene': 'ocimene',
'trans_panic': 'gay_panic_defense',
'trans_people': 'transgender',
'trans_retinol': 'retinol',
'trans_shipment': 'transshipment',
'trans_siberia': 'trans-siberian_railway',
'trans_siberian': 'trans-siberian',
'trans_sport': 'pontiac_trans_sport',
'trans_toilet': 'gender-neutral_toilet',
'trans_union': 'transunion',
'trans_vaal': 'transvaal',
'trans_warp': 'warp_drive',
'trans_woman': 'transwoman',
'trans_women': 'transwoman',
'trans_x': 'trans-x',
'transabdominal_cerclage': 'cervical_incompetence',
'transact_sql': 'transact-sql',
'transacting_rna': 'trans-acting_sirna',
'transactinide_elements': 'transactinide_element',
'transaction-level_modelling': 'transaction-level_modeling',
'transaction_account': 'transactional_account',
'transaction_accounts': 'transaction_deposit',
'transaction_code': 't-code',
'transaction_costs': 'transaction_cost',
'transaction_deposits': 'transaction_deposit',
'transaction_documents': 'transaction_document',
'transaction_fee': 'fee',
'transaction_management': 'business_transaction_management',
'transaction_manager': 'transaction_processing',
'transaction_monitoring': 'business_transaction_management',
'transaction_risk': 'currency_risk',
'transaction_tax': 'transfer_tax',
'transaction_tracing': 'business_transaction_management',
'transactional_data': 'dynamic_data',
'transactional_database': 'navigational_database',
'transactional_immunity': 'immunity_from_prosecution',
'transactional_programming': 'software_transactional_memory',
'transactional_queries': 'web_search_query',
'transactional_query': 'web_search_query',
'transactions_cost': 'transaction_cost',
'transactions_costs': 'transaction_cost',
'transactions_deposit': 'transaction_deposit',
'transactions_deposits': 'transaction_deposit',
'transaddition_pyramid': 'transdeletion_pyramid',
'transall_c.160': 'transall_c-160',
'transall_c.160ng': 'transall_c-160',
'transall_gabriel': 'transall_c-160',
'transalpine_gaul': 'gallia_narbonensis',
'transalpine_gaulish': 'gaulish_language',
'transalpine_line': 'transalpine_pipeline',
'transalpine_redemptorist': 'sons_of_the_most_holy_redeemer',
'transalpine_redemptorists': 'sons_of_the_most_holy_redeemer',
'transalta_corporation': 'transalta',
'transalta_utilities': 'transalta',
'transaltantic_cable': 'transatlantic_telegraph_cable',
'transamazonian_highway': 'trans-amazonian_highway',
'transamerica_airlines': 'trans_international_airlines',
'transamerica_building': 'transamerica_pyramid',
'transamerica_plaque': 'autopro_plaque',
'transamerica_tower': 'transamerica_pyramid',
'transamerica_trail': 'bikecentennial',
'transamination_reaction': 'transamination',
'transandinomys_bolivaris': 'bolivar_rice_rat',
'transandinomys_talamancae': 'talamancan_rice_rat',
'transannular_interaction': 'annulation',
'transannular_strain': 'prelog_strain',
'transarab_pipeline': 'trans-arabian_pipeline',
'transasian_railway': 'trans-asian_railway',
'transat_at': 'transat_a.t.',
'transatlantic_accent': 'mid-atlantic_english',
'transatlantic_aviation': 'transatlantic_flight',
'transatlantic_cable': 'atlantic_telegraph_company',
'transatlantic_crossing': 'transatlantic',
'transatlantic_crossings': 'transatlantic',
'transatlantic_diffusion': 'pre-columbian_trans-oceanic_contact',
'transatlantic_flights': 'transatlantic_flight',
'transatlantic_lines': 'transatlantic_lines_llc',
'transatlantic_mail': 'transatlantic',
'transatlantic_migration': 'transatlantic_migrations',
'transatlantic_tube': 'transatlantic_tunnel',
'transatlantic_union': 'transatlantic_free_trade_area',
'transavia_airlines': 'transavia.com',
'transavia_airtruck': 'transavia_pl-12_airtruk',
'transavia_airtruk': 'transavia_pl-12_airtruk',
'transavia_holland': 'transavia.com',
'transavia_skyfarmer': 'transavia_pl-12_airtruk',
'transbaikal_oblast': 'transbaikal',
'transbaikalia_krai': 'zabaykalsky_krai',
'transbay_terminal': 'san_francisco_transbay_terminal',
'transboundary_waters': 'international_waters',
'transbus_enviro200': 'alexander_dennis_enviro200',
'transbus_international': 'alexander_dennis',
'transbus_trident': 'dennis_trident_2',
'transcalvarial_herniation': 'brain_herniation',
'transcanada_corporation': 'transcanada_corp.',
'transcanada_highway': 'trans-canada_highway',
'transcanada_trail': 'trans_canada_trail',
'transcaribbean_airways': 'trans_caribbean_airways',
'transcaribbean_pipeline': 'trans-caribbean_pipeline',
'transcaribean_pipeline': 'trans-caribbean_pipeline',
'transcarpathian_oblast': 'zakarpattia_oblast',
'transcarpathian_region': 'zakarpattia_oblast',
'transcarpathian_ruthenia': 'carpatho-ukraine',
'transcarpathian_ukraine': 'carpathian_ruthenia',
'transcarpatian_oblast': 'zakarpattia_oblast',
'transcaspian_railroad': 'trans-caspian_railway',
'transcaspian_railway': 'trans-caspian_railway',
'transcaucasian_federation': 'transcaucasian_democratic_federative_republic',
'transcaucasian_highland': 'south_caucasus',
'transcaucasian_iberia': 'caucasian_iberia',
'transcaucasian_republic': 'transcaucasian_democratic_federative_republic',
'transcaucasian_republics': 'south_caucasus',
'transcaucasian_sejm': 'transcaucasian_democratic_federative_republic',
'transcaucasian_sfsr': 'transcaucasian_socialist_federative_soviet_republic',
'transcaucasian_ssr': 'transcaucasian_socialist_federative_soviet_republic',
'transcaucasus_front': 'transcaucasian_front',
'transcaucasus_highway': 'transcaucasian_highway',
'transcedental_blues': 'transcendental_blues',
'transcend_jetflash': 'jetflash',
'transcend_t.sonic': 'transcend_information_inc.',
'transcendal_number': 'transcendental_number',
'transcendantal_subject': 'transcendental_idealism',
'transcendence_basis': 'transcendence_degree',
'transcendence_measure': 'transcendence_theory',
'transcendent_function': 'transcendental_function',
'transcendent_philosophy': 'transcendent_theosophy',
'transcendent_truth': 'religious_belief',
'transcendental_aesthetic': 'critique_of_pure_reason',
'transcendental_argument': 'transcendental_argument_for_the_existence_of_god',
'transcendental_element': 'algebraic_element',
'transcendental_etude': 'transcendental_etudes',
'transcendental_extension': 'algebraic_extension',
'transcendental_functions': 'transcendental_function',
'transcendental_movement': 'transcendentalism',
'transcendental_numbers': 'transcendental_number',
'transcendental_philosophy': 'transcendental_idealism',
'transcendental_studies': 'transcendental_etudes',
'transcendental_subject': 'transcendental_idealism',
'transcendental_theology': 'transcendentalism',
'transcendentalist_movement': 'transcendentalism',
'transcending_boundaries': 'transcending_boundaries_conference',
'transco_tower': 'williams_tower',
'transcobalamin_ii': 'transcobalamin',
'transcoelomic_spread': 'metastasis',
'transconductance_amplifier': 'transconductance',
'transcontinental_railroads': 'transcontinental_railroad',
'transcontinental_railway': 'transcontinental_railroad',
'transcontinental_records': 'lou_pearlman',
'transconveyance_amplifiers': 'charge_transfer_amplifier',
'transcribing_ihvh': 'tetragrammaton',
'transcribing_ihwh': 'tetragrammaton',
'transcribing_jhvh': 'tetragrammaton',
'transcribing_jhwh': 'tetragrammaton',
'transcribing_yhwh': 'tetragrammaton',
'transcription_co-regulator': 'transcription_coregulator',
'transcription_co-regulators': 'transcription_coregulator',
'transcription_coregulators': 'transcription_coregulator',
'transcription_disc': 'acetate_disc',
'transcription_enterprises': 'numerical_technologies',
'transcription_factors': 'transcription_factor',
'transcription_network': 'gene_regulatory_network',
'transcriptional_pulsing': 'transcriptional_bursting',
'transcriptional_regulators': 'regulation_of_gene_expression',
'transcriptional_silencing': 'gene_silencing',
'transcultural_psychiatry': 'cross-cultural_psychiatry',
'transcutanous_pacing': 'transcutaneous_pacing',
'transdanubian_principality': 'balaton_principality',
'transdanubian_sich': 'danubian_sich',
'transderm_scop': 'scopolamine',
'transdermal_estradiol': 'estrogen_patch',
'transdermal_estrogen': 'estrogen_patch',
'transdermal_implants': 'transdermal_implant',
'transdermal_patches': 'transdermal_patch',
'transdev_blazefield': 'blazefield_group',
'transdev_tsl': 'shorelink',
'transdimensional_tmnt': 'transdimensional_teenage_mutant_ninja_turtles',
'transdniester_republic': 'transnistria',
'transesophageal_echocardiography': 'echocardiography',
'transesophageal_fistula': 'tracheoesophageal_fistula',
'transesophogeal_echocardiogram': 'echocardiography',
'transeuropean_division': 'trans-european_division_of_seventh-day_adventists',
'transeuropean_network': 'trans-european_networks',
'transfatty_acid': 'trans_fat',
'transfatty_acids': 'trans_fat',
'transfeild_services': 'toll_nz',
'transfer_agreement': 'material_transfer_agreement',
'transfer_approach': 'transfer-based_machine_translation',
'transfer_box': 'transfer_case',
'transfer_camp': 'population_transfer',
'transfer_cells': 'transfer_cell',
'transfer_chess': 'bughouse_chess',
'transfer_committees': 'transfer_committee',
'transfer_complete': 'eternity_rites',
'transfer_deadline': 'transfer_window',
'transfer_dysphagia': 'dysphagia',
'transfer_efficiency': 'energy_conversion_efficiency',
'transfer_jet': 'transferjet',
'transfer_learning': 'inductive_transfer',
'transfer_map': 'shriek_map',
'transfer_moulding': 'transfer_molding',
'transfer_object': 'data_transfer_object',
'transfer_orbit': 'hohmann_transfer_orbit',
'transfer_pack': 'nintendo_64_accessories',
'transfer_pak': 'nintendo_64_accessories',
'transfer_payments': 'transfer_payment',
'transfer_pipette': 'pasteur_pipette',
'transfer_print': 'transfer-print',
'transfer_printing': 'transfer-print',
'transfer_rate': 'bit_rate',
'transfer_resistor': 'transistor',
'transfer_seeking': 'rent_seeking',
'transfer_technology': 'technology_transfer',
'transfer_ticket': 'transit_pass',
'transfer_truck': 'semi-trailer_truck',
'transfer_trucks': 'semi-trailer_truck',
'transfer_windows': 'transfer_window',
'transferable_vote': 'instant-runoff_voting',
'transferred_epithet': 'hypallage',
'transferred_malice': 'transferred_intent',
'transferrin-binding_proteins': 'transferrin',
'transferrin_receptor-2': 'tfr2',
'transferrin_receptors': 'transferrin_receptor',
'transfinite_cardinal': 'transfinite_number',
'transfinite_numbers': 'transfinite_number',
'transfinite_ordinal': 'transfinite_number',
'transfinite_ordinals': 'transfinite_number',
'transflective_display': 'transflective_liquid_crystal_display',
'transform_boundary': 'transform_fault',
'transform_coder': 'transform_coding',
'transform_faults': 'transform_fault',
'transform_margin': 'transform_fault',
'transform_plate': 'transform_fault',
'transformation_ago': 'art_gallery_of_ontario',
'transformation_curve': 'production-possibility_frontier',
'transformation_deck': 'transformation_playing_card',
'transformation_economy': 'transition_economy',
'transformation_fantasy': 'shapeshifting',
'transformation_fiction': 'shapeshifting',
'transformation_group': 'group_action',
'transformation_groups': 'group_action',
'transformation_languages': 'transformation_language',
'transformation_matrices': 'transformation_matrix',
'transformation_monoid': 'transformation_semigroup',
'transformation_pipeline': 'graphics_pipeline',
'transformation_rule': 'rule_of_inference',
'transformation_rules': 'rule_of_inference',
'transformation_semi-group': 'transformation_semigroup',
'transformation_sequence': 'henshin',
'transformation_technologies': 'werner_erhard_and_associates',
'transformational-generative_grammar': 'transformational_grammar',
'transformational_activism': 'activism',
'transformational_breathing': 'rebirthing-breathwork',
'transformational_breathwork': 'rebirthing-breathwork',
'transformational_figure': 'transformational_leadership',
'transformational_learning': 'transformative_learning',
'transformational_linguistics': 'transformational_grammar',
'transformational_ministry': 'ex-gay',
'transformational_rule': 'rewrite_rule',
'transformational_space': 't/space',
'transformative-use_defense': 'fair_use',
'transformative_use': 'fair_use',
'transformed_man': 'the_transformed_man',
'transformer_steel': 'electrical_steel',
'transformers:_animated': 'transformers_animated',
'transformers:_autobots': 'transformers:_the_game',
'transformers:_classics': 'transformers_classics',
'transformers:_deceptercons': 'transformers:_the_game',
'transformers:_decepticons': 'transformers:_the_game',
'transformers:_heroes': 'transformers_animated',
'transformers:_infiltration': 'the_transformers:_infiltration',
'transformers:_masterforce': 'transformers:_super-god_masterforce',
'transformers:_superlink': 'transformers:_energon',
'transformers:generation_2': 'transformers:_generation_2',
'transformers:the_movie': 'the_transformers:_the_movie',
'transformers_2': 'transformers:_revenge_of_the_fallen',
'transformers_alternators': 'transformers:_alternators',
'transformers_armada': 'transformers:_armada',
'transformers_attacktix': 'attacktix',
'transformers_con': 'tfcon',
'transformers_cybertron': 'transformers:_cybertron',
'transformers_ds': 'transformers:_the_game',
'transformers_energon': 'transformers:_energon',
'transformers_franchise': 'transformers',
'transformers_g1': 'transformers:_generation_1',
'transformers_game': 'transformers:_the_game',
'transformers_heroes': 'transformers_animated',
'transformers_ii': 'transformers:_revenge_of_the_fallen',
'transformers_masterforce': 'transformers:_super-god_masterforce',
'transformers_rotf': 'transformers:_revenge_of_the_fallen',
'transformers_superlink': 'transformers:_energon',
'transformers_theme': 'smosh',
'transformers_titanium': 'transformers:_titanium',
'transformers_universe': 'transformers:_universe',
'transformers_universes': 'transformers',
'transformers_victory': 'transformers:_victory',
'transforming_robot': 'transforming_robots',
'transfusion-transmitted_infection': 'transfusion_reaction',
'transfusion_journal': 'aabb',
'transfusion_reactions': 'transfusion_reaction',
'transgaming_inc.': 'transgaming_technologies',
'transgastric_surgery': 'natural_orifice_translumenal_endoscopic_surgery',
'transgendered_girl': 'transwoman',
'transgendered_musicians': 'transgender_musicians',
'transgenic_crops': 'transgenic_plant',
'transgenic_flies': 'genetically_modified_organism',
'transgenic_fly': 'genetically_modified_organism',
'transgenic_mice': 'genetically_modified_organism',
'transgenic_mouse': 'genetically_modified_organism',
'transgenic_organism': 'genetically_modified_organism',
'transgenic_organisms': 'genetically_modified_organism',
'transgenic_pig': 'enviropig',
'transgenic_plants': 'transgenic_plant',
'transgolian_league': 'zealot_trivia',
'transgressional_fiction': 'transgressive_fiction',
'transgressional_literature': 'transgressive_fiction',
'transgressive_literature': 'transgressive_fiction',
'transhepatic_pancreato-cholangiography': 'percutaneous_transhepatic_cholangiography',
'transhumance_farming': 'transhumance',
'transiberian_orchestra': 'trans-siberian_orchestra',
'transiberian_railway': 'trans-siberian_railway',
'transient_application': 'application_posture',
'transient_electromagnetic': 'transient_electromagnetics',
'transient_hotel': 'flophouse',
'transient_killerwhale': 'killer_whale',
'transient_paresthesia': 'paresthesia',
'transient_suppressor': 'transient_voltage_suppression_diode',
'transient_tics': 'tic_disorder',
'transimpedance_amplifier': 'current-to-voltage_converter',
'transister_radio': 'transistor_radio',
'transister_radios': 'transistor_radio',
'transistor_amplifier': 'class_d_amplifier',
'transistor_history': 'history_of_the_transistor',
'transistor_parameters': 'transistor_models',
'transistor_radios': 'transistor_radio',
'transistor_revolt': 'rise_against',
'transit_agency': 'transit_district',
'transit_authority': 'transit_district',
'transit_card': 'stored-value_card',
'transit_circle': 'meridian_circle',
'transit_circlet': 'meridian_circle',
'transit_connect': 'ford_transit_connect',
'transit_enforcement': 'transit_police',
'transit_fare': 'fare',
'transit_fares': 'fare',
'transit_guard': 'transit_police',
'transit_lanes': 'transit_lane',
'transit_number': 'routing_transit_number',
'transit_nz': 'transit_new_zealand',
'transit_officer': 'railcorp_transit_officer',
'transit_planner': 'public_transport_planning',
'transit_privatisation': 'transit_privatization',
'transit_rd': 'new_york_state_route_78',
'transit_road': 'new_york_state_route_78',
'transit_router': 'label_switch_router',
'transit_system': 'public_transport',
'transit_telescope': 'jodrell_bank_observatory',
'transit_van': 'ford_transit',
'transiting_planet': 'astronomical_transit',
'transition-metal_oxides': 'transition_metal_oxides',
'transition-state_theory': 'transition_state_theory',
'transition_ballistics': 'transitional_ballistics',
'transition_boiling': 'boiling',
'transition_culture': 'transition_towns',
'transition_curve': 'euler_spiral',
'transition_economies': 'transition_economy',
'transition_element': 'transition_metal',
'transition_elements': 'transition_metal',
'transition_flow': 'laminar-turbulent_transition',
'transition_form': 'transitional_fossil',
'transition_functions': 'transition_function',
'transition_lens': 'photochromic_lens',
'transition_lenses': 'photochromic_lens',
'transition_matrix': 'stochastic_matrix',
'transition_metals': 'transition_metal',
'transition_monoid': 'semiautomaton',
'transition_probabilities': 'markov_chain',
'transition_probability': 'markov_chain',
'transition_program': 'the_university_transition_program',
'transition_reaction': 'pyruvate_decarboxylation',
'transition_region': 'solar_transition_region',
'transition_rule': 'selection_rule',
'transition_rules': 'selection_rule',
'transition_spiral': 'euler_spiral',
'transition_stage': 'transition_state',
'transition_system': 'state_transition_system',
'transition_town': 'transition_towns',
'transition_ubc': 'the_university_transition_program',
'transition_word': 'transition_words',
'transitional_assembly': 'northern_ireland_assembly',
'transitional_cell': 'epithelium',
'transitional_cells': 'transitional_epithelium',
'transitional_demands': 'transitional_demand',
'transitional_design': 'transitional_style',
'transitional_diaconate': 'transitional_deacon',
'transitional_economy': 'transition_economy',
'transitional_epithelia': 'transitional_epithelium',
'transitional_form': 'transitional_fossil',
'transitional_forms': 'transitional_fossil',
'transitional_fossils': 'transitional_fossil',
'transitional_furniture': 'transitional_style',
'transitional_government': 'provisional_government',
'transitional_job': 'job_creation_program',
'transitional_jobs': 'job_creation_program',
'transitional_objects': 'transitional_object',
'transitional_program': 'transitional_demand',
'transitional_programme': 'transitional_demand',
'transitional_revolver': 'pepper-box',
'transitional_species': 'transitional_fossil',
'transitional_tanana': 'tanacross_language',
'transitional_vertebra': 'congenital_vertebral_anomaly',
'transitional_vertebrae': 'congenital_vertebral_anomaly',
'transitional_work': 'workforce_investment_act_of_1998',
'transitions_lens': 'photochromic_lens',
'transitions_lenses': 'photochromic_lens',
'transitive_class': 'transitive_set',
'transitive_corporation': 'transitive_corp.',
'transitive_group': 'group_action',
'transitive_law': 'transitive_relation',
'transitive_model': 'inner_model',
'transitive_orientation': 'comparability_graph',
'transitive_property': 'transitive_relation',
'transitive_verbs': 'transitive_verb',
'transitory_human': 'transhuman',
'transitus_mariae': 'assumption_of_mary',
'transjurane_burgundy': 'upper_burgundy',
'transkaryotic_therapies': 'shire_plc',
'translation-lookaside_buffer': 'translation_lookaside_buffer',
'translation_cell': 'primitive_cell',
'translation_dictionaries': 'bilingual_dictionary',
'translation_dictionary': 'bilingual_dictionary',
'translation_engine': 'machine_translation',
'translation_invariant': 'translational_symmetry',
'translation_lexicon': 'bilingual_dictionary',
'translation_memories': 'translation_memory',
'translation_process': 'translation',
'translation_relation': 'translation_operator',
'translation_relay': 'chinese_whispers',
'translation_risk': 'currency_risk',
'translation_software': 'machine_translation',
'translation_stage': 'linear_stage',
'translation_system': 'machine_translation',
'translation_technology': 'translation',
'translational_energy': 'kinetic_energy',
'translational_frameshifting': 'translational_frameshift',
'translational_invariance': 'translational_symmetry',
'translational_symmetries': 'translational_symmetry',
'translator_crew': 'fugees',
'translatory_motion': 'linear_motion',
'translinear_circuits': 'translinear_circuit',
'translinear_principle': 'translinear_circuit',
'transliterating_ukrainian': 'romanization_of_ukrainian',
'translocation_factor': 'signal_recognition_particle',
'translumbar_amputation': 'hemicorporectomy',
'translunar_injection': 'trans_lunar_injection',
'transmash_mahilyow': 'fc_transmash_mogilev',
'transmed_pipeline': 'trans-mediterranean_pipeline',
'transmembrane_alpha-helix': 'transmembrane_domain',
'transmembrane_atpase': 'atpase',
'transmembrane_channel': 'transmembrane_channels',
'transmembrane_helix': 'transmembrane_domain',
'transmembrane_potential': 'membrane_potential',
'transmembrane_proteins': 'transmembrane_protein',
'transmembrane_pump': 'transport_protein',
'transmembrane_receptor': 'membrane_receptor',
'transmembrane_receptors': 'membrane_receptor',
'transmembrane_region': 'membrane_receptor',
'transmembrane_voltage': 'membrane_potential',
'transmeta_corporation': 'transmeta',
'transmigration_operation': 'solid_modeling',
'transmigration_programme': 'transmigration_program',
'transmission_beacon-locomotive': 'train_protection_system',
'transmission_constant': 'propagation_constant',
'transmission_density': 'absorbance',
'transmission_factor': 'transmission_coefficient',
'transmission_fluid': 'hydraulic_fluid',
'transmission_function': 'propagation_constant',
'transmission_grid': 'electric_power_transmission',
'transmission_gully': 'transmission_gully_motorway',
'transmission_lines': 'transmission_line',
'transmission_media': 'transmission_medium',
'transmission_meditation': 'share_international',
'transmission_multiplexor': 'multiplexer',
'transmission_network': 'electric_power_transmission',
'transmission_parameter': 'propagation_constant',
'transmission_parameters': 'propagation_constant',
'transmission_raman': 'transmission_raman_spectroscopy',
'transmission_rate': 'bit_rate',
'transmission_speed': 'bit_rate',
'transmission_tunnel': 'drive_shaft',
'transmissionary_six': 'the_transmissionary_six',
'transmitted_light': 'transmission',
'transmitter-studio_link': 'transmitter/studio_link',
'transmitter_arfon': 'arfon_transmitting_station',
'transmitter_berlin-britz': 'berlin-britz_transmitter',
'transmitter_berlin-tegel': 'berlin-tegel_radio_transmitter',
'transmitter_beromuenster': 'blosenbergturm',
'transmitter_beromunster': 'blosenbergturm',
'transmitter_brocken': 'sender_brocken',
'transmitter_cremlingen': 'cremlingen_transmitter',
'transmitter_heusweiler': 'heusweiler_radio_transmitter',
'transmitter_hornisgrinde': 'hornisgrinde',
'transmitter_ismaning': 'ismaning_radio_transmitter',
'transmitter_junglinster': 'junglinster_longwave_transmitter',
'transmitter_kalundborg': 'kalundborg_transmitter',
'transmitter_koblenz': 'koblenz_radio_transmitter',
'transmitter_lahti': 'lahti_longwave_transmitter',
'transmitter_langenberg': 'langenberg_transmission_tower',
'transmitter_motala': 'motala_longwave_transmitter',
'transmitter_nuernberg-kleinreuth': 'nuremberg-kleinreuth_radio_transmitter',
'transmitter_nuremberg-kleinreuth': 'nuremberg-kleinreuth_radio_transmitter',
'transmitter_nuremburg-kleinreuth': 'nuremberg-kleinreuth_radio_transmitter',
'transmitter_nurnberg-kleinreuth': 'nuremberg-kleinreuth_radio_transmitter',
'transmitter_orfordness': 'orfordness_transmitting_station',
'transmitter_orlunda': 'orlunda_radio_transmitter',
'transmitter_peterborough': 'peterborough_transmitting_station',
'transmitter_plant': 'physical_plant',
'transmitter_redruth': 'redruth_transmitting_station',
'transmitter_roumoules': 'roumoules_radio_transmitter',
'transmitter_sottens': 'sottens_transmitter',
'transmitter_substance': 'endocrine_system',
'transmitter_system': 'neuromodulation',
'transmitter_wachenbrunn': 'wachenbrunn_transmitter',
'transmitter_wavre': 'wavre_transmitter',
'transmitter_wilsdruff': 'wilsdruff_transmitter',
'transmode_virus': 'techno-organic_virus',
'transmogrification_potion': 'neopets',
'transmongolian_railway': 'trans-mongolian_railway',
'transmontaigne_incorporated': 'transmontaigne',
'transmorphers_2': 'transmorphers:_fall_of_man',
'transmutation_circle': 'fullmetal_alchemist',
'transmutation_circles': 'fullmetal_alchemist',
'transmuter_reactor': 'fast_breeder_reactor',
'transnamib_railway': 'transnamib',
'transnational_cinemas': 'transnational_cinema',
'transnational_citizenship': 'transnationalism',
'transnational_compaines': 'multinational_corporation',
'transnational_companies': 'multinational_corporation',
'transnational_company': 'multinational_corporation',
'transnational_corporation': 'multinational_corporation',
'transnational_corporations': 'multinational_corporation',
'transnational_crime': 'transnational_organized_crime',
'transnational_feminism': 'global_feminism',
'transnational_film': 'transnational_cinema',
'transnational_law': 'international_law',
'transneptunian_object': 'trans-neptunian_object',
'transnet_limited': 'transnet',
'transnistria_conflict': 'war_of_transnistria',
'transnistrian_conflict': 'war_of_transnistria',
'transnistrian_region': 'transnistria',
'transnistrian_rouble': 'transnistrian_ruble',
'transocean_drilling': 'transocean',
'transocean_inc.': 'transocean',
'transoconee_republic': 'trans-oconee_republic',
'transoesophageal_echocardiogram': 'transesophageal_echocardiogram',
'transonic_flight': 'transonic',
'transonic_flow': 'transonic',
'transorbital_leukotomy': 'lobotomy',
'transorbital_lobotomy': 'lobotomy',
'transorientale_rally': 'trans-oriental_rally',
'transouth_500': 'southern_500',
'transouth_conference': 'transouth_athletic_conference',
'transpacific_diffusion': 'pre-columbian_trans-oceanic_contact',
'transpacific_race': 'transpacific_yacht_race',
'transpadane_gaul': 'cisalpine_gaul',
'transpalatal_arch': 'lingual_arch',
'transparency_aa': 'intellisample',
'transparency_projector': 'overhead_projector',
'transparent_alumina': 'transparent_ceramics',
'transparent_aluminum': 'transparent_aluminium',
'transparent_ceramics': 'transparent_materials',
'transparent_concrete': 'litracon',
'transparent_decompression': 'data_compression',
'transparent_encryption': 'encrypted_filesystem',
'transparent_gif': 'graphics_interchange_format',
'transparent_government': 'open_government',
'transparent_lan': 'transparent_lan_service',
'transparent_man': 'manikin',
'transparent_material': 'transparent_materials',
'transparent_men': 'manikin',
'transparent_opengl': 'opengl_multipipe',
'transparent_paper': 'translucent_paper',
'transparent_photovoltaics': 'building-integrated_photovoltaics',
'transparent_plastic': 'plastic',
'transparent_proxy': 'proxy_server',
'transparent_society': 'the_transparent_society',
'transparent_tape': 'scotch_tape',
'transparent_woman': 'manikin',
'transparent_women': 'manikin',
'transpelvic_amputation': 'hemipelvectomy',
'transpennine_express': 'first_transpennine_express',
'transpersonal_art': 'transpersonal_psychology',
'transphobic_violence': 'violence_against_lgbt_people',
'transpiration_pull': 'transpirational_pull',
'transpiration_ratio': 'transpiration',
'transpired_collector': 'solar_thermal_energy',
'transplant._proc.': 'transplantation_proceedings',
'transplant_experiments': 'transplant_experiment',
'transplant_organs': 'transplantable_organs_and_tissues',
'transplant_shock': 'transplanting',
'transplant_surgery': 'organ_transplant',
'transplant_trade': 'organ_donation',
'transplantable_organs': 'transplantable_organs_and_tissues',
'transplantable_tissues': 'transplantable_organs_and_tissues',
'transplantation_immunology': 'transplant_rejection',
'transplantation_therapy': 'organ_transplant',
'transplanted_organs': 'organ_transplant',
'transplutonium_elements': 'transplutonium_element',
'transpolar_route': 'polar_route',
'transport-oriented_development': 'transit-oriented_development',
'transport_agent': 'transport_board',
'transport_agents': 'transport_board',
'transport_allianz': 'transall',
'transport_auckland': 'transport_in_auckland',
'transport_cafe': 'truck_stop',
'transport_code': 'traffic_code',
'transport_command': 'raf_transport_command',
'transport_commissioner': 'european_commissioner_for_transport',
'transport_committee': 'transport_select_committee',
'transport_delay': 'latent',
'transport_division': 'transdiv',
'transport_electrification': 'electric_vehicle',
'transport_engineer': 'transport_engineering',
'transport_engineers': 'transport_engineering',
'transport_equation': 'generic_scalar_transport_equation',
'transport_f.c': 'transport_f.c.',
'transport_fc': 'transport_f.c.',
'transport_forecasting': 'transportation_forecasting',
'transport_heritage': 'transport_trust',
'transport_industry': 'transport',
'transport_interchange': 'transport_hub',
'transport_minister': 'transportation_ministry',
'transport_modalities': 'mode_of_transport',
'transport_modality': 'mode_of_transport',
'transport_mode': 'mode_of_transport',
'transport_modes': 'mode_of_transport',
'transport_module': 'technodrome',
'transport_mpls': 't-mpls',
'transport_office': 'transport_board',
'transport_phenomenon': 'transport_phenomena',
'transport_plane': 'cargo_aircraft',
'transport_planning': 'transportation_planning',
'transport_police': 'transit_police',
'transport_portal': 'portal:transport',
'transport_preservation': 'transport_trust',
'transport_problem': 'transportation_theory',
'transport_proteins': 'transport_protein',
'transport_protocol': 'transport_layer',
'transport_secretary': 'secretary_of_state_for_transport',
'transport_ship': 'troopship',
'transport_ships': 'troopship',
'transport_squadron': 'transron',
'transport_stage': 'rallying',
'transport_stream': 'mpeg_transport_stream',
'transport_sustainability': 'sustainable_transport',
'transport_system': 'transport_network',
'transport_truck': 'truck',
'transportation_coil': 'transportation_coils',
'transportation_command': 'united_states_transportation_command',
'transportation_corridor': 'transport_corridor',
'transportation_corridors': 'transport_corridor',
'transportation_cycling': 'utility_cycling',
'transportation_cyclist': 'utility_cycling',
'transportation_department': 'united_states_department_of_transportation',
'transportation_device': 'vehicle',
'transportation_economics': 'transport_economics',
'transportation_electrification': 'electric_transportation_technology',
'transportation_engineer': 'transport_engineering',
'transportation_engineering': 'transport_engineering',
'transportation_engineers': 'transport_engineering',
'transportation_hub': 'transport_hub',
'transportation_hubs': 'transport_hub',
'transportation_industry': 'transport',
'transportation_law': 'transport_law',
'transportation_minister': 'transportation_ministry',
'transportation_modalities': 'mode_of_transport',
'transportation_modality': 'mode_of_transport',
'transportation_mode': 'mode_of_transport',
'transportation_modes': 'mode_of_transport',
'transportation_plan': 'transport_plan',
'transportation_police': 'transit_police',
'transportation_portal': 'portal:transport',
'transportation_problem': 'transportation_theory',
'transportation_secretary': 'united_states_secretary_of_transportation',
'transportation_system': 'transport_network',
'transportationin_zimbabwe': 'transport_in_zimbabwe',
'transporter_1': 'the_transporter',
'transporter_bridges': 'transporter_bridge',
'transporter_ii': 'transporter_2',
'transporter_protein': 'transport_protein',
'transporter_psychosis': 'realm_of_fear',
'transporter_truck': 'transporter_wagon',
'transports_montreux-vevey-riviera': 'chemin_de_fer_montreux-glion-rochers-de-naye',
'transposable_element': 'transposon',
'transposable_elements': 'transposon',
'transpose_conjugate': 'conjugate_transpose',
'transposed_matrix': 'transpose',
'transposing_instruments': 'transposing_instrument',
'transposition_ciphers': 'transposition_cipher',
'transposition_convention': 'transposing_instrument',
'transposition_cryptography': 'transposition_cipher',
'transposition_mutagenesis': 'transposon_mutagenesis',
'transpositional_puns': 'transpositional_pun',
'transpyloric_line': 'transpyloric_plane',
'transracial_transformation': 'racial_transformation',
'transradial_prostheses': 'transradial_prosthesis',
'transradio_press': 'transradio_press_service',
'transrapid_shanghai': 'shanghai_maglev_train',
'transrectal_ultrasound': 'transrectal_ultrasonography',
'transreflective_display': 'transflective_liquid_crystal_display',
'transreflective_lcd': 'transflective_liquid_crystal_display',
'transsavian_croatia': 'pannonian_croatia',
'transsex_procedures': 'sex_reassignment_surgery',
'transsexaul_woman': 'transwoman',
'transsexual_man': 'transman',
'transsexual_men': 'transman',
'transsexual_sexuality': 'transgender_sexuality',
'transsexual_woman': 'transwoman',
'transsexual_women': 'transwoman',
'transshipment_point': 'transshipment',
'transsiberian_orchestra': 'trans-siberian_orchestra',
'transsiberian_railroad': 'trans-siberian_railway',
'transsiberian_railway': 'trans-siberian_railway',
'transsulphuration_pathway': 'transsulfuration_pathway',
'transsyberia_rallye': 'transsyberia_rally',
'transtar_radio': 'transtar_radio_networks',
'transtentorial_herniation': 'brain_herniation',
'transthoracic_echocardiography': 'echocardiography',
'transtibial_prostheses': 'transtibial_prosthesis',
'transtubercular_line': 'intertubercular_plane',
'transtubercular_plane': 'intertubercular_plane',
'transuranic_element': 'transuranium_element',
'transuranic_elements': 'transuranium_element',
'transuranium_elements': 'transuranium_element',
'transurban_group': 'transurban',
'transurban_limited': 'transurban',
'transurethral_resection': 'transurethral_resection_of_the_prostate',
'transvaal_daisy': 'gerbera',
'transvaal_jade': 'grossular',
'transvaal_kaffirboom': 'erythrina_lysistemon',
'transvaal_milkplum': 'englerophytum_magalismontanum',
'transvaal_province': 'transvaal',
'transvaal_republic': 'south_african_republic',
'transvaal_war': 'first_boer_war',
'transvaalse_volkslied': 'national_anthem_of_the_transvaal',
'transvaginal_ultrasonography': 'gynecologic_ultrasonography',
'transvaginal_ultrasound': 'gynecologic_ultrasonography',
'transversal_intersection': 'transversality',
'transversal_mapping': 'transversality',
'transversal_mappings': 'transversality',
'transversal_matroid': 'matroid',
'transversal_vibration': 'transverse_wave',
'transversalis_muscle': 'transversus_abdominis_muscle',
'transversality_condition': 'transversality',
'transverse-mounted_engine': 'transverse_engine',
'transverse_abdominal': 'transversus_abdominis_muscle',
'transverse_abdominis': 'transversus_abdominis_muscle',
'transverse_abdominus': 'transversus_abdominis_muscle',
'transverse_arches': 'arches_of_the_foot',
'transverse_cervical': 'transverse_cervical_artery',
'transverse_cut': 'transverse_plane',
'transverse_fibers': 'commissural_fibers',
'transverse_flute': 'western_concert_flute',
'transverse_ladybird': 'coccinella_transversalis',
'transverse_lie': 'shoulder_presentation',
'transverse_ligaments': 'transverse_ligament',
'transverse_line': 'transverse_plane',
'transverse_magnetic': 'transverse_mode',
'transverse_magnification': 'magnification',
'transverse_melanonychia': 'melanonychia',
'transverse_mercator': 'transverse_mercator_projection',
'transverse_muscle': 'transverse_muscle_of_tongue',
'transverse_pericardial': 'pericardial_sinus',
'transverse_process': 'transverse_processes',
'transverse_range': 'transverse_ranges',
'transverse_scapular': 'suprascapular_artery',
'transverse_sinus': 'transverse_sinuses',
'transverse_stability': 'metacentric_height',
'transverse_tubule': 't-tubule',
'transverse_tubules': 't-tubule',
'transverse_velocity': 'velocity',
'transverse_vibration': 'transverse_wave',
'transverse_waves': 'transverse_wave',
'transversely-mounted_engine': 'transverse_engine',
'transversely_isotropic': 'transverse_isotropy',
'transversion_mutation': 'transversion',
'transversospinalis_-multifidus': 'multifidus_muscle',
'transversospinalis_-rotatores': 'rotatores_muscles',
'transversospinalis_-semispinalis': 'semispinalis_muscle',
'transversus_abdominis': 'transversus_abdominis_muscle',
'transversus_abdominus': 'transversus_abdominis_muscle',
'transversus_auriculae': 'transverse_muscle_of_auricle',
'transversus_linguae': 'transverse_muscle_of_tongue',
'transversus_menti': 'transverse_muscle_of_the_chin',
'transversus_muscle': 'transverse_muscle_of_tongue',
'transversus_perinaei': 'superficial_transverse_perineal_muscle',
'transversus_thoracis': 'transversus_thoracis_muscle',
'transvestic_fetish': 'transvestic_fetishism',
'transvolga_region': 'transvolga',
'transwarp_drive': 'warp_drive',
'transworld_magazine': 'transworld_skateboarding',
'transylvan_saxons': 'transylvanian_saxons',
'transylvania_coat': 'historical_coat_of_arms_of_transylvania',
'transylvania_college': 'transylvania_university',
'transylvania_saxons': 'transylvanian_saxons',
'transylvanian_bloodhound': 'hungarian_hound',
'transylvanian_germans': 'transylvanian_saxons',
'transylvanian_hound': 'hungarian_hound',
'transylvanian_hypothesis': 'lunar_effect',
'transylvanian_music': 'music_of_transylvania',
'transylvanian_saxon': 'transylvanian_saxons',
'tranverse_arytenoid': 'transverse_arytenoid',
'tranverse_wave': 'transverse_wave',
'tranverse_waves': 'transverse_wave',
'tranylcypromine_sulfate': 'tranylcypromine',
'tranz_link': 'toll_nz',
'tranzlator_crew': 'fugees',
'tranzor_z': 'mazinger_z',
'tranzschelia_discolor': 'tranzschelia_discolor_f.sp._persica',
'trap-bath_split': 'phonological_history_of_english_short_a',
'trap-door_spider': 'trapdoor_spider',
'trap-jaw_ant': 'odontomachus',
'trap-jaw_ants': 'odontomachus',
'trap_bill': 'trap_law',
'trap_cropping': 'trap_crop',
'trap_door': 'the_trap_door',
'trap_doors': 'trapdoor',
'trap_grease': 'grease_interceptor',
'trap_line': 'trap-lining',
'trap_lines': 'trap-lining',
'trap_lining': 'trap-lining',
'trap_point': 'catch_points',
'trap_points': 'catch_points',
'trap_room': 'parts_of_a_theatre',
'trap_runner': 'trap_gunner',
'trap_sequence': 'parasitic_twin',
'trap_set': 'drum_kit',
'trap_shooter': 'trap_shooting',
'trap_stain': 'tartrate-resistant_acid_phosphatase',
'trap_star': 'trapstar',
'trap_stilton': 'geronimo_stilton',
'trap_streets': 'trap_street',
'trap_syndrome': 'twin_reversed_arterial_perfusion',
'trapa_natans': 'water_caltrop',
'trapain_law': 'traprain_law',
'trapani_airport': 'trapani-birgi_airport',
'trapani_province': 'province_of_trapani',
'trapbar_deadlift': 'deadlift',
'trapdoor_algorithm': 'trapdoor_function',
'trapdoor_permutation': 'trapdoor_function',
'trapeang_prasat': 'trapeang_prasat_district',
'trapelus_tournevillei': 'erg_agama',
'trapez_ltd': 'traum_schallplatten',
'trapeze_artist': 'trapeze',
'trapezites_eremicola': 'trapezites_sciron',
'trapezites_glaucus': 'trapezites_lutea',
'trapezites_heliomacula': 'trapezites_heteromacula',
'trapezites_icosia': 'rachelia_extrusus',
'trapezites_leucon': 'trapezites_lutea',
'trapezites_monocycla': 'trapezites_eliena',
'trapezites_paraphaes': 'anisynta_sphenosema',
'trapezites_phila': 'trapezites_phigalia',
'trapezites_phillyra': 'trapezites_phigalia',
'trapezites_soma': 'trapezites_symmomus',
'trapezites_sombra': 'trapezites_symmomus',
'trapezites_sphenosema': 'anisynta_sphenosema',
'trapezium_formula': 'trapezoid',
'trapezium_rule': 'trapezoidal_rule',
'trapezius_lower': 'trapezius_muscle',
'trapezius_middle': 'trapezius_muscle',
'trapezius_muscles': 'trapezius_muscle',
'trapezoid-rhombic_dodecahedron': 'trapezo-rhombic_dodecahedron',
'trapezoid_method': 'trapezoidal_rule',
'trapezoid_ridge': 'trapezoid_line',
'trapezoid_rule': 'trapezoidal_rule',
'trapezoidal_icositetrahedron': 'deltoidal_icositetrahedron',
'trapezoidal_matrix': 'triangular_matrix',
'trapezoidal_method': 'linear_multistep_method',
'trapezoidal_number': 'polite_number',
'trapezoidal_thread': 'acme_thread_form',
'trapezoidium_bone': 'trapezoid_bone',
'trapezoidocephaly-synostosis_syndrome': 'antley-bixler_syndrome',
'trapezoptera_lobata': 'bocula_odontosema',
'trapezunt_gospel': 'trebizond_gospel',
'trapezuntine_empire': 'empire_of_trebizond',
'trapiche_emerald': 'emerald',
'trapjaw_ant': 'odontomachus',
'trapp_basalts': 'flood_basalt',
'trapp_family': 'georg_ludwig_von_trapp',
'trapped_fish': 'fish_trap',
'trapped_wind': 'bloating',
'trapper_john': 'trapper_john_mcintyre',
'trapper_keepers': 'trapper_keeper',
'trapping_agent': 'chemical_trap',
'trapping_reagent': 'chemical_trap',
'trappist_ale': 'trappist_beer',
'trappist_breweries': 'trappist_beer',
'trappist_brewery': 'trappist_beer',
'trappist_monastery': 'trappists',
'trappist_nuns': 'trappists',
'trappist_order': 'trappists',
'trappistes_rochefort': 'rochefort_brewery',
'traprock_ridge': 'traprock_mountains',
'traprock_ridges': 'traprock_mountains',
'traps_kit': 'drum_kit',
'trarza_region': 'trarza',
'tras_trasera': 'trastrasera',
'trasadingen_sh': 'trasadingen',
'trascendental_function': 'transcendental_function',
'trascendental_number': 'transcendental_number',
'trasdamad_ganayan': 'drastamat_kanayan',
'trash-to-energy_plant': 'incineration',
'trash_80': 'trs-80',
'trash_a-go-go': 'cosmic_slop',
'trash_bag': 'bin_bag',
'trash_bags': 'bin_bag',
'trash_barrel': 'waste_container',
'trash_belt': 'testing_recall_about_strange_happenings',
'trash_bin': 'waste_container',
'trash_bucket': 'waste_container',
'trash_can': 'waste_container',
'trash_cans': 'waste_container',
'trash_collector': 'waste_collector',
'trash_compaction': 'compactor',
'trash_compactor': 'compactor',
'trash_container': 'waste_container',
'trash_diving': 'dumpster_diving',
'trash_film': 'z_movie',
'trash_fish': 'rough_fish',
'trash_foot': 'cholesterol_embolism',
'trash_island': 'great_pacific_garbage_patch',
'trash_mcsweeney': 'the_red_paintings',
'trash_metal': 'thrash_metal',
'trash_palace': 'dimitri_tikovoi',
'trash_pickup': 'garbage_collection',
'trash_pussies': 'peaches_geldof',
'trash_receptacle': 'waste_container',
'trash_receptacles': 'waste_container',
'trash_strewn': 'litter',
'trash_talk': 'trash-talk',
'trash_talking': 'trash-talk',
'trash_tv': 'tabloid_talk_show',
'trash_vortex': 'great_pacific_garbage_patch',
'trash_wednesday': 'new_orleans_mardi_gras',
'trashcan_diagnosis': 'wastebasket_diagnosis',
'trashcan_man': 'the_stand',
'trashcan_sinatras': 'the_trash_can_sinatras',
'trashi_yangtse': 'tashi_yangtse',
'trashiyangste_district': 'trashiyangtse_district',
'trashy_magazine': 'gossip_magazine',
'trashy_magazines': 'gossip_magazine',
'trasimene_lake': 'lake_trasimeno',
'trasimeno_lake': 'lake_trasimeno',
'trasko_moscow': 'fc_fshm_torpedo_moscow',
'trasnfer_gene': 'transfer_gene',
'trastra_cera': 'trastrasera',
'trasylvania_coats': 'historical_coat_of_arms_of_transylvania',
'trat_town': 'trat',
'traudl_humps': 'traudl_junge',
'traugott_oesterreich': 'traugott_konstantin_oesterreich',
'trauma_centers': 'trauma_center',
'trauma_centre': 'trauma_center',
'trauma_counseling': 'grief_counseling',
'trauma_handshake': 'rectal_examination',
'trauma_hawk': 'trauma_hawk_aero-medical_program',
'trauma_model': 'trauma_model_of_mental_disorders',
'trauma_plate': 'ceramic_plate',
'trauma_surgeon': 'trauma_surgery',
'trauma_system': 'advanced_trauma_life_support',
'trauma_ward': 'emergency_department',
'traumatic_amnesia': 'post-traumatic_amnesia',
'traumatic_amputation': 'amputation',
'traumatic_experience': 'psychological_trauma',
'traumatic_injury': 'injury',
'traumatic_situation': 'psychological_trauma',
'trautson_palace': 'palais_trautson',
'trauzl_test': 'trauzl_lead_block_test',
'trav_demsey': 'travis_demsey',
'trav_sd': 'trav_s.d.',
'travancore-cochin_union': 'travancore-cochin',
'travancore_army': 'nair_brigade',
'travancore_cochin': 'travancore-cochin',
'travancore_kingdom': 'travancore',
'travancore_lines': 'nedumkotta',
'travancore_province': 'travancore',
'travancore_state': 'travancore',
'travancore_titanium': 'travancore_titanium_products',
'travancore_tortoise': 'indotestudo_forstenii',
'travancore_uropeltis': 'uropeltis_rubrolineata',
'trave_river': 'trave',
'travel_adapter': 'adapter',
'travel_adaptor': 'adapter',
'travel_agencies': 'travel_agency',
'travel_agent': 'travel_agency',
'travel_agents': 'travel_agency',
'travel_athens': 'athens',
'travel_bag': 'suitcase',
'travel_behaviour': 'travel_behavior',
'travel_blog': 'travel_journal',
'travel_blogs': 'travel_journal',
'travel_books': 'travel_literature',
'travel_bureau': 'convention_and_visitor_bureau',
'travel_case': 'luggage',
'travel_consolidators': 'travel_agency',
'travel_coventry': 'national_express_coventry',
'travel_demand': 'travel_behavior',
'travel_diary': 'travel_journal',
'travel_documentaries': 'travel_documentary',
'travel_drive': 'hard_disk_drive',
'travel_dundee': 'national_express_dundee',
'travel_expense': 'operating_expense',
'travel_expenses': 'operating_expense',
'travel_forum': 'internet_forum',
'travel_game': 'car_game',
'travel_games': 'car_game',
'travel_guide': 'guide_book',
'travel_incentives': 'travel_incentive',
'travel_industry': 'tourism',
'travel_inn': 'premier_inn',
'travel_itinerary': 'travel_literature',
'travel_lane': 'lane',
'travel_log': 'travel_journal',
'travel_logue': 'travel_journal',
'travel_ludlows': 'ludlows',
'travel_management': 'corporate_travel_management',
'travel_mug': 'mug',
'travel_nurse': 'travel_nursing',
'travel_papers': 'travel_document',
'travel_permit': 'permit_to_travel',
'travel_planning': 'travel_plan',
'travel_plaza': 'rest_area',
'travel_reporting': 'travel_journal',
'travel_scam': 'confidence_trick',
'travel_search': 'travel_search_engine',
'travel_services': 'travel_agency',
'travel_sickness': 'motion_sickness',
'travel_town': 'travel_town_museum',
'travel_trailers': 'travel_trailer',
'travel_witing': 'travel_journal',
'travel_writer': 'travel_literature',
'traveler-oriented_business': 'travel',
'traveler_insurance': 'travel_insurance',
'travelers_aid': 'travelers_aid_international',
'travelers_companies': 'the_travelers_companies',
'travelers_insurance': 'the_travelers_companies',
'travelers_palm': 'ravenala_madagascariensis',
'travelers_tree': 'ravenala_madagascariensis',
'travelex_plc': 'travelex',
'traveling-wave_reactor': 'traveling_wave_reactor',
'traveling_bard': 'itinerant_poet',
'traveling_bosnia': 'tourism_in_bosnia_and_herzegovina',
'traveling_carnivals': 'traveling_carnival',
'traveling_circus': 'circus',
'traveling_exhibit': 'travelling_exhibition',
'traveling_exhibition': 'travelling_exhibition',
'traveling_library': 'bookmobile',
'traveling_merchants': 'the_legend_of_zelda:_the_wind_waker',
'traveling_mike': 'john_d._grady',
'traveling_minstrel': 'itinerant_poet',
'traveling_pants': 'the_sisterhood_of_the_traveling_pants',
'traveling_poet': 'itinerant_poet',
'traveling_salesman': 'travelling_salesman',
'traveling_salesmen': 'peddler',
'traveling_trophy': 'trophy',
'traveling_wave': 'wave',
'traveling_wilbuys': 'traveling_wilburys',
'traveling_willburys': 'traveling_wilburys',
'traveller_norwegian': 'norwegian_travellers',
'traveller_scottish': 'beurla-reagaird',
'travellers_joy': 'clematis',
'travellers_palm': 'ravenala_madagascariensis',
'travellers_rest': 'travelers_rest',
'travelling-salesman_problem': 'travelling_salesman_problem',
'travelling_bard': 'itinerant_poet',
'travelling_birds': 'winged_migration',
'travelling_carnival': 'traveling_carnival',
'travelling_circus': 'circus',
'travelling_gnome': 'travelling_gnome_prank',
'travelling_microscope': 'traveling_microscope',
'travelling_minstrel': 'itinerant_poet',
'travelling_people': 'irish_traveller',
'travelling_poet': 'itinerant_poet',
'travelling_salesmen': 'peddler',
'travelling_showmen': 'showman',
'travelling_wilburrys': 'traveling_wilburys',
'travelling_wilburys': 'traveling_wilburys',
'travelling_willburys': 'traveling_wilburys',
'travelmate_3020': 'acer_travelmate_3020',
'travelmate_8000': 'acer_inc.',
'travelodge_hotels': 'travelodge',
'travencore_tortoise': 'indotestudo_forstenii',
'traver_rains': 'heatherette',
'travers_island': 'the_pelham_islands',
'traversable_graph': 'eulerian_path',
'traversable_wormhole': 'wormhole',
'traverse_islands': 'traversay_islands',
'traverse_jury': 'jury',
'traverse_lake': 'lake_traverse',
'traverse_mountains': 'traverse_ridge',
'traversey_islands': 'traversay_islands',
'traversia_lyalli': 'stephens_island_wren',
'travertine_beetle': 'lutrochidae',
'travertine_house': 'bunshaft_residence',
'travertine_marble': 'travertine',
'traveston_crossing': 'traveston_crossing_dam',
'travis_afb': 'travis_air_force_base',
'travis_almany': 'timothy_rhea',
'travis_bane': 'travis_tomko',
'travis_banwart': 'oakland_athletics_minor_league_players',
'travis_bickle': 'taxi_driver',
'travis_bowe': 'choosy_wives_choose_smith',
'travis_clark': 'we_the_kings',
'travis_dawkins': 'gookie_dawkins',
'travis_dempsey': 'travis_demsey',
'travis_field': 'savannah/hilton_head_international_airport',
'travis_greene': 'travis_green',
'travis_jeans': 'juicy_couture',
'travis_lake': 'lake_travis',
'travis_magazine': 'sheridan_college',
'travis_mcqueen': 'vengaboys',
'travis_nesbitt': 'social_code',
'travis_osborne': 'r._travis_osborne',
'travis_parrot': 'travis_parrott',
'travis_pestrana': 'travis_pastrana',
'travis_richter': 'from_first_to_last',
'travis_sampson': 'survivor:_vanuatu',
'travis_snyder': 'travis_snider',
'travis_stewart': 'machinedrum',
'travis_stork': 'travis_lane_stork',
'travis_tag': 'william_b._travis_academy/vanguard_for_the_academically_talented_and_gifted',
'travis_tea': 'atlanta_nights',
'travis_warren': 'blind_melon',
'travis_watkins': 'travis_e._watkins',
'travis_wolfe': 'jenna_lewis',
'travniki_camp': 'trawniki_concentration_camp',
'trawick_boger': 'tra_boger',
'trawl_bycatch': 'bycatch',
'trawl_fishing': 'trawling',
'trawl_net': 'trawling',
'trawl_whiting': 'southern_school_whiting',
'trawniki_camp': 'trawniki',
'trawniki_commune': 'gmina_trawniki',
'traws_cambria': 'trawscambria',
'trawsfynedd_nuclear': 'trawsfynydd_nuclear_power_station',
'trawsfynydd_reservoir': 'llyn_trawsfynydd',
'trawsgoed_crosswood': 'trawsgoed',
'traxion_a/s': 'traxion',
'traxxas_jato': 'traxxas',
'tray_cloth': 'textile',
'tray_dee': 'tray_deee',
'tray_lewis': 'trey_lewis',
'tray_parker': 'trey_parker',
'tray_table': 'tv_tray_table',
'traynor_amplifiers': 'traynor',
'traynor_halftown': 'traynor_ora_halftown',
'trazodone_hydrochloride': 'trazodone',
'trc_37l': 'lorraine_37l',
'trciniec_culture': 'trzciniec_culture',
'trd_hilux': 'toyota_hilux',
'trdat_iii': 'tiridates_iii_of_armenia',
'trdat_iv': 'tiridates_iii_of_armenia',
'trdina_peak': 'sveta_gera',
'trdinov_vrh': 'sveta_gera',
'tre_ankare': 'snus',
'tre_corde': 'soft_pedal',
'tre_flip': 'flip_tricks',
'tre_fontane': 'tre_fontane_abbey',
'tre_hardson': 'slimkid3',
'tre_lewis': 'trey_lewis',
'tre_vdk': 'tre_van_die_kasie',
'tre_venezie': 'northeast_italy',
'treacher-collins_syndrome': 'treacher_collins_syndrome',
'treacherous_computing': 'trusted_computing',
'treacle_mine': 'treacle_mining',
'treacle_mines': 'treacle_mining',
'treacle_pudding': 'treacle_sponge_pudding',
'treacle_toffee': 'bonfire_toffee',
'tread_pattern': 'tread',
'tread_water': 'treading_water',
'treadle_machine': 'treadle',
'treadmill_state': 'treadmilling',
'treadway/hubbard_racing': 'treadway_racing',
'treadwear_grade': 'treadwear_rating',
'trearddur_bay': 'trearddur',
'trearie_lake': 'lago_trearie',
'treas_ponley': 'treas',
'treason_felony': 'treason_felony_act_1848',
'treason_times': 'the_new_york_times',
'treasure_houses': 'treasury',
'treasure_hunt': 'treasure_hunting',
'treasure_isle': 'duke_reid',
'treasure_llanganatis': 'treasure_of_the_llanganatis',
'treasure_ships': 'treasure_ship',
'treasure_state': 'montana',
'treasure_sword': 'sword_kladenets',
'treasure_talk': 'trash-talk',
'treasure_trail': 'abdominal_hair',
'treasure_trawlers': 'mario_party_6',
'treasure_troll': 'troll_doll',
'treasure_troves': 'treasure_trove',
'treasure_vase': 'bumpa',
'treasures_tv': 'treasures.tv',
'treasury_bench': 'frontbencher',
'treasury_committee': 'treasury_select_committee',
'treasury_department': 'united_states_department_of_the_treasury',
'treasury_island': 'mono_island',
'treasury_reform': 'monetary_reform',
'treasury_registrar': 'register_of_the_treasury',
'treasury_regulation': 'treasury_regulations',
'treasury_seal': 'seal_of_the_united_states_department_of_the_treasury',
'treasury_secretary': 'united_states_secretary_of_the_treasury',
'treasury_securities': 'united_states_treasury_security',
'treasury_security': 'united_states_treasury_security',
'treasury_share': 'treasury_stock',
'treasury_shares': 'treasury_stock',
'treasury_strips': 'united_states_treasury_security',
'treasury_tags': 'treasury_tag',
'treat_chagas': 'chagas:_time_to_treat_campaign',
'treat_johnson': 'treat_baldwin_johnson',
'treated_diamond': 'diamond_enhancement',
'treated_lumber': 'wood_preservation',
'treated_pine': 'wood_preservation',
'treated_timber': 'wood_preservation',
'treated_wood': 'wood_preservation',
'treating_tinnitus': 'tinnitus',
'treatment_centres': 'treatment_centre',
'treatment_effects': 'treatment_effect',
'treatment_group': 'treatment_groups',
'treatment_guidelines': 'treatment_guidelines_from_the_medical_letter',
'treatment_plant': 'wastewater_treatment_plant',
'treaty_adoption': 'treaty',
'treaty_five': 'treaty_5',
'treaty_ghent': 'treaty_of_ghent',
'treaty_law': 'law_of_treaties',
'treaty_negotiation': 'treaty',
'treaty_paris': 'treaty_of_paris',
'treaty_port': 'treaty_ports',
'treaty_settlement': 'treaty_of_waitangi_claims_and_settlements',
'treaty_seven': 'treaty_7',
'treaty_six': 'treaty_6',
'treaty_three': 'treaty_3',
'treaty_versailles': 'treaty_of_versailles',
'treaty_washington': 'north_atlantic_treaty',
'trebah_gardens': 'trebah',
'trebang_sunda': 'music_of_indonesia',
'trebbia_river': 'trebbia',
'trebbiano_giallo': 'trebbiano',
'trebbiano_greco': 'trebbiano',
'trebbiano_perugino': 'trebbiano',
'trebbiano_romagnolo': 'trebbiano',
'trebbiano_spoletino': 'trebbiano',
'trebbiano_toscano': 'trebbiano',
'trebecco_lake': 'lago_di_trebecco',
'trebellius_pollio': 'augustan_history',
'trebinje_municipality': 'trebinje',
'trebitsch_lincoln': 'ignaz_trebitsch-lincoln',
'trebitsch_memorial': 'leopold_trebitsch_memorial_tournament',
'trebizon_school': 'trebizon',
'treble-cut_filter': 'low-pass_filter',
'treble_bar': 'treble-bar',
'treble_clef': 'clef',
'treble_hook': 'fish_hook',
'trebnitz_abbey': 'sanctuary_of_st._jadwiga_in_trzebnica',
'trebnitz_sanctuary': 'sanctuary_of_st._jadwiga_in_trzebnica',
'trebor_basset': 'cadbury_uk',
'trebor_bassett': 'cadbury_uk',
'trebor_thorpe': 'lionel_fanthorpe',
'trebula_baliensis': 'trebula_balliensis',
'trebula_mustuca': 'trebula_mutusca',
'trebula_suffenes': 'trebula_suffenas',
'trebunie_tutki': 'trebunie-tutki',
'trecator_sc': 'ethionamide',
'trecco_bay': 'porthcawl',
'trecento_music': 'music_of_the_trecento',
'trecia_smith': 'trecia-kaye_smith',
'tredegar_newydd': 'new_tredegar',
'tredington_school': 'tredington_community_primary_school',
'tree-climbing_crab': 'caribbean_hermit_crab',
'tree-climbing_fox': 'gray_fox',
'tree-ear_fungus': 'wood_ear',
'tree-ear_mushroom': 'wood_ear',
'tree-ring_analysis': 'dendrochronology',
'tree-ring_dating': 'dendrochronology',
'tree-search_algorithm': 'tree_traversal',
'tree_63': 'tree63',
'tree_anemone': 'carpenteria',
'tree_automata': 'tree_automaton',
'tree_bark': 'bark',
'tree_boa': 'corallus',
'tree_branch': 'branch',
'tree_bumblebee': 'bombus_hypnorum',
'tree_cactus': 'pilosocereus',
'tree_chipper': 'woodchipper',
'tree_cholla': 'cylindropuntia_imbricata',
'tree_circus': 'axel_erlandson',
'tree_city': 'tree_city_usa',
'tree_clover': 'melilotus_albus',
'tree_colors': 'days_of_the_new_presents_tree_colors',
'tree_cotton': 'gossypium_arboreum',
'tree_crab': 'caribbean_hermit_crab',
'tree_creeper': 'treecreeper',
'tree_database': 'hierarchical_model',
'tree_diagrams': 'tree_diagram',
'tree_doctor': 'arborist',
'tree_dubautia': 'dubautia_arborea',
'tree_ear': 'wood_ear',
'tree_echium': 'echium_pininana',
'tree_everlasting': 'ozothamnus_ferrugineus',
'tree_farmer': 'tree_farm',
'tree_farming': 'tree_farm',
'tree_farms': 'tree_farm',
'tree_fern': 'cyatheales',
'tree_ferns': 'cyatheales',
'tree_fiber': 'fiber_crop',
'tree_fort': 'tree_house',
'tree_frogs': 'tree_frog',
'tree_fuchsia': 'fuchsia_excorticata',
'tree_germander': 'teucrium_fruticans',
'tree_goblin': 'squirrel',
'tree_graffiti': 'graffiti',
'tree_grape': 'cyphostemma_juttae',
'tree_groundsel': 'baccharis_halimifolia',
'tree_hallow': 'tree_hollow',
'tree_hash': 'hash_tree',
'tree_heath': 'erica_arborea',
'tree_heliotrope': 'heliotropium_foertherianum',
'tree_holes': 'tree_hole',
'tree_hollows': 'tree_hollow',
'tree_houses': 'tree_house',
'tree_huggers': 'chipko_movement',
'tree_hugging': 'chipko_movement',
'tree_ivy': 'fatshedera_lizei',
'tree_jasmine': 'millingtonia',
'tree_kangaroo': 'tree-kangaroo',
'tree_kingfishers': 'tree_kingfisher',
'tree_leaf': 'leaf_node',
'tree_level': 'tree_diagram',
'tree_lizard': 'urosaurus',
'tree_lobster': 'dryococelus_australis',
'tree_lomatia': 'lomatia_fraseri',
'tree_lucerne': 'tagasaste',
'tree_lungwort': 'lobaria_pulmonaria',
'tree_lupin': 'lupinus_arboreus',
'tree_lupine': 'lupinus_arboreus',
'tree_mallow': 'lavatera',
'tree_man': 'epidermodysplasia_verruciformis',
'tree_map': 'treemapping',
'tree_mapping': 'treemapping',
'tree_marigold': 'tithonia_diversifolia',
'tree_medick': 'medicago_arborea',
'tree_meta': 'tree-meta',
'tree_mining': 'structure_mining',
'tree_moss': 'usnea',
'tree_nettle': 'urtica_ferox',
'tree_ocelot': 'margay',
'tree_octopus': 'pacific_northwest_tree_octopus',
'tree_onion': 'allium_canadense',
'tree_orchid': 'cymbidium_dayanum',
'tree_peony': 'paeonia_rockii',
'tree_pinning': 'tree_spiking',
'tree_planting': 'treeplanting',
'tree_poppy': 'matilija_poppy',
'tree_pose': 'vrksasana',
'tree_property': 'aronszajn_tree',
'tree_rat': 'squirrel',
'tree_register': 'the_tree_register',
'tree_ring': 'dendrochronology',
'tree_rings': 'dendrochronology',
'tree_root': 'root',
'tree_runes': 'cipher_runes',
'tree_sap': 'plant_sap',
'tree_savanna': 'savanna',
'tree_search': 'tree_traversal',
'tree_senna': 'senna_corymbosa',
'tree_shrew': 'treeshrew',
'tree_shrews': 'treeshrew',
'tree_sit': 'tree_sitting',
'tree_sits': 'tree_sitting',
'tree_sitter': 'tree_sitting',
'tree_skink': 'lamprolepis',
'tree_sloth': 'sloth',
'tree_snake': 'brown_tree_snake',
'tree_sorrel': 'averrhoa_bilimbi',
'tree_spade': 'digging_trees_and_shrubs_for_transplanting',
'tree_spades': 'digging_trees_and_shrubs_for_transplanting',
'tree_spikes': 'tree_spiking',
'tree_spirit': 'dryad',
'tree_squid': 'pacific_northwest_tree_octopus',
'tree_squirrels': 'tree_squirrel',
'tree_stands': 'tree_stand',
'tree_stars': 'the_land_before_time',
'tree_stub': 'stub',
'tree_surgeon': 'arborist',
'tree_surgery': 'arboriculture',
'tree_swallows': 'tree_swallow',
'tree_sweat': 'evapotranspiration',
'tree_swift': 'treeswift',
'tree_test': 'baum_test',
'tree_text': 'outliner',
'tree_toad': 'tree_frog',
'tree_tobacco': 'nicotiana_glauca',
'tree_tomato': 'tamarillo',
'tree_traversals': 'tree_traversal',
'tree_triggerplant': 'stylidium_laricifolium',
'tree_tumbo': 'welwitschia',
'tree_walk': 'tree_traversal',
'tree_width': 'tree_decomposition',
'tree_woman': 'soldier_son_trilogy',
'tree_yellow': 'gandaca_harina',
'tree_yucca': 'yucca_brevifolia',
'tree_zoo': 'deerfield_beach_arboretum',
'treehouse_kids': 'spring_hill_music_group',
'treehouse_records': 'oar_folkjokeopus',
'treeing_feist': 'mountain_feist',
'treeing_walker': 'treeing_walker_coonhound',
'treenut_allergy': 'tree_nut_allergy',
'treetop_flyer': 'stills_alone',
'treetop_straus': 'jack_straus',
'treetop_strauss': 'jack_straus',
'treetop_walk': 'walpole-nornalup_national_park',
'treffor_davies': 'trefor_davies',
'trefoil_curve': 'trefoil_knot',
'trefonen_fc': 'trefonen_f.c.',
'trefor_granite': 'trefor',
'trefor_quarry': 'trefor',
'trefriw_mills': 'trefriw',
'tregellasia_capito': 'pale-yellow_robin',
'tregellasia_leucops': 'white-faced_robin',
'tregeseal_e': 'tregeseal_east_stone_circle',
'tregeseal_east': 'tregeseal_east_stone_circle',
'tregoweth_brown': 'treg_brown',
'tregrosse_islands': 'coral_sea_islands',
'tregua_dei': 'peace_and_truce_of_god',
'treharris_f.c.': 'treharris_athletic_western_f.c.',
'treharris_fc': 'treharris_athletic_western_f.c.',
'trei_asar': 'minor_prophet',
'treibstoff_records': 'treibstoff_recordings',
'treiglad_trwynol': 'colloquial_welsh_morphology',
'treisman_model': 'feature_integration_theory',
'treiten_be': 'treiten',
'trek_2.0': 'star_trek:_the_original_series',
'trek_bbs': 'treknation',
'trek_bicycles': 'trek_bicycle_corporation',
'trek_bikes': 'trek_bicycle_corporation',
'trek_boer': 'trekboer',
'trek_boers': 'trekboer',
'trek_life': 'the_trek_life',
'trek_online': 'star_trek_online',
'trek_wars': 'comparison_of_star_trek_and_star_wars',
'trekkie_convention': 'science_fiction_convention',
'trekking_poles': 'trekking_pole',
'trelawney_baronets': 'salusbury-trelawny_baronets',
'trelawney_hope': 'the_adventure_of_the_second_stain',
'trelawney_pitbulls': 'trelawny_pitbulls',
'trelawney_tigers': 'trelawny_tigers',
'trelawny_baronets': 'salusbury-trelawny_baronets',
'treliske_hospital': 'royal_cornwall_hospital',
'trelissick_gardens': 'trelissick_garden',
'trelleborg_congregation': 'trelleborg_parish',
'trelleborg_ff': 'trelleborgs_ff',
'trelleborgs_kommun': 'trelleborg_municipality',
'trellech_grange': 'trelleck_grange',
'trellis-coded_modulation': 'trellis_modulation',
'trellis_coding': 'trellis_modulation',
'trellis_diagram': 'convolutional_code',
'trellis_drainage': 'drainage_system',
'trellis_lattice': 'trellis',
'trellis_network': 'trellis_modulation',
'trellix_corporation': 'trellix',
'trema_aspera': 'trema_tomentosa_var._viridis',
'trema_tormentosa': 'trema_tomentosa_var._viridis',
'tremacebus_harringtoni': 'tremacebus',
'tremadoc_bay': 'tremadog_bay',
'tremaine_kirkland': 'tremayne_kirkland',
'tremarctos_ornatus': 'spectacled_bear',
'tremateia_halophila': 'tremateia',
'trematochampsa_oblita': 'trematochampsa',
'trematocranus_microstoma': 'haplochromis_placodon_pointed_head',
'trematode_infections': 'trematoda',
'trematosphaeriopsis_parmeliana': 'trematosphaeriopsis',
'trembling-hand_perfection': 'trembling_hand_perfect_equilibrium',
'trembling_heart': 'beating_heart',
'trembling_lamb': 'michael_john_fles',
'trembling_lamp': 'michael_john_fles',
'trembling_madness': 'delirium_tremens',
'trembling_sea-mat': 'victorella_pavida',
'tremella_atra': 'exidia_glandulosa',
'tremella_digitata': 'gymnosporangium_fuscum',
'tremella_fuciformis': 'snow_fungus',
'tremella_fusca': 'gymnosporangium_fuscum',
'tremella_glandulosa': 'exidia_glandulosa',
'tremella_purpurea': 'nectria_cinnabarina',
'tremella_sabinae': 'gymnosporangium_fuscum',
'tremella_spiculosa': 'exidia_glandulosa',
'tremellogaster_surinamensis': 'tremellogaster',
'tremelo_arm': 'tremolo_arm',
'tremendous_dynamite': 'hombre_lobo',
'tremoctopus_scalenus': 'atlantic_white-spotted_octopus',
'tremoctopus_violaceus': 'blanket_octopus',
'tremolo_bar': 'tremolo_arm',
'tremona_ti': 'tremona',
'tremor_essentialis': 'essential_tremor',
'tremorgio_lake': 'lago_tremorgio',
'tremors_2': 'tremors_2:_aftershocks',
'tremors_3': 'tremors_3:_back_to_perfection',
'tremors_4': 'tremors_4:_the_legend_begins',
'tremorton_city': 'my_life_as_a_teenage_robot',
'tremough_campus': 'tremough',
'tremper_trojans': 'george_nelson_tremper_high_school',
'tren_valencia': 'adolfo_valencia',
'trenbolone_acetate': 'trenbolone',
'trencavel_cartulary': 'liber_instrumentorum_vicecomitalium',
'trench_broom': 'thompson_submachine_gun',
'trench_coats': 'trench_coat',
'trench_composting': 'home_composting',
'trench_fighter': 'ground-attack_aircraft',
'trench_foot': 'immersion_foot_syndromes',
'trench_gun': 'combat_shotgun',
'trench_knives': 'trench_knife',
'trench_mouth': 'necrotizing_ulcerative_gingivitis',
'trench_rollback': 'oceanic_trench',
'trench_shields': 'trench_shield',
'trench_shotgun': 'combat_shotgun',
'trench_spike': 'trench_knife',
'trench_town': 'trenchtown',
'trench_war': 'trench_warfare',
'trench_wars': 'trench_warfare',
'trenchard_baronets': 'viscount_trenchard',
'trenchcoat_brigade': 'the_trenchcoat_brigade',
'trenchcoat_mafia': 'eric_harris_and_dylan_klebold',
'trencher_cap': 'square_academic_cap',
'trencherfield_mill': 'wigan_pier',
'trenches_war': 'trench_warfare',
'trend_forecasting': 'trend_analysis',
'trend_lines': 'trend_line',
'trend_stationary': 'stationary_process',
'trend_trading': 'trend_following',
'trendle_hill': 'cerne_abbas_giant',
'trendmicro_housecall': 'housecall',
'trenell_young': 'savant_young',
'trenitalia_cargo': 'trenitalia',
'trenkovi_panduri': 'baron_franz_von_der_trenck',
'trenkwalder_admira': 'fc_trenkwalder_admira',
'trenkwalter_modena': 'pallavolo_modena',
'trennt_lake': 'trenntsee',
'trent_1000': 'rolls-royce_trent',
'trent_1700': 'rolls-royce_trent',
'trent_500': 'rolls-royce_trent',
'trent_600': 'rolls-royce_trent',
'trent_700': 'rolls-royce_trent',
'trent_8104': 'rolls-royce_trent',
'trent_900': 'rolls-royce_trent',
'trent_building': 'campuses_of_the_university_of_nottingham',
'trent_canal': 'trent-severn_waterway',
'trent_codex': 'trent_codices',
'trent_easton': 'perfect_dark',
'trent_excalibur': 'trent_university',
'trent_frank': 'trent_franks',
'trent_house': 'william_trent_house',
'trent_hubbard': 'trenidad_hubbard',
'trent_incident': 'trent_affair',
'trent_isd': 'trent_independent_school_district',
'trent_junction': 'trent_railway_station',
'trent_locks': 'trent_lock',
'trent_polytechnic': 'nottingham_trent_university',
'trent_radio': 'cfff-fm',
'trent_rezner': 'trent_reznor',
'trent_tomlison': 'trent_tomlinson',
'trent_tv': 'british_student_television',
'trenter_lake': 'trenter_see',
'trentham_awakes': 'trentham_gardens',
'trentham_camp': 'trentham_military_camp',
'trentham_hall': 'trentham_gardens',
'trentham_park': 'trentham_gardens',
'trentino_domani': 'trentino_tomorrow',
'trento_province': 'province_of_trento',
'trenton-morrisville_bridge': 'trenton-morrisville_toll_bridge',
'trenton_bobcats': 'belleville_bobcats',
'trenton_class': 'austin_class_amphibious_transport_dock',
'trenton_freeway': 'u.s._route_1_in_new_jersey',
'trenton_hercs': 'trenton_sting',
'trenton_isd': 'trenton_independent_school_district',
'trenton_station': 'trenton_transit_center',
'trenton_titans': 'trenton_devils',
'trenton_works': 'trentonworks',
'trentonworks_ltd.': 'trentonworks',
'trentside_derby': 'nottingham_derby',
'trentway_wagner': 'trentway_wagar',
'treo_755': 'treo_755p',
'treo_800wx': 'palm_treo_800w',
'treo_ace': 'treo_650',
'treo_centro': 'palm_centro',
'treo_pro': 'palm_treo_pro',
'treorchy_comprehensive': 'treorchy_comprehensive_school',
'treponema_infection': 'treponema_pallidum',
'treponema_pertenue': 'yaws',
'treponemal_infections': 'treponema_pallidum',
'treppe_phenomenon': 'bowditch_effect',
'treprostinil_sodium': 'treprostinil',
'treptichnus_pedum': 'trichophycus_pedum',
'treptow_park': 'treptower_park',
'treron_affinis': 'pompadour_green_pigeon',
'treron_apicauda': 'pin-tailed_green-pigeon',
'treron_aromaticus': 'pompadour_green_pigeon',
'treron_australis': 'madagascar_green-pigeon',
'treron_axillaris': 'pompadour_green_pigeon',
'treron_bicincta': 'orange-breasted_green_pigeon',
'treron_calva': 'african_green-pigeon',
'treron_calvus': 'african_green-pigeon',
'treron_capellei': 'large_green-pigeon',
'treron_chloropterus': 'pompadour_green_pigeon',
'treron_comorensis': 'madagascar_green-pigeon',
'treron_curvirostra': 'thick-billed_green-pigeon',
'treron_floris': 'flores_green-pigeon',
'treron_formosae': 'whistling_green-pigeon',
'treron_fulvicollis': 'cinnamon-headed_green-pigeon',
'treron_griseicauda': 'grey-cheeked_green-pigeon',
'treron_griveaudi': 'madagascar_green-pigeon',
'treron_olax': 'little_green-pigeon',
'treron_oxyura': 'sumatran_green-pigeon',
'treron_oxyurus': 'sumatran_green-pigeon',
'treron_pembaensis': 'pemba_green-pigeon',
'treron_phayrei': 'pompadour_green_pigeon',
'treron_phoenicoptera': 'yellow-footed_green_pigeon',
'treron_phoenicopterus': 'yellow-footed_green_pigeon',
'treron_pompadora': 'pompadour_green_pigeon',
'treron_psittacea': 'timor_green-pigeon',
'treron_psittaceus': 'timor_green-pigeon',
'treron_seimundi': 'yellow-vented_green-pigeon',
'treron_sieboldii': 'white-bellied_green-pigeon',
'treron_sphenura': 'wedge-tailed_green-pigeon',
'treron_sphenurus': 'wedge-tailed_green-pigeon',
'treron_teysmannii': 'sumba_green-pigeon',
'treron_vernans': 'pink-necked_green-pigeon',
'treryn_dinas': 'logan_rock',
'tres_1': 'tres-1',
'tres_cool': 'l.a._girls/tres_cool',
'tres_cubano': 'tres',
'tres_golpes': 'conga',
'tres_leches': 'tres_leches_cake',
'tres_marias': 'las_tres_marias',
'tres_picos': 'el_sombrero_de_tres_picos',
'tres_tabernae': 'three_taverns',
'tresham_college': 'tresham_college_of_further_and_higher_education',
'tresham_institute': 'tresham_college_of_further_and_higher_education',
'treshnish_islands': 'treshnish_isles',
'tresina_lobi': 'balance_point',
'tresor_records': 'tresor',
'trespass_offering': 'sin-offering',
'trespolo_tutore': 'il_trespolo_tutore',
'tress_macnielle': 'tress_macneille',
'tress_mcneil': 'tress_macneille',
'trestar_ostankino': 'fc_trestar_ostankino',
'trestle_bridge': 'trestle',
'trestle_tables': 'trestle_table',
'trestolone_ac': 'trestolone',
'trestolone_acetate': 'trestolone',
'tresviri_monetalis': 'moneyer',
'treta_yug': 'treta_yuga',
'tretha_yuga': 'treta_yuga',
'tretiakov_gallery': 'tretyakov_gallery',
'tretyakov_drive': 'tretyakovsky_proyezd',
'tretyakovskaya_gallery': 'tretyakov_gallery',
'tretyakovsky_drive': 'tretyakovsky_proyezd',
'treubiites_kidstonii': 'treubiites',
'treuga_dei': 'peace_and_truce_of_god',
'treur_barb': 'treur_river_barb',
'trev_warner': 'kym_warner',
'treva_throneberry': 'brianna_stewart',
'trevally_scad': 'herring_scad',
'treve_castle': 'threave_castle',
'trevecca_college': 'trefeca',
'trevecca_nazarene': 'trevecca_nazarene_university',
'treverton_college': 'treverton_preparatory_school_and_college',
'treverton_prep': 'treverton_preparatory_school_and_college',
'treverton_preparatory': 'treverton_preparatory_school_and_college',
'treverton_school': 'treverton_preparatory_school_and_college',
'treverton_schools': 'treverton_preparatory_school_and_college',
'trevilian_station': 'battle_of_trevilian_station',
'trevillet_river': 'rocky_valley',
'treviso_f.b.c.': 'a.s.d._treviso_2009',
'treviso_fbc': 'a.s.d._treviso_2009',
'treviso_fc': 'a.s.d._treviso_2009',
'treviso_open': 'torneo_internazionale_citta_di_treviso',
'treviso_province': 'province_of_treviso',
'trevo_worm': 'butterworm',
'trevor_allen': 'trevor_allan',
'trevor_baker': 'trevor_the_weather',
'trevor_brooks': 'abu_izzadeen',
'trevor_bryce': 'trevor_r._bryce',
'trevor_doerksen': 'mobovivo',
'trevor_dudley-smith': 'elleston_trevor',
'trevor_dupuy': 'trevor_n._dupuy',
'trevor_evans': 'trefor_evans',
'trevor_gretzky': 'wayne_gretzky',
'trevor_helpline': 'the_trevor_project',
'trevor_hurst': 'econoline_crush',
'trevor_macdonald': 'trevor_mcdonald',
'trevor_malloy': 'trevor_molloy',
'trevor_mcfur': 'trevor_mcfur_in_the_crescent_galaxy',
'trevor_murdoch': 'william_mueller',
'trevor_neal': 'trevor_and_simon',
'trevor_neubauer': 'trevor_donovan',
'trevor_pettingill': 'pettingill_family',
'trevor_philips': 'trevor_phillips',
'trevor_project': 'the_trevor_project',
'trevor_pyman': 'trevor_ashmore_pyman',
'trevor_rees': 'trevor_rees-jones',
'trevor_reznick': 'the_machinist',
'trevor_reznik': 'the_machinist',
'trevor_rhys': 'trevor_rees-jones',
'trevor_rhys-jones': 'trevor_rees-jones',
'trevor_rocks': 'eglwyseg',
'trevor_shaw': 'jimmy_london',
'trevor_stanford': 'russ_conway',
'trevor_strnad': 'the_black_dahlia_murder',
'trevor_sutherland': 'ijahman_levi',
'trevor_thoms': 'trev_thoms',
'trevor_worthy': 'trevor_h._worthy',
'trevors_disease': 'trevor_disease',
'trey_fields': 'trey_kenyon',
'trey_perkins': 'virginia_tech_massacre',
'trey_phillips': 'laguna_beach:_the_real_orange_county',
'trey_racer': 'horohoro',
'trey_songs': 'trey_songz',
'trey_wright': 'roger_wright',
'treyarch_corporation': 'treyarch',
'treyarch_ngl': 'treyarch',
'treycovagnes_vd': 'treycovagnes',
'treytorrens_vd': 'treytorrens',
'treyvaux_fr': 'treyvaux',
'trg_22': 'sako_trg',
'trg_42': 'sako_trg',
'trg_republike': 'republic_square',
'tri-axle_bus': 'multi-axle_bus',
'tri-border_area': 'tripoint',
'tri-border_region': 'triple_frontier',
'tri-butyl_tin': 'tributyltin',
'tri-calcium_phosphate': 'tricalcium_phosphate',
'tri-cameral_system': 'tricameralism',
'tri-centennial_park': 'detroit_international_riverfront',
'tri-cities_americans': 'tri-city_americans',
'tri-cities_blackhawks': 'atlanta_hawks',
'tri-cities_stadium': 'gesa_stadium',
'tri-cities_xtreme': 'coquitlam_metro-ford_sc_xtreme',
'tri-city_arena': 'viaero_event_center',
'tri-city_atoms': 'tri-city_dust_devils',
'tri-city_fever': 'tri-cities_fever',
'tri-city_hearld': 'tri-city_herald',
'tri-city_ports': 'tri-city_dust_devils',
'tri-city_stadium': 'gesa_stadium',
'tri-colored_blackbird': 'tricolored_blackbird',
'tri-colored_heron': 'tricolored_heron',
'tri-consonantal_root': 'semitic_root',
'tri-corner_knob': 'tricorner_knob',
'tri-cornered_hat': 'tricorne',
'tri-county_freeway': 'wisconsin_highway_441',
'tri-d_chess': 'three-dimensional_chess',
'tri-delta_transit': 'tri_delta_transit',
'tri-diagonal_matrix': 'tridiagonal_matrix',
'tri-dimensional_chess': 'three-dimensional_chess',
'tri-gate_transistor': 'field-effect_transistor',
'tri-lateral_commission': 'trilateral_commission',
'tri-n-butyl_phosphate': 'tributyl_phosphate',
'tri-n-butyltin_hydride': 'tributyltin_hydride',
'tri-n-octylphosphine_oxide': 'trioctylphosphine_oxide',
'tri-o-cresyl_phosphate': 'tricresyl_phosphate',
'tri-o-tolyl_phosphate': 'tricresyl_phosphate',
'tri-ortho-cresyl_phosphate': 'tricresyl_phosphate',
'tri-ortho-tolyl_phosphate': 'tricresyl_phosphate',
'tri-port_headphone': 'bose_headphones',
'tri-racial_isolate': 'melungeon',
'tri-racial_isolates': 'melungeon',
'tri-rail_station': 'tri-rail_and_metrorail_transfer_station',
'tri-sodium_citrate': 'trisodium_citrate',
'tri-star_pictures': 'tristar_pictures',
'tri-stat_system': 'tri-stat_dx',
'tri-state_crematorium': 'tri-state_crematory',
'tri-state_driver': 'three-state_logic',
'tri-state_logic': 'three-state_logic',
'tri-state_uiversity': 'trine_university',
'tri-state_university': 'trine_university',
'tri-tert-butoxyaluminum_hydride': 'lithium_aluminium_hydride',
'tri-tip_steak': 'tri-tip',
'tri-valley_area': 'tri-valley',
'tri-valley_herald': 'ang_newspapers',
'tri-veneto_region': 'northeast_italy',
'tri-wing_screw': 'tri-wing',
'tri-wing_screwdriver': 'tri-wing',
'tri_band': 'tri-band',
'tri_core': 'multi-core',
'tri_delt': 'delta_delta_delta',
'tri_delta': 'delta_delta_delta',
'tri_kusharyanto': 'tri_kusharjanto',
'tri_quang': 'thich_tri_quang',
'tri_ralpachen': 'ralpacan',
'tri_sigma': 'sigma_sigma_sigma',
'tri_songdetsen': 'trisong_detsen',
'tri_square': 'try_square',
'tri_srca': 'radenska',
'tri_state': 'tri-state',
'tri_tip': 'tri-tip',
'tria_chania': 'tria_hania',
'triacetone_peroxide': 'acetone_peroxide',
'triacetone_triperoxide': 'acetone_peroxide',
'triacetyl_cellulose': 'cellulose_triacetate',
'triactinomyxon_dubium': 'myxobolus_cerebralis',
'triactinomyxon_gyrosalmo': 'myxobolus_cerebralis',
'triacylglycerol--sterol_o-acyltransferase': 'triacylglycerol-sterol_o-acyltransferase',
'triad_election': 'election_2',
'triad_stone': 'strahl',
'triad_syndrome': 'prune_belly_syndrome',
'triadan_chart': 'modified_triadan_system',
'triadan_system': 'modified_triadan_system',
'triadelphia_lake': 'triadelphia_reservoir',
'triadic_irreducibility': 'ternary_relation',
'triadic_patents': 'triadic_patent',
'triadic_relation': 'ternary_relation',
'triadica_sebifera': 'chinese_tallow',
'triaena_psi': 'grey_dagger',
'triaeniopholis_arenarius': 'lampropeltis_getula',
'triaenodon_obesus': 'whitetip_reef_shark',
'triaenodon_obtusus': 'pigeye_shark',
'triaenops_persicus': 'persian_trident_bat',
'triaenops_rufus': 'rufous_trident_bat',
'triainomyces_hollowayanus': 'triainomyces',
'triakis_acutipinna': 'sharpfin_houndshark',
'triakis_maculata': 'spotted_houndshark',
'triakis_megalopterus': 'sharptooth_houndshark',
'triakis_scyllium': 'banded_houndshark',
'triakis_semifasciata': 'leopard_shark',
'trial-and-error_conditioning': 'trial_and_error',
'trial-and-error_learning': 'trial_and_error',
'trial-and-error_method': 'trial_and_error',
'trial_advocacy': 'mock_trial',
'trial_attorney': 'lawyer',
'trial_blazers': 'portland_trail_blazers',
'trial_courts': 'trial_court',
'trial_divisor': 'trial_division',
'trial_jury': 'jury_trial',
'trial_lawyer': 'lawyer',
'trial_lawyers': 'lawyer',
'trial_marriage': 'betrothal',
'trial_rocks': 'tryal_rocks',
'trial_stakes': 'queen_anne_stakes',
'trial_version': 'evaluation',
'trialeti_ossetia': 'ossetians_in_trialeti',
'trialeurodes_vaporariorum': 'greenhouse_whitefly',
'trialkyl_phosphine': 'organophosphorus',
'trials_2': 'trials_2:_second_edition',
'trials_bike': 'mountain_bike_trials',
'trials_bikes': 'mountain_bike_trials',
'trials_riding': 'mountain_bike_trials',
'trials_unicycle': 'unicycle_trials',
'trials_unicycling': 'unicycle_trials',
'triamcinolone_acetonide': 'triamcinolone',
'triamcinolone_benetonide': 'triamcinolone',
'triamcinolone_diacetate': 'triamcinolone',
'triamcinolone_furetonide': 'triamcinolone',
'triamcinolone_hexacetonide': 'triamcinolone',
'triammonium_phosphate': 'ammonium_phosphate',
'triampat_ratchada': 'triamudomsuksapattanakarn_ratchada_school',
'triamudomsuksapattanakarn_ratchada': 'triamudomsuksapattanakarn_ratchada_school',
'triana_satellite': 'deep_space_climate_observatory',
'trianectes_bucephalus': 'bullhead_triplefin',
'triangle_arbitrage': 'triangular_arbitrage',
'triangle_books': 'society_for_promoting_christian_knowledge',
'triangle_brewery': 'triangle_brewing_company',
'triangle_category': 'triangulated_category',
'triangle_centroid': 'centroid',
'triangle_diagram': 'ternary_plot',
'triangle_distribution': 'triangular_distribution',
'triangle_expressway': 'triex',
'triangle_fire': 'triangle_shirtwaist_factory_fire',
'triangle_function': 'triangular_function',
'triangle_geometry': 'triangle',
'triangle_girls': 'international_order_of_the_rainbow_for_girls',
'triangle_groups': 'triangle_group',
'triangle_head': 'pyramid_head',
'triangle_hearts': 'triangle_heart',
'triangle_hill': 'battle_of_triangle_hill',
'triangle_hole': 'missing_square_puzzle',
'triangle_inequalities': 'triangle_inequality',
'triangle_jump': 'wall_jump',
'triangle_keelback': 'xenochrophis_trianguligerus',
'triangle_law': 'euclidean_vector',
'triangle_number': 'triangular_number',
'triangle_numbers': 'triangular_number',
'triangle_palm': 'dypsis_decaryi',
'triangle_parkway': 'triex',
'triangle_plot': 'ternary_plot',
'triangle_postulate': 'triangle_inequality',
'triangle_push-up': 'press-up',
'triangle_pushup': 'press-up',
'triangle_roast': 'flat_iron_steak',
'triangle_routing': 'triangular_routing',
'triangle_shirtwaist': 'triangle_shirtwaist_factory_fire',
'triangle_software': 'beat_the_traffic',
'triangle_stratos': 'triangle_tv',
'triangle_television': 'triangle_tv',
'triangle_tiling': 'triangular_tiling',
'triangle_trade': 'triangular_trade',
'triangle_waves': 'triangle_wave',
'triangular-wave_function': 'triangle_wave',
'triangular_antiprism': 'octahedron',
'triangular_bandage': 'bandage',
'triangular_bipyramid': 'triangular_dipyramid',
'triangular_bone': 'triquetral_bone',
'triangular_bypyramidal': 'trigonal_bipyramidal_molecular_geometry',
'triangular_element': 'geodesic_dome',
'triangular_form': 'triangular_matrix',
'triangular_graph': 'planar_graph',
'triangular_grid': 'triangular_tiling',
'triangular_group': 'triangle_group',
'triangular_gyrobicupola': 'cuboctahedron',
'triangular_inequalities': 'triangle_inequality',
'triangular_inequality': 'triangle_inequality',
'triangular_lattice': 'hexagonal_lattice',
'triangular_lodge': 'rushton_triangular_lodge',
'triangular_matrices': 'triangular_matrix',
'triangular_norm': 't-norm',
'triangular_numbers': 'triangular_number',
'triangular_part': 'pars_triangularis',
'triangular_plot': 'ternary_plot',
'triangular_pyramid': 'tetrahedron',
'triangular_squares': 'square_triangular_number',
'triangular_theory': 'triangular_theory_of_love',
'triangular_washer': 'anchor_plate',
'triangular_window': 'window_function',
'triangularis_muscle': 'depressor_anguli_oris_muscle',
'triangulate_butterflyfish': 'chevron_butterflyfish',
'triangulated_categories': 'triangulated_category',
'triangulated_graph': 'chordal_graph',
'triangulation_pillar': 'triangulation_station',
'triangulation_point': 'triangulation_station',
'triangulum_constellation': 'triangulum',
'triangulum_minor': 'triangulum_minus',
'triangulum_spiral': 'triangulum_galaxy',
'trianon_treaty': 'treaty_of_trianon',
'trianosperma_ficcifolia': 'tayuya',
'trianosperma_piauhiensis': 'tayuya',
'trianosperma_tayuya': 'tayuya',
'triarchic_theory': 'triarchic_theory_of_intelligence',
'triaristellina_anaristella': 'barbosella_dolichorhiza',
'triarylmethane_dye': 'triphenylmethane',
'trias_politica': 'separation_of_powers',
'trias_thaumaturga': 'acta_triadis_thaumaturgae',
'triassic_dinosaurs': 'category:triassic_dinosaurs',
'triassic_era': 'triassic',
'triassic_period': 'triassic',
'triassic_system': 'triassic',
'triathlon_cymru': 'welsh_triathlon',
'triathlon_distance': 'triathlon',
'triathlon_training': 'triathlon',
'triaxial_cables': 'triaxial_cable',
'triaxial_ellipsoid': 'ellipsoid',
'tribal_anarchism': 'national-anarchism',
'tribal_authority': 'tribe',
'tribal_band': 'tribe',
'tribal_casino': 'native_american_gambling_enterprises',
'tribal_casinos': 'native_american_gambling_enterprises',
'tribal_class': 'tribal-class',
'tribal_college': 'tribal_colleges_and_universities',
'tribal_communities': 'tribe',
'tribal_danio': 'fire_bar_danio',
'tribal_feminism': 'ecofeminism',
'tribal_group': 'ethnic_group',
'tribal_groups': 'ethnic_group',
'tribal_health': 'tribal_health_initiative',
'tribal_hideage': 'tribal_hidage',
'tribal_law': 'tribal_sovereignty_in_the_united_states',
'tribal_metal': 'heavy_metal_music',
'tribal_people': 'tribe',
'tribal_religion': 'folk_religion',
'tribal_religions': 'folk_religion',
'tribal_reservation': 'indian_reservation',
'tribal_rhythm': 'tribal_rhythms',
'tribal_scar': 'scarification',
'tribal_scarification': 'scarification',
'tribal_societies': 'tribe',
'tribal_society': 'tribe',
'tribal_sovereignty': 'tribal_sovereignty_in_the_united_states',
'tribal_theology': 'folk_religion',
'tribal_trouble2': 'tribal_trouble_2',
'tribal_university': 'tribal_colleges_and_universities',
'tribal_warfare': 'endemic_warfare',
'tribal_warrior': 'warrior',
'tribe_networks': 'tribe.net',
'tribe_time': 'mcminn_county_high_school',
'tribe_vibes': 'a_tribe_called_quest',
'tribeca_film': 'tribeca_film_festival',
'triberyllium_dinitride': 'beryllium_nitride',
'tribes_3': 'tribes:_vengeance',
'tribes_ii': 'tribes_2',
'tribes_iii': 'tribes:_vengeance',
'tribes_israel': 'israelites',
'tribes_man': 'tribe',
'tribes_men': 'tribe',
'tribes_vengeance': 'tribes:_vengeance',
'tribes_woman': 'tribe',
'tribes_women': 'tribe',
'tribeworks_ishell': 'ishell',
'tribhovandas_patel': 'tribhuvandas_kishibhai_patel',
'tribhuvan_airport': 'tribhuvan_international_airport',
'tribhuvandas_patel': 'tribhuvandas_kishibhai_patel',
'triblidium_reticulatum': 'tryblidium_reticulatum',
'triblidium_rufulum': 'rhytidhysteron_rufulum',
'triboelectric_series': 'triboelectric_effect',
'tribolium_castaneum': 'red_flour_beetle',
'tribolium_confusum': 'confused_flour_beetle',
'tribolium_destructor': 'destructive_flour_beetle',
'triboltingen_tg': 'triboltingen',
'tribonaci_numbers': 'fibonacci_number',
'triboniophorus_graeffei': 'red_triangle_slug',
'tribonyx_mortierii': 'tasmanian_native-hen',
'tribonyx_ventralis': 'black-tailed_native-hen',
'triboro_bridge': 'robert_f._kennedy_bridge',
'triborough_bridge': 'robert_f._kennedy_bridge',
'triborough_stadium': 'downing_stadium',
'tribrachia_pendula': 'bulbophyllum_pendulum',
'tribrachidium_heraldicum': 'tribrachidium',
'tribrid_car': 'tribrid_vehicle',
'tribrid_cars': 'hybrid_vehicle',
'tribu_records': 'la_tribu',
'tribulatia_appendicospora': 'tribulatia',
'tribunal_bill': 'military_commissions_act_of_2006',
'tribunal_constitucional': 'portuguese_constitutional_court',
'tribunal_federal': 'federal_supreme_court_of_switzerland',
'tribunal_services': 'tribunals_service',
'tribunale_federale': 'federal_supreme_court_of_switzerland',
'tribune_co.': 'tribune_company',
'tribune_corporation': 'tribune_company',
'tribune_india': 'the_tribune',
'tribune_newspapers': 'tribune_company',
'tribune_star': 'tribune-star',
'tribune_studios': 'ktla',
'tribunician_power': 'tribune',
'tribunus_militum': 'tribune',
'tribunus_plebis': 'tribune',
'tributary_system': 'tribute',
'tributary_valley': 'side_valley',
'tribute_acts': 'tribute_act',
'tribute_albums': 'tribute_album',
'tribute_band': 'tribute_act',
'tribute_bands': 'tribute_act',
'tribute_group': 'tribute_act',
'tribute_payment': 'tribute',
'tribute_state': 'tributary_state',
'tributyl_tin': 'tributyltin',
'tric_award': 'television_and_radio_industries_club',
'tric_awards': 'television_and_radio_industries_club',
'tric_trac': 'shut_the_box',
'tricaine_mesilate': 'tricaine_mesylate',
'tricaine_methanesulfonate': 'tricaine_mesylate',
'tricaine_methanesulphonate': 'tricaine_mesylate',
'tricalcium_citrate': 'calcium_citrate',
'tricalcium_dinitride': 'calcium_nitride',
'tricameral_constitution': 'tricameral_parliament',
'tricaprylmethylammonium_chloride': 'aliquat_336',
'tricaprylylmethylammonium_chloride': 'aliquat_336',
'tricarbon_dioxide': 'carbon_suboxide',
'tricarbon_disulfide': 'carbon_subsulfide',
'tricarbon_octahydride': 'propane',
'tricarboxylic_acids': 'tricarboxylic_acid',
'tricard_poker': 'three_card_poker',
'tricare_south': 'tricare',
'tricep_reflex': 'triceps_reflex',
'tricephalous_christ': 'depiction_of_jesus',
'triceps_brachii': 'triceps_brachii_muscle',
'triceps_extension': 'lying_triceps_extensions',
'triceps_muscle': 'triceps_brachii_muscle',
'triceratolepidophis_sieversorum': 'triceratolepidophis',
'triceratops_hatcheri': 'nedoceratops',
'triceratops_horridus': 'triceratops',
'triceratops_prorsus': 'triceratops',
'tricesium_tribromide': 'caesium_bromide',
'trichamelia_eugeniae': 'trichamelia',
'trichangium_vinosum': 'trichangium',
'trichaptum_zonale': 'rigidoporus_lineatus',
'trichastoma_bicolor': 'ferruginous_babbler',
'trichastoma_buettikoferi': 'sumatran_babbler',
'trichastoma_celebense': 'sulawesi_babbler',
'trichastoma_perspicillatum': 'black-browed_babbler',
'trichastoma_rostratum': 'white-chested_babbler',
'trichastoma_tickelli': 'buff-breasted_babbler',
'trichastoma_woodi': 'bagobo_babbler',
'trichechus_bernhardi': 'dwarf_manatee',
'trichechus_inunguis': 'amazonian_manatee',
'trichechus_manatus': 'west_indian_manatee',
'trichechus_rosmarus': 'walrus',
'trichechus_senegalensis': 'african_manatee',
'trichecus_bernhardi': 'dwarf_manatee',
'trichia_biconica': 'trochulus_biconicus',
'trichia_caelata': 'trochulus_caelatus',
'trichia_gramnicola': 'trochulus_graminicola',
'trichia_hispida': 'trochulus_hispidus',
'trichia_lubomirskii': 'trochulus_lubomirskii',
'trichia_oreinos': 'trochulus_oreinos',
'trichia_sericea': 'trochulus_sericeus',
'trichia_striolata': 'trochulus_striolatus',
'trichia_villosula': 'trochulus_villosulus',
'trichina_worm': 'trichinella_spiralis',
'trichine_disease': 'trichinosis',
'trichiotinus_piger': 'hairy_flower_chafer',
'trichipteris_feeana': 'cyathea_corcovadensis',
'trichipteris_glaziovii': 'cyathea_glaziovii',
'trichiuris_suis': 'trichuris_suis',
'trichiurus_lepturus': 'largehead_hairtail',
'trichius_fasciatus': 'bee_beetle',
'trichixos_pyrropyga': 'rufous-tailed_shama',
'trichixos_pyrropygus': 'rufous-tailed_shama',
'trichlormethyl_chloroformate': 'diphosgene',
'trichloro-tert-butyl_alcohol': 'chlorobutanol',
'trichloroacetaldehyde_monohydrate': 'chloral_hydrate',
'trichloroethanoic_acid': 'trichloroacetic_acid',
'trichloromethyl_chloroformate': 'diphosgene',
'trichobasis_betae': 'uromyces_beticola',
'trichobasis_cichoracearum': 'coleosporium_tussilaginis',
'trichobasis_clinopodii': 'puccinia_menthae',
'trichobasis_fabae': 'uromyces_viciae-fabae_var._viciae-fabae',
'trichobasis_fallens': 'uromyces_trifolii-repentis_var._fallens',
'trichobasis_glumarum': 'puccinia_striiformis_var._striiformis',
'trichobasis_labiatarum': 'puccinia_menthae',
'trichobasis_linearis': 'stem_rust',
'trichobasis_rubigo-vera': 'puccinia_recondita',
'trichobasis_senecionis': 'coleosporium_tussilaginis',
'trichobatrachus_robustus': 'hairy_frog',
'trichocentrum_cornucopiae': 'trichocentrum_fuscum',
'trichocentrum_cosymbephorum': 'trichocentrum_luridum',
'trichocentrum_pachyphyllum': 'trichocentrum_cavendishianum',
'trichocentrum_stipitatum': 'trichocentrum_lacerum',
'trichocephalus_acetabularis': 'greater_argonaut',
'trichocephalus_trichiuris': 'whipworm',
'trichocereus_bridgesii': 'echinopsis_lageniformis',
'trichocereus_macrogonus': 'echinopsis_macrogona',
'trichocereus_pachanoi': 'echinopsis_pachanoi',
'trichocereus_peruvianus': 'echinopsis_peruviana',
'trichocereus_scopulicolus': 'echinopsis_scopulicola',
'trichocereus_spachianus': 'echinopsis_spachiana',
'trichocichla_rufa': 'long-legged_warbler',
'trichocladia_diffusa': 'microsphaera_diffusa',
'trichocladium_basicola': 'thielaviopsis_basicola',
'trichoconiella_padwickii': 'alternaria_padwickii',
'trichoconis_crotalariae': 'phaeotrichoconis_crotalariae',
'trichoconis_padwickii': 'alternaria_padwickii',
'trichodere_cockerelli': 'white-streaked_honeyeater',
'trichoderma_lignorum': 'trichoderma_viride',
'trichoderma_narcissi': 'trichoderma_harzianum',
'trichoderma_roseum': 'trichothecium_roseum',
'trichodorus_christiei': 'paratrichodorus_minor',
'trichogaster_leeri': 'pearl_gourami',
'trichogaster_leerii': 'pearl_gourami',
'trichogaster_microlepis': 'moonlight_gourami',
'trichogaster_pectoralis': 'snakeskin_gourami',
'trichogaster_trichopterus': 'three_spot_gourami',
'trichoglossus_capistratus': 'marigold_lorikeet',
'trichoglossus_chlorolepidotus': 'scaly-breasted_lorikeet',
'trichoglossus_euteles': 'olive-headed_lorikeet',
'trichoglossus_flavoviridis': 'citrine_lorikeet',
'trichoglossus_forsteni': 'sunset_lorikeet',
'trichoglossus_haematodus': 'rainbow_lorikeet',
'trichoglossus_johnstoniae': 'mindanao_lorikeet',
'trichoglossus_moluccanus': 'rainbow_lorikeet',
'trichoglossus_ornatus': 'ornate_lorikeet',
'trichoglossus_rosenbergii': 'rainbow_lorikeet',
'trichoglossus_rubiginosus': 'pohnpei_lorikeet',
'trichoglossus_rubritorquis': 'red-collared_lorikeet',
'trichoglossus_weberi': 'leaf_lorikeet',
'trichogramma_wasp': 'trichogramma',
'tricholaema_diademata': 'red-fronted_barbet',
'tricholaema_frontata': 'miombo_barbet',
'tricholaema_hirsuta': 'hairy-breasted_barbet',
'tricholaema_lacrymosa': 'spot-flanked_barbet',
'tricholaema_leucomelas': 'pied_barbet',
'tricholaema_melanocephala': 'black-throated_barbet',
'tricholestes_criniger': 'hairy-backed_bulbul',
'tricholoma_flavovirens': 'tricholoma_equestre',
'tricholoma_matsutake': 'matsutake',
'tricholoma_nudum': 'blewit',
'tricholoma_sculpturatum': 'tricholoma_argyraceum',
'trichometasphaeria_turcica': 'setosphaeria_turcica',
'trichomonas_infections': 'trichomonas_vaginalis',
'trichomonas_vaginitis': 'trichomonas_vaginalis',
'trichomycterus_inermis': 'nematogenys_inermis',
'trichomycterus_pusillus': 'pareiodon_microps',
'trichonema_roseum': 'romulea_rosea',
'trichonida_lake': 'lake_trichonida',
'trichoniscus_caelebs': 'trichoniscus_pusillus',
'trichoniscus_coelebs': 'trichoniscus_pusillus',
'trichoniscus_dentiger': 'androniscus_dentiger',
'trichoniscus_elisabethae': 'trichoniscus_pusillus',
'trichoniscus_germanicus': 'hyloniscus_riparius',
'trichoniscus_montanus': 'hyloniscus_riparius',
'trichoniscus_notatus': 'hyloniscus_riparius',
'trichoniscus_rhenanus': 'trichoniscus_pusillus',
'trichoniscus_riparius': 'hyloniscus_riparius',
'trichoniscus_tirolensis': 'hyloniscus_riparius',
'trichoniscus_violaceus': 'hyloniscus_riparius',
'trichooculodermovertebral_syndrome': 'arthrogryposis',
'trichopeltella_montana': 'trichopeltella',
'trichophorus_minutus': 'hairy-backed_bulbul',
'trichophyllum_lanatum': 'eriophyllum_lanatum',
'trichophyton_mentagrophytes': 'trichophyton_interdigitale',
'trichophyton_schoenleini': 'trichophyton',
'trichophyton_soudanense': 'trichophyton_rubrum',
'trichopilia_albida': 'trichopilia_fragrans',
'trichopilia_candida': 'trichopilia_fragrans',
'trichopilia_kienastiana': 'trichopilia_suavis',
'trichopilia_lehmannii': 'trichopilia_fragrans',
'trichopilia_nobilis': 'trichopilia_fragrans',
'trichopilia_reichenheimia': 'trichopilia_laxa',
'trichoplacia_microscopica': 'trichoplacia',
'trichoplax_adhaerens': 'trichoplax',
'trichoplexia_exornata': 'apamea_contradicta',
'trichoplusia_ni': 'cabbage_looper',
'trichoplusia_transfixa': 'ctenoplusia_vittata',
'trichoplusia_vittata': 'ctenoplusia_vittata',
'trichopodus_pectoralis': 'snakeskin_gourami',
'trichopsis_pumila': 'pygmy_gourami',
'trichopsis_vittata': 'croaking_gourami',
'trichoptilium_incisum': 'trichoptilium',
'trichoptilus_adelphodes': 'megalorrhipida_leucodactyla',
'trichoptilus_centetes': 'megalorrhipida_leucodactyla',
'trichoptilus_compsochares': 'megalorrhipida_leucodactyla',
'trichoptilus_derelictus': 'megalorrhipida_leucodactyla',
'trichoptilus_ochrodactyla': 'megalorrhipida_leucodactyla',
'trichoptilus_ralumensis': 'megalorrhipida_leucodactyla',
'trichoptya_magna': 'bocula_sejuncta',
'trichoptya_nigropunctata': 'bocula_sejuncta',
'trichoptya_pallida': 'bocula_sejuncta',
'trichoptya_subspurcata': 'bocula_sejuncta',
'trichorhinophalangeal_syndrome': 'langer-giedion_syndrome',
'trichosalpinx_lancifera': 'trichosalpinx_orbicularis',
'trichosalpinx_oxychilos': 'trichosalpinx_orbicularis',
'trichosalpinx_quartzicola': 'trichosalpinx_montana',
'trichosanthes_anguina': 'trichosanthes_cucumerina',
'trichosanthes_grandiflora': 'hodgsonia',
'trichosanthes_heteroclita': 'hodgsonia',
'trichosanthes_hexasperma': 'hodgsonia',
'trichosanthes_kadam': 'hodgsonia',
'trichosanthes_macrocarpa': 'hodgsonia',
'trichosanthes_theba': 'hodgsonia',
'trichosellus_cupes': 'schinia_cupes',
'trichosirius_clathrata': 'trichosirius_inornatus',
'trichosirius_inornata': 'trichosirius_inornatus',
'trichosporon_adeninovorans': 'arxula_adeninivorans',
'trichosporon_cutaneum': 'trichosporon',
'trichosporon_klebahnii': 'geotrichum_klebahnii',
'trichosporon_mycotoxinvorans': 'trichosporon',
'trichosporon_penicillatum': 'geotrichum_klebahnii',
'trichosporum_sphaericum': 'khuskia_oryzae',
'trichostrongylus_tenius': 'trichostrongylus_tenuis',
'trichosurus_arnhemensis': 'northern_brushtail_possum',
'trichosurus_caninus': 'short-eared_possum',
'trichosurus_cunninghami': 'mountain_brushtail_possum',
'trichosurus_johnstonii': 'coppery_brushtail_possum',
'trichosurus_vulpecula': 'common_brushtail_possum',
'trichothecium_griseum': 'magnaporthe_grisea',
'trichothraupis_melanops': 'black-goggled_tanager',
'trichothyriella_quercigena': 'trichothyriella',
'trichothyriomyces_notatus': 'trichothyriomyces',
'trichotomy_law': 'trichotomy',
'trichotomy_property': 'inequality',
'trichromatic_theory': 'trichromacy',
'trichromatic_vision': 'trichromacy',
'trichur_pooram': 'thrissur_pooram',
'trichur_ramachandran': 'trichur_v._ramachandran',
'trichuris_campanula': 'trichuris',
'trichuris_trichiura': 'whipworm',
'trichys_fasciculata': 'long-tailed_porcupine',
'tricia_brock': 'superchick',
'tricia_dennison': 'tricia_mcneil',
'tricia_deveraux': 'tricia_devereaux',
'tricia_devereux': 'tricia_devereaux',
'tricia_dixon': 'tricia_dickson',
'tricia_dunn': 'tricia_dunn-luoma',
'tricia_leines': 'patricia_pine',
'tricia_mara': 'rooney_mara',
'tricia_nixon': 'tricia_nixon_cox',
'tricia_stokes': 'tricia_dingle',
'tricia_walsh-smith': 'tricia_walsh',
'tricia_willoughby': 'patricia_n._willoughby',
'tricia_yearwood': 'trisha_yearwood',
'trick_candle': 'trick_candles',
'trick_currelly': 'charles_trick_currelly',
'trick_drill': 'exhibition_drill',
'trick_ending': 'twist_ending',
'trick_flow': 'trick_flow_specialties',
'trick_kite': 'sport_kite',
'trick_knee': 'anterior_cruciate_ligament_injury',
'trick_night': 'mischief_night',
'trick_question': 'loaded_question',
'trick_rider': 'horse_riding_stunts',
'trick_riding': 'horse_riding_stunts',
'trick_shooting': 'exhibition_shooting',
'trick_skiing': 'waterskiing',
'trick_taking': 'trick-taking_game',
'trick_trick': 'trick-trick',
'trick_turner': 'trik_turner',
'tricking_someone': 'practical_joke',
'trickle-up_economics': 'trickle_up_effect',
'trickle_charge': 'trickle_charging',
'trickle_down': 'trickle-down_theory',
'trickle_filter': 'trickling_filter',
'trickle_irrigation': 'drip_irrigation',
'trickle_vents': 'trickle_vent',
'trickles_down': 'trickle-down_economics',
'trickling_filters': 'trickling_filter',
'tricks_tutorials': 'tricking',
'trickster_books': 'daughter_of_the_lioness',
'trickster_deity': 'trickster',
'trickster_duology': 'daughter_of_the_lioness',
'trickster_figure': 'trickster',
'trickster_g': 'kristoffer_rygg',
'trickster_g.': 'kristoffer_rygg',
'trickster_god': 'trickster',
'trickster_hero': 'trickster',
'trickster_myth': 'trickster',
'trickster_raven': 'raven_in_mythology',
'tricksters_queen': 'daughter_of_the_lioness',
'tricksters_series': 'daughter_of_the_lioness',
'tricky_beans': 'sonata_arctica',
'tricky_disco': 'greater_than_one',
'tricky_means': 'sonata_arctica',
'tricky_sam': 'tricky_sam_nanton',
'tricky_tray': 'chinese_auction',
'tricky_trees': 'nottingham_forest_f.c.',
'tricky_tyres': 'mario_party_6',
'triclaria_malachitacea': 'blue-bellied_parrot',
'triclavian_heresy': 'triclavianism',
'triclinic_symmetry': 'triclinic_crystal_system',
'triclinic_system': 'triclinic_crystal_system',
'triclofos_sodium': 'triclofos',
'tricoise_wrench': 'hydrant_wrench',
'tricolor_daisy': 'glebionis_carinatum',
'tricolor_effect': 'harris_shutter',
'tricolor_flycatcher': 'yellow-rumped_flycatcher',
'tricolor_shark': 'bala_shark',
'tricolored_brush-finch': 'tricoloured_brush-finch',
'tricolored_bumblebee': 'orange-belted_bumblebee',
'tricolored_flycatcher': 'yellow-rumped_flycatcher',
'tricolored_lory': 'black-capped_lory',
'tricolored_munia': 'tricoloured_munia',
'tricolorul_breaza': 'cs_tricolorul_breaza',
'tricolour_award': 'agnes_benidickson_tricolour_award',
'tricolour_flag': 'tricolour',
'tricolour_flame': 'tricolor_flame',
'tricolour_flycatcher': 'yellow-rumped_flycatcher',
'tricoloured_blackbird': 'tricolored_blackbird',
'tricoloured_bumblebee': 'orange-belted_bumblebee',
'tricoloured_flycatcher': 'yellow-rumped_flycatcher',
'tricoloured_heron': 'tricolored_heron',
'tricoloured_lory': 'black-capped_lory',
'tricom_corporation': 'southern_cross_broadcasting',
'tricomalee_campus': 'trincomalee_campus',
'tricondylus_fraseri': 'lomatia_fraseri',
'triconsonantal_root': 'semitic_root',
'tricoptya_expansilis': 'bocula_sejuncta',
'tricoptya_inquinata': 'bocula_sejuncta',
'tricorn_center': 'tricorn_centre',
'tricorn_hat': 'tricorne',
'tricorner_hat': 'tricorne',
'tricotaje_ineu': 'cs_ineu',
'trictena_argentata': 'trictena_atripalpis',
'tricupsid_stenosis': 'tricuspid_valve_stenosis',
'tricuspid_curve': 'deltoid_curve',
'tricuspid_incompetence': 'tricuspid_insufficiency',
'tricuspid_regurgitation': 'tricuspid_insufficiency',
'tricuspid_stenosis': 'tricuspid_valve_stenosis',
'tricuspid_valves': 'tricuspid_valve',
'tricuspidal_atresia': 'tricuspid_atresia',
'tricuspidal_stenosis': 'tricuspid_valve_stenosis',
'tricycle_cinema': 'tricycle_theatre',
'tricycle_inc': 'tricycle_inc.',
'tricycle_magazine': 'tricycle:_the_buddhist_review',
'tricycle_rickshaw': 'cycle_rickshaw',
'tricycle_undercarriage': 'tricycle_gear',
'tricyclic_anti-depressants': 'tricyclic_antidepressant',
'tricyclic_antidepressants': 'tricyclic_antidepressant',
'tricyclic_flower': 'cyclic_flower',
'tridacna_crocea': 'boring_clam',
'tridacna_derasa': 'southern_giant_clam',
'tridacna_gigas': 'giant_clam',
'tridacna_maxima': 'maxima_clam',
'tridacna_tevoroa': 'tevoro_clam',
'tridax_parviflora': 'galinsoga_parviflora',
'tridelphia_lake': 'triadelphia_reservoir',
'tridelphia_reservoir': 'triadelphia_reservoir',
'tridelta_transit': 'tri_delta_transit',
'tridens_flavus': 'purpletop',
'trident_2': 'hawker_siddeley_trident',
'trident_3b': 'hawker_siddeley_trident',
'trident_console': 'trident_studios',
'trident_i': 'ugm-96_trident_i',
'trident_ii': 'ugm-133_trident_ii',
'trident_maple': 'acer_buergerianum',
'trident_maples': 'acer_buergerianum',
'trident_replacement': 'british_replacement_of_the_trident_system',
'trident_shell': 'internet_explorer_shell',
'trident_system': 'uk_trident_programme',
'trident_towers': 'oberoi_trident',
'trident_volcano': 'mount_trident',
'tridentine_council': 'council_of_trent',
'tridentine_liturgy': 'tridentine_mass',
'tridentine_rite': 'tridentine_mass',
'trider_g-7': 'trider_g7',
'tridihexethyl_chloride': 'tridihexethyl',
'tridione_syndrome': 'fetal_trimethadione_syndrome',
'triebten_lakewli': 'triebtenseewli',
'triebten_seewli': 'triebtenseewli',
'triengen_lu': 'triengen',
'triennial_act': 'triennial_acts',
'triennial_bill': 'triennial_acts',
'triennial_stakes': 'jersey_stakes',
'trient_hut': 'cabane_du_trient',
'trientine_hydrochloride': 'triethylenetetramine',
'trier_cathedral': 'cathedral_of_trier',
'trier_dom': 'cathedral_of_trier',
'trier_psalter': 'gertrude_psalter',
'trier_stampers': 'pst_trier_stampers',
'triesenberg_commune': 'triesenberg',
'triest_university': 'university_of_trieste',
'trieste-ingolstadt_pipeline': 'transalpine_pipeline',
'trieste-opicina_tramway': 'opicina_tramway',
'trieste_airport': 'friuli_venezia_giulia_airport',
'trieste_class': 'deep-submergence_vehicle',
'trieste_dunn': 'trieste_kelly_dunn',
'trieste_ii': 'bathyscaphe_trieste_ii',
'trieste_observatory': 'trieste_astronomical_observatory',
'trieste_port': 'port_of_trieste',
'trieste_province': 'province_of_trieste',
'trieste_university': 'university_of_trieste',
'triethelene_glycol': 'triethylene_glycol',
'triethyl_borane': 'triethylborane',
'triethylamine/acetate_buffer': 'triethylammonium_acetate',
'triethylammonium_chloride': 'triethylamine_hydrochloride',
'triethylene_tetramine': 'triethylenetetramine',
'triethyleneglycol_dinitrate': 'triethylene_glycol_dinitrate',
'trieu_da': 'zhao_tuo',
'trifacial_nerve': 'trigeminal_nerve',
'trifekta_records': 'trifekta',
'triffid_nebula': 'trifid_nebula',
'triffin_paradox': 'triffin_dilemma',
'trifid_bur-marigold': 'bidens_tripartita',
'triflate_salt': 'trifluoromethanesulfonate',
'trifle_pewter': 'pewter',
'triflic_acid': 'trifluoromethanesulfonic_acid',
'triflic_anhydride': 'trifluoromethanesulfonic_anhydride',
'trifling_metal': 'pewter',
'trifluoperazine_hydrochloride': 'trifluoperazine',
'trifluoro_borane': 'boron_trifluoride',
'trifluoroethanoic_acid': 'trifluoroacetic_acid',
'trifluoromethyl_bromide': 'bromotrifluoromethane',
'trifluoromethyl_chloride': 'chlorotrifluoromethane',
'trifluoromethyl_iodide': 'trifluoroiodomethane',
'triflupromazine_hydrochloride': 'triflupromazine',
'trifocal_lens': 'trifocal_lenses',
'trifocon_a': 'rigid_gas_permeable',
'trifoliate_citrus': 'trifoliate_orange',
'trifoliin_a': 'isoquercitin',
'trifolium_agrarium': 'large_hop_trefoil',
'trifolium_aureum': 'large_hop_trefoil',
'trifolium_incarnata': 'trifolium_incarnatum',
'trifolium_pratensis': 'trifolium_pratense',
'trifolium_strepens': 'large_hop_trefoil',
'trifolium_willdenowii': 'trifolium_willdenovii',
'trifon_ivanon': 'trifon_ivanov',
'trift_lake': 'triftsee',
'trift_see': 'triftsee',
'trifun_zivonovic': 'trifun_zivanovic',
'trig_beacon': 'triangulation_station',
'trig_function': 'trigonometric_functions',
'trig_functions': 'trigonometric_functions',
'trig_parallax': 'parallax',
'trig_point': 'triangulation_station',
'trig_proofs': 'proofs_of_trigonometric_identities',
'trig_ratios': 'trigonometric_functions',
'trig_station': 'triangulation_station',
'trig_sub': 'trigonometric_substitution',
'trig_substitution': 'trigonometric_substitution',
'triga_reactor': 'triga',
'trigami_language': 'tregami_language',
'trigan_empire': 'the_trigan_empire',
'trigate_device': 'multigate_device',
'trigate_devices': 'multigate_device',
'trigate_transistor': 'multigate_device',
'trigate_transistors': 'multigate_device',
'trigeminal_ganglia': 'trigeminal_ganglion',
'trigeminal_nerves': 'trigeminal_nerve',
'trigeminal_nuclei': 'trigeminal_nerve_nuclei',
'trigeminal_nucleus': 'trigeminal_nerve_nuclei',
'trigeminal_system': 'trigeminal_nerve',
'trigeminothalamic_tract': 'ventral_trigeminothalamic_tract',
'trigeminus_nerve': 'trigeminal_nerve',
'trigg_major': 'triggshire',
'trigg_minor': 'triggshire',
'trigger_berg': 'edward_edson_lee',
'trigger_finger/thumb': 'trigger_finger',
'trigger_fish': 'triggerfish',
'trigger_gospel': 'anna_fermin',
'trigger_hair': 'trichome',
'trigger_language': 'austronesian_alignment',
'trigger_locks': 'trigger_lock',
'trigger_mechanism': 'trigger',
'trigger_mike': 'michael_coppola',
'trigger_pads': 'trigger_pad',
'trigger_plant': 'stylidium',
'trigger_plants': 'stylidium',
'trigger_points': 'trigger_point',
'trigger_site': 'trigger_point',
'trigger_sites': 'trigger_point',
'trigger_system': 'austronesian_alignment',
'trigger_thumb': 'trigger_finger',
'trigno_river': 'trigno',
'trignonmetric_ratios': 'trigonometry',
'trigon_disunity': 'the_trigon_disunity',
'trigon_puzzles': 'trigon_puzzle',
'trigona_fibrosa': 'fibrous_trigone',
'trigona_hypogea': 'vulture_bee',
'trigonal_bipyramid': 'triangular_dipyramid',
'trigonal_bipyramidal': 'trigonal_bipyramidal_molecular_geometry',
'trigonal_curve': 'gonality_of_an_algebraic_curve',
'trigonal_dipyramid': 'triangular_dipyramid',
'trigonal_planar': 'trigonal_planar_molecular_geometry',
'trigonal_planer': 'trigonal_planar_molecular_geometry',
'trigonal_pyramid': 'trigonal_pyramidal_molecular_geometry',
'trigonal_pyramidal': 'trigonal_pyramidal_molecular_geometry',
'trigonal_symmetry': 'trigonal_crystal_system',
'trigonal_system': 'trigonal_crystal_system',
'trigonella_brachycarpa': 'medicago_brachycarpa',
'trigonella_foenum-graecum': 'fenugreek',
'trigonella_radiata': 'medicago_radiata',
'trigonidium_ringens': 'mormolyca_ringens',
'trigoniulus_corallinus': 'rusty_millipede',
'trigonmetic_function': 'trigonometric_functions',
'trigonocephalus_affnis': 'gloydius_himalayanus',
'trigonocephalus_alternatus': 'bothrops_alternatus',
'trigonocephalus_ammodytes': 'lachesis_muta',
'trigonocephalus_asper': 'bothrops_asper',
'trigonocephalus_atro-fuscus': 'agkistrodon_contortrix',
'trigonocephalus_atrox': 'bothrops_atrox',
'trigonocephalus_cantori': 'trimeresurus_cantori',
'trigonocephalus_cenchris': 'agkistrodon_contortrix',
'trigonocephalus_colombiensis': 'bothrops_atrox',
'trigonocephalus_contortrix': 'agkistrodon_contortrix',
'trigonocephalus_crotalinus': 'lachesis_muta',
'trigonocephalus_darwini': 'trimeresurus_strigatus',
'trigonocephalus_erythrurus': 'trimeresurus_erythrurus',
'trigonocephalus_formosus': 'trimeresurus_sumatranus',
'trigonocephalus_gramineus': 'trimeresurus_gramineus',
'trigonocephalus_himalayanus': 'gloydius_himalayanus',
'trigonocephalus_histrionicus': 'agkistrodon_contortrix',
'trigonocephalus_holocericeus': 'bothrops_atrox',
'trigonocephalus_hombroni': 'tropidolaemus_wagleri',
'trigonocephalus_hypnale': 'hypnale_hypnale',
'trigonocephalus_hypnalis': 'hypnale_hypnale',
'trigonocephalus_jararaca': 'bothrops_jararaca',
'trigonocephalus_lanceolatus': 'bothrops_lanceolatus',
'trigonocephalus_lansbergii': 'porthidium_lansbergii',
'trigonocephalus_macrolepis': 'trimeresurus_macrolepis',
'trigonocephalus_mucrosquamatus': 'trimeresurus_mucrosquamatus',
'trigonocephalus_nigromarginatus': 'trimeresurus_trigonocephalus',
'trigonocephalus_nilghiriensis': 'trimeresurus_strigatus',
'trigonocephalus_nummifer': 'atropoides_nummifer',
'trigonocephalus_piscivorus': 'agkistrodon_piscivorus',
'trigonocephalus_praetextatus': 'calloselasma',
'trigonocephalus_pulcher': 'bothriopsis_pulchra',
'trigonocephalus_puniceus': 'trimeresurus_puniceus',
'trigonocephalus_purpureo-maculatus': 'trimeresurus_purpureomaculatus',
'trigonocephalus_rhodostoma': 'calloselasma',
'trigonocephalus_sagittiformis': 'trimeresurus_trigonocephalus',
'trigonocephalus_schlegelii': 'bothriechis_schlegelii',
'trigonocephalus_specialis': 'agkistrodon_bilineatus',
'trigonocephalus_sumatranus': 'trimeresurus_sumatranus',
'trigonocephalus_tigrinus': 'bothrops_lanceolatus',
'trigonocephalus_tisiphone': 'agkistrodon_piscivorus',
'trigonocephalus_viridis': 'trimeresurus_gramineus',
'trigonocephalus_wagleri': 'tropidolaemus_wagleri',
'trigonocephalus_xanthogrammus': 'bothrops_asper',
'trigonocephalus_xantogrammus': 'bothrops_asper',
'trigonocephalus_zara': 'hypnale_hypnale',
'trigonocephaly_syndrome': 'trigonocephaly',
'trigonoceps_occipitalis': 'white-headed_vulture',
'trigonognathus_kabeyai': 'viper_dogfish',
'trigonometric_approximation': 'fourier_series',
'trigonometric_differentiation': 'differentiation_of_trigonometric_functions',
'trigonometric_function': 'trigonometric_functions',
'trigonometric_integrals': 'trigonometric_integral',
'trigonometric_numbers': 'trigonometric_number',
'trigonometric_parallax': 'parallax',
'trigonometric_polynomials': 'trigonometric_polynomial',
'trigonometric_ratios': 'trigonometry',
'trigonometric_substitutions': 'trigonometric_substitution',
'trigonometric_sum': 'fourier_series',
'trigonometrical_interpolation': 'trigonometric_interpolation',
'trigonometrical_station': 'triangulation_station',
'trigonometry_table': 'exact_trigonometric_constants',
'trigonostigma_heteromorpha': 'harlequin_rasbora',
'trigonum_fibrosum': 'fibrous_trigone',
'trigonum_hypoglossi': 'hypoglossal_trigone',
'trigonum_retromolare': 'retromolar_space',
'trigonum_vesicae': 'trigone_of_urinary_bladder',
'trigun_anime': 'trigun',
'trigun_manga': 'trigun',
'trigun_maximum': 'trigun',
'trihexyphenidyl_hydrochloride': 'trihexyphenidyl',
'trihydroxy-benzoic_acid': 'gallic_acid',
'triiron_dodecarbonyl': 'triiron_dodecacarbonyl',
'triiron_tetraoxide': 'magnetite',
'trijaya_fm': 'radio_trijaya',
'trikala_bc': 'athlitikos_syllogos_trikala_2000_bc',
'trikala_fc': 'trikala_f.c.',
'trikalinos_christos': 'christos_trikalinos',
'trikkala_prefecture': 'trikala_prefecture',
'trikke_skki': 'trikke',
'trikora_peak': 'puncak_trikora',
'trikus_harjanto': 'tri_kusharjanto',
'trikus_haryanto': 'tri_kusharjanto',
'trikus_heryanto': 'tri_kusharjanto',
'trilateral_co-operation': 'trilateral_patent_offices',
'trilateral_commision': 'trilateral_commission',
'trilateral_cooperation': 'trilateral_patent_offices',
'trilateral_office': 'trilateral_patent_offices',
'trilateral_offices': 'trilateral_patent_offices',
'trilby_ewer': 'william_norman_ewer',
'trilby_freakes': 'hubert_freakes',
'trilby_hat': 'trilby',
'trilegiant_corporation': 'affinion_group',
'trilinear_coordinate': 'trilinear_coordinates',
'trilinear_form': 'multilinear_form',
'triliteral_root': 'semitic_root',
'trilithium_citrate': 'lithium_citrate',
'trill_records': 'trill_entertainment',
'trill_seeking': 'adventure',
'trillian_cut': 'diamond_cut',
'trillium_award': 'trillium_book_award',
'trillium_cable': 'cuc_broadcasting',
'trillium_rivale': 'pseudotrillium',
'trillium_school': 'trillium_charter_school',
'trilobite_beetle': 'duliticola',
'trilobulina_striata': 'utricularia_striata',
'trilobyte_software': 'trilobyte',
'trilogy_ring': 'wedding_ring',
'trilon_bs': 'edta',
'trilpe_a-metal': 'triple_a-metal',
'triluminal_catheter': 'catheter',
'trim_body': 'run_come_save_me',
'trim_level': 'car_model',
'trim_lines': 'trim_line',
'trim_phone': 'trimphone',
'trim_pot': 'potentiometer',
'trim_running': 'assault_course',
'trim_spa': 'trimspa',
'trim_tabs': 'trim_tab',
'trim_trail': 'assault_course',
'trim_trails': 'assault_course',
'trimaculatus_ceylonensis': 'hypnale_hypnale',
'trimaran_hull': 'trimaran',
'trimbakeshwar_mahadev': 'tikona',
'trimble_inc': 'trimble_navigation',
'trimble_island': 'blake_island',
'trimdon_station': 'trimdon',
'trimeprazine_tartrate': 'alimemazine',
'trimeresurus_acutimentalis': 'trimeresurus_purpureomaculatus_andersoni',
'trimeresurus_alternatus': 'bothrops_alternatus',
'trimeresurus_anamallensis': 'trimeresurus_malabaricus',
'trimeresurus_andersoni': 'trimeresurus_purpureomaculatus_andersoni',
'trimeresurus_andersonii': 'trimeresurus_purpureomaculatus_andersoni',
'trimeresurus_atrox': 'bothrops_atrox',
'trimeresurus_aurifer': 'bothriechis_aurifer',
'trimeresurus_barbouri': 'cerrophidion_barbouri',
'trimeresurus_barnetti': 'bothrops_barnetti',
'trimeresurus_bicolor': 'bothriechis_bicolor',
'trimeresurus_brachystoma': 'porthidium_lansbergii',
'trimeresurus_cantoris': 'trimeresurus_cantori',
'trimeresurus_capitetriangulatus': 'trimeresurus_trigonocephalus',
'trimeresurus_carinatus': 'trimeresurus_purpureomaculatus',
'trimeresurus_ceylonensis': 'hypnale_hypnale',
'trimeresurus_chaseni': 'ovophis_chaseni',
'trimeresurus_dunni': 'porthidium_dunni',
'trimeresurus_formosus': 'trimeresurus_sumatranus',
'trimeresurus_garciae': 'ophryacus_melanurus',
'trimeresurus_garciai': 'ophryacus_melanurus',
'trimeresurus_godmani': 'cerrophidion_godmani',
'trimeresurus_halieus': 'trimeresurus_flavomaculatus_halieus',
'trimeresurus_huttoni': 'tropidolaemus_huttoni',
'trimeresurus_insularis': 'bothrops_insularis',
'trimeresurus_jerdoni': 'trimeresurus_jerdonii',
'trimeresurus_luteus': 'trimeresurus_elegans',
'trimeresurus_mcgregori': 'trimeresurus_flavomaculatus_mcgregori',
'trimeresurus_melanurus': 'ophryacus_melanurus',
'trimeresurus_monticola': 'ovophis_monticola',
'trimeresurus_mutabilis': 'trimeresurus_labialis',
'trimeresurus_nasutus': 'porthidium_nasutum',
'trimeresurus_neuwiedii': 'bothrops_neuwiedi',
'trimeresurus_nigroviridis': 'bothriechis_nigroviridis',
'trimeresurus_obscurus': 'trimeresurus_purpureomaculatus_andersoni',
'trimeresurus_occidentalis': 'trimeresurus_gramineus',
'trimeresurus_okinavensis': 'ovophis_okinavensis',
'trimeresurus_ophryomegas': 'porthidium_ophryomegas',
'trimeresurus_philippensis': 'tropidolaemus_wagleri',
'trimeresurus_philippinensis': 'tropidolaemus_wagleri',
'trimeresurus_popeiorum': 'trimeresurus_popeorum',
'trimeresurus_purpureus': 'trimeresurus_purpureomaculatus',
'trimeresurus_riukiuanus': 'trimeresurus_flavoviridis',
'trimeresurus_schadenbergi': 'trimeresurus_flavomaculatus',
'trimeresurus_schlegelii': 'bothriechis_schlegelii',
'trimeresurus_septentrionalis': 'trimeresurus_albolabris_septentrionalis',
'trimeresurus_strigolus': 'trimeresurus_strigatus',
'trimeresurus_tonkinensis': 'ovophis_monticola',
'trimeresurus_undulatus': 'ophryacus_undulatus',
'trimeresurus_venustus': 'trimeresurus_kanburiensis',
'trimeresurus_viridis': 'trimeresurus_gramineus',
'trimeresurus_wagleri': 'tropidolaemus_wagleri',
'trimeresurus_wiroti': 'trimeresurus_puniceus',
'trimeresurus_xanthomelas': 'trimeresurus_jerdonii_xanthomelas',
'trimeresurus_yucatanicus': 'porthidium_yucatanicum',
'trimeresurus_yucatannicus': 'porthidium_yucatanicum',
'trimeresurus_yunnanensis': 'trimeresurus_stejnegeri_yunnanensis',
'trimerotropis_infantilis': 'zayante_band-winged_grasshopper',
'trimerotropis_pallidipennis': 'pallid-winged_grasshopper',
'trimersurus_mucrosquamatus': 'trimeresurus_mucrosquamatus',
'trimesurus_albolabris': 'trimeresurus_albolabris',
'trimesurus_bicolor': 'trimeresurus_erythrurus',
'trimesurus_carinatus': 'trimeresurus_purpureomaculatus',
'trimesurus_elegans': 'trimeresurus_popeorum',
'trimesurus_maculatus': 'tropidolaemus_wagleri',
'trimesurus_malabaricus': 'trimeresurus_malabaricus',
'trimesurus_nielgherriensis': 'trimeresurus_strigatus',
'trimesurus_philippensis': 'tropidolaemus_wagleri',
'trimesurus_philippinensis': 'tropidolaemus_wagleri',
'trimesurus_porphyraceus': 'trimeresurus_purpureomaculatus',
'trimesurus_purpureus': 'trimeresurus_purpureomaculatus',
'trimesurus_strigatus': 'trimeresurus_strigatus',
'trimesurus_subannulatus': 'tropidolaemus_wagleri',
'trimesurus_viridis': 'trimeresurus_gramineus',
'trimesurus_wardii': 'trimeresurus_malabaricus',
'trimethaphan_camsylate': 'trimetaphan_camsilate',
'trimethobenzamide_hydrochloride': 'trimethobenzamide',
'trimethoprim-sulfamethoxazole_combination': 'co-trimoxazole',
'trimethoprim_hydrochloride': 'trimethoprim',
'trimethoprim_sulfate': 'trimethoprim',
'trimethoxyphosphine_oxide': 'trimethyl_phosphate',
'trimethyl_aluminium': 'trimethylaluminium',
'trimethyl_aluminum': 'trimethylaluminium',
'trimethyl_chlorostannane': 'trimethyltin_chloride',
'trimethyl_ester': 'trimethyl_phosphate',
'trimethyl_orthophosphate': 'trimethyl_phosphate',
'trimethylacetic_acid': 'pivalic_acid',
'trimethylamine_oxide': 'trimethylamine_n-oxide',
'trimethylammonium_chloride': 'trimethylamine',
'trimethylaziridinylphosphine_oxide': 'metepa',
'trimethylene_imine': 'azetidine',
'trimethylene_oxide': 'oxetane',
'trimethylene_sulfide': 'thietane',
'trimethylol_propane': 'trimethylolpropane',
'trimethylolpropane_triacrylate': 'tmpta',
'trimethyloxonium_tetrafluoroborate': 'triethyloxonium_tetrafluoroborate',
'trimethylsilyl_compounds': 'trimethylsilyl',
'trimethylsilyl_diazomethane': 'trimethylsilyldiazomethane',
'trimethylsilyl_group': 'trimethylsilyl',
'trimethylstannyl_chloride': 'trimethyltin_chloride',
'trimethylsulfonium--tetrahydrofolate_n-methyltransferase': 'trimethylsulfonium-tetrahydrofolate_n-methyltransferase',
'trimethyltin_monochloride': 'trimethyltin_chloride',
'trimetrexate_glucuronate': 'trimetrexate',
'trimetric_projection': 'axonometric_projection',
'trimi_velliste': 'trivimi_velliste',
'trimipramine_maleate': 'trimipramine',
'trimline_phone': 'trimline_telephone',
'trimline_telephones': 'trimline_telephone',
'trimmed_average': 'truncated_mean',
'trimmed_mean': 'truncated_mean',
'trimmis_gr': 'trimmis',
'trimoda_necessitas': 'trinoda_necessitas',
'trimolecular_reaction': 'molecularity',
'trimstein_be': 'trimstein',
'trimucrodon_cuneatus': 'trimucrodon',
'trin_gr': 'trin',
'trina_braxton': 'the_braxtons',
'trina_dupri': 'trina',
'trina_mcgee': 'trina_mcgee-davis',
'trina_robins': 'trina_robbins',
'trina_videography': 'trina',
'trinamool_congress': 'all_india_trinamool_congress',
'trinamul_congress': 'all_india_trinamool_congress',
'trinary_function': 'ternary_operation',
'trinary_logic': 'ternary_logic',
'trinary_search': 'ternary_search',
'trinary_star': 'star_system',
'trincadeira_preta': 'tinta_amarela',
'trinci_palace': 'palazzo_trinci',
'trincomalee_frigate': 'hms_trincomalee',
'trindade_island': 'trindade_and_martim_vaz',
'trinder-emerson_reagent': 'trinder_glucose_activity_test',
'trinder_reaction': 'trinder_spot_test',
'trinder_reagent': 'trinder_spot_test',
'trinder_solution': 'trinder_spot_test',
'trine_solberg': 'trine_hattestad',
'trinectes_maculatus': 'hogchoker',
'tring_athletic': 'tring_athletic_f.c.',
'tring_central': 'tring',
'tring_east': 'tring',
'tringa_brevipes': 'grey-tailed_tattler',
'tringa_cancellata': 'tuamotu_sandpiper',
'tringa_canutus': 'red_knot',
'tringa_cinerea': 'terek_sandpiper',
'tringa_erythropus': 'spotted_redshank',
'tringa_flavipes': 'lesser_yellowlegs',
'tringa_glareola': 'wood_sandpiper',
'tringa_guttifer': 'spotted_greenshank',
'tringa_hoffmanni': 'ludiortyx',
'tringa_hypoleucos': 'common_sandpiper',
'tringa_incana': 'wandering_tattler',
'tringa_indica': 'red-wattled_lapwing',
'tringa_leucoptera': 'tahitian_sandpiper',
'tringa_macularia': 'spotted_sandpiper',
'tringa_melanoleuca': 'greater_yellowlegs',
'tringa_miles': 'masked_lapwing',
'tringa_nebularia': 'greenshank',
'tringa_ochropus': 'green_sandpiper',
'tringa_parvirostris': 'tuamotu_sandpiper',
'tringa_semipalmata': 'willet',
'tringa_solitaria': 'solitary_sandpiper',
'tringa_stagnatalis': 'marsh_sandpiper',
'tringa_stagnatilis': 'marsh_sandpiper',
'tringa_terek': 'terek_sandpiper',
'tringa_totanus': 'common_redshank',
'tringa_vanellus': 'northern_lapwing',
'trinh_minh-ha': 'trinh_t._minh-ha',
'trinh_thuan': 'trinh_xuan_thuan',
'trini_woodall': 'trinny_woodall',
'trinidad_carnival': 'trinidad_and_tobago_carnival',
'trinidad_energy': 'trinidad_drilling',
'trinidad_express': 'trinidad_and_tobago_express',
'trinidad_isd': 'trinidad_independent_school_district',
'trinidad_island': 'trinidad',
'trinidad_municipio': 'trinidad_municipality',
'trinidad_spiny-rat': 'trinidad_spiny_rat',
'trinidad_stadion': 'trinidad_stadium',
'trinidade_petrel': 'trindade_petrel',
'trinidadian_american': 'trinidadian_americans',
'trinidadian_canadian': 'canadians_of_trinidad_and_tobago_origin',
'trinidadian_canadians': 'canadians_of_trinidad_and_tobago_origin',
'trinidadian_music': 'music_of_trinidad_and_tobago',
'trinidadian_passport': 'trinidad_and_tobago_passport',
'trinitarian_church': 'trinity_church',
'trinitarian_monotheism': 'trinity',
'trinitarian_worship': 'trinity',
'trinitee_5:7': 'trin-i-tee_5:7',
'trinity-bellwoods_park': 'trinity_bellwoods_park',
'trinity_apse': 'trinity_college_kirk',
'trinity_arcade': 'leeds_shopping_plaza',
'trinity_barnes': 'trinity_loren',
'trinity_black': 'bree_timmins',
'trinity_broadcasting': 'trinity_broadcasting_network',
'trinity_cathedral': 'holy_trinity_cathedral',
'trinity_churchyard': 'trinity_church_cemetery',
'trinity_clinic': 'trinity_health',
'trinity_column': 'marian_and_holy_trinity_columns',
'trinity_common': 'trinity_common_mall',
'trinity_commons': 'trinity_common_mall',
'trinity_grammar': 'trinity_grammar_school',
'trinity_guildhall': 'trinity_college_london',
'trinity_healthcare': 'trinity_health',
'trinity_homes': 'trinity_health',
'trinity_isd': 'trinity_independent_school_district',
'trinity_knot': 'triquetra',
'trinity_lavra': 'troitse-sergiyeva_lavra',
'trinity_leeds': 'leeds_shopping_plaza',
'trinity_pawling': 'trinity-pawling_school',
'trinity_prep': 'trinity_preparatory_school',
'trinity_quarter': 'leeds_shopping_plaza',
'trinity_rep': 'trinity_repertory_company',
'trinity_roots': 'trinityroots',
'trinity_sessions': 'the_trinity_session',
'trinity_soul': 'persona:_trinity_soul',
'trinity_springs': 'trinity_springs_middle_school',
'trinity_tripod': 'the_trinity_tripod',
'trinity_ucc': 'trinity_united_church_of_christ',
'trinity_united': 'trinity_united_church_of_christ',
'trinity_univ': 'trinity_university',
'trinity_vacation': 'legal_year',
'trinity_west': 'trinity_medical_center',
'trinity_wildcats': 'wakefield_trinity_wildcats',
'trinket_snake': 'elaphe_helena',
'trinny_woodhall': 'trinny_woodall',
'trino_vercellese': 'trino',
'trinoma_mall': 'trinoma',
'trinomial_equation': 'trinomial',
'trinomial_identity': 'multinomial_theorem',
'trinomial_name': 'trinomial_nomenclature',
'trinomial_theorem': 'multinomial_theorem',
'trinominal_nomenclature': 'binomial_nomenclature',
'trinomys_albispinus': 'white-spined_atlantic_spiny-rat',
'trinomys_dimidiatus': 'soft-spined_atlantic_spiny-rat',
'trinomys_gratiosus': 'gracile_atlantic_spiny-rat',
'trinomys_mirapitanga': 'dark-caped_atlantic_spiny-rat',
'trinomys_myosuros': 'mouse-tailed_atlantic_spiny-rat',
'trinomys_paratus': 'spiked_atlantic_spiny-rat',
'trinomys_setosus': 'hairy_atlantic_spiny-rat',
'trinucleotide_repeat': 'trinucleotide_repeat_disorder',
'trinway_mansion': 'prospect_place',
'triny_woodall': 'trinny_woodall',
'trio_2': 'trio_ii',
'trio_elegiaque': 'sergei_rachmaninoff',
'trio_gitano': 'tg_collective',
'trio_yepes': 'narciso_yepes',
'triode_amplifier': 'vacuum_tube',
'triodia_sylvina': 'orange_swift',
'triodon_macropterus': 'threetooth_puffer',
'triodopsis_occidentalis': 'western_three-toothed_land_snail',
'triodopsis_platysayoides': 'flat-spired_three-toothed_snail',
'triolo_airfield': 'foggia_airfield_complex',
'triomphant_ssbn': 'triomphant_class_submarine',
'trion-volga_tver': 'fc_volga_tver',
'trion_navarchon': 'trion_navarchon_street',
'trionum_annuum': 'hibiscus_trionum',
'trionys_hurum': 'aspideretes_gangeticus',
'trionyx_brogniarti': 'florida_softshell_turtle',
'trionyx_carinatus': 'florida_softshell_turtle',
'trionyx_ferox': 'florida_softshell_turtle',
'trionyx_georgianus': 'florida_softshell_turtle',
'trionyx_harlani': 'florida_softshell_turtle',
'trionyx_hurum': 'aspideretes_hurum',
'trionyx_ocellatus': 'spiny_softshell_turtle',
'trionyx_spiniferus': 'spiny_softshell_turtle',
'triops_launcauditus': 'triops_longicaudatus',
'triopteris_jamaicensis': 'hiptage_benghalensis',
'trioptimum_corporation': 'system_shock',
'triorchis_cernua': 'spiranthes_cernua',
'triorchis_ochroleuca': 'spiranthes_ochroleuca',
'triorchis_romanzoffiana': 'spiranthes_romanzoffiana',
'triorchis_stricta': 'spiranthes_romanzoffiana',
'triose-phosphate_isomerase': 'triosephosphate_isomerase',
'triose_phosphate': 'glyceraldehyde_3-phosphate',
'triosephosphate_dehydrogenase': 'glyceraldehyde_3-phosphate_dehydrogenase',
'trip_advisor': 'tripadvisor',
'trip_attraction': 'trip_generation',
'trip_code': 'tripcode',
'trip_flare': 'tripflare',
'trip_hopkins': 'trip_hawkins',
'trip_mccracken': 'trip_maccracken',
'trip_meter': 'odometer',
'trip_mines': 'land_mine',
'trip_murphy': 'herbie:_fully_loaded',
'trip_odometer': 'odometer',
'trip_production': 'trip_generation',
'trip_recorder': 'odometer',
'trip_reset': 'trip/reset',
'trip_skating': 'tour_skating',
'trip_tucker': 'charles_tucker_iii',
'trip_valve': 'trip_valve_gear',
'trip_wire': 'tripwire',
'tripartite_act': 'tripartite_system',
'tripartite_aggression': 'suez_crisis',
'tripartite_agreement': 'tripartite_accord',
'tripartite_alliance': 'african_national_congress',
'tripartite_history': 'historiae_ecclesiasticae_tripartitae_epitome',
'tripartite_minehunter': 'tripartite_class_minehunter',
'tripartite_motto': 'hendiatris',
'tripartite_treaty': 'tripartite_pact',
'tripel_h': 'triple_h',
'tripelennamine_citrate': 'tripelennamine',
'tripelennamine_hydrochloride': 'tripelennamine',
'tripeptidyl-peptidase_i': 'tripeptidyl_peptidase_i',
'tripeptidyl-peptidase_ii': 'tripeptidyl_peptidase_ii',
'triphenyl_methane': 'triphenylmethane',
'triphenyl_phosphine': 'triphenylphosphine',
'triphenylmethane_dye': 'triphenylmethane',
'triphenyltetrazolium_chloride': 'tetrazolium_chloride',
'triphenyltin_acetate': 'fentin_acetate',
'triphosphopyridine_nucleotide': 'nicotinamide_adenine_dinucleotide_phosphate',
'triphyophyllum_peltatum': 'triphyophyllum',
'tripillian_culture': 'cucuteni-trypillian_culture',
'tripladenia_cunninghamii': 'tripladenia',
'triplane_aircraft': 'triplane',
'triplane_turmoil': 'triplane_turmoil_series',
'triple-a_radio': 'adult_album_alternative',
'triple-alpha_fusion': 'triple-alpha_process',
'triple-axis_spectrometer': 'neutron_triple-axis_spectrometry',
'triple-base_propellant': 'smokeless_powder',
'triple-dotted_note': 'dotted_note',
'triple-headed_monster': 'triple_deities',
'triple-headed_monsters': 'triple_deities',
'triple-pane_windows': 'insulated_glazing',
'triple-spotted_rattlesnake': 'sistrurus_catenatus',
'triple-threat_man': 'triple_threat_man',
'triple-witching_hour': 'triple_witching_hour',
'triple-x_females': 'triple_x_syndrome',
'triple-x_syndrome': 'triple_x_syndrome',
'triple_a': 'aaa',
'triple_a-side': 'a-side_and_b-side',
'triple_advantage': 'freecreditreport.com',
'triple_agent': 'double_agent',
'triple_alpha': 'triple-alpha_process',
'triple_antigen': 'dpt_vaccine',
'triple_axel': 'axel_jump',
'triple_axle': 'axel_jump',
'triple_b': 'bbb',
'triple_b-side': 'a-side_and_b-side',
'triple_base': 'triple_base_plan',
'triple_blind': 'randomized_controlled_trial',
'triple_bond': 'bond_order',
'triple_boot': 'multi_boot',
'triple_booting': 'multi_boot',
'triple_border': 'triple_frontier',
'triple_buffer': 'triple_buffering',
'triple_burner': 'san_jiao',
'triple_bypass': 'coronary_artery_bypass_surgery',
'triple_c': 'ccc',
'triple_cero': 'salvatore_mancuso',
'triple_changers': 'triple_changer',
'triple_channel': 'triple-channel_architecture',
'triple_chin': 'double_chin',
'triple_cleanse': 'colon_cleansing',
'triple_click': 'triple-click',
'triple_cocktail': 'antiretroviral_drug',
'triple_consciousness': 'tresconsciousness',
'triple_croche': 'thirty-second_note',
'triple_d': 'ddd',
'triple_damages': 'treble_damages',
'triple_dea': 'triple_des',
'triple_deckers': 'triple_decker',
'triple_deity': 'triple_deities',
'triple_demism': 'three_principles_of_the_people',
'triple_draw': 'draw_poker',
'triple_drop': 'drop_set',
'triple_drops': 'drop_set',
'triple_dub': 'world_wide_web',
'triple_e': 'eee',
'triple_eagle': 'double_and_triple_eagles',
'triple_edema': 'mirror_syndrome',
'triple_eee': 'shoe_size',
'triple_entendre': 'double_entendre',
'triple_f': 'triple_f_racing',
'triple_fff': 'triple_fff_brewery',
'triple_frontera': 'triple_frontier',
'triple_g': 'ggg',
'triple_gem': 'three_jewels',
'triple_gems': 'three_jewels',
'triple_glazing': 'insulated_glazing',
'triple_god': 'triple_deities',
'triple_goddesses': 'triple_deities',
'triple_gods': 'triple_deities',
'triple_headed': 'triple_deities',
'triple_heater': 'san_jiao',
'triple_hhh': 'triple_h',
'triple_integral': 'multiple_integral',
'triple_integrals': 'multiple_integral',
'triple_jay': 'triple_j',
'triple_jet': 'gretsch_triple_jet',
'triple_jumper': 'triple_jump',
'triple_k': 'triple_k_co-operative',
'triple_ko': 'ko_fight',
'triple_lumen': 'central_venous_catheter',
'triple_m': 'triple_m_network',
'triple_major': 'double_degree',
'triple_meter': 'triple_metre',
'triple_meters': 'triple_metre',
'triple_metres': 'triple_metre',
'triple_nipple': 'supernumerary_nipple',
'triple_oedema': 'mirror_syndrome',
'triple_ought': 'patriots:_a_novel_of_survival_in_the_coming_collapse',
'triple_pane': 'insulated_glazing',
'triple_paned': 'insulated_glazing',
'triple_peak': 'trikuta',
'triple_penetration': 'group_sex',
'triple_platinum': 'music_recording_sales_certification',
'triple_plays': 'triple_play',
'triple_post': 'internet_forum',
'triple_posting': 'internet_forum',
'triple_redundancy': 'triple_modular_redundancy',
'triple_religion': 'three_teachings',
'triple_revolution': 'the_triple_revolution',
'triple_rock': 'triple_rock_social_club',
'triple_s': 'triple-s_management_corporation',
'triple_screen': 'triple_test',
'triple_sem': 'triplesem',
'triple_silence': 'salvation_group',
'triple_sizzler': 'hot_lotto',
'triple_sss': 'triple-s_management_corporation',
'triple_star': 'multiple_star',
'triple_stop': 'double_stop',
'triple_store': 'triplestore',
'triple_stores': 'triplestore',
'triple_superphosphate': 'monocalcium_phosphate',
'triple_team': 'double_team',
'triple_threat': 'triple_threat_man',
'triple_time': 'triple_metre',
'triple_transformation': 'integral_yoga',
'triple_tremolo': 'the_white_stripes',
'triple_valve': 'railway_air_brake',
'triple_warmer': 'san_jiao',
'triple_watershed': 'drainage_divide',
'triple_whopper': 'whopper',
'triple_witching': 'triple_witching_hour',
'triple_x': 'xxx',
'triple_zero': 'star_wars_republic_commando:_triple_zero',
'tripled_deity': 'triple_deities',
'tripled_god': 'triple_deities',
'tripled_goddess': 'triple_deities',
'tripled_goddesses': 'triple_deities',
'tripled_gods': 'triple_deities',
'tripled_pawns': 'doubled_pawns',
'triplefin_blenny': 'threefin_blenny',
'triplemania_17': 'triplemania_xvii',
'triplet_code': 'genetic_code',
'triplet_lilies': 'triteleia',
'triplet_lily': 'triteleia',
'triplet_starz': 'star_system',
'tripleurospermum_inodorum': 'matricaria_perforata',
'tripleurospermum_perforatum': 'matricaria_perforata',
'triplicane_temple': 'parthasarathy_temple',
'triplicate_girl': 'luornu_durgo',
'tripod_trilogy': 'the_tripods',
'tripod_websites': 'tripod.com',
'tripods_trilogy': 'the_tripods',
'tripoli_accord': 'tripoli_agreement',
'tripoli_airport': 'tripoli_international_airport',
'tripoli_castle': 'citadel_of_raymond_de_saint-gilles',
'tripoli_declaration': 'tripoli_agreement',
'tripoli_harbor': 'port_of_tripoli',
'tripoli_harbour': 'port_of_tripoli',
'tripoli_massacre': 'siege_of_tripolitsa',
'tripoli_pact': 'tripoli_agreement',
'tripoli_port': 'port_of_tripoli',
'tripoli_six': 'hiv_trial_in_libya',
'tripoli_temple': 'tripoli_shrine_temple',
'tripoli_treaty': 'treaty_of_tripoli',
'tripoli_university': 'al_fateh_university',
'tripolie_culture': 'cucuteni-trypillian_culture',
'tripolitan_war': 'barbary_wars',
'tripolitanian_war': 'italo-turkish_war',
'tripolitsa_massacre': 'siege_of_tripolitsa',
'tripolskaya_culture': 'trypillia',
'tripolye_culture': 'cucuteni-trypillian_culture',
'tripolyphosphoric_acid': 'triphosphoric_acid',
'triport_headphone': 'bose_headphones',
'tripos_wrangler': 'cambridge_mathematical_tripos',
'tripp_eison': 'tripp_eisen',
'tripp_house': 'tripp_family_homestead',
'tripp_johnston': 'bristol_palin',
'tripp_palin': 'bristol_palin',
'tripping_out': 'psychedelic_experience',
'tripple_gauge': 'dual_gauge',
'tripple_j': 'triple_j',
'tripple_opression': 'triple_oppression',
'tripple_point': 'triple_point',
'tripple_sec': 'triple_sec',
'tripple_w': 'world_wide_web',
'tripredacus_council': 'tripredacus',
'tripredicus_council': 'tripredacus',
'triprolidine_hydrochloride': 'triprolidine',
'trips_agreement': 'agreement_on_trade-related_aspects_of_intellectual_property_rights',
'tripterophycis_gilchristi': 'grenadier_cod',
'triptorelin_pamoate': 'triptorelin',
'triptronic_transmission': 'tiptronic',
'tripura_kingdom': 'twipra_kingdom',
'tripura_nationalism': 'tripuri_nationalism',
'tripura_nationalist': 'tripuri_nationalism',
'tripurantakesvara_temple': 'tripurantaka_temple',
'tripuri_calender': 'tripuri_calendar',
'tripuri_clan': 'tripuri_people',
'tripuri_kingdom': 'twipra_kingdom',
'tripuri_language': 'kokborok_language',
'tripuri_nationalist': 'tripuri_nationalism',
'tripuscid_valve': 'tricuspid_valve',
'triquarterly_magazine': 'triquarterly',
'trique_people': 'trique',
'triquetral_bones': 'triquetral_bone',
'triquetrum_bone': 'triquetral_bone',
'triqui_indians': 'trique',
'triqui_language': 'trique_language',
'triracial_isolate': 'melungeon',
'triracial_isolates': 'melungeon',
'tris-2-carboxyethylphosphine_hydrochloride': 'tcep',
'tris_base': 'tris',
'tris_buffer': 'tris',
'tris_elies': 'treis_elies',
'tris_kungu': 'latvian_mythology',
'tris_legomenon': 'hapax_legomenon',
'trisagion_prayers': 'trisagion',
'trisect_angles': 'angle_trisection',
'trisecting_angles': 'compass_and_straightedge_constructions',
'trish_alexander': 'patricia_kay',
'trish_best': 'feloni',
'trish_hayes': 'trisa_hayes',
'trish_krish': 'trisha_krishnan',
'trish_macgregor': 't._j._macgregor',
'trish_stratigias': 'trish_stratus',
'trish_una': 'vento_aureo',
'trish_vandevere': 'trish_van_devere',
'trisha_alexander': 'patricia_kay',
'trisha_cummings': 'the_real_world:_sydney',
'trisha_david': 'marion_lennox',
'trisha_hayes': 'trisa_hayes',
'trisha_k': 'trisha_krishnan',
'trisha_krishnamurthy': 'trisha_krishnan',
'trisha_penrose': 'tricia_penrose',
'trisha_una': 'vento_aureo',
'trishuli_west': 'tirsuli_west',
'trismegistos_ep': 'trismegistos',
'trisno_ishak': 'trisno',
'trisodium_orthophosphate': 'trisodium_phosphate',
'trisomy_13': 'patau_syndrome',
'trisomy_17p11.2': 'potocki-lupski_syndrome',
'trisomy_18': 'edwards_syndrome',
'trisomy_23': 'trisomy',
'trisomy_8': 'warkany_syndrome_2',
'trisomy_disorders': 'aneuploidy',
'trisomy_x': 'triple_x_syndrome',
'trison_detsen': 'trisong_detsen',
'trisong_detsaen': 'trisong_detsen',
'trisong_detsan': 'trisong_detsen',
'trisong_deutsen': 'trisong_detsen',
'trisong_deutson': 'trisong_detsen',
'trisopterus_minutus': 'poor_cod',
'trissophaes_anguina': 'eudocima_anguina',
'trissur_district': 'thrissur_district',
'trissur_pooram': 'thrissur_pooram',
'trista_meio': 'sailor_pluto',
'trista_meioh': 'sailor_pluto',
'trista_rehn': 'trista_sutter',
'trista_rehn-sutter': 'trista_sutter',
'tristam_speedy': 'tristram_speedy',
'tristan_evans': 'the_upsidedown',
'tristan_gemmell': 'tristan_gemmill',
'tristan_islanders': 'tristan_da_cunha',
'tristan_jemsek': 'haunted_cologne',
'tristan_leabu': 'tristan_lake_leabu',
'tristan_ludlow': 'legends_of_the_fall',
'tristan_martin': 'tristan_norton',
'tristan_motif': 'tristan_chord',
'tristan_palma': 'triston_palmer',
'tristan_ruia': 'tristan_tzara',
'tristan_skua': 'brown_skua',
'tristan_taormina': 'tristan_taormino',
'tristan_tara': 'tristan_tzara',
'tristan_taylor': 'hiroto_honda',
'tristania_conferta': 'lophostemon_confertus',
'tristania_neriifolia': 'tristania',
'tristar_entertainment': 'tristar_pictures',
'tristar_television': 'tristar_pictures',
'tristat_dx': 'tri-stat_dx',
'tristate_area': 'tri-state_area',
'tristate_buffer': 'three-state_logic',
'tristate_tornado': 'tri-state_tornado',
'tristian_crawford': 'tristan_crawford',
'triston_palma': 'triston_palmer',
'tristram_burges': 'tristam_burges',
'tristram_carey': 'tristram_cary',
'tristram_coffin': 'tris_coffin',
'tristram_fawr': 'tristan',
'tristram_fychan': 'tristram_the_younger',
'tristram_knot': 'bowen_knot',
'tristram_speaker': 'tris_speaker',
'trisul_i': 'trisul',
'trisul_ii': 'trisul',
'trisul_iii': 'trisul',
'trisyllabic_shortening': 'trisyllabic_laxing',
'tritch_building': 'joslin_dry_goods_company_building',
'trithorax-group_protein': 'trithorax-group_proteins',
'triticum_aestivatum': 'common_wheat',
'triticum_aestivum': 'common_wheat',
'triticum_bicorne': 'aegilops_bicornis',
'triticum_boeoticum': 'einkorn_wheat',
'triticum_columnare': 'aegilops_columnaris',
'triticum_crassum': 'aegilops_crassa',
'triticum_cylindricum': 'aegilops_cylindrica',
'triticum_dicoccoides': 'emmer',
'triticum_dicoccon': 'emmer',
'triticum_dicoccum': 'emmer',
'triticum_durum': 'durum',
'triticum_juvenale': 'aegilops_juvenalis',
'triticum_kotschyi': 'aegilops_kotschyi',
'triticum_longissimum': 'aegilops_longissima',
'triticum_monococcum': 'einkorn_wheat',
'triticum_persicum': 'aegilops_triuncialis',
'triticum_repens': 'elytrigia_repens',
'triticum_sharonense': 'aegilops_longissima',
'triticum_spelta': 'spelt',
'triticum_syriacum': 'aegilops_crassa',
'triticum_tauschii': 'aegilops_triuncialis',
'triticum_triunciale': 'aegilops_triuncialis',
'triticum_umbellulatum': 'aegilops_umbellulata',
'triticum_vaillantianum': 'elytrigia_repens',
'triticum_ventricosum': 'aegilops_ventricosa',
'tritium_battery': 'atomic_battery',
'tritium_lighting': 'tritium_illumination',
'tritium_oxide': 'tritiated_water',
'tritolyl_phosphate': 'tricresyl_phosphate',
'triton_alpino': 'alpine_newt',
'triton_class': 'british_t_class_submarine',
'triton_dalgerie': 'algerian_ribbed_newt',
'triton_exaratus': 'turritriton_tabulatus_exaratus',
'triton_jaspeado': 'marbled_newt',
'triton_karelinii': 'southern_crested_newt',
'triton_longipes': 'southern_crested_newt',
'triton_palmeado': 'palmate_newt',
'triton_pcs': 'suncom',
'triton_pigmeo': 'southern_marbled_newt',
'triton_pirenaico': 'pyrenean_brook_salamander',
'triton_times': 'ucsd_guardian',
'triton_v8': 'ford_modular_engine',
'tritone_substitute': 'tritone_substitution',
'tritonville_f.c.': 'tritonville_fc',
'tritos_cycle': 'tritos',
'tritsu_detsen': 'ralpacan',
'triturus_alpestris': 'alpine_newt',
'triturus_carnifex': 'italian_crested_newt',
'triturus_cristatus': 'great_crested_newt',
'triturus_dobrogicus': 'danube_crested_newt',
'triturus_helveticus': 'palmate_newt',
'triturus_italicus': 'italian_newt',
'triturus_kallerti': 'notophthalmus_meridionalis',
'triturus_karelinii': 'southern_crested_newt',
'triturus_marmoratus': 'marbled_newt',
'triturus_meridionalis': 'notophthalmus_meridionalis',
'triturus_montandoni': 'carpathian_newt',
'triturus_ophzticus': 'banded_newt',
'triturus_pygmaeus': 'southern_marbled_newt',
'triturus_vittatus': 'banded_newt',
'triturus_vulgaris': 'smooth_newt',
'trityl_alcohol': 'triphenylmethanol',
'trityl_chloride': 'triphenylmethyl_chloride',
'triumph_2500': 'triumph_2000',
'triumph_3ta': 'triumph_twenty_one',
'triumph_arch': 'triumphal_arch',
'triumph_books': 'random_house',
'triumph_hurricane': 'triumph_x-75_hurricane',
'triumph_releasing': 'triumph_films',
'triumph_slant-4': 'triumph_slant-4_engine',
'triumph_tiger': 'triumph_tiger_1050',
'triumph_tigress': 'triumph_tigress/bsa_sunbeam',
'triumph_trident': 'bsa_rocket_3/triumph_trident',
'triumph_x-75': 'triumph_x-75_hurricane',
'triumph_x75': 'triumph_x-75_hurricane',
'triumphal_arches': 'triumphal_arch',
'triumphal_column': 'victory_column',
'triumphal_entries': 'triumphal_entry',
'triumphal_gate': 'triumphal_arch',
'triumphal_honours': 'roman_triumphal_honours',
'triumphal_quadriga': 'horses_of_saint_mark',
'triumviri_monetales': 'triumvir_monetalis',
'triumviri_monetalis': 'moneyer',
'triune_god': 'trinity',
'triune_gods': 'triple_deities',
'triune_kingdom': 'kingdom_of_croatia-slavonia',
'triune_persons': 'triple_deities',
'triuranium_octaoxide': 'triuranium_octoxide',
'trivalent_graph': 'cubic_graph',
'trivalent_logic': 'ternary_logic',
'trivandrum_airport': 'trivandrum_international_airport',
'trivandrum_city': 'thiruvananthapuram',
'trivandrum_cultre': 'culture_of_thiruvananthapuram',
'trivandrum_culture': 'culture_of_thiruvananthapuram',
'trivandrum_district': 'thiruvananthapuram_district',
'trivandrum_economy': 'economy_of_thiruvananthapuram',
'trivandrum_history': 'history_of_thiruvananthapuram',
'trivandrum_media': 'media_in_trivandrum',
'trivellona_excelsa': 'trivia_merces',
'triveneto_region': 'northeast_italy',
'trivest_partners': 'trivest',
'trivia_bowl': 'quiz_bowl',
'trivia_europea': 'trivia_monacha',
'trivia_game': 'trivia',
'trivia_games': 'trivia',
'trivia_robot': 'gamespot',
'trivial_application': 'hello_world_program',
'trivial_automorphism': 'automorphism',
'trivial_character': 'trivial_representation',
'trivial_database': 'dbm',
'trivial_extension': 'field_extension',
'trivial_knot': 'unknot',
'trivial_link': 'unlink',
'trivial_literature': 'pulp_magazine',
'trivial_module': 'trivial_representation',
'trivial_names': 'trivial_name',
'trivial_objection': 'trivial_objections',
'trivial_pursuits': 'trivial_pursuit',
'trivial_subgroup': 'trivial_group',
'trivially_true': 'vacuous_truth',
'triviella_maoriensis': 'trivia_merces',
'triviella_memorata': 'trivia_merces',
'triwing_screws': 'tri-wing',
'triwizard_tournament': 'harry_potter_and_the_goblet_of_fire',
'trixi_delgado': 'masterboy',
'trixie_norton': 'the_honeymooners',
'trixie_schuba': 'beatrix_schuba',
'trixiecat1/girl_king': 'girl_king',
'trixxi_delgado': 'masterboy',
'trizec_properties': 'brookfield_properties',
'trizechahn_corporation': 'the_hahn_company',
'trizinc_diphosphide': 'zinc_phosphide',
'trk_a': 'trka',
'trk_b': 'trkb',
'trk_tochka': 'otr-21_tochka',
'trka_receptor': 'trka',
'trm_howard': 't._r._m._howard',
'trna-queuosine_b-mannosyltransferase': 'trna-queuosine_beta-mannosyltransferase',
'trna_adenylyltransferase': 'trnt1',
'trna_cytidylyltransferase': 'trnt1',
'trnava_university': 'university_of_trnava',
'trnavsky_kraj': 'trnava_region',
'trnovo_municipality': 'trnovo',
'trnovski_bridge': 'trnovo_bridge',
'trnovski_most': 'trnovo_bridge',
'trnow_stajl': 'klemen_klemen',
'tro_tro': 'share_taxi',
'troano_codex': 'maya_codices',
'trobriand_island': 'trobriand_islands',
'trobriand_islanders': 'trobriand_islands',
'trochalopteron_cachinnans': 'rufous-breasted_laughingthrush',
'trochalopteron_chrysopterum': 'chestnut-crowned_laughingthrush',
'trochalopteron_erythrocephalum': 'chestnut-crowned_laughingthrush',
'trochalopteron_imbricatum': 'streaked_laughingthrush',
'trochalopteron_lineatum': 'streaked_laughingthrush',
'trochanter_minor': 'lesser_trochanter',
'trochanteric_bursitis': 'greater_trochanteric_pain_syndrome',
'trochenbrod_ghetto': 'trochenbrod',
'trochetia_candolleana': 'trochetia_boutoniana',
'trochilic_engine': 'swing-piston_engine',
'trochilium_melanocephala': 'sesia_melanocephala',
'trochilium_melanocephalum': 'sesia_melanocephala',
'trochilus_colubris': 'ruby-throated_hummingbird',
'trochilus_macrourus': 'swallow-tailed_hummingbird',
'trochilus_naevius': 'saw-billed_hermit',
'trochilus_polytmus': 'red-billed_streamertail',
'trochilus_scitulus': 'black-billed_streamertail',
'trochisci_alhandalae': 'alhandal',
'trochocercus_albiventris': 'white-bellied_crested-flycatcher',
'trochocercus_albonotata': 'white-tailed_crested-flycatcher',
'trochocercus_albonotatus': 'white-tailed_crested-flycatcher',
'trochocercus_cyanomelas': 'african_crested-flycatcher',
'trochocercus_nigromitratus': 'dusky_crested-flycatcher',
'trochocercus_nitens': 'blue-headed_crested-flycatcher',
'trochodendron_aralioides': 'trochodendron',
'trochoid_joint': 'pivot_joint',
'trochoideomyces_gracilicaulis': 'trochoideomyces',
'trochosa_purcelli': 'gladicosa_gulosa',
'trochostigma_polygama': 'actinidia_polygama',
'trochus_iris': 'cantharidus_opalus_opalus',
'trochus_pellucidus': 'calliostoma_pellucidum_pellucidum',
'trochus_ringens': 'clanculus_peccatus',
'trochus_takapunaensis': 'clanculus_peccatus',
'trock_palatinate': 'trakai_voivodeship',
'trodat_printy': 'trodat',
'troekurovskoe_cemetery': 'troyekurovskoye_cemetery',
'troels_folmann': 'troels_brun_folmann',
'troemner_inc.': 'henry_troemner',
'troender_sheep': 'grey_troender_sheep',
'trofeo_alcudia': 'vuelta_a_mallorca',
'trofeo_calvia': 'vuelta_a_mallorca',
'trofeo_mallorca': 'vuelta_a_mallorca',
'trofeo_manacor': 'vuelta_a_mallorca',
'trofeo_matteoti': 'trofeo_matteotti',
'trofeo_mezzalama': 'mezzalama_trophy',
'trofeo_pollenca': 'vuelta_a_mallorca',
'trofeo_soller': 'vuelta_a_mallorca',
'troff_command': 'tron_command',
'troff_macros': 'troff_macro',
'trogen_ar': 'trogen',
'troglocambarus_maclanei': 'troglocambarus',
'troglodytes_aedon': 'house_wren',
'troglodytes_gorilla': 'western_gorilla',
'troglodytes_monticola': 'santa_marta_wren',
'troglodytes_ochraceus': 'ochraceous_wren',
'troglodytes_rufociliatus': 'rufous-browed_wren',
'troglodytes_rufulus': 'tepui_wren',
'troglodytes_sissonii': 'socorro_wren',
'troglodytes_solstitialis': 'mountain_wren',
'troglodytes_troglodytes': 'winter_wren',
'troglodytes_zetlandicus': 'shetland_wren',
'trogloglanis_pattersoni': 'toothless_blindcat',
'troglomyces_manfredii': 'troglomyces',
'trogoderma_affrum': 'khapra_beetle',
'trogoderma_granarium': 'khapra_beetle',
'trogon_aurantiiventris': 'orange-bellied_trogon',
'trogon_aurantius': 'surucua_trogon',
'trogon_caligatus': 'violaceous_trogon',
'trogon_chionurus': 'white-tailed_trogon',
'trogon_citreolus': 'citreoline_trogon',
'trogon_clathratus': 'lattice-tailed_trogon',
'trogon_collaris': 'collared_trogon',
'trogon_comptus': 'white-eyed_trogon',
'trogon_curucui': 'blue-crowned_trogon',
'trogon_elegans': 'elegant_trogon',
'trogon_massena': 'slaty-tailed_trogon',
'trogon_melanocephalus': 'black-headed_trogon',
'trogon_melanurus': 'black-tailed_trogon',
'trogon_mesurus': 'black-tailed_trogon',
'trogon_mexicanus': 'mountain_trogon',
'trogon_personatus': 'masked_trogon',
'trogon_ramonianus': 'violaceous_trogon',
'trogon_rufus': 'black-throated_trogon',
'trogon_surrucura': 'surucua_trogon',
'trogon_temperatus': 'masked_trogon',
'trogon_violaceus': 'violaceous_trogon',
'trogon_viridis': 'white-tailed_trogon',
'trogonophis_wiegmanni': 'checkerboard_worm_lizard',
'trogonoptera_trojana': 'troides_trojana',
'trogontherium_cuvieri': 'trogontherium',
'trogopterus_pearsonii': 'hairy-footed_flying_squirrel',
'trogopterus_xanthipes': 'complex-toothed_flying_squirrel',
'trogus_pompeius': 'gnaeus_pompeius_trogus',
'troicki_district': 'troitsky_district',
'troicki_raion': 'troitsky_district',
'troickii_district': 'troitsky_district',
'troickii_raion': 'troitsky_district',
'troickiy_district': 'troitsky_district',
'troickiy_raion': 'troitsky_district',
'troicky_district': 'troitsky_district',
'troicky_raion': 'troitsky_district',
'troides_brookiana': 'trogonoptera_brookiana',
'troides_euphorion': 'cairns_birdwing',
'troides_hyolitus': 'troides_hypolitus',
'troides_prattorum': 'buru_opalescent_birdwing',
'troinex_ge': 'troinex',
'trois_accords': 'les_trois_accords',
'trois_couleurs': 'three_colours',
'trois_eveches': 'three_bishoprics',
'trois_gnossiennes': 'gnossienne',
'trois_gros': 'troisgros_family',
'trois_monts': 'trois-monts',
'trois_mousquetaires': 'the_three_musketeers',
'trois_pistoles': 'trois-pistoles',
'trois_soeurs': 'love_and_fear',
'trois_verges': 'troisvierges',
'troistorrents_vs': 'troistorrents',
'troitsa_lavra': 'troitse-sergiyeva_lavra',
'troitse-sergieva_lavra': 'troitse-sergiyeva_lavra',
'troitse-sergiyev_monastery': 'troitse-sergiyeva_lavra',
'troitski_district': 'troitsky_district',
'troitski_raion': 'troitsky_district',
'troitskii_district': 'troitsky_district',
'troitskii_raion': 'troitsky_district',
'troitskiy_district': 'troitsky_district',
'troitskiy_raion': 'troitsky_district',
'troitsko_pechorsk': 'troitsko-pechorsk',
'troitsky_line': 'two_knights_endgame',
'troitsky_monastery': 'troitse-sergiyeva_lavra',
'troitsky_raion': 'troitsky_district',
'troitzky_position': 'two_knights_endgame',
'trojan_army': 'trojan_war',
'trojan_bunny': 'monty_python_and_the_holy_grail',
'trojan_catalogue': 'trojan_battle_order',
'trojan_genealogy': 'trojan_genealogy_of_nennius',
'trojan_group': 'jupiter_trojan',
'trojan_horses': 'trojan_horse',
'trojan_lowzone': 'trojan.zonebac',
'trojan_men': 'the_trojan_men',
'trojan_moons': 'trojan_moon',
'trojan_npp': 'trojan_nuclear_power_plant',
'trojan_people': 'troy',
'trojan_point': 'lagrangian_point',
'trojan_rabbit': 'monty_python_and_the_holy_grail',
'trojan_satellites': 'trojan_moon',
'trojan_skin': 'trojan_skinhead',
'trojan_skinheads': 'trojan_skinhead',
'trojan_times': 'dakota_state_university',
'trojan_virtumonde': 'vundo',
'trojan_volleyball': 'usc_trojans_volleyball',
'trojan_wars': 'trojan_war',
'trojan_women': 'the_trojan_women',
'trojan_zonebac': 'trojan.zonebac',
'trojans_fc': 'trojans_f.c.',
'trojanus_santonensis': 'trojanus_of_saintes',
'troki_castle': 'trakai_island_castle',
'troki_voivodeship': 'trakai_voivodeship',
'troki_voivodship': 'trakai_voivodeship',
'troll_a': 'troll_a_platform',
'troll_bait': 'flamebait',
'troll_dolls': 'troll_doll',
'troll_ii': 'troll_2',
'troll_iii': 'troll_3',
'troll_platform': 'troll_a_platform',
'troll_tech': 'qt_development_frameworks',
'troll_tracker': 'richard_frenkel',
'trolle-ljugby_castle': 'trolle-ljungby_castle',
'trolley_bus': 'trolleybus',
'trolley_buses': 'trolleybus',
'trolley_car': 'tram',
'trolley_cars': 'tram',
'trolley_case': 'suitcase',
'trolley_coach': 'trolleybus',
'trolley_dilemma': 'trolley_problem',
'trolley_escalator': 'vermaport',
'trolley_line': 'tram',
'trolley_paradox': 'trolley_problem',
'trolley_problems': 'trolley_problem',
'trolley_song': 'the_trolley_song',
'trolley_square.': 'trolley_square',
'trolley_wire': 'overhead_lines',
'trolley_wires': 'overhead_lines',
'trollheimen_mountains': 'trollheimen',
'trolling_motors': 'trolling_motor',
'trolliomyces_rosae': 'phragmidium_mucronatum',
'trollius_altaiense': 'trollius_altaicus',
'trollius_altissimus': 'globe-flower',
'trollius_europaeus': 'globe-flower',
'trollius_europeaeus': 'globe-flower',
'trollius_paluster': 'caltha_palustris',
'trollkyrka_poem': 'trollkyrka',
'trolltech_asa': 'qt_development_frameworks',
'trolltech_labs': 'qt_development_frameworks',
'trolly_line': 'trolly_line_number_9_trail',
'troma_films': 'troma_entertainment',
'troma_studios': 'troma_entertainment',
'troma_team': 'troma_entertainment',
'tromadance_festival': 'tromadance',
'trombetas_river': 'trombetas',
'trombiculid_bite': 'trombicula',
'trombiculid_mite': 'trombicula',
'trombocytopenic_purpura': 'thrombocytopenic_purpura',
'trombone_action': 'pump-action',
'trombone_dixie': 'the_pet_sounds_sessions',
'trombone_shorty': 'troy_andrews',
'trombone_shot': 'dolly_zoom',
'tromelin_island/communications': 'scattered_islands_in_the_indian_ocean',
'tromelin_island/economy': 'scattered_islands_in_the_indian_ocean',
'tromelin_island/geography': 'scattered_islands_in_the_indian_ocean',
'tromelin_island/government': 'scattered_islands_in_the_indian_ocean',
'tromelin_island/history': 'scattered_islands_in_the_indian_ocean',
'tromelin_island/military': 'scattered_islands_in_the_indian_ocean',
'tromelin_island/people': 'scattered_islands_in_the_indian_ocean',
'tromelin_island/transportation': 'scattered_islands_in_the_indian_ocean',
'tromeropsis_microtheca': 'tromeropsis',
'tromif_lysenko': 'trofim_lysenko',
'trommels_becque': 'peene_becque',
'trompette_marine': 'tromba_marina',
'trompovsky_attack': 'trompowsky_attack',
'trompowski_attack': 'trompowsky_attack',
'trompwsky_attack': 'trompowsky_attack',
'troms_county': 'troms',
'tromsdalen_u.i.l.': 'tromsdalen_uil',
'tron_2': 'tron_2.0',
'tron_guy': 'jay_maynard',
'tron_os': 'tron_project',
'tron_paul': 'ron_paul',
'tron_theater': 'teatro_san_cassiano',
'tronco_maestro': 'padua',
'trond-einar_elden': 'trond_einar_elden',
'trond_holter': 'wig_wam',
'trond_soltvedt': 'trond_egil_soltvedt',
'trondelag_teater': 'trondheim',
'trondheim_bomring': 'trondheim_toll_scheme',
'trondheim_cathedral': 'nidaros_cathedral',
'trondheim_energiverk': 'trondheim_energi',
'trondheim_fjord': 'trondheimsfjord',
'trondheim_ik': 'trondheim_black_panthers',
'trondheim_naeringsbygg': 'klp_eiendom',
'trondheim_package': 'trondheim_toll_scheme',
'trondheim_prelature': 'roman_catholic_territorial_prelature_of_trondheim',
'trondheim_rose': 'flag_of_trondheim',
'trondheim_s': 'trondheim_central_station',
'trondheim_sentralstasjon': 'trondheim_central_station',
'trondheim_station': 'trondheim_central_station',
'trondheim_tomcats': 'nidaros_domers',
'trondhjems_skiklub': 'trondhjems_sk',
'trong_chai': 'chai_trong-rong',
'trongsa_dzong': 'trongsa',
'trongsa_valley': 'trongsa',
'tronto_river': 'tronto',
'troodon_formosus': 'troodon',
'troodos_rockcress': 'arabis_kennedyae',
'troon_fc': 'troon_f.c.',
'troop_carrier': 'troopship',
'troop_office': 'truppenamt',
'troop_review': 'military_parade',
'troop_sergeant-major': 'troop_sergeant_major',
'troop_ship': 'troopship',
'troop_surge': 'iraq_war_troop_surge_of_2007',
'troop_transport': 'troopship',
'troop_transports': 'troopship',
'troop_tube': 'trooptube',
'troopergate_controversy': 'alaska_public_safety_commissioner_dismissal',
'trooping_fairies': 'classifications_of_fairies',
'trop_rock': 'tropical_rock',
'tropaeolin_o': 'chrysoine_resorcinol',
'tropaeolin_r': 'chrysoine_resorcinol',
'tropaeolum_tuberosum': 'mashua',
'tropaeum_alpium': 'trophy_of_augustus',
'tropane_alkaloids': 'tropane_alkaloid',
'tropcial_rainforests': 'rainforest',
'trophaeum_traiani': 'tropaeum_traiani',
'trophee_andros': 'andros_trophy',
'tropheops_gracilior': 'pseudotropheus_gracilior',
'tropheops_macrophthalmus': 'pseudotropheus_macrophthalmus',
'tropheops_microstoma': 'pseudotropheus_microstoma',
'tropheops_novemfasciatus': 'pseudotropheus_novemfasciatus',
'tropheops_tropheops': 'pseudotropheus_tropheops',
'tropheryma_whippelii': 'tropheryma_whipplei',
'trophianthus_zonatus': 'aspasia_lunata',
'trophic_factor': 'growth_factor',
'trophic_hormone': 'tropic_hormone',
'trophic_level': 'trophic_dynamics',
'trophic_levels': 'trophic_dynamics',
'trophic_network': 'trophic_dynamics',
'trophic_pyramid': 'ecological_pyramid',
'trophic_web': 'trophic_dynamics',
'trophoblastic_disease': 'gestational_trophoblastic_disease',
'trophoblastic_neoplasms': 'trophoblastic_neoplasm',
'trophoblastic_tumor': 'gestational_trophoblastic_disease',
'trophon_bonneti': 'xymene_pumilus',
'trophon_columnaris': 'xymene_aucklandicus',
'trophon_convexus': 'xymene_convexus',
'trophon_crispus': 'xymene_gouldi',
'trophon_erectus': 'xymene_aucklandicus',
'trophon_gouldi': 'xymene_gouldi',
'trophon_huttoni': 'xymene_huttoni',
'trophon_mortenseni': 'xymene_mortenseni_mortenseni',
'trophon_plebejus': 'xymene_plebius',
'trophy_andros': 'andros_trophy',
'trophy_aps': 'trophy_active_protection_system',
'trophy_girls': 'crash_team_racing',
'trophy_properties': 'trophy_property',
'trophy_trucks': 'trophy_truck',
'trophy_wives': 'trophy_wife',
'tropic_bird': 'tropicbird',
'tropic_cherry': 'capulin',
'tropic_hunt': 'herald_hunt',
'tropic_udine': 'pallalcesto_amatori_udine',
'tropic_zone': 'tropics',
'tropical_almond': 'terminalia_catappa',
'tropical_amaranth': 'amaranthus_polygonoides',
'tropical_arithmetic': 'tropical_geometry',
'tropical_armyworm': 'spodoptera_litura',
'tropical_asparagus': 'sauropus_androgynus',
'tropical_banksia': 'banksia_dentata',
'tropical_brushfoot': 'biblidinae',
'tropical_caecilian': 'scolecomorphidae',
'tropical_caecilins': 'scolecomorphidae',
'tropical_cafe': 'tropical_smoothie_cafe',
'tropical_climates': 'tropical_climate',
'tropical_cyclones': 'tropical_cyclone',
'tropical_deforestation': 'deforestation',
'tropical_depressions': 'tropical_cyclone',
'tropical_diseases': 'tropical_disease',
'tropical_disturbance': 'tropical_cyclone',
'tropical_drinks': 'cocktail',
'tropical_easterlies': 'trade_wind',
'tropical_fever': 'tropical_disease',
'tropical_fish': 'aquarium',
'tropical_forest': 'tropical_and_subtropical_moist_broadleaf_forests',
'tropical_forests': 'tropical_and_subtropical_moist_broadleaf_forests',
'tropical_frog': 'leptodactylidae',
'tropical_grill': 'pollo_tropical',
'tropical_horticulture': 'tropical_agriculture',
'tropical_izak': 'crying_izak',
'tropical_keratopathy': 'florida_keratopathy',
'tropical_leafwing': 'anaea_aidea',
'tropical_mathematics': 'tropical_geometry',
'tropical_meteorology': 'tropical_cyclone',
'tropical_moccasin': 'agkistrodon_bilineatus',
'tropical_pathology': 'tropical_medicine',
'tropical_peatland': 'tropical_peat',
'tropical_period': 'orbital_period',
'tropical_phagedena': 'tropical_ulcer',
'tropical_plant': 'exotic_plants',
'tropical_plants': 'exotic_plants',
'tropical_pyomyositis': 'pyomyositis',
'tropical_queen': 'danaus_eresimus',
'tropical_rainforests': 'tropical_rainforest',
'tropical_rattlesnake': 'crotalus_durissus',
'tropical_region': 'tropics',
'tropical_sage': 'salvia_splendens',
'tropical_saltbush': 'grassleaf_orache',
'tropical_semiring': 'tropical_geometry',
'tropical_shipping': 'nicor',
'tropical_smoothie': 'tropical_smoothie_cafe',
'tropical_smoothies': 'tropical_smoothie_cafe',
'tropical_spiderwort': 'commelina_benghalensis',
'tropical_storm': 'tropical_cyclone',
'tropical_storms': 'tropical_cyclone',
'tropical_sundew': 'drosera_burmannii',
'tropical_swelling': 'loa_loa_filariasis',
'tropical_system': 'tropical_cyclone',
'tropical_systems': 'tropical_cyclone',
'tropical_variety': 'tropical_geometry',
'tropical_waves': 'tropical_wave',
'tropical_weasel': 'amazon_weasel',
'tropical_whiteweed': 'ageratum_conyzoides',
'tropical_worsted': 'worsted',
'tropical_zodiac': 'zodiac',
'tropical_zone': 'tropics',
'tropicali_pesaro': 'victoria_libertas_pesaro',
'tropicana_400': 'lifelock.com_400',
'tropicana_boulevard': 'tropicana_avenue',
'tropicana_dome': 'tropicana_field',
'tropicana_express': 'tropicana_express_hotel_and_casino',
'tropicana_twister': 'tropicana_products',
'tropico_2': 'tropico_2:_pirate_cove',
'tropico_two': 'tropico_2:_pirate_cove',
'tropicranus_albocristatus': 'white-crested_hornbill',
'tropidocarpum_capparideum': 'tropidocarpum',
'tropidoclonion_lineatum': 'tropidoclonion',
'tropidolaemus_hombroni': 'tropidolaemus_wagleri',
'tropidolaemus_hombronii': 'tropidolaemus_wagleri',
'tropidolaemus_philippensis': 'tropidolaemus_wagleri',
'tropidolaemus_philippinensis': 'tropidolaemus_wagleri',
'tropidolaemus_schlegeli': 'tropidolaemus_wagleri',
'tropidolaemus_schlegelii': 'tropidolaemus_wagleri',
'tropidolaemus_subannulatus': 'tropidolaemus_wagleri',
'tropidolepis_grammicus': 'sceloporus_grammicus',
'tropidolepis_pleurostictus': 'sceloporus_grammicus',
'tropidolepis_variabilis': 'sceloporus_variabilis',
'tropidolepisma_major': 'land_mullet',
'tropidonophis_mairii': 'keelback',
'tropidonotus_clarkii': 'nerodia_clarkii',
'tropidonotus_fasciatus': 'nerodia_fasciata',
'tropidonotus_firthi': 'amphiesma_platyceps',
'tropidonotus_natrix': 'grass_snake',
'tropidonotus_parallelus': 'amphiesma_parallelum',
'tropidonotus_platyceps': 'amphiesma_platyceps',
'tropidonotus_stolatus': 'amphiesma_stolatum',
'tropidonotus_transversus': 'nerodia_erythrogaster',
'tropidophis_greenvayi': 'tropidophis_greenwayi',
'tropidurus_grammicus': 'sceloporus_grammicus',
'tropidurus_tarapacensis': 'tarapaca_pacific_iguana',
'tropilis_gracilicaulis': 'dendrobium_gracilicaule',
'tropilis_ruppiana': 'dendrobium_jonesii',
'tropilis_subterrestris': 'eria_bractescens',
'tropilis_tetragona': 'dendrobium_tetragonum',
'tropine_dehydrogenase': 'tropinone_reductase_i',
'tropinone_reductase': 'tropinone_reductase_ii',
'tropo_duct': 'tv_and_fm_dx',
'tropospheric_ducting': 'tropospheric_propagation',
'troppau_protocol': 'congress_of_troppau',
'trosa_kommun': 'trosa_municipality',
'trospium_chloride': 'trospium',
'troszyn_commune': 'gmina_troszyn',
'trot_line': 'trotline',
'trote_lake': 'lago_delle_trote',
'trotskyist_group': 'david_korner',
'trotskyist_organisation': 'internationalist_communist_organisation',
'trotting_races': 'harness_racing',
'trotting_series': 'inter_dominion',
'trou_madame': 'bagatelle',
'troubador_style': 'troubadour_style',
'troubadour_poetry': 'troubadour',
'trouble_doll': 'worry_doll',
'trouble_hunter': 'the_trouble_hunter',
'trouble_shoot': 'troubleshooting',
'trouble_shooter': 'troubleshooting',
'trouble_shooting': 'troubleshooting',
'trouble_ticketing': 'issue_tracking_system',
'troubled_man': 'x-raided',
'troubled_mindz': 'cause_4_concern',
'troubled_windows': 'os-tan',
'troubleman_records': 'troubleman_unlimited_records',
'troubleman_unlimited': 'troubleman_unlimited_records',
'troubleshooting_wikipedia': 'wikipedia:troubleshooting',
'troublesome_diamond': 'diamond_problem',
'trouchet_noir': 'cabernet_franc',
'trough_concentrator': 'parabolic_trough',
'trough_levels': 'trough_level',
'trough_lolly': 'urinal_deodorizer_block',
'trough_shell': 'mactridae',
'trough_shells': 'mactridae',
'troup-howell_bridge': 'frederick_douglass-susan_b._anthony_memorial_bridge',
'troup_isd': 'troup_independent_school_district',
'trouser_pocket': 'pocket',
'trouser_pockets': 'pocket',
'trouser_role': 'breeches_role',
'trouser_skirt': 'culottes',
'trouser_snake': 'penis',
'trousseau_phenomenon': 'trousseau_sign_of_malignancy',
'trousseau_syndrome': 'trousseau_sign_of_malignancy',
'trout-stream_beetle': 'amphizoa',
'trout_almondine': 'amandine',
'trout_amandine': 'amandine',
'trout_inn': 'the_trout_inn',
'trout_lilies': 'erythronium',
'trout_lily': 'erythronium_americanum',
'trout_mask': 'trout_mask_replica',
'trout_pout': 'leslie_ash',
'trout_slap': 'the_fish-slapping_dance',
'trout_worms': 'plastic_worm',
'troutdale_airport': 'portland-troutdale_airport',
'troutmask_replica': 'trout_mask_replica',
'trovafloxacin_mesylate': 'trovafloxacin',
'trovan_iv': 'alatrofloxacin',
'trow_ghyll': 'trow_ghyll_skeleton',
'trowbridge_college': 'wiltshire_college',
'trowbridge_house': 'charles_trowbridge_house',
'trowley_bottom': 'flamstead',
'trox_alternans': 'omorgus_alternans',
'trox_amictus': 'omorgus_amictus',
'trox_carinatus': 'omorgus_carinatus',
'trox_costatus': 'omorgus_costatus',
'trox_howelli': 'omorgus_howelli',
'trox_montalbanensis': 'omorgus_costatus',
'trox_regularis': 'omorgus_costatus',
'trox_velutinus': 'omorgus_costatus',
'troxell_racing': 'mighty_motorsports',
'troy-menands_bridge': 'menands_bridge',
'troy_augusto': 'universal_music_group_v._augusto',
'troy_aumua': 'troy_polamalu',
'troy_cassar': 'troy_cassar-daley',
'troy_coleman': 'cowboy_troy',
'troy_conway': 'michael_avallone',
'troy_crosby': 'sidney_crosby',
'troy_daley': 'troy_cassar-daley',
'troy_donohue': 'troy_donahue',
'troy_game': 'the_troy_game',
'troy_gentry': 'montgomery_gentry',
'troy_gregg': 'troy_leon_gregg',
'troy_ilium': 'troy',
'troy_isd': 'troy_independent_school_district',
'troy_kennedy-martin': 'troy_kennedy_martin',
'troy_macclure': 'troy_mcclure',
'troy_martin': 'shane_douglas',
'troy_mcgill': 'troy_a._mcgill',
'troy_mclure': 'troy_mcclure',
'troy_middleton': 'troy_h._middleton',
'troy_ounce': 'troy_weight',
'troy_ounces': 'troy_weight',
'troy_oz': 'troy_weight',
'troy_phi': 'troy_philippines',
'troy_polomalu': 'troy_polamalu',
'troy_pound': 'troy_weight',
'troy_redmann': 'la_pama_cyclones',
'troy_ross': 'troy_amos-ross',
'troy_scale': 'troy_weight',
'troy_state': 'troy_university',
'troy_system': 'troy_weight',
'troy_tompkins': 'troy_cle',
'troy_tulowitski': 'troy_tulowitzki',
'troy_tuminelli': 'troy_kurtis',
'troy_units': 'troy_weight',
'troy_viia': 'troy_vii',
'troy_viib': 'troy_vii',
'troy_viib1': 'troy_vii',
'troy_viib2': 'troy_vii',
'troy_viib3': 'troy_vii',
'troy_w.': 'aiden',
'troy_wambold': 'aiden',
'troy_war': 'trojan_war',
'troy_weights': 'troy_weight',
'troy_zantuck': 'sen_1116',
'troyal_brooks': 'garth_brooks',
'troyan_pass': 'beklemeto_pass',
'troye_mellet': 'troye_sivan',
'troyer_syndrome': 'hereditary_spastic_paraplegia',
'troyes_af': 'troyes_ac',
'troyes_airport': 'troyes_-_barberey_airport',
'trp_channel': 'transient_receptor_potential_channel',
'trpimir_i': 'trpimir_i_of_croatia',
'trpimir_ii': 'trpimir_ii_of_croatia',
'trpinja_municipality': 'trpinja',
'trpoical_rainforest': 'tropical_rainforest',
'trrs_connector': 'trs_connector',
'trs-80_coco': 'trs-80_color_computer',
'trs_80': 'trs-80',
'trs_cable': 'tough_rubber-sheathed_cable',
'trs_jack': 'trs_connector',
'trstenik_municipality': 'trstenik',
'trt_int': 'trt_international',
'trt_int-turk': 'trt_international',
'trt_turkey': 'turkish_radio_and_television_corporation',
'tru3_magic': 'true_magic',
'tru_blood': 'true_blood',
'tru_life': 'tru-life',
'tru_rats': 'speakeasy_records',
'tru_records': 'no_limit_records',
'tru_tv': 'trutv',
'tru_walker': 'tru_confessions',
'tru_warrior': 'ron_artest',
'tru_wolfpack': 'thompson_rivers_wolfpack',
'truax_field': 'dane_county_regional_airport',
'trub_be': 'trub',
'trubetsk_duchy': 'principality_of_trubetsk',
'trubschachen_be': 'trubschachen',
'truce-smiles_rearrangement': 'smiles_rearrangement',
'truce_terms': 'truce_term',
'truchas_peaks': 'truchas_peak',
'trucial_coast': 'trucial_states',
'trucial_oman': 'trucial_states',
'trucial_sheikhdoms': 'trucial_states',
'trucial_skeikdoms': 'trucial_states',
'truck-driving_country': 'country_music',
'truck-only_lanes': 'lane',
'truck_1-9': 'u.s._route_1/9_truck',
'truck_accessories': 'truck_accessory',
'truck_accident': 'traffic_collision',
'truck_accidents': 'traffic_collision',
'truck_act': 'truck_acts',
'truck_apron': 'roundabout',
'truck_art': 'customised_buses_and_trucks_in_pakistan',
'truck_balls': 'truck_nuts',
'truck_bedliners': 'truck_bedliner',
'truck_body': 'coachwork',
'truck_bomb': 'car_bomb',
'truck_bombing': 'car_bomb',
'truck_campers': 'truck_camper',
'truck_canopy': 'tent',
'truck_classifications': 'truck_classification',
'truck_crossing': 'british_columbia_highway_15',
'truck_drivers': 'truck_driver',
'truck_driving': 'truck_driver',
'truck_farm': 'truck_farming',
'truck_fest': 'truck_festival',
'truck_lane': 'lane',
'truck_lanes': 'lane',
'truck_logger': 'canada_wide_media_limited',
'truck_maker': 'automotive_industry',
'truck_manufacturer': 'truck',
'truck_manufacturing': 'truck',
'truck_scales': 'truck_scale',
'truck_series': 'camping_world_truck_series',
'truck_shop': 'truck_system',
'truck_stops': 'truck_stop',
'truck_surfing': 'car_surfing',
'truck_wages': 'payment_in_kind',
'truckee-tahoe_airport': 'truckee_tahoe_airport',
'truckee_airport': 'truckee_tahoe_airport',
'trucker_cap': 'trucker_hat',
'trucker_hats': 'trucker_hat',
'trucker_max': 'tucker_max',
'trucker_norm': 'mike_shaw',
'truckers_atlas': 'the_lonesome_crowded_west',
'trucking_industry': 'transport',
'trucking_music': 'country_music',
'truckle_bed': 'trundle_bed',
'truckle_beds': 'trundle_bed',
'truckload_carrier': 'truckload_shipping',
'trucks-for-blood_proposal': 'joel_brand',
'trud_engels': 'fc_iskra_engels',
'trud_kaspiysk': 'fc_dagdizel_kaspiysk',
'trud_kurgan': 'fc_tobol_kurgan',
'trud_naro-fominsk': 'fc_nara-shbfr_naro-fominsk',
'trud_penza': 'fc_zenit_penza',
'trud_ryazan': 'fc_spartak_ryazan',
'trud_stalinogorsk': 'fc_don_novomoskovsk',
'trud_togliatti': 'fc_lada_togliatti',
'trud_tula': 'fc_arsenal-tula_tula',
'trud_vladimir': 'fc_torpedo_vladimir',
'trud_volzhsky': 'fc_energiya_volzhsky',
'trud_voronezh': 'fc_fakel-voronezh_voronezh',
'trud_yoshkar-ola': 'fc_spartak_yoshkar-ola',
'trude_beiser-jochum': 'trude_beiser',
'trude_jochum-beiser': 'trude_beiser',
'trude_leibhart': 'gertrude_liebhart',
'trude_liebhardt': 'gertrude_liebhart',
'trude_trefall': 'manjari',
'trudeau_foundation': 'pierre_elliott_trudeau_foundation',
'trudeau_sanitorium': 'adirondack_cottage_sanitarium',
'trudelia_cristata': 'vanda_cristata',
'trudi_beiser': 'trude_beiser',
'trudi_denucci': 'ohio_valley_wrestling',
'trudi_lecaine': 'trudi_le_caine',
'trudi_maxwell': 'james_moffat',
'trudy_clark': 'trudy_h._clark',
'trudy_lecaine': 'trudi_le_caine',
'trudy_proud': 'the_proud_family',
'trudy_smith': 'eastern_air_lines_flight_401',
'true-crime_author': 'crime_writer',
'true/false_festival': 'true/false_film_festival',
'true_20': 'true20',
'true_achievement': 'trueachievements',
'true_achievements': 'trueachievements',
'true_adamantium': 'adamantium',
'true_adder': 'viperinae',
'true_adventurer': 'lego_indiana_jones:_the_original_adventures',
'true_aloe': 'aloe_vera',
'true_alphabet': 'phonemic_orthography',
'true_asia': 'asia_no_junshin',
'true_babystars': 'leptosiphon_bicolor',
'true_base': 'altern_base',
'true_bat': 'microbat',
'true_bats': 'microbat',
'true_believers': 'true_believer',
'true_bill': 'indictment',
'true_bliss': 'truebliss',
'true_bone': 'silla',
'true_breed': 'true_breeding_organism',
'true_breeding': 'true_breeding_organism',
'true_brinelling': 'brinelling',
'true_bug': 'hemiptera',
'true_bugs': 'hemiptera',
'true_cinema': 'true_cinema_movement',
'true_color': 'true_colors',
'true_colour': 'true_colors',
'true_colours': 'true_colors',
'true_constancy': 'la_vera_costanza',
'true_crabs': 'crab',
'true_crypt': 'truecrypt',
'true_deer': 'deer',
'true_demcracy': 'direct_democracy',
'true_democracy': 'direct_democracy',
'true_dungeon': 'true_adventures',
'true_eel': 'eel',
'true_enough': 'stephen_mccauley',
'true_ent': 'true_entertainment',
'true_father': 'sun_myung_moon',
'true_finch': 'finch',
'true_finn': 'true_finns',
'true_flies': 'fly',
'true_flight': 'tiger_aircraft',
'true_fly': 'fly',
'true_forget-me-not': 'myosotis_scorpioides',
'true_freshmen': 'true_freshman',
'true_friends': 'friendship',
'true_frogs': 'true_frog',
'true_ginger': 'zingiber',
'true_gingers': 'zingiber',
'true_grass': 'poaceae',
'true_grasses': 'poaceae',
'true_hdtv': 'high-definition_television',
'true_head': 'itasca',
'true_hermaphrodite': 'true_hermaphroditism',
'true_image': 'acronis_true_image',
'true_internet': 'true_corporation',
'true_ira': 'real_irish_republican_army',
'true_jedi': 'lego_star_wars',
'true_katydid': 'pseudophyllinae',
'true_langur': 'northern_plains_gray_langur',
'true_lavender': 'lavandula_angustifolia',
'true_leaves': 'leaf',
'true_levellers': 'diggers',
'true_liberalism': 'classical_liberalism',
'true_limpet': 'limpet',
'true_limpets': 'limpet',
'true_loach': 'cobitidae',
'true_majority': 'truemajority',
'true_metal': 'heavy_metal_music',
'true_mole': 'mole',
'true_moles': 'mole',
'true_morel': 'morchella',
'true_mosses': 'moss',
'true_mother': 'hak_ja_han',
'true_move': 'true_corporation',
'true_mulga': 'acacia_aneura',
'true_music': 'early_work',
'true_myrtle': 'myrtus',
'true_negative': 'type_i_and_type_ii_errors',
'true_noon': 'noon',
'true_obsession': 'true_obsessions',
'true_oldies': 'the_true_oldies_channel',
'true_oriole': 'oriolus',
'true_oxlip': 'primula_elatior',
'true_oxslip': 'primula_elatior',
'true_oyster': 'ostreidae',
'true_oysters': 'ostreidae',
'true_parents': 'true_family',
'true_parrot': 'true_parrots',
'true_pelvis': 'lesser_pelvis',
'true_position': 'position_tolerance',
'true_positive': 'type_i_and_type_ii_errors',
'true_predator': 'predation',
'true_quillwort': 'isoetes_valida',
'true_ray': 'rajiformes',
'true_redstart': 'phoenicurus',
'true_rhyme': 'perfect_rhyme',
'true_rhymes': 'perfect_rhyme',
'true_rhyming': 'perfect_rhyme',
'true_rib': 'true_ribs',
'true_rj45': 'registered_jack',
'true_rms': 'true_rms_converter',
'true_salamander': 'salamandridae',
'true_school': 'j-live',
'true_script': 'regular_script',
'true_seal': 'earless_seal',
'true_seals': 'earless_seal',
'true_sergeant': 'athyma',
'true_service-tree': 'sorbus_domestica',
'true_silver': 'mithril',
'true_skool': 'j-live',
'true_snail': 'orthogastropoda',
'true_snails': 'orthogastropoda',
'true_source': 'one_power',
'true_sparrow': 'sparrow',
'true_stinkwood': 'ocotea_bullata',
'true_sturgeon': 'sturgeon',
'true_sturgeons': 'sturgeon',
'true_temper': 'true_temper_sports',
'true_thomas': 'thomas_the_rhymer',
'true_toad': 'toad',
'true_toads': 'true_toad',
'true_tunes': 'true_tunes_news',
'true_type': 'truetype',
'true_ungulate': 'ungulate',
'true_ute': 'paiute',
'true_vacuum': 'false_vacuum',
'true_variance': 'variance',
'true_vector': 'pseudovector',
'true_vegetarian': 'veganism',
'true_viper': 'viperinae',
'true_vipers': 'viperinae',
'true_visions': 'truevisions',
'true_warbler': 'old_world_warbler',
'true_weevil': 'curculionidae',
'true_whig': 'true_whig_party',
'true_wren': 'wren',
'true_wrens': 'wren',
'true_you': 'kelly_clarkson',
'truecombat:_elite': 'truecombat:elite',
'truelove_eyre': 'eyre_legend',
'truesports_racing': 'truesports',
'truett_cathy': 's._truett_cathy',
'truett_mckeehan': 'trudog',
'truett_seminary': 'george_w._truett_theological_seminary',
'truett_sewell': 'rip_sewell',
'truetype_font': 'truetype',
'truetype_fonts': 'truetype',
'truevision_targa': 'truevision_tga',
'truevisions_ubc': 'truevisions',
'truffle_hound': 'truffle_hog',
'truffle_pig': 'truffle_hog',
'truffle_shuffle': 'the_goonies',
'truffula_tree': 'the_lorax',
'truffula_trees': 'the_lorax',
'truful-truful_river': 'trufultruful_river',
'truganina_park': 'truganina_coastal_parklands',
'truganina_swamp': 'truganina_coastal_parklands',
'trugon_terrestris': 'thick-billed_ground-pigeon',
'trugreen_chemlawn': 'servicemaster',
'truilhas_lock': 'la_nouvelle_branch',
'truing_stand': 'wheel_truing_stand',
'trujillo_molina': 'rafael_trujillo',
'truk_atol': 'chuuk',
'truk_atoll': 'chuuk',
'truk_islands': 'chuuk',
'truk_monarch': 'chuuk_monarch',
'truk_white-eye': 'faichuk_white-eye',
'trukese_language': 'chuukese_language',
'trukhanov_ostrov': 'trukhaniv_island',
'truku_language': 'seediq_language',
'trulan_eyre': 'trulan_a._eyre',
'trullan_canons': 'quinisext_council',
'trullan_council': 'quinisext_council',
'trullan_synod': 'third_council_of_constantinople',
'trullula_sacchari': 'phaeocytostroma_sacchari',
'truly_scrumtious': 'truly_scrumptious',
'truly_unix': 'unix',
'trumai_language': 'trumai',
'truman_administration': 'harry_s._truman',
'truman_aldrich': 'truman_h._aldrich',
'truman_angel': 'truman_o._angell',
'truman_angell': 'truman_o._angell',
'truman_baxter': 'truman_smith_baxter',
'truman_building': 'harry_s_truman_building',
'truman_burbank': 'the_truman_show',
'truman_college': 'harry_s_truman_college',
'truman_hoag': 'truman_h._hoag',
'truman_hobbs': 'truman_mcgill_hobbs',
'truman_lake': 'truman_reservoir',
'truman_landon': 'truman_h._landon',
'truman_library': 'harry_s._truman_presidential_library_and_museum',
'truman_madsen': 'truman_g._madsen',
'truman_merriman': 'truman_a._merriman',
'truman_newberry': 'truman_handy_newberry',
'truman_olson': 'truman_o._olson',
'truman_project': 'truman_national_security_project',
'truman_scholar': 'harry_s._truman_scholarship',
'truman_scholars': 'harry_s._truman_scholarship',
'truman_scholarship': 'harry_s._truman_scholarship',
'truman_scholarships': 'harry_s._truman_scholarship',
'truman_show': 'the_truman_show',
'truman_state': 'truman_state_university',
'truman_yuncker': 'truman_g._yuncker',
'trumat_hadeshen': 'israel_isserlin',
'trumbull_high': 'trumbull_high_school',
'trump_air': 'trump_shuttle',
'trump_airline': 'trump_shuttle',
'trump_airlines': 'trump_shuttle',
'trump_atlanta': 'trump_towers_atlanta',
'trump_casino': 'majestic_star_ii',
'trump_castle': 'trump_marina',
'trump_city': 'trump_place',
'trump_entertainment': 'trump_entertainment_resorts',
'trump_family': 'donald_trump',
'trump_game': 'trump:_the_game',
'trump_organisation': 'trump_organization',
'trump_princess': 'kingdom_5kr',
'trump_rate': 'donald_trump',
'trump_raw': 'wwe_raw',
'trump_timepieces': 'donald_trump',
'trump_towers': 'trump_tower',
'trump_u': 'trump_university',
'trump_vodka': 'donald_trump',
'trumper_oval': 'trumper_park_oval',
'trumper_park': 'trumper_park_oval',
'trumpet-creeper_family': 'bignoniaceae',
'trumpet_bush': 'lapacho',
'trumpet_creeper': 'trumpet_vine',
'trumpet_fish': 'trumpetfish',
'trumpet_flower': 'trumpetflower',
'trumpet_gentian': 'gentiana_clusii',
'trumpet_marine': 'tromba_marina',
'trumpet_nails': 'pincer_nails',
'trumpet_pitcher': 'sarracenia',
'trumpet_player': 'trumpet',
'trumpet_stylewort': 'levenhookia_leptantha',
'trumpet_swan': 'trumpet_swan_productions',
'trumpet_tree': 'tabebuia',
'trumpet_weed': 'eutrochium_purpureum',
'trumpet_winsock': 'winsock',
'trumpet_worm': 'pectinariidae',
'trumpeter_swans': 'trumpeter_swan',
'truncal_obesity': 'obesity',
'truncate_imperial': 'cheritrella_truncipennis',
'truncated_8-cell': 'truncated_tesseract',
'truncated_bipyramid': 'bifrustum',
'truncated_cone': 'frustum',
'truncated_cubooctahedron': 'truncated_cuboctahedron',
'truncated_dome': 'tactile_paving',
'truncated_domes': 'tactile_paving',
'truncated_hexadeltille': 'great_rhombitrihexagonal_tiling',
'truncated_hexahedron': 'truncated_cube',
'truncated_hextille': 'truncated_hexagonal_tiling',
'truncated_pentachoron': 'truncated_5-cell',
'truncated_pyramid': 'frustum',
'truncated_quadrille': 'truncated_square_tiling',
'truncated_spurs': 'truncated_spur',
'truncatella_ramulosa': 'truncatella_laurocerasi',
'truncullariopsis_trunculus': 'hexaplex_trunculus',
'trunculus_murex': 'hexaplex_trunculus',
'truncus_brachiocephalicus': 'brachiocephalic_artery',
'truncus_coeliacus': 'celiac_artery',
'truncus_costocervicalis': 'costocervical_trunk',
'truncus_lumbosacralis': 'lumbosacral_trunk',
'truncus_pulmonalis': 'pulmonary_artery',
'trundle_beds': 'trundle_bed',
'trundle_bundle': 'blanket_sleeper',
'trung_bo': 'annam',
'trung_thu': 'mid-autumn_festival',
'trunk_code': 'telephone_numbering_plan',
'trunk_highway': 'trunk_road',
'trunk_line': 'trunking',
'trunk_lines': 'trunking',
'trunk_roads': 'trunk_road',
'trunk_route': 'trunk_road',
'trunkback_turtle': 'leatherback_turtle',
'truong_sa': 'spratly_islands',
'truong_son': 'annamite_range',
'truro_city': 'truro_city_f.c.',
'truro_river': 'river_truro',
'truro_station': 'truro_railway_station',
'truro_synagogue': 'touro_synagogue',
'trus_madi': 'mount_trus_madi',
'truscott_airfield': 'mungalalu_truscott_airbase',
'trusina_massacre': 'trusina_case',
'truss_archbridge': 'truss_arch_bridge',
'truss_archbridges': 'truss_arch_bridge',
'truss_bridges': 'truss_bridge',
'trust-building_game': 'trust_fall',
'trust-preferred_securities': 'trust-preferred_security',
'trust/juke-joint_jezebel': 'trust/juke_joint_jezebel',
'trust_arcticugol': 'arktikugol',
'trust_arctikugol': 'arktikugol',
'trust_buster': 'trust_busting',
'trust_communications': 'cjlf-fm',
'trust_companies': 'trust_company',
'trust_e': 'truste',
'trust_fund': 'trust_law',
'trust_game': 'dictator_game',
'trust_indenture': 'trust_indenture_act_of_1939',
'trust_law/australia': 'australian_trust_law',
'trust_mart': 'trust-mart',
'trust_metrics': 'trust_metric',
'trust_preffereds': 'trust-preferred_security',
'trust_rank': 'trustrank',
'trust_school': 'education_and_inspections_act_2006',
'trust_system': 'honor_system',
'trust_territories': 'united_nations_trust_territories',
'trust_territory': 'united_nations_trust_territories',
'trusted_execution': 'trusted_execution_technology',
'trusted_systems': 'trusted_system',
'trusted_timestamp': 'trusted_timestamping',
'trustees_academy': 'edinburgh_college_of_art',
'trusteeship_council': 'united_nations_trusteeship_council',
'trusthouse_forte': 'forte_group',
'trustmark_corporation': 'trustmark',
'truston_pond': 'trustom_pond',
'trustpower_limited': 'trustpower',
'trusts_law': 'trust_law',
'trusty_bell': 'eternal_sonata',
'trusurround_xt': 'sound_retrieval_system',
'truth-functional_conditional': 'material_conditional',
'truth-functional_connective': 'logical_connective',
'truth-functional_logic': 'truth_function',
'truth-in-lending_act': 'truth_in_lending_act',
'truth-in-savings_disclosure': 'truth_in_savings_act',
'truth-value_links': 'truth-value_link',
'truth_advertising': 'thetruth.com',
'truth_b2031a': 'behringer_truth_series',
'truth_comission': 'truth_and_reconciliation_commission',
'truth_commission': 'truth_and_reconciliation_commission',
'truth_commissions': 'truth_and_reconciliation_commission',
'truth_conditions': 'truth_condition',
'truth_definition': 'semantic_theory_of_truth',
'truth_drugs': 'truth_drug',
'truth_drums': 'truth_custom_drums',
'truth_mission': 'adelaide_institute',
'truth_quark': 'top_quark',
'truth_serum': 'truth_drug',
'truth_syrum': 'truth_drug',
'truth_tables': 'truth_table',
'truth_testimony': 'testimony_of_integrity',
'truth_theories': 'truth',
'truth_theory': 'truth',
'truth_tree': 'method_of_analytic_tableaux',
'truth_values': 'truth_value',
'truth_wall': 'truth_window',
'truth_walls': 'truth_window',
'truth_windows': 'truth_window',
'truth_wizard': 'wizards_project',
'truttemer-le_grand': 'truttemer-le-grand',
'truttikon_zh': 'truttikon',
'truxton_hare': 'truxtun_hare',
'truxton_king': 'graustark',
'truyen_kieu': 'the_tale_of_kieu',
'trw_credit': 'experian',
'trw_inc.': 'trw',
'trw_incorporated': 'trw',
'trwyn_bychestyn': 'aberdaron',
'trwyn_talfarach': 'aberdaron',
'try-catch_block': 'exception_handling',
'try_2004': 'shimizu_mega-city_pyramid',
'try_c': 'tryon_coterie',
'try_honestly': 'try_honesty',
'try_pots': 'try_pot',
'try_title': 'quiet_title',
'tryall_rocks': 'tryal_rocks',
'tryblidiella_rufula': 'rhytidhysteron_rufulum',
'trybunal_konstytucyjny': 'constitutional_tribunal_of_the_republic_of_poland',
'trybunal_stanu': 'state_tribunal_of_the_republic_of_poland',
'tryder_g7': 'trider_g7',
'tryggve_olavsson': 'tryggve_olafsson',
'tryggvi_gislason': 'akureyri_junior_college',
'tryggvi_olafsson': 'tryggve_olafsson',
'trygonoptera_mucosa': 'bebil',
'trygonoptera_ovalis': 'striped_stingaree',
'trygonoptera_personata': 'masked_stingaree',
'trygonoptera_testacea': 'common_stingaree',
'trygonorhina_fasciata': 'fiddler_ray',
'trygve_fossum': 'tryggve_fossum',
'trygve_gulbrannsen': 'trygve_gulbranssen',
'trygvie_lie': 'trygve_lie',
'trying_hard': 'to_you_all',
'trying_people': 'aoi:_bionix',
'trymedia_systems': 'trymedia',
'tryngites_subruficollis': 'buff-breasted_sandpiper',
'tryonia_adamantina': 'diamond_y_springsnail',
'tryonia_alamosae': 'alamosa_springsnail',
'tryonia_angulata': 'sportingoods_tryonia',
'tryonia_clathrata': 'grated_tryonia',
'tryonia_elata': 'point_of_rocks_tryonia',
'tryonia_ericae': 'minute_tryonia',
'tryonia_gilae': 'gila_tryonia',
'tryonia_imitator': 'california_brackish_water_snail',
'tryonia_quitobaquitae': 'quitabaquito_tryonia',
'tryonia_variegata': 'amargossa_tryonia',
'trypaea_australiensis': 'bass_yabby',
'trypanosoma_congolese': 'trypanosoma_congolense',
'trypanosoma_gambiense': 'trypanosoma_brucei',
'trypeta_capitata': 'ceratitis_capitata',
'trypeta_cultaris': 'paracantha_cultaris',
'tryphaena_cleopatraina': 'cleopatra_vi_of_egypt',
'tryphomys_adustus': 'luzon_short-nosed_rat',
'trypillian_civilization': 'cucuteni-trypillian_culture',
'trypillian_culture': 'cucuteni-trypillian_culture',
'trypollian_culture': 'cucuteni-trypillian_culture',
'trypoxylon_politum': 'organ_pipe_mud_dauber',
'trypoxylus_dichotomus': 'japanese_rhinoceros_beetle',
'trypsin_inhibitors': 'trypsin_inhibitor',
'trypsin_proteolysis': 'trypsin',
'trypterigium_wilfordii': 'tripterygium_wilfordii',
'tryptophan--phenylpyruvate_transaminase': 'tryptophan-phenylpyruvate_transaminase',
'tryptophan_5-monooxygenase': 'tryptophan_hydroxylase',
'tryptophan_metabolism': 'tryptophan',
'tryptophan_synthetase': 'tryptophan_synthase',
'trys_kryziai': 'three_crosses',
'tryweryn_bill': 'river_dee_regulation_system',
'tryweryn_reservoir': 'llyn_celyn',
'tryweryn_valley': 'capel_celyn',
'trz_80': 'video_genie',
'trzciana_commune': 'gmina_trzciana',
'trzcianka_commune': 'gmina_trzcianka',
'trzcianne_commune': 'gmina_trzcianne',
'trzciel_commune': 'gmina_trzciel',
'trzcinica_commune': 'gmina_trzcinica',
'trzebiel_commune': 'gmina_trzebiel',
'trzebielino_commune': 'gmina_trzebielino',
'trzebinia_commune': 'gmina_trzebinia',
'trzebnica_abbey': 'sanctuary_of_st._jadwiga_in_trzebnica',
'trzebnica_commune': 'gmina_trzebnica',
'trzebnica_sanctuary': 'sanctuary_of_st._jadwiga_in_trzebnica',
'trzebownisko_commune': 'gmina_trzebownisko',
'trzemeszno_commune': 'gmina_trzemeszno',
'trzeszczany_commune': 'gmina_trzeszczany'}
|
PypiClean
|
/alipay-python-3.3.17.tar.gz/alipay-python-3.3.17/alipay/aop/api/domain/FengdieSitesCreateModel.py
|
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.FengdieActivityCreatePageData import FengdieActivityCreatePageData
from alipay.aop.api.domain.FengdieActivityCreatePagesData import FengdieActivityCreatePagesData
class FengdieSitesCreateModel(object):
def __init__(self):
self._domain = None
self._name = None
self._offline_time = None
self._page = None
self._pages = None
self._title = None
@property
def domain(self):
return self._domain
@domain.setter
def domain(self, value):
self._domain = value
@property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = value
@property
def offline_time(self):
return self._offline_time
@offline_time.setter
def offline_time(self, value):
self._offline_time = value
@property
def page(self):
return self._page
@page.setter
def page(self, value):
if isinstance(value, FengdieActivityCreatePageData):
self._page = value
else:
self._page = FengdieActivityCreatePageData.from_alipay_dict(value)
@property
def pages(self):
return self._pages
    @pages.setter
    def pages(self, value):
        # Note: only list values are accepted; non-list input leaves self._pages unchanged.
        if isinstance(value, list):
self._pages = list()
for i in value:
if isinstance(i, FengdieActivityCreatePagesData):
self._pages.append(i)
else:
self._pages.append(FengdieActivityCreatePagesData.from_alipay_dict(i))
@property
def title(self):
return self._title
@title.setter
def title(self, value):
self._title = value
def to_alipay_dict(self):
params = dict()
if self.domain:
if hasattr(self.domain, 'to_alipay_dict'):
params['domain'] = self.domain.to_alipay_dict()
else:
params['domain'] = self.domain
if self.name:
if hasattr(self.name, 'to_alipay_dict'):
params['name'] = self.name.to_alipay_dict()
else:
params['name'] = self.name
if self.offline_time:
if hasattr(self.offline_time, 'to_alipay_dict'):
params['offline_time'] = self.offline_time.to_alipay_dict()
else:
params['offline_time'] = self.offline_time
if self.page:
if hasattr(self.page, 'to_alipay_dict'):
params['page'] = self.page.to_alipay_dict()
else:
params['page'] = self.page
        if self.pages:
            if isinstance(self.pages, list):
                # Convert each element to its dict form in place before serializing.
                for i in range(0, len(self.pages)):
                    element = self.pages[i]
                    if hasattr(element, 'to_alipay_dict'):
                        self.pages[i] = element.to_alipay_dict()
if hasattr(self.pages, 'to_alipay_dict'):
params['pages'] = self.pages.to_alipay_dict()
else:
params['pages'] = self.pages
if self.title:
if hasattr(self.title, 'to_alipay_dict'):
params['title'] = self.title.to_alipay_dict()
else:
params['title'] = self.title
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = FengdieSitesCreateModel()
if 'domain' in d:
o.domain = d['domain']
if 'name' in d:
o.name = d['name']
if 'offline_time' in d:
o.offline_time = d['offline_time']
if 'page' in d:
o.page = d['page']
if 'pages' in d:
o.pages = d['pages']
if 'title' in d:
o.title = d['title']
return o
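
# Illustrative usage sketch (not part of the SDK itself): build a model and
# round-trip it through to_alipay_dict/from_alipay_dict. All field values
# below are hypothetical placeholders.
if __name__ == "__main__":
    model = FengdieSitesCreateModel()
    model.name = "example-site"
    model.title = "Example Title"
    model.offline_time = "2030-01-01 00:00:00"
    params = model.to_alipay_dict()
    restored = FengdieSitesCreateModel.from_alipay_dict(params)
    assert restored.name == model.name and restored.title == model.title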
|
PypiClean
|
/aliyun-log-python-sdk-0.8.8.tar.gz/aliyun-log-python-sdk-0.8.8/aliyun/log/resource_params.py
|
import json
from aliyun.log.util import Util
def check_type_for_init(**kwargs):
type_dict = kwargs["instance"].type_dict
del kwargs["instance"]
for key, value in kwargs.items():
if value is not None:
need_type = type_dict.get(key)
if not isinstance(value, need_type):
raise TypeError("the %s type must be %s" % (key, need_type))
def check_params(name, need_type):
def outer(func):
def inner(*args, **kwargs):
value = None
if len(args) > 1:
value = args[1]
if kwargs:
value = kwargs.get(name)
if value is not None:
if not isinstance(value, need_type):
raise TypeError("the %s type must be %s" % (name, need_type))
            return func(*args, **kwargs)
return inner
return outer
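
# Illustrative note (not part of the SDK): check_params inspects either the
# second positional argument (after self) or the keyword argument matching
# `name`. For example, with the decorated setters defined below:
#
#     record.set_tag(123)   # raises TypeError: the tag type must be <class 'str'>
#     record.set_tag("ok")  # passes validation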
class ResourceRecord:
"""
ResourceRecord
    Create required: value
    Update required: record_id, value
:type value: dict
:param value: record value
:type record_id: string
:param record_id: record id
:type tag: string
:param tag: record tag
"""
type_dict = {
"record_id": str,
"tag": str,
"value": dict
}
def __init__(self, record_id=None, tag=None, value=None):
check_type_for_init(record_id=record_id, tag=tag, value=value, instance=self)
self.record_id = record_id
self.tag = tag
self.value = value
self.create_time = None
self.last_modify_time = None
def get_record_id(self):
return self.record_id
@check_params("record_id", str)
def set_record_id(self, record_id):
self.record_id = record_id
def get_tag(self):
return self.tag
@check_params("tag", str)
def set_tag(self, tag):
self.tag = tag
def get_value(self):
return self.value
@check_params("tag", dict)
def set_value(self, value):
self.value = value
def get_create_time(self):
return self.create_time
def set_create_time(self, create_time):
self.create_time = create_time
def get_last_modify_time(self):
return self.last_modify_time
def set_last_modify_time(self, last_modify_time):
self.last_modify_time = last_modify_time
def to_dict(self):
result = {"value": self.value}
if self.tag:
result["tag"] = self.tag
if self.record_id:
result["id"] = self.record_id
if self.create_time:
result["create_time"] = self.create_time
if self.last_modify_time:
result["last_modify_time"] = self.last_modify_time
return result
    def check_value(self):
        if not self.value:
            raise ValueError("ResourceRecord value must not be None")
    def check_for_update(self):
        self.check_value()
        if not self.record_id:
            raise ValueError("ResourceRecord record_id must not be None")
@classmethod
def from_dict(cls, dict_data):
return ResourceRecord(record_id=dict_data.get("id"), tag=dict_data.get("tag"),
value=json.loads(dict_data.get("value")))
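
# Illustrative usage sketch (not part of the SDK): a record needs a value to
# be created, and additionally a record_id to be updated. All values below
# are hypothetical.
#
#     record = ResourceRecord(tag="demo", value={"k": "v"})
#     record.check_value()          # passes: value is set
#     record.set_record_id("r-001")
#     record.check_for_update()     # passes: record_id and value are both set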
class ResourceSchemaItem:
"""
ResourceSchemaItem
:type column: string
:param column: column
:type ext_info: string
:param ext_info: schema ext_info
:type schema_type: string
:param schema_type: schema_type
"""
type_dict = {
"column": str,
"ext_info": str,
"schema_type": str
}
    def __init__(self, column=None, ext_info=None, schema_type=None):
        check_type_for_init(column=column, ext_info=ext_info, schema_type=schema_type, instance=self)
self.column = column
self.ext_info = ext_info
self.type = schema_type
def get_column(self):
return self.column
@check_params("column", str)
def set_column(self, column):
self.column = column
def get_ext_info(self):
return self.ext_info
def set_ext_info(self, ext_info):
self.ext_info = ext_info
def get_schema_type(self):
return self.type
@check_params("schema_type", str)
def set_schema_type(self, schema_type):
self.type = schema_type
def to_dict(self):
schema_dict = {}
if self.get_column():
schema_dict["column"] = self.get_column()
if self.get_ext_info():
schema_dict["ext_info"] = self.get_ext_info()
if self.get_schema_type():
schema_dict["type"] = self.get_schema_type()
return schema_dict
class Resource:
""" Resource
Create required: resource_name resource_type
Update required: resource_name
:type resource_name: string
:param resource_name: resource name
:type resource_type: string
:param resource_type: resource type
:type description: string
    :param description: the description of the resource
:type schema_list: List[ResourceSchemaItem]
:param schema_list: schema of resource
:type acl: dict
:param acl: policy example:{"policy": {"type": "all_rw"}}
:type ext_info: string
:param ext_info: extra info of this resource
"""
type_dict = {
"resource_name": str,
"resource_type": str,
"schema_list": list,
"description": str,
"acl": dict,
"ext_info": str
}
def __init__(self, resource_name=None, resource_type=None, schema_list=None,
description=None, acl=None, ext_info=None):
check_type_for_init(resource_name=resource_name, resource_type=resource_type, schema_list=schema_list,
description=description, acl=acl, ext_info=ext_info, instance=self)
self.resource_name = resource_name
self.resource_type = resource_type
self.description = description
self.schema_list = self._check_schema(schema_list) if schema_list else schema_list
self.acl = acl
self.ext_info = ext_info
self.create_time = None
self.last_modify_time = None
def get_resource_name(self):
return self.resource_name
@check_params("resource_name", str)
def set_resource_name(self, resource_name):
self.resource_name = resource_name
def get_resource_type(self):
return self.resource_type
@check_params("resource_type", str)
def set_resource_type(self, resource_type):
self.resource_type = resource_type
def get_description(self):
return self.description
@check_params("description", str)
def set_description(self, description):
self.description = description
def get_schema(self):
return self.schema_list
@check_params("description", list)
def set_schema_list(self, schema):
self.schema_list = self._check_schema(schema) if schema else schema
def get_acl(self):
return self.acl
@check_params("acl", dict)
def set_acl(self, acl):
self.acl = acl
def get_ext_info(self):
return self.ext_info
@check_params("ext_info", str)
def set_ext_info(self, ext_info):
self.ext_info = ext_info
def get_create_time(self):
return self.create_time
def set_create_time(self, create_time):
self.create_time = create_time
def get_last_modify_time(self):
return self.last_modify_time
def set_last_modify_time(self, last_modify_time):
self.last_modify_time = last_modify_time
def check_for_update(self):
if not self.resource_name:
raise ValueError("resource_name must not be None!")
def check_for_create(self):
if not (self.resource_name and self.resource_type):
raise ValueError("resource_name and resource_type must not be None!")
@staticmethod
def _check_schema(schema_list):
for schema in schema_list:
if not isinstance(schema, ResourceSchemaItem):
raise TypeError("schema list element must be instance of ResourceSchemaItem ")
return schema_list
def to_dict(self):
schema_list = [schema.to_dict() for schema in self.schema_list] if self.schema_list else []
result = {"schema": schema_list}
if self.create_time:
result["create_time"] = self.create_time
if self.last_modify_time:
result["last_modify_time"] = self.last_modify_time
if self.resource_name:
result["resource_name"] = self.resource_name
if self.resource_type:
result["resource_type"] = self.resource_type
if self.description:
result["description"] = self.description
if self.ext_info:
result["ext_info"] = self.ext_info
if self.acl:
result["acl"] = self.acl
return result
@classmethod
def from_dict(cls, dict_data):
schema_list = Util.convert_unicode_to_str(json.loads(dict_data.get("schema"))).get("schema")
schema_instance_list = []
if schema_list:
for schema in schema_list:
schema_instance_list.append(ResourceSchemaItem(column=schema.get("column"), schema_type=schema.get("type"),
ext_info=schema.get("ext_info")))
resource = Resource()
resource.set_resource_name(dict_data.get("name"))
resource.set_description(dict_data.get('description'))
resource.set_resource_type(dict_data.get("type"))
resource.set_ext_info(dict_data.get("extInfo"))
resource.set_acl(json.loads(dict_data.get("acl")))
resource.set_schema_list(schema_instance_list)
resource.set_create_time(dict_data.get("create_time"))
resource.set_last_modify_time(dict_data.get("last_modify_time"))
return resource
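# --- Usage sketch (illustrative, not part of the original module) ---
# A minimal, hedged example of assembling a Resource for a create call,
# using only the classes defined above; the table and column names are
# hypothetical demo values.
if __name__ == "__main__":
    demo_schema = [
        ResourceSchemaItem(column="id", schema_type="bigint"),
        ResourceSchemaItem(column="name", schema_type="varchar"),
    ]
    demo_resource = Resource(resource_name="demo_table",
                             resource_type="table",
                             description="demo resource",
                             schema_list=demo_schema,
                             acl={"policy": {"type": "all_rw"}})
    demo_resource.check_for_create()  # raises ValueError when required fields are missing
    print(demo_resource.to_dict())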
|
PypiClean
|
/pyqg_jax-0.6.0-py3-none-any.whl/pyqg_jax/parameterizations/_defs.py
|
import functools
import jax.numpy as jnp
from .. import state as _state
def uv_parameterization(param_func):
"""Decorator implementing parameterizations in terms of velocity.
The target function should take as its first three arguments
:pycode:`(state, param_aux, model)` as with any other
parameterization function. Additional arguments will be passed
through unmodified.
This function should then return two values: :pycode:`(du, dv),
param_aux`. These values will then be added to the model's
original update value to form the parameterized update.
The wrapped function is suitable for use with
:class:`ParameterizedModel`.
See also: :class:`pyqg.parameterizations.UVParameterization`
"""
@functools.wraps(param_func)
def wrapped_uv_param(state, param_aux, model, *args, **kwargs):
(du, dv), param_aux = param_func(state, param_aux, model, *args, **kwargs)
duh = _state._generic_rfftn(du)
dvh = _state._generic_rfftn(dv)
updates = model.get_updates(state)
dqhdt = (
updates.qh
+ ((-1 * jnp.expand_dims(model._il, (0, -1))) * duh)
+ (jnp.expand_dims(model._ik, (0, 1)) * dvh)
)
return updates.update(qh=dqhdt), param_aux
return wrapped_uv_param
def q_parameterization(param_func):
"""Decorator implementing parameterizations in terms of potential
vorticity.
The target function should take as its first three arguments
:pycode:`(state, param_aux, model)` as with any other
parameterization function. Additional arguments will be passed
through unmodified.
This function should then return two values: :pycode:`dq,
param_aux`. These values will then be added to the model's
original update value to form the parameterized update.
The wrapped function is suitable for use with
:class:`ParameterizedModel`.
See also: :class:`pyqg.parameterizations.QParameterization`
"""
@functools.wraps(param_func)
def wrapped_q_param(state, param_aux, model, *args, **kwargs):
dq, param_aux = param_func(state, param_aux, model, *args, **kwargs)
dqh = _state._generic_rfftn(dq)
updates = model.get_updates(state)
dqhdt = updates.qh + dqh
return updates.update(qh=dqhdt), param_aux
return wrapped_q_param
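# --- Usage sketch (illustrative, not part of the original module) ---
# A hedged example of a PV-based closure built with q_parameterization:
# the zero update is a stand-in for a real subgrid term, and reading the
# grid-space PV as state.q is an assumption about the state object.
@q_parameterization
def zero_q_param(state, param_aux, model):
    dq = jnp.zeros_like(state.q)  # no-op parameterization: dq == 0
    return dq, param_aux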
|
PypiClean
|
/swh.web-0.2.37.tar.gz/swh.web-0.2.37/static/jssources/core-js/internals/fix-regexp-well-known-symbol-logic.js
|
'use strict';
// TODO: Remove from `core-js@4` since it's moved to entry points
require('../modules/es.regexp.exec');
var uncurryThis = require('../internals/function-uncurry-this-clause');
var defineBuiltIn = require('../internals/define-built-in');
var regexpExec = require('../internals/regexp-exec');
var fails = require('../internals/fails');
var wellKnownSymbol = require('../internals/well-known-symbol');
var createNonEnumerableProperty = require('../internals/create-non-enumerable-property');
var SPECIES = wellKnownSymbol('species');
var RegExpPrototype = RegExp.prototype;
module.exports = function (KEY, exec, FORCED, SHAM) {
var SYMBOL = wellKnownSymbol(KEY);
var DELEGATES_TO_SYMBOL = !fails(function () {
// String methods call symbol-named RegExp methods
var O = {};
O[SYMBOL] = function () { return 7; };
return ''[KEY](O) !== 7;
});
var DELEGATES_TO_EXEC = DELEGATES_TO_SYMBOL && !fails(function () {
// Symbol-named RegExp methods call .exec
var execCalled = false;
var re = /a/;
if (KEY === 'split') {
// We can't use real regex here since it causes deoptimization
// and serious performance degradation in V8
// https://github.com/zloirock/core-js/issues/306
re = {};
// RegExp[@@split] doesn't call the regex's exec method, but first creates
// a new one. We need to return the patched regex when creating the new one.
re.constructor = {};
re.constructor[SPECIES] = function () { return re; };
re.flags = '';
re[SYMBOL] = /./[SYMBOL];
}
re.exec = function () { execCalled = true; return null; };
re[SYMBOL]('');
return !execCalled;
});
if (
!DELEGATES_TO_SYMBOL ||
!DELEGATES_TO_EXEC ||
FORCED
) {
var uncurriedNativeRegExpMethod = uncurryThis(/./[SYMBOL]);
var methods = exec(SYMBOL, ''[KEY], function (nativeMethod, regexp, str, arg2, forceStringMethod) {
var uncurriedNativeMethod = uncurryThis(nativeMethod);
var $exec = regexp.exec;
if ($exec === regexpExec || $exec === RegExpPrototype.exec) {
if (DELEGATES_TO_SYMBOL && !forceStringMethod) {
// The native String method already delegates to @@method (this
// polyfilled function), leading to infinite recursion.
// We avoid it by directly calling the native @@method method.
return { done: true, value: uncurriedNativeRegExpMethod(regexp, str, arg2) };
}
return { done: true, value: uncurriedNativeMethod(str, regexp, arg2) };
}
return { done: false };
});
defineBuiltIn(String.prototype, KEY, methods[0]);
defineBuiltIn(RegExpPrototype, SYMBOL, methods[1]);
}
if (SHAM) createNonEnumerableProperty(RegExpPrototype[SYMBOL], 'sham', true);
};
|
PypiClean
|
/ocstylemaster-0.2.3.tar.gz/ocstylemaster-0.2.3/src/OCStyleMaster/tools/blockAnalyzerH.py
|
from OCStyleMaster.common import *
from OCStyleMaster.models import *
class BlockAnalyzerH:
def __init__(self,block,analyzePos):
self.block = block
self.text = block.text
self.length = 0
self.pos = analyzePos
def start(self):
# self.__clean_text()
self.__prepare()
self.__analyze()
def __prepare(self):
self.length = len(self.text)
def __analyze(self):
self.__normal_analyze()
def __valid_blocks(self):
ret = [
Property,
InterfaceH,
Comment_N,
Comment_1,
Protocol,
FuncH,
]
return ret
def __find_start(self,buffer,pos):
find = self.__is_include_start_text(buffer)
if find is None:
return pos
m = find[0]
cls = find[1]
# print("find child {}".format(cls))
# analyze the child block
match_len = m.end(0) - m.start(0) - 1
start = pos - match_len
block = self.__create_block(cls, self.text, start)
block.file = self.block.file
block.parent = self.block
analyzer = BlockAnalyzerH(block,block.range.start+1)
analyzer.start()
self.block.add_child(block)
end = analyzer.block.range.end
if end == 0:
raise Exception("block no end {}".format(analyzer.block))
pos = end + 1
return pos
def __normal_analyze(self):
buffer = ""
pos = self.pos
while pos < self.length:
# print("post {}".format(pos))
c = self.text[pos]
buffer = buffer + c
# print("buffer {}".format(buffer))
# look for a block start
if self.block.type != Block.comment_1 and self.block.type != Block.comment_n:
newPos = self.__find_start(buffer,pos)
if newPos != pos:
pos = newPos
buffer = ""
continue
# look for the block end
index = self.__is_include_end_text(buffer)
if index >= 0:
self.block.range.end = pos
# print("find end {} pos {}".format(self.block.type,pos))
break
pos += 1
def __is_include_start_text(self,text):
blockClasses = self.__valid_blocks()
for cls in blockClasses:
start = cls.start_text()
if start is None:
continue
m = re.search(start,text,re.M)
if m is not None:
ret = (m,cls)
return ret
return None
def __is_include_end_text(self,text):
cls = type(self.block)
try:
endText = cls.end_text()
if endText is None:
return -1
m = re.search(endText,text,re.M)
if m is None:
return -1
return m.start(0)
except Exception:
print(cls)
return -1
def __create_block(self,type,text,start):
ret = type(text)
ret.range.start = start
return ret
|
PypiClean
|
/netdisco-3.0.0.tar.gz/netdisco-3.0.0/README.md
|
## This library is deprecated. We will no longer release new versions, fix bugs or accept pull requests. If you are looking to make your Home Assistant integration discoverable, use [the zeroconf and SSDP manifest options](https://developers.home-assistant.io/docs/en/next/creating_integration_manifest.html#zeroconf).
# NetDisco
NetDisco is a Python 3 library to discover local devices and services. It lets you scan on demand or run a service that scans the network in the background at a set interval.
Current methods of scanning:
- mDNS (includes Chromecast, HomeKit)
- UPnP
- Plex Media Server using Good Day Mate protocol
- Logitech Media Server discovery protocol
- Daikin discovery protocol
- Web OS discovery protocol
It is the library that powers the device discovery within [Home Assistant](https://home-assistant.io/).
### We are no longer accepting PRs that implement custom discovery protocols. Only PRs that use mDNS or UPnP are supported. See [this issue](https://github.com/home-assistant/netdisco/issues/230)
## Installation
Netdisco is available on PyPi. Install using `pip3 install netdisco`.
## Example
From command-line:
```bash
python3 -m netdisco
# To see all raw data:
python3 -m netdisco dump
```
In your script:
```python
from netdisco.discovery import NetworkDiscovery
netdis = NetworkDiscovery()
netdis.scan()
for dev in netdis.discover():
print(dev, netdis.get_info(dev))
netdis.stop()
```
Will result in a list of discovered devices and their most important information:
```
DLNA ['http://192.168.1.1:8200/rootDesc.xml', 'http://192.168.1.150:32469/DeviceDescription.xml']
google_cast [('Living Room.local.', 8009)]
philips_hue ['http://192.168.1.2:80/description.xml']
belkin_wemo ['http://192.168.1.10:49153/setup.xml']
```
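For the background scanning mentioned above, a minimal sketch that relies only on the `NetworkDiscovery` API shown here (the loop and the 300-second interval are illustrative, not a built-in service):
```python
import time
from netdisco.discovery import NetworkDiscovery

def scan_in_background(interval=300):
    netdis = NetworkDiscovery()
    try:
        while True:
            netdis.scan()
            for dev in netdis.discover():
                print(dev, netdis.get_info(dev))
            time.sleep(interval)
    finally:
        netdis.stop()
```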
|
PypiClean
|
/pulumi_gcp_native-0.0.2a1617829075.tar.gz/pulumi_gcp_native-0.0.2a1617829075/pulumi_gcp_native/datafusion/v1beta1/instance.py
|
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
from ._inputs import *
__all__ = ['Instance']
class Instance(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
accelerators: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AcceleratorArgs']]]]] = None,
available_version: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VersionArgs']]]]] = None,
dataproc_service_account: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
display_name: Optional[pulumi.Input[str]] = None,
enable_rbac: Optional[pulumi.Input[bool]] = None,
enable_stackdriver_logging: Optional[pulumi.Input[bool]] = None,
enable_stackdriver_monitoring: Optional[pulumi.Input[bool]] = None,
instances_id: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
locations_id: Optional[pulumi.Input[str]] = None,
network_config: Optional[pulumi.Input[pulumi.InputType['NetworkConfigArgs']]] = None,
options: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
private_instance: Optional[pulumi.Input[bool]] = None,
projects_id: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None,
version: Optional[pulumi.Input[str]] = None,
zone: Optional[pulumi.Input[str]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
Creates a new Data Fusion instance in the specified project and location.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AcceleratorArgs']]]] accelerators: List of accelerators enabled for this CDF instance.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VersionArgs']]]] available_version: Available versions that the instance can be upgraded to using UpdateInstanceRequest.
:param pulumi.Input[str] dataproc_service_account: User-managed service account to set on Dataproc when Cloud Data Fusion creates Dataproc to run data processing pipelines. This allows users to have fine-grained access control on Dataproc's accesses to cloud resources.
:param pulumi.Input[str] description: A description of this instance.
:param pulumi.Input[str] display_name: Display name for an instance.
:param pulumi.Input[bool] enable_rbac: Option to enable granular role-based access control.
:param pulumi.Input[bool] enable_stackdriver_logging: Option to enable Stackdriver Logging.
:param pulumi.Input[bool] enable_stackdriver_monitoring: Option to enable Stackdriver Monitoring.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: The resource labels for instance to use to annotate any related underlying resources such as Compute Engine VMs. The character '=' is not allowed to be used within the labels.
:param pulumi.Input[pulumi.InputType['NetworkConfigArgs']] network_config: Network configuration options. These are required when a private Data Fusion instance is to be created.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] options: Map of additional options used to configure the behavior of Data Fusion instance.
:param pulumi.Input[bool] private_instance: Specifies whether the Data Fusion instance should be private. If set to true, all Data Fusion nodes will have private IP addresses and will not be able to access the public internet.
:param pulumi.Input[str] type: Required. Instance type.
:param pulumi.Input[str] version: Current version of Data Fusion.
:param pulumi.Input[str] zone: Name of the zone in which the Data Fusion instance will be created. Only DEVELOPER instances use this field.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['accelerators'] = accelerators
__props__['available_version'] = available_version
__props__['dataproc_service_account'] = dataproc_service_account
__props__['description'] = description
__props__['display_name'] = display_name
__props__['enable_rbac'] = enable_rbac
__props__['enable_stackdriver_logging'] = enable_stackdriver_logging
__props__['enable_stackdriver_monitoring'] = enable_stackdriver_monitoring
if instances_id is None and not opts.urn:
raise TypeError("Missing required property 'instances_id'")
__props__['instances_id'] = instances_id
__props__['labels'] = labels
if locations_id is None and not opts.urn:
raise TypeError("Missing required property 'locations_id'")
__props__['locations_id'] = locations_id
__props__['network_config'] = network_config
__props__['options'] = options
__props__['private_instance'] = private_instance
if projects_id is None and not opts.urn:
raise TypeError("Missing required property 'projects_id'")
__props__['projects_id'] = projects_id
__props__['type'] = type
__props__['version'] = version
__props__['zone'] = zone
__props__['api_endpoint'] = None
__props__['create_time'] = None
__props__['gcs_bucket'] = None
__props__['name'] = None
__props__['p4_service_account'] = None
__props__['service_endpoint'] = None
__props__['state'] = None
__props__['state_message'] = None
__props__['tenant_project_id'] = None
__props__['update_time'] = None
super(Instance, __self__).__init__(
'gcp-native:datafusion/v1beta1:Instance',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'Instance':
"""
Get an existing Instance resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
__props__["accelerators"] = None
__props__["api_endpoint"] = None
__props__["available_version"] = None
__props__["create_time"] = None
__props__["dataproc_service_account"] = None
__props__["description"] = None
__props__["display_name"] = None
__props__["enable_rbac"] = None
__props__["enable_stackdriver_logging"] = None
__props__["enable_stackdriver_monitoring"] = None
__props__["gcs_bucket"] = None
__props__["labels"] = None
__props__["name"] = None
__props__["network_config"] = None
__props__["options"] = None
__props__["p4_service_account"] = None
__props__["private_instance"] = None
__props__["service_endpoint"] = None
__props__["state"] = None
__props__["state_message"] = None
__props__["tenant_project_id"] = None
__props__["type"] = None
__props__["update_time"] = None
__props__["version"] = None
__props__["zone"] = None
return Instance(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def accelerators(self) -> pulumi.Output[Sequence['outputs.AcceleratorResponse']]:
"""
List of accelerators enabled for this CDF instance.
"""
return pulumi.get(self, "accelerators")
@property
@pulumi.getter(name="apiEndpoint")
def api_endpoint(self) -> pulumi.Output[str]:
"""
Endpoint on which the REST APIs is accessible.
"""
return pulumi.get(self, "api_endpoint")
@property
@pulumi.getter(name="availableVersion")
def available_version(self) -> pulumi.Output[Sequence['outputs.VersionResponse']]:
"""
Available versions that the instance can be upgraded to using UpdateInstanceRequest.
"""
return pulumi.get(self, "available_version")
@property
@pulumi.getter(name="createTime")
def create_time(self) -> pulumi.Output[str]:
"""
The time the instance was created.
"""
return pulumi.get(self, "create_time")
@property
@pulumi.getter(name="dataprocServiceAccount")
def dataproc_service_account(self) -> pulumi.Output[str]:
"""
User-managed service account to set on Dataproc when Cloud Data Fusion creates Dataproc to run data processing pipelines. This allows users to have fine-grained access control on Dataproc's accesses to cloud resources.
"""
return pulumi.get(self, "dataproc_service_account")
@property
@pulumi.getter
def description(self) -> pulumi.Output[str]:
"""
A description of this instance.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="displayName")
def display_name(self) -> pulumi.Output[str]:
"""
Display name for an instance.
"""
return pulumi.get(self, "display_name")
@property
@pulumi.getter(name="enableRbac")
def enable_rbac(self) -> pulumi.Output[bool]:
"""
Option to enable granular role-based access control.
"""
return pulumi.get(self, "enable_rbac")
@property
@pulumi.getter(name="enableStackdriverLogging")
def enable_stackdriver_logging(self) -> pulumi.Output[bool]:
"""
Option to enable Stackdriver Logging.
"""
return pulumi.get(self, "enable_stackdriver_logging")
@property
@pulumi.getter(name="enableStackdriverMonitoring")
def enable_stackdriver_monitoring(self) -> pulumi.Output[bool]:
"""
Option to enable Stackdriver Monitoring.
"""
return pulumi.get(self, "enable_stackdriver_monitoring")
@property
@pulumi.getter(name="gcsBucket")
def gcs_bucket(self) -> pulumi.Output[str]:
"""
Cloud Storage bucket generated by Data Fusion in the customer project.
"""
return pulumi.get(self, "gcs_bucket")
@property
@pulumi.getter
def labels(self) -> pulumi.Output[Mapping[str, str]]:
"""
The resource labels for instance to use to annotate any related underlying resources such as Compute Engine VMs. The character '=' is not allowed to be used within the labels.
"""
return pulumi.get(self, "labels")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of this instance is in the form of projects/{project}/locations/{location}/instances/{instance}.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="networkConfig")
def network_config(self) -> pulumi.Output['outputs.NetworkConfigResponse']:
"""
Network configuration options. These are required when a private Data Fusion instance is to be created.
"""
return pulumi.get(self, "network_config")
@property
@pulumi.getter
def options(self) -> pulumi.Output[Mapping[str, str]]:
"""
Map of additional options used to configure the behavior of Data Fusion instance.
"""
return pulumi.get(self, "options")
@property
@pulumi.getter(name="p4ServiceAccount")
def p4_service_account(self) -> pulumi.Output[str]:
"""
P4 service account for the customer project.
"""
return pulumi.get(self, "p4_service_account")
@property
@pulumi.getter(name="privateInstance")
def private_instance(self) -> pulumi.Output[bool]:
"""
Specifies whether the Data Fusion instance should be private. If set to true, all Data Fusion nodes will have private IP addresses and will not be able to access the public internet.
"""
return pulumi.get(self, "private_instance")
@property
@pulumi.getter(name="serviceEndpoint")
def service_endpoint(self) -> pulumi.Output[str]:
"""
Endpoint on which the Data Fusion UI is accessible.
"""
return pulumi.get(self, "service_endpoint")
@property
@pulumi.getter
def state(self) -> pulumi.Output[str]:
"""
The current state of this Data Fusion instance.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="stateMessage")
def state_message(self) -> pulumi.Output[str]:
"""
Additional information about the current state of this Data Fusion instance if available.
"""
return pulumi.get(self, "state_message")
@property
@pulumi.getter(name="tenantProjectId")
def tenant_project_id(self) -> pulumi.Output[str]:
"""
The name of the tenant project.
"""
return pulumi.get(self, "tenant_project_id")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Required. Instance type.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="updateTime")
def update_time(self) -> pulumi.Output[str]:
"""
The time the instance was last updated.
"""
return pulumi.get(self, "update_time")
@property
@pulumi.getter
def version(self) -> pulumi.Output[str]:
"""
Current version of Data Fusion.
"""
return pulumi.get(self, "version")
@property
@pulumi.getter
def zone(self) -> pulumi.Output[str]:
"""
Name of the zone in which the Data Fusion instance will be created. Only DEVELOPER instances use this field.
"""
return pulumi.get(self, "zone")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
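# --- Usage sketch (illustrative, not part of the generated module) ---
# A hedged example of declaring a Data Fusion instance from a Pulumi
# program (normally a separate __main__.py); the project, location,
# instance name, type and version values are hypothetical.
def _example_program():
    instance = Instance(
        "example-instance",
        projects_id="my-project",
        locations_id="us-central1",
        instances_id="example",
        type="BASIC",
        version="6.4.0")
    pulumi.export("apiEndpoint", instance.api_endpoint)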
|
PypiClean
|
/foundation-sphinx-theme-0.0.3.tar.gz/foundation-sphinx-theme-0.0.3/foundation_sphinx_theme/static/foundation/js/foundation/foundation.orbit.js
|
;(function ($, window, document, undefined) {
'use strict';
Foundation.libs = Foundation.libs || {};
Foundation.libs.orbit = {
name: 'orbit',
version: '4.0.0',
settings: {
timer_speed: 10000,
animation_speed: 500,
bullets: true,
stack_on_small: true,
container_class: 'orbit-container',
stack_on_small_class: 'orbit-stack-on-small',
next_class: 'orbit-next',
prev_class: 'orbit-prev',
timer_container_class: 'orbit-timer',
timer_paused_class: 'paused',
timer_progress_class: 'orbit-progress',
slides_container_class: 'orbit-slides-container',
bullets_container_class: 'orbit-bullets',
bullets_active_class: 'active',
slide_number_class: 'orbit-slide-number',
caption_class: 'orbit-caption',
active_slide_class: 'active',
orbit_transition_class: 'orbit-transitioning'
},
init: function (scope, method, options) {
var self = this;
Foundation.inherit(self, 'data_options');
if (typeof method === 'object') {
$.extend(true, self.settings, method);
}
$('[data-orbit]', scope).each(function(idx, el) {
var scoped_self = $.extend(true, {}, self);
scoped_self._init(idx, el);
});
},
_container_html: function() {
var self = this;
return '<div class="' + self.settings.container_class + '"></div>';
},
_bullets_container_html: function($slides) {
var self = this,
$list = $('<ol class="' + self.settings.bullets_container_class + '"></ol>');
$slides.each(function(idx, slide) {
var $item = $('<li data-orbit-slide-number="' + (idx+1) + '" class=""></li>');
if (idx === 0) {
$item.addClass(self.settings.bullets_active_class);
}
$list.append($item);
});
return $list;
},
_slide_number_html: function(slide_number, total_slides) {
var self = this,
$container = $('<div class="' + self.settings.slide_number_class + '"></div>');
$container.append('<span>' + slide_number + '</span> of <span>' + total_slides + '</span>');
return $container;
},
_timer_html: function() {
var self = this;
if (typeof self.settings.timer_speed === 'number' && self.settings.timer_speed > 0) {
return '<div class="' + self.settings.timer_container_class
+ '"><span></span><div class="' + self.settings.timer_progress_class
+ '"></div></div>';
} else {
return '';
}
},
_next_html: function() {
var self = this;
return '<a href="#" class="' + self.settings.next_class + '">Next <span></span></a>';
},
_prev_html: function() {
var self = this;
return '<a href="#" class="' + self.settings.prev_class + '">Prev <span></span></a>';
},
_init: function (idx, slider) {
var self = this,
$slides_container = $(slider),
$container = $slides_container.wrap(self._container_html()).parent(),
$slides = $slides_container.children();
$.extend(true, self.settings, self.data_options($slides_container));
$container.append(self._prev_html());
$container.append(self._next_html());
$slides_container.addClass(self.settings.slides_container_class);
if (self.settings.stack_on_small) {
$container.addClass(self.settings.stack_on_small_class);
}
$container.append(self._slide_number_html(1, $slides.length));
$container.append(self._timer_html());
if (self.settings.bullets) {
$container.after(self._bullets_container_html($slides));
}
// To better support the "sliding" effect it's easier
// if we just clone the first and last slides
$slides_container.append($slides.first().clone().attr('data-orbit-slide',''));
$slides_container.prepend($slides.last().clone().attr('data-orbit-slide',''));
// Make the first "real" slide active
$slides_container.css('marginLeft', '-100%');
$slides.first().addClass(self.settings.active_slide_class);
self._init_events($slides_container);
self._init_dimensions($slides_container);
self._start_timer($slides_container);
},
_init_events: function ($slides_container) {
var self = this,
$container = $slides_container.parent();
$(window)
.on('load.fndtn.orbit', function() {
$slides_container.height('');
$slides_container.height($slides_container.height($container.height()));
$slides_container.trigger('orbit:ready');
})
.on('resize.fndtn.orbit', function() {
$slides_container.height('');
$slides_container.height($slides_container.height($container.height()));
});
$(document).on('click.fndtn.orbit', '[data-orbit-link]', function(e) {
e.preventDefault();
var id = $(e.currentTarget).attr('data-orbit-link'),
$slide = $slides_container.find('[data-orbit-slide=' + id + ']').first();
if ($slide.length === 1) {
self._reset_timer($slides_container, true);
self._goto($slides_container, $slide.index(), function() {});
}
});
$container.siblings('.' + self.settings.bullets_container_class)
.on('click.fndtn.orbit', '[data-orbit-slide-number]', function(e) {
e.preventDefault();
self._reset_timer($slides_container, true);
self._goto($slides_container, $(e.currentTarget).data('orbit-slide-number'),function() {});
});
$container
.on('orbit:after-slide-change.fndtn.orbit', function(e, orbit) {
var $slide_number = $container.find('.' + self.settings.slide_number_class);
if ($slide_number.length === 1) {
$slide_number.replaceWith(self._slide_number_html(orbit.slide_number, orbit.total_slides));
}
})
.on('orbit:next-slide.fndtn.orbit click.fndtn.orbit', '.' + self.settings.next_class, function(e) {
e.preventDefault();
self._reset_timer($slides_container, true);
self._goto($slides_container, 'next', function() {});
})
.on('orbit:prev-slide.fndtn.orbit click.fndtn.orbit', '.' + self.settings.prev_class, function(e) {
e.preventDefault();
self._reset_timer($slides_container, true);
self._goto($slides_container, 'prev', function() {});
})
.on('orbit:toggle-play-pause.fndtn.orbit click.fndtn.orbit touchstart.fndtn.orbit', '.' + self.settings.timer_container_class, function(e) {
e.preventDefault();
var $timer = $(e.currentTarget).toggleClass(self.settings.timer_paused_class),
$slides_container = $timer.closest('.' + self.settings.container_class)
.find('.' + self.settings.slides_container_class);
if ($timer.hasClass(self.settings.timer_paused_class)) {
self._stop_timer($slides_container);
} else {
self._start_timer($slides_container);
}
})
.on('touchstart.fndtn.orbit', function(e) {
if (!e.touches) { e = e.originalEvent; }
var data = {
start_page_x: e.touches[0].pageX,
start_page_y: e.touches[0].pageY,
start_time: (new Date()).getTime(),
delta_x: 0,
is_scrolling: undefined
};
$container.data('swipe-transition', data);
e.stopPropagation();
})
.on('touchmove.fndtn.orbit', function(e) {
if (!e.touches) { e = e.originalEvent; }
// Ignore pinch/zoom events
if(e.touches.length > 1 || e.scale && e.scale !== 1) return;
var data = $container.data('swipe-transition');
if (typeof data === 'undefined') {
data = {};
}
data.delta_x = e.touches[0].pageX - data.start_page_x;
if ( typeof data.is_scrolling === 'undefined') {
data.is_scrolling = !!( data.is_scrolling || Math.abs(data.delta_x) < Math.abs(e.touches[0].pageY - data.start_page_y) );
}
if (!data.is_scrolling && !data.active) {
e.preventDefault();
self._stop_timer($slides_container);
var direction = (data.delta_x < 0) ? 'next' : 'prev';
data.active = true;
self._goto($slides_container, direction, function() {});
}
})
.on('touchend.fndtn.orbit', function(e) {
$container.data('swipe-transition', {});
e.stopPropagation();
});
},
_init_dimensions: function ($slides_container) {
var $container = $slides_container.parent(),
$slides = $slides_container.children();
$slides_container.css('width', $slides.length * 100 + '%');
$slides.css('width', 100 / $slides.length + '%');
$slides_container.height($container.height());
$slides_container.css('width', $slides.length * 100 + '%');
},
_start_timer: function ($slides_container) {
var self = this,
$container = $slides_container.parent();
var callback = function() {
self._reset_timer($slides_container, false);
self._goto($slides_container, 'next', function() {
self._start_timer($slides_container);
});
};
var $timer = $container.find('.' + self.settings.timer_container_class),
$progress = $timer.find('.' + self.settings.timer_progress_class),
progress_pct = ($progress.width() / $timer.width()),
delay = self.settings.timer_speed - (progress_pct * self.settings.timer_speed);
$progress.animate({'width': '100%'}, delay, 'linear', callback);
$slides_container.trigger('orbit:timer-started');
},
_stop_timer: function ($slides_container) {
var self = this,
$container = $slides_container.parent(),
$timer = $container.find('.' + self.settings.timer_container_class),
$progress = $timer.find('.' + self.settings.timer_progress_class),
progress_pct = $progress.width() / $timer.width();
self._rebuild_timer($container, progress_pct * 100 + '%');
// $progress.stop();
$slides_container.trigger('orbit:timer-stopped');
$timer = $container.find('.' + self.settings.timer_container_class);
$timer.addClass(self.settings.timer_paused_class);
},
_reset_timer: function($slides_container, is_paused) {
var self = this,
$container = $slides_container.parent();
self._rebuild_timer($container, '0%');
if (typeof is_paused === 'boolean' && is_paused) {
var $timer = $container.find('.' + self.settings.timer_container_class);
$timer.addClass(self.settings.timer_paused_class);
}
},
_rebuild_timer: function ($container, width_pct) {
// Zepto is unable to stop animations since they
// are css-based. This is a workaround for that
// limitation, which rebuilds the dom element
// thus stopping the animation
var self = this,
$timer = $container.find('.' + self.settings.timer_container_class),
$new_timer = $(self._timer_html()),
$new_timer_progress = $new_timer.find('.' + self.settings.timer_progress_class);
if (typeof Zepto === 'function') {
$timer.remove();
$container.append($new_timer);
$new_timer_progress.css('width', width_pct);
} else if (typeof jQuery === 'function') {
var $progress = $timer.find('.' + self.settings.timer_progress_class);
$progress.css('width', width_pct);
$progress.stop();
}
},
_goto: function($slides_container, index_or_direction, callback) {
var self = this,
$container = $slides_container.parent(),
$slides = $slides_container.children(),
$active_slide = $slides_container.find('.' + self.settings.active_slide_class),
active_index = $active_slide.index();
if ($container.hasClass(self.settings.orbit_transition_class)) {
return false;
}
if (index_or_direction === 'prev') {
if (active_index === 0) {
active_index = $slides.length - 1;
}
else {
active_index--;
}
}
else if (index_or_direction === 'next') {
active_index = (active_index+1) % $slides.length;
}
else if (typeof index_or_direction === 'number') {
active_index = (index_or_direction % $slides.length);
}
if (active_index === ($slides.length - 1) && index_or_direction === 'next') {
$slides_container.css('marginLeft', '0%');
active_index = 1;
}
else if (active_index === 0 && index_or_direction === 'prev') {
$slides_container.css('marginLeft', '-' + ($slides.length - 1) * 100 + '%');
active_index = $slides.length - 2;
}
// Start transition, make next slide active
$container.addClass(self.settings.orbit_transition_class);
$active_slide.removeClass(self.settings.active_slide_class);
$($slides[active_index]).addClass(self.settings.active_slide_class);
// Make next bullet active
var $bullets = $container.siblings('.' + self.settings.bullets_container_class);
if ($bullets.length === 1) {
$bullets.children().removeClass(self.settings.bullets_active_class);
$($bullets.children()[active_index-1]).addClass(self.settings.bullets_active_class);
}
var new_margin_left = '-' + (active_index * 100) + '%';
// Check to see if animation will occur, otherwise perform
// callbacks manually
$slides_container.trigger('orbit:before-slide-change');
if ($slides_container.css('marginLeft') === new_margin_left) {
$container.removeClass(self.settings.orbit_transition_class);
$slides_container.trigger('orbit:after-slide-change', [{slide_number: active_index, total_slides: $slides_container.children().length - 2}]);
callback();
} else {
$slides_container.animate({
'marginLeft' : new_margin_left
}, self.settings.animation_speed, 'linear', function() {
$container.removeClass(self.settings.orbit_transition_class);
$slides_container.trigger('orbit:after-slide-change', [{slide_number: active_index, total_slides: $slides_container.children().length - 2}]);
callback();
});
}
}
};
}(Foundation.zj, this, this.document));
|
PypiClean
|
/riot-0.17.7.tar.gz/riot-0.17.7/README.md
|
# riot
A Python virtual environment constructor and command runner.
[](https://ddriot.readthedocs.io/)
[](https://pypi.org/project/riot/)
[](https://pypi.org/project/riot/)
[](https://github.com/DataDog/riot/actions?query=workflow%3ACI)
[](https://codecov.io/gh/DataDog/riot)
## installation
Install from PyPI:
```sh
pip install riot
```
or install from source:
```sh
pip install git+https://github.com/DataDog/riot.git
```
|
PypiClean
|
/mwa_pb-1.4.tar.gz/mwa_pb-1.4/mwa_pb/suppress.py
|
import logging
import numpy
import skyfield.api as si
from . import primarybeammap_tant
from . import mwa_sweet_spots
from . import skyfield_utils as su
# configure the logging
logging.basicConfig()
LOGGER = logging.getLogger('pb.suppress')
LOGGER.setLevel(logging.INFO)
def get_best_gridpoints(gps_start,
obs_source_ra_deg,
obs_source_dec_deg,
avoid_source_ra_deg,
avoid_source_dec_deg,
model="analytic",
min_gain=None,
max_beam_distance_deg=360,
channel=145,
verb_level=1,
logger=LOGGER,
duration=3600,
step=120,
min_elevation=30.00):
su.init_data()
frequency = channel * 1.28
if model not in ['analytic', 'advanced', 'full_EE', 'full_EE_AAVS05']:
logger.error("Model %s not found\n" % model)
gp_numbers = list(mwa_sweet_spots.all_grid_points.keys())
gp_numbers.sort()
gp_azes = numpy.array([mwa_sweet_spots.all_grid_points[i][1] for i in gp_numbers])
gp_alts = numpy.array([mwa_sweet_spots.all_grid_points[i][2] for i in gp_numbers])
gp_delays = [mwa_sweet_spots.all_grid_points[i][4] for i in gp_numbers]
obs_source = si.Star(ra=si.Angle(degrees=obs_source_ra_deg),
dec=si.Angle(degrees=obs_source_dec_deg))
avoid_source = si.Star(ra=si.Angle(degrees=avoid_source_ra_deg),
dec=si.Angle(degrees=avoid_source_dec_deg))
freq = frequency * 1e6
tracklist = [] # List of (starttime, duration, az, el) tuples
for starttime in range(int(gps_start), int(gps_start + duration), int(step)):
t = su.time2tai(starttime)
observer = su.S_MWAPOS.at(t)
obs_source_apparent = observer.observe(obs_source).apparent()
obs_source_alt, obs_source_az, _ = obs_source_apparent.altaz()
if obs_source_alt.degrees < min_elevation:
logger.debug("Source at %.2f [deg] below minimum elevation = %.2f [deg] at this time, skip this timestep." % (obs_source_alt.degrees,
min_elevation))
continue # Source below pointing horizon at this time, skip this timestep.
if min_gain is None:
current_min_gain = 0.5
if obs_source_alt.degrees < 50:
current_min_gain = 0.1
else:
current_min_gain = min_gain
avoid_source_apparent = observer.observe(avoid_source).apparent()
avoid_source_alt, avoid_source_az, _ = avoid_source_apparent.altaz()
if avoid_source_alt.degrees < 0.0:
tracklist.append((starttime, step, obs_source_az.degrees, obs_source_alt.degrees))
logger.debug("Avoided source below TRUE horizon, just use actual target az/alt for this timestep.")
continue # Avoided source below TRUE horizon, just use actual target az/alt for this timestep.
dist_deg = obs_source_apparent.separation_from(avoid_source_apparent).degrees
logger.debug("Observed source at (az,alt) = (%.4f,%.4f) [deg]" % (obs_source_az.degrees, obs_source_alt.degrees))
logger.debug("Avoided source at (az,alt) = (%.4f,%.4f) [deg]" % (avoid_source_az.degrees, avoid_source_alt.degrees))
logger.debug("Anglular distance = %.2f [deg]" % (dist_deg))
logger.debug("Gps time = %d" % su.tai2gps(t))
gp_positions = observer.from_altaz(alt_degrees=gp_alts,
az_degrees=gp_azes,
distance=si.Distance(au=9e90))
dist_obs_degs = obs_source_apparent.separation_from(gp_positions).degrees
dist_avoid_degs = avoid_source_apparent.separation_from(gp_positions).degrees
# select gridpoints within given angular distance :
best_gridpoint = None
r_max = -1000
best_gain_obs = 0
best_gain_avoid = 0
skipped_too_far = 0
skipped_gain_too_low = 0
for i in range(len(gp_numbers)):
gpnum = gp_numbers[i]
dist_obs = dist_obs_degs[i]
dist_avoid = dist_avoid_degs[i]
if verb_level > 1:
outstring = "\n\t\ttesting gridpoint %d, dist_obs_deg = %.2f [deg], dist_avoid_deg = %.2f [deg]"
logger.debug(outstring % (gpnum, dist_obs, dist_avoid))
# if dist_obs_deg < options.max_beam_distance_deg and dist_avoid_deg < options.max_beam_distance_deg :
if dist_obs < max_beam_distance_deg:
beam_obs = primarybeammap_tant.get_beam_power(gp_delays[i],
freq,
model=model,
pointing_az_deg=obs_source_az.degrees,
pointing_za_deg=90 - obs_source_alt.degrees,
zenithnorm=True)
beam_avoid = primarybeammap_tant.get_beam_power(gp_delays[i],
freq,
model=model,
pointing_az_deg=avoid_source_az.degrees,
pointing_za_deg=90 - avoid_source_alt.degrees,
zenithnorm=True)
gain_XX_obs = beam_obs['XX']
gain_XX_avoid = beam_avoid['XX']
r = gain_XX_obs / gain_XX_avoid
if r > 1.00 and gain_XX_obs > current_min_gain:
outstring = "\t\tSelected gridpoint = %d at (az,elev) = (%.4f,%.4f) [deg] at (distances %.4f and %.4f deg) "
outstring += "-> gain_obs=%.4f and gain_avoid=%.4f -> gain_obs/gain_avoid = %.4f"
logger.debug(outstring % (gpnum, gp_azes[i], gp_alts[i], dist_obs, dist_avoid, gain_XX_obs, gain_XX_avoid, r))
if r > r_max:
best_gridpoint = i
r_max = r
best_gain_obs = gain_XX_obs
best_gain_avoid = gain_XX_avoid
else:
skipped_gain_too_low = skipped_gain_too_low + 1
if verb_level > 1:
outstring = "\t\tSKIPPED gridpoint = %d at (az,elev) = (%.4f,%.4f) [deg] at (distances %.4f and %.4f deg) "
outstring += "-> gain_obs=%.4f (vs. min_gain=%.2f) and gain_avoid=%.4f -> gain_obs/gain_avoid = %.4f"
logger.debug(outstring % (gpnum,
gp_azes[i],
gp_alts[i],
dist_obs,
dist_avoid,
gain_XX_obs,
current_min_gain,
gain_XX_avoid, r))
else:
skipped_too_far = skipped_too_far + 1
if verb_level > 1:
outstring = "\t\t\tskipped as dist_obs_deg = %.2f [deg] and dist_avoid_deg = %.2f [deg] , one > "
outstring += "max_beam_distance_deg = %.2f [deg]"
logger.debug(outstring % (dist_obs, dist_avoid, max_beam_distance_deg))
logger.debug("Number of gridpoints skipped due to gain lower than minimum (=%.2f) = %d" % (current_min_gain,
skipped_gain_too_low))
outstring = "Number of gridpoints skipped due to being further than limit ( max_beam_distance_deg = %.2f [deg] ) = %d"
logger.debug(outstring % (max_beam_distance_deg, skipped_too_far))
if best_gridpoint is not None:
outstring = "Best gridpoint %d at (az,alt)=(%.4f,%.4f) [deg] at %s UTC to observe has ratio = %.2f = %.8f / %.8f\n"
logger.info(outstring % (gp_numbers[best_gridpoint],
gp_azes[best_gridpoint],
gp_alts[best_gridpoint],
t.utc_iso(), r_max,
best_gain_obs,
best_gain_avoid))
tracklist.append((starttime, step, gp_azes[best_gridpoint], gp_alts[best_gridpoint]))
return tracklist
def get_best_gridpoints_suppress_sun(gps_start,
obs_source_ra_deg,
obs_source_dec_deg,
model="analytic",
min_gain=None,
max_beam_distance_deg=30,
channel=145,
verb_level=1,
logger=LOGGER,
duration=3600,
step=120,
min_elevation=30.00):
t = su.time2tai(gps_start)
sunra, sundec, _ = su.S_MWAPOS.at(t).observe(su.PLANETS['Sun']).apparent().radec()
return get_best_gridpoints(gps_start=gps_start,
obs_source_ra_deg=obs_source_ra_deg,
obs_source_dec_deg=obs_source_dec_deg,
avoid_source_ra_deg=sunra.hours * 15.0,
avoid_source_dec_deg=sundec.degrees,
model=model,
min_gain=min_gain,
max_beam_distance_deg=max_beam_distance_deg,
channel=channel,
verb_level=verb_level,
logger=logger,
duration=duration,
step=step,
min_elevation=min_elevation)
# Keep old name in case old code still uses it.
get_best_gridpoints_supress_sun = get_best_gridpoints_suppress_sun
def get_sun_elevation(gps_start=None):
t = su.time2tai(gps_start)
sunalt, sunaz, _ = su.S_MWAPOS.at(t).observe(su.PLANETS['Sun']).apparent().altaz()
return sunalt.degrees
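# --- Usage sketch (illustrative, not part of the original module) ---
# A hedged example of computing a Sun-suppressing pointing track; the GPS
# start time and source coordinates are arbitrary demo values, and running
# this needs the skyfield ephemeris data used by skyfield_utils.
if __name__ == "__main__":
    tracklist = get_best_gridpoints_suppress_sun(gps_start=1220000000,
                                                 obs_source_ra_deg=128.84,
                                                 obs_source_dec_deg=-45.18,
                                                 duration=1800,
                                                 step=120)
    for starttime, steplen, az_deg, alt_deg in tracklist:
        print(starttime, steplen, az_deg, alt_deg)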
|
PypiClean
|
/compliance-trestle-2.2.1.tar.gz/compliance-trestle-2.2.1/trestle/common/common_types.py
|
# Copyright (c) 2020 IBM Corp. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Special types are defined here."""
from typing import TypeVar
import trestle.oscal.component as comp
import trestle.oscal.profile as prof
import trestle.oscal.ssp as ossp
from trestle.core.base_model import OscalBaseModel
from trestle.oscal.assessment_plan import AssessmentPlan
from trestle.oscal.assessment_results import AssessmentResults
from trestle.oscal.catalog import Catalog, Control, Group
from trestle.oscal.common import AssessmentPart, Part, Resource
from trestle.oscal.poam import PlanOfActionAndMilestones
# model types containing uuids that should not regenerate
FixedUuidModel = Resource
TopLevelOscalModel = TypeVar(
'TopLevelOscalModel',
AssessmentPlan,
AssessmentResults,
Catalog,
comp.ComponentDefinition,
PlanOfActionAndMilestones,
prof.Profile,
ossp.SystemSecurityPlan
)
OBT = TypeVar('OBT', bound=OscalBaseModel)
TG = TypeVar('TG')
TG2 = TypeVar('TG2')
TypeWithProps = TypeVar(
'TypeWithProps',
Control,
Part,
AssessmentPart,
comp.Statement,
ossp.Statement,
comp.ImplementedRequirement,
ossp.ImplementedRequirement
)
TypeWithParts = TypeVar('TypeWithParts', Control, Part, Group, prof.Add, prof.Group)
TypeWithByComps = TypeVar(
'TypeWithByComps', ossp.ImplementedRequirement, ossp.Statement, comp.ImplementedRequirement, comp.Statement
)
TypeWithSetParams = TypeVar(
'TypeWithSetParams',
ossp.ImplementedRequirement,
ossp.ByComponent,
ossp.ControlImplementation,
comp.ImplementedRequirement,
comp.ControlImplementation,
prof.Modify
)
TypeWithParamId = TypeVar('TypeWithParamId', ossp.SetParameter, prof.SetParameter, comp.SetParameter)
|
PypiClean
|
/covmatic_robotmanager-0.0.6-py3-none-any.whl/covmatic_robotmanager/config.py
|
import argparse
import configargparse
import os
import logging
from .singleton import SingletonMeta
from .utils import FunctionCaseStartWith
desktop_file = FunctionCaseStartWith(os.sys.platform)
@desktop_file.case('linux')
def desktop_file_linux():
return os.path.expanduser("~/.local/share/applications/covmatic-robotmanager.desktop")
@desktop_file.case(('win32', 'cygwin'))
def desktop_file_win():
import winshell
return os.path.join(winshell.desktop(), "Covmatic Robotmanager server.lnk")
@desktop_file.case('') # all other
def desktop_file_other():
return ""
class Config(argparse.Namespace, metaclass=SingletonMeta):
_logger = logging.getLogger("robotmanager.config")
@classmethod
def get_base_config_folder(cls):
config_dir = os.path.join(os.path.expanduser("~"), ".covmatic")
cls.create_directory(config_dir)
return config_dir
@classmethod
def get_base_log_folder(cls):
log_dir = os.path.join(os.path.expanduser("~"), ".covmatic", "log")
cls.create_directory(log_dir)
return log_dir
@classmethod
def get_config_file_path(cls) -> str:
return os.path.join(cls.get_base_config_folder(), "robotmanager.conf")
@classmethod
def get_default_positions_file_path(cls) -> str:
return os.path.join(cls.get_base_config_folder(), "positions.json")
@classmethod
def get_desktop_file_path(cls) -> str:
return desktop_file()
@classmethod
def parse(cls, description):
cls._logger.info("Checking for arguments in config file {}".format(cls.get_config_file_path()))
parser = configargparse.ArgParser(description=description,
default_config_files=[cls.get_config_file_path()],
add_config_file_help=True)
parser.add_argument('-E', '--eva-ip', metavar='address', required=True, help="Eva hostname or ip address")
parser.add_argument('-T', '--eva-token', metavar='token', required=True, help="Eva token")
parser.add_argument('-P', '--port', type=int, metavar="port", default=5000, help="Server port for requests")
parser.add_argument('--positions-filepath', metavar="path", type=str, default=cls.get_default_positions_file_path(), help="JSON File to save positions data")
parser.add_argument('--test-only', dest="test_only", action="store_true", help="enable test-only execution")
parser.add_argument('--debug-mode', dest="debug_mode", action="store_true", help="enable debug mode to show unhandled exceptions.")
parser.add_argument('-L', '--log-folder', dest="log_folder", type=str, default=cls.get_base_log_folder(), help="Folder to store logs in")
return cls.reset(**parser.parse_known_args()[0].__dict__)
@classmethod
def pull(cls, description):
if not cls().__dict__:
cls.parse(description)
return cls()
@classmethod
def create_directory(cls, directory):
try:
os.makedirs(directory)
cls._logger.info("Created directory {}".format(directory))
except OSError:
cls._logger.debug("Directory {} already existing".format(directory))
|
PypiClean
|
/apache_tvm-0.12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl/tvm/relay/testing/lstm.py
|
from tvm import relay
from . import layers
from .init import create_workload
def lstm_cell(num_hidden, batch_size=1, dtype="float32", name=""):
"""Long-Short Term Memory (LSTM) network cell.
Parameters
----------
num_hidden : int
Number of units in output symbol.
batch_size : int
Batch size (length of states).
Returns
-------
result : tvm.relay.Function
A Relay function that evaluates an LSTM cell.
The function takes in a tensor of input data, a tuple of two
states, and weights and biases for dense operations on the
inputs and on the state. It returns a tuple with two members,
an output tensor and a tuple of two new states.
"""
builder = relay.ScopeBuilder()
input_type = relay.TensorType((batch_size, num_hidden), dtype)
weight_type = relay.TensorType((4 * num_hidden, num_hidden), dtype)
bias_type = relay.TensorType((4 * num_hidden,), dtype)
dense_type = relay.TensorType((batch_size, 4 * num_hidden), dtype)
slice_type = relay.TupleType([input_type, input_type, input_type, input_type])
ret_type = relay.TupleType([input_type, relay.TupleType([input_type, input_type])])
inputs = relay.Var("inputs", input_type)
states = relay.Var("states", relay.TupleType([input_type, input_type]))
i2h_weight = relay.Var("i2h_weight", weight_type)
i2h_bias = relay.Var("i2h_bias", bias_type)
h2h_weight = relay.Var("h2h_weight", weight_type)
h2h_bias = relay.Var("h2h_bias", bias_type)
i2h = builder.let(
("i2h", dense_type),
layers.dense_add_bias(
data=inputs, units=num_hidden * 4, weight=i2h_weight, bias=i2h_bias, name="%si2h" % name
),
)
h2h = builder.let(
("h2h", dense_type),
layers.dense_add_bias(
data=relay.TupleGetItem(states, 0),
units=num_hidden * 4,
weight=h2h_weight,
bias=h2h_bias,
name="%sh2h" % name,
),
)
gates = builder.let(("gates", dense_type), relay.add(i2h, h2h))
slice_gates = builder.let(
("slice_gates", slice_type), relay.split(gates, indices_or_sections=4, axis=1).astuple()
)
in_gate = builder.let(
("in_gate", input_type), relay.sigmoid(relay.TupleGetItem(slice_gates, 0))
)
forget_gate = builder.let(
("forget_gate", input_type), relay.sigmoid(relay.TupleGetItem(slice_gates, 1))
)
in_transform = builder.let(
("in_transform", input_type), relay.tanh(relay.TupleGetItem(slice_gates, 2))
)
out_gate = builder.let(
("out_gate", input_type), relay.sigmoid(relay.TupleGetItem(slice_gates, 3))
)
next_c = builder.let(
("next_c", input_type),
relay.add(
relay.multiply(forget_gate, relay.TupleGetItem(states, 1)),
relay.multiply(in_gate, in_transform),
),
)
next_h = builder.let(("next_h", input_type), relay.multiply(out_gate, relay.tanh(next_c)))
ret = builder.let(("ret", ret_type), relay.Tuple([next_h, relay.Tuple([next_h, next_c])]))
builder.ret(ret)
body = builder.get()
return relay.Function(
[inputs, states, i2h_weight, i2h_bias, h2h_weight, h2h_bias], body, ret_type
)
def get_net(iterations, num_hidden, batch_size=1, dtype="float32"):
"""Constructs an unrolled RNN with LSTM cells"""
input_type = relay.TensorType((batch_size, num_hidden), dtype)
weight_type = relay.TensorType((4 * num_hidden, num_hidden), dtype)
bias_type = relay.TensorType((4 * num_hidden,), dtype)
state_type = relay.TupleType([input_type, input_type])
cell_type = relay.TupleType([input_type, state_type])
builder = relay.ScopeBuilder()
zeros = builder.let(("zeros", input_type), relay.zeros((batch_size, num_hidden), dtype))
init_states = builder.let(("init_states", state_type), relay.Tuple([zeros, zeros]))
states = init_states
out = None
for i in range(iterations):
inputs = relay.Var("data", input_type)
i2h_weight = relay.Var("i2h_%s_weight" % i, weight_type)
i2h_bias = relay.Var("i2h_%i_bias" % i, bias_type)
h2h_weight = relay.Var("h2h_%s_weight" % i, weight_type)
h2h_bias = relay.Var("h2h_%s_bias" % i, bias_type)
cell_fn = lstm_cell(num_hidden, batch_size, dtype, "lstm_%s" % i)
call = builder.let(
("call_%s" % i, cell_type),
relay.Call(cell_fn, [inputs, states, i2h_weight, i2h_bias, h2h_weight, h2h_bias]),
)
new_out = builder.let(("out_%s" % i, input_type), relay.TupleGetItem(call, 0))
new_states = builder.let(("states_%s" % i, state_type), relay.TupleGetItem(call, 1))
states = new_states
out = new_out
builder.ret(out)
body = builder.get()
args = relay.analysis.free_vars(body)
return relay.Function(args, body, input_type)
def get_workload(iterations, num_hidden, batch_size=1, dtype="float32"):
"""Get benchmark workload for an LSTM RNN.
Parameters
----------
iterations : int
The number of iterations in the desired LSTM RNN.
num_hidden : int
The size of the hidden state
batch_size : int, optional (default 1)
The batch size used in the model
dtype : str, optional (default "float32")
The data type
Returns
-------
mod : tvm.IRModule
The relay module that contains a LSTM network.
params : dict of str to NDArray
The parameters.
"""
net = get_net(iterations, num_hidden, batch_size, dtype)
return create_workload(net)
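# --- Usage sketch (illustrative, not part of the original module) ---
# Build a small unrolled LSTM workload and inspect it; the sizes below are
# arbitrary demo values.
if __name__ == "__main__":
    mod, params = get_workload(iterations=4, num_hidden=32)
    print(mod)                    # Relay IRModule for the unrolled RNN
    print(sorted(params.keys()))  # randomly initialized weights and biases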
|
PypiClean
|
/echarts-china-counties-pypkg-0.0.2.tar.gz/echarts-china-counties-pypkg-0.0.2/echarts_china_counties_pypkg/resources/echarts-china-counties-js/3332cd94a38f960d9a73bf2afbdd06f8.js
|
(function (root, factory) {if (typeof define === 'function' && define.amd) {define(['exports', 'echarts'], factory);} else if (typeof exports === 'object' && typeof exports.nodeName !== 'string') {factory(exports, require('echarts'));} else {factory({}, root.echarts);}}(this, function (exports, echarts) {var log = function (msg) {if (typeof console !== 'undefined') {console && console.error && console.error(msg);}};if (!echarts) {log('ECharts is not Loaded');return;}if (!echarts.registerMap) {log('ECharts Map is not loaded');return;}echarts.registerMap('巴里坤哈萨克自治县', {"type":"FeatureCollection","features":[{"type":"Feature","id":"650521","properties":{"name":"巴里坤哈萨克自治县","cp":[93.010383,43.599929],"childNum":1},"geometry":{"type":"Polygon","coordinates":["@@IBKB@BCBC@C@AACC@A@@@A@A@CAAA@G@@@GACAGAABA@KDGHA@ABCDE@KA@@C@GBA@K@E@I@EAC@E@CDGBGAI@ABCDCBA@GAEBIBA@@AA@@AGIEAA@@@E@I@IBEBKBCBABA@@@EDEDC@A@EBA@EDADCDA@QAIBIBCA@@A@@@G@AAEBA@A@A@@BGFEFCBCBA@A@E@EAEBIDODEFEHEJ@HCBG@ACAAACA@AACBGDE@CC@@ICI@G@GEGAKBOBICGCE@GCMCIBEHEBIAK@G@GBCDIBC@@@C@EAGC@A@EAGGEOEI@CAE@EBKBCFCBGBI@G@KBGBEBOBE@OAEAE@CBEBG@K@E@IBCBIDGBCDAFEDIBIBEFIBO@KAECICECIAGEEAGBGBY@I@I@IAGGGCICEEMEGAA@GAGBSCGAEC@CIDKDI@G@A@CCCAW@WBE@K@EAAC@EDG@@DE@EEBODEBG@@@M@QBIBEBQDE@AFGFKDGDGBI@E@CAC@G@KCG@CG@ECEIGKEK@CDCFMBKECAI@ECKCIEKC@@ADADEDICGAI@GBGDIAI@C@GFEBI@IBEAGACCGAEDI@IAG@CCCAO@O@GDM@GAGBEFMBIAK@MBGBEBMAEAI@EA@ABCJE@CBEBAAAOCMDOBI@KCIAGAIBO@IBWBMDEBCDEDI@KDKBC@E@C@GBCAIBMBK@KBOBSFGDGFGDCBKBMDCBKBMDI@GBIFEDGBKDIF{PIDCFSFKHMBGDKNIPILQPOJMJMFWFKBKDKDCBOFADABBFSLUJEFQFCBK@KBMDOFEDMBWHCBODKFA@KDEDCBEBK@M@KB@@KBMBG@IAA@@@@@@@A@@@@@AB@@EDABCBGBGBABA@A@A@@AEAMEKEKGCCCACASGEA@ACACAKKA@AAA@@@A@A@AB@@AB@@BBBDFHBB@@BBBB@@B@@B@@BD@@@B@@@@@BAB@@@@@B@@@B@@@B@@@B@@@BGDABAB@@@BAF@B@@AB@BA@A@@BA@E@@BA@@@@@ABA@@@@BABAD@@@@@@@@@@@@@@C@@@@@@@@@A@AB@B@@@@@@@@A@@@A@@@@@@@@@A@@@@@@A@@@@AB@@A@EB@@@@A@A@A@A@ABC@GBA@@@ABA@A@@@A@@@A@A@A@CA@@A@@@A@@@ABA@@@@@A@A@@@A@@@@@CA@@A@@@A@A@C@MB@@A@@B@@@@@@@@@BA@@BA@@@@@@@A@@@@@@@@@@A@@@@@@@@A@@@@@A@@B@@@@@@@@@B@@@B@@@@@@@@@@A@@@A@A@@@A@@B@@A@@@@B@@@@@B@@@@@@@@@@@@@@@@@@A@@@A@@@@@A@@@@@A@A@A@A@@@@@A@@@@@@B@@@B@@@@A@@@@@@@A@@@AA@@@@@@@@@@A@@@@B@@@@@@@@A@@BA@@@@@@@@@@@A@@@@@BB@@@@@B@@B@@@@@@B@@@@@@@@BB@@@@@@@@@@@B@@ABAB@BA@@@@@@@@@@B@@@@@B@B@@ABABA@ABAB@B@@A@@@A@G@@@A@A@@BA@@@ABAA@BAAAB@@A@A@@@@@@@@@@@@@@@BB@@@@@@@@B@@@@@A@@@@@@@@@A@@@EBABIFA@ABE@A@A@CBA@ABC@C@E@C@CA@@A@A@CBABCBEDCBEBA@ABABA@CBA@GBABMBA@ABA@ABA@C@C@E@A@CB@@A@ABA@A@ABA@@@AB@@AB@@A@@@A@@@CB@@C@@@@@@@@BB@B@D@B@@@BB@@@@@B@@@@B@B@@@B@@@B@BBBB@@BBB@@@@BB@@@BAD@B@@A@@B@@@@@B@B@BBB@@@B@BAD@@@B@@A@@B@B@B@B@@@BBB@@BD@@BB@@@B@@@B@@@D@DBBAB@BBB@@@B@B@BB@@D@D@D@D@@@B@B@B@B@B@B@B@B@@@B@BBB@@A@@@@@@B@B@B@D@B@@@BB@@B@D@@@@BB@B@BB@@B@@@B@BBB@BBB@B@@@B@@AB@@@B@B@@@B@B@B@@@B@BBB@BB@@B@D@@@B@B@@@B@@@B@@@DBB@DBD@@@B@B@BBD@@@B@BB@@B@@@B@B@B@B@BBD@@BB@@@@@B@@@B@B@B@D@BAB@B@FB@@D@B@D@B@DA@@@@B@@@DA@@B@@@@@@@@@@@@@B@@@@@@BB@@@@@@@FBB@B@BBB@B@@@@@@@@@@@@@B@@@@@B@@@@@B@B@@@B@@@B@B@@BB@B@BBB@D@BBB@@BB@@B@@B@B@@@B@@@BBB@BBBB@@AB@@@@@B@@@@@@@@B@B@DBB@BBB@BB@@@@@B@@@B@@BB@@B@B@BBB@@@BBB@B@@BB@@@@D@D@F@@@B@@ADAB@B@B@F@@@B@BBBBD@B@@@B@@@B@@A@ABOJABAB@@@@@B@B@@@BAB@@ADCDEJCDAD@@CHEH@DA@@@@BA@AB@@A@@BEHEJ@@A@AB@B@@AB@D@@@B@@AJAB@B@@BD@F@@AB@DEH@BAB@B@B@F@J@@@B@@AB@@@BA@@@A@@@A@@@@BA@@B@@AB@B@@@B@@BD@B@B@@@@BB@@B@@B@@@@A@ABA@ABA@@@@@@B@@@B@@@B@@@B@@@BA@@@A@A@A@A@A@A@C@A@A@@@A@@@A@@@@B@@@@BD@@@@A@ABABCBA@CB@@A@ABA@ABCBKFCBCB@@ABABABA@A@@BA@@BA@ADC@C@AB@@A@@BA@@@A@@@@BA@@BA@ABA@AB@@@@@B@@@@@B@@@@@@@@A@@@A@@@A@@@A@AB@@A@@@A@A@@@ABA@ABA@ABA@@@@@@B@@@B@@@B@@A@@@A@A@@BA@@@ABA@@@@@ABAB@BA@AB@BA@@BA@ABAB
@BABA@AB@@@@A@@@A@AB@@CB@@@@@B@@@B@@@BAB@@@B@@@B@@A@ABABCBA@A@A@AAA@@@@B@@@@AB@BA@AB@@A@@B@@A@@@@@@@A@A@A@@@A@ABEBABA@ABA@@@@B@BC@@BA@B@@B@@@@@@@BA@A@ABAB@@@@@B@@@BA@@@A@@BA@@B@BA@CBC@@@A@A@@@A@ABA@CBABABA@@@A@@@A@ABABAD@B@BA@@B@BA@@B@BAB@@A@AB@BA@A@A@@B@B@@A@@@A@A@@@A@A@AB@B@@@@@BB@@@@@AB@@ADA@@B@@@@A@@@@@A@@@@BA@@BC@A@@BA@A@@@@@@B@@@@@BABA@@@A@@@ABA@@@@@@BB@B@BB@@B@@@@@ABABADA@@B@B@B@@AD@@A@@@CB@@A@AB@@AB@@ADA@@@ABA@A@A@AB@@@B@@EDAB@@A@@@@@A@A@CAIFCD@@@@BJHVJ`DNBF@JBPDVBNBJ@HBJBLBHBJ@D@HBLBJ@BBJBF@D@FBH@RBb@@BLAF@L@l@NAl@^@P@R@L@F@JAD@@EPGRITKXGRGR@@CHGNM`GREJBFDPDRHZH`FTJFHFJFNLTLHFDHJV@JBH@V@LBJ@@@L@P@L@JBVBNBNNDLDFHFLFJDHBBDFBHDDHLHPHPLTFJDF@BBBBB@@@D@B@DAJ@H@L@F@J@DAB@B@B@@BB@D@B@BAD@B@DAD@@@@@BAB@D@B@DABAD@BAB@BAB@B@B@B@B@@@BBB@@A@@B@@@@BBBD@@BBAD@B@@BBBB@@@@@@AB@@A@@B@@@B@BAD@@ABA@@@@BABAB@BAB@@@D@BAF@F@D@@@@@B@@BB@B@B@B@B@D@@AB@D@BA@@BAB@B@@AD@@BBDFLPLPBBBDBDBBB@BDFDj`NLVP`ZjbLHPL@@@BFBB@@@@@B@@@DB@@BBB@B@B@@@BB@@BBBB@BBB@D@@@@AD@@@@@@PHHDHDD@@BB@@AB@D@D@@@@@BABA@@DA@AB@B@B@@@@@BBBBFFDFHHDFBBBB@@@D@D@B@BA@@B@B@B@BBDBBDF@@FFDDBDBB@BDD@B@B@BABAB@BA@ABABCBCBAB@BADAB@BAD@B@@@BBB@D@B@B@B@H@@@BAB@@@D@B@D@B@F@J@DBD@F@D@@ABAD@BABCDCDCDABA@@BAB@D@BA@@BCDAD@D@@AFAD@BB@@@DB@@BBBB@@ABADABADEFCFA@EHGL@@CBABAB@@@B@D@@ABADABADAF@DADABADA@CDIFEBCB@BC@@@@B@BBD@D@D@FBF@B@F@F@FBB@B@F@B@H@DBD@H@F@F@DAB@BBB@D@BBB@DBDBDBBDBFDJBhOp[\\N^EfKTI@bCfCrJ¶RvHz@XAEj@D¢B²FXDVCEPGz]fOpYlSaVP\\EfBfJZDLCzYpFBfESXCPCJ@PFfLjPAlHrBHb@tNvNVVIIbCdLfDhMfEVgr@^A\\[bPVDH^DZKtIjFxHLCT@VFJAnMjM~QpIfIR@VAB\\O\\AZ@PMXAXKXQ^OnFZA^JrMBZXKxWXG¬ipMU\\M^MdADeLSZK¦eúºm¤gY etY^StYlWbIOTGXm\\Yf]j]hQlMRKbwL]EcTMLpG´QdMredOv[bGStguTKPSjqPU\\WZABbEhORGMCWESEUCEAeICAyOQcIKCkKUEEAYEUEKCMCwO
QSaGcIMCIAQIQKcUWKeW[Q[MMI]Q[O_SAA]gW_WaS[U[@@U_MSMQGKBCDM@U@@@GAAIGQGQIAAMEUCKGKQACECOKUGOGMMMEMEIIIOKMGIEEMEIEGGIMKKUMo]WIOIMGIAOEAAEAMEMK[MYOAASGUKOSAAGIAAIKISCEGGGCAE@AGG@@ECCACA@EA@EEGCACAAC@@@@CCCAC@CCEGEAA@AAAA@CAG@A@ECAAEACAA@UEIBKBCBI@A@EBEBE@C@AAC@CBEBKCM@C@C@C@A@KCEACAGA@@GO@K@C@CFGFCHGDA@@@A@A@KAGECC@CCAICGKCG@GC@@GCIGAACGC@CGEEGAA@EAA@AAAAAAAA@@]KCAAABAHEDABCAC@@CCCIGEEAMCKEEEAE@C@GCAI@QBM@MAGESKEACEDOAcDMNgUOSHo`SDgmAP[aqm_Mu]KOUQMKYKEC@K@]AGBA@AD@BAHADCBGBCBCD@BAJ@DCBABC@EBCDGFCFADCFEDGDAD@DBBADADGBC@A@ACACMMEAOCI@G@ADAB@BHHBB@BADABCBC@IAEBEBCBIJCBC@E@AA@@EBABABCD@BDNABA@A@A@A@GD@BAD@B@BA@ABC@MBGAA@CBKDA@C@CAA@A@A@KDCB@BABEBABG@EBIDCBGAGBCBC@IB"],"encodeOffsets":[[95432,44511]]}}],"UTF8Encoding":true});}));
|
PypiClean
|
/pulumi_azure_nextgen-0.6.2a1613157620.tar.gz/pulumi_azure_nextgen-0.6.2a1613157620/pulumi_azure_nextgen/securityinsights/v20190101preview/action.py
|
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = ['Action']
class Action(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
action_id: Optional[pulumi.Input[str]] = None,
etag: Optional[pulumi.Input[str]] = None,
logic_app_resource_id: Optional[pulumi.Input[str]] = None,
operational_insights_resource_provider: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
rule_id: Optional[pulumi.Input[str]] = None,
trigger_uri: Optional[pulumi.Input[str]] = None,
workspace_name: Optional[pulumi.Input[str]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
Action for alert rule.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] action_id: Action ID
:param pulumi.Input[str] etag: Etag of the azure resource
:param pulumi.Input[str] logic_app_resource_id: Logic App Resource Id, /subscriptions/{my-subscription}/resourceGroups/{my-resource-group}/providers/Microsoft.Logic/workflows/{my-workflow-id}.
:param pulumi.Input[str] operational_insights_resource_provider: The namespace of workspaces resource provider- Microsoft.OperationalInsights.
:param pulumi.Input[str] resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive.
:param pulumi.Input[str] rule_id: Alert rule ID
:param pulumi.Input[str] trigger_uri: Logic App Callback URL for this specific workflow.
:param pulumi.Input[str] workspace_name: The name of the workspace.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
if action_id is None and not opts.urn:
raise TypeError("Missing required property 'action_id'")
__props__['action_id'] = action_id
__props__['etag'] = etag
__props__['logic_app_resource_id'] = logic_app_resource_id
if operational_insights_resource_provider is None and not opts.urn:
raise TypeError("Missing required property 'operational_insights_resource_provider'")
__props__['operational_insights_resource_provider'] = operational_insights_resource_provider
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
if rule_id is None and not opts.urn:
raise TypeError("Missing required property 'rule_id'")
__props__['rule_id'] = rule_id
__props__['trigger_uri'] = trigger_uri
if workspace_name is None and not opts.urn:
raise TypeError("Missing required property 'workspace_name'")
__props__['workspace_name'] = workspace_name
__props__['name'] = None
__props__['type'] = None
__props__['workflow_id'] = None
super(Action, __self__).__init__(
'azure-nextgen:securityinsights/v20190101preview:Action',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'Action':
"""
Get an existing Action resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
return Action(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def etag(self) -> pulumi.Output[Optional[str]]:
"""
Etag of the action.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="logicAppResourceId")
def logic_app_resource_id(self) -> pulumi.Output[Optional[str]]:
"""
Logic App Resource Id, /subscriptions/{my-subscription}/resourceGroups/{my-resource-group}/providers/Microsoft.Logic/workflows/{my-workflow-id}.
"""
return pulumi.get(self, "logic_app_resource_id")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Azure resource name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Azure resource type
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="workflowId")
def workflow_id(self) -> pulumi.Output[Optional[str]]:
"""
The name of the logic app's workflow.
"""
return pulumi.get(self, "workflow_id")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
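# A minimal usage sketch (not part of the generated SDK): it assumes a Pulumi
# program with valid Azure credentials; the resource names and IDs below are
# hypothetical placeholders.
#
#   import pulumi
#   from pulumi_azure_nextgen.securityinsights.v20190101preview import Action
#
#   action = Action("exampleAction",
#       action_id="my-action-id",
#       operational_insights_resource_provider="Microsoft.OperationalInsights",
#       resource_group_name="my-resource-group",
#       rule_id="my-rule-id",
#       workspace_name="my-workspace",
#       logic_app_resource_id="/subscriptions/{my-subscription}/resourceGroups/{my-resource-group}/providers/Microsoft.Logic/workflows/{my-workflow-id}",
#       trigger_uri="https://example.invalid/my-workflow-trigger")
#   pulumi.export("workflow_id", action.workflow_id)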
|
PypiClean
|
/Pizza.py-0.1.0.tar.gz/Pizza.py-0.1.0/pizza/gnu.py
|
# gnu tool
oneline = "Create plots via GnuPlot plotting program"
docstr = """
g = gnu() start up GnuPlot
g.stop() shut down GnuPlot process
g.plot(a) plot vector A against linear index
g.plot(a,b) plot B against A
g.plot(a,b,c,d,...) plot B against A, D against C, etc
g.mplot(M,N,S,"file",a,b,...) multiple plots saved to file0000.eps, etc
each plot argument can be a tuple, list, or Numeric/NumPy vector
mplot loops over range(M,N,S) and create one plot per iteration
last args are same as list of vectors for plot(), e.g. 1, 2, 4 vectors
each plot is made from a portion of the vectors, depending on loop index i
Ith plot is of b[0:i] vs a[0:i], etc
series of plots saved as file0000.eps, file0001.eps, etc
if xrange(),yrange() are used then plot axes will be the same for all plots
g("plot 'file.dat' using 2:3 with lines") execute string in GnuPlot
g.enter() enter GnuPlot shell
gnuplot> plot sin(x) with lines type commands directly to GnuPlot
gnuplot> exit, quit exit GnuPlot shell
g.export("data",range(100),a,...) create file with columns of numbers
all vectors must be of equal length
could plot from file with GnuPlot command: plot 'data' using 1:2 with lines
g.select(N) figure N becomes the current plot
subsequent commands apply to this plot
g.hide(N) delete window for figure N
g.save("file") save current plot as file.eps
Set attributes for current plot:
g.erase() reset all attributes to default values
g.aspect(1.3) aspect ratio
g.xtitle("Time") x axis text
g.ytitle("Energy") y axis text
g.title("My Plot") title text
g.title("title","x","y") title, x axis, y axis text
g.xrange(xmin,xmax) x axis range
g.xrange() default x axis range
g.yrange(ymin,ymax) y axis range
g.yrange() default y axis range
g.xlog() toggle x axis between linear and log
g.ylog() toggle y axis between linear and log
g.label(x,y,"text") place label at x,y coords
g.curve(N,'r') set color of curve N
colors: 'k' = black, 'r' = red, 'g' = green, 'b' = blue
'm' = magenta, 'c' = cyan, 'y' = yellow
"""
# History
# 8/05, Matt Jones (BYU): original version
# 9/05, Steve Plimpton: added mplot() method
# ToDo list
# allow choice of JPG or PNG or GIF when saving ?
# can this be done from GnuPlot or have to do via ImageMagick convert ?
# way to trim EPS plot that is created ?
# hide does not work on Mac aqua
# select does not pop window to front on Mac aqua
# Variables
# current = index of current figure (1-N)
# figures = list of figure objects with each plot's attributes
# so they aren't lost between replots
# Imports and external programs
import types, os
try: from DEFAULTS import PIZZA_GNUPLOT
except ImportError: PIZZA_GNUPLOT = "gnuplot"
try: from DEFAULTS import PIZZA_GNUTERM
except ImportError: PIZZA_GNUTERM = "x11"
# Class definition
class gnu:
# --------------------------------------------------------------------
def __init__(self):
self.GNUPLOT = os.popen(PIZZA_GNUPLOT,'w')
self.file = "tmp.gnu"
self.figures = []
self.select(1)
# --------------------------------------------------------------------
def stop(self):
self.__call__("quit")
del self.GNUPLOT
# --------------------------------------------------------------------
def __call__(self,command):
self.GNUPLOT.write(command + '\n')
self.GNUPLOT.flush()
# --------------------------------------------------------------------
def enter(self):
while True:
    command = input("gnuplot> ")
    if command == "quit" or command == "exit": return
    self.__call__(command)
# --------------------------------------------------------------------
# write plot vectors to files and plot them
def plot(self,*vectors):
if len(vectors) == 1:
file = self.file + ".%d.1" % self.current
linear = range(len(vectors[0]))
self.export(file,linear,vectors[0])
self.figures[self.current-1].ncurves = 1
else:
if len(vectors) % 2: raise ValueError("vectors must come in pairs")
for i in range(0,len(vectors),2):
file = self.file + ".%d.%d" % (self.current,i/2+1)
self.export(file,vectors[i],vectors[i+1])
self.figures[self.current-1].ncurves = len(vectors)//2
self.draw()
# --------------------------------------------------------------------
# create multiple plots from growing vectors, save to numbered files
# don't plot empty vector, create a [0] instead
def mplot(self,start,stop,skip,file,*vectors):
n = 0
for i in range(start,stop,skip):
partial_vecs = []
for vec in vectors:
if i: partial_vecs.append(vec[:i])
else: partial_vecs.append([0])
self.plot(*partial_vecs)
if n < 10: newfile = file + "000" + str(n)
elif n < 100: newfile = file + "00" + str(n)
elif n < 1000: newfile = file + "0" + str(n)
else: newfile = file + str(n)
self.save(newfile)
n += 1
# --------------------------------------------------------------------
# write list of equal-length vectors to filename
def export(self,filename,*vectors):
n = len(vectors[0])
for vector in vectors:
if len(vector) != n: raise ValueError("vectors must be same length")
f = open(filename,'w')
nvec = len(vectors)
for i in range(n):
    for j in range(nvec):
        print(vectors[j][i], end=' ', file=f)
    print(file=f)
f.close()
# --------------------------------------------------------------------
# select plot N as current plot
def select(self,n):
self.current = n
if len(self.figures) < n:
for i in range(n - len(self.figures)):
self.figures.append(figure())
cmd = "set term " + PIZZA_GNUTERM + ' ' + str(n)
self.__call__(cmd)
if self.figures[n-1].ncurves: self.draw()
# --------------------------------------------------------------------
# delete window for plot N
def hide(self,n):
cmd = "set term %s close %d" % (PIZZA_GNUTERM,n)
self.__call__(cmd)
# --------------------------------------------------------------------
# save plot to file.eps
# final re-select will reset terminal
# do not continue until plot file is written out
# else script could go forward and change data file
# use tmp.done as semaphore to indicate plot is finished
def save(self,file):
self.__call__("set terminal postscript enhanced solid lw 2 color portrait")
cmd = "set output '%s.eps'" % file
self.__call__(cmd)
if os.path.exists("tmp.done"): os.remove("tmp.done")
self.draw()
self.__call__("!touch tmp.done")
while not os.path.exists("tmp.done"): continue
self.__call__("set output")
self.select(self.current)
# --------------------------------------------------------------------
# restore default attributes by creating a new fig object
def erase(self):
fig = figure()
fig.ncurves = self.figures[self.current-1].ncurves
self.figures[self.current-1] = fig
self.draw()
# --------------------------------------------------------------------
def aspect(self,value):
self.figures[self.current-1].aspect = value
self.draw()
# --------------------------------------------------------------------
def xrange(self,*values):
if len(values) == 0:
self.figures[self.current-1].xlimit = 0
else:
self.figures[self.current-1].xlimit = (values[0],values[1])
self.draw()
# --------------------------------------------------------------------
def yrange(self,*values):
if len(values) == 0:
self.figures[self.current-1].ylimit = 0
else:
self.figures[self.current-1].ylimit = (values[0],values[1])
self.draw()
# --------------------------------------------------------------------
def label(self,x,y,text):
self.figures[self.current-1].labels.append((x,y,text))
self.figures[self.current-1].nlabels += 1
self.draw()
# --------------------------------------------------------------------
def nolabels(self):
self.figures[self.current-1].nlabels = 0
self.figures[self.current-1].labels = []
self.draw()
# --------------------------------------------------------------------
def title(self,*strings):
if len(strings) == 1:
self.figures[self.current-1].title = strings[0]
else:
self.figures[self.current-1].title = strings[0]
self.figures[self.current-1].xtitle = strings[1]
self.figures[self.current-1].ytitle = strings[2]
self.draw()
# --------------------------------------------------------------------
def xtitle(self,label):
self.figures[self.current-1].xtitle = label
self.draw()
# --------------------------------------------------------------------
def ytitle(self,label):
self.figures[self.current-1].ytitle = label
self.draw()
# --------------------------------------------------------------------
def xlog(self):
if self.figures[self.current-1].xlog:
self.figures[self.current-1].xlog = 0
else:
self.figures[self.current-1].xlog = 1
self.draw()
# --------------------------------------------------------------------
def ylog(self):
if self.figures[self.current-1].ylog:
self.figures[self.current-1].ylog = 0
else:
self.figures[self.current-1].ylog = 1
self.draw()
# --------------------------------------------------------------------
def curve(self,num,color):
fig = self.figures[self.current-1]
while len(fig.colors) < num: fig.colors.append(0)
fig.colors[num-1] = colormap[color]
self.draw()
# --------------------------------------------------------------------
# draw a plot with all its settings
# just return if no files of vectors defined yet
def draw(self):
fig = self.figures[self.current-1]
if not fig.ncurves: return
cmd = 'set size ratio ' + str(1.0/float(fig.aspect))
self.__call__(cmd)
cmd = 'set title ' + '"' + fig.title + '"'
self.__call__(cmd)
cmd = 'set xlabel ' + '"' + fig.xtitle + '"'
self.__call__(cmd)
cmd = 'set ylabel ' + '"' + fig.ytitle + '"'
self.__call__(cmd)
if fig.xlog: self.__call__("set logscale x")
else: self.__call__("unset logscale x")
if fig.ylog: self.__call__("set logscale y")
else: self.__call__("unset logscale y")
if fig.xlimit:
cmd = 'set xr [' + str(fig.xlimit[0]) + ':' + str(fig.xlimit[1]) + ']'
self.__call__(cmd)
else: self.__call__("set xr [*:*]")
if fig.ylimit:
cmd = 'set yr [' + str(fig.ylimit[0]) + ':' + str(fig.ylimit[1]) + ']'
self.__call__(cmd)
else: self.__call__("set yr [*:*]")
self.__call__("set nolabel")
for i in range(fig.nlabels):
x = fig.labels[i][0]
y = fig.labels[i][1]
text = fig.labels[i][2]
cmd = 'set label ' + '\"' + text + '\" at ' + str(x) + ',' + str(y)
self.__call__(cmd)
self.__call__("set key off")
cmd = 'plot '
for i in range(fig.ncurves):
file = self.file + ".%d.%d" % (self.current,i+1)
if len(fig.colors) > i and fig.colors[i]:
cmd += "'" + file + "' using 1:2 with line %d, " % fig.colors[i]
else:
cmd += "'" + file + "' using 1:2 with lines, "
self.__call__(cmd[:-2])
# --------------------------------------------------------------------
# class to store settings for a single plot
class figure:
def __init__(self):
self.ncurves = 0
self.colors = []
self.title = ""
self.xtitle = ""
self.ytitle = ""
self.aspect = 1.3
self.xlimit = 0
self.ylimit = 0
self.xlog = 0
self.ylog = 0
self.nlabels = 0
self.labels = []
# --------------------------------------------------------------------
# line color settings
colormap = {'k':-1, 'r':1, 'g':2, 'b':3, 'm':4, 'c':5, 'y':7}
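# A minimal usage sketch (assumes GnuPlot is installed and reachable via
# PIZZA_GNUPLOT); intended for an interactive session:
#
#   g = gnu()                       # start GnuPlot
#   a = [0.1 * i for i in range(100)]
#   b = [x * x for x in a]
#   g.plot(a, b)                    # plot b against a in figure 1
#   g.title("My Plot", "x", "x^2") # title, x axis, y axis text
#   g.save("parabola")              # writes parabola.eps
#   g.stop()                        # shut down the GnuPlot process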
|
PypiClean
|
/jupyter_declarativewidgets-0.7.0.tar.gz/jupyter_declarativewidgets-0.7.0/declarativewidgets/static/urth_components/handsontable/dist/moment/locale/lb.js
|
;(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined'
&& typeof require === 'function' ? factory(require('../moment')) :
typeof define === 'function' && define.amd ? define(['moment'], factory) :
factory(global.moment)
}(this, function (moment) { 'use strict';
function processRelativeTime(number, withoutSuffix, key, isFuture) {
var format = {
'm': ['eng Minutt', 'enger Minutt'],
'h': ['eng Stonn', 'enger Stonn'],
'd': ['een Dag', 'engem Dag'],
'M': ['ee Mount', 'engem Mount'],
'y': ['ee Joer', 'engem Joer']
};
return withoutSuffix ? format[key][0] : format[key][1];
}
function processFutureTime(string) {
var number = string.substr(0, string.indexOf(' '));
if (eifelerRegelAppliesToNumber(number)) {
return 'a ' + string;
}
return 'an ' + string;
}
function processPastTime(string) {
var number = string.substr(0, string.indexOf(' '));
if (eifelerRegelAppliesToNumber(number)) {
return 'viru ' + string;
}
return 'virun ' + string;
}
/**
* Returns true if the word before the given number loses the '-n' ending.
* e.g. 'an 10 Deeg' but 'a 5 Deeg'
*
* @param number {integer}
* @returns {boolean}
*/
function eifelerRegelAppliesToNumber(number) {
number = parseInt(number, 10);
if (isNaN(number)) {
return false;
}
if (number < 0) {
// Negative Number --> always true
return true;
} else if (number < 10) {
// Only 1 digit
if (4 <= number && number <= 7) {
return true;
}
return false;
} else if (number < 100) {
// 2 digits
var lastDigit = number % 10, firstDigit = number / 10;
if (lastDigit === 0) {
return eifelerRegelAppliesToNumber(firstDigit);
}
return eifelerRegelAppliesToNumber(lastDigit);
} else if (number < 10000) {
// 3 or 4 digits --> recursively check first digit
while (number >= 10) {
number = number / 10;
}
return eifelerRegelAppliesToNumber(number);
} else {
// Anything larger than 4 digits: recursively check first n-3 digits
number = number / 1000;
return eifelerRegelAppliesToNumber(number);
}
}
var lb = moment.defineLocale('lb', {
months: 'Januar_Februar_Mäerz_Abrëll_Mee_Juni_Juli_August_September_Oktober_November_Dezember'.split('_'),
monthsShort: 'Jan._Febr._Mrz._Abr._Mee_Jun._Jul._Aug._Sept._Okt._Nov._Dez.'.split('_'),
weekdays: 'Sonndeg_Méindeg_Dënschdeg_Mëttwoch_Donneschdeg_Freideg_Samschdeg'.split('_'),
weekdaysShort: 'So._Mé._Dë._Më._Do._Fr._Sa.'.split('_'),
weekdaysMin: 'So_Mé_Dë_Më_Do_Fr_Sa'.split('_'),
longDateFormat: {
LT: 'H:mm [Auer]',
LTS: 'H:mm:ss [Auer]',
L: 'DD.MM.YYYY',
LL: 'D. MMMM YYYY',
LLL: 'D. MMMM YYYY H:mm [Auer]',
LLLL: 'dddd, D. MMMM YYYY H:mm [Auer]'
},
calendar: {
sameDay: '[Haut um] LT',
sameElse: 'L',
nextDay: '[Muer um] LT',
nextWeek: 'dddd [um] LT',
lastDay: '[Gëschter um] LT',
lastWeek: function () {
// Different date string for 'Dënschdeg' (Tuesday) and 'Donneschdeg' (Thursday) due to phonological rule
switch (this.day()) {
case 2:
case 4:
return '[Leschten] dddd [um] LT';
default:
return '[Leschte] dddd [um] LT';
}
}
},
relativeTime : {
future : processFutureTime,
past : processPastTime,
s : 'e puer Sekonnen',
m : processRelativeTime,
mm : '%d Minutten',
h : processRelativeTime,
hh : '%d Stonnen',
d : processRelativeTime,
dd : '%d Deeg',
M : processRelativeTime,
MM : '%d Méint',
y : processRelativeTime,
yy : '%d Joer'
},
ordinalParse: /\d{1,2}\./,
ordinal: '%d.',
week: {
dow: 1, // Monday is the first day of the week.
doy: 4 // The week that contains Jan 4th is the first week of the year.
}
});
return lb;
}));
|
PypiClean
|
/wq.app-2.0.0a2.tar.gz/wq.app-2.0.0a2/packages/material-web/README.md
|
[![@wq/material][logo]][docs]
**@wq/material-web** provides web bindings for [@wq/material], via the [Material UI] library.
### [Documentation][docs]
[**Installation**][installation]
•
[**API**][api]
•
[**Components**][components]
•
[Icons][icons]
•
[Inputs][inputs]
[logo]: https://wq.io/images/@wq/material.svg
[docs]: https://wq.io/@wq/material
[installation]: https://wq.io/@wq/material#installation
[api]: https://wq.io/@wq/material#api
[components]: https://wq.io/components/
[icons]: https://wq.io/icons
[inputs]: https://wq.io/inputs/
[@wq/material]: https://wq.io/@wq/material
[Material UI]: https://material-ui.com/
|
PypiClean
|
/seeq_sysid-1.0.1-py3-none-any.whl/seeq_sysid/deployment_notebook/sysid_notebook.ipynb
|
```
from seeq_sysid.app import SYSID
# from seeq import spy
# from IPython.display import Javascript
# jv = '''$('#header-container').hide();'''
# Javascript(jv)
# Login
# spy.login(username='', password='', url='', quiet=True)
# %%javascript
# if (document.addEventListener) {
# document.addEventListener('contextmenu', function(e) {
# e.preventDefault();
# }, false);
# } else {
# document.attachEvent('oncontextmenu', function() {
# window.event.returnValue = false;
# });
# }
app = SYSID(sdl_notebook_url=jupyter_notebook_url)
app.run()
```
|
PypiClean
|
/codeforces_api-0.3.4.tar.gz/codeforces_api-0.3.4/codeforces/api/codeforces_api.py
|
import hashlib
import json
import operator
import random
import time
from collections import OrderedDict
from enum import Enum
from urllib.error import HTTPError
from urllib.request import urlopen
from .json_objects import Contest
from .json_objects import Hack
from .json_objects import Problem
from .json_objects import ProblemStatistics
from .json_objects import RanklistRow
from .json_objects import RatingChange
from .json_objects import Submission
from .json_objects import User
__all__ = ['CodeforcesAPI', 'CodeforcesLanguage']
class CodeforcesLanguage(Enum):
en = 'en'
ru = 'ru'
class CodeforcesDataRetriever:
"""
This class hides low-level operations with retrieving data from Codeforces site
"""
def __init__(self, lang=CodeforcesLanguage.en, key=None, secret=None):
"""
:param lang: Language
:type lang: CodeforcesLanguage
:param key: Private API key. Ignored if secret is None
:type key: str
:param secret: Private API secret. Ignored if key is None
:type secret: str
"""
self._key = None
self._secret = None
if key is not None and secret is not None:
self.key = key
self.secret = secret
self._base_from_language = {
CodeforcesLanguage.en: 'http://codeforces.com/api/',
CodeforcesLanguage.ru: 'http://codeforces.ru/api/'
}
self._language = lang
def get_data(self, method, **kwargs):
"""
Retrieves data by given method with given parameters
:param method: Request method
:param kwargs: HTTP parameters
:return:
"""
return self.__get_data(self.__generate_url(method, **kwargs))
def __get_data(self, url):
"""
Returns data retrieved from given url
"""
try:
with urlopen(url) as req:
return self.__check_json(req.read().decode('utf-8'))
except HTTPError as http_e:
try:
return self.__check_json(http_e.read().decode('utf-8'))
except Exception as e:
raise e from http_e
def __generate_url(self, method, **kwargs):
"""
Generates request url with given method and named parameters
:param method: Name of the method
:type method: str
:param kwargs: HTTP parameters
:type kwargs: dict of [str, object]
:return: Url
:rtype: str
"""
url = self.base + method
if self.key is not None and self.secret is not None:
kwargs['apiKey'] = self.key
kwargs['time'] = int(time.time())
if kwargs:
args = self.__get_valid_args(**kwargs)
url += '?' + '&'.join(map(self.__key_value_to_http_parameter, args.items()))
if self.key is not None and self.secret is not None:
url += '&apiSig=' + self.__generate_api_sig(method, args)
return url
def __generate_api_sig(self, method, params):
"""
apiSig — signature to ensure that you know both key and secret.
First six characters of the apiSig parameter can be arbitrary.
We recommend choosing them at random for each request. Let's denote them as rand.
The rest of the parameter is hexadecimal representation of SHA-512 hash-code of the following string:
<rand>/<methodName>?param1=value1&param2=value2...&paramN=valueN#<secret>
where (param_1, value_1), (param_2, value_2),..., (param_n, value_n) are all the
request parameters (including apiKey, time, but excluding apiSig) with corresponding values,
sorted lexicographically first by param_i, then by value_i.
:return:
"""
rand = str(random.randint(100000, 999999))
s = '{}/{}?'.format(rand, method)
ordered_params = OrderedDict(sorted(params.items(), key=operator.itemgetter(0)))
s += '&'.join(map(self.__key_value_to_http_parameter, ordered_params.items()))
s += '#' + self.secret
return rand + hashlib.sha512(s.encode()).hexdigest()
@staticmethod
def __get_valid_args(**kwargs):
"""
Filters only not None values
"""
return {k: v for k, v in kwargs.items() if v is not None}
@staticmethod
def __key_value_to_http_parameter(key_value):
"""
Transforms dictionary of values to http parameters
"""
key, value = key_value
if isinstance(value, list):
value = ';'.join(sorted(map(str, value)))
else:
value = str(value)
return '{0}={1}'.format(key, value)
@staticmethod
def __check_json(answer):
"""
Check if answer is correct according to http://codeforces.com/api/help
"""
values = json.loads(answer)
try:
if values['status'] == 'OK':
return values['result']
else:
raise ValueError(values['comment'])
except KeyError as e:
raise ValueError('Missed required field', e.args[0])
@property
def base(self):
"""
:return: Base of url according to language
:rtype: str
"""
return self._base_from_language[self.language]
@property
def language(self):
"""
:returns: Language. By default is en
:rtype: CodeforcesLanguage
"""
return self._language
@language.setter
def language(self, value):
"""
:param value: Language
:type value: CodeforcesLanguage or str
"""
assert isinstance(value, (CodeforcesLanguage, str))
self._language = CodeforcesLanguage(value)
@property
def key(self):
"""
The private api key
:returns: Key or None if not presented
:rtype: str
"""
return self._key
@key.setter
def key(self, value):
"""
The private api key
:param value: Key or None
:type value: str
"""
assert isinstance(value, str) or value is None
self._key = value
@property
def secret(self):
"""
The secret part of api key
:returns: Secret or None if not presented
:rtype: str
"""
return self._secret
@secret.setter
def secret(self, value):
"""
The secret part of api key
:param value: Secret or None
:type value: str
"""
assert isinstance(value, str) or value is None
self._secret = value
class CodeforcesAPI:
"""
This class provides api for retrieving data from codeforces.com
"""
def __init__(self, lang='en', key=None, secret=None):
"""
:param lang: Language
:type lang: str or CodeforcesLanguage
:param key: Private API key. Ignored if secret is None
:type key: str
:param secret: Private API secret. Ignored if key is None
:type secret: str
"""
self._data_retriever = CodeforcesDataRetriever(CodeforcesLanguage(lang), key, secret)
def contest_hacks(self, contest_id):
"""
Returns list of hacks in the specified contests.
Full information about hacks is available only after some time after the contest end.
During the contest user can see only own hacks.
:param contest_id: Id of the contest.
It is not the round number. It can be seen in contest URL. For example: /contest/374/status
:type contest_id: int
:return: Returns an iterator of Hack objects.
:rtype: iterator of Hack
"""
assert isinstance(contest_id, int)
data = self._data_retriever.get_data('contest.hacks', contestId=contest_id)
return map(Hack, data)
def contest_list(self, gym=False):
"""
Returns information about all available contests.
:param gym: If true, then gym contests are returned. Otherwise, regular contests are returned.
:type gym: bool
:return: Returns an iterator of Contest objects. If this method is not called anonymously,
then all available contests for a calling user will be returned too,
including mashups and private gyms.
:rtype: iterator of Contest
"""
data = self._data_retriever.get_data('contest.list', gym=gym)
return map(Contest, data)
def contest_rating_changes(self, contest_id):
"""
Returns rating changes after the contest.
:param contest_id: Id of the contest. It is not the round number. It can be seen in contest URL.
:return: Returns an iterator of RatingChange objects.
:rtype: iterator of RatingChange
"""
data = self._data_retriever.get_data('contest.ratingChanges', contestId=contest_id)
return map(RatingChange, data)
def contest_standings(self, contest_id, from_=1, count=None, handles=None, show_unofficial=False):
"""
Returns the description of the contest and the requested part of the standings.
:param contest_id: Id of the contest. It is not the round number. It can be seen in contest URL.
For example: /contest/374/status
:type contest_id: int
:param from_: 1-based index of the standings row to start the ranklist.
:type from_: int
:param count: Number of standing rows to return.
:type count: int
:param handles: List of handles. No more than 10000 handles are accepted.
:type handles: list of str
:param show_unofficial: If true than all participants (virtual, out of competition) are shown.
Otherwise, only official contestants are shown.
:type show_unofficial: bool
:return: Returns object with three fields: "contest", "problems" and "rows".
Field "contest" contains a Contest object.
Field "problems" contains an iterator of Problem objects.
Field "rows" contains an iteator of RanklistRow objects.
:rtype: {'contest': Contest,
'problems': iterator of Problem,
'rows': iterator of RanklistRow}
"""
assert isinstance(contest_id, int), 'contest_id should be of type int, not {}'.format(type(contest_id))
assert isinstance(from_, int), 'from_ should be of type int, not {}'.format(type(from_))
assert isinstance(count, int) or count is None, 'count should be of type int, not {}'.format(type(count))
assert isinstance(handles, list) or handles is None, \
'handles should be of type list of str, not {}'.format(type(handles))
assert handles is None or len(handles) <= 10000, 'No more than 10000 handles are accepted'
assert isinstance(show_unofficial, bool), \
'show_unofficial should be of type bool, not {}'.format(type(show_unofficial))
data = self._data_retriever.get_data('contest.standings',
contestId=contest_id,
count=count,
handles=handles,
showUnofficial=show_unofficial,
**{'from': from_})
return {'contest': Contest(data['contest']),
'problems': map(Problem, data['problems']),
'rows': map(RanklistRow, data['rows'])}
def contest_status(self, contest_id, handle=None, from_=1, count=None):
"""
Returns submissions for specified contest.
Optionally can return submissions of specified user.
:param contest_id: Id of the contest.
It is not the round number. It can be seen in contest URL. For example: /contest/374/status
:type contest_id: int
:param handle: Codeforces user handle.
:type handle: str
:param from_: 1-based index of the first submission to return.
:type from_: int
:param count: Number of returned submissions.
:type count: int
:return: Returns an iterator of Submission objects, sorted in decreasing order of submission id.
:rtype: iterator of Submission
"""
assert isinstance(contest_id, int)
assert isinstance(handle, str) or handle is None
assert isinstance(from_, int)
assert isinstance(count, int) or count is None
data = self._data_retriever.get_data('contest.status',
contestId=contest_id,
handle=handle,
count=count,
**{'from': from_})
return map(Submission, data)
def problemset_problems(self, tags=None):
"""
Returns all problems from problemset. Problems can be filtered by tags.
:param tags: List of tags.
:type tags: list of str
:return: Returns two iterators. Iterator of Problem objects and iterator of ProblemStatistics objects.
:rtype: {'problems': list of Problem,
'problemStatistics': list of ProblemStatistics}
"""
data = self._data_retriever.get_data('problemset.problems', tags=tags)
return {'problems': map(Problem, data['problems']),
'problemStatistics': map(ProblemStatistics, data['problemStatistics'])}
def problemset_recent_status(self, count):
"""
Returns recent submissions.
:param count: Number of submissions to return. Can be up to 1000.
:type count: int
:return: Returns an iterator of Submission objects, sorted in decreasing order of submission id.
:rtype: iterator of Submission
"""
assert isinstance(count, int)
assert 0 < count <= 1000
data = self._data_retriever.get_data('problemset.recentStatus', count=count)
return map(Submission, data)
def user_info(self, handles):
"""
Returns information about one or several users.
:param handles: List of handles. No more than 10000 handles are accepted.
:type handles: list of str
:return: Returns an iterator of User objects for requested handles.
:rtype: iterator of User
"""
assert isinstance(handles, list)
data = self._data_retriever.get_data('user.info', handles=handles)
return map(User, data)
def user_rated_list(self, active_only=False):
"""
Returns the list of all rated users.
:param active_only: If true, then only users who participated in a rated contest during the last month are
returned. Otherwise, all users with at least one rated contest are returned.
:type active_only: bool
:return: Returns an iterator of User objects, sorted in decreasing order of rating.
:rtype: iterator of User
"""
assert isinstance(active_only, bool)
data = self._data_retriever.get_data('user.ratedList', activeOnly=active_only)
return map(User, data)
def user_rating(self, handle):
"""
Returns rating history of the specified user.
:param handle: Codeforces user handle.
:type handle: str
:return: Returns an iterator of RatingChange objects for requested user.
:rtype: iterator of RatingChange
"""
assert isinstance(handle, str), 'Handle should have str type, not {}'.format(type(handle))
data = self._data_retriever.get_data('user.rating', handle=handle)
return map(RatingChange, data)
def user_status(self, handle, from_=1, count=None):
"""
Returns submissions of specified user.
:param handle: Codeforces user handle.
:type handle: str
:param from_: 1-based index of the first submission to return
:type from_: int
:param count: Number of returned submissions.
:type count: int or None
:return: Returns an iterator of Submission objects, sorted in decreasing order of submission id.
:rtype: iterator of Submission
"""
assert isinstance(handle, str)
assert isinstance(from_, int)
assert isinstance(count, int) or count is None
data = self._data_retriever.get_data('user.status', handle=handle, count=count, **{'from': from_})
return map(Submission, data)
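# A minimal usage sketch (network access is assumed; attribute names follow the
# Codeforces API fields, and the key/secret below are placeholders):
#
#   api = CodeforcesAPI()
#   result = api.problemset_problems(tags=['dp'])
#   for problem in result['problems']:
#       print(problem.name)
#
#   # With a private key/secret, requests are signed automatically via apiSig:
#   api = CodeforcesAPI(key='your-key', secret='your-secret')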
|
PypiClean
|
/webship-0.1.2.tar.gz/webship-0.1.2/README.md
|
Tools to deploy Python web applications. The build process runs in a container
using podman, so make sure you have podman properly set up on the build machine.
## Install
pipx install webship
## Usage
Create directory to hold the deploy project:-
mkdir -p myapp_deploy
Create `webship.ini` to hold configuration about the deploy:-
```
[fetch]
repo = [email protected]:xoxzoeu/myapp.git
clone_args = recursive
[deploy]
path = /app/myapp
hosts =
127.0.0.1
127.0.0.2
```
To build the app:-
webship fetch
webship build myapp 1.0.1 --docker_image=myapp
That will generate the release tarball in `build/myapp-1.0.1.tar.gz`. Before
deploying the release tarball, we can test it first to make sure everything
is working as expected:-
webship run build/myapp-1.0.1.tar.gz ".venv/bin/myapp manage runserver 0.0.0.0:8000" --env-file=/home/kamal/python/myapp_deploy/env
To deploy:-
webship deploy build/myapp-1.0.1.tar.gz
Deploy directory structure is like below:-
```
deploy_path (default to /app/<project_name>)
releases/
current --> releases/<project_name>-0.0.1
```
The active release will be `/app/<project_name>/current`, a symlink to the active version. This
structure allows multiple apps to be deployed on the same server.
|
PypiClean
|
/bob.db.base-3.2.0.zip/bob.db.base-3.2.0/doc/extend.rst
|
.. vim: set fileencoding=utf-8 :
.. Thu 13 Oct 2016 10:34:35 CEST
.. _annotations:
Annotations
-----------
Many databases come with additional information about their data. For image
databases, e.g., the locations of hand-labeled facial landmarks are provided.
Usually, these data is stored in additional text files. For most of the
available ``bob.db`` databases, there is exactly one text file for each data
file.
The function :py:func:`bob.db.base.read_annotation_file` can be used to read
annotation files of different types. It will output the data as a dictionary,
containing a ``key`` and the interpreted read data. For landmark locations,
the data is returned in **the common way** for bob, which is ``(y, x)``! The
following formats are currently accepted:
* ``'eyecenter'`` (for face images): Each file contains **only** the
locations of the two eyes, in one row, as follows: ``re_x re_y le_x le_y``.
The keys will be ``'reye'`` and ``'leye'``.
* ``'named'`` (for face images): Each file contains lines with the
landmark name and its two coordinates, e.g. ``reye re_x re_y``.
* ``'idiap'`` (for face images): The file format to read Idiap specific
annotation files. It will return up to 24 key points. 22 of these are read
from the file, and the ``'reye'`` and ``'leye'`` are estimated from the
inner and outer corners of the eyes (if available).
.. note::
``Left`` and ``Right`` positions are always expected to be from the subject
perspective. This means that, e.g., the ``'leye'`` landmark usually has a
**higher** x-coordinate than the ``'reye'``.
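As an illustration, a minimal sketch for reading an ``'eyecenter'`` annotation
file could look as follows (the file name is a hypothetical placeholder):

.. code-block:: python

   import bob.db.base

   # 'image_0001.pos' contains one row: re_x re_y le_x le_y
   annotations = bob.db.base.read_annotation_file('image_0001.pos', 'eyecenter')

   # landmark locations are returned in bob's (y, x) order
   print(annotations['reye'], annotations['leye'])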
|
PypiClean
|
/formification-1.2.0-py3-none-any.whl/formulaic/static/admin/formulaic/ember-formulaic/node_modules/bower/lib/node_modules/lodash/_createWrap.js
|
var baseSetData = require('./_baseSetData'),
createBind = require('./_createBind'),
createCurry = require('./_createCurry'),
createHybrid = require('./_createHybrid'),
createPartial = require('./_createPartial'),
getData = require('./_getData'),
mergeData = require('./_mergeData'),
setData = require('./_setData'),
setWrapToString = require('./_setWrapToString'),
toInteger = require('./toInteger');
/** Error message constants. */
var FUNC_ERROR_TEXT = 'Expected a function';
/** Used to compose bitmasks for function metadata. */
var BIND_FLAG = 1,
BIND_KEY_FLAG = 2,
CURRY_FLAG = 8,
CURRY_RIGHT_FLAG = 16,
PARTIAL_FLAG = 32,
PARTIAL_RIGHT_FLAG = 64;
/* Built-in method references for those with the same name as other `lodash` methods. */
var nativeMax = Math.max;
/**
* Creates a function that either curries or invokes `func` with optional
* `this` binding and partially applied arguments.
*
* @private
* @param {Function|string} func The function or method name to wrap.
* @param {number} bitmask The bitmask flags.
* The bitmask may be composed of the following flags:
* 1 - `_.bind`
* 2 - `_.bindKey`
* 4 - `_.curry` or `_.curryRight` of a bound function
* 8 - `_.curry`
* 16 - `_.curryRight`
* 32 - `_.partial`
* 64 - `_.partialRight`
* 128 - `_.rearg`
* 256 - `_.ary`
* 512 - `_.flip`
* @param {*} [thisArg] The `this` binding of `func`.
* @param {Array} [partials] The arguments to be partially applied.
* @param {Array} [holders] The `partials` placeholder indexes.
* @param {Array} [argPos] The argument positions of the new function.
* @param {number} [ary] The arity cap of `func`.
* @param {number} [arity] The arity of `func`.
* @returns {Function} Returns the new wrapped function.
*/
function createWrap(func, bitmask, thisArg, partials, holders, argPos, ary, arity) {
var isBindKey = bitmask & BIND_KEY_FLAG;
if (!isBindKey && typeof func != 'function') {
throw new TypeError(FUNC_ERROR_TEXT);
}
var length = partials ? partials.length : 0;
if (!length) {
bitmask &= ~(PARTIAL_FLAG | PARTIAL_RIGHT_FLAG);
partials = holders = undefined;
}
ary = ary === undefined ? ary : nativeMax(toInteger(ary), 0);
arity = arity === undefined ? arity : toInteger(arity);
length -= holders ? holders.length : 0;
if (bitmask & PARTIAL_RIGHT_FLAG) {
var partialsRight = partials,
holdersRight = holders;
partials = holders = undefined;
}
var data = isBindKey ? undefined : getData(func);
var newData = [
func, bitmask, thisArg, partials, holders, partialsRight, holdersRight,
argPos, ary, arity
];
if (data) {
mergeData(newData, data);
}
func = newData[0];
bitmask = newData[1];
thisArg = newData[2];
partials = newData[3];
holders = newData[4];
arity = newData[9] = newData[9] == null
? (isBindKey ? 0 : func.length)
: nativeMax(newData[9] - length, 0);
if (!arity && bitmask & (CURRY_FLAG | CURRY_RIGHT_FLAG)) {
bitmask &= ~(CURRY_FLAG | CURRY_RIGHT_FLAG);
}
if (!bitmask || bitmask == BIND_FLAG) {
var result = createBind(func, bitmask, thisArg);
} else if (bitmask == CURRY_FLAG || bitmask == CURRY_RIGHT_FLAG) {
result = createCurry(func, bitmask, arity);
} else if ((bitmask == PARTIAL_FLAG || bitmask == (BIND_FLAG | PARTIAL_FLAG)) && !holders.length) {
result = createPartial(func, bitmask, thisArg, partials);
} else {
result = createHybrid.apply(undefined, newData);
}
var setter = data ? baseSetData : setData;
return setWrapToString(setter(result, newData), func, bitmask);
}
module.exports = createWrap;
|
PypiClean
|
/v2/model/update_nat_gateway_dnat_rule_option.py
|
import pprint
import re
import six
class UpdateNatGatewayDnatRuleOption:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'nat_gateway_id': 'str',
'description': 'str',
'port_id': 'str',
'private_ip': 'str',
'protocol': 'str',
'floating_ip_id': 'str',
'internal_service_port': 'int',
'external_service_port': 'int',
'internal_service_port_range': 'str',
'external_service_port_range': 'str'
}
attribute_map = {
'nat_gateway_id': 'nat_gateway_id',
'description': 'description',
'port_id': 'port_id',
'private_ip': 'private_ip',
'protocol': 'protocol',
'floating_ip_id': 'floating_ip_id',
'internal_service_port': 'internal_service_port',
'external_service_port': 'external_service_port',
'internal_service_port_range': 'internal_service_port_range',
'external_service_port_range': 'external_service_port_range'
}
def __init__(self, nat_gateway_id=None, description=None, port_id=None, private_ip=None, protocol=None, floating_ip_id=None, internal_service_port=None, external_service_port=None, internal_service_port_range=None, external_service_port_range=None):
"""UpdateNatGatewayDnatRuleOption - a model defined in huaweicloud sdk"""
self._nat_gateway_id = None
self._description = None
self._port_id = None
self._private_ip = None
self._protocol = None
self._floating_ip_id = None
self._internal_service_port = None
self._external_service_port = None
self._internal_service_port_range = None
self._external_service_port_range = None
self.discriminator = None
self.nat_gateway_id = nat_gateway_id
if description is not None:
self.description = description
if port_id is not None:
self.port_id = port_id
if private_ip is not None:
self.private_ip = private_ip
if protocol is not None:
self.protocol = protocol
if floating_ip_id is not None:
self.floating_ip_id = floating_ip_id
if internal_service_port is not None:
self.internal_service_port = internal_service_port
if external_service_port is not None:
self.external_service_port = external_service_port
if internal_service_port_range is not None:
self.internal_service_port_range = internal_service_port_range
if external_service_port_range is not None:
self.external_service_port_range = external_service_port_range
@property
def nat_gateway_id(self):
"""Gets the nat_gateway_id of this UpdateNatGatewayDnatRuleOption.
The ID of the NAT gateway.
:return: The nat_gateway_id of this UpdateNatGatewayDnatRuleOption.
:rtype: str
"""
return self._nat_gateway_id
@nat_gateway_id.setter
def nat_gateway_id(self, nat_gateway_id):
"""Sets the nat_gateway_id of this UpdateNatGatewayDnatRuleOption.
The ID of the NAT gateway.
:param nat_gateway_id: The nat_gateway_id of this UpdateNatGatewayDnatRuleOption.
:type: str
"""
self._nat_gateway_id = nat_gateway_id
@property
def description(self):
"""Gets the description of this UpdateNatGatewayDnatRuleOption.
Description of the DNAT rule. The length limit is 255 characters.
:return: The description of this UpdateNatGatewayDnatRuleOption.
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this UpdateNatGatewayDnatRuleOption.
Description of the DNAT rule. The length limit is 255 characters.
:param description: The description of this UpdateNatGatewayDnatRuleOption.
:type: str
"""
self._description = description
@property
def port_id(self):
"""Gets the port_id of this UpdateNatGatewayDnatRuleOption.
Port ID of the virtual machine or bare-metal server, used in VPC scenarios. Specify either this parameter or private_ip.
:return: The port_id of this UpdateNatGatewayDnatRuleOption.
:rtype: str
"""
return self._port_id
@port_id.setter
def port_id(self, port_id):
"""Sets the port_id of this UpdateNatGatewayDnatRuleOption.
Port ID of the virtual machine or bare-metal server, used in VPC scenarios. Specify either this parameter or private_ip.
:param port_id: The port_id of this UpdateNatGatewayDnatRuleOption.
:type: str
"""
self._port_id = port_id
@property
def private_ip(self):
"""Gets the private_ip of this UpdateNatGatewayDnatRuleOption.
Private IP address of the user, used in Direct Connect and Cloud Connect scenarios. Specify either this parameter or port_id.
:return: The private_ip of this UpdateNatGatewayDnatRuleOption.
:rtype: str
"""
return self._private_ip
@private_ip.setter
def private_ip(self, private_ip):
"""Sets the private_ip of this UpdateNatGatewayDnatRuleOption.
Private IP address of the user, used in Direct Connect and Cloud Connect scenarios. Specify either this parameter or port_id.
:param private_ip: The private_ip of this UpdateNatGatewayDnatRuleOption.
:type: str
"""
self._private_ip = private_ip
@property
def protocol(self):
"""Gets the protocol of this UpdateNatGatewayDnatRuleOption.
Protocol type. Currently TCP/tcp, UDP/udp and ANY/any are supported, corresponding to protocol numbers 6, 17 and 0.
:return: The protocol of this UpdateNatGatewayDnatRuleOption.
:rtype: str
"""
return self._protocol
@protocol.setter
def protocol(self, protocol):
"""Sets the protocol of this UpdateNatGatewayDnatRuleOption.
Protocol type. Currently TCP/tcp, UDP/udp and ANY/any are supported, corresponding to protocol numbers 6, 17 and 0.
:param protocol: The protocol of this UpdateNatGatewayDnatRuleOption.
:type: str
"""
self._protocol = protocol
@property
def floating_ip_id(self):
"""Gets the floating_ip_id of this UpdateNatGatewayDnatRuleOption.
The ID of the elastic IP (EIP).
:return: The floating_ip_id of this UpdateNatGatewayDnatRuleOption.
:rtype: str
"""
return self._floating_ip_id
@floating_ip_id.setter
def floating_ip_id(self, floating_ip_id):
"""Sets the floating_ip_id of this UpdateNatGatewayDnatRuleOption.
The ID of the elastic IP (EIP).
:param floating_ip_id: The floating_ip_id of this UpdateNatGatewayDnatRuleOption.
:type: str
"""
self._floating_ip_id = floating_ip_id
@property
def internal_service_port(self):
"""Gets the internal_service_port of this UpdateNatGatewayDnatRuleOption.
Port number through which the virtual machine or bare-metal server provides services. Value range: 0-65535.
:return: The internal_service_port of this UpdateNatGatewayDnatRuleOption.
:rtype: int
"""
return self._internal_service_port
@internal_service_port.setter
def internal_service_port(self, internal_service_port):
"""Sets the internal_service_port of this UpdateNatGatewayDnatRuleOption.
Port number through which the virtual machine or bare-metal server provides services. Value range: 0-65535.
:param internal_service_port: The internal_service_port of this UpdateNatGatewayDnatRuleOption.
:type: int
"""
self._internal_service_port = internal_service_port
@property
def external_service_port(self):
"""Gets the external_service_port of this UpdateNatGatewayDnatRuleOption.
Port number through which the floating IP provides services. Value range: 0-65535.
:return: The external_service_port of this UpdateNatGatewayDnatRuleOption.
:rtype: int
"""
return self._external_service_port
@external_service_port.setter
def external_service_port(self, external_service_port):
"""Sets the external_service_port of this UpdateNatGatewayDnatRuleOption.
Port number through which the floating IP provides services. Value range: 0-65535.
:param external_service_port: The external_service_port of this UpdateNatGatewayDnatRuleOption.
:type: int
"""
self._external_service_port = external_service_port
@property
def internal_service_port_range(self):
"""Gets the internal_service_port_range of this UpdateNatGatewayDnatRuleOption.
Port number range through which the virtual machine or bare-metal server provides services. This range maps 1:1, in order, to external_service_port_range. Value range: 1-65535. Constraint: the two ends of the range can only be joined with the '-' character.
:return: The internal_service_port_range of this UpdateNatGatewayDnatRuleOption.
:rtype: str
"""
return self._internal_service_port_range
@internal_service_port_range.setter
def internal_service_port_range(self, internal_service_port_range):
"""Sets the internal_service_port_range of this UpdateNatGatewayDnatRuleOption.
Port number range through which the virtual machine or bare-metal server provides services. This range maps 1:1, in order, to external_service_port_range. Value range: 1-65535. Constraint: the two ends of the range can only be joined with the '-' character.
:param internal_service_port_range: The internal_service_port_range of this UpdateNatGatewayDnatRuleOption.
:type: str
"""
self._internal_service_port_range = internal_service_port_range
@property
def external_service_port_range(self):
"""Gets the external_service_port_range of this UpdateNatGatewayDnatRuleOption.
Port number range through which the floating IP provides services. This range maps 1:1, in order, to internal_service_port_range. Value range: 1-65535. Constraint: the two ends of the range can only be joined with the '-' character.
:return: The external_service_port_range of this UpdateNatGatewayDnatRuleOption.
:rtype: str
"""
return self._external_service_port_range
@external_service_port_range.setter
def external_service_port_range(self, external_service_port_range):
"""Sets the external_service_port_range of this UpdateNatGatewayDnatRuleOption.
Port number range through which the floating IP provides services. This range maps 1:1, in order, to internal_service_port_range. Value range: 1-65535. Constraint: the two ends of the range can only be joined with the '-' character.
:param external_service_port_range: The external_service_port_range of this UpdateNatGatewayDnatRuleOption.
:type: str
"""
self._external_service_port_range = external_service_port_range
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, UpdateNatGatewayDnatRuleOption):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
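# A minimal usage sketch (the IDs below are hypothetical placeholders):
#
#   option = UpdateNatGatewayDnatRuleOption(
#       nat_gateway_id='my-nat-gateway-id',
#       protocol='tcp',
#       floating_ip_id='my-eip-id',
#       private_ip='192.168.0.99',
#       internal_service_port=8080,
#       external_service_port=80)
#   print(option.to_str())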
|
PypiClean
|
/fake_bpy_module_3.4-20230117-py3-none-any.whl/mathutils/bvhtree.py
|
import sys
import typing
import bmesh.types
import bpy.types
import mathutils
GenericType = typing.TypeVar("GenericType")
class BVHTree:
@classmethod
def FromBMesh(cls, bmesh: 'bmesh.types.BMesh', epsilon: float = 0.0):
''' BVH tree based on `BMesh` data.
:param bmesh: BMesh data.
:type bmesh: 'bmesh.types.BMesh'
:param epsilon: Increase the threshold for detecting overlap and raycast hits.
:type epsilon: float
'''
pass
@classmethod
def FromObject(cls,
object: 'bpy.types.Object',
depsgraph: 'bpy.types.Depsgraph',
deform: bool = True,
render=False,
cage: bool = False,
epsilon: float = 0.0):
''' BVH tree based on `Object` data.
:param object: Object data.
:type object: 'bpy.types.Object'
:param depsgraph: Depsgraph to use for evaluating the mesh.
:type depsgraph: 'bpy.types.Depsgraph'
:param deform: Use mesh with deformations.
:type deform: bool
:param cage: Use modifiers cage.
:type cage: bool
:param epsilon: Increase the threshold for detecting overlap and raycast hits.
:type epsilon: float
'''
pass
@classmethod
def FromPolygons(cls,
vertices: typing.List[float],
polygons: 'bpy.types.Sequence',
all_triangles: bool = False,
epsilon: float = 0.0):
''' BVH tree constructed from geometry passed in as arguments.
:param vertices: float triplets each representing ``(x, y, z)``
:type vertices: typing.List[float]
:param polygons: Sequence of polygons, each containing indices into the vertices argument.
:type polygons: 'bpy.types.Sequence'
:param all_triangles: Use when all **polygons** are triangles for more efficient conversion.
:type all_triangles: bool
:param epsilon: Increase the threshold for detecting overlap and raycast hits.
:type epsilon: float
'''
pass
def find_nearest(self, origin,
distance: float = 1.84467e+19) -> typing.Tuple:
''' Find the nearest element (typically face index) to a point.
:param origin: Find the nearest element to this point.
:type origin: typing.Union[typing.Sequence[float], 'mathutils.Vector']
:param distance: Maximum distance threshold.
:type distance: float
:rtype: typing.Tuple
:return: Returns a tuple (`Vector` location, `Vector` normal, int index, float distance). Values will all be None if no hit is found.
'''
pass
def find_nearest_range(self, origin,
distance: float = 1.84467e+19) -> typing.List:
''' Find the nearest elements (typically face index) to a point in the distance range.
:param origin: Find the nearest elements to this point.
:type origin: typing.Union[typing.Sequence[float], 'mathutils.Vector']
:param distance: Maximum distance threshold.
:type distance: float
:rtype: typing.List
:return: Returns a list of tuples (`Vector` location, `Vector` normal, int index, float distance),
'''
pass
def overlap(self, other_tree: 'BVHTree') -> typing.List:
''' Find overlapping indices between 2 trees.
:param other_tree: Other tree to perform overlap test on.
:type other_tree: 'BVHTree'
:rtype: typing.List
:return: Returns a list of unique index pairs, the first index referencing this tree, the second referencing the **other_tree**.
'''
pass
def ray_cast(
self,
origin: typing.Union[typing.Sequence[float], 'mathutils.Vector'],
direction: typing.Union[typing.
Sequence[float], 'mathutils.Vector'],
distance: float = sys.float_info.max) -> typing.Tuple:
''' Cast a ray onto the mesh.
:param origin: Start location of the ray in object space.
:type origin: typing.Union[typing.Sequence[float], 'mathutils.Vector']
:param direction: Direction of the ray in object space.
:type direction: typing.Union[typing.Sequence[float], 'mathutils.Vector']
:param distance: Maximum distance threshold.
:type distance: float
:rtype: typing.Tuple
:return: Returns a tuple (`Vector` location, `Vector` normal, int index, float distance). Values will all be None if no hit is found.
'''
pass
def __init__(self, size) -> typing.Any:
'''
:rtype: typing.Any
'''
pass
|
PypiClean
|
/mitosheet3-0.3.281.tar.gz/mitosheet3-0.3.281/mitosheet/api/get_unique_value_counts.py
|
# Copyright (c) Saga Inc.
# Distributed under the terms of the GPL License.
import json
from typing import Any, Dict
import pandas as pd
from mitosheet.types import StepsManagerType
from mitosheet.utils import get_row_data_array
# The maximum number of values the front-end sends to the backend
# See comments in function description below.
MAX_UNIQUE_VALUES = 1_000
def get_unique_value_counts(params: Dict[str, Any], steps_manager: StepsManagerType) -> str:
"""
Sends back a string that can be parsed to a JSON object that
contains the normalized value counts for the series at column_id
in the df at sheet_index.
Also takes a search_string and sort string, which it uses to filter
down the dataset if there are more than MAX_UNIQUE_VALUES
NOTE: the front-end also filters with the search string, and
reorders with the sort; we just do it here on the backend in
the case that there are more than MAX_UNIQUE_VALUES so we
don't crash the front-end with too much data.
"""
sheet_index = params['sheet_index']
column_id = params['column_id']
search_string = params['search_string']
sort = params['sort']
column_header = steps_manager.curr_step.column_ids.get_column_header_by_id(sheet_index, column_id)
series: pd.Series = steps_manager.dfs[sheet_index][column_header]
unique_value_counts_percents_series = series.value_counts(normalize=True, dropna=False)
unique_value_counts_series = series.value_counts(dropna=False)
unique_value_counts_df = pd.DataFrame({
'values': unique_value_counts_percents_series.index,
'percents': unique_value_counts_percents_series,
'counts': unique_value_counts_series
})
if len(unique_value_counts_df) > MAX_UNIQUE_VALUES:
# First, we turn the series into a string series, so that we can
# easily filter on it without issues (and sort in some cases)
new_unique_value_counts_df = unique_value_counts_df.copy(deep=True)
new_unique_value_counts_df['values_strings'] = new_unique_value_counts_df['values'].astype('str')
# First, we sort in the order they want
try:
if sort == 'Ascending Value':
new_unique_value_counts_df = new_unique_value_counts_df.sort_values(by='values', ascending=True, na_position='first')
elif sort == 'Descending Value':
new_unique_value_counts_df = new_unique_value_counts_df.sort_values(by='values', ascending=False, na_position='first')
elif sort == 'Ascending Occurence':
new_unique_value_counts_df = new_unique_value_counts_df.sort_values(by='counts', ascending=True, na_position='first')
elif sort == 'Descending Occurence':
new_unique_value_counts_df = new_unique_value_counts_df.sort_values(by='counts', ascending=False, na_position='first')
except Exception:
# If the sort values throws an exception, then this must be because we have a mixed value type, and so we instead
# sort on the string representation of the values (as this will always work)
if sort == 'Ascending Value':
new_unique_value_counts_df = new_unique_value_counts_df.sort_values(by='values_strings', ascending=True, na_position='first')
elif sort == 'Descending Value':
new_unique_value_counts_df = new_unique_value_counts_df.sort_values(by='values_strings', ascending=False, na_position='first')
# Then, we filter with the string. Note that we always filter on the string representation
# because the front-end sends a string
new_unique_value_counts_df = new_unique_value_counts_df[new_unique_value_counts_df['values_strings'].str.contains(search_string, na=False, case=False)]
# Finally, we only take the first MAX_UNIQUE_VALUES
if len(new_unique_value_counts_df) > MAX_UNIQUE_VALUES:
new_unique_value_counts_df = new_unique_value_counts_df.head(MAX_UNIQUE_VALUES)
is_all_data = False
else:
is_all_data = True
# And then we filter the unique values down to these specific values
unique_value_counts_df = unique_value_counts_df.loc[new_unique_value_counts_df.index]
unique_value_counts_df = unique_value_counts_df.reset_index(drop=True)
else:
is_all_data = True
return json.dumps({
'uniqueValueRowDataArray': get_row_data_array(unique_value_counts_df),
'isAllData': is_all_data
})
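# A hedged usage sketch (steps_manager is assumed to be a live StepsManagerType
# instance; the params mirror what the front-end is described as sending above):
#   params = {"sheet_index": 0, "column_id": "id-of-col", "search_string": "",
#             "sort": "Ascending Value"}
#   payload = json.loads(get_unique_value_counts(params, steps_manager))
#   payload["isAllData"]  # True when no unique values had to be dropped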
|
PypiClean
|
/pycounts_j99thoms-0.1.0.tar.gz/pycounts_j99thoms-0.1.0/README.md
|
# pycounts_j99thoms
Calculate word counts in a text file!
## Installation
```bash
$ pip install pycounts_j99thoms
```
## Usage
`pycounts_j99thoms` can be used to count words in a text file and plot results
as follows:
```python
from pycounts_j99thoms.pycounts import count_words
from pycounts_j99thoms.plotting import plot_words
import matplotlib.pyplot as plt
file_path = "test.txt" # path to your file
counts = count_words(file_path)
fig = plot_words(counts, n=10)
plt.show()
```
## Contributing
Interested in contributing? Check out the contributing guidelines. Please note that this project is released with a Code of Conduct. By contributing to this project, you agree to abide by its terms.
## License
`pycounts_j99thoms` was created by Jakob Thoms. It is licensed under the terms of the MIT license.
## Credits
`pycounts_j99thoms` was created with [`cookiecutter`](https://cookiecutter.readthedocs.io/en/latest/) and the `py-pkgs-cookiecutter` [template](https://github.com/py-pkgs/py-pkgs-cookiecutter).
|
PypiClean
|
/py-pure-client-1.38.0.tar.gz/py-pure-client-1.38.0/pypureclient/flasharray/FA_2_0/models/host_group_response.py
|
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flasharray.FA_2_0 import models
class HostGroupResponse(object):
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'items': 'list[HostGroup]'
}
attribute_map = {
'items': 'items'
}
required_args = {
}
def __init__(
self,
items=None, # type: List[models.HostGroup]
):
"""
Keyword args:
items (list[HostGroup]): Returns a list of all items after filtering. The values are displayed for each name where meaningful.
"""
if items is not None:
self.items = items
def __setattr__(self, key, value):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `HostGroupResponse`".format(key))
self.__dict__[key] = value
def __getattribute__(self, item):
value = object.__getattribute__(self, item)
if isinstance(value, Property):
raise AttributeError
else:
return value
def __getitem__(self, key):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `HostGroupResponse`".format(key))
return object.__getattribute__(self, key)
def __setitem__(self, key, value):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `HostGroupResponse`".format(key))
object.__setattr__(self, key, value)
def __delitem__(self, key):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `HostGroupResponse`".format(key))
object.__delattr__(self, key)
def keys(self):
return self.attribute_map.keys()
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
if hasattr(self, attr):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(HostGroupResponse, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, HostGroupResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
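# A hedged usage sketch (the empty item list is illustrative): the dict-style
# accessors defined above let the model be used like a mapping.
#   resp = HostGroupResponse(items=[])
#   resp["items"]    # -> []
#   resp.to_dict()   # -> {'items': []}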
|
PypiClean
|
/tensorflow_ascend-1.15.0-cp37-cp37m-manylinux2014_aarch64.whl/tensorflow_core/contrib/receptive_field/python/util/receptive_field.py
|
"""Functions to compute receptive field of a fully-convolutional network."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.receptive_field.python.util import graph_compute_order
from tensorflow.contrib.receptive_field.python.util import parse_layer_parameters
from tensorflow.python.framework import ops as framework_ops
from tensorflow.python.platform import tf_logging as logging
def _get_rf_size_node_input(stride, kernel_size, rf_size_output):
"""Computes RF size at the input of a given layer.
Args:
stride: Stride of given layer (integer).
kernel_size: Kernel size of given layer (integer).
rf_size_output: RF size at output of given layer (integer).
Returns:
rf_size_input: RF size at input of given layer (integer).
"""
return stride * rf_size_output + kernel_size - stride
def _get_effective_stride_node_input(stride, effective_stride_output):
"""Computes effective stride at the input of a given layer.
Args:
stride: Stride of given layer (integer).
effective_stride_output: Effective stride at output of given layer
(integer).
Returns:
effective_stride_input: Effective stride at input of given layer
(integer).
"""
return stride * effective_stride_output
def _get_effective_padding_node_input(stride, padding,
effective_padding_output):
"""Computes effective padding at the input of a given layer.
Args:
stride: Stride of given layer (integer).
padding: Padding of given layer (integer).
effective_padding_output: Effective padding at output of given layer
(integer).
Returns:
effective_padding_input: Effective padding at input of given layer
(integer).
"""
return stride * effective_padding_output + padding
class ReceptiveField(object):
"""Receptive field of a convolutional neural network.
Args:
size: Receptive field size.
stride: Effective stride.
padding: Effective padding.
"""
def __init__(self, size, stride, padding):
self.size = np.asarray(size)
self.stride = np.asarray(stride)
self.padding = np.asarray(padding)
def compute_input_center_coordinates(self, y, axis=None):
"""Computes the center of the receptive field that generated a feature.
Args:
y: An array of feature coordinates with shape `(..., d)`, where `d` is the
number of dimensions of the coordinates.
axis: The dimensions for which to compute the input center coordinates. If
`None` (the default), compute the input center coordinates for all
dimensions.
Returns:
x: Center of the receptive field that generated the features, at the input
of the network.
Raises:
ValueError: If the number of dimensions of the feature coordinates does
not match the number of elements in `axis`.
"""
# Use all dimensions.
if axis is None:
axis = range(self.size.size)
# Ensure axis is a list because tuples have different indexing behavior.
axis = list(axis)
y = np.asarray(y)
if y.shape[-1] != len(axis):
raise ValueError("Dimensionality of the feature coordinates `y` (%d) "
"does not match dimensionality of `axis` (%d)" %
(y.shape[-1], len(axis)))
return -self.padding[axis] + y * self.stride[axis] + (
self.size[axis] - 1) / 2
def compute_feature_coordinates(self, x, axis=None):
"""Computes the position of a feature given the center of a receptive field.
Args:
x: An array of input center coordinates with shape `(..., d)`, where `d`
is the number of dimensions of the coordinates.
axis: The dimensions for which to compute the feature coordinates. If
`None` (the default), compute the feature coordinates for all
dimensions.
Returns:
y: Coordinates of the features.
Raises:
ValueError: If the number of dimensions of the input center coordinates
does not match the number of elements in `axis`.
"""
# Use all dimensions.
if axis is None:
axis = range(self.size.size)
# Ensure axis is a list because tuples have different indexing behavior.
axis = list(axis)
x = np.asarray(x)
if x.shape[-1] != len(axis):
raise ValueError("Dimensionality of the input center coordinates `x` "
"(%d) does not match dimensionality of `axis` (%d)" %
(x.shape[-1], len(axis)))
return (x + self.padding[axis] +
(1 - self.size[axis]) / 2) / self.stride[axis]
def __iter__(self):
return iter(np.concatenate([self.size, self.stride, self.padding]))
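# A hedged usage sketch (numbers chosen for illustration only): with RF size 11,
# effective stride 4 and effective padding 5, the feature at (0, 0) maps back to
# input center (0, 0), since -5 + 0 * 4 + (11 - 1) / 2 == 0.
#   rf = ReceptiveField(size=[11, 11], stride=[4, 4], padding=[5, 5])
#   rf.compute_input_center_coordinates([[0, 0]])  # -> array([[0., 0.]])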
def compute_receptive_field_from_graph_def(graph_def,
input_node,
output_node,
stop_propagation=None,
input_resolution=None):
"""Computes receptive field (RF) parameters from a Graph or GraphDef object.
The algorithm stops the calculation of the receptive field whenever it
encounters an operation in the list `stop_propagation`. Stopping the
calculation early can be useful to calculate the receptive field of a
subgraph such as a single branch of the
[inception network](https://arxiv.org/abs/1512.00567).
Args:
graph_def: Graph or GraphDef object.
input_node: Name of the input node or Tensor object from graph.
output_node: Name of the output node or Tensor object from graph.
stop_propagation: List of operations or scope names for which to stop the
propagation of the receptive field.
input_resolution: 2D list. If the input resolution to the model is fixed and
known, this may be set. This is helpful for cases where the RF parameters
vary depending on the input resolution (this happens since SAME padding in
tensorflow depends on input resolution in general). If this is None, it is
assumed that the input resolution is unknown, so some RF parameters may be
unknown (depending on the model architecture).
Returns:
rf_size_x: Receptive field size of network in the horizontal direction, with
respect to specified input and output.
rf_size_y: Receptive field size of network in the vertical direction, with
respect to specified input and output.
effective_stride_x: Effective stride of network in the horizontal direction,
with respect to specified input and output.
effective_stride_y: Effective stride of network in the vertical direction,
with respect to specified input and output.
effective_padding_x: Effective padding of network in the horizontal
direction, with respect to specified input and output.
effective_padding_y: Effective padding of network in the vertical
direction, with respect to specified input and output.
Raises:
ValueError: If network is not aligned or if either input or output nodes
cannot be found. For network criterion alignment, see
photos/vision/features/delf/g3doc/rf_computation.md
"""
# Convert a graph to graph_def if necessary.
if isinstance(graph_def, framework_ops.Graph):
graph_def = graph_def.as_graph_def()
# Convert tensors to names.
if isinstance(input_node, framework_ops.Tensor):
input_node = input_node.op.name
if isinstance(output_node, framework_ops.Tensor):
output_node = output_node.op.name
stop_propagation = stop_propagation or []
# Computes order of computation for a given graph.
node_info, name_to_node = graph_compute_order.get_compute_order(
graph_def=graph_def,
input_node_name=input_node,
input_node_size=input_resolution)
# Sort in reverse topological order.
ordered_node_info = sorted(node_info.items(), key=lambda x: -x[1].order)
# Dictionaries to keep track of receptive field, effective stride and
# effective padding of different nodes.
rf_sizes_x = {}
rf_sizes_y = {}
effective_strides_x = {}
effective_strides_y = {}
effective_paddings_x = {}
effective_paddings_y = {}
# Initialize dicts for output_node.
rf_sizes_x[output_node] = 1
rf_sizes_y[output_node] = 1
effective_strides_x[output_node] = 1
effective_strides_y[output_node] = 1
effective_paddings_x[output_node] = 0
effective_paddings_y[output_node] = 0
# Flag to denote if we found output node yet. If we have not, we skip nodes
# until the output node is found.
found_output_node = False
# Flag to denote if padding is undefined. This happens when SAME padding mode
# is used in conjunction with stride and kernel sizes which make it such that
# the padding to be applied would depend on the input size. In this case,
# alignment checks are skipped, and the effective padding is None.
undefined_padding = False
for _, (o, node, _, _) in ordered_node_info:
if node:
logging.vlog(3, "%10d %-100s %-20s" % (o, node.name[:90], node.op))
else:
continue
# When we find input node, we can stop.
if node.name == input_node:
break
# Loop until we find the output node. All nodes before finding the output
# one are irrelevant, so they can be skipped.
if not found_output_node:
if node.name == output_node:
found_output_node = True
if found_output_node:
if node.name not in rf_sizes_x:
assert node.name not in rf_sizes_y, ("Node %s is in rf_sizes_y, but "
"not in rf_sizes_x" % node.name)
# In this case, node is not relevant since it's not part of the
# computation we're interested in.
logging.vlog(3, "Irrelevant node %s, skipping it...", node.name)
continue
# Get params for this layer.
(kernel_size_x, kernel_size_y, stride_x, stride_y, padding_x, padding_y,
_, _) = parse_layer_parameters.get_layer_params(
node, name_to_node, node_info[node.name].input_size)
logging.vlog(
3, "kernel_size_x = %s, kernel_size_y = %s, "
"stride_x = %s, stride_y = %s, "
"padding_x = %s, padding_y = %s, input size = %s" %
(kernel_size_x, kernel_size_y, stride_x, stride_y, padding_x,
padding_y, node_info[node.name].input_size))
if padding_x is None or padding_y is None:
undefined_padding = True
# Get parameters at input of this layer which may or may not be propagated
# to the input layers.
rf_size_input_x = _get_rf_size_node_input(stride_x, kernel_size_x,
rf_sizes_x[node.name])
rf_size_input_y = _get_rf_size_node_input(stride_y, kernel_size_y,
rf_sizes_y[node.name])
effective_stride_input_x = _get_effective_stride_node_input(
stride_x, effective_strides_x[node.name])
effective_stride_input_y = _get_effective_stride_node_input(
stride_y, effective_strides_y[node.name])
if not undefined_padding:
effective_padding_input_x = _get_effective_padding_node_input(
stride_x, padding_x, effective_paddings_x[node.name])
effective_padding_input_y = _get_effective_padding_node_input(
stride_y, padding_y, effective_paddings_y[node.name])
else:
effective_padding_input_x = None
effective_padding_input_y = None
logging.vlog(
4, "rf_size_input_x = %s, rf_size_input_y = %s, "
"effective_stride_input_x = %s, effective_stride_input_y = %s, "
"effective_padding_input_x = %s, effective_padding_input_y = %s" %
(rf_size_input_x, rf_size_input_y, effective_stride_input_x,
effective_stride_input_y, effective_padding_input_x,
effective_padding_input_y))
# Loop over this node's inputs and potentially propagate information down.
for inp_name in node.input:
# Stop the propagation of the receptive field.
if any(inp_name.startswith(stop) for stop in stop_propagation):
logging.vlog(3, "Skipping explicitly ignored node %s.", inp_name)
continue
logging.vlog(4, "inp_name = %s", inp_name)
if inp_name.startswith("^"):
# The character "^" denotes a control dependency, so this input node
# can be safely ignored.
continue
inp_node = name_to_node[inp_name]
logging.vlog(4, "inp_node = \n%s", inp_node)
if inp_name in rf_sizes_x:
assert inp_name in rf_sizes_y, ("Node %s is in rf_sizes_x, but "
"not in rf_sizes_y" % inp_name)
logging.vlog(
4, "rf_sizes_x[inp_name] = %s,"
" rf_sizes_y[inp_name] = %s, "
"effective_strides_x[inp_name] = %s,"
" effective_strides_y[inp_name] = %s, "
"effective_paddings_x[inp_name] = %s,"
" effective_paddings_y[inp_name] = %s" %
(rf_sizes_x[inp_name], rf_sizes_y[inp_name],
effective_strides_x[inp_name], effective_strides_y[inp_name],
effective_paddings_x[inp_name], effective_paddings_y[inp_name]))
# This node was already discovered through a previous path, so we need
# to make sure that graph is aligned. This alignment check is skipped
# if the padding is not defined, since in this case alignment cannot
# be checked.
if not undefined_padding:
if effective_strides_x[inp_name] != effective_stride_input_x:
raise ValueError(
"Graph is not aligned since effective stride from different "
"paths is different in horizontal direction")
if effective_strides_y[inp_name] != effective_stride_input_y:
raise ValueError(
"Graph is not aligned since effective stride from different "
"paths is different in vertical direction")
if (rf_sizes_x[inp_name] -
1) / 2 - effective_paddings_x[inp_name] != (
rf_size_input_x - 1) / 2 - effective_padding_input_x:
raise ValueError(
"Graph is not aligned since center shift from different "
"paths is different in horizontal direction")
if (rf_sizes_y[inp_name] -
1) / 2 - effective_paddings_y[inp_name] != (
rf_size_input_y - 1) / 2 - effective_padding_input_y:
raise ValueError(
"Graph is not aligned since center shift from different "
"paths is different in vertical direction")
# Keep track of path with largest RF, for both directions.
if rf_sizes_x[inp_name] < rf_size_input_x:
rf_sizes_x[inp_name] = rf_size_input_x
effective_strides_x[inp_name] = effective_stride_input_x
effective_paddings_x[inp_name] = effective_padding_input_x
if rf_sizes_y[inp_name] < rf_size_input_y:
rf_sizes_y[inp_name] = rf_size_input_y
effective_strides_y[inp_name] = effective_stride_input_y
effective_paddings_y[inp_name] = effective_padding_input_y
else:
assert inp_name not in rf_sizes_y, ("Node %s is in rf_sizes_y, but "
"not in rf_sizes_x" % inp_name)
# In this case, it is the first time we encounter this node. So we
# propagate the RF parameters.
rf_sizes_x[inp_name] = rf_size_input_x
rf_sizes_y[inp_name] = rf_size_input_y
effective_strides_x[inp_name] = effective_stride_input_x
effective_strides_y[inp_name] = effective_stride_input_y
effective_paddings_x[inp_name] = effective_padding_input_x
effective_paddings_y[inp_name] = effective_padding_input_y
if not found_output_node:
raise ValueError("Output node was not found")
if input_node not in rf_sizes_x:
raise ValueError("Input node was not found")
return ReceptiveField(
(rf_sizes_x[input_node], rf_sizes_y[input_node]),
(effective_strides_x[input_node], effective_strides_y[input_node]),
(effective_paddings_x[input_node], effective_paddings_y[input_node]))
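# A hedged back-of-envelope check of the per-layer recurrences above, walking a
# two-layer stack (3x3 conv, stride 1, pad 0, followed by 2x2 pool, stride 2,
# pad 0) from the output back to the input:
#   rf, stride, pad = 1, 1, 0
#   for k, s, p in [(2, 2, 0), (3, 1, 0)]:  # output-most layer first
#       rf = _get_rf_size_node_input(s, k, rf)
#       stride = _get_effective_stride_node_input(s, stride)
#       pad = _get_effective_padding_node_input(s, p, pad)
#   # rf == 4, stride == 2, pad == 0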
|
PypiClean
|
/forcebalance-1.9.5.tar.gz/forcebalance-1.9.5/tools/TagMol2.py
|
from __future__ import division
from __future__ import print_function
import numpy as np
import itertools
import os, sys
import networkx as nx
from collections import defaultdict
from forcebalance.molecule import Molecule
from forcebalance import Mol2
#=================================================#
#| Mol2 File Tagging Script |#
#| Lee-Ping Wang |#
#| |#
#| This script takes a mol2 file as an argument. |#
#| |#
#| The mol2 file is assumed to be atom-typed and |#
#| charged (from something like antechamber). |#
#| |#
#| On output, a new mol2 file is generated that |#
#| contains ForceBalance parameterization tags |#
#| printed to the 'status bit' field for each |#
#| atom. Atoms that are chemically equivalent |#
#| up to a number of bonds (hard-coded by the |#
#| Max variable) are taken to be equivalent and |#
#| tagged with the RPT keyword instead of the |#
#| PARM keyword. |#
#| |#
#| This script also symmetrizes the charges and |#
#| makes sure that the decimal numbers add up |#
#| EXACTLY to an integer. |#
#| |#
#| Required: networkx package |#
#| Mol2 and molecule modules in |#
#| 'forcebalance' directory |#
#| |#
#=================================================#
#===============
# Currently, the Mol2 class is not able to print the end
# of the Mol2 file, which is needed by AMBER.
# So we put it in manually.
Ending = """@<TRIPOS>SUBSTRUCTURE
1 <1> 1 TEMP 0 **** **** 0 ROOT
"""
#===============
# Derived class from networkx graph;
# this is a graph description of molecules.
# Stolen from nanoreactor scripts, in order to be standalone
def nodematch(node1, node2):
# Two graph nodes match for isomorphism purposes if they carry the same element.
return node1['e'] == node2['e']
class MolG(nx.Graph):
def __eq__(self, other):
# This defines whether two MolG objects are "equal" to one another.
return nx.is_isomorphic(self, other, node_match=nodematch)
def __hash__(self):
''' The hash function is something we can use to discard two things that are obviously not equal. Here we neglect the hash. '''
return 1
def L(self):
''' Return a list of the sorted atom numbers in this graph. '''
return sorted(self.nodes())
def AStr(self):
''' Return a string of atoms, which serves as a rudimentary 'fingerprint' : '99,100,103,151' . '''
return ','.join(['%i' % i for i in self.L()])
def e(self):
''' Return an array of the elements. For instance ['H' 'C' 'C' 'H']. '''
elems = nx.get_node_attributes(self,'e')
return [elems[i] for i in self.L()]
def ef(self):
''' Create an Empirical Formula '''
Formula = list(self.e())
return ''.join([('%s%i' % (k, Formula.count(k)) if Formula.count(k) > 1 else '%s' % k) for k in sorted(set(Formula))])
def x(self):
''' Get a list of the coordinates. '''
coors = nx.get_node_attributes(self,'x')
return np.array([coors[i] for i in self.L()])
def col(vec):
"""Given any list, array, or matrix, return a 1-column matrix."""
return np.array(vec).reshape(-1, 1)
def row(vec):
"""Given any list, array, or matrix, return a 1-row matrix."""
return np.array(vec).reshape(1, -1)
def flat(vec):
"""Given any list, array, or matrix, return a single-index array."""
return np.array(vec).reshape(-1)
def build_graph(M):
M.require('bonds')
G = MolG()
for i, a in enumerate(M.elem):
G.add_node(i)
nx.set_node_attributes(G,'n',{i:M.atomname[i]})
nx.set_node_attributes(G,'e',{i:a})
nx.set_node_attributes(G,'x',{i:M.xyzs[0][i]})
for a, b in M.bonds:
G.add_edge(a, b)
return G
def get_equivalent_atoms(MyG):
GDat = MyG.nodes(data=True)
GDict = {}
for i in GDat:
GDict[i[0]] = i[1]
PairPaths = nx.all_pairs_shortest_path_length(MyG)
Walks = []
Max = 20
for A in PairPaths:
Walk = defaultdict(list)
for B in PairPaths[A]:
if PairPaths[A][B] > Max: continue
Walk[PairPaths[A][B]].append(GDict[B]['e'])
for idx, elist in Walk.items():
Walk[idx] = ''.join([('%s%i' % (k, elist.count(k)) if elist.count(k) > 1 else '%s' % k) for k in sorted(set(elist))])
Walks.append(Walk)
J = 0
Map = []
Suffix = []
MyList = []
for i, wi in enumerate(Walks):
UniqueFlag = True
atomi = GDict[i]['n']
for j, wj in enumerate(Walks):
atomj = GDict[j]['n']
if i <= j: continue
if wi == wj:
Repeat = atomj
UniqueFlag = False
break
MyList.append(J)
if UniqueFlag:
Map.append([i])
J += 1
Suffix.append(" # PARM 8")
else:
Map[MyList[j]].append(i)
Suffix.append(" # RPT 8 COUL:%s /RPT" % Repeat)
QMat = np.zeros((len(GDat), len(GDat)),dtype=float)
for i in Map:
for ii, jj in list(itertools.product(i, i)):
QMat[ii, jj] = 1.0 / len(i)
return QMat, Suffix
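# A hedged illustration of what QMat does (toy numbers, not from a mol2 file):
# for a group of equivalent atoms each row holds 1/len(group), so multiplying
# by the charge vector averages the charges within the group.
#   QMat = np.array([[0.5, 0.5], [0.5, 0.5]])
#   QMat.dot([0.1, 0.3])  # -> array([0.2, 0.2])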
def charge_as_array(M2Mol, QMat):
oldq = np.array([atom.charge for atom in M2Mol.atoms])
def get_symq(q):
return np.array([float("% .6f" % i) for i in flat(np.dot(QMat, col(oldq)))])
J = 0
M = 0
oldq = get_symq(oldq)
print("Total charge is % .6f" % sum(oldq))
print("Doing something stupid to make sure all of the charges add up to EXACTLY an integer.")
CorrQ = (float(int(round(sum(oldq)))) - sum(oldq)) / len(oldq)
print("Adding % .6f to all charges" % CorrQ)
oldq += CorrQ
while True:
print("Adjusting charge element %i by" % (M%len(oldq)), J, end=' ')
oldq[M%len(oldq)] += J
newq = get_symq(oldq)
print(": Total charge is now % .6f" % sum(newq))
if abs(float(int(round(sum(newq)))) - sum(newq)) < 1e-8:
break
oldq[M%len(oldq)] -= J
if J <= 0:
J = -J + (1e-6 if M%len(oldq) == 0 else 0)
else:
J = -J
M += 1
if M == 10000:
raise Exception("Tried 10,000 iterations of charge adjustment, I probably screwed up.")
return list(newq)
def update_mol2(M2Mol, newq, newsuf):
for i, a in enumerate(M2Mol.atoms):
a.set_charge(newq[i])
a.set_status_bit(newsuf[i])
def main():
M = Molecule(sys.argv[1])
MyG = build_graph(M)
QMat, Suffix = get_equivalent_atoms(MyG)
M2 = list(Mol2.mol2_set(sys.argv[1]).compounds.items())[0][1]
NewQ = charge_as_array(M2, QMat)
update_mol2(M2, NewQ, Suffix)
if len(sys.argv) >= 3:
with open(sys.argv[2],'w') as f:
print(M2, file=f)
print(Ending, file=f)
else:
print(M2)
print(Ending)
if __name__ == "__main__":
main()
|
PypiClean
|
/hrm_omero-0.4.0-py3-none-any.whl/hrm_omero/cli.py
|
import argparse
import os
import sys
import omero.gateway
from loguru import logger as log
from .__init__ import __version__
from . import formatting
from . import hrm
from . import omero as _omero
from . import transfer
from .misc import printlog, OmeroId
def bool_to_exitstatus(value):
"""Convert a boolean to a POSIX process exit code.
As boolean values in Python are a subset of int, `True` corresponds to the int value
'1', which is the opposite of a successful POSIX return code. Therefore, this
function simply inverts the boolean value to turn it into a proper exit code. In
case the provided value is not of type `bool` it will be returned unchanged.
Parameters
----------
value : bool or int
The value to be converted.
Returns
-------
int
0 in case `value` is `True`, 1 in case `value` is `False` and `value` itself in
case it is not a bool.
"""
if isinstance(value, bool):
return not value
return value
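# A quick sanity check of the inversion (follows directly from the docstring):
#   bool_to_exitstatus(True)   # -> False, which sys.exit() treats as 0 (success)
#   bool_to_exitstatus(False)  # -> True, which sys.exit() treats as 1 (failure)
#   bool_to_exitstatus(42)     # -> 42, non-bool values pass through unchanged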
def parse_arguments(args): # pragma: no cover
"""Parse the commandline arguments.
DEPRECATED function, use `arguments_parser()` instead!
"""
log.warning("'parse_arguments()' is deprecated and will be removed!")
argparser = arguments_parser()
try:
return argparser.parse_args(args)
except IOError as err:
argparser.error(str(err))
def arguments_parser():
"""Set up the commandline arguments parser.
Returns
-------
argparse.ArgumentParser
The parser instance ready to be run using its `parse_args()` method.
"""
# log.debug("Parsing command line arguments...")
argparser = argparse.ArgumentParser(
description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
)
argparser.add_argument(
"-v",
"--verbose",
dest="verbosity",
action="count",
default=0,
help="verbose messages (repeat for more details)",
)
argparser.add_argument(
"--version",
action="version",
version=f"%(prog)s {__version__}",
)
argparser.add_argument(
"-c",
"--config",
default="/etc/hrm.conf",
help="the HRM configuration file (default: '/etc/hrm.conf')",
)
argparser.add_argument(
"--dry-run",
action="store_true",
default=False,
help="print requested action and parameters without actually performing it",
)
# deprecated arguments group
dep_args = argparser.add_argument_group(
"DEPRECATED arguments",
"See the documentation for instructions on how to adapt your call!",
)
dep_args.add_argument(
"-w",
"--password",
required=False,
help=(
"OMERO password ******** DEPRECATED ********"
"Use the environment variable 'OMERO_PASSWORD' instead!"
),
)
# required arguments group
req_args = argparser.add_argument_group(
"required arguments", "NOTE: MUST be given before any subcommand!"
)
req_args.add_argument("-u", "--user", required=True, help="OMERO username")
subparsers = argparser.add_subparsers(
help=".",
dest="action",
description="Action to be performed, one of the following:",
)
# checkCredentials parser
subparsers.add_parser(
"checkCredentials", help="check if login credentials are valid"
)
# retrieveChildren parser
parser_subtree = subparsers.add_parser(
"retrieveChildren", help="get the children of a given node object (JSON)"
)
parser_subtree.add_argument(
"--id",
type=str,
required=True,
help='ID of the parent object, e.g. "ROOT" or "G:4:Experimenter:7"',
)
# OMEROtoHRM parser
parser_o2h = subparsers.add_parser(
"OMEROtoHRM", help="download an image from the OMERO server"
)
parser_o2h.add_argument(
"-i",
"--imageid",
required=True,
help='the OMERO ID of the image to download, e.g. "G:4:Image:42"',
)
parser_o2h.add_argument(
"-d",
"--dest",
type=str,
required=True,
help="the destination directory where to put the downloaded file",
)
# HRMtoOMERO parser
parser_h2o = subparsers.add_parser(
"HRMtoOMERO", help="upload an image to the OMERO server"
)
parser_h2o.add_argument(
"-d",
"--dset",
required=True,
dest="dset",
help='the ID of the target dataset in OMERO, e.g. "G:7:Dataset:23"',
)
parser_h2o.add_argument(
"-f",
"--file",
type=str,
required=True,
help="the image file to upload, including the full path",
)
parser_h2o.add_argument(
"-n",
"--name",
type=str,
required=False,
help="a label to use for the image in OMERO",
)
parser_h2o.add_argument(
"-a",
"--ann",
type=str,
required=False,
help="annotation text to be added to the image in OMERO",
)
return argparser
def verbosity_to_loglevel(verbosity):
"""Map the verbosity count to a named log level for `loguru`.
Parameters
----------
verbosity : int
Verbosity count as returned e.g. by the following argparse code:
`argparser.add_argument("-v", dest="verbosity", action="count", default=0)`
Returns
-------
str
A log level name that can be used with `loguru.logger.add()`.
"""
log_level = "WARNING" # no verbosity flag has been provided -> use "WARNING"
if verbosity > 3: # -vvvv (4) and more will result in "TRACE"
log_level = "TRACE"
if verbosity == 3: # -vvv will be "DEBUG"
log_level = "DEBUG"
elif verbosity == 2: # -vv will be "INFO"
log_level = "INFO"
elif verbosity == 1: # -v will be "SUCCESS"
log_level = "SUCCESS"
return log_level
def logger_add_file_sink(hrm_config, target=""):
"""Helper to add a file sink to the logger unless disabled in the config file.
By default logging messages from the connector into a separate file is desired, so
this function will try to add a file sink by default. Only if the HRM configuration
file explicitly asks for no log file to be created it will skip this step.
Parameters
----------
hrm_config : dict
A parsed HRM configuration file as returned by `hrm_omero.hrm.parse_config()`.
target : str, optional
The path for the log file to be used. If empty (or skipped) the default
`$HRM_LOG/omero-connector.log` will be used, falling back to
`HRM_LOG="/var/log/hrm"` in case `$HRM_LOG` is not set in the hrm configuration.
"""
disable_file_logging = hrm_config.get("OMERO_CONNECTOR_LOGFILE_DISABLED", "")
if disable_file_logging:
return
if not target:
log_base = hrm_config.get("HRM_LOG", "/var/log/hrm")
target = f"{log_base}/omero-connector.log"
log_level = hrm_config.get("OMERO_CONNECTOR_LOGLEVEL", "INFO")
try:
log.add(target, level=log_level)
log.trace(f"Added file sink for logging: {target}.")
except Exception as err: # pylint: disable-msg=broad-except
log.error(f"Adding a file sink for logging failed: {err}")
def run_task(args):
"""Parse commandline arguments and initiate the requested tasks."""
argparser = arguments_parser()
args = argparser.parse_args(args)
# one of the downsides of loguru is that the level of an existing logger can't be
# changed - so to adjust verbosity we actually need to remove the default logger and
# re-add it with the new level (see https://github.com/Delgan/loguru/issues/138)
log_level = verbosity_to_loglevel(args.verbosity)
log.remove()
log.add(sys.stderr, level=log_level)
log.success(f"Logging verbosity requested: {args.verbosity} ({log_level})")
hrm_config = hrm.parse_config(args.config)
host = hrm_config.get("OMERO_HOSTNAME", "localhost")
port = hrm_config.get("OMERO_PORT", 4064)
omero_logfile = hrm_config.get("OMERO_DEBUG_LOG", "")
log_level = hrm_config.get("OMERO_CONNECTOR_LOGLEVEL")
if log_level:
log.remove()
log.add(sys.stderr, level=log_level)
log.success(f"Log level set from config file: {log_level}")
logger_add_file_sink(hrm_config)
# NOTE: reading the OMERO password from an environment variable instead of an
# argument supplied on the command line improves handling of this sensitive data as
# the value is *NOT* immediately revealed to anyone with shell access by simply
# looking at the process list (which is an absolute standard procedure to do). Since
# it is not passed to any other functions here (except the call to `BlitzGateway`)
# this also prevents it from being shown in an annotated stack trace in case an
# uncaught exception is coming through.
# However, this doesn't provide super-high security as it will still be possible for
# an admin to inspect the environment of a running process. Nevertheless going
# beyond this seems a bit pointless here as an admin could also modify the code that
# is actually calling the connector to get hold of user credentials.
passwd = os.environ.get("OMERO_PASSWORD")
# while being deprecated an explicitly specified password still has priority:
if args.password: # pragma: no cover
passwd = args.password
log.warning("Using the '--password' parameter is deprecated!")
else:
log.debug("Using password from environment.")
if not passwd:
printlog("ERROR", "ERROR: no password given to connect to OMERO!")
return False
if args.action == "checkCredentials":
log.trace("checkCredentials")
perform_action = _omero.check_credentials
kwargs = {}
elif args.action == "retrieveChildren":
log.trace("retrieveChildren")
perform_action = formatting.print_children_json
kwargs = {"omero_id": OmeroId(args.id)}
elif args.action == "OMEROtoHRM":
log.trace("OMEROtoHRM")
perform_action = transfer.from_omero
kwargs = {
"id_str": args.imageid,
"dest": args.dest,
}
elif args.action == "HRMtoOMERO":
log.trace("HRMtoOMERO")
perform_action = transfer.to_omero
kwargs = {
"id_str": args.dset,
"image_file": args.file,
"omero_logfile": omero_logfile,
}
else:
printlog("ERROR", "No valid action specified that should be performed!")
return False
conn = omero.gateway.BlitzGateway(
username=args.user,
passwd=passwd,
host=host,
port=port,
secure=True,
useragent="hrm-omero.py",
)
try:
if args.dry_run:
printlog("INFO", "*** dry-run, only showing action and parameters ***")
printlog("INFO", f"function: {perform_action.__qualname__}")
for key, value in kwargs.items():
printlog("INFO", f"{key}: [{str(value)}]")
return True
# FIXME: the conn.connect() call should be removed once all actions (or rather
# the related functions) have adopted the decorator for ensuring a connection:
if args.action != "checkCredentials":
conn.connect()
log.info(f"New OMERO connection [user={args.user}].")
return perform_action(conn, **kwargs)
except Exception as err: # pylint: disable-msg=broad-except # pragma: no cover
log.error(f"An unforeseen error occured: {err}")
return False
finally:
conn.close()
log.info(f"Closed OMERO connection [user={args.user}].")
@log.catch
def main(args=None):
"""Wrapper to call the run_task() function and return its exit code."""
if not args:
args = sys.argv[1:]
sys.exit(bool_to_exitstatus(run_task(args)))
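# A hedged CLI sketch (host, user and IDs are placeholders; the console script
# name 'hrm-omero' is assumed). The password is read from the environment, as
# explained in run_task() above:
#   OMERO_PASSWORD=secret hrm-omero -u alice retrieveChildren --id "ROOT"
#   OMERO_PASSWORD=secret hrm-omero -u alice OMEROtoHRM -i "G:4:Image:42" -d /tmp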
|
PypiClean
|
/wsuks-0.4.1.tar.gz/wsuks-0.4.1/README.md
|
 [](https://twitter.com/intent/follow?screen_name=al3x_n3ff)
# wsuks
_Weaponizing the WSUS Attack_
Gaining local administrative access on a Windows machine that is part of a domain is typically the first step towards acquiring domain admin privileges during a penetration test. To exploit the WSUS attack automatically, this tool spoofs the IP address of the WSUS server on the local network via ARP and serves its own malicious updates when the client requests Windows updates.
By default, a Windows client requests updates every 24 hours.
Both the executable file served (Default: PsExec64.exe) and the executed command can be changed as needed.
Prerequisites:
- The target Client must be on the local network
- The Windows Server Update Service (WSUS) must be configured using HTTP
Result:
- After successful execution, the provided user will be added to the local admin group. If no user was specified, a user matching the format user[0-9]{5} (e.g. user12345) with a random password will be created
## Installation
Using pipx:
```
sudo apt install python3-pipx
pipx ensurepath
pipx install wsuks
sudo ln -s ~/.local/pipx/venvs/wsuks/bin/wsuks /usr/local/bin/wsuks
```
Using poetry:
```
sudo apt install python3-poetry
git clone https://github.com/NeffIsBack/wsuks
cd wsuks
sudo poetry install
```
## Usage
❗wsuks must be run as root❗
With pipx:
```
sudo wsuks
sudo wsuks -t 10.0.0.10 --WSUS-Server 10.0.0.20 # This will generate a new local user and add it to the local admin group
sudo wsuks -t 10.0.0.10 --WSUS-Server 10.0.0.20 -u User -p Password -d Domain.local # This will add the provided user to the local admin group
sudo wsuks -t 10.0.0.10 -u User -p Password -d Domain.local --dc-ip 10.0.0.1 # This will start the auto discovery mode and add the provided user to the local admin group
```
With poetry:
```
sudo poetry run wsuks -t 10.0.0.10 --WSUS-Server 10.0.0.20 # This will generate a new local user and add it to the local admin group
sudo poetry run wsuks -t 10.0.0.10 --WSUS-Server 10.0.0.20 -u User -p Password -d Domain.local # This will add the provided user to the local admin group
sudo poetry run wsuks -t 10.0.0.10 -u User -p Password -d Domain.local --dc-ip 10.0.0.1 # This will start the auto discovery mode and add the provided user to the local admin group
```
## About & Mitigation
In the [PyWSUS](https://github.com/GoSecure/pywsus) repository from GoSecure you can find great documentation on how to detect and mitigate this attack.
They also wrote a great guide demonstrating how this attack works in detail [here](https://www.gosecure.net/blog/2020/09/03/wsus-attacks-part-1-introducing-pywsus/).
This Tool is based on the following projects:
- https://github.com/GoSecure/pywsus
- https://github.com/GoSecure/wsuspect-proxy
|
PypiClean
|
/geoai_GDAL-3.4.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl/osgeo_utils/gdal_proximity.py
|
import sys
from typing import Optional, Sequence
from osgeo import gdal
from osgeo_utils.auxiliary.util import GetOutputDriverFor
def Usage():
print("""
gdal_proximity.py srcfile dstfile [-srcband n] [-dstband n]
[-of format] [-co name=value]*
[-ot Byte/UInt16/UInt32/Float32/etc]
[-values n,n,n] [-distunits PIXEL/GEO]
[-maxdist n] [-nodata n] [-use_input_nodata YES/NO]
[-fixed-buf-val n] [-q] """)
return 1
def main(argv):
driver_name = None
creation_options = []
alg_options = []
src_filename = None
src_band_n = 1
dst_filename = None
dst_band_n = 1
creation_type = 'Float32'
quiet = False
argv = gdal.GeneralCmdLineProcessor(argv)
if argv is None:
return 0
# Parse command line arguments.
i = 1
while i < len(argv):
arg = argv[i]
if arg == '-of' or arg == '-f':
i = i + 1
driver_name = argv[i]
elif arg == '-co':
i = i + 1
creation_options.append(argv[i])
elif arg == '-ot':
i = i + 1
creation_type = argv[i]
elif arg == '-maxdist':
i = i + 1
alg_options.append('MAXDIST=' + argv[i])
elif arg == '-values':
i = i + 1
alg_options.append('VALUES=' + argv[i])
elif arg == '-distunits':
i = i + 1
alg_options.append('DISTUNITS=' + argv[i])
elif arg == '-nodata':
i = i + 1
alg_options.append('NODATA=' + argv[i])
elif arg == '-use_input_nodata':
i = i + 1
alg_options.append('USE_INPUT_NODATA=' + argv[i])
elif arg == '-fixed-buf-val':
i = i + 1
alg_options.append('FIXED_BUF_VAL=' + argv[i])
elif arg == '-srcband':
i = i + 1
src_band_n = int(argv[i])
elif arg == '-dstband':
i = i + 1
dst_band_n = int(argv[i])
elif arg == '-q' or arg == '-quiet':
quiet = True
elif src_filename is None:
src_filename = argv[i]
elif dst_filename is None:
dst_filename = argv[i]
else:
return Usage()
i = i + 1
if src_filename is None or dst_filename is None:
return Usage()
return gdal_proximity(src_filename=src_filename, src_band_n=src_band_n,
dst_filename=dst_filename, dst_band_n=dst_band_n, driver_name=driver_name,
creation_type=creation_type, creation_options=creation_options,
alg_options=alg_options, quiet=quiet)
def gdal_proximity(
src_filename: Optional[str] = None,
src_band_n: int = 1,
dst_filename: Optional[str] = None,
dst_band_n: int = 1,
driver_name: Optional[str] = None,
creation_type: str = 'Float32',
creation_options: Optional[Sequence[str]] = None,
alg_options: Optional[Sequence[str]] = None,
quiet: bool = False):
# =============================================================================
# Open source file
# =============================================================================
creation_options = creation_options or []
alg_options = alg_options or []
src_ds = gdal.Open(src_filename)
if src_ds is None:
print('Unable to open %s' % src_filename)
return 1
srcband = src_ds.GetRasterBand(src_band_n)
# =============================================================================
# Try opening the destination file as an existing file.
# =============================================================================
try:
# Only open an existing file in update mode; otherwise a fresh one is
# created below. Note that the user-supplied driver_name must not be
# clobbered here, since it is needed when creating the output file.
if gdal.IdentifyDriver(dst_filename) is not None:
dst_ds = gdal.Open(dst_filename, gdal.GA_Update)
dstband = dst_ds.GetRasterBand(dst_band_n)
else:
dst_ds = None
except Exception:
dst_ds = None
# =============================================================================
# Create output file.
# =============================================================================
if dst_ds is None:
if driver_name is None:
driver_name = GetOutputDriverFor(dst_filename)
drv = gdal.GetDriverByName(driver_name)
dst_ds = drv.Create(dst_filename,
src_ds.RasterXSize, src_ds.RasterYSize, 1,
gdal.GetDataTypeByName(creation_type), creation_options)
dst_ds.SetGeoTransform(src_ds.GetGeoTransform())
dst_ds.SetProjection(src_ds.GetProjectionRef())
dstband = dst_ds.GetRasterBand(1)
# =============================================================================
# Invoke algorithm.
# =============================================================================
if quiet:
prog_func = None
else:
prog_func = gdal.TermProgress_nocb
gdal.ComputeProximity(srcband, dstband, alg_options,
callback=prog_func)
srcband = None
dstband = None
src_ds = None
dst_ds = None
if __name__ == '__main__':
sys.exit(main(sys.argv))
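# A hedged usage sketch (file names are placeholders): compute, for every pixel,
# the georeferenced distance to the nearest pixel with value 1, capped at 100
# units, writing the result to a new raster.
#   gdal_proximity("input.tif", dst_filename="proximity.tif",
#                  alg_options=["VALUES=1", "DISTUNITS=GEO", "MAXDIST=100"])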
|
PypiClean
|
/Nuitka_winsvc-1.7.10-cp310-cp310-win_amd64.whl/nuitka/nodes/NodeMakingHelpers.py
|
from nuitka import Options
from nuitka.__past__ import GenericAlias, UnionType
from nuitka.Builtins import builtin_names
from nuitka.Constants import isConstant
from nuitka.PythonVersions import python_version
from nuitka.Tracing import my_print, unusual_logger
def makeConstantReplacementNode(constant, node, user_provided):
from .ConstantRefNodes import makeConstantRefNode
return makeConstantRefNode(
constant=constant, source_ref=node.source_ref, user_provided=user_provided
)
def makeRaiseExceptionReplacementExpression(
expression, exception_type, exception_value
):
from .BuiltinRefNodes import ExpressionBuiltinExceptionRef
from .ExceptionNodes import ExpressionRaiseException
source_ref = expression.source_ref
assert type(exception_type) is str
if Options.shallWarnImplicitRaises():
unusual_logger.warning(
'%s: Will always raise exception: "%s(%s)"'
% (
source_ref.getAsString(),
exception_type,
exception_value,
)
)
result = ExpressionRaiseException(
exception_type=ExpressionBuiltinExceptionRef(
exception_name=exception_type, source_ref=source_ref
),
exception_value=makeConstantReplacementNode(
constant=exception_value, node=expression, user_provided=False
),
source_ref=source_ref,
)
return result
def makeRaiseExceptionReplacementStatement(statement, exception_type, exception_value):
from .BuiltinRefNodes import ExpressionBuiltinExceptionRef
from .ExceptionNodes import StatementRaiseExceptionImplicit
source_ref = statement.getSourceReference()
assert type(exception_type) is str
if Options.shallWarnImplicitRaises():
unusual_logger.warning(
'%s: Will always raise exception: "%s(%s)"'
% (
source_ref.getAsString(),
exception_type,
exception_value,
)
)
result = StatementRaiseExceptionImplicit(
exception_type=ExpressionBuiltinExceptionRef(
exception_name=exception_type, source_ref=source_ref
),
exception_value=makeConstantReplacementNode(
constant=exception_value, node=statement, user_provided=False
),
exception_cause=None,
exception_trace=None,
source_ref=source_ref,
)
return result
def makeRaiseExceptionReplacementExpressionFromInstance(expression, exception):
assert isinstance(exception, Exception)
args = exception.args
if type(args) is tuple and len(args) == 1:
value = args[0]
else:
assert type(args) is tuple
value = args
return makeRaiseExceptionReplacementExpression(
expression=expression,
exception_type=exception.__class__.__name__,
exception_value=value,
)
def makeRaiseExceptionStatementFromInstance(exception, source_ref):
assert isinstance(exception, Exception)
args = exception.args
if type(args) is tuple and len(args) == 1:
value = args[0]
else:
assert type(args) is tuple
value = args
from .BuiltinRefNodes import ExpressionBuiltinExceptionRef
from .ConstantRefNodes import makeConstantRefNode
from .ExceptionNodes import StatementRaiseExceptionImplicit
return StatementRaiseExceptionImplicit(
exception_type=ExpressionBuiltinExceptionRef(
exception_name=exception.__class__.__name__, source_ref=source_ref
),
exception_value=makeConstantRefNode(
constant=value, source_ref=source_ref, user_provided=False
),
exception_cause=None,
exception_trace=None,
source_ref=source_ref,
)
def makeRaiseExceptionExpressionFromTemplate(
exception_type, template, template_args, source_ref
):
from .BuiltinRefNodes import ExpressionBuiltinExceptionRef
from .ConstantRefNodes import makeConstantRefNode
from .ContainerMakingNodes import makeExpressionMakeTupleOrConstant
from .ExceptionNodes import ExpressionRaiseException
from .OperatorNodes import makeBinaryOperationNode
if type(template_args) is tuple:
template_args = makeExpressionMakeTupleOrConstant(
elements=template_args, user_provided=False, source_ref=source_ref
)
return ExpressionRaiseException(
exception_type=ExpressionBuiltinExceptionRef(
exception_name=exception_type, source_ref=source_ref
),
exception_value=makeBinaryOperationNode(
operator="Mod",
left=makeConstantRefNode(
constant=template, source_ref=source_ref, user_provided=True
),
right=template_args,
source_ref=source_ref,
),
source_ref=source_ref,
)
def makeRaiseTypeErrorExceptionReplacementFromTemplateAndValue(
template, operation, original_node, value_node
):
shape = value_node.getTypeShape()
type_name = shape.getTypeName()
if type_name is not None:
result = makeRaiseExceptionReplacementExpressionFromInstance(
expression=original_node,
exception=TypeError(template % type_name if "%" in template else template),
)
result = wrapExpressionWithNodeSideEffects(new_node=result, old_node=value_node)
else:
from .AttributeNodes import makeExpressionAttributeLookup
from .TypeNodes import ExpressionBuiltinType1
source_ref = original_node.getSourceReference()
result = makeRaiseExceptionExpressionFromTemplate(
exception_type="TypeError",
template=template,
template_args=makeExpressionAttributeLookup(
expression=ExpressionBuiltinType1(
value=value_node.makeClone(), source_ref=source_ref
),
attribute_name="__name__",
source_ref=source_ref,
),
source_ref=source_ref,
)
type_name = shape.__name__
return (
result,
"new_raise",
"Raising for use of '%s' on %s '%s'."
% (operation, "type" if type_name is not None else "shape", type_name),
)
def makeCompileTimeConstantReplacementNode(value, node, user_provided):
# This needs to match code in isCompileTimeConstantValue
if isConstant(value):
return makeConstantReplacementNode(
constant=value, node=node, user_provided=user_provided
)
elif type(value) is type:
if value.__name__ in builtin_names:
from .BuiltinRefNodes import makeExpressionBuiltinRef
# Need not provide locals_scope, not used for these kinds of built-in refs that
# refer to types.
return makeExpressionBuiltinRef(
builtin_name=value.__name__,
locals_scope=None,
source_ref=node.getSourceReference(),
)
else:
return node
elif GenericAlias is not None and isinstance(value, GenericAlias):
from .BuiltinTypeNodes import ExpressionConstantGenericAlias
return ExpressionConstantGenericAlias(
generic_alias=value,
source_ref=node.getSourceReference(),
)
elif UnionType is not None and isinstance(value, UnionType):
from .BuiltinTypeNodes import ExpressionConstantUnionType
return ExpressionConstantUnionType(
union_type=value,
source_ref=node.getSourceReference(),
)
else:
return node
def getComputationResult(node, computation, description, user_provided):
"""With a computation function, execute it and return constant result or
exception node.
"""
# Try and turn raised exceptions into static raises. pylint: disable=broad-except
try:
result = computation()
except Exception as e:
new_node = makeRaiseExceptionReplacementExpressionFromInstance(
expression=node, exception=e
)
change_tags = "new_raise"
change_desc = description + " Predicted to raise an exception."
else:
new_node = makeCompileTimeConstantReplacementNode(
value=result, node=node, user_provided=user_provided
)
if Options.is_debug:
assert new_node is not node, (node, result)
if new_node is not node:
change_tags = "new_constant"
change_desc = description + " Predicted constant result."
else:
change_tags = None
change_desc = None
return new_node, change_tags, change_desc
def makeStatementExpressionOnlyReplacementNode(expression, node):
from .StatementNodes import StatementExpressionOnly
return StatementExpressionOnly(
expression=expression, source_ref=node.getSourceReference()
)
def mergeStatements(statements, allow_none=False):
"""Helper function that merges nested statement sequences."""
merged_statements = []
for statement in statements:
if statement is None and allow_none:
pass
elif type(statement) in (tuple, list):
merged_statements += mergeStatements(statement, allow_none)
elif statement.isStatement() or statement.isStatementsFrame():
merged_statements.append(statement)
elif statement.isStatementsSequence():
merged_statements.extend(mergeStatements(statement.subnode_statements))
else:
assert False, statement
return tuple(merged_statements)
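# A hedged illustration of the flattening behavior (shapes only; the real
# elements are Nuitka statement nodes, not placeholders):
#   mergeStatements([stmt_a, [stmt_b, None, (stmt_c,)]], allow_none=True)
# flattens the nested tuple/list, drops the None, and returns
# (stmt_a, stmt_b, stmt_c).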
def makeStatementsSequenceReplacementNode(statements, node):
from .StatementNodes import StatementsSequence
return StatementsSequence(
statements=mergeStatements(statements), source_ref=node.getSourceReference()
)
def wrapExpressionWithSideEffects(side_effects, old_node, new_node):
assert new_node.isExpression()
from .SideEffectNodes import ExpressionSideEffects
if side_effects:
try:
side_effects = sum(
(
side_effect.extractSideEffects()
for side_effect in side_effects
if side_effect.mayHaveSideEffects()
),
(),
)
except AttributeError:
my_print("Problem with side effects:", side_effects)
raise
if side_effects:
new_node = ExpressionSideEffects(
expression=new_node,
side_effects=side_effects,
source_ref=old_node.getSourceReference(),
)
return new_node
def wrapExpressionWithNodeSideEffects(new_node, old_node):
return wrapExpressionWithSideEffects(
side_effects=old_node.extractSideEffects(), old_node=old_node, new_node=new_node
)
def wrapStatementWithSideEffects(new_node, old_node, allow_none=False):
assert new_node is not None or allow_none
side_effects = old_node.extractSideEffects()
if side_effects:
from .StatementNodes import StatementExpressionOnly
side_effects = tuple(
StatementExpressionOnly(
expression=side_effect, source_ref=side_effect.getSourceReference()
)
for side_effect in side_effects
)
if new_node is not None:
new_node = makeStatementsSequenceReplacementNode(
statements=side_effects + (new_node,), node=old_node
)
else:
new_node = makeStatementsSequenceReplacementNode(
statements=side_effects, node=old_node
)
return new_node
def makeStatementOnlyNodesFromExpressions(expressions):
from .StatementNodes import StatementExpressionOnly, StatementsSequence
statements = tuple(
StatementExpressionOnly(
expression=expression, source_ref=expression.getSourceReference()
)
for expression in expressions
)
if not statements:
return None
elif len(statements) == 1:
return statements[0]
else:
return StatementsSequence(
statements=statements, source_ref=statements[0].getSourceReference()
)
def makeVariableRefNode(variable, source_ref):
if variable.isTempVariable():
from .VariableRefNodes import ExpressionTempVariableRef
return ExpressionTempVariableRef(variable=variable, source_ref=source_ref)
else:
from .VariableRefNodes import ExpressionVariableRef
return ExpressionVariableRef(variable=variable, source_ref=source_ref)
def makeExpressionBuiltinLocals(locals_scope, source_ref):
if locals_scope.isModuleScope():
from .GlobalsLocalsNodes import ExpressionBuiltinGlobals
return ExpressionBuiltinGlobals(source_ref=source_ref)
else:
from .GlobalsLocalsNodes import (
ExpressionBuiltinLocalsCopy,
ExpressionBuiltinLocalsRef,
ExpressionBuiltinLocalsUpdated,
)
if locals_scope.isClassScope():
return ExpressionBuiltinLocalsRef(
locals_scope=locals_scope, source_ref=source_ref
)
elif python_version >= 0x300 or locals_scope.isUnoptimizedFunctionScope():
assert locals_scope.isFunctionScope(), locals_scope
return ExpressionBuiltinLocalsUpdated(
locals_scope=locals_scope, source_ref=source_ref
)
else:
return ExpressionBuiltinLocalsCopy(
locals_scope=locals_scope, source_ref=source_ref
)
def makeRaiseImportErrorReplacementExpression(expression, module_name):
return makeRaiseExceptionReplacementExpression(
expression=expression,
exception_type="ImportError",
exception_value=module_name.asString(),
)
|
PypiClean
|
/ec2instanceconnectcli-ptc-1.0.1.tar.gz/ec2instanceconnectcli-ptc-1.0.1/doc/README.rst
|
==========================
Building The Documentation
==========================
**Note: This is not complete and currently only does basic doc generation.**
Before building the documentation, make sure you have Python 2.7,
the ec2instanceconnectcli, and all the necessary dependencies installed. You can
install dependencies by using the requirements-docs.txt file at the
root of this repo::
pip install -r requirements-docs.txt
The process for building the documentation is:
* Run ``make html`` which will build all of the HTML documentation
into the ``build/html`` directory.
* Run ``make man`` which will build all of the man pages into
``../doc/man/man1``. These files are included in the source
distribution and installed by ``python setup.py install``.
* Run ``make text`` which will build all of the text pages that
are used for interactive help on the Windows platform. These files
are included in the source distribution and installed by
``python setup.py install``.
You can perform all of these tasks by running ``make all`` in this
directory. If you have previously built the documentation and want
to regenerate it, run ``make clean`` first.
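
For example, a full rebuild of every output format might look like this
(a sketch, assuming GNU Make and the dependencies above are installed)::

    make clean
    make all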
|
PypiClean
|
/azure_mgmt_iothub-2.4.0-py3-none-any.whl/azure/mgmt/iothub/v2021_03_31/_iot_hub_client.py
|
from copy import deepcopy
from typing import Any, TYPE_CHECKING
from azure.core.rest import HttpRequest, HttpResponse
from azure.mgmt.core import ARMPipelineClient
from . import models as _models
from .._serialization import Deserializer, Serializer
from ._configuration import IotHubClientConfiguration
from .operations import (
CertificatesOperations,
IotHubOperations,
IotHubResourceOperations,
Operations,
PrivateEndpointConnectionsOperations,
PrivateLinkResourcesOperations,
ResourceProviderCommonOperations,
)
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
class IotHubClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes
"""Use this API to manage the IoT hubs in your Azure subscription.
:ivar operations: Operations operations
:vartype operations: azure.mgmt.iothub.v2021_03_31.operations.Operations
:ivar iot_hub_resource: IotHubResourceOperations operations
:vartype iot_hub_resource: azure.mgmt.iothub.v2021_03_31.operations.IotHubResourceOperations
:ivar resource_provider_common: ResourceProviderCommonOperations operations
:vartype resource_provider_common:
azure.mgmt.iothub.v2021_03_31.operations.ResourceProviderCommonOperations
:ivar certificates: CertificatesOperations operations
:vartype certificates: azure.mgmt.iothub.v2021_03_31.operations.CertificatesOperations
:ivar iot_hub: IotHubOperations operations
:vartype iot_hub: azure.mgmt.iothub.v2021_03_31.operations.IotHubOperations
:ivar private_link_resources: PrivateLinkResourcesOperations operations
:vartype private_link_resources:
azure.mgmt.iothub.v2021_03_31.operations.PrivateLinkResourcesOperations
:ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations
:vartype private_endpoint_connections:
azure.mgmt.iothub.v2021_03_31.operations.PrivateEndpointConnectionsOperations
:param credential: Credential needed for the client to connect to Azure. Required.
:type credential: ~azure.core.credentials.TokenCredential
:param subscription_id: The subscription identifier. Required.
:type subscription_id: str
:param base_url: Service URL. Default value is "https://management.azure.com".
:type base_url: str
:keyword api_version: Api Version. Default value is "2021-03-31". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
"""
def __init__(
self,
credential: "TokenCredential",
subscription_id: str,
base_url: str = "https://management.azure.com",
**kwargs: Any
) -> None:
self._config = IotHubClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs)
self._client: ARMPipelineClient = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
self.operations = Operations(self._client, self._config, self._serialize, self._deserialize)
self.iot_hub_resource = IotHubResourceOperations(self._client, self._config, self._serialize, self._deserialize)
self.resource_provider_common = ResourceProviderCommonOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.certificates = CertificatesOperations(self._client, self._config, self._serialize, self._deserialize)
self.iot_hub = IotHubOperations(self._client, self._config, self._serialize, self._deserialize)
self.private_link_resources = PrivateLinkResourcesOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.private_endpoint_connections = PrivateEndpointConnectionsOperations(
self._client, self._config, self._serialize, self._deserialize
)
def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
        >>> request = HttpRequest("GET", "https://www.example.org/")
>>> response = client._send_request(request)
<HttpResponse: 200 OK>
For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request
:param request: The network request you want to make. Required.
:type request: ~azure.core.rest.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to False.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.rest.HttpResponse
"""
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
return self._client.send_request(request_copy, **kwargs)
def close(self) -> None:
self._client.close()
def __enter__(self) -> "IotHubClient":
self._client.__enter__()
return self
def __exit__(self, *exc_details: Any) -> None:
self._client.__exit__(*exc_details)
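

if __name__ == "__main__":
    # Minimal usage sketch, not part of the generated client: assumes the
    # ``azure-identity`` package is installed and that the credential below
    # can read the given subscription.
    from azure.identity import DefaultAzureCredential

    with IotHubClient(DefaultAzureCredential(), "<subscription-id>") as demo_client:
        # Enumerate the IoT hubs in the subscription via the resource operations.
        for hub in demo_client.iot_hub_resource.list_by_subscription():
            print(hub.name)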
|
PypiClean
|
/dvha_stats-0.2.4.post1-py3-none-any.whl/dvhastats/plot.py
|
"""Basic plotting class objects for DVHA-Stats based on matplotlib"""
#
# Copyright (c) 2020 Dan Cutright
# This file is part of DVHA-Stats, released under a MIT license.
# See the file LICENSE included with this distribution, also
# available at https://github.com/cutright/DVHA-Stats
from matplotlib import pyplot as plt
import numpy as np
FIGURE_COUNT = 1
def get_new_figure_num():
"""Get a number for a new matplotlib figure
Returns
----------
int
Figure number
"""
global FIGURE_COUNT
FIGURE_COUNT += 1
return FIGURE_COUNT - 1
class Chart:
"""Base class for charts
Parameters
----------
title : str, optional
Set the title suptitle
fig_init : bool
Automatically call pyplot.figure, store in Chart.figure
"""
def __init__(self, title=None, fig_init=True):
"""Initialization of Chart base class"""
self.title = title
self.figure = plt.figure(get_new_figure_num()) if fig_init else None
if title and fig_init:
self.figure.suptitle(title, fontsize=16)
def show(self):
"""Display this figure"""
self.activate()
plt.show()
def activate(self):
"""Activate this figure"""
plt.figure(self.figure.number)
def close(self):
"""Close this figure"""
plt.close(self.figure.number)
class Plot(Chart):
"""Generic plotting class with matplotlib
Parameters
----------
y : np.ndarray, list
The y data to be plotted (1-D only)
x : np.ndarray, list, optional
Optionally specify the x-axis values. Otherwise index+1 is used.
show : bool
Automatically plot the data if True
title : str
Set the plot title
xlabel : str
Set the x-axis title
ylabel : str
Set the y-axis title
line : bool
Plot the data as a line series
line_color : str, optional
Specify the line color
line_width : float, int
Specify the line width
line_style : str
Specify the line style
scatter : bool
Plot the data as a scatter plot (circles)
scatter_color : str, optional
Specify the scatter plot circle color
"""
def __init__(
self,
y,
x=None,
show=True,
title="Chart",
xlabel="Independent Variable",
ylabel="Dependent Variable",
line=True,
line_color=None,
line_width=1.0,
line_style="-",
scatter=True,
scatter_color=None,
):
"""Initialization of a general Plot class object"""
Chart.__init__(self, title=title)
self.x = np.linspace(1, len(y), len(y)) if x is None else np.array(x)
self.y = np.array(y) if not isinstance(y, np.ndarray) else y
self.show = show
self.xlabel = xlabel
self.ylabel = ylabel
self.line = line
self.line_color = line_color
self.line_width = line_width
self.line_style = line_style
self.scatter = scatter
self.scatter_color = scatter_color
self.activate()
self.__add_labels()
self.__add_data()
if show:
plt.show()
def __add_labels(self):
"""Set the x and y axes labels to figure"""
plt.xlabel(self.xlabel)
plt.ylabel(self.ylabel)
def __add_data(self):
"""Add scatter and/or line data to figure"""
if self.scatter:
self.add_scatter()
if self.line:
self.add_default_line()
def add_scatter(self):
"""Add scatter data to figure"""
self.activate()
plt.scatter(self.x, self.y, color=self.scatter_color)
def add_default_line(self):
"""Add line data to figure"""
self.activate()
plt.plot(
self.x,
self.y,
color=self.line_color,
linewidth=self.line_width,
linestyle=self.line_style,
)
def add_line(
self, y, x=None, line_color=None, line_width=None, line_style=None
):
"""Add another line with the provided data
Parameters
----------
y : np.ndarray, list
The y data to be plotted (1-D only)
x: np.ndarray, list, optional
Optionally specify the x-axis values. Otherwise index+1 is used.
line_color: str, optional
Specify the line color
line_width: float, int
Specify the line width
line_style : str
Specify the line style
"""
self.activate()
plt.plot(
np.linspace(1, len(y), len(y)) if x is None else x,
y,
color=line_color,
linewidth=line_width,
linestyle=line_style,
)
class ControlChart(Plot):
"""ControlChart class object
Parameters
----------
y : np.ndarray, list
Charting data
out_of_control : np.ndarray, list
The indices of y that are out-of-control
center_line : float, np.ndarray
The center line value (e.g., np.mean(y))
lcl : float, optional
The lower control limit (LCL). Line omitted if lcl is None.
ucl : float, optional
The upper control limit (UCL). Line omitted if ucl is None.
title: str
Set the plot title
xlabel: str
Set the x-axis title
ylabel: str
Set the y-axis title
line_color: str, optional
Specify the line color
line_width: float, int
Specify the line width
kwargs : any
Any additional keyword arguments applicable to the Plot class
"""
def __init__(
self,
y,
out_of_control,
center_line,
lcl=None,
ucl=None,
title="Control Chart",
xlabel="Observation",
ylabel="Charting Variable",
line_color="black",
line_width=0.75,
center_line_color="black",
center_line_width=1.0,
center_line_style="--",
limit_line_color="red",
limit_line_width=1.0,
limit_line_style="--",
**kwargs
):
"""Initialization of a ControlChart plot class object"""
self.center_line = center_line
self.lcl = lcl
self.ucl = ucl
self.out_of_control = out_of_control
self.center_line_color = center_line_color
self.center_line_width = center_line_width
self.center_line_style = center_line_style
self.limit_line_color = limit_line_color
self.limit_line_width = limit_line_width
self.limit_line_style = limit_line_style
kwargs["title"] = title
kwargs["xlabel"] = xlabel
kwargs["ylabel"] = ylabel
kwargs["line_color"] = line_color
kwargs["line_width"] = line_width
Plot.__init__(self, y, **kwargs)
self.__add_cc_data()
self.__add_table_with_limits()
if self.show:
plt.show()
def __set_y_scatter_data(self):
"""Add circles colored by out-of-control status"""
include = np.full(len(self.y), True)
for i in self.out_of_control:
include[i] = False
self.ic = {"x": self.x[include], "y": self.y[include]}
self.ooc = {"x": self.x[~include], "y": self.y[~include]}
def __add_cc_data(self):
"""Add center line and upper/lower control limit lines"""
self.add_control_limit_line(self.ucl)
self.add_control_limit_line(self.lcl)
self.add_center_line()
def __add_table_with_limits(self):
"""Add tables with center line and upper/lower control limit values"""
self.activate()
plt.subplots_adjust(bottom=0.25)
plt.table(
cellText=self.__table_text,
cellLoc="center",
colLabels=["Center Line", "LCL", "UCL"],
loc="bottom",
bbox=[0.0, -0.31, 1, 0.12],
)
@property
def __table_text(self):
"""Get text to pass into matplotlib table creation"""
props = ["center_line", "lcl", "ucl"]
text = []
for prop in props:
value = getattr(self, prop)
if isinstance(value, float):
                # Fixed-point notation for magnitudes in [0.1, 9999],
                # scientific notation otherwise.
                formatter = ["E", "f"][9999 >= abs(float(value)) >= 0.1]
text.append(("%%0.3%s" % formatter) % value)
else:
text.append(str(value))
return [text]
def add_scatter(self):
"""Set scatter data, add in- and out-of-control circles"""
self.activate()
self.__set_y_scatter_data()
plt.scatter(self.ic["x"], self.ic["y"], color=self.scatter_color)
plt.scatter(self.ooc["x"], self.ooc["y"], color="red")
def add_control_limit_line(
self, limit, color=None, line_width=None, line_style=None
):
"""Add a control limit line to plot"""
self.activate()
color = self.limit_line_color if color is None else color
line_width = (
self.limit_line_width if line_width is None else line_width
)
line_style = (
self.limit_line_style if line_style is None else line_style
)
if limit is not None:
plt.plot(
[1, len(self.x)],
[limit] * 2,
color=color,
linewidth=line_width,
linestyle=line_style,
)
def add_center_line(self, color=None, line_width=None, line_style=None):
"""Add the center line to the plot"""
self.activate()
color = self.center_line_color if color is None else color
line_width = (
self.center_line_width if line_width is None else line_width
)
line_style = (
self.center_line_style if line_style is None else line_style
)
plt.plot(
[1, len(self.x)],
[self.center_line] * 2,
color=color,
linewidth=line_width,
linestyle=line_style,
)
class HeatMap(Chart):
"""Create a heat map using matplotlib.pyplot.matshow
Parameters
----------
X : np.ndarray
Input data (2-D) with N rows of observations and
p columns of variables.
xlabels : list, optional
Optionally set the variable names with a list of str
ylabels : list, optional
Optionally set the variable names with a list of str
title : str, optional
Set the title suptitle
cmap : str
matplotlib compatible color map
show : bool
Automatically show the figure
"""
def __init__(
self,
X,
xlabels=None,
ylabels=None,
title=None,
cmap="viridis",
show=True,
):
"""Initialization of a HeatMap Chart object"""
Chart.__init__(self, title=title)
self.X = X
self.x_labels = range(X.shape[1]) if xlabels is None else xlabels
self.y_labels = range(X.shape[0]) if ylabels is None else ylabels
plt.matshow(X, cmap=cmap, fignum=self.figure.number)
plt.colorbar()
self.__set_ticks()
if show:
self.show()
def __set_ticks(self):
"""Set tick labels based on x and y labels"""
plt.xticks(
range(self.X.shape[1]), self.x_labels, rotation=30, ha="left"
)
plt.yticks(range(self.X.shape[0]), self.y_labels, rotation=30)
class PCAFeatureMap(HeatMap):
"""Specialized Heat Map for PCA feature evaluation
Parameters
----------
X : np.ndarray
Input data (2-D) with N rows of observations and
p columns of variables.
features : list, optional
Optionally set the feature names with a list of str
title : str, optional
Set the title suptitle
cmap : str
matplotlib compatible color map
show : bool
Automatically show the figure
"""
def __init__(
self,
X,
features=None,
cmap="viridis",
show=True,
title="PCA Feature Heat Map",
):
"""Initialization of a HeatMap Chart object"""
HeatMap.__init__(
self,
X,
xlabels=features,
ylabels=self.get_comp_labels(X.shape[0]),
cmap=cmap,
show=show,
title=title,
)
def get_comp_labels(self, n_components):
"""Get ylabels for HeatMap"""
return [
"%s Comp" % (self.get_ordinal(n + 1)) for n in range(n_components)
]
@staticmethod
def get_ordinal(n):
"""Convert number to its ordinal (e.g., 1 to 1st)
Parameters
----------
n : int
Number to be converted to ordinal
Returns
----------
str
the ordinal of n
"""
return "%d%s" % (
n,
"tsnrhtdd"[(n // 10 % 10 != 1) * (n % 10 < 4) * n % 10 :: 4],
)
class DistributionChart(Chart):
"""Distribution plotting class object (base for histogram / boxplot
Parameters
----------
data : array-like
Input array (1-D or 2-D)
title : str
Set the plot title
xlabel : str
Set the x-axis title
ylabel : str
Set the y-axis title
kwargs : any
Any keyword argument may be set per matplotlib histogram:
https://matplotlib.org/3.3.1/api/_as_gen/matplotlib.pyplot.hist.html
"""
def __init__(
self, data, title="Chart", xlabel="Bins", ylabel="Counts", **kwargs
):
"""Initialization of Histogram class"""
self.title = title
Chart.__init__(self, title=self.title, fig_init=False)
self.data = np.array(data)
self.xlabel = xlabel
self.ylabel = ylabel
self.kwargs = kwargs
def _set_title(self):
"""Set the figure title"""
self.figure.suptitle(self.title, fontsize=16)
def _add_labels(self):
"""Set the x and y axes labels to figure"""
plt.xlabel(self.xlabel)
plt.ylabel(self.ylabel)
class Histogram(DistributionChart):
"""Histogram plotting class object
Parameters
----------
data : array-like
Input array (1-D)
bins : int, sequence, str
default: rcParams["hist.bins"] (default: 10)
If bins is an integer, it defines the number of equal-width
bins in the range.
If bins is a sequence, it defines the bin edges, including the
left edge of the first bin and the right edge of the last bin;
in this case, bins may be unequally spaced. All but the last
(righthand-most) bin is half-open. In other words, if bins is:
[1, 2, 3, 4]
then the first bin is [1, 2) (including 1, but excluding 2) and
the second [2, 3). The last bin, however, is [3, 4], which
includes 4.
If bins is a string, it is one of the binning strategies supported
by numpy.histogram_bin_edges: 'auto', 'fd', 'doane', 'scott',
'stone', 'rice', 'sturges', or 'sqrt'.
title : str
Set the plot title
xlabel : str
Set the x-axis title
ylabel : str
Set the y-axis title
kwargs : any
Any keyword argument may be set per matplotlib histogram:
https://matplotlib.org/3.3.1/api/_as_gen/matplotlib.pyplot.hist.html
"""
def __init__(
self,
data,
bins=10,
title="Histogram",
xlabel="Bins",
ylabel="Counts",
**kwargs
):
"""Initialization of Histogram class"""
self.bins = bins
DistributionChart.__init__(
self, data, title=title, xlabel=xlabel, ylabel=ylabel, **kwargs
)
self.__set_hist_data()
self._set_title()
self._add_labels()
def __set_hist_data(self):
"""Generate histogram data and add to figure"""
self.figure, self.axes = plt.subplots()
self.axes.hist(self.data, bins=self.bins, **self.kwargs)
class BoxPlot(DistributionChart):
"""Box and Whisker plotting class object
Parameters
----------
data : array-like
Input array (1-D or 2-D)
title : str, optional
Set the plot title
xlabel : str, optional
Set the x-axis title
xlabels : array-like, optional
Set the xtick labels (e.g., variable names for each box plot)
ylabel : str, optional
Set the y-axis title
kwargs : any, optional
Any keyword argument may be set per matplotlib histogram:
https://matplotlib.org/3.3.1/api/_as_gen/matplotlib.pyplot.boxplot.html
"""
def __init__(
self,
data,
title="Box and Whisker",
xlabel="",
ylabel="",
xlabels=None,
**kwargs
):
"""Initialization of Histogram class"""
self.xlabels = xlabels
DistributionChart.__init__(
self, data, title=title, xlabel=xlabel, ylabel=ylabel, **kwargs
)
self.__set_boxplot_data()
self._set_title()
self._add_labels()
self.__set_ticks()
def __set_boxplot_data(self):
"""Generate boxplot data and add to figure"""
self.figure, self.axes = plt.subplots()
self.axes.boxplot(self.data)
def __set_ticks(self):
"""Set tick labels based on variable names"""
if self.xlabels is not None:
if len(self.data.shape) == 2:
length = self.data.shape[1]
else:
length = 1
plt.xticks(
range(1, length + 1),
self.xlabels,
rotation=30,
ha="left",
)
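

if __name__ == "__main__":
    # Minimal usage sketch (illustrative only, not part of the library):
    # draw a line/scatter plot and a histogram from random data.
    rng = np.random.default_rng(seed=0)
    y = rng.normal(size=50)

    chart = Plot(y, title="Demo Plot", xlabel="Index", ylabel="Value", show=False)
    chart.show()

    hist = Histogram(y, bins=12, title="Demo Histogram")
    hist.show()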
|
PypiClean
|
/ansible-8.3.0-py3-none-any.whl/ansible_collections/community/routeros/plugins/module_utils/_api_data.py
|
# The data inside here is private to this collection. If you use this from outside the collection,
# you are on your own. There can be random changes to its format even in bugfix releases!
from __future__ import absolute_import, division, print_function
__metaclass__ = type
class APIData(object):
def __init__(self, primary_keys=None,
stratify_keys=None,
required_one_of=None,
mutually_exclusive=None,
has_identifier=False,
single_value=False,
unknown_mechanism=False,
fully_understood=False,
fixed_entries=False,
fields=None):
if sum([primary_keys is not None, stratify_keys is not None, has_identifier, single_value, unknown_mechanism]) > 1:
raise ValueError('primary_keys, stratify_keys, has_identifier, single_value, and unknown_mechanism are mutually exclusive')
if unknown_mechanism and fully_understood:
raise ValueError('unknown_mechanism and fully_understood cannot be combined')
self.primary_keys = primary_keys
self.stratify_keys = stratify_keys
self.required_one_of = required_one_of or []
self.mutually_exclusive = mutually_exclusive or []
self.has_identifier = has_identifier
self.single_value = single_value
self.unknown_mechanism = unknown_mechanism
self.fully_understood = fully_understood
self.fixed_entries = fixed_entries
if fixed_entries and primary_keys is None:
raise ValueError('fixed_entries can only be used with primary_keys')
if fields is None:
raise ValueError('fields must be provided')
self.fields = fields
if primary_keys:
for pk in primary_keys:
if pk not in fields:
raise ValueError('Primary key {pk} must be in fields!'.format(pk=pk))
if stratify_keys:
for sk in stratify_keys:
if sk not in fields:
raise ValueError('Stratify key {sk} must be in fields!'.format(sk=sk))
if required_one_of:
for index, require_list in enumerate(required_one_of):
if not isinstance(require_list, list):
                    raise ValueError('required_one_of element at index #{index} must be a list!'.format(index=index + 1))
for rk in require_list:
if rk not in fields:
                        raise ValueError('required_one_of key {rk} must be in fields!'.format(rk=rk))
if mutually_exclusive:
for index, exclusive_list in enumerate(mutually_exclusive):
if not isinstance(exclusive_list, list):
raise ValueError('Mutually exclusive element at index #{index} must be a list!'.format(index=index + 1))
for ek in exclusive_list:
if ek not in fields:
raise ValueError('Mutually exclusive key {ek} must be in fields!'.format(ek=ek))
class KeyInfo(object):
def __init__(self, _dummy=None, can_disable=False, remove_value=None, absent_value=None, default=None, required=False, automatically_computed_from=None):
if _dummy is not None:
raise ValueError('KeyInfo() does not have positional arguments')
if sum([required, default is not None or can_disable, automatically_computed_from is not None]) > 1:
            raise ValueError(
                'required, default, automatically_computed_from, and can_disable are mutually exclusive, ' +
                'except that default and can_disable can be set together')
if not can_disable and remove_value is not None:
raise ValueError('remove_value can only be specified if can_disable=True')
if absent_value is not None and any([default is not None, automatically_computed_from is not None, can_disable]):
            raise ValueError('absent_value can not be combined with default, automatically_computed_from, or can_disable=True')
self.can_disable = can_disable
self.remove_value = remove_value
self.automatically_computed_from = automatically_computed_from
self.default = default
self.required = required
self.absent_value = absent_value
def split_path(path):
return path.split()
def join_path(path):
return ' '.join(path)
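
# For example (an illustrative round trip):
#   split_path('interface bridge port') == ['interface', 'bridge', 'port']
#   join_path(['interface', 'bridge', 'port']) == 'interface bridge port'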
# How to obtain this information:
# 1. Run `/export verbose` in the CLI;
# 2. All attributes listed there go into the `fields` list;
#    attributes which can have a `!` ahead should have `can_disable=True`
# 3. All bold attributes go into the `primary_keys` list -- this is not always true!
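# For example: if `/export verbose` for `/interface gre` prints the keepalive
# attribute with a leading `!`, the matching entry below becomes
# 'keepalive': KeyInfo(default='10s,10', can_disable=True).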
PATHS = {
('interface', 'bonding'): APIData(
fully_understood=True,
primary_keys=('name', ),
fields={
'arp': KeyInfo(default='enabled'),
'arp-interval': KeyInfo(default='100ms'),
'arp-ip-targets': KeyInfo(default=''),
'arp-timeout': KeyInfo(default='auto'),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'down-delay': KeyInfo(default='0ms'),
'forced-mac-address': KeyInfo(can_disable=True),
'lacp-rate': KeyInfo(default='30secs'),
'lacp-user-key': KeyInfo(can_disable=True, remove_value=0),
'link-monitoring': KeyInfo(default='mii'),
'mii-interval': KeyInfo(default='100ms'),
'min-links': KeyInfo(default=0),
'mlag-id': KeyInfo(can_disable=True, remove_value=0),
'mode': KeyInfo(default='balance-rr'),
'mtu': KeyInfo(default=1500),
'name': KeyInfo(),
'primary': KeyInfo(default='none'),
'slaves': KeyInfo(required=True),
'transmit-hash-policy': KeyInfo(default='layer-2'),
'up-delay': KeyInfo(default='0ms'),
}
),
('interface', 'bridge'): APIData(
fully_understood=True,
primary_keys=('name', ),
fields={
'admin-mac': KeyInfo(default=''),
'ageing-time': KeyInfo(default='5m'),
'arp': KeyInfo(default='enabled'),
'arp-timeout': KeyInfo(default='auto'),
'auto-mac': KeyInfo(default=True),
'comment': KeyInfo(can_disable=True, remove_value=''),
'dhcp-snooping': KeyInfo(default=False),
'disabled': KeyInfo(default=False),
'ether-type': KeyInfo(default='0x8100'),
'fast-forward': KeyInfo(default=True),
'frame-types': KeyInfo(default='admit-all'),
'forward-delay': KeyInfo(default='15s'),
'igmp-snooping': KeyInfo(default=False),
'ingress-filtering': KeyInfo(default=True),
'max-message-age': KeyInfo(default='20s'),
'mtu': KeyInfo(default='auto'),
'name': KeyInfo(),
'priority': KeyInfo(default='0x8000'),
'protocol-mode': KeyInfo(default='rstp'),
'pvid': KeyInfo(default=1),
'transmit-hold-count': KeyInfo(default=6),
'vlan-filtering': KeyInfo(default=False),
},
),
('interface', 'eoip'): APIData(
fully_understood=True,
primary_keys=('name',),
fields={
'allow-fast-path': KeyInfo(default=True),
'arp': KeyInfo(default='enabled'),
'arp-timeout': KeyInfo(default='auto'),
'clamp-tcp-mss': KeyInfo(default=True),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'dont-fragment': KeyInfo(default=False),
'dscp': KeyInfo(default='inherit'),
'ipsec-secret': KeyInfo(can_disable=True),
'keepalive': KeyInfo(default='10s,10', can_disable=True),
'local-address': KeyInfo(default='0.0.0.0'),
'loop-protect': KeyInfo(default='default'),
'loop-protect-disable-time': KeyInfo(default='5m'),
'loop-protect-send-interval': KeyInfo(default='5s'),
'mac-address': KeyInfo(),
'mtu': KeyInfo(default='auto'),
'name': KeyInfo(),
'remote-address': KeyInfo(required=True),
'tunnel-id': KeyInfo(required=True),
},
),
('interface', 'ethernet'): APIData(
fixed_entries=True,
fully_understood=True,
primary_keys=('default-name', ),
fields={
'default-name': KeyInfo(),
'advertise': KeyInfo(),
'arp': KeyInfo(default='enabled'),
'arp-timeout': KeyInfo(default='auto'),
'auto-negotiation': KeyInfo(default=True),
'bandwidth': KeyInfo(default='unlimited/unlimited'),
'combo-mode': KeyInfo(can_disable=True),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'fec-mode': KeyInfo(can_disable=True),
'full-duplex': KeyInfo(default=True),
'l2mtu': KeyInfo(default=1598),
'loop-protect': KeyInfo(default='default'),
'loop-protect-disable-time': KeyInfo(default='5m'),
'loop-protect-send-interval': KeyInfo(default='5s'),
'mac-address': KeyInfo(),
'mdix-enable': KeyInfo(),
'mtu': KeyInfo(default=1500),
'name': KeyInfo(),
'orig-mac-address': KeyInfo(),
'poe-out': KeyInfo(can_disable=True),
'poe-priority': KeyInfo(can_disable=True),
'poe-voltage': KeyInfo(can_disable=True),
'power-cycle-interval': KeyInfo(),
'power-cycle-ping-address': KeyInfo(can_disable=True),
'power-cycle-ping-enabled': KeyInfo(),
'power-cycle-ping-timeout': KeyInfo(can_disable=True),
'rx-flow-control': KeyInfo(default='off'),
'sfp-rate-select': KeyInfo(default='high'),
'sfp-shutdown-temperature': KeyInfo(default='95C'),
'speed': KeyInfo(),
'tx-flow-control': KeyInfo(default='off'),
},
),
('interface', 'ethernet', 'poe'): APIData(
fixed_entries=True,
fully_understood=True,
primary_keys=('name', ),
fields={
'name': KeyInfo(),
'poe-out': KeyInfo(default='auto-on'),
'poe-priority': KeyInfo(default=10),
'poe-voltage': KeyInfo(default='auto'),
'power-cycle-interval': KeyInfo(default='none'),
'power-cycle-ping-address': KeyInfo(can_disable=True),
'power-cycle-ping-enabled': KeyInfo(default=False),
'power-cycle-ping-timeout': KeyInfo(can_disable=True),
}
),
('interface', 'gre'): APIData(
fully_understood=True,
primary_keys=('name', ),
fields={
'allow-fast-path': KeyInfo(default=True),
'clamp-tcp-mss': KeyInfo(default=True),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'dont-fragment': KeyInfo(default=False),
'dscp': KeyInfo(default='inherit'),
'ipsec-secret': KeyInfo(can_disable=True),
'keepalive': KeyInfo(default='10s,10', can_disable=True),
'local-address': KeyInfo(default='0.0.0.0'),
'mtu': KeyInfo(default='auto'),
'name': KeyInfo(),
'remote-address': KeyInfo(required=True),
},
),
('interface', 'gre6'): APIData(
fully_understood=True,
primary_keys=('name',),
fields={
'clamp-tcp-mss': KeyInfo(default=True),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'dscp': KeyInfo(default='inherit'),
'ipsec-secret': KeyInfo(can_disable=True),
'keepalive': KeyInfo(default='10s,10', can_disable=True),
'local-address': KeyInfo(default='::'),
'mtu': KeyInfo(default='auto'),
'name': KeyInfo(),
'remote-address': KeyInfo(required=True),
},
),
('interface', 'list'): APIData(
primary_keys=('name', ),
fully_understood=True,
fields={
'comment': KeyInfo(can_disable=True, remove_value=''),
'exclude': KeyInfo(),
'include': KeyInfo(),
'name': KeyInfo(),
},
),
('interface', 'list', 'member'): APIData(
primary_keys=('list', 'interface', ),
fully_understood=True,
fields={
'comment': KeyInfo(can_disable=True, remove_value=''),
'interface': KeyInfo(),
'list': KeyInfo(),
'disabled': KeyInfo(default=False),
},
),
('interface', 'lte', 'apn'): APIData(
unknown_mechanism=True,
# primary_keys=('default', ),
fields={
'default': KeyInfo(),
'add-default-route': KeyInfo(),
'apn': KeyInfo(),
'default-route-distance': KeyInfo(),
'name': KeyInfo(),
'use-peer-dns': KeyInfo(),
},
),
('interface', 'pppoe-client'): APIData(
fully_understood=True,
primary_keys=('name', ),
fields={
'ac-name': KeyInfo(default=''),
'add-default-route': KeyInfo(default=False),
'allow': KeyInfo(default='pap,chap,mschap1,mschap2'),
'comment': KeyInfo(can_disable=True, remove_value=''),
'default-route-distance': KeyInfo(default=1),
'dial-on-demand': KeyInfo(default=False),
'disabled': KeyInfo(default=True),
'host-uniq': KeyInfo(can_disable=True),
'interface': KeyInfo(required=True),
'keepalive-timeout': KeyInfo(default=10),
'max-mru': KeyInfo(default='auto'),
'max-mtu': KeyInfo(default='auto'),
'mrru': KeyInfo(default='disabled'),
'name': KeyInfo(),
'password': KeyInfo(default=''),
'profile': KeyInfo(default='default'),
'service-name': KeyInfo(default=''),
'use-peer-dns': KeyInfo(default=False),
'user': KeyInfo(default=''),
},
),
('interface', 'vlan'): APIData(
fully_understood=True,
primary_keys=('name', ),
fields={
'arp': KeyInfo(default='enabled'),
'arp-timeout': KeyInfo(default='auto'),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'interface': KeyInfo(required=True),
'loop-protect': KeyInfo(default='default'),
'loop-protect-disable-time': KeyInfo(default='5m'),
'loop-protect-send-interval': KeyInfo(default='5s'),
'mtu': KeyInfo(default=1500),
'name': KeyInfo(),
'use-service-tag': KeyInfo(default=False),
'vlan-id': KeyInfo(required=True),
},
),
('interface', 'vrrp'): APIData(
fully_understood=True,
primary_keys=('name', ),
fields={
'arp': KeyInfo(default='enabled'),
'arp-timeout': KeyInfo(default='auto'),
'authentication': KeyInfo(default='none'),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'group-master': KeyInfo(default=''),
'interface': KeyInfo(required=True),
'interval': KeyInfo(default='1s'),
'mtu': KeyInfo(default=1500),
'name': KeyInfo(),
'on-backup': KeyInfo(default=''),
'on-fail': KeyInfo(default=''),
'on-master': KeyInfo(default=''),
'password': KeyInfo(default=''),
'preemption-mode': KeyInfo(default=True),
'priority': KeyInfo(default=100),
'remote-address': KeyInfo(),
'sync-connection-tracking': KeyInfo(default=False),
'v3-protocol': KeyInfo(default='ipv4'),
'version': KeyInfo(default=3),
'vrid': KeyInfo(default=1),
},
),
('interface', 'wireless', 'security-profiles'): APIData(
unknown_mechanism=True,
# primary_keys=('default', ),
fields={
'default': KeyInfo(),
'authentication-types': KeyInfo(),
'disable-pmkid': KeyInfo(),
'eap-methods': KeyInfo(),
'group-ciphers': KeyInfo(),
'group-key-update': KeyInfo(),
'interim-update': KeyInfo(),
'management-protection': KeyInfo(),
'management-protection-key': KeyInfo(),
'mode': KeyInfo(),
'mschapv2-password': KeyInfo(),
'mschapv2-username': KeyInfo(),
'name': KeyInfo(),
'radius-called-format': KeyInfo(),
'radius-eap-accounting': KeyInfo(),
'radius-mac-accounting': KeyInfo(),
'radius-mac-authentication': KeyInfo(),
'radius-mac-caching': KeyInfo(),
'radius-mac-format': KeyInfo(),
'radius-mac-mode': KeyInfo(),
'static-algo-0': KeyInfo(),
'static-algo-1': KeyInfo(),
'static-algo-2': KeyInfo(),
'static-algo-3': KeyInfo(),
'static-key-0': KeyInfo(),
'static-key-1': KeyInfo(),
'static-key-2': KeyInfo(),
'static-key-3': KeyInfo(),
'static-sta-private-algo': KeyInfo(),
'static-sta-private-key': KeyInfo(),
'static-transmit-key': KeyInfo(),
'supplicant-identity': KeyInfo(),
'tls-certificate': KeyInfo(),
'tls-mode': KeyInfo(),
'unicast-ciphers': KeyInfo(),
'wpa-pre-shared-key': KeyInfo(),
'wpa2-pre-shared-key': KeyInfo(),
},
),
('ip', 'hotspot', 'profile'): APIData(
unknown_mechanism=True,
# primary_keys=('default', ),
fields={
'default': KeyInfo(),
'dns-name': KeyInfo(),
'hotspot-address': KeyInfo(),
'html-directory': KeyInfo(),
'html-directory-override': KeyInfo(),
'http-cookie-lifetime': KeyInfo(),
'http-proxy': KeyInfo(),
'login-by': KeyInfo(),
'name': KeyInfo(),
'rate-limit': KeyInfo(),
'smtp-server': KeyInfo(),
'split-user-domain': KeyInfo(),
'use-radius': KeyInfo(),
},
),
('ip', 'hotspot', 'user', 'profile'): APIData(
unknown_mechanism=True,
# primary_keys=('default', ),
fields={
'default': KeyInfo(),
'add-mac-cookie': KeyInfo(),
'address-list': KeyInfo(),
'idle-timeout': KeyInfo(),
'insert-queue-before': KeyInfo(can_disable=True),
'keepalive-timeout': KeyInfo(),
'mac-cookie-timeout': KeyInfo(),
'name': KeyInfo(),
'parent-queue': KeyInfo(can_disable=True),
'queue-type': KeyInfo(can_disable=True),
'shared-users': KeyInfo(),
'status-autorefresh': KeyInfo(),
'transparent-proxy': KeyInfo(),
},
),
('ip', 'ipsec', 'identity'): APIData(
fully_understood=True,
primary_keys=('peer', ),
fields={
'auth-method': KeyInfo(default='pre-shared-key'),
'certificate': KeyInfo(),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'eap-methods': KeyInfo(default='eap-tls'),
'generate-policy': KeyInfo(default=False),
'key': KeyInfo(),
'match-by': KeyInfo(can_disable=True, remove_value='remote-id'),
'mode-config': KeyInfo(can_disable=True, remove_value='none'),
'my-id': KeyInfo(can_disable=True, remove_value='auto'),
'notrack-chain': KeyInfo(can_disable=True, remove_value=''),
'password': KeyInfo(),
'peer': KeyInfo(),
'policy-template-group': KeyInfo(can_disable=True, remove_value='default'),
'remote-certificate': KeyInfo(),
'remote-id': KeyInfo(can_disable=True, remove_value='auto'),
'remote-key': KeyInfo(),
'secret': KeyInfo(default=''),
'username': KeyInfo(),
},
),
('ip', 'ipsec', 'mode-config'): APIData(
unknown_mechanism=True,
# primary_keys=('default', ),
fields={
'default': KeyInfo(),
'name': KeyInfo(),
'responder': KeyInfo(),
'use-responder-dns': KeyInfo(),
},
),
('ip', 'ipsec', 'peer'): APIData(
fully_understood=True,
primary_keys=('name', ),
fields={
'address': KeyInfo(can_disable=True, remove_value=''),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'exchange-mode': KeyInfo(default='main'),
'local-address': KeyInfo(can_disable=True, remove_value='0.0.0.0'),
'name': KeyInfo(),
'passive': KeyInfo(can_disable=True, remove_value=False),
'port': KeyInfo(can_disable=True, remove_value=500),
'profile': KeyInfo(default='default'),
'send-initial-contact': KeyInfo(default=True),
},
),
('ip', 'ipsec', 'policy', 'group'): APIData(
unknown_mechanism=True,
# primary_keys=('default', ),
fields={
'default': KeyInfo(),
'name': KeyInfo(),
},
),
('ip', 'ipsec', 'profile'): APIData(
fully_understood=True,
primary_keys=('name', ),
fields={
'dh-group': KeyInfo(default='modp2048,modp1024'),
'dpd-interval': KeyInfo(default='2m'),
'dpd-maximum-failures': KeyInfo(default=5),
'enc-algorithm': KeyInfo(default='aes-128,3des'),
'hash-algorithm': KeyInfo(default='sha1'),
'lifebytes': KeyInfo(can_disable=True, remove_value=0),
'lifetime': KeyInfo(default='1d'),
'name': KeyInfo(),
'nat-traversal': KeyInfo(default=True),
'prf-algorithm': KeyInfo(can_disable=True, remove_value='auto'),
'proposal-check': KeyInfo(default='obey'),
},
),
('ip', 'ipsec', 'proposal'): APIData(
fully_understood=True,
primary_keys=('name', ),
fields={
'auth-algorithms': KeyInfo(default='sha1'),
'disabled': KeyInfo(default=False),
'enc-algorithms': KeyInfo(default='aes-256-cbc,aes-192-cbc,aes-128-cbc'),
'lifetime': KeyInfo(default='30m'),
'name': KeyInfo(),
'pfs-group': KeyInfo(default='modp1024'),
},
),
('ip', 'pool'): APIData(
fully_understood=True,
primary_keys=('name', ),
fields={
'name': KeyInfo(),
'ranges': KeyInfo(),
},
),
('ip', 'route'): APIData(
fully_understood=True,
fields={
'blackhole': KeyInfo(can_disable=True),
'check-gateway': KeyInfo(can_disable=True),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'distance': KeyInfo(),
'dst-address': KeyInfo(),
'gateway': KeyInfo(),
'pref-src': KeyInfo(),
'routing-table': KeyInfo(default='main'),
'route-tag': KeyInfo(can_disable=True),
'routing-mark': KeyInfo(can_disable=True),
'scope': KeyInfo(),
'suppress-hw-offload': KeyInfo(default=False),
'target-scope': KeyInfo(),
'type': KeyInfo(can_disable=True, remove_value='unicast'),
'vrf-interface': KeyInfo(can_disable=True),
},
),
('ip', 'route', 'vrf'): APIData(
fully_understood=True,
primary_keys=('routing-mark', ),
fields={
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'interfaces': KeyInfo(),
'routing-mark': KeyInfo(),
},
),
('ip', 'dhcp-server'): APIData(
fully_understood=True,
primary_keys=('name', ),
fields={
'address-pool': KeyInfo(default='static-only'),
'allow-dual-stack-queue': KeyInfo(can_disable=True, remove_value=True),
'always-broadcast': KeyInfo(can_disable=True, remove_value=False),
'authoritative': KeyInfo(default=True),
'bootp-lease-time': KeyInfo(default='forever'),
'bootp-support': KeyInfo(can_disable=True, remove_value='static'),
'client-mac-limit': KeyInfo(can_disable=True, remove_value='unlimited'),
'comment': KeyInfo(can_disable=True, remove_value=''),
'conflict-detection': KeyInfo(can_disable=True, remove_value=True),
'delay-threshold': KeyInfo(can_disable=True, remove_value='none'),
'dhcp-option-set': KeyInfo(can_disable=True, remove_value='none'),
'disabled': KeyInfo(default=False),
'insert-queue-before': KeyInfo(can_disable=True, remove_value='first'),
'interface': KeyInfo(required=True),
'lease-script': KeyInfo(default=''),
'lease-time': KeyInfo(default='10m'),
'name': KeyInfo(),
'parent-queue': KeyInfo(can_disable=True, remove_value='none'),
'relay': KeyInfo(can_disable=True, remove_value='0.0.0.0'),
'server-address': KeyInfo(can_disable=True, remove_value='0.0.0.0'),
'use-framed-as-classless': KeyInfo(can_disable=True, remove_value=True),
'use-radius': KeyInfo(default=False),
},
),
('routing', 'ospf', 'instance'): APIData(
fully_understood=True,
primary_keys=('name', ),
fields={
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'domain-id': KeyInfo(can_disable=True),
'domain-tag': KeyInfo(can_disable=True),
'in-filter-chain': KeyInfo(can_disable=True),
'mpls-te-address': KeyInfo(can_disable=True),
'mpls-te-area': KeyInfo(can_disable=True),
'name': KeyInfo(),
'originate-default': KeyInfo(can_disable=True),
'out-filter-chain': KeyInfo(can_disable=True),
'out-filter-select': KeyInfo(can_disable=True),
'redistribute': KeyInfo(can_disable=True),
'router-id': KeyInfo(default='main'),
'routing-table': KeyInfo(can_disable=True),
'use-dn': KeyInfo(can_disable=True),
'version': KeyInfo(default=2),
'vrf': KeyInfo(default='main'),
},
),
('routing', 'ospf', 'area'): APIData(
fully_understood=True,
primary_keys=('name', ),
fields={
'area-id': KeyInfo(default='0.0.0.0'),
'comment': KeyInfo(can_disable=True, remove_value=''),
'default-cost': KeyInfo(can_disable=True),
'disabled': KeyInfo(default=False),
'instance': KeyInfo(required=True),
'name': KeyInfo(),
'no-summaries': KeyInfo(can_disable=True),
'nssa-translator': KeyInfo(can_disable=True),
'type': KeyInfo(default='default'),
},
),
('routing', 'ospf', 'area', 'range'): APIData(
fully_understood=True,
primary_keys=('area', 'prefix', ),
fields={
'advertise': KeyInfo(default=True),
'area': KeyInfo(),
'comment': KeyInfo(can_disable=True, remove_value=''),
'cost': KeyInfo(can_disable=True),
'disabled': KeyInfo(default=False),
'prefix': KeyInfo(),
},
),
('routing', 'ospf', 'interface-template'): APIData(
fully_understood=True,
fields={
'area': KeyInfo(required=True),
'auth': KeyInfo(can_disable=True),
'auth-id': KeyInfo(can_disable=True),
'auth-key': KeyInfo(can_disable=True),
'comment': KeyInfo(can_disable=True, remove_value=''),
'cost': KeyInfo(default=1),
'dead-interval': KeyInfo(default='40s'),
'disabled': KeyInfo(default=False),
'hello-interval': KeyInfo(default='10s'),
'instance-id': KeyInfo(default=0),
'interfaces': KeyInfo(can_disable=True),
'networks': KeyInfo(can_disable=True),
'passive': KeyInfo(can_disable=True),
'prefix-list': KeyInfo(can_disable=True),
'priority': KeyInfo(default=128),
'retransmit-interval': KeyInfo(default='5s'),
'transmit-delay': KeyInfo(default='1s'),
'type': KeyInfo(default='broadcast'),
'vlink-neighbor-id': KeyInfo(can_disable=True),
'vlink-transit-area': KeyInfo(can_disable=True),
},
),
('routing', 'ospf-v3', 'instance'): APIData(
unknown_mechanism=True,
# primary_keys=('default', ),
fields={
'default': KeyInfo(),
'disabled': KeyInfo(),
'distribute-default': KeyInfo(),
'metric-bgp': KeyInfo(),
'metric-connected': KeyInfo(),
'metric-default': KeyInfo(),
'metric-other-ospf': KeyInfo(),
'metric-rip': KeyInfo(),
'metric-static': KeyInfo(),
'name': KeyInfo(),
'redistribute-bgp': KeyInfo(),
'redistribute-connected': KeyInfo(),
'redistribute-other-ospf': KeyInfo(),
'redistribute-rip': KeyInfo(),
'redistribute-static': KeyInfo(),
'router-id': KeyInfo(),
},
),
('routing', 'ospf-v3', 'area'): APIData(
unknown_mechanism=True,
# primary_keys=('default', ),
fields={
'default': KeyInfo(),
'area-id': KeyInfo(),
'disabled': KeyInfo(),
'instance': KeyInfo(),
'name': KeyInfo(),
'type': KeyInfo(),
},
),
('routing', 'pimsm', 'instance'): APIData(
fully_understood=True,
primary_keys=('name', ),
fields={
'afi': KeyInfo(default='ipv4'),
'bsm-forward-back': KeyInfo(),
'crp-advertise-contained': KeyInfo(),
'disabled': KeyInfo(default=False),
'name': KeyInfo(),
'rp-hash-mask-length': KeyInfo(),
'rp-static-override': KeyInfo(default=False),
'ssm-range': KeyInfo(),
'switch-to-spt': KeyInfo(default=True),
'switch-to-spt-bytes': KeyInfo(default=0),
'switch-to-spt-interval': KeyInfo(),
            'vrf': KeyInfo(default='main'),
},
),
('routing', 'pimsm', 'interface-template'): APIData(
fully_understood=True,
fields={
'disabled': KeyInfo(default=False),
'hello-delay': KeyInfo(default='5s'),
'hello-period': KeyInfo(default='30s'),
'instance': KeyInfo(required=True),
'interfaces': KeyInfo(can_disable=True),
'join-prune-period': KeyInfo(default='1m'),
'join-tracking-support': KeyInfo(default=True),
'override-interval': KeyInfo(default='2s500ms'),
'priority': KeyInfo(default=1),
'propagation-delay': KeyInfo(default='500ms'),
'source-addresses': KeyInfo(can_disable=True),
},
),
('snmp', 'community'): APIData(
fully_understood=True,
primary_keys=('name', ),
fields={
'addresses': KeyInfo(default='::/0'),
'authentication-password': KeyInfo(default=''),
'authentication-protocol': KeyInfo(default='MD5'),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'encryption-password': KeyInfo(default=''),
'encryption-protocol': KeyInfo(default='DES'),
'name': KeyInfo(required=True),
'read-access': KeyInfo(default=True),
'security': KeyInfo(default='none'),
'write-access': KeyInfo(default=False),
},
),
('caps-man', 'aaa'): APIData(
single_value=True,
fully_understood=True,
fields={
'called-format': KeyInfo(default='mac:ssid'),
'interim-update': KeyInfo(default='disabled'),
'mac-caching': KeyInfo(default='disabled'),
'mac-format': KeyInfo(default='XX:XX:XX:XX:XX:XX'),
'mac-mode': KeyInfo(default='as-username'),
},
),
('caps-man', 'access-list'): APIData(
fully_understood=True,
fields={
'action': KeyInfo(can_disable=True),
'allow-signal-out-of-range': KeyInfo(can_disable=True),
'ap-tx-limit': KeyInfo(can_disable=True),
'client-to-client-forwarding': KeyInfo(can_disable=True),
'client-tx-limit': KeyInfo(can_disable=True),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(),
'interface': KeyInfo(can_disable=True),
'mac-address': KeyInfo(can_disable=True),
'mac-address-mask': KeyInfo(can_disable=True),
'private-passphrase': KeyInfo(can_disable=True),
'radius-accounting': KeyInfo(can_disable=True),
'signal-range': KeyInfo(can_disable=True),
'ssid-regexp': KeyInfo(),
'time': KeyInfo(can_disable=True),
'vlan-id': KeyInfo(can_disable=True),
'vlan-mode': KeyInfo(can_disable=True),
},
),
('caps-man', 'channel'): APIData(
fully_understood=True,
primary_keys=('name', ),
fields={
'band': KeyInfo(can_disable=True),
'comment': KeyInfo(can_disable=True, remove_value=''),
'control-channel-width': KeyInfo(can_disable=True),
'extension-channel': KeyInfo(can_disable=True),
'frequency': KeyInfo(can_disable=True),
'name': KeyInfo(),
'reselect-interval': KeyInfo(can_disable=True),
'save-selected': KeyInfo(can_disable=True),
'secondary-frequency': KeyInfo(can_disable=True),
'skip-dfs-channels': KeyInfo(can_disable=True),
'tx-power': KeyInfo(can_disable=True),
},
),
('caps-man', 'configuration'): APIData(
fully_understood=True,
primary_keys=('name', ),
fields={
'channel': KeyInfo(can_disable=True),
'channel.band': KeyInfo(can_disable=True),
'channel.control-channel-width': KeyInfo(can_disable=True),
'channel.extension-channel': KeyInfo(can_disable=True),
'channel.frequency': KeyInfo(can_disable=True),
'channel.reselect-interval': KeyInfo(can_disable=True),
'channel.save-selected': KeyInfo(can_disable=True),
'channel.secondary-frequency': KeyInfo(can_disable=True),
'channel.skip-dfs-channels': KeyInfo(can_disable=True),
'channel.tx-power': KeyInfo(can_disable=True),
'comment': KeyInfo(can_disable=True, remove_value=''),
'country': KeyInfo(can_disable=True),
'datapath': KeyInfo(can_disable=True),
'datapath.arp': KeyInfo(),
'datapath.bridge': KeyInfo(can_disable=True),
'datapath.bridge-cost': KeyInfo(can_disable=True),
'datapath.bridge-horizon': KeyInfo(can_disable=True),
'datapath.client-to-client-forwarding': KeyInfo(can_disable=True),
'datapath.interface-list': KeyInfo(can_disable=True),
'datapath.l2mtu': KeyInfo(),
'datapath.local-forwarding': KeyInfo(can_disable=True),
'datapath.mtu': KeyInfo(),
'datapath.openflow-switch': KeyInfo(can_disable=True),
'datapath.vlan-id': KeyInfo(can_disable=True),
'datapath.vlan-mode': KeyInfo(can_disable=True),
'disconnect-timeout': KeyInfo(can_disable=True),
'distance': KeyInfo(can_disable=True),
'frame-lifetime': KeyInfo(can_disable=True),
'guard-interval': KeyInfo(can_disable=True),
'hide-ssid': KeyInfo(can_disable=True),
'hw-protection-mode': KeyInfo(can_disable=True),
'hw-retries': KeyInfo(can_disable=True),
'installation': KeyInfo(can_disable=True),
'keepalive-frames': KeyInfo(can_disable=True),
'load-balancing-group': KeyInfo(can_disable=True),
'max-sta-count': KeyInfo(can_disable=True),
'mode': KeyInfo(can_disable=True),
'multicast-helper': KeyInfo(can_disable=True),
'name': KeyInfo(),
'rates': KeyInfo(can_disable=True),
'rates.basic': KeyInfo(can_disable=True),
'rates.ht-basic-mcs': KeyInfo(can_disable=True),
'rates.ht-supported-mcs': KeyInfo(can_disable=True),
'rates.supported': KeyInfo(can_disable=True),
'rates.vht-basic-mcs': KeyInfo(can_disable=True),
'rates.vht-supported-mcs': KeyInfo(can_disable=True),
'rx-chains': KeyInfo(can_disable=True),
'security': KeyInfo(can_disable=True),
'security.authentication-types': KeyInfo(can_disable=True),
'security.disable-pmkid': KeyInfo(can_disable=True),
'security.eap-methods': KeyInfo(can_disable=True),
'security.eap-radius-accounting': KeyInfo(can_disable=True),
'security.encryption': KeyInfo(can_disable=True),
'security.group-encryption': KeyInfo(can_disable=True),
'security.group-key-update': KeyInfo(),
'security.passphrase': KeyInfo(can_disable=True),
'security.tls-certificate': KeyInfo(),
'security.tls-mode': KeyInfo(),
'ssid': KeyInfo(can_disable=True),
'tx-chains': KeyInfo(can_disable=True),
},
),
('caps-man', 'datapath'): APIData(
fully_understood=True,
primary_keys=('name', ),
fields={
'arp': KeyInfo(),
'bridge': KeyInfo(can_disable=True),
'bridge-cost': KeyInfo(can_disable=True),
'bridge-horizon': KeyInfo(can_disable=True),
'client-to-client-forwarding': KeyInfo(can_disable=True),
'comment': KeyInfo(can_disable=True, remove_value=''),
'interface-list': KeyInfo(can_disable=True),
'l2mtu': KeyInfo(),
'local-forwarding': KeyInfo(can_disable=True),
'mtu': KeyInfo(),
'name': KeyInfo(),
'openflow-switch': KeyInfo(can_disable=True),
'vlan-id': KeyInfo(can_disable=True),
'vlan-mode': KeyInfo(can_disable=True),
},
),
('caps-man', 'manager', 'interface'): APIData(
fully_understood=True,
primary_keys=('interface', ),
fields={
'comment': KeyInfo(can_disable=True, remove_value=''),
'default': KeyInfo(),
'disabled': KeyInfo(default=False),
'forbid': KeyInfo(default=False),
'interface': KeyInfo(),
},
),
('caps-man', 'provisioning'): APIData(
fully_understood=True,
fields={
'action': KeyInfo(default='none'),
'comment': KeyInfo(can_disable=True, remove_value=''),
'common-name-regexp': KeyInfo(default=''),
'disabled': KeyInfo(default=False),
'hw-supported-modes': KeyInfo(default=''),
'identity-regexp': KeyInfo(default=''),
'ip-address-ranges': KeyInfo(default=''),
'master-configuration': KeyInfo(default='*FFFFFFFF'),
'name-format': KeyInfo(default='cap'),
'name-prefix': KeyInfo(default=''),
'radio-mac': KeyInfo(default='00:00:00:00:00:00'),
'slave-configurations': KeyInfo(default=''),
},
),
('caps-man', 'security'): APIData(
fully_understood=True,
primary_keys=('name', ),
fields={
'authentication-types': KeyInfo(can_disable=True),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disable-pmkid': KeyInfo(can_disable=True),
'eap-methods': KeyInfo(can_disable=True),
'eap-radius-accounting': KeyInfo(can_disable=True),
'encryption': KeyInfo(can_disable=True),
'group-encryption': KeyInfo(can_disable=True),
'group-key-update': KeyInfo(),
'name': KeyInfo(),
'passphrase': KeyInfo(can_disable=True),
'tls-certificate': KeyInfo(),
'tls-mode': KeyInfo(),
}
),
('certificate', 'settings'): APIData(
single_value=True,
fully_understood=True,
fields={
'crl-download': KeyInfo(default=False),
'crl-store': KeyInfo(default='ram'),
'crl-use': KeyInfo(default=False),
},
),
('interface', 'bridge', 'port'): APIData(
fully_understood=True,
primary_keys=('interface', ),
fields={
'auto-isolate': KeyInfo(default=False),
'bpdu-guard': KeyInfo(default=False),
'bridge': KeyInfo(required=True),
'broadcast-flood': KeyInfo(default=True),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'edge': KeyInfo(default='auto'),
'fast-leave': KeyInfo(default=False),
'frame-types': KeyInfo(default='admit-all'),
'horizon': KeyInfo(default='none'),
'hw': KeyInfo(default=True),
'ingress-filtering': KeyInfo(default=True),
'interface': KeyInfo(),
'internal-path-cost': KeyInfo(default=10),
'learn': KeyInfo(default='auto'),
'multicast-router': KeyInfo(default='temporary-query'),
'path-cost': KeyInfo(default=10),
'point-to-point': KeyInfo(default='auto'),
'priority': KeyInfo(default='0x80'),
'pvid': KeyInfo(default=1),
'restricted-role': KeyInfo(default=False),
'restricted-tcn': KeyInfo(default=False),
'tag-stacking': KeyInfo(default=False),
'trusted': KeyInfo(default=False),
'unknown-multicast-flood': KeyInfo(default=True),
'unknown-unicast-flood': KeyInfo(default=True),
},
),
('interface', 'bridge', 'mlag'): APIData(
single_value=True,
fully_understood=True,
fields={
'bridge': KeyInfo(default='none'),
'peer-port': KeyInfo(default='none'),
}
),
('interface', 'bridge', 'port-controller'): APIData(
single_value=True,
fully_understood=True,
fields={
'bridge': KeyInfo(default='none'),
'cascade-ports': KeyInfo(default=''),
'switch': KeyInfo(default='none'),
},
),
('interface', 'bridge', 'port-extender'): APIData(
single_value=True,
fully_understood=True,
fields={
'control-ports': KeyInfo(default=''),
'excluded-ports': KeyInfo(default=''),
'switch': KeyInfo(default='none'),
},
),
('interface', 'bridge', 'settings'): APIData(
single_value=True,
fully_understood=True,
fields={
'allow-fast-path': KeyInfo(default=True),
'use-ip-firewall': KeyInfo(default=False),
'use-ip-firewall-for-pppoe': KeyInfo(default=False),
'use-ip-firewall-for-vlan': KeyInfo(default=False),
},
),
('interface', 'bridge', 'vlan'): APIData(
fully_understood=True,
primary_keys=('bridge', 'vlan-ids', ),
fields={
'bridge': KeyInfo(),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'tagged': KeyInfo(default=''),
'untagged': KeyInfo(default=''),
'vlan-ids': KeyInfo(),
},
),
('ip', 'firewall', 'connection', 'tracking'): APIData(
single_value=True,
fully_understood=True,
fields={
'enabled': KeyInfo(default='auto'),
'generic-timeout': KeyInfo(default='10m'),
'icmp-timeout': KeyInfo(default='10s'),
'loose-tcp-tracking': KeyInfo(default=True),
'tcp-close-timeout': KeyInfo(default='10s'),
'tcp-close-wait-timeout': KeyInfo(default='10s'),
'tcp-established-timeout': KeyInfo(default='1d'),
'tcp-fin-wait-timeout': KeyInfo(default='10s'),
'tcp-last-ack-timeout': KeyInfo(default='10s'),
'tcp-max-retrans-timeout': KeyInfo(default='5m'),
'tcp-syn-received-timeout': KeyInfo(default='5s'),
'tcp-syn-sent-timeout': KeyInfo(default='5s'),
'tcp-time-wait-timeout': KeyInfo(default='10s'),
'tcp-unacked-timeout': KeyInfo(default='5m'),
'udp-stream-timeout': KeyInfo(default='3m'),
'udp-timeout': KeyInfo(default='10s'),
},
),
('ip', 'neighbor', 'discovery-settings'): APIData(
single_value=True,
fully_understood=True,
fields={
'discover-interface-list': KeyInfo(),
'lldp-med-net-policy-vlan': KeyInfo(default='disabled'),
'protocol': KeyInfo(default='cdp,lldp,mndp'),
},
),
('ip', 'settings'): APIData(
single_value=True,
fully_understood=True,
fields={
'accept-redirects': KeyInfo(default=False),
'accept-source-route': KeyInfo(default=False),
'allow-fast-path': KeyInfo(default=True),
'arp-timeout': KeyInfo(default='30s'),
'icmp-rate-limit': KeyInfo(default=10),
'icmp-rate-mask': KeyInfo(default='0x1818'),
'ip-forward': KeyInfo(default=True),
'max-neighbor-entries': KeyInfo(default=8192),
'route-cache': KeyInfo(default=True),
'rp-filter': KeyInfo(default=False),
'secure-redirects': KeyInfo(default=True),
'send-redirects': KeyInfo(default=True),
'tcp-syncookies': KeyInfo(default=False),
},
),
('ipv6', 'address'): APIData(
fully_understood=True,
fields={
'address': KeyInfo(),
'advertise': KeyInfo(default=True),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'eui-64': KeyInfo(default=False),
'from-pool': KeyInfo(),
'interface': KeyInfo(required=True),
'no-dad': KeyInfo(default=False),
},
),
('ipv6', 'settings'): APIData(
single_value=True,
fully_understood=True,
fields={
'accept-redirects': KeyInfo(default='yes-if-forwarding-disabled'),
'accept-router-advertisements': KeyInfo(default='yes-if-forwarding-disabled'),
'disable-ipv6': KeyInfo(default=False),
'forward': KeyInfo(default=True),
'max-neighbor-entries': KeyInfo(default=8192),
},
),
('interface', 'detect-internet'): APIData(
single_value=True,
fully_understood=True,
fields={
'detect-interface-list': KeyInfo(default='none'),
'internet-interface-list': KeyInfo(default='none'),
'lan-interface-list': KeyInfo(default='none'),
'wan-interface-list': KeyInfo(default='none'),
},
),
('interface', 'l2tp-server', 'server'): APIData(
single_value=True,
fully_understood=True,
fields={
'allow-fast-path': KeyInfo(default=False),
'authentication': KeyInfo(default='pap,chap,mschap1,mschap2'),
'caller-id-type': KeyInfo(default='ip-address'),
'default-profile': KeyInfo(default='default-encryption'),
'enabled': KeyInfo(default=False),
'ipsec-secret': KeyInfo(default=''),
'keepalive-timeout': KeyInfo(default=30),
'max-mru': KeyInfo(default=1450),
'max-mtu': KeyInfo(default=1450),
'max-sessions': KeyInfo(default='unlimited'),
'mrru': KeyInfo(default='disabled'),
'one-session-per-host': KeyInfo(default=False),
'use-ipsec': KeyInfo(default=False),
},
),
('interface', 'ovpn-server', 'server'): APIData(
single_value=True,
fully_understood=True,
fields={
'auth': KeyInfo(),
'cipher': KeyInfo(),
'default-profile': KeyInfo(default='default'),
'enabled': KeyInfo(default=False),
'keepalive-timeout': KeyInfo(default=60),
'mac-address': KeyInfo(),
'max-mtu': KeyInfo(default=1500),
'mode': KeyInfo(default='ip'),
'netmask': KeyInfo(default=24),
'port': KeyInfo(default=1194),
'require-client-certificate': KeyInfo(default=False),
},
),
('interface', 'pptp-server', 'server'): APIData(
single_value=True,
fully_understood=True,
fields={
'authentication': KeyInfo(default='mschap1,mschap2'),
'default-profile': KeyInfo(default='default-encryption'),
'enabled': KeyInfo(default=False),
'keepalive-timeout': KeyInfo(default=30),
'max-mru': KeyInfo(default=1450),
'max-mtu': KeyInfo(default=1450),
'mrru': KeyInfo(default='disabled'),
},
),
('interface', 'sstp-server', 'server'): APIData(
single_value=True,
fully_understood=True,
fields={
'authentication': KeyInfo(default='pap,chap,mschap1,mschap2'),
'certificate': KeyInfo(default='none'),
'default-profile': KeyInfo(default='default'),
'enabled': KeyInfo(default=False),
'force-aes': KeyInfo(default=False),
'keepalive-timeout': KeyInfo(default=60),
'max-mru': KeyInfo(default=1500),
'max-mtu': KeyInfo(default=1500),
'mrru': KeyInfo(default='disabled'),
'pfs': KeyInfo(default=False),
'port': KeyInfo(default=443),
'tls-version': KeyInfo(default='any'),
'verify-client-certificate': KeyInfo(default='no'),
},
),
('interface', 'wireguard'): APIData(
fully_understood=True,
primary_keys=('name', ),
fields={
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'listen-port': KeyInfo(),
'mtu': KeyInfo(default=1420),
'name': KeyInfo(),
'private-key': KeyInfo(),
},
),
('interface', 'wireguard', 'peers'): APIData(
fully_understood=True,
primary_keys=('public-key', 'interface'),
fields={
'allowed-address': KeyInfo(required=True),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'endpoint-address': KeyInfo(default=''),
'endpoint-port': KeyInfo(default=0),
'interface': KeyInfo(),
'persistent-keepalive': KeyInfo(can_disable=True, remove_value=0),
'preshared-key': KeyInfo(can_disable=True, remove_value=''),
'public-key': KeyInfo(),
},
),
('interface', 'wireless', 'align'): APIData(
single_value=True,
fully_understood=True,
fields={
'active-mode': KeyInfo(default=True),
'audio-max': KeyInfo(default=-20),
'audio-min': KeyInfo(default=-100),
'audio-monitor': KeyInfo(default='00:00:00:00:00:00'),
'filter-mac': KeyInfo(default='00:00:00:00:00:00'),
'frame-size': KeyInfo(default=300),
'frames-per-second': KeyInfo(default=25),
'receive-all': KeyInfo(default=False),
'ssid-all': KeyInfo(default=False),
},
),
('interface', 'wireless', 'cap'): APIData(
single_value=True,
fully_understood=True,
fields={
'bridge': KeyInfo(default='none'),
'caps-man-addresses': KeyInfo(default=''),
'caps-man-certificate-common-names': KeyInfo(default=''),
'caps-man-names': KeyInfo(default=''),
'certificate': KeyInfo(default='none'),
'discovery-interfaces': KeyInfo(default=''),
'enabled': KeyInfo(default=False),
'interfaces': KeyInfo(default=''),
'lock-to-caps-man': KeyInfo(default=False),
'static-virtual': KeyInfo(default=False),
},
),
('interface', 'wireless', 'sniffer'): APIData(
single_value=True,
fully_understood=True,
fields={
'channel-time': KeyInfo(default='200ms'),
'file-limit': KeyInfo(default=10),
'file-name': KeyInfo(default=''),
'memory-limit': KeyInfo(default=10),
'multiple-channels': KeyInfo(default=False),
'only-headers': KeyInfo(default=False),
'receive-errors': KeyInfo(default=False),
'streaming-enabled': KeyInfo(default=False),
'streaming-max-rate': KeyInfo(default=0),
'streaming-server': KeyInfo(default='0.0.0.0'),
},
),
('interface', 'wireless', 'snooper'): APIData(
single_value=True,
fully_understood=True,
fields={
'channel-time': KeyInfo(default='200ms'),
'multiple-channels': KeyInfo(default=True),
'receive-errors': KeyInfo(default=False),
},
),
('ip', 'accounting'): APIData(
single_value=True,
fully_understood=True,
fields={
'account-local-traffic': KeyInfo(default=False),
'enabled': KeyInfo(default=False),
'threshold': KeyInfo(default=256),
},
),
('ip', 'accounting', 'web-access'): APIData(
single_value=True,
fully_understood=True,
fields={
'accessible-via-web': KeyInfo(default=False),
'address': KeyInfo(default='0.0.0.0/0'),
},
),
('ip', 'address'): APIData(
fully_understood=True,
primary_keys=('address', 'interface', ),
fields={
'address': KeyInfo(),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'interface': KeyInfo(),
'network': KeyInfo(automatically_computed_from=('address', )),
},
),
('ip', 'arp'): APIData(
fully_understood=True,
fields={
'address': KeyInfo(default='0.0.0.0'),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'interface': KeyInfo(required=True),
'mac-address': KeyInfo(default='00:00:00:00:00:00'),
'published': KeyInfo(default=False),
},
),
('ip', 'cloud'): APIData(
single_value=True,
fully_understood=True,
fields={
'ddns-enabled': KeyInfo(default=False),
'ddns-update-interval': KeyInfo(default='none'),
'update-time': KeyInfo(default=True),
},
),
('ip', 'cloud', 'advanced'): APIData(
single_value=True,
fully_understood=True,
fields={
'use-local-address': KeyInfo(default=False),
},
),
('ip', 'dhcp-client'): APIData(
fully_understood=True,
primary_keys=('interface', ),
fields={
'add-default-route': KeyInfo(default=True),
'comment': KeyInfo(can_disable=True, remove_value=''),
'default-route-distance': KeyInfo(default=1),
'dhcp-options': KeyInfo(default='hostname,clientid', can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'interface': KeyInfo(),
'script': KeyInfo(can_disable=True),
'use-peer-dns': KeyInfo(default=True),
'use-peer-ntp': KeyInfo(default=True),
},
),
('ip', 'dhcp-server', 'config'): APIData(
single_value=True,
fully_understood=True,
fields={
'accounting': KeyInfo(default=True),
'interim-update': KeyInfo(default='0s'),
'store-leases-disk': KeyInfo(default='5m'),
},
),
('ip', 'dhcp-server', 'lease'): APIData(
fully_understood=True,
primary_keys=('server', 'address', ),
fields={
'address': KeyInfo(),
'address-lists': KeyInfo(default=''),
'always-broadcast': KeyInfo(),
'client-id': KeyInfo(can_disable=True, remove_value=''),
'comment': KeyInfo(can_disable=True, remove_value=''),
'dhcp-option': KeyInfo(default=''),
'disabled': KeyInfo(default=False),
'insert-queue-before': KeyInfo(can_disable=True),
'mac-address': KeyInfo(can_disable=True, remove_value=''),
'server': KeyInfo(absent_value='all'),
},
),
('ip', 'dhcp-server', 'network'): APIData(
fully_understood=True,
primary_keys=('address', ),
fields={
'address': KeyInfo(),
'boot-file-name': KeyInfo(default=''),
'caps-manager': KeyInfo(default=''),
'comment': KeyInfo(can_disable=True, remove_value=''),
'dhcp-option': KeyInfo(default=''),
'dhcp-option-set': KeyInfo(default=''),
'dns-none': KeyInfo(default=False),
'dns-server': KeyInfo(default=''),
'domain': KeyInfo(default=''),
'gateway': KeyInfo(default=''),
'netmask': KeyInfo(can_disable=True, remove_value=0),
'next-server': KeyInfo(can_disable=True),
'ntp-server': KeyInfo(default=''),
'wins-server': KeyInfo(default=''),
},
),
('ip', 'dns'): APIData(
single_value=True,
fully_understood=True,
fields={
'allow-remote-requests': KeyInfo(),
'cache-max-ttl': KeyInfo(default='1w'),
'cache-size': KeyInfo(default='2048KiB'),
'max-concurrent-queries': KeyInfo(default=100),
'max-concurrent-tcp-sessions': KeyInfo(default=20),
'max-udp-packet-size': KeyInfo(default=4096),
'query-server-timeout': KeyInfo(default='2s'),
'query-total-timeout': KeyInfo(default='10s'),
'servers': KeyInfo(default=''),
'use-doh-server': KeyInfo(default=''),
'verify-doh-cert': KeyInfo(default=False),
},
),
('ip', 'dns', 'static'): APIData(
fully_understood=True,
required_one_of=[['name', 'regexp']],
mutually_exclusive=[['name', 'regexp']],
fields={
'address': KeyInfo(),
'cname': KeyInfo(),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'forward-to': KeyInfo(),
'mx-exchange': KeyInfo(),
'mx-preference': KeyInfo(),
'name': KeyInfo(),
'ns': KeyInfo(),
'regexp': KeyInfo(),
'srv-port': KeyInfo(),
'srv-priority': KeyInfo(),
'srv-target': KeyInfo(),
'srv-weight': KeyInfo(),
'text': KeyInfo(),
'ttl': KeyInfo(default='1d'),
'type': KeyInfo(),
},
),
('ip', 'firewall', 'address-list'): APIData(
fully_understood=True,
primary_keys=('address', 'list', ),
fields={
'address': KeyInfo(),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'list': KeyInfo(),
},
),
('ip', 'firewall', 'filter'): APIData(
fully_understood=True,
stratify_keys=('chain', ),
fields={
'action': KeyInfo(),
'chain': KeyInfo(),
'comment': KeyInfo(can_disable=True, remove_value=''),
'connection-bytes': KeyInfo(can_disable=True),
'connection-limit': KeyInfo(can_disable=True),
'connection-mark': KeyInfo(can_disable=True),
'connection-nat-state': KeyInfo(can_disable=True),
'connection-rate': KeyInfo(can_disable=True),
'connection-state': KeyInfo(can_disable=True),
'connection-type': KeyInfo(can_disable=True),
'content': KeyInfo(can_disable=True),
'disabled': KeyInfo(),
'dscp': KeyInfo(can_disable=True),
'dst-address': KeyInfo(can_disable=True),
'dst-address-list': KeyInfo(can_disable=True),
'dst-address-type': KeyInfo(can_disable=True),
'dst-limit': KeyInfo(can_disable=True),
'dst-port': KeyInfo(can_disable=True),
'fragment': KeyInfo(can_disable=True),
'hotspot': KeyInfo(can_disable=True),
'hw-offload': KeyInfo(can_disable=True),
'icmp-options': KeyInfo(can_disable=True),
'in-bridge-port': KeyInfo(can_disable=True),
'in-bridge-port-list': KeyInfo(can_disable=True),
'in-interface': KeyInfo(can_disable=True),
'in-interface-list': KeyInfo(can_disable=True),
'ingress-priority': KeyInfo(can_disable=True),
'ipsec-policy': KeyInfo(can_disable=True),
'ipv4-options': KeyInfo(can_disable=True),
'jump-target': KeyInfo(),
'layer7-protocol': KeyInfo(can_disable=True),
'limit': KeyInfo(can_disable=True),
'log': KeyInfo(),
'log-prefix': KeyInfo(),
'nth': KeyInfo(can_disable=True),
'out-bridge-port': KeyInfo(can_disable=True),
'out-bridge-port-list': KeyInfo(can_disable=True),
'out-interface': KeyInfo(can_disable=True),
'out-interface-list': KeyInfo(can_disable=True),
'p2p': KeyInfo(can_disable=True),
'packet-mark': KeyInfo(can_disable=True),
'packet-size': KeyInfo(can_disable=True),
'per-connection-classifier': KeyInfo(can_disable=True),
'port': KeyInfo(can_disable=True),
'priority': KeyInfo(can_disable=True),
'protocol': KeyInfo(can_disable=True),
'psd': KeyInfo(can_disable=True),
'random': KeyInfo(can_disable=True),
'reject-with': KeyInfo(),
'routing-mark': KeyInfo(can_disable=True),
'routing-table': KeyInfo(can_disable=True),
'src-address': KeyInfo(can_disable=True),
'src-address-list': KeyInfo(can_disable=True),
'src-address-type': KeyInfo(can_disable=True),
'src-mac-address': KeyInfo(can_disable=True),
'src-port': KeyInfo(can_disable=True),
'tcp-flags': KeyInfo(can_disable=True),
'tcp-mss': KeyInfo(can_disable=True),
'time': KeyInfo(can_disable=True),
'tls-host': KeyInfo(can_disable=True),
'ttl': KeyInfo(can_disable=True),
},
),
('ip', 'firewall', 'mangle'): APIData(
fully_understood=True,
stratify_keys=('chain', ),
fields={
'action': KeyInfo(),
'chain': KeyInfo(),
'comment': KeyInfo(can_disable=True, remove_value=''),
'connection-bytes': KeyInfo(can_disable=True),
'connection-limit': KeyInfo(can_disable=True),
'connection-mark': KeyInfo(can_disable=True),
'connection-nat-state': KeyInfo(can_disable=True),
'connection-rate': KeyInfo(can_disable=True),
'connection-state': KeyInfo(can_disable=True),
'connection-type': KeyInfo(can_disable=True),
'content': KeyInfo(can_disable=True),
'disabled': KeyInfo(),
'dscp': KeyInfo(can_disable=True),
'dst-address': KeyInfo(can_disable=True),
'dst-address-list': KeyInfo(can_disable=True),
'dst-address-type': KeyInfo(can_disable=True),
'dst-limit': KeyInfo(can_disable=True),
'dst-port': KeyInfo(can_disable=True),
'fragment': KeyInfo(can_disable=True),
'hotspot': KeyInfo(can_disable=True),
'icmp-options': KeyInfo(can_disable=True),
'in-bridge-port': KeyInfo(can_disable=True),
'in-bridge-port-list': KeyInfo(can_disable=True),
'in-interface': KeyInfo(can_disable=True),
'in-interface-list': KeyInfo(can_disable=True),
'ingress-priority': KeyInfo(can_disable=True),
'ipsec-policy': KeyInfo(can_disable=True),
'ipv4-options': KeyInfo(can_disable=True),
'jump-target': KeyInfo(),
'layer7-protocol': KeyInfo(can_disable=True),
'limit': KeyInfo(can_disable=True),
'log': KeyInfo(),
'log-prefix': KeyInfo(),
'new-connection-mark': KeyInfo(can_disable=True),
'new-dscp': KeyInfo(can_disable=True),
'new-mss': KeyInfo(can_disable=True),
'new-packet-mark': KeyInfo(can_disable=True),
'new-priority': KeyInfo(can_disable=True),
'new-routing-mark': KeyInfo(can_disable=True),
'new-ttl': KeyInfo(can_disable=True),
'nth': KeyInfo(can_disable=True),
'out-bridge-port': KeyInfo(can_disable=True),
'out-bridge-port-list': KeyInfo(can_disable=True),
'out-interface': KeyInfo(can_disable=True),
'out-interface-list': KeyInfo(can_disable=True),
'p2p': KeyInfo(can_disable=True),
'packet-mark': KeyInfo(can_disable=True),
'packet-size': KeyInfo(can_disable=True),
'passthrough': KeyInfo(can_disable=True),
'per-connection-classifier': KeyInfo(can_disable=True),
'port': KeyInfo(can_disable=True),
'priority': KeyInfo(can_disable=True),
'protocol': KeyInfo(can_disable=True),
'psd': KeyInfo(can_disable=True),
'random': KeyInfo(can_disable=True),
'route-dst': KeyInfo(can_disable=True),
'routing-mark': KeyInfo(can_disable=True),
'routing-table': KeyInfo(can_disable=True),
'sniff-id': KeyInfo(can_disable=True),
'sniff-target': KeyInfo(can_disable=True),
'sniff-target-port': KeyInfo(can_disable=True),
'src-address': KeyInfo(can_disable=True),
'src-address-list': KeyInfo(can_disable=True),
'src-address-type': KeyInfo(can_disable=True),
'src-mac-address': KeyInfo(can_disable=True),
'src-port': KeyInfo(can_disable=True),
'tcp-flags': KeyInfo(can_disable=True),
'tcp-mss': KeyInfo(can_disable=True),
'time': KeyInfo(can_disable=True),
'tls-host': KeyInfo(can_disable=True),
'ttl': KeyInfo(can_disable=True),
},
),
('ip', 'firewall', 'nat'): APIData(
fully_understood=True,
stratify_keys=('chain', ),
fields={
'action': KeyInfo(),
'address-list': KeyInfo(),
'address-list-timeout': KeyInfo(),
'chain': KeyInfo(),
'comment': KeyInfo(can_disable=True, remove_value=''),
'connection-bytes': KeyInfo(can_disable=True),
'connection-limit': KeyInfo(can_disable=True),
'connection-mark': KeyInfo(can_disable=True),
'connection-rate': KeyInfo(can_disable=True),
'connection-type': KeyInfo(can_disable=True),
'content': KeyInfo(can_disable=True),
'disabled': KeyInfo(),
'dscp': KeyInfo(can_disable=True),
'dst-address': KeyInfo(can_disable=True),
'dst-address-list': KeyInfo(can_disable=True),
'dst-address-type': KeyInfo(can_disable=True),
'dst-limit': KeyInfo(can_disable=True),
'dst-port': KeyInfo(can_disable=True),
'fragment': KeyInfo(can_disable=True),
'hotspot': KeyInfo(can_disable=True),
'icmp-options': KeyInfo(can_disable=True),
'in-bridge-port': KeyInfo(can_disable=True),
'in-bridge-port-list': KeyInfo(can_disable=True),
'in-interface': KeyInfo(can_disable=True),
'in-interface-list': KeyInfo(can_disable=True),
'ingress-priority': KeyInfo(can_disable=True),
'ipsec-policy': KeyInfo(can_disable=True),
'ipv4-options': KeyInfo(can_disable=True),
'jump-target': KeyInfo(),
'layer7-protocol': KeyInfo(can_disable=True),
'limit': KeyInfo(can_disable=True),
'log': KeyInfo(),
'log-prefix': KeyInfo(),
'nth': KeyInfo(can_disable=True),
'out-bridge-port': KeyInfo(can_disable=True),
'out-bridge-port-list': KeyInfo(can_disable=True),
'out-interface': KeyInfo(can_disable=True),
'out-interface-list': KeyInfo(can_disable=True),
'packet-mark': KeyInfo(can_disable=True),
'packet-size': KeyInfo(can_disable=True),
'per-connection-classifier': KeyInfo(can_disable=True),
'port': KeyInfo(can_disable=True),
'priority': KeyInfo(can_disable=True),
'protocol': KeyInfo(can_disable=True),
'psd': KeyInfo(can_disable=True),
'random': KeyInfo(can_disable=True),
'realm': KeyInfo(can_disable=True),
'routing-mark': KeyInfo(can_disable=True),
'same-not-by-dst': KeyInfo(),
'src-address': KeyInfo(can_disable=True),
'src-address-list': KeyInfo(can_disable=True),
'src-address-type': KeyInfo(can_disable=True),
'src-mac-address': KeyInfo(can_disable=True),
'src-port': KeyInfo(can_disable=True),
'tcp-mss': KeyInfo(can_disable=True),
'time': KeyInfo(can_disable=True),
'tls-host': KeyInfo(can_disable=True),
'to-addresses': KeyInfo(can_disable=True),
'to-ports': KeyInfo(can_disable=True),
'ttl': KeyInfo(can_disable=True),
},
),
('ip', 'firewall', 'raw'): APIData(
fully_understood=True,
stratify_keys=('chain',),
fields={
'action': KeyInfo(),
'address-list': KeyInfo(),
'address-list-timeout': KeyInfo(),
'chain': KeyInfo(),
'comment': KeyInfo(can_disable=True, remove_value=''),
'content': KeyInfo(can_disable=True),
'disabled': KeyInfo(),
'dscp': KeyInfo(can_disable=True),
'dst-address': KeyInfo(can_disable=True),
'dst-address-list': KeyInfo(can_disable=True),
'dst-address-type': KeyInfo(can_disable=True),
'dst-limit': KeyInfo(can_disable=True),
'dst-port': KeyInfo(can_disable=True),
'fragment': KeyInfo(can_disable=True),
'hotspot': KeyInfo(can_disable=True),
'icmp-options': KeyInfo(can_disable=True),
'in-bridge-port': KeyInfo(can_disable=True),
'in-bridge-port-list': KeyInfo(can_disable=True),
'in-interface': KeyInfo(can_disable=True),
'in-interface-list': KeyInfo(can_disable=True),
'ingress-priority': KeyInfo(can_disable=True),
'ipsec-policy': KeyInfo(can_disable=True),
'ipv4-options': KeyInfo(can_disable=True),
'jump-target': KeyInfo(),
'limit': KeyInfo(can_disable=True),
'log': KeyInfo(),
'log-prefix': KeyInfo(),
'nth': KeyInfo(can_disable=True),
'out-bridge-port': KeyInfo(can_disable=True),
'out-bridge-port-list': KeyInfo(can_disable=True),
'out-interface': KeyInfo(can_disable=True),
'out-interface-list': KeyInfo(can_disable=True),
'packet-mark': KeyInfo(can_disable=True),
'packet-size': KeyInfo(can_disable=True),
'per-connection-classifier': KeyInfo(can_disable=True),
'port': KeyInfo(can_disable=True),
'priority': KeyInfo(can_disable=True),
'protocol': KeyInfo(can_disable=True),
'psd': KeyInfo(can_disable=True),
'random': KeyInfo(can_disable=True),
'src-address': KeyInfo(can_disable=True),
'src-address-list': KeyInfo(can_disable=True),
'src-address-type': KeyInfo(can_disable=True),
'src-mac-address': KeyInfo(can_disable=True),
'src-port': KeyInfo(can_disable=True),
'tcp-flags': KeyInfo(can_disable=True),
'tcp-mss': KeyInfo(can_disable=True),
'time': KeyInfo(can_disable=True),
'tls-host': KeyInfo(can_disable=True),
'ttl': KeyInfo(can_disable=True),
},
),
('ip', 'hotspot', 'user'): APIData(
unknown_mechanism=True,
# primary_keys=('default', ),
fields={
'default': KeyInfo(),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(),
'name': KeyInfo(),
},
),
('ip', 'ipsec', 'settings'): APIData(
single_value=True,
fully_understood=True,
fields={
'accounting': KeyInfo(default=True),
'interim-update': KeyInfo(default='0s'),
'xauth-use-radius': KeyInfo(default=False),
},
),
('ip', 'proxy'): APIData(
single_value=True,
fully_understood=True,
fields={
'always-from-cache': KeyInfo(default=False),
'anonymous': KeyInfo(default=False),
'cache-administrator': KeyInfo(default='webmaster'),
'cache-hit-dscp': KeyInfo(default=4),
'cache-on-disk': KeyInfo(default=False),
'cache-path': KeyInfo(default='web-proxy'),
'enabled': KeyInfo(default=False),
'max-cache-object-size': KeyInfo(default='2048KiB'),
'max-cache-size': KeyInfo(default='unlimited'),
'max-client-connections': KeyInfo(default=600),
'max-fresh-time': KeyInfo(default='3d'),
'max-server-connections': KeyInfo(default=600),
'parent-proxy': KeyInfo(default='::'),
'parent-proxy-port': KeyInfo(default=0),
'port': KeyInfo(default=8080),
'serialize-connections': KeyInfo(default=False),
'src-address': KeyInfo(default='::'),
},
),
('ip', 'smb'): APIData(
single_value=True,
fully_understood=True,
fields={
'allow-guests': KeyInfo(default=True),
'comment': KeyInfo(default='MikrotikSMB'),
'domain': KeyInfo(default='MSHOME'),
'enabled': KeyInfo(default=False),
'interfaces': KeyInfo(default='all'),
},
),
('ip', 'smb', 'shares'): APIData(
unknown_mechanism=True,
# primary_keys=('default', ),
fields={
'default': KeyInfo(),
'comment': KeyInfo(can_disable=True, remove_value=''),
'directory': KeyInfo(),
'disabled': KeyInfo(),
'max-sessions': KeyInfo(),
'name': KeyInfo(),
},
),
('ip', 'smb', 'users'): APIData(
unknown_mechanism=True,
# primary_keys=('default', ),
fields={
'default': KeyInfo(),
'disabled': KeyInfo(),
'name': KeyInfo(),
'password': KeyInfo(),
'read-only': KeyInfo(),
},
),
('ip', 'socks'): APIData(
single_value=True,
fully_understood=True,
fields={
'auth-method': KeyInfo(default='none'),
'connection-idle-timeout': KeyInfo(default='2m'),
'enabled': KeyInfo(default=False),
'max-connections': KeyInfo(default=200),
'port': KeyInfo(default=1080),
'version': KeyInfo(default=4),
},
),
('ip', 'ssh'): APIData(
single_value=True,
fully_understood=True,
fields={
'allow-none-crypto': KeyInfo(default=False),
'always-allow-password-login': KeyInfo(default=False),
'forwarding-enabled': KeyInfo(default=False),
'host-key-size': KeyInfo(default=2048),
'strong-crypto': KeyInfo(default=False),
},
),
('ip', 'tftp', 'settings'): APIData(
single_value=True,
fully_understood=True,
fields={
'max-block-size': KeyInfo(default=4096),
},
),
('ip', 'traffic-flow'): APIData(
single_value=True,
fully_understood=True,
fields={
'active-flow-timeout': KeyInfo(default='30m'),
'cache-entries': KeyInfo(default='32k'),
'enabled': KeyInfo(default=False),
'inactive-flow-timeout': KeyInfo(default='15s'),
'interfaces': KeyInfo(default='all'),
'packet-sampling': KeyInfo(default=False),
'sampling-interval': KeyInfo(default=0),
'sampling-space': KeyInfo(default=0),
},
),
('ip', 'traffic-flow', 'ipfix'): APIData(
single_value=True,
fully_understood=True,
fields={
'bytes': KeyInfo(default=True),
'dst-address': KeyInfo(default=True),
'dst-address-mask': KeyInfo(default=True),
'dst-mac-address': KeyInfo(default=True),
'dst-port': KeyInfo(default=True),
'first-forwarded': KeyInfo(default=True),
'gateway': KeyInfo(default=True),
'icmp-code': KeyInfo(default=True),
'icmp-type': KeyInfo(default=True),
'igmp-type': KeyInfo(default=True),
'in-interface': KeyInfo(default=True),
'ip-header-length': KeyInfo(default=True),
'ip-total-length': KeyInfo(default=True),
'ipv6-flow-label': KeyInfo(default=True),
'is-multicast': KeyInfo(default=True),
'last-forwarded': KeyInfo(default=True),
'nat-dst-address': KeyInfo(default=True),
'nat-dst-port': KeyInfo(default=True),
'nat-events': KeyInfo(default=False),
'nat-src-address': KeyInfo(default=True),
'nat-src-port': KeyInfo(default=True),
'out-interface': KeyInfo(default=True),
'packets': KeyInfo(default=True),
'protocol': KeyInfo(default=True),
'src-address': KeyInfo(default=True),
'src-address-mask': KeyInfo(default=True),
'src-mac-address': KeyInfo(default=True),
'src-port': KeyInfo(default=True),
'sys-init-time': KeyInfo(default=True),
'tcp-ack-num': KeyInfo(default=True),
'tcp-flags': KeyInfo(default=True),
'tcp-seq-num': KeyInfo(default=True),
'tcp-window-size': KeyInfo(default=True),
'tos': KeyInfo(default=True),
'ttl': KeyInfo(default=True),
'udp-length': KeyInfo(default=True),
},
),
('ip', 'traffic-flow', 'target'): APIData(
single_value=True,
fully_understood=True,
fields={
'address': KeyInfo(),
'disabled': KeyInfo(default=False),
'dst-address': KeyInfo(),
'port': KeyInfo(default=2055),
'src-address': KeyInfo(),
'v9-template-refresh': KeyInfo(default=20),
'v9-template-timeout': KeyInfo(),
'version': KeyInfo(),
},
),
('ip', 'upnp'): APIData(
single_value=True,
fully_understood=True,
fields={
'allow-disable-external-interface': KeyInfo(default=False),
'enabled': KeyInfo(default=False),
'show-dummy-rule': KeyInfo(default=True),
},
),
('ipv6', 'dhcp-client'): APIData(
fully_understood=True,
primary_keys=('interface', 'request'),
fields={
'add-default-route': KeyInfo(default=False),
'comment': KeyInfo(can_disable=True, remove_value=''),
'default-route-distance': KeyInfo(default=1),
'dhcp-options': KeyInfo(default=''),
'disabled': KeyInfo(default=False),
'interface': KeyInfo(),
'pool-name': KeyInfo(required=True),
'pool-prefix-length': KeyInfo(default=64),
'prefix-hint': KeyInfo(default='::/0'),
'request': KeyInfo(),
'use-peer-dns': KeyInfo(default=True),
},
),
('ipv6', 'dhcp-server'): APIData(
fully_understood=True,
primary_keys=('name', ),
fields={
'address-pool': KeyInfo(required=True),
'allow-dual-stack-queue': KeyInfo(can_disable=True, remove_value=True),
'binding-script': KeyInfo(can_disable=True, remove_value=''),
'comment': KeyInfo(can_disable=True, remove_value=''),
'dhcp-option': KeyInfo(default=''),
'disabled': KeyInfo(default=False),
'insert-queue-before': KeyInfo(can_disable=True, remove_value='first'),
'interface': KeyInfo(required=True),
'lease-time': KeyInfo(default='3d'),
'name': KeyInfo(),
'parent-queue': KeyInfo(can_disable=True, remove_value='none'),
'preference': KeyInfo(default=255),
'rapid-commit': KeyInfo(default=True),
'route-distance': KeyInfo(default=1),
'use-radius': KeyInfo(default=False),
},
),
('ipv6', 'dhcp-server', 'option'): APIData(
fully_understood=True,
primary_keys=('name',),
fields={
'code': KeyInfo(required=True),
'name': KeyInfo(),
'value': KeyInfo(default=''),
},
),
('ipv6', 'firewall', 'address-list'): APIData(
fully_understood=True,
primary_keys=('address', 'list', ),
fields={
'address': KeyInfo(),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'list': KeyInfo(),
},
),
('ipv6', 'firewall', 'filter'): APIData(
fully_understood=True,
stratify_keys=('chain', ),
fields={
'action': KeyInfo(),
'chain': KeyInfo(),
'comment': KeyInfo(can_disable=True, remove_value=''),
'connection-bytes': KeyInfo(can_disable=True),
'connection-limit': KeyInfo(can_disable=True),
'connection-mark': KeyInfo(can_disable=True),
'connection-rate': KeyInfo(can_disable=True),
'connection-state': KeyInfo(can_disable=True),
'connection-type': KeyInfo(can_disable=True),
'content': KeyInfo(can_disable=True),
'disabled': KeyInfo(),
'dscp': KeyInfo(can_disable=True),
'dst-address': KeyInfo(can_disable=True),
'dst-address-list': KeyInfo(can_disable=True),
'dst-address-type': KeyInfo(can_disable=True),
'dst-limit': KeyInfo(can_disable=True),
'dst-port': KeyInfo(can_disable=True),
'headers': KeyInfo(can_disable=True),
'hop-limit': KeyInfo(can_disable=True),
'icmp-options': KeyInfo(can_disable=True),
'in-bridge-port': KeyInfo(can_disable=True),
'in-bridge-port-list': KeyInfo(can_disable=True),
'in-interface': KeyInfo(can_disable=True),
'in-interface-list': KeyInfo(can_disable=True),
'ingress-priority': KeyInfo(can_disable=True),
'ipsec-policy': KeyInfo(can_disable=True),
'jump-target': KeyInfo(),
'limit': KeyInfo(can_disable=True),
'log': KeyInfo(),
'log-prefix': KeyInfo(),
'nth': KeyInfo(can_disable=True),
'out-bridge-port': KeyInfo(can_disable=True),
'out-bridge-port-list': KeyInfo(can_disable=True),
'out-interface': KeyInfo(can_disable=True),
'out-interface-list': KeyInfo(can_disable=True),
'packet-mark': KeyInfo(can_disable=True),
'packet-size': KeyInfo(can_disable=True),
'per-connection-classifier': KeyInfo(can_disable=True),
'port': KeyInfo(can_disable=True),
'priority': KeyInfo(can_disable=True),
'protocol': KeyInfo(can_disable=True),
'random': KeyInfo(can_disable=True),
'reject-with': KeyInfo(),
'src-address': KeyInfo(can_disable=True),
'src-address-list': KeyInfo(can_disable=True),
'src-address-type': KeyInfo(can_disable=True),
'src-mac-address': KeyInfo(can_disable=True),
'src-port': KeyInfo(can_disable=True),
'tcp-flags': KeyInfo(can_disable=True),
'tcp-mss': KeyInfo(can_disable=True),
'time': KeyInfo(can_disable=True),
},
),
('ipv6', 'firewall', 'mangle'): APIData(
fully_understood=True,
stratify_keys=('chain', ),
fields={
'action': KeyInfo(),
'address-list': KeyInfo(),
'address-list-timeout': KeyInfo(),
'chain': KeyInfo(),
'comment': KeyInfo(can_disable=True, remove_value=''),
'connection-bytes': KeyInfo(can_disable=True),
'connection-limit': KeyInfo(can_disable=True),
'connection-mark': KeyInfo(can_disable=True),
'connection-rate': KeyInfo(can_disable=True),
'connection-state': KeyInfo(can_disable=True),
'connection-type': KeyInfo(can_disable=True),
'content': KeyInfo(can_disable=True),
'disabled': KeyInfo(),
'dscp': KeyInfo(can_disable=True),
'dst-address': KeyInfo(can_disable=True),
'dst-address-list': KeyInfo(can_disable=True),
'dst-address-type': KeyInfo(can_disable=True),
'dst-limit': KeyInfo(can_disable=True),
'dst-port': KeyInfo(can_disable=True),
'dst-prefix': KeyInfo(),
'headers': KeyInfo(can_disable=True),
'hop-limit': KeyInfo(can_disable=True),
'icmp-options': KeyInfo(can_disable=True),
'in-bridge-port': KeyInfo(can_disable=True),
'in-bridge-port-list': KeyInfo(can_disable=True),
'in-interface': KeyInfo(can_disable=True),
'in-interface-list': KeyInfo(can_disable=True),
'ingress-priority': KeyInfo(can_disable=True),
'ipsec-policy': KeyInfo(can_disable=True),
'jump-target': KeyInfo(),
'limit': KeyInfo(can_disable=True),
'log': KeyInfo(),
'log-prefix': KeyInfo(),
'new-connection-mark': KeyInfo(),
'new-dscp': KeyInfo(),
'new-hop-limit': KeyInfo(),
'new-mss': KeyInfo(),
'new-packet-mark': KeyInfo(),
'new-routing-mark': KeyInfo(),
'nth': KeyInfo(can_disable=True),
'out-bridge-port': KeyInfo(can_disable=True),
'out-bridge-port-list': KeyInfo(can_disable=True),
'out-interface': KeyInfo(can_disable=True),
'out-interface-list': KeyInfo(can_disable=True),
'packet-mark': KeyInfo(can_disable=True),
'packet-size': KeyInfo(can_disable=True),
'passthrough': KeyInfo(),
'per-connection-classifier': KeyInfo(can_disable=True),
'port': KeyInfo(can_disable=True),
'priority': KeyInfo(can_disable=True),
'protocol': KeyInfo(can_disable=True),
'random': KeyInfo(can_disable=True),
'routing-mark': KeyInfo(can_disable=True),
'sniff-id': KeyInfo(),
'sniff-target': KeyInfo(),
'sniff-target-port': KeyInfo(),
'src-address': KeyInfo(can_disable=True),
'src-address-list': KeyInfo(can_disable=True),
'src-address-type': KeyInfo(can_disable=True),
'src-mac-address': KeyInfo(can_disable=True),
'src-port': KeyInfo(can_disable=True),
'src-prefix': KeyInfo(),
'tcp-flags': KeyInfo(can_disable=True),
'tcp-mss': KeyInfo(can_disable=True),
'time': KeyInfo(can_disable=True),
'tls-host': KeyInfo(can_disable=True),
}
),
('ipv6', 'firewall', 'raw'): APIData(
fully_understood=True,
stratify_keys=('chain',),
fields={
'action': KeyInfo(),
'address-list': KeyInfo(),
'address-list-timeout': KeyInfo(),
'chain': KeyInfo(),
'comment': KeyInfo(can_disable=True, remove_value=''),
'content': KeyInfo(can_disable=True),
'disabled': KeyInfo(),
'dscp': KeyInfo(can_disable=True),
'dst-address': KeyInfo(can_disable=True),
'dst-address-list': KeyInfo(can_disable=True),
'dst-address-type': KeyInfo(can_disable=True),
'dst-limit': KeyInfo(can_disable=True),
'dst-port': KeyInfo(can_disable=True),
'headers': KeyInfo(can_disable=True),
'hop-limit': KeyInfo(can_disable=True),
'icmp-options': KeyInfo(can_disable=True),
'in-bridge-port': KeyInfo(can_disable=True),
'in-bridge-port-list': KeyInfo(can_disable=True),
'in-interface': KeyInfo(can_disable=True),
'in-interface-list': KeyInfo(can_disable=True),
'ingress-priority': KeyInfo(can_disable=True),
'ipsec-policy': KeyInfo(can_disable=True),
'jump-target': KeyInfo(),
'limit': KeyInfo(can_disable=True),
'log': KeyInfo(),
'log-prefix': KeyInfo(),
'nth': KeyInfo(can_disable=True),
'out-bridge-port': KeyInfo(can_disable=True),
'out-bridge-port-list': KeyInfo(can_disable=True),
'out-interface': KeyInfo(can_disable=True),
'out-interface-list': KeyInfo(can_disable=True),
'packet-mark': KeyInfo(can_disable=True),
'packet-size': KeyInfo(can_disable=True),
'per-connection-classifier': KeyInfo(can_disable=True),
'port': KeyInfo(can_disable=True),
'priority': KeyInfo(can_disable=True),
'protocol': KeyInfo(can_disable=True),
'random': KeyInfo(can_disable=True),
'src-address': KeyInfo(can_disable=True),
'src-address-list': KeyInfo(can_disable=True),
'src-address-type': KeyInfo(can_disable=True),
'src-mac-address': KeyInfo(can_disable=True),
'src-port': KeyInfo(can_disable=True),
'tcp-flags': KeyInfo(can_disable=True),
'tcp-mss': KeyInfo(can_disable=True),
'time': KeyInfo(can_disable=True),
'tls-host': KeyInfo(can_disable=True),
}
),
('ipv6', 'nd'): APIData(
fully_understood=True,
primary_keys=('interface', ),
fields={
'advertise-dns': KeyInfo(default=True),
'advertise-mac-address': KeyInfo(default=True),
'disabled': KeyInfo(default=False),
'dns': KeyInfo(default=''),
'hop-limit': KeyInfo(default='unspecified'),
'interface': KeyInfo(),
'managed-address-configuration': KeyInfo(default=False),
'mtu': KeyInfo(default='unspecified'),
'other-configuration': KeyInfo(default=False),
'ra-delay': KeyInfo(default='3s'),
'ra-interval': KeyInfo(default='3m20s-10m'),
'ra-lifetime': KeyInfo(default='30m'),
'ra-preference': KeyInfo(default='medium'),
'reachable-time': KeyInfo(default='unspecified'),
'retransmit-interval': KeyInfo(default='unspecified'),
},
),
('ipv6', 'nd', 'prefix', 'default'): APIData(
single_value=True,
fully_understood=True,
fields={
'autonomous': KeyInfo(default=True),
'preferred-lifetime': KeyInfo(default='1w'),
'valid-lifetime': KeyInfo(default='4w2d'),
},
),
('ipv6', 'route'): APIData(
fully_understood=True,
fields={
'bgp-as-path': KeyInfo(can_disable=True),
'bgp-atomic-aggregate': KeyInfo(can_disable=True),
'bgp-communities': KeyInfo(can_disable=True),
'bgp-local-pref': KeyInfo(can_disable=True),
'bgp-med': KeyInfo(can_disable=True),
'bgp-origin': KeyInfo(can_disable=True),
'bgp-prepend': KeyInfo(can_disable=True),
'type': KeyInfo(can_disable=True, remove_value='unicast'),
'blackhole': KeyInfo(can_disable=True),
'check-gateway': KeyInfo(can_disable=True),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(),
'distance': KeyInfo(default=1),
'dst-address': KeyInfo(),
'gateway': KeyInfo(),
'route-tag': KeyInfo(can_disable=True),
'routing-table': KeyInfo(default='main'),
'scope': KeyInfo(default=30),
'target-scope': KeyInfo(default=10),
'vrf-interface': KeyInfo(can_disable=True),
},
),
('mpls', ): APIData(
single_value=True,
fully_understood=True,
fields={
'allow-fast-path': KeyInfo(default=True),
'dynamic-label-range': KeyInfo(default='16-1048575'),
'propagate-ttl': KeyInfo(default=True),
},
),
('mpls', 'interface'): APIData(
unknown_mechanism=True,
# primary_keys=('default', ),
fields={
'default': KeyInfo(),
'disabled': KeyInfo(),
'interface': KeyInfo(),
'mpls-mtu': KeyInfo(),
},
),
('mpls', 'ldp'): APIData(
single_value=True,
fully_understood=True,
fields={
'distribute-for-default-route': KeyInfo(default=False),
'enabled': KeyInfo(default=False),
'hop-limit': KeyInfo(default=255),
'loop-detect': KeyInfo(default=False),
'lsr-id': KeyInfo(default='0.0.0.0'),
'path-vector-limit': KeyInfo(default=255),
'transport-address': KeyInfo(default='0.0.0.0'),
'use-explicit-null': KeyInfo(default=False),
},
),
('port', 'firmware'): APIData(
single_value=True,
fully_understood=True,
fields={
'directory': KeyInfo(default='firmware'),
'ignore-directip-modem': KeyInfo(default=False),
},
),
('ppp', 'aaa'): APIData(
single_value=True,
fully_understood=True,
fields={
'accounting': KeyInfo(default=True),
'interim-update': KeyInfo(default='0s'),
'use-circuit-id-in-nas-port-id': KeyInfo(default=False),
'use-radius': KeyInfo(default=False),
},
),
('radius', 'incoming'): APIData(
single_value=True,
fully_understood=True,
fields={
'accept': KeyInfo(default=False),
'port': KeyInfo(default=3799),
},
),
('routing', 'bfd', 'interface'): APIData(
unknown_mechanism=True,
# primary_keys=('default', ),
fields={
'default': KeyInfo(),
'disabled': KeyInfo(),
'interface': KeyInfo(),
'interval': KeyInfo(),
'min-rx': KeyInfo(),
'multiplier': KeyInfo(),
},
),
('routing', 'mme'): APIData(
single_value=True,
fully_understood=True,
fields={
'bidirectional-timeout': KeyInfo(default=2),
'gateway-class': KeyInfo(default='none'),
'gateway-keepalive': KeyInfo(default='1m'),
'gateway-selection': KeyInfo(default='no-gateway'),
'origination-interval': KeyInfo(default='5s'),
'preferred-gateway': KeyInfo(default='0.0.0.0'),
'timeout': KeyInfo(default='1m'),
'ttl': KeyInfo(default=50),
},
),
('routing', 'rip'): APIData(
single_value=True,
fully_understood=True,
fields={
'distribute-default': KeyInfo(default='never'),
'garbage-timer': KeyInfo(default='2m'),
'metric-bgp': KeyInfo(default=1),
'metric-connected': KeyInfo(default=1),
'metric-default': KeyInfo(default=1),
'metric-ospf': KeyInfo(default=1),
'metric-static': KeyInfo(default=1),
'redistribute-bgp': KeyInfo(default=False),
'redistribute-connected': KeyInfo(default=False),
'redistribute-ospf': KeyInfo(default=False),
'redistribute-static': KeyInfo(default=False),
'routing-table': KeyInfo(default='main'),
'timeout-timer': KeyInfo(default='3m'),
'update-timer': KeyInfo(default='30s'),
},
),
('routing', 'ripng'): APIData(
single_value=True,
fully_understood=True,
fields={
'distribute-default': KeyInfo(default='never'),
'garbage-timer': KeyInfo(default='2m'),
'metric-bgp': KeyInfo(default=1),
'metric-connected': KeyInfo(default=1),
'metric-default': KeyInfo(default=1),
'metric-ospf': KeyInfo(default=1),
'metric-static': KeyInfo(default=1),
'redistribute-bgp': KeyInfo(default=False),
'redistribute-connected': KeyInfo(default=False),
'redistribute-ospf': KeyInfo(default=False),
'redistribute-static': KeyInfo(default=False),
'timeout-timer': KeyInfo(default='3m'),
'update-timer': KeyInfo(default='30s'),
},
),
('snmp', ): APIData(
single_value=True,
fully_understood=True,
fields={
'contact': KeyInfo(default=''),
'enabled': KeyInfo(default=False),
'engine-id': KeyInfo(default=''),
'engine-id-suffix': KeyInfo(default=''),
'location': KeyInfo(default=''),
'src-address': KeyInfo(default='::'),
'trap-community': KeyInfo(default='public'),
'trap-generators': KeyInfo(default='temp-exception'),
'trap-target': KeyInfo(default=''),
'trap-version': KeyInfo(default=1),
'trap-interfaces': KeyInfo(default=''),
},
),
('system', 'clock'): APIData(
single_value=True,
fully_understood=True,
fields={
'time-zone-autodetect': KeyInfo(default=True),
'time-zone-name': KeyInfo(default='manual'),
},
),
('system', 'clock', 'manual'): APIData(
single_value=True,
fully_understood=True,
fields={
'dst-delta': KeyInfo(default='00:00'),
'dst-end': KeyInfo(default='jan/01/1970 00:00:00'),
'dst-start': KeyInfo(default='jan/01/1970 00:00:00'),
'time-zone': KeyInfo(default='+00:00'),
},
),
('system', 'identity'): APIData(
single_value=True,
fully_understood=True,
fields={
'name': KeyInfo(default='Mikrotik'),
},
),
('system', 'leds', 'settings'): APIData(
single_value=True,
fully_understood=True,
fields={
'all-leds-off': KeyInfo(default='never'),
},
),
('system', 'note'): APIData(
single_value=True,
fully_understood=True,
fields={
'note': KeyInfo(default=''),
'show-at-login': KeyInfo(default=True),
},
),
('system', 'ntp', 'client'): APIData(
single_value=True,
fully_understood=True,
fields={
'enabled': KeyInfo(default=False),
'primary-ntp': KeyInfo(default='0.0.0.0'),
'secondary-ntp': KeyInfo(default='0.0.0.0'),
'server-dns-names': KeyInfo(default=''),
'servers': KeyInfo(default=''),
'mode': KeyInfo(default='unicast'),
'vrf': KeyInfo(default='main'),
},
),
('system', 'ntp', 'client', 'servers'): APIData(
primary_keys=('address', ),
fully_understood=True,
fields={
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'address': KeyInfo(),
'auth-key': KeyInfo(default='none'),
'iburst': KeyInfo(default=True),
'max-poll': KeyInfo(default=10),
'min-poll': KeyInfo(default=6),
},
),
('system', 'ntp', 'server'): APIData(
single_value=True,
fully_understood=True,
fields={
'auth-key': KeyInfo(default='none'),
'broadcast': KeyInfo(default=False),
'broadcast-addresses': KeyInfo(default=''),
'enabled': KeyInfo(default=False),
'local-clock-stratum': KeyInfo(default=5),
'manycast': KeyInfo(default=False),
'multicast': KeyInfo(default=False),
'use-local-clock': KeyInfo(default=False),
'vrf': KeyInfo(default='main'),
},
),
('system', 'package', 'update'): APIData(
single_value=True,
fully_understood=True,
fields={
'channel': KeyInfo(default='stable'),
},
),
('system', 'routerboard', 'settings'): APIData(
single_value=True,
fully_understood=True,
fields={
'auto-upgrade': KeyInfo(default=False),
'baud-rate': KeyInfo(default=115200),
'boot-delay': KeyInfo(default='2s'),
'boot-device': KeyInfo(default='nand-if-fail-then-ethernet'),
'boot-protocol': KeyInfo(default='bootp'),
'enable-jumper-reset': KeyInfo(default=True),
'enter-setup-on': KeyInfo(default='any-key'),
'force-backup-booter': KeyInfo(default=False),
'protected-routerboot': KeyInfo(default='disabled'),
'reformat-hold-button': KeyInfo(default='20s'),
'reformat-hold-button-max': KeyInfo(default='10m'),
'silent-boot': KeyInfo(default=False),
},
),
('system', 'upgrade', 'mirror'): APIData(
single_value=True,
fully_understood=True,
fields={
'check-interval': KeyInfo(default='1d'),
'enabled': KeyInfo(default=False),
'primary-server': KeyInfo(default='0.0.0.0'),
'secondary-server': KeyInfo(default='0.0.0.0'),
'user': KeyInfo(default=''),
},
),
('system', 'ups'): APIData(
fully_understood=True,
primary_keys=('name', ),
fields={
'alarm-setting': KeyInfo(default='immediate'),
'check-capabilities': KeyInfo(can_disable=True, remove_value=True),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=True),
'min-runtime': KeyInfo(default='never'),
'name': KeyInfo(),
'offline-time': KeyInfo(default='0s'),
'port': KeyInfo(required=True),
},
),
('system', 'watchdog'): APIData(
single_value=True,
fully_understood=True,
fields={
'auto-send-supout': KeyInfo(default=False),
'automatic-supout': KeyInfo(default=True),
'ping-start-after-boot': KeyInfo(default='5m'),
'ping-timeout': KeyInfo(default='1m'),
'watch-address': KeyInfo(default='none'),
'watchdog-timer': KeyInfo(default=True),
},
),
('tool', 'bandwidth-server'): APIData(
single_value=True,
fully_understood=True,
fields={
'allocate-udp-ports-from': KeyInfo(default=2000),
'authenticate': KeyInfo(default=True),
'enabled': KeyInfo(default=True),
'max-sessions': KeyInfo(default=100),
},
),
('tool', 'e-mail'): APIData(
single_value=True,
fully_understood=True,
fields={
'address': KeyInfo(default='0.0.0.0'),
'from': KeyInfo(default='<>'),
'password': KeyInfo(default=''),
'port': KeyInfo(default=25),
'start-tls': KeyInfo(default=False),
'tls': KeyInfo(default=False),
'user': KeyInfo(default=''),
},
),
('tool', 'graphing'): APIData(
single_value=True,
fully_understood=True,
fields={
'page-refresh': KeyInfo(default=300),
'store-every': KeyInfo(default='5min'),
},
),
('tool', 'mac-server'): APIData(
single_value=True,
fully_understood=True,
fields={
'allowed-interface-list': KeyInfo(),
},
),
('tool', 'mac-server', 'mac-winbox'): APIData(
single_value=True,
fully_understood=True,
fields={
'allowed-interface-list': KeyInfo(),
},
),
('tool', 'mac-server', 'ping'): APIData(
single_value=True,
fully_understood=True,
fields={
'enabled': KeyInfo(default=True),
},
),
('tool', 'romon'): APIData(
single_value=True,
fully_understood=True,
fields={
'enabled': KeyInfo(default=False),
'id': KeyInfo(default='00:00:00:00:00:00'),
'secrets': KeyInfo(default=''),
},
),
('tool', 'romon', 'port'): APIData(
fields={
'cost': KeyInfo(),
'disabled': KeyInfo(),
'forbid': KeyInfo(),
'interface': KeyInfo(),
'secrets': KeyInfo(),
},
),
('tool', 'sms'): APIData(
single_value=True,
fully_understood=True,
fields={
'allowed-number': KeyInfo(default=''),
'auto-erase': KeyInfo(default=False),
'channel': KeyInfo(default=0),
'port': KeyInfo(default='none'),
'receive-enabled': KeyInfo(default=False),
'secret': KeyInfo(default=''),
'sim-pin': KeyInfo(default=''),
},
),
('tool', 'sniffer'): APIData(
single_value=True,
fully_understood=True,
fields={
'file-limit': KeyInfo(default='1000KiB'),
'file-name': KeyInfo(default=''),
'filter-cpu': KeyInfo(default=''),
'filter-direction': KeyInfo(default='any'),
'filter-interface': KeyInfo(default=''),
'filter-ip-address': KeyInfo(default=''),
'filter-ip-protocol': KeyInfo(default=''),
'filter-ipv6-address': KeyInfo(default=''),
'filter-mac-address': KeyInfo(default=''),
'filter-mac-protocol': KeyInfo(default=''),
'filter-operator-between-entries': KeyInfo(default='or'),
'filter-port': KeyInfo(default=''),
'filter-size': KeyInfo(default=''),
'filter-stream': KeyInfo(default=False),
'memory-limit': KeyInfo(default='100KiB'),
'memory-scroll': KeyInfo(default=True),
'only-headers': KeyInfo(default=False),
'streaming-enabled': KeyInfo(default=False),
'streaming-server': KeyInfo(default='0.0.0.0:37008'),
},
),
('tool', 'traffic-generator'): APIData(
single_value=True,
fully_understood=True,
fields={
'latency-distribution-max': KeyInfo(default='100us'),
'measure-out-of-order': KeyInfo(default=True),
'stats-samples-to-keep': KeyInfo(default=100),
'test-id': KeyInfo(default=0),
},
),
('user', 'aaa'): APIData(
single_value=True,
fully_understood=True,
fields={
'accounting': KeyInfo(default=True),
'default-group': KeyInfo(default='read'),
'exclude-groups': KeyInfo(default=''),
'interim-update': KeyInfo(default='0s'),
'use-radius': KeyInfo(default=False),
},
),
('queue', 'interface'): APIData(
primary_keys=('interface', ),
fully_understood=True,
fixed_entries=True,
fields={
'interface': KeyInfo(required=True),
'queue': KeyInfo(required=True),
},
),
('queue', 'tree'): APIData(
primary_keys=('name', ),
fully_understood=True,
fields={
'bucket-size': KeyInfo(default='0.1'),
'burst-limit': KeyInfo(default=0),
'burst-threshold': KeyInfo(default=0),
'burst-time': KeyInfo(default='0s'),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'limit-at': KeyInfo(default=0),
'max-limit': KeyInfo(default=0),
'name': KeyInfo(),
'packet-mark': KeyInfo(default=''),
'parent': KeyInfo(required=True),
'priority': KeyInfo(default=8),
'queue': KeyInfo(default='default-small'),
},
),
('interface', 'ethernet', 'switch'): APIData(
fixed_entries=True,
primary_keys=('name', ),
fully_understood=True,
fields={
'cpu-flow-control': KeyInfo(default=True),
'mirror-source': KeyInfo(default='none'),
'mirror-target': KeyInfo(default='none'),
'name': KeyInfo(),
},
),
('interface', 'ethernet', 'switch', 'port'): APIData(
fixed_entries=True,
primary_keys=('name', ),
fully_understood=True,
fields={
'default-vlan-id': KeyInfo(),
'name': KeyInfo(),
'vlan-header': KeyInfo(default='leave-as-is'),
'vlan-mode': KeyInfo(default='disabled'),
},
),
('ip', 'dhcp-client', 'option'): APIData(
fixed_entries=True,
primary_keys=('name', ),
fully_understood=True,
fields={
'code': KeyInfo(),
'name': KeyInfo(),
'value': KeyInfo(),
},
),
('ppp', 'profile'): APIData(
has_identifier=True,
fields={
'address-list': KeyInfo(),
'bridge': KeyInfo(can_disable=True),
'bridge-horizon': KeyInfo(can_disable=True),
'bridge-learning': KeyInfo(),
'bridge-path-cost': KeyInfo(can_disable=True),
'bridge-port-priority': KeyInfo(can_disable=True),
'change-tcp-mss': KeyInfo(),
'dns-server': KeyInfo(can_disable=True),
'idle-timeout': KeyInfo(can_disable=True),
'incoming-filter': KeyInfo(can_disable=True),
'insert-queue-before': KeyInfo(can_disable=True),
'interface-list': KeyInfo(can_disable=True),
'local-address': KeyInfo(can_disable=True),
'name': KeyInfo(),
'on-down': KeyInfo(),
'on-up': KeyInfo(),
'only-one': KeyInfo(),
'outgoing-filter': KeyInfo(can_disable=True),
'parent-queue': KeyInfo(can_disable=True),
'queue-type': KeyInfo(can_disable=True),
'rate-limit': KeyInfo(can_disable=True),
'remote-address': KeyInfo(can_disable=True),
'session-timeout': KeyInfo(can_disable=True),
'use-compression': KeyInfo(),
'use-encryption': KeyInfo(),
'use-ipv6': KeyInfo(),
'use-mpls': KeyInfo(),
'use-upnp': KeyInfo(),
'wins-server': KeyInfo(can_disable=True),
},
),
('queue', 'type'): APIData(
has_identifier=True,
fields={
'kind': KeyInfo(),
'mq-pfifo-limit': KeyInfo(),
'name': KeyInfo(),
'pcq-burst-rate': KeyInfo(),
'pcq-burst-threshold': KeyInfo(),
'pcq-burst-time': KeyInfo(),
'pcq-classifier': KeyInfo(),
'pcq-dst-address-mask': KeyInfo(),
'pcq-dst-address6-mask': KeyInfo(),
'pcq-limit': KeyInfo(),
'pcq-rate': KeyInfo(),
'pcq-src-address-mask': KeyInfo(),
'pcq-src-address6-mask': KeyInfo(),
'pcq-total-limit': KeyInfo(),
'pfifo-limit': KeyInfo(),
'red-avg-packet': KeyInfo(),
'red-burst': KeyInfo(),
'red-limit': KeyInfo(),
'red-max-threshold': KeyInfo(),
'red-min-threshold': KeyInfo(),
'sfq-allot': KeyInfo(),
'sfq-perturb': KeyInfo(),
},
),
('routing', 'bgp', 'instance'): APIData(
fixed_entries=True,
primary_keys=('name', ),
fully_understood=True,
fields={
'as': KeyInfo(),
'client-to-client-reflection': KeyInfo(),
'cluster-id': KeyInfo(can_disable=True),
'confederation': KeyInfo(can_disable=True),
'disabled': KeyInfo(),
'ignore-as-path-len': KeyInfo(),
'name': KeyInfo(),
'out-filter': KeyInfo(),
'redistribute-connected': KeyInfo(),
'redistribute-ospf': KeyInfo(),
'redistribute-other-bgp': KeyInfo(),
'redistribute-rip': KeyInfo(),
'redistribute-static': KeyInfo(),
'router-id': KeyInfo(),
'routing-table': KeyInfo(),
},
),
('system', 'logging', 'action'): APIData(
fully_understood=True,
primary_keys=('name',),
fields={
'bsd-syslog': KeyInfo(default=False),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disk-file-count': KeyInfo(default=2),
'disk-file-name': KeyInfo(default='log'),
'disk-lines-per-file': KeyInfo(default=1000),
'disk-stop-on-full': KeyInfo(default=False),
'email-start-tls': KeyInfo(default=False),
'email-to': KeyInfo(default=''),
'memory-lines': KeyInfo(default=1000),
'memory-stop-on-full': KeyInfo(default=False),
'name': KeyInfo(),
'remember': KeyInfo(default=True),
'remote': KeyInfo(default='0.0.0.0'),
'remote-port': KeyInfo(default=514),
'src-address': KeyInfo(default='0.0.0.0'),
'syslog-facility': KeyInfo(default='daemon'),
'syslog-severity': KeyInfo(default='auto'),
'syslog-time-format': KeyInfo(default='bsd-syslog'),
'target': KeyInfo(required=True),
},
),
('user', 'group'): APIData(
fixed_entries=True,
primary_keys=('name', ),
fully_understood=True,
fields={
'name': KeyInfo(),
'policy': KeyInfo(),
'skin': KeyInfo(default='default'),
},
),
('caps-man', 'manager'): APIData(
single_value=True,
fully_understood=True,
fields={
'ca-certificate': KeyInfo(default='none'),
'certificate': KeyInfo(default='none'),
'enabled': KeyInfo(default=False),
'package-path': KeyInfo(default=''),
'require-peer-certificate': KeyInfo(default=False),
'upgrade-policy': KeyInfo(default='none'),
},
),
('ip', 'firewall', 'service-port'): APIData(
primary_keys=('name', ),
fully_understood=True,
fields={
'disabled': KeyInfo(default=False),
'name': KeyInfo(),
'ports': KeyInfo(),
'sip-direct-media': KeyInfo(),
'sip-timeout': KeyInfo(),
},
),
('ip', 'firewall', 'layer7-protocol'): APIData(
primary_keys=('name', ),
fully_understood=True,
fields={
'comment': KeyInfo(can_disable=True, remove_value=''),
'name': KeyInfo(),
'regexp': KeyInfo(),
},
),
('ip', 'hotspot', 'service-port'): APIData(
fixed_entries=True,
primary_keys=('name', ),
fully_understood=True,
fields={
'disabled': KeyInfo(default=False),
'name': KeyInfo(),
'ports': KeyInfo(),
},
),
('ip', 'ipsec', 'policy'): APIData(
fully_understood=True,
fields={
'action': KeyInfo(default='encrypt'),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'dst-address': KeyInfo(),
'dst-port': KeyInfo(default='any'),
'group': KeyInfo(can_disable=True, remove_value='default'),
'ipsec-protocols': KeyInfo(default='esp'),
'level': KeyInfo(default='require'),
'peer': KeyInfo(),
'proposal': KeyInfo(default='default'),
'protocol': KeyInfo(default='all'),
'src-address': KeyInfo(),
'src-port': KeyInfo(default='any'),
'template': KeyInfo(can_disable=True, remove_value=False),
# the template field can't really be changed once the item is created. This config captures the behavior as best it can,
# i.e. template=yes is shown, template=no is hidden
'tunnel': KeyInfo(default=False),
},
),
('ip', 'service'): APIData(
fixed_entries=True,
primary_keys=('name', ),
fully_understood=True,
fields={
'address': KeyInfo(),
'certificate': KeyInfo(),
'disabled': KeyInfo(default=False),
'name': KeyInfo(),
'port': KeyInfo(),
'tls-version': KeyInfo(),
},
),
('system', 'logging'): APIData(
fully_understood=True,
fields={
'action': KeyInfo(default='memory'),
'disabled': KeyInfo(default=False),
'prefix': KeyInfo(default=''),
'topics': KeyInfo(default=''),
},
),
('system', 'resource', 'irq'): APIData(
has_identifier=True,
fields={
'cpu': KeyInfo(),
},
),
('system', 'scheduler'): APIData(
fully_understood=True,
primary_keys=('name', ),
fields={
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
'interval': KeyInfo(default='0s'),
'name': KeyInfo(),
'on-event': KeyInfo(default=''),
'policy': KeyInfo(default='ftp,reboot,read,write,policy,test,password,sniff,sensitive,romon'),
'start-date': KeyInfo(),
'start-time': KeyInfo(),
},
),
('system', 'script'): APIData(
fully_understood=True,
primary_keys=('name',),
fields={
'comment': KeyInfo(can_disable=True, remove_value=''),
'dont-require-permissions': KeyInfo(default=False),
'name': KeyInfo(),
'owner': KeyInfo(),
'policy': KeyInfo(default='ftp,reboot,read,write,policy,test,password,sniff,sensitive,romon'),
'source': KeyInfo(default=''),
},
),
}
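# Illustrative sketch only (not part of the module itself): assuming the
# mapping above is bound to a name like PATHS and that KeyInfo exposes its
# `default` as an attribute, a consumer could test whether a value matches
# the declared default roughly like this:
#
#     def is_default(path, field, value):
#         info = PATHS[path].fields[field]
#         return info.default is not None and value == info.default
#
#     is_default(('ip', 'dns'), 'cache-size', '2048KiB')  # -> True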
/latent_features/optimizers.py
import tensorflow as tf
import abc
import logging
import math
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
OPTIMIZER_REGISTRY = {}
def register_optimizer(name, external_params=[], class_params={}):
def insert_in_registry(class_handle):
OPTIMIZER_REGISTRY[name] = class_handle
class_handle.name = name
OPTIMIZER_REGISTRY[name].external_params = external_params
OPTIMIZER_REGISTRY[name].class_params = class_params
return class_handle
return insert_in_registry
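# Registered classes can then be retrieved by name, e.g. (sketch):
#
#     optimizer_cls = OPTIMIZER_REGISTRY['adam']
#     optimizer = optimizer_cls({'lr': 0.001}, batches_count=100, verbose=True)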
# Default learning rate for the optimizers
DEFAULT_LR = 0.0005
# Default momentum for the optimizers
DEFAULT_MOMENTUM = 0.9
# Defaults for the optional learning-rate decay schedule (used by optimizers
# that support decay; a cycle length of 0 disables it)
DEFAULT_DECAY_CYCLE = 0
DEFAULT_DECAY_CYCLE_MULTIPLE = 1
DEFAULT_LR_DECAY_FACTOR = 2
DEFAULT_END_LR = 1e-8
DEFAULT_SINE = False
class Optimizer(abc.ABC):
"""Abstract class for optimizer .
"""
name = ""
external_params = []
class_params = {}
def __init__(self, optimizer_params, batches_count, verbose):
"""Initialize the Optimizer
Parameters
----------
optimizer_params : dict
Consists of key-value pairs. The initializer will check the keys to get the corresponding params.
batches_count: int
number of batches in an epoch
verbose : bool
Enable/disable verbose mode
"""
self.verbose = verbose
self._optimizer_params = {}
self._init_hyperparams(optimizer_params)
self.batches_count = batches_count
def _display_params(self):
"""Display the parameter values
"""
logger.info('\n------ Optimizer ------')
logger.info('Name : {}'.format(self.name))
for key, value in self._optimizer_params.items():
logger.info('{} : {}'.format(key, value))
def _init_hyperparams(self, hyperparam_dict):
""" Initializes the hyperparameters needed by the algorithm.
Parameters
----------
hyperparam_dict : dictionary
Consists of key value pairs. The optimizer will check the keys to get the corresponding params
"""
self._optimizer_params['lr'] = hyperparam_dict.get('lr', DEFAULT_LR)
if self.verbose:
self._display_params()
def minimize(self, loss):
"""Create an optimizer to minimize the model loss
Parameters
----------
loss: tf.Tensor
Node which needs to be evaluated for computing the model loss.
Returns
-------
train: tf.Operation
Node that needs to be evaluated for minimizing the loss during training
"""
raise NotImplementedError('Abstract Method not implemented!')
def update_feed_dict(self, feed_dict, batch_num, epoch_num):
"""Fills values of placeholders created by the optimizers.
Parameters
----------
feed_dict : dict
Dictionary that will be passed to sess.run while optimizing the model loss.
batch_num: int
current batch number
epoch_num: int
current epoch number
"""
raise NotImplementedError('Abstract Method not implemented!')
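# Intended usage contract for concrete subclasses (sketch; `sess`, `loss`,
# and `get_feed_dict` stand in for the caller's own graph and data pipeline):
#
#     train_op = optimizer.minimize(loss)
#     for epoch in range(epochs):
#         for batch in range(optimizer.batches_count):
#             feed_dict = get_feed_dict(batch)
#             optimizer.update_feed_dict(feed_dict, batch, epoch)
#             sess.run(train_op, feed_dict=feed_dict)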
@register_optimizer("adagrad", ['lr'])
class AdagradOptimizer(Optimizer):
"""Wrapper around adagrad optimizer
"""
def __init__(self, optimizer_params, batches_count, verbose=False):
"""Initialize the Optimizer
Parameters
----------
optimizer_params : dict
Consists of key-value pairs. The optimizer will check the keys to get the corresponding params:
- **'lr'**: (float). Learning Rate (default: 0.0005)
Example: ``optimizer_params={'lr': 0.001}``
batches_count: int
number of batches in an epoch
verbose : bool
Enable/disable verbose mode
"""
super(AdagradOptimizer, self).__init__(optimizer_params, batches_count, verbose)
def minimize(self, loss):
"""Create an optimizer to minimize the model loss
Parameters
----------
loss: tf.Tensor
Node which needs to be evaluated for computing the model loss.
Returns
-------
train: tf.Operation
Node that needs to be evaluated for minimizing the loss during training
"""
self.optimizer = tf.train.AdagradOptimizer(learning_rate=self._optimizer_params['lr'])
train = self.optimizer.minimize(loss)
return train
def update_feed_dict(self, feed_dict, batch_num, epoch_num):
"""Fills values of placeholders created by the optimizers.
Parameters
----------
feed_dict : dict
Dictionary that would be passed while optimizing the model loss to sess.run.
batch_num: int
current batch number
epoch_num: int
current epoch number
"""
return
@register_optimizer("adam", ['lr'])
class AdamOptimizer(Optimizer):
"""Abstract class for loss function.
"""
def __init__(self, optimizer_params, batches_count, verbose=False):
"""Initialize the Optimizer
Parameters
----------
optimizer_params : dict
Consists of key-value pairs. The optimizer will check the keys to get the corresponding params:
- **'lr'**: (float). Learning Rate (default: 0.0005)
Example: ``optimizer_params={'lr': 0.001}``
batches_count: int
number of batches in an epoch
verbose : bool
Enable/disable verbose mode
"""
super(AdamOptimizer, self).__init__(optimizer_params, batches_count, verbose)
def minimize(self, loss):
"""Create an optimizer to minimize the model loss
Parameters
----------
loss: tf.Tensor
Node which needs to be evaluated for computing the model loss.
Returns
-------
train: tf.Operation
Node that needs to be evaluated for minimizing the loss during training
"""
self.optimizer = tf.train.AdamOptimizer(learning_rate=self._optimizer_params['lr'])
train = self.optimizer.minimize(loss)
return train
def update_feed_dict(self, feed_dict, batch_num, epoch_num):
"""Fills values of placeholders created by the optimizers.
Parameters
----------
feed_dict : dict
Dictionary that would be passed while optimizing the model loss to sess.run.
batch_num: int
current batch number
epoch_num: int
current epoch number
"""
return
@register_optimizer("momentum", ['lr', 'momentum'])
class MomentumOptimizer(Optimizer):
"""Abstract class for loss function.
"""
def __init__(self, optimizer_params, batches_count, verbose=False):
"""Initialize the Optimizer
Parameters
----------
optimizer_params : dict
Consists of key-value pairs. The optimizer will check the keys to get the corresponding params:
- **'lr'**: (float). Learning Rate (default: 0.0005)
- **'momentum'**: (float). Momentum (default: 0.9)
Example: ``optimizer_params={'lr': 0.001, 'momentum':0.90}``
batches_count: int
number of batches in an epoch
verbose : bool
Enable/disable verbose mode
"""
super(MomentumOptimizer, self).__init__(optimizer_params, batches_count, verbose)
def _init_hyperparams(self, hyperparam_dict):
""" Initializes the hyperparameters needed by the algorithm.
Parameters
----------
hyperparam_dict : dictionary
Consists of key value pairs. The optimizer will check the keys to get the corresponding params
"""
self._optimizer_params['lr'] = hyperparam_dict.get('lr', DEFAULT_LR)
self._optimizer_params['momentum'] = hyperparam_dict.get('momentum', DEFAULT_MOMENTUM)
if self.verbose:
self._display_params()
def minimize(self, loss):
"""Create an optimizer to minimize the model loss
Parameters
----------
loss: tf.Tensor
Node which needs to be evaluated for computing the model loss.
Returns
-------
train: tf.Operation
Node that needs to be evaluated for minimizing the loss during training
"""
self.optimizer = tf.train.MomentumOptimizer(learning_rate=self._optimizer_params['lr'],
momentum=self._optimizer_params['momentum'])
train = self.optimizer.minimize(loss)
return train
def update_feed_dict(self, feed_dict, batch_num, epoch_num):
"""Fills values of placeholders created by the optimizers.
Parameters
----------
feed_dict : dict
Dictionary that would be passed while optimizing the model loss to sess.run.
batch_num: int
current batch number
epoch_num: int
current epoch number
"""
return
@register_optimizer("sgd", ['lr', 'decay_cycle', 'end_lr', 'sine_decay', 'expand_factor', 'decay_lr_rate'])
class SGDOptimizer(Optimizer):
def __init__(self, optimizer_params, batches_count, verbose=False):
"""Initialize the Optimizer
Parameters
----------
optimizer_params : dict
Consists of key-value pairs. The optimizer will check the keys to get the corresponding params:
- **'lr'**: (float). Learning Rate upper bound (default: 0.0005)
- **'decay_cycle'**: (int). Length in epochs of the decay cycle (default: 0, i.e. no decay)
- **'end_lr'**: (float). Learning Rate lower bound (default: 1e-8)
- **'cosine_decay'**: (bool). Use cosine decay instead of fixed rate decay (default: False)
- **'expand_factor'**: (float). Expand the decay cycle length by this factor after each cycle (default: 1)
- **'decay_lr_rate'**: (float). Factor by which to decay the start lr after each cycle (default: 2)
Example: ``optimizer_params={'lr': 0.001}``
batches_count: int
number of batches in an epoch
verbose : bool
Enable/disable verbose mode
"""
super(SGDOptimizer, self).__init__(optimizer_params, batches_count, verbose)
def _init_hyperparams(self, hyperparam_dict):
""" Initializes the hyperparameters needed by the algorithm.
Parameters
----------
hyperparam_dict : dictionary
Consists of key value pairs. The optimizer will check the keys to get the corresponding params
"""
self._optimizer_params['lr'] = hyperparam_dict.get('lr', DEFAULT_LR)
self._optimizer_params['decay_cycle'] = hyperparam_dict.get('decay_cycle', DEFAULT_DECAY_CYCLE)
self._optimizer_params['cosine_decay'] = hyperparam_dict.get('cosine_decay', DEFAULT_SINE)
self._optimizer_params['expand_factor'] = hyperparam_dict.get('expand_factor', DEFAULT_DECAY_CYCLE_MULTIPLE)
self._optimizer_params['decay_lr_rate'] = hyperparam_dict.get('decay_lr_rate', DEFAULT_LR_DECAY_FACTOR)
self._optimizer_params['end_lr'] = hyperparam_dict.get('end_lr', DEFAULT_END_LR)
if self.verbose:
self._display_params()
def minimize(self, loss):
"""Create an optimizer to minimize the model loss
Parameters
----------
loss: tf.Tensor
Node which needs to be evaluated for computing the model loss.
Returns
-------
train: tf.Operation
Node that needs to be evaluated for minimizing the loss during training
"""
# create a placeholder for learning rate
self.lr_placeholder = tf.placeholder(tf.float32)
# create the optimizer with the placeholder
self.optimizer = tf.train.GradientDescentOptimizer(learning_rate=self.lr_placeholder)
# load the hyperparameters that would be used while generating the learning rate per batch
# start learning rate
self.start_lr = self._optimizer_params['lr']
self.current_lr = self.start_lr
# cycle rate for learning rate decay
self.decay_cycle_rate = self._optimizer_params['decay_cycle']
self.end_lr = self._optimizer_params['end_lr']
# check if it is a sinusoidal (cosine) decay or constant decay
self.is_cosine_decay = self._optimizer_params['cosine_decay']
self.next_cycle_epoch = self.decay_cycle_rate + 1
# Get the cycle expand factor
self.decay_cycle_expand_factor = self._optimizer_params['expand_factor']
# Get the LR decay factor at the start of each cycle
self.decay_lr_rate = self._optimizer_params['decay_lr_rate']
self.curr_cycle_length = self.decay_cycle_rate
self.curr_start = 0
# create the operation that minimizes the loss
train = self.optimizer.minimize(loss)
return train
def update_feed_dict(self, feed_dict, batch_num, epoch_num):
"""Fills values of placeholders created by the optimizers.
Parameters
----------
feed_dict : dict
Dictionary that would be passed while optimizing the model loss to sess.run.
batch_num: int
current batch number
epoch_num: int
current epoch number
"""
# Sinusoidal Decay
if self.is_cosine_decay:
# compute the cycle number
current_cycle_num = \
((epoch_num - 1 - self.curr_start) * self.batches_count + (batch_num - 1)) / \
(self.curr_cycle_length * self.batches_count)
# compute a learning rate for the current batch/epoch
self.current_lr = \
self.end_lr + (self.start_lr - self.end_lr) * 0.5 * (1 + math.cos(math.pi * current_cycle_num))
# Start the next cycle, expand the cycle length and decay the learning rate
if epoch_num % (self.next_cycle_epoch - 1) == 0 and batch_num == self.batches_count:
self.curr_cycle_length = self.curr_cycle_length * self.decay_cycle_expand_factor
self.next_cycle_epoch = self.next_cycle_epoch + self.curr_cycle_length
self.curr_start = epoch_num
self.start_lr = self.start_lr / self.decay_lr_rate
if self.current_lr < self.end_lr:
self.current_lr = self.end_lr
# fixed rate decay
elif self.decay_cycle_rate > 0:
if epoch_num % (self.next_cycle_epoch) == 0 and batch_num == 1:
if self.current_lr > self.end_lr:
self.next_cycle_epoch = self.decay_cycle_rate + \
((self.next_cycle_epoch - 1) * self.decay_cycle_expand_factor) + 1
self.current_lr = self.current_lr / self.decay_lr_rate
if self.current_lr < self.end_lr:
self.current_lr = self.end_lr
# no change to the learning rate
else:
pass
feed_dict.update({self.lr_placeholder: self.current_lr})
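# Worked example of the cosine schedule above (illustrative, not part of
# the original module). With start_lr=0.1, end_lr=0.0, decay_cycle=10 and
# batches_count=1, at epoch 6 (batch 1) of the first cycle:
#     current_cycle_num = ((6 - 1 - 0) * 1 + (1 - 1)) / (10 * 1) = 0.5
#     current_lr = 0.0 + (0.1 - 0.0) * 0.5 * (1 + cos(pi * 0.5)) = 0.05
# i.e. the learning rate is halfway down the cosine curve.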
|
PypiClean
|
/noah-vega-1.8.5.tar.gz/noah-vega-1.8.5/vega/networks/sgas_network.py
|
# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""SGAS network."""
from vega.common import ClassFactory, ClassType
from vega.modules.operators import ops
from vega.networks.super_network import DartsNetwork
import torch
from torch.autograd import Variable
@ClassFactory.register(ClassType.NETWORK)
class SGASNetwork(DartsNetwork):
"""Base GDAS-DARTS Network of classification."""
def __init__(self, stem, cells, head, init_channels, num_classes=10, auxiliary=False, search=True, aux_size=8,
auxiliary_layer=13, drop_path_prob=0.):
"""Init SGASNetwork."""
super(SGASNetwork, self).__init__(stem, cells, head, init_channels, num_classes, auxiliary, search,
aux_size, auxiliary_layer, drop_path_prob)
self.normal_selected_idxs = None
self.reduce_selected_idxs = None
self.normal_candidate_flags = None
self.reduce_candidate_flags = None
self.initialize()
def initialize(self):
"""Initialize architecture parameters."""
self.alphas_normal = []
self.alphas_reduce = []
for i in range(self.steps):
for n in range(2 + i):
self.alphas_normal.append(Variable(
ops.random_normal(self.num_ops).cuda() / self.num_ops, requires_grad=True))
self.alphas_reduce.append(Variable(
ops.random_normal(self.num_ops).cuda() / self.num_ops, requires_grad=True))
@property
def learnable_params(self):
"""Get learnable params of alphas."""
return self.alphas_normal + self.alphas_reduce
@property
def arch_weights(self):
"""Get weights of alphas."""
self.alphas_normal = self.get_weights('alphas_normal')
self.alphas_reduce = self.get_weights('alphas_reduce')
alphas_normal = ops.softmax(torch.stack(self.alphas_normal, dim=0), -1)
alphas_reduce = ops.softmax(torch.stack(self.alphas_reduce, dim=0), -1)
return [ops.to_numpy(alphas_normal), ops.to_numpy(alphas_reduce)]
def calc_alphas(self, alphas, dim=-1, **kwargs):
"""Calculate Alphas."""
new_alphas = []
for alpha in alphas:
new_alphas.append(ops.softmax(alpha, dim))
return new_alphas
def call(self, input, alpha=None):
"""Forward a model that specified by alpha.
:param input: An input tensor
:type input: Tensor
"""
# TODO: training for tf
self.initialize()
s0, s1 = self.pre_stems(input)
alphas_normal, alphas_reduce = self.alphas_normal, self.alphas_reduce
if alpha is not None:
alphas_normal, alphas_reduce = alpha[:self.len_alpha], alpha[self.len_alpha:]
else:
alphas_normal = self.calc_alphas(alphas_normal)
alphas_reduce = self.calc_alphas(alphas_reduce)
logits_aux = None
for i, cell in enumerate(self.cells_.children()):
weights = None
selected_idxs = None
if self.is_search:
if cell.__class__.__name__ == 'NormalCell':
weights = alphas_normal
selected_idxs = self.normal_selected_idxs
elif cell.__class__.__name__ == 'ReduceCell':
weights = alphas_reduce
selected_idxs = self.reduce_selected_idxs
s0, s1 = s1, cell(s0, s1, weights, self.drop_path_prob, selected_idxs)
if not self.is_search and self._auxiliary and i == self._auxiliary_layer:
logits_aux = self.auxiliary_head(s1)
logits = self.head(s1)
if logits_aux is not None:
return logits, logits_aux
else:
return logits
|
PypiClean
|
/pulumi_azure_nextgen-0.6.2a1613157620.tar.gz/pulumi_azure_nextgen-0.6.2a1613157620/pulumi_azure_nextgen/search/v20150228/list_admin_key.py
|
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = [
'ListAdminKeyResult',
'AwaitableListAdminKeyResult',
'list_admin_key',
]
@pulumi.output_type
class ListAdminKeyResult:
"""
Response containing the primary and secondary API keys for a given Azure Search service.
"""
def __init__(__self__, primary_key=None, secondary_key=None):
if primary_key and not isinstance(primary_key, str):
raise TypeError("Expected argument 'primary_key' to be a str")
pulumi.set(__self__, "primary_key", primary_key)
if secondary_key and not isinstance(secondary_key, str):
raise TypeError("Expected argument 'secondary_key' to be a str")
pulumi.set(__self__, "secondary_key", secondary_key)
@property
@pulumi.getter(name="primaryKey")
def primary_key(self) -> str:
"""
The primary API key of the Search service.
"""
return pulumi.get(self, "primary_key")
@property
@pulumi.getter(name="secondaryKey")
def secondary_key(self) -> str:
"""
The secondary API key of the Search service.
"""
return pulumi.get(self, "secondary_key")
class AwaitableListAdminKeyResult(ListAdminKeyResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return ListAdminKeyResult(
primary_key=self.primary_key,
secondary_key=self.secondary_key)
def list_admin_key(resource_group_name: Optional[str] = None,
service_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableListAdminKeyResult:
"""
Use this data source to access information about an existing resource.
:param str resource_group_name: The name of the resource group within the current subscription.
:param str service_name: The name of the Search service for which to list admin keys.
"""
__args__ = dict()
__args__['resourceGroupName'] = resource_group_name
__args__['serviceName'] = service_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-nextgen:search/v20150228:listAdminKey', __args__, opts=opts, typ=ListAdminKeyResult).value
return AwaitableListAdminKeyResult(
primary_key=__ret__.primary_key,
secondary_key=__ret__.secondary_key)
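# Illustrative usage sketch (the resource names are hypothetical; not part
# of the original module):
#     keys = list_admin_key(resource_group_name="my-rg",
#                           service_name="my-search-service")
#     pulumi.export("primaryKey", keys.primary_key)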
|
PypiClean
|
/ixnetwork_restpy-1.1.10.tar.gz/ixnetwork_restpy-1.1.10/ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/applysetfieldmissmask_a1d1c1318096b8b862772ca0daea5218.py
|
import sys
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
if sys.version_info >= (3, 5):
from typing import List, Any, Union
class ApplySetFieldMissMask(Base):
"""Select the type of Apply Set Field Miss mask capability that the table will support.
The ApplySetFieldMissMask class encapsulates a required applySetFieldMissMask resource which will be retrieved from the server every time the property is accessed.
"""
__slots__ = ()
_SDM_NAME = "applySetFieldMissMask"
_SDM_ATT_MAP = {
"ArpDestinationIpv4AddressMask": "arpDestinationIpv4AddressMask",
"ArpDstHwAddressMask": "arpDstHwAddressMask",
"ArpSourceIpv4AddressMask": "arpSourceIpv4AddressMask",
"ArpSrcHwAddressMask": "arpSrcHwAddressMask",
"EthernetDestinationMask": "ethernetDestinationMask",
"EthernetSourceMask": "ethernetSourceMask",
"Ipv4DestinationMask": "ipv4DestinationMask",
"Ipv4SourceMask": "ipv4SourceMask",
"Ipv6DestinationMask": "ipv6DestinationMask",
"Ipv6ExtHeaderMask": "ipv6ExtHeaderMask",
"Ipv6FlowLabelMask": "ipv6FlowLabelMask",
"Ipv6SourceMask": "ipv6SourceMask",
"PbbIsidMask": "pbbIsidMask",
"TunnelIdMask": "tunnelIdMask",
"VlanMask": "vlanMask",
}
_SDM_ENUM_MAP = {}
def __init__(self, parent, list_op=False):
super(ApplySetFieldMissMask, self).__init__(parent, list_op)
@property
def ArpDestinationIpv4AddressMask(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, Apply Set Field for ARP Destination IPv4 Address miss Mask is supported.
"""
return self._get_attribute(self._SDM_ATT_MAP["ArpDestinationIpv4AddressMask"])
@ArpDestinationIpv4AddressMask.setter
def ArpDestinationIpv4AddressMask(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["ArpDestinationIpv4AddressMask"], value)
@property
def ArpDstHwAddressMask(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, Apply Set Field for ARP Destination Hardware Address miss Mask is supported.
"""
return self._get_attribute(self._SDM_ATT_MAP["ArpDstHwAddressMask"])
@ArpDstHwAddressMask.setter
def ArpDstHwAddressMask(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["ArpDstHwAddressMask"], value)
@property
def ArpSourceIpv4AddressMask(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, Apply Set Field for ARP Source IPv4 Address miss Mask is supported.
"""
return self._get_attribute(self._SDM_ATT_MAP["ArpSourceIpv4AddressMask"])
@ArpSourceIpv4AddressMask.setter
def ArpSourceIpv4AddressMask(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["ArpSourceIpv4AddressMask"], value)
@property
def ArpSrcHwAddressMask(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, Apply Set Field for ARP Source Hardware Address miss Mask is supported.
"""
return self._get_attribute(self._SDM_ATT_MAP["ArpSrcHwAddressMask"])
@ArpSrcHwAddressMask.setter
def ArpSrcHwAddressMask(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["ArpSrcHwAddressMask"], value)
@property
def EthernetDestinationMask(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, Apply Set Field for Ethernet Destination miss Mask is supported.
"""
return self._get_attribute(self._SDM_ATT_MAP["EthernetDestinationMask"])
@EthernetDestinationMask.setter
def EthernetDestinationMask(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["EthernetDestinationMask"], value)
@property
def EthernetSourceMask(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, Apply Set Field for Ethernet Source miss Mask is supported.
"""
return self._get_attribute(self._SDM_ATT_MAP["EthernetSourceMask"])
@EthernetSourceMask.setter
def EthernetSourceMask(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["EthernetSourceMask"], value)
@property
def Ipv4DestinationMask(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, Apply Set Field for IPv4 Destination miss Mask is supported.
"""
return self._get_attribute(self._SDM_ATT_MAP["Ipv4DestinationMask"])
@Ipv4DestinationMask.setter
def Ipv4DestinationMask(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["Ipv4DestinationMask"], value)
@property
def Ipv4SourceMask(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, Apply Set Field for IPv4 Source miss Mask is supported.
"""
return self._get_attribute(self._SDM_ATT_MAP["Ipv4SourceMask"])
@Ipv4SourceMask.setter
def Ipv4SourceMask(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["Ipv4SourceMask"], value)
@property
def Ipv6DestinationMask(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, Apply Set Field for IPv6 Destination miss Mask is supported.
"""
return self._get_attribute(self._SDM_ATT_MAP["Ipv6DestinationMask"])
@Ipv6DestinationMask.setter
def Ipv6DestinationMask(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["Ipv6DestinationMask"], value)
@property
def Ipv6ExtHeaderMask(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, Apply Set Field for IPv6 Ext Header miss Mask is supported.
"""
return self._get_attribute(self._SDM_ATT_MAP["Ipv6ExtHeaderMask"])
@Ipv6ExtHeaderMask.setter
def Ipv6ExtHeaderMask(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["Ipv6ExtHeaderMask"], value)
@property
def Ipv6FlowLabelMask(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, Apply Set Field for IPv6 Flow Label miss Mask is supported.
"""
return self._get_attribute(self._SDM_ATT_MAP["Ipv6FlowLabelMask"])
@Ipv6FlowLabelMask.setter
def Ipv6FlowLabelMask(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["Ipv6FlowLabelMask"], value)
@property
def Ipv6SourceMask(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, Apply Set Field for IPv6 Source miss Mask is supported.
"""
return self._get_attribute(self._SDM_ATT_MAP["Ipv6SourceMask"])
@Ipv6SourceMask.setter
def Ipv6SourceMask(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["Ipv6SourceMask"], value)
@property
def PbbIsidMask(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, Apply Set Field for PBB ISID miss Mask is supported.
"""
return self._get_attribute(self._SDM_ATT_MAP["PbbIsidMask"])
@PbbIsidMask.setter
def PbbIsidMask(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["PbbIsidMask"], value)
@property
def TunnelIdMask(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, Apply Set Field for Tunnel ID miss Mask is supported.
"""
return self._get_attribute(self._SDM_ATT_MAP["TunnelIdMask"])
@TunnelIdMask.setter
def TunnelIdMask(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["TunnelIdMask"], value)
@property
def VlanMask(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, Apply Set Field for VLAN miss Mask is supported.
"""
return self._get_attribute(self._SDM_ATT_MAP["VlanMask"])
@VlanMask.setter
def VlanMask(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["VlanMask"], value)
def update(
self,
ArpDestinationIpv4AddressMask=None,
ArpDstHwAddressMask=None,
ArpSourceIpv4AddressMask=None,
ArpSrcHwAddressMask=None,
EthernetDestinationMask=None,
EthernetSourceMask=None,
Ipv4DestinationMask=None,
Ipv4SourceMask=None,
Ipv6DestinationMask=None,
Ipv6ExtHeaderMask=None,
Ipv6FlowLabelMask=None,
Ipv6SourceMask=None,
PbbIsidMask=None,
TunnelIdMask=None,
VlanMask=None,
):
# type: (bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool) -> ApplySetFieldMissMask
"""Updates applySetFieldMissMask resource on the server.
Args
----
- ArpDestinationIpv4AddressMask (bool): If selected, Apply Set Field for ARP Destination IPv4 Address miss Mask is supported.
- ArpDstHwAddressMask (bool): If selected, Apply Set Field for ARP Destination Hardware Address miss Mask is supported.
- ArpSourceIpv4AddressMask (bool): If selected, Apply Set Field for ARP Source IPv4 Address miss Mask is supported.
- ArpSrcHwAddressMask (bool): If selected, Apply Set Field for ARP Source Hardware Address miss Mask is supported.
- EthernetDestinationMask (bool): If selected, Apply Set Field for Ethernet Destination miss Mask is supported.
- EthernetSourceMask (bool): If selected, Apply Set Field for Ethernet Source miss Mask is supported.
- Ipv4DestinationMask (bool): If selected, Apply Set Field for IPv4 Destination miss Mask is supported.
- Ipv4SourceMask (bool): If selected, Apply Set Field for IPv4 Source miss Mask is supported.
- Ipv6DestinationMask (bool): If selected, Apply Set Field for IPv6 Destination miss Mask is supported.
- Ipv6ExtHeaderMask (bool): If selected, Apply Set Field for IPv6 Ext Header miss Mask is supported.
- Ipv6FlowLabelMask (bool): If selected, Apply Set Field for IPv6 Flow Label miss Mask is supported.
- Ipv6SourceMask (bool): If selected, Apply Set Field for IPv6 Source miss Mask is supported.
- PbbIsidMask (bool): If selected, Apply Set Field for PBB ISID miss Mask is supported.
- TunnelIdMask (bool): If selected, Apply Set Field for Tunnel ID miss Mask is supported.
- VlanMask (bool): If selected, Apply Set Field for VLAN miss Mask is supported.
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def find(
self,
ArpDestinationIpv4AddressMask=None,
ArpDstHwAddressMask=None,
ArpSourceIpv4AddressMask=None,
ArpSrcHwAddressMask=None,
EthernetDestinationMask=None,
EthernetSourceMask=None,
Ipv4DestinationMask=None,
Ipv4SourceMask=None,
Ipv6DestinationMask=None,
Ipv6ExtHeaderMask=None,
Ipv6FlowLabelMask=None,
Ipv6SourceMask=None,
PbbIsidMask=None,
TunnelIdMask=None,
VlanMask=None,
):
# type: (bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool) -> ApplySetFieldMissMask
"""Finds and retrieves applySetFieldMissMask resources from the server.
All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve applySetFieldMissMask resources from the server.
To retrieve an exact match ensure the parameter value starts with ^ and ends with $
By default the find method takes no parameters and will retrieve all applySetFieldMissMask resources from the server.
Args
----
- ArpDestinationIpv4AddressMask (bool): If selected, Apply Set Field for ARP Destination IPv4 Address miss Mask is supported.
- ArpDstHwAddressMask (bool): If selected, Apply Set Field for ARP Destination Hardware Address miss Mask is supported.
- ArpSourceIpv4AddressMask (bool): If selected, Apply Set Field for ARP Source IPv4 Address miss Mask is supported.
- ArpSrcHwAddressMask (bool): If selected, Apply Set Field for ARP Source Hardware Address miss Mask is supported.
- EthernetDestinationMask (bool): If selected, Apply Set Field for Ethernet Destination miss Mask is supported.
- EthernetSourceMask (bool): If selected, Apply Set Field for Ethernet Source miss Mask is supported.
- Ipv4DestinationMask (bool): If selected, Apply Set Field for IPv4 Destination miss Mask is supported.
- Ipv4SourceMask (bool): If selected, Apply Set Field for IPv4 Source miss Mask is supported.
- Ipv6DestinationMask (bool): If selected, Apply Set Field for IPv6 Destination miss Mask is supported.
- Ipv6ExtHeaderMask (bool): If selected, Apply Set Field for IPv6 Ext Header miss Mask is supported.
- Ipv6FlowLabelMask (bool): If selected, Apply Set Field for IPv6 Flow Label miss Mask is supported.
- Ipv6SourceMask (bool): If selected, Apply Set Field for IPv6 Source miss Mask is supported.
- PbbIsidMask (bool): If selected, Apply Set Field for PBB ISID miss Mask is supported.
- TunnelIdMask (bool): If selected, Apply Set Field for Tunnel ID miss Mask is supported.
- VlanMask (bool): If selected, Apply Set Field for VLAN miss Mask is supported.
Returns
-------
- self: This instance with matching applySetFieldMissMask resources retrieved from the server available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
"""Retrieves a single instance of applySetFieldMissMask data from the server.
Args
----
- href (str): An href to the instance to be retrieved
Returns
-------
- self: This instance with the applySetFieldMissMask resources from the server available through an iterator or index
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
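# Illustrative usage sketch (`table` is a hypothetical parent node; not
# part of the original module):
#     miss_mask = table.ApplySetFieldMissMask.find()
#     miss_mask.update(VlanMask=True, TunnelIdMask=True)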
|
PypiClean
|
/rosettasciio-0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl/rsciio/utils/skimage_exposure.py
|
from packaging.version import Version
import warnings
import numpy as np
import skimage
from skimage.exposure.exposure import intensity_range, _output_dtype
def rescale_intensity(image, in_range="image", out_range="dtype"):
"""Return image after stretching or shrinking its intensity levels.
The desired intensity range of the input and output, `in_range` and
`out_range` respectively, are used to stretch or shrink the intensity range
of the input image. See examples below.
Parameters
----------
image : array
Image array.
in_range, out_range : str or 2-tuple, optional
Min and max intensity values of input and output image.
The possible values for this parameter are enumerated below.
'image'
Use image min/max as the intensity range.
'dtype'
Use min/max of the image's dtype as the intensity range.
dtype-name
Use intensity range based on desired `dtype`. Must be valid key
in `DTYPE_RANGE`.
2-tuple
Use `range_values` as explicit min/max intensities.
Returns
-------
out : array
Image array after rescaling its intensity. This image is the same dtype
as the input image.
Notes
-----
.. versionchanged:: 0.17
The dtype of the output array has changed to match the input dtype, or
float if the output range is specified by a pair of floats.
See Also
--------
equalize_hist
Examples
--------
By default, the min/max intensities of the input image are stretched to
the limits allowed by the image's dtype, since `in_range` defaults to
'image' and `out_range` defaults to 'dtype':
>>> image = np.array([51, 102, 153], dtype=np.uint8)
>>> rescale_intensity(image)
array([ 0, 127, 255], dtype=uint8)
It's easy to accidentally convert an image dtype from uint8 to float:
>>> 1.0 * image
array([ 51., 102., 153.])
Use `rescale_intensity` to rescale to the proper range for float dtypes:
>>> image_float = 1.0 * image
>>> rescale_intensity(image_float)
array([0. , 0.5, 1. ])
To maintain the low contrast of the original, use the `in_range` parameter:
>>> rescale_intensity(image_float, in_range=(0, 255))
array([0.2, 0.4, 0.6])
If the min/max value of `in_range` is more/less than the min/max image
intensity, then the intensity levels are clipped:
>>> rescale_intensity(image_float, in_range=(0, 102))
array([0.5, 1. , 1. ])
If you have an image with signed integers but want to rescale the image to
just the positive range, use the `out_range` parameter. In that case, the
output dtype will be float:
>>> image = np.array([-10, 0, 10], dtype=np.int8)
>>> rescale_intensity(image, out_range=(0, 127))
array([ 0. , 63.5, 127. ])
To get the desired range with a specific dtype, use ``.astype()``:
>>> rescale_intensity(image, out_range=(0, 127)).astype(np.int8)
array([ 0, 63, 127], dtype=int8)
If the input image is constant, the output will be clipped directly to the
output range:
>>> image = np.array([130, 130, 130], dtype=np.int32)
>>> rescale_intensity(image, out_range=(0, 127)).astype(np.int32)
array([127, 127, 127], dtype=int32)
"""
args = ()
if Version(skimage.__version__) >= Version("0.19.0"):
args = (image.dtype,)
if out_range in ["dtype", "image"]:
out_dtype = _output_dtype(image.dtype.type, *args)
else:
out_dtype = _output_dtype(out_range, *args)
imin, imax = map(float, intensity_range(image, in_range))
omin, omax = map(
float, intensity_range(image, out_range, clip_negative=(imin >= 0))
)
if np.any(np.isnan([imin, imax, omin, omax])):
warnings.warn(
"One or more intensity levels are NaN. Rescaling will broadcast "
"NaN to the full image. Provide intensity levels yourself to "
"avoid this. E.g. with np.nanmin(image), np.nanmax(image).",
stacklevel=2,
)
image = np.clip(image, imin, imax)
if imin != imax:
image = (image - imin) / (imax - imin)
return (image * (omax - omin) + omin).astype(dtype=out_dtype)
else:
return np.clip(image, omin, omax).astype(out_dtype)
|
PypiClean
|
/git-deps-1.1.0.zip/git-deps-1.1.0/git_deps/html/node_modules/es-abstract/2018/CopyDataProperties.js
|
'use strict';
var GetIntrinsic = require('get-intrinsic');
var $TypeError = GetIntrinsic('%TypeError%');
var callBound = require('call-bind/callBound');
var forEach = require('../helpers/forEach');
var OwnPropertyKeys = require('../helpers/OwnPropertyKeys');
var $isEnumerable = callBound('Object.prototype.propertyIsEnumerable');
var CreateDataProperty = require('./CreateDataProperty');
var Get = require('./Get');
var IsArray = require('./IsArray');
var IsInteger = require('./IsInteger');
var IsPropertyKey = require('./IsPropertyKey');
var SameValue = require('./SameValue');
var ToNumber = require('./ToNumber');
var ToObject = require('./ToObject');
var Type = require('./Type');
// https://262.ecma-international.org/9.0/#sec-copydataproperties
module.exports = function CopyDataProperties(target, source, excludedItems) {
if (Type(target) !== 'Object') {
throw new $TypeError('Assertion failed: "target" must be an Object');
}
if (!IsArray(excludedItems)) {
throw new $TypeError('Assertion failed: "excludedItems" must be a List of Property Keys');
}
for (var i = 0; i < excludedItems.length; i += 1) {
if (!IsPropertyKey(excludedItems[i])) {
throw new $TypeError('Assertion failed: "excludedItems" must be a List of Property Keys');
}
}
if (typeof source === 'undefined' || source === null) {
return target;
}
var fromObj = ToObject(source);
var sourceKeys = OwnPropertyKeys(fromObj);
forEach(sourceKeys, function (nextKey) {
var excluded = false;
forEach(excludedItems, function (e) {
if (SameValue(e, nextKey) === true) {
excluded = true;
}
});
var enumerable = $isEnumerable(fromObj, nextKey) || (
// this is to handle string keys being non-enumerable in older engines
typeof source === 'string'
&& nextKey >= 0
&& IsInteger(ToNumber(nextKey))
);
if (excluded === false && enumerable) {
var propValue = Get(fromObj, nextKey);
CreateDataProperty(target, nextKey, propValue);
}
});
return target;
};
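// Illustrative usage sketch (not part of the original module):
//     var CopyDataProperties = require('./CopyDataProperties');
//     var target = {};
//     CopyDataProperties(target, { a: 1, b: 2 }, ['b']);
//     // target is now { a: 1 }; the excluded key 'b' was skipped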
|
PypiClean
|
/genie.libs.sdk-23.8.1-py3-none-any.whl/genie/libs/sdk/triggers/blitz/gnmi_util.py
|
from typing import List, Tuple
import logging
import re
import base64
import json
import sys
import pdb
from datetime import datetime
from threading import Thread, Event
import traceback
import time
from abc import ABC
from xml.etree.ElementPath import xpath_tokenizer_re
from google.protobuf import json_format
from six import string_types
from pyats.log.utils import banner
from pyats.utils.secret_strings import to_plaintext
import grpc
from yang.connector import proto
from yang.connector.proto import gnmi_pb2
from yang.connector.gnmi import Gnmi
from .rpcverify import DecodedField, DeletedPath
from copy import deepcopy
log = logging.getLogger(__name__)
class GnmiMessageException(Exception):
pass
class ForkedPdb(pdb.Pdb):
"""A pdb subclass for debugging GnmiNotification.
Usage: ForkedPdb().set_trace()
"""
def interaction(self, *args, **kwargs):
_stdin = sys.stdin
try:
sys.stdin = open('/dev/stdin')
pdb.Pdb.interaction(self, *args, **kwargs)
finally:
sys.stdin = _stdin
class GnmiMessage:
"""Class to prepare and return gMNI messages"""
def __init__(self, message_type, cfg):
self.msg_type = message_type
self.cfg = cfg
self.metadata = None
@classmethod
def run_set(self, device, payload):
"""Run gNMI set service.
Args:
device (ysdevice.DeviceProfile): Target device.
payload (proto.gnmi_pb2.SetRequest or str): SetRequest, or a JSON string representing one.
Returns:
proto.gnmi_pb2.SetResponse
"""
if isinstance(payload, proto.gnmi_pb2.SetRequest):
gnmi_string = str(payload)
else:
try:
payload = GnmiMessageConstructor.json_to_gnmi('set', payload)
gnmi_string = str(payload)
except Exception:
log.error(traceback.format_exc())
raise GnmiMessageException("Invalid payload\n{0}".format(
str(payload))
)
log.info('gNMI SET\n' + '=' * 8 + '\n{0}'.format(
gnmi_string
))
try:
self.metadata = [
("username", device.device.credentials.default.get('username', '')),
("password", to_plaintext(device.device.credentials.default.get('password', ''))),
]
response = device.gnmi.service.Set(payload, metadata=self.metadata)
log.info(
'gNMI SET Response\n' + '=' * 17 + '\n{0}'.format(
str(response)
)
)
dt = datetime.fromtimestamp(response.timestamp / 1e9)
log.info(
'\ntimestamp decoded: {0}\n\n'.format(
dt.strftime('%Y %b %d %H:%M:%S')
)
)
return response
except Exception as exc:
log.error(traceback.format_exc())
if hasattr(exc, 'details'):
log.error('ERROR: {0}'.format(exc.details()))
else:
log.error(str(exc))
@classmethod
def run_get(self,
device: Gnmi,
payload: str,
namespace: dict,
transaction_time: float = 0) -> Tuple[gnmi_pb2.GetResponse, bool]:
"""Run gNMI get service.
Args:
device (ysdevice.DeviceProfile): Target device.
payload (proto.gnmi_pb2.GetRequest or str): GetRequest, or a JSON string representing one.
namespace (dict): Mapping of prefix to namespace, used for returns processing.
transaction_time (float): If nonzero, fail when the response takes longer than this many seconds.
Returns:
tuple: (proto.gnmi_pb2.GetResponse or None, bool success flag)
"""
if isinstance(payload, proto.gnmi_pb2.GetRequest):
gnmi_string = str(payload)
else:
try:
payload = GnmiMessageConstructor.json_to_gnmi('get', payload)
gnmi_string = str(payload)
# Fixup namespace for returns processing.
self.prefix_to_module({'namespace': namespace})
except Exception:
log.error(traceback.format_exc())
raise GnmiMessageException("Invalid payload\n{0}".format(
str(payload)
))
log.info('gNMI GET\n' + '=' * 8 + '\n{0}'.format(
gnmi_string
))
try:
self.metadata = [
("username", device.device.credentials.default.get('username', '')),
("password", to_plaintext(device.device.credentials.default.get('password', ''))),
]
if transaction_time:
start_time = time.time()
response = device.gnmi.service.Get(
payload, metadata=self.metadata)
response_time = time.time() - start_time
if response_time > transaction_time:
log.info(
'gNMI GET Response\n' + '=' * 17 + '\n{0}'.format(
str(response)
)
)
log.error(banner(
f'Response time: {response_time} seconds exceeded transaction_time {transaction_time}',
))
return response, False
else:
response = device.gnmi.service.Get(payload, metadata=self.metadata)
log.info(
'gNMI GET Response\n' + '=' * 17 + '\n{0}'.format(
str(response)
)
)
if isinstance(response, gnmi_pb2.GetResponse):
return response, True
return None, False
except Exception as exc:
log.error(traceback.format_exc())
if hasattr(exc, 'details'):
log.error('ERROR: {0}'.format(exc.details()))
else:
log.error(str(exc))
return None, False
@staticmethod
def get_opfields(val, xpath_str, opfields=[], namespace={}):
if isinstance(val, dict):
for name, dict_val in val.items():
opfields = GnmiMessage.get_opfields(
dict_val,
xpath_str + '/' + name,
opfields,
namespace
)
elif isinstance(val, list):
for item in val:
GnmiMessage.get_opfields(item, xpath_str, opfields, namespace)
else:
xpath_list = xpath_str.split('/')
name = xpath_list.pop()
for mod in namespace.values():
name = name.replace(mod + ':', '')
xpath_str = '/'.join(xpath_list)
opfields.append((val, xpath_str + '/' + name))
return opfields
@staticmethod
def path_elem_to_xpath(path_elem, prefix='', namespace={}, opfields=[]):
"""Convert a Path structure to an Xpath."""
elems = path_elem.get('elem', [])
xpath = []
for elem in elems:
name = elem.get('name', '')
if name:
for mod in namespace.values():
name = name.replace(mod + ':', '')
xpath.append(name)
key = elem.get('key', '')
if key:
for name, value in key.items():
for mod in namespace.values():
value = str(value).replace(mod + ':', '')
opfields.append((
value, prefix + '/' + '/'.join(xpath) + '/' + name))
if len(xpath):
return prefix + '/' + '/'.join(xpath)
else:
return ''
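# Illustrative example (not part of the original module): the Path dict
#     {'elem': [{'name': 'interfaces'},
#               {'name': 'interface', 'key': {'name': 'eth0'}}]}
# converts to '/interfaces/interface', and the key is emitted into
# opfields as ('eth0', '/interfaces/interface/name').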
@staticmethod
def decode_update_value(val):
if 'jsonIetfVal' in val:
val = val.get('jsonIetfVal', '')
if not val:
log.info('"val" has no content')
return
json_val = base64.b64decode(val).decode('utf-8')
json_dict = json.loads(json_val, strict=False)
return json_dict
elif 'jsonVal' in val:
val = val.get('jsonVal', '')
if not val:
log.info('"val" has no content')
return
json_val = base64.b64decode(val).decode('utf-8')
json_dict = json.loads(json_val, strict=False)
return json_dict
elif 'asciiVal' in val:
val = val.get('asciiVal', '')
if not val:
log.info('"asciiVal" has no content')
return
val = val.strip()
return val
elif val:
datatype = next(iter(val))
value = val[datatype]
if 'int' in datatype:
return int(value)
elif ('float' in datatype or
'double' in datatype):
return float(value)
elif 'decimal' in datatype:
return float(value['digits']) / (10 ** value['precision'])
elif 'bytes' in datatype:
return bytes(value, encoding='utf8')
elif 'leaflist' in datatype:
return GnmiMessage.process_leaf_list_val(value)
else:
log.info('Update has no value')
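# Illustrative examples (not part of the original module):
#     GnmiMessage.decode_update_value({'jsonIetfVal': 'eyJtdHUiOiAxNTAwfQ=='})
#     # -> {'mtu': 1500}   (base64 of '{"mtu": 1500}')
#     GnmiMessage.decode_update_value({'uintVal': '1500'})
#     # -> 1500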
@staticmethod
def process_update(update, prefix=None, namespace={}, opfields=[]):
"""Convert Update to Xpath, value, and datatype."""
pre_path = ''
xpath = ''
if not isinstance(update, list):
update = [update]
if prefix:
pre_path = GnmiMessage.path_elem_to_xpath(
prefix, pre_path, namespace, opfields
)
for upd in update:
if 'path' in upd:
xpath = GnmiMessage.path_elem_to_xpath(
upd['path'], pre_path, namespace, opfields
)
val = upd.get('val', {})
decoded_val = GnmiMessage.decode_update_value(val)
if 'asciiVal' in val:
opfields.append({
'datatype': 'ascii',
'value': decoded_val,
})
elif val:
datatype = next(iter(val))
value = val[datatype]
if 'int' in datatype:
value = int(value)
elif ('float' in datatype or
'double' in datatype):
value = float(value)
elif 'decimal' in datatype:
value = float(
value['digits']) / (10 ** value['precision'])
elif 'bytes' in datatype:
value = bytes(value, encoding='utf8')
elif 'leaflist' in datatype:
value = GnmiMessage.process_leaf_list_val(value)
if isinstance(value, list):
for val in value:
opfields.append((decoded_val, xpath))
else:
opfields.append((decoded_val, xpath))
else:
opfields.append((decoded_val, xpath))
if decoded_val == {}:
decoded_val = None
if decoded_val is not None:
if not isinstance(decoded_val, (dict, list)):
# Just one value returned
opfields.append((decoded_val, xpath))
else:
GnmiMessage.get_opfields(
decoded_val, xpath, opfields, namespace)
return decoded_val
@staticmethod
def process_delete(delete,
prefix: str = None,
namespace: dict = {},
deleted_paths: List[DeletedPath] = []):
pre_path = ''
if not isinstance(delete, list):
delete = [delete]
if prefix:
pre_path = GnmiMessage.path_elem_to_xpath(
prefix, pre_path, namespace, []
)
for del_elem in delete:
path_keys = []
xpath = GnmiMessage.path_elem_to_xpath(
del_elem, pre_path, namespace, path_keys)
deleted_paths.append(DeletedPath(
xpath, [DecodedField(*field) for field in path_keys]))
@staticmethod
def process_leaf_list_val(value):
"""Convert leaf list value to list of values
Args:
value (dict): leaf list value
For proto encoding leaf list value is received as below format:
val{
leaflist_val: {
element: [{'datatype':'value'},{'datatype':'value'}]
}
}
Returns (list): leaf list value convert into list format
leaf_list_val = ['value','value']
"""
leaf_list_val = []
elements = value['element']
for elem in elements:
for datatype, val in elem.items():
if 'int' in datatype:
val = int(val)
elif ('float' in datatype or
'double' in datatype):
val = float(val)
elif 'decimal' in datatype:
val = float(val['digits']) / (10 ** val['precision'])
elif 'bytes' in datatype:
val = bytes(val, encoding='utf8')
leaf_list_val.append(val)
return leaf_list_val
@classmethod
def run_subscribe(self, device: Gnmi, payload: proto.gnmi_pb2.SubscribeRequest, **request) -> Thread:
"""Run gNMI subscribe service.
Args:
device (ysdevice.DeviceProfile): Target device.
payload (proto.gnmi_pb2.SubscribeRequest): SubscribeRequest.
request (dict): gNMI subscribe settings for thread.
Returns:
GnmiSubscription Thread
"""
if isinstance(payload, proto.gnmi_pb2.SubscribeRequest):
gnmi_string = str(payload)
else:
try:
payload = GnmiMessageConstructor.json_to_gnmi(
'subscribe', payload
)
gnmi_string = str(payload)
# Fixup namespace for returns processing.
self.prefix_to_module(request)
except Exception:
log.error(traceback.format_exc())
raise GnmiMessageException("Invalid payload\n{0}".format(
str(payload)
))
log.info('gNMI SUBSCRIBE\n' + '=' * 14 + '\n{0}'.format(
gnmi_string
))
try:
payloads = [payload]
request['log'] = log
request_mode = request.get('request_mode', 'ONCE')
if request_mode == 'ONCE':
subscribe_thread = GnmiSubscriptionOnce(
device, payloads, **request)
elif request_mode == 'POLL':
subscribe_thread = GnmiSubscriptionPoll(
device, payloads, **request)
elif request_mode == 'STREAM':
subscribe_thread = GnmiSubscriptionStream(
device, payloads, **request)
device.active_notifications[device] = subscribe_thread
subscribe_thread.start()
return subscribe_thread
except Exception as exc:
log.error(traceback.format_exc())
if hasattr(exc, 'details'):
log.error('ERROR: {0}'.format(exc.details()))
else:
log.error(str(exc))
@staticmethod
def prefix_to_module(request):
"""Convert from prefix-to-namespace to prefix-to-module."""
if not isinstance(request, dict) or 'namespace' not in request:
log.error('Request must have mapping of prefix-to-namespace')
return
if 'namespace_modules' in request:
request['namespace'] = request['namespace_modules']
return
namespace_module = {}
module = None
for prefix, nspace in request['namespace'].items():
if "/Cisco-IOS-" in nspace:
module = nspace[nspace.rfind("/") + 1:]
elif "/cisco-nx" in nspace: # NXOS lowercases namespace
module = "Cisco-NX-OS-device"
elif "/openconfig.net" in nspace:
nspace = nspace.replace('http://openconfig.net/yang/', '')
nspace = nspace.split('/')
module = '-'.join(['openconfig'] + nspace)
elif "urn:ietf:params:xml:ns:yang:" in nspace:
module = nspace.replace("urn:ietf:params:xml:ns:yang:", "")
if module:
namespace_module[prefix] = module
request['namespace'] = namespace_module
request['namespace_modules'] = namespace_module
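# Illustrative example (not part of the original module): a request of
#     {'namespace': {'oc-if': 'http://openconfig.net/yang/interfaces'}}
# is rewritten in place so that
#     request['namespace'] == {'oc-if': 'openconfig-interfaces'}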
def get_modules(self, cfg):
"""Helper function for get_entries."""
return cfg.get('modules', {})
def get_entries(self, cfg):
"""Helper function for get_messages."""
entries = []
modules = self.get_modules(cfg)
for mod in modules.keys():
entries.append({
'module': mod,
'namespace_modules': modules[mod].get('namespace_modules'),
'namespace_prefixes': modules[mod].get('namespace_prefixes'),
'nodes': modules[mod]['configs']
})
return entries
def get_messages(self):
"""Using request, instantiate GnmiMessageConstuctor class.
Returns:
list: GnmiMessageConstructor classes.
"""
gmcs = []
entries = self.get_entries(self.cfg)
for entry in entries:
gmc = GnmiMessageConstructor(self.msg_type, entry, **self.cfg)
gmcs.append(gmc)
return gmcs
class GnmiMessageConstructor:
"""Construct a single gNMI message based on request."""
edit_op = {
'create': 'update',
'merge': 'update',
'replace': 'replace',
'remove': 'delete',
'delete': 'delete'
}
def __init__(self, message_type, entry, **cfg):
self.request = entry
self.cfg = cfg
self.prefix = cfg.get('prefix')
self.origin = cfg.get('origin')
self.encoding = cfg.get('encoding', 'JSON_IETF').upper()
self.get_type = cfg.get('get_type', 'ALL').upper()
self.base64 = cfg.get('base64', False)
self.nodes = None
self.update = []
self.replace = []
self.delete = []
self.subscribe = []
self.get = []
self.json_val = {}
self.msg_type = message_type
self.xml_xpath_to_gnmi_xpath()
self.nodes_to_dict()
@property
def msg_type(self):
return self._msg_type
@msg_type.setter
def msg_type(self, msg_type):
self.payload = None
self._msg_type = msg_type
if msg_type == 'set':
self.payload = proto.gnmi_pb2.SetRequest()
elif msg_type == 'get':
self.payload = proto.gnmi_pb2.GetRequest()
self.payload.type = proto.gnmi_pb2.GetRequest.DataType.Value(
self.get_type
)
self.payload.encoding = proto.gnmi_pb2.Encoding.Value(
self.encoding
)
elif msg_type == 'subscribe':
self.payload = proto.gnmi_pb2.SubscribeRequest()
if msg_type != 'subscribe' and self.prefix:
# TODO: calculate prefix paths
prefix_path = proto.gnmi_pb2.Path()
prefix_path.origin = self.origin
self.payload.prefix.CopyFrom(prefix_path)
@staticmethod
def _upd_rpl(upd_rpl, base64_encode):
updates = []
for upd in upd_rpl:
val = None
if 'val' in upd:
val = upd.pop('val', {})
gnmi_update = json_format.ParseDict(
upd,
proto.gnmi_pb2.Update()
)
if val is not None:
if 'jsonIetfVal' in val:
if base64_encode:
jval = bytes(
json.dumps(val['jsonIetfVal']), encoding='utf-8'
)
gnmi_update.val.json_ietf_val = base64.b64encode(jval)
else:
gnmi_update.val.json_ietf_val = json.dumps(
val['jsonIetfVal']
).encode('utf-8')
elif 'jsonVal' in val:
if base64_encode:
jval = bytes(
json.dumps(val['jsonVal']), encoding='utf-8'
)
gnmi_update.val.json_val = base64.b64encode(jval)
else:
gnmi_update.val.json_val = json.dumps(
val['jsonVal']
).encode('utf-8')
updates.append(gnmi_update)
return updates
@classmethod
def json_to_gnmi(self, action, payload, **kwargs):
"""Given a JSON payload, convert it to a gNMI message.
Expected JSON format is similar to a string __repr__ of the
proto.gnmi_pb2 class with the exception of the "val" member.
The "val" is passed in normal JSON format in the payload parameter
but then gets converted to base64 encoding in the returned
associated proto.gnmi_pb2 class.
Args:
action (str): set | get | subscribe.
payload (str): Properly formatted JSON string.
Raises:
GnmiMessageException
Returns:
gNMI proto.gnmi_pb2 class
"""
base64_encode = kwargs.get('base64', False)
try:
gnmi_dict = json.loads(payload)
except Exception as exc:
log.error(traceback.format_exc())
raise GnmiMessageException('JSON parse failed: {0}'.format(
str(exc)
))
try:
if action == 'set':
updates = gnmi_dict.pop('update', [])
replaces = gnmi_dict.pop('replace', [])
gnmi_pld = json_format.ParseDict(
gnmi_dict,
proto.gnmi_pb2.SetRequest()
)
if updates:
gnmi_upd = self._upd_rpl(updates, base64_encode)
if gnmi_upd:
gnmi_pld.update.extend(gnmi_upd)
if replaces:
gnmi_upd = self._upd_rpl(replaces, base64_encode)
if gnmi_upd:
gnmi_pld.replace.extend(gnmi_upd)
elif action == 'get':
gnmi_pld = json_format.Parse(
payload,
proto.gnmi_pb2.GetRequest()
)
elif action == 'subscribe':
gnmi_pld = json_format.Parse(
payload,
proto.gnmi_pb2.SubscribeRequest()
)
return gnmi_pld
except Exception as exc:
log.error(traceback.format_exc())
raise GnmiMessageException('Message parse failed: {}'.format(
str(exc)
))
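# Illustrative sketch (not part of the original module): a minimal JSON
# payload for the 'get' action accepted by json_to_gnmi:
#     payload = '{"encoding": "JSON_IETF", "path": [{"elem": [{"name": "interfaces"}]}]}'
#     get_request = GnmiMessageConstructor.json_to_gnmi('get', payload)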
def _manage_paths(self, path_req, gnmi_req):
# Get initial gnmi path for gnmi message
short_xp = self.get_shortest_common_path(path_req)
gnmi_path = self.parse_xpath_to_gnmi_path(
short_xp, self.origin
)
ext_xpaths = []
for n in path_req:
xp = n['xpath']
n['xpath'] = xp[len(short_xp):]
ext_xpaths.append(n)
gnmi_req.path.CopyFrom(gnmi_path)
return self.get_payload(ext_xpaths)
def _gnmi_update_request(self, upd_req, gnmi_upd_req):
# Construct an Update structure for a SetRequest gNMI message.
json_val = self._manage_paths(upd_req, gnmi_upd_req)
# Human readable saved for logs
if self.json_val:
if not isinstance(self.json_val, list):
self.json_val = [self.json_val]
self.json_val.append(json_val)
else:
self.json_val = json_val
json_val = json.dumps(json_val).encode('utf-8')
if self.base64:
json_val = base64.b64encode(json_val)
if self.encoding and self.encoding.lower() == 'json_ietf':
gnmi_upd_req.val.json_ietf_val = json_val
else:
gnmi_upd_req.val.json_val = json_val
return [gnmi_upd_req]
def group_nodes(self, update):
""" Group the nodes based on leaf/list/container level
Args:
update (list): dicts with xpath in gNMI format, nodetypes, values.
"""
gnmi_update_paths = []
update_filter = []
update_nodes = []
list_or_cont = False
parent_xp = ""
# Group the nodes based on leaf/list/container level
# Eg nodes: [leaf, leaf, leaf, list, leaf, leaf]
# leaf level: [leaf], [leaf], [leaf] - leaf-level RPCs
# are built separately for each leaf node.
# list level: [list, leaf, leaf]
for node in update:
if (node['xpath'].endswith("]") \
or node['nodetype'] == 'list' \
or node['nodetype'] == 'container') \
and (not parent_xp or parent_xp not in node['xpath']):
parent_xp = node['xpath']
list_or_cont = True
if update_filter:
update_nodes.append(update_filter)
update_filter = []
update_filter.append(node)
else:
update_filter.append(node)
if not list_or_cont:
update_nodes.append(update_filter)
update_filter = []
if update_filter:
update_nodes.append(update_filter)
return update_nodes
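# Illustrative example (not part of the original module): given nodes with
#     xpaths ['/a/x' (leaf), '/a/l[k="1"]' (list), '/a/l[k="1"]/y' (leaf)]
# group_nodes returns [[/a/x], [/a/l[k="1"], /a/l[k="1"]/y]], i.e. one
# group per standalone leaf, while list/container subtrees stay together.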
def nodes_to_dict(self, nodes=None, origin=None):
"""Construct full gNMI request message to be sent through service.
Args:
nodes (list): dicts with xpath in gNMI format, nodetypes, values.
origin (string): gNMI origin for message.
"""
# TODO: classmethod?
if not self.nodes:
self.nodes = nodes
if origin:
self.origin = origin
update = self.nodes.get('update', [])
replace = self.nodes.get('replace', [])
delete = self.nodes.get('delete', [])
get = self.nodes.get('get', [])
subscribes = self.nodes.get('subscribe', [])
if update:
gnmi_update = proto.gnmi_pb2.Update()
update_nodes = self.group_nodes(update)
# Send each group for payload building
for node in update_nodes:
self.update = self._gnmi_update_request(node, gnmi_update)
self.payload.update.extend(self.update)
if replace:
gnmi_replace = proto.gnmi_pb2.Update()
replace_nodes = self.group_nodes(replace)
# Send each group for payload building.
for node in replace_nodes:
self.replace = self._gnmi_update_request(node, gnmi_replace)
self.payload.replace.extend(self.replace)
if delete:
gnmi_delete_paths = []
for xp in delete:
gnmi_delete_paths.append(
self.parse_xpath_to_gnmi_path(xp, self.origin)
)
self.payload.delete.extend(gnmi_delete_paths)
if get:
gnmi_get_paths = []
for xp in get:
self.payload.path.append(
self.parse_xpath_to_gnmi_path(xp, self.origin)
)
self.payload.path.extend(gnmi_get_paths)
if subscribes:
# Create subscribe list.
subscribe_list = proto.gnmi_pb2.SubscriptionList()
subscribe_list.updates_only = self.cfg.get('updates_only', False)
subscribe_list.encoding = proto.gnmi_pb2.Encoding.Value(
self.encoding
)
mode = self.cfg.get('request_mode', 'STREAM')
subscribe_list.mode = proto.gnmi_pb2.SubscriptionList.Mode.Value(
mode
)
if self.prefix:
# TODO: calculate prefix paths
prefix_path = proto.gnmi_pb2.Path()
prefix_path.origin = self.origin
subscribe_list.prefix.CopyFrom(prefix_path)
# Create subscriptions for the list.
sub_mode = self.cfg.get('sub_mode')
for subscribe in subscribes:
subscription = proto.gnmi_pb2.Subscription()
sample_poll = self.cfg.get(
'sample_interval', self.cfg.get('sample_poll'))
if sub_mode:
subscription.mode = proto.gnmi_pb2.SubscriptionMode.Value(
sub_mode
)
if sub_mode == 'SAMPLE' and sample_poll:
sample_poll = int(1e9) * int(sample_poll)
subscription.sample_interval = sample_poll
gnmi_path = self.parse_xpath_to_gnmi_path(subscribe)
subscription.path.CopyFrom(gnmi_path)
# Add the subscription to the list.
subscribe_list.subscription.extend([subscription])
# Add list to the subscribe request.
self.payload.subscribe.CopyFrom(subscribe_list)
return self.payload
@classmethod
def get_subscribe_poll(self):
"""POLL subscribe requires a message to start polling."""
sub = proto.gnmi_pb2.SubscribeRequest()
sub.poll.SetInParent()
return sub
def _trim_xpaths(self, xpaths, short_xp):
# Helper function for get_shortest_common_path.
for xpath in xpaths:
if short_xp not in xpath:
if short_xp.endswith(']'):
while short_xp.endswith(']'):
short_xp = short_xp[:short_xp.rfind('[')]
xp = short_xp[:short_xp.rfind('/')]
else:
xp = short_xp[:short_xp.rfind('/')]
short_xp = self._trim_xpaths(xpaths, xp)
return short_xp
def get_shortest_common_path(self, nodes):
"""Find the shortest common path in a collection of nodes.
Args:
nodes (list): dicts with xpath in gNMI format, nodetypes, values.
Return:
str
"""
if len(nodes) == 1:
return nodes[0]['xpath']
xpaths = [n['xpath'] for n in nodes]
short_xp = min(set(xpaths), key=len)
short_xp = self._trim_xpaths(xpaths, short_xp)
short_node = [n for n in nodes if n['xpath'] == short_xp]
if short_node:
if not short_node[0]['xpath'].endswith("]") \
and short_node[0]['nodetype'] not in ['list', 'container']:
while short_xp.endswith(']'):
short_xp = short_xp[:short_xp.rfind('[')]
short_xp = short_xp[:short_xp.rfind('/')]
return short_xp
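# Illustrative example (not part of the original module): for the nodes
#     [{'xpath': '/a/b[k="1"]/c', 'nodetype': 'leaf', 'value': '1'},
#      {'xpath': '/a/b[k="1"]/d', 'nodetype': 'leaf', 'value': '2'}]
# the shortest common path is '/a/b[k="1"]', so a single Update is built
# at that path with json_val {"c": "1", "d": "2"}.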
def get_payload(self, update):
"""Construct dict that will be converted to json_val in Update.
dict will be in format of json {}
For all list having similar keys but different values, create a list of dictionaries.
This will allow to store every key value in a single json_val
Eg: xpath = common_xpath/x-list[type="t1"]/val
common_xpath/x-list[type="t2"]/val
json_val will be = "{"x-list": [{"type": "t1", "val": 10}, {"type": "t2", "val": 10}]}"
Args:
update (list): dicts with xpath in gNMI format, nodetypes, values.
Returns:
dict
"""
if len(update) == 1 and not update[0]['xpath']:
return update[0]['value']
json_val = {}
processed_xp = []
for node in update:
ind = 0
xp = node['xpath']
if xp.endswith(']'):
xp = xp + '/'
if xp in processed_xp:
continue
jval = json_val
collect_key = False
key_elem = None
tokenized = xpath_tokenizer_re.findall(xp)
if len(tokenized) == 0:
continue
for i, seg in enumerate(tokenized, 1):
token, elem = seg
if token in ['/', '=']:
continue
if not token and not collect_key and elem:
if len(tokenized) == i:
# If a node has only one element
if len(jval) == 0:
jval[elem] = node['value']
else:
# Check if jval is pointing to a list or dict to assign values
if isinstance(jval, list):
jval[ind][elem] = node['value']
else:
jval[elem] = node['value']
else:
# Create a new list of dictionary / new key in dictionary if elem is not present
if elem not in jval:
if isinstance(jval, list):
if elem not in jval[ind]:
if len(jval) == 0 or {} in jval:
ind = 0
jval[ind][elem] = []
jval[ind][elem].append({})
else:
jval[elem] = []
ind = 0
jval[elem].append({})
# For every iteration, point jval to the last list created.
if isinstance(jval, list):
if jval[ind][elem] == "":
jval[ind][elem] = []
jval[ind][elem].append({})
jval = jval[ind][elem]
ind = 0
else:
jval = jval[elem]
continue
if token == '[':
collect_key = True
continue
if token == ']':
collect_key = False
continue
if key_elem is not None and token:
# Store key_elem only if it is not equal to the previous key_elem for the same list.
if key_elem in jval[ind]:
index = 0
f = 0
for j in jval:
if j[key_elem] == token.strip('"'):
f = 1
break
index = index + 1
if f == 0:
ind = len(jval)
jval.append({})
jval[ind][key_elem] = token.strip('"')
else:
ind = index
else:
jval[ind][key_elem] = token.strip('"')
key_elem = None
continue
if collect_key and elem:
key_elem = elem
continue
processed_xp.append(xp)
self.format_json_val(json_val)
return json_val
def format_json_val(self, json_val):
# Convert a list of dictionaries with only one element to a dictionary.
if not isinstance(json_val, dict):
return
for j in json_val:
if isinstance(json_val[j], list) and len(json_val[j]) == 1:
json_val[j] = json_val[j][0]
self.format_json_val(json_val[j])
else:
if isinstance(json_val[j], list):
for i in json_val[j]:
self.format_json_val(i)
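# Illustrative sketch (hypothetical value): format_json_val collapses
# single-element lists in place, e.g.
#   {"address": [{"ip": "10.0.0.1"}]}  ->  {"address": {"ip": "10.0.0.1"}}
# while a multi-element list such as {"x-list": [{...}, {...}]} stays a list.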
def _trim_nodes(self, nodes):
# Prune list nodes if already in other nodes xpath
if nodes:
xps = [n['xpath'] for n in nodes]
long_xp = max(xps, key=len)
for i in range(len(nodes)):
if nodes[i]['xpath'] == long_xp:
continue
if nodes[i]['xpath']+'/' in long_xp:
nodes.remove(nodes[i])
return self._trim_nodes(nodes)
return nodes
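# Illustrative sketch (hypothetical nodes): _trim_nodes drops a node whose
# xpath is a strict prefix of the longest xpath. With xpaths
# '/interfaces/interface' and '/interfaces/interface/state', the shorter
# node is removed because '/interfaces/interface/' occurs in the longer one,
# and the method recurses until no such prefix node remains.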
def xml_xpath_to_gnmi_xpath(self):
"""Convert XML Path Language 1.0 Xpath to gNMI Xpath.
Input modeled after YANG/NETCONF Xpaths.
References:
* https://www.w3.org/TR/1999/REC-xpath-19991116/#location-paths
* https://www.w3.org/TR/1999/REC-xpath-19991116/#path-abbrev
* https://tools.ietf.org/html/rfc6020#section-6.4
* https://tools.ietf.org/html/rfc6020#section-9.13
* https://tools.ietf.org/html/rfc6241
Parameters
----------
self.request: dict containing request namespace and nodes to be worked on.
namespace: dict of <prefix>: <namespace>
nodes: list of dict
<xpath>: Xpath pointing to resource
<nodetype>: YANG statement type
<value>: value to set resource to
<edit-op>: equivalent NETCONF edit-config operation
Constructs
----------
self.nodes: 4 lists containing possible updates, replaces,
deletes, or gets derived from input request.
"""
message = {
"update": [],
"replace": [],
"delete": [],
"get": [],
"subscribe": []
}
if "nodes" not in self.request:
# TODO: raw rpc?
return message
GnmiMessage.prefix_to_module(self.request)
nodes = deepcopy(self.request.get("nodes", []))
if self.msg_type == 'set':
# Prune key nodes without edit-op assigned.
nodes = [n for n in nodes if not (
n['xpath'].endswith(']') and
not n.get('edit-op')
)]
if self.msg_type in ['get', 'subscribe'] and len(nodes) > 1:
# Prune nodes with xpaths already in other node's xpath.
nodes = self._trim_nodes(nodes)
module = self.request.get('module')
self.namespace_modules = self.request.get("namespace_modules", {})
parent_edit_op = None
for node in nodes:
if "xpath" not in node:
log.error("Xpath is not in message")
else:
xpath = node["xpath"]
value = node.get("value", "")
datatype = node.get('datatype', 'string')
edit_op = node.get("edit-op", "")
if(xpath.endswith("]")):
nodetype = "list"
else:
nodetype = node.get("nodetype","")
if nodetype in ['list', 'container']:
parent_edit_op = edit_op
# Ready value for proper JSON conversion.
if datatype == 'boolean':
if isinstance(value, string_types):
if value.lower() == 'true':
value = True
elif value.lower() == 'false':
value = False
elif datatype.startswith('int') or \
datatype.startswith('uint'):
if value:
value = int(value)
elif ('float' in datatype or
'double' in datatype):
if value:
value = float(value)
elif 'decimal' in datatype:
if value:
value = float(value['digits']) / \
(10 ** value['precision'])
if xpath.startswith('/'):
xp = xpath.split('/')[1:]
else:
node['xpath'] = '/' + xpath
xp = xpath.split('/')
if not module:
# First segment of xpath has prefix of module.
if ':' in xp[0]:
pfx = xp[0].split(':')[0]
if pfx not in self.namespace_modules:
if pfx in self.namespace_modules.values():
# This xpath has a gNMI type prefix
module = pfx
else:
module = self.namespace_modules[pfx]
else:
module = ''
if self.prefix is None:
# Prefix not set; infer the origin from the module name (older test format).
if 'Cisco-IOS-XE' in module:
self.prefix = True
self.origin = 'rfc7951'
elif 'openconfig' in module:
self.origin = 'openconfig'
for pfx, mod in self.namespace_modules.items():
if isinstance(value, string_types) and pfx in value:
if mod != module and self.origin == 'rfc7951':
value = value.replace(pfx + ":", mod + ':')
else:
value = value.replace(pfx + ":", '')
# gNMI prefixes require entire module name.
for i, seg in enumerate(xp):
if pfx not in xpath:
continue
if i == 0 and self.prefix:
# Only needed for first path elem.
seg = seg.replace(pfx + ":", module + ':')
xp[i] = seg
continue
if mod != module and self.origin == 'rfc7951':
# From another module so this is required.
seg = seg.replace(pfx + ":", mod + ':')
else:
seg = seg.replace(pfx + ':', '')
xp[i] = seg
if not xpath.endswith(']'):
node['name'] = xp[-1:][0]
else:
node['name'] = ''
node['xpath'] = '/'.join(xp)
node['value'] = value
if self.msg_type == 'set':
if not edit_op:
if parent_edit_op:
edit_op = parent_edit_op
else:
edit_op = 'merge'
if self.edit_op[edit_op] in ["update", "replace"]:
if self.edit_op[edit_op] == "replace":
message["replace"] += [node]
elif self.edit_op[edit_op] == "update":
message["update"] += [node]
elif self.edit_op[edit_op] in ["delete", 'remove']:
message["delete"].append(node['xpath'])
elif self.msg_type in ['get', 'subscribe']:
if not message[self.msg_type]:
message[self.msg_type] = [node['xpath']]
elif node['xpath'] not in message[self.msg_type]:
message[self.msg_type].append(node['xpath'])
else:
log.error('gNMI message type "{0}" is invalid.'.format(
str(self.msg_type)
))
self.nodes = message
@classmethod
def parse_xpath_to_gnmi_path(cls, xpath, origin=None):
"""Parses an XPath to proto.gnmi_pb2.Path.
Effectively wraps the std XML XPath tokenizer and traverses
the identified groups. Parsing robustness needs to be validated.
Probably best to formalize as a state machine sometime.
TODO: Formalize tokenizer traversal via state machine.
"""
if not isinstance(xpath, string_types):
raise Exception("xpath must be a string!")
path = proto.gnmi_pb2.Path()
if origin:
if not isinstance(origin, string_types):
raise Exception("origin must be a string!")
path.origin = origin
curr_elem = proto.gnmi_pb2.PathElem()
in_filter = False
just_filtered = False
curr_key = None
# TODO: Lazy
xpath = xpath.strip("/")
xpath_elements = xpath_tokenizer_re.findall(xpath)
path_elems = []
for element in xpath_elements:
# stripped initial /, so this indicates a completed element
if element[0] == "/":
if not curr_elem.name:
# Trying to append to path without a name.
raise Exception(
"Current PathElem has no name! Invalid XPath?"
)
path_elems.append(curr_elem)
curr_elem = proto.gnmi_pb2.PathElem()
continue
# We are entering a filter
elif element[0] == "[":
in_filter = True
continue
# We are exiting a filter
elif element[0] == "]":
in_filter = False
continue
# If we're not in a filter then we're a PathElem name
elif not in_filter:
curr_elem.name = element[1]
# Skip blank spaces
elif not any([element[0], element[1]]):
continue
# If we're in the filter and just completed a filter expr,
# "and" as a junction should just be ignored.
elif in_filter and just_filtered and element[1] == "and":
just_filtered = False
continue
# Otherwise we're in a filter and this term is a key name
elif curr_key is None:
curr_key = element[1]
continue
# Otherwise we're an operator or the key value
elif curr_key is not None:
if element[0] in [">", "<"]:
raise Exception("Only = supported as filter operand!")
if element[0] == "=":
continue
else:
# We have a full key here, put it in the map
if curr_key in curr_elem.key.keys():
raise Exception("Key already in key map!")
curr_elem.key[curr_key] = element[0].strip("'\"")
curr_key = None
just_filtered = True
# Keys/filters in general should be totally cleaned up at this point.
if curr_key:
raise Exception("Hanging key filter! Incomplete XPath?")
# If we have a dangling element that hasn't been completed due to no
# / element then let's just append the final element.
if curr_elem:
path_elems.append(curr_elem)
curr_elem = None
if any([curr_elem, curr_key, in_filter]):
raise Exception("Unfinished elements in XPath parsing!")
path.elem.extend(path_elems)
return path
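# Illustrative sketch (hypothetical xpath):
#   GnmiMessageConstructor.parse_xpath_to_gnmi_path(
#       '/interfaces/interface[name="Gi1"]/state/oper-status')
# yields a proto.gnmi_pb2.Path whose elems are 'interfaces',
# 'interface' (with key map {'name': 'Gi1'}), 'state', and 'oper-status'.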
class GnmiSubscription(ABC, Thread):
RE_FIND_KEYS = re.compile(r'\[.*?\]')
def __init__(self, device: Gnmi = None, **request):
Thread.__init__(self)
self.delay = 0
self._stop_event = Event()
self.log = request.get('log')
if self.log is None:
self.log = logging.getLogger(__name__)
self.log.setLevel(logging.DEBUG)
self.request = request
self.verifier = request.get('verifier')
self.namespace = request.get('namespace')
self.sub_mode = request.get('sub_mode')
self.encoding = request.get('encoding')
self.transaction_time = request.get('transaction_time', 0)
self._result = True
self.errors: List[Exception] = []
self.negative_test = request.get('negative_test', False)
self.log.info(banner('GNMI Subscription receiver started'))
self.ntp_server = ""
if device is not None:
self.metadata = [
("username", device.device.credentials.default.get('username', '')),
("password", to_plaintext(
device.device.credentials.default.get('password', ''))),
]
if self.transaction_time:
self.ntp_server = device.device.testbed.servers.get(
'ntp', {}).get('server', {})
self.stream_max = request.get('stream_max', 60)
self.sample_poll = request.get(
'sample_interval', request.get('sample_poll', 5))
if self.stream_max:
self.log.info('Notification MAX timeout {0} seconds.'.format(
str(self.stream_max)))
class NoNtpConfigured(Exception):
pass
class DeviceOutOfSyncWithNtp(Exception):
def __init__(self, response_timestamp: int, arrive_timestamp: int, ntp_server: str, *args: object) -> None:
super().__init__(*args)
self.response_dt = datetime.fromtimestamp(
response_timestamp)
self.ntp_dt = datetime.fromtimestamp(arrive_timestamp)
self.ntp_server = ntp_server
log.error(banner(
f"""Device is out of sync with NTP server {self.ntp_server}
Device time: {self.response_dt.strftime('%m/%d/%Y %H:%M:%S.%f')}
NTP time: {self.ntp_dt.strftime('%m/%d/%Y %H:%M:%S.%f')}"""))
class TransactionTimeExceeded(Exception):
def __init__(self, delta_time: float, transaction_time: float, *args: object) -> None:
super().__init__(*args)
self.delta_time = delta_time
log.error(banner(
f'Response time: {delta_time} seconds exceeded transaction_time {transaction_time}',
))
@property
def result(self):
return self.negative_test != self._result
@result.setter
def result(self, value):
self._result = value
def stop(self):
self.log.info("Stopping notification stream")
self._stop_event.set()
def stopped(self):
return self._stop_event.is_set()
@classmethod
def iter_subscribe_request(cls,
payloads: List[gnmi_pb2.SubscribeRequest],
delay: int = 0,
sample_poll: int = 0):
"""Generator passed to Subscribe service to handle stream payloads.
Args:
payloads (list): proto.gnmi_pb2.SubscribeRequest
"""
for payload in payloads:
if delay:
time.sleep(delay)
if payload.HasField('poll'):
time.sleep(sample_poll)
log.info('gNMI SUBSCRIBE POLL\n' + '=' * 19 + '\n{0}'.format(
str(payload)
)
)
elif sample_poll:
log.info('Sample poll ignored for non-poll request')
yield payload
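# Illustrative sketch: for a POLL subscription the payload list is the
# initial SubscribeRequest followed by N poll messages, so the generator
# sleeps sample_poll seconds before yielding each poll, e.g.
#   payloads = [subscribe_request] + \
#       [GnmiMessageConstructor.get_subscribe_poll()] * 3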
def cover_exceptions(func):
"""Decorator to catch exceptions, log them and stop the thread."""
def inner(self):
try:
func(self)
except grpc.RpcError as exc:
if exc.code() == grpc.StatusCode.DEADLINE_EXCEEDED:
self.log.info("Notification MAX timeout")
self.result = self.verifier.end_subscription(self.errors)
self.log.info("Terminating notification thread")
self.stop()
else:
self.log.error("Unknown error: %s", exc)
self.result = False
self.errors.append(exc)
except Exception as exc:
msg = ''
if hasattr(exc, 'details'):
msg += 'details: ' + exc.details()
if hasattr(exc, 'debug_error_string'):
msg += exc.debug_error_string()
if not msg:
msg = str(exc)
self.log.error("Unknown error: %s", exc)
self.errors.append(exc)
finally:
if not self.stopped():
self.log.error("Error while terminating notification thread")
self.result = False
self.stop()
return inner
class GnmiSubscriptionStream(GnmiSubscription):
def __init__(self,
device: Gnmi = None,
payload: List[gnmi_pb2.SubscribeRequest] = None,
responses: List[gnmi_pb2.SubscribeResponse] = None,
**request):
super().__init__(device, **request)
# For transaction_time subscriptions, NTP servers must be configured
if self.transaction_time and not self.ntp_server:
self.log.error(
banner('For transaction_time to work with STREAM subscriptions, NTP servers must be configured.'))  # noqa
raise self.NoNtpConfigured('NTP servers not configured')
if responses is not None:
self.responses = responses
elif device is not None and payload is not None:
self.responses = device.gnmi.service.Subscribe(
self.iter_subscribe_request(payload, self.delay),
timeout=self.stream_max,
metadata=self.metadata
)
@GnmiSubscription.cover_exceptions
def run(self):
"""Check for inbound notifications."""
self.log.info('Subscribe notification active')
for response in self.responses:
if response.HasField('sync_response'):
# Don't count sync_response as a response for transaction_time
self.log.info("Initial updates received")
continue
if response.HasField('update') and not self.stopped():
arrive_time = time.time()
if self.transaction_time:
timestamp = response.update.timestamp / 10 ** 9
delta_time = arrive_time - timestamp
if delta_time < 0:
self.errors.append(
self.DeviceOutOfSyncWithNtp(timestamp, arrive_time, self.ntp_server))
elif delta_time > self.transaction_time:
self.errors.append(self.TransactionTimeExceeded(
delta_time, self.transaction_time))
if self.verifier.validation_on:
self.log.info('Processing returns...')
self.verifier.subscribe_verify(
response, 'STREAM', self.namespace)
self.result = self.verifier.end_subscription(self.errors)
self.stop()
class GnmiSubscriptionOnce(GnmiSubscription):
def __init__(self,
device: Gnmi = None,
payload: List[gnmi_pb2.SubscribeRequest] = None,
responses: List[gnmi_pb2.SubscribeResponse] = None,
**request):
super().__init__(device, **request)
if responses is not None:
self.responses = responses
elif device is not None and payload is not None:
self.responses = device.gnmi.service.Subscribe(
self.iter_subscribe_request(payload, self.delay),
metadata=self.metadata
)
@GnmiSubscription.cover_exceptions
def run(self):
"""Check for inbound notifications."""
self.log.info('Subscribe notification active')
stop_receiver = False
t = time.time()
for response in self.responses:
delta_time = time.time() - t
# Subscribe response ends here
if response.HasField('sync_response'):
self.log.info('Subscribe sync_response')
stop_receiver = True
elif self.transaction_time and delta_time > self.transaction_time:
self.errors.append(self.TransactionTimeExceeded(
delta_time, self.transaction_time))
if response.HasField('update') and not self.stopped():
self.log.info(
"gNMI SUBSCRIBE Response\n" + "=" * 23 + "\n{}"
.format(response)
)
if self.verifier.validation_on:
self.log.info('Processing returns...')
self.verifier.subscribe_verify(
response, 'ONCE', self.namespace)
if stop_receiver:
self.result = self.verifier.end_subscription(self.errors)
self.log.info('Subscribe ONCE processed')
self.stop()
self.result = self.verifier.end_subscription(self.errors)
self.log.info('Subscribe ONCE processed')
self.stop()
class GnmiSubscriptionPoll(GnmiSubscription):
def __init__(self,
device: Gnmi = None,
payload: List[gnmi_pb2.SubscribeRequest] = None,
responses: List[gnmi_pb2.SubscribeResponse] = None,
**request):
super().__init__(device, **request)
if responses is not None:
self.responses = responses
elif device is not None and payload is not None:
polls_number = self.stream_max // self.sample_poll
for _ in range(polls_number):
payload.append(GnmiMessageConstructor.get_subscribe_poll())
self.responses = device.gnmi.service.Subscribe(
self.iter_subscribe_request(
payload, sample_poll=self.sample_poll),
timeout=self.stream_max,
metadata=self.metadata
)
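# Illustrative sketch: with the defaults stream_max=60 and sample_poll=5,
# polls_number is 60 // 5 = 12, so twelve poll requests are appended to
# the payload before the Subscribe stream is opened.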
@GnmiSubscription.cover_exceptions
def run(self):
"""Check for inbound notifications."""
self.log.info('Subscribe notification active')
t = time.time()
for (i, response) in enumerate(self.responses):
if i == 0:
delta_time = time.time() - t
else:
delta_time = time.time() - t - self.sample_poll
if self.transaction_time and t and delta_time > self.transaction_time:
self.errors.append(self.TransactionTimeExceeded(
delta_time, self.transaction_time))
if response.HasField('sync_response'):
self.log.info('Subscribe sync_response')
if response.HasField('update') and not self.stopped():
self.log.info(
"gNMI SUBSCRIBE Response\n" + "=" * 23 + "\n{}"
.format(response)
)
if self.verifier.validation_on:
self.log.info('Processing returns...')
self.verifier.subscribe_verify(
response, 'POLL', self.namespace)
t = time.time()
self.result = self.verifier.end_subscription(self.errors)
self.stop()
|
PypiClean
|
/solvebio-2.24.0.tar.gz/solvebio-2.24.0/README.md
|

SolveBio Python Client
======================
This is the SolveBio Python package and command-line interface (CLI).
This module is tested against Python 2.7, 3.6, 3.7, 3.8, 3.10, PyPy and PyPy3.
Developer documentation is available at [docs.solvebio.com](https://docs.solvebio.com). For more information about SolveBio visit [www.solvebio.com](https://www.solvebio.com).
Installation & Setup
--------------------
Install `solvebio` using `pip`:
pip install solvebio
For interactive use, we recommend installing `IPython` and `gnureadline`:
pip install ipython
pip install gnureadline
To log in, type:
solvebio login
Enter your SolveBio credentials and you should be good to go!
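Once logged in, you can also use the client directly from Python. A minimal
sketch (the dataset path below is a placeholder, not a guaranteed dataset):

    import solvebio
    solvebio.login()
    dataset = solvebio.Dataset.get_by_full_path('solvebio:public:/ClinVar/latest/Variants-GRCh37')
    for record in dataset.query(limit=10):
        print(record)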
Install from Git
----------------
pip install -e git+https://github.com/solvebio/solvebio-python.git#egg=solve
Development
-----------
git clone https://github.com/solvebio/solvebio-python.git
cd solvebio-python/
python setup.py develop
Or install `tox` and run:
pip install tox
tox
Releasing
---------
You will need to [configure Twine](https://twine.readthedocs.io/en/latest/#installation) in order to push to PyPI.
Maintainers can release solvebio-python to PyPI with the following steps:
bumpversion <major|minor|patch>
git push --tags
make changelog
make release
Support
-------
Developer documentation is available at [docs.solvebio.com](https://docs.solvebio.com).
If you experience problems with this package, please [create a GitHub Issue](https://github.com/solvebio/solvebio-python/issues).
For all other requests, please [email SolveBio Support](mailto:[email protected]).
|
PypiClean
|
/secrets_env-0.28.0-py3-none-any.whl/secrets_env/providers/vault/auth/userpass.py
|
import abc
import logging
import typing
import urllib.parse
from dataclasses import dataclass, field
from typing import Any, Dict, Optional
from secrets_env.exceptions import TypeError
from secrets_env.utils import (
create_keyring_login_key,
get_env_var,
prompt,
read_keyring,
)
from .base import Auth
if typing.TYPE_CHECKING:
import httpx
logger = logging.getLogger(__name__)
@dataclass(frozen=True)
class UserPasswordAuth(Auth):
"""Username and password based authentication."""
@classmethod
@abc.abstractmethod
def path(cls) -> str:
"""Returns method name used by Vault."""
raise NotImplementedError()
_TIMEOUT = None
username: str
"""User name."""
password: str = field(repr=False)
"""Password."""
def __init__(self, username: str, password: str) -> None:
if not isinstance(username, str):
raise TypeError("username", str, username)
if not isinstance(password, str):
raise TypeError("password", str, password)
object.__setattr__(self, "username", username)
object.__setattr__(self, "password", password)
@classmethod
def load(cls, url: str, data: Dict[str, Any]) -> Optional["UserPasswordAuth"]:
username = cls._load_username(data)
if not isinstance(username, str) or not username:
logger.error(
"Missing username for %s auth.",
cls.method(),
)
return None
password = cls._load_password(url, username)
if not isinstance(password, str) or not password:
logger.error(
"Missing password for %s auth.",
cls.method(),
)
return None
return cls(username, password)
@classmethod
def _load_username(cls, data: Dict[str, Any]) -> Optional[str]:
username = get_env_var("SECRETS_ENV_USERNAME")
if username:
logger.debug("Found username from environment variable.")
return username
username = data.get("username")
if username:
return username
return prompt(f"Username for {cls.method()} auth")
@classmethod
def _load_password(cls, url: str, username: str) -> Optional[str]:
password = get_env_var("SECRETS_ENV_PASSWORD")
if password:
logger.debug("Found password from environment variable.")
return password
password = read_keyring(create_keyring_login_key(url, username))
if password:
logger.debug("Found password in keyring")
return password
return prompt(f"Password for {username}", hide_input=True)
def login(self, client: "httpx.Client") -> Optional[str]:
username = urllib.parse.quote(self.username)
resp = client.post(
f"/v1/auth/{self.path()}/login/{username}",
json={
"username": self.username,
"password": self.password,
},
timeout=self._TIMEOUT,
)
if not resp.is_success:
logger.error("Failed to login with %s method", self.method())
logger.debug(
"Login failed. URL= %s, Code= %d. Msg= %s",
resp.url,
resp.status_code,
resp.text,
)
return
return resp.json()["auth"]["client_token"]
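# Illustrative sketch (hypothetical credentials): LDAPAuth("alice", "s3cret")
# posts {"username": "alice", "password": "s3cret"} to
# /v1/auth/ldap/login/alice and returns the client token on success.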
@dataclass(frozen=True)
class BasicAuth(UserPasswordAuth):
"""Login to Vault using user name and password."""
@classmethod
def method(cls):
return "basic"
@classmethod
def path(cls):
return "userpass"
@dataclass(frozen=True)
class LDAPAuth(UserPasswordAuth):
"""Login with LDAP credentials."""
@classmethod
def method(cls):
return "LDAP"
@classmethod
def path(cls):
return "ldap"
@dataclass(frozen=True)
class OktaAuth(UserPasswordAuth):
"""Okta authentication."""
# Okta 2FA is triggered within the API call, so it needs a longer timeout
_TIMEOUT = 60.0
@classmethod
def method(cls):
return "Okta"
@classmethod
def path(cls):
return "okta"
@dataclass(frozen=True)
class RADIUSAuth(UserPasswordAuth):
"""RADIUS authentication with PAP authentication scheme."""
@classmethod
def method(cls):
return "RADIUS"
@classmethod
def path(cls):
return "radius"
|
PypiClean
|
/django-mssql-backend-2.8.1.tar.gz/django-mssql-backend-2.8.1/sql_server/pyodbc/operations.py
|
import datetime
import uuid
import warnings
from django.conf import settings
from django.db.backends.base.operations import BaseDatabaseOperations
from django.utils import timezone
from django.utils.encoding import force_str
import pytz
class DatabaseOperations(BaseDatabaseOperations):
compiler_module = 'sql_server.pyodbc.compiler'
cast_char_field_without_max_length = 'nvarchar(max)'
def _convert_field_to_tz(self, field_name, tzname):
if settings.USE_TZ and not tzname == 'UTC':
offset = self._get_utcoffset(tzname)
field_name = 'DATEADD(second, %d, %s)' % (offset, field_name)
return field_name
def _get_utcoffset(self, tzname):
"""
Returns UTC offset for given time zone in seconds
"""
# SQL Server has no built-in support for tz database, see:
# http://blogs.msdn.com/b/sqlprogrammability/archive/2008/03/18/using-time-zone-data-in-sql-server-2008.aspx
zone = pytz.timezone(tzname)
# no way to take DST into account at this point
now = datetime.datetime.now()
delta = zone.localize(now, is_dst=False).utcoffset()
return delta.days * 86400 + delta.seconds
def bulk_batch_size(self, fields, objs):
"""
Returns the maximum allowed batch size for the backend. The fields
are the fields going to be inserted in the batch, the objs contains
all the objects to be inserted.
"""
objs_len, fields_len, max_row_values = len(objs), len(fields), 1000
if (objs_len * fields_len) <= max_row_values:
size = objs_len
else:
size = max_row_values // fields_len
return size
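# Illustrative example: with 3 fields and 500 objects, 1500 row values
# exceed the 1000-row-value limit, so the batch size is 1000 // 3 = 333
# objects per batch.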
def bulk_insert_sql(self, fields, placeholder_rows):
placeholder_rows_sql = (", ".join(row) for row in placeholder_rows)
values_sql = ", ".join("(%s)" % sql for sql in placeholder_rows_sql)
return "VALUES " + values_sql
def cache_key_culling_sql(self):
"""
Returns a SQL query that retrieves the first cache key greater than the
smallest.
This is used by the 'db' cache backend to determine where to start
culling.
"""
return "SELECT cache_key FROM (SELECT cache_key, " \
"ROW_NUMBER() OVER (ORDER BY cache_key) AS rn FROM %s" \
") cache WHERE rn = %%s + 1"
def combine_duration_expression(self, connector, sub_expressions):
lhs, rhs = sub_expressions
sign = ' * -1' if connector == '-' else ''
if lhs.startswith('DATEADD'):
col, sql = rhs, lhs
else:
col, sql = lhs, rhs
params = [sign for _ in range(sql.count('DATEADD'))]
params.append(col)
return sql % tuple(params)
def combine_expression(self, connector, sub_expressions):
"""
SQL Server requires special cases for some operators in query expressions
"""
if connector == '^':
return 'POWER(%s)' % ','.join(sub_expressions)
elif connector == '<<':
return '%s * POWER(2, %s)' % tuple(sub_expressions)
elif connector == '>>':
return '%s / POWER(2, %s)' % tuple(sub_expressions)
return super().combine_expression(connector, sub_expressions)
def convert_datetimefield_value(self, value, expression, connection):
if value is not None:
if settings.USE_TZ:
value = timezone.make_aware(value, self.connection.timezone)
return value
def convert_floatfield_value(self, value, expression, connection):
if value is not None:
value = float(value)
return value
def convert_uuidfield_value(self, value, expression, connection):
if value is not None:
value = uuid.UUID(value)
return value
def convert_booleanfield_value(self, value, expression, connection):
return bool(value) if value in (0, 1) else value
def date_extract_sql(self, lookup_type, field_name):
if lookup_type == 'week_day':
return "DATEPART(weekday, %s)" % field_name
elif lookup_type == 'week':
return "DATEPART(iso_week, %s)" % field_name
else:
return "DATEPART(%s, %s)" % (lookup_type, field_name)
def date_interval_sql(self, timedelta):
"""
implements the interval functionality for expressions
"""
sec = timedelta.seconds + timedelta.days * 86400
sql = 'DATEADD(second, %d%%s, CAST(%%s AS datetime2))' % sec
if timedelta.microseconds:
sql = 'DATEADD(microsecond, %d%%s, CAST(%s AS datetime2))' % (timedelta.microseconds, sql)
return sql
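# Illustrative example: timedelta(days=1) yields
# 'DATEADD(second, 86400%s, CAST(%s AS datetime2))', where the remaining
# placeholders are later filled with the sign and the column expression
# by combine_duration_expression.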
def date_trunc_sql(self, lookup_type, field_name):
CONVERT_YEAR = 'CONVERT(varchar, DATEPART(year, %s))' % field_name
CONVERT_QUARTER = 'CONVERT(varchar, 1+((DATEPART(quarter, %s)-1)*3))' % field_name
CONVERT_MONTH = 'CONVERT(varchar, DATEPART(month, %s))' % field_name
if lookup_type == 'year':
return "CONVERT(datetime2, %s + '/01/01')" % CONVERT_YEAR
if lookup_type == 'quarter':
return "CONVERT(datetime2, %s + '/' + %s + '/01')" % (CONVERT_YEAR, CONVERT_QUARTER)
if lookup_type == 'month':
return "CONVERT(datetime2, %s + '/' + %s + '/01')" % (CONVERT_YEAR, CONVERT_MONTH)
if lookup_type == 'week':
CONVERT = "CONVERT(datetime2, CONVERT(varchar(12), %s, 112))" % field_name
return "DATEADD(DAY, (DATEPART(weekday, %s) + 5) %%%% 7 * -1, %s)" % (CONVERT, field_name)
if lookup_type == 'day':
return "CONVERT(datetime2, CONVERT(varchar(12), %s, 112))" % field_name
def datetime_cast_date_sql(self, field_name, tzname):
field_name = self._convert_field_to_tz(field_name, tzname)
sql = 'CAST(%s AS date)' % field_name
return sql
def datetime_cast_time_sql(self, field_name, tzname):
field_name = self._convert_field_to_tz(field_name, tzname)
sql = 'CAST(%s AS time)' % field_name
return sql
def datetime_extract_sql(self, lookup_type, field_name, tzname):
field_name = self._convert_field_to_tz(field_name, tzname)
return self.date_extract_sql(lookup_type, field_name)
def datetime_trunc_sql(self, lookup_type, field_name, tzname):
field_name = self._convert_field_to_tz(field_name, tzname)
sql = ''
if lookup_type in ('year', 'quarter', 'month', 'week', 'day'):
sql = self.date_trunc_sql(lookup_type, field_name)
elif lookup_type == 'hour':
sql = "CONVERT(datetime2, SUBSTRING(CONVERT(varchar, %s, 20), 0, 14) + ':00:00')" % field_name
elif lookup_type == 'minute':
sql = "CONVERT(datetime2, SUBSTRING(CONVERT(varchar, %s, 20), 0, 17) + ':00')" % field_name
elif lookup_type == 'second':
sql = "CONVERT(datetime2, CONVERT(varchar, %s, 20))" % field_name
return sql
def for_update_sql(self, nowait=False, skip_locked=False, of=()):
if skip_locked:
return 'WITH (ROWLOCK, UPDLOCK, READPAST)'
elif nowait:
return 'WITH (NOWAIT, ROWLOCK, UPDLOCK)'
else:
return 'WITH (ROWLOCK, UPDLOCK)'
def format_for_duration_arithmetic(self, sql):
if sql == '%s':
# use DATEADD only once because Django prepares only one parameter for this
fmt = 'DATEADD(second, %s / 1000000%%s, CAST(%%s AS datetime2))'
sql = '%%s'
else:
# use DATEADD twice to avoid arithmetic overflow for number part
MICROSECOND = "DATEADD(microsecond, %s %%%%%%%% 1000000%%s, CAST(%%s AS datetime2))"
fmt = 'DATEADD(second, %s / 1000000%%s, {})'.format(MICROSECOND)
sql = (sql, sql)
return fmt % sql
def fulltext_search_sql(self, field_name):
"""
Returns the SQL WHERE clause to use in order to perform a full-text
search of the given field_name. Note that the resulting string should
contain a '%s' placeholder for the value being searched against.
"""
return 'CONTAINS(%s, %%s)' % field_name
def get_db_converters(self, expression):
converters = super().get_db_converters(expression)
internal_type = expression.output_field.get_internal_type()
if internal_type == 'DateTimeField':
converters.append(self.convert_datetimefield_value)
elif internal_type == 'FloatField':
converters.append(self.convert_floatfield_value)
elif internal_type == 'UUIDField':
converters.append(self.convert_uuidfield_value)
elif internal_type in ('BooleanField', 'NullBooleanField'):
converters.append(self.convert_booleanfield_value)
return converters
def last_insert_id(self, cursor, table_name, pk_name):
"""
Given a cursor object that has just performed an INSERT statement into
a table that has an auto-incrementing ID, returns the newly created ID.
This method also receives the table name and the name of the primary-key
column.
"""
# TODO: Check how the `last_insert_id` is being used in the upper layers
# in context of multithreaded access, compare with other backends
# IDENT_CURRENT: http://msdn2.microsoft.com/en-us/library/ms175098.aspx
# SCOPE_IDENTITY: http://msdn2.microsoft.com/en-us/library/ms190315.aspx
# @@IDENTITY: http://msdn2.microsoft.com/en-us/library/ms187342.aspx
# IDENT_CURRENT is not limited by scope and session; it is limited to
# a specified table. IDENT_CURRENT returns the value generated for
# a specific table in any session and any scope.
# SCOPE_IDENTITY and @@IDENTITY return the last identity values that
# are generated in any table in the current session. However,
# SCOPE_IDENTITY returns values inserted only within the current scope;
# @@IDENTITY is not limited to a specific scope.
table_name = self.quote_name(table_name)
cursor.execute("SELECT CAST(IDENT_CURRENT(%s) AS int)", [table_name])
return cursor.fetchone()[0]
def lookup_cast(self, lookup_type, internal_type=None):
if lookup_type in ('iexact', 'icontains', 'istartswith', 'iendswith'):
return "UPPER(%s)"
return "%s"
def max_name_length(self):
return 128
def no_limit_value(self):
return None
def prepare_sql_script(self, sql, _allow_fallback=False):
return [sql]
def quote_name(self, name):
"""
Returns a quoted version of the given table, index or column name. Does
not quote the given name if it's already been quoted.
"""
if name.startswith('[') and name.endswith(']'):
return name # Quoting once is enough.
return '[%s]' % name
def random_function_sql(self):
"""
Returns a SQL expression that returns a random value.
"""
return "RAND()"
def regex_lookup(self, lookup_type):
"""
Returns the string to use in a query when performing regular expression
lookups (using "regex" or "iregex"). The resulting string should
contain a '%s' placeholder for the column being searched against.
If the feature is not supported (or part of it is not supported), a
NotImplementedError exception can be raised.
"""
match_option = {'iregex': 0, 'regex': 1}[lookup_type]
return "dbo.REGEXP_LIKE(%%s, %%s, %s)=1" % (match_option,)
def limit_offset_sql(self, low_mark, high_mark):
"""Return LIMIT/OFFSET SQL clause."""
limit, offset = self._get_limit_offset_params(low_mark, high_mark)
return '%s%s' % (
(' OFFSET %d ROWS' % offset) if offset else '',
(' FETCH FIRST %d ROWS ONLY' % limit) if limit else '',
)
def last_executed_query(self, cursor, sql, params):
"""
Returns a string of the query last executed by the given cursor, with
placeholders replaced with actual values.
`sql` is the raw query containing placeholders, and `params` is the
sequence of parameters. These are used by default, but this method
exists for database backends to provide a better implementation
according to their own quoting schemes.
"""
return super().last_executed_query(cursor, cursor.last_sql, cursor.last_params)
def savepoint_create_sql(self, sid):
"""
Returns the SQL for starting a new savepoint. Only required if the
"uses_savepoints" feature is True. The "sid" parameter is a string
for the savepoint id.
"""
return "SAVE TRANSACTION %s" % sid
def savepoint_rollback_sql(self, sid):
"""
Returns the SQL for rolling back the given savepoint.
"""
return "ROLLBACK TRANSACTION %s" % sid
def sql_flush(self, style, tables, sequences, allow_cascade=False):
"""
Returns a list of SQL statements required to remove all data from
the given database tables (without actually removing the tables
themselves).
The returned value also includes SQL statements required to reset DB
sequences passed in :param sequences:.
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
The `allow_cascade` argument determines whether truncation may cascade
to tables with foreign keys pointing the tables being truncated.
"""
if tables:
# Cannot use TRUNCATE on tables that are referenced by a FOREIGN KEY
# So must use the much slower DELETE
from django.db import connections
cursor = connections[self.connection.alias].cursor()
# Try to minimize the risks of the braindead inconsistency in
# DBCC CHECKIDENT(table, RESEED, n) behavior.
seqs = []
for seq in sequences:
cursor.execute("SELECT COUNT(*) FROM %s" % self.quote_name(seq["table"]))
rowcnt = cursor.fetchone()[0]
elem = {}
if rowcnt:
elem['start_id'] = 0
else:
elem['start_id'] = 1
elem.update(seq)
seqs.append(elem)
COLUMNS = "TABLE_NAME, CONSTRAINT_NAME"
WHERE = "CONSTRAINT_TYPE not in ('PRIMARY KEY','UNIQUE')"
cursor.execute(
"SELECT {} FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS WHERE {}".format(COLUMNS, WHERE))
fks = cursor.fetchall()
sql_list = ['ALTER TABLE %s NOCHECK CONSTRAINT %s;' %
(self.quote_name(fk[0]), self.quote_name(fk[1])) for fk in fks]
sql_list.extend(['%s %s %s;' % (style.SQL_KEYWORD('DELETE'), style.SQL_KEYWORD('FROM'),
style.SQL_FIELD(self.quote_name(table))) for table in tables])
if self.connection.to_azure_sql_db and self.connection.sql_server_version < 2014:
warnings.warn("Resetting identity columns is not supported "
"on this versios of Azure SQL Database.",
RuntimeWarning)
else:
# Then reset the counters on each table.
sql_list.extend(['%s %s (%s, %s, %s) %s %s;' % (
style.SQL_KEYWORD('DBCC'),
style.SQL_KEYWORD('CHECKIDENT'),
style.SQL_FIELD(self.quote_name(seq["table"])),
style.SQL_KEYWORD('RESEED'),
style.SQL_FIELD('%d' % seq['start_id']),
style.SQL_KEYWORD('WITH'),
style.SQL_KEYWORD('NO_INFOMSGS'),
) for seq in seqs])
sql_list.extend(['ALTER TABLE %s CHECK CONSTRAINT %s;' %
(self.quote_name(fk[0]), self.quote_name(fk[1])) for fk in fks])
return sql_list
else:
return []
def start_transaction_sql(self):
"""
Returns the SQL statement required to start a transaction.
"""
return "BEGIN TRANSACTION"
def subtract_temporals(self, internal_type, lhs, rhs):
lhs_sql, lhs_params = lhs
rhs_sql, rhs_params = rhs
if internal_type == 'DateField':
sql = "CAST(DATEDIFF(day, %(rhs)s, %(lhs)s) AS bigint) * 86400 * 1000000"
params = rhs_params + lhs_params
else:
SECOND = "DATEDIFF(second, %(rhs)s, %(lhs)s)"
MICROSECOND = "DATEPART(microsecond, %(lhs)s) - DATEPART(microsecond, %(rhs)s)"
sql = "CAST({} AS bigint) * 1000000 + {}".format(SECOND, MICROSECOND)
params = rhs_params + lhs_params * 2 + rhs_params
return sql % {'lhs': lhs_sql, 'rhs': rhs_sql}, params
def tablespace_sql(self, tablespace, inline=False):
"""
Returns the SQL that will be appended to tables or rows to define
a tablespace. Returns '' if the backend doesn't use tablespaces.
"""
return "ON %s" % self.quote_name(tablespace)
def prep_for_like_query(self, x):
"""Prepares a value for use in a LIKE query."""
# http://msdn2.microsoft.com/en-us/library/ms179859.aspx
return force_str(x).replace('\\', '\\\\').replace('[', '[[]').replace('%', '[%]').replace('_', '[_]')
def prep_for_iexact_query(self, x):
"""
Same as prep_for_like_query(), but called for "iexact" matches, which
need not necessarily be implemented using "LIKE" in the backend.
"""
return x
def adapt_datetimefield_value(self, value):
"""
Transforms a datetime value to an object compatible with what is expected
by the backend driver for datetime columns.
"""
if value is None:
return None
if settings.USE_TZ and timezone.is_aware(value):
# pyodbc doesn't support datetimeoffset
value = value.astimezone(self.connection.timezone).replace(tzinfo=None)
return value
def time_trunc_sql(self, lookup_type, field_name):
# if self.connection.sql_server_version >= 2012:
# fields = {
# 'hour': 'DATEPART(hour, %s)' % field_name,
# 'minute': 'DATEPART(minute, %s)' % field_name if lookup_type != 'hour' else '0',
# 'second': 'DATEPART(second, %s)' % field_name if lookup_type == 'second' else '0',
# }
# sql = 'TIMEFROMPARTS(%(hour)s, %(minute)s, %(second)s, 0, 0)' % fields
if lookup_type == 'hour':
sql = "CONVERT(time, SUBSTRING(CONVERT(varchar, %s, 114), 0, 3) + ':00:00')" % field_name
elif lookup_type == 'minute':
sql = "CONVERT(time, SUBSTRING(CONVERT(varchar, %s, 114), 0, 6) + ':00')" % field_name
elif lookup_type == 'second':
sql = "CONVERT(time, SUBSTRING(CONVERT(varchar, %s, 114), 0, 9))" % field_name
return sql
|
PypiClean
|
/iotlabsshcli-1.1.0.tar.gz/iotlabsshcli-1.1.0/README.rst
|
SSH CLI Tools
=============
|PyPI| |CI| |Codecov|
**SSH CLI Tools** provides a set of commands for interacting remotely and easily
with IoT-Lab Linux nodes. See `here <https://www.iot-lab.info/docs/boards/iot-lab-a8-m3/>`_
to get more information on this kind of node.
All available actions provided by **SSH CLI Tools** are available as sub-commands
of `iotlab-ssh`.
The provided sub-commands are:
=================== ==========================================================================================
Sub-command Function
=================== ==========================================================================================
**flash** Flash the given firmware on the nodes co-microcontroller
**reset** Reset the nodes co-microcontroller
**wait-for-boot** Block the execution until all nodes have booted or maximum wait time has expired
**run-script** Run a given script in background (screen session) on the given nodes
**run-cmd** Run a command on the given nodes
**copy-file** Copy a file on SSH frontend homedir directory (*~/shared/.iotlabsshcli*)
=================== ==========================================================================================
**SSH CLI Tools** can be used in conjunction with the
`IoT-Lab CLI Tools <https://github.com/iot-lab/cli-tools>`_ commands like
`iotlab-auth` and `iotlab-experiment`.
Installation:
-------------
You need python `pip <https://pip.pypa.io/en/stable/>`_.
To do a system-wide install of the ssh-cli-tools use pip (or pip3 for
Python 3)::
$ sudo pip install iotlabsshcli
Pip version >= 9.0 is known to work ; you may need to upgrade.
See `<INSTALLING.md>`_ for details on installation options.
Requirements:
-------------
Linux nodes are reachable through a gateway SSH server (IoT-LAB SSH
frontend). For this reason you must verify that your SSH public key used by
ssh-cli-tools has been recorded in your IoT-LAB user profile. You can find how
to configure your IoT-LAB SSH access in this
`documentation <https://www.iot-lab.info/docs/getting-started/ssh-access/>`_.
Examples:
---------
Start an experiment, wait for it to be ready, wait for all nodes boot:
......................................................................
.. code-block::
$ iotlab-experiment submit -d 120 -l saclay,a8,1-10
{
"id": 65535
}
$ iotlab-experiment wait
Waiting that experiment 65535 gets in state Running
"Running"
$ iotlab-ssh wait-for-boot
{
"wait-for-boot": {
"0": [
"node-a8-2.saclay.iot-lab.info",
"node-a8-3.saclay.iot-lab.info",
"node-a8-5.saclay.iot-lab.info",
"node-a8-6.saclay.iot-lab.info",
"node-a8-7.saclay.iot-lab.info",
"node-a8-9.saclay.iot-lab.info",
"node-a8-10.saclay.iot-lab.info"
],
"1": [
"node-a8-4.saclay.iot-lab.info",
"node-a8-8.saclay.iot-lab.info"
]
}
}
**Note:** node-a8-4 and node-a8-8 are broken in Saclay.
Flash a firmware on the co-microcontroller of the working nodes:
................................................................
.. code-block::
$ iotlab-ssh flash <firmware.elf> -l saclay,a8,2-3+5-7+9-10
{
"flash": {
"0": [
"node-a8-2.saclay.iot-lab.info",
"node-a8-3.saclay.iot-lab.info",
"node-a8-5.saclay.iot-lab.info",
"node-a8-6.saclay.iot-lab.info",
"node-a8-7.saclay.iot-lab.info",
"node-a8-9.saclay.iot-lab.info",
"node-a8-10.saclay.iot-lab.info"
]
}
}
Reset the co-microcontroller of one node:
.........................................
.. code-block::
$ iotlab-ssh reset -l saclay,a8,2
{
"reset": {
"0": [
"node-a8-2.saclay.iot-lab.info"
]
}
}
Use the *--verbose* option to get the commands output:
......................................................
.. code-block::
$ iotlab-ssh --verbose reset -l saclay,a8,2
Connecting via SSH proxy saclay.iot-lab.info:22 -> node-a8-2.saclay.iot-lab.info:22
[node-a8-2.saclay.iot-lab.info] Open On-Chip Debugger 0.9.0-dirty (2016-04-15-00:55)
[node-a8-2.saclay.iot-lab.info] Licensed under GNU GPL v2
[node-a8-2.saclay.iot-lab.info] For bug reports, read
[node-a8-2.saclay.iot-lab.info] http://openocd.org/doc/doxygen/bugs.html
[node-a8-2.saclay.iot-lab.info] adapter speed: 1000 kHz
[...]
[node-a8-2.saclay.iot-lab.info] TargetName Type Endian TapName State
[node-a8-2.saclay.iot-lab.info] -- ------------------ ---------- ------ ------------------ ------------
[node-a8-2.saclay.iot-lab.info] 0* stm32f1x.cpu cortex_m little stm32f1x.cpu running
[node-a8-2.saclay.iot-lab.info] Info : JTAG tap: stm32f1x.cpu tap/device found: 0x3ba00477 (mfg: 0x23b, part: 0xba00, ver: 0x3)
[node-a8-2.saclay.iot-lab.info] Info : JTAG tap: stm32f1x.bs tap/device found: 0x06414041 (mfg: 0x020, part: 0x6414, ver: 0x0)
[node-a8-2.saclay.iot-lab.info] shutdown command invoked
[node-a8-2.saclay.iot-lab.info] Return Value: 0
{
"reset": {
"0": [
"node-a8-2.saclay.iot-lab.info"
]
}
}
Run a command on two nodes:
...........................
.. code-block::
$ iotlab-ssh --verbose run-cmd "uname -a" -l saclay,a8,2-3
Connecting via SSH proxy saclay.iot-lab.info:22 -> node-a8-2.saclay.iot-lab.info:22
[node-a8-2.saclay.iot-lab.info] Linux node-a8-2 3.18.5-iotlab+ #9 Thu Sep 1 16:17:22 CEST 2016 armv7l GNU/Linux
[node-a8-3.saclay.iot-lab.info] Linux node-a8-3 3.18.5-iotlab+ #9 Thu Sep 1 16:17:22 CEST 2016 armv7l GNU/Linux
{
"run-cmd": {
"0": [
"node-a8-2.saclay.iot-lab.info",
"node-a8-3.saclay.iot-lab.info"
]
}
}
Run a command on SSH frontend:
..............................
.. code-block::
$ iotlab-ssh --verbose run-cmd "uname -a" --frontend
[saclay.iot-lab.info] Linux saclay 3.16.0-4-amd64 #1 SMP Debian 3.16.36-1+deb8u1 (2016-09-03) x86_64 GNU/Linux
{
"run-cmd": {
"0": [
"saclay.iot-lab.info"
]
}
}
Copy file on SSH frontend homedir directory (~/shared/.iotlabsshcli):
.....................................................................
.. code-block::
$ iotlab-ssh copy-file test.tar.gz
{
"run-cmd": {
"0": [
"saclay.iot-lab.info"
]
}
}
$ iotlab-ssh run-cmd "tar -xzvf ~/shared/.iotlabsshcli/test.tar.gz -C ~/shared/.iotlabsshcli/" --frontend
{
"run-cmd": {
"0": [
"saclay.iot-lab.info"
]
}
}
**Note:** shared homedir directory is mounted (via NFS) by Linux nodes during experiment.
Run the script `/tmp/test.sh` on `node-a8-2` in saclay:
.......................................................
.. code-block::
$ iotlab-ssh run-script /tmp/test.sh -l saclay,a8,2
{
"run-script": {
"0": [
"node-a8-2.saclay.iot-lab.info"
]
}
}
**Note:** a screen session is launched on the node
to actually run the script and provide easy access to outputs if needed.
When the script ends, the screen session is terminated and the logs are gone.
.. code-block::
root@node-a8-2:~# screen -ls
There is a screen on:
1877.<login>-<exp_id> (Detached)
1 Socket in /tmp/screens/S-root.
**Note:** similar to run command you can pass the *--frontend* option if
you want to launch a script in background on the SSH frontend.
.. |PyPI| image:: https://badge.fury.io/py/iotlabsshcli.svg
:target: https://badge.fury.io/py/iotlabsshcli
:alt: PyPI package status
.. |CI| image:: https://github.com/iot-lab/ssh-cli-tools/workflows/CI/badge.svg
:target: https://github.com/iot-lab/ssh-cli-tools/actions?query=workflow%3ACI+branch%3Amaster
:alt: CI status
.. |Codecov| image:: https://codecov.io/gh/iot-lab/ssh-cli-tools/branch/master/graph/badge.svg
:target: https://codecov.io/gh/iot-lab/ssh-cli-tools/branch/master
:alt: Codecov coverage status
|
PypiClean
|
/py-pure-client-1.38.0.tar.gz/py-pure-client-1.38.0/pypureclient/pure1/Pure1_1_1/api/file_system_snapshots_api.py
|
from __future__ import absolute_import
import re
# python 2 and python 3 compatibility library
import six
from typing import List, Optional
from .. import models
class FileSystemSnapshotsApi(object):
def __init__(self, api_client):
self.api_client = api_client
def api11_file_system_snapshots_get_with_http_info(
self,
authorization=None, # type: str
x_request_id=None, # type: str
continuation_token=None, # type: str
filter=None, # type: str
ids=None, # type: List[str]
limit=None, # type: int
names=None, # type: List[str]
offset=None, # type: int
sort=None, # type: List[str]
source_ids=None, # type: List[str]
source_names=None, # type: List[str]
async_req=False, # type: bool
_return_http_data_only=False, # type: bool
_preload_content=True, # type: bool
_request_timeout=None, # type: Optional[int]
):
# type: (...) -> models.FileSystemSnapshotGetResponse
"""Get FlashBlade file system snapshots
Retrieves snapshots of FlashBlade file systems.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.api11_file_system_snapshots_get_with_http_info(async_req=True)
>>> result = thread.get()
:param str authorization: Access token (in JWT format) required to use any API endpoint (except `/oauth2`)
:param str x_request_id: Supplied by client during request or generated by server.
:param str continuation_token: An opaque token used to iterate over a collection. The token to use on the next request is returned in the `continuation_token` field of the result. Single quotes are required around all strings.
:param str filter: Exclude resources that don't match the specified criteria. Single quotes are required around all strings inside the filters.
:param list[str] ids: A comma-separated list of resource IDs. If there is not at least one resource that matches each `id` element, an error is returned. Single quotes are required around all strings.
:param int limit: Limit the size of the response to the specified number of resources. A limit of 0 can be used to get the number of resources without getting all of the resources. It will be returned in the total_item_count field. If a client asks for a page size larger than the maximum number, the request is still valid. In that case the server just returns the maximum number of items, disregarding the client's page size request. If not specified, defaults to 1000.
:param list[str] names: A comma-separated list of resource names. If there is not at least one resource that matches each `name` element, an error is returned. Single quotes are required around all strings.
:param int offset: The offset of the first resource to return from a collection.
:param list[str] sort: Sort the response by the specified fields (in descending order if '-' is appended to the field name). If you provide a sort you will not get a continuation token in the response.
:param list[str] source_ids: A comma-separated list of ids for the source of the object. If there is not at least one resource that matches each `source_id` element, an error is returned. Single quotes are required around all strings.
:param list[str] source_names: A comma-separated list of names for the source of the object. If there is not at least one resource that matches each `source_name` element, an error is returned. Single quotes are required around all strings.
:param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
:param bool _return_http_data_only: Returns only data field.
:param bool _preload_content: Response is converted into objects.
:param int _request_timeout: Total request timeout in seconds.
It can also be a tuple of (connection time, read time) timeouts.
:return: FileSystemSnapshotGetResponse
If the method is called asynchronously,
returns the request thread.
"""
continuation_token = models.quoteString(continuation_token)
if ids is not None:
if not isinstance(ids, list):
ids = [ids]
ids = models.quoteStrings(ids)
if names is not None:
if not isinstance(names, list):
names = [names]
names = models.quoteStrings(names)
if sort is not None:
if not isinstance(sort, list):
sort = [sort]
if source_ids is not None:
if not isinstance(source_ids, list):
source_ids = [source_ids]
source_ids = models.quoteStrings(source_ids)
if source_names is not None:
if not isinstance(source_names, list):
source_names = [source_names]
source_names = models.quoteStrings(source_names)
params = {k: v for k, v in six.iteritems(locals()) if v is not None}
# Convert the filter into a string
if params.get('filter'):
params['filter'] = str(params['filter'])
if params.get('sort'):
params['sort'] = [str(_x) for _x in params['sort']]
if 'offset' in params and params['offset'] < 0:
raise ValueError("Invalid value for parameter `offset` when calling `api11_file_system_snapshots_get`, must be a value greater than or equal to `0`")
collection_formats = {}
path_params = {}
query_params = []
if 'continuation_token' in params:
query_params.append(('continuation_token', params['continuation_token']))
if 'filter' in params:
query_params.append(('filter', params['filter']))
if 'ids' in params:
query_params.append(('ids', params['ids']))
collection_formats['ids'] = 'csv'
if 'limit' in params:
query_params.append(('limit', params['limit']))
if 'names' in params:
query_params.append(('names', params['names']))
collection_formats['names'] = 'csv'
if 'offset' in params:
query_params.append(('offset', params['offset']))
if 'sort' in params:
query_params.append(('sort', params['sort']))
collection_formats['sort'] = 'csv'
if 'source_ids' in params:
query_params.append(('source_ids', params['source_ids']))
collection_formats['source_ids'] = 'csv'
if 'source_names' in params:
query_params.append(('source_names', params['source_names']))
collection_formats['source_names'] = 'csv'
header_params = {}
if 'authorization' in params:
header_params['Authorization'] = params['authorization']
if 'x_request_id' in params:
header_params['X-Request-ID'] = params['x_request_id']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json'])
# Authentication setting
auth_settings = ['AuthorizationHeader']
return self.api_client.call_api(
'/api/1.1/file-system-snapshots', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FileSystemSnapshotGetResponse',
auth_settings=auth_settings,
async_req=async_req,
_return_http_data_only=_return_http_data_only,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
collection_formats=collection_formats,
)
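# Illustrative sketch (client construction is environment-specific and not
# shown here): given an api_client, snapshots can be listed with, e.g.
#   api = FileSystemSnapshotsApi(api_client)
#   resp = api.api11_file_system_snapshots_get_with_http_info(
#       limit=5, sort=['name'])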
def api11_file_system_snapshots_policies_get_with_http_info(
self,
authorization=None, # type: str
x_request_id=None, # type: str
continuation_token=None, # type: str
filter=None, # type: str
limit=None, # type: int
member_ids=None, # type: List[str]
member_names=None, # type: List[str]
policy_ids=None, # type: List[str]
policy_names=None, # type: List[str]
offset=None, # type: int
sort=None, # type: List[str]
async_req=False, # type: bool
_return_http_data_only=False, # type: bool
_preload_content=True, # type: bool
_request_timeout=None, # type: Optional[int]
):
# type: (...) -> models.PolicyMembersGetResponse
"""Get FlashBlade file system snapshot / policy pairs
Retrieves pairs of FlashBlade file system snapshot members and their policies.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.api11_file_system_snapshots_policies_get_with_http_info(async_req=True)
>>> result = thread.get()
:param str authorization: Access token (in JWT format) required to use any API endpoint (except `/oauth2`)
:param str x_request_id: Supplied by client during request or generated by server.
:param str continuation_token: An opaque token used to iterate over a collection. The token to use on the next request is returned in the `continuation_token` field of the result. Single quotes are required around all strings.
:param str filter: Exclude resources that don't match the specified criteria. Single quotes are required around all strings inside the filters.
:param int limit: Limit the size of the response to the specified number of resources. A limit of 0 can be used to get the number of resources without getting all of the resources. It will be returned in the total_item_count field. If a client asks for a page size larger than the maximum number, the request is still valid. In that case the server just returns the maximum number of items, disregarding the client's page size request. If not specified, defaults to 1000.
:param list[str] member_ids: A comma-separated list of member IDs. If there is not at least one resource that matches each `member_id` element, an error is returned. Single quotes are required around all strings.
:param list[str] member_names: A comma-separated list of member names. If there is not at least one resource that matches each `member_name` element, an error is returned. Single quotes are required around all strings.
:param list[str] policy_ids: A comma-separated list of policy IDs. If there is not at least one resource that matches each `policy_id` element, an error is returned. Single quotes are required around all strings.
:param list[str] policy_names: A comma-separated list of policy names. If there is not at least one resource that matches each `policy_name` element, an error is returned. Single quotes are required around all strings.
:param int offset: The offset of the first resource to return from a collection.
:param list[str] sort: Sort the response by the specified fields (in descending order if '-' is appended to the field name). If you provide a sort you will not get a continuation token in the response.
:param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
:param bool _return_http_data_only: Returns only data field.
:param bool _preload_content: Response is converted into objects.
:param int _request_timeout: Total request timeout in seconds.
It can also be a tuple of (connection time, read time) timeouts.
:return: PolicyMembersGetResponse
If the method is called asynchronously,
returns the request thread.
"""
continuation_token = models.quoteString(continuation_token)
if member_ids is not None:
if not isinstance(member_ids, list):
member_ids = [member_ids]
member_ids = models.quoteStrings(member_ids)
if member_names is not None:
if not isinstance(member_names, list):
member_names = [member_names]
member_names = models.quoteStrings(member_names)
if policy_ids is not None:
if not isinstance(policy_ids, list):
policy_ids = [policy_ids]
policy_ids = models.quoteStrings(policy_ids)
if policy_names is not None:
if not isinstance(policy_names, list):
policy_names = [policy_names]
policy_names = models.quoteStrings(policy_names)
if sort is not None:
if not isinstance(sort, list):
sort = [sort]
params = {k: v for k, v in six.iteritems(locals()) if v is not None}
# Convert the filter into a string
if params.get('filter'):
params['filter'] = str(params['filter'])
if params.get('sort'):
params['sort'] = [str(_x) for _x in params['sort']]
if 'offset' in params and params['offset'] < 0:
raise ValueError("Invalid value for parameter `offset` when calling `api11_file_system_snapshots_policies_get`, must be a value greater than or equal to `0`")
collection_formats = {}
path_params = {}
query_params = []
if 'continuation_token' in params:
query_params.append(('continuation_token', params['continuation_token']))
if 'filter' in params:
query_params.append(('filter', params['filter']))
if 'limit' in params:
query_params.append(('limit', params['limit']))
if 'member_ids' in params:
query_params.append(('member_ids', params['member_ids']))
collection_formats['member_ids'] = 'csv'
if 'member_names' in params:
query_params.append(('member_names', params['member_names']))
collection_formats['member_names'] = 'csv'
if 'policy_ids' in params:
query_params.append(('policy_ids', params['policy_ids']))
collection_formats['policy_ids'] = 'csv'
if 'policy_names' in params:
query_params.append(('policy_names', params['policy_names']))
collection_formats['policy_names'] = 'csv'
if 'offset' in params:
query_params.append(('offset', params['offset']))
if 'sort' in params:
query_params.append(('sort', params['sort']))
collection_formats['sort'] = 'csv'
header_params = {}
if 'authorization' in params:
header_params['Authorization'] = params['authorization']
if 'x_request_id' in params:
header_params['X-Request-ID'] = params['x_request_id']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json'])
# Authentication setting
auth_settings = ['AuthorizationHeader']
return self.api_client.call_api(
'/api/1.1/file-system-snapshots/policies', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PolicyMembersGetResponse',
auth_settings=auth_settings,
async_req=async_req,
_return_http_data_only=_return_http_data_only,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
collection_formats=collection_formats,
)
|
PypiClean
|
/django-twitter-tag-1.2.1.tar.gz/django-twitter-tag-1.2.1/README.rst
|
Django Twitter Tag
==================
.. image:: https://secure.travis-ci.org/coagulant/django-twitter-tag.png?branch=dev
:target: https://travis-ci.org/coagulant/django-twitter-tag
.. image:: https://coveralls.io/repos/coagulant/django-twitter-tag/badge.png?branch=dev
:target: https://coveralls.io/r/coagulant/django-twitter-tag/
A django template tag to display user's recent tweets / search results.
Version 1.0 uses Twitter API 1.1.
Basic features are limiting numbers of displayed tweets, filtering out replies and retweets.
The library exposes each tweet's ``json`` in the template, adding two extra attributes: ``html`` and ``datetime``.
The first makes urls, hashtags and twitter usernames clickable, just like you expect them to be.
The second provides a python datetime object to ease output in templates.
Urls are expanded by default. The library handles twitter exceptions gracefully,
returning the last successful response.
Usage
-----
* Load tag in your template like this::
{% load twitter_tag %}
* Get user's (``futurecolors`` in example) most recent tweets and store them in ``tweets`` variable::
{% get_tweets for "futurecolors" as tweets %}
* Now you have a list of tweets in your template context, which you can iterate over like this::
<ul>
{% for tweet in tweets %}
<li>{{ tweet.html|safe }}</li>
{% endfor %}
</ul>
Installation
------------
This app works with python 2.7 and 3.3, Django 1.4-1.6.
Recommended way to install is pip::
pip install django-twitter-tag
Add ``twitter_tag`` to ``INSTALLED_APPS`` in settings.py::
INSTALLED_APPS = (...
'twitter_tag',
...
)
Configuration
-------------
Twitter `API 1.1`_ requires authentication for every request you make,
so you have to provide some credentials for the oauth dance to work.
First, `create an application`_; second, request an access token on the newly created
app page. The `process of obtaining a token`_ is explained in detail in the docs.
Here is an example of how your config might look like::
# settings.py
# Make sure to replace with your own values, these are just made up
# Your access token: Access token
TWITTER_OAUTH_TOKEN = '91570701-BQMM5Ix9AJUC5JtM5Ix9DtwNAiaaYIYGN2CyPgduPVZKSX'
# Your access token: Access token secret
TWITTER_OAUTH_SECRET = 'hi1UiXm8rF4essN3HlaqMz7GoUvy3e4DsVkBAVsg4M'
# OAuth settings: Consumer key
TWITTER_CONSUMER_KEY = '3edIOec4uu00IGFxvQcwJe'
# OAuth settings: Consumer secret
TWITTER_CONSUMER_SECRET = 'YBD6GyFpvumNbNA218RAphszFnkifxR8K9h8Rdtq1A'
For best performance you should set up `django cache framework`_. Cache is used both internally
to store last successful json response and externally (see Caching below).
.. _API 1.1: https://dev.twitter.com/docs/api/1.1
.. _create an application: https://dev.twitter.com/apps
.. _process of obtaining a token: https://dev.twitter.com/docs/auth/tokens-devtwittercom
.. _django cache framework: https://docs.djangoproject.com/en/dev/topics/cache/
Examples
--------
You can specify the number of tweets to show::
{% get_tweets for "futurecolors" as tweets limit 10 %}
To filter out tweet replies (that start with @ char)::
{% get_tweets for "futurecolors" as tweets exclude "replies" %}
To ignore native retweets::
{% get_tweets for "futurecolors" as tweets exclude "retweets" %}
Or everything from above together::
{% get_tweets for "futurecolors" as tweets exclude "replies, retweets" limit 10 %}
Search tag (experimental)
-------------------------
You can search for tweets::
{% search_tweets for "python 3" as tweets limit 5 %}
Search api arguments are supported via key=value pairs::
{% search_tweets for "python 3" as tweets lang='eu' result_type='popular' %}
Relevant `API docs for search`_.
.. _API docs for search: https://dev.twitter.com/docs/api/1.1/get/search/tweets
Caching
-------
It's strongly advised to use the template caching framework to reduce the number of twitter API calls
and avoid reaching the `rate limit`_ (currently 180 requests per 15 minutes)::
{% load twitter_tag cache %}
{% cache 60 my_tweets %}
{% get_tweets for "futurecolors" as tweets exclude "retweets" %}
...
{% endcache %}
.. _rate limit: https://dev.twitter.com/docs/rate-limiting/1.1
Extra
-----
Tweet's properties
~~~~~~~~~~~~~~~~~~
``get_tweets`` puts a list of tweets into the template context. Each tweet is a json dict that has
exactly the same attributes as stated in the API 1.1 docs describing `tweet json`_.
The tweet's created timestamp is converted to a python datetime object and is available in templates::
{{ tweet.datetime|date:"D d M Y" }}
.. _tweet json: https://dev.twitter.com/docs/platform-objects/tweets
Tweet's html
~~~~~~~~~~~~
Each tweet also has an extra ``html`` property, which contains the tweet formatted for html output
with all needed links. Note that Twitter has `guidelines for developers`_ on how embedded tweets
should look.
.. _guidelines for developers: https://dev.twitter.com/terms/display-requirements
Exception handling
~~~~~~~~~~~~~~~~~~
Any Twitter API exceptions like 'Over capacity' are silenced and logged.
Django cache is used internally to store the last successful response in case `twitter is down`_.
.. _twitter is down: https://dev.twitter.com/docs/error-codes-responses
Going beyond
~~~~~~~~~~~~
Since version 1.0 you can create your own template tags for specific twitter queries
not supported by this library. Simply inherit from ``twitter_tag.templatetags.twitter_tag.BaseTwitterTag``
and implement your own ``get_json`` method (tag syntax is controlled by django-classy-tags);
a minimal sketch follows.
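For illustration only, a hypothetical tag module might look like this (the
module path, tag name and ``favorites`` call are assumptions for this sketch,
not part of the library; check the exact ``get_json`` signature against
``BaseTwitterTag`` before relying on it)::

    # myapp/templatetags/my_twitter_tags.py
    from django import template

    from twitter_tag.templatetags.twitter_tag import BaseTwitterTag

    register = template.Library()


    class GetFavorites(BaseTwitterTag):
        """Hypothetical tag fetching a user's favorited tweets."""
        name = 'get_favorites'

        def get_json(self, twitter, **kwargs):
            # 'twitter' is assumed to be the authenticated API client that
            # the base tag prepares; 'favorites' is the matching REST call.
            return twitter.favorites(**kwargs)

    register.tag(GetFavorites)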
Development
-----------
To install `development version`_, use ``pip install django-twitter-tag==dev``
.. _development version: https://github.com/coagulant/django-twitter-tag/archive/dev.tar.gz#egg=django_twitter_tag-dev
Tests
-----
Run::
DJANGO_SETTINGS_MODULE=twitter_tag.test_settings python setup.py test
|
PypiClean
|
/syntaxnet_with_tensorflow-0.2-cp27-cp27mu-manylinux1_x86_64.whl/syntaxnet_with_tensorflow-0.2.data/purelib/syntaxnet/util/registry.py
|
import inspect
import sys
from tensorflow.python.platform import tf_logging as logging
def _GetClass(name):
"""Looks up a class by name.
Args:
name: The fully-qualified type name of the class to return.
Returns:
The class associated with the |name|, or None on error.
"""
elements = name.split('.')
# Need at least "module.Class".
if len(elements) < 2:
logging.debug('Malformed type: "%s"', name)
return None
module_path = '.'.join(elements[:-1])
class_name = elements[-1]
# Import the module.
try:
__import__(module_path)
except ImportError as e:
logging.debug('Unable to find module "%s": "%s"', module_path, e)
return None
module = sys.modules[module_path]
# Look up the class.
if not hasattr(module, class_name):
logging.debug('Name "%s" not found in module: "%s"', class_name,
module_path)
return None
class_obj = getattr(module, class_name)
# Check that it is actually a class.
if not inspect.isclass(class_obj):
logging.debug('Name does not refer to a class: "%s"', name)
return None
return class_obj
def _Create(baseclass, subclass_name, *args, **kwargs):
"""Creates an instance of a named subclass.
Args:
baseclass: The expected base class.
subclass_name: The fully-qualified type name of the subclass to create.
*args: Passed to the subclass constructor.
**kwargs: Passed to the subclass constructor.
Returns:
An instance of the named subclass, or None on error.
"""
subclass = _GetClass(subclass_name)
if subclass is None:
return None # _GetClass() already logged an error
if not issubclass(subclass, baseclass):
logging.debug('Class "%s" is not a subclass of "%s"', subclass_name,
baseclass.__name__)
return None
return subclass(*args, **kwargs)
def _ResolveAndCreate(baseclass, path, subclass_name, *args, **kwargs):
"""Resolves the name of a subclass and creates an instance of it.
The subclass is resolved with respect to a package path in an inside-out
manner. For example, if |path| is 'google3.foo.bar' and |subclass_name| is
'baz.ClassName', then attempts are made to create instances of the following
fully-qualified class names:
'google3.foo.bar.baz.ClassName'
'google3.foo.baz.ClassName'
'google3.baz.ClassName'
'baz.ClassName'
An instance corresponding to the first successful attempt is returned.
Args:
baseclass: The expected base class.
path: The path to use to resolve the subclass.
subclass_name: The name of the subclass to create.
*args: Passed to the subclass constructor.
**kwargs: Passed to the subclass constructor.
Returns:
An instance of the named subclass corresponding to the inner-most successful
name resolution.
Raises:
ValueError: If the subclass cannot be resolved and created.
"""
elements = path.split('.')
while True:
resolved_subclass_name = '.'.join(elements + [subclass_name])
subclass = _Create(baseclass, resolved_subclass_name, *args, **kwargs)
if subclass: return subclass # success
if not elements: break # no more paths to try
elements.pop() # try resolving against the next-outer path
raise ValueError(
'Failed to create subclass "%s" of base class %s using path %s' %
(subclass_name, baseclass.__name__, path))
def RegisteredClass(baseclass):
"""Decorates the |baseclass| with a static Create() method."""
assert not hasattr(baseclass, 'Create')
def Create(subclass_name, *args, **kwargs):
"""A wrapper around _Create() that curries the |baseclass|."""
path = inspect.getmodule(baseclass).__name__
return _ResolveAndCreate(baseclass, path, subclass_name, *args, **kwargs)
baseclass.Create = staticmethod(Create)
return baseclass
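# Usage sketch (not part of the original module; the classes below are
# hypothetical and exist only to illustrate the Create() mechanism).
if __name__ == '__main__':

  @RegisteredClass
  class Component(object):
    pass

  class FancyComponent(Component):

    def __init__(self, scale):
      self.scale = scale

  # 'FancyComponent' is resolved against this module's package path; the
  # fully-qualified lookup '__main__.FancyComponent' succeeds, and the
  # constructor argument is forwarded.
  instance = Component.Create('FancyComponent', scale=2)
  assert isinstance(instance, FancyComponent)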
|
PypiClean
|
/python-network-0.0.1.tar.gz/python-network-0.0.1/pynet/datasets/brats.py
|
# Imports
from collections import namedtuple
import os
import logging
import numpy as np
import pandas as pd
import nibabel as nib
import progressbar
from pynet.datasets import Fetchers
# Global parameters
MODALITIES = ("t1", "t1ce", "t2", "flair")
Item = namedtuple("Item", ["input_path", "output_path", "metadata_path"])
logger = logging.getLogger("pynet")
@Fetchers.register
def fetch_brats(datasetdir):
""" Fetch/prepare the Brats dataset for pynet.
Parameters
----------
datasetdir: str
the dataset destination folder.
Returns
-------
item: namedtuple
a named tuple containing 'input_path', 'output_path', and
'metadata_path'.
"""
logger.info("Loading brats dataset.")
def _crop(arr):
return arr[45: 195, 30: 220, 10: 145]
def _norm(arr):
logical_mask = (arr != 0)
mean = arr[logical_mask].mean()
std = arr[logical_mask].std()
return ((arr - mean) / std).astype(np.single)
traindir = os.path.join(datasetdir, "MICCAI_BraTS_2019_Data_Training")
mapping_path = os.path.join(traindir, "name_mapping.csv")
if not os.path.isfile(mapping_path):
raise ValueError(
"You must first download the Brats data in the '{0}' folder "
"following the 'https://www.med.upenn.edu/sbia/brats2018/"
"registration.html' instructions.".format(datasetdir))
desc_path = os.path.join(datasetdir, "pynet_brats.tsv")
input_path = os.path.join(datasetdir, "pynet_brats_inputs.npy")
output_path = os.path.join(datasetdir, "pynet_brats_outputs.npy")
if not os.path.isfile(desc_path):
df = pd.read_csv(mapping_path, sep=",")
arr = df[["BraTS_2019_subject_ID", "Grade"]].values
input_dataset = []
output_dataset = []
nb_subjects = len(arr)
with progressbar.ProgressBar(max_value=nb_subjects,
redirect_stdout=True) as bar:
for cnt, (sid, grade) in enumerate(arr):
logger.debug("Processing {0}...".format(sid))
datadir = os.path.join(traindir, grade, sid)
data = []
for mod in MODALITIES:
path = os.path.join(
datadir, "{0}_{1}.nii.gz".format(sid, mod))
data.append(_norm(_crop(nib.load(path).get_data())))
data = np.asarray(data)
input_dataset.append(data)
path = os.path.join(datadir, "{0}_seg.nii.gz".format(sid))
_arr = nib.load(path).get_data()
data = []
for value in (0, 1, 2, 4):
data.append(_crop(_arr == value))
data = np.asarray(data)
output_dataset.append(data)
bar.update(cnt)
input_dataset = np.asarray(input_dataset)
np.save(input_path, input_dataset)
output_dataset = np.asarray(output_dataset)
np.save(output_path, output_dataset)
dataset_desc = pd.DataFrame(
arr, columns=["participant_id", "grade"])
dataset_desc.to_csv(desc_path, sep="\t")
return Item(input_path=input_path, output_path=output_path,
metadata_path=desc_path)
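# Usage sketch (the dataset directory below is a placeholder and must already
# contain the downloaded BraTS training data):
#
#     item = fetch_brats("/path/to/brats")
#     inputs = np.load(item.input_path)    # (n_subjects, 4, 150, 190, 135)
#     outputs = np.load(item.output_path)  # one channel per label (0, 1, 2, 4)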
|
PypiClean
|
/python-bitcoinlib-0.12.2.tar.gz/python-bitcoinlib-0.12.2/bitcoin/bloom.py
|
import struct
import math
import bitcoin.core
import bitcoin.core.serialize
def _ROTL32(x, r):
assert x <= 0xFFFFFFFF
return ((x << r) & 0xFFFFFFFF) | (x >> (32 - r))
def MurmurHash3(nHashSeed, vDataToHash):
"""MurmurHash3 (x86_32)
Used for bloom filters. See http://code.google.com/p/smhasher/source/browse/trunk/MurmurHash3.cpp
"""
assert nHashSeed <= 0xFFFFFFFF
h1 = nHashSeed
c1 = 0xcc9e2d51
c2 = 0x1b873593
# body
i = 0
while (i < len(vDataToHash) - len(vDataToHash) % 4
and len(vDataToHash) - i >= 4):
k1 = struct.unpack(b"<L", vDataToHash[i:i+4])[0]
k1 = (k1 * c1) & 0xFFFFFFFF
k1 = _ROTL32(k1, 15)
k1 = (k1 * c2) & 0xFFFFFFFF
h1 ^= k1
h1 = _ROTL32(h1, 13)
h1 = (((h1*5) & 0xFFFFFFFF) + 0xe6546b64) & 0xFFFFFFFF
i += 4
# tail
k1 = 0
j = (len(vDataToHash) // 4) * 4
if len(vDataToHash) & 3 >= 3:
k1 ^= vDataToHash[j+2] << 16
if len(vDataToHash) & 3 >= 2:
k1 ^= vDataToHash[j+1] << 8
if len(vDataToHash) & 3 >= 1:
k1 ^= vDataToHash[j]
k1 &= 0xFFFFFFFF
k1 = (k1 * c1) & 0xFFFFFFFF
k1 = _ROTL32(k1, 15)
k1 = (k1 * c2) & 0xFFFFFFFF
h1 ^= k1
# finalization
h1 ^= len(vDataToHash) & 0xFFFFFFFF
h1 ^= (h1 & 0xFFFFFFFF) >> 16
h1 *= 0x85ebca6b
h1 ^= (h1 & 0xFFFFFFFF) >> 13
h1 *= 0xc2b2ae35
h1 ^= (h1 & 0xFFFFFFFF) >> 16
return h1 & 0xFFFFFFFF
class CBloomFilter(bitcoin.core.serialize.Serializable):
# 20,000 items with fp rate < 0.1% or 10,000 items and <0.0001%
MAX_BLOOM_FILTER_SIZE = 36000
MAX_HASH_FUNCS = 50
UPDATE_NONE = 0
UPDATE_ALL = 1
UPDATE_P2PUBKEY_ONLY = 2
UPDATE_MASK = 3
def __init__(self, nElements, nFPRate, nTweak, nFlags):
"""Create a new bloom filter
The filter will have a given false-positive rate when filled with the
given number of elements.
Note that if the given parameters will result in a filter outside the
bounds of the protocol limits, the filter created will be as close to
the given parameters as possible within the protocol limits. This will
apply if nFPRate is very low or nElements is unreasonably high.
nTweak is a constant which is added to the seed value passed to the
hash function. It should generally always be a random value (and is
largely only exposed for unit testing)
nFlags should be one of the UPDATE_* enums (but not _MASK)
"""
LN2SQUARED = 0.4804530139182014246671025263266649717305529515945455
LN2 = 0.6931471805599453094172321214581765680755001343602552
self.vData = bytearray(int(min(-1 / LN2SQUARED * nElements * math.log(nFPRate), self.MAX_BLOOM_FILTER_SIZE * 8) / 8))
self.nHashFuncs = int(min(len(self.vData) * 8 / nElements * LN2, self.MAX_HASH_FUNCS))
self.nTweak = nTweak
self.nFlags = nFlags
def bloom_hash(self, nHashNum, vDataToHash):
return MurmurHash3(((nHashNum * 0xFBA4C795) + self.nTweak) & 0xFFFFFFFF, vDataToHash) % (len(self.vData) * 8)
__bit_mask = bytearray([0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80])
def insert(self, elem):
"""Insert an element in the filter.
elem may be a COutPoint or bytes
"""
if isinstance(elem, bitcoin.core.COutPoint):
elem = elem.serialize()
if len(self.vData) == 1 and self.vData[0] == 0xff:
return
for i in range(0, self.nHashFuncs):
nIndex = self.bloom_hash(i, elem)
# Sets bit nIndex of vData
self.vData[nIndex >> 3] |= self.__bit_mask[7 & nIndex]
def contains(self, elem):
"""Test if the filter contains an element
elem may be a COutPoint or bytes
"""
if isinstance(elem, bitcoin.core.COutPoint):
elem = elem.serialize()
if len(self.vData) == 1 and self.vData[0] == 0xff:
return True
for i in range(0, self.nHashFuncs):
nIndex = self.bloom_hash(i, elem)
if not (self.vData[nIndex >> 3] & self.__bit_mask[7 & nIndex]):
return False
return True
def IsWithinSizeConstraints(self):
return len(self.vData) <= self.MAX_BLOOM_FILTER_SIZE and self.nHashFuncs <= self.MAX_HASH_FUNCS
def IsRelevantAndUpdate(self, tx, tx_hash):
# Not useful for a client, so not implemented yet.
raise NotImplementedError
__struct = struct.Struct(b'<IIB')
@classmethod
def stream_deserialize(cls, f):
vData = bytearray(bitcoin.core.serialize.BytesSerializer.stream_deserialize(f))
(nHashFuncs,
nTweak,
nFlags) = CBloomFilter.__struct.unpack(bitcoin.core.ser_read(f, CBloomFilter.__struct.size))
# These arguments can be fake, the real values are set just after
deserialized = cls(1, 0.01, 0, CBloomFilter.UPDATE_ALL)
deserialized.vData = vData
deserialized.nHashFuncs = nHashFuncs
deserialized.nTweak = nTweak
deserialized.nFlags = nFlags
return deserialized
def stream_serialize(self, f):
bitcoin.core.serialize.BytesSerializer.stream_serialize(self.vData, f)
f.write(self.__struct.pack(self.nHashFuncs, self.nTweak, self.nFlags))
__all__ = (
'MurmurHash3',
'CBloomFilter',
)
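# Usage sketch (values are arbitrary): a filter sized for three elements with
# a 1% false-positive rate; nTweak=0 and UPDATE_ALL are reasonable defaults
# for a local test.
if __name__ == '__main__':
    f = CBloomFilter(3, 0.01, 0, CBloomFilter.UPDATE_ALL)
    f.insert(b'hello')
    assert f.contains(b'hello')  # inserted elements are always found
    # Absent elements are *probably* absent; false positives are possible.
    print(f.contains(b'absent'), f.nHashFuncs, len(f.vData))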
|
PypiClean
|
/v3/model/instance_item.py
|
import pprint
import re
import six
class InstanceItem:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'instance_id': 'str',
'instance_name': 'str',
'tags': 'list[InstanceItemTagItem]'
}
attribute_map = {
'instance_id': 'instance_id',
'instance_name': 'instance_name',
'tags': 'tags'
}
def __init__(self, instance_id=None, instance_name=None, tags=None):
"""InstanceItem - a model defined in huaweicloud sdk"""
self._instance_id = None
self._instance_name = None
self._tags = None
self.discriminator = None
self.instance_id = instance_id
self.instance_name = instance_name
self.tags = tags
@property
def instance_id(self):
"""Gets the instance_id of this InstanceItem.
Instance ID.
:return: The instance_id of this InstanceItem.
:rtype: str
"""
return self._instance_id
@instance_id.setter
def instance_id(self, instance_id):
"""Sets the instance_id of this InstanceItem.
Instance ID.
:param instance_id: The instance_id of this InstanceItem.
:type: str
"""
self._instance_id = instance_id
@property
def instance_name(self):
"""Gets the instance_name of this InstanceItem.
Instance name.
:return: The instance_name of this InstanceItem.
:rtype: str
"""
return self._instance_name
@instance_name.setter
def instance_name(self, instance_name):
"""Sets the instance_name of this InstanceItem.
Instance name.
:param instance_name: The instance_name of this InstanceItem.
:type: str
"""
self._instance_name = instance_name
@property
def tags(self):
"""Gets the tags of this InstanceItem.
Tag list. Defaults to an empty array if there are no tags.
:return: The tags of this InstanceItem.
:rtype: list[InstanceItemTagItem]
"""
return self._tags
@tags.setter
def tags(self, tags):
"""Sets the tags of this InstanceItem.
Tag list. Defaults to an empty array if there are no tags.
:param tags: The tags of this InstanceItem.
:type: list[InstanceItemTagItem]
"""
self._tags = tags
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, InstanceItem):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
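# Usage sketch: constructing a model instance and serializing it.
#
#     item = InstanceItem(instance_id="id-123", instance_name="demo", tags=[])
#     print(item.to_dict())
#     # {'instance_id': 'id-123', 'instance_name': 'demo', 'tags': []}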
|
PypiClean
|
/fastapi_user_auth-0.4.3.tar.gz/fastapi_user_auth-0.4.3/fastapi_user_auth/site.py
|
from typing import Type
from fastapi import FastAPI
from fastapi_amis_admin.admin import AdminSite, Settings
from fastapi_amis_admin.amis.components import ActionType, App, Dialog, Flex, Service
from fastapi_amis_admin.amis.constants import SizeEnum
from fastapi_amis_admin.amis.types import AmisAPI
from fastapi_amis_admin.crud.utils import SqlalchemyDatabase
from fastapi_amis_admin.utils.translation import i18n as _
from starlette.requests import Request
from fastapi_user_auth.app import UserAuthApp as DefaultUserAuthApp
from fastapi_user_auth.auth import Auth
class AuthAdminSite(AdminSite):
auth: Auth = None
UserAuthApp: Type[DefaultUserAuthApp] = DefaultUserAuthApp
def __init__(self, settings: Settings, fastapi: FastAPI = None, engine: SqlalchemyDatabase = None, auth: Auth = None):
super().__init__(settings, fastapi, engine)
self.auth = auth or self.auth or Auth(db=self.db)
self.register_admin(self.UserAuthApp)
async def get_page(self, request: Request) -> App:
app = await super().get_page(request)
user_auth_app = self.get_admin_or_create(self.UserAuthApp)
app.header = Flex(
className="w-full",
justify="flex-end",
alignItems="flex-end",
items=[
app.header,
{
"type": "dropdown-button",
"label": f"{request.user.username}",
"trigger": "hover",
"icon": "fa fa-user",
"buttons": [
ActionType.Dialog(
label=_("User Profile"),
dialog=Dialog(
title=_("User Profile"),
actions=[],
size=SizeEnum.lg,
body=Service(
schemaApi=AmisAPI(
method="post",
url=f"{user_auth_app.router_path}/form/userinfo",
cache=600000,
responseData={"&": "${body}"},
)
),
),
),
ActionType.Url(label=_("Sign out"), url=f"{user_auth_app.router_path}/logout"),
],
},
],
)
return app
async def has_page_permission(self, request: Request) -> bool:
return await self.auth.requires(response=False)(request)
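# Usage sketch (the database URL and mount call below are assumptions based on
# the usual fastapi-amis-admin wiring, not taken from this module):
#
#     from fastapi import FastAPI
#     from fastapi_amis_admin.admin import Settings
#
#     app = FastAPI()
#     site = AuthAdminSite(settings=Settings(database_url_async="sqlite+aiosqlite:///amisadmin.db"))
#     site.mount_app(app)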
|
PypiClean
|
/Craft_xai-0.0.3-py3-none-any.whl/craft/craft_tf.py
|
from abc import ABC, abstractmethod
from typing import Union, Tuple, List, Callable, Dict, Optional, Any
import tensorflow as tf
import numpy as np
from sklearn.decomposition import NMF
from sklearn.exceptions import NotFittedError
from .sobol.sampler import HaltonSequence
from .sobol.estimators import JansenEstimator
class BaseConceptExtractor(ABC):
"""
Base class for concept extraction models.
Parameters
----------
input_to_latent : Callable
The first part of the model taking an input and returning
positive activations, g(.) in the original paper.
latent_to_logit : Callable
The second part of the model taking activation and returning
logits, h(.) in the original paper.
number_of_concepts : int
The number of concepts to extract.
batch_size : int, optional
The batch size to use during training and prediction. Default is 64.
"""
def __init__(self, input_to_latent : Callable,
latent_to_logit : Optional[Callable] = None,
number_of_concepts: int = 20,
batch_size: int = 64):
# sanity checks
assert(number_of_concepts > 0), "number_of_concepts must be greater than 0"
assert(batch_size > 0), "batch_size must be greater than 0"
assert(callable(input_to_latent)), "input_to_latent must be a callable function"
self.input_to_latent = input_to_latent
self.latent_to_logit = latent_to_logit
self.number_of_concepts = number_of_concepts
self.batch_size = batch_size
@abstractmethod
def fit(self, inputs):
"""
Fit the CAVs to the input data.
Parameters
----------
inputs : array-like
The input data to fit the model on.
Returns
-------
tuple
A tuple containing the input data and the matrices (U, W) that factorize the data.
"""
raise NotImplementedError
@abstractmethod
def transform(self, inputs):
"""
Transform the input data into a concepts embedding.
Parameters
----------
inputs : array-like
The input data to transform.
Returns
-------
array-like
The transformed embedding of the input data.
"""
raise NotImplementedError
class Craft(BaseConceptExtractor):
"""
Class Implementing the CRAFT Concept Extraction Mechanism.
Parameters
----------
input_to_latent : Callable
The first part of the model taking an input and returning
positive activations, g(.) in the original paper.
latent_to_logit : Callable
The second part of the model taking activation and returning
logits, h(.) in the original paper.
number_of_concepts : int
The number of concepts to extract.
batch_size : int, optional
The batch size to use during training and prediction. Default is 64.
patch_size : int, optional
The size of the patches to extract from the input data. Default is 64.
"""
def __init__(self, input_to_latent : Callable,
latent_to_logit : Optional[Callable] = None,
number_of_concepts: int = 20,
batch_size: int = 64,
patch_size: int = 64):
super().__init__(input_to_latent, latent_to_logit, number_of_concepts, batch_size)
self.patch_size = patch_size
self.activation_shape = None
def fit(self, inputs : np.ndarray):
"""
Fit the Craft model to the input data.
Parameters
----------
inputs : np.ndarray
Input data of shape (n_samples, height, width, channels).
(x1, x2, ..., xn) in the paper.
Returns
-------
(X, U, W)
A tuple containing the crops (X in the paper),
the concepts values (U) and the concepts basis (W).
"""
# extract patches from the input data
strides = int(self.patch_size * 0.80)
patches = tf.image.extract_patches(images=inputs,
sizes=[1, self.patch_size, self.patch_size, 1],
strides=[1, strides, strides, 1],
rates=[1, 1, 1, 1],
padding='VALID')
patches = tf.reshape(patches, (-1, self.patch_size, self.patch_size, 3))
# encode the patches and obtain the activations
input_width, input_height = inputs.shape[1], inputs.shape[2]
activations = self.input_to_latent.predict(tf.image.resize(patches, (input_width, input_height), method="bicubic"),
batch_size=self.batch_size,
verbose=False)
assert np.min(activations) >= 0.0, "Activations must be positive."
# if the activations have shape (n_samples, height, width, n_channels),
# apply average pooling
if len(activations.shape) == 4:
activations = tf.reduce_mean(activations, axis=(1, 2))
# apply NMF to the activations to obtain matrices U and W
reducer = NMF(n_components=self.number_of_concepts, alpha_W=1e-2)
U = tf.cast(reducer.fit_transform(tf.nn.relu(activations)), tf.float32)
W = tf.cast(reducer.components_, tf.float32)
# store the factorizer and W as attributes of the Craft instance
self.reducer = reducer
self.W = tf.cast(W, tf.float32)
return patches, U, W
def check_if_fitted(self):
"""Checks if the factorization model has been fitted to input data.
Raises
------
NotFittedError
If the factorization model has not been fitted to input data.
"""
if not hasattr(self, 'reducer'):
raise NotFittedError("The factorization model has not been fitted to input data yet.")
def transform(self, inputs : np.ndarray, activations : np.ndarray = None):
"""Transforms the inputs data into its concept representation.
Parameters
----------
inputs : numpy array or Tensor
The input data to be transformed.
activations: numpy array or Tensor, optional
Pre-computed activations of the input data. If not provided, the activations
will be computed using the input_to_latent model.
Returns
-------
U : Tensor
The concept value (U) of the inputs.
"""
self.check_if_fitted()
if activations is None:
    activations = self.input_to_latent.predict(inputs, batch_size=self.batch_size,
                                               verbose=False)
if len(activations.shape) == 4:
original_shape = activations.shape[:-1]
activations = np.reshape(activations, (-1, activations.shape[-1]))
else:
original_shape = (len(activations),)
W_dtype = self.reducer.components_.dtype
U = self.reducer.transform(np.array(activations, dtype=W_dtype))
U = np.reshape(U, (*original_shape, U.shape[-1]))
return tf.cast(U, tf.float32)
def estimate_importance(self, inputs, class_id, nb_design = 32):
"""
Estimates the importance of each concept for a given class.
Parameters
----------
inputs : numpy array or Tensor
The input data to be transformed.
class_id : int
The class id to estimate the importance for.
nb_design : int, optional
The number of design to use for the importance estimation. Default is 32.
Returns
-------
importances : list
The Sobol total index (importance score) for each concept.
"""
self.check_if_fitted()
U = self.transform(inputs)
masks = HaltonSequence()(self.number_of_concepts, nb_design = nb_design)
estimator = JansenEstimator()
importances = []
if len(U.shape) == 2:
# apply the original method of the paper
for u in U:
u_perturbated = u[None, :] * masks
a_perturbated = u_perturbated @ self.W
y_pred = self.latent_to_logit.predict(a_perturbated, batch_size=self.batch_size,
verbose=False)
y_pred = y_pred[:, class_id]
stis = estimator(masks, y_pred, nb_design)
importances.append(stis)
elif len(U.shape) == 4:
# apply a re-parameterization trick and use mask on all localization for a given
# concept id to estimate sobol indices
for u in U:
u_perturbated = u[None, :] * masks[:, None, None, :]
a_perturbated = np.reshape(u_perturbated, (-1, u.shape[-1])) @ self.W
a_perturbated = np.reshape(a_perturbated, (len(masks), U.shape[1], U.shape[2], -1))
y_pred = self.latent_to_logit.predict(a_perturbated, batch_size=self.batch_size,
verbose=False)
y_pred = y_pred[:, class_id]
stis = estimator(masks, y_pred, nb_design)
importances.append(stis)
return np.mean(importances, 0)
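# Usage sketch (the g/h split is an assumption: g maps images to positive
# activations and h maps activations to logits, matching the constructor):
#
#     craft = Craft(input_to_latent=g, latent_to_logit=h,
#                   number_of_concepts=10, patch_size=64)
#     crops, U, W = craft.fit(images)               # images: (n, H, W, 3)
#     importances = craft.estimate_importance(images, class_id=0)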
|
PypiClean
|
/moodle-sdk-2.0.8.tar.gz/moodle-sdk-2.0.8/mdk/css.py
|
import logging
import os
from .tools import process
from .config import Conf
C = Conf()
class Css(object):
"""Class wrapping CSS related functions"""
_M = None
_debug = False
_compiler = 'grunt'
def __init__(self, M):
self._M = M
def setCompiler(self, compiler):
self._compiler = compiler
def setDebug(self, debug):
self._debug = debug
def compile(self, theme='bootstrapbase', sheets=None):
"""Compile LESS sheets contained within a theme"""
source = self.getThemeLessPath(theme)
dest = self.getThemeCssPath(theme)
if not os.path.isdir(source):
raise Exception('Unknown theme %s, or less directory not found' % (theme))
if not sheets:
# Guess the sheets from the theme less folder.
sheets = []
for candidate in os.listdir(source):
if os.path.isfile(os.path.join(source, candidate)) and candidate.endswith('.less'):
sheets.append(os.path.splitext(candidate)[0])
elif not isinstance(sheets, list):
sheets = [sheets]
if len(sheets) < 1:
logging.warning('Could not find any sheets')
return False
hadErrors = False
if self._compiler == 'grunt':
sheets = ['moodle']
for name in sheets:
sheet = name + '.less'
destSheet = name + '.css'
if not os.path.isfile(os.path.join(source, sheet)):
logging.warning('Could not find file %s' % (sheet))
hadErrors = True
continue
try:
if self._compiler == 'grunt':
compiler = Grunt(source, os.path.join(source, sheet), os.path.join(dest, destSheet))
elif self._compiler == 'recess':
compiler = Recess(source, os.path.join(source, sheet), os.path.join(dest, destSheet))
elif self._compiler == 'lessc':
compiler = Lessc(self.getThemeDir(), os.path.join(source, sheet), os.path.join(dest, destSheet))
compiler.setDebug(self._debug)
compiler.execute()
except CssCompileFailed:
logging.warning('Failed compilation of %s' % (sheet))
hadErrors = True
continue
else:
logging.info('Compiled %s to %s' % (sheet, destSheet))
return not hadErrors
def getThemeCssPath(self, theme):
return os.path.join(self.getThemePath(theme), 'style')
def getThemeLessPath(self, theme):
return os.path.join(self.getThemePath(theme), 'less')
def getThemeDir(self):
return os.path.join(self._M.get('path'), 'theme')
def getThemePath(self, theme):
return os.path.join(self.getThemeDir(), theme)
class Compiler(object):
"""LESS compiler abstract"""
_compress = True
_debug = False
_cwd = None
_source = None
_dest = None
def __init__(self, cwd, source, dest):
self._cwd = cwd
self._source = source
self._dest = dest
def execute(self):
raise Exception('Compiler does not implement execute() method')
def setCompress(self, compress):
self._compress = compress
def setDebug(self, debug):
self._debug = debug
class Grunt(Compiler):
"""Grunt compiler"""
def execute(self):
executable = C.get('grunt')
if not executable:
raise Exception('Could not find executable path')
cmd = [executable, 'css']
(code, out, err) = process(cmd, self._cwd)
if code != 0 or len(out) == 0:
raise CssCompileFailed('Error during compile')
class Recess(Compiler):
"""Recess compiler"""
def execute(self):
executable = C.get('recess')
if not executable:
raise Exception('Could not find executable path')
cmd = [executable, self._source, '--compile']
if self._compress:
cmd.append('--compress')
(code, out, err) = process(cmd, self._cwd)
if code != 0 or len(out) == 0:
raise CssCompileFailed('Error during compile')
# Saving to destination
with open(self._dest, 'w') as f:
f.write(out)
class Lessc(Compiler):
"""Lessc compiler"""
def execute(self):
executable = C.get('lessc')
if not executable:
raise Exception('Could not find executable path')
cmd = [executable]
sourcePath = os.path.relpath(self._source, self._cwd)
sourceDir = os.path.dirname(sourcePath)
if self._debug:
cmd.append('--source-map-rootpath=' + sourceDir)
cmd.append('--source-map-map-inline')
self.setCompress(False)
if self._compress:
cmd.append('--compress')
# Append the source and destination.
cmd.append(sourcePath)
cmd.append(os.path.relpath(self._dest, self._cwd))
(code, out, err) = process(cmd, self._cwd)
if code != 0 or len(out) != 0:
raise CssCompileFailed('Error during compile')
class CssCompileFailed(Exception):
pass
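# Usage sketch (M is an mdk Moodle instance; the theme and sheet names are
# only examples):
#
#     css = Css(M)
#     css.setCompiler('lessc')
#     css.compile(theme='bootstrapbase', sheets=['moodle'])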
|
PypiClean
|
/flora-blockchain-0.2.5.tar.gz/flora-blockchain-0.2.5/flora/wallet/cc_wallet/cc_wallet.py
|
from __future__ import annotations
import logging
import time
from dataclasses import replace
from secrets import token_bytes
from typing import Any, Dict, List, Optional, Set
from blspy import AugSchemeMPL, G2Element
from flora.consensus.cost_calculator import calculate_cost_of_program, NPCResult
from flora.full_node.bundle_tools import simple_solution_generator
from flora.full_node.mempool_check_conditions import get_name_puzzle_conditions
from flora.protocols.wallet_protocol import PuzzleSolutionResponse
from flora.types.blockchain_format.coin import Coin
from flora.types.blockchain_format.program import Program
from flora.types.blockchain_format.sized_bytes import bytes32
from flora.types.coin_solution import CoinSolution
from flora.types.generator_types import BlockGenerator
from flora.types.spend_bundle import SpendBundle
from flora.util.byte_types import hexstr_to_bytes
from flora.util.condition_tools import conditions_dict_for_solution, pkm_pairs_for_conditions_dict
from flora.util.ints import uint8, uint32, uint64, uint128
from flora.util.json_util import dict_to_json_str
from flora.wallet.block_record import HeaderBlockRecord
from flora.wallet.cc_wallet.cc_info import CCInfo
from flora.wallet.cc_wallet.cc_utils import (
CC_MOD,
SpendableCC,
cc_puzzle_for_inner_puzzle,
cc_puzzle_hash_for_inner_puzzle_hash,
get_lineage_proof_from_coin_and_puz,
spend_bundle_for_spendable_ccs,
uncurry_cc,
)
from flora.wallet.derivation_record import DerivationRecord
from flora.wallet.puzzles.genesis_by_coin_id_with_0 import (
create_genesis_or_zero_coin_checker,
genesis_coin_id_for_genesis_coin_checker,
lineage_proof_for_genesis,
)
from flora.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle import (
DEFAULT_HIDDEN_PUZZLE_HASH,
calculate_synthetic_secret_key,
)
from flora.wallet.transaction_record import TransactionRecord
from flora.wallet.util.transaction_type import TransactionType
from flora.wallet.util.wallet_types import WalletType
from flora.wallet.wallet import Wallet
from flora.wallet.wallet_coin_record import WalletCoinRecord
from flora.wallet.wallet_info import WalletInfo
class CCWallet:
wallet_state_manager: Any
log: logging.Logger
wallet_info: WalletInfo
cc_coin_record: WalletCoinRecord
cc_info: CCInfo
standard_wallet: Wallet
base_puzzle_program: Optional[bytes]
base_inner_puzzle_hash: Optional[bytes32]
cost_of_single_tx: Optional[int]
@staticmethod
async def create_new_cc(
wallet_state_manager: Any,
wallet: Wallet,
amount: uint64,
):
self = CCWallet()
self.cost_of_single_tx = None
self.base_puzzle_program = None
self.base_inner_puzzle_hash = None
self.standard_wallet = wallet
self.log = logging.getLogger(__name__)
self.wallet_state_manager = wallet_state_manager
self.cc_info = CCInfo(None, [])
info_as_string = bytes(self.cc_info).hex()
self.wallet_info = await wallet_state_manager.user_store.create_wallet(
"CC Wallet", WalletType.COLOURED_COIN, info_as_string
)
if self.wallet_info is None:
raise ValueError("Internal Error")
try:
spend_bundle = await self.generate_new_coloured_coin(amount)
except Exception:
await wallet_state_manager.user_store.delete_wallet(self.id())
raise
await self.wallet_state_manager.add_new_wallet(self, self.id())
# Change and actual coloured coin
non_ephemeral_spends: List[Coin] = spend_bundle.not_ephemeral_additions()
cc_coin = None
puzzle_store = self.wallet_state_manager.puzzle_store
for c in non_ephemeral_spends:
info = await puzzle_store.wallet_info_for_puzzle_hash(c.puzzle_hash)
if info is None:
raise ValueError("Internal Error")
id, wallet_type = info
if id == self.id():
cc_coin = c
if cc_coin is None:
raise ValueError("Internal Error, unable to generate new coloured coin")
regular_record = TransactionRecord(
confirmed_at_height=uint32(0),
created_at_time=uint64(int(time.time())),
to_puzzle_hash=cc_coin.puzzle_hash,
amount=uint64(cc_coin.amount),
fee_amount=uint64(0),
confirmed=False,
sent=uint32(0),
spend_bundle=spend_bundle,
additions=spend_bundle.additions(),
removals=spend_bundle.removals(),
wallet_id=self.wallet_state_manager.main_wallet.id(),
sent_to=[],
trade_id=None,
type=uint32(TransactionType.OUTGOING_TX.value),
name=token_bytes(),
)
cc_record = TransactionRecord(
confirmed_at_height=uint32(0),
created_at_time=uint64(int(time.time())),
to_puzzle_hash=cc_coin.puzzle_hash,
amount=uint64(cc_coin.amount),
fee_amount=uint64(0),
confirmed=False,
sent=uint32(10),
spend_bundle=None,
additions=spend_bundle.additions(),
removals=spend_bundle.removals(),
wallet_id=self.id(),
sent_to=[],
trade_id=None,
type=uint32(TransactionType.INCOMING_TX.value),
name=token_bytes(),
)
await self.standard_wallet.push_transaction(regular_record)
await self.standard_wallet.push_transaction(cc_record)
return self
@staticmethod
async def create_wallet_for_cc(
wallet_state_manager: Any,
wallet: Wallet,
genesis_checker_hex: str,
) -> CCWallet:
self = CCWallet()
self.cost_of_single_tx = None
self.base_puzzle_program = None
self.base_inner_puzzle_hash = None
self.standard_wallet = wallet
self.log = logging.getLogger(__name__)
self.wallet_state_manager = wallet_state_manager
self.cc_info = CCInfo(Program.from_bytes(bytes.fromhex(genesis_checker_hex)), [])
info_as_string = bytes(self.cc_info).hex()
self.wallet_info = await wallet_state_manager.user_store.create_wallet(
"CC Wallet", WalletType.COLOURED_COIN, info_as_string
)
if self.wallet_info is None:
raise Exception("wallet_info is None")
await self.wallet_state_manager.add_new_wallet(self, self.id())
return self
@staticmethod
async def create(
wallet_state_manager: Any,
wallet: Wallet,
wallet_info: WalletInfo,
) -> CCWallet:
self = CCWallet()
self.log = logging.getLogger(__name__)
self.cost_of_single_tx = None
self.wallet_state_manager = wallet_state_manager
self.wallet_info = wallet_info
self.standard_wallet = wallet
self.cc_info = CCInfo.from_bytes(hexstr_to_bytes(self.wallet_info.data))
self.base_puzzle_program = None
self.base_inner_puzzle_hash = None
return self
@classmethod
def type(cls) -> uint8:
return uint8(WalletType.COLOURED_COIN)
def id(self) -> uint32:
return self.wallet_info.id
async def get_confirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint64:
if record_list is None:
record_list = await self.wallet_state_manager.coin_store.get_unspent_coins_for_wallet(self.id())
amount: uint64 = uint64(0)
for record in record_list:
lineage = await self.get_lineage_proof_for_coin(record.coin)
if lineage is not None:
amount = uint64(amount + record.coin.amount)
self.log.info(f"Confirmed balance for cc wallet {self.id()} is {amount}")
return uint64(amount)
async def get_unconfirmed_balance(self, unspent_records=None) -> uint128:
confirmed = await self.get_confirmed_balance(unspent_records)
unconfirmed_tx: List[TransactionRecord] = await self.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(
self.id()
)
addition_amount = 0
removal_amount = 0
for record in unconfirmed_tx:
if TransactionType(record.type) is TransactionType.INCOMING_TX:
addition_amount += record.amount
else:
removal_amount += record.amount
result = confirmed - removal_amount + addition_amount
self.log.info(f"Unconfirmed balance for cc wallet {self.id()} is {result}")
return uint128(result)
async def get_max_send_amount(self, records=None):
spendable: List[WalletCoinRecord] = list(
await self.wallet_state_manager.get_spendable_coins_for_wallet(self.id(), records)
)
if len(spendable) == 0:
return 0
spendable.sort(reverse=True, key=lambda record: record.coin.amount)
if self.cost_of_single_tx is None:
coin = spendable[0].coin
tx = await self.generate_signed_transaction(
[coin.amount], [coin.puzzle_hash], coins={coin}, ignore_max_send_amount=True
)
program: BlockGenerator = simple_solution_generator(tx.spend_bundle)
# npc contains names of the coins removed, puzzle_hashes and their spend conditions
result: NPCResult = get_name_puzzle_conditions(
program,
self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM,
cost_per_byte=self.wallet_state_manager.constants.COST_PER_BYTE,
safe_mode=True,
)
cost_result: uint64 = calculate_cost_of_program(
program.program, result, self.wallet_state_manager.constants.COST_PER_BYTE
)
self.cost_of_single_tx = cost_result
self.log.info(f"Cost of a single tx for standard wallet: {self.cost_of_single_tx}")
max_cost = self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM / 2 # avoid full block TXs
current_cost = 0
total_amount = 0
total_coin_count = 0
for record in spendable:
current_cost += self.cost_of_single_tx
total_amount += record.coin.amount
total_coin_count += 1
if current_cost + self.cost_of_single_tx > max_cost:
break
return total_amount
async def get_name(self):
return self.wallet_info.name
async def set_name(self, new_name: str):
new_info = replace(self.wallet_info, name=new_name)
self.wallet_info = new_info
await self.wallet_state_manager.user_store.update_wallet(self.wallet_info, False)
def get_colour(self) -> str:
assert self.cc_info.my_genesis_checker is not None
return bytes(self.cc_info.my_genesis_checker).hex()
async def coin_added(self, coin: Coin, height: uint32):
"""Notification from wallet state manager that wallet has been received."""
self.log.info(f"CC wallet has been notified that {coin} was added")
search_for_parent: bool = True
inner_puzzle = await self.inner_puzzle_for_cc_puzhash(coin.puzzle_hash)
lineage_proof = Program.to((1, [coin.parent_coin_info, inner_puzzle.get_tree_hash(), coin.amount]))
await self.add_lineage(coin.name(), lineage_proof, True)
for name, lineage_proofs in self.cc_info.lineage_proofs:
if coin.parent_coin_info == name:
search_for_parent = False
break
if search_for_parent:
data: Dict[str, Any] = {
"data": {
"action_data": {
"api_name": "request_puzzle_solution",
"height": height,
"coin_name": coin.parent_coin_info,
"received_coin": coin.name(),
}
}
}
data_str = dict_to_json_str(data)
await self.wallet_state_manager.create_action(
name="request_puzzle_solution",
wallet_id=self.id(),
wallet_type=self.type(),
callback="puzzle_solution_received",
done=False,
data=data_str,
in_transaction=True,
)
async def puzzle_solution_received(self, response: PuzzleSolutionResponse, action_id: int):
coin_name = response.coin_name
height = response.height
puzzle: Program = response.puzzle
r = uncurry_cc(puzzle)
header_hash = self.wallet_state_manager.blockchain.height_to_hash(height)
block: Optional[
HeaderBlockRecord
] = await self.wallet_state_manager.blockchain.block_store.get_header_block_record(header_hash)
if block is None:
return None
removals = block.removals
if r is not None:
mod_hash, genesis_coin_checker, inner_puzzle = r
self.log.info(f"parent: {coin_name} inner_puzzle for parent is {inner_puzzle}")
parent_coin = None
for coin in removals:
if coin.name() == coin_name:
parent_coin = coin
if parent_coin is None:
raise ValueError("Error in finding parent")
lineage_proof = get_lineage_proof_from_coin_and_puz(parent_coin, puzzle)
await self.add_lineage(coin_name, lineage_proof)
await self.wallet_state_manager.action_store.action_done(action_id)
async def get_new_inner_hash(self) -> bytes32:
return await self.standard_wallet.get_new_puzzlehash()
async def get_new_inner_puzzle(self) -> Program:
return await self.standard_wallet.get_new_puzzle()
async def get_puzzle_hash(self, new: bool):
return await self.standard_wallet.get_puzzle_hash(new)
async def get_new_puzzlehash(self) -> bytes32:
return await self.standard_wallet.get_new_puzzlehash()
def puzzle_for_pk(self, pubkey) -> Program:
inner_puzzle = self.standard_wallet.puzzle_for_pk(bytes(pubkey))
cc_puzzle: Program = cc_puzzle_for_inner_puzzle(CC_MOD, self.cc_info.my_genesis_checker, inner_puzzle)
self.base_puzzle_program = bytes(cc_puzzle)
self.base_inner_puzzle_hash = inner_puzzle.get_tree_hash()
return cc_puzzle
async def get_new_cc_puzzle_hash(self):
return (await self.wallet_state_manager.get_unused_derivation_record(self.id())).puzzle_hash
# Create a new coin of value 0 with a given colour
async def generate_zero_val_coin(self, send=True, exclude: List[Coin] = None) -> SpendBundle:
if self.cc_info.my_genesis_checker is None:
raise ValueError("My genesis checker is None")
if exclude is None:
exclude = []
coins = await self.standard_wallet.select_coins(0, exclude)
assert coins != set()
origin = coins.copy().pop()
origin_id = origin.name()
cc_inner = await self.get_new_inner_hash()
cc_puzzle_hash: Program = cc_puzzle_hash_for_inner_puzzle_hash(
CC_MOD, self.cc_info.my_genesis_checker, cc_inner
)
tx: TransactionRecord = await self.standard_wallet.generate_signed_transaction(
uint64(0), cc_puzzle_hash, uint64(0), origin_id, coins
)
assert tx.spend_bundle is not None
full_spend: SpendBundle = tx.spend_bundle
self.log.info(f"Generate zero val coin: cc_puzzle_hash is {cc_puzzle_hash}")
# generate eve coin so we can add future lineage_proofs even if we don't spend the eve coin
eve_coin = Coin(origin_id, cc_puzzle_hash, uint64(0))
await self.add_lineage(
eve_coin.name(),
Program.to(
(
1,
[eve_coin.parent_coin_info, cc_inner, eve_coin.amount],
)
),
)
await self.add_lineage(eve_coin.parent_coin_info, Program.to((0, [origin.as_list(), 1])))
if send:
regular_record = TransactionRecord(
confirmed_at_height=uint32(0),
created_at_time=uint64(int(time.time())),
to_puzzle_hash=cc_puzzle_hash,
amount=uint64(0),
fee_amount=uint64(0),
confirmed=False,
sent=uint32(10),
spend_bundle=full_spend,
additions=full_spend.additions(),
removals=full_spend.removals(),
wallet_id=uint32(1),
sent_to=[],
trade_id=None,
type=uint32(TransactionType.INCOMING_TX.value),
name=token_bytes(),
)
cc_record = TransactionRecord(
confirmed_at_height=uint32(0),
created_at_time=uint64(int(time.time())),
to_puzzle_hash=cc_puzzle_hash,
amount=uint64(0),
fee_amount=uint64(0),
confirmed=False,
sent=uint32(0),
spend_bundle=full_spend,
additions=full_spend.additions(),
removals=full_spend.removals(),
wallet_id=self.id(),
sent_to=[],
trade_id=None,
type=uint32(TransactionType.INCOMING_TX.value),
name=full_spend.name(),
)
await self.wallet_state_manager.add_transaction(regular_record)
await self.wallet_state_manager.add_pending_transaction(cc_record)
return full_spend
async def get_spendable_balance(self, records=None) -> uint64:
coins = await self.get_cc_spendable_coins(records)
amount = 0
for record in coins:
amount += record.coin.amount
return uint64(amount)
async def get_pending_change_balance(self) -> uint64:
unconfirmed_tx = await self.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(self.id())
addition_amount = 0
for record in unconfirmed_tx:
if not record.is_in_mempool():
continue
our_spend = False
for coin in record.removals:
# Don't count eve spend as change
if coin.parent_coin_info.hex() == self.get_colour():
continue
if await self.wallet_state_manager.does_coin_belong_to_wallet(coin, self.id()):
our_spend = True
break
if our_spend is not True:
continue
for coin in record.additions:
if await self.wallet_state_manager.does_coin_belong_to_wallet(coin, self.id()):
addition_amount += coin.amount
return uint64(addition_amount)
async def get_cc_spendable_coins(self, records=None) -> List[WalletCoinRecord]:
result: List[WalletCoinRecord] = []
record_list: Set[WalletCoinRecord] = await self.wallet_state_manager.get_spendable_coins_for_wallet(
self.id(), records
)
for record in record_list:
lineage = await self.get_lineage_proof_for_coin(record.coin)
if lineage is not None:
result.append(record)
return result
async def select_coins(self, amount: uint64) -> Set[Coin]:
"""
Returns a set of coins that can be used for generating a new transaction.
Note: Must be called under wallet state manager lock
"""
spendable_am = await self.get_confirmed_balance()
if amount > spendable_am:
error_msg = f"Can't select amount higher than our spendable balance {amount}, spendable {spendable_am}"
self.log.warning(error_msg)
raise ValueError(error_msg)
self.log.info(f"About to select coins for amount {amount}")
spendable: List[WalletCoinRecord] = await self.get_cc_spendable_coins()
total = 0
used_coins: Set = set()
# Use older coins first
spendable.sort(key=lambda r: r.confirmed_block_height)
# Try to use coins from the store, if there isn't enough of "unused"
# coins use change coins that are not confirmed yet
unconfirmed_removals: Dict[bytes32, Coin] = await self.wallet_state_manager.unconfirmed_removals_for_wallet(
self.id()
)
for coinrecord in spendable:
if total >= amount and len(used_coins) > 0:
break
if coinrecord.coin.name() in unconfirmed_removals:
continue
total += coinrecord.coin.amount
used_coins.add(coinrecord.coin)
self.log.info(f"Selected coin: {coinrecord.coin.name()} at height {coinrecord.confirmed_block_height}!")
# This happens when we couldn't use one of the coins because it's already used
# but unconfirmed, and we are waiting for the change. (unconfirmed_additions)
if total < amount:
raise ValueError(
"Can't make this transaction at the moment. Waiting for the change from the previous transaction."
)
self.log.info(f"Successfully selected coins: {used_coins}")
return used_coins
async def get_sigs(self, innerpuz: Program, innersol: Program, coin_name: bytes32) -> List[G2Element]:
puzzle_hash = innerpuz.get_tree_hash()
pubkey, private = await self.wallet_state_manager.get_keys(puzzle_hash)
synthetic_secret_key = calculate_synthetic_secret_key(private, DEFAULT_HIDDEN_PUZZLE_HASH)
sigs: List[G2Element] = []
error, conditions, cost = conditions_dict_for_solution(
innerpuz, innersol, self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM
)
if conditions is not None:
for _, msg in pkm_pairs_for_conditions_dict(
conditions, coin_name, self.wallet_state_manager.constants.AGG_SIG_ME_ADDITIONAL_DATA
):
signature = AugSchemeMPL.sign(synthetic_secret_key, msg)
sigs.append(signature)
return sigs
async def inner_puzzle_for_cc_puzhash(self, cc_hash: bytes32) -> Program:
record: DerivationRecord = await self.wallet_state_manager.puzzle_store.get_derivation_record_for_puzzle_hash(
cc_hash.hex()
)
inner_puzzle: Program = self.standard_wallet.puzzle_for_pk(bytes(record.pubkey))
return inner_puzzle
async def get_lineage_proof_for_coin(self, coin) -> Optional[Program]:
for name, proof in self.cc_info.lineage_proofs:
if name == coin.parent_coin_info:
return proof
return None
async def generate_signed_transaction(
self,
amounts: List[uint64],
puzzle_hashes: List[bytes32],
fee: uint64 = uint64(0),
origin_id: bytes32 = None,
coins: Set[Coin] = None,
ignore_max_send_amount: bool = False,
) -> TransactionRecord:
# Get coins and calculate amount of change required
outgoing_amount = uint64(sum(amounts))
total_outgoing = outgoing_amount + fee
if not ignore_max_send_amount:
max_send = await self.get_max_send_amount()
if total_outgoing > max_send:
raise ValueError(f"Can't send more than {max_send} in a single transaction")
if coins is None:
selected_coins: Set[Coin] = await self.select_coins(uint64(total_outgoing))
else:
selected_coins = coins
total_amount = sum([x.amount for x in selected_coins])
change = total_amount - total_outgoing
primaries = []
for amount, puzzle_hash in zip(amounts, puzzle_hashes):
primaries.append({"puzzlehash": puzzle_hash, "amount": amount})
if change > 0:
changepuzzlehash = await self.get_new_inner_hash()
primaries.append({"puzzlehash": changepuzzlehash, "amount": change})
coin = list(selected_coins)[0]
inner_puzzle = await self.inner_puzzle_for_cc_puzhash(coin.puzzle_hash)
if self.cc_info.my_genesis_checker is None:
raise ValueError("My genesis checker is None")
genesis_id = genesis_coin_id_for_genesis_coin_checker(self.cc_info.my_genesis_checker)
spendable_cc_list = []
innersol_list = []
sigs: List[G2Element] = []
first = True
for coin in selected_coins:
coin_inner_puzzle = await self.inner_puzzle_for_cc_puzhash(coin.puzzle_hash)
if first:
first = False
if fee > 0:
innersol = self.standard_wallet.make_solution(primaries=primaries, fee=fee)
else:
innersol = self.standard_wallet.make_solution(primaries=primaries)
else:
innersol = self.standard_wallet.make_solution()
innersol_list.append(innersol)
lineage_proof = await self.get_lineage_proof_for_coin(coin)
assert lineage_proof is not None
spendable_cc_list.append(SpendableCC(coin, genesis_id, inner_puzzle, lineage_proof))
sigs = sigs + await self.get_sigs(coin_inner_puzzle, innersol, coin.name())
spend_bundle = spend_bundle_for_spendable_ccs(
CC_MOD,
self.cc_info.my_genesis_checker,
spendable_cc_list,
innersol_list,
sigs,
)
# TODO add support for array in stored records
return TransactionRecord(
confirmed_at_height=uint32(0),
created_at_time=uint64(int(time.time())),
to_puzzle_hash=puzzle_hashes[0],
amount=uint64(outgoing_amount),
fee_amount=uint64(0),
confirmed=False,
sent=uint32(0),
spend_bundle=spend_bundle,
additions=spend_bundle.additions(),
removals=spend_bundle.removals(),
wallet_id=self.id(),
sent_to=[],
trade_id=None,
type=uint32(TransactionType.OUTGOING_TX.value),
name=spend_bundle.name(),
)
async def add_lineage(self, name: bytes32, lineage: Optional[Program], in_transaction=False):
self.log.info(f"Adding parent {name}: {lineage}")
current_list = self.cc_info.lineage_proofs.copy()
current_list.append((name, lineage))
cc_info: CCInfo = CCInfo(self.cc_info.my_genesis_checker, current_list)
await self.save_info(cc_info, in_transaction)
async def save_info(self, cc_info: CCInfo, in_transaction):
self.cc_info = cc_info
current_info = self.wallet_info
data_str = bytes(cc_info).hex()
wallet_info = WalletInfo(current_info.id, current_info.name, current_info.type, data_str)
self.wallet_info = wallet_info
await self.wallet_state_manager.user_store.update_wallet(wallet_info, in_transaction)
async def generate_new_coloured_coin(self, amount: uint64) -> SpendBundle:
coins = await self.standard_wallet.select_coins(amount)
origin = coins.copy().pop()
origin_id = origin.name()
cc_inner_hash = await self.get_new_inner_hash()
await self.add_lineage(origin_id, Program.to((0, [origin.as_list(), 0])))
genesis_coin_checker = create_genesis_or_zero_coin_checker(origin_id)
minted_cc_puzzle_hash = cc_puzzle_hash_for_inner_puzzle_hash(CC_MOD, genesis_coin_checker, cc_inner_hash)
tx_record: TransactionRecord = await self.standard_wallet.generate_signed_transaction(
amount, minted_cc_puzzle_hash, uint64(0), origin_id, coins
)
assert tx_record.spend_bundle is not None
lineage_proof: Optional[Program] = lineage_proof_for_genesis(origin)
lineage_proofs = [(origin_id, lineage_proof)]
cc_info: CCInfo = CCInfo(genesis_coin_checker, lineage_proofs)
await self.save_info(cc_info, False)
return tx_record.spend_bundle
async def create_spend_bundle_relative_amount(self, cc_amount, zero_coin: Coin = None) -> Optional[SpendBundle]:
# If we're losing value then get coloured coins with at least that much value
# If we're gaining value then our amount doesn't matter
if cc_amount < 0:
cc_spends = await self.select_coins(abs(cc_amount))
else:
if zero_coin is None:
return None
cc_spends = set()
cc_spends.add(zero_coin)
if cc_spends is None:
return None
# Calculate output amount given relative difference and sum of actual values
spend_value = sum([coin.amount for coin in cc_spends])
cc_amount = spend_value + cc_amount
# Loop through coins and create solution for innerpuzzle
list_of_solutions = []
output_created = None
sigs: List[G2Element] = []
for coin in cc_spends:
if output_created is None:
newinnerpuzhash = await self.get_new_inner_hash()
innersol = self.standard_wallet.make_solution(
primaries=[{"puzzlehash": newinnerpuzhash, "amount": cc_amount}]
)
output_created = coin
else:
innersol = self.standard_wallet.make_solution(consumed=[output_created.name()])
innerpuz: Program = await self.inner_puzzle_for_cc_puzhash(coin.puzzle_hash)
sigs = sigs + await self.get_sigs(innerpuz, innersol, coin.name())
lineage_proof = await self.get_lineage_proof_for_coin(coin)
puzzle_reveal = cc_puzzle_for_inner_puzzle(CC_MOD, self.cc_info.my_genesis_checker, innerpuz)
# Use coin info to create solution and add coin and solution to list of CoinSolutions
solution = [
innersol,
coin.as_list(),
lineage_proof,
None,
None,
None,
None,
None,
]
list_of_solutions.append(CoinSolution(coin, puzzle_reveal, Program.to(solution)))
aggsig = AugSchemeMPL.aggregate(sigs)
return SpendBundle(list_of_solutions, aggsig)
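# Editor's note: a minimal, self-contained sketch (not part of the original wallet) of
# the change arithmetic generate_signed_transaction performs above. The amounts and fee
# are hypothetical; the real method additionally selects coins, builds inner solutions,
# signs them and wraps everything in a SpendBundle.
def _sketch_change_calculation(selected_amounts, outgoing_amounts, fee=0):
    """Return the change value the wallet would add as an extra primary output."""
    total_outgoing = sum(outgoing_amounts) + fee
    total_selected = sum(selected_amounts)
    if total_selected < total_outgoing:
        raise ValueError("selected coins do not cover outputs plus fee")
    return total_selected - total_outgoing
# Example: _sketch_change_calculation([1000, 500], [1200], fee=100) == 200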
|
PypiClean
|
/zeno_etl_libs-1.0.126.tar.gz/zeno_etl_libs-1.0.126/glue-jobs/src/scripts/distributor_ranking2/distributor_ranking2_main.py
|
import os
import sys
import argparse
import pandas as pd
import numpy as np
import datetime as dt
from dateutil.tz import gettz
from ast import literal_eval
from zeno_etl_libs.helper.aws.s3 import S3
from zeno_etl_libs.db.db import DB, MySQL
from zeno_etl_libs.logger import get_logger
from zeno_etl_libs.helper import helper
from zeno_etl_libs.helper.email.email import Email
from zeno_etl_libs.utils.distributor_ranking2.distributor_ranking_calc import \
ranking_calc_dc, ranking_calc_franchisee
from zeno_etl_libs.utils.distributor_ranking2.tech_processing import \
process_tech_df
def main(debug_mode, reset_date, time_interval_dc, time_interval_franchisee,
volume_fraction, franchisee_ranking_active, franchisee_stores,
as_ms_weights_dc_drug_lvl, as_ms_weights_dc_type_lvl,
pr_weights_dc_drug_lvl, pr_weights_dc_type_lvl,
weights_franchisee_drug_lvl, weights_franchisee_type_lvl, s3,
rs_db_read, rs_db_write, read_schema, write_schema):
mysql_write = MySQL(read_only=False)
logger.info(f"Debug Mode: {debug_mode}")
status = 'Failed'
# define empty variables to return in case of fail
final_ranks_franchisee = pd.DataFrame()
ranked_features_franchisee = pd.DataFrame()
dc_evaluated = []
franchisee_stores_evaluated = []
# ensure all weights adds upto 1
sum_all_wts = sum(list(as_ms_weights_dc_drug_lvl.values())) + \
sum(list(as_ms_weights_dc_type_lvl.values())) + \
sum(list(pr_weights_dc_drug_lvl.values())) + \
sum(list(pr_weights_dc_type_lvl.values())) + \
sum(list(weights_franchisee_drug_lvl.values())) + \
sum(list(weights_franchisee_type_lvl.values()))
    if sum_all_wts == 6:  # each of the six weight dicts must sum to exactly 1
        logger.info("All input weights add up to 1 | Continue Execution")
    else:
        logger.info("Input weights do not add up to 1 | Stop Execution")
return status, reset_date, dc_evaluated, franchisee_stores_evaluated
try:
# calculate ranks
logger.info("Calculating Zippin DC-level Ranking")
ranked_features_dc, final_ranks_dc = ranking_calc_dc(
reset_date, time_interval_dc, as_ms_weights_dc_drug_lvl,
as_ms_weights_dc_type_lvl, pr_weights_dc_drug_lvl,
pr_weights_dc_type_lvl, logger, db=rs_db_read, schema=read_schema)
if franchisee_ranking_active == 'Y':
logger.info("Calculating Franchisee-level Ranking")
ranked_features_franchisee, \
final_ranks_franchisee = ranking_calc_franchisee(
reset_date, time_interval_franchisee, franchisee_stores,
weights_franchisee_drug_lvl, weights_franchisee_type_lvl,
logger, db=rs_db_read, schema=read_schema)
else:
logger.info("Skipping Franchisee-level Ranking")
# process ranked dfs to tech required format
distributor_ranking_rules, \
distributor_ranking_rule_values = process_tech_df(
final_ranks_dc, final_ranks_franchisee, volume_fraction)
# combine rank df and feature df (dc & franchisee)
final_ranks = pd.concat([final_ranks_dc, final_ranks_franchisee], axis=0)
ranked_features = pd.concat([ranked_features_dc, ranked_features_franchisee], axis=0)
ranked_features.rename(
{"partial_dc_id": "dc_id", "partial_distributor_id": "distributor_id",
"partial_distributor_credit_period": "distributor_credit_period",
"partial_distributor_name": "distributor_name"}, axis=1, inplace=True)
final_ranks.rename(
{"partial_dc_id": "dc_id"}, axis=1, inplace=True)
# for email info
dc_evaluated = distributor_ranking_rules["dc_id"].unique().tolist()
franchisee_stores_evaluated = distributor_ranking_rules[
"store_id"].unique().tolist()
# adding required fields in tech df
distributor_ranking_rules['rule_start_date'] = reset_date
distributor_ranking_rules['is_active'] = 1
distributor_ranking_rules['created_at'] = dt.datetime.now(
tz=gettz('Asia/Kolkata')).strftime('%Y-%m-%d %H:%M:%S')
distributor_ranking_rules['created_by'] = 'etl-automation'
# adding required fields in ds-internal df
final_ranks.loc[:, 'reset_date'] = reset_date
final_ranks['created_at'] = dt.datetime.now(
tz=gettz('Asia/Kolkata')).strftime('%Y-%m-%d %H:%M:%S')
final_ranks['created_by'] = 'etl-automation'
final_ranks['updated_at'] = dt.datetime.now(
tz=gettz('Asia/Kolkata')).strftime('%Y-%m-%d %H:%M:%S')
final_ranks['updated_by'] = 'etl-automation'
ranked_features.loc[:, 'reset_date'] = reset_date
ranked_features['created_at'] = dt.datetime.now(
tz=gettz('Asia/Kolkata')).strftime('%Y-%m-%d %H:%M:%S')
ranked_features['created_by'] = 'etl-automation'
ranked_features['updated_at'] = dt.datetime.now(
tz=gettz('Asia/Kolkata')).strftime('%Y-%m-%d %H:%M:%S')
ranked_features['updated_by'] = 'etl-automation'
# formatting column names
distributor_ranking_rule_values.columns = [c.replace('_', '-') for c in
distributor_ranking_rule_values.columns]
distributor_ranking_rules.columns = [c.replace('_', '-') for c in
distributor_ranking_rules.columns]
final_ranks.columns = [c.replace('_', '-') for c in final_ranks.columns]
ranked_features.columns = [c.replace('_', '-') for c in ranked_features.columns]
if debug_mode == 'N':
logger.info("Writing table to RS-DB")
logger.info("Writing to table: distributor-ranking2-features")
table_info = helper.get_table_info(db=rs_db_write,
table_name='distributor-ranking2-features',
schema=write_schema)
columns = list(table_info['column_name'])
ranked_features = ranked_features[columns] # required column order
s3.write_df_to_db(df=ranked_features,
table_name='distributor-ranking2-features',
db=rs_db_write, schema=write_schema)
logger.info("Writing to table: distributor-ranking2-final-ranks")
table_info = helper.get_table_info(db=rs_db_write,
table_name='distributor-ranking2-final-ranks',
schema=write_schema)
columns = list(table_info['column_name'])
final_ranks = final_ranks[columns] # required column order
s3.write_df_to_db(df=final_ranks,
table_name='distributor-ranking2-final-ranks',
db=rs_db_write, schema=write_schema)
logger.info("Writing table to RS-DB completed!")
mysql_write.open_connection()
logger.info("Updating table to MySQL")
try:
index_increment = int(
pd.read_sql(
'select max(id) from `distributor-ranking-rules`',
con=mysql_write.connection).values[0]) + 1
redundant_increment = int(
pd.read_sql(
'select max(id) from `distributor-ranking-rule-values`',
con=mysql_write.connection).values[0]) + 1
            except Exception:
                # empty tables on first run: max(id) is NULL, so start ids from 1
                index_increment = 1
                redundant_increment = 1
            logger.info(f"Offsetting distributor-ranking-rules ids by {index_increment}")
            logger.info(f"Offsetting distributor-ranking-rule-values ids by {redundant_increment}")
distributor_ranking_rules['id'] = distributor_ranking_rules['id'] + index_increment
distributor_ranking_rule_values['distributor-ranking-rule-id'] = distributor_ranking_rule_values[
'distributor-ranking-rule-id'] + index_increment
distributor_ranking_rule_values['id'] = distributor_ranking_rule_values['id'] + redundant_increment
logger.info("Setting existing rules to inactive")
mysql_write.engine.execute("UPDATE `distributor-ranking-rules` SET `is-active` = 0")
# mysql_write.engine.execute("SET FOREIGN_KEY_CHECKS=0") # use only in staging
logger.info("Writing to table: distributor-ranking-rules")
distributor_ranking_rules.to_sql(
name='distributor-ranking-rules',
con=mysql_write.engine,
if_exists='append', index=False,
method='multi', chunksize=10000)
logger.info("Writing to table: distributor-ranking-rule-values")
distributor_ranking_rule_values.to_sql(
name='distributor-ranking-rule-values',
con=mysql_write.engine,
if_exists='append', index=False,
method='multi', chunksize=10000)
# mysql_write.engine.execute("SET FOREIGN_KEY_CHECKS=1") # use only in staging
logger.info("Updating table to MySQL completed!")
mysql_write.close()
else:
logger.info("Writing to RS-DB & MySQL skipped")
status = 'Success'
logger.info(f"Distributor Ranking code execution status: {status}")
except Exception as error:
logger.exception(error)
logger.info(f"Distributor Ranking code execution status: {status}")
return status, reset_date, dc_evaluated, franchisee_stores_evaluated
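# Editor's note: a standalone sketch (hypothetical ids) of the id-offset pattern used in
# main() above when appending to MySQL: new rows are shifted past the current max(id) so
# that `distributor-ranking-rule-values` keeps referencing its parent ranking rules.
def _sketch_id_offset(rule_ids, rule_value_parent_ids, current_max_id):
    offset = current_max_id + 1
    return ([i + offset for i in rule_ids],
            [i + offset for i in rule_value_parent_ids])
# Example: _sketch_id_offset([0, 1], [0, 0, 1], 41) -> ([42, 43], [42, 42, 43])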
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="This is ETL script.")
parser.add_argument('-e', '--env', default="dev", type=str, required=False)
parser.add_argument('-et', '--email_to',
default="[email protected]", type=str,
required=False)
parser.add_argument('-d', '--debug_mode', default="N", type=str,
required=False)
parser.add_argument('-rd', '--reset_date', default="YYYY-MM-DD", type=str,
required=False)
parser.add_argument('-ti', '--time_interval_dc', default=90, type=int,
required=False)
parser.add_argument('-tif', '--time_interval_franchisee', default=180, type=int,
required=False)
parser.add_argument('-vf', '--volume_fraction', default="0.5-0.3-0.2", type=str,
required=False)
parser.add_argument('-fra', '--franchisee_ranking_active', default="Y", type=str,
required=False)
parser.add_argument('-fs', '--franchisee_stores', default=[319, 320],
nargs='+', type=int, required=False)
parser.add_argument('-amwdcdl', '--as_ms_weights_dc_drug_lvl',
default="{'margin':0.5,'ff':0.5}",
type=str, required=False)
parser.add_argument('-amwdctl', '--as_ms_weights_dc_type_lvl',
default="{'margin':0.3,'ff':0.3, 'portfolio_size':0.4}",
type=str, required=False)
parser.add_argument('-prwdcdl', '--pr_weights_dc_drug_lvl',
default="{'margin':0.4,'ff':0.6}",
type=str, required=False)
parser.add_argument('-prwdctl', '--pr_weights_dc_type_lvl',
default="{'margin':0.2,'ff':0.4, 'portfolio_size':0.4}",
type=str, required=False)
parser.add_argument('-wfdl', '--weights_franchisee_drug_lvl',
default="{'margin':0.5,'ff':0.5}",
type=str, required=False)
parser.add_argument('-wftl', '--weights_franchisee_type_lvl',
default="{'margin':0.3,'ff':0.3, 'portfolio_size':0.4}",
type=str, required=False)
args, unknown = parser.parse_known_args()
env = args.env
os.environ['env'] = env
email_to = args.email_to
# JOB EXCLUSIVE PARAMS
debug_mode = args.debug_mode
reset_date = args.reset_date
time_interval_dc = args.time_interval_dc
time_interval_franchisee = args.time_interval_franchisee
volume_fraction = args.volume_fraction
franchisee_ranking_active = args.franchisee_ranking_active
franchisee_stores = args.franchisee_stores
as_ms_weights_dc_drug_lvl = args.as_ms_weights_dc_drug_lvl
as_ms_weights_dc_type_lvl = args.as_ms_weights_dc_type_lvl
pr_weights_dc_drug_lvl = args.pr_weights_dc_drug_lvl
pr_weights_dc_type_lvl = args.pr_weights_dc_type_lvl
weights_franchisee_drug_lvl = args.weights_franchisee_drug_lvl
weights_franchisee_type_lvl = args.weights_franchisee_type_lvl
# EVALUATE REQUIRED JSON PARAMS
as_ms_weights_dc_drug_lvl = literal_eval(as_ms_weights_dc_drug_lvl)
as_ms_weights_dc_type_lvl = literal_eval(as_ms_weights_dc_type_lvl)
pr_weights_dc_drug_lvl = literal_eval(pr_weights_dc_drug_lvl)
pr_weights_dc_type_lvl = literal_eval(pr_weights_dc_type_lvl)
weights_franchisee_drug_lvl = literal_eval(weights_franchisee_drug_lvl)
weights_franchisee_type_lvl = literal_eval(weights_franchisee_type_lvl)
if reset_date == 'YYYY-MM-DD':
reset_date = dt.date.today()
else:
reset_date = dt.datetime.strptime(reset_date, "%Y-%m-%d").date()
logger = get_logger()
s3 = S3()
rs_db_read = DB(read_only=True)
rs_db_write = DB(read_only=False)
read_schema = 'prod2-generico'
write_schema = 'prod2-generico'
# open RS connection
rs_db_read.open_connection()
rs_db_write.open_connection()
""" calling the main function """
status, reset_date, dc_evaluated, \
franchisee_stores_evaluated = main(
debug_mode, reset_date, time_interval_dc, time_interval_franchisee,
volume_fraction, franchisee_ranking_active, franchisee_stores,
as_ms_weights_dc_drug_lvl, as_ms_weights_dc_type_lvl,
pr_weights_dc_drug_lvl, pr_weights_dc_type_lvl,
weights_franchisee_drug_lvl, weights_franchisee_type_lvl, s3,
rs_db_read, rs_db_write, read_schema, write_schema)
# close RS connection
rs_db_read.close_connection()
rs_db_write.close_connection()
# SEND EMAIL ATTACHMENTS
logger.info("Sending email attachments..")
email = Email()
email.send_email_file(
subject=f"Distributor Ranking 2.0 Reset (SM-{env}) {reset_date}: {status}",
mail_body=f"""
Debug Mode: {debug_mode}
DC's Evaluated: {dc_evaluated}
Franchisee Stores Evaluated: {franchisee_stores_evaluated}
Job Params: {args}
""",
to_emails=email_to)
logger.info("Script ended")
|
PypiClean
|
/bpy_2.79-1.0.0-py3-none-manylinux2014_x86_64.whl/bpy/2.79/scripts/addons/uv_magic_uv/muv_preserve_uv_aspect.py
|
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
__author__ = "Nutti <[email protected]>"
__status__ = "production"
__version__ = "4.4"
__date__ = "2 Aug 2017"
import bpy
import bmesh
from bpy.props import StringProperty
from mathutils import Vector
from . import muv_common
class MUV_PreserveUVAspect(bpy.types.Operator):
"""
Operation class: Preserve UV Aspect
"""
bl_idname = "uv.muv_preserve_uv_aspect"
bl_label = "Preserve UV Aspect"
bl_description = "Choose Image"
bl_options = {'REGISTER', 'UNDO'}
dest_img_name = StringProperty(options={'HIDDEN'})
@classmethod
def poll(cls, context):
obj = context.active_object
return obj and obj.type == 'MESH'
def execute(self, context):
        # Note: the current system only works when f[tex_layer].image
        # is not None, which can happen in certain cases
obj = context.active_object
bm = bmesh.from_edit_mesh(obj.data)
if muv_common.check_version(2, 73, 0) >= 0:
bm.faces.ensure_lookup_table()
if not bm.loops.layers.uv:
            self.report({'WARNING'}, "Object must have at least one UV map")
return {'CANCELLED'}
uv_layer = bm.loops.layers.uv.verify()
tex_layer = bm.faces.layers.tex.verify()
sel_faces = [f for f in bm.faces if f.select]
dest_img = bpy.data.images[self.dest_img_name]
info = {}
for f in sel_faces:
if not f[tex_layer].image in info.keys():
info[f[tex_layer].image] = {}
info[f[tex_layer].image]['faces'] = []
info[f[tex_layer].image]['faces'].append(f)
for img in info:
if img is None:
continue
src_img = img
ratio = Vector((
dest_img.size[0] / src_img.size[0],
dest_img.size[1] / src_img.size[1]))
origin = Vector((100000.0, 100000.0))
for f in info[img]['faces']:
for l in f.loops:
uv = l[uv_layer].uv
origin.x = min(uv.x, origin.x)
origin.y = min(uv.y, origin.y)
info[img]['ratio'] = ratio
info[img]['origin'] = origin
for img in info:
if img is None:
continue
for f in info[img]['faces']:
f[tex_layer].image = dest_img
for l in f.loops:
uv = l[uv_layer].uv
diff = uv - info[img]['origin']
diff.x = diff.x / info[img]['ratio'].x
diff.y = diff.y / info[img]['ratio'].y
                    uv.x = info[img]['origin'].x + diff.x
                    uv.y = info[img]['origin'].y + diff.y
bmesh.update_edit_mesh(obj.data)
return {'FINISHED'}
class MUV_PreserveUVAspectMenu(bpy.types.Menu):
"""
Menu class: Preserve UV Aspect
"""
bl_idname = "uv.muv_preserve_uv_aspect_menu"
bl_label = "Preserve UV Aspect"
bl_description = "Preserve UV Aspect"
def draw(self, _):
layout = self.layout
# create sub menu
for key in bpy.data.images.keys():
layout.operator(
MUV_PreserveUVAspect.bl_idname,
text=key, icon="IMAGE_COL").dest_img_name = key
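# Editor's note: a bpy-free sketch of the UV rescaling done in MUV_PreserveUVAspect.execute
# above; plain tuples stand in for mathutils.Vector. Each UV is moved relative to the
# island origin and divided by the dest/src size ratio so the texel aspect is preserved.
def _sketch_rescale_uv(uv, origin, ratio):
    return (origin[0] + (uv[0] - origin[0]) / ratio[0],
            origin[1] + (uv[1] - origin[1]) / ratio[1])
# Example: switching a 512x512 image for a 1024x512 one gives ratio (2.0, 1.0), so
# _sketch_rescale_uv((0.5, 0.5), (0.0, 0.0), (2.0, 1.0)) == (0.25, 0.5)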
|
PypiClean
|
/fds.sdk.StocksAPIforDigitalPortals-0.10.12-py3-none-any.whl/fds/sdk/StocksAPIforDigitalPortals/model/post_stock_notation_screener_search_request_data_performance_end_of_day_year_to_date.py
|
import re # noqa: F401
import sys # noqa: F401
from fds.sdk.StocksAPIforDigitalPortals.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from fds.sdk.StocksAPIforDigitalPortals.exceptions import ApiAttributeError
def lazy_import():
from fds.sdk.StocksAPIforDigitalPortals.model.post_stock_notation_screener_search_request_data_ebit_margin_maximum import PostStockNotationScreenerSearchRequestDataEbitMarginMaximum
from fds.sdk.StocksAPIforDigitalPortals.model.post_stock_notation_screener_search_request_data_ebit_margin_minimum import PostStockNotationScreenerSearchRequestDataEbitMarginMinimum
globals()['PostStockNotationScreenerSearchRequestDataEbitMarginMaximum'] = PostStockNotationScreenerSearchRequestDataEbitMarginMaximum
globals()['PostStockNotationScreenerSearchRequestDataEbitMarginMinimum'] = PostStockNotationScreenerSearchRequestDataEbitMarginMinimum
class PostStockNotationScreenerSearchRequestDataPerformanceEndOfDayYearToDate(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
            and, for var_name, this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
            and, for var_name, this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'minimum': (PostStockNotationScreenerSearchRequestDataEbitMarginMinimum,), # noqa: E501
'maximum': (PostStockNotationScreenerSearchRequestDataEbitMarginMaximum,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'minimum': 'minimum', # noqa: E501
'maximum': 'maximum', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""PostStockNotationScreenerSearchRequestDataPerformanceEndOfDayYearToDate - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
minimum (PostStockNotationScreenerSearchRequestDataEbitMarginMinimum): [optional] # noqa: E501
maximum (PostStockNotationScreenerSearchRequestDataEbitMarginMaximum): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""PostStockNotationScreenerSearchRequestDataPerformanceEndOfDayYearToDate - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
minimum (PostStockNotationScreenerSearchRequestDataEbitMarginMinimum): [optional] # noqa: E501
maximum (PostStockNotationScreenerSearchRequestDataEbitMarginMaximum): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
|
PypiClean
|
/lyrebird-ios-0.3.12.tar.gz/lyrebird-ios-0.3.12/lyrebird_ios/apis.py
|
import os
import lyrebird
from flask import request, jsonify, send_from_directory
from lyrebird import application
from lyrebird.mock.context import make_ok_response, make_fail_response
from .device_service import DeviceService
device_service = DeviceService()
storage = lyrebird.get_plugin_storage()
screenshot_dir = os.path.abspath(os.path.join(storage, 'screenshot'))
def conf():
plugin_conf = application.config.get('plugin.ios', {})
default_bundle_id = plugin_conf.get('bundle_id', '')
return make_ok_response(bundle_id=default_bundle_id)
def device_list():
device_list = device_service.devices_to_dict()
return make_ok_response(device_list=device_list)
def device_detail(device_id):
device = device_service.devices.get(device_id)
device_detail = '\n'.join(device.device_info)
return make_ok_response(device_detail=device_detail)
def get_app_info(device_id, bundle_id):
def send_device_event():
device_service.publish_devices_info_event(device_service.devices, bundle_id)
lyrebird.add_background_task('SendDeviceEvent', send_device_event)
device = device_service.devices.get(device_id)
app_info = device.get_app_info(bundle_id)
return make_ok_response(app_info=app_info)
def app_list(device_id):
device = device_service.devices.get(device_id)
if device:
app_list = device.get_apps_list(device_id)
return make_ok_response(app_list=app_list)
else:
return make_fail_response(f'Device id {device_id} not found!')
def start_app(device_id, bundle_id):
device = device_service.devices.get(device_id)
ip = application.config.get('ip')
port = application.config.get('mock.port')
res = device.start_app(bundle_id, ip, port)
if res:
return make_fail_response(res)
return make_ok_response()
def stop_app(device_id, bundle_id):
device = device_service.devices.get(device_id)
    res = device.stop_app()
    # the device helper returns an error string mentioning 'NoneType' when no app was started first
    if 'NoneType' in res:
        return make_fail_response(f'Cannot stop app {bundle_id} before starting it')
return make_ok_response()
def logcat_start(device_id):
print('Logcat start', device_id)
device_service.start_log_recorder(device_id)
def take_screen_shot(device_id):
device = device_service.devices.get(device_id)
img_info = device.take_screen_shot()
if img_info['returncode'] != 0:
return make_fail_response(img_info['stdout'])
timestamp = img_info.get('timestamp')
return make_ok_response(imgUrl=f'/plugins/ios/api/src/screenshot/{device_id}?time={timestamp}')
def get_screen_shot(message):
if message.get('cmd') != 'screenshot':
return
screen_shots = []
device_list = message.get('device_id')
for device_id in device_list:
device = device_service.devices.get(device_id)
if not device:
continue
screen_shot_info = device.take_screen_shot()
screen_shots.append(
{
'id': device_id,
'screenshot': {
'name': os.path.basename(screen_shot_info.get('screen_shot_file')),
'path': screen_shot_info.get('screen_shot_file')
}
}
)
lyrebird.publish('ios.screenshot', screen_shots, state=True)
def get_screenshot_image(device_id):
if request.args.get('time'):
model = device_service.devices.get(device_id).model.replace(' ', '_')
timestamp = request.args.get('time')
return send_from_directory(screenshot_dir, f'{model}_{timestamp}.png')
else:
return None
def check_env():
msg = device_service.check_env()
if device_service.status == device_service.RUNNING:
return make_ok_response()
else:
return make_fail_response(msg)
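# Editor's note: a minimal sketch of one entry in the payload get_screen_shot() above
# publishes on the 'ios.screenshot' channel; the device id and file path are hypothetical.
def _sketch_screenshot_payload(device_id, screen_shot_file):
    return {
        'id': device_id,
        'screenshot': {
            'name': os.path.basename(screen_shot_file),
            'path': screen_shot_file,
        },
    }
# Example: _sketch_screenshot_payload('udid-1', '/tmp/iPhone_X_123.png')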
|
PypiClean
|
/capsolver_python-1.0.tar.gz/capsolver_python-1.0/capsolver_python/capsolver.py
|
import asyncio
import requests
from time import sleep
from .request_type import RequestType
from .utils import CapSolverException
class CapSolver:
_HOST_URL = "https://api.capsolver.com"
_BETA_HOST_URL = "https://api-beta.capsolver.com"
def __init__(self, client_key: str, beta: bool = False) -> None:
self.client_key = client_key
self.beta = beta
def get_balance(self) -> float:
'''
Returns balance for API key.
'''
data = {"clientKey": self.client_key}
return self._make_request(RequestType.GetBalance, data).get("balance")
def get_packages(self) -> list:
'''
Returns a list of monthly packages.
'''
data = {"clientKey": self.client_key}
return self._make_request(RequestType.GetBalance, data).get("packages")
def get_task_result(self, task_id: str):
data = {
"clientKey": self.client_key,
"taskId": task_id
}
task_result = self._make_request(RequestType.GetTaskResult, data)
return self._is_ready(task_result)
def join_task_result(self, task_id: str, maximum_time: int = 90):
        # poll once per second until the task is solved or maximum_time runs out;
        # the original `i += 1` inside the loop was a no-op and has been removed
        for _ in range(maximum_time + 1):
            result = self.get_task_result(task_id)
            if result is not False and result is not None:
                return result
            sleep(1)
        raise CapSolverException(61, "ERROR_MAXIMUM_TIME_EXCEED", "Maximum time exceeded.")
async def join_task_result_async(self, task_id: str, maximum_time: int = 90):
        # poll once per second until the task is solved or maximum_time runs out
        for _ in range(maximum_time + 1):
            result = self.get_task_result(task_id)
            if result is not False and result is not None:
                return result
            await asyncio.sleep(1)
        raise CapSolverException(61, "ERROR_MAXIMUM_TIME_EXCEED", "Maximum time exceeded.")
# TODO: Get a soft id for this one
def _make_request(self, method: RequestType, data: dict):
if method == RequestType.CreateTask or method == RequestType.CreateTaskAntiAkamai or method == RequestType.CreateTaskKasada:
data["appId"] = "0C39FC4D-C1FB-4F4E-975B-89E93B78A97A"
try:
response = requests.post("{}{}".format(self._BETA_HOST_URL if self.beta else self._HOST_URL, method), json=data).json()
except Exception as err:
raise CapSolverException(-1, type(err).__name__, str(err))
return response
@staticmethod
def _is_ready(response: dict):
status = response.get("status")
if status == "ready" or status == "processing":
return False if status == "processing" else response.get("solution")
else:
raise CapSolverException(response.get("errorId"), response.get("errorCode"), response.get("errorDescription"))
@staticmethod
def _add_cookies(cookies, data):
if cookies is None:
return data
str_cookies = ""
if type(cookies) == dict:
for key, value in cookies.items():
if value == list(cookies.items())[-1][1]:
str_cookies += "{}={}".format(key, value)
else:
str_cookies += "{}={};".format(key, value)
        elif type(cookies) == list:
            if len(cookies) % 2 != 0:
                raise AttributeError("List cookies length must be even numbers")
            # cookies come as a flat [key, value, key, value, ...] list; enumerate keeps
            # duplicated values from breaking the pairing (list.index returns the first hit)
            for index, item in enumerate(cookies):
                if index % 2 == 0:
                    str_cookies += "{}=".format(item)
                elif index == len(cookies) - 1:
                    str_cookies += "{}".format(item)
                else:
                    str_cookies += "{};".format(item)
elif type(cookies) == str:
data["task"]["cookies"] = cookies
return data
data["task"]["cookies"] = str_cookies
return data
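# Editor's note: a usage sketch for the _add_cookies helper above. All three accepted
# cookie shapes normalise to the same "k1=v1;k2=v2" string in data["task"]["cookies"];
# the cookie names are hypothetical.
def _sketch_cookie_formats():
    as_dict = CapSolver._add_cookies({"sid": "1", "token": "2"}, {"task": {}})
    as_list = CapSolver._add_cookies(["sid", "1", "token", "2"], {"task": {}})
    as_str = CapSolver._add_cookies("sid=1;token=2", {"task": {}})
    return as_dict, as_list, as_str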
|
PypiClean
|
/rh_elliott-1.0.15-py3-none-any.whl/elliottlib/cli/common.py
|
import asyncio
import sys
from functools import update_wrapper
import click
from elliottlib import Runtime, constants, dotconfig, version
from elliottlib.cli import cli_opts
from elliottlib.util import green_prefix, red_prefix, yellow_print
def print_version(ctx, param, value):
if not value or ctx.resilient_parsing:
return
click.echo('Elliott v{}'.format(version()))
click.echo("Python v{}".format(sys.version))
ctx.exit()
context_settings = dict(help_option_names=['-h', '--help'])
@click.group(context_settings=context_settings)
@click.option(
'--version',
is_flag=True, callback=print_version, expose_value=False, is_eager=True)
@click.option(
'--working-dir',
metavar='PATH', envvar='ELLIOTT_WORKING_DIR', default=None,
help='Existing directory in which file operations should be performed.')
@click.option(
'--data-path',
metavar='PATH', default=None,
help='Git repo or directory containing groups metadata')
@click.option(
'--group', '-g',
default=None, metavar='NAME',
help='The group of images on which to operate.')
@click.option(
'--branch',
default=None, metavar='BRANCH',
help='Branch to override any default in group.yml.')
@click.option(
'-i', '--images',
default=[], metavar='NAME', multiple=True,
help='Name of group image member to include in operation (all by default). Can be comma delimited list.')
@click.option(
'-x', '--exclude',
default=[], metavar='NAME', multiple=True,
help='Name of group image or rpm member to exclude in operation (none by default). Can be comma delimited list.')
@click.option(
'--quiet', '-q',
default=False, is_flag=True,
help='Suppress non-critical output')
@click.option(
'--debug',
default=False, is_flag=True,
help='Show debug output on console.')
@click.pass_context
def cli(ctx, **kwargs):
cfg = dotconfig.Config(
'elliott', 'settings',
template=cli_opts.CLI_CONFIG_TEMPLATE,
envvars=cli_opts.CLI_ENV_VARS,
cli_args=kwargs)
ctx.obj = Runtime(cfg_obj=cfg, **cfg.to_dict())
#
# Look up a default advisory specified for the branch in ocp-build-data
# Advisory types are in elliottlib.constants.standard_advisory_types
# but this doesn't validate against that.
#
def find_default_advisory(runtime, default_advisory_type, quiet=False):
'''The `quiet` parameter will disable printing the informational message'''
default_advisory = runtime.group_config.advisories.get(default_advisory_type, None)
if default_advisory is None:
red_prefix('No value defined for default advisory:')
click.echo(' The key advisories.{} is not defined for group {} in group.yml'.format(
default_advisory_type, runtime.group))
exit(1)
if not quiet:
green_prefix('Default advisory detected: ')
click.echo(default_advisory)
return default_advisory
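# Editor's note: a minimal sketch of calling find_default_advisory with a stub runtime;
# the group name and advisory id are hypothetical. In real use the Runtime instance is
# built by the cli() group above and injected through the click context.
def _sketch_find_default_advisory():
    from types import SimpleNamespace
    runtime = SimpleNamespace(
        group='openshift-4.10',
        group_config=SimpleNamespace(advisories={'image': 12345}))
    return find_default_advisory(runtime, 'image', quiet=True)  # -> 12345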
use_default_advisory_option = click.option(
'--use-default-advisory', 'default_advisory_type',
metavar='ADVISORY_TYPE',
type=click.Choice(constants.standard_advisory_types),
help='Use the default value from group.yml for ADVISORY_TYPE [{}]'.format(
', '.join(constants.standard_advisory_types)))
pass_runtime = click.make_pass_decorator(Runtime)
def click_coroutine(f):
""" A wrapper to allow to use asyncio with click.
https://github.com/pallets/click/issues/85
"""
f = asyncio.coroutine(f)
def wrapper(*args, **kwargs):
loop = asyncio.get_event_loop()
return loop.run_until_complete(f(*args, **kwargs))
return update_wrapper(wrapper, f)
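# Editor's note: a usage sketch for click_coroutine above; the command is hypothetical.
# The decorator must sit below the click decorator so click wraps the synchronous
# adapter rather than the raw coroutine.
def _sketch_click_coroutine_usage():
    @click.command("demo-async")
    @click_coroutine
    async def demo():
        await asyncio.sleep(0)
        click.echo("done")
    return demo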
|
PypiClean
|
/otlmow_model-2.8.0-py3-none-any.whl/otlmow_model/BaseClasses/OTLObject.py
|
import math
import random
import warnings
from datetime import date, time
from datetime import datetime
from typing import Union, Dict, List, Generator
from otlmow_model.BaseClasses.DateField import DateField
from otlmow_model.BaseClasses.DateTimeField import DateTimeField
from otlmow_model.BaseClasses.KeuzelijstField import KeuzelijstField
from otlmow_model.BaseClasses.OTLField import OTLField
from otlmow_model.BaseClasses.TimeField import TimeField
from otlmow_model.BaseClasses.URIField import URIField
from otlmow_model.BaseClasses.UnionTypeField import UnionTypeField
from otlmow_model.BaseClasses.UnionWaarden import UnionWaarden
from otlmow_model.Exceptions.AttributeDeprecationWarning import AttributeDeprecationWarning
from otlmow_model.Exceptions.ClassDeprecationWarning import ClassDeprecationWarning
from otlmow_model.Exceptions.MethodNotApplicableError import MethodNotApplicableError
from otlmow_model.Helpers.AssetCreator import dynamic_create_instance_from_uri
class OTLAttribuut:
def __init__(self, naam='', label='', objectUri='', definition='', constraints='', usagenote='',
deprecated_version='', kardinaliteit_min='1', kardinaliteit_max='1', field=OTLField, readonly=False,
readonlyValue=None, owner=None):
super().__init__()
self.naam = naam
self.label = label
self.objectUri = objectUri
self.definition = definition
self.constraints = constraints
self.usagenote = usagenote
self.deprecated_version = deprecated_version
self.readonly = readonly
self.kardinaliteit_min = kardinaliteit_min
self.kardinaliteit_max = kardinaliteit_max
self._dotnotation = ''
self.owner = owner
self.readonlyValue = None
self.waarde = None
self.field = field
if self.field.waardeObject:
def add_empty_value():
prev_value = self.waarde
if kardinaliteit_max == '1':
if prev_value is None:
new_value_object = self.field.waardeObject()
new_value_object._parent = self
self.set_waarde(new_value_object)
else:
raise RuntimeError(
"This attribute does not have a cardinality other than 1, therefore you can only call this method once per instance")
else:
if prev_value is None:
prev_value = []
new_value_object = self.field.waardeObject()
new_value_object._parent = self
prev_value.append(new_value_object)
self.set_waarde(prev_value)
self.add_empty_value = add_empty_value
if kardinaliteit_max != '1':
def add_value(value):
l = self.waarde
if self.waarde is None:
l = []
l.append(value)
self.set_waarde(l)
self.add_value = add_value
if readonly:
self.__dict__["waarde"] = readonlyValue
def add_value(self, value):
raise MethodNotApplicableError(
"This attribute does not have a cardinality other than 1 so simply assign your value directly instead of "
"using this method")
def get_waarde(self):
if self.field.waardeObject and self.waarde is None:
self.add_empty_value()
return self.waarde
def add_empty_value(self):
"""Helper method for datatypes UnionType, ComplexType, KwantWrd and Dte to add the underlying waarde object"""
if not self.field.waardeObject:
raise MethodNotApplicableError(
"In order to use this method this object must be one of these types: UnionType, ComplexType, KwantWrd, "
"Dte")
def default(self):
        if isinstance(self.waarde, list):
value_list = []
for item in self.waarde:
if self.field.waardeObject is not None:
waarde_dict = vars(item)
value_dict = {}
for k, v in waarde_dict.items():
if v.default() is not None:
value_dict[k[1:]] = v.default()
if len(value_dict) != 0:
value_list.append(value_dict)
else:
value_list.append(item)
return value_list
if self.field.waardeObject is not None:
if self.field.waarde_shortcut_applicable:
waarde_dict = vars(self.waarde)
value_dict = {}
for k, v in waarde_dict.items():
if v.default() is not None:
value_dict[k[1:]] = v.default()
if len(value_dict) == 0:
return None
return value_dict
else:
if self.waarde.waarde is not None:
if hasattr(self.waarde.waarde, 'default'):
return self.waarde.waarde.default()
else:
return self.waarde.waarde
return None
else:
if isinstance(self.waarde, datetime):
if self.waarde.hour == 0 and self.waarde.minute == 0 and self.waarde.second == 0:
return self.waarde.strftime("%Y-%m-%d")
else:
return self.waarde.strftime("%Y-%m-%d %H:%M:%S")
else:
if hasattr(self.waarde, 'default'):
return self.waarde.default()
else:
return self.waarde
def _perform_cardinality_check(self, owner, value, kardinaliteit_max):
kardinaliteit_min = int(self.kardinaliteit_min)
        if isinstance(value, set):
            # checked first: a set is not a list, so the generic check below would mask this message
            raise TypeError(f'expecting a non set type of list in {owner.__class__.__name__}.{self.naam}')
        if not isinstance(value, list):
            raise TypeError(f'expecting a list in {owner.__class__.__name__}.{self.naam}')
elif 0 < len(value) < kardinaliteit_min:
raise ValueError(
f'expecting at least {kardinaliteit_min} element(s) in {owner.__class__.__name__}.{self.naam}')
elif len(value) > kardinaliteit_max:
raise ValueError(
f'expecting at most {kardinaliteit_max} element(s) in {owner.__class__.__name__}.{self.naam}')
def set_waarde(self, value, owner=None):
self._perform_deprecation_check(self)
if value is None:
self.waarde = None
return
if self.kardinaliteit_max != '1':
if self.kardinaliteit_max == '*':
kardinaliteit_max = math.inf
else:
kardinaliteit_max = int(self.kardinaliteit_max)
self._perform_cardinality_check(owner, value, kardinaliteit_max)
converted_values = []
for el_value in value:
converted_value = self.field.convert_to_correct_type(el_value)
if issubclass(self.field, KeuzelijstField):
converted_value = self.field.convert_to_invulwaarde(converted_value, self.field)
field_validated = self.field.validate(converted_value, self)
if not field_validated:
raise ValueError(
f'invalid value in list for {owner.__class__.__name__}.{self.naam}: {el_value} is not '
f'valid, must be valid for {self.field.naam}')
converted_values.append(converted_value)
self.waarde = converted_values
else:
if self.field.waardeObject is not None and isinstance(value, self.field.waardeObject):
self.waarde = value
else:
converted_value = self.field.convert_to_correct_type(value)
if issubclass(self.field, KeuzelijstField):
converted_value = self.field.convert_to_invulwaarde(converted_value, self.field)
if self.field.validate(value=converted_value, attribuut=self):
self.waarde = converted_value
else:
raise ValueError(
f'Could not assign the best effort converted value to {owner.__class__.__name__}.{self.naam}')
# check if kwant Wrd inside a union type, if so, call clear_props
if owner is not None and value is not None and hasattr(owner, 'field') and owner.field.waardeObject is not None:
if owner.field.waarde_shortcut_applicable and not isinstance(
owner.field, UnionTypeField) and owner.owner is not None and isinstance(owner.owner, UnionWaarden):
owner.owner.clear_other_props('_' + owner.naam)
@staticmethod
def _perform_deprecation_check(owner):
if owner is not None:
if owner.naam == 'waarde':
owner = owner.owner._parent
if hasattr(owner, 'deprecated_version'):
if owner.deprecated_version != '':
if hasattr(owner, 'objectUri'):
warnings.warn(
message=f'{owner.objectUri} is deprecated since version {owner.deprecated_version}',
category=AttributeDeprecationWarning)
elif hasattr(owner, 'typeURI'):
warnings.warn(message=f'{owner.typeURI} is deprecated since version {owner.deprecated_version}',
category=AttributeDeprecationWarning)
else:
warnings.warn(
message=f'used a class that is deprecated since version {owner.deprecated_version}',
category=AttributeDeprecationWarning)
def __str__(self):
s = (f'information about {self.naam}:\n'
f'naam: {self.naam}\n'
f'uri: {self.objectUri}\n'
f'definition: {self.definition}\n'
f'label: {self.label}\n'
f'usagenote: {self.usagenote}\n'
f'constraints: {self.constraints}\n'
f'readonly: {self.readonly}\n'
f'kardinaliteit_min: {self.kardinaliteit_min}\n'
f'kardinaliteit_max: {self.kardinaliteit_max}\n'
f'deprecated_version: {self.deprecated_version}\n')
return s
def fill_with_dummy_data(self):
if self.readonly:
return
if self.field.waardeObject is None:
if self.naam == 'geometry':
first_geom_type = self.owner._geometry_types[0]
if first_geom_type == 'POINT Z':
self.set_waarde('POINT Z (200000 200000 0)')
elif first_geom_type == 'LINESTRING Z':
self.set_waarde('LINESTRING Z (200000 200000 0, 200001 200001 1)')
elif first_geom_type == 'POLYGON Z':
self.set_waarde('POLYGON Z ((200000 200000 0, 200001 200001 1, 200002 200002 2))')
return
else:
data = self.field.create_dummy_data()
if self.kardinaliteit_max != '1':
self.set_waarde([data])
else:
self.set_waarde(data)
return
new_value_object = self.field.waardeObject()
new_value_object._parent = self
if isinstance(new_value_object, UnionWaarden):
selected_attr = random.choice(list(new_value_object))
selected_attr.fill_with_dummy_data()
else:
for a in new_value_object:
a.fill_with_dummy_data()
if self.kardinaliteit_max != '1':
self.set_waarde([new_value_object])
else:
self.set_waarde(new_value_object)
class OTLObject(object):
typeURI: str = None
"""De URI van het object volgens https://www.w3.org/2001/XMLSchema#anyURI."""
def __setattr__(self, name, value):
if name != 'typeURI':
super(OTLObject, self).__setattr__(name, value)
else:
if hasattr(self, 'typeURI') and (value is not None or self.typeURI is not None):
raise ValueError("The typeURI is an OSLOAttribute that indicates the class of the instance. "
"Within a class this value is predefined and cannot be changed.")
else:
if URIField.validate(value, OTLAttribuut(naam='typeURI')):
                    self.__dict__['typeURI'] = value
else:
raise ValueError(f'{value} is not a valid value for typeURI.')
def __init__(self):
if hasattr(self, 'deprecated_version'):
if self.deprecated_version is not None:
try:
warnings.warn(message=f'{self.typeURI} is deprecated since version {self.deprecated_version}',
category=ClassDeprecationWarning)
except KeyError:
warnings.warn(
message=f'used a class ({self.__class__.__name__}) that is deprecated since version {self.deprecated_version}',
category=ClassDeprecationWarning)
def create_dict_from_asset(self, waarde_shortcut: bool = False, rdf: bool = False) -> Dict:
"""Converts this asset into a dictionary representation"""
return create_dict_from_asset(otl_object=self, waarde_shortcut=waarde_shortcut, rdf=rdf)
def fill_with_dummy_data(self):
for attr in self:
if attr is not None:
attr.fill_with_dummy_data()
def __repr__(self):
return build_string_version(asset=self)
def __iter__(self) -> Generator[OTLAttribuut, None, None]:
yield from sorted(filter(lambda v: isinstance(v, OTLAttribuut), (vars(self).values())), key=lambda x: x.naam)
def __eq__(self, other):
return create_dict_from_asset(self) == create_dict_from_asset(other)
@classmethod
def from_dict(cls, input_dict: Dict, directory: str = 'otlmow_model.Classes', rdf: bool = False,
waarde_shortcut: bool = False) -> object:
"""Alternative constructor. Allows the instantiation of an object using a dictionary. Either start from the
appropriate class or add a typeURI entry to the dictionary to get an instance of that type.
:param input_dict: input dictionary, containing key value pairs for the attributes of the instance
:type: dict
:param directory: directory where the class modules are located, defaults to otlmow_model.Classes
:type: str
:param rdf: whether to use uri's as keys instead of the names, defaults to False
:type: bool
:param waarde_shortcut: whether to use the waarde shortcut when processing the dictionary, defaults to False
:type: bool
:return: returns an instance where the values of the attributes matches the given dictionary
:rtype: OTLObject"""
if not rdf and 'typeURI' in input_dict:
type_uri = input_dict['typeURI']
elif rdf and 'https://wegenenverkeer.data.vlaanderen.be/ns/implementatieelement#AIMObject.typeURI' in input_dict:
type_uri = input_dict['https://wegenenverkeer.data.vlaanderen.be/ns/implementatieelement#AIMObject.typeURI']
else:
type_uri = cls.typeURI
if type_uri is None:
raise ValueError(
'typeURI is None. Add a valid typeURI to the input dictionary or change the class you are using "from_dict" from.')
try:
o = dynamic_create_instance_from_uri(type_uri, directory=directory)
except TypeError:
raise ValueError(
'typeURI is invalid. Add a valid typeURI to the input dictionary or change the class you are using "from_dict" from.')
for k, v in input_dict.items():
if k == 'typeURI' or k == 'https://wegenenverkeer.data.vlaanderen.be/ns/implementatieelement#AIMObject.typeURI':
continue
set_value_by_dictitem(o, k, v, waarde_shortcut=waarde_shortcut, rdf=rdf)
return o
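# Editor's note: a usage sketch for OTLObject.from_dict above, assuming the otlmow_model
# class set is installed; the typeURI and the 'notitie' attribute are illustrative and
# must exist in the model for dynamic_create_instance_from_uri to resolve them.
def _sketch_from_dict_usage():
    return OTLObject.from_dict({
        'typeURI': 'https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#Verkeersbordopstelling',
        'notitie': 'voorbeeld'})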
def create_dict_from_asset(otl_object: OTLObject, waarde_shortcut=False, rdf: bool = False) -> Dict:
"""Creates a dictionary from an OTLObject with key value pairs for attributes and their values. Saves the type of the object in typeURI (or @type for the RDF dict)
:param otl_object: input object to be transformed
:type: OTLObject
:param waarde_shortcut: whether to use the waarde shortcut when processing the dictionary, defaults to False
:type: bool
:param rdf: whether to generate a dictionary where the key's are the URI's of the attributes rather than the names, defaults to False
:type: bool
:return: returns an instance where the values of the attributes matches the given dictionary
:rtype: OTLObject"""
if rdf:
d = _recursive_create_rdf_dict_from_asset(asset=otl_object, waarde_shortcut=waarde_shortcut)
else:
d = _recursive_create_dict_from_asset(otl_object, waarde_shortcut=waarde_shortcut)
if d is None:
d = {}
if rdf:
d['@type'] = otl_object.typeURI
else:
d['typeURI'] = otl_object.typeURI
return d
def _recursive_create_dict_from_asset(asset: Union[OTLObject, OTLAttribuut, list, dict],
waarde_shortcut: bool = False) -> Union[Dict, List[Dict]]:
if isinstance(asset, list) and not isinstance(asset, dict):
l = []
for item in asset:
dict_item = _recursive_create_dict_from_asset(asset=item, waarde_shortcut=waarde_shortcut)
if dict_item is not None:
l.append(dict_item)
return l
else:
d = {}
for attr in asset:
if attr.waarde is None:
continue
if attr.waarde == []:
d[attr.naam] = []
continue
if attr.field.waardeObject is not None: # complex
if waarde_shortcut and attr.field.waarde_shortcut_applicable:
if isinstance(attr.waarde, list):
dict_item = [item.waarde for item in attr.waarde]
if len(dict_item) > 0:
d[attr.naam] = dict_item
else:
dict_item = attr.waarde.waarde
if dict_item is not None:
d[attr.naam] = dict_item
else:
dict_item = _recursive_create_dict_from_asset(asset=attr.waarde, waarde_shortcut=waarde_shortcut)
if dict_item is not None:
d[attr.naam] = dict_item
else:
if attr.field == TimeField:
d[attr.naam] = time.strftime(attr.waarde, "%H:%M:%S")
elif attr.field == DateField:
d[attr.naam] = date.strftime(attr.waarde, "%Y-%m-%d")
elif attr.field == DateTimeField:
d[attr.naam] = datetime.strftime(attr.waarde, "%Y-%m-%d %H:%M:%S")
else:
d[attr.naam] = attr.waarde
if len(d.items()) > 0:
return d
def _recursive_create_rdf_dict_from_asset(asset: Union[OTLObject, OTLAttribuut, list, dict],
waarde_shortcut: bool = False) -> Union[Dict, List[Dict]]:
if isinstance(asset, list) and not isinstance(asset, dict):
l = []
for item in asset:
dict_item = _recursive_create_rdf_dict_from_asset(asset=item, waarde_shortcut=waarde_shortcut)
if dict_item is not None:
l.append(dict_item)
if len(l) > 0:
return l
else:
d = {}
for attr in asset:
if attr.waarde is None or attr.waarde == []:
continue
if attr.field.waardeObject is not None: # complex
if waarde_shortcut and attr.field.waarde_shortcut_applicable:
if isinstance(attr.waarde, list):
dict_item = [item.waarde for item in attr.waarde]
if len(dict_item) > 0:
d[attr.objectUri] = dict_item
else:
dict_item = attr.waarde.waarde
if dict_item is not None:
d[attr.objectUri] = dict_item
else:
dict_item = _recursive_create_rdf_dict_from_asset(asset=attr.waarde,
waarde_shortcut=waarde_shortcut)
if dict_item is not None:
d[attr.objectUri] = dict_item
else:
if attr.field == TimeField:
d[attr.objectUri] = time.strftime(attr.waarde, "%H:%M:%S")
elif attr.field == DateField:
d[attr.objectUri] = date.strftime(attr.waarde, "%Y-%m-%d")
elif attr.field == DateTimeField:
d[attr.objectUri] = datetime.strftime(attr.waarde, "%Y-%m-%d %H:%M:%S")
elif issubclass(attr.field, KeuzelijstField):
if isinstance(attr.waarde, list):
if attr.waarde == [None]:
d[attr.objectUri] = []
else:
d[attr.objectUri] = [attr.field.options[list_item].objectUri for list_item in attr.waarde]
else:
d[attr.objectUri] = attr.field.options[attr.waarde].objectUri
else:
d[attr.objectUri] = attr.waarde
if len(d.items()) > 0:
return d
def clean_dict(d) -> Union[Dict, None]:
"""Recursively remove None values and empty dicts from input dict"""
if d is None:
return None
for k in list(d):
v = d[k]
if isinstance(v, dict):
clean_dict(v)
if len(v.items()) == 0:
del d[k]
if v is None:
del d[k]
return d
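# Editor's note: a small demonstration of clean_dict above; the input dict is
# illustrative. None values and dicts that become empty after cleaning are removed
# in place.
def _sketch_clean_dict():
    return clean_dict({'a': 1, 'b': None, 'c': {'d': None}})  # -> {'a': 1}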
def build_string_version(asset, indent: int = 4) -> str:
if indent < 4:
indent = 4
d = create_dict_from_asset(asset)
string_version = '\n'.join(_make_string_version_from_dict(d, level=1, indent=indent, prefix=' '))
if string_version != '':
string_version = '\n' + string_version
return f'<{asset.__class__.__name__}> object\n{(" " * indent)}typeURI : {asset.typeURI}' + string_version
def _make_string_version_from_dict(d, level: int = 0, indent: int = 4, list_index: int = -1, prefix: str = '') -> List:
lines = []
if list_index != -1:
index_string = f'[{list_index}]'
index_string += ' ' * (indent - len(index_string))
prefix += index_string
for key in sorted(d):
if key == 'typeURI':
continue
value = d[key]
if isinstance(value, dict):
lines.append(prefix + f'{key} :')
lines.extend(_make_string_version_from_dict(value, level=level + 1, indent=indent,
prefix=prefix + ' ' * indent * level))
elif isinstance(value, list):
lines.append(prefix + f'{key} :')
for index, item in enumerate(value):
if index == 10:
if len(value) == 11:
lines.append(prefix + '...(1 more item)')
else:
lines.append(prefix + f'...({len(value) - 10} more items)')
break
if isinstance(item, dict):
lines.extend(_make_string_version_from_dict(item, level=level, indent=indent, list_index=index,
prefix=prefix))
else:
index_string = f'[{index}]'
index_string += ' ' * (indent - len(index_string))
lines.append(prefix + index_string + f'{item}')
else:
lines.append(prefix + f'{key} : {value}')
return lines
def get_attribute_by_uri(instance_or_attribute, key: str) -> OTLAttribuut:
return next(v for v in instance_or_attribute if v.objectUri == key)
def get_attribute_by_name(instance_or_attribute, key: str) -> OTLAttribuut:
return getattr(instance_or_attribute, '_' + key)
# dict encoder = asset object to dict
# dict decoder = dict to asset object
def set_value_by_dictitem(instance_or_attribute: Union[OTLObject, OTLAttribuut], key: str, value,
waarde_shortcut: bool = False, rdf: bool = False):
if rdf:
attribute_to_set = get_attribute_by_uri(instance_or_attribute, key)
else:
attribute_to_set = get_attribute_by_name(instance_or_attribute, key)
if attribute_to_set.field.waardeObject is not None: # complex / union / KwantWrd / dte
if isinstance(value, list):
for index, list_item in enumerate(value):
if attribute_to_set.waarde is None or len(attribute_to_set.waarde) <= index:
attribute_to_set.add_empty_value()
if attribute_to_set.field.waarde_shortcut_applicable and waarde_shortcut: # dte / kwantWrd
attribute_to_set.waarde[index]._waarde.set_waarde(list_item)
else: # complex / union
for k, v in list_item.items():
set_value_by_dictitem(attribute_to_set.waarde[index], k, v, waarde_shortcut, rdf=rdf)
elif isinstance(value, dict): # only complex / union possible
if attribute_to_set.waarde is None:
attribute_to_set.add_empty_value()
if attribute_to_set.kardinaliteit_max != '1':
for k, v in value.items():
set_value_by_dictitem(attribute_to_set.waarde[0], k, v, waarde_shortcut, rdf=rdf)
else:
for k, v in value.items():
set_value_by_dictitem(attribute_to_set.waarde, k, v, waarde_shortcut, rdf=rdf)
else: # must be a dte / kwantWrd
if attribute_to_set.waarde is None:
attribute_to_set.add_empty_value()
attribute_to_set.waarde._waarde.set_waarde(value)
else:
attribute_to_set.set_waarde(value)
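# A hedged round-trip sketch (the attribute name 'naam' and the values are
# hypothetical):
#
#     set_value_by_dictitem(instance, 'naam', 'asset 1')
#     # resolves getattr(instance, '_naam') and calls set_waarde('asset 1')
#
#     set_value_by_dictitem(instance, key, value, rdf=True)
#     # with rdf=True the key is matched against attr.objectUri instead of the name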
|
PypiClean
|
/azure-mgmt-appplatform-8.0.0.zip/azure-mgmt-appplatform-8.0.0/azure/mgmt/appplatform/v2022_05_01_preview/operations/_build_service_builder_operations.py
|
import sys
from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
from ..._serialization import Serializer
from .._vendor import _convert_request, _format_url_section
if sys.version_info >= (3, 8):
from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
else:
from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_get_request(
resource_group_name: str,
service_name: str,
build_service_name: str,
builder_name: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2022-05-01-preview"] = kwargs.pop(
"api_version", _params.pop("api-version", "2022-05-01-preview")
)
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"serviceName": _SERIALIZER.url("service_name", service_name, "str"),
"buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"),
"builderName": _SERIALIZER.url("builder_name", builder_name, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_create_or_update_request(
resource_group_name: str,
service_name: str,
build_service_name: str,
builder_name: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2022-05-01-preview"] = kwargs.pop(
"api_version", _params.pop("api-version", "2022-05-01-preview")
)
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"serviceName": _SERIALIZER.url("service_name", service_name, "str"),
"buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"),
"builderName": _SERIALIZER.url("builder_name", builder_name, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_delete_request(
resource_group_name: str,
service_name: str,
build_service_name: str,
builder_name: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2022-05-01-preview"] = kwargs.pop(
"api_version", _params.pop("api-version", "2022-05-01-preview")
)
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"serviceName": _SERIALIZER.url("service_name", service_name, "str"),
"buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"),
"builderName": _SERIALIZER.url("builder_name", builder_name, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
def build_list_request(
resource_group_name: str, service_name: str, build_service_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2022-05-01-preview"] = kwargs.pop(
"api_version", _params.pop("api-version", "2022-05-01-preview")
)
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"serviceName": _SERIALIZER.url("service_name", service_name, "str"),
"buildServiceName": _SERIALIZER.url("build_service_name", build_service_name, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
class BuildServiceBuilderOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.appplatform.v2022_05_01_preview.AppPlatformManagementClient`'s
:attr:`build_service_builder` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def get(
self, resource_group_name: str, service_name: str, build_service_name: str, builder_name: str, **kwargs: Any
) -> _models.BuilderResource:
"""Get a KPack builder.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal. Required.
:type resource_group_name: str
:param service_name: The name of the Service resource. Required.
:type service_name: str
:param build_service_name: The name of the build service resource. Required.
:type build_service_name: str
:param builder_name: The name of the builder resource. Required.
:type builder_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: BuilderResource or the result of cls(response)
:rtype: ~azure.mgmt.appplatform.v2022_05_01_preview.models.BuilderResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2022-05-01-preview"] = kwargs.pop(
"api_version", _params.pop("api-version", "2022-05-01-preview")
)
cls: ClsType[_models.BuilderResource] = kwargs.pop("cls", None)
request = build_get_request(
resource_group_name=resource_group_name,
service_name=service_name,
build_service_name=build_service_name,
builder_name=builder_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("BuilderResource", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}"
}
def _create_or_update_initial(
self,
resource_group_name: str,
service_name: str,
build_service_name: str,
builder_name: str,
builder_resource: Union[_models.BuilderResource, IO],
**kwargs: Any
) -> _models.BuilderResource:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2022-05-01-preview"] = kwargs.pop(
"api_version", _params.pop("api-version", "2022-05-01-preview")
)
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.BuilderResource] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(builder_resource, (IO, bytes)):
_content = builder_resource
else:
_json = self._serialize.body(builder_resource, "BuilderResource")
request = build_create_or_update_request(
resource_group_name=resource_group_name,
service_name=service_name,
build_service_name=build_service_name,
builder_name=builder_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize("BuilderResource", pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize("BuilderResource", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
_create_or_update_initial.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}"
}
@overload
def begin_create_or_update(
self,
resource_group_name: str,
service_name: str,
build_service_name: str,
builder_name: str,
builder_resource: _models.BuilderResource,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.BuilderResource]:
"""Create or update a KPack builder.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal. Required.
:type resource_group_name: str
:param service_name: The name of the Service resource. Required.
:type service_name: str
:param build_service_name: The name of the build service resource. Required.
:type build_service_name: str
:param builder_name: The name of the builder resource. Required.
:type builder_name: str
:param builder_resource: The target builder for the create or update operation. Required.
:type builder_resource: ~azure.mgmt.appplatform.v2022_05_01_preview.models.BuilderResource
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either BuilderResource or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_05_01_preview.models.BuilderResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def begin_create_or_update(
self,
resource_group_name: str,
service_name: str,
build_service_name: str,
builder_name: str,
builder_resource: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.BuilderResource]:
"""Create or update a KPack builder.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal. Required.
:type resource_group_name: str
:param service_name: The name of the Service resource. Required.
:type service_name: str
:param build_service_name: The name of the build service resource. Required.
:type build_service_name: str
:param builder_name: The name of the builder resource. Required.
:type builder_name: str
:param builder_resource: The target builder for the create or update operation. Required.
:type builder_resource: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either BuilderResource or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_05_01_preview.models.BuilderResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def begin_create_or_update(
self,
resource_group_name: str,
service_name: str,
build_service_name: str,
builder_name: str,
builder_resource: Union[_models.BuilderResource, IO],
**kwargs: Any
) -> LROPoller[_models.BuilderResource]:
"""Create or update a KPack builder.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal. Required.
:type resource_group_name: str
:param service_name: The name of the Service resource. Required.
:type service_name: str
:param build_service_name: The name of the build service resource. Required.
:type build_service_name: str
:param builder_name: The name of the builder resource. Required.
:type builder_name: str
:param builder_resource: The target builder for the create or update operation. Is either a
BuilderResource type or a IO type. Required.
:type builder_resource: ~azure.mgmt.appplatform.v2022_05_01_preview.models.BuilderResource or
IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either BuilderResource or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_05_01_preview.models.BuilderResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2022-05-01-preview"] = kwargs.pop(
"api_version", _params.pop("api-version", "2022-05-01-preview")
)
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.BuilderResource] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
service_name=service_name,
build_service_name=build_service_name,
builder_name=builder_name,
builder_resource=builder_resource,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("BuilderResource", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
begin_create_or_update.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}"
}
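# A hedged sketch of driving the poller (assuming `client` is an
# AppPlatformManagementClient as sketched at the end of this module; the
# resource names and payload are placeholders):
#
#     poller = client.build_service_builder.begin_create_or_update(
#         resource_group_name="my-rg",
#         service_name="my-spring-service",
#         build_service_name="default",
#         builder_name="my-builder",
#         builder_resource=builder_resource,  # a BuilderResource model or IO stream
#     )
#     builder = poller.result()  # blocks until the LRO completes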
def _delete_initial( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, service_name: str, build_service_name: str, builder_name: str, **kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2022-05-01-preview"] = kwargs.pop(
"api_version", _params.pop("api-version", "2022-05-01-preview")
)
cls: ClsType[None] = kwargs.pop("cls", None)
request = build_delete_request(
resource_group_name=resource_group_name,
service_name=service_name,
build_service_name=build_service_name,
builder_name=builder_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}"
}
@distributed_trace
def begin_delete(
self, resource_group_name: str, service_name: str, build_service_name: str, builder_name: str, **kwargs: Any
) -> LROPoller[None]:
"""Delete a KPack builder.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal. Required.
:type resource_group_name: str
:param service_name: The name of the Service resource. Required.
:type service_name: str
:param build_service_name: The name of the build service resource. Required.
:type build_service_name: str
:param builder_name: The name of the builder resource. Required.
:type builder_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2022-05-01-preview"] = kwargs.pop(
"api_version", _params.pop("api-version", "2022-05-01-preview")
)
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
raw_result = self._delete_initial( # type: ignore
resource_group_name=resource_group_name,
service_name=service_name,
build_service_name=build_service_name,
builder_name=builder_name,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
begin_delete.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders/{builderName}"
}
@distributed_trace
def list(
self, resource_group_name: str, service_name: str, build_service_name: str, **kwargs: Any
) -> Iterable["_models.BuilderResource"]:
"""List KPack builders result.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal. Required.
:type resource_group_name: str
:param service_name: The name of the Service resource. Required.
:type service_name: str
:param build_service_name: The name of the build service resource. Required.
:type build_service_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either BuilderResource or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_05_01_preview.models.BuilderResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2022-05-01-preview"] = kwargs.pop(
"api_version", _params.pop("api-version", "2022-05-01-preview")
)
cls: ClsType[_models.BuilderResourceCollection] = kwargs.pop("cls", None)
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
resource_group_name=resource_group_name,
service_name=service_name,
build_service_name=build_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("BuilderResourceCollection", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/builders"
}
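# A hedged usage sketch (credential handling and resource names are
# placeholders):
#
#     from azure.identity import DefaultAzureCredential
#     from azure.mgmt.appplatform.v2022_05_01_preview import AppPlatformManagementClient
#
#     client = AppPlatformManagementClient(DefaultAzureCredential(), "<subscription-id>")
#     builder = client.build_service_builder.get(
#         resource_group_name="my-rg",
#         service_name="my-spring-service",
#         build_service_name="default",
#         builder_name="default",
#     )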
|
PypiClean
|
/pyportable_installer-4.4.2-py3-none-any.whl/pyportable_installer/main_flow/step3/step3_1/attachments.py
|
import os
from os import path as xpath
from shutil import copyfile
from lk_utils import filesniff
from .exclusions import attachments_exclusions_handler
from ....global_conf import gconf
from ....path_model import src_2_dst
from ....typehint import *
__all__ = ['copy_attachments']
def find_files(dir_i):
for f in filter(
lambda x: attachments_exclusions_handler.filter_files(
x.path, x.name
),
filesniff.find_files(dir_i)
):
yield f.path, f.name
def find_dirs(dir_i):
for d in filter(
lambda x: attachments_exclusions_handler.filter_dirs(
x.path, x.name
),
filesniff.find_dirs(
dir_i, exclude_protected_folders=False
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# set this param to False; we will use our own
# `exclusions_handler` to filter the protected folders.
)
):
yield d.path, d.name
def _handle_file_exists(file_o, scheme=''):
"""
Returns:
str['go_on', 'done']
'go_on': continue with the remaining handling for this file.
'done': everything is already handled; do not process this file further.
"""
if scheme == '': # see `TBuildConf.attachments_exist_scheme`
scheme = gconf.attachments_exist_scheme
if scheme == 'error':
raise FileExistsError(file_o)
elif scheme == 'overwrite':
os.remove(file_o)
return 'go_on'
elif scheme == 'skip':
return 'done'
else:
raise Exception('Unknown scheme', scheme)
_created_dirs = set()
def copy_attachments(attachments: TAttachments) -> Iterator[Tuple[TPath, TPath]]:
""" Specific for handling attachmets in format of `~.typehint.TAttachments`.
Attachments Marks (`~.typehint.TAttachments._TAttachmentsValue.marks`):
See complete docs at `~/docs/attachments-marks-demonstration.md`.
Single Marks:
asset
assets
root_assets
only_folder
only_folders
Double Marks:
asset,compile
assets,compile
root_assets,compile
Args:
attachments
Notice:
This function is a generator; if you just want to call and exhaust it,
use this:
_ = list(copy_attachments(...))
See usages in `pyportable_installer.main_flow.step3.step3_3.create_venv
.create_venv`.
Yields:
Tuple[src_pyfile, dst_pyfile]
"""
global _created_dirs
for k, v in attachments.items():
path_i = k
path_o = v['path'] or src_2_dst(path_i)
# there's no '{name}' placeholder in `path_i` and `path_o` because in
# `~.step1.indexing_paths.indexing_paths > attachments related code`
# we've handled them.
marks = v['marks'] # e.g. ('assets', 'compile')
is_yield_pyfile = 'compile' in marks # type: bool
# True: yield pyfile; False: just copy pyfile
# 1. `path_i` is file
if 'asset' in marks or xpath.isfile(path_i):
if attachments_exclusions_handler.monitor_transferring(
'', path_i, 'file') is False:
print(':v', '[D4756]', 'the file is in exclusion list', path_i)
else:
if (d := xpath.dirname(path_o)) not in _created_dirs:
os.makedirs(d, exist_ok=True)
_created_dirs.add(d)
if is_yield_pyfile:
yield from _handle_compile(path_i, path_o)
else:
_handle_asset(path_i, path_o)
continue
# 2. `path_i` is dir
dir_i = path_i
dir_o = path_o
if dir_o not in _created_dirs:
os.makedirs(dir_o, exist_ok=True)
_created_dirs.add(dir_o)
if attachments_exclusions_handler.monitor_transferring(
'', dir_i, 'dir') is False:
print(':v', '[D4757]', 'the directory is in exclusion list', dir_i)
continue
if not xpath.exists(dir_o):
os.mkdir(dir_o)
if 'root_assets' in marks:
if is_yield_pyfile:
yield from _handle_root_assets_and_compile(dir_i, dir_o)
else:
_handle_root_assets(dir_i, dir_o)
elif 'assets' in marks:
if is_yield_pyfile:
yield from _handle_assets_and_compile(dir_i, dir_o)
else:
_handle_assets(dir_i, dir_o)
elif 'only_folders' in marks:
assert is_yield_pyfile is False
_handle_only_folders(dir_i, dir_o)
elif 'only_folder' in marks:
assert is_yield_pyfile is False
_handle_only_folder(dir_i, dir_o)
else:
raise ValueError('Unknown or incomplete mark', marks)
# -----------------------------------------------------------------------------
def _handle_assets(dir_i, dir_o):
_handle_root_assets(dir_i, dir_o)
for dp, dn in find_dirs(dir_i):
subdir_i, subdir_o = dp, f'{dir_o}/{dn}'
os.mkdir(subdir_o)
# # if not xpath.exists(subdir_o): mkdir(subdir_o)
_handle_assets(subdir_i, subdir_o)
def _handle_root_assets(dir_i, dir_o):
for fp, fn in find_files(dir_i):
file_i, file_o = fp, f'{dir_o}/{fn}'
copyfile(file_i, file_o)
def _handle_assets_and_compile(dir_i, dir_o):
yield from _handle_root_assets_and_compile(dir_i, dir_o)
for dp, dn in find_dirs(dir_i):
subdir_i, subdir_o = dp, f'{dir_o}/{dn}'
if not xpath.exists(subdir_o): os.mkdir(subdir_o)
yield from _handle_assets_and_compile(subdir_i, subdir_o)
def _handle_root_assets_and_compile(dir_i, dir_o):
for fp, fn in find_files(dir_i):
file_i, file_o = fp, f'{dir_o}/{fn}'
if fn.endswith('.py'): # TODO: ~.endswith(('.py', '.pyw', ...))
if xpath.exists(file_o) and _handle_file_exists(file_o) == 'done':
continue
yield file_i, file_o # MARK: 20210913113649
else:
copyfile(file_i, file_o)
def _handle_only_folders(dir_i, dir_o):
for dp, dn in find_dirs(dir_i):
subdir_i, subdir_o = dp, f'{dir_o}/{dn}'
_handle_only_folder(subdir_i, subdir_o)
_handle_only_folders(subdir_i, subdir_o)
# noinspection PyUnusedLocal
def _handle_only_folder(dir_i, dir_o):
if not xpath.exists(dir_o):
os.mkdir(dir_o)
def _handle_asset(file_i, file_o):
copyfile(file_i, file_o)
# noinspection PyUnusedLocal
def _handle_compile(file_i, file_o):
assert file_i.endswith('.py')
if xpath.exists(file_o) and _handle_file_exists(file_o) == 'done':
return
yield file_i, file_o # MARK: 20210913113657
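# A hedged sketch of driving the generator (the paths and marks are
# hypothetical):
#
#     attachments = {
#         '/proj/src/assets': {'path': '', 'marks': ('assets',)},
#         '/proj/src/main.py': {'path': '', 'marks': ('asset', 'compile')},
#     }
#     for src_pyfile, dst_pyfile in copy_attachments(attachments):
#         ...  # hand each (src, dst) pair to a compiler of your choice
#
# Plain assets are copied as a side effect; only .py files marked 'compile'
# are yielded.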
|
PypiClean
|
/qwif-0.0.5.tar.gz/qwif-0.0.5/CHANGELOG.md
|
Changelog
==========
This file documents notable changes to `qwif`.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
[0.0.5] - 2020-09-13
---------------------
Fix formatting/style issues, fix `make image`.
### Added
* partial test coverage for CLI
* improved test coverage for contents module
### Changed
* fixed flake8/pep8 issues
* `make image` now completes (was previously failing due to flake8).
[0.0.4] - 2020-09-12
---------------------
Fractionally clearer CLI, changelog.
### Added
* This changelog.
### Changed
* CLI subcommands' help menus have a group header of 'fields' to help distinguish
content field arguments from common arguments.
[0.0.3] - 2020-09-12
---------------------
Support for otpauth QR codes, readable by Google Authenticator.
### Added
* Support for otpauth strings, to generate QR codes for OTP authenticators.
* Usage information in README for generating a QR code for OTP.
[0.0.2] - 2020-09-12
---------------------
Documentation fixes.
### Added
* Usage information in README, including generating a QR code for wifi.
[0.0.1] - 2020-09-12
---------------------
Initial release of `qwif`.
### Added
* generate QR codes for WIFI configuration, URL/bookmark, contact cards,
calendar events, location, email and raw text.
* support `--output`, printing a scannable QR code to any TTY, or an SVG
otherwise.
|
PypiClean
|
/ZMS-5.1.0-py3-none-any.whl/Products/zms/doc/epydoc/epydoc.js
|
function toggle_private() {
// Search for any private/public links on this page. Store
// their old text in "cmd," so we will know what action to
// take; and change their text to the opposite action.
var cmd = "?";
var elts = document.getElementsByTagName("a");
for(var i=0; i<elts.length; i++) {
if (elts[i].className == "privatelink") {
cmd = elts[i].innerHTML;
elts[i].innerHTML = ((cmd && cmd.substr(0,4)=="show")?
"hide private":"show private");
}
}
// Update all DIVs containing private objects.
var elts = document.getElementsByTagName("div");
for(var i=0; i<elts.length; i++) {
if (elts[i].className == "private") {
elts[i].style.display = ((cmd && cmd.substr(0,4)=="hide")?"none":"block");
}
else if (elts[i].className == "public") {
elts[i].style.display = ((cmd && cmd.substr(0,4)=="hide")?"block":"none");
}
}
// Update all table rows containing private objects. Note, we
// use "" instead of "block" becaue IE & firefox disagree on what
// this should be (block vs table-row), and "" just gives the
// default for both browsers.
var elts = document.getElementsByTagName("tr");
for(var i=0; i<elts.length; i++) {
if (elts[i].className == "private") {
elts[i].style.display = ((cmd && cmd.substr(0,4)=="hide")?"none":"");
}
}
// Update all list items containing private objects.
var elts = document.getElementsByTagName("li");
for(var i=0; i<elts.length; i++) {
if (elts[i].className == "private") {
elts[i].style.display = ((cmd && cmd.substr(0,4)=="hide")?
"none":"");
}
}
// Update all unordered lists containing private objects.
var elts = document.getElementsByTagName("ul");
for(var i=0; i<elts.length; i++) {
if (elts[i].className == "private") {
elts[i].style.display = ((cmd && cmd.substr(0,4)=="hide")?"none":"block");
}
}
// Set a cookie to remember the current option.
document.cookie = "EpydocPrivate="+cmd;
}
function show_private() {
var elts = document.getElementsByTagName("a");
for(var i=0; i<elts.length; i++) {
if (elts[i].className == "privatelink") {
cmd = elts[i].innerHTML;
if (cmd && cmd.substr(0,4)=="show")
toggle_private();
}
}
}
function getCookie(name) {
var dc = document.cookie;
var prefix = name + "=";
var begin = dc.indexOf("; " + prefix);
if (begin == -1) {
begin = dc.indexOf(prefix);
if (begin != 0) return null;
} else
{ begin += 2; }
var end = document.cookie.indexOf(";", begin);
if (end == -1)
{ end = dc.length; }
return unescape(dc.substring(begin + prefix.length, end));
}
function setFrame(url1, url2) {
parent.frames[1].location.href = url1;
parent.frames[2].location.href = url2;
}
function checkCookie() {
var cmd=getCookie("EpydocPrivate");
if (cmd && cmd.substr(0,4)!="show" && location.href.indexOf("#_") < 0)
toggle_private();
}
function toggleCallGraph(id) {
var elt = document.getElementById(id);
if (elt.style.display == "none")
elt.style.display = "block";
else
elt.style.display = "none";
}
function expand(id) {
var elt = document.getElementById(id+"-expanded");
if (elt) elt.style.display = "block";
var elt = document.getElementById(id+"-expanded-linenums");
if (elt) elt.style.display = "block";
var elt = document.getElementById(id+"-collapsed");
if (elt) { elt.innerHTML = ""; elt.style.display = "none"; }
var elt = document.getElementById(id+"-collapsed-linenums");
if (elt) { elt.innerHTML = ""; elt.style.display = "none"; }
var elt = document.getElementById(id+"-toggle");
if (elt) { elt.innerHTML = "-"; }
}
function collapse(id) {
var elt = document.getElementById(id+"-expanded");
if (elt) elt.style.display = "none";
var elt = document.getElementById(id+"-expanded-linenums");
if (elt) elt.style.display = "none";
var elt = document.getElementById(id+"-collapsed-linenums");
if (elt) { elt.innerHTML = "<br />"; elt.style.display="block"; }
var elt = document.getElementById(id+"-toggle");
if (elt) { elt.innerHTML = "+"; }
var elt = document.getElementById(id+"-collapsed");
if (elt) {
elt.style.display = "block";
var indent = elt.getAttribute("indent");
var pad = elt.getAttribute("pad");
var s = "<tt class='py-lineno'>";
for (var i=0; i<pad.length; i++) { s += " " }
s += "</tt>";
s += " <tt class='py-line'>";
for (var i=0; i<indent.length; i++) { s += " " }
s += "<a href='#' onclick='expand(\"" + id;
s += "\");return false'>...</a></tt><br />";
elt.innerHTML = s;
}
}
function toggle(id) {
elt = document.getElementById(id+"-toggle");
if (elt.innerHTML == "-")
collapse(id);
else
expand(id);
return false;
}
function highlight(id) {
var elt = document.getElementById(id+"-def");
if (elt) elt.className = "py-highlight-hdr";
var elt = document.getElementById(id+"-expanded");
if (elt) elt.className = "py-highlight";
var elt = document.getElementById(id+"-collapsed");
if (elt) elt.className = "py-highlight";
}
function num_lines(s) {
var n = 1;
var pos = s.indexOf("\n");
while ( pos > 0) {
n += 1;
pos = s.indexOf("\n", pos+1);
}
return n;
}
// Collapse all blocks that have more than `min_lines` lines.
function collapse_all(min_lines) {
var elts = document.getElementsByTagName("div");
for (var i=0; i<elts.length; i++) {
var elt = elts[i];
var split = elt.id.indexOf("-");
if (split > 0)
if (elt.id.substring(split, elt.id.length) == "-expanded")
if (num_lines(elt.innerHTML) > min_lines)
collapse(elt.id.substring(0, split));
}
}
function expandto(href) {
var start = href.indexOf("#")+1;
if (start != 0 && start != href.length) {
if (href.substring(start, href.length) != "-") {
collapse_all(4);
pos = href.indexOf(".", start);
while (pos != -1) {
var id = href.substring(start, pos);
expand(id);
pos = href.indexOf(".", pos+1);
}
var id = href.substring(start, href.length);
expand(id);
highlight(id);
}
}
}
function kill_doclink(id) {
var parent = document.getElementById(id);
parent.removeChild(parent.childNodes.item(0));
}
function auto_kill_doclink(ev) {
if (!ev) var ev = window.event;
if (!this.contains(ev.toElement)) {
var parent = document.getElementById(this.parentID);
parent.removeChild(parent.childNodes.item(0));
}
}
function doclink(id, name, targets_id) {
var elt = document.getElementById(id);
// If we already opened the box, then destroy it.
// (This case should never occur, but leave it in just in case.)
if (elt.childNodes.length > 1) {
elt.removeChild(elt.childNodes.item(0));
}
else {
// The outer box: relative + inline positioning.
var box1 = document.createElement("div");
box1.style.position = "relative";
box1.style.display = "inline";
box1.style.top = 0;
box1.style.left = 0;
// A shadow for fun
var shadow = document.createElement("div");
shadow.style.position = "absolute";
shadow.style.left = "-1.3em";
shadow.style.top = "-1.3em";
shadow.style.background = "#404040";
// The inner box: absolute positioning.
var box2 = document.createElement("div");
box2.style.position = "relative";
box2.style.border = "1px solid #a0a0a0";
box2.style.left = "-.2em";
box2.style.top = "-.2em";
box2.style.background = "white";
box2.style.padding = ".3em .4em .3em .4em";
box2.style.fontStyle = "normal";
box2.onmouseout=auto_kill_doclink;
box2.parentID = id;
// Get the targets
var targets_elt = document.getElementById(targets_id);
var targets = targets_elt.getAttribute("targets");
var links = "";
target_list = targets.split(",");
for (var i=0; i<target_list.length; i++) {
var target = target_list[i].split("=");
links += "<li><a href='" + target[1] +
"' style='text-decoration:none'>" +
target[0] + "</a></li>";
}
// Put it all together.
elt.insertBefore(box1, elt.childNodes.item(0));
//box1.appendChild(box2);
box1.appendChild(shadow);
shadow.appendChild(box2);
box2.innerHTML =
"Which <b>"+name+"</b> do you want to see documentation for?" +
"<ul style='margin-bottom: 0;'>" +
links +
"<li><a href='#' style='text-decoration:none' " +
"onclick='kill_doclink(\""+id+"\");return false;'>"+
"<i>None of the above</i></a></li></ul>";
}
return false;
}
function get_anchor() {
var href = location.href;
var start = href.indexOf("#")+1;
if ((start != 0) && (start != href.length))
return href.substring(start, href.length);
}
function redirect_url(dottedName) {
// Scan through each element of the "pages" list, and check
// if "name" matches with any of them.
for (var i=0; i<pages.length; i++) {
// Each page has the form "<pagename>-m" or "<pagename>-c";
// extract the <pagename> portion & compare it to dottedName.
var pagename = pages[i].substring(0, pages[i].length-2);
if (pagename == dottedName.substring(0,pagename.length)) {
// We've found a page that matches `dottedName`;
// construct its URL, using leftover `dottedName`
// content to form an anchor.
var pagetype = pages[i].charAt(pages[i].length-1);
var url = pagename + ((pagetype=="m")?"-module.html":
"-class.html");
if (dottedName.length > pagename.length)
url += "#" + dottedName.substring(pagename.length+1,
dottedName.length);
return url;
}
}
}
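// A hedged sketch of redirect_url (the global `pages` array is normally
// injected by epydoc into each generated page; the names are hypothetical):
//
//     pages = ["epydoc.apidoc-m"];
//     redirect_url("epydoc.apidoc.DottedName");
//     // -> "epydoc.apidoc-module.html#DottedName"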
|
PypiClean
|
/jupyterhub_url_sharing-0.1.0.tar.gz/jupyterhub_url_sharing-0.1.0/node_modules/webpack/lib/dependencies/RequireContextPlugin.js
|
"use strict";
const {
JAVASCRIPT_MODULE_TYPE_AUTO,
JAVASCRIPT_MODULE_TYPE_DYNAMIC
} = require("../ModuleTypeConstants");
const { cachedSetProperty } = require("../util/cleverMerge");
const ContextElementDependency = require("./ContextElementDependency");
const RequireContextDependency = require("./RequireContextDependency");
const RequireContextDependencyParserPlugin = require("./RequireContextDependencyParserPlugin");
/** @typedef {import("../../declarations/WebpackOptions").ResolveOptions} ResolveOptions */
/** @typedef {import("../Compiler")} Compiler */
/** @type {ResolveOptions} */
const EMPTY_RESOLVE_OPTIONS = {};
const PLUGIN_NAME = "RequireContextPlugin";
class RequireContextPlugin {
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
compiler.hooks.compilation.tap(
PLUGIN_NAME,
(compilation, { contextModuleFactory, normalModuleFactory }) => {
compilation.dependencyFactories.set(
RequireContextDependency,
contextModuleFactory
);
compilation.dependencyTemplates.set(
RequireContextDependency,
new RequireContextDependency.Template()
);
compilation.dependencyFactories.set(
ContextElementDependency,
normalModuleFactory
);
const handler = (parser, parserOptions) => {
if (
parserOptions.requireContext !== undefined &&
!parserOptions.requireContext
)
return;
new RequireContextDependencyParserPlugin().apply(parser);
};
normalModuleFactory.hooks.parser
.for(JAVASCRIPT_MODULE_TYPE_AUTO)
.tap(PLUGIN_NAME, handler);
normalModuleFactory.hooks.parser
.for(JAVASCRIPT_MODULE_TYPE_DYNAMIC)
.tap(PLUGIN_NAME, handler);
contextModuleFactory.hooks.alternativeRequests.tap(
PLUGIN_NAME,
(items, options) => {
if (items.length === 0) return items;
const finalResolveOptions = compiler.resolverFactory.get(
"normal",
cachedSetProperty(
options.resolveOptions || EMPTY_RESOLVE_OPTIONS,
"dependencyType",
options.category
)
).options;
let newItems;
if (!finalResolveOptions.fullySpecified) {
// also offer each request with a resolvable extension stripped
newItems = [];
for (const item of items) {
const { request, context } = item;
for (const ext of finalResolveOptions.extensions) {
if (request.endsWith(ext)) {
newItems.push({
context,
request: request.slice(0, -ext.length)
});
}
}
if (!finalResolveOptions.enforceExtension) {
newItems.push(item);
}
}
items = newItems;
newItems = [];
// also offer directory requests with their mainFiles suffix stripped
for (const obj of items) {
const { request, context } = obj;
for (const mainFile of finalResolveOptions.mainFiles) {
if (request.endsWith(`/${mainFile}`)) {
newItems.push({
context,
request: request.slice(0, -mainFile.length)
});
newItems.push({
context,
request: request.slice(0, -mainFile.length - 1)
});
}
}
newItems.push(obj);
}
items = newItems;
}
newItems = [];
// rewrite requests that point into one of the resolve `modules` directories
for (const item of items) {
let hideOriginal = false;
for (const modulesItems of finalResolveOptions.modules) {
if (Array.isArray(modulesItems)) {
for (const dir of modulesItems) {
if (item.request.startsWith(`./${dir}/`)) {
newItems.push({
context: item.context,
request: item.request.slice(dir.length + 3)
});
hideOriginal = true;
}
}
} else {
const dir = modulesItems.replace(/\\/g, "/");
const fullPath =
item.context.replace(/\\/g, "/") + item.request.slice(1);
if (fullPath.startsWith(dir)) {
newItems.push({
context: item.context,
request: fullPath.slice(dir.length + 1)
});
}
}
}
if (!hideOriginal) {
newItems.push(item);
}
}
return newItems;
}
);
}
);
}
}
module.exports = RequireContextPlugin;
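// A hedged usage sketch: webpack applies this plugin internally (via
// WebpackOptionsApply), so manual wiring is only needed in custom setups:
//
//     const RequireContextPlugin = require("webpack/lib/dependencies/RequireContextPlugin");
//     new RequireContextPlugin().apply(compiler);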
|
PypiClean
|
/proxycurl-py_test-0.0.16.tar.gz/proxycurl-py_test-0.0.16/proxycurl_py_test/gevent/library.py
|
from proxycurl_py_test.config import (
BASE_URL, PROXYCURL_API_KEY, TIMEOUT, MAX_RETRIES, MAX_BACKOFF_SECONDS
)
from proxycurl_py_test.gevent.base import ProxycurlBase
from proxycurl_py_test.models import (
PersonEndpointResponse,
UrlResult,
ExtractionEmailResult,
PDLPhoneNumberResult,
PDLEmailResult,
LinkedinCompany,
JobListPage,
EmployeeCount,
EmployeeList,
RoleSearchResult,
CompanyReveal,
LinkedinSchool,
LinkedinJob,
CreditBalance,
)
class _LinkedinPerson:
def __init__(self, linkedin):
self.linkedin = linkedin
def get(
self,
url: str,
use_cache: str = 'if-recent',
skills: str = 'exclude',
inferred_salary: str = 'exclude',
extra: str = 'exclude',
) -> PersonEndpointResponse:
"""Person Profile Endpoint
Get structured data of a Personal Profile
:param url: URL of the LinkedIn Profile to crawl.
URL should be in the format of `https://www.linkedin.com/in/<public-identifier>`
:type url: str
:param use_cache: `if-present` Fetches profile from cache regardless of age of profile. If profile is not available in cache, API will attempt to source profile externally.
`if-recent` The default behavior. API will make a best effort to return a fresh profile no older than 29 days., defaults to 'if-recent'
:type use_cache: str
:param skills: Include skills data from external sources.
This parameter accepts the following values:
- `exclude` (default value) - Does not provide skills data field.
- `include` - Append skills data to the person profile object. Costs an extra `1` credit on top of the cost of the base endpoint (if data is available)., defaults to 'exclude'
:type skills: str
:param inferred_salary: Include inferred salary range from external sources.
This parameter accepts the following values:
- `exclude` (default value) - Does not provide inferred salary data field.
- `include` - Append inferred salary range data to the person profile object. Costs an extra `1` credit on top of the cost of the base endpoint (if data is available)., defaults to 'exclude'
:type inferred_salary: str
:param extra: Enriches the Person Profile with extra details from external sources. Extra details include IDs of social media accounts such as Github and Facebook, gender, birth date, industry and interests.
This parameter accepts the following values:
- `exclude` (default value) - Does not provide extra data field.
- `include` - Append extra data to the person profile object. Costs an extra `1` credit on top of the cost of the base endpoint (if data is available)., defaults to 'exclude'
:type extra: str
:return: An object of :class:`proxycurl.models.PersonEndpointResponse` or **None** if there is an error.
:rtype: :class:`proxycurl.models.PersonEndpointResponse`
:raise ProxycurlException: Every error will raise a :class:`proxycurl.gevent.ProxycurlException`
"""
return self.linkedin.proxycurl.request(
method='GET',
url='/proxycurl/api/v2/linkedin',
params={
'url': url,
'use_cache': use_cache,
'skills': skills,
'inferred_salary': inferred_salary,
'extra': extra,
},
data={
},
result_class=PersonEndpointResponse
)
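# A hedged usage sketch (the Proxycurl entry point is assumed from this
# package's layout; the profile URL is a placeholder):
#
#     from proxycurl_py_test.gevent import Proxycurl
#     proxycurl = Proxycurl()
#     person = proxycurl.linkedin.person.get(
#         url='https://www.linkedin.com/in/some-public-identifier/')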
def resolve(
self,
first_name: str,
company_domain: str,
location: str = '',
title: str = '',
last_name: str = '',
) -> UrlResult:
"""Person Lookup Endpoint
Resolve LinkedIn Profile
:param first_name: First name of the user
:type first_name: str
:param company_domain: Company name or domain
:type company_domain: str
:param location: The location of this user.
Name of country, city or state.
:type location: str
:param title: Title that user is holding at his/her current job
:type title: str
:param last_name: Last name of the user
:type last_name: str
:return: An object of :class:`proxycurl.models.UrlResult` or **None** if there is an error.
:rtype: :class:`proxycurl.models.UrlResult`
:raise ProxycurlException: Every error will raise a :class:`proxycurl.gevent.ProxycurlException`
"""
return self.linkedin.proxycurl.request(
method='GET',
url='/proxycurl/api/linkedin/profile/resolve',
params={
'first_name': first_name,
'company_domain': company_domain,
'location': location,
'title': title,
'last_name': last_name,
},
data={
},
result_class=UrlResult
)
def resolve_by_email(
self,
work_email: str,
) -> UrlResult:
"""Reverse Work Email Lookup Endpoint
Resolve LinkedIn Profile from a work email address
:param work_email: Work email address of the user
:type work_email: str
:return: An object of :class:`proxycurl.models.UrlResult` or **None** if there is an error.
:rtype: :class:`proxycurl.models.UrlResult`
:raise ProxycurlException: Every error will raise a :class:`proxycurl.gevent.ProxycurlException`
"""
return self.linkedin.proxycurl.request(
method='GET',
url='/proxycurl/api/linkedin/profile/resolve/email',
params={
'work_email': work_email,
},
data={
},
result_class=UrlResult
)
def lookup_email(
self,
linkedin_profile_url: str,
callback_url: str = '',
) -> ExtractionEmailResult:
"""Work Email Lookup Endpoint
Lookup work email address of a LinkedIn Person Profile.
Email addresses returned are verified not to be role-based or catch-all emails. Email addresses
returned by our API endpoint come with a 95+% deliverability guarantee.
**Endpoint behavior**
*This endpoint* **_may not_** *return results immediately.*
For some profiles, email addresses are returned immediately when the endpoint is called. For such
requests, we will respond with a `200` status code. Credits will be consumed immediately.
Some profiles require more time to extract email address from. For such requests, we will respond
with a `202` status code. No credits are consumed.
If you provided a webhook in your request parameter, our application will call your webhook with
the result once. See `Webhook payload` below.
Alternatively, you can poll our API (repeat the request) and we will return the result once it is
available.
Successful responses to requests are cached for up to 24 hours. We will also not charge you for the
same request in a 24-hour window.
:param linkedin_profile_url: Linkedin Profile URL of the person you want to
extract work email address from.
:type linkedin_profile_url: str
:param callback_url: Webhook to notify your application when
the request has finished processing.
:type callback_url: str
:return: An object of :class:`proxycurl.models.ExtractionEmailResult` or **None** if there is an error.
:rtype: :class:`proxycurl.models.ExtractionEmailResult`
:raise ProxycurlException: Every error will raise a :class:`proxycurl.gevent.ProxycurlException`
"""
return self.linkedin.proxycurl.request(
method='GET',
url='/proxycurl/api/linkedin/profile/email',
params={
'linkedin_profile_url': linkedin_profile_url,
'callback_url': callback_url,
},
data={
},
result_class=ExtractionEmailResult
)
def personal_contact(
self,
linkedin_profile_url: str,
) -> PDLPhoneNumberResult:
"""Personal Contact Number Lookup Endpoint
        Given a LinkedIn profile, returns a list of personal contact numbers belonging to this identity.
:param linkedin_profile_url: LinkedIn Profile URL of the person you want to extract personal contact numbers from.
:type linkedin_profile_url: str
:return: An object of :class:`proxycurl.models.PDLPhoneNumberResult` or **None** if there is an error.
:rtype: :class:`proxycurl.models.PDLPhoneNumberResult`
:raise ProxycurlException: Every error will raise a :class:`proxycurl.gevent.ProxycurlException`
"""
return self.linkedin.proxycurl.request(
method='GET',
url='/proxycurl/api/contact-api/personal-contact',
params={
'linkedin_profile_url': linkedin_profile_url,
},
data={
},
result_class=PDLPhoneNumberResult
)
def personal_email(
self,
linkedin_profile_url: str,
) -> PDLEmailResult:
"""Personal Email Lookup Endpoint
        Given a LinkedIn profile, returns a list of personal emails belonging to this identity. Emails are verified to be deliverable.
:param linkedin_profile_url: LinkedIn Profile URL of the person you want to extract personal email addresses from.
:type linkedin_profile_url: str
:return: An object of :class:`proxycurl.models.PDLEmailResult` or **None** if there is an error.
:rtype: :class:`proxycurl.models.PDLEmailResult`
:raise ProxycurlException: Every error will raise a :class:`proxycurl.gevent.ProxycurlException`
"""
return self.linkedin.proxycurl.request(
method='GET',
url='/proxycurl/api/contact-api/personal-email',
params={
'linkedin_profile_url': linkedin_profile_url,
},
data={
},
result_class=PDLEmailResult
)
class _LinkedinCompany:
def __init__(self, linkedin):
self.linkedin = linkedin
def get(
self,
url: str,
categories: str = 'exclude',
funding_data: str = 'exclude',
extra: str = 'exclude',
exit_data: str = 'exclude',
acquisitions: str = 'exclude',
use_cache: str = 'if-recent',
) -> LinkedinCompany:
"""Company Profile Endpoint
Get structured data of a Company Profile
:param url: URL of the LinkedIn Company Profile to crawl.
URL should be in the format of `https://www.linkedin.com/company/<public_identifier>`
:type url: str
:param categories: Appends categories data of this company.
Default value is `"exclude"`.
The other acceptable value is `"include"`, which will include these categories (if available) for `1` extra credit., defaults to 'exclude'
:type categories: str
:param funding_data: Returns a list of funding rounds that this company has received.
Default value is `"exclude"`.
The other acceptable value is `"include"`, which will include these categories (if available) for `1` extra credit., defaults to 'exclude'
:type funding_data: str
:param extra: Enriches the Company Profile with extra details from external sources. Details include Crunchbase ranking, contact email, phone number, Facebook account, Twitter account, funding rounds and amount, IPO status, investor information, etc.
Default value is `"exclude"`.
The other acceptable value is `"include"`, which will include these extra details (if available) for `1` extra credit., defaults to 'exclude'
:type extra: str
:param exit_data: Returns a list of investment portfolio exits.
Default value is `"exclude"`.
The other acceptable value is `"include"`, which will include these categories (if available) for `1` extra credit., defaults to 'exclude'
:type exit_data: str
:param acquisitions: Provides further enriched data on acquisitions made by this company from external sources.
Default value is `"exclude"`.
The other acceptable value is `"include"`, which will include these acquisition data (if available) for `1` extra credit., defaults to 'exclude'
:type acquisitions: str
:param use_cache: `if-present` Fetches profile from cache regardless of age of profile. If profile is not available in cache, API will attempt to source profile externally.
`if-recent` The default behavior. API will make a best effort to return a fresh profile no older than 29 days., defaults to 'if-recent'
:type use_cache: str
:return: An object of :class:`proxycurl.models.LinkedinCompany` or **None** if there is an error.
:rtype: :class:`proxycurl.models.LinkedinCompany`
:raise ProxycurlException: Every error will raise a :class:`proxycurl.gevent.ProxycurlException`
"""
return self.linkedin.proxycurl.request(
method='GET',
url='/proxycurl/api/linkedin/company',
params={
'url': url,
'categories': categories,
'funding_data': funding_data,
'extra': extra,
'exit_data': exit_data,
'acquisitions': acquisitions,
'use_cache': use_cache,
},
data={
},
result_class=LinkedinCompany
)
def resolve(
self,
location: str = '',
company_domain: str = '',
company_name: str = '',
) -> UrlResult:
"""Company Lookup Endpoint
Resolve Company LinkedIn Profile from company name, domain name and location.
:param location: The location / region of company.
ISO 3166-1 alpha-2 codes
:type location: str
:param company_domain: Company website or Company domain
:type company_domain: str
:param company_name: Company Name
:type company_name: str
:return: An object of :class:`proxycurl.models.UrlResult` or **None** if there is an error.
:rtype: :class:`proxycurl.models.UrlResult`
:raise ProxycurlException: Every error will raise a :class:`proxycurl.gevent.ProxycurlException`
"""
return self.linkedin.proxycurl.request(
method='GET',
url='/proxycurl/api/linkedin/company/resolve',
params={
'location': location,
'company_domain': company_domain,
'company_name': company_name,
},
data={
},
result_class=UrlResult
)
def find_job(
self,
search_id: str,
) -> JobListPage:
"""Jobs Listing Endpoint
List jobs posted by a company on LinkedIn
:param search_id: The `search_id` of the company on LinkedIn.
You can get the `search_id` of a LinkedIn company via [Company Profile API](#company-api-linkedin-company-profile-endpoint).
:type search_id: str
:return: An object of :class:`proxycurl.models.JobListPage` or **None** if there is an error.
:rtype: :class:`proxycurl.models.JobListPage`
:raise ProxycurlException: Every error will raise a :class:`proxycurl.gevent.ProxycurlException`
"""
return self.linkedin.proxycurl.request(
method='GET',
url='/proxycurl/api/v2/linkedin/company/job',
params={
'search_id': search_id,
},
data={
},
result_class=JobListPage
)
def employee_count(
self,
url: str,
employment_status: str = 'current',
) -> EmployeeCount:
"""Employee Count Endpoint
Get a number of total employees of a Company.
        This API endpoint is limited by LinkDB, which is populated with profiles in the US, UK, Canada, Israel, Australia and Singapore. As such, it is best used to count employees of companies based in those countries only.
:param url: URL of the LinkedIn Company Profile to target.
URL should be in the format of `https://www.linkedin.com/company/<public_identifier>`
:type url: str
:param employment_status: Parameter to tell the API to filter past or current employees.
Valid values are `current`, `past`, and `all`:
* `current` (default) : count current employees
* `past` : count past employees
* `all` : count current & past employees, defaults to 'current'
:type employment_status: str
:return: An object of :class:`proxycurl.models.EmployeeCount` or **None** if there is an error.
:rtype: :class:`proxycurl.models.EmployeeCount`
:raise ProxycurlException: Every error will raise a :class:`proxycurl.gevent.ProxycurlException`
"""
return self.linkedin.proxycurl.request(
method='GET',
url='/proxycurl/api/linkedin/company/employees/count',
params={
'url': url,
'employment_status': employment_status,
},
data={
},
result_class=EmployeeCount
)
def employee_list(
self,
url: str,
employment_status: str = 'current',
) -> EmployeeList:
"""Employee Listing Endpoint
Get a list of employees of a Company.
        This API endpoint is limited by LinkDB, which is populated with profiles in the US, UK, Canada, Israel, Australia and Singapore. As such, it is best used to list employees of companies based in those countries only.
:param url: URL of the LinkedIn Company Profile to target.
URL should be in the format of `https://www.linkedin.com/company/<public_identifier>`
:type url: str
:param employment_status: Parameter to tell the API to return past or current employees.
Valid values are `current`, `past`, and `all`:
* `current` (default) : lists current employees
* `past` : lists past employees
* `all` : lists current & past employees, defaults to 'current'
:type employment_status: str
:return: An object of :class:`proxycurl.models.EmployeeList` or **None** if there is an error.
:rtype: :class:`proxycurl.models.EmployeeList`
:raise ProxycurlException: Every error will raise a :class:`proxycurl.gevent.ProxycurlException`
"""
return self.linkedin.proxycurl.request(
method='GET',
url='/proxycurl/api/linkedin/company/employees',
params={
'url': url,
'employment_status': employment_status,
},
data={
},
result_class=EmployeeList
)
def role_lookup(
self,
company_name: str,
role: str,
) -> RoleSearchResult:
"""Role Lookup Endpoint
Finds the closest (person) profile with a given role in a Company. For example, you can use this endpoint to find the "CTO" of "Apple".
:param company_name: Name of the company that you are searching for
:type company_name: str
        :param role: Role of the profile that you are looking up
:type role: str
:return: An object of :class:`proxycurl.models.RoleSearchResult` or **None** if there is an error.
:rtype: :class:`proxycurl.models.RoleSearchResult`
:raise ProxycurlException: Every error will raise a :class:`proxycurl.gevent.ProxycurlException`
"""
return self.linkedin.proxycurl.request(
method='GET',
url='/proxycurl/api/find/company/role',
params={
'company_name': company_name,
'role': role,
},
data={
},
result_class=RoleSearchResult
)
def reveal(
self,
ip: str,
role: str = '',
role_personal_email: str = 'exclude',
role_contact_number: str = 'exclude',
) -> CompanyReveal:
"""Reveal Endpoint
        Deanonymize an IPv4 address and identify the Company behind it.
:param ip: The target IPv4 address.
:type ip: str
        :param role: Look up and append an employee with a certain role at the company.
        Within the same API call, you can choose to look up a person with a given role within this organisation whom you might want to reach out to.
:type role: str
:param role_personal_email: Append personal email addresses to the response if the system finds a relevant person profile., defaults to 'exclude'
:type role_personal_email: str
        :param role_contact_number: Append personal contact numbers to the response if the system finds a relevant person profile., defaults to 'exclude'
:type role_contact_number: str
:return: An object of :class:`proxycurl.models.CompanyReveal` or **None** if there is an error.
:rtype: :class:`proxycurl.models.CompanyReveal`
:raise ProxycurlException: Every error will raise a :class:`proxycurl.gevent.ProxycurlException`
"""
return self.linkedin.proxycurl.request(
method='GET',
url='/proxycurl/api/reveal/company',
params={
'ip': ip,
'role': role,
'role_personal_email': role_personal_email,
'role_contact_number': role_contact_number,
},
data={
},
result_class=CompanyReveal
)
class _LinkedinSchool:
def __init__(self, linkedin):
self.linkedin = linkedin
def get(
self,
url: str,
use_cache: str = 'if-recent',
) -> LinkedinSchool:
"""School Profile Endpoint
Get structured data of a LinkedIn School Profile
:param url: URL of the LinkedIn School Profile to crawl.
URL should be in the format of `https://www.linkedin.com/school/<public_identifier>`
:type url: str
        :param use_cache: `if-present` Fetches profile from cache regardless of age of profile. If profile is not available in cache, API will attempt to source profile externally.
`if-recent` The default behavior. API will make a best effort to return a fresh profile no older than 29 days., defaults to 'if-recent'
:type use_cache: str
:return: An object of :class:`proxycurl.models.LinkedinSchool` or **None** if there is an error.
:rtype: :class:`proxycurl.models.LinkedinSchool`
:raise ProxycurlException: Every error will raise a :class:`proxycurl.gevent.ProxycurlException`
"""
return self.linkedin.proxycurl.request(
method='GET',
url='/proxycurl/api/linkedin/school',
params={
'url': url,
'use_cache': use_cache,
},
data={
},
result_class=LinkedinSchool
)
class _LinkedinJob:
def __init__(self, linkedin):
self.linkedin = linkedin
def get(
self,
url: str,
) -> LinkedinJob:
"""Job Profile Endpoint
Get structured data of a LinkedIn Job Profile
:param url: URL of the LinkedIn Job Profile to target.
URL should be in the format of `https://www.linkedin.com/jobs/view/<job_id>`.
[Jobs Listing Endpoint](#jobs-api-linkedin-jobs-listing-endpoint) can be used to retrieve a job URL.
:type url: str
:return: An object of :class:`proxycurl.models.LinkedinJob` or **None** if there is an error.
:rtype: :class:`proxycurl.models.LinkedinJob`
:raise ProxycurlException: Every error will raise a :class:`proxycurl.gevent.ProxycurlException`
"""
return self.linkedin.proxycurl.request(
method='GET',
url='/proxycurl/api/linkedin/job',
params={
'url': url,
},
data={
},
result_class=LinkedinJob
)
class _Linkedin:
person: _LinkedinPerson
company: _LinkedinCompany
school: _LinkedinSchool
job: _LinkedinJob
def __init__(self, proxycurl):
self.proxycurl = proxycurl
self.person = _LinkedinPerson(self)
self.company = _LinkedinCompany(self)
self.school = _LinkedinSchool(self)
self.job = _LinkedinJob(self)
class Proxycurl(ProxycurlBase):
linkedin: _Linkedin
def __init__(
self,
api_key: str = PROXYCURL_API_KEY,
base_url: str = BASE_URL,
timeout: int = TIMEOUT,
max_retries: int = MAX_RETRIES,
max_backoff_seconds: int = MAX_BACKOFF_SECONDS
) -> None:
self.api_key = api_key
self.base_url = base_url
self.timeout = timeout
self.max_retries = max_retries
self.max_backoff_seconds = max_backoff_seconds
self.linkedin = _Linkedin(self)
def get_balance(
self,
) -> CreditBalance:
"""View Credit Balance Endpoint
Get your current credit(s) balance
:return: An object of :class:`proxycurl.models.CreditBalance` or **None** if there is an error.
:rtype: :class:`proxycurl.models.CreditBalance`
:raise ProxycurlException: Every error will raise a :class:`proxycurl.gevent.ProxycurlException`
"""
return self.linkedin.proxycurl.request(
method='GET',
url='/proxycurl/api/credit-balance',
params={
},
data={
},
result_class=CreditBalance
)
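# A minimal usage sketch of the client assembled above (the API key and the
# company URL are placeholders):
#
#   proxycurl = Proxycurl(api_key="your-api-key")
#   balance = proxycurl.get_balance()
#   company = proxycurl.linkedin.company.get(
#       url="https://www.linkedin.com/company/example",
#       extra="include",
#   )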
|
PypiClean
|
/nm_transformers-1.5.1.42301-py3-none-any.whl/transformers/models/deberta/__init__.py
|
from typing import TYPE_CHECKING
from ...utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_tf_available,
is_tokenizers_available,
is_torch_available,
)
_import_structure = {
"configuration_deberta": ["DEBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP", "DebertaConfig", "DebertaOnnxConfig"],
"tokenization_deberta": ["DebertaTokenizer"],
}
try:
if not is_tokenizers_available():
raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
pass
else:
_import_structure["tokenization_deberta_fast"] = ["DebertaTokenizerFast"]
try:
if not is_torch_available():
raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_deberta"] = [
"DEBERTA_PRETRAINED_MODEL_ARCHIVE_LIST",
"DebertaForMaskedLM",
"DebertaForQuestionAnswering",
"DebertaForSequenceClassification",
"DebertaForTokenClassification",
"DebertaModel",
"DebertaPreTrainedModel",
]
try:
if not is_tf_available():
raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_tf_deberta"] = [
"TF_DEBERTA_PRETRAINED_MODEL_ARCHIVE_LIST",
"TFDebertaForMaskedLM",
"TFDebertaForQuestionAnswering",
"TFDebertaForSequenceClassification",
"TFDebertaForTokenClassification",
"TFDebertaModel",
"TFDebertaPreTrainedModel",
]
if TYPE_CHECKING:
from .configuration_deberta import DEBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP, DebertaConfig, DebertaOnnxConfig
from .tokenization_deberta import DebertaTokenizer
try:
if not is_tokenizers_available():
raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
pass
else:
from .tokenization_deberta_fast import DebertaTokenizerFast
try:
if not is_torch_available():
raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
pass
else:
from .modeling_deberta import (
DEBERTA_PRETRAINED_MODEL_ARCHIVE_LIST,
DebertaForMaskedLM,
DebertaForQuestionAnswering,
DebertaForSequenceClassification,
DebertaForTokenClassification,
DebertaModel,
DebertaPreTrainedModel,
)
try:
if not is_tf_available():
raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
pass
else:
from .modeling_tf_deberta import (
TF_DEBERTA_PRETRAINED_MODEL_ARCHIVE_LIST,
TFDebertaForMaskedLM,
TFDebertaForQuestionAnswering,
TFDebertaForSequenceClassification,
TFDebertaForTokenClassification,
TFDebertaModel,
TFDebertaPreTrainedModel,
)
else:
import sys
sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
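# A sketch of what the lazy pattern above buys (the import line is
# illustrative): importing this package stays cheap because only
# `_import_structure` is read eagerly; the heavy torch/tf modules load on
# first attribute access, e.g.
#   from transformers.models.deberta import DebertaConfig  # loads configuration_deberta only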
|
PypiClean
|
/love_course_2016_2019-2023.3.1.0-py3-none-any.whl/LoveCourse20162019/docs/mi-guo-qing-gan/蜜果情感《苏晨私教课程》:恋爱技巧强化课:2.2男士主流穿衣风格.md
|
# Miguo Qinggan (蜜果情感), "Su Chen Private Coaching Course": Dating Skills Intensive: 2.2 Mainstream Menswear Styles
OK, today I will walk you through the mainstream menswear styles. Treat them as references and choose according to your own job and tastes. I am only covering mainstream styles here; hip-hop, punk and the like are left out because they are not mainstream. Imagine turning up to a first date in full hip-hop gear while she arrives straight from work in office attire: walking together, the two of you would not look like you belong together at all. The styles recommended below are all versatile choices.
The first is the muted minimalist style (the so-called "xinglengdan" look). It is essentially minimalism: black, white, grey and other natural tones; forms simplified as far as possible; no prints or flashy design elements; the emphasis is on simplicity rather than luxury. It suits an aloof, artsy, almost ascetic vibe, and it works best on slimmer men - if you are tall and thin, it is worth a try.
Next, high street. Think of it as the upscale version of casual street style, with bold, trend-driven colour combinations. Almost every influencer dresses high street these days, and in our experience it is the style that attractive, high-standard women respond to most. Representative brands include Givenchy, Balenciaga, Balmain and the like.
Then there is the smart-business style, which you can think of as the yuppie look. If you have seen the TV series Ode to Joy, the character "Xiao Bao Zong" is the archetype. It reads as business overall, but every detail of the styling is made fashionable: unstructured blazers, suits worn in a deliberately informal way. We consider this the highest level - trendy and businesslike at once, with a confident, mature masculinity.
Next is a style almost every man goes through a phase of loving: Japanese workwear, with Shawn Yue (余文乐) as its typical representative. In surveys we ran three years ago it was the style women liked most, though it has cooled off over the last two years - partly because high street took over, and partly because workwear, styled badly, can simply make you look poor. It is versatile and easy to imitate, and perfectly fine if you put it together well.
British style I really do not recommend for men under twenty-six. It demands excellent tailoring and fabric, which means it is not cheap, and without a certain presence it can make a girl feel you secretly borrowed your father's clothes. The most typical example is Burberry's main line, which is expensive; you can look for more affordable labels in a similar vein.
American college (preppy) style is one that virtually all women like: the clean-cut gentlemanly look people praised a few years ago is standard preppy - slightly bookish without losing the masculine elements.
Street style is probably the style men themselves like most: white sneakers, Nikes or Jordans, Vans or Converse, paired with Levi's jeans and a hoodie, shirt or T-shirt on top. It is easy to wear and easy to recommend, but precisely because everyone dresses this way it gives you no distinctiveness of your own, so I do not suggest students settle on it.
Finally, American retro, known professionally as Amekaji: slicked-back hair, leather jackets, the motorcycle look. It makes you look tough, but only a handful of brands carry the clothes and everything, boots included, is expensive. The easiest entry point is a pair of high-top boots - Timberland or CAT, or something pricier - with jeans, a leather jacket and sunglasses, and you will leave the "cute puppy" boys several streets behind.
All of this is only a reference. The most effective method is to find the most stylish, best-dressed woman you know - someone with taste who has seen the world - and spend a whole day with her in a shopping district with plenty of brands, trying on every style so she can judge which ones genuinely suit you. Many of you like loose, comfortable clothes; but think about why women wear pinching high heels, and why a man in a sharp suit is what women like best. A suit is not comfortable: it is hard even to get into a car, you can barely raise your arms, and dress shoes rub. The less comfortable you are, the better you look to her.
I also recommend hunting for small, well-styled shops on Taobao: cheap and good-looking. One last practical point: if you want your online purchases to fit, buy a tailor's measuring tape and record your waist, shoulder width and other measurements clearly; it will save you a lot of wrong-size orders. OK, that is the recommendation on menswear styles.
|
PypiClean
|
/safe_etmp_py-0.0.4-py3-none-any.whl/gnosis/safe/proxy_factory.py
|
from logging import getLogger
from typing import Optional
from eth_account.signers.local import LocalAccount
from eth_typing import ChecksumAddress
from web3.contract import Contract
from gnosis.eth import EthereumClient
from gnosis.eth.contracts import (
get_paying_proxy_deployed_bytecode,
get_proxy_1_0_0_deployed_bytecode,
get_proxy_1_1_1_deployed_bytecode,
get_proxy_1_1_1_mainnet_deployed_bytecode,
get_proxy_1_3_0_deployed_bytecode,
get_proxy_factory_contract,
get_proxy_factory_V1_0_0_contract,
get_proxy_factory_V1_1_1_contract,
)
from gnosis.eth.ethereum_client import EthereumTxSent
from gnosis.eth.utils import compare_byte_code, fast_is_checksum_address
try:
from functools import cache
except ImportError:
from functools import lru_cache
cache = lru_cache(maxsize=None)
logger = getLogger(__name__)
class ProxyFactory:
def __init__(self, address: ChecksumAddress, ethereum_client: EthereumClient):
assert fast_is_checksum_address(address), (
"%s proxy factory address not valid" % address
)
self.address = address
self.ethereum_client = ethereum_client
self.w3 = ethereum_client.w3
@staticmethod
def _deploy_proxy_factory_contract(
ethereum_client: EthereumClient,
deployer_account: LocalAccount,
contract: Contract,
) -> EthereumTxSent:
tx = contract.constructor().buildTransaction({"from": deployer_account.address})
tx_hash = ethereum_client.send_unsigned_transaction(
tx, private_key=deployer_account.key
)
tx_receipt = ethereum_client.get_transaction_receipt(tx_hash, timeout=120)
assert tx_receipt
assert tx_receipt["status"]
contract_address = tx_receipt["contractAddress"]
logger.info(
"Deployed and initialized Proxy Factory Contract=%s by %s",
contract_address,
deployer_account.address,
)
return EthereumTxSent(tx_hash, tx, contract_address)
@classmethod
def deploy_proxy_factory_contract(
cls, ethereum_client: EthereumClient, deployer_account: LocalAccount
) -> EthereumTxSent:
"""
        Deploy the latest version (v1.3.0) of the proxy factory contract
:param ethereum_client:
:param deployer_account: Ethereum Account
        :return: EthereumTxSent with the deployed contract address
"""
proxy_factory_contract = get_proxy_factory_contract(ethereum_client.w3)
return cls._deploy_proxy_factory_contract(
ethereum_client, deployer_account, proxy_factory_contract
)
@classmethod
def deploy_proxy_factory_contract_v1_1_1(
cls, ethereum_client: EthereumClient, deployer_account: LocalAccount
) -> EthereumTxSent:
"""
Deploy proxy factory contract v1.1.1
:param ethereum_client:
:param deployer_account: Ethereum Account
        :return: EthereumTxSent with the deployed contract address
"""
proxy_factory_contract = get_proxy_factory_V1_1_1_contract(ethereum_client.w3)
return cls._deploy_proxy_factory_contract(
ethereum_client, deployer_account, proxy_factory_contract
)
@classmethod
def deploy_proxy_factory_contract_v1_0_0(
cls, ethereum_client: EthereumClient, deployer_account: LocalAccount
) -> EthereumTxSent:
"""
Deploy proxy factory contract v1.0.0
:param ethereum_client:
:param deployer_account: Ethereum Account
        :return: EthereumTxSent with the deployed contract address
"""
proxy_factory_contract = get_proxy_factory_V1_0_0_contract(ethereum_client.w3)
return cls._deploy_proxy_factory_contract(
ethereum_client, deployer_account, proxy_factory_contract
)
def check_proxy_code(self, address: ChecksumAddress) -> bool:
"""
Check if proxy is valid
:param address: Ethereum address to check
:return: True if proxy is valid, False otherwise
"""
deployed_proxy_code = self.w3.eth.get_code(address)
proxy_code_fns = (
get_proxy_1_3_0_deployed_bytecode,
get_proxy_1_1_1_deployed_bytecode,
get_proxy_1_1_1_mainnet_deployed_bytecode,
get_proxy_1_0_0_deployed_bytecode,
get_paying_proxy_deployed_bytecode,
self.get_proxy_runtime_code,
)
for proxy_code_fn in proxy_code_fns:
if compare_byte_code(deployed_proxy_code, proxy_code_fn()):
return True
return False
def deploy_proxy_contract(
self,
deployer_account: LocalAccount,
master_copy: ChecksumAddress,
initializer: bytes = b"",
gas: Optional[int] = None,
gas_price: Optional[int] = None,
) -> EthereumTxSent:
"""
Deploy proxy contract via ProxyFactory using `createProxy` function
:param deployer_account: Ethereum account
:param master_copy: Address the proxy will point at
:param initializer: Initializer
:param gas: Gas
:param gas_price: Gas Price
:return: EthereumTxSent
"""
proxy_factory_contract = self.get_contract()
create_proxy_fn = proxy_factory_contract.functions.createProxy(
master_copy, initializer
)
tx_parameters = {"from": deployer_account.address}
contract_address = create_proxy_fn.call(tx_parameters)
if gas_price is not None:
tx_parameters["gasPrice"] = gas_price
if gas is not None:
tx_parameters["gas"] = gas
tx = create_proxy_fn.buildTransaction(tx_parameters)
# Auto estimation of gas does not work. We use a little more gas just in case
tx["gas"] = tx["gas"] + 50000
tx_hash = self.ethereum_client.send_unsigned_transaction(
tx, private_key=deployer_account.key
)
return EthereumTxSent(tx_hash, tx, contract_address)
def deploy_proxy_contract_with_nonce(
self,
deployer_account: LocalAccount,
master_copy: ChecksumAddress,
initializer: bytes,
salt_nonce: int,
gas: Optional[int] = None,
gas_price: Optional[int] = None,
nonce: Optional[int] = None,
) -> EthereumTxSent:
"""
Deploy proxy contract via Proxy Factory using `createProxyWithNonce` (create2)
:param deployer_account: Ethereum account
:param master_copy: Address the proxy will point at
:param initializer: Data for safe creation
:param salt_nonce: Uint256 for `create2` salt
:param gas: Gas
:param gas_price: Gas Price
:param nonce: Nonce
        :return: EthereumTxSent (tx hash, tx, deployed contract address)
"""
proxy_factory_contract = self.get_contract()
create_proxy_fn = proxy_factory_contract.functions.createProxyWithNonce(
master_copy, initializer, salt_nonce
)
tx_parameters = {"from": deployer_account.address}
contract_address = create_proxy_fn.call(tx_parameters)
if gas_price is not None:
tx_parameters["gasPrice"] = gas_price
if gas is not None:
tx_parameters["gas"] = gas
if nonce is not None:
tx_parameters["nonce"] = nonce
tx = create_proxy_fn.buildTransaction(tx_parameters)
# Auto estimation of gas does not work. We use a little more gas just in case
tx["gas"] = tx["gas"] + 50000
tx_hash = self.ethereum_client.send_unsigned_transaction(
tx, private_key=deployer_account.key
)
return EthereumTxSent(tx_hash, tx, contract_address)
def get_contract(self, address: Optional[ChecksumAddress] = None):
address = address or self.address
return get_proxy_factory_contract(self.ethereum_client.w3, address)
@cache
def get_proxy_runtime_code(self, address: Optional[ChecksumAddress] = None):
"""
Get runtime code for current proxy factory
"""
address = address or self.address
return self.get_contract(address=address).functions.proxyRuntimeCode().call()
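# A hedged deployment sketch (node URL, key and addresses are placeholders):
#
#   from eth_account import Account
#   ethereum_client = EthereumClient("http://localhost:8545")
#   deployer = Account.from_key("0x" + "1" * 64)
#   proxy_factory = ProxyFactory("0x<proxy-factory-address>", ethereum_client)
#   ethereum_tx_sent = proxy_factory.deploy_proxy_contract_with_nonce(
#       deployer,
#       master_copy="0x<safe-singleton-address>",
#       initializer=b"",
#       salt_nonce=0,
#   )
#   assert proxy_factory.check_proxy_code(ethereum_tx_sent.contract_address)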
|
PypiClean
|
/django-smart-selects-1.6.0.tar.gz/django-smart-selects-1.6.0/smart_selects/static/smart-selects/admin/js/chainedm2m.js
|
(function ($) {
"use strict";
window.chainedm2m = function () {
return {
fireEvent: function (element, event) {
var evt, rtn;
if (document.createEventObject) {
// dispatch for IE
evt = document.createEventObject();
rtn = element.fireEvent('on' + event, evt);
} else {
// dispatch for firefox + others
evt = document.createEvent("HTMLEvents");
evt.initEvent(event, true, true); // event type,bubbling,cancelable
rtn = !element.dispatchEvent(evt);
}
return rtn;
},
dismissRelatedLookupPopup: function(win, chosenId) {
var name = windowname_to_id(win.name),
elem = document.getElementById(name);
if (elem.className.indexOf('vManyToManyRawIdAdminField') !== -1 && elem.value) {
elem.value += ',' + chosenId;
} else {
elem.value = chosenId;
}
fireEvent(elem, 'change');
win.close();
},
fill_field: function (val, initial_value, elem_id, url, initial_parent, auto_choose) {
var $selectField = $(elem_id),
$selectedto = $(elem_id + '_to'),
cache_to = elem_id.replace('#', '') + '_to',
cache_from = elem_id.replace('#', '') + '_from';
if (!$selectField.length) {
$selectField = $(elem_id + '_from');
}
function trigger_chosen_updated() {
if ($.fn.chosen !== undefined) {
$selectField.trigger('chosen:updated');
}
}
// SelectBox is a global var from djangojs "admin/js/SelectBox.js"
// Clear cache to avoid the elements duplication
if (typeof SelectBox !== 'undefined') {
if (typeof SelectBox.cache[cache_to] !== 'undefined') {
SelectBox.cache[cache_to].splice(0);
}
if (typeof SelectBox.cache[cache_from] !== 'undefined') {
SelectBox.cache[cache_from].splice(0);
}
}
if (!val || val === '') {
$selectField.html('');
$selectedto.html('');
trigger_chosen_updated();
return;
}
// Make sure that these are always an arrays
val = [].concat(val);
initial_parent = [].concat(initial_parent);
var target_url = url + "/" + val + "/",
options = [],
selectedoptions = [];
$.getJSON(target_url, function (j) {
var i, width;
auto_choose = j.length === 1 && auto_choose;
var selected_values = {};
// if val and initial_parent have any common values, we need to set selected options.
            if ($(val).filter(initial_parent).length > 0 && initial_value) {
for (i = 0; i < initial_value.length; i = i + 1) {
selected_values[initial_value[i]] = true;
}
}
// select values which were previously selected (for many2many - many2many chain)
$(elem_id + ' option:selected').each(function () {
selected_values[$(this).val()] = true;
});
$.each(j, function (index, optionData) {
var option = $('<option></option>')
.attr('value', optionData.value)
.text(optionData.display)
.attr('title', optionData.display);
if (auto_choose === "true" || auto_choose === "True") {
auto_choose = true;
} else if (auto_choose === "false" || auto_choose === "False") {
auto_choose = false;
}
if (auto_choose || selected_values[optionData.value] === true) {
if ($selectedto.length) {
selectedoptions.push(option);
} else {
option.prop('selected', true);
options.push(option);
}
} else {
options.push(option);
}
});
$selectField.html(options);
if ($selectedto.length) {
$selectedto.html(selectedoptions);
var node;
// SelectBox is a global var from djangojs "admin/js/SelectBox.js"
for (i = 0, j = selectedoptions.length; i < j; i = i + 1) {
node = selectedoptions[i];
SelectBox.cache[cache_to].push({value: node.prop("value"), text: node.prop("text"), displayed: 1});
}
for (i = 0, j = options.length; i < j; i = i + 1) {
node = options[i];
SelectBox.cache[cache_from].push({value: node.prop("value"), text: node.prop("text"), displayed: 1});
}
}
width = $selectField.outerWidth();
if (navigator.appVersion.indexOf("MSIE") !== -1) {
$selectField.width(width + 'px');
}
$selectField.trigger('change');
trigger_chosen_updated();
});
},
init: function (chainfield, url, id, value, auto_choose) {
var fill_field, val, initial_parent = $(chainfield).val(),
initial_value = value;
if (!$(chainfield).hasClass("chained")) {
val = $(chainfield).val();
this.fill_field(val, initial_value, id, url, initial_parent, auto_choose);
}
fill_field = this.fill_field;
$(chainfield).change(function () {
var prefix, start_value, this_val, localID = id;
if (localID.indexOf("__prefix__") > -1) {
prefix = $(this).attr("id").match(/\d+/)[0];
localID = localID.replace("__prefix__", prefix);
}
start_value = $(localID).val();
this_val = $(this).val();
fill_field(this_val, initial_value, localID, url, initial_parent, auto_choose);
});
            // this used to live at the bottom, outside the document-ready handler
if (typeof(dismissAddAnotherPopup) !== 'undefined') {
var oldDismissAddAnotherPopup = dismissAddAnotherPopup;
dismissAddAnotherPopup = function (win, newId, newRepr) {
oldDismissAddAnotherPopup(win, newId, newRepr);
if ("#" + windowname_to_id(win.name) === chainfield) {
$(chainfield).change();
}
};
}
if (typeof(dismissRelatedLookupPopup) !== 'undefined') {
var oldDismissRelatedLookupPopup = dismissRelatedLookupPopup;
dismissRelatedLookupPopup = function (win, chosenId) {
oldDismissRelatedLookupPopup(win, chosenId);
if ("#" + windowname_to_id(win.name) === chainfield) {
$(chainfield).change();
}
};
}
}
};
}();
}(jQuery || django.jQuery));
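// A hedged wiring sketch for init() above (element IDs and the chaining URL
// are illustrative, not taken from a real project):
//
//   chainedm2m.init(
//       '#id_country',                        // chainfield driving the chain
//       '/chaining/filter/app/City/country',  // assumed smart-selects endpoint
//       '#id_cities',                         // chained m2m field to (re)fill
//       initiallySelectedIds,                 // value(s) selected on page load
//       'true'                                // auto-choose single results
//   );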
|
PypiClean
|
/tensorflow_directml-1.15.8-cp37-cp37m-win_amd64.whl/tensorflow_core/core/framework/reader_base_pb2.py
|
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='tensorflow/core/framework/reader_base.proto',
package='tensorflow',
syntax='proto3',
serialized_options=_b('\n\030org.tensorflow.frameworkB\020ReaderBaseProtosP\001Z=github.com/tensorflow/tensorflow/tensorflow/go/core/framework\370\001\001'),
serialized_pb=_b('\n+tensorflow/core/framework/reader_base.proto\x12\ntensorflow\"r\n\x0fReaderBaseState\x12\x14\n\x0cwork_started\x18\x01 \x01(\x03\x12\x15\n\rwork_finished\x18\x02 \x01(\x03\x12\x1c\n\x14num_records_produced\x18\x03 \x01(\x03\x12\x14\n\x0c\x63urrent_work\x18\x04 \x01(\x0c\x42p\n\x18org.tensorflow.frameworkB\x10ReaderBaseProtosP\x01Z=github.com/tensorflow/tensorflow/tensorflow/go/core/framework\xf8\x01\x01\x62\x06proto3')
)
_READERBASESTATE = _descriptor.Descriptor(
name='ReaderBaseState',
full_name='tensorflow.ReaderBaseState',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='work_started', full_name='tensorflow.ReaderBaseState.work_started', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='work_finished', full_name='tensorflow.ReaderBaseState.work_finished', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='num_records_produced', full_name='tensorflow.ReaderBaseState.num_records_produced', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='current_work', full_name='tensorflow.ReaderBaseState.current_work', index=3,
number=4, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=59,
serialized_end=173,
)
DESCRIPTOR.message_types_by_name['ReaderBaseState'] = _READERBASESTATE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
ReaderBaseState = _reflection.GeneratedProtocolMessageType('ReaderBaseState', (_message.Message,), {
'DESCRIPTOR' : _READERBASESTATE,
'__module__' : 'tensorflow.core.framework.reader_base_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.ReaderBaseState)
})
_sym_db.RegisterMessage(ReaderBaseState)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
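# A round-trip sketch for the generated message (field values are illustrative):
#
#   state = ReaderBaseState(work_started=1, work_finished=0,
#                           num_records_produced=42, current_work=b"shard-0001")
#   restored = ReaderBaseState.FromString(state.SerializeToString())
#   assert restored.num_records_produced == 42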
|
PypiClean
|
/vortexpy-4.0.0.tar.gz/vortexpy-4.0.0/vortex/restful/RestfulResource.py
|
import json
import logging
from enum import Enum
from typing import Callable
from typing import List
from twisted.internet.defer import inlineCallbacks
from twisted.python.compat import nativeString
from twisted.web.server import NOT_DONE_YET
from txhttputil.site.BasicResource import BasicResource
from vortex import Tuple
from vortex.DeferUtil import deferToThreadWrapWithLogger
from vortex.DeferUtil import vortexLogFailure
logger = logging.getLogger(__name__)
class HTTP_REQUEST(Enum):
GET = "GET"
POST = "POST"
PUT = "PUT"
DELETE = "DELETE"
class _JsonResource(BasicResource):
isLeaf = True
isGzipped = True
def __init__(self, handler: Callable, TupleClass: Tuple):
BasicResource.__init__(self)
self._handler = handler
self._tupleClass = TupleClass
@deferToThreadWrapWithLogger(logger)
def _requestToTuple(self, request) -> Tuple:
req = json.load(request.content)
tuple_ = self._tupleClass()
tuple_.fromJsonDict(req)
return tuple_
@deferToThreadWrapWithLogger(logger)
def _tupleToResponse(self, tuple_: Tuple) -> bytes:
dict_ = tuple_.tupleToRestfulJsonDict()
json_ = json.dumps(dict_)
return json_.encode()
def _writeSuccessResponse(self, response: bytes, request):
request.setResponseCode(200)
request.setHeader("Content-Type", "application/json")
request.write(response)
request.finish()
def _writeErrorResponse(self, request):
request.setResponseCode(500)
request.setHeader("Content-Type", "application/json")
request.write(b'{"error":"internal error"}')
request.finish()
@inlineCallbacks
def _renderAsync(self, request):
tupleIn = None
try:
tupleIn = yield self._requestToTuple(request)
tupleOut = yield self._handler(tupleIn)
response = yield self._tupleToResponse(tupleOut)
self._writeSuccessResponse(response, request)
except Exception as e:
self._writeErrorResponse(request)
logger.debug(
f"Error while processing REST Tuple\n{tupleIn}\n{request.path}"
)
logger.exception(e)
def render(self, request):
d = self._renderAsync(request)
d.addErrback(vortexLogFailure, logger=logger, consumeError=True)
return NOT_DONE_YET
class ErrorJsonResource(BasicResource):
isLeaf = True
isGzipped = True
def __init__(self, errorCode: int):
self._errorCode = errorCode
def render(self, request):
request.setResponseCode(self._errorCode)
return b""
class PluginRestfulResource(BasicResource):
def __init__(self):
BasicResource.__init__(self)
self._registeredMethods = {}
def registerMethod(
self,
handlerFunction: Callable,
TupleClass: Tuple,
urlPrefix: bytes,
registeredRequestMethod: List[HTTP_REQUEST],
):
if urlPrefix in self._registeredMethods:
raise ValueError(f'Route "{urlPrefix}" already exists')
if not callable(handlerFunction):
raise TypeError(f'"{str(handlerFunction)}" is not a callable')
# register handler as partial function (poor man's Template<type>)
# as callback function whose input is an instance of a Tuple
self._registeredMethods[urlPrefix] = {
"handler": handlerFunction,
"allowedMethods": set(
[method.value for method in registeredRequestMethod]
),
"tupleClass": TupleClass,
}
def getChild(self, path, request):
"""Get requests routed to an handler
Routes to a registered handler to process the request.
This function routes resources to handlers which eventually returns
JSON response resource in json. Everything returned here should be
final.
Once url prefix for a resource is matched, it checks the
request method is allowed. If granted, it delegates the request to the
registered handler to process the request. The handler should convert
request to a Tuple and should respond a tuple.
:param path: route url for a resource, with plugin name prefix stripped
:param request: twisted.web.http.Request
:return: twisted.web.resource.Resource
"""
# route check
if path not in self._registeredMethods.keys():
return ErrorJsonResource(404)
# request method check
requestMethod = nativeString(request.method)
if requestMethod not in self._registeredMethods[path]["allowedMethods"]:
return ErrorJsonResource(403)
# invoke resource handler
TupleClass = self._registeredMethods[path]["tupleClass"]
handler = self._registeredMethods[path]["handler"]
return _JsonResource(handler, TupleClass)
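# A hedged registration sketch (MyTuple and handle_my_tuple are assumptions;
# the handler may return a Deferred, since _JsonResource yields on its result):
#
#   resource = PluginRestfulResource()
#   resource.registerMethod(
#       handlerFunction=handle_my_tuple,   # Callable taking and returning a Tuple
#       TupleClass=MyTuple,
#       urlPrefix=b"my-endpoint",
#       registeredRequestMethod=[HTTP_REQUEST.POST, HTTP_REQUEST.PUT],
#   )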
|
PypiClean
|
/eagle-cli-0.5.tar.gz/eagle-cli-0.5/eagle/tasks.py
|
import calendar
from datetime import date, datetime, timedelta
from .groups import add_group, group_exist
from .storage import Task, get_storage
from .tools import err_print
def parse_frequency(f, silent=True):
# 1. specific date.
if f.startswith("@"):
# Try (D)D/(M)M/YYYY
# or fallback to (D)D/(M)M where year will be the current one.
try:
return datetime.strptime(f[1:], "%d/%m/%Y")
except ValueError:
d = datetime.strptime(f[1:], "%d/%m")
return d.replace(year=date.today().year)
# 2. Magic date name
if "today" == f:
return datetime.now()
if "tomorrow" == f:
return datetime.now() + timedelta(days=1)
# Try to seek nearest weekday.
for i in range(1, 7):
the_date = datetime.now() + timedelta(days=i)
day_index = the_date.weekday()
if calendar.day_name[day_index].lower().startswith(f.lower()):
return the_date
# 3. +XY days
# Handles the "+X" days - like "+5".
# If cannot parse days number fallbacks to "today".
if f.startswith("+"):
try:
days = int(f[1:])
        except ValueError:
days = 0
return datetime.now() + timedelta(days=days)
# 4. X(d|w|m|y) - i.e. "2w".
if 2 <= len(f) and f[-1] in ["d", "w", "m", "y"]:
return f
# 5. No date at all - fallback.
if "-" == f:
return None
    # No frequency has been recognized.
if not silent:
err_print("No known frequency recognized. Task added without frequency.")
def add_task(tasks):
"""
Creates new task.
1. place takes the task name.
2. place takes date/frequency [optional] - or "-".
3. place takes group [optional].
Frequency might be "@dd/mm/yyyy" or "@dd/mm" or just:
* Xd - i.e.: 1d (every day)
* Xw - i.e.: 1w (every week)
* Xm - i.e.: 1m (every month)
* Xy - i.e.: 1y (every year)
or
+X - X is number of days in future
or magic names:
* today
* tomorrow
    :param list tasks: List of lists of task params (task, frequency, group).
"""
# Append new task to the todo list.
with get_storage() as s:
for t in tasks:
# Check if group was mentioned.
if 3 == len(t) and not group_exist(t[2]):
add_group([[t[2]]])
# If a frequency was given "t" variable has 2 items.
s["tasks"].append(
Task(
t[0],
parse_frequency(t[1], silent=False) if 1 < len(t) else None,
t[2] if 3 == len(t) else None,
datetime.now(),
)
)
def edit_task(task):
"""
    Edits a task with the help of input() prompts.
:param list task: List of tasks to be edited.
"""
with get_storage() as s:
index = task[0] - 1
origin_task = s["tasks"][index]
print("\nHere you can edit a task be rewriting current values.")
print(
"If you wanna remove current value (frequency, group) enter one space (hit spacebar) instead.\n"
)
# Title.
while True:
title = input("Enter task title: ")
# If user does not enter valid title or a space
# we do prompt him again and again.
if "" == title:
title = origin_task.title
break
elif title.strip():
break
else:
print("Title is mandatory. Please enter one.\n")
# Freq.
freq = input("Enter frequency: ")
if " " == freq:
freq = None
elif "" == freq:
freq = origin_task.frequency
else:
freq = parse_frequency(freq)
# Group.
group = input("Enter group (empty space to remove group): ")
if " " == group:
group = None
elif "" == group:
group = origin_task.group
# Save.
s["tasks"].pop(index)
s["tasks"].insert(index, Task(title, freq, group, origin_task.created))
print("\nTask was successfully updated.\n")
def delete_task(index_list):
"""
Deletes a task from storage by index.
    :param list index_list: List of lists of task indexes to be deleted.
"""
# Sort the IDs descending so while we pop item by item
# the task indexes remains the same.
to_delete = sorted([i[0] for i in index_list], reverse=True)
with get_storage() as s:
for i in to_delete:
try:
s["tasks"].pop(int(i) - 1)
except IndexError:
print(f"Cannot delete {i}")
def prune():
"""
Removes all overdue tasks. Overdue task is such task
which has a fixed date set as frequency and the date
is lower than today's date.
"""
with get_storage() as s:
to_delete = []
# Find tasks to delete.
for i, t in enumerate(s["tasks"]):
if isinstance(t.frequency, datetime) and t.frequency.date() < date.today():
to_delete.append(i)
# Sort the IDs descending so while we pop item by item
# the task indexes remains the same.
to_delete = sorted(to_delete, reverse=True)
# Delete the tasks.
for i in to_delete:
task = s["tasks"].pop(i)
print(f'Task "{task.title}" has been deleted.')
|
PypiClean
|
/collective.recipe.celery-1.0.tar.gz/collective.recipe.celery-1.0/collective/recipe/celery/README.txt
|
Example usage
=============
We'll start by creating a buildout that uses the recipe::
>>> write(sample_buildout, 'buildout.cfg',
... """
... [buildout]
... parts = celery
... index = %(server)s/index
... find-links = %(server)s
...
... [celery]
... recipe = collective.recipe.celery
... broker-transport = sqlakombu.transport.Transport
... broker-host = sqlite:///celery_broker.db
... result-backend = database
... result-dburi = sqlite:///celery_results.db
... imports = myapp.tasks
... """% dict(server=link_server))
Running the buildout gives us::
>>> print system(buildout)
Installing celery.
celery: Creating directory /sample-buildout/parts/celery.
celery: Generated config file /sample-buildout/parts/celery/celeryconfig.py.
Getting distribution for 'celery'.
Got celery 2.3.1.
Generated script '/sample-buildout/bin/celeryctl'.
Generated script '/sample-buildout/bin/celeryd'.
<BLANKLINE>
Check that we have the celery scripts::
>>> ls(sample_buildout, 'bin')
- buildout
- celeryctl
- celeryd
Check that we got a celery config file::
>>> ls(sample_buildout, 'parts', 'celery')
- celeryconfig.py
If we run the celeryd script, it prints out the config data::
>>> print(system(join(sample_buildout, 'bin', 'celeryd')))
BROKER_HOST='sqlite:///celery_broker.db'
BROKER_TRANSPORT='sqlakombu.transport.Transport'
CELERY_IMPORTS=('myapp.tasks',)
CELERY_RESULT_BACKEND='database'
CELERY_RESULT_DBURI='sqlite:///celery_results.db'
<BLANKLINE>
We can include additional eggs using the eggs option::
>>> write(sample_buildout, 'buildout.cfg',
... """
... [buildout]
... parts = celery
... index = %(server)s/index
... find-links = %(server)s
...
... [celery]
... recipe = collective.recipe.celery
... eggs =
... other
... """% dict(server=link_server))
>>> print system(buildout),
Uninstalling celery.
Installing celery.
celery: Generated config file /sample-buildout/parts/celery/celeryconfig.py.
Getting distribution for 'other'.
Got other 1.0.
Generated script '/sample-buildout/bin/celeryctl'.
Generated script '/sample-buildout/bin/celeryd'.
We can control which scripts are generated using the scripts option.
If no value is given, then script generation is disabled::
>>> write(sample_buildout, 'buildout.cfg',
... """
... [buildout]
... parts = celery
... index = %(server)s/index
... find-links = %(server)s
...
... [celery]
... recipe = collective.recipe.celery
... scripts =
... """% dict(server=link_server))
>>> print system(buildout),
Uninstalling celery.
Installing celery.
celery: Generated config file /sample-buildout/parts/celery/celeryconfig.py.
>>> ls(sample_buildout, 'bin')
- buildout
Let's create the celeryd script only::
>>> write(sample_buildout, 'buildout.cfg',
... """
... [buildout]
... parts = celery
... index = %(server)s/index
... find-links = %(server)s
...
... [celery]
... recipe = collective.recipe.celery
... scripts =
... celeryd
... """% dict(server=link_server))
>>> print system(buildout),
Uninstalling celery.
Installing celery.
celery: Generated config file /sample-buildout/parts/celery/celeryconfig.py.
Generated script '/sample-buildout/bin/celeryd'.
>>> ls(sample_buildout, 'bin')
- buildout
- celeryd
The supported configuration directives may be of various types including
strings, integers and tuples::
>>> write(sample_buildout, 'buildout.cfg',
... """
... [buildout]
... parts = celery
... index = %(server)s/index
... find-links = %(server)s
...
... [celery]
... recipe = collective.recipe.celery
... broker-port = 8080
... broker-user = guest
... imports =
... myapp.tasks
... other.tasks
... """% dict(server=link_server))
>>> print system(buildout),
Uninstalling celery.
Installing celery.
celery: Generated config file /sample-buildout/parts/celery/celeryconfig.py.
Generated script '/sample-buildout/bin/celeryctl'.
Generated script '/sample-buildout/bin/celeryd'.
Let's verify the generated config data::
>>> cat(sample_buildout, 'parts', 'celery', 'celeryconfig.py')
BROKER_PORT = 8080
BROKER_USER = 'guest'
CELERY_IMPORTS = ('myapp.tasks', 'other.tasks')
<BLANKLINE>
The recipe supports a limited set of celery's configuration directives. Any
additional directives can be added using the additional-config option::
>>> write(sample_buildout, 'buildout.cfg',
... """
... [buildout]
... parts = celery
... index = %(server)s/index
... find-links = %(server)s
...
... [celery]
... recipe = collective.recipe.celery
... additional-config =
... CELERY_TASK_PUBLISH_RETRY = True
... CELERY_TASK_PUBLISH_RETRY_POLICY = {"max_retries": 2,
... "interval_start": 10,
... "interval_step": 0,
... "interval_max": 10}
... """% dict(server=link_server))
>>> print system(buildout),
Uninstalling celery.
Installing celery.
celery: Generated config file /sample-buildout/parts/celery/celeryconfig.py.
Generated script '/sample-buildout/bin/celeryctl'.
Generated script '/sample-buildout/bin/celeryd'.
Let's verify the generated config data::
>>> cat(sample_buildout, 'parts', 'celery', 'celeryconfig.py')
CELERY_TASK_PUBLISH_RETRY = True
CELERY_TASK_PUBLISH_RETRY_POLICY = {"max_retries": 2,
"interval_start": 10,
"interval_step": 0,
"interval_max": 10}
<BLANKLINE>
|
PypiClean
|
/Cherrydoor-0.8.1-py3-none-any.whl/cherrydoor/static/js/socket.io.slim.js
|
!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.io=e():t.io=e()}(self,(function(){return function(t){var e={};function n(r){if(e[r])return e[r].exports;var o=e[r]={i:r,l:!1,exports:{}};return t[r].call(o.exports,o,o.exports,n),o.l=!0,o.exports}return n.m=t,n.c=e,n.d=function(t,e,r){n.o(t,e)||Object.defineProperty(t,e,{enumerable:!0,get:r})},n.r=function(t){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})},n.t=function(t,e){if(1&e&&(t=n(t)),8&e)return t;if(4&e&&"object"==typeof t&&t&&t.__esModule)return t;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:t}),2&e&&"string"!=typeof t)for(var o in t)n.d(r,o,function(e){return t[e]}.bind(null,o));return r},n.n=function(t){var e=t&&t.__esModule?function(){return t.default}:function(){return t};return n.d(e,"a",e),e},n.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},n.p="",n(n.s=18)}([function(t,e,n){var r=n(24),o=n(25),i=String.fromCharCode(30);t.exports={protocol:4,encodePacket:r,encodePayload:function(t,e){var n=t.length,o=new Array(n),s=0;t.forEach((function(t,c){r(t,!1,(function(t){o[c]=t,++s===n&&e(o.join(i))}))}))},decodePacket:o,decodePayload:function(t,e){for(var n=t.split(i),r=[],s=0;s<n.length;s++){var c=o(n[s],e);if(r.push(c),"error"===c.type)break}return r}}},function(t,e,n){function r(t){if(t)return function(t){for(var e in r.prototype)t[e]=r.prototype[e];return t}(t)}t.exports=r,r.prototype.on=r.prototype.addEventListener=function(t,e){return this._callbacks=this._callbacks||{},(this._callbacks["$"+t]=this._callbacks["$"+t]||[]).push(e),this},r.prototype.once=function(t,e){function n(){this.off(t,n),e.apply(this,arguments)}return n.fn=e,this.on(t,n),this},r.prototype.off=r.prototype.removeListener=r.prototype.removeAllListeners=r.prototype.removeEventListener=function(t,e){if(this._callbacks=this._callbacks||{},0==arguments.length)return this._callbacks={},this;var n,r=this._callbacks["$"+t];if(!r)return this;if(1==arguments.length)return delete this._callbacks["$"+t],this;for(var o=0;o<r.length;o++)if((n=r[o])===e||n.fn===e){r.splice(o,1);break}return 0===r.length&&delete this._callbacks["$"+t],this},r.prototype.emit=function(t){this._callbacks=this._callbacks||{};for(var e=new Array(arguments.length-1),n=this._callbacks["$"+t],r=1;r<arguments.length;r++)e[r-1]=arguments[r];if(n){r=0;for(var o=(n=n.slice(0)).length;r<o;++r)n[r].apply(this,e)}return this},r.prototype.listeners=function(t){return this._callbacks=this._callbacks||{},this._callbacks["$"+t]||[]},r.prototype.hasListeners=function(t){return!!this.listeners(t).length}},function(t,e){t.exports="undefined"!=typeof self?self:"undefined"!=typeof window?window:Function("return this")()},function(t,e,n){function r(t){return(r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}function o(t,e){for(var n=0;n<e.length;n++){var r=e[n];r.enumerable=r.enumerable||!1,r.configurable=!0,"value"in r&&(r.writable=!0),Object.defineProperty(t,r.key,r)}}function i(t,e){return(i=Object.setPrototypeOf||function(t,e){return t.__proto__=e,t})(t,e)}function s(t){var e=function(){if("undefined"==typeof 
Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Date.prototype.toString.call(Reflect.construct(Date,[],(function(){}))),!0}catch(t){return!1}}();return function(){var n,r=a(t);if(e){var o=a(this).constructor;n=Reflect.construct(r,arguments,o)}else n=r.apply(this,arguments);return c(this,n)}}function c(t,e){return!e||"object"!==r(e)&&"function"!=typeof e?function(t){if(void 0===t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return t}(t):e}function a(t){return(a=Object.setPrototypeOf?Object.getPrototypeOf:function(t){return t.__proto__||Object.getPrototypeOf(t)})(t)}var u=n(0),f=function(t){!function(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a function");t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,writable:!0,configurable:!0}}),e&&i(t,e)}(a,t);var e,n,r,c=s(a);function a(t){var e;return function(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}(this,a),(e=c.call(this)).opts=t,e.query=t.query,e.readyState="",e.socket=t.socket,e}return e=a,(n=[{key:"onError",value:function(t,e){var n=new Error(t);return n.type="TransportError",n.description=e,this.emit("error",n),this}},{key:"open",value:function(){return"closed"!==this.readyState&&""!==this.readyState||(this.readyState="opening",this.doOpen()),this}},{key:"close",value:function(){return"opening"!==this.readyState&&"open"!==this.readyState||(this.doClose(),this.onClose()),this}},{key:"send",value:function(t){"open"===this.readyState&&this.write(t)}},{key:"onOpen",value:function(){this.readyState="open",this.writable=!0,this.emit("open")}},{key:"onData",value:function(t){var e=u.decodePacket(t,this.socket.binaryType);this.onPacket(e)}},{key:"onPacket",value:function(t){this.emit("packet",t)}},{key:"onClose",value:function(){this.readyState="closed",this.emit("close")}}])&&o(e.prototype,n),r&&o(e,r),a}(n(1));t.exports=f},function(t,e){e.encode=function(t){var e="";for(var n in t)t.hasOwnProperty(n)&&(e.length&&(e+="&"),e+=encodeURIComponent(n)+"="+encodeURIComponent(t[n]));return e},e.decode=function(t){for(var e={},n=t.split("&"),r=0,o=n.length;r<o;r++){var i=n[r].split("=");e[decodeURIComponent(i[0])]=decodeURIComponent(i[1])}return e}},function(t,e,n){"use strict";function r(t){return(r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}function o(t,e,n){return(o="undefined"!=typeof Reflect&&Reflect.get?Reflect.get:function(t,e,n){var r=function(t,e){for(;!Object.prototype.hasOwnProperty.call(t,e)&&null!==(t=a(t)););return t}(t,e);if(r){var o=Object.getOwnPropertyDescriptor(r,e);return o.get?o.get.call(n):o.value}})(t,e,n||t)}function i(t,e){return(i=Object.setPrototypeOf||function(t,e){return t.__proto__=e,t})(t,e)}function s(t){var e=function(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Date.prototype.toString.call(Reflect.construct(Date,[],(function(){}))),!0}catch(t){return!1}}();return function(){var n,r=a(t);if(e){var o=a(this).constructor;n=Reflect.construct(r,arguments,o)}else n=r.apply(this,arguments);return c(this,n)}}function c(t,e){return!e||"object"!==r(e)&&"function"!=typeof e?function(t){if(void 0===t)throw new ReferenceError("this hasn't been initialised - 
super() hasn't been called");return t}(t):e}function a(t){return(a=Object.setPrototypeOf?Object.getPrototypeOf:function(t){return t.__proto__||Object.getPrototypeOf(t)})(t)}function u(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}function f(t,e){for(var n=0;n<e.length;n++){var r=e[n];r.enumerable=r.enumerable||!1,r.configurable=!0,"value"in r&&(r.writable=!0),Object.defineProperty(t,r.key,r)}}function l(t,e,n){return e&&f(t.prototype,e),n&&f(t,n),t}Object.defineProperty(e,"__esModule",{value:!0}),e.Decoder=e.Encoder=e.PacketType=e.protocol=void 0;var p,h=n(1),y=n(30),d=n(15);e.protocol=5,function(t){t[t.CONNECT=0]="CONNECT",t[t.DISCONNECT=1]="DISCONNECT",t[t.EVENT=2]="EVENT",t[t.ACK=3]="ACK",t[t.CONNECT_ERROR=4]="CONNECT_ERROR",t[t.BINARY_EVENT=5]="BINARY_EVENT",t[t.BINARY_ACK=6]="BINARY_ACK"}(p=e.PacketType||(e.PacketType={}));var v=function(){function t(){u(this,t)}return l(t,[{key:"encode",value:function(t){return t.type!==p.EVENT&&t.type!==p.ACK||!d.hasBinary(t)?[this.encodeAsString(t)]:(t.type=t.type===p.EVENT?p.BINARY_EVENT:p.BINARY_ACK,this.encodeAsBinary(t))}},{key:"encodeAsString",value:function(t){var e=""+t.type;return t.type!==p.BINARY_EVENT&&t.type!==p.BINARY_ACK||(e+=t.attachments+"-"),t.nsp&&"/"!==t.nsp&&(e+=t.nsp+","),null!=t.id&&(e+=t.id),null!=t.data&&(e+=JSON.stringify(t.data)),e}},{key:"encodeAsBinary",value:function(t){var e=y.deconstructPacket(t),n=this.encodeAsString(e.packet),r=e.buffers;return r.unshift(n),r}}]),t}();e.Encoder=v;var b=function(t){!function(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a function");t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,writable:!0,configurable:!0}}),e&&i(t,e)}(n,t);var e=s(n);function n(){return u(this,n),e.call(this)}return l(n,[{key:"add",value:function(t){var e;if("string"==typeof t)(e=this.decodeString(t)).type===p.BINARY_EVENT||e.type===p.BINARY_ACK?(this.reconstructor=new m(e),0===e.attachments&&o(a(n.prototype),"emit",this).call(this,"decoded",e)):o(a(n.prototype),"emit",this).call(this,"decoded",e);else{if(!d.isBinary(t)&&!t.base64)throw new Error("Unknown type: "+t);if(!this.reconstructor)throw new Error("got binary data when not reconstructing a packet");(e=this.reconstructor.takeBinaryData(t))&&(this.reconstructor=null,o(a(n.prototype),"emit",this).call(this,"decoded",e))}}},{key:"decodeString",value:function(t){var e=0,r={type:Number(t.charAt(0))};if(void 0===p[r.type])throw new Error("unknown packet type "+r.type);if(r.type===p.BINARY_EVENT||r.type===p.BINARY_ACK){for(var o=e+1;"-"!==t.charAt(++e)&&e!=t.length;);var i=t.substring(o,e);if(i!=Number(i)||"-"!==t.charAt(e))throw new Error("Illegal attachments");r.attachments=Number(i)}if("/"===t.charAt(e+1)){for(var s=e+1;++e;){if(","===t.charAt(e))break;if(e===t.length)break}r.nsp=t.substring(s,e)}else r.nsp="/";var c=t.charAt(e+1);if(""!==c&&Number(c)==c){for(var a=e+1;++e;){var u=t.charAt(e);if(null==u||Number(u)!=u){--e;break}if(e===t.length)break}r.id=Number(t.substring(a,e+1))}if(t.charAt(++e)){var f=function(t){try{return JSON.parse(t)}catch(t){return!1}}(t.substr(e));if(!n.isPayloadValid(r.type,f))throw new Error("invalid payload");r.data=f}return r}},{key:"destroy",value:function(){this.reconstructor&&this.reconstructor.finishedReconstruction()}}],[{key:"isPayloadValid",value:function(t,e){switch(t){case p.CONNECT:return"object"===r(e);case p.DISCONNECT:return void 0===e;case p.CONNECT_ERROR:return"string"==typeof e||"object"===r(e);case p.EVENT:case 
p.BINARY_EVENT:return Array.isArray(e)&&e.length>0;case p.ACK:case p.BINARY_ACK:return Array.isArray(e)}}}]),n}(h);e.Decoder=b;var m=function(){function t(e){u(this,t),this.packet=e,this.buffers=[],this.reconPack=e}return l(t,[{key:"takeBinaryData",value:function(t){if(this.buffers.push(t),this.buffers.length===this.reconPack.attachments){var e=y.reconstructPacket(this.reconPack,this.buffers);return this.finishedReconstruction(),e}return null}},{key:"finishedReconstruction",value:function(){this.reconPack=null,this.buffers=[]}}]),t}()},function(t,e){var n=/^(?:(?![^:@]+:[^:@\/]*@)(http|https|ws|wss):\/\/)?((?:(([^:@]*)(?::([^:@]*))?)?@)?((?:[a-f0-9]{0,4}:){2,7}[a-f0-9]{0,4}|[^:\/?#]*)(?::(\d*))?)(((\/(?:[^?#](?![^?#\/]*\.[^?#\/.]+(?:[?#]|$)))*\/?)?([^?#\/]*))(?:\?([^#]*))?(?:#(.*))?)/,r=["source","protocol","authority","userInfo","user","password","host","port","relative","path","directory","file","query","anchor"];t.exports=function(t){var e=t,o=t.indexOf("["),i=t.indexOf("]");-1!=o&&-1!=i&&(t=t.substring(0,o)+t.substring(o,i).replace(/:/g,";")+t.substring(i,t.length));for(var s,c,a=n.exec(t||""),u={},f=14;f--;)u[r[f]]=a[f]||"";return-1!=o&&-1!=i&&(u.source=e,u.host=u.host.substring(1,u.host.length-1).replace(/;/g,":"),u.authority=u.authority.replace("[","").replace("]","").replace(/;/g,":"),u.ipv6uri=!0),u.pathNames=function(t,e){var n=e.replace(/\/{2,9}/g,"/").split("/");"/"!=e.substr(0,1)&&0!==e.length||n.splice(0,1);"/"==e.substr(e.length-1,1)&&n.splice(n.length-1,1);return n}(0,u.path),u.queryKey=(s=u.query,c={},s.replace(/(?:^|&)([^&=]*)=?([^&]*)/g,(function(t,e,n){e&&(c[e]=n)})),c),u}},function(t,e,n){"use strict";function r(t){return(r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}function o(t,e){for(var n=0;n<e.length;n++){var r=e[n];r.enumerable=r.enumerable||!1,r.configurable=!0,"value"in r&&(r.writable=!0),Object.defineProperty(t,r.key,r)}}function i(t,e){return(i=Object.setPrototypeOf||function(t,e){return t.__proto__=e,t})(t,e)}function s(t){var e=function(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Date.prototype.toString.call(Reflect.construct(Date,[],(function(){}))),!0}catch(t){return!1}}();return function(){var n,r=a(t);if(e){var o=a(this).constructor;n=Reflect.construct(r,arguments,o)}else n=r.apply(this,arguments);return c(this,n)}}function c(t,e){return!e||"object"!==r(e)&&"function"!=typeof e?function(t){if(void 0===t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return t}(t):e}function a(t){return(a=Object.setPrototypeOf?Object.getPrototypeOf:function(t){return t.__proto__||Object.getPrototypeOf(t)})(t)}Object.defineProperty(e,"__esModule",{value:!0}),e.Manager=void 0;var u=n(20),f=n(14),l=n(5),p=n(16),h=n(31),y=function(t){!function(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a function");t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,writable:!0,configurable:!0}}),e&&i(t,e)}(y,t);var e,n,c,a=s(y);function y(t,e){var n;!function(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}(this,y),(n=a.call(this)).nsps={},n.subs=[],t&&"object"===r(t)&&(e=t,t=void 
0),(e=e||{}).path=e.path||"/socket.io",n.opts=e,n.reconnection(!1!==e.reconnection),n.reconnectionAttempts(e.reconnectionAttempts||1/0),n.reconnectionDelay(e.reconnectionDelay||1e3),n.reconnectionDelayMax(e.reconnectionDelayMax||5e3),n.randomizationFactor(e.randomizationFactor||.5),n.backoff=new h({min:n.reconnectionDelay(),max:n.reconnectionDelayMax(),jitter:n.randomizationFactor()}),n.timeout(null==e.timeout?2e4:e.timeout),n._readyState="closed",n.uri=t;var o=e.parser||l;return n.encoder=new o.Encoder,n.decoder=new o.Decoder,n._autoConnect=!1!==e.autoConnect,n._autoConnect&&n.open(),n}return e=y,(n=[{key:"reconnection",value:function(t){return arguments.length?(this._reconnection=!!t,this):this._reconnection}},{key:"reconnectionAttempts",value:function(t){return void 0===t?this._reconnectionAttempts:(this._reconnectionAttempts=t,this)}},{key:"reconnectionDelay",value:function(t){var e;return void 0===t?this._reconnectionDelay:(this._reconnectionDelay=t,null===(e=this.backoff)||void 0===e||e.setMin(t),this)}},{key:"randomizationFactor",value:function(t){var e;return void 0===t?this._randomizationFactor:(this._randomizationFactor=t,null===(e=this.backoff)||void 0===e||e.setJitter(t),this)}},{key:"reconnectionDelayMax",value:function(t){var e;return void 0===t?this._reconnectionDelayMax:(this._reconnectionDelayMax=t,null===(e=this.backoff)||void 0===e||e.setMax(t),this)}},{key:"timeout",value:function(t){return arguments.length?(this._timeout=t,this):this._timeout}},{key:"maybeReconnectOnOpen",value:function(){!this._reconnecting&&this._reconnection&&0===this.backoff.attempts&&this.reconnect()}},{key:"open",value:function(t){var e=this;if(~this._readyState.indexOf("open"))return this;this.engine=u(this.uri,this.opts);var n=this.engine,r=this;this._readyState="opening",this.skipReconnect=!1;var o=p.on(n,"open",(function(){r.onopen(),t&&t()})),i=p.on(n,"error",(function(n){r.cleanup(),r._readyState="closed",e.emitReserved("error",n),t?t(n):r.maybeReconnectOnOpen()}));if(!1!==this._timeout){var s=this._timeout;0===s&&o();var c=setTimeout((function(){o(),n.close(),n.emit("error",new Error("timeout"))}),s);this.opts.autoUnref&&c.unref(),this.subs.push((function(){clearTimeout(c)}))}return this.subs.push(o),this.subs.push(i),this}},{key:"connect",value:function(t){return this.open(t)}},{key:"onopen",value:function(){this.cleanup(),this._readyState="open",this.emitReserved("open");var t=this.engine;this.subs.push(p.on(t,"ping",this.onping.bind(this)),p.on(t,"data",this.ondata.bind(this)),p.on(t,"error",this.onerror.bind(this)),p.on(t,"close",this.onclose.bind(this)),p.on(this.decoder,"decoded",this.ondecoded.bind(this)))}},{key:"onping",value:function(){this.emitReserved("ping")}},{key:"ondata",value:function(t){this.decoder.add(t)}},{key:"ondecoded",value:function(t){this.emitReserved("packet",t)}},{key:"onerror",value:function(t){this.emitReserved("error",t)}},{key:"socket",value:function(t,e){var n=this.nsps[t];return n||(n=new f.Socket(this,t,e),this.nsps[t]=n),n}},{key:"_destroy",value:function(t){for(var e=0,n=Object.keys(this.nsps);e<n.length;e++){var r=n[e];if(this.nsps[r].active)return}this._close()}},{key:"_packet",value:function(t){for(var e=this.encoder.encode(t),n=0;n<e.length;n++)this.engine.write(e[n],t.options)}},{key:"cleanup",value:function(){this.subs.forEach((function(t){return 
t()})),this.subs.length=0,this.decoder.destroy()}},{key:"_close",value:function(){this.skipReconnect=!0,this._reconnecting=!1,"opening"===this._readyState&&this.cleanup(),this.backoff.reset(),this._readyState="closed",this.engine&&this.engine.close()}},{key:"disconnect",value:function(){return this._close()}},{key:"onclose",value:function(t){this.cleanup(),this.backoff.reset(),this._readyState="closed",this.emitReserved("close",t),this._reconnection&&!this.skipReconnect&&this.reconnect()}},{key:"reconnect",value:function(){var t=this;if(this._reconnecting||this.skipReconnect)return this;var e=this;if(this.backoff.attempts>=this._reconnectionAttempts)this.backoff.reset(),this.emitReserved("reconnect_failed"),this._reconnecting=!1;else{var n=this.backoff.duration();this._reconnecting=!0;var r=setTimeout((function(){e.skipReconnect||(t.emitReserved("reconnect_attempt",e.backoff.attempts),e.skipReconnect||e.open((function(n){n?(e._reconnecting=!1,e.reconnect(),t.emitReserved("reconnect_error",n)):e.onreconnect()})))}),n);this.opts.autoUnref&&r.unref(),this.subs.push((function(){clearTimeout(r)}))}}},{key:"onreconnect",value:function(){var t=this.backoff.attempts;this._reconnecting=!1,this.backoff.reset(),this.emitReserved("reconnect",t)}}])&&o(e.prototype,n),c&&o(e,c),y}(n(17).StrictEventEmitter);e.Manager=y},function(t,e,n){var r=n(9),o=n(23),i=n(27),s=n(28);e.polling=function(t){var e=!1,n=!1,s=!1!==t.jsonp;if("undefined"!=typeof location){var c="https:"===location.protocol,a=location.port;a||(a=c?443:80),e=t.hostname!==location.hostname||a!==t.port,n=t.secure!==c}if(t.xdomain=e,t.xscheme=n,"open"in new r(t)&&!t.forceJSONP)return new o(t);if(!s)throw new Error("JSONP disabled");return new i(t)},e.websocket=s},function(t,e,n){var r=n(22),o=n(2);t.exports=function(t){var e=t.xdomain,n=t.xscheme,i=t.enablesXDR;try{if("undefined"!=typeof XMLHttpRequest&&(!e||r))return new XMLHttpRequest}catch(t){}try{if("undefined"!=typeof XDomainRequest&&!n&&i)return new XDomainRequest}catch(t){}if(!e)try{return new(o[["Active"].concat("Object").join("X")])("Microsoft.XMLHTTP")}catch(t){}}},function(t,e,n){function r(t){return(r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}function o(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}function i(t,e){for(var n=0;n<e.length;n++){var r=e[n];r.enumerable=r.enumerable||!1,r.configurable=!0,"value"in r&&(r.writable=!0),Object.defineProperty(t,r.key,r)}}function s(t,e){return(s=Object.setPrototypeOf||function(t,e){return t.__proto__=e,t})(t,e)}function c(t){var e=function(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Date.prototype.toString.call(Reflect.construct(Date,[],(function(){}))),!0}catch(t){return!1}}();return function(){var n,r=u(t);if(e){var o=u(this).constructor;n=Reflect.construct(r,arguments,o)}else n=r.apply(this,arguments);return a(this,n)}}function a(t,e){return!e||"object"!==r(e)&&"function"!=typeof e?function(t){if(void 0===t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return t}(t):e}function u(t){return(u=Object.setPrototypeOf?Object.getPrototypeOf:function(t){return t.__proto__||Object.getPrototypeOf(t)})(t)}var f=n(3),l=n(4),p=n(0),h=n(12),y=function(t){!function(t,e){if("function"!=typeof e&&null!==e)throw new 
TypeError("Super expression must either be null or a function");t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,writable:!0,configurable:!0}}),e&&s(t,e)}(u,t);var e,n,r,a=c(u);function u(){return o(this,u),a.apply(this,arguments)}return e=u,(n=[{key:"doOpen",value:function(){this.poll()}},{key:"pause",value:function(t){var e=this;this.readyState="pausing";var n=function(){e.readyState="paused",t()};if(this.polling||!this.writable){var r=0;this.polling&&(r++,this.once("pollComplete",(function(){--r||n()}))),this.writable||(r++,this.once("drain",(function(){--r||n()})))}else n()}},{key:"poll",value:function(){this.polling=!0,this.doPoll(),this.emit("poll")}},{key:"onData",value:function(t){var e=this;p.decodePayload(t,this.socket.binaryType).forEach((function(t){if("opening"===e.readyState&&"open"===t.type&&e.onOpen(),"close"===t.type)return e.onClose(),!1;e.onPacket(t)})),"closed"!==this.readyState&&(this.polling=!1,this.emit("pollComplete"),"open"===this.readyState&&this.poll())}},{key:"doClose",value:function(){var t=this,e=function(){t.write([{type:"close"}])};"open"===this.readyState?e():this.once("open",e)}},{key:"write",value:function(t){var e=this;this.writable=!1,p.encodePayload(t,(function(t){e.doWrite(t,(function(){e.writable=!0,e.emit("drain")}))}))}},{key:"uri",value:function(){var t=this.query||{},e=this.opts.secure?"https":"http",n="";return!1!==this.opts.timestampRequests&&(t[this.opts.timestampParam]=h()),this.supportsBinary||t.sid||(t.b64=1),t=l.encode(t),this.opts.port&&("https"===e&&443!==Number(this.opts.port)||"http"===e&&80!==Number(this.opts.port))&&(n=":"+this.opts.port),t.length&&(t="?"+t),e+"://"+(-1!==this.opts.hostname.indexOf(":")?"["+this.opts.hostname+"]":this.opts.hostname)+n+this.opts.path+t}},{key:"name",get:function(){return"polling"}}])&&i(e.prototype,n),r&&i(e,r),u}(f);t.exports=y},function(t,e){var n=Object.create(null);n.open="0",n.close="1",n.ping="2",n.pong="3",n.message="4",n.upgrade="5",n.noop="6";var r=Object.create(null);Object.keys(n).forEach((function(t){r[n[t]]=t}));t.exports={PACKET_TYPES:n,PACKET_TYPES_REVERSE:r,ERROR_PACKET:{type:"error",data:"parser error"}}},function(t,e,n){"use strict";var r,o="0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-_".split(""),i={},s=0,c=0;function a(t){var e="";do{e=o[t%64]+e,t=Math.floor(t/64)}while(t>0);return e}function u(){var t=a(+new Date);return t!==r?(s=0,r=t):t+"."+a(s++)}for(;c<64;c++)i[o[c]]=c;u.encode=a,u.decode=function(t){var e=0;for(c=0;c<t.length;c++)e=64*e+i[t.charAt(c)];return e},t.exports=u},function(t,e){t.exports.pick=function(t){for(var e=arguments.length,n=new Array(e>1?e-1:0),r=1;r<e;r++)n[r-1]=arguments[r];return n.reduce((function(e,n){return t.hasOwnProperty(n)&&(e[n]=t[n]),e}),{})}},function(t,e,n){"use strict";function r(t){return(r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}function o(t,e){var n;if("undefined"==typeof Symbol||null==t[Symbol.iterator]){if(Array.isArray(t)||(n=function(t,e){if(!t)return;if("string"==typeof t)return i(t,e);var n=Object.prototype.toString.call(t).slice(8,-1);"Object"===n&&t.constructor&&(n=t.constructor.name);if("Map"===n||"Set"===n)return Array.from(t);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return i(t,e)}(t))||e&&t&&"number"==typeof t.length){n&&(t=n);var r=0,o=function(){};return{s:o,n:function(){return 
r>=t.length?{done:!0}:{done:!1,value:t[r++]}},e:function(t){throw t},f:o}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}var s,c=!0,a=!1;return{s:function(){n=t[Symbol.iterator]()},n:function(){var t=n.next();return c=t.done,t},e:function(t){a=!0,s=t},f:function(){try{c||null==n.return||n.return()}finally{if(a)throw s}}}}function i(t,e){(null==e||e>t.length)&&(e=t.length);for(var n=0,r=new Array(e);n<e;n++)r[n]=t[n];return r}function s(t,e){for(var n=0;n<e.length;n++){var r=e[n];r.enumerable=r.enumerable||!1,r.configurable=!0,"value"in r&&(r.writable=!0),Object.defineProperty(t,r.key,r)}}function c(t,e,n){return(c="undefined"!=typeof Reflect&&Reflect.get?Reflect.get:function(t,e,n){var r=function(t,e){for(;!Object.prototype.hasOwnProperty.call(t,e)&&null!==(t=l(t)););return t}(t,e);if(r){var o=Object.getOwnPropertyDescriptor(r,e);return o.get?o.get.call(n):o.value}})(t,e,n||t)}function a(t,e){return(a=Object.setPrototypeOf||function(t,e){return t.__proto__=e,t})(t,e)}function u(t){var e=function(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Date.prototype.toString.call(Reflect.construct(Date,[],(function(){}))),!0}catch(t){return!1}}();return function(){var n,r=l(t);if(e){var o=l(this).constructor;n=Reflect.construct(r,arguments,o)}else n=r.apply(this,arguments);return f(this,n)}}function f(t,e){return!e||"object"!==r(e)&&"function"!=typeof e?function(t){if(void 0===t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return t}(t):e}function l(t){return(l=Object.setPrototypeOf?Object.getPrototypeOf:function(t){return t.__proto__||Object.getPrototypeOf(t)})(t)}Object.defineProperty(e,"__esModule",{value:!0}),e.Socket=void 0;var p=n(5),h=n(16),y=n(17),d=Object.freeze({connect:1,connect_error:1,disconnect:1,disconnecting:1,newListener:1,removeListener:1}),v=function(t){!function(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a function");t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,writable:!0,configurable:!0}}),e&&a(t,e)}(f,t);var e,n,r,i=u(f);function f(t,e,n){var r;return function(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}(this,f),(r=i.call(this)).receiveBuffer=[],r.sendBuffer=[],r.ids=0,r.acks={},r.flags={},r.io=t,r.nsp=e,r.ids=0,r.acks={},r.receiveBuffer=[],r.sendBuffer=[],r.connected=!1,r.disconnected=!0,r.flags={},n&&n.auth&&(r.auth=n.auth),r.io._autoConnect&&r.open(),r}return e=f,(n=[{key:"subEvents",value:function(){if(!this.subs){var t=this.io;this.subs=[h.on(t,"open",this.onopen.bind(this)),h.on(t,"packet",this.onpacket.bind(this)),h.on(t,"error",this.onerror.bind(this)),h.on(t,"close",this.onclose.bind(this))]}}},{key:"connect",value:function(){return this.connected||(this.subEvents(),this.io._reconnecting||this.io.open(),"open"===this.io._readyState&&this.onopen()),this}},{key:"open",value:function(){return this.connect()}},{key:"send",value:function(){for(var t=arguments.length,e=new Array(t),n=0;n<t;n++)e[n]=arguments[n];return e.unshift("message"),this.emit.apply(this,e),this}},{key:"emit",value:function(t){if(d.hasOwnProperty(t))throw new Error('"'+t+'" is a reserved event name');for(var e=arguments.length,n=new Array(e>1?e-1:0),r=1;r<e;r++)n[r-1]=arguments[r];n.unshift(t);var 
o={type:p.PacketType.EVENT,data:n,options:{}};o.options.compress=!1!==this.flags.compress,"function"==typeof n[n.length-1]&&(this.acks[this.ids]=n.pop(),o.id=this.ids++);var i=this.io.engine&&this.io.engine.transport&&this.io.engine.transport.writable,s=this.flags.volatile&&(!i||!this.connected);return s||(this.connected?this.packet(o):this.sendBuffer.push(o)),this.flags={},this}},{key:"packet",value:function(t){t.nsp=this.nsp,this.io._packet(t)}},{key:"onopen",value:function(){var t=this;"function"==typeof this.auth?this.auth((function(e){t.packet({type:p.PacketType.CONNECT,data:e})})):this.packet({type:p.PacketType.CONNECT,data:this.auth})}},{key:"onerror",value:function(t){this.connected||this.emitReserved("connect_error",t)}},{key:"onclose",value:function(t){this.connected=!1,this.disconnected=!0,delete this.id,this.emitReserved("disconnect",t)}},{key:"onpacket",value:function(t){if(t.nsp===this.nsp)switch(t.type){case p.PacketType.CONNECT:if(t.data&&t.data.sid){var e=t.data.sid;this.onconnect(e)}else this.emitReserved("connect_error",new Error("It seems you are trying to reach a Socket.IO server in v2.x with a v3.x client, but they are not compatible (more information here: https://socket.io/docs/v3/migrating-from-2-x-to-3-0/)"));break;case p.PacketType.EVENT:case p.PacketType.BINARY_EVENT:this.onevent(t);break;case p.PacketType.ACK:case p.PacketType.BINARY_ACK:this.onack(t);break;case p.PacketType.DISCONNECT:this.ondisconnect();break;case p.PacketType.CONNECT_ERROR:var n=new Error(t.data.message);n.data=t.data.data,this.emitReserved("connect_error",n)}}},{key:"onevent",value:function(t){var e=t.data||[];null!=t.id&&e.push(this.ack(t.id)),this.connected?this.emitEvent(e):this.receiveBuffer.push(Object.freeze(e))}},{key:"emitEvent",value:function(t){if(this._anyListeners&&this._anyListeners.length){var e,n=o(this._anyListeners.slice());try{for(n.s();!(e=n.n()).done;)e.value.apply(this,t)}catch(t){n.e(t)}finally{n.f()}}c(l(f.prototype),"emit",this).apply(this,t)}},{key:"ack",value:function(t){var e=this,n=!1;return function(){if(!n){n=!0;for(var r=arguments.length,o=new Array(r),i=0;i<r;i++)o[i]=arguments[i];e.packet({type:p.PacketType.ACK,id:t,data:o})}}}},{key:"onack",value:function(t){var e=this.acks[t.id];"function"==typeof e&&(e.apply(this,t.data),delete this.acks[t.id])}},{key:"onconnect",value:function(t){this.id=t,this.connected=!0,this.disconnected=!1,this.emitBuffered(),this.emitReserved("connect")}},{key:"emitBuffered",value:function(){var t=this;this.receiveBuffer.forEach((function(e){return t.emitEvent(e)})),this.receiveBuffer=[],this.sendBuffer.forEach((function(e){return t.packet(e)})),this.sendBuffer=[]}},{key:"ondisconnect",value:function(){this.destroy(),this.onclose("io server disconnect")}},{key:"destroy",value:function(){this.subs&&(this.subs.forEach((function(t){return t()})),this.subs=void 0),this.io._destroy(this)}},{key:"disconnect",value:function(){return this.connected&&this.packet({type:p.PacketType.DISCONNECT}),this.destroy(),this.connected&&this.onclose("io client disconnect"),this}},{key:"close",value:function(){return this.disconnect()}},{key:"compress",value:function(t){return this.flags.compress=t,this}},{key:"onAny",value:function(t){return this._anyListeners=this._anyListeners||[],this._anyListeners.push(t),this}},{key:"prependAny",value:function(t){return this._anyListeners=this._anyListeners||[],this._anyListeners.unshift(t),this}},{key:"offAny",value:function(t){if(!this._anyListeners)return this;if(t){for(var 
e=this._anyListeners,n=0;n<e.length;n++)if(t===e[n])return e.splice(n,1),this}else this._anyListeners=[];return this}},{key:"listenersAny",value:function(){return this._anyListeners||[]}},{key:"active",get:function(){return!!this.subs}},{key:"volatile",get:function(){return this.flags.volatile=!0,this}}])&&s(e.prototype,n),r&&s(e,r),f}(y.StrictEventEmitter);e.Socket=v},function(t,e,n){"use strict";function r(t){return(r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}Object.defineProperty(e,"__esModule",{value:!0}),e.hasBinary=e.isBinary=void 0;var o="function"==typeof ArrayBuffer,i=Object.prototype.toString,s="function"==typeof Blob||"undefined"!=typeof Blob&&"[object BlobConstructor]"===i.call(Blob),c="function"==typeof File||"undefined"!=typeof File&&"[object FileConstructor]"===i.call(File);function a(t){return o&&(t instanceof ArrayBuffer||function(t){return"function"==typeof ArrayBuffer.isView?ArrayBuffer.isView(t):t.buffer instanceof ArrayBuffer}(t))||s&&t instanceof Blob||c&&t instanceof File}e.isBinary=a,e.hasBinary=function t(e,n){if(!e||"object"!==r(e))return!1;if(Array.isArray(e)){for(var o=0,i=e.length;o<i;o++)if(t(e[o]))return!0;return!1}if(a(e))return!0;if(e.toJSON&&"function"==typeof e.toJSON&&1===arguments.length)return t(e.toJSON(),!0);for(var s in e)if(Object.prototype.hasOwnProperty.call(e,s)&&t(e[s]))return!0;return!1}},function(t,e,n){"use strict";Object.defineProperty(e,"__esModule",{value:!0}),e.on=void 0,e.on=function(t,e,n){return t.on(e,n),function(){t.off(e,n)}}},function(t,e,n){"use strict";function r(t){return(r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}function o(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}function i(t,e){for(var n=0;n<e.length;n++){var r=e[n];r.enumerable=r.enumerable||!1,r.configurable=!0,"value"in r&&(r.writable=!0),Object.defineProperty(t,r.key,r)}}function s(t,e,n){return(s="undefined"!=typeof Reflect&&Reflect.get?Reflect.get:function(t,e,n){var r=function(t,e){for(;!Object.prototype.hasOwnProperty.call(t,e)&&null!==(t=f(t)););return t}(t,e);if(r){var o=Object.getOwnPropertyDescriptor(r,e);return o.get?o.get.call(n):o.value}})(t,e,n||t)}function c(t,e){return(c=Object.setPrototypeOf||function(t,e){return t.__proto__=e,t})(t,e)}function a(t){var e=function(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Date.prototype.toString.call(Reflect.construct(Date,[],(function(){}))),!0}catch(t){return!1}}();return function(){var n,r=f(t);if(e){var o=f(this).constructor;n=Reflect.construct(r,arguments,o)}else n=r.apply(this,arguments);return u(this,n)}}function u(t,e){return!e||"object"!==r(e)&&"function"!=typeof e?function(t){if(void 0===t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return t}(t):e}function f(t){return(f=Object.setPrototypeOf?Object.getPrototypeOf:function(t){return t.__proto__||Object.getPrototypeOf(t)})(t)}Object.defineProperty(e,"__esModule",{value:!0}),e.StrictEventEmitter=void 0;var l=function(t){!function(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a 
function");t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,writable:!0,configurable:!0}}),e&&c(t,e)}(l,t);var e,n,r,u=a(l);function l(){return o(this,l),u.apply(this,arguments)}return e=l,(n=[{key:"on",value:function(t,e){return s(f(l.prototype),"on",this).call(this,t,e),this}},{key:"once",value:function(t,e){return s(f(l.prototype),"once",this).call(this,t,e),this}},{key:"emit",value:function(t){for(var e,n=arguments.length,r=new Array(n>1?n-1:0),o=1;o<n;o++)r[o-1]=arguments[o];return(e=s(f(l.prototype),"emit",this)).call.apply(e,[this,t].concat(r)),this}},{key:"emitReserved",value:function(t){for(var e,n=arguments.length,r=new Array(n>1?n-1:0),o=1;o<n;o++)r[o-1]=arguments[o];return(e=s(f(l.prototype),"emit",this)).call.apply(e,[this,t].concat(r)),this}},{key:"listeners",value:function(t){return s(f(l.prototype),"listeners",this).call(this,t)}}])&&i(e.prototype,n),r&&i(e,r),l}(n(1));e.StrictEventEmitter=l},function(t,e,n){"use strict";function r(t){return(r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}Object.defineProperty(e,"__esModule",{value:!0}),e.io=e.Socket=e.Manager=e.protocol=void 0;var o=n(19),i=n(7);t.exports=e=c;var s=e.managers={};function c(t,e){"object"===r(t)&&(e=t,t=void 0),e=e||{};var n,c=o.url(t,e.path||"/socket.io"),a=c.source,u=c.id,f=c.path,l=s[u]&&f in s[u].nsps;return e.forceNew||e["force new connection"]||!1===e.multiplex||l?n=new i.Manager(a,e):(s[u]||(s[u]=new i.Manager(a,e)),n=s[u]),c.query&&!e.query&&(e.query=c.queryKey),n.socket(c.path,e)}e.io=c;var a=n(5);Object.defineProperty(e,"protocol",{enumerable:!0,get:function(){return a.protocol}}),e.connect=c;var u=n(7);Object.defineProperty(e,"Manager",{enumerable:!0,get:function(){return u.Manager}});var f=n(14);Object.defineProperty(e,"Socket",{enumerable:!0,get:function(){return f.Socket}}),e.default=c},function(t,e,n){"use strict";Object.defineProperty(e,"__esModule",{value:!0}),e.url=void 0;var r=n(6);e.url=function(t){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"",n=arguments.length>2?arguments[2]:void 0,o=t;n=n||"undefined"!=typeof location&&location,null==t&&(t=n.protocol+"//"+n.host),"string"==typeof t&&("/"===t.charAt(0)&&(t="/"===t.charAt(1)?n.protocol+t:n.host+t),/^(https?|wss?):\/\//.test(t)||(t=void 0!==n?n.protocol+"//"+t:"https://"+t),o=r(t)),o.port||(/^(http|ws)$/.test(o.protocol)?o.port="80":/^(http|ws)s$/.test(o.protocol)&&(o.port="443")),o.path=o.path||"/";var i=-1!==o.host.indexOf(":"),s=i?"["+o.host+"]":o.host;return o.id=o.protocol+"://"+s+":"+o.port+e,o.href=o.protocol+"://"+s+(n&&n.port===o.port?"":":"+o.port),o}},function(t,e,n){var r=n(21);t.exports=function(t,e){return new r(t,e)},t.exports.Socket=r,t.exports.protocol=r.protocol,t.exports.Transport=n(3),t.exports.transports=n(8),t.exports.parser=n(0)},function(t,e,n){function r(){return(r=Object.assign||function(t){for(var e=1;e<arguments.length;e++){var n=arguments[e];for(var r in n)Object.prototype.hasOwnProperty.call(n,r)&&(t[r]=n[r])}return t}).apply(this,arguments)}function o(t){return(o="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}function i(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}function s(t,e){for(var n=0;n<e.length;n++){var 
r=e[n];r.enumerable=r.enumerable||!1,r.configurable=!0,"value"in r&&(r.writable=!0),Object.defineProperty(t,r.key,r)}}function c(t,e){return(c=Object.setPrototypeOf||function(t,e){return t.__proto__=e,t})(t,e)}function a(t){var e=function(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Date.prototype.toString.call(Reflect.construct(Date,[],(function(){}))),!0}catch(t){return!1}}();return function(){var n,r=f(t);if(e){var o=f(this).constructor;n=Reflect.construct(r,arguments,o)}else n=r.apply(this,arguments);return u(this,n)}}function u(t,e){return!e||"object"!==o(e)&&"function"!=typeof e?function(t){if(void 0===t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return t}(t):e}function f(t){return(f=Object.setPrototypeOf?Object.getPrototypeOf:function(t){return t.__proto__||Object.getPrototypeOf(t)})(t)}var l=n(8),p=n(1),h=n(0),y=n(6),d=n(4),v=function(t){!function(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a function");t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,writable:!0,configurable:!0}}),e&&c(t,e)}(p,t);var e,n,u,f=a(p);function p(t){var e,n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};return i(this,p),e=f.call(this),t&&"object"===o(t)&&(n=t,t=null),t?(t=y(t),n.hostname=t.host,n.secure="https"===t.protocol||"wss"===t.protocol,n.port=t.port,t.query&&(n.query=t.query)):n.host&&(n.hostname=y(n.host).host),e.secure=null!=n.secure?n.secure:"undefined"!=typeof location&&"https:"===location.protocol,n.hostname&&!n.port&&(n.port=e.secure?"443":"80"),e.hostname=n.hostname||("undefined"!=typeof location?location.hostname:"localhost"),e.port=n.port||("undefined"!=typeof location&&location.port?location.port:e.secure?443:80),e.transports=n.transports||["polling","websocket"],e.readyState="",e.writeBuffer=[],e.prevBufferLen=0,e.opts=r({path:"/engine.io",agent:!1,withCredentials:!1,upgrade:!0,jsonp:!0,timestampParam:"t",rememberUpgrade:!1,rejectUnauthorized:!0,perMessageDeflate:{threshold:1024},transportOptions:{},closeOnBeforeunload:!0},n),e.opts.path=e.opts.path.replace(/\/$/,"")+"/","string"==typeof e.opts.query&&(e.opts.query=d.decode(e.opts.query)),e.id=null,e.upgrades=null,e.pingInterval=null,e.pingTimeout=null,e.pingTimeoutTimer=null,"function"==typeof addEventListener&&(e.opts.closeOnBeforeunload&&addEventListener("beforeunload",(function(){e.transport&&(e.transport.removeAllListeners(),e.transport.close())}),!1),"localhost"!==e.hostname&&(e.offlineEventListener=function(){e.onClose("transport close")},addEventListener("offline",e.offlineEventListener,!1))),e.open(),e}return e=p,(n=[{key:"createTransport",value:function(t){var e=function(t){var e={};for(var n in t)t.hasOwnProperty(n)&&(e[n]=t[n]);return e}(this.opts.query);e.EIO=h.protocol,e.transport=t,this.id&&(e.sid=this.id);var n=r({},this.opts.transportOptions[t],this.opts,{query:e,socket:this,hostname:this.hostname,secure:this.secure,port:this.port});return new l[t](n)}},{key:"open",value:function(){var t,e=this;if(this.opts.rememberUpgrade&&p.priorWebsocketSuccess&&-1!==this.transports.indexOf("websocket"))t="websocket";else{if(0===this.transports.length)return void setTimeout((function(){e.emit("error","No transports available")}),0);t=this.transports[0]}this.readyState="opening";try{t=this.createTransport(t)}catch(t){return this.transports.shift(),void 
this.open()}t.open(),this.setTransport(t)}},{key:"setTransport",value:function(t){var e=this;this.transport&&this.transport.removeAllListeners(),this.transport=t,t.on("drain",this.onDrain.bind(this)).on("packet",this.onPacket.bind(this)).on("error",this.onError.bind(this)).on("close",(function(){e.onClose("transport close")}))}},{key:"probe",value:function(t){var e=this,n=this.createTransport(t,{probe:1}),r=!1;p.priorWebsocketSuccess=!1;var o=function(){r||(n.send([{type:"ping",data:"probe"}]),n.once("packet",(function(t){if(!r)if("pong"===t.type&&"probe"===t.data){if(e.upgrading=!0,e.emit("upgrading",n),!n)return;p.priorWebsocketSuccess="websocket"===n.name,e.transport.pause((function(){r||"closed"!==e.readyState&&(f(),e.setTransport(n),n.send([{type:"upgrade"}]),e.emit("upgrade",n),n=null,e.upgrading=!1,e.flush())}))}else{var o=new Error("probe error");o.transport=n.name,e.emit("upgradeError",o)}})))};function i(){r||(r=!0,f(),n.close(),n=null)}var s=function(t){var r=new Error("probe error: "+t);r.transport=n.name,i(),e.emit("upgradeError",r)};function c(){s("transport closed")}function a(){s("socket closed")}function u(t){n&&t.name!==n.name&&i()}var f=function(){n.removeListener("open",o),n.removeListener("error",s),n.removeListener("close",c),e.removeListener("close",a),e.removeListener("upgrading",u)};n.once("open",o),n.once("error",s),n.once("close",c),this.once("close",a),this.once("upgrading",u),n.open()}},{key:"onOpen",value:function(){if(this.readyState="open",p.priorWebsocketSuccess="websocket"===this.transport.name,this.emit("open"),this.flush(),"open"===this.readyState&&this.opts.upgrade&&this.transport.pause)for(var t=0,e=this.upgrades.length;t<e;t++)this.probe(this.upgrades[t])}},{key:"onPacket",value:function(t){if("opening"===this.readyState||"open"===this.readyState||"closing"===this.readyState)switch(this.emit("packet",t),this.emit("heartbeat"),t.type){case"open":this.onHandshake(JSON.parse(t.data));break;case"ping":this.resetPingTimeout(),this.sendPacket("pong"),this.emit("pong");break;case"error":var e=new Error("server error");e.code=t.data,this.onError(e);break;case"message":this.emit("data",t.data),this.emit("message",t.data)}}},{key:"onHandshake",value:function(t){this.emit("handshake",t),this.id=t.sid,this.transport.query.sid=t.sid,this.upgrades=this.filterUpgrades(t.upgrades),this.pingInterval=t.pingInterval,this.pingTimeout=t.pingTimeout,this.onOpen(),"closed"!==this.readyState&&this.resetPingTimeout()}},{key:"resetPingTimeout",value:function(){var t=this;clearTimeout(this.pingTimeoutTimer),this.pingTimeoutTimer=setTimeout((function(){t.onClose("ping timeout")}),this.pingInterval+this.pingTimeout),this.opts.autoUnref&&this.pingTimeoutTimer.unref()}},{key:"onDrain",value:function(){this.writeBuffer.splice(0,this.prevBufferLen),this.prevBufferLen=0,0===this.writeBuffer.length?this.emit("drain"):this.flush()}},{key:"flush",value:function(){"closed"!==this.readyState&&this.transport.writable&&!this.upgrading&&this.writeBuffer.length&&(this.transport.send(this.writeBuffer),this.prevBufferLen=this.writeBuffer.length,this.emit("flush"))}},{key:"write",value:function(t,e,n){return this.sendPacket("message",t,e,n),this}},{key:"send",value:function(t,e,n){return this.sendPacket("message",t,e,n),this}},{key:"sendPacket",value:function(t,e,n,r){if("function"==typeof e&&(r=e,e=void 0),"function"==typeof n&&(r=n,n=null),"closing"!==this.readyState&&"closed"!==this.readyState){(n=n||{}).compress=!1!==n.compress;var 
o={type:t,data:e,options:n};this.emit("packetCreate",o),this.writeBuffer.push(o),r&&this.once("flush",r),this.flush()}}},{key:"close",value:function(){var t=this,e=function(){t.onClose("forced close"),t.transport.close()},n=function n(){t.removeListener("upgrade",n),t.removeListener("upgradeError",n),e()},r=function(){t.once("upgrade",n),t.once("upgradeError",n)};return"opening"!==this.readyState&&"open"!==this.readyState||(this.readyState="closing",this.writeBuffer.length?this.once("drain",(function(){t.upgrading?r():e()})):this.upgrading?r():e()),this}},{key:"onError",value:function(t){p.priorWebsocketSuccess=!1,this.emit("error",t),this.onClose("transport error",t)}},{key:"onClose",value:function(t,e){"opening"!==this.readyState&&"open"!==this.readyState&&"closing"!==this.readyState||(clearTimeout(this.pingIntervalTimer),clearTimeout(this.pingTimeoutTimer),this.transport.removeAllListeners("close"),this.transport.close(),this.transport.removeAllListeners(),"function"==typeof removeEventListener&&removeEventListener("offline",this.offlineEventListener,!1),this.readyState="closed",this.id=null,this.emit("close",t,e),this.writeBuffer=[],this.prevBufferLen=0)}},{key:"filterUpgrades",value:function(t){for(var e=[],n=0,r=t.length;n<r;n++)~this.transports.indexOf(t[n])&&e.push(t[n]);return e}}])&&s(e.prototype,n),u&&s(e,u),p}(p);v.priorWebsocketSuccess=!1,v.protocol=h.protocol,t.exports=v},function(t,e){try{t.exports="undefined"!=typeof XMLHttpRequest&&"withCredentials"in new XMLHttpRequest}catch(e){t.exports=!1}},function(t,e,n){function r(t){return(r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}function o(){return(o=Object.assign||function(t){for(var e=1;e<arguments.length;e++){var n=arguments[e];for(var r in n)Object.prototype.hasOwnProperty.call(n,r)&&(t[r]=n[r])}return t}).apply(this,arguments)}function i(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}function s(t,e){for(var n=0;n<e.length;n++){var r=e[n];r.enumerable=r.enumerable||!1,r.configurable=!0,"value"in r&&(r.writable=!0),Object.defineProperty(t,r.key,r)}}function c(t,e,n){return e&&s(t.prototype,e),n&&s(t,n),t}function a(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a function");t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,writable:!0,configurable:!0}}),e&&u(t,e)}function u(t,e){return(u=Object.setPrototypeOf||function(t,e){return t.__proto__=e,t})(t,e)}function f(t){var e=function(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Date.prototype.toString.call(Reflect.construct(Date,[],(function(){}))),!0}catch(t){return!1}}();return function(){var n,r=p(t);if(e){var o=p(this).constructor;n=Reflect.construct(r,arguments,o)}else n=r.apply(this,arguments);return l(this,n)}}function l(t,e){return!e||"object"!==r(e)&&"function"!=typeof e?function(t){if(void 0===t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return t}(t):e}function p(t){return(p=Object.setPrototypeOf?Object.getPrototypeOf:function(t){return t.__proto__||Object.getPrototypeOf(t)})(t)}var h=n(9),y=n(10),d=n(1),v=n(13).pick,b=n(2);function m(){}var g=null!=new h({xdomain:!1}).responseType,k=function(t){a(n,t);var e=f(n);function n(t){var 
r;if(i(this,n),r=e.call(this,t),"undefined"!=typeof location){var o="https:"===location.protocol,s=location.port;s||(s=o?443:80),r.xd="undefined"!=typeof location&&t.hostname!==location.hostname||s!==t.port,r.xs=t.secure!==o}var c=t&&t.forceBase64;return r.supportsBinary=g&&!c,r}return c(n,[{key:"request",value:function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};return o(t,{xd:this.xd,xs:this.xs},this.opts),new w(this.uri(),t)}},{key:"doWrite",value:function(t,e){var n=this,r=this.request({method:"POST",data:t});r.on("success",e),r.on("error",(function(t){n.onError("xhr post error",t)}))}},{key:"doPoll",value:function(){var t=this,e=this.request();e.on("data",this.onData.bind(this)),e.on("error",(function(e){t.onError("xhr poll error",e)})),this.pollXhr=e}}]),n}(y),w=function(t){a(n,t);var e=f(n);function n(t,r){var o;return i(this,n),(o=e.call(this)).opts=r,o.method=r.method||"GET",o.uri=t,o.async=!1!==r.async,o.data=void 0!==r.data?r.data:null,o.create(),o}return c(n,[{key:"create",value:function(){var t=this,e=v(this.opts,"agent","enablesXDR","pfx","key","passphrase","cert","ca","ciphers","rejectUnauthorized","autoUnref");e.xdomain=!!this.opts.xd,e.xscheme=!!this.opts.xs;var r=this.xhr=new h(e);try{r.open(this.method,this.uri,this.async);try{if(this.opts.extraHeaders)for(var o in r.setDisableHeaderCheck&&r.setDisableHeaderCheck(!0),this.opts.extraHeaders)this.opts.extraHeaders.hasOwnProperty(o)&&r.setRequestHeader(o,this.opts.extraHeaders[o])}catch(t){}if("POST"===this.method)try{r.setRequestHeader("Content-type","text/plain;charset=UTF-8")}catch(t){}try{r.setRequestHeader("Accept","*/*")}catch(t){}"withCredentials"in r&&(r.withCredentials=this.opts.withCredentials),this.opts.requestTimeout&&(r.timeout=this.opts.requestTimeout),this.hasXDR()?(r.onload=function(){t.onLoad()},r.onerror=function(){t.onError(r.responseText)}):r.onreadystatechange=function(){4===r.readyState&&(200===r.status||1223===r.status?t.onLoad():setTimeout((function(){t.onError("number"==typeof r.status?r.status:0)}),0))},r.send(this.data)}catch(e){return void setTimeout((function(){t.onError(e)}),0)}"undefined"!=typeof document&&(this.index=n.requestsCount++,n.requests[this.index]=this)}},{key:"onSuccess",value:function(){this.emit("success"),this.cleanup()}},{key:"onData",value:function(t){this.emit("data",t),this.onSuccess()}},{key:"onError",value:function(t){this.emit("error",t),this.cleanup(!0)}},{key:"cleanup",value:function(t){if(void 0!==this.xhr&&null!==this.xhr){if(this.hasXDR()?this.xhr.onload=this.xhr.onerror=m:this.xhr.onreadystatechange=m,t)try{this.xhr.abort()}catch(t){}"undefined"!=typeof document&&delete n.requests[this.index],this.xhr=null}}},{key:"onLoad",value:function(){var t=this.xhr.responseText;null!==t&&this.onData(t)}},{key:"hasXDR",value:function(){return"undefined"!=typeof XDomainRequest&&!this.xs&&this.enablesXDR}},{key:"abort",value:function(){this.cleanup()}}]),n}(d);if(w.requestsCount=0,w.requests={},"undefined"!=typeof document)if("function"==typeof attachEvent)attachEvent("onunload",O);else if("function"==typeof addEventListener){addEventListener("onpagehide"in b?"pagehide":"unload",O,!1)}function O(){for(var t in w.requests)w.requests.hasOwnProperty(t)&&w.requests[t].abort()}t.exports=k,t.exports.Request=w},function(t,e,n){var r=n(11).PACKET_TYPES,o="function"==typeof Blob||"undefined"!=typeof Blob&&"[object BlobConstructor]"===Object.prototype.toString.call(Blob),i="function"==typeof ArrayBuffer,s=function(t,e){var n=new FileReader;return 
n.onload=function(){var t=n.result.split(",")[1];e("b"+t)},n.readAsDataURL(t)};t.exports=function(t,e,n){var c,a=t.type,u=t.data;return o&&u instanceof Blob?e?n(u):s(u,n):i&&(u instanceof ArrayBuffer||(c=u,"function"==typeof ArrayBuffer.isView?ArrayBuffer.isView(c):c&&c.buffer instanceof ArrayBuffer))?e?n(u instanceof ArrayBuffer?u:u.buffer):s(new Blob([u]),n):n(r[a]+(u||""))}},function(t,e,n){var r,o=n(11),i=o.PACKET_TYPES_REVERSE,s=o.ERROR_PACKET;"function"==typeof ArrayBuffer&&(r=n(26));var c=function(t,e){if(r){var n=r.decode(t);return a(n,e)}return{base64:!0,data:t}},a=function(t,e){switch(e){case"blob":return t instanceof ArrayBuffer?new Blob([t]):t;case"arraybuffer":default:return t}};t.exports=function(t,e){if("string"!=typeof t)return{type:"message",data:a(t,e)};var n=t.charAt(0);return"b"===n?{type:"message",data:c(t.substring(1),e)}:i[n]?t.length>1?{type:i[n],data:t.substring(1)}:{type:i[n]}:s}},function(t,e){!function(t){"use strict";e.encode=function(e){var n,r=new Uint8Array(e),o=r.length,i="";for(n=0;n<o;n+=3)i+=t[r[n]>>2],i+=t[(3&r[n])<<4|r[n+1]>>4],i+=t[(15&r[n+1])<<2|r[n+2]>>6],i+=t[63&r[n+2]];return o%3==2?i=i.substring(0,i.length-1)+"=":o%3==1&&(i=i.substring(0,i.length-2)+"=="),i},e.decode=function(e){var n,r,o,i,s,c=.75*e.length,a=e.length,u=0;"="===e[e.length-1]&&(c--,"="===e[e.length-2]&&c--);var f=new ArrayBuffer(c),l=new Uint8Array(f);for(n=0;n<a;n+=4)r=t.indexOf(e[n]),o=t.indexOf(e[n+1]),i=t.indexOf(e[n+2]),s=t.indexOf(e[n+3]),l[u++]=r<<2|o>>4,l[u++]=(15&o)<<4|i>>2,l[u++]=(3&i)<<6|63&s;return f}}("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/")},function(t,e,n){function r(t){return(r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}function o(t,e){for(var n=0;n<e.length;n++){var r=e[n];r.enumerable=r.enumerable||!1,r.configurable=!0,"value"in r&&(r.writable=!0),Object.defineProperty(t,r.key,r)}}function i(t,e,n){return(i="undefined"!=typeof Reflect&&Reflect.get?Reflect.get:function(t,e,n){var r=function(t,e){for(;!Object.prototype.hasOwnProperty.call(t,e)&&null!==(t=f(t)););return t}(t,e);if(r){var o=Object.getOwnPropertyDescriptor(r,e);return o.get?o.get.call(n):o.value}})(t,e,n||t)}function s(t,e){return(s=Object.setPrototypeOf||function(t,e){return t.__proto__=e,t})(t,e)}function c(t){var e=function(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Date.prototype.toString.call(Reflect.construct(Date,[],(function(){}))),!0}catch(t){return!1}}();return function(){var n,r=f(t);if(e){var o=f(this).constructor;n=Reflect.construct(r,arguments,o)}else n=r.apply(this,arguments);return a(this,n)}}function a(t,e){return!e||"object"!==r(e)&&"function"!=typeof e?u(t):e}function u(t){if(void 0===t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return t}function f(t){return(f=Object.setPrototypeOf?Object.getPrototypeOf:function(t){return t.__proto__||Object.getPrototypeOf(t)})(t)}var l,p=n(10),h=n(2),y=/\n/g,d=/\\n/g,v=function(t){!function(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a function");t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,writable:!0,configurable:!0}}),e&&s(t,e)}(p,t);var e,n,r,a=c(p);function p(t){var e;return function(t,e){if(!(t instanceof e))throw new TypeError("Cannot call 
a class as a function")}(this,p),(e=a.call(this,t)).query=e.query||{},l||(l=h.___eio=h.___eio||[]),e.index=l.length,l.push(e.onData.bind(u(e))),e.query.j=e.index,e}return e=p,(n=[{key:"doClose",value:function(){this.script&&(this.script.onerror=function(){},this.script.parentNode.removeChild(this.script),this.script=null),this.form&&(this.form.parentNode.removeChild(this.form),this.form=null,this.iframe=null),i(f(p.prototype),"doClose",this).call(this)}},{key:"doPoll",value:function(){var t=this,e=document.createElement("script");this.script&&(this.script.parentNode.removeChild(this.script),this.script=null),e.async=!0,e.src=this.uri(),e.onerror=function(e){t.onError("jsonp poll error",e)};var n=document.getElementsByTagName("script")[0];n?n.parentNode.insertBefore(e,n):(document.head||document.body).appendChild(e),this.script=e,"undefined"!=typeof navigator&&/gecko/i.test(navigator.userAgent)&&setTimeout((function(){var t=document.createElement("iframe");document.body.appendChild(t),document.body.removeChild(t)}),100)}},{key:"doWrite",value:function(t,e){var n,r=this;if(!this.form){var o=document.createElement("form"),i=document.createElement("textarea"),s=this.iframeId="eio_iframe_"+this.index;o.className="socketio",o.style.position="absolute",o.style.top="-1000px",o.style.left="-1000px",o.target=s,o.method="POST",o.setAttribute("accept-charset","utf-8"),i.name="d",o.appendChild(i),document.body.appendChild(o),this.form=o,this.area=i}function c(){a(),e()}this.form.action=this.uri();var a=function(){if(r.iframe)try{r.form.removeChild(r.iframe)}catch(t){r.onError("jsonp polling iframe removal error",t)}try{var t='<iframe src="javascript:0" name="'+r.iframeId+'">';n=document.createElement(t)}catch(t){(n=document.createElement("iframe")).name=r.iframeId,n.src="javascript:0"}n.id=r.iframeId,r.form.appendChild(n),r.iframe=n};a(),t=t.replace(d,"\\\n"),this.area.value=t.replace(y,"\\n");try{this.form.submit()}catch(t){}this.iframe.attachEvent?this.iframe.onreadystatechange=function(){"complete"===r.iframe.readyState&&c()}:this.iframe.onload=c}},{key:"supportsBinary",get:function(){return!1}}])&&o(e.prototype,n),r&&o(e,r),p}(p);t.exports=v},function(t,e,n){function r(t){return(r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}function o(t,e){for(var n=0;n<e.length;n++){var r=e[n];r.enumerable=r.enumerable||!1,r.configurable=!0,"value"in r&&(r.writable=!0),Object.defineProperty(t,r.key,r)}}function i(t,e){return(i=Object.setPrototypeOf||function(t,e){return t.__proto__=e,t})(t,e)}function s(t){var e=function(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Date.prototype.toString.call(Reflect.construct(Date,[],(function(){}))),!0}catch(t){return!1}}();return function(){var n,r=a(t);if(e){var o=a(this).constructor;n=Reflect.construct(r,arguments,o)}else n=r.apply(this,arguments);return c(this,n)}}function c(t,e){return!e||"object"!==r(e)&&"function"!=typeof e?function(t){if(void 0===t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return t}(t):e}function a(t){return(a=Object.setPrototypeOf?Object.getPrototypeOf:function(t){return t.__proto__||Object.getPrototypeOf(t)})(t)}var u=n(3),f=n(0),l=n(4),p=n(12),h=n(13).pick,y=n(29),d=y.WebSocket,v=y.usingBrowserWebSocket,b=y.defaultBinaryType,m="undefined"!=typeof 
navigator&&"string"==typeof navigator.product&&"reactnative"===navigator.product.toLowerCase(),g=function(t){!function(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a function");t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,writable:!0,configurable:!0}}),e&&i(t,e)}(a,t);var e,n,r,c=s(a);function a(t){var e;return function(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}(this,a),(e=c.call(this,t)).supportsBinary=!t.forceBase64,e}return e=a,(n=[{key:"doOpen",value:function(){if(this.check()){var t=this.uri(),e=this.opts.protocols,n=m?{}:h(this.opts,"agent","perMessageDeflate","pfx","key","passphrase","cert","ca","ciphers","rejectUnauthorized","localAddress","protocolVersion","origin","maxPayload","family","checkServerIdentity");this.opts.extraHeaders&&(n.headers=this.opts.extraHeaders);try{this.ws=v&&!m?e?new d(t,e):new d(t):new d(t,e,n)}catch(t){return this.emit("error",t)}this.ws.binaryType=this.socket.binaryType||b,this.addEventListeners()}}},{key:"addEventListeners",value:function(){var t=this;this.ws.onopen=function(){t.opts.autoUnref&&t.ws._socket.unref(),t.onOpen()},this.ws.onclose=this.onClose.bind(this),this.ws.onmessage=function(e){return t.onData(e.data)},this.ws.onerror=function(e){return t.onError("websocket error",e)}}},{key:"write",value:function(t){var e=this;this.writable=!1;for(var n=function(n){var r=t[n],o=n===t.length-1;f.encodePacket(r,e.supportsBinary,(function(t){var n={};v||(r.options&&(n.compress=r.options.compress),e.opts.perMessageDeflate&&("string"==typeof t?Buffer.byteLength(t):t.length)<e.opts.perMessageDeflate.threshold&&(n.compress=!1));try{v?e.ws.send(t):e.ws.send(t,n)}catch(t){}o&&setTimeout((function(){e.writable=!0,e.emit("drain")}),0)}))},r=0;r<t.length;r++)n(r)}},{key:"onClose",value:function(){u.prototype.onClose.call(this)}},{key:"doClose",value:function(){void 0!==this.ws&&(this.ws.close(),this.ws=null)}},{key:"uri",value:function(){var t=this.query||{},e=this.opts.secure?"wss":"ws",n="";return this.opts.port&&("wss"===e&&443!==Number(this.opts.port)||"ws"===e&&80!==Number(this.opts.port))&&(n=":"+this.opts.port),this.opts.timestampRequests&&(t[this.opts.timestampParam]=p()),this.supportsBinary||(t.b64=1),(t=l.encode(t)).length&&(t="?"+t),e+"://"+(-1!==this.opts.hostname.indexOf(":")?"["+this.opts.hostname+"]":this.opts.hostname)+n+this.opts.path+t}},{key:"check",value:function(){return!(!d||"__initialize"in d&&this.name===a.prototype.name)}},{key:"name",get:function(){return"websocket"}}])&&o(e.prototype,n),r&&o(e,r),a}(u);t.exports=g},function(t,e,n){var r=n(2);t.exports={WebSocket:r.WebSocket||r.MozWebSocket,usingBrowserWebSocket:!0,defaultBinaryType:"arraybuffer"}},function(t,e,n){"use strict";function r(t){return(r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}Object.defineProperty(e,"__esModule",{value:!0}),e.reconstructPacket=e.deconstructPacket=void 0;var o=n(15);e.deconstructPacket=function(t){var e=[],n=t.data,i=t;return i.data=function t(e,n){if(!e)return e;if(o.isBinary(e)){var i={_placeholder:!0,num:n.length};return n.push(e),i}if(Array.isArray(e)){for(var s=new Array(e.length),c=0;c<e.length;c++)s[c]=t(e[c],n);return s}if("object"===r(e)&&!(e instanceof Date)){var a={};for(var u in e)e.hasOwnProperty(u)&&(a[u]=t(e[u],n));return a}return 
e}(n,e),i.attachments=e.length,{packet:i,buffers:e}},e.reconstructPacket=function(t,e){return t.data=function t(e,n){if(!e)return e;if(e&&e._placeholder)return n[e.num];if(Array.isArray(e))for(var o=0;o<e.length;o++)e[o]=t(e[o],n);else if("object"===r(e))for(var i in e)e.hasOwnProperty(i)&&(e[i]=t(e[i],n));return e}(t.data,e),t.attachments=void 0,t}},function(t,e){function n(t){t=t||{},this.ms=t.min||100,this.max=t.max||1e4,this.factor=t.factor||2,this.jitter=t.jitter>0&&t.jitter<=1?t.jitter:0,this.attempts=0}t.exports=n,n.prototype.duration=function(){var t=this.ms*Math.pow(this.factor,this.attempts++);if(this.jitter){var e=Math.random(),n=Math.floor(e*this.jitter*t);t=0==(1&Math.floor(10*e))?t-n:t+n}return 0|Math.min(t,this.max)},n.prototype.reset=function(){this.attempts=0},n.prototype.setMin=function(t){this.ms=t},n.prototype.setMax=function(t){this.max=t},n.prototype.setJitter=function(t){this.jitter=t}}])}));
//# sourceMappingURL=socket.io.min.js.map
|
PypiClean
|
/streamlit-code-editor-0.1.10.tar.gz/streamlit-code-editor-0.1.10/code_editor/frontend/build/f42a62d762c34d1ca7c418ea25726655.js
|
ace.define("ace/mode/verilog_highlight_rules",["require","exports","module","ace/lib/oop","ace/mode/text_highlight_rules"],(function(e,t,n){"use strict";var i=e("../lib/oop"),r=e("./text_highlight_rules").TextHighlightRules,o=function(){var e=this.createKeywordMapper({"support.function":"count|min|max|avg|sum|rank|now|coalesce|main",keyword:"always|and|assign|automatic|begin|buf|bufif0|bufif1|case|casex|casez|cell|cmos|config|deassign|default|defparam|design|disable|edge|else|end|endcase|endconfig|endfunction|endgenerate|endmodule|endprimitive|endspecify|endtable|endtask|event|for|force|forever|fork|function|generate|genvar|highz0|highz1|if|ifnone|incdir|include|initial|inout|input|instance|integer|join|large|liblist|library|localparam|macromodule|medium|module|nand|negedge|nmos|nor|noshowcancelled|not|notif0|notif1|or|output|parameter|pmos|posedge|primitive|pull0|pull1|pulldown|pullup|pulsestyle_onevent|pulsestyle_ondetect|rcmos|real|realtime|reg|release|repeat|rnmos|rpmos|rtran|rtranif0|rtranif1|scalared|showcancelled|signed|small|specify|specparam|strong0|strong1|supply0|supply1|table|task|time|tran|tranif0|tranif1|tri|tri0|tri1|triand|trior|trireg|unsigned|use|vectored|wait|wand|weak0|weak1|while|wire|wor|xnor|xorbegin|bufif0|bufif1|case|casex|casez|config|else|end|endcase|endconfig|endfunction|endgenerate|endmodule|endprimitive|endspecify|endtable|endtask|for|forever|function|generate|if|ifnone|macromodule|module|primitive|repeat|specify|table|task|while","constant.language":"true|false|null"},"identifier",!0);this.$rules={start:[{token:"comment",regex:"//.*$"},{token:"comment.start",regex:"/\\*",next:[{token:"comment.end",regex:"\\*/",next:"start"},{defaultToken:"comment"}]},{token:"string.start",regex:'"',next:[{token:"constant.language.escape",regex:/\\(?:[ntvfa\\"]|[0-7]{1,3}|\x[a-fA-F\d]{1,2}|)/,consumeLineEnd:!0},{token:"string.end",regex:'"|$',next:"start"},{defaultToken:"string"}]},{token:"string",regex:"'^[']'"},{token:"constant.numeric",regex:"[+-]?\\d+(?:(?:\\.\\d*)?(?:[eE][+-]?\\d+)?)?\\b"},{token:e,regex:"[a-zA-Z_$][a-zA-Z0-9_$]*\\b"},{token:"keyword.operator",regex:"\\+|\\-|\\/|\\/\\/|%|<@>|@>|<@|&|\\^|~|<|>|<=|=>|==|!=|<>|="},{token:"paren.lparen",regex:"[\\(]"},{token:"paren.rparen",regex:"[\\)]"},{token:"text",regex:"\\s+"}]},this.normalizeRules()};i.inherits(o,r),t.VerilogHighlightRules=o})),ace.define("ace/mode/verilog",["require","exports","module","ace/lib/oop","ace/mode/text","ace/mode/verilog_highlight_rules","ace/range"],(function(e,t,n){"use strict";var i=e("../lib/oop"),r=e("./text").Mode,o=e("./verilog_highlight_rules").VerilogHighlightRules,a=(e("../range").Range,function(){this.HighlightRules=o,this.$behaviour=this.$defaultBehaviour});i.inherits(a,r),function(){this.lineCommentStart="//",this.blockComment={start:"/*",end:"*/"},this.$quotes={'"':'"'},this.$id="ace/mode/verilog"}.call(a.prototype),t.Mode=a})),ace.require(["ace/mode/verilog"],(function(e){"object"==typeof module&&"object"==typeof exports&&module&&(module.exports=e)}));
|
PypiClean
|
/lavue-2.83.4.tar.gz/lavue-2.83.4/doc/gui/configuration/hidrahttp.rst
|
Hidra/HTTP/ZMQ/ASAPO/NeXus Image Source Settings
================================================
The third tab of the configuration dialog contains the **Image Sources** configuration.
It allows the user to adjust image sources and their dialogs.
.. figure:: ../../_images/lavue_config_isources.png
**Hidra**
* **Detector servers:** possible Hidra detector hosts, separated by spaces
* **Data port:** Hidra data port, or ports separated by spaces (for the multi image source mode). The default port gap in the multi image source mode is 200. Each instance of lavue on the same host should use a different data port.
* **Use default detector servers:** add default detector server names from the DESY pool
**HTTP response**
* **URLs:** `JSON <https://www.json.org/json-en.html>`_ dictionary assigning labels to HTTP URLs
**ZMQ Stream**
* **Servers and Ports:** `JSON <https://www.json.org/json-en.html>`_ dictionary assigning labels to server and port of ZMQ stream
* **Automatic datasources:** selects possible datasources from ZMQ stream metadata
* **Datasources:** defines possible datasources, i.e. ZMQ topics, separated by spaces
* **Colon conf. separator:** use the ":" character to separate the topic and HWM in the ZMQ configuration string, i.e. the host:port[:topic[:HWM]] syntax instead of host:port[/topic[/HWM]]
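For example, a **Servers and Ports** value could be ``{"lambda": "haso111n:50001", "eiger": "haso228eig:50002"}``, where the labels and host names are purely illustrative.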
**ASAPO**
* **Server:** ASAPO server, i.e. host:port
* **Token:** an access token for the ASAPO server
* **Beamtime:** the name of the beamtime used by ASAPO
* **Datasources:** defines possible datasource names, separated by spaces
* **Source Path:** a path to ASAPO files, e.g. /asap3/petra.gpfs/{beamline}/{year}/data/{beamtime_id}
**Nexus File**
* **Keep the file open:** does not close the NeXus file after reading an image frame
* **Start from the last image:** displays only the last image frame
.. |br| raw:: html
<br>
|
PypiClean
|
/PyAmur-1.0.0.tar.gz/PyAmur-1.0.0/README.md
|
# PyAmur
PyAmur is an object-oriented Python interface to the Amur blockchain platform.
## Getting Started
You can install PyAmur using:
```
pip install pyamur
```
## Documentation
The library utilizes classes to represent various Amur data structures:
- pyamur.Address
- pyamur.Asset
- pyamur.AssetPair
- pyamur.Order
#### Code Example
```python
import pyamur as pw
myAddress = pw.Address(privateKey='CtMQWJZqfc7PRzSWiMKaGmWFm4q2VN5fMcYyKDBPDx6S')
otherAddress = pw.Address('3PNTcNiUzppQXDL9RZrK3BcftbujiFqrAfM')
myAddress.sendAmur(otherAddress, 10000000)
myToken = myAddress.issueAsset('Token1', 'My Token', 1000, 0)
while not myToken.status():
    pass
myAddress.sendAsset(otherAddress, myToken, 50)
```
### Address Class
__pyamur.Address(address, publicKey, privateKey, seed)__ _Creates a new Address object_
#### attributes:
- _address_
- _publicKey_
- _privateKey_
- _seed_
#### methods:
`balance(assetId='', confirmations=0)` returns the Amur balance or, if assetId is given, the balance of that asset
`assets()` returns a list of assets owned by the address
`issueAsset(name, description, quantity, decimals=0, reissuable=False, txFee=DEFAULT_ASSET_FEE, timestamp=0)` issue a new asset
`reissueAsset(Asset, quantity, reissuable=False, txFee=DEFAULT_ASSET_FEE, timestamp=0)` reissue an asset
`burnAsset(Asset, quantity, txFee=DEFAULT_ASSET_FEE, timestamp=0)` burn the specified quantity of an asset
`sendAmur(recipient, amount, attachment='', txFee=DEFAULT_TX_FEE, timestamp=0)` send specified amount of Amur to recipient
`massTransferAmur(transfers, attachment='', timestamp=0)` send Amur tokens to multiple recipients via a mass transfer
`sendAsset(recipient, asset, amount, attachment='', txFee=DEFAULT_TX_FEE, timestamp=0)` send specified amount of an asset to recipient
`massTransferAssets(transfers, asset, attachment='', timestamp=0)` send an asset to multiple recipients via a mass transfer
`cancelOrder(assetPair, order)` cancel an order
`buy(assetPair, amount, price, maxLifetime=30*86400, matcherFee=DEFAULT_MATCHER_FEE, timestamp=0)` post a buy order
`tradableBalance(assetPair)` get tradable balance for the specified asset pair
`sell(assetPair, amount, price, maxLifetime=30*86400, matcherFee=DEFAULT_MATCHER_FEE, timestamp=0)` post a sell order
`lease(recipient, amount, txFee=DEFAULT_LEASE_FEE, timestamp=0)` post a lease transaction
`leaseCancel(leaseId, txFee=DEFAULT_LEASE_FEE, timestamp=0)` cancel a lease
`getOrderHistory(assetPair)` get order history for the specified asset pair
`cancelOpenOrders(assetPair)` cancel all open orders for the specified asset pair
`deleteOrderHistory(assetPair)` delete order history for the specified asset pair
`createAlias(alias, txFee=DEFAULT_ALIAS_FEE, timestamp=0)` create alias
`sponsorAsset(assetId, minimalFeeInAssets, txFee=pyamur.DEFAULT_SPONSOR_FEE, timestamp=0)` sponsoring assets
`setScript(script, txFee=pyamur.DEFAULT_SCRIPT_FEE, timestamp=0)` sets a script for this address
`dataTransaction(data, timestamp=0)` sets data for the account; data should be a JSON array of entries, each with type (bool, binary, int, string), key and value
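A minimal sketch of a data transaction; the keys and values are illustrative, and the type strings follow the list above (verify the exact names accepted against the library source):
```python
import pyamur as pw

myAddress = pw.Address(privateKey='CtMQWJZqfc7PRzSWiMKaGmWFm4q2VN5fMcYyKDBPDx6S')

# each entry carries a type, a key and a value
data = [
    {'type': 'string', 'key': 'greeting', 'value': 'hello'},
    {'type': 'int', 'key': 'answer', 'value': 42},
    {'type': 'bool', 'key': 'enabled', 'value': True}
]
myAddress.dataTransaction(data)
```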
### Asset Class
__pyamur.Asset(assetId)__ _Creates a new Asset object_
#### attributes:
- _status_
- _assetId_
- _issuer_
- _name_
- _description_
- _quantity_
- _decimals_ = 0
- _reissuable = False_
#### methods:
`status()` returns 'Issued' if the asset exists
### AssetPair Class
__pyamur.AssetPair(asset1, asset2)__ _Creates a new AssetPair object with 2 Asset objects_
#### attributes:
- _asset1_
- _asset2_
#### methods:
`orderbook()` get order book
`ticker()` get ticker with 24h OHLCV data
`last()` get traded price
`open()` get 24h open price
`high()` get 24h high price
`low()` get 24h low price
`close()` get 24h close price (same as last())
`vwap()` get 24h vwap price
`volume()` get 24h volume
`priceVolume()` get 24h price volume
`trades(n)` get the last n trades
`trades(from, to)` get the trades in the from/to interval
`candles(timeframe, n)` get the last n candles in the specified timeframe
`candles(timeframe, from, to)` get the candles in the from/to interval in the specified timeframe
### Order Class
__pyamur.Order(orderId, assetPair, address='')__ _Creates a new Order object_
#### attributes:
- _status_
- _orderId_
- _assetPair_
- _address_
- _matcher_
- _matcherPublicKey_
#### methods:
`status()` returns current order status
`cancel()` cancel the order
## Other functions
`pyamur.setNode(node, chain, chain_id)` set node URL ('http://ip-address:port') and chain (either 'mainnet' or 'testnet', or any other chain, if you also define the chain id)
`pyamur.setChain(chain, chain_id)` set chain (either 'mainnet' or 'testnet', or any other chain if you also supply the chain id)
`pyamur.setOffline()` switch to offline mode; sign tx locally without broadcasting to network
`pyamur.setOnline()` switch to online mode; sign tx locally and broadcast to network
`pyamur.validateAddress(address)` checks if the provided address is a valid Amur address
`pyamur.setMatcher(node)` set matcher URL ('http://ip-address:port')
`pyamur.setDatafeed(node)` set datafeed URL ('http://ip-address:port')
`pyamur.height()` get blockchain height
`pyamur.lastblock()` get last block
`pyamur.block(n)` get block at specified height
`pyamur.tx(id)` get transaction details
`pyamur.symbols()` get list of symbol-asset mapping
`pyamur.markets()` get all traded markets with tickers
`pyamur.{SYMBOL_NAME}` get predefined asset for the specified symbol (pyamur.AMUR, pyamur.BTC, pyamur.USD,...)
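A quick sketch of a few of the query helpers above; the returned structures are whatever JSON the node serves:
```python
import pyamur as pw

print(pw.height())       # current blockchain height
block = pw.lastblock()   # most recent block as a dict (assumes a 'height' field)
print(block['height'])
genesis = pw.block(1)    # block at a specific height
```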
### Default Fees
The fees for Amur/asset transfers, asset issue/reissue/burn and matcher transactions are set by default as follows:
* DEFAULT_TX_FEE = 100000
* DEFAULT_ASSET_FEE = 100000000
* DEFAULT_MATCHER_FEE = 1000000
* DEFAULT_LEASE_FEE = 100000
* DEFAULT_ALIAS_FEE = 100000
* DEFAULT_SPONSOR_FEE = 100000000
* DEFAULT_SCRIPT_FEE = 100000
## More Examples
#### Playing with addresses:
```python
import pyamur as pw
# generate a new address
myAddress = pw.Address()
# create an address object from an existing address string
myAddress = pw.Address('3P6WfA4qYtkgwVAsWiiB6yaea2X8zyXncJh')
# get an existing address from seed
myAddress = pw.Address(seed='seven wrist bargain hope pattern banner plastic maple student chaos grit next space visa answer')
# get an existing address from privateKey
myAddress = pw.Address(privateKey='CtMQWJZqfc7PRzSWiMKaGmWFm4q2VN5fMcYyKDBPDx6S')
# get an existing address from a publicKey
myAddress = pw.Address(publicKey="EYNuSmW4Adtcc6AMCZyxkiHMPmF2BZ2XxvjpBip3UFZL")
# get an address from a seed with a different nonce (This is especially useful for accessing addresses generated by nodes)
myAddress = pw.Address(seed='seven wrist bargain hope pattern banner plastic maple student chaos grit next space visa answer', nonce=1)
```
#### Balances:
```python
import pyamur as pw
myAddress = pw.Address('3P6WfA4qYtkgwVAsWiiB6yaea2X8zyXncJh')
# get Amur balance
print("Your balance is %18d" % myAddress.balance())
# get Amur balance after 20 confirmations
print("Your balance is %18d" % myAddress.balance(confirmations = 20))
# get an asset balance
print("Your asset balance is %18d" % myAddress.balance('DHgwrRvVyqJsepd32YbBqUeDH4GJ1N984X8QoekjgH8J'))
```
#### Amur and asset transfers:
```python
import pyamur as pw
myAddress = pw.Address(privateKey='CtMQWJZqfc7PRzSWiMKaGmWFm4q2VN5fMcYyKDBPDx6S')
# send Amur to another address
myAddress.sendAmur(recipient = pw.Address('3PNTcNiUzppQXDL9RZrK3BcftbujiFqrAfM'),
amount = 100000000)
# send asset to another address
myToken = pw.Asset('4ZzED8WJXsvuo2MEm2BmZ87Azw8Sx7TVC6ufSUA5LyTV')
myAddress.sendAsset(recipient = pw.Address('3PNTcNiUzppQXDL9RZrK3BcftbujiFqrAfM'),
asset = myToken,
amount = 1000)
```
#### Issuing an asset:
```python
import pyamur as pw
myToken = myAddress.issueAsset( name = "MyToken",
description = "This is my first token",
quantity = 1000000,
decimals = 2 )
```
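`reissueAsset` and `burnAsset` from the method list follow the same pattern; a sketch continuing from the issue above:
```python
# mint 500000 more tokens and keep the asset reissuable
myAddress.reissueAsset(myToken, 500000, reissuable=True)
# destroy 1000 tokens
myAddress.burnAsset(myToken, 1000)
```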
#### Create an alias:
```python
import pyamur as pw
pw.setNode(node = 'http://127.0.0.1:6869', chain = 'testnet')
myAddress = pw.Address(privateKey='CtMQWJZqfc7PRzSWiMKaGmWFm4q2VN5fMcYyKDBPDx6S')
myAddress.createAlias("MYALIAS1")
```
#### Mass payment:
```python
import pyamur as pw
recipients = ['3PBbp6bg2YEnHfdJtYM7jzzXYQeb7sx5oFg',
'3P4A27aCd3skNja46pcgrLYEnK36TkSzgUp',
'3P81U3ujotNUwZMWALdcJQLzBVbrAuUQMfs',
'3PGcKEMwQcEbmeL8Jhe9nZQRBNCNdcHCoZP',
'3PKjtzZ4FhKrJUikbQ1hRk5xbwVKDyTyvkn']
myAddress = pw.Address(privateKey = "CtMQWJZqfc7PRzSWiMKaGmWFm4q2VN5fMcYyKDBPDx6S")
for address in recipients:
    myAddress.sendAmur(pw.Address(address), 1000000)
```
#### Mass transfer of Amur (feature 11)
```python
import pyamur as pw
transfers = [
    { 'recipient': '3N1xca2DY8AEwqRDAJpzUgY99eq8J9h4rB3', 'amount': 1 },
    { 'recipient': '3N3YWbQ27NnK7tek6ASFh38Bj93guLxxSi1', 'amount': 2 },
    { 'recipient': '3MwiB5UkWxt4X1qJ8DQpP2LpM3m48V1z5rC', 'amount': 3 }
]
address = pw.Address(privateKey = "CtMQWJZqfc7PRzSWiMKaGmWFm4q2VN5fMcYyKDBPDx6S")
address.massTransferAmur(transfers)
```
#### Mass transfer of Assets (feature 11)
```python
import pyamur as pw
transfers = [
    { 'recipient': '3N1xca2DY8AEwqRDAJpzUgY99eq8J9h4rB3', 'amount': 1 },
    { 'recipient': '3N3YWbQ27NnK7tek6ASFh38Bj93guLxxSi1', 'amount': 2 },
    { 'recipient': '3MwiB5UkWxt4X1qJ8DQpP2LpM3m48V1z5rC', 'amount': 3 }
]
address = pw.Address(privateKey = "CtMQWJZqfc7PRzSWiMKaGmWFm4q2VN5fMcYyKDBPDx6S")
address.massTransferAssets(transfers, pw.Asset('9DtBNdyBCyViLZHptyF1HbQk73F6s7nQ5dXhNHubtBhd'))
```
#### Token airdrop:
```python
import pyamur as pw
myAddress = pw.Address(privateKey='CtMQWJZqfc7PRzSWiMKaGmWFm4q2VN5fMcYyKDBPDx6S')
myToken = pw.Asset('4ZzED8WJXsvuo2MEm2BmZ87Azw8Sx7TVC6ufSUA5LyTV')
amount = 1000
with open('recipients.txt') as f:
    lines = f.readlines()
for address in lines:
    myAddress.sendAsset(pw.Address(address.strip()), myToken, amount)
```
#### Playing with Amur Matcher node (DEX):
```python
import pyamur as pw
# set Matcher node to use
pw.setMatcher(node = 'http://127.0.0.1:6886')
# post a buy order
BTC = pw.Asset('4ZzED8WJXsvuo2MEm2BmZ87Azw8Sx7TVC6ufSUA5LyTV')
USD = pw.Asset('6wuo2hTaDyPQVceETj1fc5p4WoMVCGMYNASN8ym4BGiL')
BTC_USD = pw.AssetPair(BTC, USD)
myOrder = myAddress.buy(assetPair = BTC_USD, amount = 15e8, price = 95075)
# post a sell order
WCT = pw.Asset('6wuo2hTaDyPQVceETj1fc5p4WoMVCGMYNASN8ym4BGiL')
Incent = pw.Asset('FLbGXzrpqkvucZqsHDcNxePTkh2ChmEi4GdBfDRRJVof')
WCT_Incent = pw.AssetPair(WCT, Incent)
myOrder = myAddress.sell(assetPair = WCT_Incent, amount = 100e8, price = 25e8)
# post a buy order using Amur as price asset
BTC = pw.Asset('4ZzED8WJXsvuo2MEm2BmZ87Azw8Sx7TVC6ufSUA5LyTV')
BTC_AMUR = pw.AssetPair(BTC, pw.AMUR)
myOrder = myAddress.buy(assetPair = BTC_AMUR, amount = 1e8, price = 50e8)
# cancel an order
myOrder.cancel()
# or
myAddress.cancelOrder(BTC_AMUR, myOrder)
```
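The remaining order-management helpers from the Address class fit the same session; a sketch reusing the BTC_AMUR pair from above:
```python
# tradable balance for the pair
print(myAddress.tradableBalance(BTC_AMUR))
# inspect past orders, then cancel anything still open
print(myAddress.getOrderHistory(BTC_AMUR))
myAddress.cancelOpenOrders(BTC_AMUR)
```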
#### Getting Market Data from the Amur Data Feed:
```python
import pyamur as pw
# set the asset pair
AMUR_BTC = pw.AssetPair(pw.AMUR, pw.BTC)
# get last price and volume
print("%s %s" % (AMUR_BTC.last(), AMUR_BTC.volume()))
# get ticker
ticker = AMUR_BTC.ticker()
print(ticker['24h_open'])
print(ticker['24h_vwap'])
# get last 10 trades
trades = AMUR_BTC.trades(10)
for t in trades:
    print("%s %s %s %s" % (t['buyer'], t['seller'], t['price'], t['amount']))
# get last 10 daily OHLCV candles
ohlcv = AMUR_BTC.candles(1440, 10)
for t in ohlcv:
    print("%s %s %s %s %s" % (t['open'], t['high'], t['low'], t['close'], t['volume']))
```
#### LPOS
```python
import pyamur as pw
# connect to a local testnet node
pw.setNode(node = 'http://127.0.0.1:6869', chain = 'testnet')
myAddress = pw.Address(privateKey = 'CsBpQpNE3Z1THNMS9vJPaXqYwN9Hgmhd9AsAPrM3tiuJ')
minerAddress = pw.Address('3NBThmVJmcexzJ9itP9KiiC2K6qnGQwpqMq')
# lease 1000 Amur to minerAddress
leaseId = myAddress.lease(minerAddress, 100000000000)
# revoke the lease
myAddress.leaseCancel(leaseId)
```
### Using PyAmur in a Python shell
#### Check an address balance:
```
>>> import pyamur as pw
>>> pw.Address('3P31zvGdh6ai6JK6zZ18TjYzJsa1B83YPoj')
address = 3P31zvGdh6ai6JK6zZ18TjYzJsa1B83YPoj
publicKey =
privateKey =
seed =
balances:
Amur = 1186077288304570
BDMRyZsmDZpgKhdM7fUTknKcUbVVkDpMcqEj31PUzjMy (Tokes) = 43570656915
RRBqh2XxcwAdLYEdSickM589Vb4RCemBCPH5mJaWhU9 (Ripto Bux) = 4938300000000
4rmhfoscYcjz1imNDvtz45doouvrQqDpbX7xdfLB4guF (incentCoffee) = 7
Ftim86CXM6hANxArJXZs2Fq7XLs3nJvgBzzEwQWwQn6N (Amur) = 2117290600000000
E4ip4jzTc4PCvebYn1818T4LNoYBVL3Y4Y4dMPatGwa9 (BitCoin) = 500000000000
FLbGXzrpqkvucZqsHDcNxePTkh2ChmEi4GdBfDRRJVof (Incent) = 12302659925430
GQr2fpkfmWjMaZCbqMxefbiwgvpcNgYdev7xpuX6xqcE (KISS) = 1000
DxG3PLganyNzajHGzvWLjc4P3T2CpkBGxY4J9eJAAUPw (UltraCoin) = 200000000000000
4eWBPyY4XNPsFLoQK3iuVUfamqKLDu5o6zQCYyp9d8Ae (LIKE) = 1000
>>>
```
#### Generate a new address:
```
>>> import pyamur as pw
>>> pw.Address()
address = 3P6WfA4qYtkgwVAsWiiB6yaea2X8zyXncJh
publicKey = EYNuSmW4Adtcc6AMCZyxkiHMPmF2BZ2XxvjpBip3UFZL
privateKey = CtMQWJZqfc7PRzSWiMKaGmWFm4q2VN5fMcYyKDBPDx6S
seed = seven wrist bargain hope pattern banner plastic maple student chaos grit next space visa answer
balances:
Amur = 0
>>>
```
#### Check an asset:
```
>>> import pyamur as pw
>>> pw.Asset('DHgwrRvVyqJsepd32YbBqUeDH4GJ1N984X8QoekjgH8J')
status = Issued
assetId = DHgwrRvVyqJsepd32YbBqUeDH4GJ1N984X8QoekjgH8J
issuer = 3PPKF2pH4KMYgsDixjrhnWrPycVHr1Ye37V
name = AmurCommunity
description = Amur community token.
quantity = 1000000000
decimals = 2
reissuable = False
```
#### Post an order and check its status:
```
>>> myOrder = myAddress.buy(pw.AssetPair(token1, token2), 1, 25)
>>> myOrder
status = Accepted
id = ARZdYgfXz3ksRMvhnGeLLJnn3CQnz7RCa7U6dVw3zert
asset1 = AFzL992FQbhcgSZGKDKAiRWcjtthM55yVCE99hwbHf88
asset2 = 49Aha2RR2eunR3KZFwedfdi7K9v5MLQbLYcmVdp2QkZT
sender.address = 3P6WfA4qYtkgwVAsWiiB6yaea2X8zyXncJh
sender.publicKey = EYNuSmW4Adtcc6AMCZyxkiHMPmF2BZ2XxvjpBip3UFZL
matcher = http://127.0.0.1:6886
```
#### Cancel the order
```
>>> myOrder.cancel()
>>> myOrder
status = Cancelled
id = ARZdYgfXz3ksRMvhnGeLLJnn3CQnz7RCa7U6dVw3zert
asset1 = AFzL992FQbhcgSZGKDKAiRWcjtthM55yVCE99hwbHf88
asset2 = 49Aha2RR2eunR3KZFwedfdi7K9v5MLQbLYcmVdp2QkZT
sender.address = 3P6WfA4qYtkgwVAsWiiB6yaea2X8zyXncJh
sender.publicKey = EYNuSmW4Adtcc6AMCZyxkiHMPmF2BZ2XxvjpBip3UFZL
matcher = http://127.0.0.1:6886
```
### Offline signing and custom timestamps
#### Offline signing a future transaction:
```
>>> import pyamur as pw
>>> pw.setOffline()
>>> myAddress=pw.Address(privateKey="F2jVbjrKzjUsZ1AQRdnd8MmxFc85NQz5jwvZX4BXswXv")
>>> recipient=pw.Address("3P8Ya6Ary5gzwnzbBXDp3xjeNG97JEiPcdA")
# sign a future tx to transfer 100 AMUR to recipient
# the tx is valid on Jan 1st, 2020 12:00pm
>>> myAddress.sendAmur(recipient, amount=100e8, timestamp=1577880000000)
{'api-endpoint': '/assets/broadcast/transfer',
'api-type': 'POST',
'api-data': '{"fee": 100000,
"timestamp": 1577880000000,
"senderPublicKey": "27zdzBa1q46RCMamZ8gw2xrTGypZnbzXs5J1Y2HbUmEv",
"amount": 10000000000,
"attachment": "",
"recipient": "3P8Ya6Ary5gzwnzbBXDp3xjeNG97JEiPcdA"
"signature": "YetPopTJWC4WBPXbneWv9g6YEp6J9g9rquZWjewjdQnFbmaxtXjrRsUu69NZzHebVzUGLrhQiFFoguXJwdUn8BH"}'}
```
#### Offline signing time lock/unlock transactions:
```
>>> import pyamur as pw
>>> pw.setOffline()
>>> myAddress=pw.Address(privateKey="F2jVbjrKzjUsZ1AQRdnd8MmxFc85NQz5jwvZX4BXswXv")
# generate a lockbox address
>>> lockAddress=pw.Address()
# sign the 'lock' tx to send 100e8 to the lockbox (valid on Nov 1st, 2017)
>>> myAddress.sendAmur(lockAddress, 100e8, timestamp=1509537600000)
{'api-endpoint': '/assets/broadcast/transfer',
'api-type': 'POST',
'api-data': '{"fee": 100000,
"timestamp": 1509537600000,
"senderPublicKey": "27zdzBa1q46RCMamZ8gw2xrTGypZnbzXs5J1Y2HbUmEv",
"amount": 10000000000,
"attachment": "",
"recipient": "3P3UbyQM9W7WzTgjYkLuBrPZZeWsiUtCcpv",
"signature": "5VgT6qWxJwxEyrxFNfsi67QqbyUiGq9Ka7HVzgovRTTDT8nLRyuQv2wBAJQhRiXDkTTV6zsQmHnBkh8keCaFPoNT"}'}
# sign the 'unlock' tx to send funds back to myAddress (valid on Jan 1st, 2020)
>>> lockAddress.sendAmur(myAddress, 100e8-200000, txFee=200000, timestamp=1577880000000)
{'api-endpoint': '/assets/broadcast/transfer',
'api-type': 'POST',
'api-data': '{"fee": 200000,
"timestamp": 1577880000000,
"senderPublicKey": "52XnBGnAVZmw1CHo9aJPiMsVMiTWeNGSNN9aYJ7cDtx4",
"amount": 9999800000,
"attachment": "",
"recipient": "3P7tfdCaTyYCfg5ojxNahEJDSS4MZ7ybXBY",
"signature": "3beyz1sqKefP96LaXWT3CxdPRW86DAxcj6wgWPyyKq3SgdotVqnKyWXDyeHnBzCq1nC7JA9CChTmo1c1iVAv6C4T"}'}
# delete lockbox address and private key
>>> del lockAddress
```
## Connecting to a different node or chain
PyAmur supports both mainnet and testnet chains. By default, PyAmur connects to the mainnet RPC server at https://nodes.amur.host. It's possible to specify a different server and chain with the setNode() function:
```python
import pyamur as pw
# connects to a local testnet node
pw.setNode(node = 'http://127.0.0.1:6869', chain = 'testnet')
# connects to a local mainnet node
pw.setNode(node = 'http://127.0.0.1:6869', chain = 'mainnet')
```
## License
Code released under the [MIT License](https://github.com/PyAmur/PyAmur/blob/master/LICENSE).
|
PypiClean
|