repo_name (stringlengths 7-94) | repo_path (stringlengths 4-237) | repo_head_hexsha (stringlengths 40-40) | content (stringlengths 10-680k) | apis (stringlengths 2-680k) |
---|---|---|---|---|
aloa04/practice | python/modules_packages_libraries/models/animal_kigdom/animals.py | 0f11874a597450a70f3c6f01fe64b6aa9e9d5b9f | class Animal():
edad:int
patas:int
ruido:str
nombre: str
kgComida: float = 0
def __init__(self, edad, patas, ruido, nombre):
self.edad = edad
self.patas = patas
self.ruido = ruido
self.nombre = nombre
def comer(self, alimento):
self.kgComida += alimento
print('Hola,', self.nombre, 'comes', self.kgComida)
def hacerRuido(self):
print('Hola', self.nombre, 'haces' , self.ruido) | [] |
klmcguir/tensortools | tensortools/optimize/mncp_hals.py | 38262f5bad9d3171286e34e5f15d196752dda939 | """
Nonnegative CP decomposition by Hierarchical alternating least squares (HALS).
With support for missing data.
"""
import numpy as np
import scipy as sci
from scipy import linalg
from tensortools.operations import unfold, khatri_rao
from tensortools.tensors import KTensor
from tensortools.optimize import FitResult, optim_utils
from .._hals_update import _hals_update
def mncp_hals(X, rank, mask, random_state=None, init='rand', **options):
"""
Fits nonnegative CP Decomposition using the Hierarchical Alternating Least
Squares (HALS) Method. Supports missing data.
Parameters
----------
X : (I_1, ..., I_N) array_like
A real array with nonnegative entries and ``X.ndim >= 3``.
rank : integer
The `rank` sets the number of components to be computed.
mask : (I_1, ..., I_N) array_like
A binary tensor with the same shape as ``X``. All entries equal to zero
correspond to held out or missing data in ``X``. All entries equal to
one correspond to observed entries in ``X`` and the decomposition is
fit to these datapoints.
random_state : integer, RandomState instance or None, optional (default ``None``)
If integer, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used by np.random.
init : str, or KTensor, optional (default ``'rand'``).
Specifies initial guess for KTensor factor matrices.
If ``'randn'``, Gaussian random numbers are used to initialize.
If ``'rand'``, uniform random numbers are used to initialize.
If KTensor instance, a copy is made to initialize the optimization.
options : dict, specifying fitting options.
tol : float, optional (default ``tol=1E-5``)
Stopping tolerance for reconstruction error.
max_iter : integer, optional (default ``max_iter = 500``)
Maximum number of iterations to perform before exiting.
min_iter : integer, optional (default ``min_iter = 1``)
Minimum number of iterations to perform before exiting.
max_time : integer, optional (default ``max_time = np.inf``)
Maximum computational time before exiting.
verbose : bool ``{'True', 'False'}``, optional (default ``verbose=True``)
Display progress.
Returns
-------
result : FitResult instance
Object which holds the fitted results. It provides the factor matrices
in form of a KTensor, ``result.factors``.
Notes
-----
This implementation uses the Hierarchical Alternating Least Squares method.
References
----------
Cichocki, Andrzej, and P. H. A. N. Anh-Huy. "Fast local algorithms for
large scale nonnegative matrix and tensor factorizations."
IEICE transactions on fundamentals of electronics, communications and
computer sciences 92.3: 708-721, 2009.
Examples
--------
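A minimal usage sketch (illustrative shapes and rank; ``mask`` is a boolean
tensor marking the observed entries):
>>> import numpy as np
>>> X = np.random.rand(10, 10, 10)
>>> mask = np.random.rand(10, 10, 10) > 0.1
>>> result = mncp_hals(X, rank=3, mask=mask)
>>> factors = result.factors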
"""
# Mask missing elements.
X = np.copy(X)
X[~mask] = np.linalg.norm(X[mask])
# Check inputs.
optim_utils._check_cpd_inputs(X, rank)
# Initialize problem.
U, normX = optim_utils._get_initial_ktensor(init, X, rank, random_state)
result = FitResult(U, 'NCP_HALS', **options)
# Store the norm of the observed entries (used to normalize the objective).
normX = linalg.norm(X[mask].ravel())
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Iterate the HALS algorithm until convergence or maxiter is reached
# i) compute the N gram matrices and multiply
# ii) Compute Khatri-Rao product
# iii) Update component U_1, U_2, ... U_N
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
while result.still_optimizing:
# First, HALS update.
for n in range(X.ndim):
# Select all components, but U_n
components = [U[j] for j in range(X.ndim) if j != n]
# i) compute the N-1 gram matrices
grams = sci.multiply.reduce([arr.T.dot(arr) for arr in components])
# ii) Compute Khatri-Rao product
kr = khatri_rao(components)
p = unfold(X, n).dot(kr)
# iii) Update component U_n
_hals_update(U[n], grams, p)
# Then, update masked elements.
pred = U.full()
X[~mask] = pred[~mask]
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Update the optimization result, checks for convergence.
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Compute objective function
# grams *= U[X.ndim - 1].T.dot(U[X.ndim - 1])
# obj = np.sqrt( (sci.sum(grams) - 2 * sci.sum(U[X.ndim - 1] * p) + normX**2)) / normX
resid = X - pred
result.update(linalg.norm(resid.ravel()) / normX)
# end optimization loop, return result.
return result.finalize() | [((3086, 3096), 'numpy.copy', 'np.copy', (['X'], {}), '(X)\n', (3093, 3096), True, 'import numpy as np\n'), ((3112, 3135), 'numpy.linalg.norm', 'np.linalg.norm', (['X[mask]'], {}), '(X[mask])\n', (3126, 3135), True, 'import numpy as np\n'), ((3161, 3199), 'tensortools.optimize.optim_utils._check_cpd_inputs', 'optim_utils._check_cpd_inputs', (['X', 'rank'], {}), '(X, rank)\n', (3190, 3199), False, 'from tensortools.optimize import FitResult, optim_utils\n'), ((3242, 3303), 'tensortools.optimize.optim_utils._get_initial_ktensor', 'optim_utils._get_initial_ktensor', (['init', 'X', 'rank', 'random_state'], {}), '(init, X, rank, random_state)\n', (3274, 3303), False, 'from tensortools.optimize import FitResult, optim_utils\n'), ((3317, 3352), 'tensortools.optimize.FitResult', 'FitResult', (['U', '"""NCP_HALS"""'], {}), "(U, 'NCP_HALS', **options)\n", (3326, 3352), False, 'from tensortools.optimize import FitResult, optim_utils\n'), ((4201, 4223), 'tensortools.operations.khatri_rao', 'khatri_rao', (['components'], {}), '(components)\n', (4211, 4223), False, 'from tensortools.operations import unfold, khatri_rao\n'), ((4240, 4252), 'tensortools.operations.unfold', 'unfold', (['X', 'n'], {}), '(X, n)\n', (4246, 4252), False, 'from tensortools.operations import unfold, khatri_rao\n')] |
jonas-eschle/raredecay | raredecay/tools/data_tools.py | 6285f91e0819d01c80125f50b24e60ee5353ae2e | """
@author: Jonas Eschle "Mayou36"
DEPRECATED! USE OTHER MODULES LIKE rd.data, rd.ml, rd.reweight, rd.score and rd.stat
DEPRECATED! DEPRECATED! DEPRECATED! DEPRECATED! DEPRECATED!
Contains several tools to convert, load, save and plot data
"""
import warnings
import os
import copy
import pandas as pd
import numpy as np
import uproot
import pickle
from . import dev_tool
# both produce error (27.07.2016) when importing them if run from main.py.
# No problem when run as main...
# from raredecay.tools import dev_tool
from .. import meta_config as meta_cfg
def apply_cuts(signal_data, bkg_data, percent_sig_to_keep=100, bkg_length=None):
"""Search for best cut on value to still keep percent_sig_to_keep of signal
Parameters
----------
signal_data : 1-D numpy array
The signal
bkg_data : 1-D numpy array
The background data
percent_sig_to_keep : 0 < float <= 100
What percentage of the data to keep in order to apply the cuts.
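Example (illustrative arrays; the default ``percent_sig_to_keep=100`` keeps
the full signal range):
>>> import numpy as np
>>> cuts, rejected = apply_cuts(np.array([1.0, 2.0, 3.0, 4.0]), np.array([0.5, 2.5, 5.0]))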
"""
# if percent_sig_to_keep < 100:
# raise NotImplementedError("percentage of < 100 not yet imlemented")
percentile = [0, percent_sig_to_keep] # TODO: modify for percent_sig_to_keep
bkg_length_before = len(bkg_data)
bkg_length = len(bkg_data) if bkg_length in (None, 0) else bkg_length
lower_cut, upper_cut = np.percentile(signal_data, percentile)
cut_bkg = np.count_nonzero(
np.logical_or(bkg_data < lower_cut, bkg_data > upper_cut)
)
rejected_bkg = (bkg_length_before - cut_bkg) / bkg_length
return [lower_cut, upper_cut], rejected_bkg
def make_root_dict(path_to_rootfile, tree_name, branches):
"""Returns a root_numpy compatible "root-dict" of a root-tree.
Parameters
----------
path_to_rootfile : str
The exact path to the root-tree including the filename. Example:
/home/user1/data/myRootTree1.root
tree_name : str
The name of the tree
branches : str or list[str, str, str,... ]
The branches of the tree to use
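Example (hypothetical file, tree and branch names):
>>> root_dict = make_root_dict('/home/user1/data/myRootTree1.root', 'DecayTree', ['B_M'])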
"""
output = dict(filenames=path_to_rootfile, treename=tree_name, branches=branches)
output = dev_tool.entries_to_str(output)
return output
def add_to_rootfile(rootfile, new_branch, branch_name=None, overwrite=True):
"""Adds a new branch to a given root file.
.. warning:: Overwrite not working currently!
Parameters
----------
rootfile : root-dict
The ROOT-file where the data should be added
new_branch : numpy.array 1-D, list, root-dict
A one-dimensional numpy array that contains the data.
branch_name : str
The name of the branch, resp. the name in the dtype of the array.
"""
from root_numpy import array2root
from rootpy.io import root_open
rootfile = dev_tool.entries_to_str(rootfile)
new_branch = dev_tool.entries_to_str(new_branch)
branch_name = dev_tool.entries_to_str(branch_name)
# get the right parameters
# TODO: what does that if there? an assertion maybe?
write_mode = "update"
branch_name = "new_branch1" if branch_name is None else branch_name
if isinstance(rootfile, dict):
filename = rootfile.get("filenames")
treename = rootfile.get("treename")
new_branch = to_ndarray(new_branch)
# new_branch.dtype = [(branch_name, 'f8')]
# write to ROOT-file
write_to_root = False
if os.path.isfile(filename):
with root_open(filename, mode="a") as root_file:
tree = getattr(root_file, treename) # test
if not tree.has_branch(branch_name):
write_to_root = True
# array2tree(new_branch, tree=tree)
# f.write("", TObject.kOverwrite) # overwrite, does not create friends
else:
write_mode = "recreate"
write_to_root = True
if write_to_root:
arr = np.core.records.fromarrays([new_branch], names=branch_name)
array2root(arr=arr, filename=filename, treename=treename, mode=write_mode)
return 0
else:
return 1
# TODO: remove? outdated
def format_data_weights(data_to_shape, weights):
"""Format the data and the weights perfectly. Same length and more.
Change the data to pandas.DataFrame and fill the weights with ones where
nothing or None is specified. Returns both in lists.
Very useful to loop over several data and weights.
Parameters
----------
data_to_shape : (root_dict, numpy.array, pandas.DataFrame)
The data for which we apply the weights. Usual 2-D shape.
weights : (list, numpy.array, pandas.DataFrame, None)
The weights to be reshaped
*Best format* :
[array(weights),array(weights), None, array(weights),...]
*None* can be used if no special weights are specified.
If weights contains fewer "weight-containing array-like objects" than
data_to_shape does, the difference will be filled with *1*
Return
------
out : list(pandas.DataFrame(data), pandas.DataFrame(data),...)
Return a list containing data
out : list(numpy.array(weight), numpy.array(weight),...)
Return a list with the weights, converted and filled.
"""
# convert the data
if not isinstance(data_to_shape, list):
data_to_shape = [data_to_shape]
data_to_shape = list(map(to_pandas, data_to_shape))
# convert the weights
if not isinstance(weights, list):
weights = [weights]
if weights[0] is not None:
if len(weights[0]) == 1:
weights = [weights]
# convert to pandas
assert isinstance(weights, list), "weights could not be converted to list"
for data_id, data in enumerate(data_to_shape):
if data_id >= len(weights):
weights.append(None)
if weights[data_id] is None:
weights[data_id] = np.array([1] * len(data))
weights[data_id] = to_pandas(weights[data_id]).squeeze().values
return data_to_shape, weights
def obj_to_string(objects, separator=None):
"""Return a string containing all objects as strings, separated by the separator.
Useful for automatic conversion for different types. The following objects
will automatically be converted:
- None will be omitted
Parameters
----------
objects : any object or list(obj, obj, ...) with a string representation
The objects will be converted to a string and concatenated, separated
by the separator.
separator : str
The separator between the objects. Default is " - ".
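Example (assuming ``dev_tool.entries_to_str`` passes plain strings through unchanged):
>>> obj_to_string(['pt', 'eta'])
'pt - eta'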
"""
objects = dev_tool.entries_to_str(objects)
if isinstance(objects, str): # no need to change things
return objects
separator = " - " if separator is None else separator
assert isinstance(separator, str), "Separator not a str"
objects = to_list(objects)
objects = [str(obj) for obj in objects if obj not in (None, "")] # remove Nones
string_out = ""
for word in objects:
string_out += word + separator if word != objects[-1] else word
return string_out
def is_root(data_to_check):
"""Check whether a given data is a root file. Needs dicts to be True."""
flag = False
data_to_check = dev_tool.entries_to_str(data_to_check)
if isinstance(data_to_check, dict):
path_name = data_to_check.get("filenames")
# assert isinstance(path_name, str), ("'filenames' of the dictionary " +
# str(data_to_check) + "is not a string")
if path_name.endswith(meta_cfg.ROOT_DATATYPE):
flag = True
return flag
def is_list(data_to_check):
"""Check whether the given data is a list."""
flag = False
if isinstance(data_to_check, list):
flag = True
return flag
def is_ndarray(data_to_check):
"""Check whether a given data is an ndarray."""
flag = False
if isinstance(data_to_check, np.ndarray):
flag = True
return flag
def is_pickle(data_to_check):
"""Check if the file is a pickled file (checks the ending)."""
flag = False
data_to_check = dev_tool.entries_to_str(data_to_check)
if isinstance(data_to_check, str):
if data_to_check.endswith(meta_cfg.PICKLE_DATATYPE):
flag = True
return flag
def to_list(data_in):
"""Convert the data into a list. Does not pack lists into a new one.
If your input is, for example, a string or a list of strings, or a
tuple filled with strings, you have, in general, a problem:
- just iterating through the object will fail because it iterates through the
characters of the string.
- using list(obj) converts the tuple, leaves the list but splits the strings
characters into single elements of a new list.
- using [obj] creates a list containing a string, but it also wraps a list or
a tuple in another list, which you did not want.
Solution: use to_list(obj), which creates a new list in case the object is
a single object (a string is a single object in this sense) or converts
to a list if the object is already a container for several objects.
Parameters
----------
data_in : any obj
So far, any object can be entered.
Returns
-------
out : list
Return a list containing the object or the object converted to a list.
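Example:
>>> to_list('a string')
['a string']
>>> to_list(('a', 'b'))
['a', 'b']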
"""
if isinstance(data_in, (str, int, float)):
data_in = [data_in]
data_in = list(data_in)
return data_in
def to_ndarray(data_in, float_array=False):
"""Convert data to numpy array (containing only floats).
Parameters
----------
data_in : any reasonable data
The data to be converted
"""
import uproot
if is_root(data_in):
with uproot.open(data_in["filenames"]) as file:
tree = file[data_in["treename"]]
branches = to_list(data_in["branches"])
loaded = tree.arrays(branches, library="np")
loaded = np.stack([loaded[branch] for branch in branches])
if len(branches) == 1:
loaded = loaded[0]
data_in = loaded
# change numpy.void to normal floats
if isinstance(data_in, (pd.Series, pd.DataFrame)):
test_sample = data_in.iloc[0]
else:
test_sample = data_in[0]
if isinstance(test_sample, np.void):
data_in = np.array([val[0] for val in data_in])
if isinstance(data_in, (np.recarray, np.ndarray)):
data_in = data_in.tolist()
if is_list(data_in) or isinstance(data_in, pd.Series):
data_in = np.array(data_in)
if not isinstance(data_in[0], (int, float, str, bool)):
if float_array:
iter_data = copy.deepcopy(data_in)
# HACK
data_in = np.ndarray(shape=len(data_in), dtype=data_in.dtype)
# HACK END
for i, element in enumerate(iter_data):
if not isinstance(element, (int, float, str, bool)):
# does that work or should we iterate over copy?
try:
element_len = len(element)
except TypeError:
element_len = 1
if element_len > 1:
data_in[i] = to_ndarray(element)
float_array = False
elif element_len == 1:
data_in[i] = float(element)
warnings.warn("Could not force float array")
if float_array:
data_in = np.asfarray(data_in)
assert is_ndarray(data_in), "Error, could not convert data to numpy array"
return data_in
def to_pandas_old(data_in, index=None, columns=None):
"""Convert data from numpy or root to pandas dataframe.
Convert data safely to pandas, whatever the format is.
Parameters
----------
data_in : any reasonable data
The data to be converted
"""
# TODO: generalize
root_index_name = "__index__"
data_in = dev_tool.entries_to_str(data_in)
if is_root(data_in):
root_index = None
import root_numpy
if root_index_name in root_numpy.list_branches(
filename=data_in["filenames"], treename=data_in.get("treename")
):
root_index = root_numpy.root2array(
filenames=data_in["filenames"],
treename=data_in.get("treename"),
selection=data_in.get("selection"),
branches=root_index_name,
)
data_in = root_numpy.root2array(**data_in) # why **? it's a root dict
if is_list(data_in):
data_in = np.array(data_in)
if is_ndarray(data_in):
if (isinstance(columns, (list, tuple)) and len(columns) == 1) or isinstance(
columns, str
):
data_in = to_ndarray(data_in)
data_in = pd.DataFrame(data_in, columns=columns, index=root_index)
if index is not None:
data_in = data_in.loc[index]
elif isinstance(data_in, pd.DataFrame):
pass
else:
raise TypeError("Could not convert data to pandas. Data: " + data_in)
return data_in
def to_pandas(data_in, index=None, columns=None):
"""Convert data from numpy or root to pandas dataframe.
Convert data safely to pandas, whatever the format is.
Parameters
----------
data_in : any reasonable data
The data to be converted
"""
data_in = dev_tool.entries_to_str(data_in)
if is_root(data_in):
if columns is None:
columns = data_in["branches"]
with uproot.open(data_in["filenames"]) as file:
tree = file[data_in["treename"]]
if "__index__" in tree.keys(): # legacy, we can also convert this
return to_pandas_old(data_in=data_in, index=index, columns=columns)
branches = to_list(columns)
loaded = tree.arrays(branches, library="pd")
if index is not None:
loaded = loaded.loc[index]
return loaded
else:
# HACK START
return to_pandas_old(data_in=data_in, index=index, columns=columns)
# HACK END
# from root_pandas import read_root
#
# root_pandas_numpy_map = dict(filenames='paths', treename='key', branches='columns',
# selection='where')
#
# if is_root(data_in):
# is_root2array = False
# for key, val in copy.deepcopy(list(data_in.items())):
# if key in root_pandas_numpy_map:
# is_root2array = True
# del data_in[key]
# data_in[root_pandas_numpy_map[key]] = val
# data_in['columns'] = to_list(data_in['columns'])
# if is_root2array:
# data_in['columns'] = ['noexpand:'+col for col in data_in['columns'] if not col.startswith('noexpand:')]
# remove the noexpand:
# data_in = read_root(**data_in) # why **? it's a root dict
# if is_list(data_in):
# data_in = np.array(data_in)
# if is_ndarray(data_in):
# if ((isinstance(columns, (list, tuple)) and len(columns) == 1) or
# isinstance(columns, string)):
#
# data_in = to_ndarray(data_in)
# data_in = pd.DataFrame(data_in, columns=columns)
# if index is not None:
# data_in = data_in.loc[index]
# elif isinstance(data_in, pd.DataFrame):
# pass
# else:
# raise TypeError("Could not convert data to pandas. Data: " + data_in)
# return data_in
def adv_return(return_value, save_name=None):
"""Save the value if save_name specified, otherwise just return input.
Can be wrapped around the return value. Without any arguments, the return
of your function will be exactly the same. With arguments, the value can
be saved (**pickled**) before it is returned.
Parameters
----------
return_value : any python object
The python object which should be pickled.
save_name : str, None
| The (file-)name for the pickled file. File-extension will be added \
automatically if specified in *raredecay.meta_config*.
| If *None* is passed, the object won't be pickled.
Return
------
out : python object
Return return_value without changes.
**Usage**:
Instead of a simple return statement
>>> return my_variable/my_object
one can use the **completely equivalent** statement
>>> return adv_return(my_variable/my_object)
If the return value should be saved in addition to be returned, use
>>> return adv_return(my_variable/my_object, save_name='my_object.pickle')
(*the .pickle ending is not required but added automatically if omitted*)
which returns the value and saves it.
"""
save_name = dev_tool.entries_to_str(save_name)
if save_name not in (None, False):
if isinstance(save_name, str):
save_name = meta_cfg.PICKLE_PATH + save_name
if not is_pickle(save_name):
save_name += "." + meta_cfg.PICKLE_DATATYPE
with open(str(save_name), "wb") as f:
pickle.dump(return_value, f, meta_cfg.PICKLE_PROTOCOL)
print(str(return_value) + " pickled to " + save_name)
else:
pass
# HACK how to solve logger problem?
# logger.error("Could not pickle data, name for file (" +
# str(save_name) + ") is not a string!" +
# "\n Therefore, the following data was only returned" +
# " but not saved! \n Data:" + str(return_value))
return return_value
def try_unpickle(file_to_unpickle, use_metapath_bkwcomp=False):
"""Try to unpickle a file and return, otherwise just return input."""
file_to_unpickle = dev_tool.entries_to_str(file_to_unpickle)
if is_pickle(file_to_unpickle):
extra_path = meta_cfg.PICKLE_PATH if use_metapath_bkwcomp else ""
with open(extra_path + file_to_unpickle, "rb") as f:
file_to_unpickle = pickle.load(f)
return file_to_unpickle
| [((1346, 1384), 'numpy.percentile', 'np.percentile', (['signal_data', 'percentile'], {}), '(signal_data, percentile)\n', (1359, 1384), True, 'import numpy as np\n'), ((3388, 3412), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (3402, 3412), False, 'import os\n'), ((1425, 1482), 'numpy.logical_or', 'np.logical_or', (['(bkg_data < lower_cut)', '(bkg_data > upper_cut)'], {}), '(bkg_data < lower_cut, bkg_data > upper_cut)\n', (1438, 1482), True, 'import numpy as np\n'), ((3847, 3906), 'numpy.core.records.fromarrays', 'np.core.records.fromarrays', (['[new_branch]'], {'names': 'branch_name'}), '([new_branch], names=branch_name)\n', (3873, 3906), True, 'import numpy as np\n'), ((3915, 3989), 'root_numpy.array2root', 'array2root', ([], {'arr': 'arr', 'filename': 'filename', 'treename': 'treename', 'mode': 'write_mode'}), '(arr=arr, filename=filename, treename=treename, mode=write_mode)\n', (3925, 3989), False, 'from root_numpy import array2root\n'), ((9935, 9984), 'numpy.stack', 'np.stack', (['[loaded[branch] for branch in branches]'], {}), '([loaded[branch] for branch in branches])\n', (9943, 9984), True, 'import numpy as np\n'), ((10308, 10345), 'numpy.array', 'np.array', (['[val[0] for val in data_in]'], {}), '([val[0] for val in data_in])\n', (10316, 10345), True, 'import numpy as np\n'), ((10513, 10530), 'numpy.array', 'np.array', (['data_in'], {}), '(data_in)\n', (10521, 10530), True, 'import numpy as np\n'), ((11455, 11475), 'numpy.asfarray', 'np.asfarray', (['data_in'], {}), '(data_in)\n', (11466, 11475), True, 'import numpy as np\n'), ((12453, 12485), 'root_numpy.root2array', 'root_numpy.root2array', ([], {}), '(**data_in)\n', (12474, 12485), False, 'import root_numpy\n'), ((12558, 12575), 'numpy.array', 'np.array', (['data_in'], {}), '(data_in)\n', (12566, 12575), True, 'import numpy as np\n'), ((12785, 12841), 'pandas.DataFrame', 'pd.DataFrame', (['data_in'], {'columns': 'columns', 'index': 'root_index'}), '(data_in, columns=columns, index=root_index)\n', (12797, 12841), True, 'import pandas as pd\n'), ((3427, 3456), 'rootpy.io.root_open', 'root_open', (['filename'], {'mode': '"""a"""'}), "(filename, mode='a')\n", (3436, 3456), False, 'from rootpy.io import root_open\n'), ((9721, 9754), 'uproot.open', 'uproot.open', (["data_in['filenames']"], {}), "(data_in['filenames'])\n", (9732, 9754), False, 'import uproot\n'), ((10639, 10661), 'copy.deepcopy', 'copy.deepcopy', (['data_in'], {}), '(data_in)\n', (10652, 10661), False, 'import copy\n'), ((11371, 11415), 'warnings.warn', 'warnings.warn', (['"""Could not force float array"""'], {}), "('Could not force float array')\n", (11384, 11415), False, 'import warnings\n'), ((13509, 13542), 'uproot.open', 'uproot.open', (["data_in['filenames']"], {}), "(data_in['filenames'])\n", (13520, 13542), False, 'import uproot\n'), ((18036, 18050), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (18047, 18050), False, 'import pickle\n'), ((17063, 17117), 'pickle.dump', 'pickle.dump', (['return_value', 'f', 'meta_cfg.PICKLE_PROTOCOL'], {}), '(return_value, f, meta_cfg.PICKLE_PROTOCOL)\n', (17074, 17117), False, 'import pickle\n')] |
LittleNed/toontown-stride | toontown/coghq/boardbothq/BoardOfficeManagerAI.py | 1252a8f9a8816c1810106006d09c8bdfe6ad1e57 | from direct.directnotify import DirectNotifyGlobal
import DistributedBoardOfficeAI
from toontown.toonbase import ToontownGlobals
from toontown.coghq.boardbothq import BoardOfficeLayout
from direct.showbase import DirectObject
import random
class BoardOfficeManagerAI(DirectObject.DirectObject):
notify = DirectNotifyGlobal.directNotify.newCategory('BoardOfficeManagerAI')
boardofficeId = None
def __init__(self, air):
DirectObject.DirectObject.__init__(self)
self.air = air
def getDoId(self):
return 0
def createBoardOffice(self, boardofficeId, players):
for avId in players:
if bboard.has('boardofficeId-%s' % avId):
boardofficeId = bboard.get('boardofficeId-%s' % avId)
break
numFloors = ToontownGlobals.BoardOfficeNumFloors[boardofficeId]
floor = random.randrange(numFloors)
for avId in players:
if bboard.has('mintFloor-%s' % avId):
floor = bboard.get('mintFloor-%s' % avId)
floor = max(0, floor)
floor = min(floor, numFloors - 1)
break
for avId in players:
if bboard.has('mintRoom-%s' % avId):
roomId = bboard.get('mintRoom-%s' % avId)
for i in xrange(numFloors):
layout = BoardOfficeLayout.BoardOfficeLayout(boardofficeId, i)
if roomId in layout.getRoomIds():
floor = i
else:
from toontown.coghq.boardbothq import BoardOfficeRoomSpecs
roomName = BoardOfficeRoomSpecs.BoardOfficeRoomId2RoomName[roomId]
BoardOfficeManagerAI.notify.warning('room %s (%s) not found in any floor of mint %s' % (roomId, roomName, boardofficeId))
mintZone = self.air.allocateZone()
mint = DistributedBoardOfficeAI.DistributedBoardOfficeAI(self.air, boardofficeId, mintZone, floor, players)
mint.generateWithRequired(mintZone)
return mintZone
| [((309, 376), 'direct.directnotify.DirectNotifyGlobal.directNotify.newCategory', 'DirectNotifyGlobal.directNotify.newCategory', (['"""BoardOfficeManagerAI"""'], {}), "('BoardOfficeManagerAI')\n", (352, 376), False, 'from direct.directnotify import DirectNotifyGlobal\n'), ((440, 480), 'direct.showbase.DirectObject.DirectObject.__init__', 'DirectObject.DirectObject.__init__', (['self'], {}), '(self)\n', (474, 480), False, 'from direct.showbase import DirectObject\n'), ((867, 894), 'random.randrange', 'random.randrange', (['numFloors'], {}), '(numFloors)\n', (883, 894), False, 'import random\n'), ((1883, 1987), 'DistributedBoardOfficeAI.DistributedBoardOfficeAI', 'DistributedBoardOfficeAI.DistributedBoardOfficeAI', (['self.air', 'boardofficeId', 'mintZone', 'floor', 'players'], {}), '(self.air, boardofficeId,\n mintZone, floor, players)\n', (1932, 1987), False, 'import DistributedBoardOfficeAI\n'), ((1352, 1405), 'toontown.coghq.boardbothq.BoardOfficeLayout.BoardOfficeLayout', 'BoardOfficeLayout.BoardOfficeLayout', (['boardofficeId', 'i'], {}), '(boardofficeId, i)\n', (1387, 1405), False, 'from toontown.coghq.boardbothq import BoardOfficeLayout\n')] |
radon-h2020/AnsibleMetrics | ansiblemetrics/utils.py | 8a8e27d9b54fc1578d00526c8663184a2e686cb2 | from typing import Union
def key_value_list(d: Union[dict, list], key=None) -> list:
"""
This function iterates (recursively) over all the key-value pairs of a dictionary and returns a list of (key, value) tuples whose values are primitive (i.e., no list or dict), e.g., string, number, etc.
d -- a dictionary to iterate through
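Example (illustrative nested dictionary):
>>> key_value_list({'name': 'install', 'vars': {'state': 'present'}})
[('name', 'install'), ('state', 'present')]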
"""
if not d:
return []
if not isinstance(d, dict) and not isinstance(d, list):
return []
key_values = []
if isinstance(d, list):
for entry in d:
if isinstance(entry, dict):
key_values.extend(key_value_list(entry))
else:
key_values.append((key, entry))
else:
for k, v in d.items():
if k is None or v is None:
continue
if not isinstance(v, dict) and type(v) != list:
key_values.append((k, v))
elif isinstance(v, list):
key_values.extend(key_value_list(v, k))
else:
key_values.extend(key_value_list(v))
return key_values
def all_keys(d: Union[dict, list]) -> list:
"""
Returns a list of all the keys of a dictionary (duplicates included)
d -- a dictionary to iterate through
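Example (illustrative nested dictionary):
>>> all_keys({'tasks': {'name': 'install'}})
['tasks', 'name']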
"""
if not d:
return []
if d is None or not isinstance(d, dict) and not isinstance(d, list):
return []
keys = []
if isinstance(d, list):
for entry in d:
keys.extend(all_keys(entry))
else:
for k, v in d.items():
keys.append(k)
keys.extend(all_keys(v))
return keys
def all_values(d: Union[dict, list]) -> list:
"""
Returns a list of all the primitive values of a dictionary (duplicates included)
d -- a dictionary to iterate through
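Example (illustrative dictionary):
>>> all_values({'name': 'install', 'packages': ['vim', 'git']})
['install', 'vim', 'git']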
"""
if not d:
return []
if not isinstance(d, dict) and not isinstance(d, list):
return [d]
values = []
if isinstance(d, list):
for entry in d:
values.extend(all_values(entry))
else:
for k, v in d.items():
values.extend(all_values(v))
return values
| [] |
Kunal-Shah-Bose/yam-python | yampy/apis/groups.py | 1d24b4b5c4bfb512804183efe741a2f7a75889e5 | from yampy.apis.utils import ArgumentConverter, none_filter, stringify_booleans
from yampy.models import extract_id
class GroupsAPI(object):
"""
Provides an interface for accessing the groups related endpoints of the
Yammer API. You should not instantiate this class directly; use the
:meth:`yampy.Yammer.groups` method instead.
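Example (sketch; assumes an authenticated client that exposes this API as ``yammer.groups``):
>>> import yampy
>>> yammer = yampy.Yammer(access_token="...")
>>> yammer.groups.all(mine=True)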
"""
def __init__(self, client):
"""
Initializes a new GroupsAPI that will use the given client object
to make HTTP requests.
"""
self._client = client
self._argument_converter = ArgumentConverter(
none_filter, stringify_booleans,
)
def all(self, mine=None, reverse=None):
"""
Returns all the groups in the current user's network.
Customize the response using the keyword arguments:
* mine -- Only return group of current user.
* reverse -- return group in descending order by name.
"""
return self._client.get("/groups", **self._argument_converter(
mine=mine,
reverse=reverse,
))
def find(self, group_id):
"""
Returns the group identified by the given group_id.
"""
return self._client.get(self._group_path(group_id))
def members(self, group_id, page=None, reverse=None):
"""
Returns the members of the group identified by the given group_id.
Customize the response using the keyword arguments:
* page -- Enable pagination, and return the nth page of 50 users.
"""
path = "/group_memberships"
return self._client.get(path, **self._argument_converter(
page=page,
reverse=reverse,
))
def join(self, group_id):
"""
Join the group identified by the given group_id.
Return True
"""
path = "/group_memberships"
group_id = extract_id(group_id)
return self._client.post(path, **self._argument_converter(
group_id=group_id,
))
def leave(self, group_id):
"""
Leave the group identified by the given group_id.
Return True
"""
path = "/group_memberships"
group_id = extract_id(group_id)
return self._client.delete(path, **self._argument_converter(
group_id=group_id,
))
def create(self, name, private=False):
"""
Create a group.
Return Group info
"""
path = "/groups"
return self._client.post(path, **self._argument_converter(
name=name,
private=private,
))
def delete(self, group_id):
"""
Delete a group.
Return True if success
"""
return self._client.delete(self._group_path(group_id), delete="true")
def _group_path(self, group_id):
return "/groups/%d" % extract_id(group_id)
| [((583, 633), 'yampy.apis.utils.ArgumentConverter', 'ArgumentConverter', (['none_filter', 'stringify_booleans'], {}), '(none_filter, stringify_booleans)\n', (600, 633), False, 'from yampy.apis.utils import ArgumentConverter, none_filter, stringify_booleans\n'), ((1907, 1927), 'yampy.models.extract_id', 'extract_id', (['group_id'], {}), '(group_id)\n', (1917, 1927), False, 'from yampy.models import extract_id\n'), ((2227, 2247), 'yampy.models.extract_id', 'extract_id', (['group_id'], {}), '(group_id)\n', (2237, 2247), False, 'from yampy.models import extract_id\n'), ((2892, 2912), 'yampy.models.extract_id', 'extract_id', (['group_id'], {}), '(group_id)\n', (2902, 2912), False, 'from yampy.models import extract_id\n')] |
ycanerol/phy | phy/gui/actions.py | 7a247f926dd5bf5d8ab95fe138e8f4a0db11b068 | # -*- coding: utf-8 -*-
"""Actions and snippets."""
# -----------------------------------------------------------------------------
# Imports
# -----------------------------------------------------------------------------
import inspect
from functools import partial, wraps
import logging
import re
import sys
import traceback
from .qt import QKeySequence, QAction, require_qt, input_dialog, busy_cursor, _get_icon
from phylib.utils import Bunch
logger = logging.getLogger(__name__)
# -----------------------------------------------------------------------------
# Snippet parsing utilities
# -----------------------------------------------------------------------------
def _parse_arg(s):
"""Parse a number or string."""
try:
return int(s)
except ValueError:
pass
try:
return float(s)
except ValueError:
pass
return s
def _parse_list(s):
"""Parse a comma-separated list of values (strings or numbers)."""
# Range: 'x-y'
if '-' in s:
m, M = map(_parse_arg, s.split('-'))
return list(range(m, M + 1))
# List of ids: 'x,y,z'
elif ',' in s:
return list(map(_parse_arg, s.split(',')))
else:
return _parse_arg(s)
def _parse_snippet(s):
"""Parse an entire snippet command."""
return tuple(map(_parse_list, s.split(' ')))
def _prompt_args(title, docstring, default=None):
"""Display a prompt dialog requesting function arguments.
'default' is a function returning the default value for the proposed input dialog.
"""
# There are args, need to display the dialog.
# Extract Example: `...` in the docstring to put a predefined text
# in the input dialog.
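# e.g. a callback docstring containing "Example: `3-5 1`" pre-fills the dialog with "3-5 1".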
logger.debug("Prompting arguments for %s", title)
r = re.search('Example: `([^`]+)`', docstring)
docstring_ = docstring[:r.start()].strip() if r else docstring
try:
text = str(default()) if default else (r.group(1) if r else None)
except Exception as e: # pragma: no cover
logger.error("Error while handling user input: %s", str(e))
return
s, ok = input_dialog(title, docstring_, text)
if not ok or not s:
return
# Parse user-supplied arguments and call the function.
args = _parse_snippet(s)
return args
# -----------------------------------------------------------------------------
# Show shortcut utility functions
# -----------------------------------------------------------------------------
def _get_shortcut_string(shortcut):
"""Return a string representation of a shortcut."""
if not shortcut:
return ''
if isinstance(shortcut, (tuple, list)):
return ', '.join([_get_shortcut_string(s) for s in shortcut])
if isinstance(shortcut, str):
if hasattr(QKeySequence, shortcut):
shortcut = QKeySequence(getattr(QKeySequence, shortcut))
else:
return shortcut.lower()
assert isinstance(shortcut, QKeySequence)
s = shortcut.toString() or ''
return str(s).lower()
def _get_qkeysequence(shortcut):
"""Return a QKeySequence or list of QKeySequence from a shortcut string."""
if shortcut is None:
return []
if isinstance(shortcut, (tuple, list)):
return [_get_qkeysequence(s) for s in shortcut]
assert isinstance(shortcut, str)
if hasattr(QKeySequence, shortcut):
return QKeySequence(getattr(QKeySequence, shortcut))
sequence = QKeySequence.fromString(shortcut)
assert not sequence.isEmpty()
return sequence
def _show_shortcuts(shortcuts):
"""Display shortcuts."""
out = []
for n in sorted(shortcuts):
shortcut = _get_shortcut_string(shortcuts[n])
if not n.startswith('_') and not shortcut.startswith('-'):
out.append('- {0:<40} {1:s}'.format(n, shortcut))
if out:
print('Keyboard shortcuts')
print('\n'.join(out))
print('')
def _show_snippets(snippets):
"""Display snippets."""
out = []
for n in sorted(snippets):
snippet = snippets[n]
if not n.startswith('_'):
out.append('- {0:<40} :{1:s}'.format(n, snippet))
if out:
print('Snippets')
print('\n'.join(out))
print('')
def show_shortcuts_snippets(actions):
"""Show the shortcuts and snippets of an Actions instance."""
print(actions.name)
print('-' * len(actions.name))
print()
_show_shortcuts(actions.shortcuts)
_show_snippets(actions._default_snippets)
# -----------------------------------------------------------------------------
# Actions
# -----------------------------------------------------------------------------
def _alias(name):
# Get the alias from the character after & if it exists.
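# e.g. 'Select &All' -> 'a'; 'Undo Move' -> 'undo_move'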
alias = name[name.index('&') + 1] if '&' in name else name
alias = alias.replace(' ', '_').lower()
return alias
def _expected_args(f):
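# e.g. for "def f(self, a, b, c=0)" this returns ('a', 'b'); 'self', defaulted
# arguments and arguments already bound in a partial are not counted.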
if isinstance(f, partial):
argspec = inspect.getfullargspec(f.func)
else:
argspec = inspect.getfullargspec(f)
f_args = argspec.args
if 'self' in f_args:
f_args.remove('self')
# Remove arguments with defaults from the list.
if len(argspec.defaults or ()):
f_args = f_args[:-len(argspec.defaults)]
# Remove arguments supplied in a partial.
if isinstance(f, partial):
f_args = f_args[len(f.args):]
f_args = [arg for arg in f_args if arg not in f.keywords]
return tuple(f_args)
@require_qt
def _create_qaction(gui, **kwargs):
# Create the QAction instance.
name = kwargs.get('name', '')
name = name[0].upper() + name[1:].replace('_', ' ')
action = QAction(name, gui)
# Show an input dialog if there are args.
callback = kwargs.get('callback', None)
title = getattr(callback, '__name__', 'action')
# Number of expected arguments.
n_args = kwargs.get('n_args', None) or len(_expected_args(callback))
@wraps(callback)
def wrapped(is_checked, *args):
if kwargs.get('checkable', None):
args = (is_checked,) + args
if kwargs.get('prompt', None):
args += _prompt_args(
title, docstring, default=kwargs.get('prompt_default', None)) or ()
if not args: # pragma: no cover
logger.debug("User cancelled input prompt, aborting.")
return
if len(args) < n_args:
logger.warning(
"Invalid function arguments: expecting %d but got %d", n_args, len(args))
return
try:
# Set a busy cursor if set_busy is True.
with busy_cursor(kwargs.get('set_busy', None)):
return callback(*args)
except Exception: # pragma: no cover
logger.warning("Error when executing action %s.", name)
logger.debug(''.join(traceback.format_exception(*sys.exc_info())))
action.triggered.connect(wrapped)
sequence = _get_qkeysequence(kwargs.get('shortcut', None))
if not isinstance(sequence, (tuple, list)):
sequence = [sequence]
action.setShortcuts(sequence)
assert kwargs.get('docstring', None)
docstring = re.sub(r'\s+', ' ', kwargs.get('docstring', None))
docstring += ' (alias: {})'.format(kwargs.get('alias', None))
action.setStatusTip(docstring)
action.setWhatsThis(docstring)
action.setCheckable(kwargs.get('checkable', None))
action.setChecked(kwargs.get('checked', None))
if kwargs.get('icon', None):
action.setIcon(_get_icon(kwargs['icon']))
return action
class Actions(object):
"""Group of actions bound to a GUI.
This class attaches to a GUI and implements the following features:
* Add and remove actions
* Keyboard shortcuts for the actions
* Display all shortcuts
Constructor
-----------
gui : GUI instance
name : str
Name of this group of actions.
menu : str
Name of the GUI menu that will contain the actions.
submenu : str
Name of the GUI submenu that will contain the actions.
default_shortcuts : dict
Map action names to keyboard shortcuts (regular strings).
default_snippets : dict
Map action names to snippets (regular strings).
"""
def __init__(
self, gui, name=None, menu=None, submenu=None, view=None,
insert_menu_before=None, default_shortcuts=None, default_snippets=None):
self._actions_dict = {}
self._aliases = {}
self._default_shortcuts = default_shortcuts or {}
self._default_snippets = default_snippets or {}
assert name
self.name = name
self.menu = menu
self.submenu = submenu
self.view = view
self.view_submenu = None
self.insert_menu_before = insert_menu_before
self._view_submenus = {}
self.gui = gui
gui.actions.append(self)
# Create the menu when creating the Actions instance.
if menu:
gui.get_menu(menu, insert_menu_before)
def _get_menu(self, menu=None, submenu=None, view=None, view_submenu=None):
"""Return the QMenu depending on a combination of keyword arguments."""
# Defaults.
menu = menu or self.menu
submenu = submenu or self.submenu
view = view or self.view
view_submenu = view_submenu or self.view_submenu
# If the action is a view action, it should be added to the view's menu in the dock widget.
if view:
if view_submenu and view_submenu not in self._view_submenus:
self._view_submenus[view_submenu] = view.dock._menu.addMenu(view_submenu)
if view_submenu:
return self._view_submenus[view_submenu]
else:
return view.dock._menu
# Create the submenu if there is one.
if submenu:
# Create the submenu.
self.gui.get_submenu(menu, submenu)
# Make sure the action gets added to the submenu.
menu = submenu
if menu:
return self.gui.get_menu(menu)
def add(self, callback=None, name=None, shortcut=None, alias=None, prompt=False, n_args=None,
docstring=None, menu=None, submenu=None, view=None, view_submenu=None, verbose=True,
checkable=False, checked=False, set_busy=False, prompt_default=None,
show_shortcut=True, icon=None, toolbar=False):
"""Add an action with a keyboard shortcut.
Parameters
----------
callback : function
Take no argument if checkable is False, or a boolean (checked) if it is True
name : str
Action name, the callback's name by default.
shortcut : str
The keyboard shortcut for this action.
alias : str
Snippet, the name by default.
prompt : boolean
Whether this action should display a dialog with an input box where the user can
write arguments to the callback function.
n_args : int
If prompt is True, specify the number of expected arguments.
set_busy : boolean
Whether to use a busy cursor while performing the action.
prompt_default : str
The default text in the input text box, if prompt is True.
docstring : str
The action docstring, to be displayed in the status bar when hovering over the action
item in the menu. By default, the function's docstring.
menu : str
The name of the menu where the action should be added. It is automatically created
if it doesn't exist.
submenu : str
The name of the submenu where the action should be added. It is automatically created
if it doesn't exist.
view : QWidget
A view that belongs to the GUI, if the actions are to be added to the view's menu bar.
view_submenu : str
The name of a submenu in the view menu.
checkable : boolean
Whether the action is checkable (toggle on/off).
checked : boolean
Whether the checkable action is initially checked or not.
show_shortcut : boolean
Whether to show the shortcut in the Help action that displays all GUI shortcuts.
icon : str
Hexadecimal code of the font-awesome icon.
toolbar : boolean
Whether to add the action to the toolbar.
"""
param_names = sorted(inspect.signature(Actions.add).parameters)
l = locals()
kwargs = {param_name: l[param_name] for param_name in param_names if param_name != 'self'}
if callback is None:
# Allow to use either add(func) or @add or @add(...).
kwargs.pop('callback', None)
return partial(self.add, **kwargs)
assert callback
# Get the name from the callback function if needed.
name = name or callback.__name__
alias = alias or self._default_snippets.get(name, _alias(name)).split(' ')[0]
name = name.replace('&', '')
shortcut = shortcut or self._default_shortcuts.get(name, None)
# Skip existing action.
if name in self._actions_dict:
return
# Set the status tip from the function's docstring.
docstring = docstring or callback.__doc__ or name
docstring = re.sub(r'[ \t\r\f\v]{2,}', ' ', docstring.strip())
# Create and register the action.
kwargs.update(name=name, alias=alias, shortcut=shortcut, docstring=docstring)
action = _create_qaction(self.gui, **kwargs)
action_obj = Bunch(qaction=action, **kwargs)
if verbose and not name.startswith('_'):
logger.log(5, "Add action `%s` (%s).", name, _get_shortcut_string(action.shortcut()))
self.gui.addAction(action)
# Do not show private actions in the menu.
if not name.startswith('_'):
# Find the menu in which the action should be added.
qmenu = self._get_menu(
menu=menu, submenu=submenu, view=view, view_submenu=view_submenu)
if qmenu:
qmenu.addAction(action)
# Add the action to the toolbar.
if toolbar:
self.gui._toolbar.show()
self.gui._toolbar.addAction(action)
self._actions_dict[name] = action_obj
# Register the alias -> name mapping.
self._aliases[alias] = name
# Set the callback method.
if callback:
setattr(self, name.lower().replace(' ', '_').replace(':', ''), callback)
def separator(self, **kwargs):
"""Add a separator.
Parameters
----------
menu : str
The name of the menu where the separator should be added. It is automatically created
if it doesn't exist.
submenu : str
The name of the submenu where the separator should be added. It is automatically
created if it doesn't exist.
view : QWidget
A view that belongs to the GUI, if the separator is to be added to the view's menu bar.
view_submenu : str
The name of a submenu in the view menu.
"""
self._get_menu(**kwargs).addSeparator()
def disable(self, name=None):
"""Disable all actions, or only one if a name is passed."""
if name is None:
for name in self._actions_dict:
self.disable(name)
return
self._actions_dict[name].qaction.setEnabled(False)
def enable(self, name=None):
"""Enable all actions, or only one if a name is passed.."""
if name is None:
for name in self._actions_dict:
self.enable(name)
return
self._actions_dict[name].qaction.setEnabled(True)
def get(self, name):
"""Get a QAction instance from its name."""
return self._actions_dict[name].qaction if name in self._actions_dict else None
def run(self, name, *args):
"""Run an action as specified by its name."""
assert isinstance(name, str)
# Resolve the alias if it is an alias.
name = self._aliases.get(name, name)
# Get the action.
action = self._actions_dict.get(name, None)
if not action:
raise ValueError("Action `{}` doesn't exist.".format(name))
if not name.startswith('_'):
logger.debug("Execute action `%s`.", name)
try:
return action.callback(*args)
except TypeError as e:
logger.warning("Invalid action arguments: " + str(e))
return
def remove(self, name):
"""Remove an action."""
self.gui.removeAction(self._actions_dict[name].qaction)
del self._actions_dict[name]
delattr(self, name)
def remove_all(self):
"""Remove all actions."""
names = sorted(self._actions_dict.keys())
for name in names:
self.remove(name)
@property
def shortcuts(self):
"""A dictionary mapping action names to keyboard shortcuts."""
out = {}
for name in sorted(self._actions_dict):
action = self._actions_dict[name]
if not action.show_shortcut:
continue
# Discard actions without shortcut and without an alias.
if not action.shortcut and not action.alias:
continue
# Only show alias for actions with no shortcut.
alias_str = ' (:%s)' % action.alias if action.alias != name else ''
shortcut = action.shortcut or '-'
shortcut = shortcut if isinstance(action.shortcut, str) else ', '.join(shortcut)
out[name] = '%s%s' % (shortcut, alias_str)
return out
def show_shortcuts(self):
"""Display all shortcuts in the console."""
show_shortcuts_snippets(self)
def __contains__(self, name):
"""Whether the Actions group contains a specified action."""
return name in self._actions_dict
def __repr__(self):
return '<Actions {}>'.format(sorted(self._actions_dict))
# -----------------------------------------------------------------------------
# Snippets
# -----------------------------------------------------------------------------
class Snippets(object):
"""Provide keyboard snippets to quickly execute actions from a GUI.
This class attaches to a GUI and an `Actions` instance. Every command is
associated with a snippet of the same name, or with an alias as indicated
in the action. The arguments of the action's callback functions can be
provided in the snippet's command with a simple syntax. For example, the
following command:
```
:my_action string 3-6
```
corresponds to:
```python
my_action('string', (3, 4, 5, 6))
```
The snippet mode is activated with the `:` keyboard shortcut. A snippet
command is activated with `Enter`, and one can leave the snippet mode
with `Escape`.
When the snippet mode is enabled (with `:`), this object adds a hidden Qt action
for every keystroke. These actions are removed when the snippet mode is disabled.
Constructor
-----------
gui : GUI instance
"""
# HACK: Unicode characters do not seem to work on Python 2
cursor = '\u200A\u258C'
# Allowed characters in snippet mode.
# A Qt shortcut will be created for every character.
_snippet_chars = r"abcdefghijklmnopqrstuvwxyz0123456789 ,.;?!_-+~=*/\(){}[]<>&|"
def __init__(self, gui):
self.gui = gui
self._status_message = gui.status_message
self.actions = Actions(gui, name='Snippets', menu='&File')
# Register snippet mode shortcut.
@self.actions.add(shortcut=':')
def enable_snippet_mode():
"""Enable the snippet mode (type action alias in the status
bar)."""
self.mode_on()
self._create_snippet_actions()
self.mode_off()
@property
def command(self):
"""This is used to write a snippet message in the status bar. A cursor is appended at
the end."""
msg = self.gui.status_message
n = len(msg)
n_cur = len(self.cursor)
return msg[:n - n_cur]
@command.setter
def command(self, value):
value += self.cursor
self.gui.unlock_status()
self.gui.status_message = value
self.gui.lock_status()
def _backspace(self):
"""Erase the last character in the snippet command."""
if self.command == ':':
return
logger.log(5, "Snippet keystroke `Backspace`.")
self.command = self.command[:-1]
def _enter(self):
"""Disable the snippet mode and execute the command."""
command = self.command
logger.log(5, "Snippet keystroke `Enter`.")
# NOTE: we need to set back the actions (mode_off) before running
# the command.
self.mode_off()
self.run(command)
def _create_snippet_actions(self):
"""Add mock Qt actions for snippet keystrokes.
Used to enable snippet mode.
"""
# One action per allowed character.
for i, char in enumerate(self._snippet_chars):
def _make_func(char):
def callback():
logger.log(5, "Snippet keystroke `%s`.", char)
self.command += char
return callback
# Lowercase letters.
self.actions.add(
name='_snippet_{}'.format(i),
shortcut=char,
callback=_make_func(char))
# Uppercase letters.
if char in self._snippet_chars[:26]:
self.actions.add(
name='_snippet_{}_upper'.format(i),
shortcut='shift+' + char,
callback=_make_func(char.upper()))
self.actions.add(
name='_snippet_backspace', shortcut='backspace', callback=self._backspace)
self.actions.add(
name='_snippet_activate', shortcut=('enter', 'return'), callback=self._enter)
self.actions.add(
name='_snippet_disable', shortcut='escape', callback=self.mode_off)
def run(self, snippet):
"""Execute a snippet command.
May be overridden.
"""
assert snippet[0] == ':'
snippet = snippet[1:]
snippet_args = _parse_snippet(snippet)
name = snippet_args[0]
logger.debug("Processing snippet `%s`.", snippet)
try:
# Try to run the snippet on all attached Actions instances.
for actions in self.gui.actions:
try:
actions.run(name, *snippet_args[1:])
return
except ValueError:
# This Actions instance doesn't contain the requested
# snippet, trying the next attached Actions instance.
pass
logger.warning("Couldn't find action `%s`.", name)
except Exception as e:
logger.warning("Error when executing snippet: \"%s\".", str(e))
logger.debug(''.join(traceback.format_exception(*sys.exc_info())))
def is_mode_on(self):
"""Whether the snippet mode is enabled."""
return self.command.startswith(':')
def mode_on(self):
"""Enable the snippet mode."""
logger.debug("Snippet mode enabled, press `escape` to leave this mode.")
# Save the current status message.
self._status_message = self.gui.status_message
self.gui.lock_status()
# Silent all actions except the Snippets actions.
for actions in self.gui.actions:
if actions != self.actions:
actions.disable()
self.actions.enable()
self.command = ':'
def mode_off(self):
"""Disable the snippet mode."""
self.gui.unlock_status()
# Reset the GUI status message that was set before the mode was
# activated.
self.gui.status_message = self._status_message
# Re-enable all actions except the Snippets actions.
self.actions.disable()
for actions in self.gui.actions:
if actions != self.actions:
actions.enable()
# The `:` shortcut should always be enabled.
self.actions.enable('enable_snippet_mode')
| [((461, 488), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (478, 488), False, 'import logging\n'), ((1769, 1811), 're.search', 're.search', (['"""Example: `([^`]+)`"""', 'docstring'], {}), "('Example: `([^`]+)`', docstring)\n", (1778, 1811), False, 'import re\n'), ((5918, 5933), 'functools.wraps', 'wraps', (['callback'], {}), '(callback)\n', (5923, 5933), False, 'from functools import partial, wraps\n'), ((4944, 4974), 'inspect.getfullargspec', 'inspect.getfullargspec', (['f.func'], {}), '(f.func)\n', (4966, 4974), False, 'import inspect\n'), ((5003, 5028), 'inspect.getfullargspec', 'inspect.getfullargspec', (['f'], {}), '(f)\n', (5025, 5028), False, 'import inspect\n'), ((13643, 13674), 'phylib.utils.Bunch', 'Bunch', ([], {'qaction': 'action'}), '(qaction=action, **kwargs)\n', (13648, 13674), False, 'from phylib.utils import Bunch\n'), ((12810, 12837), 'functools.partial', 'partial', (['self.add'], {}), '(self.add, **kwargs)\n', (12817, 12837), False, 'from functools import partial, wraps\n'), ((12492, 12522), 'inspect.signature', 'inspect.signature', (['Actions.add'], {}), '(Actions.add)\n', (12509, 12522), False, 'import inspect\n'), ((6856, 6870), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (6868, 6870), False, 'import sys\n'), ((23300, 23314), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (23312, 23314), False, 'import sys\n')] |
AngelLiang/PP4E | PP4E-Examples-1.4/Examples/PP4E/Tools/cleanpyc.py | 3a7f63b366e1e4700b4d2524884696999a87ba9d | """
delete all .pyc bytecode files in a directory tree: use the
command line arg as root if given, else current working dir
"""
import os, sys
findonly = False
rootdir = os.getcwd() if len(sys.argv) == 1 else sys.argv[1]
found = removed = 0
for (thisDirLevel, subsHere, filesHere) in os.walk(rootdir):
for filename in filesHere:
if filename.endswith('.pyc'):
fullname = os.path.join(thisDirLevel, filename)
print('=>', fullname)
if not findonly:
try:
os.remove(fullname)
removed += 1
except:
type, inst = sys.exc_info()[:2]
print('*'*4, 'Failed:', filename, type, inst)
found += 1
print('Found', found, 'files, removed', removed)
| [((286, 302), 'os.walk', 'os.walk', (['rootdir'], {}), '(rootdir)\n', (293, 302), False, 'import os, sys\n'), ((171, 182), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (180, 182), False, 'import os, sys\n'), ((396, 432), 'os.path.join', 'os.path.join', (['thisDirLevel', 'filename'], {}), '(thisDirLevel, filename)\n', (408, 432), False, 'import os, sys\n'), ((539, 558), 'os.remove', 'os.remove', (['fullname'], {}), '(fullname)\n', (548, 558), False, 'import os, sys\n'), ((649, 663), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (661, 663), False, 'import os, sys\n')] |
louxfaure/sudoc_recouv | apps.py | da3f094a0a9554c0b3911a365d1feea6d2758fec | from django.apps import AppConfig
class SudocRecouvConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'sudoc_recouv'
verbose_name = 'Analyses de recouvrement SUDOC'
| [] |
amancevice/terraform-aws-slack-interactive-components | src/states.py | 819a9b6a408b36cd1a0100859801bc47c437fdc8 | import boto3
from logger import logger
class States:
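"""Thin wrapper around the Step Functions task-callback API
(send_task_success / send_task_failure / send_task_heartbeat).
Example (sketch): States().succeed(task_token, output='{"ok": true}')
"""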
def __init__(self, boto3_session=None):
self.boto3_session = boto3_session or boto3.Session()
self.client = self.boto3_session.client('stepfunctions')
def fail(self, task_token, error, cause):
params = dict(taskToken=task_token, error=error, cause=cause)
logger.info('SEND TASK FAILURE %s', logger.json(params))
return self.client.send_task_failure(**params)
def heartbeat(self, task_token):
params = dict(taskToken=task_token)
logger.info('SEND TASK HEARTBEAT %s', logger.json(params))
return self.client.send_task_heartbeat(**params)
def succeed(self, task_token, output):
params = dict(taskToken=task_token, output=output)
logger.info('SEND TASK SUCCESS %s', logger.json(params))
return self.client.send_task_success(**params)
| [((146, 161), 'boto3.Session', 'boto3.Session', ([], {}), '()\n', (159, 161), False, 'import boto3\n'), ((388, 407), 'logger.logger.json', 'logger.json', (['params'], {}), '(params)\n', (399, 407), False, 'from logger import logger\n'), ((592, 611), 'logger.logger.json', 'logger.json', (['params'], {}), '(params)\n', (603, 611), False, 'from logger import logger\n'), ((817, 836), 'logger.logger.json', 'logger.json', (['params'], {}), '(params)\n', (828, 836), False, 'from logger import logger\n')] |
clach04/controllerx | apps/controllerx/cx_core/type/light_controller.py | b5cd92d3371c352c50f7d5ba7dae4538d7c15dfe | from typing import Any, Dict, Optional, Type, Union
from cx_const import Light, PredefinedActionsMapping
from cx_core.color_helper import get_color_wheel
from cx_core.controller import action
from cx_core.feature_support.light import LightSupport
from cx_core.integration import EventData
from cx_core.integration.deconz import DeCONZIntegration
from cx_core.integration.z2m import Z2MIntegration
from cx_core.release_hold_controller import ReleaseHoldController
from cx_core.stepper import Stepper
from cx_core.stepper.circular_stepper import CircularStepper
from cx_core.stepper.minmax_stepper import MinMaxStepper
from cx_core.type_controller import Entity, TypeController
DEFAULT_MANUAL_STEPS = 10
DEFAULT_AUTOMATIC_STEPS = 10
DEFAULT_MIN_BRIGHTNESS = 1
DEFAULT_MAX_BRIGHTNESS = 255
DEFAULT_MIN_WHITE_VALUE = 1
DEFAULT_MAX_WHITE_VALUE = 255
DEFAULT_MIN_COLOR_TEMP = 153
DEFAULT_MAX_COLOR_TEMP = 500
DEFAULT_TRANSITION = 300
DEFAULT_ADD_TRANSITION = True
DEFAULT_TRANSITION_TURN_TOGGLE = False
ColorMode = str
# Once the minimum supported version of Python is 3.8,
# we can declare the ColorMode as a Literal
# ColorMode = Literal["auto", "xy_color", "color_temp"]
class LightEntity(Entity):
color_mode: ColorMode
def __init__(self, name: str, color_mode: ColorMode = "auto") -> None:
super().__init__(name)
self.color_mode = color_mode
class LightController(TypeController[LightEntity], ReleaseHoldController):
"""
This is the main class that controls the lights for different devices.
Type of actions:
- On/Off/Toggle
- Brightness click and hold
- Color temperature click and hold
- xy color click and hold
If a light supports xy_color and color_temperature, then xy_color will be the
default functionality. Parameters taken:
- controller (required): Inherited from Controller
- light (required): This is either the light entity name or a dictionary as
{name: string, color_mode: auto | xy_color | color_temp}
- delay (optional): Inherited from ReleaseHoldController
- manual_steps (optional): Number of steps to go from min to max when clicking.
- automatic_steps (optional): Number of steps to go from min to max when smoothing.
"""
ATTRIBUTE_BRIGHTNESS = "brightness"
ATTRIBUTE_WHITE_VALUE = "white_value"
# With the following attribute, it will select color_temp or xy_color, depending on the light.
ATTRIBUTE_COLOR = "color"
ATTRIBUTE_COLOR_TEMP = "color_temp"
ATTRIBUTE_XY_COLOR = "xy_color"
index_color = 0
value_attribute = None
# These are intermediate variables to store the checked value
smooth_power_on_check: bool
remove_transition_check: bool
domains = ["light"]
entity_arg = "light"
async def init(self) -> None:
manual_steps = self.args.get("manual_steps", DEFAULT_MANUAL_STEPS)
automatic_steps = self.args.get("automatic_steps", DEFAULT_AUTOMATIC_STEPS)
self.min_brightness = self.args.get("min_brightness", DEFAULT_MIN_BRIGHTNESS)
self.max_brightness = self.args.get("max_brightness", DEFAULT_MAX_BRIGHTNESS)
self.min_white_value = self.args.get("min_white_value", DEFAULT_MIN_WHITE_VALUE)
self.max_white_value = self.args.get("max_white_value", DEFAULT_MAX_WHITE_VALUE)
self.min_color_temp = self.args.get("min_color_temp", DEFAULT_MIN_COLOR_TEMP)
self.max_color_temp = self.args.get("max_color_temp", DEFAULT_MAX_COLOR_TEMP)
self.transition = self.args.get("transition", DEFAULT_TRANSITION)
self.color_wheel = get_color_wheel(
self.args.get("color_wheel", "default_color_wheel")
)
color_stepper = CircularStepper(
0, len(self.color_wheel) - 1, len(self.color_wheel)
)
self.manual_steppers: Dict[str, Stepper] = {
LightController.ATTRIBUTE_BRIGHTNESS: MinMaxStepper(
self.min_brightness, self.max_brightness, manual_steps
),
LightController.ATTRIBUTE_WHITE_VALUE: MinMaxStepper(
self.min_white_value, self.max_white_value, manual_steps
),
LightController.ATTRIBUTE_COLOR_TEMP: MinMaxStepper(
self.min_color_temp, self.max_color_temp, manual_steps
),
LightController.ATTRIBUTE_XY_COLOR: color_stepper,
}
self.automatic_steppers: Dict[str, Stepper] = {
LightController.ATTRIBUTE_BRIGHTNESS: MinMaxStepper(
self.min_brightness, self.max_brightness, automatic_steps
),
LightController.ATTRIBUTE_WHITE_VALUE: MinMaxStepper(
self.min_white_value, self.max_white_value, automatic_steps
),
LightController.ATTRIBUTE_COLOR_TEMP: MinMaxStepper(
self.min_color_temp, self.max_color_temp, automatic_steps
),
LightController.ATTRIBUTE_XY_COLOR: color_stepper,
}
self.smooth_power_on = self.args.get(
"smooth_power_on", self.supports_smooth_power_on()
)
self.add_transition = self.args.get("add_transition", DEFAULT_ADD_TRANSITION)
self.add_transition_turn_toggle = self.args.get(
"add_transition_turn_toggle", DEFAULT_TRANSITION_TURN_TOGGLE
)
await super().init()
def _get_entity_type(self) -> Type[LightEntity]:
return LightEntity
def get_predefined_actions_mapping(self) -> PredefinedActionsMapping:
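        # Maps each predefined action name to either a coroutine, or a (coroutine, args)
        # tuple whose args are passed to the coroutine when the action is executed.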
return {
Light.ON: self.on,
Light.OFF: self.off,
Light.TOGGLE: self.toggle,
Light.TOGGLE_FULL_BRIGHTNESS: (
self.toggle_full,
(LightController.ATTRIBUTE_BRIGHTNESS,),
),
Light.TOGGLE_FULL_WHITE_VALUE: (
self.toggle_full,
(LightController.ATTRIBUTE_WHITE_VALUE,),
),
Light.TOGGLE_FULL_COLOR_TEMP: (
self.toggle_full,
(LightController.ATTRIBUTE_COLOR_TEMP,),
),
Light.TOGGLE_MIN_BRIGHTNESS: (
self.toggle_min,
(LightController.ATTRIBUTE_BRIGHTNESS,),
),
Light.TOGGLE_MIN_WHITE_VALUE: (
self.toggle_min,
(LightController.ATTRIBUTE_WHITE_VALUE,),
),
Light.TOGGLE_MIN_COLOR_TEMP: (
self.toggle_min,
(LightController.ATTRIBUTE_COLOR_TEMP,),
),
Light.RELEASE: self.release,
Light.ON_FULL_BRIGHTNESS: (
self.on_full,
(LightController.ATTRIBUTE_BRIGHTNESS,),
),
Light.ON_FULL_WHITE_VALUE: (
self.on_full,
(LightController.ATTRIBUTE_WHITE_VALUE,),
),
Light.ON_FULL_COLOR_TEMP: (
self.on_full,
(LightController.ATTRIBUTE_COLOR_TEMP,),
),
Light.ON_MIN_BRIGHTNESS: (
self.on_min,
(LightController.ATTRIBUTE_BRIGHTNESS,),
),
Light.ON_MIN_WHITE_VALUE: (
self.on_min,
(LightController.ATTRIBUTE_WHITE_VALUE,),
),
Light.ON_MIN_COLOR_TEMP: (
self.on_min,
(LightController.ATTRIBUTE_COLOR_TEMP,),
),
Light.SET_HALF_BRIGHTNESS: (
self.set_value,
(
LightController.ATTRIBUTE_BRIGHTNESS,
0.5,
),
),
Light.SET_HALF_WHITE_VALUE: (
self.set_value,
(
LightController.ATTRIBUTE_WHITE_VALUE,
0.5,
),
),
Light.SET_HALF_COLOR_TEMP: (
self.set_value,
(
LightController.ATTRIBUTE_COLOR_TEMP,
0.5,
),
),
Light.SYNC: self.sync,
Light.CLICK_BRIGHTNESS_UP: (
self.click,
(
LightController.ATTRIBUTE_BRIGHTNESS,
Stepper.UP,
),
),
Light.CLICK_BRIGHTNESS_DOWN: (
self.click,
(
LightController.ATTRIBUTE_BRIGHTNESS,
Stepper.DOWN,
),
),
Light.CLICK_WHITE_VALUE_UP: (
self.click,
(
LightController.ATTRIBUTE_WHITE_VALUE,
Stepper.UP,
),
),
Light.CLICK_WHITE_VALUE_DOWN: (
self.click,
(
LightController.ATTRIBUTE_WHITE_VALUE,
Stepper.DOWN,
),
),
Light.CLICK_COLOR_UP: (
self.click,
(
LightController.ATTRIBUTE_COLOR,
Stepper.UP,
),
),
Light.CLICK_COLOR_DOWN: (
self.click,
(
LightController.ATTRIBUTE_COLOR,
Stepper.DOWN,
),
),
Light.CLICK_COLOR_TEMP_UP: (
self.click,
(
LightController.ATTRIBUTE_COLOR_TEMP,
Stepper.UP,
),
),
Light.CLICK_COLOR_TEMP_DOWN: (
self.click,
(
LightController.ATTRIBUTE_COLOR_TEMP,
Stepper.DOWN,
),
),
Light.CLICK_XY_COLOR_UP: (
self.click,
(
LightController.ATTRIBUTE_XY_COLOR,
Stepper.UP,
),
),
Light.CLICK_XY_COLOR_DOWN: (
self.click,
(
LightController.ATTRIBUTE_XY_COLOR,
Stepper.DOWN,
),
),
Light.HOLD_BRIGHTNESS_UP: (
self.hold,
(
LightController.ATTRIBUTE_BRIGHTNESS,
Stepper.UP,
),
),
Light.HOLD_BRIGHTNESS_DOWN: (
self.hold,
(
LightController.ATTRIBUTE_BRIGHTNESS,
Stepper.DOWN,
),
),
Light.HOLD_BRIGHTNESS_TOGGLE: (
self.hold,
(
LightController.ATTRIBUTE_BRIGHTNESS,
Stepper.TOGGLE,
),
),
Light.HOLD_WHITE_VALUE_UP: (
self.hold,
(
LightController.ATTRIBUTE_WHITE_VALUE,
Stepper.UP,
),
),
Light.HOLD_WHITE_VALUE_DOWN: (
self.hold,
(
LightController.ATTRIBUTE_WHITE_VALUE,
Stepper.DOWN,
),
),
Light.HOLD_WHITE_VALUE_TOGGLE: (
self.hold,
(
LightController.ATTRIBUTE_WHITE_VALUE,
Stepper.TOGGLE,
),
),
Light.HOLD_COLOR_UP: (
self.hold,
(
LightController.ATTRIBUTE_COLOR,
Stepper.UP,
),
),
Light.HOLD_COLOR_DOWN: (
self.hold,
(
LightController.ATTRIBUTE_COLOR,
Stepper.DOWN,
),
),
Light.HOLD_COLOR_TOGGLE: (
self.hold,
(
LightController.ATTRIBUTE_COLOR,
Stepper.TOGGLE,
),
),
Light.HOLD_COLOR_TEMP_UP: (
self.hold,
(
LightController.ATTRIBUTE_COLOR_TEMP,
Stepper.UP,
),
),
Light.HOLD_COLOR_TEMP_DOWN: (
self.hold,
(
LightController.ATTRIBUTE_COLOR_TEMP,
Stepper.DOWN,
),
),
Light.HOLD_COLOR_TEMP_TOGGLE: (
self.hold,
(
LightController.ATTRIBUTE_COLOR_TEMP,
Stepper.TOGGLE,
),
),
Light.HOLD_XY_COLOR_UP: (
self.hold,
(
LightController.ATTRIBUTE_XY_COLOR,
Stepper.UP,
),
),
Light.HOLD_XY_COLOR_DOWN: (
self.hold,
(
LightController.ATTRIBUTE_XY_COLOR,
Stepper.DOWN,
),
),
Light.HOLD_XY_COLOR_TOGGLE: (
self.hold,
(
LightController.ATTRIBUTE_XY_COLOR,
Stepper.TOGGLE,
),
),
Light.XYCOLOR_FROM_CONTROLLER: self.xycolor_from_controller,
Light.COLORTEMP_FROM_CONTROLLER: self.colortemp_from_controller,
}
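    # The transition attribute is dropped when transitions are disabled globally, when the
    # action is an explicit on/off/toggle and transitions are disabled for those, or when
    # the light does not support transitions at all.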
async def check_remove_transition(self, on_from_user: bool) -> bool:
return (
not self.add_transition
or (on_from_user and not self.add_transition_turn_toggle)
or await self.feature_support.not_supported(LightSupport.TRANSITION)
)
async def call_light_service(self, service: str, **attributes) -> None:
if "transition" not in attributes:
attributes["transition"] = self.transition / 1000
if self.remove_transition_check:
del attributes["transition"]
await self.call_service(service, entity_id=self.entity.name, **attributes)
async def _on(self, **attributes) -> None:
await self.call_light_service("light/turn_on", **attributes)
@action
async def on(self, **attributes) -> None:
await self._on(**attributes)
async def _off(self, **attributes) -> None:
await self.call_light_service("light/turn_off", **attributes)
@action
async def off(self, **attributes) -> None:
await self._off(**attributes)
async def _toggle(self, **attributes) -> None:
await self.call_light_service("light/toggle", **attributes)
@action
async def toggle(self, **attributes) -> None:
await self._toggle(**attributes)
async def _set_value(self, attribute: str, fraction: float) -> None:
fraction = max(0, min(fraction, 1))
stepper = self.automatic_steppers[attribute]
if isinstance(stepper, MinMaxStepper):
min_ = stepper.minmax.min
max_ = stepper.minmax.max
value = (max_ - min_) * fraction + min_
await self._on(**{attribute: value})
@action
async def set_value(self, attribute: str, fraction: float) -> None:
await self._set_value(attribute, fraction)
@action
async def toggle_full(self, attribute: str) -> None:
stepper = self.automatic_steppers[attribute]
if isinstance(stepper, MinMaxStepper):
await self._toggle(**{attribute: stepper.minmax.max})
@action
async def toggle_min(self, attribute: str) -> None:
stepper = self.automatic_steppers[attribute]
if isinstance(stepper, MinMaxStepper):
await self._toggle(**{attribute: stepper.minmax.min})
async def _on_full(self, attribute: str) -> None:
await self._set_value(attribute, 1)
@action
async def on_full(self, attribute: str) -> None:
await self._on_full(attribute)
async def _on_min(self, attribute: str) -> None:
await self._set_value(attribute, 0)
@action
async def on_min(self, attribute: str) -> None:
await self._on_min(attribute)
@action
async def sync(self) -> None:
attributes: Dict[Any, Any] = {}
try:
color_attribute = await self.get_attribute(LightController.ATTRIBUTE_COLOR)
if color_attribute == LightController.ATTRIBUTE_COLOR_TEMP:
attributes[color_attribute] = 370 # 2700K light
else:
attributes[color_attribute] = (0.323, 0.329) # white colour
except ValueError:
self.log(
"⚠️ `sync` action will only change brightness",
level="WARNING",
ascii_encode=False,
)
await self._on(**attributes, brightness=self.max_brightness)
@action
async def xycolor_from_controller(self, extra: Optional[EventData]) -> None:
if extra is None:
self.log("No event data present", level="WARNING")
return
if isinstance(self.integration, Z2MIntegration):
if "action_color" not in extra:
self.log(
"`action_color` is not present in the MQTT payload", level="WARNING"
)
return
xy_color = extra["action_color"]
await self._on(xy_color=(xy_color["x"], xy_color["y"]))
elif isinstance(self.integration, DeCONZIntegration):
if "xy" not in extra:
self.log("`xy` is not present in the deCONZ event", level="WARNING")
return
await self._on(xy_color=extra["xy"])
@action
async def colortemp_from_controller(self, extra: Optional[EventData]) -> None:
if extra is None:
self.log("No event data present", level="WARNING")
return
if isinstance(self.integration, Z2MIntegration):
if "action_color_temperature" not in extra:
self.log(
"`action_color_temperature` is not present in the MQTT payload",
level="WARNING",
)
return
await self._on(color_temp=extra["action_color_temperature"])
async def get_attribute(self, attribute: str) -> str:
if attribute == LightController.ATTRIBUTE_COLOR:
if self.entity.color_mode == "auto":
if await self.feature_support.is_supported(LightSupport.COLOR):
return LightController.ATTRIBUTE_XY_COLOR
elif await self.feature_support.is_supported(LightSupport.COLOR_TEMP):
return LightController.ATTRIBUTE_COLOR_TEMP
else:
raise ValueError(
"This light does not support xy_color or color_temp"
)
else:
return self.entity.color_mode
else:
return attribute
async def get_value_attribute(self, attribute: str) -> Union[float, int]:
if self.smooth_power_on_check:
return 0
if attribute == LightController.ATTRIBUTE_XY_COLOR:
return 0
elif (
attribute == LightController.ATTRIBUTE_BRIGHTNESS
or attribute == LightController.ATTRIBUTE_WHITE_VALUE
or attribute == LightController.ATTRIBUTE_COLOR_TEMP
):
value = await self.get_entity_state(self.entity.name, attribute)
if value is None:
raise ValueError(
f"Value for `{attribute}` attribute could not be retrieved "
f"from `{self.entity.name}`. "
"Check the FAQ to know more about this error: "
"https://xaviml.github.io/controllerx/faq"
)
else:
try:
return float(value)
except ValueError:
raise ValueError(
f"Attribute `{attribute}` with `{value}` as a value "
"could not be converted to float"
)
else:
raise ValueError(f"Attribute `{attribute}` not expected")
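    # Smooth power on only applies when brightness is being increased (any direction except
    # down), the feature is enabled, and the light is currently off.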
def check_smooth_power_on(
self, attribute: str, direction: str, light_state: str
) -> bool:
return (
direction != Stepper.DOWN
and attribute == self.ATTRIBUTE_BRIGHTNESS
and self.smooth_power_on
and light_state == "off"
)
async def before_action(self, action: str, *args, **kwargs) -> bool:
to_return = True
if action in ("click", "hold"):
attribute, direction = args
light_state: str = await self.get_entity_state(self.entity.name)
self.smooth_power_on_check = self.check_smooth_power_on(
attribute, direction, light_state
)
self.remove_transition_check = await self.check_remove_transition(
on_from_user=False
)
to_return = (light_state == "on") or self.smooth_power_on_check
else:
self.remove_transition_check = await self.check_remove_transition(
on_from_user=True
)
self.smooth_power_on_check = False
return await super().before_action(action, *args, **kwargs) and to_return
@action
async def click(self, attribute: str, direction: str) -> None:
attribute = await self.get_attribute(attribute)
self.value_attribute = await self.get_value_attribute(attribute)
await self.change_light_state(
self.value_attribute,
attribute,
direction,
self.manual_steppers[attribute],
"click",
)
@action
async def hold(self, attribute: str, direction: str) -> None: # type: ignore
attribute = await self.get_attribute(attribute)
self.value_attribute = await self.get_value_attribute(attribute)
self.log(
f"Attribute value before running the hold action: {self.value_attribute}",
level="DEBUG",
)
if direction == Stepper.TOGGLE:
self.log(
f"Previous direction: {self.automatic_steppers[attribute].previous_direction}",
level="DEBUG",
)
direction = self.automatic_steppers[attribute].get_direction(
self.value_attribute, direction
)
self.log(f"Going direction: {direction}", level="DEBUG")
await super().hold(attribute, direction)
async def hold_loop(self, attribute: str, direction: str) -> bool: # type: ignore
if self.value_attribute is None:
return True
return await self.change_light_state(
self.value_attribute,
attribute,
direction,
self.automatic_steppers[attribute],
"hold",
)
async def change_light_state(
self,
old: float,
attribute: str,
direction: str,
stepper: Stepper,
action_type: str,
) -> bool:
"""
        This function changes the state of the light depending on the previous
        value and attribute. It returns True when no more changes need to be made;
        otherwise, it returns False.
"""
attributes: Dict[str, Any]
if attribute == LightController.ATTRIBUTE_XY_COLOR:
index_color, _ = stepper.step(self.index_color, direction)
self.index_color = int(index_color)
xy_color = self.color_wheel[self.index_color]
attributes = {attribute: xy_color}
if action_type == "hold":
attributes["transition"] = self.delay / 1000
await self._on(**attributes)
            # In xy_color mode the loop never finishes on its own; the hold loop only stops
            # when the hold action is called on button release. I haven't experienced any
            # problems with this, but a future implementation could force the loop to stop
            # after 4 or 5 iterations as a safety measure.
return False
if self.smooth_power_on_check:
await self._on_min(attribute)
            # After smooth power on, the light should not brighten up.
return True
new_state_attribute, exceeded = stepper.step(old, direction)
new_state_attribute = round(new_state_attribute, 3)
attributes = {attribute: new_state_attribute}
if action_type == "hold":
attributes["transition"] = self.delay / 1000
await self._on(**attributes)
self.value_attribute = new_state_attribute
return exceeded
def supports_smooth_power_on(self) -> bool:
"""
        This function can be overridden for each device to indicate the default behaviour of the controller
        when the associated light is off and an event for incrementing brightness is received.
        It returns True if the associated light should be turned on with minimum brightness when such an
        event arrives while the lamp is off.
        The behaviour can be overridden by the user with the 'smooth_power_on' option in the app configuration.
"""
return False
| [((3924, 3993), 'cx_core.stepper.minmax_stepper.MinMaxStepper', 'MinMaxStepper', (['self.min_brightness', 'self.max_brightness', 'manual_steps'], {}), '(self.min_brightness, self.max_brightness, manual_steps)\n', (3937, 3993), False, 'from cx_core.stepper.minmax_stepper import MinMaxStepper\n'), ((4076, 4147), 'cx_core.stepper.minmax_stepper.MinMaxStepper', 'MinMaxStepper', (['self.min_white_value', 'self.max_white_value', 'manual_steps'], {}), '(self.min_white_value, self.max_white_value, manual_steps)\n', (4089, 4147), False, 'from cx_core.stepper.minmax_stepper import MinMaxStepper\n'), ((4229, 4298), 'cx_core.stepper.minmax_stepper.MinMaxStepper', 'MinMaxStepper', (['self.min_color_temp', 'self.max_color_temp', 'manual_steps'], {}), '(self.min_color_temp, self.max_color_temp, manual_steps)\n', (4242, 4298), False, 'from cx_core.stepper.minmax_stepper import MinMaxStepper\n'), ((4509, 4581), 'cx_core.stepper.minmax_stepper.MinMaxStepper', 'MinMaxStepper', (['self.min_brightness', 'self.max_brightness', 'automatic_steps'], {}), '(self.min_brightness, self.max_brightness, automatic_steps)\n', (4522, 4581), False, 'from cx_core.stepper.minmax_stepper import MinMaxStepper\n'), ((4664, 4738), 'cx_core.stepper.minmax_stepper.MinMaxStepper', 'MinMaxStepper', (['self.min_white_value', 'self.max_white_value', 'automatic_steps'], {}), '(self.min_white_value, self.max_white_value, automatic_steps)\n', (4677, 4738), False, 'from cx_core.stepper.minmax_stepper import MinMaxStepper\n'), ((4820, 4892), 'cx_core.stepper.minmax_stepper.MinMaxStepper', 'MinMaxStepper', (['self.min_color_temp', 'self.max_color_temp', 'automatic_steps'], {}), '(self.min_color_temp, self.max_color_temp, automatic_steps)\n', (4833, 4892), False, 'from cx_core.stepper.minmax_stepper import MinMaxStepper\n')] |
konodyuk/kts | kts/core/types.py | 3af5ccbf1d2089cb41d171626fcde4b0ba5aa8a7 | from typing import Union
import pandas as pd
from kts.core.frame import KTSFrame
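# Type alias: any API annotated with AnyFrame accepts a plain pandas DataFrame or a KTSFrame.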
AnyFrame = Union[pd.DataFrame, KTSFrame]
| [] |
jlaura/krispy | krispy/mod_user/models.py | b1b2bf8a3e315608152c7dad15d384d0669f5e27 | from app import db
from flask_login import UserMixin
class User(UserMixin, db.Model):
__tablename__ = 'oauth2users'
id = db.Column(db.Integer, primary_key=True)
social_id = db.Column(db.String(64), nullable=False, unique=True)
nickname = db.Column(db.String(64), nullable=False)
email = db.Column(db.String(64), nullable=True)
| [((134, 173), 'app.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)'}), '(db.Integer, primary_key=True)\n', (143, 173), False, 'from app import db\n'), ((200, 213), 'app.db.String', 'db.String', (['(64)'], {}), '(64)\n', (209, 213), False, 'from app import db\n'), ((269, 282), 'app.db.String', 'db.String', (['(64)'], {}), '(64)\n', (278, 282), False, 'from app import db\n'), ((322, 335), 'app.db.String', 'db.String', (['(64)'], {}), '(64)\n', (331, 335), False, 'from app import db\n')] |
flxj/Django_blog | blog_app/blog/views.py | 01eb12553335115fee5faecafe8cacf2f0615135 | import markdown
from comments.forms import CommentForm,BookCommentForm,MovieCommentForm
from django.shortcuts import render, get_object_or_404
from.models import Post,Category,Tag, Book,Movie
#from django.http import HttpResponse
from django.views.generic import ListView, DetailView
from django.utils.text import slugify
from markdown.extensions.toc import TocExtension
from django.db.models import Q
"""
def index(request):
#post_list = Post.objects.all().order_by('-created_time')
post_list = Post.objects.all()
return render(request, 'blog/index.html', context={'post_list': post_list})
"""
class IndexView(ListView):
model = Post
template_name = 'blog/index.html'
context_object_name = 'post_list'
paginate_by = 10
def get_context_data(self, **kwargs):
"""
        In a function-based view, template variables are passed to the template by giving the
        render function a dict via its context argument, e.g.
        render(request, 'blog/index.html', context={'post_list': post_list}),
        which passes the {'post_list': post_list} dict to the template.
        In a class-based view, that dict of template variables is produced by get_context_data,
        so we override this method in order to inject some extra template variables of our own.
        """
        # First get the context dict generated by the parent class.
        context = super().get_context_data(**kwargs)
        # The parent's dict already contains the paginator, page_obj and is_paginated variables:
        # paginator is a Paginator instance,
        # page_obj is a Page instance,
        # is_paginated is a boolean indicating whether the results are paginated.
        # For example, with 10 items per page but only 5 items in total, no pagination is needed
        # and is_paginated=False.
        # The Paginator and Page classes are explained in detail in the Django Pagination article:
        # http://zmrenwu.com/post/34/
        # Since context is a dict, use get() to pull out the values for these keys.
        paginator = context.get('paginator')
        page = context.get('page_obj')
        is_paginated = context.get('is_paginated')
        # Call our own pagination_data method (below) to get the data needed to render the
        # pagination navigation bar.
        pagination_data = self.pagination_data(paginator, page, is_paginated)
        # Merge the pagination template variables into context; note that pagination_data also
        # returns a dict.
        context.update(pagination_data)
        # Return the updated context so ListView can render the template with these variables,
        # which now include everything needed to display the pagination bar.
        return context
def pagination_data(self, paginator, page, is_paginated):
if not is_paginated:
            # Without pagination there is no navigation bar to display, so no data is
            # needed and an empty dict is returned.
            return {}
        # Consecutive page numbers to the left of the current page; empty by default.
        left = []
        # Consecutive page numbers to the right of the current page; empty by default.
        right = []
        # Whether an ellipsis is needed after the page-1 number.
        left_has_more = False
        # Whether an ellipsis is needed before the last page number.
        right_has_more = False
        # Whether the page-1 number needs to be shown.
        # If the consecutive page numbers on the left already contain page 1, it does not
        # need to be shown again; in every other case the first page number is always shown.
        # Defaults to False.
        first = False
        # Whether the last page number needs to be shown.
        # Needed for the same reason as above.
        last = False
        # Page number currently requested by the user.
        page_number = page.number
        # Total number of pages after pagination.
        total_pages = paginator.num_pages
        # The full list of page numbers, e.g. [1, 2, 3, 4] for four pages.
        page_range = paginator.page_range
page_range = paginator.page_range
if page_number == 1:
            # The user requested the first page, so nothing is needed on the left and
            # left=[] (already empty by default). Only the consecutive page numbers to the
            # right are needed: with a page list of [1, 2, 3, 4] this yields right = [2, 3].
            # Only the two page numbers after the current one are taken; change this number
            # to get more pages.
            right = page_range[page_number:page_number + 2]
            # If the rightmost number is smaller than the last page number minus 1, there are
            # other pages between them, so an ellipsis is needed (signalled by right_has_more).
            if right[-1] < total_pages - 1:
                right_has_more = True
            # If the rightmost number is smaller than the last page number, the consecutive
            # numbers on the right do not include the last page, so it must be shown
            # (signalled by last).
            if right[-1] < total_pages:
                last = True
elif page_number == total_pages:
            # The user requested the last page, so nothing is needed on the right and
            # right=[] (already empty by default). Only the consecutive page numbers to the
            # left are needed: with a page list of [1, 2, 3, 4] this yields left = [2, 3].
            # Only the two page numbers before the current one are taken; change this number
            # to get more pages.
            left = page_range[(page_number - 3) if (page_number - 3) > 0 else 0:page_number - 1]
            # If the leftmost number is greater than the page-2 number, there are other pages
            # between it and page 1, so an ellipsis is needed (signalled by left_has_more).
            if left[0] > 2:
                left_has_more = True
            # If the leftmost number is greater than 1, the consecutive numbers on the left do
            # not include the first page, so it must be shown (signalled by first).
            if left[0] > 1:
                first = True
else:
            # The user requested neither the first nor the last page, so the consecutive page
            # numbers on both sides of the current page are needed.
            # Only the two page numbers before and after the current one are taken; change this
            # number to get more pages.
            left = page_range[(page_number - 3) if (page_number - 3) > 0 else 0:page_number - 1]
            right = page_range[page_number:page_number + 2]
            # Whether the last page and the ellipsis before it need to be shown.
            if right[-1] < total_pages - 1:
                right_has_more = True
            if right[-1] < total_pages:
                last = True
            # Whether the first page and the ellipsis after it need to be shown.
            if left[0] > 2:
                left_has_more = True
            if left[0] > 1:
                first = True
data = {
'left': left,
'right': right,
'left_has_more': left_has_more,
'right_has_more': right_has_more,
'first': first,
'last': last,
}
return data
# Display the full post
"""
def detail(request, pk):
post = get_object_or_404(Post, pk=pk)
    # Increase the view count by 1
post.increase_views()
post.body = markdown.markdown(post.body,
extensions=[
'markdown.extensions.extra',
'markdown.extensions.codehilite',
'markdown.extensions.toc',
'markdown.extensions.tables',
])
form = CommentForm()
    # Fetch all comments under this post
comment_list = post.comment_set.all()
    # Pass the post, the form and the post's comment list to the detail.html template as
    # template variables so the corresponding data can be rendered.
context = {'post': post,
'form': form,
'comment_list': comment_list
}
return render(request, 'blog/detail.html', context=context)
"""
class PostDetailView(DetailView):
model = Post
template_name = 'blog/detail.html'
context_object_name = 'post'
def get(self, request, *args, **kwargs):
        # get is overridden because each visit to a post must increase its view count by 1.
        # The get method returns an HttpResponse instance.
        # The parent's get must be called first because only after it runs does self.object
        # exist; its value is the Post model instance being visited.
        response = super(PostDetailView, self).get(request, *args, **kwargs)
        # Increase the post's view count by 1.
        # Note that self.object is the post being visited.
        self.object.increase_views()
        # A view must return an HttpResponse object.
        return response
def get_object(self, queryset=None):
        # get_object is overridden because the post body needs to be rendered.
        post = super(PostDetailView, self).get_object(queryset=None)
        # Markdown was originally going to be disabled here, because text rendered by markdown
        # and then by MathJax becomes unreadable, yet without markdown the code blocks do not
        # display properly. So when writing posts with formulas, follow the MathJax sample
        # format to keep them from breaking after markdown rendering.
md = markdown.Markdown(extensions=[
'markdown.extensions.extra',
'markdown.extensions.codehilite',
'markdown.extensions.toc',
TocExtension(slugify=slugify),
])
post.body = md.convert(post.body)
post.toc = md.toc
return post
def get_context_data(self, **kwargs):
        # get_context_data is overridden because, besides passing the post to the template
        # (which DetailView already does), the comment form and the post's comment list must
        # also be passed to the template.
context = super(PostDetailView, self).get_context_data(**kwargs)
form = CommentForm()
comment_list = self.object.comment_set.all()
context.update({
'form': form,
'comment_list': comment_list
})
return context
# View archived posts
"""
def archives(request, year, month):
post_list = Post.objects.filter(created_time__year=year,
created_time__month=month
).order_by('-created_time')
return render(request, 'blog/index.html', context={'post_list': post_list})
"""
class ArchivesView(ListView):
model = Post
template_name = 'blog/index.html'
context_object_name = 'post_list'
def get_queryset(self):
year = self.kwargs.get('year')
month = self.kwargs.get('month')
return super(ArchivesView, self).get_queryset().filter(created_time__year=year,
created_time__month=month
)
# View posts by category
"""
def category(request, pk):
cate = get_object_or_404(Category, pk=pk)
post_list = Post.objects.filter(category=cate).order_by('-created_time')
return render(request, 'blog/index.html', context={'post_list': post_list})
"""
class CategoryView(ListView):
model = Post
template_name = 'blog/index.html'
context_object_name = 'post_list'
def get_queryset(self):
cate = get_object_or_404(Category, pk=self.kwargs.get('pk'))
return super(CategoryView, self).get_queryset().filter(category=cate)
# View posts by tag
class TagView(ListView):
model = Post
template_name = 'blog/index.html'
context_object_name = 'post_list'
def get_queryset(self):
tag = get_object_or_404(Tag, pk=self.kwargs.get('pk'))
return super(TagView, self).get_queryset().filter(tags=tag)
# Post search
def search(request):
q = request.GET.get('q')
error_msg = ''
if not q:
error_msg = "请输入关键词"
return render(request, 'blog/index.html', {'error_msg': error_msg})
post_list = Post.objects.filter(Q(title__icontains=q) | Q(body__icontains=q))
return render(request, 'blog/index.html', {'error_msg': error_msg,
'post_list': post_list})
# View book reviews
class BookView(ListView):
model = Book
template_name = 'blog/book.html'
context_object_name = 'book_list'
paginate_by = 20
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
paginator = context.get('paginator')
page = context.get('page_obj')
is_paginated = context.get('is_paginated')
pagination_data = self.pagination_data(paginator, page, is_paginated)
context.update(pagination_data)
return context
def pagination_data(self, paginator, page, is_paginated):
if not is_paginated:
return {}
left = []
right = []
left_has_more = False
right_has_more = False
first = False
last = False
page_number = page.number
total_pages = paginator.num_pages
page_range = paginator.page_range
if page_number == 1:
right = page_range[page_number:page_number + 2]
if right[-1] < total_pages - 1:
right_has_more = True
if right[-1] < total_pages:
last = True
elif page_number == total_pages:
left = page_range[(page_number - 3) if (page_number - 3) > 0 else 0:page_number - 1]
if left[0] > 2:
left_has_more = True
if left[0] > 1:
first = True
else:
left = page_range[(page_number - 3) if (page_number - 3) > 0 else 0:page_number - 1]
right = page_range[page_number:page_number + 2]
if right[-1] < total_pages - 1:
right_has_more = True
if right[-1] < total_pages:
last = True
if left[0] > 2:
left_has_more = True
if left[0] > 1:
first = True
data = {
'left': left,
'right': right,
'left_has_more': left_has_more,
'right_has_more': right_has_more,
'first': first,
'last': last,
}
return data
class BookDetailView(DetailView):
model = Book
template_name = 'blog/bookdetail.html'
context_object_name = 'book'
def get_object(self, queryset=None):
        # get_object is overridden because the book review needs to be rendered.
book = super(BookDetailView, self).get_object(queryset=None)
md = markdown.Markdown(extensions=[
'markdown.extensions.extra',
'markdown.extensions.codehilite',
#'markdown.extensions.toc',
#TocExtension(slugify=slugify),
])
book.review = md.convert(book.review)
#book.toc = md.toc
return book
def get_context_data(self, **kwargs):
context = super(BookDetailView, self).get_context_data(**kwargs)
form = BookCommentForm()
comment_list = self.object.bookcomment_set.all()
context.update({
'form': form,
'comment_list': comment_list
})
return context
# Book review archives
class BookArchivesView(ListView):
model = Book
template_name = 'blog/book.html'
context_object_name = 'book_list'
def get_queryset(self):
year = self.kwargs.get('year')
month = self.kwargs.get('month')
return super(BookArchivesView, self).get_queryset().filter(created_time__year=year,
created_time__month=month
)
### Movie review views
class FilmView(ListView):
model = Movie
template_name = 'blog/film.html'
context_object_name = 'film_list'
paginate_by = 36
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
paginator = context.get('paginator')
page = context.get('page_obj')
is_paginated = context.get('is_paginated')
pagination_data = self.pagination_data(paginator, page, is_paginated)
context.update(pagination_data)
return context
def pagination_data(self, paginator, page, is_paginated):
if not is_paginated:
return {}
left = []
right = []
left_has_more = False
right_has_more = False
first = False
last = False
page_number = page.number
total_pages = paginator.num_pages
page_range = paginator.page_range
if page_number == 1:
right = page_range[page_number:page_number + 2]
if right[-1] < total_pages - 1:
right_has_more = True
if right[-1] < total_pages:
last = True
elif page_number == total_pages:
left = page_range[(page_number - 3) if (page_number - 3) > 0 else 0:page_number - 1]
if left[0] > 2:
left_has_more = True
if left[0] > 1:
first = True
else:
left = page_range[(page_number - 3) if (page_number - 3) > 0 else 0:page_number - 1]
right = page_range[page_number:page_number + 2]
if right[-1] < total_pages - 1:
right_has_more = True
if right[-1] < total_pages:
last = True
if left[0] > 2:
left_has_more = True
if left[0] > 1:
first = True
data = {
'left': left,
'right': right,
'left_has_more': left_has_more,
'right_has_more': right_has_more,
'first': first,
'last': last,
}
return data
class FilmDetailView(DetailView):
model = Movie
template_name = 'blog/filmdetail.html'
context_object_name = 'film'
def get_object(self, queryset=None):
film = super(FilmDetailView, self).get_object(queryset=None)
md = markdown.Markdown(extensions=[
'markdown.extensions.extra',
'markdown.extensions.codehilite',
#'markdown.extensions.toc',
#TocExtension(slugify=slugify),
])
film.review = md.convert(film.review)
#film.toc = md.toc
return film
def get_context_data(self, **kwargs):
context = super(FilmDetailView, self).get_context_data(**kwargs)
form = MovieCommentForm()
comment_list = self.object.moviecomment_set.all()
context.update({
'form': form,
'comment_list': comment_list
})
return context
# Movie review archives
class FilmArchivesView(ListView):
model = Movie
template_name = 'blog/film.html'
context_object_name = 'film_list'
def get_queryset(self):
year = self.kwargs.get('year')
month = self.kwargs.get('month')
return super(FilmArchivesView, self).get_queryset().filter(created_time__year=year,
created_time__month=month
)
def about(request):
return render(request, 'blog/about.html') | [((9568, 9656), 'django.shortcuts.render', 'render', (['request', '"""blog/index.html"""', "{'error_msg': error_msg, 'post_list': post_list}"], {}), "(request, 'blog/index.html', {'error_msg': error_msg, 'post_list':\n post_list})\n", (9574, 9656), False, 'from django.shortcuts import render, get_object_or_404\n'), ((16712, 16746), 'django.shortcuts.render', 'render', (['request', '"""blog/about.html"""'], {}), "(request, 'blog/about.html')\n", (16718, 16746), False, 'from django.shortcuts import render, get_object_or_404\n'), ((7461, 7474), 'comments.forms.CommentForm', 'CommentForm', ([], {}), '()\n', (7472, 7474), False, 'from comments.forms import CommentForm, BookCommentForm, MovieCommentForm\n'), ((9413, 9473), 'django.shortcuts.render', 'render', (['request', '"""blog/index.html"""', "{'error_msg': error_msg}"], {}), "(request, 'blog/index.html', {'error_msg': error_msg})\n", (9419, 9473), False, 'from django.shortcuts import render, get_object_or_404\n'), ((12089, 12186), 'markdown.Markdown', 'markdown.Markdown', ([], {'extensions': "['markdown.extensions.extra', 'markdown.extensions.codehilite']"}), "(extensions=['markdown.extensions.extra',\n 'markdown.extensions.codehilite'])\n", (12106, 12186), False, 'import markdown\n'), ((12525, 12542), 'comments.forms.BookCommentForm', 'BookCommentForm', ([], {}), '()\n', (12540, 12542), False, 'from comments.forms import CommentForm, BookCommentForm, MovieCommentForm\n'), ((15552, 15649), 'markdown.Markdown', 'markdown.Markdown', ([], {'extensions': "['markdown.extensions.extra', 'markdown.extensions.codehilite']"}), "(extensions=['markdown.extensions.extra',\n 'markdown.extensions.codehilite'])\n", (15569, 15649), False, 'import markdown\n'), ((15988, 16006), 'comments.forms.MovieCommentForm', 'MovieCommentForm', ([], {}), '()\n', (16004, 16006), False, 'from comments.forms import CommentForm, BookCommentForm, MovieCommentForm\n'), ((9511, 9532), 'django.db.models.Q', 'Q', ([], {'title__icontains': 'q'}), '(title__icontains=q)\n', (9512, 9532), False, 'from django.db.models import Q\n'), ((9535, 9555), 'django.db.models.Q', 'Q', ([], {'body__icontains': 'q'}), '(body__icontains=q)\n', (9536, 9555), False, 'from django.db.models import Q\n'), ((7090, 7119), 'markdown.extensions.toc.TocExtension', 'TocExtension', ([], {'slugify': 'slugify'}), '(slugify=slugify)\n', (7102, 7119), False, 'from markdown.extensions.toc import TocExtension\n')] |
jfcoz/azure-cli | src/command_modules/azure-cli-security/azure/cli/command_modules/security/_params.py | 8459ef3fd3c76d9f99defd95d4c980923891fa6d | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# pylint: disable=line-too-long
from azure.cli.core.commands.parameters import resource_group_name_type
from knack.arguments import CLIArgumentType
from ._validators import (validate_alert_status,
validate_auto_provisioning_toggle,
validate_pricing_tier)
name_arg_type = CLIArgumentType(options_list=('--name', '-n'), metavar='NAME', help='name of the resource to be fetched')
home_region_arg_type = CLIArgumentType(options_list=('--home-region', '-hr'), metavar='HOMEREGION', help='home region that was selected for the subscription')
location_arg_type = CLIArgumentType(options_list=('--location', '-l'), metavar='LOCATION', help='location of the resource')
# Alerts
alert_status_arg_type = CLIArgumentType(options_list=('--status'), metavar='STATUS', help='target status of the alert. possible values are "dismiss" and "activate"')
# Auto Provisioning
auto_provisioning_auto_provision_arg_type = CLIArgumentType(options_list=('--auto-provision'), metavar='AUTOPROVISION', help='Automatic provisioning toggle. possible values are "on" or "off"')
# Contacts
contact_email_arg_type = CLIArgumentType(options_list=('--email'), metavar='EMAIL', help='E-mail of the security contact')
contact_phone_arg_type = CLIArgumentType(options_list=('--phone'), metavar='PHONE', help='Phone of the security contact')
contact_alert_notifications_arg_type = CLIArgumentType(options_list=('--alert-notifications'), metavar='ALERTNOTIFICATIONS', help='Whether to send mail notifications to the security contacts')
contact_alerts_admins_arg_type = CLIArgumentType(options_list=('--alerts-admins'), metavar='ALERTADMINS', help='Whether to send mail notifications to the subscription administrators')
# Pricing
pricing_tier_arg_type = CLIArgumentType(options_list=('--tier'), metavar='TIER', help='pricing tier type')
# Workspace settings
workspace_setting_target_workspace_arg_type = CLIArgumentType(options_list=('--target-workspace'), metavar='TARGETWORKSPACE', help='An ID of the workspace resource that will hold the security data')
def load_arguments(self, _):
for scope in ['alert',
'task',
'setting',
'contact',
'auto-provisioning-setting',
'discovered-security-solution',
'external-security-solution',
'jit-policy',
'location',
'pricing',
'topology',
'workspace-setting']:
with self.argument_context('security {}'.format(scope)) as c:
c.argument(
'resource_group_name',
options_list=['--resource-group', '-g'],
arg_type=resource_group_name_type)
c.argument(
'resource_name',
arg_type=name_arg_type)
c.argument(
'location',
arg_type=location_arg_type)
for scope in ['alert update']:
with self.argument_context('security {}'.format(scope)) as c:
c.argument(
'status',
validator=validate_alert_status,
arg_type=alert_status_arg_type)
for scope in ['auto-provisioning-setting update']:
with self.argument_context('security {}'.format(scope)) as c:
c.argument(
'auto_provision',
validator=validate_auto_provisioning_toggle,
arg_type=auto_provisioning_auto_provision_arg_type)
for scope in ['contact create']:
with self.argument_context('security {}'.format(scope)) as c:
c.argument(
'email',
arg_type=contact_email_arg_type)
c.argument(
'phone',
arg_type=contact_phone_arg_type)
c.argument(
'alert_notifications',
arg_type=contact_alert_notifications_arg_type)
c.argument(
'alerts_admins',
arg_type=contact_alerts_admins_arg_type)
for scope in ['pricing create']:
with self.argument_context('security {}'.format(scope)) as c:
c.argument(
'tier',
validator=validate_pricing_tier,
arg_type=pricing_tier_arg_type)
for scope in ['workspace-setting create']:
with self.argument_context('security {}'.format(scope)) as c:
c.argument(
'target_workspace',
arg_type=workspace_setting_target_workspace_arg_type)
| [((671, 781), 'knack.arguments.CLIArgumentType', 'CLIArgumentType', ([], {'options_list': "('--name', '-n')", 'metavar': '"""NAME"""', 'help': '"""name of the resource to be fetched"""'}), "(options_list=('--name', '-n'), metavar='NAME', help=\n 'name of the resource to be fetched')\n", (686, 781), False, 'from knack.arguments import CLIArgumentType\n'), ((800, 939), 'knack.arguments.CLIArgumentType', 'CLIArgumentType', ([], {'options_list': "('--home-region', '-hr')", 'metavar': '"""HOMEREGION"""', 'help': '"""home region that was selected for the subscription"""'}), "(options_list=('--home-region', '-hr'), metavar='HOMEREGION',\n help='home region that was selected for the subscription')\n", (815, 939), False, 'from knack.arguments import CLIArgumentType\n'), ((956, 1064), 'knack.arguments.CLIArgumentType', 'CLIArgumentType', ([], {'options_list': "('--location', '-l')", 'metavar': '"""LOCATION"""', 'help': '"""location of the resource"""'}), "(options_list=('--location', '-l'), metavar='LOCATION', help\n ='location of the resource')\n", (971, 1064), False, 'from knack.arguments import CLIArgumentType\n'), ((1094, 1238), 'knack.arguments.CLIArgumentType', 'CLIArgumentType', ([], {'options_list': '"""--status"""', 'metavar': '"""STATUS"""', 'help': '"""target status of the alert. possible values are "dismiss" and "activate\\""""'}), '(options_list=\'--status\', metavar=\'STATUS\', help=\n \'target status of the alert. possible values are "dismiss" and "activate"\')\n', (1109, 1238), False, 'from knack.arguments import CLIArgumentType\n'), ((1301, 1451), 'knack.arguments.CLIArgumentType', 'CLIArgumentType', ([], {'options_list': '"""--auto-provision"""', 'metavar': '"""AUTOPROVISION"""', 'help': '"""Automatic provisioning toggle. possible values are "on" or "off\\""""'}), '(options_list=\'--auto-provision\', metavar=\'AUTOPROVISION\',\n help=\'Automatic provisioning toggle. 
possible values are "on" or "off"\')\n', (1316, 1451), False, 'from knack.arguments import CLIArgumentType\n'), ((1487, 1587), 'knack.arguments.CLIArgumentType', 'CLIArgumentType', ([], {'options_list': '"""--email"""', 'metavar': '"""EMAIL"""', 'help': '"""E-mail of the security contact"""'}), "(options_list='--email', metavar='EMAIL', help=\n 'E-mail of the security contact')\n", (1502, 1587), False, 'from knack.arguments import CLIArgumentType\n'), ((1610, 1709), 'knack.arguments.CLIArgumentType', 'CLIArgumentType', ([], {'options_list': '"""--phone"""', 'metavar': '"""PHONE"""', 'help': '"""Phone of the security contact"""'}), "(options_list='--phone', metavar='PHONE', help=\n 'Phone of the security contact')\n", (1625, 1709), False, 'from knack.arguments import CLIArgumentType\n'), ((1746, 1907), 'knack.arguments.CLIArgumentType', 'CLIArgumentType', ([], {'options_list': '"""--alert-notifications"""', 'metavar': '"""ALERTNOTIFICATIONS"""', 'help': '"""Whether to send mail notifications to the security contacts"""'}), "(options_list='--alert-notifications', metavar=\n 'ALERTNOTIFICATIONS', help=\n 'Whether to send mail notifications to the security contacts')\n", (1761, 1907), False, 'from knack.arguments import CLIArgumentType\n'), ((1933, 2086), 'knack.arguments.CLIArgumentType', 'CLIArgumentType', ([], {'options_list': '"""--alerts-admins"""', 'metavar': '"""ALERTADMINS"""', 'help': '"""Whether to send mail notifications to the subscription administrators"""'}), "(options_list='--alerts-admins', metavar='ALERTADMINS', help\n ='Whether to send mail notifications to the subscription administrators')\n", (1948, 2086), False, 'from knack.arguments import CLIArgumentType\n'), ((2119, 2204), 'knack.arguments.CLIArgumentType', 'CLIArgumentType', ([], {'options_list': '"""--tier"""', 'metavar': '"""TIER"""', 'help': '"""pricing tier type"""'}), "(options_list='--tier', metavar='TIER', help='pricing tier type'\n )\n", (2134, 2204), False, 'from knack.arguments import CLIArgumentType\n'), ((2270, 2430), 'knack.arguments.CLIArgumentType', 'CLIArgumentType', ([], {'options_list': '"""--target-workspace"""', 'metavar': '"""TARGETWORKSPACE"""', 'help': '"""An ID of the workspace resource that will hold the security data"""'}), "(options_list='--target-workspace', metavar=\n 'TARGETWORKSPACE', help=\n 'An ID of the workspace resource that will hold the security data')\n", (2285, 2430), False, 'from knack.arguments import CLIArgumentType\n')] |
kuyu12/pygame_fight_game | utils/path_utils.py | 3bbc286b9f33c6d6d9db9bea21f9b7af15247df5 | import sys
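# Asset locations are resolved relative to sys.path[1], which this project treats as the repo root.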
IMAGES_PATH = sys.path[1] + "/Images"
BACKGROUND_IMAGES_PATH = IMAGES_PATH + '/background'
USER_INFO_BACKGROUND_PATH = BACKGROUND_IMAGES_PATH+"/blue_background.jpg"
SPRINT_IMAGE_PATH = IMAGES_PATH + '/sprite'
PROFILE_IMAGES_PATH = IMAGES_PATH + '/profile'
CONFIGURATION_FILES_PATH = sys.path[1] + "/configuration_files" | [] |
Alicegaz/torchok | tests/models/test_transformers.py | 7b8f95df466a25b1ad8ee93bed1a3c7516440cf4 | import unittest
import torch
from parameterized import parameterized
from src.constructor import create_backbone
from src.models.backbones.utils import list_models
from .test_segmentation import example_backbones
def inp(bsize, in_ch, w, h):
return torch.ones(bsize, in_ch, w, h)
class TestBackboneCorrectness(unittest.TestCase):
def setUp(self) -> None:
        self.device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
        # assumed input size: the tests below use self.input, which the original file never defined
        self.input = inp(2, 3, 224, 224).to(self.device)
@parameterized.expand(list_models(module='vision_transformer', exclude_filters=''))
def test_vit_torchscript_conversion(self, backbone_name):
model = create_backbone(backbone_name, img_size=self.input.shape[2]).to(self.device).eval()
with torch.no_grad():
torch.jit.trace(model, self.input)
torch.cuda.empty_cache()
@parameterized.expand(list_models(module='coat', exclude_filters=''))
def test_coat_torchscript_conversion(self, backbone_name):
model = create_backbone(backbone_name, img_size=self.input.shape[2]).to(self.device).eval()
with torch.no_grad():
torch.jit.trace(model, self.input)
torch.cuda.empty_cache()
@parameterized.expand(list_models(module='swin_transformer', exclude_filters=''))
def test_swin_torchscript_conversion(self, backbone_name):
model = create_backbone(backbone_name).to(self.device).eval()
input = torch.rand(2, 3, *model.img_size, device=self.device)
with torch.no_grad():
torch.jit.trace(model, input)
torch.cuda.empty_cache()
| [((257, 287), 'torch.ones', 'torch.ones', (['bsize', 'in_ch', 'w', 'h'], {}), '(bsize, in_ch, w, h)\n', (267, 287), False, 'import torch\n'), ((790, 814), 'torch.cuda.empty_cache', 'torch.cuda.empty_cache', ([], {}), '()\n', (812, 814), False, 'import torch\n'), ((481, 541), 'src.models.backbones.utils.list_models', 'list_models', ([], {'module': '"""vision_transformer"""', 'exclude_filters': '""""""'}), "(module='vision_transformer', exclude_filters='')\n", (492, 541), False, 'from src.models.backbones.utils import list_models\n'), ((1138, 1162), 'torch.cuda.empty_cache', 'torch.cuda.empty_cache', ([], {}), '()\n', (1160, 1162), False, 'import torch\n'), ((842, 888), 'src.models.backbones.utils.list_models', 'list_models', ([], {'module': '"""coat"""', 'exclude_filters': '""""""'}), "(module='coat', exclude_filters='')\n", (853, 888), False, 'from src.models.backbones.utils import list_models\n'), ((1399, 1452), 'torch.rand', 'torch.rand', (['(2)', '(3)', '*model.img_size'], {'device': 'self.device'}), '(2, 3, *model.img_size, device=self.device)\n', (1409, 1452), False, 'import torch\n'), ((1533, 1557), 'torch.cuda.empty_cache', 'torch.cuda.empty_cache', ([], {}), '()\n', (1555, 1557), False, 'import torch\n'), ((1190, 1248), 'src.models.backbones.utils.list_models', 'list_models', ([], {'module': '"""swin_transformer"""', 'exclude_filters': '""""""'}), "(module='swin_transformer', exclude_filters='')\n", (1201, 1248), False, 'from src.models.backbones.utils import list_models\n'), ((718, 733), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (731, 733), False, 'import torch\n'), ((747, 781), 'torch.jit.trace', 'torch.jit.trace', (['model', 'self.input'], {}), '(model, self.input)\n', (762, 781), False, 'import torch\n'), ((1066, 1081), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1079, 1081), False, 'import torch\n'), ((1095, 1129), 'torch.jit.trace', 'torch.jit.trace', (['model', 'self.input'], {}), '(model, self.input)\n', (1110, 1129), False, 'import torch\n'), ((1466, 1481), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1479, 1481), False, 'import torch\n'), ((1495, 1524), 'torch.jit.trace', 'torch.jit.trace', (['model', 'input'], {}), '(model, input)\n', (1510, 1524), False, 'import torch\n'), ((416, 441), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (439, 441), False, 'import torch\n'), ((621, 681), 'src.constructor.create_backbone', 'create_backbone', (['backbone_name'], {'img_size': 'self.input.shape[2]'}), '(backbone_name, img_size=self.input.shape[2])\n', (636, 681), False, 'from src.constructor import create_backbone\n'), ((969, 1029), 'src.constructor.create_backbone', 'create_backbone', (['backbone_name'], {'img_size': 'self.input.shape[2]'}), '(backbone_name, img_size=self.input.shape[2])\n', (984, 1029), False, 'from src.constructor import create_backbone\n'), ((1329, 1359), 'src.constructor.create_backbone', 'create_backbone', (['backbone_name'], {}), '(backbone_name)\n', (1344, 1359), False, 'from src.constructor import create_backbone\n')] |
SvineruS/aiogram | aiogram/types/inline_query.py | 7892edf45302fa195544430ac5db11dcbcbf7ae6 | import typing
from . import base
from . import fields
from .inline_query_result import InlineQueryResult
from .location import Location
from .user import User
class InlineQuery(base.TelegramObject):
"""
This object represents an incoming inline query.
When the user sends an empty query, your bot could return some default or trending results.
https://core.telegram.org/bots/api#inlinequery
"""
id: base.String = fields.Field()
from_user: User = fields.Field(alias='from', base=User)
location: Location = fields.Field(base=Location)
query: base.String = fields.Field()
offset: base.String = fields.Field()
async def answer(self,
results: typing.List[InlineQueryResult],
cache_time: typing.Optional[base.Integer] = None,
is_personal: typing.Optional[base.Boolean] = None,
next_offset: typing.Optional[base.String] = None,
switch_pm_text: typing.Optional[base.String] = None,
switch_pm_parameter: typing.Optional[base.String] = None):
"""
Use this method to send answers to an inline query.
No more than 50 results per query are allowed.
Source: https://core.telegram.org/bots/api#answerinlinequery
:param results: A JSON-serialized array of results for the inline query
:type results: :obj:`typing.List[types.InlineQueryResult]`
:param cache_time: The maximum amount of time in seconds that the result of the
inline query may be cached on the server. Defaults to 300.
:type cache_time: :obj:`typing.Optional[base.Integer]`
:param is_personal: Pass True, if results may be cached on the server side only
for the user that sent the query. By default, results may be returned to any user who sends the same query
:type is_personal: :obj:`typing.Optional[base.Boolean]`
:param next_offset: Pass the offset that a client should send in the
next query with the same text to receive more results.
Pass an empty string if there are no more results or if you don‘t support pagination.
Offset length can’t exceed 64 bytes.
:type next_offset: :obj:`typing.Optional[base.String]`
:param switch_pm_text: If passed, clients will display a button with specified text that
switches the user to a private chat with the bot and sends the bot a start message
with the parameter switch_pm_parameter
:type switch_pm_text: :obj:`typing.Optional[base.String]`
:param switch_pm_parameter: Deep-linking parameter for the /start message sent to the bot when
user presses the switch button. 1-64 characters, only A-Z, a-z, 0-9, _ and - are allowed.
:type switch_pm_parameter: :obj:`typing.Optional[base.String]`
:return: On success, True is returned
:rtype: :obj:`base.Boolean`
"""
return await self.bot.answer_inline_query(self.id,
results=results,
cache_time=cache_time,
is_personal=is_personal,
next_offset=next_offset,
switch_pm_text=switch_pm_text,
switch_pm_parameter=switch_pm_parameter)
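    # Minimal usage sketch (the dispatcher wiring below is assumed, not part of this module):
    #
    #   @dp.inline_handler()
    #   async def handle_inline(query: types.InlineQuery):
    #       results = [types.InlineQueryResultArticle(
    #           id='1', title='Hello',
    #           input_message_content=types.InputTextMessageContent('Hello!'))]
    #       await query.answer(results, cache_time=60, is_personal=True)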
| [] |
shaswat01/Disaster_Response_ETL | app/app.py | c441514fb5231d193cd4b29afad00fe0f3513562 | import nltk
import json
import plotly
import pandas as pd
import plotly.graph_objects as go
from nltk.stem import WordNetLemmatizer
from nltk.tokenize import word_tokenize
nltk.download(['punkt','wordnet'])
from flask import Flask
from flask import render_template, request, jsonify
from plotly.graph_objs import Bar, Histogram
import joblib
from sqlalchemy import create_engine
app = Flask(__name__)
def tokenize(text):
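    # Split the message into word tokens, then lemmatize, lowercase and strip each one.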
tokens = word_tokenize(text)
lemmatizer = WordNetLemmatizer()
clean_tokens = []
for tok in tokens:
clean_tok = lemmatizer.lemmatize(tok).lower().strip()
clean_tokens.append(clean_tok)
return clean_tokens
# load data
engine = create_engine('sqlite:///data/DisasterResponse.db')
df = pd.read_sql_table('messages', engine)
# load model
model = joblib.load("models/model.pkl")
# index webpage displays cool visuals and receives user input text for model
@app.route('/')
@app.route('/index')
def index():
# extract data needed for visuals
# Viz 1
genre = df.groupby('genre').count()['id'].sort_values()
# Viz 2
df['text length'] = df['message'].apply(lambda x: len(x.split()))
histogram = df[df['text length'] < 100].groupby('text length').count()['id']
# Viz 3
total_category = df.drop(columns=['id','message','original','genre', 'text length']).sum().sort_values(ascending=False).head(5)
# create visuals
graphs = [
{
'data': [
Bar(
x=genre.values,
y=genre.index,
orientation='h'
)
],
'layout': {
'title': 'Distribution of Message Genres',
'yaxis': {
'title': "Genre"
},
'xaxis': {
'title': "Counts"
}
}
},
{
'data': [
Bar(
x=histogram.index,
y=histogram.values
)
],
'layout': {
'title': 'Distribution of Messages Length',
'yaxis': {
'title': "Total Messages"
},
'xaxis': {
'title': "Total Words"
}
}
},
{
'data': [
Bar(
x=total_category.index,
y=total_category.values
)
],
'layout': {
'title': 'Total Messages per Category (Top 5)',
'yaxis': {
'title': "Total"
},
'xaxis': {
'title': "Category"
}
}
}
]
# encode plotly graphs in JSON
ids = ["graph-{}".format(i) for i, _ in enumerate(graphs)]
graphJSON = json.dumps(graphs, cls=plotly.utils.PlotlyJSONEncoder)
# render web page with plotly graphs
return render_template('master.html', ids=ids, graphJSON=graphJSON)
# web page that handles user query and displays model results
@app.route('/go')
def go():
# save user input in query
query = request.args.get('query', '')
# use model to predict classification for query
classification_labels = model.predict([query])[0]
classification_results = dict(zip(df.columns[4:], classification_labels))
# This will render the go.html Please see that file.
return render_template(
'go.html',
query=query,
classification_result=classification_results
)
def main():
app.run()
#app.run(host='0.0.0.0', port=3001, debug=True)
if __name__ == '__main__':
main()
| [((172, 207), 'nltk.download', 'nltk.download', (["['punkt', 'wordnet']"], {}), "(['punkt', 'wordnet'])\n", (185, 207), False, 'import nltk\n'), ((388, 403), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (393, 403), False, 'from flask import Flask\n'), ((689, 740), 'sqlalchemy.create_engine', 'create_engine', (['"""sqlite:///data/DisasterResponse.db"""'], {}), "('sqlite:///data/DisasterResponse.db')\n", (702, 740), False, 'from sqlalchemy import create_engine\n'), ((746, 783), 'pandas.read_sql_table', 'pd.read_sql_table', (['"""messages"""', 'engine'], {}), "('messages', engine)\n", (763, 783), True, 'import pandas as pd\n'), ((806, 837), 'joblib.load', 'joblib.load', (['"""models/model.pkl"""'], {}), "('models/model.pkl')\n", (817, 837), False, 'import joblib\n'), ((438, 457), 'nltk.tokenize.word_tokenize', 'word_tokenize', (['text'], {}), '(text)\n', (451, 457), False, 'from nltk.tokenize import word_tokenize\n'), ((475, 494), 'nltk.stem.WordNetLemmatizer', 'WordNetLemmatizer', ([], {}), '()\n', (492, 494), False, 'from nltk.stem import WordNetLemmatizer\n'), ((2936, 2990), 'json.dumps', 'json.dumps', (['graphs'], {'cls': 'plotly.utils.PlotlyJSONEncoder'}), '(graphs, cls=plotly.utils.PlotlyJSONEncoder)\n', (2946, 2990), False, 'import json\n'), ((3048, 3108), 'flask.render_template', 'render_template', (['"""master.html"""'], {'ids': 'ids', 'graphJSON': 'graphJSON'}), "('master.html', ids=ids, graphJSON=graphJSON)\n", (3063, 3108), False, 'from flask import render_template, request, jsonify\n'), ((3244, 3273), 'flask.request.args.get', 'request.args.get', (['"""query"""', '""""""'], {}), "('query', '')\n", (3260, 3273), False, 'from flask import render_template, request, jsonify\n'), ((3530, 3620), 'flask.render_template', 'render_template', (['"""go.html"""'], {'query': 'query', 'classification_result': 'classification_results'}), "('go.html', query=query, classification_result=\n classification_results)\n", (3545, 3620), False, 'from flask import render_template, request, jsonify\n'), ((1480, 1531), 'plotly.graph_objs.Bar', 'Bar', ([], {'x': 'genre.values', 'y': 'genre.index', 'orientation': '"""h"""'}), "(x=genre.values, y=genre.index, orientation='h')\n", (1483, 1531), False, 'from plotly.graph_objs import Bar, Histogram\n'), ((1949, 1991), 'plotly.graph_objs.Bar', 'Bar', ([], {'x': 'histogram.index', 'y': 'histogram.values'}), '(x=histogram.index, y=histogram.values)\n', (1952, 1991), False, 'from plotly.graph_objs import Bar, Histogram\n'), ((2404, 2456), 'plotly.graph_objs.Bar', 'Bar', ([], {'x': 'total_category.index', 'y': 'total_category.values'}), '(x=total_category.index, y=total_category.values)\n', (2407, 2456), False, 'from plotly.graph_objs import Bar, Histogram\n')] |
pazamelin/openvino | tools/mo/openvino/tools/mo/front/mxnet/mx_reshape_reverse.py | b7e8ef910d7ed8e52326d14dc6fd53b71d16ed48 | # Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import numpy as np
from openvino.tools.mo.front.mxnet.mx_reshape_to_reshape import MXReshapeToReshape
from openvino.tools.mo.ops.Reverse import Reverse
from openvino.tools.mo.ops.mxreshape import MXReshape
from openvino.tools.mo.front.common.partial_infer.utils import int64_array
from openvino.tools.mo.front.common.replacement import FrontReplacementOp
from openvino.tools.mo.front.tf.graph_utils import create_op_node_with_second_input
from openvino.tools.mo.graph.graph import Graph
from openvino.tools.mo.ops.reshape import Reshape
from openvino.tools.mo.ops.shape import Shape
from openvino.tools.mo.ops.squeeze import Squeeze
from openvino.tools.mo.ops.unsqueeze import Unsqueeze
class MXReshapeReverse(FrontReplacementOp):
"""
    If a reshape layer has reverse=True, special values are inferred from right to left.
    The replacer simulates this behavior: the replaced subgraph reverses the input shape and the
    special dims, applies a forward reshape, and then reverses the resulting shape back.
    Resulting subgraph: reshape(reverse=True) -> reverse -> reshape(reverse=False) -> reverse.
"""
op = 'MXReshape'
enabled = True
def run_before(self):
return [MXReshapeToReshape]
def replace_sub_graph(self, graph: Graph, match: dict):
mxreshape = match['op']
if not mxreshape.reverse:
return
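        # forward part: compute the input shape and reverse it (Reverse works on axis 1,
        # hence the Unsqueeze/Squeeze pair), then reshape the input to that reversed shape
        # so the reshape with flipped target dims below is applied from right to left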
shape_node = Shape(graph, dict(name=mxreshape.id + '/Shape')).create_node()
forward_reverse_unsqueeze_node = create_op_node_with_second_input(graph, Unsqueeze, int64_array([0]),
dict(name=str(mxreshape.id) + '/ForwardUnsqueeze'))
forward_reverse_node = Reverse(graph, dict(name=mxreshape.id + '/ForwardReverse', axis=1)).create_node()
forward_reverse_squeeze_node = create_op_node_with_second_input(graph, Squeeze, int64_array([0]),
dict(name=str(mxreshape.id) + '/ForwardSqueeze'))
reshape_node = Reshape(graph, dict(name=mxreshape.id + '/Reshape')).create_node()
shape_node.in_port(0).connect(mxreshape.in_port(0).get_source())
mxreshape.in_port(0).get_connection().set_destination(reshape_node.in_port(0))
forward_reverse_unsqueeze_node.in_port(0).connect(shape_node.out_port(0))
forward_reverse_node.in_port(0).connect(forward_reverse_unsqueeze_node.out_port(0))
forward_reverse_squeeze_node.in_port(0).connect(forward_reverse_node.out_port(0))
reshape_node.in_port(1).connect(forward_reverse_squeeze_node.out_port(0))
reshape_shape_node = create_op_node_with_second_input(graph, Reshape, int64_array(np.flip(mxreshape.dim, 0)),
dict(name=str(mxreshape.id) + '/ReshapeShape'))
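        # MXNet special values (-2, -3, -4) cannot be expressed by a plain Reshape,
        # so fall back to the MXReshape op with the flipped dims in that case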
if np.sum(np.in1d([-2, -3, -4], mxreshape.dim), axis=0):
reshape_shape_node = MXReshape(graph, dict(name=mxreshape.id + '/Reshape',
dim=int64_array(np.flip(mxreshape.dim, 0)))).create_node()
reshape_shape_node.in_port(0).connect(reshape_node.out_port(0))
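        # backward part: reverse the shape of the intermediate result back, so the
        # final output has its dimensions in the originally requested order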
backward_shape_node = Shape(graph, dict(name=mxreshape.id + '/BackwardShape')).create_node()
backward_reverse_unsqueeze_node = create_op_node_with_second_input(graph, Unsqueeze, int64_array([0]),
dict(name=str(mxreshape.id) + '/BackwardUnsqueeze'))
backward_reverse_node = Reverse(graph, dict(name=mxreshape.id + '/BackwardReverse', axis=1)).create_node()
backward_reverse_squeeze_node = create_op_node_with_second_input(graph, Squeeze, int64_array([0]),
dict(name=str(mxreshape.id) + '/BackwardSqueeze'))
backward_reshape_node = Reshape(graph, dict(name=mxreshape.id + '/BackwardReshape')).create_node()
backward_shape_node.in_port(0).connect(reshape_shape_node.out_port(0))
backward_reverse_unsqueeze_node.in_port(0).connect(backward_shape_node.out_port(0))
backward_reverse_node.in_port(0).connect(backward_reverse_unsqueeze_node.out_port(0))
backward_reverse_squeeze_node.in_port(0).connect(backward_reverse_node.out_port(0))
backward_reshape_node.in_port(0).connect(reshape_shape_node.out_port(0))
backward_reshape_node.in_port(1).connect(backward_reverse_squeeze_node.out_port(0))
mxreshape.out_port(0).get_connection().set_source(backward_reshape_node.out_port(0))
| [((1606, 1622), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[0]'], {}), '([0])\n', (1617, 1622), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((1952, 1968), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[0]'], {}), '([0])\n', (1963, 1968), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((2936, 2972), 'numpy.in1d', 'np.in1d', (['[-2, -3, -4]', 'mxreshape.dim'], {}), '([-2, -3, -4], mxreshape.dim)\n', (2943, 2972), True, 'import numpy as np\n'), ((3434, 3450), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[0]'], {}), '([0])\n', (3445, 3450), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((3784, 3800), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[0]'], {}), '([0])\n', (3795, 3800), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((2780, 2805), 'numpy.flip', 'np.flip', (['mxreshape.dim', '(0)'], {}), '(mxreshape.dim, 0)\n', (2787, 2805), True, 'import numpy as np\n'), ((3123, 3148), 'numpy.flip', 'np.flip', (['mxreshape.dim', '(0)'], {}), '(mxreshape.dim, 0)\n', (3130, 3148), True, 'import numpy as np\n')] |
MattMarti/Lambda-Trajectory-Sim | Python/Simulation/Numerical_Methods/test_cubic_spline_solve.py | 4155f103120bd49221776cc3b825b104f36817f2 | import unittest;
import numpy as np;
import scipy as sp;
from cubic_spline_solve import cubic_spline_solve;
from cubic_spline_fun import cubic_spline_fun;
class Test_cubic_spline_solve(unittest.TestCase):
'''
    Test_cubic_spline_solve
Test case for the cubic spline solver function. This function just solves
for the spline data, so that the spline can be precomputed before code is
run. This improves code performance by removing the need to invert a
matrix every time the spline function is called.
@author: Matt Marti
@date: 2019-06-16
'''
def test_nominal_01(self):
'''Test the spline solve for nominal test case'''
# Function handles for function and derivatives
f = lambda x : sp.sin(x);
df = lambda x : sp.cos(x);
# x from 0 to 30 in the correct format
xrange = np.linspace(0, 10, 20);
xkvec = np.zeros((1, xrange.shape[0]));
for i in range(0, xrange.shape[0]):
xkvec[0,i] = xrange[i];
#
# Generate function values dataset
fkvec = f(xkvec);
xinter = np.linspace(0, 10, 1000);
# Generate parameters for clamped boundary conditions
fslope = np.ndarray((1,2));
fslope[0,0] = sp.cos(xkvec[0,0]);
fslope[0,1] = sp.cos(xkvec[0,-1]);
# Compute already tested spline
_, _, akvec, bkvec, ckvec, dkvec \
= cubic_spline_fun(xkvec, fkvec, xinter, fslope);
splineDataTrue = np.zeros((1, xkvec.shape[1], 5));
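        # spline data layout: [:, :, 0:4] hold the a, b, c, d coefficients per knot,
        # [:, :, 4] holds the knot locations xk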
splineDataTrue[0,:,0] = akvec.squeeze();
splineDataTrue[0,:,1] = bkvec.squeeze();
splineDataTrue[0,:,2] = ckvec.squeeze();
splineDataTrue[0,:,3] = dkvec.squeeze();
splineDataTrue[0,:,4] = xkvec.squeeze();
# Run spline solve
splineDataMat = cubic_spline_solve( xkvec, fkvec, fslope );
# Test Function truth values
error = splineDataMat - splineDataTrue;
maxerr = np.max(np.abs(error));
self.assertLess(maxerr, 1e-12, 'Spline error too high');
#
def test_multiple_01(self):
'''Test the spline works for a two dimensional case'''
# Definition for two dimensional function output
def func(x):
if type(x) is not np.ndarray:
f = np.zeros((2,1));
else:
f = np.zeros((2,x.shape[0]));
#
f[0,:] = np.sin(x);
f[1,:] = -10*x**2 + 50*x + 1000;
return f;
#
# Definition for derivative function
def dfunc(x):
if type(x) is not np.ndarray:
df = np.zeros((2,1));
else:
df = np.zeros((2,x.shape[0]));
#
df[0,:] = np.cos(x);
df[1,:] = -20*x + 50;
return df;
#
# Given
f = lambda x : func(x);
df = lambda x : dfunc(x);
xkvec = np.linspace(0, 10, 20);
fkvec = f(xkvec);
xinter = np.linspace(0, 10, 1000);
fslope = np.ndarray((2,2)); # Clambed B.C.s
fslope[:,0] = df(xkvec[0]).squeeze();
fslope[:,1] = df(xkvec[-1]).squeeze();
# Preallocate truth spline data
m = 2;
n = xkvec.shape[0];
splineDataTrue = np.zeros((m, n, 5));
splineDataTrue[0,:,4] = xkvec;
# Run true spline for first dataset
_, _, akvec, bkvec, ckvec, dkvec \
= cubic_spline_fun(xkvec, fkvec[0,:], xinter, fslope[0,:]);
splineDataTrue[0,:,0] = akvec.squeeze();
splineDataTrue[0,:,1] = bkvec.squeeze();
splineDataTrue[0,:,2] = ckvec.squeeze();
splineDataTrue[0,:,3] = dkvec.squeeze();
# Run true spline for second dataset
_, _, akvec, bkvec, ckvec, dkvec \
= cubic_spline_fun(xkvec, fkvec[1,:], xinter, fslope[1,:]);
splineDataTrue[1,:,0] = akvec.squeeze();
splineDataTrue[1,:,1] = bkvec.squeeze();
splineDataTrue[1,:,2] = ckvec.squeeze();
splineDataTrue[1,:,3] = dkvec.squeeze();
# Run new spline
splineDataMat = cubic_spline_solve( xkvec, fkvec, fslope );
# Test Function truth values
error = splineDataMat - splineDataTrue;
maxerr = np.max(np.abs(error));
self.assertLess(maxerr, 1e-12, 'Spline error too high');
#
def test_types(self):
'''Test that the function raises type errors on bad input'''
# Function handles for function and derivatives
f = lambda x : sp.sin(x);
df = lambda x : sp.cos(x);
# x from 0 to 30 in the correct format
xrange = np.linspace(0, 10, 20);
xkvec = np.zeros((1, xrange.shape[0]));
for i in range(0, xrange.shape[0]):
xkvec[0,i] = xrange[i];
#
# Generate function values dataset
fkvec = f(xkvec);
xinter = np.linspace(0, 10, 1000);
# Generate parameters for clamped boundary conditions
fslope = np.ndarray((1,2));
fslope[0,0] = sp.cos(xkvec[0,0]);
fslope[0,1] = sp.cos(xkvec[0,-1]);
# Run function without errors
splineDataMat = cubic_spline_solve( xkvec, fkvec, fslope );
# Test with various inputs for xkvec
self.assertRaises(TypeError, cubic_spline_solve, True, fkvec, fslope);
self.assertRaises(TypeError, cubic_spline_solve, 0.1, fkvec, fslope);
self.assertRaises(TypeError, cubic_spline_solve, "AA", fkvec, fslope);
self.assertRaises(TypeError, cubic_spline_solve, 'A', fkvec, fslope);
        # Test with various inputs for fkvec
self.assertRaises(TypeError, cubic_spline_solve, xkvec, True, fslope);
self.assertRaises(TypeError, cubic_spline_solve, xkvec, 0.1, fslope);
self.assertRaises(TypeError, cubic_spline_solve, xkvec, "AA", fslope);
self.assertRaises(TypeError, cubic_spline_solve, xkvec, 'A', fslope);
# Test with various inputs for fslope
self.assertRaises(TypeError, cubic_spline_solve, xkvec, fkvec, True);
self.assertRaises(TypeError, cubic_spline_solve, xkvec, fkvec, 0.1);
self.assertRaises(TypeError, cubic_spline_solve, xkvec, fkvec, "AA");
self.assertRaises(TypeError, cubic_spline_solve, xkvec, fkvec, 'A');
#
# | [((887, 909), 'numpy.linspace', 'np.linspace', (['(0)', '(10)', '(20)'], {}), '(0, 10, 20)\n', (898, 909), True, 'import numpy as np\n'), ((927, 957), 'numpy.zeros', 'np.zeros', (['(1, xrange.shape[0])'], {}), '((1, xrange.shape[0]))\n', (935, 957), True, 'import numpy as np\n'), ((1144, 1168), 'numpy.linspace', 'np.linspace', (['(0)', '(10)', '(1000)'], {}), '(0, 10, 1000)\n', (1155, 1168), True, 'import numpy as np\n'), ((1258, 1276), 'numpy.ndarray', 'np.ndarray', (['(1, 2)'], {}), '((1, 2))\n', (1268, 1276), True, 'import numpy as np\n'), ((1299, 1318), 'scipy.cos', 'sp.cos', (['xkvec[0, 0]'], {}), '(xkvec[0, 0])\n', (1305, 1318), True, 'import scipy as sp\n'), ((1341, 1361), 'scipy.cos', 'sp.cos', (['xkvec[0, -1]'], {}), '(xkvec[0, -1])\n', (1347, 1361), True, 'import scipy as sp\n'), ((1468, 1514), 'cubic_spline_fun.cubic_spline_fun', 'cubic_spline_fun', (['xkvec', 'fkvec', 'xinter', 'fslope'], {}), '(xkvec, fkvec, xinter, fslope)\n', (1484, 1514), False, 'from cubic_spline_fun import cubic_spline_fun\n'), ((1541, 1573), 'numpy.zeros', 'np.zeros', (['(1, xkvec.shape[1], 5)'], {}), '((1, xkvec.shape[1], 5))\n', (1549, 1573), True, 'import numpy as np\n'), ((1880, 1920), 'cubic_spline_solve.cubic_spline_solve', 'cubic_spline_solve', (['xkvec', 'fkvec', 'fslope'], {}), '(xkvec, fkvec, fslope)\n', (1898, 1920), False, 'from cubic_spline_solve import cubic_spline_solve\n'), ((3024, 3046), 'numpy.linspace', 'np.linspace', (['(0)', '(10)', '(20)'], {}), '(0, 10, 20)\n', (3035, 3046), True, 'import numpy as np\n'), ((3091, 3115), 'numpy.linspace', 'np.linspace', (['(0)', '(10)', '(1000)'], {}), '(0, 10, 1000)\n', (3102, 3115), True, 'import numpy as np\n'), ((3134, 3152), 'numpy.ndarray', 'np.ndarray', (['(2, 2)'], {}), '((2, 2))\n', (3144, 3152), True, 'import numpy as np\n'), ((3379, 3398), 'numpy.zeros', 'np.zeros', (['(m, n, 5)'], {}), '((m, n, 5))\n', (3387, 3398), True, 'import numpy as np\n'), ((3549, 3611), 'cubic_spline_fun.cubic_spline_fun', 'cubic_spline_fun', (['xkvec', 'fkvec[(0), :]', 'xinter', 'fslope[(0), :]'], {}), '(xkvec, fkvec[(0), :], xinter, fslope[(0), :])\n', (3565, 3611), False, 'from cubic_spline_fun import cubic_spline_fun\n'), ((3914, 3976), 'cubic_spline_fun.cubic_spline_fun', 'cubic_spline_fun', (['xkvec', 'fkvec[(1), :]', 'xinter', 'fslope[(1), :]'], {}), '(xkvec, fkvec[(1), :], xinter, fslope[(1), :])\n', (3930, 3976), False, 'from cubic_spline_fun import cubic_spline_fun\n'), ((4226, 4266), 'cubic_spline_solve.cubic_spline_solve', 'cubic_spline_solve', (['xkvec', 'fkvec', 'fslope'], {}), '(xkvec, fkvec, fslope)\n', (4244, 4266), False, 'from cubic_spline_solve import cubic_spline_solve\n'), ((4782, 4804), 'numpy.linspace', 'np.linspace', (['(0)', '(10)', '(20)'], {}), '(0, 10, 20)\n', (4793, 4804), True, 'import numpy as np\n'), ((4822, 4852), 'numpy.zeros', 'np.zeros', (['(1, xrange.shape[0])'], {}), '((1, xrange.shape[0]))\n', (4830, 4852), True, 'import numpy as np\n'), ((5039, 5063), 'numpy.linspace', 'np.linspace', (['(0)', '(10)', '(1000)'], {}), '(0, 10, 1000)\n', (5050, 5063), True, 'import numpy as np\n'), ((5153, 5171), 'numpy.ndarray', 'np.ndarray', (['(1, 2)'], {}), '((1, 2))\n', (5163, 5171), True, 'import numpy as np\n'), ((5194, 5213), 'scipy.cos', 'sp.cos', (['xkvec[0, 0]'], {}), '(xkvec[0, 0])\n', (5200, 5213), True, 'import scipy as sp\n'), ((5236, 5256), 'scipy.cos', 'sp.cos', (['xkvec[0, -1]'], {}), '(xkvec[0, -1])\n', (5242, 5256), True, 'import scipy as sp\n'), ((5328, 5368), 'cubic_spline_solve.cubic_spline_solve', 
'cubic_spline_solve', (['xkvec', 'fkvec', 'fslope'], {}), '(xkvec, fkvec, fslope)\n', (5346, 5368), False, 'from cubic_spline_solve import cubic_spline_solve\n'), ((768, 777), 'scipy.sin', 'sp.sin', (['x'], {}), '(x)\n', (774, 777), True, 'import scipy as sp\n'), ((803, 812), 'scipy.cos', 'sp.cos', (['x'], {}), '(x)\n', (809, 812), True, 'import scipy as sp\n'), ((2042, 2055), 'numpy.abs', 'np.abs', (['error'], {}), '(error)\n', (2048, 2055), True, 'import numpy as np\n'), ((2494, 2503), 'numpy.sin', 'np.sin', (['x'], {}), '(x)\n', (2500, 2503), True, 'import numpy as np\n'), ((2839, 2848), 'numpy.cos', 'np.cos', (['x'], {}), '(x)\n', (2845, 2848), True, 'import numpy as np\n'), ((4388, 4401), 'numpy.abs', 'np.abs', (['error'], {}), '(error)\n', (4394, 4401), True, 'import numpy as np\n'), ((4663, 4672), 'scipy.sin', 'sp.sin', (['x'], {}), '(x)\n', (4669, 4672), True, 'import scipy as sp\n'), ((4698, 4707), 'scipy.cos', 'sp.cos', (['x'], {}), '(x)\n', (4704, 4707), True, 'import scipy as sp\n'), ((2378, 2394), 'numpy.zeros', 'np.zeros', (['(2, 1)'], {}), '((2, 1))\n', (2386, 2394), True, 'import numpy as np\n'), ((2433, 2458), 'numpy.zeros', 'np.zeros', (['(2, x.shape[0])'], {}), '((2, x.shape[0]))\n', (2441, 2458), True, 'import numpy as np\n'), ((2721, 2737), 'numpy.zeros', 'np.zeros', (['(2, 1)'], {}), '((2, 1))\n', (2729, 2737), True, 'import numpy as np\n'), ((2777, 2802), 'numpy.zeros', 'np.zeros', (['(2, x.shape[0])'], {}), '((2, x.shape[0]))\n', (2785, 2802), True, 'import numpy as np\n')] |
IQUBE-X/passGenerator | PassWord.py | a56a5928c1e8ee503d2757ecf0ab4108a52ec677 | # PassWord - The Safe Password Generator App!
# importing the tkinter module for GUI
from tkinter import *
# importing the message box widget from tkinter
from tkinter import messagebox
# importing sqlite3 for database
import sqlite3
# importing random for password generation
import random
# creating fonts
font = ('Fixedsys', 10)
font2 = ('Comic Sans MS', 9)
font3 = ('System', 9)
font4 = ('Two Cen MT', 9)
# creating a database and establishing a connection
conn = sqlite3.connect('password.db')
# creating a cursor to navigate through database
c = conn.cursor()
# creating the table
'''
c.execute("""CREATE TABLE passwords (
password text
)""")
'''
# defining the root variable
root = Tk()
# Naming the app
root.title('PassWord')
# creating a label frame to organize content
label_frame = LabelFrame(root, padx=10, pady=10, text='Password Generator', font=font)
# printing the label frame onto the screen or window
label_frame.grid(row=0, column=0, columnspan=1, padx=10, pady=10, sticky=E + W)
# creating a separate label frame to perform delete functions
delete_labelframe = LabelFrame(root, text='Delete Password', padx=10, pady=10, font=font4)
# printing delete labelframe onto the screen
delete_labelframe.grid(row=5, column=0, columnspan=1, padx=10, pady=10, sticky=E + W)
# making the text box where password is going to be displayed
e = Entry(label_frame, fg='black', bg='white')
# printing the text box to the screen
e.grid(row=0, column=0, padx=10, pady=10, columnspan=1)
# (for the delete function) to give information on input for delete function
info = Label(delete_labelframe, text='Password ID', fg='black', font=font2)
# printing the label onto the screen
info.grid(row=6, column=0, pady=10)
# making the entry for user to input which password
e2 = Entry(delete_labelframe, fg='black', bg='white')
# printing the entry onto the screen
e2.grid(row=6, column=1, pady=10)
# making the password generate function
def generate():
# creating lists
    lowercase_letters = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's',
                         't',
                         'u', 'v', 'w', 'x', 'y', 'z']
    # creating lists
    uppercase_letters = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S',
                         'T', 'U', 'V', 'W', 'X', 'Y', 'Z']
    # creating lists
    symbols_list = ['-', '@', '!', '$', '%', '&', '?', '#', '^']
    # creating lists
    numbers_list = ['1', '2', '3', '4', '5', '6', '7', '8', '9', '0']
# generating a random value from the lists
lowercase_letter = random.choice(lowercase_letters)
# generating a random value from the lists
lowercase_letter2 = random.choice(lowercase_letters)
# generating a random value from the lists
uppercase_letter = random.choice(uppercase_letters)
# generating a random value from the lists
uppercase2_letter = random.choice(uppercase_letters)
# generating a random value from the lists
symbol = random.choice(symbols_list)
# generating a random value from the lists
symbol2 = random.choice(symbols_list)
# generating a random value from the lists
number = random.choice(numbers_list)
# generating a random value from the lists
number2 = random.choice(numbers_list)
# creating a password list made of random values from previous lists
password = [lowercase_letter, uppercase_letter, uppercase2_letter, lowercase_letter2, symbol, symbol2, number,
number2]
# shuffling password list
password1 = random.sample(password, 8)
# concatenating and making final list
final_password = password1[0] + password1[1] + password1[2] + password1[3] + password1[4] + password1[5] + \
password1[6] + password1[7]
# deleting previous item from entry
e.delete(0, END)
# inserting the final password
e.insert(0, final_password)
# making a function to save the password into the database
def save_password():
conn = sqlite3.connect('password.db')
c = conn.cursor()
c.execute("INSERT INTO passwords VALUES (?)", (e.get(),))
e.delete(0, END)
conn.commit()
conn.close()
# making a function to show all the saved passwords
def show_password():
global passcode_label
conn = sqlite3.connect('password.db')
c = conn.cursor()
c.execute("SELECT rowid, * FROM passwords")
passcodes = c.fetchall()
print_code = ''
for passcode in passcodes:
print_code += str(passcode[0]) + '.' + ' ' + str(passcode[1]) + '\n'
passcode_label = Text(label_frame, height=15, width=25)
passcode_label.configure(state='normal')
passcode_label.insert(1.0, print_code)
passcode_label.grid(row=5, column=0, padx=10, pady=10)
passcode_label.configure(state='disabled')
conn.commit()
conn.close()
# making a function to hide the saved passwords
def hide_password():
passcode_label.destroy()
# making a function to delete passwords from database
def delete():
conn = sqlite3.connect('password.db')
c = conn.cursor()
c.execute("DELETE from passwords WHERE oid = (?)", (e2.get(),))
e2.delete(0, END)
passcode_label.destroy()
conn.commit()
conn.close()
# making a function to delete all the passwords in the database
def delete_all():
global number_of_passwords
conn = sqlite3.connect('password.db')
c = conn.cursor()
c.execute("SELECT rowid FROM passwords")
number_of_passwords = c.fetchall()
num_of_passwords = len(number_of_passwords)
confirmation = messagebox.askyesno('Delete All Passwords?',
'You have chosen to delete ' + str(
num_of_passwords) + ' passwords. This action cannot be reversed. Do you wish to proceed?')
if confirmation == 1:
c.execute("DELETE FROM passwords")
conn.commit()
conn.close()
# button for generating password
generate_password = Button(label_frame, text='Generate Strong Password', command=generate, font=font2)
# printing the button onto the screen
generate_password.grid(row=1, padx=10, pady=10, column=0)
# button to save password
save = Button(label_frame, text='Save Password', command=save_password, font=font2)
# printing the button onto the screen
save.grid(row=2, padx=10, pady=10, column=0)
# making a button to show all the passwords
show = Button(label_frame, text='Show Passwords', command=show_password, font=font2)
# printing the button onto the screen
show.grid(row=4, padx=10, pady=10, column=0)
# making a button to hide the shown passwords
hide = Button(label_frame, text='Hide Passwords', command=hide_password, font=font2)
# printing the button onto the screen
hide.grid(row=6, column=0, padx=10, pady=10)
# making a button to delete a password
delete = Button(delete_labelframe, text='Delete Password', command=delete, font=font2)
# printing the button onto the screen
delete.grid(row=8, padx=10, pady=10, column=1)
# making a button to delete all the passwords
delete_all = Button(delete_labelframe, text='Delete All', command=delete_all, fg='dark red', width=20, anchor=CENTER,
font=font3)
# printing the button onto the screen
delete_all.grid(row=9, column=1, padx=10, pady=10, ipadx=15)
# committing the changes to the database
conn.commit()
# closing the connection with database
conn.close()
# making the final loop
root.mainloop()
| [((496, 526), 'sqlite3.connect', 'sqlite3.connect', (['"""password.db"""'], {}), "('password.db')\n", (511, 526), False, 'import sqlite3\n'), ((2799, 2831), 'random.choice', 'random.choice', (['lowercase_letters'], {}), '(lowercase_letters)\n', (2812, 2831), False, 'import random\n'), ((2907, 2939), 'random.choice', 'random.choice', (['lowercase_letters'], {}), '(lowercase_letters)\n', (2920, 2939), False, 'import random\n'), ((3014, 3046), 'random.choice', 'random.choice', (['uppercase_letters'], {}), '(uppercase_letters)\n', (3027, 3046), False, 'import random\n'), ((3122, 3154), 'random.choice', 'random.choice', (['uppercase_letters'], {}), '(uppercase_letters)\n', (3135, 3154), False, 'import random\n'), ((3219, 3246), 'random.choice', 'random.choice', (['symbols_list'], {}), '(symbols_list)\n', (3232, 3246), False, 'import random\n'), ((3312, 3339), 'random.choice', 'random.choice', (['symbols_list'], {}), '(symbols_list)\n', (3325, 3339), False, 'import random\n'), ((3404, 3431), 'random.choice', 'random.choice', (['numbers_list'], {}), '(numbers_list)\n', (3417, 3431), False, 'import random\n'), ((3497, 3524), 'random.choice', 'random.choice', (['numbers_list'], {}), '(numbers_list)\n', (3510, 3524), False, 'import random\n'), ((3793, 3819), 'random.sample', 'random.sample', (['password', '(8)'], {}), '(password, 8)\n', (3806, 3819), False, 'import random\n'), ((4263, 4293), 'sqlite3.connect', 'sqlite3.connect', (['"""password.db"""'], {}), "('password.db')\n", (4278, 4293), False, 'import sqlite3\n'), ((4557, 4587), 'sqlite3.connect', 'sqlite3.connect', (['"""password.db"""'], {}), "('password.db')\n", (4572, 4587), False, 'import sqlite3\n'), ((5309, 5339), 'sqlite3.connect', 'sqlite3.connect', (['"""password.db"""'], {}), "('password.db')\n", (5324, 5339), False, 'import sqlite3\n'), ((5660, 5690), 'sqlite3.connect', 'sqlite3.connect', (['"""password.db"""'], {}), "('password.db')\n", (5675, 5690), False, 'import sqlite3\n')] |
hotternative/leetcode | 1805_number_of_different_integers_in_a_string.py | d0ec225abc2ada1398666641c7872f3eb889e7ed | from string import ascii_lowercase
ts = 'a123bc34d8ef34'
cur = []
res = set()
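# scan the string and accumulate runs of digits in cur; a letter terminates the
# current run, which is added to the set via int() so leading zeros collapse
# ("001" and "1" count as the same integer)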
for c in ts:
if c in ascii_lowercase:
if cur:
s = ''.join(cur)
res.add(int(s))
cur = []
else:
cur.append(c)
else:
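    # for-else: the loop finished without break, flush any trailing digit run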
if cur:
s = ''.join(cur)
res.add(int(s))
print(res)
| [] |
ahmedriaz9908/memeapiiz | app.py | eef98f837f2ec83edc3dd004f19dcefda9b582a5 | from flask import Flask, render_template, jsonify
from reddit_handler import *
import random
app = Flask(__name__)
meme_subreddits = ['izlam']
@app.route('/')
def index():
return render_template('index.html')
@app.route('/meme')
def one_post():
sub = random.choice(meme_subreddits)
re = get_posts(sub, 100)
r = random.choice(re)
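    # resample until the chosen post links directly to an image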
while not is_img_link(r[1]):
r = random.choice(re)
return jsonify({
'title': r[0],
'url': r[1],
'postLink': r[2],
'subreddit': sub
})
@app.route('/sample')
def sample():
re = get_posts(random.choice(meme_subreddits), 100)
r = random.choice(re)
while not is_img_link(r[1]):
r = random.choice(re)
return render_template('sample.html', title=r[0], img_url=r[1], shortlink=r[2])
@app.route('/test')
def test():
re = get_posts(random.choice(meme_subreddits), 100)
return render_template('test.html', re=re)
@app.route('/<something>')
def not_found(something):
return render_template('not_found.html')
| [((89, 104), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (94, 104), False, 'from flask import Flask, render_template, jsonify\n'), ((183, 212), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (198, 212), False, 'from flask import Flask, render_template, jsonify\n'), ((437, 510), 'flask.jsonify', 'jsonify', (["{'title': r[0], 'url': r[1], 'postLink': r[2], 'subreddit': sub}"], {}), "({'title': r[0], 'url': r[1], 'postLink': r[2], 'subreddit': sub})\n", (444, 510), False, 'from flask import Flask, render_template, jsonify\n'), ((763, 835), 'flask.render_template', 'render_template', (['"""sample.html"""'], {'title': 'r[0]', 'img_url': 'r[1]', 'shortlink': 'r[2]'}), "('sample.html', title=r[0], img_url=r[1], shortlink=r[2])\n", (778, 835), False, 'from flask import Flask, render_template, jsonify\n'), ((945, 980), 'flask.render_template', 'render_template', (['"""test.html"""'], {'re': 're'}), "('test.html', re=re)\n", (960, 980), False, 'from flask import Flask, render_template, jsonify\n'), ((1052, 1085), 'flask.render_template', 'render_template', (['"""not_found.html"""'], {}), "('not_found.html')\n", (1067, 1085), False, 'from flask import Flask, render_template, jsonify\n')] |
e-davydenkova/SeleniumWebDriver_Training | 10_compare_between_main_product_pages.py | e03cfbe4ea74ddc8f0c575d8fcaa3a6c7ccb7d0a | import pytest
from selenium import webdriver
import re
@pytest.fixture
def driver(request):
wd = webdriver.Chrome()
wd.get("http://localhost/litecart/en/")
request.addfinalizer(wd.quit)
return wd
# check that product names are identical on the main page and on product page
def test_product_names(driver):
# get a product name on the main page
main_name = driver.find_element_by_css_selector("#box-campaigns div li.product.column.shadow.hover-light .name").text
# get a product name on a product page
driver.find_element_by_css_selector("#box-campaigns div li.product.column.shadow.hover-light").click()
product_name = driver.find_element_by_css_selector("#box-product .title").text
assert main_name == product_name, "Product names on the main page and on product page are NOT identical"
# check that prices (regular and campaign) are identical on the main page and on product page
def test_prices(driver):
prices = driver.find_element_by_css_selector("#box-campaigns div li.product.column.shadow.hover-light div.price-wrapper")
# get a regular price on the main page
main_regular_price = prices.find_element_by_css_selector(".regular-price").text
# get a campaign price on the main page
main_campaign_price = prices.find_element_by_css_selector(".campaign-price").text
# open the product page
driver.find_element_by_css_selector("#box-campaigns div li.product.column.shadow.hover-light").click()
# get a regular price on a product page
product_regular_price = driver.find_element_by_css_selector("#box-product .price-wrapper .regular-price").text
# get a campaign price on a product page
product_campaign_price = driver.find_element_by_css_selector("#box-product .price-wrapper .campaign-price").text
assert main_regular_price == product_regular_price, "Regular prices on the main page and on the product page " \
"are NOT identical"
assert main_campaign_price == product_campaign_price, "Campaign prices on the main page and on the product page " \
"are NOT identical"
# check color of regular and campaign prices and their attributes on the main page
def test_colors_main_page(driver):
prices = driver.find_element_by_css_selector("#box-campaigns div li.product.column.shadow.hover-light div.price-wrapper")
# get a color of the regular price on the main page
regular_color = prices.find_element_by_css_selector(".regular-price").value_of_css_property("color")
# verify that the regular price is grey (values of R,G,B are identical)
    color_list = re.findall(r'\d+', regular_color)
assert(color_list[0] == color_list[1] == color_list[2]), "The regular price on the main page is NOT grey"
# get a color of the campaign price on the main page
campaign_color = prices.find_element_by_css_selector(".campaign-price").value_of_css_property("color")
# verify that the campaign price is red (values of G and B are 0)
    color_list = re.findall(r'\d+', campaign_color)
assert (color_list[1] == '0') and (color_list[2] == '0'), "The campaign price on the main page is NOT red"
regular_attr = prices.find_element_by_css_selector(".regular-price").value_of_css_property("text-decoration-line")
assert regular_attr == 'line-through', "Regular price is NOT line-through on the main page"
campaign_attr = prices.find_element_by_css_selector(".campaign-price").value_of_css_property("font-weight")
assert (campaign_attr == 'bold') or (campaign_attr >= '700'), "Campaign price is NOT bold on the main page"
# check color of regular and campaign prices and their attributes on the product page
def test_colors_product_page(driver):
# open the product page
driver.find_element_by_css_selector("#box-campaigns div li.product.column.shadow.hover-light").click()
prices = driver.find_element_by_css_selector("#box-product .price-wrapper")
# get a color of the regular price on the main page
regular_color = prices.find_element_by_css_selector(".regular-price").value_of_css_property("color")
# verify that the regular price is grey (values of R,G,B are identical)
    color_list = re.findall(r'\d+', regular_color)
assert (color_list[0] == color_list[1] == color_list[2]), "The regular price on the product page is NOT grey"
# get a color of the campaign price on the main page
campaign_color = prices.find_element_by_css_selector(".campaign-price").value_of_css_property("color")
# verify that the campaign price is red (values of G and B are 0)
    color_list = re.findall(r'\d+', campaign_color)
assert (color_list[1] == '0') and (color_list[2] == '0'), "The campaign price on the product page is NOT red"
# verify that the regular price is line-through
regular_attr = prices.find_element_by_css_selector(".regular-price").value_of_css_property(
"text-decoration-line")
assert regular_attr == 'line-through', "Regular price is NOT line-through on the product page"
# verify that the campaign price is bold
campaign_attr = prices.find_element_by_css_selector(".campaign-price").value_of_css_property(
"font-weight")
assert (campaign_attr == 'bold') or (campaign_attr >= '700'), "Campaign price is NOT bold on the product page"
# check that campaign price is bigger than regular prise on the main and product pages
def test_size_comparison(driver):
prices = driver.find_element_by_css_selector("#box-campaigns div li.product.column.shadow.hover-light div.price-wrapper")
regular_size = prices.find_element_by_css_selector(".regular-price").size
campaign_size = prices.find_element_by_css_selector(".campaign-price").size
assert (campaign_size['height'] > regular_size['height']) and \
(campaign_size['width'] > regular_size['width']), \
"Size of campaign price is NOT bigger than size of regular price on the main page"
# open the product page
driver.find_element_by_css_selector("#box-campaigns div li.product.column.shadow.hover-light").click()
prices = driver.find_element_by_css_selector("#box-product .price-wrapper")
regular_size = prices.find_element_by_css_selector(".regular-price").size
campaign_size = prices.find_element_by_css_selector(".campaign-price").size
assert (campaign_size['height'] > regular_size['height']) and \
(campaign_size['width'] > regular_size['width']), \
"Size of campaign price is NOT bigger than size of regular price on the product page"
| [((102, 120), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {}), '()\n', (118, 120), False, 'from selenium import webdriver\n'), ((2691, 2724), 're.findall', 're.findall', (['"""\\\\d+"""', 'regular_color'], {}), "('\\\\d+', regular_color)\n", (2701, 2724), False, 'import re\n'), ((3086, 3120), 're.findall', 're.findall', (['"""\\\\d+"""', 'campaign_color'], {}), "('\\\\d+', campaign_color)\n", (3096, 3120), False, 'import re\n'), ((4269, 4302), 're.findall', 're.findall', (['"""\\\\d+"""', 'regular_color'], {}), "('\\\\d+', regular_color)\n", (4279, 4302), False, 'import re\n'), ((4669, 4703), 're.findall', 're.findall', (['"""\\\\d+"""', 'campaign_color'], {}), "('\\\\d+', campaign_color)\n", (4679, 4703), False, 'import re\n')] |
iahuang/pyrite | pyrite/llvm.py | 0db83aad6aa8f245edf13d393f65d408eb956c4d | import shutil
from pyrite import fs
from pyrite.command_line import run_command
from pyrite.errors import UserError
from pyrite.globals import Globals
from os.path import join
class LLVMInterface:
_clang_path: str
def __init__(self):
self._clang_path = self._get_clang_path()
def _get_clang_path(self) -> str:
clang_path = shutil.which(Globals.get_compiler_options().clang_command)
if not clang_path:
raise UserError(
"Pyrite requires clang to be installed, but no such installation was found."
)
return clang_path
def compile_ll(self, source: str, output_path: str) -> None:
"""
Compile the contents of [source] as LLVM IR code, outputting a binary
specified by [output_path]. If any errors arise in compilation,
raise an error.
"""
ir_path = join(self.get_build_directory(), "build.ll")
fs.write_file(
path=ir_path,
data=source
)
result = run_command([self._clang_path, ir_path, "-o", output_path])
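        # clang reports diagnostics on stderr; any output there is treated as a failed build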
if result.stderr:
fs.write_file(
path=join(self.get_build_directory(), "llvm_error.txt"),
data=result.stderr
)
raise UserError(
"An unexpected error occurred during the compilation process. A detailed report has been written to {}".format(
self.get_build_directory()
)
)
def get_build_directory(self) -> str:
"""
Pyrite uses a temporary working "build" directory to store files needed for LLVM/Clang
"""
cwd = Globals.get_compiler_options().cwd
return join(cwd, "_build")
| [((953, 993), 'pyrite.fs.write_file', 'fs.write_file', ([], {'path': 'ir_path', 'data': 'source'}), '(path=ir_path, data=source)\n', (966, 993), False, 'from pyrite import fs\n'), ((1046, 1105), 'pyrite.command_line.run_command', 'run_command', (["[self._clang_path, ir_path, '-o', output_path]"], {}), "([self._clang_path, ir_path, '-o', output_path])\n", (1057, 1105), False, 'from pyrite.command_line import run_command\n'), ((1755, 1774), 'os.path.join', 'join', (['cwd', '"""_build"""'], {}), "(cwd, '_build')\n", (1759, 1774), False, 'from os.path import join\n'), ((459, 556), 'pyrite.errors.UserError', 'UserError', (['"""Pyrite requires clang to be installed, but no such installation was found."""'], {}), "(\n 'Pyrite requires clang to be installed, but no such installation was found.'\n )\n", (468, 556), False, 'from pyrite.errors import UserError\n'), ((1704, 1734), 'pyrite.globals.Globals.get_compiler_options', 'Globals.get_compiler_options', ([], {}), '()\n', (1732, 1734), False, 'from pyrite.globals import Globals\n'), ((367, 397), 'pyrite.globals.Globals.get_compiler_options', 'Globals.get_compiler_options', ([], {}), '()\n', (395, 397), False, 'from pyrite.globals import Globals\n')] |
eduardogerentklein/Algoritmos-Geneticos | bag_recursive.py | 499836ac4867240ee3777dcdd554081a480cb8c9 | maxWeight = 30
value = [15, 7, 10, 5, 8, 17]
weight = [15, 3, 2, 5, 9, 20]
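# recursive 0/1 knapsack: at each position either take the item or skip it and
# keep whichever choice yields the higher total value; selections that exceed
# the weight limit are pruned by returning (0, 0)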
def bag(pos, selected):
    # compute the total value and weight of the selected items
totalValue = 0
pesoTotal = 0
for i in selected:
totalValue += value[i]
pesoTotal += weight[i]
if pesoTotal > maxWeight:
return (0,0)
if pos >= len(weight):
return (totalValue, pesoTotal)
answer1 = bag(pos + 1, selected + [pos])
answer2 = bag(pos + 1, list(selected))
if answer1[0] > answer2[0]:
return answer1
else:
return answer2
bestAnswer = bag(0, [])
print(bestAnswer) | [] |
MEfeTiryaki/trpo | train.py | e1c7bc25165730afa60d9733555398e078a13e67 | import argparse
from itertools import count
import signal
import sys
import os
import time
import numpy as np
import gym
import torch
import torch.autograd as autograd
from torch.autograd import Variable
import scipy.optimize
import matplotlib.pyplot as plt
from value import Value
from policy import Policy
from utils import *
from trpo import trpo_step
parser = argparse.ArgumentParser(description='PyTorch actor-critic example')
# Algorithm Parameters
parser.add_argument('--gamma', type=float, default=0.995, metavar='G', help='discount factor (default: 0.995)')
parser.add_argument('--lambda-', type=float, default=0.97, metavar='G', help='gae (default: 0.97)')
# Value Function Learning Parameters
parser.add_argument('--l2-reg', type=float, default=1e-3, metavar='G', help='(NOT USED)l2 regularization regression (default: 1e-3)')
parser.add_argument('--val-opt-iter', type=int, default=200, metavar='G', help='iteration number for value function learning(default: 200)')
parser.add_argument('--lr', type=float, default=1e-3, metavar='G', help='learning rate for value function (default: 1e-3)')
parser.add_argument('--value-memory', type=int, default=1, metavar='G', help='ratio of past value to be used to batch size (default: 1)')
parser.add_argument('--value-memory-shuffle', action='store_true',help='if not shuffled latest memory stay') # TODO: implement
# Policy Optimization parameters
parser.add_argument('--max-kl', type=float, default=1e-2, metavar='G', help='max kl value (default: 1e-2)')
parser.add_argument('--damping', type=float, default=1e-1, metavar='G', help='damping (default: 1e-1)')
parser.add_argument('--fisher-ratio', type=float, default=1, metavar='G', help='ratio of data to calcualte fisher vector product (default: 1)')
# Environment parameters
parser.add_argument('--env-name', default="Pendulum-v0", metavar='G', help='name of the environment to run')
parser.add_argument('--seed', type=int, default=543, metavar='N', help='random seed (default: 1)')
# Training length
parser.add_argument('--batch-size', type=int, default=5000, metavar='N', help='number of steps per iteration')
parser.add_argument('--episode-length', type=int, default=1000, metavar='N', help='max step size for one episode')
parser.add_argument('--max-iteration-number', type=int, default=200, metavar='N', help='max policy iteration number')
# Rendering
parser.add_argument('--render', action='store_true', help='render the environment')
# Logging
parser.add_argument('--log-interval', type=int, default=1, metavar='N', help='interval between training status logs (default: 10)')
parser.add_argument('--log', action='store_true', help='log the results at the end')
parser.add_argument('--log-dir', type=str, default=".", metavar='N', help='log directory')
parser.add_argument('--log-prefix', type=str, default="log", metavar='N', help='log file prefix')
# Load
parser.add_argument('--load', action='store_true', help='load models')
parser.add_argument('--save', action='store_true', help='load models')
parser.add_argument('--load-dir', type=str, default=".", metavar='N', help='')
args = parser.parse_args()
env = gym.make(args.env_name)
env.seed(args.seed)
num_inputs = env.observation_space.shape[0]
num_actions = env.action_space.shape[0]
torch.set_printoptions(profile="full")
if args.load:
policy_net = Policy(num_inputs, num_actions,30)
value_net = Value(num_inputs,30)
set_flat_params_to(value_net, loadParameterCsv(args.load_dir+"/ValueNet"))
set_flat_params_to(policy_net, loadParameterCsv(args.load_dir+"/PolicyNet"))
print("Networks are loaded from "+args.load_dir+"/")
else:
policy_net = Policy(num_inputs, num_actions,30)
value_net = Value(num_inputs,30)
def signal_handler(sig, frame):
""" Signal Handler to save the networks when shutting down via ctrl+C
Parameters:
Returns:
"""
if(args.save):
valueParam = get_flat_params_from(value_net)
policyParam = get_flat_params_from(policy_net)
saveParameterCsv(valueParam,args.load_dir+"/ValueNet")
saveParameterCsv(policyParam,args.load_dir+"/PolicyNet")
print("Networks are saved in "+args.load_dir+"/")
print('Closing!!')
env.close()
sys.exit(0)
def prepare_data(batch,valueBatch,previousBatch):
""" Get the batch data and calculate value,return and generalized advantage
Detail: TODO
Parameters:
batch (dict of arrays of numpy) : TODO
valueBatch (dict of arrays of numpy) : TODO
previousBatch (dict of arrays of numpy) : TODO
Returns:
"""
# TODO : more description above
stateList = [ torch.from_numpy(np.concatenate(x,axis=0)) for x in batch["states"]]
actionsList = [torch.from_numpy(np.concatenate(x,axis=0)) for x in batch["actions"]]
for states in stateList:
value = value_net.forward(states)
batch["values"].append(value)
advantagesList = []
returnsList = []
rewardsList = []
for rewards,values,masks in zip(batch["rewards"],batch["values"],batch["mask"]):
returns = torch.Tensor(len(rewards),1)
advantages = torch.Tensor(len(rewards),1)
deltas = torch.Tensor(len(rewards),1)
prev_return = 0
prev_value = 0
prev_advantage = 0
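        # walk the trajectory backwards: TD(0) targets for the value function,
        # one-step TD errors, and GAE advantages
        #   delta_t = r_t + gamma * V(s_{t+1}) - V(s_t)
        #   A_t     = delta_t + gamma * lambda * A_{t+1}
        # (masks zero out bootstrapping across episode boundaries)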
for i in reversed(range(len(rewards))):
returns[i] = rewards[i] + args.gamma * prev_value * masks[i] # TD
# returns[i] = rewards[i] + args.gamma * prev_return * masks[i] # Monte Carlo
deltas[i] = rewards[i] + args.gamma * prev_value * masks[i]- values.data[i]
advantages[i] = deltas[i] + args.gamma * args.lambda_* prev_advantage* masks[i]
prev_return = returns[i, 0]
prev_value = values.data[i, 0]
prev_advantage = advantages[i, 0]
returnsList.append(returns)
advantagesList.append(advantages)
rewardsList.append(torch.Tensor(rewards))
batch["states"] = torch.cat(stateList,0)
batch["actions"] = torch.cat(actionsList,0)
batch["rewards"] = torch.cat(rewardsList,0)
batch["returns"] = torch.cat(returnsList,0)
advantagesList = torch.cat(advantagesList,0)
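    # normalize advantages to zero mean / unit variance for a better-conditioned policy step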
batch["advantages"] = (advantagesList- advantagesList.mean()) / advantagesList.std()
valueBatch["states"] = torch.cat(( previousBatch["states"],batch["states"]),0)
valueBatch["targets"] = torch.cat((previousBatch["returns"],batch["returns"]),0)
def update_policy(batch):
""" Get advantage , states and action and calls trpo step
Parameters:
batch (dict of arrays of numpy) : TODO (batch is different than prepare_data by structure)
Returns:
"""
advantages = batch["advantages"]
states = batch["states"]
actions = batch["actions"]
trpo_step(policy_net, states,actions,advantages , args.max_kl, args.damping)
def update_value(valueBatch):
""" Get valueBatch and run adam optimizer to learn value function
Parameters:
valueBatch (dict of arrays of numpy) : TODO
Returns:
"""
# shuffle the data
dataSize = valueBatch["targets"].size()[0]
permutation = torch.randperm(dataSize)
input = valueBatch["states"][permutation]
target = valueBatch["targets"][permutation]
iter = args.val_opt_iter
batchSize = int(dataSize/ iter)
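    # one Adam step per mini-batch; val_opt_iter batches cover the shuffled dataset once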
loss_fn = torch.nn.MSELoss(reduction='sum')
optimizer = torch.optim.Adam(value_net.parameters(), lr=args.lr)
for t in range(iter):
prediction = value_net(input[t*batchSize:t*batchSize+batchSize])
loss = loss_fn(prediction, target[t*batchSize:t*batchSize+batchSize])
# XXX : Comment out for debug
# if t%100==0:
# print("\t%f"%loss.data)
optimizer.zero_grad()
loss.backward()
optimizer.step()
def save_to_previousBatch(previousBatch,batch):
""" Save previous batch to use in future value optimization
Details: TODO
Parameters:
Returns:
"""
if args.value_memory<0:
print("Value memory should be equal or greater than zero")
elif args.value_memory>0:
if previousBatch["returns"].size() == 0:
previousBatch= {"states":batch["states"],
"returns":batch["returns"]}
else:
previous_size = previousBatch["returns"].size()[0]
size = batch["returns"].size()[0]
if previous_size/size == args.value_memory:
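                # memory window is full: drop the oldest batch before appending the newest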
previousBatch["states"] = torch.cat([previousBatch["states"][size:],batch["states"]],0)
previousBatch["returns"] = torch.cat([previousBatch["returns"][size:],batch["returns"]],0)
else:
previousBatch["states"] = torch.cat([previousBatch["states"],batch["states"]],0)
previousBatch["returns"] = torch.cat([previousBatch["returns"],batch["returns"]],0)
if args.value_memory_shuffle:
permutation = torch.randperm(previousBatch["returns"].size()[0])
previousBatch["states"] = previousBatch["states"][permutation]
previousBatch["returns"] = previousBatch["returns"][permutation]
def calculate_loss(reward_sum_mean,reward_sum_std,test_number = 10):
""" Calculate mean cummulative reward for test_nubmer of trials
Parameters:
reward_sum_mean (list): holds the history of the means.
reward_sum_std (list): holds the history of the std.
Returns:
list: new value appended means
list: new value appended stds
"""
rewardSum = []
for i in range(test_number):
state = env.reset()
rewardSum.append(0)
for t in range(args.episode_length):
state, reward, done, _ = env.step(policy_net.get_action(state)[0] )
state = np.transpose(state)
rewardSum[-1] += reward
if done:
break
reward_sum_mean.append(np.array(rewardSum).mean())
reward_sum_std.append(np.array(rewardSum).std())
return reward_sum_mean, reward_sum_std
def log(rewards):
""" Saves mean and std over episodes in log file
Parameters:
Returns:
"""
# TODO : add duration to log
filename = args.log_dir+"/"+ args.log_prefix \
+ "_env_" + args.env_name \
+ "_maxIter_" + str(args.max_iteration_number) \
+ "_batchSize_" + str(args.batch_size) \
+ "_gamma_" + str(args.gamma) \
+ "_lambda_" + str(args.lambda_) \
+ "_lr_" + str(args.lr) \
+ "_valOptIter_" + str(args.val_opt_iter)
if os.path.exists(filename + "_index_0.csv"):
id = 0
file = filename + "_index_" + str(id)
while os.path.exists(file + ".csv"):
id = id +1
file = filename + "_index_" + str(id)
filename = file
else:
filename = filename + "_index_0"
import csv
filename = filename+ ".csv"
pythonVersion = sys.version_info[0]
if pythonVersion == 3:
with open(filename, 'w', newline='') as csvfile:
spamwriter = csv.writer(csvfile, delimiter=' ',
quotechar='|', quoting=csv.QUOTE_MINIMAL)
spamwriter.writerow(rewards)
elif pythonVersion == 2:
with open(filename, 'w', ) as csvfile:
spamwriter = csv.writer(csvfile, delimiter=' ',
quotechar='|', quoting=csv.QUOTE_MINIMAL)
spamwriter.writerow(rewards)
def main():
"""
Parameters:
Returns:
"""
signal.signal(signal.SIGINT, signal_handler)
time_start = time.time()
reward_sum_mean,reward_sum_std = [], []
previousBatch= {"states":torch.Tensor(0) ,
"returns":torch.Tensor(0)}
reward_sum_mean,reward_sum_std = calculate_loss(reward_sum_mean,reward_sum_std)
print("Initial loss \n\tloss | mean : %6.4f / std : %6.4f"%(reward_sum_mean[-1],reward_sum_std[-1]) )
for i_episode in range(args.max_iteration_number):
time_episode_start = time.time()
# reset batches
batch = {"states":[] ,
"actions":[],
"next_states":[] ,
"rewards":[],
"returns":[],
"values":[],
"advantages":[],
"mask":[]}
valueBatch = {"states" :[],
"targets" : []}
num_steps = 0
while num_steps < args.batch_size:
state = env.reset()
reward_sum = 0
states,actions,rewards,next_states,masks = [],[],[],[],[]
steps = 0
for t in range(args.episode_length):
action = policy_net.get_action(state)[0] # agent
next_state, reward, done, info = env.step(action)
next_state = np.transpose(next_state)
mask = 0 if done else 1
masks.append(mask)
states.append(state)
actions.append(action)
next_states.append(next_state)
rewards.append(reward)
state = next_state
reward_sum += reward
steps+=1
if args.render:
env.render()
if done:
break
batch["states"].append(np.expand_dims(states, axis=1) )
batch["actions"].append(actions)
batch["next_states"].append(np.expand_dims(next_states, axis=1))
batch["rewards"].append(rewards)
batch["mask"].append(masks)
num_steps += steps
prepare_data(batch,valueBatch,previousBatch)
update_policy(batch) # First policy update to avoid overfitting
update_value(valueBatch)
save_to_previousBatch(previousBatch,batch)
print("episode %d | total: %.4f "%( i_episode, time.time()-time_episode_start))
reward_sum_mean,reward_sum_std = calculate_loss(reward_sum_mean,reward_sum_std)
print("\tloss | mean : %6.4f / std : %6.4f"%(reward_sum_mean[-1],reward_sum_std[-1]) )
if args.log:
print("Data is logged in "+args.log_dir+"/")
log(reward_sum_mean)
print("Total training duration: %.4f "%(time.time()-time_start))
env.close()
if __name__ == '__main__':
main()
| [((371, 438), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""PyTorch actor-critic example"""'}), "(description='PyTorch actor-critic example')\n", (394, 438), False, 'import argparse\n'), ((3141, 3164), 'gym.make', 'gym.make', (['args.env_name'], {}), '(args.env_name)\n', (3149, 3164), False, 'import gym\n'), ((3270, 3308), 'torch.set_printoptions', 'torch.set_printoptions', ([], {'profile': '"""full"""'}), "(profile='full')\n", (3292, 3308), False, 'import torch\n'), ((3341, 3376), 'policy.Policy', 'Policy', (['num_inputs', 'num_actions', '(30)'], {}), '(num_inputs, num_actions, 30)\n', (3347, 3376), False, 'from policy import Policy\n'), ((3392, 3413), 'value.Value', 'Value', (['num_inputs', '(30)'], {}), '(num_inputs, 30)\n', (3397, 3413), False, 'from value import Value\n'), ((3653, 3688), 'policy.Policy', 'Policy', (['num_inputs', 'num_actions', '(30)'], {}), '(num_inputs, num_actions, 30)\n', (3659, 3688), False, 'from policy import Policy\n'), ((3704, 3725), 'value.Value', 'Value', (['num_inputs', '(30)'], {}), '(num_inputs, 30)\n', (3709, 3725), False, 'from value import Value\n'), ((4226, 4237), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (4234, 4237), False, 'import sys\n'), ((5937, 5960), 'torch.cat', 'torch.cat', (['stateList', '(0)'], {}), '(stateList, 0)\n', (5946, 5960), False, 'import torch\n'), ((5983, 6008), 'torch.cat', 'torch.cat', (['actionsList', '(0)'], {}), '(actionsList, 0)\n', (5992, 6008), False, 'import torch\n'), ((6031, 6056), 'torch.cat', 'torch.cat', (['rewardsList', '(0)'], {}), '(rewardsList, 0)\n', (6040, 6056), False, 'import torch\n'), ((6079, 6104), 'torch.cat', 'torch.cat', (['returnsList', '(0)'], {}), '(returnsList, 0)\n', (6088, 6104), False, 'import torch\n'), ((6126, 6154), 'torch.cat', 'torch.cat', (['advantagesList', '(0)'], {}), '(advantagesList, 0)\n', (6135, 6154), False, 'import torch\n'), ((6271, 6327), 'torch.cat', 'torch.cat', (["(previousBatch['states'], batch['states'])", '(0)'], {}), "((previousBatch['states'], batch['states']), 0)\n", (6280, 6327), False, 'import torch\n'), ((6356, 6414), 'torch.cat', 'torch.cat', (["(previousBatch['returns'], batch['returns'])", '(0)'], {}), "((previousBatch['returns'], batch['returns']), 0)\n", (6365, 6414), False, 'import torch\n'), ((6735, 6812), 'trpo.trpo_step', 'trpo_step', (['policy_net', 'states', 'actions', 'advantages', 'args.max_kl', 'args.damping'], {}), '(policy_net, states, actions, advantages, args.max_kl, args.damping)\n', (6744, 6812), False, 'from trpo import trpo_step\n'), ((7087, 7111), 'torch.randperm', 'torch.randperm', (['dataSize'], {}), '(dataSize)\n', (7101, 7111), False, 'import torch\n'), ((7287, 7320), 'torch.nn.MSELoss', 'torch.nn.MSELoss', ([], {'reduction': '"""sum"""'}), "(reduction='sum')\n", (7303, 7320), False, 'import torch\n'), ((10475, 10516), 'os.path.exists', 'os.path.exists', (["(filename + '_index_0.csv')"], {}), "(filename + '_index_0.csv')\n", (10489, 10516), False, 'import os\n'), ((11439, 11483), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'signal_handler'], {}), '(signal.SIGINT, signal_handler)\n', (11452, 11483), False, 'import signal\n'), ((11501, 11512), 'time.time', 'time.time', ([], {}), '()\n', (11510, 11512), False, 'import time\n'), ((10593, 10622), 'os.path.exists', 'os.path.exists', (["(file + '.csv')"], {}), "(file + '.csv')\n", (10607, 10622), False, 'import os\n'), ((11588, 11603), 'torch.Tensor', 'torch.Tensor', (['(0)'], {}), '(0)\n', (11600, 11603), False, 'import torch\n'), ((11636, 
11651), 'torch.Tensor', 'torch.Tensor', (['(0)'], {}), '(0)\n', (11648, 11651), False, 'import torch\n'), ((11930, 11941), 'time.time', 'time.time', ([], {}), '()\n', (11939, 11941), False, 'import time\n'), ((4638, 4663), 'numpy.concatenate', 'np.concatenate', (['x'], {'axis': '(0)'}), '(x, axis=0)\n', (4652, 4663), True, 'import numpy as np\n'), ((4726, 4751), 'numpy.concatenate', 'np.concatenate', (['x'], {'axis': '(0)'}), '(x, axis=0)\n', (4740, 4751), True, 'import numpy as np\n'), ((5890, 5911), 'torch.Tensor', 'torch.Tensor', (['rewards'], {}), '(rewards)\n', (5902, 5911), False, 'import torch\n'), ((9670, 9689), 'numpy.transpose', 'np.transpose', (['state'], {}), '(state)\n', (9682, 9689), True, 'import numpy as np\n'), ((10969, 11045), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""" """', 'quotechar': '"""|"""', 'quoting': 'csv.QUOTE_MINIMAL'}), "(csvfile, delimiter=' ', quotechar='|', quoting=csv.QUOTE_MINIMAL)\n", (10979, 11045), False, 'import csv\n'), ((9796, 9815), 'numpy.array', 'np.array', (['rewardSum'], {}), '(rewardSum)\n', (9804, 9815), True, 'import numpy as np\n'), ((9850, 9869), 'numpy.array', 'np.array', (['rewardSum'], {}), '(rewardSum)\n', (9858, 9869), True, 'import numpy as np\n'), ((11224, 11300), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""" """', 'quotechar': '"""|"""', 'quoting': 'csv.QUOTE_MINIMAL'}), "(csvfile, delimiter=' ', quotechar='|', quoting=csv.QUOTE_MINIMAL)\n", (11234, 11300), False, 'import csv\n'), ((12717, 12741), 'numpy.transpose', 'np.transpose', (['next_state'], {}), '(next_state)\n', (12729, 12741), True, 'import numpy as np\n'), ((13230, 13260), 'numpy.expand_dims', 'np.expand_dims', (['states'], {'axis': '(1)'}), '(states, axis=1)\n', (13244, 13260), True, 'import numpy as np\n'), ((13348, 13383), 'numpy.expand_dims', 'np.expand_dims', (['next_states'], {'axis': '(1)'}), '(next_states, axis=1)\n', (13362, 13383), True, 'import numpy as np\n'), ((14131, 14142), 'time.time', 'time.time', ([], {}), '()\n', (14140, 14142), False, 'import time\n'), ((8419, 8482), 'torch.cat', 'torch.cat', (["[previousBatch['states'][size:], batch['states']]", '(0)'], {}), "([previousBatch['states'][size:], batch['states']], 0)\n", (8428, 8482), False, 'import torch\n'), ((8524, 8589), 'torch.cat', 'torch.cat', (["[previousBatch['returns'][size:], batch['returns']]", '(0)'], {}), "([previousBatch['returns'][size:], batch['returns']], 0)\n", (8533, 8589), False, 'import torch\n'), ((8648, 8704), 'torch.cat', 'torch.cat', (["[previousBatch['states'], batch['states']]", '(0)'], {}), "([previousBatch['states'], batch['states']], 0)\n", (8657, 8704), False, 'import torch\n'), ((8746, 8804), 'torch.cat', 'torch.cat', (["[previousBatch['returns'], batch['returns']]", '(0)'], {}), "([previousBatch['returns'], batch['returns']], 0)\n", (8755, 8804), False, 'import torch\n'), ((13769, 13780), 'time.time', 'time.time', ([], {}), '()\n', (13778, 13780), False, 'import time\n')] |
meck93/intro_ml | task3/task3_xgb_cv.py | 903710b13e9eed8b45fdbd9957c2fb49b2981f62 |
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.feature_selection import f_classif, SelectKBest
import numpy as np
import pandas as pd
import os
mingw_path = 'C:\\Program Files\\mingw-w64\\x86_64-7.2.0-posix-sjlj-rt_v5-rev1\\mingw64\\bin'
os.environ['PATH'] = mingw_path + ';' + os.environ['PATH']
import xgboost as xgb
# Constants
FILE_PATH_TRAIN = "./input/train.h5"
FILE_PATH_TEST = "./input/test.h5"
TEST_SIZE = 0.25
# read training file
# test_data = pd.read_hdf(FILE_PATH_TRAIN, "test")
training_data = pd.read_hdf(FILE_PATH_TRAIN, "train")
# training data
# extracting the x-values
x_values_training = training_data.copy()
x_values_training = x_values_training.drop(labels=['y'], axis=1)
x_component_training = x_values_training.values
# extracting the y-values
y_component_training = training_data['y'].values
# training the scaler
scaler = StandardScaler(with_mean=True, with_std=True)
scaler = scaler.fit(x_component_training)
# scaling the training and test data
x_train_scaled = scaler.transform(x_component_training)
# feature selection
selector = SelectKBest(f_classif, k=25)
selector = selector.fit(x_train_scaled, y_component_training)
x_train_scaled_new = selector.transform(x_train_scaled)
# splitting the training set into a training & validation set
x_train, x_val, y_train, y_val = train_test_split(x_train_scaled_new, y_component_training, test_size=TEST_SIZE, random_state=42)
# training, evaluation and test data in xgboost DMatrix
xg_train = xgb.DMatrix(x_train, label=y_train)
xg_val = xgb.DMatrix(x_val, label=y_val)
# setup parameters for xgboost
params = {}
# use softmax multi-class classification
params['objective'] = 'multi:softmax'
# scale weight of positive examples
params['silent'] = 0
params['num_class'] = 5
params['tree_method'] = 'auto'
params['seed'] = 42
# number of boosting rounds
rounds = 300
# gridsearch_params = [
# (max_depth, min_child_weight)
# for max_depth in range(6,13,2)
# for min_child_weight in range(4,9,2)
# ]
# print(gridsearch_params)
# best_params = None
# min_error = float("Inf")
# for max_depth, min_child_weight in gridsearch_params:
# print("CV with max_depth={}, min_child_weight={}".format(max_depth, min_child_weight))
# # Update our parameters
# params['max_depth'] = max_depth
# params['min_child_weight'] = min_child_weight
# # Run CV
# cv_results = xgb.cv(params, xg_train, num_boost_round=rounds, seed=42, nfold=5, metrics={'merror'}, early_stopping_rounds=10, verbose_eval=True)
# # Update best error
# mean_error = cv_results['test-merror-mean'].min()
# boost_rounds = cv_results['test-merror-mean'].argmin()
# print("\t Multiclass Error {} for {} rounds".format(mean_error, boost_rounds))
# print()
# if mean_error < min_error:
# min_error = mean_error
# best_params = (max_depth, min_child_weight)
# print("Best params: {}, {}, MAE: {}".format(best_params[0], best_params[1], min_error))
# # grid search parameters
# gridsearch_params = []
# # tree depth, gamma, learning rate, regularization lambda
# for max_tree_depth in range(6, 11, 1):
# for gamma in range(0, 13, 2):
# for learn_rate in [0.3, 0.1, 0.05]:
# for reg_lambda in [10.0, 1.0, 0.0, 0.1, 0.01]:
# gridsearch_params.append((max_tree_depth, gamma, learn_rate, reg_lambda))
# print(gridsearch_params)
gridsearch_params = [
(max_depth, gamma)
for max_depth in range(6,13,2)
for gamma in range(0,13,2)
]
print(gridsearch_params)
best_params = None
min_test_error = float("Inf")
min_train_error = float("Inf")
file = open("output.txt", mode="w+", encoding='utf-8', newline='\n')
for max_depth, gamma in gridsearch_params:
print("CV with max_depth={}, gamma={}".format(max_depth, gamma))
file.write("CV with max_depth={}, gamma={}\n".format(max_depth, gamma))
# Update our parameters
params['max_depth'] = max_depth
params['gamma'] = gamma
# Run CV
cv_results = xgb.cv(params, xg_train, num_boost_round=rounds, seed=42, nfold=5, metrics={'merror'}, early_stopping_rounds=10, verbose_eval=True)
# Update best error
test_error = cv_results['test-merror-mean'].min()
train_error = cv_results['train-merror-mean'].min()
boost_rounds = cv_results['test-merror-mean'].argmin()
print("Multiclass Error {} for {} rounds".format(test_error, boost_rounds))
print()
file.write("Multiclass Error - Test: {} - Train: {} for {} rounds\n".format(test_error, train_error, boost_rounds))
file.write("\n")
if test_error < min_test_error:
min_test_error = test_error
min_train_error = train_error
best_params = (max_depth, gamma)
print("Best params: {}, {}, Test Error: {}, Train Error: {}".format(best_params[0], best_params[1], min_test_error, min_train_error))
file.write("Best params: {}, {}, Test Error: {}, Train Error: {}\n".format(best_params[0], best_params[1], min_test_error, min_train_error))
file.close()
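# --- Added sketch (not part of the original script): refit one final booster with the
# --- best grid values found above. It only uses objects already defined in this script;
# --- the early-stopping split (xg_val) doubles as the evaluation set here.
params['max_depth'], params['gamma'] = best_params
final_model = xgb.train(params, xg_train, num_boost_round=rounds,
                        evals=[(xg_train, 'train'), (xg_val, 'val')],
                        early_stopping_rounds=10, verbose_eval=True)
val_pred = final_model.predict(xg_val)          # multi:softmax returns class labels
print("validation accuracy:", accuracy_score(y_val, val_pred))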
| [((625, 662), 'pandas.read_hdf', 'pd.read_hdf', (['FILE_PATH_TRAIN', '"""train"""'], {}), "(FILE_PATH_TRAIN, 'train')\n", (636, 662), True, 'import pandas as pd\n'), ((969, 1014), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {'with_mean': '(True)', 'with_std': '(True)'}), '(with_mean=True, with_std=True)\n', (983, 1014), False, 'from sklearn.preprocessing import StandardScaler\n'), ((1184, 1212), 'sklearn.feature_selection.SelectKBest', 'SelectKBest', (['f_classif'], {'k': '(25)'}), '(f_classif, k=25)\n', (1195, 1212), False, 'from sklearn.feature_selection import f_classif, SelectKBest\n'), ((1427, 1528), 'sklearn.model_selection.train_test_split', 'train_test_split', (['x_train_scaled_new', 'y_component_training'], {'test_size': 'TEST_SIZE', 'random_state': '(42)'}), '(x_train_scaled_new, y_component_training, test_size=\n TEST_SIZE, random_state=42)\n', (1443, 1528), False, 'from sklearn.model_selection import train_test_split\n'), ((1592, 1627), 'xgboost.DMatrix', 'xgb.DMatrix', (['x_train'], {'label': 'y_train'}), '(x_train, label=y_train)\n', (1603, 1627), True, 'import xgboost as xgb\n'), ((1637, 1668), 'xgboost.DMatrix', 'xgb.DMatrix', (['x_val'], {'label': 'y_val'}), '(x_val, label=y_val)\n', (1648, 1668), True, 'import xgboost as xgb\n'), ((4106, 4242), 'xgboost.cv', 'xgb.cv', (['params', 'xg_train'], {'num_boost_round': 'rounds', 'seed': '(42)', 'nfold': '(5)', 'metrics': "{'merror'}", 'early_stopping_rounds': '(10)', 'verbose_eval': '(True)'}), "(params, xg_train, num_boost_round=rounds, seed=42, nfold=5, metrics=\n {'merror'}, early_stopping_rounds=10, verbose_eval=True)\n", (4112, 4242), True, 'import xgboost as xgb\n')] |
atticwip/audius-protocol | discovery-provider/src/queries/get_plays_metrics.py | 9758e849fae01508fa1d27675741228b11533e6e | import logging
import time
from sqlalchemy import func, desc
from src.models import Play
from src.utils import db_session
logger = logging.getLogger(__name__)
def get_plays_metrics(args):
"""
Returns metrics for play counts
Args:
args: dict The parsed args from the request
args.start_time: date The start of the query
args.limit: number The max number of responses to return
args.bucket_size: string A date_trunc operation to aggregate timestamps by
Returns:
Array of dictionaries with the play counts and timestamp
"""
db = db_session.get_db_read_replica()
with db.scoped_session() as session:
return _get_plays_metrics(session, args)
def _get_plays_metrics(session, args):
metrics_query = (
session.query(
func.date_trunc(args.get("bucket_size"), Play.created_at).label(
"timestamp"
),
func.count(Play.id).label("count"),
)
.filter(Play.created_at > args.get("start_time"))
.group_by(func.date_trunc(args.get("bucket_size"), Play.created_at))
.order_by(desc("timestamp"))
.limit(args.get("limit"))
)
metrics = metrics_query.all()
metrics = [
{"timestamp": int(time.mktime(m[0].timetuple())), "count": m[1]}
for m in metrics
]
return metrics
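# --- Added usage sketch (not part of the original module). The args object only needs
# --- dict-style .get() access, so a plain dict works; this assumes the discovery
# --- provider's read-replica database is configured for db_session.
if __name__ == "__main__":
    from datetime import datetime, timedelta
    example_args = {
        "bucket_size": "hour",  # any postgres date_trunc unit: 'hour', 'day', 'week', ...
        "start_time": datetime.utcnow() - timedelta(days=7),
        "limit": 168,
    }
    print(get_plays_metrics(example_args))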
| [((132, 159), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (149, 159), False, 'import logging\n'), ((595, 627), 'src.utils.db_session.get_db_read_replica', 'db_session.get_db_read_replica', ([], {}), '()\n', (625, 627), False, 'from src.utils import db_session\n'), ((1135, 1152), 'sqlalchemy.desc', 'desc', (['"""timestamp"""'], {}), "('timestamp')\n", (1139, 1152), False, 'from sqlalchemy import func, desc\n'), ((936, 955), 'sqlalchemy.func.count', 'func.count', (['Play.id'], {}), '(Play.id)\n', (946, 955), False, 'from sqlalchemy import func, desc\n')] |
Rich9rd/CAutomation | CAutomation/settings.py | d1c1b963e806a216d4c825243c1c405336414413 | """
Django settings for CAutomation project.
Generated by 'django-admin startproject' using Django 3.2.4.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
import os
import dj_database_url
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'staticfiles')
STATICFILES_DIRS = (
os.path.join(PROJECT_ROOT, 'static'),
)
ACCOUNT_AUTHENTICATION_METHOD = 'username_email'
ACCOUNT_LOGOUT_ON_GET = False
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = "none"
AUTH_USER_MODEL = 'cleaning.User'
AUTHENTICATION_BACKENDS = (
# Needed to login by username in Django admin, regardless of `allauth`
'django.contrib.auth.backends.ModelBackend',
# `allauth` specific authentication methods, such as login by e-mail
'allauth.account.auth_backends.AuthenticationBackend',
)
ACCOUNT_CONFIRM_EMAIL_ON_GET = False
SWAGGER_SETTINGS = {
'SECURITY_DEFINITIONS': {
'api_key': {
'type': 'apiKey',
'in': 'header',
'name': 'Authorization'
}
},
'USE_SESSION_AUTH': False,
'JSON_EDITOR': True,
}
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-=(#vt!5x^l3-j(e*%@p0)d_p&qd2x_#&n*^i=j38@b(26zz^mr'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
REST_FRAMEWORK = {
'DEFAULT_SCHEMA_CLASS': 'rest_framework.schemas.coreapi.AutoSchema',
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
],
'DEFAULT_AUTHENTICATION_CLASSES': [
'rest_framework.authentication.TokenAuthentication',
],
}
# Application definition
SITE_ID = 1
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'corsheaders',
'allauth',
'allauth.account',
'allauth.socialaccount',
'drf_yasg',
'rest_framework',
'rest_framework.authtoken',
'rest_auth.registration',
'rest_auth',
'common.apps.CommonConfig',
'cleaning.apps.CleaningConfig',
]
#'corsheaders',
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.common.CommonMiddleware',
'corsheaders.middleware.CorsMiddleware',
]
#'django.middleware.common.CommonMiddleware',
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
#'corsheaders.middleware.CommonMiddleware',
ROOT_URLCONF = 'CAutomation.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'CAutomation.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': dj_database_url.config(
default='postgres://mzqgdpoeqiolgg:270514539442574d87e9f9c742314e58d57ff59139679e5c6e46eff5482b5b6e@ec2-52-208-221-89.eu-west-1.compute.amazonaws.com:5432/d96ohaomhouuat'
),
}
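# Note (added comment): dj_database_url.config() reads the DATABASE_URL environment
# variable when it is set; the hard-coded URL above is only the fallback used when that
# variable is absent, e.g.:
#   export DATABASE_URL=postgres://user:password@host:5432/dbname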
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
CORS_ALLOW_ALL_ORIGINS = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| [((560, 601), 'os.path.join', 'os.path.join', (['PROJECT_ROOT', '"""staticfiles"""'], {}), "(PROJECT_ROOT, 'staticfiles')\n", (572, 601), False, 'import os\n'), ((518, 543), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (533, 543), False, 'import os\n'), ((627, 663), 'os.path.join', 'os.path.join', (['PROJECT_ROOT', '"""static"""'], {}), "(PROJECT_ROOT, 'static')\n", (639, 663), False, 'import os\n'), ((4045, 4249), 'dj_database_url.config', 'dj_database_url.config', ([], {'default': '"""postgres://mzqgdpoeqiolgg:270514539442574d87e9f9c742314e58d57ff59139679e5c6e46eff5482b5b6e@ec2-52-208-221-89.eu-west-1.compute.amazonaws.com:5432/d96ohaomhouuat"""'}), "(default=\n 'postgres://mzqgdpoeqiolgg:270514539442574d87e9f9c742314e58d57ff59139679e5c6e46eff5482b5b6e@ec2-52-208-221-89.eu-west-1.compute.amazonaws.com:5432/d96ohaomhouuat'\n )\n", (4067, 4249), False, 'import dj_database_url\n'), ((447, 461), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (451, 461), False, 'from pathlib import Path\n')] |
wanderindev/financial-calculator-backend | calculators/credit_card_calculator.py | ad7e736c858298c240eb9af52fbadcb02c693968 | from .calculator import Calculator
# noinspection PyTypeChecker
class CreditCardCalculator(Calculator):
def __init__(self, **kwargs):
super(CreditCardCalculator, self).__init__(**kwargs)
self.cc_debt = self.get_float(kwargs.get("cc_debt", 0))
self.add_c = self.get_float(kwargs.get("add_c", 0))
self.min_p_perc = self.get_float(kwargs.get("min_p_perc", 0))
self.min_p = self.get_float(kwargs.get("min_p", 0))
self.fix_p = self.get_float(kwargs.get("fix_p", 0))
self.payments = []
self.payments_p = []
def get_payment_cc(self) -> float:
_rate = self.rate / (100 * self.freq)
_min_p_perc = self.min_p_perc / 100
_min_p = self.min_p
_fix_p = self.fix_p
b = self.cc_debt
per = 0
while b > 0:
i = b * _rate
p = max(b * _min_p_perc, _min_p, _fix_p)
if b + i < p:
p = b + i
b += i - p
per += 1
self.periods.append(per)
self.payments.append(p)
self.payments_p.append(p - i)
self.interests.append(i)
self.balances.append(b)
return self.payments[0]
def get_rate_cc(self) -> float:
return self.rate + self.add_c * 1200 / self.cc_debt
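# --- Added usage sketch (not part of the original module). The annual rate, the payment
# --- frequency (``rate``/``freq``) and the per-period bookkeeping lists are assumed to be
# --- provided by the ``Calculator`` base class, as the methods above imply; verify the
# --- exact keyword names against calculator.py before relying on this example.
if __name__ == "__main__":
    cc = CreditCardCalculator(rate=18, freq=12, cc_debt=5000,
                               add_c=0, min_p_perc=3, min_p=25, fix_p=0)
    first_payment = cc.get_payment_cc()   # fills periods/payments/interests/balances
    print("first payment:", round(first_payment, 2))
    print("periods to pay off:", cc.periods[-1])
    print("effective rate incl. fees:", cc.get_rate_cc())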
| [] |
phaustin/MyST-Parser | setup.py | 181e921cea2794f10ca612df6bf2a2057b66c372 | """myst-parser package setup."""
from importlib import import_module
from setuptools import find_packages, setup
setup(
name="myst-parser",
version=import_module("myst_parser").__version__,
description=(
"An extended commonmark compliant parser, " "with bridges to docutils & sphinx."
),
long_description=open("README.md").read(),
long_description_content_type="text/markdown",
url="https://github.com/executablebooks/MyST-Parser",
project_urls={"Documentation": "https://myst-parser.readthedocs.io"},
author="Chris Sewell",
author_email="[email protected]",
license="MIT",
packages=find_packages(),
entry_points={
"console_scripts": ["myst-benchmark = myst_parser.cli.benchmark:main"]
},
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Text Processing :: Markup",
"Framework :: Sphinx :: Extension",
],
keywords="markdown lexer parser development docutils sphinx",
python_requires=">=3.6",
install_requires=["markdown-it-py~=0.4.5"],
extras_require={
"sphinx": ["pyyaml", "docutils>=0.15", "sphinx>=2,<3"],
"code_style": ["flake8<3.8.0,>=3.7.0", "black", "pre-commit==1.17.0"],
"testing": [
"coverage",
"pytest>=3.6,<4",
"pytest-cov",
"pytest-regressions",
"beautifulsoup4",
],
"rtd": ["sphinxcontrib-bibtex", "ipython", "sphinx-book-theme", "sphinx_tabs"],
},
zip_safe=True,
)
| [((649, 664), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (662, 664), False, 'from setuptools import find_packages, setup\n'), ((158, 186), 'importlib.import_module', 'import_module', (['"""myst_parser"""'], {}), "('myst_parser')\n", (171, 186), False, 'from importlib import import_module\n')] |
kho/cdec | python/tests/extractor/refmt.py | d88186af251ecae60974b20395ce75807bfdda35 | #!/usr/bin/env python
import collections, sys
lines = []
f = collections.defaultdict(int)
fe = collections.defaultdict(lambda: collections.defaultdict(int))
for line in sys.stdin:
tok = [x.strip() for x in line.split('|||')]
count = int(tok[4])
f[tok[1]] += count
fe[tok[1]][tok[2]] += count
lines.append(tok)
for tok in lines:
feat = 'IsSingletonF={0}.0 IsSingletonFE={1}.0'.format(
0 if f[tok[1]] > 1 else 1,
0 if fe[tok[1]][tok[2]] > 1 else 1)
print ' ||| '.join((tok[0], tok[1], tok[2], feat, tok[3]))
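# Added note (field meanings inferred from the code above, not from the cdec docs):
# each stdin line is expected to look like
#   <lhs> ||| <source phrase> ||| <target phrase> ||| <features> ||| <count>
# and the script re-emits the rule with singleton indicator features appended, e.g.
#   in : [X] ||| casa ||| house ||| EgivenF=0.3 ||| 1
#   out: [X] ||| casa ||| house ||| IsSingletonF=1.0 IsSingletonFE=1.0 ||| EgivenF=0.3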
| [] |
tomitokko/django-blog-with-astradb | blog/models.py | 236aaf625ceb854345b6d6bbdd6d17b81e0e3c4f | from django.db import models
import uuid
from datetime import datetime
from cassandra.cqlengine import columns
from django_cassandra_engine.models import DjangoCassandraModel
# Create your models here.
class PostModel(DjangoCassandraModel):
id = columns.UUID(primary_key=True, default=uuid.uuid4)
title = columns.Text(required=True)
body = columns.Text(required=True)
created_at = columns.DateTime(default=datetime.now) | [((251, 301), 'cassandra.cqlengine.columns.UUID', 'columns.UUID', ([], {'primary_key': '(True)', 'default': 'uuid.uuid4'}), '(primary_key=True, default=uuid.uuid4)\n', (263, 301), False, 'from cassandra.cqlengine import columns\n'), ((314, 341), 'cassandra.cqlengine.columns.Text', 'columns.Text', ([], {'required': '(True)'}), '(required=True)\n', (326, 341), False, 'from cassandra.cqlengine import columns\n'), ((353, 380), 'cassandra.cqlengine.columns.Text', 'columns.Text', ([], {'required': '(True)'}), '(required=True)\n', (365, 380), False, 'from cassandra.cqlengine import columns\n'), ((398, 436), 'cassandra.cqlengine.columns.DateTime', 'columns.DateTime', ([], {'default': 'datetime.now'}), '(default=datetime.now)\n', (414, 436), False, 'from cassandra.cqlengine import columns\n')] |
miczone/python-fedex | fedex/services/availability_commitment_service.py | 1a17b45753b16b2551b0b8ba2c6aa65be8e73931 | """
Service Availability and Commitment Module
This package contains the shipping methods defined by Fedex's
ValidationAvailabilityAndCommitmentService WSDL file. Each is encapsulated in a class for
easy access. For more details on each, refer to the respective class's
documentation.
"""
import datetime
from ..base_service import FedexBaseService
class FedexAvailabilityCommitmentRequest(FedexBaseService):
"""
    This class allows you to validate service availability.
"""
def __init__(self, config_obj, *args, **kwargs):
"""
@type config_obj: L{FedexConfig}
@param config_obj: A valid FedexConfig object.
"""
self._config_obj = config_obj
# Holds version info for the VersionId SOAP object.
self._version_info = {
'service_id': 'vacs',
'major': '14',
'intermediate': '0',
'minor': '0'
}
self.CarrierCode = None
"""@ivar: Carrier Code Default to Fedex (FDXE), or can bbe FDXG."""
self.Origin = None
"""@ivar: Holds Origin Address WSDL object."""
self.Destination = None
"""@ivar: Holds Destination Address WSDL object."""
self.ShipDate = None
"""@ivar: Ship Date date WSDL object."""
self.Service = None
"""@ivar: Service type, if set to None will get all available service information."""
self.Packaging = None
"""@ivar: Type of packaging to narrow down available shipping options or defaults to YOUR_PACKAGING."""
# Call the parent FedexBaseService class for basic setup work.
# Shortened the name of the wsdl, otherwise suds did not load it properly.
# Suds throws the following error when using the long file name from FedEx:
#
# File "/Library/Python/2.7/site-packages/suds/wsdl.py", line 878, in resolve
# raise Exception("binding '%s', not-found" % p.binding)
# Exception: binding 'ns:ValidationAvailabilityAndCommitmentServiceSoapBinding', not-found
super(FedexAvailabilityCommitmentRequest, self).__init__(
self._config_obj, 'ValidationAvailabilityAndCommitmentService_v14.wsdl', *args, **kwargs)
def _prepare_wsdl_objects(self):
"""
Create the data structure and get it ready for the WSDL request.
"""
self.CarrierCode = 'FDXE'
self.Origin = self.client.factory.create('Address')
self.Destination = self.client.factory.create('Address')
self.ShipDate = datetime.date.today().isoformat()
self.Service = None
self.Packaging = 'YOUR_PACKAGING'
def _assemble_and_send_request(self):
"""
Fires off the Fedex request.
@warning: NEVER CALL THIS METHOD DIRECTLY. CALL send_request(),
WHICH RESIDES ON FedexBaseService AND IS INHERITED.
"""
# We get an exception like this when specifying an IntegratorId:
# suds.TypeNotFound: Type not found: 'IntegratorId'
# Setting it to None does not seem to appease it.
del self.ClientDetail.IntegratorId
self.logger.debug(self.WebAuthenticationDetail)
self.logger.debug(self.ClientDetail)
self.logger.debug(self.TransactionDetail)
self.logger.debug(self.VersionId)
# Fire off the query.
return self.client.service.serviceAvailability(
WebAuthenticationDetail=self.WebAuthenticationDetail,
ClientDetail=self.ClientDetail,
TransactionDetail=self.TransactionDetail,
Version=self.VersionId,
Origin=self.Origin,
Destination=self.Destination,
ShipDate=self.ShipDate,
CarrierCode=self.CarrierCode,
Service=self.Service,
Packaging=self.Packaging)
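# --- Added usage sketch (not part of the original module). The FedexConfig fields and the
# --- Address/response attribute names below follow typical python-fedex examples and the
# --- FedEx WSDL; treat them as assumptions and verify against your installed version.
if __name__ == "__main__":
    from fedex.config import FedexConfig
    demo_config = FedexConfig(key='YOUR_KEY', password='YOUR_PASSWORD',
                              account_number='YOUR_ACCOUNT', meter_number='YOUR_METER',
                              use_test_server=True)
    avc_request = FedexAvailabilityCommitmentRequest(demo_config)
    avc_request.Origin.PostalCode = '29631'
    avc_request.Origin.CountryCode = 'US'
    avc_request.Destination.PostalCode = '27577'
    avc_request.Destination.CountryCode = 'US'
    avc_request.send_request()   # inherited from FedexBaseService
    print(avc_request.response)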
| [((2548, 2569), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (2567, 2569), False, 'import datetime\n')] |
gb-andreygsouza/XuniVerse | xverse/transformer/_woe.py | 74f4b9112c32a8f1411ae0c5a6de906f8d2e895a | import pandas as pd
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
import scipy.stats.stats as stats
import pandas.core.algorithms as algos
#from sklearn.utils.validation import check_is_fitted
from sklearn.utils import check_array
from ..transformer import MonotonicBinning
pd.options.mode.chained_assignment = None
class WOE(BaseEstimator, TransformerMixin):
"""Weight of evidence transformation for categorical variables. For numeric variables,
    monotonic binning is applied by default by this package.
Parameters
----------
feature_names: 'all' or list (default='all')
list of features to perform WOE transformation.
- 'all' (default): All categorical features in the dataset will be used
- list of features: ['age', 'income',......]
exclude_features: list (default=None)
list of features to be excluded from WOE transformation.
- Example - ['age', 'income', .......]
woe_prefix: string (default=None)
Variable prefix to be used for the column created by WOE transformer. The default value is set 'None'.
treat_missing: {'separate', 'mode', 'least_frequent'} (default='separate')
This parameter setting is used to handle missing values in the dataset.
'separate' - Missing values are treated as a own group (category)
'mode' - Missing values are combined with the highest frequent item in the dataset
'least_frequent' - Missing values are combined with the least frequent item in the dataset
woe_bins: dict of dicts(default=None)
This feature is added as part of future WOE transformations or scoring. If this value is set,
then WOE values provided for each of the features here will be used for transformation.
Applicable only in the transform method.
        Dictionary structure - {'feature_name': {category: woe_value}}
        Example - {'education': {'primary': 0.1, 'tertiary': 0.5, 'secondary': 0.7}}
monotonic_binning: bool (default=True)
This parameter is used to perform monotonic binning on numeric variables. If set to False,
numeric variables would be ignored.
mono_feature_names: 'all' or list (default='all')
list of features to perform monotonic binning operation.
- 'all' (default): All features in the dataset will be used
- list of features: ['age', 'income',......]
mono_max_bins: int (default=20)
Maximum number of bins that can be created for any given variable. The final number of bins
created will be less than or equal to this number.
mono_force_bins: int (default=3)
It forces the module to create bins for a variable, when it cannot find monotonic relationship
using "max_bins" option. The final number of bins created will be equal to the number specified.
mono_cardinality_cutoff: int (default=5)
Cutoff to determine if a variable is eligible for monotonic binning operation. Any variable
which has unique levels less than this number will be treated as character variables.
At this point no binning operation will be performed on the variable and it will return the
unique levels as bins for these variable.
mono_prefix: string (default=None)
Variable prefix to be used for the column created by monotonic binning.
mono_custom_binning: dict (default=None)
Using this parameter, the user can perform custom binning on variables. This parameter is also
used to apply previously computed bins for each feature (Score new data).
Dictionary structure - {'feature_name': float list}
Example - {'age': [0., 1., 2., 3.]}
"""
# Initialize the parameters for the function
def __init__(self, feature_names='all', exclude_features=None, woe_prefix=None,
treat_missing='separate', woe_bins=None, monotonic_binning=True,
mono_feature_names='all', mono_max_bins=20, mono_force_bins=3,
mono_cardinality_cutoff=5, mono_prefix=None, mono_custom_binning=None):
self.feature_names = feature_names
self.exclude_features = exclude_features
self.woe_prefix = woe_prefix
self.treat_missing = treat_missing
self.woe_bins = woe_bins #only used for future transformations
#these features below are for monotonic operations on numeric variables.
#It uses MonotonicBinning class from binning package.
self.monotonic_binning = monotonic_binning
self.mono_feature_names = mono_feature_names
self.mono_max_bins = mono_max_bins
self.mono_force_bins = mono_force_bins
self.mono_cardinality_cutoff = mono_cardinality_cutoff
self.mono_prefix = mono_prefix
self.mono_custom_binning = mono_custom_binning #only used for monotonic transformations
# check input data type - Only Pandas Dataframe allowed
def check_datatype(self, X):
if not isinstance(X, pd.DataFrame):
raise ValueError("The input data must be pandas dataframe. But the input provided is " + str(type(X)))
return self
# the fit function for WOE transformer
def fit(self, X, y):
#if the function is used as part of pipeline, then try to unpack tuple values
#produced in the previous step. Added as a part of pipeline feature.
try:
X, y = X
except:
pass
#check datatype of X
self.check_datatype(X)
#The length of X and Y should be equal
if X.shape[0] != y.shape[0]:
raise ValueError("Mismatch in input lengths. Length of X is " + str(X.shape[0]) + " \
but length of y is " + str(y.shape[0]) + ".")
# The label must be binary with values {0,1}
unique = np.unique(y)
if len(unique) != 2:
raise ValueError("The target column y must be binary. But the target contains " + str(len(unique)) + \
" unique value(s).")
#apply monotonic binning operation
if self.monotonic_binning:
self.mono_bin_clf = MonotonicBinning(feature_names=self.mono_feature_names,
max_bins=self.mono_max_bins, force_bins=self.mono_force_bins,
cardinality_cutoff=self.mono_cardinality_cutoff,
prefix=self.mono_prefix, custom_binning=self.mono_custom_binning)
if self.mono_custom_binning:
X = self.mono_bin_clf.transform(X)
self.mono_custom_binning = self.mono_bin_clf.bins
else:
X = self.mono_bin_clf.fit_transform(X, y)
self.mono_custom_binning = self.mono_bin_clf.bins
#identify the variables to tranform and assign the bin mapping dictionary
self.woe_bins = {} #bin mapping
if not self.mono_custom_binning:
self.mono_custom_binning= {}
else:
for i in self.mono_custom_binning:
X[i] = X[i].astype('object')
numerical_features = list(X._get_numeric_data().columns)
categorical_features = list(X.columns.difference(numerical_features))
#Identifying the features to perform fit
if self.feature_names == 'all':
self.transform_features = categorical_features
else:
self.transform_features = list(set(self.feature_names))
#Exclude variables provided in the exclusion list
if self.exclude_features:
self.transform_features = list(set(self.transform_features) - set(self.exclude_features))
temp_X = X[self.transform_features] #subset data only on features to fit
temp_X = temp_X.astype('object') #convert categorical columns to object columns
temp_X = self.treat_missing_values(temp_X) #treat missing values function
#apply the WOE train function on dataset
temp_X.apply(lambda x: self.train(x, y), axis=0)
#provide Information value for each variable as a separate dataset
self.iv_df = pd.DataFrame({'Information_Value':self.woe_df.groupby('Variable_Name').Information_Value.max()})
self.iv_df = self.iv_df.reset_index()
self.iv_df = self.iv_df.sort_values('Information_Value', ascending=False)
return self
#treat missing values based on the 'treat_missing' option provided by user
def treat_missing_values(self, X):
"""
treat_missing: {'separate', 'mode', 'least_frequent'} (default='separate')
This parameter setting is used to handle missing values in the dataset.
'separate' - Missing values are treated as a own group (category)
'mode' - Missing values are combined with the highest frequent item in the dataset
'least_frequent' - Missing values are combined with the least frequent item in the dataset
"""
if self.treat_missing == 'separate':
X = X.fillna('NA')
elif self.treat_missing == 'mode':
X = X.fillna(X.mode().iloc[0])
elif self.treat_missing == 'least_frequent':
for i in X:
X[i] = X[i].fillna(X[i].value_counts().index[-1])
else:
raise ValueError("Missing values could be treated with one of these three options - \
'separate', 'mode', 'least_frequent'. \
The provided option is - " + str(self.treat_missing))
return X
#WOE binning - The function is applied on each columns identified in the fit function.
#Here, the input X is a Pandas Series type.
def train(self, X, y):
# Assign values
woe_mapping = {} #dictionary mapping for the current feature
temp_woe = pd.DataFrame({},index=[])
temp_df = pd.DataFrame({'X': X, "Y":y})
grouped_df = temp_df.groupby('X', as_index=True)
#calculate stats for variable and store it in temp_woe
target_sum = grouped_df.Y.sum()
temp_woe['Count'] = grouped_df.Y.count()
temp_woe['Category'] = target_sum.index
temp_woe['Event'] = target_sum
temp_woe['Non_Event'] = temp_woe['Count'] - temp_woe['Event']
temp_woe['Event_Rate'] = temp_woe['Event']/temp_woe['Count']
temp_woe['Non_Event_Rate'] = temp_woe['Non_Event']/temp_woe['Count']
#calculate distributions and woe
total_event = temp_woe['Event'].sum()
total_non_event = temp_woe['Non_Event'].sum()
temp_woe['Event_Distribution'] = temp_woe['Event']/total_event
temp_woe['Non_Event_Distribution'] = temp_woe['Non_Event']/total_non_event
temp_woe['WOE'] = np.log(temp_woe['Event_Distribution']/temp_woe['Non_Event_Distribution'])
temp_woe['Information_Value'] = (temp_woe['Event_Distribution']- \
temp_woe['Non_Event_Distribution'])*temp_woe['WOE']
temp_woe['Variable_Name'] = X.name
temp_woe = temp_woe[['Variable_Name', 'Category', 'Count', 'Event', 'Non_Event', \
'Event_Rate', 'Non_Event_Rate', 'Event_Distribution', 'Non_Event_Distribution', \
'WOE', 'Information_Value']]
temp_woe = temp_woe.replace([np.inf, -np.inf], 0)
temp_woe['Information_Value'] = temp_woe['Information_Value'].sum()
temp_woe = temp_woe.reset_index(drop=True)
woe_mapping[str(X.name)] = dict(zip(temp_woe['Category'], temp_woe['WOE']))
#assign computed values to class variables
try:
self.woe_df = self.woe_df.append(temp_woe, ignore_index=True)
self.woe_bins.update(woe_mapping)
except:
self.woe_df = temp_woe
self.woe_bins = woe_mapping
return self
#Transform new data or existing data based on the fit identified or custom transformation provided by user
def transform(self, X, y=None):
#if the function is used as part of pipeline, then try to unpack tuple values
#produced in the previous step. Added as a part of pipeline feature.
try:
X, y = X
except:
pass
self.check_datatype(X) #check input datatype.
outX = X.copy(deep=True)
#identify the features on which the transformation should be performed
try:
if self.transform_features:
transform_features = self.transform_features
except:
if self.woe_bins:
transform_features = list(self.woe_bins.keys())
else:
raise ValueError("Estimator has to be fitted to make WOE transformations")
#final list of features to be transformed
transform_features = list(set(transform_features) & set(outX.columns))
#raise error if the list is empty
if not transform_features:
raise ValueError("Empty list for WOE transformation. \
Estimator has to be fitted to make WOE transformations")
#use the custom bins provided by user for numeric variables
if self.mono_custom_binning:
try:
if self.mono_bin_clf:
pass
except:
self.mono_bin_clf = MonotonicBinning(feature_names=self.mono_feature_names,
max_bins=self.mono_max_bins, force_bins=self.mono_force_bins,
cardinality_cutoff=self.mono_cardinality_cutoff,
prefix=self.mono_prefix, custom_binning=self.mono_custom_binning)
outX = self.mono_bin_clf.transform(outX)
outX = outX.astype('object') #convert categorical columns to object columns
outX = self.treat_missing_values(outX) #treat missing values function
#iterate through the dataframe and apply the bins
for i in transform_features:
tempX = outX[i] #pandas Series
original_column_name = str(i)
#create the column name based on user provided prefix
if self.woe_prefix:
new_column_name = str(self.woe_prefix) + '_' + str(i)
else:
new_column_name = original_column_name
#check if the bin mapping is present
#check_is_fitted(self, 'woe_bins')
if not self.woe_bins:
raise ValueError("woe_bins variable is not present. \
Estimator has to be fitted to apply transformations.")
outX[new_column_name] = tempX.replace(self.woe_bins[original_column_name])
#transformed dataframe
return outX
#Method that describes what we need this transformer to do
def fit_transform(self, X, y):
return self.fit(X, y).transform(X)
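# --- Added usage sketch (not part of the original module). Assumes the package is
# --- importable (e.g. ``from xverse.transformer import WOE``); run via ``python -m`` so
# --- the relative imports above resolve. Toy data only, to show the main outputs.
if __name__ == "__main__":
    demo_X = pd.DataFrame({
        "education": ["primary", "secondary", "tertiary", "primary",
                      "secondary", "tertiary", "primary", "secondary"],
    })
    demo_y = pd.Series([0, 1, 1, 1, 0, 0, 0, 1])
    woe_clf = WOE(monotonic_binning=False)   # no numeric columns in the toy frame
    demo_out = woe_clf.fit_transform(demo_X, demo_y)
    print(woe_clf.woe_df)    # per-category counts, distributions and WOE
    print(woe_clf.iv_df)     # information value per variable
    print(demo_out.head())   # WOE-encoded copy of the input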
| [] |
okapies/cupy | cupy/linalg/product.py | 4e8394e5e0c4e420295cbc36819e8e0f7de90e9d | import numpy
import six
import cupy
from cupy import core
from cupy import internal
from cupy.linalg.solve import inv
from cupy.util import collections_abc
matmul = core.matmul
def dot(a, b, out=None):
"""Returns a dot product of two arrays.
For arrays with more than one axis, it computes the dot product along the
last axis of ``a`` and the second-to-last axis of ``b``. This is just a
matrix product if the both arrays are 2-D. For 1-D arrays, it uses their
unique axis as an axis to take dot product over.
Args:
a (cupy.ndarray): The left argument.
b (cupy.ndarray): The right argument.
out (cupy.ndarray): Output array.
Returns:
cupy.ndarray: The dot product of ``a`` and ``b``.
.. seealso:: :func:`numpy.dot`
"""
# TODO(okuta): check type
return a.dot(b, out)
def vdot(a, b):
"""Returns the dot product of two vectors.
The input arrays are flattened into 1-D vectors and then it performs inner
product of these vectors.
Args:
a (cupy.ndarray): The first argument.
b (cupy.ndarray): The second argument.
Returns:
cupy.ndarray: Zero-dimensional array of the dot product result.
.. seealso:: :func:`numpy.vdot`
"""
if a.size != b.size:
raise ValueError('Axis dimension mismatch')
if a.dtype.kind == 'c':
a = a.conj()
return core.tensordot_core(a, b, None, 1, 1, a.size, ())
def inner(a, b):
"""Returns the inner product of two arrays.
It uses the last axis of each argument to take sum product.
Args:
a (cupy.ndarray): The first argument.
b (cupy.ndarray): The second argument.
Returns:
cupy.ndarray: The inner product of ``a`` and ``b``.
.. seealso:: :func:`numpy.inner`
"""
a_ndim = a.ndim
b_ndim = b.ndim
if a_ndim == 0 or b_ndim == 0:
return cupy.multiply(a, b)
a_axis = a_ndim - 1
b_axis = b_ndim - 1
if a.shape[-1] != b.shape[-1]:
raise ValueError('Axis dimension mismatch')
if a_axis:
a = cupy.rollaxis(a, a_axis, 0)
if b_axis:
b = cupy.rollaxis(b, b_axis, 0)
ret_shape = a.shape[1:] + b.shape[1:]
k = a.shape[0]
n = a.size // k
m = b.size // k
return core.tensordot_core(a, b, None, n, m, k, ret_shape)
def outer(a, b, out=None):
"""Returns the outer product of two vectors.
The input arrays are flattened into 1-D vectors and then it performs outer
product of these vectors.
Args:
a (cupy.ndarray): The first argument.
b (cupy.ndarray): The second argument.
out (cupy.ndarray): Output array.
Returns:
cupy.ndarray: 2-D array of the outer product of ``a`` and ``b``.
.. seealso:: :func:`numpy.outer`
"""
n = a.size
m = b.size
ret_shape = (n, m)
if out is None:
return core.tensordot_core(a, b, None, n, m, 1, ret_shape)
if out.size != n * m:
raise ValueError('Output array has an invalid size')
if out.flags.c_contiguous:
return core.tensordot_core(a, b, out, n, m, 1, ret_shape)
else:
out[:] = core.tensordot_core(a, b, None, n, m, 1, ret_shape)
return out
def tensordot(a, b, axes=2):
"""Returns the tensor dot product of two arrays along specified axes.
This is equivalent to compute dot product along the specified axes which
are treated as one axis by reshaping.
Args:
a (cupy.ndarray): The first argument.
b (cupy.ndarray): The second argument.
axes:
- If it is an integer, then ``axes`` axes at the last of ``a`` and
the first of ``b`` are used.
- If it is a pair of sequences of integers, then these two
sequences specify the list of axes for ``a`` and ``b``. The
corresponding axes are paired for sum-product.
Returns:
cupy.ndarray: The tensor dot product of ``a`` and ``b`` along the
axes specified by ``axes``.
.. seealso:: :func:`numpy.tensordot`
"""
a_ndim = a.ndim
b_ndim = b.ndim
if a_ndim == 0 or b_ndim == 0:
if axes != 0 and axes != ((), ()):
raise ValueError('An input is zero-dim while axes has dimensions')
return cupy.multiply(a, b)
if isinstance(axes, collections_abc.Sequence):
if len(axes) != 2:
raise ValueError('Axes must consist of two arrays.')
a_axes, b_axes = axes
if numpy.isscalar(a_axes):
a_axes = a_axes,
if numpy.isscalar(b_axes):
b_axes = b_axes,
else:
a_axes = tuple(six.moves.range(a_ndim - axes, a_ndim))
b_axes = tuple(six.moves.range(axes))
sum_ndim = len(a_axes)
if sum_ndim != len(b_axes):
raise ValueError('Axes length mismatch')
for a_axis, b_axis in zip(a_axes, b_axes):
if a.shape[a_axis] != b.shape[b_axis]:
raise ValueError('Axis dimension mismatch')
# Make the axes non-negative
a = _move_axes_to_head(a, [axis % a_ndim for axis in a_axes])
b = _move_axes_to_head(b, [axis % b_ndim for axis in b_axes])
ret_shape = a.shape[sum_ndim:] + b.shape[sum_ndim:]
k = internal.prod(a.shape[:sum_ndim])
# Avoid division by zero: core.tensordot_core returns zeros without
# checking n, m consistency, thus allowing 0-length dimensions to work
n = a.size // k if k != 0 else 0
m = b.size // k if k != 0 else 0
return core.tensordot_core(a, b, None, n, m, k, ret_shape)
def matrix_power(M, n):
"""Raise a square matrix to the (integer) power `n`.
Args:
M (~cupy.ndarray): Matrix to raise by power n.
n (~int): Power to raise matrix to.
Returns:
~cupy.ndarray: Output array.
.. note:: M must be of dtype `float32` or `float64`.
    .. seealso:: :func:`numpy.linalg.matrix_power`
"""
if M.ndim != 2 or M.shape[0] != M.shape[1]:
raise ValueError('input must be a square array')
if not isinstance(n, six.integer_types):
raise TypeError('exponent must be an integer')
if n == 0:
return cupy.identity(M.shape[0], dtype=M.dtype)
elif n < 0:
M = inv(M)
n *= -1
# short-cuts
if n <= 3:
if n == 1:
return M
elif n == 2:
return cupy.matmul(M, M)
else:
return cupy.matmul(cupy.matmul(M, M), M)
# binary decomposition to reduce the number of Matrix
# multiplications for n > 3.
result, Z = None, None
for b in cupy.binary_repr(n)[::-1]:
Z = M if Z is None else cupy.matmul(Z, Z)
if b == '1':
result = Z if result is None else cupy.matmul(result, Z)
return result
def kron(a, b):
"""Returns the kronecker product of two arrays.
Args:
a (~cupy.ndarray): The first argument.
b (~cupy.ndarray): The second argument.
Returns:
~cupy.ndarray: Output array.
.. seealso:: :func:`numpy.kron`
"""
a_ndim = a.ndim
b_ndim = b.ndim
if a_ndim == 0 or b_ndim == 0:
return cupy.multiply(a, b)
ndim = b_ndim
a_shape = a.shape
b_shape = b.shape
if a_ndim != b_ndim:
if b_ndim > a_ndim:
a_shape = (1,) * (b_ndim - a_ndim) + a_shape
else:
b_shape = (1,) * (a_ndim - b_ndim) + b_shape
ndim = a_ndim
axis = ndim - 1
out = core.tensordot_core(a, b, None, a.size, b.size, 1, a_shape + b_shape)
for _ in six.moves.range(ndim):
out = core.concatenate_method(out, axis=axis)
return out
def _move_axes_to_head(a, axes):
# This function moves the axes of ``s`` to the head of the shape.
for idx, axis in enumerate(axes):
if idx != axis:
break
else:
return a
return a.transpose(
axes + [i for i in six.moves.range(a.ndim) if i not in axes])
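# --- Added demo sketch (not part of the original module). Requires a CUDA device and an
# --- installed cupy build matching this module; run e.g. as
# --- ``python -m cupy.linalg.product`` or copy the body into a separate script.
if __name__ == '__main__':
    a = cupy.arange(24, dtype='f').reshape(2, 3, 4)
    b = cupy.arange(24, dtype='f').reshape(4, 3, 2)
    print(tensordot(a, b, axes=([1, 2], [1, 0])).shape)   # -> (2, 2)
    m = cupy.asarray([[2., 0.], [0., 3.]])
    print(matrix_power(m, 3))                                  # diagonal entries cubed
    print(kron(cupy.eye(2), cupy.asarray([[0., 1.], [1., 0.]])))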
| [((1402, 1451), 'cupy.core.tensordot_core', 'core.tensordot_core', (['a', 'b', 'None', '(1)', '(1)', 'a.size', '()'], {}), '(a, b, None, 1, 1, a.size, ())\n', (1421, 1451), False, 'from cupy import core\n'), ((2282, 2333), 'cupy.core.tensordot_core', 'core.tensordot_core', (['a', 'b', 'None', 'n', 'm', 'k', 'ret_shape'], {}), '(a, b, None, n, m, k, ret_shape)\n', (2301, 2333), False, 'from cupy import core\n'), ((5217, 5250), 'cupy.internal.prod', 'internal.prod', (['a.shape[:sum_ndim]'], {}), '(a.shape[:sum_ndim])\n', (5230, 5250), False, 'from cupy import internal\n'), ((5484, 5535), 'cupy.core.tensordot_core', 'core.tensordot_core', (['a', 'b', 'None', 'n', 'm', 'k', 'ret_shape'], {}), '(a, b, None, n, m, k, ret_shape)\n', (5503, 5535), False, 'from cupy import core\n'), ((7425, 7494), 'cupy.core.tensordot_core', 'core.tensordot_core', (['a', 'b', 'None', 'a.size', 'b.size', '(1)', '(a_shape + b_shape)'], {}), '(a, b, None, a.size, b.size, 1, a_shape + b_shape)\n', (7444, 7494), False, 'from cupy import core\n'), ((7508, 7529), 'six.moves.range', 'six.moves.range', (['ndim'], {}), '(ndim)\n', (7523, 7529), False, 'import six\n'), ((1899, 1918), 'cupy.multiply', 'cupy.multiply', (['a', 'b'], {}), '(a, b)\n', (1912, 1918), False, 'import cupy\n'), ((2084, 2111), 'cupy.rollaxis', 'cupy.rollaxis', (['a', 'a_axis', '(0)'], {}), '(a, a_axis, 0)\n', (2097, 2111), False, 'import cupy\n'), ((2139, 2166), 'cupy.rollaxis', 'cupy.rollaxis', (['b', 'b_axis', '(0)'], {}), '(b, b_axis, 0)\n', (2152, 2166), False, 'import cupy\n'), ((2891, 2942), 'cupy.core.tensordot_core', 'core.tensordot_core', (['a', 'b', 'None', 'n', 'm', '(1)', 'ret_shape'], {}), '(a, b, None, n, m, 1, ret_shape)\n', (2910, 2942), False, 'from cupy import core\n'), ((3077, 3127), 'cupy.core.tensordot_core', 'core.tensordot_core', (['a', 'b', 'out', 'n', 'm', '(1)', 'ret_shape'], {}), '(a, b, out, n, m, 1, ret_shape)\n', (3096, 3127), False, 'from cupy import core\n'), ((3155, 3206), 'cupy.core.tensordot_core', 'core.tensordot_core', (['a', 'b', 'None', 'n', 'm', '(1)', 'ret_shape'], {}), '(a, b, None, n, m, 1, ret_shape)\n', (3174, 3206), False, 'from cupy import core\n'), ((4284, 4303), 'cupy.multiply', 'cupy.multiply', (['a', 'b'], {}), '(a, b)\n', (4297, 4303), False, 'import cupy\n'), ((4489, 4511), 'numpy.isscalar', 'numpy.isscalar', (['a_axes'], {}), '(a_axes)\n', (4503, 4511), False, 'import numpy\n'), ((4553, 4575), 'numpy.isscalar', 'numpy.isscalar', (['b_axes'], {}), '(b_axes)\n', (4567, 4575), False, 'import numpy\n'), ((6133, 6173), 'cupy.identity', 'cupy.identity', (['M.shape[0]'], {'dtype': 'M.dtype'}), '(M.shape[0], dtype=M.dtype)\n', (6146, 6173), False, 'import cupy\n'), ((6555, 6574), 'cupy.binary_repr', 'cupy.binary_repr', (['n'], {}), '(n)\n', (6571, 6574), False, 'import cupy\n'), ((7104, 7123), 'cupy.multiply', 'cupy.multiply', (['a', 'b'], {}), '(a, b)\n', (7117, 7123), False, 'import cupy\n'), ((7545, 7584), 'cupy.core.concatenate_method', 'core.concatenate_method', (['out'], {'axis': 'axis'}), '(out, axis=axis)\n', (7568, 7584), False, 'from cupy import core\n'), ((4639, 4677), 'six.moves.range', 'six.moves.range', (['(a_ndim - axes)', 'a_ndim'], {}), '(a_ndim - axes, a_ndim)\n', (4654, 4677), False, 'import six\n'), ((4702, 4723), 'six.moves.range', 'six.moves.range', (['axes'], {}), '(axes)\n', (4717, 4723), False, 'import six\n'), ((6202, 6208), 'cupy.linalg.solve.inv', 'inv', (['M'], {}), '(M)\n', (6205, 6208), False, 'from cupy.linalg.solve import inv\n'), ((6614, 6631), 'cupy.matmul', 
'cupy.matmul', (['Z', 'Z'], {}), '(Z, Z)\n', (6625, 6631), False, 'import cupy\n'), ((6338, 6355), 'cupy.matmul', 'cupy.matmul', (['M', 'M'], {}), '(M, M)\n', (6349, 6355), False, 'import cupy\n'), ((6699, 6721), 'cupy.matmul', 'cupy.matmul', (['result', 'Z'], {}), '(result, Z)\n', (6710, 6721), False, 'import cupy\n'), ((6401, 6418), 'cupy.matmul', 'cupy.matmul', (['M', 'M'], {}), '(M, M)\n', (6412, 6418), False, 'import cupy\n'), ((7865, 7888), 'six.moves.range', 'six.moves.range', (['a.ndim'], {}), '(a.ndim)\n', (7880, 7888), False, 'import six\n')] |
aligoren/pyalgo | fibo.py | 8aa58143d3301f70ed7189ca86ce0c7886f92e8c | def fibo(n):
return n <= 1 or fibo(n-1) + fibo(n-2)
def fibo_main():
for n in range(1,47):
res = fibo(n)
print("%s\t%s" % (n, res))
fibo_main()
# profiling result for 47 numbers
# profile: python -m profile fibo.py
"""
-1273940835 function calls (275 primitive calls) in 18966.707 seconds
Ordered by: standard name
ncalls tottime percall cumtime percall filename:lineno(function)
90 0.000 0.000 0.001 0.000 cp857.py:18(encode)
1 0.000 0.000 18966.707 18966.707 fibo.py:1(<module>)
-1273941064/46 18966.697 -0.000 18966.697 412.319 fibo.py:1(fibo)
1 0.001 0.001 18966.707 18966.707 fibo.py:4(main)
90 0.000 0.000 0.000 0.000 {built-in method charmap_encode}
1 0.000 0.000 18966.707 18966.707 {built-in method exec}
45 0.009 0.000 0.010 0.000 {built-in method print}
1 0.000 0.000 0.000 0.000 {method 'disable' of '_lsprof.Prof
iler' objects}
""" | [] |
yihui8776/TensorRT-DETR | trt_util/common.py | 1f32e9a2f98e26ec5b2376f9a2695193887430fb | #
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ~~~Medcare AI Lab~~~
# This part of the code was written with reference to the official TensorRT samples, with the relevant methods modified.
#
import pycuda.driver as cuda
#https://documen.tician.de/pycuda/driver.html
import pycuda.autoinit
import numpy as np
import tensorrt as trt
from .calibrator import Calibrator
import sys, os
import time
# TRT_LOGGER = trt.Logger(trt.Logger.VERBOSE)
# TRT_LOGGER = trt.Logger(trt.Logger.INFO)
TRT_LOGGER = trt.Logger()
# Allocate host and device buffers, and create a stream.
class HostDeviceMem(object):
def __init__(self, host_mem, device_mem):
self.host = host_mem
self.device = device_mem
def __str__(self):
return "Host:\n" + str(self.host) + "\nDevice:\n" + str(self.device)
def __repr__(self):
return self.__str__()
def allocate_buffers(engine):
inputs = []
outputs = []
bindings = []
stream = cuda.Stream()
for binding in engine:
size = trt.volume(engine.get_binding_shape(binding)) # <--------- the main diff to v2
dtype = trt.nptype(engine.get_binding_dtype(binding))
# Allocate host and device buffers
host_mem = cuda.pagelocked_empty(size, dtype)
device_mem = cuda.mem_alloc(host_mem.nbytes)
# Append the device buffer to device bindings.
bindings.append(int(device_mem))
# Append to the appropriate list.
if engine.binding_is_input(binding):
inputs.append(HostDeviceMem(host_mem, device_mem))
else:
outputs.append(HostDeviceMem(host_mem, device_mem))
return inputs, outputs, bindings, stream
def allocate_buffers_v2(engine):
inputs = []
outputs = []
bindings = []
stream = cuda.Stream()
for binding in engine:
size = trt.volume(engine.get_binding_shape(binding)) * engine.max_batch_size
dtype = trt.nptype(engine.get_binding_dtype(binding))
# Allocate host and device buffers
host_mem = cuda.pagelocked_empty(size, dtype)
device_mem = cuda.mem_alloc(host_mem.nbytes)
# Append the device buffer to device bindings.
bindings.append(int(device_mem))
# Append to the appropriate list.
if engine.binding_is_input(binding):
inputs.append(HostDeviceMem(host_mem, device_mem))
else:
outputs.append(HostDeviceMem(host_mem, device_mem))
return inputs, outputs, bindings, stream
# do inference multi outputs
def do_inference_v2(context, bindings, inputs, outputs, stream, input_tensor):
# Transfer input data to the GPU.
[cuda.memcpy_htod_async(inp.device, inp.host, stream) for inp in inputs]
# Run inference.
context.execute_async_v2(bindings=bindings, stream_handle=stream.handle)
# Transfer predictions back from the GPU.
[cuda.memcpy_dtoh_async(out.host, out.device, stream) for out in outputs]
# Synchronize the stream
stream.synchronize()
# Return only the host outputs.
return [out.host for out in outputs]
# The onnx path is used for Pytorch models.
def build_engine_onnx(model_file,engine_file,FP16=False,verbose=False,dynamic_input=False,batch_size=1):
def get_engine():
EXPLICIT_BATCH = 1 << (int)(trt.NetworkDefinitionCreationFlag.EXPLICIT_BATCH)
# with trt.Builder(TRT_LOGGER) as builder, builder.create_network(EXPLICIT_BATCH) as network,builder.create_builder_config() as config, trt.OnnxParser(network,TRT_LOGGER) as parser:
with trt.Builder(TRT_LOGGER) as builder, builder.create_network(EXPLICIT_BATCH) as network, builder.create_builder_config() as config,\
trt.OnnxParser(network,TRT_LOGGER) as parser:
# Workspace size is the maximum amount of memory available to the builder while building an engine.
#builder.max_workspace_size = 6 << 30 # 6G
config.max_workspace_size = (1 << 30) #for trt8
            builder.max_batch_size = batch_size  # for trt8: max_batch_size lives on the builder, not the config
if FP16:
print("[INFO] Open FP16 Mode!")
                config.set_flag(trt.BuilderFlag.FP16)  # for trt8; the tensorrt module is imported as trt
#builder.fp16_mode = True #trt7
with open(model_file, 'rb') as model:
parser.parse(model.read())
if verbose:
print(">"*50)
for error in range(parser.num_errors):
print(parser.get_error(error))
network.get_input(0).shape = [ batch_size, 3, 800, 800 ]
if dynamic_input:
profile = builder.create_optimization_profile();
profile.set_shape("inputs", (1,3,800,800), (8,3,800,800), (64,3,800,800))
config.add_optimization_profile(profile)
# builder engine
#engine = builder.build_cuda_engine(network) #trt 7
engine = builder.build_engine(network, config) #trt8
print("[INFO] Completed creating Engine!")
with open(engine_file, "wb") as f:
f.write(engine.serialize())
return engine
if os.path.exists(engine_file):
# If a serialized engine exists, use it instead of building an engine.
print("[INFO] Reading engine from file {}".format(engine_file))
with open(engine_file, "rb") as f, trt.Runtime(TRT_LOGGER) as runtime:
return runtime.deserialize_cuda_engine(f.read())
else:
return get_engine()
# int8 quant
def build_engine_onnx_v2(onnx_file_path="", engine_file_path="",fp16_mode=False, int8_mode=False, \
max_batch_size=1,calibration_stream=None, calibration_table_path="", save_engine=False):
"""Attempts to load a serialized engine if available, otherwise builds a new TensorRT engine and saves it."""
def build_engine(max_batch_size, save_engine):
"""Takes an ONNX file and creates a TensorRT engine to run inference with"""
with trt.Builder(TRT_LOGGER) as builder, builder.create_network(1) as network,\
builder.create_builder_config() as config,trt.OnnxParser(network, TRT_LOGGER) as parser:
# parse onnx model file
if not os.path.exists(onnx_file_path):
quit(f'[Error]ONNX file {onnx_file_path} not found')
print(f'[INFO] Loading ONNX file from path {onnx_file_path}...')
with open(onnx_file_path, 'rb') as model:
print('[INFO] Beginning ONNX file parsing')
parser.parse(model.read())
assert network.num_layers > 0, '[Error] Failed to parse ONNX model. \
Please check if the ONNX model is compatible '
print('[INFO] Completed parsing of ONNX file')
print(f'[INFO] Building an engine from file {onnx_file_path}; this may take a while...')
# build trt engine
# config.max_workspace_size = 2 << 30 # 2GB
builder.max_batch_size = max_batch_size
config.max_workspace_size = 2 << 30 # 2GB
if fp16_mode:
config.set_flag(trt.BuilderFlag.FP16)
if int8_mode:
#builder.int8_mode = int8_mode
config.set_flag(trt.BuilderFlag.INT8)
assert calibration_stream, '[Error] a calibration_stream should be provided for int8 mode'
config.int8_calibrator = Calibrator(calibration_stream, calibration_table_path)
# builder.int8_calibrator = Calibrator(calibration_stream, calibration_table_path)
print('[INFO] Int8 mode enabled')
#engine = builder.build_cuda_engine(network)
engine = builder.build_engine(network, config)
if engine is None:
print('[INFO] Failed to create the engine')
return None
print("[INFO] Completed creating the engine")
if save_engine:
with open(engine_file_path, "wb") as f:
f.write(engine.serialize())
return engine
if os.path.exists(engine_file_path):
# If a serialized engine exists, load it instead of building a new one.
print(f"[INFO] Reading engine from file {engine_file_path}")
with open(engine_file_path, "rb") as f, trt.Runtime(TRT_LOGGER) as runtime:
return runtime.deserialize_cuda_engine(f.read())
else:
return build_engine(max_batch_size, save_engine)
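# --- Added usage sketch (not part of the original module). File names and shapes are
# --- placeholders; the flow simply chains the helpers defined above with the standard
# --- TensorRT execution-context API.
if __name__ == '__main__':
    demo_engine = build_engine_onnx('detr.onnx', 'detr.trt', FP16=False, batch_size=1)
    demo_inputs, demo_outputs, demo_bindings, demo_stream = allocate_buffers(demo_engine)
    with demo_engine.create_execution_context() as context:
        dummy = np.random.rand(1, 3, 800, 800).astype(np.float32)
        np.copyto(demo_inputs[0].host, dummy.ravel())   # fill the pagelocked input buffer
        results = do_inference_v2(context, demo_bindings, demo_inputs,
                                  demo_outputs, demo_stream, input_tensor=dummy)
        print([r.shape for r in results])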
| [((1002, 1014), 'tensorrt.Logger', 'trt.Logger', ([], {}), '()\n', (1012, 1014), True, 'import tensorrt as trt\n'), ((1462, 1475), 'pycuda.driver.Stream', 'cuda.Stream', ([], {}), '()\n', (1473, 1475), True, 'import pycuda.driver as cuda\n'), ((2280, 2293), 'pycuda.driver.Stream', 'cuda.Stream', ([], {}), '()\n', (2291, 2293), True, 'import pycuda.driver as cuda\n'), ((5661, 5688), 'os.path.exists', 'os.path.exists', (['engine_file'], {}), '(engine_file)\n', (5675, 5688), False, 'import sys, os\n'), ((8653, 8685), 'os.path.exists', 'os.path.exists', (['engine_file_path'], {}), '(engine_file_path)\n', (8667, 8685), False, 'import sys, os\n'), ((1723, 1757), 'pycuda.driver.pagelocked_empty', 'cuda.pagelocked_empty', (['size', 'dtype'], {}), '(size, dtype)\n', (1744, 1757), True, 'import pycuda.driver as cuda\n'), ((1779, 1810), 'pycuda.driver.mem_alloc', 'cuda.mem_alloc', (['host_mem.nbytes'], {}), '(host_mem.nbytes)\n', (1793, 1810), True, 'import pycuda.driver as cuda\n'), ((2530, 2564), 'pycuda.driver.pagelocked_empty', 'cuda.pagelocked_empty', (['size', 'dtype'], {}), '(size, dtype)\n', (2551, 2564), True, 'import pycuda.driver as cuda\n'), ((2586, 2617), 'pycuda.driver.mem_alloc', 'cuda.mem_alloc', (['host_mem.nbytes'], {}), '(host_mem.nbytes)\n', (2600, 2617), True, 'import pycuda.driver as cuda\n'), ((3141, 3193), 'pycuda.driver.memcpy_htod_async', 'cuda.memcpy_htod_async', (['inp.device', 'inp.host', 'stream'], {}), '(inp.device, inp.host, stream)\n', (3163, 3193), True, 'import pycuda.driver as cuda\n'), ((3363, 3415), 'pycuda.driver.memcpy_dtoh_async', 'cuda.memcpy_dtoh_async', (['out.host', 'out.device', 'stream'], {}), '(out.host, out.device, stream)\n', (3385, 3415), True, 'import pycuda.driver as cuda\n'), ((4030, 4053), 'tensorrt.Builder', 'trt.Builder', (['TRT_LOGGER'], {}), '(TRT_LOGGER)\n', (4041, 4053), True, 'import tensorrt as trt\n'), ((4173, 4208), 'tensorrt.OnnxParser', 'trt.OnnxParser', (['network', 'TRT_LOGGER'], {}), '(network, TRT_LOGGER)\n', (4187, 4208), True, 'import tensorrt as trt\n'), ((5884, 5907), 'tensorrt.Runtime', 'trt.Runtime', (['TRT_LOGGER'], {}), '(TRT_LOGGER)\n', (5895, 5907), True, 'import tensorrt as trt\n'), ((6501, 6524), 'tensorrt.Builder', 'trt.Builder', (['TRT_LOGGER'], {}), '(TRT_LOGGER)\n', (6512, 6524), True, 'import tensorrt as trt\n'), ((6634, 6669), 'tensorrt.OnnxParser', 'trt.OnnxParser', (['network', 'TRT_LOGGER'], {}), '(network, TRT_LOGGER)\n', (6648, 6669), True, 'import tensorrt as trt\n'), ((8884, 8907), 'tensorrt.Runtime', 'trt.Runtime', (['TRT_LOGGER'], {}), '(TRT_LOGGER)\n', (8895, 8907), True, 'import tensorrt as trt\n'), ((6749, 6779), 'os.path.exists', 'os.path.exists', (['onnx_file_path'], {}), '(onnx_file_path)\n', (6763, 6779), False, 'import sys, os\n')] |
inpanel/inpanel-desktop | src/init.py | bff4a6accdf8a2976c722adc65f3fa2fe6650448 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import tkinter.messagebox
from tkinter import Button, Label, Tk
from utils.functions import set_window_center
from utils.sqlite_helper import DBHelper
from inpanel import App
class InitWindow(Tk):
"""初始化窗口"""
def __init__(self):
Tk.__init__(self)
self.title("初始化数据")
set_window_center(self, 300, 180)
self.resizable(False, False)
        self.win_success = None # popup window shown after successful initialization
self.init_page()
def init_page(self):
"""加载控件"""
btn_1 = Button(self, text="初始化数据库", command=self.do_init_db)
btn_1.pack(expand="yes", padx=10, pady=10, ipadx=5, ipady=5)
def do_init_db(self):
"""初始化"""
db_helper = DBHelper()
db_helper.reset_database()
db_helper.create_database()
try:
            tmp = db_helper.insert_user("admin", "admin") # default user
tmp2 = db_helper.insert_content_by_username(
"admin",
"Hello World !",
"源码仓库地址:https://github.com/doudoudzj/tkinter-app",
"github",
)
tmp3 = db_helper.get_content_by_username("admin")
print("添加用户admin:", tmp)
print("添加内容:", tmp2)
print("查询内容:", tmp3)
self.do_success()
self.destroy()
except KeyError:
print(KeyError)
self.do_failed()
def do_failed(self):
"""是否重试"""
res = tkinter.messagebox.askretrycancel('提示', '初始化失败,是否重试?', parent=self)
if res is True:
self.do_init_db()
elif res is False:
self.destroy()
def do_success(self):
"""初始化成功弹窗"""
self.win_success = Tk()
self.win_success.title("初始化成功")
set_window_center(self.win_success, 250, 150)
self.win_success.resizable(False, False)
msg = Label(self.win_success, text="初始化成功")
msg.pack(expand="yes", fill="both")
btn = Button(self.win_success, text="确定", command=self.quit)
btn.pack(side="right", padx=10, pady=10, ipadx=5, ipady=5)
btn_open_app = Button(self.win_success, text="启动程序", command=self.open_app)
btn_open_app.pack(side="right", padx=10, pady=10, ipadx=5, ipady=5)
def open_app(self):
"""打开应用程序"""
self.quit()
self.win_success.destroy()
self.win_success.quit()
App()
if __name__ == "__main__":
APP_INIT = InitWindow()
APP_INIT.mainloop()
| [((295, 312), 'tkinter.Tk.__init__', 'Tk.__init__', (['self'], {}), '(self)\n', (306, 312), False, 'from tkinter import Button, Label, Tk\n'), ((349, 382), 'utils.functions.set_window_center', 'set_window_center', (['self', '(300)', '(180)'], {}), '(self, 300, 180)\n', (366, 382), False, 'from utils.functions import set_window_center\n'), ((551, 603), 'tkinter.Button', 'Button', (['self'], {'text': '"""初始化数据库"""', 'command': 'self.do_init_db'}), "(self, text='初始化数据库', command=self.do_init_db)\n", (557, 603), False, 'from tkinter import Button, Label, Tk\n'), ((738, 748), 'utils.sqlite_helper.DBHelper', 'DBHelper', ([], {}), '()\n', (746, 748), False, 'from utils.sqlite_helper import DBHelper\n'), ((1736, 1740), 'tkinter.Tk', 'Tk', ([], {}), '()\n', (1738, 1740), False, 'from tkinter import Button, Label, Tk\n'), ((1789, 1834), 'utils.functions.set_window_center', 'set_window_center', (['self.win_success', '(250)', '(150)'], {}), '(self.win_success, 250, 150)\n', (1806, 1834), False, 'from utils.functions import set_window_center\n'), ((1898, 1935), 'tkinter.Label', 'Label', (['self.win_success'], {'text': '"""初始化成功"""'}), "(self.win_success, text='初始化成功')\n", (1903, 1935), False, 'from tkinter import Button, Label, Tk\n'), ((1995, 2049), 'tkinter.Button', 'Button', (['self.win_success'], {'text': '"""确定"""', 'command': 'self.quit'}), "(self.win_success, text='确定', command=self.quit)\n", (2001, 2049), False, 'from tkinter import Button, Label, Tk\n'), ((2140, 2200), 'tkinter.Button', 'Button', (['self.win_success'], {'text': '"""启动程序"""', 'command': 'self.open_app'}), "(self.win_success, text='启动程序', command=self.open_app)\n", (2146, 2200), False, 'from tkinter import Button, Label, Tk\n'), ((2419, 2424), 'inpanel.App', 'App', ([], {}), '()\n', (2422, 2424), False, 'from inpanel import App\n')] |
roscopecoltran/SniperKit-Core | Toolkits/CMake/hunter/packages/sugar/python/sugar/sugar_warnings_wiki_table_generator.py | 4600dffe1cddff438b948b6c22f586d052971e04 | #!/usr/bin/env python3
# Copyright (c) 2014, Ruslan Baratov
# All rights reserved.
"""
* Wiki table for `leathers` C++ project
Expected format:
### Main table
Name | Clang | GCC | MSVC |
-----------------------------|----------|----------|------|
static-ctor-not-thread-safe | *no* | *no* | 4640 |
switch | **same** | **same** | 4062 |
switch-enum | **same** | **same** | 4061 |
### Xcode/Clang table
Clang | Xcode | Objective-C |
-----------------------|--------------------------------|-------------|
bool-conversion | CLANG_WARN_BOOL_CONVERSION | no |
c++11-extensions | CLANG_WARN_CXX0X_EXTENSIONS | no |
strict-selector-match | GCC_WARN_STRICT_SELECTOR_MATCH | yes |
undeclared-selector | GCC_WARN_UNDECLARED_SELECTOR | yes |
"""
def generate(main_warnings_table):
groups = set()
for i in main_warnings_table:
if i.group != "":
groups.add(i.group)
wiki_file = open("wiki-table.txt", "w")
generate_main_table(main_warnings_table, wiki_file)
for group in groups:
generate_group_table(main_warnings_table, wiki_file, group)
generate_xcode_table(main_warnings_table, wiki_file)
def generate_main_table(main_warnings_table, wiki_file):
head_name = "Name"
head_clang = "Clang"
head_gcc = "GCC"
head_msvc = "MSVC"
def calc_max(head, visitor):
max_len = len(head)
for x in main_warnings_table:
cur_len = visitor(x)
if cur_len > max_len:
max_len = cur_len
return max_len + 2
def name_visitor(table_entry):
if table_entry.group != "":
return 0
return len(table_entry.warning_name)
def clang_visitor(table_entry):
if table_entry.group != "":
return 0
return len(table_entry.clang.wiki_entry(table_entry.warning_name))
def gcc_visitor(table_entry):
if table_entry.group != "":
return 0
return len(table_entry.gcc.wiki_entry(table_entry.warning_name))
def msvc_visitor(table_entry):
if table_entry.group != "":
return 0
return len(table_entry.msvc.wiki_entry(table_entry.warning_name))
max_name = calc_max(head_name, name_visitor)
max_clang = calc_max(head_clang, clang_visitor)
max_gcc = calc_max(head_gcc, gcc_visitor)
max_msvc = calc_max(head_msvc, msvc_visitor)
def fill_string(name, max_name):
result = " " + name + " ";
assert(max_name >= len(result))
left = max_name - len(result)
return result + " " * left
wiki_file.write("### Main table\n\n")
s = "{}|{}|{}|{}|\n".format(
fill_string(head_name, max_name),
fill_string(head_clang, max_clang),
fill_string(head_gcc, max_gcc),
fill_string(head_msvc, max_msvc),
)
wiki_file.write(s)
s = "{}|{}|{}|{}|\n".format(
'-' * max_name,
'-' * max_clang,
'-' * max_gcc,
'-' * max_msvc,
)
wiki_file.write(s)
for entry in main_warnings_table:
if entry.group != "":
continue
s = "{}|{}|{}|{}|\n".format(
fill_string(entry.warning_name, max_name),
fill_string(entry.clang.wiki_entry(entry.warning_name), max_clang),
fill_string(entry.gcc.wiki_entry(entry.warning_name), max_gcc),
fill_string(entry.msvc.wiki_entry(entry.warning_name), max_msvc),
)
wiki_file.write(s)
def generate_group_table(main_warnings_table, wiki_file, group):
head_name = "Name"
head_clang = "Clang"
head_gcc = "GCC"
head_msvc = "MSVC"
def calc_max(head, visitor):
max_len = len(head)
for x in main_warnings_table:
cur_len = visitor(x)
if cur_len > max_len:
max_len = cur_len
return max_len + 2
def name_visitor(table_entry):
if table_entry.group != group:
return 0
return len(table_entry.warning_name)
def clang_visitor(table_entry):
if table_entry.group != group:
return 0
return len(table_entry.clang.wiki_entry(table_entry.warning_name))
def gcc_visitor(table_entry):
if table_entry.group != group:
return 0
return len(table_entry.gcc.wiki_entry(table_entry.warning_name))
def msvc_visitor(table_entry):
if table_entry.group != group:
return 0
return len(table_entry.msvc.wiki_entry(table_entry.warning_name))
max_name = calc_max(head_name, name_visitor)
max_clang = calc_max(head_clang, clang_visitor)
max_gcc = calc_max(head_gcc, gcc_visitor)
max_msvc = calc_max(head_msvc, msvc_visitor)
def fill_string(name, max_name):
result = " " + name + " ";
assert(max_name >= len(result))
left = max_name - len(result)
return result + " " * left
wiki_file.write("\n### Table for group: `{}`\n\n".format(group))
s = "{}|{}|{}|{}|\n".format(
fill_string(head_name, max_name),
fill_string(head_clang, max_clang),
fill_string(head_gcc, max_gcc),
fill_string(head_msvc, max_msvc),
)
wiki_file.write(s)
s = "{}|{}|{}|{}|\n".format(
'-' * max_name,
'-' * max_clang,
'-' * max_gcc,
'-' * max_msvc,
)
wiki_file.write(s)
for entry in main_warnings_table:
if entry.group != group:
continue
s = "{}|{}|{}|{}|\n".format(
fill_string(entry.warning_name, max_name),
fill_string(entry.clang.wiki_entry(entry.warning_name), max_clang),
fill_string(entry.gcc.wiki_entry(entry.warning_name), max_gcc),
fill_string(entry.msvc.wiki_entry(entry.warning_name), max_msvc),
)
wiki_file.write(s)
def generate_xcode_table(main_warnings_table, wiki_file):
head_clang = "Clang"
head_xcode = "Xcode"
head_objc = "Objective-C"
def calc_max(head, visitor):
max_len = len(head)
for x in main_warnings_table:
cur_len = visitor(x)
if cur_len > max_len:
max_len = cur_len
return max_len + 2
def clang_visitor(table_entry):
if table_entry.xcode.option == "":
return 0
return len(table_entry.clang.option)
def xcode_visitor(table_entry):
if table_entry.xcode.option == "":
return 0
return len(table_entry.xcode.option)
def objc_visitor(table_entry):
if table_entry.xcode.option == "":
return 0
if table_entry.objc:
return 3 # "yes"
else:
return 2 # "no"
max_clang = calc_max(head_clang, clang_visitor)
max_xcode = calc_max(head_xcode, xcode_visitor)
max_objc = calc_max(head_objc, objc_visitor)
def fill_string(name, max_name):
result = " " + name + " ";
assert(max_name >= len(result))
left = max_name - len(result)
return result + " " * left
wiki_file.write("\n\n### Xcode/Clang table\n\n")
s = "{}|{}|{}|\n".format(
fill_string(head_clang, max_clang),
fill_string(head_xcode, max_xcode),
fill_string(head_objc, max_objc),
)
wiki_file.write(s)
s = "{}|{}|{}|\n".format(
'-' * max_clang,
'-' * max_xcode,
'-' * max_objc,
)
wiki_file.write(s)
done_list = []
for entry in main_warnings_table:
if entry.xcode.option == "":
continue
if entry.clang.option in done_list:
continue
done_list.append(entry.clang.option)
if entry.objc:
objc = "yes"
else:
objc = "no"
s = "{}|{}|{}|\n".format(
fill_string(entry.clang.option, max_clang),
fill_string(entry.xcode.option, max_xcode),
fill_string(objc, max_objc),
)
wiki_file.write(s)
| [] |
armando-migliaccio/neutron-1 | neutron/plugins/ofagent/agent/ports.py | e31861c15bc73e65a7c22212df2a56f9e45aa0e4 | # Copyright (C) 2014 VA Linux Systems Japan K.K.
# Copyright (C) 2014 YAMAMOTO Takashi <yamamoto at valinux co jp>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class OFPort(object):
def __init__(self, port_name, ofport):
self.port_name = port_name
self.ofport = ofport
@classmethod
def from_ofp_port(cls, ofp_port):
"""Convert from ryu OFPPort."""
return cls(port_name=ofp_port.name, ofport=ofp_port.port_no)
PORT_NAME_LEN = 14
PORT_NAME_PREFIXES = [
"tap", # common cases, including ovs_use_veth=True
"qvo", # nova hybrid interface driver
"qr-", # l3-agent INTERNAL_DEV_PREFIX (ovs_use_veth=False)
"qg-", # l3-agent EXTERNAL_DEV_PREFIX (ovs_use_veth=False)
]
def _is_neutron_port(name):
"""Return True if the port name looks like a neutron port."""
if len(name) != PORT_NAME_LEN:
return False
for pref in PORT_NAME_PREFIXES:
if name.startswith(pref):
return True
return False
def get_normalized_port_name(interface_id):
"""Convert from neutron device id (uuid) to "normalized" port name.
This needs to be synced with ML2 plugin's _device_to_port_id().
An assumption: The switch uses an OS's interface name as the
corresponding OpenFlow port name.
NOTE(yamamoto): While it's true for Open vSwitch, it isn't
necessarily true everywhere. For example, LINC uses something
like "LogicalSwitch0-Port2".
NOTE(yamamoto): The actual prefix might be different. For example,
with the hybrid interface driver, it's "qvo". However, we always
use "tap" prefix throughout the agent and plugin for simplicity.
Some care should be taken when talking to the switch.
"""
return ("tap" + interface_id)[0:PORT_NAME_LEN]
def _normalize_port_name(name):
"""Normalize port name.
See comments in _get_ofport_name.
"""
for pref in PORT_NAME_PREFIXES:
if name.startswith(pref):
return "tap" + name[len(pref):]
return name
class Port(OFPort):
def __init__(self, *args, **kwargs):
super(Port, self).__init__(*args, **kwargs)
self.vif_mac = None
def is_neutron_port(self):
"""Return True if the port looks like a neutron port."""
return _is_neutron_port(self.port_name)
def normalized_port_name(self):
return _normalize_port_name(self.port_name)
| [] |
damaainan/html2md | pdf/wechat/step.py | 0d241381e716d64bbcacad013c108857e815bb15 | # -*- coding=utf-8 -*-
from zwechathihu.mypdf import GenPdf
from db.mysqlite import simpleToolSql
data=[{"url": "http://mp.weixin.qq.com/s?__biz=MzAxODQxMDM0Mw==&mid=2247484852&idx=1&sn=85b50b8b0470bb4897e517955f4e5002&chksm=9bd7fbbcaca072aa75e2a241064a403fde1e579d57ab846cd8537a54253ceb2c8b93cc3bf38e&scene=21#wechat_redirect", "name": "001学习算法和刷题的框架思维"}
]
# path = '***/' || ''
# for val in data:
# # print(val["url"])
# # print(val["name"])
# pdf = GenPdf()
# title = val["name"].replace("/", "-")
# print(title)
# pdf.deal(val["url"], title, '')
# sql = simpleToolSql("url")
# # sql.execute("insert into wx_article (id,name,age) values (?,?,?);",[(1,'abc',15),(2,'bca',16)])
# res = sql.query("select * from wx_article;")
# print(res)
# res = sql.query("select * from wx_article where id=?;",(3,))
# print(res)
# sql.close()
# Fetch the URLs to generate from the db
def getListByTitle(title:str):
sql = simpleToolSql("url")
res = sql.query("select * from wx_article where title="+title+";")
print(res)
sql.close()
return res
# Fetch the URLs to generate from the db
def getListFromSql():
sql = simpleToolSql("url")
# res = sql.query("select * from wx_article where state=0;")
res = sql.query("select * from wx_article;")
print(res)
sql.close()
return res
# Update the db
def updateUrl(id:int):
sql = simpleToolSql("url")
res = sql.execute("update wx_article set state=1 where id = ?;",(id,))
    # the trailing comma is required to make a one-element tuple: https://blog.csdn.net/yimaoyingbi/article/details/104323701
print(res)
sql.close()
return
def addUrl():
sql = simpleToolSql("url")
sql.execute(
"insert into wx_article (url,folder,title,state,turn,create_at,update_at) values (?,?,?,?,?,?);",
[("http",'test',"01",0,1,"2020-12-03 09:38:25","2020-12-03 09:38:25")]
)
res = sql.query("select * from wx_article;")
print(res)
sql.close()
return
# addUrl()
updateUrl(1)
res = getListFromSql()
print(res) | [((918, 938), 'db.mysqlite.simpleToolSql', 'simpleToolSql', (['"""url"""'], {}), "('url')\n", (931, 938), False, 'from db.mysqlite import simpleToolSql\n'), ((1107, 1127), 'db.mysqlite.simpleToolSql', 'simpleToolSql', (['"""url"""'], {}), "('url')\n", (1120, 1127), False, 'from db.mysqlite import simpleToolSql\n'), ((1330, 1350), 'db.mysqlite.simpleToolSql', 'simpleToolSql', (['"""url"""'], {}), "('url')\n", (1343, 1350), False, 'from db.mysqlite import simpleToolSql\n'), ((1567, 1587), 'db.mysqlite.simpleToolSql', 'simpleToolSql', (['"""url"""'], {}), "('url')\n", (1580, 1587), False, 'from db.mysqlite import simpleToolSql\n')] |
ZhuoZhuoCrayon/bk-nodeman | pipeline/validators/handlers.py | 76cb71fcc971c2a0c2be161fcbd6b019d4a7a8ab | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2019 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from django.dispatch import receiver
from pipeline.core.flow.event import EndEvent
from pipeline.core.flow.signals import post_new_end_event_register
from pipeline.validators import rules
@receiver(post_new_end_event_register, sender=EndEvent)
def post_new_end_event_register_handler(sender, node_type, node_cls, **kwargs):
rules.NODE_RULES[node_type] = rules.SINK_RULE
rules.FLOW_NODES_WITHOUT_STARTEVENT.append(node_type)
| [((925, 979), 'django.dispatch.receiver', 'receiver', (['post_new_end_event_register'], {'sender': 'EndEvent'}), '(post_new_end_event_register, sender=EndEvent)\n', (933, 979), False, 'from django.dispatch import receiver\n'), ((1114, 1167), 'pipeline.validators.rules.FLOW_NODES_WITHOUT_STARTEVENT.append', 'rules.FLOW_NODES_WITHOUT_STARTEVENT.append', (['node_type'], {}), '(node_type)\n', (1156, 1167), False, 'from pipeline.validators import rules\n')] |
jetbrains-academy/Python-Libraries-NumPy | NumPy/Array Basics/Random Shuffle/tests/test_task.py | 7ce0f2d08f87502d5d97bbc6921f0566184d4ebb | import unittest
import numpy as np
from task import arr, permuted_2d, fully_random
class TestCase(unittest.TestCase):
def test_shape(self):
self.assertEqual((5, 20), arr.shape, msg="Wrong shape of the array 'arr'.")
self.assertEqual((5, 20), permuted_2d.shape, msg="Wrong shape of the array 'permuted_2d'.")
self.assertEqual((5, 20), fully_random.shape, msg="Wrong shape of the array 'fully_random'.")
def test_arr(self):
for i in arr:
# This test checks if in each row the minimum element goes first and maximum - last.
self.assertTrue(i[0] == min(i) and i[-1] == max(i), msg="'arr' should be shuffled along the 0th axis.")
def test_two_d(self):
for i in permuted_2d:
# This test checks that differences between all neighboring elements in rows of the array
# are not equal to 1 (in non-shuffled rows they would be).
self.assertFalse(all([(x - i[i.tolist().index(x) - 1]) == 1 for x in i if i.tolist().index(x) > 0]),
msg="'permuted_2d' should be shuffled along the 1st axis.")
def test_random(self):
# This test checks if elements were also randomized between the rows.
for i in fully_random:
self.assertTrue(max(i) - min(i) > 19, "'fully_random' needs to be fully shuffled.")
| [] |
lausitzer/plugin.video.mediathekview | resources/lib/channelui.py | 7f2086240625b9b4f8d50af114f8f47654346ed1 | # -*- coding: utf-8 -*-
"""
The channel model UI module
Copyright 2017-2018, Leo Moll and Dominik Schlösser
SPDX-License-Identifier: MIT
"""
# pylint: disable=import-error
import os
import xbmcgui
import xbmcplugin
import resources.lib.mvutils as mvutils
from resources.lib.channel import Channel
class ChannelUI(Channel):
"""
The channel model view class
Args:
plugin(MediathekView): the plugin object
sortmethods(array, optional): an array of sort methods
for the directory representation. Default is
`[ xbmcplugin.SORT_METHOD_TITLE ]`
nextdir(str, optional):
"""
def __init__(self, plugin, sortmethods=None, nextdir='initial'):
super(ChannelUI, self).__init__()
self.plugin = plugin
self.handle = plugin.addon_handle
self.nextdir = nextdir
self.sortmethods = sortmethods if sortmethods is not None else [
xbmcplugin.SORT_METHOD_TITLE]
self.count = 0
def begin(self):
"""
Begin a directory containing channels
"""
for method in self.sortmethods:
xbmcplugin.addSortMethod(self.handle, method)
def add(self, altname=None):
"""
Add the current entry to the directory
Args:
altname(str, optional): alternative name for the entry
"""
resultingname = self.channel if self.count == 0 else '%s (%d)' % (
self.channel, self.count, )
list_item = xbmcgui.ListItem(
label=resultingname if altname is None else altname)
icon = os.path.join(
self.plugin.path,
'resources',
'icons',
self.channel.lower() + '-m.png'
)
list_item.setArt({
'thumb': icon,
'icon': icon
})
info_labels = {
'title': resultingname,
'sorttitle': resultingname.lower()
}
list_item.setInfo(type='video', infoLabels=info_labels)
xbmcplugin.addDirectoryItem(
handle=self.handle,
url=mvutils.build_url({
'mode': self.nextdir,
'channel': self.channelid
}),
listitem=list_item,
isFolder=True
)
def end(self):
""" Finish a directory containing channels """
xbmcplugin.endOfDirectory(self.handle)
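# Illustrative usage (sketch only; `plugin` is the add-on object and the
# channel data is populated elsewhere, so the names here are hypothetical):
#   ui = ChannelUI(plugin, nextdir='show-channel')
#   ui.begin()
#   # ... set channel fields and call ui.add() for each channel ...
#   ui.end()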
| [((1503, 1572), 'xbmcgui.ListItem', 'xbmcgui.ListItem', ([], {'label': '(resultingname if altname is None else altname)'}), '(label=resultingname if altname is None else altname)\n', (1519, 1572), False, 'import xbmcgui\n'), ((2370, 2408), 'xbmcplugin.endOfDirectory', 'xbmcplugin.endOfDirectory', (['self.handle'], {}), '(self.handle)\n', (2395, 2408), False, 'import xbmcplugin\n'), ((1135, 1180), 'xbmcplugin.addSortMethod', 'xbmcplugin.addSortMethod', (['self.handle', 'method'], {}), '(self.handle, method)\n', (1159, 1180), False, 'import xbmcplugin\n'), ((2103, 2171), 'resources.lib.mvutils.build_url', 'mvutils.build_url', (["{'mode': self.nextdir, 'channel': self.channelid}"], {}), "({'mode': self.nextdir, 'channel': self.channelid})\n", (2120, 2171), True, 'import resources.lib.mvutils as mvutils\n')] |
smk762/Dragonhound | getconf.py | 7cbaed2779afec47fcbf2481d0dae61daa4c11da | #!/usr/bin/env python3
#Credit to @Alright for the RPCs
import re
import os
import requests
import json
import platform
# define function that fetches rpc creds from .conf
def def_credentials(chain):
operating_system = platform.system()
if operating_system == 'Darwin':
ac_dir = os.environ['HOME'] + '/Library/Application Support/Komodo'
elif operating_system == 'Linux':
ac_dir = os.environ['HOME'] + '/.komodo'
    elif operating_system == 'Windows':
        # assumption: Komodo's default data dir on Windows is %APPDATA%\Komodo
        ac_dir = os.environ['APPDATA'] + '/Komodo'
# define config file path
if chain == 'KMD':
coin_config_file = str(ac_dir + '/komodo.conf')
else:
coin_config_file = str(ac_dir + '/' + chain + '/' + chain + '.conf')
#define rpc creds
with open(coin_config_file, 'r') as f:
#print("Reading config file for credentials:", coin_config_file)
for line in f:
l = line.rstrip()
if re.search('rpcuser', l):
rpcuser = l.replace('rpcuser=', '')
elif re.search('rpcpassword', l):
rpcpassword = l.replace('rpcpassword=', '')
elif re.search('rpcport', l):
rpcport = l.replace('rpcport=', '')
return('http://' + rpcuser + ':' + rpcpassword + '@127.0.0.1:' + rpcport)
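# Illustrative usage (credentials and port are hypothetical; they are read from
# the coin's .conf): def_credentials returns an RPC URL such as
# 'http://user:pass@127.0.0.1:7771', which post_rpc below consumes:
#   url = def_credentials('KMD')
#   info = post_rpc(url, {"jsonrpc": "1.0", "id": "python", "method": "getinfo", "params": []})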
# define function that posts json data
def post_rpc(url, payload, auth=None):
try:
r = requests.post(url, data=json.dumps(payload), auth=auth)
return(json.loads(r.text))
except Exception as e:
raise Exception("Couldn't connect to " + url + ": ", e)
# Return current -pubkey=
def getpubkey_rpc(chain):
getinfo_payload = {
"jsonrpc": "1.0",
"id": "python",
"method": "getinfo",
"params": []}
getinfo_result = post_rpc(def_credentials(chain), getinfo_payload)
return(getinfo_result['result']['pubkey'])
# return latest batontxid from all publishers
def get_latest_batontxids(chain, oracletxid):
oraclesinfo_result = oraclesinfo_rpc(chain, oracletxid)
latest_batontxids = {}
# fill "latest_batontxids" dictionary with publisher:batontxid data
for i in oraclesinfo_result['registered']:
latest_batontxids[i['publisher']] = i['batontxid']
return(latest_batontxids)
#VANILLA RPC
def sendrawtx_rpc(chain, rawtx):
sendrawtx_payload = {
"jsonrpc": "1.0",
"id": "python",
"method": "sendrawtransaction",
"params": [rawtx]}
#rpcurl = def_credentials(chain)
return(post_rpc(def_credentials(chain), sendrawtx_payload))
def signmessage_rpc(chain, address, message):
signmessage_payload = {
"jsonrpc": "1.0",
"id": "python",
"method": "signmessage",
"params": [
address,
message
]
}
signmessage_result = post_rpc(def_credentials(chain), signmessage_payload)
return(signmessage_result['result'])
def verifymessage_rpc(chain, address, signature, message):
verifymessage_payload = {
"jsonrpc": "1.0",
"id": "python",
"method": "verifymessage",
"params": [
address,
signature,
message
]
}
verifymessage_result = post_rpc(def_credentials(chain), verifymessage_payload)
return(verifymessage_result['result'])
def kvsearch_rpc(chain, key):
kvsearch_payload = {
"jsonrpc": "1.0",
"id": "python",
"method": "kvsearch",
"params": [
key
]
}
kvsearch_result = post_rpc(def_credentials(chain), kvsearch_payload)
return(kvsearch_result['result'])
def kvupdate_rpc(chain, key, value, days, password):
# create dynamic oraclessamples payload
kvupdate_payload = {
"jsonrpc": "1.0",
"id": "python",
"method": "kvupdate",
"params": [
key,
value,
str(days),
password]}
# make kvupdate rpc call
kvupdate_result = post_rpc(def_credentials(chain), kvupdate_payload)
return(kvupdate_result)
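# Illustrative usage (hypothetical key/value, simply mirroring the wrapper
# signatures above):
#   kvupdate_rpc('KMD', 'mykey', 'myvalue', 1, 'mypassphrase')  # publish for 1 day
#   kvsearch_rpc('KMD', 'mykey')                                # read it back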
def oraclesdata_rpc(chain, oracletxid, hexstr):
oraclesdata_payload = {
"jsonrpc": "1.0",
"id": "python",
"method": "oraclesdata",
"params": [
oracletxid,
hexstr]}
oraclesdata_result = post_rpc(def_credentials(chain), oraclesdata_payload)
return(oraclesdata_result['result'])
def oraclescreate_rpc(chain, name, description, oracle_type):
oraclescreate_payload = {
"jsonrpc": "1.0",
"id": "python",
"method": "oraclescreate",
"params": [
name,
description,
oracle_type]}
oraclescreate_result = post_rpc(def_credentials(chain), oraclescreate_payload)
return(oraclescreate_result['result'])
def oraclesinfo_rpc(chain, oracletxid):
oraclesinfo_payload = {
"jsonrpc": "1.0",
"id": "python",
"method": "oraclesinfo",
"params": [oracletxid]}
oraclesinfo_result = post_rpc(def_credentials(chain), oraclesinfo_payload)
return(oraclesinfo_result['result'])
def oracleslist_rpc(chain):
oracleslist_payload = {
"jsonrpc": "1.0",
"id": "python",
"method": "oracleslist",
"params": []}
oracleslist_result = post_rpc(def_credentials(chain), oracleslist_payload)
return(oracleslist_result['result'])
def oraclessubscribe_rpc(chain, oracletxid, publisher, amount):
oraclessubscribe_payload = {
"jsonrpc": "1.0",
"id": "python",
"method": "oraclessubscribe",
"params": [oracletxid, publisher, amount]}
oraclessubscribe_result = post_rpc(def_credentials(chain), oraclessubscribe_payload)
return(oraclessubscribe_result['result'])
def oraclesregister_rpc(chain, oracletxid, datafee):
oraclesregister_payload = {
"jsonrpc": "1.0",
"id": "python",
"method": "oraclesregister",
"params": [
oracletxid,
str(datafee)]}
oraclesregister_result = post_rpc(def_credentials(chain), oraclesregister_payload)
return(oraclesregister_result['result'])
def oraclessamples_rpc(chain, oracletxid, batonutxo, num):
oraclessamples_payload = {
"jsonrpc": "1.0",
"id": "python",
"method": "oraclessamples",
"params": [
oracletxid,
batonutxo,
str(num)]}
oraclessamples_result = post_rpc(def_credentials(chain), oraclessamples_payload)
return(oraclessamples_result['result'])
def getlastsegidstakes_rpc(chain, depth):
    getlastsegidstakes_payload = {
        "jsonrpc": "1.0",
        "id": "python",
        "method": "getlastsegidstakes",
        "params": [depth]}
    getlastsegidstakes_result = post_rpc(def_credentials(chain), getlastsegidstakes_payload)
return(getlastsegidstakes_result['result'])
| [((225, 242), 'platform.system', 'platform.system', ([], {}), '()\n', (240, 242), False, 'import platform\n'), ((1466, 1484), 'json.loads', 'json.loads', (['r.text'], {}), '(r.text)\n', (1476, 1484), False, 'import json\n'), ((940, 963), 're.search', 're.search', (['"""rpcuser"""', 'l'], {}), "('rpcuser', l)\n", (949, 963), False, 'import re\n'), ((1034, 1061), 're.search', 're.search', (['"""rpcpassword"""', 'l'], {}), "('rpcpassword', l)\n", (1043, 1061), False, 'import re\n'), ((1419, 1438), 'json.dumps', 'json.dumps', (['payload'], {}), '(payload)\n', (1429, 1438), False, 'import json\n'), ((1140, 1163), 're.search', 're.search', (['"""rpcport"""', 'l'], {}), "('rpcport', l)\n", (1149, 1163), False, 'import re\n')] |
orenyodfat/CWR-DataApi | cwr/parser/decoder/dictionary.py | f3b6ba8308c901b6ab87073c155c08e30692333c | # -*- coding: utf-8 -*-
from cwr.acknowledgement import AcknowledgementRecord, MessageRecord
from cwr.agreement import AgreementRecord, AgreementTerritoryRecord, \
InterestedPartyForAgreementRecord
from cwr.group import Group, GroupHeader, GroupTrailer
from cwr.info import AdditionalRelatedInfoRecord
from cwr.parser.decoder.common import Decoder
from cwr.interested_party import IPTerritoryOfControlRecord, Publisher, \
PublisherRecord, Writer, PublisherForWriterRecord, WriterRecord
from cwr.non_roman_alphabet import NonRomanAlphabetAgreementPartyRecord, \
NonRomanAlphabetOtherWriterRecord, NonRomanAlphabetPerformanceDataRecord, \
NonRomanAlphabetPublisherNameRecord, NonRomanAlphabetTitleRecord, \
NonRomanAlphabetWorkRecord, NonRomanAlphabetWriterNameRecord
from cwr.transmission import Transmission, TransmissionTrailer, \
TransmissionHeader
from cwr.work import RecordingDetailRecord, ComponentRecord, \
AlternateTitleRecord, AuthoredWorkRecord, InstrumentationDetailRecord, \
InstrumentationSummaryRecord, PerformingArtistRecord, WorkOriginRecord, \
WorkRecord
from cwr.file import CWRFile, FileTag
from cwr.other import AVIKey, VISAN
from cwr.table_value import MediaTypeValue, TableValue, InstrumentValue
"""
Classes for transforming dictionaries into instances of the CWR model.
There is a decoder for each of the model classes, and all of them expect a
dictionary having at least one key for each field, having the same name as the
field, which will refer to a valid value.
As said, the values on the dictionary should be valid values, for example if
an integer is expected, then the dictionary contains an integer. The values
contained in the dictionary entries should not need to be parsed.
These decoders are useful for handling JSON transmissions or Mongo databases.
"""
__author__ = 'Bernardo Martínez Garrido'
__license__ = 'MIT'
__status__ = 'Development'
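# Illustrative usage (added for clarity; the field values below are hypothetical):
# each decoder receives a dictionary with one already-parsed value per model
# field, e.g. a lookup-table value can be decoded as:
#   decoder = TableValueDictionaryDecoder()
#   value = decoder.decode({'code': 'AS',
#                           'name': 'Assignor',
#                           'description': 'Party assigning the rights'})
#   # "value" is a cwr.table_value.TableValue instance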
class TransactionRecordDictionaryDecoder(Decoder):
def __init__(self):
super(TransactionRecordDictionaryDecoder, self).__init__()
self._decoders = {}
self._decoders['ACK'] = AcknowledgementDictionaryDecoder()
self._decoders['AGR'] = AgreementDictionaryDecoder()
self._decoders['TER'] = AgreementTerritoryDictionaryDecoder()
self._decoders['ARI'] = AdditionalRelatedInformationDictionaryDecoder()
self._decoders['ALT'] = AlternateTitleDictionaryDecoder()
self._decoders['EWT'] = AuthoredWorkDictionaryDecoder()
self._decoders['VER'] = AuthoredWorkDictionaryDecoder()
self._decoders['COM'] = ComponentDictionaryDecoder()
self._decoders['IPA'] = InterestedPartyForAgreementDictionaryDecoder()
self._decoders['SPT'] = IPTerritoryOfControlDictionaryDecoder()
self._decoders['SWT'] = IPTerritoryOfControlDictionaryDecoder()
self._decoders['IND'] = InstrumentationDetailDictionaryDecoder()
self._decoders['INS'] = InstrumentationSummaryDictionaryDecoder()
self._decoders['MSG'] = MessageDictionaryDecoder()
self._decoders['PER'] = PerformingArtistDictionaryDecoder()
self._decoders['PWR'] = PublisherForWriterDictionaryDecoder()
self._decoders['REC'] = RecordingDetailDictionaryDecoder()
self._decoders['EXC'] = WorkDictionaryDecoder()
self._decoders['ISW'] = WorkDictionaryDecoder()
self._decoders['NWR'] = WorkDictionaryDecoder()
self._decoders['REV'] = WorkDictionaryDecoder()
self._decoders['ORN'] = WorkOriginDictionaryDecoder()
self._decoders['SWR'] = WriterRecordDictionaryDecoder()
        self._decoders['OWR'] = WriterRecordDictionaryDecoder()
self._decoders[
'NPA'] = NonRomanAlphabetAgreementPartyDictionaryDecoder()
self._decoders['NOW'] = NonRomanAlphabetOtherWriterDictionaryDecoder()
self._decoders[
'NPR'] = NonRomanAlphabetPerformanceDataDictionaryDecoder()
self._decoders['NPN'] = NonRomanAlphabetPublisherNameDictionaryDecoder()
self._decoders['NAT'] = NonRomanAlphabetTitleDictionaryDecoder()
self._decoders['NET'] = NonRomanAlphabetWorkDictionaryDecoder()
self._decoders['NCT'] = NonRomanAlphabetWorkDictionaryDecoder()
self._decoders['NVT'] = NonRomanAlphabetWorkDictionaryDecoder()
self._decoders['NWN'] = NonRomanAlphabetWriterNameDictionaryDecoder()
self._decoders['SPU'] = PublisherRecordDictionaryDecoder()
self._decoders['OPU'] = PublisherRecordDictionaryDecoder()
def decode(self, data):
return self._decoders[data['record_type']].decode(data)
class AcknowledgementDictionaryDecoder(Decoder):
def __init__(self):
super(AcknowledgementDictionaryDecoder, self).__init__()
def decode(self, data):
return AcknowledgementRecord(record_type=data['record_type'],
transaction_sequence_n=data[
'transaction_sequence_n'],
record_sequence_n=data[
'record_sequence_n'],
original_group_id=data[
'original_group_id'],
original_transaction_sequence_n=data[
'original_transaction_sequence_n'],
original_transaction_type=data[
'original_transaction_type'],
transaction_status=data[
'transaction_status'],
creation_date_time=data[
'creation_date_time'],
processing_date=data['processing_date'],
creation_title=data['creation_title'],
submitter_creation_n=data[
'submitter_creation_n'],
recipient_creation_n=data[
'recipient_creation_n'])
class AgreementDictionaryDecoder(Decoder):
def __init__(self):
super(AgreementDictionaryDecoder, self).__init__()
def decode(self, data):
return AgreementRecord(record_type=data['record_type'],
transaction_sequence_n=data[
'transaction_sequence_n'],
record_sequence_n=data['record_sequence_n'],
submitter_agreement_n=data[
'submitter_agreement_n'],
agreement_type=data['agreement_type'],
agreement_start_date=data[
'agreement_start_date'],
prior_royalty_status=data[
'prior_royalty_status'],
post_term_collection_status=data[
'post_term_collection_status'],
number_of_works=data['number_of_works'],
society_assigned_agreement_n=data[
'society_assigned_agreement_n'],
international_standard_code=data[
'international_standard_code'],
sales_manufacture_clause=data[
'sales_manufacture_clause'],
agreement_end_date=data['agreement_end_date'],
date_of_signature=data['date_of_signature'],
retention_end_date=data['retention_end_date'],
prior_royalty_start_date=data[
'prior_royalty_start_date'],
post_term_collection_end_date=data[
'post_term_collection_end_date'],
shares_change=data['shares_change'],
advance_given=data['advance_given'])
class AgreementTerritoryDictionaryDecoder(Decoder):
def __init__(self):
super(AgreementTerritoryDictionaryDecoder, self).__init__()
def decode(self, data):
return AgreementTerritoryRecord(record_type=data['record_type'],
transaction_sequence_n=data[
'transaction_sequence_n'],
record_sequence_n=data[
'record_sequence_n'],
tis_numeric_code=data[
'tis_numeric_code'],
inclusion_exclusion_indicator=data[
'inclusion_exclusion_indicator'])
class AdditionalRelatedInformationDictionaryDecoder(Decoder):
def __init__(self):
super(AdditionalRelatedInformationDictionaryDecoder, self).__init__()
def decode(self, data):
return AdditionalRelatedInfoRecord(record_type=data['record_type'],
transaction_sequence_n=data[
'transaction_sequence_n'],
record_sequence_n=data[
'record_sequence_n'],
society_n=data['society_n'],
type_of_right=data['type_of_right'],
work_n=data['work_n'],
subject_code=data['subject_code'],
note=data['note'])
class AlternateTitleDictionaryDecoder(Decoder):
def __init__(self):
super(AlternateTitleDictionaryDecoder, self).__init__()
def decode(self, data):
return AlternateTitleRecord(record_type=data['record_type'],
transaction_sequence_n=data[
'transaction_sequence_n'],
record_sequence_n=data['record_sequence_n'],
alternate_title=data['alternate_title'],
title_type=data['title_type'],
language_code=data['language_code'])
class AuthoredWorkDictionaryDecoder(Decoder):
def __init__(self, ipi_base_decoder=None):
super(AuthoredWorkDictionaryDecoder, self).__init__()
if ipi_base_decoder:
self._ipi_base_decoder = ipi_base_decoder
else:
self._ipi_base_decoder = IPIBaseDictionaryDecoder()
def decode(self, data):
ipi_base_1 = self._ipi_base_decoder.decode(data[
'writer_1_ipi_base_n'])
ipi_base_2 = self._ipi_base_decoder.decode(data[
'writer_2_ipi_base_n'])
return AuthoredWorkRecord(record_type=data['record_type'],
transaction_sequence_n=data[
'transaction_sequence_n'],
record_sequence_n=data['record_sequence_n'],
title=data['title'],
submitter_work_n=data['submitter_work_n'],
writer_1_first_name=data[
'writer_1_first_name'],
writer_1_last_name=data['writer_1_last_name'],
writer_2_first_name=data[
'writer_2_first_name'],
writer_2_last_name=data['writer_2_last_name'],
writer_1_ipi_base_n=ipi_base_1,
writer_1_ipi_name_n=data[
'writer_1_ipi_name_n'],
writer_2_ipi_base_n=ipi_base_2,
writer_2_ipi_name_n=data[
'writer_2_ipi_name_n'],
source=data['source'],
language_code=data['language_code'],
iswc=data['iswc'])
class ComponentDictionaryDecoder(Decoder):
def __init__(self, ipi_base_decoder=None):
super(ComponentDictionaryDecoder, self).__init__()
if ipi_base_decoder:
self._ipi_base_decoder = ipi_base_decoder
else:
self._ipi_base_decoder = IPIBaseDictionaryDecoder()
def decode(self, data):
ipi_base_1 = self._ipi_base_decoder.decode(data['writer_1_ipi_base_n'])
ipi_base_2 = self._ipi_base_decoder.decode(data['writer_2_ipi_base_n'])
return ComponentRecord(record_type=data['record_type'],
transaction_sequence_n=data[
'transaction_sequence_n'],
record_sequence_n=data['record_sequence_n'],
title=data['title'],
submitter_work_n=data['submitter_work_n'],
writer_1_last_name=data['writer_1_last_name'],
writer_1_first_name=data['writer_1_first_name'],
writer_2_last_name=data['writer_2_last_name'],
writer_2_first_name=data['writer_2_first_name'],
writer_1_ipi_base_n=ipi_base_1,
writer_1_ipi_name_n=data['writer_1_ipi_name_n'],
writer_2_ipi_base_n=ipi_base_2,
writer_2_ipi_name_n=data['writer_2_ipi_name_n'],
iswc=data['iswc'],
duration=data['duration'])
class GroupHeaderDictionaryDecoder(Decoder):
def __init__(self):
super(GroupHeaderDictionaryDecoder, self).__init__()
def decode(self, data):
return GroupHeader(record_type=data['record_type'],
group_id=data['group_id'],
transaction_type=data['transaction_type'],
version_number=data['version_number'],
batch_request_id=data['batch_request_id'])
class GroupTrailerDictionaryDecoder(Decoder):
def __init__(self):
super(GroupTrailerDictionaryDecoder, self).__init__()
def decode(self, data):
total_monetary_value = None
if 'total_monetary_value' in data:
total_monetary_value = data['total_monetary_value']
currency_indicator = None
if 'currency_indicator' in data:
currency_indicator = data['currency_indicator']
return GroupTrailer(record_type=data['record_type'],
group_id=data['group_id'],
transaction_count=data['transaction_count'],
record_count=data['record_count'],
currency_indicator=currency_indicator,
total_monetary_value=total_monetary_value,
)
class InterestedPartyForAgreementDictionaryDecoder(Decoder):
def __init__(self, ipi_base_decoder=None):
super(InterestedPartyForAgreementDictionaryDecoder, self).__init__()
if ipi_base_decoder:
self._ipi_base_decoder = ipi_base_decoder
else:
self._ipi_base_decoder = IPIBaseDictionaryDecoder()
def decode(self, data):
ipi_base = self._ipi_base_decoder.decode(data['ipi_base_n'])
return InterestedPartyForAgreementRecord(
record_type=data['record_type'],
transaction_sequence_n=data['transaction_sequence_n'],
record_sequence_n=data['record_sequence_n'],
ip_n=data['ip_n'],
ip_last_name=data['ip_last_name'],
agreement_role_code=data['agreement_role_code'],
ip_writer_first_name=data['ip_writer_first_name'],
ipi_name_n=data['ipi_name_n'], ipi_base_n=ipi_base,
pr_society=data['pr_society'], pr_share=data['pr_share'],
mr_society=data['mr_society'], mr_share=data['mr_share'],
sr_society=data['sr_society'], sr_share=data['sr_share'])
class IPTerritoryOfControlDictionaryDecoder(Decoder):
def __init__(self):
super(IPTerritoryOfControlDictionaryDecoder, self).__init__()
def decode(self, data):
record = IPTerritoryOfControlRecord(record_type=data['record_type'],
transaction_sequence_n=data[
'transaction_sequence_n'],
record_sequence_n=data[
'record_sequence_n'],
ip_n=data['ip_n'],
inclusion_exclusion_indicator=data[
'inclusion_exclusion_indicator'],
tis_numeric_code=data[
'tis_numeric_code'],
sequence_n=data['sequence_n'],
pr_collection_share=data[
'pr_collection_share'],
mr_collection_share=data[
'mr_collection_share'],
shares_change=data['shares_change'])
if 'sr_collection_share' in data:
record.sr_collection_share = data['sr_collection_share']
return record
class InstrumentationDetailDictionaryDecoder(Decoder):
def __init__(self):
super(InstrumentationDetailDictionaryDecoder, self).__init__()
def decode(self, data):
return InstrumentationDetailRecord(record_type=data['record_type'],
transaction_sequence_n=data[
'transaction_sequence_n'],
record_sequence_n=data[
'record_sequence_n'],
instrument_code=data[
'instrument_code'],
number_players=data[
'number_players'])
class InstrumentationSummaryDictionaryDecoder(Decoder):
def __init__(self):
super(InstrumentationSummaryDictionaryDecoder, self).__init__()
def decode(self, data):
return InstrumentationSummaryRecord(
record_type=data['record_type'],
transaction_sequence_n=data['transaction_sequence_n'],
record_sequence_n=data['record_sequence_n'],
number_voices=data['number_voices'],
standard_instrumentation_type=data['standard_instrumentation_type'],
instrumentation_description=data['instrumentation_description'])
class MessageDictionaryDecoder(Decoder):
def __init__(self):
super(MessageDictionaryDecoder, self).__init__()
def decode(self, data):
return MessageRecord(record_type=data['record_type'],
transaction_sequence_n=data[
'transaction_sequence_n'],
record_sequence_n=data['record_sequence_n'],
message_type=data['message_type'],
message_text=data['message_text'],
original_record_sequence_n=data[
'original_record_sequence_n'],
message_record_type=data['message_record_type'],
message_level=data['message_level'],
validation_n=data['validation_n'])
class PerformingArtistDictionaryDecoder(Decoder):
def __init__(self, ipi_base_decoder=None):
super(PerformingArtistDictionaryDecoder, self).__init__()
if ipi_base_decoder:
self._ipi_base_decoder = ipi_base_decoder
else:
self._ipi_base_decoder = IPIBaseDictionaryDecoder()
def decode(self, data):
ipi_base = None
if 'performing_artist_ipi_base_n' in data:
ipi_base = self._ipi_base_decoder.decode(data['performing_artist_ipi_base_n'])
performing_artist_first_name = None
if 'performing_artist_first_name' in data:
performing_artist_first_name = data['performing_artist_first_name']
performing_artist_ipi_name_n = None
if 'performing_artist_ipi_name_n' in data:
performing_artist_ipi_name_n = data['performing_artist_ipi_name_n']
return PerformingArtistRecord(record_type=data['record_type'],
transaction_sequence_n=data[
'transaction_sequence_n'],
record_sequence_n=data[
'record_sequence_n'],
performing_artist_last_name=data[
'performing_artist_last_name'],
performing_artist_first_name=performing_artist_first_name,
performing_artist_ipi_name_n=performing_artist_ipi_name_n,
performing_artist_ipi_base_n=ipi_base)
class PublisherForWriterDictionaryDecoder(Decoder):
def __init__(self):
super(PublisherForWriterDictionaryDecoder, self).__init__()
def decode(self, data):
publisher_name = None
if 'publisher_name' in data:
publisher_name = data['publisher_name']
return PublisherForWriterRecord(record_type=data['record_type'],
transaction_sequence_n=data[
'transaction_sequence_n'],
record_sequence_n=data[
'record_sequence_n'],
publisher_ip_n=data['publisher_ip_n'],
publisher_name=publisher_name,
writer_ip_n=data['writer_ip_n'],
submitter_agreement_n=data[
'submitter_agreement_n'],
society_assigned_agreement_n=data[
'society_assigned_agreement_n'])
class RecordingDetailDictionaryDecoder(Decoder):
def __init__(self):
super(RecordingDetailDictionaryDecoder, self).__init__()
def decode(self, data):
media_type = None
if 'media_type' in data:
media_type = data['media_type']
return RecordingDetailRecord(record_type=data['record_type'],
transaction_sequence_n=data[
'transaction_sequence_n'],
record_sequence_n=data[
'record_sequence_n'],
first_release_date=data[
'first_release_date'],
first_release_duration=data[
'first_release_duration'],
first_album_title=data[
'first_album_title'],
first_album_label=data[
'first_album_label'],
first_release_catalog_n=data[
'first_release_catalog_n'],
ean=data['ean'],
isrc=data['isrc'],
recording_format=data['recording_format'],
recording_technique=data[
'recording_technique'],
media_type=media_type)
class FileDictionaryDecoder(Decoder):
def __init__(self):
super(FileDictionaryDecoder, self).__init__()
self._tag_decoder = FileTagDictionaryDecoder()
self._transmission_decoder = TransmissionDictionaryDecoder()
def decode(self, data):
tag = data['tag']
if isinstance(tag, dict):
tag = self._tag_decoder.decode(tag)
transmission = data['transmission']
if isinstance(transmission, dict):
transmission = self._transmission_decoder.decode(transmission)
return CWRFile(tag, transmission)
class TransmissionDictionaryDecoder(Decoder):
def __init__(self):
super(TransmissionDictionaryDecoder, self).__init__()
self._header_decoder = TransmissionHeaderDictionaryDecoder()
self._trailer_decoder = TransmissionTrailerDictionaryDecoder()
self._group_decoder = GroupDictionaryDecoder()
def decode(self, data):
header = data['header']
if isinstance(header, dict):
header = self._header_decoder.decode(header)
trailer = data['trailer']
if isinstance(trailer, dict):
trailer = self._trailer_decoder.decode(trailer)
groups = []
if len(data['groups']) > 0:
if isinstance(data['groups'][0], dict):
for group in data['groups']:
groups.append(self._group_decoder.decode(group))
else:
groups = data['groups']
return Transmission(header, trailer, groups)
class GroupDictionaryDecoder(Decoder):
def __init__(self):
super(GroupDictionaryDecoder, self).__init__()
self._header_decoder = GroupHeaderDictionaryDecoder()
self._trailer_decoder = GroupTrailerDictionaryDecoder()
self._transaction_decoder = TransactionRecordDictionaryDecoder()
def decode(self, data):
header = data['group_header']
if isinstance(header, dict):
header = self._header_decoder.decode(header)
trailer = data['group_trailer']
if isinstance(trailer, dict):
trailer = self._trailer_decoder.decode(trailer)
transactions = []
if len(data['transactions']) > 0:
if isinstance(data['transactions'][0][0], dict):
for transaction in data['transactions']:
transaction_records = []
for record in transaction:
transaction_records.append(
self._transaction_decoder.decode(record))
transactions.append(transaction_records)
else:
transactions = data['transactions']
return Group(header, trailer, transactions)
class TransmissionHeaderDictionaryDecoder(Decoder):
def __init__(self):
super(TransmissionHeaderDictionaryDecoder, self).__init__()
def decode(self, data):
header = TransmissionHeader(record_type=data['record_type'],
sender_id=data['sender_id'],
sender_name=data['sender_name'],
sender_type=data['sender_type'],
creation_date_time=data[
'creation_date_time'],
transmission_date=data['transmission_date'],
edi_standard=data['edi_standard'])
if 'character_set' in data:
header.character_set = data['character_set']
return header
class TransmissionTrailerDictionaryDecoder(Decoder):
def __init__(self):
super(TransmissionTrailerDictionaryDecoder, self).__init__()
def decode(self, data):
return TransmissionTrailer(record_type=data['record_type'],
group_count=data['group_count'],
transaction_count=data['transaction_count'],
record_count=data['record_count'])
class WorkDictionaryDecoder(Decoder):
def __init__(self):
super(WorkDictionaryDecoder, self).__init__()
def decode(self, data):
catalogue_number = None
if 'catalogue_number' in data:
catalogue_number = data['catalogue_number']
exceptional_clause = None
if 'exceptional_clause' in data:
exceptional_clause = data['exceptional_clause']
opus_number = None
if 'opus_number' in data:
opus_number = data['opus_number']
priority_flag = None
if 'priority_flag' in data:
priority_flag = data['priority_flag']
return WorkRecord(record_type=data['record_type'],
transaction_sequence_n=data['transaction_sequence_n'],
record_sequence_n=data['record_sequence_n'],
submitter_work_n=data['submitter_work_n'],
title=data['title'],
version_type=data['version_type'],
musical_work_distribution_category=data[
'musical_work_distribution_category'],
date_publication_printed_edition=data[
'date_publication_printed_edition'],
text_music_relationship=data[
'text_music_relationship'],
language_code=data['language_code'],
copyright_number=data['copyright_number'],
copyright_date=data['copyright_date'],
music_arrangement=data['music_arrangement'],
lyric_adaptation=data['lyric_adaptation'],
excerpt_type=data['excerpt_type'],
composite_type=data['composite_type'],
composite_component_count=data[
'composite_component_count'],
iswc=data['iswc'],
work_type=data['work_type'],
duration=data['duration'],
catalogue_number=catalogue_number,
opus_number=opus_number,
contact_id=data['contact_id'],
contact_name=data['contact_name'],
recorded_indicator=data['recorded_indicator'],
priority_flag=priority_flag,
exceptional_clause=exceptional_clause,
grand_rights_indicator=data['grand_rights_indicator'])
class WorkOriginDictionaryDecoder(Decoder):
def __init__(self):
super(WorkOriginDictionaryDecoder, self).__init__()
def decode(self, data):
return WorkOriginRecord(record_type=data['record_type'],
transaction_sequence_n=data[
'transaction_sequence_n'],
record_sequence_n=data['record_sequence_n'],
intended_purpose=data['intended_purpose'],
production_title=data['production_title'],
cd_identifier=data['cd_identifier'],
cut_number=data['cut_number'],
library=data['library'],
bltvr=data['bltvr'],
visan=data['visan'],
production_n=data['production_n'],
episode_title=data['episode_title'],
episode_n=data['episode_n'],
year_production=data['year_production'],
audio_visual_key=data['audio_visual_key'])
class WriterDictionaryDecoder(Decoder):
def __init__(self, ipi_base_decoder=None):
super(WriterDictionaryDecoder, self).__init__()
if ipi_base_decoder:
self._ipi_base_decoder = ipi_base_decoder
else:
self._ipi_base_decoder = IPIBaseDictionaryDecoder()
def decode(self, data):
ipi_base_n = self._ipi_base_decoder.decode(data['ipi_base_n'])
return Writer(ip_n=data['ip_n'],
personal_number=data['personal_number'],
ipi_base_n=ipi_base_n,
writer_first_name=data['writer_first_name'],
writer_last_name=data['writer_last_name'],
tax_id=data['tax_id'],
ipi_name_n=data['ipi_name_n'])
class WriterRecordDictionaryDecoder(Decoder):
def __init__(self):
super(WriterRecordDictionaryDecoder, self).__init__()
self._writer_decoder = WriterDictionaryDecoder()
def decode(self, data):
writer = self._writer_decoder.decode(data['writer'])
usa_license = None
if 'usa_license' in data:
usa_license = data['usa_license']
return WriterRecord(record_type=data['record_type'],
transaction_sequence_n=data[
'transaction_sequence_n'],
record_sequence_n=data['record_sequence_n'],
writer=writer,
writer_designation=data['writer_designation'],
work_for_hire=data['work_for_hire'],
writer_unknown=data['writer_unknown'],
reversionary=data['reversionary'],
first_recording_refusal=data[
'first_recording_refusal'],
usa_license=usa_license,
pr_society=data['pr_society'],
pr_ownership_share=data['pr_ownership_share'],
mr_society=data['mr_society'],
mr_ownership_share=data['mr_ownership_share'],
sr_society=data['sr_society'],
sr_ownership_share=data['sr_ownership_share'])
class NonRomanAlphabetAgreementPartyDictionaryDecoder(Decoder):
def __init__(self):
super(NonRomanAlphabetAgreementPartyDictionaryDecoder, self).__init__()
def decode(self, data):
return NonRomanAlphabetAgreementPartyRecord(
record_type=data['record_type'],
transaction_sequence_n=data['transaction_sequence_n'],
record_sequence_n=data['record_sequence_n'],
ip_name=data['ip_name'],
ip_writer_name=data['ip_writer_name'],
ip_n=data['ip_n'],
language_code=data['language_code'])
class NonRomanAlphabetOtherWriterDictionaryDecoder(Decoder):
def __init__(self):
super(NonRomanAlphabetOtherWriterDictionaryDecoder, self).__init__()
def decode(self, data):
return NonRomanAlphabetOtherWriterRecord(
record_type=data['record_type'],
transaction_sequence_n=data['transaction_sequence_n'],
record_sequence_n=data['record_sequence_n'],
writer_first_name=data['writer_first_name'],
writer_name=data['writer_name'],
position=data['position'],
language_code=data['language_code'])
class NonRomanAlphabetPerformanceDataDictionaryDecoder(Decoder):
def __init__(self, ipi_base_decoder=None):
super(NonRomanAlphabetPerformanceDataDictionaryDecoder, self).__init__()
if ipi_base_decoder:
self._ipi_base_decoder = ipi_base_decoder
else:
self._ipi_base_decoder = IPIBaseDictionaryDecoder()
def decode(self, data):
ipi_base = self._ipi_base_decoder.decode(
data['performing_artist_ipi_base_n'])
return NonRomanAlphabetPerformanceDataRecord(
record_type=data['record_type'],
transaction_sequence_n=data['transaction_sequence_n'],
record_sequence_n=data['record_sequence_n'],
performing_artist_first_name=data['performing_artist_first_name'],
performing_artist_name=data['performing_artist_name'],
performing_artist_ipi_name_n=data['performing_artist_ipi_name_n'],
performing_artist_ipi_base_n=ipi_base,
language_code=data['language_code'],
performance_language=data['performance_language'],
performance_dialect=data['performance_dialect'])
class NonRomanAlphabetPublisherNameDictionaryDecoder(Decoder):
def __init__(self):
super(NonRomanAlphabetPublisherNameDictionaryDecoder, self).__init__()
def decode(self, data):
return NonRomanAlphabetPublisherNameRecord(
record_type=data['record_type'],
transaction_sequence_n=data['transaction_sequence_n'],
record_sequence_n=data['record_sequence_n'],
publisher_sequence_n=data['publisher_sequence_n'],
ip_n=data['ip_n'],
publisher_name=data['publisher_name'],
language_code=data['language_code'])
class NonRomanAlphabetTitleDictionaryDecoder(Decoder):
def __init__(self):
super(NonRomanAlphabetTitleDictionaryDecoder, self).__init__()
def decode(self, data):
return NonRomanAlphabetTitleRecord(record_type=data['record_type'],
transaction_sequence_n=data[
'transaction_sequence_n'],
record_sequence_n=data[
'record_sequence_n'],
title=data['title'],
title_type=data['title_type'],
language_code=data['language_code'])
class NonRomanAlphabetWorkDictionaryDecoder(Decoder):
def __init__(self):
super(NonRomanAlphabetWorkDictionaryDecoder, self).__init__()
def decode(self, data):
return NonRomanAlphabetWorkRecord(record_type=data['record_type'],
transaction_sequence_n=data[
'transaction_sequence_n'],
record_sequence_n=data[
'record_sequence_n'],
title=data['title'],
language_code=data['language_code'])
class NonRomanAlphabetWriterNameDictionaryDecoder(Decoder):
def __init__(self):
super(NonRomanAlphabetWriterNameDictionaryDecoder, self).__init__()
def decode(self, data):
return NonRomanAlphabetWriterNameRecord(record_type=data['record_type'],
transaction_sequence_n=data[
'transaction_sequence_n'],
record_sequence_n=data[
'record_sequence_n'],
writer_first_name=data[
'writer_first_name'],
writer_last_name=data[
'writer_last_name'],
ip_n=data['ip_n'],
language_code=data[
'language_code'])
class PublisherDictionaryDecoder(Decoder):
def __init__(self, ipi_base_decoder=None):
super(PublisherDictionaryDecoder, self).__init__()
if ipi_base_decoder:
self._ipi_base_decoder = ipi_base_decoder
else:
self._ipi_base_decoder = IPIBaseDictionaryDecoder()
def decode(self, data):
if 'ipi_base_n' in data:
ipi_base = self._ipi_base_decoder.decode(data['ipi_base_n'])
else:
ipi_base = None
return Publisher(ip_n=data['ip_n'],
publisher_name=data['publisher_name'],
ipi_name_n=data['ipi_name_n'],
ipi_base_n=ipi_base,
tax_id=data['tax_id'])
class PublisherRecordDictionaryDecoder(Decoder):
def __init__(self):
super(PublisherRecordDictionaryDecoder, self).__init__()
self._publisher_decoder = PublisherDictionaryDecoder()
def decode(self, data):
publisher = self._publisher_decoder.decode(data['publisher'])
special_agreements = None
if 'special_agreements' in data:
special_agreements = data['special_agreements']
first_recording_refusal = None
if 'first_recording_refusal' in data:
first_recording_refusal = data['first_recording_refusal']
agreement_type = None
if 'agreement_type' in data:
agreement_type = data['agreement_type']
usa_license = None
if 'usa_license' in data:
usa_license = data['usa_license']
international_standard_code = None
if 'international_standard_code' in data:
international_standard_code = data['international_standard_code']
society_assigned_agreement_n = None
if 'society_assigned_agreement_n' in data:
society_assigned_agreement_n = data['society_assigned_agreement_n']
return PublisherRecord(
record_type=data['record_type'],
transaction_sequence_n=data['transaction_sequence_n'],
record_sequence_n=data['record_sequence_n'],
publisher=publisher,
publisher_sequence_n=data['publisher_sequence_n'],
submitter_agreement_n=data['submitter_agreement_n'],
publisher_type=data['publisher_type'],
publisher_unknown=data['publisher_unknown'],
pr_society=data['pr_society'],
pr_ownership_share=data['pr_ownership_share'],
mr_society=data['mr_society'],
mr_ownership_share=data['mr_ownership_share'],
sr_society=data['sr_society'],
sr_ownership_share=data['sr_ownership_share'],
special_agreements=special_agreements,
first_recording_refusal=first_recording_refusal,
international_standard_code=international_standard_code,
society_assigned_agreement_n=society_assigned_agreement_n,
agreement_type=agreement_type,
usa_license=usa_license)
class TableValueDictionaryDecoder(Decoder):
def __init__(self):
super(TableValueDictionaryDecoder, self).__init__()
def decode(self, data):
return TableValue(code=data['code'],
name=data['name'],
description=data['description'])
class MediaTypeValueDictionaryDecoder(Decoder):
def __init__(self):
super(MediaTypeValueDictionaryDecoder, self).__init__()
def decode(self, data):
return MediaTypeValue(code=data['code'],
name=data['name'],
media_type=data['media_type'],
duration_max=data['duration_max'],
works_max=data['works_max'],
fragments_max=data['fragments_max'])
class InstrumentValueDictionaryDecoder(Decoder):
def __init__(self):
super(InstrumentValueDictionaryDecoder, self).__init__()
def decode(self, data):
return InstrumentValue(code=data['code'],
name=data['name'],
family=data['family'],
description=data['description'])
class FileTagDictionaryDecoder(Decoder):
def __init__(self):
super(FileTagDictionaryDecoder, self).__init__()
def decode(self, data):
return FileTag(data['year'],
data['sequence_n'],
data['sender'],
data['receiver'],
data['version'])
class AVIKeyDictionaryDecoder(Decoder):
def __init__(self):
super(AVIKeyDictionaryDecoder, self).__init__()
def decode(self, data):
return AVIKey(data['society_code'],
data['av_number'])
class IPIBaseDictionaryDecoder(Decoder):
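    """Passes an IPI base number through unchanged, mapping empty values to None."""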
def __init__(self):
super(IPIBaseDictionaryDecoder, self).__init__()
def decode(self, data):
if data:
result = data
else:
result = None
return result
class ISWCDictionaryDecoder(Decoder):
def __init__(self):
super(ISWCDictionaryDecoder, self).__init__()
def decode(self, data):
if data:
result = data
else:
result = None
return result
class VISANDictionaryDecoder(Decoder):
def __init__(self):
super(VISANDictionaryDecoder, self).__init__()
def decode(self, data):
return data
| [((4867, 5517), 'cwr.acknowledgement.AcknowledgementRecord', 'AcknowledgementRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': "data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'original_group_id': "data['original_group_id']", 'original_transaction_sequence_n': "data['original_transaction_sequence_n']", 'original_transaction_type': "data['original_transaction_type']", 'transaction_status': "data['transaction_status']", 'creation_date_time': "data['creation_date_time']", 'processing_date': "data['processing_date']", 'creation_title': "data['creation_title']", 'submitter_creation_n': "data['submitter_creation_n']", 'recipient_creation_n': "data['recipient_creation_n']"}), "(record_type=data['record_type'],\n transaction_sequence_n=data['transaction_sequence_n'],\n record_sequence_n=data['record_sequence_n'], original_group_id=data[\n 'original_group_id'], original_transaction_sequence_n=data[\n 'original_transaction_sequence_n'], original_transaction_type=data[\n 'original_transaction_type'], transaction_status=data[\n 'transaction_status'], creation_date_time=data['creation_date_time'],\n processing_date=data['processing_date'], creation_title=data[\n 'creation_title'], submitter_creation_n=data['submitter_creation_n'],\n recipient_creation_n=data['recipient_creation_n'])\n", (4888, 5517), False, 'from cwr.acknowledgement import AcknowledgementRecord, MessageRecord\n'), ((6434, 7476), 'cwr.agreement.AgreementRecord', 'AgreementRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': "data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'submitter_agreement_n': "data['submitter_agreement_n']", 'agreement_type': "data['agreement_type']", 'agreement_start_date': "data['agreement_start_date']", 'prior_royalty_status': "data['prior_royalty_status']", 'post_term_collection_status': "data['post_term_collection_status']", 'number_of_works': "data['number_of_works']", 'society_assigned_agreement_n': "data['society_assigned_agreement_n']", 'international_standard_code': "data['international_standard_code']", 'sales_manufacture_clause': "data['sales_manufacture_clause']", 'agreement_end_date': "data['agreement_end_date']", 'date_of_signature': "data['date_of_signature']", 'retention_end_date': "data['retention_end_date']", 'prior_royalty_start_date': "data['prior_royalty_start_date']", 'post_term_collection_end_date': "data['post_term_collection_end_date']", 'shares_change': "data['shares_change']", 'advance_given': "data['advance_given']"}), "(record_type=data['record_type'], transaction_sequence_n=\n data['transaction_sequence_n'], record_sequence_n=data[\n 'record_sequence_n'], submitter_agreement_n=data[\n 'submitter_agreement_n'], agreement_type=data['agreement_type'],\n agreement_start_date=data['agreement_start_date'], prior_royalty_status\n =data['prior_royalty_status'], post_term_collection_status=data[\n 'post_term_collection_status'], number_of_works=data['number_of_works'],\n society_assigned_agreement_n=data['society_assigned_agreement_n'],\n international_standard_code=data['international_standard_code'],\n sales_manufacture_clause=data['sales_manufacture_clause'],\n agreement_end_date=data['agreement_end_date'], date_of_signature=data[\n 'date_of_signature'], retention_end_date=data['retention_end_date'],\n prior_royalty_start_date=data['prior_royalty_start_date'],\n post_term_collection_end_date=data['post_term_collection_end_date'],\n 
shares_change=data['shares_change'], advance_given=data['advance_given'])\n", (6449, 7476), False, 'from cwr.agreement import AgreementRecord, AgreementTerritoryRecord, InterestedPartyForAgreementRecord\n'), ((8523, 8810), 'cwr.agreement.AgreementTerritoryRecord', 'AgreementTerritoryRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': "data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'tis_numeric_code': "data['tis_numeric_code']", 'inclusion_exclusion_indicator': "data['inclusion_exclusion_indicator']"}), "(record_type=data['record_type'],\n transaction_sequence_n=data['transaction_sequence_n'],\n record_sequence_n=data['record_sequence_n'], tis_numeric_code=data[\n 'tis_numeric_code'], inclusion_exclusion_indicator=data[\n 'inclusion_exclusion_indicator'])\n", (8547, 8810), False, 'from cwr.agreement import AgreementRecord, AgreementTerritoryRecord, InterestedPartyForAgreementRecord\n'), ((9343, 9663), 'cwr.info.AdditionalRelatedInfoRecord', 'AdditionalRelatedInfoRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': "data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'society_n': "data['society_n']", 'type_of_right': "data['type_of_right']", 'work_n': "data['work_n']", 'subject_code': "data['subject_code']", 'note': "data['note']"}), "(record_type=data['record_type'],\n transaction_sequence_n=data['transaction_sequence_n'],\n record_sequence_n=data['record_sequence_n'], society_n=data['society_n'\n ], type_of_right=data['type_of_right'], work_n=data['work_n'],\n subject_code=data['subject_code'], note=data['note'])\n", (9370, 9663), False, 'from cwr.info import AdditionalRelatedInfoRecord\n'), ((10226, 10506), 'cwr.work.AlternateTitleRecord', 'AlternateTitleRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': "data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'alternate_title': "data['alternate_title']", 'title_type': "data['title_type']", 'language_code': "data['language_code']"}), "(record_type=data['record_type'],\n transaction_sequence_n=data['transaction_sequence_n'],\n record_sequence_n=data['record_sequence_n'], alternate_title=data[\n 'alternate_title'], title_type=data['title_type'], language_code=data[\n 'language_code'])\n", (10246, 10506), False, 'from cwr.work import RecordingDetailRecord, ComponentRecord, AlternateTitleRecord, AuthoredWorkRecord, InstrumentationDetailRecord, InstrumentationSummaryRecord, PerformingArtistRecord, WorkOriginRecord, WorkRecord\n'), ((11346, 12035), 'cwr.work.AuthoredWorkRecord', 'AuthoredWorkRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': "data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'title': "data['title']", 'submitter_work_n': "data['submitter_work_n']", 'writer_1_first_name': "data['writer_1_first_name']", 'writer_1_last_name': "data['writer_1_last_name']", 'writer_2_first_name': "data['writer_2_first_name']", 'writer_2_last_name': "data['writer_2_last_name']", 'writer_1_ipi_base_n': 'ipi_base_1', 'writer_1_ipi_name_n': "data['writer_1_ipi_name_n']", 'writer_2_ipi_base_n': 'ipi_base_2', 'writer_2_ipi_name_n': "data['writer_2_ipi_name_n']", 'source': "data['source']", 'language_code': "data['language_code']", 'iswc': "data['iswc']"}), "(record_type=data['record_type'], transaction_sequence_n=\n data['transaction_sequence_n'], record_sequence_n=data[\n 'record_sequence_n'], title=data['title'], 
submitter_work_n=data[\n 'submitter_work_n'], writer_1_first_name=data['writer_1_first_name'],\n writer_1_last_name=data['writer_1_last_name'], writer_2_first_name=data\n ['writer_2_first_name'], writer_2_last_name=data['writer_2_last_name'],\n writer_1_ipi_base_n=ipi_base_1, writer_1_ipi_name_n=data[\n 'writer_1_ipi_name_n'], writer_2_ipi_base_n=ipi_base_2,\n writer_2_ipi_name_n=data['writer_2_ipi_name_n'], source=data['source'],\n language_code=data['language_code'], iswc=data['iswc'])\n", (11364, 12035), False, 'from cwr.work import RecordingDetailRecord, ComponentRecord, AlternateTitleRecord, AuthoredWorkRecord, InstrumentationDetailRecord, InstrumentationSummaryRecord, PerformingArtistRecord, WorkOriginRecord, WorkRecord\n'), ((13218, 13873), 'cwr.work.ComponentRecord', 'ComponentRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': "data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'title': "data['title']", 'submitter_work_n': "data['submitter_work_n']", 'writer_1_last_name': "data['writer_1_last_name']", 'writer_1_first_name': "data['writer_1_first_name']", 'writer_2_last_name': "data['writer_2_last_name']", 'writer_2_first_name': "data['writer_2_first_name']", 'writer_1_ipi_base_n': 'ipi_base_1', 'writer_1_ipi_name_n': "data['writer_1_ipi_name_n']", 'writer_2_ipi_base_n': 'ipi_base_2', 'writer_2_ipi_name_n': "data['writer_2_ipi_name_n']", 'iswc': "data['iswc']", 'duration': "data['duration']"}), "(record_type=data['record_type'], transaction_sequence_n=\n data['transaction_sequence_n'], record_sequence_n=data[\n 'record_sequence_n'], title=data['title'], submitter_work_n=data[\n 'submitter_work_n'], writer_1_last_name=data['writer_1_last_name'],\n writer_1_first_name=data['writer_1_first_name'], writer_2_last_name=\n data['writer_2_last_name'], writer_2_first_name=data[\n 'writer_2_first_name'], writer_1_ipi_base_n=ipi_base_1,\n writer_1_ipi_name_n=data['writer_1_ipi_name_n'], writer_2_ipi_base_n=\n ipi_base_2, writer_2_ipi_name_n=data['writer_2_ipi_name_n'], iswc=data[\n 'iswc'], duration=data['duration'])\n", (13233, 13873), False, 'from cwr.work import RecordingDetailRecord, ComponentRecord, AlternateTitleRecord, AuthoredWorkRecord, InstrumentationDetailRecord, InstrumentationSummaryRecord, PerformingArtistRecord, WorkOriginRecord, WorkRecord\n'), ((14477, 14682), 'cwr.group.GroupHeader', 'GroupHeader', ([], {'record_type': "data['record_type']", 'group_id': "data['group_id']", 'transaction_type': "data['transaction_type']", 'version_number': "data['version_number']", 'batch_request_id': "data['batch_request_id']"}), "(record_type=data['record_type'], group_id=data['group_id'],\n transaction_type=data['transaction_type'], version_number=data[\n 'version_number'], batch_request_id=data['batch_request_id'])\n", (14488, 14682), False, 'from cwr.group import Group, GroupHeader, GroupTrailer\n'), ((15240, 15487), 'cwr.group.GroupTrailer', 'GroupTrailer', ([], {'record_type': "data['record_type']", 'group_id': "data['group_id']", 'transaction_count': "data['transaction_count']", 'record_count': "data['record_count']", 'currency_indicator': 'currency_indicator', 'total_monetary_value': 'total_monetary_value'}), "(record_type=data['record_type'], group_id=data['group_id'],\n transaction_count=data['transaction_count'], record_count=data[\n 'record_count'], currency_indicator=currency_indicator,\n total_monetary_value=total_monetary_value)\n", (15252, 15487), False, 'from cwr.group import Group, GroupHeader, GroupTrailer\n'), 
((16108, 16690), 'cwr.agreement.InterestedPartyForAgreementRecord', 'InterestedPartyForAgreementRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': "data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'ip_n': "data['ip_n']", 'ip_last_name': "data['ip_last_name']", 'agreement_role_code': "data['agreement_role_code']", 'ip_writer_first_name': "data['ip_writer_first_name']", 'ipi_name_n': "data['ipi_name_n']", 'ipi_base_n': 'ipi_base', 'pr_society': "data['pr_society']", 'pr_share': "data['pr_share']", 'mr_society': "data['mr_society']", 'mr_share': "data['mr_share']", 'sr_society': "data['sr_society']", 'sr_share': "data['sr_share']"}), "(record_type=data['record_type'],\n transaction_sequence_n=data['transaction_sequence_n'],\n record_sequence_n=data['record_sequence_n'], ip_n=data['ip_n'],\n ip_last_name=data['ip_last_name'], agreement_role_code=data[\n 'agreement_role_code'], ip_writer_first_name=data[\n 'ip_writer_first_name'], ipi_name_n=data['ipi_name_n'], ipi_base_n=\n ipi_base, pr_society=data['pr_society'], pr_share=data['pr_share'],\n mr_society=data['mr_society'], mr_share=data['mr_share'], sr_society=\n data['sr_society'], sr_share=data['sr_share'])\n", (16141, 16690), False, 'from cwr.agreement import AgreementRecord, AgreementTerritoryRecord, InterestedPartyForAgreementRecord\n'), ((16984, 17466), 'cwr.interested_party.IPTerritoryOfControlRecord', 'IPTerritoryOfControlRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': "data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'ip_n': "data['ip_n']", 'inclusion_exclusion_indicator': "data['inclusion_exclusion_indicator']", 'tis_numeric_code': "data['tis_numeric_code']", 'sequence_n': "data['sequence_n']", 'pr_collection_share': "data['pr_collection_share']", 'mr_collection_share': "data['mr_collection_share']", 'shares_change': "data['shares_change']"}), "(record_type=data['record_type'],\n transaction_sequence_n=data['transaction_sequence_n'],\n record_sequence_n=data['record_sequence_n'], ip_n=data['ip_n'],\n inclusion_exclusion_indicator=data['inclusion_exclusion_indicator'],\n tis_numeric_code=data['tis_numeric_code'], sequence_n=data['sequence_n'\n ], pr_collection_share=data['pr_collection_share'], mr_collection_share\n =data['mr_collection_share'], shares_change=data['shares_change'])\n", (17010, 17466), False, 'from cwr.interested_party import IPTerritoryOfControlRecord, Publisher, PublisherRecord, Writer, PublisherForWriterRecord, WriterRecord\n'), ((18462, 18715), 'cwr.work.InstrumentationDetailRecord', 'InstrumentationDetailRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': "data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'instrument_code': "data['instrument_code']", 'number_players': "data['number_players']"}), "(record_type=data['record_type'],\n transaction_sequence_n=data['transaction_sequence_n'],\n record_sequence_n=data['record_sequence_n'], instrument_code=data[\n 'instrument_code'], number_players=data['number_players'])\n", (18489, 18715), False, 'from cwr.work import RecordingDetailRecord, ComponentRecord, AlternateTitleRecord, AuthoredWorkRecord, InstrumentationDetailRecord, InstrumentationSummaryRecord, PerformingArtistRecord, WorkOriginRecord, WorkRecord\n'), ((19265, 19620), 'cwr.work.InstrumentationSummaryRecord', 'InstrumentationSummaryRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': 
"data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'number_voices': "data['number_voices']", 'standard_instrumentation_type': "data['standard_instrumentation_type']", 'instrumentation_description': "data['instrumentation_description']"}), "(record_type=data['record_type'],\n transaction_sequence_n=data['transaction_sequence_n'],\n record_sequence_n=data['record_sequence_n'], number_voices=data[\n 'number_voices'], standard_instrumentation_type=data[\n 'standard_instrumentation_type'], instrumentation_description=data[\n 'instrumentation_description'])\n", (19293, 19620), False, 'from cwr.work import RecordingDetailRecord, ComponentRecord, AlternateTitleRecord, AuthoredWorkRecord, InstrumentationDetailRecord, InstrumentationSummaryRecord, PerformingArtistRecord, WorkOriginRecord, WorkRecord\n'), ((19839, 20261), 'cwr.acknowledgement.MessageRecord', 'MessageRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': "data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'message_type': "data['message_type']", 'message_text': "data['message_text']", 'original_record_sequence_n': "data['original_record_sequence_n']", 'message_record_type': "data['message_record_type']", 'message_level': "data['message_level']", 'validation_n': "data['validation_n']"}), "(record_type=data['record_type'], transaction_sequence_n=data[\n 'transaction_sequence_n'], record_sequence_n=data['record_sequence_n'],\n message_type=data['message_type'], message_text=data['message_text'],\n original_record_sequence_n=data['original_record_sequence_n'],\n message_record_type=data['message_record_type'], message_level=data[\n 'message_level'], validation_n=data['validation_n'])\n", (19852, 20261), False, 'from cwr.acknowledgement import AcknowledgementRecord, MessageRecord\n'), ((21430, 21831), 'cwr.work.PerformingArtistRecord', 'PerformingArtistRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': "data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'performing_artist_last_name': "data['performing_artist_last_name']", 'performing_artist_first_name': 'performing_artist_first_name', 'performing_artist_ipi_name_n': 'performing_artist_ipi_name_n', 'performing_artist_ipi_base_n': 'ipi_base'}), "(record_type=data['record_type'],\n transaction_sequence_n=data['transaction_sequence_n'],\n record_sequence_n=data['record_sequence_n'],\n performing_artist_last_name=data['performing_artist_last_name'],\n performing_artist_first_name=performing_artist_first_name,\n performing_artist_ipi_name_n=performing_artist_ipi_name_n,\n performing_artist_ipi_base_n=ipi_base)\n", (21452, 21831), False, 'from cwr.work import RecordingDetailRecord, ComponentRecord, AlternateTitleRecord, AuthoredWorkRecord, InstrumentationDetailRecord, InstrumentationSummaryRecord, PerformingArtistRecord, WorkOriginRecord, WorkRecord\n'), ((22474, 22876), 'cwr.interested_party.PublisherForWriterRecord', 'PublisherForWriterRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': "data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'publisher_ip_n': "data['publisher_ip_n']", 'publisher_name': 'publisher_name', 'writer_ip_n': "data['writer_ip_n']", 'submitter_agreement_n': "data['submitter_agreement_n']", 'society_assigned_agreement_n': "data['society_assigned_agreement_n']"}), "(record_type=data['record_type'],\n transaction_sequence_n=data['transaction_sequence_n'],\n 
record_sequence_n=data['record_sequence_n'], publisher_ip_n=data[\n 'publisher_ip_n'], publisher_name=publisher_name, writer_ip_n=data[\n 'writer_ip_n'], submitter_agreement_n=data['submitter_agreement_n'],\n society_assigned_agreement_n=data['society_assigned_agreement_n'])\n", (22498, 22876), False, 'from cwr.interested_party import IPTerritoryOfControlRecord, Publisher, PublisherRecord, Writer, PublisherForWriterRecord, WriterRecord\n'), ((23603, 24193), 'cwr.work.RecordingDetailRecord', 'RecordingDetailRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': "data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'first_release_date': "data['first_release_date']", 'first_release_duration': "data['first_release_duration']", 'first_album_title': "data['first_album_title']", 'first_album_label': "data['first_album_label']", 'first_release_catalog_n': "data['first_release_catalog_n']", 'ean': "data['ean']", 'isrc': "data['isrc']", 'recording_format': "data['recording_format']", 'recording_technique': "data['recording_technique']", 'media_type': 'media_type'}), "(record_type=data['record_type'],\n transaction_sequence_n=data['transaction_sequence_n'],\n record_sequence_n=data['record_sequence_n'], first_release_date=data[\n 'first_release_date'], first_release_duration=data[\n 'first_release_duration'], first_album_title=data['first_album_title'],\n first_album_label=data['first_album_label'], first_release_catalog_n=\n data['first_release_catalog_n'], ean=data['ean'], isrc=data['isrc'],\n recording_format=data['recording_format'], recording_technique=data[\n 'recording_technique'], media_type=media_type)\n", (23624, 24193), False, 'from cwr.work import RecordingDetailRecord, ComponentRecord, AlternateTitleRecord, AuthoredWorkRecord, InstrumentationDetailRecord, InstrumentationSummaryRecord, PerformingArtistRecord, WorkOriginRecord, WorkRecord\n'), ((25497, 25523), 'cwr.file.CWRFile', 'CWRFile', (['tag', 'transmission'], {}), '(tag, transmission)\n', (25504, 25523), False, 'from cwr.file import CWRFile, FileTag\n'), ((26439, 26476), 'cwr.transmission.Transmission', 'Transmission', (['header', 'trailer', 'groups'], {}), '(header, trailer, groups)\n', (26451, 26476), False, 'from cwr.transmission import Transmission, TransmissionTrailer, TransmissionHeader\n'), ((27645, 27681), 'cwr.group.Group', 'Group', (['header', 'trailer', 'transactions'], {}), '(header, trailer, transactions)\n', (27650, 27681), False, 'from cwr.group import Group, GroupHeader, GroupTrailer\n'), ((27874, 28166), 'cwr.transmission.TransmissionHeader', 'TransmissionHeader', ([], {'record_type': "data['record_type']", 'sender_id': "data['sender_id']", 'sender_name': "data['sender_name']", 'sender_type': "data['sender_type']", 'creation_date_time': "data['creation_date_time']", 'transmission_date': "data['transmission_date']", 'edi_standard': "data['edi_standard']"}), "(record_type=data['record_type'], sender_id=data[\n 'sender_id'], sender_name=data['sender_name'], sender_type=data[\n 'sender_type'], creation_date_time=data['creation_date_time'],\n transmission_date=data['transmission_date'], edi_standard=data[\n 'edi_standard'])\n", (27892, 28166), False, 'from cwr.transmission import Transmission, TransmissionTrailer, TransmissionHeader\n'), ((28713, 28887), 'cwr.transmission.TransmissionTrailer', 'TransmissionTrailer', ([], {'record_type': "data['record_type']", 'group_count': "data['group_count']", 'transaction_count': "data['transaction_count']", 'record_count': 
"data['record_count']"}), "(record_type=data['record_type'], group_count=data[\n 'group_count'], transaction_count=data['transaction_count'],\n record_count=data['record_count'])\n", (28732, 28887), False, 'from cwr.transmission import Transmission, TransmissionTrailer, TransmissionHeader\n'), ((29635, 30882), 'cwr.work.WorkRecord', 'WorkRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': "data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'submitter_work_n': "data['submitter_work_n']", 'title': "data['title']", 'version_type': "data['version_type']", 'musical_work_distribution_category': "data['musical_work_distribution_category']", 'date_publication_printed_edition': "data['date_publication_printed_edition']", 'text_music_relationship': "data['text_music_relationship']", 'language_code': "data['language_code']", 'copyright_number': "data['copyright_number']", 'copyright_date': "data['copyright_date']", 'music_arrangement': "data['music_arrangement']", 'lyric_adaptation': "data['lyric_adaptation']", 'excerpt_type': "data['excerpt_type']", 'composite_type': "data['composite_type']", 'composite_component_count': "data['composite_component_count']", 'iswc': "data['iswc']", 'work_type': "data['work_type']", 'duration': "data['duration']", 'catalogue_number': 'catalogue_number', 'opus_number': 'opus_number', 'contact_id': "data['contact_id']", 'contact_name': "data['contact_name']", 'recorded_indicator': "data['recorded_indicator']", 'priority_flag': 'priority_flag', 'exceptional_clause': 'exceptional_clause', 'grand_rights_indicator': "data['grand_rights_indicator']"}), "(record_type=data['record_type'], transaction_sequence_n=data[\n 'transaction_sequence_n'], record_sequence_n=data['record_sequence_n'],\n submitter_work_n=data['submitter_work_n'], title=data['title'],\n version_type=data['version_type'], musical_work_distribution_category=\n data['musical_work_distribution_category'],\n date_publication_printed_edition=data[\n 'date_publication_printed_edition'], text_music_relationship=data[\n 'text_music_relationship'], language_code=data['language_code'],\n copyright_number=data['copyright_number'], copyright_date=data[\n 'copyright_date'], music_arrangement=data['music_arrangement'],\n lyric_adaptation=data['lyric_adaptation'], excerpt_type=data[\n 'excerpt_type'], composite_type=data['composite_type'],\n composite_component_count=data['composite_component_count'], iswc=data[\n 'iswc'], work_type=data['work_type'], duration=data['duration'],\n catalogue_number=catalogue_number, opus_number=opus_number, contact_id=\n data['contact_id'], contact_name=data['contact_name'],\n recorded_indicator=data['recorded_indicator'], priority_flag=\n priority_flag, exceptional_clause=exceptional_clause,\n grand_rights_indicator=data['grand_rights_indicator'])\n", (29645, 30882), False, 'from cwr.work import RecordingDetailRecord, ComponentRecord, AlternateTitleRecord, AuthoredWorkRecord, InstrumentationDetailRecord, InstrumentationSummaryRecord, PerformingArtistRecord, WorkOriginRecord, WorkRecord\n'), ((31802, 32396), 'cwr.work.WorkOriginRecord', 'WorkOriginRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': "data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'intended_purpose': "data['intended_purpose']", 'production_title': "data['production_title']", 'cd_identifier': "data['cd_identifier']", 'cut_number': "data['cut_number']", 'library': "data['library']", 'bltvr': "data['bltvr']", 
'visan': "data['visan']", 'production_n': "data['production_n']", 'episode_title': "data['episode_title']", 'episode_n': "data['episode_n']", 'year_production': "data['year_production']", 'audio_visual_key': "data['audio_visual_key']"}), "(record_type=data['record_type'], transaction_sequence_n=\n data['transaction_sequence_n'], record_sequence_n=data[\n 'record_sequence_n'], intended_purpose=data['intended_purpose'],\n production_title=data['production_title'], cd_identifier=data[\n 'cd_identifier'], cut_number=data['cut_number'], library=data['library'\n ], bltvr=data['bltvr'], visan=data['visan'], production_n=data[\n 'production_n'], episode_title=data['episode_title'], episode_n=data[\n 'episode_n'], year_production=data['year_production'], audio_visual_key\n =data['audio_visual_key'])\n", (31818, 32396), False, 'from cwr.work import RecordingDetailRecord, ComponentRecord, AlternateTitleRecord, AuthoredWorkRecord, InstrumentationDetailRecord, InstrumentationSummaryRecord, PerformingArtistRecord, WorkOriginRecord, WorkRecord\n'), ((33266, 33509), 'cwr.interested_party.Writer', 'Writer', ([], {'ip_n': "data['ip_n']", 'personal_number': "data['personal_number']", 'ipi_base_n': 'ipi_base_n', 'writer_first_name': "data['writer_first_name']", 'writer_last_name': "data['writer_last_name']", 'tax_id': "data['tax_id']", 'ipi_name_n': "data['ipi_name_n']"}), "(ip_n=data['ip_n'], personal_number=data['personal_number'],\n ipi_base_n=ipi_base_n, writer_first_name=data['writer_first_name'],\n writer_last_name=data['writer_last_name'], tax_id=data['tax_id'],\n ipi_name_n=data['ipi_name_n'])\n", (33272, 33509), False, 'from cwr.interested_party import IPTerritoryOfControlRecord, Publisher, PublisherRecord, Writer, PublisherForWriterRecord, WriterRecord\n'), ((34035, 34710), 'cwr.interested_party.WriterRecord', 'WriterRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': "data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'writer': 'writer', 'writer_designation': "data['writer_designation']", 'work_for_hire': "data['work_for_hire']", 'writer_unknown': "data['writer_unknown']", 'reversionary': "data['reversionary']", 'first_recording_refusal': "data['first_recording_refusal']", 'usa_license': 'usa_license', 'pr_society': "data['pr_society']", 'pr_ownership_share': "data['pr_ownership_share']", 'mr_society': "data['mr_society']", 'mr_ownership_share': "data['mr_ownership_share']", 'sr_society': "data['sr_society']", 'sr_ownership_share': "data['sr_ownership_share']"}), "(record_type=data['record_type'], transaction_sequence_n=data[\n 'transaction_sequence_n'], record_sequence_n=data['record_sequence_n'],\n writer=writer, writer_designation=data['writer_designation'],\n work_for_hire=data['work_for_hire'], writer_unknown=data[\n 'writer_unknown'], reversionary=data['reversionary'],\n first_recording_refusal=data['first_recording_refusal'], usa_license=\n usa_license, pr_society=data['pr_society'], pr_ownership_share=data[\n 'pr_ownership_share'], mr_society=data['mr_society'],\n mr_ownership_share=data['mr_ownership_share'], sr_society=data[\n 'sr_society'], sr_ownership_share=data['sr_ownership_share'])\n", (34047, 34710), False, 'from cwr.interested_party import IPTerritoryOfControlRecord, Publisher, PublisherRecord, Writer, PublisherForWriterRecord, WriterRecord\n'), ((35370, 35676), 'cwr.non_roman_alphabet.NonRomanAlphabetAgreementPartyRecord', 'NonRomanAlphabetAgreementPartyRecord', ([], {'record_type': "data['record_type']", 
'transaction_sequence_n': "data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'ip_name': "data['ip_name']", 'ip_writer_name': "data['ip_writer_name']", 'ip_n': "data['ip_n']", 'language_code': "data['language_code']"}), "(record_type=data['record_type'],\n transaction_sequence_n=data['transaction_sequence_n'],\n record_sequence_n=data['record_sequence_n'], ip_name=data['ip_name'],\n ip_writer_name=data['ip_writer_name'], ip_n=data['ip_n'], language_code\n =data['language_code'])\n", (35406, 35676), False, 'from cwr.non_roman_alphabet import NonRomanAlphabetAgreementPartyRecord, NonRomanAlphabetOtherWriterRecord, NonRomanAlphabetPerformanceDataRecord, NonRomanAlphabetPublisherNameRecord, NonRomanAlphabetTitleRecord, NonRomanAlphabetWorkRecord, NonRomanAlphabetWriterNameRecord\n'), ((35953, 36279), 'cwr.non_roman_alphabet.NonRomanAlphabetOtherWriterRecord', 'NonRomanAlphabetOtherWriterRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': "data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'writer_first_name': "data['writer_first_name']", 'writer_name': "data['writer_name']", 'position': "data['position']", 'language_code': "data['language_code']"}), "(record_type=data['record_type'],\n transaction_sequence_n=data['transaction_sequence_n'],\n record_sequence_n=data['record_sequence_n'], writer_first_name=data[\n 'writer_first_name'], writer_name=data['writer_name'], position=data[\n 'position'], language_code=data['language_code'])\n", (35986, 36279), False, 'from cwr.non_roman_alphabet import NonRomanAlphabetAgreementPartyRecord, NonRomanAlphabetOtherWriterRecord, NonRomanAlphabetPerformanceDataRecord, NonRomanAlphabetPublisherNameRecord, NonRomanAlphabetTitleRecord, NonRomanAlphabetWorkRecord, NonRomanAlphabetWriterNameRecord\n'), ((36849, 37417), 'cwr.non_roman_alphabet.NonRomanAlphabetPerformanceDataRecord', 'NonRomanAlphabetPerformanceDataRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': "data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'performing_artist_first_name': "data['performing_artist_first_name']", 'performing_artist_name': "data['performing_artist_name']", 'performing_artist_ipi_name_n': "data['performing_artist_ipi_name_n']", 'performing_artist_ipi_base_n': 'ipi_base', 'language_code': "data['language_code']", 'performance_language': "data['performance_language']", 'performance_dialect': "data['performance_dialect']"}), "(record_type=data['record_type'],\n transaction_sequence_n=data['transaction_sequence_n'],\n record_sequence_n=data['record_sequence_n'],\n performing_artist_first_name=data['performing_artist_first_name'],\n performing_artist_name=data['performing_artist_name'],\n performing_artist_ipi_name_n=data['performing_artist_ipi_name_n'],\n performing_artist_ipi_base_n=ipi_base, language_code=data[\n 'language_code'], performance_language=data['performance_language'],\n performance_dialect=data['performance_dialect'])\n", (36886, 37417), False, 'from cwr.non_roman_alphabet import NonRomanAlphabetAgreementPartyRecord, NonRomanAlphabetOtherWriterRecord, NonRomanAlphabetPerformanceDataRecord, NonRomanAlphabetPublisherNameRecord, NonRomanAlphabetTitleRecord, NonRomanAlphabetWorkRecord, NonRomanAlphabetWriterNameRecord\n'), ((37718, 38050), 'cwr.non_roman_alphabet.NonRomanAlphabetPublisherNameRecord', 'NonRomanAlphabetPublisherNameRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': 
"data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'publisher_sequence_n': "data['publisher_sequence_n']", 'ip_n': "data['ip_n']", 'publisher_name': "data['publisher_name']", 'language_code': "data['language_code']"}), "(record_type=data['record_type'],\n transaction_sequence_n=data['transaction_sequence_n'],\n record_sequence_n=data['record_sequence_n'], publisher_sequence_n=data[\n 'publisher_sequence_n'], ip_n=data['ip_n'], publisher_name=data[\n 'publisher_name'], language_code=data['language_code'])\n", (37753, 38050), False, 'from cwr.non_roman_alphabet import NonRomanAlphabetAgreementPartyRecord, NonRomanAlphabetOtherWriterRecord, NonRomanAlphabetPerformanceDataRecord, NonRomanAlphabetPublisherNameRecord, NonRomanAlphabetTitleRecord, NonRomanAlphabetWorkRecord, NonRomanAlphabetWriterNameRecord\n'), ((38314, 38575), 'cwr.non_roman_alphabet.NonRomanAlphabetTitleRecord', 'NonRomanAlphabetTitleRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': "data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'title': "data['title']", 'title_type': "data['title_type']", 'language_code': "data['language_code']"}), "(record_type=data['record_type'],\n transaction_sequence_n=data['transaction_sequence_n'],\n record_sequence_n=data['record_sequence_n'], title=data['title'],\n title_type=data['title_type'], language_code=data['language_code'])\n", (38341, 38575), False, 'from cwr.non_roman_alphabet import NonRomanAlphabetAgreementPartyRecord, NonRomanAlphabetOtherWriterRecord, NonRomanAlphabetPerformanceDataRecord, NonRomanAlphabetPublisherNameRecord, NonRomanAlphabetTitleRecord, NonRomanAlphabetWorkRecord, NonRomanAlphabetWriterNameRecord\n'), ((39069, 39298), 'cwr.non_roman_alphabet.NonRomanAlphabetWorkRecord', 'NonRomanAlphabetWorkRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': "data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'title': "data['title']", 'language_code': "data['language_code']"}), "(record_type=data['record_type'],\n transaction_sequence_n=data['transaction_sequence_n'],\n record_sequence_n=data['record_sequence_n'], title=data['title'],\n language_code=data['language_code'])\n", (39095, 39298), False, 'from cwr.non_roman_alphabet import NonRomanAlphabetAgreementPartyRecord, NonRomanAlphabetOtherWriterRecord, NonRomanAlphabetPerformanceDataRecord, NonRomanAlphabetPublisherNameRecord, NonRomanAlphabetTitleRecord, NonRomanAlphabetWorkRecord, NonRomanAlphabetWriterNameRecord\n'), ((39755, 40082), 'cwr.non_roman_alphabet.NonRomanAlphabetWriterNameRecord', 'NonRomanAlphabetWriterNameRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': "data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'writer_first_name': "data['writer_first_name']", 'writer_last_name': "data['writer_last_name']", 'ip_n': "data['ip_n']", 'language_code': "data['language_code']"}), "(record_type=data['record_type'],\n transaction_sequence_n=data['transaction_sequence_n'],\n record_sequence_n=data['record_sequence_n'], writer_first_name=data[\n 'writer_first_name'], writer_last_name=data['writer_last_name'], ip_n=\n data['ip_n'], language_code=data['language_code'])\n", (39787, 40082), False, 'from cwr.non_roman_alphabet import NonRomanAlphabetAgreementPartyRecord, NonRomanAlphabetOtherWriterRecord, NonRomanAlphabetPerformanceDataRecord, NonRomanAlphabetPublisherNameRecord, NonRomanAlphabetTitleRecord, 
NonRomanAlphabetWorkRecord, NonRomanAlphabetWriterNameRecord\n'), ((41124, 41270), 'cwr.interested_party.Publisher', 'Publisher', ([], {'ip_n': "data['ip_n']", 'publisher_name': "data['publisher_name']", 'ipi_name_n': "data['ipi_name_n']", 'ipi_base_n': 'ipi_base', 'tax_id': "data['tax_id']"}), "(ip_n=data['ip_n'], publisher_name=data['publisher_name'],\n ipi_name_n=data['ipi_name_n'], ipi_base_n=ipi_base, tax_id=data['tax_id'])\n", (41133, 41270), False, 'from cwr.interested_party import IPTerritoryOfControlRecord, Publisher, PublisherRecord, Writer, PublisherForWriterRecord, WriterRecord\n'), ((42553, 43463), 'cwr.interested_party.PublisherRecord', 'PublisherRecord', ([], {'record_type': "data['record_type']", 'transaction_sequence_n': "data['transaction_sequence_n']", 'record_sequence_n': "data['record_sequence_n']", 'publisher': 'publisher', 'publisher_sequence_n': "data['publisher_sequence_n']", 'submitter_agreement_n': "data['submitter_agreement_n']", 'publisher_type': "data['publisher_type']", 'publisher_unknown': "data['publisher_unknown']", 'pr_society': "data['pr_society']", 'pr_ownership_share': "data['pr_ownership_share']", 'mr_society': "data['mr_society']", 'mr_ownership_share': "data['mr_ownership_share']", 'sr_society': "data['sr_society']", 'sr_ownership_share': "data['sr_ownership_share']", 'special_agreements': 'special_agreements', 'first_recording_refusal': 'first_recording_refusal', 'international_standard_code': 'international_standard_code', 'society_assigned_agreement_n': 'society_assigned_agreement_n', 'agreement_type': 'agreement_type', 'usa_license': 'usa_license'}), "(record_type=data['record_type'], transaction_sequence_n=\n data['transaction_sequence_n'], record_sequence_n=data[\n 'record_sequence_n'], publisher=publisher, publisher_sequence_n=data[\n 'publisher_sequence_n'], submitter_agreement_n=data[\n 'submitter_agreement_n'], publisher_type=data['publisher_type'],\n publisher_unknown=data['publisher_unknown'], pr_society=data[\n 'pr_society'], pr_ownership_share=data['pr_ownership_share'],\n mr_society=data['mr_society'], mr_ownership_share=data[\n 'mr_ownership_share'], sr_society=data['sr_society'],\n sr_ownership_share=data['sr_ownership_share'], special_agreements=\n special_agreements, first_recording_refusal=first_recording_refusal,\n international_standard_code=international_standard_code,\n society_assigned_agreement_n=society_assigned_agreement_n,\n agreement_type=agreement_type, usa_license=usa_license)\n", (42568, 43463), False, 'from cwr.interested_party import IPTerritoryOfControlRecord, Publisher, PublisherRecord, Writer, PublisherForWriterRecord, WriterRecord\n'), ((43820, 43906), 'cwr.table_value.TableValue', 'TableValue', ([], {'code': "data['code']", 'name': "data['name']", 'description': "data['description']"}), "(code=data['code'], name=data['name'], description=data[\n 'description'])\n", (43830, 43906), False, 'from cwr.table_value import MediaTypeValue, TableValue, InstrumentValue\n'), ((44136, 44330), 'cwr.table_value.MediaTypeValue', 'MediaTypeValue', ([], {'code': "data['code']", 'name': "data['name']", 'media_type': "data['media_type']", 'duration_max': "data['duration_max']", 'works_max': "data['works_max']", 'fragments_max': "data['fragments_max']"}), "(code=data['code'], name=data['name'], media_type=data[\n 'media_type'], duration_max=data['duration_max'], works_max=data[\n 'works_max'], fragments_max=data['fragments_max'])\n", (44150, 44330), False, 'from cwr.table_value import MediaTypeValue, TableValue, 
InstrumentValue\n'), ((44655, 44768), 'cwr.table_value.InstrumentValue', 'InstrumentValue', ([], {'code': "data['code']", 'name': "data['name']", 'family': "data['family']", 'description': "data['description']"}), "(code=data['code'], name=data['name'], family=data['family'],\n description=data['description'])\n", (44670, 44768), False, 'from cwr.table_value import MediaTypeValue, TableValue, InstrumentValue\n'), ((45026, 45122), 'cwr.file.FileTag', 'FileTag', (["data['year']", "data['sequence_n']", "data['sender']", "data['receiver']", "data['version']"], {}), "(data['year'], data['sequence_n'], data['sender'], data['receiver'],\n data['version'])\n", (45033, 45122), False, 'from cwr.file import CWRFile, FileTag\n'), ((45377, 45424), 'cwr.other.AVIKey', 'AVIKey', (["data['society_code']", "data['av_number']"], {}), "(data['society_code'], data['av_number'])\n", (45383, 45424), False, 'from cwr.other import AVIKey, VISAN\n')] |
imranpopz/android_bootable_recovery-1 | prebuilt/twrp_fonts.py | ec4512ad1e20f640b3dcd6faf8c04cae711e4f30 | #!/usr/bin/env python
# -*- coding: utf8 -*-
import codecs,os,gzip,ctypes,ctypes.util,sys
from struct import *
from PIL import Image, ImageDraw, ImageFont
# ====== Python script to convert TrueTypeFonts to TWRP's .dat format ======
# This script was originally made by https://github.com/suky for his chinese version of TWRP
# and then translated to English by feilplane at #twrp of irc.freenode.net.
# However, it was not compatible with vanilla TWRP, so https://github.com/Tasssadar rewrote
# most of it and it now has very little in common with the original script.
class Reference():
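    """Small mutable holder so the command-line parsing below can update option values in place."""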
def __init__(self, val):
self.__value = val
def get(self):
return self.__value
def set(self, val):
self.__value = val
quiet = Reference(False)
def log(text):
if not quiet.get():
sys.stdout.write(text)
def write_data(f, width, height, offsets, data):
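    """Write the font.dat layout: width and height as uint32, one uint32 offset per character, then the packed bitmap data."""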
f.write(pack("<I", width))
f.write(pack("<I", height))
for off in offsets:
f.write(pack("<I", off))
f.write(data)
if __name__ == "__main__":
fontsize = Reference(20)
out_fname = Reference("font.dat")
voffset = Reference(None)
padding = Reference(0)
font_fname = Reference(None)
preview = Reference(None)
arg_parser = [
["-s", "--size=", fontsize, int],
["-o", "--output=", out_fname, str],
["-p", "--preview=", preview, str],
[None, "--padding=", padding, int],
["-q", "--quiet", quiet, None],
[None, "--voffset=", voffset, int]
]
argv = sys.argv
argc = len(argv)
i = 1
while i < argc:
arg = argv[i]
arg_next = argv[i+1] if i+1 < argc else None
if arg == "--help" or arg == "-h":
print ("This script converts TrueTypeFonts to .dat file for TWRP recovery.\n\n"
"Usage: %s [SWITCHES] [TRUETYPE FILE]\n\n"
" -h, --help - print help\n"
" -o, --output=[FILE] - output file or '-' for stdout (default: font.dat)\n"
" -p, --preview=[FILE] - generate font preview to png file\n"
" --padding=[PIXELS] - horizontal padding around each character (default: 0)\n"
" -q, --quiet - Do not print any output\n"
" -s, --size=[SIZE IN PIXELS] - specify font size in points (default: 20)\n"
" --voffset=[PIXELS] - vertical offset (default: font size*0.25)\n\n"
"Example:\n"
" %s -s 40 -o ComicSans_40.dat -p preview.png ComicSans.ttf\n") % (
sys.argv[0], sys.argv[0]
)
exit(0)
found = False
for p in arg_parser:
if p[0] and arg == p[0] and (arg_next or not p[3]):
if p[3]:
p[2].set(p[3](arg_next))
else:
p[2].set(True)
i += 1
found = True
break
elif p[1] and arg.startswith(p[1]):
if p[3]:
p[2].set(p[3](arg[len(p[1]):]))
else:
p[2].set(True)
found = True
break
if not found:
font_fname.set(arg)
i += 1
if not voffset.get():
voffset.set(int(fontsize.get()*0.25))
if out_fname.get() == "-":
quiet.set(True)
log("Loading font %s...\n" % font_fname.get())
font = ImageFont.truetype(font_fname.get(), fontsize.get(), 0, "utf-32be")
cwidth = 0
cheight = font.getsize('A')[1]
offsets = []
renders = []
data = bytes()
# temp Image and ImageDraw to get access to textsize
res = Image.new('L', (1, 1), 0)
res_draw = ImageDraw.Draw(res)
# Measure each character and render it to separate Image
log("Rendering characters...\n")
for i in range(32, 128):
w, h = res_draw.textsize(chr(i), font)
w += padding.get()*2
offsets.append(cwidth)
cwidth += w
if h > cheight:
cheight = h
ichr = Image.new('L', (w, cheight*2))
ichr_draw = ImageDraw.Draw(ichr)
ichr_draw.text((padding.get(), 0), chr(i), 255, font)
renders.append(ichr)
# Twice the height to account for under-the-baseline characters
cheight *= 2
# Create the result bitmap
log("Creating result bitmap...\n")
res = Image.new('L', (cwidth, cheight), 0)
res_draw = ImageDraw.Draw(res)
# Paste all characters into result bitmap
for i in range(len(renders)):
res.paste(renders[i], (offsets[i], 0))
# uncomment to draw lines separating each character (for debug)
#res_draw.rectangle([offsets[i], 0, offsets[i], cheight], outline="blue")
# crop the blank areas on top and bottom
(_, start_y, _, end_y) = res.getbbox()
res = res.crop((0, start_y, cwidth, end_y))
cheight = (end_y - start_y) + voffset.get()
new_res = Image.new('L', (cwidth, cheight))
new_res.paste(res, (0, voffset.get()))
res = new_res
# save the preview
if preview.get():
log("Saving preview to %s...\n" % preview.get())
res.save(preview.get())
# Pack the data.
# The "data" is a B/W bitmap with all 96 characters next to each other
# on one line. It is as wide as all the characters combined and as
# high as the tallest character, plus padding.
# Each byte contains info about eight pixels, starting from
# highest to lowest bit:
# bits: | 7 6 5 4 3 2 1 0 | 15 14 13 12 11 10 9 8 | ...
# pixels: | 0 1 2 3 4 5 6 7 | 8 9 10 11 12 13 14 15 | ...
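    # For example, a row starting with pixels on,off,off,on,on,off,off,off is
    # packed as the byte 0b10011000 (pixel 0 -> bit 7, pixel 1 -> bit 6, and so on).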
log("Packing data...\n")
bit = 0
bit_itr = 0
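    # NOTE: Image.tostring() only exists in old PIL/Pillow; Pillow >= 3.0 renamed it to tobytes().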
for c in res.tostring():
# FIXME: How to handle antialiasing?
# if c != '\x00':
# In Python3, c is int, in Python2, c is string. Because of reasons.
try:
fill = (ord(c) >= 127)
except TypeError:
fill = (c >= 127)
if fill:
bit |= (1 << (7-bit_itr))
bit_itr += 1
if bit_itr >= 8:
data += pack("<B", bit)
bit_itr = 0
bit = 0
# Write them to the file.
# Format:
# 000: width
# 004: height
# 008: offsets of each characters (96*uint32)
# 392: data as described above
log("Writing to %s...\n" % out_fname.get())
if out_fname.get() == "-":
write_data(sys.stdout, cwidth, cheight, offsets, data)
else:
with open(out_fname.get(), 'wb') as f:
write_data(f, cwidth, cheight, offsets, data)
exit(0)
| [((3751, 3776), 'PIL.Image.new', 'Image.new', (['"""L"""', '(1, 1)', '(0)'], {}), "('L', (1, 1), 0)\n", (3760, 3776), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((3792, 3811), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['res'], {}), '(res)\n', (3806, 3811), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((4460, 4496), 'PIL.Image.new', 'Image.new', (['"""L"""', '(cwidth, cheight)', '(0)'], {}), "('L', (cwidth, cheight), 0)\n", (4469, 4496), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((4512, 4531), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['res'], {}), '(res)\n', (4526, 4531), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((5013, 5046), 'PIL.Image.new', 'Image.new', (['"""L"""', '(cwidth, cheight)'], {}), "('L', (cwidth, cheight))\n", (5022, 5046), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((820, 842), 'sys.stdout.write', 'sys.stdout.write', (['text'], {}), '(text)\n', (836, 842), False, 'import codecs, os, gzip, ctypes, ctypes.util, sys\n'), ((4130, 4162), 'PIL.Image.new', 'Image.new', (['"""L"""', '(w, cheight * 2)'], {}), "('L', (w, cheight * 2))\n", (4139, 4162), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((4181, 4201), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['ichr'], {}), '(ichr)\n', (4195, 4201), False, 'from PIL import Image, ImageDraw, ImageFont\n')] |
lawrendran/open | open/users/serializers.py | d136f694bafab647722c78be6f39ec79d589f774 | import pytz
from rest_auth.serializers import TokenSerializer
from rest_framework.authtoken.models import Token
from rest_framework.exceptions import ValidationError
from rest_framework.fields import (
CharField,
CurrentUserDefault,
HiddenField,
UUIDField,
ChoiceField,
)
from rest_framework.serializers import ModelSerializer, Serializer
from rest_framework.validators import UniqueValidator
from django.contrib.auth.hashers import check_password
from open.users.models import User
class SimpleUserReadSerializer(ModelSerializer):
class Meta:
model = User
fields = (
"name",
"uuid",
)
class UserReadSerializer(ModelSerializer):
class Meta:
model = User
fields = (
"name",
"uuid",
"signed_up_from",
"date_joined",
"username",
"email",
"created",
"modified",
)
class UserTokenSerializer(TokenSerializer):
user = UserReadSerializer()
class Meta:
model = Token
fields = ["key", "user"]
# TODO - this view and serializer are on hold until registration is figured out (later)
class UserCreateSerializer(ModelSerializer):
username = CharField(validators=[UniqueValidator(queryset=User.objects.all())])
# need to make email optional ... prob should think through signup form a little
email = CharField(
validators=[UniqueValidator(queryset=User.objects.all())], required=False
)
password = CharField(write_only=True, min_length=8)
signed_up_from = CharField(
write_only=True, min_length=8, required=False, default="", trim_whitespace=True
)
timezone_string = ChoiceField(
choices=pytz.all_timezones, required=False, default="US/Eastern"
)
class Meta:
model = User
fields = ["username", "email", "password", "signed_up_from", "timezone_string"]
# TODO test - does this work with just username / no email, etc.
def create(self, validated_data):
username = validated_data.pop("username")
password = validated_data.pop("password")
is_betterself_user = False
if validated_data["signed_up_from"] == "betterself":
is_betterself_user = True
validated_data["is_betterself_user"] = is_betterself_user
user = User.objects.create(username=username, **validated_data)
user.set_password(password)
user.save()
return user
class UserDeleteSerializer(Serializer):
    # Most of this validation is technically redundant, but it is kept as a safeguard
    # against someone deleting their account by mistake.
password = CharField()
user = HiddenField(default=CurrentUserDefault())
uuid = UUIDField()
def validate(self, data):
user = data["user"]
validated_password = check_password(data["password"], user.password)
if not validated_password:
raise ValidationError("Invalid Password Entered")
validated_uuid = str(user.uuid) == str(data["uuid"])
if not validated_uuid:
raise ValidationError("Invalid UUID", str(user.uuid))
validate_user = user.username != "[email protected]"
if not validate_user:
raise ValidationError(
f"This is a protected user and cannot be deleted. {user.username}"
)
return data
| [((1538, 1578), 'rest_framework.fields.CharField', 'CharField', ([], {'write_only': '(True)', 'min_length': '(8)'}), '(write_only=True, min_length=8)\n', (1547, 1578), False, 'from rest_framework.fields import CharField, CurrentUserDefault, HiddenField, UUIDField, ChoiceField\n'), ((1600, 1694), 'rest_framework.fields.CharField', 'CharField', ([], {'write_only': '(True)', 'min_length': '(8)', 'required': '(False)', 'default': '""""""', 'trim_whitespace': '(True)'}), "(write_only=True, min_length=8, required=False, default='',\n trim_whitespace=True)\n", (1609, 1694), False, 'from rest_framework.fields import CharField, CurrentUserDefault, HiddenField, UUIDField, ChoiceField\n'), ((1727, 1804), 'rest_framework.fields.ChoiceField', 'ChoiceField', ([], {'choices': 'pytz.all_timezones', 'required': '(False)', 'default': '"""US/Eastern"""'}), "(choices=pytz.all_timezones, required=False, default='US/Eastern')\n", (1738, 1804), False, 'from rest_framework.fields import CharField, CurrentUserDefault, HiddenField, UUIDField, ChoiceField\n'), ((2738, 2749), 'rest_framework.fields.CharField', 'CharField', ([], {}), '()\n', (2747, 2749), False, 'from rest_framework.fields import CharField, CurrentUserDefault, HiddenField, UUIDField, ChoiceField\n'), ((2814, 2825), 'rest_framework.fields.UUIDField', 'UUIDField', ([], {}), '()\n', (2823, 2825), False, 'from rest_framework.fields import CharField, CurrentUserDefault, HiddenField, UUIDField, ChoiceField\n'), ((2372, 2428), 'open.users.models.User.objects.create', 'User.objects.create', ([], {'username': 'username'}), '(username=username, **validated_data)\n', (2391, 2428), False, 'from open.users.models import User\n'), ((2914, 2961), 'django.contrib.auth.hashers.check_password', 'check_password', (["data['password']", 'user.password'], {}), "(data['password'], user.password)\n", (2928, 2961), False, 'from django.contrib.auth.hashers import check_password\n'), ((2781, 2801), 'rest_framework.fields.CurrentUserDefault', 'CurrentUserDefault', ([], {}), '()\n', (2799, 2801), False, 'from rest_framework.fields import CharField, CurrentUserDefault, HiddenField, UUIDField, ChoiceField\n'), ((3016, 3059), 'rest_framework.exceptions.ValidationError', 'ValidationError', (['"""Invalid Password Entered"""'], {}), "('Invalid Password Entered')\n", (3031, 3059), False, 'from rest_framework.exceptions import ValidationError\n'), ((3336, 3424), 'rest_framework.exceptions.ValidationError', 'ValidationError', (['f"""This is a protected user and cannot be deleted. {user.username}"""'], {}), "(\n f'This is a protected user and cannot be deleted. {user.username}')\n", (3351, 3424), False, 'from rest_framework.exceptions import ValidationError\n'), ((1305, 1323), 'open.users.models.User.objects.all', 'User.objects.all', ([], {}), '()\n', (1321, 1323), False, 'from open.users.models import User\n'), ((1480, 1498), 'open.users.models.User.objects.all', 'User.objects.all', ([], {}), '()\n', (1496, 1498), False, 'from open.users.models import User\n')] |
rhasspy/rhasspy-test | tests/en/test_asr.py | 0c180bfdd370f18ad2f8b9ee483ea5520161ab74 | """Automated speech recognition tests."""
import os
import sys
import unittest
from pathlib import Path
import requests
from rhasspyhermes.asr import AsrTextCaptured
from rhasspyhermes.nlu import NluIntent
class AsrEnglishTests(unittest.TestCase):
"""Test automated speech recognition (English)"""
def setUp(self):
self.http_host = os.environ.get("RHASSPY_HTTP_HOST", "localhost")
self.http_port = os.environ.get("RHASSPY_HTTP_PORT", 12101)
self.wav_bytes = Path("wav/en/turn_on_the_living_room_lamp.wav").read_bytes()
def api_url(self, fragment):
return f"http://{self.http_host}:{self.http_port}/api/{fragment}"
def check_status(self, response):
if response.status_code != 200:
print(response.text, file=sys.stderr)
response.raise_for_status()
def test_http_speech_to_text(self):
"""Test speech-to-text HTTP endpoint"""
response = requests.post(self.api_url("speech-to-text"), data=self.wav_bytes)
self.check_status(response)
text = response.content.decode()
self.assertEqual(text, "turn on the living room lamp")
def test_http_speech_to_text_json(self):
"""Text speech-to-text HTTP endpoint (Rhasspy JSON format)"""
response = requests.post(
self.api_url("speech-to-text"),
data=self.wav_bytes,
headers={"Accept": "application/json"},
)
self.check_status(response)
result = response.json()
self.assertEqual(result["text"], "turn on the living room lamp")
def test_http_speech_to_text_hermes(self):
"""Text speech-to-text HTTP endpoint (Hermes format)"""
response = requests.post(
self.api_url("speech-to-text"),
data=self.wav_bytes,
params={"outputFormat": "hermes"},
)
self.check_status(response)
result = response.json()
self.assertEqual(result["type"], "textCaptured")
text_captured = AsrTextCaptured.from_dict(result["value"])
self.assertEqual(text_captured.text, "turn on the living room lamp")
def test_http_speech_to_intent(self):
response = requests.post(self.api_url("speech-to-intent"), data=self.wav_bytes)
self.check_status(response)
result = response.json()
self.assertEqual(result["intent"]["name"], "ChangeLightState")
self.assertEqual(result["text"], "turn on the living room lamp")
self.assertEqual(result["slots"]["name"], "living room lamp")
self.assertEqual(result["slots"]["state"], "on")
def test_http_speech_to_intent_hermes(self):
response = requests.post(
self.api_url("speech-to-intent"),
data=self.wav_bytes,
params={"outputFormat": "hermes"},
)
self.check_status(response)
result = response.json()
self.assertEqual(result["type"], "intent")
nlu_intent = NluIntent.from_dict(result["value"])
self.assertEqual(nlu_intent.raw_input, "turn on the living room lamp")
self.assertEqual(nlu_intent.input, "turn on the living room lamp")
# Intent name and slots
self.assertEqual(nlu_intent.intent.intent_name, "ChangeLightState")
slots_by_name = {slot.slot_name: slot for slot in nlu_intent.slots}
self.assertIn("name", slots_by_name)
self.assertEqual(slots_by_name["name"].value["value"], "living room lamp")
self.assertIn("state", slots_by_name)
self.assertEqual(slots_by_name["state"].value["value"], "on")
| [((353, 401), 'os.environ.get', 'os.environ.get', (['"""RHASSPY_HTTP_HOST"""', '"""localhost"""'], {}), "('RHASSPY_HTTP_HOST', 'localhost')\n", (367, 401), False, 'import os\n'), ((427, 469), 'os.environ.get', 'os.environ.get', (['"""RHASSPY_HTTP_PORT"""', '(12101)'], {}), "('RHASSPY_HTTP_PORT', 12101)\n", (441, 469), False, 'import os\n'), ((2010, 2052), 'rhasspyhermes.asr.AsrTextCaptured.from_dict', 'AsrTextCaptured.from_dict', (["result['value']"], {}), "(result['value'])\n", (2035, 2052), False, 'from rhasspyhermes.asr import AsrTextCaptured\n'), ((2966, 3002), 'rhasspyhermes.nlu.NluIntent.from_dict', 'NluIntent.from_dict', (["result['value']"], {}), "(result['value'])\n", (2985, 3002), False, 'from rhasspyhermes.nlu import NluIntent\n'), ((495, 542), 'pathlib.Path', 'Path', (['"""wav/en/turn_on_the_living_room_lamp.wav"""'], {}), "('wav/en/turn_on_the_living_room_lamp.wav')\n", (499, 542), False, 'from pathlib import Path\n')] |
OthmaneJ/deep-tts | speech/melgan/model/multiscale.py | 93059d568c5b458d3f0d80eb294d397ecace8731 | import torch
import torch.nn as nn
import torch.nn.functional as F
from .discriminator import Discriminator
from .identity import Identity
class MultiScaleDiscriminator(nn.Module):
def __init__(self):
super(MultiScaleDiscriminator, self).__init__()
self.discriminators = nn.ModuleList(
[Discriminator() for _ in range(3)]
)
self.pooling = nn.ModuleList(
[Identity()] +
[nn.AvgPool1d(kernel_size=4, stride=2, padding=2) for _ in range(1, 3)]
)
def forward(self, x):
ret = list()
for pool, disc in zip(self.pooling, self.discriminators):
x = pool(x)
ret.append(disc(x))
return ret # [(feat, score), (feat, score), (feat, score)]
| [((455, 503), 'torch.nn.AvgPool1d', 'nn.AvgPool1d', ([], {'kernel_size': '(4)', 'stride': '(2)', 'padding': '(2)'}), '(kernel_size=4, stride=2, padding=2)\n', (467, 503), True, 'import torch.nn as nn\n')] |
AntonioLourencos/jogo-da-velha | main.py | 3b3e46e2d2f8c064f0df6a383bc5a0fe6bb01f63 | from game import about_button, start_button, play_sound, center_pos
import pygame
WHITE = (255,255,255)
BLACK = (0,0,0)
GREEN = (0, 255, 0)
pygame.init()
pygame.font.init()
pygame.mixer.init()
FONT = pygame.font.Font("assets/font.ttf", 70)
FONT_MIN = pygame.font.Font("assets/font.ttf", 30)
window = pygame.display.set_mode([600,600])
running = True
clock = pygame.time.Clock()
nickname = " "
me = "X"
ia = "O"
while running:
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
play_sound("minimize_001")
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_BACKSPACE and len(nickname) > 2:
nickname = list(nickname)
nickname.pop(-2)
nickname = "".join(nickname)
play_sound("error_001")
elif len(nickname.strip()) <= 10:
play_sound("bong_001")
if len(nickname) > 1:
nickname = list(nickname)
nickname.pop(-1)
nickname = "".join(nickname)
nickname += event.unicode
nickname += " "
if event.key == pygame.K_UP or event.key == pygame.K_DOWN:
if me == "X":
me = "O"
ia = "X"
else:
me = "X"
ia = "O"
window.fill(BLACK)
title = FONT.render("JOGO DA VELHA", True, WHITE)
title_pos = center_pos(title.get_rect(), 10)
window.blit(title, title_pos)
nickname_label = FONT.render("SEU NOME", True, WHITE)
nickname_label_pos = center_pos(nickname_label.get_rect(), 100)
window.blit(nickname_label, nickname_label_pos)
nickname_render = FONT.render(nickname, True, BLACK)
nickname_rect = nickname_render.get_rect()
nickname_pos = center_pos(nickname_rect, 180)
pygame.draw.rect(window, WHITE, (nickname_pos[0], 180, nickname_rect[2], nickname_rect[3]))
window.blit(nickname_render, nickname_pos)
choice_render = FONT.render(f"JOGUE COM {me}", True, WHITE)
window.blit(choice_render, center_pos(choice_render.get_rect(), 280))
my_name = FONT_MIN.render(f"DESENVOLVIDO POR MARIA EDUARDA DE AZEVEDO", True, WHITE)
window.blit(my_name, center_pos(my_name.get_rect(), 560))
start_button(window, "JOGAR", 380, me, ia, nickname.strip(), 10)
about_button(window, 450, 10)
pygame.display.flip()
clock.tick(60) | [((142, 155), 'pygame.init', 'pygame.init', ([], {}), '()\n', (153, 155), False, 'import pygame\n'), ((156, 174), 'pygame.font.init', 'pygame.font.init', ([], {}), '()\n', (172, 174), False, 'import pygame\n'), ((175, 194), 'pygame.mixer.init', 'pygame.mixer.init', ([], {}), '()\n', (192, 194), False, 'import pygame\n'), ((203, 242), 'pygame.font.Font', 'pygame.font.Font', (['"""assets/font.ttf"""', '(70)'], {}), "('assets/font.ttf', 70)\n", (219, 242), False, 'import pygame\n'), ((254, 293), 'pygame.font.Font', 'pygame.font.Font', (['"""assets/font.ttf"""', '(30)'], {}), "('assets/font.ttf', 30)\n", (270, 293), False, 'import pygame\n'), ((304, 339), 'pygame.display.set_mode', 'pygame.display.set_mode', (['[600, 600]'], {}), '([600, 600])\n', (327, 339), False, 'import pygame\n'), ((363, 382), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (380, 382), False, 'import pygame\n'), ((451, 469), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (467, 469), False, 'import pygame\n'), ((1902, 1932), 'game.center_pos', 'center_pos', (['nickname_rect', '(180)'], {}), '(nickname_rect, 180)\n', (1912, 1932), False, 'from game import about_button, start_button, play_sound, center_pos\n'), ((1937, 2032), 'pygame.draw.rect', 'pygame.draw.rect', (['window', 'WHITE', '(nickname_pos[0], 180, nickname_rect[2], nickname_rect[3])'], {}), '(window, WHITE, (nickname_pos[0], 180, nickname_rect[2],\n nickname_rect[3]))\n', (1953, 2032), False, 'import pygame\n'), ((2441, 2470), 'game.about_button', 'about_button', (['window', '(450)', '(10)'], {}), '(window, 450, 10)\n', (2453, 2470), False, 'from game import about_button, start_button, play_sound, center_pos\n'), ((2476, 2497), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (2495, 2497), False, 'import pygame\n'), ((549, 575), 'game.play_sound', 'play_sound', (['"""minimize_001"""'], {}), "('minimize_001')\n", (559, 575), False, 'from game import about_button, start_button, play_sound, center_pos\n'), ((832, 855), 'game.play_sound', 'play_sound', (['"""error_001"""'], {}), "('error_001')\n", (842, 855), False, 'from game import about_button, start_button, play_sound, center_pos\n'), ((918, 940), 'game.play_sound', 'play_sound', (['"""bong_001"""'], {}), "('bong_001')\n", (928, 940), False, 'from game import about_button, start_button, play_sound, center_pos\n')] |
1donggri/teamProject | schedule/views.py | 9b4f37c2a93b065529ce9dd245f9717a783dd456 | from django.shortcuts import render, redirect
from .models import Post
from .forms import ScheduleForm
from django.core.paginator import Paginator
# Create your views here.
def view_schedule(request):
all_posts = Post.objects.all().order_by('pub_date')
page = int(request.GET.get('p', 1))
pagenator = Paginator(all_posts, 5)
posts = pagenator.get_page(page)
return render(request, 'schedule/view_schedule.html', {'posts': posts})
def write_schedule(request):
if request.method == "POST":
form = ScheduleForm(request.POST)
if form.is_valid():
            # Call all of the form's validators and perform validation
# user_id = request.session.get('user')
# user = User.objects.get(pk=user_id)
schedule = Post()
schedule.title = form.cleaned_data['title']
            # # Values that pass validation are provided as a dict (form.cleaned_data)
            # # On validation failure, error information is stored in form.error
schedule.username = form.cleaned_data['username']
schedule.pub_date = form.cleaned_data['pub_date']
schedule.save()
return redirect('schedule:view_schedule')
else:
form = ScheduleForm()
return render(request, 'schedule/write_schedule.html', {'form': form})
def delete(request, posts_id):
post = Post.objects.get(id=posts_id)
post.delete()
posts = Post.objects.all().order_by('-id')
return render(request, 'schedule/view_schedule.html', {'posts': posts}) | [((314, 337), 'django.core.paginator.Paginator', 'Paginator', (['all_posts', '(5)'], {}), '(all_posts, 5)\n', (323, 337), False, 'from django.core.paginator import Paginator\n'), ((386, 450), 'django.shortcuts.render', 'render', (['request', '"""schedule/view_schedule.html"""', "{'posts': posts}"], {}), "(request, 'schedule/view_schedule.html', {'posts': posts})\n", (392, 450), False, 'from django.shortcuts import render, redirect\n'), ((1186, 1249), 'django.shortcuts.render', 'render', (['request', '"""schedule/write_schedule.html"""', "{'form': form}"], {}), "(request, 'schedule/write_schedule.html', {'form': form})\n", (1192, 1249), False, 'from django.shortcuts import render, redirect\n'), ((1399, 1463), 'django.shortcuts.render', 'render', (['request', '"""schedule/view_schedule.html"""', "{'posts': posts}"], {}), "(request, 'schedule/view_schedule.html', {'posts': posts})\n", (1405, 1463), False, 'from django.shortcuts import render, redirect\n'), ((1098, 1132), 'django.shortcuts.redirect', 'redirect', (['"""schedule:view_schedule"""'], {}), "('schedule:view_schedule')\n", (1106, 1132), False, 'from django.shortcuts import render, redirect\n')] |
kingsdigitallab/archetype-django | archetype/settings/local_stg.py | 6315c8f38e873e2d3b2d99fcfd47d01ce0ae35bc | from .base import * # noqa
CACHE_REDIS_DATABASE = '1'
CACHES['default']['LOCATION'] = '127.0.0.1:6379:' + CACHE_REDIS_DATABASE
INTERNAL_IPS = INTERNAL_IPS + ('', )
ALLOWED_HOSTS = ['']
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'app_archetype_stg',
'USER': 'app_archetype',
'PASSWORD': '',
'HOST': ''
},
}
| [] |
vnaskos/Website | website/sites/admin.py | 1c2adb0985f3932ddeca12025a2d216d2470cb63 | from django.contrib import admin
# Register your models here.
from website.sites.models import Post
@admin.register(Post)
class TestAdmin2(admin.ModelAdmin):
pass | [((108, 128), 'django.contrib.admin.register', 'admin.register', (['Post'], {}), '(Post)\n', (122, 128), False, 'from django.contrib import admin\n')] |
korbi98/TicTacToeGo_Zero | mcts.py | b8ea4562f3ddf914a53fc380f2266f13ab887e04 | # Monte Carlo tree search for TicTacToe
import numpy as np
from tictactoe import Tictactoe
import copy
from random import choice
from tree import Node
import time
class MCTS:
'''
Class defining a simple monte carlo tree search algorithm.
Attributes:
- game: instance of TicTacToe game
- current_player: player to perform next move
- number_of_rollouts: number of simulations for generating one move
    - tree: root Node of the search tree, expanded as the search progresses
'''
def __init__(self, game, number_of_rollouts):
self.game = game
self.current_player = game.move_number%2 + 1
print(self.current_player)
self.tree = Node(None, -1, 3 - self.current_player) # Root node of tree
self.number_of_rollouts = number_of_rollouts
print("Initial game state:\n",self.game.board)
def perform_search(self):
        '''Perform the MCTS by running the specified number of
        simulations and updating the corresponding leaf nodes.
        The leaf node is chosen by the traverse_tree function.
'''
start_time = time.clock()
for i in range(self.number_of_rollouts):
simulated_game = copy.deepcopy(self.game)
# Traverse to leaf
leaf = self.traverse_tree(simulated_game)
# Random simulation for leaf
result = self.rollout(simulated_game)
# Update all visited nodes
self.update_tree(result, leaf)
end_time = time.clock()
print("\nFirst layer:")
for child in self.tree.children:
child.print(self.tree)
second_layer = max(self.tree.children, key= lambda x: x.visits)
print("\nSecond layer:")
for child in second_layer.children:
child.print(self.tree)
print("\nSearch took:", round(end_time-start_time, 4), "seconds")
result = [0 for i in range(self.game.size**2)]
for child in self.tree.children:
result[child.boardposition] = child.visits
return result
def traverse_tree(self, simulated_game):
'''Choose next leaf for performing rollout. When node is fully
        expanded, the child with the highest UCT is chosen. Otherwise a
        random unexplored node is chosen.
'''
current_node = self.tree #root
while current_node.isExpanded():
current_node = current_node.UTC_traverse(self.tree)
x,y = simulated_game.get_coords(current_node.boardposition)
simulated_game.setField(x,y)
# create children if empty
if not current_node.children:
current_node.getPossibleChildren(simulated_game.board)
# terminate if board is full
if not simulated_game.move_number < simulated_game.size**2 or simulated_game.checkboard():
return current_node
x,y = simulated_game.get_coords(current_node.boardposition)
simulated_game.setField(x,y)
# Choose random unexplored leaf
unexplored_leafs = list(filter(lambda x: x.visits == 0, current_node.children))
return choice(unexplored_leafs)
def rollout(self, simulated_game):
        '''Perform random play for the chosen leaf node until a terminal
        state is reached'''
while (not simulated_game.checkboard()) and simulated_game.move_number < simulated_game.size**2:
simulated_game.perform_random_move()
res = simulated_game.checkboard()
print("Finished simulation player", res, "won. Terminal state is:")
simulated_game.printBoard()
return res
def update_tree(self, result, leaf):
'''update all visited nodes in tree'''
self.tree.visits += 1
current_node = leaf
while current_node.parent:
#current_node.print(self.tree)
current_node.update(result)
current_node = current_node.parent
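

# Minimal usage sketch. The Tictactoe constructor lives in tictactoe.py and is
# assumed here to default to an empty 3x3 board; adjust its arguments if it differs.
if __name__ == "__main__":
    game = Tictactoe()                               # assumed: empty board
    searcher = MCTS(game, number_of_rollouts=200)
    visit_counts = searcher.perform_search()         # one visit count per board cell
    best_cell = visit_counts.index(max(visit_counts))
    print("most visited cell:", best_cell)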
| [((708, 747), 'tree.Node', 'Node', (['None', '(-1)', '(3 - self.current_player)'], {}), '(None, -1, 3 - self.current_player)\n', (712, 747), False, 'from tree import Node\n'), ((1135, 1147), 'time.clock', 'time.clock', ([], {}), '()\n', (1145, 1147), False, 'import time\n'), ((1543, 1555), 'time.clock', 'time.clock', ([], {}), '()\n', (1553, 1555), False, 'import time\n'), ((3183, 3207), 'random.choice', 'choice', (['unexplored_leafs'], {}), '(unexplored_leafs)\n', (3189, 3207), False, 'from random import choice\n'), ((1226, 1250), 'copy.deepcopy', 'copy.deepcopy', (['self.game'], {}), '(self.game)\n', (1239, 1250), False, 'import copy\n')] |
pirovc/grimer | grimer/metadata.py | 169f8d3009004d6d2f4ca4d3e7dfec819078cb34 | import pandas as pd
from pandas.api.types import is_numeric_dtype
from grimer.utils import print_log
class Metadata:
valid_types = ["categorical", "numeric"]
default_type = "categorical"
def __init__(self, metadata_file, samples: list=[]):
# Read metadata and let pandas guess dtypes, index as str
self.data = pd.read_table(metadata_file, sep='\t', header=0, skiprows=0, index_col=0, dtype={0:str})
# Enforce string index
self.data.index = self.data.index.astype('str')
# Define all COLUMN TYPES as default
self.types = pd.Series(self.default_type, index=self.data.columns)
# Set types
if str(self.data.index[0]).startswith("#"):
# types defined on file
self.set_hard_types()
else:
# guessed types from read_table
self.types[self.data.dtypes.map(is_numeric_dtype)] = "numeric"
# Convert datatypes to adequate numeric values (int, float)
self.data = self.data.convert_dtypes(infer_objects=False, convert_string=False)
        # Re-convert everything to object to standardize (int64 NA is not serializable in bokeh)
self.data = self.data.astype("object")
# Remove empty fields
null_cols = self.data.isna().all(axis=0)
if any(null_cols):
self.data = self.data.loc[:, ~null_cols]
self.types = self.types[~null_cols]
print_log(str(sum(null_cols)) + " fields removed without valid values")
# Convert NaN on categorical to ""
self.data[self.types[self.types == "categorical"].index] = self.data[self.types[self.types == "categorical"].index].fillna('')
# Remove names
self.data.index.names = [None]
self.types.name = None
# sort and filter by given samples
if samples:
self.data = self.data.reindex(samples)
# Check if matched metadata and samples
null_rows = self.data.isna().all(axis=1)
if any(null_rows):
#self.data = self.data.loc[~null_rows, :]
print_log(str(sum(null_rows)) + " samples without valid metadata")
def __repr__(self):
args = ['{}={}'.format(k, repr(v)) for (k, v) in vars(self).items()]
return 'Metadata({})'.format(', '.join(args))
def set_hard_types(self):
# Get values defined on the first row
self.types = self.data.iloc[0]
# Drop row with types from main data
self.data.drop(self.types.name, inplace=True)
# Validate declared types
idx_valid = self.types.isin(self.valid_types)
if not idx_valid.all():
print_log("Invalid metadata types replaced by: " + self.default_type)
self.types[~idx_valid] = self.default_type
# Enforce column type on dataframe
self.data[self.types[self.types == "categorical"].index] = self.data[self.types[self.types == "categorical"].index].astype(str)
self.data[self.types[self.types == "numeric"].index] = self.data[self.types[self.types == "numeric"].index].apply(pd.to_numeric)
def get_col_headers(self):
return self.data.columns
def get_data(self, metadata_type: str=None):
if metadata_type is not None:
return self.data[self.types[self.types == metadata_type].index]
else:
return self.data
def get_col(self, col):
return self.data[col]
def get_unique_values(self, col):
return sorted(self.get_col(col).dropna().unique())
def get_formatted_unique_values(self, col):
if self.types[col] == "categorical":
return self.get_unique_values(col)
else:
return list(map('{:.16g}'.format, self.get_unique_values(col)))
def get_type(self, col):
return self.types[col]
def get_subset(self, column, value):
return self.data[self.data[column] == value]
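

# Minimal usage sketch: build a small tab-separated metadata table on disk and load
# it. No "#"-prefixed type row is written, so column types are guessed from values.
if __name__ == "__main__":
    import os
    import tempfile

    rows = ["sample\tgroup\tage", "S1\tcontrol\t34", "S2\tcase\t41"]
    with tempfile.NamedTemporaryFile("w", suffix=".tsv", delete=False) as tmp:
        tmp.write("\n".join(rows) + "\n")
    md = Metadata(tmp.name, samples=["S1", "S2"])
    print(list(md.get_col_headers()))     # ['group', 'age']
    print(md.get_type("group"))           # categorical
    print(md.get_type("age"))             # numeric
    print(md.get_unique_values("group"))  # ['case', 'control']
    os.unlink(tmp.name)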
| [((341, 436), 'pandas.read_table', 'pd.read_table', (['metadata_file'], {'sep': '"""\t"""', 'header': '(0)', 'skiprows': '(0)', 'index_col': '(0)', 'dtype': '{(0): str}'}), "(metadata_file, sep='\\t', header=0, skiprows=0, index_col=0,\n dtype={(0): str})\n", (354, 436), True, 'import pandas as pd\n'), ((585, 638), 'pandas.Series', 'pd.Series', (['self.default_type'], {'index': 'self.data.columns'}), '(self.default_type, index=self.data.columns)\n', (594, 638), True, 'import pandas as pd\n'), ((2654, 2723), 'grimer.utils.print_log', 'print_log', (["('Invalid metadata types replaced by: ' + self.default_type)"], {}), "('Invalid metadata types replaced by: ' + self.default_type)\n", (2663, 2723), False, 'from grimer.utils import print_log\n')] |
MSLars/allennlp | allennlp/training/metric_tracker.py | 2cdb8742c8c8c3c38ace4bdfadbdc750a1aa2475 | from typing import Optional, Dict, Any, List, Union
from allennlp.common.checks import ConfigurationError
class MetricTracker:
"""
This class tracks a metric during training for the dual purposes of early stopping
and for knowing whether the current value is the best so far. It mimics the PyTorch
`state_dict` / `load_state_dict` interface, so that it can be checkpointed along with
your model and optimizer.
Some metrics improve by increasing; others by decreasing. You can provide a
`metric_name` that starts with "+" to indicate an increasing metric, or "-"
to indicate a decreasing metric.
# Parameters
metric_name : `Union[str, List[str]]`
Specifies the metric or metrics to track. Metric names have to start with
"+" for increasing metrics or "-" for decreasing ones. If you specify more
        than one, it tracks the sum of the increasing metrics minus the sum
of the decreasing metrics.
patience : `int`, optional (default = `None`)
If provided, then `should_stop_early()` returns True if we go this
many epochs without seeing a new best value.
"""
def __init__(
self,
metric_name: Union[str, List[str]],
patience: Optional[int] = None,
) -> None:
self._patience = patience
self._best_so_far: Optional[float] = None
self._epochs_with_no_improvement = 0
self._is_best_so_far = True
self._epoch_number = 0
self.best_epoch: Optional[int] = None
self.best_epoch_metrics: Dict[str, float] = {}
if isinstance(metric_name, str):
metric_name = [metric_name]
self.tracked_metrics = []
for name in metric_name:
if name.startswith("+"):
self.tracked_metrics.append((1.0, name[1:]))
elif name.startswith("-"):
self.tracked_metrics.append((-1.0, name[1:]))
else:
raise ConfigurationError("metric_name must start with + or -")
def clear(self) -> None:
"""
Clears out the tracked metrics, but keeps the patience
"""
self._best_so_far = None
self._epochs_with_no_improvement = 0
self._is_best_so_far = True
self._epoch_number = 0
self.best_epoch = None
self.best_epoch_metrics.clear()
def state_dict(self) -> Dict[str, Any]:
"""
A `Trainer` can use this to serialize the state of the metric tracker.
"""
return {
"best_so_far": self._best_so_far,
"epochs_with_no_improvement": self._epochs_with_no_improvement,
"is_best_so_far": self._is_best_so_far,
"epoch_number": self._epoch_number,
"best_epoch": self.best_epoch,
"best_epoch_metrics": self.best_epoch_metrics,
}
def load_state_dict(self, state_dict: Dict[str, Any]) -> None:
"""
A `Trainer` can use this to hydrate a metric tracker from a serialized state.
"""
self._best_so_far = state_dict["best_so_far"]
self._epochs_with_no_improvement = state_dict["epochs_with_no_improvement"]
self._is_best_so_far = state_dict["is_best_so_far"]
self._epoch_number = state_dict["epoch_number"]
self.best_epoch = state_dict["best_epoch"]
# Even though we don't promise backwards compatibility for the --recover flag,
# it's particularly easy and harmless to provide it here, so we do it.
self.best_epoch_metrics = state_dict.get("best_epoch_metrics", {})
def add_metrics(self, metrics: Dict[str, float]) -> None:
"""
Record a new value of the metric and update the various things that depend on it.
"""
combined_score = self.combined_score(metrics)
new_best = (self._best_so_far is None) or (combined_score > self._best_so_far)
if new_best:
self._best_so_far = combined_score
self._epochs_with_no_improvement = 0
self._is_best_so_far = True
self.best_epoch = self._epoch_number
else:
self._epochs_with_no_improvement += 1
self._is_best_so_far = False
self._epoch_number += 1
def is_best_so_far(self) -> bool:
"""
Returns true if the most recent value of the metric is the best so far.
"""
return self._is_best_so_far
def should_stop_early(self) -> bool:
"""
Returns true if improvement has stopped for long enough.
"""
if self._patience is None:
return False
else:
return self._epochs_with_no_improvement >= self._patience
def combined_score(self, metrics: Dict[str, float]) -> float:
try:
return sum(
factor * metrics[metric_name] for factor, metric_name in self.tracked_metrics
)
except KeyError as e:
raise ConfigurationError(
f"You configured the trainer to use the {e.args[0]} "
"metric for early stopping, but the model did not produce that metric."
)
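

# Minimal usage sketch: track a single increasing metric with a patience of 2.
if __name__ == "__main__":
    tracker = MetricTracker(metric_name=["+accuracy"], patience=2)
    for accuracy in [0.70, 0.75, 0.74, 0.73]:
        tracker.add_metrics({"accuracy": accuracy})
        print(tracker.is_best_so_far(), tracker.should_stop_early())
    # The last line prints "False True": two epochs without improvement
    # exhausts the patience of 2.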
| [((4970, 5122), 'allennlp.common.checks.ConfigurationError', 'ConfigurationError', (['f"""You configured the trainer to use the {e.args[0]} metric for early stopping, but the model did not produce that metric."""'], {}), "(\n f'You configured the trainer to use the {e.args[0]} metric for early stopping, but the model did not produce that metric.'\n )\n", (4988, 5122), False, 'from allennlp.common.checks import ConfigurationError\n'), ((1979, 2035), 'allennlp.common.checks.ConfigurationError', 'ConfigurationError', (['"""metric_name must start with + or -"""'], {}), "('metric_name must start with + or -')\n", (1997, 2035), False, 'from allennlp.common.checks import ConfigurationError\n')] |
MuhweziDeo/Ah-backend-xmen | authors/apps/profiles/renderers.py | 60c830977fa39a7eea9ab978a9ba0c3beb0c4d88 | from authors.apps.utils.renderers import AppJSONRenderer
import json
from rest_framework.renderers import JSONRenderer
class UserProfileJSONRenderer(AppJSONRenderer):
name = 'profile'
class UserProfileListRenderer(JSONRenderer):
"""
Returns profiles of existing users
"""
charset = 'utf-8'
def render(self, data, media_type=None, renderer_context=None):
""" present a list of
user profiles in json format
"""
return json.dumps({
'profiles':data
})
class ReadStatsJsonRenderer(AppJSONRenderer):
name = 'read_stats'
| [((482, 512), 'json.dumps', 'json.dumps', (["{'profiles': data}"], {}), "({'profiles': data})\n", (492, 512), False, 'import json\n')] |
bantenz/NetworkConfigParser | json_analyzer.py | e1aa8385540823340e8278c7d7af0201399efd8f | import json
from deepdiff import DeepDiff
import pprint
def get_json(file_name):
with open(file_name) as json_file:
json_data = json.load(json_file)
return json_data
def compare_json(Hostname, Command, Data1, Data2):
if (Data1 == Data2):
print ("%s - %s output is same" % (Hostname, Command))
else:
print ("%s - %s output is different" % (Hostname, Command))
pprint.pprint(DeepDiff(Data1, Data2))
def main():
Hostname = raw_input('Input Hostname of the device : ').lower()
Command = raw_input('Input Command : ').lower()
Filename1 = raw_input('Input First JSON File : ').lower()
Filename2 = raw_input('Input Second JSON File : ').lower()
Data1 = get_json(Filename1)
Data2 = get_json(Filename2)
compare_json(Hostname, Command, Data1, Data2)
if __name__ == "__main__":
    # If this Python file is run by itself, call main(); if it is imported, this section does not run
main()
| [((141, 161), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (150, 161), False, 'import json\n'), ((427, 449), 'deepdiff.DeepDiff', 'DeepDiff', (['Data1', 'Data2'], {}), '(Data1, Data2)\n', (435, 449), False, 'from deepdiff import DeepDiff\n')] |
telefonicaid/fiware-glancesync | fiwareglancesync/sync.py | 5ad0c80e12b9384473f31bf336015c75cf02a2a2 | #!/usr/bin/env python
# -- encoding: utf-8 --
#
# Copyright 2015-2016 Telefónica Investigación y Desarrollo, S.A.U
#
# This file is part of FI-WARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with [email protected]
#
import sys
import StringIO
import os
import os.path
import datetime
import argparse
import logging
from fiwareglancesync.glancesync import GlanceSync
class Sync(object):
def __init__(self, regions, override_d=None):
"""init object"""
GlanceSync.init_logs()
self.glancesync = GlanceSync(options_dict=override_d)
regions_expanded = list()
already_sorted = True
for region in regions:
if region.endswith(':'):
regions_expanded.extend(self.glancesync.get_regions(
target=region[:-1]))
already_sorted = False
else:
regions_expanded.append(region)
regions = regions_expanded
if not regions:
regions = self.glancesync.get_regions()
already_sorted = False
if not already_sorted:
regions_unsorted = regions
regions = list()
for region in self.glancesync.preferable_order:
if region in regions_unsorted:
regions.append(region)
regions_unsorted.remove(region)
regions.extend(regions_unsorted)
self.regions = regions
def report_status(self):
"""Report the synchronisation status of the regions"""
for region in self.regions:
try:
stream = StringIO.StringIO()
self.glancesync.export_sync_region_status(region, stream)
print(stream.getvalue())
except Exception:
# Don't do anything. Message has been already printed
# try next region
continue
def parallel_sync(self):
"""Run the synchronisation in several regions in parallel. The
synchronisation inside the region is sequential (i.e. several
        regions are synchronised simultaneously, but only one image at a time
is uploaded for each region)"""
max_children = self.glancesync.max_children
now = datetime.datetime.now()
datestr = str(now.year) + str(now.month).zfill(2) + \
str(now.day).zfill(2) + '_' + str(now.hour).zfill(2) +\
str(now.minute).zfill(2)
msg = '======Master is ' + self.glancesync.master_region
print(msg)
sys.stdout.flush()
os.mkdir('sync_' + datestr)
children = dict()
for region in self.regions:
try:
if len(children) >= max_children:
self._wait_child(children)
pid = os.fork()
if pid > 0:
children[pid] = region
continue
else:
path = os.path.join('sync_' + datestr, region + '.txt')
handler = logging.FileHandler(path)
handler.setFormatter(logging.Formatter('%(message)s'))
logger = self.glancesync.log
# Remove old handlers
for h in logger.handlers:
logger.removeHandler(h)
logger.addHandler(handler)
logger.setLevel(logging.INFO)
logger.propagate = 0
self.glancesync.sync_region(region)
                # After a fork, os._exit() and not sys.exit() must be used.
os._exit(0)
except Exception:
raise
sys.stderr.flush()
sys.exit(-1)
while len(children) > 0:
self._wait_child(children)
print('All is done.')
def sequential_sync(self, dry_run=False):
"""Run the synchronisation sequentially (that is, do not start the
synchronisation to a region before the previous one was completed or
failed
:param dry_run: if true, do not synchronise images actually
"""
msg = '======Master is ' + self.glancesync.master_region
print(msg)
for region in self.regions:
try:
msg = "======" + region
print(msg)
sys.stdout.flush()
self.glancesync.sync_region(region, dry_run=dry_run)
except Exception:
# Don't do anything. Message has been already printed
# try next region
continue
def _wait_child(self, children):
""" Wait until one of the regions ends its synchronisation and then
print the result
:param children:
        :return: a dictionary of regions, indexed by the pid of the process
"""
finish_direct_child = False
while not finish_direct_child:
(pid, status) = os.wait()
if pid not in children:
continue
else:
finish_direct_child = True
if status == 0:
msg = 'Region {0} has finished'.format(children[pid])
print(msg)
else:
msg = 'Region {0} has finished with errors'
print(msg.format(children[pid]))
del children[pid]
sys.stdout.flush()
def show_regions(self):
"""print a full list of the regions available (excluding the
master region) in all the targets defined in the configuration file"""
regions = self.glancesync.get_regions()
for target in self.glancesync.targets.keys():
if target == 'facade' or target == 'master':
continue
regions.extend(self.glancesync.get_regions(target=target))
print(' '.join(regions))
def make_backup(self):
"""make a backup of the metadata in the regions specified at the
constructor (in addition to the master region). The backup is created
in a directory named 'backup_glance_' with the date and time as suffix
There is a file for each region (the name is backup_<region>.csv) and
inside the file a line for each image.
        Only the information about public images / the images owned by
        the tenant can be obtained, regardless of whether the user is an admin. This
is a limitation of the glance API"""
now = datetime.datetime.now().isoformat()
directory = 'backup_glance_' + now
os.mkdir(directory)
regions = set(self.regions)
regions.add(self.glancesync.master_region)
for region in regions:
try:
self.glancesync.backup_glancemetadata_region(region, directory)
except Exception:
# do nothing. Already logged.
continue
if __name__ == '__main__':
# Parse cmdline
description = 'A tool to sync images from a master region to other '\
'regions'
parser = argparse.ArgumentParser(description=description)
parser.add_argument('regions', metavar='region', type=str, nargs='*',
help='region where the images are uploaded to')
parser.add_argument('--parallel', action='store_true',
help='sync several regions in parallel')
parser.add_argument(
'--config', nargs='+', help='override configuration options. (e.g. ' +
"main.master_region=Valladolid metadata_condition='image.name=name1')")
group = parser.add_mutually_exclusive_group()
group.add_argument('--dry-run', action='store_true',
help='do not upload actually the images')
group.add_argument('--show-status', action='store_true',
help='do not sync, but show the synchronisation status')
group.add_argument('--show-regions', action='store_true',
help='don not sync, only show the available regions')
group.add_argument(
'--make-backup', action='store_true',
help="do no sync, make a backup of the regions' metadata")
meta = parser.parse_args()
options = dict()
if meta.config:
for option in meta.config:
pair = option.split('=')
if len(pair) != 2:
parser.error('config options must have the format key=value')
sys.exit(-1)
options[pair[0].strip()] = pair[1]
# Run cmd
sync = Sync(meta.regions, options)
if meta.show_status:
sync.report_status()
elif meta.parallel:
sync.parallel_sync()
elif meta.show_regions:
sync.show_regions()
elif meta.make_backup:
sync.make_backup()
else:
sync.sequential_sync(meta.dry_run)
| [((7681, 7729), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'description'}), '(description=description)\n', (7704, 7729), False, 'import argparse\n'), ((1071, 1093), 'fiwareglancesync.glancesync.GlanceSync.init_logs', 'GlanceSync.init_logs', ([], {}), '()\n', (1091, 1093), False, 'from fiwareglancesync.glancesync import GlanceSync\n'), ((1120, 1155), 'fiwareglancesync.glancesync.GlanceSync', 'GlanceSync', ([], {'options_dict': 'override_d'}), '(options_dict=override_d)\n', (1130, 1155), False, 'from fiwareglancesync.glancesync import GlanceSync\n'), ((2848, 2871), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2869, 2871), False, 'import datetime\n'), ((3132, 3150), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (3148, 3150), False, 'import sys\n'), ((3159, 3186), 'os.mkdir', 'os.mkdir', (["('sync_' + datestr)"], {}), "('sync_' + datestr)\n", (3167, 3186), False, 'import os\n'), ((7180, 7199), 'os.mkdir', 'os.mkdir', (['directory'], {}), '(directory)\n', (7188, 7199), False, 'import os\n'), ((5556, 5565), 'os.wait', 'os.wait', ([], {}), '()\n', (5563, 5565), False, 'import os\n'), ((2201, 2220), 'StringIO.StringIO', 'StringIO.StringIO', ([], {}), '()\n', (2218, 2220), False, 'import StringIO\n'), ((3387, 3396), 'os.fork', 'os.fork', ([], {}), '()\n', (3394, 3396), False, 'import os\n'), ((4953, 4971), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (4969, 4971), False, 'import sys\n'), ((6014, 6032), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (6030, 6032), False, 'import sys\n'), ((7093, 7116), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (7114, 7116), False, 'import datetime\n'), ((9052, 9064), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (9060, 9064), False, 'import sys\n'), ((3546, 3594), 'os.path.join', 'os.path.join', (["('sync_' + datestr)", "(region + '.txt')"], {}), "('sync_' + datestr, region + '.txt')\n", (3558, 3594), False, 'import os\n'), ((3625, 3650), 'logging.FileHandler', 'logging.FileHandler', (['path'], {}), '(path)\n', (3644, 3650), False, 'import logging\n'), ((4207, 4218), 'os._exit', 'os._exit', (['(0)'], {}), '(0)\n', (4215, 4218), False, 'import os\n'), ((4287, 4305), 'sys.stderr.flush', 'sys.stderr.flush', ([], {}), '()\n', (4303, 4305), False, 'import sys\n'), ((4322, 4334), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (4330, 4334), False, 'import sys\n'), ((3692, 3724), 'logging.Formatter', 'logging.Formatter', (['"""%(message)s"""'], {}), "('%(message)s')\n", (3709, 3724), False, 'import logging\n')] |
Pandinosaurus/models-intelai | models/object_detection/pytorch/ssd-resnet34/training/cpu/mlperf_logger.py | 60f5712d79a363bdb7624e3116a66a4f1a7fe208 | ### This file is originally from: [mlcommons repo](https://github.com/mlcommons/training/tree/9947bdf21ee3f2488fa4b362eec2ce7deb2ec4dd/single_stage_detector/ssd/mlperf_logger.py)
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import numpy as np
import os
from mlperf_logging import mllog
from mlperf_logging.mllog import constants as mllog_const
mllogger = mllog.get_mllogger()
mllog.config(
filename=(os.getenv("COMPLIANCE_FILE") or "mlperf_compliance.log"),
root_dir=os.path.normpath(os.path.dirname(os.path.realpath(__file__))))
def ssd_print(*args, sync=True, **kwargs):
use_cuda = os.getenv('USE_CUDA')
if sync and use_cuda=='True':
barrier()
if get_rank() == 0:
kwargs['stack_offset'] = 2
mllogger.event(*args, **kwargs)
def barrier():
"""
Works as a temporary distributed barrier, currently pytorch
doesn't implement barrier for NCCL backend.
Calls all_reduce on dummy tensor and synchronizes with GPU.
"""
if torch.distributed.is_initialized():
torch.distributed.all_reduce(torch.cuda.FloatTensor(1))
torch.cuda.synchronize()
def get_rank():
"""
Gets distributed rank or returns zero if distributed is not initialized.
"""
if torch.distributed.is_initialized():
rank = torch.distributed.get_rank()
else:
rank = os.getenv('RANK', os.getenv('LOCAL_RANK', 0))
return rank
def broadcast_seeds(seed, device):
if torch.distributed.is_initialized():
seeds_tensor = torch.LongTensor([seed]).to(device)
torch.distributed.broadcast(seeds_tensor, 0)
seed = seeds_tensor.item()
return seed
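

# Minimal single-process sketch: with torch.distributed left uninitialized,
# get_rank() falls back to the RANK/LOCAL_RANK environment variables (default 0)
# and broadcast_seeds() returns the seed unchanged.
if __name__ == "__main__":
    print(get_rank())                          # 0 in a single-process run
    print(broadcast_seeds(1234, device="cpu"))  # 1234, nothing to broadcast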
| [((933, 953), 'mlperf_logging.mllog.get_mllogger', 'mllog.get_mllogger', ([], {}), '()\n', (951, 953), False, 'from mlperf_logging import mllog\n'), ((1175, 1196), 'os.getenv', 'os.getenv', (['"""USE_CUDA"""'], {}), "('USE_CUDA')\n", (1184, 1196), False, 'import os\n'), ((1564, 1598), 'torch.distributed.is_initialized', 'torch.distributed.is_initialized', ([], {}), '()\n', (1596, 1598), False, 'import torch\n'), ((1815, 1849), 'torch.distributed.is_initialized', 'torch.distributed.is_initialized', ([], {}), '()\n', (1847, 1849), False, 'import torch\n'), ((2025, 2059), 'torch.distributed.is_initialized', 'torch.distributed.is_initialized', ([], {}), '()\n', (2057, 2059), False, 'import torch\n'), ((1672, 1696), 'torch.cuda.synchronize', 'torch.cuda.synchronize', ([], {}), '()\n', (1694, 1696), False, 'import torch\n'), ((1866, 1894), 'torch.distributed.get_rank', 'torch.distributed.get_rank', ([], {}), '()\n', (1892, 1894), False, 'import torch\n'), ((2128, 2172), 'torch.distributed.broadcast', 'torch.distributed.broadcast', (['seeds_tensor', '(0)'], {}), '(seeds_tensor, 0)\n', (2155, 2172), False, 'import torch\n'), ((982, 1010), 'os.getenv', 'os.getenv', (['"""COMPLIANCE_FILE"""'], {}), "('COMPLIANCE_FILE')\n", (991, 1010), False, 'import os\n'), ((1637, 1662), 'torch.cuda.FloatTensor', 'torch.cuda.FloatTensor', (['(1)'], {}), '(1)\n', (1659, 1662), False, 'import torch\n'), ((1938, 1964), 'os.getenv', 'os.getenv', (['"""LOCAL_RANK"""', '(0)'], {}), "('LOCAL_RANK', 0)\n", (1947, 1964), False, 'import os\n'), ((1086, 1112), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (1102, 1112), False, 'import os\n'), ((2084, 2108), 'torch.LongTensor', 'torch.LongTensor', (['[seed]'], {}), '([seed])\n', (2100, 2108), False, 'import torch\n')] |
CDufour909/omtk_unreal | omtk/models/model_avar_surface_lips.py | 64ae76a7b0a3f73a4b32d3b330f3174d02c54234 | import math
import pymel.core as pymel
from omtk.core.classNode import Node
from omtk.libs import libAttr
from omtk.libs import libRigging
from . import model_avar_surface
class SplitterNode(Node):
"""
    A splitter is a node network that takes the parameterV that is normally sent through the follicles and
    splits it between two destinations: the follicles and the jaw ref constraint.
The more the jaw is opened, the more we'll transfer to the jaw ref before sending to the follicle.
    This is mainly used to ensure that any lip movement created by the jaw is canceled when the
    animator tries to correct the lips while the jaw is open. Otherwise, since the jaw space and the
    surface space are different, the two corrections would not line up.
    To compute the displacement caused by the jaw, we'll use the circumference around the jaw pivot.
    This creates an 'approximation' that might be wrong if some translation also occurs in the jaw.
todo: test with corrective jaw translation
"""
def __init__(self):
super(SplitterNode, self).__init__() # useless
self.attr_inn_jaw_pt = None
self.attr_inn_jaw_radius = None
self.attr_inn_surface_v = None
self.attr_inn_surface_range_v = None
self.attr_inn_jaw_default_ratio = None
self.attr_out_surface_v = None
self.attr_out_jaw_ratio = None
def build(self, nomenclature_rig, **kwargs):
super(SplitterNode, self).build(**kwargs)
#
# Create inn and out attributes.
#
grp_splitter_inn = pymel.createNode(
'network',
name=nomenclature_rig.resolve('udSplitterInn')
)
# The jaw opening amount in degree.
self.attr_inn_jaw_pt = libAttr.addAttr(grp_splitter_inn, 'innJawOpen')
# The relative uv coordinates normally sent to the follicles.
# Note that this value is expected to change at the output of the SplitterNode (see outSurfaceU and outSurfaceV)
self.attr_inn_surface_u = libAttr.addAttr(grp_splitter_inn, 'innSurfaceU')
self.attr_inn_surface_v = libAttr.addAttr(grp_splitter_inn, 'innSurfaceV')
# Use this switch to disable completely the splitter.
self.attr_inn_bypass = libAttr.addAttr(grp_splitter_inn, 'innBypassAmount')
# The arc length in world space of the surface controlling the follicles.
self.attr_inn_surface_range_v = libAttr.addAttr(grp_splitter_inn,
                                                        'innSurfaceRangeV') # How many degrees does the jaw need to create 1 unit of surface deformation? (ex: 20)
        # How much, in percent, the lips follow the jaw by default.
# Note that this value is expected to change at the output of the SplitterNode (see attr_out_jaw_ratio)
self.attr_inn_jaw_default_ratio = libAttr.addAttr(grp_splitter_inn, 'jawDefaultRatio')
# The radius of the influence circle normally resolved by using the distance between the jaw and the avar as radius.
self.attr_inn_jaw_radius = libAttr.addAttr(grp_splitter_inn, 'jawRadius')
grp_splitter_out = pymel.createNode(
'network',
name=nomenclature_rig.resolve('udSplitterOut')
)
self.attr_out_surface_u = libAttr.addAttr(grp_splitter_out, 'outSurfaceU')
self.attr_out_surface_v = libAttr.addAttr(grp_splitter_out, 'outSurfaceV')
self.attr_out_jaw_ratio = libAttr.addAttr(grp_splitter_out,
                                                  'outJawRatio') # What percentage of this influence follows the jaw after cancellation.
#
# Connect inn and out network nodes so they can easily be found from the SplitterNode.
#
attr_inn = libAttr.addAttr(grp_splitter_inn, longName='inn', attributeType='message')
attr_out = libAttr.addAttr(grp_splitter_out, longName='out', attributeType='message')
pymel.connectAttr(self.node.message, attr_inn)
pymel.connectAttr(self.node.message, attr_out)
#
# Create node networks
# Step 1: Get the jaw displacement in uv space (parameterV only).
#
attr_jaw_circumference = libRigging.create_utility_node(
'multiplyDivide',
name=nomenclature_rig.resolve('getJawCircumference'),
input1X=self.attr_inn_jaw_radius,
input2X=(math.pi * 2.0)
).outputX
attr_jaw_open_circle_ratio = libRigging.create_utility_node(
'multiplyDivide',
name=nomenclature_rig.resolve('getJawOpenCircleRatio'),
operation=2, # divide
input1X=self.attr_inn_jaw_pt,
input2X=360.0
).outputX
attr_jaw_active_circumference = libRigging.create_utility_node(
'multiplyDivide',
name=nomenclature_rig.resolve('getJawActiveCircumference'),
input1X=attr_jaw_circumference,
input2X=attr_jaw_open_circle_ratio
).outputX
attr_jaw_v_range = libRigging.create_utility_node(
'multiplyDivide',
name=nomenclature_rig.resolve('getActiveJawRangeInSurfaceSpace'),
operation=2, # divide
input1X=attr_jaw_active_circumference,
input2X=self.attr_inn_surface_range_v
).outputX
#
# Step 2: Resolve the output jaw_ratio
#
# Note that this can throw a zero division warning in Maya.
# To prevent that we'll use some black-magic-ugly-ass-trick.
attr_jaw_ratio_cancelation = libRigging.create_safe_division(
self.attr_inn_surface_v,
attr_jaw_v_range,
nomenclature_rig,
'getJawRatioCancellation'
)
attr_jaw_ratio_out_raw = libRigging.create_utility_node(
'plusMinusAverage',
name=nomenclature_rig.resolve('getJawRatioOutUnlimited'),
operation=2, # substraction,
input1D=(
self.attr_inn_jaw_default_ratio,
attr_jaw_ratio_cancelation
)
).output1D
attr_jaw_ratio_out_limited = libRigging.create_utility_node(
'clamp',
name=nomenclature_rig.resolve('getJawRatioOutLimited'),
inputR=attr_jaw_ratio_out_raw,
minR=0.0,
maxR=1.0
).outputR
#
# Step 3: Resolve attr_out_surface_u & attr_out_surface_v
#
attr_inn_jaw_default_ratio_inv = libRigging.create_utility_node(
'reverse',
name=nomenclature_rig.resolve('getJawDefaultRatioInv'),
inputX=self.attr_inn_jaw_default_ratio
).outputX
util_jaw_uv_default_ratio = libRigging.create_utility_node(
'multiplyDivide',
name=nomenclature_rig.resolve('getJawDefaultRatioUvSpace'),
input1X=self.attr_inn_jaw_default_ratio,
input1Y=attr_inn_jaw_default_ratio_inv,
input2X=attr_jaw_v_range,
input2Y=attr_jaw_v_range
)
attr_jaw_uv_default_ratio = util_jaw_uv_default_ratio.outputX
attr_jaw_uv_default_ratio_inv = util_jaw_uv_default_ratio.outputY
attr_jaw_uv_limit_max = libRigging.create_utility_node(
'plusMinusAverage',
name=nomenclature_rig.resolve('getJawSurfaceLimitMax'),
operation=2, # substract
input1D=(attr_jaw_v_range, attr_jaw_uv_default_ratio_inv)
).output1D
attr_jaw_uv_limit_min = libRigging.create_utility_node(
'plusMinusAverage',
name=nomenclature_rig.resolve('getJawSurfaceLimitMin'),
operation=2, # substract
input1D=(attr_jaw_uv_default_ratio, attr_jaw_v_range)
).output1D
attr_jaw_cancel_range = libRigging.create_utility_node(
'clamp',
name=nomenclature_rig.resolve('getJawCancelRange'),
inputR=self.attr_inn_surface_v,
minR=attr_jaw_uv_limit_min,
maxR=attr_jaw_uv_limit_max
).outputR
attr_out_surface_v_cancelled = libRigging.create_utility_node(
'plusMinusAverage',
name=nomenclature_rig.resolve('getCanceledUv'),
operation=2, # substraction
input1D=(self.attr_inn_surface_v, attr_jaw_cancel_range)
).output1D
#
# Connect output attributes
#
attr_inn_bypass_inv = libRigging.create_utility_node(
'reverse',
name=nomenclature_rig.resolve('getBypassInv'),
inputX=self.attr_inn_bypass
).outputX
# Connect output jaw_ratio
attr_output_jaw_ratio = libRigging.create_utility_node(
'blendWeighted',
input=(attr_jaw_ratio_out_limited, self.attr_inn_jaw_default_ratio),
weight=(attr_inn_bypass_inv, self.attr_inn_bypass)
).output
pymel.connectAttr(attr_output_jaw_ratio, self.attr_out_jaw_ratio)
# Connect output surface u
pymel.connectAttr(self.attr_inn_surface_u, self.attr_out_surface_u)
# Connect output surface_v
attr_output_surface_v = libRigging.create_utility_node(
'blendWeighted',
input=(attr_out_surface_v_cancelled, self.attr_inn_surface_v),
weight=(attr_inn_bypass_inv, self.attr_inn_bypass)
).output
pymel.connectAttr(attr_output_surface_v, self.attr_out_surface_v)
class AvarSurfaceLipModel(model_avar_surface.AvarSurfaceModel):
"""
Custom avar model for the complex situation that is the lips.
This ensure that we are moving according to the jaw before sliding on the surface.
"""
def __init__(self, *args, **kwargs):
super(AvarSurfaceLipModel, self).__init__(*args, **kwargs)
self._attr_inn_jaw_bindpose = None
self._attr_inn_jaw_pitch = None
self._attr_inn_jaw_ratio_default = None
self._attr_inn_bypass_splitter = None
self._attr_out_jaw_ratio = None
def _create_interface(self):
super(AvarSurfaceLipModel, self)._create_interface()
self._attr_inn_jaw_bindpose = libAttr.addAttr(self.grp_rig, 'innJawBindPose', dataType='matrix')
self._attr_inn_jaw_pitch = libAttr.addAttr(self.grp_rig, 'innJawPitch', defaultValue=0)
self._attr_inn_jaw_ratio_default = libAttr.addAttr(self.grp_rig, 'innJawRatioDefault', defaultValue=0)
self._attr_inn_bypass_splitter = libAttr.addAttr(self.grp_rig, 'innBypassSplitter')
self._attr_inn_ud_bypass = libAttr.addAttr(self.grp_rig, 'innBypassUD')
# self._attr_inn_surface_length_u = libAttr.addAttr(self.grp_rig, 'innSurfaceLengthU', defaultValue=0)
# self._attr_inn_surface_length_v = libAttr.addAttr(self.grp_rig, 'innSurfaceLengthV', defaultValue=0)
self._attr_out_jaw_ratio = libAttr.addAttr(self.grp_rig, 'outJawRatio')
def connect_avar(self, avar):
super(AvarSurfaceLipModel, self).connect_avar(avar)
# Note: We expect a FaceLipAvar
pymel.connectAttr(avar._attr_jaw_bind_tm, self._attr_inn_jaw_bindpose)
pymel.connectAttr(avar._attr_jaw_pitch, self._attr_inn_jaw_pitch)
pymel.connectAttr(avar._attr_inn_jaw_ratio_default, self._attr_inn_jaw_ratio_default)
pymel.connectAttr(avar._attr_bypass_splitter, self._attr_inn_bypass_splitter)
pymel.connectAttr(avar.attr_ud_bypass, self._attr_inn_ud_bypass)
def _get_follicle_relative_uv_attr(self, **kwargs):
nomenclature_rig = self.get_nomenclature_rig()
attr_u, attr_v = super(AvarSurfaceLipModel, self)._get_follicle_relative_uv_attr(**kwargs)
util_decompose_jaw_bind_tm = libRigging.create_utility_node(
'decomposeMatrix',
inputMatrix=self._attr_inn_jaw_bindpose,
)
#
# Create and connect Splitter Node
#
splitter = SplitterNode()
splitter.build(
nomenclature_rig,
name=nomenclature_rig.resolve('splitter')
)
splitter.setParent(self.grp_rig)
# Resolve the radius of the jaw influence. Used by the splitter.
attr_jaw_radius = libRigging.create_utility_node(
'distanceBetween',
name=nomenclature_rig.resolve('getJawRadius'),
point1=self.grp_offset.translate,
point2=util_decompose_jaw_bind_tm.outputTranslate
).distance
# Resolve the jaw pitch. Used by the splitter.
attr_jaw_pitch = self._attr_inn_jaw_pitch
# Connect the splitter inputs
pymel.connectAttr(attr_u, splitter.attr_inn_surface_u)
pymel.connectAttr(attr_v, splitter.attr_inn_surface_v)
pymel.connectAttr(self._attr_inn_jaw_ratio_default, splitter.attr_inn_jaw_default_ratio)
pymel.connectAttr(self._attr_length_v, splitter.attr_inn_surface_range_v)
pymel.connectAttr(attr_jaw_radius, splitter.attr_inn_jaw_radius)
pymel.connectAttr(attr_jaw_pitch, splitter.attr_inn_jaw_pt)
pymel.connectAttr(self._attr_inn_bypass_splitter, splitter.attr_inn_bypass)
attr_u = splitter.attr_out_surface_u
attr_v = splitter.attr_out_surface_v
# Create constraint to controller the jaw reference
pymel.connectAttr(splitter.attr_out_jaw_ratio, self._attr_out_jaw_ratio)
#
# Implement the 'bypass' avars.
# Thoses avars bypass the splitter, used in corner cases only.
#
attr_attr_ud_bypass_adjusted = libRigging.create_utility_node(
'multiplyDivide',
name=nomenclature_rig.resolve('getAdjustedUdBypass'),
input1X=self._attr_inn_ud_bypass,
input2X=self.multiplier_ud
).outputX
attr_v = libRigging.create_utility_node(
'addDoubleLinear',
name=nomenclature_rig.resolve('addBypassAvar'),
input1=attr_v,
input2=attr_attr_ud_bypass_adjusted
).output
return attr_u, attr_v
| [((1698, 1745), 'omtk.libs.libAttr.addAttr', 'libAttr.addAttr', (['grp_splitter_inn', '"""innJawOpen"""'], {}), "(grp_splitter_inn, 'innJawOpen')\n", (1713, 1745), False, 'from omtk.libs import libAttr\n'), ((1972, 2020), 'omtk.libs.libAttr.addAttr', 'libAttr.addAttr', (['grp_splitter_inn', '"""innSurfaceU"""'], {}), "(grp_splitter_inn, 'innSurfaceU')\n", (1987, 2020), False, 'from omtk.libs import libAttr\n'), ((2055, 2103), 'omtk.libs.libAttr.addAttr', 'libAttr.addAttr', (['grp_splitter_inn', '"""innSurfaceV"""'], {}), "(grp_splitter_inn, 'innSurfaceV')\n", (2070, 2103), False, 'from omtk.libs import libAttr\n'), ((2198, 2250), 'omtk.libs.libAttr.addAttr', 'libAttr.addAttr', (['grp_splitter_inn', '"""innBypassAmount"""'], {}), "(grp_splitter_inn, 'innBypassAmount')\n", (2213, 2250), False, 'from omtk.libs import libAttr\n'), ((2374, 2427), 'omtk.libs.libAttr.addAttr', 'libAttr.addAttr', (['grp_splitter_inn', '"""innSurfaceRangeV"""'], {}), "(grp_splitter_inn, 'innSurfaceRangeV')\n", (2389, 2427), False, 'from omtk.libs import libAttr\n'), ((2799, 2851), 'omtk.libs.libAttr.addAttr', 'libAttr.addAttr', (['grp_splitter_inn', '"""jawDefaultRatio"""'], {}), "(grp_splitter_inn, 'jawDefaultRatio')\n", (2814, 2851), False, 'from omtk.libs import libAttr\n'), ((3013, 3059), 'omtk.libs.libAttr.addAttr', 'libAttr.addAttr', (['grp_splitter_inn', '"""jawRadius"""'], {}), "(grp_splitter_inn, 'jawRadius')\n", (3028, 3059), False, 'from omtk.libs import libAttr\n'), ((3233, 3281), 'omtk.libs.libAttr.addAttr', 'libAttr.addAttr', (['grp_splitter_out', '"""outSurfaceU"""'], {}), "(grp_splitter_out, 'outSurfaceU')\n", (3248, 3281), False, 'from omtk.libs import libAttr\n'), ((3316, 3364), 'omtk.libs.libAttr.addAttr', 'libAttr.addAttr', (['grp_splitter_out', '"""outSurfaceV"""'], {}), "(grp_splitter_out, 'outSurfaceV')\n", (3331, 3364), False, 'from omtk.libs import libAttr\n'), ((3399, 3447), 'omtk.libs.libAttr.addAttr', 'libAttr.addAttr', (['grp_splitter_out', '"""outJawRatio"""'], {}), "(grp_splitter_out, 'outJawRatio')\n", (3414, 3447), False, 'from omtk.libs import libAttr\n'), ((3703, 3777), 'omtk.libs.libAttr.addAttr', 'libAttr.addAttr', (['grp_splitter_inn'], {'longName': '"""inn"""', 'attributeType': '"""message"""'}), "(grp_splitter_inn, longName='inn', attributeType='message')\n", (3718, 3777), False, 'from omtk.libs import libAttr\n'), ((3797, 3871), 'omtk.libs.libAttr.addAttr', 'libAttr.addAttr', (['grp_splitter_out'], {'longName': '"""out"""', 'attributeType': '"""message"""'}), "(grp_splitter_out, longName='out', attributeType='message')\n", (3812, 3871), False, 'from omtk.libs import libAttr\n'), ((3880, 3926), 'pymel.core.connectAttr', 'pymel.connectAttr', (['self.node.message', 'attr_inn'], {}), '(self.node.message, attr_inn)\n', (3897, 3926), True, 'import pymel.core as pymel\n'), ((3935, 3981), 'pymel.core.connectAttr', 'pymel.connectAttr', (['self.node.message', 'attr_out'], {}), '(self.node.message, attr_out)\n', (3952, 3981), True, 'import pymel.core as pymel\n'), ((5508, 5631), 'omtk.libs.libRigging.create_safe_division', 'libRigging.create_safe_division', (['self.attr_inn_surface_v', 'attr_jaw_v_range', 'nomenclature_rig', '"""getJawRatioCancellation"""'], {}), "(self.attr_inn_surface_v, attr_jaw_v_range,\n nomenclature_rig, 'getJawRatioCancellation')\n", (5539, 5631), False, 'from omtk.libs import libRigging\n'), ((8853, 8918), 'pymel.core.connectAttr', 'pymel.connectAttr', (['attr_output_jaw_ratio', 'self.attr_out_jaw_ratio'], {}), '(attr_output_jaw_ratio, 
self.attr_out_jaw_ratio)\n', (8870, 8918), True, 'import pymel.core as pymel\n'), ((8963, 9030), 'pymel.core.connectAttr', 'pymel.connectAttr', (['self.attr_inn_surface_u', 'self.attr_out_surface_u'], {}), '(self.attr_inn_surface_u, self.attr_out_surface_u)\n', (8980, 9030), True, 'import pymel.core as pymel\n'), ((9323, 9388), 'pymel.core.connectAttr', 'pymel.connectAttr', (['attr_output_surface_v', 'self.attr_out_surface_v'], {}), '(attr_output_surface_v, self.attr_out_surface_v)\n', (9340, 9388), True, 'import pymel.core as pymel\n'), ((10086, 10152), 'omtk.libs.libAttr.addAttr', 'libAttr.addAttr', (['self.grp_rig', '"""innJawBindPose"""'], {'dataType': '"""matrix"""'}), "(self.grp_rig, 'innJawBindPose', dataType='matrix')\n", (10101, 10152), False, 'from omtk.libs import libAttr\n'), ((10188, 10248), 'omtk.libs.libAttr.addAttr', 'libAttr.addAttr', (['self.grp_rig', '"""innJawPitch"""'], {'defaultValue': '(0)'}), "(self.grp_rig, 'innJawPitch', defaultValue=0)\n", (10203, 10248), False, 'from omtk.libs import libAttr\n'), ((10292, 10359), 'omtk.libs.libAttr.addAttr', 'libAttr.addAttr', (['self.grp_rig', '"""innJawRatioDefault"""'], {'defaultValue': '(0)'}), "(self.grp_rig, 'innJawRatioDefault', defaultValue=0)\n", (10307, 10359), False, 'from omtk.libs import libAttr\n'), ((10401, 10451), 'omtk.libs.libAttr.addAttr', 'libAttr.addAttr', (['self.grp_rig', '"""innBypassSplitter"""'], {}), "(self.grp_rig, 'innBypassSplitter')\n", (10416, 10451), False, 'from omtk.libs import libAttr\n'), ((10487, 10531), 'omtk.libs.libAttr.addAttr', 'libAttr.addAttr', (['self.grp_rig', '"""innBypassUD"""'], {}), "(self.grp_rig, 'innBypassUD')\n", (10502, 10531), False, 'from omtk.libs import libAttr\n'), ((10790, 10834), 'omtk.libs.libAttr.addAttr', 'libAttr.addAttr', (['self.grp_rig', '"""outJawRatio"""'], {}), "(self.grp_rig, 'outJawRatio')\n", (10805, 10834), False, 'from omtk.libs import libAttr\n'), ((10979, 11049), 'pymel.core.connectAttr', 'pymel.connectAttr', (['avar._attr_jaw_bind_tm', 'self._attr_inn_jaw_bindpose'], {}), '(avar._attr_jaw_bind_tm, self._attr_inn_jaw_bindpose)\n', (10996, 11049), True, 'import pymel.core as pymel\n'), ((11058, 11123), 'pymel.core.connectAttr', 'pymel.connectAttr', (['avar._attr_jaw_pitch', 'self._attr_inn_jaw_pitch'], {}), '(avar._attr_jaw_pitch, self._attr_inn_jaw_pitch)\n', (11075, 11123), True, 'import pymel.core as pymel\n'), ((11132, 11222), 'pymel.core.connectAttr', 'pymel.connectAttr', (['avar._attr_inn_jaw_ratio_default', 'self._attr_inn_jaw_ratio_default'], {}), '(avar._attr_inn_jaw_ratio_default, self.\n _attr_inn_jaw_ratio_default)\n', (11149, 11222), True, 'import pymel.core as pymel\n'), ((11226, 11303), 'pymel.core.connectAttr', 'pymel.connectAttr', (['avar._attr_bypass_splitter', 'self._attr_inn_bypass_splitter'], {}), '(avar._attr_bypass_splitter, self._attr_inn_bypass_splitter)\n', (11243, 11303), True, 'import pymel.core as pymel\n'), ((11312, 11376), 'pymel.core.connectAttr', 'pymel.connectAttr', (['avar.attr_ud_bypass', 'self._attr_inn_ud_bypass'], {}), '(avar.attr_ud_bypass, self._attr_inn_ud_bypass)\n', (11329, 11376), True, 'import pymel.core as pymel\n'), ((11627, 11722), 'omtk.libs.libRigging.create_utility_node', 'libRigging.create_utility_node', (['"""decomposeMatrix"""'], {'inputMatrix': 'self._attr_inn_jaw_bindpose'}), "('decomposeMatrix', inputMatrix=self.\n _attr_inn_jaw_bindpose)\n", (11657, 11722), False, 'from omtk.libs import libRigging\n'), ((12512, 12566), 'pymel.core.connectAttr', 'pymel.connectAttr', (['attr_u', 
'splitter.attr_inn_surface_u'], {}), '(attr_u, splitter.attr_inn_surface_u)\n', (12529, 12566), True, 'import pymel.core as pymel\n'), ((12575, 12629), 'pymel.core.connectAttr', 'pymel.connectAttr', (['attr_v', 'splitter.attr_inn_surface_v'], {}), '(attr_v, splitter.attr_inn_surface_v)\n', (12592, 12629), True, 'import pymel.core as pymel\n'), ((12638, 12731), 'pymel.core.connectAttr', 'pymel.connectAttr', (['self._attr_inn_jaw_ratio_default', 'splitter.attr_inn_jaw_default_ratio'], {}), '(self._attr_inn_jaw_ratio_default, splitter.\n attr_inn_jaw_default_ratio)\n', (12655, 12731), True, 'import pymel.core as pymel\n'), ((12735, 12808), 'pymel.core.connectAttr', 'pymel.connectAttr', (['self._attr_length_v', 'splitter.attr_inn_surface_range_v'], {}), '(self._attr_length_v, splitter.attr_inn_surface_range_v)\n', (12752, 12808), True, 'import pymel.core as pymel\n'), ((12817, 12881), 'pymel.core.connectAttr', 'pymel.connectAttr', (['attr_jaw_radius', 'splitter.attr_inn_jaw_radius'], {}), '(attr_jaw_radius, splitter.attr_inn_jaw_radius)\n', (12834, 12881), True, 'import pymel.core as pymel\n'), ((12890, 12949), 'pymel.core.connectAttr', 'pymel.connectAttr', (['attr_jaw_pitch', 'splitter.attr_inn_jaw_pt'], {}), '(attr_jaw_pitch, splitter.attr_inn_jaw_pt)\n', (12907, 12949), True, 'import pymel.core as pymel\n'), ((12958, 13033), 'pymel.core.connectAttr', 'pymel.connectAttr', (['self._attr_inn_bypass_splitter', 'splitter.attr_inn_bypass'], {}), '(self._attr_inn_bypass_splitter, splitter.attr_inn_bypass)\n', (12975, 13033), True, 'import pymel.core as pymel\n'), ((13194, 13266), 'pymel.core.connectAttr', 'pymel.connectAttr', (['splitter.attr_out_jaw_ratio', 'self._attr_out_jaw_ratio'], {}), '(splitter.attr_out_jaw_ratio, self._attr_out_jaw_ratio)\n', (13211, 13266), True, 'import pymel.core as pymel\n'), ((8623, 8801), 'omtk.libs.libRigging.create_utility_node', 'libRigging.create_utility_node', (['"""blendWeighted"""'], {'input': '(attr_jaw_ratio_out_limited, self.attr_inn_jaw_default_ratio)', 'weight': '(attr_inn_bypass_inv, self.attr_inn_bypass)'}), "('blendWeighted', input=(\n attr_jaw_ratio_out_limited, self.attr_inn_jaw_default_ratio), weight=(\n attr_inn_bypass_inv, self.attr_inn_bypass))\n", (8653, 8801), False, 'from omtk.libs import libRigging\n'), ((9099, 9271), 'omtk.libs.libRigging.create_utility_node', 'libRigging.create_utility_node', (['"""blendWeighted"""'], {'input': '(attr_out_surface_v_cancelled, self.attr_inn_surface_v)', 'weight': '(attr_inn_bypass_inv, self.attr_inn_bypass)'}), "('blendWeighted', input=(\n attr_out_surface_v_cancelled, self.attr_inn_surface_v), weight=(\n attr_inn_bypass_inv, self.attr_inn_bypass))\n", (9129, 9271), False, 'from omtk.libs import libRigging\n')] |
dataesr/harvest-theses | project/server/main/feed.py | 1725b3ec3a944526fe62941d554bc3de6209cd28 | import datetime
import os
import pymongo
import requests
from urllib import parse
from urllib.parse import quote_plus
import json
from retry import retry
from bs4 import BeautifulSoup
import math
from project.server.main.logger import get_logger
from project.server.main.utils_swift import upload_object
from project.server.main.parse import parse_theses, get_idref_from_OS
from project.server.main.referentiel import harvest_and_save_idref
logger = get_logger(__name__)
def get_num_these(soup):
num_theses = []
for d in soup.find_all('doc'):
num_theses.append(d.find('str', {'name': 'num'}).text)
return num_theses
@retry(delay=60, tries=5)
def get_num_these_between_dates(start_date, end_date):
start_date_str = start_date.strftime("%d/%m/%Y")
end_date_str = end_date.strftime("%d/%m/%Y")
start_date_str_iso = start_date.strftime("%Y%m%d")
end_date_str_iso = end_date.strftime("%Y%m%d")
start = 0
url = "http://theses.fr/?q=&zone1=titreRAs&val1=&op1=AND&zone2=auteurs&val2=&op2=AND&zone3=etabSoutenances&val3=&op3=AND&zone4=dateSoutenance&val4a={}&val4b={}&start={}&format=xml"
logger.debug(url.format(start_date_str, end_date_str, start))
r = requests.get(url.format(start_date_str, end_date_str, start))
soup = BeautifulSoup(r.text, 'lxml')
nb_res = soup.find('result', {'name': 'response'}).attrs['numfound']
logger.debug("{} resultats entre {} et {}".format(nb_res, start_date_str_iso, end_date_str_iso ))
num_theses = get_num_these(soup)
nb_pages_remaining = math.ceil(int(nb_res)/1000)
for p in range(1, nb_pages_remaining):
logger.debug("page {} for entre {} et {}".format(p, start_date_str_iso, end_date_str_iso))
r = requests.get(url.format(start_date_str, end_date_str, p * 1000))
soup = BeautifulSoup(r.text, 'lxml')
num_theses += get_num_these(soup)
return num_theses
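# Illustrative usage (not in the original script): collect all thesis ids defended in 2020.
# nums_2020 = get_num_these_between_dates(datetime.datetime(2020, 1, 1), datetime.datetime(2020, 12, 31))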
def save_data(data, collection_name, year_start, year_end, chunk_index, referentiel):
logger.debug(f'save_data theses {collection_name} {chunk_index}')
year_start_end = 'all_years'
if year_start and year_end:
year_start_end = f'{year_start}_{year_end}'
# 1. save raw data to OS
current_file = f'theses_{year_start_end}_{chunk_index}.json'
json.dump(data, open(current_file, 'w'))
os.system(f'gzip {current_file}')
upload_object('theses', f'{current_file}.gz', f'{collection_name}/raw/{current_file}.gz')
os.system(f'rm -rf {current_file}.gz')
# 2.transform data and save in mongo
current_file_parsed = f'theses_parsed_{year_start_end}_{chunk_index}.json'
data_parsed = [parse_theses(e, referentiel, collection_name) for e in data]
json.dump(data_parsed, open(current_file_parsed, 'w'))
# insert_data(collection_name, current_file_parsed)
os.system(f'gzip {current_file_parsed}')
upload_object('theses', f'{current_file_parsed}.gz', f'{collection_name}/parsed/{current_file_parsed}.gz')
os.system(f'rm -rf {current_file_parsed}.gz')
def harvest_and_insert(collection_name):
# 1. save aurehal structures
harvest_and_save_idref(collection_name)
referentiel = get_idref_from_OS(collection_name)
# 2. drop mongo
#logger.debug(f'dropping {collection_name} collection before insertion')
#myclient = pymongo.MongoClient('mongodb://mongo:27017/')
#myclient['theses'][collection_name].drop()
# 3. save publications
year_start = None
year_end = None
if year_start is None:
year_start = 1990
if year_end is None:
year_end = datetime.date.today().year
harvest_and_insert_one_year(collection_name, year_start, year_end, referentiel)
@retry(delay=60, tries=5)
def download_these_notice(these_id):
res = {'id': these_id}
r_tefudoc = requests.get("http://www.theses.fr/{}.tefudoc".format(these_id))
r_xml = requests.get("http://www.theses.fr/{}.xml".format(these_id))
if r_tefudoc.text[0:5] == "<?xml":
res['tefudoc'] = r_tefudoc.text
if r_xml.text[0:5] == "<?xml":
res['xml'] = r_xml.text
return res
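# Illustrative usage (hypothetical NNT id): notice = download_these_notice('2019TOUR2001')
# The 'tefudoc' / 'xml' keys are only present when theses.fr returns valid XML for that id.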
def harvest_and_insert_one_year(collection_name, year_start, year_end, referentiel):
year_start_end = 'all_years'
if year_start and year_end:
year_start_end = f'{year_start}_{year_end}'
start_date = datetime.datetime(year_start,1,1)
end_date = datetime.datetime(year_end + 1,1,1) + datetime.timedelta(days = -1)
all_num_theses = get_num_these_between_dates(start_date, end_date)
# todo save by chunk
chunk_index = 0
data = []
MAX_DATA_SIZE = 25000
nb_theses = len(all_num_theses)
logger.debug(f'{nb_theses} theses to download and parse')
for ix, nnt in enumerate(all_num_theses):
if ix % 100 == 0:
logger.debug(f'theses {year_start_end} {ix}')
res = download_these_notice(nnt)
data.append(res)
if (len(data) > MAX_DATA_SIZE) or (ix == nb_theses - 1):
if data:
save_data(data, collection_name, year_start, year_end, chunk_index, referentiel)
data = []
chunk_index += 1
def insert_data(collection_name, output_file):
myclient = pymongo.MongoClient('mongodb://mongo:27017/')
mydb = myclient['theses']
## mongo start
start = datetime.datetime.now()
mongoimport = f"mongoimport --numInsertionWorkers 2 --uri mongodb://mongo:27017/theses --file {output_file}" \
f" --collection {collection_name} --jsonArray"
logger.debug(f'Mongoimport {output_file} start at {start}')
logger.debug(f'{mongoimport}')
os.system(mongoimport)
logger.debug(f'Checking indexes on collection {collection_name}')
mycol = mydb[collection_name]
#mycol.create_index('docid')
end = datetime.datetime.now()
delta = end - start
logger.debug(f'Mongoimport done in {delta}')
## mongo done
| [((453, 473), 'project.server.main.logger.get_logger', 'get_logger', (['__name__'], {}), '(__name__)\n', (463, 473), False, 'from project.server.main.logger import get_logger\n'), ((642, 666), 'retry.retry', 'retry', ([], {'delay': '(60)', 'tries': '(5)'}), '(delay=60, tries=5)\n', (647, 666), False, 'from retry import retry\n'), ((3688, 3712), 'retry.retry', 'retry', ([], {'delay': '(60)', 'tries': '(5)'}), '(delay=60, tries=5)\n', (3693, 3712), False, 'from retry import retry\n'), ((1282, 1311), 'bs4.BeautifulSoup', 'BeautifulSoup', (['r.text', '"""lxml"""'], {}), "(r.text, 'lxml')\n", (1295, 1311), False, 'from bs4 import BeautifulSoup\n'), ((2334, 2367), 'os.system', 'os.system', (['f"""gzip {current_file}"""'], {}), "(f'gzip {current_file}')\n", (2343, 2367), False, 'import os\n'), ((2372, 2465), 'project.server.main.utils_swift.upload_object', 'upload_object', (['"""theses"""', 'f"""{current_file}.gz"""', 'f"""{collection_name}/raw/{current_file}.gz"""'], {}), "('theses', f'{current_file}.gz',\n f'{collection_name}/raw/{current_file}.gz')\n", (2385, 2465), False, 'from project.server.main.utils_swift import upload_object\n'), ((2466, 2504), 'os.system', 'os.system', (['f"""rm -rf {current_file}.gz"""'], {}), "(f'rm -rf {current_file}.gz')\n", (2475, 2504), False, 'import os\n'), ((2825, 2865), 'os.system', 'os.system', (['f"""gzip {current_file_parsed}"""'], {}), "(f'gzip {current_file_parsed}')\n", (2834, 2865), False, 'import os\n'), ((2870, 2980), 'project.server.main.utils_swift.upload_object', 'upload_object', (['"""theses"""', 'f"""{current_file_parsed}.gz"""', 'f"""{collection_name}/parsed/{current_file_parsed}.gz"""'], {}), "('theses', f'{current_file_parsed}.gz',\n f'{collection_name}/parsed/{current_file_parsed}.gz')\n", (2883, 2980), False, 'from project.server.main.utils_swift import upload_object\n'), ((2981, 3026), 'os.system', 'os.system', (['f"""rm -rf {current_file_parsed}.gz"""'], {}), "(f'rm -rf {current_file_parsed}.gz')\n", (2990, 3026), False, 'import os\n'), ((3106, 3145), 'project.server.main.referentiel.harvest_and_save_idref', 'harvest_and_save_idref', (['collection_name'], {}), '(collection_name)\n', (3128, 3145), False, 'from project.server.main.referentiel import harvest_and_save_idref\n'), ((3164, 3198), 'project.server.main.parse.get_idref_from_OS', 'get_idref_from_OS', (['collection_name'], {}), '(collection_name)\n', (3181, 3198), False, 'from project.server.main.parse import parse_theses, get_idref_from_OS\n'), ((4322, 4357), 'datetime.datetime', 'datetime.datetime', (['year_start', '(1)', '(1)'], {}), '(year_start, 1, 1)\n', (4339, 4357), False, 'import datetime\n'), ((5206, 5251), 'pymongo.MongoClient', 'pymongo.MongoClient', (['"""mongodb://mongo:27017/"""'], {}), "('mongodb://mongo:27017/')\n", (5225, 5251), False, 'import pymongo\n'), ((5318, 5341), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (5339, 5341), False, 'import datetime\n'), ((5625, 5647), 'os.system', 'os.system', (['mongoimport'], {}), '(mongoimport)\n', (5634, 5647), False, 'import os\n'), ((5795, 5818), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (5816, 5818), False, 'import datetime\n'), ((1813, 1842), 'bs4.BeautifulSoup', 'BeautifulSoup', (['r.text', '"""lxml"""'], {}), "(r.text, 'lxml')\n", (1826, 1842), False, 'from bs4 import BeautifulSoup\n'), ((2645, 2690), 'project.server.main.parse.parse_theses', 'parse_theses', (['e', 'referentiel', 'collection_name'], {}), '(e, referentiel, collection_name)\n', (2657, 2690), False, 'from 
project.server.main.parse import parse_theses, get_idref_from_OS\n'), ((4371, 4408), 'datetime.datetime', 'datetime.datetime', (['(year_end + 1)', '(1)', '(1)'], {}), '(year_end + 1, 1, 1)\n', (4388, 4408), False, 'import datetime\n'), ((4409, 4436), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(-1)'}), '(days=-1)\n', (4427, 4436), False, 'import datetime\n'), ((3575, 3596), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (3594, 3596), False, 'import datetime\n')] |
ckamtsikis/cmssw | DQM/L1TMonitor/python/L1TGCT_cfi.py | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | import FWCore.ParameterSet.Config as cms
from DQMServices.Core.DQMEDAnalyzer import DQMEDAnalyzer
l1tGct = DQMEDAnalyzer('L1TGCT',
gctCentralJetsSource = cms.InputTag("gctDigis","cenJets"),
gctForwardJetsSource = cms.InputTag("gctDigis","forJets"),
gctTauJetsSource = cms.InputTag("gctDigis","tauJets"),
gctIsoTauJetsSource = cms.InputTag("gctDigis","fake"),
gctEnergySumsSource = cms.InputTag("gctDigis"),
gctIsoEmSource = cms.InputTag("gctDigis","isoEm"),
gctNonIsoEmSource = cms.InputTag("gctDigis","nonIsoEm"),
monitorDir = cms.untracked.string("L1T/L1TGCT"),
verbose = cms.untracked.bool(False),
stage1_layer2_ = cms.bool(False),
DQMStore = cms.untracked.bool(True),
disableROOToutput = cms.untracked.bool(True),
filterTriggerType = cms.int32(1)
)
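# Typical usage (assumption, not shown in this fragment): load the module in a DQM configuration with
# process.load("DQM.L1TMonitor.L1TGCT_cfi") and schedule l1tGct in a cms.Sequence or cms.Path.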
| [((159, 194), 'FWCore.ParameterSet.Config.InputTag', 'cms.InputTag', (['"""gctDigis"""', '"""cenJets"""'], {}), "('gctDigis', 'cenJets')\n", (171, 194), True, 'import FWCore.ParameterSet.Config as cms\n'), ((222, 257), 'FWCore.ParameterSet.Config.InputTag', 'cms.InputTag', (['"""gctDigis"""', '"""forJets"""'], {}), "('gctDigis', 'forJets')\n", (234, 257), True, 'import FWCore.ParameterSet.Config as cms\n'), ((281, 316), 'FWCore.ParameterSet.Config.InputTag', 'cms.InputTag', (['"""gctDigis"""', '"""tauJets"""'], {}), "('gctDigis', 'tauJets')\n", (293, 316), True, 'import FWCore.ParameterSet.Config as cms\n'), ((343, 375), 'FWCore.ParameterSet.Config.InputTag', 'cms.InputTag', (['"""gctDigis"""', '"""fake"""'], {}), "('gctDigis', 'fake')\n", (355, 375), True, 'import FWCore.ParameterSet.Config as cms\n'), ((402, 426), 'FWCore.ParameterSet.Config.InputTag', 'cms.InputTag', (['"""gctDigis"""'], {}), "('gctDigis')\n", (414, 426), True, 'import FWCore.ParameterSet.Config as cms\n'), ((449, 482), 'FWCore.ParameterSet.Config.InputTag', 'cms.InputTag', (['"""gctDigis"""', '"""isoEm"""'], {}), "('gctDigis', 'isoEm')\n", (461, 482), True, 'import FWCore.ParameterSet.Config as cms\n'), ((507, 543), 'FWCore.ParameterSet.Config.InputTag', 'cms.InputTag', (['"""gctDigis"""', '"""nonIsoEm"""'], {}), "('gctDigis', 'nonIsoEm')\n", (519, 543), True, 'import FWCore.ParameterSet.Config as cms\n'), ((561, 595), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""L1T/L1TGCT"""'], {}), "('L1T/L1TGCT')\n", (581, 595), True, 'import FWCore.ParameterSet.Config as cms\n'), ((611, 636), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(False)'], {}), '(False)\n', (629, 636), True, 'import FWCore.ParameterSet.Config as cms\n'), ((659, 674), 'FWCore.ParameterSet.Config.bool', 'cms.bool', (['(False)'], {}), '(False)\n', (667, 674), True, 'import FWCore.ParameterSet.Config as cms\n'), ((691, 715), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (709, 715), True, 'import FWCore.ParameterSet.Config as cms\n'), ((741, 765), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (759, 765), True, 'import FWCore.ParameterSet.Config as cms\n'), ((791, 803), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(1)'], {}), '(1)\n', (800, 803), True, 'import FWCore.ParameterSet.Config as cms\n')] |
gandhiy/lipMIP | utilities.py | 11843e6bf2223acca44f57d29791521aac15caf3 | """ General all-purpose utilities """
import sys
import torch
import torch.nn.functional as F
import numpy as np
import gurobipy as gb
import matplotlib.pyplot as plt
import io
import contextlib
import tempfile
import time
import re
import pickle
import inspect
import glob
import os
COMPLETED_JOB_DIR = os.path.join(os.path.dirname(__file__), 'jobs', 'completed')
# ===============================================================================
# = Helpful all-purpose functions =
# ===============================================================================
class ParameterObject:
def __init__(self, **kwargs):
self.attr_list = []
assert 'attr_list' not in kwargs
for k,v in kwargs.items():
setattr(self, k, v)
self.attr_list.append(k)
def change_attrs(self, **kwargs):
new_kwargs = {}
for attr in self.attr_list:
if attr in kwargs:
new_kwargs[attr] = kwargs[attr]
else:
new_kwargs[attr] = getattr(self, attr)
return self.__class__(**new_kwargs)
class Factory(ParameterObject):
def __init__(self, constructor, **kwargs):
self.constructor = constructor
super(Factory, self).__init__(**kwargs)
def __call__(self, **kwargs):
cons_args = inspect.getfullargspec(self.constructor).args
# Make default args from attributes
args = {k: getattr(self, k) for k in self.attr_list if k in cons_args}
# Update the default args
for k,v in kwargs.items():
if k in cons_args:
args[k] = v
# Build object
return self.constructor(**args)
def __repr__(self):
return '<Factory: %s>' % self.constructor.__self__.__name__
class DoEvery:
@classmethod
def dummy(cls, *args, **kwargs):
pass
def __init__(self, func, freq):
""" Simple class that holds onto a function and it returns
this function every freq iterations
ARGS:
func: function object to be returned every freq iterations
freq: int - how often to return the function
"""
self.func = func
self.freq = freq
self.i = 0
def __call__(self, *args, **kwargs):
if self.i % self.freq == 0:
returner = self.func
else:
returner = self.dummy
self.i += 1
return returner(*args, **kwargs)
class Timer:
def __init__(self, start_on_init=True):
if start_on_init:
self.start()
def start(self):
self.start_time = time.time()
def stop(self):
self.stop_time = time.time()
return self.stop_time - self.start_time
def reset(self):
self.start_time = self.stop_time = None
def cpufy(tensor_iter):
""" Takes a list of tensors and safely pushes them back onto the cpu"""
return [_.cpu() for _ in tensor_iter]
def cudafy(tensor_iter):
""" Takes a list of tensors and safely converts all of them to cuda"""
def safe_cuda(el):
try:
return el.cuda()
except AssertionError:
return el
return [safe_cuda(_) for _ in tensor_iter]
def prod(num_iter):
""" returns product of all elements in this iterator *'ed together"""
cumprod = 1
for el in num_iter:
cumprod *= el
return cumprod
def partition(n, m):
""" Given ints n > m, partitions n into an iterable where all
elements are m, except for the last one which is (n % m)
"""
count = 0
while count < n:
yield min([m, n - count])
count += m
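# Illustrative example (added for clarity): list(partition(7, 3)) == [3, 3, 1]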
def flatten_list(lol):
""" Given list of lists, flattens it into a single list. """
output = []
for el in lol:
if not isinstance(el, list):
output.append(el)
continue
output.extend(flatten_list(el))
return output
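# Illustrative example (added for clarity): flatten_list([1, [2, [3]], 4]) == [1, 2, 3, 4]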
def partition_by_suffix(iterable, func):
""" Given an iterable and a boolean-valued function which takes in
elements of that iterable, outputs a list of lists, where each list
ends in an element for which the func returns true, (except for the
last one)
e.g.
iterable := [1, 2, 3, 4, 5,5, 5]
func := lambda x: (x % 2) == 0
returns [[1,2], [3,4], [5, 5, 5]]
"""
output = []
sublist = []
for el in iterable:
sublist.append(el)
if func(el):
output.append(sublist)
sublist = []
if len(sublist) > 0:
output.append(sublist)
return output
def arraylike(obj):
return isinstance(obj, (torch.Tensor, np.ndarray))
def as_numpy(tensor_or_array):
""" If given a tensor or numpy array returns that object cast numpy array
"""
if isinstance(tensor_or_array, torch.Tensor):
tensor_or_array = tensor_or_array.cpu().detach().numpy()
return tensor_or_array
def two_col(l, r):
""" Takes two numpy arrays of size N and makes a numpy array of size Nx2
"""
return np.vstack([l, r]).T
def split_pos_neg(x):
if isinstance(x, torch.Tensor):
return split_tensor_pos_neg(x)
else:
return split_ndarray_pos_neg(x)
def split_tensor_pos_neg(x):
""" Splits a tensor into positive and negative components """
pos = F.relu(x)
neg = -F.relu(-x)
return pos, neg
def split_ndarray_pos_neg(x):
""" Splits a numpy ndarray into positive and negative components """
pos = x * (x >= 0)
neg = x * (x <= 0)
return pos, neg
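# Illustrative example (added for clarity): split_pos_neg(np.array([1.0, -2.0]))
# returns the positive part [1., 0.] and the negative part [0., -2.].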
def swap_axes(x, source, dest):
""" Swaps the dimensions of source <-> dest for torch/numpy
ARGS:
x : numpy array or tensor
source : int index
dest : int index
RETURNS
x' - object with same data as x, but with axes swapped
"""
if isinstance(x, torch.Tensor):
return x.transpose(source, dest)
else:
return np.moveaxis(x, source, dest)
def build_var_namer(k):
return lambda d: '%s[%s]' % (k, d)
@contextlib.contextmanager
def silent():
save_stdout = sys.stdout
temp = tempfile.TemporaryFile(mode='w')
sys.stdout = temp
yield
sys.stdout = save_stdout
temp.close()
def ia_mm(matrix, intervals, lohi_dim, matrix_or_vec='matrix'):
""" Interval analysis matrix(-vec) multiplication for torch/np intervals
ARGS:
matrix : tensor or numpy array of shape (m,n) -
intervals : tensor or numpy array with shape (n1, ..., 2, n_i, ...) -
"vector" of intervals to be multiplied by a matrix
one such n_i must be equal to n (from matrix shape)
lohi_dim : int - which dimension (index) of intervals corresponds
to the lo/hi split
	    matrix_or_vec : string - must be 'matrix' or 'vec', corresponds to whether
	                    intervals is to be treated as a matrix or a vector.
	                    If a vec, a trailing singleton dimension is added
	                    internally and squeezed out of the result.
RETURNS:
object of same type as intervals, but with the shape slightly
different: len(output[-1/-2]) == m
"""
# asserts for shapes and things
assert isinstance(matrix, torch.Tensor) # TENSOR ONLY FOR NOW
assert isinstance(intervals, torch.Tensor)
m, n = matrix.shape
assert intervals.shape[lohi_dim] == 2
assert matrix_or_vec in ['matrix', 'vec']
if matrix_or_vec == 'vec':
intervals = intervals.unsqueeze(-1)
assert lohi_dim != intervals.dim() - 2
	assert n in intervals.shape  # some interval dim must equal n (the matrix's input size)
# define operators based on tensor/numpy case
matmul = lambda m, x: m.matmul(x)
stack = lambda a, b: torch.stack([a, b])
# now do IA stuff
intervals = swap_axes(intervals, 0, lohi_dim)
matrix_pos, matrix_neg = split_pos_neg(matrix)
los, his = intervals
new_los = matmul(matrix_pos, los) + matmul(matrix_neg, his)
new_his = matmul(matrix_pos, his) + matmul(matrix_neg, los)
intervals = swap_axes(stack(new_los, new_his), 0, lohi_dim)
if matrix_or_vec == 'vec':
		intervals = intervals.squeeze(-1)
return intervals
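# Note (added for clarity): the lines above implement standard interval arithmetic for
# y = M @ x with x in [lo, hi]: new_lo = M_pos @ lo + M_neg @ hi and new_hi = M_pos @ hi + M_neg @ lo,
# where M_pos / M_neg are the elementwise positive / negative parts of M.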
# =============================================================================
# = Image display functions =
# =============================================================================
def display_images(image_rows, figsize=(8, 8)):
""" Given either a tensor/np.array (or list of same), will display each
element in the row or tensor
ARGS:
image_rows: tensor or np.array or tensor[], np.array[] -
image or list of images to display
RETURNS: None, but displays images
"""
if not isinstance(image_rows, list):
image_rows = [image_rows]
np_rows = [as_numpy(row) for row in image_rows]
# Transpose channel to last dimension and stack to make rows
np_rows = [np.concatenate(_.transpose([0, 2, 3, 1]), axis=1)
for _ in np_rows]
# Now stack rows
full_image = np.concatenate(np_rows, axis=0)
# And then show image
imshow_kwargs = {}
if full_image.shape[-1] == 1:
full_image = full_image.squeeze()
imshow_kwargs['cmap'] = 'gray'
fig = plt.figure(figsize=figsize)
ax = fig.add_subplot()
ax.axis('off')
ax.imshow(full_image, **imshow_kwargs)
plt.show()
# ======================================================
# = Pytorch helpers =
# ======================================================
def seq_append(seq, module):
""" Takes a nn.sequential and a nn.module and creates a nn.sequential
with the module appended to it
ARGS:
		seq: nn.Sequential object
module: <inherits nn.Module>
RETURNS:
nn.Sequential object
"""
seq_modules = [seq[_] for _ in range(len(seq))] + [module]
	return torch.nn.Sequential(*seq_modules)
def cpufy(tensor_iter):
""" Takes a list of tensors and safely pushes them back onto the cpu"""
output = []
for el in tensor_iter:
if isinstance(el, tuple):
output.append(tuple(_.cpu() for _ in el))
else:
output.append(el.cpu())
return output
def cudafy(tensor_iter):
""" Takes a list of tensors and safely converts all of them to cuda"""
def safe_cuda(el):
try:
if isinstance(el, tuple):
return tuple(_.cuda() for _ in el)
else:
return el.cuda()
except AssertionError:
return el
return [safe_cuda(_) for _ in tensor_iter]
# =======================================
# = Polytope class =
# =======================================
class Polytope:
INPUT_KEY = 'input'
SLACK_KEY = 'slack'
def __init__(self, A, b):
""" Represents a polytope of the form {x | AX <= b}
(where everything is a numpy array)
"""
self.A = A
self.b = b
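	# Example (added for illustration): the unit box {x : |x_i| <= 1} in R^2 is
	# Polytope(np.vstack([np.eye(2), -np.eye(2)]), np.ones(4)).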
def _input_from_model(self, model):
var_namer = build_var_namer(self.INPUT_KEY)
return np.array([model.getVarByName(var_namer(i)).X
for i in range(self.A.shape[1])])
def _build_model(self, slack=False):
""" Builds a gurobi model of this object """
with silent():
model = gb.Model()
input_namer = build_var_namer(self.INPUT_KEY)
input_vars = [model.addVar(lb=-gb.GRB.INFINITY, ub=gb.GRB.INFINITY,
name=input_namer(i))
for i in range(self.A.shape[1])]
if slack == True:
slack_var = model.addVar(lb=0, ub=1.0, name=self.SLACK_KEY)
else:
slack_var = 0
for i, row in enumerate(self.A):
model.addConstr(gb.LinExpr(row, input_vars) + slack_var <= self.b[i])
model.update()
return model
def contains(self, x, tolerance=1e-6):
return all(self.A @ x <= self.b + tolerance)
def interior_point(self):
model = self._build_model(slack=True)
slack_var = model.getVarByName(self.SLACK_KEY)
model.setObjective(slack_var, gb.GRB.MAXIMIZE)
model.update()
model.optimize()
assert model.Status == 2
return self._input_from_model(model)
def intersects_hbox(self, hbox):
""" If this intersects a given hyperbox, returns a
point contained in both
"""
model = self._build_model(slack=True)
input_namer = build_var_namer(self.INPUT_KEY)
for i, (lb, ub) in enumerate(hbox):
var = model.getVarByName(input_namer(i))
model.addConstr(lb <= var <= ub)
slack_var = model.getVarByName(self.SLACK_KEY)
model.setObjective(slack_var, gb.GRB.MAXIMIZE)
model.update()
model.optimize()
assert model.Status == 2
return self._input_from_model(model)
# =========================================================
# = experiment.Result object helpers =
# =========================================================
def filename_to_epoch(filename):
return int(re.search(r'_EPOCH\d{4}_', filename).group()[-5:-1])
def read_result_files(result_files):
output = []
for result_file in result_files:
try:
with open(result_file, 'rb') as f:
output.append((result_file, pickle.load(f)))
except Exception as err:
print("Failed on file: ", result_file, err)
return output
def job_out_series(job_outs, eval_style, method,
value_or_time='value', avg_stdev='avg'):
""" Takes in some result or resultList objects and
a 'method', and desired object, and returns these objects
in a list
ARGS:
results: Result[] or ResultList[], results to consider
eval_style: str - which method of Experiment we look at
method: str - which Lipschitz-estimation technique to consider
value_or_time: 'value' or 'time' - which number to return
avg_stdev: 'avg' or 'stdev' - for ResultList[], we can
get average or stdev values
RETURNS:
list of floats
"""
# check everything is the same type
assert value_or_time in ['value', 'time']
assert avg_stdev in ['avg', 'stdev']
assert eval_style in ['do_random_evals', 'do_unit_hypercube_eval',
'do_data_evals', 'do_large_radius_evals']
results = [job_out[eval_style] for job_out in job_outs]
output = []
for result in results:
try: #Result object case
if value_or_time == 'value':
output.append(result.values(method))
else:
output.append(result.compute_times(method))
except:
triple = result.average_stdevs(value_or_time)[method]
if avg_stdev == 'avg':
output.append(triple[0])
else:
output.append(triple[1])
return output
def collect_result_outs(filematch):
""" Uses glob to collect and load result objects matching a series
ARGS:
filematch: string with *'s associated with it
e.g. 'NAME*SUBNAME*GLOBAL.result'
RESULTS:
list of (filename, experiment.Result) objects
"""
search_str = os.path.join(COMPLETED_JOB_DIR, filematch)
sorted_filenames = sorted(glob.glob(search_str))
return read_result_files(sorted_filenames)
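# Example (hypothetical pattern): collect_result_outs('mnist*_EPOCH0100_*.result') returns
# [(filename, result_object), ...] pairs, sorted by filename.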
def collect_epochs(filename_list):
""" Given a list of (filename) objects, converts
the filenames into integers, pulling the EPOCH attribute from
the filename
str[] -> int[]
"""
def epoch_gleamer(filename):
basename = os.path.basename(filename)
return int(re.search('_EPOCH\d+_', filename).group()[6:-1])
return [epoch_gleamer(_) for _ in filename_list]
def data_from_results(result_iter, method, lip_estimator, time_or_value='value',
avg_or_stdev='avg'):
""" Given a list of experiment.Result or experiment.ResultList objects
will return the time/value for the lip_estimator of the method
for result (or avg/stdev if resultList objects)
e.g., data_from_results('do_unit_hypercube_eval', 'LipMIP',
'value') gets a list of values of the
LipMIP over the unitHypercube domain
ARGS:
method: str - name of one of the experimental methods
lip_estimator : str - name of the class of lipschitz estimator to use
time_or_value : 'time' or 'value' - returning the time or value here
avg_or_stdev : 'avg' or 'stdev' - returning either avg or stdev of
results from ResultListObjects
"""
assert method in ['do_random_evals', 'do_data_evals',
'do_unit_hypercube_eval']
assert lip_estimator in ['LipMIP', 'FastLip', 'LipLP', 'CLEVER',
'LipSDP', 'NaiveUB', 'RandomLB', 'SeqLip']
assert time_or_value in ['time', 'value']
assert avg_or_stdev in ['avg', 'stdev']
def datum_getter(result_obj):
if not hasattr(result_obj, 'average_stdevs'):
if time_or_value == 'value':
return result_obj[method].values(lip_estimator)
else:
return result_obj[method].compute_times(lip_estimator)
else:
triple = result_obj.average_stdevs(time_or_value)
if avg_or_stdev == 'avg':
return triple[0]
else:
return triple[1]
return [datum_getter(_) for _ in result_iter]
| [((320, 345), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (335, 345), False, 'import os\n'), ((4728, 4737), 'torch.nn.functional.relu', 'F.relu', (['x'], {}), '(x)\n', (4734, 4737), True, 'import torch.nn.functional as F\n'), ((5427, 5459), 'tempfile.TemporaryFile', 'tempfile.TemporaryFile', ([], {'mode': '"""w"""'}), "(mode='w')\n", (5449, 5459), False, 'import tempfile\n'), ((8052, 8083), 'numpy.concatenate', 'np.concatenate', (['np_rows'], {'axis': '(0)'}), '(np_rows, axis=0)\n', (8066, 8083), True, 'import numpy as np\n'), ((8238, 8265), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'figsize'}), '(figsize=figsize)\n', (8248, 8265), True, 'import matplotlib.pyplot as plt\n'), ((8348, 8358), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (8356, 8358), True, 'import matplotlib.pyplot as plt\n'), ((13529, 13571), 'os.path.join', 'os.path.join', (['COMPLETED_JOB_DIR', 'filematch'], {}), '(COMPLETED_JOB_DIR, filematch)\n', (13541, 13571), False, 'import os\n'), ((2322, 2333), 'time.time', 'time.time', ([], {}), '()\n', (2331, 2333), False, 'import time\n'), ((2371, 2382), 'time.time', 'time.time', ([], {}), '()\n', (2380, 2382), False, 'import time\n'), ((4477, 4494), 'numpy.vstack', 'np.vstack', (['[l, r]'], {}), '([l, r])\n', (4486, 4494), True, 'import numpy as np\n'), ((4746, 4756), 'torch.nn.functional.relu', 'F.relu', (['(-x)'], {}), '(-x)\n', (4752, 4756), True, 'import torch.nn.functional as F\n'), ((5259, 5287), 'numpy.moveaxis', 'np.moveaxis', (['x', 'source', 'dest'], {}), '(x, source, dest)\n', (5270, 5287), True, 'import numpy as np\n'), ((6779, 6798), 'torch.stack', 'torch.stack', (['[a, b]'], {}), '([a, b])\n', (6790, 6798), False, 'import torch\n'), ((13599, 13620), 'glob.glob', 'glob.glob', (['search_str'], {}), '(search_str)\n', (13608, 13620), False, 'import glob\n'), ((13898, 13924), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (13914, 13924), False, 'import os\n'), ((1236, 1276), 'inspect.getfullargspec', 'inspect.getfullargspec', (['self.constructor'], {}), '(self.constructor)\n', (1258, 1276), False, 'import inspect\n'), ((10074, 10084), 'gurobipy.Model', 'gb.Model', ([], {}), '()\n', (10082, 10084), True, 'import gurobipy as gb\n'), ((11664, 11700), 're.search', 're.search', (['"""_EPOCH\\\\d{4}_"""', 'filename'], {}), "('_EPOCH\\\\d{4}_', filename)\n", (11673, 11700), False, 'import re\n'), ((10457, 10484), 'gurobipy.LinExpr', 'gb.LinExpr', (['row', 'input_vars'], {}), '(row, input_vars)\n', (10467, 10484), True, 'import gurobipy as gb\n'), ((11879, 11893), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (11890, 11893), False, 'import pickle\n'), ((13938, 13972), 're.search', 're.search', (['"""_EPOCH\\\\d+_"""', 'filename'], {}), "('_EPOCH\\\\d+_', filename)\n", (13947, 13972), False, 'import re\n')] |
alentoghostflame/StupidAlentoBot | OLD/karma_module/text.py | c024bfb79a9ecb0d9fda5ddc4e361a0cb878baba | ADDED_KARMA_TO_MEMBER = "Gave {} karma to {}, their karma is now at {}."
REMOVED_KARMA_FROM_MEMBER = "Removed {} karma from {}, their karma is now at {}."
LIST_KARMA_OWN = "You currently have {} karma."
LIST_KARMA_OBJECT = "\"{}\" currently has {} karma."
LIST_KARMA_MEMBER = "{} currently has {} karma."
KARMA_TOP_START = "Top karma in server:\n"
KARMA_TOP_FORMAT = "{}. {} \\| {}\n"
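# Example (added for illustration): ADDED_KARMA_TO_MEMBER.format(5, "Alice", 42)
# -> "Gave 5 karma to Alice, their karma is now at 42."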
| [] |
anssilaukkarinen/mry-cluster2 | read_delphin_data.py | 65d80a7371a4991dfe248ff6944f050e1573f8fc | # -*- coding: utf-8 -*-
"""
Created on Mon Dec 6 14:51:24 2021
@author: laukkara
This script is run first to fetch results data from university's network drive
"""
import os
import pickle
input_folder_for_Delphin_data = r'S:\91202_Rakfys_Mallinnus\RAMI\simulations'
output_folder = os.path.join(r'C:\Local\laukkara\Data\github\mry-cluster2\input')
output_pickle_file_name = 'S_RAMI.pickle'
## Preparations
if not os.path.exists(output_folder):
os.makedirs(output_folder)
output_pickle_file_path = os.path.join(output_folder,
output_pickle_file_name)
## Read in results data from pickle files
cases = {}
data = {}
cases = os.listdir(input_folder_for_Delphin_data)
cases.remove('olds')
cases.remove('RAMI_simulated_cases.xlsx')
data = {}
for case in cases:
print('Reading:', case)
fname = os.path.join(input_folder_for_Delphin_data, case, 'd.pickle')
with open(fname, 'rb') as f:
try:
df = pickle.load(f)
if df.shape[0] == 1200:
data[case] = df
else:
print('ERROR AT:', case)
except:
print('Error when reading case:', case)
print(data[cases[0]].columns)
with open(output_pickle_file_path, 'wb') as f:
pickle.dump(data, f)
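# The saved pickle maps each case folder name to its 1200-row results DataFrame;
# it can be reloaded later with pickle.load(open(output_pickle_file_path, 'rb')).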
| [((289, 359), 'os.path.join', 'os.path.join', (['"""C:\\\\Local\\\\laukkara\\\\Data\\\\github\\\\mry-cluster2\\\\input"""'], {}), "('C:\\\\Local\\\\laukkara\\\\Data\\\\github\\\\mry-cluster2\\\\input')\n", (301, 359), False, 'import os\n'), ((517, 569), 'os.path.join', 'os.path.join', (['output_folder', 'output_pickle_file_name'], {}), '(output_folder, output_pickle_file_name)\n', (529, 569), False, 'import os\n'), ((682, 723), 'os.listdir', 'os.listdir', (['input_folder_for_Delphin_data'], {}), '(input_folder_for_Delphin_data)\n', (692, 723), False, 'import os\n'), ((428, 457), 'os.path.exists', 'os.path.exists', (['output_folder'], {}), '(output_folder)\n', (442, 457), False, 'import os\n'), ((463, 489), 'os.makedirs', 'os.makedirs', (['output_folder'], {}), '(output_folder)\n', (474, 489), False, 'import os\n'), ((857, 918), 'os.path.join', 'os.path.join', (['input_folder_for_Delphin_data', 'case', '"""d.pickle"""'], {}), "(input_folder_for_Delphin_data, case, 'd.pickle')\n", (869, 918), False, 'import os\n'), ((1312, 1332), 'pickle.dump', 'pickle.dump', (['data', 'f'], {}), '(data, f)\n', (1323, 1332), False, 'import pickle\n'), ((987, 1001), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (998, 1001), False, 'import pickle\n')] |
sumesh-aot/namex | api/config.py | 53e11aed5ea550b71b7b983f1b57b65db5a06766 | """Config for initializing the namex-api."""
import os
from dotenv import find_dotenv, load_dotenv
# this will load all the envars from a .env file located in the project root (api)
load_dotenv(find_dotenv())
CONFIGURATION = {
'development': 'config.DevConfig',
'testing': 'config.TestConfig',
'production': 'config.Config',
'default': 'config.Config'
}
class Config(object):
"""Base config (also production config)."""
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
SECRET_KEY = 'a secret'
SQLALCHEMY_TRACK_MODIFICATIONS = False
NRO_SERVICE_ACCOUNT = os.getenv('NRO_SERVICE_ACCOUNT', 'nro_service_account')
SOLR_BASE_URL = os.getenv('SOLR_BASE_URL', None)
SOLR_SYNONYMS_API_URL = os.getenv('SOLR_SYNONYMS_API_URL', None)
NRO_EXTRACTOR_URI = os.getenv('NRO_EXTRACTOR_URI', None)
AUTO_ANALYZE_URL = os.getenv('AUTO_ANALYZE_URL', None)
AUTO_ANALYZE_CONFIG = os.getenv('AUTO_ANALYZE_CONFIG', None)
REPORT_SVC_URL = os.getenv('REPORT_SVC_URL', None)
REPORT_TEMPLATE_PATH = os.getenv('REPORT_PATH', 'report-templates')
ALEMBIC_INI = 'migrations/alembic.ini'
# POSTGRESQL
DB_USER = os.getenv('DATABASE_USERNAME', '')
DB_PASSWORD = os.getenv('DATABASE_PASSWORD', '')
DB_NAME = os.getenv('DATABASE_NAME', '')
DB_HOST = os.getenv('DATABASE_HOST', '')
DB_PORT = os.getenv('DATABASE_PORT', '5432')
SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format(
user=DB_USER,
password=DB_PASSWORD,
host=DB_HOST,
port=int(DB_PORT),
name=DB_NAME
)
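    # e.g. postgresql://namex:secret@localhost:5432/namex  (illustrative values only)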
# ORACLE - LEGACY NRO NAMESDB
NRO_USER = os.getenv('NRO_USER', '')
NRO_SCHEMA = os.getenv('NRO_SCHEMA', None)
NRO_PASSWORD = os.getenv('NRO_PASSWORD', '')
NRO_DB_NAME = os.getenv('NRO_DB_NAME', '')
NRO_HOST = os.getenv('NRO_HOST', '')
NRO_PORT = int(os.getenv('NRO_PORT', '1521'))
# JWT_OIDC Settings
JWT_OIDC_WELL_KNOWN_CONFIG = os.getenv('JWT_OIDC_WELL_KNOWN_CONFIG')
JWT_OIDC_ALGORITHMS = os.getenv('JWT_OIDC_ALGORITHMS')
JWT_OIDC_JWKS_URI = os.getenv('JWT_OIDC_JWKS_URI')
JWT_OIDC_ISSUER = os.getenv('JWT_OIDC_ISSUER')
JWT_OIDC_AUDIENCE = os.getenv('JWT_OIDC_AUDIENCE')
JWT_OIDC_CLIENT_SECRET = os.getenv('JWT_OIDC_CLIENT_SECRET')
JWT_OIDC_CACHING_ENABLED = os.getenv('JWT_OIDC_CACHING_ENABLED')
JWT_OIDC_JWKS_CACHE_TIMEOUT = int(os.getenv('JWT_OIDC_JWKS_CACHE_TIMEOUT', '300'))
    TESTING = False
DEBUG = False
# You can disable NRO updates for Name Requests by setting the variable in your .env / OpenShift configuration
DISABLE_NAMEREQUEST_NRO_UPDATES = int(os.getenv('DISABLE_NAMEREQUEST_NRO_UPDATES', 0))
DISABLE_NAMEREQUEST_SOLR_UPDATES = int(os.getenv('DISABLE_NAMEREQUEST_SOLR_UPDATES', 0))
class DevConfig(Config):
"""Dev config used for development."""
    TESTING = False
DEBUG = True
# We can't run NRO locally unless you're provisioned, you can disable NRO updates for Name Requests by setting the variable in your .env
DISABLE_NAMEREQUEST_NRO_UPDATES = int(os.getenv('DISABLE_NAMEREQUEST_NRO_UPDATES', 0))
DISABLE_NAMEREQUEST_SOLR_UPDATES = int(os.getenv('DISABLE_NAMEREQUEST_SOLR_UPDATES', 0))
class TestConfig(Config):
"""Test config used for pytests."""
DEBUG = True
TESTING = True
# POSTGRESQL
DB_USER = os.getenv('DATABASE_TEST_USERNAME', '')
DB_PASSWORD = os.getenv('DATABASE_TEST_PASSWORD', '')
DB_NAME = os.getenv('DATABASE_TEST_NAME', '')
DB_HOST = os.getenv('DATABASE_TEST_HOST', '')
DB_PORT = os.getenv('DATABASE_TEST_PORT', '5432')
# Allows for NRO add / update bypass if necessary (for local development)
LOCAL_DEV_MODE = os.getenv('LOCAL_DEV_MODE', False)
# Set this in your .env to debug SQL Alchemy queries (for local development)
SQLALCHEMY_ECHO = 'debug' if os.getenv('DEBUG_SQL_QUERIES', False) else False
SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format(
user=DB_USER,
password=DB_PASSWORD,
host=DB_HOST,
port=int(DB_PORT),
name=DB_NAME
)
# We can't run NRO locally for running our tests
DISABLE_NAMEREQUEST_NRO_UPDATES = int(os.getenv('DISABLE_NAMEREQUEST_NRO_UPDATES', 1))
DISABLE_NAMEREQUEST_SOLR_UPDATES = int(os.getenv('DISABLE_NAMEREQUEST_SOLR_UPDATES', 0))
# JWT OIDC settings
# JWT_OIDC_TEST_MODE will set jwt_manager to use
JWT_OIDC_TEST_MODE = True
JWT_OIDC_TEST_AUDIENCE = 'example'
JWT_OIDC_TEST_ISSUER = 'https://example.localdomain/auth/realms/example'
JWT_OIDC_TEST_KEYS = {
'keys': [
{
'kid': 'flask-jwt-oidc-test-client',
'kty': 'RSA',
'alg': 'RS256',
'use': 'sig',
'n': 'AN-fWcpCyE5KPzHDjigLaSUVZI0uYrcGcc40InVtl-rQRDmAh-C2W8H4_Hxhr5VLc6crsJ2LiJTV_E72S03pzpOOaaYV6-TzAjCou2GYJIXev7f6Hh512PuG5wyxda_TlBSsI-gvphRTPsKCnPutrbiukCYrnPuWxX5_cES9eStR', # noqa: E501
'e': 'AQAB'
}
]
}
JWT_OIDC_TEST_PRIVATE_KEY_JWKS = {
'keys': [
{
'kid': 'flask-jwt-oidc-test-client',
'kty': 'RSA',
'alg': 'RS256',
'use': 'sig',
'n': 'AN-fWcpCyE5KPzHDjigLaSUVZI0uYrcGcc40InVtl-rQRDmAh-C2W8H4_Hxhr5VLc6crsJ2LiJTV_E72S03pzpOOaaYV6-TzAjCou2GYJIXev7f6Hh512PuG5wyxda_TlBSsI-gvphRTPsKCnPutrbiukCYrnPuWxX5_cES9eStR', # noqa: E501
'e': 'AQAB',
'd': 'C0G3QGI6OQ6tvbCNYGCqq043YI_8MiBl7C5dqbGZmx1ewdJBhMNJPStuckhskURaDwk4-8VBW9SlvcfSJJrnZhgFMjOYSSsBtPGBIMIdM5eSKbenCCjO8Tg0BUh_xa3CHST1W4RQ5rFXadZ9AeNtaGcWj2acmXNO3DVETXAX3x0', # noqa: E501
'p': 'APXcusFMQNHjh6KVD_hOUIw87lvK13WkDEeeuqAydai9Ig9JKEAAfV94W6Aftka7tGgE7ulg1vo3eJoLWJ1zvKM',
'q': 'AOjX3OnPJnk0ZFUQBwhduCweRi37I6DAdLTnhDvcPTrrNWuKPg9uGwHjzFCJgKd8KBaDQ0X1rZTZLTqi3peT43s',
'dp': 'AN9kBoA5o6_Rl9zeqdsIdWFmv4DB5lEqlEnC7HlAP-3oo3jWFO9KQqArQL1V8w2D4aCd0uJULiC9pCP7aTHvBhc',
'dq': 'ANtbSY6njfpPploQsF9sU26U0s7MsuLljM1E8uml8bVJE1mNsiu9MgpUvg39jEu9BtM2tDD7Y51AAIEmIQex1nM',
'qi': 'XLE5O360x-MhsdFXx8Vwz4304-MJg-oGSJXCK_ZWYOB_FGXFRTfebxCsSYi0YwJo-oNu96bvZCuMplzRI1liZw'
}
]
}
JWT_OIDC_TEST_PRIVATE_KEY_PEM = """
-----BEGIN RSA PRIVATE KEY-----
MIICXQIBAAKBgQDfn1nKQshOSj8xw44oC2klFWSNLmK3BnHONCJ1bZfq0EQ5gIfg
tlvB+Px8Ya+VS3OnK7Cdi4iU1fxO9ktN6c6TjmmmFevk8wIwqLthmCSF3r+3+h4e
ddj7hucMsXWv05QUrCPoL6YUUz7Cgpz7ra24rpAmK5z7lsV+f3BEvXkrUQIDAQAB
AoGAC0G3QGI6OQ6tvbCNYGCqq043YI/8MiBl7C5dqbGZmx1ewdJBhMNJPStuckhs
kURaDwk4+8VBW9SlvcfSJJrnZhgFMjOYSSsBtPGBIMIdM5eSKbenCCjO8Tg0BUh/
xa3CHST1W4RQ5rFXadZ9AeNtaGcWj2acmXNO3DVETXAX3x0CQQD13LrBTEDR44ei
lQ/4TlCMPO5bytd1pAxHnrqgMnWovSIPSShAAH1feFugH7ZGu7RoBO7pYNb6N3ia
C1idc7yjAkEA6Nfc6c8meTRkVRAHCF24LB5GLfsjoMB0tOeEO9w9Ous1a4o+D24b
AePMUImAp3woFoNDRfWtlNktOqLel5PjewJBAN9kBoA5o6/Rl9zeqdsIdWFmv4DB
5lEqlEnC7HlAP+3oo3jWFO9KQqArQL1V8w2D4aCd0uJULiC9pCP7aTHvBhcCQQDb
W0mOp436T6ZaELBfbFNulNLOzLLi5YzNRPLppfG1SRNZjbIrvTIKVL4N/YxLvQbT
NrQw+2OdQACBJiEHsdZzAkBcsTk7frTH4yGx0VfHxXDPjfTj4wmD6gZIlcIr9lZg
4H8UZcVFN95vEKxJiLRjAmj6g273pu9kK4ymXNEjWWJn
-----END RSA PRIVATE KEY-----"""
| [((196, 209), 'dotenv.find_dotenv', 'find_dotenv', ([], {}), '()\n', (207, 209), False, 'from dotenv import find_dotenv, load_dotenv\n'), ((608, 663), 'os.getenv', 'os.getenv', (['"""NRO_SERVICE_ACCOUNT"""', '"""nro_service_account"""'], {}), "('NRO_SERVICE_ACCOUNT', 'nro_service_account')\n", (617, 663), False, 'import os\n'), ((685, 717), 'os.getenv', 'os.getenv', (['"""SOLR_BASE_URL"""', 'None'], {}), "('SOLR_BASE_URL', None)\n", (694, 717), False, 'import os\n'), ((746, 786), 'os.getenv', 'os.getenv', (['"""SOLR_SYNONYMS_API_URL"""', 'None'], {}), "('SOLR_SYNONYMS_API_URL', None)\n", (755, 786), False, 'import os\n'), ((811, 847), 'os.getenv', 'os.getenv', (['"""NRO_EXTRACTOR_URI"""', 'None'], {}), "('NRO_EXTRACTOR_URI', None)\n", (820, 847), False, 'import os\n'), ((871, 906), 'os.getenv', 'os.getenv', (['"""AUTO_ANALYZE_URL"""', 'None'], {}), "('AUTO_ANALYZE_URL', None)\n", (880, 906), False, 'import os\n'), ((933, 971), 'os.getenv', 'os.getenv', (['"""AUTO_ANALYZE_CONFIG"""', 'None'], {}), "('AUTO_ANALYZE_CONFIG', None)\n", (942, 971), False, 'import os\n'), ((993, 1026), 'os.getenv', 'os.getenv', (['"""REPORT_SVC_URL"""', 'None'], {}), "('REPORT_SVC_URL', None)\n", (1002, 1026), False, 'import os\n'), ((1054, 1098), 'os.getenv', 'os.getenv', (['"""REPORT_PATH"""', '"""report-templates"""'], {}), "('REPORT_PATH', 'report-templates')\n", (1063, 1098), False, 'import os\n'), ((1175, 1209), 'os.getenv', 'os.getenv', (['"""DATABASE_USERNAME"""', '""""""'], {}), "('DATABASE_USERNAME', '')\n", (1184, 1209), False, 'import os\n'), ((1228, 1262), 'os.getenv', 'os.getenv', (['"""DATABASE_PASSWORD"""', '""""""'], {}), "('DATABASE_PASSWORD', '')\n", (1237, 1262), False, 'import os\n'), ((1277, 1307), 'os.getenv', 'os.getenv', (['"""DATABASE_NAME"""', '""""""'], {}), "('DATABASE_NAME', '')\n", (1286, 1307), False, 'import os\n'), ((1322, 1352), 'os.getenv', 'os.getenv', (['"""DATABASE_HOST"""', '""""""'], {}), "('DATABASE_HOST', '')\n", (1331, 1352), False, 'import os\n'), ((1367, 1401), 'os.getenv', 'os.getenv', (['"""DATABASE_PORT"""', '"""5432"""'], {}), "('DATABASE_PORT', '5432')\n", (1376, 1401), False, 'import os\n'), ((1671, 1696), 'os.getenv', 'os.getenv', (['"""NRO_USER"""', '""""""'], {}), "('NRO_USER', '')\n", (1680, 1696), False, 'import os\n'), ((1714, 1743), 'os.getenv', 'os.getenv', (['"""NRO_SCHEMA"""', 'None'], {}), "('NRO_SCHEMA', None)\n", (1723, 1743), False, 'import os\n'), ((1763, 1792), 'os.getenv', 'os.getenv', (['"""NRO_PASSWORD"""', '""""""'], {}), "('NRO_PASSWORD', '')\n", (1772, 1792), False, 'import os\n'), ((1811, 1839), 'os.getenv', 'os.getenv', (['"""NRO_DB_NAME"""', '""""""'], {}), "('NRO_DB_NAME', '')\n", (1820, 1839), False, 'import os\n'), ((1855, 1880), 'os.getenv', 'os.getenv', (['"""NRO_HOST"""', '""""""'], {}), "('NRO_HOST', '')\n", (1864, 1880), False, 'import os\n'), ((1989, 2028), 'os.getenv', 'os.getenv', (['"""JWT_OIDC_WELL_KNOWN_CONFIG"""'], {}), "('JWT_OIDC_WELL_KNOWN_CONFIG')\n", (1998, 2028), False, 'import os\n'), ((2055, 2087), 'os.getenv', 'os.getenv', (['"""JWT_OIDC_ALGORITHMS"""'], {}), "('JWT_OIDC_ALGORITHMS')\n", (2064, 2087), False, 'import os\n'), ((2112, 2142), 'os.getenv', 'os.getenv', (['"""JWT_OIDC_JWKS_URI"""'], {}), "('JWT_OIDC_JWKS_URI')\n", (2121, 2142), False, 'import os\n'), ((2165, 2193), 'os.getenv', 'os.getenv', (['"""JWT_OIDC_ISSUER"""'], {}), "('JWT_OIDC_ISSUER')\n", (2174, 2193), False, 'import os\n'), ((2218, 2248), 'os.getenv', 'os.getenv', (['"""JWT_OIDC_AUDIENCE"""'], {}), "('JWT_OIDC_AUDIENCE')\n", (2227, 2248), 
False, 'import os\n'), ((2278, 2313), 'os.getenv', 'os.getenv', (['"""JWT_OIDC_CLIENT_SECRET"""'], {}), "('JWT_OIDC_CLIENT_SECRET')\n", (2287, 2313), False, 'import os\n'), ((2345, 2382), 'os.getenv', 'os.getenv', (['"""JWT_OIDC_CACHING_ENABLED"""'], {}), "('JWT_OIDC_CACHING_ENABLED')\n", (2354, 2382), False, 'import os\n'), ((3382, 3421), 'os.getenv', 'os.getenv', (['"""DATABASE_TEST_USERNAME"""', '""""""'], {}), "('DATABASE_TEST_USERNAME', '')\n", (3391, 3421), False, 'import os\n'), ((3440, 3479), 'os.getenv', 'os.getenv', (['"""DATABASE_TEST_PASSWORD"""', '""""""'], {}), "('DATABASE_TEST_PASSWORD', '')\n", (3449, 3479), False, 'import os\n'), ((3494, 3529), 'os.getenv', 'os.getenv', (['"""DATABASE_TEST_NAME"""', '""""""'], {}), "('DATABASE_TEST_NAME', '')\n", (3503, 3529), False, 'import os\n'), ((3544, 3579), 'os.getenv', 'os.getenv', (['"""DATABASE_TEST_HOST"""', '""""""'], {}), "('DATABASE_TEST_HOST', '')\n", (3553, 3579), False, 'import os\n'), ((3594, 3633), 'os.getenv', 'os.getenv', (['"""DATABASE_TEST_PORT"""', '"""5432"""'], {}), "('DATABASE_TEST_PORT', '5432')\n", (3603, 3633), False, 'import os\n'), ((3733, 3767), 'os.getenv', 'os.getenv', (['"""LOCAL_DEV_MODE"""', '(False)'], {}), "('LOCAL_DEV_MODE', False)\n", (3742, 3767), False, 'import os\n'), ((481, 506), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (496, 506), False, 'import os\n'), ((1900, 1929), 'os.getenv', 'os.getenv', (['"""NRO_PORT"""', '"""1521"""'], {}), "('NRO_PORT', '1521')\n", (1909, 1929), False, 'import os\n'), ((2422, 2469), 'os.getenv', 'os.getenv', (['"""JWT_OIDC_JWKS_CACHE_TIMEOUT"""', '"""300"""'], {}), "('JWT_OIDC_JWKS_CACHE_TIMEOUT', '300')\n", (2431, 2469), False, 'import os\n'), ((2669, 2716), 'os.getenv', 'os.getenv', (['"""DISABLE_NAMEREQUEST_NRO_UPDATES"""', '(0)'], {}), "('DISABLE_NAMEREQUEST_NRO_UPDATES', 0)\n", (2678, 2716), False, 'import os\n'), ((2761, 2809), 'os.getenv', 'os.getenv', (['"""DISABLE_NAMEREQUEST_SOLR_UPDATES"""', '(0)'], {}), "('DISABLE_NAMEREQUEST_SOLR_UPDATES', 0)\n", (2770, 2809), False, 'import os\n'), ((3104, 3151), 'os.getenv', 'os.getenv', (['"""DISABLE_NAMEREQUEST_NRO_UPDATES"""', '(0)'], {}), "('DISABLE_NAMEREQUEST_NRO_UPDATES', 0)\n", (3113, 3151), False, 'import os\n'), ((3196, 3244), 'os.getenv', 'os.getenv', (['"""DISABLE_NAMEREQUEST_SOLR_UPDATES"""', '(0)'], {}), "('DISABLE_NAMEREQUEST_SOLR_UPDATES', 0)\n", (3205, 3244), False, 'import os\n'), ((3882, 3919), 'os.getenv', 'os.getenv', (['"""DEBUG_SQL_QUERIES"""', '(False)'], {}), "('DEBUG_SQL_QUERIES', False)\n", (3891, 3919), False, 'import os\n'), ((4247, 4294), 'os.getenv', 'os.getenv', (['"""DISABLE_NAMEREQUEST_NRO_UPDATES"""', '(1)'], {}), "('DISABLE_NAMEREQUEST_NRO_UPDATES', 1)\n", (4256, 4294), False, 'import os\n'), ((4339, 4387), 'os.getenv', 'os.getenv', (['"""DISABLE_NAMEREQUEST_SOLR_UPDATES"""', '(0)'], {}), "('DISABLE_NAMEREQUEST_SOLR_UPDATES', 0)\n", (4348, 4387), False, 'import os\n')] |
pierre-haessig/matplotlib | examples/pylab_examples/fancybox_demo2.py | 0d945044ca3fbf98cad55912584ef80911f330c6 | import matplotlib.patches as mpatch
import matplotlib.pyplot as plt
styles = mpatch.BoxStyle.get_styles()
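# get_styles() returns a dict mapping box-style names (e.g. 'round', 'square', 'sawtooth')
# to their classes; the loop below draws one labelled box per style.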
figheight = (len(styles)+.5)
fig1 = plt.figure(1, (4/1.5, figheight/1.5))
fontsize = 0.3 * 72
for i, (stylename, styleclass) in enumerate(styles.items()):
fig1.text(0.5, (float(len(styles)) - 0.5 - i)/figheight, stylename,
ha="center",
size=fontsize,
transform=fig1.transFigure,
bbox=dict(boxstyle=stylename, fc="w", ec="k"))
plt.draw()
plt.show()
| [((78, 106), 'matplotlib.patches.BoxStyle.get_styles', 'mpatch.BoxStyle.get_styles', ([], {}), '()\n', (104, 106), True, 'import matplotlib.patches as mpatch\n'), ((144, 185), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)', '(4 / 1.5, figheight / 1.5)'], {}), '(1, (4 / 1.5, figheight / 1.5))\n', (154, 185), True, 'import matplotlib.pyplot as plt\n'), ((495, 505), 'matplotlib.pyplot.draw', 'plt.draw', ([], {}), '()\n', (503, 505), True, 'import matplotlib.pyplot as plt\n'), ((506, 516), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (514, 516), True, 'import matplotlib.pyplot as plt\n')] |
sdu-cfei/modest-py | setup.py | dc14091fb8c20a8b3fa5ab33bbf597c0b566ba0a | from setuptools import setup
setup(
name='modestpy',
version='0.1',
description='FMI-compliant model identification package',
url='https://github.com/sdu-cfei/modest-py',
keywords='fmi fmu optimization model identification estimation',
author='Krzysztof Arendt, Center for Energy Informatics SDU',
author_email='[email protected], [email protected]',
license='BSD',
platforms=['Windows', 'Linux'],
packages=[
'modestpy',
'modestpy.estim',
'modestpy.estim.ga_parallel',
'modestpy.estim.ga',
'modestpy.estim.ps',
'modestpy.estim.scipy',
'modestpy.fmi',
'modestpy.utilities',
'modestpy.test'],
include_package_data=True,
install_requires=[
'fmpy[complete]',
'scipy',
'pandas',
'matplotlib',
'numpy',
'pyDOE',
'modestga'
],
classifiers=[
'Programming Language :: Python :: 3'
]
)
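# Typical local install (not part of this setup script): run `pip install -e .` from the repo root.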
| [((30, 827), 'setuptools.setup', 'setup', ([], {'name': '"""modestpy"""', 'version': '"""0.1"""', 'description': '"""FMI-compliant model identification package"""', 'url': '"""https://github.com/sdu-cfei/modest-py"""', 'keywords': '"""fmi fmu optimization model identification estimation"""', 'author': '"""Krzysztof Arendt, Center for Energy Informatics SDU"""', 'author_email': '"""[email protected], [email protected]"""', 'license': '"""BSD"""', 'platforms': "['Windows', 'Linux']", 'packages': "['modestpy', 'modestpy.estim', 'modestpy.estim.ga_parallel',\n 'modestpy.estim.ga', 'modestpy.estim.ps', 'modestpy.estim.scipy',\n 'modestpy.fmi', 'modestpy.utilities', 'modestpy.test']", 'include_package_data': '(True)', 'install_requires': "['fmpy[complete]', 'scipy', 'pandas', 'matplotlib', 'numpy', 'pyDOE',\n 'modestga']", 'classifiers': "['Programming Language :: Python :: 3']"}), "(name='modestpy', version='0.1', description=\n 'FMI-compliant model identification package', url=\n 'https://github.com/sdu-cfei/modest-py', keywords=\n 'fmi fmu optimization model identification estimation', author=\n 'Krzysztof Arendt, Center for Energy Informatics SDU', author_email=\n '[email protected], [email protected]', license='BSD',\n platforms=['Windows', 'Linux'], packages=['modestpy', 'modestpy.estim',\n 'modestpy.estim.ga_parallel', 'modestpy.estim.ga', 'modestpy.estim.ps',\n 'modestpy.estim.scipy', 'modestpy.fmi', 'modestpy.utilities',\n 'modestpy.test'], include_package_data=True, install_requires=[\n 'fmpy[complete]', 'scipy', 'pandas', 'matplotlib', 'numpy', 'pyDOE',\n 'modestga'], classifiers=['Programming Language :: Python :: 3'])\n", (35, 827), False, 'from setuptools import setup\n')] |
andersonbrands/gfworkflow | gfworkflow/core.py | 81c646fd53b8227691bcd3e236f538fee0d9d93c | import re
import subprocess as sp
from typing import Union, List
from gfworkflow.exceptions import RunCommandException
def run(command: Union[str, List[str]]):
completed_process = sp.run(command, stdout=sp.PIPE, stderr=sp.PIPE, universal_newlines=True)
if completed_process.returncode:
raise RunCommandException(completed_process)
return completed_process
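# Example (added for illustration): run('git status') returns the CompletedProcess,
# and raises RunCommandException when the command exits with a non-zero code.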
def init():
run('git flow init -d -f')
run('git config gitflow.prefix.versiontag v')
def bump_version(part: str):
run(f'bumpversion {part}')
def start_release(new_version: str):
run(f'git flow release start {new_version}')
def get_new_version(part: str):
output = run(f'bumpversion {part} --list -n --allow-dirty --no-configured-files').stdout
return re.compile(r'new_version=(\S+)').search(output).group(1)
def get_current_branch_name():
return run('git rev-parse --abbrev-ref HEAD').stdout.strip()
def finish_release(release_name):
run(f'git flow release finish -m " - " {release_name}')
| [((187, 259), 'subprocess.run', 'sp.run', (['command'], {'stdout': 'sp.PIPE', 'stderr': 'sp.PIPE', 'universal_newlines': '(True)'}), '(command, stdout=sp.PIPE, stderr=sp.PIPE, universal_newlines=True)\n', (193, 259), True, 'import subprocess as sp\n'), ((311, 349), 'gfworkflow.exceptions.RunCommandException', 'RunCommandException', (['completed_process'], {}), '(completed_process)\n', (330, 349), False, 'from gfworkflow.exceptions import RunCommandException\n'), ((762, 794), 're.compile', 're.compile', (['"""new_version=(\\\\S+)"""'], {}), "('new_version=(\\\\S+)')\n", (772, 794), False, 'import re\n')] |
jorges119/localstack | tests/integration/lambdas/lambda_python3.py | a8a78cda6c13b2e42bc46301b23c7143580132fb | # simple test function that uses python 3 features (e.g., f-strings)
# see https://github.com/localstack/localstack/issues/264
def handler(event, context):
# the following line is Python 3.6+ specific
msg = f"Successfully processed {event}" # noqa This code is Python 3.6+ only
return event
| [] |
etiennody/purchoice | import_off.py | 43a2dc81ca953ac6168f8112e97a4bae91ace690 | #! usr/bin/python3
# code: utf-8
"""Download data from Open Food Facts API."""
import json
import requests
from src.purchoice.constants import CATEGORY_SELECTED
from src.purchoice.purchoice_database import PurchoiceDatabase
class ImportOff:
"""ImportOff class downloads data from Open Food Facts API."""
def __init__(self, db):
self.url = "https://fr.openfoodfacts.org//cgi/search.pl?"
self.db = db
def get_url_params(self, category):
"""get_urls_params helps to define more precisely
the request to Open Food Facts API.
Arguments:
category {string} -- a name of category.
Returns:
dictionnary -- contains parameters to complete
the request to Open Food Facts API.
"""
return {
"action": "process",
"tagtype_0": "categories",
"tag_contains_0": "contains",
"tag_0": category,
"sort_by": "unique_scans_n",
"page_size": 500,
"json": 1,
}
def get_off(self, category):
"""get_off method makes a request to the web page of Open Food Facts,
and load data in json if the return status code is successful.
Arguments:
category {string} -- a category name.
Returns:
dictionnary -- Deserialize an bytearray instance containing
a JSON document to a Python object as early as products.
"""
response = requests.get(self.url, params=self.get_url_params(category))
if response.status_code == 200:
return json.loads(response.content)["products"]
def import_by_category(self, category):
"""import_by_category method try to insert
products, categories, brands and stores data
for each product by category in the database.
Arguments:
category {string} -- a category name.
"""
products = self.get_off(category)
products = products if isinstance(products, list) else products.items()
print("Importation des données en cours. Patientez...")
for product in products:
try:
p = self.db.add_product(product)
for category in product.get("categories").split(","):
c = self.db.add_category(category)
p.categories.append(c)
for brand in product.get("brands").split(","):
b = self.db.add_brand(brand)
p.brands.append(b)
for store in product.get("stores").split(","):
s = self.db.add_store(store)
p.stores.append(s)
except Exception:
pass
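            # Note: the bare except silently skips products whose 'categories',
            # 'brands' or 'stores' fields are missing from the Open Food Facts payload.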
if __name__ == "__main__":
db = PurchoiceDatabase()
db.truncate_tables()
import_off = ImportOff(db)
for category in CATEGORY_SELECTED:
import_off.import_by_category(category)
print("Merci d'avoir patienté. Vous pouvez lancer l'application !")
| [((2783, 2802), 'src.purchoice.purchoice_database.PurchoiceDatabase', 'PurchoiceDatabase', ([], {}), '()\n', (2800, 2802), False, 'from src.purchoice.purchoice_database import PurchoiceDatabase\n'), ((1611, 1639), 'json.loads', 'json.loads', (['response.content'], {}), '(response.content)\n', (1621, 1639), False, 'import json\n')] |
zhjp0/Orio | orio/module/loop/cfg.py | 7dfb80527053c5697d1bce1bd8ed996b1ea192c8 | '''
Created on April 26, 2015
@author: norris
'''
import ast, sys, os, traceback
from orio.main.util.globals import *
from orio.tool.graphlib import graph
from orio.module.loop import astvisitors
class CFGVertex(graph.Vertex):
'''A CFG vertex is a basic block.'''
def __init__(self, name, node=None):
try: graph.Vertex.__init__(self, name)
        except Exception as e: err("CFGVertex.__init__:" + str(e))
self.stmts = [node] # basic block, starting with leader node
pass
def append(self, node):
self.stmts.append(node)
def copy(self):
v = CFGVertex(self.name)
v.e = self.e
v.data = self.data
return v
def succ(self):
return self.out_v()
def pred(self):
return self.in_v()
def __str__(self):
return "<%s> " % self.name + str(self.stmts)
pass # End of CFG vertex class
class CFGEdge(graph.DirEdge):
def __init__(self, v1, v2, name=''):
if not name: name = Globals().incrementCounter()
graph.DirEdge.__init__(self, name, v1, v2)
pass
pass # End of CFGEdge class
class CFGGraph(graph.Graph):
def __init__(self, nodes, name='CFG'):
graph.Graph.__init__(self, name)
self.cfgVisitor = CFGVisitor(self)
self.cfgVisitor.visit(nodes)
if True:
self.display()
pass
def nodes(self):
return self.v
def pred(self, bb):
return self.v[bb.name].in_v()
def succ(self, bb):
return self.v[bb.name].out_v()
def display(self):
#sys.stdout.write(str(self))
self.genDOT()
def genDOT(self, fname=''):
buf = 'digraph CFG {\n'
for n,vertex in self.v.items():
label = '[label="%s%s...",shape=box]' % (n,str(vertex.stmts[0]).split('\n')[0])
buf += '\t%s %s;\n' % (n, label)
for edge in vertex.out_e:
for dv in edge.dest_v:
buf += '\t%s -> %s;\n' % (n, dv.name)
buf += '\n}\n'
if fname == '': fname = Globals().tempfilename + '.dot'
f=open(fname,'w')
f.write(buf)
f.close()
# print buf
return buf
pass # End of CFG Graph class
class CFGVisitor(astvisitors.ASTVisitor):
def __init__(self, graph):
astvisitors.ASTVisitor.__init__(self)
self.cfg = graph
v = CFGVertex('_TOP_')
self.cfg.add_v(v)
self.stack = [v]
self.lead = True
self.verbose = False
self.last = None
def display(self, node, msg=''):
if self.verbose:
sys.stdout.write("[%s] " % self.__class__.__name__ + node.__class__.__name__ + ': ' + msg+'\n')
def visit(self, nodes, params={}):
'''Invoke accept method for specified AST node'''
if not isinstance(nodes, (list, tuple)):
nodes = [nodes]
try:
for node in nodes:
if not node: continue
v = CFGVertex(node.id, node)
if isinstance(node, ast.ForStmt):
self.display(node)
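                    # A for-loop gets its own header vertex: the body is visited with the
                    # header on the stack, and a synthetic _JOIN_ vertex then merges the
                    # header with the last block of the body.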
# Children: header: node.init, node.test, node.iter; body: node.stmt
v = CFGVertex('ForLoop' + str(node.id), node)
self.cfg.add_v(v)
self.cfg.add_e(CFGEdge(self.stack.pop(),v))
self.stack.append(v)
self.lead = True
self.stack.append(v)
self.visit(node.stmt)
vbottom = CFGVertex('_JOIN_' + str(node.id))
self.cfg.add_v(vbottom)
self.cfg.add_e(CFGEdge(v,vbottom))
self.cfg.add_e(CFGEdge(self.stack.pop(),vbottom))
self.stack.append(vbottom)
self.lead = True
elif isinstance(node, ast.IfStmt):
self.display(node)
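                    # Conditionals fork from a single branch vertex into the true and
                    # false arms; a synthetic _JOIN_ vertex re-merges the last block of
                    # each arm.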
v = CFGVertex('IfStmt' + str(node.id) , node)
self.cfg.add_v(v)
self.cfg.add_e(CFGEdge(self.stack.pop(),v))
self.stack.append(v)
self.lead = True
self.visit(node.true_stmt)
truelast = self.stack.pop()
self.stack.append(v)
self.lead = True
self.visit(node.false_stmt)
falselast = self.stack.pop()
self.lead = True
vbottom = CFGVertex('_JOIN_' + str(node.id))
self.cfg.add_v(vbottom)
self.cfg.add_e(CFGEdge(truelast,vbottom))
self.cfg.add_e(CFGEdge(falselast,vbottom))
self.stack.append(vbottom)
elif isinstance(node, ast.CompStmt):
self.display(node)
self.visit(node.stmts)
# TODO: handle gotos
else:
# Add to previous basic block
if self.lead:
v = CFGVertex(node.id, node)
self.cfg.add_v(v)
self.cfg.add_e(CFGEdge(self.stack.pop(),v))
self.stack.append(v)
self.lead = False
else:
self.stack.pop()
self.stack.append(v)
self.stack[-1].append(node)
except Exception as ex:
err("[orio.module.loop.cfg.CFGVisitor.visit()] %s" % str(ex))
return
def getCFG(self):
return self.cfg
pass # end of class CFGVisitor
| [] |
lubnc4261/House-Keeper | cogs rework/server specified/on_message_delete.py | 6de20014afaf00cf9050e54c91cd8b3a02702a27 | import discord
from discord import Embed
@commands.Cog.listener()
async def on_message_delete(self, message):
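    # Placeholder value: resolve the real log channel object (for example via
    # bot.get_channel) before channel.send() is called at the end of this handler.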
channel = "xxxxxxxxxxxxxxxxxxxxx"
deleted = Embed(
description=f"Message deleted in {message.channel.mention}", color=0x4040EC
).set_author(name=message.author, url=Embed.Empty, icon_url=message.author.avatar_url)
deleted.add_field(name="Message", value=message.content)
deleted.timestamp = message.created_at
await channel.send(embed=deleted) | [((173, 259), 'discord.Embed', 'Embed', ([], {'description': 'f"""Message deleted in {message.channel.mention}"""', 'color': '(4210924)'}), "(description=f'Message deleted in {message.channel.mention}', color=\n 4210924)\n", (178, 259), False, 'from discord import Embed\n')] |
icing/mod_md | test/modules/md/md_env.py | 4522ed547f0426f27aae86f00fbc9b5b17de545f | import copy
import inspect
import json
import logging
import pytest
import re
import os
import shutil
import subprocess
import time
from datetime import datetime, timedelta
from configparser import ConfigParser, ExtendedInterpolation
from typing import Dict, List, Optional
from pyhttpd.certs import CertificateSpec
from .md_cert_util import MDCertUtil
from pyhttpd.env import HttpdTestSetup, HttpdTestEnv
from pyhttpd.result import ExecResult
log = logging.getLogger(__name__)
class MDTestSetup(HttpdTestSetup):
def __init__(self, env: 'HttpdTestEnv'):
super().__init__(env=env)
def make(self):
super().make(add_modules=["proxy_connect", "md"])
if "pebble" == self.env.acme_server:
self._make_pebble_conf()
def _make_pebble_conf(self):
our_dir = os.path.dirname(inspect.getfile(MDTestSetup))
conf_src_dir = os.path.join(our_dir, 'pebble')
conf_dest_dir = os.path.join(self.env.gen_dir, 'pebble')
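        # Copy the bundled pebble configuration into the generated test dir,
        # expanding any *.template files along the way.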
if not os.path.exists(conf_dest_dir):
os.makedirs(conf_dest_dir)
for name in os.listdir(conf_src_dir):
src_path = os.path.join(conf_src_dir, name)
m = re.match(r'(.+).template', name)
if m:
self._make_template(src_path, os.path.join(conf_dest_dir, m.group(1)))
elif os.path.isfile(src_path):
shutil.copy(src_path, os.path.join(conf_dest_dir, name))
class MDTestEnv(HttpdTestEnv):
MD_S_UNKNOWN = 0
MD_S_INCOMPLETE = 1
MD_S_COMPLETE = 2
MD_S_EXPIRED = 3
MD_S_ERROR = 4
EMPTY_JOUT = {'status': 0, 'output': []}
DOMAIN_SUFFIX = "%d.org" % time.time()
LOG_FMT_TIGHT = '%(levelname)s: %(message)s'
@classmethod
def get_acme_server(cls):
return os.environ['ACME'] if 'ACME' in os.environ else "pebble"
@classmethod
def has_acme_server(cls):
return cls.get_acme_server() != 'none'
@classmethod
def has_acme_eab(cls):
return cls.get_acme_server() == 'pebble'
@classmethod
def is_pebble(cls) -> bool:
return cls.get_acme_server() == 'pebble'
@classmethod
def lacks_ocsp(cls):
return cls.is_pebble()
def __init__(self, pytestconfig=None, setup_dirs=True):
super().__init__(pytestconfig=pytestconfig,
local_dir=os.path.dirname(inspect.getfile(MDTestEnv)),
interesting_modules=["md"])
self._acme_server = self.get_acme_server()
self._acme_tos = "accepted"
self._acme_ca_pemfile = os.path.join(self.gen_dir, "apache/acme-ca.pem")
if "pebble" == self._acme_server:
self._acme_url = "https://localhost:14000/dir"
self._acme_eab_url = "https://localhost:14001/dir"
elif "boulder" == self._acme_server:
self._acme_url = "http://localhost:4001/directory"
self._acme_eab_url = None
else:
raise Exception(f"unknown ACME server type: {self._acme_server}")
self._acme_server_down = False
self._acme_server_ok = False
self._a2md_bin = os.path.join(self.bin_dir, 'a2md')
self._default_domain = f"test1.{self.http_tld}"
self._store_dir = "./md"
self.set_store_dir_default()
self.add_cert_specs([
CertificateSpec(domains=[f"expired.{self._http_tld}"],
valid_from=timedelta(days=-100),
valid_to=timedelta(days=-10)),
CertificateSpec(domains=["localhost"], key_type='rsa2048'),
])
self.httpd_error_log.set_ignored_lognos([
#"AH10045", # mod_md complains that there is no vhost for an MDomain
"AH10105", # mod_md does not find a vhost with SSL enabled for an MDomain
"AH10085" # mod_ssl complains about fallback certificates
])
if self.lacks_ocsp():
self.httpd_error_log.set_ignored_patterns([
re.compile(r'.*certificate with serial \S+ has no OCSP responder URL.*'),
])
if setup_dirs:
self._setup = MDTestSetup(env=self)
self._setup.make()
self.issue_certs()
self.clear_store()
def set_store_dir_default(self):
dirpath = "md"
if self.httpd_is_at_least("2.5.0"):
dirpath = os.path.join("state", dirpath)
self.set_store_dir(dirpath)
def set_store_dir(self, dirpath):
self._store_dir = os.path.join(self.server_dir, dirpath)
if self.acme_url:
self.a2md_stdargs([self.a2md_bin, "-a", self.acme_url, "-d", self._store_dir, "-C", self.acme_ca_pemfile, "-j"])
self.a2md_rawargs([self.a2md_bin, "-a", self.acme_url, "-d", self._store_dir, "-C", self.acme_ca_pemfile])
def get_apxs_var(self, name: str) -> str:
p = subprocess.run([self._apxs, "-q", name], capture_output=True, text=True)
if p.returncode != 0:
return ""
return p.stdout.strip()
@property
def acme_server(self):
return self._acme_server
@property
def acme_url(self):
return self._acme_url
@property
def acme_tos(self):
return self._acme_tos
@property
def a2md_bin(self):
return self._a2md_bin
@property
def acme_ca_pemfile(self):
return self._acme_ca_pemfile
@property
def store_dir(self):
return self._store_dir
def get_request_domain(self, request):
return "%s-%s" % (re.sub(r'[_]', '-', request.node.originalname), MDTestEnv.DOMAIN_SUFFIX)
def get_method_domain(self, method):
return "%s-%s" % (re.sub(r'[_]', '-', method.__name__.lower()), MDTestEnv.DOMAIN_SUFFIX)
def get_module_domain(self, module):
return "%s-%s" % (re.sub(r'[_]', '-', module.__name__.lower()), MDTestEnv.DOMAIN_SUFFIX)
def get_class_domain(self, c):
return "%s-%s" % (re.sub(r'[_]', '-', c.__name__.lower()), MDTestEnv.DOMAIN_SUFFIX)
# --------- cmd execution ---------
_a2md_args = []
_a2md_args_raw = []
def a2md_stdargs(self, args):
self._a2md_args = [] + args
def a2md_rawargs(self, args):
self._a2md_args_raw = [] + args
def a2md(self, args, raw=False) -> ExecResult:
preargs = self._a2md_args
if raw:
preargs = self._a2md_args_raw
log.debug("running: {0} {1}".format(preargs, args))
return self.run(preargs + args)
def check_acme(self):
if self._acme_server_ok:
return True
if self._acme_server_down:
pytest.skip(msg="ACME server not running")
return False
if self.is_live(self.acme_url, timeout=timedelta(seconds=0.5)):
self._acme_server_ok = True
return True
else:
self._acme_server_down = True
pytest.fail(msg="ACME server not running", pytrace=False)
return False
def get_ca_pem_file(self, hostname: str) -> Optional[str]:
pem_file = super().get_ca_pem_file(hostname)
if pem_file is None:
pem_file = self.acme_ca_pemfile
return pem_file
# --------- access local store ---------
def purge_store(self):
log.debug("purge store dir: %s" % self._store_dir)
assert len(self._store_dir) > 1
if os.path.exists(self._store_dir):
shutil.rmtree(self._store_dir, ignore_errors=False)
os.makedirs(self._store_dir)
def clear_store(self):
log.debug("clear store dir: %s" % self._store_dir)
assert len(self._store_dir) > 1
if not os.path.exists(self._store_dir):
os.makedirs(self._store_dir)
for dirpath in ["challenges", "tmp", "archive", "domains", "accounts", "staging", "ocsp"]:
shutil.rmtree(os.path.join(self._store_dir, dirpath), ignore_errors=True)
def clear_ocsp_store(self):
assert len(self._store_dir) > 1
dirpath = os.path.join(self._store_dir, "ocsp")
log.debug("clear ocsp store dir: %s" % dir)
if os.path.exists(dirpath):
shutil.rmtree(dirpath, ignore_errors=True)
def authz_save(self, name, content):
dirpath = os.path.join(self._store_dir, 'staging', name)
os.makedirs(dirpath)
open(os.path.join(dirpath, 'authz.json'), "w").write(content)
def path_store_json(self):
return os.path.join(self._store_dir, 'md_store.json')
def path_account(self, acct):
return os.path.join(self._store_dir, 'accounts', acct, 'account.json')
def path_account_key(self, acct):
return os.path.join(self._store_dir, 'accounts', acct, 'account.pem')
def store_domains(self):
return os.path.join(self._store_dir, 'domains')
def store_archives(self):
return os.path.join(self._store_dir, 'archive')
def store_stagings(self):
return os.path.join(self._store_dir, 'staging')
def store_challenges(self):
return os.path.join(self._store_dir, 'challenges')
def store_domain_file(self, domain, filename):
return os.path.join(self.store_domains(), domain, filename)
def store_archived_file(self, domain, version, filename):
return os.path.join(self.store_archives(), "%s.%d" % (domain, version), filename)
def store_staged_file(self, domain, filename):
return os.path.join(self.store_stagings(), domain, filename)
def path_fallback_cert(self, domain):
return os.path.join(self._store_dir, 'domains', domain, 'fallback-pubcert.pem')
def path_job(self, domain):
return os.path.join(self._store_dir, 'staging', domain, 'job.json')
def replace_store(self, src):
shutil.rmtree(self._store_dir, ignore_errors=False)
shutil.copytree(src, self._store_dir)
def list_accounts(self):
return os.listdir(os.path.join(self._store_dir, 'accounts'))
def check_md(self, domain, md=None, state=-1, ca=None, protocol=None, agreement=None, contacts=None):
domains = None
if isinstance(domain, list):
domains = domain
domain = domains[0]
if md:
domain = md
path = self.store_domain_file(domain, 'md.json')
with open(path) as f:
md = json.load(f)
assert md
if domains:
assert md['domains'] == domains
if state >= 0:
assert md['state'] == state
if ca:
assert md['ca']['url'] == ca
if protocol:
assert md['ca']['proto'] == protocol
if agreement:
assert md['ca']['agreement'] == agreement
if contacts:
assert md['contacts'] == contacts
def pkey_fname(self, pkeyspec=None):
if pkeyspec and not re.match(r'^rsa( ?\d+)?$', pkeyspec.lower()):
return "privkey.{0}.pem".format(pkeyspec)
return 'privkey.pem'
def cert_fname(self, pkeyspec=None):
if pkeyspec and not re.match(r'^rsa( ?\d+)?$', pkeyspec.lower()):
return "pubcert.{0}.pem".format(pkeyspec)
return 'pubcert.pem'
def check_md_complete(self, domain, pkey=None):
md = self.get_md_status(domain)
assert md
assert 'state' in md, "md is unexpected: {0}".format(md)
assert md['state'] is MDTestEnv.MD_S_COMPLETE, "unexpected state: {0}".format(md['state'])
assert os.path.isfile(self.store_domain_file(domain, self.pkey_fname(pkey)))
assert os.path.isfile(self.store_domain_file(domain, self.cert_fname(pkey)))
def check_md_credentials(self, domain):
if isinstance(domain, list):
domains = domain
domain = domains[0]
else:
domains = [domain]
# check private key, validate certificate, etc
MDCertUtil.validate_privkey(self.store_domain_file(domain, 'privkey.pem'))
cert = MDCertUtil(self.store_domain_file(domain, 'pubcert.pem'))
cert.validate_cert_matches_priv_key(self.store_domain_file(domain, 'privkey.pem'))
# check SANs and CN
assert cert.get_cn() == domain
# compare lists twice in opposite directions: SAN may not respect ordering
san_list = list(cert.get_san_list())
assert len(san_list) == len(domains)
assert set(san_list).issubset(domains)
assert set(domains).issubset(san_list)
# check valid dates interval
not_before = cert.get_not_before()
not_after = cert.get_not_after()
assert not_before < datetime.now(not_before.tzinfo)
assert not_after > datetime.now(not_after.tzinfo)
# --------- check utilities ---------
def check_json_contains(self, actual, expected):
# write all expected key:value bindings to a copy of the actual data ...
# ... assert it stays unchanged
test_json = copy.deepcopy(actual)
test_json.update(expected)
assert actual == test_json
def check_file_access(self, path, exp_mask):
actual_mask = os.lstat(path).st_mode & 0o777
assert oct(actual_mask) == oct(exp_mask)
def check_dir_empty(self, path):
assert os.listdir(path) == []
def get_http_status(self, domain, path, use_https=True):
r = self.get_meta(domain, path, use_https, insecure=True)
return r.response['status']
def get_cert(self, domain, tls=None, ciphers=None):
return MDCertUtil.load_server_cert(self._httpd_addr, self.https_port,
domain, tls=tls, ciphers=ciphers)
def get_server_cert(self, domain, proto=None, ciphers=None):
args = [
"openssl", "s_client", "-status",
"-connect", "%s:%s" % (self._httpd_addr, self.https_port),
"-CAfile", self.acme_ca_pemfile,
"-servername", domain,
"-showcerts"
]
if proto is not None:
args.extend(["-{0}".format(proto)])
if ciphers is not None:
args.extend(["-cipher", ciphers])
r = self.run(args)
# noinspection PyBroadException
try:
return MDCertUtil.parse_pem_cert(r.stdout)
except:
return None
def verify_cert_key_lenghts(self, domain, pkeys):
for p in pkeys:
cert = self.get_server_cert(domain, proto="tls1_2", ciphers=p['ciphers'])
if 0 == p['keylen']:
assert cert is None
else:
assert cert, "no cert returned for cipher: {0}".format(p['ciphers'])
assert cert.get_key_length() == p['keylen'], "key length, expected {0}, got {1}".format(
p['keylen'], cert.get_key_length()
)
def get_meta(self, domain, path, use_https=True, insecure=False):
schema = "https" if use_https else "http"
port = self.https_port if use_https else self.http_port
r = self.curl_get(f"{schema}://{domain}:{port}{path}", insecure=insecure)
assert r.exit_code == 0
assert r.response
assert r.response['header']
return r
def get_content(self, domain, path, use_https=True):
schema = "https" if use_https else "http"
port = self.https_port if use_https else self.http_port
r = self.curl_get(f"{schema}://{domain}:{port}{path}")
assert r.exit_code == 0
return r.stdout
def get_json_content(self, domain, path, use_https=True, insecure=False,
debug_log=True):
schema = "https" if use_https else "http"
port = self.https_port if use_https else self.http_port
url = f"{schema}://{domain}:{port}{path}"
r = self.curl_get(url, insecure=insecure, debug_log=debug_log)
if r.exit_code != 0:
log.error(f"curl get on {url} returned {r.exit_code}"
f"\nstdout: {r.stdout}"
f"\nstderr: {r.stderr}")
assert r.exit_code == 0, r.stderr
return r.json
def get_certificate_status(self, domain) -> Dict:
return self.get_json_content(domain, "/.httpd/certificate-status", insecure=True)
def get_md_status(self, domain, via_domain=None, use_https=True, debug_log=False) -> Dict:
if via_domain is None:
via_domain = self._default_domain
return self.get_json_content(via_domain, f"/md-status/{domain}",
use_https=use_https, debug_log=debug_log)
def get_server_status(self, query="/", via_domain=None, use_https=True):
if via_domain is None:
via_domain = self._default_domain
return self.get_content(via_domain, "/server-status%s" % query, use_https=use_https)
def await_completion(self, names, must_renew=False, restart=True, timeout=60,
via_domain=None, use_https=True):
try_until = time.time() + timeout
renewals = {}
names = names.copy()
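        # Poll each domain's md-status until its renewal job reports finished or
        # the timeout expires; with must_renew the domain must have reported a
        # renewal entry, not merely be complete already.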
while len(names) > 0:
if time.time() >= try_until:
return False
for name in names:
mds = self.get_md_status(name, via_domain=via_domain, use_https=use_https)
if mds is None:
log.debug("not managed by md: %s" % name)
return False
if 'renewal' in mds:
renewal = mds['renewal']
renewals[name] = True
if 'finished' in renewal and renewal['finished'] is True:
if (not must_renew) or (name in renewals):
log.debug(f"domain cert was renewed: {name}")
names.remove(name)
if len(names) != 0:
time.sleep(0.1)
if restart:
time.sleep(0.1)
return self.apache_restart() == 0
return True
def is_renewing(self, name):
stat = self.get_certificate_status(name)
return 'renewal' in stat
def await_renewal(self, names, timeout=60):
try_until = time.time() + timeout
while len(names) > 0:
if time.time() >= try_until:
return False
for name in names:
md = self.get_md_status(name)
if md is None:
log.debug("not managed by md: %s" % name)
return False
if 'renewal' in md:
names.remove(name)
if len(names) != 0:
time.sleep(0.1)
return True
def await_error(self, domain, timeout=60, via_domain=None, use_https=True, errors=1):
try_until = time.time() + timeout
while True:
if time.time() >= try_until:
return False
md = self.get_md_status(domain, via_domain=via_domain, use_https=use_https)
if md:
if 'state' in md and md['state'] == MDTestEnv.MD_S_ERROR:
return md
if 'renewal' in md and 'errors' in md['renewal'] \
and md['renewal']['errors'] >= errors:
return md
time.sleep(0.1)
return None
def await_file(self, fpath, timeout=60):
try_until = time.time() + timeout
while True:
if time.time() >= try_until:
return False
if os.path.isfile(fpath):
return True
time.sleep(0.1)
def check_file_permissions(self, domain):
md = self.a2md(["list", domain]).json['output'][0]
assert md
acct = md['ca']['account']
assert acct
self.check_file_access(self.path_store_json(), 0o600)
# domains
self.check_file_access(self.store_domains(), 0o700)
self.check_file_access(os.path.join(self.store_domains(), domain), 0o700)
self.check_file_access(self.store_domain_file(domain, 'privkey.pem'), 0o600)
self.check_file_access(self.store_domain_file(domain, 'pubcert.pem'), 0o600)
self.check_file_access(self.store_domain_file(domain, 'md.json'), 0o600)
# archive
self.check_file_access(self.store_archived_file(domain, 1, 'md.json'), 0o600)
# accounts
self.check_file_access(os.path.join(self._store_dir, 'accounts'), 0o755)
self.check_file_access(os.path.join(self._store_dir, 'accounts', acct), 0o755)
self.check_file_access(self.path_account(acct), 0o644)
self.check_file_access(self.path_account_key(acct), 0o644)
# staging
self.check_file_access(self.store_stagings(), 0o755)
def get_ocsp_status(self, domain, proto=None, cipher=None, ca_file=None):
stat = {}
args = [
"openssl", "s_client", "-status",
"-connect", "%s:%s" % (self._httpd_addr, self.https_port),
"-CAfile", ca_file if ca_file else self.acme_ca_pemfile,
"-servername", domain,
"-showcerts"
]
if proto is not None:
args.extend(["-{0}".format(proto)])
if cipher is not None:
args.extend(["-cipher", cipher])
r = self.run(args, debug_log=False)
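        # Depending on the openssl version, s_client prints either a one-line
        # "OCSP response: ..." or a multi-line "OCSP Response Status: ..." block,
        # so try both patterns.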
ocsp_regex = re.compile(r'OCSP response: +([^=\n]+)\n')
matches = ocsp_regex.finditer(r.stdout)
for m in matches:
if m.group(1) != "":
stat['ocsp'] = m.group(1)
if 'ocsp' not in stat:
ocsp_regex = re.compile(r'OCSP Response Status:\s*(.+)')
matches = ocsp_regex.finditer(r.stdout)
for m in matches:
if m.group(1) != "":
stat['ocsp'] = m.group(1)
verify_regex = re.compile(r'Verify return code:\s*(.+)')
matches = verify_regex.finditer(r.stdout)
for m in matches:
if m.group(1) != "":
stat['verify'] = m.group(1)
return stat
def await_ocsp_status(self, domain, timeout=10, ca_file=None):
try_until = time.time() + timeout
while True:
if time.time() >= try_until:
break
stat = self.get_ocsp_status(domain, ca_file=ca_file)
if 'ocsp' in stat and stat['ocsp'] != "no response sent":
return stat
time.sleep(0.1)
raise TimeoutError(f"ocsp respopnse not available: {domain}")
def create_self_signed_cert(self, name_list, valid_days, serial=1000, path=None):
dirpath = path
if not path:
dirpath = os.path.join(self.store_domains(), name_list[0])
return MDCertUtil.create_self_signed_cert(dirpath, name_list, valid_days, serial) | [((454, 481), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (471, 481), False, 'import logging\n'), ((881, 912), 'os.path.join', 'os.path.join', (['our_dir', '"""pebble"""'], {}), "(our_dir, 'pebble')\n", (893, 912), False, 'import os\n'), ((937, 977), 'os.path.join', 'os.path.join', (['self.env.gen_dir', '"""pebble"""'], {}), "(self.env.gen_dir, 'pebble')\n", (949, 977), False, 'import os\n'), ((1083, 1107), 'os.listdir', 'os.listdir', (['conf_src_dir'], {}), '(conf_src_dir)\n', (1093, 1107), False, 'import os\n'), ((1654, 1665), 'time.time', 'time.time', ([], {}), '()\n', (1663, 1665), False, 'import time\n'), ((2562, 2610), 'os.path.join', 'os.path.join', (['self.gen_dir', '"""apache/acme-ca.pem"""'], {}), "(self.gen_dir, 'apache/acme-ca.pem')\n", (2574, 2610), False, 'import os\n'), ((3115, 3149), 'os.path.join', 'os.path.join', (['self.bin_dir', '"""a2md"""'], {}), "(self.bin_dir, 'a2md')\n", (3127, 3149), False, 'import os\n'), ((4495, 4533), 'os.path.join', 'os.path.join', (['self.server_dir', 'dirpath'], {}), '(self.server_dir, dirpath)\n', (4507, 4533), False, 'import os\n'), ((4865, 4937), 'subprocess.run', 'subprocess.run', (["[self._apxs, '-q', name]"], {'capture_output': '(True)', 'text': '(True)'}), "([self._apxs, '-q', name], capture_output=True, text=True)\n", (4879, 4937), False, 'import subprocess\n'), ((7367, 7398), 'os.path.exists', 'os.path.exists', (['self._store_dir'], {}), '(self._store_dir)\n', (7381, 7398), False, 'import os\n'), ((7472, 7500), 'os.makedirs', 'os.makedirs', (['self._store_dir'], {}), '(self._store_dir)\n', (7483, 7500), False, 'import os\n'), ((7993, 8030), 'os.path.join', 'os.path.join', (['self._store_dir', '"""ocsp"""'], {}), "(self._store_dir, 'ocsp')\n", (8005, 8030), False, 'import os\n'), ((8094, 8117), 'os.path.exists', 'os.path.exists', (['dirpath'], {}), '(dirpath)\n', (8108, 8117), False, 'import os\n'), ((8234, 8280), 'os.path.join', 'os.path.join', (['self._store_dir', '"""staging"""', 'name'], {}), "(self._store_dir, 'staging', name)\n", (8246, 8280), False, 'import os\n'), ((8289, 8309), 'os.makedirs', 'os.makedirs', (['dirpath'], {}), '(dirpath)\n', (8300, 8309), False, 'import os\n'), ((8427, 8473), 'os.path.join', 'os.path.join', (['self._store_dir', '"""md_store.json"""'], {}), "(self._store_dir, 'md_store.json')\n", (8439, 8473), False, 'import os\n'), ((8524, 8587), 'os.path.join', 'os.path.join', (['self._store_dir', '"""accounts"""', 'acct', '"""account.json"""'], {}), "(self._store_dir, 'accounts', acct, 'account.json')\n", (8536, 8587), False, 'import os\n'), ((8642, 8704), 'os.path.join', 'os.path.join', (['self._store_dir', '"""accounts"""', 'acct', '"""account.pem"""'], {}), "(self._store_dir, 'accounts', acct, 'account.pem')\n", (8654, 8704), False, 'import os\n'), ((8750, 8790), 'os.path.join', 'os.path.join', (['self._store_dir', '"""domains"""'], {}), "(self._store_dir, 'domains')\n", (8762, 8790), False, 'import os\n'), ((8837, 8877), 'os.path.join', 'os.path.join', (['self._store_dir', '"""archive"""'], {}), "(self._store_dir, 'archive')\n", (8849, 8877), False, 'import os\n'), ((8924, 8964), 'os.path.join', 'os.path.join', (['self._store_dir', '"""staging"""'], {}), "(self._store_dir, 'staging')\n", (8936, 8964), False, 'import os\n'), ((9013, 9056), 'os.path.join', 'os.path.join', (['self._store_dir', '"""challenges"""'], {}), "(self._store_dir, 'challenges')\n", (9025, 9056), False, 'import 
os\n'), ((9509, 9581), 'os.path.join', 'os.path.join', (['self._store_dir', '"""domains"""', 'domain', '"""fallback-pubcert.pem"""'], {}), "(self._store_dir, 'domains', domain, 'fallback-pubcert.pem')\n", (9521, 9581), False, 'import os\n'), ((9630, 9690), 'os.path.join', 'os.path.join', (['self._store_dir', '"""staging"""', 'domain', '"""job.json"""'], {}), "(self._store_dir, 'staging', domain, 'job.json')\n", (9642, 9690), False, 'import os\n'), ((9734, 9785), 'shutil.rmtree', 'shutil.rmtree', (['self._store_dir'], {'ignore_errors': '(False)'}), '(self._store_dir, ignore_errors=False)\n', (9747, 9785), False, 'import shutil\n'), ((9794, 9831), 'shutil.copytree', 'shutil.copytree', (['src', 'self._store_dir'], {}), '(src, self._store_dir)\n', (9809, 9831), False, 'import shutil\n'), ((12875, 12896), 'copy.deepcopy', 'copy.deepcopy', (['actual'], {}), '(actual)\n', (12888, 12896), False, 'import copy\n'), ((21206, 21249), 're.compile', 're.compile', (['"""OCSP response: +([^=\\\\n]+)\\\\n"""'], {}), "('OCSP response: +([^=\\\\n]+)\\\\n')\n", (21216, 21249), False, 'import re\n'), ((21686, 21727), 're.compile', 're.compile', (['"""Verify return code:\\\\s*(.+)"""'], {}), "('Verify return code:\\\\s*(.+)')\n", (21696, 21727), False, 'import re\n'), ((828, 856), 'inspect.getfile', 'inspect.getfile', (['MDTestSetup'], {}), '(MDTestSetup)\n', (843, 856), False, 'import inspect\n'), ((993, 1022), 'os.path.exists', 'os.path.exists', (['conf_dest_dir'], {}), '(conf_dest_dir)\n', (1007, 1022), False, 'import os\n'), ((1036, 1062), 'os.makedirs', 'os.makedirs', (['conf_dest_dir'], {}), '(conf_dest_dir)\n', (1047, 1062), False, 'import os\n'), ((1132, 1164), 'os.path.join', 'os.path.join', (['conf_src_dir', 'name'], {}), '(conf_src_dir, name)\n', (1144, 1164), False, 'import os\n'), ((1181, 1212), 're.match', 're.match', (['"""(.+).template"""', 'name'], {}), "('(.+).template', name)\n", (1189, 1212), False, 'import re\n'), ((4363, 4393), 'os.path.join', 'os.path.join', (['"""state"""', 'dirpath'], {}), "('state', dirpath)\n", (4375, 4393), False, 'import os\n'), ((6614, 6656), 'pytest.skip', 'pytest.skip', ([], {'msg': '"""ACME server not running"""'}), "(msg='ACME server not running')\n", (6625, 6656), False, 'import pytest\n'), ((6886, 6943), 'pytest.fail', 'pytest.fail', ([], {'msg': '"""ACME server not running"""', 'pytrace': '(False)'}), "(msg='ACME server not running', pytrace=False)\n", (6897, 6943), False, 'import pytest\n'), ((7412, 7463), 'shutil.rmtree', 'shutil.rmtree', (['self._store_dir'], {'ignore_errors': '(False)'}), '(self._store_dir, ignore_errors=False)\n', (7425, 7463), False, 'import shutil\n'), ((7643, 7674), 'os.path.exists', 'os.path.exists', (['self._store_dir'], {}), '(self._store_dir)\n', (7657, 7674), False, 'import os\n'), ((7688, 7716), 'os.makedirs', 'os.makedirs', (['self._store_dir'], {}), '(self._store_dir)\n', (7699, 7716), False, 'import os\n'), ((8131, 8173), 'shutil.rmtree', 'shutil.rmtree', (['dirpath'], {'ignore_errors': '(True)'}), '(dirpath, ignore_errors=True)\n', (8144, 8173), False, 'import shutil\n'), ((9888, 9929), 'os.path.join', 'os.path.join', (['self._store_dir', '"""accounts"""'], {}), "(self._store_dir, 'accounts')\n", (9900, 9929), False, 'import os\n'), ((10302, 10314), 'json.load', 'json.load', (['f'], {}), '(f)\n', (10311, 10314), False, 'import json\n'), ((12545, 12576), 'datetime.datetime.now', 'datetime.now', (['not_before.tzinfo'], {}), '(not_before.tzinfo)\n', (12557, 12576), False, 'from datetime import datetime, timedelta\n'), ((12604, 
12634), 'datetime.datetime.now', 'datetime.now', (['not_after.tzinfo'], {}), '(not_after.tzinfo)\n', (12616, 12634), False, 'from datetime import datetime, timedelta\n'), ((13172, 13188), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (13182, 13188), False, 'import os\n'), ((16888, 16899), 'time.time', 'time.time', ([], {}), '()\n', (16897, 16899), False, 'import time\n'), ((17798, 17813), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (17808, 17813), False, 'import time\n'), ((18065, 18076), 'time.time', 'time.time', ([], {}), '()\n', (18074, 18076), False, 'import time\n'), ((18662, 18673), 'time.time', 'time.time', ([], {}), '()\n', (18671, 18673), False, 'import time\n'), ((19157, 19172), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (19167, 19172), False, 'import time\n'), ((19259, 19270), 'time.time', 'time.time', ([], {}), '()\n', (19268, 19270), False, 'import time\n'), ((19386, 19407), 'os.path.isfile', 'os.path.isfile', (['fpath'], {}), '(fpath)\n', (19400, 19407), False, 'import os\n'), ((19449, 19464), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (19459, 19464), False, 'import time\n'), ((20271, 20312), 'os.path.join', 'os.path.join', (['self._store_dir', '"""accounts"""'], {}), "(self._store_dir, 'accounts')\n", (20283, 20312), False, 'import os\n'), ((20352, 20399), 'os.path.join', 'os.path.join', (['self._store_dir', '"""accounts"""', 'acct'], {}), "(self._store_dir, 'accounts', acct)\n", (20364, 20399), False, 'import os\n'), ((21454, 21497), 're.compile', 're.compile', (['"""OCSP Response Status:\\\\s*(.+)"""'], {}), "('OCSP Response Status:\\\\s*(.+)')\n", (21464, 21497), False, 'import re\n'), ((21989, 22000), 'time.time', 'time.time', ([], {}), '()\n', (21998, 22000), False, 'import time\n'), ((22269, 22284), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (22279, 22284), False, 'import time\n'), ((1336, 1360), 'os.path.isfile', 'os.path.isfile', (['src_path'], {}), '(src_path)\n', (1350, 1360), False, 'import os\n'), ((3506, 3564), 'pyhttpd.certs.CertificateSpec', 'CertificateSpec', ([], {'domains': "['localhost']", 'key_type': '"""rsa2048"""'}), "(domains=['localhost'], key_type='rsa2048')\n", (3521, 3564), False, 'from pyhttpd.certs import CertificateSpec\n'), ((5528, 5573), 're.sub', 're.sub', (['"""[_]"""', '"""-"""', 'request.node.originalname'], {}), "('[_]', '-', request.node.originalname)\n", (5534, 5573), False, 'import re\n'), ((6729, 6751), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(0.5)'}), '(seconds=0.5)\n', (6738, 6751), False, 'from datetime import datetime, timedelta\n'), ((7842, 7880), 'os.path.join', 'os.path.join', (['self._store_dir', 'dirpath'], {}), '(self._store_dir, dirpath)\n', (7854, 7880), False, 'import os\n'), ((13039, 13053), 'os.lstat', 'os.lstat', (['path'], {}), '(path)\n', (13047, 13053), False, 'import os\n'), ((17006, 17017), 'time.time', 'time.time', ([], {}), '()\n', (17015, 17017), False, 'import time\n'), ((17750, 17765), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (17760, 17765), False, 'import time\n'), ((18132, 18143), 'time.time', 'time.time', ([], {}), '()\n', (18141, 18143), False, 'import time\n'), ((18515, 18530), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (18525, 18530), False, 'import time\n'), ((18719, 18730), 'time.time', 'time.time', ([], {}), '()\n', (18728, 18730), False, 'import time\n'), ((19316, 19327), 'time.time', 'time.time', ([], {}), '()\n', (19325, 19327), False, 'import time\n'), ((22046, 22057), 'time.time', 'time.time', 
([], {}), '()\n', (22055, 22057), False, 'import time\n'), ((2361, 2387), 'inspect.getfile', 'inspect.getfile', (['MDTestEnv'], {}), '(MDTestEnv)\n', (2376, 2387), False, 'import inspect\n'), ((3982, 4054), 're.compile', 're.compile', (['""".*certificate with serial \\\\S+ has no OCSP responder URL.*"""'], {}), "('.*certificate with serial \\\\S+ has no OCSP responder URL.*')\n", (3992, 4054), False, 'import re\n'), ((8323, 8358), 'os.path.join', 'os.path.join', (['dirpath', '"""authz.json"""'], {}), "(dirpath, 'authz.json')\n", (8335, 8358), False, 'import os\n'), ((1400, 1433), 'os.path.join', 'os.path.join', (['conf_dest_dir', 'name'], {}), '(conf_dest_dir, name)\n', (1412, 1433), False, 'import os\n'), ((3413, 3433), 'datetime.timedelta', 'timedelta', ([], {'days': '(-100)'}), '(days=-100)\n', (3422, 3433), False, 'from datetime import datetime, timedelta\n'), ((3472, 3491), 'datetime.timedelta', 'timedelta', ([], {'days': '(-10)'}), '(days=-10)\n', (3481, 3491), False, 'from datetime import datetime, timedelta\n')] |
ajrice6713/bw-messaging-emulator | models/create_message_response.py | d1be4976e2486ec91b419597afc8411c78ebfda7 | import datetime
import json
import random
import string
from typing import Dict
from sms_counter import SMSCounter
class CreateMessageResponse:
def __init__(self, request):
self.id = self.generate_id()
self.owner = request['from']
self.applicationId = request['applicationId']
self.time = str(datetime.datetime.utcnow().isoformat())
self.segmentCount = 1
self.direction = 'out'
if type(request['to']) is str:
self.to = [request['to']]
else:
self.to = request['to']
self.mfrom = request['from']
if 'media' in request:
self.media = request['media']
if 'text' in request:
self.text = request['text']
if 'tag' in request:
self.tag = request['tag']
if 'priority' in request:
self.priority = request['priority']
def calculate_segments(self, message) -> int:
count = SMSCounter.count(message)
return count['messages']
def generate_id(self) -> str:
pre = random.randint(1400000000000,1799999999999)
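        # 13-digit prefix in the range of a millisecond epoch timestamp,
        # followed by 16 random lowercase characters.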
return str(pre) + ''.join(random.choice(string.ascii_lowercase) for x in range(16))
def to_json(self) -> str:
dict_response = {
'id': self.id,
'owner': self.owner,
'applicationId': self.applicationId,
'time': self.time,
'direction': self.direction,
'to': self.to,
'from': self.mfrom
}
if hasattr(self, 'media'): dict_response['media'] = self.media
if hasattr(self, 'text'):
dict_response['text'] = self.text
dict_response['segmentCount'] = self.calculate_segments(self.text)
if hasattr(self, 'tag'): dict_response['tag'] = self.tag
if hasattr(self, 'priority'): dict_response['priority'] = self.priority
return json.dumps(dict_response)
| [((964, 989), 'sms_counter.SMSCounter.count', 'SMSCounter.count', (['message'], {}), '(message)\n', (980, 989), False, 'from sms_counter import SMSCounter\n'), ((1077, 1121), 'random.randint', 'random.randint', (['(1400000000000)', '(1799999999999)'], {}), '(1400000000000, 1799999999999)\n', (1091, 1121), False, 'import random\n'), ((1923, 1948), 'json.dumps', 'json.dumps', (['dict_response'], {}), '(dict_response)\n', (1933, 1948), False, 'import json\n'), ((333, 359), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (357, 359), False, 'import datetime\n'), ((1155, 1192), 'random.choice', 'random.choice', (['string.ascii_lowercase'], {}), '(string.ascii_lowercase)\n', (1168, 1192), False, 'import random\n')] |
victor95pc/ccxt | python/ccxt/async_support/uex.py | 5c3e606296a1b15852a35f1330b645f451fa08d6 | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
# -----------------------------------------------------------------------------
try:
basestring # Python 3
except NameError:
basestring = str # Python 2
import json
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidAddress
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import ExchangeNotAvailable
class uex (Exchange):
def describe(self):
return self.deep_extend(super(uex, self).describe(), {
'id': 'uex',
'name': 'UEX',
'countries': ['SG', 'US'],
'version': 'v1.0.3',
'rateLimit': 1000,
'certified': False,
# new metainfo interface
'has': {
'CORS': False,
'fetchMyTrades': True,
'fetchOHLCV': True,
'fetchOrder': True,
'fetchOpenOrders': True,
'fetchClosedOrders': True,
'fetchDepositAddress': True,
'fetchDeposits': True,
'fetchWithdrawals': True,
'withdraw': True,
},
'timeframes': {
'1m': '1',
'5m': '5',
'15m': '15',
'30m': '30',
'1h': '60',
'2h': '120',
'3h': '180',
'4h': '240',
'6h': '360',
'12h': '720',
'1d': '1440',
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/43999923-051d9884-9e1f-11e8-965a-76948cb17678.jpg',
'api': 'https://open-api.uex.com/open/api',
'www': 'https://www.uex.com',
'doc': 'https://download.uex.com/doc/UEX-API-English-1.0.3.pdf',
'fees': 'https://www.uex.com/footer/ufees.html',
'referral': 'https://www.uex.com/signup.html?code=VAGQLL',
},
'api': {
'public': {
'get': [
'common/coins', # funding limits
'common/symbols',
'get_records', # ohlcvs
'get_ticker',
'get_trades',
'market_dept', # dept here is not a typo... they mean depth
],
},
'private': {
'get': [
'deposit_address_list',
'withdraw_address_list',
'deposit_history',
'withdraw_history',
'user/account',
'market', # an assoc array of market ids to corresponding prices traded most recently(prices of last trades per market)
'order_info',
'new_order', # a list of currently open orders
'all_order',
'all_trade',
],
'post': [
'create_order',
'cancel_order',
'create_withdraw',
],
},
},
'fees': {
'trading': {
'tierBased': False,
'percentage': True,
'maker': 0.0010,
'taker': 0.0010,
},
},
'exceptions': {
# descriptions from ↓ exchange
# '0': 'no error', # succeed
'4': InsufficientFunds, # {"code":"4","msg":"余额不足:0E-16","data":null}
'5': InvalidOrder, # fail to order {"code":"5","msg":"Price fluctuates more than1000.0%","data":null}
'6': InvalidOrder, # the quantity value less than the minimum one {"code":"6","msg":"数量小于最小值:0.001","data":null}
'7': InvalidOrder, # the quantity value more than the maximum one {"code":"7","msg":"数量大于最大值:10000","data":null}
'8': InvalidOrder, # fail to cancel order
'9': ExchangeError, # transaction be frozen
'13': ExchangeError, # Sorry, the program made an error, please contact with the manager.
'19': InsufficientFunds, # Available balance is insufficient.
'22': OrderNotFound, # The order does not exist. {"code":"22","msg":"not exist order","data":null}
'23': InvalidOrder, # Lack of parameters of numbers of transaction
'24': InvalidOrder, # Lack of parameters of transaction price
'100001': ExchangeError, # System is abnormal
'100002': ExchangeNotAvailable, # Update System
'100004': ExchangeError, # {"code":"100004","msg":"request parameter illegal","data":null}
'100005': AuthenticationError, # {"code":"100005","msg":"request sign illegal","data":null}
'100007': PermissionDenied, # illegal IP
'110002': ExchangeError, # unknown currency code
'110003': AuthenticationError, # fund password error
'110004': AuthenticationError, # fund password error
'110005': InsufficientFunds, # Available balance is insufficient.
'110020': AuthenticationError, # Username does not exist.
'110023': AuthenticationError, # Phone number is registered.
'110024': AuthenticationError, # Email box is registered.
'110025': PermissionDenied, # Account is locked by background manager
'110032': PermissionDenied, # The user has no authority to do self operation.
'110033': ExchangeError, # fail to recharge
'110034': ExchangeError, # fail to withdraw
'-100': ExchangeError, # {"code":"-100","msg":"Your request path is not exist or you can try method GET/POST.","data":null}
'-1000': ExchangeNotAvailable, # {"msg":"System maintenancenot ","code":"-1000","data":null}
},
'requiredCredentials': {
'apiKey': True,
'secret': True,
},
'options': {
'createMarketBuyOrderRequiresPrice': True,
'limits': {
'BTC/USDT': {'amount': {'min': 0.001}, 'price': {'min': 0.01}},
'ETH/USDT': {'amount': {'min': 0.001}, 'price': {'min': 0.01}},
'BCH/USDT': {'amount': {'min': 0.001}, 'price': {'min': 0.01}},
'ETH/BTC': {'amount': {'min': 0.001}, 'price': {'min': 0.000001}},
'BCH/BTC': {'amount': {'min': 0.001}, 'price': {'min': 0.000001}},
'LEEK/ETH': {'amount': {'min': 10}, 'price': {'min': 10}},
'CTXC/ETH': {'amount': {'min': 10}, 'price': {'min': 10}},
'COSM/ETH': {'amount': {'min': 10}, 'price': {'min': 10}},
'MANA/ETH': {'amount': {'min': 10}, 'price': {'min': 10}},
'LBA/BTC': {'amount': {'min': 10}, 'price': {'min': 10}},
'OLT/ETH': {'amount': {'min': 10}, 'price': {'min': 10}},
'DTA/ETH': {'amount': {'min': 10}, 'price': {'min': 10}},
'KNT/ETH': {'amount': {'min': 10}, 'price': {'min': 10}},
'REN/ETH': {'amount': {'min': 10}, 'price': {'min': 10}},
'LBA/ETH': {'amount': {'min': 10}, 'price': {'min': 10}},
'EXC/ETH': {'amount': {'min': 10}, 'price': {'min': 10}},
'ZIL/ETH': {'amount': {'min': 10}, 'price': {'min': 10}},
'RATING/ETH': {'amount': {'min': 100}, 'price': {'min': 100}},
'CENNZ/ETH': {'amount': {'min': 10}, 'price': {'min': 10}},
'TTC/ETH': {'amount': {'min': 10}, 'price': {'min': 10}},
},
},
})
def calculate_fee(self, symbol, type, side, amount, price, takerOrMaker='taker', params={}):
market = self.markets[symbol]
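        # The fee is charged in the quote currency on sells (rate * amount * price)
        # and in the base currency on buys (rate * amount).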
key = 'quote'
rate = market[takerOrMaker]
cost = float(self.cost_to_precision(symbol, amount * rate))
if side == 'sell':
cost *= price
else:
key = 'base'
return {
'type': takerOrMaker,
'currency': market[key],
'rate': rate,
'cost': float(self.currency_to_precision(market[key], cost)),
}
async def fetch_markets(self, params={}):
response = await self.publicGetCommonSymbols()
#
# {code: "0",
# msg: "suc",
# data: [{ symbol: "btcusdt",
# count_coin: "usdt",
# amount_precision: 3,
# base_coin: "btc",
# price_precision: 2 },
# { symbol: "ethusdt",
# count_coin: "usdt",
# amount_precision: 3,
# base_coin: "eth",
# price_precision: 2 },
# { symbol: "ethbtc",
# count_coin: "btc",
# amount_precision: 3,
# base_coin: "eth",
# price_precision: 6 }]}
#
result = []
markets = response['data']
for i in range(0, len(markets)):
market = markets[i]
id = market['symbol']
baseId = market['base_coin']
quoteId = market['count_coin']
base = baseId.upper()
quote = quoteId.upper()
base = self.common_currency_code(base)
quote = self.common_currency_code(quote)
symbol = base + '/' + quote
precision = {
'amount': market['amount_precision'],
'price': market['price_precision'],
}
active = True
defaultLimits = self.safe_value(self.options['limits'], symbol, {})
limits = self.deep_extend({
'amount': {
'min': None,
'max': None,
},
'price': {
'min': None,
'max': None,
},
'cost': {
'min': None,
'max': None,
},
}, defaultLimits)
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'active': active,
'info': market,
'precision': precision,
'limits': limits,
})
return result
async def fetch_balance(self, params={}):
await self.load_markets()
response = await self.privateGetUserAccount(params)
#
# {code: "0",
# msg: "suc",
# data: {total_asset: "0.00000000",
# coin_list: [{ normal: "0.00000000",
# btcValuatin: "0.00000000",
# locked: "0.00000000",
# coin: "usdt" },
# { normal: "0.00000000",
# btcValuatin: "0.00000000",
# locked: "0.00000000",
# coin: "btc" },
# { normal: "0.00000000",
# btcValuatin: "0.00000000",
# locked: "0.00000000",
# coin: "eth" },
# { normal: "0.00000000",
# btcValuatin: "0.00000000",
# locked: "0.00000000",
# coin: "ren" }]}}
#
balances = response['data']['coin_list']
result = {'info': balances}
for i in range(0, len(balances)):
balance = balances[i]
currencyId = balance['coin']
code = currencyId.upper()
if currencyId in self.currencies_by_id:
code = self.currencies_by_id[currencyId]['code']
else:
code = self.common_currency_code(code)
account = self.account()
free = float(balance['normal'])
used = float(balance['locked'])
total = self.sum(free, used)
account['free'] = free
account['used'] = used
account['total'] = total
result[code] = account
return self.parse_balance(result)
async def fetch_order_book(self, symbol, limit=None, params={}):
await self.load_markets()
response = await self.publicGetMarketDept(self.extend({
'symbol': self.market_id(symbol),
'type': 'step0', # step1, step2 from most detailed to least detailed
}, params))
#
# {code: "0",
# msg: "suc",
# data: {tick: {asks: [["0.05824200", 9.77],
# ["0.05830000", 7.81],
# ["0.05832900", 8.59],
# ["0.10000000", 0.001] ],
# bids: [["0.05780000", 8.25],
# ["0.05775000", 8.12],
# ["0.05773200", 8.57],
# ["0.00010000", 0.79] ],
# time: 1533412622463 }} }
#
timestamp = self.safe_integer(response['data']['tick'], 'time')
return self.parse_order_book(response['data']['tick'], timestamp)
def parse_ticker(self, ticker, market=None):
#
# {code: "0",
# msg: "suc",
# data: {symbol: "ETHBTC",
# high: 0.058426,
# vol: 19055.875,
# last: 0.058019,
# low: 0.055802,
# change: 0.03437271,
# buy: "0.05780000",
# sell: "0.05824200",
# time: 1533413083184} }
#
timestamp = self.safe_integer(ticker, 'time')
symbol = None
if market is None:
marketId = self.safe_string(ticker, 'symbol')
marketId = marketId.lower()
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
if market is not None:
symbol = market['symbol']
last = self.safe_float(ticker, 'last')
change = self.safe_float(ticker, 'change')
percentage = change * 100
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_float(ticker, 'high'),
'low': self.safe_float(ticker, 'low'),
'bid': self.safe_float(ticker, 'buy'),
'bidVolume': None,
'ask': self.safe_float(ticker, 'sell'),
'askVolume': None,
'vwap': None,
'open': None,
'close': last,
'last': last,
'previousClose': None,
'change': None,
'percentage': percentage,
'average': None,
'baseVolume': self.safe_float(ticker, 'vol'),
'quoteVolume': None,
'info': ticker,
}
async def fetch_ticker(self, symbol, params={}):
await self.load_markets()
market = self.market(symbol)
response = await self.publicGetGetTicker(self.extend({
'symbol': market['id'],
}, params))
#
# {code: "0",
# msg: "suc",
# data: {symbol: "ETHBTC",
# high: 0.058426,
# vol: 19055.875,
# last: 0.058019,
# low: 0.055802,
# change: 0.03437271,
# buy: "0.05780000",
# sell: "0.05824200",
# time: 1533413083184} }
#
return self.parse_ticker(response['data'], market)
def parse_trade(self, trade, market=None):
#
# public fetchTrades
#
# { amount: 0.88,
# create_time: 1533414358000,
# price: 0.058019,
# id: 406531,
# type: "sell" },
#
# private fetchMyTrades, fetchOrder, fetchOpenOrders, fetchClosedOrders
#
# { volume: "0.010",
# side: "SELL",
# feeCoin: "BTC",
# price: "0.05816200",
# fee: "0.00000029",
# ctime: 1533616674000,
# deal_price: "0.00058162",
# id: 415779,
# type: "卖出",
# bid_id: 3669539, # only in fetchMyTrades
# ask_id: 3669583, # only in fetchMyTrades
# }
#
timestamp = self.safe_integer_2(trade, 'create_time', 'ctime')
if timestamp is None:
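            # Fallback: created_at appears to omit the year here, so the parser
            # pins it to 2018 when building an ISO 8601 timestamp.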
timestring = self.safe_string(trade, 'created_at')
if timestring is not None:
timestamp = self.parse8601('2018-' + timestring + ':00Z')
side = self.safe_string_2(trade, 'side', 'type')
if side is not None:
side = side.lower()
id = self.safe_string(trade, 'id')
symbol = None
if market is not None:
symbol = market['symbol']
price = self.safe_float(trade, 'price')
amount = self.safe_float_2(trade, 'volume', 'amount')
cost = self.safe_float(trade, 'deal_price')
if cost is None:
if amount is not None:
if price is not None:
cost = amount * price
fee = None
feeCost = self.safe_float_2(trade, 'fee', 'deal_fee')
if feeCost is not None:
feeCurrency = self.safe_string(trade, 'feeCoin')
if feeCurrency is not None:
currencyId = feeCurrency.lower()
if currencyId in self.currencies_by_id:
feeCurrency = self.currencies_by_id[currencyId]['code']
fee = {
'cost': feeCost,
'currency': feeCurrency,
}
orderIdField = 'ask_id' if (side == 'sell') else 'bid_id'
orderId = self.safe_string(trade, orderIdField)
return {
'id': id,
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'order': orderId,
'type': None,
'side': side,
'price': price,
'amount': amount,
'cost': cost,
'fee': fee,
}
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
response = await self.publicGetGetTrades(self.extend({
'symbol': market['id'],
}, params))
#
# {code: "0",
# msg: "suc",
# data: [{ amount: 0.88,
# create_time: 1533414358000,
# price: 0.058019,
# id: 406531,
# type: "sell" },
# { amount: 4.88,
# create_time: 1533414331000,
# price: 0.058019,
# id: 406530,
# type: "buy" },
# { amount: 0.5,
# create_time: 1533414311000,
# price: 0.058019,
# id: 406529,
# type: "sell" }]}
#
return self.parse_trades(response['data'], market, since, limit)
def parse_ohlcv(self, ohlcv, market=None, timeframe='1d', since=None, limit=None):
return [
ohlcv[0] * 1000, # timestamp
ohlcv[1], # open
ohlcv[2], # high
ohlcv[3], # low
ohlcv[4], # close
ohlcv[5], # volume
]
async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
'period': self.timeframes[timeframe], # in minutes
}
response = await self.publicGetGetRecords(self.extend(request, params))
#
# {code: '0',
# msg: 'suc',
# data:
# [[1533402420, 0.057833, 0.057833, 0.057833, 0.057833, 18.1],
# [1533402480, 0.057833, 0.057833, 0.057833, 0.057833, 29.88],
# [1533402540, 0.057833, 0.057833, 0.057833, 0.057833, 29.06] ]}
#
return self.parse_ohlcvs(response['data'], market, timeframe, since, limit)
async def create_order(self, symbol, type, side, amount, price=None, params={}):
if type == 'market':
# for market buy it requires the amount of quote currency to spend
if side == 'buy':
if self.options['createMarketBuyOrderRequiresPrice']:
if price is None:
raise InvalidOrder(self.id + " createOrder() requires the price argument with market buy orders to calculate total order cost(amount to spend), where cost = amount * price. Supply a price argument to createOrder() call if you want the cost to be calculated for you from price and amount, or, alternatively, add .options['createMarketBuyOrderRequiresPrice'] = False to supply the cost in the amount argument(the exchange-specific behaviour)")
else:
amount = amount * price
await self.load_markets()
market = self.market(symbol)
orderType = '1' if (type == 'limit') else '2'
orderSide = side.upper()
amountToPrecision = self.amount_to_precision(symbol, amount)
request = {
'side': orderSide,
'type': orderType,
'symbol': market['id'],
'volume': amountToPrecision,
# An excerpt from their docs:
# side required Trading Direction
# type required pending order types,1:Limit-price Delegation 2:Market- price Delegation
# volume required
# Purchase Quantity(polysemy,multiplex field)
# type=1: Quantity of buying and selling
# type=2: Buying represents gross price, and selling represents total number
# Trading restriction user/me-user information
# price optional Delegation Price:type=2:self parameter is no use.
# fee_is_user_exchange_coin optional
# 0,when making transactions with all platform currencies,
# self parameter represents whether to use them to pay
# fees or not and 0 is no, 1 is yes.
}
priceToPrecision = None
if type == 'limit':
priceToPrecision = self.price_to_precision(symbol, price)
request['price'] = priceToPrecision
response = await self.privatePostCreateOrder(self.extend(request, params))
#
# {code: '0',
# msg: 'suc',
# data: {'order_id' : 34343} }
#
result = self.parse_order(response['data'], market)
return self.extend(result, {
'info': response,
'symbol': symbol,
'type': type,
'side': side,
'status': 'open',
'price': float(priceToPrecision),
'amount': float(amountToPrecision),
})
async def cancel_order(self, id, symbol=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'order_id': id,
'symbol': market['id'],
}
response = await self.privatePostCancelOrder(self.extend(request, params))
order = self.safe_value(response, 'data', {})
return self.extend(self.parse_order(order), {
'id': id,
'symbol': symbol,
'status': 'canceled',
})
def parse_order_status(self, status):
statuses = {
'0': 'open', # INIT(0,"primary order,untraded and not enter the market")
'1': 'open', # NEW_(1,"new order,untraded and enter the market ")
'2': 'closed', # FILLED(2,"complete deal")
'3': 'open', # PART_FILLED(3,"partial deal")
'4': 'canceled', # CANCELED(4,"already withdrawn")
'5': 'canceled', # PENDING_CANCEL(5,"pending withdrawak")
'6': 'canceled', # EXPIRED(6,"abnormal orders")
}
if status in statuses:
return statuses[status]
return status
def parse_order(self, order, market=None):
#
# createOrder
#
# {"order_id":34343}
#
# fetchOrder, fetchOpenOrders, fetchClosedOrders
#
# { side: "BUY",
# total_price: "0.10000000",
# created_at: 1510993841000,
# avg_price: "0.10000000",
# countCoin: "btc",
# source: 1,
# type: 1,
# side_msg: "买入",
# volume: "1.000",
# price: "0.10000000",
# source_msg: "WEB",
# status_msg: "完全成交",
# deal_volume: "1.00000000",
# id: 424,
# remain_volume: "0.00000000",
# baseCoin: "eth",
# tradeList: [{ volume: "1.000",
# feeCoin: "YLB",
# price: "0.10000000",
# fee: "0.16431104",
# ctime: 1510996571195,
# deal_price: "0.10000000",
# id: 306,
# type: "买入" }],
# status: 2 }
#
# fetchOrder
#
# {trade_list: [{ volume: "0.010",
# feeCoin: "BTC",
# price: "0.05816200",
# fee: "0.00000029",
# ctime: 1533616674000,
# deal_price: "0.00058162",
# id: 415779,
# type: "卖出" }],
# order_info: { side: "SELL",
# total_price: "0.010",
# created_at: 1533616673000,
# avg_price: "0.05816200",
# countCoin: "btc",
# source: 3,
# type: 2,
# side_msg: "卖出",
# volume: "0.010",
# price: "0.00000000",
# source_msg: "API",
# status_msg: "完全成交",
# deal_volume: "0.01000000",
# id: 3669583,
# remain_volume: "0.00000000",
# baseCoin: "eth",
# tradeList: [{ volume: "0.010",
# feeCoin: "BTC",
# price: "0.05816200",
# fee: "0.00000029",
# ctime: 1533616674000,
# deal_price: "0.00058162",
# id: 415779,
# type: "卖出" }],
# status: 2 }}
#
side = self.safe_string(order, 'side')
if side is not None:
side = side.lower()
status = self.parse_order_status(self.safe_string(order, 'status'))
symbol = None
if market is None:
baseId = self.safe_string(order, 'baseCoin')
quoteId = self.safe_string(order, 'countCoin')
marketId = baseId + quoteId
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
else:
                if (baseId is not None) and (quoteId is not None):
base = baseId.upper()
quote = quoteId.upper()
base = self.common_currency_code(base)
quote = self.common_currency_code(quote)
symbol = base + '/' + quote
if market is not None:
symbol = market['symbol']
timestamp = self.safe_integer(order, 'created_at')
if timestamp is None:
timestring = self.safe_string(order, 'created_at')
if timestring is not None:
timestamp = self.parse8601('2018-' + timestring + ':00Z')
lastTradeTimestamp = None
fee = None
average = self.safe_float(order, 'avg_price')
price = self.safe_float(order, 'price')
if price == 0:
price = average
amount = self.safe_float(order, 'volume')
filled = self.safe_float(order, 'deal_volume')
remaining = self.safe_float(order, 'remain_volume')
cost = self.safe_float(order, 'total_price')
id = self.safe_string_2(order, 'id', 'order_id')
trades = None
tradeList = self.safe_value(order, 'tradeList', [])
feeCurrencies = {}
feeCost = None
for i in range(0, len(tradeList)):
trade = self.parse_trade(tradeList[i], market)
if feeCost is None:
feeCost = 0
feeCost = feeCost + trade['fee']['cost']
tradeFeeCurrency = trade['fee']['currency']
feeCurrencies[tradeFeeCurrency] = trade['fee']['cost']
if trades is None:
trades = []
lastTradeTimestamp = trade['timestamp']
trades.append(self.extend(trade, {
'order': id,
}))
if feeCost is not None:
feeCurrency = None
keys = list(feeCurrencies.keys())
numCurrencies = len(keys)
if numCurrencies == 1:
feeCurrency = keys[0]
fee = {
'cost': feeCost,
'currency': feeCurrency,
}
result = {
'info': order,
'id': id,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': lastTradeTimestamp,
'symbol': symbol,
'type': 'limit',
'side': side,
'price': price,
'cost': cost,
'average': average,
'amount': amount,
'filled': filled,
'remaining': remaining,
'status': status,
'fee': fee,
'trades': trades,
}
return result
async def fetch_orders_with_method(self, method, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrdersWithMethod() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
# pageSize optional page size
# page optional page number
'symbol': market['id'],
}
if limit is not None:
request['pageSize'] = limit
response = await getattr(self, method)(self.extend(request, params))
#
# {code: "0",
# msg: "suc",
# data: { count: 1,
# orderList: [{ side: "SELL",
# total_price: "0.010",
# created_at: 1533616673000,
# avg_price: "0.05816200",
# countCoin: "btc",
# source: 3,
# type: 2,
# side_msg: "卖出",
# volume: "0.010",
# price: "0.00000000",
# source_msg: "API",
# status_msg: "完全成交",
# deal_volume: "0.01000000",
# id: 3669583,
# remain_volume: "0.00000000",
# baseCoin: "eth",
# tradeList: [{ volume: "0.010",
# feeCoin: "BTC",
# price: "0.05816200",
# fee: "0.00000029",
# ctime: 1533616674000,
# deal_price: "0.00058162",
# id: 415779,
# type: "卖出" }],
# status: 2 }]} }
#
# privateGetNewOrder returns resultList, privateGetAllOrder returns orderList
orders = self.safe_value_2(response['data'], 'orderList', 'resultList', [])
return self.parse_orders(orders, market, since, limit)
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
        return await self.fetch_orders_with_method('privateGetNewOrder', symbol, since, limit, params)
async def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
        return await self.fetch_orders_with_method('privateGetAllOrder', symbol, since, limit, params)
async def fetch_order(self, id, symbol=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'order_id': id,
'symbol': market['id'],
}
response = await self.privateGetOrderInfo(self.extend(request, params))
#
# {code: "0",
# msg: "suc",
# data: {trade_list: [{ volume: "0.010",
# feeCoin: "BTC",
# price: "0.05816200",
# fee: "0.00000029",
# ctime: 1533616674000,
# deal_price: "0.00058162",
# id: 415779,
# type: "卖出" }],
# order_info: { side: "SELL",
# total_price: "0.010",
# created_at: 1533616673000,
# avg_price: "0.05816200",
# countCoin: "btc",
# source: 3,
# type: 2,
# side_msg: "卖出",
# volume: "0.010",
# price: "0.00000000",
# source_msg: "API",
# status_msg: "完全成交",
# deal_volume: "0.01000000",
# id: 3669583,
# remain_volume: "0.00000000",
# baseCoin: "eth",
# tradeList: [{ volume: "0.010",
# feeCoin: "BTC",
# price: "0.05816200",
# fee: "0.00000029",
# ctime: 1533616674000,
# deal_price: "0.00058162",
# id: 415779,
# type: "卖出" }],
# status: 2 }} }
#
return self.parse_order(response['data']['order_info'], market)
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchMyTrades requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
# pageSize optional page size
# page optional page number
'symbol': market['id'],
}
if limit is not None:
request['pageSize'] = limit
response = await self.privateGetAllTrade(self.extend(request, params))
#
# {code: "0",
# msg: "suc",
# data: { count: 1,
# resultList: [{ volume: "0.010",
# side: "SELL",
# feeCoin: "BTC",
# price: "0.05816200",
# fee: "0.00000029",
# ctime: 1533616674000,
# deal_price: "0.00058162",
# id: 415779,
# type: "卖出",
# bid_id: 3669539,
# ask_id: 3669583 }]} }
#
trades = self.safe_value(response['data'], 'resultList', [])
return self.parse_trades(trades, market, since, limit)
async def fetch_deposit_address(self, code, params={}):
await self.load_markets()
currency = self.currency(code)
request = {
'coin': currency['id'],
}
# https://github.com/UEX-OpenAPI/API_Docs_en/wiki/Query-deposit-address-of-assigned-token
response = await self.privateGetDepositAddressList(self.extend(request, params))
#
# {
# "code": "0",
# "msg": "suc",
# "data": {
# "addressList": [
# {
# "address": "0x198803ef8e0df9e8812c0105421885e843e6d2e2",
# "tag": "",
# },
# ],
# },
# }
#
data = self.safe_value(response, 'data')
if data is None:
raise InvalidAddress(self.id + ' privateGetDepositAddressList() returned no data')
addressList = self.safe_value(data, 'addressList')
if addressList is None:
raise InvalidAddress(self.id + ' privateGetDepositAddressList() returned no address list')
numAddresses = len(addressList)
if numAddresses < 1:
raise InvalidAddress(self.id + ' privatePostDepositAddresses() returned no addresses')
firstAddress = addressList[0]
address = self.safe_string(firstAddress, 'address')
tag = self.safe_string(firstAddress, 'tag')
self.check_address(address)
return {
'currency': code,
'address': address,
'tag': tag,
'info': response,
}
async def fetch_transactions_by_type(self, type, code=None, since=None, limit=None, params={}):
if code is None:
raise ArgumentsRequired(self.id + ' fetchWithdrawals requires a currency code argument')
currency = self.currency(code)
request = {
'coin': currency['id'],
}
if limit is not None:
request['pageSize'] = limit # default 10
transactionType = 'deposit' if (type == 'deposit') else 'withdraw' # instead of withdrawal...
method = 'privateGet' + self.capitalize(transactionType) + 'History'
# https://github.com/UEX-OpenAPI/API_Docs_en/wiki/Query-deposit-record-of-assigned-token
# https://github.com/UEX-OpenAPI/API_Docs_en/wiki/Query-withdraw-record-of-assigned-token
response = await getattr(self, method)(self.extend(request, params))
#
# {code: "0",
# msg: "suc",
# data: {depositList: [{ createdAt: 1533615955000,
# amount: "0.01",
# updateAt: 1533616311000,
# txid: "0x0922fde6ab8270fe6eb31cb5a37dc732d96dc8193f81cf46c4ab29fde…",
# tag: "",
# confirmations: 30,
# addressTo: "0x198803ef8e0df9e8812c0105421885e843e6d2e2",
# status: 1,
# coin: "ETH" }]} }
#
# {
# "code": "0",
# "msg": "suc",
# "data": {
# "withdrawList": [{
# "updateAt": 1540344965000,
# "createdAt": 1539311971000,
# "status": 0,
# "addressTo": "tz1d7DXJXU3AKWh77gSmpP7hWTeDYs8WF18q",
# "tag": "100128877",
# "id": 5,
# "txid": "",
# "fee": 0.0,
# "amount": "1",
# "symbol": "XTZ"
# }]
# }
# }
#
transactions = self.safe_value(response['data'], transactionType + 'List')
return self.parse_transactions_by_type(type, transactions, code, since, limit)
async def fetch_deposits(self, code=None, since=None, limit=None, params={}):
return await self.fetch_transactions_by_type('deposit', code, since, limit, params)
async def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
return await self.fetch_transactions_by_type('withdrawal', code, since, limit, params)
def parse_transactions_by_type(self, type, transactions, code=None, since=None, limit=None):
result = []
for i in range(0, len(transactions)):
transaction = self.parse_transaction(self.extend({
'type': type,
}, transactions[i]))
result.append(transaction)
return self.filterByCurrencySinceLimit(result, code, since, limit)
def parse_transaction(self, transaction, currency=None):
#
# deposits
#
# { createdAt: 1533615955000,
# amount: "0.01",
# updateAt: 1533616311000,
# txid: "0x0922fde6ab8270fe6eb31cb5a37dc732d96dc8193f81cf46c4ab29fde…",
# tag: "",
# confirmations: 30,
# addressTo: "0x198803ef8e0df9e8812c0105421885e843e6d2e2",
# status: 1,
# coin: "ETH" }]} }
#
# withdrawals
#
# {
# "updateAt": 1540344965000,
# "createdAt": 1539311971000,
# "status": 0,
# "addressTo": "tz1d7DXJXU3AKWh77gSmpP7hWTeDYs8WF18q",
# "tag": "100128877",
# "id": 5,
# "txid": "",
# "fee": 0.0,
# "amount": "1",
# "symbol": "XTZ"
# }
#
id = self.safe_string(transaction, 'id')
txid = self.safe_string(transaction, 'txid')
timestamp = self.safe_integer(transaction, 'createdAt')
updated = self.safe_integer(transaction, 'updateAt')
code = None
currencyId = self.safe_string_2(transaction, 'symbol', 'coin')
currency = self.safe_value(self.currencies_by_id, currencyId)
if currency is not None:
code = currency['code']
else:
code = self.common_currency_code(currencyId)
address = self.safe_string(transaction, 'addressTo')
tag = self.safe_string(transaction, 'tag')
amount = self.safe_float(transaction, 'amount')
status = self.parse_transaction_status(self.safe_string(transaction, 'status'))
type = self.safe_string(transaction, 'type') # injected from the outside
feeCost = self.safe_float(transaction, 'fee')
if (type == 'deposit') and(feeCost is None):
feeCost = 0
return {
'info': transaction,
'id': id,
'currency': code,
'amount': amount,
'address': address,
'tag': tag,
'status': status,
'type': type,
'updated': updated,
'txid': txid,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'fee': {
'currency': code,
'cost': feeCost,
},
}
def parse_transaction_status(self, status):
statuses = {
'0': 'pending', # unaudited
'1': 'ok', # audited
'2': 'failed', # audit failed
'3': 'pending', # "payment"
'4': 'failed', # payment failed
'5': 'ok',
'6': 'canceled',
}
return self.safe_string(statuses, status, status)
async def withdraw(self, code, amount, address, tag=None, params={}):
await self.load_markets()
fee = self.safe_float(params, 'fee')
if fee is None:
            raise ArgumentsRequired(self.id + ' requires a "fee" extra parameter in its last argument')
self.check_address(address)
currency = self.currency(code)
request = {
'coin': currency['id'],
'address': address, # only supports existing addresses in your withdraw address list
'amount': amount,
'fee': fee, # balance >= self.sum(amount, fee)
}
if tag is not None:
request['tag'] = tag
# https://github.com/UEX-OpenAPI/API_Docs_en/wiki/Withdraw
response = await self.privatePostCreateWithdraw(self.extend(request, params))
id = None
return {
'info': response,
'id': id,
}
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'] + '/' + self.implode_params(path, params)
if api == 'public':
if params:
url += '?' + self.urlencode(params)
else:
self.check_required_credentials()
timestamp = str(self.seconds())
auth = ''
query = self.keysort(self.extend(params, {
'api_key': self.apiKey,
'time': timestamp,
}))
keys = list(query.keys())
for i in range(0, len(keys)):
key = keys[i]
auth += key
auth += str(query[key])
signature = self.hash(self.encode(auth + self.secret))
if query:
if method == 'GET':
url += '?' + self.urlencode(query) + '&sign=' + signature
else:
body = self.urlencode(query) + '&sign=' + signature
headers = {
'Content-Type': 'application/x-www-form-urlencoded',
}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
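    # Hedged illustration of the signing scheme implemented above (the credential values are
    # placeholders, not real keys): sorted query pairs are concatenated as key1value1key2value2...,
    # the API secret is appended, and the digest of that string becomes the `sign` parameter.
    #   query = {'api_key': 'KEY', 'time': '1533616673'}
    #   auth = 'api_keyKEYtime1533616673' + secret
    #   sign = self.hash(self.encode(auth))  # sent as '&sign=...' in the querystring or form body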
def handle_errors(self, httpCode, reason, url, method, headers, body, response):
if not isinstance(body, basestring):
return # fallback to default error handler
if len(body) < 2:
return # fallback to default error handler
if (body[0] == '{') or (body[0] == '['):
response = json.loads(body)
#
# {"code":"0","msg":"suc","data":{}}
#
code = self.safe_string(response, 'code')
# message = self.safe_string(response, 'msg')
feedback = self.id + ' ' + self.json(response)
exceptions = self.exceptions
if code != '0':
if code in exceptions:
raise exceptions[code](feedback)
else:
raise ExchangeError(feedback)
| [((33242, 33328), 'ccxt.base.errors.ArgumentsRequired', 'ArgumentsRequired', (["(self.id + ' fetchOrdersWithMethod() requires a symbol argument')"], {}), "(self.id +\n ' fetchOrdersWithMethod() requires a symbol argument')\n", (33259, 33328), False, 'from ccxt.base.errors import ArgumentsRequired\n'), ((39101, 39173), 'ccxt.base.errors.ArgumentsRequired', 'ArgumentsRequired', (["(self.id + ' fetchMyTrades requires a symbol argument')"], {}), "(self.id + ' fetchMyTrades requires a symbol argument')\n", (39118, 39173), False, 'from ccxt.base.errors import ArgumentsRequired\n'), ((41385, 41461), 'ccxt.base.errors.InvalidAddress', 'InvalidAddress', (["(self.id + ' privateGetDepositAddressList() returned no data')"], {}), "(self.id + ' privateGetDepositAddressList() returned no data')\n", (41399, 41461), False, 'from ccxt.base.errors import InvalidAddress\n'), ((41571, 41659), 'ccxt.base.errors.InvalidAddress', 'InvalidAddress', (["(self.id + ' privateGetDepositAddressList() returned no address list')"], {}), "(self.id +\n ' privateGetDepositAddressList() returned no address list')\n", (41585, 41659), False, 'from ccxt.base.errors import InvalidAddress\n'), ((41743, 41828), 'ccxt.base.errors.InvalidAddress', 'InvalidAddress', (["(self.id + ' privatePostDepositAddresses() returned no addresses')"], {}), "(self.id + ' privatePostDepositAddresses() returned no addresses'\n )\n", (41757, 41828), False, 'from ccxt.base.errors import InvalidAddress\n'), ((42297, 42383), 'ccxt.base.errors.ArgumentsRequired', 'ArgumentsRequired', (["(self.id + ' fetchWithdrawals requires a currency code argument')"], {}), "(self.id +\n ' fetchWithdrawals requires a currency code argument')\n", (42314, 42383), False, 'from ccxt.base.errors import ArgumentsRequired\n'), ((48625, 48713), 'ccxt.base.errors.ArgumentsRequired', 'ArgumentsRequired', (['(self.id + \'requires a "fee" extra parameter in its last argument\')'], {}), '(self.id +\n \'requires a "fee" extra parameter in its last argument\')\n', (48642, 48713), False, 'from ccxt.base.errors import ArgumentsRequired\n'), ((50892, 50908), 'json.loads', 'json.loads', (['body'], {}), '(body)\n', (50902, 50908), False, 'import json\n'), ((51366, 51389), 'ccxt.base.errors.ExchangeError', 'ExchangeError', (['feedback'], {}), '(feedback)\n', (51379, 51389), False, 'from ccxt.base.errors import ExchangeError\n'), ((22721, 23165), 'ccxt.base.errors.InvalidOrder', 'InvalidOrder', (['(self.id +\n " createOrder() requires the price argument with market buy orders to calculate total order cost(amount to spend), where cost = amount * price. Supply a price argument to createOrder() call if you want the cost to be calculated for you from price and amount, or, alternatively, add .options[\'createMarketBuyOrderRequiresPrice\'] = False to supply the cost in the amount argument(the exchange-specific behaviour)"\n )'], {}), '(self.id +\n " createOrder() requires the price argument with market buy orders to calculate total order cost(amount to spend), where cost = amount * price. Supply a price argument to createOrder() call if you want the cost to be calculated for you from price and amount, or, alternatively, add .options[\'createMarketBuyOrderRequiresPrice\'] = False to supply the cost in the amount argument(the exchange-specific behaviour)"\n )\n', (22733, 23165), False, 'from ccxt.base.errors import InvalidOrder\n')] |
Mdlkxzmcp/various_python | Alpha & Beta/wootMath/decimalToBinaryFraction.py | be4f873c6263e3db11177bbccce2aa465514294d | def decimal_to_binary_fraction(x=0.5):
"""
Input: x, a float between 0 and 1
Returns binary representation of x
"""
p = 0
while ((2 ** p) * x) % 1 != 0:
# print('Remainder = ' + str((2**p)*x - int((2**p)*x)))
p += 1
num = int(x * (2 ** p))
result = ''
if num == 0:
result = '0'
while num > 0:
result = str(num % 2) + result
num //= 2
for i in range(p - len(result)):
result = '0' + result
result = result[0:-p] + '.' + result[-p:]
    return result

# If there is no integer p such that x*(2**p) is a whole number, then the internal
# representation is always an approximation.
# This suggests that testing floats for equality is not exact: use abs(x-y) < some
# small number, rather than x == y.
# Why does print(0.1) return 0.1, if the representation is not exact?
# Because the Python designers set it up this way to automatically round.
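
# A minimal, hedged demo (not part of the original file); the expected strings reflect the
# function's actual output, which leaves the integer part before the '.' empty.
if __name__ == '__main__':
    print(decimal_to_binary_fraction(0.625))  # '.101' because 0.625 = 1/2 + 1/8
    print(decimal_to_binary_fraction(0.5))    # '.1'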
| [] |
ajaysaini725/composer | composer/utils/run_directory.py | 00fbf95823cd50354b2410fbd88f06eaf0481662 | # Copyright 2021 MosaicML. All Rights Reserved.
import datetime
import logging
import os
import pathlib
import time
from composer.utils import dist
log = logging.getLogger(__name__)
_RUN_DIRECTORY_KEY = "COMPOSER_RUN_DIRECTORY"
_start_time_str = datetime.datetime.now().isoformat()
def get_node_run_directory() -> str:
"""Returns the run directory for the node. This folder is shared by all ranks on the node.
Returns:
str: The node run directory.
"""
node_run_directory = os.environ.get(_RUN_DIRECTORY_KEY, os.path.join("runs", _start_time_str))
if node_run_directory.endswith(os.path.sep):
# chop off the training slash so os.path.basename would work as expected
node_run_directory = node_run_directory[:-1]
os.makedirs(node_run_directory, exist_ok=True)
return os.path.abspath(node_run_directory)
def get_run_directory() -> str:
"""Returns the run directory for the current rank.
Returns:
str: The run directory.
"""
run_dir = os.path.join(get_node_run_directory(), f"rank_{dist.get_global_rank()}")
os.makedirs(run_dir, exist_ok=True)
return run_dir
def get_modified_files(modified_since_timestamp: float, *, ignore_hidden: bool = True):
"""Returns a list of files (recursively) in the run directory that have been modified since
``modified_since_timestamp``.
Args:
modified_since_timestamp (float): Minimum last modified timestamp(in seconds since EPOCH)
of files to include.
ignore_hidden (bool, optional): Whether to ignore hidden files and folders (default: ``True``)
Returns:
List[str]: List of filepaths that have been modified since ``modified_since_timestamp``
"""
modified_files = []
run_directory = get_run_directory()
if run_directory is None:
raise RuntimeError("Run directory is not defined")
for root, dirs, files in os.walk(run_directory):
del dirs # unused
for file in files:
if ignore_hidden and any(x.startswith(".") for x in file.split(os.path.sep)):
# skip hidden files and folders
continue
filepath = os.path.join(root, file)
modified_time = os.path.getmtime(filepath)
if modified_time >= modified_since_timestamp:
modified_files.append(filepath)
return modified_files
def get_run_directory_timestamp() -> float:
"""Returns the current timestamp on the run directory filesystem.
Note that the disk time can differ from system time (e.g. when using
network filesystems).
Returns:
float: the current timestamp on the run directory filesystem.
"""
run_directory = get_run_directory()
if run_directory is None:
raise RuntimeError("Run directory is not defined")
python_time = time.time()
touch_file = (pathlib.Path(run_directory) / f".{python_time}")
touch_file.touch()
new_last_uploaded_timestamp = os.path.getmtime(str(touch_file))
os.remove(str(touch_file))
return new_last_uploaded_timestamp
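
# Hedged usage sketch (the paths and environment override below are illustrative only):
#   os.environ[_RUN_DIRECTORY_KEY] = "/tmp/my_run"   # optional; defaults to runs/<start time>
#   run_dir = get_run_directory()                    # e.g. /tmp/my_run/rank_0
#   start = get_run_directory_timestamp()
#   ...training writes checkpoints and logs into run_dir...
#   new_files = get_modified_files(start)            # files modified since `start`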
| [((157, 184), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (174, 184), False, 'import logging\n'), ((770, 816), 'os.makedirs', 'os.makedirs', (['node_run_directory'], {'exist_ok': '(True)'}), '(node_run_directory, exist_ok=True)\n', (781, 816), False, 'import os\n'), ((828, 863), 'os.path.abspath', 'os.path.abspath', (['node_run_directory'], {}), '(node_run_directory)\n', (843, 863), False, 'import os\n'), ((1102, 1137), 'os.makedirs', 'os.makedirs', (['run_dir'], {'exist_ok': '(True)'}), '(run_dir, exist_ok=True)\n', (1113, 1137), False, 'import os\n'), ((1921, 1943), 'os.walk', 'os.walk', (['run_directory'], {}), '(run_directory)\n', (1928, 1943), False, 'import os\n'), ((2851, 2862), 'time.time', 'time.time', ([], {}), '()\n', (2860, 2862), False, 'import time\n'), ((251, 274), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (272, 274), False, 'import datetime\n'), ((544, 581), 'os.path.join', 'os.path.join', (['"""runs"""', '_start_time_str'], {}), "('runs', _start_time_str)\n", (556, 581), False, 'import os\n'), ((2881, 2908), 'pathlib.Path', 'pathlib.Path', (['run_directory'], {}), '(run_directory)\n', (2893, 2908), False, 'import pathlib\n'), ((2185, 2209), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (2197, 2209), False, 'import os\n'), ((2238, 2264), 'os.path.getmtime', 'os.path.getmtime', (['filepath'], {}), '(filepath)\n', (2254, 2264), False, 'import os\n'), ((1072, 1094), 'composer.utils.dist.get_global_rank', 'dist.get_global_rank', ([], {}), '()\n', (1092, 1094), False, 'from composer.utils import dist\n')] |
adi112100/newsapp | newsapp/migrations/0003_news.py | 7cdf6070299b4a8dcc950e7fcdfb82cf1a1d98cb | # Generated by Django 3.0.8 on 2020-07-11 08:10
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('newsapp', '0002_auto_20200711_1124'),
]
operations = [
migrations.CreateModel(
name='News',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateTimeField()),
('indian_news', models.TextField()),
('national_news', models.TextField()),
('international_news', models.TextField()),
('bollywood_news', models.TextField()),
('lifestyle_news', models.TextField()),
('sport_news', models.TextField()),
('business_news', models.TextField()),
('sharemarket_news', models.TextField()),
('corona_news', models.TextField()),
('space_news', models.TextField()),
('motivation_news', models.TextField()),
],
),
]
| [((328, 421), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (344, 421), False, 'from django.db import migrations, models\n'), ((445, 467), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (465, 467), False, 'from django.db import migrations, models\n'), ((502, 520), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (518, 520), False, 'from django.db import migrations, models\n'), ((557, 575), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (573, 575), False, 'from django.db import migrations, models\n'), ((617, 635), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (633, 635), False, 'from django.db import migrations, models\n'), ((673, 691), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (689, 691), False, 'from django.db import migrations, models\n'), ((729, 747), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (745, 747), False, 'from django.db import migrations, models\n'), ((781, 799), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (797, 799), False, 'from django.db import migrations, models\n'), ((836, 854), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (852, 854), False, 'from django.db import migrations, models\n'), ((894, 912), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (910, 912), False, 'from django.db import migrations, models\n'), ((947, 965), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (963, 965), False, 'from django.db import migrations, models\n'), ((999, 1017), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (1015, 1017), False, 'from django.db import migrations, models\n'), ((1056, 1074), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (1072, 1074), False, 'from django.db import migrations, models\n')] |
NazarioJL/faker_enum | src/enum/__init__.py | c2703cae232b229b4d4ab2b73757102453d541ab | # -*- coding: utf-8 -*-
from enum import Enum
from typing import TypeVar, Type, List, Iterable, cast
from faker.providers import BaseProvider
TEnum = TypeVar("TEnum", bound=Enum)
class EnumProvider(BaseProvider):
"""
A Provider for enums.
"""
def enum(self, enum_cls: Type[TEnum]) -> TEnum:
members: List[TEnum] = list(cast(Iterable[TEnum], enum_cls))
return self.random_element(members)
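
# Hedged self-check (not part of the library): the Color enum and the Faker wiring below are
# illustrative assumptions, included only to show how the provider is registered and called.
if __name__ == "__main__":
    from faker import Faker

    class Color(Enum):
        RED = 1
        GREEN = 2
        BLUE = 3

    fake = Faker()
    fake.add_provider(EnumProvider)
    print(fake.enum(Color))  # prints a random Color member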
| [((152, 180), 'typing.TypeVar', 'TypeVar', (['"""TEnum"""'], {'bound': 'Enum'}), "('TEnum', bound=Enum)\n", (159, 180), False, 'from typing import TypeVar, Type, List, Iterable, cast\n'), ((349, 380), 'typing.cast', 'cast', (['Iterable[TEnum]', 'enum_cls'], {}), '(Iterable[TEnum], enum_cls)\n', (353, 380), False, 'from typing import TypeVar, Type, List, Iterable, cast\n')] |
Varriount/sanic | tests/performance/bottle/simple_server.py | 55c36e0240dfeb03deccdeb5a53ca7fcfa728bff | # Run with: gunicorn --workers=1 --worker-class=meinheld.gmeinheld.MeinheldWorker -b :8000 simple_server:app
import bottle
import ujson
from bottle import route, run
@route("/")
def index():
return ujson.dumps({"test": True})
app = bottle.default_app()
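
# Hedged local-run helper (not in the original file): a quick smoke test without gunicorn/meinheld.
# After starting it, `curl localhost:8000/` should return {"test":true}.
if __name__ == "__main__":
    run(app, host="127.0.0.1", port=8000)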
| [((170, 180), 'bottle.route', 'route', (['"""/"""'], {}), "('/')\n", (175, 180), False, 'from bottle import route, run\n'), ((241, 261), 'bottle.default_app', 'bottle.default_app', ([], {}), '()\n', (259, 261), False, 'import bottle\n'), ((205, 232), 'ujson.dumps', 'ujson.dumps', (["{'test': True}"], {}), "({'test': True})\n", (216, 232), False, 'import ujson\n')] |
alvarocneto/alura_django | usuarios/views.py | da2d3619b30c9d1c8767fa910eb7253bc20eeb90 | from django.shortcuts import redirect
from django.shortcuts import render
from django.contrib.auth.models import User
from django.views.generic.base import View
from perfis.models import Perfil
from usuarios.forms import RegistrarUsuarioForm
class RegistrarUsuarioView(View):
template_name = 'registrar.html'
def get(self, request):
return render(request, self.template_name)
def post(self, request):
        # fill in the form
form = RegistrarUsuarioForm(request.POST)
        # check whether it is valid
if form.is_valid():
dados_form = form.data
            # create the user
usuario = User.objects.create_user(dados_form['nome'],
dados_form['email'],
dados_form['senha'])
            # create the profile
perfil = Perfil(nome=dados_form['nome'],
telefone=dados_form['telefone'],
nome_empresa=dados_form['nome_empresa'],
usuario=usuario)
            # save to the database
perfil.save()
            # redirect to index
return redirect('index')
        # we only get here if the form is not valid
        # return the form so the filled-in fields are shown again
return render(request, self.template_name, {'form': form})
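
# Hedged wiring sketch (the urls.py below is an assumption, not part of this file):
#   from django.conf.urls import url
#   from usuarios.views import RegistrarUsuarioView
#   urlpatterns = [url(r'^registrar/$', RegistrarUsuarioView.as_view(), name='registrar')]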
| [((360, 395), 'django.shortcuts.render', 'render', (['request', 'self.template_name'], {}), '(request, self.template_name)\n', (366, 395), False, 'from django.shortcuts import render\n'), ((476, 510), 'usuarios.forms.RegistrarUsuarioForm', 'RegistrarUsuarioForm', (['request.POST'], {}), '(request.POST)\n', (496, 510), False, 'from usuarios.forms import RegistrarUsuarioForm\n'), ((1432, 1483), 'django.shortcuts.render', 'render', (['request', 'self.template_name', "{'form': form}"], {}), "(request, self.template_name, {'form': form})\n", (1438, 1483), False, 'from django.shortcuts import render\n'), ((680, 770), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', (["dados_form['nome']", "dados_form['email']", "dados_form['senha']"], {}), "(dados_form['nome'], dados_form['email'],\n dados_form['senha'])\n", (704, 770), False, 'from django.contrib.auth.models import User\n'), ((927, 1053), 'perfis.models.Perfil', 'Perfil', ([], {'nome': "dados_form['nome']", 'telefone': "dados_form['telefone']", 'nome_empresa': "dados_form['nome_empresa']", 'usuario': 'usuario'}), "(nome=dados_form['nome'], telefone=dados_form['telefone'],\n nome_empresa=dados_form['nome_empresa'], usuario=usuario)\n", (933, 1053), False, 'from perfis.models import Perfil\n'), ((1275, 1292), 'django.shortcuts.redirect', 'redirect', (['"""index"""'], {}), "('index')\n", (1283, 1292), False, 'from django.shortcuts import redirect\n')] |
srsuper/BOT2020 | antolib/AntoCommon.py | 2cadfad470de62819b7aaa0f9ecf1e4b4052ea68 | ANTO_VER = '0.1.2'
| [] |
CPChain/fusion | cpc_fusion/pkgs/keys/main.py | 63b6913010e8e5b296a1900c59592c8fd1802c2e | from typing import (Any, Union, Type) # noqa: F401
from ..keys.datatypes import (
LazyBackend,
PublicKey,
PrivateKey,
Signature,
)
from eth_keys.exceptions import (
ValidationError,
)
from eth_keys.validation import (
validate_message_hash,
)
# These must be aliased due to a scoping issue in mypy
# https://github.com/python/mypy/issues/1775
_PublicKey = PublicKey
_PrivateKey = PrivateKey
_Signature = Signature
class KeyAPI(LazyBackend):
#
# datatype shortcuts
#
PublicKey = PublicKey # type: Type[_PublicKey]
PrivateKey = PrivateKey # type: Type[_PrivateKey]
Signature = Signature # type: Type[_Signature]
#
# Proxy method calls to the backends
#
def ecdsa_sign(self,
message_hash, # type: bytes
private_key # type: _PrivateKey
):
# type: (...) -> _Signature
validate_message_hash(message_hash)
if not isinstance(private_key, PrivateKey):
raise ValidationError(
"The `private_key` must be an instance of `eth_keys.datatypes.PrivateKey`"
)
signature = self.backend.ecdsa_sign(message_hash, private_key)
if not isinstance(signature, Signature):
raise ValidationError(
"Backend returned an invalid signature. Return value must be "
"an instance of `eth_keys.datatypes.Signature`"
)
return signature
def ecdsa_verify(self,
message_hash, # type: bytes
signature, # type: _Signature
public_key # type: _PublicKey
) -> bool:
if not isinstance(public_key, PublicKey):
raise ValidationError(
"The `public_key` must be an instance of `eth_keys.datatypes.PublicKey`"
)
return self.ecdsa_recover(message_hash, signature) == public_key
def ecdsa_recover(self,
message_hash, # type: bytes
signature # type: _Signature
):
# type: (...) -> _PublicKey
validate_message_hash(message_hash)
if not isinstance(signature, Signature):
raise ValidationError(
"The `signature` must be an instance of `eth_keys.datatypes.Signature`"
)
public_key = self.backend.ecdsa_recover(message_hash, signature)
if not isinstance(public_key, _PublicKey):
raise ValidationError(
"Backend returned an invalid public_key. Return value must be "
"an instance of `eth_keys.datatypes.PublicKey`"
)
return public_key
def private_key_to_public_key(self, private_key):
if not isinstance(private_key, PrivateKey):
raise ValidationError(
"The `private_key` must be an instance of `eth_keys.datatypes.PrivateKey`"
)
public_key = self.backend.private_key_to_public_key(private_key)
if not isinstance(public_key, PublicKey):
raise ValidationError(
"Backend returned an invalid public_key. Return value must be "
"an instance of `eth_keys.datatypes.PublicKey`"
)
return public_key
# This creates an easy to import backend which will lazily fetch whatever
# backend has been configured at runtime (as opposed to import or instantiation time).
lazy_key_api = KeyAPI(backend=None)
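
# Hedged usage sketch (the key bytes below are throwaway placeholders, never real secrets; a
# concrete ECC backend must be resolvable at runtime for the lazy backend to work):
#   priv = lazy_key_api.PrivateKey(b'\x01' * 32)
#   msg_hash = b'\x00' * 32                        # must be exactly 32 bytes
#   sig = lazy_key_api.ecdsa_sign(msg_hash, priv)
#   pub = lazy_key_api.private_key_to_public_key(priv)
#   assert lazy_key_api.ecdsa_verify(msg_hash, sig, pub)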
| [((912, 947), 'eth_keys.validation.validate_message_hash', 'validate_message_hash', (['message_hash'], {}), '(message_hash)\n', (933, 947), False, 'from eth_keys.validation import validate_message_hash\n'), ((2154, 2189), 'eth_keys.validation.validate_message_hash', 'validate_message_hash', (['message_hash'], {}), '(message_hash)\n', (2175, 2189), False, 'from eth_keys.validation import validate_message_hash\n'), ((1018, 1114), 'eth_keys.exceptions.ValidationError', 'ValidationError', (['"""The `private_key` must be an instance of `eth_keys.datatypes.PrivateKey`"""'], {}), "(\n 'The `private_key` must be an instance of `eth_keys.datatypes.PrivateKey`')\n", (1033, 1114), False, 'from eth_keys.exceptions import ValidationError\n'), ((1278, 1413), 'eth_keys.exceptions.ValidationError', 'ValidationError', (['"""Backend returned an invalid signature. Return value must be an instance of `eth_keys.datatypes.Signature`"""'], {}), "(\n 'Backend returned an invalid signature. Return value must be an instance of `eth_keys.datatypes.Signature`'\n )\n", (1293, 1413), False, 'from eth_keys.exceptions import ValidationError\n'), ((1760, 1854), 'eth_keys.exceptions.ValidationError', 'ValidationError', (['"""The `public_key` must be an instance of `eth_keys.datatypes.PublicKey`"""'], {}), "(\n 'The `public_key` must be an instance of `eth_keys.datatypes.PublicKey`')\n", (1775, 1854), False, 'from eth_keys.exceptions import ValidationError\n'), ((2257, 2350), 'eth_keys.exceptions.ValidationError', 'ValidationError', (['"""The `signature` must be an instance of `eth_keys.datatypes.Signature`"""'], {}), "(\n 'The `signature` must be an instance of `eth_keys.datatypes.Signature`')\n", (2272, 2350), False, 'from eth_keys.exceptions import ValidationError\n'), ((2518, 2654), 'eth_keys.exceptions.ValidationError', 'ValidationError', (['"""Backend returned an invalid public_key. Return value must be an instance of `eth_keys.datatypes.PublicKey`"""'], {}), "(\n 'Backend returned an invalid public_key. Return value must be an instance of `eth_keys.datatypes.PublicKey`'\n )\n", (2533, 2654), False, 'from eth_keys.exceptions import ValidationError\n'), ((2845, 2941), 'eth_keys.exceptions.ValidationError', 'ValidationError', (['"""The `private_key` must be an instance of `eth_keys.datatypes.PrivateKey`"""'], {}), "(\n 'The `private_key` must be an instance of `eth_keys.datatypes.PrivateKey`')\n", (2860, 2941), False, 'from eth_keys.exceptions import ValidationError\n'), ((3108, 3244), 'eth_keys.exceptions.ValidationError', 'ValidationError', (['"""Backend returned an invalid public_key. Return value must be an instance of `eth_keys.datatypes.PublicKey`"""'], {}), "(\n 'Backend returned an invalid public_key. Return value must be an instance of `eth_keys.datatypes.PublicKey`'\n )\n", (3123, 3244), False, 'from eth_keys.exceptions import ValidationError\n')] |
gadial/qiskit-terra | qiskit/pulse/transforms/canonicalization.py | 0fc83f44a6e80969875c738b2cee7bc33223e45f | # This code is part of Qiskit.
#
# (C) Copyright IBM 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Basic rescheduling functions which take schedule or instructions and return new schedules."""
import warnings
from collections import defaultdict
from typing import List, Optional, Iterable, Union
import numpy as np
from qiskit.pulse import channels as chans, exceptions, instructions
from qiskit.pulse.exceptions import PulseError
from qiskit.pulse.exceptions import UnassignedDurationError
from qiskit.pulse.instruction_schedule_map import InstructionScheduleMap
from qiskit.pulse.instructions import directives
from qiskit.pulse.schedule import Schedule, ScheduleBlock, ScheduleComponent
def block_to_schedule(block: ScheduleBlock) -> Schedule:
"""Convert ``ScheduleBlock`` to ``Schedule``.
Args:
block: A ``ScheduleBlock`` to convert.
Returns:
Scheduled pulse program.
Raises:
UnassignedDurationError: When any instruction duration is not assigned.
"""
if not block.is_schedulable():
raise UnassignedDurationError(
'All instruction durations should be assigned before creating `Schedule`.'
'Please check `.parameters` to find unassigned parameter objects.')
schedule = Schedule(name=block.name, metadata=block.metadata)
for op_data in block.instructions:
if isinstance(op_data, ScheduleBlock):
context_schedule = block_to_schedule(op_data)
schedule.append(context_schedule, inplace=True)
else:
schedule.append(op_data, inplace=True)
# transform with defined policy
return block.alignment_context.align(schedule)
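
# Hedged illustration (channel, pulse shape and duration are arbitrary choices):
#   from qiskit import pulse
#   with pulse.build() as block:                    # pulse.build() yields a ScheduleBlock
#       pulse.play(pulse.Constant(100, 0.1), pulse.DriveChannel(0))
#   sched = block_to_schedule(block)                # fixed-time Schedule with the same instructions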
def compress_pulses(schedules: List[Schedule]) -> List[Schedule]:
"""Optimization pass to replace identical pulses.
Args:
schedules: Schedules to compress.
Returns:
Compressed schedules.
"""
existing_pulses = []
new_schedules = []
for schedule in schedules:
new_schedule = Schedule(name=schedule.name, metadata=schedule.metadata)
for time, inst in schedule.instructions:
if isinstance(inst, instructions.Play):
if inst.pulse in existing_pulses:
idx = existing_pulses.index(inst.pulse)
identical_pulse = existing_pulses[idx]
new_schedule.insert(time,
instructions.Play(identical_pulse,
inst.channel,
inst.name),
inplace=True)
else:
existing_pulses.append(inst.pulse)
new_schedule.insert(time, inst, inplace=True)
else:
new_schedule.insert(time, inst, inplace=True)
new_schedules.append(new_schedule)
return new_schedules
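
# Hedged note (the schedule names below are placeholders): identical Play pulses across the
# input schedules are replaced by a single shared pulse object, reducing the number of unique
# waveforms that need to be uploaded.
#   compressed = compress_pulses([sched_a, sched_b])   # same timing, fewer distinct pulses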
def flatten(program: Schedule) -> Schedule:
"""Flatten (inline) any called nodes into a Schedule tree with no nested children.
Args:
program: Pulse program to remove nested structure.
Returns:
Flatten pulse program.
Raises:
PulseError: When invalid data format is given.
"""
if isinstance(program, Schedule):
return Schedule(*program.instructions, name=program.name, metadata=program.metadata)
else:
raise PulseError(f'Invalid input program {program.__class__.__name__} is specified.')
def inline_subroutines(program: Union[Schedule, ScheduleBlock]) -> Union[Schedule, ScheduleBlock]:
"""Recursively remove call instructions and inline the respective subroutine instructions.
Assigned parameter values, which are stored in the parameter table, are also applied.
The subroutine is copied before the parameter assignment to avoid mutation problem.
Args:
program: A program which may contain the subroutine, i.e. ``Call`` instruction.
Returns:
A schedule without subroutine.
Raises:
PulseError: When input program is not valid data format.
"""
if isinstance(program, Schedule):
return _inline_schedule(program)
elif isinstance(program, ScheduleBlock):
return _inline_block(program)
else:
raise PulseError(f'Invalid program {program.__class__.__name__} is specified.')
def _inline_schedule(schedule: Schedule) -> Schedule:
"""A helper function to inline subroutine of schedule.
.. note:: If subroutine is ``ScheduleBlock`` it is converted into Schedule to get ``t0``.
"""
ret_schedule = Schedule(name=schedule.name,
metadata=schedule.metadata)
for t0, inst in schedule.instructions:
if isinstance(inst, instructions.Call):
# bind parameter
subroutine = inst.assigned_subroutine()
# convert into schedule if block is given
if isinstance(subroutine, ScheduleBlock):
subroutine = block_to_schedule(subroutine)
# recursively inline the program
inline_schedule = _inline_schedule(subroutine)
ret_schedule.insert(t0, inline_schedule, inplace=True)
else:
ret_schedule.insert(t0, inst, inplace=True)
return ret_schedule
def _inline_block(block: ScheduleBlock) -> ScheduleBlock:
"""A helper function to inline subroutine of schedule block.
.. note:: If subroutine is ``Schedule`` the function raises an error.
"""
ret_block = ScheduleBlock(alignment_context=block.alignment_context,
name=block.name,
metadata=block.metadata)
for inst in block.instructions:
if isinstance(inst, instructions.Call):
# bind parameter
subroutine = inst.assigned_subroutine()
if isinstance(subroutine, Schedule):
raise PulseError(f'A subroutine {subroutine.name} is a pulse Schedule. '
'This program cannot be inserted into ScheduleBlock because '
't0 associated with instruction will be lost.')
# recursively inline the program
inline_block = _inline_block(subroutine)
ret_block.append(inline_block, inplace=True)
else:
ret_block.append(inst, inplace=True)
return ret_block
def remove_directives(schedule: Schedule) -> Schedule:
"""Remove directives.
Args:
schedule: A schedule to remove compiler directives.
Returns:
A schedule without directives.
"""
return schedule.exclude(instruction_types=[directives.Directive])
def remove_trivial_barriers(schedule: Schedule) -> Schedule:
"""Remove trivial barriers with 0 or 1 channels.
Args:
schedule: A schedule to remove trivial barriers.
Returns:
schedule: A schedule without trivial barriers
"""
def filter_func(inst):
return (isinstance(inst[1], directives.RelativeBarrier) and
len(inst[1].channels) < 2)
return schedule.exclude(filter_func)
def align_measures(schedules: Iterable[ScheduleComponent],
inst_map: Optional[InstructionScheduleMap] = None,
cal_gate: str = 'u3',
max_calibration_duration: Optional[int] = None,
align_time: Optional[int] = None,
align_all: Optional[bool] = True,
) -> List[Schedule]:
"""Return new schedules where measurements occur at the same physical time.
This transformation will align the first :class:`qiskit.pulse.Acquire` on
every channel to occur at the same time.
Minimum measurement wait time (to allow for calibration pulses) is enforced
and may be set with ``max_calibration_duration``.
By default only instructions containing a :class:`~qiskit.pulse.AcquireChannel`
or :class:`~qiskit.pulse.MeasureChannel` will be shifted. If you wish to keep
the relative timing of all instructions in the schedule set ``align_all=True``.
This method assumes that ``MeasureChannel(i)`` and ``AcquireChannel(i)``
correspond to the same qubit and the acquire/play instructions
should be shifted together on these channels.
.. jupyter-kernel:: python3
:id: align_measures
.. jupyter-execute::
from qiskit import pulse
from qiskit.pulse import transforms
with pulse.build() as sched:
with pulse.align_sequential():
pulse.play(pulse.Constant(10, 0.5), pulse.DriveChannel(0))
pulse.play(pulse.Constant(10, 1.), pulse.MeasureChannel(0))
pulse.acquire(20, pulse.AcquireChannel(0), pulse.MemorySlot(0))
sched_shifted = sched << 20
aligned_sched, aligned_sched_shifted = transforms.align_measures([sched, sched_shifted])
assert aligned_sched == aligned_sched_shifted
If it is desired to only shift acquisition and measurement stimulus instructions
set the flag ``align_all=False``:
.. jupyter-execute::
aligned_sched, aligned_sched_shifted = transforms.align_measures(
[sched, sched_shifted],
align_all=False,
)
assert aligned_sched != aligned_sched_shifted
Args:
schedules: Collection of schedules to be aligned together
inst_map: Mapping of circuit operations to pulse schedules
cal_gate: The name of the gate to inspect for the calibration time
max_calibration_duration: If provided, inst_map and cal_gate will be ignored
align_time: If provided, this will be used as final align time.
align_all: Shift all instructions in the schedule such that they maintain
their relative alignment with the shifted acquisition instruction.
If ``False`` only the acquisition and measurement pulse instructions
will be shifted.
Returns:
The input list of schedules transformed to have their measurements aligned.
Raises:
PulseError: If the provided alignment time is negative.
"""
def get_first_acquire_times(schedules):
"""Return a list of first acquire times for each schedule."""
acquire_times = []
for schedule in schedules:
visited_channels = set()
qubit_first_acquire_times = defaultdict(lambda: None)
for time, inst in schedule.instructions:
if (isinstance(inst, instructions.Acquire) and
inst.channel not in visited_channels):
visited_channels.add(inst.channel)
qubit_first_acquire_times[inst.channel.index] = time
acquire_times.append(qubit_first_acquire_times)
return acquire_times
def get_max_calibration_duration(inst_map, cal_gate):
"""Return the time needed to allow for readout discrimination calibration pulses."""
# TODO (qiskit-terra #5472): fix behavior of this.
max_calibration_duration = 0
for qubits in inst_map.qubits_with_instruction(cal_gate):
cmd = inst_map.get(cal_gate, qubits, np.pi, 0, np.pi)
max_calibration_duration = max(cmd.duration, max_calibration_duration)
return max_calibration_duration
if align_time is not None and align_time < 0:
raise exceptions.PulseError("Align time cannot be negative.")
first_acquire_times = get_first_acquire_times(schedules)
# Extract the maximum acquire in every schedule across all acquires in the schedule.
# If there are no acquires in the schedule default to 0.
max_acquire_times = [max(0, *times.values()) for times in first_acquire_times]
if align_time is None:
if max_calibration_duration is None:
if inst_map:
max_calibration_duration = get_max_calibration_duration(inst_map, cal_gate)
else:
max_calibration_duration = 0
align_time = max(max_calibration_duration, *max_acquire_times)
# Shift acquires according to the new scheduled time
new_schedules = []
for sched_idx, schedule in enumerate(schedules):
new_schedule = Schedule(name=schedule.name, metadata=schedule.metadata)
stop_time = schedule.stop_time
if align_all:
if first_acquire_times[sched_idx]:
shift = align_time - max_acquire_times[sched_idx]
else:
shift = align_time - stop_time
else:
shift = 0
for time, inst in schedule.instructions:
measurement_channels = {
chan.index for chan in inst.channels if
isinstance(chan, (chans.MeasureChannel, chans.AcquireChannel))
}
if measurement_channels:
sched_first_acquire_times = first_acquire_times[sched_idx]
max_start_time = max(sched_first_acquire_times[chan]
for chan in measurement_channels if
chan in sched_first_acquire_times)
shift = align_time - max_start_time
if shift < 0:
warnings.warn(
"The provided alignment time is scheduling an acquire instruction "
"earlier than it was scheduled for in the original Schedule. "
"This may result in an instruction being scheduled before t=0 and "
"an error being raised."
)
new_schedule.insert(time+shift, inst, inplace=True)
new_schedules.append(new_schedule)
return new_schedules
def add_implicit_acquires(schedule: ScheduleComponent,
meas_map: List[List[int]]
) -> Schedule:
"""Return a new schedule with implicit acquires from the measurement mapping replaced by
explicit ones.
.. warning:: Since new acquires are being added, Memory Slots will be set to match the
qubit index. This may overwrite your specification.
Args:
schedule: Schedule to be aligned.
meas_map: List of lists of qubits that are measured together.
Returns:
A ``Schedule`` with the additional acquisition instructions.
"""
new_schedule = Schedule(name=schedule.name, metadata=schedule.metadata)
acquire_map = dict()
for time, inst in schedule.instructions:
if isinstance(inst, instructions.Acquire):
if inst.mem_slot and inst.mem_slot.index != inst.channel.index:
warnings.warn("One of your acquires was mapped to a memory slot which didn't match"
" the qubit index. I'm relabeling them to match.")
# Get the label of all qubits that are measured with the qubit(s) in this instruction
all_qubits = []
for sublist in meas_map:
if inst.channel.index in sublist:
all_qubits.extend(sublist)
# Replace the old acquire instruction by a new one explicitly acquiring all qubits in
# the measurement group.
for i in all_qubits:
explicit_inst = instructions.Acquire(inst.duration,
chans.AcquireChannel(i),
mem_slot=chans.MemorySlot(i),
kernel=inst.kernel,
discriminator=inst.discriminator)
if time not in acquire_map:
new_schedule.insert(time, explicit_inst, inplace=True)
acquire_map = {time: {i}}
elif i not in acquire_map[time]:
new_schedule.insert(time, explicit_inst, inplace=True)
acquire_map[time].add(i)
else:
new_schedule.insert(time, inst, inplace=True)
return new_schedule
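
# Hedged illustration for add_implicit_acquires (the meas_map value is an assumption): with
# meas_map=[[0, 1]], an Acquire on qubit 0 is expanded into explicit acquires on qubits 0 and 1,
# each writing to the memory slot that matches its qubit index.
#   full_sched = add_implicit_acquires(sched, meas_map=[[0, 1]])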
| [((1646, 1696), 'qiskit.pulse.schedule.Schedule', 'Schedule', ([], {'name': 'block.name', 'metadata': 'block.metadata'}), '(name=block.name, metadata=block.metadata)\n', (1654, 1696), False, 'from qiskit.pulse.schedule import Schedule, ScheduleBlock, ScheduleComponent\n'), ((4991, 5047), 'qiskit.pulse.schedule.Schedule', 'Schedule', ([], {'name': 'schedule.name', 'metadata': 'schedule.metadata'}), '(name=schedule.name, metadata=schedule.metadata)\n', (4999, 5047), False, 'from qiskit.pulse.schedule import Schedule, ScheduleBlock, ScheduleComponent\n'), ((5904, 6006), 'qiskit.pulse.schedule.ScheduleBlock', 'ScheduleBlock', ([], {'alignment_context': 'block.alignment_context', 'name': 'block.name', 'metadata': 'block.metadata'}), '(alignment_context=block.alignment_context, name=block.name,\n metadata=block.metadata)\n', (5917, 6006), False, 'from qiskit.pulse.schedule import Schedule, ScheduleBlock, ScheduleComponent\n'), ((14721, 14777), 'qiskit.pulse.schedule.Schedule', 'Schedule', ([], {'name': 'schedule.name', 'metadata': 'schedule.metadata'}), '(name=schedule.name, metadata=schedule.metadata)\n', (14729, 14777), False, 'from qiskit.pulse.schedule import Schedule, ScheduleBlock, ScheduleComponent\n'), ((1438, 1611), 'qiskit.pulse.exceptions.UnassignedDurationError', 'UnassignedDurationError', (['"""All instruction durations should be assigned before creating `Schedule`.Please check `.parameters` to find unassigned parameter objects."""'], {}), "(\n 'All instruction durations should be assigned before creating `Schedule`.Please check `.parameters` to find unassigned parameter objects.'\n )\n", (1461, 1611), False, 'from qiskit.pulse.exceptions import UnassignedDurationError\n'), ((2384, 2440), 'qiskit.pulse.schedule.Schedule', 'Schedule', ([], {'name': 'schedule.name', 'metadata': 'schedule.metadata'}), '(name=schedule.name, metadata=schedule.metadata)\n', (2392, 2440), False, 'from qiskit.pulse.schedule import Schedule, ScheduleBlock, ScheduleComponent\n'), ((3699, 3776), 'qiskit.pulse.schedule.Schedule', 'Schedule', (['*program.instructions'], {'name': 'program.name', 'metadata': 'program.metadata'}), '(*program.instructions, name=program.name, metadata=program.metadata)\n', (3707, 3776), False, 'from qiskit.pulse.schedule import Schedule, ScheduleBlock, ScheduleComponent\n'), ((3801, 3880), 'qiskit.pulse.exceptions.PulseError', 'PulseError', (['f"""Invalid input program {program.__class__.__name__} is specified."""'], {}), "(f'Invalid input program {program.__class__.__name__} is specified.')\n", (3811, 3880), False, 'from qiskit.pulse.exceptions import PulseError\n'), ((11773, 11828), 'qiskit.pulse.exceptions.PulseError', 'exceptions.PulseError', (['"""Align time cannot be negative."""'], {}), "('Align time cannot be negative.')\n", (11794, 11828), False, 'from qiskit.pulse import channels as chans, exceptions, instructions\n'), ((12604, 12660), 'qiskit.pulse.schedule.Schedule', 'Schedule', ([], {'name': 'schedule.name', 'metadata': 'schedule.metadata'}), '(name=schedule.name, metadata=schedule.metadata)\n', (12612, 12660), False, 'from qiskit.pulse.schedule import Schedule, ScheduleBlock, ScheduleComponent\n'), ((4680, 4753), 'qiskit.pulse.exceptions.PulseError', 'PulseError', (['f"""Invalid program {program.__class__.__name__} is specified."""'], {}), "(f'Invalid program {program.__class__.__name__} is specified.')\n", (4690, 4753), False, 'from qiskit.pulse.exceptions import PulseError\n'), ((10781, 10807), 'collections.defaultdict', 'defaultdict', (['(lambda : None)'], {}), 
'(lambda : None)\n', (10792, 10807), False, 'from collections import defaultdict\n'), ((6299, 6479), 'qiskit.pulse.exceptions.PulseError', 'PulseError', (['f"""A subroutine {subroutine.name} is a pulse Schedule. This program cannot be inserted into ScheduleBlock because t0 associated with instruction will be lost."""'], {}), "(\n f'A subroutine {subroutine.name} is a pulse Schedule. This program cannot be inserted into ScheduleBlock because t0 associated with instruction will be lost.'\n )\n", (6309, 6479), False, 'from qiskit.pulse.exceptions import PulseError\n'), ((13594, 13833), 'warnings.warn', 'warnings.warn', (['"""The provided alignment time is scheduling an acquire instruction earlier than it was scheduled for in the original Schedule. This may result in an instruction being scheduled before t=0 and an error being raised."""'], {}), "(\n 'The provided alignment time is scheduling an acquire instruction earlier than it was scheduled for in the original Schedule. This may result in an instruction being scheduled before t=0 and an error being raised.'\n )\n", (13607, 13833), False, 'import warnings\n'), ((14992, 15133), 'warnings.warn', 'warnings.warn', (['"""One of your acquires was mapped to a memory slot which didn\'t match the qubit index. I\'m relabeling them to match."""'], {}), '(\n "One of your acquires was mapped to a memory slot which didn\'t match the qubit index. I\'m relabeling them to match."\n )\n', (15005, 15133), False, 'import warnings\n'), ((15707, 15730), 'qiskit.pulse.channels.AcquireChannel', 'chans.AcquireChannel', (['i'], {}), '(i)\n', (15727, 15730), True, 'from qiskit.pulse import channels as chans, exceptions, instructions\n'), ((2798, 2857), 'qiskit.pulse.instructions.Play', 'instructions.Play', (['identical_pulse', 'inst.channel', 'inst.name'], {}), '(identical_pulse, inst.channel, inst.name)\n', (2815, 2857), False, 'from qiskit.pulse import channels as chans, exceptions, instructions\n'), ((15794, 15813), 'qiskit.pulse.channels.MemorySlot', 'chans.MemorySlot', (['i'], {}), '(i)\n', (15810, 15813), True, 'from qiskit.pulse import channels as chans, exceptions, instructions\n')] |
ananelson/oacensus | tests/test_scraper.py | 87916c92ab1233bcf82a481113017dfb8d7701b9 | from oacensus.scraper import Scraper
from oacensus.commands import defaults
class TestScraper(Scraper):
"""
Scraper for testing scraper methods.
"""
aliases = ['testscraper']
def scrape(self):
pass
def process(self):
pass
def test_hashcode():
scraper = Scraper.create_instance('testscraper', defaults)
assert len(scraper.hashcode()) == 32
def test_run():
scraper = Scraper.create_instance('testscraper', defaults)
scraper.run()
| [((301, 349), 'oacensus.scraper.Scraper.create_instance', 'Scraper.create_instance', (['"""testscraper"""', 'defaults'], {}), "('testscraper', defaults)\n", (324, 349), False, 'from oacensus.scraper import Scraper\n'), ((422, 470), 'oacensus.scraper.Scraper.create_instance', 'Scraper.create_instance', (['"""testscraper"""', 'defaults'], {}), "('testscraper', defaults)\n", (445, 470), False, 'from oacensus.scraper import Scraper\n')] |
li-ma/homework | python/test-deco-1-1.py | d75b1752a02bd028af0806683abe079c7b0a9b29 | def deco1(func):
print("before myfunc() called.")
func()
print("after myfunc() called.")
def myfunc():
print("myfunc() called.")
deco1(myfunc)
| [] |
mmoucka/py-junos-eznc | lib/jnpr/junos/transport/tty_netconf.py | 9ef5ad39e32ae670fe8ed0092d725661a45b3053 | import re
import time
from lxml import etree
import select
import socket
import logging
import sys
from lxml.builder import E
from lxml.etree import XMLSyntaxError
from datetime import datetime, timedelta
from ncclient.operations.rpc import RPCReply, RPCError
from ncclient.xml_ import to_ele
import six
from ncclient.transport.session import HelloHandler
class PY6:
NEW_LINE = six.b("\n")
EMPTY_STR = six.b("")
NETCONF_EOM = six.b("]]>]]>")
STARTS_WITH = six.b("<!--")
__all__ = ["xmlmode_netconf"]
_NETCONF_EOM = six.b("]]>]]>")
_xmlns = re.compile(six.b("xmlns=[^>]+"))
_xmlns_strip = lambda text: _xmlns.sub(PY6.EMPTY_STR, text)
_junosns = re.compile(six.b("junos:"))
_junosns_strip = lambda text: _junosns.sub(PY6.EMPTY_STR, text)
logger = logging.getLogger("jnpr.junos.tty_netconf")
# =========================================================================
# xmlmode_netconf
# =========================================================================
class tty_netconf(object):
"""
Basic Junos XML API for bootstraping through the TTY
"""
def __init__(self, tty):
self._tty = tty
self.hello = None
self._session_id = -1
# -------------------------------------------------------------------------
# NETCONF session open and close
# -------------------------------------------------------------------------
def open(self, at_shell):
""" start the XML API process and receive the 'hello' message """
nc_cmd = ("junoscript", "xml-mode")[at_shell]
self._tty.write(nc_cmd + " netconf need-trailer")
mark_start = datetime.now()
mark_end = mark_start + timedelta(seconds=15)
while datetime.now() < mark_end:
time.sleep(0.1)
line = self._tty.read()
if line.startswith(PY6.STARTS_WITH):
break
else:
# exceeded the while loop timeout
raise RuntimeError("Error: netconf not responding")
self.hello = self._receive()
self._session_id, _ = HelloHandler.parse(self.hello.decode("utf-8"))
def close(self, device_handler, force=False):
""" issue the XML API to close the session """
# if we do not have an open connection, then return now.
if force is False:
if self.hello is None:
return
self.rpc("close-session", device_handler)
# removed flush
# -------------------------------------------------------------------------
# MISC device commands
# -------------------------------------------------------------------------
def zeroize(self):
""" issue a reboot to the device """
cmd = E.command("request system zeroize")
try:
encode = None if sys.version < "3" else "unicode"
self.rpc(etree.tostring(cmd, encoding=encode))
except:
return False
return True
# -------------------------------------------------------------------------
# XML RPC command execution
# -------------------------------------------------------------------------
def rpc(self, cmd, device_handler):
"""
Write the XML cmd and return the response as XML object.
:cmd:
            <str> of the XML command. If the :cmd: is not XML, then
            this routine will add the brackets; i.e. if given
'get-software-information', this routine will turn
it into '<get-software-information/>'
NOTES:
The return XML object is the first child element after
the <rpc-reply>. There is also no error-checking
            performed by this routine.
"""
if not cmd.startswith("<"):
cmd = "<{}/>".format(cmd)
rpc = six.b("<rpc>{}</rpc>".format(cmd))
logger.info("Calling rpc: %s" % rpc)
self._tty.rawwrite(rpc)
rsp = self._receive()
rsp = rsp.decode("utf-8") if isinstance(rsp, bytes) else rsp
reply = RPCReply(rsp, device_handler, huge_tree=self._tty._huge_tree)
errors = reply.errors
if len(errors) > 1:
raise RPCError(to_ele(reply._raw), errs=errors)
elif len(errors) == 1:
raise reply.error
return reply
# -------------------------------------------------------------------------
# LOW-LEVEL I/O for reading back XML response
# -------------------------------------------------------------------------
def _receive(self):
# On windows select.select throws io.UnsupportedOperation: fileno
# so use read function for windows serial COM ports
if hasattr(self._tty, "port") and str(self._tty.port).startswith("COM"):
return self._receive_serial_win()
else:
return self._receive_serial()
def _receive_serial(self):
""" process the XML response into an XML object """
rxbuf = PY6.EMPTY_STR
line = PY6.EMPTY_STR
while True:
try:
rd, wt, err = select.select([self._tty._rx], [], [], 0.1)
except select.error as err:
raise err
except socket.error as err:
raise err
if rd:
line, lastline = rd[0].read_until(PY6.NETCONF_EOM, 0.1), line
if not line:
continue
if _NETCONF_EOM in line or _NETCONF_EOM in lastline + line:
rxbuf = rxbuf + line
break
else:
rxbuf = rxbuf + line
if _NETCONF_EOM in rxbuf:
break
return self._parse_buffer(rxbuf)
# -------------------------------------------------------------------------
# Read message from windows COM ports
# -------------------------------------------------------------------------
def _receive_serial_win(self):
""" process incoming data from windows port"""
rxbuf = PY6.EMPTY_STR
line = PY6.EMPTY_STR
while True:
line, lastline = self._tty.read().strip(), line
if not line:
continue
if _NETCONF_EOM in line or _NETCONF_EOM in lastline + line:
rxbuf = rxbuf + line
break
else:
rxbuf = rxbuf + line
if _NETCONF_EOM in rxbuf:
break
return self._parse_buffer(rxbuf)
def _parse_buffer(self, rxbuf):
rxbuf = rxbuf.splitlines()
if _NETCONF_EOM in rxbuf[-1]:
if rxbuf[-1] == _NETCONF_EOM:
rxbuf.pop()
else:
rxbuf[-1] = rxbuf[-1].split(_NETCONF_EOM)[0]
try:
rxbuf = [i.strip() for i in rxbuf if i.strip() != PY6.EMPTY_STR]
rcvd_data = PY6.NEW_LINE.join(rxbuf)
logger.debug("Received: \n%s" % rcvd_data)
parser = etree.XMLParser(
remove_blank_text=True, huge_tree=self._tty._huge_tree
)
try:
etree.XML(rcvd_data, parser)
except XMLSyntaxError:
if _NETCONF_EOM in rcvd_data:
rcvd_data = rcvd_data[: rcvd_data.index(_NETCONF_EOM)]
etree.XML(rcvd_data) # just to recheck
else:
parser = etree.XMLParser(recover=True)
rcvd_data = etree.tostring(etree.XML(rcvd_data, parser=parser))
return rcvd_data
except:
if "</xnm:error>" in rxbuf:
for x in rxbuf:
if "<message>" in x:
return etree.XML(
"<error-in-receive>" + x + "</error-in-receive>"
)
else:
return etree.XML("<error-in-receive/>")
| [((537, 552), 'six.b', 'six.b', (['"""]]>]]>"""'], {}), "(']]>]]>')\n", (542, 552), False, 'import six\n'), ((768, 811), 'logging.getLogger', 'logging.getLogger', (['"""jnpr.junos.tty_netconf"""'], {}), "('jnpr.junos.tty_netconf')\n", (785, 811), False, 'import logging\n'), ((385, 396), 'six.b', 'six.b', (['"""\n"""'], {}), "('\\n')\n", (390, 396), False, 'import six\n'), ((413, 422), 'six.b', 'six.b', (['""""""'], {}), "('')\n", (418, 422), False, 'import six\n'), ((441, 456), 'six.b', 'six.b', (['"""]]>]]>"""'], {}), "(']]>]]>')\n", (446, 456), False, 'import six\n'), ((475, 488), 'six.b', 'six.b', (['"""<!--"""'], {}), "('<!--')\n", (480, 488), False, 'import six\n'), ((573, 593), 'six.b', 'six.b', (['"""xmlns=[^>]+"""'], {}), "('xmlns=[^>]+')\n", (578, 593), False, 'import six\n'), ((677, 692), 'six.b', 'six.b', (['"""junos:"""'], {}), "('junos:')\n", (682, 692), False, 'import six\n'), ((1632, 1646), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1644, 1646), False, 'from datetime import datetime, timedelta\n'), ((2720, 2755), 'lxml.builder.E.command', 'E.command', (['"""request system zeroize"""'], {}), "('request system zeroize')\n", (2729, 2755), False, 'from lxml.builder import E\n'), ((4024, 4085), 'ncclient.operations.rpc.RPCReply', 'RPCReply', (['rsp', 'device_handler'], {'huge_tree': 'self._tty._huge_tree'}), '(rsp, device_handler, huge_tree=self._tty._huge_tree)\n', (4032, 4085), False, 'from ncclient.operations.rpc import RPCReply, RPCError\n'), ((1679, 1700), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(15)'}), '(seconds=15)\n', (1688, 1700), False, 'from datetime import datetime, timedelta\n'), ((1716, 1730), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1728, 1730), False, 'from datetime import datetime, timedelta\n'), ((1755, 1770), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (1765, 1770), False, 'import time\n'), ((6963, 7034), 'lxml.etree.XMLParser', 'etree.XMLParser', ([], {'remove_blank_text': '(True)', 'huge_tree': 'self._tty._huge_tree'}), '(remove_blank_text=True, huge_tree=self._tty._huge_tree)\n', (6978, 7034), False, 'from lxml import etree\n'), ((2852, 2888), 'lxml.etree.tostring', 'etree.tostring', (['cmd'], {'encoding': 'encode'}), '(cmd, encoding=encode)\n', (2866, 2888), False, 'from lxml import etree\n'), ((4171, 4189), 'ncclient.xml_.to_ele', 'to_ele', (['reply._raw'], {}), '(reply._raw)\n', (4177, 4189), False, 'from ncclient.xml_ import to_ele\n'), ((5057, 5100), 'select.select', 'select.select', (['[self._tty._rx]', '[]', '[]', '(0.1)'], {}), '([self._tty._rx], [], [], 0.1)\n', (5070, 5100), False, 'import select\n'), ((7098, 7126), 'lxml.etree.XML', 'etree.XML', (['rcvd_data', 'parser'], {}), '(rcvd_data, parser)\n', (7107, 7126), False, 'from lxml import etree\n'), ((7852, 7884), 'lxml.etree.XML', 'etree.XML', (['"""<error-in-receive/>"""'], {}), "('<error-in-receive/>')\n", (7861, 7884), False, 'from lxml import etree\n'), ((7303, 7323), 'lxml.etree.XML', 'etree.XML', (['rcvd_data'], {}), '(rcvd_data)\n', (7312, 7323), False, 'from lxml import etree\n'), ((7394, 7423), 'lxml.etree.XMLParser', 'etree.XMLParser', ([], {'recover': '(True)'}), '(recover=True)\n', (7409, 7423), False, 'from lxml import etree\n'), ((7471, 7506), 'lxml.etree.XML', 'etree.XML', (['rcvd_data'], {'parser': 'parser'}), '(rcvd_data, parser=parser)\n', (7480, 7506), False, 'from lxml import etree\n'), ((7697, 7756), 'lxml.etree.XML', 'etree.XML', (["('<error-in-receive>' + x + '</error-in-receive>')"], {}), 
"('<error-in-receive>' + x + '</error-in-receive>')\n", (7706, 7756), False, 'from lxml import etree\n')] |
eydam-prototyping/mp_modbus | test/_test_client.py | 8007c41dd16e6f71bd27b587628f57f38f27a7e0 | from pymodbus.client.sync import ModbusTcpClient as ModbusClient
import logging
FORMAT = ('%(asctime)-15s %(threadName)-15s '
'%(levelname)-8s %(module)-15s:%(lineno)-8s %(message)s')
logging.basicConfig(format=FORMAT)
log = logging.getLogger()
log.setLevel(logging.DEBUG)
client = ModbusClient('192.168.178.61', port=502)
client.connect()
f = client.read_holding_registers(305,1)
print(f.registers) | [((194, 228), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': 'FORMAT'}), '(format=FORMAT)\n', (213, 228), False, 'import logging\n'), ((235, 254), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (252, 254), False, 'import logging\n'), ((292, 332), 'pymodbus.client.sync.ModbusTcpClient', 'ModbusClient', (['"""192.168.178.61"""'], {'port': '(502)'}), "('192.168.178.61', port=502)\n", (304, 332), True, 'from pymodbus.client.sync import ModbusTcpClient as ModbusClient\n')] |
technolotrix/tests | tests/selenium/test_about/test_about_page.py | ae5b9741e80a1fd735c66de93cc014f672c5afb2 | import unittest
from selenium import webdriver
import page
class AboutPage(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Firefox()
self.driver.get("http://nicolesmith.nyc")
#self.driver.get("http://127.0.0.1:4747/about")
self.about_page = page.AboutPage(self.driver)
######## HEADER STUFF ########
def test_title_on_about_page(self):
assert self.about_page.is_title_matches(), "about page title doesn't match"
def test_click_get_quote(self):
assert self.about_page.click_quote_button(), "link to contact page is broken"
def test_click_home_button(self):
assert self.about_page.click_home_button(), "home button does not go to homepage"
@unittest.skip("Needs fixing.")
def test_click_about_link(self):
assert self.about_page.click_projects_link(), "about link does not go to about page"
@unittest.skip("Needs fixing.")
def test_click_projects_link(self):
assert self.about_page.click_projects_link(), "projects link does not go to projects page"
@unittest.skip("Needs fixing.")
def test_click_services_link(self):
assert self.about_page.click_projects_link(), "services link does not go to services page"
######## PAGE SPECIFIC STUFF ########
def test_click_resume(self):
return self.about_page.click_resume(), "link to resume is broken"
def test_click_resumator(self):
return self.about_page.click_resumator(), "link to resumator is broken"
def test_click_contact_me(self):
return self.about_page.click_contact_me(), "link to contact me page is broken in FAQ"
def test_click_html5up_backlink(self):
return self.about_page.click_html5up_backlink(), "backlink to html5up in FAQ is broken"
######## FOOTER STUFF ########
def test_click_github(self):
assert self.about_page.click_github_button(), "link to github is broken"
def test_click_linkedin(self):
assert self.about_page.click_linkedin_button(), "link to linkedin is broken"
def test_click_gplus(self):
assert self.about_page.click_gplus_button(), "link to google plus is broken"
def test_click_twitter(self):
assert self.about_page.click_twitter_button(), "link to twitter is broken"
def test_click_html5up(self):
assert self.about_page.click_html5up_link(), "link to html5up template owner is broken"
def test_copyright_on_about_page(self):
assert self.about_page.is_copyright_matches(), "about page has wrong copyright"
def tearDown(self):
self.driver.close()
if __name__ == "__main__":
unittest.main() | [((739, 769), 'unittest.skip', 'unittest.skip', (['"""Needs fixing."""'], {}), "('Needs fixing.')\n", (752, 769), False, 'import unittest\n'), ((906, 936), 'unittest.skip', 'unittest.skip', (['"""Needs fixing."""'], {}), "('Needs fixing.')\n", (919, 936), False, 'import unittest\n'), ((1082, 1112), 'unittest.skip', 'unittest.skip', (['"""Needs fixing."""'], {}), "('Needs fixing.')\n", (1095, 1112), False, 'import unittest\n'), ((2649, 2664), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2662, 2664), False, 'import unittest\n'), ((140, 159), 'selenium.webdriver.Firefox', 'webdriver.Firefox', ([], {}), '()\n', (157, 159), False, 'from selenium import webdriver\n'), ((292, 319), 'page.AboutPage', 'page.AboutPage', (['self.driver'], {}), '(self.driver)\n', (306, 319), False, 'import page\n')] |
pcen/pulumi | sdk/python/lib/test/langhost/future_input/__main__.py | 1bb85ca98c90f2161fe915df083d47c56c135e4d | # Copyright 2016-2018, Pulumi Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
from pulumi import CustomResource, Output, Input
async def read_a_file_or_something():
await asyncio.sleep(0)
return "here's a file"
def assert_eq(l, r):
assert l == r
class FileResource(CustomResource):
contents: Output[str]
def __init__(self, name: str, file_contents: Input[str]) -> None:
CustomResource.__init__(self, "test:index:FileResource", name, {
"contents": file_contents
})
# read_a_file_or_something returns a coroutine when called, which needs to be scheduled
# and awaited in order to yield a value.
file_res = FileResource("file", read_a_file_or_something())
file_res.contents.apply(lambda c: assert_eq(c, "here's a file"))
| [((702, 718), 'asyncio.sleep', 'asyncio.sleep', (['(0)'], {}), '(0)\n', (715, 718), False, 'import asyncio\n'), ((928, 1023), 'pulumi.CustomResource.__init__', 'CustomResource.__init__', (['self', '"""test:index:FileResource"""', 'name', "{'contents': file_contents}"], {}), "(self, 'test:index:FileResource', name, {'contents':\n file_contents})\n", (951, 1023), False, 'from pulumi import CustomResource, Output, Input\n')] |
dewloosh/dewloosh-geom | src/dewloosh/geom/cells/h8.py | 5c97fbab4b68f4748bf4309184b9e0e877f94cd6 | # -*- coding: utf-8 -*-
from dewloosh.geom.polyhedron import HexaHedron
from dewloosh.math.numint import GaussPoints as Gauss
from dewloosh.geom.utils import cells_coords
from numba import njit, prange
import numpy as np
from numpy import ndarray
__cache = True
@njit(nogil=True, cache=__cache)
def monoms_H8(pcoord: np.ndarray):
r, s, t = pcoord
return np.array([1, r, s, t, r*s, r*t, s*t, r*s*t])
@njit(nogil=True, cache=__cache)
def shp_H8(pcoord):
r, s, t = pcoord
return np.array([-0.125*r*s*t + 0.125*r*s + 0.125*r*t - 0.125*r +
0.125*s*t - 0.125*s - 0.125*t + 0.125,
0.125*r*s*t - 0.125*r*s - 0.125*r*t + 0.125*r +
0.125*s*t - 0.125*s - 0.125*t + 0.125,
-0.125*r*s*t + 0.125*r*s - 0.125*r*t + 0.125*r -
0.125*s*t + 0.125*s - 0.125*t + 0.125,
0.125*r*s*t - 0.125*r*s + 0.125*r*t - 0.125*r -
0.125*s*t + 0.125*s - 0.125*t + 0.125,
0.125*r*s*t + 0.125*r*s - 0.125*r*t - 0.125*r -
0.125*s*t - 0.125*s + 0.125*t + 0.125,
-0.125*r*s*t - 0.125*r*s + 0.125*r*t + 0.125*r -
0.125*s*t - 0.125*s + 0.125*t + 0.125,
0.125*r*s*t + 0.125*r*s + 0.125*r*t + 0.125*r +
0.125*s*t + 0.125*s + 0.125*t + 0.125,
-0.125*r*s*t - 0.125*r*s - 0.125*r*t - 0.125*r +
0.125*s*t + 0.125*s + 0.125*t + 0.125]
)
@njit(nogil=True, parallel=True, cache=__cache)
def shape_function_matrix_H8(pcoord: np.ndarray):
eye = np.eye(3, dtype=pcoord.dtype)
shp = shp_H8(pcoord)
res = np.zeros((3, 24), dtype=pcoord.dtype)
for i in prange(8):
res[:, i*3: (i+1) * 3] = eye*shp[i]
return res
@njit(nogil=True, cache=__cache)
def dshp_H8(pcoord):
r, s, t = pcoord
return np.array(
[[-0.125*s*t + 0.125*s + 0.125*t - 0.125,
-0.125*r*t + 0.125*r + 0.125*t - 0.125,
-0.125*r*s + 0.125*r + 0.125*s - 0.125],
[0.125*s*t - 0.125*s - 0.125*t + 0.125,
0.125*r*t - 0.125*r + 0.125*t - 0.125,
0.125*r*s - 0.125*r + 0.125*s - 0.125],
[-0.125*s*t + 0.125*s - 0.125*t + 0.125,
-0.125*r*t + 0.125*r - 0.125*t + 0.125,
-0.125*r*s - 0.125*r - 0.125*s - 0.125],
[0.125*s*t - 0.125*s + 0.125*t - 0.125,
0.125*r*t - 0.125*r - 0.125*t + 0.125,
0.125*r*s + 0.125*r - 0.125*s - 0.125],
[0.125*s*t + 0.125*s - 0.125*t - 0.125,
0.125*r*t + 0.125*r - 0.125*t - 0.125,
0.125*r*s - 0.125*r - 0.125*s + 0.125],
[-0.125*s*t - 0.125*s + 0.125*t + 0.125,
-0.125*r*t - 0.125*r - 0.125*t - 0.125,
-0.125*r*s + 0.125*r - 0.125*s + 0.125],
[0.125*s*t + 0.125*s + 0.125*t + 0.125,
0.125*r*t + 0.125*r + 0.125*t + 0.125,
0.125*r*s + 0.125*r + 0.125*s + 0.125],
[-0.125*s*t - 0.125*s - 0.125*t - 0.125,
-0.125*r*t - 0.125*r + 0.125*t + 0.125,
-0.125*r*s - 0.125*r + 0.125*s + 0.125]]
)
@njit(nogil=True, parallel=True, cache=__cache)
def dshp_H8_bulk(pcoords: ndarray):
nP = pcoords.shape[0]
res = np.zeros((nP, 8, 3), dtype=pcoords.dtype)
for iP in prange(nP):
res[iP] = dshp_H8(pcoords[iP])
return res
@njit(nogil=True, parallel=True, fastmath=True, cache=__cache)
def volumes_H8(ecoords: np.ndarray, qpos: np.ndarray,
qweight: np.ndarray):
nE = ecoords.shape[0]
volumes = np.zeros(nE, dtype=ecoords.dtype)
nQ = len(qweight)
for iQ in range(nQ):
dshp = dshp_H8(qpos[iQ])
for i in prange(nE):
jac = ecoords[i].T @ dshp
djac = np.linalg.det(jac)
volumes[i] += qweight[iQ] * djac
return volumes
class H8(HexaHedron):
"""
8-node isoparametric hexahedron.
top
7--6
| |
4--5
bottom
3--2
| |
0--1
"""
@classmethod
def lcoords(cls, *args, **kwargs):
return np.array([[-1., -1., -1],
[1., -1., -1.],
[1., 1., -1.],
[-1., 1., -1.],
[-1., -1., 1.],
[1., -1., 1.],
[1., 1., 1.],
[-1., 1., 1.]])
@classmethod
def lcenter(cls, *args, **kwargs):
return np.array([0., 0., 0.])
def shape_function_derivatives(self, coords=None, *args, **kwargs):
coords = self.pointdata.x.to_numpy() if coords is None else coords
if len(coords.shape) == 2:
return dshp_H8_bulk(coords)
else:
return dshp_H8(coords)
def volumes(self, coords=None, topo=None):
coords = self.pointdata.x.to_numpy() if coords is None else coords
topo = self.nodes.to_numpy() if topo is None else topo
ecoords = cells_coords(coords, topo)
qpos, qweight = Gauss(2, 2, 2)
return volumes_H8(ecoords, qpos, qweight)
| [((265, 296), 'numba.njit', 'njit', ([], {'nogil': '(True)', 'cache': '__cache'}), '(nogil=True, cache=__cache)\n', (269, 296), False, 'from numba import njit, prange\n'), ((412, 443), 'numba.njit', 'njit', ([], {'nogil': '(True)', 'cache': '__cache'}), '(nogil=True, cache=__cache)\n', (416, 443), False, 'from numba import njit, prange\n'), ((1546, 1592), 'numba.njit', 'njit', ([], {'nogil': '(True)', 'parallel': '(True)', 'cache': '__cache'}), '(nogil=True, parallel=True, cache=__cache)\n', (1550, 1592), False, 'from numba import njit, prange\n'), ((1842, 1873), 'numba.njit', 'njit', ([], {'nogil': '(True)', 'cache': '__cache'}), '(nogil=True, cache=__cache)\n', (1846, 1873), False, 'from numba import njit, prange\n'), ((3126, 3172), 'numba.njit', 'njit', ([], {'nogil': '(True)', 'parallel': '(True)', 'cache': '__cache'}), '(nogil=True, parallel=True, cache=__cache)\n', (3130, 3172), False, 'from numba import njit, prange\n'), ((3370, 3431), 'numba.njit', 'njit', ([], {'nogil': '(True)', 'parallel': '(True)', 'fastmath': '(True)', 'cache': '__cache'}), '(nogil=True, parallel=True, fastmath=True, cache=__cache)\n', (3374, 3431), False, 'from numba import njit, prange\n'), ((364, 418), 'numpy.array', 'np.array', (['[1, r, s, t, r * s, r * t, s * t, r * s * t]'], {}), '([1, r, s, t, r * s, r * t, s * t, r * s * t])\n', (372, 418), True, 'import numpy as np\n'), ((496, 1452), 'numpy.array', 'np.array', (['[-0.125 * r * s * t + 0.125 * r * s + 0.125 * r * t - 0.125 * r + 0.125 * s *\n t - 0.125 * s - 0.125 * t + 0.125, 0.125 * r * s * t - 0.125 * r * s - \n 0.125 * r * t + 0.125 * r + 0.125 * s * t - 0.125 * s - 0.125 * t + \n 0.125, -0.125 * r * s * t + 0.125 * r * s - 0.125 * r * t + 0.125 * r -\n 0.125 * s * t + 0.125 * s - 0.125 * t + 0.125, 0.125 * r * s * t - \n 0.125 * r * s + 0.125 * r * t - 0.125 * r - 0.125 * s * t + 0.125 * s -\n 0.125 * t + 0.125, 0.125 * r * s * t + 0.125 * r * s - 0.125 * r * t - \n 0.125 * r - 0.125 * s * t - 0.125 * s + 0.125 * t + 0.125, -0.125 * r *\n s * t - 0.125 * r * s + 0.125 * r * t + 0.125 * r - 0.125 * s * t - \n 0.125 * s + 0.125 * t + 0.125, 0.125 * r * s * t + 0.125 * r * s + \n 0.125 * r * t + 0.125 * r + 0.125 * s * t + 0.125 * s + 0.125 * t + \n 0.125, -0.125 * r * s * t - 0.125 * r * s - 0.125 * r * t - 0.125 * r +\n 0.125 * s * t + 0.125 * s + 0.125 * t + 0.125]'], {}), '([-0.125 * r * s * t + 0.125 * r * s + 0.125 * r * t - 0.125 * r + \n 0.125 * s * t - 0.125 * s - 0.125 * t + 0.125, 0.125 * r * s * t - \n 0.125 * r * s - 0.125 * r * t + 0.125 * r + 0.125 * s * t - 0.125 * s -\n 0.125 * t + 0.125, -0.125 * r * s * t + 0.125 * r * s - 0.125 * r * t +\n 0.125 * r - 0.125 * s * t + 0.125 * s - 0.125 * t + 0.125, 0.125 * r *\n s * t - 0.125 * r * s + 0.125 * r * t - 0.125 * r - 0.125 * s * t + \n 0.125 * s - 0.125 * t + 0.125, 0.125 * r * s * t + 0.125 * r * s - \n 0.125 * r * t - 0.125 * r - 0.125 * s * t - 0.125 * s + 0.125 * t + \n 0.125, -0.125 * r * s * t - 0.125 * r * s + 0.125 * r * t + 0.125 * r -\n 0.125 * s * t - 0.125 * s + 0.125 * t + 0.125, 0.125 * r * s * t + \n 0.125 * r * s + 0.125 * r * t + 0.125 * r + 0.125 * s * t + 0.125 * s +\n 0.125 * t + 0.125, -0.125 * r * s * t - 0.125 * r * s - 0.125 * r * t -\n 0.125 * r + 0.125 * s * t + 0.125 * s + 0.125 * t + 0.125])\n', (504, 1452), True, 'import numpy as np\n'), ((1653, 1682), 'numpy.eye', 'np.eye', (['(3)'], {'dtype': 'pcoord.dtype'}), '(3, dtype=pcoord.dtype)\n', (1659, 1682), True, 'import numpy as np\n'), ((1718, 1755), 'numpy.zeros', 'np.zeros', (['(3, 24)'], {'dtype': 
'pcoord.dtype'}), '((3, 24), dtype=pcoord.dtype)\n', (1726, 1755), True, 'import numpy as np\n'), ((1769, 1778), 'numba.prange', 'prange', (['(8)'], {}), '(8)\n', (1775, 1778), False, 'from numba import njit, prange\n'), ((1927, 3161), 'numpy.array', 'np.array', (['[[-0.125 * s * t + 0.125 * s + 0.125 * t - 0.125, -0.125 * r * t + 0.125 *\n r + 0.125 * t - 0.125, -0.125 * r * s + 0.125 * r + 0.125 * s - 0.125],\n [0.125 * s * t - 0.125 * s - 0.125 * t + 0.125, 0.125 * r * t - 0.125 *\n r + 0.125 * t - 0.125, 0.125 * r * s - 0.125 * r + 0.125 * s - 0.125],\n [-0.125 * s * t + 0.125 * s - 0.125 * t + 0.125, -0.125 * r * t + 0.125 *\n r - 0.125 * t + 0.125, -0.125 * r * s - 0.125 * r - 0.125 * s - 0.125],\n [0.125 * s * t - 0.125 * s + 0.125 * t - 0.125, 0.125 * r * t - 0.125 *\n r - 0.125 * t + 0.125, 0.125 * r * s + 0.125 * r - 0.125 * s - 0.125],\n [0.125 * s * t + 0.125 * s - 0.125 * t - 0.125, 0.125 * r * t + 0.125 *\n r - 0.125 * t - 0.125, 0.125 * r * s - 0.125 * r - 0.125 * s + 0.125],\n [-0.125 * s * t - 0.125 * s + 0.125 * t + 0.125, -0.125 * r * t - 0.125 *\n r - 0.125 * t - 0.125, -0.125 * r * s + 0.125 * r - 0.125 * s + 0.125],\n [0.125 * s * t + 0.125 * s + 0.125 * t + 0.125, 0.125 * r * t + 0.125 *\n r + 0.125 * t + 0.125, 0.125 * r * s + 0.125 * r + 0.125 * s + 0.125],\n [-0.125 * s * t - 0.125 * s - 0.125 * t - 0.125, -0.125 * r * t - 0.125 *\n r + 0.125 * t + 0.125, -0.125 * r * s - 0.125 * r + 0.125 * s + 0.125]]'], {}), '([[-0.125 * s * t + 0.125 * s + 0.125 * t - 0.125, -0.125 * r * t +\n 0.125 * r + 0.125 * t - 0.125, -0.125 * r * s + 0.125 * r + 0.125 * s -\n 0.125], [0.125 * s * t - 0.125 * s - 0.125 * t + 0.125, 0.125 * r * t -\n 0.125 * r + 0.125 * t - 0.125, 0.125 * r * s - 0.125 * r + 0.125 * s - \n 0.125], [-0.125 * s * t + 0.125 * s - 0.125 * t + 0.125, -0.125 * r * t +\n 0.125 * r - 0.125 * t + 0.125, -0.125 * r * s - 0.125 * r - 0.125 * s -\n 0.125], [0.125 * s * t - 0.125 * s + 0.125 * t - 0.125, 0.125 * r * t -\n 0.125 * r - 0.125 * t + 0.125, 0.125 * r * s + 0.125 * r - 0.125 * s - \n 0.125], [0.125 * s * t + 0.125 * s - 0.125 * t - 0.125, 0.125 * r * t +\n 0.125 * r - 0.125 * t - 0.125, 0.125 * r * s - 0.125 * r - 0.125 * s + \n 0.125], [-0.125 * s * t - 0.125 * s + 0.125 * t + 0.125, -0.125 * r * t -\n 0.125 * r - 0.125 * t - 0.125, -0.125 * r * s + 0.125 * r - 0.125 * s +\n 0.125], [0.125 * s * t + 0.125 * s + 0.125 * t + 0.125, 0.125 * r * t +\n 0.125 * r + 0.125 * t + 0.125, 0.125 * r * s + 0.125 * r + 0.125 * s + \n 0.125], [-0.125 * s * t - 0.125 * s - 0.125 * t - 0.125, -0.125 * r * t -\n 0.125 * r + 0.125 * t + 0.125, -0.125 * r * s - 0.125 * r + 0.125 * s +\n 0.125]])\n', (1935, 3161), True, 'import numpy as np\n'), ((3245, 3286), 'numpy.zeros', 'np.zeros', (['(nP, 8, 3)'], {'dtype': 'pcoords.dtype'}), '((nP, 8, 3), dtype=pcoords.dtype)\n', (3253, 3286), True, 'import numpy as np\n'), ((3301, 3311), 'numba.prange', 'prange', (['nP'], {}), '(nP)\n', (3307, 3311), False, 'from numba import njit, prange\n'), ((3563, 3596), 'numpy.zeros', 'np.zeros', (['nE'], {'dtype': 'ecoords.dtype'}), '(nE, dtype=ecoords.dtype)\n', (3571, 3596), True, 'import numpy as np\n'), ((3694, 3704), 'numba.prange', 'prange', (['nE'], {}), '(nE)\n', (3700, 3704), False, 'from numba import njit, prange\n'), ((4083, 4247), 'numpy.array', 'np.array', (['[[-1.0, -1.0, -1], [1.0, -1.0, -1.0], [1.0, 1.0, -1.0], [-1.0, 1.0, -1.0],\n [-1.0, -1.0, 1.0], [1.0, -1.0, 1.0], [1.0, 1.0, 1.0], [-1.0, 1.0, 1.0]]'], {}), '([[-1.0, -1.0, -1], [1.0, -1.0, -1.0], [1.0, 1.0, -1.0], [-1.0, 1.0,\n 
-1.0], [-1.0, -1.0, 1.0], [1.0, -1.0, 1.0], [1.0, 1.0, 1.0], [-1.0, 1.0,\n 1.0]])\n', (4091, 4247), True, 'import numpy as np\n'), ((4464, 4489), 'numpy.array', 'np.array', (['[0.0, 0.0, 0.0]'], {}), '([0.0, 0.0, 0.0])\n', (4472, 4489), True, 'import numpy as np\n'), ((4963, 4989), 'dewloosh.geom.utils.cells_coords', 'cells_coords', (['coords', 'topo'], {}), '(coords, topo)\n', (4975, 4989), False, 'from dewloosh.geom.utils import cells_coords\n'), ((5014, 5028), 'dewloosh.math.numint.GaussPoints', 'Gauss', (['(2)', '(2)', '(2)'], {}), '(2, 2, 2)\n', (5019, 5028), True, 'from dewloosh.math.numint import GaussPoints as Gauss\n'), ((3763, 3781), 'numpy.linalg.det', 'np.linalg.det', (['jac'], {}), '(jac)\n', (3776, 3781), True, 'import numpy as np\n')] |
Simbadeveloper/studious-octo-waddle.io | shopping_cart_test/shoppingcart2.py | 7ace6bb93e3b87c97d59df858e3079ec7a2db30e | class ShoppingCart(object):
def __init__(self):
self.total = 0
self.items = dict()
def add_item(self, item_name, quantity, price):
if item_name != None and quantity >= 1:
self.items.update({item_name: quantity})
if quantity and price >= 1:
self.total += (quantity * price)
def remove_item(self, item_name, quantity, price):
if item_name in self.items:
if quantity < self.items[item_name] and quantity > 0:
self.items[item_name] -= quantity
self.total -= price*quantity
def checkout(self, cash_paid):
balance = 0
if cash_paid < self.total:
return "Cash paid not enough"
balance = cash_paid - self.total
return balance
class Shop(ShoppingCart):
def __init__(self):
self.quantity = 100
def remove_item(self):
self.quantity -= 1
| [] |
heaven00/github-contribution-leaderboard | tests/models/pr_test_data.py | 3de53a60a7c81b91291e29d063c7fd14696d426d | import copy
import json
from ghcl.models.pull_request import PullRequest
class PRData:
def __init__(self, data: dict = None):
if data is None:
with open('./tests/models/empty_pr_data.json') as file:
self._data = json.load(file)
else:
self._data = data
def with_pr_url(self, url: str = 'some-url'):
data = copy.deepcopy(self._data)
data['issues_data']['pull_request']['html_url'] = url
return PRData(data)
def with_label(self, label_to_add: str = None):
data = copy.deepcopy(self._data)
if label_to_add is None:
label_number = len(data["issues_data"]["labels"]) + 1
label_to_add = f'label-{label_number}'
data['issues_data']['labels'].append({'name': label_to_add})
return PRData(data)
def with_created_at(self, created_at: str = '2014-04-24T16:34:47Z'):
data = copy.deepcopy(self._data)
data['issues_data']['created_at'] = created_at
return PRData(data)
def with_owner(self, owner: str = 'owner_user_id'):
data = copy.deepcopy(self._data)
data['pr_data']['base']['repo']['owner']['login'] = owner
return PRData(data)
def with_pr_raised_by(self, pr_raised_by: str = 'pr_raised_by_user_id'):
data = copy.deepcopy(self._data)
data['pr_data']['head']['user']['login'] = pr_raised_by
return PRData(data)
def with_merged(self, merged=False):
data = copy.deepcopy(self._data)
data['pr_data']['merged'] = merged
return PRData(data)
def with_state(self, state='some_state'):
data = copy.deepcopy(self._data)
data['issues_data']['state'] = state
return PRData(data)
def with_defaults(self):
return PRData(self._data).with_pr_url()\
.with_label()\
.with_label()\
.with_created_at()\
.with_owner()\
.with_pr_raised_by()\
.with_merged()\
.with_state()
def as_pull_request(self):
return PullRequest(**self._data)
| [((381, 406), 'copy.deepcopy', 'copy.deepcopy', (['self._data'], {}), '(self._data)\n', (394, 406), False, 'import copy\n'), ((565, 590), 'copy.deepcopy', 'copy.deepcopy', (['self._data'], {}), '(self._data)\n', (578, 590), False, 'import copy\n'), ((928, 953), 'copy.deepcopy', 'copy.deepcopy', (['self._data'], {}), '(self._data)\n', (941, 953), False, 'import copy\n'), ((1109, 1134), 'copy.deepcopy', 'copy.deepcopy', (['self._data'], {}), '(self._data)\n', (1122, 1134), False, 'import copy\n'), ((1322, 1347), 'copy.deepcopy', 'copy.deepcopy', (['self._data'], {}), '(self._data)\n', (1335, 1347), False, 'import copy\n'), ((1497, 1522), 'copy.deepcopy', 'copy.deepcopy', (['self._data'], {}), '(self._data)\n', (1510, 1522), False, 'import copy\n'), ((1656, 1681), 'copy.deepcopy', 'copy.deepcopy', (['self._data'], {}), '(self._data)\n', (1669, 1681), False, 'import copy\n'), ((2082, 2107), 'ghcl.models.pull_request.PullRequest', 'PullRequest', ([], {}), '(**self._data)\n', (2093, 2107), False, 'from ghcl.models.pull_request import PullRequest\n'), ((255, 270), 'json.load', 'json.load', (['file'], {}), '(file)\n', (264, 270), False, 'import json\n')] |
PKUfudawei/cmssw | Validation/EventGenerator/python/BasicGenParticleValidation_cfi.py | 8fbb5ce74398269c8a32956d7c7943766770c093 | import FWCore.ParameterSet.Config as cms
from DQMServices.Core.DQMEDAnalyzer import DQMEDAnalyzer
basicGenParticleValidation = DQMEDAnalyzer('BasicGenParticleValidation',
hepmcCollection = cms.InputTag("generatorSmeared"),
genparticleCollection = cms.InputTag("genParticles",""),
genjetsCollection = cms.InputTag("ak4GenJets",""),
matchingPrecision = cms.double(0.001),
verbosity = cms.untracked.uint32(0),
UseWeightFromHepMC = cms.bool(True),
signalParticlesOnly = cms.bool(False)
)
basicGenParticleValidationHiMix = basicGenParticleValidation.clone(signalParticlesOnly = True)
| [((194, 226), 'FWCore.ParameterSet.Config.InputTag', 'cms.InputTag', (['"""generatorSmeared"""'], {}), "('generatorSmeared')\n", (206, 226), True, 'import FWCore.ParameterSet.Config as cms\n'), ((256, 288), 'FWCore.ParameterSet.Config.InputTag', 'cms.InputTag', (['"""genParticles"""', '""""""'], {}), "('genParticles', '')\n", (268, 288), True, 'import FWCore.ParameterSet.Config as cms\n'), ((313, 343), 'FWCore.ParameterSet.Config.InputTag', 'cms.InputTag', (['"""ak4GenJets"""', '""""""'], {}), "('ak4GenJets', '')\n", (325, 343), True, 'import FWCore.ParameterSet.Config as cms\n'), ((368, 385), 'FWCore.ParameterSet.Config.double', 'cms.double', (['(0.001)'], {}), '(0.001)\n', (378, 385), True, 'import FWCore.ParameterSet.Config as cms\n'), ((403, 426), 'FWCore.ParameterSet.Config.untracked.uint32', 'cms.untracked.uint32', (['(0)'], {}), '(0)\n', (423, 426), True, 'import FWCore.ParameterSet.Config as cms\n'), ((453, 467), 'FWCore.ParameterSet.Config.bool', 'cms.bool', (['(True)'], {}), '(True)\n', (461, 467), True, 'import FWCore.ParameterSet.Config as cms\n'), ((495, 510), 'FWCore.ParameterSet.Config.bool', 'cms.bool', (['(False)'], {}), '(False)\n', (503, 510), True, 'import FWCore.ParameterSet.Config as cms\n')] |