repo_name | repo_path | repo_head_hexsha | content | apis |
---|---|---|---|---|
funnel-io/python-on-rails | tests/either_catch_test.py | cccd2284c7dab32a37d573042531a54454164f6a | from python_on_rails.either import as_either, Failure, Success
@as_either(TypeError)
def add_one(x):
return x + 1
@as_either()
def times_five(x):
return x * 5
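# Note: as_either appears to wrap a function so that exceptions of the listed
# types are caught and returned as a Failure, while normal return values are
# wrapped in a Success (see the two tests below).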
def test_success_executes_bindings():
result = Success(1).bind(add_one).bind(times_five)
assert isinstance(result, Success)
assert result.value == 10
def test_a_failure_stops_the_execution_of_later_bindings():
result = Success("NaN").bind(add_one).bind(times_five)
assert isinstance(result, Failure)
assert type(result.value) == TypeError
assert repr(result.value) == "TypeError('can only concatenate str (not \"int\") to str')"
| [((66, 86), 'python_on_rails.either.as_either', 'as_either', (['TypeError'], {}), '(TypeError)\n', (75, 86), False, 'from python_on_rails.either import as_either, Failure, Success\n'), ((123, 134), 'python_on_rails.either.as_either', 'as_either', ([], {}), '()\n', (132, 134), False, 'from python_on_rails.either import as_either, Failure, Success\n'), ((224, 234), 'python_on_rails.either.Success', 'Success', (['(1)'], {}), '(1)\n', (231, 234), False, 'from python_on_rails.either import as_either, Failure, Success\n'), ((410, 424), 'python_on_rails.either.Success', 'Success', (['"""NaN"""'], {}), "('NaN')\n", (417, 424), False, 'from python_on_rails.either import as_either, Failure, Success\n')] |
Coullence/DRF_Percels-Couriers_API_V.0.0.2 | ServerSide/models.py | 906786115861b316f8ecf023c8af82f2dacff68e | from django.db import models
# Create your models here.
# Station
class Stations(models.Model):
stationName = models.CharField(max_length=100)
stationLocation = models.CharField(max_length=100)
stationStaffId = models.CharField(max_length=100)
date = models.DateTimeField(auto_now_add=True)
def __str__(self):
return self.stationName
# Customers
class Customers(models.Model):
customerName = models.CharField(max_length=100)
customerPhone = models.CharField(max_length=100)
customerId = models.CharField(max_length=100)
customerStartLoc = models.CharField(max_length=100)
customerDestinationLoc = models.CharField(max_length=100)
stationStaffId = models.CharField(max_length=100)
date = models.DateTimeField(auto_now_add=True)
def __str__(self):
return self.customerName
# Items
class Items(models.Model):
itemName = models.CharField(max_length=100)
itemType = models.CharField(max_length=100)
Quantity = models.CharField(max_length=100)
originStation = models.CharField(max_length=100)
originCounty = models.CharField(max_length=100)
receiverName = models.CharField(max_length=100)
receiverPhone = models.CharField(max_length=100)
destinationAddress = models.CharField(max_length=100)
destinationCounty = models.CharField(max_length=100)
dateSend= models.CharField(max_length=100)
dateExpected = models.CharField(max_length=100)
def __str__(self):
return self.itemName
# Payments
class Payments(models.Model):
customerPhone = models.CharField(max_length=100)
paymentAmount = models.CharField(max_length=100)
paymentMeans = models.EmailField(max_length=100)
code = models.CharField(max_length=100)
date = models.DateTimeField(auto_now_add=True)
def __str__(self):
return self.customerPhone
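# Illustrative usage (hypothetical values), e.g. from a Django shell:
#   Stations.objects.create(stationName="Central", stationLocation="Nairobi",
#                           stationStaffId="S-001")
#   Payments.objects.filter(customerPhone="0700000000")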
| [((119, 151), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (135, 151), False, 'from django.db import models\n'), ((174, 206), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (190, 206), False, 'from django.db import models\n'), ((229, 261), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (245, 261), False, 'from django.db import models\n'), ((274, 313), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (294, 313), False, 'from django.db import models\n'), ((436, 468), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (452, 468), False, 'from django.db import models\n'), ((492, 524), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (508, 524), False, 'from django.db import models\n'), ((548, 580), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (564, 580), False, 'from django.db import models\n'), ((604, 636), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (620, 636), False, 'from django.db import models\n'), ((666, 698), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (682, 698), False, 'from django.db import models\n'), ((721, 753), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (737, 753), False, 'from django.db import models\n'), ((776, 815), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (796, 815), False, 'from django.db import models\n'), ((923, 955), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (939, 955), False, 'from django.db import models\n'), ((971, 1003), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (987, 1003), False, 'from django.db import models\n'), ((1019, 1051), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1035, 1051), False, 'from django.db import models\n'), ((1072, 1104), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1088, 1104), False, 'from django.db import models\n'), ((1124, 1156), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1140, 1156), False, 'from django.db import models\n'), ((1176, 1208), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1192, 1208), False, 'from django.db import models\n'), ((1229, 1261), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1245, 1261), False, 'from django.db import models\n'), ((1287, 1319), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1303, 1319), False, 'from django.db import models\n'), ((1344, 1376), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1360, 1376), False, 'from django.db import models\n'), ((1391, 1423), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': 
'(100)'}), '(max_length=100)\n', (1407, 1423), False, 'from django.db import models\n'), ((1443, 1475), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1459, 1475), False, 'from django.db import models\n'), ((1598, 1630), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1614, 1630), False, 'from django.db import models\n'), ((1658, 1690), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1674, 1690), False, 'from django.db import models\n'), ((1718, 1751), 'django.db.models.EmailField', 'models.EmailField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1735, 1751), False, 'from django.db import models\n'), ((1780, 1812), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1796, 1812), False, 'from django.db import models\n'), ((1840, 1879), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1860, 1879), False, 'from django.db import models\n')] |
JamesTheZ/BladeDISC | tao_compiler/mlir/disc/tests/glob_op_test.bzl | e6c76ee557ebfccd560d44f6b6276bbc4e0a8a34 | # Test definitions for Lit, the LLVM test runner.
#
# This is reusing the LLVM Lit test runner in the interim until the new build
# rules are upstreamed.
# TODO(b/136126535): remove this custom rule.
"""Lit runner globbing test
"""
load("//tensorflow:tensorflow.bzl", "filegroup")
load("@bazel_skylib//lib:paths.bzl", "paths")
load("//tensorflow:tensorflow.bzl", "tf_cc_test", "tf_native_cc_binary", "tf_copts")
# Default values used by the test runner.
_default_test_file_exts = ["mlir", ".pbtxt", ".td"]
_default_driver = "@llvm-project//mlir:run_lit.sh"
_default_size = "small"
_default_tags = []
# These are patterns which we should never match, for tests, subdirectories, or
# test input data files.
_ALWAYS_EXCLUDE = [
"**/LICENSE.txt",
"**/README.txt",
"**/lit.local.cfg",
# Exclude input files that have spaces in their names, since bazel
# cannot cope with such "targets" in the srcs list.
"**/* *",
"**/* */**",
]
def _run_lit_test(name, test_file, data, size, tags, driver, features, exec_properties):
"""Runs lit on all tests it can find in `data` under tensorflow/compiler/mlir.
Note that, due to Bazel's hermetic builds, lit only sees the tests that
are included in the `data` parameter, regardless of what other tests might
exist in the directory searched.
Args:
name: str, the name of the test, including extension.
data: [str], the data input to the test.
size: str, the size of the test.
tags: [str], tags to attach to the test.
driver: str, label of the driver shell script.
Note: use of a custom driver is not currently supported
and specifying a default driver will abort the tests.
features: [str], list of extra features to enable.
"""
name_without_suffix = test_file[0].split('.')[0]
local_test_files = name + ".test_files"
filegroup(
name = local_test_files,
srcs = native.glob([
"data/" + name_without_suffix + "*.mlir",
]),
)
tf_cc_test(
name = name,
srcs = test_file,
size = size,
deps = [
"//tensorflow/compiler/mlir/disc/tests:mlir_feature_test",
"//tensorflow/core:test",
"//tensorflow/core:test_main",
"//tensorflow/core:testlib",
],
data = [":" + local_test_files] + data + [
"//tensorflow/compiler/mlir/disc:disc_compiler_main",
"//tensorflow/compiler/mlir:tf-mlir-translate",
"//tensorflow/compiler/mlir:tf-opt",
],
)
def glob_op_tests(
exclude = [],
test_file_exts = _default_test_file_exts,
default_size = _default_size,
size_override = {},
data = [],
per_test_extra_data = {},
default_tags = _default_tags,
tags_override = {},
driver = _default_driver,
features = [],
exec_properties = {}):
"""Creates all plausible Lit tests (and their inputs) under this directory.
Args:
exclude: [str], paths to exclude (for tests and inputs).
test_file_exts: [str], extensions for files that are tests.
default_size: str, the test size for targets not in "size_override".
size_override: {str: str}, sizes to use for specific tests.
data: [str], additional input data to the test.
per_test_extra_data: {str: [str]}, extra data to attach to a given file.
default_tags: [str], additional tags to attach to the test.
tags_override: {str: str}, tags to add to specific tests.
driver: str, label of the driver shell script.
Note: use of a custom driver is not currently supported
and specifying a default driver will abort the tests.
features: [str], list of extra features to enable.
exec_properties: a dictionary of properties to pass on.
"""
# Ignore some patterns by default for tests and input data.
exclude = _ALWAYS_EXCLUDE + exclude
tests = native.glob(
["*." + ext for ext in test_file_exts],
exclude = exclude,
)
# Run tests individually such that errors can be attributed to a specific
# failure.
for i in range(len(tests)):
curr_test = tests[i]
# Instantiate this test with updated parameters.
lit_test(
name = curr_test,
data = data + per_test_extra_data.get(curr_test, []),
size = size_override.get(curr_test, default_size),
tags = default_tags + tags_override.get(curr_test, []),
driver = driver,
features = features,
exec_properties = exec_properties,
)
def lit_test(
name,
data = [],
size = _default_size,
tags = _default_tags,
driver = _default_driver,
features = [],
exec_properties = {}):
"""Runs test files under lit.
Args:
name: str, the name of the test.
data: [str], labels that should be provided as data inputs.
size: str, the size of the test.
tags: [str], tags to attach to the test.
driver: str, label of the driver shell script.
Note: use of a custom driver is not currently supported
and specifying a default driver will abort the tests.
features: [str], list of extra features to enable.
"""
_run_lit_test(name + ".test", [name], data, size, tags, driver, features, exec_properties)
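# Illustrative BUILD-file usage (the load label and data target below are hypothetical):
#   load("//tensorflow/compiler/mlir/disc/tests:glob_op_test.bzl", "glob_op_tests")
#   glob_op_tests(
#       data = ["//tensorflow/compiler/mlir/disc/tests:extra_test_data"],
#       test_file_exts = ["cc"],
#   )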
| [] |
yulicrunchy/JALoP | build-scripts/PackageCheckHelpers.py | a474b464d4916fe559cf1df97c855232e5ec24ab | """
These are functions to add to the configure context.
"""
def __checkCanLink(context, source, source_type, message_libname, real_libs=[]):
"""
Check that source can be successfully compiled and linked against real_libs.
Keyword arguments:
source -- source to try to compile
source_type -- type of source file, (probably should be ".c")
message_libname -- library name to show in the message output from scons
real_libs -- list of actual libraries to link against (defaults to a list
with one element, the value of message_libname)
"""
if not real_libs:
real_libs = [message_libname]
context.Message("Checking for %s..." % message_libname)
libsave = context.env.get('LIBS')
context.env.AppendUnique(LIBS=real_libs)
ret = context.TryLink(source, source_type)
context.Result( ret )
if libsave is None:
del(context.env['LIBS'])
else:
context.env['LIBS'] = libsave
return ret
libuuid_source = '''
#include <uuid/uuid.h>
int main() {
uuid_t uu;
char uuid_str[37];
uuid_generate(uu);
uuid_unparse(uu, uuid_str);
return 0;
}
'''
def CheckLibUUID(context):
return __checkCanLink(context, libuuid_source, ".c", "libuuid", ["uuid"])
selinux_source = '''
#include <selinux/selinux.h>
int main() {
security_context_t ctx;
getpeercon(0, &ctx);
return 0;
}
'''
def CheckSeLinux(context):
return __checkCanLink(context, selinux_source, '.cpp', 'selinux', ['selinux'])
byteswap_source = '''
#include <byteswap.h>
#include <stdint.h>
int main() {
uint16_t b16 = 0x00FF;
uint32_t b32 = 0x0011EEFF;
uint64_t b64 = 0x00112233CCDDEEFF;
bswap_16(b16);
bswap_32(b32);
bswap_64(b64);
return 0;
}
'''
def CheckByteswap(context):
context.Message("Checking for byteswap.h...")
ret = context.TryCompile(byteswap_source, '.c')
context.Result( ret )
return ret
bdb_source = '''
#include <db.h>
#if defined(DB_VERSION_MAJOR) && DB_VERSION_MAJOR >= 4
#if DB_VERSION_MAJOR == 4
#if defined(DB_VERSION_MINOR) && DB_VERSION_MINOR >= 3
#else
#error ""
#endif
#endif
#else
#error ""
#endif
'''
def CheckBDB(context):
context.Message("Checking for BDB >= 4.3...")
ret = context.TryCompile(bdb_source, '.c')
context.Result(ret)
return ret
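# Illustrative SConstruct usage (hypothetical build script) registering these
# functions as custom configure tests:
#   conf = Configure(env, custom_tests={
#       "CheckLibUUID": CheckLibUUID,
#       "CheckByteswap": CheckByteswap,
#       "CheckBDB": CheckBDB,
#   })
#   if not conf.CheckLibUUID():
#       Exit("libuuid is required")
#   env = conf.Finish()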
| [] |
ari-holtzman/transformers | src/transformers/modeling_tf_pytorch_utils.py | 8725c545e8feeecdcee0ad92ca1d80cee8f0c6e4 | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" PyTorch - TF 2.0 general utilities."""
import logging
import os
import re
import numpy
logger = logging.getLogger(__name__)
def convert_tf_weight_name_to_pt_weight_name(tf_name, start_prefix_to_remove=""):
""" Convert a TF 2.0 model variable name in a pytorch model weight name.
Conventions for TF2.0 scopes -> PyTorch attribute names conversions:
- '$1___$2' is replaced by $2 (can be used to duplicate or remove layers in TF2.0 vs PyTorch)
- '_._' is replaced by a new level separation (can be used to convert TF2.0 lists in PyTorch nn.ModulesList)
return tuple with:
- pytorch model weight name
- transpose: boolean indicating whether the TF 2.0 and PyTorch weight matrices are transposed with regard to each other
"""
tf_name = tf_name.replace(":0", "") # device ids
tf_name = re.sub(
r"/[^/]*___([^/]*)/", r"/\1/", tf_name
) # '$1___$2' is replaced by $2 (can be used to duplicate or remove layers in TF2.0 vs PyTorch)
tf_name = tf_name.replace(
"_._", "/"
) # '_._' is replaced by a level separation (can be used to convert TF2.0 lists in PyTorch nn.ModulesList)
tf_name = re.sub(r"//+", "/", tf_name) # Remove empty levels at the end
tf_name = tf_name.split("/") # Convert from TF2.0 '/' separators to PyTorch '.' separators
tf_name = tf_name[1:] # Remove level zero
# When should we transpose the weights
transpose = bool(tf_name[-1] == "kernel" or "emb_projs" in tf_name or "out_projs" in tf_name)
# Convert standard TF2.0 names in PyTorch names
if tf_name[-1] == "kernel" or tf_name[-1] == "embeddings" or tf_name[-1] == "gamma":
tf_name[-1] = "weight"
if tf_name[-1] == "beta":
tf_name[-1] = "bias"
# Remove prefix if needed
tf_name = ".".join(tf_name)
if start_prefix_to_remove:
tf_name = tf_name.replace(start_prefix_to_remove, "", 1)
return tf_name, transpose
#####################
# PyTorch => TF 2.0 #
#####################
def load_pytorch_checkpoint_in_tf2_model(tf_model, pytorch_checkpoint_path, tf_inputs=None, allow_missing_keys=False):
""" Load pytorch checkpoints in a TF 2.0 model
"""
try:
import tensorflow as tf # noqa: F401
import torch # noqa: F401
except ImportError:
logger.error(
"Loading a PyTorch model in TensorFlow, requires both PyTorch and TensorFlow to be installed. Please see "
"https://pytorch.org/ and https://www.tensorflow.org/install/ for installation instructions."
)
raise
pt_path = os.path.abspath(pytorch_checkpoint_path)
logger.info("Loading PyTorch weights from {}".format(pt_path))
pt_state_dict = torch.load(pt_path, map_location="cpu")
logger.info("PyTorch checkpoint contains {:,} parameters".format(sum(t.numel() for t in pt_state_dict.values())))
return load_pytorch_weights_in_tf2_model(
tf_model, pt_state_dict, tf_inputs=tf_inputs, allow_missing_keys=allow_missing_keys
)
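# Illustrative call (hypothetical model class and checkpoint path):
#   tf_model = TFBertForSequenceClassification(config)
#   tf_model = load_pytorch_checkpoint_in_tf2_model(tf_model, "./pt_model/pytorch_model.bin")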
def load_pytorch_model_in_tf2_model(tf_model, pt_model, tf_inputs=None, allow_missing_keys=False):
""" Load pytorch checkpoints in a TF 2.0 model
"""
pt_state_dict = pt_model.state_dict()
return load_pytorch_weights_in_tf2_model(
tf_model, pt_state_dict, tf_inputs=tf_inputs, allow_missing_keys=allow_missing_keys
)
def load_pytorch_weights_in_tf2_model(tf_model, pt_state_dict, tf_inputs=None, allow_missing_keys=False):
""" Load pytorch state_dict in a TF 2.0 model.
"""
try:
import torch # noqa: F401
import tensorflow as tf # noqa: F401
from tensorflow.python.keras import backend as K
except ImportError:
logger.error(
"Loading a PyTorch model in TensorFlow, requires both PyTorch and TensorFlow to be installed. Please see "
"https://pytorch.org/ and https://www.tensorflow.org/install/ for installation instructions."
)
raise
if tf_inputs is None:
tf_inputs = tf_model.dummy_inputs
if tf_inputs is not None:
tf_model(tf_inputs, training=False) # Make sure model is built
# Adapt state dict - TODO remove this and update the AWS weights files instead
# Convert old format to new format if needed from a PyTorch state_dict
old_keys = []
new_keys = []
for key in pt_state_dict.keys():
new_key = None
if "gamma" in key:
new_key = key.replace("gamma", "weight")
if "beta" in key:
new_key = key.replace("beta", "bias")
if new_key:
old_keys.append(key)
new_keys.append(new_key)
for old_key, new_key in zip(old_keys, new_keys):
pt_state_dict[new_key] = pt_state_dict.pop(old_key)
# Make sure we are able to load PyTorch base models as well as derived models (with heads)
# TF models always have a prefix, some of PyTorch models (base ones) don't
start_prefix_to_remove = ""
if not any(s.startswith(tf_model.base_model_prefix) for s in pt_state_dict.keys()):
start_prefix_to_remove = tf_model.base_model_prefix + "."
symbolic_weights = tf_model.trainable_weights + tf_model.non_trainable_weights
tf_loaded_numel = 0
weight_value_tuples = []
all_pytorch_weights = set(list(pt_state_dict.keys()))
for symbolic_weight in symbolic_weights:
sw_name = symbolic_weight.name
name, transpose = convert_tf_weight_name_to_pt_weight_name(
sw_name, start_prefix_to_remove=start_prefix_to_remove
)
# Find associated numpy array in pytorch model state dict
if name not in pt_state_dict:
if allow_missing_keys:
continue
raise AttributeError("{} not found in PyTorch model".format(name))
array = pt_state_dict[name].numpy()
if transpose:
array = numpy.transpose(array)
if len(symbolic_weight.shape) < len(array.shape):
array = numpy.squeeze(array)
elif len(symbolic_weight.shape) > len(array.shape):
array = numpy.expand_dims(array, axis=0)
try:
assert list(symbolic_weight.shape) == list(array.shape)
except AssertionError as e:
e.args += (symbolic_weight.shape, array.shape)
raise e
tf_loaded_numel += array.size
# logger.warning("Initialize TF weight {}".format(symbolic_weight.name))
weight_value_tuples.append((symbolic_weight, array))
all_pytorch_weights.discard(name)
K.batch_set_value(weight_value_tuples)
if tf_inputs is not None:
tf_model(tf_inputs, training=False) # Make sure restore ops are run
logger.info("Loaded {:,} parameters in the TF 2.0 model.".format(tf_loaded_numel))
logger.info("Weights or buffers not loaded from PyTorch model: {}".format(all_pytorch_weights))
return tf_model
#####################
# TF 2.0 => PyTorch #
#####################
def load_tf2_checkpoint_in_pytorch_model(pt_model, tf_checkpoint_path, tf_inputs=None, allow_missing_keys=False):
""" Load TF 2.0 HDF5 checkpoint in a PyTorch model
We use HDF5 to easily do transfer learning
(see https://github.com/tensorflow/tensorflow/blob/ee16fcac960ae660e0e4496658a366e2f745e1f0/tensorflow/python/keras/engine/network.py#L1352-L1357).
"""
try:
import tensorflow as tf # noqa: F401
import torch # noqa: F401
except ImportError:
logger.error(
"Loading a TensorFlow model in PyTorch, requires both PyTorch and TensorFlow to be installed. Please see "
"https://pytorch.org/ and https://www.tensorflow.org/install/ for installation instructions."
)
raise
import transformers
logger.info("Loading TensorFlow weights from {}".format(tf_checkpoint_path))
# Instantiate and load the associated TF 2.0 model
tf_model_class_name = "TF" + pt_model.__class__.__name__ # Add "TF" at the beggining
tf_model_class = getattr(transformers, tf_model_class_name)
tf_model = tf_model_class(pt_model.config)
if tf_inputs is None:
tf_inputs = tf_model.dummy_inputs
if tf_inputs is not None:
tf_model(tf_inputs, training=False) # Make sure model is built
tf_model.load_weights(tf_checkpoint_path, by_name=True)
return load_tf2_model_in_pytorch_model(pt_model, tf_model, allow_missing_keys=allow_missing_keys)
def load_tf2_model_in_pytorch_model(pt_model, tf_model, allow_missing_keys=False):
""" Load TF 2.0 model in a pytorch model
"""
weights = tf_model.weights
return load_tf2_weights_in_pytorch_model(pt_model, weights, allow_missing_keys=allow_missing_keys)
def load_tf2_weights_in_pytorch_model(pt_model, tf_weights, allow_missing_keys=False):
""" Load TF2.0 symbolic weights in a PyTorch model
"""
try:
import tensorflow as tf # noqa: F401
import torch # noqa: F401
except ImportError:
logger.error(
"Loading a TensorFlow model in PyTorch, requires both PyTorch and TensorFlow to be installed. Please see "
"https://pytorch.org/ and https://www.tensorflow.org/install/ for installation instructions."
)
raise
new_pt_params_dict = {}
current_pt_params_dict = dict(pt_model.named_parameters())
# Make sure we are able to load PyTorch base models as well as derived models (with heads)
# TF models always have a prefix, some of PyTorch models (base ones) don't
start_prefix_to_remove = ""
if not any(s.startswith(pt_model.base_model_prefix) for s in current_pt_params_dict.keys()):
start_prefix_to_remove = pt_model.base_model_prefix + "."
# Build a map from potential PyTorch weight names to TF 2.0 Variables
tf_weights_map = {}
for tf_weight in tf_weights:
pt_name, transpose = convert_tf_weight_name_to_pt_weight_name(
tf_weight.name, start_prefix_to_remove=start_prefix_to_remove
)
tf_weights_map[pt_name] = (tf_weight.numpy(), transpose)
all_tf_weights = set(list(tf_weights_map.keys()))
loaded_pt_weights_data_ptr = {}
missing_keys_pt = []
for pt_weight_name, pt_weight in current_pt_params_dict.items():
# Handle PyTorch shared weights (not duplicated in TF 2.0)
if pt_weight.data_ptr() in loaded_pt_weights_data_ptr:
new_pt_params_dict[pt_weight_name] = loaded_pt_weights_data_ptr[pt_weight.data_ptr()]
continue
# Find associated numpy array in pytorch model state dict
if pt_weight_name not in tf_weights_map:
if allow_missing_keys:
missing_keys_pt.append(pt_weight_name)
continue
raise AttributeError("{} not found in TF 2.0 model".format(pt_weight_name))
array, transpose = tf_weights_map[pt_weight_name]
if transpose:
array = numpy.transpose(array)
if len(pt_weight.shape) < len(array.shape):
array = numpy.squeeze(array)
elif len(pt_weight.shape) > len(array.shape):
array = numpy.expand_dims(array, axis=0)
try:
assert list(pt_weight.shape) == list(array.shape)
except AssertionError as e:
e.args += (pt_weight.shape, array.shape)
raise e
# logger.warning("Initialize PyTorch weight {}".format(pt_weight_name))
new_pt_params_dict[pt_weight_name] = torch.from_numpy(array)
loaded_pt_weights_data_ptr[pt_weight.data_ptr()] = torch.from_numpy(array)
all_tf_weights.discard(pt_weight_name)
missing_keys, unexpected_keys = pt_model.load_state_dict(new_pt_params_dict, strict=False)
missing_keys += missing_keys_pt
if len(missing_keys) > 0:
logger.info(
"Weights of {} not initialized from TF 2.0 model: {}".format(pt_model.__class__.__name__, missing_keys)
)
if len(unexpected_keys) > 0:
logger.info(
"Weights from TF 2.0 model not used in {}: {}".format(pt_model.__class__.__name__, unexpected_keys)
)
logger.info("Weights or buffers not loaded from TF 2.0 model: {}".format(all_tf_weights))
return pt_model
| [((814, 841), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (831, 841), False, 'import logging\n'), ((1581, 1626), 're.sub', 're.sub', (['"""/[^/]*___([^/]*)/"""', '"""/\\\\1/"""', 'tf_name'], {}), "('/[^/]*___([^/]*)/', '/\\\\1/', tf_name)\n", (1587, 1626), False, 'import re\n'), ((1913, 1940), 're.sub', 're.sub', (['"""//+"""', '"""/"""', 'tf_name'], {}), "('//+', '/', tf_name)\n", (1919, 1940), False, 'import re\n'), ((3331, 3371), 'os.path.abspath', 'os.path.abspath', (['pytorch_checkpoint_path'], {}), '(pytorch_checkpoint_path)\n', (3346, 3371), False, 'import os\n'), ((3460, 3499), 'torch.load', 'torch.load', (['pt_path'], {'map_location': '"""cpu"""'}), "(pt_path, map_location='cpu')\n", (3470, 3499), False, 'import torch\n'), ((7284, 7322), 'tensorflow.python.keras.backend.batch_set_value', 'K.batch_set_value', (['weight_value_tuples'], {}), '(weight_value_tuples)\n', (7301, 7322), True, 'from tensorflow.python.keras import backend as K\n'), ((12194, 12217), 'torch.from_numpy', 'torch.from_numpy', (['array'], {}), '(array)\n', (12210, 12217), False, 'import torch\n'), ((12277, 12300), 'torch.from_numpy', 'torch.from_numpy', (['array'], {}), '(array)\n', (12293, 12300), False, 'import torch\n'), ((6622, 6644), 'numpy.transpose', 'numpy.transpose', (['array'], {}), '(array)\n', (6637, 6644), False, 'import numpy\n'), ((6724, 6744), 'numpy.squeeze', 'numpy.squeeze', (['array'], {}), '(array)\n', (6737, 6744), False, 'import numpy\n'), ((11658, 11680), 'numpy.transpose', 'numpy.transpose', (['array'], {}), '(array)\n', (11673, 11680), False, 'import numpy\n'), ((11754, 11774), 'numpy.squeeze', 'numpy.squeeze', (['array'], {}), '(array)\n', (11767, 11774), False, 'import numpy\n'), ((6825, 6857), 'numpy.expand_dims', 'numpy.expand_dims', (['array'], {'axis': '(0)'}), '(array, axis=0)\n', (6842, 6857), False, 'import numpy\n'), ((11849, 11881), 'numpy.expand_dims', 'numpy.expand_dims', (['array'], {'axis': '(0)'}), '(array, axis=0)\n', (11866, 11881), False, 'import numpy\n')] |
mitochon/hail | hail/python/test/hail/helpers.py | 25e5e5b8da1d978468d2cee393426ade46484a87 | import os
from timeit import default_timer as timer
import unittest
import pytest
from decorator import decorator
from hail.utils.java import Env
import hail as hl
from hail.backend.local_backend import LocalBackend
_initialized = False
def startTestHailContext():
global _initialized
if not _initialized:
backend_name = os.environ.get('HAIL_QUERY_BACKEND', 'spark')
if backend_name == 'spark':
hl.init(master='local[1]', min_block_size=0, quiet=True)
else:
Env.hc() # force initialization
_initialized = True
def stopTestHailContext():
pass
_test_dir = os.environ.get('HAIL_TEST_RESOURCES_DIR', '../src/test/resources')
_doctest_dir = os.environ.get('HAIL_DOCTEST_DATA_DIR', 'hail/docs/data')
def resource(filename):
return os.path.join(_test_dir, filename)
def doctest_resource(filename):
return os.path.join(_doctest_dir, filename)
def schema_eq(x, y):
x_fds = dict(x)
y_fds = dict(y)
return x_fds == y_fds
def convert_struct_to_dict(x):
if isinstance(x, hl.Struct):
return {k: convert_struct_to_dict(v) for k, v in x._fields.items()}
elif isinstance(x, list):
return [convert_struct_to_dict(elt) for elt in x]
elif isinstance(x, tuple):
return tuple([convert_struct_to_dict(elt) for elt in x])
elif isinstance(x, dict):
return {k: convert_struct_to_dict(v) for k, v in x.items()}
else:
return x
_dataset = None
def get_dataset():
global _dataset
if _dataset is None:
_dataset = hl.split_multi_hts(hl.import_vcf(resource('sample.vcf'))).cache()
return _dataset
def assert_time(f, max_duration):
start = timer()
x = f()
end = timer()
assert (end - start) < max_duration
print(f'took {end - start:.3f}')
return x
def create_all_values():
return hl.struct(
f32=hl.float32(3.14),
i64=hl.int64(-9),
m=hl.null(hl.tfloat64),
astruct=hl.struct(a=hl.null(hl.tint32), b=5.5),
mstruct=hl.null(hl.tstruct(x=hl.tint32, y=hl.tstr)),
aset=hl.set(['foo', 'bar', 'baz']),
mset=hl.null(hl.tset(hl.tfloat64)),
d=hl.dict({hl.array(['a', 'b']): 0.5, hl.array(['x', hl.null(hl.tstr), 'z']): 0.3}),
md=hl.null(hl.tdict(hl.tint32, hl.tstr)),
h38=hl.locus('chr22', 33878978, 'GRCh38'),
ml=hl.null(hl.tlocus('GRCh37')),
i=hl.interval(
hl.locus('1', 999),
hl.locus('1', 1001)),
c=hl.call(0, 1),
mc=hl.null(hl.tcall),
t=hl.tuple([hl.call(1, 2, phased=True), 'foo', hl.null(hl.tstr)]),
mt=hl.null(hl.ttuple(hl.tlocus('GRCh37'), hl.tbool)),
nd=hl.nd.arange(0, 10).reshape((2, 5)),
)
def prefix_struct(s, prefix):
return hl.struct(**{prefix + k: s[k] for k in s})
def create_all_values_table():
all_values = create_all_values()
return (hl.utils.range_table(5, n_partitions=3)
.annotate_globals(**prefix_struct(all_values, 'global_'))
.annotate(**all_values)
.cache())
def create_all_values_matrix_table():
all_values = create_all_values()
return (hl.utils.range_matrix_table(3, 2, n_partitions=2)
.annotate_globals(**prefix_struct(all_values, 'global_'))
.annotate_rows(**prefix_struct(all_values, 'row_'))
.annotate_cols(**prefix_struct(all_values, 'col_'))
.annotate_entries(**prefix_struct(all_values, 'entry_'))
.cache())
def create_all_values_datasets():
return (create_all_values_table(), create_all_values_matrix_table())
def skip_unless_spark_backend():
from hail.backend.spark_backend import SparkBackend
@decorator
def wrapper(func, *args, **kwargs):
if isinstance(hl.utils.java.Env.backend(), SparkBackend):
return func(*args, **kwargs)
else:
raise unittest.SkipTest('requires Spark')
return wrapper
fails_local_backend = pytest.mark.xfail(
os.environ.get('HAIL_QUERY_BACKEND') == 'local',
reason="doesn't yet work on local backend",
strict=True)
def run_with_cxx_compile():
@decorator
def wrapper(func, *args, **kwargs):
return
return wrapper
def assert_evals_to(e, v):
res = hl.eval(e)
if res != v:
raise ValueError(f' actual: {res}\n expected: {v}')
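# Illustrative check (toy expression):
#   assert_evals_to(hl.int32(3) + 2, 5)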
def assert_all_eval_to(*expr_and_expected):
exprs, expecteds = zip(*expr_and_expected)
assert_evals_to(hl.tuple(exprs), expecteds)
def lower_only():
@decorator
def wrapper(func, *args, **kwargs):
flags = hl._get_flags()
prev_lower = flags.get('lower')
prev_lower_only = flags.get('lower_only')
hl._set_flags(lower='1', lower_only='1')
try:
return func(*args, **kwargs)
finally:
hl._set_flags(lower=prev_lower, lower_only=prev_lower_only)
return wrapper | [((630, 696), 'os.environ.get', 'os.environ.get', (['"""HAIL_TEST_RESOURCES_DIR"""', '"""../src/test/resources"""'], {}), "('HAIL_TEST_RESOURCES_DIR', '../src/test/resources')\n", (644, 696), False, 'import os\n'), ((712, 769), 'os.environ.get', 'os.environ.get', (['"""HAIL_DOCTEST_DATA_DIR"""', '"""hail/docs/data"""'], {}), "('HAIL_DOCTEST_DATA_DIR', 'hail/docs/data')\n", (726, 769), False, 'import os\n'), ((807, 840), 'os.path.join', 'os.path.join', (['_test_dir', 'filename'], {}), '(_test_dir, filename)\n', (819, 840), False, 'import os\n'), ((886, 922), 'os.path.join', 'os.path.join', (['_doctest_dir', 'filename'], {}), '(_doctest_dir, filename)\n', (898, 922), False, 'import os\n'), ((1699, 1706), 'timeit.default_timer', 'timer', ([], {}), '()\n', (1704, 1706), True, 'from timeit import default_timer as timer\n'), ((1729, 1736), 'timeit.default_timer', 'timer', ([], {}), '()\n', (1734, 1736), True, 'from timeit import default_timer as timer\n'), ((2780, 2824), 'hail.struct', 'hl.struct', ([], {}), '(**{(prefix + k): s[k] for k in s})\n', (2789, 2824), True, 'import hail as hl\n'), ((4266, 4276), 'hail.eval', 'hl.eval', (['e'], {}), '(e)\n', (4273, 4276), True, 'import hail as hl\n'), ((341, 386), 'os.environ.get', 'os.environ.get', (['"""HAIL_QUERY_BACKEND"""', '"""spark"""'], {}), "('HAIL_QUERY_BACKEND', 'spark')\n", (355, 386), False, 'import os\n'), ((3994, 4030), 'os.environ.get', 'os.environ.get', (['"""HAIL_QUERY_BACKEND"""'], {}), "('HAIL_QUERY_BACKEND')\n", (4008, 4030), False, 'import os\n'), ((4469, 4484), 'hail.tuple', 'hl.tuple', (['exprs'], {}), '(exprs)\n', (4477, 4484), True, 'import hail as hl\n'), ((4588, 4603), 'hail._get_flags', 'hl._get_flags', ([], {}), '()\n', (4601, 4603), True, 'import hail as hl\n'), ((4703, 4743), 'hail._set_flags', 'hl._set_flags', ([], {'lower': '"""1"""', 'lower_only': '"""1"""'}), "(lower='1', lower_only='1')\n", (4716, 4743), True, 'import hail as hl\n'), ((435, 491), 'hail.init', 'hl.init', ([], {'master': '"""local[1]"""', 'min_block_size': '(0)', 'quiet': '(True)'}), "(master='local[1]', min_block_size=0, quiet=True)\n", (442, 491), True, 'import hail as hl\n'), ((518, 526), 'hail.utils.java.Env.hc', 'Env.hc', ([], {}), '()\n', (524, 526), False, 'from hail.utils.java import Env\n'), ((1887, 1903), 'hail.float32', 'hl.float32', (['(3.14)'], {}), '(3.14)\n', (1897, 1903), True, 'import hail as hl\n'), ((1917, 1929), 'hail.int64', 'hl.int64', (['(-9)'], {}), '(-9)\n', (1925, 1929), True, 'import hail as hl\n'), ((1941, 1961), 'hail.null', 'hl.null', (['hl.tfloat64'], {}), '(hl.tfloat64)\n', (1948, 1961), True, 'import hail as hl\n'), ((2093, 2122), 'hail.set', 'hl.set', (["['foo', 'bar', 'baz']"], {}), "(['foo', 'bar', 'baz'])\n", (2099, 2122), True, 'import hail as hl\n'), ((2323, 2360), 'hail.locus', 'hl.locus', (['"""chr22"""', '(33878978)', '"""GRCh38"""'], {}), "('chr22', 33878978, 'GRCh38')\n", (2331, 2360), True, 'import hail as hl\n'), ((2502, 2515), 'hail.call', 'hl.call', (['(0)', '(1)'], {}), '(0, 1)\n', (2509, 2515), True, 'import hail as hl\n'), ((2528, 2545), 'hail.null', 'hl.null', (['hl.tcall'], {}), '(hl.tcall)\n', (2535, 2545), True, 'import hail as hl\n'), ((3774, 3801), 'hail.utils.java.Env.backend', 'hl.utils.java.Env.backend', ([], {}), '()\n', (3799, 3801), True, 'import hail as hl\n'), ((3891, 3926), 'unittest.SkipTest', 'unittest.SkipTest', (['"""requires Spark"""'], {}), "('requires Spark')\n", (3908, 3926), False, 'import unittest\n'), ((4828, 4887), 'hail._set_flags', 'hl._set_flags', ([], 
{'lower': 'prev_lower', 'lower_only': 'prev_lower_only'}), '(lower=prev_lower, lower_only=prev_lower_only)\n', (4841, 4887), True, 'import hail as hl\n'), ((2043, 2077), 'hail.tstruct', 'hl.tstruct', ([], {'x': 'hl.tint32', 'y': 'hl.tstr'}), '(x=hl.tint32, y=hl.tstr)\n', (2053, 2077), True, 'import hail as hl\n'), ((2145, 2165), 'hail.tset', 'hl.tset', (['hl.tfloat64'], {}), '(hl.tfloat64)\n', (2152, 2165), True, 'import hail as hl\n'), ((2280, 2308), 'hail.tdict', 'hl.tdict', (['hl.tint32', 'hl.tstr'], {}), '(hl.tint32, hl.tstr)\n', (2288, 2308), True, 'import hail as hl\n'), ((2381, 2400), 'hail.tlocus', 'hl.tlocus', (['"""GRCh37"""'], {}), "('GRCh37')\n", (2390, 2400), True, 'import hail as hl\n'), ((2438, 2456), 'hail.locus', 'hl.locus', (['"""1"""', '(999)'], {}), "('1', 999)\n", (2446, 2456), True, 'import hail as hl\n'), ((2470, 2489), 'hail.locus', 'hl.locus', (['"""1"""', '(1001)'], {}), "('1', 1001)\n", (2478, 2489), True, 'import hail as hl\n'), ((1991, 2009), 'hail.null', 'hl.null', (['hl.tint32'], {}), '(hl.tint32)\n', (1998, 2009), True, 'import hail as hl\n'), ((2187, 2207), 'hail.array', 'hl.array', (["['a', 'b']"], {}), "(['a', 'b'])\n", (2195, 2207), True, 'import hail as hl\n'), ((2567, 2593), 'hail.call', 'hl.call', (['(1)', '(2)'], {'phased': '(True)'}), '(1, 2, phased=True)\n', (2574, 2593), True, 'import hail as hl\n'), ((2602, 2618), 'hail.null', 'hl.null', (['hl.tstr'], {}), '(hl.tstr)\n', (2609, 2618), True, 'import hail as hl\n'), ((2651, 2670), 'hail.tlocus', 'hl.tlocus', (['"""GRCh37"""'], {}), "('GRCh37')\n", (2660, 2670), True, 'import hail as hl\n'), ((2695, 2714), 'hail.nd.arange', 'hl.nd.arange', (['(0)', '(10)'], {}), '(0, 10)\n', (2707, 2714), True, 'import hail as hl\n'), ((2229, 2245), 'hail.null', 'hl.null', (['hl.tstr'], {}), '(hl.tstr)\n', (2236, 2245), True, 'import hail as hl\n'), ((2904, 2943), 'hail.utils.range_table', 'hl.utils.range_table', (['(5)'], {'n_partitions': '(3)'}), '(5, n_partitions=3)\n', (2924, 2943), True, 'import hail as hl\n'), ((3160, 3209), 'hail.utils.range_matrix_table', 'hl.utils.range_matrix_table', (['(3)', '(2)'], {'n_partitions': '(2)'}), '(3, 2, n_partitions=2)\n', (3187, 3209), True, 'import hail as hl\n')] |
mjstrobl/WEXEA | src/entity_linker/models/figer_model/labeling_model.py | 0af0be1cdb93fc00cd81f885aa15ef8d6579b304 | """
Modifications copyright (C) 2020 Michael Strobl
"""
import time
import tensorflow as tf
import numpy as np
from entity_linker.models.base import Model
class LabelingModel(Model):
"""Unsupervised Clustering using Discrete-State VAE"""
def __init__(self, batch_size, num_labels, context_encoded_dim,
true_entity_embeddings,
word_embed_dim, context_encoded, mention_embed, scope_name, device):
self.batch_size = batch_size
self.num_labels = num_labels
self.word_embed_dim = word_embed_dim
with tf.variable_scope(scope_name) as s, tf.device(device) as d:
if mention_embed == None:
self.label_weights = tf.get_variable(
name="label_weights",
shape=[context_encoded_dim, num_labels],
initializer=tf.random_normal_initializer(mean=0.0,
stddev=1.0/(100.0)))
else:
context_encoded = tf.concat(
1, [context_encoded, mention_embed], name='con_ment_repr')
self.label_weights = tf.get_variable(
name="label_weights",
shape=[context_encoded_dim+word_embed_dim, num_labels],
initializer=tf.random_normal_initializer(mean=0.0,
stddev=1.0/(100.0)))
# [B, L]
self.label_scores = tf.matmul(context_encoded, self.label_weights)
self.label_probs = tf.sigmoid(self.label_scores)
### PREDICT TYPES FROM ENTITIES
#true_entity_embeddings = tf.nn.dropout(true_entity_embeddings, keep_prob=0.5)
self.entity_label_scores = tf.matmul(true_entity_embeddings, self.label_weights)
self.entity_label_probs = tf.sigmoid(self.entity_label_scores)
def loss_graph(self, true_label_ids, scope_name, device_gpu):
with tf.variable_scope(scope_name) as s, tf.device(device_gpu) as d:
# [B, L]
self.cross_entropy_losses = tf.nn.sigmoid_cross_entropy_with_logits(
logits=self.label_scores,
targets=true_label_ids,
name="labeling_loss")
self.labeling_loss = tf.reduce_sum(
self.cross_entropy_losses) / tf.to_float(self.batch_size)
self.enlabel_cross_entropy_losses = tf.nn.sigmoid_cross_entropy_with_logits(
logits=self.entity_label_scores,
targets=true_label_ids,
name="entity_labeling_loss")
self.entity_labeling_loss = tf.reduce_sum(
self.enlabel_cross_entropy_losses) / tf.to_float(self.batch_size)
| [((575, 604), 'tensorflow.variable_scope', 'tf.variable_scope', (['scope_name'], {}), '(scope_name)\n', (592, 604), True, 'import tensorflow as tf\n'), ((611, 628), 'tensorflow.device', 'tf.device', (['device'], {}), '(device)\n', (620, 628), True, 'import tensorflow as tf\n'), ((1486, 1532), 'tensorflow.matmul', 'tf.matmul', (['context_encoded', 'self.label_weights'], {}), '(context_encoded, self.label_weights)\n', (1495, 1532), True, 'import tensorflow as tf\n'), ((1564, 1593), 'tensorflow.sigmoid', 'tf.sigmoid', (['self.label_scores'], {}), '(self.label_scores)\n', (1574, 1593), True, 'import tensorflow as tf\n'), ((1769, 1822), 'tensorflow.matmul', 'tf.matmul', (['true_entity_embeddings', 'self.label_weights'], {}), '(true_entity_embeddings, self.label_weights)\n', (1778, 1822), True, 'import tensorflow as tf\n'), ((1861, 1890), 'tensorflow.sigmoid', 'tf.sigmoid', (['self.label_scores'], {}), '(self.label_scores)\n', (1871, 1890), True, 'import tensorflow as tf\n'), ((1972, 2001), 'tensorflow.variable_scope', 'tf.variable_scope', (['scope_name'], {}), '(scope_name)\n', (1989, 2001), True, 'import tensorflow as tf\n'), ((2008, 2029), 'tensorflow.device', 'tf.device', (['device_gpu'], {}), '(device_gpu)\n', (2017, 2029), True, 'import tensorflow as tf\n'), ((2097, 2213), 'tensorflow.nn.sigmoid_cross_entropy_with_logits', 'tf.nn.sigmoid_cross_entropy_with_logits', ([], {'logits': 'self.label_scores', 'targets': 'true_label_ids', 'name': '"""labeling_loss"""'}), "(logits=self.label_scores, targets=\n true_label_ids, name='labeling_loss')\n", (2136, 2213), True, 'import tensorflow as tf\n'), ((2423, 2552), 'tensorflow.nn.sigmoid_cross_entropy_with_logits', 'tf.nn.sigmoid_cross_entropy_with_logits', ([], {'logits': 'self.entity_label_scores', 'targets': 'true_label_ids', 'name': '"""entity_labeling_loss"""'}), "(logits=self.entity_label_scores,\n targets=true_label_ids, name='entity_labeling_loss')\n", (2462, 2552), True, 'import tensorflow as tf\n'), ((1027, 1095), 'tensorflow.concat', 'tf.concat', (['(1)', '[context_encoded, mention_embed]'], {'name': '"""con_ment_repr"""'}), "(1, [context_encoded, mention_embed], name='con_ment_repr')\n", (1036, 1095), True, 'import tensorflow as tf\n'), ((2286, 2326), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['self.cross_entropy_losses'], {}), '(self.cross_entropy_losses)\n', (2299, 2326), True, 'import tensorflow as tf\n'), ((2344, 2372), 'tensorflow.to_float', 'tf.to_float', (['self.batch_size'], {}), '(self.batch_size)\n', (2355, 2372), True, 'import tensorflow as tf\n'), ((2633, 2681), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['self.enlabel_cross_entropy_losses'], {}), '(self.enlabel_cross_entropy_losses)\n', (2646, 2681), True, 'import tensorflow as tf\n'), ((2699, 2727), 'tensorflow.to_float', 'tf.to_float', (['self.batch_size'], {}), '(self.batch_size)\n', (2710, 2727), True, 'import tensorflow as tf\n'), ((856, 914), 'tensorflow.random_normal_initializer', 'tf.random_normal_initializer', ([], {'mean': '(0.0)', 'stddev': '(1.0 / 100.0)'}), '(mean=0.0, stddev=1.0 / 100.0)\n', (884, 914), True, 'import tensorflow as tf\n'), ((1313, 1371), 'tensorflow.random_normal_initializer', 'tf.random_normal_initializer', ([], {'mean': '(0.0)', 'stddev': '(1.0 / 100.0)'}), '(mean=0.0, stddev=1.0 / 100.0)\n', (1341, 1371), True, 'import tensorflow as tf\n')] |
wutobias/collection | python/molecular_diameter.py | fdac4ce5bb99c31115efdbed7db3316eea4b2826 | #!/usr/bin/env python
import sys
import parmed as pmd
import numpy as np
from scipy.spatial import distance
if len(sys.argv) < 2:
print "Usage: molecular_diameter.py <mymolecule.mol2>"
exit(1)
mol = pmd.load_file(sys.argv[1])
crds = mol.coordinates
dist = distance.cdist(crds, crds, 'euclidean')
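# The "diameter" printed below is the largest pairwise atom-atom distance,
# in the coordinate units of the input structure (typically Angstroms for mol2).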
print np.max(dist)
exit(0) | [] |
liorshk/simpletransformers | examples/text_classification/yelp_reviews_polarity/train.py | 226cf4d11edf5157c1beafcc44aaa78f65ccc985 | import sys
import pandas as pd
from simpletransformers.classification import ClassificationModel
prefix = "data/"
train_df = pd.read_csv(prefix + "train.csv", header=None)
train_df.head()
eval_df = pd.read_csv(prefix + "test.csv", header=None)
eval_df.head()
train_df[0] = (train_df[0] == 2).astype(int)
eval_df[0] = (eval_df[0] == 2).astype(int)
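# The Yelp polarity CSVs label reviews 1 (negative) or 2 (positive); the two
# lines above remap them to 0/1 for binary classification.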
train_df = pd.DataFrame(
{"text": train_df[1].replace(r"\n", " ", regex=True), "labels": train_df[0]}
)
print(train_df.head())
eval_df = pd.DataFrame(
{"text": eval_df[1].replace(r"\n", " ", regex=True), "labels": eval_df[0]}
)
print(eval_df.head())
model_type = sys.argv[1]
if model_type == "bert":
model_name = "bert-base-cased"
elif model_type == "roberta":
model_name = "roberta-base"
elif model_type == "distilbert":
model_name = "distilbert-base-cased"
elif model_type == "distilroberta":
model_type = "roberta"
model_name = "distilroberta-base"
elif model_type == "electra-base":
model_type = "electra"
model_name = "google/electra-base-discriminator"
elif model_type == "electra-small":
model_type = "electra"
model_name = "google/electra-small-discriminator"
elif model_type == "xlnet":
model_name = "xlnet-base-cased"
train_args = {
"reprocess_input_data": True,
"overwrite_output_dir": True,
"use_cached_eval_features": True,
"output_dir": f"outputs/{model_type}",
"best_model_dir": f"outputs/{model_type}/best_model",
"evaluate_during_training": True,
"max_seq_length": 128,
"num_train_epochs": 3,
"evaluate_during_training_steps": 1000,
"wandb_project": "Classification Model Comparison",
"wandb_kwargs": {"name": model_name},
"save_model_every_epoch": False,
"save_eval_checkpoints": False,
# "use_early_stopping": True,
# "early_stopping_metric": "mcc",
# "n_gpu": 2,
# "manual_seed": 4,
# "use_multiprocessing": False,
"train_batch_size": 128,
"eval_batch_size": 64,
# "config": {
# "output_hidden_states": True
# }
}
if model_type == "xlnet":
train_args["train_batch_size"] = 64
train_args["gradient_accumulation_steps"] = 2
# Create a ClassificationModel
model = ClassificationModel(model_type, model_name, args=train_args)
# Train the model
model.train_model(train_df, eval_df=eval_df)
# # # Evaluate the model
# result, model_outputs, wrong_predictions = model.eval_model(eval_df)
| [((129, 175), 'pandas.read_csv', 'pd.read_csv', (["(prefix + 'train.csv')"], {'header': 'None'}), "(prefix + 'train.csv', header=None)\n", (140, 175), True, 'import pandas as pd\n'), ((203, 248), 'pandas.read_csv', 'pd.read_csv', (["(prefix + 'test.csv')"], {'header': 'None'}), "(prefix + 'test.csv', header=None)\n", (214, 248), True, 'import pandas as pd\n'), ((2203, 2263), 'simpletransformers.classification.ClassificationModel', 'ClassificationModel', (['model_type', 'model_name'], {'args': 'train_args'}), '(model_type, model_name, args=train_args)\n', (2222, 2263), False, 'from simpletransformers.classification import ClassificationModel\n')] |
mahdi-zafarmand/SNA | LoadGraph.py | a7188a2ceb63355183e470648f6ae4fa90a22faa | import networkx as nx
import os.path
def load_graph(path, weighted=False, delimiter='\t', self_loop=False):
graph = nx.Graph()
if not os.path.isfile(path):
print("Error: file " + path + " not found!")
exit(-1)
with open(path) as file:
for line in file.readlines():
w = 1.0
line = line.split(delimiter)
v1 = int(line[0])
v2 = int(line[1])
graph.add_node(v1)
graph.add_node(v2)
if weighted:
w = float(line[2])
if (self_loop and v1 == v2) or (v1 != v2):
graph.add_edge(v1, v2, weight=w)
return graph
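# Illustrative call (hypothetical edge-list file):
#   graph = load_graph('datasets/karate.txt', weighted=False, delimiter='\t')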
def load_graph_uncertain(path, delimiter='\t', self_loop=False):
graph = nx.Graph()
if not os.path.isfile(path):
print("Error: file " + path + " not found!")
exit(-1)
with open(path) as file:
for line in file.readlines():
line = line.split(delimiter)
v1 = int(line[0])
v2 = int(line[1])
graph.add_node(v1)
graph.add_node(v2)
w = float(line[2])
p = float(line[3])
if (self_loop and v1 == v2) or (v1 != v2):
graph.add_edge(v1, v2, weight=w, prob=p)
return graph
| [((119, 129), 'networkx.Graph', 'nx.Graph', ([], {}), '()\n', (127, 129), True, 'import networkx as nx\n'), ((621, 631), 'networkx.Graph', 'nx.Graph', ([], {}), '()\n', (629, 631), True, 'import networkx as nx\n')] |
wan1869/dushuhu | mayan/apps/document_signatures/models.py | 934dd178e67140cffc6b9203e793fdf8bbc73a54 | import logging
import uuid
from django.db import models
from django.urls import reverse
from django.utils.encoding import force_text
from django.utils.translation import ugettext_lazy as _
from model_utils.managers import InheritanceManager
from mayan.apps.django_gpg.exceptions import VerificationError
from mayan.apps.django_gpg.models import Key
from mayan.apps.documents.models import DocumentVersion
from mayan.apps.storage.classes import DefinedStorageLazy
from .literals import STORAGE_NAME_DOCUMENT_SIGNATURES_DETACHED_SIGNATURE
from .managers import DetachedSignatureManager, EmbeddedSignatureManager
logger = logging.getLogger(name=__name__)
def upload_to(*args, **kwargs):
return force_text(s=uuid.uuid4())
class SignatureBaseModel(models.Model):
"""
Fields:
* key_id - Key Identifier - This is what uniquely identifies a key. No
two keys in the world have the same Key ID. The Key ID is also used to
locate a key in the key servers: http://pgp.mit.edu
* signature_id - Signature ID - Every time a key is used to sign something
it will generate a unique signature ID. No two signature IDs are the same,
even when using the same key.
"""
document_version = models.ForeignKey(
editable=False, on_delete=models.CASCADE, related_name='signatures',
to=DocumentVersion, verbose_name=_('Document version')
)
# Basic fields
date = models.DateField(
blank=True, editable=False, null=True, verbose_name=_('Date signed')
)
key_id = models.CharField(
help_text=_('ID of the key that will be used to sign the document.'),
max_length=40, verbose_name=_('Key ID')
)
# With proper key
signature_id = models.CharField(
blank=True, editable=False, null=True, max_length=64,
verbose_name=_('Signature ID')
)
public_key_fingerprint = models.CharField(
blank=True, editable=False, null=True, max_length=40,
verbose_name=_('Public key fingerprint')
)
objects = InheritanceManager()
class Meta:
ordering = ('pk',)
verbose_name = _('Document version signature')
verbose_name_plural = _('Document version signatures')
def __str__(self):
return self.signature_id or '{} - {}'.format(self.date, self.key_id)
def get_absolute_url(self):
return reverse(
viewname='signatures:document_version_signature_details',
kwargs={'signature_id': self.pk}
)
def get_key_id(self):
if self.public_key_fingerprint:
return self.public_key_fingerprint[-16:]
else:
return self.key_id
def get_signature_type_display(self):
if self.is_detached:
return _('Detached')
else:
return _('Embedded')
@property
def is_detached(self):
return hasattr(self, 'signature_file')
@property
def is_embedded(self):
return not hasattr(self, 'signature_file')
class EmbeddedSignature(SignatureBaseModel):
objects = EmbeddedSignatureManager()
class Meta:
verbose_name = _('Document version embedded signature')
verbose_name_plural = _('Document version embedded signatures')
def save(self, *args, **kwargs):
logger.debug(msg='checking for embedded signature')
if self.pk:
raw = True
else:
raw = False
with self.document_version.open(raw=raw) as file_object:
try:
verify_result = Key.objects.verify_file(
file_object=file_object
)
except VerificationError as exception:
# Not signed
logger.debug(
'embedded signature verification error; %s', exception
)
else:
self.date = verify_result.date
self.key_id = verify_result.key_id
self.signature_id = verify_result.signature_id
self.public_key_fingerprint = verify_result.pubkey_fingerprint
super(EmbeddedSignature, self).save(*args, **kwargs)
class DetachedSignature(SignatureBaseModel):
signature_file = models.FileField(
blank=True, help_text=_(
'Signature file previously generated.'
), null=True, storage=DefinedStorageLazy(
name=STORAGE_NAME_DOCUMENT_SIGNATURES_DETACHED_SIGNATURE
), upload_to=upload_to, verbose_name=_('Signature file')
)
objects = DetachedSignatureManager()
class Meta:
verbose_name = _('Document version detached signature')
verbose_name_plural = _('Document version detached signatures')
def __str__(self):
return '{}-{}'.format(self.document_version, _('signature'))
def delete(self, *args, **kwargs):
if self.signature_file.name:
self.signature_file.storage.delete(name=self.signature_file.name)
super(DetachedSignature, self).delete(*args, **kwargs)
def save(self, *args, **kwargs):
with self.document_version.open() as file_object:
try:
verify_result = Key.objects.verify_file(
file_object=file_object, signature_file=self.signature_file
)
except VerificationError as exception:
# Not signed
logger.debug(
'detached signature verification error; %s', exception
)
else:
self.signature_file.seek(0)
self.date = verify_result.date
self.key_id = verify_result.key_id
self.signature_id = verify_result.signature_id
self.public_key_fingerprint = verify_result.pubkey_fingerprint
return super(DetachedSignature, self).save(*args, **kwargs)
| [((624, 656), 'logging.getLogger', 'logging.getLogger', ([], {'name': '__name__'}), '(name=__name__)\n', (641, 656), False, 'import logging\n'), ((2025, 2045), 'model_utils.managers.InheritanceManager', 'InheritanceManager', ([], {}), '()\n', (2043, 2045), False, 'from model_utils.managers import InheritanceManager\n'), ((2113, 2144), 'django.utils.translation.ugettext_lazy', '_', (['"""Document version signature"""'], {}), "('Document version signature')\n", (2114, 2144), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2175, 2207), 'django.utils.translation.ugettext_lazy', '_', (['"""Document version signatures"""'], {}), "('Document version signatures')\n", (2176, 2207), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2357, 2461), 'django.urls.reverse', 'reverse', ([], {'viewname': '"""signatures:document_version_signature_details"""', 'kwargs': "{'signature_id': self.pk}"}), "(viewname='signatures:document_version_signature_details', kwargs={\n 'signature_id': self.pk})\n", (2364, 2461), False, 'from django.urls import reverse\n'), ((3118, 3158), 'django.utils.translation.ugettext_lazy', '_', (['"""Document version embedded signature"""'], {}), "('Document version embedded signature')\n", (3119, 3158), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3189, 3230), 'django.utils.translation.ugettext_lazy', '_', (['"""Document version embedded signatures"""'], {}), "('Document version embedded signatures')\n", (3190, 3230), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((4586, 4626), 'django.utils.translation.ugettext_lazy', '_', (['"""Document version detached signature"""'], {}), "('Document version detached signature')\n", (4587, 4626), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((4657, 4698), 'django.utils.translation.ugettext_lazy', '_', (['"""Document version detached signatures"""'], {}), "('Document version detached signatures')\n", (4658, 4698), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((715, 727), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (725, 727), False, 'import uuid\n'), ((1358, 1379), 'django.utils.translation.ugettext_lazy', '_', (['"""Document version"""'], {}), "('Document version')\n", (1359, 1379), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1494, 1510), 'django.utils.translation.ugettext_lazy', '_', (['"""Date signed"""'], {}), "('Date signed')\n", (1495, 1510), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1566, 1624), 'django.utils.translation.ugettext_lazy', '_', (['"""ID of the key that will be used to sign the document."""'], {}), "('ID of the key that will be used to sign the document.')\n", (1567, 1624), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1662, 1673), 'django.utils.translation.ugettext_lazy', '_', (['"""Key ID"""'], {}), "('Key ID')\n", (1663, 1673), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1822, 1839), 'django.utils.translation.ugettext_lazy', '_', (['"""Signature ID"""'], {}), "('Signature ID')\n", (1823, 1839), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1976, 2003), 'django.utils.translation.ugettext_lazy', '_', (['"""Public key fingerprint"""'], {}), "('Public key fingerprint')\n", (1977, 2003), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2747, 2760), 'django.utils.translation.ugettext_lazy', '_', (['"""Detached"""'], {}), "('Detached')\n", (2748, 
2760), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2794, 2807), 'django.utils.translation.ugettext_lazy', '_', (['"""Embedded"""'], {}), "('Embedded')\n", (2795, 2807), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((4260, 4301), 'django.utils.translation.ugettext_lazy', '_', (['"""Signature file previously generated."""'], {}), "('Signature file previously generated.')\n", (4261, 4301), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((4344, 4420), 'mayan.apps.storage.classes.DefinedStorageLazy', 'DefinedStorageLazy', ([], {'name': 'STORAGE_NAME_DOCUMENT_SIGNATURES_DETACHED_SIGNATURE'}), '(name=STORAGE_NAME_DOCUMENT_SIGNATURES_DETACHED_SIGNATURE)\n', (4362, 4420), False, 'from mayan.apps.storage.classes import DefinedStorageLazy\n'), ((4478, 4497), 'django.utils.translation.ugettext_lazy', '_', (['"""Signature file"""'], {}), "('Signature file')\n", (4479, 4497), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((4776, 4790), 'django.utils.translation.ugettext_lazy', '_', (['"""signature"""'], {}), "('signature')\n", (4777, 4790), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3526, 3574), 'mayan.apps.django_gpg.models.Key.objects.verify_file', 'Key.objects.verify_file', ([], {'file_object': 'file_object'}), '(file_object=file_object)\n', (3549, 3574), False, 'from mayan.apps.django_gpg.models import Key\n'), ((5155, 5244), 'mayan.apps.django_gpg.models.Key.objects.verify_file', 'Key.objects.verify_file', ([], {'file_object': 'file_object', 'signature_file': 'self.signature_file'}), '(file_object=file_object, signature_file=self.\n signature_file)\n', (5178, 5244), False, 'from mayan.apps.django_gpg.models import Key\n')] |
ramezrawas/galaxy-1 | scripts/sync_reports_config.py | c03748dd49c060a68d07bce56eae33e0ba154414 | from ConfigParser import ConfigParser
from sys import argv
REPLACE_PROPERTIES = ["file_path", "database_connection", "new_file_path"]
MAIN_SECTION = "app:main"
def sync():
    # Add or replace the relevant properties from galaxy.ini
# into reports.ini
reports_config_file = "config/reports.ini"
if len(argv) > 1:
reports_config_file = argv[1]
universe_config_file = "config/galaxy.ini"
if len(argv) > 2:
universe_config_file = argv[2]
parser = ConfigParser()
parser.read(universe_config_file)
with open(reports_config_file, "r") as f:
reports_config_lines = f.readlines()
replaced_properties = set([])
with open(reports_config_file, "w") as f:
# Write all properties from reports config replacing as
# needed.
for reports_config_line in reports_config_lines:
(line, replaced_property) = get_synced_line(reports_config_line, parser)
if replaced_property:
replaced_properties.add(replaced_property)
f.write(line)
# If any properties appear in universe config and not in
# reports write these as well.
for replacement_property in REPLACE_PROPERTIES:
if parser.has_option(MAIN_SECTION, replacement_property) and \
not (replacement_property in replaced_properties):
f.write(get_universe_line(replacement_property, parser))
def get_synced_line(reports_line, universe_config):
# Cycle through properties to replace and perform replacement on
# this line if needed.
synced_line = reports_line
replaced_property = None
for replacement_property in REPLACE_PROPERTIES:
if reports_line.startswith(replacement_property) and \
universe_config.has_option(MAIN_SECTION, replacement_property):
synced_line = get_universe_line(replacement_property, universe_config)
replaced_property = replacement_property
break
return (synced_line, replaced_property)
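# Worked example (illustrative, not part of the original script): if galaxy.ini
# contains "file_path = database/files" under [app:main], then a reports.ini
# line starting with "file_path" is rewritten by get_synced_line() to
# "file_path=database/files\n", and "file_path" is recorded as a replaced
# property so it is not appended a second time at the end of the file.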
def get_universe_line(property_name, universe_config):
return "%s=%s\n" % (property_name, universe_config.get(MAIN_SECTION, property_name))
if __name__ == '__main__':
sync()
| [((489, 503), 'ConfigParser.ConfigParser', 'ConfigParser', ([], {}), '()\n', (501, 503), False, 'from ConfigParser import ConfigParser\n')] |
wotchin/openGauss-server | src/gausskernel/dbmind/xtuner/test/test_ssh.py | ebd92e92b0cfd76b121d98e4c57a22d334573159 | # Copyright (c) 2020 Huawei Technologies Co.,Ltd.
#
# openGauss is licensed under Mulan PSL v2.
# You can use this software according to the terms and conditions of the Mulan PSL v2.
# You may obtain a copy of Mulan PSL v2 at:
#
# http://license.coscl.org.cn/MulanPSL2
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
# See the Mulan PSL v2 for more details.
# -------------------------------------------------------------------------
#
# test_ssh.py
#
# IDENTIFICATION
# src/gausskernel/dbmind/xtuner/test/test_ssh.py
#
# -------------------------------------------------------------------------
from ssh import ExecutorFactory
def test_remote():
    exe = ExecutorFactory().set_host('').set_user('').set_pwd('').get_executor()  # fill in your connection information here
print(exe.exec_command_sync("cat /proc/cpuinfo | grep \"processor\" | wc -l"))
print(exe.exec_command_sync("cat /proc/self/cmdline | xargs -0"))
print(exe.exec_command_sync("echo -e 'hello \\n world'")[0].count('\n'))
print(exe.exec_command_sync("echo -e 'hello \\n world'")[0])
print(exe.exec_command_sync('echo $SHELL'))
def test_local():
exe = ExecutorFactory().get_executor()
print(exe.exec_command_sync("ping -h"))
if __name__ == "__main__":
test_remote()
test_local()
| [((1307, 1324), 'ssh.ExecutorFactory', 'ExecutorFactory', ([], {}), '()\n', (1322, 1324), False, 'from ssh import ExecutorFactory\n'), ((835, 852), 'ssh.ExecutorFactory', 'ExecutorFactory', ([], {}), '()\n', (850, 852), False, 'from ssh import ExecutorFactory\n')] |
wyshi/Unsupervised-Structure-Learning | models/utils.py | 19b49320b46e5f7d990ab9e5b3054b331b86e59d | # Original work Copyright (C) 2017 Tiancheng Zhao, Carnegie Mellon University
# Modified work Copyright 2018 Weiyan Shi.
import tensorflow as tf
import numpy as np
from nltk.translate.bleu_score import sentence_bleu
from nltk.translate.bleu_score import SmoothingFunction
def get_bleu_stats(ref, hyps):
scores = []
for hyp in hyps:
try:
scores.append(sentence_bleu([ref], hyp, smoothing_function=SmoothingFunction().method7,
weights=[1./3, 1./3,1./3]))
except:
scores.append(0.0)
return np.max(scores), np.mean(scores)
def gaussian_kld(recog_mu, recog_logvar, prior_mu, prior_logvar):
kld = -0.5 * tf.reduce_sum(1 + (recog_logvar - prior_logvar)
- tf.div(tf.pow(prior_mu - recog_mu, 2), tf.exp(prior_logvar))
- tf.div(tf.exp(recog_logvar), tf.exp(prior_logvar)), reduction_indices=1)
return kld
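# Note (added for clarity, not in the original): gaussian_kld above is the
# closed-form KL divergence between two diagonal Gaussians,
#   KL(N(mu_q, s_q^2) || N(mu_p, s_p^2))
#     = 0.5 * sum( log(s_p^2 / s_q^2) + (s_q^2 + (mu_q - mu_p)^2) / s_p^2 - 1 ),
# with the recog_* arguments playing the role of q and prior_* the role of p.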
def norm_log_liklihood(x, mu, logvar):
return -0.5*tf.reduce_sum(tf.log(2*np.pi) + logvar + tf.div(tf.pow((x-mu), 2), tf.exp(logvar)), reduction_indices=1)
def sample_gaussian(mu, logvar):
epsilon = tf.random_normal(tf.shape(logvar), name="epsilon")
std = tf.exp(0.5 * logvar)
z= mu + tf.multiply(std, epsilon)
return z
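# Note (added for clarity, not in the original): sample_gaussian draws
# z = mu + sigma * eps with eps ~ N(0, I) and sigma = exp(0.5 * logvar),
# i.e. the standard reparameterization trick, so gradients can flow through
# mu and logvar.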
def get_bow(embedding, avg=False):
"""
Assumption, the last dimension is the embedding
The second last dimension is the sentence length. The rank must be 3
"""
embedding_size = embedding.get_shape()[2].value
if avg:
return tf.reduce_mean(embedding, reduction_indices=[1]), embedding_size
else:
return tf.reduce_sum(embedding, reduction_indices=[1]), embedding_size
def get_rnn_encode(embedding, cell, length_mask=None, scope=None, reuse=None):
"""
Assumption, the last dimension is the embedding
The second last dimension is the sentence length. The rank must be 3
The padding should have zero
"""
with tf.variable_scope(scope, 'RnnEncoding', reuse=reuse):
if length_mask is None:
length_mask = tf.reduce_sum(tf.sign(tf.reduce_max(tf.abs(embedding), reduction_indices=2)),reduction_indices=1)
length_mask = tf.to_int32(length_mask)
_, encoded_input = tf.nn.dynamic_rnn(cell, embedding, sequence_length=length_mask, dtype=tf.float32)
return encoded_input, cell.state_size
def get_bi_rnn_encode(embedding, f_cell, b_cell, length_mask=None, scope=None, reuse=None):
"""
Assumption, the last dimension is the embedding
The second last dimension is the sentence length. The rank must be 3
The padding should have zero
"""
with tf.variable_scope(scope, 'RnnEncoding', reuse=reuse):
if length_mask is None:
length_mask = tf.reduce_sum(tf.sign(tf.reduce_max(tf.abs(embedding), reduction_indices=2)),reduction_indices=1)
length_mask = tf.to_int32(length_mask)
_, encoded_input = tf.nn.bidirectional_dynamic_rnn(f_cell, b_cell, embedding, sequence_length=length_mask, dtype=tf.float32)
encoded_input = tf.concat(encoded_input, 1)
return encoded_input, f_cell.state_size+b_cell.state_size
def get_prob_for_one_sent(vocab_prob, sent, length_mask=None):
"""
:param vocab_prob:
:param sent:
:param length_mask:
:return:
"""
    return tf.boolean_mask(tf.reshape(sent, [-1, 50]), tf.sequence_mask(length_mask, 50))
def tf_repeat(tensor, repeats):
"""
:param tensor:
:param repeats:
:return:
"""
with tf.variable_scope("repeat"):
expanded_tensor = tf.expand_dims(tensor, -1)
multiples = [1] + repeats
tiled_tensor = tf.tile(expanded_tensor, multiples=multiples)
repeated_tensor = tf.reshape(tiled_tensor, tf.shape(tensor) * repeats)
return repeated_tensor | [((1234, 1254), 'tensorflow.exp', 'tf.exp', (['(0.5 * logvar)'], {}), '(0.5 * logvar)\n', (1240, 1254), True, 'import tensorflow as tf\n'), ((582, 596), 'numpy.max', 'np.max', (['scores'], {}), '(scores)\n', (588, 596), True, 'import numpy as np\n'), ((598, 613), 'numpy.mean', 'np.mean', (['scores'], {}), '(scores)\n', (605, 613), True, 'import numpy as np\n'), ((1190, 1206), 'tensorflow.shape', 'tf.shape', (['logvar'], {}), '(logvar)\n', (1198, 1206), True, 'import tensorflow as tf\n'), ((1267, 1292), 'tensorflow.multiply', 'tf.multiply', (['std', 'epsilon'], {}), '(std, epsilon)\n', (1278, 1292), True, 'import tensorflow as tf\n'), ((1981, 2033), 'tensorflow.variable_scope', 'tf.variable_scope', (['scope', '"""RnnEncoding"""'], {'reuse': 'reuse'}), "(scope, 'RnnEncoding', reuse=reuse)\n", (1998, 2033), True, 'import tensorflow as tf\n'), ((2269, 2355), 'tensorflow.nn.dynamic_rnn', 'tf.nn.dynamic_rnn', (['cell', 'embedding'], {'sequence_length': 'length_mask', 'dtype': 'tf.float32'}), '(cell, embedding, sequence_length=length_mask, dtype=tf.\n float32)\n', (2286, 2355), True, 'import tensorflow as tf\n'), ((2674, 2726), 'tensorflow.variable_scope', 'tf.variable_scope', (['scope', '"""RnnEncoding"""'], {'reuse': 'reuse'}), "(scope, 'RnnEncoding', reuse=reuse)\n", (2691, 2726), True, 'import tensorflow as tf\n'), ((2962, 3072), 'tensorflow.nn.bidirectional_dynamic_rnn', 'tf.nn.bidirectional_dynamic_rnn', (['f_cell', 'b_cell', 'embedding'], {'sequence_length': 'length_mask', 'dtype': 'tf.float32'}), '(f_cell, b_cell, embedding, sequence_length=\n length_mask, dtype=tf.float32)\n', (2993, 3072), True, 'import tensorflow as tf\n'), ((3092, 3119), 'tensorflow.concat', 'tf.concat', (['encoded_input', '(1)'], {}), '(encoded_input, 1)\n', (3101, 3119), True, 'import tensorflow as tf\n'), ((3364, 3400), 'tensorflow.reshape', 'tf.reshape', (['usr_input_sent', '[-1, 50]'], {}), '(usr_input_sent, [-1, 50])\n', (3374, 3400), True, 'import tensorflow as tf\n'), ((3402, 3435), 'tensorflow.sequence_mask', 'tf.sequence_mask', (['length_mask', '(50)'], {}), '(length_mask, 50)\n', (3418, 3435), True, 'import tensorflow as tf\n'), ((3548, 3575), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""repeat"""'], {}), "('repeat')\n", (3565, 3575), True, 'import tensorflow as tf\n'), ((3603, 3629), 'tensorflow.expand_dims', 'tf.expand_dims', (['tensor', '(-1)'], {}), '(tensor, -1)\n', (3617, 3629), True, 'import tensorflow as tf\n'), ((3687, 3732), 'tensorflow.tile', 'tf.tile', (['expanded_tensor'], {'multiples': 'multiples'}), '(expanded_tensor, multiples=multiples)\n', (3694, 3732), True, 'import tensorflow as tf\n'), ((1563, 1611), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['embedding'], {'reduction_indices': '[1]'}), '(embedding, reduction_indices=[1])\n', (1577, 1611), True, 'import tensorflow as tf\n'), ((1653, 1700), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['embedding'], {'reduction_indices': '[1]'}), '(embedding, reduction_indices=[1])\n', (1666, 1700), True, 'import tensorflow as tf\n'), ((2217, 2241), 'tensorflow.to_int32', 'tf.to_int32', (['length_mask'], {}), '(length_mask)\n', (2228, 2241), True, 'import tensorflow as tf\n'), ((2910, 2934), 'tensorflow.to_int32', 'tf.to_int32', (['length_mask'], {}), '(length_mask)\n', (2921, 2934), True, 'import tensorflow as tf\n'), ((3784, 3800), 'tensorflow.shape', 'tf.shape', (['tensor'], {}), '(tensor)\n', (3792, 3800), True, 'import tensorflow as tf\n'), ((881, 901), 'tensorflow.exp', 'tf.exp', (['recog_logvar'], {}), 
'(recog_logvar)\n', (887, 901), True, 'import tensorflow as tf\n'), ((903, 923), 'tensorflow.exp', 'tf.exp', (['prior_logvar'], {}), '(prior_logvar)\n', (909, 923), True, 'import tensorflow as tf\n'), ((1033, 1050), 'tensorflow.log', 'tf.log', (['(2 * np.pi)'], {}), '(2 * np.pi)\n', (1039, 1050), True, 'import tensorflow as tf\n'), ((1067, 1084), 'tensorflow.pow', 'tf.pow', (['(x - mu)', '(2)'], {}), '(x - mu, 2)\n', (1073, 1084), True, 'import tensorflow as tf\n'), ((1086, 1100), 'tensorflow.exp', 'tf.exp', (['logvar'], {}), '(logvar)\n', (1092, 1100), True, 'import tensorflow as tf\n'), ((787, 817), 'tensorflow.pow', 'tf.pow', (['(prior_mu - recog_mu)', '(2)'], {}), '(prior_mu - recog_mu, 2)\n', (793, 817), True, 'import tensorflow as tf\n'), ((819, 839), 'tensorflow.exp', 'tf.exp', (['prior_logvar'], {}), '(prior_logvar)\n', (825, 839), True, 'import tensorflow as tf\n'), ((2129, 2146), 'tensorflow.abs', 'tf.abs', (['embedding'], {}), '(embedding)\n', (2135, 2146), True, 'import tensorflow as tf\n'), ((2822, 2839), 'tensorflow.abs', 'tf.abs', (['embedding'], {}), '(embedding)\n', (2828, 2839), True, 'import tensorflow as tf\n'), ((427, 446), 'nltk.translate.bleu_score.SmoothingFunction', 'SmoothingFunction', ([], {}), '()\n', (444, 446), False, 'from nltk.translate.bleu_score import SmoothingFunction\n')] |
Kh4L/gluon-cv | gluoncv/data/transforms/block.py | 849411ed56632cd854850b07142087d599f97dcb | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable= arguments-differ
# pylint: disable= missing-docstring
"Addtional image transforms."
import random
import math
import numpy as np
from mxnet import image, nd
from mxnet.gluon import Block
__all__ = ['RandomCrop', 'RandomErasing']
class RandomCrop(Block):
"""Randomly crop `src` with `size` (width, height).
Padding is optional.
Upsample result if `src` is smaller than `size`.
Parameters
----------
size : int or tuple of (W, H)
Size of the final output.
pad: int or tuple
if int, size of the zero-padding
if tuple, number of values padded to the edges of each axis.
((before_1, after_1), ... (before_N, after_N)) unique pad widths for each axis.
((before, after),) yields same before and after pad for each axis.
(pad,) or int is a shortcut for before = after = pad width for all axes.
interpolation : int
Interpolation method for resizing. By default uses bilinear
interpolation. See OpenCV's resize function for available choices.
Inputs:
- **data**: input tensor with (Hi x Wi x C) shape.
Outputs:
- **out**: output tensor with (size[0] x size[1] x C) or (size x size x C) shape.
"""
def __init__(self, size, pad=None, interpolation=2):
super(RandomCrop, self).__init__()
numeric_types = (float, int, np.generic)
if isinstance(size, numeric_types):
size = (size, size)
self._args = (size, interpolation)
self.pad = ((pad, pad), (pad, pad), (0, 0)) if isinstance(pad, int) else pad
def forward(self, x):
if self.pad:
return image.random_crop(nd.array(
np.pad(x.asnumpy(), self.pad, mode='constant', constant_values=0)), *self._args)[0]
else:
return image.random_crop(x, *self._args)[0]
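# Illustrative usage sketch (added; not part of the original gluon-cv module).
def _random_crop_example():
    """Zero-pad a 32x32x3 HWC image by 4 pixels per side, then take a random
    32x32 crop. The shapes here are assumptions made for the example only."""
    aug = RandomCrop(32, pad=4)
    img = nd.random.uniform(shape=(32, 32, 3))
    return aug(img)  # NDArray of shape (32, 32, 3)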
class RandomErasing(Block):
"""Randomly erasing the area in `src` between `s_min` and `s_max` with `probability`.
`ratio` controls the ratio between width and height.
`mean` means the value in erasing area.
Parameters
----------
probability : float
Probability of erasing.
s_min : float
Min area to all area.
s_max : float
Max area to all area.
ratio : float
The ratio between width and height.
mean : int or tuple of (R, G, B)
The value in erasing area.
Inputs:
- **data**: input tensor with (Hi x Wi x C) shape.
Outputs:
- **out**: output tensor with (Hi x Wi x C) shape.
"""
def __init__(self, probability=0.5, s_min=0.02, s_max=0.4, ratio=0.3,
mean=(125.31, 122.96, 113.86)):
super(RandomErasing, self).__init__()
self.probability = probability
self.mean = mean
self.s_min = s_min
self.s_max = s_max
self.ratio = ratio
def forward(self, x):
if not isinstance(self.probability, float):
raise TypeError('Got inappropriate size arg')
if not isinstance(self.s_min, float):
raise TypeError('Got inappropriate size arg')
if not isinstance(self.s_max, float):
raise TypeError('Got inappropriate size arg')
if not isinstance(self.ratio, float):
raise TypeError('Got inappropriate size arg')
if not isinstance(self.mean, (int, tuple)):
raise TypeError('Got inappropriate size arg')
if random.uniform(0, 1) > self.probability:
return x
width, height, _ = x.shape
area = width * height
target_area = random.uniform(self.s_min, self.s_max) * area
aspect_ratio = random.uniform(self.ratio, 1/self.ratio)
w = int(round(math.sqrt(target_area * aspect_ratio)))
h = int(round(math.sqrt(target_area / aspect_ratio)))
if w < width and h < height:
x1 = random.randint(0, width - w)
y1 = random.randint(0, height - h)
x[x1:x1+w, y1:y1+h, 0] = self.mean[0]
x[x1:x1+w, y1:y1+h, 1] = self.mean[1]
x[x1:x1+w, y1:y1+h, 2] = self.mean[2]
return x
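# Illustrative usage sketch (added; not part of the original gluon-cv module):
# with probability 0.5, erase a random rectangle covering 2%-40% of the image
# and fill it with the per-channel means configured above.
def _random_erasing_example():
    aug = RandomErasing(probability=0.5, s_min=0.02, s_max=0.4, ratio=0.3)
    img = nd.random.uniform(0, 255, shape=(32, 32, 3))
    return aug(img)  # same shape, possibly with one rectangle overwritten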
| [((4465, 4507), 'random.uniform', 'random.uniform', (['self.ratio', '(1 / self.ratio)'], {}), '(self.ratio, 1 / self.ratio)\n', (4479, 4507), False, 'import random\n'), ((4246, 4266), 'random.uniform', 'random.uniform', (['(0)', '(1)'], {}), '(0, 1)\n', (4260, 4266), False, 'import random\n'), ((4396, 4434), 'random.uniform', 'random.uniform', (['self.s_min', 'self.s_max'], {}), '(self.s_min, self.s_max)\n', (4410, 4434), False, 'import random\n'), ((4684, 4712), 'random.randint', 'random.randint', (['(0)', '(width - w)'], {}), '(0, width - w)\n', (4698, 4712), False, 'import random\n'), ((4730, 4759), 'random.randint', 'random.randint', (['(0)', '(height - h)'], {}), '(0, height - h)\n', (4744, 4759), False, 'import random\n'), ((2633, 2666), 'mxnet.image.random_crop', 'image.random_crop', (['x', '*self._args'], {}), '(x, *self._args)\n', (2650, 2666), False, 'from mxnet import image, nd\n'), ((4528, 4565), 'math.sqrt', 'math.sqrt', (['(target_area * aspect_ratio)'], {}), '(target_area * aspect_ratio)\n', (4537, 4565), False, 'import math\n'), ((4590, 4627), 'math.sqrt', 'math.sqrt', (['(target_area / aspect_ratio)'], {}), '(target_area / aspect_ratio)\n', (4599, 4627), False, 'import math\n')] |
lribiere/explore-mit-bih-arrhythmia-db | explore.py | 44eb2601ed437cb9766ae9cfd3c3553bf108d4f1 | import plotly.graph_objects as go
import streamlit as st
import pandas as pd
from utils import *
import glob
import wfdb
import os
ANNOTATIONS_COL_NAME = 'annotations'
'''
# MIT-BIH Arrhythmia DB Exploration
'''
record_ids = [os.path.basename(file)[:-4] for file in glob.glob('data/*.dat')]
if len(record_ids) == 0:
st.write('Warning ! No data could be found under the ./data/ directory.',
'*\*.dat*, *\*.hea*, *\*.atr* files and such should be placed ',
'immediately under the ./data/ directory')
else:
record_ids.sort()
record_id = st.selectbox('Select a record id', record_ids)
record = wfdb.rdrecord(f'data/{record_id}')
annotation = wfdb.rdann(f'data/{record_id}', 'atr')
st.write('Signals found in this record :')
for idx, signal in enumerate(record.sig_name):
st.write(f'- `{signal}` : in {record.units[idx]}, with a frequency of '
f'{record.fs * record.samps_per_frame[idx]}hz')
st.write(f'Comments for this record : {record.comments}')
signals_df = pd.DataFrame(record.p_signal, columns=record.sig_name)
annot_serie = pd.Series(annotation.symbol, index=annotation.sample,
name=ANNOTATIONS_COL_NAME)
full_df = pd.concat([signals_df, annot_serie], axis=1)
''' ## Annotations '''
beat_annot_count = annot_serie.isin(dict(beat_annotations)).sum()
non_beat_annot_count = annot_serie.isin(dict(non_beat_annotations)).sum()
unique_annot = annot_serie.value_counts().index.values
st.write(f'This record contains `{annot_serie.size}` annotations '
f'among which `{beat_annot_count}` beat annotations and '
f'`{non_beat_annot_count}` non beat annotation(s).')
st.write('The annotations are the followings :')
for annot in unique_annot:
st.write(f'- `{annot}` : {annotation_definitions[annot]}')
st.write('More explanations on the annotations are available here : '
'https://archive.physionet.org/physiobank/annotations.shtml')
# Plot counts for each annotation
annot_counts_df = annot_serie \
.value_counts() \
.rename_axis(ANNOTATIONS_COL_NAME) \
.reset_index(name='counts')
bar_fig = go.Figure(data=[go.Bar(x=annot_counts_df[ANNOTATIONS_COL_NAME],
y=annot_counts_df['counts'],
text=annot_counts_df['counts'],
textposition='auto'
)])
bar_fig.update_layout(title='Annotations by count', yaxis_title='counts',
xaxis_title='annotations')
st.write(bar_fig)
''' ## Explore full dataset '''
signal = st.selectbox('Select a signal', record.sig_name)
# Plot signals and annotations
matching_rows_by_annot = {}
for annot in unique_annot:
matching_rows_by_annot[annot] = full_df[ANNOTATIONS_COL_NAME] == annot
fig = go.Figure(layout=go.Layout(title=go.layout.Title(
text='{} signal with annotations'.format(signal))))
fig.add_trace(go.Scatter(x=full_df.index.values,
y=full_df[signal],
mode='lines',
name=signal))
for annot, annot_matching_rows in matching_rows_by_annot.items():
fig.add_trace(go.Scatter(x=full_df.index[annot_matching_rows].values,
y=full_df[annot_matching_rows][signal].values,
mode='markers',
name='{} (annot)'.format(annot)))
st.plotly_chart(fig)
| [((324, 516), 'streamlit.write', 'st.write', (['"""Warning ! No data could be found under the ./data/ directory."""', '"""*\\\\*.dat*, *\\\\*.hea*, *\\\\*.atr* files and such should be placed """', '"""immediately under the ./data/ directory"""'], {}), "('Warning ! No data could be found under the ./data/ directory.',\n '*\\\\*.dat*, *\\\\*.hea*, *\\\\*.atr* files and such should be placed ',\n 'immediately under the ./data/ directory')\n", (332, 516), True, 'import streamlit as st\n'), ((576, 622), 'streamlit.selectbox', 'st.selectbox', (['"""Select a record id"""', 'record_ids'], {}), "('Select a record id', record_ids)\n", (588, 622), True, 'import streamlit as st\n'), ((636, 670), 'wfdb.rdrecord', 'wfdb.rdrecord', (['f"""data/{record_id}"""'], {}), "(f'data/{record_id}')\n", (649, 670), False, 'import wfdb\n'), ((688, 726), 'wfdb.rdann', 'wfdb.rdann', (['f"""data/{record_id}"""', '"""atr"""'], {}), "(f'data/{record_id}', 'atr')\n", (698, 726), False, 'import wfdb\n'), ((731, 773), 'streamlit.write', 'st.write', (['"""Signals found in this record :"""'], {}), "('Signals found in this record :')\n", (739, 773), True, 'import streamlit as st\n'), ((974, 1031), 'streamlit.write', 'st.write', (['f"""Comments for this record : {record.comments}"""'], {}), "(f'Comments for this record : {record.comments}')\n", (982, 1031), True, 'import streamlit as st\n'), ((1049, 1103), 'pandas.DataFrame', 'pd.DataFrame', (['record.p_signal'], {'columns': 'record.sig_name'}), '(record.p_signal, columns=record.sig_name)\n', (1061, 1103), True, 'import pandas as pd\n'), ((1122, 1207), 'pandas.Series', 'pd.Series', (['annotation.symbol'], {'index': 'annotation.sample', 'name': 'ANNOTATIONS_COL_NAME'}), '(annotation.symbol, index=annotation.sample, name=ANNOTATIONS_COL_NAME\n )\n', (1131, 1207), True, 'import pandas as pd\n'), ((1245, 1289), 'pandas.concat', 'pd.concat', (['[signals_df, annot_serie]'], {'axis': '(1)'}), '([signals_df, annot_serie], axis=1)\n', (1254, 1289), True, 'import pandas as pd\n'), ((1529, 1708), 'streamlit.write', 'st.write', (['f"""This record contains `{annot_serie.size}` annotations among which `{beat_annot_count}` beat annotations and `{non_beat_annot_count}` non beat annotation(s)."""'], {}), "(\n f'This record contains `{annot_serie.size}` annotations among which `{beat_annot_count}` beat annotations and `{non_beat_annot_count}` non beat annotation(s).'\n )\n", (1537, 1708), True, 'import streamlit as st\n'), ((1737, 1785), 'streamlit.write', 'st.write', (['"""The annotations are the followings :"""'], {}), "('The annotations are the followings :')\n", (1745, 1785), True, 'import streamlit as st\n'), ((1888, 2026), 'streamlit.write', 'st.write', (['"""More explanations on the annotations are available here : https://archive.physionet.org/physiobank/annotations.shtml"""'], {}), "(\n 'More explanations on the annotations are available here : https://archive.physionet.org/physiobank/annotations.shtml'\n )\n", (1896, 2026), True, 'import streamlit as st\n'), ((2661, 2678), 'streamlit.write', 'st.write', (['bar_fig'], {}), '(bar_fig)\n', (2669, 2678), True, 'import streamlit as st\n'), ((2729, 2777), 'streamlit.selectbox', 'st.selectbox', (['"""Select a signal"""', 'record.sig_name'], {}), "('Select a signal', record.sig_name)\n", (2741, 2777), True, 'import streamlit as st\n'), ((3610, 3630), 'streamlit.plotly_chart', 'st.plotly_chart', (['fig'], {}), '(fig)\n', (3625, 3630), True, 'import streamlit as st\n'), ((229, 251), 'os.path.basename', 'os.path.basename', (['file'], {}), 
'(file)\n', (245, 251), False, 'import os\n'), ((269, 292), 'glob.glob', 'glob.glob', (['"""data/*.dat"""'], {}), "('data/*.dat')\n", (278, 292), False, 'import glob\n'), ((833, 958), 'streamlit.write', 'st.write', (['f"""- `{signal}` : in {record.units[idx]}, with a frequency of {record.fs * record.samps_per_frame[idx]}hz"""'], {}), "(\n f'- `{signal}` : in {record.units[idx]}, with a frequency of {record.fs * record.samps_per_frame[idx]}hz'\n )\n", (841, 958), True, 'import streamlit as st\n'), ((1825, 1883), 'streamlit.write', 'st.write', (['f"""- `{annot}` : {annotation_definitions[annot]}"""'], {}), "(f'- `{annot}` : {annotation_definitions[annot]}')\n", (1833, 1883), True, 'import streamlit as st\n'), ((3093, 3178), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': 'full_df.index.values', 'y': 'full_df[signal]', 'mode': '"""lines"""', 'name': 'signal'}), "(x=full_df.index.values, y=full_df[signal], mode='lines', name=signal\n )\n", (3103, 3178), True, 'import plotly.graph_objects as go\n'), ((2245, 2378), 'plotly.graph_objects.Bar', 'go.Bar', ([], {'x': 'annot_counts_df[ANNOTATIONS_COL_NAME]', 'y': "annot_counts_df['counts']", 'text': "annot_counts_df['counts']", 'textposition': '"""auto"""'}), "(x=annot_counts_df[ANNOTATIONS_COL_NAME], y=annot_counts_df['counts'],\n text=annot_counts_df['counts'], textposition='auto')\n", (2251, 2378), True, 'import plotly.graph_objects as go\n')] |
tranconbv/ironpython-stubs | release/stubs.min/System/__init___parts/CharEnumerator.py | a601759e6c6819beff8e6b639d18a24b7e351851 | class CharEnumerator(object):
""" Supports iterating over a System.String object and reading its individual characters. This class cannot be inherited. """
def ZZZ(self):
"""hardcoded/mock instance of the class"""
return CharEnumerator()
instance=ZZZ()
"""hardcoded/returns an instance of the class"""
def Clone(self):
"""
Clone(self: CharEnumerator) -> object
Creates a copy of the current System.CharEnumerator object.
Returns: An System.Object that is a copy of the current System.CharEnumerator object.
"""
pass
def Dispose(self):
"""
Dispose(self: CharEnumerator)
Releases all resources used by the current instance of the System.CharEnumerator class.
"""
pass
def MoveNext(self):
"""
MoveNext(self: CharEnumerator) -> bool
Increments the internal index of the current System.CharEnumerator object to the next character of the enumerated string.
Returns: true if the index is successfully incremented and within the enumerated string; otherwise,false.
"""
pass
def next(self,*args):
""" next(self: object) -> object """
pass
def Reset(self):
"""
Reset(self: CharEnumerator)
Initializes the index to a position logically before the first character of the enumerated string.
"""
pass
def __contains__(self,*args):
""" __contains__[Char](enumerator: IEnumerator[Char],value: Char) -> bool """
pass
def __enter__(self,*args):
""" __enter__(self: IDisposable) -> object """
pass
def __exit__(self,*args):
""" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __iter__(self,*args):
""" __iter__(self: IEnumerator) -> object """
pass
def __reduce_ex__(self,*args):
pass
def __repr__(self,*args):
""" __repr__(self: object) -> str """
pass
Current=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the currently referenced character in the string enumerated by this System.CharEnumerator object.
Get: Current(self: CharEnumerator) -> Char
"""
| [] |
levidavis/py-home | src/home_automation_hub/config.py | 3cc30e19d506824de9816ad9dbcfba4338a7dfa8 | from .config_store import ConfigStore
config = ConfigStore()
config.set_mqtt_broker("mqtt", 1883)
config.set_redis_config("redis", 6379, 0)
| [] |
Krozark/Harpe-Website | Harpe-website/website/contrib/communication/utils.py | 1038a8550d08273806c9ec244cb8157ef9e9101e | # -*- coding: utf-8 -*-
import socket as csocket
from struct import pack,unpack
from website.contrib.communication.models import *
def enum(**enums):
return type('Enum', (), enums)
class Socket:
Dommaine = enum(IP=csocket.AF_INET,LOCAL=csocket.AF_UNIX)
Type = enum(TCP=csocket.SOCK_STREAM, UDP=csocket.SOCK_DGRAM)
Down = enum(SEND=0,RECIVE=1,BOTH=2)
NTW_WELCOM_MSG = "hello!\0"
NTW_ERROR_NO = 0
def __init__ (self,dommaine,type,protocole=0):
self.sock = csocket.socket(dommaine,type,protocole)
self.buffer = b""
self.status = 0
def connect(self,host,port):
self.sock.connect((host,port))
def verify_connexion(self):
code = 404
if self.receive() > 0:
msg = self._unpack_str()
if msg == self.NTW_WELCOM_MSG and self.status == self.NTW_ERROR_NO:
print "verify_connexion <%d : %s>" % (self.status,msg)
else:
print "verify_connexion <%d : %s>" % (self.status,msg)
self.clear()
return self.status
def _unpack_str(self):
i = 0
while self.buffer[i]!= '\0':
i+=1
i+=1
res = self.buffer[:i]
self.buffer = self.buffer[i:]
return res
def send(self):
        # 6-byte network-order header: payload size (unsigned int) followed by status (short)
        size = len(self.buffer)
        _size = pack('!Ih',size,self.status)
data = _size + self.buffer
sent = self.sock.send(data)
if sent == 0:
print "Connexion lost"
return False
return True
def receive(self):
recv = b''
recv = self.sock.recv(6)
if recv == b'':
print "Connexion lost"
return None
size,self.status = unpack('!Ih',recv)
self.buffer = self.sock.recv(size)
return len(recv) + len(self.buffer)
    #Format  C Type              Python type         Standard size
    #x       pad byte            no value
    #c       char                string of length 1
    #b       signed char         integer             1
    #B       unsigned char       integer             1
    #?       _Bool               bool                1
    #h       short               integer             2
    #H       unsigned short      integer             2
    #i       int                 integer             4
    #I       unsigned int        integer             4
    #l       long                integer             4
    #L       unsigned long       integer             4
    #q       long long           integer             8
    #Q       unsigned long long  integer             8
    #f       float               float               4
    #d       double              float               8
    #s       char[]              string
    #p       char[]              string
    #P       void *              integer
def add(self,typ,*args):
self.buffer +=pack('!'+typ,*args)
def clear(self):
self.buffer = b""
self.status = 0
def call(self,ret_type,func_id,types="",*args):
if len(types) < len(args):
print "Wrong number of args/type"
return 0
self.clear()
self.add("i",func_id)
if types:
self.add(types,*args)
self.send()
size = self.receive()
if size:
if self.status != 0:
print "recive error code : %d" % self.status
else:
return unpack("!"+ret_type,self.buffer)[0]
return 0
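# Usage sketch (illustrative only; the function id 42 below is hypothetical):
# pack one int and one double according to the format table above and invoke a
# remote function, reading back a single int return value.
#   sock = Socket(Socket.Dommaine.IP, Socket.Type.TCP)
#   sock.connect(host, port)
#   if sock.verify_connexion() == Socket.NTW_ERROR_NO:
#       result = sock.call("i", 42, "id", 7, 3.14)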
def create_socket():
sock = Socket(Socket.Dommaine.IP,Socket.Type.TCP)
ser = HarpeServer.objects.filter(is_active=True)[:1]
if not ser:
return False
ser = ser[0]
sock.connect(ser.ip,ser.port)
if sock.verify_connexion() != sock.NTW_ERROR_NO:
print "An error occur"
return None
return sock
def send_AnalyseMgf_to_calc(analyseMfg):
sock = create_socket()
if not sock:
return False
data = analyseMfg.mgf.read() + '\0'
return sock.call("i",HarpeServer.FUNCTION_ID.ANALYSE,"i%ds" % (analyseMfg.mgf.size+1) ,analyseMfg.pk,data)
| [] |
hzi-bifo/Model-T | traitarm/reconstruction/visualize_recon.py | 197b52f6fe9b73e0411dbfc66f6d2a43081f5697 | import pandas as pd
import ete2
from ete2 import faces, Tree, AttrFace, TreeStyle
import pylab
from matplotlib.colors import hex2color, rgb2hex, hsv_to_rgb, rgb_to_hsv
kelly_colors_hex = [
0xFFB300, # Vivid Yellow
0x803E75, # Strong Purple
0xFF6800, # Vivid Orange
0xA6BDD7, # Very Light Blue
0xC10020, # Vivid Red
0xCEA262, # Grayish Yellow
0x817066, # Medium Gray
# The following don't work well for people with defective color vision
0x007D34, # Vivid Green
0xF6768E, # Strong Purplish Pink
0x00538A, # Strong Blue
0xFF7A5C, # Strong Yellowish Pink
0x53377A, # Strong Violet
0xFF8E00, # Vivid Orange Yellow
0xB32851, # Strong Purplish Red
0xF4C800, # Vivid Greenish Yellow
0x7F180D, # Strong Reddish Brown
0x93AA00, # Vivid Yellowish Green
0x593315, # Deep Yellowish Brown
0xF13A13, # Vivid Reddish Orange
0x232C16, # Dark Olive Green
]
def my_layout(node):
if node.is_leaf():
# If terminal node, draws its name
name_face = AttrFace("name")
else:
# If internal node, draws label with smaller font size
name_face = AttrFace("name", fsize=10)
# Adds the name face to the image at the preferred position
faces.add_face_to_node(name_face, node, column=0, position="branch-right")
def adjust_kelly_brightness(hex_color, val, recon_min, recon_max):
"""set brightness according to change in continuous reconstruction value"""
h, s, v = rgb_to_hsv(hex2color('#{0:06X}'.format(hex_color)))
scale_factor = 1 - (recon_max - val) / (recon_max - recon_min)
v_new = v - (v * (scale_factor))
return rgb2hex(hsv_to_rgb(pd.np.array([h, s, v_new])))
def get_style():
ts = TreeStyle()
# Do not add leaf names automatically
ts.show_leaf_name = False
ts.show_scale = True
ts.force_topology = False
# Use my custom layout
ts.layout_fn = my_layout
return ts
def plot_tree(pt_tree, target_node, out):
#pt_tree, feats, pf2color = get_tree(phenotype = phenotype, feat_list = "top_cor", is_ml_plus_phypat = True, target_node = target_node)
pt_tree.dist = 0
target = pt_tree.search_nodes(name = target_node)[0]
target.render(out + '_tree.pdf', tree_style = get_style())
#target.render(out + '_tree.png', tree_style = get_style())
    return target
def plot_legend(feats, out, pf2color, pf_desc = False, pf_acc = True, include_class = False):
fig = pylab.figure()
figlegend = pylab.figure(figsize = (9, 6))
ax = fig.add_subplot(111)
x = [0,1]
lines = [ax.plot(x, pd.np.ones(len(x)), 'o', color = "#%06x" % (pf2color[feats.index[i]]))[0] for i in range(len(pf2color))]
labels= [i for i in feats.index]
#labels= ["%s" %(feats.loc[:,"Pfam_acc"].iloc[i]) for i in range(feats.shape[0])]
#if include_class:
# labels= ["%s %s" %(labels[i], feats.loc[:, "class"].iloc[i]) for i in range(len(labels))]
#if pf_desc:
# labels = ["%s %s" % (labels[i], pf2short_desc.loc[feats.loc[:,"Pfam_acc"].iloc[i], 1]) for i in range(len(labels))]
#if pf_acc:
# labels = ["%s %s" % (labels[i], pf2acc.loc[feats.loc[:,"Pfam_acc"].iloc[i], 1]) for i in range(len(labels))]
figlegend.legend(lines, labels, markerscale = 2.5, numpoints = 1, frameon = False)
#fig.show()
fig.tight_layout()
figlegend.savefig(out + "_legend.svg")
figlegend.savefig(out + "_legend.png")
return figlegend
def get_tree(phenotype, tree, gain_recon, loss_recon, node_recon, pfam_mapping, feat_list, sample_mapping, threshold = 0.5, target_node = None, are_continuous_features_with_discrete_phenotype = False, max_feats = 10, miscl = None, node_annotation = None):
#read target feats
feats = pd.read_csv(feat_list, index_col = 0, sep = "\t")
pt_tree = ete2.Tree(tree, format = 1)
pt_tree.ladderize()
if not node_annotation is None:
node_table = pd.read_csv(node_annotation, sep = "\t", index_col = 0)
sample_mapping = pd.read_csv(sample_mapping, index_col = 0, sep = "\t")
#read node and edge reconstruction matrices
node_recon = pd.read_csv(node_recon, sep = "\t", index_col = 0)
gain_recon = pd.read_csv(gain_recon, sep = "\t", index_col = 0)
gain_recon.index = ["_".join(("_".join(i.split("_")[:-1]), i.split("_")[-1])) for i in gain_recon.index.values]
loss_recon = pd.read_csv(loss_recon, sep = "\t", index_col = 0)
loss_recon.index = ["_".join(("_".join(i.split("_")[:-1]), i.split("_")[-1])) for i in loss_recon.index.values]
#prune to target node
if target_node is not None:
pt_tree = pt_tree.search_nodes(name = target_node)[0]
node2name = dict((i.name, i.name) for i in pt_tree.traverse(strategy = 'preorder'))
pfams_with_event = set()
pfam2color = {}
#set the style of the branches and nodes according to the posterior probability
top10_feats = feats.iloc[:max_feats,]
#for visualization of continuous feature get the range of values for each feature
if are_continuous_features_with_discrete_phenotype:
recon_min = gain_recon.abs().apply(pd.np.min)
recon_max = gain_recon.abs().apply(pd.np.max)
if not miscl is None:
miscl_m = pd.read_csv(miscl, sep = "\t", index_col = 0)
for n in pt_tree.traverse():
#ignore the root
if n.name == "N1":
continue
if not node_annotation is None:
if n.name in node_table.index:
for attr,i in zip(node_table.columns, range(len(node_table.columns))):
value = node_table.loc[n.name, attr]
if not pd.isnull(value):
if value == 0:
rf = ete2.CircleFace(radius = 8, style = "circle", color = 'red')
elif value == 2:
rf = faces.CircleFace(radius = 8, style = "circle", color = 'orange')
else:
rf = faces.CircleFace(radius = 8, style = "circle", color = 'green')
else:
rf = faces.CircleFace(radius = 8, style = "circle", color = 'grey')
n.add_face(rf, column = i, position = "aligned")
ns = node_recon.loc[n.name, phenotype]
style = ete2.NodeStyle()
style["shape"] = 'square'
style['size'] = 10
if pd.isnull(ns):
style['fgcolor'] = 'grey'
elif ns < threshold:
style['fgcolor'] = 'darkred'
else:
style['fgcolor'] = 'green'
if not n.name == "N1":
branch_id = n.name + "_" + n.up.name
if gain_recon.loc[branch_id, phenotype] > threshold:
style["hz_line_type"] = 1
style["hz_line_color"] = 'green'
style["hz_line_width"] = 3
elif loss_recon.loc[branch_id, phenotype] > threshold:
style["hz_line_type"] = 1
style["hz_line_color"] = 'red'
style["hz_line_width"] = 3
else:
style["hz_line_type"] = 0
style["hz_line_color"] = 'black'
n.set_style(style)
#check if sample was misclassified and add misclassified label
if not miscl is None:
if node2name[n.name] in miscl_m.index:
tf = faces.TextFace("misclassified")
n.add_face(tf, column = 0, position = "branch-right")
#set species name instead of tax id
if n.name in sample_mapping.index:
node2name[n.name] = sample_mapping.loc[n.name,][0]
#add majority feature gains and losses
events = []
for i in range(top10_feats.shape[0]):
if not are_continuous_features_with_discrete_phenotype:
cf = faces.CircleFace(radius = 8, style = "circle", color = kelly_colors_hex[i])
#gain events
if gain_recon.loc[branch_id, top10_feats.index[i]] > threshold:
pfam2color[top10_feats.index[i]] = kelly_colors_hex[i]
tf = faces.TextFace("-")
events.append(tf)
pfams_with_event.add(node_recon.index[i])
events.append(cf)
#loss events
elif loss_recon.loc[branch_id, top10_feats.index[i]] > threshold:
pfam2color[top10_feats.index[i]] = kelly_colors_hex[i]
tf = faces.TextFace("-")
events.append(tf)
pfams_with_event.add(node_recon.index[i])
events.append(cf)
#continuous features
else:
adjusted_color = adjust_kelly_brightness(kelly_colors_hex[i], abs(loss_recon.loc[branch_id, top10_feats.index[i]]), recon_min.loc[top10_feats.index[i]], recon_max.loc[top10_feats.index[i]])
#tf = faces.TextFace(gain_recon.loc[branch_id, top10_feats.index[i]])
if loss_recon.loc[branch_id, top10_feats.index[i]] < 0:
tf = faces.TextFace("-")
else:
tf = faces.TextFace("+")
cf = faces.CircleFace(radius = 8, style = "circle", color = adjusted_color)
pfam2color[top10_feats.index[i]] = kelly_colors_hex[i]
pfams_with_event.add(node_recon.index[i])
events.append(cf)
events.append(tf)
for i in range(len(events)):
n.add_face(events[i], column = i, position = "branch-top")
for n in pt_tree.traverse():
if n.name in node2name:
n.name = node2name[n.name]
#filtered_pfams = filter(lambda i: i in list(pfams_with_event), top10_feats.loc[:,"Pfam_acc"].values)
#print filtered_pfams
#filtered_ids = pt_gt2id.loc[filtered_pfams, 0] - 1
#print filtered_ids
#top10_feats_with_event = top10_feats.loc[filtered_ids,]
#process node annotation
return pt_tree, top10_feats, pfam2color
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser("""visualize target list of features""")
parser.add_argument("node_recon", help = "node ancestral character state reconstruction")
parser.add_argument("gain_recon", help = "gain events ancestral character state reconstruction")
parser.add_argument("loss_recon", help = "loss events ancestral character state reconstruction")
parser.add_argument("tree", help = "tree with internal nodes labeled")
parser.add_argument("pfam_mapping", help = "feature mapping/list")
parser.add_argument("feat_list", help = "list of features")
parser.add_argument("--target_node", default = "N1", help = "list of features")
parser.add_argument("phenotype", help = "target phenotype")
parser.add_argument("--are_continuous_features_with_discrete_phenotype", action = 'store_true', help = "set if using continuous features with a discrete phenotype")
parser.add_argument("threshold", type = float, help = "threshold to call genotype/phenotype events")
parser.add_argument("sample_mapping", help = "mapping between sample ids and names")
parser.add_argument("out", help = "output file")
parser.add_argument("--max_feats", type = int, default = 10, help = "visualize at most max_feats features")
parser.add_argument("--miscl", help = "table of misclassified samples")
parser.add_argument("--node_annotation", help = "table of binary features for labeling the nodes")
a = parser.parse_args()
pt_tree, feats, pf2color = get_tree(node_recon = a.node_recon, gain_recon = a.gain_recon, loss_recon = a.loss_recon, pfam_mapping = a.pfam_mapping, tree = a.tree, feat_list = a.feat_list, phenotype = a.phenotype, target_node = a.target_node, threshold = a.threshold, sample_mapping = a.sample_mapping, are_continuous_features_with_discrete_phenotype = a.are_continuous_features_with_discrete_phenotype, max_feats = a.max_feats, miscl = a.miscl, node_annotation = a.node_annotation)
plot_tree(pt_tree, a.target_node, a.out)
plot_legend(feats, a.out, pf2color)
| [((1247, 1321), 'ete2.faces.add_face_to_node', 'faces.add_face_to_node', (['name_face', 'node'], {'column': '(0)', 'position': '"""branch-right"""'}), "(name_face, node, column=0, position='branch-right')\n", (1269, 1321), False, 'from ete2 import faces, Tree, AttrFace, TreeStyle\n'), ((1732, 1743), 'ete2.TreeStyle', 'TreeStyle', ([], {}), '()\n', (1741, 1743), False, 'from ete2 import faces, Tree, AttrFace, TreeStyle\n'), ((2473, 2487), 'pylab.figure', 'pylab.figure', ([], {}), '()\n', (2485, 2487), False, 'import pylab\n'), ((2504, 2532), 'pylab.figure', 'pylab.figure', ([], {'figsize': '(9, 6)'}), '(figsize=(9, 6))\n', (2516, 2532), False, 'import pylab\n'), ((3755, 3800), 'pandas.read_csv', 'pd.read_csv', (['feat_list'], {'index_col': '(0)', 'sep': '"""\t"""'}), "(feat_list, index_col=0, sep='\\t')\n", (3766, 3800), True, 'import pandas as pd\n'), ((3819, 3844), 'ete2.Tree', 'ete2.Tree', (['tree'], {'format': '(1)'}), '(tree, format=1)\n', (3828, 3844), False, 'import ete2\n'), ((4005, 4055), 'pandas.read_csv', 'pd.read_csv', (['sample_mapping'], {'index_col': '(0)', 'sep': '"""\t"""'}), "(sample_mapping, index_col=0, sep='\\t')\n", (4016, 4055), True, 'import pandas as pd\n'), ((4125, 4171), 'pandas.read_csv', 'pd.read_csv', (['node_recon'], {'sep': '"""\t"""', 'index_col': '(0)'}), "(node_recon, sep='\\t', index_col=0)\n", (4136, 4171), True, 'import pandas as pd\n'), ((4193, 4239), 'pandas.read_csv', 'pd.read_csv', (['gain_recon'], {'sep': '"""\t"""', 'index_col': '(0)'}), "(gain_recon, sep='\\t', index_col=0)\n", (4204, 4239), True, 'import pandas as pd\n'), ((4377, 4423), 'pandas.read_csv', 'pd.read_csv', (['loss_recon'], {'sep': '"""\t"""', 'index_col': '(0)'}), "(loss_recon, sep='\\t', index_col=0)\n", (4388, 4423), True, 'import pandas as pd\n'), ((10225, 10285), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""visualize target list of features"""'], {}), "('visualize target list of features')\n", (10248, 10285), False, 'import argparse\n'), ((1040, 1056), 'ete2.AttrFace', 'AttrFace', (['"""name"""'], {}), "('name')\n", (1048, 1056), False, 'from ete2 import faces, Tree, AttrFace, TreeStyle\n'), ((1152, 1178), 'ete2.AttrFace', 'AttrFace', (['"""name"""'], {'fsize': '(10)'}), "('name', fsize=10)\n", (1160, 1178), False, 'from ete2 import faces, Tree, AttrFace, TreeStyle\n'), ((3928, 3979), 'pandas.read_csv', 'pd.read_csv', (['node_annotation'], {'sep': '"""\t"""', 'index_col': '(0)'}), "(node_annotation, sep='\\t', index_col=0)\n", (3939, 3979), True, 'import pandas as pd\n'), ((5221, 5262), 'pandas.read_csv', 'pd.read_csv', (['miscl'], {'sep': '"""\t"""', 'index_col': '(0)'}), "(miscl, sep='\\t', index_col=0)\n", (5232, 5262), True, 'import pandas as pd\n'), ((6297, 6313), 'ete2.NodeStyle', 'ete2.NodeStyle', ([], {}), '()\n', (6311, 6313), False, 'import ete2\n'), ((6386, 6399), 'pandas.isnull', 'pd.isnull', (['ns'], {}), '(ns)\n', (6395, 6399), True, 'import pandas as pd\n'), ((1676, 1702), 'pandas.np.array', 'pd.np.array', (['[h, s, v_new]'], {}), '([h, s, v_new])\n', (1687, 1702), True, 'import pandas as pd\n'), ((7370, 7401), 'ete2.faces.TextFace', 'faces.TextFace', (['"""misclassified"""'], {}), "('misclassified')\n", (7384, 7401), False, 'from ete2 import faces, Tree, AttrFace, TreeStyle\n'), ((7861, 7930), 'ete2.faces.CircleFace', 'faces.CircleFace', ([], {'radius': '(8)', 'style': '"""circle"""', 'color': 'kelly_colors_hex[i]'}), "(radius=8, style='circle', color=kelly_colors_hex[i])\n", (7877, 7930), False, 'from ete2 import faces, Tree, AttrFace, 
TreeStyle\n'), ((9314, 9378), 'ete2.faces.CircleFace', 'faces.CircleFace', ([], {'radius': '(8)', 'style': '"""circle"""', 'color': 'adjusted_color'}), "(radius=8, style='circle', color=adjusted_color)\n", (9330, 9378), False, 'from ete2 import faces, Tree, AttrFace, TreeStyle\n'), ((5628, 5644), 'pandas.isnull', 'pd.isnull', (['value'], {}), '(value)\n', (5637, 5644), True, 'import pandas as pd\n'), ((6100, 6156), 'ete2.faces.CircleFace', 'faces.CircleFace', ([], {'radius': '(8)', 'style': '"""circle"""', 'color': '"""grey"""'}), "(radius=8, style='circle', color='grey')\n", (6116, 6156), False, 'from ete2 import faces, Tree, AttrFace, TreeStyle\n'), ((8162, 8181), 'ete2.faces.TextFace', 'faces.TextFace', (['"""-"""'], {}), "('-')\n", (8176, 8181), False, 'from ete2 import faces, Tree, AttrFace, TreeStyle\n'), ((9194, 9213), 'ete2.faces.TextFace', 'faces.TextFace', (['"""-"""'], {}), "('-')\n", (9208, 9213), False, 'from ete2 import faces, Tree, AttrFace, TreeStyle\n'), ((9269, 9288), 'ete2.faces.TextFace', 'faces.TextFace', (['"""+"""'], {}), "('+')\n", (9283, 9288), False, 'from ete2 import faces, Tree, AttrFace, TreeStyle\n'), ((5718, 5772), 'ete2.CircleFace', 'ete2.CircleFace', ([], {'radius': '(8)', 'style': '"""circle"""', 'color': '"""red"""'}), "(radius=8, style='circle', color='red')\n", (5733, 5772), False, 'import ete2\n'), ((8559, 8578), 'ete2.faces.TextFace', 'faces.TextFace', (['"""-"""'], {}), "('-')\n", (8573, 8578), False, 'from ete2 import faces, Tree, AttrFace, TreeStyle\n'), ((5853, 5911), 'ete2.faces.CircleFace', 'faces.CircleFace', ([], {'radius': '(8)', 'style': '"""circle"""', 'color': '"""orange"""'}), "(radius=8, style='circle', color='orange')\n", (5869, 5911), False, 'from ete2 import faces, Tree, AttrFace, TreeStyle\n'), ((5981, 6038), 'ete2.faces.CircleFace', 'faces.CircleFace', ([], {'radius': '(8)', 'style': '"""circle"""', 'color': '"""green"""'}), "(radius=8, style='circle', color='green')\n", (5997, 6038), False, 'from ete2 import faces, Tree, AttrFace, TreeStyle\n')] |
volpatto/firedrake_scripts | scripts/misc/operator_condition_number_scipy.py | ba9c935bb0c9a6bbc6de69f476e42ad0ea8bb1c6 | import attr
from firedrake import *
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
from scipy.linalg import svd
from scipy.sparse.linalg import svds
from scipy.sparse import csr_matrix
from slepc4py import SLEPc
import pandas as pd
from tqdm import tqdm
import os
matplotlib.use('Agg')
@attr.s
class ConditionNumberResult(object):
form = attr.ib()
assembled_form = attr.ib()
condition_number = attr.ib()
sparse_operator = attr.ib()
number_of_dofs = attr.ib()
nnz = attr.ib()
is_operator_symmetric = attr.ib()
bcs = attr.ib(default=list())
def plot_matrix(assembled_form, **kwargs):
"""Provides a plot of a matrix."""
fig, ax = plt.subplots(1, 1)
petsc_mat = assembled_form.M.handle
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
Mnp = Mnp.toarray()
# Eliminate rows and columns filled with zero entries
Mnp = Mnp[~(Mnp==0).all(1)]
idx = np.argwhere(np.all(Mnp[..., :] == 0, axis=0))
Mnp = np.delete(Mnp, idx, axis=1)
Am = np.ma.masked_values(Mnp, 0, rtol=1e-13)
# Plot the matrix
plot = ax.matshow(Am, **kwargs)
# Remove axis ticks and values
ax.tick_params(length=0)
ax.set_xticklabels([])
ax.set_yticklabels([])
return plot
def plot_matrix_mixed(assembled_form, **kwargs):
"""Provides a plot of a mixed matrix."""
fig, ax = plt.subplots(1, 1)
petsc_mat = assembled_form.M.handle
f0_size = assembled_form.M[0, 0].handle.getSize()
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
Mnp = Mnp.toarray()
# Eliminate rows and columns filled with zero entries
Mnp = Mnp[~(Mnp==0).all(1)]
idx = np.argwhere(np.all(Mnp[..., :] == 0, axis=0))
Mnp = np.delete(Mnp, idx, axis=1)
Am = np.ma.masked_values(Mnp, 0, rtol=1e-13)
# Plot the matrix
plot = ax.matshow(Am, **kwargs)
# Remove axis ticks and values
ax.tick_params(length=0)
ax.set_xticklabels([])
ax.set_yticklabels([])
ax.axhline(y=f0_size[0] - 0.5, color="k")
ax.axvline(x=f0_size[0] - 0.5, color="k")
return plot
def plot_matrix_primal_hybrid_full(a_form, bcs=[], **kwargs):
"""Provides a plot of a full hybrid-mixed matrix."""
fig, ax = plt.subplots(1, 1)
assembled_form = assemble(a_form, bcs=bcs, mat_type="aij")
petsc_mat = assembled_form.M.handle
f0_size = assembled_form.M[0, 0].handle.getSize()
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
Mnp = Mnp.toarray()
# Eliminate rows and columns filled with zero entries
Mnp = Mnp[~(Mnp==0).all(1)]
idx = np.argwhere(np.all(Mnp[..., :] == 0, axis=0))
Mnp = np.delete(Mnp, idx, axis=1)
Am = np.ma.masked_values(Mnp, 0, rtol=1e-13)
# Plot the matrix
plot = ax.matshow(Am, **kwargs)
# Remove axis ticks and values
ax.tick_params(length=0)
ax.set_xticklabels([])
ax.set_yticklabels([])
ax.axhline(y=f0_size[0] - 0.5, color="k")
ax.axvline(x=f0_size[0] - 0.5, color="k")
return plot
def plot_matrix_mixed_hybrid_full(a_form, bcs=[], **kwargs):
"""Provides a plot of a full hybrid-mixed matrix."""
fig, ax = plt.subplots(1, 1)
assembled_form = assemble(a_form, bcs=bcs, mat_type="aij")
petsc_mat = assembled_form.M.handle
f0_size = assembled_form.M[0, 0].handle.getSize()
f1_size = assembled_form.M[1, 1].handle.getSize()
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
Mnp = Mnp.toarray()
# Eliminate rows and columns filled with zero entries
Mnp = Mnp[~(Mnp==0).all(1)]
idx = np.argwhere(np.all(Mnp[..., :] == 0, axis=0))
Mnp = np.delete(Mnp, idx, axis=1)
Am = np.ma.masked_values(Mnp, 0, rtol=1e-13)
# Plot the matrix
plot = ax.matshow(Am, **kwargs)
# Remove axis ticks and values
ax.tick_params(length=0)
ax.set_xticklabels([])
ax.set_yticklabels([])
ax.axhline(y=f0_size[0] - 0.5, color="k")
ax.axvline(x=f0_size[0] - 0.5, color="k")
ax.axhline(y=f0_size[0] + f1_size[0] - 0.5, color="k")
ax.axvline(x=f0_size[0] + f1_size[0] - 0.5, color="k")
return plot
def plot_matrix_hybrid_multiplier(a_form, trace_index=2, bcs=[], **kwargs):
"""Provides a plot of a condensed hybrid-mixed matrix for single scale problems."""
fig, ax = plt.subplots(1, 1)
_A = Tensor(a_form)
A = _A.blocks
idx = trace_index
S = A[idx, idx] - A[idx, :idx] * A[:idx, :idx].inv * A[:idx, idx]
Smat = assemble(S, bcs=bcs)
petsc_mat = Smat.M.handle
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
Mnp = Mnp.toarray()
# Eliminate rows and columns filled with zero entries
Mnp = Mnp[~(Mnp==0).all(1)]
idx = np.argwhere(np.all(Mnp[..., :] == 0, axis=0))
Mnp = np.delete(Mnp, idx, axis=1)
Am = np.ma.masked_values(Mnp, 0, rtol=1e-13)
# Plot the matrix
plot = ax.matshow(Am, **kwargs)
# Below there is the spy alternative
# plot = plt.spy(Am, **kwargs)
# Remove axis ticks and values
ax.tick_params(length=0)
ax.set_xticklabels([])
ax.set_yticklabels([])
return plot
def filter_real_part_in_array(array: np.ndarray, imag_threshold: float = 1e-5) -> np.ndarray:
"""Utility function to filter real part in a numpy array.
:param array:
Array with real and complex numbers.
:param imag_threshold:
Threshold to cut off imaginary part in complex number.
:return:
Filtered array with only real numbers.
"""
    real_part_array = array.real[abs(array.imag) < imag_threshold]
return real_part_array
def calculate_condition_number(
A,
num_of_factors,
backend: str = "scipy",
use_sparse: bool = False,
zero_tol: float = 1e-5
):
backend = backend.lower()
if backend == "scipy":
size = A.getSize()
Mnp = csr_matrix(A.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
if use_sparse:
singular_values = svds(
A=Mnp,
k=num_of_factors,
which="LM",
maxiter=5000,
return_singular_vectors=False,
solver="lobpcg"
)
else:
M = Mnp.toarray()
singular_values = svd(M, compute_uv=False, check_finite=False)
singular_values = singular_values[singular_values > zero_tol]
condition_number = singular_values.max() / singular_values.min()
elif backend == "slepc":
S = SLEPc.SVD()
S.create()
S.setOperator(A)
S.setType(SLEPc.SVD.Type.LAPACK)
S.setDimensions(nsv=num_of_factors)
S.setTolerances(max_it=5000)
S.setWhichSingularTriplets(SLEPc.SVD.Which.LARGEST)
S.solve()
num_converged_values = S.getConverged()
singular_values_list = list()
if num_converged_values > 0:
for i in range(num_converged_values):
singular_value = S.getValue(i)
singular_values_list.append(singular_value)
else:
raise RuntimeError("SLEPc SVD has not converged.")
singular_values = np.array(singular_values_list)
singular_values = singular_values[singular_values > zero_tol]
condition_number = singular_values.max() / singular_values.min()
else:
raise NotImplementedError("The required method for condition number estimation is currently unavailable.")
return condition_number
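# Minimal, self-contained usage sketch (added for illustration; not part of the
# original script). The condition number of diag(1, 2, 4) is exactly 4, which
# the SciPy backend recovers from the assembled PETSc matrix.
def _example_condition_number():
    from petsc4py import PETSc
    indptr = np.array([0, 1, 2, 3], dtype=np.int32)
    indices = np.array([0, 1, 2], dtype=np.int32)
    data = np.array([1.0, 2.0, 4.0])
    A = PETSc.Mat().createAIJ(size=(3, 3), csr=(indptr, indices, data))
    A.assemble()
    return calculate_condition_number(A, num_of_factors=2)  # approx 4.0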
def solve_poisson_cg(mesh, degree=1, use_quads=False):
# Function space declaration
V = FunctionSpace(mesh, "CG", degree)
# Trial and test functions
u = TrialFunction(V)
v = TestFunction(V)
# Dirichlet BCs
bcs = DirichletBC(V, 0.0, "on_boundary")
# Variational form
a = inner(grad(u), grad(v)) * dx
A = assemble(a, bcs=bcs, mat_type="aij")
petsc_mat = A.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = V.dim()
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=A,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric
)
return result
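# Usage sketch (illustrative; not part of the original script): condition
# number of the CG(1) Poisson operator on a small unit-square mesh.
#   mesh = UnitSquareMesh(4, 4)
#   print(solve_poisson_cg(mesh, degree=1).condition_number)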
def solve_poisson_ls(mesh, degree=1):
# Function space declaration
pressure_family = 'CG'
velocity_family = 'CG'
U = VectorFunctionSpace(mesh, velocity_family, degree)
V = FunctionSpace(mesh, pressure_family, degree)
W = U * V
# Trial and test functions
u, p = TrialFunctions(W)
v, q = TestFunctions(W)
# Mesh entities
h = CellDiameter(mesh)
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
sigma_e = Function(U, name='Exact velocity')
sigma_e.project(-grad(p_exact))
# Dirichlet BCs
bcs = DirichletBC(W[0], sigma_e, "on_boundary")
# Stabilization parameters
delta_1 = Constant(1)
delta_2 = Constant(1)
delta_3 = Constant(1)
# Least-squares terms
a = delta_1 * inner(u + grad(p), v + grad(q)) * dx
a += delta_2 * div(u) * div(v) * dx
a += delta_3 * inner(curl(u), curl(v)) * dx
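    # The three terms above are the least-squares residuals of the Darcy law, the mass
    # balance and the irrotationality constraint, weighted by delta_1, delta_2 and delta_3.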
A = assemble(a, bcs=bcs, mat_type="aij")
petsc_mat = A.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = W.dim()
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=A,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric
)
return result
def solve_poisson_cgls(mesh, degree=1):
# Function space declaration
pressure_family = 'CG'
velocity_family = 'CG'
U = VectorFunctionSpace(mesh, velocity_family, degree)
V = FunctionSpace(mesh, pressure_family, degree)
W = U * V
# Trial and test functions
u, p = TrialFunctions(W)
v, q = TestFunctions(W)
# Mesh entities
n = FacetNormal(mesh)
h = CellDiameter(mesh)
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
sigma_e = Function(U, name='Exact velocity')
sigma_e.project(-grad(p_exact))
# Dirichlet BCs
bcs = DirichletBC(W[0], sigma_e, "on_boundary")
# Mixed classical terms
a = (dot(u, v) - div(v) * p - q * div(u)) * dx
# Stabilizing terms
a += -0.5 * inner((u + grad(p)), v + grad(q)) * dx
# a += 0.5 * h * h * div(u) * div(v) * dx
# a += 0.5 * h * h * inner(curl(u), curl(v)) * dx
# L += 0.5 * h * h * f * div(v) * dx
a += 0.5 * div(u) * div(v) * dx
a += 0.5 * inner(curl(u), curl(v)) * dx
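    # Residual-based stabilization: a (negatively weighted) Darcy-law residual term plus
    # divergence and curl least-squares contributions added to the classical mixed form.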
A = assemble(a, bcs=bcs, mat_type="aij")
petsc_mat = A.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = W.dim()
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=A,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric
)
return result
def solve_poisson_vms(mesh, degree=1):
# Function space declaration
pressure_family = 'CG'
velocity_family = 'CG'
U = VectorFunctionSpace(mesh, velocity_family, degree)
V = FunctionSpace(mesh, pressure_family, degree)
W = U * V
# Trial and test functions
u, p = TrialFunctions(W)
v, q = TestFunctions(W)
# Mesh entities
n = FacetNormal(mesh)
h = CellDiameter(mesh)
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
sigma_e = Function(U, name='Exact velocity')
sigma_e.project(-grad(p_exact))
# Dirichlet BCs
bcs = DirichletBC(W[0], sigma_e, "on_boundary")
# Mixed classical terms
a = (dot(u, v) - div(v) * p + q * div(u)) * dx
# Stabilizing terms
a += 0.5 * inner(u + grad(p), grad(q) - v) * dx
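    # VMS-type stabilization: the Darcy-law residual is tested against grad(q) - v,
    # i.e. with the opposite sign on v compared with the CGLS variant above.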
# a += 0.5 * h * h * div(u) * div(v) * dx
# a += 0.5 * h * h * inner(curl(u), curl(v)) * dx
# L += 0.5 * h * h * f * div(v) * dx
# a += 0.5 * div(u) * div(v) * dx
# a += 0.5 * inner(curl(u), curl(v)) * dx
# L += 0.5 * f * div(v) * dx
A = assemble(a, bcs=bcs, mat_type="aij")
petsc_mat = A.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = W.dim()
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=A,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric
)
return result
def solve_poisson_mixed_RT(mesh, degree=1):
# Function space declaration
use_quads = str(mesh.ufl_cell()) == "quadrilateral"
if use_quads:
hdiv_family = 'RTCF'
pressure_family = 'DQ'
else:
hdiv_family = 'RT'
pressure_family = 'DG'
U = FunctionSpace(mesh, hdiv_family, degree + 1)
V = FunctionSpace(mesh, pressure_family, degree)
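    # H(div)-conforming velocity space (RT/RTCF of order degree + 1) paired with a
    # discontinuous pressure space of order degree.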
W = U * V
# Trial and test functions
u, p = TrialFunctions(W)
v, q = TestFunctions(W)
# Mesh entities
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
sigma_e = Function(U, name='Exact velocity')
sigma_e.project(-grad(p_exact))
# Dirichlet BCs
bcs = DirichletBC(W[0], sigma_e, "on_boundary")
# Mixed classical terms
a = (dot(u, v) - div(v) * p + q * div(u)) * dx
A = assemble(a, bcs=bcs, mat_type="aij")
petsc_mat = A.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = W.dim()
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=A,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric
)
return result
def solve_poisson_dgls(mesh, degree=1):
# Function space declaration
use_quads = str(mesh.ufl_cell()) == "quadrilateral"
pressure_family = 'DQ' if use_quads else 'DG'
velocity_family = 'DQ' if use_quads else 'DG'
U = VectorFunctionSpace(mesh, velocity_family, degree)
V = FunctionSpace(mesh, pressure_family, degree)
W = U * V
# Trial and test functions
u, p = TrialFunctions(W)
v, q = TestFunctions(W)
# Mesh entities
n = FacetNormal(mesh)
h = CellDiameter(mesh)
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
sigma_e = Function(U, name='Exact velocity')
sigma_e.project(-grad(p_exact))
# Dirichlet BCs
# bcs = DirichletBC(W[0], sigma_e, "on_boundary", method="geometric")
# Average cell size and mesh dependent stabilization
h_avg = (h("+") + h("-")) / 2.0
# Jump stabilizing parameters based on Badia-Codina stabilized dG method
L0 = 1
eta_p = L0 * h # method B in the Badia-Codina paper
# eta_p = 1
# eta_p = L0 * L0 # method D in the Badia-Codina paper
eta_u = h / L0 # method B in the Badia-Codina paper
# eta_u = 1
# Nitsche's penalizing term
beta_0 = Constant(1.0)
beta = beta_0 / h
# Mixed classical terms
a = (dot(u, v) - div(v) * p - q * div(u)) * dx
# DG terms
a += jump(v, n) * avg(p) * dS - avg(q) * jump(u, n) * dS
# Edge stabilizing terms
# ** Badia-Codina based
a += (avg(eta_p) / h_avg) * (jump(u, n) * jump(v, n)) * dS
a += (avg(eta_u) / h_avg) * dot(jump(p, n), jump(q, n)) * dS
# ** Mesh independent terms
# a += jump(u, n) * jump(v, n) * dS
# a += dot(jump(p, n), jump(q, n)) * dS
# Volumetric stabilizing terms
# a += 0.5 * h * h * div(u) * div(v) * dx
# a += 0.5 * h * h * inner(curl(u), curl(v)) * dx
# L += 0.5 * h * h * f * div(v) * dx
# a += -0.5 * inner(u + grad(p), v + grad(q)) * dx
# a += 0.5 * div(u) * div(v) * dx
# a += 0.5 * inner(curl(u), curl(v)) * dx
# ** Badia-Codina based
a += -eta_u * inner(u + grad(p), v + grad(q)) * dx
a += eta_p * div(u) * div(v) * dx
a += eta_p * inner(curl(u), curl(v)) * dx
# Weakly imposed boundary conditions
a += dot(v, n) * p * ds - q * dot(u, n) * ds
    a += beta * p * q * ds  # may decrease convergence rates
# ** The terms below are based on ASGS Badia-Codina (2010), it is not a classical Nitsche's method
a += (eta_p / h) * dot(u, n) * dot(v, n) * ds
a += (eta_u / h) * dot(p * n, q * n) * ds
A = assemble(a, mat_type="aij")
petsc_mat = A.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = W.dim()
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=A,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric
)
return result
def solve_poisson_dvms(mesh, degree=1):
# Function space declaration
use_quads = str(mesh.ufl_cell()) == "quadrilateral"
pressure_family = 'DQ' if use_quads else 'DG'
velocity_family = 'DQ' if use_quads else 'DG'
U = VectorFunctionSpace(mesh, velocity_family, degree)
V = FunctionSpace(mesh, pressure_family, degree)
W = U * V
# Trial and test functions
u, p = TrialFunctions(W)
v, q = TestFunctions(W)
# Mesh entities
n = FacetNormal(mesh)
h = CellDiameter(mesh)
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
sigma_e = Function(U, name='Exact velocity')
sigma_e.project(-grad(p_exact))
# Dirichlet BCs
# bcs = DirichletBC(W[0], sigma_e, "on_boundary", method="geometric")
# Average cell size and mesh dependent stabilization
h_avg = (h("+") + h("-")) / 2.0
# Jump stabilizing parameters based on Badia-Codina stabilized dG method
L0 = 1
eta_p = L0 * h # method B in the Badia-Codina paper
# eta_p = L0 * L0 # method D in the Badia-Codina paper
eta_u = h / L0 # method B in the Badia-Codina paper
# Mixed classical terms
a = (dot(u, v) - div(v) * p + q * div(u)) * dx
# DG terms
a += jump(v, n) * avg(p) * dS - avg(q) * jump(u, n) * dS
# Edge stabilizing terms
# ** Badia-Codina based
a += (avg(eta_p) / h_avg) * (jump(u, n) * jump(v, n)) * dS
a += (avg(eta_u) / h_avg) * dot(jump(p, n), jump(q, n)) * dS
# ** Mesh independent (original)
# a += jump(u, n) * jump(v, n) * dS # not considered in the original paper
# a += dot(jump(p, n), jump(q, n)) * dS
# Volumetric stabilizing terms
# a += 0.5 * inner(u + grad(p), grad(q) - v) * dx
# a += 0.5 * h * h * div(u) * div(v) * dx
# a += 0.5 * h * h * inner(curl(u), curl(v)) * dx
# L += 0.5 * h * h * f * div(v) * dx
# a += 0.5 * div(u) * div(v) * dx
# a += 0.5 * inner(curl(u), curl(v)) * dx
# L += 0.5 * f * div(v) * dx
# ** Badia-Codina based
a += eta_u * inner(u + grad(p), grad(q) - v) * dx
a += eta_p * div(u) * div(v) * dx
# Weakly imposed boundary conditions
a += dot(v, n) * p * ds - q * dot(u, n) * ds
# ** The terms below are based on ASGS Badia-Codina (2010), it is not a classical Nitsche's method
a += (eta_p / h) * dot(u, n) * dot(v, n) * ds
    a += (eta_u / h) * dot(p * n, q * n) * ds  # may decrease convergence rates
    # ** Classical Nitsche
    # a += beta * p * q * ds  # may decrease convergence rates (Nitsche)
A = assemble(a, mat_type="aij")
petsc_mat = A.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = W.dim()
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=A,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric
)
return result
def solve_poisson_sipg(mesh, degree=1):
# Function space declaration
use_quads = str(mesh.ufl_cell()) == "quadrilateral"
pressure_family = 'DQ' if use_quads else 'DG'
V = FunctionSpace(mesh, pressure_family, degree)
# Trial and test functions
p = TrialFunction(V)
q = TestFunction(V)
# Mesh entities
n = FacetNormal(mesh)
h = CellDiameter(mesh)
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
# Forcing function
f_expression = div(-grad(p_exact))
f = Function(V).interpolate(f_expression)
# Edge stabilizing parameter
beta0 = Constant(1e1)
beta = beta0 / h
# Symmetry term. Choose if the method is SIPG (-1) or NIPG (1)
s = Constant(-1)
# Classical volumetric terms
a = inner(grad(p), grad(q)) * dx
L = f * q * dx
# DG edge terms
a += s * dot(jump(p, n), avg(grad(q))) * dS - dot(avg(grad(p)), jump(q, n)) * dS
# Edge stabilizing terms
a += beta("+") * dot(jump(p, n), jump(q, n)) * dS
# Weak boundary conditions
a += s * dot(p * n, grad(q)) * ds - dot(grad(p), q * n) * ds
a += beta * p * q * ds
A = assemble(a, mat_type="aij")
petsc_mat = A.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = V.dim()
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=A,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric
)
return result
def solve_poisson_dls(mesh, degree=1):
# Function space declaration
use_quads = str(mesh.ufl_cell()) == "quadrilateral"
pressure_family = 'DQ' if use_quads else 'DG'
velocity_family = 'DQ' if use_quads else 'DG'
U = VectorFunctionSpace(mesh, velocity_family, degree)
V = FunctionSpace(mesh, pressure_family, degree)
W = U * V
# Trial and test functions
u, p = TrialFunctions(W)
v, q = TestFunctions(W)
# Mesh entities
n = FacetNormal(mesh)
h = CellDiameter(mesh)
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
sigma_e = Function(U, name='Exact velocity')
sigma_e.project(-grad(p_exact))
# Dirichlet BCs
# bcs = DirichletBC(W[0], sigma_e, "on_boundary", method="geometric")
# Average cell size and mesh dependent stabilization
h_avg = (h("+") + h("-")) / 2.0
# Jump stabilizing parameters based on Badia-Codina stabilized dG method
# L0 = 1
# eta_p = L0 * h_avg # method B in the Badia-Codina paper
eta_p = 1
# eta_p = L0 * L0 # method D in the Badia-Codina paper
# eta_u = h_avg / L0 # method B in the Badia-Codina paper
eta_u = 1
# eta_u_bc = h / L0 # method B in the Badia-Codina paper
eta_u_bc = 1
# Least-Squares weights
delta = Constant(1.0)
# delta = h
delta_0 = delta
delta_1 = delta
delta_2 = delta
delta_3 = 1 / h
delta_4 = 1 / h
# Least-squares terms
a = delta_0 * inner(u + grad(p), v + grad(q)) * dx
a += delta_1 * div(u) * div(v) * dx
a += delta_2 * inner(curl(u), curl(v)) * dx
# Edge stabilizing terms
# ** Badia-Codina based (better results) **
a += eta_u * avg(delta_3) * (jump(u, n) * jump(v, n)) * dS
a += eta_p * avg(delta_4) * dot(jump(p, n), jump(q, n)) * dS
    a += eta_u_bc * delta_3 * p * q * ds  # may decrease convergence rates
a += eta_u_bc * delta_4 * dot(u, n) * dot(v, n) * ds
# ** Mesh independent **
# a += jump(u, n) * jump(v, n) * dS
# a += dot(jump(p, n), jump(q, n)) * dS
# a += p * q * ds
A = assemble(a, mat_type="aij")
petsc_mat = A.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-12)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = W.dim()
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=A,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric
)
return result
def solve_poisson_sdhm(
mesh,
degree=1,
is_multiplier_continuous=False
):
# Function space declaration
use_quads = str(mesh.ufl_cell()) == "quadrilateral"
pressure_family = 'DQ' if use_quads else 'DG'
velocity_family = 'DQ' if use_quads else 'DG'
trace_family = "HDiv Trace"
U = VectorFunctionSpace(mesh, velocity_family, degree)
V = FunctionSpace(mesh, pressure_family, degree)
if is_multiplier_continuous:
LagrangeElement = FiniteElement("Lagrange", mesh.ufl_cell(), degree)
C0TraceElement = LagrangeElement["facet"]
T = FunctionSpace(mesh, C0TraceElement)
else:
T = FunctionSpace(mesh, trace_family, degree)
W = U * V * T
# Trial and test functions
# solution = Function(W)
# u, p, lambda_h = split(solution)
u, p, lambda_h = TrialFunctions(W)
v, q, mu_h = TestFunctions(W)
# Mesh entities
n = FacetNormal(mesh)
h = CellDiameter(mesh)
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
sigma_e = Function(U, name='Exact velocity')
sigma_e.project(-grad(p_exact))
# Forcing function
f_expression = div(-grad(p_exact))
f = Function(V).interpolate(f_expression)
# BCs
u_projected = sigma_e
p_boundaries = p_exact
bcs = DirichletBC(W.sub(2), p_exact, "on_boundary")
# Hybridization parameter
beta_0 = Constant(1.0e-18)
# beta = beta_0 / h
beta = beta_0
# Stabilization parameters
delta_0 = Constant(-1)
delta_1 = Constant(-0.5) * h * h
delta_2 = Constant(0.5) * h * h
delta_3 = Constant(0.5) * h * h
# Mixed classical terms
a = (dot(u, v) - div(v) * p + delta_0 * q * div(u)) * dx
L = delta_0 * f * q * dx
# Stabilizing terms
a += delta_1 * inner(u + grad(p), v + grad(q)) * dx
a += delta_2 * div(u) * div(v) * dx
a += delta_3 * inner(curl(u), curl(v)) * dx
L += delta_2 * f * div(v) * dx
# Hybridization terms
a += lambda_h("+") * dot(v, n)("+") * dS + mu_h("+") * dot(u, n)("+") * dS
a += beta("+") * (lambda_h("+") - p("+")) * (mu_h("+") - q("+")) * dS
# Weakly imposed BC
a += (p_boundaries * dot(v, n) + mu_h * (dot(u, n) - dot(u_projected, n))) * ds
a += beta * (lambda_h - p_boundaries) * mu_h * ds
F = a - L
a_form = lhs(F)
_A = Tensor(a_form)
A = _A.blocks
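    # Static condensation via Slate: Schur complement of the trace (multiplier) block
    # with respect to the local (u, p) unknowns.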
S = A[2, 2] - A[2, :2] * A[:2, :2].inv * A[:2, 2]
Smat = assemble(S, bcs=bcs)
petsc_mat = Smat.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = Mnp.shape[0]
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=Smat,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric,
bcs=bcs
)
return result
def solve_poisson_hdg(
mesh,
degree=1,
is_multiplier_continuous=False
):
# Function space declaration
use_quads = str(mesh.ufl_cell()) == "quadrilateral"
pressure_family = 'DQ' if use_quads else 'DG'
velocity_family = 'DQ' if use_quads else 'DG'
trace_family = "HDiv Trace"
U = VectorFunctionSpace(mesh, velocity_family, degree)
V = FunctionSpace(mesh, pressure_family, degree)
if is_multiplier_continuous:
LagrangeElement = FiniteElement("Lagrange", mesh.ufl_cell(), degree)
C0TraceElement = LagrangeElement["facet"]
T = FunctionSpace(mesh, C0TraceElement)
else:
T = FunctionSpace(mesh, trace_family, degree)
W = U * V * T
# Trial and test functions
# solution = Function(W)
# u, p, lambda_h = split(solution)
u, p, lambda_h = TrialFunctions(W)
v, q, mu_h = TestFunctions(W)
# Mesh entities
n = FacetNormal(mesh)
h = CellDiameter(mesh)
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
sigma_e = Function(U, name='Exact velocity')
sigma_e.project(-grad(p_exact))
# Forcing function
f_expression = div(-grad(p_exact))
f = Function(V).interpolate(f_expression)
# Dirichlet BCs
bc_multiplier = DirichletBC(W.sub(2), p_exact, "on_boundary")
# Hybridization parameter
beta_0 = Constant(1.0e0)
beta = beta_0 / h
# beta = beta_0
# Numerical flux trace
u_hat = u + beta * (p - lambda_h) * n
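    # The numerical flux trace couples the cell unknowns to the facet multiplier through
    # the penalty term beta * (p - lambda_h).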
# HDG classical form
a = (dot(u, v) - div(v) * p) * dx + lambda_h("+") * jump(v, n) * dS
a += -dot(u, grad(q)) * dx + jump(u_hat, n) * q("+") * dS
L = f * q * dx
# Transmission condition
a += jump(u_hat, n) * mu_h("+") * dS
# Weakly imposed BC
a += lambda_h * dot(v, n) * ds
a += dot(u_hat, n) * q * ds
F = a - L
a_form = lhs(F)
_A = Tensor(a_form)
A = _A.blocks
S = A[2, 2] - A[2, :2] * A[:2, :2].inv * A[:2, 2]
Smat = assemble(S, bcs=bc_multiplier)
petsc_mat = Smat.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = Mnp.shape[0]
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=Smat,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric,
bcs=bc_multiplier
)
return result
def solve_poisson_cgh(
mesh,
degree=1,
is_multiplier_continuous=False
):
# Function space declaration
use_quads = str(mesh.ufl_cell()) == "quadrilateral"
pressure_family = 'DQ' if use_quads else 'DG'
trace_family = "HDiv Trace"
V = FunctionSpace(mesh, pressure_family, degree)
if is_multiplier_continuous:
LagrangeElement = FiniteElement("Lagrange", mesh.ufl_cell(), degree)
C0TraceElement = LagrangeElement["facet"]
T = FunctionSpace(mesh, C0TraceElement)
else:
T = FunctionSpace(mesh, trace_family, degree)
W = V * T
# Trial and test functions
# solution = Function(W)
# u, p, lambda_h = split(solution)
p, lambda_h = TrialFunctions(W)
q, mu_h = TestFunctions(W)
# Mesh entities
n = FacetNormal(mesh)
h = CellDiameter(mesh)
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
# Forcing function
f_expression = div(-grad(p_exact))
f = Function(V).interpolate(f_expression)
# Dirichlet BCs
bc_multiplier = DirichletBC(W.sub(1), p_exact, "on_boundary")
# Hybridization parameter
beta_0 = Constant(1.0e0)
beta = beta_0 / h
# beta = beta_0
# Numerical flux trace
u = -grad(p)
u_hat = u + beta * (p - lambda_h) * n
# HDG classical form
a = -dot(u, grad(q)) * dx + jump(u_hat, n) * q("+") * dS
L = f * q * dx
# Transmission condition
a += jump(u_hat, n) * mu_h("+") * dS
# Weakly imposed BC
a += dot(u_hat, n) * q * ds
F = a - L
a_form = lhs(F)
_A = Tensor(a_form)
A = _A.blocks
S = A[1, 1] - A[1, :1] * A[:1, :1].inv * A[:1, 1]
Smat = assemble(S, bcs=bc_multiplier)
petsc_mat = Smat.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = Mnp.shape[0]
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=Smat,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric,
bcs=bc_multiplier
)
return result
def solve_poisson_ldgc(
mesh,
degree=1,
is_multiplier_continuous=True
):
# Function space declaration
use_quads = str(mesh.ufl_cell()) == "quadrilateral"
primal_family = "DQ" if use_quads else "DG"
V = FunctionSpace(mesh, primal_family, degree)
if is_multiplier_continuous:
LagrangeElement = FiniteElement("Lagrange", mesh.ufl_cell(), degree)
C0TraceElement = LagrangeElement["facet"]
T = FunctionSpace(mesh, C0TraceElement)
else:
trace_family = "HDiv Trace"
T = FunctionSpace(mesh, trace_family, degree)
W = V * T
# Trial and test functions
# solution = Function(W)
# u, p, lambda_h = split(solution)
p, lambda_h = TrialFunctions(W)
q, mu_h = TestFunctions(W)
# Mesh entities
n = FacetNormal(mesh)
h = CellDiameter(mesh)
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
# Forcing function
f_expression = div(-grad(p_exact))
f = Function(V).interpolate(f_expression)
# Dirichlet BCs
p_boundaries = Constant(0.0)
bc_multiplier = DirichletBC(W.sub(1), p_exact, "on_boundary")
# Hybridization parameter
s = Constant(-1.0)
beta = Constant(32.0)
h = CellDiameter(mesh)
h_avg = avg(h)
# Classical term
a = dot(grad(p), grad(q)) * dx
L = f * q * dx
# Hybridization terms
a += s * dot(grad(q), n)("+") * (p("+") - lambda_h("+")) * dS
a += -dot(grad(p), n)("+") * (q("+") - mu_h("+")) * dS
a += (beta / h_avg) * (p("+") - lambda_h("+")) * (q("+") - mu_h("+")) * dS
# Boundary terms
# a += -dot(vel_projected, n) * v * ds # How to set this bc??
# a += (beta / h) * (p- p_boundaries) * q * ds # is this necessary?
L += s * dot(grad(q), n) * p_boundaries * ds
F = a - L
a_form = lhs(F)
_A = Tensor(a_form)
A = _A.blocks
S = A[1, 1] - A[1, :1] * A[:1, :1].inv * A[:1, 1]
Smat = assemble(S, bcs=bc_multiplier)
petsc_mat = Smat.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = Mnp.shape[0]
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=Smat,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric,
bcs=bc_multiplier
)
return result
def solve_poisson_lsh(
mesh,
degree=1,
is_multiplier_continuous=False
):
# Function space declaration
use_quads = str(mesh.ufl_cell()) == "quadrilateral"
pressure_family = 'DQ' if use_quads else 'DG'
velocity_family = 'DQ' if use_quads else 'DG'
U = VectorFunctionSpace(mesh, velocity_family, degree)
V = FunctionSpace(mesh, pressure_family, degree)
if is_multiplier_continuous:
LagrangeElement = FiniteElement("Lagrange", mesh.ufl_cell(), degree)
C0TraceElement = LagrangeElement["facet"]
T = FunctionSpace(mesh, C0TraceElement)
else:
trace_family = "HDiv Trace"
T = FunctionSpace(mesh, trace_family, degree)
W = U * V * T
# Trial and test functions
# solution = Function(W)
# u, p, lambda_h = split(solution)
u, p, lambda_h = TrialFunctions(W)
v, q, mu_h = TestFunctions(W)
# Mesh entities
n = FacetNormal(mesh)
h = CellDiameter(mesh)
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
sigma_e = Function(U, name='Exact velocity')
sigma_e.project(-grad(p_exact))
# BCs
bcs = DirichletBC(W.sub(2), p_exact, "on_boundary")
# Hybridization parameter
beta_0 = Constant(1.0)
beta = beta_0 / h
beta_avg = beta_0 / h("+")
# Stabilizing parameter
# delta_0 = Constant(1)
# delta_1 = Constant(1)
# delta_2 = Constant(1)
# delta_3 = Constant(1)
# delta_4 = Constant(1)
# delta_5 = Constant(1)
# LARGE_NUMBER = Constant(1e0)
delta = h * h
# delta = Constant(1)
# delta = h
delta_0 = delta
delta_1 = delta
delta_2 = delta
delta_3 = delta
delta_4 = delta
# delta_4 = LARGE_NUMBER / h
delta_5 = delta
# Numerical flux trace
u_hat = u + beta * (p - lambda_h) * n
v_hat = v + beta * (q - mu_h) * n
# Flux least-squares
# a = (
# (inner(u, v) - q * div(u) - p * div(v) + inner(grad(p), grad(q)))
# * delta_1
# * dx
# )
# # These terms below are unsymmetric
# a += delta_1 * jump(u_hat, n=n) * q("+") * dS
# a += delta_1("+") * dot(u_hat, n) * q * ds
# # a += delta_1 * dot(u, n) * q * ds
# # L = -delta_1 * dot(u_projected, n) * q * ds
# a += delta_1("+") * lambda_h("+") * jump(v, n=n) * dS
# a += delta_1 * lambda_h * dot(v, n) * ds
# # L = delta_1 * p_exact * dot(v, n) * ds
# Flux Least-squares as in DG
a = delta_0 * inner(u + grad(p), v + grad(q)) * dx
# Classical mixed Darcy eq. first-order terms as stabilizing terms
a += delta_1 * (dot(u, v) - div(v) * p) * dx
a += delta_1("+") * lambda_h("+") * jump(v, n=n) * dS
a += delta_1 * lambda_h * dot(v, n) * ds
# Mass balance least-square
a += delta_2 * div(u) * div(v) * dx
# L = delta_2 * f * div(v) * dx
# Irrotational least-squares
a += delta_3 * inner(curl(u), curl(v)) * dx
# Hybridization terms
a += mu_h("+") * jump(u_hat, n=n) * dS
a += delta_4("+") * (p("+") - lambda_h("+")) * (q("+") - mu_h("+")) * dS
# a += delta_4 * (p - lambda_h) * (q - mu_h) * ds
# a += delta_5 * (dot(u, n)("+") - dot(u_hat, n)("+")) * (dot(v, n)("+") - dot(v_hat, n)("+")) * dS
# a += delta_5 * (dot(u, n) - dot(u_hat, n)) * (dot(v, n) - dot(v_hat, n)) * ds
# Weakly imposed BC from hybridization
# a += mu_h * (lambda_h - p_boundaries) * ds
# a += mu_h * lambda_h * ds
# ###
# a += (
# (mu_h - q) * (lambda_h - p_boundaries) * ds
# ) # maybe this is not a good way to impose BC, but this necessary
_A = Tensor(a)
A = _A.blocks
S = A[2, 2] - A[2, :2] * A[:2, :2].inv * A[:2, 2]
Smat = assemble(S, bcs=bcs)
petsc_mat = Smat.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = Mnp.shape[0]
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=Smat,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric,
bcs=bcs
)
return result
def hp_refinement_cond_number_calculation(
solver,
min_degree=1,
max_degree=4,
numel_xy=(5, 10, 15, 20, 25),
quadrilateral=True,
name="",
**kwargs
):
results_dict = {
"Element": list(),
"Number of Elements": list(),
"Degree": list(),
"Symmetric": list(),
"nnz": list(),
"dofs": list(),
"h": list(),
"Condition Number": list(),
}
element_kind = "Quad" if quadrilateral else "Tri"
pbar = tqdm(range(min_degree, max_degree))
for degree in pbar:
for n in numel_xy:
pbar.set_description(f"Processing {name} - degree = {degree} - N = {n}")
mesh = UnitSquareMesh(n, n, quadrilateral=quadrilateral)
result = solver(mesh, degree=degree)
current_cell_size = mesh.cell_sizes.dat.data_ro.min() if not quadrilateral else 1 / n
results_dict["Element"].append(element_kind)
results_dict["Number of Elements"].append(n * n)
results_dict["Degree"].append(degree)
results_dict["Symmetric"].append(result.is_operator_symmetric)
results_dict["nnz"].append(result.nnz)
results_dict["dofs"].append(result.number_of_dofs)
results_dict["h"].append(current_cell_size)
results_dict["Condition Number"].append(result.condition_number)
os.makedirs("./cond_number_results/results_%s" % name, exist_ok=True)
df_cond_number = pd.DataFrame(data=results_dict)
path_to_save_results = "./cond_number_results/results_%s/cond_numbers.csv" % name
df_cond_number.to_csv(path_to_save_results)
return df_cond_number
# Solver options
solvers_options = {
# "cg": solve_poisson_cg,
# "cgls": solve_poisson_cgls,
# "dgls": solve_poisson_dgls,
# "sdhm": solve_poisson_sdhm,
# "ls": solve_poisson_ls,
# "dls": solve_poisson_dls,
"lsh": solve_poisson_lsh,
# "vms": solve_poisson_vms,
# "dvms": solve_poisson_dvms,
# "mixed_RT": solve_poisson_mixed_RT,
# "hdg": solve_poisson_hdg,
# "cgh": solve_poisson_cgh,
# "ldgc": solve_poisson_ldgc,
# "sipg": solve_poisson_sipg,
}
degree = 1
last_degree = 1
for current_solver in solvers_options:
# Setting the output file name
name = f"{current_solver}"
# Selecting the solver and its kwargs
solver = solvers_options[current_solver]
# Performing the convergence study
hp_refinement_cond_number_calculation(
solver,
min_degree=degree,
max_degree=degree + last_degree,
quadrilateral=True,
name=name
)
# N = 5
# mesh = UnitSquareMesh(N, N, quadrilateral=True)
# result = solve_poisson_lsh(mesh, degree=1)
# print(f'Is symmetric? {result.is_operator_symmetric}')
# print(f'nnz: {result.nnz}')
# print(f'DoFs: {result.number_of_dofs}')
# print(f'Condition Number: {result.condition_number}')
# # Plotting the resulting matrix
# matplotlib.use('TkAgg')
# import copy
# my_cmap = copy.copy(plt.cm.get_cmap("winter"))
# my_cmap.set_bad(color="lightgray")
# # plot_matrix_primal_hybrid_full(result.form, result.bcs, cmap=my_cmap)
# # plot_matrix_mixed_hybrid_full(result.form, result.bcs, cmap=my_cmap)
# plot_matrix_hybrid_multiplier(result.form, trace_index=2, bcs=result.bcs, cmap=my_cmap)
# # plot_matrix(result.assembled_form, cmap=my_cmap)
# # plot_matrix_mixed(result.assembled_form, cmap=my_cmap)
# plt.tight_layout()
# plt.savefig("sparse_pattern.png")
# plt.show() | [((287, 308), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (301, 308), False, 'import matplotlib\n'), ((367, 376), 'attr.ib', 'attr.ib', ([], {}), '()\n', (374, 376), False, 'import attr\n'), ((398, 407), 'attr.ib', 'attr.ib', ([], {}), '()\n', (405, 407), False, 'import attr\n'), ((431, 440), 'attr.ib', 'attr.ib', ([], {}), '()\n', (438, 440), False, 'import attr\n'), ((463, 472), 'attr.ib', 'attr.ib', ([], {}), '()\n', (470, 472), False, 'import attr\n'), ((494, 503), 'attr.ib', 'attr.ib', ([], {}), '()\n', (501, 503), False, 'import attr\n'), ((514, 523), 'attr.ib', 'attr.ib', ([], {}), '()\n', (521, 523), False, 'import attr\n'), ((552, 561), 'attr.ib', 'attr.ib', ([], {}), '()\n', (559, 561), False, 'import attr\n'), ((698, 716), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {}), '(1, 1)\n', (710, 716), True, 'import matplotlib.pyplot as plt\n'), ((1061, 1088), 'numpy.delete', 'np.delete', (['Mnp', 'idx'], {'axis': '(1)'}), '(Mnp, idx, axis=1)\n', (1070, 1088), True, 'import numpy as np\n'), ((1098, 1137), 'numpy.ma.masked_values', 'np.ma.masked_values', (['Mnp', '(0)'], {'rtol': '(1e-13)'}), '(Mnp, 0, rtol=1e-13)\n', (1117, 1137), True, 'import numpy as np\n'), ((1443, 1461), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {}), '(1, 1)\n', (1455, 1461), True, 'import matplotlib.pyplot as plt\n'), ((1861, 1888), 'numpy.delete', 'np.delete', (['Mnp', 'idx'], {'axis': '(1)'}), '(Mnp, idx, axis=1)\n', (1870, 1888), True, 'import numpy as np\n'), ((1898, 1937), 'numpy.ma.masked_values', 'np.ma.masked_values', (['Mnp', '(0)'], {'rtol': '(1e-13)'}), '(Mnp, 0, rtol=1e-13)\n', (1917, 1937), True, 'import numpy as np\n'), ((2361, 2379), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {}), '(1, 1)\n', (2373, 2379), True, 'import matplotlib.pyplot as plt\n'), ((2843, 2870), 'numpy.delete', 'np.delete', (['Mnp', 'idx'], {'axis': '(1)'}), '(Mnp, idx, axis=1)\n', (2852, 2870), True, 'import numpy as np\n'), ((2880, 2919), 'numpy.ma.masked_values', 'np.ma.masked_values', (['Mnp', '(0)'], {'rtol': '(1e-13)'}), '(Mnp, 0, rtol=1e-13)\n', (2899, 2919), True, 'import numpy as np\n'), ((3342, 3360), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {}), '(1, 1)\n', (3354, 3360), True, 'import matplotlib.pyplot as plt\n'), ((3878, 3905), 'numpy.delete', 'np.delete', (['Mnp', 'idx'], {'axis': '(1)'}), '(Mnp, idx, axis=1)\n', (3887, 3905), True, 'import numpy as np\n'), ((3915, 3954), 'numpy.ma.masked_values', 'np.ma.masked_values', (['Mnp', '(0)'], {'rtol': '(1e-13)'}), '(Mnp, 0, rtol=1e-13)\n', (3934, 3954), True, 'import numpy as np\n'), ((4541, 4559), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {}), '(1, 1)\n', (4553, 4559), True, 'import matplotlib.pyplot as plt\n'), ((5061, 5088), 'numpy.delete', 'np.delete', (['Mnp', 'idx'], {'axis': '(1)'}), '(Mnp, idx, axis=1)\n', (5070, 5088), True, 'import numpy as np\n'), ((5098, 5137), 'numpy.ma.masked_values', 'np.ma.masked_values', (['Mnp', '(0)'], {'rtol': '(1e-13)'}), '(Mnp, 0, rtol=1e-13)\n', (5117, 5137), True, 'import numpy as np\n'), ((44728, 44797), 'os.makedirs', 'os.makedirs', (["('./cond_number_results/results_%s' % name)"], {'exist_ok': '(True)'}), "('./cond_number_results/results_%s' % name, exist_ok=True)\n", (44739, 44797), False, 'import os\n'), ((44819, 44850), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': 'results_dict'}), '(data=results_dict)\n', (44831, 44850), True, 'import pandas as pd\n'), ((1017, 1051), 'numpy.all', 
'np.all', (['(Mnp[(...), :] == 0)'], {'axis': '(0)'}), '(Mnp[(...), :] == 0, axis=0)\n', (1023, 1051), True, 'import numpy as np\n'), ((1817, 1851), 'numpy.all', 'np.all', (['(Mnp[(...), :] == 0)'], {'axis': '(0)'}), '(Mnp[(...), :] == 0, axis=0)\n', (1823, 1851), True, 'import numpy as np\n'), ((2799, 2833), 'numpy.all', 'np.all', (['(Mnp[(...), :] == 0)'], {'axis': '(0)'}), '(Mnp[(...), :] == 0, axis=0)\n', (2805, 2833), True, 'import numpy as np\n'), ((3834, 3868), 'numpy.all', 'np.all', (['(Mnp[(...), :] == 0)'], {'axis': '(0)'}), '(Mnp[(...), :] == 0, axis=0)\n', (3840, 3868), True, 'import numpy as np\n'), ((5017, 5051), 'numpy.all', 'np.all', (['(Mnp[(...), :] == 0)'], {'axis': '(0)'}), '(Mnp[(...), :] == 0, axis=0)\n', (5023, 5051), True, 'import numpy as np\n'), ((6257, 6364), 'scipy.sparse.linalg.svds', 'svds', ([], {'A': 'Mnp', 'k': 'num_of_factors', 'which': '"""LM"""', 'maxiter': '(5000)', 'return_singular_vectors': '(False)', 'solver': '"""lobpcg"""'}), "(A=Mnp, k=num_of_factors, which='LM', maxiter=5000,\n return_singular_vectors=False, solver='lobpcg')\n", (6261, 6364), False, 'from scipy.sparse.linalg import svds\n'), ((6550, 6594), 'scipy.linalg.svd', 'svd', (['M'], {'compute_uv': '(False)', 'check_finite': '(False)'}), '(M, compute_uv=False, check_finite=False)\n', (6553, 6594), False, 'from scipy.linalg import svd\n'), ((6781, 6792), 'slepc4py.SLEPc.SVD', 'SLEPc.SVD', ([], {}), '()\n', (6790, 6792), False, 'from slepc4py import SLEPc\n'), ((7422, 7452), 'numpy.array', 'np.array', (['singular_values_list'], {}), '(singular_values_list)\n', (7430, 7452), True, 'import numpy as np\n')] |
Wlgls/pyDEAP | pydeap/feature_extraction/_time_domain_features.py | b7cec369cedd4a69ea82bc49a2fb8376260e4ad2 | # -*- encoding: utf-8 -*-
'''
@File :_time_domain_features.py
@Time :2021/04/16 20:02:55
@Author :wlgls
@Version :1.0
'''
import numpy as np
def statistics(data, combined=True):
"""Statistical features, include Power, Mean, Std, 1st differece, Normalized 1st difference, 2nd difference, Normalized 2nd difference.
Parameters
----------
data array
data, for DEAP dataset, It's shape may be (n_trials, n_channels, points)
Return
----------
f:
Solved feature, It's shape is similar to the shape of your input data.
e.g. for input.shape is (n_trials, n_channels, points), the f.shape is (n_trials, n_channels, n_features)
Example
----------
In [13]: d.shape, l.shape
Out[13]: ((40, 32, 8064), (40, 1))
In [14]: statistics_feature(d).shape
Out[14]: (40, 32, 7)
"""
# Power
power = np.mean(data**2, axis=-1)
# Mean
ave = np.mean(data, axis=-1)
# Standard Deviation
std = np.std(data, axis=-1)
    # the mean of the absolute values of the 1st difference
    diff_1st = np.mean(np.abs(np.diff(data, n=1, axis=-1)), axis=-1)
# the mean of the absolute values of Normalized 1st difference
normal_diff_1st = diff_1st / std
# the mean of the absolute values of 2nd difference mean
diff_2nd = np.mean(np.abs(data[..., 2:] - data[..., :-2]), axis=-1)
# the mean of the absolute values of Normalized 2nd difference
normal_diff_2nd = diff_2nd / std
# Features.append(np.concatenate((Power, Mean, Std, diff_1st, normal_diff_1st, diff_2nd, normal_diff_2nd), axis=2))
f = np.stack((power, ave, std, diff_1st, normal_diff_1st, diff_2nd, normal_diff_2nd), axis=-1)
if combined:
        f = f.reshape((*f.shape[:-2], -1))  # assumption: "combined" flattens the channel and feature axes into one
return f
def hjorth(data, combined=True):
"""Solving Hjorth features, include activity, mobility, complexity
Parameters
----------
data array
data, for DEAP dataset, It's shape may be (n_trials, n_channels, points)
Return
----------
f:
Solved feature, It's shape is similar to the shape of your input data.
e.g. for input.shape is (n_trials, n_channels, points), the f.shape is (n_trials, n_channels, n_features)
Example
----------
In [15]: d.shape, l.shape
Out[15]: ((40, 32, 8064), (40, 1))
In [16]: hjorth_features(d).shape
Out[16]: (40, 32, 3)
"""
data = np.array(data)
ave = np.mean(data, axis=-1)[..., np.newaxis]
diff_1st = np.diff(data, n=1, axis=-1)
# print(diff_1st.shape)
diff_2nd = data[..., 2:] - data[..., :-2]
# Activity
activity = np.mean((data-ave)**2, axis=-1)
# print(Activity.shape)
# Mobility
varfdiff = np.var(diff_1st, axis=-1)
# print(varfdiff.shape)
mobility = np.sqrt(varfdiff / activity)
# Complexity
varsdiff = np.var(diff_2nd, axis=-1)
complexity = np.sqrt(varsdiff/varfdiff) / mobility
f = np.stack((activity, mobility, complexity), axis=-1)
if combined:
        f = f.reshape((*f.shape[:-2], -1))
return f
def higher_order_crossing(data, k=10, combined=True):
"""Solving the feature of hoc. Hoc is a high order zero crossing quantity.
Parameters
----------
data : array
data, for DEAP dataset, It's shape may be (n_trials, n_channels, points)
k : int, optional
Order, by default 10
Return
----------
nzc:
Solved feature, It's shape is similar to the shape of your input data.
e.g. for input.shape is (n_trials, n_channels, points), the f.shape is (n_trials, n_channels, n_features)
Example
----------
In [4]: d, l = load_deap(path, 0)
In [5]: hoc(d, k=10).shape
Out[5]: (40, 32, 10)
In [6]: hoc(d, k=5).shape
Out[6]: (40, 32, 5)
"""
nzc = []
for i in range(k):
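        # i-th order difference of the signal (np.diff with n=0 returns the data itself);
        # the feature is the number of sign changes, i.e. zero crossings, of that series.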
curr_diff = np.diff(data, n=i)
x_t = curr_diff >= 0
x_t = np.diff(x_t)
x_t = np.abs(x_t)
count = np.count_nonzero(x_t, axis=-1)
nzc.append(count)
f = np.stack(nzc, axis=-1)
if combined:
        f = f.reshape((*f.shape[:-2], -1))
return f
def sevcik_fd(data, combined=True):
"""Fractal dimension feature is solved, which is used to describe the shape information of EEG time series data. It seems that this feature can be used to judge the electrooculogram and EEG.The calculation methods include Sevcik, fractal Brownian motion, box counting, Higuchi and so on.
Sevcik method: fast calculation and robust analysis of noise
Higuchi: closer to the theoretical value than box counting
    The Sevcik method is used here because it is easier to implement
    Parameters
    ----------
    data : array
        data, for DEAP dataset, its shape may be (n_trials, n_channels, points)
    Return
    ----------
    f:
        Solved feature, its shape is similar to the shape of your input data.
        e.g. for input.shape of (n_trials, n_channels, points), the f.shape is (n_trials, n_channels, n_features)
Example
----------
In [7]: d.shape, l.shape
Out[7]: ((40, 32, 8064), (40, 1))
In [8]: sevcik_fd(d).shape
Out[8]: (40, 32, 1)
"""
points = data.shape[-1]
x = np.arange(1, points+1)
x_ = x / np.max(x)
miny = np.expand_dims(np.min(data, axis=-1), axis=-1)
maxy = np.expand_dims(np.max(data, axis=-1), axis=-1)
y_ = (data-miny) / (maxy-miny)
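    # Sevcik estimate: measure the length L of the curve mapped onto the unit square and
    # apply FD = 1 + ln(L) / ln(2 * (N - 1)).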
L = np.expand_dims(np.sum(np.sqrt(np.diff(y_, axis=-1)**2 + np.diff(x_)**2), axis=-1), axis=-1)
f = 1 + np.log(L) / np.log(2 * (points-1))
# print(FD.shape)
if combined:
        f = f.reshape((*f.shape[:-2], -1))
return f
def calc_L(X, k, m):
"""
Return Lm(k) as the length of the curve.
"""
N = X.shape[-1]
n = np.floor((N-m)/k).astype(np.int64)
norm = (N-1) / (n*k)
ss = np.sum(np.abs(np.diff(X[..., m::k], n=1)), axis=-1)
Lm = (ss*norm) / k
return Lm
def calc_L_average(X, k):
"""
Return <L(k)> as the average value over k sets of Lm(k).
"""
calc_L_series = np.frompyfunc(lambda m: calc_L(X, k, m), 1, 1)
L_average = np.average(calc_L_series(np.arange(1, k+1)))
return L_average
def higuchi_fd(data, k_max, combined=True):
"""Fractal dimension feature is solved, which is used to describe the shape information of EEG time series data. It seems that this feature can be used to judge the electrooculogram and EEG.The calculation methods include Sevcik, fractal Brownian motion, box counting, Higuchi and so on.
Sevcik method: fast calculation and robust analysis of noise
Higuchi: closer to the theoretical value than box counting
    The Higuchi method is used here because it is closer to the theoretical value
    Parameters
    ----------
    data : array
        data, for DEAP dataset, its shape may be (n_trials, n_channels, points)
    k_max : int
        Maximum order k used when estimating the curve lengths L(k)
    Return
    ----------
    f:
        Solved feature, its shape is similar to the shape of your input data.
        e.g. for input.shape of (n_trials, n_channels, points), the f.shape is (n_trials, n_channels, n_features)
Example
----------
In [7]: d.shape, l.shape
Out[7]: ((40, 32, 8064), (40, 1))
    In [8]: higuchi_fd(d).shape
Out[8]: (40, 32, 1)
"""
calc_L_average_series = np.frompyfunc(lambda k: calc_L_average(data, k), 1, 1)
k = np.arange(1, k_max+1)
L = calc_L_average_series(k)
L = np.stack(L, axis=-1)
fd = np.zeros(data.shape[:-1])
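    # Higuchi estimate: L(k) ~ k**(-D), so D is minus the slope of a least-squares line
    # fitted to log2 L(k) against log2 k.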
for ind in np.argwhere(L[..., 0]):
tmp = L[ind[0], ind[1], ind[2]]
        D, _ = np.polyfit(np.log2(k), np.log2(tmp), 1)
        fd[ind[0], ind[1], ind[2]] = - D
f = np.expand_dims(fd, axis=-1)
if combined:
        f = f.reshape((*f.shape[:-2], -1))
return f
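# Illustrative usage sketch (not part of the module): for DEAP-style data `d` of shape
# (n_trials, n_channels, n_points), statistics(d, combined=False) returns an array of
# shape (n_trials, n_channels, 7); hjorth and higher_order_crossing behave analogously.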
| [] |
kimballh/pymutual | pymutual/__init__.py | 7d7f588099eee7bdd669d613756509c6ab44a911 | from .session import Session, MutualAPI | [] |
Joshua-Barawa/pitches-IP | forms.py | 41d9d0d2fbecab50e82a4ee64a036952b8d785e1 | from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, SubmitField
from wtforms.validators import InputRequired, Email, ValidationError
from models import User
class RegistrationForm(FlaskForm):
email = StringField('Your Email Address', validators=[InputRequired(), Email()])
username = StringField('Enter your username', validators=[InputRequired()])
password = PasswordField('Password', validators=[InputRequired()])
submit = SubmitField('Sign Up')
def validate_username(self, username):
existing_username = User.query.filter_by(username=username.data).first()
if existing_username:
raise ValidationError("The username already exists")
class LoginForm(FlaskForm):
username = StringField("Your email address", validators=[InputRequired()])
password = PasswordField("Your password:", validators=[InputRequired()])
submit = SubmitField("Sign In")
| [((471, 493), 'wtforms.SubmitField', 'SubmitField', (['"""Sign Up"""'], {}), "('Sign Up')\n", (482, 493), False, 'from wtforms import StringField, PasswordField, SubmitField\n'), ((913, 935), 'wtforms.SubmitField', 'SubmitField', (['"""Sign In"""'], {}), "('Sign In')\n", (924, 935), False, 'from wtforms import StringField, PasswordField, SubmitField\n'), ((667, 713), 'wtforms.validators.ValidationError', 'ValidationError', (['"""The username already exists"""'], {}), "('The username already exists')\n", (682, 713), False, 'from wtforms.validators import InputRequired, Email, ValidationError\n'), ((280, 295), 'wtforms.validators.InputRequired', 'InputRequired', ([], {}), '()\n', (293, 295), False, 'from wtforms.validators import InputRequired, Email, ValidationError\n'), ((297, 304), 'wtforms.validators.Email', 'Email', ([], {}), '()\n', (302, 304), False, 'from wtforms.validators import InputRequired, Email, ValidationError\n'), ((369, 384), 'wtforms.validators.InputRequired', 'InputRequired', ([], {}), '()\n', (382, 384), False, 'from wtforms.validators import InputRequired, Email, ValidationError\n'), ((440, 455), 'wtforms.validators.InputRequired', 'InputRequired', ([], {}), '()\n', (453, 455), False, 'from wtforms.validators import InputRequired, Email, ValidationError\n'), ((566, 610), 'models.User.query.filter_by', 'User.query.filter_by', ([], {'username': 'username.data'}), '(username=username.data)\n', (586, 610), False, 'from models import User\n'), ((805, 820), 'wtforms.validators.InputRequired', 'InputRequired', ([], {}), '()\n', (818, 820), False, 'from wtforms.validators import InputRequired, Email, ValidationError\n'), ((882, 897), 'wtforms.validators.InputRequired', 'InputRequired', ([], {}), '()\n', (895, 897), False, 'from wtforms.validators import InputRequired, Email, ValidationError\n')] |
lukaszlaszuk/insightconnect-plugins | plugins/barracuda_waf/komand_barracuda_waf/actions/create_security_policy/schema.py | 8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892 | # GENERATED BY KOMAND SDK - DO NOT EDIT
import komand
import json
class Component:
DESCRIPTION = "Creates a security policy with the default values"
class Input:
NAME = "name"
class Output:
ID = "id"
class CreateSecurityPolicyInput(komand.Input):
schema = json.loads("""
{
"type": "object",
"title": "Variables",
"properties": {
"name": {
"type": "string",
"title": "Name",
"description": "The name of the security policy that needs to be created",
"order": 1
}
},
"required": [
"name"
]
}
""")
def __init__(self):
super(self.__class__, self).__init__(self.schema)
class CreateSecurityPolicyOutput(komand.Output):
schema = json.loads("""
{
"type": "object",
"title": "Variables",
"properties": {
"id": {
"type": "string",
"title": "ID",
"description": "ID of the new policy",
"order": 1
}
},
"required": [
"id"
]
}
""")
def __init__(self):
super(self.__class__, self).__init__(self.schema)
| [((288, 591), 'json.loads', 'json.loads', (['"""\n {\n "type": "object",\n "title": "Variables",\n "properties": {\n "name": {\n "type": "string",\n "title": "Name",\n "description": "The name of the security policy that needs to be created",\n "order": 1\n }\n },\n "required": [\n "name"\n ]\n}\n """'], {}), '(\n """\n {\n "type": "object",\n "title": "Variables",\n "properties": {\n "name": {\n "type": "string",\n "title": "Name",\n "description": "The name of the security policy that needs to be created",\n "order": 1\n }\n },\n "required": [\n "name"\n ]\n}\n """\n )\n', (298, 591), False, 'import json\n'), ((729, 990), 'json.loads', 'json.loads', (['"""\n {\n "type": "object",\n "title": "Variables",\n "properties": {\n "id": {\n "type": "string",\n "title": "ID",\n "description": "ID of the new policy",\n "order": 1\n }\n },\n "required": [\n "id"\n ]\n}\n """'], {}), '(\n """\n {\n "type": "object",\n "title": "Variables",\n "properties": {\n "id": {\n "type": "string",\n "title": "ID",\n "description": "ID of the new policy",\n "order": 1\n }\n },\n "required": [\n "id"\n ]\n}\n """\n )\n', (739, 990), False, 'import json\n')] |
fruttasecca/hay_checker | examples/dhc/rule_example.py | 2bbf4e8e90e0abc590dd74080fb6e4f445056354 | #!/usr/bin/python3
from pyspark.sql import SparkSession
from haychecker.dhc.metrics import rule
spark = SparkSession.builder.appName("rule_example").getOrCreate()
df = spark.read.format("csv").option("header", "true").load("examples/resources/employees.csv")
df.show()
condition1 = {"column": "salary", "operator": "gt", "value": 2100}
conditions = [condition1]
r1 = rule(conditions, df)[0]
print("Rule salary>2100: {}".format(r1))
condition1 = {"column": "salary", "operator": "lt", "value": 2100}
condition2 = {"column": "title", "operator": "eq", "value": "Sales Representative"}
conditions = [condition1, condition2]
task1 = rule(conditions)
condition1 = {"column": "salary", "operator": "lt", "value": 2100}
condition2 = {"column": "city", "operator": "eq", "value": "London"}
conditions = [condition1, condition2]
task2 = rule(conditions)
task3 = task1.add(task2)
result = task3.run(df)
r1 = result[0]["scores"][0]
r2 = result[1]["scores"][0]
print("Rule salary<2100 and title=\"Sales Representative\": {},"
" rule salary<2100 and city=\"London\": {}".format(r1, r2)) | [((636, 652), 'haychecker.dhc.metrics.rule', 'rule', (['conditions'], {}), '(conditions)\n', (640, 652), False, 'from haychecker.dhc.metrics import rule\n'), ((836, 852), 'haychecker.dhc.metrics.rule', 'rule', (['conditions'], {}), '(conditions)\n', (840, 852), False, 'from haychecker.dhc.metrics import rule\n'), ((372, 392), 'haychecker.dhc.metrics.rule', 'rule', (['conditions', 'df'], {}), '(conditions, df)\n', (376, 392), False, 'from haychecker.dhc.metrics import rule\n'), ((106, 150), 'pyspark.sql.SparkSession.builder.appName', 'SparkSession.builder.appName', (['"""rule_example"""'], {}), "('rule_example')\n", (134, 150), False, 'from pyspark.sql import SparkSession\n')] |
uk-gov-mirror/ONSdigital.ras-secure-message | secure_message/common/utilities.py | 741eed651eea47dd1a13c7c93b1b1796584cdf2b | import collections
import logging
import urllib.parse
from structlog import wrap_logger
from secure_message.constants import MESSAGE_BY_ID_ENDPOINT, MESSAGE_LIST_ENDPOINT, MESSAGE_QUERY_LIMIT
from secure_message.services.service_toggles import party, internal_user_service
logger = wrap_logger(logging.getLogger(__name__))
MessageArgs = collections.namedtuple(
'MessageArgs',
'page limit business_id surveys cc label desc ce is_closed my_conversations new_respondent_conversations all_conversation_types unread_conversations')
def get_options(args): # NOQA pylint:disable=too-complex
"""extract options from request , allow label to be set by caller
:param args: contains search arguments. Not all end points support all args
:returns: MessageArgs named tuple containing the args for the search
business_id If set , restricts search to conversations regarding this specific party id
surveys If set allows the count to be restricted by a list of survey_ids
cc If set , allows the count to be restricted by a particular case
ce If set, alows the count to be restricted by a particular collection exercise
is_closed If set to 'true' only counts closed conversations, else only open conversations
my_conversations If set to 'true only counts my conversations.
I.e conversations where the current user id is the to actor id
new_respondent_conversations If set to 'true'only counts conversations where the to actor is set to 'GROUP'
all_conversation_types If set 'true', overrides is_closed, my_conversations and new_respondent_conversations
and returns 4 counts 1 for each of , open , closed, my_conversations and new_respondent_conversations
page If set requests the specific page of information to return
limit If set it sets the maximum number of results to return
desc If present, requests the information in descending order
"""
fields = {'page': 1, 'limit': MESSAGE_QUERY_LIMIT, 'business_id': None, 'surveys': None,
'desc': True, 'cc': None, 'label': None, 'ce': None, 'is_closed': False,
'my_conversations': False, 'new_respondent_conversations': False, 'all_conversation_types': False,
'unread_conversations': False}
for field in ['cc', 'ce', 'business_id', 'label']:
if args.get(field):
fields[field] = str(args.get(field))
fields['surveys'] = args.getlist('survey')
for field in ['limit', 'page']:
if args.get(field):
fields[field] = int(args.get(field))
if args.get('desc') == 'false':
fields['desc'] = False
if args.get('is_closed') == 'true':
fields['is_closed'] = True
if args.get('my_conversations') == 'true':
fields['my_conversations'] = True
if args.get('new_respondent_conversations') == 'true':
fields['new_respondent_conversations'] = True
if args.get('all_conversation_types') == 'true':
fields['all_conversation_types'] = True
if args.get('unread_conversations') == 'true':
fields['unread_conversations'] = True
return MessageArgs(page=fields['page'], limit=fields['limit'], business_id=fields['business_id'],
surveys=fields['surveys'], cc=fields['cc'], label=fields['label'],
desc=fields['desc'], ce=fields['ce'], is_closed=fields['is_closed'],
my_conversations=fields['my_conversations'],
new_respondent_conversations=fields['new_respondent_conversations'],
all_conversation_types=fields['all_conversation_types'],
unread_conversations=fields['unread_conversations'])
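# Illustrative example (not part of the API): request args such as
# {'page': '2', 'survey': ['s1'], 'is_closed': 'true'} would yield
# MessageArgs(page=2, limit=MESSAGE_QUERY_LIMIT, surveys=['s1'], is_closed=True, ...)
# with the remaining fields left at their defaults.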
def set_conversation_type_args(existing_args, is_closed=False, my_conversations=False, new_conversations=False,
all_types=False, unread_conversations=False):
"""Returns a new set of args based on the existing args which are a named tuple,
but allow the conversation type only to be changed"""
return MessageArgs(page=existing_args.page,
limit=existing_args.limit,
business_id=existing_args.business_id,
surveys=existing_args.surveys,
cc=existing_args.cc,
label=existing_args.label,
desc=existing_args.desc,
ce=existing_args.ce,
is_closed=is_closed,
my_conversations=my_conversations,
new_respondent_conversations=new_conversations,
all_conversation_types=all_types,
unread_conversations=unread_conversations)
def generate_string_query_args(args):
params = {}
for field in args._fields:
if field in ['page']:
continue
value = getattr(args, field)
if value:
params[field] = value
return urllib.parse.urlencode(params)
def process_paginated_list(paginated_list, host_url, user, message_args, endpoint=MESSAGE_LIST_ENDPOINT, body_summary=True):
"""used to change a pagination object to json format with links"""
messages = []
string_query_args = generate_string_query_args(message_args)
for message in paginated_list.items:
msg = message.serialize(user, body_summary=body_summary)
msg['_links'] = {"self": {"href": f"{host_url}{MESSAGE_BY_ID_ENDPOINT}/{msg['msg_id']}"}}
messages.append(msg)
links = {'first': {"href": f"{host_url}{endpoint}"},
'self': {"href": f"{host_url}{endpoint}?{string_query_args}&page={message_args.page}"}}
if paginated_list.has_next:
links['next'] = {
"href": f"{host_url}{endpoint}?{string_query_args}&page={message_args.page + 1}"}
if paginated_list.has_prev:
links['prev'] = {
"href": f"{host_url}{endpoint}?{string_query_args}&page={message_args.page - 1}"}
return messages, links
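# Illustrative note: for a middle page the links dict holds 'first', 'self', 'next' and
# 'prev'; 'first' points at the bare endpoint while the others look like
# {"href": "<host_url><endpoint>?<query args>&page=<n>"}.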
def add_to_details(messages):
"""Adds a @msg_to key to every message in a list of messages.
Every msg_to uuid is resolved to include details of the user.
If the call for the internal user id fails, an exception will be thrown.
    If the external user id cannot be found in the list that we got from the party service, there
    won't be a @msg_to value returned in the payload. The API documentation notes that these elements
aren't guaranteed to be provided so we're not breaking the contract by doing this.
Note: Several of these lines of code could be combined into a more succinct view, spreading them out
is deliberate so that log stack traces are better able to identify the cause of log errors
"""
external_user_details = {}
for user in party.get_users_details(get_external_user_uuid_list(messages)):
external_user_details[user['id']] = user
for message in messages:
try:
msg_to = message["msg_to"][0]
from_internal = message["from_internal"]
if not from_internal:
msg_to_details = internal_user_service.get_user_details(msg_to)
message.update({"@msg_to": [msg_to_details]})
else:
msg_to_details = external_user_details.get(msg_to)
if msg_to_details:
message.update({'@msg_to': [msg_to_details]})
else:
logger.info("No details found for the message recipient", msg_to=msg_to)
except IndexError:
logger.exception("Exception adding to details", msg_to=msg_to, from_internal=from_internal)
raise
return messages
def add_from_details(messages):
"""Adds a @msg_from key to every message in a list of messages.
    Every msg_from uuid is resolved to include details of the user.
    If the call for the internal user id fails, an exception will be thrown.
    If the external user id cannot be found in the list that we got from the party service, there
    won't be a @msg_from value returned in the payload. The API documentation notes that these elements
aren't guaranteed to be provided so we're not breaking the contract by doing this.
"""
external_user_details = {}
for user in party.get_users_details(get_external_user_uuid_list(messages)):
external_user_details[user['id']] = user
for message in messages:
try:
msg_from = message["msg_from"]
from_internal = message["from_internal"]
if from_internal:
message.update({"@msg_from": internal_user_service.get_user_details(msg_from)})
else:
if external_user_details.get(message['msg_from']):
message.update({'@msg_from': external_user_details.get(msg_from)})
except IndexError:
logger.exception("Exception adding from details message", msg_from=msg_from, from_internal=from_internal)
raise
return messages
def get_external_user_uuid_list(messages):
"""Compiles a list of all unique the external user (respondent) uuids from a list of messages"""
external_user_uuids = set()
external_msgs = [message for message in messages if message['from_internal'] is False]
for message in external_msgs:
external_user_uuids.add(message["msg_from"])
internal_messages = [message for message in messages if message['from_internal'] is True]
    for message in internal_messages:
        external_user_uuids.add(message["msg_to"][0])
return external_user_uuids
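# Illustrative example: one external message sent from uuid 'abc' plus one internal
# message sent to ['abc'] yields {'abc'} - the set collapses duplicates across both
# directions.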
def add_business_details(messages):
"""Adds a @business_details key to every message in a list of messages."""
business_ids = set()
for message in messages:
business_ids.add(message['business_id'])
business_details = party.get_business_details(business_ids)
for message in messages:
message['@business_details'] = next((business for business in business_details if business["id"] == message['business_id']), None)
return messages
def add_users_and_business_details(messages):
"""Add both user and business details to messages based on data from party service"""
if not messages:
raise ValueError('messages is a required parameter and must not be empty')
messages = add_to_details(messages)
messages = add_from_details(messages)
logger.info("Successfully added to and from details")
messages = add_business_details(messages)
logger.info("Successfully added business details")
return messages
| [((340, 537), 'collections.namedtuple', 'collections.namedtuple', (['"""MessageArgs"""', '"""page limit business_id surveys cc label desc ce is_closed my_conversations new_respondent_conversations all_conversation_types unread_conversations"""'], {}), "('MessageArgs',\n 'page limit business_id surveys cc label desc ce is_closed my_conversations new_respondent_conversations all_conversation_types unread_conversations'\n )\n", (362, 537), False, 'import collections\n'), ((297, 324), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (314, 324), False, 'import logging\n'), ((9846, 9886), 'secure_message.services.service_toggles.party.get_business_details', 'party.get_business_details', (['business_ids'], {}), '(business_ids)\n', (9872, 9886), False, 'from secure_message.services.service_toggles import party, internal_user_service\n'), ((7125, 7171), 'secure_message.services.service_toggles.internal_user_service.get_user_details', 'internal_user_service.get_user_details', (['msg_to'], {}), '(msg_to)\n', (7163, 7171), False, 'from secure_message.services.service_toggles import party, internal_user_service\n'), ((8625, 8673), 'secure_message.services.service_toggles.internal_user_service.get_user_details', 'internal_user_service.get_user_details', (['msg_from'], {}), '(msg_from)\n', (8663, 8673), False, 'from secure_message.services.service_toggles import party, internal_user_service\n')] |
notechats/notegame | notegame/games/nonogram/core/renderer.py | 3d9538b98cb6b0b240956b1271e028b22458fc54 | # -*- coding: utf-8 -*-
"""
Defines various renderers for the game of nonogram
"""
from abc import ABC
from sys import stdout
from notetool.tool.log import logger
from six import integer_types, itervalues, text_type
from ..utils.iter import max_safe, pad
from ..utils.other import two_powers
from .common import BOX, SPACE, UNKNOWN, BlottedBlock, is_list_like
class Cell(object):
"""Represent basic rendered cell"""
DEFAULT_ICON = ' '
def __init__(self, icon=None):
self.icon = icon or self.DEFAULT_ICON
def ascii_icon(self):
"""How the cell can be printed as a text"""
return self.DEFAULT_ICON
def __repr__(self):
return '{}()'.format(self.__class__.__name__)
class ThumbnailCell(Cell):
"""
Represent upper-left cell
(where the thumbnail of the puzzle usually drawn).
"""
DEFAULT_ICON = '#'
class ClueCell(Cell):
"""
Represent cell that is part of description (clue).
They are usually drawn on the top and on the left.
"""
BLOTTED_SYMBOL = '?'
def __init__(self, value):
super(ClueCell, self).__init__()
if is_list_like(value):
self.value, self.color = value
else:
self.value, self.color = value, None
def ascii_icon(self):
"""
Gets a symbolic representation of a cell given its state
and predefined table `icons`
"""
if isinstance(self.value, integer_types):
return text_type(self.value)
if self.value == BlottedBlock:
return self.BLOTTED_SYMBOL
return self.DEFAULT_ICON
def __repr__(self):
return '{}(({}, {}))'.format(
self.__class__.__name__,
self.value, self.color)
class GridCell(Cell):
"""Represent the main area cell"""
def __init__(self, value, renderer, colored=False):
super(GridCell, self).__init__()
self.renderer = renderer
self.colored = colored
if self.colored:
self.value = tuple(two_powers(value))
else:
self.value = value
def ascii_icon(self):
value = self.value
icons = self.renderer.icons
if not self.colored:
return icons[self.value]
if len(value) == 1:
value = value[0]
else:
# multiple colors
value = UNKNOWN
symbol = self.renderer.board.symbol_for_color_id(value)
if symbol is not None:
return symbol
return icons.get(value, self.DEFAULT_ICON)
def __repr__(self):
return '{}({})'.format(
self.__class__.__name__, self.value)
class _DummyBoard(object):
"""
Stub for renderer initialization
    when it is created before the corresponding board
"""
rows_descriptions = columns_descriptions = ()
width = height = 0
class Renderer(object):
"""Defines the abstract renderer for a nonogram board"""
def __init__(self, board=None):
self.cells = None
self.board = None
self.board_init(board)
def board_init(self, board=None):
"""Initialize renderer's properties dependent on board it draws"""
if board:
logger.info('Init %r renderer with board %r',
self.__class__.__name__, board)
else:
if self.board:
return # already initialized, do nothing
board = _DummyBoard()
self.board = board
@property
def full_height(self):
"""The full visual height of a board"""
return self.header_height + self.board.height
@property
def full_width(self):
"""The full visual width of a board"""
return self.side_width + self.board.width
@property
def header_height(self):
"""The size of the header block with columns descriptions"""
return max_safe(map(len, self.board.columns_descriptions), default=0)
@property
def side_width(self):
"""The width of the side block with rows descriptions"""
return max_safe(map(len, self.board.rows_descriptions), default=0)
def render(self):
"""Actually print out the board"""
raise NotImplementedError()
def draw(self, cells=None):
"""Calculate all the cells and draw an image of the board"""
self.draw_header()
self.draw_side()
self.draw_grid(cells=cells)
self.render()
def draw_header(self):
"""
Changes the internal state to be able to draw columns descriptions
"""
raise NotImplementedError()
def draw_side(self):
"""
Changes the internal state to be able to draw rows descriptions
"""
raise NotImplementedError()
def draw_grid(self, cells=None):
"""
Changes the internal state to be able to draw a main grid
"""
raise NotImplementedError()
@property
def is_colored(self):
"""Whether the linked board is colored board"""
return self.board.is_colored
class StreamRenderer(Renderer, ABC):
"""
Simplify textual rendering of a board to a stream (stdout by default)
"""
DEFAULT_ICONS = {
UNKNOWN: '_',
BOX: 'X',
SPACE: '.',
}
def __init__(self, board=None, stream=stdout, icons=None):
self.stream = stream
if icons is None:
icons = dict(self.DEFAULT_ICONS)
self.icons = icons
super(StreamRenderer, self).__init__(board)
def _print(self, *args):
return print(*args, file=self.stream)
class BaseAsciiRenderer(StreamRenderer):
"""
Renders a board as a simple text table (without grid)
"""
__rend_name__ = 'text'
def board_init(self, board=None):
super(BaseAsciiRenderer, self).board_init(board)
        logger.info('init cells: %sx%s', self.full_width, self.full_height)
self.cells = [[Cell()] * self.full_width
for _ in range(self.full_height)]
def cell_icon(self, cell):
"""
Get a symbolic representation of a cell given its state
and predefined table `icons`
"""
return cell.ascii_icon()
def render(self):
for row in self.cells:
res = []
for index, cell in enumerate(row):
ico = self.cell_icon(cell)
# do not pad the last symbol in a line
if len(ico) == 1:
if index < len(row) - 1:
ico += ' '
res.append(ico)
self._print(''.join(res))
def draw_header(self):
for i in range(self.header_height):
for j in range(self.side_width):
self.cells[i][j] = ThumbnailCell()
for j, col in enumerate(self.board.columns_descriptions):
rend_j = j + self.side_width
if not col:
col = [0]
rend_column = [ClueCell(val) for val in col]
rend_column = pad(rend_column, self.header_height, Cell())
# self.cells[:self.header_height, rend_j] = rend_column
for i, cell in enumerate(rend_column):
self.cells[i][rend_j] = cell
def draw_side(self):
for i, row in enumerate(self.board.rows_descriptions):
rend_i = i + self.header_height
# row = list(row)
if not row:
row = [0]
rend_row = [ClueCell(val) for val in row]
rend_row = pad(rend_row, self.side_width, Cell())
self.cells[rend_i][:self.side_width] = rend_row
def draw_grid(self, cells=None):
if cells is None:
cells = self.board.cells
is_colored = self.is_colored
for i, row in enumerate(cells):
rend_i = i + self.header_height
for j, val in enumerate(row):
rend_j = j + self.side_width
self.cells[rend_i][rend_j] = GridCell(
val, self, colored=is_colored)
def _register_renderers():
res = dict()
for obj in itervalues(globals()):
if isinstance(obj, type):
if issubclass(obj, StreamRenderer) and hasattr(obj, '__rend_name__'):
res[obj.__rend_name__] = obj
return res
RENDERERS = _register_renderers()
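# Minimal usage sketch (assumes a compatible board object from this package):
#     renderer = RENDERERS['text'](board)  # BaseAsciiRenderer, registered via __rend_name__
#     renderer.draw()                      # computes the cells and prints the board to stdout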
| [((5860, 5926), 'notetool.tool.log.logger.info', 'logger.info', (['"""init cells: %sx%s"""', 'self.full_width', 'self.full_width'], {}), "('init cells: %sx%s', self.full_width, self.full_width)\n", (5871, 5926), False, 'from notetool.tool.log import logger\n'), ((1484, 1505), 'six.text_type', 'text_type', (['self.value'], {}), '(self.value)\n', (1493, 1505), False, 'from six import integer_types, itervalues, text_type\n'), ((3225, 3302), 'notetool.tool.log.logger.info', 'logger.info', (['"""Init %r renderer with board %r"""', 'self.__class__.__name__', 'board'], {}), "('Init %r renderer with board %r', self.__class__.__name__, board)\n", (3236, 3302), False, 'from notetool.tool.log import logger\n')] |
Carreau/sympy | sympy/printing/lambdarepr.py | 168de33bb177936fa9517702b2c5a777b3989672 | from __future__ import print_function, division
from .str import StrPrinter
from sympy.utilities import default_sort_key
class LambdaPrinter(StrPrinter):
"""
This printer converts expressions into strings that can be used by
lambdify.
"""
def _print_MatrixBase(self, expr):
return "%s(%s)" % (expr.__class__.__name__,
self._print((expr.tolist())))
_print_SparseMatrix = \
_print_MutableSparseMatrix = \
_print_ImmutableSparseMatrix = \
_print_Matrix = \
_print_DenseMatrix = \
_print_MutableDenseMatrix = \
_print_ImmutableMatrix = \
_print_ImmutableDenseMatrix = \
_print_MatrixBase
def _print_Piecewise(self, expr):
result = []
i = 0
for arg in expr.args:
e = arg.expr
c = arg.cond
result.append('((')
result.append(self._print(e))
result.append(') if (')
result.append(self._print(c))
result.append(') else (')
i += 1
result = result[:-1]
result.append(') else None)')
result.append(')'*(2*i - 2))
return ''.join(result)
def _print_Sum(self, expr):
loops = (
'for {i} in range({a}, {b}+1)'.format(
i=self._print(i),
a=self._print(a),
b=self._print(b))
for i, a, b in expr.limits)
return '(builtins.sum({function} {loops}))'.format(
function=self._print(expr.function),
loops=' '.join(loops))
def _print_And(self, expr):
result = ['(']
for arg in sorted(expr.args, key=default_sort_key):
result.extend(['(', self._print(arg), ')'])
result.append(' and ')
result = result[:-1]
result.append(')')
return ''.join(result)
def _print_Or(self, expr):
result = ['(']
for arg in sorted(expr.args, key=default_sort_key):
result.extend(['(', self._print(arg), ')'])
result.append(' or ')
result = result[:-1]
result.append(')')
return ''.join(result)
def _print_Not(self, expr):
result = ['(', 'not (', self._print(expr.args[0]), '))']
return ''.join(result)
def _print_BooleanTrue(self, expr):
return "True"
def _print_BooleanFalse(self, expr):
return "False"
def _print_ITE(self, expr):
result = [
'((', self._print(expr.args[1]),
') if (', self._print(expr.args[0]),
') else (', self._print(expr.args[2]), '))'
]
return ''.join(result)
class NumPyPrinter(LambdaPrinter):
"""
Numpy printer which handles vectorized piecewise functions,
logical operators, etc.
"""
_default_settings = {
"order": "none",
"full_prec": "auto",
}
def _print_seq(self, seq, delimiter=', '):
"General sequence printer: converts to tuple"
# Print tuples here instead of lists because numba supports
# tuples in nopython mode.
return '({},)'.format(delimiter.join(self._print(item) for item in seq))
def _print_MatMul(self, expr):
"Matrix multiplication printer"
return '({0})'.format(').dot('.join(self._print(i) for i in expr.args))
def _print_DotProduct(self, expr):
# DotProduct allows any shape order, but numpy.dot does matrix
# multiplication, so we have to make sure it gets 1 x n by n x 1.
arg1, arg2 = expr.args
if arg1.shape[0] != 1:
arg1 = arg1.T
if arg2.shape[1] != 1:
arg2 = arg2.T
return "dot(%s, %s)" % (self._print(arg1), self._print(arg2))
def _print_Piecewise(self, expr):
"Piecewise function printer"
exprs = '[{0}]'.format(','.join(self._print(arg.expr) for arg in expr.args))
conds = '[{0}]'.format(','.join(self._print(arg.cond) for arg in expr.args))
# If [default_value, True] is a (expr, cond) sequence in a Piecewise object
# it will behave the same as passing the 'default' kwarg to select()
# *as long as* it is the last element in expr.args.
# If this is not the case, it may be triggered prematurely.
return 'select({0}, {1}, default=nan)'.format(conds, exprs)
def _print_Relational(self, expr):
"Relational printer for Equality and Unequality"
op = {
'==' :'equal',
'!=' :'not_equal',
'<' :'less',
'<=' :'less_equal',
'>' :'greater',
'>=' :'greater_equal',
}
if expr.rel_op in op:
lhs = self._print(expr.lhs)
rhs = self._print(expr.rhs)
return '{op}({lhs}, {rhs})'.format(op=op[expr.rel_op],
lhs=lhs,
rhs=rhs)
return super(NumPyPrinter, self)._print_Relational(expr)
def _print_And(self, expr):
"Logical And printer"
# We have to override LambdaPrinter because it uses Python 'and' keyword.
# If LambdaPrinter didn't define it, we could use StrPrinter's
# version of the function and add 'logical_and' to NUMPY_TRANSLATIONS.
return '{0}({1})'.format('logical_and', ','.join(self._print(i) for i in expr.args))
def _print_Or(self, expr):
"Logical Or printer"
# We have to override LambdaPrinter because it uses Python 'or' keyword.
# If LambdaPrinter didn't define it, we could use StrPrinter's
# version of the function and add 'logical_or' to NUMPY_TRANSLATIONS.
return '{0}({1})'.format('logical_or', ','.join(self._print(i) for i in expr.args))
def _print_Not(self, expr):
"Logical Not printer"
# We have to override LambdaPrinter because it uses Python 'not' keyword.
# If LambdaPrinter didn't define it, we would still have to define our
# own because StrPrinter doesn't define it.
return '{0}({1})'.format('logical_not', ','.join(self._print(i) for i in expr.args))
def _print_Min(self, expr):
return '{0}(({1}))'.format('amin', ','.join(self._print(i) for i in expr.args))
def _print_Max(self, expr):
return '{0}(({1}))'.format('amax', ','.join(self._print(i) for i in expr.args))
# numexpr works by altering the string passed to numexpr.evaluate
# rather than by populating a namespace. Thus a special printer...
class NumExprPrinter(LambdaPrinter):
# key, value pairs correspond to sympy name and numexpr name
# functions not appearing in this dict will raise a TypeError
_numexpr_functions = {
'sin' : 'sin',
'cos' : 'cos',
'tan' : 'tan',
'asin': 'arcsin',
'acos': 'arccos',
'atan': 'arctan',
'atan2' : 'arctan2',
'sinh' : 'sinh',
'cosh' : 'cosh',
'tanh' : 'tanh',
'asinh': 'arcsinh',
'acosh': 'arccosh',
'atanh': 'arctanh',
'ln' : 'log',
'log': 'log',
'exp': 'exp',
'sqrt' : 'sqrt',
'Abs' : 'abs',
'conjugate' : 'conj',
'im' : 'imag',
're' : 'real',
'where' : 'where',
'complex' : 'complex',
'contains' : 'contains',
}
def _print_ImaginaryUnit(self, expr):
return '1j'
def _print_seq(self, seq, delimiter=', '):
# simplified _print_seq taken from pretty.py
s = [self._print(item) for item in seq]
if s:
return delimiter.join(s)
else:
return ""
def _print_Function(self, e):
func_name = e.func.__name__
nstr = self._numexpr_functions.get(func_name, None)
if nstr is None:
# check for implemented_function
if hasattr(e, '_imp_'):
return "(%s)" % self._print(e._imp_(*e.args))
else:
raise TypeError("numexpr does not support function '%s'" %
func_name)
return "%s(%s)" % (nstr, self._print_seq(e.args))
def blacklisted(self, expr):
raise TypeError("numexpr cannot be used with %s" %
expr.__class__.__name__)
# blacklist all Matrix printing
_print_SparseMatrix = \
_print_MutableSparseMatrix = \
_print_ImmutableSparseMatrix = \
_print_Matrix = \
_print_DenseMatrix = \
_print_MutableDenseMatrix = \
_print_ImmutableMatrix = \
_print_ImmutableDenseMatrix = \
blacklisted
# blacklist some python expressions
_print_list = \
_print_tuple = \
_print_Tuple = \
_print_dict = \
_print_Dict = \
blacklisted
def doprint(self, expr):
lstr = super(NumExprPrinter, self).doprint(expr)
return "evaluate('%s', truediv=True)" % lstr
def lambdarepr(expr, **settings):
"""
Returns a string usable for lambdifying.
"""
return LambdaPrinter(settings).doprint(expr)
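# Illustrative example: for a single-clause Piecewise such as Piecewise((x, x < 0)),
# lambdarepr() returns the string '((x) if (x < 0) else None)'.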
| [] |
fredmell/CS229Project | python/fill_na_v2.py | b214127485ddc587b9fe3be253937ba8378f9db7 | """
Fill na with most common of the whole column
"""
import numpy as np
import pandas as pd
import time
import matplotlib.pyplot as plt
from datetime import datetime
import re
from collections import Counter
from statistics import median
from tqdm import tqdm
def find_most_common_value(element_list):
for element in element_list:
if not pd.isna(element):
break
if pd.isna(element):
return np.nan
elif isinstance(element, np.double):
array = np.array(element_list)
array = array[~np.isnan(array)]
if len(array) == 0:
return np.nan
else:
            array = array.astype(int)  # np.int is deprecated; the builtin int is equivalent here
return np.double(np.bincount(array).argmax())
elif isinstance(element, str):
        # Count the values that were passed in rather than re-reading the
        # module-level dataframe, so the helper has no hidden globals.
        count = Counter(element_list)
        try:
            del count[np.nan]
        except KeyError:
            pass
if count == dict():
return np.nan
else:
return count.most_common(1)[0][0]
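# Illustrative example: find_most_common_value(np.array([np.nan, 1.0, 2.0, 2.0]))
# returns 2.0; for a column of strings the most frequent non-null value is returned.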
file = '/home/nicolasbievre/yelp_data.pkl'
file_na = '/home/nicolasbievre/yelp_data_no_na.pkl'
df = pd.read_pickle(file)
categories = list(set(df['categories'].values))
n = len(categories)
for i in tqdm(range(len(df.columns))):
col = df.columns[i]
    if col not in {'review_id', 'business_id', 'user_id', 'postal_code'}:
df_col = df[col].values
na = sum(pd.isna(df_col))
if na > 0:
            most_common_term = find_most_common_value(df_col)
            if not pd.isna(most_common_term):
                df.loc[(pd.isna(df_col)), col] = most_common_term
if i % 35 == 0 and i > 0:
df.to_pickle(file_na)
df.to_pickle(file_na)
| [((1100, 1120), 'pandas.read_pickle', 'pd.read_pickle', (['file'], {}), '(file)\n', (1114, 1120), True, 'import pandas as pd\n'), ((398, 414), 'pandas.isna', 'pd.isna', (['element'], {}), '(element)\n', (405, 414), True, 'import pandas as pd\n'), ((354, 370), 'pandas.isna', 'pd.isna', (['element'], {}), '(element)\n', (361, 370), True, 'import pandas as pd\n'), ((496, 518), 'numpy.array', 'np.array', (['element_list'], {}), '(element_list)\n', (504, 518), True, 'import numpy as np\n'), ((1392, 1407), 'pandas.isna', 'pd.isna', (['df_col'], {}), '(df_col)\n', (1399, 1407), True, 'import pandas as pd\n'), ((778, 794), 'collections.Counter', 'Counter', (['df[col]'], {}), '(df[col])\n', (785, 794), False, 'from collections import Counter\n'), ((1510, 1535), 'pandas.isna', 'pd.isna', (['most_commom_term'], {}), '(most_commom_term)\n', (1517, 1535), True, 'import pandas as pd\n'), ((542, 557), 'numpy.isnan', 'np.isnan', (['array'], {}), '(array)\n', (550, 557), True, 'import numpy as np\n'), ((697, 715), 'numpy.bincount', 'np.bincount', (['array'], {}), '(array)\n', (708, 715), True, 'import numpy as np\n'), ((1565, 1580), 'pandas.isna', 'pd.isna', (['df_col'], {}), '(df_col)\n', (1572, 1580), True, 'import pandas as pd\n')] |
jaiswalIT02/pythonprograms | GUI Applications/calc.py | bc94e52121202b04c3e9112d9786f93ed6707f7a | from tkinter import Tk
from tkinter import Entry
from tkinter import Button
from tkinter import StringVar
t=Tk()
t.title("Tarun Jaiswal")
t.geometry("425x300")
t.resizable(0,0)
t.configure(background="black")#back ground color
a=StringVar()
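# Button callbacks: show() appends a character to the display variable, equal()
# evaluates the current expression with eval(), and clear() empties the display.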
def show(c):
a.set(a.get()+c)
def equal():
x=a.get()
a.set(eval(x))
def clear():
a.set("")
e1=Entry(font=("",30),justify="right",textvariable=a)
e1.place(x=0,y=0,width=425,height=50)
b1=Button(text="7",font=("",25),bg="gray",fg="white",activebackground="yellow",command=show)
b1.place(x=5,y=55,width=100,height=50)
b1.configure(command=lambda:show("7"))
b2=Button(text="8",font=("",25),bg="gray",fg="white",activebackground="yellow")
b2.place(x=110,y=55,width=100,height=50)
b2.configure(command=lambda:show("8"))
b3=Button(text="9",font=("",25),bg="gray",fg="white",activebackground="yellow")
b3.place(x=215,y=55,width=100,height=50)
b3.configure(command=lambda:show("9"))
b4=Button(text="+",font=("",25),bg="gray",fg="white",activebackground="yellow")
b4.place(x=320,y=55,width=100,height=50)
b4.configure(command=lambda:show("+"))
b5=Button(text="4",font=("",25),bg="gray",fg="white",activebackground="yellow")
b5.place(x=5,y=110,width=100,height=50)
b5.configure(command=lambda:show("4"))
b6=Button(text="5",font=("",25),bg="gray",fg="white",activebackground="yellow")
b6.place(x=110,y=110,width=100,height=50)
b6.configure(command=lambda:show("5"))
b7=Button(text="6",font=("",25),bg="gray",fg="white",activebackground="yellow")
b7.place(x=215,y=110,width=100,height=50)
b7.configure(command=lambda:show("6"))
b8=Button(text="-",font=("",25),bg="gray",fg="white",activebackground="yellow")
b8.place(x=320,y=110,width=100,height=50)
b8.configure(command=lambda:show("-"))
b9=Button(text="1",font=("",25),bg="gray",fg="white",activebackground="yellow")
b9.place(x=5,y=165,width=100,height=50)
b9.configure(command=lambda:show("1"))
b10=Button(text="2",font=("",25),bg="gray",fg="white",activebackground="yellow")
b10.place(x=110,y=165,width=100,height=50)
b10.configure(command=lambda:show("2"))
b11=Button(text="3",font=("",25),bg="gray",fg="white",activebackground="yellow")
b11.place(x=215,y=165,width=100,height=50)
b11.configure(command=lambda:show("3"))
b12=Button(text="*",font=("",25),bg="gray",fg="white",activebackground="yellow")
b12.place(x=320,y=165,width=100,height=50)
b12.configure(command=lambda:show("*"))
b13=Button(text="C",font=("",25),bg="gray",fg="white",activebackground="yellow")
b13.place(x=5,y=220,width=100,height=50)
b13.configure(command=clear)
b14=Button(text="0",font=("",25),bg="gray",fg="white",activebackground="yellow")
b14.place(x=110,y=220,width=100,height=50)
b14.configure(command=lambda:show("0"))
b15=Button(text="=",font=("",25),bg="gray",fg="white",activebackground="yellow",command=equal)
b15.place(x=215,y=220,width=100,height=50)
b15.configure(command=equal)
b16=Button(text="/",font=("",25),bg="gray",fg="white",activebackground="yellow")
b16.place(x=320,y=220,width=100,height=50)
b16.configure(command=lambda:show("/"))
t.mainloop() | [((109, 113), 'tkinter.Tk', 'Tk', ([], {}), '()\n', (111, 113), False, 'from tkinter import Tk\n'), ((232, 243), 'tkinter.StringVar', 'StringVar', ([], {}), '()\n', (241, 243), False, 'from tkinter import StringVar\n'), ((363, 416), 'tkinter.Entry', 'Entry', ([], {'font': "('', 30)", 'justify': '"""right"""', 'textvariable': 'a'}), "(font=('', 30), justify='right', textvariable=a)\n", (368, 416), False, 'from tkinter import Entry\n'), ((456, 556), 'tkinter.Button', 'Button', ([], {'text': '"""7"""', 'font': "('', 25)", 'bg': '"""gray"""', 'fg': '"""white"""', 'activebackground': '"""yellow"""', 'command': 'show'}), "(text='7', font=('', 25), bg='gray', fg='white', activebackground=\n 'yellow', command=show)\n", (462, 556), False, 'from tkinter import Button\n'), ((628, 714), 'tkinter.Button', 'Button', ([], {'text': '"""8"""', 'font': "('', 25)", 'bg': '"""gray"""', 'fg': '"""white"""', 'activebackground': '"""yellow"""'}), "(text='8', font=('', 25), bg='gray', fg='white', activebackground=\n 'yellow')\n", (634, 714), False, 'from tkinter import Button\n'), ((789, 875), 'tkinter.Button', 'Button', ([], {'text': '"""9"""', 'font': "('', 25)", 'bg': '"""gray"""', 'fg': '"""white"""', 'activebackground': '"""yellow"""'}), "(text='9', font=('', 25), bg='gray', fg='white', activebackground=\n 'yellow')\n", (795, 875), False, 'from tkinter import Button\n'), ((950, 1036), 'tkinter.Button', 'Button', ([], {'text': '"""+"""', 'font': "('', 25)", 'bg': '"""gray"""', 'fg': '"""white"""', 'activebackground': '"""yellow"""'}), "(text='+', font=('', 25), bg='gray', fg='white', activebackground=\n 'yellow')\n", (956, 1036), False, 'from tkinter import Button\n'), ((1111, 1197), 'tkinter.Button', 'Button', ([], {'text': '"""4"""', 'font': "('', 25)", 'bg': '"""gray"""', 'fg': '"""white"""', 'activebackground': '"""yellow"""'}), "(text='4', font=('', 25), bg='gray', fg='white', activebackground=\n 'yellow')\n", (1117, 1197), False, 'from tkinter import Button\n'), ((1271, 1357), 'tkinter.Button', 'Button', ([], {'text': '"""5"""', 'font': "('', 25)", 'bg': '"""gray"""', 'fg': '"""white"""', 'activebackground': '"""yellow"""'}), "(text='5', font=('', 25), bg='gray', fg='white', activebackground=\n 'yellow')\n", (1277, 1357), False, 'from tkinter import Button\n'), ((1433, 1519), 'tkinter.Button', 'Button', ([], {'text': '"""6"""', 'font': "('', 25)", 'bg': '"""gray"""', 'fg': '"""white"""', 'activebackground': '"""yellow"""'}), "(text='6', font=('', 25), bg='gray', fg='white', activebackground=\n 'yellow')\n", (1439, 1519), False, 'from tkinter import Button\n'), ((1595, 1681), 'tkinter.Button', 'Button', ([], {'text': '"""-"""', 'font': "('', 25)", 'bg': '"""gray"""', 'fg': '"""white"""', 'activebackground': '"""yellow"""'}), "(text='-', font=('', 25), bg='gray', fg='white', activebackground=\n 'yellow')\n", (1601, 1681), False, 'from tkinter import Button\n'), ((1757, 1843), 'tkinter.Button', 'Button', ([], {'text': '"""1"""', 'font': "('', 25)", 'bg': '"""gray"""', 'fg': '"""white"""', 'activebackground': '"""yellow"""'}), "(text='1', font=('', 25), bg='gray', fg='white', activebackground=\n 'yellow')\n", (1763, 1843), False, 'from tkinter import Button\n'), ((1918, 2004), 'tkinter.Button', 'Button', ([], {'text': '"""2"""', 'font': "('', 25)", 'bg': '"""gray"""', 'fg': '"""white"""', 'activebackground': '"""yellow"""'}), "(text='2', font=('', 25), bg='gray', fg='white', activebackground=\n 'yellow')\n", (1924, 2004), False, 'from tkinter import Button\n'), ((2083, 2169), 
'tkinter.Button', 'Button', ([], {'text': '"""3"""', 'font': "('', 25)", 'bg': '"""gray"""', 'fg': '"""white"""', 'activebackground': '"""yellow"""'}), "(text='3', font=('', 25), bg='gray', fg='white', activebackground=\n 'yellow')\n", (2089, 2169), False, 'from tkinter import Button\n'), ((2248, 2334), 'tkinter.Button', 'Button', ([], {'text': '"""*"""', 'font': "('', 25)", 'bg': '"""gray"""', 'fg': '"""white"""', 'activebackground': '"""yellow"""'}), "(text='*', font=('', 25), bg='gray', fg='white', activebackground=\n 'yellow')\n", (2254, 2334), False, 'from tkinter import Button\n'), ((2413, 2499), 'tkinter.Button', 'Button', ([], {'text': '"""C"""', 'font': "('', 25)", 'bg': '"""gray"""', 'fg': '"""white"""', 'activebackground': '"""yellow"""'}), "(text='C', font=('', 25), bg='gray', fg='white', activebackground=\n 'yellow')\n", (2419, 2499), False, 'from tkinter import Button\n'), ((2565, 2651), 'tkinter.Button', 'Button', ([], {'text': '"""0"""', 'font': "('', 25)", 'bg': '"""gray"""', 'fg': '"""white"""', 'activebackground': '"""yellow"""'}), "(text='0', font=('', 25), bg='gray', fg='white', activebackground=\n 'yellow')\n", (2571, 2651), False, 'from tkinter import Button\n'), ((2730, 2831), 'tkinter.Button', 'Button', ([], {'text': '"""="""', 'font': "('', 25)", 'bg': '"""gray"""', 'fg': '"""white"""', 'activebackground': '"""yellow"""', 'command': 'equal'}), "(text='=', font=('', 25), bg='gray', fg='white', activebackground=\n 'yellow', command=equal)\n", (2736, 2831), False, 'from tkinter import Button\n'), ((2898, 2984), 'tkinter.Button', 'Button', ([], {'text': '"""/"""', 'font': "('', 25)", 'bg': '"""gray"""', 'fg': '"""white"""', 'activebackground': '"""yellow"""'}), "(text='/', font=('', 25), bg='gray', fg='white', activebackground=\n 'yellow')\n", (2904, 2984), False, 'from tkinter import Button\n')] |
uktrade/great-cms | core/models.py | f13fa335ddcb925bc33a5fa096fe73ef7bdd351a | import hashlib
import mimetypes
from urllib.parse import unquote
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import models
from django.http import HttpResponseRedirect
from django.template.loader import render_to_string
from django.urls import reverse
from django.utils.functional import cached_property
from django.utils.safestring import mark_safe
from django.utils.text import slugify
from django.utils.translation import ugettext_lazy as _
from django_extensions.db.fields import CreationDateTimeField, ModificationDateTimeField
from great_components.mixins import GA360Mixin
from modelcluster.contrib.taggit import ClusterTaggableManager
from modelcluster.models import ClusterableModel, ParentalKey
from taggit.managers import TaggableManager
from taggit.models import ItemBase, TagBase, TaggedItemBase
from wagtail.admin.edit_handlers import (
FieldPanel,
InlinePanel,
MultiFieldPanel,
ObjectList,
PageChooserPanel,
StreamFieldPanel,
TabbedInterface,
)
from wagtail.contrib.redirects.models import Redirect
from wagtail.contrib.settings.models import BaseSetting, register_setting
from wagtail.core import blocks
from wagtail.core.blocks.stream_block import StreamBlockValidationError
from wagtail.core.fields import RichTextField, StreamField
from wagtail.core.models import Orderable, Page
from wagtail.images import get_image_model_string
from wagtail.images.edit_handlers import ImageChooserPanel
from wagtail.images.models import AbstractImage, AbstractRendition, Image
from wagtail.snippets.models import register_snippet
from wagtail.utils.decorators import cached_classmethod
from wagtailmedia.models import Media
from core import blocks as core_blocks, mixins
from core.case_study_index import delete_cs_index, update_cs_index
from core.constants import BACKLINK_QUERYSTRING_NAME, RICHTEXT_FEATURES__MINIMAL
from core.context import get_context_provider
from core.utils import PageTopicHelper, get_first_lesson
from exportplan.core.data import (
SECTION_SLUGS as EXPORTPLAN_SLUGS,
SECTIONS as EXPORTPLAN_URL_MAP,
)
# If we make a Redirect appear as a Snippet, we can sync it via Wagtail-Transfer
register_snippet(Redirect)
class GreatMedia(Media):
transcript = models.TextField(
verbose_name=_('Transcript'), blank=False, null=True # left null because was an existing field
)
subtitles_en = models.TextField(
verbose_name=_('English subtitles'),
null=True,
blank=True,
help_text='English-language subtitles for this video, in VTT format',
)
admin_form_fields = Media.admin_form_fields + (
'transcript',
'subtitles_en',
)
@property
def sources(self):
return [
{
'src': self.url,
'type': mimetypes.guess_type(self.filename)[0] or 'application/octet-stream',
'transcript': self.transcript,
}
]
@property
def subtitles(self):
output = []
# TO COME: support for more than just English
if self.subtitles_en:
output.append(
{
'srclang': 'en',
'label': 'English',
'url': reverse('core:subtitles-serve', args=[self.id, 'en']),
'default': False,
},
)
return output
class AbstractObjectHash(models.Model):
class Meta:
abstract = True
content_hash = models.CharField(max_length=1000)
@staticmethod
def generate_content_hash(field_file):
filehash = hashlib.md5()
field_file.open()
filehash.update(field_file.read())
field_file.close()
return filehash.hexdigest()
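    # Illustrative note: generate_content_hash(field_file) returns the 32-character hex
    # MD5 digest of the file's bytes, so two identical uploads share a content_hash.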
class DocumentHash(AbstractObjectHash):
document = models.ForeignKey(
'wagtaildocs.Document', null=True, blank=True, on_delete=models.CASCADE, related_name='+'
)
class ImageHash(AbstractObjectHash):
image = models.ForeignKey('wagtailimages.Image', null=True, blank=True, on_delete=models.CASCADE, related_name='+')
class AltTextImage(AbstractImage):
alt_text = models.CharField(max_length=255, blank=True)
admin_form_fields = Image.admin_form_fields + ('alt_text',)
class Rendition(AbstractRendition):
image = models.ForeignKey(AltTextImage, on_delete=models.CASCADE, related_name='renditions')
class Meta:
unique_together = ('image', 'filter_spec', 'focal_point_key')
@property
def alt(self):
return self.image.alt_text
@register_snippet
class Tour(ClusterableModel):
page = models.OneToOneField('wagtailcore.Page', on_delete=models.CASCADE, related_name='tour')
title = models.CharField(max_length=255)
body = models.CharField(max_length=255)
button_text = models.CharField(max_length=255)
panels = [
PageChooserPanel('page'),
FieldPanel('title'),
FieldPanel('body'),
FieldPanel('button_text'),
MultiFieldPanel([InlinePanel('steps')], heading='Steps'),
]
def __str__(self):
return self.page.title
class TourStep(Orderable):
title = models.CharField(max_length=255)
body = models.CharField(max_length=255)
position = models.CharField(max_length=255)
selector = models.CharField(max_length=255)
tour = ParentalKey(Tour, on_delete=models.CASCADE, related_name='steps')
panels = [
FieldPanel('title'),
FieldPanel('body'),
FieldPanel('position'),
FieldPanel('selector'),
]
@register_snippet
class Product(models.Model):
name = models.CharField(max_length=255)
panels = [
FieldPanel('name'),
]
def __str__(self):
return self.name
@register_snippet
class Region(models.Model):
name = models.CharField(max_length=100, unique=True)
panels = [FieldPanel('name')]
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
@register_snippet
class Country(models.Model):
name = models.CharField(max_length=255)
slug = models.SlugField(max_length=100, unique=True)
region = models.ForeignKey(Region, null=True, blank=True, on_delete=models.SET_NULL)
panels = [
FieldPanel('name'),
FieldPanel('region'),
]
class Meta:
verbose_name_plural = 'Countries'
ordering = ('name',)
def save(self, *args, **kwargs):
# Automatically set slug on save, if not already set
if not self.slug:
self.slug = slugify(self.name)
super().save(*args, **kwargs)
def __str__(self):
return self.name
@register_snippet
class Tag(models.Model):
name = models.CharField(max_length=100, unique=True)
panels = [FieldPanel('name')]
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
@register_snippet
class IndustryTag(models.Model):
name = models.CharField(max_length=100, unique=True)
icon = models.ForeignKey(
AltTextImage,
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+',
)
panels = [FieldPanel('name'), ImageChooserPanel('icon')]
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
class TimeStampedModel(models.Model):
"""Modified version of django_extensions.db.models.TimeStampedModel
Unfortunately, because null=True needed to be added to create and
modified fields, inheritance causes issues with field clash.
"""
created = CreationDateTimeField('created', null=True)
modified = ModificationDateTimeField('modified', null=True)
def save(self, **kwargs):
self.update_modified = kwargs.pop('update_modified', getattr(self, 'update_modified', True))
super().save(**kwargs)
class Meta:
get_latest_by = 'modified'
ordering = (
'-modified',
'-created',
)
abstract = True
# Content models
class CMSGenericPage(
mixins.EnableTourMixin,
mixins.AuthenticatedUserRequired,
mixins.WagtailGA360Mixin,
GA360Mixin,
Page,
):
"""
Generic page, freely inspired by Codered page
"""
class Meta:
abstract = True
# Do not allow this page type to be created in wagtail admin
is_creatable = False
template_choices = []
###############
# Layout fields
###############
template = models.CharField(
max_length=255,
choices=None,
)
#########
# Panels
##########
layout_panels = [FieldPanel('template')]
settings_panels = [FieldPanel('slug')] + Page.settings_panels
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
field = self._meta.get_field('template')
field.choices = self.template_choices
field.required = True
@cached_classmethod
def get_edit_handler(cls): # NOQA N805
panels = [
ObjectList(cls.content_panels, heading='Content'),
ObjectList(cls.layout_panels, heading='Layout'),
ObjectList(cls.settings_panels, heading='Settings', classname='settings'),
]
return TabbedInterface(panels).bind_to(model=cls)
def get_template(self, request, *args, **kwargs):
return self.template
def get_context(self, request, *args, **kwargs):
context = super().get_context(request)
self.set_ga360_payload(
page_id=self.id,
business_unit=settings.GA360_BUSINESS_UNIT,
site_section=str(self.url or '/').split('/')[1],
)
self.add_ga360_data_to_payload(request)
context['ga360'] = self.ga360_payload
provider = get_context_provider(request=request, page=self)
if provider:
context.update(provider.get_context_data(request=request, page=self))
return context
class LandingPage(CMSGenericPage):
parent_page_types = [
'domestic.DomesticHomePage', # TODO: once we've restructured, remove this permission
'domestic.GreatDomesticHomePage',
]
subpage_types = [
'core.ListPage',
'core.InterstitialPage',
'domestic.DomesticDashboard',
]
template_choices = (
('learn/landing_page.html', 'Learn'),
('core/generic_page.html', 'Generic'),
)
################
# Content fields
################
description = RichTextField()
button = StreamField([('button', core_blocks.ButtonBlock(icon='cog'))], null=True, blank=True)
image = models.ForeignKey(
get_image_model_string(), null=True, blank=True, on_delete=models.SET_NULL, related_name='+'
)
body = StreamField(
[
('section', core_blocks.SectionBlock()),
('title', core_blocks.TitleBlock()),
('text', blocks.RichTextBlock(icon='openquote', helptext='Add a textblock')),
('image', core_blocks.ImageBlock()),
],
null=True,
blank=True,
)
components = StreamField(
[
('route', core_blocks.RouteSectionBlock()),
],
null=True,
blank=True,
)
#########
# Panels
#########
content_panels = CMSGenericPage.content_panels + [
FieldPanel('description'),
StreamFieldPanel('button'),
ImageChooserPanel('image'),
StreamFieldPanel('components'),
StreamFieldPanel('body'),
]
class InterstitialPage(CMSGenericPage):
parent_page_types = ['core.LandingPage']
template_choices = (('learn/interstitial.html', 'Learn'),)
################
# Content fields
################
button = StreamField([('button', core_blocks.ButtonBlock(icon='cog'))], null=True, blank=True)
#########
# Panels
#########
content_panels = CMSGenericPage.content_panels + [
StreamFieldPanel('button'),
]
class ListPage(CMSGenericPage):
parent_page_types = ['core.LandingPage']
subpage_types = ['core.CuratedListPage']
template_choices = (('learn/automated_list_page.html', 'Learn'),)
record_read_progress = models.BooleanField(
default=False,
help_text='Should we record when a user views a page in this collection?',
)
class Meta:
verbose_name = 'Automated list page'
verbose_name_plural = 'Automated list pages'
def get_context(self, request, *args, **kwargs):
from core.helpers import get_high_level_completion_progress
from domestic.helpers import get_lesson_completion_status
context = super().get_context(request)
if request.user.is_authenticated:
completion_status = get_lesson_completion_status(request.user)
context['high_level_completion_progress'] = get_high_level_completion_progress(
completion_status=completion_status,
)
return context
################
# Content fields
################
description = RichTextField()
button_label = models.CharField(max_length=100)
#########
# Panels
#########
settings_panels = CMSGenericPage.settings_panels + [FieldPanel('record_read_progress')]
content_panels = CMSGenericPage.content_panels + [FieldPanel('description'), FieldPanel('button_label')]
class CuratedListPage(CMSGenericPage):
parent_page_types = ['core.ListPage']
subpage_types = [
'core.TopicPage',
]
template_choices = (('learn/curated_list_page.html', 'Learn'),)
################
# Content fields
################
heading = RichTextField()
image = models.ForeignKey(
get_image_model_string(), null=True, blank=True, on_delete=models.SET_NULL, related_name='+'
)
########
# Panels
########
content_panels = CMSGenericPage.content_panels + [
FieldPanel('heading'),
ImageChooserPanel('image'),
]
def get_topics(self, live=True) -> models.QuerySet:
qs = TopicPage.objects.live().specific().descendant_of(self)
if live:
qs = qs.live()
return qs
@cached_property
def count_topics(self):
return self.get_topics().count()
@cached_property
def count_detail_pages(self):
count = 0
for topic in self.get_topics():
count += DetailPage.objects.live().descendant_of(topic).count()
return count
def get_context(self, request, *args, **kwargs):
from core.helpers import (
get_high_level_completion_progress,
get_module_completion_progress,
)
from domestic.helpers import get_lesson_completion_status
context = super().get_context(request)
# Give the template a simple way to link back to the parent
# learning module (ListPage)
context['parent_page_url'] = self.get_parent().url
if request.user.is_authenticated:
# get this once, so we don't waste the network call to get the data twice
completion_status = get_lesson_completion_status(request.user)
context['module_completion_progress'] = get_module_completion_progress(
completion_status=completion_status,
module_page=self,
)
context['high_level_completion_progress'] = get_high_level_completion_progress(
completion_status=completion_status,
)
return context
def hero_singular_validation(value):
if value and len(value) > 1:
raise StreamBlockValidationError(
non_block_errors=ValidationError('Only one image or video allowed in Hero section', code='invalid'),
)
class TopicPage(mixins.AuthenticatedUserRequired, Page):
"""Structural page to allow for cleaner mapping of lessons (`DetailPage`s)
to modules (`CuratedListPage`s).
    Not intended to be viewed by end users, so will redirect to the parent
module if accessed.
Also, for the above reason, mixins.WagtailGA360Mixin and GA360Mixin
are not used."""
parent_page_types = ['core.CuratedListPage']
subpage_types = [
'core.DetailPage',
'core.LessonPlaceholderPage',
]
# `title` comes from Page superclass and that's all we need here
def _redirect_to_parent_module(self):
return HttpResponseRedirect(self.get_parent().url)
def serve_preview(self, request, mode_name='dummy'):
# It doesn't matter what is passed as mode_name - we always redirect
return self._redirect_to_parent_module()
def serve(self, request):
return self._redirect_to_parent_module()
class LessonPlaceholderPage(mixins.AuthenticatedUserRequired, Page):
"""Structural page to allow for configuring and representing very simple
to modules (`CuratedListPage`s).
Not intented to be viewed by end users, so will redirect to the parent
module if accessed.
Also, for the above reason, mixins.WagtailGA360Mixin and GA360Mixin
are not used."""
parent_page_types = ['core.TopicPage']
subpage_types = [] # No child pages allowed for placeholders
# `title` comes from Page superclass and that's all we need here
def _redirect_to_parent_module(self):
dest = CuratedListPage.objects.ancestor_of(self).first().url
return HttpResponseRedirect(dest)
def serve_preview(self, request, mode_name='dummy'):
# It doesn't matter what is passed as mode_name - we always redirect
return self._redirect_to_parent_module()
def serve(self, request):
return self._redirect_to_parent_module()
class DetailPage(CMSGenericPage):
estimated_read_duration = models.DurationField(null=True, blank=True)
parent_page_types = [
'core.CuratedListPage', # TEMPORARY: remove after topics refactor migration has run
'core.TopicPage',
]
template_choices = (('learn/detail_page.html', 'Learn'),)
class Meta:
verbose_name = 'Detail page'
verbose_name_plural = 'Detail pages'
################
# Content fields
################
hero = StreamField(
[
('Image', core_blocks.ImageBlock(template='core/includes/_hero_image.html')),
('Video', core_blocks.SimpleVideoBlock(template='core/includes/_hero_video.html')),
],
null=True,
blank=True,
validators=[hero_singular_validation],
)
objective = StreamField(
[
(
'paragraph',
blocks.RichTextBlock(options={'class': 'objectives'}),
),
('ListItem', core_blocks.Item()),
]
)
body = StreamField(
[
(
'paragraph',
blocks.StructBlock(
[('paragraph', blocks.RichTextBlock())],
template='core/struct_paragraph_block.html',
icon='fa-font',
),
),
(
'video',
blocks.StructBlock(
[('video', core_blocks.VideoBlock())],
template='core/struct_video_block.html',
icon='fa-play',
),
),
('case_study', core_blocks.CaseStudyStaticBlock(icon='fa-book')),
(
'Step',
core_blocks.StepByStepBlock(icon='cog'),
),
(
'fictional_example',
blocks.StructBlock(
[('fiction_body', blocks.RichTextBlock(icon='openquote'))],
template='learn/fictional_company_example.html',
icon='fa-commenting-o',
),
),
(
'ITA_Quote',
core_blocks.ITAQuoteBlock(icon='fa-quote-left'),
),
(
'pros_cons',
blocks.StructBlock(
[
(
'pros',
blocks.StreamBlock(
[
(
'item',
core_blocks.Item(icon='fa-arrow-right'),
)
]
),
),
(
'cons',
blocks.StreamBlock(
[
(
'item',
core_blocks.Item(icon='fa-arrow-right'),
)
]
),
),
],
template='learn/pros_and_cons.html',
icon='fa-arrow-right',
),
),
('choose_do_not_choose', core_blocks.ChooseDoNotChooseBlock()),
(
'image',
core_blocks.ImageBlock(
template='core/includes/_image_full_width.html',
help_text='Image displayed within a full-page-width block',
),
),
(
'video',
core_blocks.SimpleVideoBlock(
template='core/includes/_video_full_width.html',
help_text='Video displayed within a full-page-width block',
),
),
]
)
recap = StreamField(
[
(
'recap_item',
blocks.StructBlock(
[
('title', blocks.CharBlock(icon='fa-header')),
(
'item',
blocks.StreamBlock(
[
(
'item',
core_blocks.Item(),
)
]
),
),
],
template='learn/recap.html',
icon='fa-commenting-o',
),
)
]
)
#########
# Panels
##########
content_panels = Page.content_panels + [
StreamFieldPanel('hero'),
StreamFieldPanel('objective'),
StreamFieldPanel('body'),
StreamFieldPanel('recap'),
]
def handle_page_view(self, request):
if request.user.is_authenticated:
# checking if the page should record read progress
# checking if the page is already marked as read
list_page = (
ListPage.objects.ancestor_of(self)
.filter(record_read_progress=True)
.exclude(page_views_list__sso_id=request.user.pk, page_views_list__page=self)
.first()
)
if list_page:
PageView.objects.get_or_create(
page=self,
list_page=list_page,
sso_id=request.user.pk,
)
def serve(self, request, *args, **kwargs):
self.handle_page_view(request)
return super().serve(request, **kwargs)
@cached_property
def topic_title(self):
return self.get_parent().title
@cached_property
def module(self):
"""Gets the learning module this lesson belongs to"""
return CuratedListPage.objects.live().specific().ancestor_of(self).first()
@cached_property
def _export_plan_url_map(self):
"""Return a lookup dictionary of URL Slugs->title for all the
Export Plan sections we have."""
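        # Illustrative shape only (slugs/titles are hypothetical):
        # {'about-your-business': 'About your business', ...}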
return {url: values['title'] for url, values in EXPORTPLAN_URL_MAP.items()}
def _get_backlink(self, request):
"""Try to extract a backlink (used for a link to the export plan) from the
querystring on the request that brought us to this view.
Only accepts backlinks that we KNOW are for the export plan, else ignore it."""
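        # Illustrative note: a path whose fourth '/'-separated piece (index 3) is one of
        # EXPORTPLAN_SLUGS is accepted as-is; anything containing '://' is rejected as a
        # smuggled absolute URL.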
backlink_path = request.GET.get(BACKLINK_QUERYSTRING_NAME, '')
if backlink_path is not None:
backlink_path = unquote(backlink_path)
if len(backlink_path.split('/')) > 2 and (
backlink_path.split('/')[3] in EXPORTPLAN_SLUGS and '://' not in backlink_path
):
# The check for '://' will stop us accepting a backlink which
# features a full URL as its OWN querystring param (eg a crafted attack
# URL), but that's an acceptable limitation here and is very unlikely
# to happen.
return backlink_path
return None # safe default
def _get_backlink_title(self, backlink_path):
"""For a given backlink, see if we can get a title that goes with it.
For now, this is limited only to Export Plan pages/links.
"""
# We have to re-arrange EXPORT_PLAN_SECTION_TITLES_URLS after import
# because it features lazily-evaluated URLs that aren't ready when
# models are imported
if backlink_path and len(backlink_path.split('/')) > 3:
_path = backlink_path.split('/')[3]
return self._export_plan_url_map.get(_path)
def get_context(self, request, *args, **kwargs):
context = super().get_context(request)
context['refresh_on_market_change'] = True
# Prepare backlink to the export plan if we detect one and can validate it
_backlink = self._get_backlink(request)
if _backlink:
context['backlink'] = _backlink
context['backlink_title'] = self._get_backlink_title(_backlink)
if isinstance(self.get_parent().specific, TopicPage):
# In a conditional because a DetailPage currently MAY be used as
# a child of another page type...
page_topic_helper = PageTopicHelper(self)
next_lesson = page_topic_helper.get_next_lesson()
context['current_lesson'] = self
context['current_module'] = page_topic_helper.module
if page_topic_helper:
topic_page = page_topic_helper.get_page_topic()
if topic_page:
context['current_topic'] = topic_page
context['page_topic'] = topic_page.title
if next_lesson:
context['next_lesson'] = next_lesson
else:
next_module = self.module.get_next_sibling()
if not next_module:
return context
context['next_module'] = next_module.specific
context['next_lesson'] = get_first_lesson(next_module)
return context
class PageView(TimeStampedModel):
page = models.ForeignKey(DetailPage, on_delete=models.CASCADE, related_name='page_views')
list_page = models.ForeignKey(ListPage, on_delete=models.CASCADE, related_name='page_views_list')
sso_id = models.TextField()
class Meta:
ordering = ['page__pk']
unique_together = ['page', 'sso_id']
# TODO: deprecate and remove
class ContentModuleTag(TaggedItemBase):
content_object = ParentalKey('core.ContentModule', on_delete=models.CASCADE, related_name='tagged_items')
# TODO: deprecate and remove
@register_snippet
class ContentModule(ClusterableModel):
title = models.CharField(max_length=255)
content = RichTextField()
tags = TaggableManager(through=ContentModuleTag, blank=True)
panels = [
FieldPanel('title'),
FieldPanel('content'),
FieldPanel('tags'),
]
def __str__(self):
return self.title
class PersonalisationHSCodeTag(TagBase):
"""Custom tag for personalisation.
Tag value will be a HS6, HS4 or HS2 code"""
# free_tagging = False # DISABLED until tag data only comes via data migration
class Meta:
verbose_name = 'HS Code tag for personalisation'
verbose_name_plural = 'HS Code tags for personalisation'
class PersonalisationCountryTag(TagBase):
"""Custom tag for personalisation.
Tag value will be an ISO-2 Country code ('DE')
"""
free_tagging = False
class Meta:
verbose_name = 'Country tag for personalisation'
verbose_name_plural = 'Country tags for personalisation'
class PersonalisationRegionTag(TagBase):
"""Custom tag for personalisation.
Tag value will be a geographical string ('Europe')
"""
free_tagging = False
class Meta:
verbose_name = 'Region tag for personalisation'
verbose_name_plural = 'Region tags for personalisation'
class PersonalisationTradingBlocTag(TagBase):
"""Custom tag for personalisation.
Tag value will be an Trading blocs
"""
free_tagging = False
class Meta:
verbose_name = 'Trading bloc tag for personalisation'
verbose_name_plural = 'Trading bloc tags for personalisation'
# If you're wondering what's going on here:
# https://docs.wagtail.io/en/stable/reference/pages/model_recipes.html#custom-tag-models
class HSCodeTaggedCaseStudy(ItemBase):
tag = models.ForeignKey(
PersonalisationHSCodeTag, related_name='hscode_tagged_case_studies', on_delete=models.CASCADE
)
content_object = ParentalKey(to='core.CaseStudy', on_delete=models.CASCADE, related_name='hs_code_tagged_items')
class CountryTaggedCaseStudy(ItemBase):
tag = models.ForeignKey(
PersonalisationCountryTag, related_name='country_tagged_case_studies', on_delete=models.CASCADE
)
content_object = ParentalKey(to='core.CaseStudy', on_delete=models.CASCADE, related_name='country_tagged_items')
class RegionTaggedCaseStudy(ItemBase):
tag = models.ForeignKey(
PersonalisationRegionTag, related_name='region_tagged_case_studies', on_delete=models.CASCADE
)
content_object = ParentalKey(to='core.CaseStudy', on_delete=models.CASCADE, related_name='region_tagged_items')
class TradingBlocTaggedCaseStudy(ItemBase):
tag = models.ForeignKey(
PersonalisationTradingBlocTag, related_name='trading_bloc_tagged_case_studies', on_delete=models.CASCADE
)
content_object = ParentalKey(
to='core.CaseStudy', on_delete=models.CASCADE, related_name='trading_bloc_tagged_items'
)
def _high_level_validation(value, error_messages):
TEXT_BLOCK = 'text' # noqa N806
MEDIA_BLOCK = 'media' # noqa N806
QUOTE_BLOCK = 'quote' # noqa N806
# we need to be strict about presence and ordering of these nodes
if [node.block_type for node in value if node.block_type != QUOTE_BLOCK] != [MEDIA_BLOCK, TEXT_BLOCK]:
error_messages.append(
(
'This block must contain one Media section (with one or '
'two items in it) and/or a Quote section, then one Text section following it.'
)
)
return error_messages
def _low_level_validation(value, error_messages):
# Check content of media node, which should be present here
MEDIA_BLOCK = 'media' # noqa N806
VIDEO_BLOCK = 'video' # noqa N806
for node in value:
if node.block_type == MEDIA_BLOCK:
subnode_block_types = [subnode.block_type for subnode in node.value]
if len(subnode_block_types) == 2:
if set(subnode_block_types) == {VIDEO_BLOCK}:
# Two videos: not allowed
error_messages.append('Only one video may be used in a case study.')
elif subnode_block_types[1] == VIDEO_BLOCK:
# implicitly, [0] must be an image
# video after image: not allowed
error_messages.append('The video must come before a still image.')
return error_messages
def case_study_body_validation(value):
"""Ensure the case study has exactly both a media node and a text node
and that the media node has the following content:
* One image, only
* One video, only
* One video + One image
* (video must comes first so that it is displayed first)
* Two images
"""
error_messages = []
if value:
error_messages = _high_level_validation(value, error_messages)
error_messages = _low_level_validation(value, error_messages)
if error_messages:
raise StreamBlockValidationError(
non_block_errors=ValidationError('; '.join(error_messages), code='invalid'),
)
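# Illustrative sketch (added for clarity, not part of the original module):
# the two helpers above only read `node.block_type` and, for media nodes, the
# block types of the subnodes in `node.value`, so simple stand-in objects are
# enough to show which orderings pass. `_FakeNode` is a hypothetical stub.
def _example_case_study_validation_sketch():
    class _FakeNode:
        def __init__(self, block_type, value=()):
            self.block_type = block_type
            self.value = value

    image, video, text = _FakeNode('image'), _FakeNode('video'), _FakeNode('text')

    # Valid: one media node (video before image) followed by one text node
    valid = [_FakeNode('media', [video, image]), text]
    assert _high_level_validation(valid, []) == []
    assert _low_level_validation(valid, []) == []

    # Invalid: the image comes before the video inside the media node
    invalid = [_FakeNode('media', [image, video]), text]
    assert _low_level_validation(invalid, []) == ['The video must come before a still image.']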
class MagnaPageChooserPanel(PageChooserPanel):
show_label = False
field_template = 'admin/wagtailadmin/edit_handlers/field_panel_field.html'
def render_as_field(self):
instance_obj = self.get_chosen_item()
context = {
'field': self.bound_field,
self.object_type_name: instance_obj,
'is_chosen': bool(instance_obj), # DEPRECATED - passed to templates for backwards compatibility only
# Added obj_type on base class method render_as_field
'obj_type': instance_obj.specific.__class__.__name__ if instance_obj else None,
}
return mark_safe(render_to_string(self.field_template, context))
class CaseStudyRelatedPages(Orderable):
case_study = ParentalKey(
'core.CaseStudy',
related_name='related_pages',
on_delete=models.SET_NULL,
null=True,
blank=True,
)
page = models.ForeignKey(
'wagtailcore.Page',
on_delete=models.CASCADE,
related_name='+',
)
panels = [
MagnaPageChooserPanel('page', [DetailPage, CuratedListPage, TopicPage]),
]
class Meta:
unique_together = ['case_study', 'page']
@register_snippet
class CaseStudy(ClusterableModel):
"""Dedicated snippet for use as a case study. Supports personalised
selection via its tags.
The decision about the appropriate Case Study block to show will happen
when the page attempts to render the relevant CaseStudyBlock.
Note that this is rendered via Wagtail's ModelAdmin, so appears in the sidebar,
but we have to keep it registered as a Snippet to be able to transfer it
with Wagtail-Transfer
"""
title = models.CharField(
max_length=255,
blank=False,
verbose_name='Internal case study title',
)
# old name company_name
summary_context = models.CharField(max_length=255, blank=False, default='How we did it')
# old name summary
lead_title = models.TextField(blank=False) # Deliberately not rich-text / no formatting
body = StreamField(
[
(
'media',
blocks.StreamBlock(
[
('video', core_blocks.SimpleVideoBlock(template='core/includes/_case_study_video.html')),
('image', core_blocks.ImageBlock()),
],
min_num=1,
max_num=2,
),
),
(
'text',
blocks.RichTextBlock(
features=RICHTEXT_FEATURES__MINIMAL,
),
),
(
'quote',
core_blocks.CaseStudyQuoteBlock(),
),
],
validators=[case_study_body_validation],
help_text=(
'This block must contain one Media section (with one or two items in it) '
'and/or Quote sections, then one Text section.'
),
)
# We are keeping the personalisation-relevant tags in separate
# fields to aid lookup and make the UX easier for editors
hs_code_tags = ClusterTaggableManager(through='core.HSCodeTaggedCaseStudy', blank=True, verbose_name='HS-code tags')
country_code_tags = ClusterTaggableManager(
through='core.CountryTaggedCaseStudy', blank=True, verbose_name='Country tags'
)
region_code_tags = ClusterTaggableManager(
through='core.RegionTaggedCaseStudy', blank=True, verbose_name='Region tags'
)
trading_bloc_code_tags = ClusterTaggableManager(
through='core.TradingBlocTaggedCaseStudy', blank=True, verbose_name='Trading bloc tags'
)
created = CreationDateTimeField('created', null=True)
modified = ModificationDateTimeField('modified', null=True)
panels = [
MultiFieldPanel(
[
FieldPanel('title'),
FieldPanel('lead_title'),
FieldPanel('summary_context'),
StreamFieldPanel('body'),
],
heading='Case Study content',
),
MultiFieldPanel(
[
FieldPanel('hs_code_tags'),
FieldPanel('country_code_tags'),
FieldPanel('region_code_tags'),
FieldPanel('trading_bloc_code_tags'),
],
heading='Case Study tags for Personalisation',
),
MultiFieldPanel(
[
InlinePanel('related_pages', label='Related pages'),
],
heading='Related Lesson, Topic & Module, also used for Personalisation',
),
]
def __str__(self):
display_name = self.title if self.title else self.summary_context
return f'{display_name}'
def save(self, **kwargs):
        # When we create a new case study we need to save it first to obtain an ID for indexing
self.update_modified = kwargs.pop('update_modified', getattr(self, 'update_modified', True))
super().save(**kwargs)
update_cs_index(self)
def delete(self, **kwargs):
delete_cs_index(self.id)
super().delete(**kwargs)
def get_cms_standalone_view_url(self):
return reverse('cms_extras:case-study-view', args=[self.id])
class Meta:
verbose_name_plural = 'Case studies'
get_latest_by = 'modified'
ordering = (
'-modified',
'-created',
)
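# Illustrative sketch (added for clarity, not part of the original module):
# the per-dimension tag managers on CaseStudy allow candidate case studies to
# be looked up with plain ORM filters. The HS code and market values below are
# hypothetical examples; the real personalisation scoring lives outside this model.
def _example_case_study_tag_lookup_sketch(hs6_code='010121', market='DE'):
    return CaseStudy.objects.filter(
        models.Q(hs_code_tags__name__in=[hs6_code, hs6_code[:4], hs6_code[:2]])
        | models.Q(country_code_tags__name=market)
    ).distinct()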
@register_setting
class CaseStudyScoringSettings(BaseSetting):
threshold = models.DecimalField(
help_text='This is the minimum score which a case study needs to have to be '
'considered before being presented to users. ',
default=10,
decimal_places=3,
max_digits=5,
)
lesson = models.DecimalField(
help_text="Score given when user's lesson is tagged in the case study.",
default=8,
decimal_places=3,
max_digits=5,
)
topic = models.DecimalField(
help_text="Score given when user's lesson's topic is tagged in the case study "
        'unless there is also a lesson match.',
default=4,
decimal_places=3,
max_digits=5,
)
module = models.DecimalField(
help_text="Score given when the user's lesson's module is tagged in the case study "
        'unless there is also a lesson or topic match.',
default=2,
decimal_places=3,
max_digits=5,
)
product_hs6 = models.DecimalField(
help_text='Score given when any case study HS6 tag matches the complete HS6 code of '
"any of the user's products",
default=8,
decimal_places=3,
max_digits=5,
)
product_hs4 = models.DecimalField(
help_text="Score given when any case study HS4 tag matches the first 4 digits of any of the user's products "
'unless there is an HS6 match.',
default=4,
decimal_places=3,
max_digits=5,
)
product_hs2 = models.DecimalField(
help_text="Score given when any case study HS2 tag matches the first 2 digits of any of the user's products "
'unless there is an HS6 or HS4 match.',
default=2,
decimal_places=3,
max_digits=5,
)
country_exact = models.DecimalField(
help_text="Score given when any case study country tag exactly matches one of the user's export markets.",
default=4,
decimal_places=3,
max_digits=5,
)
country_region = models.DecimalField(
help_text="Score given when any case study region tag matches the region of any of the user's export markets "
'unless there is an exact country match.',
default=2,
decimal_places=3,
max_digits=5,
)
trading_blocs = models.DecimalField(
        help_text='Score given when any case study trading bloc tag matches any trading bloc that any of '
"the user's export markets falls into unless there is an exact country or region match.",
default=2,
decimal_places=3,
max_digits=5,
)
product_tab = [MultiFieldPanel([FieldPanel('product_hs6'), FieldPanel('product_hs4'), FieldPanel('product_hs2')])]
market_tab = [
MultiFieldPanel([FieldPanel('country_exact'), FieldPanel('country_region'), FieldPanel('trading_blocs')])
]
lesson_tab = [MultiFieldPanel([FieldPanel('lesson'), FieldPanel('topic'), FieldPanel('module')])]
threshold_tab = [
MultiFieldPanel(
[
FieldPanel('threshold'),
]
)
]
edit_handler = TabbedInterface(
[
ObjectList(product_tab, heading='Product'),
ObjectList(market_tab, heading='Market'),
ObjectList(lesson_tab, heading='Lesson'),
ObjectList(threshold_tab, heading='Threshold'),
]
)
class Meta:
verbose_name = 'Case Study Scoring'
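# Illustrative sketch (added for clarity, not part of the original module):
# these settings only hold the weights; the real scoring code lives elsewhere.
# The hypothetical helper below shows how the strongest match per dimension
# could be summed and compared against the threshold.
def _example_case_study_score_sketch(settings, lesson=False, topic=False, module=False,
                                     hs6=False, hs4=False, country=False, region=False):
    score = 0
    if lesson:
        score += settings.lesson
    elif topic:
        score += settings.topic
    elif module:
        score += settings.module
    if hs6:
        score += settings.product_hs6
    elif hs4:
        score += settings.product_hs4
    if country:
        score += settings.country_exact
    elif region:
        score += settings.country_region
    return score >= settings.threshold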
| [((2206, 2232), 'wagtail.snippets.models.register_snippet', 'register_snippet', (['Redirect'], {}), '(Redirect)\n', (2222, 2232), False, 'from wagtail.snippets.models import register_snippet\n'), ((3527, 3560), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1000)'}), '(max_length=1000)\n', (3543, 3560), False, 'from django.db import models\n'), ((3845, 3958), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""wagtaildocs.Document"""'], {'null': '(True)', 'blank': '(True)', 'on_delete': 'models.CASCADE', 'related_name': '"""+"""'}), "('wagtaildocs.Document', null=True, blank=True, on_delete=\n models.CASCADE, related_name='+')\n", (3862, 3958), False, 'from django.db import models\n'), ((4019, 4131), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""wagtailimages.Image"""'], {'null': '(True)', 'blank': '(True)', 'on_delete': 'models.CASCADE', 'related_name': '"""+"""'}), "('wagtailimages.Image', null=True, blank=True, on_delete=\n models.CASCADE, related_name='+')\n", (4036, 4131), False, 'from django.db import models\n'), ((4179, 4223), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'blank': '(True)'}), '(max_length=255, blank=True)\n', (4195, 4223), False, 'from django.db import models\n'), ((4339, 4428), 'django.db.models.ForeignKey', 'models.ForeignKey', (['AltTextImage'], {'on_delete': 'models.CASCADE', 'related_name': '"""renditions"""'}), "(AltTextImage, on_delete=models.CASCADE, related_name=\n 'renditions')\n", (4356, 4428), False, 'from django.db import models\n'), ((4641, 4732), 'django.db.models.OneToOneField', 'models.OneToOneField', (['"""wagtailcore.Page"""'], {'on_delete': 'models.CASCADE', 'related_name': '"""tour"""'}), "('wagtailcore.Page', on_delete=models.CASCADE,\n related_name='tour')\n", (4661, 4732), False, 'from django.db import models\n'), ((4741, 4773), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (4757, 4773), False, 'from django.db import models\n'), ((4785, 4817), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (4801, 4817), False, 'from django.db import models\n'), ((4836, 4868), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (4852, 4868), False, 'from django.db import models\n'), ((5179, 5211), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (5195, 5211), False, 'from django.db import models\n'), ((5223, 5255), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (5239, 5255), False, 'from django.db import models\n'), ((5271, 5303), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (5287, 5303), False, 'from django.db import models\n'), ((5319, 5351), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (5335, 5351), False, 'from django.db import models\n'), ((5363, 5428), 'modelcluster.models.ParentalKey', 'ParentalKey', (['Tour'], {'on_delete': 'models.CASCADE', 'related_name': '"""steps"""'}), "(Tour, on_delete=models.CASCADE, related_name='steps')\n", (5374, 5428), False, 'from modelcluster.models import ClusterableModel, ParentalKey\n'), ((5632, 5664), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (5648, 5664), False, 'from django.db import models\n'), 
((5823, 5868), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'unique': '(True)'}), '(max_length=100, unique=True)\n', (5839, 5868), False, 'from django.db import models\n'), ((6059, 6091), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (6075, 6091), False, 'from django.db import models\n'), ((6103, 6148), 'django.db.models.SlugField', 'models.SlugField', ([], {'max_length': '(100)', 'unique': '(True)'}), '(max_length=100, unique=True)\n', (6119, 6148), False, 'from django.db import models\n'), ((6162, 6237), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Region'], {'null': '(True)', 'blank': '(True)', 'on_delete': 'models.SET_NULL'}), '(Region, null=True, blank=True, on_delete=models.SET_NULL)\n', (6179, 6237), False, 'from django.db import models\n'), ((6718, 6763), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'unique': '(True)'}), '(max_length=100, unique=True)\n', (6734, 6763), False, 'from django.db import models\n'), ((6958, 7003), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'unique': '(True)'}), '(max_length=100, unique=True)\n', (6974, 7003), False, 'from django.db import models\n'), ((7015, 7119), 'django.db.models.ForeignKey', 'models.ForeignKey', (['AltTextImage'], {'null': '(True)', 'blank': '(True)', 'on_delete': 'models.SET_NULL', 'related_name': '"""+"""'}), "(AltTextImage, null=True, blank=True, on_delete=models.\n SET_NULL, related_name='+')\n", (7032, 7119), False, 'from django.db import models\n'), ((7591, 7634), 'django_extensions.db.fields.CreationDateTimeField', 'CreationDateTimeField', (['"""created"""'], {'null': '(True)'}), "('created', null=True)\n", (7612, 7634), False, 'from django_extensions.db.fields import CreationDateTimeField, ModificationDateTimeField\n'), ((7650, 7698), 'django_extensions.db.fields.ModificationDateTimeField', 'ModificationDateTimeField', (['"""modified"""'], {'null': '(True)'}), "('modified', null=True)\n", (7675, 7698), False, 'from django_extensions.db.fields import CreationDateTimeField, ModificationDateTimeField\n'), ((8486, 8532), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'choices': 'None'}), '(max_length=255, choices=None)\n', (8502, 8532), False, 'from django.db import models\n'), ((10485, 10500), 'wagtail.core.fields.RichTextField', 'RichTextField', ([], {}), '()\n', (10498, 10500), False, 'from wagtail.core.fields import RichTextField, StreamField\n'), ((12182, 12296), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'help_text': '"""Should we record when a user views a page in this collection?"""'}), "(default=False, help_text=\n 'Should we record when a user views a page in this collection?')\n", (12201, 12296), False, 'from django.db import models\n'), ((13048, 13063), 'wagtail.core.fields.RichTextField', 'RichTextField', ([], {}), '()\n', (13061, 13063), False, 'from wagtail.core.fields import RichTextField, StreamField\n'), ((13083, 13115), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (13099, 13115), False, 'from django.db import models\n'), ((13642, 13657), 'wagtail.core.fields.RichTextField', 'RichTextField', ([], {}), '()\n', (13655, 13657), False, 'from wagtail.core.fields import RichTextField, StreamField\n'), ((17723, 17766), 'django.db.models.DurationField', 'models.DurationField', ([], {'null': '(True)', 'blank': '(True)'}), 
'(null=True, blank=True)\n', (17743, 17766), False, 'from django.db import models\n'), ((27063, 27150), 'django.db.models.ForeignKey', 'models.ForeignKey', (['DetailPage'], {'on_delete': 'models.CASCADE', 'related_name': '"""page_views"""'}), "(DetailPage, on_delete=models.CASCADE, related_name=\n 'page_views')\n", (27080, 27150), False, 'from django.db import models\n'), ((27162, 27252), 'django.db.models.ForeignKey', 'models.ForeignKey', (['ListPage'], {'on_delete': 'models.CASCADE', 'related_name': '"""page_views_list"""'}), "(ListPage, on_delete=models.CASCADE, related_name=\n 'page_views_list')\n", (27179, 27252), False, 'from django.db import models\n'), ((27261, 27279), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (27277, 27279), False, 'from django.db import models\n'), ((27466, 27559), 'modelcluster.models.ParentalKey', 'ParentalKey', (['"""core.ContentModule"""'], {'on_delete': 'models.CASCADE', 'related_name': '"""tagged_items"""'}), "('core.ContentModule', on_delete=models.CASCADE, related_name=\n 'tagged_items')\n", (27477, 27559), False, 'from modelcluster.models import ClusterableModel, ParentalKey\n'), ((27655, 27687), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (27671, 27687), False, 'from django.db import models\n'), ((27702, 27717), 'wagtail.core.fields.RichTextField', 'RichTextField', ([], {}), '()\n', (27715, 27717), False, 'from wagtail.core.fields import RichTextField, StreamField\n'), ((27729, 27782), 'taggit.managers.TaggableManager', 'TaggableManager', ([], {'through': 'ContentModuleTag', 'blank': '(True)'}), '(through=ContentModuleTag, blank=True)\n', (27744, 27782), False, 'from taggit.managers import TaggableManager\n'), ((29407, 29524), 'django.db.models.ForeignKey', 'models.ForeignKey', (['PersonalisationHSCodeTag'], {'related_name': '"""hscode_tagged_case_studies"""', 'on_delete': 'models.CASCADE'}), "(PersonalisationHSCodeTag, related_name=\n 'hscode_tagged_case_studies', on_delete=models.CASCADE)\n", (29424, 29524), False, 'from django.db import models\n'), ((29555, 29655), 'modelcluster.models.ParentalKey', 'ParentalKey', ([], {'to': '"""core.CaseStudy"""', 'on_delete': 'models.CASCADE', 'related_name': '"""hs_code_tagged_items"""'}), "(to='core.CaseStudy', on_delete=models.CASCADE, related_name=\n 'hs_code_tagged_items')\n", (29566, 29655), False, 'from modelcluster.models import ClusterableModel, ParentalKey\n'), ((29703, 29822), 'django.db.models.ForeignKey', 'models.ForeignKey', (['PersonalisationCountryTag'], {'related_name': '"""country_tagged_case_studies"""', 'on_delete': 'models.CASCADE'}), "(PersonalisationCountryTag, related_name=\n 'country_tagged_case_studies', on_delete=models.CASCADE)\n", (29720, 29822), False, 'from django.db import models\n'), ((29853, 29953), 'modelcluster.models.ParentalKey', 'ParentalKey', ([], {'to': '"""core.CaseStudy"""', 'on_delete': 'models.CASCADE', 'related_name': '"""country_tagged_items"""'}), "(to='core.CaseStudy', on_delete=models.CASCADE, related_name=\n 'country_tagged_items')\n", (29864, 29953), False, 'from modelcluster.models import ClusterableModel, ParentalKey\n'), ((30000, 30117), 'django.db.models.ForeignKey', 'models.ForeignKey', (['PersonalisationRegionTag'], {'related_name': '"""region_tagged_case_studies"""', 'on_delete': 'models.CASCADE'}), "(PersonalisationRegionTag, related_name=\n 'region_tagged_case_studies', on_delete=models.CASCADE)\n", (30017, 30117), False, 'from django.db import models\n'), ((30148, 
30247), 'modelcluster.models.ParentalKey', 'ParentalKey', ([], {'to': '"""core.CaseStudy"""', 'on_delete': 'models.CASCADE', 'related_name': '"""region_tagged_items"""'}), "(to='core.CaseStudy', on_delete=models.CASCADE, related_name=\n 'region_tagged_items')\n", (30159, 30247), False, 'from modelcluster.models import ClusterableModel, ParentalKey\n'), ((30299, 30427), 'django.db.models.ForeignKey', 'models.ForeignKey', (['PersonalisationTradingBlocTag'], {'related_name': '"""trading_bloc_tagged_case_studies"""', 'on_delete': 'models.CASCADE'}), "(PersonalisationTradingBlocTag, related_name=\n 'trading_bloc_tagged_case_studies', on_delete=models.CASCADE)\n", (30316, 30427), False, 'from django.db import models\n'), ((30458, 30563), 'modelcluster.models.ParentalKey', 'ParentalKey', ([], {'to': '"""core.CaseStudy"""', 'on_delete': 'models.CASCADE', 'related_name': '"""trading_bloc_tagged_items"""'}), "(to='core.CaseStudy', on_delete=models.CASCADE, related_name=\n 'trading_bloc_tagged_items')\n", (30469, 30563), False, 'from modelcluster.models import ClusterableModel, ParentalKey\n'), ((33519, 33633), 'modelcluster.models.ParentalKey', 'ParentalKey', (['"""core.CaseStudy"""'], {'related_name': '"""related_pages"""', 'on_delete': 'models.SET_NULL', 'null': '(True)', 'blank': '(True)'}), "('core.CaseStudy', related_name='related_pages', on_delete=\n models.SET_NULL, null=True, blank=True)\n", (33530, 33633), False, 'from modelcluster.models import ClusterableModel, ParentalKey\n'), ((33687, 33772), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""wagtailcore.Page"""'], {'on_delete': 'models.CASCADE', 'related_name': '"""+"""'}), "('wagtailcore.Page', on_delete=models.CASCADE,\n related_name='+')\n", (33704, 33772), False, 'from django.db import models\n'), ((34475, 34567), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'blank': '(False)', 'verbose_name': '"""Internal case study title"""'}), "(max_length=255, blank=False, verbose_name=\n 'Internal case study title')\n", (34491, 34567), False, 'from django.db import models\n'), ((34645, 34715), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'blank': '(False)', 'default': '"""How we did it"""'}), "(max_length=255, blank=False, default='How we did it')\n", (34661, 34715), False, 'from django.db import models\n'), ((34756, 34785), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(False)'}), '(blank=False)\n', (34772, 34785), False, 'from django.db import models\n'), ((35922, 36027), 'modelcluster.contrib.taggit.ClusterTaggableManager', 'ClusterTaggableManager', ([], {'through': '"""core.HSCodeTaggedCaseStudy"""', 'blank': '(True)', 'verbose_name': '"""HS-code tags"""'}), "(through='core.HSCodeTaggedCaseStudy', blank=True,\n verbose_name='HS-code tags')\n", (35944, 36027), False, 'from modelcluster.contrib.taggit import ClusterTaggableManager\n'), ((36049, 36155), 'modelcluster.contrib.taggit.ClusterTaggableManager', 'ClusterTaggableManager', ([], {'through': '"""core.CountryTaggedCaseStudy"""', 'blank': '(True)', 'verbose_name': '"""Country tags"""'}), "(through='core.CountryTaggedCaseStudy', blank=True,\n verbose_name='Country tags')\n", (36071, 36155), False, 'from modelcluster.contrib.taggit import ClusterTaggableManager\n'), ((36189, 36293), 'modelcluster.contrib.taggit.ClusterTaggableManager', 'ClusterTaggableManager', ([], {'through': '"""core.RegionTaggedCaseStudy"""', 'blank': '(True)', 'verbose_name': '"""Region tags"""'}), 
"(through='core.RegionTaggedCaseStudy', blank=True,\n verbose_name='Region tags')\n", (36211, 36293), False, 'from modelcluster.contrib.taggit import ClusterTaggableManager\n'), ((36333, 36449), 'modelcluster.contrib.taggit.ClusterTaggableManager', 'ClusterTaggableManager', ([], {'through': '"""core.TradingBlocTaggedCaseStudy"""', 'blank': '(True)', 'verbose_name': '"""Trading bloc tags"""'}), "(through='core.TradingBlocTaggedCaseStudy', blank=\n True, verbose_name='Trading bloc tags')\n", (36355, 36449), False, 'from modelcluster.contrib.taggit import ClusterTaggableManager\n'), ((36474, 36517), 'django_extensions.db.fields.CreationDateTimeField', 'CreationDateTimeField', (['"""created"""'], {'null': '(True)'}), "('created', null=True)\n", (36495, 36517), False, 'from django_extensions.db.fields import CreationDateTimeField, ModificationDateTimeField\n'), ((36533, 36581), 'django_extensions.db.fields.ModificationDateTimeField', 'ModificationDateTimeField', (['"""modified"""'], {'null': '(True)'}), "('modified', null=True)\n", (36558, 36581), False, 'from django_extensions.db.fields import CreationDateTimeField, ModificationDateTimeField\n'), ((38294, 38490), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'help_text': '"""This is the minimum score which a case study needs to have to be considered before being presented to users. """', 'default': '(10)', 'decimal_places': '(3)', 'max_digits': '(5)'}), "(help_text=\n 'This is the minimum score which a case study needs to have to be considered before being presented to users. '\n , default=10, decimal_places=3, max_digits=5)\n", (38313, 38490), False, 'from django.db import models\n'), ((38544, 38689), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'help_text': '"""Score given when user\'s lesson is tagged in the case study."""', 'default': '(8)', 'decimal_places': '(3)', 'max_digits': '(5)'}), '(help_text=\n "Score given when user\'s lesson is tagged in the case study.", default=\n 8, decimal_places=3, max_digits=5)\n', (38563, 38689), False, 'from django.db import models\n'), ((38731, 38918), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'help_text': '"""Score given when user\'s lesson\'s topic is tagged in the case study unless there is also lesson match."""', 'default': '(4)', 'decimal_places': '(3)', 'max_digits': '(5)'}), '(help_text=\n "Score given when user\'s lesson\'s topic is tagged in the case study unless there is also lesson match."\n , default=4, decimal_places=3, max_digits=5)\n', (38750, 38918), False, 'from django.db import models\n'), ((38972, 39173), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'help_text': '"""Score given when the user\'s lesson\'s module is tagged in the case study unless there is also lesson or topic match."""', 'default': '(2)', 'decimal_places': '(3)', 'max_digits': '(5)'}), '(help_text=\n "Score given when the user\'s lesson\'s module is tagged in the case study unless there is also lesson or topic match."\n , default=2, decimal_places=3, max_digits=5)\n', (38991, 39173), False, 'from django.db import models\n'), ((39232, 39417), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'help_text': '"""Score given when any case study HS6 tag matches the complete HS6 code of any of the user\'s products"""', 'default': '(8)', 'decimal_places': '(3)', 'max_digits': '(5)'}), '(help_text=\n "Score given when any case study HS6 tag matches the complete HS6 code of any of the user\'s products"\n , default=8, decimal_places=3, 
max_digits=5)\n', (39251, 39417), False, 'from django.db import models\n'), ((39476, 39688), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'help_text': '"""Score given when any case study HS4 tag matches the first 4 digits of any of the user\'s products unless there is an HS6 match."""', 'default': '(4)', 'decimal_places': '(3)', 'max_digits': '(5)'}), '(help_text=\n "Score given when any case study HS4 tag matches the first 4 digits of any of the user\'s products unless there is an HS6 match."\n , default=4, decimal_places=3, max_digits=5)\n', (39495, 39688), False, 'from django.db import models\n'), ((39747, 39966), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'help_text': '"""Score given when any case study HS2 tag matches the first 2 digits of any of the user\'s products unless there is an HS6 or HS4 match."""', 'default': '(2)', 'decimal_places': '(3)', 'max_digits': '(5)'}), '(help_text=\n "Score given when any case study HS2 tag matches the first 2 digits of any of the user\'s products unless there is an HS6 or HS4 match."\n , default=2, decimal_places=3, max_digits=5)\n', (39766, 39966), False, 'from django.db import models\n'), ((40027, 40206), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'help_text': '"""Score given when any case study country tag exactly matches one of the user\'s export markets."""', 'default': '(4)', 'decimal_places': '(3)', 'max_digits': '(5)'}), '(help_text=\n "Score given when any case study country tag exactly matches one of the user\'s export markets."\n , default=4, decimal_places=3, max_digits=5)\n', (40046, 40206), False, 'from django.db import models\n'), ((40257, 40480), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'help_text': '"""Score given when any case study region tag matches the region of any of the user\'s export markets unless there is an exact country match."""', 'default': '(2)', 'decimal_places': '(3)', 'max_digits': '(5)'}), '(help_text=\n "Score given when any case study region tag matches the region of any of the user\'s export markets unless there is an exact country match."\n , default=2, decimal_places=3, max_digits=5)\n', (40276, 40480), False, 'from django.db import models\n'), ((40542, 40804), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'help_text': '"""Score given when any case study trading bloc tag matches the any trading bloc that any of the user\'s export markets falls into unless there is an exact country or region match."""', 'default': '(2)', 'decimal_places': '(3)', 'max_digits': '(5)'}), '(help_text=\n "Score given when any case study trading bloc tag matches the any trading bloc that any of the user\'s export markets falls into unless there is an exact country or region match."\n , default=2, decimal_places=3, max_digits=5)\n', (40561, 40804), False, 'from django.db import models\n'), ((3642, 3655), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (3653, 3655), False, 'import hashlib\n'), ((4893, 4917), 'wagtail.admin.edit_handlers.PageChooserPanel', 'PageChooserPanel', (['"""page"""'], {}), "('page')\n", (4909, 4917), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((4927, 4946), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""title"""'], {}), "('title')\n", (4937, 4946), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, 
TabbedInterface\n'), ((4956, 4974), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""body"""'], {}), "('body')\n", (4966, 4974), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((4984, 5009), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""button_text"""'], {}), "('button_text')\n", (4994, 5009), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((5453, 5472), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""title"""'], {}), "('title')\n", (5463, 5472), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((5482, 5500), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""body"""'], {}), "('body')\n", (5492, 5500), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((5510, 5532), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""position"""'], {}), "('position')\n", (5520, 5532), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((5542, 5564), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""selector"""'], {}), "('selector')\n", (5552, 5564), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((5689, 5707), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""name"""'], {}), "('name')\n", (5699, 5707), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((5884, 5902), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""name"""'], {}), "('name')\n", (5894, 5902), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((6262, 6280), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""name"""'], {}), "('name')\n", (6272, 6280), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((6290, 6310), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""region"""'], {}), "('region')\n", (6300, 6310), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((6779, 6797), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""name"""'], {}), "('name')\n", (6789, 6797), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((7177, 7195), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""name"""'], {}), "('name')\n", (7187, 7195), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((7197, 7222), 'wagtail.images.edit_handlers.ImageChooserPanel', 'ImageChooserPanel', (['"""icon"""'], {}), "('icon')\n", (7214, 7222), 
False, 'from wagtail.images.edit_handlers import ImageChooserPanel\n'), ((8620, 8642), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""template"""'], {}), "('template')\n", (8630, 8642), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((9775, 9823), 'core.context.get_context_provider', 'get_context_provider', ([], {'request': 'request', 'page': 'self'}), '(request=request, page=self)\n', (9795, 9823), False, 'from core.context import get_context_provider\n'), ((10639, 10663), 'wagtail.images.get_image_model_string', 'get_image_model_string', ([], {}), '()\n', (10661, 10663), False, 'from wagtail.images import get_image_model_string\n'), ((13697, 13721), 'wagtail.images.get_image_model_string', 'get_image_model_string', ([], {}), '()\n', (13719, 13721), False, 'from wagtail.images import get_image_model_string\n'), ((17366, 17392), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['dest'], {}), '(dest)\n', (17386, 17392), False, 'from django.http import HttpResponseRedirect\n'), ((27807, 27826), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""title"""'], {}), "('title')\n", (27817, 27826), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((27836, 27857), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""content"""'], {}), "('content')\n", (27846, 27857), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((27867, 27885), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""tags"""'], {}), "('tags')\n", (27877, 27885), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((37802, 37823), 'core.case_study_index.update_cs_index', 'update_cs_index', (['self'], {}), '(self)\n', (37817, 37823), False, 'from core.case_study_index import delete_cs_index, update_cs_index\n'), ((37865, 37889), 'core.case_study_index.delete_cs_index', 'delete_cs_index', (['self.id'], {}), '(self.id)\n', (37880, 37889), False, 'from core.case_study_index import delete_cs_index, update_cs_index\n'), ((37982, 38035), 'django.urls.reverse', 'reverse', (['"""cms_extras:case-study-view"""'], {'args': '[self.id]'}), "('cms_extras:case-study-view', args=[self.id])\n", (37989, 38035), False, 'from django.urls import reverse\n'), ((2317, 2332), 'django.utils.translation.ugettext_lazy', '_', (['"""Transcript"""'], {}), "('Transcript')\n", (2318, 2332), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2465, 2487), 'django.utils.translation.ugettext_lazy', '_', (['"""English subtitles"""'], {}), "('English subtitles')\n", (2466, 2487), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6555, 6573), 'django.utils.text.slugify', 'slugify', (['self.name'], {}), '(self.name)\n', (6562, 6573), False, 'from django.utils.text import slugify\n'), ((8667, 8685), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""slug"""'], {}), "('slug')\n", (8677, 8685), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((9019, 9068), 'wagtail.admin.edit_handlers.ObjectList', 'ObjectList', 
(['cls.content_panels'], {'heading': '"""Content"""'}), "(cls.content_panels, heading='Content')\n", (9029, 9068), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((9082, 9129), 'wagtail.admin.edit_handlers.ObjectList', 'ObjectList', (['cls.layout_panels'], {'heading': '"""Layout"""'}), "(cls.layout_panels, heading='Layout')\n", (9092, 9129), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((9143, 9216), 'wagtail.admin.edit_handlers.ObjectList', 'ObjectList', (['cls.settings_panels'], {'heading': '"""Settings"""', 'classname': '"""settings"""'}), "(cls.settings_panels, heading='Settings', classname='settings')\n", (9153, 9216), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((11328, 11353), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""description"""'], {}), "('description')\n", (11338, 11353), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((11363, 11389), 'wagtail.admin.edit_handlers.StreamFieldPanel', 'StreamFieldPanel', (['"""button"""'], {}), "('button')\n", (11379, 11389), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((11399, 11425), 'wagtail.images.edit_handlers.ImageChooserPanel', 'ImageChooserPanel', (['"""image"""'], {}), "('image')\n", (11416, 11425), False, 'from wagtail.images.edit_handlers import ImageChooserPanel\n'), ((11435, 11465), 'wagtail.admin.edit_handlers.StreamFieldPanel', 'StreamFieldPanel', (['"""components"""'], {}), "('components')\n", (11451, 11465), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((11475, 11499), 'wagtail.admin.edit_handlers.StreamFieldPanel', 'StreamFieldPanel', (['"""body"""'], {}), "('body')\n", (11491, 11499), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((11925, 11951), 'wagtail.admin.edit_handlers.StreamFieldPanel', 'StreamFieldPanel', (['"""button"""'], {}), "('button')\n", (11941, 11951), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((12741, 12783), 'domestic.helpers.get_lesson_completion_status', 'get_lesson_completion_status', (['request.user'], {}), '(request.user)\n', (12769, 12783), False, 'from domestic.helpers import get_lesson_completion_status\n'), ((12840, 12911), 'core.helpers.get_high_level_completion_progress', 'get_high_level_completion_progress', ([], {'completion_status': 'completion_status'}), '(completion_status=completion_status)\n', (12874, 12911), False, 'from core.helpers import get_high_level_completion_progress, get_module_completion_progress\n'), ((13214, 13248), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""record_read_progress"""'], {}), "('record_read_progress')\n", (13224, 13248), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, 
PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((13304, 13329), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""description"""'], {}), "('description')\n", (13314, 13329), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((13331, 13357), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""button_label"""'], {}), "('button_label')\n", (13341, 13357), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((13899, 13920), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""heading"""'], {}), "('heading')\n", (13909, 13920), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((13930, 13956), 'wagtail.images.edit_handlers.ImageChooserPanel', 'ImageChooserPanel', (['"""image"""'], {}), "('image')\n", (13947, 13956), False, 'from wagtail.images.edit_handlers import ImageChooserPanel\n'), ((15084, 15126), 'domestic.helpers.get_lesson_completion_status', 'get_lesson_completion_status', (['request.user'], {}), '(request.user)\n', (15112, 15126), False, 'from domestic.helpers import get_lesson_completion_status\n'), ((15179, 15268), 'core.helpers.get_module_completion_progress', 'get_module_completion_progress', ([], {'completion_status': 'completion_status', 'module_page': 'self'}), '(completion_status=completion_status,\n module_page=self)\n', (15209, 15268), False, 'from core.helpers import get_high_level_completion_progress, get_module_completion_progress\n'), ((15368, 15439), 'core.helpers.get_high_level_completion_progress', 'get_high_level_completion_progress', ([], {'completion_status': 'completion_status'}), '(completion_status=completion_status)\n', (15402, 15439), False, 'from core.helpers import get_high_level_completion_progress, get_module_completion_progress\n'), ((22546, 22570), 'wagtail.admin.edit_handlers.StreamFieldPanel', 'StreamFieldPanel', (['"""hero"""'], {}), "('hero')\n", (22562, 22570), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((22580, 22609), 'wagtail.admin.edit_handlers.StreamFieldPanel', 'StreamFieldPanel', (['"""objective"""'], {}), "('objective')\n", (22596, 22609), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((22619, 22643), 'wagtail.admin.edit_handlers.StreamFieldPanel', 'StreamFieldPanel', (['"""body"""'], {}), "('body')\n", (22635, 22643), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((22653, 22678), 'wagtail.admin.edit_handlers.StreamFieldPanel', 'StreamFieldPanel', (['"""recap"""'], {}), "('recap')\n", (22669, 22678), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((24442, 24464), 'urllib.parse.unquote', 'unquote', (['backlink_path'], {}), '(backlink_path)\n', (24449, 24464), False, 'from urllib.parse import unquote\n'), ((26186, 26207), 'core.utils.PageTopicHelper', 'PageTopicHelper', (['self'], {}), '(self)\n', (26201, 26207), False, 'from core.utils 
import PageTopicHelper, get_first_lesson\n'), ((33412, 33458), 'django.template.loader.render_to_string', 'render_to_string', (['self.field_template', 'context'], {}), '(self.field_template, context)\n', (33428, 33458), False, 'from django.template.loader import render_to_string\n'), ((41398, 41440), 'wagtail.admin.edit_handlers.ObjectList', 'ObjectList', (['product_tab'], {'heading': '"""Product"""'}), "(product_tab, heading='Product')\n", (41408, 41440), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((41454, 41494), 'wagtail.admin.edit_handlers.ObjectList', 'ObjectList', (['market_tab'], {'heading': '"""Market"""'}), "(market_tab, heading='Market')\n", (41464, 41494), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((41508, 41548), 'wagtail.admin.edit_handlers.ObjectList', 'ObjectList', (['lesson_tab'], {'heading': '"""Lesson"""'}), "(lesson_tab, heading='Lesson')\n", (41518, 41548), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((41562, 41608), 'wagtail.admin.edit_handlers.ObjectList', 'ObjectList', (['threshold_tab'], {'heading': '"""Threshold"""'}), "(threshold_tab, heading='Threshold')\n", (41572, 41608), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((5036, 5056), 'wagtail.admin.edit_handlers.InlinePanel', 'InlinePanel', (['"""steps"""'], {}), "('steps')\n", (5047, 5056), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((9244, 9267), 'wagtail.admin.edit_handlers.TabbedInterface', 'TabbedInterface', (['panels'], {}), '(panels)\n', (9259, 9267), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((10538, 10573), 'core.blocks.ButtonBlock', 'core_blocks.ButtonBlock', ([], {'icon': '"""cog"""'}), "(icon='cog')\n", (10561, 10573), True, 'from core import blocks as core_blocks, mixins\n'), ((10797, 10823), 'core.blocks.SectionBlock', 'core_blocks.SectionBlock', ([], {}), '()\n', (10821, 10823), True, 'from core import blocks as core_blocks, mixins\n'), ((10848, 10872), 'core.blocks.TitleBlock', 'core_blocks.TitleBlock', ([], {}), '()\n', (10870, 10872), True, 'from core import blocks as core_blocks, mixins\n'), ((10896, 10962), 'wagtail.core.blocks.RichTextBlock', 'blocks.RichTextBlock', ([], {'icon': '"""openquote"""', 'helptext': '"""Add a textblock"""'}), "(icon='openquote', helptext='Add a textblock')\n", (10916, 10962), False, 'from wagtail.core import blocks\n'), ((10987, 11011), 'core.blocks.ImageBlock', 'core_blocks.ImageBlock', ([], {}), '()\n', (11009, 11011), True, 'from core import blocks as core_blocks, mixins\n'), ((11133, 11164), 'core.blocks.RouteSectionBlock', 'core_blocks.RouteSectionBlock', ([], {}), '()\n', (11162, 11164), True, 'from core import blocks as core_blocks, mixins\n'), ((11758, 11793), 'core.blocks.ButtonBlock', 'core_blocks.ButtonBlock', ([], {'icon': '"""cog"""'}), "(icon='cog')\n", (11781, 11793), True, 'from core import blocks as core_blocks, mixins\n'), ((15637, 15724), 
'django.core.exceptions.ValidationError', 'ValidationError', (['"""Only one image or video allowed in Hero section"""'], {'code': '"""invalid"""'}), "('Only one image or video allowed in Hero section', code=\n 'invalid')\n", (15652, 15724), False, 'from django.core.exceptions import ValidationError\n'), ((18199, 18264), 'core.blocks.ImageBlock', 'core_blocks.ImageBlock', ([], {'template': '"""core/includes/_hero_image.html"""'}), "(template='core/includes/_hero_image.html')\n", (18221, 18264), True, 'from core import blocks as core_blocks, mixins\n'), ((18289, 18360), 'core.blocks.SimpleVideoBlock', 'core_blocks.SimpleVideoBlock', ([], {'template': '"""core/includes/_hero_video.html"""'}), "(template='core/includes/_hero_video.html')\n", (18317, 18360), True, 'from core import blocks as core_blocks, mixins\n'), ((18564, 18617), 'wagtail.core.blocks.RichTextBlock', 'blocks.RichTextBlock', ([], {'options': "{'class': 'objectives'}"}), "(options={'class': 'objectives'})\n", (18584, 18617), False, 'from wagtail.core import blocks\n'), ((18659, 18677), 'core.blocks.Item', 'core_blocks.Item', ([], {}), '()\n', (18675, 18677), True, 'from core import blocks as core_blocks, mixins\n'), ((19297, 19345), 'core.blocks.CaseStudyStaticBlock', 'core_blocks.CaseStudyStaticBlock', ([], {'icon': '"""fa-book"""'}), "(icon='fa-book')\n", (19329, 19345), True, 'from core import blocks as core_blocks, mixins\n'), ((19402, 19441), 'core.blocks.StepByStepBlock', 'core_blocks.StepByStepBlock', ([], {'icon': '"""cog"""'}), "(icon='cog')\n", (19429, 19441), True, 'from core import blocks as core_blocks, mixins\n'), ((19831, 19878), 'core.blocks.ITAQuoteBlock', 'core_blocks.ITAQuoteBlock', ([], {'icon': '"""fa-quote-left"""'}), "(icon='fa-quote-left')\n", (19856, 19878), True, 'from core import blocks as core_blocks, mixins\n'), ((21072, 21108), 'core.blocks.ChooseDoNotChooseBlock', 'core_blocks.ChooseDoNotChooseBlock', ([], {}), '()\n', (21106, 21108), True, 'from core import blocks as core_blocks, mixins\n'), ((21166, 21301), 'core.blocks.ImageBlock', 'core_blocks.ImageBlock', ([], {'template': '"""core/includes/_image_full_width.html"""', 'help_text': '"""Image displayed within a full-page-width block"""'}), "(template='core/includes/_image_full_width.html',\n help_text='Image displayed within a full-page-width block')\n", (21188, 21301), True, 'from core import blocks as core_blocks, mixins\n'), ((21428, 21575), 'core.blocks.SimpleVideoBlock', 'core_blocks.SimpleVideoBlock', ([], {'template': '"""core/includes/_video_full_width.html"""', 'help_text': '"""Video displayed within a full-page-width block"""'}), "(template=\n 'core/includes/_video_full_width.html', help_text=\n 'Video displayed within a full-page-width block')\n", (21456, 21575), True, 'from core import blocks as core_blocks, mixins\n'), ((24000, 24026), 'exportplan.core.data.SECTIONS.items', 'EXPORTPLAN_URL_MAP.items', ([], {}), '()\n', (24024, 24026), True, 'from exportplan.core.data import SECTION_SLUGS as EXPORTPLAN_SLUGS, SECTIONS as EXPORTPLAN_URL_MAP\n'), ((26963, 26992), 'core.utils.get_first_lesson', 'get_first_lesson', (['next_module'], {}), '(next_module)\n', (26979, 26992), False, 'from core.utils import PageTopicHelper, get_first_lesson\n'), ((35311, 35368), 'wagtail.core.blocks.RichTextBlock', 'blocks.RichTextBlock', ([], {'features': 'RICHTEXT_FEATURES__MINIMAL'}), '(features=RICHTEXT_FEATURES__MINIMAL)\n', (35331, 35368), False, 'from wagtail.core import blocks\n'), ((35479, 35512), 'core.blocks.CaseStudyQuoteBlock', 
'core_blocks.CaseStudyQuoteBlock', ([], {}), '()\n', (35510, 35512), True, 'from core import blocks as core_blocks, mixins\n'), ((36653, 36672), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""title"""'], {}), "('title')\n", (36663, 36672), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((36690, 36714), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""lead_title"""'], {}), "('lead_title')\n", (36700, 36714), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((36732, 36761), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""summary_context"""'], {}), "('summary_context')\n", (36742, 36761), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((36779, 36803), 'wagtail.admin.edit_handlers.StreamFieldPanel', 'StreamFieldPanel', (['"""body"""'], {}), "('body')\n", (36795, 36803), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((36928, 36954), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""hs_code_tags"""'], {}), "('hs_code_tags')\n", (36938, 36954), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((36972, 37003), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""country_code_tags"""'], {}), "('country_code_tags')\n", (36982, 37003), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((37021, 37051), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""region_code_tags"""'], {}), "('region_code_tags')\n", (37031, 37051), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((37069, 37105), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""trading_bloc_code_tags"""'], {}), "('trading_bloc_code_tags')\n", (37079, 37105), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((37247, 37298), 'wagtail.admin.edit_handlers.InlinePanel', 'InlinePanel', (['"""related_pages"""'], {'label': '"""Related pages"""'}), "('related_pages', label='Related pages')\n", (37258, 37298), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((40882, 40907), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""product_hs6"""'], {}), "('product_hs6')\n", (40892, 40907), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((40909, 40934), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""product_hs4"""'], {}), "('product_hs4')\n", (40919, 40934), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((40936, 40961), 
'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""product_hs2"""'], {}), "('product_hs2')\n", (40946, 40961), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((41009, 41036), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""country_exact"""'], {}), "('country_exact')\n", (41019, 41036), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((41038, 41066), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""country_region"""'], {}), "('country_region')\n", (41048, 41066), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((41068, 41095), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""trading_blocs"""'], {}), "('trading_blocs')\n", (41078, 41095), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((41139, 41159), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""lesson"""'], {}), "('lesson')\n", (41149, 41159), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((41161, 41180), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""topic"""'], {}), "('topic')\n", (41171, 41180), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((41182, 41202), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""module"""'], {}), "('module')\n", (41192, 41202), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((41284, 41307), 'wagtail.admin.edit_handlers.FieldPanel', 'FieldPanel', (['"""threshold"""'], {}), "('threshold')\n", (41294, 41307), False, 'from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface\n'), ((3277, 3330), 'django.urls.reverse', 'reverse', (['"""core:subtitles-serve"""'], {'args': "[self.id, 'en']"}), "('core:subtitles-serve', args=[self.id, 'en'])\n", (3284, 3330), False, 'from django.urls import reverse\n'), ((2843, 2878), 'mimetypes.guess_type', 'mimetypes.guess_type', (['self.filename'], {}), '(self.filename)\n', (2863, 2878), False, 'import mimetypes\n'), ((18844, 18866), 'wagtail.core.blocks.RichTextBlock', 'blocks.RichTextBlock', ([], {}), '()\n', (18864, 18866), False, 'from wagtail.core import blocks\n'), ((19111, 19135), 'core.blocks.VideoBlock', 'core_blocks.VideoBlock', ([], {}), '()\n', (19133, 19135), True, 'from core import blocks as core_blocks, mixins\n'), ((19583, 19621), 'wagtail.core.blocks.RichTextBlock', 'blocks.RichTextBlock', ([], {'icon': '"""openquote"""'}), "(icon='openquote')\n", (19603, 19621), False, 'from wagtail.core import blocks\n'), ((21828, 21862), 'wagtail.core.blocks.CharBlock', 'blocks.CharBlock', ([], {'icon': '"""fa-header"""'}), "(icon='fa-header')\n", (21844, 21862), False, 'from wagtail.core import blocks\n'), ((34997, 35074), 'core.blocks.SimpleVideoBlock', 'core_blocks.SimpleVideoBlock', ([], {'template': 
'"""core/includes/_case_study_video.html"""'}), "(template='core/includes/_case_study_video.html')\n", (35025, 35074), True, 'from core import blocks as core_blocks, mixins\n'), ((35111, 35135), 'core.blocks.ImageBlock', 'core_blocks.ImageBlock', ([], {}), '()\n', (35133, 35135), True, 'from core import blocks as core_blocks, mixins\n'), ((20266, 20305), 'core.blocks.Item', 'core_blocks.Item', ([], {'icon': '"""fa-arrow-right"""'}), "(icon='fa-arrow-right')\n", (20282, 20305), True, 'from core import blocks as core_blocks, mixins\n'), ((20707, 20746), 'core.blocks.Item', 'core_blocks.Item', ([], {'icon': '"""fa-arrow-right"""'}), "(icon='fa-arrow-right')\n", (20723, 20746), True, 'from core import blocks as core_blocks, mixins\n'), ((22135, 22153), 'core.blocks.Item', 'core_blocks.Item', ([], {}), '()\n', (22151, 22153), True, 'from core import blocks as core_blocks, mixins\n')] |
dumpmemory/Research | CV/Effective Transformer-based Solution for RSNA Intracranial Hemorrhage Detection/easymia/transforms/transforms.py | 30fd70ff331b3d9aeede0b71e7a691ed6c2b87b3 | # -*-coding utf-8 -*-
##########################################################################
#
# Copyright (c) 2022 Baidu.com, Inc. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##########################################################################
"""
Data transformers.
"""
import numpy as np
import numbers
import collections
import random
import math
import cv2
from . import functional as F
from easymia.core.abstract_transforms import AbstractTransform
from easymia.libs import manager
@manager.TRANSFORMS.add_component
class Compose(AbstractTransform):
"""
Do transformation on input data with corresponding pre-processing and augmentation operations.
The shape of input data to all operations is [height, width, channels].
Args:
        transforms (list): A list containing data pre-processing or augmentation operations. An empty list means images are only read, with no transformation.
    Raises:
        TypeError: When 'transforms' is not a list.
"""
def __init__(self, mode, transforms):
if not isinstance(transforms, list):
raise TypeError('The transforms must be a list!')
self.transforms = transforms
super().__init__(mode)
def __clas__(self, im):
"""
Args:
im (np.ndarray): It is either image path or image object.
Returns:
(np.array). Image after transformation.
"""
for op in self.transforms:
im = op(im)
return im
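# Illustrative use of Compose with the transform classes defined further down in
# this module (a sketch, not executed here; the 'train' mode string is an
# assumption, and calls are expected to be routed to __clas__ by AbstractTransform):
#   pipeline = Compose('train', [RandomResizedCrop('train', size=224),
#                                RandomHorizontalFlip('train', prob=0.5)])
#   augmented = pipeline(image)  # image: np.ndarray of shape [height, width, channels]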
@manager.TRANSFORMS.add_component
class RandomHorizontalFlip(AbstractTransform):
"""Horizontally flip the given PIL Image randomly with a given probability.
Args:
p (float): probability of the image being flipped. Default value is 0.5
"""
def __init__(self, mode, prob=0.5):
"""
init
"""
self.prob = prob
super().__init__(mode)
def __clas__(self, img):
"""
Args:
img (numpy ndarray): Image to be flipped.
Returns:
numpy ndarray: Randomly flipped image.
"""
if random.random() < self.prob:
return F.hflip(img)
return img
@manager.TRANSFORMS.add_component
class RandomVerticalFlip(AbstractTransform):
"""Vertically flip the given PIL Image randomly with a given probability.
Args:
p (float): probability of the image being flipped. Default value is 0.5
"""
def __init__(self, mode, prob=0.5):
"""
init
"""
self.prob = prob
super().__init__(mode)
def __clas__(self, img):
"""
Args:
img (numpy ndarray): Image to be flipped.
Returns:
numpy ndarray: Randomly flipped image.
"""
if random.random() < self.prob:
return F.vflip(img)
return img
@manager.TRANSFORMS.add_component
class RandomResizedCrop(AbstractTransform):
"""Crop the given numpy ndarray to random size and aspect ratio.
A crop of random size (default: of 0.08 to 1.0) of the original size and a random
aspect ratio (default: of 3/4 to 4/3) of the original aspect ratio is made. This crop
is finally resized to given size.
This is popularly used to train the Inception networks.
Args:
size: expected output size of each edge
scale: range of size of the origin size cropped
ratio: range of aspect ratio of the origin aspect ratio cropped
interpolation: Default: cv2.INTER_CUBIC
"""
def __init__(self, mode, size, scale=(0.08, 1.0), ratio=(3. / 4., 4. / 3.), interpolation=cv2.INTER_CUBIC):
"""
init
"""
self.size = (size, size)
self.interpolation = interpolation
self.scale = scale
self.ratio = ratio
super().__init__(mode)
def get_params(self, img, scale, ratio):
"""Get parameters for ``crop`` for a random sized crop.
Args:
img (numpy ndarray): Image to be cropped.
scale (tuple): range of size of the origin size cropped
ratio (tuple): range of aspect ratio of the origin aspect ratio cropped
Returns:
tuple: params (i, j, h, w) to be passed to ``crop`` for a random
sized crop.
"""
params_ret = collections.namedtuple('params_ret', ['i', 'j', 'h', 'w'])
for attempt in range(10):
area = img.shape[0] * img.shape[1]
target_area = random.uniform(*scale) * area
aspect_ratio = random.uniform(*ratio)
w = int(round(math.sqrt(target_area * aspect_ratio)))
h = int(round(math.sqrt(target_area / aspect_ratio)))
if random.random() < 0.5:
w, h = h, w
if w <= img.shape[1] and h <= img.shape[0]:
i = random.randint(0, img.shape[0] - h)
j = random.randint(0, img.shape[1] - w)
return params_ret(i, j, h, w)
# Fallback
w = min(img.shape[0], img.shape[1])
i = (img.shape[0] - w) // 2
j = (img.shape[1] - w) // 2
return params_ret(i, j, w, w)
def __clas__(self, img):
"""
Args:
img (numpy ndarray): Image to be cropped and resized.
Returns:
numpy ndarray: Randomly cropped and resized image.
"""
i, j, h, w = self.get_params(img, self.scale, self.ratio)
return F.resized_crop(img, i, j, h, w, self.size, self.interpolation)
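# Quick sketch of get_params on a dummy 480x640 image (illustrative values): it
# returns the crop's top-left corner (i, j) and size (h, w), which __clas__ then
# passes to F.resized_crop to produce the final self.size output.
#   rrc = RandomResizedCrop('train', size=224, scale=(0.5, 1.0))
#   i, j, h, w = rrc.get_params(np.zeros((480, 640, 3), np.uint8), rrc.scale, rrc.ratio)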
@manager.TRANSFORMS.add_component
class RandomRotation(AbstractTransform):
"""Rotate the image by angle.
Args:
degrees (sequence or float or int): Range of degrees to select from.
If degrees is a number instead of sequence like (min, max), the range of degrees
will be (-degrees, +degrees).
resample ({cv2.INTER_NEAREST, cv2.INTER_LINEAR, cv2.INTER_CUBIC, cv2.INTER_LANCZOS4}, optional):
An optional resampling filter. See `filters`_ for more information.
If omitted, or if the image has mode "1" or "P", it is set to PIL.Image.NEAREST.
expand (bool, optional): Optional expansion flag.
If true, expands the output to make it large enough to hold the entire rotated image.
If false or omitted, make the output image the same size as the input image.
Note that the expand flag assumes rotation around the center and no translation.
center (2-tuple, optional): Optional center of rotation.
Origin is the upper left corner.
Default is the center of the image.
"""
def __init__(self, mode, degrees, center=None):
"""
init
"""
if isinstance(degrees, numbers.Number):
if degrees < 0:
raise ValueError(
"If degrees is a single number, it must be positive.")
self.degrees = (-degrees, degrees)
else:
if len(degrees) != 2:
raise ValueError(
"If degrees is a sequence, it must be of len 2.")
self.degrees = degrees
self.center = center
super().__init__(mode)
@staticmethod
def get_params(degrees):
"""Get parameters for ``rotate`` for a random rotation.
Returns:
sequence: params to be passed to ``rotate`` for random rotation.
"""
angle = random.uniform(degrees[0], degrees[1])
return angle
def __clas__(self, img):
"""
img (numpy ndarray): Image to be rotated.
Returns:
numpy ndarray: Rotated image.
"""
angle = self.get_params(self.degrees)
return F.rotate(img, angle, self.center)
@manager.TRANSFORMS.add_component
class Resize(AbstractTransform):
"""Resize the input numpy ndarray to the given size.
Args:
size (sequence or int): Desired output size. If size is a sequence like
(h, w), output size will be matched to this. If size is an int,
smaller edge of the image will be matched to this number.
i.e, if height > width, then image will be rescaled to
(size * height / width, size)
interpolation (int, optional): Desired interpolation. Default is
``cv2.INTER_CUBIC``, bicubic interpolation
"""
def __init__(self, mode, size, interpolation=cv2.INTER_LINEAR):
"""
resize
"""
# assert isinstance(size, int) or (isinstance(size, collections.Iterable) and len(size) == 2)
if isinstance(size, int):
self.size = (size, size)
elif isinstance(size, collections.abc.Iterable) and len(size) == 2:
if type(size) == list:
size = tuple(size)
self.size = size
else:
raise ValueError('Unknown inputs for size: {}'.format(size))
self.interpolation = interpolation
super().__init__(mode)
def __clas__(self, img):
"""
Args:
img (numpy ndarray): Image to be scaled.
Returns:
numpy ndarray: Rescaled image.
"""
return F.resize(img, self.size, self.interpolation) | [((4959, 5017), 'collections.namedtuple', 'collections.namedtuple', (['"""params_ret"""', "['i', 'j', 'h', 'w']"], {}), "('params_ret', ['i', 'j', 'h', 'w'])\n", (4981, 5017), False, 'import collections\n'), ((8071, 8109), 'random.uniform', 'random.uniform', (['degrees[0]', 'degrees[1]'], {}), '(degrees[0], degrees[1])\n', (8085, 8109), False, 'import random\n'), ((2737, 2752), 'random.random', 'random.random', ([], {}), '()\n', (2750, 2752), False, 'import random\n'), ((3417, 3432), 'random.random', 'random.random', ([], {}), '()\n', (3430, 3432), False, 'import random\n'), ((5182, 5204), 'random.uniform', 'random.uniform', (['*ratio'], {}), '(*ratio)\n', (5196, 5204), False, 'import random\n'), ((5125, 5147), 'random.uniform', 'random.uniform', (['*scale'], {}), '(*scale)\n', (5139, 5147), False, 'import random\n'), ((5354, 5369), 'random.random', 'random.random', ([], {}), '()\n', (5367, 5369), False, 'import random\n'), ((5482, 5517), 'random.randint', 'random.randint', (['(0)', '(img.shape[0] - h)'], {}), '(0, img.shape[0] - h)\n', (5496, 5517), False, 'import random\n'), ((5538, 5573), 'random.randint', 'random.randint', (['(0)', '(img.shape[1] - w)'], {}), '(0, img.shape[1] - w)\n', (5552, 5573), False, 'import random\n'), ((5232, 5269), 'math.sqrt', 'math.sqrt', (['(target_area * aspect_ratio)'], {}), '(target_area * aspect_ratio)\n', (5241, 5269), False, 'import math\n'), ((5298, 5335), 'math.sqrt', 'math.sqrt', (['(target_area / aspect_ratio)'], {}), '(target_area / aspect_ratio)\n', (5307, 5335), False, 'import math\n')] |
galterlibrary/InvenioRDM-at-NU | tests/ui/terms/test_views.py | 5aff6ac7c428c9a61bdf221627bfc05f2280d1a3 | # -*- coding: utf-8 -*-
#
# This file is part of menRva.
# Copyright (C) 2018-present NU,FSM,GHSL.
#
# menRva is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Test terms views.py"""
from cd2h_repo_project.modules.terms.views import serialize_terms_for_edit_ui
def test_serialize_terms_for_edit_ui(create_record):
deposit = create_record(
{
'terms': [
{'source': 'MeSH', 'value': 'Cognitive Neuroscience'},
{'source': 'FAST', 'value': 'Border terrier'}
]
},
published=False
)
serialized_deposit = serialize_terms_for_edit_ui(deposit)
assert 'terms' not in serialized_deposit
assert serialized_deposit['mesh_terms'] == [
{
'data': {'source': 'MeSH', 'value': 'Cognitive Neuroscience'}
}
]
assert serialized_deposit['fast_terms'] == [
{
'data': {'source': 'FAST', 'value': 'Border terrier'}
}
]
def test_serialize_terms_for_edit_ui_no_terms(create_record):
deposit = create_record(published=False)
serialized_deposit = serialize_terms_for_edit_ui(deposit)
assert 'terms' not in serialized_deposit
assert serialized_deposit['mesh_terms'] == []
assert serialized_deposit['fast_terms'] == []
| [((679, 715), 'cd2h_repo_project.modules.terms.views.serialize_terms_for_edit_ui', 'serialize_terms_for_edit_ui', (['deposit'], {}), '(deposit)\n', (706, 715), False, 'from cd2h_repo_project.modules.terms.views import serialize_terms_for_edit_ui\n'), ((1187, 1223), 'cd2h_repo_project.modules.terms.views.serialize_terms_for_edit_ui', 'serialize_terms_for_edit_ui', (['deposit'], {}), '(deposit)\n', (1214, 1223), False, 'from cd2h_repo_project.modules.terms.views import serialize_terms_for_edit_ui\n')] |
alamin3637k/Searcher | main.py | bb948b373d1bd1261930a47c37fa9210a98e9ef3 | import webbrowser
import wikipedia
import requests
def yt_search(search: str):
webbrowser.open_new_tab(f"https://www.youtube.com/results?search_query={search}")
def google_search(search: str):
webbrowser.open_new_tab(f"https://www.google.com/search?q={search}")
def bing_search(search: str):
webbrowser.open_new_tab(f"https://www.bing.com/search?q={search}")
def duck_search(search: str):
webbrowser.open_new_tab(f"https://duckduckgo.com/?q={search}")
def yahoo_search(search: str):
webbrowser.open_new_tab(f"https://search.yahoo.com/search?p={search}")
def ask_search(search: str):
webbrowser.open_new_tab(f"https://www.ask.com/web?q={search}")
def yandex_search(search: str):
webbrowser.open_new_tab(f"https://yandex.com/search/?text={search}")
def ecosia_search(search: str):
webbrowser.open_new_tab(f"https://www.ecosia.org/search?q={search}")
def fb_search(search: str):
webbrowser.open_new_tab(f"https://www.facebook.com/search/top/?q={search}")
def wiki_terminal_search(search: str, lang='en', sentence=1):
try:
wikipedia.set_lang(lang)
print(wikipedia.summary(search, sentences=sentence))
except Exception as error:
print(error)
return error
def mail_search(search: str):
webbrowser.open_new_tab(f"https://mail.google.com/mail/u/0/#search/{search}")
def wiki_web_search(search: str):
webbrowser.open_new_tab(f"https://en.wikipedia.org/wiki/{search}")
def test_site(search: str):
"""please enter site name with http information"""
try:
r = requests.get(search)
except Exception as error:
print(error)
return "site not working"
if r.status_code == 200:
print("site working")
return "site working"
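# Example calls (illustrative search terms and URL):
# google_search("python webbrowser module")
# wiki_terminal_search("Alan Turing", lang="en", sentence=2)
# test_site("https://www.example.com")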
| [((84, 170), 'webbrowser.open_new_tab', 'webbrowser.open_new_tab', (['f"""https://www.youtube.com/results?search_query={search}"""'], {}), "(\n f'https://www.youtube.com/results?search_query={search}')\n", (107, 170), False, 'import webbrowser\n'), ((203, 271), 'webbrowser.open_new_tab', 'webbrowser.open_new_tab', (['f"""https://www.google.com/search?q={search}"""'], {}), "(f'https://www.google.com/search?q={search}')\n", (226, 271), False, 'import webbrowser\n'), ((307, 373), 'webbrowser.open_new_tab', 'webbrowser.open_new_tab', (['f"""https://www.bing.com/search?q={search}"""'], {}), "(f'https://www.bing.com/search?q={search}')\n", (330, 373), False, 'import webbrowser\n'), ((409, 471), 'webbrowser.open_new_tab', 'webbrowser.open_new_tab', (['f"""https://duckduckgo.com/?q={search}"""'], {}), "(f'https://duckduckgo.com/?q={search}')\n", (432, 471), False, 'import webbrowser\n'), ((508, 578), 'webbrowser.open_new_tab', 'webbrowser.open_new_tab', (['f"""https://search.yahoo.com/search?p={search}"""'], {}), "(f'https://search.yahoo.com/search?p={search}')\n", (531, 578), False, 'import webbrowser\n'), ((613, 675), 'webbrowser.open_new_tab', 'webbrowser.open_new_tab', (['f"""https://www.ask.com/web?q={search}"""'], {}), "(f'https://www.ask.com/web?q={search}')\n", (636, 675), False, 'import webbrowser\n'), ((713, 781), 'webbrowser.open_new_tab', 'webbrowser.open_new_tab', (['f"""https://yandex.com/search/?text={search}"""'], {}), "(f'https://yandex.com/search/?text={search}')\n", (736, 781), False, 'import webbrowser\n'), ((819, 887), 'webbrowser.open_new_tab', 'webbrowser.open_new_tab', (['f"""https://www.ecosia.org/search?q={search}"""'], {}), "(f'https://www.ecosia.org/search?q={search}')\n", (842, 887), False, 'import webbrowser\n'), ((921, 996), 'webbrowser.open_new_tab', 'webbrowser.open_new_tab', (['f"""https://www.facebook.com/search/top/?q={search}"""'], {}), "(f'https://www.facebook.com/search/top/?q={search}')\n", (944, 996), False, 'import webbrowser\n'), ((1275, 1352), 'webbrowser.open_new_tab', 'webbrowser.open_new_tab', (['f"""https://mail.google.com/mail/u/0/#search/{search}"""'], {}), "(f'https://mail.google.com/mail/u/0/#search/{search}')\n", (1298, 1352), False, 'import webbrowser\n'), ((1392, 1458), 'webbrowser.open_new_tab', 'webbrowser.open_new_tab', (['f"""https://en.wikipedia.org/wiki/{search}"""'], {}), "(f'https://en.wikipedia.org/wiki/{search}')\n", (1415, 1458), False, 'import webbrowser\n'), ((1077, 1101), 'wikipedia.set_lang', 'wikipedia.set_lang', (['lang'], {}), '(lang)\n', (1095, 1101), False, 'import wikipedia\n'), ((1564, 1584), 'requests.get', 'requests.get', (['search'], {}), '(search)\n', (1576, 1584), False, 'import requests\n'), ((1116, 1161), 'wikipedia.summary', 'wikipedia.summary', (['search'], {'sentences': 'sentence'}), '(search, sentences=sentence)\n', (1133, 1161), False, 'import wikipedia\n')] |
ptsurko/coursera_crypt | hw1.py | ec952800c441a9b07ac427045851285fee8c6543 | import string
import itertools
s1 = '315c4eeaa8b5f8aaf9174145bf43e1784b8fa00dc71d885a804e5ee9fa40b16349c146fb778cdf2d3aff021dfff5b403b510d0d0455468aeb98622b137dae857553ccd8883a7bc37520e06e515d22c954eba5025b8cc57ee59418ce7dc6bc41556bdb36bbca3e8774301fbcaa3b83b220809560987815f65286764703de0f3d524400a19b159610b11ef3e'
s2 = '234c02ecbbfbafa3ed18510abd11fa724fcda2018a1a8342cf064bbde548b12b07df44ba7191d9606ef4081ffde5ad46a5069d9f7f543bedb9c861bf29c7e205132eda9382b0bc2c5c4b45f919cf3a9f1cb74151f6d551f4480c82b2cb24cc5b028aa76eb7b4ab24171ab3cdadb8356f'
s3 = '32510ba9a7b2bba9b8005d43a304b5714cc0bb0c8a34884dd91304b8ad40b62b07df44ba6e9d8a2368e51d04e0e7b207b70b9b8261112bacb6c866a232dfe257527dc29398f5f3251a0d47e503c66e935de81230b59b7afb5f41afa8d661cb'
s4 = '32510ba9aab2a8a4fd06414fb517b5605cc0aa0dc91a8908c2064ba8ad5ea06a029056f47a8ad3306ef5021eafe1ac01a81197847a5c68a1b78769a37bc8f4575432c198ccb4ef63590256e305cd3a9544ee4160ead45aef520489e7da7d835402bca670bda8eb775200b8dabbba246b130f040d8ec6447e2c767f3d30ed81ea2e4c1404e1315a1010e7229be6636aaa'
s5 = '3f561ba9adb4b6ebec54424ba317b564418fac0dd35f8c08d31a1fe9e24fe56808c213f17c81d9607cee021dafe1e001b21ade877a5e68bea88d61b93ac5ee0d562e8e9582f5ef375f0a4ae20ed86e935de81230b59b73fb4302cd95d770c65b40aaa065f2a5e33a5a0bb5dcaba43722130f042f8ec85b7c2070'
s6 = '32510bfbacfbb9befd54415da243e1695ecabd58c519cd4bd2061bbde24eb76a19d84aba34d8de287be84d07e7e9a30ee714979c7e1123a8bd9822a33ecaf512472e8e8f8db3f9635c1949e640c621854eba0d79eccf52ff111284b4cc61d11902aebc66f2b2e436434eacc0aba938220b084800c2ca4e693522643573b2c4ce35050b0cf774201f0fe52ac9f26d71b6cf61a711cc229f77ace7aa88a2f19983122b11be87a59c355d25f8e4'
s7 = '32510bfbacfbb9befd54415da243e1695ecabd58c519cd4bd90f1fa6ea5ba47b01c909ba7696cf606ef40c04afe1ac0aa8148dd066592ded9f8774b529c7ea125d298e8883f5e9305f4b44f915cb2bd05af51373fd9b4af511039fa2d96f83414aaaf261bda2e97b170fb5cce2a53e675c154c0d9681596934777e2275b381ce2e40582afe67650b13e72287ff2270abcf73bb028932836fbdecfecee0a3b894473c1bbeb6b4913a536ce4f9b13f1efff71ea313c8661dd9a4ce'
s8 = '315c4eeaa8b5f8bffd11155ea506b56041c6a00c8a08854dd21a4bbde54ce56801d943ba708b8a3574f40c00fff9e00fa1439fd0654327a3bfc860b92f89ee04132ecb9298f5fd2d5e4b45e40ecc3b9d59e9417df7c95bba410e9aa2ca24c5474da2f276baa3ac325918b2daada43d6712150441c2e04f6565517f317da9d3'
s9 = '271946f9bbb2aeadec111841a81abc300ecaa01bd8069d5cc91005e9fe4aad6e04d513e96d99de2569bc5e50eeeca709b50a8a987f4264edb6896fb537d0a716132ddc938fb0f836480e06ed0fcd6e9759f40462f9cf57f4564186a2c1778f1543efa270bda5e933421cbe88a4a52222190f471e9bd15f652b653b7071aec59a2705081ffe72651d08f822c9ed6d76e48b63ab15d0208573a7eef027'
s10 = '466d06ece998b7a2fb1d464fed2ced7641ddaa3cc31c9941cf110abbf409ed39598005b3399ccfafb61d0315fca0a314be138a9f32503bedac8067f03adbf3575c3b8edc9ba7f537530541ab0f9f3cd04ff50d66f1d559ba520e89a2cb2a83'
s11 = '32510ba9babebbbefd001547a810e67149caee11d945cd7fc81a05e9f85aac650e9052ba6a8cd8257bf14d13e6f0a803b54fde9e77472dbff89d71b57bddef121336cb85ccb8f3315f4b52e301d16e9f52f904'
def strxor(a, b): # xor two strings of different lengths
# a = a.decode('hex')
# b = b.decode('hex')
if len(a) > len(b):
return ("".join([chr(ord(x) ^ ord(y)) for (x, y) in zip(a[:len(b)], b)])) #.encode('hex')
else:
return ("".join([chr(ord(x) ^ ord(y)) for (x, y) in zip(a, b[:len(a)])])) #.encode('hex')
def random(size=16):
return open("/dev/urandom").read(size)
def encrypt(key, msg):
c = strxor(key, msg)
print
print c #.encode('hex')
return c
MSGS = (s1,s2,s3,s4,s5,s6,s7,s8,s9,s10,s11)
MSGS_DECODED = [s.decode('hex') for s in MSGS]
def combinations(iterable, r):
indices = range(r)
n = len(iterable)
while True:
yield tuple(i for i in indices) + (tuple(iterable[i] for i in indices),)
for i in reversed(range(r)):
if indices[i] < n - (r - i):
indices[i] = indices[i] + 1
for j in range(i + 1, r):
indices[j] = indices[j - 1] + 1
break
else:
return
# def main():
# for c in combinations('ABCD', 2):
# print c
def output_combinations_table():
comb = [(i1, i2, strxor(s1.decode('hex'), s2.decode('hex'))) for i1, i2, (s1,s2) in combinations(MSGS, 2)]
html = '<html><body>'
html += '<table style="white-space:nowrap" border="1">'
html += '<thead>'
html += '<tr>'
# WTF???
# max_len = max(combinations, key=lambda x: len(x))
max_len = 0
for i1, i2, c in comb:
if len(c) > max_len:
max_len = len(c)
# print max_len
html += '<th></th>'
for i in xrange(max_len):
html += '<th>' + str(i) + '</th>'
html += '</tr>'
html += '</thead>'
for i1, i2, c in comb:
html += '<tr>'
html += '<td>(%s, %s)</td>' % (i1 + 1, i2 + 1)
for ch in c:
html += '<td>'
html += '%02d' % ord(ch)
if ch in string.printable:
html += '<br />'
html += '&#%d;' % ord(ch)
html += '</td>'
html += '</tr>'
html += '<tr>'
html += '<th></th>'
for i in xrange(max_len):
html += '<th>' + str(i) + '</th>'
html += '</tr>'
html += '</table>'
html += '</body>'
html += '</html>'
with open('combinations.html', 'w') as f:
f.write(html)
def key_by_space(ct_index, ch_index):
return key_by_guess(ct_index, ch_index)
def key_by_guess(ct_index, ch_index, guess=' '):
return strxor(MSGS_DECODED[ct_index][ch_index], guess)
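# Why guessing a space works (many-time pad / crib dragging): every ciphertext
# byte is key ^ msg, so when the plaintext byte really is the guessed character
# (' ' is by far the most common), ct ^ guess equals the key byte. Sanity sketch
# using the helpers above:
# key_byte = strxor(MSGS_DECODED[9][0], ' ') # same as key_by_space(9, 0)
# print strxor(key_byte, MSGS_DECODED[0][0]) # if the guess is right, this is s1's first plaintext char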
def main():
output_combinations_table()
key = [
lambda : key_by_space(9, 0),
lambda : key_by_space(8, 1),
lambda : key_by_space(0, 2),
lambda : key_by_space(4, 3),
lambda : key_by_space(9, 4),
lambda : key_by_space(6, 5),
lambda : key_by_space(7, 6),
lambda : key_by_guess(4, 7, '\''), #???
lambda : key_by_space(2, 8),
lambda : key_by_space(6, 9),
lambda : key_by_space(10, 10),
lambda : key_by_space(1, 11),
lambda : key_by_space(9, 12),
lambda : key_by_space(6, 13),
lambda : key_by_space(4, 14),
lambda : key_by_space(8, 15),
lambda : key_by_space(8, 16),
lambda : key_by_space(4, 17),
lambda : key_by_space(10, 18),
lambda : key_by_space(6, 19),
lambda : key_by_space(7, 20),
lambda : key_by_space(4, 21),
lambda : key_by_space(6, 22),
lambda : key_by_space(4, 23),
lambda : key_by_space(0, 24),
lambda : key_by_guess(1, 25, 'y'),
lambda : key_by_space(7, 26),
lambda : key_by_space(10, 27),
lambda : key_by_space(3, 28),
lambda : key_by_space(9, 29),
lambda : key_by_space(7, 30),
lambda : key_by_space(1, 31),
lambda : key_by_space(0, 32),
lambda : key_by_space(10, 33),
lambda : key_by_space(8, 34),
lambda : key_by_space(10, 35),
lambda : key_by_space(9, 36),
lambda : key_by_space(5, 37),
lambda : key_by_space(7, 38),
lambda : key_by_space(1, 39),
lambda : key_by_space(0, 40),
lambda : key_by_space(8, 41),
lambda : key_by_space(5, 42),
lambda : key_by_guess(3, 43, 'n'),
lambda : key_by_space(4, 44),
lambda : key_by_guess(7, 45, 'y'),
lambda : key_by_space(7, 46),
lambda : key_by_guess(10, 47, 'e'),
lambda : key_by_guess(10, 48, 'r'),
lambda : key_by_space(7, 49),
lambda : key_by_guess(3, 50, 'i'),
lambda : key_by_guess(3, 51, 't'),
lambda : key_by_guess(3, 52, 'h'),
lambda : key_by_guess(3, 53, 'm'),
lambda : key_by_space(4, 54),
lambda : key_by_space(1, 55),
lambda : key_by_space(10, 56),
lambda : key_by_space(1, 57),
lambda : key_by_space(0, 58),
lambda : key_by_space(9, 59),
lambda : key_by_space(3, 60),
lambda : key_by_space(7, 61),
lambda : key_by_guess(0, 62, 'o'),
lambda : key_by_space(0, 63),
lambda : key_by_space(10, 64),
lambda : key_by_guess(6, 65, 't'),
lambda : key_by_space(5, 66),
lambda : key_by_guess(10, 67, 'y'),
lambda : key_by_space(10, 68),
lambda : key_by_space(7, 69),
lambda : key_by_space(1, 70),
lambda : key_by_space(3, 71),
lambda : key_by_space(2, 72),
lambda : key_by_space(1, 73),
lambda : key_by_space(0, 74),
lambda : key_by_guess(10, 75, 'h'),
lambda : key_by_guess(10, 76, 'a'),
lambda : key_by_guess(10, 77, 'n'),
lambda : key_by_space(10, 78),
lambda : key_by_guess(3, 79, 'e'),
lambda : key_by_guess(3, 80, 'x'),
lambda : key_by_guess(3, 81, 't'),
lambda : key_by_guess(10, 82, 'e'),
lambda : key_by_guess(6, 83, 'c'),
lambda : key_by_guess(6, 84, 'e'),
# lambda : key_by_guess(6, 68, 't'),
]
for i, s in enumerate(MSGS):
print '%2d: %s' % (i + 1, ''.join([strxor(k(), ch) for k, ch in itertools.izip(key, s.decode('hex'))]))
if __name__ == "__main__":
main() | [] |
dstoeckel/MOE | moe/bandit/ucb/ucb_interface.py | 5b5a6a2c6c3cf47320126f7f5894e2a83e347f5c | # -*- coding: utf-8 -*-
"""Classes (Python) to compute the Bandit UCB (Upper Confidence Bound) arm allocation and choosing the arm to pull next.
See :mod:`moe.bandit.bandit_interface` for further details on bandit.
"""
import copy
from abc import abstractmethod
from moe.bandit.bandit_interface import BanditInterface
from moe.bandit.utils import get_winning_arm_names_from_payoff_arm_name_list, get_equal_arm_allocations
class UCBInterface(BanditInterface):
r"""Implementation of the constructor of UCB (Upper Confidence Bound) and method allocate_arms. The method get_ucb_payoff is implemented in subclass.
A class to encapsulate the computation of bandit UCB.
The Algorithm: http://moodle.technion.ac.il/pluginfile.php/192340/mod_resource/content/0/UCB.pdf
To inherit this class, a subclass needs to implement get_ucb_payoff
(see :func:`moe.bandit.ucb.ucb1.UCB1.get_ucb_payoff` for an example), everything else is already implemented.
See :mod:`moe.bandit.bandit_interface` docs for further details.
"""
def __init__(
self,
historical_info,
subtype=None,
):
"""Construct a UCB object.
:param historical_info: a dictionary of arms sampled
:type historical_info: dictionary of (str, SampleArm()) pairs (see :class:`moe.bandit.data_containers.SampleArm` for more details)
:param subtype: subtype of the UCB bandit algorithm (default: None)
:type subtype: str
"""
self._historical_info = copy.deepcopy(historical_info)
self._subtype = subtype
@staticmethod
def get_unsampled_arm_names(arms_sampled):
r"""Compute the set of unsampled arm names based on the given ``arms_sampled``..
Throws an exception when arms_sampled is empty.
:param arms_sampled: a dictionary of arm name to :class:`moe.bandit.data_containers.SampleArm`
:type arms_sampled: dictionary of (str, SampleArm()) pairs
:return: set of names of the unsampled arms
:rtype: frozenset(str)
:raise: ValueError when ``arms_sampled`` are empty.
"""
if not arms_sampled:
raise ValueError('arms_sampled is empty!')
unsampled_arm_name_list = [name for name, sampled_arm in arms_sampled.iteritems() if sampled_arm.total == 0]
return frozenset(unsampled_arm_name_list)
@abstractmethod
def get_ucb_payoff(self, sampled_arm, number_sampled):
r"""Compute the expected upper confidence bound payoff using the UCB subtype formula.
See definition in subclasses for details.
:param sampled_arm: a sampled arm
:type sampled_arm: :class:`moe.bandit.data_containers.SampleArm`
:param number_sampled: the overall number of pulls so far
:type number_sampled: int
:return: ucb payoff
:rtype: float64
:raise: ValueError when ``sampled_arm`` is empty.
"""
pass
def allocate_arms(self):
r"""Compute the allocation to each arm given ``historical_info``, running bandit ``subtype`` endpoint.
        Computes the allocation to each arm based on the given subtype and historical info.
Works with k-armed bandits (k >= 1).
The Algorithm: http://moodle.technion.ac.il/pluginfile.php/192340/mod_resource/content/0/UCB.pdf
If there is at least one unsampled arm, this method will choose to pull the unsampled arm
(randomly choose an unsampled arm if there are multiple unsampled arms).
If all arms are pulled at least once, this method will pull the optimal arm
(best expected upper confidence bound payoff).
See :func:`moe.bandit.ucb.ucb_interface.UCBInterface.get_ucb_payoff` for details on how to compute the expected upper confidence bound payoff (expected UCB payoff)
In case of a tie, the method will split the allocation among the optimal arms.
For example, if we have three arms (arm1, arm2, and arm3) with expected UCB payoff 0.5, 0.5, and 0.1 respectively.
We split the allocation between the optimal arms arm1 and arm2.
``{arm1: 0.5, arm2: 0.5, arm3: 0.0}``
:return: the dictionary of (arm, allocation) key-value pairs
:rtype: a dictionary of (str, float64) pairs
:raise: ValueError when ``sample_arms`` are empty.
"""
arms_sampled = self._historical_info.arms_sampled
if not arms_sampled:
raise ValueError('sample_arms are empty!')
return get_equal_arm_allocations(arms_sampled, self.get_winning_arm_names(arms_sampled))
def get_winning_arm_names(self, arms_sampled):
r"""Compute the set of winning arm names based on the given ``arms_sampled``..
Throws an exception when arms_sampled is empty.
:param arms_sampled: a dictionary of arm name to :class:`moe.bandit.data_containers.SampleArm`
:type arms_sampled: dictionary of (str, SampleArm()) pairs
:return: set of names of the winning arms
:rtype: frozenset(str)
:raise: ValueError when ``arms_sampled`` are empty.
"""
if not arms_sampled:
raise ValueError('arms_sampled is empty!')
# If there exists an unsampled arm, return the names of the unsampled arms
unsampled_arm_names = self.get_unsampled_arm_names(arms_sampled)
if unsampled_arm_names:
return unsampled_arm_names
number_sampled = sum([sampled_arm.total for sampled_arm in arms_sampled.itervalues()])
ucb_payoff_arm_name_list = [(self.get_ucb_payoff(sampled_arm, number_sampled), arm_name) for arm_name, sampled_arm in arms_sampled.iteritems()]
return get_winning_arm_names_from_payoff_arm_name_list(ucb_payoff_arm_name_list)
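# Minimal illustrative subtype showing the get_ucb_payoff contract. The
# sqrt(2*ln(N)/n) term below is the textbook UCB1 exploration bonus; the
# project's real subtypes live in moe.bandit.ucb (e.g. ucb1.py) and may differ,
# and the ``win`` attribute is assumed from moe.bandit.data_containers.SampleArm.
class ExampleUCB1(UCBInterface):
    """Sketch of a UCB subtype using the standard UCB1 upper confidence bound."""
    def get_ucb_payoff(self, sampled_arm, number_sampled):
        """Return the arm's average payoff plus the UCB1 exploration bonus."""
        import math
        if not sampled_arm:
            raise ValueError('sampled_arm is empty!')
        if sampled_arm.total == 0:
            return float('inf')
        avg_payoff = float(sampled_arm.win) / sampled_arm.total
        return avg_payoff + math.sqrt(2.0 * math.log(number_sampled) / sampled_arm.total)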
| [((1529, 1559), 'copy.deepcopy', 'copy.deepcopy', (['historical_info'], {}), '(historical_info)\n', (1542, 1559), False, 'import copy\n'), ((5703, 5776), 'moe.bandit.utils.get_winning_arm_names_from_payoff_arm_name_list', 'get_winning_arm_names_from_payoff_arm_name_list', (['ucb_payoff_arm_name_list'], {}), '(ucb_payoff_arm_name_list)\n', (5750, 5776), False, 'from moe.bandit.utils import get_winning_arm_names_from_payoff_arm_name_list, get_equal_arm_allocations\n')] |
RonaldoAPSD/Hedge | Hedge/Shell.py | 2a1550ea38a0384f39ed3541c8a91f9ca57f5a64 | import Hedge
while True:
text = input('Hedge > ')
if text.strip() == "":
continue
result, error = Hedge.run('<stdin>', text)
if (error):
print(error.asString())
elif result:
if len(result.elements) == 1:
print(repr(result.elements[0]))
else:
print(repr(result)) | [((110, 136), 'Hedge.run', 'Hedge.run', (['"""<stdin>"""', 'text'], {}), "('<stdin>', text)\n", (119, 136), False, 'import Hedge\n')] |
Xarthisius/yt | yt/frontends/enzo/io.py | 321643c3abff64a6f132d98d0747f3558f7552a3 | import numpy as np
from yt.geometry.selection_routines import GridSelector
from yt.utilities.io_handler import BaseIOHandler
from yt.utilities.logger import ytLogger as mylog
from yt.utilities.on_demand_imports import _h5py as h5py
_convert_mass = ("particle_mass", "mass")
_particle_position_names = {}
class IOHandlerPackedHDF5(BaseIOHandler):
_dataset_type = "enzo_packed_3d"
_base = slice(None)
_field_dtype = "float64"
def _read_field_names(self, grid):
if grid.filename is None:
return []
f = h5py.File(grid.filename, mode="r")
try:
group = f["/Grid%08i" % grid.id]
except KeyError:
group = f
fields = []
dtypes = set()
add_io = "io" in grid.ds.particle_types
add_dm = "DarkMatter" in grid.ds.particle_types
for name, v in group.items():
# NOTE: This won't work with 1D datasets or references.
# For all versions of Enzo I know about, we can assume all floats
# are of the same size. So, let's grab one.
if not hasattr(v, "shape") or v.dtype == "O":
continue
elif len(v.dims) == 1:
if grid.ds.dimensionality == 1:
fields.append(("enzo", str(name)))
elif add_io:
fields.append(("io", str(name)))
elif add_dm:
fields.append(("DarkMatter", str(name)))
else:
fields.append(("enzo", str(name)))
dtypes.add(v.dtype)
if len(dtypes) == 1:
# Now, if everything we saw was the same dtype, we can go ahead and
# set it here. We do this because it is a HUGE savings for 32 bit
# floats, since our numpy copying/casting is way faster than
# h5py's, for some reason I don't understand. This does *not* need
# to be correct -- it will get fixed later -- it just needs to be
# okay for now.
self._field_dtype = list(dtypes)[0]
f.close()
return fields
@property
def _read_exception(self):
return (KeyError,)
def _read_particle_coords(self, chunks, ptf):
yield from self._read_particle_fields(chunks, ptf, None)
def _read_particle_fields(self, chunks, ptf, selector):
chunks = list(chunks)
for chunk in chunks: # These should be organized by grid filename
f = None
for g in chunk.objs:
if g.filename is None:
continue
if f is None:
# print("Opening (read) %s" % g.filename)
f = h5py.File(g.filename, mode="r")
nap = sum(g.NumberOfActiveParticles.values())
if g.NumberOfParticles == 0 and nap == 0:
continue
ds = f.get("/Grid%08i" % g.id)
for ptype, field_list in sorted(ptf.items()):
if ptype == "io":
if g.NumberOfParticles == 0:
continue
pds = ds
elif ptype == "DarkMatter":
if g.NumberOfActiveParticles[ptype] == 0:
continue
pds = ds
elif not g.NumberOfActiveParticles[ptype]:
continue
else:
for pname in ["Active Particles", "Particles"]:
pds = ds.get(f"{pname}/{ptype}")
if pds is not None:
break
else:
raise RuntimeError(
"Could not find active particle group in data."
)
pn = _particle_position_names.get(ptype, r"particle_position_%s")
x, y, z = (
np.asarray(pds.get(pn % ax)[()], dtype="=f8") for ax in "xyz"
)
if selector is None:
# This only ever happens if the call is made from
# _read_particle_coords.
yield ptype, (x, y, z)
continue
mask = selector.select_points(x, y, z, 0.0)
if mask is None:
continue
for field in field_list:
data = np.asarray(pds.get(field)[()], "=f8")
if field in _convert_mass:
data *= g.dds.prod(dtype="f8")
yield (ptype, field), data[mask]
if f:
f.close()
def io_iter(self, chunks, fields):
h5_dtype = self._field_dtype
for chunk in chunks:
fid = None
filename = -1
for obj in chunk.objs:
if obj.filename is None:
continue
if obj.filename != filename:
# Note one really important thing here: even if we do
# implement LRU caching in the _read_obj_field function,
# we'll still be doing file opening and whatnot. This is a
# problem, but one we can return to.
if fid is not None:
fid.close()
fid = h5py.h5f.open(
obj.filename.encode("latin-1"), h5py.h5f.ACC_RDONLY
)
filename = obj.filename
for field in fields:
nodal_flag = self.ds.field_info[field].nodal_flag
dims = obj.ActiveDimensions[::-1] + nodal_flag[::-1]
data = np.empty(dims, dtype=h5_dtype)
yield field, obj, self._read_obj_field(obj, field, (fid, data))
if fid is not None:
fid.close()
def _read_obj_field(self, obj, field, fid_data):
if fid_data is None:
fid_data = (None, None)
fid, data = fid_data
if fid is None:
close = True
fid = h5py.h5f.open(obj.filename.encode("latin-1"), h5py.h5f.ACC_RDONLY)
else:
close = False
if data is None:
data = np.empty(obj.ActiveDimensions[::-1], dtype=self._field_dtype)
ftype, fname = field
try:
node = "/Grid%08i/%s" % (obj.id, fname)
dg = h5py.h5d.open(fid, node.encode("latin-1"))
except KeyError:
if fname == "Dark_Matter_Density":
data[:] = 0
return data.T
raise
dg.read(h5py.h5s.ALL, h5py.h5s.ALL, data)
# I don't know why, but on some installations of h5py this works, but
# on others, nope. Doesn't seem to be a version thing.
# dg.close()
if close:
fid.close()
return data.T
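# For orientation: the low-level h5py.h5f/h5py.h5d calls in _read_obj_field are
# roughly equivalent to the high-level access sketched below (file name, grid id
# and field name are placeholders); the low-level path lets one open handle be
# reused across fields and read straight into a preallocated buffer.
#   with h5py.File(filename, mode="r") as f:
#       data = f["/Grid%08i/%s" % (grid_id, field_name)][()]
#   return data.T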
class IOHandlerPackedHDF5GhostZones(IOHandlerPackedHDF5):
_dataset_type = "enzo_packed_3d_gz"
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
NGZ = self.ds.parameters.get("NumberOfGhostZones", 3)
self._base = (slice(NGZ, -NGZ), slice(NGZ, -NGZ), slice(NGZ, -NGZ))
def _read_obj_field(self, *args, **kwargs):
return super()._read_obj_field(*args, **kwargs)[self._base]
class IOHandlerInMemory(BaseIOHandler):
_dataset_type = "enzo_inline"
def __init__(self, ds, ghost_zones=3):
self.ds = ds
import enzo
self.enzo = enzo
self.grids_in_memory = enzo.grid_data
self.old_grids_in_memory = enzo.old_grid_data
self.my_slice = (
slice(ghost_zones, -ghost_zones),
slice(ghost_zones, -ghost_zones),
slice(ghost_zones, -ghost_zones),
)
BaseIOHandler.__init__(self, ds)
def _read_field_names(self, grid):
fields = []
add_io = "io" in grid.ds.particle_types
for name, v in self.grids_in_memory[grid.id].items():
# NOTE: This won't work with 1D datasets or references.
if not hasattr(v, "shape") or v.dtype == "O":
continue
elif v.ndim == 1:
if grid.ds.dimensionality == 1:
fields.append(("enzo", str(name)))
elif add_io:
fields.append(("io", str(name)))
else:
fields.append(("enzo", str(name)))
return fields
def _read_fluid_selection(self, chunks, selector, fields, size):
rv = {}
# Now we have to do something unpleasant
chunks = list(chunks)
if isinstance(selector, GridSelector):
if not (len(chunks) == len(chunks[0].objs) == 1):
raise RuntimeError
g = chunks[0].objs[0]
for ftype, fname in fields:
rv[(ftype, fname)] = self.grids_in_memory[g.id][fname].swapaxes(0, 2)
return rv
if size is None:
size = sum(g.count(selector) for chunk in chunks for g in chunk.objs)
for field in fields:
ftype, fname = field
fsize = size
rv[field] = np.empty(fsize, dtype="float64")
ng = sum(len(c.objs) for c in chunks)
mylog.debug(
"Reading %s cells of %s fields in %s grids",
size,
[f2 for f1, f2 in fields],
ng,
)
ind = 0
for chunk in chunks:
for g in chunk.objs:
# We want a *hard error* here.
# if g.id not in self.grids_in_memory: continue
for field in fields:
ftype, fname = field
data_view = self.grids_in_memory[g.id][fname][
self.my_slice
].swapaxes(0, 2)
nd = g.select(selector, data_view, rv[field], ind)
ind += nd
assert ind == fsize
return rv
def _read_particle_coords(self, chunks, ptf):
chunks = list(chunks)
for chunk in chunks: # These should be organized by grid filename
for g in chunk.objs:
if g.id not in self.grids_in_memory:
continue
nap = sum(g.NumberOfActiveParticles.values())
if g.NumberOfParticles == 0 and nap == 0:
continue
for ptype in sorted(ptf):
x, y, z = (
self.grids_in_memory[g.id]["particle_position_x"],
self.grids_in_memory[g.id]["particle_position_y"],
self.grids_in_memory[g.id]["particle_position_z"],
)
yield ptype, (x, y, z)
def _read_particle_fields(self, chunks, ptf, selector):
chunks = list(chunks)
for chunk in chunks: # These should be organized by grid filename
for g in chunk.objs:
if g.id not in self.grids_in_memory:
continue
nap = sum(g.NumberOfActiveParticles.values())
if g.NumberOfParticles == 0 and nap == 0:
continue
for ptype, field_list in sorted(ptf.items()):
x, y, z = (
self.grids_in_memory[g.id]["particle_position_x"],
self.grids_in_memory[g.id]["particle_position_y"],
self.grids_in_memory[g.id]["particle_position_z"],
)
mask = selector.select_points(x, y, z, 0.0)
if mask is None:
continue
for field in field_list:
data = self.grids_in_memory[g.id][field]
if field in _convert_mass:
data = data * g.dds.prod(dtype="f8")
yield (ptype, field), data[mask]
class IOHandlerPacked2D(IOHandlerPackedHDF5):
_dataset_type = "enzo_packed_2d"
_particle_reader = False
def _read_data_set(self, grid, field):
f = h5py.File(grid.filename, mode="r")
ds = f["/Grid%08i/%s" % (grid.id, field)][:]
f.close()
return ds.transpose()[:, :, None]
def _read_fluid_selection(self, chunks, selector, fields, size):
rv = {}
# Now we have to do something unpleasant
chunks = list(chunks)
if isinstance(selector, GridSelector):
if not (len(chunks) == len(chunks[0].objs) == 1):
raise RuntimeError
g = chunks[0].objs[0]
f = h5py.File(g.filename, mode="r")
gds = f.get("/Grid%08i" % g.id)
for ftype, fname in fields:
rv[(ftype, fname)] = np.atleast_3d(gds.get(fname)[()].transpose())
f.close()
return rv
if size is None:
size = sum(g.count(selector) for chunk in chunks for g in chunk.objs)
for field in fields:
ftype, fname = field
fsize = size
rv[field] = np.empty(fsize, dtype="float64")
ng = sum(len(c.objs) for c in chunks)
mylog.debug(
"Reading %s cells of %s fields in %s grids",
size,
[f2 for f1, f2 in fields],
ng,
)
ind = 0
for chunk in chunks:
f = None
for g in chunk.objs:
if f is None:
# print("Opening (count) %s" % g.filename)
f = h5py.File(g.filename, mode="r")
gds = f.get("/Grid%08i" % g.id)
if gds is None:
gds = f
for field in fields:
ftype, fname = field
ds = np.atleast_3d(gds.get(fname)[()].transpose())
nd = g.select(selector, ds, rv[field], ind) # caches
ind += nd
f.close()
return rv
class IOHandlerPacked1D(IOHandlerPackedHDF5):
_dataset_type = "enzo_packed_1d"
_particle_reader = False
def _read_data_set(self, grid, field):
f = h5py.File(grid.filename, mode="r")
ds = f["/Grid%08i/%s" % (grid.id, field)][:]
f.close()
return ds.transpose()[:, None, None]
| [((550, 584), 'yt.utilities.on_demand_imports._h5py.File', 'h5py.File', (['grid.filename'], {'mode': '"""r"""'}), "(grid.filename, mode='r')\n", (559, 584), True, 'from yt.utilities.on_demand_imports import _h5py as h5py\n'), ((7940, 7972), 'yt.utilities.io_handler.BaseIOHandler.__init__', 'BaseIOHandler.__init__', (['self', 'ds'], {}), '(self, ds)\n', (7962, 7972), False, 'from yt.utilities.io_handler import BaseIOHandler\n'), ((9396, 9493), 'yt.utilities.logger.ytLogger.debug', 'mylog.debug', (['"""Reading %s cells of %s fields in %s grids"""', 'size', '[f2 for f1, f2 in fields]', 'ng'], {}), "('Reading %s cells of %s fields in %s grids', size, [f2 for f1,\n f2 in fields], ng)\n", (9407, 9493), True, 'from yt.utilities.logger import ytLogger as mylog\n'), ((12245, 12279), 'yt.utilities.on_demand_imports._h5py.File', 'h5py.File', (['grid.filename'], {'mode': '"""r"""'}), "(grid.filename, mode='r')\n", (12254, 12279), True, 'from yt.utilities.on_demand_imports import _h5py as h5py\n'), ((13300, 13397), 'yt.utilities.logger.ytLogger.debug', 'mylog.debug', (['"""Reading %s cells of %s fields in %s grids"""', 'size', '[f2 for f1, f2 in fields]', 'ng'], {}), "('Reading %s cells of %s fields in %s grids', size, [f2 for f1,\n f2 in fields], ng)\n", (13311, 13397), True, 'from yt.utilities.logger import ytLogger as mylog\n'), ((14269, 14303), 'yt.utilities.on_demand_imports._h5py.File', 'h5py.File', (['grid.filename'], {'mode': '"""r"""'}), "(grid.filename, mode='r')\n", (14278, 14303), True, 'from yt.utilities.on_demand_imports import _h5py as h5py\n'), ((6390, 6451), 'numpy.empty', 'np.empty', (['obj.ActiveDimensions[::-1]'], {'dtype': 'self._field_dtype'}), '(obj.ActiveDimensions[::-1], dtype=self._field_dtype)\n', (6398, 6451), True, 'import numpy as np\n'), ((9309, 9341), 'numpy.empty', 'np.empty', (['fsize'], {'dtype': '"""float64"""'}), "(fsize, dtype='float64')\n", (9317, 9341), True, 'import numpy as np\n'), ((12752, 12783), 'yt.utilities.on_demand_imports._h5py.File', 'h5py.File', (['g.filename'], {'mode': '"""r"""'}), "(g.filename, mode='r')\n", (12761, 12783), True, 'from yt.utilities.on_demand_imports import _h5py as h5py\n'), ((13213, 13245), 'numpy.empty', 'np.empty', (['fsize'], {'dtype': '"""float64"""'}), "(fsize, dtype='float64')\n", (13221, 13245), True, 'import numpy as np\n'), ((2705, 2736), 'yt.utilities.on_demand_imports._h5py.File', 'h5py.File', (['g.filename'], {'mode': '"""r"""'}), "(g.filename, mode='r')\n", (2714, 2736), True, 'from yt.utilities.on_demand_imports import _h5py as h5py\n'), ((5857, 5887), 'numpy.empty', 'np.empty', (['dims'], {'dtype': 'h5_dtype'}), '(dims, dtype=h5_dtype)\n', (5865, 5887), True, 'import numpy as np\n'), ((13669, 13700), 'yt.utilities.on_demand_imports._h5py.File', 'h5py.File', (['g.filename'], {'mode': '"""r"""'}), "(g.filename, mode='r')\n", (13678, 13700), True, 'from yt.utilities.on_demand_imports import _h5py as h5py\n')] |
arisada/cidr_enum | cidr_enum.py | 1908f20ac15a83738fc1ff74ff17a7280bec769f | #!/usr/bin/env python3
"""
cidr_enum.py is a very simple tool to help enumerate IP ranges when being used with other tools
"""
import argparse
import netaddr
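# Example invocations (flags match the argparse options defined in main() below):
# python cidr_enum.py 192.168.0.0/30 10.0.0.0/31
# python cidr_enum.py -s -f ranges.txt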
def enum_ranges(ranges, do_sort):
cidrs=[]
for r in ranges:
try:
cidrs.append(netaddr.IPNetwork(r))
except Exception as e:
print("Error:", e)
return
if(do_sort):
cidrs = sorted(cidrs)
#print(cidrs)
for cidr in cidrs:
for ip in cidr:
print(ip)
def main():
  parser = argparse.ArgumentParser(description='Enumerate CIDR ranges')
parser.add_argument('ranges', metavar='range', type=str, nargs='*',
help='List of CIDR ranges to enumerate')
parser.add_argument('-f', '--files', metavar='file', type=str, nargs='*',
help='List of files to retrieve CIDR ranges to enumerate')
parser.add_argument('-s', '--sort', action='store_true', help='Sort CIDR ranges')
args = parser.parse_args()
if args.files:
files = list(args.files)
else:
files = []
ranges = list(args.ranges)
if not (files or ranges):
    print ("Please give a list of ranges or input files")
parser.print_help()
return
for f in files:
with open(f, "r") as fd:
for l in fd.readlines():
ranges.append(l.strip())
enum_ranges(ranges, do_sort=args.sort)
if __name__ == '__main__':
main()
| [((452, 512), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Enumarate CIDR ranges"""'}), "(description='Enumarate CIDR ranges')\n", (475, 512), False, 'import argparse\n'), ((246, 266), 'netaddr.IPNetwork', 'netaddr.IPNetwork', (['r'], {}), '(r)\n', (263, 266), False, 'import netaddr\n')] |
lambert-x/video_semisup | configs/k400-fixmatch-tg-alignment-videos-ptv-simclr/8gpu/r3d_r18_8x8x1_45e_k400_rgb_offlinetg_1percent_align0123_1clip_no_contrast_precisebn_ptv.py | 8ff44343bb34485f8ad08d50ca4d8de22e122c1d | # model settings
model = dict(
type='Semi_AppSup_TempSup_SimCLR_Crossclip_PTV_Recognizer3D',
backbone=dict(
type='ResNet3d',
depth=18,
pretrained=None,
pretrained2d=False,
norm_eval=False,
conv_cfg=dict(type='Conv3d'),
norm_cfg=dict(type='SyncBN', requires_grad=True, eps=1e-3),
act_cfg=dict(type='ReLU'),
conv1_kernel=(3, 7, 7),
conv1_stride_t=1,
pool1_stride_t=1,
inflate=(1, 1, 1, 1),
spatial_strides=(1, 2, 2, 2),
temporal_strides=(1, 2, 2, 2),
zero_init_residual=False),
cls_head=dict(
type='I3DHead',
num_classes=400,
in_channels=512,
spatial_type='avg',
dropout_ratio=0.5,
init_std=0.01),
cls_head_temp=None,
temp_backbone='same',
temp_sup_head='same',
train_cfg=dict(
warmup_epoch=10,
fixmatch_threshold=0.3,
temp_align_indices=(0, 1, 2, 3),
align_loss_func='Cosine',
pseudo_label_metric='avg',
crossclip_contrast_loss=[],
crossclip_contrast_range=[],
),
test_cfg=dict(average_clips='score'))
# dataset settings
dataset_type = 'VideoDataset'
dataset_type_labeled = 'VideoDataset_Contrastive'
dataset_type_unlabeled = 'UnlabeledVideoDataset_MultiView_Contrastive'
# dataset_type_appearance = 'RawframeDataset_withAPP'
data_root = 'data/kinetics400/videos_train'
data_root_val = 'data/kinetics400/videos_val'
labeled_percentage = 1
ann_file_train_labeled = f'data/kinetics400/videossl_splits/kinetics400_train_{labeled_percentage}_percent_labeled_videos.txt'
ann_file_train_unlabeled = 'data/kinetics400/kinetics400_train_list_videos.txt'
ann_file_val = 'data/kinetics400/kinetics400_val_list_videos.txt'
ann_file_test = 'data/kinetics400/kinetics400_val_list_videos.txt'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False)
train_pipeline = [
dict(type='DecordInit'),
dict(type='SampleFrames_Custom', clip_len=8, frame_interval=8, num_clips=1,
total_frames_offset=-1),
dict(type='DecordDecode_Custom',
extra_modalities=['tempgrad']),
dict(type='Resize', scale=(-1, 256), lazy=True),
dict(type='RandomResizedCrop', lazy=True),
dict(type='Resize', scale=(224, 224), keep_ratio=False, lazy=True),
dict(type='Flip', flip_ratio=0.5, lazy=True),
dict(type='Fuse_WithDiff'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Normalize_Diff', **img_norm_cfg, raw_to_diff=False, redist_to_rgb=False),
dict(type='FormatShape', input_format='NCTHW'),
dict(type='FormatShape_Diff', input_format='NCTHW'),
dict(type='Collect', keys=['imgs', 'label', 'imgs_diff'], meta_keys=[]),
dict(type='ToTensor', keys=['imgs', 'label', 'imgs_diff'])
]
# Get the frame and resize, shared by both weak and strong
train_pipeline_weak = [
dict(type='DecordInit'),
dict(type='SampleFrames_Custom', clip_len=8, frame_interval=8, num_clips=1,
total_frames_offset=-1),
dict(type='DecordDecode_Custom',
extra_modalities=['tempgrad']),
dict(type='Resize', scale=(-1, 256), lazy=True),
dict(type='RandomResizedCrop', lazy=True),
dict(type='Resize', scale=(224, 224), keep_ratio=False, lazy=True),
dict(type='Flip', flip_ratio=0.5, lazy=True),
dict(type='Fuse_WithDiff'),
]
# Only used for strong augmentation
train_pipeline_strong = [
dict(type='Imgaug', transforms='default'),
dict(type='Imgaug_Custom', transforms='default', modality='imgs_diff')
]
# Formating the input tensors, shared by both weak and strong
train_pipeline_format = [
dict(type='Normalize', **img_norm_cfg),
dict(type='Normalize_Diff', **img_norm_cfg, raw_to_diff=False, redist_to_rgb=False),
dict(type='FormatShape', input_format='NCTHW'),
dict(type='FormatShape_Diff', input_format='NCTHW'),
dict(type='Collect', keys=['imgs', 'label', 'imgs_diff'], meta_keys=[]),
dict(type='ToTensor', keys=['imgs', 'label', 'imgs_diff'])
]
val_pipeline = [
dict(type='DecordInit'),
dict(
type='SampleFrames',
clip_len=8,
frame_interval=8,
num_clips=1,
test_mode=True),
dict(type='DecordDecode'),
dict(type='Resize', scale=(-1, 256), lazy=True),
dict(type='CenterCrop', crop_size=224, lazy=True),
dict(type='Flip', flip_ratio=0, lazy=True),
dict(type='Fuse'),
dict(type='Normalize', **img_norm_cfg),
dict(type='FormatShape', input_format='NCTHW'),
dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
dict(type='ToTensor', keys=['imgs'])
]
test_pipeline = [
dict(type='DecordInit'),
dict(
type='SampleFrames',
clip_len=8,
frame_interval=8,
num_clips=10,
test_mode=True),
dict(type='DecordDecode'),
dict(type='Resize', scale=(-1, 256)),
dict(type='ThreeCrop', crop_size=256),
dict(type='Flip', flip_ratio=0),
dict(type='Normalize', **img_norm_cfg),
dict(type='FormatShape', input_format='NCTHW'),
dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
dict(type='ToTensor', keys=['imgs'])
]
data = dict(
videos_per_gpu=8, # NOTE: Need to reduce batch size. 16 -> 5
workers_per_gpu=4, # Default: 4
train_dataloader=dict(drop_last=True, pin_memory=True),
train_labeled=dict(
type=dataset_type_labeled,
ann_file=ann_file_train_labeled,
data_prefix=data_root,
pipeline=train_pipeline,
contrast_clip_num=1
),
train_unlabeled=dict(
type=dataset_type_unlabeled,
ann_file=ann_file_train_unlabeled,
data_prefix=data_root,
pipeline_weak=train_pipeline_weak,
pipeline_strong=train_pipeline_strong,
pipeline_format=train_pipeline_format,
contrast_clip_num=1
),
val=dict(
type=dataset_type,
ann_file=ann_file_val,
data_prefix=data_root_val,
pipeline=val_pipeline,
test_mode=True),
test=dict(
type=dataset_type,
ann_file=ann_file_val,
data_prefix=data_root_val,
pipeline=test_pipeline,
test_mode=True),
precise_bn=dict(
type=dataset_type,
ann_file=ann_file_train_unlabeled,
data_prefix=data_root,
pipeline=val_pipeline),
videos_per_gpu_precise_bn=5
)
# optimizer
optimizer = dict(
type='SGD', lr=0.2, momentum=0.9,
weight_decay=0.0001) # this lr 0.2 is used for 8 gpus
optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2))
# learning policy
lr_config = dict(policy='CosineAnnealing',
min_lr=0,
warmup='linear',
warmup_ratio=0.1,
warmup_by_epoch=True,
warmup_iters=10)
total_epochs = 45 # Might need to increase this number for different splits. Default: 180
checkpoint_config = dict(interval=5, max_keep_ckpts=3)
evaluation = dict(
interval=5, metrics=['top_k_accuracy', 'mean_class_accuracy'], topk=(1, 5)) # Default: 5
log_config = dict(
interval=20, # Default: 20
hooks=[
dict(type='TextLoggerHook'),
dict(type='TensorboardLoggerHook'),
])
precise_bn = dict(num_iters=200, interval=5,
bn_range=['backbone', 'cls_head'])
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = None
load_from = None
resume_from = None
workflow = [('train', 1)]
find_unused_parameters = False
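# Illustrative way this config is consumed (a sketch assuming the mmcv/mmaction2
# style tooling this repo builds on; the path below is a placeholder for this file):
# from mmcv import Config
# cfg = Config.fromfile('path/to/this_config.py')
# cfg.model, cfg.data and cfg.optimizer then feed the training scripts.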
| [] |
willingc/pingo | experiments/rpi/gertboard/dtoa.py | 0890bf5ed763e9061320093fc3fb5f7543c5cc2c | #!/usr/bin/python2.7
# Python 2.7 version by Alex Eames of http://RasPi.TV
# functionally equivalent to the Gertboard dtoa test by Gert Jan van Loo & Myra VanInwegen
# Use at your own risk - I'm pretty sure the code is harmless, but check it yourself.
# This will not work unless you have installed py-spidev as in the README.txt file
# spi must also be enabled on your system
import spidev
import sys
from time import sleep
board_type = sys.argv[-1]
# reload spi drivers to prevent spi failures
import subprocess
unload_spi = subprocess.Popen('sudo rmmod spi_bcm2708', shell=True, stdout=subprocess.PIPE)
start_spi = subprocess.Popen('sudo modprobe spi_bcm2708', shell=True, stdout=subprocess.PIPE)
sleep(3)
def which_channel():
channel = raw_input("Which channel do you want to test? Type 0 or 1.\n") # User inputs channel number
while not channel.isdigit(): # Check valid user input
channel = raw_input("Try again - just numbers 0 or 1 please!\n") # Make them do it again if wrong
return channel
spi = spidev.SpiDev()
spi.open(0,1) # The Gertboard DAC is on SPI channel 1 (CE1 - aka GPIO7)
channel = 3 # set initial value to force user selection
common = [0,0,0,160,240] # 2nd byte common to both channels
voltages = [0.0,0.5,1.02,1.36,2.04] # voltages for display
while not (channel == 1 or channel == 0): # channel is set by user input
channel = int(which_channel()) # continue asking until answer 0 or 1 given
if channel == 1: # once proper answer given, carry on
num_list = [176,180,184,186,191] # set correct channel-dependent list for byte 1
else:
num_list = [48,52,56,58,63]
print "These are the connections for the digital to analogue test:"
if board_type == "m":
print "jumper connecting GPIO 7 to CSB"
print "Multimeter connections (set your meter to read V DC):"
print " connect black probe to GND"
print " connect red probe to DA%d on D/A header" % channel
else:
print "jumper connecting GP11 to SCLK"
print "jumper connecting GP10 to MOSI"
print "jumper connecting GP9 to MISO"
print "jumper connecting GP7 to CSnB"
print "Multimeter connections (set your meter to read V DC):"
print " connect black probe to GND"
print " connect red probe to DA%d on J29" % channel
raw_input("When ready hit enter.\n")
for i in range(5):
r = spi.xfer2([num_list[i],common[i]]) #write the two bytes to the DAC
print "Your meter should read about %.2fV" % voltages[i]
raw_input("When ready hit enter.\n")
r = spi.xfer2([16,0]) # switch off channel A = 00010000 00000000 [16,0]
r = spi.xfer2([144,0]) # switch off channel B = 10010000 00000000 [144,0]
# The DAC is controlled by writing 2 bytes (16 bits) to it.
# So we need to write a 16 bit word to DAC
# bit 15 = channel, bit 14 = ignored, bit 13 =gain, bit 12 = shutdown, bits 11-4 data, bits 3-0 ignored
# You feed spidev a decimal number and it converts it to 8 bit binary
# each argument is a byte (8 bits), so we need two arguments, which together make 16 bits.
# that's what spidev sends to the DAC. If you need to delve further, have a look at the datasheet. :)
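# Worked example matching the bit layout above: the last test value for channel A
# sends spi.xfer2([63, 240]), i.e. the 16-bit word 0011 1111 1111 0000 -
# bit 15 = 0 (channel A), bit 13 = 1 (gain), bit 12 = 1 (not shutdown),
# bits 11-4 = 11111111 (full-scale data), which gives the ~2.04V reading above.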
| [] |
themilkman/GitGutter | modules/statusbar.py | 355b4480e7e1507fe1f9ae1ad9eca9649400a76c | # -*- coding: utf-8 -*-
import sublime
from . import blame
from . import templates
class SimpleStatusBarTemplate(object):
"""A simple template class with the same interface as jinja2's one."""
# a list of variables used by this template
variables = frozenset([
'repo', 'branch', 'compare', 'inserted', 'deleted', 'modified',
'line_author', 'line_author_age'
])
@staticmethod
def render(repo=None, branch=None, compare=None, inserted=0, deleted=0,
modified=0, line_author=None, line_author_age=None, **kwargs):
"""Format the status bar text using a static set of rules.
Arguments:
repo (string): The repository name
branch (string): The branch name.
compare (string): The compared branch/tag/commit
inserted (int): The amount of inserted lines
deleted (int): The amount of deleted lines
modified (int): The amount of modified lines
line_author (string): The author of the active line
line_author_age (string): The age of the active line's change
Returns:
string: The formatted message to display in the status bar.
"""
if not repo or not branch:
return ''
parts = ['{repo}/{branch}']
# Compare against
if compare not in ('HEAD', branch, None):
parts.append('Comparing against {compare}')
# File statistics
if inserted:
parts.append('{inserted}+')
if deleted:
parts.append('{deleted}-')
if modified:
parts.append(u'{modified}≠')
# blame message
if line_author and line_author_age:
parts.append(u'⟢ {line_author} ({line_author_age})')
# join template and fill with locals
return ', '.join(parts).format(**locals())
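# Illustrative note (not part of the original module): with the rules above,
#   SimpleStatusBarTemplate.render(repo='repo', branch='master', compare='HEAD',
#                                  inserted=2, deleted=1)
# evaluates to 'repo/master, 2+, 1-'.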
class GitGutterStatusBar(object):
"""The class manages status bar text rendering.
    It stores all information which might get displayed in the status bar
    and provides functions to partially update it.
"""
def __init__(self, view, settings):
"""Initialize object."""
# the sublime.View the status bar is attached to
self.view = view
# the settings.ViewSettings object which stores GitGutter' settings
self.settings = settings
# initialize the jinja2 template
self.template = None
# the variables to use to render the status bar
self.vars = {
# sublime text git integration enabled
'st_git_status': view.settings().get('show_git_status', False),
# the repository name
'repo': None,
# the active branch name
'branch': None,
# the branch we compare against
'compare': None,
# the upstream branch name
'remote': None,
# the commits the local is ahead of upstream
'ahead': 0,
# the commits the local is behind of upstream
'behind': 0,
# repository statistics
'added_files': 0,
'deleted_files': 0,
'modified_files': 0,
'staged_files': 0,
# file statistics
'state': None,
'deleted': 0,
'inserted': 0,
'modified': 0,
}
# declare all blame variables
for var in blame.BLAME_VARIABLES:
self.vars[var] = None
def is_enabled(self):
"""Return whether status bar text is enabled in settings or not."""
enabled = self.settings.get('show_status_bar_text', False)
if self.template and not enabled:
self.template = None
self.vars['repo'] = None
self.erase()
return enabled
def has(self, variables):
"""Check if a set of variables is used by the user defined template.
Arguments:
variables (iter):
                An iterable object with all the variables to check for
existence within the active template.
Returns:
bool:
True - if at least one variable is used by the template.
False - if no variable is used by the template.
"""
try:
return any(var in self.template.variables for var in variables)
except:
return False
def erase(self):
"""Erase status bar text."""
self.view.erase_status('00_git_gutter')
def update(self, **kwargs):
"""Update a set of variables and redraw the status bar text.
Arguments:
kwargs (dict):
The dictionary of possibly changed variables to update the
status bar text with.
Raises:
KeyError, if `kwargs` contains unknown variables.
"""
want_update = False
for key, value in kwargs.items():
if self.vars[key] != value:
self.vars[key] = value
want_update = True
if want_update:
if not self.template:
self.template = templates.create(
self.settings, 'status_bar_text', SimpleStatusBarTemplate)
self.view.set_status(
'00_git_gutter', self.template.render(**self.vars))
| [] |
bunya017/Django-Polls-App | polls/tests.py | 7b71ac9d1ffb66518e1d0345bc0f11ee5907c1be | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase
class RandomTestCase(TestCase):
def test_one_plus_one1(self):
self.assertEqual(1+1, 2)
| [] |
LeonHodgesAustin/video_stream_processor | test.py | 8014705edc37599716eb1320d46c99136fe3e262 | # import logging
# import hercules.lib.util.hercules_logging as l
# from hercules.lib.util import sso as sso
import cv2
import urllib
import numpy as np
# log = l.setup_logging(__name__)
def main(args=None):
# username, passowrd = sso.get_login_credentials("WATCHER")
# Open a sample video available in sample-videos
vcap = cv2.VideoCapture('https://www.sample-videos.com/video/mp4/720/big_buck_bunny_720p_2mb.mp4')
#if not vcap.isOpened():
# print "File Cannot be Opened"
while(True):
# Capture frame-by-frame
ret, frame = vcap.read()
#print cap.isOpened(), ret
if frame is not None:
# Display the resulting frame
cv2.imshow('frame',frame)
# Press q to close the video windows before it ends if you want
if cv2.waitKey(22) & 0xFF == ord('q'):
break
else:
print("Frame is None")
break
# When everything done, release the capture
vcap.release()
cv2.destroyAllWindows()
print("Video stop")
if __name__ == "__main__":
main()
| [((349, 445), 'opencv2.VideoCapture', 'cv2.VideoCapture', (['"""https://www.sample-videos.com/video/mp4/720/big_buck_bunny_720p_2mb.mp4"""'], {}), "(\n 'https://www.sample-videos.com/video/mp4/720/big_buck_bunny_720p_2mb.mp4')\n", (365, 445), True, 'import opencv2 as cv2\n'), ((1026, 1049), 'opencv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1047, 1049), True, 'import opencv2 as cv2\n'), ((712, 738), 'opencv2.imshow', 'cv2.imshow', (['"""frame"""', 'frame'], {}), "('frame', frame)\n", (722, 738), True, 'import opencv2 as cv2\n'), ((829, 844), 'opencv2.waitKey', 'cv2.waitKey', (['(22)'], {}), '(22)\n', (840, 844), True, 'import opencv2 as cv2\n')] |
cloutiertyler/RibbonGraph | ribbon/exceptions.py | 000864dd0ee33da4ed44af2f4bd1f1a83d5a1ba4 | from rest_framework.exceptions import APIException
from rest_framework import status
class GraphAPIError(APIException):
"""Base class for exceptions in this module."""
pass
class NodeNotFoundError(GraphAPIError):
status_code = status.HTTP_404_NOT_FOUND
def __init__(self, id):
self.id = id
super(NodeNotFoundError, self).__init__("Node with id '{}' does not exist.".format(id))
class NodeTypeNotFoundError(GraphAPIError):
status_code = status.HTTP_404_NOT_FOUND
def __init__(self, node_type):
self.node_type = node_type
super(NodeTypeNotFoundError, self).__init__("Node type '{}' does not exist.".format(node_type))
class MissingNodeTypeError(GraphAPIError):
""" Creating a node requires a type. """
status_code = status.HTTP_400_BAD_REQUEST
class MalformedUpdateDictionaryError(GraphAPIError):
status_code = status.HTTP_400_BAD_REQUEST
class InvalidPropertyError(GraphAPIError):
status_code = status.HTTP_400_BAD_REQUEST
class InvalidValueError(GraphAPIError):
status_code = status.HTTP_400_BAD_REQUEST
class PermissionDenied(GraphAPIError):
status_code = status.HTTP_403_FORBIDDEN
default_detail = 'Insufficient permissions for the request.'
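# Illustrative note (not part of the original module): raising one of these from a DRF
# view, e.g. `raise NodeNotFoundError(42)`, yields an HTTP 404 response whose detail
# reads "Node with id '42' does not exist."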
| [] |
LeonardoM011/kobe-trading-bot | kobe-trading-bot/app.py | 83a84ee0fb8dab3d9ae174be91e96de6d5f2d823 | #!/usr/bin/env python3
# Crypto trading bot using binance api
# Author: LeonardoM011<[email protected]>
# Created on 2021-02-05 21:56
# Set constants here:
DELTA_TIME = 300 # How long can we check for setting up new trade (in seconds)
# ----------------------
# Imports:
import os
import sys
import time as t
import datetime
# Adding python-binance to path and importing python-binance
sys.path.insert(1, "../deps/binance")
from binance.client import Client
from fun import *
import candles as can
# Globals:
client = None
# Main program loop
def start():
hour_repeated = -1
try:
while True:
time = datetime.datetime.now()
hour = time.hour
minute = time.minute
open_trade = client.futures_get_open_orders()
if minute < 10:
if not open_trade and hour_repeated != hour:
candles = client.futures_klines(symbol="BTCUSDT", interval=Client.KLINE_INTERVAL_1HOUR, contractType="PERPETUAL")
info = can.get_candle_info(candles[:-1])
candle_side = can.get_side(info)
if candle_side:
output.print_info('Initiating trade...')
#current_price = client.futures_mark_price(symbol="BTCUSDT", contractType="PERPETUAL")['markPrice']
close_price = candles
client.futures_create_order(symbol="BTCUSDT", side=candle_side, type=Client.ORDER_TYPE_MARKET, quantity=0.001)
client.futures_create_order(symbol="BTCUSDT", side=can.flip_side(candle_side), type=Client.ORDER_TYPE_STOP_LOSS_LIMIT, quantity=0.001, price=57975.0, stopPrice=57976.0, workingType="MARK_PRICE")
hour_repeated = hour
t.sleep(300)
except KeyboardInterrupt:
print('Program canceled...')
def connect():
while True:
api_key = get_api_key("BINANCE_API_KEY")
api_secret = get_api_key("BINANCE_API_SECRET_KEY")
output.print_info('Connecting to binance...')
global client
client = Client(api_key, api_secret)
if check_connectivity(client):
output.print_ok('Successfully connected to binance.')
if check_account_status(client):
output.print_ok('Successfully connected using api keys.')
return
output.print_failed('Cannot connect to binance with api keys.')
def main():
output.print_ok('Starting kobe trading bot...')
connect()
start()
#try:
# client.get_all_orders()
#except BinanceAPIException as e:
# print e.status_code
# print e.message
# datetime.datetime.now().year
#btcusdt_price = requests.get("https://api.binance.com/api/v3/ticker/price?symbol=BTCUSDT")
#if (btcusdt_price.status_code != 200):
# print("Error connecting to api server to get price")
# return
#print("Successfully connected and got price")
#while(True):
# btcusdt_price = requests.get("https://api.binance.com/api/v3/ticker/price?symbol=BTCUSDT")
# print("BTC/USDT: {}".format(btcusdt_price.json()['price']))
# time.sleep(1.0)
#btcusdtindex = find_index_of('symbol', 'BTCUSDT', client.get_all_tickers())
#while (True):
# print(client.get_all_tickers()[btcusdtindex])
# time.sleep(5.0)
# client.futures_create_order(symbol="BTCUSDT", side="SELL", type="STOP", quantity=0.001, price=57975.0, stopPrice=57976.0, workingType="MARK_PRICE")
# client.futures_create_order(symbol="BTCUSDT", side="BUY", type="MARKET", quantity=0.001)
if __name__ == "__main__":
main() | [((421, 458), 'sys.path.insert', 'sys.path.insert', (['(1)', '"""../deps/binance"""'], {}), "(1, '../deps/binance')\n", (436, 458), False, 'import sys\n'), ((2192, 2219), 'binance.client.Client', 'Client', (['api_key', 'api_secret'], {}), '(api_key, api_secret)\n', (2198, 2219), False, 'from binance.client import Client\n'), ((679, 702), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (700, 702), False, 'import datetime\n'), ((1858, 1870), 'time.sleep', 't.sleep', (['(300)'], {}), '(300)\n', (1865, 1870), True, 'import time as t\n'), ((1080, 1113), 'candles.get_candle_info', 'can.get_candle_info', (['candles[:-1]'], {}), '(candles[:-1])\n', (1099, 1113), True, 'import candles as can\n'), ((1149, 1167), 'candles.get_side', 'can.get_side', (['info'], {}), '(info)\n', (1161, 1167), True, 'import candles as can\n'), ((1655, 1681), 'candles.flip_side', 'can.flip_side', (['candle_side'], {}), '(candle_side)\n', (1668, 1681), True, 'import candles as can\n')] |
SoumyaShreeram/Locating_AGN_in_DM_halos | imported_files/plotting_edh01.py | 1cfbee69b2c000faee4ecb199d65c3235afbed42 | # -*- coding: utf-8 -*-
"""Plotting.py for notebook 01_Exploring_DM_Halos
This python file contains all the functions used for plotting graphs and maps in the 1st notebook (.ipynb) of the repository: 01. Exploring parameters in DM halos and sub-halos
Script written by: Soumya Shreeram
Project supervised by Johan Comparat
Date created: 23rd February 2021
Last updated on 30th March 2021
"""
# astropy modules
import astropy.units as u
import astropy.io.fits as fits
from astropy.table import Table, Column
from astropy.coordinates import SkyCoord
from astropy.cosmology import FlatLambdaCDM, z_at_value
import numpy as np
# scipy modules
from scipy.spatial import KDTree
from scipy.interpolate import interp1d
import os
import importlib
# plotting imports
import matplotlib
from mpl_toolkits import axes_grid1
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d.axes3d import Axes3D
from matplotlib.ticker import LinearLocator, FormatStrFormatter
from matplotlib import cm
from matplotlib.collections import PatchCollection
from matplotlib.patches import Rectangle
import Exploring_DM_Haloes as edh
def setLabel(ax, xlabel, ylabel, title, xlim, ylim, legend=True):
"""
Function defining plot properties
@param ax :: axes to be held
@param xlabel, ylabel :: labels of the x-y axis
@param title :: title of the plot
@param xlim, ylim :: x-y limits for the axis
"""
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)
if xlim != 'default':
ax.set_xlim(xlim)
if ylim != 'default':
ax.set_ylim(ylim)
if legend:
l = ax.legend(loc='best', fontsize=14)
for legend_handle in l.legendHandles:
legend_handle._legmarker.set_markersize(12)
ax.grid(False)
ax.set_title(title, fontsize=18)
return
def plotAgnClusterDistribution(pos_z_clu, pos_z_AGN, pos_z_halo, cluster_params):
"""
Function to plot the AGN cluster distribution
@pos_z_clu :: postion and redshifts of all the selected 'clusters'
@pos_z_AGN :: postion and redshifts of all the selected AGNs
@pos_z_gal :: postion and redshifts of all the selected galaxies
"""
halo_m_500c = cluster_params[0]
fig, ax = plt.subplots(1,1,figsize=(9,8))
# plotting halos
halos = ax.plot(pos_z_halo[0], pos_z_halo[1], '.', color='#fcd16d', markersize=0.2, label=r'All DM Halos', alpha=0.2)
# plotting clusters
cluster = ax.plot(pos_z_clu[0], pos_z_clu[1], 'o', color= '#03a351', markersize=3, label=r'Clusters $M_{500c}> 10^{%.1f} M_\odot$ '%(np.log10(halo_m_500c)))
# plotting AGNs
agn = ax.plot(pos_z_AGN[0], pos_z_AGN[1], '*', color='k', markersize=3.5, label=r'AGN', alpha=0.7)
# labeling axes and defining limits
xlim = [np.min(pos_z_halo[0]), np.max(pos_z_halo[0])]
ylim = [np.min(pos_z_halo[1]), np.max(pos_z_halo[1])]
setLabel(ax, 'R.A. (deg)', 'Dec (deg)', '', xlim, ylim, legend=True)
print('Redshift z<%.2f'%(np.max(pos_z_clu[2])))
return
def plotHostSubHalos(pos_z_cen_halo, pos_z_sat_halo, pos_z_AGN):
"""
Function to plot the host and satellite halo distribution
@hd_halo :: table with all relevant info on halos, clusters, and galaxies within them
--> divided into 3 because each hd_halo holds info on 1000 halos alone
    @pos_z_AGN :: position and redshifts of all the selected AGNs
"""
ra_cen, dec_cen = pos_z_cen_halo[0], pos_z_cen_halo[1]
ra_sat, dec_sat = pos_z_sat_halo[0], pos_z_sat_halo[1]
fig, ax = plt.subplots(1,1,figsize=(9,8))
# plotting host halos
host_halos = ax.plot(ra_cen, dec_cen, '.', color= 'k', markersize=0.06, label=r'Host-halos $P_{id}=-1$', alpha=0.4)
# plotting sat halos
sat_halos = ax.plot(ra_sat, dec_sat, 'o', color='#07d9f5', markersize=0.07, label=r'Satellite halos $P_{id} \neq -1$', alpha=0.7)
# plotting AGNs
agn = ax.plot(pos_z_AGN[0], pos_z_AGN[1], '*', color='#fff717', markersize=6.5, label=r'AGN', markeredgecolor='w', markeredgewidth=0.4)
# labeling axes and defining limits
xlim = [np.min(pos_z_AGN[0]), np.max(pos_z_AGN[0])]
ylim = [np.min(pos_z_AGN[1]), np.max(pos_z_AGN[1])]
setLabel(ax, 'R.A. (deg)', 'Dec (deg)', '', xlim, ylim, legend=True)
    print('AGNs: %d, Host (central) halos: %.2e, Satellite halos: %.2e'%(len(pos_z_AGN[0]), len(ra_cen), len(ra_sat)))
return
def plotAGNfraction(pos_z_AGN, pos_z_gal, redshift_limit_agn, bin_size):
"""
Function to plot the agn fraction in the given pixel
    @pos_z_AGN :: position and redshifts of all the selected AGNs
    @pos_z_gal :: position and redshifts of all the selected galaxies
@redshift_limit_agn :: upper limit on redshift based on the clusters found
"""
fig, ax = plt.subplots(1,2,figsize=(19,7))
# getting the useful histogram properties
counts_agn, redshift_bins_agn = np.histogram(pos_z_AGN[2], bins = bin_size)
counts_gal, redshift_bins_gal = np.histogram(pos_z_gal[2], bins = bin_size)
# plotting the galaxy and agn distribution as a function of redshift
ax[0].plot(redshift_bins_gal[1:], counts_gal, 'ks', ms=4, label=r'DM Halos')
ax[0].plot(redshift_bins_agn[1:], counts_agn, 'bs', ms=4, label=r'AGNs')
# axis properties - 0
xlim = [np.min(redshift_bins_agn[1:]), np.max(redshift_bins_agn[1:])]
setLabel(ax[0], r'Redshift$_R$', 'Counts','', xlim, 'default', legend=True)
ax[0].set_yscale("log")
# agn fraction as a function of redshift
f_agn, idx = [], []
for c, c_gal in enumerate(counts_gal):
if c_gal != 0:
f_agn.append(((counts_agn[c]*100)/c_gal))
idx.append(c)
z_bin_modified = redshift_bins_gal[1:][np.array(idx)]
# plot agn fraction
ax[1].plot(z_bin_modified, f_agn, 's', color='#6b0385', ms=4)
# axis properties - 1
xlim = [np.min(redshift_bins_agn[1:])-0.02, np.max(redshift_bins_agn[1:])]
setLabel(ax[1], r'Redshift$_R$', r'$f_{AGN}$ (%s)'%"%", '', xlim, 'default', legend=False)
ax[1].set_yscale("log")
plt.savefig('figures/agn_frac.pdf', facecolor='w', edgecolor='w')
    print('Redshift z<%.2f'%redshift_limit_agn)
return redshift_bins_gal[1:]
def plotRedshiftComovingDistance(cosmo, redshift_limit, resolution = 0.0001):
"""Function to plot the relation between redshift and the comoving distance
@cosmo :: cosmology package loaded
@redshift_limit :: upper limit in redshift --> end point for interpolation
    @resolution :: resolution of time steps (set to e-4 based on simulation resolution)
@Returns :: plot showing the dependence of redshift on comoving distance
"""
fig, ax = plt.subplots(1,1,figsize=(7,6))
distance_Mpc = cosmo.comoving_distance(np.arange(0,redshift_limit, resolution))
redshifts = np.arange(0,redshift_limit, resolution)
ax.plot(redshifts, distance_Mpc, 'k.', ms=1)
setLabel(ax, 'Redshift (z)', 'Comoving distance (Mpc)', '', 'default', 'default', legend=False)
print('Redshift-Comoving distance relationship')
return
def plotMergerDistribution(merger_val_gal, counts_gal, merger_val_agn, counts_agn, cosmo, redshift_limit):
"""
Function to plot the distribution (counts) of the merger scale factor/redshift
"""
fig, ax = plt.subplots(1,1,figsize=(7,6))
ax1 = plt.gca()
ax2 = ax1.twiny()
# plot the merger distribution for galaxies and agns
ax1.plot(merger_val_gal, counts_gal, 'kx', label='DM Halos')
ax1.plot(merger_val_agn, counts_agn, 'bx', label='AGNs')
setLabel(ax1, r'Scale, $a(t)$, of last Major Merger', 'Counts', '', 'default', 'default', legend=True)
ax.set_yscale("log")
# setting the x-label on top (converting a to redshift)
a_min, a_max = np.min(merger_val_gal), np.max(merger_val_gal)
scale_factor_arr = [a_max, a_min*4, a_min*2, a_min]
ax2.set_xticks([(1/a) -1 for a in scale_factor_arr])
ax2.invert_xaxis()
ax2.set_xlabel('Redshift (z)')
ax2.xaxis.set_major_formatter(FormatStrFormatter('%.1f'))
print("Objects with merger redshifts z < %.2f"%z_at_value(cosmo.scale_factor, a_min))
plt.savefig('figures/merger_distribution_z%.2f.pdf'%redshift_limit, facecolor='w', edgecolor='w')
return
def plotCentralSatelliteScaleMergers(cen_sat_AGN, cen_sat_halo, redshift_limit):
"""
    Function to plot the central and satellite scale factors for mergers
"""
fig, ax = plt.subplots(1,1,figsize=(7,6))
labels = [r'central AGNs', r'satellite AGNs', 'central DM halos', 'satellite DM halos']
c, m, ms = ['b', '#38cee8', 'k', 'grey'], ['^', '*', '^', '*'], [9, 15, 5, 9]
mec, mew = ['w', 'k', 'k', '#abaeb3'], [0.7, 0.4, 1, 0.7]
for i in [0, 1]:
s_m_agn, c_agn = np.unique(cen_sat_AGN[i]['HALO_scale_of_last_MM'], return_counts=True)
s_m_gal, c_gal = np.unique(cen_sat_halo[i]['HALO_scale_of_last_MM'], return_counts=True)
# agns
ax.plot(s_m_agn, c_agn, color=c[i], marker=m[i], ls='', ms=ms[i], label=labels[i], markeredgecolor=mec[i], markeredgewidth=mew[i])
# DM halos
j = i + 2
ax.plot(s_m_gal, c_gal, color=c[j], marker=m[j], ls='', ms=ms[j], label=labels[j], markeredgecolor=mec[j], markeredgewidth=mew[j])
# set label
setLabel(ax, r'Scale, $a(t)$, of last Major Merger', 'Counts', '', 'default', 'default', legend=True)
ax.set_yscale("log")
plt.savefig('figures/merger_dist_cenAndsat_z%.2f.pdf'%redshift_limit, facecolor='w', edgecolor='w')
print('Objects below z: ', redshift_limit)
return [labels, c, m, ms, mec, mew]
def plotTimeSinceMergerDist(scale_merger_AGN, scale_merger_gal, z_AGN, z_gal, cosmo, bin_size, redshift_limit):
"""
Plot the distribution of halos with respective galaxies & agns given the time since merger
"""
# get the time difference since merger events in the halos
t_merger_agn = edh.getMergerTimeDifference(scale_merger_AGN, z_AGN, cosmo)
t_merger_gal = edh.getMergerTimeDifference(scale_merger_gal, z_gal, cosmo)
# get the t since merger bins and counts
if bin_size[0]:
c_t_agn, merger_bins_agn = np.histogram(np.array(t_merger_agn), bins = bin_size[1])
c_t_gal, merger_bins_gal = np.histogram(np.array(t_merger_gal), bins = bin_size[1])
merger_bins_agn = merger_bins_agn[:-1]
merger_bins_gal = merger_bins_gal[:-1]
else:
merger_bins_agn, c_t_agn = np.unique(t_merger_agn, return_counts=True)
merger_bins_gal, c_t_gal = np.unique(t_merger_gal, return_counts=True)
fig, ax = plt.subplots(1,1,figsize=(7,6))
# plot the time since merger distribution for galaxies and agns
ax.plot(merger_bins_gal, np.cumsum(c_t_gal), 'k^', label='DM Halos', ms=4)
ax.plot(merger_bins_agn, np.cumsum(c_t_agn), 'b^', label='AGNs', ms=4)
# set labels/legends
setLabel(ax, r'$\Delta t_{merger} = t(z_{merger})-t(z_{current})$ [Gyr]', 'Cumulative counts', '', 'default', 'default', legend=False)
ax.legend(loc='lower left', fontsize=14)
ax.set_yscale("log")
ax.set_xscale("log")
return ax, fig, t_merger_agn, t_merger_gal
def mergerRedshiftPlot(cen_sat_AGN, cen_sat_halo, dt_m, plot_params, redshift_limit):
"""
Function to plot the time since merger as a function of the redshift
@cen_sat_AGN(gal) :: handels to access the central and satellite AGNs(galaxies)
@dt_m :: time difference after merger for cen/sat AGNs(galaxies)
@plot_params :: to keep consistency between plots, array containing [labels, c, m, ms]
"""
fig, ax = plt.subplots(1,1,figsize=(7,6))
# change marker size for central DM halos
plot_params[3][1] = 9
z_R = [cen_sat_AGN[0]['redshift_R'], cen_sat_AGN[1]['redshift_R'], cen_sat_halo[0]['redshift_R'], cen_sat_halo[1]['redshift_R']]
# plot central, satellite merger distributions as per visual preference
for i in [2, 3, 0, 1]:
ax.plot(dt_m[i], z_R[i], plot_params[2][i], color=plot_params[1][i], ms=plot_params[3][i], label=plot_params[0][i], markeredgecolor=plot_params[4][i], markeredgewidth=plot_params[5][i])
# set labels/legends
setLabel(ax, r'$\Delta t_{merger} = t(z_{merger})-t(z_{current})$ [Gyr]', r'Redshift$_R$', '', 'default', 'default', legend=True)
ax.set_xscale("log")
plt.savefig('figures/t_since_merger_z_plot_%.2f.pdf'%redshift_limit, facecolor='w', edgecolor='w')
return ax
def plotMergerTimeCuts(ax, t_merger_cut_arr, l):
"""
Function to plot the defined cuts in merger times within the concerned plot
@t_merger_cut_arr :: array that defines the cuts in the merger times
@l :: array that defines the linestyles used to denote these cuts (refer to the initial codeblock in the notebook)
"""
for i, t_m_cut in enumerate(t_merger_cut_arr):
ax.axvline(x=t_m_cut, color='r', linestyle= l[i], label='%.1f Gyr'%t_m_cut)
ax.legend(fontsize=14, loc='lower left')
return | [((2235, 2269), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': '(9, 8)'}), '(1, 1, figsize=(9, 8))\n', (2247, 2269), True, 'import matplotlib.pyplot as plt\n'), ((3541, 3575), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': '(9, 8)'}), '(1, 1, figsize=(9, 8))\n', (3553, 3575), True, 'import matplotlib.pyplot as plt\n'), ((4793, 4828), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(2)'], {'figsize': '(19, 7)'}), '(1, 2, figsize=(19, 7))\n', (4805, 4828), True, 'import matplotlib.pyplot as plt\n'), ((4913, 4954), 'numpy.histogram', 'np.histogram', (['pos_z_AGN[2]'], {'bins': 'bin_size'}), '(pos_z_AGN[2], bins=bin_size)\n', (4925, 4954), True, 'import numpy as np\n'), ((4993, 5034), 'numpy.histogram', 'np.histogram', (['pos_z_gal[2]'], {'bins': 'bin_size'}), '(pos_z_gal[2], bins=bin_size)\n', (5005, 5034), True, 'import numpy as np\n'), ((6101, 6166), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""figures/agn_frac.pdf"""'], {'facecolor': '"""w"""', 'edgecolor': '"""w"""'}), "('figures/agn_frac.pdf', facecolor='w', edgecolor='w')\n", (6112, 6166), True, 'import matplotlib.pyplot as plt\n'), ((6719, 6753), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': '(7, 6)'}), '(1, 1, figsize=(7, 6))\n', (6731, 6753), True, 'import matplotlib.pyplot as plt\n'), ((6852, 6892), 'numpy.arange', 'np.arange', (['(0)', 'redshift_limit', 'resolution'], {}), '(0, redshift_limit, resolution)\n', (6861, 6892), True, 'import numpy as np\n'), ((7328, 7362), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': '(7, 6)'}), '(1, 1, figsize=(7, 6))\n', (7340, 7362), True, 'import matplotlib.pyplot as plt\n'), ((7370, 7379), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (7377, 7379), True, 'import matplotlib.pyplot as plt\n'), ((8193, 8296), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('figures/merger_distribution_z%.2f.pdf' % redshift_limit)"], {'facecolor': '"""w"""', 'edgecolor': '"""w"""'}), "('figures/merger_distribution_z%.2f.pdf' % redshift_limit,\n facecolor='w', edgecolor='w')\n", (8204, 8296), True, 'import matplotlib.pyplot as plt\n'), ((8494, 8528), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': '(7, 6)'}), '(1, 1, figsize=(7, 6))\n', (8506, 8528), True, 'import matplotlib.pyplot as plt\n'), ((9488, 9593), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('figures/merger_dist_cenAndsat_z%.2f.pdf' % redshift_limit)"], {'facecolor': '"""w"""', 'edgecolor': '"""w"""'}), "('figures/merger_dist_cenAndsat_z%.2f.pdf' % redshift_limit,\n facecolor='w', edgecolor='w')\n", (9499, 9593), True, 'import matplotlib.pyplot as plt\n'), ((9982, 10041), 'Exploring_DM_Haloes.getMergerTimeDifference', 'edh.getMergerTimeDifference', (['scale_merger_AGN', 'z_AGN', 'cosmo'], {}), '(scale_merger_AGN, z_AGN, cosmo)\n', (10009, 10041), True, 'import Exploring_DM_Haloes as edh\n'), ((10061, 10120), 'Exploring_DM_Haloes.getMergerTimeDifference', 'edh.getMergerTimeDifference', (['scale_merger_gal', 'z_gal', 'cosmo'], {}), '(scale_merger_gal, z_gal, cosmo)\n', (10088, 10120), True, 'import Exploring_DM_Haloes as edh\n'), ((10657, 10691), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': '(7, 6)'}), '(1, 1, figsize=(7, 6))\n', (10669, 10691), True, 'import matplotlib.pyplot as plt\n'), ((11670, 11704), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': '(7, 6)'}), '(1, 1, figsize=(7, 6))\n', (11682, 11704), True, 'import 
matplotlib.pyplot as plt\n'), ((12413, 12517), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('figures/t_since_merger_z_plot_%.2f.pdf' % redshift_limit)"], {'facecolor': '"""w"""', 'edgecolor': '"""w"""'}), "('figures/t_since_merger_z_plot_%.2f.pdf' % redshift_limit,\n facecolor='w', edgecolor='w')\n", (12424, 12517), True, 'import matplotlib.pyplot as plt\n'), ((2788, 2809), 'numpy.min', 'np.min', (['pos_z_halo[0]'], {}), '(pos_z_halo[0])\n', (2794, 2809), True, 'import numpy as np\n'), ((2811, 2832), 'numpy.max', 'np.max', (['pos_z_halo[0]'], {}), '(pos_z_halo[0])\n', (2817, 2832), True, 'import numpy as np\n'), ((2846, 2867), 'numpy.min', 'np.min', (['pos_z_halo[1]'], {}), '(pos_z_halo[1])\n', (2852, 2867), True, 'import numpy as np\n'), ((2869, 2890), 'numpy.max', 'np.max', (['pos_z_halo[1]'], {}), '(pos_z_halo[1])\n', (2875, 2890), True, 'import numpy as np\n'), ((4103, 4123), 'numpy.min', 'np.min', (['pos_z_AGN[0]'], {}), '(pos_z_AGN[0])\n', (4109, 4123), True, 'import numpy as np\n'), ((4125, 4145), 'numpy.max', 'np.max', (['pos_z_AGN[0]'], {}), '(pos_z_AGN[0])\n', (4131, 4145), True, 'import numpy as np\n'), ((4159, 4179), 'numpy.min', 'np.min', (['pos_z_AGN[1]'], {}), '(pos_z_AGN[1])\n', (4165, 4179), True, 'import numpy as np\n'), ((4181, 4201), 'numpy.max', 'np.max', (['pos_z_AGN[1]'], {}), '(pos_z_AGN[1])\n', (4187, 4201), True, 'import numpy as np\n'), ((5320, 5349), 'numpy.min', 'np.min', (['redshift_bins_agn[1:]'], {}), '(redshift_bins_agn[1:])\n', (5326, 5349), True, 'import numpy as np\n'), ((5351, 5380), 'numpy.max', 'np.max', (['redshift_bins_agn[1:]'], {}), '(redshift_bins_agn[1:])\n', (5357, 5380), True, 'import numpy as np\n'), ((5938, 5967), 'numpy.max', 'np.max', (['redshift_bins_agn[1:]'], {}), '(redshift_bins_agn[1:])\n', (5944, 5967), True, 'import numpy as np\n'), ((6795, 6835), 'numpy.arange', 'np.arange', (['(0)', 'redshift_limit', 'resolution'], {}), '(0, redshift_limit, resolution)\n', (6804, 6835), True, 'import numpy as np\n'), ((7809, 7831), 'numpy.min', 'np.min', (['merger_val_gal'], {}), '(merger_val_gal)\n', (7815, 7831), True, 'import numpy as np\n'), ((7833, 7855), 'numpy.max', 'np.max', (['merger_val_gal'], {}), '(merger_val_gal)\n', (7839, 7855), True, 'import numpy as np\n'), ((8070, 8096), 'matplotlib.ticker.FormatStrFormatter', 'FormatStrFormatter', (['"""%.1f"""'], {}), "('%.1f')\n", (8088, 8096), False, 'from matplotlib.ticker import LinearLocator, FormatStrFormatter\n'), ((8815, 8885), 'numpy.unique', 'np.unique', (["cen_sat_AGN[i]['HALO_scale_of_last_MM']"], {'return_counts': '(True)'}), "(cen_sat_AGN[i]['HALO_scale_of_last_MM'], return_counts=True)\n", (8824, 8885), True, 'import numpy as np\n'), ((8911, 8982), 'numpy.unique', 'np.unique', (["cen_sat_halo[i]['HALO_scale_of_last_MM']"], {'return_counts': '(True)'}), "(cen_sat_halo[i]['HALO_scale_of_last_MM'], return_counts=True)\n", (8920, 8982), True, 'import numpy as np\n'), ((10519, 10562), 'numpy.unique', 'np.unique', (['t_merger_agn'], {'return_counts': '(True)'}), '(t_merger_agn, return_counts=True)\n', (10528, 10562), True, 'import numpy as np\n'), ((10598, 10641), 'numpy.unique', 'np.unique', (['t_merger_gal'], {'return_counts': '(True)'}), '(t_merger_gal, return_counts=True)\n', (10607, 10641), True, 'import numpy as np\n'), ((10787, 10805), 'numpy.cumsum', 'np.cumsum', (['c_t_gal'], {}), '(c_t_gal)\n', (10796, 10805), True, 'import numpy as np\n'), ((10867, 10885), 'numpy.cumsum', 'np.cumsum', (['c_t_agn'], {}), '(c_t_agn)\n', (10876, 10885), True, 'import numpy as np\n'), ((2994, 
3014), 'numpy.max', 'np.max', (['pos_z_clu[2]'], {}), '(pos_z_clu[2])\n', (3000, 3014), True, 'import numpy as np\n'), ((5749, 5762), 'numpy.array', 'np.array', (['idx'], {}), '(idx)\n', (5757, 5762), True, 'import numpy as np\n'), ((5902, 5931), 'numpy.min', 'np.min', (['redshift_bins_agn[1:]'], {}), '(redshift_bins_agn[1:])\n', (5908, 5931), True, 'import numpy as np\n'), ((8150, 8187), 'astropy.cosmology.z_at_value', 'z_at_value', (['cosmo.scale_factor', 'a_min'], {}), '(cosmo.scale_factor, a_min)\n', (8160, 8187), False, 'from astropy.cosmology import FlatLambdaCDM, z_at_value\n'), ((10235, 10257), 'numpy.array', 'np.array', (['t_merger_agn'], {}), '(t_merger_agn)\n', (10243, 10257), True, 'import numpy as np\n'), ((10327, 10349), 'numpy.array', 'np.array', (['t_merger_gal'], {}), '(t_merger_gal)\n', (10335, 10349), True, 'import numpy as np\n'), ((2582, 2603), 'numpy.log10', 'np.log10', (['halo_m_500c'], {}), '(halo_m_500c)\n', (2590, 2603), True, 'import numpy as np\n')] |
Rubtsowa/modin | asv_bench/benchmarks/omnisci/io.py | 6550939753c76e896ef2bfd65bb9468d6ad161d7 | # Licensed to Modin Development Team under one or more contributor license agreements.
# See the NOTICE file distributed with this work for additional information regarding
# copyright ownership. The Modin Development Team licenses this file to you under the
# Apache License, Version 2.0 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific language
# governing permissions and limitations under the License.
"""IO Modin on OmniSci storage format benchmarks."""
import modin.pandas as pd
from ..utils import (
generate_dataframe,
RAND_LOW,
RAND_HIGH,
ASV_USE_IMPL,
IMPL,
get_shape_id,
trigger_import,
get_benchmark_shapes,
)
from ..io.csv import TimeReadCsvTrueFalseValues # noqa: F401
class TimeReadCsvNames:
shapes = get_benchmark_shapes("omnisci.TimeReadCsvNames")
param_names = ["shape"]
params = [shapes]
def setup_cache(self, test_filename="io_test_file_csv_names"):
# filenames with a metadata of saved dataframes
cache = {}
for shape in self.shapes:
df = generate_dataframe("pandas", "int", *shape, RAND_LOW, RAND_HIGH)
file_id = get_shape_id(shape)
cache[file_id] = (
f"{test_filename}_{file_id}.csv",
df.columns.to_list(),
df.dtypes.to_dict(),
)
df.to_csv(cache[file_id][0], index=False)
return cache
def setup(self, cache, shape):
# ray init
if ASV_USE_IMPL == "modin":
pd.DataFrame([])
file_id = get_shape_id(shape)
self.filename, self.names, self.dtype = cache[file_id]
def time_read_csv_names(self, cache, shape):
df = IMPL[ASV_USE_IMPL].read_csv(
self.filename,
names=self.names,
header=0,
dtype=self.dtype,
)
trigger_import(df)
| [((1883, 1899), 'modin.pandas.DataFrame', 'pd.DataFrame', (['[]'], {}), '([])\n', (1895, 1899), True, 'import modin.pandas as pd\n')] |
ypeng22/ProgLearn | benchmarks/rotation/rotated_cifar.py | 671ff6a03c156bab3eedbd9e112705eeabd59da7 | import matplotlib.pyplot as plt
import random
import pickle
from skimage.transform import rotate
from scipy import ndimage
from skimage.util import img_as_ubyte
from joblib import Parallel, delayed
from sklearn.ensemble.forest import _generate_unsampled_indices
from sklearn.ensemble.forest import _generate_sample_indices
import numpy as np
from sklearn.ensemble import BaggingClassifier
from sklearn.tree import DecisionTreeClassifier
from itertools import product
import keras
from keras import layers
from joblib import Parallel, delayed
from multiprocessing import Pool
import tensorflow as tf
from numba import cuda
import sys
sys.path.append("../../proglearn/")
from progressive_learner import ProgressiveLearner
from deciders import SimpleArgmaxAverage
from transformers import TreeClassificationTransformer, NeuralClassificationTransformer
from voters import TreeClassificationVoter, KNNClassificationVoter
def cross_val_data(data_x, data_y, total_cls=10):
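    # Split summary (derived from the slicing below): per class, samples 0-249 form the
    # task-1 training set, 250-499 the task-2 training set, and 500-599 the test set.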
x = data_x.copy()
y = data_y.copy()
idx = [np.where(data_y == u)[0] for u in np.unique(data_y)]
for i in range(total_cls):
indx = idx[i]#np.roll(idx[i],(cv-1)*100)
random.shuffle(indx)
if i==0:
train_x1 = x[indx[0:250],:]
train_x2 = x[indx[250:500],:]
train_y1 = y[indx[0:250]]
train_y2 = y[indx[250:500]]
test_x = x[indx[500:600],:]
test_y = y[indx[500:600]]
else:
train_x1 = np.concatenate((train_x1, x[indx[0:250],:]), axis=0)
train_x2 = np.concatenate((train_x2, x[indx[250:500],:]), axis=0)
train_y1 = np.concatenate((train_y1, y[indx[0:250]]), axis=0)
train_y2 = np.concatenate((train_y2, y[indx[250:500]]), axis=0)
test_x = np.concatenate((test_x, x[indx[500:600],:]), axis=0)
test_y = np.concatenate((test_y, y[indx[500:600]]), axis=0)
return train_x1, train_y1, train_x2, train_y2, test_x, test_y
def LF_experiment(data_x, data_y, angle, model, granularity, reps=1, ntrees=29, acorn=None):
if acorn is not None:
np.random.seed(acorn)
errors = np.zeros(2)
for rep in range(reps):
print("Starting Rep {} of Angle {}".format(rep, angle))
train_x1, train_y1, train_x2, train_y2, test_x, test_y = cross_val_data(data_x, data_y, total_cls=10)
#change data angle for second task
tmp_data = train_x2.copy()
_tmp_ = np.zeros((32,32,3), dtype=int)
total_data = tmp_data.shape[0]
for i in range(total_data):
tmp_ = image_aug(tmp_data[i],angle)
tmp_data[i] = tmp_
if model == "uf":
train_x1 = train_x1.reshape((train_x1.shape[0], train_x1.shape[1] * train_x1.shape[2] * train_x1.shape[3]))
tmp_data = tmp_data.reshape((tmp_data.shape[0], tmp_data.shape[1] * tmp_data.shape[2] * tmp_data.shape[3]))
test_x = test_x.reshape((test_x.shape[0], test_x.shape[1] * test_x.shape[2] * test_x.shape[3]))
with tf.device('/gpu:'+str(int(angle // granularity) % 4)):
default_transformer_class = NeuralClassificationTransformer
network = keras.Sequential()
network.add(layers.Conv2D(filters=16, kernel_size=(3, 3), activation='relu', input_shape=np.shape(train_x1)[1:]))
network.add(layers.BatchNormalization())
network.add(layers.Conv2D(filters=32, kernel_size=(3, 3), strides = 2, padding = "same", activation='relu'))
network.add(layers.BatchNormalization())
network.add(layers.Conv2D(filters=64, kernel_size=(3, 3), strides = 2, padding = "same", activation='relu'))
network.add(layers.BatchNormalization())
network.add(layers.Conv2D(filters=128, kernel_size=(3, 3), strides = 2, padding = "same", activation='relu'))
network.add(layers.BatchNormalization())
network.add(layers.Conv2D(filters=254, kernel_size=(3, 3), strides = 2, padding = "same", activation='relu'))
network.add(layers.Flatten())
network.add(layers.BatchNormalization())
network.add(layers.Dense(2000, activation='relu'))
network.add(layers.BatchNormalization())
network.add(layers.Dense(2000, activation='relu'))
network.add(layers.BatchNormalization())
network.add(layers.Dense(units=10, activation = 'softmax'))
default_transformer_kwargs = {"network" : network,
"euclidean_layer_idx" : -2,
"num_classes" : 10,
"optimizer" : keras.optimizers.Adam(3e-4)
}
default_voter_class = KNNClassificationVoter
default_voter_kwargs = {"k" : int(np.log2(len(train_x1)))}
default_decider_class = SimpleArgmaxAverage
progressive_learner = ProgressiveLearner(default_transformer_class = default_transformer_class,
default_transformer_kwargs = default_transformer_kwargs,
default_voter_class = default_voter_class,
default_voter_kwargs = default_voter_kwargs,
default_decider_class = default_decider_class)
progressive_learner.add_task(
X = train_x1,
y = train_y1,
transformer_voter_decider_split = [0.67, 0.33, 0],
decider_kwargs = {"classes" : np.unique(train_y1)}
)
progressive_learner.add_transformer(
X = tmp_data,
y = train_y2,
transformer_data_proportion = 1,
backward_task_ids = [0]
)
llf_task1=progressive_learner.predict(test_x, task_id=0)
llf_single_task=progressive_learner.predict(test_x, task_id=0, transformer_ids=[0])
errors[1] = errors[1]+(1 - np.mean(llf_task1 == test_y))
errors[0] = errors[0]+(1 - np.mean(llf_single_task == test_y))
errors = errors/reps
print("Errors For Angle {}: {}".format(angle, errors))
with open('rotation_results/angle_'+str(angle)+'_'+model+'.pickle', 'wb') as f:
pickle.dump(errors, f, protocol = 2)
def image_aug(pic, angle, centroid_x=23, centroid_y=23, win=16, scale=1.45):
im_sz = int(np.floor(pic.shape[0]*scale))
pic_ = np.uint8(np.zeros((im_sz,im_sz,3),dtype=int))
pic_[:,:,0] = ndimage.zoom(pic[:,:,0],scale)
pic_[:,:,1] = ndimage.zoom(pic[:,:,1],scale)
pic_[:,:,2] = ndimage.zoom(pic[:,:,2],scale)
image_aug = rotate(pic_, angle, resize=False)
#print(image_aug.shape)
image_aug_ = image_aug[centroid_x-win:centroid_x+win,centroid_y-win:centroid_y+win,:]
return img_as_ubyte(image_aug_)
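# Illustrative usage note (not part of the original script): image_aug zooms a 32x32x3
# CIFAR image by `scale`, rotates it by `angle` degrees, and crops a 2*win x 2*win
# window around (centroid_x, centroid_y), e.g.
#   rotated = image_aug(data_x[0], 45)   # -> 32x32x3 uint8 rotated crop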
### MAIN HYPERPARAMS ###
model = "dnn"
granularity = 2
reps = 4
########################
(X_train, y_train), (X_test, y_test) = keras.datasets.cifar100.load_data()
data_x = np.concatenate([X_train, X_test])
data_y = np.concatenate([y_train, y_test])
data_y = data_y[:, 0]
def perform_angle(angle):
LF_experiment(data_x, data_y, angle, model, granularity, reps=reps, ntrees=16, acorn=1)
if model == "dnn":
for angle_adder in range(30, 180, granularity * 4):
angles = angle_adder + np.arange(0, granularity * 4, granularity)
with Pool(4) as p:
p.map(perform_angle, angles)
elif model == "uf":
angles = np.arange(30,180,2)
Parallel(n_jobs=-1)(delayed(LF_experiment)(data_x, data_y, angle, model, granularity, reps=20, ntrees=16, acorn=1) for angle in angles)
| [((638, 673), 'sys.path.append', 'sys.path.append', (['"""../../proglearn/"""'], {}), "('../../proglearn/')\n", (653, 673), False, 'import sys\n'), ((7056, 7091), 'keras.datasets.cifar100.load_data', 'keras.datasets.cifar100.load_data', ([], {}), '()\n', (7089, 7091), False, 'import keras\n'), ((7101, 7134), 'numpy.concatenate', 'np.concatenate', (['[X_train, X_test]'], {}), '([X_train, X_test])\n', (7115, 7134), True, 'import numpy as np\n'), ((7144, 7177), 'numpy.concatenate', 'np.concatenate', (['[y_train, y_test]'], {}), '([y_train, y_test])\n', (7158, 7177), True, 'import numpy as np\n'), ((2145, 2156), 'numpy.zeros', 'np.zeros', (['(2)'], {}), '(2)\n', (2153, 2156), True, 'import numpy as np\n'), ((6589, 6624), 'scipy.ndimage.zoom', 'ndimage.zoom', (['pic[:, :, (0)]', 'scale'], {}), '(pic[:, :, (0)], scale)\n', (6601, 6624), False, 'from scipy import ndimage\n'), ((6639, 6674), 'scipy.ndimage.zoom', 'ndimage.zoom', (['pic[:, :, (1)]', 'scale'], {}), '(pic[:, :, (1)], scale)\n', (6651, 6674), False, 'from scipy import ndimage\n'), ((6688, 6723), 'scipy.ndimage.zoom', 'ndimage.zoom', (['pic[:, :, (2)]', 'scale'], {}), '(pic[:, :, (2)], scale)\n', (6700, 6723), False, 'from scipy import ndimage\n'), ((6736, 6769), 'skimage.transform.rotate', 'rotate', (['pic_', 'angle'], {'resize': '(False)'}), '(pic_, angle, resize=False)\n', (6742, 6769), False, 'from skimage.transform import rotate\n'), ((6900, 6924), 'skimage.util.img_as_ubyte', 'img_as_ubyte', (['image_aug_'], {}), '(image_aug_)\n', (6912, 6924), False, 'from skimage.util import img_as_ubyte\n'), ((1170, 1190), 'random.shuffle', 'random.shuffle', (['indx'], {}), '(indx)\n', (1184, 1190), False, 'import random\n'), ((2109, 2130), 'numpy.random.seed', 'np.random.seed', (['acorn'], {}), '(acorn)\n', (2123, 2130), True, 'import numpy as np\n'), ((2456, 2488), 'numpy.zeros', 'np.zeros', (['(32, 32, 3)'], {'dtype': 'int'}), '((32, 32, 3), dtype=int)\n', (2464, 2488), True, 'import numpy as np\n'), ((6352, 6386), 'pickle.dump', 'pickle.dump', (['errors', 'f'], {'protocol': '(2)'}), '(errors, f, protocol=2)\n', (6363, 6386), False, 'import pickle\n'), ((6483, 6513), 'numpy.floor', 'np.floor', (['(pic.shape[0] * scale)'], {}), '(pic.shape[0] * scale)\n', (6491, 6513), True, 'import numpy as np\n'), ((6533, 6571), 'numpy.zeros', 'np.zeros', (['(im_sz, im_sz, 3)'], {'dtype': 'int'}), '((im_sz, im_sz, 3), dtype=int)\n', (6541, 6571), True, 'import numpy as np\n'), ((7570, 7591), 'numpy.arange', 'np.arange', (['(30)', '(180)', '(2)'], {}), '(30, 180, 2)\n', (7579, 7591), True, 'import numpy as np\n'), ((1027, 1048), 'numpy.where', 'np.where', (['(data_y == u)'], {}), '(data_y == u)\n', (1035, 1048), True, 'import numpy as np\n'), ((1061, 1078), 'numpy.unique', 'np.unique', (['data_y'], {}), '(data_y)\n', (1070, 1078), True, 'import numpy as np\n'), ((1485, 1540), 'numpy.concatenate', 'np.concatenate', (['(train_x1, x[(indx[0:250]), :])'], {'axis': '(0)'}), '((train_x1, x[(indx[0:250]), :]), axis=0)\n', (1499, 1540), True, 'import numpy as np\n'), ((1561, 1618), 'numpy.concatenate', 'np.concatenate', (['(train_x2, x[(indx[250:500]), :])'], {'axis': '(0)'}), '((train_x2, x[(indx[250:500]), :]), axis=0)\n', (1575, 1618), True, 'import numpy as np\n'), ((1639, 1689), 'numpy.concatenate', 'np.concatenate', (['(train_y1, y[indx[0:250]])'], {'axis': '(0)'}), '((train_y1, y[indx[0:250]]), axis=0)\n', (1653, 1689), True, 'import numpy as np\n'), ((1713, 1765), 'numpy.concatenate', 'np.concatenate', (['(train_y2, y[indx[250:500]])'], {'axis': '(0)'}), 
'((train_y2, y[indx[250:500]]), axis=0)\n', (1727, 1765), True, 'import numpy as np\n'), ((1788, 1843), 'numpy.concatenate', 'np.concatenate', (['(test_x, x[(indx[500:600]), :])'], {'axis': '(0)'}), '((test_x, x[(indx[500:600]), :]), axis=0)\n', (1802, 1843), True, 'import numpy as np\n'), ((1862, 1912), 'numpy.concatenate', 'np.concatenate', (['(test_y, y[indx[500:600]])'], {'axis': '(0)'}), '((test_y, y[indx[500:600]]), axis=0)\n', (1876, 1912), True, 'import numpy as np\n'), ((3182, 3200), 'keras.Sequential', 'keras.Sequential', ([], {}), '()\n', (3198, 3200), False, 'import keras\n'), ((4969, 5237), 'progressive_learner.ProgressiveLearner', 'ProgressiveLearner', ([], {'default_transformer_class': 'default_transformer_class', 'default_transformer_kwargs': 'default_transformer_kwargs', 'default_voter_class': 'default_voter_class', 'default_voter_kwargs': 'default_voter_kwargs', 'default_decider_class': 'default_decider_class'}), '(default_transformer_class=default_transformer_class,\n default_transformer_kwargs=default_transformer_kwargs,\n default_voter_class=default_voter_class, default_voter_kwargs=\n default_voter_kwargs, default_decider_class=default_decider_class)\n', (4987, 5237), False, 'from progressive_learner import ProgressiveLearner\n'), ((7426, 7468), 'numpy.arange', 'np.arange', (['(0)', '(granularity * 4)', 'granularity'], {}), '(0, granularity * 4, granularity)\n', (7435, 7468), True, 'import numpy as np\n'), ((7482, 7489), 'multiprocessing.Pool', 'Pool', (['(4)'], {}), '(4)\n', (7486, 7489), False, 'from multiprocessing import Pool\n'), ((7594, 7613), 'joblib.Parallel', 'Parallel', ([], {'n_jobs': '(-1)'}), '(n_jobs=-1)\n', (7602, 7613), False, 'from joblib import Parallel, delayed\n'), ((3351, 3378), 'keras.layers.BatchNormalization', 'layers.BatchNormalization', ([], {}), '()\n', (3376, 3378), False, 'from keras import layers\n'), ((3404, 3499), 'keras.layers.Conv2D', 'layers.Conv2D', ([], {'filters': '(32)', 'kernel_size': '(3, 3)', 'strides': '(2)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=32, kernel_size=(3, 3), strides=2, padding='same',\n activation='relu')\n", (3417, 3499), False, 'from keras import layers\n'), ((3525, 3552), 'keras.layers.BatchNormalization', 'layers.BatchNormalization', ([], {}), '()\n', (3550, 3552), False, 'from keras import layers\n'), ((3578, 3673), 'keras.layers.Conv2D', 'layers.Conv2D', ([], {'filters': '(64)', 'kernel_size': '(3, 3)', 'strides': '(2)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=64, kernel_size=(3, 3), strides=2, padding='same',\n activation='relu')\n", (3591, 3673), False, 'from keras import layers\n'), ((3699, 3726), 'keras.layers.BatchNormalization', 'layers.BatchNormalization', ([], {}), '()\n', (3724, 3726), False, 'from keras import layers\n'), ((3752, 3848), 'keras.layers.Conv2D', 'layers.Conv2D', ([], {'filters': '(128)', 'kernel_size': '(3, 3)', 'strides': '(2)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=128, kernel_size=(3, 3), strides=2, padding='same',\n activation='relu')\n", (3765, 3848), False, 'from keras import layers\n'), ((3874, 3901), 'keras.layers.BatchNormalization', 'layers.BatchNormalization', ([], {}), '()\n', (3899, 3901), False, 'from keras import layers\n'), ((3927, 4023), 'keras.layers.Conv2D', 'layers.Conv2D', ([], {'filters': '(254)', 'kernel_size': '(3, 3)', 'strides': '(2)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=254, kernel_size=(3, 3), strides=2, padding='same',\n activation='relu')\n", (3940, 
4023), False, 'from keras import layers\n'), ((4050, 4066), 'keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (4064, 4066), False, 'from keras import layers\n'), ((4092, 4119), 'keras.layers.BatchNormalization', 'layers.BatchNormalization', ([], {}), '()\n', (4117, 4119), False, 'from keras import layers\n'), ((4145, 4182), 'keras.layers.Dense', 'layers.Dense', (['(2000)'], {'activation': '"""relu"""'}), "(2000, activation='relu')\n", (4157, 4182), False, 'from keras import layers\n'), ((4208, 4235), 'keras.layers.BatchNormalization', 'layers.BatchNormalization', ([], {}), '()\n', (4233, 4235), False, 'from keras import layers\n'), ((4261, 4298), 'keras.layers.Dense', 'layers.Dense', (['(2000)'], {'activation': '"""relu"""'}), "(2000, activation='relu')\n", (4273, 4298), False, 'from keras import layers\n'), ((4324, 4351), 'keras.layers.BatchNormalization', 'layers.BatchNormalization', ([], {}), '()\n', (4349, 4351), False, 'from keras import layers\n'), ((4377, 4421), 'keras.layers.Dense', 'layers.Dense', ([], {'units': '(10)', 'activation': '"""softmax"""'}), "(units=10, activation='softmax')\n", (4389, 4421), False, 'from keras import layers\n'), ((4677, 4706), 'keras.optimizers.Adam', 'keras.optimizers.Adam', (['(0.0003)'], {}), '(0.0003)\n', (4698, 4706), False, 'import keras\n'), ((6070, 6098), 'numpy.mean', 'np.mean', (['(llf_task1 == test_y)'], {}), '(llf_task1 == test_y)\n', (6077, 6098), True, 'import numpy as np\n'), ((6139, 6173), 'numpy.mean', 'np.mean', (['(llf_single_task == test_y)'], {}), '(llf_single_task == test_y)\n', (6146, 6173), True, 'import numpy as np\n'), ((7614, 7636), 'joblib.delayed', 'delayed', (['LF_experiment'], {}), '(LF_experiment)\n', (7621, 7636), False, 'from joblib import Parallel, delayed\n'), ((5615, 5634), 'numpy.unique', 'np.unique', (['train_y1'], {}), '(train_y1)\n', (5624, 5634), True, 'import numpy as np\n'), ((3302, 3320), 'numpy.shape', 'np.shape', (['train_x1'], {}), '(train_x1)\n', (3310, 3320), True, 'import numpy as np\n')] |
toddbenanzer/sklearn_pandas | sklearn_pandas/transformers/monitor.py | 36e24c55ef4829aa261963201c346869097d4931 | import numpy as np
import pandas as pd
from sklearn.base import BaseEstimator, TransformerMixin, clone
from sklearn_pandas.util import validate_dataframe
class MonitorMixin(object):
    def print_message(self, message):
        if self.logfile:
            with open(self.logfile, "a") as fout:
                fout.write(message)
        elif self.to_screen:
            # honor the to_screen flag instead of always printing
            print(message)
class ValidateTypes(BaseEstimator, TransformerMixin, MonitorMixin):
def __init__(self, logfile=None, to_screen=True):
self.logfile = logfile
self.to_screen = to_screen
def fit(self, X, y=None, **fitparams):
X = validate_dataframe(X)
self.types = {}
for col in X.columns:
self.types[col] = X[col].dtype.name
return self
def transform(self, X, **transformparams):
X = validate_dataframe(X)
new_col_list = []
for col in X.columns:
var_type = X[col].dtype.name
if var_type != self.types[col]:
self.print_message(
'Data Type Mismatch for column {col}: Expected {expected} Received {received}'.format(
col=col, expected=self.types[col], received=var_type)
)
return X
class ValidateRange(BaseEstimator, TransformerMixin, MonitorMixin):
def __init__(self, logfile=None, to_screen=True, max_nunique=20):
self.logfile = logfile
self.to_screen = to_screen
self.max_nunique = max_nunique
def fit(self, X, y=None, **fitparams):
X = validate_dataframe(X)
self.types = {}
self.unique_vals = {}
self.minmax = {}
for col in X.columns:
self.types[col] = X[col].dtype.name
if self.types[col] in ('object', 'bool', 'category'):
unique_values = X[col].unique()
if len(unique_values) <= self.max_nunique:
self.unique_vals[col] = unique_values
else:
self.unique_vals[col] = None
elif self.types[col] in ('int64', 'float64', 'datetime64', 'timedelta'):
self.minmax[col] = (X[col].min(), X[col].max())
return self
def transform(self, X, **transformparams):
X = validate_dataframe(X)
new_col_list = []
for col in X.columns:
var_type = X[col].dtype.name
if self.types[col] in ('object', 'bool', 'category'):
if self.unique_vals[col] is not None:
not_in_list = ~X[col].isin(self.unique_vals[col])
if sum(not_in_list) > 0:
new_values = str(X[col][not_in_list].unique().tolist())
self.print_message(
'New Categories specified for column {col}: Received {received}'.format(
col=col, received=new_values)
)
elif self.types[col] in ('int64', 'float64', 'datetime64', 'timedelta'):
minX = X[col].min()
maxX = X[col].max()
if minX < self.minmax[col][0]:
self.print_message(
'Low Value warning for column {col}: Lowest Training value {lowtrain}, Lowest Scoring value {lowscore}'.format(
col=col, lowtrain=self.minmax[col][0], lowscore=minX)
)
if maxX > self.minmax[col][1]:
self.print_message(
'High Value warning for column {col}: Largest Training value {hightrain}, Largest Scoring value {highscore}'.format(
col=col, hightrain=self.minmax[col][1], highscore=maxX)
)
return X
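if __name__ == '__main__':
    # Minimal usage sketch (not part of the original module): fit the monitors on a
    # training frame and apply them to a scoring frame; the example data is made up.
    train = pd.DataFrame({'age': [25, 40, 31], 'segment': ['a', 'b', 'a']})
    score = pd.DataFrame({'age': [25, 70, 31], 'segment': ['a', 'c', 'a']})
    monitor = ValidateRange()
    monitor.fit(train)
    monitor.transform(score)  # warns about the new category 'c' and age above 40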
| [((622, 643), 'sklearn_pandas.util.validate_dataframe', 'validate_dataframe', (['X'], {}), '(X)\n', (640, 643), False, 'from sklearn_pandas.util import validate_dataframe\n'), ((826, 847), 'sklearn_pandas.util.validate_dataframe', 'validate_dataframe', (['X'], {}), '(X)\n', (844, 847), False, 'from sklearn_pandas.util import validate_dataframe\n'), ((1548, 1569), 'sklearn_pandas.util.validate_dataframe', 'validate_dataframe', (['X'], {}), '(X)\n', (1566, 1569), False, 'from sklearn_pandas.util import validate_dataframe\n'), ((2259, 2280), 'sklearn_pandas.util.validate_dataframe', 'validate_dataframe', (['X'], {}), '(X)\n', (2277, 2280), False, 'from sklearn_pandas.util import validate_dataframe\n')] |
cbrake1/content | Packs/ProofpointThreatResponse/Integrations/ProofpointThreatResponse/ProofpointThreatResponse_test.py | 5b031129f98935c492056675eeee0fefcacbd87b | import pytest
from CommonServerPython import *
from ProofpointThreatResponse import create_incident_field_context, get_emails_context, pass_sources_list_filter, \
pass_abuse_disposition_filter, filter_incidents, prepare_ingest_alert_request_body, \
get_incidents_batch_by_time_request, get_new_incidents, get_time_delta
MOCK_INCIDENT = {
"id": 1,
"type": "Malware",
"summary": "Unsolicited Bulk Email",
"description": "EvilScheme test message",
"score": 4200,
"state": "Open",
"created_at": "2018-05-26T21:07:17Z",
"event_count": 3,
"event_sources": [
"Proofpoint TAP"
],
"users": [
""
],
"assignee": "Unassigned",
"team": "Unassigned",
"hosts": {
"attacker": [
""
],
"forensics": [
"",
]
},
"incident_field_values": [
{
"name": "Attack Vector",
"value": "Email"
},
{
"name": "Classification",
"value": "Spam"
},
{
"name": "Severity",
"value": "Critical"
},
{
"name": "Abuse Disposition",
"value": "Unknown"
}
],
"events": [
{
"id": 3,
"category": "malware",
"severity": "Info",
"source": "Proofpoint TAP",
"threatname": "",
"state": "Linked",
"description": "",
"attackDirection": "inbound",
"received": "2018-05-26T21:07:17Z",
"malwareName": "",
"emails": [
{
"sender": {
"email": "test"
},
"recipient": {
"email": "test"
},
"subject": "test",
"messageId": "test",
"messageDeliveryTime": {
"chronology": {
"zone": {
"id": "UTC"
}
},
"millis": 1544640072000,
},
"abuseCopy": "false",
"body": "test",
'bodyType': "test",
'headers': "test",
'urls': "test"
}
],
}
],
"quarantine_results": [],
"successful_quarantines": 0,
"failed_quarantines": 0,
"pending_quarantines": 0
}
INCIDENT_FIELD_CONTEXT = {
"Attack_Vector": "Email",
"Classification": "Spam",
"Severity": "Critical",
"Abuse_Disposition": "Unknown"
}
INCIDENT_FIELD_INPUT = [
(MOCK_INCIDENT, INCIDENT_FIELD_CONTEXT)
]
def get_fetch_data():
with open('./test_data/raw_response.json', 'r') as f:
file = json.loads(f.read())
return file.get('result')
FETCH_RESPONSE = get_fetch_data()
@pytest.mark.parametrize('incident, answer', INCIDENT_FIELD_INPUT)
def test_get_incident_field_context(incident, answer):
incident_field_values = create_incident_field_context(incident)
assert incident_field_values == answer
EMAIL_RESULT = [
{
'sender': "test",
'recipient': "test",
'subject': "test",
'message_id': "test",
'message_delivery_time': 1544640072000,
'body': "test",
'body_type': "test",
'headers': "test",
'urls': "test"
}
]
EMAILS_CONTEXT_INPUT = [
(MOCK_INCIDENT['events'][0], EMAIL_RESULT)
]
@pytest.mark.parametrize('event, answer', EMAILS_CONTEXT_INPUT)
def test_get_emails_context(event, answer):
emails_context = get_emails_context(event)
assert emails_context == answer
SOURCE_LIST_INPUT = [
(["Proofpoint TAP"], True),
([], True),
(["No such source"], False),
(["No such source", "Proofpoint TAP"], True)
]
@pytest.mark.parametrize('sources_list, expected_answer', SOURCE_LIST_INPUT)
def test_pass_sources_list_filter(sources_list, expected_answer):
result = pass_sources_list_filter(MOCK_INCIDENT, sources_list)
assert result == expected_answer
ABUSE_DISPOSITION_INPUT = [
(["Unknown"], True),
([], True),
(["No such value"], False),
(["No such value", "Unknown"], True)
]
@pytest.mark.parametrize('abuse_disposition_values, expected_answer', ABUSE_DISPOSITION_INPUT)
def test_pass_abuse_disposition_filter(abuse_disposition_values, expected_answer):
    result = pass_abuse_disposition_filter(MOCK_INCIDENT, abuse_disposition_values)
assert result == expected_answer
DEMISTO_PARAMS = [({'event_sources': "No such source, Proofpoint TAP", 'abuse_disposition': "No such value, Unknown"},
[MOCK_INCIDENT]), ({'event_sources': "", 'abuse_disposition': ""}, [MOCK_INCIDENT]),
({'event_sources': "No such source", 'abuse_disposition': "No such value, Unknown"}, []),
({'event_sources': "No such source, Proofpoint TAP", 'abuse_disposition': "No such value"}, []),
({'event_sources': "No such source", 'abuse_disposition': "No such value"}, [])]
@pytest.mark.parametrize('demisto_params, expected_answer', DEMISTO_PARAMS)
def test_filter_incidents(mocker, demisto_params, expected_answer):
mocker.patch.object(demisto, 'params', return_value=demisto_params)
filtered_incidents = filter_incidents([MOCK_INCIDENT])
assert filtered_incidents == expected_answer
INGEST_ALERT_ARGS = {
"attacker": "{\"attacker\":{\"key\":\"value\"}}",
"cnc_host": "{\"cnc_host\":{\"key\":\"value\"}}",
"detector": "{\"detector\":{\"key\":\"value\"}}",
"email": "{\"email\":{\"key\":\"value\"}}",
"forensics_hosts": "{\"forensics_hosts\":{\"key\":\"value\"}}",
"target": "{\"target\":{\"key\":\"value\"}}",
"threat_info": "{\"threat_info\":{\"key\":\"value\"}}",
"custom_fields": "{\"custom_fields\":{\"key\":\"value\"}}",
"post_url_id": "value",
"json_version": "value",
"summary": "value"
}
EXPECTED_RESULT = {
"attacker": {"key": "value"},
"cnc_host": {"key": "value"},
"detector": {"key": "value"},
"email": {"key": "value"},
"forensics_hosts": {"key": "value"},
"target": {"key": "value"},
"threat_info": {"key": "value"},
"custom_fields": {"key": "value"},
"post_url_id": "value",
"json_version": "value",
"summary": "value"
}
def test_prepare_ingest_alert_request_body():
prepared_body = prepare_ingest_alert_request_body(INGEST_ALERT_ARGS)
assert prepared_body == EXPECTED_RESULT
def test_fetch_incidents_limit_exceed(mocker):
"""
Given
- a dict of params given to the function which is gathered originally from demisto.params()
The dict includes the relevant params for the fetch e.g. fetch_delta, fetch_limit, created_after, state.
- response of the api
When
- a single iteration of the fetch is activated with a fetch limit set to 5
Then
    - validate that the number of incidents that is returned is equal to the limit when the api returned more.
"""
params = {
'fetch_delta': '6 hours',
'fetch_limit': ' 5',
'created_after': '2021-03-30T11:44:24Z',
'state': 'closed'
}
mocker.patch('ProofpointThreatResponse.get_incidents_request', return_value=FETCH_RESPONSE)
incidents_list = get_incidents_batch_by_time_request(params)
assert len(incidents_list) == 5
def test_fetch_incidents_with_same_created_time(mocker):
"""
Given
- a dict of params given to the function which is gathered originally from demisto.params()
The dict includes the relevant params for the fetch e.g. fetch_delta, fetch_limit, created_after, state and
last_fetched_id.
- response of the api
When
    - when a fetch occurs and the last fetched incident has exactly the same creation time as the next incident.
Then
    - validate that only one of the incidents appears, due to the fetch limit.
- validate that the next incident whose time is exactly the same is brought in the next fetch loop.
( e.g. 3057 and 3058)
"""
expected_ids_to_fetch_first = [3055, 3056, 3057]
expected_ids_to_fetch_second = [3058, 3059, 3060]
params = {
'fetch_delta': '2 hours',
'fetch_limit': '3',
'created_after': '2021-03-30T10:44:24Z',
'state': 'closed'
}
mocker.patch('ProofpointThreatResponse.get_incidents_request', return_value=FETCH_RESPONSE)
new_fetched_first = get_incidents_batch_by_time_request(params)
for incident in new_fetched_first:
assert incident.get('id') in expected_ids_to_fetch_first
params = {
'fetch_delta': '2 hour',
'fetch_limit': '3',
'created_after': '2021-03-30T11:21:24Z',
'last_fetched_id': '3057',
'state': 'closed'
}
new_fetched_second = get_incidents_batch_by_time_request(params)
for incident in new_fetched_second:
assert incident.get('id') in expected_ids_to_fetch_second
def test_get_new_incidents(mocker):
"""
Given
- a dict of request_params to the api.
- The last fetched incident id.
When
- Get new incidents is called during the fetch process.
Then
    - validate that the expected number of incidents is returned.
    - validate that all of the returned incidents have a bigger id than the last fetched incident.
"""
last_incident_fetched = 3057
request_params = {
'state': 'closed',
'created_after': '2021-03-30T10:21:24Z',
'created_before': '2021-03-31T11:21:24Z',
}
mocker.patch('ProofpointThreatResponse.get_incidents_request', return_value=FETCH_RESPONSE)
    new_incidents = get_new_incidents(request_params, last_incident_fetched)
    assert len(new_incidents) == 14
    for incident in new_incidents:
assert incident.get('id') > 3057
def test_get_time_delta():
"""
Given
- input to the get_time_delta function which is valid and invalid
When
- run the get_time_delta function.
Then
- validate that on invalid input such as days or no units relevant errors are raised.
- validate that on valid inputs the return value is as expected.
"""
time_delta = get_time_delta('1 minute')
assert str(time_delta) == '0:01:00'
time_delta = get_time_delta('2 hours')
assert str(time_delta) == '2:00:00'
try:
get_time_delta('2')
except Exception as ex:
assert 'The fetch_delta is invalid. Please make sure to insert both the number and the unit of the fetch delta.' in str(
ex)
try:
get_time_delta('2 days')
except Exception as ex:
assert 'The unit of fetch_delta is invalid. Possible values are "minutes" or "hours' in str(ex)
| [((2985, 3050), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""incident, answer"""', 'INCIDENT_FIELD_INPUT'], {}), "('incident, answer', INCIDENT_FIELD_INPUT)\n", (3008, 3050), False, 'import pytest\n'), ((3591, 3653), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""event, answer"""', 'EMAILS_CONTEXT_INPUT'], {}), "('event, answer', EMAILS_CONTEXT_INPUT)\n", (3614, 3653), False, 'import pytest\n'), ((3940, 4015), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""sources_list, expected_answer"""', 'SOURCE_LIST_INPUT'], {}), "('sources_list, expected_answer', SOURCE_LIST_INPUT)\n", (3963, 4015), False, 'import pytest\n'), ((4335, 4430), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""abuse_dispotion_values, expected_answer"""', 'ABUSE_DISPOSITION_INPUT'], {}), "('abuse_dispotion_values, expected_answer',\n ABUSE_DISPOSITION_INPUT)\n", (4358, 4430), False, 'import pytest\n'), ((5177, 5251), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""demisto_params, expected_answer"""', 'DEMISTO_PARAMS'], {}), "('demisto_params, expected_answer', DEMISTO_PARAMS)\n", (5200, 5251), False, 'import pytest\n'), ((3134, 3173), 'ProofpointThreatResponse.create_incident_field_context', 'create_incident_field_context', (['incident'], {}), '(incident)\n', (3163, 3173), False, 'from ProofpointThreatResponse import create_incident_field_context, get_emails_context, pass_sources_list_filter, pass_abuse_disposition_filter, filter_incidents, prepare_ingest_alert_request_body, get_incidents_batch_by_time_request, get_new_incidents, get_time_delta\n'), ((3719, 3744), 'ProofpointThreatResponse.get_emails_context', 'get_emails_context', (['event'], {}), '(event)\n', (3737, 3744), False, 'from ProofpointThreatResponse import create_incident_field_context, get_emails_context, pass_sources_list_filter, pass_abuse_disposition_filter, filter_incidents, prepare_ingest_alert_request_body, get_incidents_batch_by_time_request, get_new_incidents, get_time_delta\n'), ((4095, 4148), 'ProofpointThreatResponse.pass_sources_list_filter', 'pass_sources_list_filter', (['MOCK_INCIDENT', 'sources_list'], {}), '(MOCK_INCIDENT, sources_list)\n', (4119, 4148), False, 'from ProofpointThreatResponse import create_incident_field_context, get_emails_context, pass_sources_list_filter, pass_abuse_disposition_filter, filter_incidents, prepare_ingest_alert_request_body, get_incidents_batch_by_time_request, get_new_incidents, get_time_delta\n'), ((4521, 4589), 'ProofpointThreatResponse.pass_abuse_disposition_filter', 'pass_abuse_disposition_filter', (['MOCK_INCIDENT', 'abuse_dispotion_values'], {}), '(MOCK_INCIDENT, abuse_dispotion_values)\n', (4550, 4589), False, 'from ProofpointThreatResponse import create_incident_field_context, get_emails_context, pass_sources_list_filter, pass_abuse_disposition_filter, filter_incidents, prepare_ingest_alert_request_body, get_incidents_batch_by_time_request, get_new_incidents, get_time_delta\n'), ((5417, 5450), 'ProofpointThreatResponse.filter_incidents', 'filter_incidents', (['[MOCK_INCIDENT]'], {}), '([MOCK_INCIDENT])\n', (5433, 5450), False, 'from ProofpointThreatResponse import create_incident_field_context, get_emails_context, pass_sources_list_filter, pass_abuse_disposition_filter, filter_incidents, prepare_ingest_alert_request_body, get_incidents_batch_by_time_request, get_new_incidents, get_time_delta\n'), ((6511, 6563), 'ProofpointThreatResponse.prepare_ingest_alert_request_body', 'prepare_ingest_alert_request_body', (['INGEST_ALERT_ARGS'], {}), 
'(INGEST_ALERT_ARGS)\n', (6544, 6563), False, 'from ProofpointThreatResponse import create_incident_field_context, get_emails_context, pass_sources_list_filter, pass_abuse_disposition_filter, filter_incidents, prepare_ingest_alert_request_body, get_incidents_batch_by_time_request, get_new_incidents, get_time_delta\n'), ((7410, 7453), 'ProofpointThreatResponse.get_incidents_batch_by_time_request', 'get_incidents_batch_by_time_request', (['params'], {}), '(params)\n', (7445, 7453), False, 'from ProofpointThreatResponse import create_incident_field_context, get_emails_context, pass_sources_list_filter, pass_abuse_disposition_filter, filter_incidents, prepare_ingest_alert_request_body, get_incidents_batch_by_time_request, get_new_incidents, get_time_delta\n'), ((8564, 8607), 'ProofpointThreatResponse.get_incidents_batch_by_time_request', 'get_incidents_batch_by_time_request', (['params'], {}), '(params)\n', (8599, 8607), False, 'from ProofpointThreatResponse import create_incident_field_context, get_emails_context, pass_sources_list_filter, pass_abuse_disposition_filter, filter_incidents, prepare_ingest_alert_request_body, get_incidents_batch_by_time_request, get_new_incidents, get_time_delta\n'), ((8930, 8973), 'ProofpointThreatResponse.get_incidents_batch_by_time_request', 'get_incidents_batch_by_time_request', (['params'], {}), '(params)\n', (8965, 8973), False, 'from ProofpointThreatResponse import create_incident_field_context, get_emails_context, pass_sources_list_filter, pass_abuse_disposition_filter, filter_incidents, prepare_ingest_alert_request_body, get_incidents_batch_by_time_request, get_new_incidents, get_time_delta\n'), ((9773, 9829), 'ProofpointThreatResponse.get_new_incidents', 'get_new_incidents', (['request_params', 'last_incident_fetched'], {}), '(request_params, last_incident_fetched)\n', (9790, 9829), False, 'from ProofpointThreatResponse import create_incident_field_context, get_emails_context, pass_sources_list_filter, pass_abuse_disposition_filter, filter_incidents, prepare_ingest_alert_request_body, get_incidents_batch_by_time_request, get_new_incidents, get_time_delta\n'), ((10308, 10334), 'ProofpointThreatResponse.get_time_delta', 'get_time_delta', (['"""1 minute"""'], {}), "('1 minute')\n", (10322, 10334), False, 'from ProofpointThreatResponse import create_incident_field_context, get_emails_context, pass_sources_list_filter, pass_abuse_disposition_filter, filter_incidents, prepare_ingest_alert_request_body, get_incidents_batch_by_time_request, get_new_incidents, get_time_delta\n'), ((10392, 10417), 'ProofpointThreatResponse.get_time_delta', 'get_time_delta', (['"""2 hours"""'], {}), "('2 hours')\n", (10406, 10417), False, 'from ProofpointThreatResponse import create_incident_field_context, get_emails_context, pass_sources_list_filter, pass_abuse_disposition_filter, filter_incidents, prepare_ingest_alert_request_body, get_incidents_batch_by_time_request, get_new_incidents, get_time_delta\n'), ((10475, 10494), 'ProofpointThreatResponse.get_time_delta', 'get_time_delta', (['"""2"""'], {}), "('2')\n", (10489, 10494), False, 'from ProofpointThreatResponse import create_incident_field_context, get_emails_context, pass_sources_list_filter, pass_abuse_disposition_filter, filter_incidents, prepare_ingest_alert_request_body, get_incidents_batch_by_time_request, get_new_incidents, get_time_delta\n'), ((10685, 10709), 'ProofpointThreatResponse.get_time_delta', 'get_time_delta', (['"""2 days"""'], {}), "('2 days')\n", (10699, 10709), False, 'from ProofpointThreatResponse 
import create_incident_field_context, get_emails_context, pass_sources_list_filter, pass_abuse_disposition_filter, filter_incidents, prepare_ingest_alert_request_body, get_incidents_batch_by_time_request, get_new_incidents, get_time_delta\n')] |
l1haoyuan/macholib | macholib/macho_methname.py | 48c59841e2ca5aa308eab67f72faed384a2c0723 | import sys
import os
import json
from enum import Enum
from .mach_o import LC_SYMTAB
from macholib import MachO
from macholib import mach_o
from shutil import copy2
from shutil import SameFileError
class ReplaceType(Enum):
objc_methname = 1
symbol_table = 2
def replace_in_bytes(method_bytes, name_dict, type):
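    # Entries in __objc_methname are NUL-delimited C strings, so a leading b'\x00'
    # is ensured before matching to avoid touching substrings of longer selectors.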
is_prefix = False
empty_byte = b'\x00'
if not method_bytes.startswith(empty_byte):
is_prefix = True
method_bytes = empty_byte + method_bytes
for key, value in name_dict.items():
if len(key) != len(value):
raise("replace method name with different length may break the mach-o file, ori: " +
key + ", dst: " + value)
if type == ReplaceType.objc_methname:
method_bytes = method_bytes.replace(
empty_byte + key.encode('utf-8') + empty_byte, empty_byte + value.encode('utf-8') + empty_byte)
elif type == ReplaceType.symbol_table:
method_bytes = method_bytes.replace(
b' ' + key.encode('utf-8') + b']', b' ' + value.encode('utf-8') + b']')
if is_prefix:
method_bytes = method_bytes.replace(empty_byte, b'', 1)
return method_bytes
def ch_methname_sect(header, name_dict):
commands = header.commands
lc = None
sect = None
for _, command_tuple in enumerate(commands):
seg = command_tuple[1]
data = command_tuple[2]
if hasattr(seg, 'segname') and seg.segname.rstrip(b'\x00') == b'__TEXT':
for tmp_sect in data:
if tmp_sect.sectname.rstrip(b'\x00') == b'__objc_methname':
lc = command_tuple[0]
sect = tmp_sect
if sect is None:
raise("Can't find __objc_methname section")
sect.section_data = replace_in_bytes(
sect.section_data, name_dict, ReplaceType.objc_methname)
header.mod_dict[lc] = [sect]
def ch_symtab(header, name_dict):
commands = header.commands
for idx, command_tuple in enumerate(commands):
lc = command_tuple[0]
cmd = command_tuple[1]
data = command_tuple[2]
if lc.cmd == LC_SYMTAB:
data = replace_in_bytes(data, name_dict, ReplaceType.symbol_table)
header.mod_dict[lc] = [data]
commands[idx] = (lc, cmd, data)
return
raise("Can't find LC_SYMTAB")
def replace_methname(macho_file, methname_json, output_dir):
"""
Map method names in Mach-O file with the JSON file
"""
    if not os.path.isfile(macho_file):
        raise ValueError("file does not exist: " + macho_file)
    if not os.path.isfile(methname_json):
        raise ValueError("file does not exist: " + methname_json)
    if output_dir is not None and not os.path.isdir(output_dir):
        raise ValueError("directory does not exist: " + output_dir)
macho = MachO.MachO(macho_file)
name_dict = None
with open(methname_json) as json_file:
name_dict = json.load(json_file)
for header in macho.headers:
ch_methname_sect(header, name_dict)
ch_symtab(header, name_dict)
ori_dir, filename = os.path.split(macho_file)
if output_dir is None:
output_dir = ori_dir
output = os.path.join(output_dir, filename)
try:
copy2(macho_file, output_dir)
except SameFileError:
pass
with open(output, 'r+b') as fp:
macho.write(fp)
os.chmod(output, 0o755)
def main():
    replace_methname(sys.argv[1], sys.argv[2], sys.argv[3])
if __name__ == '__main__':
main()
| [((2829, 2852), 'macholib.MachO.MachO', 'MachO.MachO', (['macho_file'], {}), '(macho_file)\n', (2840, 2852), False, 'from macholib import MachO\n'), ((3099, 3124), 'os.path.split', 'os.path.split', (['macho_file'], {}), '(macho_file)\n', (3112, 3124), False, 'import os\n'), ((3194, 3228), 'os.path.join', 'os.path.join', (['output_dir', 'filename'], {}), '(output_dir, filename)\n', (3206, 3228), False, 'import os\n'), ((3385, 3406), 'os.chmod', 'os.chmod', (['output', '(493)'], {}), '(output, 493)\n', (3393, 3406), False, 'import os\n'), ((2517, 2543), 'os.path.isfile', 'os.path.isfile', (['macho_file'], {}), '(macho_file)\n', (2531, 2543), False, 'import os\n'), ((2610, 2639), 'os.path.isfile', 'os.path.isfile', (['methname_json'], {}), '(methname_json)\n', (2624, 2639), False, 'import os\n'), ((2938, 2958), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (2947, 2958), False, 'import json\n'), ((3251, 3280), 'shutil.copy2', 'copy2', (['macho_file', 'output_dir'], {}), '(macho_file, output_dir)\n', (3256, 3280), False, 'from shutil import copy2\n'), ((2736, 2761), 'os.path.isdir', 'os.path.isdir', (['output_dir'], {}), '(output_dir)\n', (2749, 2761), False, 'import os\n')] |
FloFincke/affective-chat | archive/data-processing/archive/features/sd1.py | 241c2b555541968f7e5e70b022fdb71102aed510 | #!/usr/bin/env python
import math
import numpy as np
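# SD1 feature of an RR-interval series; sqrt(0.5 * sdnn**2) reduces to sdnn / sqrt(2).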
def sd1(rr):
sdnn = np.std(rr)
return math.sqrt(0.5 * sdnn * sdnn) | [((76, 86), 'numpy.std', 'np.std', (['rr'], {}), '(rr)\n', (82, 86), True, 'import numpy as np\n'), ((95, 123), 'math.sqrt', 'math.sqrt', (['(0.5 * sdnn * sdnn)'], {}), '(0.5 * sdnn * sdnn)\n', (104, 123), False, 'import math\n')] |
soheilv/python-samples | forms/snippets/delete_watch.py | 4443431261dbcd88408dcc89d5702eeb1ac18ffd | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START forms_delete_watch]
from __future__ import print_function
from apiclient import discovery
from httplib2 import Http
from oauth2client import client, file, tools
SCOPES = "https://www.googleapis.com/auth/drive"
API_KEY = "<YOUR_API_KEY>"
DISCOVERY_DOC = f"https://forms.googleapis.com/$discovery/rest?version=v1beta&key={API_KEY}&labels=FORMS_BETA_TESTERS"
store = file.Storage('credentials.json')
creds = None
if not creds or creds.invalid:
flow = client.flow_from_clientsecrets('client_secret.json', SCOPES)
creds = tools.run_flow(flow, store)
service = discovery.build('forms', 'v1beta', http=creds.authorize(
Http()), discoveryServiceUrl=DISCOVERY_DOC, static_discovery=False)
form_id = '<YOUR_FORM_ID>'
watch_id = '<YOUR_WATCH_ID>'
# Print JSON response after deleting a form watch
result = service.forms().watches().delete(formId=form_id, watchId=watch_id).execute()
print(result)
# [END forms_delete_watch]
| [((952, 984), 'oauth2client.file.Storage', 'file.Storage', (['"""credentials.json"""'], {}), "('credentials.json')\n", (964, 984), False, 'from oauth2client import client, file, tools\n'), ((1040, 1100), 'oauth2client.client.flow_from_clientsecrets', 'client.flow_from_clientsecrets', (['"""client_secret.json"""', 'SCOPES'], {}), "('client_secret.json', SCOPES)\n", (1070, 1100), False, 'from oauth2client import client, file, tools\n'), ((1113, 1140), 'oauth2client.tools.run_flow', 'tools.run_flow', (['flow', 'store'], {}), '(flow, store)\n', (1127, 1140), False, 'from oauth2client import client, file, tools\n'), ((1212, 1218), 'httplib2.Http', 'Http', ([], {}), '()\n', (1216, 1218), False, 'from httplib2 import Http\n')] |
egagraha/python-algorithm | data_structures/disjoint_set/disjoint_set.py | 07a6a745b4ebddc93ab7c10b205c75b2427ac1fb | """
Disjoint set.
Reference: https://en.wikipedia.org/wiki/Disjoint-set_data_structure
"""
class Node:
def __init__(self, data: int) -> None:
self.data = data
self.rank: int
self.parent: Node
def make_set(x: Node) -> None:
"""
Make x as a set.
"""
# rank is the distance from x to its' parent
# root's rank is 0
x.rank = 0
x.parent = x
def union_set(x: Node, y: Node) -> None:
"""
Union of two sets.
set with bigger rank should be parent, so that the
disjoint set tree will be more flat.
"""
x, y = find_set(x), find_set(y)
if x == y:
return
elif x.rank > y.rank:
y.parent = x
else:
x.parent = y
if x.rank == y.rank:
y.rank += 1
def find_set(x: Node) -> Node:
"""
Return the parent of x
"""
if x != x.parent:
x.parent = find_set(x.parent)
return x.parent
def find_python_set(node: Node) -> set:
"""
Return a Python Standard Library set that contains i.
"""
sets = ({0, 1, 2}, {3, 4, 5})
for s in sets:
if node.data in s:
return s
raise ValueError(f"{node.data} is not in {sets}")
def test_disjoint_set() -> None:
"""
>>> test_disjoint_set()
"""
vertex = [Node(i) for i in range(6)]
for v in vertex:
make_set(v)
union_set(vertex[0], vertex[1])
union_set(vertex[1], vertex[2])
union_set(vertex[3], vertex[4])
union_set(vertex[3], vertex[5])
for node0 in vertex:
for node1 in vertex:
if find_python_set(node0).isdisjoint(find_python_set(node1)):
assert find_set(node0) != find_set(node1)
else:
assert find_set(node0) == find_set(node1)
if __name__ == "__main__":
test_disjoint_set()
| [] |
tkeller12/spin_physics | cw_EPR.py | 271f3081bc8ca87b159ed3e3494dbd0ffdea8fa5 | # Timothy Keller
# S = 1/2, I = 1/2
# Spin 1/2 electron coupled to spin 1/2 nuclei
import numpy as np
from scipy.linalg import expm
from matplotlib.pylab import *
from matplotlib import cm
sigma_x = 0.5*np.r_[[[0, 1],[1, 0]]]
sigma_y = 0.5*np.r_[[[0,-1j],[1j, 0]]]
sigma_z = 0.5*np.r_[[[1, 0],[0, -1]]]
Identity = np.eye(2)
Sx = np.kron(sigma_x, Identity)
Sy = np.kron(sigma_y, Identity)
Sz = np.kron(sigma_z, Identity)
Ix = np.kron(Identity, sigma_x)
Iy = np.kron(Identity, sigma_y)
Iz = np.kron(Identity, sigma_z)
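# Single-spin operators lifted into the two-spin (electron ⊗ nucleus) product space via Kronecker products.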
SxIx = np.kron(sigma_x,sigma_z)
SxIx2 = np.dot(Sx,Iz)
print(SxIx)
print(SxIx2)
print(np.allclose(SxIx,SxIx2))
omega_S = 1.76e11 # rad / (s * T)
omega_I = 267.522e6 # rad / (s * T)
Aiso = 2*np.pi * 50.e6 # Isotropic Hyperfine coupling rad / s
B0 = 0.35# T
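# Spin Hamiltonian: electron Zeeman + nuclear Zeeman + secular isotropic hyperfine (Sz.Iz) term.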
H = omega_S/(2.*np.pi)*B0*Sz + omega_I/(2.*np.pi)*B0*Iz + Aiso * np.dot(Sz,Iz)
#H = omega_S/(2.*np.pi)*B0*Sz + omega_I/(2.*np.pi)*B0*Iz + Aiso * (np.dot(Sx,Ix) + np.dot(Sy,Iy) + np.dot(Sz,Iz))
print('Hamiltonian:')
print(H)
out = np.linalg.eig(H)
E = out[0]
print(E)
E12 = E[0] - E[1]
E34 = E[2] - E[3]
E13 = E[0] - E[2]
E24 = E[1] - E[3]
print(E12)
print(E34)
print(E13)
print(E24)
print('Nuclear')
print('%0.05f MHz'%(E12 / 1e6))
print('%0.05f MHz'%(E34 / 1e6))
print('Electron')
print('%0.05f GHz'%(E13 / 1e9))
print('%0.05f GHz'%(E24 / 1e9))
matshow(abs(H), cmap = cm.jet)
title('Hamiltonian')
show()
| [((316, 325), 'numpy.eye', 'np.eye', (['(2)'], {}), '(2)\n', (322, 325), True, 'import numpy as np\n'), ((333, 359), 'numpy.kron', 'np.kron', (['sigma_x', 'Identity'], {}), '(sigma_x, Identity)\n', (340, 359), True, 'import numpy as np\n'), ((365, 391), 'numpy.kron', 'np.kron', (['sigma_y', 'Identity'], {}), '(sigma_y, Identity)\n', (372, 391), True, 'import numpy as np\n'), ((397, 423), 'numpy.kron', 'np.kron', (['sigma_z', 'Identity'], {}), '(sigma_z, Identity)\n', (404, 423), True, 'import numpy as np\n'), ((430, 456), 'numpy.kron', 'np.kron', (['Identity', 'sigma_x'], {}), '(Identity, sigma_x)\n', (437, 456), True, 'import numpy as np\n'), ((462, 488), 'numpy.kron', 'np.kron', (['Identity', 'sigma_y'], {}), '(Identity, sigma_y)\n', (469, 488), True, 'import numpy as np\n'), ((494, 520), 'numpy.kron', 'np.kron', (['Identity', 'sigma_z'], {}), '(Identity, sigma_z)\n', (501, 520), True, 'import numpy as np\n'), ((529, 554), 'numpy.kron', 'np.kron', (['sigma_x', 'sigma_z'], {}), '(sigma_x, sigma_z)\n', (536, 554), True, 'import numpy as np\n'), ((563, 577), 'numpy.dot', 'np.dot', (['Sx', 'Iz'], {}), '(Sx, Iz)\n', (569, 577), True, 'import numpy as np\n'), ((1012, 1028), 'numpy.linalg.eig', 'np.linalg.eig', (['H'], {}), '(H)\n', (1025, 1028), True, 'import numpy as np\n'), ((609, 633), 'numpy.allclose', 'np.allclose', (['SxIx', 'SxIx2'], {}), '(SxIx, SxIx2)\n', (620, 633), True, 'import numpy as np\n'), ((846, 860), 'numpy.dot', 'np.dot', (['Sz', 'Iz'], {}), '(Sz, Iz)\n', (852, 860), True, 'import numpy as np\n')] |
TOMJERRY23333/V2RayCloudSpider | V2RaycSpider0825/MiddleKey/VMes_IO.py | 0647db8c7b67e4393d1f65dadc08d7e16c1dc324 | from spiderNest.preIntro import *
path_ = os.path.dirname(os.path.dirname(__file__)) + '/dataBase/log_information.csv'
def save_login_info(VMess, class_):
"""
    Store a VMess subscription link in the log.
class_: ssr or v2ray
"""
now = str(datetime.now()).split('.')[0]
with open(path_, 'a', encoding='utf-8', newline='') as f:
writer = csv.writer(f)
        # row: time stored, VMess link, class, initial status '0'
writer.writerow(['{}'.format(now), '{}'.format(VMess), class_, '0'])
def vmess_IO(class_):
"""
    Fetch an available subscription link and refresh the storage pool.
class_: ssr ; v2ray
"""
def refresh_log(dataFlow):
with open(path_, 'w', encoding='utf-8', newline='') as f:
writer = csv.writer(f)
writer.writerows(dataFlow)
try:
with open(path_, 'r', encoding='utf-8') as f:
reader = csv.reader(f)
vm_q = [vm for vm in reader]
new_q = vm_q
for i, value in enumerate(reversed(vm_q)):
if value[-1] == '0' and value[-2] == class_:
vm = value[1]
new_q[-(i + 1)][-1] = '1'
break
refresh_log(new_q)
return vm
except UnboundLocalError:
        return 'No available subscription link'
def avi_num():
from datetime import datetime, timedelta
with open(path_, 'r', encoding='utf-8') as f:
reader = csv.reader(f)
vm_list = [i for i in reader]
# ['2020-08-06 04:27:59', 'link','class_', '1']
vm_q = [vm for vm in vm_list if vm[-1] == '0']
tag_items = ''
for vm in vm_list:
if vm[-1] == '0':
bei_ing_time = datetime.fromisoformat(vm[0]) + timedelta(hours=12)
            tag_items += '\n[Available][{}] #{}'.format(bei_ing_time, vm[-2])
# return vm_q.__len__()
return tag_items
| [((225, 239), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (237, 239), False, 'from datetime import datetime, timedelta\n'), ((1602, 1631), 'datetime.datetime.fromisoformat', 'datetime.fromisoformat', (['vm[0]'], {}), '(vm[0])\n', (1624, 1631), False, 'from datetime import datetime, timedelta\n'), ((1634, 1653), 'datetime.timedelta', 'timedelta', ([], {'hours': '(12)'}), '(hours=12)\n', (1643, 1653), False, 'from datetime import datetime, timedelta\n')] |
AK391/PaddleSpeech | paddlespeech/s2t/frontend/audio.py | 8cdbe3a6c0fe447e54cfbcfd82139d2869f5fc49 | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains the audio segment class."""
import copy
import io
import random
import re
import struct
import numpy as np
import resampy
import soundfile
from scipy import signal
from .utility import convert_samples_from_float32
from .utility import convert_samples_to_float32
from .utility import subfile_from_tar
class AudioSegment():
"""Monaural audio segment abstraction.
:param samples: Audio samples [num_samples x num_channels].
:type samples: ndarray.float32
:param sample_rate: Audio sample rate.
:type sample_rate: int
:raises TypeError: If the sample data type is not float or int.
"""
def __init__(self, samples, sample_rate):
"""Create audio segment from samples.
        Samples are converted to float32 internally, with integer types scaled to [-1, 1].
"""
self._samples = self._convert_samples_to_float32(samples)
self._sample_rate = sample_rate
if self._samples.ndim >= 2:
self._samples = np.mean(self._samples, 1)
def __eq__(self, other):
"""Return whether two objects are equal."""
if type(other) is not type(self):
return False
if self._sample_rate != other._sample_rate:
return False
if self._samples.shape != other._samples.shape:
return False
if np.any(self.samples != other._samples):
return False
return True
def __ne__(self, other):
"""Return whether two objects are unequal."""
return not self.__eq__(other)
def __str__(self):
"""Return human-readable representation of segment."""
return ("%s: num_samples=%d, sample_rate=%d, duration=%.2fsec, "
"rms=%.2fdB" % (type(self), self.num_samples, self.sample_rate,
self.duration, self.rms_db))
@classmethod
def from_file(cls, file, infos=None):
"""Create audio segment from audio file.
Args:
filepath (str|file): Filepath or file object to audio file.
infos (TarLocalData, optional): tar2obj and tar2infos. Defaults to None.
Returns:
AudioSegment: Audio segment instance.
"""
if isinstance(file, str) and re.findall(r".seqbin_\d+$", file):
return cls.from_sequence_file(file)
elif isinstance(file, str) and file.startswith('tar:'):
return cls.from_file(subfile_from_tar(file, infos))
else:
samples, sample_rate = soundfile.read(file, dtype='float32')
return cls(samples, sample_rate)
@classmethod
def slice_from_file(cls, file, start=None, end=None):
"""Loads a small section of an audio without having to load
the entire file into the memory which can be incredibly wasteful.
:param file: Input audio filepath or file object.
:type file: str|file
:param start: Start time in seconds. If start is negative, it wraps
around from the end. If not provided, this function
reads from the very beginning.
:type start: float
:param end: End time in seconds. If end is negative, it wraps around
                    from the end. If not provided, the default behavior is
to read to the end of the file.
:type end: float
:return: AudioSegment instance of the specified slice of the input
audio file.
:rtype: AudioSegment
:raise ValueError: If start or end is incorrectly set, e.g. out of
bounds in time.
"""
sndfile = soundfile.SoundFile(file)
sample_rate = sndfile.samplerate
duration = float(len(sndfile)) / sample_rate
start = 0. if start is None else start
end = duration if end is None else end
if start < 0.0:
start += duration
if end < 0.0:
end += duration
if start < 0.0:
raise ValueError("The slice start position (%f s) is out of "
"bounds." % start)
if end < 0.0:
raise ValueError("The slice end position (%f s) is out of bounds." %
end)
if start > end:
raise ValueError("The slice start position (%f s) is later than "
"the slice end position (%f s)." % (start, end))
if end > duration:
raise ValueError("The slice end position (%f s) is out of bounds "
"(> %f s)" % (end, duration))
start_frame = int(start * sample_rate)
end_frame = int(end * sample_rate)
sndfile.seek(start_frame)
data = sndfile.read(frames=end_frame - start_frame, dtype='float32')
return cls(data, sample_rate)
@classmethod
def from_sequence_file(cls, filepath):
"""Create audio segment from sequence file. Sequence file is a binary
file containing a collection of multiple audio files, with several
header bytes in the head indicating the offsets of each audio byte data
chunk.
The format is:
4 bytes (int, version),
4 bytes (int, num of utterance),
4 bytes (int, bytes per header),
[bytes_per_header*(num_utterance+1)] bytes (offsets for each audio),
audio_bytes_data_of_1st_utterance,
audio_bytes_data_of_2nd_utterance,
......
Sequence file name must end with ".seqbin". And the filename of the 5th
utterance's audio file in sequence file "xxx.seqbin" must be
"xxx.seqbin_5", with "5" indicating the utterance index within this
sequence file (starting from 1).
:param filepath: Filepath of sequence file.
:type filepath: str
:return: Audio segment instance.
:rtype: AudioSegment
"""
# parse filepath
matches = re.match(r"(.+\.seqbin)_(\d+)", filepath)
if matches is None:
raise IOError("File type of %s is not supported" % filepath)
filename = matches.group(1)
fileno = int(matches.group(2))
# read headers
f = io.open(filename, mode='rb', encoding='utf8')
version = f.read(4)
num_utterances = struct.unpack("i", f.read(4))[0]
bytes_per_header = struct.unpack("i", f.read(4))[0]
header_bytes = f.read(bytes_per_header * (num_utterances + 1))
header = [
struct.unpack("i", header_bytes[bytes_per_header * i:
bytes_per_header * (i + 1)])[0]
for i in range(num_utterances + 1)
]
# read audio bytes
f.seek(header[fileno - 1])
audio_bytes = f.read(header[fileno] - header[fileno - 1])
f.close()
# create audio segment
try:
return cls.from_bytes(audio_bytes)
except Exception as e:
samples = np.frombuffer(audio_bytes, dtype='int16')
return cls(samples=samples, sample_rate=8000)
@classmethod
def from_bytes(cls, bytes):
"""Create audio segment from a byte string containing audio samples.
:param bytes: Byte string containing audio samples.
:type bytes: str
:return: Audio segment instance.
:rtype: AudioSegment
"""
samples, sample_rate = soundfile.read(
io.BytesIO(bytes), dtype='float32')
return cls(samples, sample_rate)
@classmethod
def concatenate(cls, *segments):
"""Concatenate an arbitrary number of audio segments together.
:param *segments: Input audio segments to be concatenated.
:type *segments: tuple of AudioSegment
:return: Audio segment instance as concatenating results.
:rtype: AudioSegment
:raises ValueError: If the number of segments is zero, or if the
sample_rate of any segments does not match.
:raises TypeError: If any segment is not AudioSegment instance.
"""
# Perform basic sanity-checks.
if len(segments) == 0:
raise ValueError("No audio segments are given to concatenate.")
sample_rate = segments[0]._sample_rate
for seg in segments:
if sample_rate != seg._sample_rate:
raise ValueError("Can't concatenate segments with "
"different sample rates")
if type(seg) is not cls:
raise TypeError("Only audio segments of the same type "
"can be concatenated.")
samples = np.concatenate([seg.samples for seg in segments])
return cls(samples, sample_rate)
@classmethod
def make_silence(cls, duration, sample_rate):
"""Creates a silent audio segment of the given duration and sample rate.
:param duration: Length of silence in seconds.
:type duration: float
:param sample_rate: Sample rate.
:type sample_rate: float
:return: Silent AudioSegment instance of the given duration.
:rtype: AudioSegment
"""
samples = np.zeros(int(duration * sample_rate))
return cls(samples, sample_rate)
def to_wav_file(self, filepath, dtype='float32'):
"""Save audio segment to disk as wav file.
:param filepath: WAV filepath or file object to save the
audio segment.
:type filepath: str|file
:param dtype: Subtype for audio file. Options: 'int16', 'int32',
'float32', 'float64'. Default is 'float32'.
:type dtype: str
:raises TypeError: If dtype is not supported.
"""
samples = self._convert_samples_from_float32(self._samples, dtype)
subtype_map = {
'int16': 'PCM_16',
'int32': 'PCM_32',
'float32': 'FLOAT',
'float64': 'DOUBLE'
}
soundfile.write(
filepath,
samples,
self._sample_rate,
format='WAV',
subtype=subtype_map[dtype])
def superimpose(self, other):
"""Add samples from another segment to those of this segment
(sample-wise addition, not segment concatenation).
Note that this is an in-place transformation.
:param other: Segment containing samples to be added in.
        :type other: AudioSegment
:raise TypeError: If type of two segments don't match.
:raise ValueError: If the sample rates of the two segments are not
equal, or if the lengths of segments don't match.
"""
        if not isinstance(other, type(self)):
raise TypeError("Cannot add segments of different types: %s "
"and %s." % (type(self), type(other)))
if self._sample_rate != other._sample_rate:
raise ValueError("Sample rates must match to add segments.")
if len(self._samples) != len(other._samples):
raise ValueError("Segment lengths must match to add segments.")
self._samples += other._samples
def to_bytes(self, dtype='float32'):
"""Create a byte string containing the audio content.
:param dtype: Data type for export samples. Options: 'int16', 'int32',
'float32', 'float64'. Default is 'float32'.
:type dtype: str
:return: Byte string containing audio content.
:rtype: str
"""
samples = self._convert_samples_from_float32(self._samples, dtype)
        return samples.tobytes()
def to(self, dtype='int16'):
"""Create a `dtype` audio content.
:param dtype: Data type for export samples. Options: 'int16', 'int32',
                      'float32', 'float64'. Default is 'int16'.
:type dtype: str
:return: np.ndarray containing `dtype` audio content.
:rtype: str
"""
samples = self._convert_samples_from_float32(self._samples, dtype)
return samples
def gain_db(self, gain):
"""Apply gain in decibels to samples.
Note that this is an in-place transformation.
:param gain: Gain in decibels to apply to samples.
:type gain: float|1darray
"""
self._samples *= 10.**(gain / 20.)
def change_speed(self, speed_rate):
"""Change the audio speed by linear interpolation.
Note that this is an in-place transformation.
:param speed_rate: Rate of speed change:
speed_rate > 1.0, speed up the audio;
speed_rate = 1.0, unchanged;
speed_rate < 1.0, slow down the audio;
speed_rate <= 0.0, not allowed, raise ValueError.
:type speed_rate: float
:raises ValueError: If speed_rate <= 0.0.
"""
if speed_rate == 1.0:
return
if speed_rate <= 0:
raise ValueError("speed_rate should be greater than zero.")
# numpy
# old_length = self._samples.shape[0]
# new_length = int(old_length / speed_rate)
# old_indices = np.arange(old_length)
# new_indices = np.linspace(start=0, stop=old_length, num=new_length)
# self._samples = np.interp(new_indices, old_indices, self._samples)
# sox, slow
try:
import soxbindings as sox
except:
try:
from paddlespeech.s2t.utils import dynamic_pip_install
package = "sox"
dynamic_pip_install.install(package)
package = "soxbindings"
dynamic_pip_install.install(package)
import soxbindings as sox
except:
raise RuntimeError("Can not install soxbindings on your system." )
tfm = sox.Transformer()
tfm.set_globals(multithread=False)
tfm.speed(speed_rate)
self._samples = tfm.build_array(
input_array=self._samples,
sample_rate_in=self._sample_rate).squeeze(-1).astype(
np.float32).copy()
def normalize(self, target_db=-20, max_gain_db=300.0):
"""Normalize audio to be of the desired RMS value in decibels.
Note that this is an in-place transformation.
:param target_db: Target RMS value in decibels. This value should be
less than 0.0 as 0.0 is full-scale audio.
:type target_db: float
:param max_gain_db: Max amount of gain in dB that can be applied for
normalization. This is to prevent nans when
attempting to normalize a signal consisting of
all zeros.
:type max_gain_db: float
:raises ValueError: If the required gain to normalize the segment to
the target_db value exceeds max_gain_db.
"""
gain = target_db - self.rms_db
if gain > max_gain_db:
raise ValueError(
"Unable to normalize segment to %f dB because the "
"the probable gain have exceeds max_gain_db (%f dB)" %
(target_db, max_gain_db))
self.gain_db(min(max_gain_db, target_db - self.rms_db))
def normalize_online_bayesian(self,
target_db,
prior_db,
prior_samples,
startup_delay=0.0):
"""Normalize audio using a production-compatible online/causal
algorithm. This uses an exponential likelihood and gamma prior to
make online estimates of the RMS even when there are very few samples.
Note that this is an in-place transformation.
:param target_db: Target RMS value in decibels.
        :type target_db: float
:param prior_db: Prior RMS estimate in decibels.
:type prior_db: float
:param prior_samples: Prior strength in number of samples.
:type prior_samples: float
:param startup_delay: Default 0.0s. If provided, this function will
accrue statistics for the first startup_delay
seconds before applying online normalization.
:type startup_delay: float
"""
# Estimate total RMS online.
startup_sample_idx = min(self.num_samples - 1,
int(self.sample_rate * startup_delay))
prior_mean_squared = 10.**(prior_db / 10.)
prior_sum_of_squares = prior_mean_squared * prior_samples
cumsum_of_squares = np.cumsum(self.samples**2)
sample_count = np.arange(self.num_samples) + 1
if startup_sample_idx > 0:
cumsum_of_squares[:startup_sample_idx] = \
cumsum_of_squares[startup_sample_idx]
sample_count[:startup_sample_idx] = \
sample_count[startup_sample_idx]
mean_squared_estimate = ((cumsum_of_squares + prior_sum_of_squares) /
(sample_count + prior_samples))
rms_estimate_db = 10 * np.log10(mean_squared_estimate)
# Compute required time-varying gain.
gain_db = target_db - rms_estimate_db
self.gain_db(gain_db)
def resample(self, target_sample_rate, filter='kaiser_best'):
"""Resample the audio to a target sample rate.
Note that this is an in-place transformation.
:param target_sample_rate: Target sample rate.
:type target_sample_rate: int
:param filter: The resampling filter to use one of {'kaiser_best',
'kaiser_fast'}.
:type filter: str
"""
self._samples = resampy.resample(
self.samples, self.sample_rate, target_sample_rate, filter=filter)
self._sample_rate = target_sample_rate
def pad_silence(self, duration, sides='both'):
"""Pad this audio sample with a period of silence.
Note that this is an in-place transformation.
:param duration: Length of silence in seconds to pad.
:type duration: float
:param sides: Position for padding:
'beginning' - adds silence in the beginning;
'end' - adds silence in the end;
'both' - adds silence in both the beginning and the end.
:type sides: str
:raises ValueError: If sides is not supported.
"""
if duration == 0.0:
return self
cls = type(self)
silence = self.make_silence(duration, self._sample_rate)
if sides == "beginning":
padded = cls.concatenate(silence, self)
elif sides == "end":
padded = cls.concatenate(self, silence)
elif sides == "both":
padded = cls.concatenate(silence, self, silence)
else:
raise ValueError("Unknown value for the sides %s" % sides)
self._samples = padded._samples
def shift(self, shift_ms):
"""Shift the audio in time. If `shift_ms` is positive, shift with time
advance; if negative, shift with time delay. Silence are padded to
keep the duration unchanged.
Note that this is an in-place transformation.
        :param shift_ms: Shift time in milliseconds. If positive, shift with
time advance; if negative; shift with time delay.
:type shift_ms: float
:raises ValueError: If shift_ms is longer than audio duration.
"""
if abs(shift_ms) / 1000.0 > self.duration:
raise ValueError("Absolute value of shift_ms should be smaller "
"than audio duration.")
shift_samples = int(shift_ms * self._sample_rate / 1000)
if shift_samples > 0:
# time advance
self._samples[:-shift_samples] = self._samples[shift_samples:]
self._samples[-shift_samples:] = 0
elif shift_samples < 0:
# time delay
self._samples[-shift_samples:] = self._samples[:shift_samples]
self._samples[:-shift_samples] = 0
def subsegment(self, start_sec=None, end_sec=None):
"""Cut the AudioSegment between given boundaries.
Note that this is an in-place transformation.
:param start_sec: Beginning of subsegment in seconds.
:type start_sec: float
:param end_sec: End of subsegment in seconds.
:type end_sec: float
:raise ValueError: If start_sec or end_sec is incorrectly set, e.g. out
of bounds in time.
"""
start_sec = 0.0 if start_sec is None else start_sec
end_sec = self.duration if end_sec is None else end_sec
if start_sec < 0.0:
start_sec = self.duration + start_sec
if end_sec < 0.0:
end_sec = self.duration + end_sec
if start_sec < 0.0:
raise ValueError("The slice start position (%f s) is out of "
"bounds." % start_sec)
if end_sec < 0.0:
raise ValueError("The slice end position (%f s) is out of bounds." %
end_sec)
if start_sec > end_sec:
raise ValueError("The slice start position (%f s) is later than "
"the end position (%f s)." % (start_sec, end_sec))
if end_sec > self.duration:
raise ValueError("The slice end position (%f s) is out of bounds "
"(> %f s)" % (end_sec, self.duration))
start_sample = int(round(start_sec * self._sample_rate))
end_sample = int(round(end_sec * self._sample_rate))
self._samples = self._samples[start_sample:end_sample]
def random_subsegment(self, subsegment_length, rng=None):
"""Cut the specified length of the audiosegment randomly.
Note that this is an in-place transformation.
:param subsegment_length: Subsegment length in seconds.
:type subsegment_length: float
:param rng: Random number generator state.
:type rng: random.Random
:raises ValueError: If the length of subsegment is greater than
                            the original segment.
"""
rng = random.Random() if rng is None else rng
if subsegment_length > self.duration:
raise ValueError("Length of subsegment must not be greater "
"than original segment.")
start_time = rng.uniform(0.0, self.duration - subsegment_length)
self.subsegment(start_time, start_time + subsegment_length)
def convolve(self, impulse_segment, allow_resample=False):
"""Convolve this audio segment with the given impulse segment.
Note that this is an in-place transformation.
:param impulse_segment: Impulse response segments.
:type impulse_segment: AudioSegment
:param allow_resample: Indicates whether resampling is allowed when
the impulse_segment has a different sample
rate from this signal.
:type allow_resample: bool
:raises ValueError: If the sample rate is not match between two
audio segments when resample is not allowed.
"""
if allow_resample and self.sample_rate != impulse_segment.sample_rate:
impulse_segment.resample(self.sample_rate)
if self.sample_rate != impulse_segment.sample_rate:
raise ValueError("Impulse segment's sample rate (%d Hz) is not "
"equal to base signal sample rate (%d Hz)." %
(impulse_segment.sample_rate, self.sample_rate))
samples = signal.fftconvolve(self.samples, impulse_segment.samples,
"full")
self._samples = samples
def convolve_and_normalize(self, impulse_segment, allow_resample=False):
"""Convolve and normalize the resulting audio segment so that it
has the same average power as the input signal.
Note that this is an in-place transformation.
:param impulse_segment: Impulse response segments.
:type impulse_segment: AudioSegment
:param allow_resample: Indicates whether resampling is allowed when
the impulse_segment has a different sample
rate from this signal.
:type allow_resample: bool
"""
target_db = self.rms_db
self.convolve(impulse_segment, allow_resample=allow_resample)
self.normalize(target_db)
def add_noise(self,
noise,
snr_dB,
allow_downsampling=False,
max_gain_db=300.0,
rng=None):
"""Add the given noise segment at a specific signal-to-noise ratio.
If the noise segment is longer than this segment, a random subsegment
of matching length is sampled from it and used instead.
Note that this is an in-place transformation.
:param noise: Noise signal to add.
:type noise: AudioSegment
:param snr_dB: Signal-to-Noise Ratio, in decibels.
:type snr_dB: float
:param allow_downsampling: Whether to allow the noise signal to be
downsampled to match the base signal sample
rate.
:type allow_downsampling: bool
:param max_gain_db: Maximum amount of gain to apply to noise signal
before adding it in. This is to prevent attempting
to apply infinite gain to a zero signal.
:type max_gain_db: float
:param rng: Random number generator state.
:type rng: None|random.Random
:raises ValueError: If the sample rate does not match between the two
audio segments when downsampling is not allowed, or
if the duration of noise segments is shorter than
original audio segments.
"""
rng = random.Random() if rng is None else rng
if allow_downsampling and noise.sample_rate > self.sample_rate:
noise = noise.resample(self.sample_rate)
if noise.sample_rate != self.sample_rate:
raise ValueError("Noise sample rate (%d Hz) is not equal to base "
"signal sample rate (%d Hz)." % (noise.sample_rate,
self.sample_rate))
if noise.duration < self.duration:
raise ValueError("Noise signal (%f sec) must be at least as long as"
" base signal (%f sec)." %
(noise.duration, self.duration))
noise_gain_db = min(self.rms_db - noise.rms_db - snr_dB, max_gain_db)
noise_new = copy.deepcopy(noise)
noise_new.random_subsegment(self.duration, rng=rng)
noise_new.gain_db(noise_gain_db)
self.superimpose(noise_new)
@property
def samples(self):
"""Return audio samples.
:return: Audio samples.
:rtype: ndarray
"""
return self._samples.copy()
@property
def sample_rate(self):
"""Return audio sample rate.
:return: Audio sample rate.
:rtype: int
"""
return self._sample_rate
@property
def num_samples(self):
"""Return number of samples.
:return: Number of samples.
:rtype: int
"""
return self._samples.shape[0]
@property
def duration(self):
"""Return audio duration.
:return: Audio duration in seconds.
:rtype: float
"""
return self._samples.shape[0] / float(self._sample_rate)
@property
def rms_db(self):
"""Return root mean square energy of the audio in decibels.
:return: Root mean square energy in decibels.
:rtype: float
"""
# square root => multiply by 10 instead of 20 for dBs
mean_square = np.mean(self._samples**2)
return 10 * np.log10(mean_square)
def _convert_samples_to_float32(self, samples):
"""Convert sample type to float32.
        Audio sample type is usually integer or floating-point.
Integers will be scaled to [-1, 1] in float32.
"""
return convert_samples_to_float32(samples)
def _convert_samples_from_float32(self, samples, dtype):
"""Convert sample type from float32 to dtype.
        Audio sample type is usually integer or floating-point. For integer
        type, float32 will be rescaled from [-1, 1] to the maximum range
        supported by the integer type.
        This is for writing an audio file.
"""
return convert_samples_from_float32(samples, dtype)
| [((1933, 1971), 'numpy.any', 'np.any', (['(self.samples != other._samples)'], {}), '(self.samples != other._samples)\n', (1939, 1971), True, 'import numpy as np\n'), ((4228, 4253), 'soundfile.SoundFile', 'soundfile.SoundFile', (['file'], {}), '(file)\n', (4247, 4253), False, 'import soundfile\n'), ((6540, 6582), 're.match', 're.match', (['"""(.+\\\\.seqbin)_(\\\\d+)"""', 'filepath'], {}), "('(.+\\\\.seqbin)_(\\\\d+)', filepath)\n", (6548, 6582), False, 'import re\n'), ((6794, 6839), 'io.open', 'io.open', (['filename'], {'mode': '"""rb"""', 'encoding': '"""utf8"""'}), "(filename, mode='rb', encoding='utf8')\n", (6801, 6839), False, 'import io\n'), ((9243, 9292), 'numpy.concatenate', 'np.concatenate', (['[seg.samples for seg in segments]'], {}), '([seg.samples for seg in segments])\n', (9257, 9292), True, 'import numpy as np\n'), ((10568, 10668), 'soundfile.write', 'soundfile.write', (['filepath', 'samples', 'self._sample_rate'], {'format': '"""WAV"""', 'subtype': 'subtype_map[dtype]'}), "(filepath, samples, self._sample_rate, format='WAV', subtype\n =subtype_map[dtype])\n", (10583, 10668), False, 'import soundfile\n'), ((14482, 14499), 'soxbindings.Transformer', 'sox.Transformer', ([], {}), '()\n', (14497, 14499), True, 'import soxbindings as sox\n'), ((17289, 17317), 'numpy.cumsum', 'np.cumsum', (['(self.samples ** 2)'], {}), '(self.samples ** 2)\n', (17298, 17317), True, 'import numpy as np\n'), ((18389, 18477), 'resampy.resample', 'resampy.resample', (['self.samples', 'self.sample_rate', 'target_sample_rate'], {'filter': 'filter'}), '(self.samples, self.sample_rate, target_sample_rate, filter\n =filter)\n', (18405, 18477), False, 'import resampy\n'), ((24425, 24490), 'scipy.signal.fftconvolve', 'signal.fftconvolve', (['self.samples', 'impulse_segment.samples', '"""full"""'], {}), "(self.samples, impulse_segment.samples, 'full')\n", (24443, 24490), False, 'from scipy import signal\n'), ((27628, 27648), 'copy.deepcopy', 'copy.deepcopy', (['noise'], {}), '(noise)\n', (27641, 27648), False, 'import copy\n'), ((28824, 28851), 'numpy.mean', 'np.mean', (['(self._samples ** 2)'], {}), '(self._samples ** 2)\n', (28831, 28851), True, 'import numpy as np\n'), ((1589, 1614), 'numpy.mean', 'np.mean', (['self._samples', '(1)'], {}), '(self._samples, 1)\n', (1596, 1614), True, 'import numpy as np\n'), ((2839, 2872), 're.findall', 're.findall', (['""".seqbin_\\\\d+$"""', 'file'], {}), "('.seqbin_\\\\d+$', file)\n", (2849, 2872), False, 'import re\n'), ((8021, 8038), 'io.BytesIO', 'io.BytesIO', (['bytes'], {}), '(bytes)\n', (8031, 8038), False, 'import io\n'), ((17339, 17366), 'numpy.arange', 'np.arange', (['self.num_samples'], {}), '(self.num_samples)\n', (17348, 17366), True, 'import numpy as np\n'), ((17788, 17819), 'numpy.log10', 'np.log10', (['mean_squared_estimate'], {}), '(mean_squared_estimate)\n', (17796, 17819), True, 'import numpy as np\n'), ((22938, 22953), 'random.Random', 'random.Random', ([], {}), '()\n', (22951, 22953), False, 'import random\n'), ((26832, 26847), 'random.Random', 'random.Random', ([], {}), '()\n', (26845, 26847), False, 'import random\n'), ((28870, 28891), 'numpy.log10', 'np.log10', (['mean_square'], {}), '(mean_square)\n', (28878, 28891), True, 'import numpy as np\n'), ((3099, 3136), 'soundfile.read', 'soundfile.read', (['file'], {'dtype': '"""float32"""'}), "(file, dtype='float32')\n", (3113, 3136), False, 'import soundfile\n'), ((7088, 7173), 'struct.unpack', 'struct.unpack', (['"""i"""', 'header_bytes[bytes_per_header * i:bytes_per_header * (i + 1)]'], {}), "('i', 
header_bytes[bytes_per_header * i:bytes_per_header * (i +\n 1)])\n", (7101, 7173), False, 'import struct\n'), ((7567, 7608), 'numpy.frombuffer', 'np.frombuffer', (['audio_bytes'], {'dtype': '"""int16"""'}), "(audio_bytes, dtype='int16')\n", (7580, 7608), True, 'import numpy as np\n'), ((14192, 14228), 'paddlespeech.s2t.utils.dynamic_pip_install.install', 'dynamic_pip_install.install', (['package'], {}), '(package)\n', (14219, 14228), False, 'from paddlespeech.s2t.utils import dynamic_pip_install\n'), ((14285, 14321), 'paddlespeech.s2t.utils.dynamic_pip_install.install', 'dynamic_pip_install.install', (['package'], {}), '(package)\n', (14312, 14321), False, 'from paddlespeech.s2t.utils import dynamic_pip_install\n')] |
bhch/tornado-debugger | tornado_debugger/debug.py | 4adeead7a45506eda34fc8d1e91dd32acc8cfe4e | import os.path
import re
import sys
import traceback
from pprint import pformat
import tornado
from tornado import template
SENSITIVE_SETTINGS_RE = re.compile(
'api|key|pass|salt|secret|signature|token',
flags=re.IGNORECASE
)
class ExceptionReporter:
def __init__(self, exc_info, handler):
self.exc_type = exc_info[0]
self.exc_value = exc_info[1]
self.exc_tb = exc_info[2]
self.handler = handler
def get_response(self):
loader = template.Loader(os.path.dirname(os.path.abspath(__file__)))
t = loader.load('debug.html')
return t.generate(
traceback=traceback,
pprint=pprint,
handler=self.handler,
app_settings=self.get_app_settings(),
exc_type=self.exc_type,
exc_value=self.exc_value,
exc_tb=self.exc_tb,
frames=self.get_traceback_frames(),
tornado_version=tornado.version,
sys_version='%d.%d.%d' % sys.version_info[0:3],
sys_executable=sys.executable,
sys_path=sys.path,
)
def get_app_settings(self):
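        # Mask the values of settings whose names look sensitive (keys, passwords, tokens, ...).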
settings = {}
for arg, value in self.handler.application.settings.items():
if SENSITIVE_SETTINGS_RE.search(arg):
value = '*' * 15
settings[arg] = value
return settings
def get_source_lines(self, tb):
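        # Collect up to five source lines on either side of the line that raised.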
filename = tb.tb_frame.f_code.co_filename
lineno = tb.tb_lineno
lines = []
try:
with open(filename, 'rb') as f:
_lines = f.read().splitlines()
for _lineno in range(
max(lineno - 5, 0),
min(lineno + 5, len(_lines))
):
lines.append((_lineno + 1, _lines[_lineno]))
except Exception as e:
# could not open file
pass
return lines
def get_traceback_frames(self):
frames = []
tb = self.exc_tb
while tb:
frames.append({
'lineno': tb.tb_lineno,
'filename': tb.tb_frame.f_code.co_filename,
'function': tb.tb_frame.f_code.co_name,
'module_name': tb.tb_frame.f_globals.get('__name__') or '',
'vars': tb.tb_frame.f_locals,
'lines': self.get_source_lines(tb),
})
tb = tb.tb_next
frames.reverse()
        return frames
def _get_explicit_or_implicit_cause(self, exc_value):
explicit = getattr(exc_value, '__cause__', None)
suppress_context = getattr(exc_value, '__suppress_context__', None)
implicit = getattr(exc_value, '__context__', None)
return explicit or (None if suppress_context else implicit)
def pprint(value):
try:
return pformat(value, width=1)
except Exception as e:
return 'Error in formatting: %s: %s' % (e.__class__.__name__, e)
| [((151, 226), 're.compile', 're.compile', (['"""api|key|pass|salt|secret|signature|token"""'], {'flags': 're.IGNORECASE'}), "('api|key|pass|salt|secret|signature|token', flags=re.IGNORECASE)\n", (161, 226), False, 'import re\n'), ((3995, 4018), 'pprint.pformat', 'pformat', (['value'], {'width': '(1)'}), '(value, width=1)\n', (4002, 4018), False, 'from pprint import pformat\n')] |
jpolitz/lambda-py-paper | base/pylib/seq_iter.py | 746ef63fc1123714b4adaf78119028afbea7bd76 | class SeqIter:
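    # Fallback iterator for objects that support indexing (__getitem__) but define
    # no __iter__; used by iter() below.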
def __init__(self,l):
self.l = l
self.i = 0
self.stop = False
def __len__(self):
return len(self.l)
def __list__(self):
l = []
while True:
try:
l.append(self.__next__())
except StopIteration:
break
return l
def __iter__(self):
return self
def __next__(self):
has_length = True
found = False
try:
self.l.__len__()
except AttributeError:
has_length = False
try:
if self.stop:
raise StopIteration()
if has_length and self.i >= self.l.__len__():
self.stop = True
raise StopIteration()
ret = self.l[self.i]
found = True
except IndexError:
raise StopIteration()
except StopIteration:
raise StopIteration()
self.i += 1
if found:
return ret
else:
return None
___assign("%SeqIter", SeqIter)
def iter(l, *args):
callable = ___id("%callable")
if args.__len__() == 1:
if callable(l):
stopwhen = args[0]
return FuncIter(l, stopwhen)
else:
TypeError("iter(v, w): v must be callable")
elif args.__len__() == 0:
try:
return l.__iter__()
except:
try:
if callable(l.__getitem__):
return SeqIter(l)
except:
raise TypeError("object is not iterable")
else:
raise TypeError("iter expect at most 2 arguments")
___assign("%iter", iter)
def next(it, *arg):
if len(arg) == 0:
return it.__next__()
else:
return it.__next__(arg[0])
___assign("%next", next)
class FuncIter:
def __init__(self, func, stopwhen):
self.func = func
self.stopwhen = stopwhen
self.stopped = False
def __list__(self):
l = []
while not self.stopped:
try:
l.append(self.__next__())
except StopIteration:
break
return l
def __next__(self):
f = self.func
v = f()
if v == self.stopwhen:
self.stopped = True
raise StopIteration()
else:
return v
___assign("%FuncIter", FuncIter)
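# Illustrative sketch (added; not part of the original file): FuncIter turns a
# zero-argument callable into an iterator that stops once the callable returns
# the sentinel value, e.g.
#   count = [0]
#   def tick():
#       count[0] += 1
#       return count[0]
#   FuncIter(tick, 3).__list__()   # -> [1, 2]; stops as soon as tick() == 3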
| [] |
code-review-doctor/lite-frontend-1 | caseworker/open_general_licences/enums.py | cb3b885bb389ea33ef003c916bea7b03a36d86bb | from lite_content.lite_internal_frontend.open_general_licences import (
OGEL_DESCRIPTION,
OGTCL_DESCRIPTION,
OGTL_DESCRIPTION,
)
from lite_forms.components import Option
class OpenGeneralExportLicences:
class OpenGeneralLicence:
def __init__(self, id, name, description, acronym):
self.id = id
self.name = name
self.description = description
self.acronym = acronym
open_general_export_licence = OpenGeneralLicence(
"00000000-0000-0000-0000-000000000002",
"Open General Export Licence",
OGEL_DESCRIPTION,
"OGEL",
)
open_general_trade_control_licence = OpenGeneralLicence(
"00000000-0000-0000-0000-000000000013",
"Open General Trade Control Licence",
OGTCL_DESCRIPTION,
"OGTCL",
)
open_general_transhipment_licence = OpenGeneralLicence(
"00000000-0000-0000-0000-000000000014",
"Open General Transhipment Licence",
OGTL_DESCRIPTION,
"OGTL",
)
@classmethod
def all(cls):
return [
cls.open_general_export_licence,
cls.open_general_trade_control_licence,
cls.open_general_transhipment_licence,
]
@classmethod
def as_options(cls):
return [
Option(key=ogl.id, value=f"{ogl.name} ({ogl.acronym})", description=ogl.description) for ogl in cls.all()
]
@classmethod
def get_by_id(cls, id):
return next(ogl for ogl in cls.all() if ogl.id == id)
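# Illustrative usage (added; not part of the original file):
#   OpenGeneralExportLicences.get_by_id("00000000-0000-0000-0000-000000000002").acronym  # -> "OGEL"
#   [opt.key for opt in OpenGeneralExportLicences.as_options()]  # ids of the three licences above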
| [((1318, 1407), 'lite_forms.components.Option', 'Option', ([], {'key': 'ogl.id', 'value': 'f"""{ogl.name} ({ogl.acronym})"""', 'description': 'ogl.description'}), "(key=ogl.id, value=f'{ogl.name} ({ogl.acronym})', description=ogl.\n description)\n", (1324, 1407), False, 'from lite_forms.components import Option\n')] |
pratika1505/DSA-Path-And-Important-Questions | Matrix/Python/rotatematrix.py | a86a0774f0abf5151c852afd2bbf67a5368125c8 | # -*- coding: utf-8 -*-
"""RotateMatrix.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1LX-dZFuQCyBXDNVosTp0MHaZZxoc5T4I
"""
#Function to rotate matrix by 90 degree
def rotate(mat):
# `N × N` matrix
N = len(mat)
# Transpose the matrix
for i in range(N):
for j in range(i):
temp = mat[i][j]
mat[i][j] = mat[j][i]
mat[j][i] = temp
# swap columns
for i in range(N):
for j in range(N // 2):
temp = mat[i][j]
mat[i][j] = mat[i][N - j - 1]
mat[i][N - j - 1] = temp
if __name__ == '__main__':
#Declaring matrix
mat = [
[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12],
[13, 14, 15, 16]
]
rotate(mat)
#printing matrix
for i in mat:
print(i)
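    # Expected output for the sample matrix above (comment added for clarity):
    # [13, 9, 5, 1]
    # [14, 10, 6, 2]
    # [15, 11, 7, 3]
    # [16, 12, 8, 4]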
| [] |
EYH0602/FP_Workshop | sort.py | 866b180b411c1ef439e1a2d039c6d6333e91cd39 | def quicksort(xs):
if len(xs) == 0:
return []
pivot = xs[0]
xs = xs[1:]
left = [x for x in xs if x <= pivot]
right = [x for x in xs if x > pivot]
res = quicksort(left)
res.append(pivot)
res += quicksort(right)
return res
xs = [1, 3, 2, 4, 5, 2]
sorted_xs = quicksort(xs)
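# For the sample input above, sorted_xs == [1, 2, 2, 3, 4, 5] (comment added for clarity).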
| [] |
TarantulaTechnology/fabric5 | bddtests/steps/bdd_test_util.py | 6da971177ab7d74f1e1cfa6f7fc73e75768e5686 |
# Copyright IBM Corp. 2016 All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import re
import subprocess
import devops_pb2
import fabric_pb2
import chaincode_pb2
from grpc.beta import implementations
def cli_call(context, arg_list, expect_success=True):
"""Executes a CLI command in a subprocess and return the results.
@param context: the behave context
    @param arg_list: a list of command arguments
@param expect_success: use False to return even if an error occurred when executing the command
@return: (string, string, int) output message, error message, return code
"""
#arg_list[0] = "update-" + arg_list[0]
# We need to run the cli command by actually calling the python command
# the update-cli.py script has a #!/bin/python as the first line
# which calls the system python, not the virtual env python we
# setup for running the update-cli
p = subprocess.Popen(arg_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, error = p.communicate()
if p.returncode != 0:
if output is not None:
print("Output:\n" + output)
if error is not None:
print("Error Message:\n" + error)
if expect_success:
raise subprocess.CalledProcessError(p.returncode, arg_list, output)
return output, error, p.returncode
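# Illustrative usage sketch (added; not part of the original file). The command is
# arbitrary here -- any argv-style list works:
#   out, err, rc = cli_call(context, ["echo", "hello"], expect_success=False)
#   # out == "hello\n" and rc == 0; with expect_success=True a non-zero rc raises
#   # subprocess.CalledProcessError instead.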
class UserRegistration:
def __init__(self, secretMsg, composeService):
self.secretMsg = secretMsg
self.composeService = composeService
self.tags = {}
self.lastResult = None
def getUserName(self):
return self.secretMsg['enrollId']
def getSecret(self):
return devops_pb2.Secret(enrollId=self.secretMsg['enrollId'],enrollSecret=self.secretMsg['enrollSecret'])
# Registers a user on a specific composeService
def registerUser(context, secretMsg, composeService):
userName = secretMsg['enrollId']
if 'users' in context:
pass
else:
context.users = {}
if userName in context.users:
raise Exception("User already registered: {0}".format(userName))
context.users[userName] = UserRegistration(secretMsg, composeService)
# Looks up a previously registered user by enrollId
def getUserRegistration(context, enrollId):
userRegistration = None
if 'users' in context:
pass
else:
context.users = {}
if enrollId in context.users:
userRegistration = context.users[enrollId]
else:
raise Exception("User has not been registered: {0}".format(enrollId))
return userRegistration
def ipFromContainerNamePart(namePart, containerDataList):
"""Returns the IPAddress based upon a name part of the full container name"""
ip = None
containerNamePrefix = os.path.basename(os.getcwd()) + "_"
for containerData in containerDataList:
if containerData.containerName.startswith(containerNamePrefix + namePart):
ip = containerData.ipAddress
if ip == None:
raise Exception("Could not find container with namePart = {0}".format(namePart))
return ip
def getTxResult(context, enrollId):
'''Returns the TransactionResult using the enrollId supplied'''
assert 'users' in context, "users not found in context. Did you register a user?"
assert 'compose_containers' in context, "compose_containers not found in context"
(channel, userRegistration) = getGRPCChannelAndUser(context, enrollId)
stub = devops_pb2.beta_create_Devops_stub(channel)
txRequest = devops_pb2.TransactionRequest(transactionUuid = context.transactionID)
response = stub.GetTransactionResult(txRequest, 2)
assert response.status == fabric_pb2.Response.SUCCESS, 'Failure getting Transaction Result from {0}, for user "{1}": {2}'.format(userRegistration.composeService,enrollId, response.msg)
# Now grab the TransactionResult from the Msg bytes
txResult = fabric_pb2.TransactionResult()
txResult.ParseFromString(response.msg)
return txResult
def getGRPCChannel(ipAddress):
channel = implementations.insecure_channel(ipAddress, 30303)
print("Returning GRPC for address: {0}".format(ipAddress))
return channel
def getGRPCChannelAndUser(context, enrollId):
'''Returns a tuple of GRPC channel and UserRegistration instance. The channel is open to the composeService that the user registered with.'''
userRegistration = getUserRegistration(context, enrollId)
# Get the IP address of the server that the user registered on
ipAddress = ipFromContainerNamePart(userRegistration.composeService, context.compose_containers)
channel = getGRPCChannel(ipAddress)
return (channel, userRegistration)
def getDeployment(context, ccAlias):
'''Return a deployment with chaincode alias from prior deployment, or None if not found'''
deployment = None
if 'deployments' in context:
pass
else:
context.deployments = {}
if ccAlias in context.deployments:
deployment = context.deployments[ccAlias]
# else:
# raise Exception("Deployment alias not found: '{0}'. Are you sure you have deployed a chaincode with this alias?".format(ccAlias))
return deployment
def deployChaincode(context, enrollId, chaincodePath, ccAlias, ctor):
'''Deploy a chaincode with the specified alias for the specfied enrollId'''
(channel, userRegistration) = getGRPCChannelAndUser(context, enrollId)
stub = devops_pb2.beta_create_Devops_stub(channel)
# Make sure deployment alias does NOT already exist
assert getDeployment(context, ccAlias) == None, "Deployment alias already exists: '{0}'.".format(ccAlias)
args = getArgsFromContextForUser(context, enrollId)
ccSpec = chaincode_pb2.ChaincodeSpec(type = chaincode_pb2.ChaincodeSpec.GOLANG,
chaincodeID = chaincode_pb2.ChaincodeID(name="",path=chaincodePath),
ctorMsg = chaincode_pb2.ChaincodeInput(function = ctor, args = args))
ccSpec.secureContext = userRegistration.getUserName()
if 'metadata' in context:
ccSpec.metadata = context.metadata
try:
ccDeploymentSpec = stub.Deploy(ccSpec, 60)
ccSpec.chaincodeID.name = ccDeploymentSpec.chaincodeSpec.chaincodeID.name
context.grpcChaincodeSpec = ccSpec
context.deployments[ccAlias] = ccSpec
except:
del stub
raise
def invokeChaincode(context, enrollId, ccAlias, functionName):
# Get the deployment for the supplied chaincode alias
deployedCcSpec = getDeployment(context, ccAlias)
assert deployedCcSpec != None, "Deployment NOT found for chaincode alias '{0}'".format(ccAlias)
# Create a new ChaincodeSpec by copying the deployed one
newChaincodeSpec = chaincode_pb2.ChaincodeSpec()
newChaincodeSpec.CopyFrom(deployedCcSpec)
    # Update the chaincodeSpec ctorMsg for invoke
args = getArgsFromContextForUser(context, enrollId)
chaincodeInput = chaincode_pb2.ChaincodeInput(function = functionName, args = args )
newChaincodeSpec.ctorMsg.CopyFrom(chaincodeInput)
ccInvocationSpec = chaincode_pb2.ChaincodeInvocationSpec(chaincodeSpec = newChaincodeSpec)
(channel, userRegistration) = getGRPCChannelAndUser(context, enrollId)
stub = devops_pb2.beta_create_Devops_stub(channel)
response = stub.Invoke(ccInvocationSpec,2)
return response
def getArgsFromContextForUser(context, enrollId):
    # Build the args list for the chaincode ctorMsg from the feature table
args = []
if 'table' in context:
# There are function arguments
userRegistration = getUserRegistration(context, enrollId)
# Allow the user to specify expressions referencing tags in the args list
pattern = re.compile('\{(.*)\}$')
for arg in context.table[0].cells:
m = pattern.match(arg)
if m:
# tagName reference found in args list
tagName = m.groups()[0]
# make sure the tagName is found in the users tags
assert tagName in userRegistration.tags, "TagName '{0}' not found for user '{1}'".format(tagName, userRegistration.getUserName())
args.append(userRegistration.tags[tagName])
else:
#No tag referenced, pass the arg
args.append(arg)
return args
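# Note (added for clarity): a cell of the form "{tagName}" in the feature table is
# replaced by userRegistration.tags["tagName"]; any other cell is passed through
# unchanged, so a row like | {txId} | 100 | becomes [<stored tag value>, "100"].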
def getContainerDataValuesFromContext(context, aliases, callback):
"""Returns the IPAddress based upon a name part of the full container name"""
assert 'compose_containers' in context, "compose_containers not found in context"
values = []
containerNamePrefix = os.path.basename(os.getcwd()) + "_"
for namePart in aliases:
for containerData in context.compose_containers:
if containerData.containerName.startswith(containerNamePrefix + namePart):
values.append(callback(containerData))
break
return values
| [((1433, 1507), 'subprocess.Popen', 'subprocess.Popen', (['arg_list'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), '(arg_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n', (1449, 1507), False, 'import subprocess\n'), ((3973, 4016), 'devops_pb2.beta_create_Devops_stub', 'devops_pb2.beta_create_Devops_stub', (['channel'], {}), '(channel)\n', (4007, 4016), False, 'import devops_pb2\n'), ((4038, 4106), 'devops_pb2.TransactionRequest', 'devops_pb2.TransactionRequest', ([], {'transactionUuid': 'context.transactionID'}), '(transactionUuid=context.transactionID)\n', (4067, 4106), False, 'import devops_pb2\n'), ((4425, 4455), 'fabric_pb2.TransactionResult', 'fabric_pb2.TransactionResult', ([], {}), '()\n', (4453, 4455), False, 'import fabric_pb2\n'), ((4565, 4615), 'grpc.beta.implementations.insecure_channel', 'implementations.insecure_channel', (['ipAddress', '(30303)'], {}), '(ipAddress, 30303)\n', (4597, 4615), False, 'from grpc.beta import implementations\n'), ((5953, 5996), 'devops_pb2.beta_create_Devops_stub', 'devops_pb2.beta_create_Devops_stub', (['channel'], {}), '(channel)\n', (5987, 5996), False, 'import devops_pb2\n'), ((7225, 7254), 'chaincode_pb2.ChaincodeSpec', 'chaincode_pb2.ChaincodeSpec', ([], {}), '()\n', (7252, 7254), False, 'import chaincode_pb2\n'), ((7438, 7500), 'chaincode_pb2.ChaincodeInput', 'chaincode_pb2.ChaincodeInput', ([], {'function': 'functionName', 'args': 'args'}), '(function=functionName, args=args)\n', (7466, 7500), False, 'import chaincode_pb2\n'), ((7585, 7654), 'chaincode_pb2.ChaincodeInvocationSpec', 'chaincode_pb2.ChaincodeInvocationSpec', ([], {'chaincodeSpec': 'newChaincodeSpec'}), '(chaincodeSpec=newChaincodeSpec)\n', (7622, 7654), False, 'import chaincode_pb2\n'), ((7749, 7792), 'devops_pb2.beta_create_Devops_stub', 'devops_pb2.beta_create_Devops_stub', (['channel'], {}), '(channel)\n', (7783, 7792), False, 'import devops_pb2\n'), ((2192, 2296), 'devops_pb2.Secret', 'devops_pb2.Secret', ([], {'enrollId': "self.secretMsg['enrollId']", 'enrollSecret': "self.secretMsg['enrollSecret']"}), "(enrollId=self.secretMsg['enrollId'], enrollSecret=self.\n secretMsg['enrollSecret'])\n", (2209, 2296), False, 'import devops_pb2\n'), ((8203, 8228), 're.compile', 're.compile', (['"""\\\\{(.*)\\\\}$"""'], {}), "('\\\\{(.*)\\\\}$')\n", (8213, 8228), False, 'import re\n'), ((1762, 1823), 'subprocess.CalledProcessError', 'subprocess.CalledProcessError', (['p.returncode', 'arg_list', 'output'], {}), '(p.returncode, arg_list, output)\n', (1791, 1823), False, 'import subprocess\n'), ((3300, 3311), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3309, 3311), False, 'import os\n'), ((6327, 6381), 'chaincode_pb2.ChaincodeID', 'chaincode_pb2.ChaincodeID', ([], {'name': '""""""', 'path': 'chaincodePath'}), "(name='', path=chaincodePath)\n", (6352, 6381), False, 'import chaincode_pb2\n'), ((6400, 6454), 'chaincode_pb2.ChaincodeInput', 'chaincode_pb2.ChaincodeInput', ([], {'function': 'ctor', 'args': 'args'}), '(function=ctor, args=args)\n', (6428, 6454), False, 'import chaincode_pb2\n'), ((9081, 9092), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (9090, 9092), False, 'import os\n')] |
cefect/SOFDA0 | 1.0.0/hp/dict.py | 62c5566d0f388a5fd76a070ceb5ee3e38b0d7463 | '''
Created on Mar 6, 2018
@author: cef
hp functions for working with dictionaries
'''
import logging, os, sys, math, copy, inspect
from collections import OrderedDict
from weakref import WeakValueDictionary as wdict
import numpy as np
import hp.basic
mod_logger = logging.getLogger(__name__) #creates a child logger of the root
def dict_2_logr(d, logger= mod_logger): #log each value of the dictionary to file
logger = logger.getChild('dict_2_logr')
msg = '\n'
    for key, value in d.iteritems():
msg = msg + ' key: %s\n value: %s \n'%(key, value)
logger.debug(msg)
def key_list(d, #return the intersection of the dict.keys() and the key_list
key_list, logger = mod_logger):
logger = logger.getChild('key_list')
#===========================================================================
# pre check
#===========================================================================
bool_list = hp.basic.bool_list_in_list(d.keys(), key_list)
if not bool_list.any(): raise IOError #check if any are not found
#===========================================================================
# build the found values
#===========================================================================
values_fnd_list = []
for key, value in d.iteritems():
if key in key_list: values_fnd_list.append(value)
return values_fnd_list
def build_nones_dict(key_list, logger=mod_logger): #add 'None' values to the passed keys
    val_list = np.full(len(key_list), None) #one None per key
    d = dict(zip(key_list, val_list))
    return d
def merge_two_dicts(x, y, logger=mod_logger): #shallow merge of y into a copy of x
if x is None: return y
if y is None: return x
z = x.copy() # start with x's keys and values
z.update(y) # modifies z with y's keys and values & returns None
return z
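# Minimal example (added; not part of the original module):
#   merge_two_dicts({'a': 1}, {'b': 2})   # -> {'a': 1, 'b': 2}
# On key collisions the right-hand dict wins, because z.update(y) runs last.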
def value_by_ksearch(ksearch_str, d, #get the entry that matches the search str
logger=mod_logger, *search_args):
#===========================================================================
# take a shot at a perfect match
#===========================================================================
try:
return d[ksearch_str]
except:
#find a match for this key
k_fnd = hp.basic.list_search(d.keys(), ksearch_str, *search_args)
if k_fnd is None:
logger = logger.getChild('value_by_ksearch')
logger.debug('could not find \'%s\' in %i dict keys. returning None'%(ksearch_str, len(d)))
return None
else:
return d[k_fnd]
def merge(dl, dr, #intelligent dictionary merging
set_type = 'intersect',
method = 'exact',
          container = dict, #class used to build the returned mapping
logger = mod_logger, *search_args):
if set_type == 'union':
if method == 'exact':
d_merge = merge_two_dicts(dl, dr, logger=logger)
else:
raise IOError #todo
elif set_type == 'intersect':
d_merge = subset(dl, dr.keys(), set_type = set_type,
method=method, container=container, logger=logger, *search_args)
else: raise IOError
logger.debug('got d_merge %i'%len(d_merge))
return container(d_merge)
def subset_pfx(d_big, prefix, logger=mod_logger):
#===========================================================================
# shortcuts
#===========================================================================
if len(d_big) == 0: return dict()
#===========================================================================
# defaults
#===========================================================================
logger = logger.getChild('subset_pfx')
d = copy.copy(d_big)
fnd_d = dict()
for k, v in d.iteritems():
if k.startswith(prefix):
fnd_d[k] = v
logger.debug('found %i entries with prefix \'%s\' \n'%(len(fnd_d), prefix))
return fnd_d
def subset(d_big, l, #get a dictionary subset using standard user inputs
#ordered = False, using containers instead
set_type = 'sub',
method = 'exact',
container = dict,
logger = mod_logger,
*search_args):
"""
#===========================================================================
# INPUTS
#===========================================================================
    l:  list of keys (within d_big) on which to return the subset
set_type: how to treat the set
        intersect: return a dictionary with only the common keys
sub: raise a flag if not every item in 'l' is found in d_big.keys()
method: what type of key search to perform (re.function)
search: look for a key in the dictionary that contains the list entry.
returned d is keyed by the list
"""
logger = logger.getChild('subset')
#===========================================================================
# setup[]
#==========================================================================
d = container()
"""
#dictionary setup
if ordered: d = OrderedDict()
else: d = dict()"""
#input list setup
if isinstance(l, list): pass
elif isinstance(l, basestring): l = [l]
elif l is None: return d
else: raise IOError
nofnd_l = []
#===========================================================================
# determine subset by kwarg
#===========================================================================
for k in l:
try: #attempt teh direct match
d[k] = d_big[k]
except:
#===================================================================
# try again using search functions
#===================================================================
try:
if method == 'search':
#search and return this value
v = value_by_ksearch(k, d_big, logger=logger, *search_args)
if not v is None:
d[k] = v
continue #not sure this is needed
else: raise ValueError
else: raise ValueError
#===================================================================
# nothing found. proceed based on set_type
#===================================================================
except:
logger.debug('unable to find \'%s\' in the dict with method \'%s\''%(k, method))
if set_type == 'sub':
boolar = hp.basic.bool_list_in_list(d_big.keys(), l)
if not np.all(boolar):
logger.error('%i entries in list not found in big_d'%(len(l) - boolar.sum()))
raise IOError
elif set_type == 'intersect': nofnd_l.append(k)
else: raise IOError
#===========================================================================
# wrap up
#===========================================================================
if len(nofnd_l) >0:
logger.debug('%i of %i list entries DO NOT intersect: %s'%(len(nofnd_l), len(l), nofnd_l))
if set_type == 'sub': raise IOError
#===========================================================================
# check
#===========================================================================
if len(d) == 0:
logger.warning('0 common values between d(%i) and l(%i)'%(len(d), len(l)))
logger.debug('returning d with %i entries: %s \n'%(len(d), d.keys()))
return container(d)
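# Illustrative call patterns (added; not part of the original module):
#   subset({'a': 1, 'b': 2}, ['a'])                      # -> {'a': 1}
#   subset({'a': 1}, ['a', 'c'], set_type='intersect')   # -> {'a': 1}; 'c' is simply skipped
# With set_type='sub' (the default) missing keys are meant to raise IOError, and
# method='search' falls back to hp.basic.list_search for loose key matching.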
#===============================================================================
# def subset(d_big, l, #get a dictionary subset using standard user inputs
# ordered = False, set_type = 'sub', search = 'search',
# logger = mod_logger):
# """
# #===========================================================================
# # INPUTS
# #===========================================================================
# l: list of keys (within d_big) on which to erturn the sutset
#
# set_type: how to treat the set
# intersect: returna dictionary with only the common keys
# sub: raise a flag if not every item in 'l' is found in d_big.keys()
#
# search: what type of key search to perform (re.function)
# """
# logger = logger.getChild('subset')
#
# #===========================================================================
# # setup[]
# #==========================================================================
# #dictionary setup
# if ordered: d = OrderedDict()
# else: d = dict()
#
# #input list setup
# if isinstance(l, list): pass
# elif isinstance(l, basestring): l = [l]
# elif l is None: return None
# else: raise IOError
#
# #===========================================================================
# # determine subset by kwarg
# #===========================================================================
# if set_type == 'sub':
# try:
# for k in l:
# d[k] = d_big[k]
#
# except:
# boolar = hp.basic.bool_list_in_list(d_big.keys(), l)
#
# if not np.all(boolar):
# logger.error('%i entries in list not found in big_d'%(len(l) - boolar.sum()))
#
# raise IOError
#
# if len(d) == 0: raise IOError
#
# elif set_type == 'intersect':
# nofnd_l = []
# for k in l:
# try:
# d[k] = d_big[k]
# except:
# nofnd_l.append(k)
#
# if len(nofnd_l) >0:
# logger.debug('%i of %i list entries DO NOT intersect: %s'%(len(nofnd_l), len(l), nofnd_l))
#
# #===========================================================================
# # check
# #===========================================================================
# if len(d) == 0: logger.warning('0 common values between d(%i) and l(%i)'%
# (len(d), len(l)))
#
# return d
#===============================================================================
class deepcopier():
tries = 0 #keep track of the loop
def __init__(self,obj, logger=mod_logger):
self.logger = logger.getChild('deepcopier')
self.copy_o = obj
def tryit(self, obj=None): #make as deep a copy as possible
if obj is None: obj = self.copy_o
#===========================================================================
# simple try
#===========================================================================
try:
copy_o = copy.deepcopy(obj)
return copy_o
except:
self.logger.debug('failed first attempt')
self.tries += 1
#=======================================================================
        # sophisticated try
#=======================================================================
self.logger.debug('copy attempt %i'%self.tries)
if self.tries > 10: return self.copy_o
#try for each element of the dict
if isinstance(obj, dict):
new_d = dict()
for key, value in obj.iteritems():
try:
new_d[key] = self.tryit(obj = value)
except:
                    new_d[key] = copy.copy(value) #fall back to a shallow copy of this value
self.logger.debug('returning new_d with %i entries: %s'%(len(new_d), new_d.keys()))
else: raise IOError
return new_d
from collections import OrderedDict
class MyOrderedDict(OrderedDict):
"""
as there is no builtin method to add to the head of an ordered dict,
here we add a method
https://stackoverflow.com/questions/16664874/how-can-i-add-an-element-at-the-top-of-an-ordereddict-in-python
"""
def prepend(self, key, value, dict_setitem=dict.__setitem__):
"""add entry to the front of myself"""
root = self._OrderedDict__root
first = root[1]
if key in self:
link = self._OrderedDict__map[key]
link_prev, link_next, _ = link
link_prev[1] = link_next
link_next[0] = link_prev
link[0] = root
link[1] = first
root[1] = first[0] = link
else:
root[1] = first[0] = self._OrderedDict__map[key] = [root, first, key]
dict_setitem(self, key, value)
| [((291, 318), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (308, 318), False, 'import logging, os, sys, math, copy, inspect\n'), ((4046, 4062), 'copy.copy', 'copy.copy', (['d_big'], {}), '(d_big)\n', (4055, 4062), False, 'import logging, os, sys, math, copy, inspect\n'), ((11902, 11920), 'copy.deepcopy', 'copy.deepcopy', (['obj'], {}), '(obj)\n', (11915, 11920), False, 'import logging, os, sys, math, copy, inspect\n'), ((12697, 12711), 'copy.copy', 'copy.copy', (['obj'], {}), '(obj)\n', (12706, 12711), False, 'import logging, os, sys, math, copy, inspect\n'), ((7354, 7368), 'numpy.all', 'np.all', (['boolar'], {}), '(boolar)\n', (7360, 7368), True, 'import numpy as np\n')] |
Cyber-Dioxide/CyberPhish | Core/pre.py | bc2e39d8612ef657d481cdd40d676983f7bf190c | import os
import random
try:
from colorama import Fore, Style
except ModuleNotFoundError:
os.system("pip install colorama")
from urllib.request import urlopen
from Core.helper.color import green, white, blue, red, start, alert
Version = "2.2"
yellow = ("\033[1;33;40m")
def connected(host='http://duckduckgo.com'):
try:
urlopen(host)
return True
except:
return False
all_col = [Style.BRIGHT + Fore.RED, Style.BRIGHT + Fore.CYAN, Style.BRIGHT + Fore.LIGHTCYAN_EX,
Style.BRIGHT + Fore.LIGHTBLUE_EX, Style.BRIGHT + Fore.LIGHTCYAN_EX, Style.BRIGHT + Fore.LIGHTMAGENTA_EX,
Style.BRIGHT + Fore.LIGHTYELLOW_EX]
ran = random.choice(all_col)
def banner():
print(Style.BRIGHT + Fore.LIGHTCYAN_EX, "\n", "- " * 4, " [+] Follow me on Instagram @cyber_dioxide ", "- " * 4)
print(Style.BRIGHT + Fore.LIGHTYELLOW_EX, "\n", "- " * 4, " [+] Coding Instagram @cyber_dioxide_ ", "- " * 4)
print(Style.BRIGHT + Fore.LIGHTRED_EX, "\n", "- " * 4, "[+] Github: https://github.com/Cyber-Dioxide/ ", "- " * 3)
banner()
def menu():
print(blue + " ██████╗██╗ ██╗██████╗ ███████╗██████╗" + white )
print(white +"██╔════╝╚██╗ ██╔╝██╔══██╗██╔════╝██╔══██╗")
print(blue + "██║ ╚████╔╝ ██████╔╝█████╗ ██████╔╝")
print(green + "██║ ╚██╔╝ ██╔══██╗██╔══╝ ██╔══██╗" + white+ blue + white)
print(green + "╚██████╗ ██║ ██████╔╝███████╗██║ ██║")
print(red + "██████╗ ██╗ ██╗██╗███████╗██╗ ██╗" )
print(white+ "██╔══██╗██║ ██║██║██╔════╝██║ ██║" + green)
print(yellow+"██████╔╝███████║██║███████╗███████║" + blue)
print(yellow+"██╔═══╝ ██╔══██║██║╚════██║██╔══██║" + green)
print(yellow+"██║ ██║ ██║██║███████║██║ ██║")
print(red+ "╚═╝ ╚═╝ ╚═╝╚═╝╚══════╝╚═╝ ╚═╝ ")
banner()
print(alert + " More Versions Will Come Soon Stay Updated, Follow My Github\n")
print(white + "options:")
print(green + "[" + white + "1" + green + "]" + white + " Instagram" + green + " [" + white + "12" + green + "]" + white + " Paypal")
print(green + "[" + white + "2" + green + "]" + white + " Facebook" + green + " [" + white + "13" + green + "]" + white + " Discord")
print(green + "[" + white + "3" + green + "]" + white + " Gmail" + green + " [" + white + "14" + green + "]" + white + " Spotify")
print(green + "[" + white + "4" + green + "]" + white + " Gmail (simple)" + green + " [" + white + "15" + green + "]" + white + " Blockchain")
print(green + "[" + white + "5" + green + "]" + white + " Twitter" + green + " [" + white + "16" + green + "]" + white + " RiotGames")
print(green + "[" + white + "6" + green + "]" + white + " Snapchat" + green + " [" + white + "17" + green + "]" + white + " Rockstar")
print(green + "[" + white + "7" + green + "]" + white + " Snapchat (simple)" + green + " [" + white + "18" + green + "]" + white + " AskFM")
print(green + "[" + white + "8" + green + "]" + white + " Steam" + green + " [" + white + "19" + green + "]" + white + " 000Webhost")
print(green + "[" + white + "9" + green + "]" + white + " Dropbox" + green)
print(green + "[" + white + "10" + green + "]" + white + " Linkedin" + green + " [" + white + "21" + green + "]" + white + " Gamehag")
print(green + "[" + white + "11" + green + "]" + white + " Playstation" + green + " [" + white + "22" + green + "]" + white + " Mega")
print(green + "-----------------------------------------------------------------------")
print(green + "[" + white + "00" + green + "]" + red + " EXIT")
def Welcome():
os.system("clear")
| [((664, 686), 'random.choice', 'random.choice', (['all_col'], {}), '(all_col)\n', (677, 686), False, 'import random\n'), ((3511, 3529), 'os.system', 'os.system', (['"""clear"""'], {}), "('clear')\n", (3520, 3529), False, 'import os\n'), ((92, 125), 'os.system', 'os.system', (['"""pip install colorama"""'], {}), "('pip install colorama')\n", (101, 125), False, 'import os\n'), ((338, 351), 'urllib.request.urlopen', 'urlopen', (['host'], {}), '(host)\n', (345, 351), False, 'from urllib.request import urlopen\n')] |
candango/automatoes | automatoes/authorize.py | fbfd01cfaa2c36e23a7251e333ef3fa86ef4bff9 | #!/usr/bin/env python
#
# Copyright 2019-2020 Flavio Garcia
# Copyright 2016-2017 Veeti Paananen under MIT License
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The domain authorization command.
"""
from . import get_version
from .acme import AcmeV2
from .crypto import generate_jwk_thumbprint
from .errors import AutomatoesError
from .model import Order
from cartola import fs, sysexits
import hashlib
import os
import sys
def create_order(acme, domains, method, order_file):
order = acme.new_order(domains, method)
update_order(order, order_file)
return order
def update_order(order, order_file):
fs.write(order_file, order.serialize().decode())
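# Note (added for clarity): authorize() below persists each order as JSON at
# <orders_path>/<sha256 of "_".join(domains)>/order.json and re-reads it on later runs.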
def clean_http_challenges(files):
# Clean up created files
for path in files:
try:
os.remove(path)
except:
print("Couldn't delete http challenge file {}".format(path))
def clean_challenge_file(challenge_file):
try:
os.remove(challenge_file)
except:
print("Couldn't delete challenge file {}".format(challenge_file))
def authorize(server, paths, account, domains, method, verbose=False):
print("Candango Automatoes {}. Manuale replacement.\n\n".format(
get_version()))
current_path = paths['current']
orders_path = paths['orders']
domains_hash = hashlib.sha256(
"_".join(domains).encode('ascii')).hexdigest()
order_path = os.path.join(orders_path, domains_hash)
    order_file = os.path.join(order_path, "order.json")
if not os.path.exists(orders_path):
if verbose:
print("Orders path not found creating it at {}."
"".format(orders_path))
os.mkdir(orders_path)
os.chmod(orders_path, 0o770)
else:
if verbose:
print("Orders path found at {}.".format(orders_path))
if not os.path.exists(order_path):
if verbose:
print("Current order {} path not found creating it at orders "
"path.\n".format(domains_hash))
os.mkdir(order_path)
os.chmod(order_path, 0o770)
else:
if verbose:
print("Current order {} path found at orders path.\n".format(
domains_hash))
method = method
acme = AcmeV2(server, account)
try:
print("Authorizing {}.\n".format(", ".join(domains)))
# Creating orders for domains if not existent
if not os.path.exists(order_file):
if verbose:
print(" Order file not found creating it.")
order = create_order(acme, domains, method, order_file)
else:
if verbose:
print(" Found order file. Querying ACME server for current "
"status.")
order = Order.deserialize(fs.read(order_file))
try:
server_order = acme.query_order(order)
order.contents = server_order.contents
except:
print(" WARNING: Old order. Setting it as expired.\n")
order.contents['status'] = "expired"
update_order(order, order_file)
if not order.expired and not order.invalid:
if order.contents['status'] == 'valid':
print(" Order is valid and expires at {}. Please run "
"the issue "
"command.\n".format(order.contents['expires']))
print(" {} domain(s) authorized. Let's Encrypt!".format(
len(domains)))
sys.exit(sysexits.EX_OK)
else:
if verbose:
print(" Order still pending and expires "
"at {}.\n".format(order.contents['expires']))
else:
if order.invalid:
print(" WARNING: Invalid order, renewing it.\n Just "
"continue with the authorization when all "
"verifications are in place.\n")
else:
print(" WARNING: Expired order. Renewing order.\n")
os.remove(order_file)
order = create_order(acme, domains, method, order_file)
update_order(order, order_file)
pending_challenges = []
for challenge in acme.get_order_challenges(order):
print(" Requesting challenge for {}.".format(challenge.domain))
if challenge.status == 'valid':
print(" {} is already authorized until {}.".format(
challenge.domain, challenge.expires))
continue
else:
challenge_file = os.path.join(order_path, challenge.file_name)
if verbose:
print(" Creating challenge file {}.\n".format(
challenge.file_name))
fs.write(challenge_file, challenge.serialize().decode())
pending_challenges.append(challenge)
# Quit if nothing to authorize
if not pending_challenges:
print("\nAll domains are already authorized, exiting.")
sys.exit(sysexits.EX_OK)
files = set()
if method == 'dns':
print("\n DNS verification required. Make sure these TXT records"
" are in place:\n")
for challenge in pending_challenges:
print(" _acme-challenge.{}. IN TXT "
"\"{}\"".format(challenge.domain, challenge.key))
elif method == 'http':
print("\n HTTP verification required. Make sure these files are "
"in place:\n")
for challenge in pending_challenges:
token = challenge.contents['token']
# path sanity check
assert (token and os.path.sep not in token and '.' not in
token)
files.add(token)
fs.write(
os.path.join(current_path, token),
"%s.%s" % (token,
generate_jwk_thumbprint(account.key))
)
print(" http://{}/.well-known/acme-challenge/{}".format(
challenge.domain, token))
print("\n The necessary files have been written to the current "
"directory.\n")
# Wait for the user to complete the challenges
input("\nPress Enter to continue.\n")
# Validate challenges
done, failed, pending = set(), set(), set()
for challenge in pending_challenges:
print(" {}: waiting for verification. Checking in 5 "
"seconds.".format(challenge.domain))
response = acme.verify_order_challenge(challenge, 5, 1)
if response['status'] == "valid":
print(" {}: OK! Authorization lasts until {}.".format(
challenge.domain, challenge.expires))
done.add(challenge.domain)
elif response['status'] == 'invalid':
print(" {}: {} ({})".format(
challenge.domain,
response['error']['detail'],
response['error']['type'])
)
failed.add(challenge.domain)
break
else:
print("{}: Pending!".format(challenge.domain))
pending.add(challenge.domain)
break
challenge_file = os.path.join(order_path, challenge.file_name)
# Print results
if failed:
print(" {} domain(s) authorized, {} failed.".format(
len(done),
len(failed),
))
print(" Authorized: {}".format(' '.join(done) or "N/A"))
print(" Failed: {}".format(' '.join(failed)))
print(" WARNING: The current order will be invalidated. "
"Try again.")
if verbose:
print(" Deleting invalid challenge file {}.\n".format(
challenge.file_name))
clean_challenge_file(challenge_file)
os.remove(order_file)
os.rmdir(order_path)
if method == 'http':
print(files)
clean_http_challenges(files)
sys.exit(sysexits.EX_FATAL_ERROR)
else:
if pending:
print(" {} domain(s) authorized, {} pending.".format(
len(done),
len(pending)))
print(" Authorized: {}".format(' '.join(done) or "N/A"))
print(" Pending: {}".format(' '.join(pending)))
print(" Try again.")
sys.exit(sysexits.EX_CANNOT_EXECUTE)
else:
if verbose:
print(" Deleting valid challenge file {}.".format(
challenge.file_name))
clean_challenge_file(challenge_file)
if verbose:
print(" Querying ACME server for current status.\n")
server_order = acme.query_order(order)
order.contents = server_order.contents
update_order(order, order_file)
print(" {} domain(s) authorized. Let's Encrypt!".format(
len(done)))
if method == 'http':
clean_http_challenges(files)
sys.exit(sysexits.EX_OK)
except IOError as e:
print("A connection or service error occurred. Aborting.")
raise AutomatoesError(e)
| [((1909, 1948), 'os.path.join', 'os.path.join', (['orders_path', 'domains_hash'], {}), '(orders_path, domains_hash)\n', (1921, 1948), False, 'import os\n'), ((1453, 1478), 'os.remove', 'os.remove', (['challenge_file'], {}), '(challenge_file)\n', (1462, 1478), False, 'import os\n'), ((2038, 2065), 'os.path.exists', 'os.path.exists', (['orders_path'], {}), '(orders_path)\n', (2052, 2065), False, 'import os\n'), ((2198, 2219), 'os.mkdir', 'os.mkdir', (['orders_path'], {}), '(orders_path)\n', (2206, 2219), False, 'import os\n'), ((2228, 2254), 'os.chmod', 'os.chmod', (['orders_path', '(504)'], {}), '(orders_path, 504)\n', (2236, 2254), False, 'import os\n'), ((2365, 2391), 'os.path.exists', 'os.path.exists', (['order_path'], {}), '(order_path)\n', (2379, 2391), False, 'import os\n'), ((2546, 2566), 'os.mkdir', 'os.mkdir', (['order_path'], {}), '(order_path)\n', (2554, 2566), False, 'import os\n'), ((2575, 2600), 'os.chmod', 'os.chmod', (['order_path', '(504)'], {}), '(order_path, 504)\n', (2583, 2600), False, 'import os\n'), ((8068, 8113), 'os.path.join', 'os.path.join', (['order_path', 'challenge.file_name'], {}), '(order_path, challenge.file_name)\n', (8080, 8113), False, 'import os\n'), ((10013, 10037), 'sys.exit', 'sys.exit', (['sysexits.EX_OK'], {}), '(sysexits.EX_OK)\n', (10021, 10037), False, 'import sys\n'), ((1287, 1302), 'os.remove', 'os.remove', (['path'], {}), '(path)\n', (1296, 1302), False, 'import os\n'), ((2935, 2961), 'os.path.exists', 'os.path.exists', (['order_file'], {}), '(order_file)\n', (2949, 2961), False, 'import os\n'), ((5709, 5733), 'sys.exit', 'sys.exit', (['sysexits.EX_OK'], {}), '(sysexits.EX_OK)\n', (5717, 5733), False, 'import sys\n'), ((8735, 8756), 'os.remove', 'os.remove', (['order_file'], {}), '(order_file)\n', (8744, 8756), False, 'import os\n'), ((8769, 8789), 'os.rmdir', 'os.rmdir', (['order_path'], {}), '(order_path)\n', (8777, 8789), False, 'import os\n'), ((8909, 8942), 'sys.exit', 'sys.exit', (['sysexits.EX_FATAL_ERROR'], {}), '(sysexits.EX_FATAL_ERROR)\n', (8917, 8942), False, 'import sys\n'), ((3303, 3322), 'cartola.fs.read', 'fs.read', (['order_file'], {}), '(order_file)\n', (3310, 3322), False, 'from cartola import fs, sysexits\n'), ((4677, 4698), 'os.remove', 'os.remove', (['order_file'], {}), '(order_file)\n', (4686, 4698), False, 'import os\n'), ((5238, 5283), 'os.path.join', 'os.path.join', (['order_path', 'challenge.file_name'], {}), '(order_path, challenge.file_name)\n', (5250, 5283), False, 'import os\n'), ((9311, 9347), 'sys.exit', 'sys.exit', (['sysexits.EX_CANNOT_EXECUTE'], {}), '(sysexits.EX_CANNOT_EXECUTE)\n', (9319, 9347), False, 'import sys\n'), ((4081, 4105), 'sys.exit', 'sys.exit', (['sysexits.EX_OK'], {}), '(sysexits.EX_OK)\n', (4089, 4105), False, 'import sys\n'), ((6545, 6578), 'os.path.join', 'os.path.join', (['current_path', 'token'], {}), '(current_path, token)\n', (6557, 6578), False, 'import os\n')] |
jamesliu/ray | rllib/agents/dqn/simple_q_torch_policy.py | 11ab412db1fa3603a3006e8ed414e80dd1f11c0c | """PyTorch policy class used for Simple Q-Learning"""
import logging
from typing import Dict, Tuple
import gym
import ray
from ray.rllib.agents.dqn.simple_q_tf_policy import (
build_q_models, compute_q_values, get_distribution_inputs_and_class)
from ray.rllib.models.modelv2 import ModelV2
from ray.rllib.models.torch.torch_action_dist import TorchCategorical, \
TorchDistributionWrapper
from ray.rllib.policy import Policy
from ray.rllib.policy.policy_template import build_policy_class
from ray.rllib.policy.sample_batch import SampleBatch
from ray.rllib.policy.torch_policy import TorchPolicy
from ray.rllib.utils.annotations import override
from ray.rllib.utils.framework import try_import_torch
from ray.rllib.utils.torch_utils import concat_multi_gpu_td_errors, huber_loss
from ray.rllib.utils.typing import TensorType, TrainerConfigDict
torch, nn = try_import_torch()
F = None
if nn:
F = nn.functional
logger = logging.getLogger(__name__)
class TargetNetworkMixin:
"""Assign the `update_target` method to the SimpleQTorchPolicy
The function is called every `target_network_update_freq` steps by the
master learner.
"""
def __init__(self):
# Hard initial update from Q-net(s) to target Q-net(s).
self.update_target()
def update_target(self):
# Update_target_fn will be called periodically to copy Q network to
# target Q networks.
state_dict = self.model.state_dict()
for target in self.target_models.values():
target.load_state_dict(state_dict)
@override(TorchPolicy)
def set_weights(self, weights):
# Makes sure that whenever we restore weights for this policy's
# model, we sync the target network (from the main model)
# at the same time.
TorchPolicy.set_weights(self, weights)
self.update_target()
def build_q_model_and_distribution(
policy: Policy, obs_space: gym.spaces.Space,
action_space: gym.spaces.Space,
config: TrainerConfigDict) -> Tuple[ModelV2, TorchDistributionWrapper]:
return build_q_models(policy, obs_space, action_space, config), \
TorchCategorical
def build_q_losses(policy: Policy, model, dist_class,
train_batch: SampleBatch) -> TensorType:
"""Constructs the loss for SimpleQTorchPolicy.
Args:
policy (Policy): The Policy to calculate the loss for.
model (ModelV2): The Model to calculate the loss for.
dist_class (Type[ActionDistribution]): The action distribution class.
train_batch (SampleBatch): The training data.
Returns:
TensorType: A single loss tensor.
"""
target_model = policy.target_models[model]
# q network evaluation
q_t = compute_q_values(
policy,
model,
train_batch[SampleBatch.CUR_OBS],
explore=False,
is_training=True)
    # target q network evaluation
q_tp1 = compute_q_values(
policy,
target_model,
train_batch[SampleBatch.NEXT_OBS],
explore=False,
is_training=True)
# q scores for actions which we know were selected in the given state.
one_hot_selection = F.one_hot(train_batch[SampleBatch.ACTIONS].long(),
policy.action_space.n)
q_t_selected = torch.sum(q_t * one_hot_selection, 1)
# compute estimate of best possible value starting from state at t + 1
dones = train_batch[SampleBatch.DONES].float()
q_tp1_best_one_hot_selection = F.one_hot(
torch.argmax(q_tp1, 1), policy.action_space.n)
q_tp1_best = torch.sum(q_tp1 * q_tp1_best_one_hot_selection, 1)
q_tp1_best_masked = (1.0 - dones) * q_tp1_best
# compute RHS of bellman equation
q_t_selected_target = (train_batch[SampleBatch.REWARDS] +
policy.config["gamma"] * q_tp1_best_masked)
# Compute the error (Square/Huber).
td_error = q_t_selected - q_t_selected_target.detach()
loss = torch.mean(huber_loss(td_error))
# Store values for stats function in model (tower), such that for
# multi-GPU, we do not override them during the parallel loss phase.
model.tower_stats["loss"] = loss
# TD-error tensor in final stats
# will be concatenated and retrieved for each individual batch item.
model.tower_stats["td_error"] = td_error
return loss
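# Reference note (added comment): the loss above is the standard one-step Q-learning
# objective, huber( Q(s, a) - [ r + gamma * (1 - done) * max_a' Q_target(s', a') ] ),
# averaged over the batch.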
def stats_fn(policy: Policy, batch: SampleBatch) -> Dict[str, TensorType]:
return {"loss": torch.mean(torch.stack(policy.get_tower_stats("loss")))}
def extra_action_out_fn(policy: Policy, input_dict, state_batches, model,
action_dist) -> Dict[str, TensorType]:
"""Adds q-values to the action out dict."""
return {"q_values": policy.q_values}
def setup_late_mixins(policy: Policy, obs_space: gym.spaces.Space,
action_space: gym.spaces.Space,
config: TrainerConfigDict) -> None:
"""Call all mixin classes' constructors before SimpleQTorchPolicy
initialization.
Args:
policy (Policy): The Policy object.
obs_space (gym.spaces.Space): The Policy's observation space.
action_space (gym.spaces.Space): The Policy's action space.
config (TrainerConfigDict): The Policy's config.
"""
TargetNetworkMixin.__init__(policy)
SimpleQTorchPolicy = build_policy_class(
name="SimpleQPolicy",
framework="torch",
loss_fn=build_q_losses,
get_default_config=lambda: ray.rllib.agents.dqn.simple_q.DEFAULT_CONFIG,
stats_fn=stats_fn,
extra_action_out_fn=extra_action_out_fn,
after_init=setup_late_mixins,
make_model_and_action_dist=build_q_model_and_distribution,
mixins=[TargetNetworkMixin],
action_distribution_fn=get_distribution_inputs_and_class,
extra_learn_fetches_fn=concat_multi_gpu_td_errors,
)
| [((867, 885), 'ray.rllib.utils.framework.try_import_torch', 'try_import_torch', ([], {}), '()\n', (883, 885), False, 'from ray.rllib.utils.framework import try_import_torch\n'), ((933, 960), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (950, 960), False, 'import logging\n'), ((5341, 5819), 'ray.rllib.policy.policy_template.build_policy_class', 'build_policy_class', ([], {'name': '"""SimpleQPolicy"""', 'framework': '"""torch"""', 'loss_fn': 'build_q_losses', 'get_default_config': '(lambda : ray.rllib.agents.dqn.simple_q.DEFAULT_CONFIG)', 'stats_fn': 'stats_fn', 'extra_action_out_fn': 'extra_action_out_fn', 'after_init': 'setup_late_mixins', 'make_model_and_action_dist': 'build_q_model_and_distribution', 'mixins': '[TargetNetworkMixin]', 'action_distribution_fn': 'get_distribution_inputs_and_class', 'extra_learn_fetches_fn': 'concat_multi_gpu_td_errors'}), "(name='SimpleQPolicy', framework='torch', loss_fn=\n build_q_losses, get_default_config=lambda : ray.rllib.agents.dqn.\n simple_q.DEFAULT_CONFIG, stats_fn=stats_fn, extra_action_out_fn=\n extra_action_out_fn, after_init=setup_late_mixins,\n make_model_and_action_dist=build_q_model_and_distribution, mixins=[\n TargetNetworkMixin], action_distribution_fn=\n get_distribution_inputs_and_class, extra_learn_fetches_fn=\n concat_multi_gpu_td_errors)\n", (5359, 5819), False, 'from ray.rllib.policy.policy_template import build_policy_class\n'), ((1562, 1583), 'ray.rllib.utils.annotations.override', 'override', (['TorchPolicy'], {}), '(TorchPolicy)\n', (1570, 1583), False, 'from ray.rllib.utils.annotations import override\n'), ((2752, 2855), 'ray.rllib.agents.dqn.simple_q_tf_policy.compute_q_values', 'compute_q_values', (['policy', 'model', 'train_batch[SampleBatch.CUR_OBS]'], {'explore': '(False)', 'is_training': '(True)'}), '(policy, model, train_batch[SampleBatch.CUR_OBS], explore=\n False, is_training=True)\n', (2768, 2855), False, 'from ray.rllib.agents.dqn.simple_q_tf_policy import build_q_models, compute_q_values, get_distribution_inputs_and_class\n'), ((2938, 3048), 'ray.rllib.agents.dqn.simple_q_tf_policy.compute_q_values', 'compute_q_values', (['policy', 'target_model', 'train_batch[SampleBatch.NEXT_OBS]'], {'explore': '(False)', 'is_training': '(True)'}), '(policy, target_model, train_batch[SampleBatch.NEXT_OBS],\n explore=False, is_training=True)\n', (2954, 3048), False, 'from ray.rllib.agents.dqn.simple_q_tf_policy import build_q_models, compute_q_values, get_distribution_inputs_and_class\n'), ((1794, 1832), 'ray.rllib.policy.torch_policy.TorchPolicy.set_weights', 'TorchPolicy.set_weights', (['self', 'weights'], {}), '(self, weights)\n', (1817, 1832), False, 'from ray.rllib.policy.torch_policy import TorchPolicy\n'), ((2084, 2139), 'ray.rllib.agents.dqn.simple_q_tf_policy.build_q_models', 'build_q_models', (['policy', 'obs_space', 'action_space', 'config'], {}), '(policy, obs_space, action_space, config)\n', (2098, 2139), False, 'from ray.rllib.agents.dqn.simple_q_tf_policy import build_q_models, compute_q_values, get_distribution_inputs_and_class\n'), ((3992, 4012), 'ray.rllib.utils.torch_utils.huber_loss', 'huber_loss', (['td_error'], {}), '(td_error)\n', (4002, 4012), False, 'from ray.rllib.utils.torch_utils import concat_multi_gpu_td_errors, huber_loss\n')] |
NoahDrisort/ViSV2TTS | viphoneme/T2IPA.py | bea6fa1f85527c824c85986d8b7bfa3e3efd120a | #Grapheme
Rime_tone=[ "a","ă","â","e","ê","i","o","ô","ơ","u","ư","y","iê","oa","oă","oe","oo","uâ","uê","uô","uơ","uy","ươ","uyê","yê", #blank
"á","ắ","ấ","é","ế","í","ó","ố","ớ","ú","ứ","ý","iế","óa","oắ","óe","oó","uấ","uế","uố","ướ","úy","ướ","uyế","yế", #grave
"oá", "oé","óo", "uý",
"à","ằ","ầ","è","ề","ì","ò","ồ","ờ","ù","ừ","ỳ","iề","òa","oằ","òe","oò","uầ","uề","uồ","ườ","ùy","ườ","uyề","yề", #acute
"oà", "oè","òo", "uỳ",
"ả","ẳ","ẩ","ẻ","ể","ỉ","ỏ","ổ","ở","ủ","ử","ỷ","iể","ỏa","oẳ","ỏe","oỏ","uẩ","uể","uổ","ưở","ủy","ưở","uyể","yể", #hook
"oả", "oẻ","ỏo", "uỷ",
"ã","ẵ","ẫ","ẽ","ễ","ĩ","õ","ỗ","ỡ","ũ","ữ","ỹ","iễ","õa","oẵ","õe","oõ","uẫ","uễ","uỗ","ưỡ","ũy","ưỡ","uyễ","yễ", #tilde
"oã", "oẽ","õo", "uỹ",
"ạ","ặ","ậ","ẹ","ệ","ị","ọ","ộ","ợ","ụ","ự","ỵ","iệ","ọa","oặ","ọe","oọ","uậ","uệ","uệ","ượ","ụy","ượ","uyệ","yệ", #dot
"oạ", "oẹ","ọo", "uỵ"]
Onset=["b","d","h","l","m","n","p","r","s","t","v","x","đ","p",
"tr", "th", "ch", "ph","nh","kh","gi","qu",
"ngh","ng","gh","g","k","c"]
#coding: utf-8
#Custom phoneme mapping, following https://vi.wikipedia.org/wiki/%C3%82m_v%E1%BB%8B_h%E1%BB%8Dc_ti%E1%BA%BFng_Vi%E1%BB%87t
#Improves pronunciation consistency across the Northern (N), Central (C) and Southern (S) dialects
Cus_onsets = { u'b' : u'b', u't' : u't', u'th' : u'tʰ', u'đ' : u'd', u'ch' : u'c',
u'kh' : u'x', u'g' : u'ɣ', u'l' : u'l', u'm' : u'm', u'n': u'n',
u'ngh': u'ŋ', u'nh' : u'ɲ', u'ng' : u'ŋ', u'ph' : u'f', u'v' : u'v',
u'x' : u's', u'd' : u'z', u'h' : u'h', u'p' : u'p', u'qu' : u'kw',
u'gi' : u'j', u'tr' : u'ʈ', u'k' : u'k', u'c' : u'k', u'gh' : u'ɣ',
u'r' : u'ʐ', u's' : u'ʂ', u'gi': u'j'}
Cus_nuclei = { u'a' : u'a', u'á' : u'a', u'à' : u'a', u'ả' : u'a', u'ã' : u'a', u'ạ' : u'a',
u'â' : u'ɤ̆', u'ấ' : u'ɤ̆', u'ầ' : u'ɤ̆', u'ẩ' : u'ɤ̆', u'ẫ' : u'ɤ̆', u'ậ' : u'ɤ̆',
u'ă' : u'ă', u'ắ' : u'ă', u'ằ' : u'ă', u'ẳ' : u'ă', u'ẵ' : u'ă', u'ặ' : u'ă',
u'e' : u'ɛ', u'é' : u'ɛ', u'è' : u'ɛ', u'ẻ' : u'ɛ', u'ẽ' : u'ɛ', u'ẹ' : u'ɛ',
u'ê' : u'e', u'ế' : u'e', u'ề' : u'e', u'ể' : u'e', u'ễ' : u'e', u'ệ' : u'e',
u'i' : u'i', u'í' : u'i', u'ì' : u'i', u'ỉ' : u'i', u'ĩ' : u'i', u'ị' : u'i',
u'o' : u'ɔ', u'ó' : u'ɔ', u'ò' : u'ɔ', u'ỏ' : u'ɔ', u'õ' : u'ɔ', u'ọ' : u'ɔ',
u'ô' : u'o', u'ố' : u'o', u'ồ' : u'o', u'ổ' : u'o', u'ỗ' : u'o', u'ộ' : u'o',
u'ơ' : u'ɤ', u'ớ' : u'ɤ', u'ờ' : u'ɤ', u'ở' : u'ɤ', u'ỡ' : u'ɤ', u'ợ' : u'ɤ',
u'u' : u'u', u'ú' : u'u', u'ù' : u'u', u'ủ' : u'u', u'ũ' : u'u', u'ụ' : u'u',
u'ư' : u'ɯ', u'ứ' : u'ɯ', u'ừ' : u'ɯ', u'ử' : u'ɯ', u'ữ' : u'ɯ', u'ự' : u'ɯ',
u'y' : u'i', u'ý' : u'i', u'ỳ' : u'i', u'ỷ' : u'i', u'ỹ' : u'i', u'ỵ' : u'i',
u'eo' : u'eo', u'éo' : u'eo', u'èo' : u'eo', u'ẻo' : u'eo', u'ẽo': u'eo', u'ẹo' : u'eo',
u'êu' : u'ɛu', u'ếu' : u'ɛu', u'ều' : u'ɛu', u'ểu' : u'ɛu', u'ễu': u'ɛu', u'ệu' : u'ɛu',
u'ia' : u'iə', u'ía' : u'iə', u'ìa' : u'iə', u'ỉa' : u'iə', u'ĩa' : u'iə', u'ịa' : u'iə',
u'ia' : u'iə', u'iá' : u'iə', u'ià' : u'iə', u'iả' : u'iə', u'iã' : u'iə', u'iạ' : u'iə',
u'iê' : u'iə', u'iế' : u'iə', u'iề' : u'iə', u'iể' : u'iə', u'iễ' : u'iə', u'iệ' : u'iə',
u'oo' : u'ɔ', u'óo' : u'ɔ', u'òo' : u'ɔ', u'ỏo' : u'ɔ', u'õo' : u'ɔ', u'ọo' : u'ɔ',
u'oo' : u'ɔ', u'oó' : u'ɔ', u'oò' : u'ɔ', u'oỏ' : u'ɔ', u'oõ' : u'ɔ', u'oọ' : u'ɔ',
u'ôô' : u'o', u'ốô' : u'o', u'ồô' : u'o', u'ổô' : u'o', u'ỗô' : u'o', u'ộô' : u'o',
u'ôô' : u'o', u'ôố' : u'o', u'ôồ' : u'o', u'ôổ' : u'o', u'ôỗ' : u'o', u'ôộ' : u'o',
u'ua' : u'uə', u'úa' : u'uə', u'ùa' : u'uə', u'ủa' : u'uə', u'ũa' : u'uə', u'ụa' : u'uə',
u'uô' : u'uə', u'uố' : u'uə', u'uồ' : u'uə', u'uổ' : u'uə', u'uỗ' : u'uə', u'uộ' : u'uə',
u'ưa' : u'ɯə', u'ứa' : u'ɯə', u'ừa' : u'ɯə', u'ửa' : u'ɯə', u'ữa' : u'ɯə', u'ựa' : u'ɯə',
u'ươ' : u'ɯə', u'ướ' : u'ɯə', u'ườ' : u'ɯə', u'ưở' : u'ɯə', u'ưỡ' : u'ɯə', u'ượ' : u'ɯə',
u'yê' : u'iɛ', u'yế' : u'iɛ', u'yề' : u'iɛ', u'yể' : u'iɛ', u'yễ' : u'iɛ', u'yệ' : u'iɛ',
u'uơ' : u'uə', u'uở' : u'uə', u'uờ': u'uə', u'uở' : u'uə', u'uỡ' : u'uə', u'uợ' : u'uə',
}
Cus_offglides = { u'ai' : u'aj', u'ái' : u'aj', u'ài' : u'aj', u'ải' : u'aj', u'ãi' : u'aj', u'ại' : u'aj',
u'ay' : u'ăj', u'áy' : u'ăj', u'ày' : u'ăj', u'ảy' : u'ăj', u'ãy' : u'ăj', u'ạy' : u'ăj',
u'ao' : u'aw', u'áo' : u'aw', u'ào' : u'aw', u'ảo' : u'aw', u'ão' : u'aw', u'ạo' : u'aw',
u'au' : u'ăw', u'áu' : u'ăw', u'àu' : u'ăw', u'ảu' : u'ăw', u'ãu' : u'ăw', u'ạu' : u'ăw',
u'ây' : u'ɤ̆j', u'ấy' : u'ɤ̆j', u'ầy' : u'ɤ̆j', u'ẩy' : u'ɤ̆j', u'ẫy' : u'ɤ̆j', u'ậy' : u'ɤ̆j',
u'âu' : u'ɤ̆w', u'ấu' : u'ɤ̆w', u'ầu': u'ɤ̆w', u'ẩu' : u'ɤ̆w', u'ẫu' : u'ɤ̆w', u'ậu' : u'ɤ̆w',
u'eo' : u'ew', u'éo' : u'ew', u'èo' : u'ew', u'ẻo' : u'ew', u'ẽo' : u'ew', u'ẹo' : u'ew',
u'iu' : u'iw', u'íu' : u'iw', u'ìu' : u'iw', u'ỉu' : u'iw', u'ĩu' : u'iw', u'ịu' : u'iw',
u'oi' : u'ɔj', u'ói' : u'ɔj', u'òi' : u'ɔj', u'ỏi' : u'ɔj', u'õi' : u'ɔj', u'ọi' : u'ɔj',
u'ôi' : u'oj', u'ối' : u'oj', u'ồi' : u'oj', u'ổi' : u'oj', u'ỗi' : u'oj', u'ội' : u'oj',
u'ui' : u'uj', u'úi' : u'uj', u'ùi' : u'uj', u'ủi' : u'uj', u'ũi' : u'uj', u'ụi' : u'uj',
#u'uy' : u'uj', u'úy' : u'uj', u'ùy' : u'uj', u'ủy' : u'uj', u'ũy' : u'uj', u'ụy' : u'uj',
u'uy' : u'ʷi', u'úy' : u'uj', u'ùy' : u'uj', u'ủy' : u'uj', u'ũy' : u'uj', u'ụy' : u'uj',
                    #changed in order to limit duplicate sounds
u'uy' : u'ʷi', u'uý' : u'ʷi', u'uỳ' : u'ʷi', u'uỷ' : u'ʷi', u'uỹ' : u'ʷi', u'uỵ' : u'ʷi',
u'ơi' : u'ɤj', u'ới' : u'ɤj', u'ời' : u'ɤj', u'ởi' : u'ɤj', u'ỡi' : u'ɤj', u'ợi' : u'ɤj',
u'ưi' : u'ɯj', u'ứi' : u'ɯj', u'ừi' : u'ɯj', u'ửi' : u'ɯj', u'ữi' : u'ɯj', u'ựi' : u'ɯj',
u'ưu' : u'ɯw', u'ứu' : u'ɯw', u'ừu' : u'ɯw', u'ửu' : u'ɯw', u'ữu' : u'ɯw', u'ựu' : u'ɯw',
u'iêu' : u'iəw', u'iếu' : u'iəw', u'iều' : u'iəw', u'iểu' : u'iəw', u'iễu' : u'iəw', u'iệu' : u'iəw',
u'yêu' : u'iəw', u'yếu' : u'iəw', u'yều' : u'iəw', u'yểu' : u'iəw', u'yễu' : u'iəw', u'yệu' : u'iəw',
u'uôi' : u'uəj', u'uối' : u'uəj', u'uồi' : u'uəj', u'uổi' : u'uəj', u'uỗi' : u'uəj', u'uội' : u'uəj',
u'ươi' : u'ɯəj', u'ưới' : u'ɯəj', u'ười' : u'ɯəj', u'ưởi' : u'ɯəj', u'ưỡi' : u'ɯəj', u'ượi' : u'ɯəj',
u'ươu' : u'ɯəw', u'ướu' : u'ɯəw', u'ườu' : u'ɯəw', u'ưởu' : u'ɯəw', 'ưỡu' : u'ɯəw', u'ượu' : u'ɯəw'
}
#The rounded vowels here look exactly like the plain (unrounded) ones: no w in front => Try to add ʷ
Cus_onglides = { u'oa' : u'ʷa', u'oá' : u'ʷa', u'oà' : u'ʷa', u'oả' : u'ʷa', u'oã' : u'ʷa', u'oạ' : u'ʷa',
u'óa' : u'ʷa', u'òa' : u'ʷa', u'ỏa' : u'ʷa', u'õa' : u'ʷa', u'ọa' : u'ʷa',
u'oă' : u'ʷă', u'oắ' : u'ʷă', u'oằ' : u'ʷă', u'oẳ' : u'ʷă', u'oẵ' : u'ʷă', u'oặ' : u'ʷă',
u'oe' : u'ʷɛ', u'oé' : u'ʷɛ', u'oè' : u'ʷɛ', u'oẻ' : u'ʷɛ', u'oẽ' : u'ʷɛ', u'oẹ' : u'ʷɛ',
u'oe' : u'ʷɛ', u'óe' : u'ʷɛ', u'òe' : u'ʷɛ', u'ỏe' : u'ʷɛ', u'õe' : u'ʷɛ', u'ọe' : u'ʷɛ',
u'ua' : u'ʷa', u'uá' : u'ʷa', u'uà' : u'ʷa', u'uả' : u'ʷa', u'uã' : u'ʷa', u'uạ' : u'ʷa',
u'uă' : u'ʷă', u'uắ' : u'ʷă', u'uằ' : u'ʷă', u'uẳ' : u'ʷă', u'uẵ' : u'ʷă', u'uặ' : u'ʷă',
u'uâ' : u'ʷɤ̆', u'uấ' : u'ʷɤ̆', u'uầ' : u'ʷɤ̆', u'uẩ' : u'ʷɤ̆', u'uẫ' : u'ʷɤ̆', u'uậ' : u'ʷɤ̆',
u'ue' : u'ʷɛ', u'ué' : u'ʷɛ', u'uè' : u'ʷɛ', u'uẻ' : u'ʷɛ', u'uẽ' : u'ʷɛ', u'uẹ' : u'ʷɛ',
u'uê' : u'ʷe', u'uế' : u'ʷe', u'uề' : u'ʷe', u'uể' : u'ʷe', u'uễ' : u'ʷe', u'uệ' : u'ʷe',
u'uơ' : u'ʷɤ', u'uớ' : u'ʷɤ', u'uờ' : u'ʷɤ', u'uở' : u'ʷɤ', u'uỡ' : u'ʷɤ', u'uợ' : u'ʷɤ',
u'uy' : u'ʷi', u'uý' : u'ʷi', u'uỳ' : u'ʷi', u'uỷ' : u'ʷi', u'uỹ' : u'ʷi', u'uỵ' : u'ʷi',
u'uya' : u'ʷiə', u'uyá' : u'ʷiə', u'uyà' : u'ʷiə', u'uyả' : u'ʷiə', u'uyã' : u'ʷiə', u'uyạ' : u'ʷiə',
u'uyê' : u'ʷiə', u'uyế' : u'ʷiə', u'uyề' : u'ʷiə', u'uyể' : u'ʷiə', u'uyễ' : u'ʷiə', u'uyệ' : u'ʷiə',
u'uyu' : u'ʷiu', u'uyú' : u'ʷiu', u'uyù' : u'ʷiu', u'uyủ' : u'ʷiu', u'uyũ' : u'ʷiu', u'uyụ' : u'ʷiu',
u'uyu' : u'ʷiu', u'uýu' : u'ʷiu', u'uỳu' : u'ʷiu', u'uỷu' : u'ʷiu', u'uỹu' : u'ʷiu', u'uỵu' : u'ʷiu',
u'oen' : u'ʷen', u'oén' : u'ʷen', u'oèn' : u'ʷen', u'oẻn' : u'ʷen', u'oẽn' : u'ʷen', u'oẹn' : u'ʷen',
u'oet' : u'ʷet', u'oét' : u'ʷet', u'oèt' : u'ʷet', u'oẻt' : u'ʷet', u'oẽt' : u'ʷet', u'oẹt' : u'ʷet'
}
Cus_onoffglides = { u'oe' : u'ɛj', u'oé' : u'ɛj', u'oè' : u'ɛj', u'oẻ' : u'ɛj', u'oẽ' : u'ɛj', u'oẹ' : u'ɛj',
u'oai' : u'aj', u'oái' : u'aj', u'oài' : u'aj', u'oải' : u'aj', u'oãi' : u'aj', u'oại' : u'aj',
u'oay' : u'ăj', u'oáy' : u'ăj', u'oày' : u'ăj', u'oảy' : u'ăj', u'oãy' : u'ăj', u'oạy' : u'ăj',
u'oao' : u'aw', u'oáo' : u'aw', u'oào' : u'aw', u'oảo' : u'aw', u'oão' : u'aw', u'oạo' : u'aw',
u'oeo' : u'ew', u'oéo' : u'ew', u'oèo' : u'ew', u'oẻo' : u'ew', u'oẽo' : u'ew', u'oẹo' : u'ew',
u'oeo' : u'ew', u'óeo' : u'ew', u'òeo' : u'ew', u'ỏeo' : u'ew', u'õeo' : u'ew', u'ọeo' : u'ew',
u'ueo' : u'ew', u'uéo' : u'ew', u'uèo' : u'ew', u'uẻo' : u'ew', u'uẽo' : u'ew', u'uẹo' : u'ew',
u'uai' : u'aj', u'uái' : u'aj', u'uài' : u'aj', u'uải' : u'aj', u'uãi' : u'aj', u'uại' : u'aj',
u'uay' : u'ăj', u'uáy' : u'ăj', u'uày' : u'ăj', u'uảy' : u'ăj', u'uãy' : u'ăj', u'uạy' : u'ăj',
u'uây' : u'ɤ̆j', u'uấy' : u'ɤ̆j', u'uầy' : u'ɤ̆j', u'uẩy' : u'ɤ̆j', u'uẫy' : u'ɤ̆j', u'uậy' : u'ɤ̆j'
}
Cus_codas = { u'p' : u'p', u't' : u't', u'c' : u'k', u'm' : u'm', u'n' : u'n', u'ng' : u'ŋ', u'nh' : u'ɲ', u'ch' : u'tʃ' }
Cus_tones_p = { u'á' : 5, u'à' : 2, u'ả' : 4, u'ã' : 3, u'ạ' : 6,
u'ấ' : 5, u'ầ' : 2, u'ẩ' : 4, u'ẫ' : 3, u'ậ' : 6,
u'ắ' : 5, u'ằ' : 2, u'ẳ' : 4, u'ẵ' : 3, u'ặ' : 6,
u'é' : 5, u'è' : 2, u'ẻ' : 4, u'ẽ' : 3, u'ẹ' : 6,
u'ế' : 5, u'ề' : 2, u'ể' : 4, u'ễ' : 3, u'ệ' : 6,
u'í' : 5, u'ì' : 2, u'ỉ' : 4, u'ĩ' : 3, u'ị' : 6,
u'ó' : 5, u'ò' : 2, u'ỏ' : 4, u'õ' : 3, u'ọ' : 6,
u'ố' : 5, u'ồ' : 2, u'ổ' : 4, u'ỗ' : 3, u'ộ' : 6,
u'ớ' : 5, u'ờ' : 2, u'ở' : 4, u'ỡ' : 3, u'ợ' : 6,
u'ú' : 5, u'ù' : 2, u'ủ' : 4, u'ũ' : 3, u'ụ' : 6,
u'ứ' : 5, u'ừ' : 2, u'ử' : 4, u'ữ' : 3, u'ự' : 6,
u'ý' : 5, u'ỳ' : 2, u'ỷ' : 4, u'ỹ' : 3, u'ỵ' : 6,
}
Cus_gi = { u'gi' : u'zi', u'gí': u'zi', u'gì' : u'zi', u'gỉ' : u'zi', u'gĩ' : u'zi', u'gị' : u'zi'}
Cus_qu = {u'quy' : u'kwi', u'qúy' : u'kwi', u'qùy' : u'kwi', u'qủy' : u'kwi', u'qũy' : u'kwi', u'qụy' : u'kwi'}
#######################################################
# North
# #coding: utf-8
N_onsets = { u'b' : u'b', u't' : u't', u'th' : u'tʰ', u'đ' : u'd', u'ch' : u'c',
u'kh' : u'x', u'g' : u'ɣ', u'l' : u'l', u'm' : u'm', u'n': u'n',
u'ngh': u'ŋ', u'nh' : u'ɲ', u'ng' : u'ŋ', u'ph' : u'f', u'v' : u'v',
u'x' : u's', u'd' : u'z', u'h' : u'h', u'p' : u'p', u'qu' : u'kw',
u'gi' : u'z', u'tr' : u'c', u'k' : u'k', u'c' : u'k', u'gh' : u'ɣ',
u'r' : u'z', u's' : u's', u'gi': u'z'}
N_nuclei = { u'a' : u'a', u'á' : u'a', u'à' : u'a', u'ả' : u'a', u'ã' : u'a', u'ạ' : u'a',
u'â' : u'ɤ̆', u'ấ' : u'ɤ̆', u'ầ' : u'ɤ̆', u'ẩ' : u'ɤ̆', u'ẫ' : u'ɤ̆', u'ậ' : u'ɤ̆',
u'ă' : u'ă', u'ắ' : u'ă', u'ằ' : u'ă', u'ẳ' : u'ă', u'ẵ' : u'ă', u'ặ' : u'ă',
u'e' : u'ɛ', u'é' : u'ɛ', u'è' : u'ɛ', u'ẻ' : u'ɛ', u'ẽ' : u'ɛ', u'ẹ' : u'ɛ',
u'ê' : u'e', u'ế' : u'e', u'ề' : u'e', u'ể' : u'e', u'ễ' : u'e', u'ệ' : u'e',
u'i' : u'i', u'í' : u'i', u'ì' : u'i', u'ỉ' : u'i', u'ĩ' : u'i', u'ị' : u'i',
u'o' : u'ɔ', u'ó' : u'ɔ', u'ò' : u'ɔ', u'ỏ' : u'ɔ', u'õ' : u'ɔ', u'ọ' : u'ɔ',
u'ô' : u'o', u'ố' : u'o', u'ồ' : u'o', u'ổ' : u'o', u'ỗ' : u'o', u'ộ' : u'o',
u'ơ' : u'ɤ', u'ớ' : u'ɤ', u'ờ' : u'ɤ', u'ở' : u'ɤ', u'ỡ' : u'ɤ', u'ợ' : u'ɤ',
u'u' : u'u', u'ú' : u'u', u'ù' : u'u', u'ủ' : u'u', u'ũ' : u'u', u'ụ' : u'u',
u'ư' : u'ɯ', u'ứ' : u'ɯ', u'ừ' : u'ɯ', u'ử' : u'ɯ', u'ữ' : u'ɯ', u'ự' : u'ɯ',
u'y' : u'i', u'ý' : u'i', u'ỳ' : u'i', u'ỷ' : u'i', u'ỹ' : u'i', u'ỵ' : u'i',
u'eo' : u'eo', u'éo' : u'eo', u'èo' : u'eo', u'ẻo' : u'eo', u'ẽo': u'eo', u'ẹo' : u'eo',
u'êu' : u'ɛu', u'ếu' : u'ɛu', u'ều' : u'ɛu', u'ểu' : u'ɛu', u'ễu': u'ɛu', u'ệu' : u'ɛu',
u'ia' : u'iə', u'ía' : u'iə', u'ìa' : u'iə', u'ỉa' : u'iə', u'ĩa' : u'iə', u'ịa' : u'iə',
u'ia' : u'iə', u'iá' : u'iə', u'ià' : u'iə', u'iả' : u'iə', u'iã' : u'iə', u'iạ' : u'iə',
u'iê' : u'iə', u'iế' : u'iə', u'iề' : u'iə', u'iể' : u'iə', u'iễ' : u'iə', u'iệ' : u'iə',
u'oo' : u'ɔ', u'óo' : u'ɔ', u'òo' : u'ɔ', u'ỏo' : u'ɔ', u'õo' : u'ɔ', u'ọo' : u'ɔ',
u'oo' : u'ɔ', u'oó' : u'ɔ', u'oò' : u'ɔ', u'oỏ' : u'ɔ', u'oõ' : u'ɔ', u'oọ' : u'ɔ',
u'ôô' : u'o', u'ốô' : u'o', u'ồô' : u'o', u'ổô' : u'o', u'ỗô' : u'o', u'ộô' : u'o',
u'ôô' : u'o', u'ôố' : u'o', u'ôồ' : u'o', u'ôổ' : u'o', u'ôỗ' : u'o', u'ôộ' : u'o',
u'ua' : u'uə', u'úa' : u'uə', u'ùa' : u'uə', u'ủa' : u'uə', u'ũa' : u'uə', u'ụa' : u'uə',
u'uô' : u'uə', u'uố' : u'uə', u'uồ' : u'uə', u'uổ' : u'uə', u'uỗ' : u'uə', u'uộ' : u'uə',
u'ưa' : u'ɯə', u'ứa' : u'ɯə', u'ừa' : u'ɯə', u'ửa' : u'ɯə', u'ữa' : u'ɯə', u'ựa' : u'ɯə',
u'ươ' : u'ɯə', u'ướ' : u'ɯə', u'ườ' : u'ɯə', u'ưở' : u'ɯə', u'ưỡ' : u'ɯə', u'ượ' : u'ɯə',
u'yê' : u'iɛ', u'yế' : u'iɛ', u'yề' : u'iɛ', u'yể' : u'iɛ', u'yễ' : u'iɛ', u'yệ' : u'iɛ',
u'uơ' : u'uə', u'uở' : u'uə', u'uờ': u'uə', u'uở' : u'uə', u'uỡ' : u'uə', u'uợ' : u'uə',
}
N_offglides = { u'ai' : u'aj', u'ái' : u'aj', u'ài' : u'aj', u'ải' : u'aj', u'ãi' : u'aj', u'ại' : u'aj',
u'ay' : u'ăj', u'áy' : u'ăj', u'ày' : u'ăj', u'ảy' : u'ăj', u'ãy' : u'ăj', u'ạy' : u'ăj',
u'ao' : u'aw', u'áo' : u'aw', u'ào' : u'aw', u'ảo' : u'aw', u'ão' : u'aw', u'ạo' : u'aw',
u'au' : u'ăw', u'áu' : u'ăw', u'àu' : u'ăw', u'ảu' : u'ăw', u'ãu' : u'ăw', u'ạu' : u'ăw',
u'ây' : u'ɤ̆j', u'ấy' : u'ɤ̆j', u'ầy' : u'ɤ̆j', u'ẩy' : u'ɤ̆j', u'ẫy' : u'ɤ̆j', u'ậy' : u'ɤ̆j',
u'âu' : u'ɤ̆w', u'ấu' : u'ɤ̆w', u'ầu': u'ɤ̆w', u'ẩu' : u'ɤ̆w', u'ẫu' : u'ɤ̆w', u'ậu' : u'ɤ̆w',
u'eo' : u'ew', u'éo' : u'ew', u'èo' : u'ew', u'ẻo' : u'ew', u'ẽo' : u'ew', u'ẹo' : u'ew',
u'iu' : u'iw', u'íu' : u'iw', u'ìu' : u'iw', u'ỉu' : u'iw', u'ĩu' : u'iw', u'ịu' : u'iw',
u'oi' : u'ɔj', u'ói' : u'ɔj', u'òi' : u'ɔj', u'ỏi' : u'ɔj', u'õi' : u'ɔj', u'ọi' : u'ɔj',
u'ôi' : u'oj', u'ối' : u'oj', u'ồi' : u'oj', u'ổi' : u'oj', u'ỗi' : u'oj', u'ội' : u'oj',
u'ui' : u'uj', u'úi' : u'uj', u'ùi' : u'uj', u'ủi' : u'uj', u'ũi' : u'uj', u'ụi' : u'uj',
u'uy' : u'uj', u'úy' : u'uj', u'ùy' : u'uj', u'ủy' : u'uj', u'ũy' : u'uj', u'ụy' : u'uj',
u'ơi' : u'ɤj', u'ới' : u'ɤj', u'ời' : u'ɤj', u'ởi' : u'ɤj', u'ỡi' : u'ɤj', u'ợi' : u'ɤj',
u'ưi' : u'ɯj', u'ứi' : u'ɯj', u'ừi' : u'ɯj', u'ửi' : u'ɯj', u'ữi' : u'ɯj', u'ựi' : u'ɯj',
u'ưu' : u'ɯw', u'ứu' : u'ɯw', u'ừu' : u'ɯw', u'ửu' : u'ɯw', u'ữu' : u'ɯw', u'ựu' : u'ɯw',
u'iêu' : u'iəw', u'iếu' : u'iəw', u'iều' : u'iəw', u'iểu' : u'iəw', u'iễu' : u'iəw', u'iệu' : u'iəw',
u'yêu' : u'iəw', u'yếu' : u'iəw', u'yều' : u'iəw', u'yểu' : u'iəw', u'yễu' : u'iəw', u'yệu' : u'iəw',
u'uôi' : u'uəj', u'uối' : u'uəj', u'uồi' : u'uəj', u'uổi' : u'uəj', u'uỗi' : u'uəj', u'uội' : u'uəj',
u'ươi' : u'ɯəj', u'ưới' : u'ɯəj', u'ười' : u'ɯəj', u'ưởi' : u'ɯəj', u'ưỡi' : u'ɯəj', u'ượi' : u'ɯəj',
                u'ươu' : u'ɯəw', u'ướu' : u'ɯəw', u'ườu' : u'ɯəw', u'ưởu' : u'ɯəw', u'ưỡu' : u'ɯəw', u'ượu' : u'ɯəw'
}
N_onglides = { u'oa' : u'a', u'oá' : u'a', u'oà' : u'a', u'oả' : u'a', u'oã' : u'a', u'oạ' : u'a',
u'óa' : u'a', u'òa' : u'a', u'ỏa' : u'a', u'õa' : u'a', u'ọa' : u'a',
u'oă' : u'ă', u'oắ' : u'ă', u'oằ' : u'ă', u'oẳ' : u'ă', u'oẵ' : u'ă', u'oặ' : u'ă',
u'oe' : u'e', u'oé' : u'e', u'oè' : u'e', u'oẻ' : u'e', u'oẽ' : u'e', u'oẹ' : u'e',
u'oe' : u'e', u'óe' : u'e', u'òe' : u'e', u'ỏe' : u'e', u'õe' : u'e', u'ọe' : u'e',
u'ua' : u'a', u'uá' : u'a', u'uà' : u'a', u'uả' : u'a', u'uã' : u'a', u'uạ' : u'a',
u'uă' : u'ă', u'uắ' : u'ă', u'uằ' : u'ă', u'uẳ' : u'ă', u'uẵ' : u'ă', u'uặ' : u'ă',
u'uâ' : u'ɤ̆', u'uấ' : u'ɤ̆', u'uầ' : u'ɤ̆', u'uẩ' : u'ɤ̆', u'uẫ' : u'ɤ̆', u'uậ' : u'ɤ̆',
u'ue' : u'ɛ', u'ué' : u'ɛ', u'uè' : u'ɛ', u'uẻ' : u'ɛ', u'uẽ' : u'ɛ', u'uẹ' : u'ɛ',
u'uê' : u'e', u'uế' : u'e', u'uề' : u'e', u'uể' : u'e', u'uễ' : u'e', u'uệ' : u'e',
u'uơ' : u'ɤ', u'uớ' : u'ɤ', u'uờ' : u'ɤ', u'uở' : u'ɤ', u'uỡ' : u'ɤ', u'uợ' : u'ɤ',
u'uy' : u'i', u'uý' : u'i', u'uỳ' : u'i', u'uỷ' : u'i', u'uỹ' : u'i', u'uỵ' : u'i',
u'uya' : u'iə', u'uyá' : u'iə', u'uyà' : u'iə', u'uyả' : u'iə', u'uyã' : u'iə', u'uyạ' : u'iə',
u'uyê' : u'iə', u'uyế' : u'iə', u'uyề' : u'iə', u'uyể' : u'iə', u'uyễ' : u'iə', u'uyệ' : u'iə',
u'uyu' : u'iu', u'uyú' : u'iu', u'uyù' : u'iu', u'uyủ' : u'iu', u'uyũ' : u'iu', u'uyụ' : u'iu',
u'uyu' : u'iu', u'uýu' : u'iu', u'uỳu' : u'iu', u'uỷu' : u'iu', u'uỹu' : u'iu', u'uỵu' : u'iu',
u'oen' : u'en', u'oén' : u'en', u'oèn' : u'en', u'oẻn' : u'en', u'oẽn' : u'en', u'oẹn' : u'en',
u'oet' : u'et', u'oét' : u'et', u'oèt' : u'et', u'oẻt' : u'et', u'oẽt' : u'et', u'oẹt' : u'et'
}
N_onoffglides = { u'oe' : u'ej', u'oé' : u'ej', u'oè' : u'ej', u'oẻ' : u'ej', u'oẽ' : u'ej', u'oẹ' : u'ej',
u'oai' : u'aj', u'oái' : u'aj', u'oài' : u'aj', u'oải' : u'aj', u'oãi' : u'aj', u'oại' : u'aj',
u'oay' : u'ăj', u'oáy' : u'ăj', u'oày' : u'ăj', u'oảy' : u'ăj', u'oãy' : u'ăj', u'oạy' : u'ăj',
u'oao' : u'aw', u'oáo' : u'aw', u'oào' : u'aw', u'oảo' : u'aw', u'oão' : u'aw', u'oạo' : u'aw',
u'oeo' : u'ew', u'oéo' : u'ew', u'oèo' : u'ew', u'oẻo' : u'ew', u'oẽo' : u'ew', u'oẹo' : u'ew',
u'oeo' : u'ew', u'óeo' : u'ew', u'òeo' : u'ew', u'ỏeo' : u'ew', u'õeo' : u'ew', u'ọeo' : u'ew',
u'ueo' : u'ew', u'uéo' : u'ew', u'uèo' : u'ew', u'uẻo' : u'ew', u'uẽo' : u'ew', u'uẹo' : u'ew',
u'uai' : u'aj', u'uái' : u'aj', u'uài' : u'aj', u'uải' : u'aj', u'uãi' : u'aj', u'uại' : u'aj',
u'uay' : u'ăj', u'uáy' : u'ăj', u'uày' : u'ăj', u'uảy' : u'ăj', u'uãy' : u'ăj', u'uạy' : u'ăj',
u'uây' : u'ɤ̆j', u'uấy' : u'ɤ̆j', u'uầy' : u'ɤ̆j', u'uẩy' : u'ɤ̆j', u'uẫy' : u'ɤ̆j', u'uậy' : u'ɤ̆j'
}
N_codas = { u'p' : u'p', u't' : u't', u'c' : u'k', u'm' : u'm', u'n' : u'n', u'ng' : u'ŋ', u'nh' : u'ɲ', u'ch' : u'k' }
#tones = { u'a' : 33, u'á' : 24, u'à' : 32, u'ả' : 312, u'ã' : u'35g', u'ạ' : u'21g',
# u'â' : 33, u'ấ' : 24, u'ầ' : 32, u'ẩ' : 312, u'ẫ' : u'35g', u'ậ' : u'21g',
# u'ă' : 33, u'ắ' : 24, u'ằ' : 32, u'ẳ' : 312, u'ẵ' : u'35g', u'ặ' : u'21g',
# u'e' : 33, u'é' : 24, u'è' : 32, u'ẻ' : 312, u'ẽ' : u'35g', u'ẹ' : u'21g',
# u'ê' : 33, u'ế' : 24, u'ề' : 32, u'ể' : 312, u'ễ' : u'35g', u'ệ' : u'21g',
# u'i' : 33, u'í' : 24, u'ì' : 32, u'ỉ' : 312, u'ĩ' : u'35g', u'ị' : u'21g',
# u'o' : 33, u'ó' : 24, u'ò' : 32, u'ỏ' : 312, u'õ' : u'35g', u'ọ' : u'21g',
# u'ô' : 33, u'ố' : 24, u'ồ' : 32, u'ổ' : 312, u'ỗ' : u'35g', u'ộ' : u'21g',
# u'ơ' : 33, u'ớ' : 24, u'ờ' : 32, u'ở' : 312, u'ỡ' : u'35g', u'ợ' : u'21g',
# u'u' : 33, u'ú' : 24, u'ù' : 32, u'ủ' : 312, u'ũ' : u'35g', u'ụ' : u'21g',
# u'ư' : 33, u'ứ' : 24, u'ừ' : 32, u'ử' : 312, u'ữ' : u'35g', u'ự' : u'21g',
# u'y' : 33, u'ý' : 24, u'ỳ' : 32, u'ỷ' : 312, u'ỹ' : u'35g', u'ỵ' : u'21g',
# }
N_tones = { u'á' : 24, u'à' : 32, u'ả' : 312, u'ã' : u'35g', u'ạ' : u'21g',
u'ấ' : 24, u'ầ' : 32, u'ẩ' : 312, u'ẫ' : u'35g', u'ậ' : u'21g',
u'ắ' : 24, u'ằ' : 32, u'ẳ' : 312, u'ẵ' : u'35g', u'ặ' : u'21g',
u'é' : 24, u'è' : 32, u'ẻ' : 312, u'ẽ' : u'35g', u'ẹ' : u'21g',
u'ế' : 24, u'ề' : 32, u'ể' : 312, u'ễ' : u'35g', u'ệ' : u'21g',
u'í' : 24, u'ì' : 32, u'ỉ' : 312, u'ĩ' : u'35g', u'ị' : u'21g',
u'ó' : 24, u'ò' : 32, u'ỏ' : 312, u'õ' : u'35g', u'ọ' : u'21g',
u'ố' : 24, u'ồ' : 32, u'ổ' : 312, u'ỗ' : u'35g', u'ộ' : u'21g',
u'ớ' : 24, u'ờ' : 32, u'ở' : 312, u'ỡ' : u'35g', u'ợ' : u'21g',
u'ú' : 24, u'ù' : 32, u'ủ' : 312, u'ũ' : u'35g', u'ụ' : u'21g',
u'ứ' : 24, u'ừ' : 32, u'ử' : 312, u'ữ' : u'35g', u'ự' : u'21g',
u'ý' : 24, u'ỳ' : 32, u'ỷ' : 312, u'ỹ' : u'35g', u'ỵ' : u'21g',
}
# used to use \u02C0 for the unicode raised glottal character
N_tones_p = { u'á' : 5, u'à' : 2, u'ả' : 4, u'ã' : 3, u'ạ' : 6,
u'ấ' : 5, u'ầ' : 2, u'ẩ' : 4, u'ẫ' : 3, u'ậ' : 6,
u'ắ' : 5, u'ằ' : 2, u'ẳ' : 4, u'ẵ' : 3, u'ặ' : 6,
u'é' : 5, u'è' : 2, u'ẻ' : 4, u'ẽ' : 3, u'ẹ' : 6,
u'ế' : 5, u'ề' : 2, u'ể' : 4, u'ễ' : 3, u'ệ' : 6,
u'í' : 5, u'ì' : 2, u'ỉ' : 4, u'ĩ' : 3, u'ị' : 6,
u'ó' : 5, u'ò' : 2, u'ỏ' : 4, u'õ' : 3, u'ọ' : 6,
u'ố' : 5, u'ồ' : 2, u'ổ' : 4, u'ỗ' : 3, u'ộ' : 6,
u'ớ' : 5, u'ờ' : 2, u'ở' : 4, u'ỡ' : 3, u'ợ' : 6,
u'ú' : 5, u'ù' : 2, u'ủ' : 4, u'ũ' : 3, u'ụ' : 6,
u'ứ' : 5, u'ừ' : 2, u'ử' : 4, u'ữ' : 3, u'ự' : 6,
u'ý' : 5, u'ỳ' : 2, u'ỷ' : 4, u'ỹ' : 3, u'ỵ' : 6,
}
N_gi = { u'gi' : u'zi', u'gí': u'zi', u'gì' : u'zi', u'gỉ' : u'zi', u'gĩ' : u'zi', u'gị' : u'zi'}
N_qu = {u'quy' : u'kwi', u'qúy' : u'kwi', u'qùy' : u'kwi', u'qủy' : u'kwi', u'qũy' : u'kwi', u'qụy' : u'kwi'}
#######################################################
#central.py
#coding: utf-8
C_onsets = { u'b' : u'b', u't' : u't', u'th' : u'tʰ', u'đ' : u'd', u'ch' : u'c',
u'kh' : u'x', u'g' : u'ɣ', u'l' : u'l', u'm' : u'm', u'n': u'n',
u'ngh': u'ŋ', u'nh' : u'ɲ', u'ng' : u'ŋ', u'ph' : u'f', u'v' : u'j',
u'x' : u's', u'd' : u'j', u'h' : u'h', u'p' : u'p', u'qu' : u'w',
u'gi' : u'j', u'tr' : u'ʈ', u'k' : u'k', u'c' : u'k', u'gh' : u'ɣ',
u'r' : u'ʐ', u's' : u'ʂ', u'gi' : u'j'
}
C_nuclei = { u'a' : u'a', u'á' : u'a', u'à' : u'a', u'ả' : u'a', u'ã' : u'a', u'ạ' : u'a',
u'â' : u'ɤ̆', u'ấ' : u'ɤ̆', u'ầ' : u'ɤ̆', u'ẩ' : u'ɤ̆', u'ẫ' : u'ɤ̆', u'ậ' : u'ɤ̆',
u'ă' : u'ă', u'ắ' : u'ă', u'ằ' : u'ă', u'ẳ' : u'ă', u'ẵ' : u'ă', u'ặ' : u'ă',
u'e' : u'ɛ', u'é' : u'ɛ', u'è' : u'ɛ', u'ẻ' : u'ɛ', u'ẽ' : u'ɛ', u'ẹ' : u'ɛ',
u'ê' : u'e', u'ế' : u'e', u'ề' : u'e', u'ể' : u'e', u'ễ' : u'e', u'ệ' : u'e',
u'i' : u'i', u'í' : u'i', u'ì' : u'i', u'ỉ' : u'i', u'ĩ' : u'i', u'ị' : u'i',
u'o' : u'ɔ', u'ó' : u'ɔ', u'ò' : u'ɔ', u'ỏ' : u'ɔ', u'õ' : u'ɔ', u'ọ' : u'ɔ',
u'ô' : u'o', u'ố' : u'o', u'ồ' : u'o', u'ổ' : u'o', u'ỗ' : u'o', u'ộ' : u'o',
u'ơ' : u'ɤ', u'ớ' : u'ɤ', u'ờ' : u'ɤ', u'ở' : u'ɤ', u'ỡ' : u'ɤ', u'ợ' : u'ɤ',
u'u' : u'u', u'ú' : u'u', u'ù' : u'u', u'ủ' : u'u', u'ũ' : u'u', u'ụ' : u'u',
u'ư' : u'ɯ', u'ứ' : u'ɯ', u'ừ' : u'ɯ', u'ử' : u'ɯ', u'ữ' : u'ɯ', u'ự' : u'ɯ',
u'y' : u'i', u'ý' : u'i', u'ỳ' : u'i', u'ỷ' : u'i', u'ỹ' : u'i', u'ỵ' : u'i',
u'eo' : u'eo', u'éo' : u'eo', u'èo' : u'eo', u'ẻo' : u'eo', u'ẽo': u'eo', u'ẹo' : u'eo',
u'êu' : u'ɛu', u'ếu' : u'ɛu', u'ều' : u'ɛu', u'ểu' : u'ɛu', u'ễu': u'ɛu', u'ệu' : u'ɛu',
u'ia' : u'iə', u'ía' : u'iə', u'ìa' : u'iə', u'ỉa' : u'iə', u'ĩa' : u'iə', u'ịa' : u'iə',
u'ia' : u'iə', u'iá' : u'iə', u'ià' : u'iə', u'iả' : u'iə', u'iã' : u'iə', u'iạ' : u'iə',
u'iê' : u'iə', u'iế' : u'iə', u'iề' : u'iə', u'iể' : u'iə', u'iễ' : u'iə', u'iệ' : u'iə',
u'oo' : u'ɔ', u'óo' : u'ɔ', u'òo' : u'ɔ', u'ỏo' : u'ɔ', u'õo' : u'ɔ', u'ọo' : u'ɔ',
u'oo' : u'ɔ', u'oó' : u'ɔ', u'oò' : u'ɔ', u'oỏ' : u'ɔ', u'oõ' : u'ɔ', u'oọ' : u'ɔ',
u'ôô' : u'o', u'ốô' : u'o', u'ồô' : u'o', u'ổô' : u'o', u'ỗô' : u'o', u'ộô' : u'o',
u'ôô' : u'o', u'ôố' : u'o', u'ôồ' : u'o', u'ôổ' : u'o', u'ôỗ' : u'o', u'ôộ' : u'o',
u'ua' : u'uə', u'úa' : u'uə', u'ùa' : u'uə', u'ủa' : u'uə', u'ũa' : u'uə', u'ụa' : u'uə',
u'uô' : u'uə', u'uố' : u'uə', u'uồ' : u'uə', u'uổ' : u'uə', u'uỗ' : u'uə', u'uộ' : u'uə',
u'ưa' : u'ɯə', u'ứa' : u'ɯə', u'ừa' : u'ɯə', u'ửa' : u'ɯə', u'ữa' : u'ɯə', u'ựa' : u'ɯə',
u'ươ' : u'ɯə', u'ướ' : u'ɯə', u'ườ' : u'ɯə', u'ưở' : u'ɯə', u'ưỡ' : u'ɯə', u'ượ' : u'ɯə',
u'yê' : u'iɛ', u'yế' : u'iɛ', u'yề' : u'iɛ', u'yể' : u'iɛ', u'yễ' : u'iɛ', u'yệ' : u'iɛ',
u'uơ' : u'uə', u'uở' : u'uə', u'uờ': u'uə', u'uở' : u'uə', u'uỡ' : u'uə', u'uợ' : u'uə',
}
C_offglides = { u'ai' : u'aj', u'ái' : u'aj', u'ài' : u'aj', u'ải' : u'aj', u'ãi' : u'aj', u'ại' : u'aj',
u'ay' : u'ăj', u'áy' : u'ăj', u'ày' : u'ăj', u'ảy' : u'ăj', u'ãy' : u'ăj', u'ạy' : u'ăj',
u'ao' : u'aw', u'áo' : u'aw', u'ào' : u'aw', u'ảo' : u'aw', u'ão' : u'aw', u'ạo' : u'aw',
u'au' : u'ăw', u'áu' : u'ăw', u'àu' : u'ăw', u'ảu' : u'ăw', u'ãu' : u'ăw', u'ạu' : u'ăw',
u'ây' : u'ɤ̆j', u'ấy' : u'ɤ̆j', u'ầy' : u'ɤ̆j', u'ẩy' : u'ɤ̆j', u'ẫy' : u'ɤ̆j', u'ậy' : u'ɤ̆j',
u'âu' : u'ɤ̆w', u'ấu' : u'ɤ̆w', u'ầu': u'ɤ̆w', u'ẩu' : u'ɤ̆w', u'ẫu' : u'ɤ̆w', u'ậu' : u'ɤ̆w',
u'eo' : u'ew', u'éo' : u'ew', u'èo' : u'ew', u'ẻo' : u'ew', u'ẽo' : u'ew', u'ẹo' : u'ew',
u'iu' : u'iw', u'íu' : u'iw', u'ìu' : u'iw', u'ỉu' : u'iw', u'ĩu' : u'iw', u'ịu' : u'iw',
u'oi' : u'ɔj', u'ói' : u'ɔj', u'òi' : u'ɔj', u'ỏi' : u'ɔj', u'õi' : u'ɔj', u'ọi' : u'ɔj',
u'ôi' : u'oj', u'ối' : u'oj', u'ồi' : u'oj', u'ổi' : u'oj', u'ỗi' : u'oj', u'ội' : u'oj',
u'ui' : u'uj', u'úi' : u'uj', u'ùi' : u'uj', u'ủi' : u'uj', u'ũi' : u'uj', u'ụi' : u'uj',
u'uy' : u'uj', u'úy' : u'uj', u'ùy' : u'uj', u'ủy' : u'uj', u'ũy' : u'uj', u'ụy' : u'uj',
u'ơi' : u'ɤj', u'ới' : u'ɤj', u'ời' : u'ɤj', u'ởi' : u'ɤj', u'ỡi' : u'ɤj', u'ợi' : u'ɤj',
u'ưi' : u'ɯj', u'ứi' : u'ɯj', u'ừi' : u'ɯj', u'ửi' : u'ɯj', u'ữi' : u'ɯj', u'ựi' : u'ɯj',
u'ưu' : u'ɯw', u'ứu' : u'ɯw', u'ừu' : u'ɯw', u'ửu' : u'ɯw', u'ữu' : u'ɯw', u'ựu' : u'ɯw',
u'iêu' : u'iəw', u'iếu' : u'iəw', u'iều' : u'iəw', u'iểu' : u'iəw', u'iễu' : u'iəw', u'iệu' : u'iəw',
u'yêu' : u'iəw', u'yếu' : u'iəw', u'yều' : u'iəw', u'yểu' : u'iəw', u'yễu' : u'iəw', u'yệu' : u'iəw',
u'uôi' : u'uəj', u'uối' : u'uəj', u'uồi' : u'uəj', u'uổi' : u'uəj', u'uỗi' : u'uəj', u'uội' : u'uəj',
u'ươi' : u'ɯəj', u'ưới' : u'ɯəj', u'ười' : u'ɯəj', u'ưởi' : u'ɯəj', u'ưỡi' : u'ɯəj', u'ượi' : u'ɯəj',
                u'ươu' : u'ɯəw', u'ướu' : u'ɯəw', u'ườu' : u'ɯəw', u'ưởu' : u'ɯəw', u'ưỡu' : u'ɯəw', u'ượu' : u'ɯəw'
}
C_onglides = { u'oa' : u'a', u'oá' : u'a', u'oà' : u'a', u'oả' : u'a', u'oã' : u'a', u'oạ' : u'a',
u'óa' : u'a', u'òa' : u'a', u'ỏa' : u'a', u'õa' : u'a', u'ọa' : u'a',
u'oă' : u'ă', u'oắ' : u'ă', u'oằ' : u'ă', u'oẳ' : u'ă', u'oẵ' : u'ă', u'oặ' : u'ă',
u'oe' : u'e', u'oé' : u'e', u'oè' : u'e', u'oẻ' : u'e', u'oẽ' : u'e', u'oẹ' : u'e',
u'oe' : u'e', u'óe' : u'e', u'òe' : u'e', u'ỏe' : u'e', u'õe' : u'e', u'ọe' : u'e',
u'ua' : u'a', u'uá' : u'a', u'uà' : u'a', u'uả' : u'a', u'uã' : u'a', u'uạ' : u'a',
u'uă' : u'ă', u'uắ' : u'ă', u'uằ' : u'ă', u'uẳ' : u'ă', u'uẵ' : u'ă', u'uặ' : u'ă',
u'uâ' : u'ɤ̆', u'uấ' : u'ɤ̆', u'uầ' : u'ɤ̆', u'uẩ' : u'ɤ̆', u'uẫ' : u'ɤ̆', u'uậ' : u'ɤ̆',
u'ue' : u'ɛ', u'ué' : u'ɛ', u'uè' : u'ɛ', u'uẻ' : u'ɛ', u'uẽ' : u'ɛ', u'uẹ' : u'ɛ',
u'uê' : u'e', u'uế' : u'e', u'uề' : u'e', u'uể' : u'e', u'uễ' : u'e', u'uệ' : u'e',
u'uơ' : u'ɤ', u'uớ' : u'ɤ', u'uờ' : u'ɤ', u'uở' : u'ɤ', u'uỡ' : u'ɤ', u'uợ' : u'ɤ',
u'uy' : u'i', u'uý' : u'i', u'uỳ' : u'i', u'uỷ' : u'i', u'uỹ' : u'i', u'uỵ' : u'i',
u'uya' : u'iə', u'uyá' : u'iə', u'uyà' : u'iə', u'uyả' : u'iə', u'uyã' : u'iə', u'uyạ' : u'iə',
u'uyê' : u'iə', u'uyế' : u'iə', u'uyề' : u'iə', u'uyể' : u'iə', u'uyễ' : u'iə', u'uyệ' : u'iə',
u'uyu' : u'iu', u'uyú' : u'iu', u'uyù' : u'iu', u'uyủ' : u'iu', u'uyũ' : u'iu', u'uyụ' : u'iu',
u'uyu' : u'iu', u'uýu' : u'iu', u'uỳu' : u'iu', u'uỷu' : u'iu', u'uỹu' : u'iu', u'uỵu' : u'iu',
u'oen' : u'en', u'oén' : u'en', u'oèn' : u'en', u'oẻn' : u'en', u'oẽn' : u'en', u'oẹn' : u'en',
u'oet' : u'et', u'oét' : u'et', u'oèt' : u'et', u'oẻt' : u'et', u'oẽt' : u'et', u'oẹt' : u'et'
}
C_onoffglides = { u'oe' : u'ej', u'oé' : u'ej', u'oè' : u'ej', u'oẻ' : u'ej', u'oẽ' : u'ej', u'oẹ' : u'ej',
u'oai' : u'aj', u'oái' : u'aj', u'oài' : u'aj', u'oải' : u'aj', u'oãi' : u'aj', u'oại' : u'aj',
u'oay' : u'ăj', u'oáy' : u'ăj', u'oày' : u'ăj', u'oảy' : u'ăj', u'oãy' : u'ăj', u'oạy' : u'ăj',
u'oao' : u'aw', u'oáo' : u'aw', u'oào' : u'aw', u'oảo' : u'aw', u'oão' : u'aw', u'oạo' : u'aw',
u'oeo' : u'ew', u'oéo' : u'ew', u'oèo' : u'ew', u'oẻo' : u'ew', u'oẽo' : u'ew', u'oẹo' : u'ew',
u'oeo' : u'ew', u'óeo' : u'ew', u'òeo' : u'ew', u'ỏeo' : u'ew', u'õeo' : u'ew', u'ọeo' : u'ew',
u'ueo' : u'ew', u'uéo' : u'ew', u'uèo' : u'ew', u'uẻo' : u'ew', u'uẽo' : u'ew', u'uẹo' : u'ew',
u'uai' : u'aj', u'uái' : u'aj', u'uài' : u'aj', u'uải' : u'aj', u'uãi' : u'aj', u'uại' : u'aj',
u'uay' : u'ăj', u'uáy' : u'ăj', u'uày' : u'ăj', u'uảy' : u'ăj', u'uãy' : u'ăj', u'uạy' : u'ăj',
u'uây' : u'ɤ̆j', u'uấy' : u'ɤ̆j', u'uầy' : u'ɤ̆j', u'uẩy' : u'ɤ̆j', u'uẫy' : u'ɤ̆j', u'uậy' : u'ɤ̆j'
}
C_codas = { u'p' : u'p', u't' : u'k', u'c' : u'k', u'm' : u'm', u'n' : u'ŋ', u'ng' : u'ŋ', u'nh' : u'n', u'ch' : u'k' }
# See Alves 2007 (SEALS XII), Vũ 1982
C_tones = { u'á' : 13, u'à' : 42, u'ả' : 312, u'ã' : 312, u'ạ' : u'21g',
u'ấ' : 13, u'ầ' : 42, u'ẩ' : 312, u'ẫ' : 312, u'ậ' : u'21g',
u'ắ' : 13, u'ằ' : 42, u'ẳ' : 312, u'ẵ' : 312, u'ặ' : u'21g',
u'é' : 13, u'è' : 42, u'ẻ' : 312, u'ẽ' : 312, u'ẹ' : u'21g',
u'ế' : 13, u'ề' : 42, u'ể' : 312, u'ễ' : 312, u'ệ' : u'21g',
u'í' : 13, u'ì' : 42, u'ỉ' : 312, u'ĩ' : 312, u'ị' : u'21g',
u'ó' : 13, u'ò' : 42, u'ỏ' : 312, u'õ' : 312, u'ọ' : u'21g',
u'ố' : 13, u'ồ' : 42, u'ổ' : 312, u'ỗ' : 312, u'ộ' : u'21g',
u'ớ' : 13, u'ờ' : 42, u'ở' : 312, u'ỡ' : 312, u'ợ' : u'21g',
u'ú' : 13, u'ù' : 42, u'ủ' : 312, u'ũ' : 312, u'ụ' : u'21g',
u'ứ' : 13, u'ừ' : 42, u'ử' : 312, u'ữ' : 312, u'ự' : u'21g',
u'ý' : 13, u'ỳ' : 42, u'ỷ' : 312, u'ỹ' : 312, u'ỵ' : u'21g',
}
# used to use \u02C0 for raised glottal instead of g
C_tones_p = { u'á' : 5, u'à' : 2, u'ả' : 4, u'ã' : 4, u'ạ' : 6,
u'ấ' : 5, u'ầ' : 2, u'ẩ' : 4, u'ẫ' : 4, u'ậ' : 6,
u'ắ' : 5, u'ằ' : 2, u'ẳ' : 4, u'ẵ' : 4, u'ặ' : 6,
u'é' : 5, u'è' : 2, u'ẻ' : 4, u'ẽ' : 4, u'ẹ' : 6,
u'ế' : 5, u'ề' : 2, u'ể' : 4, u'ễ' : 4, u'ệ' : 6,
u'í' : 5, u'ì' : 2, u'ỉ' : 4, u'ĩ' : 4, u'ị' : 6,
u'ó' : 5, u'ò' : 2, u'ỏ' : 4, u'õ' : 4, u'ọ' : 6,
u'ố' : 5, u'ồ' : 2, u'ổ' : 4, u'ỗ' : 4, u'ộ' : 6,
u'ớ' : 5, u'ờ' : 2, u'ở' : 4, u'ỡ' : 4, u'ợ' : 6,
u'ú' : 5, u'ù' : 2, u'ủ' : 4, u'ũ' : 4, u'ụ' : 6,
u'ứ' : 5, u'ừ' : 2, u'ử' : 4, u'ữ' : 4, u'ự' : 6,
u'ý' : 5, u'ỳ' : 2, u'ỷ' : 4, u'ỹ' : 4, u'ỵ' : 6,
}
C_gi = { u'gi' : u'ji', u'gí': u'ji', u'gì' : u'ji', u'gỉ' : u'ji', u'gĩ' : u'ji', u'gị' : u'ji' }
C_qu = {u'quy' : u'wi', u'qúy' : u'wi', u'qùy' : u'wi', u'qủy' : u'wi', u'qũy' : u'wi', u'qụy' : u'wi'}
############################################
#south.py
#coding: utf-8
S_onsets = { u'b' : u'b', u't' : u't', u'th' : u'tʰ', u'đ' : u'd', u'ch' : u'c',
u'kh' : u'x', u'g' : u'ɣ', u'l' : u'l', u'm' : u'm', u'n': u'n',
u'ngh': u'ŋ', u'nh' : u'ɲ', u'ng' : u'ŋ', u'ph' : u'f', u'v' : u'j',
u'x' : u's', u'd' : u'j', u'h' : u'h', u'p' : u'p', u'qu' : u'w',
u'gi' : u'j', u'tr' : u'ʈ', u'k' : u'k', u'c' : u'k', u'gh' : u'ɣ',
u'r' : u'ʐ', u's' : u'ʂ', u'gi' : u'j'
}
S_nuclei = { u'a' : u'a', u'á' : u'a', u'à' : u'a', u'ả' : u'a', u'ã' : u'a', u'ạ' : u'a',
u'â' : u'ɤ̆', u'ấ' : u'ɤ̆', u'ầ' : u'ɤ̆', u'ẩ' : u'ɤ̆', u'ẫ' : u'ɤ̆', u'ậ' : u'ɤ̆',
u'ă' : u'ă', u'ắ' : u'ă', u'ằ' : u'ă', u'ẳ' : u'ă', u'ẵ' : u'ă', u'ặ' : u'ă',
u'e' : u'ɛ', u'é' : u'ɛ', u'è' : u'ɛ', u'ẻ' : u'ɛ', u'ẽ' : u'ɛ', u'ẹ' : u'ɛ',
u'ê' : u'e', u'ế' : u'e', u'ề' : u'e', u'ể' : u'e', u'ễ' : u'e', u'ệ' : u'e',
u'i' : u'i', u'í' : u'i', u'ì' : u'i', u'ỉ' : u'i', u'ĩ' : u'i', u'ị' : u'i',
u'o' : u'ɔ', u'ó' : u'ɔ', u'ò' : u'ɔ', u'ỏ' : u'ɔ', u'õ' : u'ɔ', u'ọ' : u'ɔ',
u'ô' : u'o', u'ố' : u'o', u'ồ' : u'o', u'ổ' : u'o', u'ỗ' : u'o', u'ộ' : u'o',
u'ơ' : u'ɤ', u'ớ' : u'ɤ', u'ờ' : u'ɤ', u'ở' : u'ɤ', u'ỡ' : u'ɤ', u'ợ' : u'ɤ',
u'u' : u'u', u'ú' : u'u', u'ù' : u'u', u'ủ' : u'u', u'ũ' : u'u', u'ụ' : u'u',
u'ư' : u'ɯ', u'ứ' : u'ɯ', u'ừ' : u'ɯ', u'ử' : u'ɯ', u'ữ' : u'ɯ', u'ự' : u'ɯ',
u'y' : u'i', u'ý' : u'i', u'ỳ' : u'i', u'ỷ' : u'i', u'ỹ' : u'i', u'ỵ' : u'i',
u'eo' : u'eo', u'éo' : u'eo', u'èo' : u'eo', u'ẻo' : u'eo', u'ẽo': u'eo', u'ẹo' : u'eo',
u'êu' : u'ɛu', u'ếu' : u'ɛu', u'ều' : u'ɛu', u'ểu' : u'ɛu', u'ễu': u'ɛu', u'ệu' : u'ɛu',
u'ia' : u'iə', u'ía' : u'iə', u'ìa' : u'iə', u'ỉa' : u'iə', u'ĩa' : u'iə', u'ịa' : u'iə',
u'ia' : u'iə', u'iá' : u'iə', u'ià' : u'iə', u'iả' : u'iə', u'iã' : u'iə', u'iạ' : u'iə',
u'iê' : u'iə', u'iế' : u'iə', u'iề' : u'iə', u'iể' : u'iə', u'iễ' : u'iə', u'iệ' : u'iə',
u'oo' : u'ɔ', u'óo' : u'ɔ', u'òo' : u'ɔ', u'ỏo' : u'ɔ', u'õo' : u'ɔ', u'ọo' : u'ɔ',
u'oo' : u'ɔ', u'oó' : u'ɔ', u'oò' : u'ɔ', u'oỏ' : u'ɔ', u'oõ' : u'ɔ', u'oọ' : u'ɔ',
            u'ôô' : u'o', u'ốô' : u'o', u'ồô' : u'o', u'ổô' : u'o', u'ỗô' : u'o', u'ộô' : u'o',
            u'ôô' : u'o', u'ôố' : u'o', u'ôồ' : u'o', u'ôổ' : u'o', u'ôỗ' : u'o', u'ôộ' : u'o',
            u'ua' : u'uə', u'úa' : u'uə', u'ùa' : u'uə', u'ủa' : u'uə', u'ũa' : u'uə', u'ụa' : u'uə',
u'uô' : u'uə', u'uố' : u'uə', u'uồ' : u'uə', u'uổ' : u'uə', u'uỗ' : u'uə', u'uộ' : u'uə',
u'ưa' : u'ɯə', u'ứa' : u'ɯə', u'ừa' : u'ɯə', u'ửa' : u'ɯə', u'ữa' : u'ɯə', u'ựa' : u'ɯə',
u'ươ' : u'ɯə', u'ướ' : u'ɯə', u'ườ' : u'ɯə', u'ưở' : u'ɯə', u'ưỡ' : u'ɯə', u'ượ' : u'ɯə',
u'yê' : u'iɛ', u'yế' : u'iɛ', u'yề' : u'iɛ', u'yể' : u'iɛ', u'yễ' : u'iɛ', u'yệ' : u'iɛ',
u'uơ' : u'uə', u'uở' : u'uə', u'uờ': u'uə', u'uở' : u'uə', u'uỡ' : u'uə', u'uợ' : u'uə',
}
S_offglides = { u'ai' : u'aj', u'ái' : u'aj', u'ài' : u'aj', u'ải' : u'aj', u'ãi' : u'aj', u'ại' : u'aj',
u'ay' : u'ăj', u'áy' : u'ăj', u'ày' : u'ăj', u'ảy' : u'ăj', u'ãy' : u'ăj', u'ạy' : u'ăj',
u'ao' : u'aw', u'áo' : u'aw', u'ào' : u'aw', u'ảo' : u'aw', u'ão' : u'aw', u'ạo' : u'aw',
u'au' : u'ăw', u'áu' : u'ăw', u'àu' : u'ăw', u'ảu' : u'ăw', u'ãu' : u'ăw', u'ạu' : u'ăw',
u'ây' : u'ɤ̆j', u'ấy' : u'ɤ̆j', u'ầy' : u'ɤ̆j', u'ẩy' : u'ɤ̆j', u'ẫy' : u'ɤ̆j', u'ậy' : u'ɤ̆j',
u'âu' : u'ɤ̆w', u'ấu' : u'ɤ̆w', u'ầu': u'ɤ̆w', u'ẩu' : u'ɤ̆w', u'ẫu' : u'ɤ̆w', u'ậu' : u'ɤ̆w',
u'eo' : u'ew', u'éo' : u'ew', u'èo' : u'ew', u'ẻo' : u'ew', u'ẽo' : u'ew', u'ẹo' : u'ew',
u'iu' : u'iw', u'íu' : u'iw', u'ìu' : u'iw', u'ỉu' : u'iw', u'ĩu' : u'iw', u'ịu' : u'iw',
u'oi' : u'ɔj', u'ói' : u'ɔj', u'òi' : u'ɔj', u'ỏi' : u'ɔj', u'õi' : u'ɔj', u'ọi' : u'ɔj',
u'ôi' : u'oj', u'ối' : u'oj', u'ồi' : u'oj', u'ổi' : u'oj', u'ỗi' : u'oj', u'ội' : u'oj',
u'ui' : u'uj', u'úi' : u'uj', u'ùi' : u'uj', u'ủi' : u'uj', u'ũi' : u'uj', u'ụi' : u'uj',
u'uy' : u'uj', u'úy' : u'uj', u'ùy' : u'uj', u'ủy' : u'uj', u'ũy' : u'uj', u'ụy' : u'uj',
u'ơi' : u'ɤj', u'ới' : u'ɤj', u'ời' : u'ɤj', u'ởi' : u'ɤj', u'ỡi' : u'ɤj', u'ợi' : u'ɤj',
u'ưi' : u'ɯj', u'ứi' : u'ɯj', u'ừi' : u'ɯj', u'ửi' : u'ɯj', u'ữi' : u'ɯj', u'ựi' : u'ɯj',
u'ưu' : u'ɯw', u'ứu' : u'ɯw', u'ừu' : u'ɯw', u'ửu' : u'ɯw', u'ữu' : u'ɯw', u'ựu' : u'ɯw',
u'iêu' : u'iəw', u'iếu' : u'iəw', u'iều' : u'iəw', u'iểu' : u'iəw', u'iễu' : u'iəw', u'iệu' : u'iəw',
u'yêu' : u'iəw', u'yếu' : u'iəw', u'yều' : u'iəw', u'yểu' : u'iəw', u'yễu' : u'iəw', u'yệu' : u'iəw',
u'uôi' : u'uəj', u'uối' : u'uəj', u'uồi' : u'uəj', u'uổi' : u'uəj', u'uỗi' : u'uəj', u'uội' : u'uəj',
u'ươi' : u'ɯəj', u'ưới' : u'ɯəj', u'ười' : u'ɯəj', u'ưởi' : u'ɯəj', u'ưỡi' : u'ɯəj', u'ượi' : u'ɯəj',
                u'ươu' : u'ɯəw', u'ướu' : u'ɯəw', u'ườu' : u'ɯəw', u'ưởu' : u'ɯəw', u'ưỡu' : u'ɯəw', u'ượu' : u'ɯəw'
}
S_onglides = { u'oa' : u'a', u'oá' : u'a', u'oà' : u'a', u'oả' : u'a', u'oã' : u'a', u'oạ' : u'a',
u'óa' : u'a', u'òa' : u'a', u'ỏa' : u'a', u'õa' : u'a', u'ọa' : u'a',
u'oă' : u'ă', u'oắ' : u'ă', u'oằ' : u'ă', u'oẳ' : u'ă', u'oẵ' : u'ă', u'oặ' : u'ă',
u'oe' : u'e', u'oé' : u'e', u'oè' : u'e', u'oẻ' : u'e', u'oẽ' : u'e', u'oẹ' : u'e',
u'oe' : u'e', u'óe' : u'e', u'òe' : u'e', u'ỏe' : u'e', u'õe' : u'e', u'ọe' : u'e',
u'ua' : u'a', u'uá' : u'a', u'uà' : u'a', u'uả' : u'a', u'uã' : u'a', u'uạ' : u'a',
u'uă' : u'ă', u'uắ' : u'ă', u'uằ' : u'ă', u'uẳ' : u'ă', u'uẵ' : u'ă', u'uặ' : u'ă',
u'uâ' : u'ɤ̆', u'uấ' : u'ɤ̆', u'uầ' : u'ɤ̆', u'uẩ' : u'ɤ̆', u'uẫ' : u'ɤ̆', u'uậ' : u'ɤ̆',
u'ue' : u'ɛ', u'ué' : u'ɛ', u'uè' : u'ɛ', u'uẻ' : u'ɛ', u'uẽ' : u'ɛ', u'uẹ' : u'ɛ',
u'uê' : u'e', u'uế' : u'e', u'uề' : u'e', u'uể' : u'e', u'uễ' : u'e', u'uệ' : u'e',
u'uơ' : u'ɤ', u'uớ' : u'ɤ', u'uờ' : u'ɤ', u'uở' : u'ɤ', u'uỡ' : u'ɤ', u'uợ' : u'ɤ',
u'uy' : u'i', u'uý' : u'i', u'uỳ' : u'i', u'uỷ' : u'i', u'uỹ' : u'i', u'uỵ' : u'i',
u'uya' : u'iə', u'uyá' : u'iə', u'uyà' : u'iə', u'uyả' : u'iə', u'uyã' : u'iə', u'uyạ' : u'iə',
u'uyê' : u'iə', u'uyế' : u'iə', u'uyề' : u'iə', u'uyể' : u'iə', u'uyễ' : u'iə', u'uyệ' : u'iə',
u'uyu' : u'iu', u'uyú' : u'iu', u'uyù' : u'iu', u'uyủ' : u'iu', u'uyũ' : u'iu', u'uyụ' : u'iu',
u'uyu' : u'iu', u'uýu' : u'iu', u'uỳu' : u'iu', u'uỷu' : u'iu', u'uỹu' : u'iu', u'uỵu' : u'iu',
u'oen' : u'en', u'oén' : u'en', u'oèn' : u'en', u'oẻn' : u'en', u'oẽn' : u'en', u'oẹn' : u'en',
u'oet' : u'et', u'oét' : u'et', u'oèt' : u'et', u'oẻt' : u'et', u'oẽt' : u'et', u'oẹt' : u'et'
}
S_onoffglides = { u'oe' : u'ej', u'oé' : u'ej', u'oè' : u'ej', u'oẻ' : u'ej', u'oẽ' : u'ej', u'oẹ' : u'ej',
u'oai' : u'aj', u'oái' : u'aj', u'oài' : u'aj', u'oải' : u'aj', u'oãi' : u'aj', u'oại' : u'aj',
u'oay' : u'ăj', u'oáy' : u'ăj', u'oày' : u'ăj', u'oảy' : u'ăj', u'oãy' : u'ăj', u'oạy' : u'ăj',
u'oao' : u'aw', u'oáo' : u'aw', u'oào' : u'aw', u'oảo' : u'aw', u'oão' : u'aw', u'oạo' : u'aw',
u'oeo' : u'ew', u'oéo' : u'ew', u'oèo' : u'ew', u'oẻo' : u'ew', u'oẽo' : u'ew', u'oẹo' : u'ew',
u'oeo' : u'ew', u'óeo' : u'ew', u'òeo' : u'ew', u'ỏeo' : u'ew', u'õeo' : u'ew', u'ọeo' : u'ew',
u'ueo' : u'ew', u'uéo' : u'ew', u'uèo' : u'ew', u'uẻo' : u'ew', u'uẽo' : u'ew', u'uẹo' : u'ew',
u'uai' : u'aj', u'uái' : u'aj', u'uài' : u'aj', u'uải' : u'aj', u'uãi' : u'aj', u'uại' : u'aj',
u'uay' : u'ăj', u'uáy' : u'ăj', u'uày' : u'ăj', u'uảy' : u'ăj', u'uãy' : u'ăj', u'uạy' : u'ăj',
u'uây' : u'ɤ̆j', u'uấy' : u'ɤ̆j', u'uầy' : u'ɤ̆j', u'uẩy' : u'ɤ̆j', u'uẫy' : u'ɤ̆j', u'uậy' : u'ɤ̆j'
}
S_codas = { u'p' : u'p', u't' : u't', u'c' : u'k', u'm' : u'm', u'n' : u'ŋ', u'ng' : u'ŋ', u'nh' : u'n', u'ch' : u't' }
S_tones = { u'á' : 45, u'à' : 32, u'ả' : 214, u'ã' : 214, u'ạ' : 212,
u'ấ' : 45, u'ầ' : 32, u'ẩ' : 214, u'ẫ' : 214, u'ậ' : 212,
u'ắ' : 45, u'ằ' : 32, u'ẳ' : 214, u'ẵ' : 214, u'ặ' : 212,
u'é' : 45, u'è' : 32, u'ẻ' : 214, u'ẽ' : 214, u'ẹ' : 212,
u'ế' : 45, u'ề' : 32, u'ể' : 214, u'ễ' : 214, u'ệ' : 212,
u'í' : 45, u'ì' : 32, u'ỉ' : 214, u'ĩ' : 214, u'ị' : 212,
u'ó' : 45, u'ò' : 32, u'ỏ' : 214, u'õ' : 214, u'ọ' : 212,
u'ố' : 45, u'ồ' : 32, u'ổ' : 214, u'ỗ' : 214, u'ộ' : 212,
u'ớ' : 45, u'ờ' : 32, u'ở' : 214, u'ỡ' : 214, u'ợ' : 212,
u'ú' : 45, u'ù' : 32, u'ủ' : 214, u'ũ' : 214, u'ụ' : 212,
u'ứ' : 45, u'ừ' : 32, u'ử' : 214, u'ữ' : 214, u'ự' : 212,
u'ý' : 45, u'ỳ' : 32, u'ỷ' : 214, u'ỹ' : 214, u'ỵ' : 212,
}
S_tones_p = { u'á' : 5, u'à' : 2, u'ả' : 4, u'ã' : 4, u'ạ' : 6,
u'ấ' : 5, u'ầ' : 2, u'ẩ' : 4, u'ẫ' : 4, u'ậ' : 6,
u'ắ' : 5, u'ằ' : 2, u'ẳ' : 4, u'ẵ' : 4, u'ặ' : 6,
u'é' : 5, u'è' : 2, u'ẻ' : 4, u'ẽ' : 4, u'ẹ' : 6,
u'ế' : 5, u'ề' : 2, u'ể' : 4, u'ễ' : 4, u'ệ' : 6,
u'í' : 5, u'ì' : 2, u'ỉ' : 4, u'ĩ' : 4, u'ị' : 6,
u'ó' : 5, u'ò' : 2, u'ỏ' : 4, u'õ' : 4, u'ọ' : 6,
u'ố' : 5, u'ồ' : 2, u'ổ' : 4, u'ỗ' : 4, u'ộ' : 6,
u'ớ' : 5, u'ờ' : 2, u'ở' : 4, u'ỡ' : 4, u'ợ' : 6,
u'ú' : 5, u'ù' : 2, u'ủ' : 4, u'ũ' : 4, u'ụ' : 6,
u'ứ' : 5, u'ừ' : 2, u'ử' : 4, u'ữ' : 4, u'ự' : 6,
u'ý' : 5, u'ỳ' : 2, u'ỷ' : 4, u'ỹ' : 4, u'ỵ' : 6,
}
S_gi = { u'gi' : u'ji', u'gí': u'ji', u'gì' : u'ji', u'gỉ' : u'ji', u'gĩ' : u'ji', u'gị' : u'ji' }
S_qu = {u'quy' : u'wi', u'qúy' : u'wi', u'qùy' : u'wi', u'qủy' : u'wi', u'qũy' : u'wi', u'qụy' : u'wi'}
################################################3
import sys, codecs, re
from io import StringIO
from optparse import OptionParser
from string import punctuation
def trans(word, dialect, glottal, pham, cao, palatals):
# This looks ugly, but newer versions of python complain about "from x import *" syntax
if dialect == 'n':
onsets, nuclei, codas, tones, onglides, offglides, onoffglides, qu, gi = N_onsets, N_nuclei, N_codas, N_tones, N_onglides, N_offglides, N_onoffglides, N_qu, N_gi
elif dialect == 'c':
onsets, nuclei, codas, tones, onglides, offglides, onoffglides, qu, gi = C_onsets, C_nuclei, C_codas, C_tones, C_onglides, C_offglides, C_onoffglides, C_qu, C_gi
elif dialect == 's':
onsets, nuclei, codas, tones, onglides, offglides, onoffglides, qu, gi = S_onsets, S_nuclei, S_codas, S_tones, S_onglides, S_offglides, S_onoffglides, S_qu, S_gi
#Custom
onsets, nuclei, codas, onglides, offglides, onoffglides, qu, gi = Cus_onsets, Cus_nuclei, Cus_codas, Cus_onglides, Cus_offglides, Cus_onoffglides, Cus_qu, Cus_gi
if pham or cao:
if dialect == 'n': tones_p = N_tones_p
if dialect == 'c': tones_p = C_tones_p
if dialect == 's': tones_p = S_tones_p
#Custom
tones_p = Cus_tones_p
tones = tones_p
ons = ''
nuc = ''
cod = ''
ton = 0
oOffset = 0
cOffset = 0
l = len(word)
if l > 0:
if word[0:3] in onsets: # if onset is 'ngh'
ons = onsets[word[0:3]]
oOffset = 3
elif word[0:2] in onsets: # if onset is 'nh', 'gh', 'kʷ' etc
ons = onsets[word[0:2]]
oOffset = 2
elif word[0] in onsets: # if single onset
ons = onsets[word[0]]
oOffset = 1
if word[l-2:l] in codas: # if two-character coda
cod = codas[word[l-2:l]]
cOffset = 2
elif word[l-1] in codas: # if one-character coda
cod = codas[word[l-1]]
cOffset = 1
#if word[0:2] == u'gi' and cod and len(word) == 3: # if you just have 'gi' and a coda...
if word[0:2] in gi and cod and len(word) == 3: # if you just have 'gi' and a coda...
nucl = u'i'
ons = u'z'
else:
nucl = word[oOffset:l-cOffset]
if nucl in nuclei:
if oOffset == 0:
if glottal == 1:
if word[0] not in onsets: # if there isn't an onset....
ons = u'ʔ'+nuclei[nucl] # add a glottal stop
else: # otherwise...
nuc = nuclei[nucl] # there's your nucleus
else:
nuc = nuclei[nucl] # there's your nucleus
else: # otherwise...
nuc = nuclei[nucl] # there's your nucleus
elif nucl in onglides and ons != u'kw': # if there is an onglide...
nuc = onglides[nucl] # modify the nuc accordingly
if ons: # if there is an onset...
ons = ons+u'w' # labialize it, but...
else: # if there is no onset...
ons = u'w' # add a labiovelar onset
elif nucl in onglides and ons == u'kw':
nuc = onglides[nucl]
elif nucl in onoffglides:
cod = onoffglides[nucl][-1]
nuc = onoffglides[nucl][0:-1]
if ons != u'kw':
if ons:
ons = ons+u'w'
else:
ons = u'w'
elif nucl in offglides:
cod = offglides[nucl][-1]
nuc = offglides[nucl][:-1]
elif word in gi: # if word == 'gi', 'gì',...
ons = gi[word][0]
nuc = gi[word][1]
elif word in qu: # if word == 'quy', 'qúy',...
ons = qu[word][:-1]
nuc = qu[word][-1]
else:
# Something is non-Viet
return (None, None, None, None)
# Velar Fronting (Northern dialect)
if dialect == 'n':
if nuc == u'a':
if cod == u'k' and cOffset == 2: nuc = u'ɛ'
if cod == u'ɲ' and nuc == u'a': nuc = u'ɛ'
# Final palatals (Northern dialect)
if nuc not in [u'i', u'e', u'ɛ']:
if cod == u'ɲ':
cod = u'ɲ' # u'ŋ'
elif palatals != 1 and nuc in [u'i', u'e', u'ɛ']:
if cod == u'ɲ':
cod = u'ɲ'#u'ŋ'
if palatals == 1:
if cod == u'k' and nuc in [u'i', u'e', u'ɛ']:
cod = u'c'
# Velar Fronting (Southern and Central dialects)
else:
if nuc in [u'i', u'e']:
if cod == u'k': cod = u't'
if cod == u'ŋ': cod = u'n'
# There is also this reverse fronting, see Thompson 1965:94 ff.
elif nuc in [u'iə', u'ɯə', u'uə', u'u', u'ɯ', u'ɤ', u'o', u'ɔ', u'ă', u'ɤ̆']:
if cod == u't':
cod = u'k'
if cod == u'n': cod = u'ŋ'
# Monophthongization (Southern dialects: Thompson 1965: 86; Hoàng 1985: 181)
if dialect == 's':
if cod in [u'm', u'p']:
if nuc == u'iə': nuc = u'i'
if nuc == u'uə': nuc = u'u'
if nuc == u'ɯə': nuc = u'ɯ'
# Tones
# Modified 20 Sep 2008 to fix aberrant 33 error
tonelist = [tones[word[i]] for i in range(0,l) if word[i] in tones]
if tonelist:
ton = str(tonelist[len(tonelist)-1])
else:
if not (pham or cao):
if dialect == 'c':
ton = str('35')
else:
ton = str('33')
else:
ton = str('1')
# Modifications for closed syllables
if cOffset !=0:
# Obstruent-final nang tones are modal voice
if (dialect == 'n' or dialect == 's') and ton == u'21g' and cod in ['p', 't', 'k']:
#if ton == u'21\u02C0' and cod in ['p', 't', 'k']: # fixed 8 Nov 2016
ton = u'21'
# Modification for sắc in closed syllables (Northern and Central only)
if ((dialect == 'n' and ton == u'24') or (dialect == 'c' and ton == u'13')) and cod in ['p', 't', 'k']:
ton = u'45'
# Modification for 8-tone system
if cao == 1:
if ton == u'5' and cod in ['p', 't', 'k']:
ton = u'5b'
if ton == u'6' and cod in ['p', 't', 'k']:
ton = u'6b'
# labialized allophony (added 17.09.08)
if nuc in [u'u', u'o', u'ɔ']:
if cod == u'ŋ':
cod = u'ŋ͡m'
if cod == u'k':
cod = u'k͡p'
return (ons, nuc, cod, ton)
def convert(word, dialect, glottal, pham, cao, palatals, delimit):
"""Convert a single orthographic string to IPA."""
ons = ''
nuc = ''
cod = ''
ton = 0
seq = ''
try:
(ons, nuc, cod, ton) = trans(word, dialect, glottal, pham, cao, palatals)
if None in (ons, nuc, cod, ton):
seq = u'['+word+u']'
else:
seq = delimit+delimit.join(filter(None, (ons, nuc, cod, ton)))+delimit
except (TypeError):
pass
return seq
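#Quick, hedged sanity checks for convert() (arguments: word, dialect, glottal, pham, cao, palatals, delimit);
#the expected outputs below are assumptions based on the Cus_* tables above, not guaranteed values:
#print(convert(u'thích', 'n', 0, 1, 0, 0, ''))   # -> something like u'tʰitʃ5'
#print(convert(u'đây', 'n', 0, 1, 0, 0, '/'))    # -> something like u'/d/ɤ̆j/1/'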
########################333
from vinorm import *
from underthesea import word_tokenize
import eng_to_ipa
SET=[S_onsets, S_nuclei, S_codas#, S_tones
, S_onglides, S_offglides, S_onoffglides, S_qu, S_gi, C_onsets, C_nuclei, C_codas#, C_tones
, C_onglides, C_offglides, C_onoffglides, C_qu, C_gi, N_onsets, N_nuclei, N_codas#, N_tones
, N_onglides, N_offglides, N_onoffglides, N_qu, N_gi, Cus_onsets, Cus_nuclei, Cus_codas#, N_tones
, Cus_onglides, Cus_offglides, Cus_onoffglides, Cus_qu, Cus_gi]
DICT={}
#144 in total
syms=['ɯəj', 'ɤ̆j', 'ʷiə', 'ɤ̆w', 'ɯəw', 'ʷet', 'iəw', 'uəj', 'ʷen', 'tʰw', 'ʷɤ̆', 'ʷiu', 'kwi', 'ŋ͡m', 'k͡p', 'cw', 'jw', 'uə', 'eə', 'bw', 'oj', 'ʷi', 'vw', 'ăw', 'ʈw', 'ʂw', 'aʊ', 'fw', 'ɛu', 'tʰ', 'tʃ', 'ɔɪ', 'xw', 'ʷɤ', 'ɤ̆', 'ŋw', 'ʊə', 'zi', 'ʷă', 'dw', 'eɪ', 'aɪ', 'ew', 'iə', 'ɣw', 'zw', 'ɯj', 'ʷɛ', 'ɯw', 'ɤj', 'ɔ:', 'əʊ', 'ʷa', 'mw', 'ɑ:', 'hw', 'ɔj', 'uj', 'lw', 'ɪə', 'ăj', 'u:', 'aw', 'ɛj', 'iw', 'aj', 'ɜ:', 'kw', 'nw', 't∫', 'ɲw', 'eo', 'sw', 'tw', 'ʐw', 'iɛ', 'ʷe', 'i:', 'ɯə', 'dʒ', 'ɲ', 'θ', 'ʌ', 'l', 'w', '1', 'ɪ', 'ɯ', 'd', '∫', 'p', 'ə', 'u', 'o', '3', 'ɣ', '!', 'ð', 'ʧ', '6', 'ʒ', 'ʐ', 'z', 'v', 'g', 'ă', '_', 'æ', 'ɤ', '2', 'ʤ', 'i', '.', 'ɒ', 'b', 'h', 'n', 'ʂ', 'ɔ', 'ɛ', 'k', 'm', '5', ' ', 'c', 'j', 'x', 'ʈ', ',', '4', 'ʊ', 's', 'ŋ', 'a', 'ʃ', '?', 'r', ':', 'η', 'f', ';', 'e', 't', "'"]
def Parsing(listParse, text, delimit):
undefine_symbol = "'"
if listParse == "default":
listParse=['ʷiə', 'uəj', 'iəw', 'k͡p', 'ʷɤ̆', 'ɤ̆j', 'ŋ͡m', 'kwi', 'ɤ̆w', 'ɯəj', 'ʷen', 'ʷiu', 'ʷet', 'ɯəw', 'ʷɛ', 'ʷɤ', 'ɯj', 'oj', 'ăw', 'zi', 'kw', 'aɪ', 'iɛ', 'ɤ̆', 'ɔ:', 'ăj', 'ʷa', 'eə', 'u:', 'uj', 'aʊ', 'uə', 'aj', 'iə', 'iw', 'əʊ', 'ɑ:', 'tʃ', 'ʷe', 'ɛu', 'ɔɪ', 'ʷi', 'eɪ', 'ɤj', 'ɯw', 'ɛj', 'ɔj', 'i:', 't∫', 'ɪə', 'ʷă', 'ɜ:', 'tʰ', 'dʒ', 'ew', 'ʊə', 'ɯə', 'aw', '3', 'θ', 'v', 'ʊ', 'ʤ', 'ɔ', '1', 'ʧ', 'ʈ', ' ', 'd', 'i', 'ɣ', 'ɲ', 'ɤ', '?', 'ɪ', 'l', '.', 'j', ':', 't', 'ʒ', 'ə', 'ʌ', 'm', '!', '∫', 'ð', 'u', 'e', 'w', 'p', 'ʃ', 'æ', "'", 'h', 'o', 'k', '5', 'g', '4', 'n', ';', 'r', 'b', 'ɯ', 'a', 's', 'ʐ', 'η', 'ŋ', 'ɒ', 'ʂ', '_', 'f', ',', 'ɛ', 'z', '6', '2', 'x', 'ă']
listParse.sort(reverse = True,key=len)
output=""
skip=0
for ic,char in enumerate(text):
#print(char,skip)
check = 0
if skip>0:
skip=skip-1
continue
for l in listParse:
if len(l) <= len(text[ic:]) and l == text[ic:ic+len(l)]:
output+=delimit+l
check =1
skip=len(l)-1
break
if check == 0:
#Case symbol not in list
if str(char) in ["ˈ","ˌ","*"]:
continue
print("this is not in symbol :"+ char+":")
output+=delimit+undefine_symbol
return output.rstrip()+delimit
#print("Parsing",Parsing("default","iu iu","|"))
def getSymbol():
for s in SET:
DICT.update(s)
list_phoneme=DICT.values()
list_phoneme=list(list_phoneme)
English_phoneme=["p","b","t","d","t∫","dʒ","k","g","f","v","ð","θ","s","z","∫","ʒ","m","n","η","l","r","w","j","ɪ","i:","ʊ","u:","e","ə","ɜ:","ɒ","ɔ:","æ","ʌ","ɑ:","ɪə","ʊə","eə","eɪ","ɔɪ","aɪ","əʊ","aʊ",'ʃ',"ʤ","ʧ"]
Special=['jw', 'ŋw', 'bw', 'vw', 'dw', 'eo', 'ʈw', 'mw', 'zw', 'fw', 'tw', 'tʰw', 'ɲw', 'cw', 'ʂw', 'ɣw', 'ʐw', 'xw', 'lw', 'hw', 'nw', 'sw', 'c']
word_pad = ["_"]
space = [" "]
tone=["1","2","3","4","5","6"]
    punctuation = [".",",","!",":","?",";","'"] #" ' ( ) have been removed because they produce no sound
modifi = ["k͡p","ŋ͡m"]
symbols = list_phoneme + space+word_pad + English_phoneme + punctuation + tone + modifi + Special
symbols = list(set(symbols))
symbols.sort(reverse = True,key=len)
return symbols
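#getSymbol() gathers every phoneme value from the dialect tables plus the English phonemes,
#punctuation, tones and special symbols, deduplicates them and sorts them longest-first so that
#multi-character symbols are matched before their single-character prefixes.
#Assumed usage: symbols = getSymbol()   # should roughly match the hard-coded syms list above (~144 entries)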
def vi2IPA_pitrain(text):
    import epitran  # epitran is only needed for this helper and is not imported at module level
    epi = epitran.Epitran('vie-Latn')
r=epi.transliterate(text)
return r
def T2IPA_split(text,delimit):
sys.path.append('./Rules') # make sure we can find the Rules files
#Setup option
glottal = 0
pham = 0
cao = 0
palatals = 0
tokenize = 0
dialect='n' #"c""s"
tone_type=0
if tone_type==0:
pham=1
else:
cao=1
#Input text
line = text
if line =='\n':
return ""
else:
compound = u''
ortho = u''
words = line.split()
## toss len==0 junk
words = [word for word in words if len(word)>0]
## hack to get rid of single hyphens or underscores
words = [word for word in words if word!=u'-']
words = [word for word in words if word!=u'_']
for i in range(0,len(words)):
word = words[i].strip()
ortho += word
word = word.strip(punctuation).lower()
## 29.03.16: check if tokenize is true
## if true, call this routine for each substring
## and re-concatenate
if (tokenize and '-' in word) or (tokenize and '_' in word):
substrings = re.split(r'(_|-)', word)
values = substrings[::2]
delimiters = substrings[1::2] + ['']
ipa = [convert(x, dialect, glottal, pham, cao, palatals, delimit).strip() for x in values]
seq = ''.join(v+d for v,d in zip(ipa, delimiters))
else:
seq = convert(word, dialect, glottal, pham, cao, palatals, delimit).strip()
# concatenate
if len(words) >= 2:
ortho += ' '
if i < len(words)-1:
seq = seq+u' '
compound = compound + seq
return compound
def T2IPA(text):
sys.path.append('./Rules') # make sure we can find the Rules files
#Setup option
glottal = 0
pham = 0
cao = 0
palatals = 0
tokenize = 0
delimit = ''
dialect='n' #"c""s"
tone_type=0
if tone_type==0:
pham=1
else:
cao=1
#Input text
line = text
if line =='\n':
return ""
else:
compound = u''
ortho = u''
words = line.split()
## toss len==0 junk
words = [word for word in words if len(word)>0]
## hack to get rid of single hyphens or underscores
words = [word for word in words if word!=u'-']
words = [word for word in words if word!=u'_']
for i in range(0,len(words)):
word = words[i].strip()
ortho += word
word = word.strip(punctuation).lower()
## 29.03.16: check if tokenize is true
## if true, call this routine for each substring
## and re-concatenate
if (tokenize and '-' in word) or (tokenize and '_' in word):
substrings = re.split(r'(_|-)', word)
values = substrings[::2]
delimiters = substrings[1::2] + ['']
ipa = [convert(x, dialect, glottal, pham, cao, palatals, delimit).strip() for x in values]
seq = ''.join(v+d for v,d in zip(ipa, delimiters))
else:
seq = convert(word, dialect, glottal, pham, cao, palatals, delimit).strip()
# concatenate
if len(words) >= 2:
ortho += ' '
if i < len(words)-1:
seq = seq+u' '
compound = compound + seq
return compound
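#Hedged usage examples for T2IPA() (no delimiter between phonemes); the exact strings depend on
#the custom tables and the pham tone digits:
#print(T2IPA(u'xin chào'))   # expected to be close to u'sin1 caw2'
#print(T2IPA(u'hello'))      # non-Vietnamese syllables come back bracketed, e.g. u'[hello]'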
EN={"a":"ây","ă":"á","â":"ớ","b":"bi","c":"si","d":"đi","đ":"đê","e":"i","ê":"ê","f":"ép","g":"giy","h":"ếch","i":"ai","j":"giây","k":"cây","l":"eo","m":"em","n":"en","o":"âu","ô":"ô","ơ":"ơ","p":"pi","q":"kiu","r":"a","s":"ét","t":"ti","u":"diu","ư":"ư","v":"vi","w":"đắp liu","x":"ít","y":"quai","z":"giét"}
import re
def vi2IPA_split(texts,delimit):
content=[]
with open("Popular.txt",encoding="utf-8") as f:
content=f.read().splitlines()
tess = texts.split(".")
Results =""
for text in tess:
print("------------------------------------------------------")
TN= TTSnorm(text)
print("------------------------------------------------------")
print("Text normalize: ",TN)
TK= word_tokenize(TN)
print("Vietnamese Tokenize: ",TK)
for iuv,under_valid in enumerate(TK):
token_under=under_valid.split(" ")
checkinvalid=0
print(token_under)
if len(token_under) >1:
for tok in token_under:
if tok not in content or "[" in T2IPA(tok):
checkinvalid=1
if checkinvalid==1:
TK = TK[:iuv] + TK[iuv+1 :]
for tok in reversed(token_under):
TK.insert(iuv, tok)
IPA=""
for tk in TK:
ipa = T2IPA_split(tk,delimit).replace(" ","_")
if ipa =="":
IPA+=delimit+tk+delimit+" "
elif ipa[0]=="[" and ipa[-1]=="]":
eng = eng_to_ipa.convert(tk)
if eng[-1] == "*":
if tk.lower().upper() == tk:
#print("ENGLISH",tk)
                        #Spell the English word out letter by letter
letter2sound=""
for char in tk:
CHAR = str(char).lower()
if CHAR in list(EN.keys()):
letter2sound+=EN[CHAR]+" "
else:
letter2sound+=char+" "
IPA+=T2IPA_split(letter2sound,delimit)+" "
else:
                        #Keep the token unchanged
IPA+=Parsing("default",tk.lower(),delimit)+" "
else:
IPA+=Parsing("default",eng,delimit)+" "
                #Check the English dictionary (etrain / eng_to_ipa) first
                #If a mapping exists, use it
                #If not, check whether the token contains vowels
                #If it does, keep it as-is
                #If not, spell it out letter by letter
print(" ..................Out of domain word: " ,ipa)
else:
IPA+=ipa+" "
IPA=re.sub(delimit+'+', delimit, IPA)
IPA=re.sub(' +', ' ', IPA)
print("IPA Vietnamese: ",IPA)
print("------------------------------------------------------")
Results+= IPA.rstrip()+" "+delimit+"."+delimit+" "
    #For checking: this needs a lot of memory
'''
check_sym="ɯəjɤ̆jʷiəɤ̆wɯəwʷetiəwuəjʷentʰwʷɤ̆ʷiukwiŋ͡mk͡pcwjwuəeəbwojʷivwăwʈwʂwaʊfwɛutʰtʃɔɪxwʷɤɤ̆ŋwʊəziʷădweɪaɪewiəɣwzwɯjʷɛɯwɤjɔ:əʊʷamwɑ:hwɔjujlwɪəăju:awɛjiwajɜ:kwnwt∫ɲweoswtwʐwiɛʷei:ɯədʒɲθʌlw1ɪɯd∫pəuo3ɣ!ðʧ6ʒʐzvgă_æɤ2ʤi.ɒbhnʂɔɛkm5cjxʈ,4ʊsŋaʃ?r:ηf;et'"
for ine,res in enumerate(Results):
if res not in check_sym:
Results[ine]="'"
'''
return Results.rstrip()
def vi2IPA(text):
print("------------------------------------------------------")
TN= TTSnorm(text)
print("------------------------------------------------------")
print("Text normalize: ",TN)
TK= word_tokenize(TN)
print("Vietnamese Tokenize: ",TK)
IPA=""
for tk in TK:
ipa = T2IPA(tk).replace(" ","_")
if ipa =="":
IPA+=tk+" "
elif ipa[0]=="[" and ipa[-1]=="]":
eng = eng_to_ipa.convert(tk)
if eng[-1] == "*":
if tk.lower().upper() == tk:
                    #Spell the English word out letter by letter
letter2sound=""
for char in tk:
CHAR = str(char).lower()
if CHAR in list(EN.keys()):
letter2sound+=EN[CHAR]+" "
else:
letter2sound+=char+" "
IPA+=T2IPA_split(letter2sound,"")+" "
else:
                    #Keep the token unchanged
IPA+=Parsing("default",tk,"")+" "
else:
IPA+=eng+" "
            #Check the English dictionary (etrain / eng_to_ipa) first
            #If a mapping exists, use it
            #If not, check whether the token contains vowels
            #If it does, keep it as-is
            #If not, spell it out letter by letter
print(" ..................Out of domain word: " ,ipa)
else:
IPA+=ipa+" "
IPA=re.sub(' +', ' ', IPA)
print("IPA Vietnamese: ",IPA)
print("------------------------------------------------------")
return IPA
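#vi2IPA() is the high-level entry point: normalize the text with vinorm.TTSnorm, tokenize it with
#underthesea.word_tokenize, convert each token with T2IPA, and fall back to eng_to_ipa or
#letter-by-letter spelling for English / out-of-vocabulary tokens.
#Hedged example (the exact output depends on the normalizer and tokenizer versions):
#print(vi2IPA(u'tôi học tiếng Việt'))   # multi-syllable tokens are joined with '_'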
def checkDict():
cout=0
trung=0
List_token=[]
List_pair = []
with open("Popular.txt", encoding="utf-8") as f:
content=f.read().splitlines()
for line in content:
#nor_tr = vi2IPA_pitrain(line)
#nor = vi2IPA(line)
nor = T2IPA(line)
if nor in List_token:
print(line + " -> "+nor)
trung +=1
List_pair.append(line)
List_token.append(nor)
if nor=="":
cout+=1
print(line)
print("Number of token can not convert: ",cout)
print("Number of token in the same mapping:",trung)
List_token = list(set(List_token))
#print(List_token)
print(len(List_token))
################################
#Looking for pair
Pair = {}
for lt in List_pair:
Pair[T2IPA(lt)] = lt
cout_same=0
with open("Popular.txt", encoding="utf-8") as f:
content=f.read().splitlines()
for line in content:
if T2IPA(line) in Pair:
lin2 =Pair[T2IPA(line)]
if line != lin2:
if (lin2[0]=="k" and line[0]=="c") or (lin2[-1] in ['i','í','ì','ĩ','ỉ','ị'] and line[-1] in ['y','ý','ỳ','ỷ','ỹ','ỵ']) or (lin2[-1] in ['y','ý','ỳ','ỷ','ỹ','ỵ'] and line[-1] in ['i','í','ì','ĩ','ỉ','ị']):
continue
cout_same+=1
print(line+ " <-> " + lin2 +"\t\t:\t\t"+T2IPA(line))
print("Same pair:" , cout_same)
#The cases that lead to duplicate (homophonous) outputs are:
# Different dialects, which have already been unified in the custom list
# Spellings that differ only in where the tone mark is written are merged into one
#Disable convert from 'ɲ' to 'ŋ' in north
#The rounded onglides here were exactly the same as the unrounded ones: no w in front, so "oa,ua,a" all map the same > must consider (but adding ʷ also affects "qu", which already yields w)
#Try to add ʷ to all start o and u as in wiki
# *** Problem with ủy onglide and off-glide is a big problem
#Same positive
#k <-> c
#g <-> gh
#i <-> y
#Same negative / need to fix
#oe <-> uê -> fix oe from e to ɛ
#final consonant "ch": k in the North : t in the South -> custom uses k because it reduces duplicates more, 241->153 cases
#However the finals "ch" "c" "t" still could not be told apart => idea: borrow "tʃ" from teach and watch as a substitute => k for c , t for t, tʃ for ch
#Changed the offglide: úy -> wi to distinguish it from úi
#Remain
'''
di <-> gi : zi1
dìm <-> gìm : zim2
din <-> gin : zin1
díp <-> gíp : zip5
gen <-> ghen : ɣɛn1
ghì <-> gì : ɣi2
ghích <-> gích : ɣitʃ5
ia <-> iê : iə1
iêu <-> yêu : iəw1
khoắng <-> khuắng : xwʷăŋ5
khỏe <-> khoẻ : xwʷɛ4
khua <-> khuơ : xuə1
lóe <-> loé : lwʷɛ5
ngét <-> nghét : ŋɛt5
ngễu <-> nghễu : ŋɛu3
nghía <-> ngía : ŋiə5
nghịu <-> ngịu : ŋiw6
nghoèo <-> ngoèo : ŋwew2
quít <-> quýt : kwit5
thủa <-> thuở : tʰuə4
tòe <-> toè : twʷɛ2
ua <-> uơ : uə1
ưa <-> ươ : ɯə1
xõa <-> xoã : swʷa3
'''
#To save compute here, phonemes are not normalized for pitch/intensity; tones simply stay as the indices 1->6
#"học" and "ác" give different results for the final "c"
###################################################
checkDict()
#print(vi2IPA_split("!Singapo english? đại học là IUYE gì khôngtontaij NIYE BoOK","'"))
#check the IPA output of the English words
#print(vi2IPA_split("Another table was prepared to show available onsets. Onsets are splitted into 3 types. Type 1 are onsets which has one letter ","/"))
#Strip the English stress marks "'"
#print(vi2IPA_split("speech? Secondly, we paper, we investigate work! One is that e language to another by","/").replace("/",""))
#Case need to be deal:
# NIYE BoOK
#print(len(getSymbol()))
#print(getSymbol())
'''
test="t"
if test in syms:
print(test)
else:
print("none")
'''
###################################################
#Step
#Vinorm
#Underthesea
#For each token, convert it to phonemes
#If that fails, check the English phoneme dictionary
#If it is not in the English dictionary either -> spell it out character by character
#Now
#+Added the IPA symbols for ENGLISH
#+Added handling for missing tokens as well as English tokens: => use etrain (eng_to_ipa) for English
#+Handled unifying the tone range across phonemes -> ok
#+Get lại bộ symbol | [((53180, 53206), 'sys.path.append', 'sys.path.append', (['"""./Rules"""'], {}), "('./Rules')\n", (53195, 53206), False, 'import sys, codecs, re\n'), ((54939, 54965), 'sys.path.append', 'sys.path.append', (['"""./Rules"""'], {}), "('./Rules')\n", (54954, 54965), False, 'import sys, codecs, re\n'), ((60447, 60464), 'underthesea.word_tokenize', 'word_tokenize', (['TN'], {}), '(TN)\n', (60460, 60464), False, 'from underthesea import word_tokenize\n'), ((61714, 61736), 're.sub', 're.sub', (['""" +"""', '""" """', 'IPA'], {}), "(' +', ' ', IPA)\n", (61720, 61736), False, 'import re\n'), ((57463, 57480), 'underthesea.word_tokenize', 'word_tokenize', (['TN'], {}), '(TN)\n', (57476, 57480), False, 'from underthesea import word_tokenize\n'), ((59507, 59542), 're.sub', 're.sub', (["(delimit + '+')", 'delimit', 'IPA'], {}), "(delimit + '+', delimit, IPA)\n", (59513, 59542), False, 'import re\n'), ((59554, 59576), 're.sub', 're.sub', (['""" +"""', '""" """', 'IPA'], {}), "(' +', ' ', IPA)\n", (59560, 59576), False, 'import re\n'), ((54287, 54310), 're.split', 're.split', (['"""(_|-)"""', 'word'], {}), "('(_|-)', word)\n", (54295, 54310), False, 'import re\n'), ((56064, 56087), 're.split', 're.split', (['"""(_|-)"""', 'word'], {}), "('(_|-)', word)\n", (56072, 56087), False, 'import re\n'), ((60695, 60717), 'eng_to_ipa.convert', 'eng_to_ipa.convert', (['tk'], {}), '(tk)\n', (60713, 60717), False, 'import eng_to_ipa\n'), ((58297, 58319), 'eng_to_ipa.convert', 'eng_to_ipa.convert', (['tk'], {}), '(tk)\n', (58315, 58319), False, 'import eng_to_ipa\n')] |
retmas-dv/deftcore | taskengine/sessions.py | 23052549e8948bbedfb958a96683b84b46820b09 | __author__ = 'Dmitry Golubkov'
from django.contrib.sessions.base_session import AbstractBaseSession
from django.contrib.sessions.backends.db import SessionStore as DBStore
class CustomSession(AbstractBaseSession):
@classmethod
def get_session_store_class(cls):
return SessionStore
class Meta:
db_name = 'deft_adcr'
db_table = '"ATLAS_DEFT"."DJANGO_SESSION"'
class SessionStore(DBStore):
@classmethod
def get_model_class(cls):
return CustomSession
| [] |
acc-cosc-1336/cosc-1336-spring-2018-vcruz350 | tests/assignments/test_assign7.py | 0cee9fde3d4129c51626c4e0c870972aebec9b95 | import unittest
#write the import for the assignment7 function sum_list_values
from src.assignments.assignment7 import sum_list_values
class Test_Assign7(unittest.TestCase):
def sample_test(self):
self.assertEqual(1,1)
#create a test for the sum_list_values function with list elements:
# bill 23 16 19 22
def test_sum_w_23_16_19_22(self):
test_list = ['bill', 23, 16, 19, 22]
self.assertEqual(80, sum_list_values(test_list))
#unittest.main(verbosity=2)
| [((441, 467), 'src.assignments.assignment7.sum_list_values', 'sum_list_values', (['test_list'], {}), '(test_list)\n', (456, 467), False, 'from src.assignments.assignment7 import sum_list_values\n')] |
kgriffs/setec | setec/__init__.py | c6701ffd757cdfe1cfb9c3919b0fd3aa02396f54 | # Copyright 2018 by Kurt Griffiths
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from base64 import b64decode, b64encode
import msgpack
import nacl.encoding
import nacl.secret
import nacl.signing
import nacl.utils
from .version import __version__ # NOQA
class Signer:
"""Message signer based on Ed25519 and nacl.signing.
Arguments:
key (str): Base64-encoded key obtained from keygen()
"""
__slots__ = ('_signing_key',)
def __init__(self, skey):
self._signing_key = nacl.signing.SigningKey(skey, nacl.encoding.Base64Encoder)
@staticmethod
def keygen():
signing_key = nacl.signing.SigningKey.generate()
return (
signing_key.encode(nacl.encoding.Base64Encoder).decode(),
signing_key.verify_key.encode(nacl.encoding.Base64Encoder).decode(),
)
@staticmethod
def vkey(skey):
signing_key = nacl.signing.SigningKey(skey, nacl.encoding.Base64Encoder)
return signing_key.verify_key.encode(nacl.encoding.Base64Encoder)
def signb(self, message):
"""Sign a binary message with its signature attached.
Arguments:
message(bytes): Data to sign.
Returns:
bytes: Signed message
"""
return self._signing_key.sign(message)
def pack(self, doc):
return b64encode(self.packb(doc)).decode()
def packb(self, doc):
packed = msgpack.packb(doc, encoding='utf-8', use_bin_type=True)
return self.signb(packed)
class Verifier:
"""Signature verifier based on Ed25519 and nacl.signing.
Arguments:
key (str): Base64-encoded verify key
"""
__slots__ = ('_verify_key',)
def __init__(self, vkey):
self._verify_key = nacl.signing.VerifyKey(vkey, nacl.encoding.Base64Encoder)
def verifyb(self, message):
"""Verify a signed binary message.
Arguments:
message(bytes): Data to verify.
Returns:
bytes: The orignal message, sans signature.
"""
return self._verify_key.verify(message)
def unpack(self, packed):
return self.unpackb(b64decode(packed))
def unpackb(self, packed):
packed = self.verifyb(packed)
return msgpack.unpackb(packed, raw=False, encoding='utf-8')
class BlackBox:
"""Encryption engine based on PyNaCl's SecretBox (Salsa20/Poly1305).
Warning per the SecretBox docs:
Once you’ve decrypted the message you’ve demonstrated the ability to
create arbitrary valid messages, so messages you send are repudiable.
For non-repudiable messages, sign them after encryption.
(See also: https://pynacl.readthedocs.io/en/stable/signing)
Arguments:
key (str): Base64-encoded key obtained from keygen()
"""
__slots__ = ('_box',)
def __init__(self, key):
self._box = nacl.secret.SecretBox(b64decode(key))
@staticmethod
def keygen():
return b64encode(nacl.utils.random(nacl.secret.SecretBox.KEY_SIZE)).decode()
def encrypt(self, doc, signer=None):
"""Serialize and encrypt a document to Base64-encoded ciphertext.
Arguments:
doc: The string, dict, array, or other JSON-compatible
object to serialize and encrypt.
Keyword Arguments:
signer: An instance of Signer to use in signing the result. If
not provided, the ciphertext is not signed.
Returns:
str: Ciphertext
"""
data = msgpack.packb(doc, encoding='utf-8', use_bin_type=True)
ciphertext = self._box.encrypt(data)
if signer:
ciphertext = signer.signb(ciphertext)
return b64encode(ciphertext).decode()
def decrypt(self, ciphertext, verifier=None):
"""Unpack Base64-encoded ciphertext.
Arguments:
ciphertext (bytes): Ciphertext to decrypt and deserialize.
Keyword Arguments:
verifier: An instance of Verifier to use in verifying the
signed ciphertext. If not provided, the ciphertext is
assumed to be unsigned.
Returns:
doc: Deserialized JSON-compatible object.
"""
ciphertext = b64decode(ciphertext)
if verifier:
ciphertext = verifier.verifyb(ciphertext)
data = self._box.decrypt(ciphertext)
return msgpack.unpackb(data, raw=False, encoding='utf-8')
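# Minimal round-trip sketch of the API above (illustrative only):
#
#     key = BlackBox.keygen()
#     skey, vkey = Signer.keygen()
#     box = BlackBox(key)
#     token = box.encrypt({'answer': 42}, signer=Signer(skey))
#     doc = box.decrypt(token, verifier=Verifier(vkey))  # -> {'answer': 42}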
| [((1922, 1977), 'msgpack.packb', 'msgpack.packb', (['doc'], {'encoding': '"""utf-8"""', 'use_bin_type': '(True)'}), "(doc, encoding='utf-8', use_bin_type=True)\n", (1935, 1977), False, 'import msgpack\n'), ((2747, 2799), 'msgpack.unpackb', 'msgpack.unpackb', (['packed'], {'raw': '(False)', 'encoding': '"""utf-8"""'}), "(packed, raw=False, encoding='utf-8')\n", (2762, 2799), False, 'import msgpack\n'), ((4029, 4084), 'msgpack.packb', 'msgpack.packb', (['doc'], {'encoding': '"""utf-8"""', 'use_bin_type': '(True)'}), "(doc, encoding='utf-8', use_bin_type=True)\n", (4042, 4084), False, 'import msgpack\n'), ((4748, 4769), 'base64.b64decode', 'b64decode', (['ciphertext'], {}), '(ciphertext)\n', (4757, 4769), False, 'from base64 import b64decode, b64encode\n'), ((4906, 4956), 'msgpack.unpackb', 'msgpack.unpackb', (['data'], {'raw': '(False)', 'encoding': '"""utf-8"""'}), "(data, raw=False, encoding='utf-8')\n", (4921, 4956), False, 'import msgpack\n'), ((2643, 2660), 'base64.b64decode', 'b64decode', (['packed'], {}), '(packed)\n', (2652, 2660), False, 'from base64 import b64decode, b64encode\n'), ((3402, 3416), 'base64.b64decode', 'b64decode', (['key'], {}), '(key)\n', (3411, 3416), False, 'from base64 import b64decode, b64encode\n'), ((4216, 4237), 'base64.b64encode', 'b64encode', (['ciphertext'], {}), '(ciphertext)\n', (4225, 4237), False, 'from base64 import b64decode, b64encode\n')] |
niacdoial/armory | blender/arm/logicnode/native/LN_detect_mobile_browser.py | 3f9b633fbf772017c576a3f77695a6c28d9956e1 | from arm.logicnode.arm_nodes import *
class DetectMobileBrowserNode(ArmLogicTreeNode):
"""Determines the mobile browser or not (works only for web browsers)."""
bl_idname = 'LNDetectMobileBrowserNode'
bl_label = 'Detect Mobile Browser'
arm_version = 1
def init(self, context):
super(DetectMobileBrowserNode, self).init(context)
self.add_output('NodeSocketBool', 'Mobile') | [] |
andyasne/commcare-hq | corehq/apps/dump_reload/tests/test_sql_dump_load.py | c59a24e57bdd4d2536493f9ecdcc9906f4ae1b88 | import inspect
import json
import uuid
from collections import Counter
from datetime import datetime
from io import StringIO
import mock
from django.contrib.admin.utils import NestedObjects
from django.db import transaction, IntegrityError
from django.db.models.signals import post_delete, post_save
from django.test import SimpleTestCase, TestCase
from nose.tools import nottest
from casexml.apps.case.mock import CaseFactory, CaseIndex, CaseStructure
from corehq.apps.commtrack.helpers import make_product
from corehq.apps.commtrack.tests.util import get_single_balance_block
from corehq.apps.domain.models import Domain
from corehq.apps.dump_reload.sql import SqlDataDumper, SqlDataLoader
from corehq.apps.dump_reload.sql.dump import (
get_model_iterator_builders_to_dump,
get_objects_to_dump,
)
from corehq.apps.dump_reload.sql.load import (
DefaultDictWithKey,
constraint_checks_deferred,
)
from corehq.apps.hqcase.utils import submit_case_blocks
from corehq.apps.products.models import SQLProduct
from corehq.apps.zapier.consts import EventTypes
from corehq.apps.zapier.models import ZapierSubscription
from corehq.apps.zapier.signals.receivers import (
zapier_subscription_post_delete,
)
from corehq.blobs.models import BlobMeta
from corehq.form_processor.backends.sql.dbaccessors import LedgerAccessorSQL
from corehq.form_processor.interfaces.dbaccessors import (
CaseAccessors,
FormAccessors,
)
from corehq.form_processor.models import (
CaseTransaction,
CommCareCaseIndexSQL,
CommCareCaseSQL,
LedgerTransaction,
LedgerValue,
XFormInstanceSQL,
)
from corehq.form_processor.tests.utils import (
FormProcessorTestUtils,
create_form_for_test,
sharded,
)
from corehq.messaging.scheduling.scheduling_partitioned.models import (
AlertScheduleInstance,
)
class BaseDumpLoadTest(TestCase):
@classmethod
def setUpClass(cls):
post_delete.disconnect(zapier_subscription_post_delete, sender=ZapierSubscription)
super(BaseDumpLoadTest, cls).setUpClass()
cls.domain_name = uuid.uuid4().hex
cls.domain = Domain(name=cls.domain_name)
cls.domain.save()
cls.default_objects_counts = Counter({})
@classmethod
def tearDownClass(cls):
cls.domain.delete()
super(BaseDumpLoadTest, cls).tearDownClass()
post_delete.connect(zapier_subscription_post_delete, sender=ZapierSubscription)
def delete_sql_data(self):
delete_domain_sql_data_for_dump_load_test(self.domain_name)
def tearDown(self):
self.delete_sql_data()
super(BaseDumpLoadTest, self).tearDown()
def _dump_and_load(self, expected_dump_counts, load_filter=None, expected_load_counts=None, dumper_fn=None):
expected_load_counts = expected_load_counts or expected_dump_counts
expected_dump_counts.update(self.default_objects_counts)
models = list(expected_dump_counts)
self._check_signals_handle_raw(models)
output_stream = StringIO()
if dumper_fn:
dumper_fn(output_stream)
else:
SqlDataDumper(self.domain_name, [], []).dump(output_stream)
self.delete_sql_data()
# make sure that there's no data left in the DB
objects_remaining = list(get_objects_to_dump(self.domain_name, [], []))
object_classes = [obj.__class__.__name__ for obj in objects_remaining]
counts = Counter(object_classes)
self.assertEqual([], objects_remaining, 'Not all data deleted: {}'.format(counts))
# Dump
actual_model_counts, dump_lines = self._parse_dump_output(output_stream)
expected_model_counts = _normalize_object_counter(expected_dump_counts)
self.assertDictEqual(dict(expected_model_counts), dict(actual_model_counts))
# Load
loader = SqlDataLoader(object_filter=load_filter)
loaded_model_counts = loader.load_objects(dump_lines)
normalized_expected_loaded_counts = _normalize_object_counter(expected_load_counts, for_loaded=True)
self.assertDictEqual(dict(normalized_expected_loaded_counts), dict(loaded_model_counts))
self.assertEqual(sum(expected_load_counts.values()), sum(loaded_model_counts.values()))
return dump_lines
def _parse_dump_output(self, output_stream):
dump_output = output_stream.getvalue().split('\n')
dump_lines = [line.strip() for line in dump_output if line.strip()]
actual_model_counts = Counter([json.loads(line)['model'] for line in dump_lines])
return actual_model_counts, dump_lines
def _check_signals_handle_raw(self, models):
"""Ensure that any post_save signal handlers have been updated
to handle 'raw' calls."""
whitelist_receivers = [
'django_digest.models._post_save_persist_partial_digests'
]
for model in models:
for receiver in post_save._live_receivers(model):
receiver_path = receiver.__module__ + '.' + receiver.__name__
if receiver_path in whitelist_receivers:
continue
args = inspect.getargspec(receiver).args
message = 'Signal handler "{}" for model "{}" missing raw arg'.format(
receiver, model
)
self.assertIn('raw', args, message)
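# Editor's illustrative sketch (not part of the original suite): the shape of receiver
# the check above expects. `sender`/`instance` come from Django's signal contract; the
# handler accepts `raw` and bails out while serialized data is being loaded, when
# related rows may not exist yet. The function name and body are hypothetical.
def _example_raw_aware_post_save_receiver(sender, instance, created=False, raw=False, **kwargs):
    if raw:
        # Instance is being loaded from a dump/fixture; skip derived-data updates.
        return
    # ... normal handling (e.g. refreshing denormalized data) would go here ...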
@nottest
def delete_domain_sql_data_for_dump_load_test(domain_name):
for model_class, builder in get_model_iterator_builders_to_dump(domain_name, [], []):
for iterator in builder.querysets():
with transaction.atomic(using=iterator.db), \
constraint_checks_deferred(iterator.db):
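                # NestedObjects collects every row that would be removed by cascading
                # deletes, so collector.delete() can drop dependents before their parents.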
collector = NestedObjects(using=iterator.db)
collector.collect(iterator)
collector.delete()
assert [] == list(get_objects_to_dump(domain_name, [], [])), "Not all SQL objects deleted"
@sharded
class TestSQLDumpLoadShardedModels(BaseDumpLoadTest):
maxDiff = None
@classmethod
def setUpClass(cls):
super(TestSQLDumpLoadShardedModels, cls).setUpClass()
cls.factory = CaseFactory(domain=cls.domain_name)
cls.form_accessors = FormAccessors(cls.domain_name)
cls.case_accessors = CaseAccessors(cls.domain_name)
cls.product = make_product(cls.domain_name, 'A Product', 'prodcode_a')
cls.default_objects_counts.update({SQLProduct: 1})
@classmethod
def tearDownClass(cls):
FormProcessorTestUtils.delete_all_cases_forms_ledgers(cls.domain_name)
super(TestSQLDumpLoadShardedModels, cls).tearDownClass()
def test_dump_load_form(self):
expected_object_counts = Counter({
XFormInstanceSQL: 2,
BlobMeta: 2
})
pre_forms = [
create_form_for_test(self.domain_name),
create_form_for_test(self.domain_name)
]
self._dump_and_load(expected_object_counts)
form_ids = self.form_accessors.get_all_form_ids_in_domain('XFormInstance')
self.assertEqual(set(form_ids), set(form.form_id for form in pre_forms))
for pre_form in pre_forms:
post_form = self.form_accessors.get_form(pre_form.form_id)
self.assertDictEqual(pre_form.to_json(), post_form.to_json())
def test_sql_dump_load_case(self):
expected_object_counts = Counter({
XFormInstanceSQL: 2,
BlobMeta: 2,
CommCareCaseSQL: 2,
CaseTransaction: 3,
CommCareCaseIndexSQL: 1
})
pre_cases = self.factory.create_or_update_case(
CaseStructure(
attrs={'case_name': 'child', 'update': {'age': 3, 'diabetic': False}, 'create': True},
indices=[
CaseIndex(CaseStructure(attrs={'case_name': 'parent', 'update': {'age': 42}, 'create': True})),
]
)
)
pre_cases[0] = self.factory.create_or_update_case(CaseStructure(
case_id=pre_cases[0].case_id,
attrs={'external_id': 'billie jean', 'update': {'name': 'Billie Jean'}}
))[0]
self._dump_and_load(expected_object_counts)
case_ids = self.case_accessors.get_case_ids_in_domain()
self.assertEqual(set(case_ids), set(case.case_id for case in pre_cases))
for pre_case in pre_cases:
post_case = self.case_accessors.get_case(pre_case.case_id)
self.assertDictEqual(pre_case.to_json(), post_case.to_json())
def test_ledgers(self):
expected_object_counts = Counter({
XFormInstanceSQL: 3,
BlobMeta: 3,
CommCareCaseSQL: 1,
CaseTransaction: 3,
LedgerValue: 1,
LedgerTransaction: 2
})
case = self.factory.create_case()
submit_case_blocks([
get_single_balance_block(case.case_id, self.product._id, 10)
], self.domain_name)
submit_case_blocks([
get_single_balance_block(case.case_id, self.product._id, 5)
], self.domain_name)
pre_ledger_values = LedgerAccessorSQL.get_ledger_values_for_case(case.case_id)
pre_ledger_transactions = LedgerAccessorSQL.get_ledger_transactions_for_case(case.case_id)
self.assertEqual(1, len(pre_ledger_values))
self.assertEqual(2, len(pre_ledger_transactions))
self._dump_and_load(expected_object_counts)
post_ledger_values = LedgerAccessorSQL.get_ledger_values_for_case(case.case_id)
post_ledger_transactions = LedgerAccessorSQL.get_ledger_transactions_for_case(case.case_id)
self.assertEqual(1, len(post_ledger_values))
self.assertEqual(2, len(post_ledger_transactions))
self.assertEqual(pre_ledger_values[0].ledger_reference, post_ledger_values[0].ledger_reference)
self.assertDictEqual(pre_ledger_values[0].to_json(), post_ledger_values[0].to_json())
pre_ledger_transactions = sorted(pre_ledger_transactions, key=lambda t: t.pk)
post_ledger_transactions = sorted(post_ledger_transactions, key=lambda t: t.pk)
for pre, post in zip(pre_ledger_transactions, post_ledger_transactions):
self.assertEqual(str(pre), str(post))
class TestSQLDumpLoad(BaseDumpLoadTest):
def test_case_search_config(self):
from corehq.apps.case_search.models import CaseSearchConfig, FuzzyProperties
expected_object_counts = Counter({
CaseSearchConfig: 1,
FuzzyProperties: 2,
})
pre_config, created = CaseSearchConfig.objects.get_or_create(pk=self.domain_name)
pre_config.enabled = True
pre_fuzzies = [
FuzzyProperties(domain=self.domain, case_type='dog', properties=['breed', 'color']),
FuzzyProperties(domain=self.domain, case_type='owner', properties=['name']),
]
for fuzzy in pre_fuzzies:
fuzzy.save()
pre_config.fuzzy_properties.set(pre_fuzzies)
pre_config.save()
self._dump_and_load(expected_object_counts)
post_config = CaseSearchConfig.objects.get(domain=self.domain_name)
self.assertTrue(post_config.enabled)
self.assertEqual(pre_config.fuzzy_properties, post_config.fuzzy_properties)
post_fuzzies = FuzzyProperties.objects.filter(domain=self.domain_name)
self.assertEqual(set(f.case_type for f in post_fuzzies), {'dog', 'owner'})
def test_users(self):
from corehq.apps.users.models import CommCareUser
from corehq.apps.users.models import WebUser
from django.contrib.auth.models import User
expected_object_counts = Counter({User: 3})
ccuser_1 = CommCareUser.create(
domain=self.domain_name,
username='user_1',
password='secret',
created_by=None,
created_via=None,
email='[email protected]',
)
ccuser_2 = CommCareUser.create(
domain=self.domain_name,
username='user_2',
password='secret',
created_by=None,
created_via=None,
email='[email protected]',
)
web_user = WebUser.create(
domain=self.domain_name,
username='webuser_t1',
password='secret',
created_by=None,
created_via=None,
email='[email protected]',
)
self.addCleanup(ccuser_1.delete, self.domain_name, deleted_by=None)
self.addCleanup(ccuser_2.delete, self.domain_name, deleted_by=None)
self.addCleanup(web_user.delete, self.domain_name, deleted_by=None)
self._dump_and_load(expected_object_counts)
def test_dump_roles(self):
from corehq.apps.users.models import UserRole, Permissions, RoleAssignableBy, RolePermission
expected_object_counts = Counter({
UserRole: 2,
RolePermission: 11,
RoleAssignableBy: 1
})
role1 = UserRole.create(self.domain_name, 'role1')
role2 = UserRole.create(
self.domain_name, 'role1',
permissions=Permissions(edit_web_users=True),
assignable_by=[role1.id]
)
self.addCleanup(role1.delete)
self.addCleanup(role2.delete)
self._dump_and_load(expected_object_counts)
role1_loaded = UserRole.objects.get(id=role1.id)
role2_loaded = UserRole.objects.get(id=role2.id)
self.assertEqual(role1_loaded.permissions.to_list(), Permissions().to_list())
self.assertEqual(role1_loaded.assignable_by, [])
self.assertEqual(role2_loaded.permissions.to_list(), Permissions(edit_web_users=True).to_list())
self.assertEqual(role2_loaded.assignable_by, [role1_loaded.get_id])
def test_device_logs(self):
from corehq.apps.receiverwrapper.util import submit_form_locally
from phonelog.models import DeviceReportEntry, ForceCloseEntry, UserEntry, UserErrorEntry
from corehq.apps.users.models import CommCareUser
from django.contrib.auth.models import User
expected_object_counts = Counter({
User: 1,
DeviceReportEntry: 7,
UserEntry: 1,
UserErrorEntry: 2,
ForceCloseEntry: 1
})
user = CommCareUser.create(
domain=self.domain_name,
username='user_1',
password='secret',
created_by=None,
created_via=None,
email='[email protected]',
uuid='428d454aa9abc74e1964e16d3565d6b6' # match ID in devicelog.xml
)
self.addCleanup(user.delete, self.domain_name, deleted_by=None)
with open('corehq/ex-submodules/couchforms/tests/data/devicelogs/devicelog.xml', 'rb') as f:
xml = f.read()
submit_form_locally(xml, self.domain_name)
self._dump_and_load(expected_object_counts)
def test_demo_user_restore(self):
from corehq.apps.users.models import CommCareUser
from corehq.apps.ota.models import DemoUserRestore
from django.contrib.auth.models import User
expected_object_counts = Counter({
User: 1,
DemoUserRestore: 1
})
user_id = uuid.uuid4().hex
user = CommCareUser.create(
domain=self.domain_name,
username='user_1',
password='secret',
created_by=None,
created_via=None,
email='[email protected]',
uuid=user_id
)
self.addCleanup(user.delete, self.domain_name, deleted_by=None)
DemoUserRestore(
demo_user_id=user_id,
restore_blob_id=uuid.uuid4().hex,
content_length=1027,
restore_comment="Test migrate demo user restore"
).save()
self._dump_and_load(expected_object_counts)
def test_products(self):
from corehq.apps.products.models import SQLProduct
expected_object_counts = Counter({SQLProduct: 3})
p1 = SQLProduct.objects.create(domain=self.domain_name, product_id='test1', name='test1')
p2 = SQLProduct.objects.create(domain=self.domain_name, product_id='test2', name='test2')
parchived = SQLProduct.objects.create(domain=self.domain_name, product_id='test3', name='test3', is_archived=True)
self._dump_and_load(expected_object_counts)
self.assertEqual(2, SQLProduct.active_objects.filter(domain=self.domain_name).count())
all_active = SQLProduct.active_objects.filter(domain=self.domain_name).all()
self.assertTrue(p1 in all_active)
self.assertTrue(p2 in all_active)
self.assertTrue(parchived not in all_active)
def test_location_type(self):
from corehq.apps.locations.models import LocationType
from corehq.apps.locations.tests.test_location_types import make_loc_type
expected_object_counts = Counter({LocationType: 7})
state = make_loc_type('state', domain=self.domain_name)
district = make_loc_type('district', state, domain=self.domain_name)
section = make_loc_type('section', district, domain=self.domain_name)
block = make_loc_type('block', district, domain=self.domain_name)
center = make_loc_type('center', block, domain=self.domain_name)
county = make_loc_type('county', state, domain=self.domain_name)
city = make_loc_type('city', county, domain=self.domain_name)
self._dump_and_load(expected_object_counts)
hierarchy = LocationType.objects.full_hierarchy(self.domain_name)
desired_hierarchy = {
state.id: (
state,
{
district.id: (
district,
{
section.id: (section, {}),
block.id: (block, {
center.id: (center, {}),
}),
},
),
county.id: (
county,
{city.id: (city, {})},
),
},
),
}
self.assertEqual(hierarchy, desired_hierarchy)
def test_location(self):
from corehq.apps.locations.models import LocationType, SQLLocation
from corehq.apps.locations.tests.util import setup_locations_and_types
expected_object_counts = Counter({LocationType: 3, SQLLocation: 11})
location_type_names = ['province', 'district', 'city']
location_structure = [
('Western Cape', [
('Cape Winelands', [
('Stellenbosch', []),
('Paarl', []),
]),
('Cape Town', [
('Cape Town City', []),
])
]),
('Gauteng', [
('Ekurhuleni ', [
('Alberton', []),
('Benoni', []),
('Springs', []),
]),
]),
]
location_types, locations = setup_locations_and_types(
self.domain_name,
location_type_names,
[],
location_structure,
)
self._dump_and_load(expected_object_counts)
names = ['Cape Winelands', 'Paarl', 'Cape Town']
location_ids = [locations[name].location_id for name in names]
result = SQLLocation.objects.get_locations_and_children(location_ids)
self.assertItemsEqual(
[loc.name for loc in result],
['Cape Winelands', 'Stellenbosch', 'Paarl', 'Cape Town', 'Cape Town City']
)
result = SQLLocation.objects.get_locations_and_children([locations['Gauteng'].location_id])
self.assertItemsEqual(
[loc.name for loc in result],
['Gauteng', 'Ekurhuleni ', 'Alberton', 'Benoni', 'Springs']
)
def test_sms(self):
from corehq.apps.sms.models import PhoneNumber, MessagingEvent, MessagingSubEvent
expected_object_counts = Counter({PhoneNumber: 1, MessagingEvent: 1, MessagingSubEvent: 1})
phone_number = PhoneNumber(
domain=self.domain_name,
owner_doc_type='CommCareCase',
owner_id='fake-owner-id1',
phone_number='99912341234',
backend_id=None,
ivr_backend_id=None,
verified=True,
is_two_way=True,
pending_verification=False,
contact_last_modified=datetime.utcnow()
)
phone_number.save()
event = MessagingEvent.objects.create(
domain=self.domain_name,
date=datetime.utcnow(),
source=MessagingEvent.SOURCE_REMINDER,
content_type=MessagingEvent.CONTENT_SMS,
status=MessagingEvent.STATUS_COMPLETED
)
MessagingSubEvent.objects.create(
parent=event,
date=datetime.utcnow(),
recipient_type=MessagingEvent.RECIPIENT_CASE,
content_type=MessagingEvent.CONTENT_SMS,
status=MessagingEvent.STATUS_COMPLETED
)
self._dump_and_load(expected_object_counts)
def test_message_scheduling(self):
AlertScheduleInstance(
schedule_instance_id=uuid.uuid4(),
domain=self.domain_name,
recipient_type='CommCareUser',
recipient_id=uuid.uuid4().hex,
current_event_num=0,
schedule_iteration_num=1,
next_event_due=datetime(2017, 3, 1),
active=True,
alert_schedule_id=uuid.uuid4(),
).save()
self._dump_and_load({AlertScheduleInstance: 1})
def test_mobile_backend(self):
from corehq.apps.sms.models import (
SQLMobileBackend,
SQLMobileBackendMapping,
)
domain_backend = SQLMobileBackend.objects.create(
domain=self.domain_name,
name='test-domain-mobile-backend',
display_name='Test Domain Mobile Backend',
hq_api_id='TDMB',
inbound_api_key='test-domain-mobile-backend-inbound-api-key',
supported_countries=["*"],
backend_type=SQLMobileBackend.SMS,
is_global=False,
)
SQLMobileBackendMapping.objects.create(
domain=self.domain_name,
backend=domain_backend,
backend_type=SQLMobileBackend.SMS,
prefix='123',
)
global_backend = SQLMobileBackend.objects.create(
domain=None,
name='test-global-mobile-backend',
display_name='Test Global Mobile Backend',
hq_api_id='TGMB',
inbound_api_key='test-global-mobile-backend-inbound-api-key',
supported_countries=["*"],
backend_type=SQLMobileBackend.SMS,
is_global=True,
)
SQLMobileBackendMapping.objects.create(
domain=self.domain_name,
backend=global_backend,
backend_type=SQLMobileBackend.SMS,
prefix='*',
)
self._dump_and_load({
SQLMobileBackendMapping: 1,
SQLMobileBackend: 1,
})
self.assertEqual(SQLMobileBackend.objects.first().domain,
self.domain_name)
self.assertEqual(SQLMobileBackendMapping.objects.first().domain,
self.domain_name)
def test_case_importer(self):
from corehq.apps.case_importer.tracking.models import (
CaseUploadFileMeta,
CaseUploadFormRecord,
CaseUploadRecord,
)
upload_file_meta = CaseUploadFileMeta.objects.create(
identifier=uuid.uuid4().hex,
filename='picture.jpg',
length=1024,
)
case_upload_record = CaseUploadRecord.objects.create(
domain=self.domain_name,
upload_id=uuid.uuid4(),
task_id=uuid.uuid4(),
couch_user_id=uuid.uuid4().hex,
case_type='person',
upload_file_meta=upload_file_meta,
)
CaseUploadFormRecord.objects.create(
case_upload_record=case_upload_record,
form_id=uuid.uuid4().hex,
)
self._dump_and_load(Counter({
CaseUploadFileMeta: 1,
CaseUploadRecord: 1,
CaseUploadFormRecord: 1,
}))
def test_transifex(self):
from corehq.apps.translations.models import TransifexProject, TransifexOrganization
org = TransifexOrganization.objects.create(slug='test', name='demo', api_token='123')
TransifexProject.objects.create(
organization=org, slug='testp', name='demop', domain=self.domain_name
)
TransifexProject.objects.create(
organization=org, slug='testp1', name='demop1', domain=self.domain_name
)
self._dump_and_load(Counter({TransifexOrganization: 1, TransifexProject: 2}))
def test_filtered_dump_load(self):
from corehq.apps.locations.tests.test_location_types import make_loc_type
from corehq.apps.products.models import SQLProduct
from corehq.apps.locations.models import LocationType
make_loc_type('state', domain=self.domain_name)
SQLProduct.objects.create(domain=self.domain_name, product_id='test1', name='test1')
expected_object_counts = Counter({LocationType: 1, SQLProduct: 1})
self._dump_and_load(expected_object_counts, load_filter='sqlproduct', expected_load_counts=Counter({SQLProduct: 1}))
self.assertEqual(0, LocationType.objects.count())
def test_sms_content(self):
from corehq.messaging.scheduling.models import AlertSchedule, SMSContent, AlertEvent
from corehq.messaging.scheduling.scheduling_partitioned.dbaccessors import \
delete_alert_schedule_instances_for_schedule
schedule = AlertSchedule.create_simple_alert(self.domain, SMSContent())
schedule.set_custom_alert(
[
(AlertEvent(minutes_to_wait=5), SMSContent()),
(AlertEvent(minutes_to_wait=15), SMSContent()),
]
)
self.addCleanup(lambda: delete_alert_schedule_instances_for_schedule(AlertScheduleInstance, schedule.schedule_id))
self._dump_and_load(Counter({AlertSchedule: 1, AlertEvent: 2, SMSContent: 2}))
def test_zapier_subscription(self):
ZapierSubscription.objects.create(
domain=self.domain_name,
case_type='case_type',
event_name=EventTypes.NEW_CASE,
url='example.com',
user_id='user_id',
)
self._dump_and_load(Counter({ZapierSubscription: 1}))
@mock.patch("corehq.apps.dump_reload.sql.load.ENQUEUE_TIMEOUT", 1)
class TestSqlLoadWithError(BaseDumpLoadTest):
def setUp(self):
self.products = [
SQLProduct.objects.create(domain=self.domain_name, product_id='test1', name='test1'),
SQLProduct.objects.create(domain=self.domain_name, product_id='test2', name='test2'),
SQLProduct.objects.create(domain=self.domain_name, product_id='test3', name='test3'),
]
def test_load_error_queue_full(self):
"""Blocks when sending 'test3'"""
self._load_with_errors(chunk_size=1)
def test_load_error_queue_full_on_terminate(self):
"""Blocks when sending ``None`` into the queue to 'terminate' it."""
self._load_with_errors(chunk_size=2)
def _load_with_errors(self, chunk_size):
output_stream = StringIO()
SqlDataDumper(self.domain_name, [], []).dump(output_stream)
self.delete_sql_data()
# resave the product to force an error
self.products[0].save()
actual_model_counts, dump_lines = self._parse_dump_output(output_stream)
self.assertEqual(actual_model_counts['products.sqlproduct'], 3)
loader = SqlDataLoader()
with self.assertRaises(IntegrityError),\
mock.patch("corehq.apps.dump_reload.sql.load.CHUNK_SIZE", chunk_size):
# patch the chunk size so that the queue blocks
loader.load_objects(dump_lines)
class DefaultDictWithKeyTests(SimpleTestCase):
def test_intended_use_case(self):
def enlist(item):
return [item]
greasy_spoon = DefaultDictWithKey(enlist)
self.assertEqual(greasy_spoon['spam'], ['spam'])
greasy_spoon['spam'].append('spam')
self.assertEqual(greasy_spoon['spam'], ['spam', 'spam'])
def test_not_enough_params(self):
def empty_list():
return []
greasy_spoon = DefaultDictWithKey(empty_list)
with self.assertRaisesRegex(
TypeError,
r'empty_list\(\) takes 0 positional arguments but 1 was given'
):
greasy_spoon['spam']
def test_too_many_params(self):
def appender(item1, item2):
return [item1, item2]
greasy_spoon = DefaultDictWithKey(appender)
with self.assertRaisesRegex(
TypeError,
r"appender\(\) missing 1 required positional argument: 'item2'"
):
greasy_spoon['spam']
def test_no_factory(self):
greasy_spoon = DefaultDictWithKey()
with self.assertRaisesRegex(
TypeError,
"'NoneType' object is not callable"
):
greasy_spoon['spam']
def _normalize_object_counter(counter, for_loaded=False):
"""Converts a <Model Class> keyed counter to an model label keyed counter"""
def _model_class_to_label(model_class):
label = '{}.{}'.format(model_class._meta.app_label, model_class.__name__)
return label if for_loaded else label.lower()
return Counter({
_model_class_to_label(model_class): count
for model_class, count in counter.items()
})
| [((26813, 26878), 'mock.patch', 'mock.patch', (['"""corehq.apps.dump_reload.sql.load.ENQUEUE_TIMEOUT"""', '(1)'], {}), "('corehq.apps.dump_reload.sql.load.ENQUEUE_TIMEOUT', 1)\n", (26823, 26878), False, 'import mock\n'), ((5470, 5526), 'corehq.apps.dump_reload.sql.dump.get_model_iterator_builders_to_dump', 'get_model_iterator_builders_to_dump', (['domain_name', '[]', '[]'], {}), '(domain_name, [], [])\n', (5505, 5526), False, 'from corehq.apps.dump_reload.sql.dump import get_model_iterator_builders_to_dump, get_objects_to_dump\n'), ((1914, 2001), 'django.db.models.signals.post_delete.disconnect', 'post_delete.disconnect', (['zapier_subscription_post_delete'], {'sender': 'ZapierSubscription'}), '(zapier_subscription_post_delete, sender=\n ZapierSubscription)\n', (1936, 2001), False, 'from django.db.models.signals import post_delete, post_save\n'), ((2111, 2139), 'corehq.apps.domain.models.Domain', 'Domain', ([], {'name': 'cls.domain_name'}), '(name=cls.domain_name)\n', (2117, 2139), False, 'from corehq.apps.domain.models import Domain\n'), ((2204, 2215), 'collections.Counter', 'Counter', (['{}'], {}), '({})\n', (2211, 2215), False, 'from collections import Counter\n'), ((2351, 2430), 'django.db.models.signals.post_delete.connect', 'post_delete.connect', (['zapier_subscription_post_delete'], {'sender': 'ZapierSubscription'}), '(zapier_subscription_post_delete, sender=ZapierSubscription)\n', (2370, 2430), False, 'from django.db.models.signals import post_delete, post_save\n'), ((3008, 3018), 'io.StringIO', 'StringIO', ([], {}), '()\n', (3016, 3018), False, 'from io import StringIO\n'), ((3429, 3452), 'collections.Counter', 'Counter', (['object_classes'], {}), '(object_classes)\n', (3436, 3452), False, 'from collections import Counter\n'), ((3840, 3880), 'corehq.apps.dump_reload.sql.SqlDataLoader', 'SqlDataLoader', ([], {'object_filter': 'load_filter'}), '(object_filter=load_filter)\n', (3853, 3880), False, 'from corehq.apps.dump_reload.sql import SqlDataDumper, SqlDataLoader\n'), ((6136, 6171), 'casexml.apps.case.mock.CaseFactory', 'CaseFactory', ([], {'domain': 'cls.domain_name'}), '(domain=cls.domain_name)\n', (6147, 6171), False, 'from casexml.apps.case.mock import CaseFactory, CaseIndex, CaseStructure\n'), ((6201, 6231), 'corehq.form_processor.interfaces.dbaccessors.FormAccessors', 'FormAccessors', (['cls.domain_name'], {}), '(cls.domain_name)\n', (6214, 6231), False, 'from corehq.form_processor.interfaces.dbaccessors import CaseAccessors, FormAccessors\n'), ((6261, 6291), 'corehq.form_processor.interfaces.dbaccessors.CaseAccessors', 'CaseAccessors', (['cls.domain_name'], {}), '(cls.domain_name)\n', (6274, 6291), False, 'from corehq.form_processor.interfaces.dbaccessors import CaseAccessors, FormAccessors\n'), ((6314, 6370), 'corehq.apps.commtrack.helpers.make_product', 'make_product', (['cls.domain_name', '"""A Product"""', '"""prodcode_a"""'], {}), "(cls.domain_name, 'A Product', 'prodcode_a')\n", (6326, 6370), False, 'from corehq.apps.commtrack.helpers import make_product\n'), ((6484, 6554), 'corehq.form_processor.tests.utils.FormProcessorTestUtils.delete_all_cases_forms_ledgers', 'FormProcessorTestUtils.delete_all_cases_forms_ledgers', (['cls.domain_name'], {}), '(cls.domain_name)\n', (6537, 6554), False, 'from corehq.form_processor.tests.utils import FormProcessorTestUtils, create_form_for_test, sharded\n'), ((6689, 6732), 'collections.Counter', 'Counter', (['{XFormInstanceSQL: 2, BlobMeta: 2}'], {}), '({XFormInstanceSQL: 2, BlobMeta: 2})\n', (6696, 6732), False, 'from collections 
import Counter\n'), ((7374, 7486), 'collections.Counter', 'Counter', (['{XFormInstanceSQL: 2, BlobMeta: 2, CommCareCaseSQL: 2, CaseTransaction: 3,\n CommCareCaseIndexSQL: 1}'], {}), '({XFormInstanceSQL: 2, BlobMeta: 2, CommCareCaseSQL: 2,\n CaseTransaction: 3, CommCareCaseIndexSQL: 1})\n', (7381, 7486), False, 'from collections import Counter\n'), ((8579, 8704), 'collections.Counter', 'Counter', (['{XFormInstanceSQL: 3, BlobMeta: 3, CommCareCaseSQL: 1, CaseTransaction: 3,\n LedgerValue: 1, LedgerTransaction: 2}'], {}), '({XFormInstanceSQL: 3, BlobMeta: 3, CommCareCaseSQL: 1,\n CaseTransaction: 3, LedgerValue: 1, LedgerTransaction: 2})\n', (8586, 8704), False, 'from collections import Counter\n'), ((9117, 9175), 'corehq.form_processor.backends.sql.dbaccessors.LedgerAccessorSQL.get_ledger_values_for_case', 'LedgerAccessorSQL.get_ledger_values_for_case', (['case.case_id'], {}), '(case.case_id)\n', (9161, 9175), False, 'from corehq.form_processor.backends.sql.dbaccessors import LedgerAccessorSQL\n'), ((9210, 9274), 'corehq.form_processor.backends.sql.dbaccessors.LedgerAccessorSQL.get_ledger_transactions_for_case', 'LedgerAccessorSQL.get_ledger_transactions_for_case', (['case.case_id'], {}), '(case.case_id)\n', (9260, 9274), False, 'from corehq.form_processor.backends.sql.dbaccessors import LedgerAccessorSQL\n'), ((9468, 9526), 'corehq.form_processor.backends.sql.dbaccessors.LedgerAccessorSQL.get_ledger_values_for_case', 'LedgerAccessorSQL.get_ledger_values_for_case', (['case.case_id'], {}), '(case.case_id)\n', (9512, 9526), False, 'from corehq.form_processor.backends.sql.dbaccessors import LedgerAccessorSQL\n'), ((9562, 9626), 'corehq.form_processor.backends.sql.dbaccessors.LedgerAccessorSQL.get_ledger_transactions_for_case', 'LedgerAccessorSQL.get_ledger_transactions_for_case', (['case.case_id'], {}), '(case.case_id)\n', (9612, 9626), False, 'from corehq.form_processor.backends.sql.dbaccessors import LedgerAccessorSQL\n'), ((10443, 10493), 'collections.Counter', 'Counter', (['{CaseSearchConfig: 1, FuzzyProperties: 2}'], {}), '({CaseSearchConfig: 1, FuzzyProperties: 2})\n', (10450, 10493), False, 'from collections import Counter\n'), ((10560, 10619), 'corehq.apps.case_search.models.CaseSearchConfig.objects.get_or_create', 'CaseSearchConfig.objects.get_or_create', ([], {'pk': 'self.domain_name'}), '(pk=self.domain_name)\n', (10598, 10619), False, 'from corehq.apps.case_search.models import CaseSearchConfig, FuzzyProperties\n'), ((11088, 11141), 'corehq.apps.case_search.models.CaseSearchConfig.objects.get', 'CaseSearchConfig.objects.get', ([], {'domain': 'self.domain_name'}), '(domain=self.domain_name)\n', (11116, 11141), False, 'from corehq.apps.case_search.models import CaseSearchConfig, FuzzyProperties\n'), ((11294, 11349), 'corehq.apps.case_search.models.FuzzyProperties.objects.filter', 'FuzzyProperties.objects.filter', ([], {'domain': 'self.domain_name'}), '(domain=self.domain_name)\n', (11324, 11349), False, 'from corehq.apps.case_search.models import CaseSearchConfig, FuzzyProperties\n'), ((11657, 11675), 'collections.Counter', 'Counter', (['{User: 3}'], {}), '({User: 3})\n', (11664, 11675), False, 'from collections import Counter\n'), ((11696, 11845), 'corehq.apps.users.models.CommCareUser.create', 'CommCareUser.create', ([], {'domain': 'self.domain_name', 'username': '"""user_1"""', 'password': '"""secret"""', 'created_by': 'None', 'created_via': 'None', 'email': '"""[email protected]"""'}), "(domain=self.domain_name, username='user_1', password=\n 'secret', created_by=None, 
created_via=None, email='[email protected]')\n", (11715, 11845), False, 'from corehq.apps.users.models import CommCareUser\n'), ((11943, 12093), 'corehq.apps.users.models.CommCareUser.create', 'CommCareUser.create', ([], {'domain': 'self.domain_name', 'username': '"""user_2"""', 'password': '"""secret"""', 'created_by': 'None', 'created_via': 'None', 'email': '"""[email protected]"""'}), "(domain=self.domain_name, username='user_2', password=\n 'secret', created_by=None, created_via=None, email='[email protected]')\n", (11962, 12093), False, 'from corehq.apps.users.models import CommCareUser\n'), ((12191, 12341), 'corehq.apps.users.models.WebUser.create', 'WebUser.create', ([], {'domain': 'self.domain_name', 'username': '"""webuser_t1"""', 'password': '"""secret"""', 'created_by': 'None', 'created_via': 'None', 'email': '"""[email protected]"""'}), "(domain=self.domain_name, username='webuser_t1', password=\n 'secret', created_by=None, created_via=None, email='[email protected]')\n", (12205, 12341), False, 'from corehq.apps.users.models import WebUser\n'), ((12868, 12931), 'collections.Counter', 'Counter', (['{UserRole: 2, RolePermission: 11, RoleAssignableBy: 1}'], {}), '({UserRole: 2, RolePermission: 11, RoleAssignableBy: 1})\n', (12875, 12931), False, 'from collections import Counter\n'), ((12995, 13037), 'corehq.apps.users.models.UserRole.create', 'UserRole.create', (['self.domain_name', '"""role1"""'], {}), "(self.domain_name, 'role1')\n", (13010, 13037), False, 'from corehq.apps.users.models import UserRole, Permissions, RoleAssignableBy, RolePermission\n'), ((13368, 13401), 'corehq.apps.users.models.UserRole.objects.get', 'UserRole.objects.get', ([], {'id': 'role1.id'}), '(id=role1.id)\n', (13388, 13401), False, 'from corehq.apps.users.models import UserRole, Permissions, RoleAssignableBy, RolePermission\n'), ((13425, 13458), 'corehq.apps.users.models.UserRole.objects.get', 'UserRole.objects.get', ([], {'id': 'role2.id'}), '(id=role2.id)\n', (13445, 13458), False, 'from corehq.apps.users.models import UserRole, Permissions, RoleAssignableBy, RolePermission\n'), ((14133, 14230), 'collections.Counter', 'Counter', (['{User: 1, DeviceReportEntry: 7, UserEntry: 1, UserErrorEntry: 2,\n ForceCloseEntry: 1}'], {}), '({User: 1, DeviceReportEntry: 7, UserEntry: 1, UserErrorEntry: 2,\n ForceCloseEntry: 1})\n', (14140, 14230), False, 'from collections import Counter\n'), ((14313, 14507), 'corehq.apps.users.models.CommCareUser.create', 'CommCareUser.create', ([], {'domain': 'self.domain_name', 'username': '"""user_1"""', 'password': '"""secret"""', 'created_by': 'None', 'created_via': 'None', 'email': '"""[email protected]"""', 'uuid': '"""428d454aa9abc74e1964e16d3565d6b6"""'}), "(domain=self.domain_name, username='user_1', password=\n 'secret', created_by=None, created_via=None, email='[email protected]',\n uuid='428d454aa9abc74e1964e16d3565d6b6')\n", (14332, 14507), False, 'from corehq.apps.users.models import CommCareUser\n'), ((14831, 14873), 'corehq.apps.receiverwrapper.util.submit_form_locally', 'submit_form_locally', (['xml', 'self.domain_name'], {}), '(xml, self.domain_name)\n', (14850, 14873), False, 'from corehq.apps.receiverwrapper.util import submit_form_locally\n'), ((15169, 15207), 'collections.Counter', 'Counter', (['{User: 1, DemoUserRestore: 1}'], {}), '({User: 1, DemoUserRestore: 1})\n', (15176, 15207), False, 'from collections import Counter\n'), ((15293, 15460), 'corehq.apps.users.models.CommCareUser.create', 'CommCareUser.create', ([], {'domain': 'self.domain_name', 
'username': '"""user_1"""', 'password': '"""secret"""', 'created_by': 'None', 'created_via': 'None', 'email': '"""[email protected]"""', 'uuid': 'user_id'}), "(domain=self.domain_name, username='user_1', password=\n 'secret', created_by=None, created_via=None, email='[email protected]',\n uuid=user_id)\n", (15312, 15460), False, 'from corehq.apps.users.models import CommCareUser\n'), ((16010, 16034), 'collections.Counter', 'Counter', (['{SQLProduct: 3}'], {}), '({SQLProduct: 3})\n', (16017, 16034), False, 'from collections import Counter\n'), ((16049, 16138), 'corehq.apps.products.models.SQLProduct.objects.create', 'SQLProduct.objects.create', ([], {'domain': 'self.domain_name', 'product_id': '"""test1"""', 'name': '"""test1"""'}), "(domain=self.domain_name, product_id='test1', name\n ='test1')\n", (16074, 16138), False, 'from corehq.apps.products.models import SQLProduct\n'), ((16147, 16236), 'corehq.apps.products.models.SQLProduct.objects.create', 'SQLProduct.objects.create', ([], {'domain': 'self.domain_name', 'product_id': '"""test2"""', 'name': '"""test2"""'}), "(domain=self.domain_name, product_id='test2', name\n ='test2')\n", (16172, 16236), False, 'from corehq.apps.products.models import SQLProduct\n'), ((16252, 16359), 'corehq.apps.products.models.SQLProduct.objects.create', 'SQLProduct.objects.create', ([], {'domain': 'self.domain_name', 'product_id': '"""test3"""', 'name': '"""test3"""', 'is_archived': '(True)'}), "(domain=self.domain_name, product_id='test3', name\n ='test3', is_archived=True)\n", (16277, 16359), False, 'from corehq.apps.products.models import SQLProduct\n'), ((16938, 16964), 'collections.Counter', 'Counter', (['{LocationType: 7}'], {}), '({LocationType: 7})\n', (16945, 16964), False, 'from collections import Counter\n'), ((16982, 17029), 'corehq.apps.locations.tests.test_location_types.make_loc_type', 'make_loc_type', (['"""state"""'], {'domain': 'self.domain_name'}), "('state', domain=self.domain_name)\n", (16995, 17029), False, 'from corehq.apps.locations.tests.test_location_types import make_loc_type\n'), ((17050, 17107), 'corehq.apps.locations.tests.test_location_types.make_loc_type', 'make_loc_type', (['"""district"""', 'state'], {'domain': 'self.domain_name'}), "('district', state, domain=self.domain_name)\n", (17063, 17107), False, 'from corehq.apps.locations.tests.test_location_types import make_loc_type\n'), ((17126, 17185), 'corehq.apps.locations.tests.test_location_types.make_loc_type', 'make_loc_type', (['"""section"""', 'district'], {'domain': 'self.domain_name'}), "('section', district, domain=self.domain_name)\n", (17139, 17185), False, 'from corehq.apps.locations.tests.test_location_types import make_loc_type\n'), ((17202, 17259), 'corehq.apps.locations.tests.test_location_types.make_loc_type', 'make_loc_type', (['"""block"""', 'district'], {'domain': 'self.domain_name'}), "('block', district, domain=self.domain_name)\n", (17215, 17259), False, 'from corehq.apps.locations.tests.test_location_types import make_loc_type\n'), ((17277, 17332), 'corehq.apps.locations.tests.test_location_types.make_loc_type', 'make_loc_type', (['"""center"""', 'block'], {'domain': 'self.domain_name'}), "('center', block, domain=self.domain_name)\n", (17290, 17332), False, 'from corehq.apps.locations.tests.test_location_types import make_loc_type\n'), ((17351, 17406), 'corehq.apps.locations.tests.test_location_types.make_loc_type', 'make_loc_type', (['"""county"""', 'state'], {'domain': 'self.domain_name'}), "('county', state, domain=self.domain_name)\n", (17364, 
17406), False, 'from corehq.apps.locations.tests.test_location_types import make_loc_type\n'), ((17422, 17476), 'corehq.apps.locations.tests.test_location_types.make_loc_type', 'make_loc_type', (['"""city"""', 'county'], {'domain': 'self.domain_name'}), "('city', county, domain=self.domain_name)\n", (17435, 17476), False, 'from corehq.apps.locations.tests.test_location_types import make_loc_type\n'), ((17551, 17604), 'corehq.apps.locations.models.LocationType.objects.full_hierarchy', 'LocationType.objects.full_hierarchy', (['self.domain_name'], {}), '(self.domain_name)\n', (17586, 17604), False, 'from corehq.apps.locations.models import LocationType\n'), ((18488, 18531), 'collections.Counter', 'Counter', (['{LocationType: 3, SQLLocation: 11}'], {}), '({LocationType: 3, SQLLocation: 11})\n', (18495, 18531), False, 'from collections import Counter\n'), ((19157, 19249), 'corehq.apps.locations.tests.util.setup_locations_and_types', 'setup_locations_and_types', (['self.domain_name', 'location_type_names', '[]', 'location_structure'], {}), '(self.domain_name, location_type_names, [],\n location_structure)\n', (19182, 19249), False, 'from corehq.apps.locations.tests.util import setup_locations_and_types\n'), ((19504, 19564), 'corehq.apps.locations.models.SQLLocation.objects.get_locations_and_children', 'SQLLocation.objects.get_locations_and_children', (['location_ids'], {}), '(location_ids)\n', (19550, 19564), False, 'from corehq.apps.locations.models import LocationType, SQLLocation\n'), ((19753, 19840), 'corehq.apps.locations.models.SQLLocation.objects.get_locations_and_children', 'SQLLocation.objects.get_locations_and_children', (["[locations['Gauteng'].location_id]"], {}), "([locations['Gauteng'].\n location_id])\n", (19799, 19840), False, 'from corehq.apps.locations.models import LocationType, SQLLocation\n'), ((20139, 20205), 'collections.Counter', 'Counter', (['{PhoneNumber: 1, MessagingEvent: 1, MessagingSubEvent: 1}'], {}), '({PhoneNumber: 1, MessagingEvent: 1, MessagingSubEvent: 1})\n', (20146, 20205), False, 'from collections import Counter\n'), ((21951, 22262), 'corehq.apps.sms.models.SQLMobileBackend.objects.create', 'SQLMobileBackend.objects.create', ([], {'domain': 'self.domain_name', 'name': '"""test-domain-mobile-backend"""', 'display_name': '"""Test Domain Mobile Backend"""', 'hq_api_id': '"""TDMB"""', 'inbound_api_key': '"""test-domain-mobile-backend-inbound-api-key"""', 'supported_countries': "['*']", 'backend_type': 'SQLMobileBackend.SMS', 'is_global': '(False)'}), "(domain=self.domain_name, name=\n 'test-domain-mobile-backend', display_name='Test Domain Mobile Backend',\n hq_api_id='TDMB', inbound_api_key=\n 'test-domain-mobile-backend-inbound-api-key', supported_countries=['*'],\n backend_type=SQLMobileBackend.SMS, is_global=False)\n", (21982, 22262), False, 'from corehq.apps.sms.models import SQLMobileBackend, SQLMobileBackendMapping\n'), ((22360, 22501), 'corehq.apps.sms.models.SQLMobileBackendMapping.objects.create', 'SQLMobileBackendMapping.objects.create', ([], {'domain': 'self.domain_name', 'backend': 'domain_backend', 'backend_type': 'SQLMobileBackend.SMS', 'prefix': '"""123"""'}), "(domain=self.domain_name, backend=\n domain_backend, backend_type=SQLMobileBackend.SMS, prefix='123')\n", (22398, 22501), False, 'from corehq.apps.sms.models import SQLMobileBackend, SQLMobileBackendMapping\n'), ((22582, 22880), 'corehq.apps.sms.models.SQLMobileBackend.objects.create', 'SQLMobileBackend.objects.create', ([], {'domain': 'None', 'name': '"""test-global-mobile-backend"""', 
'display_name': '"""Test Global Mobile Backend"""', 'hq_api_id': '"""TGMB"""', 'inbound_api_key': '"""test-global-mobile-backend-inbound-api-key"""', 'supported_countries': "['*']", 'backend_type': 'SQLMobileBackend.SMS', 'is_global': '(True)'}), "(domain=None, name=\n 'test-global-mobile-backend', display_name='Test Global Mobile Backend',\n hq_api_id='TGMB', inbound_api_key=\n 'test-global-mobile-backend-inbound-api-key', supported_countries=['*'],\n backend_type=SQLMobileBackend.SMS, is_global=True)\n", (22613, 22880), False, 'from corehq.apps.sms.models import SQLMobileBackend, SQLMobileBackendMapping\n'), ((22978, 23117), 'corehq.apps.sms.models.SQLMobileBackendMapping.objects.create', 'SQLMobileBackendMapping.objects.create', ([], {'domain': 'self.domain_name', 'backend': 'global_backend', 'backend_type': 'SQLMobileBackend.SMS', 'prefix': '"""*"""'}), "(domain=self.domain_name, backend=\n global_backend, backend_type=SQLMobileBackend.SMS, prefix='*')\n", (23016, 23117), False, 'from corehq.apps.sms.models import SQLMobileBackend, SQLMobileBackendMapping\n'), ((24629, 24708), 'corehq.apps.translations.models.TransifexOrganization.objects.create', 'TransifexOrganization.objects.create', ([], {'slug': '"""test"""', 'name': '"""demo"""', 'api_token': '"""123"""'}), "(slug='test', name='demo', api_token='123')\n", (24665, 24708), False, 'from corehq.apps.translations.models import TransifexProject, TransifexOrganization\n'), ((24717, 24824), 'corehq.apps.translations.models.TransifexProject.objects.create', 'TransifexProject.objects.create', ([], {'organization': 'org', 'slug': '"""testp"""', 'name': '"""demop"""', 'domain': 'self.domain_name'}), "(organization=org, slug='testp', name=\n 'demop', domain=self.domain_name)\n", (24748, 24824), False, 'from corehq.apps.translations.models import TransifexProject, TransifexOrganization\n'), ((24850, 24959), 'corehq.apps.translations.models.TransifexProject.objects.create', 'TransifexProject.objects.create', ([], {'organization': 'org', 'slug': '"""testp1"""', 'name': '"""demop1"""', 'domain': 'self.domain_name'}), "(organization=org, slug='testp1', name=\n 'demop1', domain=self.domain_name)\n", (24881, 24959), False, 'from corehq.apps.translations.models import TransifexProject, TransifexOrganization\n'), ((25315, 25362), 'corehq.apps.locations.tests.test_location_types.make_loc_type', 'make_loc_type', (['"""state"""'], {'domain': 'self.domain_name'}), "('state', domain=self.domain_name)\n", (25328, 25362), False, 'from corehq.apps.locations.tests.test_location_types import make_loc_type\n'), ((25371, 25460), 'corehq.apps.products.models.SQLProduct.objects.create', 'SQLProduct.objects.create', ([], {'domain': 'self.domain_name', 'product_id': '"""test1"""', 'name': '"""test1"""'}), "(domain=self.domain_name, product_id='test1', name\n ='test1')\n", (25396, 25460), False, 'from corehq.apps.products.models import SQLProduct\n'), ((25489, 25530), 'collections.Counter', 'Counter', (['{LocationType: 1, SQLProduct: 1}'], {}), '({LocationType: 1, SQLProduct: 1})\n', (25496, 25530), False, 'from collections import Counter\n'), ((26525, 26686), 'corehq.apps.zapier.models.ZapierSubscription.objects.create', 'ZapierSubscription.objects.create', ([], {'domain': 'self.domain_name', 'case_type': '"""case_type"""', 'event_name': 'EventTypes.NEW_CASE', 'url': '"""example.com"""', 'user_id': '"""user_id"""'}), "(domain=self.domain_name, case_type=\n 'case_type', event_name=EventTypes.NEW_CASE, url='example.com', user_id\n ='user_id')\n", (26558, 26686), False, 
'from corehq.apps.zapier.models import ZapierSubscription\n'), ((27654, 27664), 'io.StringIO', 'StringIO', ([], {}), '()\n', (27662, 27664), False, 'from io import StringIO\n'), ((28014, 28029), 'corehq.apps.dump_reload.sql.SqlDataLoader', 'SqlDataLoader', ([], {}), '()\n', (28027, 28029), False, 'from corehq.apps.dump_reload.sql import SqlDataDumper, SqlDataLoader\n'), ((28430, 28456), 'corehq.apps.dump_reload.sql.load.DefaultDictWithKey', 'DefaultDictWithKey', (['enlist'], {}), '(enlist)\n', (28448, 28456), False, 'from corehq.apps.dump_reload.sql.load import DefaultDictWithKey, constraint_checks_deferred\n'), ((28733, 28763), 'corehq.apps.dump_reload.sql.load.DefaultDictWithKey', 'DefaultDictWithKey', (['empty_list'], {}), '(empty_list)\n', (28751, 28763), False, 'from corehq.apps.dump_reload.sql.load import DefaultDictWithKey, constraint_checks_deferred\n'), ((29073, 29101), 'corehq.apps.dump_reload.sql.load.DefaultDictWithKey', 'DefaultDictWithKey', (['appender'], {}), '(appender)\n', (29091, 29101), False, 'from corehq.apps.dump_reload.sql.load import DefaultDictWithKey, constraint_checks_deferred\n'), ((29337, 29357), 'corehq.apps.dump_reload.sql.load.DefaultDictWithKey', 'DefaultDictWithKey', ([], {}), '()\n', (29355, 29357), False, 'from corehq.apps.dump_reload.sql.load import DefaultDictWithKey, constraint_checks_deferred\n'), ((2073, 2085), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (2083, 2085), False, 'import uuid\n'), ((3286, 3331), 'corehq.apps.dump_reload.sql.dump.get_objects_to_dump', 'get_objects_to_dump', (['self.domain_name', '[]', '[]'], {}), '(self.domain_name, [], [])\n', (3305, 3331), False, 'from corehq.apps.dump_reload.sql.dump import get_model_iterator_builders_to_dump, get_objects_to_dump\n'), ((4919, 4951), 'django.db.models.signals.post_save._live_receivers', 'post_save._live_receivers', (['model'], {}), '(model)\n', (4944, 4951), False, 'from django.db.models.signals import post_delete, post_save\n'), ((5852, 5892), 'corehq.apps.dump_reload.sql.dump.get_objects_to_dump', 'get_objects_to_dump', (['domain_name', '[]', '[]'], {}), '(domain_name, [], [])\n', (5871, 5892), False, 'from corehq.apps.dump_reload.sql.dump import get_model_iterator_builders_to_dump, get_objects_to_dump\n'), ((6802, 6840), 'corehq.form_processor.tests.utils.create_form_for_test', 'create_form_for_test', (['self.domain_name'], {}), '(self.domain_name)\n', (6822, 6840), False, 'from corehq.form_processor.tests.utils import FormProcessorTestUtils, create_form_for_test, sharded\n'), ((6854, 6892), 'corehq.form_processor.tests.utils.create_form_for_test', 'create_form_for_test', (['self.domain_name'], {}), '(self.domain_name)\n', (6874, 6892), False, 'from corehq.form_processor.tests.utils import FormProcessorTestUtils, create_form_for_test, sharded\n'), ((10690, 10777), 'corehq.apps.case_search.models.FuzzyProperties', 'FuzzyProperties', ([], {'domain': 'self.domain', 'case_type': '"""dog"""', 'properties': "['breed', 'color']"}), "(domain=self.domain, case_type='dog', properties=['breed',\n 'color'])\n", (10705, 10777), False, 'from corehq.apps.case_search.models import CaseSearchConfig, FuzzyProperties\n'), ((10787, 10862), 'corehq.apps.case_search.models.FuzzyProperties', 'FuzzyProperties', ([], {'domain': 'self.domain', 'case_type': '"""owner"""', 'properties': "['name']"}), "(domain=self.domain, case_type='owner', properties=['name'])\n", (10802, 10862), False, 'from corehq.apps.case_search.models import CaseSearchConfig, FuzzyProperties\n'), ((15261, 15273), 'uuid.uuid4', 
'uuid.uuid4', ([], {}), '()\n', (15271, 15273), False, 'import uuid\n'), ((24365, 24443), 'collections.Counter', 'Counter', (['{CaseUploadFileMeta: 1, CaseUploadRecord: 1, CaseUploadFormRecord: 1}'], {}), '({CaseUploadFileMeta: 1, CaseUploadRecord: 1, CaseUploadFormRecord: 1})\n', (24372, 24443), False, 'from collections import Counter\n'), ((25005, 25061), 'collections.Counter', 'Counter', (['{TransifexOrganization: 1, TransifexProject: 2}'], {}), '({TransifexOrganization: 1, TransifexProject: 2})\n', (25012, 25061), False, 'from collections import Counter\n'), ((25685, 25713), 'corehq.apps.locations.models.LocationType.objects.count', 'LocationType.objects.count', ([], {}), '()\n', (25711, 25713), False, 'from corehq.apps.locations.models import LocationType\n'), ((26050, 26062), 'corehq.messaging.scheduling.models.SMSContent', 'SMSContent', ([], {}), '()\n', (26060, 26062), False, 'from corehq.messaging.scheduling.models import AlertSchedule, SMSContent, AlertEvent\n'), ((26417, 26474), 'collections.Counter', 'Counter', (['{AlertSchedule: 1, AlertEvent: 2, SMSContent: 2}'], {}), '({AlertSchedule: 1, AlertEvent: 2, SMSContent: 2})\n', (26424, 26474), False, 'from collections import Counter\n'), ((26776, 26808), 'collections.Counter', 'Counter', (['{ZapierSubscription: 1}'], {}), '({ZapierSubscription: 1})\n', (26783, 26808), False, 'from collections import Counter\n'), ((26984, 27073), 'corehq.apps.products.models.SQLProduct.objects.create', 'SQLProduct.objects.create', ([], {'domain': 'self.domain_name', 'product_id': '"""test1"""', 'name': '"""test1"""'}), "(domain=self.domain_name, product_id='test1', name\n ='test1')\n", (27009, 27073), False, 'from corehq.apps.products.models import SQLProduct\n'), ((27082, 27171), 'corehq.apps.products.models.SQLProduct.objects.create', 'SQLProduct.objects.create', ([], {'domain': 'self.domain_name', 'product_id': '"""test2"""', 'name': '"""test2"""'}), "(domain=self.domain_name, product_id='test2', name\n ='test2')\n", (27107, 27171), False, 'from corehq.apps.products.models import SQLProduct\n'), ((27180, 27269), 'corehq.apps.products.models.SQLProduct.objects.create', 'SQLProduct.objects.create', ([], {'domain': 'self.domain_name', 'product_id': '"""test3"""', 'name': '"""test3"""'}), "(domain=self.domain_name, product_id='test3', name\n ='test3')\n", (27205, 27269), False, 'from corehq.apps.products.models import SQLProduct\n'), ((28092, 28161), 'mock.patch', 'mock.patch', (['"""corehq.apps.dump_reload.sql.load.CHUNK_SIZE"""', 'chunk_size'], {}), "('corehq.apps.dump_reload.sql.load.CHUNK_SIZE', chunk_size)\n", (28102, 28161), False, 'import mock\n'), ((5590, 5627), 'django.db.transaction.atomic', 'transaction.atomic', ([], {'using': 'iterator.db'}), '(using=iterator.db)\n', (5608, 5627), False, 'from django.db import transaction, IntegrityError\n'), ((5648, 5687), 'corehq.apps.dump_reload.sql.load.constraint_checks_deferred', 'constraint_checks_deferred', (['iterator.db'], {}), '(iterator.db)\n', (5674, 5687), False, 'from corehq.apps.dump_reload.sql.load import DefaultDictWithKey, constraint_checks_deferred\n'), ((5717, 5749), 'django.contrib.admin.utils.NestedObjects', 'NestedObjects', ([], {'using': 'iterator.db'}), '(using=iterator.db)\n', (5730, 5749), False, 'from django.contrib.admin.utils import NestedObjects\n'), ((7983, 8103), 'casexml.apps.case.mock.CaseStructure', 'CaseStructure', ([], {'case_id': 'pre_cases[0].case_id', 'attrs': "{'external_id': 'billie jean', 'update': {'name': 'Billie Jean'}}"}), "(case_id=pre_cases[0].case_id, 
attrs={'external_id':\n 'billie jean', 'update': {'name': 'Billie Jean'}})\n", (7996, 8103), False, 'from casexml.apps.case.mock import CaseFactory, CaseIndex, CaseStructure\n'), ((8868, 8928), 'corehq.apps.commtrack.tests.util.get_single_balance_block', 'get_single_balance_block', (['case.case_id', 'self.product._id', '(10)'], {}), '(case.case_id, self.product._id, 10)\n', (8892, 8928), False, 'from corehq.apps.commtrack.tests.util import get_single_balance_block\n'), ((8999, 9058), 'corehq.apps.commtrack.tests.util.get_single_balance_block', 'get_single_balance_block', (['case.case_id', 'self.product._id', '(5)'], {}), '(case.case_id, self.product._id, 5)\n', (9023, 9058), False, 'from corehq.apps.commtrack.tests.util import get_single_balance_block\n'), ((13134, 13166), 'corehq.apps.users.models.Permissions', 'Permissions', ([], {'edit_web_users': '(True)'}), '(edit_web_users=True)\n', (13145, 13166), False, 'from corehq.apps.users.models import UserRole, Permissions, RoleAssignableBy, RolePermission\n'), ((16525, 16582), 'corehq.apps.products.models.SQLProduct.active_objects.filter', 'SQLProduct.active_objects.filter', ([], {'domain': 'self.domain_name'}), '(domain=self.domain_name)\n', (16557, 16582), False, 'from corehq.apps.products.models import SQLProduct\n'), ((20594, 20611), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (20609, 20611), False, 'from datetime import datetime\n'), ((20751, 20768), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (20766, 20768), False, 'from datetime import datetime\n'), ((21020, 21037), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (21035, 21037), False, 'from datetime import datetime\n'), ((23311, 23343), 'corehq.apps.sms.models.SQLMobileBackend.objects.first', 'SQLMobileBackend.objects.first', ([], {}), '()\n', (23341, 23343), False, 'from corehq.apps.sms.models import SQLMobileBackend, SQLMobileBackendMapping\n'), ((23420, 23459), 'corehq.apps.sms.models.SQLMobileBackendMapping.objects.first', 'SQLMobileBackendMapping.objects.first', ([], {}), '()\n', (23457, 23459), False, 'from corehq.apps.sms.models import SQLMobileBackend, SQLMobileBackendMapping\n'), ((24012, 24024), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (24022, 24024), False, 'import uuid\n'), ((24046, 24058), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (24056, 24058), False, 'import uuid\n'), ((25631, 25655), 'collections.Counter', 'Counter', (['{SQLProduct: 1}'], {}), '({SQLProduct: 1})\n', (25638, 25655), False, 'from collections import Counter\n'), ((26298, 26391), 'corehq.messaging.scheduling.scheduling_partitioned.dbaccessors.delete_alert_schedule_instances_for_schedule', 'delete_alert_schedule_instances_for_schedule', (['AlertScheduleInstance', 'schedule.schedule_id'], {}), '(AlertScheduleInstance,\n schedule.schedule_id)\n', (26342, 26391), False, 'from corehq.messaging.scheduling.scheduling_partitioned.dbaccessors import delete_alert_schedule_instances_for_schedule\n'), ((27673, 27712), 'corehq.apps.dump_reload.sql.SqlDataDumper', 'SqlDataDumper', (['self.domain_name', '[]', '[]'], {}), '(self.domain_name, [], [])\n', (27686, 27712), False, 'from corehq.apps.dump_reload.sql import SqlDataDumper, SqlDataLoader\n'), ((3104, 3143), 'corehq.apps.dump_reload.sql.SqlDataDumper', 'SqlDataDumper', (['self.domain_name', '[]', '[]'], {}), '(self.domain_name, [], [])\n', (3117, 3143), False, 'from corehq.apps.dump_reload.sql import SqlDataDumper, SqlDataLoader\n'), ((4497, 4513), 'json.loads', 'json.loads', (['line'], {}), 
'(line)\n', (4507, 4513), False, 'import json\n'), ((5140, 5168), 'inspect.getargspec', 'inspect.getargspec', (['receiver'], {}), '(receiver)\n', (5158, 5168), False, 'import inspect\n'), ((13521, 13534), 'corehq.apps.users.models.Permissions', 'Permissions', ([], {}), '()\n', (13532, 13534), False, 'from corehq.apps.users.models import UserRole, Permissions, RoleAssignableBy, RolePermission\n'), ((13664, 13696), 'corehq.apps.users.models.Permissions', 'Permissions', ([], {'edit_web_users': '(True)'}), '(edit_web_users=True)\n', (13675, 13696), False, 'from corehq.apps.users.models import UserRole, Permissions, RoleAssignableBy, RolePermission\n'), ((16437, 16494), 'corehq.apps.products.models.SQLProduct.active_objects.filter', 'SQLProduct.active_objects.filter', ([], {'domain': 'self.domain_name'}), '(domain=self.domain_name)\n', (16469, 16494), False, 'from corehq.apps.products.models import SQLProduct\n'), ((23802, 23814), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (23812, 23814), False, 'import uuid\n'), ((24086, 24098), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (24096, 24098), False, 'import uuid\n'), ((24309, 24321), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (24319, 24321), False, 'import uuid\n'), ((26131, 26160), 'corehq.messaging.scheduling.models.AlertEvent', 'AlertEvent', ([], {'minutes_to_wait': '(5)'}), '(minutes_to_wait=5)\n', (26141, 26160), False, 'from corehq.messaging.scheduling.models import AlertSchedule, SMSContent, AlertEvent\n'), ((26162, 26174), 'corehq.messaging.scheduling.models.SMSContent', 'SMSContent', ([], {}), '()\n', (26172, 26174), False, 'from corehq.messaging.scheduling.models import AlertSchedule, SMSContent, AlertEvent\n'), ((26194, 26224), 'corehq.messaging.scheduling.models.AlertEvent', 'AlertEvent', ([], {'minutes_to_wait': '(15)'}), '(minutes_to_wait=15)\n', (26204, 26224), False, 'from corehq.messaging.scheduling.models import AlertSchedule, SMSContent, AlertEvent\n'), ((26226, 26238), 'corehq.messaging.scheduling.models.SMSContent', 'SMSContent', ([], {}), '()\n', (26236, 26238), False, 'from corehq.messaging.scheduling.models import AlertSchedule, SMSContent, AlertEvent\n'), ((21368, 21380), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (21378, 21380), False, 'import uuid\n'), ((21603, 21623), 'datetime.datetime', 'datetime', (['(2017)', '(3)', '(1)'], {}), '(2017, 3, 1)\n', (21611, 21623), False, 'from datetime import datetime\n'), ((21680, 21692), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (21690, 21692), False, 'import uuid\n'), ((7797, 7884), 'casexml.apps.case.mock.CaseStructure', 'CaseStructure', ([], {'attrs': "{'case_name': 'parent', 'update': {'age': 42}, 'create': True}"}), "(attrs={'case_name': 'parent', 'update': {'age': 42}, 'create':\n True})\n", (7810, 7884), False, 'from casexml.apps.case.mock import CaseFactory, CaseIndex, CaseStructure\n'), ((15706, 15718), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (15716, 15718), False, 'import uuid\n'), ((21487, 21499), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (21497, 21499), False, 'import uuid\n')] |
the-moliver/keras | tests/keras/test_activations.py | 4fa7e5d454dd4f3f33f1d756a2a8659f2e789141 | import pytest
import numpy as np
from numpy.testing import assert_allclose
from keras import backend as K
from keras import activations
def get_standard_values():
'''
These are just a set of floats used for testing the activation
functions, and are useful in multiple tests.
'''
return np.array([[0, 0.1, 0.5, 0.9, 1.0]], dtype=K.floatx())
def test_softmax():
'''
Test using a reference implementation of softmax
'''
def softmax(values):
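        # Subtracting the max before exponentiating keeps exp() from
        # overflowing and does not change the softmax result.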
m = np.max(values)
e = np.exp(values - m)
return e / np.sum(e)
x = K.placeholder(ndim=2)
f = K.function([x], [activations.softmax(x)])
test_values = get_standard_values()
result = f([test_values])[0]
expected = softmax(test_values)
assert_allclose(result, expected, rtol=1e-05)
def test_time_distributed_softmax():
x = K.placeholder(shape=(1, 1, 5))
f = K.function([x], [activations.softmax(x)])
test_values = get_standard_values()
test_values = np.reshape(test_values, (1, 1, np.size(test_values)))
f([test_values])[0]
def test_softplus():
'''
Test using a reference softplus implementation
'''
def softplus(x):
return np.log(np.ones_like(x) + np.exp(x))
x = K.placeholder(ndim=2)
f = K.function([x], [activations.softplus(x)])
test_values = get_standard_values()
result = f([test_values])[0]
expected = softplus(test_values)
assert_allclose(result, expected, rtol=1e-05)
def test_softsign():
'''
Test using a reference softsign implementation
'''
def softsign(x):
return np.divide(x, np.ones_like(x) + np.absolute(x))
x = K.placeholder(ndim=2)
f = K.function([x], [activations.softsign(x)])
test_values = get_standard_values()
result = f([test_values])[0]
expected = softsign(test_values)
assert_allclose(result, expected, rtol=1e-05)
def test_sigmoid():
'''
Test using a numerically stable reference sigmoid implementation
'''
def ref_sigmoid(x):
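        # Branching on the sign of x means exp() is only ever called with a
        # non-positive argument, so neither branch overflows for large |x|.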
if x >= 0:
return 1 / (1 + np.exp(-x))
else:
z = np.exp(x)
return z / (1 + z)
sigmoid = np.vectorize(ref_sigmoid)
x = K.placeholder(ndim=2)
f = K.function([x], [activations.sigmoid(x)])
test_values = get_standard_values()
result = f([test_values])[0]
expected = sigmoid(test_values)
assert_allclose(result, expected, rtol=1e-05)
def test_hard_sigmoid():
'''
Test using a reference hard sigmoid implementation
'''
def ref_hard_sigmoid(x):
'''
Reference hard sigmoid with slope and shift values from theano, see
https://github.com/Theano/Theano/blob/master/theano/tensor/nnet/sigm.py
'''
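        # With slope 0.2 and shift 0.5: ref_hard_sigmoid(-2.5) == 0.0,
        # ref_hard_sigmoid(0.0) == 0.5 and ref_hard_sigmoid(2.5) == 1.0.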
x = (x * 0.2) + 0.5
z = 0.0 if x <= 0 else (1.0 if x >= 1 else x)
return z
hard_sigmoid = np.vectorize(ref_hard_sigmoid)
x = K.placeholder(ndim=2)
f = K.function([x], [activations.hard_sigmoid(x)])
test_values = get_standard_values()
result = f([test_values])[0]
expected = hard_sigmoid(test_values)
assert_allclose(result, expected, rtol=1e-05)
def test_relu():
'''
    Relu should not depend on how the input is represented; here it is
    checked on the standard (non-negative) test values, for which relu
    is the identity.
'''
x = K.placeholder(ndim=2)
f = K.function([x], [activations.relu(x)])
test_values = get_standard_values()
result = f([test_values])[0]
# because no negatives in test values
assert_allclose(result, test_values, rtol=1e-05)
def test_elu():
x = K.placeholder(ndim=2)
f = K.function([x], [activations.elu(x, 0.5)])
test_values = get_standard_values()
result = f([test_values])[0]
# because no negatives in test values
assert_allclose(result, test_values, rtol=1e-05)
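    # For negative inputs ELU returns alpha * (exp(x) - 1); alpha is 0.5 here.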
negative_values = np.array([[-1, -2]], dtype=K.floatx())
result = f([negative_values])[0]
true_result = (np.exp(negative_values) - 1) / 2
assert_allclose(result, true_result)
def test_tanh():
test_values = get_standard_values()
x = K.placeholder(ndim=2)
exp = activations.tanh(x)
f = K.function([x], [exp])
result = f([test_values])[0]
expected = np.tanh(test_values)
assert_allclose(result, expected, rtol=1e-05)
def test_linear():
'''
    This function does no input validation; it just returns whatever
    was passed in, unchanged.
'''
xs = [1, 5, True, None, 'foo']
for x in xs:
assert(x == activations.linear(x))
if __name__ == '__main__':
pytest.main([__file__])
| [((575, 596), 'keras.backend.placeholder', 'K.placeholder', ([], {'ndim': '(2)'}), '(ndim=2)\n', (588, 596), True, 'from keras import backend as K\n'), ((761, 806), 'numpy.testing.assert_allclose', 'assert_allclose', (['result', 'expected'], {'rtol': '(1e-05)'}), '(result, expected, rtol=1e-05)\n', (776, 806), False, 'from numpy.testing import assert_allclose\n'), ((854, 884), 'keras.backend.placeholder', 'K.placeholder', ([], {'shape': '(1, 1, 5)'}), '(shape=(1, 1, 5))\n', (867, 884), True, 'from keras import backend as K\n'), ((1242, 1263), 'keras.backend.placeholder', 'K.placeholder', ([], {'ndim': '(2)'}), '(ndim=2)\n', (1255, 1263), True, 'from keras import backend as K\n'), ((1430, 1475), 'numpy.testing.assert_allclose', 'assert_allclose', (['result', 'expected'], {'rtol': '(1e-05)'}), '(result, expected, rtol=1e-05)\n', (1445, 1475), False, 'from numpy.testing import assert_allclose\n'), ((1658, 1679), 'keras.backend.placeholder', 'K.placeholder', ([], {'ndim': '(2)'}), '(ndim=2)\n', (1671, 1679), True, 'from keras import backend as K\n'), ((1846, 1891), 'numpy.testing.assert_allclose', 'assert_allclose', (['result', 'expected'], {'rtol': '(1e-05)'}), '(result, expected, rtol=1e-05)\n', (1861, 1891), False, 'from numpy.testing import assert_allclose\n'), ((2167, 2192), 'numpy.vectorize', 'np.vectorize', (['ref_sigmoid'], {}), '(ref_sigmoid)\n', (2179, 2192), True, 'import numpy as np\n'), ((2202, 2223), 'keras.backend.placeholder', 'K.placeholder', ([], {'ndim': '(2)'}), '(ndim=2)\n', (2215, 2223), True, 'from keras import backend as K\n'), ((2388, 2433), 'numpy.testing.assert_allclose', 'assert_allclose', (['result', 'expected'], {'rtol': '(1e-05)'}), '(result, expected, rtol=1e-05)\n', (2403, 2433), False, 'from numpy.testing import assert_allclose\n'), ((2859, 2889), 'numpy.vectorize', 'np.vectorize', (['ref_hard_sigmoid'], {}), '(ref_hard_sigmoid)\n', (2871, 2889), True, 'import numpy as np\n'), ((2899, 2920), 'keras.backend.placeholder', 'K.placeholder', ([], {'ndim': '(2)'}), '(ndim=2)\n', (2912, 2920), True, 'from keras import backend as K\n'), ((3095, 3140), 'numpy.testing.assert_allclose', 'assert_allclose', (['result', 'expected'], {'rtol': '(1e-05)'}), '(result, expected, rtol=1e-05)\n', (3110, 3140), False, 'from numpy.testing import assert_allclose\n'), ((3306, 3327), 'keras.backend.placeholder', 'K.placeholder', ([], {'ndim': '(2)'}), '(ndim=2)\n', (3319, 3327), True, 'from keras import backend as K\n'), ((3496, 3544), 'numpy.testing.assert_allclose', 'assert_allclose', (['result', 'test_values'], {'rtol': '(1e-05)'}), '(result, test_values, rtol=1e-05)\n', (3511, 3544), False, 'from numpy.testing import assert_allclose\n'), ((3571, 3592), 'keras.backend.placeholder', 'K.placeholder', ([], {'ndim': '(2)'}), '(ndim=2)\n', (3584, 3592), True, 'from keras import backend as K\n'), ((3765, 3813), 'numpy.testing.assert_allclose', 'assert_allclose', (['result', 'test_values'], {'rtol': '(1e-05)'}), '(result, test_values, rtol=1e-05)\n', (3780, 3813), False, 'from numpy.testing import assert_allclose\n'), ((3970, 4006), 'numpy.testing.assert_allclose', 'assert_allclose', (['result', 'true_result'], {}), '(result, true_result)\n', (3985, 4006), False, 'from numpy.testing import assert_allclose\n'), ((4075, 4096), 'keras.backend.placeholder', 'K.placeholder', ([], {'ndim': '(2)'}), '(ndim=2)\n', (4088, 4096), True, 'from keras import backend as K\n'), ((4107, 4126), 'keras.activations.tanh', 'activations.tanh', (['x'], {}), '(x)\n', (4123, 4126), False, 'from keras import 
activations\n'), ((4135, 4157), 'keras.backend.function', 'K.function', (['[x]', '[exp]'], {}), '([x], [exp])\n', (4145, 4157), True, 'from keras import backend as K\n'), ((4207, 4227), 'numpy.tanh', 'np.tanh', (['test_values'], {}), '(test_values)\n', (4214, 4227), True, 'import numpy as np\n'), ((4232, 4277), 'numpy.testing.assert_allclose', 'assert_allclose', (['result', 'expected'], {'rtol': '(1e-05)'}), '(result, expected, rtol=1e-05)\n', (4247, 4277), False, 'from numpy.testing import assert_allclose\n'), ((4537, 4560), 'pytest.main', 'pytest.main', (['[__file__]'], {}), '([__file__])\n', (4548, 4560), False, 'import pytest\n'), ((491, 505), 'numpy.max', 'np.max', (['values'], {}), '(values)\n', (497, 505), True, 'import numpy as np\n'), ((518, 536), 'numpy.exp', 'np.exp', (['(values - m)'], {}), '(values - m)\n', (524, 536), True, 'import numpy as np\n'), ((351, 361), 'keras.backend.floatx', 'K.floatx', ([], {}), '()\n', (359, 361), True, 'from keras import backend as K\n'), ((556, 565), 'numpy.sum', 'np.sum', (['e'], {}), '(e)\n', (562, 565), True, 'import numpy as np\n'), ((622, 644), 'keras.activations.softmax', 'activations.softmax', (['x'], {}), '(x)\n', (641, 644), False, 'from keras import activations\n'), ((910, 932), 'keras.activations.softmax', 'activations.softmax', (['x'], {}), '(x)\n', (929, 932), False, 'from keras import activations\n'), ((1024, 1044), 'numpy.size', 'np.size', (['test_values'], {}), '(test_values)\n', (1031, 1044), True, 'import numpy as np\n'), ((1289, 1312), 'keras.activations.softplus', 'activations.softplus', (['x'], {}), '(x)\n', (1309, 1312), False, 'from keras import activations\n'), ((1705, 1728), 'keras.activations.softsign', 'activations.softsign', (['x'], {}), '(x)\n', (1725, 1728), False, 'from keras import activations\n'), ((2112, 2121), 'numpy.exp', 'np.exp', (['x'], {}), '(x)\n', (2118, 2121), True, 'import numpy as np\n'), ((2249, 2271), 'keras.activations.sigmoid', 'activations.sigmoid', (['x'], {}), '(x)\n', (2268, 2271), False, 'from keras import activations\n'), ((2946, 2973), 'keras.activations.hard_sigmoid', 'activations.hard_sigmoid', (['x'], {}), '(x)\n', (2970, 2973), False, 'from keras import activations\n'), ((3353, 3372), 'keras.activations.relu', 'activations.relu', (['x'], {}), '(x)\n', (3369, 3372), False, 'from keras import activations\n'), ((3618, 3641), 'keras.activations.elu', 'activations.elu', (['x', '(0.5)'], {}), '(x, 0.5)\n', (3633, 3641), False, 'from keras import activations\n'), ((3864, 3874), 'keras.backend.floatx', 'K.floatx', ([], {}), '()\n', (3872, 3874), True, 'from keras import backend as K\n'), ((3932, 3955), 'numpy.exp', 'np.exp', (['negative_values'], {}), '(negative_values)\n', (3938, 3955), True, 'import numpy as np\n'), ((4481, 4502), 'keras.activations.linear', 'activations.linear', (['x'], {}), '(x)\n', (4499, 4502), False, 'from keras import activations\n'), ((1204, 1219), 'numpy.ones_like', 'np.ones_like', (['x'], {}), '(x)\n', (1216, 1219), True, 'import numpy as np\n'), ((1222, 1231), 'numpy.exp', 'np.exp', (['x'], {}), '(x)\n', (1228, 1231), True, 'import numpy as np\n'), ((1615, 1630), 'numpy.ones_like', 'np.ones_like', (['x'], {}), '(x)\n', (1627, 1630), True, 'import numpy as np\n'), ((1633, 1647), 'numpy.absolute', 'np.absolute', (['x'], {}), '(x)\n', (1644, 1647), True, 'import numpy as np\n'), ((2070, 2080), 'numpy.exp', 'np.exp', (['(-x)'], {}), '(-x)\n', (2076, 2080), True, 'import numpy as np\n')] |
robertsawko/proteus | scripts/H5toXMF.py | 6f1e4c2ca1af85a906b35a5162430006f0343861 |
#import numpy
#import os
#from xml.etree.ElementTree import *
import tables
#from Xdmf import *
def H5toXMF(basename,size,start,finaltime,stride):
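    """Write one XDMF (.xmf) wrapper file per output time step: for every
    step in range(start, finaltime + 1, stride) the file
    <basename>.<step>.xmf is written as a spatial collection referencing
    the per-processor HDF5 files solution.p<proc>.<step>.h5 (mesh topology,
    node coordinates and the nodal fields u, v, w, p, phid).
    """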
    # Write one XMF wrapper file per output time step.
for step in range(start,finaltime+1,stride):
XMFfile = open(basename+"."+str(step)+".xmf","w")
XMFfile.write(r"""<?xml version="1.0" ?>
<!DOCTYPE Xdmf SYSTEM "Xdmf.dtd" []>
<Xdmf Version="2.0" xmlns:xi="http://www.w3.org/2001/XInclude">
<Domain>"""+"\n")
XMFfile.write(r' <Grid GridType="Collection" CollectionType="Spatial">'+"\n")
for proc in range(0,size):
filename="solution.p"+str(proc)+"."+str(step)+".h5"
print filename
f1 = tables.openFile(filename)
XMFfile.write (r'<Grid GridType="Uniform">'+"\n")
XMFfile.write(r' <Time Value="'+str(step)+'" />'+"\n")
for tmp in f1.root:
if tmp.name == "elements":
XMFfile.write (r'<Topology NumberOfElements="' +str(len(tmp[:]))+ '" Type="Tetrahedron">'+"\n")
XMFfile.write (r' <DataItem DataType="Int" Dimensions="' +str(len(tmp[:]))+ ' 4" Format="HDF">' + filename + ':/elements</DataItem>'+"\n")
XMFfile.write (r'</Topology>'+"\n")
if tmp.name == "nodes":
XMFfile.write (r'<Geometry Type="XYZ">'+"\n")
XMFfile.write (r' <DataItem DataType="Float" Dimensions="' +str(len(tmp[:]))+ ' 3" Format="HDF" Precision="8">' + filename + ':/nodes</DataItem>'+"\n")
XMFfile.write (r'</Geometry>'+"\n")
if tmp.name == "u":
XMFfile.write (r'<Attribute AttributeType="Scalar" Center="Node" Name="u">'+"\n")
XMFfile.write (r' <DataItem DataType="Float" Dimensions="' +str(len(tmp[:]))+ '" Format="HDF" Precision="8">' + filename + ':/u</DataItem>'+"\n")
XMFfile.write (r'</Attribute>'+"\n")
if tmp.name == "v":
XMFfile.write (r'<Attribute AttributeType="Scalar" Center="Node" Name="v">'+"\n")
XMFfile.write (r' <DataItem DataType="Float" Dimensions="' +str(len(tmp[:]))+ '" Format="HDF" Precision="8">' + filename + ':/v</DataItem>'+"\n")
XMFfile.write (r'</Attribute>'+"\n")
if tmp.name == "w":
XMFfile.write (r'<Attribute AttributeType="Scalar" Center="Node" Name="w">'+"\n")
XMFfile.write (r' <DataItem DataType="Float" Dimensions="' +str(len(tmp[:]))+ '" Format="HDF" Precision="8">' + filename + ':/w</DataItem>'+"\n")
XMFfile.write (r'</Attribute>'+"\n")
if tmp.name == "p":
XMFfile.write (r'<Attribute AttributeType="Scalar" Center="Node" Name="p">'+"\n")
XMFfile.write (r' <DataItem DataType="Float" Dimensions="' +str(len(tmp[:]))+ '" Format="HDF" Precision="8">' + filename + ':/p</DataItem>'+"\n")
XMFfile.write (r'</Attribute>'+"\n")
if tmp.name == "phid":
XMFfile.write (r'<Attribute AttributeType="Scalar" Center="Node" Name="phid">'+"\n")
XMFfile.write (r' <DataItem DataType="Float" Dimensions="' +str(len(tmp[:]))+ '" Format="HDF" Precision="8">' + filename + ':/phid</DataItem>'+"\n")
XMFfile.write (r'</Attribute>'+"\n")
f1.close()
XMFfile.write(' </Grid>'+"\n")
XMFfile.write(' </Grid>'+"\n")
XMFfile.write(' </Domain>'+"\n")
XMFfile.write(' </Xdmf>'+"\n")
XMFfile.close()
if __name__ == '__main__':
from optparse import OptionParser
usage = ""
parser = OptionParser(usage=usage)
parser.add_option("-n","--size",
help="number of processors for run",
action="store",
type="int",
dest="size",
default=1)
parser.add_option("-s","--stride",
help="stride for solution output",
action="store",
type="int",
dest="stride",
default=0)
parser.add_option("-t","--finaltime",
help="finaltime",
action="store",
type="int",
dest="finaltime",
default=1000)
parser.add_option("-f","--filebase_flow",
help="base name for storage files",
action="store",
type="string",
dest="filebase",
default="solution")
(opts,args) = parser.parse_args()
start = 0
if opts.stride == 0 :
start = opts.finaltime
opts.stride = 1
H5toXMF(opts.filebase,opts.size,start,opts.finaltime,opts.stride)
| [] |
agustinhenze/mibs.snmplabs.com | pysnmp-with-texts/CISCO-TRUSTSEC-POLICY-MIB.py | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | #
# PySNMP MIB module CISCO-TRUSTSEC-POLICY-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-TRUSTSEC-POLICY-MIB
# Produced by pysmi-0.3.4 at Wed May 1 12:14:36 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsIntersection, SingleValueConstraint, ValueSizeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsIntersection", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsUnion")
ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt")
Cisco2KVlanList, CiscoVrfName = mibBuilder.importSymbols("CISCO-TC", "Cisco2KVlanList", "CiscoVrfName")
CtsAclNameOrEmpty, CtsAclList, CtsGenerationId, CtsAclName, CtsAclListOrEmpty, CtsSgaclMonitorMode, CtsSecurityGroupTag = mibBuilder.importSymbols("CISCO-TRUSTSEC-TC-MIB", "CtsAclNameOrEmpty", "CtsAclList", "CtsGenerationId", "CtsAclName", "CtsAclListOrEmpty", "CtsSgaclMonitorMode", "CtsSecurityGroupTag")
ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex")
InetAddressType, InetAddress, InetAddressPrefixLength = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressType", "InetAddress", "InetAddressPrefixLength")
VlanIndex, = mibBuilder.importSymbols("Q-BRIDGE-MIB", "VlanIndex")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
NotificationGroup, ObjectGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ObjectGroup", "ModuleCompliance")
Counter32, Unsigned32, Bits, ObjectIdentity, iso, Counter64, Gauge32, Integer32, TimeTicks, MibIdentifier, ModuleIdentity, NotificationType, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "Unsigned32", "Bits", "ObjectIdentity", "iso", "Counter64", "Gauge32", "Integer32", "TimeTicks", "MibIdentifier", "ModuleIdentity", "NotificationType", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress")
DisplayString, StorageType, TruthValue, RowStatus, DateAndTime, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "StorageType", "TruthValue", "RowStatus", "DateAndTime", "TextualConvention")
ciscoTrustSecPolicyMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 713))
ciscoTrustSecPolicyMIB.setRevisions(('2012-12-19 00:00', '2009-11-06 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: ciscoTrustSecPolicyMIB.setRevisionsDescriptions(('Added following OBJECT-GROUP: - ctspNotifCtrlGroup - ctspNotifGroup - ctspNotifInfoGroup - ctspIfSgtMappingGroup - ctspVlanSgtMappingGroup - ctspSgtCachingGroup - ctspSgaclMonitorGroup - ctspSgaclMonitorStatisticGroup Added new compliance - ciscoTrustSecPolicyMIBCompliances Modified ctspIpSgtSource to add l3if(6), vlan(7), caching(8).', 'Initial version of this MIB module.',))
if mibBuilder.loadTexts: ciscoTrustSecPolicyMIB.setLastUpdated('201212190000Z')
if mibBuilder.loadTexts: ciscoTrustSecPolicyMIB.setOrganization('Cisco Systems, Inc.')
if mibBuilder.loadTexts: ciscoTrustSecPolicyMIB.setContactInfo('Cisco Systems Customer Service Postal: 170 W Tasman Drive San Jose, CA 95134 USA Tel: +1 800 553-NETS E-mail: [email protected]')
if mibBuilder.loadTexts: ciscoTrustSecPolicyMIB.setDescription('This MIB module defines managed objects that facilitate the management of various policies within the Cisco Trusted Security (TrustSec) infrastructure. The information available through this MIB includes: o Device and interface level configuration for enabling SGACL (Security Group Access Control List) enforcement on Layer2/3 traffic. o Administrative and operational SGACL mapping to Security Group Tag (SGT). o Various statistics counters for traffic subject to SGACL enforcement. o TrustSec policies with respect to peer device. o Interface level configuration for enabling the propagation of SGT along with the Layer 3 traffic in portions of network which does not have the capability to support TrustSec feature. o TrustSec policies with respect to SGT propagation with Layer 3 traffic. The following terms are used throughout this MIB: VRF: Virtual Routing and Forwarding. SGACL: Security Group Access Control List. ACE: Access Control Entries. SXP: SGT Propagation Protocol. SVI: Switch Virtual Interface. IPM: Identity Port Mapping. SGT (Security Group Tag) is a unique 16 bits value assigned to every security group and used by network devices to enforce SGACL. Peer is another device connected to the local device on the other side of a TrustSec link. Default Policy: Policy applied to traffic when there is no explicit policy between the SGT associated with the originator of the traffic and the SGT associated with the destination of the traffic.')
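# Illustrative sketch (not part of the generated module): once this MIB is
# loaded, an object defined below such as ctspSgaclEnforcementEnable can be
# read with pysnmp's high-level API. The agent address and community string
# are placeholder assumptions.
#
#   from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                             ContextData, ObjectType, ObjectIdentity, getCmd)
#   errorIndication, errorStatus, errorIndex, varBinds = next(getCmd(
#       SnmpEngine(), CommunityData('public'),
#       UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
#       ObjectType(ObjectIdentity('CISCO-TRUSTSEC-POLICY-MIB',
#                                 'ctspSgaclEnforcementEnable', 0))))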
ciscoTrustSecPolicyMIBNotifs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 713, 0))
ciscoTrustSecPolicyMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 713, 1))
ciscoTrustSecPolicyMIBConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 713, 2))
ctspSgacl = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1))
ctspPeerPolicy = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 2))
ctspLayer3Transport = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 3))
ctspIpSgtMappings = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 4))
ctspSgtPolicy = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 5))
ctspIfSgtMappings = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 6))
ctspVlanSgtMappings = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 7))
ctspSgtCaching = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 8))
ctspNotifsControl = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 9))
ctspNotifsOnlyInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 10))
ctspSgaclGlobals = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 1))
ctspSgaclMappings = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2))
ctspSgaclStatistics = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 3))
ctspSgaclEnforcementEnable = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("none", 1), ("l3Only", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctspSgaclEnforcementEnable.setStatus('current')
if mibBuilder.loadTexts: ctspSgaclEnforcementEnable.setDescription("This object specifies whether SGACL enforcement for all Layer 3 interfaces (excluding SVIs) is enabled at the managed system. 'none' indicates that SGACL enforcement for all Layer 3 interfaces (excluding SVIs) is disabled. 'l3Only' indicates that SGACL enforcement is enabled on every TrustSec capable Layer3 interface (excluding SVIs) in the device.")
ctspSgaclIpv4DropNetflowMonitor = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 1, 2), SnmpAdminString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctspSgaclIpv4DropNetflowMonitor.setStatus('current')
if mibBuilder.loadTexts: ctspSgaclIpv4DropNetflowMonitor.setDescription('This object specifies an existing flexible netflow monitor name used to collect and export the IPv4 traffic dropped packets statistics due to SGACL enforcement. The zero-length string indicates that no such netflow monitor is configured in the device.')
ctspSgaclIpv6DropNetflowMonitor = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 1, 3), SnmpAdminString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctspSgaclIpv6DropNetflowMonitor.setStatus('current')
if mibBuilder.loadTexts: ctspSgaclIpv6DropNetflowMonitor.setDescription('This object specifies an existing flexible netflow monitor name used to collect and export the IPv6 traffic dropped packets statistics due to SGACL enforcement. The zero-length string indicates that no such netflow monitor is configured in the device.')
ctspVlanConfigTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 1, 4), )
if mibBuilder.loadTexts: ctspVlanConfigTable.setStatus('current')
if mibBuilder.loadTexts: ctspVlanConfigTable.setDescription('This table lists the SGACL enforcement for Layer 2 and Layer 3 switched packet in a VLAN as well as VRF information for VLANs in the device.')
ctspVlanConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 1, 4, 1), ).setIndexNames((0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspVlanConfigIndex"))
if mibBuilder.loadTexts: ctspVlanConfigEntry.setStatus('current')
if mibBuilder.loadTexts: ctspVlanConfigEntry.setDescription('Each row contains the SGACL enforcement information for Layer 2 and Layer 3 switched packets in a VLAN identified by its VlanIndex value. Entry in this table is populated for VLANs which contains SGACL enforcement or VRF configuration.')
ctspVlanConfigIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 1, 4, 1, 1), VlanIndex())
if mibBuilder.loadTexts: ctspVlanConfigIndex.setStatus('current')
if mibBuilder.loadTexts: ctspVlanConfigIndex.setDescription('This object indicates the VLAN-ID of this VLAN.')
ctspVlanConfigSgaclEnforcement = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 1, 4, 1, 2), TruthValue()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ctspVlanConfigSgaclEnforcement.setStatus('current')
if mibBuilder.loadTexts: ctspVlanConfigSgaclEnforcement.setDescription("This object specifies the configured SGACL enforcement status for this VLAN i.e., 'true' = enabled and 'false' = disabled.")
ctspVlanSviActive = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 1, 4, 1, 3), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspVlanSviActive.setStatus('current')
if mibBuilder.loadTexts: ctspVlanSviActive.setDescription("This object indicates if there is an active SVI associated with this VLAN. 'true' indicates that there is an active SVI associated with this VLAN. and SGACL is enforced for both Layer 2 and Layer 3 switched packets within that VLAN. 'false' indicates that there is no active SVI associated with this VLAN, and SGACL is only enforced for Layer 2 switched packets within that VLAN.")
ctspVlanConfigVrfName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 1, 4, 1, 4), CiscoVrfName()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ctspVlanConfigVrfName.setStatus('current')
if mibBuilder.loadTexts: ctspVlanConfigVrfName.setDescription('This object specifies an existing VRF where this VLAN belongs to. The zero length value indicates this VLAN belongs to the default VRF.')
ctspVlanConfigStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 1, 4, 1, 5), StorageType().clone('volatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ctspVlanConfigStorageType.setStatus('current')
if mibBuilder.loadTexts: ctspVlanConfigStorageType.setDescription('The objects specifies the storage type for this conceptual row.')
ctspVlanConfigRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 1, 4, 1, 6), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ctspVlanConfigRowStatus.setStatus('current')
if mibBuilder.loadTexts: ctspVlanConfigRowStatus.setDescription("The status of this conceptual row entry. This object is used to manage creation and deletion of rows in this table. When this object value is 'active', other writable objects in the same row cannot be modified.")
ctspConfigSgaclMappingTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 1), )
if mibBuilder.loadTexts: ctspConfigSgaclMappingTable.setStatus('current')
if mibBuilder.loadTexts: ctspConfigSgaclMappingTable.setDescription('This table contains the SGACLs information which is applied to unicast IP traffic which carries a source SGT and travels to a destination SGT.')
ctspConfigSgaclMappingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 1, 1), ).setIndexNames((0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspConfigSgaclMappingIpTrafficType"), (0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspConfigSgaclMappingDestSgt"), (0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspConfigSgaclMappingSourceSgt"))
if mibBuilder.loadTexts: ctspConfigSgaclMappingEntry.setStatus('current')
if mibBuilder.loadTexts: ctspConfigSgaclMappingEntry.setDescription('Each row contains the SGACL mapping to source and destination SGT for a certain traffic type as well as status of this instance. A row instance can be created or removed by setting the appropriate value of its RowStatus object.')
ctspConfigSgaclMappingIpTrafficType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ipv4", 1), ("ipv6", 2))))
if mibBuilder.loadTexts: ctspConfigSgaclMappingIpTrafficType.setStatus('current')
if mibBuilder.loadTexts: ctspConfigSgaclMappingIpTrafficType.setDescription('This object indicates the type of the unicast IP traffic carrying the source SGT and travelling to destination SGT and subjected to SGACL enforcement.')
ctspConfigSgaclMappingDestSgt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 1, 1, 2), CtsSecurityGroupTag())
if mibBuilder.loadTexts: ctspConfigSgaclMappingDestSgt.setStatus('current')
if mibBuilder.loadTexts: ctspConfigSgaclMappingDestSgt.setDescription('This object indicates the destination SGT value. Value of zero indicates that the destination SGT is unknown.')
ctspConfigSgaclMappingSourceSgt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 1, 1, 3), CtsSecurityGroupTag())
if mibBuilder.loadTexts: ctspConfigSgaclMappingSourceSgt.setStatus('current')
if mibBuilder.loadTexts: ctspConfigSgaclMappingSourceSgt.setDescription('This object indicates the source SGT value. Value of zero indicates that the source SGT is unknown.')
ctspConfigSgaclMappingSgaclName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 1, 1, 4), CtsAclList()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ctspConfigSgaclMappingSgaclName.setStatus('current')
if mibBuilder.loadTexts: ctspConfigSgaclMappingSgaclName.setDescription('This object specifies the list of existing SGACLs which is administratively configured to apply to unicast IP traffic carrying the source SGT to the destination SGT.')
ctspConfigSgaclMappingStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 1, 1, 5), StorageType().clone('volatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ctspConfigSgaclMappingStorageType.setStatus('current')
if mibBuilder.loadTexts: ctspConfigSgaclMappingStorageType.setDescription('The storage type for this conceptual row.')
ctspConfigSgaclMappingRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 1, 1, 6), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ctspConfigSgaclMappingRowStatus.setStatus('current')
if mibBuilder.loadTexts: ctspConfigSgaclMappingRowStatus.setDescription('This object is used to manage the creation and deletion of rows in this table. ctspConfigSgaclName may be modified at any time.')
ctspConfigSgaclMonitor = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 1, 1, 7), CtsSgaclMonitorMode().clone('off')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ctspConfigSgaclMonitor.setStatus('current')
if mibBuilder.loadTexts: ctspConfigSgaclMonitor.setDescription('This object specifies whether SGACL monitor mode is turned on for the configured SGACL enforced traffic.')
ctspDefConfigIpv4Sgacls = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 2), CtsAclListOrEmpty()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctspDefConfigIpv4Sgacls.setStatus('current')
if mibBuilder.loadTexts: ctspDefConfigIpv4Sgacls.setDescription('This object specifies the SGACLs of the unicast default policy for IPv4 traffic. If there is no SGACL configured for unicast default policy for IPv4 traffic, the value of this object is the zero-length string.')
ctspDefConfigIpv6Sgacls = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 3), CtsAclListOrEmpty()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctspDefConfigIpv6Sgacls.setStatus('current')
if mibBuilder.loadTexts: ctspDefConfigIpv6Sgacls.setDescription('This object specifies the SGACLs of the unicast default policy for IPv6 traffic. If there is no SGACL configured for unicast default policy for IPv6 traffic, the value of this object is the zero-length string.')
ctspDownloadedSgaclMappingTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 4), )
if mibBuilder.loadTexts: ctspDownloadedSgaclMappingTable.setStatus('current')
if mibBuilder.loadTexts: ctspDownloadedSgaclMappingTable.setDescription('This table contains the downloaded SGACLs information applied to unicast IP traffic which carries a source SGT and travels to a destination SGT.')
ctspDownloadedSgaclMappingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 4, 1), ).setIndexNames((0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspDownloadedSgaclDestSgt"), (0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspDownloadedSgaclSourceSgt"), (0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspDownloadedSgaclIndex"))
if mibBuilder.loadTexts: ctspDownloadedSgaclMappingEntry.setStatus('current')
if mibBuilder.loadTexts: ctspDownloadedSgaclMappingEntry.setDescription('Each row contains the downloaded SGACLs mapping. A row instance is added for each pair of <source SGT, destination SGT> which contains SGACL that is dynamically downloaded from ACS server.')
ctspDownloadedSgaclDestSgt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 4, 1, 1), CtsSecurityGroupTag())
if mibBuilder.loadTexts: ctspDownloadedSgaclDestSgt.setStatus('current')
if mibBuilder.loadTexts: ctspDownloadedSgaclDestSgt.setDescription('This object indicates the destination SGT value. Value of zero indicates that the destination SGT is unknown.')
ctspDownloadedSgaclSourceSgt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 4, 1, 2), CtsSecurityGroupTag())
if mibBuilder.loadTexts: ctspDownloadedSgaclSourceSgt.setStatus('current')
if mibBuilder.loadTexts: ctspDownloadedSgaclSourceSgt.setDescription('This object indicates the source SGT value. Value of zero indicates that the source SGT is unknown.')
ctspDownloadedSgaclIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 4, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)))
if mibBuilder.loadTexts: ctspDownloadedSgaclIndex.setStatus('current')
if mibBuilder.loadTexts: ctspDownloadedSgaclIndex.setDescription('This object identifies the downloaded SGACL which is applied to unicast IP traffic carrying the source SGT to the destination SGT.')
ctspDownloadedSgaclName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 4, 1, 4), CtsAclName()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDownloadedSgaclName.setStatus('current')
if mibBuilder.loadTexts: ctspDownloadedSgaclName.setDescription('This object indicates the name of downloaded SGACL which is applied to unicast IP traffic carrying the source SGT to the destination SGT.')
ctspDownloadedSgaclGenId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 4, 1, 5), CtsGenerationId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDownloadedSgaclGenId.setStatus('current')
if mibBuilder.loadTexts: ctspDownloadedSgaclGenId.setDescription('This object indicates the generation identification of downloaded SGACL which is applied to unicast IP traffic carrying the source SGT to the destination SGT.')
ctspDownloadedIpTrafficType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 4, 1, 6), Bits().clone(namedValues=NamedValues(("ipv4", 0), ("ipv6", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDownloadedIpTrafficType.setStatus('current')
if mibBuilder.loadTexts: ctspDownloadedIpTrafficType.setDescription('This object indicates the type of the unicast IP traffic carrying the source SGT and travelling to destination SGT and subjected to SGACL enforcement by this downloaded default policy.')
ctspDownloadedSgaclMonitor = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 4, 1, 7), CtsSgaclMonitorMode()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDownloadedSgaclMonitor.setStatus('current')
if mibBuilder.loadTexts: ctspDownloadedSgaclMonitor.setDescription('This object indicates whether SGACL monitor mode is turned on for the downloaded SGACL enforced traffic.')
ctspDefDownloadedSgaclMappingTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 5), )
if mibBuilder.loadTexts: ctspDefDownloadedSgaclMappingTable.setStatus('current')
if mibBuilder.loadTexts: ctspDefDownloadedSgaclMappingTable.setDescription('This table contains the downloaded SGACLs information of the default policy applied to unicast IP traffic.')
ctspDefDownloadedSgaclMappingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 5, 1), ).setIndexNames((0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspDefDownloadedSgaclIndex"))
if mibBuilder.loadTexts: ctspDefDownloadedSgaclMappingEntry.setStatus('current')
if mibBuilder.loadTexts: ctspDefDownloadedSgaclMappingEntry.setDescription('Each row contains the downloaded SGACLs mapping. A row instance contains the SGACL information of the default policy dynamically downloaded from ACS server for unicast IP traffic.')
ctspDefDownloadedSgaclIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 5, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)))
if mibBuilder.loadTexts: ctspDefDownloadedSgaclIndex.setStatus('current')
if mibBuilder.loadTexts: ctspDefDownloadedSgaclIndex.setDescription('This object identifies the SGACL of downloaded default policy applied to unicast IP traffic.')
ctspDefDownloadedSgaclName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 5, 1, 2), CtsAclName()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDefDownloadedSgaclName.setStatus('current')
if mibBuilder.loadTexts: ctspDefDownloadedSgaclName.setDescription('This object indicates the name of the SGACL of downloaded default policy applied to unicast IP traffic.')
ctspDefDownloadedSgaclGenId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 5, 1, 3), CtsGenerationId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDefDownloadedSgaclGenId.setStatus('current')
if mibBuilder.loadTexts: ctspDefDownloadedSgaclGenId.setDescription('This object indicates the generation identification of the SGACL of downloaded default policy applied to unicast IP traffic.')
ctspDefDownloadedIpTrafficType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 5, 1, 4), Bits().clone(namedValues=NamedValues(("ipv4", 0), ("ipv6", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDefDownloadedIpTrafficType.setStatus('current')
if mibBuilder.loadTexts: ctspDefDownloadedIpTrafficType.setDescription('This object indicates the type of the IP traffic subjected to SGACL enforcement by this downloaded default policy.')
ctspDefDownloadedSgaclMonitor = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 5, 1, 5), CtsSgaclMonitorMode()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDefDownloadedSgaclMonitor.setStatus('current')
if mibBuilder.loadTexts: ctspDefDownloadedSgaclMonitor.setDescription('This object indicates whether SGACL monitor mode is turned on for the default downloaded SGACL enforced traffic.')
ctspOperSgaclMappingTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 6), )
if mibBuilder.loadTexts: ctspOperSgaclMappingTable.setStatus('current')
if mibBuilder.loadTexts: ctspOperSgaclMappingTable.setDescription('This table contains the operational SGACLs information applied to unicast IP traffic which carries a source SGT and travels to a destination SGT.')
ctspOperSgaclMappingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 6, 1), ).setIndexNames((0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspOperIpTrafficType"), (0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspOperSgaclDestSgt"), (0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspOperSgaclSourceSgt"), (0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspOperSgaclIndex"))
if mibBuilder.loadTexts: ctspOperSgaclMappingEntry.setStatus('current')
if mibBuilder.loadTexts: ctspOperSgaclMappingEntry.setDescription('Each row contains the operational SGACLs mapping. A row instance is added for each pair of <source SGT, destination SGT> which contains the SGACL that either statically configured at the device or dynamically downloaded from ACS server.')
ctspOperIpTrafficType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 6, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ipv4", 1), ("ipv6", 2))))
if mibBuilder.loadTexts: ctspOperIpTrafficType.setStatus('current')
if mibBuilder.loadTexts: ctspOperIpTrafficType.setDescription('This object indicates the type of the unicast IP traffic carrying the source SGT and travelling to destination SGT and subjected to SGACL enforcement.')
ctspOperSgaclDestSgt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 6, 1, 2), CtsSecurityGroupTag())
if mibBuilder.loadTexts: ctspOperSgaclDestSgt.setStatus('current')
if mibBuilder.loadTexts: ctspOperSgaclDestSgt.setDescription('This object indicates the destination SGT value. Value of zero indicates that the destination SGT is unknown.')
ctspOperSgaclSourceSgt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 6, 1, 3), CtsSecurityGroupTag())
if mibBuilder.loadTexts: ctspOperSgaclSourceSgt.setStatus('current')
if mibBuilder.loadTexts: ctspOperSgaclSourceSgt.setDescription('This object indicates the source SGT value. Value of zero indicates that the source SGT is unknown.')
ctspOperSgaclIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 6, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)))
if mibBuilder.loadTexts: ctspOperSgaclIndex.setStatus('current')
if mibBuilder.loadTexts: ctspOperSgaclIndex.setDescription('This object identifies the SGACL operationally applied to unicast IP traffic carrying the source SGT to the destination SGT.')
ctspOperationalSgaclName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 6, 1, 5), CtsAclName()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspOperationalSgaclName.setStatus('current')
if mibBuilder.loadTexts: ctspOperationalSgaclName.setDescription('This object indicates the name of the SGACL operationally applied to unicast IP traffic carrying the source SGT to the destination SGT.')
ctspOperationalSgaclGenId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 6, 1, 6), CtsGenerationId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspOperationalSgaclGenId.setStatus('current')
if mibBuilder.loadTexts: ctspOperationalSgaclGenId.setDescription('This object indicates the generation identification of the SGACL operationally applied to unicast IP traffic carrying the source SGT to the destination SGT.')
ctspOperSgaclMappingSource = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 6, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("configured", 1), ("downloaded", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspOperSgaclMappingSource.setStatus('current')
if mibBuilder.loadTexts: ctspOperSgaclMappingSource.setDescription("This object indicates the source of SGACL mapping for the SGACL operationally applied to unicast IP traffic carrying the source SGT to the destination SGT. 'downloaded' indicates that the mapping is downloaded from ACS server. 'configured' indicates that the mapping is locally configured in the device.")
ctspOperSgaclConfigSource = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 6, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("configured", 1), ("downloaded", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspOperSgaclConfigSource.setStatus('current')
if mibBuilder.loadTexts: ctspOperSgaclConfigSource.setDescription("This object indicates the source of SGACL creation for this SGACL. 'configured' indicates that the SGACL is locally configured in the local device. 'downloaded' indicates that the SGACL is created at ACS server and downloaded to the local device.")
ctspOperSgaclMonitor = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 6, 1, 9), CtsSgaclMonitorMode()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspOperSgaclMonitor.setStatus('current')
if mibBuilder.loadTexts: ctspOperSgaclMonitor.setDescription('This object indicates whether SGACL monitor mode is turned on for the SGACL enforced traffic.')
ctspDefOperSgaclMappingTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 7), )
if mibBuilder.loadTexts: ctspDefOperSgaclMappingTable.setStatus('current')
if mibBuilder.loadTexts: ctspDefOperSgaclMappingTable.setDescription('This table contains the operational SGACLs information of the default policy applied to unicast IP traffic.')
ctspDefOperSgaclMappingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 7, 1), ).setIndexNames((0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspDefOperIpTrafficType"), (0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspDefOperSgaclIndex"))
if mibBuilder.loadTexts: ctspDefOperSgaclMappingEntry.setStatus('current')
if mibBuilder.loadTexts: ctspDefOperSgaclMappingEntry.setDescription('A row instance contains the SGACL information of the default policy which is either statically configured at the device or dynamically downloaded from ACS server for unicast IP traffic.')
ctspDefOperIpTrafficType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 7, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ipv4", 1), ("ipv6", 2))))
if mibBuilder.loadTexts: ctspDefOperIpTrafficType.setStatus('current')
if mibBuilder.loadTexts: ctspDefOperIpTrafficType.setDescription('This object indicates the type of the unicast IP traffic subjected to default policy enforcement.')
ctspDefOperSgaclIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 7, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)))
if mibBuilder.loadTexts: ctspDefOperSgaclIndex.setStatus('current')
if mibBuilder.loadTexts: ctspDefOperSgaclIndex.setDescription('This object identifies the SGACL of default policy operationally applied to unicast IP traffic.')
ctspDefOperationalSgaclName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 7, 1, 3), CtsAclName()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDefOperationalSgaclName.setStatus('current')
if mibBuilder.loadTexts: ctspDefOperationalSgaclName.setDescription('This object indicates the name of the SGACL of default policy operationally applied to unicast IP traffic.')
ctspDefOperationalSgaclGenId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 7, 1, 4), CtsGenerationId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDefOperationalSgaclGenId.setStatus('current')
if mibBuilder.loadTexts: ctspDefOperationalSgaclGenId.setDescription('This object indicates the generation identification of the SGACL of default policy operationally applied to unicast IP traffic.')
ctspDefOperSgaclMappingSource = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 7, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("configured", 1), ("downloaded", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDefOperSgaclMappingSource.setStatus('current')
if mibBuilder.loadTexts: ctspDefOperSgaclMappingSource.setDescription("This object indicates the source of SGACL mapping for the SGACL of default policy operationally applied to unicast IP traffic. 'downloaded' indicates that the mapping is downloaded from ACS server. 'configured' indicates that the mapping is locally configured in the device.")
ctspDefOperSgaclConfigSource = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 7, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("configured", 1), ("downloaded", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDefOperSgaclConfigSource.setStatus('current')
if mibBuilder.loadTexts: ctspDefOperSgaclConfigSource.setDescription("This object indicates the source of SGACL creation for the SGACL of default policy operationally applied to unicast IP traffic. 'downloaded' indicates that the SGACL is created at ACS server and downloaded to the local device. 'configured' indicates that the SGACL is locally configured in the local device.")
ctspDefOperSgaclMonitor = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 7, 1, 7), CtsSgaclMonitorMode()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDefOperSgaclMonitor.setStatus('current')
if mibBuilder.loadTexts: ctspDefOperSgaclMonitor.setDescription('This object indicates whether SGACL monitor mode is turned on for the SGACL of default policy enforced traffic.')
ctspDefConfigIpv4SgaclsMonitor = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 8), CtsSgaclMonitorMode()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctspDefConfigIpv4SgaclsMonitor.setStatus('current')
if mibBuilder.loadTexts: ctspDefConfigIpv4SgaclsMonitor.setDescription('This object specifies whether SGACL monitor mode is turned on for the default configured SGACL enforced Ipv4 traffic.')
ctspDefConfigIpv6SgaclsMonitor = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 9), CtsSgaclMonitorMode()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctspDefConfigIpv6SgaclsMonitor.setStatus('current')
if mibBuilder.loadTexts: ctspDefConfigIpv6SgaclsMonitor.setDescription('This object specifies whether SGACL monitor mode is turned on for the default configured SGACL enforced Ipv6 traffic.')
ctspSgaclMonitorEnable = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 2, 10), CtsSgaclMonitorMode()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctspSgaclMonitorEnable.setStatus('current')
if mibBuilder.loadTexts: ctspSgaclMonitorEnable.setDescription('This object specifies whether SGACL monitor mode is turned on for the entire system. It has precedence than the per SGACL ctspConfigSgaclMonitor control. It could act as safety mechanism to turn off monitor in case the monitor feature impact system performance.')
ctspSgtStatsTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 3, 1), )
if mibBuilder.loadTexts: ctspSgtStatsTable.setStatus('current')
if mibBuilder.loadTexts: ctspSgtStatsTable.setDescription('This table describes SGACL statistics counters per a pair of <source SGT, destination SGT> that is capable of providing this information.')
ctspSgtStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 3, 1, 1), ).setIndexNames((0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspStatsIpTrafficType"), (0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspStatsDestSgt"), (0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspStatsSourceSgt"))
if mibBuilder.loadTexts: ctspSgtStatsEntry.setStatus('current')
if mibBuilder.loadTexts: ctspSgtStatsEntry.setDescription('Each row contains the SGACL statistics related to IPv4 or IPv6 packets carrying the source SGT travelling to the destination SGT and subjected to SGACL enforcement.')
ctspStatsIpTrafficType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ipv4", 1), ("ipv6", 2))))
if mibBuilder.loadTexts: ctspStatsIpTrafficType.setStatus('current')
if mibBuilder.loadTexts: ctspStatsIpTrafficType.setDescription('This object indicates the type of the unicast IP traffic carrying the source SGT and travelling to destination SGT and subjected to SGACL enforcement.')
ctspStatsDestSgt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 3, 1, 1, 2), CtsSecurityGroupTag())
if mibBuilder.loadTexts: ctspStatsDestSgt.setStatus('current')
if mibBuilder.loadTexts: ctspStatsDestSgt.setDescription('This object indicates the destination SGT value. Value of zero indicates that the destination SGT is unknown.')
ctspStatsSourceSgt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 3, 1, 1, 3), CtsSecurityGroupTag())
if mibBuilder.loadTexts: ctspStatsSourceSgt.setStatus('current')
if mibBuilder.loadTexts: ctspStatsSourceSgt.setDescription('This object indicates the source SGT value. Value of zero indicates that the source SGT is unknown.')
ctspStatsIpSwDropPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 3, 1, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspStatsIpSwDropPkts.setStatus('current')
if mibBuilder.loadTexts: ctspStatsIpSwDropPkts.setDescription('This object indicates the number of software-forwarded IP packets which are dropped by SGACL.')
ctspStatsIpHwDropPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 3, 1, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspStatsIpHwDropPkts.setStatus('current')
if mibBuilder.loadTexts: ctspStatsIpHwDropPkts.setDescription('This object indicates the number of hardware-forwarded IP packets which are dropped by SGACL.')
ctspStatsIpSwPermitPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 3, 1, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspStatsIpSwPermitPkts.setStatus('current')
if mibBuilder.loadTexts: ctspStatsIpSwPermitPkts.setDescription('This object indicates the number of software-forwarded IP packets which are permitted by SGACL.')
ctspStatsIpHwPermitPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 3, 1, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspStatsIpHwPermitPkts.setStatus('current')
if mibBuilder.loadTexts: ctspStatsIpHwPermitPkts.setDescription('This object indicates the number of hardware-forwarded IP packets which are permitted by SGACL.')
ctspStatsIpSwMonitorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 3, 1, 1, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspStatsIpSwMonitorPkts.setStatus('current')
if mibBuilder.loadTexts: ctspStatsIpSwMonitorPkts.setDescription('This object indicates the number of software-forwarded IP packets which are SGACL enforced & monitored.')
ctspStatsIpHwMonitorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 3, 1, 1, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspStatsIpHwMonitorPkts.setStatus('current')
if mibBuilder.loadTexts: ctspStatsIpHwMonitorPkts.setDescription('This object indicates the number of hardware-forwarded IP packets which are SGACL enforced & monitored.')
ctspDefStatsTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 3, 2), )
if mibBuilder.loadTexts: ctspDefStatsTable.setStatus('current')
if mibBuilder.loadTexts: ctspDefStatsTable.setDescription('This table describes statistics counters for unicast IP traffic subjected to default unicast policy.')
ctspDefStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 3, 2, 1), ).setIndexNames((0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspDefIpTrafficType"))
if mibBuilder.loadTexts: ctspDefStatsEntry.setStatus('current')
if mibBuilder.loadTexts: ctspDefStatsEntry.setDescription('Each row contains the statistics counter for each IP traffic type.')
ctspDefIpTrafficType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 3, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ipv4", 1), ("ipv6", 2))))
if mibBuilder.loadTexts: ctspDefIpTrafficType.setStatus('current')
if mibBuilder.loadTexts: ctspDefIpTrafficType.setDescription('This object indicates the type of the IP traffic subjected to default unicast policy enforcement.')
ctspDefIpSwDropPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 3, 2, 1, 2), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDefIpSwDropPkts.setStatus('current')
if mibBuilder.loadTexts: ctspDefIpSwDropPkts.setDescription('This object indicates the number of software-forwarded IP packets which are dropped by default unicast policy.')
ctspDefIpHwDropPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 3, 2, 1, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDefIpHwDropPkts.setStatus('current')
if mibBuilder.loadTexts: ctspDefIpHwDropPkts.setDescription('This object indicates the number of hardware-forwarded IP packets which are dropped by default unicast policy.')
ctspDefIpSwPermitPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 3, 2, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDefIpSwPermitPkts.setStatus('current')
if mibBuilder.loadTexts: ctspDefIpSwPermitPkts.setDescription('This object indicates the number of software-forwarded IP packets which are permitted by default unicast policy.')
ctspDefIpHwPermitPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 3, 2, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDefIpHwPermitPkts.setStatus('current')
if mibBuilder.loadTexts: ctspDefIpHwPermitPkts.setDescription('This object indicates the number of hardware-forwarded IP packets which are permitted by default unicast policy.')
ctspDefIpSwMonitorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 3, 2, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDefIpSwMonitorPkts.setStatus('current')
if mibBuilder.loadTexts: ctspDefIpSwMonitorPkts.setDescription('This object indicates the number of software-forwarded IP packets which are monitored by default unicast policy.')
ctspDefIpHwMonitorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 1, 3, 2, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDefIpHwMonitorPkts.setStatus('current')
if mibBuilder.loadTexts: ctspDefIpHwMonitorPkts.setDescription('This object indicates the number of hardware-forwarded IP packets which are monitored by default unicast policy.')
ctspAllPeerPolicyAction = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("none", 1), ("refresh", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctspAllPeerPolicyAction.setStatus('current')
if mibBuilder.loadTexts: ctspAllPeerPolicyAction.setDescription("This object allows user to specify the action to be taken with respect to all peer policies in the device. When read, this object always returns the value 'none'. 'none' - No operation. 'refresh' - Refresh all peer policies in the device.")
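# ctspAllPeerPolicyAction is an action scalar: writing refresh(2) asks the
# device to refresh every peer policy, while reads always return none(1). A
# hedged sketch of triggering the refresh, assuming pysnmp's hlapi, an SNMPv2c
# write community and this MIB compiled for name resolution; values are placeholders.
def _refresh_all_peer_policies(host, community='private'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, Integer32, setCmd)
    errorIndication, errorStatus, errorIndex, varBinds = next(setCmd(
        SnmpEngine(), CommunityData(community),
        UdpTransportTarget((host, 161)), ContextData(),
        ObjectType(ObjectIdentity('CISCO-TRUSTSEC-POLICY-MIB', 'ctspAllPeerPolicyAction', 0),
                   Integer32(2))))  # 2 == 'refresh'
    return errorIndication or errorStatus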
ctspPeerPolicyTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 2, 2), )
if mibBuilder.loadTexts: ctspPeerPolicyTable.setStatus('current')
if mibBuilder.loadTexts: ctspPeerPolicyTable.setDescription('This table lists the peer policy information for each peer device.')
ctspPeerPolicyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 2, 2, 1), ).setIndexNames((1, "CISCO-TRUSTSEC-POLICY-MIB", "ctspPeerName"))
if mibBuilder.loadTexts: ctspPeerPolicyEntry.setStatus('current')
if mibBuilder.loadTexts: ctspPeerPolicyEntry.setDescription('Each row contains the managed objects for peer policies for each peer device based on its name.')
ctspPeerName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 2, 2, 1, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 128)))
if mibBuilder.loadTexts: ctspPeerName.setStatus('current')
if mibBuilder.loadTexts: ctspPeerName.setDescription('This object uniquely identifies a peer device.')
ctspPeerSgt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 2, 2, 1, 2), CtsSecurityGroupTag()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspPeerSgt.setStatus('current')
if mibBuilder.loadTexts: ctspPeerSgt.setDescription('This object indicates the SGT value of this peer device.')
ctspPeerSgtGenId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 2, 2, 1, 3), CtsGenerationId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspPeerSgtGenId.setStatus('current')
if mibBuilder.loadTexts: ctspPeerSgtGenId.setDescription('This object indicates the generation identification of the SGT value assigned to this peer device.')
ctspPeerTrustState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 2, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("trusted", 1), ("noTrust", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspPeerTrustState.setStatus('current')
if mibBuilder.loadTexts: ctspPeerTrustState.setDescription("This object indicates the TrustSec trust state of this peer device. 'trusted' indicates that this is a trusted peer device. 'noTrust' indicates that this peer device is not trusted.")
ctspPeerPolicyLifeTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 2, 2, 1, 5), Unsigned32()).setUnits('seconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspPeerPolicyLifeTime.setStatus('current')
if mibBuilder.loadTexts: ctspPeerPolicyLifeTime.setDescription('This object indicates the policy life time which provides the time interval during which the peer policy is valid.')
ctspPeerPolicyLastUpdate = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 2, 2, 1, 6), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspPeerPolicyLastUpdate.setStatus('current')
if mibBuilder.loadTexts: ctspPeerPolicyLastUpdate.setDescription('This object indicates the time when this peer policy is last updated.')
ctspPeerPolicyAction = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 2, 2, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("none", 1), ("refresh", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctspPeerPolicyAction.setStatus('current')
if mibBuilder.loadTexts: ctspPeerPolicyAction.setDescription("This object allows user to specify the action to be taken with this peer policy. When read, this object always returns the value 'none'. 'none' - No operation. 'refresh' - Refresh this peer policy.")
ctspLayer3PolicyTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 3, 1), )
if mibBuilder.loadTexts: ctspLayer3PolicyTable.setStatus('current')
if mibBuilder.loadTexts: ctspLayer3PolicyTable.setDescription('This table describes Layer 3 transport policy for IP traffic regarding SGT propagation.')
ctspLayer3PolicyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 3, 1, 1), ).setIndexNames((0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspLayer3PolicyIpTrafficType"), (0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspLayer3PolicyType"))
if mibBuilder.loadTexts: ctspLayer3PolicyEntry.setStatus('current')
if mibBuilder.loadTexts: ctspLayer3PolicyEntry.setDescription('Each row contains the Layer 3 transport policies per IP traffic type per policy type.')
ctspLayer3PolicyIpTrafficType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ipv4", 1), ("ipv6", 2))))
if mibBuilder.loadTexts: ctspLayer3PolicyIpTrafficType.setStatus('current')
if mibBuilder.loadTexts: ctspLayer3PolicyIpTrafficType.setDescription("This object indicates the type of the IP traffic affected by Layer-3 transport policy. 'ipv4' indicates that the affected traffic is IPv4 traffic. 'ipv6' indicates that the affected traffic is IPv6 traffic.")
ctspLayer3PolicyType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 3, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("permit", 1), ("exception", 2))))
if mibBuilder.loadTexts: ctspLayer3PolicyType.setStatus('current')
if mibBuilder.loadTexts: ctspLayer3PolicyType.setDescription("This object indicates the type of the Layer-3 transport policy affecting IP traffic regarding SGT propagation. 'permit' indicates that the transport policy is used to classify Layer-3 traffic which is subject to SGT propagation. 'exception' indicates that the transport policy is used to classify Layer-3 traffic which is NOT subject to SGT propagation.")
ctspLayer3PolicyLocalConfig = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 3, 1, 1, 3), CtsAclNameOrEmpty()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctspLayer3PolicyLocalConfig.setStatus('current')
if mibBuilder.loadTexts: ctspLayer3PolicyLocalConfig.setDescription('This object specifies the name of an ACL that is administratively configured to classify Layer3 traffic. Zero-length string indicates there is no such configured policy.')
ctspLayer3PolicyDownloaded = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 3, 1, 1, 4), CtsAclNameOrEmpty()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspLayer3PolicyDownloaded.setStatus('current')
if mibBuilder.loadTexts: ctspLayer3PolicyDownloaded.setDescription('This object specifies the name of an ACL that is downloaded from policy server to classify Layer3 traffic. Zero-length string indicates there is no such downloaded policy.')
ctspLayer3PolicyOperational = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 3, 1, 1, 5), CtsAclNameOrEmpty()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspLayer3PolicyOperational.setStatus('current')
if mibBuilder.loadTexts: ctspLayer3PolicyOperational.setDescription('This object specifies the name of an operational ACL currently used to classify Layer3 traffic. Zero-length string indicates there is no such policy in effect.')
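# The three columns above distinguish the locally configured, downloaded and
# operational ACL names per (traffic type, policy type) index pair. A small
# read-only sketch fetching the operational ACLs for the IPv4 permit and
# exception policies, assuming pysnmp's hlapi; the numeric index values follow
# the enumerations defined above (ipv4=1, permit=1, exception=2).
def _get_ipv4_layer3_operational_acls(host, community='public'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, getCmd)
    errorIndication, errorStatus, errorIndex, varBinds = next(getCmd(
        SnmpEngine(), CommunityData(community),
        UdpTransportTarget((host, 161)), ContextData(),
        ObjectType(ObjectIdentity('CISCO-TRUSTSEC-POLICY-MIB', 'ctspLayer3PolicyOperational', 1, 1)),
        ObjectType(ObjectIdentity('CISCO-TRUSTSEC-POLICY-MIB', 'ctspLayer3PolicyOperational', 1, 2))))
    return varBinds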
ctspIfL3PolicyConfigTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 3, 2), )
if mibBuilder.loadTexts: ctspIfL3PolicyConfigTable.setStatus('current')
if mibBuilder.loadTexts: ctspIfL3PolicyConfigTable.setDescription('This table lists the interfaces which support Layer3 Transport policy.')
ctspIfL3PolicyConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 3, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: ctspIfL3PolicyConfigEntry.setStatus('current')
if mibBuilder.loadTexts: ctspIfL3PolicyConfigEntry.setDescription('Each row contains managed objects for Layer3 Transport on interface capable of providing this information.')
ctspIfL3Ipv4PolicyEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 3, 2, 1, 1), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctspIfL3Ipv4PolicyEnabled.setStatus('current')
if mibBuilder.loadTexts: ctspIfL3Ipv4PolicyEnabled.setDescription("This object specifies whether the Layer3 Transport policies will be applied on this interface for egress IPv4 traffic. 'true' indicates that Layer3 permit and exception policy will be applied at this interface for egress IPv4 traffic. 'false' indicates that Layer3 permit and exception policy will not be applied at this interface for egress IPv4 traffic.")
ctspIfL3Ipv6PolicyEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 3, 2, 1, 2), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctspIfL3Ipv6PolicyEnabled.setStatus('current')
if mibBuilder.loadTexts: ctspIfL3Ipv6PolicyEnabled.setDescription("This object specifies whether the Layer3 Transport policies will be applied on this interface for egress IPv6 traffic. 'true' indicates that Layer3 permit and exception policy will be applied at this interface for egress IPv6 traffic. 'false' indicates that Layer3 permit and exception policy will not be applied at this interface for egress IPv6 traffic.")
ctspIpSgtMappingTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 4, 1), )
if mibBuilder.loadTexts: ctspIpSgtMappingTable.setStatus('current')
if mibBuilder.loadTexts: ctspIpSgtMappingTable.setDescription('This table contains the IP-to-SGT mapping information in the device.')
ctspIpSgtMappingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 4, 1, 1), ).setIndexNames((0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspIpSgtVrfName"), (0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspIpSgtAddressType"), (0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspIpSgtIpAddress"), (0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspIpSgtAddressLength"))
if mibBuilder.loadTexts: ctspIpSgtMappingEntry.setStatus('current')
if mibBuilder.loadTexts: ctspIpSgtMappingEntry.setDescription('Each row contains the IP-to-SGT mapping and status of this instance. Entry in this table is either populated automatically by the device or manually configured by a user. A manually configured row instance can be created or removed by setting the appropriate value of its RowStatus object.')
ctspIpSgtVrfName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 4, 1, 1, 1), CiscoVrfName())
if mibBuilder.loadTexts: ctspIpSgtVrfName.setStatus('current')
if mibBuilder.loadTexts: ctspIpSgtVrfName.setDescription('This object indicates the VRF where IP-SGT mapping belongs to. The zero length value indicates the default VRF.')
ctspIpSgtAddressType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 4, 1, 1, 2), InetAddressType())
if mibBuilder.loadTexts: ctspIpSgtAddressType.setStatus('current')
if mibBuilder.loadTexts: ctspIpSgtAddressType.setDescription('This object indicates the type of Internet address.')
ctspIpSgtIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 4, 1, 1, 3), InetAddress())
if mibBuilder.loadTexts: ctspIpSgtIpAddress.setStatus('current')
if mibBuilder.loadTexts: ctspIpSgtIpAddress.setDescription('This object indicates an Internet address. The type of this address is determined by the value of ctspIpSgtAddressType object.')
ctspIpSgtAddressLength = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 4, 1, 1, 4), InetAddressPrefixLength())
if mibBuilder.loadTexts: ctspIpSgtAddressLength.setStatus('current')
if mibBuilder.loadTexts: ctspIpSgtAddressLength.setDescription('This object indicates the length of an Internet address prefix.')
ctspIpSgtValue = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 4, 1, 1, 5), CtsSecurityGroupTag()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ctspIpSgtValue.setStatus('current')
if mibBuilder.loadTexts: ctspIpSgtValue.setDescription('This object specifies the SGT value assigned to an Internet address.')
ctspIpSgtSource = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 4, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("configured", 1), ("arp", 2), ("localAuthenticated", 3), ("sxp", 4), ("internal", 5), ("l3if", 6), ("vlan", 7), ("caching", 8)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ctspIpSgtSource.setStatus('current')
if mibBuilder.loadTexts: ctspIpSgtSource.setDescription("This object indicates the source of the mapping. 'configured' indicates that the mapping is manually configured by user. 'arp' indicates that the mapping is dynamically learnt from tagged ARP replies. 'localAuthenticated' indicates that the mapping is dynamically learnt from the device authentication of a host. 'sxp' indicates that the mapping is dynamically learnt from SXP (SGT Propagation Protocol). 'internal' indicates that the mapping is automatically created by the device between the device IP addresses and the device own SGT. 'l3if' indicates that Interface-SGT mapping is configured by user. 'vlan' indicates that Vlan-SGT mapping is configured by user. 'cached' indicates that sgt mapping is cached. Only 'configured' value is accepted when setting this object.")
ctspIpSgtStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 4, 1, 1, 7), StorageType().clone('volatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ctspIpSgtStorageType.setStatus('current')
if mibBuilder.loadTexts: ctspIpSgtStorageType.setDescription('The storage type for this conceptual row.')
ctspIpSgtRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 4, 1, 1, 8), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ctspIpSgtRowStatus.setStatus('current')
if mibBuilder.loadTexts: ctspIpSgtRowStatus.setDescription("This object is used to manage the creation and deletion of rows in this table. If this object value is 'active', user cannot modify any writable object in this row. If value of ctspIpSgtSource object in an entry is not 'configured', user cannot change the value of this object.")
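# Rows of ctspIpSgtMappingTable are indexed by (VRF name, address type, address,
# prefix length) and user rows are created through ctspIpSgtRowStatus. A hedged
# sketch of adding a static IPv4-to-SGT binding with RowStatus createAndGo(4),
# assuming pysnmp's hlapi; all argument values are placeholders and the exact
# index value coercion (notably the InetAddress octets) may need adjustment for
# a given agent.
def _add_static_ipv4_sgt_mapping(host, ip, prefix_len, sgt, vrf='', community='private'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Unsigned32, Integer32, setCmd)
    index = (vrf, 1, ip, prefix_len)  # vrf name, addressType ipv4(1), address, length
    return next(setCmd(
        SnmpEngine(), CommunityData(community),
        UdpTransportTarget((host, 161)), ContextData(),
        ObjectType(ObjectIdentity('CISCO-TRUSTSEC-POLICY-MIB', 'ctspIpSgtValue', *index),
                   Unsigned32(sgt)),
        ObjectType(ObjectIdentity('CISCO-TRUSTSEC-POLICY-MIB', 'ctspIpSgtRowStatus', *index),
                   Integer32(4))))  # createAndGo(4)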
ctspAllSgtPolicyAction = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 5, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("none", 1), ("refresh", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctspAllSgtPolicyAction.setStatus('current')
if mibBuilder.loadTexts: ctspAllSgtPolicyAction.setDescription("This object allows user to specify the action to be taken with respect to all SGT policies in the device. When read, this object always returns the value 'none'. 'none' - No operation. 'refresh' - Refresh all SGT policies in the device.")
ctspDownloadedSgtPolicyTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 5, 2), )
if mibBuilder.loadTexts: ctspDownloadedSgtPolicyTable.setStatus('current')
if mibBuilder.loadTexts: ctspDownloadedSgtPolicyTable.setDescription('This table lists the SGT policy information downloaded by the device.')
ctspDownloadedSgtPolicyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 5, 2, 1), ).setIndexNames((0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspDownloadedSgtPolicySgt"))
if mibBuilder.loadTexts: ctspDownloadedSgtPolicyEntry.setStatus('current')
if mibBuilder.loadTexts: ctspDownloadedSgtPolicyEntry.setDescription('Each row contains the managed objects for SGT policies downloaded by the device.')
ctspDownloadedSgtPolicySgt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 5, 2, 1, 1), CtsSecurityGroupTag())
if mibBuilder.loadTexts: ctspDownloadedSgtPolicySgt.setStatus('current')
if mibBuilder.loadTexts: ctspDownloadedSgtPolicySgt.setDescription('This object indicates the SGT value for which the downloaded policy is applied to. Value of zero indicates that the SGT is unknown.')
ctspDownloadedSgtPolicySgtGenId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 5, 2, 1, 2), CtsGenerationId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDownloadedSgtPolicySgtGenId.setStatus('current')
if mibBuilder.loadTexts: ctspDownloadedSgtPolicySgtGenId.setDescription('This object indicates the generation identification of the SGT value denoted by ctspDownloadedSgtPolicySgt object.')
ctspDownloadedSgtPolicyLifeTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 5, 2, 1, 3), Unsigned32()).setUnits('seconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDownloadedSgtPolicyLifeTime.setStatus('current')
if mibBuilder.loadTexts: ctspDownloadedSgtPolicyLifeTime.setDescription('This object indicates the policy life time which provides the time interval during which this downloaded policy is valid.')
ctspDownloadedSgtPolicyLastUpdate = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 5, 2, 1, 4), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDownloadedSgtPolicyLastUpdate.setStatus('current')
if mibBuilder.loadTexts: ctspDownloadedSgtPolicyLastUpdate.setDescription('This object indicates the time when this downloaded SGT policy is last updated.')
ctspDownloadedSgtPolicyAction = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 5, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("none", 1), ("refresh", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctspDownloadedSgtPolicyAction.setStatus('current')
if mibBuilder.loadTexts: ctspDownloadedSgtPolicyAction.setDescription("This object allows user to specify the action to be taken with this downloaded SGT policy. When read, this object always returns the value 'none'. 'none' - No operation. 'refresh' - Refresh this SGT policy.")
ctspDownloadedDefSgtPolicyTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 5, 3), )
if mibBuilder.loadTexts: ctspDownloadedDefSgtPolicyTable.setStatus('current')
if mibBuilder.loadTexts: ctspDownloadedDefSgtPolicyTable.setDescription('This table lists the default SGT policy information downloaded by the device.')
ctspDownloadedDefSgtPolicyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 5, 3, 1), ).setIndexNames((0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspDownloadedDefSgtPolicyType"))
if mibBuilder.loadTexts: ctspDownloadedDefSgtPolicyEntry.setStatus('current')
if mibBuilder.loadTexts: ctspDownloadedDefSgtPolicyEntry.setDescription('Each row contains the managed objects for default SGT policies downloaded by the device.')
ctspDownloadedDefSgtPolicyType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 5, 3, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("unicastDefault", 1))))
if mibBuilder.loadTexts: ctspDownloadedDefSgtPolicyType.setStatus('current')
if mibBuilder.loadTexts: ctspDownloadedDefSgtPolicyType.setDescription("This object indicates the downloaded default SGT policy type. 'unicastDefault' indicates the SGT policy applied to traffic which carries the default unicast SGT.")
ctspDownloadedDefSgtPolicySgtGenId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 5, 3, 1, 2), CtsGenerationId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDownloadedDefSgtPolicySgtGenId.setStatus('current')
if mibBuilder.loadTexts: ctspDownloadedDefSgtPolicySgtGenId.setDescription('This object indicates the generation identification of the downloaded default SGT policy.')
ctspDownloadedDefSgtPolicyLifeTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 5, 3, 1, 3), Unsigned32()).setUnits('seconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDownloadedDefSgtPolicyLifeTime.setStatus('current')
if mibBuilder.loadTexts: ctspDownloadedDefSgtPolicyLifeTime.setDescription('This object indicates the policy life time which provides the time interval during which this download default policy is valid.')
ctspDownloadedDefSgtPolicyLastUpdate = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 5, 3, 1, 4), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspDownloadedDefSgtPolicyLastUpdate.setStatus('current')
if mibBuilder.loadTexts: ctspDownloadedDefSgtPolicyLastUpdate.setDescription('This object indicates the time when this downloaded SGT policy is last updated.')
ctspDownloadedDefSgtPolicyAction = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 5, 3, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("none", 1), ("refresh", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctspDownloadedDefSgtPolicyAction.setStatus('current')
if mibBuilder.loadTexts: ctspDownloadedDefSgtPolicyAction.setDescription("This object allows user to specify the action to be taken with this default downloaded SGT policy. When read, this object always returns the value 'none'. 'none' - No operation. 'refresh' - Refresh this default SGT policy.")
ctspIfSgtMappingTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 6, 1), )
if mibBuilder.loadTexts: ctspIfSgtMappingTable.setStatus('current')
if mibBuilder.loadTexts: ctspIfSgtMappingTable.setDescription('This table contains the Interface-to-SGT mapping configuration information in the device.')
ctspIfSgtMappingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 6, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: ctspIfSgtMappingEntry.setStatus('current')
if mibBuilder.loadTexts: ctspIfSgtMappingEntry.setDescription('Each row contains the SGT mapping configuration of a particular interface. A row instance can be created or removed by setting ctspIfSgtRowStatus.')
ctspIfSgtValue = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 6, 1, 1, 1), CtsSecurityGroupTag()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ctspIfSgtValue.setStatus('current')
if mibBuilder.loadTexts: ctspIfSgtValue.setDescription('This object specifies the SGT value assigned to the interface.')
ctspIfSgName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 6, 1, 1, 2), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ctspIfSgName.setStatus('current')
if mibBuilder.loadTexts: ctspIfSgName.setDescription('This object specifies the Security Group Name assigned to the interface.')
ctspIfSgtStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 6, 1, 1, 3), StorageType().clone('volatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ctspIfSgtStorageType.setStatus('current')
if mibBuilder.loadTexts: ctspIfSgtStorageType.setDescription('The storage type for this conceptual row.')
ctspIfSgtRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 6, 1, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ctspIfSgtRowStatus.setStatus('current')
if mibBuilder.loadTexts: ctspIfSgtRowStatus.setDescription('This object is used to manage the creation and deletion of rows in this table.')

ctspIfSgtMappingInfoTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 6, 2), )
if mibBuilder.loadTexts: ctspIfSgtMappingInfoTable.setStatus('current')
if mibBuilder.loadTexts: ctspIfSgtMappingInfoTable.setDescription('This table contains the Interface-to-SGT mapping status information in the device.')
ctspIfSgtMappingInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 6, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: ctspIfSgtMappingInfoEntry.setStatus('current')
if mibBuilder.loadTexts: ctspIfSgtMappingInfoEntry.setDescription('Each row contains the Interface-to-SGT mapping status of the specified interface.')
ctspL3IPMStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 6, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("disabled", 1), ("active", 2), ("inactive", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctspL3IPMStatus.setStatus('current')
if mibBuilder.loadTexts: ctspL3IPMStatus.setDescription('This object indicates the Layer 3 Identity Port Mapping(IPM) operational mode. disabled - The L3 IPM is not configured. active - The L3 IPM is configured for this interface, and SGT is available. inactive - The L3 IPM is configured for this interface, and SGT is unavailable.')
ctspVlanSgtMappingTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 7, 1), )
if mibBuilder.loadTexts: ctspVlanSgtMappingTable.setStatus('current')
if mibBuilder.loadTexts: ctspVlanSgtMappingTable.setDescription('This table contains the Vlan-SGT mapping information in the device.')
ctspVlanSgtMappingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 7, 1, 1), ).setIndexNames((0, "CISCO-TRUSTSEC-POLICY-MIB", "ctspVlanSgtMappingIndex"))
if mibBuilder.loadTexts: ctspVlanSgtMappingEntry.setStatus('current')
if mibBuilder.loadTexts: ctspVlanSgtMappingEntry.setDescription('Each row contains the SGT mapping configuration of a particular VLAN. A row instance can be created or removed by setting ctspVlanSgtRowStatus.')
ctspVlanSgtMappingIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 7, 1, 1, 1), VlanIndex())
if mibBuilder.loadTexts: ctspVlanSgtMappingIndex.setStatus('current')
if mibBuilder.loadTexts: ctspVlanSgtMappingIndex.setDescription('This object specifies the VLAN-ID which is used as index.')
ctspVlanSgtMapValue = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 7, 1, 1, 2), CtsSecurityGroupTag()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ctspVlanSgtMapValue.setStatus('current')
if mibBuilder.loadTexts: ctspVlanSgtMapValue.setDescription('This object specifies the SGT value assigned to the vlan.')
ctspVlanSgtStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 7, 1, 1, 3), StorageType().clone('volatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ctspVlanSgtStorageType.setStatus('current')
if mibBuilder.loadTexts: ctspVlanSgtStorageType.setDescription('The storage type for this conceptual row.')
ctspVlanSgtRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 7, 1, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ctspVlanSgtRowStatus.setStatus('current')
if mibBuilder.loadTexts: ctspVlanSgtRowStatus.setDescription('This object is used to manage the creation and deletion of rows in this table.')
ctspSgtCachingMode = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 8, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("none", 1), ("standAlone", 2), ("withEnforcement", 3), ("vlan", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctspSgtCachingMode.setStatus('current')
if mibBuilder.loadTexts: ctspSgtCachingMode.setDescription("This object specifies which SGT-caching mode is configured for SGT caching capable interfaces at the managed system. 'none' indicates that sgt-caching for all Layer 3 interfaces (excluding SVIs) is disabled. 'standAlone' indicates that SGT-caching is enabled on every TrustSec capable Layer3 interface (excluding SVIs) in the device. 'withEnforcement' indicates that SGT-caching is enabled on interfaces that have RBAC enforcement enabled. 'vlan' indicates that SGT-caching is enabled on the VLANs specified by ctspSgtCachingVlansFirst2K & ctspSgtCachingVlansSecond2K")
ctspSgtCachingVlansFirst2K = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 8, 2), Cisco2KVlanList()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctspSgtCachingVlansFirst2K.setStatus('current')
if mibBuilder.loadTexts: ctspSgtCachingVlansFirst2K.setDescription('A string of octets containing one bit per VLAN for VLANs 0 to 2047. If the bit corresponding to a VLAN is set to 1, it indicates SGT-caching is enabled on the VLAN. If the bit corresponding to a VLAN is set to 0, it indicates SGT-caching is disabled on the VLAN.')
ctspSgtCachingVlansSecond2K = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 8, 3), Cisco2KVlanList()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctspSgtCachingVlansSecond2K.setStatus('current')
if mibBuilder.loadTexts: ctspSgtCachingVlansSecond2K.setDescription('A string of octets containing one bit per VLAN for VLANs 2048 to 4095. If the bit corresponding to a VLAN is set to 1, it indicates SGT-caching is enabled on the VLAN. If the bit corresponding to a VLAN is set to 0, it indicates SGT-caching is disabled on the VLAN.')
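# The two scalars above encode SGT-caching VLAN membership as one bit per VLAN
# (VLANs 0..2047 and 2048..4095). A small helper turning the two octet strings
# into a list of enabled VLAN IDs; it assumes the raw octets are supplied (e.g.
# OctetString.asOctets()) and the common convention that the most significant
# bit of the first octet maps to the lowest-numbered VLAN of each range.
def _sgt_caching_vlans(first2k, second2k):
    enabled = []
    for base, octets in ((0, first2k), (2048, second2k)):
        for byte_index, value in enumerate(octets):
            for bit in range(8):
                if value & (0x80 >> bit):
                    enabled.append(base + byte_index * 8 + bit)
    return enabled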
ctspPeerPolicyUpdatedNotifEnable = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 9, 1), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctspPeerPolicyUpdatedNotifEnable.setStatus('current')
if mibBuilder.loadTexts: ctspPeerPolicyUpdatedNotifEnable.setDescription("This object specifies whether the system generates ctspPeerPolicyUpdatedNotif. A value of 'false' will prevent ctspPeerPolicyUpdatedNotif notifications from being generated by this system.")
ctspAuthorizationSgaclFailNotifEnable = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 9, 2), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctspAuthorizationSgaclFailNotifEnable.setStatus('current')
if mibBuilder.loadTexts: ctspAuthorizationSgaclFailNotifEnable.setDescription("This object specifies whether this system generates the ctspAuthorizationSgaclFailNotif. A value of 'false' will prevent ctspAuthorizationSgaclFailNotif notifications from being generated by this system.")
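# Both notification-control scalars are TruthValue toggles (true=1, false=2). A
# hedged sketch that enables both notification types in one SET, assuming
# pysnmp's hlapi and a write community; host/community are placeholders.
def _enable_trustsec_policy_notifs(host, community='private'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, Integer32, setCmd)
    return next(setCmd(
        SnmpEngine(), CommunityData(community),
        UdpTransportTarget((host, 161)), ContextData(),
        ObjectType(ObjectIdentity('CISCO-TRUSTSEC-POLICY-MIB', 'ctspPeerPolicyUpdatedNotifEnable', 0),
                   Integer32(1)),
        ObjectType(ObjectIdentity('CISCO-TRUSTSEC-POLICY-MIB', 'ctspAuthorizationSgaclFailNotifEnable', 0),
                   Integer32(1))))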
ctspOldPeerSgt = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 10, 1), CtsSecurityGroupTag()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: ctspOldPeerSgt.setStatus('current')
if mibBuilder.loadTexts: ctspOldPeerSgt.setDescription('This object provides the old sgt value for ctspPeerPolicyUpdatedNotif, i.e., the sgt value before the policy is updated.')
ctspAuthorizationSgaclFailReason = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 10, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("downloadACE", 1), ("downloadSrc", 2), ("downloadDst", 3), ("installPolicy", 4), ("installPolicyStandby", 5), ("installForIP", 6), ("uninstall", 7)))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: ctspAuthorizationSgaclFailReason.setStatus('current')
if mibBuilder.loadTexts: ctspAuthorizationSgaclFailReason.setDescription("This object indicates the reason of failure during SGACL acquisitions, installations and uninstallations, which is associated with ctspAuthorizationSgaclFailNotif; 'downloadACE' - Failure during downloading ACE in SGACL acquisition. 'downloadSrc' - Failure during downloading source list in SGACL acquisition. 'downloadDst' - Failure during downloading destination list in SGACL acquisition. 'installPolicy' - Failure during SGACL policy installation 'installPolicyStandby' - Failure during SGACL policy installation on standby 'installForIP' - Failure during SGACL installation for specific IP type. 'uninstall' - Failure during SGACL uninstallation.")
ctspAuthorizationSgaclFailInfo = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 713, 1, 10, 3), SnmpAdminString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: ctspAuthorizationSgaclFailInfo.setStatus('current')
if mibBuilder.loadTexts: ctspAuthorizationSgaclFailInfo.setDescription('This object provides additional information about authorization SGACL failure, which is associated with ctspAuthorizationSgaclFailNotif.')
ctspPeerPolicyUpdatedNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 713, 0, 1)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspOldPeerSgt"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspPeerSgt"))
if mibBuilder.loadTexts: ctspPeerPolicyUpdatedNotif.setStatus('current')
if mibBuilder.loadTexts: ctspPeerPolicyUpdatedNotif.setDescription('A ctspPeerPolicyUpdatedNotif is generated when the SGT value of a peer device has been updated.')
ctspAuthorizationSgaclFailNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 713, 0, 2)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspAuthorizationSgaclFailReason"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspAuthorizationSgaclFailInfo"))
if mibBuilder.loadTexts: ctspAuthorizationSgaclFailNotif.setStatus('current')
if mibBuilder.loadTexts: ctspAuthorizationSgaclFailNotif.setDescription('A ctspAuthorizationSgaclFailNotif is generated when the authorization of SGACL fails.')
ciscoTrustSecPolicyMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 1))
ciscoTrustSecPolicyMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2))
ciscoTrustSecPolicyMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 1, 1)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspGlobalSgaclEnforcementGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspOperSgaclMappingGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDownloadedSgaclMappingGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspIpSwStatisticsGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDefSwStatisticsGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspVlanConfigGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspConfigSgaclMappingGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspIpHwStatisticsGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDefHwStatisticsGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspSgaclIpv4DropNetflowMonitorGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspSgaclIpv6DropNetflowMonitorGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspPeerPolicyGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspPeerPolicyActionGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspLayer3TransportGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspIpSgtMappingGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspIfL3PolicyConfigGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspSgtPolicyGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoTrustSecPolicyMIBCompliance = ciscoTrustSecPolicyMIBCompliance.setStatus('deprecated')
if mibBuilder.loadTexts: ciscoTrustSecPolicyMIBCompliance.setDescription('The compliance statement for the CISCO-TRUSTSEC-POLICY-MIB')
ciscoTrustSecPolicyMIBComplianceRev2 = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 1, 2)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspGlobalSgaclEnforcementGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspOperSgaclMappingGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDownloadedSgaclMappingGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspIpSwStatisticsGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDefSwStatisticsGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspVlanConfigGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspConfigSgaclMappingGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspIpHwStatisticsGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDefHwStatisticsGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspSgaclIpv4DropNetflowMonitorGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspSgaclIpv6DropNetflowMonitorGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspPeerPolicyGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspPeerPolicyActionGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspLayer3TransportGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspIpSgtMappingGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspIfL3PolicyConfigGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspSgtPolicyGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspIfSgtMappingGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspVlanSgtMappingGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspSgtCachingGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspSgaclMonitorGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspSgaclMonitorStatisticGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspNotifCtrlGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspNotifGroup"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspNotifInfoGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoTrustSecPolicyMIBComplianceRev2 = ciscoTrustSecPolicyMIBComplianceRev2.setStatus('current')
if mibBuilder.loadTexts: ciscoTrustSecPolicyMIBComplianceRev2.setDescription('The compliance statement for the CISCO-TRUSTSEC-POLICY-MIB')
ctspGlobalSgaclEnforcementGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 1)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspSgaclEnforcementEnable"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspGlobalSgaclEnforcementGroup = ctspGlobalSgaclEnforcementGroup.setStatus('current')
if mibBuilder.loadTexts: ctspGlobalSgaclEnforcementGroup.setDescription('A collection of objects which provides the SGACL enforcement information for all TrustSec capable Layer 3 interfaces (excluding SVIs) at the device level.')
ctspSgaclIpv4DropNetflowMonitorGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 2)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspSgaclIpv4DropNetflowMonitor"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspSgaclIpv4DropNetflowMonitorGroup = ctspSgaclIpv4DropNetflowMonitorGroup.setStatus('current')
if mibBuilder.loadTexts: ctspSgaclIpv4DropNetflowMonitorGroup.setDescription('A collection of objects which provides NetFlow monitor information for IPv4 packets dropped due to SGACL enforcement in the device.')
ctspSgaclIpv6DropNetflowMonitorGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 3)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspSgaclIpv6DropNetflowMonitor"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspSgaclIpv6DropNetflowMonitorGroup = ctspSgaclIpv6DropNetflowMonitorGroup.setStatus('current')
if mibBuilder.loadTexts: ctspSgaclIpv6DropNetflowMonitorGroup.setDescription('A collection of objects which provides NetFlow monitor information for IPv6 packets dropped due to SGACL enforcement in the device.')
ctspVlanConfigGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 4)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspVlanConfigSgaclEnforcement"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspVlanSviActive"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspVlanConfigVrfName"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspVlanConfigStorageType"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspVlanConfigRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspVlanConfigGroup = ctspVlanConfigGroup.setStatus('current')
if mibBuilder.loadTexts: ctspVlanConfigGroup.setDescription('A collection of objects which provides the SGACL enforcement and VRF information for each VLAN.')
ctspConfigSgaclMappingGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 5)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspConfigSgaclMappingSgaclName"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspConfigSgaclMappingStorageType"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspConfigSgaclMappingRowStatus"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDefConfigIpv4Sgacls"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDefConfigIpv6Sgacls"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspConfigSgaclMappingGroup = ctspConfigSgaclMappingGroup.setStatus('current')
if mibBuilder.loadTexts: ctspConfigSgaclMappingGroup.setDescription('A collection of objects which provides the administratively configured SGACL mapping information in the device.')
ctspDownloadedSgaclMappingGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 6)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspDownloadedSgaclName"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDownloadedSgaclGenId"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDownloadedIpTrafficType"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDefDownloadedSgaclName"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDefDownloadedSgaclGenId"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDefDownloadedIpTrafficType"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspDownloadedSgaclMappingGroup = ctspDownloadedSgaclMappingGroup.setStatus('current')
if mibBuilder.loadTexts: ctspDownloadedSgaclMappingGroup.setDescription('A collection of objects which provides the downloaded SGACL mapping information in the device.')
ctspOperSgaclMappingGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 7)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspOperationalSgaclName"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspOperationalSgaclGenId"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspOperSgaclMappingSource"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspOperSgaclConfigSource"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDefOperationalSgaclName"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDefOperationalSgaclGenId"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDefOperSgaclMappingSource"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDefOperSgaclConfigSource"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspOperSgaclMappingGroup = ctspOperSgaclMappingGroup.setStatus('current')
if mibBuilder.loadTexts: ctspOperSgaclMappingGroup.setDescription('A collection of objects which provides the operational SGACL mapping information in the device.')
ctspIpSwStatisticsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 8)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspStatsIpSwDropPkts"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspStatsIpSwPermitPkts"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspIpSwStatisticsGroup = ctspIpSwStatisticsGroup.setStatus('current')
if mibBuilder.loadTexts: ctspIpSwStatisticsGroup.setDescription('A collection of objects which provides software statistics counters for unicast IP traffic subjected to SGACL enforcement.')
ctspIpHwStatisticsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 9)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspStatsIpHwDropPkts"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspStatsIpHwPermitPkts"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspIpHwStatisticsGroup = ctspIpHwStatisticsGroup.setStatus('current')
if mibBuilder.loadTexts: ctspIpHwStatisticsGroup.setDescription('A collection of objects which provides hardware statistics counters for unicast IP traffic subjected to SGACL enforcement.')
ctspDefSwStatisticsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 10)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspDefIpSwDropPkts"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDefIpSwPermitPkts"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspDefSwStatisticsGroup = ctspDefSwStatisticsGroup.setStatus('current')
if mibBuilder.loadTexts: ctspDefSwStatisticsGroup.setDescription('A collection of objects which provides software statistics counters for unicast IP traffic subjected to unicast default policy enforcement.')
ctspDefHwStatisticsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 11)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspDefIpHwDropPkts"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDefIpHwPermitPkts"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspDefHwStatisticsGroup = ctspDefHwStatisticsGroup.setStatus('current')
if mibBuilder.loadTexts: ctspDefHwStatisticsGroup.setDescription('A collection of objects which provides hardware statistics counters for unicast IP traffic subjected to unicast default policy enforcement.')
ctspPeerPolicyActionGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 12)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspAllPeerPolicyAction"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspPeerPolicyActionGroup = ctspPeerPolicyActionGroup.setStatus('current')
if mibBuilder.loadTexts: ctspPeerPolicyActionGroup.setDescription('A collection of objects which provides refreshing of all peer policies in the device.')
ctspPeerPolicyGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 13)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspPeerSgt"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspPeerSgtGenId"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspPeerTrustState"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspPeerPolicyLifeTime"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspPeerPolicyLastUpdate"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspPeerPolicyAction"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspPeerPolicyGroup = ctspPeerPolicyGroup.setStatus('current')
if mibBuilder.loadTexts: ctspPeerPolicyGroup.setDescription('A collection of objects which provides peer policy information in the device.')
ctspLayer3TransportGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 14)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspLayer3PolicyLocalConfig"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspLayer3PolicyDownloaded"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspLayer3PolicyOperational"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspLayer3TransportGroup = ctspLayer3TransportGroup.setStatus('current')
if mibBuilder.loadTexts: ctspLayer3TransportGroup.setDescription('A collection of objects which provides managed information regarding the SGT propagation along with Layer 3 traffic in the device.')
ctspIfL3PolicyConfigGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 15)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspIfL3Ipv4PolicyEnabled"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspIfL3Ipv6PolicyEnabled"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspIfL3PolicyConfigGroup = ctspIfL3PolicyConfigGroup.setStatus('current')
if mibBuilder.loadTexts: ctspIfL3PolicyConfigGroup.setDescription('A collection of objects which provides managed information for Layer3 Transport policy enforcement on capable interfaces in the device.')
ctspIpSgtMappingGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 16)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspIpSgtValue"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspIpSgtSource"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspIpSgtStorageType"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspIpSgtRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspIpSgtMappingGroup = ctspIpSgtMappingGroup.setStatus('current')
if mibBuilder.loadTexts: ctspIpSgtMappingGroup.setDescription('A collection of objects which provides managed information regarding IP-to-Sgt mapping in the device.')
ctspSgtPolicyGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 17)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspAllSgtPolicyAction"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDownloadedSgtPolicySgtGenId"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDownloadedSgtPolicyLifeTime"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDownloadedSgtPolicyLastUpdate"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDownloadedSgtPolicyAction"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDownloadedDefSgtPolicySgtGenId"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDownloadedDefSgtPolicyLifeTime"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDownloadedDefSgtPolicyLastUpdate"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDownloadedDefSgtPolicyAction"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspSgtPolicyGroup = ctspSgtPolicyGroup.setStatus('current')
if mibBuilder.loadTexts: ctspSgtPolicyGroup.setDescription('A collection of objects which provides SGT policy information in the device.')
ctspIfSgtMappingGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 18)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspIfSgtValue"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspIfSgName"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspL3IPMStatus"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspIfSgtStorageType"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspIfSgtRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspIfSgtMappingGroup = ctspIfSgtMappingGroup.setStatus('current')
if mibBuilder.loadTexts: ctspIfSgtMappingGroup.setDescription('A collection of objects which provides managed information regarding Interface-to-Sgt mapping in the device.')
ctspVlanSgtMappingGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 19)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspVlanSgtMapValue"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspVlanSgtStorageType"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspVlanSgtRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspVlanSgtMappingGroup = ctspVlanSgtMappingGroup.setStatus('current')
if mibBuilder.loadTexts: ctspVlanSgtMappingGroup.setDescription('A collection of objects which provides SGT mapping information for the IP traffic in the specified VLAN.')
ctspSgtCachingGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 20)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspSgtCachingMode"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspSgtCachingVlansFirst2K"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspSgtCachingVlansSecond2K"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspSgtCachingGroup = ctspSgtCachingGroup.setStatus('current')
if mibBuilder.loadTexts: ctspSgtCachingGroup.setDescription('A collection of objects which provides SGT caching information.')
ctspSgaclMonitorGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 21)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspSgaclMonitorEnable"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspConfigSgaclMonitor"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDefConfigIpv4SgaclsMonitor"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDefConfigIpv6SgaclsMonitor"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDownloadedSgaclMonitor"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDefDownloadedSgaclMonitor"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspOperSgaclMonitor"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDefOperSgaclMonitor"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspSgaclMonitorGroup = ctspSgaclMonitorGroup.setStatus('current')
if mibBuilder.loadTexts: ctspSgaclMonitorGroup.setDescription('A collection of objects which provides SGACL monitor information.')
ctspSgaclMonitorStatisticGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 22)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspStatsIpSwMonitorPkts"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspStatsIpHwMonitorPkts"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDefIpSwMonitorPkts"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspDefIpHwMonitorPkts"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspSgaclMonitorStatisticGroup = ctspSgaclMonitorStatisticGroup.setStatus('current')
if mibBuilder.loadTexts: ctspSgaclMonitorStatisticGroup.setDescription('A collection of objects which provides monitor statistics counters for unicast IP traffic subjected to SGACL enforcement.')
ctspNotifCtrlGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 23)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspPeerPolicyUpdatedNotifEnable"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspAuthorizationSgaclFailNotifEnable"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspNotifCtrlGroup = ctspNotifCtrlGroup.setStatus('current')
if mibBuilder.loadTexts: ctspNotifCtrlGroup.setDescription('A collection of objects providing notification control for TrustSec policy notifications.')
ctspNotifGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 24)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspPeerPolicyUpdatedNotif"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspAuthorizationSgaclFailNotif"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspNotifGroup = ctspNotifGroup.setStatus('current')
if mibBuilder.loadTexts: ctspNotifGroup.setDescription('A collection of notifications for TrustSec policy.')
ctspNotifInfoGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 713, 2, 2, 25)).setObjects(("CISCO-TRUSTSEC-POLICY-MIB", "ctspOldPeerSgt"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspAuthorizationSgaclFailReason"), ("CISCO-TRUSTSEC-POLICY-MIB", "ctspAuthorizationSgaclFailInfo"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ctspNotifInfoGroup = ctspNotifInfoGroup.setStatus('current')
if mibBuilder.loadTexts: ctspNotifInfoGroup.setDescription('A collection of objects providing the variable binding for TrustSec policy notifications.')
mibBuilder.exportSymbols("CISCO-TRUSTSEC-POLICY-MIB", ctspDefDownloadedIpTrafficType=ctspDefDownloadedIpTrafficType, ctspLayer3PolicyType=ctspLayer3PolicyType, ctspPeerTrustState=ctspPeerTrustState, ctspIfSgtValue=ctspIfSgtValue, ctspDownloadedSgaclName=ctspDownloadedSgaclName, ctspSgtCachingVlansSecond2K=ctspSgtCachingVlansSecond2K, ctspDownloadedSgtPolicyLifeTime=ctspDownloadedSgtPolicyLifeTime, ctspSgacl=ctspSgacl, ctspDownloadedDefSgtPolicyLastUpdate=ctspDownloadedDefSgtPolicyLastUpdate, ctspLayer3PolicyLocalConfig=ctspLayer3PolicyLocalConfig, ctspSgaclMappings=ctspSgaclMappings, ctspAllPeerPolicyAction=ctspAllPeerPolicyAction, ctspDefOperationalSgaclGenId=ctspDefOperationalSgaclGenId, ctspSgaclStatistics=ctspSgaclStatistics, ctspDefStatsEntry=ctspDefStatsEntry, ctspOperSgaclMappingSource=ctspOperSgaclMappingSource, ctspDefIpSwPermitPkts=ctspDefIpSwPermitPkts, ciscoTrustSecPolicyMIBObjects=ciscoTrustSecPolicyMIBObjects, ctspIfSgtMappingGroup=ctspIfSgtMappingGroup, ctspVlanConfigStorageType=ctspVlanConfigStorageType, ctspOperSgaclSourceSgt=ctspOperSgaclSourceSgt, ctspDownloadedSgtPolicyLastUpdate=ctspDownloadedSgtPolicyLastUpdate, ctspPeerPolicyUpdatedNotifEnable=ctspPeerPolicyUpdatedNotifEnable, ctspIpSgtVrfName=ctspIpSgtVrfName, ctspConfigSgaclMappingEntry=ctspConfigSgaclMappingEntry, ctspDefIpHwDropPkts=ctspDefIpHwDropPkts, ctspDefOperSgaclMappingEntry=ctspDefOperSgaclMappingEntry, ctspOperIpTrafficType=ctspOperIpTrafficType, ctspStatsIpHwMonitorPkts=ctspStatsIpHwMonitorPkts, ctspDefDownloadedSgaclMappingTable=ctspDefDownloadedSgaclMappingTable, ctspOperSgaclDestSgt=ctspOperSgaclDestSgt, ctspIpSgtMappingGroup=ctspIpSgtMappingGroup, ctspIfSgtRowStatus=ctspIfSgtRowStatus, ctspDownloadedDefSgtPolicyType=ctspDownloadedDefSgtPolicyType, ctspLayer3PolicyDownloaded=ctspLayer3PolicyDownloaded, ctspStatsDestSgt=ctspStatsDestSgt, ctspPeerSgt=ctspPeerSgt, ctspVlanConfigIndex=ctspVlanConfigIndex, ctspDefDownloadedSgaclIndex=ctspDefDownloadedSgaclIndex, ctspConfigSgaclMappingStorageType=ctspConfigSgaclMappingStorageType, ctspPeerName=ctspPeerName, ctspDefIpTrafficType=ctspDefIpTrafficType, ctspOperSgaclMappingGroup=ctspOperSgaclMappingGroup, ctspPeerPolicyUpdatedNotif=ctspPeerPolicyUpdatedNotif, ctspSgtCaching=ctspSgtCaching, ciscoTrustSecPolicyMIBComplianceRev2=ciscoTrustSecPolicyMIBComplianceRev2, ciscoTrustSecPolicyMIBConformance=ciscoTrustSecPolicyMIBConformance, ctspDefOperSgaclIndex=ctspDefOperSgaclIndex, ctspOperSgaclMappingTable=ctspOperSgaclMappingTable, ctspDownloadedSgaclGenId=ctspDownloadedSgaclGenId, ctspIfSgtMappings=ctspIfSgtMappings, ctspSgaclIpv6DropNetflowMonitor=ctspSgaclIpv6DropNetflowMonitor, ciscoTrustSecPolicyMIBGroups=ciscoTrustSecPolicyMIBGroups, ctspNotifsOnlyInfo=ctspNotifsOnlyInfo, ctspVlanConfigEntry=ctspVlanConfigEntry, ctspPeerPolicy=ctspPeerPolicy, ctspDownloadedSgaclDestSgt=ctspDownloadedSgaclDestSgt, ctspDefIpHwMonitorPkts=ctspDefIpHwMonitorPkts, ctspLayer3TransportGroup=ctspLayer3TransportGroup, ctspGlobalSgaclEnforcementGroup=ctspGlobalSgaclEnforcementGroup, ctspDownloadedSgaclMappingEntry=ctspDownloadedSgaclMappingEntry, ctspPeerPolicyActionGroup=ctspPeerPolicyActionGroup, ctspSgaclGlobals=ctspSgaclGlobals, ctspNotifInfoGroup=ctspNotifInfoGroup, ctspSgaclMonitorEnable=ctspSgaclMonitorEnable, ctspStatsIpTrafficType=ctspStatsIpTrafficType, ctspConfigSgaclMonitor=ctspConfigSgaclMonitor, ctspDefConfigIpv4Sgacls=ctspDefConfigIpv4Sgacls, ctspVlanSgtMappingGroup=ctspVlanSgtMappingGroup, ctspSgtCachingGroup=ctspSgtCachingGroup, 
ctspIfL3PolicyConfigEntry=ctspIfL3PolicyConfigEntry, ctspConfigSgaclMappingRowStatus=ctspConfigSgaclMappingRowStatus, ctspIpSwStatisticsGroup=ctspIpSwStatisticsGroup, ctspDownloadedSgtPolicySgt=ctspDownloadedSgtPolicySgt, ctspDefConfigIpv6SgaclsMonitor=ctspDefConfigIpv6SgaclsMonitor, ctspOperSgaclIndex=ctspOperSgaclIndex, ctspVlanSgtMappingTable=ctspVlanSgtMappingTable, ctspIfSgtMappingEntry=ctspIfSgtMappingEntry, ctspAuthorizationSgaclFailNotif=ctspAuthorizationSgaclFailNotif, ctspConfigSgaclMappingGroup=ctspConfigSgaclMappingGroup, ctspIfSgtMappingTable=ctspIfSgtMappingTable, ctspStatsIpSwDropPkts=ctspStatsIpSwDropPkts, ctspIpSgtSource=ctspIpSgtSource, ctspConfigSgaclMappingSgaclName=ctspConfigSgaclMappingSgaclName, ctspLayer3PolicyEntry=ctspLayer3PolicyEntry, ctspDownloadedSgaclSourceSgt=ctspDownloadedSgaclSourceSgt, ctspVlanConfigSgaclEnforcement=ctspVlanConfigSgaclEnforcement, ctspDefDownloadedSgaclMappingEntry=ctspDefDownloadedSgaclMappingEntry, ctspIpSgtIpAddress=ctspIpSgtIpAddress, ctspDownloadedSgaclMappingTable=ctspDownloadedSgaclMappingTable, ctspDefOperSgaclMappingTable=ctspDefOperSgaclMappingTable, ctspL3IPMStatus=ctspL3IPMStatus, ctspIfL3Ipv6PolicyEnabled=ctspIfL3Ipv6PolicyEnabled, ctspOperSgaclMonitor=ctspOperSgaclMonitor, ctspIpSgtMappings=ctspIpSgtMappings, ctspPeerPolicyAction=ctspPeerPolicyAction, ctspDownloadedDefSgtPolicyTable=ctspDownloadedDefSgtPolicyTable, ctspPeerPolicyTable=ctspPeerPolicyTable, ctspIfSgtStorageType=ctspIfSgtStorageType, ctspConfigSgaclMappingTable=ctspConfigSgaclMappingTable, PYSNMP_MODULE_ID=ciscoTrustSecPolicyMIB, ctspVlanSgtMappings=ctspVlanSgtMappings, ctspSgtCachingVlansFirst2K=ctspSgtCachingVlansFirst2K, ctspDefOperIpTrafficType=ctspDefOperIpTrafficType, ctspVlanSgtMapValue=ctspVlanSgtMapValue, ctspAuthorizationSgaclFailInfo=ctspAuthorizationSgaclFailInfo, ctspVlanSviActive=ctspVlanSviActive, ctspDownloadedSgtPolicyTable=ctspDownloadedSgtPolicyTable, ctspLayer3PolicyTable=ctspLayer3PolicyTable, ctspDownloadedIpTrafficType=ctspDownloadedIpTrafficType, ctspDownloadedSgtPolicyEntry=ctspDownloadedSgtPolicyEntry, ctspDefOperSgaclMappingSource=ctspDefOperSgaclMappingSource, ctspPeerPolicyEntry=ctspPeerPolicyEntry, ctspSgtStatsTable=ctspSgtStatsTable, ctspIfL3Ipv4PolicyEnabled=ctspIfL3Ipv4PolicyEnabled, ctspSgaclMonitorStatisticGroup=ctspSgaclMonitorStatisticGroup, ctspOperationalSgaclName=ctspOperationalSgaclName, ctspIpSgtStorageType=ctspIpSgtStorageType, ctspStatsIpSwPermitPkts=ctspStatsIpSwPermitPkts, ctspVlanSgtMappingIndex=ctspVlanSgtMappingIndex, ctspNotifsControl=ctspNotifsControl, ctspVlanSgtRowStatus=ctspVlanSgtRowStatus, ctspStatsIpSwMonitorPkts=ctspStatsIpSwMonitorPkts, ctspDefHwStatisticsGroup=ctspDefHwStatisticsGroup, ctspDownloadedDefSgtPolicyEntry=ctspDownloadedDefSgtPolicyEntry, ctspIpSgtValue=ctspIpSgtValue, ctspLayer3PolicyOperational=ctspLayer3PolicyOperational, ctspDefIpSwMonitorPkts=ctspDefIpSwMonitorPkts, ctspSgaclIpv4DropNetflowMonitor=ctspSgaclIpv4DropNetflowMonitor, ciscoTrustSecPolicyMIBNotifs=ciscoTrustSecPolicyMIBNotifs, ctspAuthorizationSgaclFailReason=ctspAuthorizationSgaclFailReason, ciscoTrustSecPolicyMIBCompliance=ciscoTrustSecPolicyMIBCompliance, ctspIpSgtMappingEntry=ctspIpSgtMappingEntry, ctspSgtStatsEntry=ctspSgtStatsEntry, ctspIfL3PolicyConfigGroup=ctspIfL3PolicyConfigGroup, ctspSgtPolicyGroup=ctspSgtPolicyGroup, ctspSgtPolicy=ctspSgtPolicy, ctspVlanConfigTable=ctspVlanConfigTable, ctspStatsSourceSgt=ctspStatsSourceSgt, ctspLayer3PolicyIpTrafficType=ctspLayer3PolicyIpTrafficType, 
ctspPeerPolicyLifeTime=ctspPeerPolicyLifeTime, ctspDefDownloadedSgaclGenId=ctspDefDownloadedSgaclGenId, ctspStatsIpHwPermitPkts=ctspStatsIpHwPermitPkts, ctspIpHwStatisticsGroup=ctspIpHwStatisticsGroup, ctspIpSgtAddressLength=ctspIpSgtAddressLength, ctspDownloadedSgtPolicyAction=ctspDownloadedSgtPolicyAction, ctspAllSgtPolicyAction=ctspAllSgtPolicyAction, ctspDownloadedDefSgtPolicyLifeTime=ctspDownloadedDefSgtPolicyLifeTime, ctspVlanConfigVrfName=ctspVlanConfigVrfName, ctspDownloadedDefSgtPolicySgtGenId=ctspDownloadedDefSgtPolicySgtGenId, ctspPeerSgtGenId=ctspPeerSgtGenId, ctspIfSgName=ctspIfSgName, ctspSgaclMonitorGroup=ctspSgaclMonitorGroup, ctspVlanSgtStorageType=ctspVlanSgtStorageType, ctspSgaclEnforcementEnable=ctspSgaclEnforcementEnable, ctspDefOperSgaclMonitor=ctspDefOperSgaclMonitor, ctspDownloadedSgaclMappingGroup=ctspDownloadedSgaclMappingGroup, ctspPeerPolicyGroup=ctspPeerPolicyGroup, ctspDefDownloadedSgaclMonitor=ctspDefDownloadedSgaclMonitor, ctspIfL3PolicyConfigTable=ctspIfL3PolicyConfigTable, ctspDefDownloadedSgaclName=ctspDefDownloadedSgaclName, ctspDownloadedSgtPolicySgtGenId=ctspDownloadedSgtPolicySgtGenId, ciscoTrustSecPolicyMIB=ciscoTrustSecPolicyMIB, ctspVlanConfigRowStatus=ctspVlanConfigRowStatus, ctspIpSgtRowStatus=ctspIpSgtRowStatus, ctspAuthorizationSgaclFailNotifEnable=ctspAuthorizationSgaclFailNotifEnable, ctspConfigSgaclMappingSourceSgt=ctspConfigSgaclMappingSourceSgt, ctspVlanConfigGroup=ctspVlanConfigGroup, ctspDefConfigIpv4SgaclsMonitor=ctspDefConfigIpv4SgaclsMonitor, ctspDefIpSwDropPkts=ctspDefIpSwDropPkts, ctspDefConfigIpv6Sgacls=ctspDefConfigIpv6Sgacls, ctspConfigSgaclMappingIpTrafficType=ctspConfigSgaclMappingIpTrafficType, ciscoTrustSecPolicyMIBCompliances=ciscoTrustSecPolicyMIBCompliances, ctspStatsIpHwDropPkts=ctspStatsIpHwDropPkts, ctspVlanSgtMappingEntry=ctspVlanSgtMappingEntry, ctspDefIpHwPermitPkts=ctspDefIpHwPermitPkts, ctspOperationalSgaclGenId=ctspOperationalSgaclGenId, ctspDefOperationalSgaclName=ctspDefOperationalSgaclName, ctspOperSgaclMappingEntry=ctspOperSgaclMappingEntry, ctspIpSgtMappingTable=ctspIpSgtMappingTable, ctspIfSgtMappingInfoEntry=ctspIfSgtMappingInfoEntry, ctspLayer3Transport=ctspLayer3Transport, ctspSgaclIpv4DropNetflowMonitorGroup=ctspSgaclIpv4DropNetflowMonitorGroup, ctspSgtCachingMode=ctspSgtCachingMode, ctspOperSgaclConfigSource=ctspOperSgaclConfigSource, ctspDownloadedSgaclMonitor=ctspDownloadedSgaclMonitor, ctspDefSwStatisticsGroup=ctspDefSwStatisticsGroup, ctspIpSgtAddressType=ctspIpSgtAddressType, ctspPeerPolicyLastUpdate=ctspPeerPolicyLastUpdate, ctspDownloadedDefSgtPolicyAction=ctspDownloadedDefSgtPolicyAction, ctspOldPeerSgt=ctspOldPeerSgt, ctspNotifGroup=ctspNotifGroup, ctspDefOperSgaclConfigSource=ctspDefOperSgaclConfigSource, ctspDefStatsTable=ctspDefStatsTable, ctspSgaclIpv6DropNetflowMonitorGroup=ctspSgaclIpv6DropNetflowMonitorGroup, ctspConfigSgaclMappingDestSgt=ctspConfigSgaclMappingDestSgt, ctspIfSgtMappingInfoTable=ctspIfSgtMappingInfoTable, ctspNotifCtrlGroup=ctspNotifCtrlGroup, ctspDownloadedSgaclIndex=ctspDownloadedSgaclIndex)
| [] |
lrahmani/agents-aea | tests/test_cli/test_generate/test_generate.py | 9bd1d51530fc21bf41b5adea031cda19a94b048b | # -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2018-2019 Fetch.AI Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""This test module contains the tests for the aea.cli.generate sub-module."""
from unittest import TestCase, mock
from aea.cli.generate import _generate_item
from tests.test_cli.tools_for_testing import ContextMock
def _raise_file_exists(self, *args, **kwargs):
raise FileExistsError()
@mock.patch("builtins.open", mock.mock_open())
@mock.patch("aea.cli.generate.ConfigLoader")
@mock.patch("aea.cli.generate.os.path.join", return_value="joined-path")
@mock.patch("aea.cli.generate.ProtocolGenerator.generate", _raise_file_exists)
class GenerateItemTestCase(TestCase):
"""Test case for fetch_agent_locally method."""
def test__generate_item_file_exists(self, *mocks):
"""Test for fetch_agent_locally method positive result."""
ctx_mock = ContextMock()
with self.assertRaises(SystemExit):
_generate_item(ctx_mock, "protocol", "path")
| [((1142, 1185), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.generate.ConfigLoader"""'], {}), "('aea.cli.generate.ConfigLoader')\n", (1152, 1185), False, 'from unittest import TestCase, mock\n'), ((1187, 1258), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.generate.os.path.join"""'], {'return_value': '"""joined-path"""'}), "('aea.cli.generate.os.path.join', return_value='joined-path')\n", (1197, 1258), False, 'from unittest import TestCase, mock\n'), ((1260, 1337), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.generate.ProtocolGenerator.generate"""', '_raise_file_exists'], {}), "('aea.cli.generate.ProtocolGenerator.generate', _raise_file_exists)\n", (1270, 1337), False, 'from unittest import TestCase, mock\n'), ((1123, 1139), 'unittest.mock.mock_open', 'mock.mock_open', ([], {}), '()\n', (1137, 1139), False, 'from unittest import TestCase, mock\n'), ((1570, 1583), 'tests.test_cli.tools_for_testing.ContextMock', 'ContextMock', ([], {}), '()\n', (1581, 1583), False, 'from tests.test_cli.tools_for_testing import ContextMock\n'), ((1640, 1684), 'aea.cli.generate._generate_item', '_generate_item', (['ctx_mock', '"""protocol"""', '"""path"""'], {}), "(ctx_mock, 'protocol', 'path')\n", (1654, 1684), False, 'from aea.cli.generate import _generate_item\n')] |
PeerHerholz/smobsc | sphinx/ext/napoleon/__init__.py | db34d2bb96b80579bd4a3f4c198a6b524c5a134a | """
sphinx.ext.napoleon
~~~~~~~~~~~~~~~~~~~
Support for NumPy and Google style docstrings.
:copyright: Copyright 2007-2018 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from sphinx import __display_version__ as __version__
from sphinx.application import Sphinx
from sphinx.ext.napoleon.docstring import GoogleDocstring, NumpyDocstring
if False:
# For type annotation
from typing import Any, Dict, List # NOQA
class Config:
"""Sphinx napoleon extension settings in `conf.py`.
Listed below are all the settings used by napoleon and their default
values. These settings can be changed in the Sphinx `conf.py` file. Make
sure that "sphinx.ext.napoleon" is enabled in `conf.py`::
# conf.py
# Add any Sphinx extension module names here, as strings
extensions = ['sphinx.ext.napoleon']
# Napoleon settings
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_include_init_with_doc = False
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = False
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
napoleon_use_keyword = True
napoleon_custom_sections = None
.. _Google style:
https://google.github.io/styleguide/pyguide.html
.. _NumPy style:
https://github.com/numpy/numpy/blob/master/doc/HOWTO_DOCUMENT.rst.txt
Attributes
----------
napoleon_google_docstring : :obj:`bool` (Defaults to True)
True to parse `Google style`_ docstrings. False to disable support
for Google style docstrings.
napoleon_numpy_docstring : :obj:`bool` (Defaults to True)
True to parse `NumPy style`_ docstrings. False to disable support
for NumPy style docstrings.
napoleon_include_init_with_doc : :obj:`bool` (Defaults to False)
        True to list ``__init__`` docstrings separately from the class
        docstring. False to fall back to Sphinx's default behavior, which
        considers the ``__init__`` docstring as part of the class
documentation.
**If True**::
def __init__(self):
\"\"\"
This will be included in the docs because it has a docstring
\"\"\"
def __init__(self):
# This will NOT be included in the docs
napoleon_include_private_with_doc : :obj:`bool` (Defaults to False)
True to include private members (like ``_membername``) with docstrings
in the documentation. False to fall back to Sphinx's default behavior.
**If True**::
def _included(self):
\"\"\"
This will be included in the docs because it has a docstring
\"\"\"
pass
def _skipped(self):
# This will NOT be included in the docs
pass
napoleon_include_special_with_doc : :obj:`bool` (Defaults to False)
True to include special members (like ``__membername__``) with
docstrings in the documentation. False to fall back to Sphinx's
default behavior.
**If True**::
def __str__(self):
\"\"\"
This will be included in the docs because it has a docstring
\"\"\"
return unicode(self).encode('utf-8')
def __unicode__(self):
# This will NOT be included in the docs
return unicode(self.__class__.__name__)
napoleon_use_admonition_for_examples : :obj:`bool` (Defaults to False)
True to use the ``.. admonition::`` directive for the **Example** and
**Examples** sections. False to use the ``.. rubric::`` directive
instead. One may look better than the other depending on what HTML
theme is used.
This `NumPy style`_ snippet will be converted as follows::
Example
-------
This is just a quick example
**If True**::
.. admonition:: Example
This is just a quick example
**If False**::
.. rubric:: Example
This is just a quick example
napoleon_use_admonition_for_notes : :obj:`bool` (Defaults to False)
True to use the ``.. admonition::`` directive for **Notes** sections.
False to use the ``.. rubric::`` directive instead.
Note
----
The singular **Note** section will always be converted to a
``.. note::`` directive.
See Also
--------
:attr:`napoleon_use_admonition_for_examples`
napoleon_use_admonition_for_references : :obj:`bool` (Defaults to False)
True to use the ``.. admonition::`` directive for **References**
sections. False to use the ``.. rubric::`` directive instead.
See Also
--------
:attr:`napoleon_use_admonition_for_examples`
napoleon_use_ivar : :obj:`bool` (Defaults to False)
True to use the ``:ivar:`` role for instance variables. False to use
the ``.. attribute::`` directive instead.
This `NumPy style`_ snippet will be converted as follows::
Attributes
----------
attr1 : int
Description of `attr1`
**If True**::
:ivar attr1: Description of `attr1`
:vartype attr1: int
**If False**::
.. attribute:: attr1
Description of `attr1`
:type: int
napoleon_use_param : :obj:`bool` (Defaults to True)
True to use a ``:param:`` role for each function parameter. False to
use a single ``:parameters:`` role for all the parameters.
This `NumPy style`_ snippet will be converted as follows::
Parameters
----------
arg1 : str
Description of `arg1`
arg2 : int, optional
Description of `arg2`, defaults to 0
**If True**::
:param arg1: Description of `arg1`
:type arg1: str
:param arg2: Description of `arg2`, defaults to 0
:type arg2: int, optional
**If False**::
:parameters: * **arg1** (*str*) --
Description of `arg1`
* **arg2** (*int, optional*) --
Description of `arg2`, defaults to 0
napoleon_use_keyword : :obj:`bool` (Defaults to True)
True to use a ``:keyword:`` role for each function keyword argument.
False to use a single ``:keyword arguments:`` role for all the
keywords.
This behaves similarly to :attr:`napoleon_use_param`. Note unlike
docutils, ``:keyword:`` and ``:param:`` will not be treated the same
way - there will be a separate "Keyword Arguments" section, rendered
in the same fashion as "Parameters" section (type links created if
possible)
See Also
--------
:attr:`napoleon_use_param`
napoleon_use_rtype : :obj:`bool` (Defaults to True)
True to use the ``:rtype:`` role for the return type. False to output
the return type inline with the description.
This `NumPy style`_ snippet will be converted as follows::
Returns
-------
bool
True if successful, False otherwise
**If True**::
:returns: True if successful, False otherwise
:rtype: bool
**If False**::
:returns: *bool* -- True if successful, False otherwise
napoleon_custom_sections : :obj:`list` (Defaults to None)
Add a list of custom sections to include, expanding the list of parsed sections.
The entries can either be strings or tuples, depending on the intention:
* To create a custom "generic" section, just pass a string.
* To create an alias for an existing section, pass a tuple containing the
alias name and the original, in that order.
If an entry is just a string, it is interpreted as a header for a generic
section. If the entry is a tuple/list/indexed container, the first entry
is the name of the section, the second is the section key to emulate.
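        For example (the section names below are purely illustrative)::
            napoleon_custom_sections = ['Side Effects', ('Config Options', 'parameters')]
        Here 'Side Effects' becomes a new generic section, while 'Config Options'
        is parsed like the existing "Parameters" section.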
"""
_config_values = {
'napoleon_google_docstring': (True, 'env'),
'napoleon_numpy_docstring': (True, 'env'),
'napoleon_include_init_with_doc': (False, 'env'),
'napoleon_include_private_with_doc': (False, 'env'),
'napoleon_include_special_with_doc': (False, 'env'),
'napoleon_use_admonition_for_examples': (False, 'env'),
'napoleon_use_admonition_for_notes': (False, 'env'),
'napoleon_use_admonition_for_references': (False, 'env'),
'napoleon_use_ivar': (False, 'env'),
'napoleon_use_param': (True, 'env'),
'napoleon_use_rtype': (True, 'env'),
'napoleon_use_keyword': (True, 'env'),
'napoleon_custom_sections': (None, 'env')
}
def __init__(self, **settings):
# type: (Any) -> None
for name, (default, rebuild) in self._config_values.items():
setattr(self, name, default)
for name, value in settings.items():
setattr(self, name, value)
def setup(app):
# type: (Sphinx) -> Dict[str, Any]
"""Sphinx extension setup function.
When the extension is loaded, Sphinx imports this module and executes
the ``setup()`` function, which in turn notifies Sphinx of everything
the extension offers.
Parameters
----------
app : sphinx.application.Sphinx
Application object representing the Sphinx process
See Also
--------
`The Sphinx documentation on Extensions
<http://sphinx-doc.org/extensions.html>`_
`The Extension Tutorial <http://sphinx-doc.org/extdev/tutorial.html>`_
`The Extension API <http://sphinx-doc.org/extdev/appapi.html>`_
"""
if not isinstance(app, Sphinx):
# probably called by tests
return {'version': __version__, 'parallel_read_safe': True}
_patch_python_domain()
app.setup_extension('sphinx.ext.autodoc')
app.connect('autodoc-process-docstring', _process_docstring)
app.connect('autodoc-skip-member', _skip_member)
for name, (default, rebuild) in Config._config_values.items():
app.add_config_value(name, default, rebuild)
return {'version': __version__, 'parallel_read_safe': True}
def _patch_python_domain():
# type: () -> None
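    # Teach Sphinx's Python domain the extra field names napoleon emits: 'arg' and
    # 'argument' as aliases for parameters, plus a typed "Keyword Arguments" field.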
try:
from sphinx.domains.python import PyTypedField
except ImportError:
pass
else:
import sphinx.domains.python
from sphinx.locale import _
for doc_field in sphinx.domains.python.PyObject.doc_field_types:
if doc_field.name == 'parameter':
doc_field.names = ('param', 'parameter', 'arg', 'argument')
break
sphinx.domains.python.PyObject.doc_field_types.append(
PyTypedField('keyword', label=_('Keyword Arguments'),
names=('keyword', 'kwarg', 'kwparam'),
typerolename='obj', typenames=('paramtype', 'kwtype'),
can_collapse=True))
def _process_docstring(app, what, name, obj, options, lines):
# type: (Sphinx, str, str, Any, Any, List[str]) -> None
"""Process the docstring for a given python object.
Called when autodoc has read and processed a docstring. `lines` is a list
of docstring lines that `_process_docstring` modifies in place to change
what Sphinx outputs.
The following settings in conf.py control what styles of docstrings will
be parsed:
* ``napoleon_google_docstring`` -- parse Google style docstrings
* ``napoleon_numpy_docstring`` -- parse NumPy style docstrings
Parameters
----------
app : sphinx.application.Sphinx
Application object representing the Sphinx process.
what : str
A string specifying the type of the object to which the docstring
belongs. Valid values: "module", "class", "exception", "function",
"method", "attribute".
name : str
The fully qualified name of the object.
obj : module, class, exception, function, method, or attribute
The object to which the docstring belongs.
options : sphinx.ext.autodoc.Options
The options given to the directive: an object with attributes
inherited_members, undoc_members, show_inheritance and noindex that
are True if the flag option of same name was given to the auto
directive.
lines : list of str
The lines of the docstring, see above.
.. note:: `lines` is modified *in place*
"""
result_lines = lines
docstring = None # type: GoogleDocstring
if app.config.napoleon_numpy_docstring:
docstring = NumpyDocstring(result_lines, app.config, app, what, name,
obj, options)
result_lines = docstring.lines()
if app.config.napoleon_google_docstring:
docstring = GoogleDocstring(result_lines, app.config, app, what, name,
obj, options)
result_lines = docstring.lines()
lines[:] = result_lines[:]
def _skip_member(app, what, name, obj, skip, options):
# type: (Sphinx, str, str, Any, bool, Any) -> bool
"""Determine if private and special class members are included in docs.
The following settings in conf.py determine if private and special class
members or init methods are included in the generated documentation:
* ``napoleon_include_init_with_doc`` --
include init methods if they have docstrings
* ``napoleon_include_private_with_doc`` --
include private members if they have docstrings
* ``napoleon_include_special_with_doc`` --
include special members if they have docstrings
Parameters
----------
app : sphinx.application.Sphinx
Application object representing the Sphinx process
what : str
A string specifying the type of the object to which the member
belongs. Valid values: "module", "class", "exception", "function",
"method", "attribute".
name : str
The name of the member.
obj : module, class, exception, function, method, or attribute.
For example, if the member is the __init__ method of class A, then
`obj` will be `A.__init__`.
skip : bool
A boolean indicating if autodoc will skip this member if `_skip_member`
does not override the decision
options : sphinx.ext.autodoc.Options
The options given to the directive: an object with attributes
inherited_members, undoc_members, show_inheritance and noindex that
are True if the flag option of same name was given to the auto
directive.
Returns
-------
bool
True if the member should be skipped during creation of the docs,
False if it should be included in the docs.
"""
has_doc = getattr(obj, '__doc__', False)
is_member = (what == 'class' or what == 'exception' or what == 'module')
if name != '__weakref__' and has_doc and is_member:
cls_is_owner = False
if what == 'class' or what == 'exception':
qualname = getattr(obj, '__qualname__', '')
cls_path, _, _ = qualname.rpartition('.')
if cls_path:
try:
if '.' in cls_path:
import importlib
import functools
mod = importlib.import_module(obj.__module__)
mod_path = cls_path.split('.')
cls = functools.reduce(getattr, mod_path, mod)
else:
cls = obj.__globals__[cls_path]
except Exception:
cls_is_owner = False
else:
cls_is_owner = (cls and hasattr(cls, name) and # type: ignore
name in cls.__dict__)
else:
cls_is_owner = False
if what == 'module' or cls_is_owner:
is_init = (name == '__init__')
is_special = (not is_init and name.startswith('__') and
name.endswith('__'))
is_private = (not is_init and not is_special and
name.startswith('_'))
inc_init = app.config.napoleon_include_init_with_doc
inc_special = app.config.napoleon_include_special_with_doc
inc_private = app.config.napoleon_include_private_with_doc
if ((is_special and inc_special) or
(is_private and inc_private) or
(is_init and inc_init)):
return False
return None
| [((13152, 13223), 'sphinx.ext.napoleon.docstring.NumpyDocstring', 'NumpyDocstring', (['result_lines', 'app.config', 'app', 'what', 'name', 'obj', 'options'], {}), '(result_lines, app.config, app, what, name, obj, options)\n', (13166, 13223), False, 'from sphinx.ext.napoleon.docstring import GoogleDocstring, NumpyDocstring\n'), ((13365, 13437), 'sphinx.ext.napoleon.docstring.GoogleDocstring', 'GoogleDocstring', (['result_lines', 'app.config', 'app', 'what', 'name', 'obj', 'options'], {}), '(result_lines, app.config, app, what, name, obj, options)\n', (13380, 13437), False, 'from sphinx.ext.napoleon.docstring import GoogleDocstring, NumpyDocstring\n'), ((11303, 11325), 'sphinx.locale._', '_', (['"""Keyword Arguments"""'], {}), "('Keyword Arguments')\n", (11304, 11325), False, 'from sphinx.locale import _\n'), ((15875, 15914), 'importlib.import_module', 'importlib.import_module', (['obj.__module__'], {}), '(obj.__module__)\n', (15898, 15914), False, 'import importlib\n'), ((16000, 16040), 'functools.reduce', 'functools.reduce', (['getattr', 'mod_path', 'mod'], {}), '(getattr, mod_path, mod)\n', (16016, 16040), False, 'import functools\n')] |
skodapetr/viset | plugins/similarity/rdkit/tanimoto/lbvs-entry.py | 87863ed6cde63392b2d503ceda53bb2cea367d69 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from rdkit import DataStructs
import plugin_api
__license__ = "X11"
class LbvsEntry(plugin_api.PluginInterface):
"""
Compute Tanimoto similarity.
"""
def __init__(self):
self.stream = None
self.counter = 0
self.first_entry = False
def execute(self, files):
query = LbvsEntry._load_file(files["query_file"])
database = LbvsEntry._load_file(files["database_file"])
with open(files["output_file"], "w") as stream:
self.stream = stream
self.write_output_header()
self.compute_and_write_similarities_for_items(query, database)
self.write_output_footer()
def write_output_header(self):
self.stream.write('{"data":[')
def write_output_footer(self):
self.stream.write(']}')
def compute_and_write_similarities_for_items(self, query, database):
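        # All-pairs comparison: every query fingerprint is scored against every
        # database fingerprint, and each similarity is streamed straight to the output file.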
self.first_entry = True
for query_item in query:
for database_item in database:
self._write_separator_if_needed()
self.first_entry = False
self._compute_and_write_similarity(query_item, database_item)
def _write_separator_if_needed(self):
if not self.first_entry:
self.stream.write(",")
def _compute_and_write_similarity(self, query, item):
similarity = LbvsEntry._compute_similarity(
query["value"], item["value"])
json.dump({
"query": query["id"],
"id": item["id"],
"value": similarity
}, self.stream)
@staticmethod
def _load_file(path):
with open(path) as stream:
return [{
"id": item["id"],
"value": LbvsEntry._as_sparse_vector(item["value"])
} for item in json.load(stream)["data"]]
@staticmethod
def _as_sparse_vector(data):
        # Fixed-size sparse integer vector; its length (8388608 = 2**23) is an
        # upper bound on the fingerprint keys stored below.
        vector = DataStructs.cDataStructs.IntSparseIntVect(8388608)
        for key in data:
            vector[int(key)] = int(data[key])
return vector
@staticmethod
def _compute_similarity(left, right):
return DataStructs.TanimotoSimilarity(left, right)
def get_metadata(self) -> object:
return {
"id": "rdkit/tanimoto"
}
| [((1496, 1585), 'json.dump', 'json.dump', (["{'query': query['id'], 'id': item['id'], 'value': similarity}", 'self.stream'], {}), "({'query': query['id'], 'id': item['id'], 'value': similarity},\n self.stream)\n", (1505, 1585), False, 'import json\n'), ((2021, 2071), 'rdkit.DataStructs.cDataStructs.IntSparseIntVect', 'DataStructs.cDataStructs.IntSparseIntVect', (['(8388608)'], {}), '(8388608)\n', (2062, 2071), False, 'from rdkit import DataStructs\n'), ((2245, 2288), 'rdkit.DataStructs.TanimotoSimilarity', 'DataStructs.TanimotoSimilarity', (['left', 'right'], {}), '(left, right)\n', (2275, 2288), False, 'from rdkit import DataStructs\n'), ((1882, 1899), 'json.load', 'json.load', (['stream'], {}), '(stream)\n', (1891, 1899), False, 'import json\n')] |
524243642/taobao_spider | mall_spider/spiders/actions/proxy_service.py | 9cdaed1c7a67fc1f35ee2af2e18313cedf3b1e5e | # coding: utf-8
import time
from config.config_loader import global_config
from mall_spider.spiders.actions.context import Context
from mall_spider.spiders.actions.direct_proxy_action import DirectProxyAction
__proxy_service = None
class ProxyService(object):
proxies_set = set()
proxies_list = ['https://' + item['ip'] + ':' + item['port'] for item in global_config.s_proxy]
LOW_WATER_MARK = 5
proxy_fetch_url = "http://ip.11jsq.com/index.php/api/entry?method=proxyServer.generate_api_url&packid=1&fa=0&fetch_key=&qty=1&time=1&pro=&city=&port=1&format=json&ss=5&css=&dt=1&specialTxt=3&specialJson="
def __init__(self) -> None:
super().__init__()
self._counter = 0
def get_s_proxy(self, username):
proxy = global_config.s_proxy_dict[username]
url = 'https://' + proxy['ip'] + ':' + proxy['port']
return {
'https': url
}
def get_origin_s_proxy(self, username):
return global_config.s_proxy_dict[username]
def get_static_proxy(self, username):
if not global_config.static_proxy:
return None
proxy = global_config.static_proxy_dict[username]
if proxy['username'] and proxy['password']:
url = 'https://' + proxy['username'] + ':' + proxy['password'] + '@' + proxy['ip'] + ':' + proxy['port']
else:
url = 'https://' + proxy['ip'] + ':' + proxy['port']
return {
'https': url
}
def get_origin_static_proxy(self, username):
if not global_config.static_proxy:
return None
return global_config.static_proxy_dict[username]
def get_proxy(self):
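        # Refill the pool from the remote fetch API whenever it drops below the
        # low-water mark, then hand out proxies round-robin via a running counter.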
if len(self.proxies_list) < self.LOW_WATER_MARK:
for i in range(0, int(self.LOW_WATER_MARK * 1) - len(self.proxies_list)):
self.fetch_proxy()
time.sleep(2)
proxy = self.proxies_list[self._counter % len(self.proxies_list)]
self._counter += 1
return {
'https': proxy
}
def fetch_proxy(self):
context = Context()
action = DirectProxyAction()
action.execute(context=context)
result = context.get(Context.KEY_PROXY_RESULT, [])
if result:
for item in result:
ip = item['IP']
port = str(item['Port'])
url = 'https://' + ip + ':' + port
if url not in self.proxies_set:
self.proxies_set.add(url)
self.proxies_list.append(url)
def remove_proxy(self, url, force=False):
if force:
self.proxies_set.remove(url)
self.proxies_list.remove(url)
def get_proxy_service():
global __proxy_service
if not __proxy_service:
__proxy_service = ProxyService()
return __proxy_service
| [((2086, 2095), 'mall_spider.spiders.actions.context.Context', 'Context', ([], {}), '()\n', (2093, 2095), False, 'from mall_spider.spiders.actions.context import Context\n'), ((2113, 2132), 'mall_spider.spiders.actions.direct_proxy_action.DirectProxyAction', 'DirectProxyAction', ([], {}), '()\n', (2130, 2132), False, 'from mall_spider.spiders.actions.direct_proxy_action import DirectProxyAction\n'), ((1871, 1884), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (1881, 1884), False, 'import time\n')] |
joedanz/flask-weather | app/weather_tests.py | fe35aa359da6f5d7f942d97837403e153b5c5ede | import os
import weather
import datetime
import unittest
import tempfile
class WeatherTestCase(unittest.TestCase):
def setUp(self):
self.db_fd, weather.app.config['DATABASE'] = tempfile.mkstemp()
weather.app.config['TESTING'] = True
self.app = weather.app.test_client()
weather.init_db()
def tearDown(self):
os.close(self.db_fd)
os.unlink(weather.app.config['DATABASE'])
def test_empty_db(self):
"""Test empty database with no entries."""
rv = self.app.get('/')
        assert b'Nothing logged yet.' in rv.data
def test_report(self):
"""Test reporting weather"""
rv = self.app.get('/report/11210/63/23', follow_redirects=True)
assert b'11210' in rv.data
def test_full_db(self):
"""Test reporting weather"""
rv = self.app.get('/', follow_redirects=True)
assert b'11210' in rv.data
if __name__ == '__main__':
unittest.main()
| [((949, 964), 'unittest.main', 'unittest.main', ([], {}), '()\n', (962, 964), False, 'import unittest\n'), ((191, 209), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {}), '()\n', (207, 209), False, 'import tempfile\n'), ((274, 299), 'weather.app.test_client', 'weather.app.test_client', ([], {}), '()\n', (297, 299), False, 'import weather\n'), ((308, 325), 'weather.init_db', 'weather.init_db', ([], {}), '()\n', (323, 325), False, 'import weather\n'), ((359, 379), 'os.close', 'os.close', (['self.db_fd'], {}), '(self.db_fd)\n', (367, 379), False, 'import os\n'), ((388, 429), 'os.unlink', 'os.unlink', (["weather.app.config['DATABASE']"], {}), "(weather.app.config['DATABASE'])\n", (397, 429), False, 'import os\n')] |
memristor/mep2 | modules/sensors/Activator.py | bc5cddacba3d740f791f3454b8cb51bda83ce202 | import asyncio
class Activator:
def __init__(self, name, packet_stream=None):
self.ps = None
self.name = name
self.future = None
self.data = 0
self.state = ''
if packet_stream:
self.set_packet_stream(packet_stream)
@_core.module_cmd
def wait_activator(self):
pass
@_core.module_cmd
def check_activator(self):
print('checking act')
if self.data:
self.future.set_result(1)
else:
self.state = 'check_chinch'
print('checking for chinch')
def export_cmds(self):
_core.export_cmd('wait_activator', self.wait_activator)
_core.export_cmd('check_activator', self.check_activator)
def on_recv(self, pkt):
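        # Assumed packet semantics: while waiting, pkt[0] == 1 marks the chinch as
        # detected; a later pkt[0] == 0 (chinch pulled) resolves the waiting future.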
if self.state == 'check_chinch' and self.future and pkt[0] == 1:
self.future.set_result(1)
self.state = 'chinch_ready'
print('waiting for activator')
if self.state == 'chinch_ready' and self.future and pkt[0] == 0:
self.future.set_result(1)
def set_packet_stream(self, ps):
ps.recv = self.on_recv
self.ps = ps
| [] |
ArthurCamara/beir | examples/retrieval/evaluation/sparse/evaluate_deepct.py | 2739990b719f2d4814d88473cf9965d92d4f4c18 | """
This example shows how to evaluate DeepCT (using Anserini) in BEIR.
For more details on DeepCT, refer here: https://arxiv.org/abs/1910.10687
The original DeepCT repository is not modularised and only works with TensorFlow 1.x (1.15).
We modified the DeepCT repository to work with the latest TensorFlow (2.x).
We do not change the core prediction code, only a few input/output file formats and structures, to adapt to BEIR formats.
For more details on changes, check: https://github.com/NThakur20/DeepCT and compare it with original repo!
Please follow the steps below to install DeepCT:
1. git clone https://github.com/NThakur20/DeepCT.git
Since Anserini uses Java-11, we would advise you to use docker for running Pyserini.
To be able to run the code below, you must have Docker installed locally on your machine.
To install docker on your local machine, please refer here: https://docs.docker.com/get-docker/
After docker installation, please follow the steps below to get docker container up and running:
1. docker pull beir/pyserini-fastapi
2. docker build -t pyserini-fastapi .
3. docker run -p 8000:8000 -it --rm pyserini-fastapi
Usage: python evaluate_deepct.py
"""
from DeepCT.deepct import run_deepct # git clone https://github.com/NThakur20/DeepCT.git
from beir import util, LoggingHandler
from beir.datasets.data_loader import GenericDataLoader
from beir.retrieval.evaluation import EvaluateRetrieval
from beir.generation.models import QGenModel
from tqdm.autonotebook import trange
import pathlib, os, json
import logging
import requests
import random
#### Just some code to print debug information to stdout
logging.basicConfig(format='%(asctime)s - %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
level=logging.INFO,
handlers=[LoggingHandler()])
#### /print debug information to stdout
#### Download scifact.zip dataset and unzip the dataset
dataset = "scifact"
url = "https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/{}.zip".format(dataset)
out_dir = os.path.join(pathlib.Path(__file__).parent.absolute(), "datasets")
data_path = util.download_and_unzip(url, out_dir)
corpus, queries, qrels = GenericDataLoader(data_path).load(split="test")
#### 1. Download Google BERT-BASE, Uncased model ####
# Ref: https://github.com/google-research/bert
base_model_url = "https://storage.googleapis.com/bert_models/2018_10_18/uncased_L-12_H-768_A-12.zip"
out_dir = os.path.join(pathlib.Path(__file__).parent.absolute(), "models")
bert_base_dir = util.download_and_unzip(base_model_url, out_dir)
#### 2. Download DeepCT MSMARCO Trained BERT checkpoint ####
# Credits to DeepCT authors: Zhuyun Dai, Jamie Callan, (https://github.com/AdeDZY/DeepCT)
model_url = "http://boston.lti.cs.cmu.edu/appendices/arXiv2019-DeepCT-Zhuyun-Dai/outputs/marco.zip"
out_dir = os.path.join(pathlib.Path(__file__).parent.absolute(), "models")
checkpoint_dir = util.download_and_unzip(model_url, out_dir)
##################################################
#### 3. Configure Params for DeepCT inference ####
##################################################
# We cannot use the original Repo (https://github.com/AdeDZY/DeepCT) as it only runs with TF 1.15.
# We reformatted the code (https://github.com/NThakur20/DeepCT) and made it working with latest TF 2.X!
if not os.path.isfile(os.path.join(data_path, "deepct.jsonl")):
################################
    #### Command-Line Arguments ####
################################
run_deepct.FLAGS.task_name = "beir" # Defined a seperate BEIR task in DeepCT. Check out run_deepct.
run_deepct.FLAGS.do_train = False # We only want to use the code for inference.
run_deepct.FLAGS.do_eval = False # No evaluation.
run_deepct.FLAGS.do_predict = True # True, as we would use DeepCT model for only prediction.
run_deepct.FLAGS.data_dir = os.path.join(data_path, "corpus.jsonl") # Provide original path to corpus data, follow beir format.
run_deepct.FLAGS.vocab_file = os.path.join(bert_base_dir, "vocab.txt") # Provide bert-base-uncased model vocabulary.
run_deepct.FLAGS.bert_config_file = os.path.join(bert_base_dir, "bert_config.json") # Provide bert-base-uncased config.json file.
run_deepct.FLAGS.init_checkpoint = os.path.join(checkpoint_dir, "model.ckpt-65816") # Provide DeepCT MSMARCO model (bert-base-uncased) checkpoint file.
run_deepct.FLAGS.max_seq_length = 350 # Provide Max Sequence Length used for consideration. (Max: 512)
run_deepct.FLAGS.train_batch_size = 128 # Inference batch size, Larger more Memory but faster!
run_deepct.FLAGS.output_dir = data_path # Output directory, this will contain two files: deepct.jsonl (output-file) and predict.tf_record
run_deepct.FLAGS.output_file = "deepct.jsonl" # Output file for storing final DeepCT produced corpus.
run_deepct.FLAGS.m = 100 # Scaling parameter for DeepCT weights: scaling parameter > 0, recommend 100
run_deepct.FLAGS.smoothing = "sqrt" # Use sqrt to smooth weights. DeepCT Paper uses None.
run_deepct.FLAGS.keep_all_terms = True # Do not allow DeepCT to delete terms.
# Runs DeepCT model on the corpus.jsonl
run_deepct.main()
#### Download Docker Image beir/pyserini-fastapi ####
#### Locally run the docker Image + FastAPI ####
docker_beir_pyserini = "http://127.0.0.1:8000"
#### Upload Multipart-encoded files ####
with open(os.path.join(data_path, "deepct.jsonl"), "rb") as fIn:
r = requests.post(docker_beir_pyserini + "/upload/", files={"file": fIn}, verify=False)
#### Index documents to Pyserini #####
index_name = "beir/your-index-name" # beir/scifact
r = requests.get(docker_beir_pyserini + "/index/", params={"index_name": index_name})
######################################
#### 2. Pyserini-Retrieval (BM25) ####
######################################
#### Retrieve documents from Pyserini #####
retriever = EvaluateRetrieval()
qids = list(queries)
query_texts = [queries[qid] for qid in qids]
payload = {"queries": query_texts, "qids": qids, "k": max(retriever.k_values),
"fields": {"contents": 1.0}, "bm25": {"k1": 18, "b": 0.7}}
#### Retrieve pyserini results (format of results is identical to qrels)
results = json.loads(requests.post(docker_beir_pyserini + "/lexical/batch_search/", json=payload).text)["results"]
#### Retrieve RM3 expanded pyserini results (format of results is identical to qrels)
# results = json.loads(requests.post(docker_beir_pyserini + "/lexical/rm3/batch_search/", json=payload).text)["results"]
#### Evaluate your retrieval using NDCG@k, MAP@K ...
logging.info("Retriever evaluation for k in: {}".format(retriever.k_values))
ndcg, _map, recall, precision = retriever.evaluate(qrels, results, retriever.k_values)
#### Retrieval Example ####
query_id, scores_dict = random.choice(list(results.items()))
logging.info("Query : %s\n" % queries[query_id])
scores = sorted(scores_dict.items(), key=lambda item: item[1], reverse=True)
for rank in range(10):
doc_id = scores[rank][0]
logging.info("Doc %d: %s [%s] - %s\n" % (rank+1, doc_id, corpus[doc_id].get("title"), corpus[doc_id].get("text")))
| [((2165, 2202), 'beir.util.download_and_unzip', 'util.download_and_unzip', (['url', 'out_dir'], {}), '(url, out_dir)\n', (2188, 2202), False, 'from beir import util, LoggingHandler\n'), ((2571, 2619), 'beir.util.download_and_unzip', 'util.download_and_unzip', (['base_model_url', 'out_dir'], {}), '(base_model_url, out_dir)\n', (2594, 2619), False, 'from beir import util, LoggingHandler\n'), ((2966, 3009), 'beir.util.download_and_unzip', 'util.download_and_unzip', (['model_url', 'out_dir'], {}), '(model_url, out_dir)\n', (2989, 3009), False, 'from beir import util, LoggingHandler\n'), ((6283, 6368), 'requests.get', 'requests.get', (["(docker_beir_pyserini + '/index/')"], {'params': "{'index_name': index_name}"}), "(docker_beir_pyserini + '/index/', params={'index_name':\n index_name})\n", (6295, 6368), False, 'import requests\n'), ((6540, 6559), 'beir.retrieval.evaluation.EvaluateRetrieval', 'EvaluateRetrieval', ([], {}), '()\n', (6557, 6559), False, 'from beir.retrieval.evaluation import EvaluateRetrieval\n'), ((7481, 7529), 'logging.info', 'logging.info', (["('Query : %s\\n' % queries[query_id])"], {}), "('Query : %s\\n' % queries[query_id])\n", (7493, 7529), False, 'import logging\n'), ((4128, 4167), 'os.path.join', 'os.path.join', (['data_path', '"""corpus.jsonl"""'], {}), "(data_path, 'corpus.jsonl')\n", (4140, 4167), False, 'import pathlib, os, json\n'), ((4282, 4322), 'os.path.join', 'os.path.join', (['bert_base_dir', '"""vocab.txt"""'], {}), "(bert_base_dir, 'vocab.txt')\n", (4294, 4322), False, 'import pathlib, os, json\n'), ((4426, 4473), 'os.path.join', 'os.path.join', (['bert_base_dir', '"""bert_config.json"""'], {}), "(bert_base_dir, 'bert_config.json')\n", (4438, 4473), False, 'import pathlib, os, json\n'), ((4563, 4611), 'os.path.join', 'os.path.join', (['checkpoint_dir', '"""model.ckpt-65816"""'], {}), "(checkpoint_dir, 'model.ckpt-65816')\n", (4575, 4611), False, 'import pathlib, os, json\n'), ((5821, 5838), 'DeepCT.deepct.run_deepct.main', 'run_deepct.main', ([], {}), '()\n', (5836, 5838), False, 'from DeepCT.deepct import run_deepct\n'), ((6105, 6192), 'requests.post', 'requests.post', (["(docker_beir_pyserini + '/upload/')"], {'files': "{'file': fIn}", 'verify': '(False)'}), "(docker_beir_pyserini + '/upload/', files={'file': fIn},\n verify=False)\n", (6118, 6192), False, 'import requests\n'), ((2228, 2256), 'beir.datasets.data_loader.GenericDataLoader', 'GenericDataLoader', (['data_path'], {}), '(data_path)\n', (2245, 2256), False, 'from beir.datasets.data_loader import GenericDataLoader\n'), ((3390, 3429), 'os.path.join', 'os.path.join', (['data_path', '"""deepct.jsonl"""'], {}), "(data_path, 'deepct.jsonl')\n", (3402, 3429), False, 'import pathlib, os, json\n'), ((6042, 6081), 'os.path.join', 'os.path.join', (['data_path', '"""deepct.jsonl"""'], {}), "(data_path, 'deepct.jsonl')\n", (6054, 6081), False, 'import pathlib, os, json\n'), ((1842, 1858), 'beir.LoggingHandler', 'LoggingHandler', ([], {}), '()\n', (1856, 1858), False, 'from beir import util, LoggingHandler\n'), ((6871, 6947), 'requests.post', 'requests.post', (["(docker_beir_pyserini + '/lexical/batch_search/')"], {'json': 'payload'}), "(docker_beir_pyserini + '/lexical/batch_search/', json=payload)\n", (6884, 6947), False, 'import requests\n'), ((2099, 2121), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (2111, 2121), False, 'import pathlib, os, json\n'), ((2503, 2525), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (2515, 2525), False, 'import pathlib, os, 
json\n'), ((2897, 2919), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (2909, 2919), False, 'import pathlib, os, json\n')] |
AmirHosseinNamadchi/PyNite | Examples/Space Truss - Nodal Load.py | 8cc1fe3262e1efe029c6860394d2436601272e33 | # Engineering Mechanics: Statics, 4th Edition
# Bedford and Fowler
# Problem 6.64
# Units for this model are meters and kilonewtons
# Import 'FEModel3D' and 'Visualization' from 'PyNite'
from PyNite import FEModel3D
from PyNite import Visualization
# Create a new model
truss = FEModel3D()
# Define the nodes
truss.AddNode('A', 1.1, -0.4, 0)
truss.AddNode('B', 1, 0, 0)
truss.AddNode('C', 0, 0, 0.6)
truss.AddNode('D', 0, 0, -0.4)
truss.AddNode('E', 0, 0.8, 0)
# Define the supports
truss.DefineSupport('C', True, True, True, True, True, True)
truss.DefineSupport('D', True, True, True, True, True, True)
truss.DefineSupport('E', True, True, True, True, True, True)
# Create members
# Member properties were not given for this problem, so assumed values will be used
# To make all the members act rigid, the modulus of elasticity will be set to a very large value
E = 99999999
truss.AddMember('AB', 'A', 'B', E, 100, 100, 100, 100, 100)
truss.AddMember('AC', 'A', 'C', E, 100, 100, 100, 100, 100)
truss.AddMember('AD', 'A', 'D', E, 100, 100, 100, 100, 100)
truss.AddMember('BC', 'B', 'C', E, 100, 100, 100, 100, 100)
truss.AddMember('BD', 'B', 'D', E, 100, 100, 100, 100, 100)
truss.AddMember('BE', 'B', 'E', E, 100, 100, 100, 100, 100)
# Release the moments at the ends of the members to make truss members
truss.DefineReleases('AC', False, False, False, False, True, True, \
False, False, False, False, True, True)
truss.DefineReleases('AD', False, False, False, False, True, True, \
False, False, False, False, True, True)
truss.DefineReleases('BC', False, False, False, False, True, True, \
False, False, False, False, True, True)
truss.DefineReleases('BD', False, False, False, False, True, True, \
False, False, False, False, True, True)
truss.DefineReleases('BE', False, False, False, False, True, True, \
False, False, False, False, True, True)
# Add nodal loads
truss.AddNodeLoad('A', 'FX', 10)
truss.AddNodeLoad('A', 'FY', 60)
truss.AddNodeLoad('A', 'FZ', 20)
# Analyze the model
truss.Analyze()
# Print results
print('Member BC calculated axial force: ' + str(truss.GetMember('BC').MaxAxial()))
print('Member BC expected axial force: 32.7 Tension')
print('Member BD calculated axial force: ' + str(truss.GetMember('BD').MaxAxial()))
print('Member BD expected axial force: 45.2 Tension')
print('Member BE calculated axial force: ' + str(truss.GetMember('BE').MaxAxial()))
print('Member BE expected axial force: 112.1 Compression')
# Render the model for viewing. The text height will be set to 50 mm.
# Because the members in this example are nearly rigid, there will be virtually no deformation. The deformed shape won't be rendered.
# The program has created a default load case 'Case 1' and a default load combo 'Combo 1' since we didn't specify any. We'll display 'Case 1'.
Visualization.RenderModel(truss, text_height=0.05, render_loads=True, case='Case 1')
| [((280, 291), 'PyNite.FEModel3D', 'FEModel3D', ([], {}), '()\n', (289, 291), False, 'from PyNite import FEModel3D\n'), ((2933, 3022), 'PyNite.Visualization.RenderModel', 'Visualization.RenderModel', (['truss'], {'text_height': '(0.05)', 'render_loads': '(True)', 'case': '"""Case 1"""'}), "(truss, text_height=0.05, render_loads=True, case=\n 'Case 1')\n", (2958, 3022), False, 'from PyNite import Visualization\n')] |
CodeMaster7000/Sending-Emails-in-Python | Using Yagmail to make sending emails easier.py | 2ec44f6520a6b98508c8adf372a191f2577fbf98 | import yagmail
receiver = "[email protected]" #Receiver's gmail address
body = "Hello there from Yagmail"
filename = "document.pdf"
yag = yagmail.SMTP("[email protected]")#Your gmail address
yag.send(
to=receiver,
    subject="Yagmail test (attachment included)",
contents=body,
attachments=filename,
)
| [((143, 171), 'yagmail.SMTP', 'yagmail.SMTP', (['"""[email protected]"""'], {}), "('[email protected]')\n", (155, 171), False, 'import yagmail\n')] |
markendr/esys-escript.github.io | pycad/py_src/transformations.py | 0023eab09cd71f830ab098cb3a468e6139191e8d |
##############################################################################
#
# Copyright (c) 2003-2020 by The University of Queensland
# http://www.uq.edu.au
#
# Primary Business: Queensland, Australia
# Licensed under the Apache License, version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
#
# Development until 2012 by Earth Systems Science Computational Center (ESSCC)
# Development 2012-2013 by School of Earth Sciences
# Development from 2014 by Centre for Geoscience Computing (GeoComp)
# Development from 2019 by School of Earth and Environmental Sciences
#
##############################################################################
from __future__ import print_function, division
__copyright__="""Copyright (c) 2003-2020 by The University of Queensland
http://www.uq.edu.au
Primary Business: Queensland, Australia"""
__license__="""Licensed under the Apache License, version 2.0
http://www.apache.org/licenses/LICENSE-2.0"""
__url__="https://launchpad.net/escript-finley"
"""
transformations
:var __author__: name of author
:var __copyright__: copyrights
:var __license__: licence agreement
:var __url__: url entry point on documentation
:var __version__: version
:var __date__: date of the version
:var DEG: unit of degree
:var RAD: unit of radiant
"""
__author__="Lutz Gross, [email protected]"
import numpy
import math
_TYPE=numpy.float64
DEG=math.pi/180.
RAD=1.
class Transformation(object):
"""
General class to define an affine transformation *x->Ax+b*.
"""
def __init__(self):
"""
Creates a linear transformation.
"""
pass
def __call__(self,x=numpy.zeros((3,))):
"""
Applies transformation to ``x``.
"""
raise NotImplementeError()
class Translation(Transformation):
"""
Defines a translation *x->x+b*.
"""
def __init__(self,b=numpy.zeros((3,),dtype=_TYPE)):
"""
Creates the linear transformation *x->x+b*.
"""
super(Translation, self).__init__()
self.__b=numpy.array(b,_TYPE)
def __call__(self,x=numpy.zeros((3,))):
"""
Applies translation to ``x``.
"""
return numpy.array(x,_TYPE)+self.__b
class Rotatation(Transformation):
"""
Defines a rotation.
"""
def __init__(self,axis=numpy.ones((3,),dtype=_TYPE),point=numpy.zeros((3,),dtype=_TYPE),angle=0.*RAD):
"""
Creates a rotation using an axis and a point on the axis.
"""
self.__axis=numpy.array(axis,dtype=_TYPE)
self.__point=numpy.array(point,dtype=_TYPE)
lax=numpy.dot(self.__axis,self.__axis)
if not lax>0:
raise ValueError("points must be distinct.")
self.__axis/=math.sqrt(lax)
self.__angle=float(angle)
def __call__(self,x=numpy.zeros((3,))):
"""
Applies the rotation to ``x``.
"""
x=numpy.array(x,_TYPE)
z=x-self.__point
z0=numpy.dot(z,self.__axis)
z_per=z-z0*self.__axis
lz_per=numpy.dot(z_per,z_per)
if lz_per>0:
axis1=z_per/math.sqrt(lz_per)
axis2=_cross(axis1,self.__axis)
lax2=numpy.dot(axis2,axis2)
if lax2>0:
axis2/=math.sqrt(lax2)
return z0*self.__axis+math.sqrt(lz_per)*(math.cos(self.__angle)*axis1-math.sin(self.__angle)*axis2)+self.__point
else:
return x
else:
return x
def _cross(x, y):
"""
Returns the cross product of ``x`` and ``y``.
"""
return numpy.array([x[1] * y[2] - x[2] * y[1], x[2] * y[0] - x[0] * y[2], x[0] * y[1] - x[1] * y[0]], _TYPE)
class Dilation(Transformation):
"""
Defines a dilation.
"""
def __init__(self,factor=1.,center=numpy.zeros((3,),dtype=_TYPE)):
"""
Creates a dilation with a center and a given expansion/contraction
factor.
"""
if not abs(factor)>0:
raise ValueError("factor must be non-zero.")
self.__factor=factor
self.__center=numpy.array(center,dtype=_TYPE)
def __call__(self,x=numpy.zeros((3,))):
"""
Applies dilation to ``x``.
"""
x=numpy.array(x,_TYPE)
return self.__factor*(x-self.__center)+self.__center
class Reflection(Transformation):
"""
Defines a reflection on a plane.
"""
def __init__(self,normal=numpy.ones((3,),dtype=_TYPE),offset=0.):
"""
Defines a reflection on a plane defined in normal form.
"""
self.__normal=numpy.array(normal,dtype=_TYPE)
ln=math.sqrt(numpy.dot(self.__normal,self.__normal))
if not ln>0.:
raise ValueError("normal must have positive length.")
self.__normal/=ln
if isinstance(offset,float) or isinstance(offset,int):
self.__offset=offset/ln
else:
self.__offset=numpy.dot(numpy.array(offset,dtype=_TYPE),self.__normal)
def __call__(self,x=numpy.zeros((3,))):
"""
Applies reflection to ``x``.
"""
x=numpy.array(x,_TYPE)
return x - 2*(numpy.dot(x,self.__normal)-self.__offset)*self.__normal
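# Minimal usage sketch added for illustration (not part of the original pycad module);
# the sample point and parameter values below are arbitrary.
if __name__ == "__main__":
    p = numpy.array([1., 0., 0.], _TYPE)
    print(Translation(b=[0., 0., 1.])(p))                # expected [1. 0. 1.]
    print(Rotatation(axis=[0., 0., 1.], angle=90*DEG)(p)) # approx. [0. 1. 0.] (quarter turn about z)
    print(Dilation(factor=2.)(p))                         # expected [2. 0. 0.]
    print(Reflection(normal=[1., 0., 0.])(p))             # expected [-1. 0. 0.]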
| [((3484, 3590), 'numpy.array', 'numpy.array', (['[x[1] * y[2] - x[2] * y[1], x[2] * y[0] - x[0] * y[2], x[0] * y[1] - x[1] *\n y[0]]', '_TYPE'], {}), '([x[1] * y[2] - x[2] * y[1], x[2] * y[0] - x[0] * y[2], x[0] * y\n [1] - x[1] * y[0]], _TYPE)\n', (3495, 3590), False, 'import numpy\n'), ((1623, 1640), 'numpy.zeros', 'numpy.zeros', (['(3,)'], {}), '((3,))\n', (1634, 1640), False, 'import numpy\n'), ((1851, 1881), 'numpy.zeros', 'numpy.zeros', (['(3,)'], {'dtype': '_TYPE'}), '((3,), dtype=_TYPE)\n', (1862, 1881), False, 'import numpy\n'), ((2015, 2036), 'numpy.array', 'numpy.array', (['b', '_TYPE'], {}), '(b, _TYPE)\n', (2026, 2036), False, 'import numpy\n'), ((2061, 2078), 'numpy.zeros', 'numpy.zeros', (['(3,)'], {}), '((3,))\n', (2072, 2078), False, 'import numpy\n'), ((2286, 2315), 'numpy.ones', 'numpy.ones', (['(3,)'], {'dtype': '_TYPE'}), '((3,), dtype=_TYPE)\n', (2296, 2315), False, 'import numpy\n'), ((2321, 2351), 'numpy.zeros', 'numpy.zeros', (['(3,)'], {'dtype': '_TYPE'}), '((3,), dtype=_TYPE)\n', (2332, 2351), False, 'import numpy\n'), ((2472, 2502), 'numpy.array', 'numpy.array', (['axis'], {'dtype': '_TYPE'}), '(axis, dtype=_TYPE)\n', (2483, 2502), False, 'import numpy\n'), ((2522, 2553), 'numpy.array', 'numpy.array', (['point'], {'dtype': '_TYPE'}), '(point, dtype=_TYPE)\n', (2533, 2553), False, 'import numpy\n'), ((2564, 2599), 'numpy.dot', 'numpy.dot', (['self.__axis', 'self.__axis'], {}), '(self.__axis, self.__axis)\n', (2573, 2599), False, 'import numpy\n'), ((2695, 2709), 'math.sqrt', 'math.sqrt', (['lax'], {}), '(lax)\n', (2704, 2709), False, 'import math\n'), ((2768, 2785), 'numpy.zeros', 'numpy.zeros', (['(3,)'], {}), '((3,))\n', (2779, 2785), False, 'import numpy\n'), ((2857, 2878), 'numpy.array', 'numpy.array', (['x', '_TYPE'], {}), '(x, _TYPE)\n', (2868, 2878), False, 'import numpy\n'), ((2912, 2937), 'numpy.dot', 'numpy.dot', (['z', 'self.__axis'], {}), '(z, self.__axis)\n', (2921, 2937), False, 'import numpy\n'), ((2981, 3004), 'numpy.dot', 'numpy.dot', (['z_per', 'z_per'], {}), '(z_per, z_per)\n', (2990, 3004), False, 'import numpy\n'), ((3698, 3728), 'numpy.zeros', 'numpy.zeros', (['(3,)'], {'dtype': '_TYPE'}), '((3,), dtype=_TYPE)\n', (3709, 3728), False, 'import numpy\n'), ((3974, 4006), 'numpy.array', 'numpy.array', (['center'], {'dtype': '_TYPE'}), '(center, dtype=_TYPE)\n', (3985, 4006), False, 'import numpy\n'), ((4031, 4048), 'numpy.zeros', 'numpy.zeros', (['(3,)'], {}), '((3,))\n', (4042, 4048), False, 'import numpy\n'), ((4116, 4137), 'numpy.array', 'numpy.array', (['x', '_TYPE'], {}), '(x, _TYPE)\n', (4127, 4137), False, 'import numpy\n'), ((4314, 4343), 'numpy.ones', 'numpy.ones', (['(3,)'], {'dtype': '_TYPE'}), '((3,), dtype=_TYPE)\n', (4324, 4343), False, 'import numpy\n'), ((4461, 4493), 'numpy.array', 'numpy.array', (['normal'], {'dtype': '_TYPE'}), '(normal, dtype=_TYPE)\n', (4472, 4493), False, 'import numpy\n'), ((4878, 4895), 'numpy.zeros', 'numpy.zeros', (['(3,)'], {}), '((3,))\n', (4889, 4895), False, 'import numpy\n'), ((4965, 4986), 'numpy.array', 'numpy.array', (['x', '_TYPE'], {}), '(x, _TYPE)\n', (4976, 4986), False, 'import numpy\n'), ((2154, 2175), 'numpy.array', 'numpy.array', (['x', '_TYPE'], {}), '(x, _TYPE)\n', (2165, 2175), False, 'import numpy\n'), ((3118, 3141), 'numpy.dot', 'numpy.dot', (['axis2', 'axis2'], {}), '(axis2, axis2)\n', (3127, 3141), False, 'import numpy\n'), ((4513, 4552), 'numpy.dot', 'numpy.dot', (['self.__normal', 'self.__normal'], {}), '(self.__normal, self.__normal)\n', (4522, 4552), False, 'import 
numpy\n'), ((3045, 3062), 'math.sqrt', 'math.sqrt', (['lz_per'], {}), '(lz_per)\n', (3054, 3062), False, 'import math\n'), ((3180, 3195), 'math.sqrt', 'math.sqrt', (['lax2'], {}), '(lax2)\n', (3189, 3195), False, 'import math\n'), ((4806, 4838), 'numpy.array', 'numpy.array', (['offset'], {'dtype': '_TYPE'}), '(offset, dtype=_TYPE)\n', (4817, 4838), False, 'import numpy\n'), ((5007, 5034), 'numpy.dot', 'numpy.dot', (['x', 'self.__normal'], {}), '(x, self.__normal)\n', (5016, 5034), False, 'import numpy\n'), ((3230, 3247), 'math.sqrt', 'math.sqrt', (['lz_per'], {}), '(lz_per)\n', (3239, 3247), False, 'import math\n'), ((3249, 3271), 'math.cos', 'math.cos', (['self.__angle'], {}), '(self.__angle)\n', (3257, 3271), False, 'import math\n'), ((3278, 3300), 'math.sin', 'math.sin', (['self.__angle'], {}), '(self.__angle)\n', (3286, 3300), False, 'import math\n')] |
ThomasHelfer/BosonStar | example/complex_scalar_star_solver.py | 5442a6e6171122a3ba1d6b079e6483ab72aa7338 | from bosonstar.ComplexBosonStar import Complex_Boson_Star
# =====================
# All imporntnat definitions
# =====================
# Physics defintions
phi0 = 0.40         # central phi
D = 5.0 # Dimension (total not only spacial)
Lambda = -0.2 # Cosmological constant
# Solver definitions
Rstart = 3
Rend = 50.00
deltaR = 1
N = 100000
e_pow_minus_delta_guess = 0.4999
verbose = 2
eps = 1e-10  # Small epsilon to avoid starting the radial integration exactly at r = 0
# ====================================
# Main routine
# ====================================
pewpew = Complex_Boson_Star(e_pow_minus_delta_guess, phi0, D, Lambda, verbose)
pewpew.print_parameters()
alpha0 = pewpew.radial_walker(Rstart, Rend, deltaR, N, eps)
# =====================================
# Output and plotting
# =====================================
soldict = pewpew.get_solution()
# Makes sure that lapse goes asymptotically to 1
# (Not an essential step, but recommended)
pewpew.normalise_edelta()
pewpew.check_Einstein_equation()
# ===============================
path = pewpew.get_path()
pewpew.plot_solution()
pewpew.print_solution()
| [((561, 630), 'bosonstar.ComplexBosonStar.Complex_Boson_Star', 'Complex_Boson_Star', (['e_pow_minus_delta_guess', 'phi0', 'D', 'Lambda', 'verbose'], {}), '(e_pow_minus_delta_guess, phi0, D, Lambda, verbose)\n', (579, 630), False, 'from bosonstar.ComplexBosonStar import Complex_Boson_Star\n')] |
ouyhlan/fastNLP | setup.py | cac13311e28c1e8e3c866d50656173650eb5c7a1 | #!/usr/bin/env python
# coding=utf-8
from setuptools import setup, find_packages
with open('README.md', encoding='utf-8') as f:
readme = f.read()
with open('LICENSE', encoding='utf-8') as f:
license = f.read()
with open('requirements.txt', encoding='utf-8') as f:
reqs = f.read()
pkgs = [p for p in find_packages() if p.startswith('fastNLP')]
print(pkgs)
setup(
name='FastNLP',
version='0.7.0',
url='https://gitee.com/fastnlp/fastNLP',
description='fastNLP: Deep Learning Toolkit for NLP, developed by Fudan FastNLP Team',
long_description=readme,
long_description_content_type='text/markdown',
license='Apache License',
author='Fudan FastNLP Team',
python_requires='>=3.6',
packages=pkgs,
install_requires=reqs.strip().split('\n'),
)
| [((315, 330), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (328, 330), False, 'from setuptools import setup, find_packages\n')] |
ory/sdk-generator | clients/client/python/ory_client/__init__.py | 958314d130922ad6f20f439b5230141a832231a5 | # flake8: noqa
"""
Ory APIs
Documentation for all public and administrative Ory APIs. Administrative APIs can only be accessed with a valid Personal Access Token. Public APIs are mostly used in browsers. # noqa: E501
The version of the OpenAPI document: v0.0.1-alpha.187
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
__version__ = "v0.0.1-alpha.187"
# import ApiClient
from ory_client.api_client import ApiClient
# import Configuration
from ory_client.configuration import Configuration
# import exceptions
from ory_client.exceptions import OpenApiException
from ory_client.exceptions import ApiAttributeError
from ory_client.exceptions import ApiTypeError
from ory_client.exceptions import ApiValueError
from ory_client.exceptions import ApiKeyError
from ory_client.exceptions import ApiException
| [] |
ambiata/atmosphere-python-sdk | atmosphere/custom_activity/base_class.py | 48880a8553000cdea59d63b0fba49e1f0f482784 | from abc import ABC, abstractmethod
from typing import Tuple
from requests import Response
from .pydantic_models import (AppliedExclusionConditionsResponse,
BiasAttributeConfigListResponse,
ComputeRewardResponse, DefaultPredictionResponse,
ExclusionRuleConditionListResponse,
PredictionResponsePayloadFormatListResponse)
class BaseActivityCustomCode(ABC):
"""
The main class of this repository: the one to be implemented
"""
is_for_mocker: bool
def __init__(self, is_for_mocker: bool = False):
self.is_for_mocker = is_for_mocker
@abstractmethod
def validate_prediction_request(self, prediction_request: dict) -> None:
"""Raise a ValidationError if the received prediction request is not valid"""
@abstractmethod
def validate_outcome_request(self, outcome_request: dict) -> None:
"""Raise a ValidationError if the received outcome request is not valid"""
@abstractmethod
def compute_reward(self, outcome_request: dict) -> ComputeRewardResponse:
"""From an outcome, compute the reward"""
@abstractmethod
def get_module_version(self) -> str:
"""Return the version of the module."""
@abstractmethod
def send_mock_prediction_request(
self, url_prediction_endpoint: str
) -> Tuple[Response, dict]:
"""
        Send a mock request to the provided url and return the corresponding response,
        together with extra information if required for computing the prediction.
        The response and dictionary will be provided to
        `send_mock_outcome_request`.
"""
@abstractmethod
def send_mock_outcome_request(
self,
url_outcome_endpoint: str,
prediction_response: Response,
info_from_prediction: dict,
) -> Response:
"""
        Send a mock request to the provided url and return the corresponding response.
        The prediction response and the extra information created by
        `send_mock_prediction_request` are passed in as arguments.
"""
def get_prediction_response_payload_formats(
self,
) -> PredictionResponsePayloadFormatListResponse:
"""
Return the list of available format of the prediction payload.
Every format should have a name and a description
The name of the format should be unique.
"""
return {"prediction_response_payload_formats": []}
def format_prediction_payload_response(
self,
default_prediction_response: DefaultPredictionResponse,
payload_format: str, # noqa pylint: disable=unused-argument
) -> dict:
"""
You can format the prediction the way you want based
on the information returned by default
"""
return default_prediction_response
def get_exclusion_rule_conditions(self) -> ExclusionRuleConditionListResponse:
"""
Define the exclusion rules for the activity
"""
return ExclusionRuleConditionListResponse(exclusion_rule_conditions=[])
def get_applied_exclusion_conditions(
self, prediction_request: dict # noqa pylint: disable=unused-argument
) -> AppliedExclusionConditionsResponse:
"""
Define the exclusion rules for the activity
"""
return AppliedExclusionConditionsResponse(applied_exclusion_conditions=[])
def get_bias_attribute_configs(self) -> BiasAttributeConfigListResponse:
"""
Define the bias attribute configs, these decide which attributes may be
used by atmospherex as bias attributes
"""
return BiasAttributeConfigListResponse(bias_attribute_configs=[])
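# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): a minimal concrete
# subclass showing how the abstract hooks fit together. The request payload
# keys and the ComputeRewardResponse field name ("reward") used below are
# assumptions for illustration only, not the real schema.
# ---------------------------------------------------------------------------
import requests


class ExampleActivityCustomCode(BaseActivityCustomCode):

    def validate_prediction_request(self, prediction_request: dict) -> None:
        if not isinstance(prediction_request, dict):
            raise ValueError("prediction request must be a JSON object")

    def validate_outcome_request(self, outcome_request: dict) -> None:
        if "reward" not in outcome_request:  # assumed key, for illustration
            raise ValueError("outcome request must contain a reward")

    def compute_reward(self, outcome_request: dict) -> ComputeRewardResponse:
        # "reward" as the pydantic field name is an assumption
        return ComputeRewardResponse(reward=float(outcome_request["reward"]))

    def get_module_version(self) -> str:
        return "0.0.1"

    def send_mock_prediction_request(
        self, url_prediction_endpoint: str
    ) -> Tuple[Response, dict]:
        # example payload; real activities would send their own schema
        response = requests.post(url_prediction_endpoint, json={"example": 1})
        return response, {}

    def send_mock_outcome_request(
        self,
        url_outcome_endpoint: str,
        prediction_response: Response,
        info_from_prediction: dict,
    ) -> Response:
        return requests.post(url_outcome_endpoint, json={"reward": 1.0})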
| [] |
modulo16/PfNE | Module1/file3.py | 9706afc42c44dcfd1490e5ac074156f41e5515a8 | from __future__ import print_function, unicode_literals
#Ensures Unicode is used for all strings.
my_str = 'whatever'
#Shows the String type, which should be unicode
type(my_str)
#declare string:
ip_addr = '192.168.1.1'
#check it with a boolean comparison; this one is True:
ip_addr == '192.168.1.1'
#this one is False:
ip_addr == '10.1.1.1'
#is this substring in this variable?
'192.168' in ip_addr
'1.1' in ip_addr
'15.1' not in ip_addr
#Strings also have indices starting at '0'
#in the case below we get '1' which is the first character
ip_addr[0]
#we can also get the last using negative notation. The following gets the last:
ip_addr[-1]
#second to last:
ip_addr[-2]
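#we can also take a slice; for example the first three characters give '192':
ip_addr[0:3]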
#show length of string:
len(ip_addr)
#Example string concatenation
my_str = 'Hello'
my_str + ' something'
| [] |
arcticmatter/pipresents-beep | pp_io_plugins/pp_kbddriver_plus.py | e5945f929b47249f19b0cb3433a138e874b592db | #enhanced keyboard driver
import copy
import os
import configparser
from pp_displaymanager import DisplayManager
class pp_kbddriver_plus(object):
# control list items
NAME=0 # symbolic name for input and output
DIRECTION = 1 # in/out
MATCH = 2 # for input the character/string to match (no EOL)
MODE= 3 # for input the match mode any-char,char,any-line,line
TEMPLATE=['','','','']
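    # For clarity, a minimal illustrative pp_kbddriver_plus.cfg is sketched below.
    # The section/key names follow what init() and _read() expect; the concrete
    # values (title, event names, key to match) are made-up examples only.
    #
    #   [DRIVER]
    #   title = Keyboard Plus
    #   bind-printing = yes
    #
    #   [STOP]
    #   name = pp-stop
    #   direction = in
    #   mode = specific-character
    #   match = <Escape>
    #
    #   [LINE]
    #   name = my-line-event
    #   direction = in
    #   mode = any-line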
# CLASS VARIABLES (pp_kbddriver_plus.)
driver_active=False
    title='' # used for error reporting and logging
tick_interval='' # mS between polls of the serial input
match_mode='' # char or line, whether input characters are matched for each character or a complete line
inputs={}
# executed by main program and by each object using the driver
def __init__(self):
self.dm=DisplayManager()
# executed once from main program
def init(self,filename,filepath,widget,pp_dir,pp_home,pp_profile,event_callback=None):
# instantiate arguments
self.widget=widget
self.filename=filename
self.filepath=filepath
self.event_callback=event_callback
pp_kbddriver_plus.driver_active = False
# read pp_kbddriver_plus.cfg file.
reason,message=self._read(self.filename,self.filepath)
if reason =='error':
return 'error',message
if self.config.has_section('DRIVER') is False:
return 'error','No DRIVER section in '+self.filepath
# all the below are used by another instance of pp_kbddriver_plus so must reference class variables
# read information from DRIVER section
pp_kbddriver_plus.title=self.config.get('DRIVER','title')
pp_kbddriver_plus.bind_printing = self.config.get('DRIVER','bind-printing')
# construct the control list from the config file
pp_kbddriver_plus.in_names=[]
pp_kbddriver_plus.out_names=[]
for section in self.config.sections():
if section == 'DRIVER':
continue
entry=copy.deepcopy(pp_kbddriver_plus.TEMPLATE)
entry[pp_kbddriver_plus.NAME]=self.config.get(section,'name')
entry[pp_kbddriver_plus.DIRECTION]=self.config.get(section,'direction')
if entry[pp_kbddriver_plus.DIRECTION] == 'none':
continue
elif entry[pp_kbddriver_plus.DIRECTION] == 'in':
entry[pp_kbddriver_plus.MODE]=self.config.get(section,'mode')
if entry[pp_kbddriver_plus.MODE] in ('specific-character','specific-line'):
entry[pp_kbddriver_plus.MATCH]=self.config.get(section,'match')
pp_kbddriver_plus.in_names.append(copy.deepcopy(entry))
else:
return 'error',pp_kbddriver_plus.title + ' direction not in or out'
# print pp_kbddriver_plus.in_names
# bind the keys
self._bind_keys(widget,self._key_received)
# all ok so indicate the driver is active
pp_kbddriver_plus.driver_active=True
# init must return two arguments
return 'normal',pp_kbddriver_plus.title + ' active'
# sets up tkinter keyboard events such that any key press
# does a callback to _key_received() with the event object
def _bind_keys(self,widget,callback):
for display_name in DisplayManager.display_map:
status,message,display_id,canvas=self.dm.id_of_canvas(display_name)
if status !='normal':
continue
            # bind all the normal keys that return a printing character such that x produces pp-key-x (but filtered in _key_received)
canvas.bind("<Key>", lambda event,match='<Key>',name='': self._key_received(event,match,name))
# print 'bind printing'
# Bind <Return> so that eol detection works, <Return> cannot be used to trigger an input event
            # if you want that, use keys.cfg
canvas.bind("<Return>", lambda event,match='<Return>',name='': self._key_received(event,match,name))
# print 'bind Return to make eol work'
# go through entries and bind all specific-character matches to _key_received
for entry in pp_kbddriver_plus.in_names:
if entry[pp_kbddriver_plus.MODE] == 'specific-character':
match = entry[pp_kbddriver_plus.MATCH]
name = entry[pp_kbddriver_plus.NAME]
canvas.bind(match, lambda event, match=match,name=name: self._key_received(event,match,name))
# print 'bind specific-char', match,name
# start method must be defined. If not using inputs just pass
def start(self):
pp_kbddriver_plus.inputs['current-character']=''
pp_kbddriver_plus.inputs['current-line']=''
pp_kbddriver_plus.inputs['previous-line']=''
def _key_received(self,event,match,name):
# generate the events with symbolic names if driver is active
if pp_kbddriver_plus.driver_active is True:
char=event.char
# print 'received ',char,match,name
# if char is eol then match the line and start a new line
if match =='<Return>':
# do match of line
# print 'do match line',pp_kbddriver_plus.inputs['current-line']
self.match_line(pp_kbddriver_plus.inputs['current-line'])
# shuffle and empty the buffer
pp_kbddriver_plus.inputs['previous-line'] = pp_kbddriver_plus.inputs['current-line']
pp_kbddriver_plus.inputs['current-line']=''
pp_kbddriver_plus.inputs['current-character']=''
if name !='':
# print 'bound <Return> key'
if self.event_callback is not None:
self.event_callback(name,pp_kbddriver_plus.title)
else:
# process a character
if char == '' and match == '<Key>':
# unbound special key
# print 'unbound special key ', match
pass
else:
# a character has been received
pp_kbddriver_plus.inputs['current-character']=char
pp_kbddriver_plus.inputs['current-line']+=char
# print pp_kbddriver_plus.inputs['current-character'],pp_kbddriver_plus.inputs['current-line']
if match == '<Key>' and char != '' and self.bind_printing =='yes':
# print 'printable key, bind-printing is yes',char,match
                        # printable character without overriding section
if self.event_callback is not None:
self.event_callback('pp-key-'+ char,pp_kbddriver_plus.title)
else:
if name != '':
# print 'bound non-printable character',char,name
if self.event_callback is not None:
self.event_callback(name,pp_kbddriver_plus.title)
# look through entries for any-character
for entry in pp_kbddriver_plus.in_names:
if entry[pp_kbddriver_plus.MODE] == 'any-character':
# print 'match any character', char, 'current line is ',pp_kbddriver_plus.inputs['current-line']
if self.event_callback is not None:
self.event_callback(entry[pp_kbddriver_plus.NAME],pp_kbddriver_plus.title)
def match_line(self,line):
for entry in pp_kbddriver_plus.in_names:
if entry[pp_kbddriver_plus.MODE] == 'any-line':
# print 'match any line',line
if self.event_callback is not None:
self.event_callback(entry[pp_kbddriver_plus.NAME],pp_kbddriver_plus.title)
if entry[pp_kbddriver_plus.MODE] == 'specific-line' and line == entry[pp_kbddriver_plus.MATCH]:
# print 'match specific line', line
if self.event_callback is not None:
self.event_callback(entry[pp_kbddriver_plus.NAME],pp_kbddriver_plus.title)
# allow track plugins (or anything else) to access analog input values
def get_input(self,key):
if key in pp_kbddriver_plus.inputs:
return True, pp_kbddriver_plus.inputs[key]
else:
return False, None
# allow querying of driver state
def is_active(self):
return pp_kbddriver_plus.driver_active
# called by main program only. Called when PP is closed down
def terminate(self):
pp_kbddriver_plus.driver_active = False
# ************************************************
# output interface method
# this can be called from many objects so needs to operate on class variables
# ************************************************
# execute an output event
def handle_output_event(self,name,param_type,param_values,req_time):
return 'normal','no output methods'
# ***********************************
# reading .cfg file
# ************************************
def _read(self,filename,filepath):
if os.path.exists(filepath):
self.config = configparser.ConfigParser(inline_comment_prefixes = (';',))
self.config.read(filepath)
return 'normal',filename+' read'
else:
return 'error',filename+' not found at: '+filepath
if __name__ == '__main__':
from tkinter import *
def key_callback(symbol,source):
print('callback',symbol,source,'\n')
if symbol=='pp-stop':
idd.terminate()
exit()
pass
root = Tk()
w = Label(root, text="pp_kbddriver_plus.py test harness")
w.pack()
idd=pp_kbddriver_plus()
    # init() expects pp_dir, pp_home and pp_profile before the event callback; they are unused here
    reason,message=idd.init('pp_kbddriver_plus.cfg','/home/pi/pipresents/pp_io_config/keys_plus.cfg',root,'','','',key_callback)
print(reason,message)
if reason != 'error':
idd.start()
root.mainloop()
| [((884, 900), 'pp_displaymanager.DisplayManager', 'DisplayManager', ([], {}), '()\n', (898, 900), False, 'from pp_displaymanager import DisplayManager\n'), ((9565, 9589), 'os.path.exists', 'os.path.exists', (['filepath'], {}), '(filepath)\n', (9579, 9589), False, 'import os\n'), ((2132, 2173), 'copy.deepcopy', 'copy.deepcopy', (['pp_kbddriver_plus.TEMPLATE'], {}), '(pp_kbddriver_plus.TEMPLATE)\n', (2145, 2173), False, 'import copy\n'), ((9617, 9674), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {'inline_comment_prefixes': "(';',)"}), "(inline_comment_prefixes=(';',))\n", (9642, 9674), False, 'import configparser\n'), ((2783, 2803), 'copy.deepcopy', 'copy.deepcopy', (['entry'], {}), '(entry)\n', (2796, 2803), False, 'import copy\n')] |
akshay-kapase/shopping | grocery/migrations/0003_alter_item_comments.py | 7bf3bac4a78d07bca9a9f9d44d85e11bb826a366 | # Generated by Django 3.2.6 on 2021-09-03 15:48
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('grocery', '0002_alter_item_comments'),
]
operations = [
migrations.AlterField(
model_name='item',
name='comments',
field=models.CharField(blank=True, default='null', max_length=200),
preserve_default=False,
),
]
| [((337, 397), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '"""null"""', 'max_length': '(200)'}), "(blank=True, default='null', max_length=200)\n", (353, 397), False, 'from django.db import migrations, models\n')] |
yjf18340/webots | projects/objects/buildings/protos/textures/colored_textures/textures_generator.py | 60d441c362031ab8fde120cc0cd97bdb1a31a3d5 | #!/usr/bin/env python
# Copyright 1996-2019 Cyberbotics Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generate textures prepared for OSM, based on image templates."""
import glob
import os
from PIL import Image
# change directory to this script directory in order to allow this script to be called from another directory.
os.chdir(os.path.dirname(os.path.realpath(__file__)))
# get all the template files in put them in a list of tuples
templates = []
for f in glob.glob("*_diffuse_template.jpg"):
templates.append((f, f.replace('_diffuse_', '_color_mask_')))
# target colors
# ref: http://wiki.openstreetmap.org/wiki/Key:colour
# TODO: is it sufficient?
colors = {
'000000': (0.0, 0.0, 0.0),
'FFFFFF': (0.84, 0.84, 0.84),
'808080': (0.4, 0.4, 0.4),
'C0C0C0': (0.65, 0.65, 0.65),
'800000': (0.4, 0.15, 0.15),
'FF0000': (0.45, 0.0, 0.0),
'808000': (0.4, 0.4, 0.2),
'FFFF00': (0.7, 0.6, 0.15),
'008000': (0.15, 0.3, 0.15),
'00FF00': (0.55, 0.69, 0.52),
'008080': (0.15, 0.3, 0.3),
'00FFFF': (0.6, 0.7, 0.7),
'000080': (0.2, 0.2, 0.3),
'0000FF': (0.4, 0.4, 0.75),
'800080': (0.5, 0.4, 0.5),
'FF00FF': (0.9, 0.75, 0.85),
'F5DEB3': (0.83, 0.78, 0.65),
'8B4513': (0.3, 0.1, 0.05)
}
effectFactor = 0.5 # power of the effect, found empirically
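# Sketch of the per-channel blend applied in the loop below, as implemented:
#   out = diffuse + 255 * (mask / 255) * (2 * target - 1) * effectFactor
# A black mask pixel leaves the diffuse value untouched; a white mask pixel
# shifts it towards the target colour (up if target > 0.5, down otherwise).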
# foreach template
for template in templates:
# load the templates
diffuse = Image.open(template[0])
mask = Image.open(template[1])
assert diffuse.size == mask.size
width, height = diffuse.size
# create an image per color
for colorString, color in colors.iteritems():
image = Image.new('RGB', diffuse.size)
pixels = image.load()
        for x in range(width):  # x is the horizontal (width) coordinate for getpixel
            for y in range(height):
dR, dG, dB = diffuse.getpixel((x, y))
mR, mG, mB = mask.getpixel((x, y))
r = dR + int(255.0 * (mR / 255.0) * (color[0] * 2.0 - 1.0) * effectFactor)
g = dG + int(255.0 * (mG / 255.0) * (color[1] * 2.0 - 1.0) * effectFactor)
b = dB + int(255.0 * (mB / 255.0) * (color[2] * 2.0 - 1.0) * effectFactor)
pixels[x, y] = (r, g, b)
image.save(template[0].replace('_diffuse_template', '_' + colorString))
| [((975, 1010), 'glob.glob', 'glob.glob', (['"""*_diffuse_template.jpg"""'], {}), "('*_diffuse_template.jpg')\n", (984, 1010), False, 'import glob\n'), ((1915, 1938), 'PIL.Image.open', 'Image.open', (['template[0]'], {}), '(template[0])\n', (1925, 1938), False, 'from PIL import Image\n'), ((1950, 1973), 'PIL.Image.open', 'Image.open', (['template[1]'], {}), '(template[1])\n', (1960, 1973), False, 'from PIL import Image\n'), ((860, 886), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (876, 886), False, 'import os\n'), ((2143, 2173), 'PIL.Image.new', 'Image.new', (['"""RGB"""', 'diffuse.size'], {}), "('RGB', diffuse.size)\n", (2152, 2173), False, 'from PIL import Image\n')] |
viz4biz/PyDataNYC2015 | tutorial/test_env.py | 066154ea9f1837c355e6108a28b85889f3020da3 | """
test local env
"""
import os
for k, v in os.environ.iteritems():
print k, '=', v
| [] |
filipefborba/MarriageNSFG | project2/marriage.py | d550301fbb9d80ddabf391a6168d2c8636113ed9 | """This file contains code for use with "Think Stats",
by Allen B. Downey, available from greenteapress.com
Copyright 2014 Allen B. Downey
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
from __future__ import print_function, division
import bisect
import numpy as np
import pandas as pd
import scipy.stats
import gzip
import matplotlib.pyplot as plt
from collections import defaultdict
from collections import OrderedDict
from collections import Counter
import thinkstats2
import thinkplot
import survival
def ResampleResps(resps, remove_missing=False, jitter=0):
"""Resamples each dataframe and then concats them.
resps: list of DataFrame
returns: DataFrame
"""
# we have to resample the data from each cycle separately
samples = [ResampleRowsWeighted(resp) for resp in resps]
# then join the cycles into one big sample
sample = pd.concat(samples, ignore_index=True, sort=False)
# remove married people with unknown marriage dates
if remove_missing:
sample = sample[~sample.missing]
# jittering the ages reflects the idea that the resampled people
# are not identical to the actual respondents
if jitter:
Jitter(sample, 'age', jitter=jitter)
Jitter(sample, 'agemarry', jitter=jitter)
        DigitizeResp(sample)
return sample
def ResampleRowsWeighted(df, column='finalwgt'):
"""Resamples the rows in df in accordance with a weight column.
df: DataFrame
returns: DataFrame
"""
    weights = df[column].copy()
weights /= sum(weights)
indices = np.random.choice(df.index, len(df), replace=True, p=weights)
return df.loc[indices]
def Jitter(df, column, jitter=1):
"""Adds random noise to a column.
df: DataFrame
column: string column name
jitter: standard deviation of noise
"""
df[column] += np.random.uniform(-jitter, jitter, size=len(df))
def EstimateSurvival(resp, cutoff=None):
"""Estimates the survival curve.
resp: DataFrame of respondents
cutoff: where to truncate the estimated functions
returns: pair of HazardFunction, SurvivalFunction
"""
complete = resp.loc[resp.complete, 'complete_var'].dropna()
ongoing = resp.loc[~resp.complete, 'ongoing_var'].dropna()
hf = survival.EstimateHazardFunction(complete, ongoing)
if cutoff:
hf.Truncate(cutoff)
sf = hf.MakeSurvival()
return hf, sf
def PropensityMatch(target, group, colname='agemarry'):
"""Choose a random subset of `group` to matches propensity with `target`.
target: DataFrame
group: DataFrame
colname: string name of column with propensity scores
returns: DataFrame with sample of rows from `group`
"""
rv = scipy.stats.norm(scale=1)
values = group[colname].fillna(100)
def ChooseIndex(value):
weights = rv.pdf(values-value)
weights /= sum(weights)
return np.random.choice(group.index, 1, p=weights)[0]
indices = [ChooseIndex(value) for value in target[colname]]
return group.loc[indices]
def EstimateSurvivalByCohort(resps, iters=101,
cutoffs=None, predict_flag=False,
prop_match=None, error_rate=0):
"""Makes survival curves for resampled data.
resps: list of DataFrames
iters: number of resamples to plot
predict_flag: whether to also plot predictions
cutoffs: map from cohort to the first unreliable age_index
returns: map from group name to list of survival functions
"""
    if cutoffs is None:
cutoffs = {}
sf_map = defaultdict(list)
# iters is the number of resampling runs to make
for i in range(iters):
sample = ResampleResps(resps)
# group by decade
grouped = sample.groupby('birth_index')
if prop_match:
last = grouped.get_group(prop_match)
# and estimate (hf, sf) for each group
hf_map = OrderedDict()
for name, group in iter(grouped):
if prop_match:
group = PropensityMatch(last, group)
if error_rate:
AddErrors(group, 'complete_missing', error_rate)
AddErrors(group, 'ongoing_missing', error_rate)
# the amount of missing data is small; I think it is better
# to drop it than to fill with random data
#FillMissingColumn(group, 'complete_var', 'complete_missing')
#FillMissingColumn(group, 'ongoing_var', 'ongoing_missing')
cutoff = cutoffs.get(name, 100)
hf_map[name] = EstimateSurvival(group, cutoff)
# make predictions if desired
if predict_flag:
MakePredictions(hf_map)
# extract the sf from each pair and accumulate the results
for name, (hf, sf) in hf_map.items():
sf_map[name].append(sf)
return sf_map
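    # Typical call (illustrative): sf_map = EstimateSurvivalByCohort([resp6, resp7], iters=101),
    # where each resp DataFrame comes from one of the ReadFemResp* readers defined below.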
def AddErrors(group, colname, error_rate):
"""
NOTE: This will not work if there are actual missing values!
"""
group[colname] = np.random.random(len(group)) < error_rate
def FillMissingColumn(group, colname, missing_colname):
"""Fills missing values of the given column.
group: DataFrame
colname: string
"""
null = group[group[missing_colname]]
if len(null) == 0:
return
# print(len(null), len(group))
valid = group[colname].dropna()
fill = valid.sample(len(null), replace=True)
fill.index = null.index
group[colname].fillna(fill, inplace=True)
def PlotSurvivalFunctions(sf_map, predict_flag=False, colormap=None):
"""Plot estimated survival functions.
sf_map: map from group name to sequence of survival functions
predict_flag: whether the lines are predicted or actual
colormap: map from group name to color
"""
for name, sf_seq in sorted(sf_map.items(), reverse=True):
if len(sf_seq) == 0:
continue
sf = sf_seq[0]
if len(sf) == 0:
continue
ts, rows = MakeSurvivalCI(sf_seq, [10, 50, 90])
thinkplot.FillBetween(ts, rows[0], rows[2], color='gray', alpha=0.2)
if not predict_flag:
if colormap:
color = colormap[name]
thinkplot.Plot(ts, rows[1], label='%ds'%name, color=color)
else:
thinkplot.Plot(ts, rows[1], label='%ds'%name)
def MakePredictions(hf_map):
"""Extends a set of hazard functions and recomputes survival functions.
For each group in hf_map, we extend hf and recompute sf.
hf_map: map from group name to (HazardFunction, SurvivalFunction)
"""
names = list(hf_map.keys())
names.sort()
hfs = [hf_map[name][0] for name in names]
# extend each hazard function using data from the previous cohort,
# and update the survival function
for i, name in enumerate(names):
hf, sf = hf_map[name]
if i > 0:
hf.Extend(hfs[i-1])
sf = hf.MakeSurvival()
hf_map[name] = hf, sf
def MakeSurvivalCI(sf_seq, percents):
"""Makes confidence intervals from a list of survival functions.
sf_seq: list of SurvivalFunction
percents: list of percentiles to select, like [5, 95]
returns: (ts, rows) where ts is a sequence of times and
rows contains one row of values for each percent
"""
# find the union of all ts where the sfs are evaluated
ts = set()
for sf in sf_seq:
ts |= set(sf.ts)
ts = list(ts)
ts.sort()
# evaluate each sf at all times
ss_seq = [sf.Probs(ts) for sf in sf_seq if len(sf) > 0]
# return the requested percentiles from each column
rows = thinkstats2.PercentileRows(ss_seq, percents)
return ts, rows
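    # Sketch of the idea: every resampled survival curve is evaluated on the union
    # of all observed times, and each row of `rows` is one percentile curve, e.g.
    # percents=[10, 50, 90] yields a median curve plus an 80% band (10th-90th percentile).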
def ReadFemResp1982():
"""Reads respondent data from NSFG Cycle 3.
returns: DataFrame
"""
dat_file = '1982NSFGData.dat.gz'
names = ['finalwgt', 'ageint', 'mar2p', 'cmmarrhx', 'fmarital',
'cmintvw', 'cmbirth', 'f18m1', 'cmdivorcx', 'cmstphsbx', 'fmarno']
colspecs = [(976-1, 982),
(1001-1, 1002),
(1268-1, 1271),
(1037-1, 1040),
(1041-1, 1041),
(841-1, 844),
(12-1, 15),
(606-1, 606),
(619-1, 622),
(625-1, 628),
(1142-1, 1143),
]
df = pd.read_fwf(dat_file,
colspecs=colspecs,
names=names,
header=None,
nrows=7969,
compression='gzip')
df.cmintvw.replace([9797, 9898, 9999], np.nan, inplace=True)
df.cmbirth.replace([9797, 9898, 9999], np.nan, inplace=True)
df.cmmarrhx.replace([9797, 9898, 9999], np.nan, inplace=True)
df.cmdivorcx.replace([9797, 9898, 9999], np.nan, inplace=True)
df.cmstphsbx.replace([9797, 9898, 9999], np.nan, inplace=True)
df.f18m1.replace([7, 8, 9], np.nan, inplace=True)
# CM values above 9000 indicate month unknown
df.loc[df.cmintvw>9000, 'cmintvw'] -= 9000
df.loc[df.cmbirth>9000, 'cmbirth'] -= 9000
df.loc[df.cmmarrhx>9000, 'cmmarrhx'] -= 9000
df.loc[df.cmdivorcx>9000, 'cmdivorcx'] -= 9000
df.loc[df.cmstphsbx>9000, 'cmstphsbx'] -= 9000
df['evrmarry'] = (df.fmarno > 0)
df['divorced'] = (df.f18m1 == 4)
df['separated'] = (df.f18m1 == 5)
df['widowed'] = (df.f18m1 == 3)
df['stillma'] = (df.fmarno==1) & (df.fmarital==1)
df['cycle'] = 3
CleanResp(df)
return df
def ReadFemResp1988():
"""Reads respondent data from NSFG Cycle 4.
Read as if were a standard ascii file
returns: DataFrame
"""
filename = '1988FemRespDataLines.dat.gz'
names = ['finalwgt', 'ageint', 'currentcm',
'firstcm', 'cmintvw', 'cmbirth',
'f23m1', 'cmdivorcx', 'cmstphsbx', 'fmarno']
colspecs = [(2568-1, 2574),
(36-1, 37),
(1521-1, 1525),
(1538-1, 1542),
(12-1, 16),
(26-1, 30),
(1554-1, 1554),
(1565-1, 1569),
(1570-1, 1574),
(2441-1, 2442),
]
df = pd.read_fwf(filename,
colspecs=colspecs,
names=names,
header=None,
compression='gzip')
df.cmintvw.replace([0, 99999], np.nan, inplace=True)
df.cmbirth.replace([0, 99999], np.nan, inplace=True)
df.firstcm.replace([0, 99999], np.nan, inplace=True)
df.currentcm.replace([0, 99999], np.nan, inplace=True)
df.cmdivorcx.replace([0, 99999], np.nan, inplace=True)
df.cmstphsbx.replace([0, 99999], np.nan, inplace=True)
# CM values above 9000 indicate month unknown
df.loc[df.cmintvw>90000, 'cmintvw'] -= 90000
df.loc[df.cmbirth>90000, 'cmbirth'] -= 90000
df.loc[df.firstcm>90000, 'firstcm'] -= 90000
df.loc[df.currentcm>90000, 'currentcm'] -= 90000
df.loc[df.cmdivorcx>90000, 'cmdivorcx'] -= 90000
df.loc[df.cmstphsbx>90000, 'cmstphsbx'] -= 90000
# combine current and first marriage
df['cmmarrhx'] = df.firstcm
df.cmmarrhx.fillna(df.currentcm, inplace=True)
# define evrmarry if either currentcm or firstcm is non-zero
df['evrmarry'] = (df.fmarno > 0)
df['divorced'] = (df.f23m1==2)
df['separated'] = (df.f23m1==3)
df['widowed'] = (df.f23m1==1)
df['stillma'] = (df.fmarno==1) & (df.f23m1.isnull())
df['cycle'] = 4
CleanResp(df)
return df
def ReadFemResp1995():
"""Reads respondent data from NSFG Cycle 5.
returns: DataFrame
"""
dat_file = '1995FemRespData.dat.gz'
names = ['cmintvw', 'timesmar', 'cmmarrhx', 'cmbirth', 'finalwgt',
'marend01', 'cmdivorcx', 'cmstphsbx', 'marstat']
colspecs = [(12360-1, 12363),
(4637-1, 4638),
(11759-1, 11762),
(14-1, 16),
(12350-1, 12359),
(4713-1, 4713),
(4718-1, 4721),
(4722-1, 4725),
(17-1, 17)]
df = pd.read_fwf(dat_file,
compression='gzip',
colspecs=colspecs,
names=names)
invalid = [9997, 9998, 9999]
df.cmintvw.replace(invalid, np.nan, inplace=True)
df.cmbirth.replace(invalid, np.nan, inplace=True)
df.cmmarrhx.replace(invalid, np.nan, inplace=True)
df.cmdivorcx.replace(invalid, np.nan, inplace=True)
df.cmstphsbx.replace(invalid, np.nan, inplace=True)
df.timesmar.replace([98, 99], np.nan, inplace=True)
df['evrmarry'] = (df.timesmar > 0)
df['divorced'] = (df.marend01==1)
df['separated'] = (df.marend01==2)
df['widowed'] = (df.marend01==3)
df['stillma'] = (df.timesmar==1) & (df.marend01.isnull())
df['cycle'] = 5
CleanResp(df)
return df
def ReadFemResp2002():
"""Reads respondent data from NSFG Cycle 6.
returns: DataFrame
"""
usecols = ['caseid', 'cmmarrhx', 'cmdivorcx', 'cmbirth', 'cmintvw',
'evrmarry', 'parity', 'finalwgt',
'mardat01', 'marend01', 'mardis01', 'rmarital',
'fmarno', 'mar1diss']
df = ReadResp('2002FemResp.dct', '2002FemResp.dat.gz', usecols=usecols)
invalid = [9997, 9998, 9999]
df.cmintvw.replace(invalid, np.nan, inplace=True)
df.cmbirth.replace(invalid, np.nan, inplace=True)
df.cmmarrhx.replace(invalid, np.nan, inplace=True)
df['evrmarry'] = (df.evrmarry==1)
df['divorced'] = (df.marend01==1)
df['separated'] = (df.marend01==2)
df['widowed'] = (df.marend01==3)
df['stillma'] = (df.fmarno == 1) & (df.rmarital==1)
df['cycle'] = 6
CleanResp(df)
return df
def ReadFemResp2010():
"""Reads respondent data from NSFG Cycle 7.
returns: DataFrame
"""
usecols = ['caseid', 'cmmarrhx', 'cmdivorcx', 'cmbirth', 'cmintvw',
'evrmarry', 'parity', 'wgtq1q16',
'mardat01', 'marend01', 'mardis01', 'rmarital',
'fmarno', 'mar1diss']
df = ReadResp('2006_2010_FemRespSetup.dct',
'2006_2010_FemResp.dat.gz',
usecols=usecols)
invalid = [9997, 9998, 9999]
df.cmintvw.replace(invalid, np.nan, inplace=True)
df.cmbirth.replace(invalid, np.nan, inplace=True)
df.cmmarrhx.replace(invalid, np.nan, inplace=True)
df['evrmarry'] = (df.evrmarry==1)
df['divorced'] = (df.marend01==1)
df['separated'] = (df.marend01==2)
df['widowed'] = (df.marend01==3)
df['stillma'] = (df.fmarno == 1) & (df.rmarital==1)
df['finalwgt'] = df.wgtq1q16
df['cycle'] = 7
CleanResp(df)
return df
def ReadFemResp2013():
"""Reads respondent data from NSFG Cycle 8.
returns: DataFrame
"""
usecols = ['caseid', 'cmmarrhx', 'cmdivorcx', 'cmbirth', 'cmintvw',
'evrmarry', 'parity', 'wgt2011_2013',
'mardat01', 'marend01', 'mardis01', 'rmarital',
'fmarno', 'mar1diss']
df = ReadResp('2011_2013_FemRespSetup.dct',
'2011_2013_FemRespData.dat.gz',
usecols=usecols)
invalid = [9997, 9998, 9999]
df.cmintvw.replace(invalid, np.nan, inplace=True)
df.cmbirth.replace(invalid, np.nan, inplace=True)
df.cmmarrhx.replace(invalid, np.nan, inplace=True)
df['evrmarry'] = (df.evrmarry==1)
df['divorced'] = (df.marend01==1)
df['separated'] = (df.marend01==2)
df['widowed'] = (df.marend01==3)
df['stillma'] = (df.fmarno == 1) & (df.rmarital==1)
df['finalwgt'] = df.wgt2011_2013
df['cycle'] = 8
CleanResp(df)
return df
def ReadFemResp2015():
"""Reads respondent data from NSFG Cycle 9.
returns: DataFrame
"""
usecols = ['caseid', 'cmmarrhx', 'cmdivorcx', 'cmbirth', 'cmintvw',
'evrmarry', 'parity', 'wgt2013_2015',
'mardat01', 'marend01', 'mardis01', 'rmarital',
'fmarno', 'mar1diss']
df = ReadResp('2013_2015_FemRespSetup.dct',
'2013_2015_FemRespData.dat.gz',
usecols=usecols)
invalid = [9997, 9998, 9999]
df.cmintvw.replace(invalid, np.nan, inplace=True)
df.cmbirth.replace(invalid, np.nan, inplace=True)
df.cmmarrhx.replace(invalid, np.nan, inplace=True)
df['evrmarry'] = (df.evrmarry==1)
df['divorced'] = (df.marend01==1)
df['separated'] = (df.marend01==2)
df['widowed'] = (df.marend01==3)
df['stillma'] = (df.fmarno == 1) & (df.rmarital==1)
df['finalwgt'] = df.wgt2013_2015
df['cycle'] = 9
CleanResp(df)
return df
def ReadFemResp2017():
"""Reads respondent data from NSFG Cycle 10.
returns: DataFrame
"""
# removed 'cmmarrhx', 'cmdivorcx', 'cmbirth',
usecols = ['caseid', 'cmintvw', 'ager',
'evrmarry', 'parity', 'wgt2015_2017',
'mardat01', 'marend01', 'mardis01', 'rmarital',
'fmarno', 'mar1diss']
df = ReadResp('2015_2017_FemRespSetup.dct',
'2015_2017_FemRespData.dat.gz',
usecols=usecols)
invalid = [9997, 9998, 9999]
df.cmintvw.replace(invalid, np.nan, inplace=True)
#df.cmbirth.replace(invalid, np.nan, inplace=True)
#df.cmmarrhx.replace(invalid, np.nan, inplace=True)
# since cmbirth and cmmarrhx are no longer included,
# we have to compute them based on other variables;
# the result can be off by up to 12 months
df['cmbirth'] = df.cmintvw - df.ager*12
df['cmmarrhx'] = (df.mardat01-1900) * 12
df['evrmarry'] = (df.evrmarry==1)
df['divorced'] = (df.marend01==1)
df['separated'] = (df.marend01==2)
df['widowed'] = (df.marend01==3)
df['stillma'] = (df.fmarno == 1) & (df.rmarital==1)
df['finalwgt'] = df.wgt2015_2017
df['cycle'] = 10
# Instead of calling CleanResp, we have to customize
#CleanResp(df)
df['agemarry'] = (df.cmmarrhx - df.cmbirth) / 12.0
df['age'] = (df.cmintvw - df.cmbirth) / 12.0
# if married, we need agemarry; if not married, we need age
df['missing'] = np.where(df.evrmarry,
df.agemarry.isnull(),
df.age.isnull())
month0 = pd.to_datetime('1899-12-15')
dates = [month0 + pd.DateOffset(months=cm)
for cm in df.cmbirth]
df['year'] = (pd.DatetimeIndex(dates).year - 1900)
DigitizeResp(df)
return df
def ReadResp(dct_file, dat_file, **options):
"""Reads the NSFG respondent data.
dct_file: string file name
dat_file: string file name
returns: DataFrame
"""
dct = thinkstats2.ReadStataDct(dct_file, encoding='iso-8859-1')
df = dct.ReadFixedWidth(dat_file, compression='gzip', **options)
return df
def CleanResp(resp):
"""Cleans a respondent DataFrame.
resp: DataFrame of respondents
Adds columns: agemarry, age, decade, fives
"""
resp['agemarry'] = (resp.cmmarrhx - resp.cmbirth) / 12.0
resp['age'] = (resp.cmintvw - resp.cmbirth) / 12.0
# if married, we need agemarry; if not married, we need age
resp['missing'] = np.where(resp.evrmarry,
resp.agemarry.isnull(),
resp.age.isnull())
month0 = pd.to_datetime('1899-12-15')
dates = [month0 + pd.DateOffset(months=cm)
for cm in resp.cmbirth]
resp['year'] = (pd.DatetimeIndex(dates).year - 1900)
#resp['decade'] = resp.year // 10
#resp['fives'] = resp.year // 5
DigitizeResp(resp)
def DigitizeResp(df):
"""Computes indices for age, agemarry, and birth year.
Groups each of these variables into bins and then assigns
an index to each bin.
For example, anyone between 30 and 30.99 year old is
assigned age_index 30. Anyone born in the 80s is given
the year_index 80.
This function allows me to run the analysis with different
levels of granularity.
df: DataFrame
"""
age_min = 10
age_max = 55
age_step = 1
age_bins = np.arange(age_min, age_max, age_step)
year_min = 0
year_max = 120
year_step = 10
year_bins = np.arange(year_min, year_max, year_step)
df['age_index'] = np.digitize(df.age, age_bins) * age_step
df.age_index += age_min - age_step
df.loc[df.age.isnull(), 'age_index'] = np.nan
df['agemarry_index'] = np.digitize(df.agemarry, age_bins) * age_step
df.agemarry_index += age_min - age_step
df.loc[df.agemarry.isnull(), 'agemarry_index'] = np.nan
df['birth_index'] = np.digitize(df.year, year_bins) * year_step
df.birth_index += year_min - year_step
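    # Worked example: with age_step=1 an age of 30.5 falls in the [30, 31) bin and
    # gets age_index 30; with year_step=10 a year of 85 (i.e. 1985) falls in the
    # [80, 90) bin and gets birth_index 80.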
def ReadCanadaCycle5():
"""
"""
#age at first marriage: CC232
#age of respondent at interview: C3
#final weight: C1
#marital status: C5
#Respondent every married: CC227
pass
def ReadCanadaCycle6():
"""
"""
#age at first marriage: CC232
#age of respondent at interview: C3
#final weight: C1
#marital status: C5
#Respondent every married: CC227
pass
def ReadMaleResp2002():
"""Reads respondent data from NSFG Cycle 6.
returns: DataFrame
"""
usecols = ['caseid', 'mardat01', 'cmdivw', 'cmbirth', 'cmintvw',
'evrmarry', 'finalwgt', 'fmarit', 'timesmar', 'marrend4',
#'marrend', 'marrend2', 'marrend3', marrend5', 'marrend6',
]
df = ReadResp('2002Male.dct', '2002Male.dat.gz', usecols=usecols)
#df.marrend.replace([8,9], np.nan, inplace=True)
#df.marrend2.replace([8,9], np.nan, inplace=True)
#df.marrend3.replace([8,9], np.nan, inplace=True)
df.marrend4.replace([8,9], np.nan, inplace=True)
#df.marrend5.replace([8,9], np.nan, inplace=True)
#df.marrend6.replace([8,9], np.nan, inplace=True)
df.timesmar.replace([98,99], np.nan, inplace=True)
# the way marriage ends are recorded is really confusing,
# but it looks like marrend4 is the end of the first marriage.
df['marend01'] = df.marrend4
df['cmmarrhx'] = df.mardat01
df['evrmarry'] = (df.timesmar > 0)
df['divorced'] = (df.marend01==2) | (df.marend01==3)
df['separated'] = (df.marend01==4)
df['widowed'] = (df.marend01==1)
df['stillma'] = (df.timesmar== 1) & (df.fmarit==1)
df['cycle'] = 6
CleanResp(df)
return df
def ReadMaleResp2010():
"""Reads respondent data from NSFG Cycle 7.
returns: DataFrame
"""
usecols = ['caseid', 'mardat01', 'cmdivw', 'cmbirth', 'cmintvw',
'evrmarry', 'wgtq1q16',
'marend01', 'rmarital', 'fmarno', 'mar1diss']
df = ReadResp('2006_2010_MaleSetup.dct',
'2006_2010_Male.dat.gz',
usecols=usecols)
df['cmmarrhx'] = df.mardat01
df['evrmarry'] = (df.evrmarry==1)
df['divorced'] = (df.marend01==1)
df['separated'] = (df.marend01==2)
df['widowed'] = (df.marend01==3)
df['stillma'] = (df.fmarno == 1) & (df.rmarital==1)
df['finalwgt'] = df.wgtq1q16
df['cycle'] = 7
CleanResp(df)
return df
def ReadMaleResp2013():
"""Reads respondent data from NSFG Cycle 8.
returns: DataFrame
"""
usecols = ['caseid', 'mardat01', 'cmdivw', 'cmbirth', 'cmintvw',
'evrmarry', 'wgt2011_2013',
'marend01', 'rmarital', 'fmarno', 'mar1diss']
df = ReadResp('2011_2013_MaleSetup.dct',
'2011_2013_MaleData.dat.gz',
usecols=usecols)
df['cmmarrhx'] = df.mardat01
df['evrmarry'] = (df.evrmarry==1)
df['divorced'] = (df.marend01==1)
df['separated'] = (df.marend01==2)
df['widowed'] = (df.marend01==3)
df['stillma'] = (df.fmarno == 1) & (df.rmarital==1)
df['finalwgt'] = df.wgt2011_2013
df['cycle'] = 8
CleanResp(df)
return df
def ReadMaleResp2015():
"""Reads respondent data from NSFG Cycle 9.
returns: DataFrame
"""
usecols = ['caseid', 'mardat01', 'cmdivw', 'cmbirth', 'cmintvw',
'evrmarry', 'wgt2013_2015',
'marend01', 'rmarital', 'fmarno', 'mar1diss']
df = ReadResp('2013_2015_MaleSetup.dct',
'2013_2015_MaleData.dat.gz',
usecols=usecols)
df['cmmarrhx'] = df.mardat01
df['evrmarry'] = (df.evrmarry==1)
df['divorced'] = (df.marend01==1)
df['separated'] = (df.marend01==2)
df['widowed'] = (df.marend01==3)
df['stillma'] = (df.fmarno == 1) & (df.rmarital==1)
df['finalwgt'] = df.wgt2013_2015
df['cycle'] = 9
CleanResp(df)
return df
def ReadMaleResp2017():
"""Reads respondent data from NSFG Cycle 10.
returns: DataFrame
"""
usecols = ['caseid', 'mardat01', 'cmintvw', 'ager',
'evrmarry', 'wgt2015_2017',
'marend01', 'rmarital', 'fmarno', 'mar1diss']
df = ReadResp('2015_2017_MaleSetup.dct',
'2015_2017_MaleData.dat.gz',
usecols=usecols)
# since cmbirth and cmmarrhx are no longer included,
# we have to compute them based on other variables;
# the result can be off by up to 12 months
df['cmbirth'] = df.cmintvw - df.ager*12
df['cmmarrhx'] = (df.mardat01-1900) * 12
df['evrmarry'] = (df.evrmarry==1)
df['divorced'] = (df.marend01==1)
df['separated'] = (df.marend01==2)
df['widowed'] = (df.marend01==3)
df['stillma'] = (df.fmarno == 1) & (df.rmarital==1)
df['finalwgt'] = df.wgt2015_2017
df['cycle'] = 10
# Instead of calling CleanResp, we have to customize
#CleanResp(df)
df['agemarry'] = (df.cmmarrhx - df.cmbirth) / 12.0
df['age'] = (df.cmintvw - df.cmbirth) / 12.0
# if married, we need agemarry; if not married, we need age
df['missing'] = np.where(df.evrmarry,
df.agemarry.isnull(),
df.age.isnull())
month0 = pd.to_datetime('1899-12-15')
dates = [month0 + pd.DateOffset(months=cm)
for cm in df.cmbirth]
df['year'] = (pd.DatetimeIndex(dates).year - 1900)
DigitizeResp(df)
return df
def Validate1982(df):
assert(len(df) == 7969)
assert(len(df[df.evrmarry]) == 4651)
assert(df.agemarry.value_counts().max() == 71)
def Validate1988(df):
assert(len(df) == 8450)
assert(len(df[df.evrmarry]) == 5290)
assert(df.agemarry.value_counts().max() == 73)
def Validate1995(df):
assert(len(df) == 10847)
assert(len(df[df.evrmarry]) == 6841)
assert(df.agemarry.value_counts().max() == 79)
def Validate2002(df):
assert(len(df) == 7643)
assert(sum(df.evrmarry) == 4126)
assert(df.agemarry.value_counts().max() == 45)
def Validate2010(df):
assert(len(df) == 12279)
assert(sum(df.evrmarry) == 5534)
assert(df.agemarry.value_counts().max() == 64)
def Validate2013(df):
assert(len(df) == 5601)
assert(sum(df.evrmarry) == 2452)
assert(df.agemarry.value_counts().max() == 33)
def Validate2015(df):
assert(len(df) == 5699)
assert(sum(df.evrmarry) == 2401)
assert(df.agemarry.value_counts().max() == 25)
def Validate2017(df):
assert(len(df) == 5554)
assert(sum(df.evrmarry) == 2582)
assert(df.agemarry.value_counts().max() == 29)
def main():
print('Cycle 10')
resp10 = ReadFemResp2017()
Validate2017(resp10)
print('Cycle 9')
resp9 = ReadFemResp2015()
Validate2015(resp9)
print('Cycle 8')
resp8 = ReadFemResp2013()
Validate2013(resp8)
print('Cycle 7')
resp7 = ReadFemResp2010()
Validate2010(resp7)
print('Cycle 6')
resp6 = ReadFemResp2002()
Validate2002(resp6)
print('Cycle 5')
resp5 = ReadFemResp1995()
Validate1995(resp5)
print('Cycle 4')
resp4 = ReadFemResp1988()
Validate1988(resp4)
print('Cycle 3')
resp3 = ReadFemResp1982()
Validate1982(resp3)
if __name__ == '__main__':
main()
| [((888, 937), 'pandas.concat', 'pd.concat', (['samples'], {'ignore_index': '(True)', 'sort': '(False)'}), '(samples, ignore_index=True, sort=False)\n', (897, 937), True, 'import pandas as pd\n'), ((2280, 2330), 'survival.EstimateHazardFunction', 'survival.EstimateHazardFunction', (['complete', 'ongoing'], {}), '(complete, ongoing)\n', (2311, 2330), False, 'import survival\n'), ((3592, 3609), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (3603, 3609), False, 'from collections import defaultdict\n'), ((7647, 7691), 'thinkstats2.PercentileRows', 'thinkstats2.PercentileRows', (['ss_seq', 'percents'], {}), '(ss_seq, percents)\n', (7673, 7691), False, 'import thinkstats2\n'), ((8368, 8471), 'pandas.read_fwf', 'pd.read_fwf', (['dat_file'], {'colspecs': 'colspecs', 'names': 'names', 'header': 'None', 'nrows': '(7969)', 'compression': '"""gzip"""'}), "(dat_file, colspecs=colspecs, names=names, header=None, nrows=\n 7969, compression='gzip')\n", (8379, 8471), True, 'import pandas as pd\n'), ((10211, 10301), 'pandas.read_fwf', 'pd.read_fwf', (['filename'], {'colspecs': 'colspecs', 'names': 'names', 'header': 'None', 'compression': '"""gzip"""'}), "(filename, colspecs=colspecs, names=names, header=None,\n compression='gzip')\n", (10222, 10301), True, 'import pandas as pd\n'), ((12108, 12181), 'pandas.read_fwf', 'pd.read_fwf', (['dat_file'], {'compression': '"""gzip"""', 'colspecs': 'colspecs', 'names': 'names'}), "(dat_file, compression='gzip', colspecs=colspecs, names=names)\n", (12119, 12181), True, 'import pandas as pd\n'), ((18226, 18254), 'pandas.to_datetime', 'pd.to_datetime', (['"""1899-12-15"""'], {}), "('1899-12-15')\n", (18240, 18254), True, 'import pandas as pd\n'), ((18620, 18677), 'thinkstats2.ReadStataDct', 'thinkstats2.ReadStataDct', (['dct_file'], {'encoding': '"""iso-8859-1"""'}), "(dct_file, encoding='iso-8859-1')\n", (18644, 18677), False, 'import thinkstats2\n'), ((19260, 19288), 'pandas.to_datetime', 'pd.to_datetime', (['"""1899-12-15"""'], {}), "('1899-12-15')\n", (19274, 19288), True, 'import pandas as pd\n'), ((20024, 20061), 'numpy.arange', 'np.arange', (['age_min', 'age_max', 'age_step'], {}), '(age_min, age_max, age_step)\n', (20033, 20061), True, 'import numpy as np\n'), ((20134, 20174), 'numpy.arange', 'np.arange', (['year_min', 'year_max', 'year_step'], {}), '(year_min, year_max, year_step)\n', (20143, 20174), True, 'import numpy as np\n'), ((25842, 25870), 'pandas.to_datetime', 'pd.to_datetime', (['"""1899-12-15"""'], {}), "('1899-12-15')\n", (25856, 25870), True, 'import pandas as pd\n'), ((3941, 3954), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (3952, 3954), False, 'from collections import OrderedDict\n'), ((6042, 6110), 'thinkplot.FillBetween', 'thinkplot.FillBetween', (['ts', 'rows[0]', 'rows[2]'], {'color': '"""gray"""', 'alpha': '(0.2)'}), "(ts, rows[0], rows[2], color='gray', alpha=0.2)\n", (6063, 6110), False, 'import thinkplot\n'), ((20198, 20227), 'numpy.digitize', 'np.digitize', (['df.age', 'age_bins'], {}), '(df.age, age_bins)\n', (20209, 20227), True, 'import numpy as np\n'), ((20356, 20390), 'numpy.digitize', 'np.digitize', (['df.agemarry', 'age_bins'], {}), '(df.agemarry, age_bins)\n', (20367, 20390), True, 'import numpy as np\n'), ((20531, 20562), 'numpy.digitize', 'np.digitize', (['df.year', 'year_bins'], {}), '(df.year, year_bins)\n', (20542, 20562), True, 'import numpy as np\n'), ((2913, 2956), 'numpy.random.choice', 'np.random.choice', (['group.index', '(1)'], {'p': 'weights'}), '(group.index, 1, 
p=weights)\n', (2929, 2956), True, 'import numpy as np\n'), ((18277, 18301), 'pandas.DateOffset', 'pd.DateOffset', ([], {'months': 'cm'}), '(months=cm)\n', (18290, 18301), True, 'import pandas as pd\n'), ((18355, 18378), 'pandas.DatetimeIndex', 'pd.DatetimeIndex', (['dates'], {}), '(dates)\n', (18371, 18378), True, 'import pandas as pd\n'), ((19311, 19335), 'pandas.DateOffset', 'pd.DateOffset', ([], {'months': 'cm'}), '(months=cm)\n', (19324, 19335), True, 'import pandas as pd\n'), ((19393, 19416), 'pandas.DatetimeIndex', 'pd.DatetimeIndex', (['dates'], {}), '(dates)\n', (19409, 19416), True, 'import pandas as pd\n'), ((25893, 25917), 'pandas.DateOffset', 'pd.DateOffset', ([], {'months': 'cm'}), '(months=cm)\n', (25906, 25917), True, 'import pandas as pd\n'), ((25975, 25998), 'pandas.DatetimeIndex', 'pd.DatetimeIndex', (['dates'], {}), '(dates)\n', (25991, 25998), True, 'import pandas as pd\n'), ((6221, 6281), 'thinkplot.Plot', 'thinkplot.Plot', (['ts', 'rows[1]'], {'label': "('%ds' % name)", 'color': 'color'}), "(ts, rows[1], label='%ds' % name, color=color)\n", (6235, 6281), False, 'import thinkplot\n'), ((6314, 6361), 'thinkplot.Plot', 'thinkplot.Plot', (['ts', 'rows[1]'], {'label': "('%ds' % name)"}), "(ts, rows[1], label='%ds' % name)\n", (6328, 6361), False, 'import thinkplot\n')] |
ErwinP/cctbx_project | xfel/merging/application/reflection_table_utils.py | 58f9fb5ed38c7391510e892f0ca9520467b692c1 | from __future__ import absolute_import, division, print_function
from six.moves import range
from dials.array_family import flex
import math
class reflection_table_utils(object):
@staticmethod
def get_next_hkl_reflection_table(reflections):
'''Generate asu hkl slices from an asu hkl-sorted reflection table'''
if reflections.size() == 0:
yield reflections
i_begin = 0
hkl_ref = reflections[0].get('miller_index_asymmetric')
for i in range(reflections.size()):
hkl = reflections[i].get('miller_index_asymmetric')
if hkl == hkl_ref:
continue
else:
yield reflections[i_begin:i]
i_begin = i
hkl_ref = hkl
yield reflections[i_begin:i+1]
@staticmethod
def select_odd_experiment_reflections(reflections):
'Select reflections from experiments with odd ids. An experiment id must be a string representing a hexadecimal number'
sel = flex.bool()
for refl in reflections:
sel.append(int(refl['exp_id'], 16)%2 != 0)
return reflections.select(sel)
@staticmethod
def select_even_experiment_reflections(reflections):
'Select reflections from experiments with even ids. An experiment id must be a string representing a hexadecimal number'
sel = flex.bool()
for refl in reflections:
sel.append(int(refl['exp_id'], 16)%2 == 0)
return reflections.select(sel)
@staticmethod
def merged_reflection_table():
'''Create a reflection table for storing merged HKLs'''
table = flex.reflection_table()
table['miller_index'] = flex.miller_index()
table['intensity'] = flex.double()
table['sigma'] = flex.double()
table['multiplicity'] = flex.int()
return table
@staticmethod
def merge_reflections(reflections, min_multiplicity):
'''Merge intensities of multiply-measured symmetry-reduced HKLs'''
merged_reflections = reflection_table_utils.merged_reflection_table()
for refls in reflection_table_utils.get_next_hkl_reflection_table(reflections=reflections):
if refls.size() == 0:
break # unless the input "reflections" list is empty, generated "refls" lists cannot be empty
hkl = refls[0]['miller_index_asymmetric']
# This assert is timeconsuming when using a small number of cores
#assert not (hkl in merged_reflections['miller_index']) # i.e. assert that the input reflection table came in sorted
refls = refls.select(refls['intensity.sum.variance'] > 0.0)
if refls.size() >= min_multiplicity:
weighted_intensity_array = refls['intensity.sum.value'] / refls['intensity.sum.variance']
weights_array = flex.double(refls.size(), 1.0) / refls['intensity.sum.variance']
weighted_mean_intensity = flex.sum(weighted_intensity_array) / flex.sum(weights_array)
standard_error_of_weighted_mean_intensity = 1.0/math.sqrt(flex.sum(weights_array))
merged_reflections.append(
{'miller_index' : hkl,
'intensity' : weighted_mean_intensity,
'sigma' : standard_error_of_weighted_mean_intensity,
'multiplicity' : refls.size()})
return merged_reflections
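  # The merge above is an inverse-variance weighted mean; with w_i = 1/var_i:
  #   I_merged     = sum(w_i * I_i) / sum(w_i)
  #   sigma_merged = 1 / sqrt(sum(w_i))
  # so well-measured observations (small variance) dominate the merged intensity.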
@staticmethod
def prune_reflection_table_keys(reflections, keys_to_delete=None, keys_to_keep=None):
'''Remove reflection table keys: either inclusive or exclusive'''
if len(reflections) != 0:
all_keys = list()
for key in reflections[0]:
all_keys.append(key)
if keys_to_delete != None:
for key in keys_to_delete:
if key in all_keys:
del reflections[key]
elif keys_to_keep != None:
for key in all_keys:
#if not key in ['intensity.sum.value', 'intensity.sum.variance', 'miller_index', 'miller_index_asymmetric', 'exp_id', 'odd_frame', 's1']:
if not key in keys_to_keep:
del reflections[key]
return reflections
| [((926, 937), 'dials.array_family.flex.bool', 'flex.bool', ([], {}), '()\n', (935, 937), False, 'from dials.array_family import flex\n'), ((1258, 1269), 'dials.array_family.flex.bool', 'flex.bool', ([], {}), '()\n', (1267, 1269), False, 'from dials.array_family import flex\n'), ((1505, 1528), 'dials.array_family.flex.reflection_table', 'flex.reflection_table', ([], {}), '()\n', (1526, 1528), False, 'from dials.array_family import flex\n'), ((1557, 1576), 'dials.array_family.flex.miller_index', 'flex.miller_index', ([], {}), '()\n', (1574, 1576), False, 'from dials.array_family import flex\n'), ((1602, 1615), 'dials.array_family.flex.double', 'flex.double', ([], {}), '()\n', (1613, 1615), False, 'from dials.array_family import flex\n'), ((1637, 1650), 'dials.array_family.flex.double', 'flex.double', ([], {}), '()\n', (1648, 1650), False, 'from dials.array_family import flex\n'), ((1679, 1689), 'dials.array_family.flex.int', 'flex.int', ([], {}), '()\n', (1687, 1689), False, 'from dials.array_family import flex\n'), ((2728, 2762), 'dials.array_family.flex.sum', 'flex.sum', (['weighted_intensity_array'], {}), '(weighted_intensity_array)\n', (2736, 2762), False, 'from dials.array_family import flex\n'), ((2765, 2788), 'dials.array_family.flex.sum', 'flex.sum', (['weights_array'], {}), '(weights_array)\n', (2773, 2788), False, 'from dials.array_family import flex\n'), ((2855, 2878), 'dials.array_family.flex.sum', 'flex.sum', (['weights_array'], {}), '(weights_array)\n', (2863, 2878), False, 'from dials.array_family import flex\n')] |
jptomo/pypy-lang-scheme | rpython/memory/test/test_transformed_gc.py | 55edb2cec69d78f86793282a4566fcbc1ef9fcac | import py
import inspect
from rpython.rlib.objectmodel import compute_hash, compute_identity_hash
from rpython.translator.c import gc
from rpython.annotator import model as annmodel
from rpython.rtyper.llannotation import SomePtr
from rpython.rtyper.lltypesystem import lltype, llmemory, rffi, llgroup
from rpython.memory.gctransform import framework, shadowstack
from rpython.rtyper.lltypesystem.lloperation import llop, void
from rpython.rlib.objectmodel import compute_unique_id, we_are_translated
from rpython.rlib.debug import ll_assert
from rpython.rlib import rgc
from rpython.conftest import option
from rpython.rlib.rstring import StringBuilder
from rpython.rlib.rarithmetic import LONG_BIT
WORD = LONG_BIT // 8
def rtype(func, inputtypes, specialize=True, gcname='ref',
backendopt=False, **extraconfigopts):
from rpython.translator.translator import TranslationContext
t = TranslationContext()
# XXX XXX XXX mess
t.config.translation.gc = gcname
t.config.translation.gcremovetypeptr = True
t.config.set(**extraconfigopts)
ann = t.buildannotator()
ann.build_types(func, inputtypes)
if specialize:
t.buildrtyper().specialize()
if backendopt:
from rpython.translator.backendopt.all import backend_optimizations
backend_optimizations(t)
if option.view:
t.viewcg()
return t
ARGS = lltype.FixedSizeArray(lltype.Signed, 3)
class GCTest(object):
gcpolicy = None
GC_CAN_MOVE = False
taggedpointers = False
def setup_class(cls):
cls.marker = lltype.malloc(rffi.CArray(lltype.Signed), 1,
flavor='raw', zero=True)
funcs0 = []
funcs2 = []
cleanups = []
name_to_func = {}
mixlevelstuff = []
for fullname in dir(cls):
if not fullname.startswith('define'):
continue
definefunc = getattr(cls, fullname)
_, name = fullname.split('_', 1)
func_fixup = definefunc.im_func(cls)
cleanup = None
if isinstance(func_fixup, tuple):
func, cleanup, fixup = func_fixup
mixlevelstuff.append(fixup)
else:
func = func_fixup
func.func_name = "f_%s" % name
if cleanup:
cleanup.func_name = "clean_%s" % name
nargs = len(inspect.getargspec(func)[0])
name_to_func[name] = len(funcs0)
if nargs == 2:
funcs2.append(func)
funcs0.append(None)
elif nargs == 0:
funcs0.append(func)
funcs2.append(None)
else:
raise NotImplementedError(
"defined test functions should have 0/2 arguments")
# used to let test cleanup static root pointing to runtime
# allocated stuff
cleanups.append(cleanup)
def entrypoint(args):
num = args[0]
func = funcs0[num]
if func:
res = func()
else:
func = funcs2[num]
res = func(args[1], args[2])
cleanup = cleanups[num]
if cleanup:
cleanup()
return res
from rpython.translator.c.genc import CStandaloneBuilder
s_args = SomePtr(lltype.Ptr(ARGS))
t = rtype(entrypoint, [s_args], gcname=cls.gcname,
taggedpointers=cls.taggedpointers)
for fixup in mixlevelstuff:
if fixup:
fixup(t)
cbuild = CStandaloneBuilder(t, entrypoint, config=t.config,
gcpolicy=cls.gcpolicy)
db = cbuild.generate_graphs_for_llinterp()
entrypointptr = cbuild.getentrypointptr()
entrygraph = entrypointptr._obj.graph
if option.view:
t.viewcg()
cls.name_to_func = name_to_func
cls.entrygraph = entrygraph
cls.rtyper = t.rtyper
cls.db = db
def runner(self, name, transformer=False):
db = self.db
name_to_func = self.name_to_func
entrygraph = self.entrygraph
from rpython.rtyper.llinterp import LLInterpreter
llinterp = LLInterpreter(self.rtyper)
gct = db.gctransformer
if self.__class__.__dict__.get('_used', False):
teardowngraph = gct.frameworkgc__teardown_ptr.value._obj.graph
llinterp.eval_graph(teardowngraph, [])
self.__class__._used = True
# FIIIIISH
setupgraph = gct.frameworkgc_setup_ptr.value._obj.graph
# setup => resets the gc
llinterp.eval_graph(setupgraph, [])
def run(args):
ll_args = lltype.malloc(ARGS, immortal=True)
ll_args[0] = name_to_func[name]
for i in range(len(args)):
ll_args[1+i] = args[i]
res = llinterp.eval_graph(entrygraph, [ll_args])
return res
if transformer:
return run, gct
else:
return run
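        # Typical use from a test method (illustrative): run = self.runner("finalizer")
        # then res = run([5, 42]); pass transformer=True to also get the GC
        # transformer back, e.g. run, gct = self.runner("name", transformer=True).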
class GenericGCTests(GCTest):
GC_CAN_SHRINK_ARRAY = False
def define_instances(cls):
class A(object):
pass
class B(A):
def __init__(self, something):
self.something = something
def malloc_a_lot():
i = 0
first = None
while i < 10:
i += 1
a = somea = A()
a.last = first
first = a
j = 0
while j < 30:
b = B(somea)
b.last = first
j += 1
return 0
return malloc_a_lot
def test_instances(self):
run = self.runner("instances")
run([])
def define_llinterp_lists(cls):
def malloc_a_lot():
i = 0
while i < 10:
i += 1
a = [1] * 10
j = 0
while j < 30:
j += 1
a.append(j)
return 0
return malloc_a_lot
def test_llinterp_lists(self):
run = self.runner("llinterp_lists")
run([])
def define_llinterp_tuples(cls):
def malloc_a_lot():
i = 0
while i < 10:
i += 1
a = (1, 2, i)
b = [a] * 10
j = 0
while j < 20:
j += 1
b.append((1, j, i))
return 0
return malloc_a_lot
def test_llinterp_tuples(self):
run = self.runner("llinterp_tuples")
run([])
def define_llinterp_dict(self):
class A(object):
pass
def malloc_a_lot():
i = 0
while i < 10:
i += 1
a = (1, 2, i)
b = {a: A()}
j = 0
while j < 20:
j += 1
b[1, j, i] = A()
return 0
return malloc_a_lot
def test_llinterp_dict(self):
run = self.runner("llinterp_dict")
run([])
def skipdefine_global_list(cls):
gl = []
class Box:
def __init__(self):
self.lst = gl
box = Box()
def append_to_list(i, j):
box.lst.append([i] * 50)
llop.gc__collect(lltype.Void)
return box.lst[j][0]
return append_to_list, None, None
def test_global_list(self):
py.test.skip("doesn't fit in the model, tested elsewhere too")
run = self.runner("global_list")
res = run([0, 0])
assert res == 0
for i in range(1, 5):
res = run([i, i - 1])
assert res == i - 1 # crashes if constants are not considered roots
def define_string_concatenation(cls):
def concat(j, dummy):
lst = []
for i in range(j):
lst.append(str(i))
return len("".join(lst))
return concat
def test_string_concatenation(self):
run = self.runner("string_concatenation")
res = run([100, 0])
assert res == len(''.join([str(x) for x in range(100)]))
def define_nongc_static_root(cls):
T1 = lltype.GcStruct("C", ('x', lltype.Signed))
T2 = lltype.Struct("C", ('p', lltype.Ptr(T1)))
static = lltype.malloc(T2, immortal=True)
def f():
t1 = lltype.malloc(T1)
t1.x = 42
static.p = t1
llop.gc__collect(lltype.Void)
return static.p.x
def cleanup():
static.p = lltype.nullptr(T1)
return f, cleanup, None
def test_nongc_static_root(self):
run = self.runner("nongc_static_root")
res = run([])
assert res == 42
def define_finalizer(cls):
class B(object):
pass
b = B()
b.nextid = 0
b.num_deleted = 0
class A(object):
def __init__(self):
self.id = b.nextid
b.nextid += 1
def __del__(self):
b.num_deleted += 1
def f(x, y):
a = A()
i = 0
while i < x:
i += 1
a = A()
llop.gc__collect(lltype.Void)
llop.gc__collect(lltype.Void)
return b.num_deleted
return f
def test_finalizer(self):
run = self.runner("finalizer")
res = run([5, 42]) #XXX pure lazyness here too
assert res == 6
def define_finalizer_calls_malloc(cls):
class B(object):
pass
b = B()
b.nextid = 0
b.num_deleted = 0
class AAA(object):
def __init__(self):
self.id = b.nextid
b.nextid += 1
def __del__(self):
b.num_deleted += 1
C()
class C(AAA):
def __del__(self):
b.num_deleted += 1
def f(x, y):
a = AAA()
i = 0
while i < x:
i += 1
a = AAA()
llop.gc__collect(lltype.Void)
llop.gc__collect(lltype.Void)
return b.num_deleted
return f
def test_finalizer_calls_malloc(self):
run = self.runner("finalizer_calls_malloc")
res = run([5, 42]) #XXX pure laziness here too
assert res == 12
def define_finalizer_resurrects(cls):
class B(object):
pass
b = B()
b.nextid = 0
b.num_deleted = 0
class A(object):
def __init__(self):
self.id = b.nextid
b.nextid += 1
def __del__(self):
b.num_deleted += 1
b.a = self
def f(x, y):
a = A()
i = 0
while i < x:
i += 1
a = A()
llop.gc__collect(lltype.Void)
llop.gc__collect(lltype.Void)
aid = b.a.id
b.a = None
# check that __del__ is not called again
llop.gc__collect(lltype.Void)
llop.gc__collect(lltype.Void)
return b.num_deleted * 10 + aid + 100 * (b.a is None)
return f
def test_finalizer_resurrects(self):
run = self.runner("finalizer_resurrects")
res = run([5, 42]) #XXX pure laziness here too
assert 160 <= res <= 165
def define_custom_trace(cls):
#
S = lltype.GcStruct('S', ('x', llmemory.Address))
T = lltype.GcStruct('T', ('z', lltype.Signed))
offset_of_x = llmemory.offsetof(S, 'x')
def customtrace(gc, obj, callback, arg):
gc._trace_callback(callback, arg, obj + offset_of_x)
lambda_customtrace = lambda: customtrace
#
def setup():
rgc.register_custom_trace_hook(S, lambda_customtrace)
tx = lltype.malloc(T)
tx.z = 4243
s1 = lltype.malloc(S)
s1.x = llmemory.cast_ptr_to_adr(tx)
return s1
def f():
s1 = setup()
llop.gc__collect(lltype.Void)
return llmemory.cast_adr_to_ptr(s1.x, lltype.Ptr(T)).z
return f
def test_custom_trace(self):
run = self.runner("custom_trace")
res = run([])
assert res == 4243
def define_weakref(cls):
import weakref, gc
class A(object):
pass
def g():
a = A()
return weakref.ref(a)
def f():
a = A()
ref = weakref.ref(a)
result = ref() is a
ref = g()
llop.gc__collect(lltype.Void)
result = result and (ref() is None)
# check that a further collection is fine
llop.gc__collect(lltype.Void)
result = result and (ref() is None)
return result
return f
def test_weakref(self):
run = self.runner("weakref")
res = run([])
assert res
def define_weakref_to_object_with_finalizer(cls):
import weakref, gc
class A(object):
count = 0
a = A()
class B(object):
def __del__(self):
a.count += 1
def g():
b = B()
return weakref.ref(b)
def f():
ref = g()
llop.gc__collect(lltype.Void)
llop.gc__collect(lltype.Void)
result = a.count == 1 and (ref() is None)
return result
return f
def test_weakref_to_object_with_finalizer(self):
run = self.runner("weakref_to_object_with_finalizer")
res = run([])
assert res
def define_collect_during_collect(cls):
class B(object):
pass
b = B()
b.nextid = 1
b.num_deleted = 0
b.num_deleted_c = 0
class A(object):
def __init__(self):
self.id = b.nextid
b.nextid += 1
def __del__(self):
llop.gc__collect(lltype.Void)
b.num_deleted += 1
C()
C()
class C(A):
def __del__(self):
b.num_deleted += 1
b.num_deleted_c += 1
def f(x, y):
persistent_a1 = A()
persistent_a2 = A()
i = 0
while i < x:
i += 1
a = A()
persistent_a3 = A()
persistent_a4 = A()
llop.gc__collect(lltype.Void)
llop.gc__collect(lltype.Void)
b.bla = persistent_a1.id + persistent_a2.id + persistent_a3.id + persistent_a4.id
# NB print would create a static root!
llop.debug_print(lltype.Void, b.num_deleted_c)
return b.num_deleted
return f
def test_collect_during_collect(self):
run = self.runner("collect_during_collect")
# runs collect recursively 4 times
res = run([4, 42]) #XXX pure laziness here too
assert res == 12
def define_collect_0(cls):
def concat(j, dummy):
lst = []
for i in range(j):
lst.append(str(i))
result = len("".join(lst))
if we_are_translated():
llop.gc__collect(lltype.Void, 0)
return result
return concat
def test_collect_0(self):
run = self.runner("collect_0")
res = run([100, 0])
assert res == len(''.join([str(x) for x in range(100)]))
def define_interior_ptrs(cls):
from rpython.rtyper.lltypesystem.lltype import Struct, GcStruct, GcArray
from rpython.rtyper.lltypesystem.lltype import Array, Signed, malloc
S1 = Struct("S1", ('x', Signed))
T1 = GcStruct("T1", ('s', S1))
def f1():
t = malloc(T1)
t.s.x = 1
return t.s.x
S2 = Struct("S2", ('x', Signed))
T2 = GcArray(S2)
def f2():
t = malloc(T2, 1)
t[0].x = 1
return t[0].x
S3 = Struct("S3", ('x', Signed))
T3 = GcStruct("T3", ('items', Array(S3)))
def f3():
t = malloc(T3, 1)
t.items[0].x = 1
return t.items[0].x
S4 = Struct("S4", ('x', Signed))
T4 = Struct("T4", ('s', S4))
U4 = GcArray(T4)
def f4():
u = malloc(U4, 1)
u[0].s.x = 1
return u[0].s.x
S5 = Struct("S5", ('x', Signed))
T5 = GcStruct("T5", ('items', Array(S5)))
def f5():
t = malloc(T5, 1)
return len(t.items)
T6 = GcStruct("T6", ('s', Array(Signed)))
def f6():
t = malloc(T6, 1)
t.s[0] = 1
return t.s[0]
def func():
return (f1() * 100000 +
f2() * 10000 +
f3() * 1000 +
f4() * 100 +
f5() * 10 +
f6())
assert func() == 111111
return func
def test_interior_ptrs(self):
run = self.runner("interior_ptrs")
res = run([])
assert res == 111111
def define_id(cls):
class A(object):
pass
a1 = A()
def func():
a2 = A()
a3 = A()
id1 = compute_unique_id(a1)
id2 = compute_unique_id(a2)
id3 = compute_unique_id(a3)
llop.gc__collect(lltype.Void)
error = 0
if id1 != compute_unique_id(a1): error += 1
if id2 != compute_unique_id(a2): error += 2
if id3 != compute_unique_id(a3): error += 4
return error
return func
def test_id(self):
run = self.runner("id")
res = run([])
assert res == 0
def define_can_move(cls):
TP = lltype.GcArray(lltype.Float)
def func():
return rgc.can_move(lltype.malloc(TP, 1))
return func
def test_can_move(self):
run = self.runner("can_move")
res = run([])
assert res == self.GC_CAN_MOVE
def define_shrink_array(cls):
from rpython.rtyper.lltypesystem.rstr import STR
def f():
ptr = lltype.malloc(STR, 3)
ptr.hash = 0x62
ptr.chars[0] = '0'
ptr.chars[1] = 'B'
ptr.chars[2] = 'C'
ptr2 = rgc.ll_shrink_array(ptr, 2)
return ((ptr == ptr2) +
ord(ptr2.chars[0]) +
(ord(ptr2.chars[1]) << 8) +
(len(ptr2.chars) << 16) +
(ptr2.hash << 24))
return f
def test_shrink_array(self):
run = self.runner("shrink_array")
if self.GC_CAN_SHRINK_ARRAY:
expected = 0x62024231
else:
expected = 0x62024230
assert run([]) == expected
def define_string_builder_over_allocation(cls):
import gc
def fn():
s = StringBuilder(4)
s.append("abcd")
s.append("defg")
s.append("rty")
s.append_multiple_char('y', 1000)
gc.collect()
s.append_multiple_char('y', 1000)
res = s.build()[1000]
gc.collect()
return ord(res)
return fn
def test_string_builder_over_allocation(self):
fn = self.runner("string_builder_over_allocation")
res = fn([])
assert res == ord('y')
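# Tests specific to moving collectors: GC_CAN_MOVE is True and, unless a
# subclass overrides it, GC_CAN_TEST_ID stays False, so test_many_ids is
# skipped by default.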
class GenericMovingGCTests(GenericGCTests):
GC_CAN_MOVE = True
GC_CAN_TEST_ID = False
def define_many_ids(cls):
class A(object):
pass
def f():
from rpython.rtyper.lltypesystem import rffi
alist = [A() for i in range(50)]
idarray = lltype.malloc(rffi.SIGNEDP.TO, len(alist), flavor='raw')
# Compute the id of all the elements of the list. The goal is
# to not allocate memory, so that if the GC needs memory to
# remember the ids, it will trigger some collections itself
i = 0
while i < len(alist):
idarray[i] = compute_unique_id(alist[i])
i += 1
j = 0
while j < 2:
if j == 1: # allocate some stuff between the two iterations
[A() for i in range(20)]
i = 0
while i < len(alist):
assert idarray[i] == compute_unique_id(alist[i])
i += 1
j += 1
lltype.free(idarray, flavor='raw')
return 0
return f
def test_many_ids(self):
if not self.GC_CAN_TEST_ID:
py.test.skip("fails for bad reasons in lltype.py :-(")
run = self.runner("many_ids")
run([])
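# ensure_layoutbuilder() installs a TransformerLayoutBuilder on
# translator._jit2gc, together with a minor-collection hook that bumps
# cls.marker, so the do_malloc tests below can look up GC type ids for
# the graphs they patch.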
@classmethod
def ensure_layoutbuilder(cls, translator):
jit2gc = getattr(translator, '_jit2gc', None)
if jit2gc:
assert 'invoke_after_minor_collection' in jit2gc
return jit2gc['layoutbuilder']
marker = cls.marker
GCClass = cls.gcpolicy.transformerclass.GCClass
layoutbuilder = framework.TransformerLayoutBuilder(translator, GCClass)
layoutbuilder.delay_encoding()
def seeme():
marker[0] += 1
translator._jit2gc = {
'layoutbuilder': layoutbuilder,
'invoke_after_minor_collection': seeme,
}
return layoutbuilder
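# The graphs below call llop.do_malloc_fixedsize() with no arguments as a
# placeholder; fix_graph_of_g() later rewrites that operation in g's graph,
# substituting the constant arguments (type id, size, and the three flag
# booleans) that the real operation expects.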
def define_do_malloc_operations(cls):
P = lltype.GcStruct('P', ('x', lltype.Signed))
def g():
r = lltype.malloc(P)
r.x = 1
p = llop.do_malloc_fixedsize(llmemory.GCREF) # placeholder
p = lltype.cast_opaque_ptr(lltype.Ptr(P), p)
p.x = r.x
return p.x
def f():
i = 0
while i < 40:
g()
i += 1
return 0
if cls.gcname == 'incminimark':
marker = cls.marker
def cleanup():
assert marker[0] > 0
marker[0] = 0
else:
cleanup = None
def fix_graph_of_g(translator):
from rpython.translator.translator import graphof
from rpython.flowspace.model import Constant
from rpython.rtyper.lltypesystem import rffi
layoutbuilder = cls.ensure_layoutbuilder(translator)
type_id = layoutbuilder.get_type_id(P)
#
# now fix the do_malloc_fixedsize in the graph of g
graph = graphof(translator, g)
for op in graph.startblock.operations:
if op.opname == 'do_malloc_fixedsize':
op.args = [Constant(type_id, llgroup.HALFWORD),
Constant(llmemory.sizeof(P), lltype.Signed),
Constant(False, lltype.Bool), # has_finalizer
Constant(False, lltype.Bool), # is_finalizer_light
Constant(False, lltype.Bool)] # contains_weakptr
break
else:
assert 0, "oops, not found"
return f, cleanup, fix_graph_of_g
def test_do_malloc_operations(self):
run = self.runner("do_malloc_operations")
run([])
def define_do_malloc_operations_in_call(cls):
P = lltype.GcStruct('P', ('x', lltype.Signed))
def g():
llop.do_malloc_fixedsize(llmemory.GCREF) # placeholder
def f():
q = lltype.malloc(P)
q.x = 1
i = 0
while i < 40:
g()
i += q.x
return 0
def fix_graph_of_g(translator):
from rpython.translator.translator import graphof
from rpython.flowspace.model import Constant
from rpython.rtyper.lltypesystem import rffi
layoutbuilder = cls.ensure_layoutbuilder(translator)
type_id = layoutbuilder.get_type_id(P)
#
# now fix the do_malloc_fixedsize in the graph of g
graph = graphof(translator, g)
for op in graph.startblock.operations:
if op.opname == 'do_malloc_fixedsize':
op.args = [Constant(type_id, llgroup.HALFWORD),
Constant(llmemory.sizeof(P), lltype.Signed),
Constant(False, lltype.Bool), # has_finalizer
Constant(False, lltype.Bool), # is_finalizer_light
Constant(False, lltype.Bool)] # contains_weakptr
break
else:
assert 0, "oops, not found"
return f, None, fix_graph_of_g
def test_do_malloc_operations_in_call(self):
run = self.runner("do_malloc_operations_in_call")
run([])
def define_gc_heap_stats(cls):
S = lltype.GcStruct('S', ('x', lltype.Signed))
l1 = []
l2 = []
l3 = []
l4 = []
def f():
for i in range(10):
s = lltype.malloc(S)
l1.append(s)
l2.append(s)
if i < 3:
l3.append(s)
l4.append(s)
# We cheat here and only read the table which we later on
# process ourselves, otherwise this test takes ages
llop.gc__collect(lltype.Void)
tb = rgc._heap_stats()
a = 0
nr = 0
b = 0
c = 0
d = 0
e = 0
for i in range(len(tb)):
if tb[i].count == 10:
a += 1
nr = i
if tb[i].count > 50:
d += 1
for i in range(len(tb)):
if tb[i].count == 4:
b += 1
c += tb[i].links[nr]
e += tb[i].size
return d * 1000 + c * 100 + b * 10 + a
return f
def test_gc_heap_stats(self):
py.test.skip("this test makes the following test crash. Investigate.")
run = self.runner("gc_heap_stats")
res = run([])
assert res % 10000 == 2611
totsize = (res / 10000)
size_of_int = rffi.sizeof(lltype.Signed)
assert (totsize - 26 * size_of_int) % 4 == 0
# ^^^ a crude assumption that totsize - varsize would be divisible by 4
# (and give fixedsize)
def define_writebarrier_before_copy(cls):
S = lltype.GcStruct('S', ('x', lltype.Char))
TP = lltype.GcArray(lltype.Ptr(S))
def fn():
l = lltype.malloc(TP, 100)
l2 = lltype.malloc(TP, 100)
for i in range(100):
l[i] = lltype.malloc(S)
rgc.ll_arraycopy(l, l2, 50, 0, 50)
# force nursery collect
x = []
for i in range(20):
x.append((1, lltype.malloc(S)))
for i in range(50):
assert l2[i] == l[50 + i]
return 0
return fn
def test_writebarrier_before_copy(self):
run = self.runner("writebarrier_before_copy")
run([])
# ________________________________________________________________
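# Concrete configurations: each Test* class below selects a GCClass and its
# GC_PARAMS through a gcpolicy/transformerclass pair and reuses the generic
# test definitions inherited from the classes above.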
class TestSemiSpaceGC(GenericMovingGCTests):
gcname = "semispace"
GC_CAN_SHRINK_ARRAY = True
class gcpolicy(gc.BasicFrameworkGcPolicy):
class transformerclass(shadowstack.ShadowStackFrameworkGCTransformer):
from rpython.memory.gc.semispace import SemiSpaceGC as GCClass
GC_PARAMS = {'space_size': 512*WORD,
'translated_to_c': False}
root_stack_depth = 200
class TestGenerationGC(GenericMovingGCTests):
gcname = "generation"
GC_CAN_SHRINK_ARRAY = True
class gcpolicy(gc.BasicFrameworkGcPolicy):
class transformerclass(shadowstack.ShadowStackFrameworkGCTransformer):
from rpython.memory.gc.generation import GenerationGC as \
GCClass
GC_PARAMS = {'space_size': 512*WORD,
'nursery_size': 32*WORD,
'translated_to_c': False}
root_stack_depth = 200
def define_weakref_across_minor_collection(cls):
import weakref
class A:
pass
def f():
x = 20 # for GenerationGC, enough for a minor collection
a = A()
a.foo = x
ref = weakref.ref(a)
all = [None] * x
i = 0
while i < x:
all[i] = [i] * i
i += 1
assert ref() is a
llop.gc__collect(lltype.Void)
assert ref() is a
return a.foo + len(all)
return f
def test_weakref_across_minor_collection(self):
run = self.runner("weakref_across_minor_collection")
res = run([])
assert res == 20 + 20
def define_nongc_static_root_minor_collect(cls):
T1 = lltype.GcStruct("C", ('x', lltype.Signed))
T2 = lltype.Struct("C", ('p', lltype.Ptr(T1)))
static = lltype.malloc(T2, immortal=True)
def f():
t1 = lltype.malloc(T1)
t1.x = 42
static.p = t1
x = 20
all = [None] * x
i = 0
while i < x: # enough to cause a minor collect
all[i] = [i] * i
i += 1
i = static.p.x
llop.gc__collect(lltype.Void)
return static.p.x + i
def cleanup():
static.p = lltype.nullptr(T1)
return f, cleanup, None
def test_nongc_static_root_minor_collect(self):
run = self.runner("nongc_static_root_minor_collect")
res = run([])
assert res == 84
def define_static_root_minor_collect(cls):
class A:
pass
class B:
pass
static = A()
static.p = None
def f():
t1 = B()
t1.x = 42
static.p = t1
x = 20
all = [None] * x
i = 0
while i < x: # enough to cause a minor collect
all[i] = [i] * i
i += 1
i = static.p.x
llop.gc__collect(lltype.Void)
return static.p.x + i
def cleanup():
static.p = None
return f, cleanup, None
def test_static_root_minor_collect(self):
run = self.runner("static_root_minor_collect")
res = run([])
assert res == 84
def define_many_weakrefs(cls):
# test for the case where allocating the weakref itself triggers
# a collection
import weakref
class A:
pass
def f():
a = A()
i = 0
while i < 17:
ref = weakref.ref(a)
assert ref() is a
i += 1
return 0
return f
def test_many_weakrefs(self):
run = self.runner("many_weakrefs")
run([])
def define_immutable_to_old_promotion(cls):
T_CHILD = lltype.Ptr(lltype.GcStruct('Child', ('field', lltype.Signed)))
T_PARENT = lltype.Ptr(lltype.GcStruct('Parent', ('sub', T_CHILD)))
child = lltype.malloc(T_CHILD.TO)
child2 = lltype.malloc(T_CHILD.TO)
parent = lltype.malloc(T_PARENT.TO)
parent2 = lltype.malloc(T_PARENT.TO)
parent.sub = child
child.field = 3
parent2.sub = child2
child2.field = 8
T_ALL = lltype.Ptr(lltype.GcArray(T_PARENT))
all = lltype.malloc(T_ALL.TO, 2)
all[0] = parent
all[1] = parent2
def f(x, y):
res = all[x]
#all[x] = lltype.nullptr(T_PARENT.TO)
return res.sub.field
return f
def test_immutable_to_old_promotion(self):
run, transformer = self.runner("immutable_to_old_promotion", transformer=True)
run([1, 4])
if not transformer.GCClass.prebuilt_gc_objects_are_static_roots:
assert len(transformer.layoutbuilder.addresses_of_static_ptrs) == 0
else:
assert len(transformer.layoutbuilder.addresses_of_static_ptrs) >= 4
# NB. Remember that the number above does not count
# the number of prebuilt GC objects, but the number of locations
# within prebuilt GC objects that are of type Ptr(Gc).
# At the moment we get additional_roots_sources == 6:
# * all[0]
# * all[1]
# * parent.sub
# * parent2.sub
# * the GcArray pointer from gc.wr_to_objects_with_id
# * the GcArray pointer from gc.object_id_dict.
def define_adr_of_nursery(cls):
class A(object):
pass
def f():
# we need at least 1 obj to allocate a nursery
a = A()
nf_a = llop.gc_adr_of_nursery_free(llmemory.Address)
nt_a = llop.gc_adr_of_nursery_top(llmemory.Address)
nf0 = nf_a.address[0]
nt0 = nt_a.address[0]
a0 = A()
a1 = A()
nf1 = nf_a.address[0]
nt1 = nt_a.address[0]
assert nf1 > nf0
assert nt1 > nf1
assert nt1 == nt0
return 0
return f
def test_adr_of_nursery(self):
run = self.runner("adr_of_nursery")
res = run([])
class TestGenerationalNoFullCollectGC(GCTest):
# test that nursery is doing its job and that no full collection
# is needed when most allocated objects die quickly
gcname = "generation"
class gcpolicy(gc.BasicFrameworkGcPolicy):
class transformerclass(shadowstack.ShadowStackFrameworkGCTransformer):
from rpython.memory.gc.generation import GenerationGC
class GCClass(GenerationGC):
__ready = False
def setup(self):
from rpython.memory.gc.generation import GenerationGC
GenerationGC.setup(self)
self.__ready = True
def semispace_collect(self, size_changing=False):
ll_assert(not self.__ready,
"no full collect should occur in this test")
def _teardown(self):
self.__ready = False # collecting here is expected
GenerationGC._teardown(self)
GC_PARAMS = {'space_size': 512*WORD,
'nursery_size': 128*WORD,
'translated_to_c': False}
root_stack_depth = 200
def define_working_nursery(cls):
def f():
total = 0
i = 0
while i < 40:
lst = []
j = 0
while j < 5:
lst.append(i*j)
j += 1
total += len(lst)
i += 1
return total
return f
def test_working_nursery(self):
run = self.runner("working_nursery")
res = run([])
assert res == 40 * 5
class TestHybridGC(TestGenerationGC):
gcname = "hybrid"
class gcpolicy(gc.BasicFrameworkGcPolicy):
class transformerclass(shadowstack.ShadowStackFrameworkGCTransformer):
from rpython.memory.gc.hybrid import HybridGC as GCClass
GC_PARAMS = {'space_size': 512*WORD,
'nursery_size': 32*WORD,
'large_object': 8*WORD,
'translated_to_c': False}
root_stack_depth = 200
def define_ref_from_rawmalloced_to_regular(cls):
import gc
S = lltype.GcStruct('S', ('x', lltype.Signed))
A = lltype.GcStruct('A', ('p', lltype.Ptr(S)),
('a', lltype.Array(lltype.Char)))
def setup(j):
p = lltype.malloc(S)
p.x = j*2
lst = lltype.malloc(A, j)
# the following line generates a write_barrier call at the moment,
# which is important because the 'lst' can be allocated directly
# in generation 2. This can only occur with varsized mallocs.
lst.p = p
return lst
def f(i, j):
lst = setup(j)
gc.collect()
return lst.p.x
return f
def test_ref_from_rawmalloced_to_regular(self):
run = self.runner("ref_from_rawmalloced_to_regular")
res = run([100, 100])
assert res == 200
def define_write_barrier_direct(cls):
from rpython.rlib import rgc
S = lltype.GcForwardReference()
S.become(lltype.GcStruct('S',
('x', lltype.Signed),
('prev', lltype.Ptr(S)),
('next', lltype.Ptr(S))))
s0 = lltype.malloc(S, immortal=True)
def f():
s = lltype.malloc(S)
s.x = 42
llop.bare_setfield(lltype.Void, s0, void('next'), s)
llop.gc_writebarrier(lltype.Void, llmemory.cast_ptr_to_adr(s0))
rgc.collect(0)
return s0.next.x
def cleanup():
s0.next = lltype.nullptr(S)
return f, cleanup, None
def test_write_barrier_direct(self):
run = self.runner("write_barrier_direct")
res = run([])
assert res == 42
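# MiniMark configuration: the small nursery/page/arena sizes and
# card_page_indices=4 force frequent collections and enable card marking,
# which the no_clean_setarrayitems test below relies on.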
class TestMiniMarkGC(TestHybridGC):
gcname = "minimark"
GC_CAN_TEST_ID = True
class gcpolicy(gc.BasicFrameworkGcPolicy):
class transformerclass(shadowstack.ShadowStackFrameworkGCTransformer):
from rpython.memory.gc.minimark import MiniMarkGC as GCClass
GC_PARAMS = {'nursery_size': 32*WORD,
'page_size': 16*WORD,
'arena_size': 64*WORD,
'small_request_threshold': 5*WORD,
'large_object': 8*WORD,
'card_page_indices': 4,
'translated_to_c': False,
}
root_stack_depth = 200
def define_no_clean_setarrayitems(cls):
# The optimization find_clean_setarrayitems() in
# gctransformer/framework.py does not work with card marking.
# Check that it is turned off.
S = lltype.GcStruct('S', ('x', lltype.Signed))
A = lltype.GcArray(lltype.Ptr(S))
def sub(lst):
lst[15] = lltype.malloc(S) # 'lst' gets the single card mark "12-15"
lst[15].x = 123
lst[0] = lst[15] # that would be a "clean_setarrayitem"
def f():
lst = lltype.malloc(A, 16) # 16 > 10
rgc.collect()
sub(lst)
null = lltype.nullptr(S)
lst[15] = null # clear, so that A() is only visible via lst[0]
rgc.collect() # -> crash
return lst[0].x
return f
def test_no_clean_setarrayitems(self):
run = self.runner("no_clean_setarrayitems")
res = run([])
assert res == 123
def define_nursery_hash_base(cls):
class A:
pass
def fn():
objects = []
hashes = []
for i in range(200):
rgc.collect(0) # nursery-only collection, if possible
obj = A()
objects.append(obj)
hashes.append(compute_identity_hash(obj))
unique = {}
for i in range(len(objects)):
assert compute_identity_hash(objects[i]) == hashes[i]
unique[hashes[i]] = None
return len(unique)
return fn
def test_nursery_hash_base(self):
res = self.runner('nursery_hash_base')
assert res([]) >= 195
def define_instantiate_nonmovable(cls):
from rpython.rlib import objectmodel
from rpython.rtyper import annlowlevel
class A:
pass
def fn():
a1 = A()
a = objectmodel.instantiate(A, nonmovable=True)
a.next = a1 # 'a' is known young here, so no write barrier emitted
res = rgc.can_move(annlowlevel.cast_instance_to_base_ptr(a))
rgc.collect()
objectmodel.keepalive_until_here(a)
return res
return fn
def test_instantiate_nonmovable(self):
res = self.runner('instantiate_nonmovable')
assert res([]) == 0
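# The incremental variant reuses all the MiniMark tests; in addition,
# define_do_malloc_operations() above installs a cleanup that checks the
# 'invoke_after_minor_collection' hook actually fired when
# cls.gcname == 'incminimark'.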
class TestIncrementalMiniMarkGC(TestMiniMarkGC):
gcname = "incminimark"
class gcpolicy(gc.BasicFrameworkGcPolicy):
class transformerclass(shadowstack.ShadowStackFrameworkGCTransformer):
from rpython.memory.gc.incminimark import IncrementalMiniMarkGC \
as GCClass
GC_PARAMS = {'nursery_size': 32*WORD,
'page_size': 16*WORD,
'arena_size': 64*WORD,
'small_request_threshold': 5*WORD,
'large_object': 8*WORD,
'card_page_indices': 4,
'translated_to_c': False,
}
root_stack_depth = 200
def define_malloc_array_of_gcptr(self):
S = lltype.GcStruct('S', ('x', lltype.Signed))
A = lltype.GcArray(lltype.Ptr(S))
def f():
lst = lltype.malloc(A, 5)
return (lst[0] == lltype.nullptr(S)
and lst[1] == lltype.nullptr(S)
and lst[2] == lltype.nullptr(S)
and lst[3] == lltype.nullptr(S)
and lst[4] == lltype.nullptr(S))
return f
def test_malloc_array_of_gcptr(self):
run = self.runner('malloc_array_of_gcptr')
res = run([])
assert res
def define_malloc_struct_of_gcptr(cls):
S1 = lltype.GcStruct('S', ('x', lltype.Signed))
S = lltype.GcStruct('S',
('x', lltype.Signed),
('filed1', lltype.Ptr(S1)),
('filed2', lltype.Ptr(S1)))
s0 = lltype.malloc(S)
def f():
return (s0.filed1 == lltype.nullptr(S1) and s0.filed2 == lltype.nullptr(S1))
return f
def test_malloc_struct_of_gcptr(self):
run = self.runner("malloc_struct_of_gcptr")
res = run([])
assert res
# ________________________________________________________________
# tagged pointers
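# These tests run with taggedpointers=True, so the GC transformer must
# tolerate "pointers" whose low bit encodes a small unboxed integer
# (see UnboxedObject below) alongside ordinary boxed instances.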
class TaggedPointerGCTests(GCTest):
taggedpointers = True
def define_tagged_simple(cls):
class Unrelated(object):
pass
u = Unrelated()
u.x = UnboxedObject(47)
def fn(n):
rgc.collect() # check that a prebuilt tagged pointer doesn't explode
if n > 0:
x = BoxedObject(n)
else:
x = UnboxedObject(n)
u.x = x # invoke write barrier
rgc.collect()
return x.meth(100)
def func():
return fn(1000) + fn(-1000)
assert func() == 205
return func
def test_tagged_simple(self):
func = self.runner("tagged_simple")
res = func([])
assert res == 205
def define_tagged_prebuilt(cls):
class F:
pass
f = F()
f.l = [UnboxedObject(10)]
def fn(n):
if n > 0:
x = BoxedObject(n)
else:
x = UnboxedObject(n)
f.l.append(x)
rgc.collect()
return f.l[-1].meth(100)
def func():
return fn(1000) ^ fn(-1000)
assert func() == -1999
return func
def test_tagged_prebuilt(self):
func = self.runner("tagged_prebuilt")
res = func([])
assert res == -1999
def define_gettypeid(cls):
class A(object):
pass
def fn():
a = A()
return rgc.get_typeid(a)
return fn
def test_gettypeid(self):
func = self.runner("gettypeid")
res = func([])
print res
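# Helper hierarchy used by the tagged-pointer tests: BoxedObject is a normal
# heap instance, while UnboxedObject derives from UnboxedValue and is
# therefore represented as a tagged integer rather than a heap object.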
from rpython.rlib.objectmodel import UnboxedValue
class TaggedBase(object):
__slots__ = ()
def meth(self, x):
raise NotImplementedError
class BoxedObject(TaggedBase):
attrvalue = 66
def __init__(self, normalint):
self.normalint = normalint
def meth(self, x):
return self.normalint + x + 2
class UnboxedObject(TaggedBase, UnboxedValue):
__slots__ = 'smallint'
def meth(self, x):
return self.smallint + x + 3
class TestHybridTaggedPointerGC(TaggedPointerGCTests):
gcname = "hybrid"
class gcpolicy(gc.BasicFrameworkGcPolicy):
class transformerclass(shadowstack.ShadowStackFrameworkGCTransformer):
from rpython.memory.gc.generation import GenerationGC as \
GCClass
GC_PARAMS = {'space_size': 512*WORD,
'nursery_size': 32*WORD,
'translated_to_c': False}
root_stack_depth = 200
def test_gettypeid(self):
py.test.skip("fails for obscure reasons")
| [] |