repo_name (stringlengths 7–94) | repo_path (stringlengths 4–237) | repo_head_hexsha (stringlengths 40) | content (stringlengths 10–680k) | apis (stringlengths 2–680k)
---|---|---|---|---|
yk/jina | tests/unit/peapods/runtimes/remote/ssh/test_ssh_remote.py | ab66e233e74b956390f266881ff5dc4e0110d3ff | import pytest
from jina.enums import RemoteAccessType
from jina.flow import Flow
from jina.parser import set_pea_parser, set_pod_parser
from jina.peapods.pods import BasePod
from jina.peapods.runtimes.remote.ssh import SSHRuntime
from jina.proto import jina_pb2
@pytest.mark.skip('works locally, but until I findout how to mock ssh, this has to be skipped')
def test_ssh_pea():
p = set_pea_parser().parse_args(['--host', '[email protected]', '--timeout', '5000'])
with SSHRuntime(p, kind='pea') as pp:
assert pp.status.envelope.status.code == jina_pb2.StatusProto.READY
assert pp.status is None
@pytest.mark.skip('works locally, but until I find out how to mock ssh, this has to be skipped')
def test_ssh_pod():
p = set_pod_parser().parse_args(['--host', '[email protected]', '--timeout', '5000'])
with SSHRuntime(p, kind='pod') as pp:
assert pp.status.envelope.status.code == jina_pb2.StatusProto.READY
assert pp.status is None
@pytest.mark.skip('not implemented yet')
def test_ssh_mutable_pod():
p = set_pod_parser().parse_args(['--host', '[email protected]', '--timeout', '5000'])
p = BasePod(p)
with SSHRuntime(p, kind='pod') as pp:
assert pp.status.envelope.status.code == jina_pb2.StatusProto.READY
assert pp.status is None
@pytest.mark.skip('not implemented yet')
def test_flow():
f = Flow().add().add(host='[email protected]', remote_access=RemoteAccessType.SSH)
with f:
pass
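# Note: all four tests above are currently skipped -- SSH is not mocked yet and the
# mutable-pod/flow variants are not implemented (see the individual skip reasons).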
| [((266, 370), 'pytest.mark.skip', 'pytest.mark.skip', (['"""works locally, but until I findout how to mock ssh, this has to be skipped"""'], {}), "(\n 'works locally, but until I findout how to mock ssh, this has to be skipped'\n )\n", (282, 370), False, 'import pytest\n'), ((621, 726), 'pytest.mark.skip', 'pytest.mark.skip', (['"""works locally, but until I find out how to mock ssh, this has to be skipped"""'], {}), "(\n 'works locally, but until I find out how to mock ssh, this has to be skipped'\n )\n", (637, 726), False, 'import pytest\n'), ((976, 1015), 'pytest.mark.skip', 'pytest.mark.skip', (['"""not implemented yet"""'], {}), "('not implemented yet')\n", (992, 1015), False, 'import pytest\n'), ((1302, 1341), 'pytest.mark.skip', 'pytest.mark.skip', (['"""not implemented yet"""'], {}), "('not implemented yet')\n", (1318, 1341), False, 'import pytest\n'), ((1140, 1150), 'jina.peapods.pods.BasePod', 'BasePod', (['p'], {}), '(p)\n', (1147, 1150), False, 'from jina.peapods.pods import BasePod\n'), ((479, 504), 'jina.peapods.runtimes.remote.ssh.SSHRuntime', 'SSHRuntime', (['p'], {'kind': '"""pea"""'}), "(p, kind='pea')\n", (489, 504), False, 'from jina.peapods.runtimes.remote.ssh import SSHRuntime\n'), ((834, 859), 'jina.peapods.runtimes.remote.ssh.SSHRuntime', 'SSHRuntime', (['p'], {'kind': '"""pod"""'}), "(p, kind='pod')\n", (844, 859), False, 'from jina.peapods.runtimes.remote.ssh import SSHRuntime\n'), ((1160, 1185), 'jina.peapods.runtimes.remote.ssh.SSHRuntime', 'SSHRuntime', (['p'], {'kind': '"""pod"""'}), "(p, kind='pod')\n", (1170, 1185), False, 'from jina.peapods.runtimes.remote.ssh import SSHRuntime\n'), ((389, 405), 'jina.parser.set_pea_parser', 'set_pea_parser', ([], {}), '()\n', (403, 405), False, 'from jina.parser import set_pea_parser, set_pod_parser\n'), ((745, 761), 'jina.parser.set_pod_parser', 'set_pod_parser', ([], {}), '()\n', (759, 761), False, 'from jina.parser import set_pea_parser, set_pod_parser\n'), ((1052, 1068), 'jina.parser.set_pod_parser', 'set_pod_parser', ([], {}), '()\n', (1066, 1068), False, 'from jina.parser import set_pea_parser, set_pod_parser\n'), ((1367, 1373), 'jina.flow.Flow', 'Flow', ([], {}), '()\n', (1371, 1373), False, 'from jina.flow import Flow\n')] |
dotX12/waio | waio/factory/models/basic.py | 6bc41df2d650f31fdb11a1a2b67c6149afa0e11a | from dataclasses import dataclass
@dataclass
class PayloadSender:
phone: int
name: str
@dataclass
class PayloadBaseModel:
sender: PayloadSender
payload_id: str
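# Illustrative construction (hypothetical values, not from the original source):
# payload = PayloadBaseModel(sender=PayloadSender(phone=15551234567, name="Ada"),
#                            payload_id="msg-001")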
| [] |
Nithanaroy/random_scripts | Uber/validExpression.py | 908e539e2b7050a09e03b4fc0d2621b23733d65a | def main(expr):
openingParams = '({['
closingParams = ')}]'
stack = []
for c in expr:
if c in openingParams:
stack.append(c)
elif c in closingParams:
topOfStack = stack.pop()
openingIndex = openingParams.find(topOfStack)
closingIndex = closingParams.find(c)
if openingIndex is not closingIndex:
return False
if len(stack) == 0:
return True
return False
if __name__ =='__main__':
print main('{(abc})')
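    print(main('([]{})'))  # balanced example (added for illustration): expected True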
| [] |
KrasnitzLab/sgains | sgains/tool.py | 501c42bfdad4542725f00ca8199983eccf8c0b3f | import os
import sys
from copy import deepcopy
import traceback
import functools
from collections import defaultdict
import yaml
from argparse import ArgumentParser,\
RawDescriptionHelpFormatter, ArgumentDefaultsHelpFormatter
from sgains.configuration.parser import SgainsValidator, Config
from sgains.configuration.schema import sgains_schema
from sgains.executor import Executor
from sgains.pipelines.mappableregions_pipeline import MappableRegionsPipeline
from sgains.pipelines.genomeindex_pipeline import GenomeIndexPipeline
from sgains.pipelines.bins_pipeline import BinsPipeline
from sgains.pipelines.mapping_pipeline import MappingPipeline
from sgains.pipelines.extract_10x_pipeline import Extract10xPipeline
from sgains.pipelines.varbin_10x_pipeline import Varbin10xPipeline
from sgains.pipelines.varbin_pipeline import VarbinPipeline
from sgains.pipelines.r_pipeline import Rpipeline
from sgains.pipelines.composite_pipeline import CompositePipeline
SGAINS_COMMANDS = {
"genomeindex": {
"config_groups": ["aligner", "genome"],
"help": "builds appropriate hisat2 or bowtie index for the "
"reference genome",
},
"mappable_regions": {
"config_groups": ["aligner", "genome", "mappable_regions", "sge"],
"help": "finds all mappable regions in specified genome",
},
"bins": {
"config_groups": ["genome", "mappable_regions", "bins", "sge"],
"help": "calculates all bins boundaries for specified bins count "
"and read length",
},
"prepare": {
"config_groups": [
"aligner", "genome", "mappable_regions", "bins", "sge"],
"help": "combines all preparation steps ('genome', 'mappable-regions' "
"and 'bins') into single command",
},
"mapping": {
"config_groups": ["aligner", "genome", "reads", "mapping", "sge"],
"help": "performs mapping of cells reads to the reference genome",
},
"extract_10x": {
"config_groups": [
"data_10x", "reads", "sge"],
"help": "extracts cells reads from 10x Genomics datasets",
},
"varbin": {
"config_groups": ["bins", "mapping", "varbin", "sge"],
"help": "applies varbin algorithm to count read mappings in each bin",
},
"varbin_10x": {
"config_groups": [
"data_10x", "bins", "varbin", "sge"],
"help": "applies varbin algorithm to count read mappings in each bin "
"to 10x Genomics datasets without realigning",
},
"scclust": {
"config_groups": ["bins", "varbin", "scclust"],
"help": "segmentation and clustering based bin counts and "
"preparation of the SCGV input data"
},
"process": {
"config_groups": [
"aligner", "genome", "reads", "mapping", "bins", "varbin",
"scclust", "sge"],
"help": "combines all process steps ('mapping', 'varbin' "
"and 'scclust') into single command"
},
}
def build_common_options(parser):
parser.add_argument(
"-v", "--verbose",
dest="verbose",
action="count",
help="set verbosity level [default: %(default)s]",
default=0
)
parser.add_argument(
"-c", "--config",
dest="config",
help="configuration file",
metavar="path"
)
parser.add_argument(
"-n", "--dry-run",
dest="dry_run",
action="store_true",
help="perform a trial run with no changes made",
default=False
)
parser.add_argument(
"--force", "-F",
dest="force",
action="store_true",
help="allows overwriting nonempty results directory",
default=False
)
parser.add_argument(
"--parallel", "-p",
dest="parallel",
help="number of task to run in parallel",
type=int,
default=1
)
parser.add_argument(
"--sge",
dest="sge",
action="store_true",
help="parallelilizes commands using SGE cluster manager",
default=False
)
def _get_config_value(config, group_name, name):
if config is None:
return None
group = config.config.get(group_name)
if group is None:
return None
result = getattr(group, name)
return result
def build_cli_options(argparser, command=None, config=None, sge_flag=False):
work_dirname = os.getcwd()
if config is not None:
work_dirname = config.work_dirname
validator = SgainsValidator(
deepcopy(sgains_schema), work_dirname=work_dirname)
if command is None:
config_groups = list(validator.schema.keys())
else:
assert command in SGAINS_COMMANDS
command = SGAINS_COMMANDS[command]
config_groups = command["config_groups"]
for group_name in config_groups:
if group_name == "sge" and not sge_flag:
continue
group = validator.schema.get(group_name)
group_parser = argparser.add_argument_group(f"{group_name} group:")
assert group["type"] == "dict", (group_name, group)
group_schema = group["schema"]
for arg_name, arg_spec in group_schema.items():
name = f"--{arg_name.replace('_', '-')}"
arg_type = str
arg_type = arg_spec.get("type", "string")
if arg_type == "string":
arg_type = str
elif arg_type == "integer":
arg_type = int
elif arg_type == "float":
arg_type = float
elif arg_type == "list":
arg_type = list
else:
raise ValueError(f"wrong argument type {arg_type}")
help_data = None
meta_data = arg_spec.get("meta")
if meta_data is not None:
help_data = meta_data.get("help")
arg_default = _get_config_value(config, group_name, arg_name)
if arg_default is None:
arg_default = arg_spec.get("default")
group_parser.add_argument(
name,
help=help_data,
dest=arg_name,
type=arg_type,
default=arg_default)
return argparser
def parse_cli_options(args):
config_dict = defaultdict(dict)
work_dirname = os.getcwd()
if args.config is not None:
assert os.path.exists(args.config), args.config
with open(args.config, "r") as infile:
config_dict = yaml.safe_load(infile)
work_dirname = os.path.dirname(args.config)
validator = SgainsValidator(
deepcopy(sgains_schema), work_dirname=work_dirname)
result = defaultdict(dict)
config_groups = list(validator.schema.keys())
for group_name in config_groups:
if group_name == "sge" and not args.sge:
continue
group = validator.schema.get(group_name)
group_schema = group.get("schema")
if group_schema is None:
continue
group_result = {}
for arg_name in group_schema.keys():
arg_value = getattr(args, arg_name, None)
if arg_value is not None:
group_result[arg_name] = arg_value
else:
config_value = config_dict.get(group_name, None)
if config_value is not None:
config_value = config_value.get(arg_name, None)
if config_value is not None:
group_result[arg_name] = config_value
if group_result:
result[group_name] = group_result
config = Config.from_dict(result, work_dirname)
config.verbose = args.verbose
config.config_file = args.config
config.dry_run = args.dry_run
config.force = args.force
config.parallel = args.parallel
config.sge = args.sge
return config
def main(argv=sys.argv[1:]):
program_name = os.path.basename(sys.argv[0])
program_shortdesc = \
'sgains - sparse genomic analysis of individual nuclei by ' \
'sequencing pipeline'
program_description = '''%s
USAGE
''' % (program_shortdesc, )
try:
config = Config.parse_argv(argv)
sge_flag = Config.check_sge_argv(argv)
argparser = ArgumentParser(
description=program_description,
formatter_class=ArgumentDefaultsHelpFormatter)
build_common_options(argparser)
subparsers = argparser.add_subparsers(
title="sGAINS subcommands"
)
for command in SGAINS_COMMANDS:
command_name = command.replace("_", "-")
command_help = SGAINS_COMMANDS[command].get("help", "")
subparser = subparsers.add_parser(
name=command_name,
help=command_help,
formatter_class=ArgumentDefaultsHelpFormatter
)
build_cli_options(subparser, command, config, sge_flag=sge_flag)
subparser.set_defaults(func=functools.partial(execute, command))
args = argparser.parse_args(argv)
args.func(args)
except KeyboardInterrupt:
traceback.print_exc()
return 0
except Exception as e:
traceback.print_exc()
indent = len(program_name) * " "
sys.stderr.write(program_name + ": " + repr(e) + "\n")
sys.stderr.write(indent + " for help use --help")
sys.stderr.write('\n')
return 2
def create_pipeline(command, config):
if command == "genomeindex":
return GenomeIndexPipeline(config)
elif command == "mappable_regions":
return MappableRegionsPipeline(config)
elif command == "bins":
return BinsPipeline(config)
elif command == "mapping":
return MappingPipeline(config)
elif command == "varbin":
return VarbinPipeline(config)
elif command == "scclust":
return Rpipeline(config)
elif command == "extract_10x":
return Extract10xPipeline(config)
elif command == "varbin_10x":
return Varbin10xPipeline(config)
elif command == "prepare":
pipelines = [
GenomeIndexPipeline(config),
MappableRegionsPipeline(config),
BinsPipeline(config),
]
return CompositePipeline(config, pipelines)
elif command == "process":
pipelines = [
MappingPipeline(config),
VarbinPipeline(config),
Rpipeline(config),
]
return CompositePipeline(config, pipelines)
raise ValueError(f"Unexpected command: {command}")
def execute(command, args):
config = parse_cli_options(args)
pipeline = create_pipeline(command, config)
assert pipeline is not None, command
executor = Executor(config)
executor.run_pipeline(pipeline)
if __name__ == "__main__":
sys.exit(main())
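# When executed as a script, argparse dispatches the chosen subcommand to
# execute() through the functools.partial default bound on each sub-parser in main().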
| [((4403, 4414), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (4412, 4414), False, 'import os\n'), ((6285, 6302), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (6296, 6302), False, 'from collections import defaultdict\n'), ((6322, 6333), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (6331, 6333), False, 'import os\n'), ((6680, 6697), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (6691, 6697), False, 'from collections import defaultdict\n'), ((7603, 7641), 'sgains.configuration.parser.Config.from_dict', 'Config.from_dict', (['result', 'work_dirname'], {}), '(result, work_dirname)\n', (7619, 7641), False, 'from sgains.configuration.parser import SgainsValidator, Config\n'), ((7908, 7937), 'os.path.basename', 'os.path.basename', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (7924, 7937), False, 'import os\n'), ((10742, 10758), 'sgains.executor.Executor', 'Executor', (['config'], {}), '(config)\n', (10750, 10758), False, 'from sgains.executor import Executor\n'), ((4527, 4550), 'copy.deepcopy', 'deepcopy', (['sgains_schema'], {}), '(sgains_schema)\n', (4535, 4550), False, 'from copy import deepcopy\n'), ((6382, 6409), 'os.path.exists', 'os.path.exists', (['args.config'], {}), '(args.config)\n', (6396, 6409), False, 'import os\n'), ((6543, 6571), 'os.path.dirname', 'os.path.dirname', (['args.config'], {}), '(args.config)\n', (6558, 6571), False, 'import os\n'), ((6614, 6637), 'copy.deepcopy', 'deepcopy', (['sgains_schema'], {}), '(sgains_schema)\n', (6622, 6637), False, 'from copy import deepcopy\n'), ((8159, 8182), 'sgains.configuration.parser.Config.parse_argv', 'Config.parse_argv', (['argv'], {}), '(argv)\n', (8176, 8182), False, 'from sgains.configuration.parser import SgainsValidator, Config\n'), ((8202, 8229), 'sgains.configuration.parser.Config.check_sge_argv', 'Config.check_sge_argv', (['argv'], {}), '(argv)\n', (8223, 8229), False, 'from sgains.configuration.parser import SgainsValidator, Config\n'), ((8251, 8350), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': 'program_description', 'formatter_class': 'ArgumentDefaultsHelpFormatter'}), '(description=program_description, formatter_class=\n ArgumentDefaultsHelpFormatter)\n', (8265, 8350), False, 'from argparse import ArgumentParser, RawDescriptionHelpFormatter, ArgumentDefaultsHelpFormatter\n'), ((9527, 9554), 'sgains.pipelines.genomeindex_pipeline.GenomeIndexPipeline', 'GenomeIndexPipeline', (['config'], {}), '(config)\n', (9546, 9554), False, 'from sgains.pipelines.genomeindex_pipeline import GenomeIndexPipeline\n'), ((6497, 6519), 'yaml.safe_load', 'yaml.safe_load', (['infile'], {}), '(infile)\n', (6511, 6519), False, 'import yaml\n'), ((9132, 9153), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (9151, 9153), False, 'import traceback\n'), ((9206, 9227), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (9225, 9227), False, 'import traceback\n'), ((9341, 9391), 'sys.stderr.write', 'sys.stderr.write', (["(indent + ' for help use --help')"], {}), "(indent + ' for help use --help')\n", (9357, 9391), False, 'import sys\n'), ((9400, 9422), 'sys.stderr.write', 'sys.stderr.write', (['"""\n"""'], {}), "('\\n')\n", (9416, 9422), False, 'import sys\n'), ((9610, 9641), 'sgains.pipelines.mappableregions_pipeline.MappableRegionsPipeline', 'MappableRegionsPipeline', (['config'], {}), '(config)\n', (9633, 9641), False, 'from sgains.pipelines.mappableregions_pipeline import MappableRegionsPipeline\n'), ((9685, 9705), 
'sgains.pipelines.bins_pipeline.BinsPipeline', 'BinsPipeline', (['config'], {}), '(config)\n', (9697, 9705), False, 'from sgains.pipelines.bins_pipeline import BinsPipeline\n'), ((8989, 9024), 'functools.partial', 'functools.partial', (['execute', 'command'], {}), '(execute, command)\n', (9006, 9024), False, 'import functools\n'), ((9752, 9775), 'sgains.pipelines.mapping_pipeline.MappingPipeline', 'MappingPipeline', (['config'], {}), '(config)\n', (9767, 9775), False, 'from sgains.pipelines.mapping_pipeline import MappingPipeline\n'), ((9821, 9843), 'sgains.pipelines.varbin_pipeline.VarbinPipeline', 'VarbinPipeline', (['config'], {}), '(config)\n', (9835, 9843), False, 'from sgains.pipelines.varbin_pipeline import VarbinPipeline\n'), ((9890, 9907), 'sgains.pipelines.r_pipeline.Rpipeline', 'Rpipeline', (['config'], {}), '(config)\n', (9899, 9907), False, 'from sgains.pipelines.r_pipeline import Rpipeline\n'), ((9958, 9984), 'sgains.pipelines.extract_10x_pipeline.Extract10xPipeline', 'Extract10xPipeline', (['config'], {}), '(config)\n', (9976, 9984), False, 'from sgains.pipelines.extract_10x_pipeline import Extract10xPipeline\n'), ((10034, 10059), 'sgains.pipelines.varbin_10x_pipeline.Varbin10xPipeline', 'Varbin10xPipeline', (['config'], {}), '(config)\n', (10051, 10059), False, 'from sgains.pipelines.varbin_10x_pipeline import Varbin10xPipeline\n'), ((10258, 10294), 'sgains.pipelines.composite_pipeline.CompositePipeline', 'CompositePipeline', (['config', 'pipelines'], {}), '(config, pipelines)\n', (10275, 10294), False, 'from sgains.pipelines.composite_pipeline import CompositePipeline\n'), ((10125, 10152), 'sgains.pipelines.genomeindex_pipeline.GenomeIndexPipeline', 'GenomeIndexPipeline', (['config'], {}), '(config)\n', (10144, 10152), False, 'from sgains.pipelines.genomeindex_pipeline import GenomeIndexPipeline\n'), ((10166, 10197), 'sgains.pipelines.mappableregions_pipeline.MappableRegionsPipeline', 'MappableRegionsPipeline', (['config'], {}), '(config)\n', (10189, 10197), False, 'from sgains.pipelines.mappableregions_pipeline import MappableRegionsPipeline\n'), ((10211, 10231), 'sgains.pipelines.bins_pipeline.BinsPipeline', 'BinsPipeline', (['config'], {}), '(config)\n', (10223, 10231), False, 'from sgains.pipelines.bins_pipeline import BinsPipeline\n'), ((10477, 10513), 'sgains.pipelines.composite_pipeline.CompositePipeline', 'CompositePipeline', (['config', 'pipelines'], {}), '(config, pipelines)\n', (10494, 10513), False, 'from sgains.pipelines.composite_pipeline import CompositePipeline\n'), ((10360, 10383), 'sgains.pipelines.mapping_pipeline.MappingPipeline', 'MappingPipeline', (['config'], {}), '(config)\n', (10375, 10383), False, 'from sgains.pipelines.mapping_pipeline import MappingPipeline\n'), ((10397, 10419), 'sgains.pipelines.varbin_pipeline.VarbinPipeline', 'VarbinPipeline', (['config'], {}), '(config)\n', (10411, 10419), False, 'from sgains.pipelines.varbin_pipeline import VarbinPipeline\n'), ((10433, 10450), 'sgains.pipelines.r_pipeline.Rpipeline', 'Rpipeline', (['config'], {}), '(config)\n', (10442, 10450), False, 'from sgains.pipelines.r_pipeline import Rpipeline\n')] |
rsumner31/Detectron | lib/modeling/VGG16.py | 021685d42f7e8ac097e2bcf79fecb645f211378e | # Copyright (c) 2017-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
"""VGG16 from https://arxiv.org/abs/1409.1556."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from core.config import cfg
def add_VGG16_conv5_body(model):
model.Conv('data', 'conv1_1', 3, 64, 3, pad=1, stride=1)
model.Relu('conv1_1', 'conv1_1')
model.Conv('conv1_1', 'conv1_2', 64, 64, 3, pad=1, stride=1)
model.Relu('conv1_2', 'conv1_2')
model.MaxPool('conv1_2', 'pool1', kernel=2, pad=0, stride=2)
model.Conv('pool1', 'conv2_1', 64, 128, 3, pad=1, stride=1)
model.Relu('conv2_1', 'conv2_1')
model.Conv('conv2_1', 'conv2_2', 128, 128, 3, pad=1, stride=1)
model.Relu('conv2_2', 'conv2_2')
model.MaxPool('conv2_2', 'pool2', kernel=2, pad=0, stride=2)
model.StopGradient('pool2', 'pool2')
model.Conv('pool2', 'conv3_1', 128, 256, 3, pad=1, stride=1)
model.Relu('conv3_1', 'conv3_1')
model.Conv('conv3_1', 'conv3_2', 256, 256, 3, pad=1, stride=1)
model.Relu('conv3_2', 'conv3_2')
model.Conv('conv3_2', 'conv3_3', 256, 256, 3, pad=1, stride=1)
model.Relu('conv3_3', 'conv3_3')
model.MaxPool('conv3_3', 'pool3', kernel=2, pad=0, stride=2)
model.Conv('pool3', 'conv4_1', 256, 512, 3, pad=1, stride=1)
model.Relu('conv4_1', 'conv4_1')
model.Conv('conv4_1', 'conv4_2', 512, 512, 3, pad=1, stride=1)
model.Relu('conv4_2', 'conv4_2')
model.Conv('conv4_2', 'conv4_3', 512, 512, 3, pad=1, stride=1)
model.Relu('conv4_3', 'conv4_3')
model.MaxPool('conv4_3', 'pool4', kernel=2, pad=0, stride=2)
model.Conv('pool4', 'conv5_1', 512, 512, 3, pad=1, stride=1)
model.Relu('conv5_1', 'conv5_1')
model.Conv('conv5_1', 'conv5_2', 512, 512, 3, pad=1, stride=1)
model.Relu('conv5_2', 'conv5_2')
model.Conv('conv5_2', 'conv5_3', 512, 512, 3, pad=1, stride=1)
blob_out = model.Relu('conv5_3', 'conv5_3')
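    # Four 2x2 max-poolings (after the conv1-conv4 stages) shrink the spatial
    # resolution by 2**4 = 16, hence the 1/16 spatial scale returned below.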
return blob_out, 512, 1. / 16.
def add_VGG16_roi_fc_head(model, blob_in, dim_in, spatial_scale):
model.RoIFeatureTransform(
blob_in,
'pool5',
blob_rois='rois',
method=cfg.FAST_RCNN.ROI_XFORM_METHOD,
resolution=7,
sampling_ratio=cfg.FAST_RCNN.ROI_XFORM_SAMPLING_RATIO,
spatial_scale=spatial_scale
)
model.FC('pool5', 'fc6', dim_in * 7 * 7, 4096)
model.Relu('fc6', 'fc6')
model.FC('fc6', 'fc7', 4096, 4096)
blob_out = model.Relu('fc7', 'fc7')
return blob_out, 4096
| [] |
yangjing1127/xmind2testcase | setup.py | 49a581159a0d8e028f89939777399493662df111 | #!/usr/env/bin python
# -*- coding: utf-8 -*-
import io
import os
import sys
from shutil import rmtree
from setuptools import setup, find_packages, Command
about = {}
here = os.path.abspath(os.path.dirname(__file__))
with io.open(os.path.join(here, 'xmind2testcase', '__about__.py'), encoding='utf-8') as f: # custom
exec(f.read(), about)
with io.open('README.md', encoding='utf-8') as f:
long_description = f.read()
install_requires = [ # custom
"xmind",
"flask",
"arrow",
]
class PyPiCommand(Command):
""" Build and publish this package and make a tag.
Support: python setup.py pypi
Copied from requests_html
"""
user_options = []
@staticmethod
def status(s):
"""Prints things in green color."""
print('\033[0;32m{0}\033[0m'.format(s))
def initialize_options(self):
""" override
"""
pass
def finalize_options(self):
""" override
"""
pass
def run(self):
self.status('Building Source and Wheel (universal) distribution...')
os.system('{0} setup.py sdist bdist_wheel --universal'.format(sys.executable))
self.status('Uploading the package to PyPi via Twine...')
os.system('twine upload dist/*')
self.status('Publishing git tags...')
os.system('git tag v{0}'.format(about['__version__']))
os.system('git push --tags')
try:
self.status('Removing current build artifacts...')
rmtree(os.path.join(here, 'dist'))
rmtree(os.path.join(here, 'build'))
rmtree(os.path.join(here, 'xmind2testcase.egg-info')) # custom
except OSError:
pass
self.status('Congratulations! Upload PyPi and publish git tag successfully...')
sys.exit()
setup(
name=about['__title__'],
version=about['__version__'],
description=about['__description__'],
long_description=long_description,
long_description_content_type='text/markdown',
keywords=about['__keywords__'],
author=about['__author__'],
author_email=about['__author_email__'],
url=about['__url__'],
license=about['__license__'],
packages=find_packages(exclude=['tests', 'test.*', 'docs']), # custom
package_data={ # custom
'': ['README.md'],
'webtool': ['static/*', 'static/css/*', 'static/guide/*', 'templates/*', 'schema.sql'],
},
install_requires=install_requires,
extras_require={},
python_requires='>=3.0, <4', # custom
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
entry_points={ # custom
'console_scripts': [
'xmind2testcase=xmind2testcase.cli:cli_main',
]
},
cmdclass={
# python3 setup.py pypi
'pypi': PyPiCommand
}
)
| [((191, 216), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (206, 216), False, 'import os\n'), ((351, 389), 'io.open', 'io.open', (['"""README.md"""'], {'encoding': '"""utf-8"""'}), "('README.md', encoding='utf-8')\n", (358, 389), False, 'import io\n'), ((231, 283), 'os.path.join', 'os.path.join', (['here', '"""xmind2testcase"""', '"""__about__.py"""'], {}), "(here, 'xmind2testcase', '__about__.py')\n", (243, 283), False, 'import os\n'), ((1237, 1269), 'os.system', 'os.system', (['"""twine upload dist/*"""'], {}), "('twine upload dist/*')\n", (1246, 1269), False, 'import os\n'), ((1388, 1416), 'os.system', 'os.system', (['"""git push --tags"""'], {}), "('git push --tags')\n", (1397, 1416), False, 'import os\n'), ((1803, 1813), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1811, 1813), False, 'import sys\n'), ((2203, 2253), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['tests', 'test.*', 'docs']"}), "(exclude=['tests', 'test.*', 'docs'])\n", (2216, 2253), False, 'from setuptools import setup, find_packages, Command\n'), ((1513, 1539), 'os.path.join', 'os.path.join', (['here', '"""dist"""'], {}), "(here, 'dist')\n", (1525, 1539), False, 'import os\n'), ((1560, 1587), 'os.path.join', 'os.path.join', (['here', '"""build"""'], {}), "(here, 'build')\n", (1572, 1587), False, 'import os\n'), ((1608, 1653), 'os.path.join', 'os.path.join', (['here', '"""xmind2testcase.egg-info"""'], {}), "(here, 'xmind2testcase.egg-info')\n", (1620, 1653), False, 'import os\n')] |
Tillsten/skultrafast | skultrafast/styles.py | 778eaf1539b6d85f21ac53b011472605673ef7e8 | # -*- coding: utf-8 -*-
"""
Created on Thu Sep 17 21:33:24 2015
@author: Tillsten
"""
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
tableau20 = [(31, 119, 180), (174, 199, 232), (255, 127, 14), (255, 187, 120),
(44, 160, 44), (152, 223, 138), (214, 39, 40), (255, 152, 150),
(148, 103, 189), (197, 176, 213), (140, 86, 75), (196, 156, 148),
(227, 119, 194), (247, 182, 210), (127, 127, 127), (199, 199, 199),
(188, 189, 34), (219, 219, 141), (23, 190, 207), (158, 218, 229)]
tableau20 = [(r/255., g/255., b/255.) for r,g,b, in tableau20]
#plt.rcParams['savefig.dpi'] = 110
#plt.rcParams['font.family'] = 'Vera Sans'
out_ticks = {'xtick.direction': 'out',
'xtick.major.width': 1.5,
'xtick.minor.width': 1,
'xtick.major.size': 6,
'xtick.minor.size': 3,
'xtick.minor.visible': True,
'ytick.direction': 'out',
'ytick.major.width': 1.5,
'ytick.minor.width': 1,
'ytick.major.size': 6,
'ytick.minor.size': 3,
'ytick.minor.visible': True,
'axes.spines.top': False,
'axes.spines.right': False,
'text.hinting': True,
'axes.titlesize': 'xx-large',
'axes.titleweight': 'semibold',
}
plt.figure(figsize=(6,4))
with plt.style.context(out_ticks):
ax = plt.subplot(111)
x = np.linspace(0, 7, 1000)
y = np.exp(-x/1.5)*np.cos(x/1*(2*np.pi))#*np.cos(x/0.05*(2*np.pi))
l, = plt.plot(x, np.exp(-x/1.5), lw=0.5, color='grey')
l, = plt.plot(x, -np.exp(-x/1.5), lw=0.5, color='grey')
l, = plt.plot(x, y, lw=1.1)
#l.set_clip_on(0)
plt.tick_params(which='both', top=False, right=False)
plt.margins(0.01)
ax.text(7, 1, r'$y(t)=\exp\left(-t/1.5\right)\cos(\omega_1t)\cos(\omega_2t)$',
fontsize=18, va='top', ha='right')
#plt.title("Hallo")
plt.setp(plt.gca(), xlabel='Time [s]', ylabel='Amplitude')
ax = plt.axes([0.57, 0.25, 0.3, .2])
#ax.plot(np.fft.fftfreq(x.size)[:y.size/2], abs(np.fft.fft(y))[:y.size/2])
ax.fill_between(np.fft.fftfreq(x.size, x[1]-x[0])[:y.size/2],
abs(np.fft.fft(y))[:y.size/2], alpha=0.2, color='r')
ax.set_xlim(0, 10)
ax.set_xlabel("Frequency")
ax.xaxis.labelpad = 1
plt.locator_params(nbins=4)
plt.tick_params(which='both', top=False, right=False)
plt.tick_params(which='minor', bottom=False, left=False)
#plt.grid(1, axis='y', linestyle='-', alpha=0.3, lw=.5)
plt.show()
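# The inset axes show the one-sided FFT amplitude spectrum |FFT(y)| of the damped cosine.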
| [((1415, 1441), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(6, 4)'}), '(figsize=(6, 4))\n', (1425, 1441), True, 'import matplotlib.pyplot as plt\n'), ((2658, 2668), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2666, 2668), True, 'import matplotlib.pyplot as plt\n'), ((1449, 1477), 'matplotlib.pyplot.style.context', 'plt.style.context', (['out_ticks'], {}), '(out_ticks)\n', (1466, 1477), True, 'import matplotlib.pyplot as plt\n'), ((1489, 1505), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (1500, 1505), True, 'import matplotlib.pyplot as plt\n'), ((1515, 1538), 'numpy.linspace', 'np.linspace', (['(0)', '(7)', '(1000)'], {}), '(0, 7, 1000)\n', (1526, 1538), True, 'import numpy as np\n'), ((1742, 1764), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y'], {'lw': '(1.1)'}), '(x, y, lw=1.1)\n', (1750, 1764), True, 'import matplotlib.pyplot as plt\n'), ((1793, 1846), 'matplotlib.pyplot.tick_params', 'plt.tick_params', ([], {'which': '"""both"""', 'top': '(False)', 'right': '(False)'}), "(which='both', top=False, right=False)\n", (1808, 1846), True, 'import matplotlib.pyplot as plt\n'), ((1852, 1869), 'matplotlib.pyplot.margins', 'plt.margins', (['(0.01)'], {}), '(0.01)\n', (1863, 1869), True, 'import matplotlib.pyplot as plt\n'), ((2102, 2134), 'matplotlib.pyplot.axes', 'plt.axes', (['[0.57, 0.25, 0.3, 0.2]'], {}), '([0.57, 0.25, 0.3, 0.2])\n', (2110, 2134), True, 'import matplotlib.pyplot as plt\n'), ((2445, 2472), 'matplotlib.pyplot.locator_params', 'plt.locator_params', ([], {'nbins': '(4)'}), '(nbins=4)\n', (2463, 2472), True, 'import matplotlib.pyplot as plt\n'), ((2478, 2531), 'matplotlib.pyplot.tick_params', 'plt.tick_params', ([], {'which': '"""both"""', 'top': '(False)', 'right': '(False)'}), "(which='both', top=False, right=False)\n", (2493, 2531), True, 'import matplotlib.pyplot as plt\n'), ((2537, 2593), 'matplotlib.pyplot.tick_params', 'plt.tick_params', ([], {'which': '"""minor"""', 'bottom': '(False)', 'left': '(False)'}), "(which='minor', bottom=False, left=False)\n", (2552, 2593), True, 'import matplotlib.pyplot as plt\n'), ((1548, 1564), 'numpy.exp', 'np.exp', (['(-x / 1.5)'], {}), '(-x / 1.5)\n', (1554, 1564), True, 'import numpy as np\n'), ((1563, 1590), 'numpy.cos', 'np.cos', (['(x / 1 * (2 * np.pi))'], {}), '(x / 1 * (2 * np.pi))\n', (1569, 1590), True, 'import numpy as np\n'), ((1633, 1649), 'numpy.exp', 'np.exp', (['(-x / 1.5)'], {}), '(-x / 1.5)\n', (1639, 1649), True, 'import numpy as np\n'), ((2042, 2051), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (2049, 2051), True, 'import matplotlib.pyplot as plt\n'), ((1694, 1710), 'numpy.exp', 'np.exp', (['(-x / 1.5)'], {}), '(-x / 1.5)\n', (1700, 1710), True, 'import numpy as np\n'), ((2235, 2270), 'numpy.fft.fftfreq', 'np.fft.fftfreq', (['x.size', '(x[1] - x[0])'], {}), '(x.size, x[1] - x[0])\n', (2249, 2270), True, 'import numpy as np\n'), ((2306, 2319), 'numpy.fft.fft', 'np.fft.fft', (['y'], {}), '(y)\n', (2316, 2319), True, 'import numpy as np\n')] |
ezequielramos/oci-python-sdk | src/oci/devops/models/github_build_run_source.py | cc4235cf217beaf9feed75760e9ce82610222762 | # coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from .build_run_source import BuildRunSource
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class GithubBuildRunSource(BuildRunSource):
"""
Specifies details of build run through GitHub.
"""
def __init__(self, **kwargs):
"""
Initializes a new GithubBuildRunSource object with values from keyword arguments. The default value of the :py:attr:`~oci.devops.models.GithubBuildRunSource.source_type` attribute
of this class is ``GITHUB`` and it should not be changed.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param source_type:
The value to assign to the source_type property of this GithubBuildRunSource.
Allowed values for this property are: "MANUAL", "GITHUB", "GITLAB", "DEVOPS_CODE_REPOSITORY"
:type source_type: str
:param trigger_id:
The value to assign to the trigger_id property of this GithubBuildRunSource.
:type trigger_id: str
:param trigger_info:
The value to assign to the trigger_info property of this GithubBuildRunSource.
:type trigger_info: oci.devops.models.TriggerInfo
"""
self.swagger_types = {
'source_type': 'str',
'trigger_id': 'str',
'trigger_info': 'TriggerInfo'
}
self.attribute_map = {
'source_type': 'sourceType',
'trigger_id': 'triggerId',
'trigger_info': 'triggerInfo'
}
self._source_type = None
self._trigger_id = None
self._trigger_info = None
self._source_type = 'GITHUB'
@property
def trigger_id(self):
"""
**[Required]** Gets the trigger_id of this GithubBuildRunSource.
The trigger that invoked the build run.
:return: The trigger_id of this GithubBuildRunSource.
:rtype: str
"""
return self._trigger_id
@trigger_id.setter
def trigger_id(self, trigger_id):
"""
Sets the trigger_id of this GithubBuildRunSource.
The trigger that invoked the build run.
:param trigger_id: The trigger_id of this GithubBuildRunSource.
:type: str
"""
self._trigger_id = trigger_id
@property
def trigger_info(self):
"""
**[Required]** Gets the trigger_info of this GithubBuildRunSource.
:return: The trigger_info of this GithubBuildRunSource.
:rtype: oci.devops.models.TriggerInfo
"""
return self._trigger_info
@trigger_info.setter
def trigger_info(self, trigger_info):
"""
Sets the trigger_info of this GithubBuildRunSource.
:param trigger_info: The trigger_info of this GithubBuildRunSource.
:type: oci.devops.models.TriggerInfo
"""
self._trigger_info = trigger_info
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
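# Illustrative usage (hypothetical OCID; TriggerInfo as defined in oci.devops.models):
# source = GithubBuildRunSource(trigger_id="ocid1.trigger.oc1..example",
#                               trigger_info=trigger_info)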
| [((3416, 3441), 'oci.util.formatted_flat_dict', 'formatted_flat_dict', (['self'], {}), '(self)\n', (3435, 3441), False, 'from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel\n')] |
anoopkcn/aiida-fleur | aiida_fleur/tests/tools/test_common_fleur_wf.py | 5d4cc2092b7c3ce5402f1d4b89787eae53b2e60f | from __future__ import absolute_import
import pytest
import os
# is_code
def test_is_code_interface(fixture_code):
from aiida_fleur.tools.common_fleur_wf import is_code
assert is_code('random_string') is None
assert is_code('fleur.inpGUT') is None
assert is_code(99999) is None
code = fixture_code('fleur.inpgen')
code.store()
assert is_code(code.uuid)
assert is_code(code.pk)
assert is_code('@'.join([code.label, code.get_computer_name()]))
assert is_code(code)
def test_get_inputs_fleur():
'''
Tests if get_inputs_fleur assembles inputs correctly.
Note it is the work of FleurCalculation
to check if input types are correct i.e. 'code' is a Fleur code etc.
'''
from aiida_fleur.tools.common_fleur_wf import get_inputs_fleur
from aiida.orm import Dict
inputs = {'code': 'code', 'remote': 'remote', 'fleurinp': 'fleurinp',
'options': {'custom_scheduler_commands': 'test_command'}, 'label': 'label',
'description': 'description', 'settings': {'test': 1}, 'serial': False}
results = get_inputs_fleur(**inputs)
out_options = results['options'].get_dict()
out_settings = results['settings'].get_dict()
assert results['code'] == 'code'
assert results['fleurinpdata'] == 'fleurinp'
assert results['parent_folder'] == 'remote'
assert results['description'] == 'description'
assert results['label'] == 'label'
assert out_options == {'custom_scheduler_commands': 'test_command',
'withmpi': True}
assert out_settings == {'test': 1}
inputs = {'code': 'code', 'remote': 'remote', 'fleurinp': 'fleurinp',
'options': {'custom_scheduler_commands': 'test_command'}, 'serial': True}
results = get_inputs_fleur(**inputs)
out_options = results['options'].get_dict()
assert results['description'] == ''
assert results['label'] == ''
assert out_options == {'custom_scheduler_commands': 'test_command',
'withmpi': False, 'resources': {"num_machines": 1}}
def test_get_inputs_inpgen(fixture_code, generate_structure):
'''
Tests if get_inputs_fleur assembles inputs correctly.
Note it is the work of FleurinputgenCalculation
to check if input types are correct i.e. 'code' is a Fleur code etc.
'''
from aiida_fleur.tools.common_fleur_wf import get_inputs_inpgen
from aiida.orm import Dict
code = fixture_code('fleur.inpgen')
structure = generate_structure()
params = Dict(dict={'test': 1})
inputs = {'structure': structure, 'inpgencode': code, 'options': {},
'label': 'label', 'description': 'description',
'params': params}
returns = {'metadata': {
'options': {'withmpi': False, 'resources': {'num_machines': 1}},
'description': 'description', 'label': 'label'},
'code': code, 'parameters': params, 'structure': structure
}
assert get_inputs_inpgen(**inputs) == returns
# repeat without a label and description
inputs = {'structure': structure, 'inpgencode': code, 'options': {},
'params': params}
returns = {'metadata': {
'options': {'withmpi': False, 'resources': {'num_machines': 1}},
'description': '', 'label': ''},
'code': code, 'parameters': params, 'structure': structure}
assert get_inputs_inpgen(**inputs) == returns
@pytest.mark.skip(reason="Test is not implemented")
def test_get_scheduler_extras():
from aiida_fleur.tools.common_fleur_wf import get_scheduler_extras
# test_and_get_codenode
def test_test_and_get_codenode_inpgen(fixture_code):
from aiida_fleur.tools.common_fleur_wf import test_and_get_codenode
from aiida.orm import Code
from aiida.common.exceptions import NotExistent
# install code setup code
code = fixture_code('fleur.inpgen')
code_fleur = fixture_code('fleur.fleur')
code_fleur.label = 'fleur_test'
code_fleur.store()
expected = 'fleur.inpgen'
nonexpected = 'fleur.fleur'
not_existing = 'fleur.not_existing'
assert isinstance(test_and_get_codenode(code, expected), Code)
with pytest.raises(ValueError) as msg:
test_and_get_codenode(code, nonexpected, use_exceptions=True)
assert str(msg.value) == ("Given Code node is not of expected code type.\n"
"Valid labels for a fleur.fleur executable are:\n"
"* fleur_test@localhost-test")
with pytest.raises(ValueError) as msg:
test_and_get_codenode(code, not_existing, use_exceptions=True)
assert str(msg.value) == ("Code not valid, and no valid codes for fleur.not_existing.\n"
"Configure at least one first using\n"
" verdi code setup")
def test_get_kpoints_mesh_from_kdensity(generate_structure):
from aiida_fleur.tools.common_fleur_wf import get_kpoints_mesh_from_kdensity
from aiida.orm import KpointsData
a, b = get_kpoints_mesh_from_kdensity(generate_structure(), 0.1)
assert a == ([21, 21, 21], [0.0, 0.0, 0.0])
assert isinstance(b, KpointsData)
@pytest.mark.skip(reason="Test is not implemented")
def test_determine_favorable_reaction():
from aiida_fleur.tools.common_fleur_wf import determine_favorable_reaction
# @pytest.mark.skip(reason="There seems to be now way to add outputs to CalcJobNode")
def test_performance_extract_calcs(fixture_localhost,
generate_calc_job_node):
from aiida_fleur.tools.common_fleur_wf import performance_extract_calcs
from aiida.common.links import LinkType
from aiida.orm import Dict
out = Dict(dict={'title': 'A Fleur input generator calculation with aiida',
'energy': -138529.7052157,
'bandgap': 6.0662e-06,
'end_date': {'date': '2019/11/12', 'time': '16:12:08'},
'unparsed': [],
'walltime': 43,
'warnings': {'info': {}, 'debug': {}, 'error': {}, 'warning': {}},
'start_date': {'date': '2019/11/12', 'time': '16:11:25'},
'parser_info': 'AiiDA Fleur Parser v0.2beta',
'CalcJob_uuid': '3dc62d43-b607-4415-920f-e0d34e805711',
'creator_name': 'fleur 30',
'energy_units': 'eV',
'kmax': 4.2,
'fermi_energy': 0.0605833326,
'spin_density': 0.0792504665,
'bandgap_units': 'eV',
'force_largest': 0.0,
'energy_hartree': -5090.8728101494,
'walltime_units': 'seconds',
'charge_density1': 0.0577674505,
'charge_density2': 0.0461840944,
'number_of_atoms': 4,
'parser_warnings': [],
'magnetic_moments': [3.3720063737, 3.3719345944, 3.3719329177, 3.3719329162],
'number_of_kpoints': 8,
'number_of_species': 1,
'fermi_energy_units': 'Htr',
'sum_of_eigenvalues': -2973.4129786677,
'output_file_version': '0.27',
'energy_hartree_units': 'Htr',
'number_of_atom_types': 4,
'number_of_iterations': 11,
'number_of_symmetries': 8,
'energy_core_electrons': -2901.8120489845,
'magnetic_moment_units': 'muBohr',
'overall_charge_density': 0.0682602474,
'creator_target_structure': ' ',
'energy_valence_electrons': -71.6009296831,
'magnetic_spin_up_charges': [9.1494766577,
9.1494806151,
9.1494806833,
9.1494806834],
'orbital_magnetic_moments': [],
'density_convergence_units': 'me/bohr^3',
'number_of_spin_components': 2,
'charge_den_xc_den_integral': -223.295208608,
'magnetic_spin_down_charges': [5.777470284,
5.7775460208,
5.7775477657,
5.7775477672],
'number_of_iterations_total': 11,
'creator_target_architecture': 'GEN',
'orbital_magnetic_moment_units': 'muBohr',
'orbital_magnetic_spin_up_charges': [],
'orbital_magnetic_spin_down_charges': []})
out.store()
node = generate_calc_job_node('fleur.fleur', fixture_localhost)
node.store()
out.add_incoming(node, link_type=LinkType.CREATE, link_label='output_parameters')
result = performance_extract_calcs([node.pk])
assert result == {'n_symmetries': [8], 'n_spin_components': [2], 'n_kpoints': [8],
'n_iterations': [11], 'walltime_sec': [43],
'walltime_sec_per_it': [3.909090909090909],
'n_iterations_total': [11], 'density_distance': [0.0682602474],
'computer': ['localhost-test'],
'n_atoms': [4], 'kmax': [4.2], 'cost': [75866.11200000001],
'costkonstant': [147.02734883720933], 'walltime_sec_cor': [43],
'total_cost': [834527.2320000001], 'fermi_energy': [0.0605833326],
'bandgap': [6.0662e-06], 'energy': [-138529.7052157],
'force_largest': [0.0],
'ncores': [12], 'pk': [node.pk], 'uuid': [node.uuid],
'serial': [False],
'resources': [{'num_machines': 1, 'num_mpiprocs_per_machine': 1}]}
inputs_optimize = [(4, 8, 3, True, 0.5, None, 720),
(4, 8, 3, True, 2, None, 720),
(4, 8, 3, True, 100, None, 720),
(4, 8, 3, True, 100, None, 720, 0.5),
(4, 8, 3, False, 0.5, None, 720)]
results_optimize = [
(4, 3, 8, 'Computational setup is perfect! Nodes: 4, MPIs per node 3, OMP per MPI 8. Number of k-points is 720'),
(4, 6, 4, 'Computational setup is perfect! Nodes: 4, MPIs per node 6, OMP per MPI 4. Number of k-points is 720'),
(4, 12, 2, 'Computational setup is perfect! Nodes: 4, MPIs per node 12, OMP per MPI 2. Number of k-points is 720'),
(3, 24, 1, 'WARNING: Changed the number of nodes from 4 to 3'),
(4, 20, 1, 'WARNING: Changed the number of MPIs per node from 8 to 20 an OMP from 3 to 1. Changed the number of nodes from 4 to 4. Number of k-points is 720.')]
@pytest.mark.parametrize('input,result_correct', zip(inputs_optimize, results_optimize))
def test_optimize_calc_options(input, result_correct):
from aiida_fleur.tools.common_fleur_wf import optimize_calc_options
result = optimize_calc_options(*input)
assert result == result_correct
def test_find_last_in_restart(fixture_localhost,
generate_calc_job_node, generate_work_chain_node):
from aiida_fleur.tools.common_fleur_wf import find_last_in_restart
from aiida.common.links import LinkType
node1 = generate_calc_job_node('fleur.fleur', fixture_localhost)
node2 = generate_calc_job_node('fleur.fleur', fixture_localhost)
node3 = generate_calc_job_node('fleur.fleur', fixture_localhost)
node_main = generate_work_chain_node('fleur.base_relax', fixture_localhost)
node1.add_incoming(node_main, link_type=LinkType.CALL_CALC, link_label='CALL')
node2.add_incoming(node_main, link_type=LinkType.CALL_CALC, link_label='CALL')
node3.add_incoming(node_main, link_type=LinkType.CALL_CALC, link_label='CALL')
node1.store()
node2.store()
node3.store()
result = find_last_in_restart(node_main)
assert result == node3.uuid
| [((3424, 3474), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""Test is not implemented"""'}), "(reason='Test is not implemented')\n", (3440, 3474), False, 'import pytest\n'), ((5167, 5217), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""Test is not implemented"""'}), "(reason='Test is not implemented')\n", (5183, 5217), False, 'import pytest\n'), ((367, 385), 'aiida_fleur.tools.common_fleur_wf.is_code', 'is_code', (['code.uuid'], {}), '(code.uuid)\n', (374, 385), False, 'from aiida_fleur.tools.common_fleur_wf import is_code\n'), ((397, 413), 'aiida_fleur.tools.common_fleur_wf.is_code', 'is_code', (['code.pk'], {}), '(code.pk)\n', (404, 413), False, 'from aiida_fleur.tools.common_fleur_wf import is_code\n'), ((494, 507), 'aiida_fleur.tools.common_fleur_wf.is_code', 'is_code', (['code'], {}), '(code)\n', (501, 507), False, 'from aiida_fleur.tools.common_fleur_wf import is_code\n'), ((1094, 1120), 'aiida_fleur.tools.common_fleur_wf.get_inputs_fleur', 'get_inputs_fleur', ([], {}), '(**inputs)\n', (1110, 1120), False, 'from aiida_fleur.tools.common_fleur_wf import get_inputs_fleur\n'), ((1778, 1804), 'aiida_fleur.tools.common_fleur_wf.get_inputs_fleur', 'get_inputs_fleur', ([], {}), '(**inputs)\n', (1794, 1804), False, 'from aiida_fleur.tools.common_fleur_wf import get_inputs_fleur\n'), ((2534, 2556), 'aiida.orm.Dict', 'Dict', ([], {'dict': "{'test': 1}"}), "(dict={'test': 1})\n", (2538, 2556), False, 'from aiida.orm import Dict\n'), ((5702, 7653), 'aiida.orm.Dict', 'Dict', ([], {'dict': "{'title': 'A Fleur input generator calculation with aiida', 'energy': -\n 138529.7052157, 'bandgap': 6.0662e-06, 'end_date': {'date':\n '2019/11/12', 'time': '16:12:08'}, 'unparsed': [], 'walltime': 43,\n 'warnings': {'info': {}, 'debug': {}, 'error': {}, 'warning': {}},\n 'start_date': {'date': '2019/11/12', 'time': '16:11:25'}, 'parser_info':\n 'AiiDA Fleur Parser v0.2beta', 'CalcJob_uuid':\n '3dc62d43-b607-4415-920f-e0d34e805711', 'creator_name': 'fleur 30',\n 'energy_units': 'eV', 'kmax': 4.2, 'fermi_energy': 0.0605833326,\n 'spin_density': 0.0792504665, 'bandgap_units': 'eV', 'force_largest': \n 0.0, 'energy_hartree': -5090.8728101494, 'walltime_units': 'seconds',\n 'charge_density1': 0.0577674505, 'charge_density2': 0.0461840944,\n 'number_of_atoms': 4, 'parser_warnings': [], 'magnetic_moments': [\n 3.3720063737, 3.3719345944, 3.3719329177, 3.3719329162],\n 'number_of_kpoints': 8, 'number_of_species': 1, 'fermi_energy_units':\n 'Htr', 'sum_of_eigenvalues': -2973.4129786677, 'output_file_version':\n '0.27', 'energy_hartree_units': 'Htr', 'number_of_atom_types': 4,\n 'number_of_iterations': 11, 'number_of_symmetries': 8,\n 'energy_core_electrons': -2901.8120489845, 'magnetic_moment_units':\n 'muBohr', 'overall_charge_density': 0.0682602474,\n 'creator_target_structure': ' ', 'energy_valence_electrons': -\n 71.6009296831, 'magnetic_spin_up_charges': [9.1494766577, 9.1494806151,\n 9.1494806833, 9.1494806834], 'orbital_magnetic_moments': [],\n 'density_convergence_units': 'me/bohr^3', 'number_of_spin_components': \n 2, 'charge_den_xc_den_integral': -223.295208608,\n 'magnetic_spin_down_charges': [5.777470284, 5.7775460208, 5.7775477657,\n 5.7775477672], 'number_of_iterations_total': 11,\n 'creator_target_architecture': 'GEN', 'orbital_magnetic_moment_units':\n 'muBohr', 'orbital_magnetic_spin_up_charges': [],\n 'orbital_magnetic_spin_down_charges': []}"}), "(dict={'title': 'A Fleur input generator calculation with aiida',\n 'energy': -138529.7052157, 'bandgap': 6.0662e-06, 
'end_date': {'date':\n '2019/11/12', 'time': '16:12:08'}, 'unparsed': [], 'walltime': 43,\n 'warnings': {'info': {}, 'debug': {}, 'error': {}, 'warning': {}},\n 'start_date': {'date': '2019/11/12', 'time': '16:11:25'}, 'parser_info':\n 'AiiDA Fleur Parser v0.2beta', 'CalcJob_uuid':\n '3dc62d43-b607-4415-920f-e0d34e805711', 'creator_name': 'fleur 30',\n 'energy_units': 'eV', 'kmax': 4.2, 'fermi_energy': 0.0605833326,\n 'spin_density': 0.0792504665, 'bandgap_units': 'eV', 'force_largest': \n 0.0, 'energy_hartree': -5090.8728101494, 'walltime_units': 'seconds',\n 'charge_density1': 0.0577674505, 'charge_density2': 0.0461840944,\n 'number_of_atoms': 4, 'parser_warnings': [], 'magnetic_moments': [\n 3.3720063737, 3.3719345944, 3.3719329177, 3.3719329162],\n 'number_of_kpoints': 8, 'number_of_species': 1, 'fermi_energy_units':\n 'Htr', 'sum_of_eigenvalues': -2973.4129786677, 'output_file_version':\n '0.27', 'energy_hartree_units': 'Htr', 'number_of_atom_types': 4,\n 'number_of_iterations': 11, 'number_of_symmetries': 8,\n 'energy_core_electrons': -2901.8120489845, 'magnetic_moment_units':\n 'muBohr', 'overall_charge_density': 0.0682602474,\n 'creator_target_structure': ' ', 'energy_valence_electrons': -\n 71.6009296831, 'magnetic_spin_up_charges': [9.1494766577, 9.1494806151,\n 9.1494806833, 9.1494806834], 'orbital_magnetic_moments': [],\n 'density_convergence_units': 'me/bohr^3', 'number_of_spin_components': \n 2, 'charge_den_xc_den_integral': -223.295208608,\n 'magnetic_spin_down_charges': [5.777470284, 5.7775460208, 5.7775477657,\n 5.7775477672], 'number_of_iterations_total': 11,\n 'creator_target_architecture': 'GEN', 'orbital_magnetic_moment_units':\n 'muBohr', 'orbital_magnetic_spin_up_charges': [],\n 'orbital_magnetic_spin_down_charges': []})\n", (5706, 7653), False, 'from aiida.orm import Dict\n'), ((9055, 9091), 'aiida_fleur.tools.common_fleur_wf.performance_extract_calcs', 'performance_extract_calcs', (['[node.pk]'], {}), '([node.pk])\n', (9080, 9091), False, 'from aiida_fleur.tools.common_fleur_wf import performance_extract_calcs\n'), ((11146, 11175), 'aiida_fleur.tools.common_fleur_wf.optimize_calc_options', 'optimize_calc_options', (['*input'], {}), '(*input)\n', (11167, 11175), False, 'from aiida_fleur.tools.common_fleur_wf import optimize_calc_options\n'), ((12068, 12099), 'aiida_fleur.tools.common_fleur_wf.find_last_in_restart', 'find_last_in_restart', (['node_main'], {}), '(node_main)\n', (12088, 12099), False, 'from aiida_fleur.tools.common_fleur_wf import find_last_in_restart\n'), ((187, 211), 'aiida_fleur.tools.common_fleur_wf.is_code', 'is_code', (['"""random_string"""'], {}), "('random_string')\n", (194, 211), False, 'from aiida_fleur.tools.common_fleur_wf import is_code\n'), ((231, 254), 'aiida_fleur.tools.common_fleur_wf.is_code', 'is_code', (['"""fleur.inpGUT"""'], {}), "('fleur.inpGUT')\n", (238, 254), False, 'from aiida_fleur.tools.common_fleur_wf import is_code\n'), ((274, 288), 'aiida_fleur.tools.common_fleur_wf.is_code', 'is_code', (['(99999)'], {}), '(99999)\n', (281, 288), False, 'from aiida_fleur.tools.common_fleur_wf import is_code\n'), ((2969, 2996), 'aiida_fleur.tools.common_fleur_wf.get_inputs_inpgen', 'get_inputs_inpgen', ([], {}), '(**inputs)\n', (2986, 2996), False, 'from aiida_fleur.tools.common_fleur_wf import get_inputs_inpgen\n'), ((3382, 3409), 'aiida_fleur.tools.common_fleur_wf.get_inputs_inpgen', 'get_inputs_inpgen', ([], {}), '(**inputs)\n', (3399, 3409), False, 'from aiida_fleur.tools.common_fleur_wf import get_inputs_inpgen\n'), ((4115, 4152), 
'aiida_fleur.tools.common_fleur_wf.test_and_get_codenode', 'test_and_get_codenode', (['code', 'expected'], {}), '(code, expected)\n', (4136, 4152), False, 'from aiida_fleur.tools.common_fleur_wf import test_and_get_codenode\n'), ((4169, 4194), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (4182, 4194), False, 'import pytest\n'), ((4211, 4272), 'aiida_fleur.tools.common_fleur_wf.test_and_get_codenode', 'test_and_get_codenode', (['code', 'nonexpected'], {'use_exceptions': '(True)'}), '(code, nonexpected, use_exceptions=True)\n', (4232, 4272), False, 'from aiida_fleur.tools.common_fleur_wf import test_and_get_codenode\n'), ((4505, 4530), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (4518, 4530), False, 'import pytest\n'), ((4547, 4609), 'aiida_fleur.tools.common_fleur_wf.test_and_get_codenode', 'test_and_get_codenode', (['code', 'not_existing'], {'use_exceptions': '(True)'}), '(code, not_existing, use_exceptions=True)\n', (4568, 4609), False, 'from aiida_fleur.tools.common_fleur_wf import test_and_get_codenode\n')] |
admdev8/probnum | src/probnum/random_variables/_random_variable.py | 792b6299bac247cf8b1b5056756f0f078855d83a | """
Random Variables.
This module implements random variables. Random variables are the main in- and outputs
of probabilistic numerical methods.
"""
from typing import Any, Callable, Dict, Generic, Optional, Tuple, TypeVar, Union
import numpy as np
from probnum import utils as _utils
from probnum.type import (
ArrayLikeGetitemArgType,
DTypeArgType,
FloatArgType,
RandomStateArgType,
RandomStateType,
ShapeArgType,
ShapeType,
)
try:
# functools.cached_property is only available in Python >=3.8
from functools import cached_property
except ImportError:
from cached_property import cached_property
_ValueType = TypeVar("ValueType")
class RandomVariable(Generic[_ValueType]):
"""
Random variables are the main objects used by probabilistic numerical methods.
Every probabilistic numerical method takes a random variable encoding the prior
distribution as input and outputs a random variable whose distribution encodes the
uncertainty arising from finite computation. The generic signature of a
probabilistic numerical method is:
``output_rv = probnum_method(input_rv, method_params)``
In practice, most random variables used by methods in ProbNum have Dirac or Gaussian
measure.
Instances of :class:`RandomVariable` can be added, multiplied, etc. with arrays and
linear operators. This may change their ``distribution`` and not necessarily all
previously available methods are retained.
The internals of :class:`RandomVariable` objects are assumed to be constant over
their whole lifecycle. This is due to the caches used to make certain computations
more efficient. As a consequence, altering the internal state of a
:class:`RandomVariable` (e.g. its mean, cov, sampling function, etc.) will result in
undefined behavior. In particular, this should be kept in mind when subclassing
:class:`RandomVariable` or any of its descendants.
Parameters
----------
shape :
Shape of realizations of this random variable.
dtype :
Data type of realizations of this random variable. If ``object`` will be
converted to ``numpy.dtype``.
as_value_type :
Function which can be used to transform user-supplied arguments, interpreted as
realizations of this random variable, to an easy-to-process, normalized format.
Will be called internally to transform the argument of functions like
``in_support``, ``cdf`` and ``logcdf``, ``pmf`` and ``logpmf`` (in
:class:`DiscreteRandomVariable`), ``pdf`` and ``logpdf`` (in
:class:`ContinuousRandomVariable`), and potentially by similar functions in
subclasses.
For instance, this method is useful if (``log``)``cdf`` and (``log``)``pdf``
both only work on :class:`np.float_` arguments, but we still want the user to be
able to pass Python :class:`float`. Then ``as_value_type`` should be set to
something like ``lambda x: np.float64(x)``.
See Also
--------
asrandvar : Transform into a :class:`RandomVariable`.
Examples
--------
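    A minimal sketch of direct construction (illustrative only; in practice
    instances usually come from subclasses):

    >>> import numpy as np
    >>> rv = RandomVariable(shape=(2,), dtype=np.dtype(np.float_))
    >>> rv.shape
    (2,)
    >>> rv.dtype
    dtype('float64')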
"""
# pylint: disable=too-many-instance-attributes,too-many-public-methods
def __init__(
self,
shape: ShapeArgType,
dtype: DTypeArgType,
random_state: RandomStateArgType = None,
parameters: Optional[Dict[str, Any]] = None,
sample: Optional[Callable[[ShapeType], _ValueType]] = None,
in_support: Optional[Callable[[_ValueType], bool]] = None,
cdf: Optional[Callable[[_ValueType], np.float_]] = None,
logcdf: Optional[Callable[[_ValueType], np.float_]] = None,
quantile: Optional[Callable[[FloatArgType], _ValueType]] = None,
mode: Optional[Callable[[], _ValueType]] = None,
median: Optional[Callable[[], _ValueType]] = None,
mean: Optional[Callable[[], _ValueType]] = None,
cov: Optional[Callable[[], _ValueType]] = None,
var: Optional[Callable[[], _ValueType]] = None,
std: Optional[Callable[[], _ValueType]] = None,
entropy: Optional[Callable[[], np.float_]] = None,
as_value_type: Optional[Callable[[Any], _ValueType]] = None,
):
# pylint: disable=too-many-arguments,too-many-locals
"""Create a new random variable."""
self.__shape = _utils.as_shape(shape)
# Data Types
self.__dtype = np.dtype(dtype)
self.__median_dtype = RandomVariable.infer_median_dtype(self.__dtype)
self.__moment_dtype = RandomVariable.infer_moment_dtype(self.__dtype)
self._random_state = _utils.as_random_state(random_state)
# Probability distribution of the random variable
self.__parameters = parameters.copy() if parameters is not None else {}
self.__sample = sample
self.__in_support = in_support
self.__cdf = cdf
self.__logcdf = logcdf
self.__quantile = quantile
# Properties of the random variable
self.__mode = mode
self.__median = median
self.__mean = mean
self.__cov = cov
self.__var = var
self.__std = std
self.__entropy = entropy
# Utilities
self.__as_value_type = as_value_type
def __repr__(self) -> str:
return f"<{self.shape} {self.__class__.__name__} with dtype={self.dtype}>"
@property
def shape(self) -> ShapeType:
"""Shape of realizations of the random variable."""
return self.__shape
@cached_property
def ndim(self) -> int:
return len(self.__shape)
@cached_property
def size(self) -> int:
return int(np.prod(self.__shape))
@property
def dtype(self) -> np.dtype:
"""Data type of (elements of) a realization of this random variable."""
return self.__dtype
@property
def median_dtype(self) -> np.dtype:
"""The dtype of the :attr:`median`. It will be set to the dtype arising from
the multiplication of values with dtypes :attr:`dtype` and :class:`np.float_`.
This is motivated by the fact that, even for discrete random variables, e.g.
integer-valued random variables, the :attr:`median` might lie in between two
values in which case these values are averaged. For example, a uniform random
variable on :math:`\\{ 1, 2, 3, 4 \\}` will have a median of :math:`2.5`.
"""
return self.__median_dtype
@property
def moment_dtype(self) -> np.dtype:
"""The dtype of any (function of a) moment of the random variable, e.g. its
:attr:`mean`, :attr:`cov`, :attr:`var`, or :attr:`std`. It will be set to the
dtype arising from the multiplication of values with dtypes :attr:`dtype`
and :class:`np.float_`. This is motivated by the mathematical definition of a
moment as a sum or an integral over products of probabilities and values of the
        random variable, which are represented using the dtypes :class:`np.float_`
and :attr:`dtype`, respectively.
"""
return self.__moment_dtype
@property
def random_state(self) -> RandomStateType:
"""Random state of the random variable.
This attribute defines the RandomState object to use for drawing
realizations from this random variable.
If None (or np.random), the global np.random state is used.
If integer, it is used to seed the local :class:`~numpy.random.RandomState`
instance.
"""
return self._random_state
@random_state.setter
def random_state(self, seed: RandomStateArgType):
"""Get or set the RandomState object of the underlying distribution.
This can be either None or an existing RandomState object.
If None (or np.random), use the RandomState singleton used by np.random.
If already a RandomState instance, use it.
If an int, use a new RandomState instance seeded with seed.
"""
self._random_state = _utils.as_random_state(seed)
@property
def parameters(self) -> Dict[str, Any]:
"""
Parameters of the probability distribution.
        The parameters of the distribution, such as mean, variance, etc., stored
        in a ``dict``.
"""
return self.__parameters.copy()
@cached_property
def mode(self) -> _ValueType:
"""
Mode of the random variable.
Returns
-------
mode : float
The mode of the random variable.
"""
if self.__mode is None:
raise NotImplementedError
mode = self.__mode()
RandomVariable._check_property_value(
"mode",
mode,
shape=self.__shape,
dtype=self.__dtype,
)
# Make immutable
if isinstance(mode, np.ndarray):
mode.setflags(write=False)
return mode
@cached_property
def median(self) -> _ValueType:
"""
Median of the random variable.
To learn about the dtype of the median, see :attr:`median_dtype`.
Returns
-------
median : float
The median of the distribution.
"""
        if self.__shape != ():
            raise NotImplementedError(
                "The median is only defined for scalar random variables."
            )
        if self.__median is None:
            raise NotImplementedError
        median = self.__median()
RandomVariable._check_property_value(
"median",
median,
shape=self.__shape,
dtype=self.__median_dtype,
)
# Make immutable
if isinstance(median, np.ndarray):
median.setflags(write=False)
return median
@cached_property
def mean(self) -> _ValueType:
"""
Mean :math:`\\mathbb{E}(X)` of the distribution.
To learn about the dtype of the mean, see :attr:`moment_dtype`.
Returns
-------
mean : array-like
The mean of the distribution.
"""
if self.__mean is None:
raise NotImplementedError
mean = self.__mean()
RandomVariable._check_property_value(
"mean",
mean,
shape=self.__shape,
dtype=self.__moment_dtype,
)
# Make immutable
if isinstance(mean, np.ndarray):
mean.setflags(write=False)
return mean
@cached_property
def cov(self) -> _ValueType:
"""
Covariance :math:`\\operatorname{Cov}(X) = \\mathbb{E}((X-\\mathbb{E}(X))(X-\\mathbb{E}(X))^\\top)`
of the random variable.
To learn about the dtype of the covariance, see :attr:`moment_dtype`.
Returns
-------
cov : array-like
            The covariance of the random variable.
""" # pylint: disable=line-too-long
if self.__cov is None:
raise NotImplementedError
cov = self.__cov()
RandomVariable._check_property_value(
"covariance",
cov,
shape=(self.size, self.size) if self.ndim > 0 else (),
dtype=self.__moment_dtype,
)
# Make immutable
if isinstance(cov, np.ndarray):
cov.setflags(write=False)
return cov
@cached_property
def var(self) -> _ValueType:
"""
Variance :math:`\\operatorname{Var}(X) = \\mathbb{E}((X-\\mathbb{E}(X))^2)` of
the distribution.
To learn about the dtype of the variance, see :attr:`moment_dtype`.
Returns
-------
var : array-like
The variance of the distribution.
"""
if self.__var is None:
try:
var = np.diag(self.cov).reshape(self.__shape).copy()
except NotImplementedError as exc:
raise NotImplementedError from exc
else:
var = self.__var()
RandomVariable._check_property_value(
"variance",
var,
shape=self.__shape,
dtype=self.__moment_dtype,
)
# Make immutable
if isinstance(var, np.ndarray):
var.setflags(write=False)
return var
@cached_property
def std(self) -> _ValueType:
"""
Standard deviation of the distribution.
To learn about the dtype of the standard deviation, see :attr:`moment_dtype`.
Returns
-------
std : array-like
The standard deviation of the distribution.
"""
if self.__std is None:
try:
std = np.sqrt(self.var)
except NotImplementedError as exc:
raise NotImplementedError from exc
else:
std = self.__std()
RandomVariable._check_property_value(
"standard deviation",
std,
shape=self.__shape,
dtype=self.__moment_dtype,
)
# Make immutable
if isinstance(std, np.ndarray):
std.setflags(write=False)
return std
@cached_property
def entropy(self) -> np.float_:
if self.__entropy is None:
raise NotImplementedError
entropy = self.__entropy()
entropy = RandomVariable._ensure_numpy_float(
"entropy", entropy, force_scalar=True
)
return entropy
def in_support(self, x: _ValueType) -> bool:
if self.__in_support is None:
raise NotImplementedError
in_support = self.__in_support(self._as_value_type(x))
        if not isinstance(in_support, bool):
            raise ValueError(
                f"The function `in_support` must return a `bool`, but its return value "
                f"is of type `{type(in_support)}`."
            )
return in_support
def sample(self, size: ShapeArgType = ()) -> _ValueType:
"""
Draw realizations from a random variable.
Parameters
----------
size : tuple
Size of the drawn sample of realizations.
Returns
-------
sample : array-like
Sample of realizations with the given ``size`` and the inherent ``shape``.
"""
if self.__sample is None:
raise NotImplementedError("No sampling method provided.")
return self.__sample(size=_utils.as_shape(size))
def cdf(self, x: _ValueType) -> np.float_:
"""
Cumulative distribution function.
Parameters
----------
x : array-like
Evaluation points of the cumulative distribution function.
The shape of this argument should be :code:`(..., S1, ..., SN)`, where
:code:`(S1, ..., SN)` is the :attr:`shape` of the random variable.
The cdf evaluation will be broadcast over all additional dimensions.
Returns
-------
q : array-like
            Value of the cumulative distribution function at the given points.
"""
if self.__cdf is not None:
return RandomVariable._ensure_numpy_float(
"cdf", self.__cdf(self._as_value_type(x))
)
elif self.__logcdf is not None:
cdf = np.exp(self.logcdf(self._as_value_type(x)))
assert isinstance(cdf, np.float_)
return cdf
else:
raise NotImplementedError(
f"Neither the `cdf` nor the `logcdf` of the random variable object "
f"with type `{type(self).__name__}` is implemented."
)
def logcdf(self, x: _ValueType) -> np.float_:
"""
Log-cumulative distribution function.
Parameters
----------
x : array-like
Evaluation points of the cumulative distribution function.
The shape of this argument should be :code:`(..., S1, ..., SN)`, where
:code:`(S1, ..., SN)` is the :attr:`shape` of the random variable.
The logcdf evaluation will be broadcast over all additional dimensions.
Returns
-------
q : array-like
            Value of the log-cumulative distribution function at the given points.
"""
if self.__logcdf is not None:
return RandomVariable._ensure_numpy_float(
"logcdf", self.__logcdf(self._as_value_type(x))
)
elif self.__cdf is not None:
logcdf = np.log(self.__cdf(x))
assert isinstance(logcdf, np.float_)
return logcdf
else:
raise NotImplementedError(
f"Neither the `logcdf` nor the `cdf` of the random variable object "
f"with type `{type(self).__name__}` is implemented."
)
def quantile(self, p: FloatArgType) -> _ValueType:
"""Quantile function.
The quantile function :math:`Q \\colon [0, 1] \\to \\mathbb{R}` of a random
variable :math:`X` is defined as
:math:`Q(p) = \\inf\\{ x \\in \\mathbb{R} \\colon p \\le F_X(x) \\}`, where
:math:`F_X \\colon \\mathbb{R} \\to [0, 1]` is the :meth:`cdf` of the random
variable. From the definition it follows that the quantile function always
returns values of the same dtype as the random variable. For instance, for a
discrete distribution over the integers, the returned quantiles will also be
integers. This means that, in general, :math:`Q(0.5)` is not equal to the
:attr:`median` as it is defined in this class. See
https://en.wikipedia.org/wiki/Quantile_function for more details and examples.
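        As a concrete illustration, consider a fair coin, i.e. a random
        variable taking the values :math:`0` and :math:`1` with probability
        :math:`1/2` each. Then :math:`F_X(0) = 0.5` and hence
        :math:`Q(0.5) = 0`, while the averaging convention used by
        :attr:`median` yields :math:`0.5`.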
"""
if self.__shape != ():
raise NotImplementedError(
"The quantile function is only defined for scalar random variables."
)
if self.__quantile is None:
raise NotImplementedError
try:
p = _utils.as_numpy_scalar(p, dtype=np.floating)
except TypeError as exc:
raise TypeError(
"The given argument `p` can not be cast to a `np.floating` object."
) from exc
quantile = self.__quantile(p)
if quantile.shape != self.__shape:
            raise ValueError(
                f"The quantile function should return values of the same shape as the "
                f"random variable, i.e. {self.__shape}, but it returned a value with "
                f"shape {quantile.shape}."
            )
if quantile.dtype != self.__dtype:
raise ValueError(
f"The quantile function should return values of the same dtype as the "
f"random variable, i.e. `{self.__dtype.name}`, but it returned a value "
f"with dtype `{quantile.dtype.name}`."
)
return quantile
def __getitem__(self, key: ArrayLikeGetitemArgType) -> "RandomVariable":
return RandomVariable(
shape=np.empty(shape=self.shape)[key].shape,
dtype=self.dtype,
random_state=_utils.derive_random_seed(self.random_state),
sample=lambda size: self.sample(size)[key],
mode=lambda: self.mode[key],
mean=lambda: self.mean[key],
var=lambda: self.var[key],
std=lambda: self.std[key],
entropy=lambda: self.entropy,
as_value_type=self.__as_value_type,
)
def reshape(self, newshape: ShapeArgType) -> "RandomVariable":
"""
Give a new shape to a random variable.
Parameters
----------
newshape : int or tuple of ints
New shape for the random variable. It must be compatible with the original
shape.
Returns
-------
reshaped_rv : ``self`` with the new dimensions of ``shape``.
"""
newshape = _utils.as_shape(newshape)
return RandomVariable(
shape=newshape,
dtype=self.dtype,
random_state=_utils.derive_random_seed(self.random_state),
sample=lambda size: self.sample(size).reshape(size + newshape),
mode=lambda: self.mode.reshape(newshape),
median=lambda: self.median.reshape(newshape),
mean=lambda: self.mean.reshape(newshape),
cov=lambda: self.cov,
var=lambda: self.var.reshape(newshape),
std=lambda: self.std.reshape(newshape),
entropy=lambda: self.entropy,
as_value_type=self.__as_value_type,
)
def transpose(self, *axes: int) -> "RandomVariable":
"""
Transpose the random variable.
Parameters
----------
axes : None, tuple of ints, or n ints
See documentation of numpy.ndarray.transpose.
Returns
-------
transposed_rv : The transposed random variable.
"""
return RandomVariable(
shape=np.empty(shape=self.shape).transpose(*axes).shape,
dtype=self.dtype,
random_state=_utils.derive_random_seed(self.random_state),
sample=lambda size: self.sample(size).transpose(*axes),
mode=lambda: self.mode.transpose(*axes),
median=lambda: self.median.transpose(*axes),
mean=lambda: self.mean.transpose(*axes),
cov=lambda: self.cov,
var=lambda: self.var.transpose(*axes),
std=lambda: self.std.transpose(*axes),
entropy=lambda: self.entropy,
as_value_type=self.__as_value_type,
)
T = property(transpose)
# Unary arithmetic operations
def __neg__(self) -> "RandomVariable":
return RandomVariable(
shape=self.shape,
dtype=self.dtype,
random_state=_utils.derive_random_seed(self.random_state),
sample=lambda size: -self.sample(size=size),
in_support=lambda x: self.in_support(-x),
mode=lambda: -self.mode,
median=lambda: -self.median,
mean=lambda: -self.mean,
cov=lambda: self.cov,
var=lambda: self.var,
std=lambda: self.std,
as_value_type=self.__as_value_type,
)
def __pos__(self) -> "RandomVariable":
return RandomVariable(
shape=self.shape,
dtype=self.dtype,
random_state=_utils.derive_random_seed(self.random_state),
sample=lambda size: +self.sample(size=size),
in_support=lambda x: self.in_support(+x),
mode=lambda: +self.mode,
median=lambda: +self.median,
mean=lambda: +self.mean,
cov=lambda: self.cov,
var=lambda: self.var,
std=lambda: self.std,
as_value_type=self.__as_value_type,
)
def __abs__(self) -> "RandomVariable":
return RandomVariable(
shape=self.shape,
dtype=self.dtype,
random_state=_utils.derive_random_seed(self.random_state),
sample=lambda size: abs(self.sample(size=size)),
)
# Binary arithmetic operations
__array_ufunc__ = None
"""
This prevents numpy from calling elementwise arithmetic
operations allowing expressions like: y = np.array([1, 1]) + RV
to call the arithmetic operations defined by RandomVariable
instead of elementwise. Thus no array of RandomVariables but a
RandomVariable with the correct shape is returned.
"""
def __add__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import add
return add(self, other)
def __radd__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import add
return add(other, self)
def __sub__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import sub
return sub(self, other)
def __rsub__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import sub
return sub(other, self)
def __mul__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import mul
return mul(self, other)
def __rmul__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import mul
return mul(other, self)
def __matmul__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import matmul
return matmul(self, other)
def __rmatmul__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import matmul
return matmul(other, self)
def __truediv__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import truediv
return truediv(self, other)
def __rtruediv__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import truediv
return truediv(other, self)
def __floordiv__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import floordiv
return floordiv(self, other)
def __rfloordiv__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import floordiv
return floordiv(other, self)
def __mod__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import mod
return mod(self, other)
def __rmod__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import mod
return mod(other, self)
def __divmod__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import divmod_
return divmod_(self, other)
def __rdivmod__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import divmod_
return divmod_(other, self)
def __pow__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import pow_
return pow_(self, other)
def __rpow__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import pow_
return pow_(other, self)
@staticmethod
def infer_median_dtype(value_dtype: DTypeArgType) -> np.dtype:
return RandomVariable.infer_moment_dtype(value_dtype)
@staticmethod
def infer_moment_dtype(value_dtype: DTypeArgType) -> np.dtype:
return np.promote_types(value_dtype, np.float_)
def _as_value_type(self, x: Any) -> _ValueType:
if self.__as_value_type is not None:
return self.__as_value_type(x)
return x
@staticmethod
def _check_property_value(
name: str,
value: Any,
shape: Optional[Tuple[int, ...]] = None,
dtype: Optional[np.dtype] = None,
):
if shape is not None:
if value.shape != shape:
raise ValueError(
f"The {name} of the random variable does not have the correct "
f"shape. Expected {shape} but got {value.shape}."
)
if dtype is not None:
if not np.issubdtype(value.dtype, dtype):
raise ValueError(
f"The {name} of the random variable does not have the correct "
f"dtype. Expected {dtype.name} but got {value.dtype.name}."
)
@classmethod
def _ensure_numpy_float(
cls, name: str, value: Any, force_scalar: bool = False
) -> Union[np.float_, np.ndarray]:
if np.isscalar(value):
if not isinstance(value, np.float_):
try:
value = _utils.as_numpy_scalar(value, dtype=np.float_)
except TypeError as err:
raise TypeError(
f"The function `{name}` specified via the constructor of "
f"`{cls.__name__}` must return a scalar value that can be "
f"converted to a `np.float_`, which is not possible for "
f"{value} of type {type(value)}."
) from err
elif not force_scalar:
try:
value = np.asarray(value, dtype=np.float_)
except TypeError as err:
raise TypeError(
f"The function `{name}` specified via the constructor of "
f"`{cls.__name__}` must return a value that can be converted "
f"to a `np.ndarray` of type `np.float_`, which is not possible "
f"for {value} of type {type(value)}."
) from err
else:
raise TypeError(
f"The function `{name}` specified via the constructor of "
f"`{cls.__name__}` must return a scalar value, but {value} of type "
f"{type(value)} is not scalar."
)
assert isinstance(value, (np.float_, np.ndarray))
return value
class DiscreteRandomVariable(RandomVariable[_ValueType]):
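    """Random variable with at most countably many possible realizations,
    characterized by its probability mass function."""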
def __init__(
self,
shape: ShapeArgType,
dtype: DTypeArgType,
random_state: Optional[RandomStateType] = None,
parameters: Optional[Dict[str, Any]] = None,
sample: Optional[Callable[[ShapeArgType], _ValueType]] = None,
in_support: Optional[Callable[[_ValueType], bool]] = None,
pmf: Optional[Callable[[_ValueType], np.float_]] = None,
logpmf: Optional[Callable[[_ValueType], np.float_]] = None,
cdf: Optional[Callable[[_ValueType], np.float_]] = None,
logcdf: Optional[Callable[[_ValueType], np.float_]] = None,
quantile: Optional[Callable[[FloatArgType], _ValueType]] = None,
mode: Optional[Callable[[], _ValueType]] = None,
median: Optional[Callable[[], _ValueType]] = None,
mean: Optional[Callable[[], _ValueType]] = None,
cov: Optional[Callable[[], _ValueType]] = None,
var: Optional[Callable[[], _ValueType]] = None,
std: Optional[Callable[[], _ValueType]] = None,
entropy: Optional[Callable[[], np.float_]] = None,
):
# Probability mass function
self.__pmf = pmf
self.__logpmf = logpmf
super().__init__(
shape=shape,
dtype=dtype,
random_state=random_state,
parameters=parameters,
sample=sample,
in_support=in_support,
cdf=cdf,
logcdf=logcdf,
quantile=quantile,
mode=mode,
median=median,
mean=mean,
cov=cov,
var=var,
std=std,
entropy=entropy,
)
def pmf(self, x: _ValueType) -> np.float_:
if self.__pmf is not None:
return DiscreteRandomVariable._ensure_numpy_float("pmf", self.__pmf(x))
elif self.__logpmf is not None:
pmf = np.exp(self.__logpmf(x))
assert isinstance(pmf, np.float_)
return pmf
else:
raise NotImplementedError(
f"Neither the `pmf` nor the `logpmf` of the discrete random variable "
f"object with type `{type(self).__name__}` is implemented."
)
def logpmf(self, x: _ValueType) -> np.float_:
if self.__logpmf is not None:
return DiscreteRandomVariable._ensure_numpy_float(
"logpmf", self.__logpmf(self._as_value_type(x))
)
elif self.__pmf is not None:
logpmf = np.log(self.__pmf(self._as_value_type(x)))
assert isinstance(logpmf, np.float_)
return logpmf
else:
raise NotImplementedError(
f"Neither the `logpmf` nor the `pmf` of the discrete random variable "
f"object with type `{type(self).__name__}` is implemented."
)
class ContinuousRandomVariable(RandomVariable[_ValueType]):
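    """Random variable with uncountably many possible realizations,
    characterized by its probability density function."""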
def __init__(
self,
shape: ShapeArgType,
dtype: DTypeArgType,
random_state: Optional[RandomStateType] = None,
parameters: Optional[Dict[str, Any]] = None,
sample: Optional[Callable[[ShapeArgType], _ValueType]] = None,
in_support: Optional[Callable[[_ValueType], bool]] = None,
pdf: Optional[Callable[[_ValueType], np.float_]] = None,
logpdf: Optional[Callable[[_ValueType], np.float_]] = None,
cdf: Optional[Callable[[_ValueType], np.float_]] = None,
logcdf: Optional[Callable[[_ValueType], np.float_]] = None,
quantile: Optional[Callable[[FloatArgType], _ValueType]] = None,
mode: Optional[Callable[[], _ValueType]] = None,
median: Optional[Callable[[], _ValueType]] = None,
mean: Optional[Callable[[], _ValueType]] = None,
cov: Optional[Callable[[], _ValueType]] = None,
var: Optional[Callable[[], _ValueType]] = None,
std: Optional[Callable[[], _ValueType]] = None,
entropy: Optional[Callable[[], np.float_]] = None,
):
# Probability density function
self.__pdf = pdf
self.__logpdf = logpdf
super().__init__(
shape=shape,
dtype=dtype,
random_state=random_state,
parameters=parameters,
sample=sample,
in_support=in_support,
cdf=cdf,
logcdf=logcdf,
quantile=quantile,
mode=mode,
median=median,
mean=mean,
cov=cov,
var=var,
std=std,
entropy=entropy,
)
def pdf(self, x: _ValueType) -> np.float_:
"""
        Probability density function.
Following the predominant convention in mathematics, we express pdfs with
respect to the Lebesgue measure unless stated otherwise.
Parameters
----------
x : array-like
            Evaluation points of the probability density function.
The shape of this argument should be :code:`(..., S1, ..., SN)`, where
:code:`(S1, ..., SN)` is the :attr:`shape` of the random variable.
The pdf evaluation will be broadcast over all additional dimensions.
Returns
-------
p : array-like
            Value of the probability density function at the given points.
"""
if self.__pdf is not None:
return ContinuousRandomVariable._ensure_numpy_float(
"pdf", self.__pdf(self._as_value_type(x))
)
if self.__logpdf is not None:
pdf = np.exp(self.__logpdf(self._as_value_type(x)))
assert isinstance(pdf, np.float_)
return pdf
raise NotImplementedError(
f"Neither the `pdf` nor the `logpdf` of the continuous random variable "
f"object with type `{type(self).__name__}` is implemented."
)
def logpdf(self, x: _ValueType) -> np.float_:
"""
Natural logarithm of the probability density function.
Parameters
----------
x : array-like
            Evaluation points of the log-probability density function.
The shape of this argument should be :code:`(..., S1, ..., SN)`, where
:code:`(S1, ..., SN)` is the :attr:`shape` of the random variable.
The logpdf evaluation will be broadcast over all additional dimensions.
Returns
-------
logp : array-like
            Value of the log-probability density function at the given points.
"""
if self.__logpdf is not None:
return ContinuousRandomVariable._ensure_numpy_float(
"logpdf", self.__logpdf(self._as_value_type(x))
)
elif self.__pdf is not None:
logpdf = np.log(self.__pdf(self._as_value_type(x)))
assert isinstance(logpdf, np.float_)
return logpdf
else:
raise NotImplementedError(
f"Neither the `logpdf` nor the `pdf` of the continuous random variable "
f"object with type `{type(self).__name__}` is implemented."
)
| [((658, 678), 'typing.TypeVar', 'TypeVar', (['"""ValueType"""'], {}), "('ValueType')\n", (665, 678), False, 'from typing import Any, Callable, Dict, Generic, Optional, Tuple, TypeVar, Union\n'), ((4355, 4377), 'probnum.utils.as_shape', '_utils.as_shape', (['shape'], {}), '(shape)\n', (4370, 4377), True, 'from probnum import utils as _utils\n'), ((4423, 4438), 'numpy.dtype', 'np.dtype', (['dtype'], {}), '(dtype)\n', (4431, 4438), True, 'import numpy as np\n'), ((4625, 4661), 'probnum.utils.as_random_state', '_utils.as_random_state', (['random_state'], {}), '(random_state)\n', (4647, 4661), True, 'from probnum import utils as _utils\n'), ((8030, 8058), 'probnum.utils.as_random_state', '_utils.as_random_state', (['seed'], {}), '(seed)\n', (8052, 8058), True, 'from probnum import utils as _utils\n'), ((19830, 19855), 'probnum.utils.as_shape', '_utils.as_shape', (['newshape'], {}), '(newshape)\n', (19845, 19855), True, 'from probnum import utils as _utils\n'), ((27213, 27253), 'numpy.promote_types', 'np.promote_types', (['value_dtype', 'np.float_'], {}), '(value_dtype, np.float_)\n', (27229, 27253), True, 'import numpy as np\n'), ((28334, 28352), 'numpy.isscalar', 'np.isscalar', (['value'], {}), '(value)\n', (28345, 28352), True, 'import numpy as np\n'), ((5670, 5691), 'numpy.prod', 'np.prod', (['self.__shape'], {}), '(self.__shape)\n', (5677, 5691), True, 'import numpy as np\n'), ((17904, 17948), 'probnum.utils.as_numpy_scalar', '_utils.as_numpy_scalar', (['p'], {'dtype': 'np.floating'}), '(p, dtype=np.floating)\n', (17926, 17948), True, 'from probnum import utils as _utils\n'), ((12628, 12645), 'numpy.sqrt', 'np.sqrt', (['self.var'], {}), '(self.var)\n', (12635, 12645), True, 'import numpy as np\n'), ((14374, 14395), 'probnum.utils.as_shape', '_utils.as_shape', (['size'], {}), '(size)\n', (14389, 14395), True, 'from probnum import utils as _utils\n'), ((19023, 19067), 'probnum.utils.derive_random_seed', '_utils.derive_random_seed', (['self.random_state'], {}), '(self.random_state)\n', (19048, 19067), True, 'from probnum import utils as _utils\n'), ((19971, 20015), 'probnum.utils.derive_random_seed', '_utils.derive_random_seed', (['self.random_state'], {}), '(self.random_state)\n', (19996, 20015), True, 'from probnum import utils as _utils\n'), ((21005, 21049), 'probnum.utils.derive_random_seed', '_utils.derive_random_seed', (['self.random_state'], {}), '(self.random_state)\n', (21030, 21049), True, 'from probnum import utils as _utils\n'), ((21742, 21786), 'probnum.utils.derive_random_seed', '_utils.derive_random_seed', (['self.random_state'], {}), '(self.random_state)\n', (21767, 21786), True, 'from probnum import utils as _utils\n'), ((22334, 22378), 'probnum.utils.derive_random_seed', '_utils.derive_random_seed', (['self.random_state'], {}), '(self.random_state)\n', (22359, 22378), True, 'from probnum import utils as _utils\n'), ((22926, 22970), 'probnum.utils.derive_random_seed', '_utils.derive_random_seed', (['self.random_state'], {}), '(self.random_state)\n', (22951, 22970), True, 'from probnum import utils as _utils\n'), ((27923, 27956), 'numpy.issubdtype', 'np.issubdtype', (['value.dtype', 'dtype'], {}), '(value.dtype, dtype)\n', (27936, 27956), True, 'import numpy as np\n'), ((28452, 28498), 'probnum.utils.as_numpy_scalar', '_utils.as_numpy_scalar', (['value'], {'dtype': 'np.float_'}), '(value, dtype=np.float_)\n', (28474, 28498), True, 'from probnum import utils as _utils\n'), ((28987, 29021), 'numpy.asarray', 'np.asarray', (['value'], {'dtype': 'np.float_'}), '(value, 
dtype=np.float_)\n', (28997, 29021), True, 'import numpy as np\n'), ((18929, 18955), 'numpy.empty', 'np.empty', ([], {'shape': 'self.shape'}), '(shape=self.shape)\n', (18937, 18955), True, 'import numpy as np\n'), ((20899, 20925), 'numpy.empty', 'np.empty', ([], {'shape': 'self.shape'}), '(shape=self.shape)\n', (20907, 20925), True, 'import numpy as np\n'), ((11747, 11764), 'numpy.diag', 'np.diag', (['self.cov'], {}), '(self.cov)\n', (11754, 11764), True, 'import numpy as np\n')] |
IsaacHuang/google-cloud-sdk | platform/gcutil/lib/google_compute_engine/gcutil_lib/address_cmds_test.py | 52afa5d1a75dff08f4f5380c5cccc015bf796ca5 | # Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for address collection commands."""
import path_initializer
path_initializer.InitSysPath()
import json
import unittest
import gflags as flags
from gcutil_lib import address_cmds
from gcutil_lib import gcutil_unittest
from gcutil_lib import mock_api
from gcutil_lib import mock_lists
FLAGS = flags.FLAGS
class AddressCmdsTest(gcutil_unittest.GcutilTestCase):
def setUp(self):
self.mock, self.api = mock_api.CreateApi(self.version)
def testReserveAddressPromptsForRegion(self):
expected_project = 'test_project'
expected_address = 'test_address'
expected_description = 'test address'
expected_region = 'test-region'
expected_source_address = '123.123.123.1'
set_flags = {
'project': expected_project,
'description': expected_description,
'source_address': expected_source_address,
}
command = self._CreateAndInitializeCommand(
address_cmds.ReserveAddress, 'reserveaddress', set_flags=set_flags)
mock_lists.GetSampleRegionListCall(
command, self.mock, num_responses=1, name=[expected_region])
call = self.mock.Respond('compute.addresses.insert', {})
command.Handle(expected_address)
request = call.GetRequest()
self.assertEqual('POST', request.method)
self.assertEqual(expected_project, request.parameters['project'])
self.assertEquals(expected_region, request.parameters['region'])
body = json.loads(request.body)
self.assertEqual(body['name'], expected_address)
self.assertEqual(body['description'], expected_description)
self.assertEquals(body['address'], expected_source_address)
def testReserveAddressGeneratesCorrectRequest(self):
expected_project = 'test_project'
expected_address = 'test_address'
expected_description = 'test address'
submitted_region = 'test-region'
expected_source_address = '123.123.123.1'
set_flags = {
'project': expected_project,
'description': expected_description,
'region': submitted_region,
'source_address': expected_source_address,
}
command = self._CreateAndInitializeCommand(
address_cmds.ReserveAddress, 'reserveaddress', set_flags=set_flags)
call = self.mock.Respond('compute.addresses.insert', {})
command.Handle(expected_address)
request = call.GetRequest()
self.assertEqual('POST', request.method)
self.assertEqual(expected_project, request.parameters['project'])
self.assertEquals(submitted_region, request.parameters['region'])
body = json.loads(request.body)
self.assertEqual(body['name'], expected_address)
self.assertEqual(body['description'], expected_description)
self.assertEquals(body['address'], expected_source_address)
def testGetAddressGeneratesCorrectRequest(self):
expected_project = 'test_project'
expected_address = 'test_address'
submitted_region = 'test-region'
set_flags = {
'project': expected_project,
'region': submitted_region,
}
command = self._CreateAndInitializeCommand(
address_cmds.GetAddress, 'getaddress', set_flags=set_flags)
call = self.mock.Respond('compute.addresses.get', {})
command.Handle(expected_address)
request = call.GetRequest()
self.assertEqual('GET', request.method)
self.assertEqual(None, request.body)
parameters = request.parameters
self.assertEqual(parameters['project'], expected_project)
self.assertEqual(parameters['region'], submitted_region)
self.assertEqual(parameters['address'], expected_address)
def testGetAddressPrintNonEmptyUsers(self):
expected_project = 'test_project'
submitted_region = 'test-region'
set_flags = {
'project': expected_project,
'region': submitted_region,
}
command = self._CreateAndInitializeCommand(
address_cmds.GetAddress, 'getaddress', set_flags=set_flags)
data = command.GetDetailRow({'users': ['fr-1', 'fr-2']})
expected_data = {
'v1': [
('users', ['fr-1', 'fr-2'])
],
}
self.assertEquals(
gcutil_unittest.SelectTemplateForVersion(
expected_data, command.api.version),
data)
def testGetAddressPrintEmptyUsers(self):
expected_project = 'test_project'
submitted_region = 'test-region'
set_flags = {
'project': expected_project,
'region': submitted_region,
}
command = self._CreateAndInitializeCommand(
address_cmds.GetAddress, 'getaddress', set_flags=set_flags)
data = command.GetDetailRow({'users': []})
expected_data = {
'v1': [
('users', [])
],
}
self.assertEquals(
gcutil_unittest.SelectTemplateForVersion(
expected_data, command.api.version),
data)
def testReleaseAddressGeneratesCorrectRequest(self):
expected_project = 'test_project'
expected_address = 'test_address'
submitted_region = 'test-region'
set_flags = {
'project': expected_project,
'region': submitted_region,
}
command = self._CreateAndInitializeCommand(
address_cmds.ReleaseAddress, 'releaseaddress', set_flags=set_flags)
call = self.mock.Respond('compute.addresses.delete', {})
command.Handle(expected_address)
request = call.GetRequest()
self.assertEqual('DELETE', request.method)
self.assertEqual(None, request.body)
parameters = request.parameters
self.assertEqual(parameters['project'], expected_project)
self.assertEqual(parameters['region'], submitted_region)
self.assertEqual(parameters['address'], expected_address)
def testReleaseAddressWithoutRegionFlag(self):
expected_project = 'test_project'
expected_region = 'test-region'
expected_address = 'test_address'
address = ('projects/%s/regions/%s/addresses/%s' %
(expected_project, expected_region, expected_address))
set_flags = {
'project': 'incorrect_project',
}
command = self._CreateAndInitializeCommand(
address_cmds.ReleaseAddress, 'releaseaddress', set_flags=set_flags)
call = self.mock.Respond('compute.addresses.delete', {})
command.Handle(address)
request = call.GetRequest()
self.assertEqual('DELETE', request.method)
self.assertEqual(None, request.body)
parameters = request.parameters
self.assertEqual(parameters['project'], expected_project)
self.assertEqual(parameters['region'], expected_region)
self.assertEqual(parameters['address'], expected_address)
def testReleaseMultipleAddresses(self):
expected_project = 'test_project'
expected_addresses = [
'test-addresses-%02d' % x for x in xrange(100)]
set_flags = {
'project': expected_project,
'region': 'region-a',
}
command = self._CreateAndInitializeCommand(
address_cmds.ReleaseAddress, 'releaseaddress', set_flags=set_flags)
calls = [self.mock.Respond('compute.addresses.delete', {})
for x in xrange(len(expected_addresses))]
_, exceptions = command.Handle(*expected_addresses)
self.assertEqual(0, len(exceptions))
sorted_calls = sorted([call.GetRequest().parameters['address'] for
call in calls])
self.assertEqual(expected_addresses, sorted_calls)
if __name__ == '__main__':
unittest.main(testLoader=gcutil_unittest.GcutilLoader())
| [((674, 704), 'path_initializer.InitSysPath', 'path_initializer.InitSysPath', ([], {}), '()\n', (702, 704), False, 'import path_initializer\n'), ((1023, 1055), 'gcutil_lib.mock_api.CreateApi', 'mock_api.CreateApi', (['self.version'], {}), '(self.version)\n', (1041, 1055), False, 'from gcutil_lib import mock_api\n'), ((1593, 1692), 'gcutil_lib.mock_lists.GetSampleRegionListCall', 'mock_lists.GetSampleRegionListCall', (['command', 'self.mock'], {'num_responses': '(1)', 'name': '[expected_region]'}), '(command, self.mock, num_responses=1,\n name=[expected_region])\n', (1627, 1692), False, 'from gcutil_lib import mock_lists\n'), ((2026, 2050), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (2036, 2050), False, 'import json\n'), ((3136, 3160), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (3146, 3160), False, 'import json\n'), ((4699, 4775), 'gcutil_lib.gcutil_unittest.SelectTemplateForVersion', 'gcutil_unittest.SelectTemplateForVersion', (['expected_data', 'command.api.version'], {}), '(expected_data, command.api.version)\n', (4739, 4775), False, 'from gcutil_lib import gcutil_unittest\n'), ((5310, 5386), 'gcutil_lib.gcutil_unittest.SelectTemplateForVersion', 'gcutil_unittest.SelectTemplateForVersion', (['expected_data', 'command.api.version'], {}), '(expected_data, command.api.version)\n', (5350, 5386), False, 'from gcutil_lib import gcutil_unittest\n'), ((7994, 8024), 'gcutil_lib.gcutil_unittest.GcutilLoader', 'gcutil_unittest.GcutilLoader', ([], {}), '()\n', (8022, 8024), False, 'from gcutil_lib import gcutil_unittest\n')] |
jnegrete2005/JuradoFMS | vote/migrations/0005_auto_20210204_1900.py | 25848037e51de1781c419155615d0fb41edc07ec | # Generated by Django 3.1.5 on 2021-02-05 00:00
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('vote', '0004_auto_20210131_1621'),
]
operations = [
migrations.AlterField(
model_name='competitor',
name='min1',
field=django.contrib.postgres.fields.ArrayField(base_field=models.PositiveSmallIntegerField(), blank=True, null=True, size=9, verbose_name='minuto 1'),
),
migrations.AlterField(
model_name='competitor',
name='min2',
field=django.contrib.postgres.fields.ArrayField(base_field=models.PositiveSmallIntegerField(), blank=True, null=True, size=9, verbose_name='minuto 2'),
),
]
| [((426, 460), 'django.db.models.PositiveSmallIntegerField', 'models.PositiveSmallIntegerField', ([], {}), '()\n', (458, 460), False, 'from django.db import migrations, models\n'), ((694, 728), 'django.db.models.PositiveSmallIntegerField', 'models.PositiveSmallIntegerField', ([], {}), '()\n', (726, 728), False, 'from django.db import migrations, models\n')] |
ngzhian/emscripten | tools/wasm-sourcemap.py | 94b1555a09f869d65354a2033da724ce77a43106 | #!/usr/bin/env python
# Copyright 2018 The Emscripten Authors. All rights reserved.
# Emscripten is available under two separate licenses, the MIT license and the
# University of Illinois/NCSA Open Source License. Both these licenses can be
# found in the LICENSE file.
"""Utility tools that extracts DWARF information encoded in a wasm output
produced by the LLVM tools, and encodes it as a wasm source map. Additionally,
it can collect original sources, change files prefixes, and strip debug
sections from a wasm file.
"""
import argparse
from collections import OrderedDict, namedtuple
import json
import logging
from math import floor, log
import os
import re
from subprocess import Popen, PIPE
import sys
sys.path.insert(1, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from tools.shared import asstr
logger = logging.getLogger('wasm-sourcemap')
def parse_args():
parser = argparse.ArgumentParser(prog='wasm-sourcemap.py', description=__doc__)
parser.add_argument('wasm', help='wasm file')
parser.add_argument('-o', '--output', help='output source map')
parser.add_argument('-p', '--prefix', nargs='*', help='replace source debug filename prefix for source map', default=[])
parser.add_argument('-s', '--sources', action='store_true', help='read and embed source files from file system into source map')
parser.add_argument('-l', '--load-prefix', nargs='*', help='replace source debug filename prefix for reading sources from file system (see also --sources)', default=[])
parser.add_argument('-w', nargs='?', help='set output wasm file')
parser.add_argument('-x', '--strip', action='store_true', help='removes debug and linking sections')
  parser.add_argument('-u', '--source-map-url', nargs='?', help='specifies sourceMappingURL section content')
parser.add_argument('--dwarfdump', help="path to llvm-dwarfdump executable")
parser.add_argument('--dwarfdump-output', nargs='?', help=argparse.SUPPRESS)
return parser.parse_args()
class Prefixes:
def __init__(self, args):
prefixes = []
for p in args:
if '=' in p:
prefix, replacement = p.split('=')
prefixes.append({'prefix': prefix, 'replacement': replacement})
else:
prefixes.append({'prefix': p, 'replacement': None})
self.prefixes = prefixes
self.cache = {}
def resolve(self, name):
if name in self.cache:
return self.cache[name]
result = name
for p in self.prefixes:
if name.startswith(p['prefix']):
if p['replacement'] is None:
result = name[len(p['prefix'])::]
else:
result = p['replacement'] + name[len(p['prefix'])::]
break
self.cache[name] = result
return result
# SourceMapPrefixes contains resolver for file names that are:
# - "sources" is for names that output to source maps JSON
# - "load" is for paths that used to load source text
SourceMapPrefixes = namedtuple('SourceMapPrefixes', 'sources, load')
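# For illustration: encode_vlq below encodes a signed integer as a single
# base64 VLQ segment of the source map "mappings" field, e.g.
# encode_vlq(16) == 'gB' and encode_vlq(-1) == 'D'.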
def encode_vlq(n):
VLQ_CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
x = (n << 1) if n >= 0 else ((-n << 1) + 1)
result = ""
while x > 31:
result = result + VLQ_CHARS[32 + (x & 31)]
x = x >> 5
return result + VLQ_CHARS[x]
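# For illustration: read_var_uint decodes an unsigned LEB128 integer starting
# at `pos` and returns the value together with the position just past it, e.g.
# read_var_uint(b'\xe5\x8e\x26', 0) == (624485, 3).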
def read_var_uint(wasm, pos):
n = 0
shift = 0
b = ord(wasm[pos:pos + 1])
pos = pos + 1
while b >= 128:
n = n | ((b - 128) << shift)
b = ord(wasm[pos:pos + 1])
pos = pos + 1
shift += 7
return n + (b << shift), pos
def strip_debug_sections(wasm):
logger.debug('Strip debug sections')
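  # The 8-byte module header (4-byte magic number + 4-byte version) is kept.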
pos = 8
stripped = wasm[:pos]
while pos < len(wasm):
section_start = pos
section_id, pos_ = read_var_uint(wasm, pos)
section_size, section_body = read_var_uint(wasm, pos_)
pos = section_body + section_size
if section_id == 0:
name_len, name_pos = read_var_uint(wasm, section_body)
name_end = name_pos + name_len
      name = asstr(wasm[name_pos:name_end])
      if name == "linking" or name == "sourceMappingURL" or name.startswith("reloc..debug_") or name.startswith(".debug_"):
continue # skip debug related sections
stripped = stripped + wasm[section_start:pos]
return stripped
def encode_uint_var(n):
result = bytearray()
while n > 127:
result.append(128 | (n & 127))
n = n >> 7
result.append(n)
return bytes(result)
def append_source_mapping(wasm, url):
logger.debug('Append sourceMappingURL section')
section_name = "sourceMappingURL"
section_content = encode_uint_var(len(section_name)) + section_name + encode_uint_var(len(url)) + url
return wasm + encode_uint_var(0) + encode_uint_var(len(section_content)) + section_content
def get_code_section_offset(wasm):
logger.debug('Read sections index')
pos = 8
while pos < len(wasm):
section_id, pos_ = read_var_uint(wasm, pos)
section_size, pos = read_var_uint(wasm, pos_)
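    # The code section has id 10 in the wasm binary format.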
if section_id == 10:
return pos
pos = pos + section_size
def remove_dead_entries(entries):
  # Remove entries for dead functions. As a heuristic, debug info is ignored
  # if the function's starting address is near 0, i.e. too small to leave room
  # for the code section entry's LEB128 size field.
block_start = 0
cur_entry = 0
while cur_entry < len(entries):
if not entries[cur_entry]['eos']:
cur_entry += 1
continue
fn_start = entries[block_start]['address']
# Calculate the LEB encoded function size (including size field)
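    # (e.g. a 300-byte function body needs floor(log(300, 128)) + 1 == 2 bytes)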
fn_size_length = floor(log(entries[cur_entry]['address'] - fn_start + 1, 128)) + 1
min_live_offset = 1 + fn_size_length # 1 byte is for code section entries
if fn_start < min_live_offset:
# Remove dead code debug info block.
del entries[block_start:cur_entry + 1]
cur_entry = block_start
continue
cur_entry += 1
block_start = cur_entry
def read_dwarf_entries(wasm, options):
if options.dwarfdump_output:
output = open(options.dwarfdump_output, 'r').read()
elif options.dwarfdump:
logger.debug('Reading DWARF information from %s' % wasm)
if not os.path.exists(options.dwarfdump):
logger.error('llvm-dwarfdump not found: ' + options.dwarfdump)
sys.exit(1)
process = Popen([options.dwarfdump, "-debug-info", "-debug-line", wasm], stdout=PIPE)
output, err = process.communicate()
exit_code = process.wait()
if exit_code != 0:
logger.error('Error during llvm-dwarfdump execution (%s)' % exit_code)
sys.exit(1)
else:
logger.error('Please specify either --dwarfdump or --dwarfdump-output')
sys.exit(1)
entries = []
debug_line_chunks = re.split(r"debug_line\[(0x[0-9a-f]*)\]", asstr(output))
maybe_debug_info_content = debug_line_chunks[0]
for i in range(1, len(debug_line_chunks), 2):
stmt_list = debug_line_chunks[i]
comp_dir_match = re.search(r"DW_AT_stmt_list\s+\(" + stmt_list + r"\)\s+" +
r"DW_AT_comp_dir\s+\(\"([^\"]+)", maybe_debug_info_content)
comp_dir = comp_dir_match.group(1) if comp_dir_match is not None else ""
line_chunk = debug_line_chunks[i + 1]
# include_directories[ 1] = "/Users/yury/Work/junk/sqlite-playground/src"
# file_names[ 1]:
# name: "playground.c"
# dir_index: 1
# mod_time: 0x00000000
# length: 0x00000000
#
# Address Line Column File ISA Discriminator Flags
# ------------------ ------ ------ ------ --- ------------- -------------
# 0x0000000000000006 22 0 1 0 0 is_stmt
# 0x0000000000000007 23 10 1 0 0 is_stmt prologue_end
# 0x000000000000000f 23 3 1 0 0
# 0x0000000000000010 23 3 1 0 0 end_sequence
# 0x0000000000000011 28 0 1 0 0 is_stmt
include_directories = {'0': comp_dir}
for dir in re.finditer(r"include_directories\[\s*(\d+)\] = \"([^\"]*)", line_chunk):
include_directories[dir.group(1)] = dir.group(2)
files = {}
for file in re.finditer(r"file_names\[\s*(\d+)\]:\s+name: \"([^\"]*)\"\s+dir_index: (\d+)", line_chunk):
dir = include_directories[file.group(3)]
file_path = (dir + '/' if file.group(2)[0] != '/' else '') + file.group(2)
files[file.group(1)] = file_path
for line in re.finditer(r"\n0x([0-9a-f]+)\s+(\d+)\s+(\d+)\s+(\d+)(.*?end_sequence)?", line_chunk):
entry = {'address': int(line.group(1), 16), 'line': int(line.group(2)), 'column': int(line.group(3)), 'file': files[line.group(4)], 'eos': line.group(5) is not None}
if not entry['eos']:
entries.append(entry)
else:
# move end of function to the last END operator
entry['address'] -= 1
if entries[-1]['address'] == entry['address']:
# last entry has the same address, reusing
entries[-1]['eos'] = True
else:
entries.append(entry)
remove_dead_entries(entries)
# return entries sorted by the address field
return sorted(entries, key=lambda entry: entry['address'])
def build_sourcemap(entries, code_section_offset, prefixes, collect_sources):
sources = []
sources_content = [] if collect_sources else None
mappings = []
sources_map = {}
last_address = 0
last_source_id = 0
last_line = 1
last_column = 1
for entry in entries:
line = entry['line']
column = entry['column']
# ignore entries with line 0
if line == 0:
continue
# start at least at column 1
if column == 0:
column = 1
address = entry['address'] + code_section_offset
file_name = entry['file']
source_name = prefixes.sources.resolve(file_name)
if source_name not in sources_map:
source_id = len(sources)
sources_map[source_name] = source_id
sources.append(source_name)
if collect_sources:
load_name = prefixes.load.resolve(file_name)
try:
with open(load_name, 'r') as infile:
source_content = infile.read()
sources_content.append(source_content)
except IOError:
print('Failed to read source: %s' % load_name)
sources_content.append(None)
else:
source_id = sources_map[source_name]
address_delta = address - last_address
source_id_delta = source_id - last_source_id
line_delta = line - last_line
column_delta = column - last_column
mappings.append(encode_vlq(address_delta) + encode_vlq(source_id_delta) + encode_vlq(line_delta) + encode_vlq(column_delta))
last_address = address
last_source_id = source_id
last_line = line
last_column = column
return OrderedDict([('version', 3),
('names', []),
('sources', sources),
('sourcesContent', sources_content),
('mappings', ','.join(mappings))])
def main():
options = parse_args()
wasm_input = options.wasm
with open(wasm_input, 'rb') as infile:
wasm = infile.read()
entries = read_dwarf_entries(wasm_input, options)
code_section_offset = get_code_section_offset(wasm)
prefixes = SourceMapPrefixes(sources=Prefixes(options.prefix), load=Prefixes(options.load_prefix))
logger.debug('Saving to %s' % options.output)
map = build_sourcemap(entries, code_section_offset, prefixes, options.sources)
with open(options.output, 'w') as outfile:
json.dump(map, outfile, separators=(',', ':'))
if options.strip:
wasm = strip_debug_sections(wasm)
if options.source_map_url:
wasm = append_source_mapping(wasm, options.source_map_url)
if options.w:
logger.debug('Saving wasm to %s' % options.w)
with open(options.w, 'wb') as outfile:
outfile.write(wasm)
logger.debug('Done')
return 0
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG if os.environ.get('EMCC_DEBUG') else logging.INFO)
sys.exit(main())
| [((838, 873), 'logging.getLogger', 'logging.getLogger', (['"""wasm-sourcemap"""'], {}), "('wasm-sourcemap')\n", (855, 873), False, 'import logging\n'), ((2912, 2960), 'collections.namedtuple', 'namedtuple', (['"""SourceMapPrefixes"""', '"""sources, load"""'], {}), "('SourceMapPrefixes', 'sources, load')\n", (2922, 2960), False, 'from collections import OrderedDict, namedtuple\n'), ((905, 975), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""wasm-sourcemap.py"""', 'description': '__doc__'}), "(prog='wasm-sourcemap.py', description=__doc__)\n", (928, 975), False, 'import argparse\n'), ((6575, 6588), 'tools.shared.asstr', 'asstr', (['output'], {}), '(output)\n', (6580, 6588), False, 'from tools.shared import asstr\n'), ((6746, 6873), 're.search', 're.search', (['(\'DW_AT_stmt_list\\\\s+\\\\(\' + stmt_list + \'\\\\)\\\\s+\' +\n \'DW_AT_comp_dir\\\\s+\\\\(\\\\"([^\\\\"]+)\')', 'maybe_debug_info_content'], {}), '(\'DW_AT_stmt_list\\\\s+\\\\(\' + stmt_list + \'\\\\)\\\\s+\' +\n \'DW_AT_comp_dir\\\\s+\\\\(\\\\"([^\\\\"]+)\', maybe_debug_info_content)\n', (6755, 6873), False, 'import re\n'), ((7836, 7913), 're.finditer', 're.finditer', (['"""include_directories\\\\[\\\\s*(\\\\d+)\\\\] = \\\\"([^\\\\"]*)"""', 'line_chunk'], {}), '(\'include_directories\\\\[\\\\s*(\\\\d+)\\\\] = \\\\"([^\\\\"]*)\', line_chunk)\n', (7847, 7913), False, 'import re\n'), ((7997, 8107), 're.finditer', 're.finditer', (['"""file_names\\\\[\\\\s*(\\\\d+)\\\\]:\\\\s+name: \\\\"([^\\\\"]*)\\\\"\\\\s+dir_index: (\\\\d+)"""', 'line_chunk'], {}), '(\n \'file_names\\\\[\\\\s*(\\\\d+)\\\\]:\\\\s+name: \\\\"([^\\\\"]*)\\\\"\\\\s+dir_index: (\\\\d+)\'\n , line_chunk)\n', (8008, 8107), False, 'import re\n'), ((8274, 8369), 're.finditer', 're.finditer', (['"""\\\\n0x([0-9a-f]+)\\\\s+(\\\\d+)\\\\s+(\\\\d+)\\\\s+(\\\\d+)(.*?end_sequence)?"""', 'line_chunk'], {}), "('\\\\n0x([0-9a-f]+)\\\\s+(\\\\d+)\\\\s+(\\\\d+)\\\\s+(\\\\d+)(.*?end_sequence)?',\n line_chunk)\n", (8285, 8369), False, 'import re\n'), ((11336, 11382), 'json.dump', 'json.dump', (['map', 'outfile'], {'separators': "(',', ':')"}), "(map, outfile, separators=(',', ':'))\n", (11345, 11382), False, 'import json\n'), ((767, 792), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (782, 792), False, 'import os\n'), ((6131, 6206), 'subprocess.Popen', 'Popen', (["[options.dwarfdump, '-debug-info', '-debug-line', wasm]"], {'stdout': 'PIPE'}), "([options.dwarfdump, '-debug-info', '-debug-line', wasm], stdout=PIPE)\n", (6136, 6206), False, 'from subprocess import Popen, PIPE\n'), ((6484, 6495), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6492, 6495), False, 'import sys\n'), ((5416, 5470), 'math.log', 'log', (["(entries[cur_entry]['address'] - fn_start + 1)", '(128)'], {}), "(entries[cur_entry]['address'] - fn_start + 1, 128)\n", (5419, 5470), False, 'from math import floor, log\n'), ((5995, 6028), 'os.path.exists', 'os.path.exists', (['options.dwarfdump'], {}), '(options.dwarfdump)\n', (6009, 6028), False, 'import os\n'), ((6105, 6116), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6113, 6116), False, 'import sys\n'), ((6384, 6395), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6392, 6395), False, 'import sys\n'), ((11780, 11808), 'os.environ.get', 'os.environ.get', (['"""EMCC_DEBUG"""'], {}), "('EMCC_DEBUG')\n", (11794, 11808), False, 'import os\n')] |
agustinhenze/mibs.snmplabs.com | pysnmp-with-texts/ENTERASYS-NAC-APPLIANCE-MIB.py | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | #
# PySNMP MIB module ENTERASYS-NAC-APPLIANCE-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ENTERASYS-NAC-APPLIANCE-MIB
# Produced by pysmi-0.3.4 at Wed May 1 13:04:09 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ConstraintsIntersection", "SingleValueConstraint")
etsysModules, = mibBuilder.importSymbols("ENTERASYS-MIB-NAMES", "etsysModules")
ObjectGroup, NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "NotificationGroup", "ModuleCompliance")
Bits, ObjectIdentity, MibIdentifier, Counter64, iso, NotificationType, MibScalar, MibTable, MibTableRow, MibTableColumn, ModuleIdentity, IpAddress, Unsigned32, TimeTicks, Gauge32, Integer32, Counter32 = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "ObjectIdentity", "MibIdentifier", "Counter64", "iso", "NotificationType", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ModuleIdentity", "IpAddress", "Unsigned32", "TimeTicks", "Gauge32", "Integer32", "Counter32")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
etsysNacApplianceMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 5624, 1, 2, 73))
etsysNacApplianceMIB.setRevisions(('2010-03-09 13:03',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: etsysNacApplianceMIB.setRevisionsDescriptions(('The initial version of this MIB module.',))
if mibBuilder.loadTexts: etsysNacApplianceMIB.setLastUpdated('201003091303Z')
if mibBuilder.loadTexts: etsysNacApplianceMIB.setOrganization('Enterasys Networks, Inc')
if mibBuilder.loadTexts: etsysNacApplianceMIB.setContactInfo('Postal: Enterasys Networks 50 Minuteman Rd. Andover, MA 01810-1008 USA Phone: +1 978 684 1000 E-mail: [email protected] WWW: http://www.enterasys.com')
if mibBuilder.loadTexts: etsysNacApplianceMIB.setDescription("This MIB module defines a portion of the SNMP enterprise MIBs under Enterasys Networks' enterprise OID pertaining to NAC Appliance Status.")
etsysNacApplianceObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 73, 1))
etsysNacApplAuthenticationRequests = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 73, 1, 1), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysNacApplAuthenticationRequests.setStatus('current')
if mibBuilder.loadTexts: etsysNacApplAuthenticationRequests.setDescription('Represents the number of authentication requests made since the NAC was started.')
etsysNacApplAuthenticationSuccesses = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 73, 1, 2), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysNacApplAuthenticationSuccesses.setStatus('current')
if mibBuilder.loadTexts: etsysNacApplAuthenticationSuccesses.setDescription('Represents the number of successful authentication requests made since the NAC was started.')
etsysNacApplAuthenticationFailures = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 73, 1, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysNacApplAuthenticationFailures.setStatus('current')
if mibBuilder.loadTexts: etsysNacApplAuthenticationFailures.setDescription('Represents the number of failed authentication requests made since the NAC was started.')
etsysNacApplRadiusChallenges = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 73, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysNacApplRadiusChallenges.setStatus('current')
if mibBuilder.loadTexts: etsysNacApplRadiusChallenges.setDescription('Represents the number of Radius challenges made since the NAC was started.')
etsysNacApplAuthenticationInvalidRequests = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 73, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysNacApplAuthenticationInvalidRequests.setStatus('current')
if mibBuilder.loadTexts: etsysNacApplAuthenticationInvalidRequests.setDescription('Represents the number of invalid authentication requests made since the NAC was started.')
etsysNacApplAuthenticationDuplicateRequests = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 73, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysNacApplAuthenticationDuplicateRequests.setStatus('current')
if mibBuilder.loadTexts: etsysNacApplAuthenticationDuplicateRequests.setDescription('Represents the number of duplicate authentication requests made since the NAC was started.')
etsysNacApplAuthenticationMalformedRequests = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 73, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysNacApplAuthenticationMalformedRequests.setStatus('current')
if mibBuilder.loadTexts: etsysNacApplAuthenticationMalformedRequests.setDescription('Represents the number of malformed authentication requests made since the NAC was started.')
etsysNacApplAuthenticationBadRequests = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 73, 1, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysNacApplAuthenticationBadRequests.setStatus('current')
if mibBuilder.loadTexts: etsysNacApplAuthenticationBadRequests.setDescription('Represents the number of bad authentication requests made since the NAC was started.')
etsysNacApplAuthenticationDroppedPackets = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 73, 1, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysNacApplAuthenticationDroppedPackets.setStatus('current')
if mibBuilder.loadTexts: etsysNacApplAuthenticationDroppedPackets.setDescription('Represents the number of dropped authentication packets since the NAC was started.')
etsysNacApplAuthenticationUnknownTypes = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 73, 1, 10), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysNacApplAuthenticationUnknownTypes.setStatus('current')
if mibBuilder.loadTexts: etsysNacApplAuthenticationUnknownTypes.setDescription('Represents the number of unknown authentication types since the NAC was started.')
etsysNacApplAssessmentRequests = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 73, 1, 11), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysNacApplAssessmentRequests.setStatus('current')
if mibBuilder.loadTexts: etsysNacApplAssessmentRequests.setDescription('Represents the number of assessment requests made since the NAC was started.')
etsysNacApplCaptivePortalRequests = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 73, 1, 12), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysNacApplCaptivePortalRequests.setStatus('current')
if mibBuilder.loadTexts: etsysNacApplCaptivePortalRequests.setDescription('Represents the number of captive portal requests made since the NAC was started.')
etsysNacApplContactLostSwitches = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 73, 1, 13), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysNacApplContactLostSwitches.setStatus('current')
if mibBuilder.loadTexts: etsysNacApplContactLostSwitches.setDescription('Represents the number of configured switches with which the NAC has lost SNMP contact.')
etsysNacApplIPResolutionFailures = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 73, 1, 14), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysNacApplIPResolutionFailures.setStatus('current')
if mibBuilder.loadTexts: etsysNacApplIPResolutionFailures.setDescription('Represents the number of failed IP Resolution attempts made since the NAC was started.')
etsysNacApplIPResolutionTimeouts = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 73, 1, 15), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysNacApplIPResolutionTimeouts.setStatus('current')
if mibBuilder.loadTexts: etsysNacApplIPResolutionTimeouts.setDescription('Represents the number of IP Resolution attempts that timed out since the NAC was started.')
etsysNacApplConnectedAgents = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 73, 1, 16), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysNacApplConnectedAgents.setStatus('current')
if mibBuilder.loadTexts: etsysNacApplConnectedAgents.setDescription('Represents the number of End-System Assessment Agents currently connected to the NAC.')
etsysNacApplianceMIBConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 73, 2))
etsysNacApplianceMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 73, 2, 1))
etsysNacApplianceMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 73, 2, 2))
etsysNacApplianceMIBGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 73, 2, 1, 1)).setObjects(("ENTERASYS-NAC-APPLIANCE-MIB", "etsysNacApplAuthenticationRequests"), ("ENTERASYS-NAC-APPLIANCE-MIB", "etsysNacApplAuthenticationSuccesses"), ("ENTERASYS-NAC-APPLIANCE-MIB", "etsysNacApplAuthenticationFailures"), ("ENTERASYS-NAC-APPLIANCE-MIB", "etsysNacApplRadiusChallenges"), ("ENTERASYS-NAC-APPLIANCE-MIB", "etsysNacApplAuthenticationInvalidRequests"), ("ENTERASYS-NAC-APPLIANCE-MIB", "etsysNacApplAuthenticationDuplicateRequests"), ("ENTERASYS-NAC-APPLIANCE-MIB", "etsysNacApplAuthenticationMalformedRequests"), ("ENTERASYS-NAC-APPLIANCE-MIB", "etsysNacApplAuthenticationBadRequests"), ("ENTERASYS-NAC-APPLIANCE-MIB", "etsysNacApplAuthenticationDroppedPackets"), ("ENTERASYS-NAC-APPLIANCE-MIB", "etsysNacApplAuthenticationUnknownTypes"), ("ENTERASYS-NAC-APPLIANCE-MIB", "etsysNacApplAssessmentRequests"), ("ENTERASYS-NAC-APPLIANCE-MIB", "etsysNacApplCaptivePortalRequests"), ("ENTERASYS-NAC-APPLIANCE-MIB", "etsysNacApplContactLostSwitches"), ("ENTERASYS-NAC-APPLIANCE-MIB", "etsysNacApplIPResolutionFailures"), ("ENTERASYS-NAC-APPLIANCE-MIB", "etsysNacApplIPResolutionTimeouts"), ("ENTERASYS-NAC-APPLIANCE-MIB", "etsysNacApplConnectedAgents"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsysNacApplianceMIBGroup = etsysNacApplianceMIBGroup.setStatus('current')
if mibBuilder.loadTexts: etsysNacApplianceMIBGroup.setDescription('The basic collection of objects providing status information about the NAC Appliance.')
etsysNacApplianceMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 5624, 1, 2, 73, 2, 2, 1)).setObjects(("ENTERASYS-NAC-APPLIANCE-MIB", "etsysNacApplianceMIBGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsysNacApplianceMIBCompliance = etsysNacApplianceMIBCompliance.setStatus('current')
if mibBuilder.loadTexts: etsysNacApplianceMIBCompliance.setDescription('The compliance statement for clients implementing the NAC Appliance Status MIB.')
mibBuilder.exportSymbols("ENTERASYS-NAC-APPLIANCE-MIB", etsysNacApplianceMIBCompliance=etsysNacApplianceMIBCompliance, etsysNacApplAuthenticationDuplicateRequests=etsysNacApplAuthenticationDuplicateRequests, etsysNacApplIPResolutionTimeouts=etsysNacApplIPResolutionTimeouts, etsysNacApplianceObjects=etsysNacApplianceObjects, etsysNacApplAuthenticationInvalidRequests=etsysNacApplAuthenticationInvalidRequests, etsysNacApplAuthenticationUnknownTypes=etsysNacApplAuthenticationUnknownTypes, etsysNacApplianceMIBCompliances=etsysNacApplianceMIBCompliances, etsysNacApplAssessmentRequests=etsysNacApplAssessmentRequests, etsysNacApplAuthenticationBadRequests=etsysNacApplAuthenticationBadRequests, etsysNacApplAuthenticationRequests=etsysNacApplAuthenticationRequests, etsysNacApplRadiusChallenges=etsysNacApplRadiusChallenges, etsysNacApplAuthenticationMalformedRequests=etsysNacApplAuthenticationMalformedRequests, etsysNacApplContactLostSwitches=etsysNacApplContactLostSwitches, etsysNacApplAuthenticationDroppedPackets=etsysNacApplAuthenticationDroppedPackets, etsysNacApplCaptivePortalRequests=etsysNacApplCaptivePortalRequests, etsysNacApplAuthenticationSuccesses=etsysNacApplAuthenticationSuccesses, etsysNacApplIPResolutionFailures=etsysNacApplIPResolutionFailures, etsysNacApplianceMIBConformance=etsysNacApplianceMIBConformance, PYSNMP_MODULE_ID=etsysNacApplianceMIB, etsysNacApplianceMIBGroups=etsysNacApplianceMIBGroups, etsysNacApplianceMIB=etsysNacApplianceMIB, etsysNacApplAuthenticationFailures=etsysNacApplAuthenticationFailures, etsysNacApplianceMIBGroup=etsysNacApplianceMIBGroup, etsysNacApplConnectedAgents=etsysNacApplConnectedAgents)
| [] |
Kebniss/AutoDetect | Unsupervised/pix2pixHD/extract_frames.py | 44ca4d6930ef5fbf044ebeed5c9fd925f04bc1a8 | import os
import cv2
import argparse
from utils import *
from tqdm import tqdm
from glob import glob
from pathlib import Path
def _extract_frames(video_path, dest_folder, start=0, sampling_f=1):
vidcap = cv2.VideoCapture(video_path)
    success, image = vidcap.read()
count = -1
saved = 0
print(f'Processing: {video_path}')
while success:
count += 1
if count % 300 == 0:
print('Processing frame: ', count)
if count % sampling_f == 0:
# sampling
cv2.imwrite(''.join([dest_folder, f"/{count + start}.jpg"]), image)
saved += 1
success, image = vidcap.read() # read next
print(f'Successfully saved {saved} frames to {dest_folder}')
    return count + start + 1  # next free frame index, so the next video does not overwrite the last saved frame
parser = argparse.ArgumentParser(
description='build a "frame dataset" from a given video')
parser.add_argument('-input', dest="input", required=True,
                    help='''Path to a single video or a folder. If a folder is given, the
                    script will extract frames from all files with the extension defined in
                    --extension and save them under separate folders inside dest_folder.
                    The frames of each video are saved under a folder named after it.
''')
parser.add_argument('--dest-folder', dest="dest_folder", default='./dataset/',
help='''Path where to store frames. NB all files in this folder will be
removed before adding the new frames''')
parser.add_argument('--same-folder', dest="same_folder", default=False,
help='''Set it to True if you want to save the frames of all videos to the
same folder in ascending order going from the first frame of the first video
                    to the last frame of the last video. If True, frames will be saved in
                    dest_folder/<run-type>_frames.''')
parser.add_argument('--sampling', help='save one frame every N frames', default='3')
parser.add_argument('--run-type', help='train or test', default='train')
parser.add_argument('--extension', help='avi, mp4, mov...', default='mp4')
parser.add_argument('-width', help='output width', default=640, type=int)
parser.add_argument('-height', help='output height', default=480, type=int)
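# Illustrative invocation (paths are hypothetical):
#   python extract_frames.py -input ./videos --dest-folder ./dataset \
#       --extension mp4 --sampling 3 --run-type train -width 640 -height 480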
args = parser.parse_args()
mkdir(args.dest_folder)
if (args.width % 32 != 0) or (args.height % 32 != 0):
raise Exception("Please use width and height that are divisible by 32")
if os.path.isdir(args.input):
inp = str(Path(args.input) / f'*.{args.extension}')
    videos = glob(inp)
if not videos:
raise Exception(f'No {args.extension} files in input directory {args.input}')
elif os.path.isfile(args.input):
_, ext = get_filename_extension(args.input)
if ext != args.extension:
raise ValueError(f'Correct inputs: folder or path to {args.extension} file only')
videos = [args.input]
else:
raise ValueError(f'Correct inputs: folder or path to {args.extension} file only')
if args.same_folder:
start = 0
dest_folder = str(Path(args.dest_folder) / f'{args.run_type}_frames')
mkdir(dest_folder)
for v in tqdm(videos):
if not args.same_folder:
start = 0
name, _ = get_filename_extension(v)
dest_folder = str(Path(args.dest_folder) / name)
mkdir(dest_folder)
start = _extract_frames(v, dest_folder, start, sampling_f=int(args.sampling))
| [((785, 871), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""build a "frame dataset" from a given video"""'}), '(description=\n \'build a "frame dataset" from a given video\')\n', (808, 871), False, 'import argparse\n'), ((2342, 2367), 'os.path.isdir', 'os.path.isdir', (['args.input'], {}), '(args.input)\n', (2355, 2367), False, 'import os\n'), ((3028, 3040), 'tqdm.tqdm', 'tqdm', (['videos'], {}), '(videos)\n', (3032, 3040), False, 'from tqdm import tqdm\n'), ((205, 233), 'cv2.VideoCapture', 'cv2.VideoCapture', (['video_path'], {}), '(video_path)\n', (221, 233), False, 'import cv2\n'), ((2571, 2597), 'os.path.isfile', 'os.path.isfile', (['args.input'], {}), '(args.input)\n', (2585, 2597), False, 'import os\n'), ((2383, 2399), 'pathlib.Path', 'Path', (['args.input'], {}), '(args.input)\n', (2387, 2399), False, 'from pathlib import Path\n'), ((2450, 2459), 'glob.glob', 'glob', (['inp'], {}), '(inp)\n', (2454, 2459), False, 'from glob import glob\n'), ((2943, 2965), 'pathlib.Path', 'Path', (['args.dest_folder'], {}), '(args.dest_folder)\n', (2947, 2965), False, 'from pathlib import Path\n'), ((3159, 3181), 'pathlib.Path', 'Path', (['args.dest_folder'], {}), '(args.dest_folder)\n', (3163, 3181), False, 'from pathlib import Path\n')] |
parsiya/Parsia-Code | burp-filter-options/filter-options.py | e75bd9f7f295e6d8e584de67f90dd02cb75ae915 | # modified "example traffic redirector"
# https://raw.githubusercontent.com/PortSwigger/example-traffic-redirector/master/python/TrafficRedirector.py
# Idea: https://github.com/pajswigger/filter-options/blob/master/src/filter-options.kt
# Usage: Put both files in a directory and add filter-options.py to Burp. Needs Jython.
# Blog post: https://parsiya.net/blog/2019-04-06-hiding-options-an-adventure-in-dealing-with-burp-proxy-in-an-extension/
# support for burp-exceptions - see https://github.com/securityMB/burp-exceptions
try:
from exceptions_fix import FixBurpExceptions
import sys
except ImportError:
pass
# support for burputils - https://github.com/parsiya/burputils
try:
from burputils import BurpUtils
except ImportError:
pass
from burp import IBurpExtender
from burp import IHttpListener
class BurpExtender(IBurpExtender, IHttpListener):
# implement IBurpExtender
# set everything up
def registerExtenderCallbacks(self, callbacks):
# obtain an extension helpers object
self.utils = BurpUtils(callbacks.getHelpers())
# support for burp-exceptions
try:
sys.stdout = callbacks.getStdout()
except:
pass
# set our extension name
callbacks.setExtensionName("Filter OPTIONS")
# register an HTTP listener
callbacks.registerHttpListener(self)
#
# implement IHttpListener
#
def processHttpMessage(self, toolFlag, messageIsRequest, messageInfo):
# only process responses
if messageIsRequest:
return
# now we only have responses
# get the request associated with the response
requestInfo = self.utils.getInfo(True, messageInfo)
# return if the request method was not OPTIONS
if requestInfo.getMethod() != "OPTIONS":
return
# get response info
responseInfo = self.utils.getInfo(False, messageInfo)
# get headers using utils
headers = self.utils.getHeaders(responseInfo)
# overwrite the Content-Type header. Overwrite adds the header if it
# does not exist.
headers.overwrite("Content-Type", "text/css; charset=UTF-8")
# put everything back together
bodyBytes = self.utils.getBody(messageIsRequest, messageInfo)
# Debug
# rawHeaders = headers.exportRaw()
# build message
modifiedmsg = self.utils.burpHelper.buildHttpMessage(headers.exportRaw(), bodyBytes)
# set modified message response
self.utils.setRequestResponse(messageIsRequest, modifiedmsg, messageInfo)
# this should be reflected in response tab
# done
print "--------"
return
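# Illustrative effect (values are hypothetical): a response to
# "OPTIONS /api/items" that arrives with "Content-Type: text/html" leaves this
# listener with "Content-Type: text/css; charset=UTF-8", so OPTIONS responses
# can be filtered out of Burp views by MIME type.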
# support for burp-exceptions
try:
FixBurpExceptions()
except:
pass | [] |
loftwah/appscale | AppServer/google/appengine/tools/devappserver2/login.py | 586fc1347ebc743d7a632de698f4dbfb09ae38d6 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Handles login/logout pages and dealing with user cookies.
Includes a WSGI application that serves the login page and handles login and
logout HTTP requests. It accepts these GET query parameters:
continue: URL to redirect to after a login or logout has completed.
email: Email address to set for the client.
admin: If 'True', the client should be logged in as an admin.
action: What action to take ('Login' or 'Logout').
To view the current user information and a form for logging in and out,
supply no parameters.
"""
import cgi
import Cookie
import hashlib
import logging
import os
import sha
import sys
import urllib
import uuid
import webapp2
app_dashboard_lib = '/../../../../../AppDashboard/lib'
sys.path.append(os.path.dirname(__file__) + app_dashboard_lib)
from app_dashboard_helper import AppDashboardHelper
# URL of the login page within the dev appserver.
LOGIN_URL_RELATIVE = '_ah/login'
# CGI parameter constants.
CONTINUE_PARAM = 'continue'
_EMAIL_PARAM = 'email'
_ADMIN_PARAM = 'admin'
ACTION_PARAM = 'action'
# Values for the action parameter.
LOGOUT_ACTION = 'logout'
LOGIN_ACTION = 'login'
# Name of the cookie that stores the user info.
_COOKIE_NAME = 'dev_appserver_login'
# Indicates that the user has admin access to all applications.
CLOUD_ADMIN_MARKER = 'CLOUD_ADMIN'
# The port that the AppDashboard serves HTTPS traffic on.
DASHBOARD_HTTPS_PORT = "1443"
def get_user_info(http_cookie, cookie_name=_COOKIE_NAME):
"""Gets the requestor's user info from an HTTP Cookie header.
Args:
http_cookie: The value of the 'Cookie' HTTP request header.
cookie_name: The name of the cookie that stores the user info.
Returns:
A tuple (email, admin, user_id) where:
email: The user's email address, if any.
admin: True if the user is an admin; False otherwise.
user_id: The user ID, if any.
"""
try:
cookie = Cookie.SimpleCookie(http_cookie)
except Cookie.CookieError:
return '', False, ''
cookie_dict = dict((k, v.value) for k, v in cookie.iteritems())
return _get_user_info_from_dict(cookie_dict, cookie_name)
def _get_user_info_from_dict(cookie_dict, cookie_name=_COOKIE_NAME):
"""Gets the requestor's user info from a cookie dictionary.
Args:
cookie_dict: A dictionary mapping cookie names onto values.
cookie_name: The name of the cookie that stores the user info.
Returns:
A tuple (email, admin, user_id) where:
email: The user's email address, if any.
admin: True if the user is an admin; False otherwise.
user_id: The user ID, if any.
"""
cookie_secret = os.environ['COOKIE_SECRET']
cookie_value = cookie_dict.get(cookie_name, '')
cookie_value = cookie_value.replace("%3A",":")
cookie_value = cookie_value.replace("%40",'@')
cookie_value = cookie_value.replace("%2C",",")
email, nickname, admin, hsh = (cookie_value.split(':') + ['', '', '', ''])[:4]
if email == '':
nickname = ''
admin = ''
return '', False, ''
else:
vhsh = sha.new(email+nickname+admin+cookie_secret).hexdigest()
if hsh != vhsh:
logging.info("{0} has an invalid cookie, so ignoring it.".format(email))
return '', False, ''
admin_apps = admin.split(',')
current_app = os.environ['APPLICATION_ID']
is_admin = current_app in admin_apps or CLOUD_ADMIN_MARKER in admin_apps
return email, is_admin, nickname
def _create_cookie_data(email, admin):
"""Creates cookie payload data.
Args:
email: The user's email address.
admin: True if the user is an admin; False otherwise.
Returns:
A string containing the cookie payload.
"""
if email:
user_id_digest = hashlib.md5(email.lower()).digest()
user_id = '1' + ''.join(['%02d' % ord(x) for x in user_id_digest])[:20]
else:
user_id = ''
return '%s:%s:%s' % (email, admin, user_id)
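# Illustrative payload (hypothetical user): for email='test@example.com' and
# admin=True the returned value is
# 'test@example.com:True:1<20 digits derived from the md5 of the lowercased email>'.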
def _set_user_info_cookie(email, admin, cookie_name=_COOKIE_NAME):
"""Creates a cookie to set the user information for the requestor.
Args:
email: The email to set for the user.
admin: True if the user should be admin; False otherwise.
cookie_name: The name of the cookie that stores the user info.
Returns:
Set-Cookie value for setting the user info of the requestor.
"""
cookie_value = _create_cookie_data(email, admin)
cookie = Cookie.SimpleCookie()
cookie[cookie_name] = cookie_value
cookie[cookie_name]['path'] = '/'
return cookie[cookie_name].OutputString()
def _clear_user_info_cookie(cookie_name=_COOKIE_NAME):
"""Clears the user info cookie from the requestor, logging them out.
Args:
cookie_name: The name of the cookie that stores the user info.
Returns:
A Set-Cookie value for clearing the user info of the requestor.
"""
cookie = Cookie.SimpleCookie()
cookie[cookie_name] = ''
cookie[cookie_name]['path'] = '/'
cookie[cookie_name]['max-age'] = '0'
if AppDashboardHelper.USE_SHIBBOLETH:
cookie[cookie_name]['domain'] = AppDashboardHelper.\
SHIBBOLETH_COOKIE_DOMAIN
return cookie[cookie_name].OutputString()
_LOGIN_TEMPLATE = """<html>
<head>
<title>Login</title>
</head>
<body>
<form method="get" action="%(login_url)s"
style="text-align:center; font: 13px sans-serif">
<div style="width: 20em; margin: 1em auto;
text-align:left;
padding: 0 2em 1.25em 2em;
background-color: #d6e9f8;
border: 2px solid #67a7e3">
<h3>%(login_message)s</h3>
<p style="padding: 0; margin: 0">
<label for="email" style="width: 3em">Email:</label>
<input name="email" type="email" value="%(email)s" id="email"/>
</p>
<p style="margin: .5em 0 0 3em; font-size:12px">
<input name="admin" type="checkbox" value="True"
%(admin_checked)s id="admin"/>
<label for="admin">Sign in as Administrator</label>
</p>
<p style="margin-left: 3em">
<input name="action" value="Login" type="submit"
id="submit-login" />
<input name="action" value="Logout" type="submit"
id="submit-logout" />
</p>
</div>
<input name="continue" type="hidden" value="%(continue_url)s"/>
</form>
</body>
</html>
"""
def _render_login_template(login_url, continue_url, email, admin):
"""Renders the login page.
Args:
login_url: The parameter to _login_response.
continue_url: The parameter to _login_response.
email: The email address of the current user, if any.
admin: True if the user is currently an admin; False otherwise.
Returns:
A string containing the contents of the login page.
"""
if email:
login_message = 'Logged in'
else:
login_message = 'Not logged in'
email = 'test\x40example.com'
admin_checked = 'checked' if admin else ''
template_dict = {
'email': cgi.escape(email, quote=True),
'admin_checked': admin_checked,
'login_message': login_message,
'login_url': cgi.escape(login_url, quote=True),
'continue_url': cgi.escape(continue_url, quote=True),
}
return _LOGIN_TEMPLATE % template_dict
def login_redirect(application_url, continue_url, start_response):
"""Writes a login redirection URL to a user.
This redirects to login_url with a continue parameter to return to
continue_url. The login_url should be on the canonical front-end server,
regardless of the host:port the user connected to.
Args:
application_url: The URL of the dev appserver domain
(e.g., 'http://localhost:8080').
continue_url: The URL to continue to after the user logs in.
start_response: A WSGI start_response function.
Returns:
An (empty) iterable over strings containing the body of the HTTP response.
"""
if AppDashboardHelper.USE_SHIBBOLETH:
redirect_url = '{0}:{1}/login?{2}={3}'.format(
AppDashboardHelper.SHIBBOLETH_CONNECTOR,
AppDashboardHelper.SHIBBOLETH_CONNECTOR_PORT,
CONTINUE_PARAM,
urllib.quote(continue_url)
)
else:
hostname = os.environ['NGINX_HOST']
redirect_url = 'https://{0}:{1}/login?{2}={3}'.format(
hostname,
DASHBOARD_HTTPS_PORT,
CONTINUE_PARAM,
urllib.quote(continue_url))
start_response('302 Requires login',
[('Location', redirect_url)])
return []
def fake_admin():
""" Generate the fake admin login secret
Returns:
A string containing the fake login secret
"""
return hashlib.sha1('{}/{}'.format(
os.environ.get('APPNAME', str(uuid.uuid4())),
os.environ.get('COOKIE_SECRET', str(uuid.uuid4())))).hexdigest()
class Handler(webapp2.RequestHandler):
"""The request handler for the login and logout pages."""
def get(self):
action = self.request.get(ACTION_PARAM)
set_email = self.request.get(_EMAIL_PARAM)
set_admin = self.request.get(_ADMIN_PARAM).lower() == 'true'
continue_url = self.request.get(CONTINUE_PARAM)
login_url = self.request.path_url
if action:
redirect_url = continue_url or login_url
# Perform the action.
if action.lower() == LOGOUT_ACTION.lower():
self.response.headers['Set-Cookie'] = _clear_user_info_cookie()
if AppDashboardHelper.USE_SHIBBOLETH:
redirect_url = AppDashboardHelper.SHIBBOLETH_LOGOUT_URL
elif action.lower() == LOGIN_ACTION.lower() and set_email:
self.response.headers['Set-Cookie'] = _set_user_info_cookie(set_email,
set_admin)
# URLs should be ASCII-only byte strings.
if isinstance(redirect_url, unicode):
redirect_url = redirect_url.encode('ascii')
# Redirect the user after performing the action.
self.response.status = 302
self.response.status_message = 'Redirecting to continue URL'
self.response.headers['Location'] = redirect_url
else:
# Send the user to the AppDashboard to log in before letting them view the
# specified URL.
if AppDashboardHelper.USE_SHIBBOLETH:
appscale_login_url = "{0}:{1}/login".format(
AppDashboardHelper.SHIBBOLETH_CONNECTOR, DASHBOARD_HTTPS_PORT)
else:
appscale_login_url = "https://{0}:{1}/login".format(
os.environ['NGINX_HOST'], DASHBOARD_HTTPS_PORT)
redirect_url = '{0}?{1}={2}'.format(appscale_login_url, CONTINUE_PARAM,
continue_url)
self.response.status = 302
self.response.status_message = 'Redirecting to login service URL'
self.response.headers['Location'] = redirect_url
application = webapp2.WSGIApplication([('/.*', Handler)], debug=True)
| [((11072, 11127), 'webapp2.WSGIApplication', 'webapp2.WSGIApplication', (["[('/.*', Handler)]"], {'debug': '(True)'}), "([('/.*', Handler)], debug=True)\n", (11095, 11127), False, 'import webapp2\n'), ((4895, 4916), 'Cookie.SimpleCookie', 'Cookie.SimpleCookie', ([], {}), '()\n', (4914, 4916), False, 'import Cookie\n'), ((5335, 5356), 'Cookie.SimpleCookie', 'Cookie.SimpleCookie', ([], {}), '()\n', (5354, 5356), False, 'import Cookie\n'), ((1337, 1362), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1352, 1362), False, 'import os\n'), ((2493, 2525), 'Cookie.SimpleCookie', 'Cookie.SimpleCookie', (['http_cookie'], {}), '(http_cookie)\n', (2512, 2525), False, 'import Cookie\n'), ((7365, 7394), 'cgi.escape', 'cgi.escape', (['email'], {'quote': '(True)'}), '(email, quote=True)\n', (7375, 7394), False, 'import cgi\n'), ((7491, 7524), 'cgi.escape', 'cgi.escape', (['login_url'], {'quote': '(True)'}), '(login_url, quote=True)\n', (7501, 7524), False, 'import cgi\n'), ((7548, 7584), 'cgi.escape', 'cgi.escape', (['continue_url'], {'quote': '(True)'}), '(continue_url, quote=True)\n', (7558, 7584), False, 'import cgi\n'), ((8483, 8509), 'urllib.quote', 'urllib.quote', (['continue_url'], {}), '(continue_url)\n', (8495, 8509), False, 'import urllib\n'), ((8695, 8721), 'urllib.quote', 'urllib.quote', (['continue_url'], {}), '(continue_url)\n', (8707, 8721), False, 'import urllib\n'), ((3606, 3655), 'sha.new', 'sha.new', (['(email + nickname + admin + cookie_secret)'], {}), '(email + nickname + admin + cookie_secret)\n', (3613, 3655), False, 'import sha\n'), ((9023, 9035), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (9033, 9035), False, 'import uuid\n'), ((9081, 9093), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (9091, 9093), False, 'import uuid\n')] |
noah-goodrich/beam | sdks/python/apache_beam/runners/portability/job_server.py | 5a851b734f53206c20efe08d93d15760bbc15b0c | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import atexit
import os
import shutil
import signal
import subprocess
import sys
import tempfile
import threading
import grpc
from apache_beam.portability.api import beam_job_api_pb2_grpc
from apache_beam.runners.portability import local_job_service
from apache_beam.utils import subprocess_server
from apache_beam.version import __version__ as beam_version
class JobServer(object):
def start(self):
"""Starts this JobServer, returning a grpc service to which to submit jobs.
"""
raise NotImplementedError(type(self))
def stop(self):
"""Stops this job server."""
raise NotImplementedError(type(self))
class ExternalJobServer(JobServer):
def __init__(self, endpoint, timeout=None):
self._endpoint = endpoint
self._timeout = timeout
def start(self):
channel = grpc.insecure_channel(self._endpoint)
grpc.channel_ready_future(channel).result(timeout=self._timeout)
return beam_job_api_pb2_grpc.JobServiceStub(channel)
def stop(self):
pass
class EmbeddedJobServer(JobServer):
def start(self):
return local_job_service.LocalJobServicer()
def stop(self):
pass
class StopOnExitJobServer(JobServer):
"""Wraps a JobServer such that its stop will automatically be called on exit.
"""
def __init__(self, job_server):
self._lock = threading.Lock()
self._job_server = job_server
self._started = False
def start(self):
with self._lock:
if not self._started:
self._endpoint = self._job_server.start()
self._started = True
atexit.register(self.stop)
signal.signal(signal.SIGINT, self.stop)
return self._endpoint
def stop(self):
with self._lock:
if self._started:
self._job_server.stop()
self._started = False
class SubprocessJobServer(JobServer):
"""An abstract base class for JobServers run as an external process."""
def __init__(self):
self._local_temp_root = None
self._server = None
def subprocess_cmd_and_endpoint(self):
raise NotImplementedError(type(self))
def start(self):
if self._server is None:
self._local_temp_root = tempfile.mkdtemp(prefix='beam-temp')
cmd, endpoint = self.subprocess_cmd_and_endpoint()
port = int(endpoint.split(':')[-1])
self._server = subprocess_server.SubprocessServer(
beam_job_api_pb2_grpc.JobServiceStub, cmd, port=port)
return self._server.start()
def stop(self):
if self._local_temp_root:
shutil.rmtree(self._local_temp_root)
self._local_temp_root = None
return self._server.stop()
def local_temp_dir(self, **kwargs):
return tempfile.mkdtemp(dir=self._local_temp_root, **kwargs)
class JavaJarJobServer(SubprocessJobServer):
MAVEN_REPOSITORY = 'https://repo.maven.apache.org/maven2/org/apache/beam'
JAR_CACHE = os.path.expanduser("~/.apache_beam/cache")
def java_arguments(self, job_port, artifacts_dir):
raise NotImplementedError(type(self))
def path_to_jar(self):
raise NotImplementedError(type(self))
@staticmethod
def path_to_beam_jar(gradle_target):
return subprocess_server.JavaJarServer.path_to_beam_jar(gradle_target)
@staticmethod
def local_jar(url):
return subprocess_server.JavaJarServer.local_jar(url)
def subprocess_cmd_and_endpoint(self):
jar_path = self.local_jar(self.path_to_jar())
artifacts_dir = self.local_temp_dir(prefix='artifacts')
job_port, = subprocess_server.pick_port(None)
return (
['java', '-jar', jar_path] + list(
self.java_arguments(job_port, artifacts_dir)),
'localhost:%s' % job_port)
class DockerizedJobServer(SubprocessJobServer):
"""
Spins up the JobServer in a docker container for local execution.
"""
def __init__(self, job_host="localhost",
job_port=None,
artifact_port=None,
expansion_port=None,
harness_port_range=(8100, 8200),
max_connection_retries=5):
super(DockerizedJobServer, self).__init__()
self.job_host = job_host
self.job_port = job_port
self.expansion_port = expansion_port
self.artifact_port = artifact_port
self.harness_port_range = harness_port_range
self.max_connection_retries = max_connection_retries
def subprocess_cmd_and_endpoint(self):
# TODO This is hardcoded to Flink at the moment but should be changed
job_server_image_name = os.environ['USER'] + \
"-docker-apache.bintray.io/beam/flink-job-server:latest"
docker_path = subprocess.check_output(
['which', 'docker']).strip().decode('utf-8')
cmd = ["docker", "run",
# We mount the docker binary and socket to be able to spin up
# "sibling" containers for the SDK harness.
"-v", ':'.join([docker_path, "/bin/docker"]),
"-v", "/var/run/docker.sock:/var/run/docker.sock"]
self.job_port, self.artifact_port, self.expansion_port = (
subprocess_server.pick_port(
self.job_port, self.artifact_port, self.expansion_port))
args = ['--job-host', self.job_host,
'--job-port', str(self.job_port),
'--artifact-port', str(self.artifact_port),
'--expansion-port', str(self.expansion_port)]
if sys.platform == "darwin":
      # Docker-for-Mac doesn't support host networking, so we need to explicitly
# publish ports from the Docker container to be able to connect to it.
# Also, all other containers need to be aware that they run Docker-on-Mac
# to connect against the internal Docker-for-Mac address.
cmd += ["-e", "DOCKER_MAC_CONTAINER=1"]
cmd += ["-p", "{}:{}".format(self.job_port, self.job_port)]
cmd += ["-p", "{}:{}".format(self.artifact_port, self.artifact_port)]
cmd += ["-p", "{}:{}".format(self.expansion_port, self.expansion_port)]
cmd += ["-p", "{0}-{1}:{0}-{1}".format(
self.harness_port_range[0], self.harness_port_range[1])]
else:
      # This shouldn't be set for MacOS because it destroys port forwarding,
# even though host networking is not supported on MacOS.
cmd.append("--network=host")
cmd.append(job_server_image_name)
return cmd + args, '%s:%s' % (self.job_host, self.job_port)
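  # Illustrative result (port numbers are picked at runtime and hypothetical):
  # on Linux the returned command resembles
  #   docker run -v /usr/bin/docker:/bin/docker \
  #       -v /var/run/docker.sock:/var/run/docker.sock --network=host \
  #       <user>-docker-apache.bintray.io/beam/flink-job-server:latest \
  #       --job-host localhost --job-port 55001 --artifact-port 55002 \
  #       --expansion-port 55003
  # and the returned endpoint is 'localhost:55001'.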
| [((3638, 3680), 'os.path.expanduser', 'os.path.expanduser', (['"""~/.apache_beam/cache"""'], {}), "('~/.apache_beam/cache')\n", (3656, 3680), False, 'import os\n'), ((1631, 1668), 'grpc.insecure_channel', 'grpc.insecure_channel', (['self._endpoint'], {}), '(self._endpoint)\n', (1652, 1668), False, 'import grpc\n'), ((1749, 1794), 'apache_beam.portability.api.beam_job_api_pb2_grpc.JobServiceStub', 'beam_job_api_pb2_grpc.JobServiceStub', (['channel'], {}), '(channel)\n', (1785, 1794), False, 'from apache_beam.portability.api import beam_job_api_pb2_grpc\n'), ((1891, 1927), 'apache_beam.runners.portability.local_job_service.LocalJobServicer', 'local_job_service.LocalJobServicer', ([], {}), '()\n', (1925, 1927), False, 'from apache_beam.runners.portability import local_job_service\n'), ((2133, 2149), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (2147, 2149), False, 'import threading\n'), ((3446, 3499), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {'dir': 'self._local_temp_root'}), '(dir=self._local_temp_root, **kwargs)\n', (3462, 3499), False, 'import tempfile\n'), ((3912, 3975), 'apache_beam.utils.subprocess_server.JavaJarServer.path_to_beam_jar', 'subprocess_server.JavaJarServer.path_to_beam_jar', (['gradle_target'], {}), '(gradle_target)\n', (3960, 3975), False, 'from apache_beam.utils import subprocess_server\n'), ((4026, 4072), 'apache_beam.utils.subprocess_server.JavaJarServer.local_jar', 'subprocess_server.JavaJarServer.local_jar', (['url'], {}), '(url)\n', (4067, 4072), False, 'from apache_beam.utils import subprocess_server\n'), ((4241, 4274), 'apache_beam.utils.subprocess_server.pick_port', 'subprocess_server.pick_port', (['None'], {}), '(None)\n', (4268, 4274), False, 'from apache_beam.utils import subprocess_server\n'), ((5757, 5845), 'apache_beam.utils.subprocess_server.pick_port', 'subprocess_server.pick_port', (['self.job_port', 'self.artifact_port', 'self.expansion_port'], {}), '(self.job_port, self.artifact_port, self.\n expansion_port)\n', (5784, 5845), False, 'from apache_beam.utils import subprocess_server\n'), ((2949, 2985), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {'prefix': '"""beam-temp"""'}), "(prefix='beam-temp')\n", (2965, 2985), False, 'import tempfile\n'), ((3106, 3198), 'apache_beam.utils.subprocess_server.SubprocessServer', 'subprocess_server.SubprocessServer', (['beam_job_api_pb2_grpc.JobServiceStub', 'cmd'], {'port': 'port'}), '(beam_job_api_pb2_grpc.JobServiceStub,\n cmd, port=port)\n', (3140, 3198), False, 'from apache_beam.utils import subprocess_server\n'), ((3293, 3329), 'shutil.rmtree', 'shutil.rmtree', (['self._local_temp_root'], {}), '(self._local_temp_root)\n', (3306, 3329), False, 'import shutil\n'), ((1673, 1707), 'grpc.channel_ready_future', 'grpc.channel_ready_future', (['channel'], {}), '(channel)\n', (1698, 1707), False, 'import grpc\n'), ((2366, 2392), 'atexit.register', 'atexit.register', (['self.stop'], {}), '(self.stop)\n', (2381, 2392), False, 'import atexit\n'), ((2401, 2440), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'self.stop'], {}), '(signal.SIGINT, self.stop)\n', (2414, 2440), False, 'import signal\n'), ((5332, 5376), 'subprocess.check_output', 'subprocess.check_output', (["['which', 'docker']"], {}), "(['which', 'docker'])\n", (5355, 5376), False, 'import subprocess\n')] |
tachycline/sympy | sympy/printing/pycode.py | abf6fec12012852c7e6fae38461da9723cadc8b9 | from collections import defaultdict
from functools import wraps
from itertools import chain
from sympy.core import sympify
from .precedence import precedence
from .codeprinter import CodePrinter
_kw_py2and3 = {
'and', 'as', 'assert', 'break', 'class', 'continue', 'def', 'del', 'elif',
'else', 'except', 'finally', 'for', 'from', 'global', 'if', 'import', 'in',
'is', 'lambda', 'not', 'or', 'pass', 'raise', 'return', 'try', 'while',
'with', 'yield', 'None' # 'None' is actually not in Python 2's keyword.kwlist
}
_kw_only_py2 = {'exec', 'print'}
_kw_only_py3 = {'False', 'nonlocal', 'True'}
_known_functions = {
'Abs': 'abs',
}
_known_functions_math = {
'acos': 'acos',
'acosh': 'acosh',
'asin': 'asin',
'asinh': 'asinh',
'atan': 'atan',
'atan2': 'atan2',
'atanh': 'atanh',
'ceiling': 'ceil',
'cos': 'cos',
'cosh': 'cosh',
'erf': 'erf',
'erfc': 'erfc',
'exp': 'exp',
'expm1': 'expm1',
'factorial': 'factorial',
'floor': 'floor',
'gamma': 'gamma',
'hypot': 'hypot',
'loggamma': 'lgamma',
'log': 'log',
'log10': 'log10',
'log1p': 'log1p',
'log2': 'log2',
'sin': 'sin',
'sinh': 'sinh',
'Sqrt': 'sqrt',
'tan': 'tan',
'tanh': 'tanh'
} # Not used from ``math``: [copysign isclose isfinite isinf isnan ldexp frexp pow modf
# radians trunc fmod fsum gcd degrees fabs]
_known_constants_math = {
'Exp1': 'e',
'Pi': 'pi',
# Only in python >= 3.5:
# 'Infinity': 'inf',
# 'NaN': 'nan'
}
def _print_known_func(self, expr):
known = self.known_functions[expr.__class__.__name__]
return '{name}({args})'.format(name=self._module_format(known),
args=', '.join(map(self._print, expr.args)))
def _print_known_const(self, expr):
known = self.known_constants[expr.__class__.__name__]
return self._module_format(known)
class PythonCodePrinter(CodePrinter):
printmethod = "_pythoncode"
language = "Python"
standard = "python3"
reserved_words = _kw_py2and3.union(_kw_only_py3)
modules = None # initialized to a set in __init__
    tab = '    '
_kf = dict(chain(
_known_functions.items(),
[(k, 'math.' + v) for k, v in _known_functions_math.items()]
))
_kc = {k: 'math.'+v for k, v in _known_constants_math.items()}
_operators = {'and': 'and', 'or': 'or', 'not': 'not'}
_default_settings = dict(
CodePrinter._default_settings,
user_functions={},
precision=17,
inline=True,
fully_qualified_modules=True
)
def __init__(self, settings=None):
super(PythonCodePrinter, self).__init__(settings)
self.module_imports = defaultdict(set)
self.known_functions = dict(self._kf, **(settings or {}).get(
'user_functions', {}))
self.known_constants = dict(self._kc, **(settings or {}).get(
'user_constants', {}))
def _declare_number_const(self, name, value):
return "%s = %s" % (name, value)
def _module_format(self, fqn, register=True):
parts = fqn.split('.')
if register and len(parts) > 1:
self.module_imports['.'.join(parts[:-1])].add(parts[-1])
if self._settings['fully_qualified_modules']:
return fqn
else:
return fqn.split('(')[0].split('[')[0].split('.')[-1]
def _format_code(self, lines):
return lines
def _get_comment(self, text):
return " # {0}".format(text)
def _print_NaN(self, expr):
return "float('nan')"
def _print_Infinity(self, expr):
return "float('inf')"
def _print_Mod(self, expr):
PREC = precedence(expr)
return ('{0} % {1}'.format(*map(lambda x: self.parenthesize(x, PREC), expr.args)))
def _print_Piecewise(self, expr):
result = []
i = 0
for arg in expr.args:
e = arg.expr
c = arg.cond
result.append('((')
result.append(self._print(e))
result.append(') if (')
result.append(self._print(c))
result.append(') else (')
i += 1
result = result[:-1]
result.append(') else None)')
result.append(')'*(2*i - 2))
return ''.join(result)
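    # Illustrative output: the single-piece Piecewise((x, x < 1)) prints as
    # '((x) if (x < 1) else None)'; additional pieces nest as chained
    # conditional expressions.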
def _print_ITE(self, expr):
from sympy.functions.elementary.piecewise import Piecewise
return self._print(expr.rewrite(Piecewise))
def _print_Sum(self, expr):
loops = (
'for {i} in range({a}, {b}+1)'.format(
i=self._print(i),
a=self._print(a),
b=self._print(b))
for i, a, b in expr.limits)
return '(builtins.sum({function} {loops}))'.format(
function=self._print(expr.function),
loops=' '.join(loops))
def _print_ImaginaryUnit(self, expr):
return '1j'
def _print_MatrixBase(self, expr):
name = expr.__class__.__name__
func = self.known_functions.get(name, name)
return "%s(%s)" % (func, self._print(expr.tolist()))
_print_SparseMatrix = \
_print_MutableSparseMatrix = \
_print_ImmutableSparseMatrix = \
_print_Matrix = \
_print_DenseMatrix = \
_print_MutableDenseMatrix = \
_print_ImmutableMatrix = \
_print_ImmutableDenseMatrix = \
lambda self, expr: self._print_MatrixBase(expr)
for k in PythonCodePrinter._kf:
setattr(PythonCodePrinter, '_print_%s' % k, _print_known_func)
for k in _known_constants_math:
setattr(PythonCodePrinter, '_print_%s' % k, _print_known_const)
def pycode(expr, **settings):
return PythonCodePrinter(settings).doprint(expr)
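# Illustrative usage:
#   >>> from sympy import sin, Symbol
#   >>> pycode(sin(Symbol('x')))
#   'math.sin(x)'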
_not_in_mpmath = 'log1p log2'.split()
_in_mpmath = [(k, v) for k, v in _known_functions_math.items() if k not in _not_in_mpmath]
_known_functions_mpmath = dict(_in_mpmath)
_known_constants_mpmath = {
'Pi': 'pi'
}
class MpmathPrinter(PythonCodePrinter):
"""
Lambda printer for mpmath which maintains precision for floats
"""
printmethod = "_mpmathcode"
_kf = dict(chain(
_known_functions.items(),
[(k, 'mpmath.' + v) for k, v in _known_functions_mpmath.items()]
))
def _print_Integer(self, e):
return '%s(%d)' % (self._module_format('mpmath.mpf'), e)
def _print_Float(self, e):
# XXX: This does not handle setting mpmath.mp.dps. It is assumed that
# the caller of the lambdified function will have set it to sufficient
# precision to match the Floats in the expression.
# Remove 'mpz' if gmpy is installed.
args = str(tuple(map(int, e._mpf_)))
return '{func}({args})'.format(func=self._module_format('mpmath.mpf'), args=args)
def _print_uppergamma(self,e): #printer for the uppergamma function
return "{0}({1}, {2}, {3})".format(
self._module_format('mpmath.gammainc'), self._print(e.args[0]), self._print(e.args[1]),
self._module_format('mpmath.inf'))
    def _print_lowergamma(self,e): #printer for the lowergamma function
return "{0}({1}, 0, {2})".format(
self._module_format('mpmath.gammainc'), self._print(e.args[0]), self._print(e.args[1]))
def _print_log2(self, e):
return '{0}({1})/{0}(2)'.format(
self._module_format('mpmath.log'), self._print(e.args[0]))
def _print_log1p(self, e):
return '{0}({1}+1)'.format(
self._module_format('mpmath.log'), self._print(e.args[0]))
for k in MpmathPrinter._kf:
setattr(MpmathPrinter, '_print_%s' % k, _print_known_func)
for k in _known_constants_mpmath:
setattr(MpmathPrinter, '_print_%s' % k, _print_known_const)
_not_in_numpy = 'erf erfc factorial gamma lgamma'.split()
_in_numpy = [(k, v) for k, v in _known_functions_math.items() if k not in _not_in_numpy]
_known_functions_numpy = dict(_in_numpy, **{
'acos': 'arccos',
'acosh': 'arccosh',
'asin': 'arcsin',
'asinh': 'arcsinh',
'atan': 'arctan',
'atan2': 'arctan2',
'atanh': 'arctanh',
'exp2': 'exp2',
})
class NumPyPrinter(PythonCodePrinter):
"""
Numpy printer which handles vectorized piecewise functions,
logical operators, etc.
"""
printmethod = "_numpycode"
_kf = dict(chain(
PythonCodePrinter._kf.items(),
[(k, 'numpy.' + v) for k, v in _known_functions_numpy.items()]
))
_kc = {k: 'numpy.'+v for k, v in _known_constants_math.items()}
def _print_seq(self, seq, delimiter=', '):
"General sequence printer: converts to tuple"
# Print tuples here instead of lists because numba supports
# tuples in nopython mode.
return '({},)'.format(delimiter.join(self._print(item) for item in seq))
def _print_MatMul(self, expr):
"Matrix multiplication printer"
return '({0})'.format(').dot('.join(self._print(i) for i in expr.args))
def _print_DotProduct(self, expr):
# DotProduct allows any shape order, but numpy.dot does matrix
# multiplication, so we have to make sure it gets 1 x n by n x 1.
arg1, arg2 = expr.args
if arg1.shape[0] != 1:
arg1 = arg1.T
if arg2.shape[1] != 1:
arg2 = arg2.T
return "%s(%s, %s)" % (self._module_format('numpy.dot'), self._print(arg1), self._print(arg2))
def _print_Piecewise(self, expr):
"Piecewise function printer"
exprs = '[{0}]'.format(','.join(self._print(arg.expr) for arg in expr.args))
conds = '[{0}]'.format(','.join(self._print(arg.cond) for arg in expr.args))
# If [default_value, True] is a (expr, cond) sequence in a Piecewise object
# it will behave the same as passing the 'default' kwarg to select()
# *as long as* it is the last element in expr.args.
# If this is not the case, it may be triggered prematurely.
return '{0}({1}, {2}, default=numpy.nan)'.format(self._module_format('numpy.select'), conds, exprs)
def _print_Relational(self, expr):
"Relational printer for Equality and Unequality"
op = {
'==' :'equal',
'!=' :'not_equal',
'<' :'less',
'<=' :'less_equal',
'>' :'greater',
'>=' :'greater_equal',
}
if expr.rel_op in op:
lhs = self._print(expr.lhs)
rhs = self._print(expr.rhs)
return '{op}({lhs}, {rhs})'.format(op=self._module_format('numpy.'+op[expr.rel_op]),
lhs=lhs, rhs=rhs)
return super(NumPyPrinter, self)._print_Relational(expr)
def _print_And(self, expr):
"Logical And printer"
# We have to override LambdaPrinter because it uses Python 'and' keyword.
# If LambdaPrinter didn't define it, we could use StrPrinter's
# version of the function and add 'logical_and' to NUMPY_TRANSLATIONS.
return '{0}({1})'.format(self._module_format('numpy.logical_and'), ','.join(self._print(i) for i in expr.args))
def _print_Or(self, expr):
"Logical Or printer"
# We have to override LambdaPrinter because it uses Python 'or' keyword.
# If LambdaPrinter didn't define it, we could use StrPrinter's
# version of the function and add 'logical_or' to NUMPY_TRANSLATIONS.
return '{0}({1})'.format(self._module_format('numpy.logical_or'), ','.join(self._print(i) for i in expr.args))
def _print_Not(self, expr):
"Logical Not printer"
# We have to override LambdaPrinter because it uses Python 'not' keyword.
# If LambdaPrinter didn't define it, we would still have to define our
# own because StrPrinter doesn't define it.
return '{0}({1})'.format(self._module_format('numpy.logical_not'), ','.join(self._print(i) for i in expr.args))
def _print_Min(self, expr):
return '{0}(({1}))'.format(self._module_format('numpy.amin'), ','.join(self._print(i) for i in expr.args))
def _print_Max(self, expr):
return '{0}(({1}))'.format(self._module_format('numpy.amax'), ','.join(self._print(i) for i in expr.args))
def _print_Pow(self, expr):
if expr.exp == 0.5:
return '{0}({1})'.format(self._module_format('numpy.sqrt'), self._print(expr.base))
else:
return super(NumPyPrinter, self)._print_Pow(expr)
def _print_arg(self, expr):
return "%s(%s)" % (self._module_format('numpy.angle'), self._print(expr.args[0]))
def _print_im(self, expr):
return "%s(%s)" % (self._module_format('numpy.imag', self._print(expr.args[0])))
def _print_Mod(self, expr):
return "%s(%s)" % (self._module_format('numpy.mod'), ', '.join(map(self._print, expr.args)))
def _print_re(self, expr):
return "%s(%s)" % (self._module_format('numpy.real'), self._print(expr.args[0]))
def _print_MatrixBase(self, expr):
func = self.known_functions.get(expr.__class__.__name__, None)
if func is None:
func = self._module_format('numpy.array')
return "%s(%s)" % (func, self._print(expr.tolist()))
for k in NumPyPrinter._kf:
setattr(NumPyPrinter, '_print_%s' % k, _print_known_func)
for k in NumPyPrinter._kc:
setattr(NumPyPrinter, '_print_%s' % k, _print_known_const)
_known_functions_scipy_special = {
'erf': 'erf',
'erfc': 'erfc',
'gamma': 'gamma',
'loggamma': 'gammaln'
}
_known_constants_scipy_constants = {
'GoldenRatio': 'golden_ratio'
}
class SciPyPrinter(NumPyPrinter):
_kf = dict(chain(
NumPyPrinter._kf.items(),
[(k, 'scipy.special.' + v) for k, v in _known_functions_scipy_special.items()]
))
_kc = {k: 'scipy.constants.' + v for k, v in _known_constants_scipy_constants.items()}
def _print_SparseMatrix(self, expr):
i, j, data = [], [], []
for (r, c), v in expr._smat.items():
i.append(r)
j.append(c)
data.append(v)
return "{name}({data}, ({i}, {j}), shape={shape})".format(
name=self._module_format('scipy.sparse.coo_matrix'),
data=data, i=i, j=j, shape=expr.shape
)
_print_ImmutableSparseMatrix = _print_SparseMatrix
for k in SciPyPrinter._kf:
setattr(SciPyPrinter, '_print_%s' % k, _print_known_func)
for k in SciPyPrinter._kc:
setattr(SciPyPrinter, '_print_%s' % k, _print_known_const)
class SymPyPrinter(PythonCodePrinter):
_kf = dict([(k, 'sympy.' + v) for k, v in chain(
_known_functions.items(),
_known_functions_math.items()
)])
def _print_Function(self, expr):
mod = expr.func.__module__ or ''
return '%s(%s)' % (self._module_format(mod + ('.' if mod else '') + expr.func.__name__),
', '.join(map(self._print, expr.args)))
| [((2720, 2736), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (2731, 2736), False, 'from collections import defaultdict\n')] |
Projectoy/ml_framework | arguments_setting.py | f3d37d632a1aec314eb186a3da6d174a5dc4beee | import argparse, os
class ArgumentManager:
def __init__(self, model_list):
self.model_list = model_list
self.args = self.get_input_arguments()
self.validate_arguments()
def get_input_arguments(self):
        parser = argparse.ArgumentParser(description='Parse the model, task and configuration arguments.')
parser.add_argument("--configuration", "-c", required=True, help="the path of a configuration file(json type)")
parser.add_argument("--model", "-m", required=True, help="the model to process")
parser.add_argument("--task", "-t", required=True, help="training/testing")
return parser.parse_args()
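    # Illustrative invocation of a script using this class (file names are
    # hypothetical): python run.py -c config.json -m my_model -t training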
def validate_arguments(self):
self.validate_configuration_path()
self.validate_model()
self.validate_task()
def validate_task(self):
task = self.args.task
assert task == "training" or task == "testing", "task should be training or testing"
def validate_model(self):
model = self.args.model
assert model in self.model_list, "model is not in the prepared model list"
def validate_configuration_path(self):
config_path = self.args.configuration
assert os.path.exists(config_path), "configuration path is inappropriate (not found file)"
def get_configuraiton_file_path(self):
return self.args.configuration
def get_model_type(self):
return self.args.model
def get_task_type(self):
return self.args.task | [((251, 312), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Process some integers."""'}), "(description='Process some integers.')\n", (274, 312), False, 'import argparse, os\n'), ((1183, 1210), 'os.path.exists', 'os.path.exists', (['config_path'], {}), '(config_path)\n', (1197, 1210), False, 'import argparse, os\n')] |
Omer-Sella/ldpc | fileHandler.py | 955c0bc32236e171365cbbb88f00574302771610 | # -*- coding: utf-8 -*-
"""
Created on Thu Nov 28 12:10:11 2019
@author: Omer
"""
## File handler
## This file was initially intended purely to generate the matrices for the near earth code found in: https://public.ccsds.org/Pubs/131x1o2e2s.pdf
## The values from the above pdf were copied manually to a txt file, and it is the purpose of this file to parse it.
## The emphasis here is on correctness, I currently do not see a reason to generalise this file, since matrices will be saved in either json or some matrix friendly format.
import numpy as np
from scipy.linalg import circulant
#import matplotlib.pyplot as plt
import scipy.io
import common
import hashlib
import os
projectDir = os.environ.get('LDPC')
if projectDir is None:
    import pathlib
    # Fall back to the directory of this file; cast to str so that later
    # string concatenation of paths keeps working.
    projectDir = str(pathlib.Path(__file__).parent.absolute())
## Omer Sella: added on 01/12/2020, need to make sure this doesn't break anything.
import sys
sys.path.insert(1, projectDir)
FILE_HANDLER_INT_DATA_TYPE = np.int32
GENERAL_CODE_MATRIX_DATA_TYPE = np.int32
NIBBLE_CONVERTER = np.array([8, 4, 2, 1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
def nibbleToHex(inputArray):
n = NIBBLE_CONVERTER.dot(inputArray)
if n == 10:
h = 'A'
elif n== 11:
h = 'B'
elif n== 12:
h = 'C'
elif n== 13:
h = 'D'
elif n== 14:
h = 'E'
elif n== 15:
h = 'F'
else:
h = str(n)
return h
def binaryArraytoHex(inputArray):
d1 = len(inputArray)
assert (d1 % 4 == 0)
outputArray = np.zeros(d1//4, dtype = str)
outputString = ''
for j in range(d1//4):
nibble = inputArray[4 * j : 4 * j + 4]
h = nibbleToHex(nibble)
outputArray[j] = h
outputString = outputString + h
return outputArray, outputString
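# Worked example: the nibble [1, 0, 1, 0] encodes 8 + 2 = 10 -> 'A' and
# [1, 1, 1, 1] encodes 15 -> 'F', so
# binaryArraytoHex(np.array([1, 0, 1, 0, 1, 1, 1, 1])) yields the string 'AF'.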
def hexStringToBinaryArray(hexString):
outputBinary = np.array([], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
for i in hexString:
if i == '0':
nibble = np.array([0,0,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == '1':
nibble = np.array([0,0,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == '2':
nibble = np.array([0,0,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == '3':
nibble = np.array([0,0,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == '4':
nibble = np.array([0,1,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == '5':
nibble = np.array([0,1,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == '6':
nibble = np.array([0,1,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == '7':
nibble = np.array([0,1,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == '8':
nibble = np.array([1,0,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == '9':
nibble = np.array([1,0,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == 'A':
nibble = np.array([1,0,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == 'B':
nibble = np.array([1,0,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == 'C':
nibble = np.array([1,1,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == 'D':
nibble = np.array([1,1,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == 'E':
nibble = np.array([1,1,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == 'F':
nibble = np.array([1,1,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
        else:
            # Not a hex digit in 0-9/A-F: contribute no bits for this character.
            nibble = np.array([], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
outputBinary = np.hstack((outputBinary, nibble))
return outputBinary
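# Worked example: hexStringToBinaryArray('A3') returns
# array([1, 0, 1, 0, 0, 0, 1, 1]) ('A' -> 1010, '3' -> 0011).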
def hexToCirculant(hexStr, circulantSize):
binaryArray = hexStringToBinaryArray(hexStr)
if len(binaryArray) < circulantSize:
        binaryArray = np.hstack((np.zeros(circulantSize - len(binaryArray), dtype = GENERAL_CODE_MATRIX_DATA_TYPE), binaryArray))  # left-pad with zeros, keeping the parsed bits
else:
binaryArray = binaryArray[1:]
circulantMatrix = circulant(binaryArray)
circulantMatrix = circulantMatrix.T
return circulantMatrix
def hotLocationsToCirculant(locationList, circulantSize):
generatingVector = np.zeros(circulantSize, dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
generatingVector[locationList] = 1
newCirculant = circulant(generatingVector)
newCirculant = newCirculant.T
return newCirculant
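# Worked example: hotLocationsToCirculant([0, 2], 4) builds the generating
# vector [1, 0, 1, 0]; after the transpose every row is the previous row
# cyclically shifted one place to the right:
#   [[1, 0, 1, 0],
#    [0, 1, 0, 1],
#    [1, 0, 1, 0],
#    [0, 1, 0, 1]]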
def readMatrixFromFile(fileName, dim0, dim1, circulantSize, isRow = True, isHex = True, isGenerator = True ):
# This function assumes that each line in the file contains the non zero locations of the first row of a circulant.
# Each line in the file then defines a circulant, and the order in which they are defined is top to bottom left to right, i.e.:
# line 0 defines circulant 0,0
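    # Illustration (hypothetical numbers, not taken from the real matrix files):
    # with circulantSize = 4, a parity-file line "0,2" means the first row of the
    # circulant is [1, 0, 1, 0], and hotLocationsToCirculant expands it into
    #     [[1, 0, 1, 0],
    #      [0, 1, 0, 1],
    #      [1, 0, 1, 0],
    #      [0, 1, 0, 1]]
    # where each row is the row above, cyclically shifted one place to the right.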
with open(fileName) as fid:
lines = fid.readlines()
if isGenerator:
for i in range((dim0 // circulantSize) ):
bLeft = hexToCirculant(lines[2 * i], circulantSize)
bRight = hexToCirculant(lines[2 * i + 1], circulantSize)
newBlock = np.hstack((bLeft, bRight))
if i == 0:
accumulatedBlock = newBlock
else:
accumulatedBlock = np.vstack((accumulatedBlock, newBlock))
newMatrix = np.hstack((np.eye(dim0, dtype = GENERAL_CODE_MATRIX_DATA_TYPE), accumulatedBlock))
else:
for i in range((dim1 // circulantSize)):
            locationList1 = list(lines[i].rstrip('\n').split(','))
locationList1 = list(map(int, locationList1))
upBlock = hotLocationsToCirculant(locationList1, circulantSize)
if i == 0:
accumulatedUpBlock1 = upBlock
else:
accumulatedUpBlock1 = np.hstack((accumulatedUpBlock1, upBlock))
for i in range((dim1 // circulantSize)):
locationList = list(lines[(dim1 // circulantSize) + i].rstrip('\n').split(','))
locationList = list(map(int, locationList))
newBlock = hotLocationsToCirculant(locationList, circulantSize)
if i == 0:
accumulatedBlock2 = newBlock
else:
accumulatedBlock2 = np.hstack((accumulatedBlock2, newBlock))
newMatrix = np.vstack((accumulatedUpBlock1, accumulatedBlock2))
return newMatrix
def binaryMatrixToHexString(binaryMatrix, circulantSize):
    # Zero-pad each row on the left so its length is a multiple of 4 bits (one hex nibble).
    leftPadding = np.zeros(4 - (circulantSize % 4), dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
m,n = binaryMatrix.shape
#print(m)
#print(n)
assert( m % circulantSize == 0)
assert (n % circulantSize == 0)
M = m // circulantSize
N = n // circulantSize
hexName = ''
for r in range(M):
for k in range(N):
nextLine = np.hstack((leftPadding, binaryMatrix[ r * circulantSize , k * circulantSize : (k + 1) * circulantSize]))
hexArray, hexString = binaryArraytoHex(nextLine)
hexName = hexName + hexString
return hexName
def saveCodeInstance(parityMatrix, circulantSize, codewordSize, evaluationData = None, path = None, evaluationTime = 0, numberOfNonZero = 0, fileName = None):
print("*** in saveCodeInstance ...")
m, n = parityMatrix.shape
M = m // circulantSize
N = n // circulantSize
    if fileName is None:
fileName = binaryMatrixToHexString(parityMatrix, circulantSize)
fileNameSHA224 = str(circulantSize) + '_' + str(M) + '_' + str(N) + '_' + str(hashlib.sha224(str(fileName).encode('utf-8')).hexdigest())
fileNameWithPath = path + fileNameSHA224
else:
fileNameWithPath = path + fileName
print("*** " + fileName)
workspaceDict = {}
workspaceDict['parityMatrix'] = parityMatrix
workspaceDict['fileName'] = fileName
    if evaluationData is not None:
scatterSNR, scatterBER, scatterITR, snrAxis, averageSnrAxis, berData, averageNumberOfIterations = evaluationData.getStatsV2()
workspaceDict['snrData'] = scatterSNR
workspaceDict['berData'] = scatterBER
workspaceDict['itrData'] = scatterITR
workspaceDict['averageSnrAxis'] = averageSnrAxis
workspaceDict['averageNumberOfIterations'] = averageNumberOfIterations
workspaceDict['evaluationTime'] = evaluationTime
workspaceDict['nonZero'] = numberOfNonZero
scipy.io.savemat((fileNameWithPath + '.mat'), workspaceDict)
#evaluationData.plotStats(codewordSize, fileNameWithPath)
print("*** Finishing saveCodeInstance !")
return fileName
def testFileHandler():
nearEarthGenerator = readMatrixFromFile(projectDir + '/codeMatrices/nearEarthGenerator.txt', 7154, 8176, 511, True, True, True)
nearEarthParity = readMatrixFromFile(projectDir + '/codeMatrices/nearEarthParity.txt', 1022, 8176, 511, True, False, False)
return 'OK'
def plotResults(path, makeMat = False):
i = 10
    evaluationFailedAt = np.zeros(4, dtype = FILE_HANDLER_INT_DATA_TYPE)
evalTimes = []
numberOfIterationsAtHigh = []
for root, dirs, files in os.walk(path):
for file in files:
if str(file).endswith('.mat'):
i = i + 1
mat = scipy.io.loadmat(str(os.path.join(root, file)))
snrAxis = mat['snrAxis']
snrActual = mat['averageSnrAxis']
if len(snrAxis) < 3:
                    evaluationFailedAt[len(snrAxis)] = evaluationFailedAt[len(snrAxis)] + 1
berAxis = mat['berData']
if ('evaluationTime' in mat.keys()):
evalTimes.append(mat['evaluationTime'])
averageNumberOfIterations = mat['averageNumberOfIterations']
numberOfIterationsAtHigh.append(averageNumberOfIterations[-1])
common.plotSNRvsBER(snrActual, berAxis, fileName = None, inputLabel = '', figureNumber = i, figureName = str(file))
else:
pass
    return evalTimes, evaluationFailedAt, numberOfIterationsAtHigh
#plt.imshow(nearEarthParity)
#nearEarthParity = readMatrixFromFile('/home/oss22/swift/swift/codeMatrices/nearEarthParity.txt', 1022, 8176, 511, True, False, False)
#import networkx as nx
#from networkx.algorithms import bipartite
#B = nx.Graph()
#B.add_nodes_from(range(1022), bipartite=0)
#B.add_nodes_from(range(1022, 7156 + 1022), bipartite=1)
# Add edges only between nodes of opposite node sets
#for i in range(8176):
# for j in range(1022):
# if nearEarthParity[j,i] != 0:
# B.add_edges_from([(j, 7156 + i)])
#X, Y = bipartite.sets(B)
#pos = dict()
#pos.update( (n, (1, i)) for i, n in enumerate(X) )
#pos.update( (n, (2, i)) for i, n in enumerate(Y) )
#nx.draw(B, pos=pos)
#plt.show()
| [((694, 716), 'os.environ.get', 'os.environ.get', (['"""LDPC"""'], {}), "('LDPC')\n", (708, 716), False, 'import os\n'), ((911, 941), 'sys.path.insert', 'sys.path.insert', (['(1)', 'projectDir'], {}), '(1, projectDir)\n', (926, 941), False, 'import sys\n'), ((1042, 1101), 'numpy.array', 'np.array', (['[8, 4, 2, 1]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([8, 4, 2, 1], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (1050, 1101), True, 'import numpy as np\n'), ((1528, 1556), 'numpy.zeros', 'np.zeros', (['(d1 // 4)'], {'dtype': 'str'}), '(d1 // 4, dtype=str)\n', (1536, 1556), True, 'import numpy as np\n'), ((1852, 1901), 'numpy.array', 'np.array', (['[]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (1860, 1901), True, 'import numpy as np\n'), ((4360, 4382), 'scipy.linalg.circulant', 'circulant', (['binaryArray'], {}), '(binaryArray)\n', (4369, 4382), False, 'from scipy.linalg import circulant\n'), ((4532, 4592), 'numpy.zeros', 'np.zeros', (['circulantSize'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '(circulantSize, dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (4540, 4592), True, 'import numpy as np\n'), ((4653, 4680), 'scipy.linalg.circulant', 'circulant', (['generatingVector'], {}), '(generatingVector)\n', (4662, 4680), False, 'from scipy.linalg import circulant\n'), ((6796, 6827), 'numpy.array', 'np.array', (['(4 - circulantSize % 4)'], {}), '(4 - circulantSize % 4)\n', (6804, 6827), True, 'import numpy as np\n'), ((9221, 9266), 'numpy.zeros', 'np.zeros', (['(4)'], {'dtype': 'FILE_HANDLER_INT_DATA_TYPE'}), '(4, dtype=FILE_HANDLER_INT_DATA_TYPE)\n', (9229, 9266), True, 'import numpy as np\n'), ((9351, 9364), 'os.walk', 'os.walk', (['path'], {}), '(path)\n', (9358, 9364), False, 'import os\n'), ((3967, 4000), 'numpy.hstack', 'np.hstack', (['(outputBinary, nibble)'], {}), '((outputBinary, nibble))\n', (3976, 4000), True, 'import numpy as np\n'), ((6629, 6680), 'numpy.vstack', 'np.vstack', (['(accumulatedUpBlock1, accumulatedBlock2)'], {}), '((accumulatedUpBlock1, accumulatedBlock2))\n', (6638, 6680), True, 'import numpy as np\n'), ((1971, 2030), 'numpy.array', 'np.array', (['[0, 0, 0, 0]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([0, 0, 0, 0], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (1979, 2030), True, 'import numpy as np\n'), ((5437, 5463), 'numpy.hstack', 'np.hstack', (['(bLeft, bRight)'], {}), '((bLeft, bRight))\n', (5446, 5463), True, 'import numpy as np\n'), ((7108, 7215), 'numpy.hstack', 'np.hstack', (['(leftPadding, binaryMatrix[(r * circulantSize), k * circulantSize:(k + 1) *\n circulantSize])'], {}), '((leftPadding, binaryMatrix[(r * circulantSize), k * circulantSize\n :(k + 1) * circulantSize]))\n', (7117, 7215), True, 'import numpy as np\n'), ((776, 798), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (788, 798), False, 'import pathlib\n'), ((2076, 2135), 'numpy.array', 'np.array', (['[0, 0, 0, 1]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([0, 0, 0, 1], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (2084, 2135), True, 'import numpy as np\n'), ((5584, 5623), 'numpy.vstack', 'np.vstack', (['(accumulatedBlock, newBlock)'], {}), '((accumulatedBlock, newBlock))\n', (5593, 5623), True, 'import numpy as np\n'), ((5655, 5704), 'numpy.eye', 'np.eye', (['dim0'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '(dim0, dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (5661, 5704), True, 'import numpy as np\n'), ((6122, 6163), 'numpy.hstack', 'np.hstack', (['(accumulatedUpBlock1, upBlock)'], {}), '((accumulatedUpBlock1, 
upBlock))\n', (6131, 6163), True, 'import numpy as np\n'), ((6568, 6608), 'numpy.hstack', 'np.hstack', (['(accumulatedBlock2, newBlock)'], {}), '((accumulatedBlock2, newBlock))\n', (6577, 6608), True, 'import numpy as np\n'), ((2194, 2253), 'numpy.array', 'np.array', (['[0, 0, 1, 0]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([0, 0, 1, 0], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (2202, 2253), True, 'import numpy as np\n'), ((2312, 2371), 'numpy.array', 'np.array', (['[0, 0, 1, 1]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([0, 0, 1, 1], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (2320, 2371), True, 'import numpy as np\n'), ((9505, 9529), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (9517, 9529), False, 'import os\n'), ((2430, 2489), 'numpy.array', 'np.array', (['[0, 1, 0, 0]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([0, 1, 0, 0], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (2438, 2489), True, 'import numpy as np\n'), ((2548, 2607), 'numpy.array', 'np.array', (['[0, 1, 0, 1]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([0, 1, 0, 1], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (2556, 2607), True, 'import numpy as np\n'), ((2666, 2725), 'numpy.array', 'np.array', (['[0, 1, 1, 0]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([0, 1, 1, 0], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (2674, 2725), True, 'import numpy as np\n'), ((2784, 2843), 'numpy.array', 'np.array', (['[0, 1, 1, 1]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([0, 1, 1, 1], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (2792, 2843), True, 'import numpy as np\n'), ((2902, 2961), 'numpy.array', 'np.array', (['[1, 0, 0, 0]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([1, 0, 0, 0], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (2910, 2961), True, 'import numpy as np\n'), ((3020, 3079), 'numpy.array', 'np.array', (['[1, 0, 0, 1]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([1, 0, 0, 1], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (3028, 3079), True, 'import numpy as np\n'), ((3138, 3197), 'numpy.array', 'np.array', (['[1, 0, 1, 0]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([1, 0, 1, 0], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (3146, 3197), True, 'import numpy as np\n'), ((3256, 3315), 'numpy.array', 'np.array', (['[1, 0, 1, 1]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([1, 0, 1, 1], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (3264, 3315), True, 'import numpy as np\n'), ((3374, 3433), 'numpy.array', 'np.array', (['[1, 1, 0, 0]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([1, 1, 0, 0], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (3382, 3433), True, 'import numpy as np\n'), ((3492, 3551), 'numpy.array', 'np.array', (['[1, 1, 0, 1]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([1, 1, 0, 1], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (3500, 3551), True, 'import numpy as np\n'), ((3610, 3669), 'numpy.array', 'np.array', (['[1, 1, 1, 0]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([1, 1, 1, 0], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (3618, 3669), True, 'import numpy as np\n'), ((3728, 3787), 'numpy.array', 'np.array', (['[1, 1, 1, 1]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([1, 1, 1, 1], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (3736, 3787), True, 'import numpy as np\n'), ((3892, 3941), 'numpy.array', 'np.array', (['[]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (3900, 3941), True, 'import numpy as np\n')] |
Sentienz/datacollector-tests | stage/configuration/test_amazon_s3_origin.py | ca27988351dc3366488098b5db6c85a8be2f7b85 | import logging
import pytest
from streamsets.testframework.markers import aws, sdc_min_version
from streamsets.testframework.utils import get_random_string
logger = logging.getLogger(__name__)
S3_SANDBOX_PREFIX = 'sandbox'
LOG_FIELD_MAPPING = [{'fieldPath': '/date', 'group': 1},
{'fieldPath': '/time', 'group': 2},
{'fieldPath': '/timehalf', 'group': 3},
{'fieldPath': '/info', 'group': 4},
{'fieldPath': '/file', 'group': 5},
{'fieldPath': '/message', 'group': 6}]
REGULAR_EXPRESSION = r'(\S+) (\S+) (\S+) (\S+) (\S+) (.*)'
# log to be written int the file on s3
data_format_content = {
'COMMON_LOG_FORMAT': '127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] '
'"GET /apache.gif HTTP/1.0" 200 232',
'LOG4J': '200 [main] DEBUG org.StreamSets.Log4j unknown - This is sample log message',
'APACHE_ERROR_LOG_FORMAT': '[Wed Oct 11 14:32:52 2000] [error] [client 127.0.0.1] client '
'denied by server configuration:/export/home/live/ap/htdocs/test',
'COMBINED_LOG_FORMAT': '127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] "GET /apache.gif'
' HTTP/1.0" 200 2326 "http://www.example.com/strt.html" "Mozilla/4.08'
' [en] (Win98; I ;Nav)"',
'APACHE_CUSTOM_LOG_FORMAT': '10.185.248.71 - - [09/Jan/2015:9:12:06 +0000] "GET '
'/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300 '
'HTTP/1.1" 500 17 ',
'CEF': '10.217.31.247 CEF:0|Citrix|NetScaler|NS10.0|APPFW|APPFW_STARTURL|6|src=10.217.253.78 '
'spt=53743 method=GET request=http://vpx247.example.net/FFC/login.html msg=Disallow Illegal URL.',
'LEEF': 'LEEF: 2.0|Trend Micro|Deep Security Agent|<DSA version>|4000030|cat=Anti-Malware '
'name=HEU_AEGIS_CRYPT desc=HEU_AEGIS_CRYPT sev=6 cn1=241 msg=Realtime',
'REGEX': '2019-04-30 08:23:53 AM [INFO] [streamsets.sdk.sdc_api] Pipeline Filewriterpipeline53'}
# data to verify the output of amazon s3 origin.
get_data_to_verify_output = {
'LOG4J': {'severity': 'DEBUG', 'relativetime': '200', 'thread': 'main', 'category': 'org.StreamSets.Log4j',
'ndc': 'unknown', 'message': 'This is sample log message'},
'COMMON_LOG_FORMAT': {'request': '/apache.gif', 'auth': 'frank', 'ident': '-', 'response': '200', 'bytes':
'232', 'clientip': '127.0.0.1', 'verb': 'GET', 'httpversion': '1.0', 'rawrequest': None,
'timestamp': '10/Oct/2000:13:55:36 -0700'},
'APACHE_ERROR_LOG_FORMAT': {'message': 'client denied by server configuration:/export/home/live/ap/htdocs/'
'test', 'timestamp': 'Wed Oct 11 14:32:52 2000', 'loglevel': 'error',
'clientip': '127.0.0.1'},
'COMBINED_LOG_FORMAT': {'request': '/apache.gif', 'agent': '"Mozilla/4.08 [en] (Win98; I ;Nav)"', 'auth':
'frank', 'ident': '-', 'verb': 'GET', 'referrer': '"http://www.example.com/strt.'
'html"', 'response': '200', 'bytes': '2326', 'clientip': '127.0.0.1',
'httpversion': '1.0', 'rawrequest': None, 'timestamp': '10/Oct/2000:13:55:36 -0700'},
'APACHE_CUSTOM_LOG_FORMAT': {'remoteUser': '-', 'requestTime': '09/Jan/2015:9:12:06 +0000', 'request': 'GET '
'/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300 HTTP/1.1',
'logName': '-', 'remoteHost': '10.185.248.71', 'bytesSent': '17', 'status': '500'},
'CEF': {'severity': '6', 'product': 'NetScaler', 'extensions': {'msg': 'Disallow Illegal URL.', 'request':
'http://vpx247.example.net/FFC/login.html', 'method': 'GET', 'src': '10.217.253.78', 'spt': '53743'},
'signature': 'APPFW', 'vendor': 'Citrix', 'cefVersion': 0, 'name': 'APPFW_STARTURL',
'version': 'NS10.0'},
'GROK': {'request': '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300', 'auth': '-',
'ident': '-', 'response': '500', 'bytes': '17', 'clientip': '10.185.248.71', 'verb': 'GET',
'httpversion': '1.1', 'rawrequest': None, 'timestamp': '09/Jan/2015:9:12:06 +0000'},
'LEEF': {'eventId': '4000030', 'product': 'Deep Security Agent', 'extensions': {'cat': 'Realtime'},
'leefVersion': 2.0, 'vendor': 'Trend Micro', 'version': '<DSA version>'},
'REGEX': {'/time': '08:23:53', '/date': '2019-04-30', '/timehalf': 'AM',
'/info': '[INFO]', '/message': 'Pipeline Filewriterpipeline53', '/file': '[streamsets.sdk.sdc_api]'}}
@pytest.mark.skip('Not yet implemented')
def test_configuration_access_key_id(sdc_builder, sdc_executor):
pass
@pytest.mark.skip('Not yet implemented')
def test_configuration_bucket(sdc_builder, sdc_executor):
pass
@pytest.mark.skip('Not yet implemented')
def test_configuration_connection_timeout(sdc_builder, sdc_executor):
pass
@pytest.mark.parametrize('task', ['CREATE_NEW_OBJECT'])
@pytest.mark.skip('Not yet implemented')
def test_configuration_content(sdc_builder, sdc_executor, task):
pass
@pytest.mark.parametrize('task', ['COPY_OBJECT'])
@pytest.mark.parametrize('delete_original_object', [False, True])
@pytest.mark.skip('Not yet implemented')
def test_configuration_delete_original_object(sdc_builder, sdc_executor, task, delete_original_object):
pass
@pytest.mark.parametrize('region', ['OTHER'])
@pytest.mark.skip('Not yet implemented')
def test_configuration_endpoint(sdc_builder, sdc_executor, region):
pass
@pytest.mark.parametrize('task', ['COPY_OBJECT'])
@pytest.mark.skip('Not yet implemented')
def test_configuration_new_object_path(sdc_builder, sdc_executor, task):
pass
@pytest.mark.skip('Not yet implemented')
def test_configuration_object(sdc_builder, sdc_executor):
pass
@pytest.mark.parametrize('on_record_error', ['DISCARD', 'STOP_PIPELINE', 'TO_ERROR'])
@pytest.mark.skip('Not yet implemented')
def test_configuration_on_record_error(sdc_builder, sdc_executor, on_record_error):
pass
@pytest.mark.skip('Not yet implemented')
def test_configuration_preconditions(sdc_builder, sdc_executor):
pass
@pytest.mark.parametrize('use_proxy', [True])
@pytest.mark.skip('Not yet implemented')
def test_configuration_proxy_host(sdc_builder, sdc_executor, use_proxy):
pass
@pytest.mark.parametrize('use_proxy', [True])
@pytest.mark.skip('Not yet implemented')
def test_configuration_proxy_password(sdc_builder, sdc_executor, use_proxy):
pass
@pytest.mark.parametrize('use_proxy', [True])
@pytest.mark.skip('Not yet implemented')
def test_configuration_proxy_port(sdc_builder, sdc_executor, use_proxy):
pass
@pytest.mark.parametrize('use_proxy', [True])
@pytest.mark.skip('Not yet implemented')
def test_configuration_proxy_user(sdc_builder, sdc_executor, use_proxy):
pass
@pytest.mark.parametrize('region', ['AP_NORTHEAST_1', 'AP_NORTHEAST_2', 'AP_NORTHEAST_3', 'AP_SOUTHEAST_1', 'AP_SOUTHEAST_2', 'AP_SOUTH_1', 'CA_CENTRAL_1', 'CN_NORTHWEST_1', 'CN_NORTH_1', 'EU_CENTRAL_1', 'EU_WEST_1', 'EU_WEST_2', 'EU_WEST_3', 'OTHER', 'SA_EAST_1', 'US_EAST_1', 'US_EAST_2', 'US_GOV_WEST_1', 'US_WEST_1', 'US_WEST_2'])
@pytest.mark.skip('Not yet implemented')
def test_configuration_region(sdc_builder, sdc_executor, region):
pass
@pytest.mark.skip('Not yet implemented')
def test_configuration_required_fields(sdc_builder, sdc_executor):
pass
@pytest.mark.skip('Not yet implemented')
def test_configuration_retry_count(sdc_builder, sdc_executor):
pass
@pytest.mark.skip('Not yet implemented')
def test_configuration_secret_access_key(sdc_builder, sdc_executor):
pass
@pytest.mark.skip('Not yet implemented')
def test_configuration_socket_timeout(sdc_builder, sdc_executor):
pass
@pytest.mark.parametrize('task', ['CHANGE_EXISTING_OBJECT'])
@pytest.mark.skip('Not yet implemented')
def test_configuration_tags(sdc_builder, sdc_executor, task):
pass
@pytest.mark.parametrize('task', ['CHANGE_EXISTING_OBJECT', 'COPY_OBJECT', 'CREATE_NEW_OBJECT'])
@pytest.mark.skip('Not yet implemented')
def test_configuration_task(sdc_builder, sdc_executor, task):
pass
@pytest.mark.parametrize('use_proxy', [False, True])
@pytest.mark.skip('Not yet implemented')
def test_configuration_use_proxy(sdc_builder, sdc_executor, use_proxy):
pass
@aws('s3')
@pytest.mark.parametrize('data_format', ['LOG'])
@pytest.mark.parametrize('log_format', ['COMMON_LOG_FORMAT', 'APACHE_ERROR_LOG_FORMAT', 'COMBINED_LOG_FORMAT',
'APACHE_CUSTOM_LOG_FORMAT', 'REGEX', 'GROK', 'LOG4J', 'CEF', 'LEEF'])
def test_configurations_data_format_log(sdc_executor, sdc_builder, aws, data_format, log_format):
"""Check whether S3 origin can parse different log format or not. A log file is being created in s3 bucket
mentioned below .S3 origin reads the log file and parse the same.
Pipeline for the same-
s3_origin >> trash
s3_origin >= pipeline_finisher_executor
"""
if log_format == 'GROK':
file_content = data_format_content['APACHE_CUSTOM_LOG_FORMAT']
else:
file_content = data_format_content[log_format]
client = aws.s3
s3_key = f'{S3_SANDBOX_PREFIX}/{get_random_string()}'
attributes = {'bucket': aws.s3_bucket_name,
'prefix_pattern': f'{s3_key}/*',
'number_of_threads': 1,
'read_order': 'LEXICOGRAPHICAL',
'data_format': data_format,
'log_format': log_format,
'custom_log_format': '%h %l %u [%t] "%r" %>s %b',
'regular_expression': REGULAR_EXPRESSION,
'field_path_to_regex_group_mapping': LOG_FIELD_MAPPING
}
pipeline = get_aws_origin_to_trash_pipeline(sdc_builder, attributes, aws)
s3_origin = pipeline.origin_stage
try:
client.put_object(Bucket=aws.s3_bucket_name, Key=f'{s3_key}/{get_random_string()}.log', Body=file_content)
output_records = execute_pipeline_and_get_output(sdc_executor, s3_origin, pipeline)
assert output_records[0].field == get_data_to_verify_output[log_format]
finally:
if sdc_executor.get_pipeline_status(pipeline).response.json().get('status') == 'RUNNING':
sdc_executor.stop_pipeline(pipeline)
# cleaning up s3 bucket
delete_aws_objects(client, aws, s3_key)
def get_aws_origin_to_trash_pipeline(sdc_builder, attributes, aws):
# Build pipeline.
builder = sdc_builder.get_pipeline_builder()
builder.add_error_stage('Discard')
s3_origin = builder.add_stage('Amazon S3', type='origin')
s3_origin.set_attributes(**attributes)
trash = builder.add_stage('Trash')
pipeline_finisher_executor = builder.add_stage('Pipeline Finisher Executor')
pipeline_finisher_executor.set_attributes(stage_record_preconditions=["${record:eventType() == 'no-more-data'}"])
s3_origin >> trash
s3_origin >= pipeline_finisher_executor
s3_origin_pipeline = builder.build().configure_for_environment(aws)
s3_origin_pipeline.configuration['shouldRetry'] = False
return s3_origin_pipeline
def delete_aws_objects(client, aws, s3_key):
# Clean up S3.
delete_keys = {'Objects': [{'Key': k['Key']}
for k in
client.list_objects_v2(Bucket=aws.s3_bucket_name, Prefix=s3_key)['Contents']]}
client.delete_objects(Bucket=aws.s3_bucket_name, Delete=delete_keys)
def execute_pipeline_and_get_output(sdc_executor, s3_origin, pipeline):
sdc_executor.add_pipeline(pipeline)
snapshot = sdc_executor.capture_snapshot(pipeline, start_pipeline=True).snapshot
output_records = snapshot[s3_origin].output
return output_records
| [((167, 194), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (184, 194), False, 'import logging\n'), ((4949, 4988), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not yet implemented"""'], {}), "('Not yet implemented')\n", (4965, 4988), False, 'import pytest\n'), ((5066, 5105), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not yet implemented"""'], {}), "('Not yet implemented')\n", (5082, 5105), False, 'import pytest\n'), ((5176, 5215), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not yet implemented"""'], {}), "('Not yet implemented')\n", (5192, 5215), False, 'import pytest\n'), ((5298, 5352), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""task"""', "['CREATE_NEW_OBJECT']"], {}), "('task', ['CREATE_NEW_OBJECT'])\n", (5321, 5352), False, 'import pytest\n'), ((5354, 5393), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not yet implemented"""'], {}), "('Not yet implemented')\n", (5370, 5393), False, 'import pytest\n'), ((5471, 5519), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""task"""', "['COPY_OBJECT']"], {}), "('task', ['COPY_OBJECT'])\n", (5494, 5519), False, 'import pytest\n'), ((5521, 5585), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""delete_original_object"""', '[False, True]'], {}), "('delete_original_object', [False, True])\n", (5544, 5585), False, 'import pytest\n'), ((5587, 5626), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not yet implemented"""'], {}), "('Not yet implemented')\n", (5603, 5626), False, 'import pytest\n'), ((5743, 5787), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""region"""', "['OTHER']"], {}), "('region', ['OTHER'])\n", (5766, 5787), False, 'import pytest\n'), ((5789, 5828), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not yet implemented"""'], {}), "('Not yet implemented')\n", (5805, 5828), False, 'import pytest\n'), ((5909, 5957), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""task"""', "['COPY_OBJECT']"], {}), "('task', ['COPY_OBJECT'])\n", (5932, 5957), False, 'import pytest\n'), ((5959, 5998), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not yet implemented"""'], {}), "('Not yet implemented')\n", (5975, 5998), False, 'import pytest\n'), ((6084, 6123), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not yet implemented"""'], {}), "('Not yet implemented')\n", (6100, 6123), False, 'import pytest\n'), ((6194, 6282), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""on_record_error"""', "['DISCARD', 'STOP_PIPELINE', 'TO_ERROR']"], {}), "('on_record_error', ['DISCARD', 'STOP_PIPELINE',\n 'TO_ERROR'])\n", (6217, 6282), False, 'import pytest\n'), ((6280, 6319), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not yet implemented"""'], {}), "('Not yet implemented')\n", (6296, 6319), False, 'import pytest\n'), ((6416, 6455), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not yet implemented"""'], {}), "('Not yet implemented')\n", (6432, 6455), False, 'import pytest\n'), ((6533, 6577), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""use_proxy"""', '[True]'], {}), "('use_proxy', [True])\n", (6556, 6577), False, 'import pytest\n'), ((6579, 6618), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not yet implemented"""'], {}), "('Not yet implemented')\n", (6595, 6618), False, 'import pytest\n'), ((6704, 6748), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""use_proxy"""', '[True]'], {}), "('use_proxy', [True])\n", (6727, 6748), False, 'import pytest\n'), ((6750, 6789), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not yet implemented"""'], 
{}), "('Not yet implemented')\n", (6766, 6789), False, 'import pytest\n'), ((6879, 6923), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""use_proxy"""', '[True]'], {}), "('use_proxy', [True])\n", (6902, 6923), False, 'import pytest\n'), ((6925, 6964), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not yet implemented"""'], {}), "('Not yet implemented')\n", (6941, 6964), False, 'import pytest\n'), ((7050, 7094), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""use_proxy"""', '[True]'], {}), "('use_proxy', [True])\n", (7073, 7094), False, 'import pytest\n'), ((7096, 7135), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not yet implemented"""'], {}), "('Not yet implemented')\n", (7112, 7135), False, 'import pytest\n'), ((7221, 7570), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""region"""', "['AP_NORTHEAST_1', 'AP_NORTHEAST_2', 'AP_NORTHEAST_3', 'AP_SOUTHEAST_1',\n 'AP_SOUTHEAST_2', 'AP_SOUTH_1', 'CA_CENTRAL_1', 'CN_NORTHWEST_1',\n 'CN_NORTH_1', 'EU_CENTRAL_1', 'EU_WEST_1', 'EU_WEST_2', 'EU_WEST_3',\n 'OTHER', 'SA_EAST_1', 'US_EAST_1', 'US_EAST_2', 'US_GOV_WEST_1',\n 'US_WEST_1', 'US_WEST_2']"], {}), "('region', ['AP_NORTHEAST_1', 'AP_NORTHEAST_2',\n 'AP_NORTHEAST_3', 'AP_SOUTHEAST_1', 'AP_SOUTHEAST_2', 'AP_SOUTH_1',\n 'CA_CENTRAL_1', 'CN_NORTHWEST_1', 'CN_NORTH_1', 'EU_CENTRAL_1',\n 'EU_WEST_1', 'EU_WEST_2', 'EU_WEST_3', 'OTHER', 'SA_EAST_1',\n 'US_EAST_1', 'US_EAST_2', 'US_GOV_WEST_1', 'US_WEST_1', 'US_WEST_2'])\n", (7244, 7570), False, 'import pytest\n'), ((7556, 7595), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not yet implemented"""'], {}), "('Not yet implemented')\n", (7572, 7595), False, 'import pytest\n'), ((7674, 7713), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not yet implemented"""'], {}), "('Not yet implemented')\n", (7690, 7713), False, 'import pytest\n'), ((7793, 7832), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not yet implemented"""'], {}), "('Not yet implemented')\n", (7809, 7832), False, 'import pytest\n'), ((7908, 7947), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not yet implemented"""'], {}), "('Not yet implemented')\n", (7924, 7947), False, 'import pytest\n'), ((8029, 8068), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not yet implemented"""'], {}), "('Not yet implemented')\n", (8045, 8068), False, 'import pytest\n'), ((8147, 8206), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""task"""', "['CHANGE_EXISTING_OBJECT']"], {}), "('task', ['CHANGE_EXISTING_OBJECT'])\n", (8170, 8206), False, 'import pytest\n'), ((8208, 8247), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not yet implemented"""'], {}), "('Not yet implemented')\n", (8224, 8247), False, 'import pytest\n'), ((8322, 8421), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""task"""', "['CHANGE_EXISTING_OBJECT', 'COPY_OBJECT', 'CREATE_NEW_OBJECT']"], {}), "('task', ['CHANGE_EXISTING_OBJECT', 'COPY_OBJECT',\n 'CREATE_NEW_OBJECT'])\n", (8345, 8421), False, 'import pytest\n'), ((8419, 8458), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not yet implemented"""'], {}), "('Not yet implemented')\n", (8435, 8458), False, 'import pytest\n'), ((8533, 8584), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""use_proxy"""', '[False, True]'], {}), "('use_proxy', [False, True])\n", (8556, 8584), False, 'import pytest\n'), ((8586, 8625), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not yet implemented"""'], {}), "('Not yet implemented')\n", (8602, 8625), False, 'import pytest\n'), ((8710, 8719), 'streamsets.testframework.markers.aws', 'aws', 
(['"""s3"""'], {}), "('s3')\n", (8713, 8719), False, 'from streamsets.testframework.markers import aws, sdc_min_version\n'), ((8721, 8768), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""data_format"""', "['LOG']"], {}), "('data_format', ['LOG'])\n", (8744, 8768), False, 'import pytest\n'), ((8770, 8957), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""log_format"""', "['COMMON_LOG_FORMAT', 'APACHE_ERROR_LOG_FORMAT', 'COMBINED_LOG_FORMAT',\n 'APACHE_CUSTOM_LOG_FORMAT', 'REGEX', 'GROK', 'LOG4J', 'CEF', 'LEEF']"], {}), "('log_format', ['COMMON_LOG_FORMAT',\n 'APACHE_ERROR_LOG_FORMAT', 'COMBINED_LOG_FORMAT',\n 'APACHE_CUSTOM_LOG_FORMAT', 'REGEX', 'GROK', 'LOG4J', 'CEF', 'LEEF'])\n", (8793, 8957), False, 'import pytest\n'), ((9593, 9612), 'streamsets.testframework.utils.get_random_string', 'get_random_string', ([], {}), '()\n', (9610, 9612), False, 'from streamsets.testframework.utils import get_random_string\n'), ((10312, 10331), 'streamsets.testframework.utils.get_random_string', 'get_random_string', ([], {}), '()\n', (10329, 10331), False, 'from streamsets.testframework.utils import get_random_string\n')] |
eubr-atmosphere/a-MLLibrary | model_building/svr_experiment_configuration.py | b6ba472baacea6d793ab4f03275cdfa874e83bc3 | """
Copyright 2019 Marco Lattuada
Copyright 2019 Danilo Ardagna
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sklearn.svm as svm
import model_building.experiment_configuration as ec
class SVRExperimentConfiguration(ec.ExperimentConfiguration):
"""
    Class representing a single experiment configuration for support vector regression (SVR)
    Attributes
    ----------
    _regressor : sklearn.svm.SVR
        The actual scikit-learn object which performs the support vector regression
    Methods
    -------
    _train()
        Performs the actual building of the SVR model
    compute_estimations()
        Compute the estimated values for a given set of data
"""
def __init__(self, campaign_configuration, hyperparameters, regression_inputs, prefix):
"""
campaign_configuration: dict of dict:
The set of options specified by the user though command line and campaign configuration files
hyperparameters: dictionary
The set of hyperparameters of this experiment configuration
regression_inputs: RegressionInputs
The input of the regression problem to be solved
"""
super().__init__(campaign_configuration, hyperparameters, regression_inputs, prefix)
self.technique = ec.Technique.SVR
self._regressor = svm.SVR(C=self._hyperparameters['C'], epsilon=self._hyperparameters['epsilon'],
gamma=self._hyperparameters['gamma'], kernel=self._hyperparameters['kernel'],
degree=self._hyperparameters['degree'])
def _compute_signature(self, prefix):
"""
Compute the signature associated with this experiment configuration
"""
signature = prefix.copy()
signature.append("C_" + str(self._hyperparameters['C']))
signature.append("epsilon_" + str(self._hyperparameters['epsilon']))
signature.append("gamma_" + str(self._hyperparameters['gamma']))
signature.append("kernel_" + str(self._hyperparameters['kernel']))
signature.append("degree_" + str(self._hyperparameters['degree']))
return signature
def _train(self):
"""
Build the model with the experiment configuration represented by this object
"""
self._logger.debug("Building model for %s", self._signature)
assert self._regression_inputs
xdata, ydata = self._regression_inputs.get_xy_data(self._regression_inputs.inputs_split["training"])
self._regressor.fit(xdata, ydata)
self._logger.debug("Model built")
# for idx, col_name in enumerate(self._regression_inputs.x_columns):
# self._logger.debug("The coefficient for %s is %f", col_name, self._linear_regression.coef_[idx])
def compute_estimations(self, rows):
"""
Compute the estimations and the MAPE for runs in rows
"""
xdata, _ = self._regression_inputs.get_xy_data(rows)
return self._regressor.predict(xdata)
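# Minimal usage sketch (illustrative only; the campaign configuration, the
# hyperparameter values and the RegressionInputs object are assumed to come
# from the surrounding framework, and the "validation" split name is a
# placeholder):
#
#   hp = {'C': 1.0, 'epsilon': 0.1, 'gamma': 'scale', 'kernel': 'rbf', 'degree': 3}
#   conf = SVRExperimentConfiguration(campaign_configuration, hp, regression_inputs, [])
#   conf._train()
#   predictions = conf.compute_estimations(regression_inputs.inputs_split["validation"])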
| [((1803, 2010), 'sklearn.svm.SVR', 'svm.SVR', ([], {'C': "self._hyperparameters['C']", 'epsilon': "self._hyperparameters['epsilon']", 'gamma': "self._hyperparameters['gamma']", 'kernel': "self._hyperparameters['kernel']", 'degree': "self._hyperparameters['degree']"}), "(C=self._hyperparameters['C'], epsilon=self._hyperparameters[\n 'epsilon'], gamma=self._hyperparameters['gamma'], kernel=self.\n _hyperparameters['kernel'], degree=self._hyperparameters['degree'])\n", (1810, 2010), True, 'import sklearn.svm as svm\n')] |
south-coast-science/scs_host_rpi | src/scs_host/sys/host_gpi.py | a02afde3fd2e1f2b8c6dc08beef8c74039108a64 | """
Created on 12 May 2017
@author: Bruno Beloff ([email protected])
"""
from scs_host.sys.host_gpio import HostGPIO
# --------------------------------------------------------------------------------------------------------------------
# noinspection PyUnusedLocal,PyAbstractClass
class HostGPI(HostGPIO):
"""
classdocs
"""
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, pin):
raise NotImplementedError()
# ----------------------------------------------------------------------------------------------------------------
@property
def state(self):
raise NotImplementedError()
def wait(self, edge):
raise NotImplementedError()
# ----------------------------------------------------------------------------------------------------------------
def __str__(self, *args, **kwargs):
raise NotImplementedError()
| [] |
dcragusa/PythonMorsels | 30-39/35. final_class/final_class.py | 5f75b51a68769036e4004e9ccdada6b220124ab6 |
class Unsubclassable:
def __init_subclass__(cls, **kwargs):
raise TypeError('Unacceptable base type')
def prevent_subclassing():
raise TypeError('Unacceptable base type')
def final_class(cls):
setattr(cls, '__init_subclass__', prevent_subclassing)
return cls
class UnsubclassableType(type):
def __new__(cls, name, bases, dct):
c = super().__new__(cls, name, bases, dct)
setattr(c, '__init_subclass__', prevent_subclassing)
return c
| [] |
victor-estrade/SystGradDescent | benchmark/AMS/HIGGSTES/TP.py | 822e7094290301ec47a99433381a8d6406798aff | #!/usr/bin/env python
# coding: utf-8
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from __future__ import unicode_literals
# Command line :
# python -m benchmark.AMS.HIGGSTES.TP
import os
import logging
from config import SEED
from config import _ERROR
from config import _TRUTH
import numpy as np
import pandas as pd
from visual.misc import set_plot_config
set_plot_config()
from utils.log import set_logger
from utils.log import flush
from utils.log import print_line
from utils.model import get_model
from utils.model import get_optimizer
from utils.model import train_or_load_neural_net
from utils.evaluation import evaluate_summary_computer
from utils.images import gather_images
from visual.misc import plot_params
from problem.higgs import HiggsConfigTesOnly as Config
from problem.higgs import get_generators_torch
from problem.higgs import GeneratorCPU
from problem.higgs import GeneratorTorch
from problem.higgs import HiggsNLL as NLLComputer
from model.tangent_prop import TangentPropClassifier
from archi.classic import L4 as ARCHI
from ...my_argparser import TP_parse_args
from collections import OrderedDict
from .common import measurement
DATA_NAME = 'HIGGSTES'
BENCHMARK_NAME = 'VAR-'+DATA_NAME
N_ITER = 30
class TrainGenerator:
def __init__(self, data_generator, cuda=False):
self.data_generator = data_generator
if cuda:
self.data_generator.cuda()
else:
self.data_generator.cpu()
self.mu = self.tensor(Config.CALIBRATED.mu, requires_grad=True)
self.tes = self.tensor(Config.CALIBRATED.tes, requires_grad=True)
self.jes = self.tensor(Config.CALIBRATED.jes, requires_grad=True)
self.les = self.tensor(Config.CALIBRATED.les, requires_grad=True)
        self.params = (self.tes, self.jes, self.les, self.mu)
self.nuisance_params = OrderedDict([
('tes', self.tes),
('jes', self.jes),
('les', self.les),
])
def generate(self, n_samples=None):
X, y, w = self.data_generator.diff_generate(*self.params, n_samples=n_samples)
return X, y, w
def reset(self):
self.data_generator.reset()
def tensor(self, data, requires_grad=False, dtype=None):
return self.data_generator.tensor(data, requires_grad=requires_grad, dtype=dtype)
def build_model(args, i_cv):
args.net = ARCHI(n_in=29, n_out=2, n_unit=args.n_unit)
args.optimizer = get_optimizer(args)
model = get_model(args, TangentPropClassifier)
model.set_info(DATA_NAME, BENCHMARK_NAME, i_cv)
return model
# =====================================================================
# MAIN
# =====================================================================
def main():
# BASIC SETUP
logger = set_logger()
    args = TP_parse_args(main_description="Training launcher for Tangent Propagation on HIGGSTES benchmark")
logger.info(args)
flush(logger)
# INFO
model = build_model(args, -1)
os.makedirs(model.results_directory, exist_ok=True)
# RUN
logger.info(f'Running runs [{args.start_cv},{args.end_cv}[')
results = [run(args, i_cv) for i_cv in range(args.start_cv, args.end_cv)]
results = pd.concat(results, ignore_index=True)
# EVALUATION
results.to_csv(os.path.join(model.results_directory, 'threshold.csv'))
print(results)
print("DONE !")
def run(args, i_cv):
logger = logging.getLogger()
print_line()
logger.info('Running iter n°{}'.format(i_cv))
print_line()
# LOAD/GENERATE DATA
logger.info('Set up data generator')
config = Config()
seed = SEED + i_cv * 5
train_generator, valid_generator, test_generator = get_generators_torch(seed, cuda=args.cuda)
train_generator = TrainGenerator(train_generator, cuda=args.cuda)
valid_generator = GeneratorCPU(valid_generator)
test_generator = GeneratorCPU(test_generator)
# SET MODEL
logger.info('Set up classifier')
model = build_model(args, i_cv)
os.makedirs(model.results_path, exist_ok=True)
flush(logger)
# TRAINING / LOADING
train_or_load_neural_net(model, train_generator, retrain=args.retrain)
# MEASUREMENT
results = measurement(model, i_cv, config, valid_generator, test_generator)
print(results)
return results
if __name__ == '__main__':
main()
| [((420, 437), 'visual.misc.set_plot_config', 'set_plot_config', ([], {}), '()\n', (435, 437), False, 'from visual.misc import set_plot_config\n'), ((2527, 2570), 'archi.classic.L4', 'ARCHI', ([], {'n_in': '(29)', 'n_out': '(2)', 'n_unit': 'args.n_unit'}), '(n_in=29, n_out=2, n_unit=args.n_unit)\n', (2532, 2570), True, 'from archi.classic import L4 as ARCHI\n'), ((2592, 2611), 'utils.model.get_optimizer', 'get_optimizer', (['args'], {}), '(args)\n', (2605, 2611), False, 'from utils.model import get_optimizer\n'), ((2624, 2662), 'utils.model.get_model', 'get_model', (['args', 'TangentPropClassifier'], {}), '(args, TangentPropClassifier)\n', (2633, 2662), False, 'from utils.model import get_model\n'), ((2928, 2940), 'utils.log.set_logger', 'set_logger', ([], {}), '()\n', (2938, 2940), False, 'from utils.log import set_logger\n'), ((3058, 3071), 'utils.log.flush', 'flush', (['logger'], {}), '(logger)\n', (3063, 3071), False, 'from utils.log import flush\n'), ((3121, 3172), 'os.makedirs', 'os.makedirs', (['model.results_directory'], {'exist_ok': '(True)'}), '(model.results_directory, exist_ok=True)\n', (3132, 3172), False, 'import os\n'), ((3340, 3377), 'pandas.concat', 'pd.concat', (['results'], {'ignore_index': '(True)'}), '(results, ignore_index=True)\n', (3349, 3377), True, 'import pandas as pd\n'), ((3545, 3564), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (3562, 3564), False, 'import logging\n'), ((3569, 3581), 'utils.log.print_line', 'print_line', ([], {}), '()\n', (3579, 3581), False, 'from utils.log import print_line\n'), ((3636, 3648), 'utils.log.print_line', 'print_line', ([], {}), '()\n', (3646, 3648), False, 'from utils.log import print_line\n'), ((3730, 3738), 'problem.higgs.HiggsConfigTesOnly', 'Config', ([], {}), '()\n', (3736, 3738), True, 'from problem.higgs import HiggsConfigTesOnly as Config\n'), ((3821, 3863), 'problem.higgs.get_generators_torch', 'get_generators_torch', (['seed'], {'cuda': 'args.cuda'}), '(seed, cuda=args.cuda)\n', (3841, 3863), False, 'from problem.higgs import get_generators_torch\n'), ((3956, 3985), 'problem.higgs.GeneratorCPU', 'GeneratorCPU', (['valid_generator'], {}), '(valid_generator)\n', (3968, 3985), False, 'from problem.higgs import GeneratorCPU\n'), ((4007, 4035), 'problem.higgs.GeneratorCPU', 'GeneratorCPU', (['test_generator'], {}), '(test_generator)\n', (4019, 4035), False, 'from problem.higgs import GeneratorCPU\n'), ((4130, 4176), 'os.makedirs', 'os.makedirs', (['model.results_path'], {'exist_ok': '(True)'}), '(model.results_path, exist_ok=True)\n', (4141, 4176), False, 'import os\n'), ((4181, 4194), 'utils.log.flush', 'flush', (['logger'], {}), '(logger)\n', (4186, 4194), False, 'from utils.log import flush\n'), ((4225, 4295), 'utils.model.train_or_load_neural_net', 'train_or_load_neural_net', (['model', 'train_generator'], {'retrain': 'args.retrain'}), '(model, train_generator, retrain=args.retrain)\n', (4249, 4295), False, 'from utils.model import train_or_load_neural_net\n'), ((1910, 1980), 'collections.OrderedDict', 'OrderedDict', (["[('tes', self.tes), ('jes', self.jes), ('les', self.les)]"], {}), "([('tes', self.tes), ('jes', self.jes), ('les', self.les)])\n", (1921, 1980), False, 'from collections import OrderedDict\n'), ((3414, 3468), 'os.path.join', 'os.path.join', (['model.results_directory', '"""threshold.csv"""'], {}), "(model.results_directory, 'threshold.csv')\n", (3426, 3468), False, 'import os\n')] |
dmartinpro/papermill | papermill/tests/test_adl.py | fbb0a60c97cde70e3b278f778cbd366cf54f83f0 | import unittest
from ..adl import ADL
import six
if six.PY3:
from unittest.mock import Mock, MagicMock
else:
from mock import Mock, MagicMock
class ADLTest(unittest.TestCase):
"""
Tests for `ADL`
"""
def setUp(self):
self.ls = Mock(return_value=["foo", "bar", "baz"])
self.fakeFile = MagicMock()
self.fakeFile.__iter__.return_value = [b"a", b"b", b"c"]
self.fakeFile.__enter__.return_value = self.fakeFile
self.open = Mock(return_value=self.fakeFile)
self.fakeAdapter = Mock(open=self.open, ls=self.ls)
self.adl = ADL()
self.adl._create_adapter = Mock(return_value=self.fakeAdapter)
def test_split_url_raises_exception_on_invalid_url(self):
with self.assertRaises(Exception) as context:
ADL._split_url("this_is_not_a_valid_url")
self.assertTrue("Invalid ADL url 'this_is_not_a_valid_url'" in str(context.exception))
def test_split_url_splits_valid_url(self):
(store_name, path) = ADL._split_url("adl://foo.azuredatalakestore.net/bar/baz")
self.assertEqual(store_name, "foo")
self.assertEqual(path, "bar/baz")
def test_listdir_calls_ls_on_adl_adapter(self):
self.assertEqual(
self.adl.listdir("adl://foo_store.azuredatalakestore.net/path/to/file"),
["foo", "bar", "baz"],
)
self.ls.assert_called_once_with("path/to/file")
def test_read_opens_and_reads_file(self):
        self.assertEqual(
self.adl.read("adl://foo_store.azuredatalakestore.net/path/to/file"), ["a", "b", "c"]
)
self.fakeFile.__iter__.assert_called_once_with()
def test_write_opens_file_and_writes_to_it(self):
self.adl.write("hello world", "adl://foo_store.azuredatalakestore.net/path/to/file")
self.fakeFile.write.assert_called_once_with(b"hello world")
| [((263, 303), 'mock.Mock', 'Mock', ([], {'return_value': "['foo', 'bar', 'baz']"}), "(return_value=['foo', 'bar', 'baz'])\n", (267, 303), False, 'from mock import Mock, MagicMock\n'), ((328, 339), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (337, 339), False, 'from mock import Mock, MagicMock\n'), ((486, 518), 'mock.Mock', 'Mock', ([], {'return_value': 'self.fakeFile'}), '(return_value=self.fakeFile)\n', (490, 518), False, 'from mock import Mock, MagicMock\n'), ((546, 578), 'mock.Mock', 'Mock', ([], {'open': 'self.open', 'ls': 'self.ls'}), '(open=self.open, ls=self.ls)\n', (550, 578), False, 'from mock import Mock, MagicMock\n'), ((639, 674), 'mock.Mock', 'Mock', ([], {'return_value': 'self.fakeAdapter'}), '(return_value=self.fakeAdapter)\n', (643, 674), False, 'from mock import Mock, MagicMock\n')] |
AnvarKhan/django-python | users/views.py | bd54e44deb290f43ea5982c2ca9f37cd6c946879 | from django.views.generic import CreateView
from django.urls import reverse_lazy
from .forms import CustomUserCreationForm
class SignUpView(CreateView):
form_class = CustomUserCreationForm
success_url = reverse_lazy('login')
template_name = 'signup.html'
| [((206, 227), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""login"""'], {}), "('login')\n", (218, 227), False, 'from django.urls import reverse_lazy\n')] |
Thom1729/package_util | st3/package_util/compat/typing.py | 3ddec00d8ab4a52f0f5ce3fe8b09247c1518547f | try:
from typing import * # noqa: F401, F403
except ImportError:
from .typing_stubs import * # type: ignore # noqa: F401, F403
| [] |
rasimuvaikas/stanza | stanza/models/common/dropout.py | 21793519a531b0e9d7151e42d180d97785c9a5b8 | import torch
import torch.nn as nn
class WordDropout(nn.Module):
""" A word dropout layer that's designed for embedded inputs (e.g., any inputs to an LSTM layer).
    Given a batch of embedded inputs, this layer randomly sets some of them to be a replacement state.
Note that this layer assumes the last dimension of the input to be the hidden dimension of a unit.
"""
def __init__(self, dropprob):
super().__init__()
self.dropprob = dropprob
def forward(self, x, replacement=None):
if not self.training or self.dropprob == 0:
return x
masksize = [y for y in x.size()]
masksize[-1] = 1
dropmask = torch.rand(*masksize, device=x.device) < self.dropprob
res = x.masked_fill(dropmask, 0)
if replacement is not None:
res = res + dropmask.float() * replacement
return res
def extra_repr(self):
return 'p={}'.format(self.dropprob)
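# Minimal usage sketch for WordDropout (shapes and values are illustrative
# assumptions, not mandated by the module; dropout is only active in
# training mode):
#
#   drop = WordDropout(0.2)
#   x = torch.randn(8, 20, 100)      # (batch, seq_len, hidden) embedded inputs
#   unk = torch.randn(100)           # replacement state for dropped words
#   y = drop(x, replacement=unk)     # same shape as x; ~20% of word vectors replaced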
class LockedDropout(nn.Module):
"""
A variant of dropout layer that consistently drops out the same parameters over time. Also known as the variational dropout.
This implementation was modified from the LockedDropout implementation in the flair library (https://github.com/zalandoresearch/flair).
"""
def __init__(self, dropprob, batch_first=True):
super().__init__()
self.dropprob = dropprob
self.batch_first = batch_first
def forward(self, x):
if not self.training or self.dropprob == 0:
return x
if not self.batch_first:
m = x.new_empty(1, x.size(1), x.size(2), requires_grad=False).bernoulli_(1 - self.dropprob)
else:
m = x.new_empty(x.size(0), 1, x.size(2), requires_grad=False).bernoulli_(1 - self.dropprob)
mask = m.div(1 - self.dropprob).expand_as(x)
return mask * x
def extra_repr(self):
return 'p={}'.format(self.dropprob)
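# Minimal usage sketch for LockedDropout (illustrative assumptions): with
# batch_first=True the mask has shape (batch, 1, hidden), so the same units
# are dropped at every time step of a given sequence:
#
#   drop = LockedDropout(0.5)
#   x = torch.randn(8, 20, 100)      # (batch, time, hidden)
#   y = drop(x)                      # one Bernoulli mask per sequence, reused over time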
class SequenceUnitDropout(nn.Module):
""" A unit dropout layer that's designed for input of sequence units (e.g., word sequence, char sequence, etc.).
    Given a sequence of unit indices, this layer randomly sets some of them to a replacement id (usually the id of <UNK>).
"""
def __init__(self, dropprob, replacement_id):
super().__init__()
self.dropprob = dropprob
self.replacement_id = replacement_id
def forward(self, x):
""" :param: x must be a LongTensor of unit indices. """
if not self.training or self.dropprob == 0:
return x
masksize = [y for y in x.size()]
dropmask = torch.rand(*masksize, device=x.device) < self.dropprob
res = x.masked_fill(dropmask, self.replacement_id)
return res
def extra_repr(self):
return 'p={}, replacement_id={}'.format(self.dropprob, self.replacement_id)
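# Minimal usage sketch for SequenceUnitDropout (the vocabulary size and the
# replacement id are illustrative; replacement_id would normally be the
# index of <UNK>):
#
#   drop = SequenceUnitDropout(0.1, replacement_id=1)
#   ids = torch.randint(2, 1000, (8, 20))   # LongTensor of unit indices
#   out = drop(ids)                         # in training, ~10% of indices become 1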
| [((679, 717), 'torch.rand', 'torch.rand', (['*masksize'], {'device': 'x.device'}), '(*masksize, device=x.device)\n', (689, 717), False, 'import torch\n'), ((2609, 2647), 'torch.rand', 'torch.rand', (['*masksize'], {'device': 'x.device'}), '(*masksize, device=x.device)\n', (2619, 2647), False, 'import torch\n')] |
bdfd/Python_Zero2Hero_DS | Day01-15/code/Day15/pdf2.py | 9dafe90b8112fdc3d07e1aa02e41ed3f019f733c | """
读取PDF文件
Version: 0.1
Author: BDFD
Date: 2018-03-26
"""
from PyPDF2 import PdfFileReader
with open('./res/Python课程大纲.pdf', 'rb') as f:
reader = PdfFileReader(f, strict=False)
print(reader.numPages)
if reader.isEncrypted:
reader.decrypt('')
current_page = reader.getPage(5)
print(current_page)
print(current_page.extractText())
| [((154, 184), 'PyPDF2.PdfFileReader', 'PdfFileReader', (['f'], {'strict': '(False)'}), '(f, strict=False)\n', (167, 184), False, 'from PyPDF2 import PdfFileReader\n')] |
DazEB2/SimplePyScripts | qt__pyqt__pyside__pyqode/qt__class_tree__parse_and_print__recursively__from__doc_qt_io/gui.py | 1dde0a42ba93fe89609855d6db8af1c63b1ab7cc | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
from PyQt5 import QtWidgets as qtw
from PyQt5.QtTest import QTest
import time
import requests
from bs4 import BeautifulSoup
from console import get_inherited_children, ROOT_URL
class MainWindow(qtw.QMainWindow):
def __init__(self):
super().__init__()
self.setWindowTitle('qt__class_tree__parse_and_print__recursively__from__doc_qt_io')
self.tree = qtw.QTreeWidget()
self.tree.setAlternatingRowColors(True)
self.tree.setHeaderLabel('NAME')
self.setCentralWidget(self.tree)
self.number_total_class = 0
def _fill_root(self, node: qtw.QTreeWidgetItem, url: str, global_number: int, indent_level=0):
if global_number > 0 and self.number_total_class >= global_number:
return
QTest.qWait(1000)
indent = ' ' * indent_level
rs = requests.get(url)
root = BeautifulSoup(rs.content, 'html.parser')
name_class = root.select_one('.context > .title').text.split()[0]
inherited_children = get_inherited_children(url, root)
number_inherited_children = len(inherited_children)
if number_inherited_children > 0:
name_class = '{} ({})'.format(name_class, number_inherited_children)
print(indent + name_class + ':')
else:
print(indent + name_class)
item = qtw.QTreeWidgetItem([name_class])
if not node:
self.tree.addTopLevelItem(item)
else:
node.addChild(item)
node.setExpanded(True)
self.number_total_class += 1
for name, url in inherited_children:
self._fill_root(item, url, global_number, indent_level + 1)
def fill_tree(self, global_number=-1):
self.number_total_class = 0
self.tree.clear()
        t = time.perf_counter()   # time.clock() was removed in Python 3.8
self._fill_root(None, ROOT_URL, global_number)
qtw.QMessageBox.information(
self,
'Complete!',
            'Items: {}.\nElapsed: {:.3f} sec'.format(self.number_total_class, time.perf_counter() - t)
)
def closeEvent(self, e):
quit()
if __name__ == '__main__':
app = qtw.QApplication([])
w = MainWindow()
w.resize(500, 500)
w.show()
w.fill_tree()
app.exec()
| [((2221, 2241), 'PyQt5.QtWidgets.QApplication', 'qtw.QApplication', (['[]'], {}), '([])\n', (2237, 2241), True, 'from PyQt5 import QtWidgets as qtw\n'), ((456, 473), 'PyQt5.QtWidgets.QTreeWidget', 'qtw.QTreeWidget', ([], {}), '()\n', (471, 473), True, 'from PyQt5 import QtWidgets as qtw\n'), ((845, 862), 'PyQt5.QtTest.QTest.qWait', 'QTest.qWait', (['(1000)'], {}), '(1000)\n', (856, 862), False, 'from PyQt5.QtTest import QTest\n'), ((915, 932), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (927, 932), False, 'import requests\n'), ((948, 988), 'bs4.BeautifulSoup', 'BeautifulSoup', (['rs.content', '"""html.parser"""'], {}), "(rs.content, 'html.parser')\n", (961, 988), False, 'from bs4 import BeautifulSoup\n'), ((1094, 1127), 'console.get_inherited_children', 'get_inherited_children', (['url', 'root'], {}), '(url, root)\n', (1116, 1127), False, 'from console import get_inherited_children, ROOT_URL\n'), ((1425, 1458), 'PyQt5.QtWidgets.QTreeWidgetItem', 'qtw.QTreeWidgetItem', (['[name_class]'], {}), '([name_class])\n', (1444, 1458), True, 'from PyQt5 import QtWidgets as qtw\n'), ((1881, 1893), 'time.clock', 'time.clock', ([], {}), '()\n', (1891, 1893), False, 'import time\n'), ((2109, 2121), 'time.clock', 'time.clock', ([], {}), '()\n', (2119, 2121), False, 'import time\n')] |
kyle-ibm/op-test | common/OpTestASM.py | df8dbf8cbff1390668c22632052adb46ebf277c1 | #!/usr/bin/env python3
# encoding=utf8
# IBM_PROLOG_BEGIN_TAG
# This is an automatically generated prolog.
#
# $Source: op-test-framework/common/OpTestASM.py $
#
# OpenPOWER Automated Test Project
#
# Contributors Listed Below - COPYRIGHT 2017
# [+] International Business Machines Corp.
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
#
# IBM_PROLOG_END_TAG
'''
OpTestASM: Advanced System Management (FSP Web UI)
--------------------------------------------------
This class contains common functions that are useful for the
FSP ASM web pages. Some functionality is only accessible through
the FSP Web UI (such as progress codes), so we scrape it.
'''
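# Minimal usage sketch (illustrative only; the address and credentials below
# are placeholders, not values shipped with this project):
#
#   asm = OpTestASM("10.0.0.1", "admin", "admin")
#   asm.login()
#   print(asm.powerstat())     # scrape the raw power on/off status page
#   asm.clearlogs()            # clears all error/event log entries, then logs out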
import time
import subprocess
import os
import pexpect
import sys
import subprocess
from .OpTestConstants import OpTestConstants as BMC_CONST
from .OpTestError import OpTestError
import http.cookiejar
import urllib.request
import urllib.parse
import urllib.error
import re
import ssl
class OpTestASM:
def __init__(self, i_fspIP, i_fspUser, i_fspPasswd):
self.host_name = i_fspIP
self.user_name = i_fspUser
self.password = i_fspPasswd
self.url = "https://%s/cgi-bin/cgi?" % self.host_name
self.cj = http.cookiejar.CookieJar()
context = ssl.create_default_context()
context.check_hostname = False
context.verify_mode = ssl.CERT_NONE
opener = urllib.request.build_opener(urllib.request.HTTPSHandler(context=context))
opener.addheaders = [('User-agent', 'LTCTest')]
opener.add_handler(urllib.request.HTTPCookieProcessor(self.cj))
urllib.request.install_opener(opener)
self.setforms()
def setforms(self):
if "FW860" in self.ver():
self.hrdwr = 'p8'
self.frms = {'pwr': '59',
'dbg': '78',
'immpwroff': '32'}
else:
self.hrdwr = 'p7'
self.frms = {'pwr': '60',
'dbg': '79',
'immpwroff': '33'}
def getcsrf(self, form):
while True:
try:
myurl = urllib.request.urlopen(self.url+form, timeout=10)
except urllib.error.URLError:
time.sleep(2)
continue
break
out = myurl.read().decode("utf-8")
if 'CSRF_TOKEN' in out:
return re.findall('CSRF_TOKEN.*value=\'(.*)\'', out)[0]
else:
return '0'
def getpage(self, form):
myurl = urllib.request.urlopen(self.url+form, timeout=60)
return myurl.read().decode("utf-8")
def submit(self, form, param):
param['CSRF_TOKEN'] = self.getcsrf(form)
data = urllib.parse.urlencode(param).encode("utf-8")
req = urllib.request.Request(self.url+form, data)
return urllib.request.urlopen(req)
def login(self):
if not len(self.cj) == 0:
return True
param = {'user': self.user_name,
'password': self.password,
'login': 'Log in',
'lang': '0',
'CSRF_TOKEN': ''}
form = "form=2"
resp = self.submit(form, param)
count = 0
while count < 2:
if not len(self.cj) == 0:
break
# the login can quietly fail because the FSP has 'too many users' logged in,
# even though it actually doesn't. let's check to see if this is the case
# by trying a request.
if "Too many users" in self.getpage("form=2"):
raise OpTestError("FSP reports 'Too many users', FSP needs power cycle")
time.sleep(10)
self.submit(form, param)
msg = "Login failed with user:{0} and password:{1}".format(
self.user_name, self.password)
print(msg)
count += 1
if count == 2:
print(msg)
return False
return True
def logout(self):
param = {'submit': 'Log out',
'CSRF_TOKEN': ''}
form = "form=1"
self.submit(form, param)
def ver(self):
form = "form=1"
return self.getpage(form)
def execommand(self, cmd):
if not self.login():
raise OpTestError("Failed to login ASM page")
param = {'form': '16',
'exe': 'Execute',
'CSRF_TOKEN': '',
'cmd': cmd}
form = "form=16&frm=0"
self.submit(form, param)
def disablefirewall(self):
if not self.login():
raise OpTestError("Failed to login ASM page")
self.execommand('iptables -F')
self.logout()
def clearlogs(self):
if not self.login():
raise OpTestError("Failed to login ASM page")
param = {'form': '30',
'clear': "Clear all error/event log entries",
'CSRF_TOKEN': ''}
form = "form=30"
self.submit(form, param)
self.logout()
def powerstat(self):
form = "form=%s" % self.frms['pwr']
return self.getpage(form)
def start_debugvtty_session(self, partitionId='0', sessionId='0',
sessionTimeout='600'):
if not self.login():
raise OpTestError("Failed to login ASM page")
param = {'form': '81',
'p': partitionId,
's': sessionId,
't': sessionTimeout,
'Save settings': 'Save settings',
'CSRF_TOKEN': ''}
form = "form=81"
self.submit(form, param)
self.logout()
def enable_err_injct_policy(self):
if not self.login():
raise OpTestError("Failed to login ASM page")
param = {'form': '56',
'p': '1',
'submit': 'Save settings',
'CSRF_TOKEN': ''}
form = "form=56"
self.submit(form, param)
self.logout()
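# Illustrative usage sketch (host and credentials are assumptions, not values
# from this repository):
#
#     asm = OpTestASM('10.0.0.1', 'dev', 'FipSdev')
#     if asm.login():
#         print(asm.powerstat())  # scrape the power status form
#         asm.logout()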
| [((1747, 1775), 'ssl.create_default_context', 'ssl.create_default_context', ([], {}), '()\n', (1773, 1775), False, 'import ssl\n'), ((4191, 4205), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (4201, 4205), False, 'import time\n'), ((2895, 2938), 're.findall', 're.findall', (['"""CSRF_TOKEN.*value=\'(.*)\'"""', 'out'], {}), '("CSRF_TOKEN.*value=\'(.*)\'", out)\n', (2905, 2938), False, 'import re\n'), ((2744, 2757), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (2754, 2757), False, 'import time\n')] |
jrabasco/PyPasser | test/test_storage.py | 3cc6ecdfa9b5fe22f5a88c221517fe09d2df9db6 | #!/usr/bin/python3.4
__author__ = "Jeremy Rabasco"
import sys
import os
sys.path.append("..")
import unittest
from modules import storage
from modules.service import Service
from modules.database import Database
class TestStorage(unittest.TestCase):
def setUp(self):
self.service = Service()
self.database = Database()
open("test.service", "w+").close()
open("test.db", "w+").close()
def test_write_read_service(self):
self.service.service_name = "Hello"
self.service.username = "This"
self.service.password = "Works"
storage.write("test", self.service, "test.service")
service2 = Service()
storage.read("test", service2, "test.service")
self.assertEqual(service2.service_name, self.service.service_name)
self.assertEqual(service2.username, self.service.username)
self.assertEqual(service2.password, self.service.password)
def test_write_read_database(self):
self.database.add_service(Service())
self.database.add_service(Service())
self.database.name = "Hey"
storage.write("test", self.database, "test.db")
database2 = Database()
storage.read("test", database2, "test.db")
self.assertEqual(database2.name, self.database.name)
for i in range(len(self.database.services)):
self.assertEqual(database2.services[i].service_name, self.database.services[i].service_name)
self.assertEqual(database2.services[i].username, self.database.services[i].username)
self.assertEqual(database2.services[i].password, self.database.services[i].password)
def tearDown(self):
os.remove(os.getcwd() + "/test.service")
os.remove(os.getcwd() + "/test.db")
if __name__ == "__main__":
unittest.main() | [((73, 94), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (88, 94), False, 'import sys\n'), ((1809, 1824), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1822, 1824), False, 'import unittest\n'), ((298, 307), 'modules.service.Service', 'Service', ([], {}), '()\n', (305, 307), False, 'from modules.service import Service\n'), ((332, 342), 'modules.database.Database', 'Database', ([], {}), '()\n', (340, 342), False, 'from modules.database import Database\n'), ((596, 647), 'modules.storage.write', 'storage.write', (['"""test"""', 'self.service', '"""test.service"""'], {}), "('test', self.service, 'test.service')\n", (609, 647), False, 'from modules import storage\n'), ((667, 676), 'modules.service.Service', 'Service', ([], {}), '()\n', (674, 676), False, 'from modules.service import Service\n'), ((685, 731), 'modules.storage.read', 'storage.read', (['"""test"""', 'service2', '"""test.service"""'], {}), "('test', service2, 'test.service')\n", (697, 731), False, 'from modules import storage\n'), ((1116, 1163), 'modules.storage.write', 'storage.write', (['"""test"""', 'self.database', '"""test.db"""'], {}), "('test', self.database, 'test.db')\n", (1129, 1163), False, 'from modules import storage\n'), ((1184, 1194), 'modules.database.Database', 'Database', ([], {}), '()\n', (1192, 1194), False, 'from modules.database import Database\n'), ((1203, 1245), 'modules.storage.read', 'storage.read', (['"""test"""', 'database2', '"""test.db"""'], {}), "('test', database2, 'test.db')\n", (1215, 1245), False, 'from modules import storage\n'), ((1016, 1025), 'modules.service.Service', 'Service', ([], {}), '()\n', (1023, 1025), False, 'from modules.service import Service\n'), ((1061, 1070), 'modules.service.Service', 'Service', ([], {}), '()\n', (1068, 1070), False, 'from modules.service import Service\n'), ((1702, 1713), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1711, 1713), False, 'import os\n'), ((1751, 1762), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1760, 1762), False, 'import os\n')] |
lakhlaifi/RedHat-Ansible | virt/ansible-latest/lib/python2.7/site-packages/ansible/plugins/lookup/template.py | 27c5077cced9d416081fcd5d69ea44bca0317fa4 | # Copyright: (c) 2012, Michael DeHaan <[email protected]>
# Copyright: (c) 2012-17, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
lookup: template
author: Michael DeHaan <[email protected]>
version_added: "0.9"
short_description: retrieve contents of file after templating with Jinja2
description:
- Returns a list of strings; for each template in the list of templates you pass in, returns a string containing the results of processing that template.
options:
_terms:
description: list of files to template
convert_data:
type: bool
description: whether to convert YAML into data. If False, strings that are YAML will be left untouched.
variable_start_string:
description: The string marking the beginning of a print statement.
default: '{{'
version_added: '2.8'
type: str
variable_end_string:
description: The string marking the end of a print statement.
default: '}}'
version_added: '2.8'
type: str
"""
EXAMPLES = """
- name: show templating results
debug:
msg: "{{ lookup('template', './some_template.j2') }}"
- name: show templating results with different variable start and end string
debug:
msg: "{{ lookup('template', './some_template.j2', variable_start_string='[%', variable_end_string='%]') }}"
"""
RETURN = """
_raw:
description: file(s) content after templating
"""
import os
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
from ansible.module_utils._text import to_bytes, to_text
from ansible.template import generate_ansible_template_vars
from ansible.utils.display import Display
display = Display()
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
convert_data_p = kwargs.get('convert_data', True)
lookup_template_vars = kwargs.get('template_vars', {})
ret = []
variable_start_string = kwargs.get('variable_start_string', None)
variable_end_string = kwargs.get('variable_end_string', None)
for term in terms:
display.debug("File lookup term: %s" % term)
lookupfile = self.find_file_in_search_path(variables, 'templates', term)
display.vvvv("File lookup using %s as file" % lookupfile)
if lookupfile:
b_template_data, show_data = self._loader._get_file_contents(lookupfile)
template_data = to_text(b_template_data, errors='surrogate_or_strict')
# set jinja2 internal search path for includes
searchpath = variables.get('ansible_search_path', [])
if searchpath:
# our search paths aren't actually the proper ones for jinja includes.
# We want to search into the 'templates' subdir of each search path in
# addition to our original search paths.
newsearchpath = []
for p in searchpath:
newsearchpath.append(os.path.join(p, 'templates'))
newsearchpath.append(p)
searchpath = newsearchpath
searchpath.insert(0, os.path.dirname(lookupfile))
self._templar.environment.loader.searchpath = searchpath
if variable_start_string is not None:
self._templar.environment.variable_start_string = variable_start_string
if variable_end_string is not None:
self._templar.environment.variable_end_string = variable_end_string
# The template will have access to all existing variables,
# plus some added by ansible (e.g., template_{path,mtime}),
# plus anything passed to the lookup with the template_vars=
# argument.
vars = variables.copy()
vars.update(generate_ansible_template_vars(lookupfile))
vars.update(lookup_template_vars)
self._templar.set_available_variables(vars)
# do the templating
res = self._templar.template(template_data, preserve_trailing_newlines=True,
convert_data=convert_data_p, escape_backslashes=False)
ret.append(res)
else:
raise AnsibleError("the template file %s could not be found for the lookup" % term)
return ret
| [((1893, 1902), 'ansible.utils.display.Display', 'Display', ([], {}), '()\n', (1900, 1902), False, 'from ansible.utils.display import Display\n'), ((2658, 2712), 'ansible.module_utils._text.to_text', 'to_text', (['b_template_data'], {'errors': '"""surrogate_or_strict"""'}), "(b_template_data, errors='surrogate_or_strict')\n", (2665, 2712), False, 'from ansible.module_utils._text import to_bytes, to_text\n'), ((4578, 4655), 'ansible.errors.AnsibleError', 'AnsibleError', (["('the template file %s could not be found for the lookup' % term)"], {}), "('the template file %s could not be found for the lookup' % term)\n", (4590, 4655), False, 'from ansible.errors import AnsibleError\n'), ((3408, 3435), 'os.path.dirname', 'os.path.dirname', (['lookupfile'], {}), '(lookupfile)\n', (3423, 3435), False, 'import os\n'), ((4122, 4164), 'ansible.template.generate_ansible_template_vars', 'generate_ansible_template_vars', (['lookupfile'], {}), '(lookupfile)\n', (4152, 4164), False, 'from ansible.template import generate_ansible_template_vars\n'), ((3246, 3274), 'os.path.join', 'os.path.join', (['p', '"""templates"""'], {}), "(p, 'templates')\n", (3258, 3274), False, 'import os\n')] |
ripiuk/fant_sizer | setup.py | dcc0908c79ed76af3f4189ebd2a75cecf7a89e34 | from setuptools import setup, find_packages
from os.path import join, dirname
setup(
name="fant_sizer",
version="0.7",
author="Rypiuk Oleksandr",
author_email="[email protected]",
description="fant_sizer command-line file-information",
url="https://github.com/ripiuk/fant_sizer",
keywords="file command-line information size tool recursively",
license="MIT",
classifiers=[
'Topic :: Utilities',
'Environment :: Console',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 3.6'
],
packages=find_packages(),
long_description=open(join(dirname(__file__), "README.rst")).read(),
entry_points={
"console_scripts":
['fant_sizer = fant_sizer.fant_sizer:_main'],
},
)
| [((861, 876), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (874, 876), False, 'from setuptools import setup, find_packages\n'), ((913, 930), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (920, 930), False, 'from os.path import join, dirname\n')] |
elvisyjlin/google-code-jam | 2018/Round 1A/A.py | 7fe8244c5ae07a9896acf9c48f3a06b306b393b1 | def solve():
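    # Google Code Jam 2018 Round 1A "Waffle Choppers" (problem inferred from
    # the file path): given an R x C grid with '@' chips, decide whether H
    # horizontal and V vertical cuts can split it into (H+1)*(V+1) pieces that
    # all hold the same number of chips.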
# Read input
R, C, H, V = map(int, input().split())
choco = []
for _ in range(R):
choco.append([0] * C)
choco_row, choco_col = [0]*R, [0]*C
num_choco = 0
for i in range(R):
row = input()
for j in range(C):
if row[j] == '@':
choco_col[j] += 1
choco[i][j] = 1
choco_row[i] = row.count('@')
num_choco += choco_row[i]
# Find H and V cuts
if num_choco == 0:
return 'POSSIBLE'
H_idx, V_idx = [], []
flag = True
if num_choco%(H+1)==0 and num_choco%(V+1)==0:
num_choco_h = num_choco/(H+1)
num_choco_v = num_choco/(V+1)
accum = 0
for i, r in enumerate(choco_row):
accum += r
if accum == num_choco_h:
accum = 0
H_idx.append(i)
elif accum > num_choco_h:
flag = False
break
if not flag:
return 'IMPOSSIBLE'
accum = 0
for i, c in enumerate(choco_col):
accum += c
if accum == num_choco_v:
accum = 0
V_idx.append(i)
elif accum > num_choco_v:
flag = False
break
if not flag:
return 'IMPOSSIBLE'
else:
return 'IMPOSSIBLE'
# Check each piece
r_from = 0
num_prev = None
for r in H_idx:
c_from = 0
for c in V_idx:
num = 0
for i in range(r_from, r+1):
for j in range(c_from, c+1):
num += choco[i][j]
if num_prev is None:
num_prev = num
elif num_prev != num:
return 'IMPOSSIBLE'
c_from = c+1
r_from = r+1
return 'POSSIBLE'
if __name__ == '__main__':
T = int(input())
for t in range(T):
print('Case #{}: {}'.format(t+1, solve()))
| [] |
vinaymundada27/Hue | desktop/libs/liboozie/src/liboozie/submittion_tests.py | 7bffb33bbe7cfa34d340241c4ba3b19476211b2a | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from django.contrib.auth.models import User
from nose.plugins.attrib import attr
from nose.tools import assert_equal, assert_true, assert_not_equal
from hadoop import cluster, pseudo_hdfs4
from hadoop.conf import HDFS_CLUSTERS, MR_CLUSTERS, YARN_CLUSTERS
from liboozie.submittion import Submission
from oozie.tests import OozieMockBase
from desktop.lib.test_utils import clear_sys_caches
from desktop.lib.django_test_util import make_logged_in_client
LOG = logging.getLogger(__name__)
@attr('requires_hadoop')
def test_copy_files():
cluster = pseudo_hdfs4.shared_cluster()
try:
c = make_logged_in_client()
user = User.objects.get(username='test')
prefix = '/tmp/test_copy_files'
if cluster.fs.exists(prefix):
cluster.fs.rmtree(prefix)
# Jars in various locations
deployment_dir = '%s/workspace' % prefix
external_deployment_dir = '%s/deployment' % prefix
jar_1 = '%s/udf1.jar' % prefix
jar_2 = '%s/lib/udf2.jar' % prefix
jar_3 = '%s/udf3.jar' % deployment_dir
jar_4 = '%s/lib/udf4.jar' % deployment_dir # Never move
cluster.fs.mkdir(prefix)
cluster.fs.create(jar_1)
cluster.fs.create(jar_2)
cluster.fs.create(jar_3)
cluster.fs.create(jar_4)
class MockNode():
def __init__(self, jar_path):
self.jar_path = jar_path
class MockJob():
def __init__(self):
self.node_list = [
MockNode(jar_1),
MockNode(jar_2),
MockNode(jar_3),
MockNode(jar_4),
]
def get_application_filename(self):
return 'workflow.xml'
submission = Submission(user, job=MockJob(), fs=cluster.fs, jt=cluster.jt)
submission._copy_files(deployment_dir, "<xml>My XML</xml>")
submission._copy_files(external_deployment_dir, "<xml>My XML</xml>")
# All sources still there
assert_true(cluster.fs.exists(jar_1))
assert_true(cluster.fs.exists(jar_2))
assert_true(cluster.fs.exists(jar_3))
assert_true(cluster.fs.exists(jar_4))
deployment_dir = deployment_dir + '/lib'
external_deployment_dir = external_deployment_dir + '/lib'
list_dir_workspace = cluster.fs.listdir(deployment_dir)
list_dir_deployement = cluster.fs.listdir(external_deployment_dir)
# All destinations there
assert_true(cluster.fs.exists(deployment_dir + '/udf1.jar'), list_dir_workspace)
assert_true(cluster.fs.exists(deployment_dir + '/udf2.jar'), list_dir_workspace)
assert_true(cluster.fs.exists(deployment_dir + '/udf3.jar'), list_dir_workspace)
assert_true(cluster.fs.exists(deployment_dir + '/udf4.jar'), list_dir_workspace)
assert_true(cluster.fs.exists(external_deployment_dir + '/udf1.jar'), list_dir_deployement)
assert_true(cluster.fs.exists(external_deployment_dir + '/udf2.jar'), list_dir_deployement)
assert_true(cluster.fs.exists(external_deployment_dir + '/udf3.jar'), list_dir_deployement)
assert_true(cluster.fs.exists(external_deployment_dir + '/udf4.jar'), list_dir_deployement)
stats_udf1 = cluster.fs.stats(deployment_dir + '/udf1.jar')
stats_udf2 = cluster.fs.stats(deployment_dir + '/udf2.jar')
stats_udf3 = cluster.fs.stats(deployment_dir + '/udf3.jar')
stats_udf4 = cluster.fs.stats(deployment_dir + '/udf4.jar')
submission._copy_files('%s/workspace' % prefix, "<xml>My XML</xml>")
assert_not_equal(stats_udf1['fileId'], cluster.fs.stats(deployment_dir + '/udf1.jar')['fileId'])
assert_not_equal(stats_udf2['fileId'], cluster.fs.stats(deployment_dir + '/udf2.jar')['fileId'])
assert_not_equal(stats_udf3['fileId'], cluster.fs.stats(deployment_dir + '/udf3.jar')['fileId'])
assert_equal(stats_udf4['fileId'], cluster.fs.stats(deployment_dir + '/udf4.jar')['fileId'])
finally:
try:
cluster.fs.rmtree(prefix)
except:
LOG.exception('failed to remove %s' % prefix)
class MockFs():
def __init__(self, logical_name=None):
self.fs_defaultfs = 'hdfs://curacao:8020'
self.logical_name = logical_name if logical_name else ''
class MockJt():
def __init__(self, logical_name=None):
self.logical_name = logical_name if logical_name else ''
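# MockFs/MockJt are minimal stand-ins for the HDFS and JobTracker clients:
# they expose only the attributes Submission reads (fs_defaultfs and
# logical_name) when building job properties.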
class TestSubmission(OozieMockBase):
def test_get_properties(self):
submission = Submission(self.user, fs=MockFs())
assert_equal({}, submission.properties)
submission._update_properties('curacao:8032', '/deployment_dir')
assert_equal({
'jobTracker': 'curacao:8032',
'nameNode': 'hdfs://curacao:8020'
}, submission.properties)
def test_get_logical_properties(self):
submission = Submission(self.user, fs=MockFs(logical_name='fsname'), jt=MockJt(logical_name='jtname'))
assert_equal({}, submission.properties)
submission._update_properties('curacao:8032', '/deployment_dir')
assert_equal({
'jobTracker': 'jtname',
'nameNode': 'fsname'
}, submission.properties)
def test_update_properties(self):
finish = []
finish.append(MR_CLUSTERS.set_for_testing({'default': {}}))
finish.append(MR_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True))
finish.append(YARN_CLUSTERS.set_for_testing({'default': {}}))
finish.append(YARN_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True))
try:
properties = {
'user.name': 'hue',
'test.1': 'http://localhost/test?test1=test&test2=test',
'nameNode': 'hdfs://curacao:8020',
'jobTracker': 'jtaddress'
}
final_properties = properties.copy()
submission = Submission(None, properties=properties, oozie_id='test', fs=MockFs())
assert_equal(properties, submission.properties)
submission._update_properties('jtaddress', 'deployment-directory')
assert_equal(final_properties, submission.properties)
clear_sys_caches()
fs = cluster.get_hdfs()
jt = cluster.get_next_ha_mrcluster()[1]
final_properties = properties.copy()
final_properties.update({
'jobTracker': 'jtaddress',
'nameNode': fs.fs_defaultfs
})
submission = Submission(None, properties=properties, oozie_id='test', fs=fs, jt=jt)
assert_equal(properties, submission.properties)
submission._update_properties('jtaddress', 'deployment-directory')
assert_equal(final_properties, submission.properties)
finish.append(HDFS_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('namenode'))
finish.append(MR_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('jobtracker'))
clear_sys_caches()
fs = cluster.get_hdfs()
jt = cluster.get_next_ha_mrcluster()[1]
final_properties = properties.copy()
final_properties.update({
'jobTracker': 'jobtracker',
'nameNode': 'namenode'
})
submission = Submission(None, properties=properties, oozie_id='test', fs=fs, jt=jt)
assert_equal(properties, submission.properties)
submission._update_properties('jtaddress', 'deployment-directory')
assert_equal(final_properties, submission.properties)
finally:
clear_sys_caches()
for reset in finish:
reset()
def test_get_external_parameters(self):
xml = """
<workflow-app name="Pig" xmlns="uri:oozie:workflow:0.4">
<start to="Pig"/>
<action name="Pig">
<pig>
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<prepare>
<delete path="${output}"/>
</prepare>
<script>aggregate.pig</script>
<argument>-param</argument>
<argument>INPUT=${input}</argument>
<argument>-param</argument>
<argument>OUTPUT=${output}</argument>
<configuration>
<property>
<name>mapred.input.format.class</name>
<value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value>
</property>
</configuration>
</pig>
<ok to="end"/>
<error to="kill"/>
</action>
<kill name="kill">
<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
</kill>
<end name="end"/>
</workflow-app>
"""
properties = """
#
# Licensed to the Hue
#
nameNode=hdfs://localhost:8020
jobTracker=localhost:8021
queueName=default
examplesRoot=examples
oozie.use.system.libpath=true
oozie.wf.application.path=${nameNode}/user/${user.name}/${examplesRoot}/apps/pig
"""
parameters = Submission(self.user)._get_external_parameters(xml, properties)
assert_equal({'oozie.use.system.libpath': 'true',
'input': '',
'jobTracker': 'localhost:8021',
'oozie.wf.application.path': '${nameNode}/user/${user.name}/${examplesRoot}/apps/pig',
'examplesRoot': 'examples',
'output': '',
'nameNode': 'hdfs://localhost:8020',
'queueName': 'default'
},
parameters)
| [((1269, 1296), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1286, 1296), False, 'import logging\n'), ((1300, 1323), 'nose.plugins.attrib.attr', 'attr', (['"""requires_hadoop"""'], {}), "('requires_hadoop')\n", (1304, 1323), False, 'from nose.plugins.attrib import attr\n'), ((1359, 1388), 'hadoop.pseudo_hdfs4.shared_cluster', 'pseudo_hdfs4.shared_cluster', ([], {}), '()\n', (1386, 1388), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((1405, 1428), 'desktop.lib.django_test_util.make_logged_in_client', 'make_logged_in_client', ([], {}), '()\n', (1426, 1428), False, 'from desktop.lib.django_test_util import make_logged_in_client\n'), ((1440, 1473), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'username': '"""test"""'}), "(username='test')\n", (1456, 1473), False, 'from django.contrib.auth.models import User\n'), ((1519, 1544), 'hadoop.cluster.fs.exists', 'cluster.fs.exists', (['prefix'], {}), '(prefix)\n', (1536, 1544), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((1893, 1917), 'hadoop.cluster.fs.mkdir', 'cluster.fs.mkdir', (['prefix'], {}), '(prefix)\n', (1909, 1917), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((1922, 1946), 'hadoop.cluster.fs.create', 'cluster.fs.create', (['jar_1'], {}), '(jar_1)\n', (1939, 1946), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((1951, 1975), 'hadoop.cluster.fs.create', 'cluster.fs.create', (['jar_2'], {}), '(jar_2)\n', (1968, 1975), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((1980, 2004), 'hadoop.cluster.fs.create', 'cluster.fs.create', (['jar_3'], {}), '(jar_3)\n', (1997, 2004), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((2009, 2033), 'hadoop.cluster.fs.create', 'cluster.fs.create', (['jar_4'], {}), '(jar_4)\n', (2026, 2033), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((2952, 2986), 'hadoop.cluster.fs.listdir', 'cluster.fs.listdir', (['deployment_dir'], {}), '(deployment_dir)\n', (2970, 2986), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((3014, 3057), 'hadoop.cluster.fs.listdir', 'cluster.fs.listdir', (['external_deployment_dir'], {}), '(external_deployment_dir)\n', (3032, 3057), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((3831, 3877), 'hadoop.cluster.fs.stats', 'cluster.fs.stats', (["(deployment_dir + '/udf1.jar')"], {}), "(deployment_dir + '/udf1.jar')\n", (3847, 3877), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((3895, 3941), 'hadoop.cluster.fs.stats', 'cluster.fs.stats', (["(deployment_dir + '/udf2.jar')"], {}), "(deployment_dir + '/udf2.jar')\n", (3911, 3941), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((3959, 4005), 'hadoop.cluster.fs.stats', 'cluster.fs.stats', (["(deployment_dir + '/udf3.jar')"], {}), "(deployment_dir + '/udf3.jar')\n", (3975, 4005), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((4023, 4069), 'hadoop.cluster.fs.stats', 'cluster.fs.stats', (["(deployment_dir + '/udf4.jar')"], {}), "(deployment_dir + '/udf4.jar')\n", (4039, 4069), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((5080, 5119), 'nose.tools.assert_equal', 'assert_equal', (['{}', 'submission.properties'], {}), '({}, submission.properties)\n', (5092, 5119), False, 'from nose.tools import assert_equal, assert_true, assert_not_equal\n'), ((5195, 5301), 'nose.tools.assert_equal', 'assert_equal', (["{'jobTracker': 'curacao:8032', 'nameNode': 'hdfs://curacao:8020'}", 'submission.properties'], {}), "({'jobTracker': 'curacao:8032', 'nameNode':\n 'hdfs://curacao:8020'}, 
submission.properties)\n", (5207, 5301), False, 'from nose.tools import assert_equal, assert_true, assert_not_equal\n'), ((5477, 5516), 'nose.tools.assert_equal', 'assert_equal', (['{}', 'submission.properties'], {}), '({}, submission.properties)\n', (5489, 5516), False, 'from nose.tools import assert_equal, assert_true, assert_not_equal\n'), ((5592, 5680), 'nose.tools.assert_equal', 'assert_equal', (["{'jobTracker': 'jtname', 'nameNode': 'fsname'}", 'submission.properties'], {}), "({'jobTracker': 'jtname', 'nameNode': 'fsname'}, submission.\n properties)\n", (5604, 5680), False, 'from nose.tools import assert_equal, assert_true, assert_not_equal\n'), ((9331, 9644), 'nose.tools.assert_equal', 'assert_equal', (["{'oozie.use.system.libpath': 'true', 'input': '', 'jobTracker':\n 'localhost:8021', 'oozie.wf.application.path':\n '${nameNode}/user/${user.name}/${examplesRoot}/apps/pig',\n 'examplesRoot': 'examples', 'output': '', 'nameNode':\n 'hdfs://localhost:8020', 'queueName': 'default'}", 'parameters'], {}), "({'oozie.use.system.libpath': 'true', 'input': '', 'jobTracker':\n 'localhost:8021', 'oozie.wf.application.path':\n '${nameNode}/user/${user.name}/${examplesRoot}/apps/pig',\n 'examplesRoot': 'examples', 'output': '', 'nameNode':\n 'hdfs://localhost:8020', 'queueName': 'default'}, parameters)\n", (9343, 9644), False, 'from nose.tools import assert_equal, assert_true, assert_not_equal\n'), ((1552, 1577), 'hadoop.cluster.fs.rmtree', 'cluster.fs.rmtree', (['prefix'], {}), '(prefix)\n', (1569, 1577), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((2665, 2689), 'hadoop.cluster.fs.exists', 'cluster.fs.exists', (['jar_1'], {}), '(jar_1)\n', (2682, 2689), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((2707, 2731), 'hadoop.cluster.fs.exists', 'cluster.fs.exists', (['jar_2'], {}), '(jar_2)\n', (2724, 2731), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((2749, 2773), 'hadoop.cluster.fs.exists', 'cluster.fs.exists', (['jar_3'], {}), '(jar_3)\n', (2766, 2773), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((2791, 2815), 'hadoop.cluster.fs.exists', 'cluster.fs.exists', (['jar_4'], {}), '(jar_4)\n', (2808, 2815), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((3104, 3151), 'hadoop.cluster.fs.exists', 'cluster.fs.exists', (["(deployment_dir + '/udf1.jar')"], {}), "(deployment_dir + '/udf1.jar')\n", (3121, 3151), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((3189, 3236), 'hadoop.cluster.fs.exists', 'cluster.fs.exists', (["(deployment_dir + '/udf2.jar')"], {}), "(deployment_dir + '/udf2.jar')\n", (3206, 3236), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((3274, 3321), 'hadoop.cluster.fs.exists', 'cluster.fs.exists', (["(deployment_dir + '/udf3.jar')"], {}), "(deployment_dir + '/udf3.jar')\n", (3291, 3321), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((3359, 3406), 'hadoop.cluster.fs.exists', 'cluster.fs.exists', (["(deployment_dir + '/udf4.jar')"], {}), "(deployment_dir + '/udf4.jar')\n", (3376, 3406), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((3445, 3501), 'hadoop.cluster.fs.exists', 'cluster.fs.exists', (["(external_deployment_dir + '/udf1.jar')"], {}), "(external_deployment_dir + '/udf1.jar')\n", (3462, 3501), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((3541, 3597), 'hadoop.cluster.fs.exists', 'cluster.fs.exists', (["(external_deployment_dir + '/udf2.jar')"], {}), "(external_deployment_dir + '/udf2.jar')\n", (3558, 3597), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((3637, 3693), 
'hadoop.cluster.fs.exists', 'cluster.fs.exists', (["(external_deployment_dir + '/udf3.jar')"], {}), "(external_deployment_dir + '/udf3.jar')\n", (3654, 3693), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((3733, 3789), 'hadoop.cluster.fs.exists', 'cluster.fs.exists', (["(external_deployment_dir + '/udf4.jar')"], {}), "(external_deployment_dir + '/udf4.jar')\n", (3750, 3789), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((4572, 4597), 'hadoop.cluster.fs.rmtree', 'cluster.fs.rmtree', (['prefix'], {}), '(prefix)\n', (4589, 4597), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((5772, 5816), 'hadoop.conf.MR_CLUSTERS.set_for_testing', 'MR_CLUSTERS.set_for_testing', (["{'default': {}}"], {}), "({'default': {}})\n", (5799, 5816), False, 'from hadoop.conf import HDFS_CLUSTERS, MR_CLUSTERS, YARN_CLUSTERS\n'), ((5910, 5956), 'hadoop.conf.YARN_CLUSTERS.set_for_testing', 'YARN_CLUSTERS.set_for_testing', (["{'default': {}}"], {}), "({'default': {}})\n", (5939, 5956), False, 'from hadoop.conf import HDFS_CLUSTERS, MR_CLUSTERS, YARN_CLUSTERS\n'), ((6381, 6428), 'nose.tools.assert_equal', 'assert_equal', (['properties', 'submission.properties'], {}), '(properties, submission.properties)\n', (6393, 6428), False, 'from nose.tools import assert_equal, assert_true, assert_not_equal\n'), ((6508, 6561), 'nose.tools.assert_equal', 'assert_equal', (['final_properties', 'submission.properties'], {}), '(final_properties, submission.properties)\n', (6520, 6561), False, 'from nose.tools import assert_equal, assert_true, assert_not_equal\n'), ((6569, 6587), 'desktop.lib.test_utils.clear_sys_caches', 'clear_sys_caches', ([], {}), '()\n', (6585, 6587), False, 'from desktop.lib.test_utils import clear_sys_caches\n'), ((6599, 6617), 'hadoop.cluster.get_hdfs', 'cluster.get_hdfs', ([], {}), '()\n', (6615, 6617), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((6838, 6908), 'liboozie.submittion.Submission', 'Submission', (['None'], {'properties': 'properties', 'oozie_id': '"""test"""', 'fs': 'fs', 'jt': 'jt'}), "(None, properties=properties, oozie_id='test', fs=fs, jt=jt)\n", (6848, 6908), False, 'from liboozie.submittion import Submission\n'), ((6915, 6962), 'nose.tools.assert_equal', 'assert_equal', (['properties', 'submission.properties'], {}), '(properties, submission.properties)\n', (6927, 6962), False, 'from nose.tools import assert_equal, assert_true, assert_not_equal\n'), ((7042, 7095), 'nose.tools.assert_equal', 'assert_equal', (['final_properties', 'submission.properties'], {}), '(final_properties, submission.properties)\n', (7054, 7095), False, 'from nose.tools import assert_equal, assert_true, assert_not_equal\n'), ((7277, 7295), 'desktop.lib.test_utils.clear_sys_caches', 'clear_sys_caches', ([], {}), '()\n', (7293, 7295), False, 'from desktop.lib.test_utils import clear_sys_caches\n'), ((7307, 7325), 'hadoop.cluster.get_hdfs', 'cluster.get_hdfs', ([], {}), '()\n', (7323, 7325), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((7542, 7612), 'liboozie.submittion.Submission', 'Submission', (['None'], {'properties': 'properties', 'oozie_id': '"""test"""', 'fs': 'fs', 'jt': 'jt'}), "(None, properties=properties, oozie_id='test', fs=fs, jt=jt)\n", (7552, 7612), False, 'from liboozie.submittion import Submission\n'), ((7619, 7666), 'nose.tools.assert_equal', 'assert_equal', (['properties', 'submission.properties'], {}), '(properties, submission.properties)\n', (7631, 7666), False, 'from nose.tools import assert_equal, assert_true, assert_not_equal\n'), ((7746, 7799), 
'nose.tools.assert_equal', 'assert_equal', (['final_properties', 'submission.properties'], {}), '(final_properties, submission.properties)\n', (7758, 7799), False, 'from nose.tools import assert_equal, assert_true, assert_not_equal\n'), ((7819, 7837), 'desktop.lib.test_utils.clear_sys_caches', 'clear_sys_caches', ([], {}), '()\n', (7835, 7837), False, 'from desktop.lib.test_utils import clear_sys_caches\n'), ((4188, 4234), 'hadoop.cluster.fs.stats', 'cluster.fs.stats', (["(deployment_dir + '/udf1.jar')"], {}), "(deployment_dir + '/udf1.jar')\n", (4204, 4234), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((4289, 4335), 'hadoop.cluster.fs.stats', 'cluster.fs.stats', (["(deployment_dir + '/udf2.jar')"], {}), "(deployment_dir + '/udf2.jar')\n", (4305, 4335), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((4390, 4436), 'hadoop.cluster.fs.stats', 'cluster.fs.stats', (["(deployment_dir + '/udf3.jar')"], {}), "(deployment_dir + '/udf3.jar')\n", (4406, 4436), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((4487, 4533), 'hadoop.cluster.fs.stats', 'cluster.fs.stats', (["(deployment_dir + '/udf4.jar')"], {}), "(deployment_dir + '/udf4.jar')\n", (4503, 4533), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((6629, 6660), 'hadoop.cluster.get_next_ha_mrcluster', 'cluster.get_next_ha_mrcluster', ([], {}), '()\n', (6658, 6660), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((7337, 7368), 'hadoop.cluster.get_next_ha_mrcluster', 'cluster.get_next_ha_mrcluster', ([], {}), '()\n', (7366, 7368), False, 'from hadoop import cluster, pseudo_hdfs4\n'), ((9262, 9283), 'liboozie.submittion.Submission', 'Submission', (['self.user'], {}), '(self.user)\n', (9272, 9283), False, 'from liboozie.submittion import Submission\n')] |
Wenyuan-Vincent-Li/SSL_Seg_GAN | Training/train_baseHD.py | 8f6c45fd000ea12468dccf211b376fadbf4759c6 | import torch.nn as nn
import torch.optim as optim
import torch
import torch.utils.data
from Training import functions
from Training.imresize import imresize
import matplotlib.pyplot as plt
from Models.pix2pixHD_base import GANLoss, VGGLoss
from Models.pix2pixHD2 import mask2onehot
class Losses():
def __init__(self, opt):
self.criterionGAN = GANLoss(not opt.no_lsgan)
self.criterionFeat = nn.L1Loss()
if opt.contour:
self.crossEntropy = nn.BCEWithLogitsLoss()
else:
self.crossEntropy = nn.CrossEntropyLoss()
if not opt.no_vgg_loss:
self.criterionVGG = VGGLoss()
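# Loss roles: criterionGAN is the adversarial loss (LSGAN unless opt.no_lsgan);
# criterionFeat is the L1 feature-matching loss over the discriminator's
# intermediate activations; crossEntropy is the segmentation loss (BCE for
# binary contours, multi-class CE otherwise); criterionVGG is the pix2pixHD
# perceptual VGG loss.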
def train_single_scale(dataloader, netD, netG, netS, reals, Gs, Ss, in_s, in_s_S, NoiseAmp, NoiseAmpS, opt):
    '''
    :param dataloader: yields dicts with 'image', 'label' and their down-scaled variants
    :param netD: currD
    :param netG: currG
    :param netS: currS
    :param reals: a list of image pyramid shapes ## TODO: you can just pass image shape here
    :param Gs: list of prev netG
    :param Ss: list of prev netS
    :param in_s: generator warm-start input, 0 -> all zeros [1, 3, 26, 26]
    :param in_s_S: segmenter warm-start input, all zeros [1, label_nc, 26, 26]
    :param NoiseAmp: generator noise amplitudes, [] -> [1]
    :param NoiseAmpS: segmenter noise amplitudes, [] -> [1]
    :param opt: config
    :return:
    '''
loss = Losses(opt)
real = reals[opt.scale_num] # find the current level image xn
opt.nzx = real[0]
opt.nzy = real[1]
# z_opt = 0 ## dummy z_opt
alpha = opt.alpha
# setup optimizer
optimizerD = optim.Adam(netD.parameters(), lr=opt.lr_d, betas=(opt.beta1, 0.999))
optimizerG = optim.Adam(netG.parameters(), lr=opt.lr_g, betas=(opt.beta1, 0.999))
optimizerS = optim.Adam(netS.parameters(), lr=opt.lr_s, betas=(opt.beta1, 0.999))
schedulerD = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerD, milestones=[opt.niter * 0.8], gamma=opt.gamma)
schedulerG = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerG, milestones=[opt.niter * 0.8], gamma=opt.gamma)
schedulerS = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerS, milestones=[opt.niter * 0.8],
gamma=opt.gamma)
errD2plot = []
errG2plot = []
D_real2plot = []
D_fake2plot = []
for epoch in range(opt.niter): # niter = 2000
if Gs == [] and Ss == []:
noise_ = functions.generate_noise([1, opt.nzx, opt.nzy], opt.batchSize) # [None, 1, 32, 32]
noise_ = noise_.expand(opt.batchSize, 3, opt.nzx, opt.nzy)
            ## noise_: Gaussian noise fed to the generator to produce fake samples
else:
noise_ = functions.generate_noise([1, opt.nzx, opt.nzy], opt.batchSize)
for j, data in enumerate(dataloader):
data['image'] = data['image'].to(opt.device)
data['label'] = data['label'].long().to(opt.device)
############################
# (1) Update D network: maximize D(x) + D(G(z))
###########################
# train with real
netD.zero_grad()
pred_real = netD(data['image'], data['label'][:,0:1,...])
loss_D_real = loss.criterionGAN(pred_real, True)
D_x = loss_D_real.item()
# train with fake
if (j == 0) & (epoch == 0): # first iteration training in this level
if Gs == [] and Ss == []:
prev = torch.full([opt.batchSize, opt.nc_z, opt.nzx, opt.nzy], 0, device=opt.device)
in_s = prev # full of 0 [None, 3, 32, 32]
prev_S = torch.full([opt.batchSize, opt.label_nc, opt.nzx, opt.nzy], 0, device=opt.device)
in_s_S = prev_S # full of 0 [None, 4, 32, 32]
mask = data['label'][:,0:1,...]
opt.noise_amp = opt.noise_amp_init
opt.noise_amp_S = opt.noise_amp_init
else:
prev = draw_concat(Gs, data['down_scale_label'], reals, NoiseAmp, in_s, 'generator', opt)
                ## given a new noise, prev is an image generated by the previous Generator with bilinear upsampling [1, 3, 33, 33]
criterion = nn.MSELoss()
RMSE = torch.sqrt(criterion(data['image'], prev))
opt.noise_amp = opt.noise_amp_init * RMSE
prev_S = draw_concat(Ss, data['down_scale_image'], reals, NoiseAmpS, in_s_S, 'segment', opt) ## prob with [None, 4, 32, 32]
onehot_label = mask2onehot(data['label'][:,0:1,...], opt.label_nc)
RMSE_S = torch.sqrt(criterion(onehot_label, prev_S))
# RMSE_S = 0
opt.noise_amp_S = opt.noise_amp_init * RMSE_S
mask = data['label'][:,0:1,...]
else:
prev = draw_concat(Gs, data['down_scale_label'], reals, NoiseAmp, in_s, 'generator', opt)
prev_S = draw_concat(Ss, data['down_scale_image'], reals, NoiseAmpS, in_s_S, 'segment', opt)
mask = data['label'][:,0:1,...]
if Gs == []:
                noise = noise_ ## Gaussian noise for generating image [None, 3, 42, 42]
else:
                noise = opt.noise_amp * noise_ + prev ## [None, 3, 43, 43] the new noise equals the previously generated image plus the Gaussian noise.
            fake = netG(noise.detach(), prev, mask) # [None, 3, 32, 32] the same size as the input image
            # detach() makes sure that the gradients don't flow into the noise.
            # prev: [None, 3, 42, 42] -> [None, 3, 43, 43]; first step prev = 0, afterwards prev = an image generated by the previous Generator with bilinear upsampling
pred_fake = netD(fake.detach(), data['label'][:,0:1,...]) # output shape [1, 1, 16, 16] -> [1, 1, 23, 23]
# print(len(pred_fake), len(pred_fake[0]))
loss_D_fake = loss.criterionGAN(pred_fake, False)
D_G_z = loss_D_fake.item()
# segment_logit, segment_mask = netS(data['image'], mask2onehot(prev_S, opt.label_nc))
# print(data['image'].shape, onehot.shape)
# print(epoch, j)
segment_logit, segment_prob, segment_mask = netS(data['image'], prev_S.detach())
pred_fake_S = netD(data['image'], segment_prob.detach())
loss_D_fake_S = loss.criterionGAN(pred_fake_S, False)
D_S_z = loss_D_fake_S.item()
errD = (loss_D_real + 0.5 * loss_D_fake + 0.5 * loss_D_fake_S) ## Todo: figure out a proper coefficient
errD.backward()
optimizerD.step()
errD2plot.append(errD.detach()) ## errD for each iteration
############################
# (2) Update G network: maximize D(G(z))
###########################
netG.zero_grad()
pred_fake = netD(fake, data['label'][:,0:1,...])
loss_G_GAN = 0.5 * loss.criterionGAN(pred_fake, True)
# GAN feature matching loss
loss_G_GAN_Feat = 0
if not opt.no_ganFeat_loss:
feat_weights = 4.0 / (opt.n_layers_D + 1)
D_weights = 1.0 / opt.num_D
for i in range(opt.num_D):
for j in range(len(pred_fake[i]) - 1):
loss_G_GAN_Feat += D_weights * feat_weights * \
loss.criterionFeat(pred_fake[i][j],
pred_real[i][j].detach()) * opt.lambda_feat
# VGG feature matching loss
loss_G_VGG = 0
if not opt.no_vgg_loss:
loss_G_VGG = loss.criterionVGG(fake, data['image']) * opt.lambda_feat
## reconstruction loss
if alpha != 0: ## alpha = 10 calculate the reconstruction loss
Recloss = nn.MSELoss()
rec_loss = alpha * Recloss(fake, data['image'])
else:
rec_loss = 0
errG = loss_G_GAN + loss_G_GAN_Feat + loss_G_VGG + rec_loss
errG.backward()
optimizerG.step()
############################
# (3) Update S network: maximize D(S(z))
###########################
netS.zero_grad()
pred_fake_S = netD(data['image'], segment_prob)
loss_G_GAN_S = 0.03 * loss.criterionGAN(pred_fake_S, True)
# Segmentation loss
if opt.contour:
loss_G_Seg = loss.crossEntropy(segment_logit, data['label'].float())
else:
loss_G_Seg = loss.crossEntropy(segment_prob, torch.squeeze(data['label'][:,0:1,...], dim =1))
# GAN feature matching loss
loss_G_GAN_Feat_S = 0
if not opt.no_ganFeat_loss:
feat_weights = 4.0 / (opt.n_layers_D + 1)
D_weights = 1.0 / opt.num_D
for i in range(opt.num_D):
for j in range(len(pred_fake_S[i]) - 1):
loss_G_GAN_Feat_S += D_weights * feat_weights * \
loss.criterionFeat(pred_fake_S[i][j],
pred_real[i][j].detach()) * opt.lambda_feat
errS = loss_G_GAN_S + loss_G_GAN_Feat_S + loss_G_Seg
errS.backward()
optimizerS.step()
## for every epoch, do the following:
errG2plot.append(errG.detach()) ## ErrG for each iteration
D_real2plot.append(D_x) ## discriminator loss on real
D_fake2plot.append(D_G_z + D_S_z) ## discriminator loss on fake
if epoch % 25 == 0 or epoch == (opt.niter - 1):
print('scale %d:[%d/%d]' % (opt.scale_num, epoch, opt.niter))
if epoch % 25 == 0 or epoch == (opt.niter - 1):
plt.imsave('%s/fake_sample_%d.png' % (opt.outf, epoch),
functions.convert_image_np(fake.detach()), vmin=0, vmax=1)
plt.imsave('%s/fake_sample_real_%d.png' % (opt.outf, epoch),
functions.convert_image_np(data['image']), vmin=0, vmax=1)
plt.imsave('%s/fake_sample_mask_%d.png' % (opt.outf, epoch),
functions.convert_mask_np(data['label'][:,0:1,...], num_classes= opt.label_nc))
plt.imsave('%s/segmentation_mask_%d.png' % (opt.outf, epoch),
functions.convert_mask_np(segment_mask.detach(), num_classes=opt.label_nc))
schedulerD.step()
schedulerG.step()
schedulerS.step()
    functions.save_networks(netG, netD, netS, opt) ## save netG, netD, netS; opt is used to parse the output path
return in_s, in_s_S, netG, netS
def draw_concat(Gs, masks, reals, NoiseAmp, in_s, mode, opt):
'''
:param Gs: [G0]
    :param masks: [down-scaled masks]
:param reals: [image pyramid] only used to represent the image shape
:param NoiseAmp: [1]
:param in_s: all zeros [1, 3, 26, 26]
    :param mode: 'generator' or 'segment'
:param opt:
:return:
'''
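    # 'generator' mode replays the already-trained coarser generators to build
    # the upsampled image prior; 'segment' mode replays the coarser segmenters
    # to build the upsampled label-probability prior.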
G_z = in_s[:opt.batchSize, :, :, :] # [None, 3, 26, 26] all zeros, image input for the corest level
if len(Gs) > 0:
if mode == 'generator':
count = 0
for G, mask, real_curr, real_next, noise_amp in zip(Gs, masks, reals, reals[1:], NoiseAmp):
if count == 0:
z = functions.generate_noise([1, real_curr[0], real_curr[1]],
opt.batchSize)
z = z.expand(opt.batchSize, G_z.shape[1], z.shape[2], z.shape[3])
else:
z = functions.generate_noise(
[opt.nc_z, real_curr[0], real_curr[1]], opt.batchSize)
G_z = G_z[:, :, 0:real_curr[0], 0:real_curr[1]] ## G_z [None, 3, 32, 32]
z_in = noise_amp * z + G_z
G_z = G(z_in.detach(), G_z, mask) ## [1, 3, 26, 26] output of previous generator
G_z = imresize(G_z, real_next[1] / real_curr[1], opt)
G_z = G_z[:, :, 0:real_next[0],
0:real_next[1]] ## resize the image to be compatible with current G [1, 3, 33, 33]
count += 1
elif mode == 'segment':
count = 0
for G, mask, real_curr, real_next, noise_amp in zip(Gs, masks, reals, reals[1:], NoiseAmp):
G_z = G_z[:, :, 0:real_curr[0], 0:real_curr[1]] ## G_z [None, 3, 32, 32]
_, G_z, _ = G(mask, G_z) ## [1, 3, 26, 26] output of previous generator
if opt.contour:
G_z = torch.cat((G_z, 1-G_z), 1)
G_z = imresize(G_z, real_next[1] / real_curr[1], opt)
G_z = G_z[:, :, 0:real_next[0],
0:real_next[1]] ## resize the image to be compatible with current G [1, 3, 33, 33]
count += 1
return G_z
| [((10379, 10425), 'Training.functions.save_networks', 'functions.save_networks', (['netG', 'netD', 'netS', 'opt'], {}), '(netG, netD, netS, opt)\n', (10402, 10425), False, 'from Training import functions\n'), ((343, 368), 'Models.pix2pixHD_base.GANLoss', 'GANLoss', (['(not opt.no_lsgan)'], {}), '(not opt.no_lsgan)\n', (350, 368), False, 'from Models.pix2pixHD_base import GANLoss, VGGLoss\n'), ((398, 409), 'torch.nn.L1Loss', 'nn.L1Loss', ([], {}), '()\n', (407, 409), True, 'import torch.nn as nn\n'), ((466, 488), 'torch.nn.BCEWithLogitsLoss', 'nn.BCEWithLogitsLoss', ([], {}), '()\n', (486, 488), True, 'import torch.nn as nn\n'), ((535, 556), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (554, 556), True, 'import torch.nn as nn\n'), ((621, 630), 'Models.pix2pixHD_base.VGGLoss', 'VGGLoss', ([], {}), '()\n', (628, 630), False, 'from Models.pix2pixHD_base import GANLoss, VGGLoss\n'), ((2172, 2234), 'Training.functions.generate_noise', 'functions.generate_noise', (['[1, opt.nzx, opt.nzy]', 'opt.batchSize'], {}), '([1, opt.nzx, opt.nzy], opt.batchSize)\n', (2196, 2234), False, 'from Training import functions\n'), ((2431, 2493), 'Training.functions.generate_noise', 'functions.generate_noise', (['[1, opt.nzx, opt.nzy]', 'opt.batchSize'], {}), '([1, opt.nzx, opt.nzy], opt.batchSize)\n', (2455, 2493), False, 'from Training import functions\n'), ((7670, 7682), 'torch.nn.MSELoss', 'nn.MSELoss', ([], {}), '()\n', (7680, 7682), True, 'import torch.nn as nn\n'), ((9887, 9928), 'Training.functions.convert_image_np', 'functions.convert_image_np', (["data['image']"], {}), "(data['image'])\n", (9913, 9928), False, 'from Training import functions\n'), ((10042, 10128), 'Training.functions.convert_mask_np', 'functions.convert_mask_np', (["data['label'][:, 0:1, (...)]"], {'num_classes': 'opt.label_nc'}), "(data['label'][:, 0:1, (...)], num_classes=opt.\n label_nc)\n", (10067, 10128), False, 'from Training import functions\n'), ((11804, 11851), 'Training.imresize.imresize', 'imresize', (['G_z', '(real_next[1] / real_curr[1])', 'opt'], {}), '(G_z, real_next[1] / real_curr[1], opt)\n', (11812, 11851), False, 'from Training.imresize import imresize\n'), ((3990, 4002), 'torch.nn.MSELoss', 'nn.MSELoss', ([], {}), '()\n', (4000, 4002), True, 'import torch.nn as nn\n'), ((4314, 4369), 'Models.pix2pixHD2.mask2onehot', 'mask2onehot', (["data['label'][:, 0:1, (...)]", 'opt.label_nc'], {}), "(data['label'][:, 0:1, (...)], opt.label_nc)\n", (4325, 4369), False, 'from Models.pix2pixHD2 import mask2onehot\n'), ((11192, 11264), 'Training.functions.generate_noise', 'functions.generate_noise', (['[1, real_curr[0], real_curr[1]]', 'opt.batchSize'], {}), '([1, real_curr[0], real_curr[1]], opt.batchSize)\n', (11216, 11264), False, 'from Training import functions\n'), ((11446, 11525), 'Training.functions.generate_noise', 'functions.generate_noise', (['[opt.nc_z, real_curr[0], real_curr[1]]', 'opt.batchSize'], {}), '([opt.nc_z, real_curr[0], real_curr[1]], opt.batchSize)\n', (11470, 11525), False, 'from Training import functions\n'), ((12477, 12524), 'Training.imresize.imresize', 'imresize', (['G_z', '(real_next[1] / real_curr[1])', 'opt'], {}), '(G_z, real_next[1] / real_curr[1], opt)\n', (12485, 12524), False, 'from Training.imresize import imresize\n')] |
0xreza/tvm | tests/python/unittest/test_tir_pass_inject_double_buffer.py | f08d5d78ee000b2c113ac451f8d73817960eafd5 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import tvm
from tvm import te
def test_double_buffer():
dtype = 'int64'
n = 100
m = 4
tx = te.thread_axis("threadIdx.x")
ib = tvm.tir.ir_builder.create()
A = ib.pointer("float32", name="A")
C = ib.pointer("float32", name="C")
ib.scope_attr(tx, "thread_extent", 1)
with ib.for_range(0, n) as i:
B = ib.allocate("float32", m, name="B", scope="shared")
with ib.new_scope():
ib.scope_attr(B.asobject(), "double_buffer_scope", 1)
with ib.for_range(0, m) as j:
B[j] = A[i * 4 + j]
with ib.for_range(0, m) as j:
C[j] = B[j] + 1
stmt = ib.get()
stmt = tvm.tir.ir_pass.InjectDoubleBuffer(stmt, 2)
stmt = tvm.tir.ir_pass.Simplify(stmt)
assert isinstance(stmt.body.body, tvm.tir.Allocate)
assert stmt.body.body.extents[0].value == 2
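    # The double-buffer pass rewrote B as a ping-pong buffer: the shared
    # allocation gained a leading dimension of extent 2, one slot per
    # in-flight iteration.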
mod = tvm.IRModule({
"db" : tvm.tir.PrimFunc([A.asobject(), C.asobject()], stmt)
})
f = tvm.tir.transform.ThreadSync("shared")(mod)["db"]
count = [0]
def count_sync(op):
if isinstance(op, tvm.tir.Call) and op.name == "tvm_storage_sync":
count[0] += 1
tvm.tir.ir_pass.PostOrderVisit(f.body, count_sync)
assert count[0] == 4
if __name__ == "__main__":
test_double_buffer()
| [((893, 922), 'tvm.te.thread_axis', 'te.thread_axis', (['"""threadIdx.x"""'], {}), "('threadIdx.x')\n", (907, 922), False, 'from tvm import te\n'), ((932, 959), 'tvm.tir.ir_builder.create', 'tvm.tir.ir_builder.create', ([], {}), '()\n', (957, 959), False, 'import tvm\n'), ((1451, 1494), 'tvm.tir.ir_pass.InjectDoubleBuffer', 'tvm.tir.ir_pass.InjectDoubleBuffer', (['stmt', '(2)'], {}), '(stmt, 2)\n', (1485, 1494), False, 'import tvm\n'), ((1506, 1536), 'tvm.tir.ir_pass.Simplify', 'tvm.tir.ir_pass.Simplify', (['stmt'], {}), '(stmt)\n', (1530, 1536), False, 'import tvm\n'), ((1944, 1994), 'tvm.tir.ir_pass.PostOrderVisit', 'tvm.tir.ir_pass.PostOrderVisit', (['f.body', 'count_sync'], {}), '(f.body, count_sync)\n', (1974, 1994), False, 'import tvm\n'), ((1749, 1787), 'tvm.tir.transform.ThreadSync', 'tvm.tir.transform.ThreadSync', (['"""shared"""'], {}), "('shared')\n", (1777, 1787), False, 'import tvm\n')] |
shivupoojar/openfaas-pi | read_sensor.py | 5eda501368a1ac321954cb2aaf58be617977bd58 | import requests
from sense_hat import SenseHat
import smbus
import time
import sys  # needed: sys.exit() is called in the KeyboardInterrupt handler
while True:
try:
        pressure = 0
        sense = SenseHat()
        pressure = sense.get_pressure()
        data = {'pressure': pressure}
print(pressure)
        # send an HTTP request to the 'sensor' serverless function with the
        # pressure data
        r = requests.post('http://127.0.0.1:8080/function/sensor', data)
        print(r.text)
        sense = SenseHat()
sense.show_message(r.text)
except KeyboardInterrupt:
sys.exit()
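# Flow: read barometric pressure from the Sense HAT, POST it to the local
# OpenFaaS 'sensor' function, and scroll the function's reply on the LED
# matrix; loops until interrupted with Ctrl-C.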
| [] |
highfestiva/life | trabantsim/prototypes/space_invaders.py | b05b592502d72980ab55e13e84330b74a966f377 | #!/usr/bin/env python3
# Space Invadersishkebab.
from trabant import *
# ASCII geometries.
shipascii = r'''
/\
/XXXXXXXX\
v v
'''
invader = r'''
/XXXXXX\
/XXXXXXXX\
XXXXXXXXXX
XX XX XX
\XXXXXXXX/
/XX XX\
/X/ \/ \X\
X/ \X
'''
cam(distance=250)
gravity((0,0,0))
ship = create_ascii_object(shipascii, pos=(0,0,-100), col='#070')
shots = []
invaderspeeds,isi = [(25,0,0), (0,0,-10), (-25,0,0), (0,0,-10)],0
invaders = set()
for y in range(2):
for x in range(8):
invaders.add(create_ascii_object(invader, pos=(x*25-130,0,100-y*20), col=rndvec().abs(), physmesh=True))
for invader in invaders:
invader.vel(invaderspeeds[0])
while loop():
# Steering.
vel = keydir()*50 + tapdir(ship.pos())*4
ship.vel((vel.x,0,0)) # Only move in X.
# Shooting.
is_tap_close = taps() and tapdir(ship.pos()).x < 3
is_shooting = 'Space' in keys() or 'LCtrl' in keys() or is_tap_close
if is_shooting and timeout(0.7, first_hit=True):
shots += [create_sphere(ship.pos()+vec3(0,0,10), vel=(0,0,200), col='#fff')]
sound(sound_bang, shots[-1].pos())
# Run invaders.
if timeout(3, timer='invaders'):
isi = (isi+1)%len(invaderspeeds)
[i.vel(invaderspeeds[isi]) for i in invaders]
# Check collisions, make explosions.
for o in collided_objects():
if o in invaders:
invaders.remove(o)
explode(o.pos(),o.vel(),5)
elif o == ship:
while loop():
pass
o.release()
| [] |
Shang-XH/BAFTT | model/backbone/xception.py | 62392325342f48b8a89f0c2bf71e48026dd90629 | import math
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.model_zoo as model_zoo
from model.sync_batchnorm.batchnorm import SynchronizedBatchNorm2d
def fixed_padding(inputs, kernel_size, dilation):
kernel_size_effective = kernel_size + (kernel_size - 1) * (dilation - 1)
pad_total = kernel_size_effective - 1
pad_beg = pad_total // 2
pad_end = pad_total - pad_beg
padded_inputs = F.pad(inputs, (pad_beg, pad_end, pad_beg, pad_end))
return padded_inputs
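# Worked example for fixed_padding: kernel_size=3, dilation=2 gives
# kernel_size_effective = 3 + (3 - 1) * (2 - 1) = 5, so pad_total = 4 and the
# input is padded by 2 on each side; a stride-1 conv then keeps spatial size.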
class SeparableConv2d(nn.Module):
def __init__(self, inplanes, planes, kernel_size=3, stride=1, dilation=1, bias=False, BatchNorm=None):
super(SeparableConv2d, self).__init__()
self.conv1 = nn.Conv2d(inplanes, inplanes, kernel_size, stride, 0, dilation,
groups=inplanes, bias=bias)
self.bn = BatchNorm(inplanes)
self.pointwise = nn.Conv2d(inplanes, planes, 1, 1, 0, 1, 1, bias=bias)
def forward(self, x):
x = fixed_padding(x, self.conv1.kernel_size[0], dilation=self.conv1.dilation[0])
x = self.conv1(x)
x = self.bn(x)
x = self.pointwise(x)
return x
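# Shape sketch (illustrative, not from the original source): with the default
# stride 1 the fixed padding preserves spatial size, e.g.
#   sep = SeparableConv2d(32, 64, BatchNorm=nn.BatchNorm2d)
#   sep(torch.rand(1, 32, 56, 56)).shape  # -> torch.Size([1, 64, 56, 56])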
class Block(nn.Module):
def __init__(self, inplanes, planes, reps, stride=1, dilation=1, BatchNorm=None,
start_with_relu=True, grow_first=True, is_last=False):
super(Block, self).__init__()
if planes != inplanes or stride != 1:
self.skip = nn.Conv2d(inplanes, planes, 1, stride=stride, bias=False)
self.skipbn = BatchNorm(planes)
else:
self.skip = None
self.relu = nn.ReLU(inplace=True)
rep = []
filters = inplanes
if grow_first:
rep.append(self.relu)
rep.append(SeparableConv2d(inplanes, planes, 3, 1, dilation, BatchNorm=BatchNorm))
rep.append(BatchNorm(planes))
filters = planes
for i in range(reps - 1):
rep.append(self.relu)
rep.append(SeparableConv2d(filters, filters, 3, 1, dilation, BatchNorm=BatchNorm))
rep.append(BatchNorm(filters))
if not grow_first:
rep.append(self.relu)
rep.append(SeparableConv2d(inplanes, planes, 3, 1, dilation, BatchNorm=BatchNorm))
rep.append(BatchNorm(planes))
if stride != 1:
rep.append(self.relu)
rep.append(SeparableConv2d(planes, planes, 3, 2, BatchNorm=BatchNorm))
rep.append(BatchNorm(planes))
if stride == 1 and is_last:
rep.append(self.relu)
rep.append(SeparableConv2d(planes, planes, 3, 1, BatchNorm=BatchNorm))
rep.append(BatchNorm(planes))
if not start_with_relu:
rep = rep[1:]
self.rep = nn.Sequential(*rep)
def forward(self, inp):
x = self.rep(inp)
if self.skip is not None:
skip = self.skip(inp)
skip = self.skipbn(skip)
else:
skip = inp
x = x + skip
return x
class AlignedXception(nn.Module):
"""
    Modified Aligned Xception
"""
def __init__(self, output_stride, BatchNorm,
pretrained=True):
super(AlignedXception, self).__init__()
if output_stride == 16:
entry_block3_stride = 2
middle_block_dilation = 1
exit_block_dilations = (1, 2)
elif output_stride == 8:
entry_block3_stride = 1
middle_block_dilation = 2
exit_block_dilations = (2, 4)
else:
raise NotImplementedError
# Entry flow
self.conv1 = nn.Conv2d(3, 32, 3, stride=2, padding=1, bias=False)
self.bn1 = BatchNorm(32)
self.relu = nn.ReLU(inplace=True)
self.conv2 = nn.Conv2d(32, 64, 3, stride=1, padding=1, bias=False)
self.bn2 = BatchNorm(64)
self.block1 = Block(64, 128, reps=2, stride=2, BatchNorm=BatchNorm, start_with_relu=False)
self.block2 = Block(128, 256, reps=2, stride=2, BatchNorm=BatchNorm, start_with_relu=False,
grow_first=True)
self.block3 = Block(256, 728, reps=2, stride=entry_block3_stride, BatchNorm=BatchNorm,
start_with_relu=True, grow_first=True, is_last=True)
# Middle flow
self.block4 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block5 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block6 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block7 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block8 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block9 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block10 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block11 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block12 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block13 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block14 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block15 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block16 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block17 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block18 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block19 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
# Exit flow
self.block20 = Block(728, 1024, reps=2, stride=1, dilation=exit_block_dilations[0],
BatchNorm=BatchNorm, start_with_relu=True, grow_first=False, is_last=True)
self.conv3 = SeparableConv2d(1024, 1536, 3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm)
self.bn3 = BatchNorm(1536)
self.conv4 = SeparableConv2d(1536, 1536, 3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm)
self.bn4 = BatchNorm(1536)
self.conv5 = SeparableConv2d(1536, 2048, 3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm)
self.bn5 = BatchNorm(2048)
# Init weights
self._init_weight()
# Load pretrained model
if pretrained:
self._load_pretrained_model()
def forward(self, x):
# Entry flow
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.conv2(x)
x = self.bn2(x)
x = self.relu(x)
x = self.block1(x)
# add relu here
x = self.relu(x)
low_level_feat = x
x = self.block2(x)
x = self.block3(x)
# Middle flow
x = self.block4(x)
x = self.block5(x)
x = self.block6(x)
x = self.block7(x)
x = self.block8(x)
x = self.block9(x)
x = self.block10(x)
x = self.block11(x)
x = self.block12(x)
x = self.block13(x)
x = self.block14(x)
x = self.block15(x)
x = self.block16(x)
x = self.block17(x)
x = self.block18(x)
x = self.block19(x)
# Exit flow
x = self.block20(x)
x = self.relu(x)
x = self.conv3(x)
x = self.bn3(x)
x = self.relu(x)
x = self.conv4(x)
x = self.bn4(x)
x = self.relu(x)
x = self.conv5(x)
x = self.bn5(x)
x = self.relu(x)
return x, low_level_feat
def _init_weight(self):
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
elif isinstance(m, SynchronizedBatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
def _load_pretrained_model(self):
pretrain_dict = model_zoo.load_url('http://data.lip6.fr/cadene/pretrainedmodels/xception-b5690688.pth')
model_dict = {}
state_dict = self.state_dict()
for k, v in pretrain_dict.items():
if k in model_dict:
if 'pointwise' in k:
v = v.unsqueeze(-1).unsqueeze(-1)
if k.startswith('block11'):
model_dict[k] = v
model_dict[k.replace('block11', 'block12')] = v
model_dict[k.replace('block11', 'block13')] = v
model_dict[k.replace('block11', 'block14')] = v
model_dict[k.replace('block11', 'block15')] = v
model_dict[k.replace('block11', 'block16')] = v
model_dict[k.replace('block11', 'block17')] = v
model_dict[k.replace('block11', 'block18')] = v
model_dict[k.replace('block11', 'block19')] = v
elif k.startswith('block12'):
model_dict[k.replace('block12', 'block20')] = v
elif k.startswith('bn3'):
model_dict[k] = v
model_dict[k.replace('bn3', 'bn4')] = v
elif k.startswith('conv4'):
model_dict[k.replace('conv4', 'conv5')] = v
elif k.startswith('bn4'):
model_dict[k.replace('bn4', 'bn5')] = v
else:
model_dict[k] = v
state_dict.update(model_dict)
self.load_state_dict(state_dict)
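# Note on the key remapping above (added for clarity): the pretrained Xception
# checkpoint has 12 middle-flow blocks, while this aligned variant uses 16, so
# block11's weights seed blocks 12-19 and the old exit-flow block12 becomes
# block20; pointwise conv weights also gain trailing singleton dimensions to
# match the SeparableConv2d layout used here.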
if __name__ == "__main__":
import torch
model = AlignedXception(BatchNorm=nn.BatchNorm2d, pretrained=True, output_stride=16)
input = torch.rand(1, 3, 512, 512)
output, low_level_feat = model(input)
print(output.size())
    print(low_level_feat.size())

IrinaMBejan/Autonom | Backend/autonomus/utils/mail.py | 4a97da1b26ed22e3ec8bb939359148765392b692
from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import Mail, Substitution
API_KEY = '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'
API_KEY_ID = 'egd1yywWRbeVF2gcGhTH2Q'
ENCODING = "utf-8"
DEFAULT_MAIL="[email protected]"
def link(urlsafe):
return "https://develop-dot-autonomus.appspot.com/events/details?event_id=" + urlsafe
def send_newsletter(users, event1, event2):
for user in users:
send_mail(DEFAULT_MAIL, user.username, user.email, event1, event2)
def send_mail(from_mail, username, to_mails, event1, event2):
message = Mail(
from_email=from_mail,
to_emails=to_mails
)
message.dynamic_template_data = {
'name': username,
'title1' : event1.title,
'src1' : link(event1.urlsafe),
'loc1': event1.location,
'date1': event1.date.strftime('%d-%m-%Y %H:%M'),
'title2' : event2.title,
'src2' : link(event2.urlsafe),
'loc2': event2.location,
'date2': event2.date.strftime('%d-%m-%Y %H:%M')
}
print('before')
message.template_id = 'd-6607926b2aba4f8fba984dccdaa9ece6'
client = SendGridAPIClient(API_KEY)
response = client.send(message)
code = response.status_code
print('after')
was_successful = lambda ret_code: ret_code // 100 in (2, 3)
if not was_successful(code):
raise Exception("Couldn't send e-mail: {} {}".format(code, response.body))

percygautam/yellowbrick | yellowbrick/features/pca.py | 1ba6774a257bc85768a990293790caf4c14a5653
# -*- coding: utf-8 -*-
# yellowbrick.features.pca
# Decomposition based feature visualization with PCA.
#
# Author: Carlo Morales
# Author: Raúl Peralta Lozada
# Author: Benjamin Bengfort
# Created: Tue May 23 18:34:27 2017 -0400
#
# Copyright (C) 2017 The scikit-yb developers
# For license information, see LICENSE.txt
#
# ID: pca.py [] [email protected] $
"""
Decomposition based feature visualization with PCA.
"""
##########################################################################
## Imports
##########################################################################
# NOTE: must import mplot3d to load the 3D projection
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable
from yellowbrick.style import palettes
from yellowbrick.features.projection import ProjectionVisualizer
from yellowbrick.exceptions import YellowbrickValueError, NotFitted
from sklearn.pipeline import Pipeline
from sklearn.decomposition import PCA as PCATransformer
from sklearn.preprocessing import StandardScaler
from sklearn.exceptions import NotFittedError
##########################################################################
# 2D and 3D PCA Visualizer
##########################################################################
class PCA(ProjectionVisualizer):
"""
Produce a two or three dimensional principal component plot of a data array
projected onto its largest sequential principal components. It is common
practice to scale the data array ``X`` before applying a PC decomposition.
Variable scaling can be controlled using the ``scale`` argument.
Parameters
----------
ax : matplotlib Axes, default: None
The axes to plot the figure on. If None is passed in, the current axes
will be used (or generated if required).
features : list, default: None
The names of the features specified by the columns of the input dataset.
        The length of this list must match the number of columns in X, otherwise
an exception will be raised on ``fit()``.
classes : list, default: None
The class labels for each class in y, ordered by sorted class index. These
names act as a label encoder for the legend, identifying integer classes
or renaming string labels. If omitted, the class labels will be taken from
the unique values in y.
Note that the length of this list must match the number of unique values in
y, otherwise an exception is raised. This parameter is only used in the
discrete target type case and is ignored otherwise.
scale : bool, default: True
        Boolean that indicates if the user wants to scale the data.
projection : int or string, default: 2
The number of axes to project into, either 2d or 3d. To plot 3d plots
with matplotlib, please ensure a 3d axes is passed to the visualizer,
otherwise one will be created using the current figure.
proj_features : bool, default: False
Boolean that indicates if the user wants to project the features
in the projected space. If True the plot will be similar to a biplot.
colors : list or tuple, default: None
A single color to plot all instances as or a list of colors to color each
instance according to its class in the discrete case or as an ordered
colormap in the sequential case. If not enough colors per class are
specified then the colors are treated as a cycle.
colormap : string or cmap, default: None
The colormap used to create the individual colors. In the discrete case
it is used to compute the number of colors needed for each class and
in the continuous case it is used to create a sequential color map based
on the range of the target.
alpha : float, default: 0.75
Specify a transparency where 1 is completely opaque and 0 is completely
transparent. This property makes densely clustered points more visible.
random_state : int, RandomState instance or None, optional (default None)
This parameter sets the random state on this solver. If the input X is
larger than 500x500 and the number of components to extract is lower
than 80% of the smallest dimension of the data, then the more efficient
`randomized` solver is enabled.
colorbar : bool, default: True
        If the target_type is "continuous" draw a colorbar to the right of the
        scatter plot. The colorbar axes is accessible using the cax property.
heatmap : bool, default: False
Add a heatmap showing contribution of each feature in the principal components.
        Also draws a colorbar for readability purposes. The heatmap is accessible
        using the lax property and the colorbar using the uax property.
kwargs : dict
Keyword arguments that are passed to the base class and may influence
the visualization as defined in other Visualizers.
Attributes
----------
pca_components_ : ndarray, shape (n_features, n_components)
        This describes the magnitude of each feature in the principal components.
This is primarily used to draw the biplots.
classes_ : ndarray, shape (n_classes,)
The class labels that define the discrete values in the target. Only
available if the target type is discrete. This is guaranteed to be
strings even if the classes are a different type.
features_ : ndarray, shape (n_features,)
The names of the features discovered or used in the visualizer that
can be used as an index to access or modify data in X. If a user passes
feature names in, those features are used. Otherwise the columns of a
DataFrame are used or just simply the indices of the data array.
range_ : (min y, max y)
A tuple that describes the minimum and maximum values in the target.
Only available if the target type is continuous.
Examples
--------
>>> from sklearn import datasets
>>> iris = datasets.load_iris()
>>> X = iris.data
>>> y = iris.target
>>> visualizer = PCA()
>>> visualizer.fit_transform(X, y)
>>> visualizer.show()
"""
def __init__(
self,
ax=None,
features=None,
classes=None,
scale=True,
projection=2,
proj_features=False,
colors=None,
colormap=None,
alpha=0.75,
random_state=None,
colorbar=True,
heatmap=False,
**kwargs
):
super(PCA, self).__init__(
ax=ax,
features=features,
classes=classes,
colors=colors,
colormap=colormap,
projection=projection,
alpha=alpha,
colorbar=colorbar,
**kwargs
)
# Data Parameters
self.scale = scale
self.proj_features = proj_features
# Create the PCA transformer
self.pca_transformer = Pipeline(
[
("scale", StandardScaler(with_std=self.scale)),
("pca", PCATransformer(self.projection, random_state=random_state)),
]
)
self.alpha = alpha
# Visual Parameters
self.heatmap = heatmap
self._uax, self._lax = None, None
        # No heatmap can be drawn with 3d plots as they do not permit axes
        # division.
if self.projection == 3 and self.heatmap:
raise YellowbrickValueError(
"heatmap and colorbar are not compatible with 3d projections"
)
@property
def uax(self):
"""
        The axes of the colorbar, at the bottom of the scatter plot. This is
        the colorbar for the heatmap, not the one for the scatter plot.
"""
if self._uax is None:
raise AttributeError("This visualizer does not have an axes for colorbar")
return self._uax
@property
def lax(self):
"""
The axes of the heatmap below scatter plot.
"""
if self._lax is None:
raise AttributeError("This visualizer does not have an axes for heatmap")
return self._lax
def layout(self, divider=None):
"""
Creates the layout for colorbar and heatmap, adding new axes for the heatmap
if necessary and modifying the aspect ratio. Does not modify the axes or the
layout if ``self.heatmap`` is ``False`` or ``None``.
Parameters
----------
divider: AxesDivider
An AxesDivider to be passed among all layout calls.
"""
# Ensure matplotlib version compatibility
if make_axes_locatable is None:
raise YellowbrickValueError(
(
"heatmap requires matplotlib 2.0.2 or greater "
"please upgrade matplotlib or set heatmap=False on the visualizer"
)
)
# Create the new axes for the colorbar and heatmap
if divider is None:
divider = make_axes_locatable(self.ax)
# Call to super class ensures that a colorbar is drawn when target is
# continuous.
super(PCA, self).layout(divider)
if self.heatmap:
# Axes for colorbar(for heatmap).
if self._uax is None:
self._uax = divider.append_axes("bottom", size="10%", pad=0.7)
# Axes for heatmap
if self._lax is None:
self._lax = divider.append_axes("bottom", size="15%", pad=0.5)
def fit(self, X, y=None, **kwargs):
"""
Fits the PCA transformer, transforms the data in X, then draws the
decomposition in either 2D or 3D space as a scatter plot.
Parameters
----------
X : ndarray or DataFrame of shape n x m
A matrix of n instances with m features.
y : ndarray or Series of length n
An array or series of target or class values.
Returns
-------
self : visualizer
Returns self for use in Pipelines.
"""
# Call super fit to compute features, classes, colors, etc.
super(PCA, self).fit(X=X, y=y, **kwargs)
self.pca_transformer.fit(X)
self.pca_components_ = self.pca_transformer.named_steps["pca"].components_
return self
def transform(self, X, y=None, **kwargs):
"""
Calls the internal `transform` method of the scikit-learn PCA transformer, which
performs a dimensionality reduction on the input features ``X``. Next calls the
``draw`` method of the Yellowbrick visualizer, finally returning a new array of
transformed features of shape ``(len(X), projection)``.
Parameters
----------
X : ndarray or DataFrame of shape n x m
A matrix of n instances with m features.
y : ndarray or Series of length n
An array or series of target or class values.
Returns
-------
Xp : ndarray or DataFrame of shape n x m
Returns a new array-like object of transformed features of shape
``(len(X), projection)``.
"""
try:
Xp = self.pca_transformer.transform(X)
self.draw(Xp, y)
return Xp
except NotFittedError:
raise NotFitted.from_estimator(self, "transform")
def draw(self, Xp, y):
"""
Plots a scatterplot of points that represented the decomposition,
`pca_features_`, of the original features, `X`, projected into either 2 or
3 dimensions.
If 2 dimensions are selected, a colorbar and heatmap can also be optionally
included to show the magnitude of each feature value to the component.
Parameters
----------
Xp : array-like of shape (n, 2) or (n, 3)
The matrix produced by the ``transform()`` method.
y : array-like of shape (n,), optional
The target, used to specify the colors of the points.
Returns
-------
self.ax : matplotlib Axes object
Returns the axes that the scatter plot was drawn on.
"""
# Call to super draw which draws the scatter plot.
super(PCA, self).draw(Xp, y)
if self.proj_features:
# Draws projection features in transformed space.
self._draw_projection_features(Xp, y)
if self.projection == 2:
if self.heatmap:
if not self.colormap:
self.colormap = palettes.DEFAULT_SEQUENCE
# TODO: change to pcolormesh instead of imshow per #615 spec
im = self.lax.imshow(
self.pca_components_,
interpolation="none",
cmap=self.colormap,
aspect="auto",
)
plt.colorbar(
im,
cax=self.uax,
orientation="horizontal",
ticks=[self.pca_components_.min(), 0, self.pca_components_.max()],
)
return self.ax
def _draw_projection_features(self, Xp, y):
"""
Draw the projection of features in the transformed space.
Parameters
----------
Xp : array-like of shape (n, 2) or (n, 3)
The matrix produced by the ``transform()`` method.
y : array-like of shape (n,), optional
The target, used to specify the colors of the points.
Returns
-------
self.ax : matplotlib Axes object
Returns the axes that the scatter plot was drawn on.
"""
x_vector = self.pca_components_[0]
y_vector = self.pca_components_[1]
max_x = max(Xp[:, 0])
max_y = max(Xp[:, 1])
if self.projection == 2:
for i in range(self.pca_components_.shape[1]):
self.ax.arrow(
x=0,
y=0,
dx=x_vector[i] * max_x,
dy=y_vector[i] * max_y,
color="r",
head_width=0.05,
width=0.005,
)
self.ax.text(
x_vector[i] * max_x * 1.05,
y_vector[i] * max_y * 1.05,
self.features_[i],
color="r",
)
elif self.projection == 3:
z_vector = self.pca_components_[2]
max_z = max(Xp[:, 1])
for i in range(self.pca_components_.shape[1]):
self.ax.plot(
[0, x_vector[i] * max_x],
[0, y_vector[i] * max_y],
[0, z_vector[i] * max_z],
color="r",
)
self.ax.text(
x_vector[i] * max_x * 1.05,
y_vector[i] * max_y * 1.05,
z_vector[i] * max_z * 1.05,
self.features_[i],
color="r",
)
else:
raise YellowbrickValueError("Projection dimensions must be either 2 or 3")
return self.ax
def finalize(self, **kwargs):
"""
Draws the title, labels, legends, heatmap, and colorbar as specified by the
keyword arguments.
"""
super(PCA, self).finalize()
self.ax.set_title("Principal Component Plot")
self.ax.set_xlabel("$PC_1$")
self.ax.set_ylabel("$PC_2$")
if self.projection == 3:
self.ax.set_zlabel("$PC_3$")
        if self.heatmap:
self.lax.set_xticks(np.arange(-0.5, len(self.features_)))
self.lax.set_xticklabels([])
# Makes the labels centered.
self.lax.set_xticks(np.arange(0, len(self.features_)), minor=True)
self.lax.set_xticklabels(
self.features_, rotation=90, fontsize=12, minor=True
)
self.lax.set_yticks(np.arange(0.5, 2))
self.lax.set_yticklabels(["$PC_1$", "$PC_2$"], va="bottom", fontsize=10)
self.fig.tight_layout()
##########################################################################
## Quick Method
##########################################################################
def pca_decomposition(
X,
y=None,
ax=None,
features=None,
classes=None,
scale=True,
projection=2,
proj_features=False,
colors=None,
colormap=None,
alpha=0.75,
random_state=None,
colorbar=True,
heatmap=False,
show=True,
**kwargs
):
"""
Produce a two or three dimensional principal component plot of the data array ``X``
projected onto its largest sequential principal components. It is common practice
to scale the data array ``X`` before applying a PC decomposition. Variable scaling
can be controlled using the ``scale`` argument.
Parameters
----------
X : ndarray or DataFrame of shape n x m
A matrix of n instances with m features.
y : ndarray or Series of length n
An array or series of target or class values.
ax : matplotlib Axes, default: None
The axes to plot the figure on. If None is passed in, the current axes
will be used (or generated if required).
features : list, default: None
The names of the features specified by the columns of the input dataset.
        The length of this list must match the number of columns in X, otherwise
an exception will be raised on ``fit()``.
classes : list, default: None
The class labels for each class in y, ordered by sorted class index. These
names act as a label encoder for the legend, identifying integer classes
or renaming string labels. If omitted, the class labels will be taken from
the unique values in y.
Note that the length of this list must match the number of unique values in
y, otherwise an exception is raised. This parameter is only used in the
discrete target type case and is ignored otherwise.
scale : bool, default: True
        Boolean that indicates if the user wants to scale the data.
projection : int or string, default: 2
The number of axes to project into, either 2d or 3d. To plot 3d plots
with matplotlib, please ensure a 3d axes is passed to the visualizer,
otherwise one will be created using the current figure.
proj_features : bool, default: False
Boolean that indicates if the user wants to project the features
in the projected space. If True the plot will be similar to a biplot.
colors : list or tuple, default: None
A single color to plot all instances as or a list of colors to color each
instance according to its class in the discrete case or as an ordered
colormap in the sequential case. If not enough colors per class are
specified then the colors are treated as a cycle.
colormap : string or cmap, default: None
The colormap used to create the individual colors. In the discrete case
it is used to compute the number of colors needed for each class and
in the continuous case it is used to create a sequential color map based
on the range of the target.
alpha : float, default: 0.75
Specify a transparency where 1 is completely opaque and 0 is completely
transparent. This property makes densely clustered points more visible.
random_state : int, RandomState instance or None, optional (default None)
This parameter sets the random state on this solver. If the input X is
larger than 500x500 and the number of components to extract is lower
than 80% of the smallest dimension of the data, then the more efficient
`randomized` solver is enabled.
colorbar : bool, default: True
        If the target_type is "continuous" draw a colorbar to the right of the
        scatter plot. The colorbar axes is accessible using the cax property.
heatmap : bool, default: False
Add a heatmap showing contribution of each feature in the principal components.
        Also draws a colorbar for readability purposes. The heatmap is accessible
        using the lax property and the colorbar using the uax property.
show : bool, default: True
        If True, calls ``show()``, which in turn calls ``plt.show()``; however,
        you cannot call ``plt.savefig`` from this signature, nor
        ``clear_figure``. If False, simply calls ``finalize()``.
kwargs : dict
Keyword arguments that are passed to the base class and may influence
the visualization as defined in other Visualizers.
Attributes
----------
pca_components_ : ndarray, shape (n_features, n_components)
        This describes the magnitude of each feature in the principal components.
This is primarily used to draw the biplots.
classes_ : ndarray, shape (n_classes,)
The class labels that define the discrete values in the target. Only
available if the target type is discrete. This is guaranteed to be
strings even if the classes are a different type.
features_ : ndarray, shape (n_features,)
The names of the features discovered or used in the visualizer that
can be used as an index to access or modify data in X. If a user passes
feature names in, those features are used. Otherwise the columns of a
DataFrame are used or just simply the indices of the data array.
range_ : (min y, max y)
A tuple that describes the minimum and maximum values in the target.
Only available if the target type is continuous.
Examples
--------
>>> from sklearn import datasets
>>> iris = datasets.load_iris()
>>> X = iris.data
>>> y = iris.target
>>> pca_decomposition(X, y, colors=['r', 'g', 'b'], projection=3)
"""
# Instantiate the visualizer
visualizer = PCA(
ax=ax,
features=features,
scale=scale,
projection=projection,
proj_features=proj_features,
colors=colors,
colormap=colormap,
alpha=alpha,
random_state=random_state,
colorbar=colorbar,
heatmap=heatmap,
**kwargs
)
# Fit and transform the visualizer (calls draw)
visualizer.fit(X, y)
visualizer.transform(X, y)
if show:
visualizer.show()
else:
visualizer.finalize()
# Returns the visualizer object.
return visualizer
# Alias for PCA
PCADecomposition = PCA
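# A minimal usage sketch (assumes scikit-learn's iris dataset; draws a 2D
# biplot with the feature-contribution heatmap enabled):
#     from sklearn import datasets
#     iris = datasets.load_iris()
#     viz = PCA(proj_features=True, heatmap=True)
#     viz.fit_transform(iris.data, iris.target)
#     viz.show()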

Jarvan-Wang/k2 | k2/python/host/k2host/properties.py | 7f164ecb804d15006fd30e8564d80e0fa212f011
# Copyright (c) 2020 Xiaomi Corporation (author: Haowen Qiu)
# See ../../../LICENSE for clarification regarding multiple authors
import torch
from torch.utils.dlpack import to_dlpack
from .fsa import Fsa
from _k2host import _is_valid
from _k2host import _is_top_sorted
from _k2host import _is_arc_sorted
from _k2host import _has_self_loops
from _k2host import _is_acyclic
from _k2host import _is_deterministic
from _k2host import _is_epsilon_free
from _k2host import _is_connected
from _k2host import _is_empty
def is_valid(fsa: Fsa) -> bool:
return _is_valid(fsa.get_base())
def is_top_sorted(fsa: Fsa) -> bool:
return _is_top_sorted(fsa.get_base())
def is_arc_sorted(fsa: Fsa) -> bool:
return _is_arc_sorted(fsa.get_base())
def has_self_loops(fsa: Fsa) -> bool:
return _has_self_loops(fsa.get_base())
def is_acyclic(fsa: Fsa) -> bool:
return _is_acyclic(fsa.get_base())
def is_deterministic(fsa: Fsa) -> bool:
return _is_deterministic(fsa.get_base())
def is_epsilon_free(fsa: Fsa) -> bool:
return _is_epsilon_free(fsa.get_base())
def is_connected(fsa: Fsa) -> bool:
return _is_connected(fsa.get_base())
def is_empty(fsa: Fsa) -> bool:
return _is_empty(fsa.get_base())
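# A minimal usage sketch (assumes an Fsa instance built elsewhere via k2host):
#     fsa = ...  # k2host.Fsa
#     if is_valid(fsa) and is_arc_sorted(fsa):
#         pass  # safe to run algorithms that expect arc-sorted input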

cmd410/OrigamiBot | origamibot/core/teletypes/poll_option.py | 03667d069f0c0b088671936ce36bf8f85a029b93
from .base import TelegramStructure, Field
class PollOption(TelegramStructure):
text = Field()
voter_count = Field()
def __init__(self,
text: str,
voter_count: int
):
self.text = \
Field(text, [str])
self.voter_count = \
Field(voter_count, [int])

xiki-tempula/spack | var/spack/repos/builtin/packages/r-viridislite/package.py | 9d66c05e93ab8a933fc59915040c0e0c86a4aac4
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RViridislite(RPackage):
"""viridisLite: Default Color Maps from 'matplotlib' (Lite Version)"""
homepage = "https://github.com/sjmgarnier/viridisLite"
url = "https://cloud.r-project.org/src/contrib/viridisLite_0.2.0.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/viridisLite"
version('0.3.0', sha256='780ea12e7c4024d5ba9029f3a107321c74b8d6d9165262f6e64b79e00aa0c2af')
version('0.2.0', sha256='2d4d909f21c51e720bd685f05041ba158294e0a4064e0946d0bd916709818694')
depends_on('[email protected]:', type=('build', 'run'))

AnanyaRamanA/shiSock | shiSock-0.2.0/test_two/PySock/server.py | 51efb0eba17eb106b9480598d278536ddd7732c3
from re import S
import select
import socket
import queue
import threading
import sys
import pickle
import base64
import os
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
from cryptography.hazmat.primitives.serialization import load_ssh_public_key
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.backends import default_backend
import hashlib
import yaml
import random
import time
class IPNC():
def __init__(self):
pass
def _read_yml(self,file = None):
with open(file) as file:
documents = yaml.full_load(file)
return documents
def _write_yml(self,file = None, dict_data = None,mode = "a+"):
with open(file, mode) as file:
yaml.dump(dict_data, file)
def _add_node(self,file = None, node = None):
try:
read = self._read_yml(file)
if read != None:
read[node[0]]
self._change_node_value(file,node)
else:
raise KeyError
except KeyError:
node_dict = {
node[0] : node[1]
}
self._write_yml(file, node_dict)
def _change_node_value(self,file = None, node = None):
r_yml = self._read_yml(file)
r_yml[node[0]] = node[1]
self._write_yml(file = file, dict_data = r_yml, mode = "w")
def _get_node(self,file = None, key = None, wait = True):
if key == None:
return self._read_yml(file)
if wait:
while True:
r_yml = self._read_yml(file)
try:
value = r_yml[key]
return value
except KeyError:
pass
except TypeError:
pass
else:
r_yml = self._read_yml(file)
try:
value = r_yml[key]
return value
except KeyError:
return None
except TypeError:
pass
def _remove_node(self,file,node):
try:
r_yml = self._read_yml(file = file)
r_yml[node]
r_yml.pop(node)
self._write_yml(file = file, dict_data = r_yml, mode = "w")
except KeyError:
return False
except:
pass
def _name_generator(self,_len_ = 16, onlyText = False):
lower_case = list("abcdefghijklmnopqrstuvwxyz")
upper_case = list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')
special = list("!@#$%&*?")
number = list("0123456789")
if onlyText:
_all_ = lower_case + upper_case
else:
_all_ = lower_case + upper_case + special + number
random.shuffle(_all_)
return "".join(random.sample(_all_,_len_))
class DSP():
def __init__(
self,
msg : str = None,
DSP_type : str = None,
device_id : int = None,
universalAesKey : bytes = None,
nonce : bytes = None,
aad : str = None,
):
if msg is not None:
self.msg = msg
else:
self.msg = msg
self.DSP_type = DSP_type
self.device_id = device_id
if universalAesKey is not None:
self.UNIVERSAL_AES_KEY = universalAesKey
else:
self.UNIVERSAL_AES_KEY = b't\x89\xcc\x87\xcca\xe8\xfb\x06\xed\xcf+\x0eVB\xd2\xd3\xbeMk\xfa\xd1J\xa7\xc8@\xf8\x05\x0f\xfc\x18\x00'
if nonce is not None:
self.NONCE = nonce
else:
self.NONCE = b'\xfe\x1e1\xc0\xfc`s\xbc6\x9fQ\xb2'
if aad is not None:
self.AAD = aad
else:
self.AAD = b"au$tica&tedbut@u32nencr#cdscypteddatafdrj"
def _messanger(self,MSG = None):
if MSG is not None:
self.msg = MSG
data = f'DSP("{self.msg}","{self.DSP_type}")'
data = pickle.dumps(data)
pickled_data = data
encrypted_data = [self.device_id, self.__encrypt(pickled_data)]
p_e_d = pickle.dumps(encrypted_data)
ret = base64.b64encode(p_e_d)
return ret
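    # Wire format note (added for clarity): _messanger() pickles a DSP
    # constructor string, AES-GCM encrypts it, wraps the result as
    # [device_id, ciphertext], pickles that list and base64-encodes it;
    # _convert_to_class() below reverses the same steps.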
def __repr__(self):
return "_main.DSP._"
def __encrypt(self,data):
aesgcm = AESGCM(self.UNIVERSAL_AES_KEY,)
ct = aesgcm.encrypt(
self.NONCE,
data,
self.AAD
)
return ct
def _convert_to_class(self,OBJECT : bytes = None,secure : bool = True, secure_dict : list = None):
try:
OBJECT = base64.b64decode(OBJECT)
OBJECT = pickle.loads(OBJECT)
if secure == True:
if secure_dict is None:
raise TypeError(
"convert_to_class() missing 1 required positional argument: 'secure_lst'")
else:
secure_dict = pickle.loads(base64.b64decode(secure_dict))
aesgcm = AESGCM(secure_dict["aes_key"])
ct = aesgcm.decrypt(
secure_dict["nonce"], OBJECT[-1], secure_dict["aad"])
ct = pickle.loads(ct)
return eval(ct)
else:
aesgcm = AESGCM(self.UNIVERSAL_AES_KEY)
ct = aesgcm.decrypt(self.NONCE, OBJECT[-1], self.AAD)
ct = pickle.loads(ct)
return eval(ct)
except TypeError:
sys.exit()
except ValueError:
print("sender has not done the handshake")
class MAIN(IPNC):
def __init__(self,secure : bool = True,file = None):
"""async_server initializer class that will create the a asyncronouse tcp server.
"""
IPNC.__init__(self)
self.__secure = secure
self.__file_location = file
self.READABLE = []
self.WRITABLE = []
self.INPUTS = []
self.OUTPUTS = []
self.MESSAGE_QUEUES = {}
self.REQUEST_LIST = []
self.REQUEST_RESPONSE_LIST = []
self.MESSAGE_LIST = []
self.__VARIFIED_DEVICES = []
self.__CLIENT_KEYS = {}
self.__CUSTOM_CHANNEL = []
self.__CUSTOM_CHANNEL_MSG_REC = []
self.__CUSTOM_CHANNEL_MSG_SEND = []
self.__VARIFIER_LIST = []
self.__CALLBACK_LOOP = []
self.__RECEIVING_MSG = []
get = self._get_node(file = self.__file_location,key = hashlib.sha256(bytes("key", "utf-8")).digest(), wait = False)
if get is not None:
self.__CLIENT_KEYS = get
self.__VARIFIED_DEVICES.extend(list(get.keys()))
def SERVER(self,address : str = None, port : int = None, listeners : int = None):
self.address = address
self.port = port
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.setsockopt( socket.SOL_SOCKET, socket.SO_REUSEADDR, 1 )
self.sock.setblocking(0)
self.sock.bind((self.address,self.port))
self.sock.listen(listeners)
print("[SERVER IS ACTIVATED | LISTENING]")
self.INPUTS.append(self.sock)
thread1 = threading.Thread(
target = self.receive_func,
args = (
self.__RECEIVING_MSG,
self.__VARIFIED_DEVICES,
self.__VARIFIER_LIST,
self.__CLIENT_KEYS,
self.OUTPUTS,
self.REQUEST_LIST,
self.REQUEST_RESPONSE_LIST,
self.MESSAGE_LIST,
self.__CUSTOM_CHANNEL_MSG_REC,
)
)
thread2 = threading.Thread(
target = self.send_func,
args = (
self.WRITABLE,
self.MESSAGE_QUEUES,
self.MESSAGE_LIST,
self.REQUEST_LIST,
self.REQUEST_RESPONSE_LIST,
self.__VARIFIER_LIST,
self.__CUSTOM_CHANNEL_MSG_SEND
)
)
thread3 = threading.Thread(
target = self.__callback_loop,
args = (
self.__CALLBACK_LOOP,
)
)
# thread1.daemon = True
thread1.start()
# thread2.daemon = True
thread2.start()
# thread3.daemon = True
thread3.start()
thread = threading.Thread(target = self.__server)
# thread.daemon = True
thread.start()
def __server(self):
data_recv_len = []
while True:
readable, writable, exceptions = select.select(self.INPUTS, self.OUTPUTS, self.INPUTS)
# handling the inputs
for r in readable:
if r is self.sock:
connection,addr = r.accept()
connection.setblocking(0)
self.INPUTS.append(connection)
self.MESSAGE_QUEUES[connection] = queue.Queue()
else:
ini = list(zip(*data_recv_len))
if len(ini) == 0 or r not in ini[0]:
try:
data_len = pickle.loads(base64.b64decode(r.recv(32).decode().strip("0").encode("utf-8")))
except ConnectionResetError:
print("Client Disconnected")
if r in self.OUTPUTS:
self.OUTPUTS.remove(r)
if r in self.WRITABLE:
self.WRITABLE.remove(r)
self.INPUTS.remove(r)
r.close()
del self.MESSAGE_QUEUES[r]
continue
except Exception as e:
pass
if data_len:
if type(data_len) == type([]):
data_recv_len.append(
[
r,
data_len[0]
]
)
else:
print("User Disconnected")
if r in self.OUTPUTS:
self.OUTPUTS.remove(r)
self.INPUTS.remove(r)
if r in self.WRITABLE:
self.WRITABLE.remove(r)
r.close()
del self.MESSAGE_QUEUES[r]
continue
else:
qwe = list(zip(*data_recv_len))
INDEX = qwe[0].index(r)
try:
recv_len = data_recv_len.pop(INDEX)[1]
data = r.recv(recv_len)
try:
data = data.decode().strip("0").encode("utf-8")
except:
print("Error in decoding")
self.__RECEIVING_MSG.append(data)
self.MESSAGE_QUEUES[r].put(pickle.loads(base64.b64decode(data))[0])
if r not in self.OUTPUTS:
self.OUTPUTS.append(r)
except Exception as e:
print("User Disconnected")
readable.remove(r)
self.INPUTS.remove(r)
writable.remove(r)
self.OUTPUTS.remove(r)
if r in self.WRITABLE:
self.WRITABLE.remove(r)
del self.MESSAGE_QUEUES[r]
continue
# handling the outputs
for w in writable:
if w not in self.WRITABLE:
self.WRITABLE.append(w)
# handling the errors
for e in exceptions:
self.INPUTS.remove(e)
if e in self.OUTPUTS:
self.OUTPUTS.remove(e)
e.close()
del self.MESSAGE_QUEUES[e]
def receive_func(self, __receiving_msg,__varified_devices, __varifier_lst, __client_keys, __outputs, __request_lst, __request_res_lst, __message_lst, __custom_c_m_r):
# __receiving_msg = self.__RECEIVING_MSG,
# __varified_devices = self.__VARIFIED_DEVICES,
# __varifier_lst = self.__VARIFIER_LIST,
# __client_keys = self.__CLIENT_KEYS,
# __outputs = self.OUTPUTS,
# __request_lst = self.REQUEST_LIST
# __request_res_lst = self.REQUEST_RESPONSE_LIST
        # __message_lst = self.MESSAGE_LIST
# __custom_c_m_r = self.__CUSTOM_CHANNEL_MSG_REC
while True:
try:
for INDEX,_data_ in enumerate(__receiving_msg):
data = pickle.loads(base64.b64decode(_data_))
# print(f"data[0] : {data[0]}")
# print(f"__varified_devices : {__varified_devices}")
if data[0] not in __varified_devices:
_recv_ = DSP()._convert_to_class(_data_, secure = False)
if _recv_.DSP_type == "username_secure":
resolved_data = eval(_recv_.msg)
aes_key = AESGCM.generate_key(256)
nonce = os.urandom(32)
aad = bytes(self._name_generator(),"utf-8")
qw = {
"aes_key" : aes_key,
"nonce" : nonce,
"aad" : aad,
}
pickle_qw = pickle.dumps(qw)
b64_aes_key_pack = base64.b64encode(pickle_qw)
key = load_ssh_public_key(
bytes(
resolved_data["data"],
"utf-8"
),
backend=default_backend()
)
ciphertext = key.encrypt(
b64_aes_key_pack,
padding.OAEP(
mgf = padding.MGF1(algorithm = hashes.SHA256()),
algorithm = hashes.SHA256(),
label = None
)
)
ciphertext = base64.b64encode(ciphertext)
prepare_data = {"key" : ciphertext}
dsp_data = DSP(
DSP_type="username_secure_response"
)._messanger(
MSG = prepare_data
)
dsp_data = [resolved_data["username"],dsp_data]
__varifier_lst.append(dsp_data)
__varified_devices.append(resolved_data["username"])
__client_keys[resolved_data["username"]] = b64_aes_key_pack
get = self._get_node(
file = self.__file_location,
key = hashlib.sha256(bytes("key","utf-8")).digest(),
wait = False
)
if get is not None:
get[resolved_data["username"]] = b64_aes_key_pack
self._add_node(
file = self.__file_location,
node = [
hashlib.sha256(bytes("key","utf-8")).digest(),
get
]
)
else:
self._add_node(
file = self.__file_location,
node = [
hashlib.sha256(bytes("key","utf-8")).digest(),
{
resolved_data["username"] : b64_aes_key_pack
}
]
)
__receiving_msg.pop(INDEX)
else:
aes_key_pack = __client_keys[data[0]]
_recv_ = DSP()._convert_to_class(
OBJECT = _data_,
secure = True,
secure_dict = aes_key_pack
)
if _recv_.DSP_type == "DSP_REQ":
try:
resolved_data = eval(_recv_.msg)
resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg)))
__request_lst.append(
[
resolved_data["target_name"],
_recv_.msg
]
)
__receiving_msg.remove(_data_)
except:
pass
elif _recv_.DSP_type == "DSP_REQ_RES":
try:
resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg)))
__request_res_lst.append(
[
resolved_data["target_name"],
_recv_.msg
]
)
__receiving_msg.remove(_data_)
except:
pass
elif _recv_.DSP_type == "DSP_MSG":
try:
resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg)))
__message_lst.append(
[
resolved_data['target_name'],
_recv_.msg
]
)
__receiving_msg.remove(_data_)
except:
pass
elif _recv_.DSP_type in self.__CUSTOM_CHANNEL:
try:
resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg)))
__custom_c_m_r.append(resolved_data)
__receiving_msg.remove(_data_)
except:
pass
except:
pass
def send_func(self,Writable,message_q,message_list,requestList,requestResList,varifierList,customChannelMessageSend):
while True:
# print(f"Writable : {Writable}")
# time.sleep(2)
for s in Writable:
if s._closed == True and s.fileno() == -1:
Writable.remove(s)
# try:
try:
username = message_q[s].get_nowait()
message_q[s].put(username)
msg_lst = list(list(zip(*message_list)))
req_lst = list(list(zip(*requestList)))
req_res_lst = list(list(zip(*requestResList)))
vari_lst = list(list(zip(*varifierList)))
send_c_msg = list(zip(*customChannelMessageSend))
except KeyError:
pass
if len(msg_lst) > 0:
if username in msg_lst[0]:
INDEX = msg_lst[0].index(username)
aes_key_pack = self.__CLIENT_KEYS[username]
aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack))
dsp_data = DSP(
DSP_type = "DSP_MSG",
universalAesKey = aes_key_pack["aes_key"],
nonce = aes_key_pack["nonce"],
aad = aes_key_pack["aad"]
)._messanger(
MSG = f"{msg_lst[1][INDEX]}"
).decode().center(len(msg_lst[1][INDEX]) + 100, "|").encode("utf-8")
try:
s.send(bytes(f"{len(dsp_data)}".center(16,"|"),"utf-8"))
s.send(
dsp_data
)
message_list.pop(INDEX)
except OSError:
pass
if len(req_lst) > 0:
if username in req_lst[0]:
INDEX = req_lst[0].index(username)
try:
aes_key_pack = self.__CLIENT_KEYS[username]
except KeyError:
continue
aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack))
dsp_data = DSP(
DSP_type = "DSP_handshake_request",
universalAesKey = aes_key_pack["aes_key"],
nonce = aes_key_pack["nonce"],
aad = aes_key_pack["aad"]
)._messanger(
MSG = f"{req_lst[1][INDEX]}"
).decode().center(len(req_lst[1][INDEX]) + 100, "|").encode("utf-8")
s.send(bytes(f"{len(dsp_data)+100}".center(16,"|"),"utf-8"))
s.send(
dsp_data
)
requestList.pop(INDEX)
if len(req_res_lst) > 0:
if username in req_res_lst[0]:
INDEX = req_res_lst[0].index(username)
aes_key_pack = self.__CLIENT_KEYS[username]
aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack))
dsp_data = DSP(
DSP_type = "DSP_handshake_request_res",
universalAesKey = aes_key_pack["aes_key"],
nonce = aes_key_pack["nonce"],
aad = aes_key_pack["aad"]
)._messanger(
MSG = f"{req_res_lst[1][INDEX]}"
).decode().center(len(req_res_lst[1][INDEX]) + 100, "|").encode("utf-8")
s.send(bytes(f"{len(dsp_data)+100}".center(16,"|"),"utf-8"))
s.send(
dsp_data
)
requestResList.pop(INDEX)
if len(vari_lst) > 0:
if username in vari_lst[0]:
INDEX = vari_lst[0].index(username)
s.send(bytes(f"{len(vari_lst[1][INDEX])}".center(16,"|"),"utf-8"))
s.send(
vari_lst[1][INDEX]
)
varifierList.pop(INDEX)
if len(send_c_msg) > 0:
if username in send_c_msg[0]:
INDEX = send_c_msg[0].index(username)
s.send(bytes(f"{len(send_c_msg[1][INDEX])}".center(16,"|"),"utf-8"))
s.send(send_c_msg[1][INDEX])
customChannelMessageSend.pop(INDEX)
# except:
# pass
def CREATE_CHANNEL(self,channel_name = None, multiple : bool = False):
if multiple:
if type(channel_name) == type([]):
for channel in channel_name:
if channel not in self.__CUSTOM_CHANNEL:
self.__CUSTOM_CHANNEL.append(channel)
else:
print(f"Channel : {channel} already exists.")
else:
raise TypeError("When 'mutliple' is to True then channel_name should be a list of multiple channel names")
else:
if channel_name not in self.__CUSTOM_CHANNEL:
self.__CUSTOM_CHANNEL.append(channel_name)
def LISTEN(self,channel : str = None,function : object = None,args = None):
if channel is not None:
found = False
index = None
if channel in self.__CUSTOM_CHANNEL:
for i,d in enumerate(self.__CUSTOM_CHANNEL_MSG_REC):
if d["channel"] == channel:
found = True
index = i
break
if found:
if args is None:
p_data = self.__CUSTOM_CHANNEL_MSG_REC.pop(index)
self.__CALLBACK_LOOP.append([function,[p_data]])
else:
p_data = self.__CUSTOM_CHANNEL_MSG_REC.pop(index)
args = list(args)
args.insert(0,p_data)
self.__CALLBACK_LOOP.append([function,args])
else:
raise TypeError("'channel' should not be None")
def __callback_loop(self,__callback_loop):
while True:
for index,func in enumerate(__callback_loop):
__callback_loop.pop(index)
func[0](*func[1])
def SEND(self,channel_name,target_name,data):
if channel_name in self.__CUSTOM_CHANNEL:
key_pack = self.__CLIENT_KEYS[target_name]
key_pack = pickle.loads(base64.b64decode(key_pack))
dsp_data = DSP(
DSP_type = channel_name,
universalAesKey=key_pack["aes_key"],
nonce = key_pack["nonce"],
aad= key_pack["aad"]
)._messanger(
MSG = base64.b64encode(pickle.dumps(data))
)
self.__CUSTOM_CHANNEL_MSG_SEND.append(
[
target_name,
dsp_data
]
)
class server():
def __init__(self, file = None, debug : bool = False, MTCL : bool = True, MPCL : bool = False, safeMode : bool = True):
"""
        This class allows the user to create a multi-client server.
        args:
            secure : bool = True -> should be left at its default value of True,
            file : str = None -> a YAML file that stores all the keys and configuration;
                                if not specified, a TypeError is raised
"""
if not file:
raise TypeError("asyncServer() missing 1 required positional argument: 'file'")
__parent = MAIN(file,debug,MTCL,MPCL,safeMode)
self.SERVER = __parent.SERVER
self.CREATE_CHANNEL = __parent.CREATE_CHANNEL
self.LISTEN = __parent.LISTEN
self.SEND = __parent.SEND
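# A minimal usage sketch (address, port and key file are placeholders):
#     srv = server(file="keys.yml")
#     srv.SERVER(address="0.0.0.0", port=5500, listeners=5)
#     srv.CREATE_CHANNEL("chat")
#     while True:
#         srv.LISTEN("chat", function=print)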

zhoulhb/teleport | server/www/packages/packages-windows/x86/ldap3/utils/asn1.py | 54da194697898ef77537cfe7032d774555dc1335
"""
"""
# Created on 2015.08.19
#
# Author: Giovanni Cannata
#
# Copyright 2015 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from pyasn1 import __version__ as pyasn1_version
from pyasn1.codec.ber import decoder # for usage in other modules
from pyasn1.codec.ber.encoder import Encoder # for monkeypatching of boolean value
from ..core.results import RESULT_CODES
from ..utils.conv import to_unicode
from ..protocol.convert import referrals_to_list
CLASSES = {(False, False): 0, # Universal
(False, True): 1, # Application
(True, False): 2, # Context
(True, True): 3} # Private
# Monkeypatching of pyasn1 for encoding Boolean with the value 0xFF for TRUE
# THIS IS NOT PART OF THE FAST BER DECODER
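# note: the 'xxx' prefix below makes the version check always false, so the
# legacy CER encoder branch appears deliberately disabled in favour of the
# version-dispatched LDAPBooleanEncoder in the else branch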
if pyasn1_version == 'xxx0.2.3':
from pyasn1.codec.ber.encoder import tagMap, BooleanEncoder, encode
from pyasn1.type.univ import Boolean
from pyasn1.compat.octets import ints2octs
class BooleanCEREncoder(BooleanEncoder):
_true = ints2octs((255,))
tagMap[Boolean.tagSet] = BooleanCEREncoder()
else:
from pyasn1.codec.ber.encoder import tagMap, typeMap, AbstractItemEncoder
from pyasn1.type.univ import Boolean
from copy import deepcopy
class LDAPBooleanEncoder(AbstractItemEncoder):
supportIndefLenMode = False
if pyasn1_version <= '0.2.3':
from pyasn1.compat.octets import ints2octs
_true = ints2octs((255,))
_false = ints2octs((0,))
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
return value and self._true or self._false, 0
elif pyasn1_version <= '0.3.1':
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
return value and (255,) or (0,), False, False
elif pyasn1_version <= '0.3.4':
def encodeValue(self, encodeFun, value, defMode, maxChunkSize, ifNotEmpty=False):
return value and (255,) or (0,), False, False
elif pyasn1_version <= '0.3.7':
def encodeValue(self, value, encodeFun, **options):
return value and (255,) or (0,), False, False
else:
def encodeValue(self, value, asn1Spec, encodeFun, **options):
return value and (255,) or (0,), False, False
customTagMap = deepcopy(tagMap)
customTypeMap = deepcopy(typeMap)
customTagMap[Boolean.tagSet] = LDAPBooleanEncoder()
customTypeMap[Boolean.typeId] = LDAPBooleanEncoder()
encode = Encoder(customTagMap, customTypeMap)
# end of monkey patching
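# Illustrative sketch (not from the original module): with the patched
# encoder above, an ASN.1 BOOLEAN TRUE is emitted with the value octet 0xFF,
# as LDAP's BER flavour requires (RFC 4511, section 5.1), e.g.:
#
#     from pyasn1.type.univ import Boolean
#     encode(Boolean(True))   # -> b'\x01\x01\xff' (tag 0x01, length 1, value 0xFF)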
# a fast BER decoder for LDAP responses only
def compute_ber_size(data):
"""
Compute size according to BER definite length rules
Returns size of value and value offset
"""
if data[1] <= 127: # BER definite length - short form. Highest bit of byte 1 is 0, message length is in the last 7 bits - Value can be up to 127 bytes long
return data[1], 2
else: # BER definite length - long form. Highest bit of byte 1 is 1, last 7 bits counts the number of following octets containing the value length
bytes_length = data[1] - 128
value_length = 0
cont = bytes_length
for byte in data[2: 2 + bytes_length]:
cont -= 1
value_length += byte * (256 ** cont)
return value_length, bytes_length + 2
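# Worked example (added for illustration):
#   short form: data = b'\x30\x05...' -> data[1] == 5 <= 127, so (5, 2)
#   long form:  data = b'\x30\x82\x01\x00...' -> 0x82 flags two length octets,
#               value_length = 0x01 * 256 + 0x00 = 256 and the value starts
#               at offset 2 + 2 = 4, so the function returns (256, 4)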
def decode_message_fast(message):
ber_len, ber_value_offset = compute_ber_size(get_bytes(message[:10])) # get start of sequence, at maximum 3 bytes for length
decoded = decode_sequence(message, ber_value_offset, ber_len + ber_value_offset, LDAP_MESSAGE_CONTEXT)
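    # each element of `decoded` is a (ber_class, constructed, tag, value)
    # tuple, hence the [2] (tag) and [3] (value) indexing below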
return {
'messageID': decoded[0][3],
'protocolOp': decoded[1][2],
'payload': decoded[1][3],
'controls': decoded[2][3] if len(decoded) == 3 else None
}
def decode_sequence(message, start, stop, context_decoders=None):
decoded = []
while start < stop:
octet = get_byte(message[start])
ber_class = CLASSES[(bool(octet & 0b10000000), bool(octet & 0b01000000))]
ber_constructed = bool(octet & 0b00100000)
ber_type = octet & 0b00011111
ber_decoder = DECODERS[(ber_class, octet & 0b00011111)] if ber_class < 2 else None
ber_len, ber_value_offset = compute_ber_size(get_bytes(message[start: start + 10]))
start += ber_value_offset
if ber_decoder:
value = ber_decoder(message, start, start + ber_len, context_decoders) # call value decode function
else:
# try:
value = context_decoders[ber_type](message, start, start + ber_len) # call value decode function for context class
# except KeyError:
# if ber_type == 3: # Referral in result
# value = decode_sequence(message, start, start + ber_len)
# else:
# raise # re-raise, should never happen
decoded.append((ber_class, ber_constructed, ber_type, value))
start += ber_len
return decoded
def decode_integer(message, start, stop, context_decoders=None):
first = message[start]
value = -1 if get_byte(first) & 0x80 else 0
for octet in message[start: stop]:
value = value << 8 | get_byte(octet)
return value
def decode_octet_string(message, start, stop, context_decoders=None):
return message[start: stop]
def decode_boolean(message, start, stop, context_decoders=None):
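    # note: in Python 3 a bytes slice never compares equal to the int 0, so
    # this comparison is always false and any payload decodes as True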
return False if message[start: stop] == 0 else True
def decode_bind_response(message, start, stop, context_decoders=None):
return decode_sequence(message, start, stop, BIND_RESPONSE_CONTEXT)
def decode_extended_response(message, start, stop, context_decoders=None):
return decode_sequence(message, start, stop, EXTENDED_RESPONSE_CONTEXT)
def decode_intermediate_response(message, start, stop, context_decoders=None):
return decode_sequence(message, start, stop, INTERMEDIATE_RESPONSE_CONTEXT)
def decode_controls(message, start, stop, context_decoders=None):
return decode_sequence(message, start, stop, CONTROLS_CONTEXT)
def ldap_result_to_dict_fast(response):
response_dict = dict()
response_dict['result'] = int(response[0][3]) # resultCode
response_dict['description'] = RESULT_CODES[response_dict['result']]
response_dict['dn'] = to_unicode(response[1][3], from_server=True) # matchedDN
response_dict['message'] = to_unicode(response[2][3], from_server=True) # diagnosticMessage
if len(response) == 4:
response_dict['referrals'] = referrals_to_list([to_unicode(referral[3], from_server=True) for referral in response[3][3]]) # referrals
else:
response_dict['referrals'] = None
return response_dict
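# e.g. a successful operation typically decodes to something like:
# {'result': 0, 'description': 'success', 'dn': '', 'message': '', 'referrals': None}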
######
if str is not bytes: # Python 3
def get_byte(x):
return x
def get_bytes(x):
return x
else: # Python 2
def get_byte(x):
return ord(x)
def get_bytes(x):
return bytearray(x)
DECODERS = {
# Universal
(0, 1): decode_boolean, # Boolean
(0, 2): decode_integer, # Integer
(0, 4): decode_octet_string, # Octet String
(0, 10): decode_integer, # Enumerated
(0, 16): decode_sequence, # Sequence
(0, 17): decode_sequence, # Set
# Application
(1, 1): decode_bind_response, # Bind response
(1, 4): decode_sequence, # Search result entry
(1, 5): decode_sequence, # Search result done
(1, 7): decode_sequence, # Modify response
(1, 9): decode_sequence, # Add response
(1, 11): decode_sequence, # Delete response
(1, 13): decode_sequence, # ModifyDN response
(1, 15): decode_sequence, # Compare response
(1, 19): decode_sequence, # Search result reference
(1, 24): decode_extended_response, # Extended response
(1, 25): decode_intermediate_response, # intermediate response
(2, 3): decode_octet_string #
}
BIND_RESPONSE_CONTEXT = {
7: decode_octet_string # SaslCredentials
}
EXTENDED_RESPONSE_CONTEXT = {
10: decode_octet_string, # ResponseName
11: decode_octet_string # Response Value
}
INTERMEDIATE_RESPONSE_CONTEXT = {
0: decode_octet_string, # IntermediateResponseName
1: decode_octet_string # IntermediateResponseValue
}
LDAP_MESSAGE_CONTEXT = {
0: decode_controls, # Controls
3: decode_sequence # Referral
}
CONTROLS_CONTEXT = {
0: decode_sequence # Control
}
| [((3083, 3099), 'copy.deepcopy', 'deepcopy', (['tagMap'], {}), '(tagMap)\n', (3091, 3099), False, 'from copy import deepcopy\n'), ((3121, 3138), 'copy.deepcopy', 'deepcopy', (['typeMap'], {}), '(typeMap)\n', (3129, 3138), False, 'from copy import deepcopy\n'), ((3270, 3306), 'pyasn1.codec.ber.encoder.Encoder', 'Encoder', (['customTagMap', 'customTypeMap'], {}), '(customTagMap, customTypeMap)\n', (3277, 3306), False, 'from pyasn1.codec.ber.encoder import Encoder\n'), ((1727, 1744), 'pyasn1.compat.octets.ints2octs', 'ints2octs', (['(255,)'], {}), '((255,))\n', (1736, 1744), False, 'from pyasn1.compat.octets import ints2octs\n'), ((2163, 2180), 'pyasn1.compat.octets.ints2octs', 'ints2octs', (['(255,)'], {}), '((255,))\n', (2172, 2180), False, 'from pyasn1.compat.octets import ints2octs\n'), ((2203, 2218), 'pyasn1.compat.octets.ints2octs', 'ints2octs', (['(0,)'], {}), '((0,))\n', (2212, 2218), False, 'from pyasn1.compat.octets import ints2octs\n')] |
SimeonZhang/detectron2_tensorflow | lib/utils/arg_scope.py | ca03f633111d540ea91b3de75dbfa1da813647be | import copy
from contextlib import contextmanager
from functools import wraps
from collections import defaultdict
import tensorflow as tf
_ArgScopeStack = []
@contextmanager
def arg_scope(layers, **kwargs):
"""
Args:
layers (list or layer): layer or list of layers to apply the arguments.
Returns:
a context where all appearance of these layer will by default have the
arguments specified by kwargs.
Example:
.. code-block:: python
with arg_scope(Conv2D, kernel_shape=3, nl=tf.nn.relu, out_channel=32):
x = Conv2D('conv0', x)
x = Conv2D('conv1', x)
x = Conv2D('conv2', x, out_channel=64) # override argscope
"""
if not isinstance(layers, list):
layers = [layers]
for l in layers:
assert hasattr(l, '__arg_scope_enabled__'), "Argscope not supported for {}".format(l)
# need to deepcopy so that changes to new_scope does not affect outer scope
new_scope = copy.deepcopy(get_arg_scope())
for l in layers:
new_scope[l.__name__].update(kwargs)
_ArgScopeStack.append(new_scope)
    try:
        yield
    finally:
        del _ArgScopeStack[-1]
def get_arg_scope():
"""
Returns:
dict: the current argscope.
An argscope is a dict of dict: ``dict[layername] = {arg: val}``
"""
if len(_ArgScopeStack) > 0:
return _ArgScopeStack[-1]
else:
return defaultdict(dict)
def add_arg_scope(cls):
"""Decorator for function to support argscope
Example:
.. code-block:: python
from mylib import MyClass
myfunc = add_arg_scope(MyClass)
Args:
func: A function mapping one or multiple tensors to one or multiple
tensors.
Remarks:
If the function ``func`` returns multiple input or output tensors,
only the first input/output tensor shape is displayed during logging.
Returns:
The decorated function.
"""
original_init = cls.__init__
@wraps(original_init)
def wrapped_init(self, *args, **kwargs):
actual_args = copy.copy(get_arg_scope()[cls.__name__])
        actual_args.update(kwargs)  # explicit kwargs override scope defaults
        original_init(self, *args, **actual_args)
cls.__arg_scope_enabled__ = True
cls.__init__ = wrapped_init
return cls
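# Usage sketch (illustrative, not part of the original module): any layer
# class decorated with @add_arg_scope picks up defaults from the scope:
#
#     @add_arg_scope
#     class Conv2D:
#         def __init__(self, name, x, out_channel=16):
#             ...
#
#     with arg_scope(Conv2D, out_channel=32):
#         Conv2D('conv0', x)   # __init__ receives out_channel=32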
| [((2018, 2038), 'functools.wraps', 'wraps', (['original_init'], {}), '(original_init)\n', (2023, 2038), False, 'from functools import wraps\n'), ((1427, 1444), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (1438, 1444), False, 'from collections import defaultdict\n')] |
AnkitDeshwal89/NETMIKO | CORN-TEST/textfsm_parse.py | 81c164e9cff46d11b56612f6adc343b6bcdfe87a | import textfsm
import subprocess
import random
res = subprocess.run('ifconfig', stdout=subprocess.PIPE)  # capture raw interface stats
intstatus = res.stdout.decode('ascii')
with open("datafile","w+") as a:
a.write(intstatus)
a.close()
template_file= "ifconfig-template.template"
template = open(template_file)
with open("datafile") as f:
raw_data = f.read()
re_table = textfsm.TextFSM(template)
data = re_table.ParseText(raw_data)
print(data)
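# Hypothetical template shape (illustration only; the real
# ifconfig-template.template is not included here) -- TextFSM templates
# declare Values and then per-state regex rules:
#
#   Value Interface (\S+)
#   Value TX (\d+)
#
#   Start
#     ^${Interface}: flags
#     ^\s+TX packets ${TX} -> Record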
NL = []
for x in data:
NLD = {
        'Interface': x[0].split(':')[0],
        'TX': int(x[1]) + random.randint(1, 100)  # random jitter on the TX count
}
NL.append(NLD)
print(NL)
import json
print('#'*12)
print(json.dumps(NL))
# Template file: ifconfig-template.template
# Input data file: datafile (written above from the captured ifconfig output)
| [((55, 105), 'subprocess.run', 'subprocess.run', (['"""ifconfig"""'], {'stdout': 'subprocess.PIPE'}), "('ifconfig', stdout=subprocess.PIPE)\n", (69, 105), False, 'import subprocess\n'), ((359, 384), 'textfsm.TextFSM', 'textfsm.TextFSM', (['template'], {}), '(template)\n', (374, 384), False, 'import textfsm\n'), ((645, 659), 'json.dumps', 'json.dumps', (['NL'], {}), '(NL)\n', (655, 659), False, 'import json\n'), ((549, 571), 'random.randint', 'random.randint', (['(1)', '(100)'], {}), '(1, 100)\n', (563, 571), False, 'import random\n')] |
jared-jorgenson/mini_game | classes.py | ac73987ac4c32c0e9f521d7bcf8d4d9ee4ded85a | import pygame
class Player(pygame.sprite.Sprite):
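    # Animation frames loaded once as class attributes: a 20-frame shared
    # death sequence plus 7-frame walk cycles per facing direction for each
    # player, with and without the shield overlay.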
death = [pygame.image.load('Images/death1.png'), pygame.image.load('Images/death2.png'),
pygame.image.load('Images/death3.png'),
pygame.image.load('Images/death4.png'), pygame.image.load('Images/death5.png'),
pygame.image.load('Images/death6.png'),
pygame.image.load('Images/death7.png'), pygame.image.load('Images/death8.png'),
pygame.image.load('Images/death9.png'),
pygame.image.load('Images/death10.png'), pygame.image.load('Images/death11.png'),
pygame.image.load('Images/death12.png'),
pygame.image.load('Images/death13.png'), pygame.image.load('Images/death14.png'),
pygame.image.load('Images/death15.png'),
pygame.image.load('Images/death16.png'), pygame.image.load('Images/death17.png'),
pygame.image.load('Images/death18.png'),pygame.image.load('Images/death19.png'),
pygame.image.load('Images/death20.png')]
p1walkLeft = [pygame.image.load('Images/p1left.png'), pygame.image.load('Images/p1left1.png'),
pygame.image.load('Images/p1left2.png'),
pygame.image.load('Images/p1left3.png'), pygame.image.load('Images/p1left2.png'),
pygame.image.load('Images/p1left1.png'), pygame.image.load('Images/p1left.png')]
p1walkLeftshield = [pygame.image.load('Images/p1leftshield.png'), pygame.image.load('Images/p1left1shield.png'),
pygame.image.load('Images/p1left2shield.png'),
pygame.image.load('Images/p1left3shield.png'), pygame.image.load('Images/p1left2shield.png'),
pygame.image.load('Images/p1left1shield.png'), pygame.image.load('Images/p1leftshield.png')]
p1walkRight = [pygame.image.load('Images/p1right.png'), pygame.image.load('Images/p1right1.png'),
pygame.image.load('Images/p1right2.png'),
pygame.image.load('Images/p1right3.png'), pygame.image.load('Images/p1right2.png'),
pygame.image.load('Images/p1right1.png'), pygame.image.load('Images/p1right.png')]
p1walkRightshield = [pygame.image.load('Images/p1rightshield.png'), pygame.image.load('Images/p1right1shield.png'),
pygame.image.load('Images/p1right2shield.png'),
pygame.image.load('Images/p1right3shield.png'), pygame.image.load('Images/p1right2shield.png'),
pygame.image.load('Images/p1right1shield.png'), pygame.image.load('Images/p1rightshield.png')]
p1walkFront = [pygame.image.load('Images/p1front.png'), pygame.image.load('Images/p1front1.png'),
pygame.image.load('Images/p1front2.png'),
pygame.image.load('Images/p1front3.png'), pygame.image.load('Images/p1front2.png'),
pygame.image.load('Images/p1front1.png'), pygame.image.load('Images/p1front.png')]
p1walkFrontshield = [pygame.image.load('Images/p1frontshield.png'), pygame.image.load('Images/p1front1shield.png'),
pygame.image.load('Images/p1front2shield.png'),
pygame.image.load('Images/p1front3shield.png'), pygame.image.load('Images/p1front2shield.png'),
pygame.image.load('Images/p1front1shield.png'), pygame.image.load('Images/p1frontshield.png')]
p1walkBack = [pygame.image.load('Images/p1back.png'), pygame.image.load('Images/p1back1.png'),
pygame.image.load('Images/p1back2.png'),
pygame.image.load('Images/p1back3.png'), pygame.image.load('Images/p1back2.png'),
pygame.image.load('Images/p1back1.png'), pygame.image.load('Images/p1back.png')]
p1walkBackshield = [pygame.image.load('Images/p1backshield.png'), pygame.image.load('Images/p1back1shield.png'),
pygame.image.load('Images/p1back2shield.png'),
pygame.image.load('Images/p1back3shield.png'), pygame.image.load('Images/p1back2shield.png'),
pygame.image.load('Images/p1back1shield.png'), pygame.image.load('Images/p1backshield.png')]
p2walkLeft = [pygame.image.load('Images/p2left.png'), pygame.image.load('Images/p2left1.png'),
pygame.image.load('Images/p2left2.png'),
pygame.image.load('Images/p2left3.png'), pygame.image.load('Images/p2left2.png'),
pygame.image.load('Images/p2left1.png'), pygame.image.load('Images/p2left.png')]
p2walkRight = [pygame.image.load('Images/p2right.png'), pygame.image.load('Images/p2right1.png'),
pygame.image.load('Images/p2right2.png'),
pygame.image.load('Images/p2right3.png'), pygame.image.load('Images/p2right2.png'),
pygame.image.load('Images/p2right1.png'), pygame.image.load('Images/p2right.png')]
p2walkFront = [pygame.image.load('Images/p2front.png'), pygame.image.load('Images/p2front1.png'),
pygame.image.load('Images/p2front2.png'),
pygame.image.load('Images/p2front3.png'), pygame.image.load('Images/p2front2.png'),
pygame.image.load('Images/p2front1.png'), pygame.image.load('Images/p2front.png')]
p2walkBack = [pygame.image.load('Images/p2back.png'), pygame.image.load('Images/p2back1.png'),
pygame.image.load('Images/p2back2.png'),
pygame.image.load('Images/p2back3.png'), pygame.image.load('Images/p2back2.png'),
pygame.image.load('Images/p2back1.png'), pygame.image.load('Images/p2back.png')]
p2walkLeftshield = [pygame.image.load('Images/p2leftshield.png'), pygame.image.load('Images/p2left1shield.png'),
pygame.image.load('Images/p2left2shield.png'),
pygame.image.load('Images/p2left3shield.png'), pygame.image.load('Images/p2left2shield.png'),
pygame.image.load('Images/p2left1shield.png'), pygame.image.load('Images/p2leftshield.png')]
p2walkRightshield = [pygame.image.load('Images/p2rightshield.png'), pygame.image.load('Images/p2right1shield.png'),
pygame.image.load('Images/p2right2shield.png'),
pygame.image.load('Images/p2right3shield.png'), pygame.image.load('Images/p2right2shield.png'),
pygame.image.load('Images/p2right1shield.png'), pygame.image.load('Images/p2rightshield.png')]
p2walkFrontshield = [pygame.image.load('Images/p2frontshield.png'), pygame.image.load('Images/p2front1shield.png'),
pygame.image.load('Images/p2front2shield.png'),
pygame.image.load('Images/p2front3shield.png'), pygame.image.load('Images/p2front2shield.png'),
pygame.image.load('Images/p2front1shield.png'), pygame.image.load('Images/p2frontshield.png')]
p2walkBackshield = [pygame.image.load('Images/p2backshield.png'), pygame.image.load('Images/p2back1shield.png'),
pygame.image.load('Images/p2back2shield.png'),
pygame.image.load('Images/p2back3shield.png'), pygame.image.load('Images/p2back2shield.png'),
pygame.image.load('Images/p2back1shield.png'), pygame.image.load('Images/p2backshield.png')]
# Constructor function
def __init__(self, x, y, number):
super().__init__()
self.image = pygame.Surface([24, 28])
self.image.fill((0,0,0))
self.rect = self.image.get_rect()
self.rect.y = y
self.rect.x = x
self.front = True
self.back = False
self.left = False
self.right = False
self.number = number
self.change_x = 0
self.change_y = 0
self.walkCount = 0
self.walls = None
self.alive = True
self.canmove = True
self.deathCount = 0
self.gotomenu=False
self.speed=3
self.superspeed=False
self.superspeedcount=0
self.shield=False
self.shieldcount=0
self.megabombs=False
self.megabombcount = 0
def changespeed(self, x, y):
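        # accumulate movement deltas from key events; when the super-speed
        # timer runs out the base speed of 3 is restored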
self.change_x += x
self.change_y += y
if self.superspeed and self.change_x==0 and self.change_y==0:
self.speed=6
if self.superspeedcount>=150:
self.superspeed = False
self.speed=3
self.superspeedcount=0
def update(self):
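        # axis-separated movement: move along x and resolve wall collisions,
        # then repeat for y, updating the facing flags along the way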
if self.canmove:
self.rect.x += self.change_x
if self.change_x <0:
self.left=True
self.right=False
self.front=False
self.back=False
elif self.change_x >0:
self.left=False
self.right=True
self.front=False
self.back=False
block_hit_list = pygame.sprite.spritecollide(self, self.walls, False)
for block in block_hit_list:
if self.change_x > 0:
self.rect.right = block.rect.left
else:
self.rect.left = block.rect.right
self.rect.y += self.change_y
if self.change_y <0:
self.left=False
self.right=False
self.front=False
self.back=True
elif self.change_y >0:
self.left=False
self.right=False
self.front=True
self.back=False
block_hit_list = pygame.sprite.spritecollide(self, self.walls, False)
for block in block_hit_list:
if self.change_y > 0:
self.rect.bottom = block.rect.top
else:
self.rect.top = block.rect.bottom
def draw(self, screen):
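        # draw the HUD bomb counter, then blit the walk frame matching the
        # current facing/shield state; the frame index advances every 3 ticks
        # across the 7-frame cycle (walkCount wraps at 21)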
if self.number == 1:
screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'), (2, 0))
if self.alive:
if self.front:
if self.shield:
if self.change_y == 0:
screen.blit(pygame.image.load('Images/p1frontshield.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p1walkFrontshield[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
else:
if self.change_y == 0:
screen.blit(pygame.image.load('Images/p1front.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p1walkFront[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
elif self.back:
if self.shield:
if self.change_y == 0:
screen.blit(pygame.image.load('Images/p1backshield.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p1walkBackshield[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
else:
if self.change_y == 0:
screen.blit(pygame.image.load('Images/p1back.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p1walkBack[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
elif self.left:
if self.shield:
if self.change_x == 0:
screen.blit(pygame.image.load('Images/p1leftshield.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p1walkLeftshield[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
else:
if self.change_x == 0:
screen.blit(pygame.image.load('Images/p1left.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p1walkLeft[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
elif self.right:
if self.shield:
if self.change_x == 0:
screen.blit(pygame.image.load('Images/p1rightshield.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p1walkRightshield[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
else:
if self.change_x == 0:
screen.blit(pygame.image.load('Images/p1right.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p1walkRight[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
elif self.number == 2:
screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'), (415, 0))
if self.alive:
if self.front:
if self.shield:
if self.change_y == 0:
screen.blit(pygame.image.load('Images/p2frontshield.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p2walkFrontshield[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
else:
if self.change_y == 0:
screen.blit(pygame.image.load('Images/p2front.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p2walkFront[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
elif self.back:
if self.shield:
if self.change_y == 0:
screen.blit(pygame.image.load('Images/p2backshield.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p2walkBackshield[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
else:
if self.change_y == 0:
screen.blit(pygame.image.load('Images/p2back.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p2walkBack[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
elif self.left:
if self.shield:
if self.change_x == 0:
screen.blit(pygame.image.load('Images/p2leftshield.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p2walkLeftshield[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
else:
if self.change_x == 0:
screen.blit(pygame.image.load('Images/p2left.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p2walkLeft[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
elif self.right:
if self.shield:
if self.change_x == 0:
screen.blit(pygame.image.load('Images/p2rightshield.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p2walkRightshield[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
else:
if self.change_x == 0:
screen.blit(pygame.image.load('Images/p2right.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p2walkRight[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
if self.alive == False and self.deathCount < 200:
screen.blit(self.death[self.deathCount // 10], (self.rect.x, self.rect.y))
self.deathCount += 1
if self.deathCount >= 200:
self.rect.x = 1000
self.gotomenu=True
def reset(self,x,y):
self.gotomenu = False
self.alive = True
self.deathCount = 0
self.rect.x = x
self.rect.y = y
self.canmove = True
self.front = True
self.change_x=0
self.change_y=0
self.superspeed=False
self.speed=3
self.shield=False
self.megabombs=False
self.megabombcount=0
class Wall(pygame.sprite.Sprite):
def __init__(self, x, y, width, height):
super().__init__()
self.image = pygame.Surface([width,height], pygame.SRCALPHA, 32)
        self.image = self.image.convert_alpha()  # keep the converted surface
self.rect = self.image.get_rect()
self.rect.y = y
self.rect.x = x
class powerup(pygame.sprite.Sprite):
superspeedanimation=[pygame.image.load('Images/superspeed1.png'), pygame.image.load('Images/superspeed2.png'),
pygame.image.load('Images/superspeed3.png'), pygame.image.load('Images/superspeed3.png'),
pygame.image.load('Images/superspeed2.png'), pygame.image.load('Images/superspeed1.png')]
shieldanimation = [pygame.image.load('Images/shield1.png'), pygame.image.load('Images/shield2.png'),
pygame.image.load('Images/shield3.png'), pygame.image.load('Images/shield3.png'),
pygame.image.load('Images/shield2.png'), pygame.image.load('Images/shield1.png')]
megabombanimation = [pygame.image.load('Images2/megabombicon1.png'), pygame.image.load('Images2/megabombicon2.png'),
pygame.image.load('Images2/megabombicon3.png'), pygame.image.load('Images2/megabombicon3.png'),
pygame.image.load('Images2/megabombicon2.png'), pygame.image.load('Images2/megabombicon1.png')]
def __init__(self, x, y, number):
super().__init__()
self.image = pygame.Surface([22, 28], pygame.SRCALPHA, 32)
        self.image = self.image.convert_alpha()  # keep the converted surface
self.rect = self.image.get_rect()
self.rect.y = y
self.rect.x = x
self.number = number
self.spawntimer=0
self.respawntimer=0
self.exists=True
self.animationcount=0
def draw(self, screen):
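        # after the 50-tick spawn delay, cycle the 6-frame pickup animation,
        # advancing one frame every 5 ticks (animationcount wraps at 30)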
if self.number==1:
if self.exists and self.spawntimer>50:
if self.animationcount + 1 >= 30:
self.animationcount = 0
screen.blit(self.superspeedanimation[self.animationcount // 5], (self.rect.x, self.rect.y))
self.animationcount += 1
elif self.number==2:
if self.exists and self.spawntimer > 50:
if self.animationcount + 1 >= 30:
self.animationcount = 0
screen.blit(self.shieldanimation[self.animationcount // 5], (self.rect.x, self.rect.y))
self.animationcount += 1
else:
if self.exists and self.spawntimer > 50:
if self.animationcount + 1 >= 30:
self.animationcount = 0
screen.blit(self.megabombanimation[self.animationcount // 5], (self.rect.x, self.rect.y))
self.animationcount += 1
def reset(self):
self.spawntimer=0
self.respawntimer=0
self.exists=True
class bomb(pygame.sprite.Sprite):
def __init__(self, x, y, width, height, bomb_count, bomb_type):
super().__init__()
self.image = pygame.Surface([22, 28], pygame.SRCALPHA, 32)
        self.image = self.image.convert_alpha()  # keep the converted surface
self.rect = self.image.get_rect()
self.rect.y = y
self.rect.x = x
self.width = width
self.height = height
self.bomb_count = bomb_count
self.bomb_type = bomb_type
self.walls = None
self.leftcheck = self.rect.x - 32
self.rightcheck = self.rect.x + self.width
self.upcheck = self.rect.y - 32
self.downcheck = self.rect.y + self.height
self.expleft = True
self.doubleexpleft = True
self.expright = True
self.doubleexpright = True
self.expup = True
self.doubleexpup = True
self.expdown = True
self.doubleexpdown = True
self.expboxlist = []
def draw(self, screen):
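        # fuse animation: one sprite per 30-tick window (ticks 0-89), then the
        # explosion is drawn from tick 90 and every burning tile is appended
        # to expboxlist as a hit rectangle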
if self.bomb_count < 30:
if self.bomb_type==0:
screen.blit(pygame.image.load('Images/bomb3.png'), (self.rect.x, self.rect.y))
else:
screen.blit(pygame.image.load('Images2/megabomb3.png'), (self.rect.x, self.rect.y))
elif self.bomb_count < 60:
if self.bomb_type == 0:
screen.blit(pygame.image.load('Images/bomb2.png'), (self.rect.x, self.rect.y))
else:
screen.blit(pygame.image.load('Images2/megabomb2.png'), (self.rect.x, self.rect.y))
elif self.bomb_count < 90:
if self.bomb_type == 0:
screen.blit(pygame.image.load('Images/bomb1.png'), (self.rect.x, self.rect.y))
else:
screen.blit(pygame.image.load('Images2/megabomb1.png'), (self.rect.x, self.rect.y))
elif self.bomb_count < 120:
if self.bomb_type==0:
for i in self.walls:
if i.rect.collidepoint(self.leftcheck,self.rect.y):
self.expleft = False
if i.rect.collidepoint(self.rightcheck,self.rect.y):
self.expright = False
if i.rect.collidepoint(self.rect.x,self.upcheck):
self.expup = False
if i.rect.collidepoint(self.rect.x,self.downcheck):
self.expdown = False
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.rect.y))
self.expboxlist.append(pygame.Rect(self.rect.x, self.rect.y, 32, 32))
if self.expleft:
screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck, self.rect.y))
self.expboxlist.append(pygame.Rect(self.leftcheck, self.rect.y, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck+16, self.rect.y))
self.expboxlist.append(pygame.Rect(self.leftcheck+16, self.rect.y, 32, 32))
if self.expright:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck, self.rect.y))
self.expboxlist.append(pygame.Rect(self.rightcheck, self.rect.y, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck-16, self.rect.y))
self.expboxlist.append(pygame.Rect(self.rightcheck-16, self.rect.y, 32, 32))
if self.expup:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck))
self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck+16))
self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck+16, 32, 32))
if self.expdown:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck))
self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck-16))
self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck-16, 32, 32))
elif self.bomb_type==1:
for i in self.walls:
if i.rect.collidepoint(self.leftcheck, self.rect.y):
self.expleft = False
if i.rect.collidepoint(self.leftcheck-32, self.rect.y):
self.doubleexpleft = False
if i.rect.collidepoint(self.rightcheck, self.rect.y):
self.expright = False
if i.rect.collidepoint(self.rightcheck+32, self.rect.y):
self.doubleexpright = False
if i.rect.collidepoint(self.rect.x, self.upcheck):
self.expup = False
if i.rect.collidepoint(self.rect.x, self.upcheck-32):
self.doubleexpup = False
if i.rect.collidepoint(self.rect.x, self.downcheck):
self.expdown = False
if i.rect.collidepoint(self.rect.x, self.downcheck+32):
self.doubleexpdown = False
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.rect.y))
self.expboxlist.append(pygame.Rect(self.rect.x, self.rect.y, 32, 32))
if self.expleft:
screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck, self.rect.y))
self.expboxlist.append(pygame.Rect(self.leftcheck, self.rect.y, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck + 16, self.rect.y))
self.expboxlist.append(pygame.Rect(self.leftcheck + 16, self.rect.y, 32, 32))
if self.doubleexpleft:
screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck-32, self.rect.y))
self.expboxlist.append(pygame.Rect(self.leftcheck-32, self.rect.y, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck-16, self.rect.y))
self.expboxlist.append(pygame.Rect(self.leftcheck-16, self.rect.y, 32, 32))
if self.expright:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck, self.rect.y))
self.expboxlist.append(pygame.Rect(self.rightcheck, self.rect.y, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck - 16, self.rect.y))
self.expboxlist.append(pygame.Rect(self.rightcheck - 16, self.rect.y, 32, 32))
if self.doubleexpright:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck+32, self.rect.y))
self.expboxlist.append(pygame.Rect(self.rightcheck+32, self.rect.y, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck+16, self.rect.y))
self.expboxlist.append(pygame.Rect(self.rightcheck+16, self.rect.y, 32, 32))
if self.expup:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck))
self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck + 16))
self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck + 16, 32, 32))
if self.doubleexpup:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck-32))
self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck-32, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck-16))
self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck-16, 32, 32))
if self.expdown:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck))
self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck - 16))
self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck - 16, 32, 32))
if self.doubleexpdown:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck+32))
self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck+32, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck+16))
self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck+16, 32, 32)) | [((69, 107), 'pygame.image.load', 'pygame.image.load', (['"""Images/death1.png"""'], {}), "('Images/death1.png')\n", (86, 107), False, 'import pygame\n'), ((109, 147), 'pygame.image.load', 'pygame.image.load', (['"""Images/death2.png"""'], {}), "('Images/death2.png')\n", (126, 147), False, 'import pygame\n'), ((163, 201), 'pygame.image.load', 'pygame.image.load', (['"""Images/death3.png"""'], {}), "('Images/death3.png')\n", (180, 201), False, 'import pygame\n'), ((217, 255), 'pygame.image.load', 'pygame.image.load', (['"""Images/death4.png"""'], {}), "('Images/death4.png')\n", (234, 255), False, 'import pygame\n'), ((257, 295), 'pygame.image.load', 'pygame.image.load', (['"""Images/death5.png"""'], {}), "('Images/death5.png')\n", (274, 295), False, 'import pygame\n'), ((311, 349), 'pygame.image.load', 'pygame.image.load', (['"""Images/death6.png"""'], {}), "('Images/death6.png')\n", (328, 349), False, 'import pygame\n'), ((365, 403), 'pygame.image.load', 'pygame.image.load', (['"""Images/death7.png"""'], {}), "('Images/death7.png')\n", (382, 403), False, 'import pygame\n'), ((405, 443), 'pygame.image.load', 'pygame.image.load', (['"""Images/death8.png"""'], {}), "('Images/death8.png')\n", (422, 443), False, 'import pygame\n'), ((459, 497), 'pygame.image.load', 'pygame.image.load', (['"""Images/death9.png"""'], {}), "('Images/death9.png')\n", (476, 497), False, 'import pygame\n'), ((513, 552), 'pygame.image.load', 'pygame.image.load', (['"""Images/death10.png"""'], {}), "('Images/death10.png')\n", (530, 552), False, 'import pygame\n'), ((554, 593), 'pygame.image.load', 'pygame.image.load', (['"""Images/death11.png"""'], {}), "('Images/death11.png')\n", (571, 593), False, 'import pygame\n'), ((609, 648), 'pygame.image.load', 'pygame.image.load', (['"""Images/death12.png"""'], {}), "('Images/death12.png')\n", (626, 648), False, 'import pygame\n'), ((664, 703), 'pygame.image.load', 'pygame.image.load', (['"""Images/death13.png"""'], {}), "('Images/death13.png')\n", (681, 703), False, 'import pygame\n'), ((705, 744), 'pygame.image.load', 'pygame.image.load', (['"""Images/death14.png"""'], {}), "('Images/death14.png')\n", (722, 744), False, 'import pygame\n'), ((760, 799), 'pygame.image.load', 'pygame.image.load', (['"""Images/death15.png"""'], {}), "('Images/death15.png')\n", (777, 799), False, 'import pygame\n'), ((815, 854), 'pygame.image.load', 'pygame.image.load', (['"""Images/death16.png"""'], {}), "('Images/death16.png')\n", (832, 854), False, 'import pygame\n'), ((856, 895), 'pygame.image.load', 'pygame.image.load', (['"""Images/death17.png"""'], {}), "('Images/death17.png')\n", (873, 895), False, 'import pygame\n'), ((911, 950), 'pygame.image.load', 'pygame.image.load', (['"""Images/death18.png"""'], {}), "('Images/death18.png')\n", (928, 950), False, 'import pygame\n'), ((951, 990), 'pygame.image.load', 'pygame.image.load', (['"""Images/death19.png"""'], {}), "('Images/death19.png')\n", (968, 990), False, 'import pygame\n'), ((1006, 1045), 'pygame.image.load', 'pygame.image.load', (['"""Images/death20.png"""'], {}), "('Images/death20.png')\n", (1023, 1045), False, 'import pygame\n'), ((1066, 1104), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left.png"""'], {}), "('Images/p1left.png')\n", (1083, 1104), False, 'import pygame\n'), ((1106, 1145), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left1.png"""'], {}), "('Images/p1left1.png')\n", (1123, 1145), False, 'import pygame\n'), ((1161, 
1200), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left2.png"""'], {}), "('Images/p1left2.png')\n", (1178, 1200), False, 'import pygame\n'), ((1216, 1255), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left3.png"""'], {}), "('Images/p1left3.png')\n", (1233, 1255), False, 'import pygame\n'), ((1257, 1296), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left2.png"""'], {}), "('Images/p1left2.png')\n", (1274, 1296), False, 'import pygame\n'), ((1312, 1351), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left1.png"""'], {}), "('Images/p1left1.png')\n", (1329, 1351), False, 'import pygame\n'), ((1353, 1391), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left.png"""'], {}), "('Images/p1left.png')\n", (1370, 1391), False, 'import pygame\n'), ((1418, 1462), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1leftshield.png"""'], {}), "('Images/p1leftshield.png')\n", (1435, 1462), False, 'import pygame\n'), ((1464, 1509), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left1shield.png"""'], {}), "('Images/p1left1shield.png')\n", (1481, 1509), False, 'import pygame\n'), ((1530, 1575), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left2shield.png"""'], {}), "('Images/p1left2shield.png')\n", (1547, 1575), False, 'import pygame\n'), ((1596, 1641), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left3shield.png"""'], {}), "('Images/p1left3shield.png')\n", (1613, 1641), False, 'import pygame\n'), ((1643, 1688), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left2shield.png"""'], {}), "('Images/p1left2shield.png')\n", (1660, 1688), False, 'import pygame\n'), ((1709, 1754), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left1shield.png"""'], {}), "('Images/p1left1shield.png')\n", (1726, 1754), False, 'import pygame\n'), ((1756, 1800), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1leftshield.png"""'], {}), "('Images/p1leftshield.png')\n", (1773, 1800), False, 'import pygame\n'), ((1822, 1861), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right.png"""'], {}), "('Images/p1right.png')\n", (1839, 1861), False, 'import pygame\n'), ((1863, 1903), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right1.png"""'], {}), "('Images/p1right1.png')\n", (1880, 1903), False, 'import pygame\n'), ((1924, 1964), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right2.png"""'], {}), "('Images/p1right2.png')\n", (1941, 1964), False, 'import pygame\n'), ((1985, 2025), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right3.png"""'], {}), "('Images/p1right3.png')\n", (2002, 2025), False, 'import pygame\n'), ((2027, 2067), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right2.png"""'], {}), "('Images/p1right2.png')\n", (2044, 2067), False, 'import pygame\n'), ((2088, 2128), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right1.png"""'], {}), "('Images/p1right1.png')\n", (2105, 2128), False, 'import pygame\n'), ((2130, 2169), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right.png"""'], {}), "('Images/p1right.png')\n", (2147, 2169), False, 'import pygame\n'), ((2197, 2242), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1rightshield.png"""'], {}), "('Images/p1rightshield.png')\n", (2214, 2242), False, 'import pygame\n'), ((2244, 2290), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right1shield.png"""'], {}), "('Images/p1right1shield.png')\n", (2261, 2290), False, 'import pygame\n'), ((2312, 2358), 'pygame.image.load', 
'pygame.image.load', (['"""Images/p1right2shield.png"""'], {}), "('Images/p1right2shield.png')\n", (2329, 2358), False, 'import pygame\n'), ((2380, 2426), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right3shield.png"""'], {}), "('Images/p1right3shield.png')\n", (2397, 2426), False, 'import pygame\n'), ((2428, 2474), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right2shield.png"""'], {}), "('Images/p1right2shield.png')\n", (2445, 2474), False, 'import pygame\n'), ((2496, 2542), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right1shield.png"""'], {}), "('Images/p1right1shield.png')\n", (2513, 2542), False, 'import pygame\n'), ((2544, 2589), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1rightshield.png"""'], {}), "('Images/p1rightshield.png')\n", (2561, 2589), False, 'import pygame\n'), ((2611, 2650), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front.png"""'], {}), "('Images/p1front.png')\n", (2628, 2650), False, 'import pygame\n'), ((2652, 2692), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front1.png"""'], {}), "('Images/p1front1.png')\n", (2669, 2692), False, 'import pygame\n'), ((2714, 2754), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front2.png"""'], {}), "('Images/p1front2.png')\n", (2731, 2754), False, 'import pygame\n'), ((2776, 2816), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front3.png"""'], {}), "('Images/p1front3.png')\n", (2793, 2816), False, 'import pygame\n'), ((2818, 2858), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front2.png"""'], {}), "('Images/p1front2.png')\n", (2835, 2858), False, 'import pygame\n'), ((2880, 2920), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front1.png"""'], {}), "('Images/p1front1.png')\n", (2897, 2920), False, 'import pygame\n'), ((2922, 2961), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front.png"""'], {}), "('Images/p1front.png')\n", (2939, 2961), False, 'import pygame\n'), ((2989, 3034), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1frontshield.png"""'], {}), "('Images/p1frontshield.png')\n", (3006, 3034), False, 'import pygame\n'), ((3036, 3082), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front1shield.png"""'], {}), "('Images/p1front1shield.png')\n", (3053, 3082), False, 'import pygame\n'), ((3104, 3150), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front2shield.png"""'], {}), "('Images/p1front2shield.png')\n", (3121, 3150), False, 'import pygame\n'), ((3172, 3218), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front3shield.png"""'], {}), "('Images/p1front3shield.png')\n", (3189, 3218), False, 'import pygame\n'), ((3220, 3266), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front2shield.png"""'], {}), "('Images/p1front2shield.png')\n", (3237, 3266), False, 'import pygame\n'), ((3288, 3334), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front1shield.png"""'], {}), "('Images/p1front1shield.png')\n", (3305, 3334), False, 'import pygame\n'), ((3336, 3381), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1frontshield.png"""'], {}), "('Images/p1frontshield.png')\n", (3353, 3381), False, 'import pygame\n'), ((3402, 3440), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1back.png"""'], {}), "('Images/p1back.png')\n", (3419, 3440), False, 'import pygame\n'), ((3442, 3481), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1back1.png"""'], {}), "('Images/p1back1.png')\n", (3459, 3481), False, 'import pygame\n'), ((3503, 3542), 
'pygame.image.load', 'pygame.image.load', (['"""Images/p1back2.png"""'], {}), "('Images/p1back2.png')\n", (3520, 3542), False, 'import pygame\n'), ((3564, 3603), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1back3.png"""'], {}), "('Images/p1back3.png')\n", (3581, 3603), False, 'import pygame\n'), ((3605, 3644), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1back2.png"""'], {}), "('Images/p1back2.png')\n", (3622, 3644), False, 'import pygame\n'), ((3666, 3705), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1back1.png"""'], {}), "('Images/p1back1.png')\n", (3683, 3705), False, 'import pygame\n'), ((3707, 3745), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1back.png"""'], {}), "('Images/p1back.png')\n", (3724, 3745), False, 'import pygame\n'), ((3772, 3816), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1backshield.png"""'], {}), "('Images/p1backshield.png')\n", (3789, 3816), False, 'import pygame\n'), ((3818, 3863), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1back1shield.png"""'], {}), "('Images/p1back1shield.png')\n", (3835, 3863), False, 'import pygame\n'), ((3884, 3929), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1back2shield.png"""'], {}), "('Images/p1back2shield.png')\n", (3901, 3929), False, 'import pygame\n'), ((3950, 3995), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1back3shield.png"""'], {}), "('Images/p1back3shield.png')\n", (3967, 3995), False, 'import pygame\n'), ((3997, 4042), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1back2shield.png"""'], {}), "('Images/p1back2shield.png')\n", (4014, 4042), False, 'import pygame\n'), ((4063, 4108), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1back1shield.png"""'], {}), "('Images/p1back1shield.png')\n", (4080, 4108), False, 'import pygame\n'), ((4110, 4154), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1backshield.png"""'], {}), "('Images/p1backshield.png')\n", (4127, 4154), False, 'import pygame\n'), ((4175, 4213), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2left.png"""'], {}), "('Images/p2left.png')\n", (4192, 4213), False, 'import pygame\n'), ((4215, 4254), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2left1.png"""'], {}), "('Images/p2left1.png')\n", (4232, 4254), False, 'import pygame\n'), ((4275, 4314), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2left2.png"""'], {}), "('Images/p2left2.png')\n", (4292, 4314), False, 'import pygame\n'), ((4335, 4374), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2left3.png"""'], {}), "('Images/p2left3.png')\n", (4352, 4374), False, 'import pygame\n'), ((4376, 4415), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2left2.png"""'], {}), "('Images/p2left2.png')\n", (4393, 4415), False, 'import pygame\n'), ((4436, 4475), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2left1.png"""'], {}), "('Images/p2left1.png')\n", (4453, 4475), False, 'import pygame\n'), ((4477, 4515), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2left.png"""'], {}), "('Images/p2left.png')\n", (4494, 4515), False, 'import pygame\n'), ((4537, 4576), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right.png"""'], {}), "('Images/p2right.png')\n", (4554, 4576), False, 'import pygame\n'), ((4578, 4618), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right1.png"""'], {}), "('Images/p2right1.png')\n", (4595, 4618), False, 'import pygame\n'), ((4640, 4680), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right2.png"""'], 
{}), "('Images/p2right2.png')\n", (4657, 4680), False, 'import pygame\n'), ((4702, 4742), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right3.png"""'], {}), "('Images/p2right3.png')\n", (4719, 4742), False, 'import pygame\n'), ((4744, 4784), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right2.png"""'], {}), "('Images/p2right2.png')\n", (4761, 4784), False, 'import pygame\n'), ((4806, 4846), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right1.png"""'], {}), "('Images/p2right1.png')\n", (4823, 4846), False, 'import pygame\n'), ((4848, 4887), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right.png"""'], {}), "('Images/p2right.png')\n", (4865, 4887), False, 'import pygame\n'), ((4909, 4948), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front.png"""'], {}), "('Images/p2front.png')\n", (4926, 4948), False, 'import pygame\n'), ((4950, 4990), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front1.png"""'], {}), "('Images/p2front1.png')\n", (4967, 4990), False, 'import pygame\n'), ((5012, 5052), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front2.png"""'], {}), "('Images/p2front2.png')\n", (5029, 5052), False, 'import pygame\n'), ((5074, 5114), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front3.png"""'], {}), "('Images/p2front3.png')\n", (5091, 5114), False, 'import pygame\n'), ((5116, 5156), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front2.png"""'], {}), "('Images/p2front2.png')\n", (5133, 5156), False, 'import pygame\n'), ((5178, 5218), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front1.png"""'], {}), "('Images/p2front1.png')\n", (5195, 5218), False, 'import pygame\n'), ((5220, 5259), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front.png"""'], {}), "('Images/p2front.png')\n", (5237, 5259), False, 'import pygame\n'), ((5280, 5318), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back.png"""'], {}), "('Images/p2back.png')\n", (5297, 5318), False, 'import pygame\n'), ((5320, 5359), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back1.png"""'], {}), "('Images/p2back1.png')\n", (5337, 5359), False, 'import pygame\n'), ((5380, 5419), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back2.png"""'], {}), "('Images/p2back2.png')\n", (5397, 5419), False, 'import pygame\n'), ((5440, 5479), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back3.png"""'], {}), "('Images/p2back3.png')\n", (5457, 5479), False, 'import pygame\n'), ((5481, 5520), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back2.png"""'], {}), "('Images/p2back2.png')\n", (5498, 5520), False, 'import pygame\n'), ((5541, 5580), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back1.png"""'], {}), "('Images/p2back1.png')\n", (5558, 5580), False, 'import pygame\n'), ((5582, 5620), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back.png"""'], {}), "('Images/p2back.png')\n", (5599, 5620), False, 'import pygame\n'), ((5647, 5691), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2leftshield.png"""'], {}), "('Images/p2leftshield.png')\n", (5664, 5691), False, 'import pygame\n'), ((5693, 5738), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2left1shield.png"""'], {}), "('Images/p2left1shield.png')\n", (5710, 5738), False, 'import pygame\n'), ((5759, 5804), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2left2shield.png"""'], {}), "('Images/p2left2shield.png')\n", (5776, 5804), False, 'import pygame\n'), ((5825, 5870), 
'pygame.image.load', 'pygame.image.load', (['"""Images/p2left3shield.png"""'], {}), "('Images/p2left3shield.png')\n", (5842, 5870), False, 'import pygame\n'), ((5872, 5917), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2left2shield.png"""'], {}), "('Images/p2left2shield.png')\n", (5889, 5917), False, 'import pygame\n'), ((5938, 5983), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2left1shield.png"""'], {}), "('Images/p2left1shield.png')\n", (5955, 5983), False, 'import pygame\n'), ((5985, 6029), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2leftshield.png"""'], {}), "('Images/p2leftshield.png')\n", (6002, 6029), False, 'import pygame\n'), ((6057, 6102), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2rightshield.png"""'], {}), "('Images/p2rightshield.png')\n", (6074, 6102), False, 'import pygame\n'), ((6104, 6150), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right1shield.png"""'], {}), "('Images/p2right1shield.png')\n", (6121, 6150), False, 'import pygame\n'), ((6172, 6218), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right2shield.png"""'], {}), "('Images/p2right2shield.png')\n", (6189, 6218), False, 'import pygame\n'), ((6240, 6286), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right3shield.png"""'], {}), "('Images/p2right3shield.png')\n", (6257, 6286), False, 'import pygame\n'), ((6288, 6334), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right2shield.png"""'], {}), "('Images/p2right2shield.png')\n", (6305, 6334), False, 'import pygame\n'), ((6356, 6402), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right1shield.png"""'], {}), "('Images/p2right1shield.png')\n", (6373, 6402), False, 'import pygame\n'), ((6404, 6449), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2rightshield.png"""'], {}), "('Images/p2rightshield.png')\n", (6421, 6449), False, 'import pygame\n'), ((6477, 6522), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2frontshield.png"""'], {}), "('Images/p2frontshield.png')\n", (6494, 6522), False, 'import pygame\n'), ((6524, 6570), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front1shield.png"""'], {}), "('Images/p2front1shield.png')\n", (6541, 6570), False, 'import pygame\n'), ((6592, 6638), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front2shield.png"""'], {}), "('Images/p2front2shield.png')\n", (6609, 6638), False, 'import pygame\n'), ((6660, 6706), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front3shield.png"""'], {}), "('Images/p2front3shield.png')\n", (6677, 6706), False, 'import pygame\n'), ((6708, 6754), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front2shield.png"""'], {}), "('Images/p2front2shield.png')\n", (6725, 6754), False, 'import pygame\n'), ((6776, 6822), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front1shield.png"""'], {}), "('Images/p2front1shield.png')\n", (6793, 6822), False, 'import pygame\n'), ((6824, 6869), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2frontshield.png"""'], {}), "('Images/p2frontshield.png')\n", (6841, 6869), False, 'import pygame\n'), ((6896, 6940), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2backshield.png"""'], {}), "('Images/p2backshield.png')\n", (6913, 6940), False, 'import pygame\n'), ((6942, 6987), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back1shield.png"""'], {}), "('Images/p2back1shield.png')\n", (6959, 6987), False, 'import pygame\n'), ((7008, 7053), 'pygame.image.load', 'pygame.image.load', 
(['"""Images/p2back2shield.png"""'], {}), "('Images/p2back2shield.png')\n", (7025, 7053), False, 'import pygame\n'), ((7074, 7119), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back3shield.png"""'], {}), "('Images/p2back3shield.png')\n", (7091, 7119), False, 'import pygame\n'), ((7121, 7166), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back2shield.png"""'], {}), "('Images/p2back2shield.png')\n", (7138, 7166), False, 'import pygame\n'), ((7187, 7232), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back1shield.png"""'], {}), "('Images/p2back1shield.png')\n", (7204, 7232), False, 'import pygame\n'), ((7234, 7278), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2backshield.png"""'], {}), "('Images/p2backshield.png')\n", (7251, 7278), False, 'import pygame\n'), ((7397, 7421), 'pygame.Surface', 'pygame.Surface', (['[24, 28]'], {}), '([24, 28])\n', (7411, 7421), False, 'import pygame\n'), ((19857, 19909), 'pygame.Surface', 'pygame.Surface', (['[width, height]', 'pygame.SRCALPHA', '(32)'], {}), '([width, height], pygame.SRCALPHA, 32)\n', (19871, 19909), False, 'import pygame\n'), ((20112, 20155), 'pygame.image.load', 'pygame.image.load', (['"""Images/superspeed1.png"""'], {}), "('Images/superspeed1.png')\n", (20129, 20155), False, 'import pygame\n'), ((20157, 20200), 'pygame.image.load', 'pygame.image.load', (['"""Images/superspeed2.png"""'], {}), "('Images/superspeed2.png')\n", (20174, 20200), False, 'import pygame\n'), ((20216, 20259), 'pygame.image.load', 'pygame.image.load', (['"""Images/superspeed3.png"""'], {}), "('Images/superspeed3.png')\n", (20233, 20259), False, 'import pygame\n'), ((20261, 20304), 'pygame.image.load', 'pygame.image.load', (['"""Images/superspeed3.png"""'], {}), "('Images/superspeed3.png')\n", (20278, 20304), False, 'import pygame\n'), ((20320, 20363), 'pygame.image.load', 'pygame.image.load', (['"""Images/superspeed2.png"""'], {}), "('Images/superspeed2.png')\n", (20337, 20363), False, 'import pygame\n'), ((20365, 20408), 'pygame.image.load', 'pygame.image.load', (['"""Images/superspeed1.png"""'], {}), "('Images/superspeed1.png')\n", (20382, 20408), False, 'import pygame\n'), ((20434, 20473), 'pygame.image.load', 'pygame.image.load', (['"""Images/shield1.png"""'], {}), "('Images/shield1.png')\n", (20451, 20473), False, 'import pygame\n'), ((20475, 20514), 'pygame.image.load', 'pygame.image.load', (['"""Images/shield2.png"""'], {}), "('Images/shield2.png')\n", (20492, 20514), False, 'import pygame\n'), ((20544, 20583), 'pygame.image.load', 'pygame.image.load', (['"""Images/shield3.png"""'], {}), "('Images/shield3.png')\n", (20561, 20583), False, 'import pygame\n'), ((20585, 20624), 'pygame.image.load', 'pygame.image.load', (['"""Images/shield3.png"""'], {}), "('Images/shield3.png')\n", (20602, 20624), False, 'import pygame\n'), ((20654, 20693), 'pygame.image.load', 'pygame.image.load', (['"""Images/shield2.png"""'], {}), "('Images/shield2.png')\n", (20671, 20693), False, 'import pygame\n'), ((20695, 20734), 'pygame.image.load', 'pygame.image.load', (['"""Images/shield1.png"""'], {}), "('Images/shield1.png')\n", (20712, 20734), False, 'import pygame\n'), ((20762, 20808), 'pygame.image.load', 'pygame.image.load', (['"""Images2/megabombicon1.png"""'], {}), "('Images2/megabombicon1.png')\n", (20779, 20808), False, 'import pygame\n'), ((20810, 20856), 'pygame.image.load', 'pygame.image.load', (['"""Images2/megabombicon2.png"""'], {}), "('Images2/megabombicon2.png')\n", (20827, 20856), False, 'import pygame\n'), ((20882, 20928), 
'pygame.image.load', 'pygame.image.load', (['"""Images2/megabombicon3.png"""'], {}), "('Images2/megabombicon3.png')\n", (20899, 20928), False, 'import pygame\n'), ((20930, 20976), 'pygame.image.load', 'pygame.image.load', (['"""Images2/megabombicon3.png"""'], {}), "('Images2/megabombicon3.png')\n", (20947, 20976), False, 'import pygame\n'), ((21002, 21048), 'pygame.image.load', 'pygame.image.load', (['"""Images2/megabombicon2.png"""'], {}), "('Images2/megabombicon2.png')\n", (21019, 21048), False, 'import pygame\n'), ((21050, 21096), 'pygame.image.load', 'pygame.image.load', (['"""Images2/megabombicon1.png"""'], {}), "('Images2/megabombicon1.png')\n", (21067, 21096), False, 'import pygame\n'), ((21187, 21232), 'pygame.Surface', 'pygame.Surface', (['[22, 28]', 'pygame.SRCALPHA', '(32)'], {}), '([22, 28], pygame.SRCALPHA, 32)\n', (21201, 21232), False, 'import pygame\n'), ((22768, 22813), 'pygame.Surface', 'pygame.Surface', (['[22, 28]', 'pygame.SRCALPHA', '(32)'], {}), '([22, 28], pygame.SRCALPHA, 32)\n', (22782, 22813), False, 'import pygame\n'), ((8927, 8979), 'pygame.sprite.spritecollide', 'pygame.sprite.spritecollide', (['self', 'self.walls', '(False)'], {}), '(self, self.walls, False)\n', (8954, 8979), False, 'import pygame\n'), ((9602, 9654), 'pygame.sprite.spritecollide', 'pygame.sprite.spritecollide', (['self', 'self.walls', '(False)'], {}), '(self, self.walls, False)\n', (9629, 9654), False, 'import pygame\n'), ((23713, 23750), 'pygame.image.load', 'pygame.image.load', (['"""Images/bomb3.png"""'], {}), "('Images/bomb3.png')\n", (23730, 23750), False, 'import pygame\n'), ((23828, 23870), 'pygame.image.load', 'pygame.image.load', (['"""Images2/megabomb3.png"""'], {}), "('Images2/megabomb3.png')\n", (23845, 23870), False, 'import pygame\n'), ((24002, 24039), 'pygame.image.load', 'pygame.image.load', (['"""Images/bomb2.png"""'], {}), "('Images/bomb2.png')\n", (24019, 24039), False, 'import pygame\n'), ((24117, 24159), 'pygame.image.load', 'pygame.image.load', (['"""Images2/megabomb2.png"""'], {}), "('Images2/megabomb2.png')\n", (24134, 24159), False, 'import pygame\n'), ((24291, 24328), 'pygame.image.load', 'pygame.image.load', (['"""Images/bomb1.png"""'], {}), "('Images/bomb1.png')\n", (24308, 24328), False, 'import pygame\n'), ((24406, 24448), 'pygame.image.load', 'pygame.image.load', (['"""Images2/megabomb1.png"""'], {}), "('Images2/megabomb1.png')\n", (24423, 24448), False, 'import pygame\n'), ((10220, 10265), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1frontshield.png"""'], {}), "('Images/p1frontshield.png')\n", (10237, 10265), False, 'import pygame\n'), ((10756, 10795), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front.png"""'], {}), "('Images/p1front.png')\n", (10773, 10795), False, 'import pygame\n'), ((25091, 25132), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (25108, 25132), False, 'import pygame\n'), ((25202, 25247), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', 'self.rect.y', '(32)', '(32)'], {}), '(self.rect.x, self.rect.y, 32, 32)\n', (25213, 25247), False, 'import pygame\n'), ((11323, 11367), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1backshield.png"""'], {}), "('Images/p1backshield.png')\n", (11340, 11367), False, 'import pygame\n'), ((11857, 11895), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1back.png"""'], {}), "('Images/p1back.png')\n", (11874, 11895), False, 'import pygame\n'), ((14790, 14835), 'pygame.image.load', 'pygame.image.load', 
(['"""Images/p2frontshield.png"""'], {}), "('Images/p2frontshield.png')\n", (14807, 14835), False, 'import pygame\n'), ((15326, 15365), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front.png"""'], {}), "('Images/p2front.png')\n", (15343, 15365), False, 'import pygame\n'), ((25316, 25357), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (25333, 25357), False, 'import pygame\n'), ((25434, 25482), 'pygame.Rect', 'pygame.Rect', (['self.leftcheck', 'self.rect.y', '(32)', '(32)'], {}), '(self.leftcheck, self.rect.y, 32, 32)\n', (25445, 25482), False, 'import pygame\n'), ((25517, 25558), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (25534, 25558), False, 'import pygame\n'), ((25638, 25691), 'pygame.Rect', 'pygame.Rect', (['(self.leftcheck + 16)', 'self.rect.y', '(32)', '(32)'], {}), '(self.leftcheck + 16, self.rect.y, 32, 32)\n', (25649, 25691), False, 'import pygame\n'), ((25759, 25800), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (25776, 25800), False, 'import pygame\n'), ((25878, 25927), 'pygame.Rect', 'pygame.Rect', (['self.rightcheck', 'self.rect.y', '(32)', '(32)'], {}), '(self.rightcheck, self.rect.y, 32, 32)\n', (25889, 25927), False, 'import pygame\n'), ((25962, 26003), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (25979, 26003), False, 'import pygame\n'), ((26084, 26138), 'pygame.Rect', 'pygame.Rect', (['(self.rightcheck - 16)', 'self.rect.y', '(32)', '(32)'], {}), '(self.rightcheck - 16, self.rect.y, 32, 32)\n', (26095, 26138), False, 'import pygame\n'), ((26203, 26244), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (26220, 26244), False, 'import pygame\n'), ((26319, 26365), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', 'self.upcheck', '(32)', '(32)'], {}), '(self.rect.x, self.upcheck, 32, 32)\n', (26330, 26365), False, 'import pygame\n'), ((26400, 26441), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (26417, 26441), False, 'import pygame\n'), ((26519, 26570), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', '(self.upcheck + 16)', '(32)', '(32)'], {}), '(self.rect.x, self.upcheck + 16, 32, 32)\n', (26530, 26570), False, 'import pygame\n'), ((26637, 26678), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (26654, 26678), False, 'import pygame\n'), ((26755, 26803), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', 'self.downcheck', '(32)', '(32)'], {}), '(self.rect.x, self.downcheck, 32, 32)\n', (26766, 26803), False, 'import pygame\n'), ((26838, 26879), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (26855, 26879), False, 'import pygame\n'), ((26959, 27012), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', '(self.downcheck - 16)', '(32)', '(32)'], {}), '(self.rect.x, self.downcheck - 16, 32, 32)\n', (26970, 27012), False, 'import pygame\n'), ((28124, 28165), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (28141, 28165), False, 'import pygame\n'), ((28235, 28280), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', 'self.rect.y', '(32)', '(32)'], {}), '(self.rect.x, self.rect.y, 32, 32)\n', (28246, 28280), False, 'import 
pygame\n'), ((12422, 12466), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1leftshield.png"""'], {}), "('Images/p1leftshield.png')\n", (12439, 12466), False, 'import pygame\n'), ((12956, 12994), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left.png"""'], {}), "('Images/p1left.png')\n", (12973, 12994), False, 'import pygame\n'), ((15893, 15937), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2backshield.png"""'], {}), "('Images/p2backshield.png')\n", (15910, 15937), False, 'import pygame\n'), ((16427, 16465), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back.png"""'], {}), "('Images/p2back.png')\n", (16444, 16465), False, 'import pygame\n'), ((28349, 28390), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (28366, 28390), False, 'import pygame\n'), ((28467, 28515), 'pygame.Rect', 'pygame.Rect', (['self.leftcheck', 'self.rect.y', '(32)', '(32)'], {}), '(self.leftcheck, self.rect.y, 32, 32)\n', (28478, 28515), False, 'import pygame\n'), ((28550, 28591), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (28567, 28591), False, 'import pygame\n'), ((28673, 28726), 'pygame.Rect', 'pygame.Rect', (['(self.leftcheck + 16)', 'self.rect.y', '(32)', '(32)'], {}), '(self.leftcheck + 16, self.rect.y, 32, 32)\n', (28684, 28726), False, 'import pygame\n'), ((29270, 29311), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (29287, 29311), False, 'import pygame\n'), ((29389, 29438), 'pygame.Rect', 'pygame.Rect', (['self.rightcheck', 'self.rect.y', '(32)', '(32)'], {}), '(self.rightcheck, self.rect.y, 32, 32)\n', (29400, 29438), False, 'import pygame\n'), ((29473, 29514), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (29490, 29514), False, 'import pygame\n'), ((29597, 29651), 'pygame.Rect', 'pygame.Rect', (['(self.rightcheck - 16)', 'self.rect.y', '(32)', '(32)'], {}), '(self.rightcheck - 16, self.rect.y, 32, 32)\n', (29608, 29651), False, 'import pygame\n'), ((30197, 30238), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (30214, 30238), False, 'import pygame\n'), ((30313, 30359), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', 'self.upcheck', '(32)', '(32)'], {}), '(self.rect.x, self.upcheck, 32, 32)\n', (30324, 30359), False, 'import pygame\n'), ((30394, 30435), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (30411, 30435), False, 'import pygame\n'), ((30515, 30566), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', '(self.upcheck + 16)', '(32)', '(32)'], {}), '(self.rect.x, self.upcheck + 16, 32, 32)\n', (30526, 30566), False, 'import pygame\n'), ((31099, 31140), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (31116, 31140), False, 'import pygame\n'), ((31217, 31265), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', 'self.downcheck', '(32)', '(32)'], {}), '(self.rect.x, self.downcheck, 32, 32)\n', (31228, 31265), False, 'import pygame\n'), ((31300, 31341), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (31317, 31341), False, 'import pygame\n'), ((31423, 31476), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', '(self.downcheck - 16)', '(32)', '(32)'], {}), '(self.rect.x, self.downcheck 
- 16, 32, 32)\n', (31434, 31476), False, 'import pygame\n'), ((13522, 13567), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1rightshield.png"""'], {}), "('Images/p1rightshield.png')\n", (13539, 13567), False, 'import pygame\n'), ((14058, 14097), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right.png"""'], {}), "('Images/p1right.png')\n", (14075, 14097), False, 'import pygame\n'), ((16992, 17036), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2leftshield.png"""'], {}), "('Images/p2leftshield.png')\n", (17009, 17036), False, 'import pygame\n'), ((17526, 17564), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2left.png"""'], {}), "('Images/p2left.png')\n", (17543, 17564), False, 'import pygame\n'), ((28809, 28850), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (28826, 28850), False, 'import pygame\n'), ((28934, 28987), 'pygame.Rect', 'pygame.Rect', (['(self.leftcheck - 32)', 'self.rect.y', '(32)', '(32)'], {}), '(self.leftcheck - 32, self.rect.y, 32, 32)\n', (28945, 28987), False, 'import pygame\n'), ((29024, 29065), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (29041, 29065), False, 'import pygame\n'), ((29149, 29202), 'pygame.Rect', 'pygame.Rect', (['(self.leftcheck - 16)', 'self.rect.y', '(32)', '(32)'], {}), '(self.leftcheck - 16, self.rect.y, 32, 32)\n', (29160, 29202), False, 'import pygame\n'), ((29735, 29776), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (29752, 29776), False, 'import pygame\n'), ((29861, 29915), 'pygame.Rect', 'pygame.Rect', (['(self.rightcheck + 32)', 'self.rect.y', '(32)', '(32)'], {}), '(self.rightcheck + 32, self.rect.y, 32, 32)\n', (29872, 29915), False, 'import pygame\n'), ((29952, 29993), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (29969, 29993), False, 'import pygame\n'), ((30078, 30132), 'pygame.Rect', 'pygame.Rect', (['(self.rightcheck + 16)', 'self.rect.y', '(32)', '(32)'], {}), '(self.rightcheck + 16, self.rect.y, 32, 32)\n', (30089, 30132), False, 'import pygame\n'), ((30647, 30688), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (30664, 30688), False, 'import pygame\n'), ((30770, 30821), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', '(self.upcheck - 32)', '(32)', '(32)'], {}), '(self.rect.x, self.upcheck - 32, 32, 32)\n', (30781, 30821), False, 'import pygame\n'), ((30858, 30899), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (30875, 30899), False, 'import pygame\n'), ((30981, 31032), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', '(self.upcheck - 16)', '(32)', '(32)'], {}), '(self.rect.x, self.upcheck - 16, 32, 32)\n', (30992, 31032), False, 'import pygame\n'), ((31559, 31600), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (31576, 31600), False, 'import pygame\n'), ((31684, 31737), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', '(self.downcheck + 32)', '(32)', '(32)'], {}), '(self.rect.x, self.downcheck + 32, 32, 32)\n', (31695, 31737), False, 'import pygame\n'), ((31774, 31815), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (31791, 31815), False, 'import pygame\n'), ((31899, 31952), 'pygame.Rect', 
'pygame.Rect', (['self.rect.x', '(self.downcheck + 16)', '(32)', '(32)'], {}), '(self.rect.x, self.downcheck + 16, 32, 32)\n', (31910, 31952), False, 'import pygame\n'), ((18092, 18137), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2rightshield.png"""'], {}), "('Images/p2rightshield.png')\n", (18109, 18137), False, 'import pygame\n'), ((18628, 18667), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right.png"""'], {}), "('Images/p2right.png')\n", (18645, 18667), False, 'import pygame\n')] |
NSLS-II/installSynApps | installSynApps/data_model/install_config.py | 0f8e978939715bbba1a064ead3044fa36215cb09 | """A file containing representations of install configurations.
The core data representation for installSynApps. An InstallConfiguration object
is parsed from a configuration, and is then used throughout the build process.
InjectorFile objects are used for representing text that needs to be injected
into configuration files prior to builds.
"""
import os
import installSynApps
from installSynApps.data_model.install_module import InstallModule as IM
class InstallConfiguration:
"""
Class that represents an Install Configuration for installSynApps
It stores the top level install_location, the path to the configuration files,
any OS specific configurations, and the actual list of modules that will be
installed.
Attributes
----------
install_location : str
path to top level install location
path_to_configure : str
path to configure folder of installSynApps
    modules : List of InstallModule
list of InstallModule objects representing the modules that will be installed
base_path : str
abs path to install location of EPICS base
support_path : str
abs path to install location of EPICS support modules
ad_path : str
abs path to install location of EPICS area detector
motor_path : str
abs path to install location of EPICS motor
module_map : dict of str -> int
Dictionary storing relation of module names to build index
injector_files : list of InjectorFile
list of injector files loaded by install configuration
build_flags : list of list of str
list of macro-value pairs enforced at build time
"""
def __init__(self, install_location, path_to_configure):
"""Constructor for the InstallConfiguration object
"""
# Paths to configure and output locations
self.path_to_configure = path_to_configure
self.install_location = os.path.abspath(install_location)
# Modules loaded into install config
self.modules = []
# Dict that maps module name to index in module list for easier searching.
self.module_map = {}
self.injector_files = []
self.build_flags = []
# Paths to the three install location paths used for relative path correction
self.base_path = None
self.support_path = None
self.ad_path = None
self.motor_path = None
self.extensions_path = None
def is_install_valid(self):
"""Function that checks if given install location is valid
Parameters
----------
self : InstallConfiguration
Self object
Returns
-------
bool
True if install location is valid, false otherwise
str
Error message if applicable, None otherwise
"""
valid = True
message = None
target = self.install_location
if not os.path.exists(target):
target = os.path.dirname(self.install_location)
if not os.path.exists(target):
valid = False
message = 'Install location and parent directory do not exist'
elif not os.access(target, os.W_OK | os.X_OK):
valid = False
message = 'Permission Error: {}'.format(target)
return valid, message
def add_module(self, module):
"""Function that adds a module to the InstallConfiguration module list
        First checks that the parameter is a valid InstallModule, then sets its absolute path.
        If it is one of the key path modules to track (base, support, areaDetector, motor,
        extensions), the appropriate variable is also set. Finally, the module is added to the
        module map, which keeps track of each module's position in the list/build order.
Parameters
----------
module : InstallModule
new installation module being added.
"""
if isinstance(module, IM):
# Updates the abs path
module.abs_path = self.convert_path_abs(module.rel_path)
# Key paths to track
if module.name == "EPICS_BASE":
self.base_path = module.abs_path
elif module.name == "SUPPORT":
self.support_path = module.abs_path
elif module.name == "AREA_DETECTOR":
self.ad_path = module.abs_path
elif module.name == "MOTOR":
self.motor_path = module.abs_path
elif module.name == "EXTENSIONS":
self.extensions_path = module.abs_path
self.module_map[module.name] = len(self.modules)
self.modules.append(module)
def add_injector_file(self, name, contents, target):
"""Function that adds a new injector file to the install_config object
Parameters
----------
name : str
name of the file
contents : str
The contents of the file
target : str
The target location file into which contents will be injected.
"""
new_injector = InjectorFile(self.path_to_configure, name, contents, target)
self.injector_files.append(new_injector)
def add_macros(self, macro_list):
"""Function that adds macro-value pairs to a list of macros
Parameters
----------
macro_list : list of [str, str]
list of new macros to append
"""
self.build_flags = self.build_flags + macro_list
def get_module_list(self):
"""Function that gets the list of modules in the configuration
Returns
-------
List
self.modules - list of modules to install in this install configuration
"""
return self.modules
def get_module_by_name(self, name):
"""Function that returns install module object given module name
        Uses the module name as a key in a dictionary to return a reference to the given module object.
Parameters
----------
name : str
Module name
Returns
-------
obj - InstallModule
Return matching module, or None if not found.
"""
if name in self.module_map.keys():
return self.modules[self.module_map[name]]
else:
return None
def get_module_build_index(self, name):
"""Function that returns the index in the build order for the module
Used for ensuring dependencies are built before lower level packages.
Parameters
----------
name : str
Module name
Returns
-------
int
Index of module in build order if found, otherwise -1
"""
if name in self.module_map.keys():
return self.module_map[name]
else:
return -1
def get_core_version(self):
"""Funciton that returns selected version of ADCore
"""
return self.get_module_by_name('ADCORE').version
def swap_module_positions(self, module_A, module_B):
"""Swaps build order of modules
Used to ensure dependencies are built before lower level packages
Parameters
----------
module_A : str
Name of first module
module_B : str
Name of second module
"""
index_A = self.get_module_build_index(module_A)
index_B = self.get_module_build_index(module_B)
if index_A >= 0 and index_B >= 0:
temp_A = self.get_module_by_name(module_B)
temp_B = self.get_module_by_name(module_A)
self.modules[index_A] = temp_A
self.modules[index_B] = temp_B
self.module_map[module_A] = index_B
self.module_map[module_B] = index_A
def convert_path_abs(self, rel_path):
"""Function that converts a given modules relative path to an absolute path
        If the macro name matches one of the tracked install locations, it is replaced with that module's absolute path.
Parameters
----------
rel_path : str
The relative installation path for the given module
Returns
-------
str
The absolute installation path for the module. (Macros are replaced)
"""
temp = rel_path.split('/', 1)[-1]
if "$(INSTALL)" in rel_path and self.install_location != None:
return installSynApps.join_path(self.install_location, temp)
elif "$(EPICS_BASE)" in rel_path and self.base_path != None:
return installSynApps.join_path(self.base_path, temp)
elif "$(SUPPORT)" in rel_path and self.support_path != None:
return installSynApps.join_path(self.support_path, temp)
elif "$(AREA_DETECTOR)" in rel_path and self.ad_path != None:
return installSynApps.join_path(self.ad_path, temp)
elif "$(MOTOR)" in rel_path and self.motor_path != None:
return installSynApps.join_path(self.motor_path, temp)
elif "$(EXTENSIONS)" in rel_path and self.extensions_path != None:
return installSynApps.join_path(self.extensions_path, temp)
elif "$(" in rel_path:
macro_part = rel_path.split(')')[0]
rel_to = macro_part.split('(')[1]
rel_to_module = self.get_module_by_name(rel_to)
if rel_to_module is not None:
return installSynApps.join_path(rel_to_module.abs_path, temp)
return rel_path
def print_installation_info(self, fp = None):
"""Function that prints installation info
Prints list of all modules including clone/build/package information
Parameters
----------
fp = None : file pointer
Optional pointer to an external log file
"""
        if fp is None:
print(self.get_printable_string().strip())
else:
fp.write(self.get_printable_string())
def get_printable_string(self):
"""Function that gets a toString for an InstallConfigurations
Returns
-------
str
A string representing the install configuration
"""
out = "--------------------------------\n"
out = out + "Install Location = {}\n".format(self.install_location)
out = out + "This Install Config is saved at {}\n".format(self.path_to_configure)
for module in self.modules:
if module.clone == 'YES':
out = out + module.get_printable_string()
return out
def get_module_names_list(self):
"""Function that gets list of modules being built
Returns
-------
list of str
list of module names that are set to build
"""
out = []
for module in self.modules:
if module.build == 'YES':
out.append(module.name)
return out
class InjectorFile:
"""Class that represents an injector file and stores its name, contents, and target
Injector file classes are used to represent data that needs to be appended to target files
at build time. Used to add to commonPlugins, commonPlugin_settings, etc.
    TODO: This class can probably be abstracted into a simpler data structure (since it's used as a struct anyway)
Attributes
----------
path_to_configure : str
path to the configure dir that houses this injector file
name : str
name of the file
contents : str
The contents of the file
target : str
The target location file into which contents will be injected.
"""
def __init__(self, path_to_configure, name, contents, target):
"""Constructor of InjectorFile class
"""
self.path_to_configure = path_to_configure
self.name = name
self.contents = contents
self.target = target
def generate_default_install_config(target_install_loc='/epics', update_versions=False, with_pva=True):
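    """Function that generates a default install configuration

    Builds an InstallConfiguration with EPICS base, the synApps support
    modules, and a set of areaDetector modules, then attaches the common
    plugin/autosave injector files.

    Parameters
    ----------
    target_install_loc : str
        top level install location for the configuration
    update_versions : bool
        when True, sync all module version tags before returning
    with_pva : bool
        when True, include NDPva plugin configuration in the injected files

    Returns
    -------
    InstallConfiguration
        the generated default install configuration
    """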
config = InstallConfiguration(target_install_loc, None)
y = 'YES'
n = 'NO'
gu = 'GIT_URL'
wu = 'WGET_URL'
base_org = 'https://github.com/epics-base/'
syn_org = 'https://github.com/EPICS-synApps/'
mod_org = 'https://github.com/epics-modules/'
ad_org = 'https://github.com/areaDetector/'
seq_rel = 'http://www-csr.bessy.de/control/SoftDist/sequencer/releases/'
psi_org = 'https://github.com/paulscherrerinstitute/'
# Add core modules that will generally always be built
config.add_module(IM("EPICS_BASE", "R7.0.3", "$(INSTALL)/base", gu, base_org, "epics-base", y, y, y))
config.add_module(IM("SUPPORT", "R6-1", "$(INSTALL)/support", gu, syn_org, "support", y, y, n))
config.add_module(IM("CONFIGURE", "R6-1", "$(SUPPORT)/configure", gu, syn_org, "configure", y, y, n))
config.add_module(IM("UTILS", "R6-1", "$(SUPPORT)/utils", gu, syn_org, "utils", y, y, n))
config.add_module(IM("SNCSEQ", "2.2.8", "$(SUPPORT)/seq", wu, seq_rel, "seq-2.2.8.tar.gz", y, y, y))
config.add_module(IM("IPAC", "2.15", "$(SUPPORT)/ipac", gu, mod_org, "ipac", y, y, y))
config.add_module(IM("ASYN", "R4-37", "$(SUPPORT)/asyn", gu, mod_org, "asyn", y, y, y))
config.add_module(IM("AUTOSAVE", "R5-10", "$(SUPPORT)/autosave", gu, mod_org, "autosave", y, y, y))
config.add_module(IM("BUSY", "R1-7-2", "$(SUPPORT)/busy", gu, mod_org, "busy", y, y, y))
config.add_module(IM("CALC", "R3-7-3", "$(SUPPORT)/calc", gu, mod_org, "calc", y, y, y))
config.add_module(IM("DEVIOCSTATS", "master", "$(SUPPORT)/iocStats", gu, mod_org, "iocStats", y, y, y))
config.add_module(IM("SSCAN", "R2-11-3", "$(SUPPORT)/sscan", gu, mod_org, "sscan", y, y, y))
config.add_module(IM("IPUNIDIG", "R2-11", "$(SUPPORT)/ipUnidig", gu, mod_org, "ipUnidig", y, y, y))
# Some modules that are commonly needed
config.add_module(IM("XSPRESS3", "master", "$(SUPPORT)/xspress3", gu, mod_org, "xspress3", y, y, y))
config.add_module(IM("MOTOR", "R7-1", "$(SUPPORT)/motor", gu, mod_org, "motor", y, y, y))
config.add_module(IM("QUADEM", "R9-3", "$(SUPPORT)/quadEM", gu, mod_org, "quadEM", y, y, y))
config.add_module(IM("STREAM", "2.8.10", "$(SUPPORT)/stream", gu, psi_org, "StreamDevice", y, y, y))
# AreaDetector and commonly used drivers
config.add_module(IM("AREA_DETECTOR", "R3-8", "$(SUPPORT)/areaDetector", gu, ad_org, "areaDetector", y, y, n))
config.add_module(IM("ADSUPPORT", "R1-9", "$(AREA_DETECTOR)/ADSupport", gu, ad_org, "ADSupport", y, y, y))
config.add_module(IM("ADCORE", "R3-8", "$(AREA_DETECTOR)/ADCore", gu, ad_org, "ADCore", y, y, y))
config.add_module(IM("ADPERKINELMER", "master", "$(AREA_DETECTOR)/ADPerkinElmer", gu, ad_org, "ADPerkinElmer", n, n, n))
config.add_module(IM("ADGENICAM", "master", "$(AREA_DETECTOR)/ADGenICam", gu, ad_org, "ADGenICam", n, n, n))
config.add_module(IM("ADANDOR3", "master", "$(AREA_DETECTOR)/ADAndor3", gu, ad_org, "ADAndor3", n, n, n))
config.add_module(IM("ADPROSILICA", "R2-5", "$(AREA_DETECTOR)/ADProsilica", gu, ad_org, "ADProsilica", n, n, n))
config.add_module(IM("ADSIMDETECTOR", "master", "$(AREA_DETECTOR)/ADSimDetector", gu, ad_org, "ADSimDetector", n, n, n))
config.add_module(IM("ADPILATUS", "R2-8", "$(AREA_DETECTOR)/ADPilatus", gu, ad_org, "ADPilatus", n, n, n))
config.add_module(IM("ADMERLIN", "master", "$(AREA_DETECTOR)/ADMerlin", gu, ad_org, "ADMerlin", n, n, n))
config.add_module(IM("ADARAVIS", "master", "$(AREA_DETECTOR)/ADAravis", gu, ad_org, "ADAravis", n, n, n))
config.add_module(IM("ADEIGER", "R2-6", "$(AREA_DETECTOR)/ADEiger", gu, ad_org, "ADEiger", n, n, n))
config.add_module(IM("ADVIMBA", "master", "$(AREA_DETECTOR)/ADVimba", gu, ad_org, "ADVimba", n, n, n))
config.add_module(IM("ADPOINTGREY", "master", "$(AREA_DETECTOR)/ADPointGrey", gu, ad_org, "ADPointGrey", n, n, n))
config.add_module(IM("ADANDOR", "R2-8", "$(AREA_DETECTOR)/ADAndor", gu, ad_org, "ADAndor", n, n, n))
config.add_module(IM("ADDEXELA", "R2-3", "$(AREA_DETECTOR)/ADDexela", gu, ad_org, "ADDexela", n, n, n))
config.add_module(IM("ADMYTHEN", "master", "$(AREA_DETECTOR)/ADMythen", gu, ad_org, "ADMythen", n, n, n))
config.add_module(IM("ADURL", "master", "$(AREA_DETECTOR)/ADURL", gu, ad_org, "ADURL", n, n, n))
common_plugins_str = 'dbLoadRecords("$(DEVIOCSTATS)/db/iocAdminSoft.db", "IOC=$(PREFIX)")\n'
autosave_str = 'file "sseqRecord_settings.req", P=$(P), S=AcquireSequence\n'
if with_pva:
autosave_str += 'file "NDPva_settings.req", P=$(P), R=Pva1:\n'
common_plugins_str += 'NDPvaConfigure("PVA1", $(QSIZE), 0, "$(PORT)", 0, $(PREFIX)Pva1:Image, 0, 0, 0)\n' \
'dbLoadRecords("NDPva.template", "P=$(PREFIX),R=Pva1:, PORT=PVA1,ADDR=0,TIMEOUT=1,NDARRAY_PORT=$(PORT)")\n' \
'# Must start PVA server if this is enabled\n' \
            'startPVAServer\n'
config.add_injector_file('PLUGIN_CONFIG', common_plugins_str, '$(AREA_DETECTOR)/ADCore/iocBoot/EXAMPLE_commonPlugins.cmd')
config.add_injector_file('AUTOSAVE_CONFIG', autosave_str, '$(AREA_DETECTOR)/ADCore/iocBoot/EXAMPLE_commonPlugin_settings.req')
if update_versions:
installSynApps.sync_all_module_tags(config)
return config
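# Minimal usage sketch (hypothetical, not part of the upstream module): build
# the default configuration and print the modules marked for cloning.
if __name__ == '__main__':
    default_config = generate_default_install_config('/epics')
    default_config.print_installation_info()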
| [((1937, 1970), 'os.path.abspath', 'os.path.abspath', (['install_location'], {}), '(install_location)\n', (1952, 1970), False, 'import os\n'), ((12720, 12806), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""EPICS_BASE"""', '"""R7.0.3"""', '"""$(INSTALL)/base"""', 'gu', 'base_org', '"""epics-base"""', 'y', 'y', 'y'], {}), "('EPICS_BASE', 'R7.0.3', '$(INSTALL)/base', gu, base_org, 'epics-base', y,\n y, y)\n", (12722, 12806), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((12840, 12916), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""SUPPORT"""', '"""R6-1"""', '"""$(INSTALL)/support"""', 'gu', 'syn_org', '"""support"""', 'y', 'y', 'n'], {}), "('SUPPORT', 'R6-1', '$(INSTALL)/support', gu, syn_org, 'support', y, y, n)\n", (12842, 12916), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((12958, 13044), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""CONFIGURE"""', '"""R6-1"""', '"""$(SUPPORT)/configure"""', 'gu', 'syn_org', '"""configure"""', 'y', 'y', 'n'], {}), "('CONFIGURE', 'R6-1', '$(SUPPORT)/configure', gu, syn_org, 'configure', y,\n y, n)\n", (12960, 13044), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((13077, 13147), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""UTILS"""', '"""R6-1"""', '"""$(SUPPORT)/utils"""', 'gu', 'syn_org', '"""utils"""', 'y', 'y', 'n'], {}), "('UTILS', 'R6-1', '$(SUPPORT)/utils', gu, syn_org, 'utils', y, y, n)\n", (13079, 13147), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((13195, 13280), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""SNCSEQ"""', '"""2.2.8"""', '"""$(SUPPORT)/seq"""', 'wu', 'seq_rel', '"""seq-2.2.8.tar.gz"""', 'y', 'y', 'y'], {}), "('SNCSEQ', '2.2.8', '$(SUPPORT)/seq', wu, seq_rel, 'seq-2.2.8.tar.gz', y,\n y, y)\n", (13197, 13280), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((13321, 13388), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""IPAC"""', '"""2.15"""', '"""$(SUPPORT)/ipac"""', 'gu', 'mod_org', '"""ipac"""', 'y', 'y', 'y'], {}), "('IPAC', '2.15', '$(SUPPORT)/ipac', gu, mod_org, 'ipac', y, y, y)\n", (13323, 13388), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((13439, 13507), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ASYN"""', '"""R4-37"""', '"""$(SUPPORT)/asyn"""', 'gu', 'mod_org', '"""asyn"""', 'y', 'y', 'y'], {}), "('ASYN', 'R4-37', '$(SUPPORT)/asyn', gu, mod_org, 'asyn', y, y, y)\n", (13441, 13507), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((13557, 13642), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""AUTOSAVE"""', '"""R5-10"""', '"""$(SUPPORT)/autosave"""', 'gu', 'mod_org', '"""autosave"""', 'y', 'y', 'y'], {}), "('AUTOSAVE', 'R5-10', '$(SUPPORT)/autosave', gu, mod_org, 'autosave', y, y, y\n )\n", (13559, 13642), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((13675, 13744), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""BUSY"""', '"""R1-7-2"""', '"""$(SUPPORT)/busy"""', 'gu', 'mod_org', '"""busy"""', 'y', 'y', 'y'], {}), "('BUSY', 'R1-7-2', '$(SUPPORT)/busy', gu, mod_org, 'busy', y, y, y)\n", (13677, 13744), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((13793, 13862), 
'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""CALC"""', '"""R3-7-3"""', '"""$(SUPPORT)/calc"""', 'gu', 'mod_org', '"""calc"""', 'y', 'y', 'y'], {}), "('CALC', 'R3-7-3', '$(SUPPORT)/calc', gu, mod_org, 'calc', y, y, y)\n", (13795, 13862), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((13911, 13999), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""DEVIOCSTATS"""', '"""master"""', '"""$(SUPPORT)/iocStats"""', 'gu', 'mod_org', '"""iocStats"""', 'y', 'y', 'y'], {}), "('DEVIOCSTATS', 'master', '$(SUPPORT)/iocStats', gu, mod_org, 'iocStats',\n y, y, y)\n", (13913, 13999), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((14029, 14102), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""SSCAN"""', '"""R2-11-3"""', '"""$(SUPPORT)/sscan"""', 'gu', 'mod_org', '"""sscan"""', 'y', 'y', 'y'], {}), "('SSCAN', 'R2-11-3', '$(SUPPORT)/sscan', gu, mod_org, 'sscan', y, y, y)\n", (14031, 14102), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((14147, 14232), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""IPUNIDIG"""', '"""R2-11"""', '"""$(SUPPORT)/ipUnidig"""', 'gu', 'mod_org', '"""ipUnidig"""', 'y', 'y', 'y'], {}), "('IPUNIDIG', 'R2-11', '$(SUPPORT)/ipUnidig', gu, mod_org, 'ipUnidig', y, y, y\n )\n", (14149, 14232), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((14310, 14395), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""XSPRESS3"""', '"""master"""', '"""$(SUPPORT)/xspress3"""', 'gu', 'mod_org', '"""xspress3"""', 'y', 'y', 'y'], {}), "('XSPRESS3', 'master', '$(SUPPORT)/xspress3', gu, mod_org, 'xspress3', y,\n y, y)\n", (14312, 14395), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((14425, 14495), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""MOTOR"""', '"""R7-1"""', '"""$(SUPPORT)/motor"""', 'gu', 'mod_org', '"""motor"""', 'y', 'y', 'y'], {}), "('MOTOR', 'R7-1', '$(SUPPORT)/motor', gu, mod_org, 'motor', y, y, y)\n", (14427, 14495), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((14537, 14610), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""QUADEM"""', '"""R9-3"""', '"""$(SUPPORT)/quadEM"""', 'gu', 'mod_org', '"""quadEM"""', 'y', 'y', 'y'], {}), "('QUADEM', 'R9-3', '$(SUPPORT)/quadEM', gu, mod_org, 'quadEM', y, y, y)\n", (14539, 14610), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((14650, 14735), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""STREAM"""', '"""2.8.10"""', '"""$(SUPPORT)/stream"""', 'gu', 'psi_org', '"""StreamDevice"""', 'y', 'y', 'y'], {}), "('STREAM', '2.8.10', '$(SUPPORT)/stream', gu, psi_org, 'StreamDevice', y,\n y, y)\n", (14652, 14735), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((14815, 14910), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""AREA_DETECTOR"""', '"""R3-8"""', '"""$(SUPPORT)/areaDetector"""', 'gu', 'ad_org', '"""areaDetector"""', 'y', 'y', 'n'], {}), "('AREA_DETECTOR', 'R3-8', '$(SUPPORT)/areaDetector', gu, ad_org,\n 'areaDetector', y, y, n)\n", (14817, 14910), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((14945, 15036), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADSUPPORT"""', '"""R1-9"""', 
'"""$(AREA_DETECTOR)/ADSupport"""', 'gu', 'ad_org', '"""ADSupport"""', 'y', 'y', 'y'], {}), "('ADSUPPORT', 'R1-9', '$(AREA_DETECTOR)/ADSupport', gu, ad_org,\n 'ADSupport', y, y, y)\n", (14947, 15036), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((15075, 15153), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADCORE"""', '"""R3-8"""', '"""$(AREA_DETECTOR)/ADCore"""', 'gu', 'ad_org', '"""ADCore"""', 'y', 'y', 'y'], {}), "('ADCORE', 'R3-8', '$(AREA_DETECTOR)/ADCore', gu, ad_org, 'ADCore', y, y, y)\n", (15077, 15153), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((15205, 15310), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADPERKINELMER"""', '"""master"""', '"""$(AREA_DETECTOR)/ADPerkinElmer"""', 'gu', 'ad_org', '"""ADPerkinElmer"""', 'n', 'n', 'n'], {}), "('ADPERKINELMER', 'master', '$(AREA_DETECTOR)/ADPerkinElmer', gu, ad_org,\n 'ADPerkinElmer', n, n, n)\n", (15207, 15310), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((15336, 15429), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADGENICAM"""', '"""master"""', '"""$(AREA_DETECTOR)/ADGenICam"""', 'gu', 'ad_org', '"""ADGenICam"""', 'n', 'n', 'n'], {}), "('ADGENICAM', 'master', '$(AREA_DETECTOR)/ADGenICam', gu, ad_org,\n 'ADGenICam', n, n, n)\n", (15338, 15429), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((15466, 15556), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADANDOR3"""', '"""master"""', '"""$(AREA_DETECTOR)/ADAndor3"""', 'gu', 'ad_org', '"""ADAndor3"""', 'n', 'n', 'n'], {}), "('ADANDOR3', 'master', '$(AREA_DETECTOR)/ADAndor3', gu, ad_org,\n 'ADAndor3', n, n, n)\n", (15468, 15556), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((15596, 15693), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADPROSILICA"""', '"""R2-5"""', '"""$(AREA_DETECTOR)/ADProsilica"""', 'gu', 'ad_org', '"""ADProsilica"""', 'n', 'n', 'n'], {}), "('ADPROSILICA', 'R2-5', '$(AREA_DETECTOR)/ADProsilica', gu, ad_org,\n 'ADProsilica', n, n, n)\n", (15598, 15693), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((15726, 15831), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADSIMDETECTOR"""', '"""master"""', '"""$(AREA_DETECTOR)/ADSimDetector"""', 'gu', 'ad_org', '"""ADSimDetector"""', 'n', 'n', 'n'], {}), "('ADSIMDETECTOR', 'master', '$(AREA_DETECTOR)/ADSimDetector', gu, ad_org,\n 'ADSimDetector', n, n, n)\n", (15728, 15831), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((15857, 15948), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADPILATUS"""', '"""R2-8"""', '"""$(AREA_DETECTOR)/ADPilatus"""', 'gu', 'ad_org', '"""ADPilatus"""', 'n', 'n', 'n'], {}), "('ADPILATUS', 'R2-8', '$(AREA_DETECTOR)/ADPilatus', gu, ad_org,\n 'ADPilatus', n, n, n)\n", (15859, 15948), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((15987, 16077), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADMERLIN"""', '"""master"""', '"""$(AREA_DETECTOR)/ADMerlin"""', 'gu', 'ad_org', '"""ADMerlin"""', 'n', 'n', 'n'], {}), "('ADMERLIN', 'master', '$(AREA_DETECTOR)/ADMerlin', gu, ad_org,\n 'ADMerlin', n, n, n)\n", (15989, 16077), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), 
((16117, 16207), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADARAVIS"""', '"""master"""', '"""$(AREA_DETECTOR)/ADAravis"""', 'gu', 'ad_org', '"""ADAravis"""', 'n', 'n', 'n'], {}), "('ADARAVIS', 'master', '$(AREA_DETECTOR)/ADAravis', gu, ad_org,\n 'ADAravis', n, n, n)\n", (16119, 16207), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((16247, 16332), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADEIGER"""', '"""R2-6"""', '"""$(AREA_DETECTOR)/ADEiger"""', 'gu', 'ad_org', '"""ADEiger"""', 'n', 'n', 'n'], {}), "('ADEIGER', 'R2-6', '$(AREA_DETECTOR)/ADEiger', gu, ad_org, 'ADEiger', n,\n n, n)\n", (16249, 16332), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((16377, 16464), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADVIMBA"""', '"""master"""', '"""$(AREA_DETECTOR)/ADVimba"""', 'gu', 'ad_org', '"""ADVimba"""', 'n', 'n', 'n'], {}), "('ADVIMBA', 'master', '$(AREA_DETECTOR)/ADVimba', gu, ad_org, 'ADVimba',\n n, n, n)\n", (16379, 16464), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((16507, 16606), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADPOINTGREY"""', '"""master"""', '"""$(AREA_DETECTOR)/ADPointGrey"""', 'gu', 'ad_org', '"""ADPointGrey"""', 'n', 'n', 'n'], {}), "('ADPOINTGREY', 'master', '$(AREA_DETECTOR)/ADPointGrey', gu, ad_org,\n 'ADPointGrey', n, n, n)\n", (16509, 16606), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((16637, 16722), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADANDOR"""', '"""R2-8"""', '"""$(AREA_DETECTOR)/ADAndor"""', 'gu', 'ad_org', '"""ADAndor"""', 'n', 'n', 'n'], {}), "('ADANDOR', 'R2-8', '$(AREA_DETECTOR)/ADAndor', gu, ad_org, 'ADAndor', n,\n n, n)\n", (16639, 16722), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((16767, 16855), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADDEXELA"""', '"""R2-3"""', '"""$(AREA_DETECTOR)/ADDexela"""', 'gu', 'ad_org', '"""ADDexela"""', 'n', 'n', 'n'], {}), "('ADDEXELA', 'R2-3', '$(AREA_DETECTOR)/ADDexela', gu, ad_org, 'ADDexela',\n n, n, n)\n", (16769, 16855), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((16897, 16987), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADMYTHEN"""', '"""master"""', '"""$(AREA_DETECTOR)/ADMythen"""', 'gu', 'ad_org', '"""ADMythen"""', 'n', 'n', 'n'], {}), "('ADMYTHEN', 'master', '$(AREA_DETECTOR)/ADMythen', gu, ad_org,\n 'ADMythen', n, n, n)\n", (16899, 16987), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((17027, 17104), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADURL"""', '"""master"""', '"""$(AREA_DETECTOR)/ADURL"""', 'gu', 'ad_org', '"""ADURL"""', 'n', 'n', 'n'], {}), "('ADURL', 'master', '$(AREA_DETECTOR)/ADURL', gu, ad_org, 'ADURL', n, n, n)\n", (17029, 17104), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((18088, 18131), 'installSynApps.sync_all_module_tags', 'installSynApps.sync_all_module_tags', (['config'], {}), '(config)\n', (18123, 18131), False, 'import installSynApps\n'), ((2990, 3012), 'os.path.exists', 'os.path.exists', (['target'], {}), '(target)\n', (3004, 3012), False, 'import os\n'), ((3035, 3073), 'os.path.dirname', 'os.path.dirname', (['self.install_location'], {}), 
'(self.install_location)\n', (3050, 3073), False, 'import os\n'), ((3090, 3112), 'os.path.exists', 'os.path.exists', (['target'], {}), '(target)\n', (3104, 3112), False, 'import os\n'), ((8557, 8610), 'installSynApps.join_path', 'installSynApps.join_path', (['self.install_location', 'temp'], {}), '(self.install_location, temp)\n', (8581, 8610), False, 'import installSynApps\n'), ((3232, 3268), 'os.access', 'os.access', (['target', '(os.W_OK | os.X_OK)'], {}), '(target, os.W_OK | os.X_OK)\n', (3241, 3268), False, 'import os\n'), ((8699, 8745), 'installSynApps.join_path', 'installSynApps.join_path', (['self.base_path', 'temp'], {}), '(self.base_path, temp)\n', (8723, 8745), False, 'import installSynApps\n'), ((8834, 8883), 'installSynApps.join_path', 'installSynApps.join_path', (['self.support_path', 'temp'], {}), '(self.support_path, temp)\n', (8858, 8883), False, 'import installSynApps\n'), ((8973, 9017), 'installSynApps.join_path', 'installSynApps.join_path', (['self.ad_path', 'temp'], {}), '(self.ad_path, temp)\n', (8997, 9017), False, 'import installSynApps\n'), ((9102, 9149), 'installSynApps.join_path', 'installSynApps.join_path', (['self.motor_path', 'temp'], {}), '(self.motor_path, temp)\n', (9126, 9149), False, 'import installSynApps\n'), ((9244, 9296), 'installSynApps.join_path', 'installSynApps.join_path', (['self.extensions_path', 'temp'], {}), '(self.extensions_path, temp)\n', (9268, 9296), False, 'import installSynApps\n'), ((9547, 9601), 'installSynApps.join_path', 'installSynApps.join_path', (['rel_to_module.abs_path', 'temp'], {}), '(rel_to_module.abs_path, temp)\n', (9571, 9601), False, 'import installSynApps\n')] |
tarvitz/icu | apps/accounts/views.py | 9a7cdac9d26ea224539f68f678b90bf70084374d | # Create your views here.
# -*- coding: utf-8 -*-
from apps.core.helpers import render_to, ajax_response, get_object_or_None
from apps.core.decorators import lock, login_required_json
from apps.accounts.models import Invite
from apps.accounts.decorators import check_invite
from apps.accounts.forms import (
LoginForm, AccountRegisterForm, SendInviteForm, InviteRegisterForm
)
from django.core.mail import send_mail
from django.core.urlresolvers import reverse
from django.contrib import auth
from django.contrib.auth.decorators import login_required
from django.conf import settings
from django.db import transaction
from django.utils.translation import ugettext_lazy as _
@render_to('accounts/login.html')
def login(request):
form = LoginForm(request.POST or None)
if request.method == 'POST':
if form.is_valid():
user = form.cleaned_data['user']
auth.login(request, user)
return {'redirect': 'core:index'}
return {
'form': form
}
@render_to('index.html')
def logout(request):
auth.logout(request)
return {}
@render_to('accounts/profile.html')
def profile(request):
return {}
@login_required_json
@ajax_response
def generate_new_api_key(request):
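    # POST-only endpoint that rotates the user's API key. The dict returned
    # here is serialized by the ajax_response decorator, so clients receive
    # {"success": true, "key": "<new key>"} on success, {"success": false} otherwise.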
if request.method == 'POST':
request.user.api_key.key = request.user.api_key.generate_key()
request.user.api_key.save()
key = request.user.api_key.key
return {'success': True, 'key': key}
return {'success': False}
@lock("REGISTER_ALLOWED")
@render_to('accounts/register.html')
def register(request):
form = AccountRegisterForm(request.POST or None)
if request.method == "POST":
if form.is_valid():
user = form.save(commit=False)
user.set_password(form.cleaned_data['password'])
user.save()
return {'redirect': 'core:index'}
return {
'form': form
}
@login_required
@render_to('accounts/invite.html')
def invite(request):
form = SendInviteForm(request.POST or None, request=request)
if request.method == 'POST':
if form.is_valid():
form.save(commit=False)
invite = form.instance
email = form.cleaned_data['email']
msg = settings.INVITE_MESSAGE % {
'user': request.user.username,
'link': "http://b3ban.blacklibrary.ru%s" % reverse('accounts:invite-register', args=(invite.sid, ))
}
            # no mail sent, no money :)
send_mail(
subject=unicode(_('You have been invited to b3ban service')),
message=unicode(msg),
from_email=settings.EMAIL_FROM,
recipient_list=[email]
)
invite.save()
return {'redirect': 'accounts:invite-success'}
return {
'form': form
}
#@check for possibility to register
@transaction.commit_on_success
@check_invite(sid='sid')
@render_to('accounts/invite_register.html')
def invite_register(request, sid):
invite = get_object_or_None(Invite, sid=sid)
if not invite:
return {'redirect': 'core:ufo'}
form = InviteRegisterForm(request.POST or None)
if request.method == 'POST':
if form.is_valid():
invite.is_verified = True
invite.save()
user = form.save(commit=False)
user.email = invite.email
user.set_password(form.cleaned_data['password'])
user.save()
return {'redirect': 'accounts:invite-register-success'}
return {'form': form, 'sid': sid}
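# Hypothetical URL configuration matching the named routes used above
# (a sketch only; route regexes and module paths are assumptions):
#
#   from django.conf.urls import patterns, url
#
#   urlpatterns = patterns('apps.accounts.views',
#       url(r'^login/$', 'login', name='login'),
#       url(r'^logout/$', 'logout', name='logout'),
#       url(r'^invite/$', 'invite', name='invite'),
#       url(r'^invite/register/(?P<sid>\w+)/$', 'invite_register',
#           name='invite-register'),
#   )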
| [((682, 714), 'apps.core.helpers.render_to', 'render_to', (['"""accounts/login.html"""'], {}), "('accounts/login.html')\n", (691, 714), False, 'from apps.core.helpers import render_to, ajax_response, get_object_or_None\n'), ((1011, 1034), 'apps.core.helpers.render_to', 'render_to', (['"""index.html"""'], {}), "('index.html')\n", (1020, 1034), False, 'from apps.core.helpers import render_to, ajax_response, get_object_or_None\n'), ((1098, 1132), 'apps.core.helpers.render_to', 'render_to', (['"""accounts/profile.html"""'], {}), "('accounts/profile.html')\n", (1107, 1132), False, 'from apps.core.helpers import render_to, ajax_response, get_object_or_None\n'), ((1499, 1523), 'apps.core.decorators.lock', 'lock', (['"""REGISTER_ALLOWED"""'], {}), "('REGISTER_ALLOWED')\n", (1503, 1523), False, 'from apps.core.decorators import lock, login_required_json\n'), ((1525, 1560), 'apps.core.helpers.render_to', 'render_to', (['"""accounts/register.html"""'], {}), "('accounts/register.html')\n", (1534, 1560), False, 'from apps.core.helpers import render_to, ajax_response, get_object_or_None\n'), ((1931, 1964), 'apps.core.helpers.render_to', 'render_to', (['"""accounts/invite.html"""'], {}), "('accounts/invite.html')\n", (1940, 1964), False, 'from apps.core.helpers import render_to, ajax_response, get_object_or_None\n'), ((2927, 2950), 'apps.accounts.decorators.check_invite', 'check_invite', ([], {'sid': '"""sid"""'}), "(sid='sid')\n", (2939, 2950), False, 'from apps.accounts.decorators import check_invite\n'), ((2952, 2994), 'apps.core.helpers.render_to', 'render_to', (['"""accounts/invite_register.html"""'], {}), "('accounts/invite_register.html')\n", (2961, 2994), False, 'from apps.core.helpers import render_to, ajax_response, get_object_or_None\n'), ((746, 777), 'apps.accounts.forms.LoginForm', 'LoginForm', (['(request.POST or None)'], {}), '(request.POST or None)\n', (755, 777), False, 'from apps.accounts.forms import LoginForm, AccountRegisterForm, SendInviteForm, InviteRegisterForm\n'), ((1060, 1080), 'django.contrib.auth.logout', 'auth.logout', (['request'], {}), '(request)\n', (1071, 1080), False, 'from django.contrib import auth\n'), ((1595, 1636), 'apps.accounts.forms.AccountRegisterForm', 'AccountRegisterForm', (['(request.POST or None)'], {}), '(request.POST or None)\n', (1614, 1636), False, 'from apps.accounts.forms import LoginForm, AccountRegisterForm, SendInviteForm, InviteRegisterForm\n'), ((1997, 2050), 'apps.accounts.forms.SendInviteForm', 'SendInviteForm', (['(request.POST or None)'], {'request': 'request'}), '(request.POST or None, request=request)\n', (2011, 2050), False, 'from apps.accounts.forms import LoginForm, AccountRegisterForm, SendInviteForm, InviteRegisterForm\n'), ((3043, 3078), 'apps.core.helpers.get_object_or_None', 'get_object_or_None', (['Invite'], {'sid': 'sid'}), '(Invite, sid=sid)\n', (3061, 3078), False, 'from apps.core.helpers import render_to, ajax_response, get_object_or_None\n'), ((3149, 3189), 'apps.accounts.forms.InviteRegisterForm', 'InviteRegisterForm', (['(request.POST or None)'], {}), '(request.POST or None)\n', (3167, 3189), False, 'from apps.accounts.forms import LoginForm, AccountRegisterForm, SendInviteForm, InviteRegisterForm\n'), ((896, 921), 'django.contrib.auth.login', 'auth.login', (['request', 'user'], {}), '(request, user)\n', (906, 921), False, 'from django.contrib import auth\n'), ((2382, 2437), 'django.core.urlresolvers.reverse', 'reverse', (['"""accounts:invite-register"""'], {'args': '(invite.sid,)'}), "('accounts:invite-register', 
args=(invite.sid,))\n", (2389, 2437), False, 'from django.core.urlresolvers import reverse\n'), ((2547, 2590), 'django.utils.translation.ugettext_lazy', '_', (['"""You have been invited to b3ban service"""'], {}), "('You have been invited to b3ban service')\n", (2548, 2590), True, 'from django.utils.translation import ugettext_lazy as _\n')] |
LaudateCorpus1/oci-ansible-collection | plugins/modules/oci_blockstorage_volume_backup_policy_facts.py | 2b1cd87b4d652a97c1ca752cfc4fdc4bdb37a7e7 | #!/usr/bin/python
# Copyright (c) 2020, 2022 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_blockstorage_volume_backup_policy_facts
short_description: Fetches details about one or multiple VolumeBackupPolicy resources in Oracle Cloud Infrastructure
description:
- Fetches details about one or multiple VolumeBackupPolicy resources in Oracle Cloud Infrastructure
- Lists all the volume backup policies available in the specified compartment.
- For more information about Oracle defined backup policies and user defined backup policies,
see L(Policy-Based Backups,https://docs.cloud.oracle.com/iaas/Content/Block/Tasks/schedulingvolumebackups.htm).
- If I(policy_id) is specified, the details of a single VolumeBackupPolicy will be returned.
version_added: "2.9.0"
author: Oracle (@oracle)
options:
policy_id:
description:
- The OCID of the volume backup policy.
- Required to get a specific volume_backup_policy.
type: str
aliases: ["id"]
compartment_id:
description:
- The OCID of the compartment.
If no compartment is specified, the Oracle defined backup policies are listed.
type: str
extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_display_name_option ]
"""
EXAMPLES = """
- name: Get a specific volume_backup_policy
oci_blockstorage_volume_backup_policy_facts:
# required
policy_id: "ocid1.policy.oc1..xxxxxxEXAMPLExxxxxx"
- name: List volume_backup_policies
oci_blockstorage_volume_backup_policy_facts:
# optional
compartment_id: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
"""
RETURN = """
volume_backup_policies:
description:
- List of VolumeBackupPolicy resources
returned: on success
type: complex
contains:
display_name:
description:
- A user-friendly name. Does not have to be unique, and it's changeable.
Avoid entering confidential information.
returned: on success
type: str
sample: display_name_example
id:
description:
- The OCID of the volume backup policy.
returned: on success
type: str
sample: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
schedules:
description:
- The collection of schedules that this policy will apply.
returned: on success
type: complex
contains:
backup_type:
description:
- The type of volume backup to create.
returned: on success
type: str
sample: FULL
offset_seconds:
description:
- The number of seconds that the volume backup start
time should be shifted from the default interval boundaries specified by
the period. The volume backup start time is the frequency start time plus the offset.
returned: on success
type: int
sample: 56
period:
description:
- The volume backup frequency.
returned: on success
type: str
sample: ONE_HOUR
offset_type:
description:
- Indicates how the offset is defined. If value is `STRUCTURED`,
then `hourOfDay`, `dayOfWeek`, `dayOfMonth`, and `month` fields are used
and `offsetSeconds` will be ignored in requests and users should ignore its
value from the responses.
- "`hourOfDay` is applicable for periods `ONE_DAY`,
`ONE_WEEK`, `ONE_MONTH` and `ONE_YEAR`."
- "`dayOfWeek` is applicable for period
`ONE_WEEK`."
- "`dayOfMonth` is applicable for periods `ONE_MONTH` and `ONE_YEAR`."
- "'month' is applicable for period 'ONE_YEAR'."
- They will be ignored in the requests for inapplicable periods.
- If value is `NUMERIC_SECONDS`, then `offsetSeconds`
will be used for both requests and responses and the structured fields will be
ignored in the requests and users should ignore their values from the responses.
                        - For clients using older versions of APIs and not sending `offsetType` in their
requests, the behaviour is just like `NUMERIC_SECONDS`.
returned: on success
type: str
sample: STRUCTURED
hour_of_day:
description:
- The hour of the day to schedule the volume backup.
returned: on success
type: int
sample: 56
day_of_week:
description:
- The day of the week to schedule the volume backup.
returned: on success
type: str
sample: MONDAY
day_of_month:
description:
- The day of the month to schedule the volume backup.
returned: on success
type: int
sample: 56
month:
description:
- The month of the year to schedule the volume backup.
returned: on success
type: str
sample: JANUARY
retention_seconds:
description:
- How long, in seconds, to keep the volume backups created by this schedule.
returned: on success
type: int
sample: 56
time_zone:
description:
                    - Specifies the time zone of the schedule.
returned: on success
type: str
sample: UTC
destination_region:
description:
- The paired destination region for copying scheduled backups to. Example `us-ashburn-1`.
See L(Region Pairs,https://docs.cloud.oracle.com/iaas/Content/Block/Tasks/schedulingvolumebackups.htm#RegionPairs) for details about paired
regions.
returned: on success
type: str
sample: us-phoenix-1
time_created:
description:
- The date and time the volume backup policy was created. Format defined by L(RFC3339,https://tools.ietf.org/html/rfc3339).
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
compartment_id:
description:
- The OCID of the compartment that contains the volume backup.
returned: on success
type: str
sample: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
defined_tags:
description:
- Defined tags for this resource. Each key is predefined and scoped to a
namespace. For more information, see L(Resource Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
- "Example: `{\\"Operations\\": {\\"CostCenter\\": \\"42\\"}}`"
returned: on success
type: dict
sample: {'Operations': {'CostCenter': 'US'}}
freeform_tags:
description:
- Free-form tags for this resource. Each tag is a simple key-value pair with no
predefined name, type, or namespace. For more information, see L(Resource
Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
- "Example: `{\\"Department\\": \\"Finance\\"}`"
returned: on success
type: dict
sample: {'Department': 'Finance'}
sample: [{
"display_name": "display_name_example",
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"schedules": [{
"backup_type": "FULL",
"offset_seconds": 56,
"period": "ONE_HOUR",
"offset_type": "STRUCTURED",
"hour_of_day": 56,
"day_of_week": "MONDAY",
"day_of_month": 56,
"month": "JANUARY",
"retention_seconds": 56,
"time_zone": "UTC"
}],
"destination_region": "us-phoenix-1",
"time_created": "2013-10-20T19:20:30+01:00",
"compartment_id": "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx",
"defined_tags": {'Operations': {'CostCenter': 'US'}},
"freeform_tags": {'Department': 'Finance'}
}]
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceFactsHelperBase,
get_custom_class,
)
try:
from oci.core import BlockstorageClient
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class VolumeBackupPolicyFactsHelperGen(OCIResourceFactsHelperBase):
"""Supported operations: get, list"""
def get_required_params_for_get(self):
return [
"policy_id",
]
def get_required_params_for_list(self):
return []
def get_resource(self):
return oci_common_utils.call_with_backoff(
self.client.get_volume_backup_policy,
policy_id=self.module.params.get("policy_id"),
)
def list_resources(self):
optional_list_method_params = [
"compartment_id",
"display_name",
]
optional_kwargs = dict(
(param, self.module.params[param])
for param in optional_list_method_params
if self.module.params.get(param) is not None
)
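        # Forward only the parameters the user actually supplied so the
        # service applies its own defaults for anything left unset.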
return oci_common_utils.list_all_resources(
self.client.list_volume_backup_policies, **optional_kwargs
)
VolumeBackupPolicyFactsHelperCustom = get_custom_class(
"VolumeBackupPolicyFactsHelperCustom"
)
class ResourceFactsHelper(
VolumeBackupPolicyFactsHelperCustom, VolumeBackupPolicyFactsHelperGen
):
pass
def main():
module_args = oci_common_utils.get_common_arg_spec()
module_args.update(
dict(
policy_id=dict(aliases=["id"], type="str"),
compartment_id=dict(type="str"),
display_name=dict(type="str"),
)
)
module = AnsibleModule(argument_spec=module_args)
if not HAS_OCI_PY_SDK:
module.fail_json(msg="oci python sdk required for this module.")
resource_facts_helper = ResourceFactsHelper(
module=module,
resource_type="volume_backup_policy",
service_client_class=BlockstorageClient,
namespace="core",
)
result = []
if resource_facts_helper.is_get():
result = [resource_facts_helper.get()]
elif resource_facts_helper.is_list():
result = resource_facts_helper.list()
else:
resource_facts_helper.fail()
module.exit_json(volume_backup_policies=result)
if __name__ == "__main__":
main()
| [((11059, 11114), 'ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils.get_custom_class', 'get_custom_class', (['"""VolumeBackupPolicyFactsHelperCustom"""'], {}), "('VolumeBackupPolicyFactsHelperCustom')\n", (11075, 11114), False, 'from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import OCIResourceFactsHelperBase, get_custom_class\n'), ((11268, 11306), 'ansible_collections.oracle.oci.plugins.module_utils.oci_common_utils.get_common_arg_spec', 'oci_common_utils.get_common_arg_spec', ([], {}), '()\n', (11304, 11306), False, 'from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils\n'), ((11519, 11559), 'ansible.module_utils.basic.AnsibleModule', 'AnsibleModule', ([], {'argument_spec': 'module_args'}), '(argument_spec=module_args)\n', (11532, 11559), False, 'from ansible.module_utils.basic import AnsibleModule\n'), ((10901, 11000), 'ansible_collections.oracle.oci.plugins.module_utils.oci_common_utils.list_all_resources', 'oci_common_utils.list_all_resources', (['self.client.list_volume_backup_policies'], {}), '(self.client.list_volume_backup_policies,\n **optional_kwargs)\n', (10936, 11000), False, 'from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils\n')] |
mhozza/pi-control | pi_control/server_stats/apps.py | 0dce821b4702519fedc3950270ee0091ed484ef6 | from django.apps import AppConfig
class ServerStatsConfig(AppConfig):
name = "server_stats"
| [] |
jackvz/mezzanine-cartridge-api | testproject/testproject/settings.py | c956afa672fcf1035ab60cd5eb6589a06ccaafa0 |
from __future__ import absolute_import, unicode_literals
import os
from django import VERSION as DJANGO_VERSION
from django.utils.translation import ugettext_lazy as _
SECRET_KEY = '%29hnw7d-dy4n)!@1yi#ov#^@x0b=o*2o8^31oe!+(xw!!oc9a'
######################
# CARTRIDGE SETTINGS #
######################
# The following settings are already defined in cartridge.shop.defaults
# with default values, but are common enough to be put here, commented
# out, for conveniently overriding. Please consult the settings
# documentation for a full list of settings Cartridge implements:
# http://cartridge.jupo.org/configuration.html#default-settings
# Sequence of available credit card types for payment.
# SHOP_CARD_TYPES = ("Mastercard", "Visa", "Diners", "Amex")
# Setting to turn on featured images for shop categories. Defaults to False.
# SHOP_CATEGORY_USE_FEATURED_IMAGE = True
# If True, the checkout process is split into separate
# billing/shipping and payment steps.
# SHOP_CHECKOUT_STEPS_SPLIT = True
# If True, the checkout process has a final confirmation step before
# completion.
# SHOP_CHECKOUT_STEPS_CONFIRMATION = True
# Controls the formatting of monetary values accord to the locale
# module in the python standard library. If an empty string is
# used, will fall back to the system's locale.
SHOP_CURRENCY_LOCALE = "en_GB.UTF-8"
# Dotted package path and name of the function that
# is called on submit of the billing/shipping checkout step. This
# is where shipping calculation can be performed and set using the
# function ``cartridge.shop.utils.set_shipping``.
# SHOP_HANDLER_BILLING_SHIPPING = \
# "cartridge.shop.checkout.default_billship_handler"
# Dotted package path and name of the function that
# is called once an order is successful and all of the order
# object's data has been created. This is where any custom order
# processing should be implemented.
# SHOP_HANDLER_ORDER = "cartridge.shop.checkout.default_order_handler"
# Dotted package path and name of the function that
# is called on submit of the payment checkout step. This is where
# integration with a payment gateway should be implemented.
# SHOP_HANDLER_PAYMENT = "cartridge.shop.checkout.default_payment_handler"
# Sequence of value/name pairs for order statuses.
# SHOP_ORDER_STATUS_CHOICES = (
# (1, "Unprocessed"),
# (2, "Processed"),
# )
# Sequence of value/name pairs for types of product options,
# eg Size, Colour. NOTE: Increasing the number of these will
# require database migrations!
# SHOP_OPTION_TYPE_CHOICES = (
# (1, "Size"),
# (2, "Colour"),
# )
# Sequence of indexes from the SHOP_OPTION_TYPE_CHOICES setting that
# control how the options should be ordered in the admin,
# eg for "Colour" then "Size" given the above:
# SHOP_OPTION_ADMIN_ORDER = (2, 1)
######################
# MEZZANINE SETTINGS #
######################
# The following settings are already defined with default values in
# the ``defaults.py`` module within each of Mezzanine's apps, but are
# common enough to be put here, commented out, for conveniently
# overriding. Please consult the settings documentation for a full list
# of settings Mezzanine implements:
# http://mezzanine.jupo.org/docs/configuration.html#default-settings
# Controls the ordering and grouping of the admin menu.
#
# ADMIN_MENU_ORDER = (
# ("Content", ("pages.Page", "blog.BlogPost",
# "generic.ThreadedComment", (_("Media Library"), "media-library"),)),
# (_("Shop"), ("shop.Product", "shop.ProductOption", "shop.DiscountCode",
# "shop.Sale", "shop.Order")),
# ("Site", ("sites.Site", "redirects.Redirect", "conf.Setting")),
# ("Users", ("auth.User", "auth.Group",)),
# )
# A three item sequence, each containing a sequence of template tags
# used to render the admin dashboard.
#
# DASHBOARD_TAGS = (
# ("blog_tags.quick_blog", "mezzanine_tags.app_list"),
# ("comment_tags.recent_comments",),
# ("mezzanine_tags.recent_actions",),
# )
# A sequence of templates used by the ``page_menu`` template tag. Each
# item in the sequence is a three item sequence, containing a unique ID
# for the template, a label for the template, and the template path.
# These templates are then available for selection when editing which
# menus a page should appear in. Note that if a menu template is used
# that doesn't appear in this setting, all pages will appear in it.
# PAGE_MENU_TEMPLATES = (
# (1, _("Top navigation bar"), "pages/menus/dropdown.html"),
# (2, _("Left-hand tree"), "pages/menus/tree.html"),
# (3, _("Footer"), "pages/menus/footer.html"),
# )
# A sequence of fields that will be injected into Mezzanine's (or any
# library's) models. Each item in the sequence is a four item sequence.
# The first two items are the dotted path to the model and its field
# name to be added, and the dotted path to the field class to use for
# the field. The third and fourth items are a sequence of positional
# args and a dictionary of keyword args, to use when creating the
# field instance. When specifying the field class, the path
# ``django.models.db.`` can be omitted for regular Django model fields.
#
# EXTRA_MODEL_FIELDS = (
# (
# # Dotted path to field.
# "mezzanine.blog.models.BlogPost.image",
# # Dotted path to field class.
# "somelib.fields.ImageField",
# # Positional args for field class.
# (_("Image"),),
# # Keyword args for field class.
# {"blank": True, "upload_to": "blog"},
# ),
# # Example of adding a field to *all* of Mezzanine's content types:
# (
# "mezzanine.pages.models.Page.another_field",
# "IntegerField", # 'django.db.models.' is implied if path is omitted.
# (_("Another name"),),
# {"blank": True, "default": 1},
# ),
# )
# Setting to turn on featured images for blog posts. Defaults to False.
#
# BLOG_USE_FEATURED_IMAGE = True
# If True, the django-modeltranslation will be added to the
# INSTALLED_APPS setting.
USE_MODELTRANSLATION = False
########################
# MAIN DJANGO SETTINGS #
########################
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ['*']
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'UTC'
# If you set this to True, Django will use timezone-aware datetimes.
USE_TZ = True
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = "en"
# Supported languages
LANGUAGES = (
('en', _('English')),
)
# A boolean that turns on/off debug mode. When set to ``True``, stack traces
# are displayed for error pages. Should always be set to ``False`` in
# production. Best set to ``True`` in local_settings.py
DEBUG = True
# Whether a user's session cookie expires when the Web browser is closed.
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = False
AUTHENTICATION_BACKENDS = ("mezzanine.core.auth_backends.MezzanineBackend",)
# The numeric mode to set newly-uploaded files to. The value should be
# a mode you'd pass directly to os.chmod.
FILE_UPLOAD_PERMISSIONS = 0o644
#############
# DATABASES #
#############
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'db.dev',
}
}
#########
# PATHS #
#########
# Full filesystem path to the project.
PROJECT_APP_PATH = os.path.dirname(os.path.abspath(__file__))
PROJECT_APP = os.path.basename(PROJECT_APP_PATH)
PROJECT_ROOT = BASE_DIR = os.path.dirname(PROJECT_APP_PATH)
# Every cache key will get prefixed with this value - here we set it to
# the name of the directory the project is in to try and use something
# project specific.
CACHE_MIDDLEWARE_KEY_PREFIX = PROJECT_APP
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = "/static/"
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = os.path.join(PROJECT_ROOT, STATIC_URL.strip("/"))
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = STATIC_URL + "media/"
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = os.path.join(PROJECT_ROOT, *MEDIA_URL.strip("/").split("/"))
# Package/module name to import the root urlpatterns from for the project.
ROOT_URLCONF = "%s.urls" % PROJECT_APP
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [
os.path.join(PROJECT_ROOT, "templates")
],
"OPTIONS": {
"context_processors": [
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
"django.template.context_processors.debug",
"django.template.context_processors.i18n",
"django.template.context_processors.static",
"django.template.context_processors.media",
"django.template.context_processors.request",
"django.template.context_processors.tz",
"mezzanine.conf.context_processors.settings",
"mezzanine.pages.context_processors.page",
],
"builtins": [
"mezzanine.template.loader_tags",
],
"loaders": [
"mezzanine.template.loaders.host_themes.Loader",
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
],
},
},
]
if DJANGO_VERSION < (1, 9):
del TEMPLATES[0]["OPTIONS"]["builtins"]
################
# APPLICATIONS #
################
INSTALLED_APPS = (
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.redirects",
"django.contrib.sessions",
"django.contrib.sites",
"django.contrib.sitemaps",
"django.contrib.staticfiles",
"mezzanine.boot",
"mezzanine.conf",
"mezzanine.core",
"mezzanine.generic",
"mezzanine.pages",
"cartridge.shop",
"mezzanine.blog",
"mezzanine.forms",
"mezzanine.galleries",
"mezzanine.twitter",
# "mezzanine.accounts",
'corsheaders',
'rest_framework',
'rest_framework_api_key',
'drf_yasg',
# 'oauth2_provider',
# 'rest_framework.authtoken',
'mezzanine_cartridge_api',
)
# List of middleware classes to use. Order is important; in the request phase,
# these middleware classes will be applied in the order given, and in the
# response phase the middleware will be applied in reverse order.
MIDDLEWARE = (
"mezzanine.core.middleware.UpdateCacheMiddleware",
'django.contrib.sessions.middleware.SessionMiddleware',
# Uncomment if using internationalisation or localisation
# 'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
"cartridge.shop.middleware.ShopMiddleware",
"mezzanine.core.request.CurrentRequestMiddleware",
"mezzanine.core.middleware.RedirectFallbackMiddleware",
"mezzanine.core.middleware.AdminLoginInterfaceSelectorMiddleware",
"mezzanine.core.middleware.SitePermissionMiddleware",
"mezzanine.pages.middleware.PageMiddleware",
"mezzanine.core.middleware.FetchFromCacheMiddleware",
'corsheaders.middleware.CorsMiddleware',
)
if DJANGO_VERSION < (1, 10):
MIDDLEWARE_CLASSES = MIDDLEWARE
del MIDDLEWARE
# Store these package names here as they may change in the future since
# at the moment we are using custom forks of them.
PACKAGE_NAME_FILEBROWSER = "filebrowser_safe"
PACKAGE_NAME_GRAPPELLI = "grappelli_safe"
#########################
# OPTIONAL APPLICATIONS #
#########################
# These will be added to ``INSTALLED_APPS``, only if available.
OPTIONAL_APPS = (
"debug_toolbar",
"django_extensions",
"compressor",
PACKAGE_NAME_FILEBROWSER,
PACKAGE_NAME_GRAPPELLI,
)
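# Note: Mezzanine's set_dynamic_settings(), called at the bottom of this file,
# is what adds these to INSTALLED_APPS when they can be imported.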
##################
# LOCAL SETTINGS #
##################
# Allow any settings to be defined in local_settings.py which should be
# ignored in your version control system allowing for settings to be
# defined per machine.
# Instead of doing "from .local_settings import *", we use exec so that
# local_settings has full access to everything defined in this module.
# Also force into sys.modules so it's visible to Django's autoreload.
f = os.path.join(PROJECT_APP_PATH, "local_settings.py")
if os.path.exists(f):
import sys
import imp
module_name = "%s.local_settings" % PROJECT_APP
module = imp.new_module(module_name)
module.__file__ = f
sys.modules[module_name] = module
exec(open(f, "rb").read())
####################
# DYNAMIC SETTINGS #
####################
# set_dynamic_settings() will rewrite globals based on what has been
# defined so far, in order to provide some better defaults where
# applicable. We also allow this settings module to be imported
# without Mezzanine installed, as the case may be when using the
# fabfile, where setting the dynamic settings below isn't strictly
# required.
try:
from mezzanine.utils.conf import set_dynamic_settings
except ImportError:
pass
else:
set_dynamic_settings(globals())
| [((8031, 8065), 'os.path.basename', 'os.path.basename', (['PROJECT_APP_PATH'], {}), '(PROJECT_APP_PATH)\n', (8047, 8065), False, 'import os\n'), ((8092, 8125), 'os.path.dirname', 'os.path.dirname', (['PROJECT_APP_PATH'], {}), '(PROJECT_APP_PATH)\n', (8107, 8125), False, 'import os\n'), ((13603, 13654), 'os.path.join', 'os.path.join', (['PROJECT_APP_PATH', '"""local_settings.py"""'], {}), "(PROJECT_APP_PATH, 'local_settings.py')\n", (13615, 13654), False, 'import os\n'), ((13658, 13675), 'os.path.exists', 'os.path.exists', (['f'], {}), '(f)\n', (13672, 13675), False, 'import os\n'), ((7990, 8015), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (8005, 8015), False, 'import os\n'), ((13772, 13799), 'imp.new_module', 'imp.new_module', (['module_name'], {}), '(module_name)\n', (13786, 13799), False, 'import imp\n'), ((7002, 7014), 'django.utils.translation.ugettext_lazy', '_', (['"""English"""'], {}), "('English')\n", (7003, 7014), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((9399, 9438), 'os.path.join', 'os.path.join', (['PROJECT_ROOT', '"""templates"""'], {}), "(PROJECT_ROOT, 'templates')\n", (9411, 9438), False, 'import os\n')] |
ishaanbakhle/wordgen.us | wordgen/data_gen.py | 45c5247ce04b13badd2e1b3164cedc9176a805c7 | from wordgen import consts
import numpy as np
from sklearn import preprocessing
def fill_matrix(dataset):
    """Build a letter-transition matrix from ``dataset``.
    ``matrix[next][current]`` counts how often ``next`` follows ``current``;
    the rows are L1-normalized below.
    """
    assert isinstance(dataset, str)
    assert len(dataset) > 0, "Dataset must be > 0"
    matrix = []
    for i in consts.rang:
        matrix.append([])
        for o in consts.rang:
            matrix[i].append(0)
    dataset = dataset.lower()
    accepted = list("abcdefghijklmnopqrstuvwxyz") + ['\n']
    for i in range(len(dataset)-1):
        if dataset[i] in accepted:
            # Skip ahead to the next accepted character.
            val2 = i+1
            while (val2 < len(dataset) and not (dataset[val2] in accepted)):
                val2 += 1
            if val2 == len(dataset):
                break  # no accepted character follows; avoid an IndexError
            ind1 = consts.get_ord(dataset[i])
            ind2 = consts.get_ord(dataset[val2])
            matrix[ind2][ind1] += 1
matrix = preprocessing.normalize(matrix, norm='l1')
return matrix
if __name__ == '__main__':
print(fill_matrix("james as"))
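# Minimal lookup sketch: matrix[next][current] holds the (row-normalized)
# count of `next` following `current`; consts.get_ord maps a character to
# its matrix index:
#     m = fill_matrix("banana\n")
#     p = m[consts.get_ord('a')][consts.get_ord('n')]  # 'a' after 'n'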
| [((833, 875), 'sklearn.preprocessing.normalize', 'preprocessing.normalize', (['matrix'], {'norm': '"""l1"""'}), "(matrix, norm='l1')\n", (856, 875), False, 'from sklearn import preprocessing\n'), ((707, 733), 'wordgen.consts.get_ord', 'consts.get_ord', (['dataset[i]'], {}), '(dataset[i])\n', (721, 733), False, 'from wordgen import consts\n'), ((753, 782), 'wordgen.consts.get_ord', 'consts.get_ord', (['dataset[val2]'], {}), '(dataset[val2])\n', (767, 782), False, 'from wordgen import consts\n')] |
Cleptomania/arcade | arcade/gl/context.py | abb7f0a0229b7f3a7843856d4b0812a3a2b80468 | from ctypes import c_int, c_char_p, cast, c_float
from collections import deque
import logging
import weakref
from typing import Any, Dict, List, Tuple, Union, Sequence, Set
import pyglet
from pyglet.window import Window
from pyglet import gl
from .buffer import Buffer
from .program import Program
from .vertex_array import Geometry, VertexArray
from .framebuffer import Framebuffer, DefaultFrameBuffer
from typing import Optional
from .texture import Texture
from .query import Query
from .glsl import ShaderSource
from .types import BufferDescription
LOG = logging.getLogger(__name__)
class Context:
"""
    Represents an OpenGL context. This context belongs to a ``pyglet.Window``
    and is normally accessed through ``window.ctx``.
The Context class contains methods for creating resources,
global states and commonly used enums. All enums also exist
in the ``gl`` module. (``ctx.BLEND`` or ``arcade.gl.BLEND``).
"""
#: The active context
active: Optional["Context"] = None
# --- Store the most commonly used OpenGL constants
# Texture
#: Texture interpolation: Nearest pixel
NEAREST = 0x2600
#: Texture interpolation: Linear interpolate
LINEAR = 0x2601
#: Texture interpolation: Minification filter for mipmaps
NEAREST_MIPMAP_NEAREST = 0x2700
#: Texture interpolation: Minification filter for mipmaps
LINEAR_MIPMAP_NEAREST = 0x2701
#: Texture interpolation: Minification filter for mipmaps
NEAREST_MIPMAP_LINEAR = 0x2702
#: Texture interpolation: Minification filter for mipmaps
LINEAR_MIPMAP_LINEAR = 0x2703
#: Texture wrap mode: Repeat
REPEAT = gl.GL_REPEAT
    #: Texture wrap mode: Clamp to edge pixels
    CLAMP_TO_EDGE = gl.GL_CLAMP_TO_EDGE
    #: Texture wrap mode: Clamp to border color
    CLAMP_TO_BORDER = gl.GL_CLAMP_TO_BORDER
    #: Texture wrap mode: Repeat mirrored
    MIRRORED_REPEAT = gl.GL_MIRRORED_REPEAT
# Flags
#: Context flag: Blending
BLEND = gl.GL_BLEND
#: Context flag: Depth testing
DEPTH_TEST = gl.GL_DEPTH_TEST
#: Context flag: Face culling
CULL_FACE = gl.GL_CULL_FACE
#: Context flag: Enable ``gl_PointSize`` in shaders.
PROGRAM_POINT_SIZE = gl.GL_PROGRAM_POINT_SIZE
# Blend functions
#: Blend function
ZERO = 0x0000
#: Blend function
ONE = 0x0001
#: Blend function
SRC_COLOR = 0x0300
#: Blend function
ONE_MINUS_SRC_COLOR = 0x0301
#: Blend function
SRC_ALPHA = 0x0302
#: Blend function
ONE_MINUS_SRC_ALPHA = 0x0303
#: Blend function
DST_ALPHA = 0x0304
#: Blend function
ONE_MINUS_DST_ALPHA = 0x0305
#: Blend function
DST_COLOR = 0x0306
#: Blend function
ONE_MINUS_DST_COLOR = 0x0307
# Blend equations
    #: Blend equations: source + destination
FUNC_ADD = 0x8006
#: Blend equations: source - destination
FUNC_SUBTRACT = 0x800A
#: Blend equations: destination - source
FUNC_REVERSE_SUBTRACT = 0x800B
#: Blend equations: Minimum of source and destination
MIN = 0x8007
#: Blend equations: Maximum of source and destination
MAX = 0x8008
# Blend mode shortcuts
#: Blend mode shortcut for default blend mode: ``SRC_ALPHA, ONE_MINUS_SRC_ALPHA``
BLEND_DEFAULT = 0x0302, 0x0303
#: Blend mode shortcut for additive blending: ``ONE, ONE``
BLEND_ADDITIVE = 0x0001, 0x0001
    #: Blend mode shortcut for premultiplied alpha: ``SRC_ALPHA, ONE``
BLEND_PREMULTIPLIED_ALPHA = 0x0302, 0x0001
# VertexArray: Primitives
#: Primitive mode
POINTS = gl.GL_POINTS # 0
#: Primitive mode
LINES = gl.GL_LINES # 1
#: Primitive mode
LINE_STRIP = gl.GL_LINE_STRIP # 3
#: Primitive mode
TRIANGLES = gl.GL_TRIANGLES # 4
#: Primitive mode
TRIANGLE_STRIP = gl.GL_TRIANGLE_STRIP # 5
#: Primitive mode
TRIANGLE_FAN = gl.GL_TRIANGLE_FAN # 6
#: Primitive mode
LINES_ADJACENCY = gl.GL_LINES_ADJACENCY # 10
#: Primitive mode
LINE_STRIP_ADJACENCY = gl.GL_LINE_STRIP_ADJACENCY # 11
#: Primitive mode
TRIANGLES_ADJACENCY = gl.GL_TRIANGLES_ADJACENCY # 12
#: Primitive mode
TRIANGLE_STRIP_ADJACENCY = gl.GL_TRIANGLE_STRIP_ADJACENCY # 13
#: Patch mode (tessellation)
PATCHES = gl.GL_PATCHES
# The most common error enums
_errors = {
gl.GL_INVALID_ENUM: "GL_INVALID_ENUM",
gl.GL_INVALID_VALUE: "GL_INVALID_VALUE",
gl.GL_INVALID_OPERATION: "GL_INVALID_OPERATION",
gl.GL_INVALID_FRAMEBUFFER_OPERATION: "GL_INVALID_FRAMEBUFFER_OPERATION",
gl.GL_OUT_OF_MEMORY: "GL_OUT_OF_MEMORY",
gl.GL_STACK_UNDERFLOW: "GL_STACK_UNDERFLOW",
gl.GL_STACK_OVERFLOW: "GL_STACK_OVERFLOW",
}
def __init__(self, window: pyglet.window.Window, gc_mode: str = "auto"):
self._window_ref = weakref.ref(window)
self.limits = Limits(self)
self._gl_version = (self.limits.MAJOR_VERSION, self.limits.MINOR_VERSION)
Context.activate(self)
# Texture unit we use when doing operations on textures to avoid
# affecting currently bound textures in the first units
self.default_texture_unit = self.limits.MAX_TEXTURE_IMAGE_UNITS - 1
# Detect the default framebuffer
self._screen = DefaultFrameBuffer(self)
# Tracking active program
self.active_program: Optional[Program] = None
# Tracking active framebuffer. On context creation the window is the default render target
self.active_framebuffer: Framebuffer = self._screen
self.stats: ContextStats = ContextStats(warn_threshold=1000)
# Hardcoded states
# This should always be enabled
gl.glEnable(gl.GL_TEXTURE_CUBE_MAP_SEAMLESS)
# Set primitive restart index to -1 by default
gl.glEnable(gl.GL_PRIMITIVE_RESTART)
self._primitive_restart_index = -1
self.primitive_restart_index = self._primitive_restart_index
# We enable scissor testing by default.
# This is always set to the same value as the viewport
# to avoid background color affecting areas outside the viewport
gl.glEnable(gl.GL_SCISSOR_TEST)
# States
self._blend_func = self.BLEND_DEFAULT
self._point_size = 1.0
self._flags: Set[int] = set()
# Normal garbage collection as default (what we expect in python)
self._gc_mode = "auto"
self.gc_mode = gc_mode
#: Collected objects to gc when gc_mode is "context_gc"
self.objects = deque()
@property
def window(self) -> Window:
"""
The window this context belongs to.
:type: ``pyglet.Window``
"""
return self._window_ref()
@property
def screen(self) -> Framebuffer:
"""
The framebuffer for the window.
:type: :py:class:`~arcade.Framebuffer`
"""
return self._screen
@property
def fbo(self) -> Framebuffer:
"""
Get the currently active framebuffer.
This property is read-only
:type: :py:class:`arcade.gl.Framebuffer`
"""
return self.active_framebuffer
@property
def gl_version(self) -> Tuple[int, int]:
"""
The OpenGL version as a 2 component tuple
:type: tuple (major, minor) version
"""
return self._gl_version
def gc(self):
"""
Run garbage collection of OpenGL objects for this context.
This is only needed when ``gc_mode`` is ``context_gc``.
"""
        # Loop until all objects are gone. Deleting one object
        # might queue up new ones, so keep popping until the deque is empty.
while len(self.objects):
obj = self.objects.pop()
obj.delete()
@property
def gc_mode(self) -> str:
"""
Set the garbage collection mode for OpenGL resources.
Supported modes are:
# default: Auto
ctx.gc_mode = "auto"
"""
return self._gc_mode
@gc_mode.setter
def gc_mode(self, value: str):
modes = ["auto", "context_gc"]
if value not in modes:
raise ValueError("Unsupported gc_mode. Supported modes are:", modes)
self._gc_mode = value
@property
def error(self) -> Union[str, None]:
"""Check OpenGL error
Returns a string representation of the occurring error
        or ``None`` if no error has occurred.
Example::
err = ctx.error
if err:
raise RuntimeError("OpenGL error: {err}")
:type: str
"""
err = gl.glGetError()
if err == gl.GL_NO_ERROR:
return None
return self._errors.get(err, "GL_UNKNOWN_ERROR")
@classmethod
def activate(cls, ctx: "Context"):
"""Mark a context as the currently active one"""
cls.active = ctx
def enable(self, *args):
"""
Enables one or more context flags::
# Single flag
ctx.enable(ctx.BLEND)
# Multiple flags
ctx.enable(ctx.DEPTH_TEST, ctx.CULL_FACE)
"""
self._flags.update(args)
for flag in args:
gl.glEnable(flag)
def enable_only(self, *args):
"""
Enable only some flags. This will disable all other flags.
This is a simple way to ensure that context flag states
are not lingering from other sections of your code base::
# Ensure all flags are disabled (enable no flags)
ctx.enable_only()
# Make sure only blending is enabled
ctx.enable_only(ctx.BLEND)
# Make sure only depth test and culling is enabled
ctx.enable_only(ctx.DEPTH_TEST, ctx.CULL_FACE)
"""
self._flags = set(args)
if self.BLEND in self._flags:
gl.glEnable(self.BLEND)
else:
gl.glDisable(self.BLEND)
if self.DEPTH_TEST in self._flags:
gl.glEnable(self.DEPTH_TEST)
else:
gl.glDisable(self.DEPTH_TEST)
if self.CULL_FACE in self._flags:
gl.glEnable(self.CULL_FACE)
else:
gl.glDisable(self.CULL_FACE)
if self.PROGRAM_POINT_SIZE in self._flags:
gl.glEnable(self.PROGRAM_POINT_SIZE)
else:
gl.glDisable(self.PROGRAM_POINT_SIZE)
def disable(self, *args):
"""
Disable one or more context flags::
# Single flag
ctx.disable(ctx.BLEND)
# Multiple flags
ctx.disable(ctx.DEPTH_TEST, ctx.CULL_FACE)
"""
self._flags -= set(args)
for flag in args:
gl.glDisable(flag)
def is_enabled(self, flag) -> bool:
"""
Check if a context flag is enabled
:type: bool
"""
return flag in self._flags
@property
def viewport(self) -> Tuple[int, int, int, int]:
"""
Get or set the viewport for the currently active framebuffer.
The viewport simply describes what pixels of the screen
OpenGL should render to. Normally it would be the size of
the window's framebuffer::
# 4:3 screen
ctx.viewport = 0, 0, 800, 600
# 1080p
ctx.viewport = 0, 0, 1920, 1080
# Using the current framebuffer size
ctx.viewport = 0, 0, *ctx.screen.size
:type: tuple (x, y, width, height)
"""
return self.active_framebuffer.viewport
@viewport.setter
def viewport(self, value: Tuple[int, int, int, int]):
self.active_framebuffer.viewport = value
@property
def blend_func(self) -> Tuple[int, int]:
"""
        Get or set the blend function::
ctx.blend_func = ctx.ONE, ctx.ONE
:type: tuple (src, dst)
"""
return self._blend_func
@blend_func.setter
def blend_func(self, value: Tuple[int, int]):
self._blend_func = value
gl.glBlendFunc(value[0], value[1])
# def blend_equation(self)
# def front_face(self)
# def cull_face(self)
@property
def patch_vertices(self) -> int:
"""
        Get or set the number of vertices that will be used to make up a single patch primitive.
Patch primitives are consumed by the tessellation control shader (if present) and subsequently used for tessellation.
:type: int
"""
value = c_int()
gl.glGetIntegerv(gl.GL_PATCH_VERTICES, value)
return value.value
@patch_vertices.setter
def patch_vertices(self, value: int):
if not isinstance(value, int):
raise TypeError("patch_vertices must be an integer")
gl.glPatchParameteri(gl.GL_PATCH_VERTICES, value)
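        # Usage sketch (3 is illustrative): ctx.patch_vertices = 3 makes every
        # PATCHES draw feed three vertices to the tessellation control shader.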
@property
def point_size(self) -> float:
"""float: Get or set the point size."""
return self._point_size
@point_size.setter
def point_size(self, value: float):
gl.glPointSize(self._point_size)
self._point_size = value
@property
def primitive_restart_index(self) -> int:
"""Get or set the primitive restart index. Default is -1"""
return self._primitive_restart_index
@primitive_restart_index.setter
def primitive_restart_index(self, value: int):
self._primitive_restart_index = value
gl.glPrimitiveRestartIndex(value)
def finish(self) -> None:
"""Wait until all OpenGL rendering commands are completed"""
gl.glFinish()
# --- Resource methods ---
def buffer(
self, *, data: Optional[Any] = None, reserve: int = 0, usage: str = "static"
) -> Buffer:
"""Create a new OpenGL Buffer object.
:param Any data: The buffer data, This can be ``bytes`` or an object supporting the buffer protocol.
:param int reserve: The number of bytes reserve
:param str usage: Buffer usage. 'static', 'dynamic' or 'stream'
:rtype: :py:class:`~arcade.gl.Buffer`
"""
# create_with_size
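        # e.g. ctx.buffer(data=b"\x00" * 16) or ctx.buffer(reserve=1024, usage="dynamic")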
return Buffer(self, data, reserve=reserve, usage=usage)
def framebuffer(
self,
*,
color_attachments: Union[Texture, List[Texture]] = None,
depth_attachment: Texture = None
) -> Framebuffer:
"""Create a Framebuffer.
:param List[arcade.gl.Texture] color_attachments: List of textures we want to render into
:param arcade.gl.Texture depth_attachment: Depth texture
:rtype: :py:class:`~arcade.gl.Framebuffer`
"""
return Framebuffer(
self, color_attachments=color_attachments, depth_attachment=depth_attachment
)
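        # e.g. fbo = ctx.framebuffer(color_attachments=[ctx.texture((256, 256))])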
def texture(
self,
size: Tuple[int, int],
*,
components: int = 4,
dtype: str = "f1",
data: Any = None,
wrap_x: gl.GLenum = None,
wrap_y: gl.GLenum = None,
filter: Tuple[gl.GLenum, gl.GLenum] = None
) -> Texture:
"""Create a 2D Texture.
Wrap modes: ``GL_REPEAT``, ``GL_MIRRORED_REPEAT``, ``GL_CLAMP_TO_EDGE``, ``GL_CLAMP_TO_BORDER``
Minifying filters: ``GL_NEAREST``, ``GL_LINEAR``, ``GL_NEAREST_MIPMAP_NEAREST``, ``GL_LINEAR_MIPMAP_NEAREST``
``GL_NEAREST_MIPMAP_LINEAR``, ``GL_LINEAR_MIPMAP_LINEAR``
Magnifying filters: ``GL_NEAREST``, ``GL_LINEAR``
:param Tuple[int, int] size: The size of the texture
:param int components: Number of components (1: R, 2: RG, 3: RGB, 4: RGBA)
:param str dtype: The data type of each component: f1, f2, f4 / i1, i2, i4 / u1, u2, u4
:param Any data: The texture data (optional). Can be bytes or an object supporting the buffer protocol.
:param GLenum wrap_x: How the texture wraps in x direction
:param GLenum wrap_y: How the texture wraps in y direction
:param Tuple[GLenum,GLenum] filter: Minification and magnification filter
"""
return Texture(
self,
size,
components=components,
data=data,
dtype=dtype,
wrap_x=wrap_x,
wrap_y=wrap_y,
filter=filter,
)
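        # e.g. ctx.texture((256, 256), components=4, filter=(ctx.NEAREST, ctx.NEAREST))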
def depth_texture(self, size: Tuple[int, int], *, data=None) -> Texture:
"""Create a 2D depth texture
:param Tuple[int, int] size: The size of the texture
:param Any data: The texture data (optional). Can be bytes or an object supporting the buffer protocol.
"""
return Texture(self, size, data=data, depth=True)
def geometry(
self,
content: Optional[Sequence[BufferDescription]] = None,
index_buffer: Buffer = None,
mode: int = None,
index_element_size: int = 4,
):
"""
        Create a Geometry instance.
:param list content: List of :py:class:`~arcade.gl.BufferDescription` (optional)
:param Buffer index_buffer: Index/element buffer (optional)
        :param int mode: The default draw mode (optional)
:param int index_element_size: Byte size of the index buffer type. Can be 1, 2 or 4 (8, 16 or 32 bit unsigned integer)
"""
return Geometry(self, content, index_buffer=index_buffer, mode=mode, index_element_size=index_element_size)
def program(
self,
*,
vertex_shader: str,
fragment_shader: str = None,
geometry_shader: str = None,
tess_control_shader: str = None,
tess_evaluation_shader: str = None,
defines: Dict[str, str] = None
) -> Program:
"""Create a :py:class:`~arcade.gl.Program` given the vertex, fragment and geometry shader.
:param str vertex_shader: vertex shader source
:param str fragment_shader: fragment shader source (optional)
:param str geometry_shader: geometry shader source (optional)
:param str tess_control_shader: tessellation control shader source (optional)
:param str tess_evaluation_shader: tessellation evaluation shader source (optional)
:param dict defines: Substitute #defines values in the source (optional)
:rtype: :py:class:`~arcade.gl.Program`
"""
source_vs = ShaderSource(vertex_shader, gl.GL_VERTEX_SHADER)
source_fs = (
ShaderSource(fragment_shader, gl.GL_FRAGMENT_SHADER)
if fragment_shader
else None
)
source_geo = (
ShaderSource(geometry_shader, gl.GL_GEOMETRY_SHADER)
if geometry_shader
else None
)
source_tc = (
ShaderSource(tess_control_shader, gl.GL_TESS_CONTROL_SHADER)
if tess_control_shader
else None
)
source_te = (
ShaderSource(tess_evaluation_shader, gl.GL_TESS_EVALUATION_SHADER)
if tess_evaluation_shader
else None
)
# If we don't have a fragment shader we are doing transform feedback.
# When a geometry shader is present the out attributes will be located there
out_attributes = [] # type: List[str]
if not source_fs:
if source_geo:
out_attributes = source_geo.out_attributes
else:
out_attributes = source_vs.out_attributes
return Program(
self,
vertex_shader=source_vs.get_source(defines=defines),
fragment_shader=source_fs.get_source(defines=defines)
if source_fs
else None,
geometry_shader=source_geo.get_source(defines=defines)
if source_geo
else None,
tess_control_shader=source_tc.get_source(defines=defines)
if source_tc
else None,
tess_evaluation_shader=source_te.get_source(defines=defines)
if source_te
else None,
out_attributes=out_attributes,
)
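        # Usage sketch (minimal GLSL sources, illustrative only):
        #     program = ctx.program(
        #         vertex_shader="#version 330\nin vec2 in_pos;\n"
        #                       "void main() { gl_Position = vec4(in_pos, 0.0, 1.0); }",
        #         fragment_shader="#version 330\nout vec4 f_color;\n"
        #                         "void main() { f_color = vec4(1.0); }",
        #     )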
def query(self):
"""
        Create a query object for measuring rendering calls in OpenGL.
:rtype: :py:class:`~arcade.gl.Query`
"""
return Query(self)
class ContextStats:
def __init__(self, warn_threshold=100):
self.warn_threshold = warn_threshold
# (created, freed)
self.texture = (0, 0)
self.framebuffer = (0, 0)
self.buffer = (0, 0)
self.program = (0, 0)
self.vertex_array = (0, 0)
self.geometry = (0, 0)
def incr(self, key):
created, freed = getattr(self, key)
setattr(self, key, (created + 1, freed))
if created % self.warn_threshold == 0 and created > 0:
LOG.debug(
"%s allocations passed threshold (%s) [created = %s] [freed = %s] [active = %s]",
key,
self.warn_threshold,
created,
freed,
created - freed,
)
def decr(self, key):
created, freed = getattr(self, key)
setattr(self, key, (created, freed + 1))
class Limits:
"""OpenGL Limitations"""
def __init__(self, ctx):
self._ctx = ctx
#: Minor version number of the OpenGL API supported by the current context
self.MINOR_VERSION = self.get(gl.GL_MINOR_VERSION)
#: Major version number of the OpenGL API supported by the current context.
self.MAJOR_VERSION = self.get(gl.GL_MAJOR_VERSION)
self.VENDOR = self.get_str(gl.GL_VENDOR)
self.RENDERER = self.get_str(gl.GL_RENDERER)
#: Value indicating the number of sample buffers associated with the framebuffer
self.SAMPLE_BUFFERS = self.get(gl.GL_SAMPLE_BUFFERS)
#: An estimate of the number of bits of subpixel resolution
#: that are used to position rasterized geometry in window coordinates
self.SUBPIXEL_BITS = self.get(gl.GL_SUBPIXEL_BITS)
#: A mask value indicating what context profile is used (core, compat etc.)
self.CONTEXT_PROFILE_MASK = self.get(gl.GL_CONTEXT_PROFILE_MASK)
#: Minimum required alignment for uniform buffer sizes and offset
self.UNIFORM_BUFFER_OFFSET_ALIGNMENT = self.get(
gl.GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT
)
#: Value indicates the maximum number of layers allowed in an array texture, and must be at least 256
self.MAX_ARRAY_TEXTURE_LAYERS = self.get(gl.GL_MAX_ARRAY_TEXTURE_LAYERS)
#: A rough estimate of the largest 3D texture that the GL can handle. The value must be at least 64
self.MAX_3D_TEXTURE_SIZE = self.get(gl.GL_MAX_3D_TEXTURE_SIZE)
#: Maximum number of color attachments in a framebuffer
self.MAX_COLOR_ATTACHMENTS = self.get(gl.GL_MAX_COLOR_ATTACHMENTS)
#: Maximum number of samples in a color multisample texture
self.MAX_COLOR_TEXTURE_SAMPLES = self.get(gl.GL_MAX_COLOR_TEXTURE_SAMPLES)
#: the number of words for fragment shader uniform variables in all uniform blocks
self.MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS = self.get(
gl.GL_MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS
)
#: Number of words for geometry shader uniform variables in all uniform blocks
self.MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS = self.get(
gl.GL_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS
)
#: Maximum supported texture image units that can be used to access texture maps from the vertex shader
self.MAX_COMBINED_TEXTURE_IMAGE_UNITS = self.get(
gl.GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS
)
#: Maximum number of uniform blocks per program
self.MAX_COMBINED_UNIFORM_BLOCKS = self.get(gl.GL_MAX_COMBINED_UNIFORM_BLOCKS)
#: Number of words for vertex shader uniform variables in all uniform blocks
self.MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS = self.get(
gl.GL_MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS
)
#: A rough estimate of the largest cube-map texture that the GL can handle
self.MAX_CUBE_MAP_TEXTURE_SIZE = self.get(gl.GL_MAX_CUBE_MAP_TEXTURE_SIZE)
#: Maximum number of samples in a multisample depth or depth-stencil texture
self.MAX_DEPTH_TEXTURE_SAMPLES = self.get(gl.GL_MAX_DEPTH_TEXTURE_SAMPLES)
#: Maximum number of simultaneous outputs that may be written in a fragment shader
self.MAX_DRAW_BUFFERS = self.get(gl.GL_MAX_DRAW_BUFFERS)
#: Maximum number of active draw buffers when using dual-source blending
self.MAX_DUAL_SOURCE_DRAW_BUFFERS = self.get(gl.GL_MAX_DUAL_SOURCE_DRAW_BUFFERS)
#: Recommended maximum number of vertex array indices
self.MAX_ELEMENTS_INDICES = self.get(gl.GL_MAX_ELEMENTS_INDICES)
#: Recommended maximum number of vertex array vertices
self.MAX_ELEMENTS_VERTICES = self.get(gl.GL_MAX_ELEMENTS_VERTICES)
#: Maximum number of components of the inputs read by the fragment shader
self.MAX_FRAGMENT_INPUT_COMPONENTS = self.get(
gl.GL_MAX_FRAGMENT_INPUT_COMPONENTS
)
#: Maximum number of individual floating-point, integer, or boolean values that can be
#: held in uniform variable storage for a fragment shader
self.MAX_FRAGMENT_UNIFORM_COMPONENTS = self.get(
gl.GL_MAX_FRAGMENT_UNIFORM_COMPONENTS
)
#: maximum number of individual 4-vectors of floating-point, integer,
#: or boolean values that can be held in uniform variable storage for a fragment shader
self.MAX_FRAGMENT_UNIFORM_VECTORS = self.get(gl.GL_MAX_FRAGMENT_UNIFORM_VECTORS)
#: Maximum number of uniform blocks per fragment shader.
self.MAX_FRAGMENT_UNIFORM_BLOCKS = self.get(gl.GL_MAX_FRAGMENT_UNIFORM_BLOCKS)
#: Maximum number of components of inputs read by a geometry shader
self.MAX_GEOMETRY_INPUT_COMPONENTS = self.get(
gl.GL_MAX_GEOMETRY_INPUT_COMPONENTS
)
#: Maximum number of components of outputs written by a geometry shader
self.MAX_GEOMETRY_OUTPUT_COMPONENTS = self.get(
gl.GL_MAX_GEOMETRY_OUTPUT_COMPONENTS
)
#: Maximum supported texture image units that can be used to access texture maps from the geometry shader
self.MAX_GEOMETRY_TEXTURE_IMAGE_UNITS = self.get(
gl.GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS
)
#: Maximum number of uniform blocks per geometry shader
self.MAX_GEOMETRY_UNIFORM_BLOCKS = self.get(gl.GL_MAX_GEOMETRY_UNIFORM_BLOCKS)
#: Maximum number of individual floating-point, integer, or boolean values that can
#: be held in uniform variable storage for a geometry shader
self.MAX_GEOMETRY_UNIFORM_COMPONENTS = self.get(
gl.GL_MAX_GEOMETRY_UNIFORM_COMPONENTS
)
#: Maximum number of samples supported in integer format multisample buffers
self.MAX_INTEGER_SAMPLES = self.get(gl.GL_MAX_INTEGER_SAMPLES)
#: Maximum samples for a framebuffer
self.MAX_SAMPLES = self.get(gl.GL_MAX_SAMPLES)
#: A rough estimate of the largest rectangular texture that the GL can handle
self.MAX_RECTANGLE_TEXTURE_SIZE = self.get(gl.GL_MAX_RECTANGLE_TEXTURE_SIZE)
#: Maximum supported size for renderbuffers
self.MAX_RENDERBUFFER_SIZE = self.get(gl.GL_MAX_RENDERBUFFER_SIZE)
#: Maximum number of sample mask words
self.MAX_SAMPLE_MASK_WORDS = self.get(gl.GL_MAX_SAMPLE_MASK_WORDS)
#: Maximum number of texels allowed in the texel array of a texture buffer object
self.MAX_TEXTURE_BUFFER_SIZE = self.get(gl.GL_MAX_TEXTURE_BUFFER_SIZE)
        #: Maximum number of uniform buffer binding points on the context
        self.MAX_UNIFORM_BUFFER_BINDINGS = self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS)
        #: The value gives a rough estimate of the largest texture that the GL can handle
        self.MAX_TEXTURE_SIZE = self.get(gl.GL_MAX_TEXTURE_SIZE)
#: Maximum size in basic machine units of a uniform block
self.MAX_UNIFORM_BLOCK_SIZE = self.get(gl.GL_MAX_UNIFORM_BLOCK_SIZE)
        #: The number of 4-vectors for varying variables
self.MAX_VARYING_VECTORS = self.get(gl.GL_MAX_VARYING_VECTORS)
#: Maximum number of 4-component generic vertex attributes accessible to a vertex shader.
self.MAX_VERTEX_ATTRIBS = self.get(gl.GL_MAX_VERTEX_ATTRIBS)
#: Maximum supported texture image units that can be used to access texture maps from the vertex shader.
self.MAX_VERTEX_TEXTURE_IMAGE_UNITS = self.get(
gl.GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS
)
#: Maximum number of individual floating-point, integer, or boolean values that
#: can be held in uniform variable storage for a vertex shader
self.MAX_VERTEX_UNIFORM_COMPONENTS = self.get(
gl.GL_MAX_VERTEX_UNIFORM_COMPONENTS
)
#: Maximum number of 4-vectors that may be held in uniform variable storage for the vertex shader
self.MAX_VERTEX_UNIFORM_VECTORS = self.get(gl.GL_MAX_VERTEX_UNIFORM_VECTORS)
#: Maximum number of components of output written by a vertex shader
self.MAX_VERTEX_OUTPUT_COMPONENTS = self.get(gl.GL_MAX_VERTEX_OUTPUT_COMPONENTS)
#: Maximum number of uniform blocks per vertex shader.
self.MAX_VERTEX_UNIFORM_BLOCKS = self.get(gl.GL_MAX_VERTEX_UNIFORM_BLOCKS)
# self.MAX_VERTEX_ATTRIB_RELATIVE_OFFSET = self.get(gl.GL_MAX_VERTEX_ATTRIB_RELATIVE_OFFSET)
# self.MAX_VERTEX_ATTRIB_BINDINGS = self.get(gl.GL_MAX_VERTEX_ATTRIB_BINDINGS)
self.MAX_TEXTURE_IMAGE_UNITS = self.get(gl.GL_MAX_TEXTURE_IMAGE_UNITS)
# TODO: Missing in pyglet
# self.MAX_TEXTURE_MAX_ANISOTROPY = self.get_float(gl.GL_MAX_TEXTURE_MAX_ANISOTROPY)
err = self._ctx.error
if err:
from warnings import warn
warn("Error happened while querying of limits. Moving on ..")
def get(self, enum: gl.GLenum) -> int:
"""Get an integer limit"""
value = c_int()
gl.glGetIntegerv(enum, value)
return value.value
def get_float(self, enum) -> float:
"""Get a float limit"""
value = c_float()
gl.glGetFloatv(enum, value)
return value.value
def get_str(self, enum: gl.GLenum) -> str:
"""Get a string limit"""
return cast(gl.glGetString(enum), c_char_p).value.decode() # type: ignore
| [((563, 590), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (580, 590), False, 'import logging\n'), ((4798, 4817), 'weakref.ref', 'weakref.ref', (['window'], {}), '(window)\n', (4809, 4817), False, 'import weakref\n'), ((5661, 5705), 'pyglet.gl.glEnable', 'gl.glEnable', (['gl.GL_TEXTURE_CUBE_MAP_SEAMLESS'], {}), '(gl.GL_TEXTURE_CUBE_MAP_SEAMLESS)\n', (5672, 5705), False, 'from pyglet import gl\n'), ((5769, 5805), 'pyglet.gl.glEnable', 'gl.glEnable', (['gl.GL_PRIMITIVE_RESTART'], {}), '(gl.GL_PRIMITIVE_RESTART)\n', (5780, 5805), False, 'from pyglet import gl\n'), ((6111, 6142), 'pyglet.gl.glEnable', 'gl.glEnable', (['gl.GL_SCISSOR_TEST'], {}), '(gl.GL_SCISSOR_TEST)\n', (6122, 6142), False, 'from pyglet import gl\n'), ((6500, 6507), 'collections.deque', 'deque', ([], {}), '()\n', (6505, 6507), False, 'from collections import deque\n'), ((8593, 8608), 'pyglet.gl.glGetError', 'gl.glGetError', ([], {}), '()\n', (8606, 8608), False, 'from pyglet import gl\n'), ((11991, 12025), 'pyglet.gl.glBlendFunc', 'gl.glBlendFunc', (['value[0]', 'value[1]'], {}), '(value[0], value[1])\n', (12005, 12025), False, 'from pyglet import gl\n'), ((12442, 12449), 'ctypes.c_int', 'c_int', ([], {}), '()\n', (12447, 12449), False, 'from ctypes import c_int, c_char_p, cast, c_float\n'), ((12458, 12503), 'pyglet.gl.glGetIntegerv', 'gl.glGetIntegerv', (['gl.GL_PATCH_VERTICES', 'value'], {}), '(gl.GL_PATCH_VERTICES, value)\n', (12474, 12503), False, 'from pyglet import gl\n'), ((12714, 12763), 'pyglet.gl.glPatchParameteri', 'gl.glPatchParameteri', (['gl.GL_PATCH_VERTICES', 'value'], {}), '(gl.GL_PATCH_VERTICES, value)\n', (12734, 12763), False, 'from pyglet import gl\n'), ((12966, 12998), 'pyglet.gl.glPointSize', 'gl.glPointSize', (['self._point_size'], {}), '(self._point_size)\n', (12980, 12998), False, 'from pyglet import gl\n'), ((13348, 13381), 'pyglet.gl.glPrimitiveRestartIndex', 'gl.glPrimitiveRestartIndex', (['value'], {}), '(value)\n', (13374, 13381), False, 'from pyglet import gl\n'), ((13490, 13503), 'pyglet.gl.glFinish', 'gl.glFinish', ([], {}), '()\n', (13501, 13503), False, 'from pyglet import gl\n'), ((30326, 30333), 'ctypes.c_int', 'c_int', ([], {}), '()\n', (30331, 30333), False, 'from ctypes import c_int, c_char_p, cast, c_float\n'), ((30342, 30371), 'pyglet.gl.glGetIntegerv', 'gl.glGetIntegerv', (['enum', 'value'], {}), '(enum, value)\n', (30358, 30371), False, 'from pyglet import gl\n'), ((30488, 30497), 'ctypes.c_float', 'c_float', ([], {}), '()\n', (30495, 30497), False, 'from ctypes import c_int, c_char_p, cast, c_float\n'), ((30506, 30533), 'pyglet.gl.glGetFloatv', 'gl.glGetFloatv', (['enum', 'value'], {}), '(enum, value)\n', (30520, 30533), False, 'from pyglet import gl\n'), ((9178, 9195), 'pyglet.gl.glEnable', 'gl.glEnable', (['flag'], {}), '(flag)\n', (9189, 9195), False, 'from pyglet import gl\n'), ((9846, 9869), 'pyglet.gl.glEnable', 'gl.glEnable', (['self.BLEND'], {}), '(self.BLEND)\n', (9857, 9869), False, 'from pyglet import gl\n'), ((9896, 9920), 'pyglet.gl.glDisable', 'gl.glDisable', (['self.BLEND'], {}), '(self.BLEND)\n', (9908, 9920), False, 'from pyglet import gl\n'), ((9977, 10005), 'pyglet.gl.glEnable', 'gl.glEnable', (['self.DEPTH_TEST'], {}), '(self.DEPTH_TEST)\n', (9988, 10005), False, 'from pyglet import gl\n'), ((10032, 10061), 'pyglet.gl.glDisable', 'gl.glDisable', (['self.DEPTH_TEST'], {}), '(self.DEPTH_TEST)\n', (10044, 10061), False, 'from pyglet import gl\n'), ((10117, 10144), 'pyglet.gl.glEnable', 'gl.glEnable', (['self.CULL_FACE'], {}), 
'(self.CULL_FACE)\n', (10128, 10144), False, 'from pyglet import gl\n'), ((10171, 10199), 'pyglet.gl.glDisable', 'gl.glDisable', (['self.CULL_FACE'], {}), '(self.CULL_FACE)\n', (10183, 10199), False, 'from pyglet import gl\n'), ((10264, 10300), 'pyglet.gl.glEnable', 'gl.glEnable', (['self.PROGRAM_POINT_SIZE'], {}), '(self.PROGRAM_POINT_SIZE)\n', (10275, 10300), False, 'from pyglet import gl\n'), ((10327, 10364), 'pyglet.gl.glDisable', 'gl.glDisable', (['self.PROGRAM_POINT_SIZE'], {}), '(self.PROGRAM_POINT_SIZE)\n', (10339, 10364), False, 'from pyglet import gl\n'), ((10682, 10700), 'pyglet.gl.glDisable', 'gl.glDisable', (['flag'], {}), '(flag)\n', (10694, 10700), False, 'from pyglet import gl\n'), ((30169, 30230), 'warnings.warn', 'warn', (['"""Error happened while querying of limits. Moving on .."""'], {}), "('Error happened while querying of limits. Moving on ..')\n", (30173, 30230), False, 'from warnings import warn\n'), ((30662, 30682), 'pyglet.gl.glGetString', 'gl.glGetString', (['enum'], {}), '(enum)\n', (30676, 30682), False, 'from pyglet import gl\n')] |
pixelater/queue-management | api/app/models/bookings/exam.py | 9881505d4af2b9860aeaf76b9572315dd016c7dc | '''Copyright 2018 Province of British Columbia
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.'''
from app.models.bookings import Base
from qsystem import db
class Exam(Base):
exam_id = db.Column(db.Integer, primary_key=True, autoincrement=True, nullable=False)
booking_id = db.Column(db.Integer, db.ForeignKey("booking.booking_id", ondelete="set null"), nullable=True)
exam_type_id = db.Column(db.Integer, db.ForeignKey("examtype.exam_type_id"), nullable=False)
office_id = db.Column(db.Integer, db.ForeignKey("office.office_id"), nullable=False)
event_id = db.Column(db.String(25), nullable=False)
exam_name = db.Column(db.String(50), nullable=False)
examinee_name = db.Column(db.String(50), nullable=True)
expiry_date = db.Column(db.DateTime, nullable=True)
notes = db.Column(db.String(400), nullable=True)
exam_received_date = db.Column(db.DateTime, nullable=True)
session_number = db.Column(db.Integer, nullable=True)
number_of_students = db.Column(db.Integer, nullable=True)
exam_method = db.Column(db.String(15), nullable=False)
deleted_date = db.Column(db.String(50), nullable=True)
exam_returned_ind = db.Column(db.Integer, nullable=False, default=0)
exam_returned_tracking_number = db.Column(db.String(50), nullable=True)
offsite_location = db.Column(db.String(50), nullable=True)
booking = db.relationship("Booking")
exam_type = db.relationship("ExamType")
office = db.relationship("Office")
def __repr__(self):
return '<Exam Name: (name={self.exam_name!r})>'.format(self=self)
def __init__(self, **kwargs):
super(Exam, self).__init__(**kwargs)
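    # Minimal usage sketch (field values are illustrative):
    #     exam = Exam(exam_type_id=1, office_id=1, event_id="E-001",
    #                 exam_name="Knowledge Test", exam_method="paper")
    #     db.session.add(exam)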
| [((667, 742), 'qsystem.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)', 'autoincrement': '(True)', 'nullable': '(False)'}), '(db.Integer, primary_key=True, autoincrement=True, nullable=False)\n', (676, 742), False, 'from qsystem import db\n'), ((1232, 1269), 'qsystem.db.Column', 'db.Column', (['db.DateTime'], {'nullable': '(True)'}), '(db.DateTime, nullable=True)\n', (1241, 1269), False, 'from qsystem import db\n'), ((1348, 1385), 'qsystem.db.Column', 'db.Column', (['db.DateTime'], {'nullable': '(True)'}), '(db.DateTime, nullable=True)\n', (1357, 1385), False, 'from qsystem import db\n'), ((1407, 1443), 'qsystem.db.Column', 'db.Column', (['db.Integer'], {'nullable': '(True)'}), '(db.Integer, nullable=True)\n', (1416, 1443), False, 'from qsystem import db\n'), ((1469, 1505), 'qsystem.db.Column', 'db.Column', (['db.Integer'], {'nullable': '(True)'}), '(db.Integer, nullable=True)\n', (1478, 1505), False, 'from qsystem import db\n'), ((1648, 1696), 'qsystem.db.Column', 'db.Column', (['db.Integer'], {'nullable': '(False)', 'default': '(0)'}), '(db.Integer, nullable=False, default=0)\n', (1657, 1696), False, 'from qsystem import db\n'), ((1852, 1878), 'qsystem.db.relationship', 'db.relationship', (['"""Booking"""'], {}), "('Booking')\n", (1867, 1878), False, 'from qsystem import db\n'), ((1895, 1922), 'qsystem.db.relationship', 'db.relationship', (['"""ExamType"""'], {}), "('ExamType')\n", (1910, 1922), False, 'from qsystem import db\n'), ((1936, 1961), 'qsystem.db.relationship', 'db.relationship', (['"""Office"""'], {}), "('Office')\n", (1951, 1961), False, 'from qsystem import db\n'), ((782, 838), 'qsystem.db.ForeignKey', 'db.ForeignKey', (['"""booking.booking_id"""'], {'ondelete': '"""set null"""'}), "('booking.booking_id', ondelete='set null')\n", (795, 838), False, 'from qsystem import db\n'), ((896, 934), 'qsystem.db.ForeignKey', 'db.ForeignKey', (['"""examtype.exam_type_id"""'], {}), "('examtype.exam_type_id')\n", (909, 934), False, 'from qsystem import db\n'), ((990, 1023), 'qsystem.db.ForeignKey', 'db.ForeignKey', (['"""office.office_id"""'], {}), "('office.office_id')\n", (1003, 1023), False, 'from qsystem import db\n'), ((1066, 1079), 'qsystem.db.String', 'db.String', (['(25)'], {}), '(25)\n', (1075, 1079), False, 'from qsystem import db\n'), ((1123, 1136), 'qsystem.db.String', 'db.String', (['(50)'], {}), '(50)\n', (1132, 1136), False, 'from qsystem import db\n'), ((1184, 1197), 'qsystem.db.String', 'db.String', (['(50)'], {}), '(50)\n', (1193, 1197), False, 'from qsystem import db\n'), ((1292, 1306), 'qsystem.db.String', 'db.String', (['(400)'], {}), '(400)\n', (1301, 1306), False, 'from qsystem import db\n'), ((1534, 1547), 'qsystem.db.String', 'db.String', (['(15)'], {}), '(15)\n', (1543, 1547), False, 'from qsystem import db\n'), ((1594, 1607), 'qsystem.db.String', 'db.String', (['(50)'], {}), '(50)\n', (1603, 1607), False, 'from qsystem import db\n'), ((1743, 1756), 'qsystem.db.String', 'db.String', (['(50)'], {}), '(50)\n', (1752, 1756), False, 'from qsystem import db\n'), ((1806, 1819), 'qsystem.db.String', 'db.String', (['(50)'], {}), '(50)\n', (1815, 1819), False, 'from qsystem import db\n')] |
jaredliw/python-question-bank | leetcode/1672 Richest Customer Wealth.py | 9c8c246623d8d171f875700b57772df0afcbdcdf | class Solution(object):
def maximumWealth(self, accounts):
"""
:type accounts: List[List[int]]
:rtype: int
"""
# Runtime: 36 ms
# Memory: 13.5 MB
return max(map(sum, accounts))
| [] |
gregneagle/sal | datatableview/tests/test_helpers.py | 74c583fb1c1b33d3201b308b147376b3dcaca33f | # -*- encoding: utf-8 -*-
from datetime import datetime
from functools import partial
from django import get_version
from datatableview import helpers
import six
from .testcase import DatatableViewTestCase
from .test_app.models import ExampleModel, RelatedM2MModel
if get_version().split('.') < ['1', '7']:
test_data_fixture = 'test_data_legacy.json'
else:
test_data_fixture = 'test_data.json'
class HelpersTests(DatatableViewTestCase):
fixtures = [test_data_fixture]
def test_link_to_model(self):
""" Verifies that link_to_model works. """
helper = helpers.link_to_model
# Verify that a model without get_absolute_url() raises a complaint
related = RelatedM2MModel.objects.get(pk=1)
with self.assertRaises(AttributeError) as cm:
helper(related)
self.assertEqual(str(cm.exception), "'RelatedM2MModel' object has no attribute 'get_absolute_url'")
# Verify simple use
instance = ExampleModel.objects.get(pk=1)
output = helper(instance)
self.assertEqual(output, '<a href="#1">ExampleModel 1</a>')
# Verify text override
output = helper(instance, text="Special text")
self.assertEqual(output, '<a href="#1">Special text</a>')
# Verify ``key`` access to transition an instance to a related field
instance = ExampleModel.objects.get(pk=2)
secondary_helper = helper(key=lambda o: o.related)
output = secondary_helper(instance)
self.assertEqual(output, '<a href="#1">RelatedModel object</a>')
# Verify ``key`` access version of custom text
output = secondary_helper(instance, text="Special text")
self.assertEqual(output, '<a href="#1">Special text</a>')
def test_make_boolean_checkmark(self):
""" Verifies that make_boolean_checkmark works. """
helper = helpers.make_boolean_checkmark
# Verify simple use
output = helper("True-ish value")
self.assertEqual(output, '✔')
output = helper("")
self.assertEqual(output, '✘')
# Verify custom values
output = helper("True-ish value", true_value="Yes", false_value="No")
self.assertEqual(output, 'Yes')
output = helper("", true_value="Yes", false_value="No")
self.assertEqual(output, 'No')
def test_format_date(self):
""" Verifies that format_date works. """
helper = helpers.format_date
# Verify simple use
data = datetime.now()
secondary_helper = helper("%m/%d/%Y")
output = secondary_helper(data)
self.assertEqual(output, data.strftime("%m/%d/%Y"))
# Verify that None objects get swallowed without complaint.
        # This helps ensure that the helper won't blow up for models.DateTimeField columns that are allowed
        # to be null.
output = secondary_helper(None)
self.assertEqual(output, "")
def test_format(self):
""" Verifies that format works. """
helper = helpers.format
# Verify simple use
data = 1234567890
secondary_helper = helper("{0:,}")
output = secondary_helper(data)
self.assertEqual(output, "{0:,}".format(data))
# Verify ``cast`` argument
data = "1234.56789"
secondary_helper = helper("{0:.2f}", cast=float)
output = secondary_helper(data)
self.assertEqual(output, "{0:.2f}".format(float(data)))
def test_through_filter(self):
""" Verifies that through_filter works. """
helper = helpers.through_filter
target_function = lambda data, arg=None: (data, arg)
# Verify simple use
data = "Data string"
secondary_helper = helper(target_function)
output = secondary_helper(data)
self.assertEqual(output, (data, None))
# Verify ``arg`` argument
secondary_helper = helper(target_function, arg="Arg data")
output = secondary_helper(data)
self.assertEqual(output, (data, "Arg data"))
def test_itemgetter(self):
""" Verifies that itemgetter works. """
helper = helpers.itemgetter
# Verify simple index access
data = list(range(5))
secondary_helper = helper(-1)
output = secondary_helper(data)
self.assertEqual(output, data[-1])
# Verify slicing access
secondary_helper = helper(slice(1, 3))
output = secondary_helper(data)
self.assertEqual(output, data[1:3])
# Verify ellipsis works for strings
data = str(range(10))
secondary_helper = helper(slice(0, 5), ellipsis=True)
output = secondary_helper(data)
self.assertEqual(output, data[:5] + "...")
# Verify ellipsis can be customized
secondary_helper = helper(slice(0, 5), ellipsis="custom")
output = secondary_helper(data)
self.assertEqual(output, data[:5] + "custom")
# Verify ellipsis does nothing for non-string data types
data = range(10)
output = secondary_helper(data)
self.assertEqual(output, data[:5])
def test_attrgetter(self):
""" Verifies that attrgetter works. """
helper = helpers.attrgetter
# Verify simple attr lookup
data = ExampleModel.objects.get(pk=1)
secondary_helper = helper('pk')
output = secondary_helper(data)
self.assertEqual(output, data.pk)
        # Verify bad attribute lookup
data = ExampleModel.objects.get(pk=1)
secondary_helper = helper('bad field name')
with self.assertRaises(AttributeError) as cm:
output = secondary_helper(data)
self.assertEqual(str(cm.exception), "'ExampleModel' object has no attribute 'bad field name'")
def test_make_xeditable(self):
""" Verifies that make_xeditable works. """
helper = helpers.make_xeditable
# Items that the helper normally expects in a callback context
internals = {'field_name': 'name'}
# Verify chain calls don't trigger rendering
secondary_helper = helper()
tertiary_helper = secondary_helper()
self.assertEqual(type(secondary_helper), partial)
self.assertEqual(type(tertiary_helper), partial)
# Verify chain ends with provision of a value
data = ExampleModel.objects.get(pk=1)
# This needs a "url" arg because we want to test successful use
output = tertiary_helper(data, url="/", **internals)
self.assertTrue(isinstance(output, six.string_types))
# Verify that no "view" kwarg means the url is required from the call
with self.assertRaises(ValueError) as cm:
tertiary_helper(data, **internals)
self.assertEqual(str(cm.exception), "'make_xeditable' cannot determine a value for 'url'.")
# Verify kwargs accumulate
kwargs1 = { 'type': 'textarea' }
kwargs2 = { 'other_arg': True }
secondary_helper = helper(**kwargs1)
expected_kwargs = dict(kwargs1, extra_attrs=[])
self.assertEqual(secondary_helper.keywords, expected_kwargs)
tertiary_helper = secondary_helper(**kwargs2)
expected_kwargs = dict(kwargs1, **dict(kwargs2, extra_attrs=[]))
self.assertEqual(tertiary_helper.keywords, expected_kwargs)
# Verify default kwarg names end up as attributes
data = ExampleModel.objects.get(pk=1)
kwargs = {
'pk': "PK DATA",
'type': "TYPE DATA",
'url': "URL DATA",
'source': "SOURCE DATA",
'title': "TITLE DATA",
'placeholder': "PLACEHOLDER DATA",
            # Extra stuff not anticipated to appear in the rendered string
'special': "SPECIAL DATA",
'data_custom': "DATA-CUSTOM DATA",
}
secondary_helper = helper(**kwargs)
output = secondary_helper(data, **internals)
expected_output = """
<a href="#" data-name="name"
data-pk="PK DATA"
data-placeholder="PLACEHOLDER DATA"
data-source="SOURCE DATA"
data-title="TITLE DATA"
data-type="TYPE DATA"
data-url="URL DATA"
data-value="1"
data-xeditable="xeditable">
ExampleModel 1
</a>
"""
self.assertHTMLEqual(output, expected_output)
# Verify that explicit additions via ``extra_attrs`` allows kwargs to appear in HTML as
# "data-*" attributes.
secondary_helper = helper(extra_attrs=['special', 'data_custom', 'fake'], **kwargs)
output = secondary_helper(data, **internals)
expected_output = """
<a href="#" data-name="name"
data-pk="PK DATA"
data-placeholder="PLACEHOLDER DATA"
data-source="SOURCE DATA"
data-title="TITLE DATA"
data-type="TYPE DATA"
data-url="URL DATA"
data-value="1"
data-special="SPECIAL DATA"
data-custom="DATA-CUSTOM DATA"
data-xeditable="xeditable">
ExampleModel 1
</a>
"""
self.assertHTMLEqual(output, expected_output)
| [((2514, 2528), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2526, 2528), False, 'from datetime import datetime\n'), ((274, 287), 'django.get_version', 'get_version', ([], {}), '()\n', (285, 287), False, 'from django import get_version\n')] |
naari3/seibaribot | discordbot.py | 3686206ed0b28b318a4032753350be8d9f2223fd | import traceback
from os import getenv
import discord
from discord import Message
from discord.ext import commands
from discord.ext.commands import Context
from asyncio import sleep
import asyncio
client = discord.Client()
# Use "!" as the bot's command prefix
bot = commands.Bot(command_prefix='!')
# ID of the Giratina channel
GIRATINA_CHANNEL_ID = 940610524415144036
WIP_CHANNEL_ID = 940966825087361025
@bot.event
async def on_command_error(ctx, error):
orig_error = getattr(error, 'original', error)
error_msg = ''.join(
traceback.TracebackException.from_exception(orig_error).format())
await ctx.send(error_msg)
# Function that sends the startup message
async def ready_greet():
channel = bot.get_channel(GIRATINA_CHANNEL_ID)
await channel.send('ギラティナ、オォン!')
# Function executed when the bot starts up
@bot.event
async def on_ready():
await ready_greet()
# Ping-pong
@bot.command()
async def ping(ctx):
await ctx.send('pong')
@bot.event
async def on_message(message):
    # Ignore the message if the sender is a bot
if message.author.bot:
return
    # Donald word filter - https://qiita.com/sizumita/items/9d44ae7d1ce007391699
    # If the message body contains "ドナルド" (Donald)
if 'ドナルド' in str(message.content):
        # Decide which message to send (at random)
        # Send it to the channel the message came from
await message.channel.send('https://tenor.com/view/ronald-mcdonald-insanity-ronald-mcdonald-gif-21974293')
    # If the message has attachments (in the WIP channel)
if message.attachments and message.channel.id == WIP_CHANNEL_ID:
for attachment in message.attachments:
            # If the attachment is an audio file (e.g. mp3 or wav)
# https://discordpy.readthedocs.io/ja/latest/api.html#attachment
if attachment.content_type and "audio" in attachment.content_type:
await attachment.save("input.mp3")
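                # Mux the saved audio with a static image (input.jpg) into an
                # mp4: -loop 1 repeats the image, -shortest ends with the audio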
command = "ffmpeg -y -loop 1 -i input.jpg -i input.mp3 -vcodec libx264 -vb 50k -acodec aac -strict experimental -ab 128k -ac 2 -ar 48000 -pix_fmt yuv420p -shortest output.mp4"
proc = await asyncio.create_subprocess_exec(
*command.split(" "),
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE)
stdout, stderr = await proc.communicate()
await message.channel.send(file=discord.File("output.mp4"))
await bot.process_commands(message)
# Chiiba-kun's "Nanohana" exercise video
@bot.command()
async def chiibakun(ctx):
await ctx.send('https://www.youtube.com/watch?v=dC0eie-WQss')
# Send a Kaosu-chan image
@bot.command()
async def kaosu(ctx):
await ctx.send('https://pbs.twimg.com/media/E512yaSVIAQxfNn?format=jpg&name=large')
# Inm meme response
@bot.command()
async def inm(ctx):
await ctx.send('聖バリ「イキスギィイクイク!!!ンアッー!!!マクラがデカすぎる!!!」\n\n'
f'{ctx.author.name}「聖なるバリア -ミラーフォース-、淫夢はもうやめてよ!淫夢ごっこは恥ずかしいよ!」\n\n聖バリ「{ctx.author.name}'
'、おっ大丈夫か大丈夫か〜???バッチェ冷えてるぞ〜淫夢が大好きだってはっきりわかんだね」')
# Send a Giratina image
@bot.command()
async def giratina(ctx):
await ctx.send('https://img.gamewith.jp/article/thumbnail/rectangle/36417.png')
# Play bokuseku.mp3 and leave - https://qiita.com/sizumita/items/cafd00fe3e114d834ce3
@bot.command()
async def bokuseku(ctx):
if ctx.author.voice is None:
await ctx.channel.send('望月くん・・・ボイスチャンネルに来なさい')
return
    # Connect to the voice channel
await ctx.author.voice.channel.connect()
    # Play the audio
ctx.guild.voice_client.play(discord.FFmpegPCMAudio('bokuseku.mp3'))
    # Check whether the audio is still playing
while ctx.guild.voice_client.is_playing():
await sleep(1)
    # Disconnect
await ctx.guild.voice_client.disconnect()
token = getenv('DISCORD_BOT_TOKEN')
bot.run(token)
| [((207, 223), 'discord.Client', 'discord.Client', ([], {}), '()\n', (221, 223), False, 'import discord\n'), ((246, 278), 'discord.ext.commands.Bot', 'commands.Bot', ([], {'command_prefix': '"""!"""'}), "(command_prefix='!')\n", (258, 278), False, 'from discord.ext import commands\n'), ((3475, 3502), 'os.getenv', 'getenv', (['"""DISCORD_BOT_TOKEN"""'], {}), "('DISCORD_BOT_TOKEN')\n", (3481, 3502), False, 'from os import getenv\n'), ((3280, 3318), 'discord.FFmpegPCMAudio', 'discord.FFmpegPCMAudio', (['"""bokuseku.mp3"""'], {}), "('bokuseku.mp3')\n", (3302, 3318), False, 'import discord\n'), ((3399, 3407), 'asyncio.sleep', 'sleep', (['(1)'], {}), '(1)\n', (3404, 3407), False, 'from asyncio import sleep\n'), ((511, 566), 'traceback.TracebackException.from_exception', 'traceback.TracebackException.from_exception', (['orig_error'], {}), '(orig_error)\n', (554, 566), False, 'import traceback\n'), ((2197, 2223), 'discord.File', 'discord.File', (['"""output.mp4"""'], {}), "('output.mp4')\n", (2209, 2223), False, 'import discord\n')] |
arghyadip01/grpc | test/cpp/naming/utils/dns_server.py | 9e10bfc8a096ef91a327e22f84f10c0fabff4417 | #!/usr/bin/env python2.7
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Starts a local DNS server for use in tests"""
import argparse
import sys
import yaml
import signal
import os
import threading
import time
import twisted
import twisted.internet
import twisted.internet.reactor
import twisted.internet.threads
import twisted.internet.defer
import twisted.internet.protocol
import twisted.names
import twisted.names.client
import twisted.names.dns
import twisted.names.server
from twisted.names import client, server, common, authority, dns
import platform
_SERVER_HEALTH_CHECK_RECORD_NAME = 'health-check-local-dns-server-is-alive.resolver-tests.grpctestingexp' # missing end '.' for twisted syntax
_SERVER_HEALTH_CHECK_RECORD_DATA = '123.123.123.123'
class NoFileAuthority(authority.FileAuthority):
def __init__(self, soa, records):
# skip FileAuthority
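        # (FileAuthority.__init__ would try to load records from a zone file
        # on disk, which this in-memory authority doesn't have)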
common.ResolverBase.__init__(self)
self.soa = soa
self.records = records
def start_local_dns_server(args):
all_records = {}
def _push_record(name, r):
print('pushing record: |%s|' % name)
if all_records.get(name) is not None:
all_records[name].append(r)
return
all_records[name] = [r]
def _maybe_split_up_txt_data(name, txt_data, r_ttl):
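        # A single DNS TXT character-string is capped at 255 bytes (RFC 1035),
        # so longer payloads are split into consecutive <=255-byte chunks.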
start = 0
txt_data_list = []
while len(txt_data[start:]) > 0:
next_read = len(txt_data[start:])
if next_read > 255:
next_read = 255
txt_data_list.append(txt_data[start:start + next_read])
start += next_read
_push_record(name, dns.Record_TXT(*txt_data_list, ttl=r_ttl))
with open(args.records_config_path) as config:
test_records_config = yaml.load(config)
common_zone_name = test_records_config['resolver_tests_common_zone_name']
for group in test_records_config['resolver_component_tests']:
for name in group['records'].keys():
for record in group['records'][name]:
r_type = record['type']
r_data = record['data']
r_ttl = int(record['TTL'])
record_full_name = '%s.%s' % (name, common_zone_name)
assert record_full_name[-1] == '.'
record_full_name = record_full_name[:-1]
if r_type == 'A':
_push_record(record_full_name,
dns.Record_A(r_data, ttl=r_ttl))
if r_type == 'AAAA':
_push_record(record_full_name,
dns.Record_AAAA(r_data, ttl=r_ttl))
if r_type == 'SRV':
p, w, port, target = r_data.split(' ')
p = int(p)
w = int(w)
port = int(port)
target_full_name = '%s.%s' % (target, common_zone_name)
r_data = '%s %s %s %s' % (p, w, port, target_full_name)
_push_record(
record_full_name,
dns.Record_SRV(p, w, port, target_full_name, ttl=r_ttl))
if r_type == 'TXT':
_maybe_split_up_txt_data(record_full_name, r_data, r_ttl)
    # Add an optional IPv4 record if specified
if args.add_a_record:
extra_host, extra_host_ipv4 = args.add_a_record.split(':')
_push_record(extra_host, dns.Record_A(extra_host_ipv4, ttl=0))
# Server health check record
_push_record(_SERVER_HEALTH_CHECK_RECORD_NAME,
dns.Record_A(_SERVER_HEALTH_CHECK_RECORD_DATA, ttl=0))
soa_record = dns.Record_SOA(mname=common_zone_name)
test_domain_com = NoFileAuthority(
soa=(common_zone_name, soa_record),
records=all_records,
)
server = twisted.names.server.DNSServerFactory(
authorities=[test_domain_com], verbose=2)
server.noisy = 2
twisted.internet.reactor.listenTCP(args.port, server)
dns_proto = twisted.names.dns.DNSDatagramProtocol(server)
dns_proto.noisy = 2
twisted.internet.reactor.listenUDP(args.port, dns_proto)
print('starting local dns server on 127.0.0.1:%s' % args.port)
print('starting twisted.internet.reactor')
twisted.internet.reactor.suggestThreadPoolSize(1)
twisted.internet.reactor.run()
def _quit_on_signal(signum, _frame):
print('Received SIGNAL %d. Quitting with exit code 0' % signum)
twisted.internet.reactor.stop()
sys.stdout.flush()
sys.exit(0)
def flush_stdout_loop():
num_timeouts_so_far = 0
sleep_time = 1
# Prevent zombies. Tests that use this server are short-lived.
max_timeouts = 60 * 10
while num_timeouts_so_far < max_timeouts:
sys.stdout.flush()
time.sleep(sleep_time)
num_timeouts_so_far += 1
    print('Process timeout reached, or cancelled. Exiting 0.')
os.kill(os.getpid(), signal.SIGTERM)
def main():
argp = argparse.ArgumentParser(
description='Local DNS Server for resolver tests')
argp.add_argument('-p',
'--port',
default=None,
type=int,
help='Port for DNS server to listen on for TCP and UDP.')
argp.add_argument(
'-r',
'--records_config_path',
default=None,
type=str,
help=('Directory of resolver_test_record_groups.yaml file. '
'Defaults to path needed when the test is invoked as part '
'of run_tests.py.'))
argp.add_argument(
'--add_a_record',
default=None,
type=str,
help=('Add an A record via the command line. Useful for when we '
'need to serve a one-off A record that is under a '
'different domain then the rest the records configured in '
'--records_config_path (which all need to be under the '
'same domain). Format: <name>:<ipv4 address>'))
args = argp.parse_args()
signal.signal(signal.SIGTERM, _quit_on_signal)
signal.signal(signal.SIGINT, _quit_on_signal)
output_flush_thread = threading.Thread(target=flush_stdout_loop)
output_flush_thread.setDaemon(True)
output_flush_thread.start()
start_local_dns_server(args)
if __name__ == '__main__':
main()
| [((4166, 4204), 'twisted.names.dns.Record_SOA', 'dns.Record_SOA', ([], {'mname': 'common_zone_name'}), '(mname=common_zone_name)\n', (4180, 4204), False, 'from twisted.names import client, server, common, authority, dns\n'), ((4336, 4415), 'twisted.names.server.DNSServerFactory', 'twisted.names.server.DNSServerFactory', ([], {'authorities': '[test_domain_com]', 'verbose': '(2)'}), '(authorities=[test_domain_com], verbose=2)\n', (4373, 4415), False, 'import twisted\n'), ((4450, 4503), 'twisted.internet.reactor.listenTCP', 'twisted.internet.reactor.listenTCP', (['args.port', 'server'], {}), '(args.port, server)\n', (4484, 4503), False, 'import twisted\n'), ((4520, 4565), 'twisted.names.dns.DNSDatagramProtocol', 'twisted.names.dns.DNSDatagramProtocol', (['server'], {}), '(server)\n', (4557, 4565), False, 'import twisted\n'), ((4594, 4650), 'twisted.internet.reactor.listenUDP', 'twisted.internet.reactor.listenUDP', (['args.port', 'dns_proto'], {}), '(args.port, dns_proto)\n', (4628, 4650), False, 'import twisted\n'), ((4769, 4818), 'twisted.internet.reactor.suggestThreadPoolSize', 'twisted.internet.reactor.suggestThreadPoolSize', (['(1)'], {}), '(1)\n', (4815, 4818), False, 'import twisted\n'), ((4823, 4853), 'twisted.internet.reactor.run', 'twisted.internet.reactor.run', ([], {}), '()\n', (4851, 4853), False, 'import twisted\n'), ((4965, 4996), 'twisted.internet.reactor.stop', 'twisted.internet.reactor.stop', ([], {}), '()\n', (4994, 4996), False, 'import twisted\n'), ((5001, 5019), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (5017, 5019), False, 'import sys\n'), ((5024, 5035), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (5032, 5035), False, 'import sys\n'), ((5471, 5545), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Local DNS Server for resolver tests"""'}), "(description='Local DNS Server for resolver tests')\n", (5494, 5545), False, 'import argparse\n'), ((6520, 6566), 'signal.signal', 'signal.signal', (['signal.SIGTERM', '_quit_on_signal'], {}), '(signal.SIGTERM, _quit_on_signal)\n', (6533, 6566), False, 'import signal\n'), ((6571, 6616), 'signal.signal', 'signal.signal', (['signal.SIGINT', '_quit_on_signal'], {}), '(signal.SIGINT, _quit_on_signal)\n', (6584, 6616), False, 'import signal\n'), ((6643, 6685), 'threading.Thread', 'threading.Thread', ([], {'target': 'flush_stdout_loop'}), '(target=flush_stdout_loop)\n', (6659, 6685), False, 'import threading\n'), ((1434, 1468), 'twisted.names.common.ResolverBase.__init__', 'common.ResolverBase.__init__', (['self'], {}), '(self)\n', (1462, 1468), False, 'from twisted.names import client, server, common, authority, dns\n'), ((2299, 2316), 'yaml.load', 'yaml.load', (['config'], {}), '(config)\n', (2308, 2316), False, 'import yaml\n'), ((4094, 4147), 'twisted.names.dns.Record_A', 'dns.Record_A', (['_SERVER_HEALTH_CHECK_RECORD_DATA'], {'ttl': '(0)'}), '(_SERVER_HEALTH_CHECK_RECORD_DATA, ttl=0)\n', (4106, 4147), False, 'from twisted.names import client, server, common, authority, dns\n'), ((5258, 5276), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (5274, 5276), False, 'import sys\n'), ((5285, 5307), 'time.sleep', 'time.sleep', (['sleep_time'], {}), '(sleep_time)\n', (5295, 5307), False, 'import time\n'), ((5417, 5428), 'os.getpid', 'os.getpid', ([], {}), '()\n', (5426, 5428), False, 'import os\n'), ((2174, 2215), 'twisted.names.dns.Record_TXT', 'dns.Record_TXT', (['*txt_data_list'], {'ttl': 'r_ttl'}), '(*txt_data_list, ttl=r_ttl)\n', (2188, 2215), False, 'from twisted.names import 
client, server, common, authority, dns\n'), ((3955, 3991), 'twisted.names.dns.Record_A', 'dns.Record_A', (['extra_host_ipv4'], {'ttl': '(0)'}), '(extra_host_ipv4, ttl=0)\n', (3967, 3991), False, 'from twisted.names import client, server, common, authority, dns\n'), ((2975, 3006), 'twisted.names.dns.Record_A', 'dns.Record_A', (['r_data'], {'ttl': 'r_ttl'}), '(r_data, ttl=r_ttl)\n', (2987, 3006), False, 'from twisted.names import client, server, common, authority, dns\n'), ((3129, 3163), 'twisted.names.dns.Record_AAAA', 'dns.Record_AAAA', (['r_data'], {'ttl': 'r_ttl'}), '(r_data, ttl=r_ttl)\n', (3144, 3163), False, 'from twisted.names import client, server, common, authority, dns\n'), ((3611, 3666), 'twisted.names.dns.Record_SRV', 'dns.Record_SRV', (['p', 'w', 'port', 'target_full_name'], {'ttl': 'r_ttl'}), '(p, w, port, target_full_name, ttl=r_ttl)\n', (3625, 3666), False, 'from twisted.names import client, server, common, authority, dns\n')] |
BPearlstine/colour | colour/examples/models/examples_ictcp.py | 40f0281295496774d2a19eee017d50fd0c265bd8 | # -*- coding: utf-8 -*-
"""
Showcases *ICTCP* *colour encoding* computations.
"""
import numpy as np
import colour
from colour.utilities import message_box
message_box('"ICTCP" Colour Encoding Computations')
RGB = np.array([0.45620519, 0.03081071, 0.04091952])
message_box(('Converting from "ITU-R BT.2020" colourspace to "ICTCP" colour '
'encoding given "RGB" values:\n'
'\n\t{0}'.format(RGB)))
print(colour.RGB_to_ICTCP(RGB))
print('\n')
ICTCP = np.array([0.07351364, 0.00475253, 0.09351596])
message_box(('Converting from "ICTCP" colour encoding to "ITU-R BT.2020" '
'colourspace given "ICTCP" values:\n'
'\n\t{0}'.format(ICTCP)))
print(colour.ICTCP_to_RGB(ICTCP))
| [((159, 210), 'colour.utilities.message_box', 'message_box', (['""""ICTCP" Colour Encoding Computations"""'], {}), '(\'"ICTCP" Colour Encoding Computations\')\n', (170, 210), False, 'from colour.utilities import message_box\n'), ((218, 264), 'numpy.array', 'np.array', (['[0.45620519, 0.03081071, 0.04091952]'], {}), '([0.45620519, 0.03081071, 0.04091952])\n', (226, 264), True, 'import numpy as np\n'), ((480, 526), 'numpy.array', 'np.array', (['[0.07351364, 0.00475253, 0.09351596]'], {}), '([0.07351364, 0.00475253, 0.09351596])\n', (488, 526), True, 'import numpy as np\n'), ((432, 456), 'colour.RGB_to_ICTCP', 'colour.RGB_to_ICTCP', (['RGB'], {}), '(RGB)\n', (451, 456), False, 'import colour\n'), ((698, 724), 'colour.ICTCP_to_RGB', 'colour.ICTCP_to_RGB', (['ICTCP'], {}), '(ICTCP)\n', (717, 724), False, 'import colour\n')] |
MauricioAntonioMartinez/django-workout-tracker-api | app/core/model/routine.py | 82f9499f172bd6d4b861f072948949dd6f8f6ec1 |
import os
import uuid
from django.conf import settings  # this is how we can retrieve variables
# from the settings file
from django.contrib.auth.models import (AbstractBaseUser, BaseUserManager,
PermissionsMixin)
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
from django.utils.translation import gettext_lazy as _
from multiselectfield import MultiSelectField
# The Manager User class provides the creation
# of users/admins and all the usual methods out of the box
from rest_framework import exceptions
from rest_framework.authentication import TokenAuthentication
from user.custom_token import ExpiringToken
from .exercise import BaseSerie
class RoutineDay(models.Model):
name = models.CharField(max_length=255, blank=True)
routine = models.ForeignKey(
'Routine', related_name='routines', on_delete=models.CASCADE)
def sets(self):
return SetRoutine.objects.filter(routine=self)
class Routine(models.Model):
name = models.CharField(max_length=255)
user = models.ForeignKey(settings.AUTH_USER_MODEL,
on_delete=models.CASCADE)
class SerieRoutine(BaseSerie):
father_set = models.ForeignKey(
'SetRoutine', on_delete=models.CASCADE, related_name='series')
class SetRoutine(models.Model):
exercise = models.ForeignKey(
'Exercise', on_delete=models.CASCADE)
routine = models.ForeignKey(
'RoutineDay', on_delete=models.CASCADE) | [((791, 835), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'blank': '(True)'}), '(max_length=255, blank=True)\n', (807, 835), False, 'from django.db import models\n'), ((850, 929), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""Routine"""'], {'related_name': '"""routines"""', 'on_delete': 'models.CASCADE'}), "('Routine', related_name='routines', on_delete=models.CASCADE)\n", (867, 929), False, 'from django.db import models\n'), ((1057, 1089), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (1073, 1089), False, 'from django.db import models\n'), ((1101, 1170), 'django.db.models.ForeignKey', 'models.ForeignKey', (['settings.AUTH_USER_MODEL'], {'on_delete': 'models.CASCADE'}), '(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)\n', (1118, 1170), False, 'from django.db import models\n'), ((1251, 1336), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""SetRoutine"""'], {'on_delete': 'models.CASCADE', 'related_name': '"""series"""'}), "('SetRoutine', on_delete=models.CASCADE, related_name='series'\n )\n", (1268, 1336), False, 'from django.db import models\n'), ((1391, 1446), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""Exercise"""'], {'on_delete': 'models.CASCADE'}), "('Exercise', on_delete=models.CASCADE)\n", (1408, 1446), False, 'from django.db import models\n'), ((1470, 1527), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""RoutineDay"""'], {'on_delete': 'models.CASCADE'}), "('RoutineDay', on_delete=models.CASCADE)\n", (1487, 1527), False, 'from django.db import models\n')] |
British-Oceanographic-Data-Centre/COAsT | example_scripts/transect_tutorial.py | 4d3d57c9afb61a92063b665626c1828dd2998d2b | """
This is a demonstration script for using the Transect class in the COAsT
package. This object has strict data formatting requirements, which are
outlined in transect.py.
Transect subsetting (a vertical slice of data between two coordinates): creating transects and performing some custom diagnostics with them.
---
In this tutorial we take a look at subsetting the model data along a transect (a custom straight line) and creating some bespoke diagnostics along it. We look at:
1. Creating a TRANSECT object, defined between two points.
2. Plotting data along a transect.
3. Calculating flow normal to the transect
"""
## Create a transect subset of the example dataset
# Load packages and define some file paths
import coast
import xarray as xr
import matplotlib.pyplot as plt
fn_nemo_dat_t = "./example_files/nemo_data_T_grid.nc"
fn_nemo_dat_u = "./example_files/nemo_data_U_grid.nc"
fn_nemo_dat_v = "./example_files/nemo_data_V_grid.nc"
fn_nemo_dom = "./example_files/COAsT_example_NEMO_domain.nc"
# Configuration files describing the data files
fn_config_t_grid = "./config/example_nemo_grid_t.json"
fn_config_f_grid = "./config/example_nemo_grid_f.json"
fn_config_u_grid = "./config/example_nemo_grid_u.json"
fn_config_v_grid = "./config/example_nemo_grid_v.json"
# %% Load data variables that are on the NEMO t-grid
nemo_t = coast.Gridded(fn_data=fn_nemo_dat_t, fn_domain=fn_nemo_dom, config=fn_config_t_grid)
# Now create a transect between the points (54 N 15 W) and (56 N, 12 W) using the `coast.TransectT` object. This needs to be passed the corresponding NEMO object and transect end points. The model points closest to these coordinates will be selected as the transect end points.
tran_t = coast.TransectT(nemo_t, (54, -15), (56, -12))
# Inspect the data
tran_t.data
# where `r_dim` is the dimension along the transect.
# %% Plot the data
# It is simple to plot a scalar such as temperature along the transect:
temp_mean = tran_t.data.temperature.mean(dim="t_dim")
plt.figure()
temp_mean.plot.pcolormesh(y="depth_0", yincrease=False)
plt.show()
# %% Flow across the transect
# With NEMO’s staggered grid, the first step is to define the transect on the f-grid so that the velocity components are between f-points. We do not need any model data on the f-grid, just the grid information, so create a nemo f-grid object
nemo_f = coast.Gridded(fn_domain=fn_nemo_dom, config=fn_config_f_grid)
# and a transect on the f-grid
tran_f = coast.TransectF(nemo_f, (54, -15), (56, -12))
tran_f.data
# We also need the i- and j-components of velocity so (lazy) load the model data on the u- and v-grid grids
nemo_u = coast.Gridded(fn_data=fn_nemo_dat_u, fn_domain=fn_nemo_dom, config=fn_config_u_grid)
nemo_v = coast.Gridded(fn_data=fn_nemo_dat_v, fn_domain=fn_nemo_dom, config=fn_config_v_grid)
# Now we can calculate the flow across the transect with the method
tran_f.calc_flow_across_transect(nemo_u, nemo_v)
# The flow across the transect is stored in a new dataset where the variables are all defined at the points between f-points.
tran_f.data_cross_tran_flow
# For example, to plot the time-averaged velocity across the transect, we can plot the ‘normal_velocities’ variable
cross_velocity_mean = tran_f.data_cross_tran_flow.normal_velocities.mean(dim="t_dim")
plt.figure()
cross_velocity_mean.rolling(r_dim=2).mean().plot.pcolormesh(yincrease=False, y="depth_0", cbar_kwargs={"label": "m/s"})
plt.show()
# Similarly, to plot the volume transport across the transect, we can use the ‘normal_transports’ variable
plt.figure()
cross_transport_mean = tran_f.data_cross_tran_flow.normal_transports.mean(dim="t_dim")
cross_transport_mean.rolling(r_dim=2).mean().plot()
plt.ylabel("Sv")
plt.show()
| [((1351, 1440), 'coast.Gridded', 'coast.Gridded', ([], {'fn_data': 'fn_nemo_dat_t', 'fn_domain': 'fn_nemo_dom', 'config': 'fn_config_t_grid'}), '(fn_data=fn_nemo_dat_t, fn_domain=fn_nemo_dom, config=\n fn_config_t_grid)\n', (1364, 1440), False, 'import coast\n'), ((1724, 1769), 'coast.TransectT', 'coast.TransectT', (['nemo_t', '(54, -15)', '(56, -12)'], {}), '(nemo_t, (54, -15), (56, -12))\n', (1739, 1769), False, 'import coast\n'), ((2002, 2014), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2012, 2014), True, 'import matplotlib.pyplot as plt\n'), ((2071, 2081), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2079, 2081), True, 'import matplotlib.pyplot as plt\n'), ((2366, 2427), 'coast.Gridded', 'coast.Gridded', ([], {'fn_domain': 'fn_nemo_dom', 'config': 'fn_config_f_grid'}), '(fn_domain=fn_nemo_dom, config=fn_config_f_grid)\n', (2379, 2427), False, 'import coast\n'), ((2471, 2516), 'coast.TransectF', 'coast.TransectF', (['nemo_f', '(54, -15)', '(56, -12)'], {}), '(nemo_f, (54, -15), (56, -12))\n', (2486, 2516), False, 'import coast\n'), ((2650, 2739), 'coast.Gridded', 'coast.Gridded', ([], {'fn_data': 'fn_nemo_dat_u', 'fn_domain': 'fn_nemo_dom', 'config': 'fn_config_u_grid'}), '(fn_data=fn_nemo_dat_u, fn_domain=fn_nemo_dom, config=\n fn_config_u_grid)\n', (2663, 2739), False, 'import coast\n'), ((2744, 2833), 'coast.Gridded', 'coast.Gridded', ([], {'fn_data': 'fn_nemo_dat_v', 'fn_domain': 'fn_nemo_dom', 'config': 'fn_config_v_grid'}), '(fn_data=fn_nemo_dat_v, fn_domain=fn_nemo_dom, config=\n fn_config_v_grid)\n', (2757, 2833), False, 'import coast\n'), ((3314, 3326), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (3324, 3326), True, 'import matplotlib.pyplot as plt\n'), ((3447, 3457), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3455, 3457), True, 'import matplotlib.pyplot as plt\n'), ((3552, 3564), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (3562, 3564), True, 'import matplotlib.pyplot as plt\n'), ((3704, 3720), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Sv"""'], {}), "('Sv')\n", (3714, 3720), True, 'import matplotlib.pyplot as plt\n'), ((3721, 3731), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3729, 3731), True, 'import matplotlib.pyplot as plt\n')] |
skirpichev/diofant | diofant/logic/boolalg.py | 16e280fdd6053be10c3b60fbb66fc26b52ede27a | """
Boolean algebra module for Diofant.
"""
from collections import defaultdict
from itertools import combinations, product
from ..core import Atom, cacheit
from ..core.expr import Expr
from ..core.function import Application
from ..core.numbers import Number
from ..core.operations import LatticeOp
from ..core.singleton import S
from ..core.singleton import SingletonWithManagedProperties as Singleton
from ..core.sympify import converter, sympify
from ..utilities import ordered
class Boolean(Expr):
"""A boolean object is an object for which logic operations make sense."""
def __and__(self, other):
"""Overloading for & operator."""
return And(self, other)
__rand__ = __and__
def __or__(self, other):
"""Overloading for | operator."""
return Or(self, other)
__ror__ = __or__
def __invert__(self):
"""Overloading for ~ operator."""
return Not(self)
def __rshift__(self, other):
"""Overloading for >> operator."""
return Implies(self, other)
def __lshift__(self, other):
"""Overloading for << operator."""
return Implies(other, self)
__rrshift__ = __lshift__
__rlshift__ = __rshift__
def __xor__(self, other):
return Xor(self, other)
__rxor__ = __xor__
def equals(self, other, failing_expression=False):
"""
Returns True if the given formulas have the same truth table.
For two formulas to be equal they must have the same literals.
Examples
========
>>> (a >> b).equals(~b >> ~a)
True
>>> Not(And(a, b, c)).equals(And(Not(a), Not(b), Not(c)))
False
>>> Not(And(a, Not(a))).equals(Or(b, Not(b)))
False
"""
from ..core.relational import Relational
from .inference import satisfiable
other = sympify(other)
if self.has(Relational) or other.has(Relational):
raise NotImplementedError('handling of relationals')
return self.atoms() == other.atoms() and \
not satisfiable(Not(Equivalent(self, other)))
class BooleanAtom(Atom, Boolean):
"""Base class of BooleanTrue and BooleanFalse."""
is_Boolean = True
@property
def canonical(self):
return self
def __int__(self):
return int(bool(self))
class BooleanTrue(BooleanAtom, metaclass=Singleton):
"""Diofant version of True, a singleton that can be accessed via ``true``.
This is the Diofant version of True, for use in the logic module. The
primary advantage of using true instead of True is that shorthand boolean
operations like ~ and >> will work as expected on this class, whereas with
True they act bitwise on 1. Functions in the logic module will return this
class when they evaluate to true.
Notes
=====
There is liable to be some confusion as to when ``True`` should
be used and when ``true`` should be used in various contexts
throughout Diofant. An important thing to remember is that
``sympify(True)`` returns ``true``. This means that for the most
part, you can just use ``True`` and it will automatically be converted
to ``true`` when necessary, similar to how you can generally use 1
instead of ``Integer(1)``.
The rule of thumb is:
"If the boolean in question can be replaced by an arbitrary symbolic
``Boolean``, like ``Or(x, y)`` or ``x > 1``, use ``true``.
Otherwise, use ``True``".
In other words, use ``true`` only on those contexts where the
boolean is being used as a symbolic representation of truth.
For example, if the object ends up in the ``.args`` of any expression,
then it must necessarily be ``true`` instead of ``True``, as
elements of ``.args`` must be ``Basic``. On the other hand,
``==`` is not a symbolic operation in Diofant, since it always returns
``True`` or ``False``, and does so in terms of structural equality
rather than mathematical, so it should return ``True``. The assumptions
system should use ``True`` and ``False``. Aside from not satisfying
the above rule of thumb, the
assumptions system uses a three-valued logic (``True``, ``False``, ``None``),
whereas ``true`` and ``false`` represent a two-valued logic. When in
doubt, use ``True``.
"``true == True is True``."
While "``true is True``" is ``False``, "``true == True``"
is ``True``, so if there is any doubt over whether a function or
expression will return ``true`` or ``True``, just use ``==``
instead of ``is`` to do the comparison, and it will work in either
case. Finally, for boolean flags, it's better to just use ``if x``
instead of ``if x is True``. To quote PEP 8:
Don't compare boolean values to ``True`` or ``False``
using ``==``.
* Yes: ``if greeting:``
* No: ``if greeting == True:``
* Worse: ``if greeting is True:``
Examples
========
>>> sympify(True)
true
>>> ~true
false
>>> ~True
-2
>>> Or(True, False)
true
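    The identities discussed in the notes can be checked directly:
    >>> true == True
    True
    >>> true is True
    False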
See Also
========
BooleanFalse
"""
def __bool__(self):
return True
def __hash__(self):
return hash(True)
def as_set(self):
"""
Rewrite logic operators and relationals in terms of real sets.
Examples
========
>>> true.as_set()
UniversalSet()
"""
return S.UniversalSet
class BooleanFalse(BooleanAtom, metaclass=Singleton):
"""Diofant version of False, a singleton that can be accessed via ``false``.
This is the Diofant version of False, for use in the logic module. The
primary advantage of using false instead of False is that shorthand boolean
operations like ~ and >> will work as expected on this class, whereas with
False they act bitwise on 0. Functions in the logic module will return this
class when they evaluate to false.
Notes
=====
See note in :py:class:`~diofant.logic.boolalg.BooleanTrue`.
Examples
========
>>> sympify(False)
false
>>> false >> false
true
>>> False >> False
0
>>> Or(True, False)
true
See Also
========
BooleanTrue
"""
def __bool__(self):
return False
def __hash__(self):
return hash(False)
def as_set(self):
"""
Rewrite logic operators and relationals in terms of real sets.
Examples
========
>>> false.as_set()
EmptySet()
"""
from ..sets import EmptySet
return EmptySet()
true = BooleanTrue()
false: BooleanFalse = BooleanFalse()
# We want S.true and S.false to work, rather than S.BooleanTrue and
# S.BooleanFalse, but making the class and instance names the same causes some
# major issues (like the inability to import the class directly from this
# file).
S.true = true
S.false = false
converter[bool] = lambda x: true if x else false
class BooleanFunction(Application, Boolean):
"""Boolean function is a function that lives in a boolean space.
    This is used as the base class for And, Or, Not, etc.
"""
is_Boolean = True
def _eval_simplify(self, ratio, measure):
return simplify_logic(self)
def to_nnf(self, simplify=True):
return self._to_nnf(*self.args, simplify=simplify)
@classmethod
def _to_nnf(cls, *args, **kwargs):
simplify = kwargs.get('simplify', True)
argset = set()
for arg in args:
if not is_literal(arg):
arg = arg.to_nnf(simplify)
if simplify:
if isinstance(arg, cls):
arg = arg.args
else:
arg = arg,
for a in arg:
if Not(a) in argset:
return cls.zero
argset.add(a)
else:
argset.add(arg)
return cls(*argset)
class And(LatticeOp, BooleanFunction):
"""
Logical AND function.
It evaluates its arguments in order, giving False immediately
if any of them are False, and True if they are all True.
Examples
========
>>> x & y
x & y
Notes
=====
The ``&`` operator is provided as a convenience, but note that its use
here is different from its normal use in Python, which is bitwise
and. Hence, ``And(a, b)`` and ``a & b`` will return different things if
``a`` and ``b`` are integers.
>>> And(x, y).subs({x: 1})
y
"""
zero = false
identity = true
nargs = None
@classmethod
def _new_args_filter(cls, args):
newargs = []
rel = []
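        # a relational and its negated form within one And is a contradiction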
for x in reversed(list(args)):
if isinstance(x, Number) or x in (0, 1):
newargs.append(True if x else False)
continue
if x.is_Relational:
c = x.canonical
if c in rel:
continue
nc = (~c).canonical
if any(r == nc for r in rel):
return [false]
rel.append(c)
newargs.append(x)
return LatticeOp._new_args_filter(newargs, And)
def as_set(self):
"""
Rewrite logic operators and relationals in terms of real sets.
Examples
========
>>> And(x < 2, x > -2).as_set()
(-2, 2)
"""
from ..sets import Intersection
if len(self.free_symbols) == 1:
return Intersection(*[arg.as_set() for arg in self.args])
else:
raise NotImplementedError('Sorry, And.as_set has not yet been'
' implemented for multivariate'
' expressions')
class Or(LatticeOp, BooleanFunction):
"""
Logical OR function
It evaluates its arguments in order, giving True immediately
if any of them are True, and False if they are all False.
Examples
========
>>> x | y
x | y
Notes
=====
The ``|`` operator is provided as a convenience, but note that its use
here is different from its normal use in Python, which is bitwise
or. Hence, ``Or(a, b)`` and ``a | b`` will return different things if
``a`` and ``b`` are integers.
>>> Or(x, y).subs({x: 0})
y
"""
zero = true
identity = false
@classmethod
def _new_args_filter(cls, args):
newargs = []
rel = []
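        # a relational and its negated form within one Or is a tautology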
for x in args:
if isinstance(x, Number) or x in (0, 1):
newargs.append(True if x else False)
continue
if x.is_Relational:
c = x.canonical
if c in rel:
continue
nc = (~c).canonical
if any(r == nc for r in rel):
return [true]
rel.append(c)
newargs.append(x)
return LatticeOp._new_args_filter(newargs, Or)
def as_set(self):
"""
Rewrite logic operators and relationals in terms of real sets.
Examples
========
>>> Or(x > 2, x < -2).as_set()
[-oo, -2) U (2, oo]
"""
from ..sets import Union
if len(self.free_symbols) == 1:
return Union(*[arg.as_set() for arg in self.args])
else:
raise NotImplementedError('Sorry, Or.as_set has not yet been'
' implemented for multivariate'
' expressions')
class Not(BooleanFunction):
"""
Logical Not function (negation).
Returns True if the statement is False.
Returns False if the statement is True.
Examples
========
>>> Not(True)
false
>>> Not(False)
true
>>> Not(And(True, False))
true
>>> Not(Or(True, False))
false
>>> Not(And(And(True, x), Or(x, False)))
~x
>>> ~x
~x
>>> Not(And(Or(x, y), Or(~x, ~y)))
~((x | y) & (~x | ~y))
Notes
=====
The ``~`` operator is provided as a convenience, but note that its use
here is different from its normal use in Python, which is bitwise
not. In particular, ``~a`` and ``Not(a)`` will be different if ``a`` is
an integer. Furthermore, since bools in Python subclass from ``int``,
``~True`` is the same as ``~1`` which is ``-2``, which has a boolean
value of True. To avoid this issue, use the Diofant boolean types
``true`` and ``false``.
>>> ~True
-2
>>> ~true
false
"""
is_Not = True
@classmethod
def eval(cls, arg):
from ..core import (Equality, GreaterThan, LessThan, StrictGreaterThan,
StrictLessThan, Unequality)
if isinstance(arg, Number) or arg in (True, False):
return false if arg else true
if arg.is_Not:
return arg.args[0]
# Simplify Relational objects.
if isinstance(arg, Equality):
return Unequality(*arg.args)
if isinstance(arg, Unequality):
return Equality(*arg.args)
if isinstance(arg, StrictLessThan):
return GreaterThan(*arg.args)
if isinstance(arg, StrictGreaterThan):
return LessThan(*arg.args)
if isinstance(arg, LessThan):
return StrictGreaterThan(*arg.args)
if isinstance(arg, GreaterThan):
return StrictLessThan(*arg.args)
def as_set(self):
"""
Rewrite logic operators and relationals in terms of real sets.
Examples
========
>>> Not(x > 0, evaluate=False).as_set()
(-oo, 0]
"""
if len(self.free_symbols) == 1:
return self.args[0].as_set().complement(S.Reals)
else:
raise NotImplementedError('Sorry, Not.as_set has not yet been'
                                      ' implemented for multivariate'
' expressions')
def to_nnf(self, simplify=True):
if is_literal(self):
return self
expr = self.args[0]
func, args = expr.func, expr.args
if func == And:
return Or._to_nnf(*[~arg for arg in args], simplify=simplify)
if func == Or:
return And._to_nnf(*[~arg for arg in args], simplify=simplify)
if func == Implies:
a, b = args
return And._to_nnf(a, ~b, simplify=simplify)
if func == Equivalent:
return And._to_nnf(Or(*args), Or(*[~arg for arg in args]), simplify=simplify)
if func == Xor:
result = []
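            # CNF of ~Xor: one clause for each odd-sized subset of negated args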
for i in range(1, len(args)+1, 2):
for neg in combinations(args, i):
clause = [~s if s in neg else s for s in args]
result.append(Or(*clause))
return And._to_nnf(*result, simplify=simplify)
if func == ITE:
a, b, c = args
return And._to_nnf(Or(a, ~c), Or(~a, ~b), simplify=simplify)
raise ValueError(f'Illegal operator {func} in expression')
class Xor(BooleanFunction):
"""
Logical XOR (exclusive OR) function.
Returns True if an odd number of the arguments are True and the rest are
False.
Returns False if an even number of the arguments are True and the rest are
False.
Examples
========
>>> Xor(True, False)
true
>>> Xor(True, True)
false
>>> Xor(True, False, True, True, False)
true
>>> Xor(True, False, True, False)
false
>>> x ^ y
Xor(x, y)
Notes
=====
The ``^`` operator is provided as a convenience, but note that its use
here is different from its normal use in Python, which is bitwise xor. In
particular, ``a ^ b`` and ``Xor(a, b)`` will be different if ``a`` and
``b`` are integers.
>>> Xor(x, y).subs({y: 0})
x
"""
def __new__(cls, *args, **kwargs):
argset = set()
obj = super().__new__(cls, *args, **kwargs)
for arg in super(Xor, obj).args:
if isinstance(arg, Number) or arg in (True, False):
if not arg:
continue
else:
arg = true
if isinstance(arg, Xor):
for a in arg.args:
argset.remove(a) if a in argset else argset.add(a)
elif arg in argset:
argset.remove(arg)
else:
argset.add(arg)
rel = [(r, r.canonical, (~r).canonical) for r in argset if r.is_Relational]
        odd = False  # is number of complementary pairs odd? start 0 -> False
remove = []
for i, (r, c, nc) in enumerate(rel):
for j in range(i + 1, len(rel)):
rj, cj = rel[j][:2]
if cj == nc:
odd = ~odd
break
elif cj == c:
break
else:
continue
remove.append((r, rj))
if odd:
argset.remove(true) if true in argset else argset.add(true)
for a, b in remove:
argset.remove(a)
argset.remove(b)
if len(argset) == 0:
return false
elif len(argset) == 1:
return argset.pop()
elif True in argset:
argset.remove(True)
return Not(Xor(*argset))
else:
obj._args = tuple(ordered(argset))
obj._argset = frozenset(argset)
return obj
@property # type: ignore[misc]
@cacheit
def args(self):
return tuple(ordered(self._argset))
def to_nnf(self, simplify=True):
args = []
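        # CNF of Xor: one clause for each even-sized subset of negated args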
for i in range(0, len(self.args)+1, 2):
for neg in combinations(self.args, i):
clause = [~s if s in neg else s for s in self.args]
args.append(Or(*clause))
return And._to_nnf(*args, simplify=simplify)
class Nand(BooleanFunction):
"""
Logical NAND function.
It evaluates its arguments in order, giving True immediately if any
of them are False, and False if they are all True.
Returns True if any of the arguments are False.
Returns False if all arguments are True.
Examples
========
>>> Nand(False, True)
true
>>> Nand(True, True)
false
>>> Nand(x, y)
~(x & y)
"""
@classmethod
def eval(cls, *args):
return Not(And(*args))
class Nor(BooleanFunction):
"""
Logical NOR function.
It evaluates its arguments in order, giving False immediately if any
of them are True, and True if they are all False.
Returns False if any argument is True.
Returns True if all arguments are False.
Examples
========
>>> Nor(True, False)
false
>>> Nor(True, True)
false
>>> Nor(False, True)
false
>>> Nor(False, False)
true
>>> Nor(x, y)
~(x | y)
"""
@classmethod
def eval(cls, *args):
return Not(Or(*args))
class Implies(BooleanFunction):
"""
Logical implication.
A implies B is equivalent to !A v B
Accepts two Boolean arguments; A and B.
Returns False if A is True and B is False.
Returns True otherwise.
Examples
========
>>> Implies(True, False)
false
>>> Implies(False, False)
true
>>> Implies(True, True)
true
>>> Implies(False, True)
true
>>> x >> y
Implies(x, y)
>>> y << x
Implies(x, y)
Notes
=====
The ``>>`` and ``<<`` operators are provided as a convenience, but note
that their use here is different from their normal use in Python, which is
bit shifts. Hence, ``Implies(a, b)`` and ``a >> b`` will return different
things if ``a`` and ``b`` are integers. In particular, since Python
considers ``True`` and ``False`` to be integers, ``True >> True`` will be
the same as ``1 >> 1``, i.e., 0, which has a truth value of False. To
avoid this issue, use the Diofant objects ``true`` and ``false``.
>>> True >> False
1
>>> true >> false
false
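    Likewise when both operands are true:
    >>> True >> True
    0
    >>> true >> true
    true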
"""
@classmethod
def eval(cls, *args):
try:
newargs = []
for x in args:
if isinstance(x, Number) or x in (0, 1):
newargs.append(True if x else False)
else:
newargs.append(x)
A, B = newargs
except ValueError:
raise ValueError(f'{len(args)} operand(s) used for an Implies '
f'(pairs are required): {args!s}')
if A == true or A == false or B == true or B == false:
return Or(Not(A), B)
elif A == B:
return true
elif A.is_Relational and B.is_Relational:
if A.canonical == B.canonical:
return true
elif (~A).canonical == B.canonical:
return B
else:
return Expr.__new__(cls, *args)
def to_nnf(self, simplify=True):
a, b = self.args
return Or._to_nnf(~a, b, simplify=simplify)
class Equivalent(BooleanFunction):
"""
Equivalence relation.
Equivalent(A, B) is True iff A and B are both True or both False.
Returns True if all of the arguments are logically equivalent.
Returns False otherwise.
Examples
========
>>> Equivalent(False, False, False)
true
>>> Equivalent(True, False, False)
false
>>> Equivalent(x, And(x, True))
true
"""
def __new__(cls, *args, **options):
from ..core.relational import Relational
args = [sympify(arg, strict=True) for arg in args]
argset = set(args)
for x in args:
if isinstance(x, Number) or x in [True, False]: # Includes 0, 1
argset.discard(x)
argset.add(True if x else False)
rel = []
for r in argset:
if isinstance(r, Relational):
rel.append((r, r.canonical, (~r).canonical))
remove = []
for i, (r, c, nc) in enumerate(rel):
for j in range(i + 1, len(rel)):
rj, cj = rel[j][:2]
if cj == nc:
return false
elif cj == c:
remove.append((r, rj))
break
for a, b in remove:
argset.remove(a)
argset.remove(b)
argset.add(True)
if len(argset) <= 1:
return true
if True in argset:
argset.discard(True)
return And(*argset)
if False in argset:
argset.discard(False)
return And(*[~arg for arg in argset])
_args = frozenset(argset)
obj = super().__new__(cls, _args)
obj._argset = _args
return obj
@property # type: ignore[misc]
@cacheit
def args(self):
return tuple(ordered(self._argset))
def to_nnf(self, simplify=True):
args = []
for a, b in zip(self.args, self.args[1:]):
args.append(Or(~a, b))
args.append(Or(~self.args[-1], self.args[0]))
return And._to_nnf(*args, simplify=simplify)
class ITE(BooleanFunction):
"""
If then else clause.
ITE(A, B, C) evaluates and returns the result of B if A is true
else it returns the result of C.
Examples
========
>>> ITE(True, False, True)
false
>>> ITE(Or(True, False), And(True, True), Xor(True, True))
true
>>> ITE(x, y, z)
ITE(x, y, z)
>>> ITE(True, x, y)
x
>>> ITE(False, x, y)
y
>>> ITE(x, y, y)
y
"""
@classmethod
def eval(cls, *args):
try:
a, b, c = args
except ValueError:
raise ValueError('ITE expects exactly 3 arguments')
if a == true:
return b
elif a == false:
return c
elif b == c:
return b
elif b == true and c == false:
return a
elif b == false and c == true:
return Not(a)
def to_nnf(self, simplify=True):
a, b, c = self.args
return And._to_nnf(Or(~a, b), Or(a, c), simplify=simplify)
def _eval_derivative(self, x):
return self.func(self.args[0], *[a.diff(x) for a in self.args[1:]])
# end class definitions. Some useful methods
def conjuncts(expr):
"""Return a list of the conjuncts in the expr s.
Examples
========
>>> conjuncts(a & b) == frozenset([a, b])
True
>>> conjuncts(a | b) == frozenset([Or(a, b)])
True
"""
return And.make_args(expr)
def disjuncts(expr):
"""Return a list of the disjuncts in the sentence s.
Examples
========
>>> disjuncts(a | b) == frozenset([a, b])
True
>>> disjuncts(a & b) == frozenset([And(a, b)])
True
"""
return Or.make_args(expr)
def distribute_and_over_or(expr):
"""
Given a sentence s consisting of conjunctions and disjunctions
of literals, return an equivalent sentence in CNF.
Examples
========
>>> distribute_and_over_or(Or(a, And(Not(b), Not(c))))
(a | ~b) & (a | ~c)
"""
return _distribute((expr, And, Or))
def distribute_or_over_and(expr):
"""
Given a sentence s consisting of conjunctions and disjunctions
of literals, return an equivalent sentence in DNF.
Note that the output is NOT simplified.
Examples
========
>>> distribute_or_over_and(And(Or(Not(a), b), c))
(b & c) | (c & ~a)
"""
return _distribute((expr, Or, And))
def _distribute(info):
"""Distributes info[1] over info[2] with respect to info[0]."""
if isinstance(info[0], info[2]):
for arg in info[0].args:
if isinstance(arg, info[1]):
conj = arg
break
else:
return info[0]
rest = info[2](*[a for a in info[0].args if a is not conj])
return info[1](*list(map(_distribute,
((info[2](c, rest), info[1], info[2]) for c in conj.args))))
elif isinstance(info[0], info[1]):
return info[1](*list(map(_distribute,
((x, info[1], info[2]) for x in info[0].args))))
else:
return info[0]
def to_nnf(expr, simplify=True):
"""
Converts expr to Negation Normal Form.
A logical expression is in Negation Normal Form (NNF) if it
contains only And, Or and Not, and Not is applied only to literals.
If simplify is True, the result contains no redundant clauses.
Examples
========
>>> to_nnf(Not((~a & ~b) | (c & d)))
(a | b) & (~c | ~d)
>>> to_nnf(Equivalent(a >> b, b >> a))
(a | ~b | (a & ~b)) & (b | ~a | (b & ~a))
"""
expr = sympify(expr)
if is_nnf(expr, simplify):
return expr
return expr.to_nnf(simplify)
def to_cnf(expr, simplify=False):
"""
Convert a propositional logical sentence s to conjunctive normal form.
That is, of the form ((A | ~B | ...) & (B | C | ...) & ...).
If simplify is True, the expr is evaluated to its simplest CNF form.
Examples
========
>>> to_cnf(~(a | b) | c)
(c | ~a) & (c | ~b)
>>> to_cnf((a | b) & (a | ~a), True)
a | b
"""
expr = sympify(expr)
if not isinstance(expr, BooleanFunction):
return expr
if simplify:
return simplify_logic(expr, 'cnf', True)
# Don't convert unless we have to
if is_cnf(expr):
return expr
expr = eliminate_implications(expr)
return distribute_and_over_or(expr)
def to_dnf(expr, simplify=False):
"""
Convert a propositional logical sentence s to disjunctive normal form.
That is, of the form ((A & ~B & ...) | (B & C & ...) | ...).
If simplify is True, the expr is evaluated to its simplest DNF form.
Examples
========
>>> to_dnf(b & (a | c))
(a & b) | (b & c)
>>> to_dnf((a & b) | (a & ~b) | (b & c) | (~b & c), True)
a | c
"""
expr = sympify(expr)
if not isinstance(expr, BooleanFunction):
return expr
if simplify:
return simplify_logic(expr, 'dnf', True)
# Don't convert unless we have to
if is_dnf(expr):
return expr
expr = eliminate_implications(expr)
return distribute_or_over_and(expr)
def is_nnf(expr, simplified=True):
"""
Checks if expr is in Negation Normal Form.
A logical expression is in Negation Normal Form (NNF) if it
contains only And, Or and Not, and Not is applied only to literals.
If simplified is True, checks if result contains no redundant clauses.
Examples
========
>>> is_nnf(a & b | ~c)
True
>>> is_nnf((a | ~a) & (b | c))
False
>>> is_nnf((a | ~a) & (b | c), False)
True
>>> is_nnf(Not(a & b) | c)
False
>>> is_nnf((a >> b) & (b >> a))
False
"""
expr = sympify(expr)
if is_literal(expr):
return True
stack = [expr]
while stack:
expr = stack.pop()
if expr.func in (And, Or):
if simplified:
args = expr.args
for arg in args:
if Not(arg) in args:
return False
stack.extend(expr.args)
elif not is_literal(expr):
return False
return True
def is_cnf(expr):
"""
Test whether or not an expression is in conjunctive normal form.
Examples
========
>>> is_cnf(a | b | c)
True
>>> is_cnf(a & b & c)
True
>>> is_cnf((a & b) | c)
False
"""
return _is_form(expr, And, Or)
def is_dnf(expr):
"""
Test whether or not an expression is in disjunctive normal form.
Examples
========
>>> is_dnf(a | b | c)
True
>>> is_dnf(a & b & c)
True
>>> is_dnf((a & b) | c)
True
>>> is_dnf(a & (b | c))
False
"""
return _is_form(expr, Or, And)
def _is_form(expr, function1, function2):
"""Test whether or not an expression is of the required form."""
expr = sympify(expr)
# Special case of an Atom
if expr.is_Atom:
return True
# Special case of a single expression of function2
if isinstance(expr, function2):
for lit in expr.args:
if isinstance(lit, Not):
if not lit.args[0].is_Atom:
return False
else:
if not lit.is_Atom:
return False
return True
# Special case of a single negation
if isinstance(expr, Not):
if not expr.args[0].is_Atom:
return False
if not isinstance(expr, function1):
return False
for cls in expr.args:
if cls.is_Atom:
continue
if isinstance(cls, Not):
if not cls.args[0].is_Atom:
return False
elif not isinstance(cls, function2):
return False
for lit in cls.args:
if isinstance(lit, Not):
if not lit.args[0].is_Atom:
return False
else:
if not lit.is_Atom:
return False
return True
def eliminate_implications(expr):
"""
Change >>, <<, and Equivalent into &, |, and ~. That is, return an
    expression that is equivalent to ``expr``, but has only &, |, and ~ as logical
operators.
Examples
========
>>> eliminate_implications(Implies(a, b))
b | ~a
>>> eliminate_implications(Equivalent(a, b))
(a | ~b) & (b | ~a)
>>> eliminate_implications(Equivalent(a, b, c))
(a | ~c) & (b | ~a) & (c | ~b)
"""
return to_nnf(expr)
def is_literal(expr):
"""
Returns True if expr is a literal, else False.
Examples
========
>>> is_literal(a)
True
>>> is_literal(~a)
True
>>> is_literal(a + b)
True
>>> is_literal(Or(a, b))
False
"""
if isinstance(expr, Not):
return not isinstance(expr.args[0], BooleanFunction)
else:
return not isinstance(expr, BooleanFunction)
def to_int_repr(clauses, symbols):
"""
Takes clauses in CNF format and puts them into an integer representation.
Examples
========
>>> to_int_repr([x | y, y], [x, y])
[{1, 2}, {2}]
"""
symbols = dict(zip(symbols, range(1, len(symbols) + 1)))
def append_symbol(arg, symbols):
if isinstance(arg, Not):
return -symbols[arg.args[0]]
else:
return symbols[arg]
return [{append_symbol(arg, symbols) for arg in Or.make_args(c)}
for c in clauses]
def _check_pair(minterm1, minterm2):
"""
Checks if a pair of minterms differs by only one bit. If yes, returns
index, else returns -1.
"""
index = -1
for x, (i, j) in enumerate(zip(minterm1, minterm2)):
if i != j:
if index == -1:
index = x
else:
return -1
return index
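# For example:
#   _check_pair([0, 1, 0], [0, 1, 1])  ->  2    (terms differ only in bit 2)
#   _check_pair([0, 0, 0], [1, 1, 0])  ->  -1   (terms differ in two bits)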
def _convert_to_varsSOP(minterm, variables):
"""
    Converts a term in the expansion of a function from binary to its
variable form (for SOP).
"""
temp = []
for i, m in enumerate(minterm):
if m == 0:
temp.append(Not(variables[i]))
elif m == 1:
temp.append(variables[i])
return And(*temp)
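# For illustration (assuming symbols x, y, z exist): a 3 in the term marks an
# eliminated ("don't care") variable, which is skipped entirely:
#   _convert_to_varsSOP([1, 0, 3], [x, y, z])  ->  x & ~y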
def _convert_to_varsPOS(maxterm, variables):
"""
    Converts a term in the expansion of a function from binary to its
variable form (for POS).
"""
temp = []
for i, m in enumerate(maxterm):
if m == 1:
temp.append(Not(variables[i]))
elif m == 0:
temp.append(variables[i])
return Or(*temp)
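# For illustration (assuming symbols x, y, z exist): the encoding is dual to
# the SOP case, so a 1 is negated and a 0 kept as-is:
#   _convert_to_varsPOS([1, 0, 3], [x, y, z])  ->  y | ~x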
def _simplified_pairs(terms):
"""
Reduces a set of minterms, if possible, to a simplified set of minterms
    with one less variable in the terms using the Quine-McCluskey method.
"""
simplified_terms = []
todo = list(range(len(terms)))
for i, ti in enumerate(terms[:-1]):
for j_i, tj in enumerate(terms[(i + 1):]):
index = _check_pair(ti, tj)
if index != -1:
todo[i] = todo[j_i + i + 1] = None
newterm = ti[:]
newterm[index] = 3
if newterm not in simplified_terms:
simplified_terms.append(newterm)
simplified_terms.extend(
[terms[i] for i in [_ for _ in todo if _ is not None]])
return simplified_terms
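# For example, [0, 1] and [1, 1] differ in exactly one bit, so they merge
# into a single term in which 3 marks the dropped variable:
#   _simplified_pairs([[0, 1], [1, 1]])  ->  [[3, 1]]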
def _compare_term(minterm, term):
"""
Return True if a binary term is satisfied by the given term. Used
for recognizing prime implicants.
"""
for i, x in enumerate(term):
if x not in (3, minterm[i]):
return False
return True
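# For example, the term [3, 0] covers any minterm whose second bit is 0:
#   _compare_term([1, 0], [3, 0])  ->  True
#   _compare_term([1, 1], [3, 0])  ->  False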
def _rem_redundancy(l1, terms):
"""
After the truth table has been sufficiently simplified, use the prime
implicant table method to recognize and eliminate redundant pairs,
and return the essential arguments.
"""
essential = []
for x in terms:
temporary = []
for y in l1:
if _compare_term(x, y):
temporary.append(y)
if len(temporary) == 1:
if temporary[0] not in essential:
essential.append(temporary[0])
for x in terms:
for y in essential:
if _compare_term(x, y):
break
else:
for z in l1: # pragma: no branch
if _compare_term(x, z):
assert z not in essential
essential.append(z)
break
return essential
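# For example, the single prime implicant [3, 1] covers both minterms, so it
# is the only essential term:
#   _rem_redundancy([[3, 1]], [[0, 1], [1, 1]])  ->  [[3, 1]]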
def SOPform(variables, minterms, dontcares=None):
"""
    The SOPform function uses simplified_pairs and a redundant-group-
    eliminating algorithm to convert the list of all input combinations that
generate '1' (the minterms) into the smallest Sum of Products form.
The variables must be given as the first argument.
Return a logical Or function (i.e., the "sum of products" or "SOP"
form) that gives the desired outcome. If there are inputs that can
be ignored, pass them as a list, too.
The result will be one of the (perhaps many) functions that satisfy
the conditions.
Examples
========
>>> minterms = [[0, 0, 0, 1], [0, 0, 1, 1],
... [0, 1, 1, 1], [1, 0, 1, 1], [1, 1, 1, 1]]
>>> dontcares = [[0, 0, 0, 0], [0, 0, 1, 0], [0, 1, 0, 1]]
>>> SOPform([t, x, y, z], minterms, dontcares)
(y & z) | (z & ~t)
References
==========
* https://en.wikipedia.org/wiki/Quine-McCluskey_algorithm
"""
variables = [sympify(v) for v in variables]
if minterms == []:
return false
minterms = [list(i) for i in minterms]
dontcares = [list(i) for i in (dontcares or [])]
for d in dontcares:
if d in minterms:
raise ValueError(f'{d} in minterms is also in dontcares')
old = None
new = minterms + dontcares
while new != old:
old = new
new = _simplified_pairs(old)
essential = _rem_redundancy(new, minterms)
return Or(*[_convert_to_varsSOP(x, variables) for x in essential])
def POSform(variables, minterms, dontcares=None):
"""
    The POSform function uses simplified_pairs and a redundant-group-
eliminating algorithm to convert the list of all input combinations
that generate '1' (the minterms) into the smallest Product of Sums form.
The variables must be given as the first argument.
Return a logical And function (i.e., the "product of sums" or "POS"
form) that gives the desired outcome. If there are inputs that can
be ignored, pass them as a list, too.
The result will be one of the (perhaps many) functions that satisfy
the conditions.
Examples
========
>>> minterms = [[0, 0, 0, 1], [0, 0, 1, 1], [0, 1, 1, 1],
... [1, 0, 1, 1], [1, 1, 1, 1]]
>>> dontcares = [[0, 0, 0, 0], [0, 0, 1, 0], [0, 1, 0, 1]]
>>> POSform([t, x, y, z], minterms, dontcares)
z & (y | ~t)
References
==========
* https://en.wikipedia.org/wiki/Quine-McCluskey_algorithm
"""
variables = [sympify(v) for v in variables]
if minterms == []:
return false
minterms = [list(i) for i in minterms]
dontcares = [list(i) for i in (dontcares or [])]
for d in dontcares:
if d in minterms:
raise ValueError(f'{d} in minterms is also in dontcares')
maxterms = []
for t in product([0, 1], repeat=len(variables)):
t = list(t)
if (t not in minterms) and (t not in dontcares):
maxterms.append(t)
old = None
new = maxterms + dontcares
while new != old:
old = new
new = _simplified_pairs(old)
essential = _rem_redundancy(new, maxterms)
return And(*[_convert_to_varsPOS(x, variables) for x in essential])
def _find_predicates(expr):
"""Helper to find logical predicates in BooleanFunctions.
A logical predicate is defined here as anything within a BooleanFunction
that is not a BooleanFunction itself.
"""
if not isinstance(expr, BooleanFunction):
return {expr}
return set().union(*(_find_predicates(i) for i in expr.args))
def simplify_logic(expr, form=None, deep=True):
"""
This function simplifies a boolean function to its simplified version
in SOP or POS form. The return type is an Or or And object in Diofant.
Parameters
==========
expr : string or boolean expression
form : string ('cnf' or 'dnf') or None (default).
If 'cnf' or 'dnf', the simplest expression in the corresponding
normal form is returned; if None, the answer is returned
according to the form with fewest args (in CNF by default).
deep : boolean (default True)
indicates whether to recursively simplify any
non-boolean functions contained within the input.
Examples
========
>>> b = (~x & ~y & ~z) | (~x & ~y & z)
>>> simplify_logic(b)
~x & ~y
>>> sympify(b)
(z & ~x & ~y) | (~x & ~y & ~z)
>>> simplify_logic(_)
~x & ~y
"""
    if form in ('cnf', 'dnf', None):
expr = sympify(expr)
if not isinstance(expr, BooleanFunction):
return expr
variables = _find_predicates(expr)
truthtable = []
for t in product([0, 1], repeat=len(variables)):
t = list(t)
if expr.xreplace(dict(zip(variables, t))):
truthtable.append(t)
if deep:
from ..simplify import simplify
variables = [simplify(v) for v in variables]
if form == 'dnf' or \
(form is None and len(truthtable) >= (2 ** (len(variables) - 1))):
return SOPform(variables, truthtable)
elif form == 'cnf' or form is None: # pragma: no branch
return POSform(variables, truthtable)
else:
raise ValueError('form can be cnf or dnf only')
def _finger(eq):
"""
Assign a 5-item fingerprint to each symbol in the equation:
[
# of times it appeared as a Symbol,
# of times it appeared as a Not(symbol),
# of times it appeared as a Symbol in an And or Or,
# of times it appeared as a Not(Symbol) in an And or Or,
sum of the number of arguments with which it appeared,
counting Symbol as 1 and Not(Symbol) as 2
]
>>> eq = Or(And(Not(y), a), And(Not(y), b), And(x, y))
>>> dict(_finger(eq))
{(0, 0, 1, 0, 2): [x],
(0, 0, 1, 0, 3): [a, b],
(0, 0, 1, 2, 8): [y]}
So y and x have unique fingerprints, but a and b do not.
"""
f = eq.free_symbols
d = {fi: [0] * 5 for fi in f}
for a in eq.args:
if a.is_Symbol:
d[a][0] += 1
elif a.is_Not:
d[a.args[0]][1] += 1
else:
o = len(a.args) + sum(isinstance(ai, Not) for ai in a.args)
for ai in a.args:
if ai.is_Symbol:
d[ai][2] += 1
d[ai][-1] += o
else:
d[ai.args[0]][3] += 1
d[ai.args[0]][-1] += o
inv = defaultdict(list)
for k, v in ordered(d.items()):
inv[tuple(v)].append(k)
return inv
def bool_map(bool1, bool2):
"""
Return the simplified version of bool1, and the mapping of variables
that makes the two expressions bool1 and bool2 represent the same
logical behaviour for some correspondence between the variables
of each.
    If more than one mapping of this sort exists, one of them
is returned.
For example, And(x, y) is logically equivalent to And(a, b) for
the mapping {x: a, y:b} or {x: b, y:a}.
If no such mapping exists, return False.
Examples
========
>>> function1 = SOPform([x, z, y], [[1, 0, 1], [0, 0, 1]])
>>> function2 = SOPform([a, b, c], [[1, 0, 1], [1, 0, 0]])
>>> bool_map(function1, function2)
(y & ~z, {y: a, z: b})
The results are not necessarily unique, but they are canonical. Here,
``(t, z)`` could be ``(a, d)`` or ``(d, a)``:
>>> eq1 = Or(And(Not(y), t), And(Not(y), z), And(x, y))
>>> eq2 = Or(And(Not(c), a), And(Not(c), d), And(b, c))
>>> bool_map(eq1, eq2)
((x & y) | (t & ~y) | (z & ~y), {t: a, x: b, y: c, z: d})
>>> eq = And(Xor(a, b), c, And(c, d))
>>> bool_map(eq, eq.subs({c: x}))
(c & d & (a | b) & (~a | ~b), {a: a, b: b, c: d, d: x})
"""
def match(function1, function2):
"""Return the mapping that equates variables between two
simplified boolean expressions if possible.
By "simplified" we mean that a function has been denested
and is either an And (or an Or) whose arguments are either
symbols (x), negated symbols (Not(x)), or Or (or an And) whose
arguments are only symbols or negated symbols. For example,
And(x, Not(y), Or(w, Not(z))).
Basic.match is not robust enough (see issue sympy/sympy#4835) so this is
a workaround that is valid for simplified boolean expressions.
"""
# do some quick checks
if function1.__class__ != function2.__class__:
return
if len(function1.args) != len(function2.args):
return
if function1.is_Symbol:
return {function1: function2}
# get the fingerprint dictionaries
f1 = _finger(function1)
f2 = _finger(function2)
# more quick checks
if len(f1) != len(f2):
return
# assemble the match dictionary if possible
matchdict = {}
for k in f1:
if k not in f2 or len(f1[k]) != len(f2[k]):
return
for i, x in enumerate(f1[k]):
matchdict[x] = f2[k][i]
return matchdict if matchdict else None
a = simplify_logic(bool1)
b = simplify_logic(bool2)
m = match(a, b)
if m:
return a, m
return m is not None
| [((42099, 42116), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (42110, 42116), False, 'from collections import defaultdict\n'), ((17794, 17820), 'itertools.combinations', 'combinations', (['self.args', 'i'], {}), '(self.args, i)\n', (17806, 17820), False, 'from itertools import combinations, product\n'), ((14724, 14745), 'itertools.combinations', 'combinations', (['args', 'i'], {}), '(args, i)\n', (14736, 14745), False, 'from itertools import combinations, product\n')] |
JoanLee0826/amazon | amazon/goods_review_thread.py | 13fcbcb0e9e396af6d4b2287c2a1a06fd602ce98 | import pandas as pd
import requests
from lxml import etree
import re, time, random, datetime
from queue import Queue
import threading
class Review:
headers = {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 \
(KHTML, like Gecko) Chrome/69.0.3497.81 Safari/537.36"
}
proxies = {
"http": "http://117.91.131.74:9999",
}
def __init__(self, domain):
self.view_list = []
self.page_list = []
self.url_queue = Queue()
        if domain.strip().lower() == 'jp':
            self.row_url = "https://www.amazon.co.jp"
        elif domain.strip().lower() == 'com':
            self.row_url = "https://www.amazon.com"
        else:
            # Fail fast rather than leaving self.row_url undefined
            raise ValueError("unsupported domain: %r" % domain)
self.s = requests.Session()
self.s.get(url=self.row_url, headers=self.headers, proxies=self.proxies)
def get_review(self, url):
res = self.s.get(url, headers=self.headers, proxies=self.proxies)
if res.status_code != 200:
print("请求出错,状态码为:%s" % res.status_code)
print(res.text)
return
res_html = etree.HTML(res.text)
        # Name of the reviewed product
        view_goods = res_html.xpath('//span[@class="a-list-item"]/a/text()')[0]
        # Container nodes holding the individual reviews
        view_con = res_html.xpath('//div[@class="a-section review aok-relative"]')
        for each_view in view_con:
            # Reviewer name
            view_name = each_view.xpath('.//span[@class="a-profile-name"]/text()')[0]
            view_star_raw = each_view.xpath('.//div[@class="a-row"]/a[@class="a-link-normal"]/@title')[0]
            # Star rating
            view_star = view_star_raw.split(' ')[0]
            # Review title
            view_title = each_view.xpath('.//a[@data-hook="review-title"]/span/text()')[0]
            # Review date
            view_date = each_view.xpath('.//span[@data-hook="review-date"]/text()')[0]
view_format = each_view.xpath('.//a[@data-hook="format-strip"]/text()')
view_colour = None
view_size = None
try:
for each in view_format:
if re.search("color|colour|色", each, re.I):
view_colour = each.split(':')[1].strip()
if re.search("size|style|サイズ", each, re.I):
view_size = each.split(":")[1].strip()
            except Exception:
pass
            # Review body text
            view_body = each_view.xpath('string(.//span[@data-hook="review-body"]/span)')
            # Number of "helpful" votes
try:
view_useful_raw = each_view.xpath('.//span[@data-hook="helpful-vote-statement"]/text()')[0]
view_useful = view_useful_raw.split(' ')[0]
if view_useful == 'one':
view_useful = 1
try:
view_useful = int(view_useful)
                except ValueError:
pass
            except Exception:
view_useful = 0
            # Assemble the full review record for this product
each_view_list = [view_goods, view_name, view_star, view_title, view_date, view_colour, view_size,
view_body, view_useful]
self.view_list.append(each_view_list)
# print(self.view_list[-1])
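        # Shape of each appended row, summarised here for reference:
        # [goods, reviewer, stars, title, date, colour, size, body, helpful votes]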
def run(self, data):
goods_data = pd.read_excel(data, encoding='utf-8')
base_url = self.row_url + "/product-reviews/"
# goods_data.drop_duplicates(subset=['r','评价数量'],inplace=True)
for each_asin, each_count in zip(goods_data['ASIN'][5:50], goods_data['goods_review_count'][5:50]):
if each_asin and int(each_count) > 0:
if int(each_count) % 10 == 0:
end_page = int(each_count) // 10 + 1
else:
end_page = int(each_count) // 10 + 2
for page in range(1, end_page):
if page == 1:
url = base_url + each_asin
else:
url = base_url + each_asin + '?pageNumber=' + str(page)
self.url_queue.put(url)
print("review_page_%d" % page, url)
time.sleep(1.5)
while True:
try:
review_threads = [threading.Thread(target=self.get_review, args=(self.url_queue.get(),))
for m in range(30) if not self.url_queue.empty()]
for each in review_threads:
each.start()
print("队列剩余数量", self.url_queue.qsize())
for each in review_threads:
each.join()
            except Exception:
                print("Request error, retrying...")
            time.sleep(random.uniform(0.5, 2.1))
if self.url_queue.empty():
break
view_goods_pd = pd.DataFrame(self.view_list,
columns=['review_goods', 'review_name', 'review_star', 'review_title',
'review_date', 'review_colour', 'review_size', 'review_body',
'review_useful'])
view_goods_pd.drop_duplicates(subset=['review_name', 'review_date','review_body'], inplace=True)
aft = datetime.datetime.now().strftime('%m%d%H%M')
file_name = r'../data/goods_review/' + "reviews_" + aft + ".xlsx"
view_goods_pd.to_excel(file_name, encoding='utf-8', engine='xlsxwriter')
print("共获取评论数量:", len(self.view_list))
if __name__ == '__main__':
data = r"../data/category/Kid's Weighted Blankets_08_28_13_22.xlsx"
review = Review(domain='com')
review.run(data=data)
| [((508, 515), 'queue.Queue', 'Queue', ([], {}), '()\n', (513, 515), False, 'from queue import Queue\n'), ((728, 746), 'requests.Session', 'requests.Session', ([], {}), '()\n', (744, 746), False, 'import requests\n'), ((1089, 1109), 'lxml.etree.HTML', 'etree.HTML', (['res.text'], {}), '(res.text)\n', (1099, 1109), False, 'from lxml import etree\n'), ((3229, 3266), 'pandas.read_excel', 'pd.read_excel', (['data'], {'encoding': '"""utf-8"""'}), "(data, encoding='utf-8')\n", (3242, 3266), True, 'import pandas as pd\n'), ((4081, 4096), 'time.sleep', 'time.sleep', (['(1.5)'], {}), '(1.5)\n', (4091, 4096), False, 'import re, time, random, datetime\n'), ((4748, 4935), 'pandas.DataFrame', 'pd.DataFrame', (['self.view_list'], {'columns': "['review_goods', 'review_name', 'review_star', 'review_title',\n 'review_date', 'review_colour', 'review_size', 'review_body',\n 'review_useful']"}), "(self.view_list, columns=['review_goods', 'review_name',\n 'review_star', 'review_title', 'review_date', 'review_colour',\n 'review_size', 'review_body', 'review_useful'])\n", (4760, 4935), True, 'import pandas as pd\n'), ((4637, 4661), 'random.uniform', 'random.uniform', (['(0.5)', '(2.1)'], {}), '(0.5, 2.1)\n', (4651, 4661), False, 'import re, time, random, datetime\n'), ((5176, 5199), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (5197, 5199), False, 'import re, time, random, datetime\n'), ((2068, 2107), 're.search', 're.search', (['"""color|colour|色"""', 'each', 're.I'], {}), "('color|colour|色', each, re.I)\n", (2077, 2107), False, 'import re, time, random, datetime\n'), ((2197, 2236), 're.search', 're.search', (['"""size|style|サイズ"""', 'each', 're.I'], {}), "('size|style|サイズ', each, re.I)\n", (2206, 2236), False, 'import re, time, random, datetime\n')] |
lumicks/pylake | lumicks/pylake/population/tests/conftest.py | b5875d156d6416793a371198f3f2590fca2be4cd | import pytest
import numpy as np
from pathlib import Path
def extract_param(data, n_states):
keys = ("initial_state_prob", "transition_prob", "means", "st_devs")
param = {"n_states": n_states}
for key in keys:
param[key] = data[f"{key}_{n_states}"]
return param
@pytest.fixture(scope="session", params=[2, 3, 4])
def trace_lownoise(request):
"""Trace data can be generated by running ./data/generate_trace_data.py """
data = np.load(Path(__file__).parent / "data/trace_data.npz")
n_states = request.param
param = extract_param(data, n_states)
y = data[f"y_{n_states}"]
sp = data[f"sp_{n_states}"]
return y, sp, param
@pytest.fixture(scope="session")
def trace_simple(request):
"""Trace data can be generated by running ./data/generate_trace_data.py """
data = np.load(Path(__file__).parent / "data/trace_data.npz")
n_states = 2
param = extract_param(data, n_states)
y = data[f"y_{n_states}"]
sp = data[f"sp_{n_states}"]
return y, sp, param
| [((291, 340), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""', 'params': '[2, 3, 4]'}), "(scope='session', params=[2, 3, 4])\n", (305, 340), False, 'import pytest\n'), ((679, 710), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (693, 710), False, 'import pytest\n'), ((470, 484), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (474, 484), False, 'from pathlib import Path\n'), ((838, 852), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (842, 852), False, 'from pathlib import Path\n')] |
rafagarciac/ParallelProgrammingPython | Concurrent/PipelineDecomposingTask.py | bba91984018688f41049fd63961d3b8872876336 | #!/usr/bin/env python
"""
Artesanal example Pipe without Pipe class.
"""
__author__ = "Rafael García Cuéllar"
__email__ = "[email protected]"
__copyright__ = "Copyright (c) 2018 Rafael García Cuéllar"
__license__ = "MIT"
from concurrent.futures import ProcessPoolExecutor
import time
import random
def worker(arg):
time.sleep(random.random())
return arg
def pipeline(future):
pools[1].submit(worker, future.result()).add_done_callback(printer)
def printer(future):
pools[2].submit(worker, future.result()).add_done_callback(spout)
def spout(future):
print(future.result())
def instanceProcessPool():
pools = []
for i in range(3):
pool = ProcessPoolExecutor(2)
pools.append(pool)
return pools
def shutdownPools(pools):
for pool in pools:
pool.shutdown()
def runThreadsInPipeline(pools):
for pool in pools:
pool.submit(worker, random.random()).add_done_callback(pipeline)
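# Sketch of the callback chain each initial submission follows (illustrative):
#   worker --done--> pipeline() --submit--> pools[1] worker
#     --done--> printer() --submit--> pools[2] worker --done--> spout()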
if __name__ == "__main__":
__spec__ = None # Fix multiprocessing in Spyder's IPython
pools = instanceProcessPool() # pool = ProcessPoolExecutor([max_workers])
runThreadsInPipeline(pools) # pools[0].submit(worker, random.random()).add_done_callback(pipeline)
shutdownPools(pools) # pool.shutdown() | [((336, 351), 'random.random', 'random.random', ([], {}), '()\n', (349, 351), False, 'import random\n'), ((695, 717), 'concurrent.futures.ProcessPoolExecutor', 'ProcessPoolExecutor', (['(2)'], {}), '(2)\n', (714, 717), False, 'from concurrent.futures import ProcessPoolExecutor\n'), ((934, 949), 'random.random', 'random.random', ([], {}), '()\n', (947, 949), False, 'import random\n')] |
roaet/digibujogens | src/digibujogens/__main__.py | ab154edda69c091595902dd8b2e3fd273b2e7105 | """ Main application entry point.
python -m digibujogens ...
"""
def main():
""" Execute the application.
"""
raise NotImplementedError
# Make the script executable.
if __name__ == "__main__":
raise SystemExit(main())
| [] |
mrkajetanp/lisa | lisa/target.py | 15cfbc430f46b59f52a9d13769d0f6791ed6f154 | # SPDX-License-Identifier: Apache-2.0
#
# Copyright (C) 2018, ARM Limited and contributors.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from datetime import datetime
import os
import os.path
import contextlib
import shlex
from collections.abc import Mapping
import copy
import sys
import argparse
import textwrap
import functools
import inspect
import pickle
import tempfile
from types import ModuleType, FunctionType
from operator import itemgetter
import devlib
from devlib.exception import TargetStableError
from devlib.utils.misc import which
from devlib.platform.gem5 import Gem5SimulationPlatform
from lisa.utils import Loggable, HideExekallID, resolve_dotted_name, get_subclasses, import_all_submodules, LISA_HOME, RESULT_DIR, LATEST_LINK, setup_logging, ArtifactPath, nullcontext, ExekallTaggable, memoized
from lisa.assets import ASSETS_PATH
from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, Configurable
from lisa.generic import TypedList
from lisa.platforms.platinfo import PlatformInfo
class PasswordKeyDesc(KeyDesc):
def pretty_format(self, v):
return '<password>'
# Make sure all submodules of devlib.module are imported so the classes
# are all created before we list them
import_all_submodules(devlib.module)
_DEVLIB_AVAILABLE_MODULES = {
cls.name
for cls in get_subclasses(devlib.module.Module)
if (
getattr(cls, 'name', None)
# early modules try to connect to UART and do very
# platform-specific things we are not interested in
and getattr(cls, 'stage') != 'early'
)
}
class TargetConf(SimpleMultiSrcConf, HideExekallID):
"""
Target connection settings.
Only keys defined below are allowed, with the given meaning and type:
{generated_help}
An instance can be created by calling :class:`~TargetConf` with a
dictionary. The top-level `target-conf` key is not needed here:
.. code-block:: python
TargetConf({{
'name': 'myboard',
'host': 192.0.2.1,
'kind': 'linux',
'username': 'foo',
'password': 'bar',
}})
Or alternatively, from a YAML configuration file:
Content of target_conf.yml:
.. literalinclude:: ../target_conf.yml
:language: YAML
::
TargetConf.from_yaml_map('target_conf.yml')
The following special YAML tags can be used in the configuration file:
.. code-block:: YAML
target-conf:
# "!env:<type> ENV_VAR_NAME" can be used to reference an
# environment variable.
name: !env:str BOARD_NAME
port: !env:int PORT
    .. note:: Only load trusted YAML files as it can lead to arbitrary code
        execution.
.. note:: That structure in a YAML file is allowed and will work:
* file foo.yml::
target-conf:
name: myboard
* file bar.yml::
target-conf:
!include foo.yml
This will result in that structure which would normally be invalid, but
is handled as a special case::
target-conf:
target-conf:
name: myboard
"""
STRUCTURE = TopLevelKeyDesc('target-conf', 'target connection settings', (
        KeyDesc('name', 'Board name, free-form value only used to embellish logs', [str]),
KeyDesc('kind', 'Target kind. Can be "linux" (ssh) or "android" (adb)', [str]),
KeyDesc('host', 'Hostname or IP address of the host', [str, None]),
KeyDesc('username', 'SSH username. On ADB connections, "root" username will root adb upon target connection', [str, None]),
PasswordKeyDesc('password', 'SSH password', [str, None]),
KeyDesc('port', 'SSH or ADB server port', [int, None]),
KeyDesc('device', 'ADB device. Takes precedence over "host"', [str, None]),
KeyDesc('keyfile', 'SSH private key file', [str, None]),
KeyDesc('strict-host-check', 'Equivalent to StrictHostKeyChecking option of OpenSSH', [bool, None]),
KeyDesc('workdir', 'Remote target workdir', [str]),
KeyDesc('tools', 'List of tools to install on the target', [TypedList[str]]),
        KeyDesc('lazy-platinfo', 'Lazily autodetect the platform information to speed up the connection', [bool]),
LevelKeyDesc('wait-boot', 'Wait for the target to finish booting', (
KeyDesc('enable', 'Enable the boot check', [bool]),
KeyDesc('timeout', 'Timeout of the boot check', [int]),
)),
LevelKeyDesc('devlib', 'devlib configuration', (
# Using textual name of the Platform allows this YAML configuration
# to not use any python-specific YAML tags, so TargetConf files can
# be parsed and produced by any other third-party code
LevelKeyDesc('platform', 'devlib.platform.Platform subclass specification', (
KeyDesc('class', 'Name of the class to use', [str]),
KeyDesc('args', 'Keyword arguments to build the Platform object', [Mapping]),
)),
KeyDesc('excluded-modules', 'List of devlib modules to *not* load', [TypedList[str]]),
KeyDesc('file-xfer', 'File transfer method. Can be "sftp" (default) or "scp". (Only valid for linux targets)', [TypedList[str]]),
))
))
DEFAULT_SRC = {
'devlib': {
'platform': {
'class': 'devlib.platform.Platform'
}
}
}
class Target(Loggable, HideExekallID, ExekallTaggable, Configurable):
"""
Wrap :class:`devlib.target.Target` to provide additional features on top of
it.
{configurable_params}
:param devlib_platform: Instance of :class:`devlib.platform.Platform` to
use to build the :class:`devlib.target.Target`
:type devlib_platform: devlib.platform.Platform
:param plat_info: Platform information attached to this target, for the
benefits of user code.
:type plat_info: lisa.platforms.platinfo.PlatformInfo
You need to provide the information needed to connect to the
target. For SSH targets that means "host", "username" and
either "password" or "keyfile". All other fields are optional if
the relevant features aren't needed.
.. note:: The wrapping of :class:`devlib.target.Target` is done using
composition, as opposed to inheritance. This allows swapping the exact
class used under the hood, and avoids messing up with ``devlib``
internal members.
"""
ADB_PORT_DEFAULT = 5555
SSH_PORT_DEFAULT = 22
CRITICAL_TASKS = {
'linux': [
'init',
            # We want to freeze everything except PID 1; we don't want to leave
            # systemd-journald or systemd-timesyncd running.
'systemd[^-]',
'dbus',
'sh',
'ssh',
'rsyslogd',
'jbd2'
],
'android': [
'sh', 'adbd',
'usb', 'transport',
# We don't actually need this task but on Google Pixel it apparently
# cannot be frozen, so the cgroup state gets stuck in FREEZING if we
# try to freeze it.
'thermal-engine',
# Similar issue with HiKey960, the board will crash if this is frozen
# for too long.
'watchdogd',
]
}
"""
Dictionary mapping OS name to list of task names that we can't afford to
freeze when using :meth:`freeze_userspace`.
"""
CONF_CLASS = TargetConf
INIT_KWARGS_KEY_MAP = {
'devlib_excluded_modules': ['devlib', 'excluded-modules'],
'devlib_file_xfer': ['devlib', 'file-xfer'],
'wait_boot': ['wait-boot', 'enable'],
'wait_boot_timeout': ['wait-boot', 'timeout'],
}
def __init__(self, kind, name='<noname>', tools=[], res_dir=None,
plat_info=None, lazy_platinfo=False, workdir=None, device=None, host=None, port=None,
username=None, password=None, keyfile=None, strict_host_check=None,
devlib_platform=None, devlib_excluded_modules=[], devlib_file_xfer=None,
wait_boot=True, wait_boot_timeout=10,
):
# pylint: disable=dangerous-default-value
super().__init__()
logger = self.get_logger()
self.name = name
res_dir = res_dir if res_dir else self._get_res_dir(
root=os.path.join(LISA_HOME, RESULT_DIR),
relative='',
name=f'{self.__class__.__qualname__}-{self.name}',
append_time=True,
symlink=True
)
self._res_dir = res_dir
os.makedirs(self._res_dir, exist_ok=True)
if os.listdir(self._res_dir):
raise ValueError(f'res_dir must be empty: {self._res_dir}')
if plat_info is None:
plat_info = PlatformInfo()
else:
# Make a copy of the PlatformInfo so we don't modify the original
# one we were passed when adding the target source to it
plat_info = copy.copy(plat_info)
logger.info(f'User-defined platform information:\n{plat_info}')
self.plat_info = plat_info
# Take the board name from the target configuration so it becomes
# available for later inspection. That board name is mostly free form
# and no specific value should be expected for a given kind of board
# (i.e. a Juno board might be named "foo-bar-juno-on-my-desk")
if name:
self.plat_info.add_src('target-conf', dict(name=name))
        # Determine the file transfer method. Currently available options
        # are 'sftp' and 'scp'; defaults to sftp.
if devlib_file_xfer and devlib_file_xfer not in ('scp', 'sftp'):
raise ValueError(f'Invalid file transfer method: {devlib_file_xfer}')
use_scp = devlib_file_xfer == 'scp'
self._installed_tools = set()
self.target = self._init_target(
kind=kind,
name=name,
workdir=workdir,
device=device,
host=host,
port=port,
username=username,
password=password,
keyfile=keyfile,
strict_host_check=strict_host_check,
use_scp=use_scp,
devlib_platform=devlib_platform,
wait_boot=wait_boot,
wait_boot_timeout=wait_boot_timeout,
)
devlib_excluded_modules = set(devlib_excluded_modules)
# Sorry, can't let you do that. Messing with cgroups in a systemd
        # system is a pretty bad idea.
if self._uses_systemd:
logger.warning('Will not load cgroups devlib module: target is using systemd, which already uses cgroups')
devlib_excluded_modules.add('cgroups')
self._devlib_loadable_modules = _DEVLIB_AVAILABLE_MODULES - devlib_excluded_modules
# Initialize binary tools to deploy
if tools:
logger.info(f'Tools to install: {tools}')
self.install_tools(tools)
# Autodetect information from the target, after the Target is
# initialized. Expensive computations are deferred so they will only be
# computed when actually needed.
rta_calib_res_dir = ArtifactPath.join(self._res_dir, 'rta_calib')
os.makedirs(rta_calib_res_dir)
self.plat_info.add_target_src(self, rta_calib_res_dir, deferred=lazy_platinfo, fallback=True)
logger.info(f'Effective platform information:\n{self.plat_info}')
@property
@memoized
def _uses_systemd(self):
try:
# Check if systemd is being used, according to:
# https://www.freedesktop.org/software/systemd/man/sd_booted.html
self.execute('test -d /run/systemd/system/', check_exit_code=True)
except TargetStableError:
return False
else:
return True
def is_module_available(self, module):
"""
Check if the given devlib module is available.
:returns: ``True`` if module is available, ``False`` otherwise.
:param module: Devlib module to check.
:type module: str
.. note:: This will attempt to load the module if it's not loaded
already, and bail out if it fails to load.
"""
if module not in _DEVLIB_AVAILABLE_MODULES:
raise ValueError(f'"{module}" is not a devlib module')
try:
getattr(self, module)
except Exception: # pylint: disable=broad-except
return False
else:
return True
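    # Usage sketch (the module name here is only an example):
    #   if target.is_module_available('cpufreq'):
    #       ...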
def __getattr__(self, attr):
"""
        Forward all non-overridden attributes/method accesses to the underlying
:class:`devlib.target.Target`.
.. note:: That will not forward special methods like __str__, since the
interpreter bypasses __getattr__ when looking them up.
.. note:: Devlib modules are loaded on demand when accessed.
"""
def get():
return getattr(self.target, attr)
try:
return get()
except AttributeError:
# Load the module on demand
if attr in self._devlib_loadable_modules:
self.get_logger().info(f'Loading target devlib module {attr}')
self.target.install_module(attr)
return get()
# If it was not in the loadable list, it
# has been excluded explicitly
elif attr in _DEVLIB_AVAILABLE_MODULES:
# pylint: disable=raise-missing-from
raise AttributeError(f'Devlib target module {attr} was explicitly excluded, not loading it')
# Something else that does not exist ...
else:
raise
def __dir__(self):
"""
List our attributes plus the ones from the underlying target, and the
devlib modules that could be loaded on-demand.
"""
attrs = set(super().__dir__()) | set(dir(self.target)) | self._devlib_loadable_modules
return sorted(attrs)
@classmethod
def from_conf(cls, conf: TargetConf, res_dir: ArtifactPath = None, plat_info: PlatformInfo = None) -> 'Target':
cls.get_logger().info(f'Target configuration:\n{conf}')
kwargs = cls.conf_to_init_kwargs(conf)
kwargs['res_dir'] = res_dir
kwargs['plat_info'] = plat_info
# Create a devlib Platform instance out of the configuration file
devlib_platform_conf = conf['devlib']['platform']
devlib_platform_cls = resolve_dotted_name(devlib_platform_conf['class'])
devlib_platform_kwargs = copy.copy(devlib_platform_conf.get('args', {}))
# Hack for Gem5 devlib Platform, that requires a "host_output_dir"
# argument computed at runtime.
# Note: lisa.target.Gem5SimulationPlatformWrapper should be used instead
# of the original one to benefit from mapping configuration
if issubclass(devlib_platform_cls, Gem5SimulationPlatform):
devlib_platform_kwargs.setdefault('host_output_dir', res_dir)
# Actually build the devlib Platform object
devlib_platform = devlib_platform_cls(**devlib_platform_kwargs)
kwargs['devlib_platform'] = devlib_platform
cls.check_init_param(**kwargs)
return cls(**kwargs)
@classmethod
def from_default_conf(cls):
"""
        Create a :class:`Target` from the YAML configuration file pointed to by
        the ``LISA_CONF`` environment variable.
        .. note:: Only load trusted YAML files as it can lead to arbitrary code
            execution.
"""
path = os.environ['LISA_CONF']
return cls.from_one_conf(path)
@classmethod
def from_one_conf(cls, path):
"""
Create a :class:`Target` from a single YAML configuration file.
This file will be used to provide a :class:`TargetConf` and
:class:`lisa.platforms.platinfo.PlatformInfo` instances.
        .. note:: Only load trusted YAML files as it can lead to arbitrary
            code execution.
"""
conf = TargetConf.from_yaml_map(path)
try:
plat_info = PlatformInfo.from_yaml_map(path)
except Exception as e: # pylint: disable=broad-except
cls.get_logger().warning(f'No platform information could be found: {e}')
plat_info = None
return cls.from_conf(conf=conf, plat_info=plat_info)
@classmethod
# Keep the signature without *args and **kwargs so that it's usable by exekall
def from_cli(cls, argv=None, params=None) -> 'Target':
"""
        Same as :meth:`from_custom_cli` without the custom parameter
capabilities.
:return: A connected :class:`Target`
"""
_, target = cls.from_custom_cli(argv=argv, params=params)
return target
@classmethod
def from_custom_cli(cls, argv=None, params=None):
"""
Create a Target from command line arguments.
:param argv: The list of arguments. ``sys.argv[1:]`` will be used if
this is ``None``.
:type argv: list(str)
:param params: Dictionary of custom parameters to add to the parser. It
is in the form of
``{param_name: {dict of ArgumentParser.add_argument() options}}``.
:type params: dict(str, dict)
:return: A tuple ``(args, target)``
.. note:: This method should not be relied upon to implement long-term
            scripts; it's more designed for quick scripting.
"""
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=textwrap.dedent(
"""
Connect to a target using the provided configuration in order
to run a test.
EXAMPLES
--conf can point to a YAML target configuration file
with all the necessary connection information:
$ {script} --conf my_target.yml
Alternatively, --kind must be set along the relevant credentials:
$ {script} --kind linux --host 192.0.2.1 --username root --password root
In both cases, --conf can also contain a PlatformInfo YAML description.
                    Note: only load trusted YAML files as it can lead to arbitrary
                    code execution.
""".format(
script=os.path.basename(sys.argv[0])
)))
parser.add_argument("--conf", '-c',
help="Path to a TargetConf and PlatformInfo yaml file. Other options will override what is specified in the file."
)
parser.add_argument("--kind", "-k",
choices=["android", "linux", "host"],
help="The kind of target to connect to.")
device_group = parser.add_mutually_exclusive_group()
device_group.add_argument("--device", "-d",
help="The ADB ID of the target. Superseeds --host. Only applies to Android kind.")
device_group.add_argument("--host", "-n",
help="The hostname/IP of the target.")
parser.add_argument("--username", "-u",
help="Login username. Only applies to Linux kind.")
parser.add_argument("--password", "-p",
help="Login password. Only applies to Linux kind.")
parser.add_argument("--log-level",
default='info',
choices=('warning', 'info', 'debug'),
help="Verbosity level of the logs.")
parser.add_argument("--res-dir", "-o",
help="Result directory of the created Target. If no directory is specified, a default location under $LISA_HOME will be used.")
params = params or {}
for param, settings in params.items():
parser.add_argument(f'--{param}', **settings)
custom_params = {k.replace('-', '_') for k in params.keys()}
# Options that are not a key in TargetConf must be listed here
not_target_conf_opt = {
'platform_info', 'log_level', 'res_dir', 'conf',
}
not_target_conf_opt.update(custom_params)
args = parser.parse_args(argv)
setup_logging(level=args.log_level.upper())
target_conf = TargetConf()
platform_info = None
if args.conf:
# Tentatively load a PlatformInfo from the conf file
with contextlib.suppress(KeyError, ValueError):
platform_info = PlatformInfo.from_yaml_map(args.conf)
# Load the TargetConf from the file, and update it with command
# line arguments
try:
conf = TargetConf.from_yaml_map(args.conf)
except (KeyError, ValueError):
pass
else:
target_conf.add_src(args.conf, conf)
target_conf.add_src('command-line', {
k: v for k, v in vars(args).items()
if v is not None and k not in not_target_conf_opt
})
# Some sanity check to get better error messages
if 'kind' not in target_conf:
parser.error('--conf with target configuration or any of the connection options is required')
if args.kind == 'android':
if ('host' not in target_conf) and ('device' not in target_conf):
parser.error('--host or --device must be specified')
if args.kind == 'linux':
for required in ['host', 'username', 'password']:
if required not in target_conf:
parser.error(f'--{required} must be specified')
custom_args = {
param: value
for param, value in vars(args).items()
if param in custom_params
}
custom_args = argparse.Namespace(**custom_args)
return custom_args, cls.from_conf(conf=target_conf, plat_info=platform_info, res_dir=args.res_dir)
def _init_target(self, kind, name, workdir, device, host,
port, username, password, keyfile, strict_host_check, use_scp,
devlib_platform,
wait_boot, wait_boot_timeout,
):
"""
Initialize the Target
"""
logger = self.get_logger()
conn_settings = {}
resolved_username = username or 'root'
logger.debug(f'Setting up {kind} target...')
# If the target is Android, we need just (eventually) the device
if kind == 'android':
devlib_target_cls = devlib.AndroidTarget
# Workaround for ARM-software/devlib#225
workdir = workdir or '/data/local/tmp/devlib-target'
if device:
pass
elif host:
port = port or self.ADB_PORT_DEFAULT
device = f'{host}:{port}'
else:
device = 'DEFAULT'
conn_settings['device'] = device
# If the username was explicitly set to "root", root the target as
# early as possible
conn_settings['adb_as_root'] = (username == 'root')
elif kind == 'linux':
devlib_target_cls = devlib.LinuxTarget
conn_settings.update(
username=resolved_username,
port=port or self.SSH_PORT_DEFAULT,
host=host,
strict_host_check=True if strict_host_check is None else strict_host_check,
use_scp=False if use_scp is None else use_scp,
)
# Configure password or SSH keyfile
if keyfile:
conn_settings['keyfile'] = keyfile
else:
conn_settings['password'] = password
elif kind == 'host':
devlib_target_cls = devlib.LocalLinuxTarget
# If we are given a password, assume we can use it as a sudo
# password.
conn_settings.update(
unrooted=password is None,
password=password,
)
else:
raise ValueError(f'Unsupported platform type {kind}')
settings = '\n '.join(
f' {key}: {val}'
for key, val in conn_settings.items()
if key != 'password'
)
logger.debug(f'{kind} {name} target connection settings:\n {settings}')
########################################################################
# Devlib Platform configuration
########################################################################
if not devlib_platform:
devlib_platform = devlib.platform.Platform()
########################################################################
# Create devlib Target object
########################################################################
target = devlib_target_cls(
platform=devlib_platform,
load_default_modules=False,
connection_settings=conn_settings,
working_directory=workdir,
connect=False,
)
target.connect(check_boot_completed=wait_boot, timeout=wait_boot_timeout)
# None as username means adb root will be attempted, but failure will
# not prevent from connecting to the target.
if kind == 'android' and username is None:
try:
target.adb_root(enable=True)
except Exception as e: # pylint: disable=broad-except
logger.warning(f'"adb root" failed: {e}')
logger.debug(f'Target info: {dict(abi=target.abi, cpuinfo=target.cpuinfo, workdir=target.working_directory)}')
target.setup()
logger.info(f"Connected to target {(name or '')}")
return target
def get_res_dir(self, name=None, append_time=True, symlink=True):
"""
Returns a directory managed by LISA to store results.
        Usage of this function is reserved for interactive use or simple scripts.
        Tests should not rely on it, as the created folder will not be tracked
        by any external entity, which means the results will be lost in some
        automated environments.
:param name: Name of the results directory
:type name: str
:param append_time: If True, the current datetime will be appended to
the given ``name``. If ``name`` is None, the directory name will be
the current datetime.
:type append_time: bool
:param symlink: Create a symlink named ``results_latest`` to the newly
created results directory
:type symlink: bool
"""
if isinstance(self._res_dir, ArtifactPath):
root = self._res_dir.root
relative = self._res_dir.relative
else:
root = self._res_dir
relative = ''
return self._get_res_dir(
root=root,
relative=relative,
name=name,
append_time=append_time,
symlink=symlink,
)
def _get_res_dir(self, root, relative, name, append_time, symlink):
logger = self.get_logger()
while True:
time_str = datetime.now().strftime('%Y%m%d_%H%M%S.%f')
if not name:
name = time_str
elif append_time:
name = f"{name}-{time_str}"
# If we were given an ArtifactPath with an existing root, we
# preserve that root so it can be relocated as the caller wants it
res_dir = ArtifactPath(root, os.path.join(relative, name))
# Compute base installation path
logger.info(f'Creating result directory: {res_dir}')
# It will fail if the folder already exists. In that case,
# append_time should be used to ensure we get a unique name.
try:
os.makedirs(res_dir)
break
except FileExistsError:
# If the time is used in the name, there is some hope that the
# next time it will succeed
if append_time:
logger.info('Directory already exists, retrying ...')
continue
else:
raise
if symlink:
res_lnk = os.path.join(LISA_HOME, LATEST_LINK)
with contextlib.suppress(FileNotFoundError):
os.remove(res_lnk)
# There may be a race condition with another tool trying to create
# the link
with contextlib.suppress(FileExistsError):
os.symlink(res_dir, res_lnk)
return res_dir
def install_tools(self, tools):
"""
Install tools additional to those specified in the test config 'tools'
field
:param tools: The list of names of tools to install
:type tools: list(str)
"""
def bin_path(tool):
binary = os.path.join(ASSETS_PATH, 'binaries', self.abi, tool)
if not os.path.isfile(binary):
binary = os.path.join(ASSETS_PATH, 'binaries', 'scripts', tool)
return binary
tools = set(tools) - self._installed_tools
# TODO: compute the checksum of the tool + install location and keep
# that in _installed_tools, so we are sure to be correct
for tool in map(bin_path, tools):
self.target.install(tool)
self._installed_tools.add(tool)
@contextlib.contextmanager
def freeze_userspace(self):
"""
Context manager that lets you freeze the userspace.
        .. note:: A number of situations can prevent freezing anything. When
that happens, a warning is logged but no exception is raised, so
it's a best-effort approach.
"""
logger = self.get_logger()
if not self.is_rooted:
logger.warning('Could not freeze userspace: target is not rooted')
cm = nullcontext
elif not self.is_module_available('cgroups'):
logger.warning('Could not freeze userspace: "cgroups" devlib module is necessary')
cm = nullcontext
else:
controllers = [s.name for s in self.cgroups.list_subsystems()]
if 'freezer' not in controllers:
logger.warning('Could not freeze userspace: freezer cgroup controller not available on the target')
cm = nullcontext
else:
exclude = copy.copy(self.CRITICAL_TASKS[self.target.os])
# Do not freeze the process in charge of de-freezing, otherwise we
# will freeze to death and a machine hard reboot will be required
if isinstance(self.target, devlib.LocalLinuxTarget):
exclude.append(str(os.getpid()))
@contextlib.contextmanager
def cm():
logger.info(f"Freezing all tasks except: {','.join(exclude)}")
try:
yield self.cgroups.freeze(exclude)
finally:
logger.info('Un-freezing userspace tasks')
self.cgroups.freeze(thaw=True)
with cm() as x:
yield x
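    # Usage sketch, assuming a connected ``target`` (the workload function is
    # hypothetical):
    #   with target.freeze_userspace():
    #       run_workload()  # non-critical userspace tasks stay frozen here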
@contextlib.contextmanager
def disable_idle_states(self):
"""
Context manager that lets you disable all idle states
"""
logger = self.get_logger()
logger.info('Disabling idle states for all domains')
try:
cpuidle = self.cpuidle
except AttributeError:
logger.warning('Could not disable idle states, cpuidle devlib module is not loaded')
cm = nullcontext
else:
@contextlib.contextmanager
def cm():
try:
for cpu in range(self.plat_info['cpus-count']):
cpuidle.disable_all(cpu)
yield
finally:
logger.info('Re-enabling idle states for all domains')
for cpu in range(self.plat_info['cpus-count']):
cpuidle.enable_all(cpu)
with cm() as x:
yield x
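    # Usage sketch (the measurement function is hypothetical):
    #   with target.disable_idle_states():
    #       measure_wakeup_latency()  # idle states are re-enabled on exit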
def get_tags(self):
return {'board': self.name}
@classmethod
def _make_remote_snippet(cls, name, code_str, module, kwargs, global_vars, out_tempfiles):
# Inject the parameters inside the wrapper's globals so that it can
# access them. It's harmless as they would shadow any global name
# anyway, and it's restricted to the wrapper using eval()
global_vars = {
**global_vars,
**kwargs,
}
# Treat the modules separately as they cannot be pickled
modules = {
name: mod
for name, mod in global_vars.items()
if isinstance(mod, ModuleType)
}
def can_include(f):
return (
isinstance(f, FunctionType) and
# Only allow inlining of functions defined in the same module so that:
# 1. there is no name clash risk
                # 2. we don't inline the whole world, which could lead to
                #    problems appearing after another module is updated. We
                #    only inline local things that are under our direct control
f.__module__ == module
)
def add_func(f, name):
# Disallow decorated functions since their definition depends on
# external callable we cannot control
if hasattr(f, '__wrapped__'):
raise TypeError('Decorated functions cannot be called from remote functions')
closure_vars = {
name: val
for var_dct in inspect.getclosurevars(f)
if isinstance(var_dct, Mapping)
for name, val in var_dct.items()
}
funcs[name] = (f, cls._get_code(f)[1])
for _name, _f in closure_vars.items():
if _f is not f and can_include(_f):
add_func(_f, _name)
modules.update(
(name, mod)
for name, mod in closure_vars.items()
if isinstance(mod, ModuleType)
)
funcs = {}
for f_name, f in global_vars.items():
if can_include(f):
add_func(f, f_name)
code_str += '\n' + '\n'.join(map(itemgetter(1), funcs.values()))
non_pickled = set(modules.keys()) | set(funcs.keys())
global_vars = {
name: val
for name, val in global_vars.items()
if name not in non_pickled
}
if modules:
modules = f"import {', '.join(sorted(modules))}"
else:
modules = ''
script = textwrap.dedent('''
import pickle
import sys
def wrapper():
{modules}
{code}
return {f}({kwargs})
try:
out = eval(wrapper.__code__, pickle.loads({globals}))
except BaseException as e:
out = e
out_is_excep = True
else:
out_is_excep = False
out = pickle.dumps(out)
out_tempfile = {out_tempfiles}[1] if out_is_excep else {out_tempfiles}[0]
with open(out_tempfile, 'wb') as f:
f.write(out)
''').format(
f=name,
code=textwrap.dedent(code_str).replace('\n', '\n' + ' ' * 4),
modules=modules,
out_tempfiles=repr(out_tempfiles),
globals=repr(pickle.dumps(global_vars)),
kwargs=', '.join(
f'{name}={name}'
for name in kwargs.keys()
)
)
return script
@staticmethod
def _get_code(f):
lines, _ = inspect.getsourcelines(f)
# Remove decorators, as they are either undefined or just were used to
# feed the function to us
lines = [
line
for line in lines
if not line.strip().startswith('@')
]
code_str = textwrap.dedent(''.join(lines))
name = f.__name__
return (name, code_str)
def execute_python(self, f, args, kwargs, **execute_kwargs):
"""
Executes the given Python function ``f`` with the provided positional
and keyword arguments.
The return value or any exception is pickled back and is
returned/raised in the host caller.
:Variable keyword arguments: Forwarded to :meth:`execute` that
will spawn the Python interpreter on the target
.. note:: Closure variables are supported, but mutating them will not
be reflected in the caller's context. Also, functions that are
referred to will be:
* bundled in the script if it is defined in the same module
* referred to by name, assuming it comes from a module that is
installed on the target and that this module is in scope. If
that is not the case, a :exc:`NameError` will be raised.
.. attention:: Decorators are ignored and not applied.
"""
sig = inspect.signature(f)
kwargs = sig.bind(*args, **kwargs).arguments
closure_vars = inspect.getclosurevars(f)
name, code_str = self._get_code(f)
def mktemp():
return self.execute(
f'mktemp -p {shlex.quote(self.working_directory)}'
).strip()
def read_output(path):
with tempfile.TemporaryDirectory() as d:
name = os.path.join(d, 'out')
self.pull(path, name)
with open(name, 'rb') as f:
return pickle.loads(f.read())
def parse_output(paths, err):
val, excep = paths
try:
return read_output(val)
# If the file is empty, we probably got an exception
except EOFError:
# pylint: disable=raise-missing-from
try:
excep = read_output(excep)
# If we can't even read the exception, raise the initial one
# from devlib
except EOFError:
raise err if err is not None else ValueError('No exception was raised or value returned by the function')
else:
raise excep
out_tempfiles = tuple()
try:
out_tempfiles = (mktemp(), mktemp())
snippet = self._make_remote_snippet(
name=name,
code_str=code_str,
module=f.__module__,
kwargs=kwargs,
global_vars={
**closure_vars.globals,
**closure_vars.nonlocals,
},
out_tempfiles=out_tempfiles
)
cmd = ['python3', '-c', snippet]
cmd = ' '.join(map(shlex.quote, cmd))
try:
self.execute(cmd, **execute_kwargs)
except Exception as e: # pylint: disable=broad-except
err = e
else:
err = None
return parse_output(out_tempfiles, err)
finally:
for path in out_tempfiles:
self.remove(path)
def remote_func(self, **kwargs):
"""
Decorates a given function to execute remotely using
:meth:`execute_python`::
target = Target(...)
@target.remote_func(timeout=42)
def foo(x, y):
return x + y
# Execute the function on the target transparently
val = foo(1, y=2)
:Variable keyword arguments: Forwarded to :meth:`execute` that
will spawn the Python interpreter on the target
"""
def wrapper_param(f):
@functools.wraps(f)
def wrapper(*f_args, **f_kwargs):
return self.execute_python(f, f_args, f_kwargs, **kwargs)
return wrapper
return wrapper_param
class Gem5SimulationPlatformWrapper(Gem5SimulationPlatform):
def __init__(self, system, simulator, **kwargs):
simulator_args = copy.copy(simulator.get('args', []))
system_platform = system['platform']
# Get gem5 binary arguments
simulator_args.append('--listener-mode=on')
simulator_args.append(system_platform['description'])
simulator_args.extend(system_platform.get('args', []))
simulator_args.extend((
f"--kernel {system['kernel']}",
f"--dtb {system['dtb']}",
f"--disk-image {system['disk']}"
))
diod_path = which('diod')
if diod_path is None:
raise RuntimeError('Failed to find "diod" on your host machine, check your installation or your PATH variable')
# Setup virtio
# Brackets are there to let the output dir be created automatically
virtio_args = [
f'--which-diod={diod_path}',
'--workload-automation-vio={}',
]
simulator_args.extend(virtio_args)
# Quote/escape arguments and build the command line
gem5_args = ' '.join(shlex.quote(a) for a in simulator_args)
super().__init__(
gem5_args=gem5_args,
gem5_bin=simulator['bin'],
**kwargs
)
# vim :set tabstop=4 shiftwidth=4 expandtab textwidth=80
 | [((1739, 1775), 'lisa.utils.import_all_submodules', 'import_all_submodules', (['devlib.module'], {}), '(devlib.module)\n', (1760, 1775), False, 'from lisa.utils import Loggable, HideExekallID, resolve_dotted_name, get_subclasses, import_all_submodules, LISA_HOME, RESULT_DIR, LATEST_LINK, setup_logging, ArtifactPath, nullcontext, ExekallTaggable, memoized\n'), ((1834, 1870), 'lisa.utils.get_subclasses', 'get_subclasses', (['devlib.module.Module'], {}), '(devlib.module.Module)\n', (1848, 1870), False, 'from lisa.utils import Loggable, HideExekallID, resolve_dotted_name, get_subclasses, import_all_submodules, LISA_HOME, RESULT_DIR, LATEST_LINK, setup_logging, ArtifactPath, nullcontext, ExekallTaggable, memoized\n'), ((9141, 9182), 'os.makedirs', 'os.makedirs', (['self._res_dir'], {'exist_ok': '(True)'}), '(self._res_dir, exist_ok=True)\n', (9152, 9182), False, 'import os\n'), ((9194, 9219), 'os.listdir', 'os.listdir', (['self._res_dir'], {}), '(self._res_dir)\n', (9204, 9219), False, 'import os\n'), ((11765, 11810), 'lisa.utils.ArtifactPath.join', 'ArtifactPath.join', (['self._res_dir', '"""rta_calib"""'], {}), "(self._res_dir, 'rta_calib')\n", (11782, 11810), False, 'from lisa.utils import Loggable, HideExekallID, resolve_dotted_name, get_subclasses, import_all_submodules, LISA_HOME, RESULT_DIR, LATEST_LINK, setup_logging, ArtifactPath, nullcontext, ExekallTaggable, memoized\n'), ((11819, 11849), 'os.makedirs', 'os.makedirs', (['rta_calib_res_dir'], {}), '(rta_calib_res_dir)\n', (11830, 11849), False, 'import os\n'), ((15073, 15123), 'lisa.utils.resolve_dotted_name', 'resolve_dotted_name', (["devlib_platform_conf['class']"], {}), "(devlib_platform_conf['class'])\n", (15092, 15123), False, 'from lisa.utils import Loggable, HideExekallID, resolve_dotted_name, get_subclasses, import_all_submodules, LISA_HOME, RESULT_DIR, LATEST_LINK, setup_logging, ArtifactPath, nullcontext, ExekallTaggable, memoized\n'), ((22457, 22490), 'argparse.Namespace', 'argparse.Namespace', ([], {}), '(**custom_args)\n', (22475, 22490), False, 'import argparse\n'), ((36608, 36633), 'inspect.getsourcelines', 'inspect.getsourcelines', (['f'], {}), '(f)\n', (36630, 36633), False, 'import inspect\n'), ((37995, 38015), 'inspect.signature', 'inspect.signature', (['f'], {}), '(f)\n', (38012, 38015), False, 'import inspect\n'), ((38092, 38117), 'inspect.getclosurevars', 'inspect.getclosurevars', (['f'], {}), '(f)\n', (38114, 38117), False, 'import inspect\n'), ((41531, 41544), 'devlib.utils.misc.which', 'which', (['"""diod"""'], {}), "('diod')\n", (41536, 41544), False, 'from devlib.utils.misc import which\n'), ((3790, 3875), 'lisa.conf.KeyDesc', 'KeyDesc', (['"""name"""', '"""Board name, free-form value only used to embelish logs"""', '[str]'], {}), "('name', 'Board name, free-form value only used to embelish logs', [str]\n )\n", (3797, 3875), False, 'from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, Configurable\n'), ((3880, 3958), 'lisa.conf.KeyDesc', 'KeyDesc', (['"""kind"""', '"""Target kind. Can be "linux" (ssh) or "android" (adb)"""', '[str]'], {}), '(\'kind\', \'Target kind. Can be "linux" (ssh) or "android" (adb)\', [str])\n', (3887, 3958), False, 'from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, Configurable\n'), ((3969, 4035), 'lisa.conf.KeyDesc', 'KeyDesc', (['"""host"""', '"""Hostname or IP address of the host"""', '[str, None]'], {}), "('host', 'Hostname or IP address of the host', [str, None])\n", (3976, 4035), False, 'from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, Configurable\n'), ((4045, 4176), 'lisa.conf.KeyDesc', 'KeyDesc', (['"""username"""', '"""SSH username. On ADB connections, "root" username will root adb upon target connection"""', '[str, None]'], {}), '(\'username\',\n \'SSH username. On ADB connections, "root" username will root adb upon target connection\'\n , [str, None])\n', (4052, 4176), False, 'from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, Configurable\n'), ((4243, 4297), 'lisa.conf.KeyDesc', 'KeyDesc', (['"""port"""', '"""SSH or ADB server port"""', '[int, None]'], {}), "('port', 'SSH or ADB server port', [int, None])\n", (4250, 4297), False, 'from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, Configurable\n'), ((4307, 4381), 'lisa.conf.KeyDesc', 'KeyDesc', (['"""device"""', '"""ADB device. Takes precedence over "host\\""""', '[str, None]'], {}), '(\'device\', \'ADB device. Takes precedence over "host"\', [str, None])\n', (4314, 4381), False, 'from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, Configurable\n'), ((4391, 4446), 'lisa.conf.KeyDesc', 'KeyDesc', (['"""keyfile"""', '"""SSH private key file"""', '[str, None]'], {}), "('keyfile', 'SSH private key file', [str, None])\n", (4398, 4446), False, 'from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, Configurable\n'), ((4456, 4559), 'lisa.conf.KeyDesc', 'KeyDesc', (['"""strict-host-check"""', '"""Equivalent to StrictHostKeyChecking option of OpenSSH"""', '[bool, None]'], {}), "('strict-host-check',\n 'Equivalent to StrictHostKeyChecking option of OpenSSH', [bool, None])\n", (4463, 4559), False, 'from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, Configurable\n'), ((4565, 4615), 'lisa.conf.KeyDesc', 'KeyDesc', (['"""workdir"""', '"""Remote target workdir"""', '[str]'], {}), "('workdir', 'Remote target workdir', [str])\n", (4572, 4615), False, 'from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, Configurable\n'), ((4625, 4701), 'lisa.conf.KeyDesc', 'KeyDesc', (['"""tools"""', '"""List of tools to install on the target"""', '[TypedList[str]]'], {}), "('tools', 'List of tools to install on the target', [TypedList[str]])\n", (4632, 4701), False, 'from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, Configurable\n'), ((4711, 4822), 'lisa.conf.KeyDesc', 'KeyDesc', (['"""lazy-platinfo"""', '"""Lazily autodect the platform information to speed up the connection"""', '[bool]'], {}), "('lazy-platinfo',\n 'Lazily autodect the platform information to speed up the connection',\n [bool])\n", (4718, 4822), False, 'from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, Configurable\n'), ((9348, 9362), 'lisa.platforms.platinfo.PlatformInfo', 'PlatformInfo', ([], {}), '()\n', (9360, 9362), False, 'from lisa.platforms.platinfo import PlatformInfo\n'), ((9548, 9568), 'copy.copy', 'copy.copy', (['plat_info'], {}), '(plat_info)\n', (9557, 9568), False, 'import copy\n'), ((16704, 16736), 'lisa.platforms.platinfo.PlatformInfo.from_yaml_map', 'PlatformInfo.from_yaml_map', (['path'], {}), '(path)\n', (16730, 16736), False, 'from lisa.platforms.platinfo import PlatformInfo\n'), ((25245, 25271), 'devlib.platform.Platform', 'devlib.platform.Platform', ([], {}), '()\n', (25269, 25271), False, 'import devlib\n'), ((28923, 28959), 'os.path.join', 'os.path.join', (['LISA_HOME', 'LATEST_LINK'], {}), '(LISA_HOME, LATEST_LINK)\n', (28935, 28959), False, 'import os\n'), ((29575, 29628), 'os.path.join', 'os.path.join', (['ASSETS_PATH', '"""binaries"""', 'self.abi', 'tool'], {}), "(ASSETS_PATH, 'binaries', self.abi, tool)\n", (29587, 29628), False, 'import os\n'), ((40706, 40724), 'functools.wraps', 'functools.wraps', (['f'], {}), '(f)\n', (40721, 40724), False, 'import functools\n'), ((21091, 21132), 'contextlib.suppress', 'contextlib.suppress', (['KeyError', 'ValueError'], {}), '(KeyError, ValueError)\n', (21110, 21132), False, 'import contextlib\n'), ((21166, 21203), 'lisa.platforms.platinfo.PlatformInfo.from_yaml_map', 'PlatformInfo.from_yaml_map', (['args.conf'], {}), '(args.conf)\n', (21192, 21203), False, 'from lisa.platforms.platinfo import PlatformInfo\n'), ((28176, 28204), 'os.path.join', 'os.path.join', (['relative', 'name'], {}), '(relative, name)\n', (28188, 28204), False, 'import os\n'), ((28495, 28515), 'os.makedirs', 'os.makedirs', (['res_dir'], {}), '(res_dir)\n', (28506, 28515), False, 'import os\n'), ((28977, 29015), 'contextlib.suppress', 'contextlib.suppress', (['FileNotFoundError'], {}), '(FileNotFoundError)\n', (28996, 29015), False, 'import contextlib\n'), ((29033, 29051), 'os.remove', 'os.remove', (['res_lnk'], {}), '(res_lnk)\n', (29042, 29051), False, 'import os\n'), ((29172, 29208), 'contextlib.suppress', 'contextlib.suppress', (['FileExistsError'], {}), '(FileExistsError)\n', (29191, 29208), False, 'import contextlib\n'), ((29226, 29254), 'os.symlink', 'os.symlink', (['res_dir', 'res_lnk'], {}), '(res_dir, res_lnk)\n', (29236, 29254), False, 'import os\n'), ((29648, 29670), 'os.path.isfile', 'os.path.isfile', (['binary'], {}), '(binary)\n', (29662, 29670), False, 'import os\n'), ((29697, 29751), 'os.path.join', 'os.path.join', (['ASSETS_PATH', '"""binaries"""', '"""scripts"""', 'tool'], {}), "(ASSETS_PATH, 'binaries', 'scripts', tool)\n", (29709, 29751), False, 'import os\n'), ((35526, 36175), 'textwrap.dedent', 'textwrap.dedent', (['"""\n        import pickle\n        import sys\n\n        def wrapper():\n            {modules}\n\n            {code}\n            return {f}({kwargs})\n\n        try:\n            out = eval(wrapper.__code__, pickle.loads({globals}))\n        except BaseException as e:\n            out = e\n            out_is_excep = True\n        else:\n            out_is_excep = False\n\n        out = pickle.dumps(out)\n        out_tempfile = {out_tempfiles}[1] if out_is_excep else {out_tempfiles}[0]\n\n        with open(out_tempfile, \'wb\') as f:\n            f.write(out)\n        """'], {}), '(\n    """\n        import pickle\n        import sys\n\n        def wrapper():\n            {modules}\n\n            {code}\n            return {f}({kwargs})\n\n        try:\n            out = eval(wrapper.__code__, pickle.loads({globals}))\n        except BaseException as e:\n            out = e\n            out_is_excep = True\n        else:\n            out_is_excep = False\n\n        out = pickle.dumps(out)\n        out_tempfile = {out_tempfiles}[1] if out_is_excep else {out_tempfiles}[0]\n\n        with open(out_tempfile, \'wb\') as f:\n            f.write(out)\n        """\n    )\n', (35541, 36175), False, 'import textwrap\n'), ((38356, 38385), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (38383, 38385), False, 'import tempfile\n'), ((38415, 38437), 'os.path.join', 'os.path.join', (['d', '"""out"""'], {}), "(d, 'out')\n", (38427, 38437), False, 'import os\n'), ((42051, 42065), 'shlex.quote', 'shlex.quote', (['a'], {}), '(a)\n', (42062, 42065), False, 'import shlex\n'), ((4905, 4955), 'lisa.conf.KeyDesc', 'KeyDesc', (['"""enable"""', '"""Enable the boot check"""', '[bool]'], {}), "('enable', 'Enable the boot check', [bool])\n", (4912, 4955), False, 'from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, Configurable\n'), ((4969, 5023), 'lisa.conf.KeyDesc', 'KeyDesc', (['"""timeout"""', '"""Timeout of the boot check"""', '[int]'], {}), "('timeout', 'Timeout of the boot check', [int])\n", (4976, 5023), False, 'from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, Configurable\n'), ((5602, 5692), 'lisa.conf.KeyDesc', 'KeyDesc', (['"""excluded-modules"""', '"""List of devlib modules to *not* load"""', '[TypedList[str]]'], {}), "('excluded-modules', 'List of devlib modules to *not* load', [\n TypedList[str]])\n", (5609, 5692), False, 'from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, Configurable\n'), ((5701, 5838), 'lisa.conf.KeyDesc', 'KeyDesc', (['"""file-xfer"""', '"""File transfer method. Can be "sftp" (default) or "scp". (Only valid for linux targets)"""', '[TypedList[str]]'], {}), '(\'file-xfer\',\n \'File transfer method. Can be "sftp" (default) or "scp". (Only valid for linux targets)\'\n , [TypedList[str]])\n', (5708, 5838), False, 'from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, Configurable\n'), ((8910, 8945), 'os.path.join', 'os.path.join', (['LISA_HOME', 'RESULT_DIR'], {}), '(LISA_HOME, RESULT_DIR)\n', (8922, 8945), False, 'import os\n'), ((27807, 27821), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (27819, 27821), False, 'from datetime import datetime\n'), ((31123, 31169), 'copy.copy', 'copy.copy', (['self.CRITICAL_TASKS[self.target.os]'], {}), '(self.CRITICAL_TASKS[self.target.os])\n', (31132, 31169), False, 'import copy\n'), ((34468, 34493), 'inspect.getclosurevars', 'inspect.getclosurevars', (['f'], {}), '(f)\n', (34490, 34493), False, 'import inspect\n'), ((35148, 35161), 'operator.itemgetter', 'itemgetter', (['(1)'], {}), '(1)\n', (35158, 35161), False, 'from operator import itemgetter\n'), ((36369, 36394), 'pickle.dumps', 'pickle.dumps', (['global_vars'], {}), '(global_vars)\n', (36381, 36394), False, 'import pickle\n'), ((36211, 36236), 'textwrap.dedent', 'textwrap.dedent', (['code_str'], {}), '(code_str)\n', (36226, 36236), False, 'import textwrap\n'), ((5427, 5478), 'lisa.conf.KeyDesc', 'KeyDesc', (['"""class"""', '"""Name of the class to use"""', '[str]'], {}), "('class', 'Name of the class to use', [str])\n", (5434, 5478), False, 'from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, Configurable\n'), ((5496, 5572), 'lisa.conf.KeyDesc', 'KeyDesc', (['"""args"""', '"""Keyword arguments to build the Platform object"""', '[Mapping]'], {}), "('args', 'Keyword arguments to build the Platform object', [Mapping])\n", (5503, 5572), False, 'from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, Configurable\n'), ((18996, 19025), 'os.path.basename', 'os.path.basename', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (19012, 19025), False, 'import os\n'), ((31444, 31455), 'os.getpid', 'os.getpid', ([], {}), '()\n', (31453, 31455), False, 'import os\n'), ((38247, 38282), 'shlex.quote', 'shlex.quote', (['self.working_directory'], {}), '(self.working_directory)\n', (38258, 38282), False, 'import shlex\n')] |
EasonC13/iota.py | iota/commands/core/get_node_info.py | f596c1ac0d9bcbceda1cf6109cd921943a6599b3 | import filters as f
from iota import TransactionHash, Address
from iota.commands import FilterCommand, RequestFilter, ResponseFilter
from iota.filters import Trytes
__all__ = [
'GetNodeInfoCommand',
]
class GetNodeInfoCommand(FilterCommand):
"""
Executes `getNodeInfo` command.
See :py:meth:`iota.api.StrictIota.get_node_info`.
"""
command = 'getNodeInfo'
def get_request_filter(self):
return GetNodeInfoRequestFilter()
def get_response_filter(self):
return GetNodeInfoResponseFilter()
class GetNodeInfoRequestFilter(RequestFilter):
def __init__(self) -> None:
# ``getNodeInfo`` does not accept any parameters.
# Using a filter here just to enforce that the request is empty.
super(GetNodeInfoRequestFilter, self).__init__({})
class GetNodeInfoResponseFilter(ResponseFilter):
def __init__(self) -> None:
super(GetNodeInfoResponseFilter, self).__init__({
'coordinatorAddress':
f.ByteString(encoding='ascii') | Trytes(Address),
'latestMilestone':
f.ByteString(encoding='ascii') | Trytes(TransactionHash),
'latestSolidSubtangleMilestone':
f.ByteString(encoding='ascii') | Trytes(TransactionHash),
})
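# --- Editor's sketch (hedged; the exact calling convention is assumed,
# not verified against the filters package) ---
# Each ``f.ByteString(encoding='ascii') | Trytes(...)`` chain above first
# decodes an ASCII byte payload, then converts the result into the matching
# trytes type (``Address`` or ``TransactionHash``). Applied to a raw
# ``getNodeInfo`` response it would normalize, e.g.:
#
#   {'coordinatorAddress': b'KPWCHICG...'}  ->  {'coordinatorAddress': Address(b'KPWCHICG...')}
#
# (The tryte value shown is an illustrative placeholder, not real data.)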
| [((1003, 1033), 'filters.ByteString', 'f.ByteString', ([], {'encoding': '"""ascii"""'}), "(encoding='ascii')\n", (1015, 1033), True, 'import filters as f\n'), ((1036, 1051), 'iota.filters.Trytes', 'Trytes', (['Address'], {}), '(Address)\n', (1042, 1051), False, 'from iota.filters import Trytes\n'), ((1100, 1130), 'filters.ByteString', 'f.ByteString', ([], {'encoding': '"""ascii"""'}), "(encoding='ascii')\n", (1112, 1130), True, 'import filters as f\n'), ((1133, 1156), 'iota.filters.Trytes', 'Trytes', (['TransactionHash'], {}), '(TransactionHash)\n', (1139, 1156), False, 'from iota.filters import Trytes\n'), ((1220, 1250), 'filters.ByteString', 'f.ByteString', ([], {'encoding': '"""ascii"""'}), "(encoding='ascii')\n", (1232, 1250), True, 'import filters as f\n'), ((1253, 1276), 'iota.filters.Trytes', 'Trytes', (['TransactionHash'], {}), '(TransactionHash)\n', (1259, 1276), False, 'from iota.filters import Trytes\n')] |
Juandiegordp/TPI | Aplicacion/Presentacion/views.py | 427266f00745e9d9678110c1d01d3be4febca673 | from Negocio import controller
import forms, functions
from flask import Flask, render_template, request, redirect, url_for, flash
def register(mysql, request):
    registerForm = forms.RegisterForm(request.form)
if request.method == 'POST' and registerForm.validate():
return controller.registraUsuario(mysql, request, registerForm)
return render_template('register.html', form=registerForm)
def Index(mysql, request):
if request.method=='GET':
success= request.args.get('success')
if success==None:
if controller.usuarioIniciado():
return redirect(url_for('home'))
else:
return render_template('Index.html')
else:
return render_template('Index.html', success=success)
return render_template('Index.html')
def home(mysql, request):
    if request.method == 'POST':
        controller.iniciarSesion(mysql, request)
    if controller.usuarioIniciado() and request.method == 'GET':
return controller.mostrarRutinas(mysql, request)
else:
return redirect(url_for('Index'))
def historial_rutina(mysql, request):
    if controller.usuarioIniciado() and request.method == 'GET':
return controller.mostrar_historial_rutina(mysql, request)
else:
return redirect(url_for('Index'))
def historial_usuario(mysql, request):
    if controller.usuarioIniciado() and request.method == 'GET':
return controller.mostrar_historial_usuario(mysql, request)
else:
return redirect(url_for('Index'))
def perfil(mysql, request):
    if controller.usuarioIniciado() and request.method == 'GET':
        success = request.args.get('success')
        usuario = controller.datosUsuario(mysql, request)
        imc = functions.IMC(usuario[8], usuario[7])
        m_basal = controller.calcular_metabolismo_basal(mysql, usuario[7], usuario[8])
        return render_template('perfil.html', success=success, usuario=usuario, imc=imc, evaluacion=functions.evaluarIMC(imc), pg=functions.porcentajeGrasa(usuario[5], usuario[9], usuario[10], usuario[7], usuario[11]), m_basal=m_basal)
else:
return redirect(url_for('Index'))
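# --- Editor's sketch (hedged): functions.IMC() used above is not shown in
# this file; by its name it is presumably the standard body-mass index,
# weight / height**2. A self-contained, hypothetical stand-in:
#
#   def imc_ejemplo(peso_kg, estatura_m):
#       return peso_kg / (estatura_m ** 2)
#
#   imc_ejemplo(70, 1.75)  # -> ~22.86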
def ActualizarPerfil(mysql, request):
    actualize_form = forms.PerfilForm(request.form)
    if request.method == 'POST' and controller.usuarioIniciado():
if actualize_form.validate():
return controller.actualizar_perfil(mysql, request)
else:
flash("Alguno de los datos es incorrecto")
return redirect(url_for('actualizar_perfil', success=False))
else:
        if request.method == 'GET' and controller.usuarioIniciado():
datos=controller.formulario_perfil(mysql)
return render_template('actualizar_perfil.html', form=actualize_form, datos=datos)
return redirect(url_for('perfil'))
def administracionRutinas(mysql, request):
if controller.usuarioIniciado():
return render_template('administracion_rutinas.html')
else:
return redirect(url_for('Index'))
def crearRutina(mysql, request):
    if request.method == 'POST' and controller.usuarioIniciado():
return controller.agregarRutina(mysql, request)
else:
if controller.rutinaIniciada() and controller.usuarioIniciado():
return controller.rutinaEnCurso(mysql, request)
if controller.usuarioIniciado():
return redirect(url_for('adm_rutinas'))
else:
return redirect(url_for('Index'))
def registrarEjerciciosRutina(mysql, request):
if request.method == 'POST':
return controller.registrarEjerciciosRutina(mysql, request)
return redirect(url_for('adm_rutinas'))
def modificarRutina(mysql, request):
if controller.usuarioIniciado():
rutinas=controller.rutinasUsuario(mysql)
rutinaEjercicios=controller.rutinaEjercicios(mysql)
datosEjer=controller.datosEjercicios(mysql)
return render_template('modify_rutina.html', rutinas=rutinas , ejercicios=datosEjer, rutinaEjer=rutinaEjercicios)
else:
return redirect(url_for('Index'))
def registrarModiciaciones(mysql, request):
if request.method == 'POST':
return controller.registrarModificaciones(mysql, request)
return redirect(url_for('adm_rutinas'))
def eliminarRutina(mysql,request):
if controller.usuarioIniciado():
rutinas=controller.rutinasUsuario(mysql)
rutinaEjercicios=controller.rutinaEjercicios(mysql)
return render_template('delete_rutina.html', rutinas=rutinas , rutinaEjer=rutinaEjercicios)
else:
return redirect(url_for('Index'))
def registrarEliminacion(mysql, request):
    if request.method == 'POST' and controller.usuarioIniciado():
return controller.registrarEliminacion(mysql, request)
else:
return redirect(url_for('Index'))
def registrarEjercicios(mysql, request):
if request.method == 'POST':
return controller.registrarEjercicio(mysql, request)
    return redirect(url_for('ejercicios')) | [((182, 214), 'forms.RegisterForm', 'forms.RegisterForm', (['request.form'], {}), '(request.form)\n', (200, 214), False, 'import forms, functions\n'), ((355, 406), 'flask.render_template', 'render_template', (['"""register.html"""'], {'form': 'registerForm'}), "('register.html', form=registerForm)\n", (370, 406), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((780, 809), 'flask.render_template', 'render_template', (['"""Index.html"""'], {}), "('Index.html')\n", (795, 809), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((2218, 2248), 'forms.PerfilForm', 'forms.PerfilForm', (['request.form'], {}), '(request.form)\n', (2234, 2248), False, 'import forms, functions\n'), ((2852, 2880), 'Negocio.controller.usuarioIniciado', 'controller.usuarioIniciado', ([], {}), '()\n', (2878, 2880), False, 'from Negocio import controller\n'), ((3664, 3692), 'Negocio.controller.usuarioIniciado', 'controller.usuarioIniciado', ([], {}), '()\n', (3690, 3692), False, 'from Negocio import controller\n'), ((4247, 4275), 'Negocio.controller.usuarioIniciado', 'controller.usuarioIniciado', ([], {}), '()\n', (4273, 4275), False, 'from Negocio import controller\n'), ((288, 344), 'Negocio.controller.registraUsuario', 'controller.registraUsuario', (['mysql', 'request', 'registerForm'], {}), '(mysql, request, registerForm)\n', (314, 344), False, 'from Negocio import controller\n'), ((479, 506), 'flask.request.args.get', 'request.args.get', (['"""success"""'], {}), "('success')\n", (495, 506), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((887, 927), 'Negocio.controller.iniciarSesion', 'controller.iniciarSesion', (['mysql', 'request'], {}), '(mysql, request)\n', (911, 927), False, 'from Negocio import controller\n'), ((935, 963), 'Negocio.controller.usuarioIniciado', 'controller.usuarioIniciado', ([], {}), '()\n', (961, 963), False, 'from Negocio import controller\n'), ((1006, 1047), 'Negocio.controller.mostrarRutinas', 'controller.mostrarRutinas', (['mysql', 'request'], {}), '(mysql, request)\n', (1031, 1047), False, 'from Negocio import controller\n'), ((1146, 1174), 'Negocio.controller.usuarioIniciado', 'controller.usuarioIniciado', ([], {}), '()\n', (1172, 1174), False, 'from Negocio import controller\n'), ((1217, 1268), 'Negocio.controller.mostrar_historial_rutina', 'controller.mostrar_historial_rutina', (['mysql', 'request'], {}), '(mysql, request)\n', (1252, 1268), False, 'from Negocio import controller\n'), ((1368, 1396), 'Negocio.controller.usuarioIniciado', 'controller.usuarioIniciado', ([], {}), '()\n', (1394, 1396), False, 'from Negocio import controller\n'), ((1439, 1491), 'Negocio.controller.mostrar_historial_usuario', 'controller.mostrar_historial_usuario', (['mysql', 'request'], {}), '(mysql, request)\n', (1475, 1491), False, 'from Negocio import controller\n'), ((1651, 1678), 'flask.request.args.get', 'request.args.get', (['"""success"""'], {}), "('success')\n", (1667, 1678), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((1695, 1734), 'Negocio.controller.datosUsuario', 'controller.datosUsuario', (['mysql', 'request'], {}), '(mysql, request)\n', (1718, 1734), False, 'from Negocio import controller\n'), ((1747, 1784), 'functions.IMC', 'functions.IMC', (['usuario[8]', 'usuario[7]'], {}), '(usuario[8], usuario[7])\n', (1760, 1784), False, 'import forms, functions\n'), ((1802, 1870), 'Negocio.controller.calcular_metabolismo_basal', 'controller.calcular_metabolismo_basal', (['mysql', 'usuario[7]', 'usuario[8]'], {}), '(mysql, usuario[7], usuario[8])\n', (1839, 1870), False, 'from Negocio import controller\n'), ((2897, 2943), 'flask.render_template', 'render_template', (['"""administracion_rutinas.html"""'], {}), "('administracion_rutinas.html')\n", (2912, 2943), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((3064, 3092), 'Negocio.controller.usuarioIniciado', 'controller.usuarioIniciado', ([], {}), '()\n', (3090, 3092), False, 'from Negocio import controller\n'), ((3109, 3149), 'Negocio.controller.agregarRutina', 'controller.agregarRutina', (['mysql', 'request'], {}), '(mysql, request)\n', (3133, 3149), False, 'from Negocio import controller\n'), ((3296, 3324), 'Negocio.controller.usuarioIniciado', 'controller.usuarioIniciado', ([], {}), '()\n', (3322, 3324), False, 'from Negocio import controller\n'), ((3523, 3575), 'Negocio.controller.registrarEjerciciosRutina', 'controller.registrarEjerciciosRutina', (['mysql', 'request'], {}), '(mysql, request)\n', (3559, 3575), False, 'from Negocio import controller\n'), ((3595, 3617), 'flask.url_for', 'url_for', (['"""adm_rutinas"""'], {}), "('adm_rutinas')\n", (3602, 3617), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((3708, 3740), 'Negocio.controller.rutinasUsuario', 'controller.rutinasUsuario', (['mysql'], {}), '(mysql)\n', (3733, 3740), False, 'from Negocio import controller\n'), ((3764, 3798), 'Negocio.controller.rutinaEjercicios', 'controller.rutinaEjercicios', (['mysql'], {}), '(mysql)\n', (3791, 3798), False, 'from Negocio import controller\n'), ((3815, 3848), 'Negocio.controller.datosEjercicios', 'controller.datosEjercicios', (['mysql'], {}), '(mysql)\n', (3841, 3848), False, 'from Negocio import controller\n'), ((3862, 3971), 'flask.render_template', 'render_template', (['"""modify_rutina.html"""'], {'rutinas': 'rutinas', 'ejercicios': 'datosEjer', 'rutinaEjer': 'rutinaEjercicios'}), "('modify_rutina.html', rutinas=rutinas, ejercicios=datosEjer,\n    rutinaEjer=rutinaEjercicios)\n", (3877, 3971), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((4111, 4161), 'Negocio.controller.registrarModificaciones', 'controller.registrarModificaciones', (['mysql', 'request'], {}), '(mysql, request)\n', (4145, 4161), False, 'from Negocio import controller\n'), ((4181, 4203), 'flask.url_for', 'url_for', (['"""adm_rutinas"""'], {}), "('adm_rutinas')\n", (4188, 4203), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((4291, 4323), 'Negocio.controller.rutinasUsuario', 'controller.rutinasUsuario', (['mysql'], {}), '(mysql)\n', (4316, 4323), False, 'from Negocio import controller\n'), ((4347, 4381), 'Negocio.controller.rutinaEjercicios', 'controller.rutinaEjercicios', (['mysql'], {}), '(mysql)\n', (4374, 4381), False, 'from Negocio import controller\n'), ((4395, 4483), 'flask.render_template', 'render_template', (['"""delete_rutina.html"""'], {'rutinas': 'rutinas', 'rutinaEjer': 'rutinaEjercicios'}), "('delete_rutina.html', rutinas=rutinas, rutinaEjer=\n    rutinaEjercicios)\n", (4410, 4483), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((4606, 4634), 'Negocio.controller.usuarioIniciado', 'controller.usuarioIniciado', ([], {}), '()\n', (4632, 4634), False, 'from Negocio import controller\n'), ((4651, 4698), 'Negocio.controller.registrarEliminacion', 'controller.registrarEliminacion', (['mysql', 'request'], {}), '(mysql, request)\n', (4682, 4698), False, 'from Negocio import controller\n'), ((4838, 4883), 'Negocio.controller.registrarEjercicio', 'controller.registrarEjercicio', (['mysql', 'request'], {}), '(mysql, request)\n', (4867, 4883), False, 'from Negocio import controller\n'), ((4903, 4924), 'flask.url_for', 'url_for', (['"""ejercicios"""'], {}), "('ejercicios')\n", (4910, 4924), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((543, 571), 'Negocio.controller.usuarioIniciado', 'controller.usuarioIniciado', ([], {}), '()\n', (569, 571), False, 'from Negocio import controller\n'), ((723, 769), 'flask.render_template', 'render_template', (['"""Index.html"""'], {'success': 'success'}), "('Index.html', success=success)\n", (738, 769), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((1082, 1098), 'flask.url_for', 'url_for', (['"""Index"""'], {}), "('Index')\n", (1089, 1098), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((1303, 1319), 'flask.url_for', 'url_for', (['"""Index"""'], {}), "('Index')\n", (1310, 1319), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((1526, 1542), 'flask.url_for', 'url_for', (['"""Index"""'], {}), "('Index')\n", (1533, 1542), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((2142, 2158), 'flask.url_for', 'url_for', (['"""Index"""'], {}), "('Index')\n", (2149, 2158), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((2364, 2408), 'Negocio.controller.actualizar_perfil', 'controller.actualizar_perfil', (['mysql', 'request'], {}), '(mysql, request)\n', (2392, 2408), False, 'from Negocio import controller\n'), ((2430, 2472), 'flask.flash', 'flash', (['"""Alguno de los datos es incorrecto"""'], {}), "('Alguno de los datos es incorrecto')\n", (2435, 2472), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((2632, 2667), 'Negocio.controller.formulario_perfil', 'controller.formulario_perfil', (['mysql'], {}), '(mysql)\n', (2660, 2667), False, 'from Negocio import controller\n'), ((2684, 2759), 'flask.render_template', 'render_template', (['"""actualizar_perfil.html"""'], {'form': 'actualize_form', 'datos': 'datos'}), "('actualizar_perfil.html', form=actualize_form, datos=datos)\n", (2699, 2759), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((2782, 2799), 'flask.url_for', 'url_for', (['"""perfil"""'], {}), "('perfil')\n", (2789, 2799), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((2978, 2994), 'flask.url_for', 'url_for', (['"""Index"""'], {}), "('Index')\n", (2985, 2994), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((3168, 3195), 'Negocio.controller.rutinaIniciada', 'controller.rutinaIniciada', ([], {}), '()\n', (3193, 3195), False, 'from Negocio import controller\n'), ((3200, 3228), 'Negocio.controller.usuarioIniciado', 'controller.usuarioIniciado', ([], {}), '()\n', (3226, 3228), False, 'from Negocio import controller\n'), ((3246, 3286), 'Negocio.controller.rutinaEnCurso', 'controller.rutinaEnCurso', (['mysql', 'request'], {}), '(mysql, request)\n', (3270, 3286), False, 'from Negocio import controller\n'), ((4003, 4019), 'flask.url_for', 'url_for', (['"""Index"""'], {}), "('Index')\n", (4010, 4019), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((4511, 4527), 'flask.url_for', 'url_for', (['"""Index"""'], {}), "('Index')\n", (4518, 4527), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((4733, 4749), 'flask.url_for', 'url_for', (['"""Index"""'], {}), "('Index')\n", (4740, 4749), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((655, 684), 'flask.render_template', 'render_template', (['"""Index.html"""'], {}), "('Index.html')\n", (670, 684), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((1971, 1996), 'functions.evaluarIMC', 'functions.evaluarIMC', (['imc'], {}), '(imc)\n', (1991, 1996), False, 'import forms, functions\n'), ((2001, 2092), 'functions.porcentajeGrasa', 'functions.porcentajeGrasa', (['usuario[5]', 'usuario[9]', 'usuario[10]', 'usuario[7]', 'usuario[11]'], {}), '(usuario[5], usuario[9], usuario[10], usuario[7],\n    usuario[11])\n', (2026, 2092), False, 'import forms, functions\n'), ((2498, 2541), 'flask.url_for', 'url_for', (['"""actualizar_perfil"""'], {'success': '(False)'}), "('actualizar_perfil', success=False)\n", (2505, 2541), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((3351, 3373), 'flask.url_for', 'url_for', (['"""adm_rutinas"""'], {}), "('adm_rutinas')\n", (3358, 3373), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((3412, 3428), 'flask.url_for', 'url_for', (['"""Index"""'], {}), "('Index')\n", (3419, 3428), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((604, 619), 'flask.url_for', 'url_for', (['"""home"""'], {}), "('home')\n", (611, 619), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n')] |
heartexlabs/label-studio-evalme | evalme/tests/test_old_format.py | 48f7a5226346b6e074edb4717b84122cc089bc7a | from evalme.matcher import Matcher
def test_old_format_agreement_matrix():
m = Matcher(new_format=False)
m.load(r"./tests/test_data/test_old_format.json")
matrix = m.get_annotations_agreement()
assert matrix is not None
assert matrix > 0
def test_old_format_load():
m = Matcher(new_format=False)
m.load(r"./tests/test_data/test_old_format.json")
assert m._new_format is False
assert m._result_name == 'completions'
def test_new_format_load():
m = Matcher(new_format=False)
m.load(r"./tests/test_data/test_bbox.json")
assert m._new_format is True
assert m._result_name == 'annotations'
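# --- Editor's note (hedged, derived only from the assertions above):
# Matcher.load() appears to auto-detect the export format of the loaded
# file -- an old-format file keeps 'completions' as the result key, while
# a new-format file flips _new_format to True and switches the result key
# to 'annotations', even when the instance was built with new_format=False.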
| [((85, 110), 'evalme.matcher.Matcher', 'Matcher', ([], {'new_format': '(False)'}), '(new_format=False)\n', (92, 110), False, 'from evalme.matcher import Matcher\n'), ((299, 324), 'evalme.matcher.Matcher', 'Matcher', ([], {'new_format': '(False)'}), '(new_format=False)\n', (306, 324), False, 'from evalme.matcher import Matcher\n'), ((494, 519), 'evalme.matcher.Matcher', 'Matcher', ([], {'new_format': '(False)'}), '(new_format=False)\n', (501, 519), False, 'from evalme.matcher import Matcher\n')] |
wombat70/behave | behave/runner.py | c54493b0531795d946ac6754bfc643248cf3056a | # -*- coding: UTF-8 -*-
"""
This module provides Runner class to run behave feature files (or model elements).
"""
from __future__ import absolute_import, print_function, with_statement
import contextlib
import os.path
import sys
import warnings
import weakref
import six
from behave._types import ExceptionUtil
from behave.capture import CaptureController
from behave.exception import ConfigError
from behave.formatter._registry import make_formatters
from behave.runner_util import \
collect_feature_locations, parse_features, \
exec_file, load_step_modules, PathManager
from behave.step_registry import registry as the_step_registry
from enum import Enum
if six.PY2:
# -- USE PYTHON3 BACKPORT: With unicode traceback support.
import traceback2 as traceback
else:
import traceback
class CleanupError(RuntimeError):
pass
class ContextMaskWarning(UserWarning):
"""Raised if a context variable is being overwritten in some situations.
If the variable was originally set by user code then this will be raised if
*behave* overwrites the value.
If the variable was originally set by *behave* then this will be raised if
user code overwrites the value.
"""
pass
class ContextMode(Enum):
"""Used to distinguish between the two usage modes while using the context:
* BEHAVE: Indicates "behave" (internal) mode
* USER: Indicates "user" mode (in steps, hooks, fixtures, ...)
"""
BEHAVE = 1
USER = 2
class Context(object):
"""Hold contextual information during the running of tests.
This object is a place to store information related to the tests you're
running. You may add arbitrary attributes to it of whatever value you need.
During the running of your tests the object will have additional layers of
namespace added and removed automatically. There is a "root" namespace and
additional namespaces for features and scenarios.
Certain names are used by *behave*; be wary of using them yourself as
*behave* may overwrite the value you set. These names are:
.. attribute:: feature
This is set when we start testing a new feature and holds a
:class:`~behave.model.Feature`. It will not be present outside of a
feature (i.e. within the scope of the environment before_all and
after_all).
.. attribute:: scenario
This is set when we start testing a new scenario (including the
individual scenarios of a scenario outline) and holds a
:class:`~behave.model.Scenario`. It will not be present outside of the
scope of a scenario.
.. attribute:: tags
The current set of active tags (as a Python set containing instances of
:class:`~behave.model.Tag` which are basically just glorified strings)
combined from the feature and scenario. This attribute will not be
present outside of a feature scope.
.. attribute:: aborted
This is set to true in the root namespace when the user aborts a test run
(:exc:`KeyboardInterrupt` exception). Initially: False.
.. attribute:: failed
This is set to true in the root namespace as soon as a step fails.
Initially: False.
.. attribute:: table
This is set at the step level and holds any :class:`~behave.model.Table`
associated with the step.
.. attribute:: text
This is set at the step level and holds any multiline text associated
with the step.
.. attribute:: config
The configuration of *behave* as determined by configuration files and
command-line options. The attributes of this object are the same as the
`configuration file section names`_.
.. attribute:: active_outline
This is set for each scenario in a scenario outline and references the
:class:`~behave.model.Row` that is active for the current scenario. It is
present mostly for debugging, but may be useful otherwise.
.. attribute:: log_capture
If logging capture is enabled then this attribute contains the captured
logging as an instance of :class:`~behave.log_capture.LoggingCapture`.
It is not present if logging is not being captured.
.. attribute:: stdout_capture
If stdout capture is enabled then this attribute contains the captured
output as a StringIO instance. It is not present if stdout is not being
captured.
.. attribute:: stderr_capture
If stderr capture is enabled then this attribute contains the captured
output as a StringIO instance. It is not present if stderr is not being
captured.
A :class:`behave.runner.ContextMaskWarning` warning will be raised if user
code attempts to overwrite one of these variables, or if *behave* itself
tries to overwrite a user-set variable.
You may use the "in" operator to test whether a certain value has been set
on the context, for example:
"feature" in context
checks whether there is a "feature" value in the context.
Values may be deleted from the context using "del" but only at the level
they are set. You can't delete a value set by a feature at a scenario level
but you can delete a value set for a scenario in that scenario.
.. _`configuration file section names`: behave.html#configuration-files
"""
# pylint: disable=too-many-instance-attributes
FAIL_ON_CLEANUP_ERRORS = True
def __init__(self, runner):
self._runner = weakref.proxy(runner)
self._config = runner.config
d = self._root = {
"aborted": False,
"failed": False,
"config": self._config,
"active_outline": None,
"cleanup_errors": 0,
"@cleanups": [], # -- REQUIRED-BY: before_all() hook
"@layer": "testrun",
}
self._stack = [d]
self._record = {}
self._origin = {}
self._mode = ContextMode.BEHAVE
# -- MODEL ENTITY REFERENCES/SUPPORT:
self.feature = None
# DISABLED: self.rule = None
# DISABLED: self.scenario = None
self.text = None
self.table = None
# -- RUNTIME SUPPORT:
self.stdout_capture = None
self.stderr_capture = None
self.log_capture = None
self.fail_on_cleanup_errors = self.FAIL_ON_CLEANUP_ERRORS
@staticmethod
def ignore_cleanup_error(context, cleanup_func, exception):
pass
@staticmethod
def print_cleanup_error(context, cleanup_func, exception):
cleanup_func_name = getattr(cleanup_func, "__name__", None)
if not cleanup_func_name:
cleanup_func_name = "%r" % cleanup_func
print(u"CLEANUP-ERROR in %s: %s: %s" %
(cleanup_func_name, exception.__class__.__name__, exception))
traceback.print_exc(file=sys.stdout)
# MAYBE: context._dump(pretty=True, prefix="Context: ")
# -- MARK: testrun as FAILED
# context._set_root_attribute("failed", True)
def _do_cleanups(self):
"""Execute optional cleanup functions when stack frame is popped.
A user can add a user-specified handler for cleanup errors.
.. code-block:: python
# -- FILE: features/environment.py
def cleanup_database(database):
pass
def handle_cleanup_error(context, cleanup_func, exception):
pass
def before_all(context):
context.on_cleanup_error = handle_cleanup_error
context.add_cleanup(cleanup_database, the_database)
"""
# -- BEST-EFFORT ALGORITHM: Tries to perform all cleanups.
assert self._stack, "REQUIRE: Non-empty stack"
current_layer = self._stack[0]
cleanup_funcs = current_layer.get("@cleanups", [])
on_cleanup_error = getattr(self, "on_cleanup_error",
self.print_cleanup_error)
context = self
cleanup_errors = []
for cleanup_func in reversed(cleanup_funcs):
try:
cleanup_func()
except Exception as e: # pylint: disable=broad-except
# pylint: disable=protected-access
context._root["cleanup_errors"] += 1
cleanup_errors.append(sys.exc_info())
on_cleanup_error(context, cleanup_func, e)
if self.fail_on_cleanup_errors and cleanup_errors:
first_cleanup_erro_info = cleanup_errors[0]
del cleanup_errors # -- ENSURE: Release other exception frames.
six.reraise(*first_cleanup_erro_info)
def _push(self, layer_name=None):
"""Push a new layer on the context stack.
HINT: Use layer_name values: "scenario", "feature", "testrun".
:param layer_name: Layer name to use (or None).
"""
initial_data = {"@cleanups": []}
if layer_name:
initial_data["@layer"] = layer_name
self._stack.insert(0, initial_data)
def _pop(self):
"""Pop the current layer from the context stack.
Performs any pending cleanups, registered for this layer.
"""
try:
self._do_cleanups()
finally:
# -- ENSURE: Layer is removed even if cleanup-errors occur.
self._stack.pop(0)
def _use_with_behave_mode(self):
"""Provides a context manager for using the context in BEHAVE mode."""
return use_context_with_mode(self, ContextMode.BEHAVE)
def use_with_user_mode(self):
"""Provides a context manager for using the context in USER mode."""
return use_context_with_mode(self, ContextMode.USER)
def user_mode(self):
warnings.warn("Use 'use_with_user_mode()' instead",
PendingDeprecationWarning, stacklevel=2)
return self.use_with_user_mode()
def _set_root_attribute(self, attr, value):
for frame in self.__dict__["_stack"]:
if frame is self.__dict__["_root"]:
continue
if attr in frame:
record = self.__dict__["_record"][attr]
params = {
"attr": attr,
"filename": record[0],
"line": record[1],
"function": record[3],
}
self._emit_warning(attr, params)
self.__dict__["_root"][attr] = value
if attr not in self._origin:
self._origin[attr] = self._mode
def _emit_warning(self, attr, params):
msg = ""
if self._mode is ContextMode.BEHAVE and self._origin[attr] is not ContextMode.BEHAVE:
msg = "behave runner is masking context attribute '%(attr)s' " \
"originally set in %(function)s (%(filename)s:%(line)s)"
elif self._mode is ContextMode.USER:
if self._origin[attr] is not ContextMode.USER:
msg = "user code is masking context attribute '%(attr)s' " \
"originally set by behave"
elif self._config.verbose:
msg = "user code is masking context attribute " \
"'%(attr)s'; see the tutorial for what this means"
if msg:
msg = msg % params
warnings.warn(msg, ContextMaskWarning, stacklevel=3)
def _dump(self, pretty=False, prefix=" "):
for level, frame in enumerate(self._stack):
print("%sLevel %d" % (prefix, level))
if pretty:
for name in sorted(frame.keys()):
value = frame[name]
print("%s %-15s = %r" % (prefix, name, value))
else:
print(prefix + repr(frame))
def __getattr__(self, attr):
if attr[0] == "_":
try:
return self.__dict__[attr]
except KeyError:
raise AttributeError(attr)
for frame in self._stack:
if attr in frame:
return frame[attr]
msg = "'{0}' object has no attribute '{1}'"
msg = msg.format(self.__class__.__name__, attr)
raise AttributeError(msg)
def __setattr__(self, attr, value):
if attr[0] == "_":
self.__dict__[attr] = value
return
for frame in self._stack[1:]:
if attr in frame:
record = self._record[attr]
params = {
"attr": attr,
"filename": record[0],
"line": record[1],
"function": record[3],
}
self._emit_warning(attr, params)
stack_limit = 2
if six.PY2:
stack_limit += 1 # Due to traceback2 usage.
stack_frame = traceback.extract_stack(limit=stack_limit)[0]
self._record[attr] = stack_frame
frame = self._stack[0]
frame[attr] = value
if attr not in self._origin:
self._origin[attr] = self._mode
def __delattr__(self, attr):
frame = self._stack[0]
if attr in frame:
del frame[attr]
del self._record[attr]
else:
msg = "'{0}' object has no attribute '{1}' at the current level"
msg = msg.format(self.__class__.__name__, attr)
raise AttributeError(msg)
def __contains__(self, attr):
if attr[0] == "_":
return attr in self.__dict__
for frame in self._stack:
if attr in frame:
return True
return False
def execute_steps(self, steps_text):
"""The steps identified in the "steps" text string will be parsed and
executed in turn just as though they were defined in a feature file.
If the execute_steps call fails (either through error or failure
assertion) then the step invoking it will need to catch the resulting
exceptions.
:param steps_text: Text with the Gherkin steps to execute (as string).
:returns: True, if the steps executed successfully.
:raises: AssertionError, if a step failure occurs.
:raises: ValueError, if invoked without a feature context.
"""
assert isinstance(steps_text, six.text_type), "Steps must be unicode."
if not self.feature:
raise ValueError("execute_steps() called outside of feature")
# -- PREPARE: Save original context data for current step.
# Needed if step definition that called this method uses .table/.text
original_table = getattr(self, "table", None)
original_text = getattr(self, "text", None)
self.feature.parser.variant = "steps"
steps = self.feature.parser.parse_steps(steps_text)
with self._use_with_behave_mode():
for step in steps:
passed = step.run(self._runner, quiet=True, capture=False)
if not passed:
# -- ISSUE #96: Provide more substep info to diagnose problem.
step_line = u"%s %s" % (step.keyword, step.name)
message = "%s SUB-STEP: %s" % \
(step.status.name.upper(), step_line)
if step.error_message:
message += "\nSubstep info: %s\n" % step.error_message
message += u"Traceback (of failed substep):\n"
message += u"".join(traceback.format_tb(step.exc_traceback))
# message += u"\nTraceback (of context.execute_steps()):"
assert False, message
# -- FINALLY: Restore original context data for current step.
self.table = original_table
self.text = original_text
return True
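    # --- Editor's usage sketch (hedged): execute_steps() is intended to be
    # called from inside a step implementation, e.g.:
    #
    #   @given(u'the user is logged in')
    #   def step_impl(context):
    #       context.execute_steps(u'''
    #           When the user opens the login page
    #           And the user submits valid credentials
    #       ''')
    #
    # The step texts are illustrative; any failing substep raises
    # AssertionError with the diagnostic message built above.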
def add_cleanup(self, cleanup_func, *args, **kwargs):
"""Adds a cleanup function that is called when :meth:`Context._pop()`
is called. This is intended for user-cleanups.
:param cleanup_func: Callable function
:param args: Args for cleanup_func() call (optional).
:param kwargs: Kwargs for cleanup_func() call (optional).
"""
# MAYBE:
assert callable(cleanup_func), "REQUIRES: callable(cleanup_func)"
assert self._stack
if args or kwargs:
def internal_cleanup_func():
cleanup_func(*args, **kwargs)
else:
internal_cleanup_func = cleanup_func
current_frame = self._stack[0]
if cleanup_func not in current_frame["@cleanups"]:
# -- AVOID DUPLICATES:
current_frame["@cleanups"].append(internal_cleanup_func)
@contextlib.contextmanager
def use_context_with_mode(context, mode):
"""Switch context to ContextMode.BEHAVE or ContextMode.USER mode.
Provides a context manager for switching between the two context modes.
.. sourcecode:: python
context = Context()
with use_context_with_mode(context, ContextMode.BEHAVE):
... # Do something
# -- POSTCONDITION: Original context._mode is restored.
:param context: Context object to use.
:param mode: Mode to apply to context object.
"""
# pylint: disable=protected-access
assert mode in (ContextMode.BEHAVE, ContextMode.USER)
current_mode = context._mode
try:
context._mode = mode
yield
finally:
# -- RESTORE: Initial current_mode
# Even if an AssertionError/Exception is raised.
context._mode = current_mode
@contextlib.contextmanager
def scoped_context_layer(context, layer_name=None):
"""Provides context manager for context layer (push/do-something/pop cycle).
.. code-block::
with scoped_context_layer(context):
the_fixture = use_fixture(foo, context, name="foo_42")
"""
# pylint: disable=protected-access
try:
context._push(layer_name)
yield context
finally:
context._pop()
def path_getrootdir(path):
"""
Extract rootdir from path in a platform independent way.
POSIX-PATH EXAMPLE:
rootdir = path_getrootdir("/foo/bar/one.feature")
assert rootdir == "/"
WINDOWS-PATH EXAMPLE:
rootdir = path_getrootdir("D:\\foo\\bar\\one.feature")
assert rootdir == r"D:\"
"""
drive, _ = os.path.splitdrive(path)
if drive:
# -- WINDOWS:
return drive + os.path.sep
# -- POSIX:
return os.path.sep
class ModelRunner(object):
"""
Test runner for a behave model (features).
Provides the core functionality of a test runner and
the functional API needed by model elements.
.. attribute:: aborted
This is set to true when the user aborts a test run
(:exc:`KeyboardInterrupt` exception). Initially: False.
Stored as derived attribute in :attr:`Context.aborted`.
"""
# pylint: disable=too-many-instance-attributes
def __init__(self, config, features=None, step_registry=None):
self.config = config
self.features = features or []
self.hooks = {}
self.formatters = []
self.undefined_steps = []
self.step_registry = step_registry
self.capture_controller = CaptureController(config)
self.context = None
self.feature = None
self.hook_failures = 0
# @property
def _get_aborted(self):
value = False
if self.context:
value = self.context.aborted
return value
# @aborted.setter
def _set_aborted(self, value):
# pylint: disable=protected-access
assert self.context, "REQUIRE: context, but context=%r" % self.context
self.context._set_root_attribute("aborted", bool(value))
aborted = property(_get_aborted, _set_aborted,
doc="Indicates that test run is aborted by the user.")
def run_hook(self, name, context, *args):
if not self.config.dry_run and (name in self.hooks):
try:
with context.use_with_user_mode():
self.hooks[name](context, *args)
# except KeyboardInterrupt:
# self.aborted = True
# if name not in ("before_all", "after_all"):
# raise
except Exception as e: # pylint: disable=broad-except
# -- HANDLE HOOK ERRORS:
use_traceback = False
if self.config.verbose:
use_traceback = True
ExceptionUtil.set_traceback(e)
extra = u""
if "tag" in name:
extra = "(tag=%s)" % args[0]
error_text = ExceptionUtil.describe(e, use_traceback).rstrip()
error_message = u"HOOK-ERROR in %s%s: %s" % (name, extra, error_text)
print(error_message)
self.hook_failures += 1
if "tag" in name:
# -- SCENARIO or FEATURE
statement = getattr(context, "scenario", context.feature)
elif "all" in name:
# -- ABORT EXECUTION: For before_all/after_all
self.aborted = True
statement = None
else:
# -- CASE: feature, scenario, step
statement = args[0]
if statement:
# -- CASE: feature, scenario, step
statement.hook_failed = True
if statement.error_message:
# -- NOTE: One exception/failure is already stored.
# Append only error message.
                    statement.error_message += u"\n" + error_message
else:
# -- FIRST EXCEPTION/FAILURE:
statement.store_exception_context(e)
statement.error_message = error_message
def setup_capture(self):
if not self.context:
self.context = Context(self)
self.capture_controller.setup_capture(self.context)
def start_capture(self):
self.capture_controller.start_capture()
def stop_capture(self):
self.capture_controller.stop_capture()
def teardown_capture(self):
self.capture_controller.teardown_capture()
def run_model(self, features=None):
# pylint: disable=too-many-branches
if not self.context:
self.context = Context(self)
if self.step_registry is None:
self.step_registry = the_step_registry
if features is None:
features = self.features
# -- ENSURE: context.execute_steps() works in weird cases (hooks, ...)
context = self.context
self.hook_failures = 0
self.setup_capture()
self.run_hook("before_all", context)
run_feature = not self.aborted
failed_count = 0
undefined_steps_initial_size = len(self.undefined_steps)
for feature in features:
if run_feature:
try:
self.feature = feature
for formatter in self.formatters:
formatter.uri(feature.filename)
failed = feature.run(self)
if failed:
failed_count += 1
if self.config.stop or self.aborted:
# -- FAIL-EARLY: After first failure.
run_feature = False
except KeyboardInterrupt:
self.aborted = True
failed_count += 1
run_feature = False
# -- ALWAYS: Report run/not-run feature to reporters.
# REQUIRED-FOR: Summary to keep track of untested features.
for reporter in self.config.reporters:
reporter.feature(feature)
# -- AFTER-ALL:
# pylint: disable=protected-access, broad-except
cleanups_failed = False
self.run_hook("after_all", self.context)
try:
self.context._do_cleanups() # Without dropping the last context layer.
except Exception:
cleanups_failed = True
if self.aborted:
print("\nABORTED: By user.")
for formatter in self.formatters:
formatter.close()
for reporter in self.config.reporters:
reporter.end()
failed = ((failed_count > 0) or self.aborted or (self.hook_failures > 0)
or (len(self.undefined_steps) > undefined_steps_initial_size)
or cleanups_failed)
# XXX-MAYBE: or context.failed)
return failed
def run(self):
"""
Implements the run method by running the model.
"""
self.context = Context(self)
return self.run_model()
class Runner(ModelRunner):
"""
Standard test runner for behave:
* setup paths
* loads environment hooks
* loads step definitions
* select feature files, parses them and creates model (elements)
"""
def __init__(self, config):
super(Runner, self).__init__(config)
self.path_manager = PathManager()
self.base_dir = None
def setup_paths(self):
# pylint: disable=too-many-branches, too-many-statements
if self.config.paths:
if self.config.verbose:
print("Supplied path:", \
", ".join('"%s"' % path for path in self.config.paths))
first_path = self.config.paths[0]
if hasattr(first_path, "filename"):
# -- BETTER: isinstance(first_path, FileLocation):
first_path = first_path.filename
base_dir = first_path
if base_dir.startswith("@"):
# -- USE: behave @features.txt
base_dir = base_dir[1:]
file_locations = self.feature_locations()
if file_locations:
base_dir = os.path.dirname(file_locations[0].filename)
base_dir = os.path.abspath(base_dir)
# supplied path might be to a feature file
if os.path.isfile(base_dir):
if self.config.verbose:
print("Primary path is to a file so using its directory")
base_dir = os.path.dirname(base_dir)
else:
if self.config.verbose:
print('Using default path "./features"')
base_dir = os.path.abspath("features")
# Get the root. This is not guaranteed to be "/" because Windows.
root_dir = path_getrootdir(base_dir)
new_base_dir = base_dir
steps_dir = self.config.steps_dir
environment_file = self.config.environment_file
while True:
if self.config.verbose:
print("Trying base directory:", new_base_dir)
if os.path.isdir(os.path.join(new_base_dir, steps_dir)):
break
if os.path.isfile(os.path.join(new_base_dir, environment_file)):
break
if new_base_dir == root_dir:
break
new_base_dir = os.path.dirname(new_base_dir)
if new_base_dir == root_dir:
if self.config.verbose:
if not self.config.paths:
print('ERROR: Could not find "%s" directory. '\
'Please specify where to find your features.' % \
steps_dir)
else:
print('ERROR: Could not find "%s" directory in your '\
'specified path "%s"' % (steps_dir, base_dir))
message = 'No %s directory in %r' % (steps_dir, base_dir)
raise ConfigError(message)
base_dir = new_base_dir
self.config.base_dir = base_dir
for dirpath, dirnames, filenames in os.walk(base_dir, followlinks=True):
if [fn for fn in filenames if fn.endswith(".feature")]:
break
else:
if self.config.verbose:
if not self.config.paths:
print('ERROR: Could not find any "<name>.feature" files. '\
'Please specify where to find your features.')
else:
print('ERROR: Could not find any "<name>.feature" files '\
'in your specified path "%s"' % base_dir)
raise ConfigError('No feature files in %r' % base_dir)
self.base_dir = base_dir
self.path_manager.add(base_dir)
if not self.config.paths:
self.config.paths = [base_dir]
if base_dir != os.getcwd():
self.path_manager.add(os.getcwd())
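    # --- Editor's illustration (hedged): setup_paths() above walks upwards
    # from the supplied (or default "features") directory until it finds one
    # containing either the steps directory or the environment file, e.g. a
    # hypothetical layout such as:
    #
    #   features/            <- base_dir settles here
    #       environment.py
    #       steps/
    #           web_steps.py
    #       login.feature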
def before_all_default_hook(self, context):
"""
Default implementation for :func:`before_all()` hook.
Setup the logging subsystem based on the configuration data.
"""
# pylint: disable=no-self-use
context.config.setup_logging()
def load_hooks(self, filename=None):
filename = filename or self.config.environment_file
hooks_path = os.path.join(self.base_dir, filename)
if os.path.exists(hooks_path):
exec_file(hooks_path, self.hooks)
if "before_all" not in self.hooks:
self.hooks["before_all"] = self.before_all_default_hook
def load_step_definitions(self, extra_step_paths=None):
if extra_step_paths is None:
extra_step_paths = []
# -- Allow steps to import other stuff from the steps dir
# NOTE: Default matcher can be overridden in "environment.py" hook.
steps_dir = os.path.join(self.base_dir, self.config.steps_dir)
step_paths = [steps_dir] + list(extra_step_paths)
load_step_modules(step_paths)
def feature_locations(self):
return collect_feature_locations(self.config.paths)
def run(self):
with self.path_manager:
self.setup_paths()
return self.run_with_paths()
def run_with_paths(self):
self.context = Context(self)
self.load_hooks()
self.load_step_definitions()
# -- ENSURE: context.execute_steps() works in weird cases (hooks, ...)
# self.setup_capture()
# self.run_hook("before_all", self.context)
# -- STEP: Parse all feature files (by using their file location).
feature_locations = [filename for filename in self.feature_locations()
if not self.config.exclude(filename)]
features = parse_features(feature_locations, language=self.config.lang)
self.features.extend(features)
# -- STEP: Run all features.
stream_openers = self.config.outputs
self.formatters = make_formatters(self.config, stream_openers)
return self.run_model()
 | [((5457, 5478), 'weakref.proxy', 'weakref.proxy', (['runner'], {}), '(runner)\n', (5470, 5478), False, 'import weakref\n'), ((6802, 6838), 'traceback.print_exc', 'traceback.print_exc', ([], {'file': 'sys.stdout'}), '(file=sys.stdout)\n', (6821, 6838), False, 'import traceback\n'), ((9702, 9798), 'warnings.warn', 'warnings.warn', (['"Use \'use_with_user_mode()\' instead"', 'PendingDeprecationWarning'], {'stacklevel': '(2)'}), '("Use \'use_with_user_mode()\' instead",\n    PendingDeprecationWarning, stacklevel=2)\n', (9715, 9798), False, 'import warnings\n'), ((19252, 19277), 'behave.capture.CaptureController', 'CaptureController', (['config'], {}), '(config)\n', (19269, 19277), False, 'from behave.capture import CaptureController\n'), ((25266, 25279), 'behave.runner_util.PathManager', 'PathManager', ([], {}), '()\n', (25277, 25279), False, 'from behave.runner_util import collect_feature_locations, parse_features, exec_file, load_step_modules, PathManager\n'), ((29873, 29902), 'behave.runner_util.load_step_modules', 'load_step_modules', (['step_paths'], {}), '(step_paths)\n', (29890, 29902), False, 'from behave.runner_util import collect_feature_locations, parse_features, exec_file, load_step_modules, PathManager\n'), ((29952, 29996), 'behave.runner_util.collect_feature_locations', 'collect_feature_locations', (['self.config.paths'], {}), '(self.config.paths)\n', (29977, 29996), False, 'from behave.runner_util import collect_feature_locations, parse_features, exec_file, load_step_modules, PathManager\n'), ((30656, 30716), 'behave.runner_util.parse_features', 'parse_features', (['feature_locations'], {'language': 'self.config.lang'}), '(feature_locations, language=self.config.lang)\n', (30670, 30716), False, 'from behave.runner_util import collect_feature_locations, parse_features, exec_file, load_step_modules, PathManager\n'), ((30865, 30909), 'behave.formatter._registry.make_formatters', 'make_formatters', (['self.config', 'stream_openers'], {}), '(self.config, stream_openers)\n', (30880, 30909), False, 'from behave.formatter._registry import make_formatters\n'), ((8568, 8605), 'six.reraise', 'six.reraise', (['*first_cleanup_erro_info'], {}), '(*first_cleanup_erro_info)\n', (8579, 8605), False, 'import six\n'), ((11264, 11316), 'warnings.warn', 'warnings.warn', (['msg', 'ContextMaskWarning'], {'stacklevel': '(3)'}), '(msg, ContextMaskWarning, stacklevel=3)\n', (11277, 11316), False, 'import warnings\n'), ((12766, 12808), 'traceback.extract_stack', 'traceback.extract_stack', ([], {'limit': 'stack_limit'}), '(limit=stack_limit)\n', (12789, 12808), False, 'import traceback\n'), ((27845, 27865), 'behave.exception.ConfigError', 'ConfigError', (['message'], {}), '(message)\n', (27856, 27865), False, 'from behave.exception import ConfigError\n'), ((28539, 28587), 'behave.exception.ConfigError', 'ConfigError', (["('No feature files in %r' % base_dir)"], {}), "('No feature files in %r' % base_dir)\n", (28550, 28587), False, 'from behave.exception import ConfigError\n'), ((29316, 29349), 'behave.runner_util.exec_file', 'exec_file', (['hooks_path', 'self.hooks'], {}), '(hooks_path, self.hooks)\n', (29325, 29349), False, 'from behave.runner_util import collect_feature_locations, parse_features, exec_file, load_step_modules, PathManager\n'), ((8288, 8302), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (8300, 8302), False, 'import sys\n'), ((20539, 20569), 'behave._types.ExceptionUtil.set_traceback', 'ExceptionUtil.set_traceback', (['e'], {}), '(e)\n', (20566, 20569), False, 'from behave._types import ExceptionUtil\n'), ((15430, 15469), 'traceback.format_tb', 'traceback.format_tb', (['step.exc_traceback'], {}), '(step.exc_traceback)\n', (15449, 15469), False, 'import traceback\n'), ((20711, 20751), 'behave._types.ExceptionUtil.describe', 'ExceptionUtil.describe', (['e', 'use_traceback'], {}), '(e, use_traceback)\n', (20733, 20751), False, 'from behave._types import ExceptionUtil\n')] |
genfifth/generative-design_Code-Package-Python-Mode | 01_P/P_2_1_1_02/main.py | 93fc8435933aa2e9329de77a1177bb34e63dd1c4 | add_library('pdf')
import random
from datetime import datetime
tileCount = 20
def setup():
global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight
savePDF = False
actStrokeCap = ROUND
actRandomSeed = 0
colorLeft = color(197, 0, 123)
colorRight = color(87, 35, 129)
alphaLeft = 100
alphaRight = 100
def draw():
global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight
if savePDF:
beginRecord(PDF, datetime.now().strftime("%Y%m%d%H%M%S")+".pdf")
background(255)
smooth()
noFill()
strokeCap(actStrokeCap)
random.seed(actRandomSeed)
for gridY in range(tileCount):
for gridX in range(tileCount):
posX = int(width/tileCount*gridX)
posY = int(height/tileCount*gridY)
toggle = random.randint(0,1)
if (toggle == 0):
strokeWeight(mouseX/20)
stroke(colorLeft, alphaLeft)
line(posX, posY, posX+width/tileCount, posY+height/tileCount)
elif (toggle == 1):
strokeWeight(mouseY/20)
stroke(colorRight, alphaRight)
line(posX, posY+width/tileCount, posX+height/tileCount, posY)
if (savePDF):
savePDF = False
endRecord()
def mousePressed():
global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight
actRandomSeed = random.randint(0, 100000)
def keyReleased():
global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight
if (key=='s' or key=='S'):
saveFrame(datetime.now().strftime("%Y%m%d%H%M%S")+".png")
if (key=='p' or key=='P'):
savePDF = True
if key == "1":
actStrokeCap = ROUND
elif key == "2":
actStrokeCap = SQUARE
elif key == "3":
actStrokeCap = PROJECT
elif (key == '4'):
if (colorLeft == color(0)):
colorLeft = color(323, 100, 77)
else:
colorLeft = color(0)
elif (key == '5'):
if (colorRight == color(0)):
colorRight = color(273, 73, 51)
else:
colorRight = color(0)
elif (key == '6'):
if (alphaLeft == 100):
alphaLeft = 50
else:
alphaLeft = 100
elif (key == '7'):
if (alphaRight == 100):
alphaRight = 50
else:
alphaRight = 100
if (key == '0'):
actStrokeCap = ROUND
colorLeft = color(0)
colorRight = color(0)
alphaLeft = 100
alphaRight = 100
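# Summary comment (added for readability; derived from the handlers above):
#   mouse press       re-seed the random tile pattern
#   mouseX / mouseY   control the stroke weight of the two diagonal types
#   s/S               save a PNG frame
#   p/P               record the next frame as a PDF
#   1 / 2 / 3         stroke cap: ROUND / SQUARE / PROJECT
#   4 / 5             toggle the left/right stroke colour
#   6 / 7             toggle the left/right alpha between 100 and 50
#   0                 reset caps, colours and alpha to the defaults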
| [((648, 674), 'random.seed', 'random.seed', (['actRandomSeed'], {}), '(actRandomSeed)\n', (659, 674), False, 'import random\n'), ((1487, 1512), 'random.randint', 'random.randint', (['(0)', '(100000)'], {}), '(0, 100000)\n', (1501, 1512), False, 'import random\n'), ((865, 885), 'random.randint', 'random.randint', (['(0)', '(1)'], {}), '(0, 1)\n', (879, 885), False, 'import random\n'), ((516, 530), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (528, 530), False, 'from datetime import datetime\n'), ((1677, 1691), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1689, 1691), False, 'from datetime import datetime\n')] |
peiwangdb/dbt | core/dbt/contracts/graph/manifest.py | 30e72bc5e2ae950ddf0a1230b0c6406b889bea1a | import enum
from dataclasses import dataclass, field
from itertools import chain, islice
from mashumaro import DataClassMessagePackMixin
from multiprocessing.synchronize import Lock
from typing import (
Dict, List, Optional, Union, Mapping, MutableMapping, Any, Set, Tuple,
TypeVar, Callable, Iterable, Generic, cast, AbstractSet, ClassVar
)
from typing_extensions import Protocol
from uuid import UUID
from dbt.contracts.graph.compiled import (
CompileResultNode, ManifestNode, NonSourceCompiledNode, GraphMemberNode
)
from dbt.contracts.graph.parsed import (
ParsedMacro, ParsedDocumentation, ParsedNodePatch, ParsedMacroPatch,
ParsedSourceDefinition, ParsedExposure, HasUniqueID,
UnpatchedSourceDefinition, ManifestNodes
)
from dbt.contracts.graph.unparsed import SourcePatch
from dbt.contracts.files import SourceFile, SchemaSourceFile, FileHash, AnySourceFile
from dbt.contracts.util import (
BaseArtifactMetadata, SourceKey, ArtifactMixin, schema_version
)
from dbt.dataclass_schema import dbtClassMixin
from dbt.exceptions import (
CompilationException,
raise_duplicate_resource_name, raise_compiler_error, warn_or_error,
raise_duplicate_patch_name,
raise_duplicate_macro_patch_name, raise_duplicate_source_patch_name,
)
from dbt.helper_types import PathSet
from dbt.logger import GLOBAL_LOGGER as logger
from dbt.node_types import NodeType
from dbt.ui import line_wrap_message
from dbt import flags
from dbt import tracking
import dbt.utils
NodeEdgeMap = Dict[str, List[str]]
PackageName = str
DocName = str
RefName = str
UniqueID = str
def find_unique_id_for_package(storage, key, package: Optional[PackageName]):
if key not in storage:
return None
pkg_dct: Mapping[PackageName, UniqueID] = storage[key]
if package is None:
if not pkg_dct:
return None
else:
return next(iter(pkg_dct.values()))
elif package in pkg_dct:
return pkg_dct[package]
else:
return None
class DocLookup(dbtClassMixin):
def __init__(self, manifest: 'Manifest'):
self.storage: Dict[str, Dict[PackageName, UniqueID]] = {}
self.populate(manifest)
def get_unique_id(self, key, package: Optional[PackageName]):
return find_unique_id_for_package(self.storage, key, package)
def find(self, key, package: Optional[PackageName], manifest: 'Manifest'):
unique_id = self.get_unique_id(key, package)
if unique_id is not None:
return self.perform_lookup(unique_id, manifest)
return None
def add_doc(self, doc: ParsedDocumentation):
if doc.name not in self.storage:
self.storage[doc.name] = {}
self.storage[doc.name][doc.package_name] = doc.unique_id
def populate(self, manifest):
for doc in manifest.docs.values():
self.add_doc(doc)
def perform_lookup(
self, unique_id: UniqueID, manifest
) -> ParsedDocumentation:
if unique_id not in manifest.docs:
raise dbt.exceptions.InternalException(
f'Doc {unique_id} found in cache but not found in manifest'
)
return manifest.docs[unique_id]
class SourceLookup(dbtClassMixin):
def __init__(self, manifest: 'Manifest'):
self.storage: Dict[Tuple[str, str], Dict[PackageName, UniqueID]] = {}
self.populate(manifest)
def get_unique_id(self, key, package: Optional[PackageName]):
return find_unique_id_for_package(self.storage, key, package)
def find(self, key, package: Optional[PackageName], manifest: 'Manifest'):
unique_id = self.get_unique_id(key, package)
if unique_id is not None:
return self.perform_lookup(unique_id, manifest)
return None
def add_source(self, source: ParsedSourceDefinition):
key = (source.source_name, source.name)
if key not in self.storage:
self.storage[key] = {}
self.storage[key][source.package_name] = source.unique_id
def populate(self, manifest):
for source in manifest.sources.values():
if hasattr(source, 'source_name'):
self.add_source(source)
def perform_lookup(
self, unique_id: UniqueID, manifest: 'Manifest'
) -> ParsedSourceDefinition:
if unique_id not in manifest.sources:
raise dbt.exceptions.InternalException(
f'Source {unique_id} found in cache but not found in manifest'
)
return manifest.sources[unique_id]
class RefableLookup(dbtClassMixin):
# model, seed, snapshot
_lookup_types: ClassVar[set] = set(NodeType.refable())
# refables are actually unique, so the Dict[PackageName, UniqueID] will
# only ever have exactly one value, but doing 3 dict lookups instead of 1
# is not a big deal at all and retains consistency
def __init__(self, manifest: 'Manifest'):
self.storage: Dict[str, Dict[PackageName, UniqueID]] = {}
self.populate(manifest)
def get_unique_id(self, key, package: Optional[PackageName]):
return find_unique_id_for_package(self.storage, key, package)
def find(self, key, package: Optional[PackageName], manifest: 'Manifest'):
unique_id = self.get_unique_id(key, package)
if unique_id is not None:
return self.perform_lookup(unique_id, manifest)
return None
def add_node(self, node: ManifestNode):
if node.resource_type in self._lookup_types:
if node.name not in self.storage:
self.storage[node.name] = {}
self.storage[node.name][node.package_name] = node.unique_id
def populate(self, manifest):
for node in manifest.nodes.values():
self.add_node(node)
def perform_lookup(
self, unique_id: UniqueID, manifest
) -> ManifestNode:
if unique_id not in manifest.nodes:
raise dbt.exceptions.InternalException(
f'Node {unique_id} found in cache but not found in manifest'
)
return manifest.nodes[unique_id]
class AnalysisLookup(RefableLookup):
    # NodeType.Analysis is a single (string) enum member; wrap it in a list so
    # the set contains the member itself rather than its characters.
    _lookup_types: ClassVar[set] = set([NodeType.Analysis])
def _search_packages(
current_project: str,
node_package: str,
target_package: Optional[str] = None,
) -> List[Optional[str]]:
if target_package is not None:
return [target_package]
elif current_project == node_package:
return [current_project, None]
else:
return [current_project, node_package, None]
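# Illustrative sketch (added comment; package names are hypothetical): for a
# node defined in "dep_pkg" and resolved from root project "root_pkg",
#   _search_packages('root_pkg', 'dep_pkg')               -> ['root_pkg', 'dep_pkg', None]
#   _search_packages('root_pkg', 'root_pkg')              -> ['root_pkg', None]
#   _search_packages('root_pkg', 'dep_pkg', 'other_pkg')  -> ['other_pkg']
# where a trailing None means "search any package".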
@dataclass
class ManifestMetadata(BaseArtifactMetadata):
"""Metadata for the manifest."""
dbt_schema_version: str = field(
default_factory=lambda: str(WritableManifest.dbt_schema_version)
)
project_id: Optional[str] = field(
default=None,
metadata={
'description': 'A unique identifier for the project',
},
)
user_id: Optional[UUID] = field(
default=None,
metadata={
'description': 'A unique identifier for the user',
},
)
send_anonymous_usage_stats: Optional[bool] = field(
default=None,
metadata=dict(description=(
'Whether dbt is configured to send anonymous usage statistics'
)),
)
adapter_type: Optional[str] = field(
default=None,
metadata=dict(description='The type name of the adapter'),
)
def __post_init__(self):
if tracking.active_user is None:
return
if self.user_id is None:
self.user_id = tracking.active_user.id
if self.send_anonymous_usage_stats is None:
self.send_anonymous_usage_stats = (
not tracking.active_user.do_not_track
)
@classmethod
def default(cls):
return cls(
dbt_schema_version=str(WritableManifest.dbt_schema_version),
)
def _sort_values(dct):
"""Given a dictionary, sort each value. This makes output deterministic,
which helps for tests.
"""
return {k: sorted(v) for k, v in dct.items()}
def build_node_edges(nodes: List[ManifestNode]):
"""Build the forward and backward edges on the given list of ParsedNodes
and return them as two separate dictionaries, each mapping unique IDs to
lists of edges.
"""
backward_edges: Dict[str, List[str]] = {}
# pre-populate the forward edge dict for simplicity
forward_edges: Dict[str, List[str]] = {n.unique_id: [] for n in nodes}
for node in nodes:
backward_edges[node.unique_id] = node.depends_on_nodes[:]
for unique_id in node.depends_on_nodes:
if unique_id in forward_edges.keys():
forward_edges[unique_id].append(node.unique_id)
return _sort_values(forward_edges), _sort_values(backward_edges)
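# Worked example (hypothetical unique IDs, added for clarity): if node "b"
# lists "a" in depends_on_nodes, build_node_edges([a, b]) returns
#   forward_edges  == {'a': ['b'], 'b': []}   # parent -> children
#   backward_edges == {'a': [], 'b': ['a']}   # child  -> parents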
# Build a map of children of macros
def build_macro_edges(nodes: List[Any]):
forward_edges: Dict[str, List[str]] = {
n.unique_id: [] for n in nodes if n.unique_id.startswith('macro') or n.depends_on.macros
}
for node in nodes:
for unique_id in node.depends_on.macros:
if unique_id in forward_edges.keys():
forward_edges[unique_id].append(node.unique_id)
return _sort_values(forward_edges)
def _deepcopy(value):
return value.from_dict(value.to_dict(omit_none=True))
class Locality(enum.IntEnum):
Core = 1
Imported = 2
Root = 3
class Specificity(enum.IntEnum):
Default = 1
Adapter = 2
@dataclass
class MacroCandidate:
locality: Locality
macro: ParsedMacro
def __eq__(self, other: object) -> bool:
if not isinstance(other, MacroCandidate):
return NotImplemented
return self.locality == other.locality
def __lt__(self, other: object) -> bool:
if not isinstance(other, MacroCandidate):
return NotImplemented
if self.locality < other.locality:
return True
if self.locality > other.locality:
return False
return False
@dataclass
class MaterializationCandidate(MacroCandidate):
specificity: Specificity
@classmethod
def from_macro(
cls, candidate: MacroCandidate, specificity: Specificity
) -> 'MaterializationCandidate':
return cls(
locality=candidate.locality,
macro=candidate.macro,
specificity=specificity,
)
def __eq__(self, other: object) -> bool:
if not isinstance(other, MaterializationCandidate):
return NotImplemented
equal = (
self.specificity == other.specificity and
self.locality == other.locality
)
if equal:
raise_compiler_error(
'Found two materializations with the name {} (packages {} and '
'{}). dbt cannot resolve this ambiguity'
.format(self.macro.name, self.macro.package_name,
other.macro.package_name)
)
return equal
def __lt__(self, other: object) -> bool:
if not isinstance(other, MaterializationCandidate):
return NotImplemented
if self.specificity < other.specificity:
return True
if self.specificity > other.specificity:
return False
if self.locality < other.locality:
return True
if self.locality > other.locality:
return False
return False
M = TypeVar('M', bound=MacroCandidate)
class CandidateList(List[M]):
def last(self) -> Optional[ParsedMacro]:
if not self:
return None
self.sort()
return self[-1].macro
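# Note (added comment): CandidateList.last() relies on the candidate ordering
# defined above. sort() is ascending, so the final element is the
# highest-priority candidate: Locality Core < Imported < Root, and for
# materializations Specificity Default < Adapter is compared first.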
def _get_locality(
macro: ParsedMacro, root_project_name: str, internal_packages: Set[str]
) -> Locality:
if macro.package_name == root_project_name:
return Locality.Root
elif macro.package_name in internal_packages:
return Locality.Core
else:
return Locality.Imported
class Searchable(Protocol):
resource_type: NodeType
package_name: str
@property
def search_name(self) -> str:
raise NotImplementedError('search_name not implemented')
N = TypeVar('N', bound=Searchable)
@dataclass
class NameSearcher(Generic[N]):
name: str
package: Optional[str]
nodetypes: List[NodeType]
def _matches(self, model: N) -> bool:
"""Return True if the model matches the given name, package, and type.
If package is None, any package is allowed.
nodetypes should be a container of NodeTypes that implements the 'in'
operator.
"""
if model.resource_type not in self.nodetypes:
return False
if self.name != model.search_name:
return False
return self.package is None or self.package == model.package_name
def search(self, haystack: Iterable[N]) -> Optional[N]:
"""Find an entry in the given iterable by name."""
for model in haystack:
if self._matches(model):
return model
return None
D = TypeVar('D')
@dataclass
class Disabled(Generic[D]):
target: D
MaybeDocumentation = Optional[ParsedDocumentation]
MaybeParsedSource = Optional[Union[
ParsedSourceDefinition,
Disabled[ParsedSourceDefinition],
]]
MaybeNonSource = Optional[Union[
ManifestNode,
Disabled[ManifestNode]
]]
T = TypeVar('T', bound=GraphMemberNode)
def _update_into(dest: MutableMapping[str, T], new_item: T):
"""Update dest to overwrite whatever is at dest[new_item.unique_id] with
    new_item. There must be an existing value to overwrite, and the two nodes
must have the same original file path.
"""
unique_id = new_item.unique_id
if unique_id not in dest:
raise dbt.exceptions.RuntimeException(
f'got an update_{new_item.resource_type} call with an '
f'unrecognized {new_item.resource_type}: {new_item.unique_id}'
)
existing = dest[unique_id]
if new_item.original_file_path != existing.original_file_path:
raise dbt.exceptions.RuntimeException(
f'cannot update a {new_item.resource_type} to have a new file '
f'path!'
)
dest[unique_id] = new_item
# This contains macro methods that are in both the Manifest
# and the MacroManifest
class MacroMethods:
# Just to make mypy happy. There must be a better way.
def __init__(self):
self.macros = []
self.metadata = {}
def find_macro_by_name(
self, name: str, root_project_name: str, package: Optional[str]
) -> Optional[ParsedMacro]:
"""Find a macro in the graph by its name and package name, or None for
any package. The root project name is used to determine priority:
- locally defined macros come first
- then imported macros
- then macros defined in the root project
"""
filter: Optional[Callable[[MacroCandidate], bool]] = None
if package is not None:
def filter(candidate: MacroCandidate) -> bool:
return package == candidate.macro.package_name
candidates: CandidateList = self._find_macros_by_name(
name=name,
root_project_name=root_project_name,
filter=filter,
)
return candidates.last()
def find_generate_macro_by_name(
self, component: str, root_project_name: str
) -> Optional[ParsedMacro]:
"""
The `generate_X_name` macros are similar to regular ones, but ignore
imported packages.
- if there is a `generate_{component}_name` macro in the root
project, return it
- return the `generate_{component}_name` macro from the 'dbt'
internal project
"""
def filter(candidate: MacroCandidate) -> bool:
return candidate.locality != Locality.Imported
candidates: CandidateList = self._find_macros_by_name(
name=f'generate_{component}_name',
root_project_name=root_project_name,
# filter out imported packages
filter=filter,
)
return candidates.last()
def _find_macros_by_name(
self,
name: str,
root_project_name: str,
filter: Optional[Callable[[MacroCandidate], bool]] = None
) -> CandidateList:
"""Find macros by their name.
"""
# avoid an import cycle
from dbt.adapters.factory import get_adapter_package_names
candidates: CandidateList = CandidateList()
packages = set(get_adapter_package_names(self.metadata.adapter_type))
for unique_id, macro in self.macros.items():
if macro.name != name:
continue
candidate = MacroCandidate(
locality=_get_locality(macro, root_project_name, packages),
macro=macro,
)
if filter is None or filter(candidate):
candidates.append(candidate)
return candidates
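    # Usage sketch (hypothetical names, added for illustration): with a macro
    # "my_macro" defined both in an imported package and in the root project,
    # the root project's definition wins because Locality.Root sorts last:
    #   manifest.find_macro_by_name('my_macro', root_project_name='root_pkg',
    #                               package=None)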
@dataclass
class ParsingInfo:
static_analysis_parsed_path_count: int = 0
static_analysis_path_count: int = 0
@dataclass
class ManifestStateCheck(dbtClassMixin):
vars_hash: FileHash = field(default_factory=FileHash.empty)
profile_hash: FileHash = field(default_factory=FileHash.empty)
project_hashes: MutableMapping[str, FileHash] = field(default_factory=dict)
@dataclass
class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
"""The manifest for the full graph, after parsing and during compilation.
"""
# These attributes are both positional and by keyword. If an attribute
# is added it must all be added in the __reduce_ex__ method in the
# args tuple in the right position.
nodes: MutableMapping[str, ManifestNode] = field(default_factory=dict)
sources: MutableMapping[str, ParsedSourceDefinition] = field(default_factory=dict)
macros: MutableMapping[str, ParsedMacro] = field(default_factory=dict)
docs: MutableMapping[str, ParsedDocumentation] = field(default_factory=dict)
exposures: MutableMapping[str, ParsedExposure] = field(default_factory=dict)
selectors: MutableMapping[str, Any] = field(default_factory=dict)
disabled: List[CompileResultNode] = field(default_factory=list)
files: MutableMapping[str, AnySourceFile] = field(default_factory=dict)
metadata: ManifestMetadata = field(default_factory=ManifestMetadata)
flat_graph: Dict[str, Any] = field(default_factory=dict)
state_check: ManifestStateCheck = field(default_factory=ManifestStateCheck)
# Moved from the ParseResult object
source_patches: MutableMapping[SourceKey, SourcePatch] = field(default_factory=dict)
# following is from ParseResult
_disabled: MutableMapping[str, List[CompileResultNode]] = field(default_factory=dict)
_doc_lookup: Optional[DocLookup] = field(
default=None, metadata={'serialize': lambda x: None, 'deserialize': lambda x: None}
)
_source_lookup: Optional[SourceLookup] = field(
default=None, metadata={'serialize': lambda x: None, 'deserialize': lambda x: None}
)
_ref_lookup: Optional[RefableLookup] = field(
default=None, metadata={'serialize': lambda x: None, 'deserialize': lambda x: None}
)
_analysis_lookup: Optional[AnalysisLookup] = field(
default=None, metadata={'serialize': lambda x: None, 'deserialize': lambda x: None}
)
_parsing_info: ParsingInfo = field(
default_factory=ParsingInfo,
metadata={'serialize': lambda x: None, 'deserialize': lambda x: None}
)
_lock: Lock = field(
default_factory=flags.MP_CONTEXT.Lock,
metadata={'serialize': lambda x: None, 'deserialize': lambda x: None}
)
def __pre_serialize__(self):
# serialization won't work with anything except an empty source_patches because
# tuple keys are not supported, so ensure it's empty
self.source_patches = {}
return self
@classmethod
def __post_deserialize__(cls, obj):
obj._lock = flags.MP_CONTEXT.Lock()
return obj
def sync_update_node(
self, new_node: NonSourceCompiledNode
) -> NonSourceCompiledNode:
"""update the node with a lock. The only time we should want to lock is
when compiling an ephemeral ancestor of a node at runtime, because
multiple threads could be just-in-time compiling the same ephemeral
dependency, and we want them to have a consistent view of the manifest.
If the existing node is not compiled, update it with the new node and
return that. If the existing node is compiled, do not update the
manifest and return the existing node.
"""
with self._lock:
existing = self.nodes[new_node.unique_id]
if getattr(existing, 'compiled', False):
# already compiled -> must be a NonSourceCompiledNode
return cast(NonSourceCompiledNode, existing)
_update_into(self.nodes, new_node)
return new_node
def update_exposure(self, new_exposure: ParsedExposure):
_update_into(self.exposures, new_exposure)
def update_node(self, new_node: ManifestNode):
_update_into(self.nodes, new_node)
def update_source(self, new_source: ParsedSourceDefinition):
_update_into(self.sources, new_source)
def build_flat_graph(self):
"""This attribute is used in context.common by each node, so we want to
only build it once and avoid any concurrency issues around it.
Make sure you don't call this until you're done with building your
manifest!
"""
self.flat_graph = {
'exposures': {
k: v.to_dict(omit_none=False)
for k, v in self.exposures.items()
},
'nodes': {
k: v.to_dict(omit_none=False)
for k, v in self.nodes.items()
},
'sources': {
k: v.to_dict(omit_none=False)
for k, v in self.sources.items()
}
}
def find_disabled_by_name(
self, name: str, package: Optional[str] = None
) -> Optional[ManifestNode]:
searcher: NameSearcher = NameSearcher(
name, package, NodeType.refable()
)
result = searcher.search(self.disabled)
return result
def find_disabled_source_by_name(
self, source_name: str, table_name: str, package: Optional[str] = None
) -> Optional[ParsedSourceDefinition]:
search_name = f'{source_name}.{table_name}'
searcher: NameSearcher = NameSearcher(
search_name, package, [NodeType.Source]
)
result = searcher.search(self.disabled)
if result is not None:
assert isinstance(result, ParsedSourceDefinition)
return result
def _materialization_candidates_for(
self, project_name: str,
materialization_name: str,
adapter_type: Optional[str],
) -> CandidateList:
if adapter_type is None:
specificity = Specificity.Default
else:
specificity = Specificity.Adapter
full_name = dbt.utils.get_materialization_macro_name(
materialization_name=materialization_name,
adapter_type=adapter_type,
with_prefix=False,
)
return CandidateList(
MaterializationCandidate.from_macro(m, specificity)
for m in self._find_macros_by_name(full_name, project_name)
)
def find_materialization_macro_by_name(
self, project_name: str, materialization_name: str, adapter_type: str
) -> Optional[ParsedMacro]:
candidates: CandidateList = CandidateList(chain.from_iterable(
self._materialization_candidates_for(
project_name=project_name,
materialization_name=materialization_name,
adapter_type=atype,
) for atype in (adapter_type, None)
))
return candidates.last()
def get_resource_fqns(self) -> Mapping[str, PathSet]:
resource_fqns: Dict[str, Set[Tuple[str, ...]]] = {}
all_resources = chain(self.exposures.values(), self.nodes.values(), self.sources.values())
for resource in all_resources:
resource_type_plural = resource.resource_type.pluralize()
if resource_type_plural not in resource_fqns:
resource_fqns[resource_type_plural] = set()
resource_fqns[resource_type_plural].add(tuple(resource.fqn))
return resource_fqns
# This is called by 'parse_patch' in the NodePatchParser
def add_patch(
self, source_file: SchemaSourceFile, patch: ParsedNodePatch,
) -> None:
if patch.yaml_key in ['models', 'seeds', 'snapshots']:
unique_id = self.ref_lookup.get_unique_id(patch.name, None)
elif patch.yaml_key == 'analyses':
unique_id = self.analysis_lookup.get_unique_id(patch.name, None)
else:
raise dbt.exceptions.InternalException(
f'Unexpected yaml_key {patch.yaml_key} for patch in '
f'file {source_file.path.original_file_path}'
)
if unique_id is None:
# This will usually happen when a node is disabled
return
# patches can't be overwritten
node = self.nodes.get(unique_id)
if node:
if node.patch_path:
package_name, existing_file_path = node.patch_path.split('://')
raise_duplicate_patch_name(patch, existing_file_path)
source_file.append_patch(patch.yaml_key, unique_id)
node.patch(patch)
def add_macro_patch(
self, source_file: SchemaSourceFile, patch: ParsedMacroPatch,
) -> None:
# macros are fully namespaced
unique_id = f'macro.{patch.package_name}.{patch.name}'
macro = self.macros.get(unique_id)
if not macro:
warn_or_error(
f'WARNING: Found documentation for macro "{patch.name}" '
f'which was not found'
)
return
if macro.patch_path:
package_name, existing_file_path = macro.patch_path.split('://')
raise_duplicate_macro_patch_name(patch, existing_file_path)
source_file.macro_patches[patch.name] = unique_id
macro.patch(patch)
def add_source_patch(
self, source_file: SchemaSourceFile, patch: SourcePatch,
) -> None:
# source patches must be unique
key = (patch.overrides, patch.name)
if key in self.source_patches:
raise_duplicate_source_patch_name(patch, self.source_patches[key])
self.source_patches[key] = patch
source_file.source_patches.append(key)
def get_used_schemas(self, resource_types=None):
return frozenset({
(node.database, node.schema) for node in
chain(self.nodes.values(), self.sources.values())
if not resource_types or node.resource_type in resource_types
})
def get_used_databases(self):
return frozenset(
x.database for x in
chain(self.nodes.values(), self.sources.values())
)
# This is used in dbt.task.rpc.sql_commands 'add_new_refs'
def deepcopy(self):
return Manifest(
nodes={k: _deepcopy(v) for k, v in self.nodes.items()},
sources={k: _deepcopy(v) for k, v in self.sources.items()},
macros={k: _deepcopy(v) for k, v in self.macros.items()},
docs={k: _deepcopy(v) for k, v in self.docs.items()},
exposures={k: _deepcopy(v) for k, v in self.exposures.items()},
selectors={k: _deepcopy(v) for k, v in self.selectors.items()},
metadata=self.metadata,
disabled=[_deepcopy(n) for n in self.disabled],
files={k: _deepcopy(v) for k, v in self.files.items()},
state_check=_deepcopy(self.state_check),
)
def build_parent_and_child_maps(self):
edge_members = list(chain(
self.nodes.values(),
self.sources.values(),
self.exposures.values(),
))
forward_edges, backward_edges = build_node_edges(edge_members)
self.child_map = forward_edges
self.parent_map = backward_edges
def build_macro_child_map(self):
edge_members = list(chain(
self.nodes.values(),
self.macros.values(),
))
forward_edges = build_macro_edges(edge_members)
return forward_edges
def writable_manifest(self):
self.build_parent_and_child_maps()
return WritableManifest(
nodes=self.nodes,
sources=self.sources,
macros=self.macros,
docs=self.docs,
exposures=self.exposures,
selectors=self.selectors,
metadata=self.metadata,
disabled=self.disabled,
child_map=self.child_map,
parent_map=self.parent_map,
)
def write(self, path):
self.writable_manifest().write(path)
# Called in dbt.compilation.Linker.write_graph and
# dbt.graph.queue.get and ._include_in_cost
def expect(self, unique_id: str) -> GraphMemberNode:
if unique_id in self.nodes:
return self.nodes[unique_id]
elif unique_id in self.sources:
return self.sources[unique_id]
elif unique_id in self.exposures:
return self.exposures[unique_id]
else:
# something terrible has happened
raise dbt.exceptions.InternalException(
'Expected node {} not found in manifest'.format(unique_id)
)
@property
def doc_lookup(self) -> DocLookup:
if self._doc_lookup is None:
self._doc_lookup = DocLookup(self)
return self._doc_lookup
def rebuild_doc_lookup(self):
self._doc_lookup = DocLookup(self)
@property
def source_lookup(self) -> SourceLookup:
if self._source_lookup is None:
self._source_lookup = SourceLookup(self)
return self._source_lookup
def rebuild_source_lookup(self):
self._source_lookup = SourceLookup(self)
@property
def ref_lookup(self) -> RefableLookup:
if self._ref_lookup is None:
self._ref_lookup = RefableLookup(self)
return self._ref_lookup
def rebuild_ref_lookup(self):
self._ref_lookup = RefableLookup(self)
@property
def analysis_lookup(self) -> AnalysisLookup:
if self._analysis_lookup is None:
self._analysis_lookup = AnalysisLookup(self)
return self._analysis_lookup
# Called by dbt.parser.manifest._resolve_refs_for_exposure
# and dbt.parser.manifest._process_refs_for_node
def resolve_ref(
self,
target_model_name: str,
target_model_package: Optional[str],
current_project: str,
node_package: str,
) -> MaybeNonSource:
node: Optional[ManifestNode] = None
disabled: Optional[ManifestNode] = None
candidates = _search_packages(
current_project, node_package, target_model_package
)
for pkg in candidates:
node = self.ref_lookup.find(target_model_name, pkg, self)
if node is not None and node.config.enabled:
return node
# it's possible that the node is disabled
if disabled is None:
disabled = self.find_disabled_by_name(
target_model_name, pkg
)
if disabled is not None:
return Disabled(disabled)
return None
# Called by dbt.parser.manifest._resolve_sources_for_exposure
# and dbt.parser.manifest._process_source_for_node
def resolve_source(
self,
target_source_name: str,
target_table_name: str,
current_project: str,
node_package: str
) -> MaybeParsedSource:
key = (target_source_name, target_table_name)
candidates = _search_packages(current_project, node_package)
source: Optional[ParsedSourceDefinition] = None
disabled: Optional[ParsedSourceDefinition] = None
for pkg in candidates:
source = self.source_lookup.find(key, pkg, self)
if source is not None and source.config.enabled:
return source
if disabled is None:
disabled = self.find_disabled_source_by_name(
target_source_name, target_table_name, pkg
)
if disabled is not None:
return Disabled(disabled)
return None
# Called by DocsRuntimeContext.doc
def resolve_doc(
self,
name: str,
package: Optional[str],
current_project: str,
node_package: str,
) -> Optional[ParsedDocumentation]:
"""Resolve the given documentation. This follows the same algorithm as
resolve_ref except the is_enabled checks are unnecessary as docs are
always enabled.
"""
candidates = _search_packages(
current_project, node_package, package
)
for pkg in candidates:
result = self.doc_lookup.find(name, pkg, self)
if result is not None:
return result
return None
# Called by RunTask.defer_to_manifest
def merge_from_artifact(
self,
adapter,
other: 'WritableManifest',
selected: AbstractSet[UniqueID],
) -> None:
"""Given the selected unique IDs and a writable manifest, update this
manifest by replacing any unselected nodes with their counterpart.
Only non-ephemeral refable nodes are examined.
"""
refables = set(NodeType.refable())
merged = set()
for unique_id, node in other.nodes.items():
current = self.nodes.get(unique_id)
if current and (
node.resource_type in refables and
not node.is_ephemeral and
unique_id not in selected and
not adapter.get_relation(
current.database, current.schema, current.identifier
)
):
merged.add(unique_id)
self.nodes[unique_id] = node.replace(deferred=True)
# log up to 5 items
sample = list(islice(merged, 5))
logger.debug(
f'Merged {len(merged)} items from state (sample: {sample})'
)
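    # In practice (illustrative note): with `dbt run --defer --state <path>`,
    # any refable, non-ephemeral node that was not selected and does not
    # already exist in the target warehouse is swapped for its counterpart
    # from the comparison manifest and marked deferred=True.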
# Methods that were formerly in ParseResult
def add_macro(self, source_file: SourceFile, macro: ParsedMacro):
if macro.unique_id in self.macros:
# detect that the macro exists and emit an error
other_path = self.macros[macro.unique_id].original_file_path
# subtract 2 for the "Compilation Error" indent
# note that the line wrap eats newlines, so if you want newlines,
# this is the result :(
msg = line_wrap_message(
f'''\
dbt found two macros named "{macro.name}" in the project
"{macro.package_name}".
To fix this error, rename or remove one of the following
macros:
- {macro.original_file_path}
- {other_path}
''',
subtract=2
)
raise_compiler_error(msg)
self.macros[macro.unique_id] = macro
source_file.macros.append(macro.unique_id)
def has_file(self, source_file: SourceFile) -> bool:
key = source_file.file_id
if key is None:
return False
if key not in self.files:
return False
my_checksum = self.files[key].checksum
return my_checksum == source_file.checksum
def add_source(
self, source_file: SchemaSourceFile, source: UnpatchedSourceDefinition
):
# sources can't be overwritten!
_check_duplicates(source, self.sources)
self.sources[source.unique_id] = source # type: ignore
source_file.sources.append(source.unique_id)
def add_node_nofile(self, node: ManifestNodes):
# nodes can't be overwritten!
_check_duplicates(node, self.nodes)
self.nodes[node.unique_id] = node
def add_node(self, source_file: AnySourceFile, node: ManifestNodes, test_from=None):
self.add_node_nofile(node)
if isinstance(source_file, SchemaSourceFile):
assert test_from
source_file.add_test(node.unique_id, test_from)
else:
source_file.nodes.append(node.unique_id)
def add_exposure(self, source_file: SchemaSourceFile, exposure: ParsedExposure):
_check_duplicates(exposure, self.exposures)
self.exposures[exposure.unique_id] = exposure
source_file.exposures.append(exposure.unique_id)
def add_disabled_nofile(self, node: CompileResultNode):
if node.unique_id in self._disabled:
self._disabled[node.unique_id].append(node)
else:
self._disabled[node.unique_id] = [node]
def add_disabled(self, source_file: AnySourceFile, node: CompileResultNode, test_from=None):
self.add_disabled_nofile(node)
if isinstance(source_file, SchemaSourceFile):
assert test_from
source_file.add_test(node.unique_id, test_from)
else:
source_file.nodes.append(node.unique_id)
def add_doc(self, source_file: SourceFile, doc: ParsedDocumentation):
_check_duplicates(doc, self.docs)
self.docs[doc.unique_id] = doc
source_file.docs.append(doc.unique_id)
# end of methods formerly in ParseResult
# Provide support for copy.deepcopy() - we just need to avoid the lock!
# pickle and deepcopy use this. It returns a callable object used to
# create the initial version of the object and a tuple of arguments
# for the object, i.e. the Manifest.
# The order of the arguments must match the order of the attributes
# in the Manifest class declaration, because they are used as
# positional arguments to construct a Manifest.
def __reduce_ex__(self, protocol):
args = (
self.nodes,
self.sources,
self.macros,
self.docs,
self.exposures,
self.selectors,
self.disabled,
self.files,
self.metadata,
self.flat_graph,
self.state_check,
self.source_patches,
self._disabled,
self._doc_lookup,
self._source_lookup,
self._ref_lookup,
)
return self.__class__, args
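    # Example (added comment): both copy.deepcopy(manifest) and
    # pickle.dumps(manifest) go through __reduce_ex__, so the Manifest is
    # rebuilt from the args tuple above; attributes left out of the tuple
    # (such as the multiprocessing Lock) are recreated by their
    # default_factory rather than copied.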
class MacroManifest(MacroMethods):
def __init__(self, macros):
self.macros = macros
self.metadata = ManifestMetadata()
# This is returned by the 'graph' context property
# in the ProviderContext class.
self.flat_graph = {}
AnyManifest = Union[Manifest, MacroManifest]
@dataclass
@schema_version('manifest', 2)
class WritableManifest(ArtifactMixin):
nodes: Mapping[UniqueID, ManifestNode] = field(
metadata=dict(description=(
'The nodes defined in the dbt project and its dependencies'
))
)
sources: Mapping[UniqueID, ParsedSourceDefinition] = field(
metadata=dict(description=(
'The sources defined in the dbt project and its dependencies'
))
)
macros: Mapping[UniqueID, ParsedMacro] = field(
metadata=dict(description=(
'The macros defined in the dbt project and its dependencies'
))
)
docs: Mapping[UniqueID, ParsedDocumentation] = field(
metadata=dict(description=(
'The docs defined in the dbt project and its dependencies'
))
)
exposures: Mapping[UniqueID, ParsedExposure] = field(
metadata=dict(description=(
'The exposures defined in the dbt project and its dependencies'
))
)
selectors: Mapping[UniqueID, Any] = field(
metadata=dict(description=(
'The selectors defined in selectors.yml'
))
)
disabled: Optional[List[CompileResultNode]] = field(metadata=dict(
description='A list of the disabled nodes in the target'
))
parent_map: Optional[NodeEdgeMap] = field(metadata=dict(
description='A mapping from child nodes to their dependencies',
))
child_map: Optional[NodeEdgeMap] = field(metadata=dict(
description='A mapping from parent nodes to their dependents',
))
metadata: ManifestMetadata = field(metadata=dict(
description='Metadata about the manifest',
))
def _check_duplicates(
value: HasUniqueID, src: Mapping[str, HasUniqueID]
):
if value.unique_id in src:
raise_duplicate_resource_name(value, src[value.unique_id])
K_T = TypeVar('K_T')
V_T = TypeVar('V_T')
def _expect_value(
key: K_T, src: Mapping[K_T, V_T], old_file: SourceFile, name: str
) -> V_T:
if key not in src:
raise CompilationException(
'Expected to find "{}" in cached "result.{}" based '
'on cached file information: {}!'
.format(key, name, old_file)
)
return src[key]
 | [((11468, 11502), 'typing.TypeVar', 'TypeVar', (['"""M"""'], {'bound': 'MacroCandidate'}), "('M', bound=MacroCandidate)\n", (11475, 11502), False, 'from typing import Dict, List, Optional, Union, Mapping, MutableMapping, Any, Set, Tuple, TypeVar, Callable, Iterable, Generic, cast, AbstractSet, ClassVar\n'), ((12186, 12216), 'typing.TypeVar', 'TypeVar', (['"""N"""'], {'bound': 'Searchable'}), "('N', bound=Searchable)\n", (12193, 12216), False, 'from typing import Dict, List, Optional, Union, Mapping, MutableMapping, Any, Set, Tuple, TypeVar, Callable, Iterable, Generic, cast, AbstractSet, ClassVar\n'), ((13082, 13094), 'typing.TypeVar', 'TypeVar', (['"""D"""'], {}), "('D')\n", (13089, 13094), False, 'from typing import Dict, List, Optional, Union, Mapping, MutableMapping, Any, Set, Tuple, TypeVar, Callable, Iterable, Generic, cast, AbstractSet, ClassVar\n'), ((13399, 13434), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {'bound': 'GraphMemberNode'}), "('T', bound=GraphMemberNode)\n", (13406, 13434), False, 'from typing import Dict, List, Optional, Union, Mapping, MutableMapping, Any, Set, Tuple, TypeVar, Callable, Iterable, Generic, cast, AbstractSet, ClassVar\n'), ((39262, 39291), 'dbt.contracts.util.schema_version', 'schema_version', (['"""manifest"""', '(2)'], {}), "('manifest', 2)\n", (39276, 39291), False, 'from dbt.contracts.util import BaseArtifactMetadata, SourceKey, ArtifactMixin, schema_version\n'), ((41121, 41135), 'typing.TypeVar', 'TypeVar', (['"""K_T"""'], {}), "('K_T')\n", (41128, 41135), False, 'from typing import Dict, List, Optional, Union, Mapping, MutableMapping, Any, Set, Tuple, TypeVar, Callable, Iterable, Generic, cast, AbstractSet, ClassVar\n'), ((41142, 41156), 'typing.TypeVar', 'TypeVar', (['"""V_T"""'], {}), "('V_T')\n", (41149, 41156), False, 'from typing import Dict, List, Optional, Union, Mapping, MutableMapping, Any, Set, Tuple, TypeVar, Callable, Iterable, Generic, cast, AbstractSet, ClassVar\n'), ((6784, 6872), 'dataclasses.field', 'field', ([], {'default': 'None', 'metadata': "{'description': 'A unique identifier for the project'}"}), "(default=None, metadata={'description':\n 'A unique identifier for the project'})\n", (6789, 6872), False, 'from dataclasses import dataclass, field\n'), ((6945, 7030), 'dataclasses.field', 'field', ([], {'default': 'None', 'metadata': "{'description': 'A unique identifier for the user'}"}), "(default=None, metadata={'description':\n 'A unique identifier for the user'})\n", (6950, 7030), False, 'from dataclasses import dataclass, field\n'), ((17257, 17294), 'dataclasses.field', 'field', ([], {'default_factory': 'FileHash.empty'}), '(default_factory=FileHash.empty)\n', (17262, 17294), False, 'from dataclasses import dataclass, field\n'), ((17324, 17361), 'dataclasses.field', 'field', ([], {'default_factory': 'FileHash.empty'}), '(default_factory=FileHash.empty)\n', (17329, 17361), False, 'from dataclasses import dataclass, field\n'), ((17414, 17441), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (17419, 17441), False, 'from dataclasses import dataclass, field\n'), ((17846, 17873), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (17851, 17873), False, 'from dataclasses import dataclass, field\n'), ((17933, 17960), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (17938, 17960), False, 'from dataclasses import dataclass, field\n'), ((18008, 18035), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (18013, 18035), False, 'from dataclasses import dataclass, field\n'), ((18089, 18116), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (18094, 18116), False, 'from dataclasses import dataclass, field\n'), ((18170, 18197), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (18175, 18197), False, 'from dataclasses import dataclass, field\n'), ((18240, 18267), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (18245, 18267), False, 'from dataclasses import dataclass, field\n'), ((18308, 18335), 'dataclasses.field', 'field', ([], {'default_factory': 'list'}), '(default_factory=list)\n', (18313, 18335), False, 'from dataclasses import dataclass, field\n'), ((18384, 18411), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (18389, 18411), False, 'from dataclasses import dataclass, field\n'), ((18445, 18484), 'dataclasses.field', 'field', ([], {'default_factory': 'ManifestMetadata'}), '(default_factory=ManifestMetadata)\n', (18450, 18484), False, 'from dataclasses import dataclass, field\n'), ((18518, 18545), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (18523, 18545), False, 'from dataclasses import dataclass, field\n'), ((18584, 18625), 'dataclasses.field', 'field', ([], {'default_factory': 'ManifestStateCheck'}), '(default_factory=ManifestStateCheck)\n', (18589, 18625), False, 'from dataclasses import dataclass, field\n'), ((18727, 18754), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (18732, 18754), False, 'from dataclasses import dataclass, field\n'), ((18853, 18880), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (18858, 18880), False, 'from dataclasses import dataclass, field\n'), ((18920, 19015), 'dataclasses.field', 'field', ([], {'default': 'None', 'metadata': "{'serialize': lambda x: None, 'deserialize': lambda x: None}"}), "(default=None, metadata={'serialize': lambda x: None, 'deserialize': \n lambda x: None})\n", (18925, 19015), False, 'from dataclasses import dataclass, field\n'), ((19070, 19165), 'dataclasses.field', 'field', ([], {'default': 'None', 'metadata': "{'serialize': lambda x: None, 'deserialize': lambda x: None}"}), "(default=None, metadata={'serialize': lambda x: None, 'deserialize': \n lambda x: None})\n", (19075, 19165), False, 'from dataclasses import dataclass, field\n'), ((19218, 19313), 'dataclasses.field', 'field', ([], {'default': 'None', 'metadata': "{'serialize': lambda x: None, 'deserialize': lambda x: None}"}), "(default=None, metadata={'serialize': lambda x: None, 'deserialize': \n lambda x: None})\n", (19223, 19313), False, 'from dataclasses import dataclass, field\n'), ((19372, 19467), 'dataclasses.field', 'field', ([], {'default': 'None', 'metadata': "{'serialize': lambda x: None, 'deserialize': lambda x: None}"}), "(default=None, metadata={'serialize': lambda x: None, 'deserialize': \n lambda x: None})\n", (19377, 19467), False, 'from dataclasses import dataclass, field\n'), ((19510, 19619), 'dataclasses.field', 'field', ([], {'default_factory': 'ParsingInfo', 'metadata': "{'serialize': lambda x: None, 'deserialize': lambda x: None}"}), "(default_factory=ParsingInfo, metadata={'serialize': lambda x: None,\n 'deserialize': lambda x: None})\n", (19515, 19619), False, 'from dataclasses import dataclass, field\n'), ((19656, 19775), 'dataclasses.field', 'field', ([], {'default_factory': 'flags.MP_CONTEXT.Lock', 'metadata': "{'serialize': lambda x: None, 'deserialize': lambda x: None}"}), "(default_factory=flags.MP_CONTEXT.Lock, metadata={'serialize': lambda\n x: None, 'deserialize': lambda x: None})\n", (19661, 19775), False, 'from dataclasses import dataclass, field\n'), ((4640, 4658), 'dbt.node_types.NodeType.refable', 'NodeType.refable', ([], {}), '()\n', (4656, 4658), False, 'from dbt.node_types import NodeType\n'), ((20108, 20131), 'dbt.flags.MP_CONTEXT.Lock', 'flags.MP_CONTEXT.Lock', ([], {}), '()\n', (20129, 20131), False, 'from dbt import flags\n'), ((41054, 41112), 'dbt.exceptions.raise_duplicate_resource_name', 'raise_duplicate_resource_name', (['value', 'src[value.unique_id]'], {}), '(value, src[value.unique_id])\n', (41083, 41112), False, 'from dbt.exceptions import CompilationException, raise_duplicate_resource_name, raise_compiler_error, warn_or_error, raise_duplicate_patch_name, raise_duplicate_macro_patch_name, raise_duplicate_source_patch_name\n'), ((16607, 16660), 'dbt.adapters.factory.get_adapter_package_names', 'get_adapter_package_names', (['self.metadata.adapter_type'], {}), '(self.metadata.adapter_type)\n', (16632, 16660), False, 'from dbt.adapters.factory import get_adapter_package_names\n'), ((22362, 22380), 'dbt.node_types.NodeType.refable', 'NodeType.refable', ([], {}), '()\n', (22378, 22380), False, 'from dbt.node_types import NodeType\n'), ((26091, 26192), 'dbt.exceptions.warn_or_error', 'warn_or_error', (['f"""WARNING: Found documentation for macro "{patch.name}" which was not found"""'], {}), '(\n    f\'WARNING: Found documentation for macro "{patch.name}" which was not found\'\n    )\n', (26104, 26192), False, 'from dbt.exceptions import CompilationException, raise_duplicate_resource_name, raise_compiler_error, warn_or_error, raise_duplicate_patch_name, raise_duplicate_macro_patch_name, raise_duplicate_source_patch_name\n'), ((26370, 26429), 'dbt.exceptions.raise_duplicate_macro_patch_name', 'raise_duplicate_macro_patch_name', (['patch', 'existing_file_path'], {}), '(patch, existing_file_path)\n', (26402, 26429), False, 'from dbt.exceptions import CompilationException, raise_duplicate_resource_name, raise_compiler_error, warn_or_error, raise_duplicate_patch_name, raise_duplicate_macro_patch_name, raise_duplicate_source_patch_name\n'), ((26757, 26823), 'dbt.exceptions.raise_duplicate_source_patch_name', 'raise_duplicate_source_patch_name', (['patch', 'self.source_patches[key]'], {}), '(patch, self.source_patches[key])\n', (26790, 26823), False, 'from dbt.exceptions import CompilationException, raise_duplicate_resource_name, raise_compiler_error, warn_or_error, raise_duplicate_patch_name, raise_duplicate_macro_patch_name, raise_duplicate_source_patch_name\n'), ((33973, 33991), 'dbt.node_types.NodeType.refable', 'NodeType.refable', ([], {}), '()\n', (33989, 33991), False, 'from dbt.node_types import NodeType\n'), ((34589, 34606), 'itertools.islice', 'islice', (['merged', '(5)'], {}), '(merged, 5)\n', (34595, 34606), False, 'from itertools import chain, islice\n'), ((35201, 35563), 'dbt.ui.line_wrap_message', 'line_wrap_message', (['f""" dbt found two macros named "{macro.name}" in the project\n "{macro.package_name}".\n\n\n To fix this error, rename or remove one of the following\n macros:\n\n - {macro.original_file_path}\n\n - {other_path}\n """'], {'subtract': '(2)'}), '(\n    f""" dbt found two macros named "{macro.name}" in the project\n "{macro.package_name}".\n\n\n To fix this error, rename or remove one of the following\n macros:\n\n - {macro.original_file_path}\n\n - {other_path}\n """\n    , subtract=2)\n', (35218, 35563), False, 'from dbt.ui import line_wrap_message\n'), ((35614, 35639), 'dbt.exceptions.raise_compiler_error', 'raise_compiler_error', (['msg'], {}), '(msg)\n', (35634, 35639), False, 'from dbt.exceptions import CompilationException, raise_duplicate_resource_name, raise_compiler_error, warn_or_error, raise_duplicate_patch_name, raise_duplicate_macro_patch_name, raise_duplicate_source_patch_name\n'), ((21003, 21040), 'typing.cast', 'cast', (['NonSourceCompiledNode', 'existing'], {}), '(NonSourceCompiledNode, existing)\n', (21007, 21040), False, 'from typing import Dict, List, Optional, Union, Mapping, MutableMapping, Any, Set, Tuple, TypeVar, Callable, Iterable, Generic, cast, AbstractSet, ClassVar\n'), ((25654, 25707), 'dbt.exceptions.raise_duplicate_patch_name', 'raise_duplicate_patch_name', (['patch', 'existing_file_path'], {}), '(patch, existing_file_path)\n', (25680, 25707), False, 'from dbt.exceptions import CompilationException, raise_duplicate_resource_name, raise_compiler_error, warn_or_error, raise_duplicate_patch_name, raise_duplicate_macro_patch_name, raise_duplicate_source_patch_name\n')]
opengauss-mirror/Yat | openGaussBase/testcase/SQL/DCL/Alter_Default_Privileges/Opengauss_Function_Alter_Default_Privileges_Case0016.py | aef107a8304b94e5d99b4f1f36eb46755eb8919e | """
Copyright (c) 2022 Huawei Technologies Co.,Ltd.
openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
"""
Case Type   : Functional test
Case Name   : The initial user and a sysadmin user alter their own privileges
Description :
    1.The initial user alters its own privileges: alter raises no error but takes no effect; the queried privileges stay unchanged
    1.1.The initial user alters its own privileges
    1.2.Clean up the environment  Expected: cleanup succeeds
    2.A sysadmin user alters its own privileges: alter raises no error but takes no effect; the queried privileges stay unchanged
    2.1.Connect as the administrator and create sysadmin user default016_01  Expected: creation succeeds
    2.2.Connect as default016_01 and run the alter tests
    2.3.Clean up  Expected: cleanup succeeds
    Note: the alter tests above cover privileges on tables (including views), types and functions
Expect      :
    1.The initial user alters its own privileges: alter raises no error but takes no effect; the queried privileges stay unchanged
    1.1.The initial user alters its own privileges
    1.2.Clean up the environment  Expected: cleanup succeeds
    2.A sysadmin user alters its own privileges: alter raises no error but takes no effect; the queried privileges stay unchanged
    2.1.Connect as the administrator and create sysadmin user default016_01  Expected: creation succeeds
    2.2.Connect as default016_01 and run the alter tests
    2.3.Clean up  Expected: cleanup succeeds
    Note: the alter tests above cover privileges on tables (including views), types and functions
History :
"""
import sys
import unittest
from yat.test import macro
from yat.test import Node
sys.path.append(sys.path[0] + "/../")
from testcase.utils.Logger import Logger
from testcase.utils.Constant import Constant
from testcase.utils.CommonSH import CommonSH
logger = Logger()
commonsh = CommonSH('dbuser')
class Privategrant(unittest.TestCase):
def setUp(self):
        logger.info('--------Opengauss_Function_Alter_Default_Privileges_Case0016 starts--------')
self.userNode = Node('dbuser')
self.DB_ENV_PATH = macro.DB_ENV_PATH
self.Constant = Constant()
        # user name of the initial user
self.username = self.userNode.ssh_user
        # password of the initial user
self.password = macro.GAUSSDB_INIT_USER_PASSWD
def test_common_user_permission(self):
        logger.info('--------1.the initial user alters its own privileges--------')
        logger.info('--------1.1.the initial user alters its own privileges--------')
sql_cmd = (f'''
drop schema if exists schema_016 cascade;
create schema schema_016;
ALTER DEFAULT PRIVILEGES for role {self.username} in schema schema_016 GRANT ALL PRIVILEGES on tables to {self.username} WITH GRANT OPTION ;
ALTER DEFAULT PRIVILEGES for role {self.username} GRANT select,insert,update,truncate,references,TRIGGER,DELETE on tables to {self.username} WITH GRANT OPTION ;
ALTER DEFAULT PRIVILEGES for role {self.username} in schema schema_016 GRANT ALL PRIVILEGES on functions to {self.username} WITH GRANT OPTION ;
ALTER DEFAULT PRIVILEGES for role {self.username} GRANT EXECUTE on functions to {self.username} WITH GRANT OPTION ;
ALTER DEFAULT PRIVILEGES for role {self.username} in schema schema_016 GRANT ALL PRIVILEGES on TYPES to {self.username} WITH GRANT OPTION ;
ALTER DEFAULT PRIVILEGES for role {self.username} GRANT USAGE on TYPES to {self.username} WITH GRANT OPTION ;
drop schema if exists schema_016 cascade;
create schema schema_016;
drop table if exists test_alter_default_016 cascade;
create table test_alter_default_016(id int unique);
select * from test_alter_default_016;
drop function if exists test_default_016(int) cascade;
create or replace function test_default_016(a int) return int
as
b int:= a;
begin
for i in 1..a loop
b:=b+1;
end loop;
return b;
end;
select test_default_016(16);
drop type if exists type016;
CREATE TYPE type016 AS (c_int int,c_text text);
drop table if exists test_alter_default_016 cascade;
create table test_alter_default_016(id type016);
select * from test_alter_default_016;
ALTER DEFAULT PRIVILEGES for role {self.username} in schema schema_016 revoke ALL on tables from {self.username} CASCADE CONSTRAINTS ;
ALTER DEFAULT PRIVILEGES for role {self.username} revoke select,insert,update,truncate,references,TRIGGER,DELETE on tables from {self.username} CASCADE CONSTRAINTS;
ALTER DEFAULT PRIVILEGES for role {self.username} in schema schema_016 revoke ALL on functions from {self.username} CASCADE CONSTRAINTS ;
ALTER DEFAULT PRIVILEGES for role {self.username} revoke EXECUTE on functions from {self.username} CASCADE CONSTRAINTS;
ALTER DEFAULT PRIVILEGES for role {self.username} in schema schema_016 revoke ALL on TYPES from {self.username} CASCADE CONSTRAINTS ;
ALTER DEFAULT PRIVILEGES for role {self.username} revoke USAGE on TYPES from {self.username} CASCADE CONSTRAINTS;
''')
excute_cmd = f'''
source {self.DB_ENV_PATH};
gsql -d {self.userNode.db_name} -p {self.userNode.db_port} -U {self.username} -W {self.password} -c "{sql_cmd}"
'''
logger.info(excute_cmd)
msg = self.userNode.sh(excute_cmd).result()
logger.info(msg)
self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg)
self.assertIn(self.Constant.ALTER_DEFAULT_PRIVILEGES, msg)
        logger.info('--------1.2.clean up the environment--------')
sql_cmd = ('''
drop table if exists test_alter_default_016 cascade;
drop type if exists type016 cascade;
drop function if exists test_default_016(int) cascade;
drop schema if exists schema_016 cascade;
''')
excute_cmd = f'''
source {self.DB_ENV_PATH};
gsql -d {self.userNode.db_name} -p {self.userNode.db_port} -U {self.username} -W {self.password} -c "{sql_cmd}"
'''
logger.info(excute_cmd)
msg = self.userNode.sh(excute_cmd).result()
logger.info(msg)
self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg)
        logger.info('--------2.a sysadmin user alters its own privileges--------')
        logger.info('--------2.1.connect as the administrator and create sysadmin user default016_01--------')
sql_cmd = commonsh.execut_db_sql(f'''
drop owned by default016_01 cascade;
drop user if exists default016_01;
create user default016_01 password '{macro.COMMON_PASSWD}';
grant all privileges to default016_01;
''')
logger.info(sql_cmd)
self.assertIn(self.Constant.CREATE_ROLE_SUCCESS_MSG, sql_cmd)
        logger.info('--------2.2.connect as default016_01 and run the alter tests--------')
sql_cmd = (f'''
drop schema if exists schema_016 cascade;
create schema schema_016;
ALTER DEFAULT PRIVILEGES for role default016_01 in schema schema_016 GRANT ALL PRIVILEGES on tables to default016_01 WITH GRANT OPTION ;
ALTER DEFAULT PRIVILEGES for role default016_01 GRANT select,insert,update,truncate,references,TRIGGER,DELETE on tables to default016_01 WITH GRANT OPTION ;
ALTER DEFAULT PRIVILEGES for role default016_01 in schema schema_016 GRANT ALL PRIVILEGES on functions to default016_01 WITH GRANT OPTION ;
ALTER DEFAULT PRIVILEGES for role default016_01 GRANT EXECUTE on functions to default016_01 WITH GRANT OPTION ;
ALTER DEFAULT PRIVILEGES for role default016_01 in schema schema_016 GRANT ALL PRIVILEGES on TYPES to default016_01 WITH GRANT OPTION ;
ALTER DEFAULT PRIVILEGES for role default016_01 GRANT USAGE on TYPES to default016_01 WITH GRANT OPTION ;
drop schema if exists schema_016 cascade;
create schema schema_016;
drop table if exists test_alter_default_016 cascade;
create table test_alter_default_016(id int unique);
select * from test_alter_default_016;
drop function if exists test_default_016(int) cascade;
create or replace function test_default_016(a int) return int
as
b int:= a;
begin
for i in 1..a loop
b:=b+1;
end loop;
return b;
end;
select test_default_016(16);
drop type if exists type016;
CREATE TYPE type016 AS (c_int int,c_text text);
drop table if exists test_alter_default_016 cascade;
create table test_alter_default_016(id type016);
select * from test_alter_default_016;
ALTER DEFAULT PRIVILEGES for role default016_01 in schema schema_016 revoke ALL on tables from default016_01 CASCADE CONSTRAINTS ;
ALTER DEFAULT PRIVILEGES for role default016_01 revoke select,insert,update,truncate,references,TRIGGER,DELETE on tables from default016_01 CASCADE CONSTRAINTS;
ALTER DEFAULT PRIVILEGES for role default016_01 in schema schema_016 revoke ALL on functions from default016_01 CASCADE CONSTRAINTS ;
ALTER DEFAULT PRIVILEGES for role default016_01 revoke EXECUTE on functions from default016_01 CASCADE CONSTRAINTS;
ALTER DEFAULT PRIVILEGES for role default016_01 in schema schema_016 revoke ALL on TYPES from default016_01 CASCADE CONSTRAINTS ;
ALTER DEFAULT PRIVILEGES for role default016_01 revoke USAGE on TYPES from default016_01 CASCADE CONSTRAINTS;
''')
excute_cmd = f'''
source {self.DB_ENV_PATH};
gsql -d {self.userNode.db_name} -p {self.userNode.db_port} -U default016_01 -W {macro.COMMON_PASSWD} -c "{sql_cmd}"
'''
logger.info(excute_cmd)
msg = self.userNode.sh(excute_cmd).result()
logger.info(msg)
self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg)
self.assertIn(self.Constant.ALTER_DEFAULT_PRIVILEGES, msg)
        logger.info('--------2.3. Clean up--------')
sql_cmd = commonsh.execut_db_sql(f'''
drop owned by default016_01 cascade;
drop user if exists default016_01;
''')
logger.info(sql_cmd)
self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], sql_cmd)
def tearDown(self):
        logger.info('----------------------------------Clean up the environment----------------------------------')
sql_cmd = commonsh.execut_db_sql('''
drop owned by default016_01 cascade;
drop user if exists default016_01;
''')
logger.info(sql_cmd)
logger.info('--------Opengauss_Function_Alter_Default_Privileges_Case0016执行结束--------') | [((1262, 1299), 'sys.path.append', 'sys.path.append', (["(sys.path[0] + '/../')"], {}), "(sys.path[0] + '/../')\n", (1277, 1299), False, 'import sys\n'), ((1440, 1448), 'testcase.utils.Logger.Logger', 'Logger', ([], {}), '()\n', (1446, 1448), False, 'from testcase.utils.Logger import Logger\n'), ((1460, 1478), 'testcase.utils.CommonSH.CommonSH', 'CommonSH', (['"""dbuser"""'], {}), "('dbuser')\n", (1468, 1478), False, 'from testcase.utils.CommonSH import CommonSH\n'), ((1660, 1674), 'yat.test.Node', 'Node', (['"""dbuser"""'], {}), "('dbuser')\n", (1664, 1674), False, 'from yat.test import Node\n'), ((1744, 1754), 'testcase.utils.Constant.Constant', 'Constant', ([], {}), '()\n', (1752, 1754), False, 'from testcase.utils.Constant import Constant\n')] |
ryanbsaunders/phantom-apps | Apps/phdigitalshadows/dsapi/service/infrastructure_service.py | 1befda793a08d366fbd443894f993efb1baf9635 | # File: infrastructure_service.py
#
# Licensed under Apache 2.0 (https://www.apache.org/licenses/LICENSE-2.0.txt)
#
from .ds_base_service import DSBaseService
from .ds_find_service import DSFindService
from ..model.infrastructure import Infrastructure
class InfrastructureService(DSFindService):
def __init__(self, ds_api_key, ds_api_secret_key, proxy=None):
super(InfrastructureService, self).__init__(ds_api_key, ds_api_secret_key, proxy=proxy)
def find_all(self, view=None):
"""
Streams all infrastructure objects retrieved from the Digital Shadows API.
:param view: InfrastructureView
:return: Infrastructure generator
"""
if view is None:
view = InfrastructureService.infrastructure_view()
return self._find_all('/api/ip-ports',
view,
Infrastructure)
def find_all_pages(self, view=None):
"""
Streams all infrastructure objects retrieved from the Digital Shadows API in page groups.
:param view: InfrastructureView
:return: Infrastructure generator
"""
if view is None:
view = Infrastructure.infrastructure_view()
return self._find_all_pages('/api/ip-ports',
view,
Infrastructure)
@staticmethod
@DSBaseService.paginated(size=500)
@DSBaseService.sorted('published')
def infrastructure_view(detectedopen='ALL', domainname=None, detectedclosed=False, markedclosed=False,
severities=None, alerted=False, reverse=None):
view = {
'filter': {
'detectedOpen': detectedopen,
'severities': [] if severities is None else severities,
'alerted': 'true' if alerted else 'false',
'markedClosed': 'true' if markedclosed else 'false',
'detectedClosed': 'true' if detectedclosed else 'false'
}
}
if domainname is not None:
view['filter']['domainName'] = domainname
if reverse is not None:
view['sort'] = {
'direction': 'ASCENDING' if reverse else 'DESCENDING',
'property': 'published'
}
return view
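# --- Usage sketch (illustrative, not part of the original module) ---
# Shows one plausible way to drive this service; the key/secret values are
# placeholders, not real Digital Shadows credentials.
#
#   service = InfrastructureService('my-api-key', 'my-api-secret-key')
#   view = InfrastructureService.infrastructure_view(alerted=True)
#   for ip_port in service.find_all(view=view):
#       print(ip_port)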
| [] |
NCBI-Codeathons/Identify-antiphage-defense-systems-in-the-bacterial-pangenome | src/find_genes_by_location/find_genes_by_location.py | b1eb83118268ada50e90f979347e47e055a51029 | import argparse
from collections import defaultdict
import csv
from dataclasses import dataclass, field
from enum import Enum, unique, auto
import os
import sys
import tempfile
import yaml
import zipfile
import gffutils
from google.protobuf import json_format
from ncbi.datasets.v1alpha1 import dataset_catalog_pb2
from ncbi.datasets.v1alpha1.reports import assembly_pb2
from ncbi.datasets.reports.report_reader import DatasetsReportReader
def retrieve_assembly_report(zip_in, catalog, assm_acc: str) -> assembly_pb2.AssemblyDataReport:
report_files = get_catalog_files_for_assembly(catalog, dataset_catalog_pb2.File.FileType.DATA_REPORT, assm_acc)
for path in report_files:
        yaml_content = zip_in.read(path)  # renamed: `yaml` would shadow the imported yaml module
        rpt_rdr = DatasetsReportReader()
        return rpt_rdr.assembly_report(yaml_content)
def retrieve_data_catalog(zip_in) -> dataset_catalog_pb2.Catalog:
catalog_json = zip_in.read('ncbi_dataset/data/dataset_catalog.json')
return json_format.Parse(catalog_json, dataset_catalog_pb2.Catalog())
def get_catalog_files_for_assembly(catalog: dataset_catalog_pb2.Catalog, desired_filetype: dataset_catalog_pb2.File.FileType, assm_acc: str):
report_files = get_catalog_files(catalog, desired_filetype, assm_acc)
filepaths = []
for assm_acc, paths in report_files.items():
filepaths.extend(paths)
return filepaths
def get_catalog_files(catalog: dataset_catalog_pb2.Catalog, desired_filetype: dataset_catalog_pb2.File.FileType, assm_acc: str = None):
files = defaultdict(list)
for assm in catalog.assemblies:
acc = assm.accession
if assm_acc and assm_acc != acc:
continue
for f in assm.files:
filepath = os.path.join('ncbi_dataset', 'data', f.file_path)
if f.file_type == desired_filetype:
files[acc].append(filepath)
return files
def get_zip_file_for_acc(acc, path):
fname = os.path.join(path, f'{acc}.zip')
if os.path.isfile(fname):
return fname
return None
@dataclass
class Gene:
id: str
feat_type: str
name: str
chrom: str
strand: str
range_start: int
range_stop: int
protein_accession: str = ""
def get_fields(self):
return [self.feat_type, self.name, self.range_start, self.range_stop, self.protein_accession]
def name_val(self):
return self.protein_accession if self.protein_accession else self.name
def find_genes_by_loc(gff3_db, csvout, assm_acc, seq_acc, start, stop, extra_fields):
found_genes = []
feat_types = ('gene', 'pseudogene')
for gene in gff3_db.region(seqid=seq_acc, start=start, end=stop, featuretype=feat_types, completely_within=False):
        gene_name = gene.attributes.get('Name', [None])[0]  # guard against genes without a Name attribute
        prot_acc = ""
        if gene.attributes['gene_biotype'][0] == 'protein_coding':
            cds = list(gff3_db.children(gene, featuretype='CDS'))
            if cds:  # some protein-coding records may lack an annotated CDS child
                prot_acc = cds[0].attributes.get('protein_id', [''])[0]
geneobj = Gene(
gene.id,
gene.featuretype,
gene_name,
gene.chrom,
gene.strand,
gene.start,
gene.stop,
prot_acc,
)
csvout.writerow([assm_acc, seq_acc, start, stop, *extra_fields, *geneobj.get_fields()])
found_genes.append(geneobj)
return found_genes
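# Example (illustrative): driving find_genes_by_loc directly for one region.
# The accessions and coordinates below are placeholders; `db` stands for a
# gffutils FeatureDB built from the assembly's GFF3 file.
#
#   csvout = csv.writer(sys.stdout, dialect='excel-tab')
#   genes = find_genes_by_loc(db, csvout, 'GCF_000001405.39', 'NC_000001.11',
#                             1000000, 1100000, [])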
class FindGenesByLoc:
default_packages_dir = os.path.join('var', 'data', 'packages')
def __init__(self):
parser = argparse.ArgumentParser()
parser.add_argument('--packages-dir', type=str, default=self.default_packages_dir,
help=f'root of input data directory [{self.default_packages_dir}]')
parser.add_argument('--locs', type=str, help='file containing genomic locations')
self.args = parser.parse_args()
self.writer = csv.writer(sys.stdout, dialect='excel-tab')
def read_data(self):
for row in csv.reader(iter(sys.stdin.readline, ''), dialect='excel-tab'):
yield row
def run(self):
for assm_acc, seq_acc, start, stop, *extra in self.read_data():
self.find_all_for_location(assm_acc, seq_acc, start, stop, extra)
def process_loc_for_gff(self, zin, gff_fname, assm_acc, seq_acc, start, stop, extra_fields):
with tempfile.NamedTemporaryFile() as tmpfile:
tmpfile.write(zin.read(gff_fname))
db = gffutils.create_db(
tmpfile.name,
dbfn=':memory:',
force=True,
keep_order=True,
merge_strategy='merge',
sort_attribute_values=True
)
find_genes_by_loc(db, self.writer, assm_acc, seq_acc, start, stop, extra_fields)
def find_all_for_location(self, assm_acc, seq_acc, start, stop, extra_fields):
zip_file = get_zip_file_for_acc(assm_acc, self.args.packages_dir)
try:
with zipfile.ZipFile(zip_file, 'r') as zin:
catalog = retrieve_data_catalog(zin)
gff_files = get_catalog_files(catalog, dataset_catalog_pb2.File.FileType.GFF3)
                for assm_acc, assm_gff_files in gff_files.items():  # avoid rebinding the dict being iterated
                    report = retrieve_assembly_report(zin, catalog, assm_acc)
                    for gff_fname in assm_gff_files:
                        self.process_loc_for_gff(zin, gff_fname, assm_acc, seq_acc, start, stop, extra_fields)
except zipfile.BadZipFile:
print(f'{zip_file} is not a zip file')
if __name__ == '__main__':
FindGenesByLoc().run()
| [((1509, 1526), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (1520, 1526), False, 'from collections import defaultdict\n'), ((1916, 1948), 'os.path.join', 'os.path.join', (['path', 'f"""{acc}.zip"""'], {}), "(path, f'{acc}.zip')\n", (1928, 1948), False, 'import os\n'), ((1956, 1977), 'os.path.isfile', 'os.path.isfile', (['fname'], {}), '(fname)\n', (1970, 1977), False, 'import os\n'), ((3403, 3442), 'os.path.join', 'os.path.join', (['"""var"""', '"""data"""', '"""packages"""'], {}), "('var', 'data', 'packages')\n", (3415, 3442), False, 'import os\n'), ((737, 759), 'ncbi.datasets.reports.report_reader.DatasetsReportReader', 'DatasetsReportReader', ([], {}), '()\n', (757, 759), False, 'from ncbi.datasets.reports.report_reader import DatasetsReportReader\n'), ((989, 1018), 'ncbi.datasets.v1alpha1.dataset_catalog_pb2.Catalog', 'dataset_catalog_pb2.Catalog', ([], {}), '()\n', (1016, 1018), False, 'from ncbi.datasets.v1alpha1 import dataset_catalog_pb2\n'), ((3485, 3510), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (3508, 3510), False, 'import argparse\n'), ((3850, 3893), 'csv.writer', 'csv.writer', (['sys.stdout'], {'dialect': '"""excel-tab"""'}), "(sys.stdout, dialect='excel-tab')\n", (3860, 3893), False, 'import csv\n'), ((1706, 1755), 'os.path.join', 'os.path.join', (['"""ncbi_dataset"""', '"""data"""', 'f.file_path'], {}), "('ncbi_dataset', 'data', f.file_path)\n", (1718, 1755), False, 'import os\n'), ((4305, 4334), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (4332, 4334), False, 'import tempfile\n'), ((4411, 4546), 'gffutils.create_db', 'gffutils.create_db', (['tmpfile.name'], {'dbfn': '""":memory:"""', 'force': '(True)', 'keep_order': '(True)', 'merge_strategy': '"""merge"""', 'sort_attribute_values': '(True)'}), "(tmpfile.name, dbfn=':memory:', force=True, keep_order=\n True, merge_strategy='merge', sort_attribute_values=True)\n", (4429, 4546), False, 'import gffutils\n'), ((4933, 4963), 'zipfile.ZipFile', 'zipfile.ZipFile', (['zip_file', '"""r"""'], {}), "(zip_file, 'r')\n", (4948, 4963), False, 'import zipfile\n')] |
tOverney/ADA-Project | web/backend/backend_django/apps/capacity/models.py | 69221210b1f4f13f6979123c6a7a1a9813ea18e5 | from django.db import models
from multigtfs.models import (
Block, Fare, FareRule, Feed, Frequency, Route, Service, ServiceDate, Shape,
ShapePoint, Stop, StopTime, Trip, Agency)
class Path(models.Model):
trip = models.ForeignKey(Trip)
stop = models.ForeignKey(Stop)
path = models.CharField(max_length=1024, null=True, blank=True)
class Meta:
unique_together = ('trip', 'stop',)
class Capacity(models.Model):
trip = models.ForeignKey(Trip)
stop_time = models.ForeignKey(StopTime)
service_date = models.ForeignKey(ServiceDate)
capacity1st = models.IntegerField('capacity1st', null=True, blank=True)
capacity2nd = models.IntegerField('capacity2nd', null=True, blank=True)
class Meta:
unique_together = ('trip', 'stop_time', 'service_date') | [((226, 249), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Trip'], {}), '(Trip)\n', (243, 249), False, 'from django.db import models\n'), ((261, 284), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Stop'], {}), '(Stop)\n', (278, 284), False, 'from django.db import models\n'), ((296, 352), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1024)', 'null': '(True)', 'blank': '(True)'}), '(max_length=1024, null=True, blank=True)\n', (312, 352), False, 'from django.db import models\n'), ((460, 483), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Trip'], {}), '(Trip)\n', (477, 483), False, 'from django.db import models\n'), ((500, 527), 'django.db.models.ForeignKey', 'models.ForeignKey', (['StopTime'], {}), '(StopTime)\n', (517, 527), False, 'from django.db import models\n'), ((547, 577), 'django.db.models.ForeignKey', 'models.ForeignKey', (['ServiceDate'], {}), '(ServiceDate)\n', (564, 577), False, 'from django.db import models\n'), ((597, 654), 'django.db.models.IntegerField', 'models.IntegerField', (['"""capacity1st"""'], {'null': '(True)', 'blank': '(True)'}), "('capacity1st', null=True, blank=True)\n", (616, 654), False, 'from django.db import models\n'), ((673, 730), 'django.db.models.IntegerField', 'models.IntegerField', (['"""capacity2nd"""'], {'null': '(True)', 'blank': '(True)'}), "('capacity2nd', null=True, blank=True)\n", (692, 730), False, 'from django.db import models\n')] |
rainydaygit/testtcloudserver | apps/interface/settings/config.py | 8037603efe4502726a4d794fb1fc0a3f3cc80137 | try:
from public_config import *
except ImportError:
pass
PORT = 9028
SERVICE_NAME = 'interface'
| [] |
farnswj1/PokemonAPI | api/api/pokemon/views.py | b6fc4dfe8c0fde6b4560455dd37e61b6a0d2ea27 | from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_page
from rest_framework.generics import (
ListAPIView,
RetrieveAPIView,
CreateAPIView,
UpdateAPIView,
DestroyAPIView
)
from .models import Pokemon
from .serializers import PokemonSerializer
from .filters import PokemonFilterSet
# Create your views here.
class PokemonListAPIView(ListAPIView):
queryset = Pokemon.objects.all()
serializer_class = PokemonSerializer
filterset_class = PokemonFilterSet
@method_decorator(cache_page(7200))
def get(self, request, *args, **kwargs):
return super().get(request, *args, **kwargs)
class PokemonDetailAPIView(RetrieveAPIView):
queryset = Pokemon.objects.all()
serializer_class = PokemonSerializer
@method_decorator(cache_page(7200))
def get(self, request, *args, **kwargs):
return super().get(request, *args, **kwargs)
class PokemonCreateAPIView(CreateAPIView):
queryset = Pokemon.objects.all()
serializer_class = PokemonSerializer
class PokemonUpdateAPIView(UpdateAPIView):
queryset = Pokemon.objects.all()
serializer_class = PokemonSerializer
class PokemonDeleteAPIView(DestroyAPIView):
queryset = Pokemon.objects.all()
serializer_class = PokemonSerializer
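# --- Illustrative wiring (assumed, not part of this file) ---
# One plausible urls.py for these views; the route strings are placeholders.
#
#   from django.urls import path
#   urlpatterns = [
#       path('pokemon/', PokemonListAPIView.as_view()),
#       path('pokemon/<int:pk>/', PokemonDetailAPIView.as_view()),
#       path('pokemon/create/', PokemonCreateAPIView.as_view()),
#   ]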
| [((557, 573), 'django.views.decorators.cache.cache_page', 'cache_page', (['(7200)'], {}), '(7200)\n', (567, 573), False, 'from django.views.decorators.cache import cache_page\n'), ((821, 837), 'django.views.decorators.cache.cache_page', 'cache_page', (['(7200)'], {}), '(7200)\n', (831, 837), False, 'from django.views.decorators.cache import cache_page\n')] |
sma-de/ansible-collections-gitlab | plugins/action/normalize_gitlab_cfg.py | 5da99b04722fc016d3e8589635fcbb3579dcfda2 |
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.errors import AnsibleOptionsError
from ansible.module_utils.six import iteritems, string_types
from ansible_collections.smabot.base.plugins.module_utils.plugins.config_normalizing.base import ConfigNormalizerBaseMerger, NormalizerBase, NormalizerNamed, DefaultSetterConstant, DefaultSetterOtherKey
from ansible_collections.smabot.base.plugins.module_utils.utils.dicting import setdefault_none, SUBDICT_METAKEY_ANY, get_subdict
from ansible_collections.smabot.base.plugins.module_utils.utils.utils import ansible_assert
def user_role_to_cfg(username, urole, cfg):
tmp = ['roles', 'subroles'] \
+ urole['path'].replace('/', '/subroles/').split('/')
tmp = get_subdict(cfg, tmp, default_empty=True)
setdefault_none(setdefault_none(tmp, 'members', {}),
urole['level'], []
).append(username)
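# Worked example (illustrative values): a role entry
#   {'path': 'grp/sub', 'level': 'developer'}
# for user 'alice' ends up appended at
#   cfg['roles']['subroles']['grp']['subroles']['sub']['members']['developer']
# because every '/' in the role path is expanded to '/subroles/'.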
class ConfigRootNormalizer(NormalizerBase):
def __init__(self, pluginref, *args, **kwargs):
self._add_defaultsetter(kwargs,
'random_pwlen', DefaultSetterConstant(80)
)
subnorms = kwargs.setdefault('sub_normalizers', [])
subnorms += [
ServersNormalizer(pluginref),
]
super(ConfigRootNormalizer, self).__init__(pluginref, *args, **kwargs)
class ServersNormalizer(NormalizerBase):
def __init__(self, pluginref, *args, **kwargs):
subnorms = kwargs.setdefault('sub_normalizers', [])
subnorms += [
ServerInstancesNormalizer(pluginref),
]
super(ServersNormalizer, self).__init__(
pluginref, *args, **kwargs
)
@property
def config_path(self):
return ['servers']
class ServerInstancesNormalizer(NormalizerBase):
def __init__(self, pluginref, *args, **kwargs):
subnorms = kwargs.setdefault('sub_normalizers', [])
subnorms += [
SrvInstNormalizer(pluginref),
]
super(ServerInstancesNormalizer, self).__init__(
pluginref, *args, **kwargs
)
@property
def config_path(self):
return ['instances']
class SrvInstNormalizer(NormalizerBase):
def __init__(self, pluginref, *args, **kwargs):
subnorms = kwargs.setdefault('sub_normalizers', [])
subnorms += [
ServerUsersNormalizer(pluginref),
SrvRolesNormalizer(pluginref),
]
super(SrvInstNormalizer, self).__init__(
pluginref, *args, **kwargs
)
@property
def config_path(self):
return [SUBDICT_METAKEY_ANY]
class SrvRolesBaseNormalizer(NormalizerBase):
def __init__(self, pluginref, *args, **kwargs):
subnorms = kwargs.setdefault('sub_normalizers', [])
subnorms += [
SrvRolesMembersNormalizer(pluginref),
## note: for recursive structures, the sub normalizers can only
## be instantiated if the corresponding key actually exists
## to avoid indefinite recursions of death
(SrvSubRolesNormalizer, True),
]
super(SrvRolesBaseNormalizer, self).__init__(
pluginref, *args, **kwargs
)
def _handle_specifics_presub(self, cfg, my_subcfg, cfgpath_abs):
# do config subkey
c = setdefault_none(my_subcfg, 'config', defval={})
setdefault_none(c, 'name', defval=cfgpath_abs[-1])
# build role hierarchy path and parent
if cfgpath_abs[-1] == 'roles':
## top level
parent = []
else:
## subrole
parent = get_subdict(cfg, cfgpath_abs[:-2])
parent = parent['role_abspath']
my_subcfg['role_abspath'] = parent + [c['name']]
c['parent'] = '/'.join(parent)
return my_subcfg
class SrvRolesNormalizer(SrvRolesBaseNormalizer):
def __init__(self, pluginref, *args, **kwargs):
super(SrvRolesNormalizer, self).__init__(
pluginref, *args, **kwargs
)
@property
def config_path(self):
return ['roles']
class SrvSubRolesNormalizer(NormalizerBase):
NORMER_CONFIG_PATH = ['subroles']
def __init__(self, pluginref, *args, **kwargs):
subnorms = kwargs.setdefault('sub_normalizers', [])
subnorms += [
SrvRoleInstNormalizer(pluginref),
]
super(SrvSubRolesNormalizer, self).__init__(
pluginref, *args, **kwargs
)
@property
def config_path(self):
return type(self).NORMER_CONFIG_PATH
class SrvRoleInstNormalizer(SrvRolesBaseNormalizer):
def __init__(self, pluginref, *args, **kwargs):
super(SrvRoleInstNormalizer, self).__init__(
pluginref, *args, **kwargs
)
@property
def config_path(self):
return [SUBDICT_METAKEY_ANY]
class SrvRolesMembersNormalizer(NormalizerBase):
def __init__(self, pluginref, *args, **kwargs):
super(SrvRolesMembersNormalizer, self).__init__(
pluginref, *args, **kwargs
)
@property
def config_path(self):
return ['members']
def _handle_specifics_presub(self, cfg, my_subcfg, cfgpath_abs):
if not my_subcfg:
return my_subcfg
## if it exists, members should be a dict where the keys are
## valid gitlab access levels (like guest or developer) and
## the values should be a list of users
exportcfg = []
my_group = self.get_parentcfg(cfg, cfgpath_abs)
my_group = '/'.join(my_group['role_abspath'])
for (k,ul) in iteritems(my_subcfg):
for u in ul:
exportcfg.append({
'gitlab_group': my_group, 'gitlab_user': u, 'access_level': k
})
my_subcfg['_exportcfg'] = exportcfg
return my_subcfg
class ServerUsersNormalizer(NormalizerBase):
def __init__(self, pluginref, *args, **kwargs):
subnorms = kwargs.setdefault('sub_normalizers', [])
subnorms += [
ServerBotsNormalizer(pluginref),
ServerHumansNormalizer(pluginref),
]
super(ServerUsersNormalizer, self).__init__(
pluginref, *args, **kwargs
)
@property
def config_path(self):
return ['users']
class ServerUsrBaseNormalizer(NormalizerBase):
def __init__(self, pluginref, *args, **kwargs):
subnorms = kwargs.setdefault('sub_normalizers', [])
subnorms += [
SrvUsrNormalizer(pluginref),
]
super(ServerUsrBaseNormalizer, self).__init__(
pluginref, *args, **kwargs
)
class ServerBotsNormalizer(ServerUsrBaseNormalizer):
def __init__(self, pluginref, *args, **kwargs):
super(ServerBotsNormalizer, self).__init__(
pluginref, *args, **kwargs
)
@property
def config_path(self):
return ['bots']
class ServerHumansNormalizer(ServerUsrBaseNormalizer):
def __init__(self, pluginref, *args, **kwargs):
super(ServerHumansNormalizer, self).__init__(
pluginref, *args, **kwargs
)
@property
def config_path(self):
return ['humans']
class SrvUsrNormalizer(NormalizerBase):
def __init__(self, pluginref, *args, **kwargs):
subnorms = kwargs.setdefault('sub_normalizers', [])
subnorms += [
SrvUsrCfgNormalizer(pluginref),
]
self._add_defaultsetter(kwargs,
'pw_access', DefaultSetterConstant(True)
)
super(SrvUsrNormalizer, self).__init__(
pluginref, *args, **kwargs
)
@property
def config_path(self):
return [SUBDICT_METAKEY_ANY]
def _handle_specifics_postsub(self, cfg, my_subcfg, cfgpath_abs):
usr_roles = my_subcfg.get('roles', None)
if usr_roles:
for ur in usr_roles:
user_role_to_cfg(my_subcfg['config']['username'], ur,
self.get_parentcfg(cfg, cfgpath_abs, level=3)
)
return my_subcfg
class SrvUsrCfgNormalizer(NormalizerNamed):
def __init__(self, pluginref, *args, **kwargs):
super(SrvUsrCfgNormalizer, self).__init__(
pluginref, *args, mapkey_lvl=-2, **kwargs
)
self.default_setters['name'] = DefaultSetterOtherKey('username')
@property
def config_path(self):
return ['config']
@property
def name_key(self):
return 'username'
def _handle_specifics_presub(self, cfg, my_subcfg, cfgpath_abs):
mail = my_subcfg.get('email', None)
if not mail:
            # if no mail address is explicitly given, check whether a mail
            # template is specified for the server; if so, use it to
            # create an address with the username as the parameter
tmp = self.get_parentcfg(
cfg, cfgpath_abs, level=3
).get('mail_template', None)
if tmp:
my_subcfg['email'] = tmp.format(
my_subcfg['username'].replace('_', '-')
)
return my_subcfg
class ActionModule(ConfigNormalizerBaseMerger):
def __init__(self, *args, **kwargs):
super(ActionModule, self).__init__(ConfigRootNormalizer(self),
*args, default_merge_vars=['gitlab_cfg_defaults'],
extra_merge_vars_ans=['extra_gitlab_config_maps'],
**kwargs
)
self._supports_check_mode = False
self._supports_async = False
@property
def my_ansvar(self):
return 'gitlab_cfg'
| [((777, 818), 'ansible_collections.smabot.base.plugins.module_utils.utils.dicting.get_subdict', 'get_subdict', (['cfg', 'tmp'], {'default_empty': '(True)'}), '(cfg, tmp, default_empty=True)\n', (788, 818), False, 'from ansible_collections.smabot.base.plugins.module_utils.utils.dicting import setdefault_none, SUBDICT_METAKEY_ANY, get_subdict\n'), ((3306, 3353), 'ansible_collections.smabot.base.plugins.module_utils.utils.dicting.setdefault_none', 'setdefault_none', (['my_subcfg', '"""config"""'], {'defval': '{}'}), "(my_subcfg, 'config', defval={})\n", (3321, 3353), False, 'from ansible_collections.smabot.base.plugins.module_utils.utils.dicting import setdefault_none, SUBDICT_METAKEY_ANY, get_subdict\n'), ((3362, 3412), 'ansible_collections.smabot.base.plugins.module_utils.utils.dicting.setdefault_none', 'setdefault_none', (['c', '"""name"""'], {'defval': 'cfgpath_abs[-1]'}), "(c, 'name', defval=cfgpath_abs[-1])\n", (3377, 3412), False, 'from ansible_collections.smabot.base.plugins.module_utils.utils.dicting import setdefault_none, SUBDICT_METAKEY_ANY, get_subdict\n'), ((5578, 5598), 'ansible.module_utils.six.iteritems', 'iteritems', (['my_subcfg'], {}), '(my_subcfg)\n', (5587, 5598), False, 'from ansible.module_utils.six import iteritems, string_types\n'), ((8287, 8320), 'ansible_collections.smabot.base.plugins.module_utils.plugins.config_normalizing.base.DefaultSetterOtherKey', 'DefaultSetterOtherKey', (['"""username"""'], {}), "('username')\n", (8308, 8320), False, 'from ansible_collections.smabot.base.plugins.module_utils.plugins.config_normalizing.base import ConfigNormalizerBaseMerger, NormalizerBase, NormalizerNamed, DefaultSetterConstant, DefaultSetterOtherKey\n'), ((1093, 1118), 'ansible_collections.smabot.base.plugins.module_utils.plugins.config_normalizing.base.DefaultSetterConstant', 'DefaultSetterConstant', (['(80)'], {}), '(80)\n', (1114, 1118), False, 'from ansible_collections.smabot.base.plugins.module_utils.plugins.config_normalizing.base import ConfigNormalizerBaseMerger, NormalizerBase, NormalizerNamed, DefaultSetterConstant, DefaultSetterOtherKey\n'), ((3607, 3641), 'ansible_collections.smabot.base.plugins.module_utils.utils.dicting.get_subdict', 'get_subdict', (['cfg', 'cfgpath_abs[:-2]'], {}), '(cfg, cfgpath_abs[:-2])\n', (3618, 3641), False, 'from ansible_collections.smabot.base.plugins.module_utils.utils.dicting import setdefault_none, SUBDICT_METAKEY_ANY, get_subdict\n'), ((7465, 7492), 'ansible_collections.smabot.base.plugins.module_utils.plugins.config_normalizing.base.DefaultSetterConstant', 'DefaultSetterConstant', (['(True)'], {}), '(True)\n', (7486, 7492), False, 'from ansible_collections.smabot.base.plugins.module_utils.plugins.config_normalizing.base import ConfigNormalizerBaseMerger, NormalizerBase, NormalizerNamed, DefaultSetterConstant, DefaultSetterOtherKey\n'), ((839, 874), 'ansible_collections.smabot.base.plugins.module_utils.utils.dicting.setdefault_none', 'setdefault_none', (['tmp', '"""members"""', '{}'], {}), "(tmp, 'members', {})\n", (854, 874), False, 'from ansible_collections.smabot.base.plugins.module_utils.utils.dicting import setdefault_none, SUBDICT_METAKEY_ANY, get_subdict\n')] |
clodonil/pipeline_aws_custom | microservices/validate/tools/dynamodb.py | 8ca517d0bad48fe528461260093f0035f606f9be | """
Integration tools for DynamoDB
"""
import boto3
import botocore
import logging
import datetime
import json
import copy
import time
import os
class DyConnect:
def __init__(self, table, region):
self.table = table
self.region = region
def connect(self):
try:
dydb = boto3.resource('dynamodb', region_name=self.region)
conn = dydb.Table(self.table)
return conn
        except Exception:
            print("Problem connecting to DynamoDB")
            # logging.CRITICAL is a level constant; logging.critical() emits the record
            logging.critical("Problem connecting to DynamoDB")
            return False
def dynamodb_save(self, dados):
conn = self.connect()
        if conn:
            return conn.put_item(Item=dados)  # surface the put_item response instead of discarding it
def dynamodb_query(self, query):
conn = self.connect()
return conn.get_item(Key=query)
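# NOTE: `dynamodb` (the table-name mapping used below) and `aws_region` are not
# defined in this module; the helpers assume they are provided elsewhere
# (e.g. a settings/config module imported at runtime).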
def get_dy_template(template_name):
newtemplate = DyConnect(dynamodb['template'], aws_region)
query = {'name': template_name}
stages = newtemplate.dynamodb_query(query)
if 'Item' in stages:
if 'details' in stages['Item']:
return stages['Item']['details']
return False
def get_sharedlibrary_release():
newtemplate = DyConnect(dynamodb['template'], aws_region)
query = {'name': 'sharedlibrary'}
version = newtemplate.dynamodb_query(query)
if 'Item' in version:
return version['Item']['release']
return False
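# --- Usage sketch (illustrative; table and region names are placeholders) ---
#   conn = DyConnect('templates-table', 'us-east-1')
#   conn.dynamodb_save({'name': 'pipeline-basic', 'details': {'stages': []}})
#   item = conn.dynamodb_query({'name': 'pipeline-basic'})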
| [((319, 370), 'boto3.resource', 'boto3.resource', (['"""dynamodb"""'], {'region_name': 'self.region'}), "('dynamodb', region_name=self.region)\n", (333, 370), False, 'import boto3\n'), ((519, 571), 'logging.CRITICAL', 'logging.CRITICAL', (['"""Problema na conexao com DynamoDB"""'], {}), "('Problema na conexao com DynamoDB')\n", (535, 571), False, 'import logging\n')] |
LZC6244/scrapy_ddiy | scrapy_ddiy/spiders/GlidedSky/glided_sky_001.py | 1bf7cdd382afd471af0bf7069b377fb364dc4730 | # -*- coding: utf-8 -*-
from scrapy import Request
from scrapy_ddiy.utils.spiders.ddiy_base import DdiyBaseSpider
class GlidedSky001Spider(DdiyBaseSpider):
name = 'glided_sky_001'
    description = 'GlidedSky crawler - Basic 1'
start_url = 'http://www.glidedsky.com/level/web/crawler-basic-1'
custom_settings = {
'COOKIES_ENABLED': True,
'DOWNLOADER_MIDDLEWARES': {
'scrapy_ddiy.spiders.GlidedSky.glided_sky_downloadmiddleware.GlidedSkyMiddleware': 589,
},
}
def start_requests(self):
yield Request(url=self.start_url, callback=self.parse)
def parse(self, response, **kwargs):
all_number = [int(i) for i in
response.xpath('//div[@class="card-body"]//div[@class="col-md-1"]/text()').getall()]
        self.logger.info(f'Sum of web numbers is {sum(all_number)}')
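# Illustrative invocation (assuming a standard Scrapy project layout):
#   scrapy crawl glided_sky_001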
| [((549, 597), 'scrapy.Request', 'Request', ([], {'url': 'self.start_url', 'callback': 'self.parse'}), '(url=self.start_url, callback=self.parse)\n', (556, 597), False, 'from scrapy import Request\n')] |
google/joint_vae | datasets/celeba/celeba_dataset.py | 984f456d1a38c6b27e23433aef241dea56f53384 | #
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Provides data for the mnist with attributes dataset.
Provide data loading utilities for an augmented version of the
MNIST dataset which contains the following attributes:
1. Location (digits are translated on a canvas and placed around
one of four locations/regions in the canvas). Each location
is a gaussian placed at four quadrants of the canvas.
2. Scale (We vary scale from 0.4 to 1.0), with two gaussians
placed at 0.5 +- 0.1 and 0.9 +- 0.1 repsectively.
3. Orientation: we vary orientation from -90 to +90 degrees,
sampling actual values from gaussians at +30 +- 10 and
-30 +-10. On a third of the occasions we dont orient the
digit at all which means a rotation of 0 degrees.
The original data after transformations is binarized as per the
procedure described in the following paper:
Salakhutdinov, Ruslan, and Iain Murray. 2008. ``On the Quantitative Analysis of
Deep Belief Networks.'' In Proceedings of the 25th International Conference on
Machine Learning, 872-79.
Author: vrama@
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tensorflow as tf
from tensorflow.contrib.slim.python.slim.data import dataset
from tensorflow.contrib.slim.python.slim.data import tfexample_decoder
from datasets.celeba.image_decoder import ImageDecodeProcess
# Only provides option to load the binarized version of the dataset.
_FILE_PATTERN = '%s-*'
_SPLIT_TYPE = 'iid'
_DATASET_DIR = '/srv/share/datasets/celeba_for_tf_ig'
_SPLITS_TO_SIZES = {'train': 162770, 'val': 19867, 'test': 19962}
_ITEMS_TO_DESCRIPTIONS = {
'image': 'A [218 x 178 x 3] RGB image.',
'labels': 'Attributes corresponding to the image.',
}
_NUM_CLASSES_PER_ATTRIBUTE = tuple([2]*18)
def get_split(split_name='train',
split_type="iid",
dataset_dir=None,
image_length=64,
num_classes_per_attribute=None):
"""Gets a dataset tuple with instructions for reading 2D shapes data.
Args:
split_name: A train/test split name.
split_type: str, type of split being loaded "iid" or "comp"
dataset_dir: The base directory of the dataset sources.
num_classes_per_attribute: The number of labels for the classfication
problem corresponding to each attribute. For example, if the first
attribute is "shape" and there are three possible shapes, then
then provide a value 3 in the first index, and so on.
Returns:
A `Dataset` namedtuple.
metadata: A dictionary with some metadata about the dataset we just
constructed.
Raises:
ValueError: if `split_name` is not a valid train/test split.
"""
if split_name not in _SPLITS_TO_SIZES:
raise ValueError('split name %s was not recognized.' % split_name)
if split_type is not "iid":
raise ValueError("Only IID split available for CelebA.")
if num_classes_per_attribute is None:
num_classes_per_attribute = _NUM_CLASSES_PER_ATTRIBUTE
if dataset_dir is None or dataset_dir == '':
dataset_dir = _DATASET_DIR
# Load attribute label map file.
label_map_json = os.path.join(dataset_dir,
'attribute_label_map.json')
file_pattern = os.path.join(dataset_dir, _FILE_PATTERN % split_name)
tf.logging.info('Loading from %s file.' % (file_pattern))
keys_to_features = {
'image/encoded': tf.FixedLenFeature((), tf.string, default_value=''),
'image/format': tf.FixedLenFeature((), tf.string, default_value='raw'),
'image/labels': tf.FixedLenFeature([len(num_classes_per_attribute)], tf.int64),
}
# TODO(vrama): See
# https://github.com/tensorflow/tensorflow/blob/master/tensorflow/contrib/slim/python/slim/data/tfexample_decoder.py#L270
# For where changes would need to be made to preprocess the images which
# get loaded.
items_to_handlers = {
'image': ImageDecodeProcess(shape=[218, 178, 3], image_length=64),
'labels': tfexample_decoder.Tensor('image/labels'),
}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
metadata = {
'num_classes_per_attribute': num_classes_per_attribute,
'split_type': _SPLIT_TYPE,
'label_map_json': label_map_json,
}
return dataset.Dataset(
data_sources=file_pattern,
reader=tf.TFRecordReader,
decoder=decoder,
num_samples=_SPLITS_TO_SIZES[split_name],
items_to_descriptions=_ITEMS_TO_DESCRIPTIONS), metadata
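# --- Usage sketch (illustrative; the dataset path is a placeholder) ---
#   dataset, metadata = get_split('train', dataset_dir='/path/to/celeba_for_tf_ig')
#   # `dataset` can then be fed to a slim DatasetDataProvider to read batches.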
| [((3721, 3774), 'os.path.join', 'os.path.join', (['dataset_dir', '"""attribute_label_map.json"""'], {}), "(dataset_dir, 'attribute_label_map.json')\n", (3733, 3774), False, 'import os\n'), ((3835, 3888), 'os.path.join', 'os.path.join', (['dataset_dir', '(_FILE_PATTERN % split_name)'], {}), '(dataset_dir, _FILE_PATTERN % split_name)\n', (3847, 3888), False, 'import os\n'), ((3891, 3946), 'tensorflow.logging.info', 'tf.logging.info', (["('Loading from %s file.' % file_pattern)"], {}), "('Loading from %s file.' % file_pattern)\n", (3906, 3946), True, 'import tensorflow as tf\n'), ((4626, 4697), 'tensorflow.contrib.slim.python.slim.data.tfexample_decoder.TFExampleDecoder', 'tfexample_decoder.TFExampleDecoder', (['keys_to_features', 'items_to_handlers'], {}), '(keys_to_features, items_to_handlers)\n', (4660, 4697), False, 'from tensorflow.contrib.slim.python.slim.data import tfexample_decoder\n'), ((3996, 4047), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['()', 'tf.string'], {'default_value': '""""""'}), "((), tf.string, default_value='')\n", (4014, 4047), True, 'import tensorflow as tf\n'), ((4071, 4125), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['()', 'tf.string'], {'default_value': '"""raw"""'}), "((), tf.string, default_value='raw')\n", (4089, 4125), True, 'import tensorflow as tf\n'), ((4493, 4549), 'datasets.celeba.image_decoder.ImageDecodeProcess', 'ImageDecodeProcess', ([], {'shape': '[218, 178, 3]', 'image_length': '(64)'}), '(shape=[218, 178, 3], image_length=64)\n', (4511, 4549), False, 'from datasets.celeba.image_decoder import ImageDecodeProcess\n'), ((4567, 4607), 'tensorflow.contrib.slim.python.slim.data.tfexample_decoder.Tensor', 'tfexample_decoder.Tensor', (['"""image/labels"""'], {}), "('image/labels')\n", (4591, 4607), False, 'from tensorflow.contrib.slim.python.slim.data import tfexample_decoder\n'), ((4910, 5091), 'tensorflow.contrib.slim.python.slim.data.dataset.Dataset', 'dataset.Dataset', ([], {'data_sources': 'file_pattern', 'reader': 'tf.TFRecordReader', 'decoder': 'decoder', 'num_samples': '_SPLITS_TO_SIZES[split_name]', 'items_to_descriptions': '_ITEMS_TO_DESCRIPTIONS'}), '(data_sources=file_pattern, reader=tf.TFRecordReader,\n decoder=decoder, num_samples=_SPLITS_TO_SIZES[split_name],\n items_to_descriptions=_ITEMS_TO_DESCRIPTIONS)\n', (4925, 5091), False, 'from tensorflow.contrib.slim.python.slim.data import dataset\n')] |
kan-s0/JORLDY | jorldy/manager/log_manager.py | 44989cf415196604a1ad0383b34085dee6bb1c51 | import os
import datetime, time
import imageio
from pygifsicle import optimize
from torch.utils.tensorboard import SummaryWriter
class LogManager:
def __init__(self, env, id, experiment=None):
self.id = id
now = datetime.datetime.now().strftime("%Y%m%d%H%M%S%f")
self.path = (
f"./logs/{experiment}/{env}/{id}/{now}/"
if experiment
else f"./logs/{env}/{id}/{now}/"
)
self.writer = SummaryWriter(self.path)
self.stamp = time.time()
def write(self, scalar_dict, frames, step):
for key, value in scalar_dict.items():
self.writer.add_scalar(f"{self.id}/" + key, value, step)
self.writer.add_scalar("all/" + key, value, step)
if "score" in key:
time_delta = int(time.time() - self.stamp)
self.writer.add_scalar(f"{self.id}/{key}_per_time", value, time_delta)
self.writer.add_scalar(f"all/{key}_per_time", value, time_delta)
if len(frames) > 0:
score = scalar_dict["score"]
write_path = os.path.join(self.path, f"{step:010d}_{score}.gif")
imageio.mimwrite(write_path, frames, fps=60)
optimize(write_path)
print(f"...Record episode to {write_path}...")
| [((464, 488), 'torch.utils.tensorboard.SummaryWriter', 'SummaryWriter', (['self.path'], {}), '(self.path)\n', (477, 488), False, 'from torch.utils.tensorboard import SummaryWriter\n'), ((510, 521), 'time.time', 'time.time', ([], {}), '()\n', (519, 521), False, 'import datetime, time\n'), ((1102, 1153), 'os.path.join', 'os.path.join', (['self.path', 'f"""{step:010d}_{score}.gif"""'], {}), "(self.path, f'{step:010d}_{score}.gif')\n", (1114, 1153), False, 'import os\n'), ((1166, 1210), 'imageio.mimwrite', 'imageio.mimwrite', (['write_path', 'frames'], {'fps': '(60)'}), '(write_path, frames, fps=60)\n', (1182, 1210), False, 'import imageio\n'), ((1223, 1243), 'pygifsicle.optimize', 'optimize', (['write_path'], {}), '(write_path)\n', (1231, 1243), False, 'from pygifsicle import optimize\n'), ((235, 258), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (256, 258), False, 'import datetime, time\n'), ((813, 824), 'time.time', 'time.time', ([], {}), '()\n', (822, 824), False, 'import datetime, time\n')] |
multi-service-fabric/element-manager | lib/SeparateDriver/ASRDriverParts/UNIInterface.py | e550d1b5ec9419f1fb3eb6e058ce46b57c92ee2f | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright(c) 2019 Nippon Telegraph and Telephone Corporation
# Filename: ASRDriverParts/UNIInterface.py
'''
Parts module for ASR driver UNI interface configuration
'''
import GlobalModule
from EmCommonLog import decorater_log
from ASRDriverParts.InterfaceBase import InterfaceBase
class UNIInterface(InterfaceBase):
'''
    Parts class for ASR driver UNI interface configuration
'''
@decorater_log
def __init__(self,
vrf_name=None,
if_name=None,
vlan_id=None,
ip_address=None,
subnet_mask=None,
vip_ip_address=None,
hsrp_id=None,
mtu=None,
is_active=True):
'''
        Constructor
'''
super(UNIInterface, self).__init__(vrf_name=vrf_name,
if_name=if_name)
self.vlan_id = vlan_id
self.ip_address = ip_address
self.subnet_mask = subnet_mask
self.vip_ip_address = vip_ip_address
self.hsrp_id = hsrp_id
self.mtu = mtu
self.is_active = is_active
@decorater_log
def output_add_command(self):
'''
        Outputs the command lines that add this configuration.
'''
parame = self._get_param()
self._interface_common_start()
self._append_add_command("standby version 2")
comm_txt = "standby %(hsrp_id)s ip %(vip_ip_address)s"
self._append_add_command(comm_txt, parame)
if self.is_active:
comm_txt = "standby %(hsrp_id)s priority 105"
self._append_add_command(comm_txt, parame)
comm_txt = "standby %(hsrp_id)s preempt"
self._append_add_command(comm_txt, parame)
comm_txt = "ip mtu %(mtu)s"
self._append_add_command(comm_txt, parame)
self._interface_common_end()
GlobalModule.EM_LOGGER.debug(
"uni if command = %s" % (self._tmp_add_command,))
return self._tmp_add_command
@decorater_log
def _get_param(self):
'''
        Acquires the parameters from the attributes (dict type).
'''
tmp_param = super(UNIInterface, self)._get_param()
tmp_param.update(
{
"vlan_id": self.vlan_id,
"ip_address": self.ip_address,
"subnet_mask": self.subnet_mask,
"vip_ip_address": self.vip_ip_address,
"hsrp_id": self.hsrp_id,
"mtu": self.mtu,
}
)
return tmp_param
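# --- Usage sketch (illustrative; all addresses and IDs are placeholders) ---
#   uni_if = UNIInterface(vrf_name='vrf1', if_name='GigabitEthernet0/0/1',
#                         vlan_id=100, ip_address='192.0.2.1',
#                         subnet_mask='255.255.255.0',
#                         vip_ip_address='192.0.2.254', hsrp_id=1,
#                         mtu=1500, is_active=True)
#   commands = uni_if.output_add_command()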
| [((1985, 2063), 'GlobalModule.EM_LOGGER.debug', 'GlobalModule.EM_LOGGER.debug', (["('uni if command = %s' % (self._tmp_add_command,))"], {}), "('uni if command = %s' % (self._tmp_add_command,))\n", (2013, 2063), False, 'import GlobalModule\n')] |
AnthonyQuantum/open_model_zoo | tools/accuracy_checker/accuracy_checker/annotation_converters/mnist.py | 7d235755e2d17f6186b11243a169966e4f05385a | """
Copyright (c) 2019 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import numpy as np
from ..config import PathField, BoolField
from ..representation import ClassificationAnnotation
from ..utils import read_csv, check_file_existence, read_json
from .format_converter import BaseFormatConverter, ConverterReturn
try:
from PIL import Image
except ImportError:
Image = None
class MNISTCSVFormatConverter(BaseFormatConverter):
"""
MNIST CSV dataset converter. All annotation converters should be derived from BaseFormatConverter class.
"""
# register name for this converter
# this name will be used for converter class look up
__provider__ = 'mnist_csv'
annotation_types = (ClassificationAnnotation, )
@classmethod
def parameters(cls):
configuration_parameters = super().parameters()
configuration_parameters.update({
'annotation_file': PathField(description="Path to csv file which contain dataset."),
'convert_images': BoolField(
optional=True,
default=False,
description="Allows to convert images from pickle file to user specified directory."
),
'converted_images_dir': PathField(
optional=True, is_directory=True, check_exists=False, description="Path to converted images location."
),
'dataset_meta_file': PathField(
description='path to json file with dataset meta (e.g. label_map, color_encoding)', optional=True
)
})
return configuration_parameters
def configure(self):
"""
This method is responsible for obtaining the necessary parameters
for converting from the command line or config.
"""
self.test_csv_file = self.get_value_from_config('annotation_file')
self.converted_images_dir = self.get_value_from_config('converted_images_dir')
self.convert_images = self.get_value_from_config('convert_images')
if self.convert_images and not self.converted_images_dir:
self.converted_images_dir = self.test_csv_file.parent / 'converted_images'
if not self.converted_images_dir.exists():
self.converted_images_dir.mkdir(parents=True)
if self.convert_images and Image is None:
raise ValueError(
"conversion mnist images requires Pillow installation, please install it before usage"
)
self.dataset_meta = self.get_value_from_config('dataset_meta_file')
def convert(self, check_content=False, progress_callback=None, progress_interval=100, **kwargs):
"""
This method is executed automatically when convert.py is started.
All arguments are automatically got from command line arguments or config file in method configure
Returns:
annotations: list of annotation representation objects.
meta: dictionary with additional dataset level metadata.
"""
annotations = []
check_images = check_content and not self.convert_images
meta = self.generate_meta()
labels_to_id = meta['label_map']
content_errors = None
if check_content:
self.converted_images_dir = self.converted_images_dir or self.test_csv_file.parent / 'converted_images'
if self.converted_images_dir and check_content:
if not self.converted_images_dir.exists():
content_errors = ['{}: does not exist'.format(self.converted_images_dir)]
check_images = False
# read original dataset annotation
annotation_table = read_csv(self.test_csv_file)
num_iterations = len(annotation_table)
for index, annotation in enumerate(annotation_table):
identifier = '{}.png'.format(index)
label = labels_to_id.get(annotation['label'], int(annotation['label']))
if self.convert_images:
image = Image.fromarray(self.convert_image(annotation))
image = image.convert("L")
image.save(str(self.converted_images_dir / identifier))
annotations.append(ClassificationAnnotation(identifier, label))
if check_images:
if not check_file_existence(self.converted_images_dir / identifier):
# add error to errors list if file not found
content_errors.append('{}: does not exist'.format(self.converted_images_dir / identifier))
if progress_callback is not None and index % progress_interval == 0:
progress_callback(index / num_iterations * 100)
return ConverterReturn(annotations, meta, content_errors)
@staticmethod
def convert_image(features):
image = np.zeros((28, 28))
column_template = '{}x{}'
for x in range(28):
for y in range(28):
pixel = int(features[column_template.format(x+1, y+1)])
image[x, y] = pixel
return image
def generate_meta(self):
if not self.dataset_meta:
return {'label_map': {str(i): i for i in range(10)}}
dataset_meta = read_json(self.dataset_meta)
label_map = dataset_meta.get('label_map')
if 'labels' in dataset_meta:
label_map = dict(enumerate(dataset_meta['labels']))
dataset_meta['label_map'] = label_map or {str(i): i for i in range(10)}
return dataset_meta
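# --- Illustrative accuracy_checker configuration for this converter ---
# Field values are placeholders; 'mnist_csv' matches __provider__ above.
#
#   annotation_conversion:
#     converter: mnist_csv
#     annotation_file: mnist_test.csv
#     convert_images: True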
| [((5319, 5337), 'numpy.zeros', 'np.zeros', (['(28, 28)'], {}), '((28, 28))\n', (5327, 5337), True, 'import numpy as np\n')] |
davidbrownell/Common_EnvironmentEx | Libraries/Python/wxGlade/v0.9,5/wxGlade-0.9.5-py3.6.egg/wxglade/bugdialog.py | 9e20b79b4de0cb472f65ac08b3de83f9ed8e2ca3 | """\
Dialog to show details of internal errors.
@copyright: 2014-2016 Carsten Grohmann
@copyright: 2017 Dietmar Schwertberger
@license: MIT (see LICENSE.txt) - THIS PROGRAM COMES WITH NO WARRANTY
"""
import bugdialog_ui
import config
import log
import logging
import sys
import wx
class BugReport(bugdialog_ui.UIBugDialog):
"Dialog to show details of internal errors"
_disabled = False # Flag to prevent dialog popups during test runs.
def __init__(self):
self._disabled = getattr(sys, '_called_from_test', False)
bugdialog_ui.UIBugDialog.__init__(self, None, -1, "")
def SetContent(self, msg, exc):
"""Prepare given exception information and show it as dialog content.
msg: Short description of the action that has raised this error
exc: Caught exception (Exception instance)
see: SetContentEI()"""
if self._disabled:
return
exc_type = exc.__class__.__name__
exc_msg = str(exc)
header = self.st_header.GetLabel() % {'action': msg}
log.exception_orig(header)
self._fill_dialog(exc_msg, exc_type, header)
def SetContentEI(self, exc_type, exc_value, exc_tb, msg=_('An internal error occurred')):
"""Format given exception and add details to dialog.
exc_type: Exception type
exc_value: Exception value
exc_tb: Exception traceback
msg: Short description of the exception
see: SetContent()"""
if self._disabled:
return
# don't use exception() because it overwrites exc_info with 1
logging.error(msg, exc_info=(exc_type, exc_value, exc_tb))
self._fill_dialog(msg, exc_type, _('An internal error occurred'))
def _fill_dialog(self, exc_msg, exc_type, header):
"""Fill the bug dialog
exc_msg: Short exception summary
exc_type: Exception type as string
header: Initial message
        see: SetContent(), SetContentEI()"""
details = log.getBufferAsString()
if not exc_msg:
exc_msg = _('No summary available')
summary = self.st_summary.GetLabel() % { 'exc_type':exc_type, 'exc_msg':exc_msg }
self.st_header.SetLabel(header)
self.st_summary.SetLabel(summary)
self.tc_details.SetValue(details)
howto = self.tc_howto_report.GetValue()
howto = howto % {'log_file': config.log_file}
self.tc_howto_report.SetValue(howto)
def OnCopy(self, event):
"Copy the dialog content to the clipboard"
text = self.tc_details.GetValue()
if not text:
return
data = wx.TextDataObject(text)
if wx.TheClipboard.Open():
wx.TheClipboard.SetData(data)
wx.TheClipboard.Close()
else:
wx.MessageBox("Unable to open the clipboard", "Error")
def ShowModal(self, **kwargs):
if getattr(sys, '_called_from_test', False):
return wx.ID_OK
super(BugReport, self).ShowModal(**kwargs)
def Show(msg, exc):
"""Wrapper for creating a L{BugReport} dialog and show the details of the given exception instance.
msg: Short description of the action that has raised this error
exc: Caught exception
    see: ShowEI(), BugReport.SetContent()"""
dialog = BugReport()
dialog.SetContent(msg, exc)
dialog.ShowModal()
dialog.Destroy()
def ShowEI(exc_type, exc_value, exc_tb, msg=None):
"""Wrapper for creating a L{BugReport} dialog and show the given exception details.
exc_type: Exception type
exc_value: Exception value
exc_tb: Exception traceback
msg: Short description of the exception
    see: Show(), BugReport.SetContent()"""
dialog = BugReport()
dialog.SetContentEI(exc_type, exc_value, exc_tb, msg)
dialog.ShowModal()
dialog.Destroy()
def ShowEnvironmentError(msg, inst):
"""Show EnvironmentError exceptions detailed and user-friendly
msg: Error message
inst: The caught exception"""
details = {'msg':msg, 'type':inst.__class__.__name__}
if inst.filename:
details['filename'] = _('Filename: %s') % inst.filename
if inst.errno is not None and inst.strerror is not None:
details['error'] = '%s - %s' % (inst.errno, inst.strerror)
else:
details['error'] = str(inst.args)
text = _("""%(msg)s
Error type: %(type)s
Error code: %(error)s
%(filename)s""") % details
wx.MessageBox(text, _('Error'), wx.OK | wx.CENTRE | wx.ICON_ERROR)
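# --- Usage sketch (illustrative) ---
#   try:
#       risky_operation()          # placeholder for any action that may fail
#   except Exception as exc:
#       Show(_('Loading the design'), exc)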
| [((550, 603), 'bugdialog_ui.UIBugDialog.__init__', 'bugdialog_ui.UIBugDialog.__init__', (['self', 'None', '(-1)', '""""""'], {}), "(self, None, -1, '')\n", (583, 603), False, 'import bugdialog_ui\n'), ((1059, 1085), 'log.exception_orig', 'log.exception_orig', (['header'], {}), '(header)\n', (1077, 1085), False, 'import log\n'), ((1604, 1662), 'logging.error', 'logging.error', (['msg'], {'exc_info': '(exc_type, exc_value, exc_tb)'}), '(msg, exc_info=(exc_type, exc_value, exc_tb))\n', (1617, 1662), False, 'import logging\n'), ((2007, 2030), 'log.getBufferAsString', 'log.getBufferAsString', ([], {}), '()\n', (2028, 2030), False, 'import log\n'), ((2645, 2668), 'wx.TextDataObject', 'wx.TextDataObject', (['text'], {}), '(text)\n', (2662, 2668), False, 'import wx\n'), ((2680, 2702), 'wx.TheClipboard.Open', 'wx.TheClipboard.Open', ([], {}), '()\n', (2700, 2702), False, 'import wx\n'), ((2716, 2745), 'wx.TheClipboard.SetData', 'wx.TheClipboard.SetData', (['data'], {}), '(data)\n', (2739, 2745), False, 'import wx\n'), ((2758, 2781), 'wx.TheClipboard.Close', 'wx.TheClipboard.Close', ([], {}), '()\n', (2779, 2781), False, 'import wx\n'), ((2808, 2862), 'wx.MessageBox', 'wx.MessageBox', (['"""Unable to open the clipboard"""', '"""Error"""'], {}), "('Unable to open the clipboard', 'Error')\n", (2821, 2862), False, 'import wx\n')] |
Neelamegam2000/QRcode-for-license | core/views.py | a6d4c9655c5ba52b24c1ea737797557f06e0fcbf | from django.shortcuts import render, redirect
from django.conf import settings
from django.core.files.storage import FileSystemStorage,default_storage
from django.core.mail import send_mail, EmailMessage
from core.models import Document
from core.forms import DocumentForm
from django.contrib import messages
import os
import pyqrcode
import png
import random
import base64
import cv2
import numpy as np
import pyzbar.pyzbar as pyzbar
def home(request):
documents= Document.objects.all()
return render(request, 'home.html', { 'documents': documents })
"""def simple_upload(request):
if request.method == 'POST' and request.FILES['myfile']:
myfile = request.FILES['myfile']
fs = FileSystemStorage()
filename = fs.save(myfile.name, myfile)
uploaded_file_url = fs.url(filename)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
media_path = os.path.join(BASE_DIR,'media')
full_path=os.path.join(media_path,myfile.name)
qr=pyqrcode.create(uploaded_file_url)
filename_before=filename.rsplit(".")
filename1=filename_before[0]+".png"
s=qr.png(filename1,scale=6)
'''from fpdf import FPDF
pdf=FPDF()
pdf.add_page()
pdf.image(filename1,x=50,y=None,w=60,h=60,type="",link=uploaded_file_url)'''
return render(request, 'simple_upload.html', {
'uploaded_file_url': uploaded_file_url
})
return render(request, 'simple_upload.html')"""
def model_form_upload(request):
id=""
msg=""
if request.method == 'POST':
form = DocumentForm(request.POST, request.FILES,request.POST)
if form.is_valid():
form.save()
email=form.cleaned_data['Email']
document_count=Document.objects.values_list('document').count()
document_last=Document.objects.values_list('document')[document_count-1]
document_name=document_last[0]
print(email)
t=Document.objects.last()
num_list=['0','1','2','3','4','5','6','7','8','9']
password1=""
for i in range(0,8):
password1=password1+random.choice(num_list)
t.password=password1
print(type(document_name))
document_name1=document_name.encode('ascii')
            ax = base64.b64encode(document_name1).decode('ascii')  # decode the bytes directly instead of slicing the repr
t.file_url=ax
print(ax)
t.save()
qr=pyqrcode.create(ax)
filename=document_name.rsplit(".")
filename1=filename[0].split("/")
filename2=filename1[1]+".png"
qr.png(filename2,scale=6)
"""mail=EmailMessage('QR',password1,'[email protected]',[email])
#mail.attach(filename2,filename2.content_type)
mail.send()"""
subject = 'QRcode scanner for license'
message = password1
email_from = settings.EMAIL_HOST_USER
recipient_list = [email, ]
mail=EmailMessage( subject, message, email_from, recipient_list )
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
mail.attach_file(os.path.join(BASE_DIR,filename2))
mail.send()
msg="your successfully uploaded"
return redirect('model_form_upload')
else:
form = DocumentForm()
return render(request, 'model_form_upload.html', {'form': form,'msg':msg})
def mypass(request):
m=""
if(request.POST.get("pswd")==request.POST.get("pswd3")):
user_data=Document.objects.filter(Email=request.POST.get("email"),password=request.POST.get("old_pswd")).update(password=request.POST.get("pswd"))
user_data1=Document.objects.filter(Email=request.POST.get("email"),password=request.POST.get("pswd"))
"""if(len_user_data==1):
userdata.password=request.POST.get("pswd")
return render(request,'mypass.html',{u:"you have change the password successfully"})
else:"""
c=0
if(user_data1):
subject = 'QRcode scanner for license'
message = "Password has succesfully changed"+" "+request.POST.get("pswd")
email_from = settings.EMAIL_HOST_USER
recipient_list = [request.POST.get("email"), ]
mail=EmailMessage( subject, message, email_from, recipient_list )
mail.send()
c=1
m="your password is changed succesfully"
elif(len(Document.objects.filter(Email=request.POST.get("email"),password=request.POST.get("old_pswd")))==0 and request.method=="POST"):
m="your email or password is incorrect"
else:
m=""
print(m)
return render(request,'mypass.html',{"m":m})
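# Note (illustrative): the QR payload generated above is the base64-encoded
# document path stored in `file_url`; the scanner view below matches the raw
# payload against that column rather than decoding it, e.g.
#   Document.objects.filter(file_url=qr_payload)
# where `qr_payload` stands for the string read from the QR code.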
def user_req(request):
if("scanner" in request.POST and request.method=="POST"):
cap = cv2.VideoCapture(0+cv2.CAP_DSHOW)
font = cv2.FONT_HERSHEY_PLAIN
decodedObjects=[]
while decodedObjects==[]:
_, frame = cap.read()
decodedObjects = pyzbar.decode(frame)
for obj in decodedObjects:
points = obj.polygon
(x,y,w,h) = obj.rect
pts = np.array(points, np.int32)
pts = pts.reshape((-1, 1, 2))
cv2.polylines(frame, [pts], True, (0, 255, 0), 3)
cv2.putText(frame, str(obj.data), (50, 50), font, 2,
(255, 0, 0), 3)
id =obj.data.decode("utf-8")
cv2.imshow("QR Reader", frame)
key = cv2.waitKey(10) & 0xFF
if decodedObjects!=[] :
cv2.destroyAllWindows()
return render(request,"user_req.html",{"id":id})
if('proceed' in request.POST and request.method=="POST"):
userdata=Document.objects.filter(file_url=request.POST.get("id1")).filter(password=request.POST.get("password1"))
return render(request,"user_req.html",{"userdata":userdata})
return render(request,"user_req.html",)
def user(request):
return render(request,"user.html",)
def forget_pass(request):
msg=""
if(request.method=="POST"):
num_list=['0','1','2','3','4','5','6','7','8','9']
password1=""
for i in range(0,8):
password1=password1+random.choice(num_list)
user_data=Document.objects.filter(Email=request.POST.get("email")).update(password=password1)
subject = 'QRcode scanner for license Forget password'
message = "Password has succesfully changed"+" "+password1
email_from = settings.EMAIL_HOST_USER
recipient_list = [request.POST.get("email"), ]
mail=EmailMessage( subject, message, email_from, recipient_list )
mail.send()
if(user_data>0):
msg="your password is changed succesfully and mail sent"
elif(user_data==0):
msg="your email is incorrect or not found"
return render(request,"forget_pass.html",{"msg":msg})
def qrcode_miss(request):
msg=""
if(request.method=='POST' and Document.objects.filter(Email=request.POST.get('email'),password=request.POST.get('password1'))):
user_data=Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('password1'))
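        # values_list() (without flat=True) yields tuples, so the stored file
        # path is at user_data[0][0].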
m=user_data[0][0]
p=m.split('/')
print(p)
t=p[1]
print(t)
subject = 'QRcode scanner for license'
message = "resend"
email_from = settings.EMAIL_HOST_USER
recipient_list = [request.POST.get('email'),]
mail=EmailMessage( subject, message, email_from, recipient_list )
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
k=os.path.join(BASE_DIR,t)
print(k)
mail.attach_file(k)
mail.send()
msg="your qrcode is sent to your email"
    elif(request.method=='POST' and Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('password1')).count()==0):
msg="your email or password is incorrect"
return render(request,'qrcode_miss.html',{"msg":msg})
| [((475, 497), 'core.models.Document.objects.all', 'Document.objects.all', ([], {}), '()\n', (495, 497), False, 'from core.models import Document\n'), ((509, 563), 'django.shortcuts.render', 'render', (['request', '"""home.html"""', "{'documents': documents}"], {}), "(request, 'home.html', {'documents': documents})\n", (515, 563), False, 'from django.shortcuts import render, redirect\n'), ((3512, 3581), 'django.shortcuts.render', 'render', (['request', '"""model_form_upload.html"""', "{'form': form, 'msg': msg}"], {}), "(request, 'model_form_upload.html', {'form': form, 'msg': msg})\n", (3518, 3581), False, 'from django.shortcuts import render, redirect\n'), ((4789, 4829), 'django.shortcuts.render', 'render', (['request', '"""mypass.html"""', "{'m': m}"], {}), "(request, 'mypass.html', {'m': m})\n", (4795, 4829), False, 'from django.shortcuts import render, redirect\n'), ((6037, 6069), 'django.shortcuts.render', 'render', (['request', '"""user_req.html"""'], {}), "(request, 'user_req.html')\n", (6043, 6069), False, 'from django.shortcuts import render, redirect\n'), ((6101, 6129), 'django.shortcuts.render', 'render', (['request', '"""user.html"""'], {}), "(request, 'user.html')\n", (6107, 6129), False, 'from django.shortcuts import render, redirect\n'), ((6999, 7048), 'django.shortcuts.render', 'render', (['request', '"""forget_pass.html"""', "{'msg': msg}"], {}), "(request, 'forget_pass.html', {'msg': msg})\n", (7005, 7048), False, 'from django.shortcuts import render, redirect\n'), ((8175, 8224), 'django.shortcuts.render', 'render', (['request', '"""qrcode_miss.html"""', "{'msg': msg}"], {}), "(request, 'qrcode_miss.html', {'msg': msg})\n", (8181, 8224), False, 'from django.shortcuts import render, redirect\n'), ((1625, 1680), 'core.forms.DocumentForm', 'DocumentForm', (['request.POST', 'request.FILES', 'request.POST'], {}), '(request.POST, request.FILES, request.POST)\n', (1637, 1680), False, 'from core.forms import DocumentForm\n'), ((3484, 3498), 'core.forms.DocumentForm', 'DocumentForm', ([], {}), '()\n', (3496, 3498), False, 'from core.forms import DocumentForm\n'), ((4402, 4460), 'django.core.mail.EmailMessage', 'EmailMessage', (['subject', 'message', 'email_from', 'recipient_list'], {}), '(subject, message, email_from, recipient_list)\n', (4414, 4460), False, 'from django.core.mail import send_mail, EmailMessage\n'), ((4927, 4962), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0 + cv2.CAP_DSHOW)'], {}), '(0 + cv2.CAP_DSHOW)\n', (4943, 4962), False, 'import cv2\n'), ((5576, 5606), 'cv2.imshow', 'cv2.imshow', (['"""QR Reader"""', 'frame'], {}), "('QR Reader', frame)\n", (5586, 5606), False, 'import cv2\n'), ((5729, 5773), 'django.shortcuts.render', 'render', (['request', '"""user_req.html"""', "{'id': id}"], {}), "(request, 'user_req.html', {'id': id})\n", (5735, 5773), False, 'from django.shortcuts import render, redirect\n'), ((5972, 6028), 'django.shortcuts.render', 'render', (['request', '"""user_req.html"""', "{'userdata': userdata}"], {}), "(request, 'user_req.html', {'userdata': userdata})\n", (5978, 6028), False, 'from django.shortcuts import render, redirect\n'), ((6723, 6781), 'django.core.mail.EmailMessage', 'EmailMessage', (['subject', 'message', 'email_from', 'recipient_list'], {}), '(subject, message, email_from, recipient_list)\n', (6735, 6781), False, 'from django.core.mail import send_mail, EmailMessage\n'), ((7646, 7704), 'django.core.mail.EmailMessage', 'EmailMessage', (['subject', 'message', 'email_from', 'recipient_list'], {}), '(subject, message, email_from, recipient_list)\n', (7658, 7704), False, 'from django.core.mail import send_mail, EmailMessage\n'), ((7801, 7826), 'os.path.join', 'os.path.join', (['BASE_DIR', 't'], {}), '(BASE_DIR, t)\n', (7813, 7826), False, 'import os\n'), ((2027, 2050), 'core.models.Document.objects.last', 'Document.objects.last', ([], {}), '()\n', (2048, 2050), False, 'from core.models import Document\n'), ((2556, 2575), 'pyqrcode.create', 'pyqrcode.create', (['ax'], {}), '(ax)\n', (2571, 2575), False, 'import pyqrcode\n'), ((3130, 3188), 'django.core.mail.EmailMessage', 'EmailMessage', (['subject', 'message', 'email_from', 'recipient_list'], {}), '(subject, message, email_from, recipient_list)\n', (3142, 3188), False, 'from django.core.mail import send_mail, EmailMessage\n'), ((3429, 3458), 'django.shortcuts.redirect', 'redirect', (['"""model_form_upload"""'], {}), "('model_form_upload')\n", (3437, 3458), False, 'from django.shortcuts import render, redirect\n'), ((5123, 5143), 'pyzbar.pyzbar.decode', 'pyzbar.decode', (['frame'], {}), '(frame)\n', (5136, 5143), True, 'import pyzbar.pyzbar as pyzbar\n'), ((5621, 5636), 'cv2.waitKey', 'cv2.waitKey', (['(10)'], {}), '(10)\n', (5632, 5636), False, 'import cv2\n'), ((5688, 5711), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (5709, 5711), False, 'import cv2\n'), ((1885, 1925), 'core.models.Document.objects.values_list', 'Document.objects.values_list', (['"""document"""'], {}), "('document')\n", (1913, 1925), False, 'from core.models import Document\n'), ((2400, 2432), 'base64.b64encode', 'base64.b64encode', (['document_name1'], {}), '(document_name1)\n', (2416, 2432), False, 'import base64\n'), ((3306, 3339), 'os.path.join', 'os.path.join', (['BASE_DIR', 'filename2'], {}), '(BASE_DIR, filename2)\n', (3318, 3339), False, 'import os\n'), ((5279, 5305), 'numpy.array', 'np.array', (['points', 'np.int32'], {}), '(points, np.int32)\n', (5287, 5305), True, 'import numpy as np\n'), ((5368, 5417), 'cv2.polylines', 'cv2.polylines', (['frame', '[pts]', '(True)', '(0, 255, 0)', '(3)'], {}), '(frame, [pts], True, (0, 255, 0), 3)\n', (5381, 5417), False, 'import cv2\n'), ((6348, 6371), 'random.choice', 'random.choice', (['num_list'], {}), '(num_list)\n', (6361, 6371), False, 'import random\n'), ((7237, 7277), 'core.models.Document.objects.values_list', 'Document.objects.values_list', (['"""document"""'], {}), "('document')\n", (7265, 7277), False, 'from core.models import Document\n'), ((7761, 7786), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (7776, 7786), False, 'import os\n'), ((1809, 1849), 'core.models.Document.objects.values_list', 'Document.objects.values_list', (['"""document"""'], {}), "('document')\n", (1837, 1849), False, 'from core.models import Document\n'), ((2211, 2234), 'random.choice', 'random.choice', (['num_list'], {}), '(num_list)\n', (2224, 2234), False, 'import random\n'), ((3249, 3274), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (3264, 3274), False, 'import os\n'), ((7980, 8020), 'core.models.Document.objects.values_list', 'Document.objects.values_list', (['"""document"""'], {}), "('document')\n", (8008, 8020), False, 'from core.models import Document\n')] |
nonprofittechy/docassemble-MACourts | docassemble/MACourts/__init__.py | 6035393a09cff3e8a371f19b79d1cde3a60691c1 | __version__ = '0.0.58.2'
| [] |
Meat0Project/ChatBot | main.py | 35ebadc71b100d861f9c9e211e1e751175f47c50 | '''
Made by - Aditya mangal
Purpose - Python mini project
Date - 18 october 2020
'''
from chatterbot import ChatBot
from chatterbot.trainers import ChatterBotCorpusTrainer
from termcolor import cprint
import time
chatbot = ChatBot('Bot')
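# Train the bot on ChatterBot's bundled English conversation corpus.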
trainer = ChatterBotCorpusTrainer(chatbot)
trainer.train('chatterbot.corpus.english')
cprint("#" * 50, "magenta")
cprint((f"A Chatot ").center(50), "yellow")
cprint("#" * 50, "magenta")
print('You can exit by type exit\n')
while True:
query = input(">> ")
if 'exit' in query:
exit()
else:
print(chatbot.get_response(query))
| [] |
Jeffreyo3/AdventOfCode2020 | challenges/day14.py | 8705847a04885d6489eb11acfddf2ff5702d8927 | """
--- Day 14: Docking Data ---
As your ferry approaches the sea port, the captain asks for your help again. The computer system that runs this port isn't compatible with the docking program on the ferry, so the docking parameters aren't being correctly initialized in the docking program's memory.
After a brief inspection, you discover that the sea port's computer system uses a strange bitmask system in its initialization program. Although you don't have the correct decoder chip handy, you can emulate it in software!
The initialization program (your puzzle input) can either update the bitmask or write a value to memory. Values and memory addresses are both 36-bit unsigned integers. For example, ignoring bitmasks for a moment, a line like mem[8] = 11 would write the value 11 to memory address 8.
The bitmask is always given as a string of 36 bits, written with the most significant bit (representing 2^35) on the left and the least significant bit (2^0, that is, the 1s bit) on the right. The current bitmask is applied to values immediately before they are written to memory: a 0 or 1 overwrites the corresponding bit in the value, while an X leaves the bit in the value unchanged.
For example, consider the following program:
mask = XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X
mem[8] = 11
mem[7] = 101
mem[8] = 0
This program starts by specifying a bitmask (mask = ....). The mask it specifies will overwrite two bits in every written value: the 2s bit is overwritten with 0, and the 64s bit is overwritten with 1.
The program then attempts to write the value 11 to memory address 8. By expanding everything out to individual bits, the mask is applied as follows:
value: 000000000000000000000000000000001011 (decimal 11)
mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X
result: 000000000000000000000000000001001001 (decimal 73)
So, because of the mask, the value 73 is written to memory address 8 instead. Then, the program tries to write 101 to address 7:
value: 000000000000000000000000000001100101 (decimal 101)
mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X
result: 000000000000000000000000000001100101 (decimal 101)
This time, the mask has no effect, as the bits it overwrote were already the values the mask tried to set. Finally, the program tries to write 0 to address 8:
value: 000000000000000000000000000000000000 (decimal 0)
mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X
result: 000000000000000000000000000001000000 (decimal 64)
64 is written to address 8 instead, overwriting the value that was there previously.
To initialize your ferry's docking program, you need the sum of all values left in memory after the initialization program completes. (The entire 36-bit address space begins initialized to the value 0 at every address.) In the above example, only two values in memory are not zero - 101 (at address 7) and 64 (at address 8) - producing a sum of 165.
Execute the initialization program. What is the sum of all values left in memory after it completes?
"""
f = open("challenges\data\day14data.txt", "r")
def processData(file):
    data = []
    for x in file:
        x = x.strip().split(" = ")
        data.append((x[0], x[1]))
    return data
# Function to convert Decimal number
# to Binary number
def decimalToBinary(n):
return bin(n).replace("0b", "")
def leadingZeros(length, bin_num):
    pad = length - len(bin_num)
    return "0"*pad + bin_num
def initialize(commands):
memory = {}
mask = "X"*36
for c in commands:
if c[0] == "mask":
mask = c[1]
else:
address = c[0][c[0].index("[")+1:len(c[0])-1]
binaryValue = decimalToBinary(int(c[1]))
binary36 = leadingZeros(36, binaryValue)
memory[address] = ""
for i in range(len(mask)):
if mask[i] == "X":
memory[address] += binary36[i]
else:
memory[address] += mask[i]
sum = 0
for val in memory.values():
sum += int("".join(val), 2)
return sum
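# Small self-check against the worked example in the problem statement above
# (expected sum: 165). The variable name is illustrative; the command tuples
# match the format produced by processData().
example_part1 = [("mask", "XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X"),
                 ("mem[8]", "11"), ("mem[7]", "101"), ("mem[8]", "0")]
assert initialize(example_part1) == 165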
"""
--- Part Two ---
For some reason, the sea port's computer system still can't communicate with your ferry's docking program. It must be using version 2 of the decoder chip!
A version 2 decoder chip doesn't modify the values being written at all. Instead, it acts as a memory address decoder. Immediately before a value is written to memory, each bit in the bitmask modifies the corresponding bit of the destination memory address in the following way:
If the bitmask bit is 0, the corresponding memory address bit is unchanged.
If the bitmask bit is 1, the corresponding memory address bit is overwritten with 1.
If the bitmask bit is X, the corresponding memory address bit is floating.
A floating bit is not connected to anything and instead fluctuates unpredictably. In practice, this means the floating bits will take on all possible values, potentially causing many memory addresses to be written all at once!
For example, consider the following program:
mask = 000000000000000000000000000000X1001X
mem[42] = 100
mask = 00000000000000000000000000000000X0XX
mem[26] = 1
When this program goes to write to memory address 42, it first applies the bitmask:
address: 000000000000000000000000000000101010 (decimal 42)
mask: 000000000000000000000000000000X1001X
result: 000000000000000000000000000000X1101X
After applying the mask, four bits are overwritten, three of which are different, and two of which are floating. Floating bits take on every possible combination of values; with two floating bits, four actual memory addresses are written:
000000000000000000000000000000011010 (decimal 26)
000000000000000000000000000000011011 (decimal 27)
000000000000000000000000000000111010 (decimal 58)
000000000000000000000000000000111011 (decimal 59)
Next, the program is about to write to memory address 26 with a different bitmask:
address: 000000000000000000000000000000011010 (decimal 26)
mask: 00000000000000000000000000000000X0XX
result: 00000000000000000000000000000001X0XX
This results in an address with three floating bits, causing writes to eight memory addresses:
000000000000000000000000000000010000 (decimal 16)
000000000000000000000000000000010001 (decimal 17)
000000000000000000000000000000010010 (decimal 18)
000000000000000000000000000000010011 (decimal 19)
000000000000000000000000000000011000 (decimal 24)
000000000000000000000000000000011001 (decimal 25)
000000000000000000000000000000011010 (decimal 26)
000000000000000000000000000000011011 (decimal 27)
The entire 36-bit address space still begins initialized to the value 0 at every address, and you still need the sum of all values left in memory at the end of the program. In this example, the sum is 208.
Execute the initialization program using an emulator for a version 2 decoder chip. What is the sum of all values left in memory after it completes?
"""
def calculateCombinations(bin_address):
combinations = []
# xCount = 0
xPositions = []
for i in range(len(bin_address)):
# find each X and add its idx to a list
if bin_address[i] == "X":
xPositions.append(i)
# xCount += 1
    if len(xPositions) == 0:
        # No floating bits: the masked address maps only to itself.
        return [bin_address]
    for i in range(2**(len(xPositions))):
        # need to generate all possible combos of 0s & 1s
        # w/ leading 0s
        possible = decimalToBinary(i)
        while len(possible) < len(xPositions):
            possible = "0"+possible
        combinations.append(possible)
addresses = []
for c in combinations:
# need to insert combination[i] into binary number
# current combo associated idx is in xPositions[i]
newAddress = ""
currPos = 0
for i in range(len(bin_address)):
if currPos < len(xPositions) and i == xPositions[currPos]:
newAddress += c[currPos]
currPos += 1
else:
newAddress += bin_address[i]
addresses.append(newAddress)
return addresses
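# Note: an equivalent way to enumerate these bit strings is
# ("".join(t) for t in itertools.product("01", repeat=len(xPositions))).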
def initialize_v2(commands):
memory = {}
mask = "X"*36
for c in commands:
if c[0] == "mask":
mask = c[1]
else:
address = c[0][c[0].index("[")+1:len(c[0])-1]
binaryAddress = decimalToBinary(int(address))
binary36 = leadingZeros(36, binaryAddress)
newVal = ""
for i in range(len(mask)):
if mask[i] != "0":
newVal += mask[i]
else:
newVal += binary36[i]
addresses = calculateCombinations(newVal)
for a in addresses:
memory[a] = int(c[1])
sum = 0
for val in memory.values():
sum += val
# print(memory)
return sum
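# Small self-check against the version 2 worked example in the problem
# statement above (expected sum: 208); the variable name is illustrative.
example_part2 = [("mask", "000000000000000000000000000000X1001X"), ("mem[42]", "100"),
                 ("mask", "00000000000000000000000000000000X0XX"), ("mem[26]", "1")]
assert initialize_v2(example_part2) == 208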
data = processData(f)
# [print(d) for d in data]
sumAllValues = initialize(data)
print("Part 1:", sumAllValues)
sumAllValuesV2 = initialize_v2(data)
print("Part 2:", sumAllValuesV2)
# binary = decimalToBinary(33323)
# binary = leadingZeros(36, binary)
# print(binary)
# combos = initialize_v2([("mask", "100X100X101011111X100000100X11010011"),
# ("mem[33323]", "349380")])
# print(combos) | [] |
Lovely-XPP/tkzgeom | src/Dialogs/RegularPolygonDialog.py | bf68e139dc05f759542d6611f4dc07f4f2727b92 | from PyQt5 import QtWidgets, uic
from Factory import Factory
from Dialogs.DialogMacros import turn_into_free_point, free_point_checkbox
from Fill.ListWidget import fill_listWidget_with_data, set_selected_id_in_listWidget
import Constant as c
class RegularPolygonDialog(QtWidgets.QDialog):
def __init__(self, scene, data):
"""Construct RegularPolygonDialog."""
super(RegularPolygonDialog, self).__init__()
self.ui = uic.loadUi('regularpolygon.ui', self)
self.scene = scene
self.sides = 3
self.free_point = False
self.data = data
self.ui.buttonBox.accepted.connect(self.accepted)
self.ui.buttonBox.rejected.connect(self.rejected)
self.ui.sides_slider.valueChanged.connect(self.hslider_sides_func)
self.ui.checkBox.stateChanged.connect(lambda x: free_point_checkbox(self, x))
def hslider_sides_func(self, value):
"""Be slider callback function to set sides."""
self.sides = value
self.ui.sides_spin.setValue(value)
def accepted(self):
"""Create new regular polygon with settings."""
A, B = self.data
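        # Interior angle of a regular n-gon is (n - 2) * 180 / n degrees; the
        # negative sign presumably sets the rotation direction expected by the
        # rotation-defined points created below.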
angle = -(self.sides - 2) * 180 / self.sides
polygon = [A, B]
for _ in range(self.sides - 2):
item = Factory.create_empty_item('point', c.Point.Definition.ROTATION)
definition = {'A': A, 'B': B, 'angle': angle}
id_ = Factory.next_id(item, definition, self.scene.project_data.items)
item.item["id"] = id_
item.item["definition"] = definition
if self.free_point:
item = turn_into_free_point(item, self.scene)
self.scene.project_data.add(item)
A = B
B = item.item["id"]
polygon.append(item.item["id"])
item = Factory.create_empty_item('polygon', None)
definition = polygon
item.item["id"] = Factory.next_id(item, definition, self.scene.project_data.items)
item.item["definition"] = definition
self.scene.project_data.add(item)
self.scene.project_data.recompute_canvas(*self.scene.init_canvas_dims)
current_row_old = self.scene.ui.listWidget.currentRow()
fill_listWidget_with_data(self.scene.project_data, self.scene.ui.listWidget, self.scene.current_tab_idx)
set_selected_id_in_listWidget(self.scene, current_row_old)
self.scene.edit.add_undo_item(self.scene)
def rejected(self):
"""Add no new regular polygon."""
pass
| [((446, 483), 'PyQt5.uic.loadUi', 'uic.loadUi', (['"""regularpolygon.ui"""', 'self'], {}), "('regularpolygon.ui', self)\n", (456, 483), False, 'from PyQt5 import QtWidgets, uic\n'), ((1820, 1862), 'Factory.Factory.create_empty_item', 'Factory.create_empty_item', (['"""polygon"""', 'None'], {}), "('polygon', None)\n", (1845, 1862), False, 'from Factory import Factory\n'), ((1918, 1982), 'Factory.Factory.next_id', 'Factory.next_id', (['item', 'definition', 'self.scene.project_data.items'], {}), '(item, definition, self.scene.project_data.items)\n', (1933, 1982), False, 'from Factory import Factory\n'), ((2222, 2330), 'Fill.ListWidget.fill_listWidget_with_data', 'fill_listWidget_with_data', (['self.scene.project_data', 'self.scene.ui.listWidget', 'self.scene.current_tab_idx'], {}), '(self.scene.project_data, self.scene.ui.listWidget,\n self.scene.current_tab_idx)\n', (2247, 2330), False, 'from Fill.ListWidget import fill_listWidget_with_data, set_selected_id_in_listWidget\n'), ((2335, 2393), 'Fill.ListWidget.set_selected_id_in_listWidget', 'set_selected_id_in_listWidget', (['self.scene', 'current_row_old'], {}), '(self.scene, current_row_old)\n', (2364, 2393), False, 'from Fill.ListWidget import fill_listWidget_with_data, set_selected_id_in_listWidget\n'), ((1282, 1345), 'Factory.Factory.create_empty_item', 'Factory.create_empty_item', (['"""point"""', 'c.Point.Definition.ROTATION'], {}), "('point', c.Point.Definition.ROTATION)\n", (1307, 1345), False, 'from Factory import Factory\n'), ((1422, 1486), 'Factory.Factory.next_id', 'Factory.next_id', (['item', 'definition', 'self.scene.project_data.items'], {}), '(item, definition, self.scene.project_data.items)\n', (1437, 1486), False, 'from Factory import Factory\n'), ((840, 868), 'Dialogs.DialogMacros.free_point_checkbox', 'free_point_checkbox', (['self', 'x'], {}), '(self, x)\n', (859, 868), False, 'from Dialogs.DialogMacros import turn_into_free_point, free_point_checkbox\n'), ((1625, 1663), 'Dialogs.DialogMacros.turn_into_free_point', 'turn_into_free_point', (['item', 'self.scene'], {}), '(item, self.scene)\n', (1645, 1663), False, 'from Dialogs.DialogMacros import turn_into_free_point, free_point_checkbox\n')] |
UCY-LINC-LAB/5G-Slicer | tests/test_networks.py | 41e75a6709bc779cb4f3e08484b9ada3911646ed | import unittest
from networks.QoS import QoS
from networks.connections.mathematical_connections import FunctionalDegradation
from networks.slicing import SliceConceptualGraph
from utils.location import Location
class TestBaseStationLinear(unittest.TestCase):
def setUp(self):
self.name = "network"
self.wireless_connection_type = "LinearDegradation"
self.backhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps',
'error_rate': '1.0%'}
self.midhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps',
'error_rate': '1.0%'}
self.parameters = dict(
best_qos={'latency': {'delay': '5.0ms', 'deviation': '2.0ms'}, 'bandwidth': '10.0mbps',
'error_rate': '1.0%'},
worst_qos={'latency': {'delay': '100.0ms', 'deviation': '20.0ms'}, 'bandwidth': '5.0mbps',
'error_rate': '2.0%'}, radius="5km")
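        # Per the distance tests below, QoS degrades from best_qos at the RU
        # (distance 0) down to worst_qos at the configured radius (here 5 km).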
self.network = SliceConceptualGraph(self.name, self.midhaul_qos, self.backhaul_qos, self.parameters)
def test_creation(self):
self.assertEqual(self.network.get_name(), "network")
def test_get_empty_nodes(self):
self.assertEqual(self.network.get_nodes(), {})
def test_add_node(self):
name, lat, lon = 'node', 33, 40
lat, lon = 33, 40
self.network.set_RU(lat, lon)
self.network.add_node(name, lat, lon)
self.assertEqual(self.network.get_nodes(), {'node': Location(lat, lon)})
with self.assertRaises(SliceConceptualGraph.NetworkSliceException):
self.network.add_node('node', 33, 40)
def test_get_empty_RUs(self):
self.assertEqual(self.network.get_RUs(), {})
    def test_set_basestation(self):
lat, lon = 33, 40
self.network.set_RU(lat, lon)
self.assertEqual(self.network.get_RUs(), {f'{lat}-{lon}': Location(lat, lon)})
with self.assertRaises(SliceConceptualGraph.NetworkSliceException):
self.network.set_RU(lat, lon)
def test_constructor(self):
with self.assertRaises(FunctionalDegradation.FunctionDegradationNetworkException):
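            # NOTE: assertRaises leaves this block at the first call that
            # raises, so the calls after it never execute.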
SliceConceptualGraph('test', {}, {}, {})
SliceConceptualGraph('test', self.midhaul_qos, {}, {})
SliceConceptualGraph('test', {}, self.backhaul_qos, {})
SliceConceptualGraph('test', {}, {}, self.parameters)
def test_get_qos(self):
self.assertEqual(self.network.get_backhaul(), QoS(self.backhaul_qos))
def test_set_qos(self):
self.network.set_backhaul(QoS.minimum_qos_dict)
self.assertEqual(self.network.get_backhaul(), QoS(QoS.minimum_qos_dict))
def test_qos_from_distance(self):
self.assertEqual(self.network.get_qos_from(5).get_formated_qos(), self.parameters.get('worst_qos'))
self.assertEqual(self.network.get_qos_from(0.0).get_formated_qos(), self.parameters.get('best_qos'))
def test_get_node_location(self):
lat, lon = 33, 40
self.network.set_RU(lat, lon)
self.network.add_node('test', 10, 10)
self.assertEqual(self.network.get_node_location('test2'), None)
self.assertEqual(self.network.get_node_location('test'), Location(10, 10))
def test_has_to_pass_through_backhaul(self):
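        # NOTE: this test only exercises the setup path; it makes no assertions.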
self.network.set_RU(10, 10)
self.network.set_RU(20, 20)
self.network.add_node('source1', 10, 10)
self.network.add_node('destination1', 10, 10)
self.network.add_node('destination2', 20, 20)
def test_set_RUs(self):
self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5, 'lon': 5}])
self.assertEqual(self.network.get_RUs(),
{'10-10': Location(**{'lat': 10, 'lon': 10}), '5-5': Location(**{'lat': 5, 'lon': 5})})
lat, lon = 33, 40
self.network.set_RU(lat, lon)
with self.assertRaises(SliceConceptualGraph.NetworkSliceException):
self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5, 'lon': 5}])
def test_set_node_location(self):
lat, lon = 33, 40
self.network.set_RU(lat, lon)
self.network.add_node('destination1', 10, 10)
self.network.set_node_location('destination1', 20, 20)
self.assertEqual(self.network.get_node_location('destination1'), Location(20, 20))
with self.assertRaises(Location.LocationException):
self.network.set_node_location('destination1', 'test', 20)
with self.assertRaises(Location.LocationException):
self.network.set_node_location('destination1', 20, 'test')
class TestBaseLog2Degradation(unittest.TestCase):
def setUp(self):
self.name = "network"
self.wireless_connection_type = "Log2Degradation"
self.midhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps',
'error_rate': '1.0%'}
self.backhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps',
'error_rate': '1.0%'}
self.parameters = dict(
best_qos={'latency': {'delay': '5.0ms', 'deviation': '2.0ms'}, 'bandwidth': '10.0mbps',
'error_rate': '1.0%'},
worst_qos={'latency': {'delay': '100.0ms', 'deviation': '20.0ms'}, 'bandwidth': '5.0mbps',
'error_rate': '2.0%'}, radius="5km")
self.network = SliceConceptualGraph(self.name, self.midhaul_qos, self.backhaul_qos, self.parameters)
def test_creation(self):
self.assertEqual(self.network.get_name(), "network")
def test_get_empty_nodes(self):
self.assertEqual(self.network.get_nodes(), {})
def test_add_node(self):
name, lat, lon = 'node', 33, 40
with self.assertRaises(SliceConceptualGraph.NetworkSliceException):
self.network.add_node(name, lat, lon)
self.network.set_RU(33, 40, 0)
self.network.add_node(name, lat, lon)
self.assertEqual(self.network.get_nodes(), {'node': Location(lat, lon)})
with self.assertRaises(SliceConceptualGraph.NetworkSliceException):
self.network.add_node('node', 33, 40)
def test_get_empty_RUs(self):
self.assertEqual(self.network.get_RUs(), {})
    def test_set_basestation(self):
lat, lon = 33, 40
self.network.set_RU(lat, lon)
self.assertEqual(self.network.get_RUs(), {f'{lat}-{lon}': Location(lat, lon)})
with self.assertRaises(SliceConceptualGraph.NetworkSliceException):
self.network.set_RU(lat, lon)
def test_constructor(self):
with self.assertRaises(FunctionalDegradation.FunctionDegradationNetworkException):
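            # NOTE: assertRaises leaves this block at the first call that
            # raises, so the calls after it never execute.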
            SliceConceptualGraph('test', {}, {}, {})
SliceConceptualGraph('test', self.midhaul_qos, {}, {})
SliceConceptualGraph('test', {}, self.backhaul_qos, {})
SliceConceptualGraph('test', {}, {}, self.parameters)
def test_get_qos(self):
self.assertEqual(self.network.get_backhaul(), QoS(self.backhaul_qos))
def test_set_qos(self):
self.network.set_backhaul(QoS.minimum_qos_dict)
self.assertEqual(self.network.get_backhaul(), QoS(QoS.minimum_qos_dict))
def test_qos_from_distance(self):
self.assertEqual(self.network.get_qos_from(5).get_formated_qos(), self.parameters.get('worst_qos'))
self.assertEqual(self.network.get_qos_from(0.0).get_formated_qos(), self.parameters.get('best_qos'))
def test_get_node_location(self):
lat, lon = 33, 40
self.network.set_RU(lat, lon)
self.network.add_node('test', 10, 10)
self.assertEqual(self.network.get_node_location('test2'), None)
self.assertEqual(self.network.get_node_location('test'), Location(10, 10))
def test_set_RUs(self):
self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5, 'lon': 5}])
self.assertEqual(self.network.get_RUs(),
{'10-10': Location(**{'lat': 10, 'lon': 10}), '5-5': Location(**{'lat': 5, 'lon': 5})})
with self.assertRaises(SliceConceptualGraph.NetworkSliceException):
self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5, 'lon': 5}])
def test_set_node_location(self):
lat, lon = 33, 40
self.network.set_RU(lat, lon)
self.network.add_node('destination1', 10, 10)
self.network.set_node_location('destination1', 20, 20)
self.assertEqual(self.network.get_node_location('destination1'), Location(20, 20))
with self.assertRaises(Location.LocationException):
self.network.set_node_location('destination1', 'test', 20)
with self.assertRaises(Location.LocationException):
self.network.set_node_location('destination1', 20, 'test')
| [((1054, 1144), 'networks.slicing.SliceConceptualGraph', 'SliceConceptualGraph', (['self.name', 'self.midhaul_qos', 'self.backhaul_qos', 'self.parameters'], {}), '(self.name, self.midhaul_qos, self.backhaul_qos, self.\n    parameters)\n', (1074, 1144), False, 'from networks.slicing import SliceConceptualGraph\n'), ((5498, 5588), 'networks.slicing.SliceConceptualGraph', 'SliceConceptualGraph', (['self.name', 'self.midhaul_qos', 'self.backhaul_qos', 'self.parameters'], {}), '(self.name, self.midhaul_qos, self.backhaul_qos, self.\n    parameters)\n', (5518, 5588), False, 'from networks.slicing import SliceConceptualGraph\n'), ((2240, 2280), 'networks.slicing.SliceConceptualGraph', 'SliceConceptualGraph', (['"""test"""', '{}', '{}', '{}'], {}), "('test', {}, {}, {})\n", (2260, 2280), False, 'from networks.slicing import SliceConceptualGraph\n'), ((2293, 2347), 'networks.slicing.SliceConceptualGraph', 'SliceConceptualGraph', (['"""test"""', 'self.midhaul_qos', '{}', '{}'], {}), "('test', self.midhaul_qos, {}, {})\n", (2313, 2347), False, 'from networks.slicing import SliceConceptualGraph\n'), ((2360, 2415), 'networks.slicing.SliceConceptualGraph', 'SliceConceptualGraph', (['"""test"""', '{}', 'self.backhaul_qos', '{}'], {}), "('test', {}, self.backhaul_qos, {})\n", (2380, 2415), False, 'from networks.slicing import SliceConceptualGraph\n'), ((2428, 2481), 'networks.slicing.SliceConceptualGraph', 'SliceConceptualGraph', (['"""test"""', '{}', '{}', 'self.parameters'], {}), "('test', {}, {}, self.parameters)\n", (2448, 2481), False, 'from networks.slicing import SliceConceptualGraph\n'), ((2565, 2587), 'networks.QoS.QoS', 'QoS', (['self.backhaul_qos'], {}), '(self.backhaul_qos)\n', (2568, 2587), False, 'from networks.QoS import QoS\n'), ((2728, 2753), 'networks.QoS.QoS', 'QoS', (['QoS.minimum_qos_dict'], {}), '(QoS.minimum_qos_dict)\n', (2731, 2753), False, 'from networks.QoS import QoS\n'), ((3297, 3313), 'utils.location.Location', 'Location', (['(10)', '(10)'], {}), '(10, 10)\n', (3305, 3313), False, 'from utils.location import Location\n'), ((4377, 4393), 'utils.location.Location', 'Location', (['(20)', '(20)'], {}), '(20, 20)\n', (4385, 4393), False, 'from utils.location import Location\n'), ((6785, 6825), 'networks.slicing.SliceConceptualGraph', 'SliceConceptualGraph', (['"""test"""', '{}', '{}', '{}'], {}), "('test', {}, {}, {})\n", (6805, 6825), False, 'from networks.slicing import SliceConceptualGraph\n'), ((6838, 6892), 'networks.slicing.SliceConceptualGraph', 'SliceConceptualGraph', (['"""test"""', 'self.midhaul_qos', '{}', '{}'], {}), "('test', self.midhaul_qos, {}, {})\n", (6858, 6892), False, 'from networks.slicing import SliceConceptualGraph\n'), ((6905, 6960), 'networks.slicing.SliceConceptualGraph', 'SliceConceptualGraph', (['"""test"""', '{}', 'self.backhaul_qos', '{}'], {}), "('test', {}, self.backhaul_qos, {})\n", (6925, 6960), False, 'from networks.slicing import SliceConceptualGraph\n'), ((6973, 7026), 'networks.slicing.SliceConceptualGraph', 'SliceConceptualGraph', (['"""test"""', '{}', '{}', 'self.parameters'], {}), "('test', {}, {}, self.parameters)\n", (6993, 7026), False, 'from networks.slicing import SliceConceptualGraph\n'), ((7110, 7132), 'networks.QoS.QoS', 'QoS', (['self.backhaul_qos'], {}), '(self.backhaul_qos)\n', (7113, 7132), False, 'from networks.QoS import QoS\n'), ((7273, 7298), 'networks.QoS.QoS', 'QoS', (['QoS.minimum_qos_dict'], {}), '(QoS.minimum_qos_dict)\n', (7276, 7298), False, 'from networks.QoS import QoS\n'), ((7842, 7858), 'utils.location.Location', 'Location', (['(10)', '(10)'], {}), '(10, 10)\n', (7850, 7858), False, 'from utils.location import Location\n'), ((8578, 8594), 'utils.location.Location', 'Location', (['(20)', '(20)'], {}), '(20, 20)\n', (8586, 8594), False, 'from utils.location import Location\n'), ((1563, 1581), 'utils.location.Location', 'Location', (['lat', 'lon'], {}), '(lat, lon)\n', (1571, 1581), False, 'from utils.location import Location\n'), ((1965, 1983), 'utils.location.Location', 'Location', (['lat', 'lon'], {}), '(lat, lon)\n', (1973, 1983), False, 'from utils.location import Location\n'), ((3785, 3819), 'utils.location.Location', 'Location', ([], {}), "(**{'lat': 10, 'lon': 10})\n", (3793, 3819), False, 'from utils.location import Location\n'), ((3828, 3860), 'utils.location.Location', 'Location', ([], {}), "(**{'lat': 5, 'lon': 5})\n", (3836, 3860), False, 'from utils.location import Location\n'), ((6108, 6126), 'utils.location.Location', 'Location', (['lat', 'lon'], {}), '(lat, lon)\n', (6116, 6126), False, 'from utils.location import Location\n'), ((6510, 6528), 'utils.location.Location', 'Location', (['lat', 'lon'], {}), '(lat, lon)\n', (6518, 6528), False, 'from utils.location import Location\n'), ((8050, 8084), 'utils.location.Location', 'Location', ([], {}), "(**{'lat': 10, 'lon': 10})\n", (8058, 8084), False, 'from utils.location import Location\n'), ((8093, 8125), 'utils.location.Location', 'Location', ([], {}), "(**{'lat': 5, 'lon': 5})\n", (8101, 8125), False, 'from utils.location import Location\n')] |