Dataset schema (five string fields per row; min/max lengths as reported):

repo_name:        string, 7 to 94 chars
repo_path:        string, 4 to 237 chars
repo_head_hexsha: string, 40 chars
content:          string, 10 to 680k chars
apis:             string, 2 to 680k chars
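Each row below lists those five fields in order: repo_name, repo_path, repo_head_hexsha, content, apis. A minimal sketch of iterating over such rows follows, assuming the dataset is stored as JSON Lines with those five string fields; the file name "train.jsonl" and the JSONL assumption are hypothetical, not confirmed by this dump.

import ast
import json

def iter_rows(path="train.jsonl"):
    """Yield one dataset row per line, decoding the 'apis' annotation string."""
    with open(path, "r", encoding="utf-8") as f:
        for line in f:
            row = json.loads(line)
            # 'apis' is stored as the repr of a Python list of tuples;
            # ast.literal_eval turns it back into structured data.
            row["apis"] = ast.literal_eval(row["apis"])
            yield row

for row in iter_rows():
    print(row["repo_name"], row["repo_path"], len(row["apis"]))
    break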
ruslan-ok/ruslan
v2_hier/site_stat.py
fc402e53d2683581e13f4d6c69a6f21e5c2ca1f8
"""Collecting statistics of site visits.""" import collections from datetime import datetime from functools import reduce from django.utils.translation import gettext_lazy as _ from hier.models import IPInfo, AccessLog, SiteStat from v2_hier.utils import APPS def get_site_stat(user): """Processing a new portion of log file records. The site applications that users have visited and information about their IP addresses will be shown. """ TOTAL_IP = _('total different').capitalize() + ' IP' TOTAL_LOG = _('total log records').capitalize() NEW_LOG = _('new log records').capitalize() cnt = collections.Counter() cnt[TOTAL_IP] = len(IPInfo.objects.all()) cnt[TOTAL_LOG] = len(AccessLog.objects.all()) #Determining the last previously processed log file entry last = datetime.min site_stat = None if SiteStat.objects.filter(user=user.id).exists(): site_stat = SiteStat.objects.filter(user = user.id).get() if site_stat.record and site_stat.record.event: last = site_stat.record.event # New records records = AccessLog.objects.filter(event__gt=last).order_by('-event') cnt[NEW_LOG] += len(records) # Save last processed log record last_rec = None if (len(records) > 0): last_rec = records[0] if site_stat: site_stat.record = last_rec site_stat.save() else: SiteStat.objects.create(user=user, record=last_rec) #raise Exception(last_rec.event) apps = {} for rec in records: uri = valid_uri(rec) if not uri: continue # Determining the access to the site application a_app = list(filter(lambda x: '/{}/'.format(x) in uri, APPS)) if not a_app: continue app = a_app[0] if not app in apps: apps[app] = {} host = str(rec.host.info()) #raise Exception('aaa = ', aaa) if not host in apps[app]: apps[app][host] = [] page = '{} {}'.format(rec.method, uri) if not page in apps[app][host]: apps[app][host].append(page) return cnt.most_common(), apps def valid_uri(rec): if (rec.status >= 400) or (rec.status == 301): return None if 'favicon.ico' in rec.uri or '/static/' in rec.uri or '/jsi18n/' in rec.uri or '/photo/get_mini/' in rec.uri: return None if ('/?' in rec.uri) and (rec.method != 'POST'): uri = rec.uri.split('?')[0] else: uri = rec.uri uri = uri.replace('/ru/', '/').replace('/en/', '/') if (uri == '/'): return None return uri
[((622, 643), 'collections.Counter', 'collections.Counter', ([], {}), '()\n', (641, 643), False, 'import collections\n'), ((668, 688), 'hier.models.IPInfo.objects.all', 'IPInfo.objects.all', ([], {}), '()\n', (686, 688), False, 'from hier.models import IPInfo, AccessLog, SiteStat\n'), ((715, 738), 'hier.models.AccessLog.objects.all', 'AccessLog.objects.all', ([], {}), '()\n', (736, 738), False, 'from hier.models import IPInfo, AccessLog, SiteStat\n'), ((527, 549), 'django.utils.translation.gettext_lazy', '_', (['"""total log records"""'], {}), "('total log records')\n", (528, 549), True, 'from django.utils.translation import gettext_lazy as _\n'), ((577, 597), 'django.utils.translation.gettext_lazy', '_', (['"""new log records"""'], {}), "('new log records')\n", (578, 597), True, 'from django.utils.translation import gettext_lazy as _\n'), ((855, 892), 'hier.models.SiteStat.objects.filter', 'SiteStat.objects.filter', ([], {'user': 'user.id'}), '(user=user.id)\n', (878, 892), False, 'from hier.models import IPInfo, AccessLog, SiteStat\n'), ((1100, 1140), 'hier.models.AccessLog.objects.filter', 'AccessLog.objects.filter', ([], {'event__gt': 'last'}), '(event__gt=last)\n', (1124, 1140), False, 'from hier.models import IPInfo, AccessLog, SiteStat\n'), ((1425, 1476), 'hier.models.SiteStat.objects.create', 'SiteStat.objects.create', ([], {'user': 'user', 'record': 'last_rec'}), '(user=user, record=last_rec)\n', (1448, 1476), False, 'from hier.models import IPInfo, AccessLog, SiteStat\n'), ((469, 489), 'django.utils.translation.gettext_lazy', '_', (['"""total different"""'], {}), "('total different')\n", (470, 489), True, 'from django.utils.translation import gettext_lazy as _\n'), ((923, 960), 'hier.models.SiteStat.objects.filter', 'SiteStat.objects.filter', ([], {'user': 'user.id'}), '(user=user.id)\n', (946, 960), False, 'from hier.models import IPInfo, AccessLog, SiteStat\n')]
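A hedged reading of the first entry in the apis list above. The field meanings (call span, qualified API name, name as written in the source, parsed args/kwargs, rendered call text, argument span, aliased-import flag, import statement) are inferred from the data itself, not from a published spec.

# First entry of the 'apis' field above; field names are inferred, not official.
entry = ((622, 643), 'collections.Counter', 'collections.Counter',
         ([], {}), '()\n', (641, 643), False, 'import collections\n')

(call_span, qualified_name, source_name, parsed_args,
 call_text, args_span, aliased_import, import_stmt) = entry

print(qualified_name)  # collections.Counter
print(call_span)       # (622, 643): character offsets of the call in 'content'
print(import_stmt)     # import collections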
mkosmo/cfncluster
cli/pcluster/utils.py
f1817cc187f2b92127d48f16debb4b7ea4f4a80f
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
# the License. A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "LICENSE.txt" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions and
# limitations under the License.
# fmt: off
from __future__ import absolute_import, print_function  # isort:skip
from future import standard_library  # isort:skip
standard_library.install_aliases()
# fmt: on

import json
import logging
import os
import sys
import time
import urllib.request
import zipfile
from io import BytesIO

import boto3
import pkg_resources
from botocore.exceptions import ClientError

LOGGER = logging.getLogger(__name__)

PCLUSTER_STACK_PREFIX = "parallelcluster-"
PCLUSTER_ISSUES_LINK = "https://github.com/aws/aws-parallelcluster/issues"


def get_stack_name(cluster_name):
    return PCLUSTER_STACK_PREFIX + cluster_name


def get_region():
    """Get AWS_DEFAULT_REGION from the environment."""
    return os.environ.get("AWS_DEFAULT_REGION")


def get_partition():
    """Get partition for the AWS_DEFAULT_REGION set in the environment."""
    return "aws-us-gov" if get_region().startswith("us-gov") else "aws"


def paginate_boto3(method, **kwargs):
    """
    Return a generator for a boto3 call, this allows pagination over an arbitrary number of responses.

    :param method: boto3 method
    :param kwargs: arguments to method
    :return: generator with boto3 results
    """
    client = method.__self__
    paginator = client.get_paginator(method.__name__)
    for page in paginator.paginate(**kwargs).result_key_iters():
        for result in page:
            yield result


def create_s3_bucket(bucket_name, region):
    """
    Create a new S3 bucket.

    :param bucket_name: name of the S3 bucket to create
    :param region: aws region
    """
    s3_client = boto3.client("s3")
    """ :type : pyboto3.s3 """
    try:
        if region != "us-east-1":
            s3_client.create_bucket(Bucket=bucket_name, CreateBucketConfiguration={"LocationConstraint": region})
        else:
            s3_client.create_bucket(Bucket=bucket_name)
    except s3_client.exceptions.BucketAlreadyOwnedByYou:
        print("Bucket already exists")


def delete_s3_bucket(bucket_name):
    """
    Delete an S3 bucket together with all stored objects.

    :param bucket_name: name of the S3 bucket to delete
    """
    try:
        bucket = boto3.resource("s3").Bucket(bucket_name)
        bucket.objects.all().delete()
        bucket.delete()
    except boto3.client("s3").exceptions.NoSuchBucket:
        pass
    except ClientError:
        print("Failed to delete bucket %s. Please delete it manually." % bucket_name)


def zip_dir(path):
    """
    Create a zip archive containing all files and dirs rooted in path.

    The archive is created in memory and a file handler is returned by the function.

    :param path: directory containing the resources to archive.
    :return file handler pointing to the compressed archive.
    """
    file_out = BytesIO()
    with zipfile.ZipFile(file_out, "w", zipfile.ZIP_DEFLATED) as ziph:
        for root, _, files in os.walk(path):
            for file in files:
                ziph.write(os.path.join(root, file), os.path.relpath(os.path.join(root, file), start=path))
    file_out.seek(0)
    return file_out


def upload_resources_artifacts(bucket_name, root):
    """
    Upload to the specified S3 bucket the content of the directory rooted in root path.

    All dirs contained in root dir will be uploaded as zip files to $bucket_name/$dir_name/artifacts.zip.
    All files contained in root dir will be uploaded to $bucket_name.

    :param bucket_name: name of the S3 bucket where files are uploaded
    :param root: root directory containing the resources to upload.
    """
    bucket = boto3.resource("s3").Bucket(bucket_name)
    for res in os.listdir(root):
        if os.path.isdir(os.path.join(root, res)):
            bucket.upload_fileobj(zip_dir(os.path.join(root, res)), "%s/artifacts.zip" % res)
        elif os.path.isfile(os.path.join(root, res)):
            bucket.upload_file(os.path.join(root, res), res)


def _get_json_from_s3(region, file_name):
    """
    Get pricing file (if none) and parse content as json.

    :param region: AWS Region
    :param file_name the object name to get
    :return: a json object representing the file content
    :raises ClientError if unable to download the file
    :raises ValueError if unable to decode the file content
    """
    bucket_name = "{0}-aws-parallelcluster".format(region)
    file_contents = boto3.resource("s3").Object(bucket_name, file_name).get()["Body"].read().decode("utf-8")
    return json.loads(file_contents)


def get_supported_features(region, feature):
    """
    Get a json object containing the attributes supported by a feature, for example.

    {
        "Features": {
            "efa": {
                "instances": ["c5n.18xlarge", "p3dn.24xlarge", "i3en.24xlarge"],
                "baseos": ["alinux", "centos7"],
                "schedulers": ["sge", "slurm", "torque"]
            },
            "batch": {
                "instances": ["r3.8xlarge", ..., "m5.4xlarge"]
            }
        }
    }

    :param region: AWS Region
    :param feature: the feature to search for, i.e. "efa" "awsbatch"
    :return: json object containing all the attributes supported by feature
    """
    try:
        features = _get_json_from_s3(region, "features/feature_whitelist.json")
        supported_features = features.get("Features").get(feature)
    except (ValueError, ClientError, KeyError) as e:
        if isinstance(e, ClientError):
            code = e.response.get("Error").get("Code")
            if code == "InvalidAccessKeyId":
                error(e.response.get("Error").get("Message"))
        error(
            "Failed validate {0}. This is probably a bug on our end. "
            "Please submit an issue {1}".format(feature, PCLUSTER_ISSUES_LINK)
        )
    return supported_features


def get_instance_vcpus(region, instance_type):
    """
    Get number of vcpus for the given instance type.

    :param region: AWS Region
    :param instance_type: the instance type to search for.
    :return: the number of vcpus or -1 if the instance type cannot be found or the pricing file cannot be retrieved/parsed
    """
    try:
        instances = _get_json_from_s3(region, "instances/instances.json")
        vcpus = int(instances[instance_type]["vcpus"])
    except (KeyError, ValueError, ClientError):
        vcpus = -1
    return vcpus


def get_supported_os(scheduler):
    """
    Return a tuple of the os supported by parallelcluster for the specific scheduler.

    :param scheduler: the scheduler for which we want to know the supported os
    :return: a tuple of strings of the supported os
    """
    return "alinux" if scheduler == "awsbatch" else "alinux", "centos6", "centos7", "ubuntu1604", "ubuntu1804"


def get_supported_schedulers():
    """
    Return a tuple of the scheduler supported by parallelcluster.

    :return: a tuple of strings of the supported scheduler
    """
    return "sge", "torque", "slurm", "awsbatch"


def get_stack_output_value(stack_outputs, output_key):
    """
    Get output value from Cloudformation Stack Output.

    :param stack_outputs: Cloudformation Stack Outputs
    :param output_key: Output Key
    :return: OutputValue if that output exists, otherwise None
    """
    return next((o.get("OutputValue") for o in stack_outputs if o.get("OutputKey") == output_key), None)


def get_stack(stack_name, cfn_client=None):
    """
    Get the output for a DescribeStacks action for the given Stack.

    :param stack_name: the CFN Stack name
    :param cfn_client: boto3 cloudformation client
    :return: the Stack data type
    """
    try:
        if not cfn_client:
            cfn_client = boto3.client("cloudformation")
        return cfn_client.describe_stacks(StackName=stack_name).get("Stacks")[0]
    except (ClientError, IndexError) as e:
        error(e.response.get("Error").get("Message"))


def verify_stack_creation(stack_name, cfn_client):
    """
    Wait for the stack creation to be completed and notify if the stack creation fails.

    :param stack_name: the stack name that we should verify
    :param cfn_client: the CloudFormation client to use to verify stack status
    :return: True if the creation was successful, false otherwise.
    """
    status = get_stack(stack_name, cfn_client).get("StackStatus")
    resource_status = ""
    while status == "CREATE_IN_PROGRESS":
        status = get_stack(stack_name, cfn_client).get("StackStatus")
        events = cfn_client.describe_stack_events(StackName=stack_name).get("StackEvents")[0]
        resource_status = ("Status: %s - %s" % (events.get("LogicalResourceId"), events.get("ResourceStatus"))).ljust(
            80
        )
        sys.stdout.write("\r%s" % resource_status)
        sys.stdout.flush()
        time.sleep(5)
    # print the last status update in the logs
    if resource_status != "":
        LOGGER.debug(resource_status)
    if status != "CREATE_COMPLETE":
        LOGGER.critical("\nCluster creation failed. Failed events:")
        events = cfn_client.describe_stack_events(StackName=stack_name).get("StackEvents")
        for event in events:
            if event.get("ResourceStatus") == "CREATE_FAILED":
                LOGGER.info(
                    " - %s %s %s",
                    event.get("ResourceType"),
                    event.get("LogicalResourceId"),
                    event.get("ResourceStatusReason"),
                )
        return False
    return True


def get_templates_bucket_path():
    """Return a string containing the path of bucket."""
    region = get_region()
    s3_suffix = ".cn" if region.startswith("cn") else ""
    return "https://s3.{REGION}.amazonaws.com{S3_SUFFIX}/{REGION}-aws-parallelcluster/templates/".format(
        REGION=region, S3_SUFFIX=s3_suffix
    )


def get_installed_version():
    """Get the version of the installed aws-parallelcluster package."""
    return pkg_resources.get_distribution("aws-parallelcluster").version


def check_if_latest_version():
    """Check if the current package version is the latest one."""
    try:
        latest = json.loads(urllib.request.urlopen("https://pypi.python.org/pypi/aws-parallelcluster/json").read())[
            "info"
        ]["version"]
        if get_installed_version() < latest:
            print("Info: There is a newer version %s of AWS ParallelCluster available." % latest)
    except Exception:
        pass


def warn(message):
    """Print a warning message."""
    print("WARNING: {0}".format(message))


def error(message, fail_on_error=True):
    """Print an error message and Raise SystemExit exception to the stderr if fail_on_error is true."""
    if fail_on_error:
        sys.exit("ERROR: {0}".format(message))
    else:
        print("ERROR: {0}".format(message))


def get_cfn_param(params, key_name):
    """
    Get parameter value from Cloudformation Stack Parameters.

    :param params: Cloudformation Stack Parameters
    :param key_name: Parameter Key
    :return: ParameterValue if that parameter exists, otherwise None
    """
    param_value = next((i.get("ParameterValue") for i in params if i.get("ParameterKey") == key_name), "NONE")
    return param_value.strip()


def get_efs_mount_target_id(efs_fs_id, avail_zone):
    """
    Search for a Mount Target Id in given availability zone for the given EFS file system id.

    :param efs_fs_id: EFS file system Id
    :param avail_zone: Availability zone to verify
    :return: the mount_target_id or None
    """
    mount_target_id = None
    if efs_fs_id:
        mount_targets = boto3.client("efs").describe_mount_targets(FileSystemId=efs_fs_id)
        for mount_target in mount_targets.get("MountTargets"):
            # Check to see if there is an existing mt in the az of the stack
            mount_target_subnet = mount_target.get("SubnetId")
            if avail_zone == get_avail_zone(mount_target_subnet):
                mount_target_id = mount_target.get("MountTargetId")
    return mount_target_id


def get_avail_zone(subnet_id):
    avail_zone = None
    try:
        avail_zone = (
            boto3.client("ec2").describe_subnets(SubnetIds=[subnet_id]).get("Subnets")[0].get("AvailabilityZone")
        )
    except ClientError as e:
        LOGGER.debug(
            "Unable to detect availability zone for subnet {0}.\n{1}".format(
                subnet_id, e.response.get("Error").get("Message")
            )
        )
    return avail_zone


def get_latest_alinux_ami_id():
    """Get latest alinux ami id."""
    try:
        alinux_ami_id = (
            boto3.client("ssm")
            .get_parameters_by_path(Path="/aws/service/ami-amazon-linux-latest")
            .get("Parameters")[0]
            .get("Value")
        )
    except ClientError as e:
        error("Unable to retrieve Amazon Linux AMI id.\n{0}".format(e.response.get("Error").get("Message")))
    return alinux_ami_id


def list_ec2_instance_types():
    """Return a list of all the instance types available on EC2, independent by the region."""
    return boto3.client("ec2").meta.service_model.shape_for("InstanceType").enum


def get_master_server_id(stack_name):
    """Return the physical id of the master server, or [] if no master server."""
    try:
        resources = boto3.client("cloudformation").describe_stack_resource(
            StackName=stack_name, LogicalResourceId="MasterServer"
        )
        return resources.get("StackResourceDetail").get("PhysicalResourceId")
    except ClientError as e:
        error(e.response.get("Error").get("Message"))


def _get_master_server_ip(stack_name):
    """
    Get the IP Address of the MasterServer.

    :param stack_name: The name of the cloudformation stack
    :param config: Config object
    :return private/public ip address
    """
    ec2 = boto3.client("ec2")
    master_id = get_master_server_id(stack_name)
    if not master_id:
        error("MasterServer not running. Can't SSH")
    instance = ec2.describe_instances(InstanceIds=[master_id]).get("Reservations")[0].get("Instances")[0]
    ip_address = instance.get("PublicIpAddress")
    if ip_address is None:
        ip_address = instance.get("PrivateIpAddress")
    state = instance.get("State").get("Name")
    if state != "running" or ip_address is None:
        error("MasterServer: %s\nCannot get ip address.", state.upper())
    return ip_address


def get_master_ip_and_username(cluster_name):
    cfn = boto3.client("cloudformation")
    try:
        stack_name = get_stack_name(cluster_name)
        stack_result = cfn.describe_stacks(StackName=stack_name).get("Stacks")[0]
        stack_status = stack_result.get("StackStatus")
        valid_status = ["CREATE_COMPLETE", "UPDATE_COMPLETE", "UPDATE_ROLLBACK_COMPLETE"]
        invalid_status = ["DELETE_COMPLETE", "DELETE_IN_PROGRESS"]
        if stack_status in invalid_status:
            error("Unable to retrieve master_ip and username for a stack in the status: {0}".format(stack_status))
        elif stack_status in valid_status:
            outputs = stack_result.get("Outputs")
            master_ip = get_stack_output_value(outputs, "MasterPublicIP") or _get_master_server_ip(stack_name)
            username = get_stack_output_value(outputs, "ClusterUser")
        else:
            # Stack is in CREATING, CREATED_FAILED, or ROLLBACK_COMPLETE but MasterServer is running
            master_ip = _get_master_server_ip(stack_name)
            template = cfn.get_template(StackName=stack_name)
            mappings = template.get("TemplateBody").get("Mappings").get("OSFeatures")
            base_os = get_cfn_param(stack_result.get("Parameters"), "BaseOS")
            username = mappings.get(base_os).get("User")
        if not master_ip:
            error("Failed to get cluster {0} ip.".format(cluster_name))
        if not username:
            error("Failed to get cluster {0} username.".format(cluster_name))
    except ClientError as e:
        error(e.response.get("Error").get("Message"))
    return master_ip, username


def get_cli_log_file():
    return os.path.expanduser(os.path.join("~", ".parallelcluster", "pcluster-cli.log"))
[((684, 718), 'future.standard_library.install_aliases', 'standard_library.install_aliases', ([], {}), '()\n', (716, 718), False, 'from future import standard_library\n'), ((939, 966), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (956, 966), False, 'import logging\n'), ((1256, 1292), 'os.environ.get', 'os.environ.get', (['"""AWS_DEFAULT_REGION"""'], {}), "('AWS_DEFAULT_REGION')\n", (1270, 1292), False, 'import os\n'), ((2129, 2147), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (2141, 2147), False, 'import boto3\n'), ((3311, 3320), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (3318, 3320), False, 'from io import BytesIO\n'), ((4160, 4176), 'os.listdir', 'os.listdir', (['root'], {}), '(root)\n', (4170, 4176), False, 'import os\n'), ((4983, 5008), 'json.loads', 'json.loads', (['file_contents'], {}), '(file_contents)\n', (4993, 5008), False, 'import json\n'), ((14307, 14326), 'boto3.client', 'boto3.client', (['"""ec2"""'], {}), "('ec2')\n", (14319, 14326), False, 'import boto3\n'), ((14936, 14966), 'boto3.client', 'boto3.client', (['"""cloudformation"""'], {}), "('cloudformation')\n", (14948, 14966), False, 'import boto3\n'), ((3330, 3382), 'zipfile.ZipFile', 'zipfile.ZipFile', (['file_out', '"""w"""', 'zipfile.ZIP_DEFLATED'], {}), "(file_out, 'w', zipfile.ZIP_DEFLATED)\n", (3345, 3382), False, 'import zipfile\n'), ((3422, 3435), 'os.walk', 'os.walk', (['path'], {}), '(path)\n', (3429, 3435), False, 'import os\n'), ((9205, 9247), 'sys.stdout.write', 'sys.stdout.write', (["('\\r%s' % resource_status)"], {}), "('\\r%s' % resource_status)\n", (9221, 9247), False, 'import sys\n'), ((9256, 9274), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (9272, 9274), False, 'import sys\n'), ((9283, 9296), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (9293, 9296), False, 'import time\n'), ((10419, 10472), 'pkg_resources.get_distribution', 'pkg_resources.get_distribution', (['"""aws-parallelcluster"""'], {}), "('aws-parallelcluster')\n", (10449, 10472), False, 'import pkg_resources\n'), ((16584, 16641), 'os.path.join', 'os.path.join', (['"""~"""', '""".parallelcluster"""', '"""pcluster-cli.log"""'], {}), "('~', '.parallelcluster', 'pcluster-cli.log')\n", (16596, 16641), False, 'import os\n'), ((4104, 4124), 'boto3.resource', 'boto3.resource', (['"""s3"""'], {}), "('s3')\n", (4118, 4124), False, 'import boto3\n'), ((4203, 4226), 'os.path.join', 'os.path.join', (['root', 'res'], {}), '(root, res)\n', (4215, 4226), False, 'import os\n'), ((8183, 8213), 'boto3.client', 'boto3.client', (['"""cloudformation"""'], {}), "('cloudformation')\n", (8195, 8213), False, 'import boto3\n'), ((2696, 2716), 'boto3.resource', 'boto3.resource', (['"""s3"""'], {}), "('s3')\n", (2710, 2716), False, 'import boto3\n'), ((2810, 2828), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (2822, 2828), False, 'import boto3\n'), ((4351, 4374), 'os.path.join', 'os.path.join', (['root', 'res'], {}), '(root, res)\n', (4363, 4374), False, 'import os\n'), ((12073, 12092), 'boto3.client', 'boto3.client', (['"""efs"""'], {}), "('efs')\n", (12085, 12092), False, 'import boto3\n'), ((13770, 13800), 'boto3.client', 'boto3.client', (['"""cloudformation"""'], {}), "('cloudformation')\n", (13782, 13800), False, 'import boto3\n'), ((3495, 3519), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (3507, 3519), False, 'import os\n'), ((4271, 4294), 'os.path.join', 'os.path.join', (['root', 'res'], {}), '(root, res)\n', (4283, 4294), False, 'import os\n'), ((4408, 4431), 'os.path.join', 'os.path.join', (['root', 'res'], {}), '(root, res)\n', (4420, 4431), False, 'import os\n'), ((3537, 3561), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (3549, 3561), False, 'import os\n'), ((13549, 13568), 'boto3.client', 'boto3.client', (['"""ec2"""'], {}), "('ec2')\n", (13561, 13568), False, 'import boto3\n'), ((12605, 12624), 'boto3.client', 'boto3.client', (['"""ec2"""'], {}), "('ec2')\n", (12617, 12624), False, 'import boto3\n'), ((13075, 13094), 'boto3.client', 'boto3.client', (['"""ssm"""'], {}), "('ssm')\n", (13087, 13094), False, 'import boto3\n'), ((4883, 4903), 'boto3.resource', 'boto3.resource', (['"""s3"""'], {}), "('s3')\n", (4897, 4903), False, 'import boto3\n')]
ThinkmanWang/thinkutils_plus
thinkutils_plus/eventbus/sample/myeventbus.py
65d56a1a0cfce22dff08a4f0baea6b4eb08a2e35
__author__ = 'Xsank'
import time

from thinkutils_plus.eventbus.eventbus import EventBus

from myevent import GreetEvent
from myevent import ByeEvent
from mylistener import MyListener


if __name__ == "__main__":
    eventbus = EventBus()
    eventbus.register(MyListener())
    ge = GreetEvent('world')
    be = ByeEvent('world')
    eventbus.async_post(be)
    eventbus.post(ge)
    time.sleep(0.1)
    eventbus.unregister(MyListener())
    eventbus.destroy()
[((223, 233), 'thinkutils_plus.eventbus.eventbus.EventBus', 'EventBus', ([], {}), '()\n', (231, 233), False, 'from thinkutils_plus.eventbus.eventbus import EventBus\n'), ((277, 296), 'myevent.GreetEvent', 'GreetEvent', (['"""world"""'], {}), "('world')\n", (287, 296), False, 'from myevent import GreetEvent\n'), ((304, 321), 'myevent.ByeEvent', 'ByeEvent', (['"""world"""'], {}), "('world')\n", (312, 321), False, 'from myevent import ByeEvent\n'), ((376, 391), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (386, 391), False, 'import time\n'), ((256, 268), 'mylistener.MyListener', 'MyListener', ([], {}), '()\n', (266, 268), False, 'from mylistener import MyListener\n'), ((416, 428), 'mylistener.MyListener', 'MyListener', ([], {}), '()\n', (426, 428), False, 'from mylistener import MyListener\n')]
kjthegod/chromium
tools/telemetry/telemetry/core/platform/android_device_unittest.py
cf940f7f418436b77e15b1ea23e6fa100ca1c91a
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import unittest

from telemetry import benchmark
from telemetry.core import browser_options
from telemetry.core.platform import android_device
from telemetry.core.platform import android_platform_backend
from telemetry.unittest_util import system_stub


class AndroidDeviceTest(unittest.TestCase):
  def setUp(self):
    self._android_device_stub = system_stub.Override(
        android_device, ['adb_commands'])

  def testGetAllAttachedAndroidDevices(self):
    self._android_device_stub.adb_commands.attached_devices = [
        '01', '02']
    self.assertEquals(
        set(['01', '02']),
        set(device.device_id for device in
            android_device.AndroidDevice.GetAllConnectedDevices()))

  def tearDown(self):
    self._android_device_stub.Restore()


class GetDeviceTest(unittest.TestCase):
  def setUp(self):
    self._android_device_stub = system_stub.Override(
        android_device,
        ['adb_commands', 'os', 'subprocess', 'logging'])
    self._apb_stub = system_stub.Override(
        android_platform_backend, ['adb_commands'])

  def tearDown(self):
    self._android_device_stub.Restore()
    self._apb_stub.Restore()

  def testNoAdbReturnsNone(self):
    finder_options = browser_options.BrowserFinderOptions()
    def NoAdb(*_, **__):
      raise OSError('not found')
    self._android_device_stub.subprocess.Popen = NoAdb
    self.assertEquals([], self._android_device_stub.logging.warnings)
    self.assertIsNone(android_device.GetDevice(finder_options))

  def testAdbNoDevicesReturnsNone(self):
    finder_options = browser_options.BrowserFinderOptions()
    self.assertEquals([], self._android_device_stub.logging.warnings)
    self.assertIsNone(android_device.GetDevice(finder_options))

  def testAdbPermissionsErrorReturnsNone(self):
    finder_options = browser_options.BrowserFinderOptions()
    self._android_device_stub.subprocess.Popen.communicate_result = (
        'List of devices attached\n????????????\tno permissions\n',
        '* daemon not running. starting it now on port 5037 *\n'
        '* daemon started successfully *\n')
    device = android_device.GetDevice(finder_options)
    self.assertEquals([
        'adb devices gave a permissions error. Consider running adb as root:',
        '  adb kill-server',
        '  sudo `which adb` devices\n\n'],
        self._android_device_stub.logging.warnings)
    self.assertIsNone(device)

  def testAdbTwoDevicesReturnsNone(self):
    finder_options = browser_options.BrowserFinderOptions()
    self._android_device_stub.adb_commands.attached_devices = [
        '015d14fec128220c', '015d14fec128220d']
    device = android_device.GetDevice(finder_options)
    self.assertEquals([
        'Multiple devices attached. Please specify one of the following:\n'
        '  --device=015d14fec128220c\n'
        '  --device=015d14fec128220d'],
        self._android_device_stub.logging.warnings)
    self.assertIsNone(device)

  def testAdbPickOneDeviceReturnsDeviceInstance(self):
    finder_options = browser_options.BrowserFinderOptions()
    finder_options.android_device = '555d14fecddddddd'  # pick one
    self._android_device_stub.adb_commands.attached_devices = [
        '015d14fec128220c', '555d14fecddddddd']
    device = android_device.GetDevice(finder_options)
    self.assertEquals([], self._android_device_stub.logging.warnings)
    self.assertEquals('555d14fecddddddd', device.device_id)

  def testAdbOneDeviceReturnsDeviceInstance(self):
    finder_options = browser_options.BrowserFinderOptions()
    self._android_device_stub.adb_commands.attached_devices = (
        ['015d14fec128220c'])
    device = android_device.GetDevice(finder_options)
    self.assertEquals([], self._android_device_stub.logging.warnings)
    self.assertEquals('015d14fec128220c', device.device_id)
[((512, 566), 'telemetry.unittest_util.system_stub.Override', 'system_stub.Override', (['android_device', "['adb_commands']"], {}), "(android_device, ['adb_commands'])\n", (532, 566), False, 'from telemetry.unittest_util import system_stub\n'), ((1033, 1122), 'telemetry.unittest_util.system_stub.Override', 'system_stub.Override', (['android_device', "['adb_commands', 'os', 'subprocess', 'logging']"], {}), "(android_device, ['adb_commands', 'os', 'subprocess',\n 'logging'])\n", (1053, 1122), False, 'from telemetry.unittest_util import system_stub\n'), ((1149, 1213), 'telemetry.unittest_util.system_stub.Override', 'system_stub.Override', (['android_platform_backend', "['adb_commands']"], {}), "(android_platform_backend, ['adb_commands'])\n", (1169, 1213), False, 'from telemetry.unittest_util import system_stub\n'), ((1371, 1409), 'telemetry.core.browser_options.BrowserFinderOptions', 'browser_options.BrowserFinderOptions', ([], {}), '()\n', (1407, 1409), False, 'from telemetry.core import browser_options\n'), ((1722, 1760), 'telemetry.core.browser_options.BrowserFinderOptions', 'browser_options.BrowserFinderOptions', ([], {}), '()\n', (1758, 1760), False, 'from telemetry.core import browser_options\n'), ((1965, 2003), 'telemetry.core.browser_options.BrowserFinderOptions', 'browser_options.BrowserFinderOptions', ([], {}), '()\n', (2001, 2003), False, 'from telemetry.core import browser_options\n'), ((2265, 2305), 'telemetry.core.platform.android_device.GetDevice', 'android_device.GetDevice', (['finder_options'], {}), '(finder_options)\n', (2289, 2305), False, 'from telemetry.core.platform import android_device\n'), ((2627, 2665), 'telemetry.core.browser_options.BrowserFinderOptions', 'browser_options.BrowserFinderOptions', ([], {}), '()\n', (2663, 2665), False, 'from telemetry.core import browser_options\n'), ((2791, 2831), 'telemetry.core.platform.android_device.GetDevice', 'android_device.GetDevice', (['finder_options'], {}), '(finder_options)\n', (2815, 2831), False, 'from telemetry.core.platform import android_device\n'), ((3171, 3209), 'telemetry.core.browser_options.BrowserFinderOptions', 'browser_options.BrowserFinderOptions', ([], {}), '()\n', (3207, 3209), False, 'from telemetry.core import browser_options\n'), ((3402, 3442), 'telemetry.core.platform.android_device.GetDevice', 'android_device.GetDevice', (['finder_options'], {}), '(finder_options)\n', (3426, 3442), False, 'from telemetry.core.platform import android_device\n'), ((3646, 3684), 'telemetry.core.browser_options.BrowserFinderOptions', 'browser_options.BrowserFinderOptions', ([], {}), '()\n', (3682, 3684), False, 'from telemetry.core import browser_options\n'), ((3792, 3832), 'telemetry.core.platform.android_device.GetDevice', 'android_device.GetDevice', (['finder_options'], {}), '(finder_options)\n', (3816, 3832), False, 'from telemetry.core.platform import android_device\n'), ((1617, 1657), 'telemetry.core.platform.android_device.GetDevice', 'android_device.GetDevice', (['finder_options'], {}), '(finder_options)\n', (1641, 1657), False, 'from telemetry.core.platform import android_device\n'), ((1853, 1893), 'telemetry.core.platform.android_device.GetDevice', 'android_device.GetDevice', (['finder_options'], {}), '(finder_options)\n', (1877, 1893), False, 'from telemetry.core.platform import android_device\n'), ((812, 865), 'telemetry.core.platform.android_device.AndroidDevice.GetAllConnectedDevices', 'android_device.AndroidDevice.GetAllConnectedDevices', ([], {}), '()\n', (863, 865), False, 'from telemetry.core.platform import android_device\n')]
bekaaa/xgboost_tuner
logger.py
2d93f6cc751b3a8778420a88caf73fd1dc8ef2ce
#! /usr/bin/env python
import logging
#---------------------------------------
class logger:
    '''
    A ready to use logging class.
    All you need to do is set an object with the parameters (log_filename,
    directory to save it) then whenever you want to add text, type
    obj.add("some text"). The function obj.close() is not important,
    I just added it for coverage.
    You can edit any of the below configuration to whatever you like.
    '''
    def __init__(self, filename, log_dir='../data/log'):
        self.log = None
        self.handler = None
        LOG_PATH = log_dir
        assert type(filename) == str or filename != ''
        self.logger = logging.getLogger()
        self.logger.setLevel(logging.INFO)
        filename = LOG_PATH + str(filename)
        self.handler = logging.FileHandler(filename)
        self.handler.setLevel(logging.INFO)
        formatter = logging.Formatter(
            fmt='%(asctime)s : %(message)s',
            datefmt='%d-%m %H:%M'
        )
        self.handler.setFormatter(formatter)
        self.logger.addHandler(self.handler)
        return
    #------------------------------------
    def add(self, message):
        assert type(message) == str
        self.logger.info(message)
        return
    #------------------------------------
    def close(self):
        self.logger.removeHandler(self.handler)
        return
#----------------------------------------
[((615, 634), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (632, 634), False, 'import logging\n'), ((728, 757), 'logging.FileHandler', 'logging.FileHandler', (['filename'], {}), '(filename)\n', (747, 757), False, 'import logging\n'), ((810, 883), 'logging.Formatter', 'logging.Formatter', ([], {'fmt': '"""%(asctime)s : %(message)s"""', 'datefmt': '"""%d-%m %H:%M"""'}), "(fmt='%(asctime)s : %(message)s', datefmt='%d-%m %H:%M')\n", (827, 883), False, 'import logging\n')]
lessleslie/slm-code-generation
baselines/prep_baseline.py
017ac0828faf3467e9f85883e27be09ec3898b14
import json
import multiprocessing as mp
import re
from argparse import ArgumentParser
from enum import Enum, auto

import javalang
from functools import partial

PRED_TOKEN = 'PRED'
modifiers = ['public', 'private', 'protected', 'static']


class TargetType(Enum):
    seq = auto()
    tree = auto()

    @staticmethod
    def from_string(s):
        try:
            return TargetType[s]
        except KeyError:
            raise ValueError()


target_type = TargetType.seq

RE_WORDS = re.compile(r'''
    # Find words in a string. Order matters!
    [A-Z]+(?=[A-Z][a-z]) |  # All upper case before a capitalized word
    [A-Z]?[a-z]+ |          # Capitalized words / all lower case
    [A-Z]+ |                # All upper case
    \d+ |                   # Numbers
    _ |
    \" |
    .+
''', re.VERBOSE)

TREE_SPLIT = re.compile(r'([(),])')


def split_subtokens(str):
    return [subtok for subtok in RE_WORDS.findall(str) if not subtok == '_']


def subtokenize(s):
    failed = False
    try:
        tokens = list(javalang.tokenizer.tokenize(s))
    except:
        try:
            tokens = list(javalang.tokenizer.tokenize(s + '()'))[:-2]
        except:
            try:
                tokens = list(javalang.tokenizer.tokenize('(' + s + ')'))[1:-1]
            except:
                tokens = s.split()
                failed = True
    if failed:
        return [' _ '.join(split_subtokens(i)) for i in tokens if not i in modifiers]
    else:
        return [' _ '.join(split_subtokens(i.value)) for i in tokens if not i.value in modifiers]


def subtokenize_tree(s):
    return ' '.join([sub for sub in re.split(TREE_SPLIT, s) if len(sub) > 0])


def process_line(target_type, max_targets, max_nodes, line):
    obj = json.loads(line)
    left_context = obj['left_context']
    right_context = obj['right_context']
    target_seq = obj['target_seq']
    num_targets = obj['num_targets']
    num_nodes = obj['num_nodes']
    if max_targets is not None and num_targets > max_targets:
        return None, None
    if max_nodes is not None and num_nodes > max_nodes:
        return None, None
    if target_type is TargetType.seq:
        target_pred = ' '.join(subtokenize(target_seq)).lower()
    elif target_type is TargetType.tree:
        target_pred = subtokenize_tree(obj['linearized_tree'])
    source = '{} {} {}'.format(' '.join(subtokenize(left_context)[-200:]).lower(), PRED_TOKEN,
                               ' '.join(subtokenize(right_context)[:200]).lower())
    return source, target_pred


def process_file(file_path, data_file_role, dataset_name, target_type, max_targets, max_nodes):
    total_examples = 0
    source_output_path = '{}.{}.{}.source.txt'.format(dataset_name, target_type, data_file_role)
    target_output_path = '{}.{}.{}.target.txt'.format(dataset_name, target_type, data_file_role)
    with open(source_output_path, 'w') as source_output_file:
        with open(target_output_path, 'w') as target_output_file:
            with open(file_path, 'r') as file:
                subtokenize_line = partial(process_line, target_type, max_targets, max_nodes)
                with mp.Pool(64) as pool:
                    if data_file_role in ['test', 'val']:
                        examples = [process_line(target_type, max_targets, max_nodes, line) for line in file]
                    else:
                        examples = pool.imap_unordered(subtokenize_line, file, chunksize=100)
                    for source_seq, target_seq in examples:
                        if source_seq is None or target_seq is None:
                            continue
                        source_output_file.write(source_seq + '\n')
                        target_output_file.write(target_seq + '\n')
                        total_examples += 1
    print('File: ' + file_path)
    print('Total examples: ' + str(total_examples))


if __name__ == '__main__':
    parser = ArgumentParser()
    parser.add_argument("-trd", "--train_data", dest="train_data_path",
                        help="path to training data file", required=True)
    parser.add_argument("-ted", "--test_data", dest="test_data_path",
                        help="path to test data file", required=True)
    parser.add_argument("-vd", "--val_data", dest="val_data_path",
                        help="path to validation data file", required=True)
    parser.add_argument("-o", "--output_name", dest="output_name",
                        help="output name - the base name for the created dataset",
                        metavar="FILE", required=True, default='data')
    parser.add_argument("--target_type", dest="target_type", type=TargetType.from_string,
                        choices=list(TargetType), required=True)
    parser.add_argument("--max_targets", dest="max_targets", type=int, required=False, default=40)
    parser.add_argument("--max_nodes", dest="max_nodes", type=int, required=False, default=None)
    parser.add_argument('--local', action='store_true')
    args = parser.parse_args()

    train_data_path = args.train_data_path
    test_data_path = args.test_data_path
    val_data_path = args.val_data_path
    for data_file_path, data_role in zip([train_data_path, test_data_path, val_data_path],
                                         ['train', 'test', 'val']):
        process_file(file_path=data_file_path, data_file_role=data_role,
                     dataset_name=args.output_name, target_type=args.target_type,
                     max_targets=args.max_targets, max_nodes=args.max_nodes)
[((487, 778), 're.compile', 're.compile', (['"""\n # Find words in a string. Order matters!\n [A-Z]+(?=[A-Z][a-z]) | # All upper case before a capitalized word\n [A-Z]?[a-z]+ | # Capitalized words / all lower case\n [A-Z]+ | # All upper case\n \\\\d+ | # Numbers\n _ |\n \\\\" |\n .+\n"""', 're.VERBOSE'], {}), '(\n """\n # Find words in a string. Order matters!\n [A-Z]+(?=[A-Z][a-z]) | # All upper case before a capitalized word\n [A-Z]?[a-z]+ | # Capitalized words / all lower case\n [A-Z]+ | # All upper case\n \\\\d+ | # Numbers\n _ |\n \\\\" |\n .+\n"""\n , re.VERBOSE)\n', (497, 778), False, 'import re\n'), ((782, 803), 're.compile', 're.compile', (['"""([(),])"""'], {}), "('([(),])')\n", (792, 803), False, 'import re\n'), ((274, 280), 'enum.auto', 'auto', ([], {}), '()\n', (278, 280), False, 'from enum import Enum, auto\n'), ((292, 298), 'enum.auto', 'auto', ([], {}), '()\n', (296, 298), False, 'from enum import Enum, auto\n'), ((1691, 1707), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (1701, 1707), False, 'import json\n'), ((3999, 4015), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (4013, 4015), False, 'from argparse import ArgumentParser\n'), ((980, 1010), 'javalang.tokenizer.tokenize', 'javalang.tokenizer.tokenize', (['s'], {}), '(s)\n', (1007, 1010), False, 'import javalang\n'), ((1577, 1600), 're.split', 're.split', (['TREE_SPLIT', 's'], {}), '(TREE_SPLIT, s)\n', (1585, 1600), False, 'import re\n'), ((2975, 3033), 'functools.partial', 'partial', (['process_line', 'target_type', 'max_targets', 'max_nodes'], {}), '(process_line, target_type, max_targets, max_nodes)\n', (2982, 3033), False, 'from functools import partial\n'), ((3055, 3066), 'multiprocessing.Pool', 'mp.Pool', (['(64)'], {}), '(64)\n', (3062, 3066), True, 'import multiprocessing as mp\n'), ((1063, 1100), 'javalang.tokenizer.tokenize', 'javalang.tokenizer.tokenize', (["(s + '()')"], {}), "(s + '()')\n", (1090, 1100), False, 'import javalang\n'), ((1170, 1212), 'javalang.tokenizer.tokenize', 'javalang.tokenizer.tokenize', (["('(' + s + ')')"], {}), "('(' + s + ')')\n", (1197, 1212), False, 'import javalang\n')]
varioustoxins/spack
var/spack/repos/builtin/packages/r-multicool/package.py
cab0e4cb240f34891a6d753f3393e512f9a99e9a
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RMulticool(RPackage):
    """Permutations of multisets in cool-lex order

    A set of tools to permute multisets without loops or hash tables and to
    generate integer partitions. The permutation functions are based on C code
    from Aaron Williams. Cool-lex order is similar to colexicographical order.
    The algorithm is described in Williams, A. (2009)
    <DOI:10.1145/1496770.1496877> Loopless Generation of Multiset Permutations
    by Prefix Shifts. Symposium on Discrete Algorithms, New York, United
    States. The permutation code is distributed without restrictions. The code
    for stable and efficient computation of multinomial coefficients comes
    from Dave Barber. The code can be download from
    <http://tamivox.org/dave/multinomial/code.html> and is distributed without
    conditions. The package also generates the integer partitions of a
    positive, non-zero integer n. The C++ code for this is based on Python
    code from Jerome Kelleher which can be found here
    <https://jeromekelleher.net/tag/integer-partitions.html>. The C++ code and
    Python code are distributed without conditions."""

    homepage = "https://cloud.r-project.org/package=multicool"
    url = "https://cloud.r-project.org/src/contrib/Archive/multicool/multicool_0.1-9.tar.gz"
    list_url = "https://cloud.r-project.org/src/contrib/Archive/multicool/Archive/multicool"

    version('0.1-10', sha256='5bb0cb0d9eb64420c862877247a79bb0afadacfe23262ec8c3fa26e5e34d6ff9')
    version('0.1-9', sha256='bdf92571cef1b649952d155395a92b8683099ee13114f73a9d41fc5d7d49d329')

    depends_on('[email protected]:', type=('build', 'run'))
[]
erincerys/ergo
updatetranslations.py
0aeedcdcccb5348d8eedb5faa6a0536d93ca3ae3
#!/usr/bin/env python3
# updatetranslations.py
#
# tl;dr this script updates our translation file with the newest, coolest strings we've added!
# it manually searches the source code, extracts strings and then updates the language files.
#
# Written in 2018 by Daniel Oaks <[email protected]>
#
# To the extent possible under law, the author(s) have dedicated all copyright
# and related and neighboring rights to this software to the public domain
# worldwide. This software is distributed without any warranty.
#
# You should have received a copy of the CC0 Public Domain Dedication along
# with this software. If not, see
# <http://creativecommons.org/publicdomain/zero/1.0/>.
"""updatetranslations.py

Usage:
    updatetranslations.py run <irc-dir> <languages-dir>
    updatetranslations.py --version
    updatetranslations.py (-h | --help)

Options:
    <irc-dir>        Oragono's irc subdirectory where the Go code is kept.
    <languages-dir>  Languages directory."""
import os
import re
import json

from docopt import docopt
import yaml

ignored_strings = [
    'none', 'saset'
]

if __name__ == '__main__':
    arguments = docopt(__doc__, version="0.1.0")

    if arguments['run']:
        # general IRC strings
        irc_strings = []
        for subdir, dirs, files in os.walk(arguments['<irc-dir>']):
            for fname in files:
                filepath = subdir + os.sep + fname
                if filepath.endswith('.go'):
                    content = open(filepath, 'r', encoding='UTF-8').read()
                    matches = re.findall(r'\.t\("((?:[^"]|\\")+)"\)', content)
                    for match in matches:
                        if match not in irc_strings:
                            irc_strings.append(match)
                    matches = re.findall(r'\.t\(\`([^\`]+)\`\)', content)
                    for match in matches:
                        if match not in irc_strings:
                            irc_strings.append(match)

        for s in ignored_strings:
            try:
                irc_strings.remove(s)
            except ValueError:
                # ignore any that don't exist
                ...

        print("irc strings:", len(irc_strings))
        with open(os.path.join(arguments['<languages-dir>'], 'example', 'irc.lang.json'), 'w') as f:
            f.write(json.dumps({k: k for k in irc_strings}, sort_keys=True, indent=2, separators=(',', ': ')))
            f.write('\n')
        for string in irc_strings:
            if 1 < string.count('%s') + string.count('%d') + string.count('%f'):
                print(' confirm:', string)

        # help entries
        help_strings = []
        for subdir, dirs, files in os.walk(arguments['<irc-dir>']):
            for fname in files:
                filepath = subdir + os.sep + fname
                if fname == 'help.go':
                    content = open(filepath, 'r', encoding='UTF-8').read()
                    matches = re.findall(r'\`([^\`]+)\`', content)
                    for match in matches:
                        if '\n' in match and match not in help_strings:
                            help_strings.append(match)

        for s in ignored_strings:
            try:
                help_strings.remove(s)
            except ValueError:
                # ignore any that don't exist
                ...

        print("help strings:", len(help_strings))
        with open(os.path.join(arguments['<languages-dir>'], 'example', 'help.lang.json'), 'w') as f:
            f.write(json.dumps({k: k for k in help_strings}, sort_keys=True, indent=2, separators=(',', ': ')))
            f.write('\n')
        for string in help_strings:
            if 1 < string.count('%s') + string.count('%d') + string.count('%f'):
                print(' confirm:', string.split('\n')[0])

        # nickserv help entries
        help_strings = []
        for subdir, dirs, files in os.walk(arguments['<irc-dir>']):
            for fname in files:
                filepath = subdir + os.sep + fname
                if fname == 'nickserv.go':
                    content = open(filepath, 'r', encoding='UTF-8').read()
                    matches = re.findall(r'\`([^\`]+)\`', content)
                    for match in matches:
                        if match not in help_strings:
                            help_strings.append(match)

        for s in ignored_strings:
            try:
                help_strings.remove(s)
            except ValueError:
                # ignore any that don't exist
                ...

        print("nickserv help strings:", len(help_strings))
        with open(os.path.join(arguments['<languages-dir>'], 'example', 'nickserv.lang.json'), 'w') as f:
            f.write(json.dumps({k: k for k in help_strings}, sort_keys=True, indent=2, separators=(',', ': ')))
            f.write('\n')
        for string in help_strings:
            if 1 < string.count('%s') + string.count('%d') + string.count('%f'):
                print(' confirm:', string)

        # chanserv help entries
        help_strings = []
        for subdir, dirs, files in os.walk(arguments['<irc-dir>']):
            for fname in files:
                filepath = subdir + os.sep + fname
                if fname == 'chanserv.go':
                    content = open(filepath, 'r', encoding='UTF-8').read()
                    matches = re.findall(r'\`([^\`]+)\`', content)
                    for match in matches:
                        if match not in help_strings:
                            help_strings.append(match)

        for s in ignored_strings:
            try:
                help_strings.remove(s)
            except ValueError:
                # ignore any that don't exist
                ...

        print("chanserv help strings:", len(help_strings))
        with open(os.path.join(arguments['<languages-dir>'], 'example', 'chanserv.lang.json'), 'w') as f:
            f.write(json.dumps({k: k for k in help_strings}, sort_keys=True, indent=2, separators=(',', ': ')))
            f.write('\n')
        for string in help_strings:
            if 1 < string.count('%s') + string.count('%d') + string.count('%f'):
                print(' confirm:', string)

        # hostserv help entries
        help_strings = []
        for subdir, dirs, files in os.walk(arguments['<irc-dir>']):
            for fname in files:
                filepath = subdir + os.sep + fname
                if fname == 'hostserv.go':
                    content = open(filepath, 'r', encoding='UTF-8').read()
                    matches = re.findall(r'\`([^\`]+)\`', content)
                    for match in matches:
                        if match not in help_strings:
                            help_strings.append(match)

        for s in ignored_strings:
            try:
                help_strings.remove(s)
            except ValueError:
                # ignore any that don't exist
                ...

        print("hostserv help strings:", len(help_strings))
        with open(os.path.join(arguments['<languages-dir>'], 'example', 'hostserv.lang.json'), 'w') as f:
            f.write(json.dumps({k: k for k in help_strings}, sort_keys=True, indent=2, separators=(',', ': ')))
            f.write('\n')
        for string in help_strings:
            if 1 < string.count('%s') + string.count('%d') + string.count('%f'):
                print(' confirm:', string)
[((1137, 1169), 'docopt.docopt', 'docopt', (['__doc__'], {'version': '"""0.1.0"""'}), "(__doc__, version='0.1.0')\n", (1143, 1169), False, 'from docopt import docopt\n'), ((1287, 1318), 'os.walk', 'os.walk', (["arguments['<irc-dir>']"], {}), "(arguments['<irc-dir>'])\n", (1294, 1318), False, 'import os\n'), ((2697, 2728), 'os.walk', 'os.walk', (["arguments['<irc-dir>']"], {}), "(arguments['<irc-dir>'])\n", (2704, 2728), False, 'import os\n'), ((3915, 3946), 'os.walk', 'os.walk', (["arguments['<irc-dir>']"], {}), "(arguments['<irc-dir>'])\n", (3922, 3946), False, 'import os\n'), ((5117, 5148), 'os.walk', 'os.walk', (["arguments['<irc-dir>']"], {}), "(arguments['<irc-dir>'])\n", (5124, 5148), False, 'import os\n'), ((6319, 6350), 'os.walk', 'os.walk', (["arguments['<irc-dir>']"], {}), "(arguments['<irc-dir>'])\n", (6326, 6350), False, 'import os\n'), ((2230, 2300), 'os.path.join', 'os.path.join', (["arguments['<languages-dir>']", '"""example"""', '"""irc.lang.json"""'], {}), "(arguments['<languages-dir>'], 'example', 'irc.lang.json')\n", (2242, 2300), False, 'import os\n'), ((2333, 2426), 'json.dumps', 'json.dumps', (['{k: k for k in irc_strings}'], {'sort_keys': '(True)', 'indent': '(2)', 'separators': "(',', ': ')"}), "({k: k for k in irc_strings}, sort_keys=True, indent=2,\n separators=(',', ': '))\n", (2343, 2426), False, 'import json\n'), ((3421, 3492), 'os.path.join', 'os.path.join', (["arguments['<languages-dir>']", '"""example"""', '"""help.lang.json"""'], {}), "(arguments['<languages-dir>'], 'example', 'help.lang.json')\n", (3433, 3492), False, 'import os\n'), ((3525, 3619), 'json.dumps', 'json.dumps', (['{k: k for k in help_strings}'], {'sort_keys': '(True)', 'indent': '(2)', 'separators': "(',', ': ')"}), "({k: k for k in help_strings}, sort_keys=True, indent=2,\n separators=(',', ': '))\n", (3535, 3619), False, 'import json\n'), ((4634, 4709), 'os.path.join', 'os.path.join', (["arguments['<languages-dir>']", '"""example"""', '"""nickserv.lang.json"""'], {}), "(arguments['<languages-dir>'], 'example', 'nickserv.lang.json')\n", (4646, 4709), False, 'import os\n'), ((4742, 4836), 'json.dumps', 'json.dumps', (['{k: k for k in help_strings}'], {'sort_keys': '(True)', 'indent': '(2)', 'separators': "(',', ': ')"}), "({k: k for k in help_strings}, sort_keys=True, indent=2,\n separators=(',', ': '))\n", (4752, 4836), False, 'import json\n'), ((5836, 5911), 'os.path.join', 'os.path.join', (["arguments['<languages-dir>']", '"""example"""', '"""chanserv.lang.json"""'], {}), "(arguments['<languages-dir>'], 'example', 'chanserv.lang.json')\n", (5848, 5911), False, 'import os\n'), ((5944, 6038), 'json.dumps', 'json.dumps', (['{k: k for k in help_strings}'], {'sort_keys': '(True)', 'indent': '(2)', 'separators': "(',', ': ')"}), "({k: k for k in help_strings}, sort_keys=True, indent=2,\n separators=(',', ': '))\n", (5954, 6038), False, 'import json\n'), ((7038, 7113), 'os.path.join', 'os.path.join', (["arguments['<languages-dir>']", '"""example"""', '"""hostserv.lang.json"""'], {}), "(arguments['<languages-dir>'], 'example', 'hostserv.lang.json')\n", (7050, 7113), False, 'import os\n'), ((7146, 7240), 'json.dumps', 'json.dumps', (['{k: k for k in help_strings}'], {'sort_keys': '(True)', 'indent': '(2)', 'separators': "(',', ': ')"}), "({k: k for k in help_strings}, sort_keys=True, indent=2,\n separators=(',', ': '))\n", (7156, 7240), False, 'import json\n'), ((1554, 1606), 're.findall', 're.findall', (['"""\\\\.t\\\\("((?:[^"]|\\\\\\\\")+)"\\\\)"""', 'content'], {}), '(\'\\\\.t\\\\("((?:[^"]|\\\\\\\\")+)"\\\\)\', content)\n', (1564, 1606), False, 'import re\n'), ((1783, 1831), 're.findall', 're.findall', (['"""\\\\.t\\\\(\\\\`([^\\\\`]+)\\\\`\\\\)"""', 'content'], {}), "('\\\\.t\\\\(\\\\`([^\\\\`]+)\\\\`\\\\)', content)\n", (1793, 1831), False, 'import re\n'), ((2958, 2996), 're.findall', 're.findall', (['"""\\\\`([^\\\\`]+)\\\\`"""', 'content'], {}), "('\\\\`([^\\\\`]+)\\\\`', content)\n", (2968, 2996), False, 'import re\n'), ((4180, 4218), 're.findall', 're.findall', (['"""\\\\`([^\\\\`]+)\\\\`"""', 'content'], {}), "('\\\\`([^\\\\`]+)\\\\`', content)\n", (4190, 4218), False, 'import re\n'), ((5382, 5420), 're.findall', 're.findall', (['"""\\\\`([^\\\\`]+)\\\\`"""', 'content'], {}), "('\\\\`([^\\\\`]+)\\\\`', content)\n", (5392, 5420), False, 'import re\n'), ((6584, 6622), 're.findall', 're.findall', (['"""\\\\`([^\\\\`]+)\\\\`"""', 'content'], {}), "('\\\\`([^\\\\`]+)\\\\`', content)\n", (6594, 6622), False, 'import re\n')]
apanda/modeling
processing_tools/number_of_tenants.py
e032abd413bb3325ad6e5995abadeef74314f383
import sys
from collections import defaultdict

def Process(fnames):
    tenant_time = defaultdict(lambda: defaultdict(lambda: 0.0))
    tenant_run = defaultdict(lambda: defaultdict(lambda: 0))
    for fname in fnames:
        f = open(fname)
        for l in f:
            if l.startswith("tenant"):
                continue
            parts = l.strip().split()
            tenants = int(parts[0])
            priv = int(parts[1])
            pub = int(parts[2])
            num_machines = tenants * priv * pub
            int_checks = (tenants * tenants * priv * (priv - 1)) / 2
            int_time = int_checks * float(parts[3])
            ext_checks = (tenants * priv) * ((tenants - 1) * pub)
            ext_time = ext_checks * float(parts[4])
            oext_check = (tenants * priv) * (tenants * pub)
            oext_time = oext_check * float(parts[5])
            total = int_time + ext_time + oext_time
            tenant_time[(priv, pub)][tenants] += total
            tenant_run[(priv, pub)][tenants] += 1
    for k in sorted(tenant_run.keys()):
        print "# ----%s------" % (str(k))
        for k2 in sorted(tenant_run[k].keys()):
            print "%d %d %f" % (k2, tenant_run[k][k2], \
                tenant_time[k][k2] / float(tenant_run[k][k2]))
        print
        print

if __name__ == "__main__":
    Process(sys.argv[1:])
[]
borisbolliet/pyfisher
pyfisher/mpi.py
715e192baa4fadbff754416d2b001c3708c9276c
from __future__ import print_function
import numpy as np
import os, sys, time

"""
Copied from orphics.mpi
"""

try:
    disable_mpi_env = os.environ['DISABLE_MPI']
    disable_mpi = True if disable_mpi_env.lower().strip() == "true" else False
except:
    disable_mpi = False

"""
Use the below cleanup stuff only for intel-mpi!
If you use it on openmpi, you will have no traceback for errors
causing hours of endless confusion and frustration!
- Sincerely, past frustrated Mat
"""
# From Sigurd's enlib.mpi:
# Uncaught exceptions don't cause mpi to abort. This can lead to thousands of
# wasted CPU hours
# def cleanup(type, value, traceback):
#     sys.__excepthook__(type, value, traceback)
#     MPI.COMM_WORLD.Abort(1)
# sys.excepthook = cleanup


class fakeMpiComm:
    """
    A Simple Fake MPI implementation
    """

    def __init__(self):
        pass

    def Get_rank(self):
        return 0

    def Get_size(self):
        return 1

    def Barrier(self):
        pass

    def Abort(self, dummy):
        pass


try:
    if disable_mpi:
        raise
    from mpi4py import MPI
except:
    if not disable_mpi:
        print("WARNING: mpi4py could not be loaded. Falling back to fake MPI. This means that if you submitted multiple processes, they will all be assigned the same rank of 0, and they are potentially doing the same thing.")

    class template:
        pass

    MPI = template()
    MPI.COMM_WORLD = fakeMpiComm()


def mpi_distribute(num_tasks, avail_cores, allow_empty=False):
    # copied to mapsims.convert_noise_templates
    if not allow_empty:
        assert avail_cores <= num_tasks
    min_each, rem = divmod(num_tasks, avail_cores)
    num_each = np.array([min_each] * avail_cores)  # first distribute equally
    if rem > 0:
        num_each[-rem:] += 1  # add the remainder to the last set of cores (so that rank 0 never gets extra jobs)
    task_range = list(range(num_tasks))  # the full range of tasks
    cumul = np.cumsum(num_each).tolist()  # the end indices for each task
    task_dist = [task_range[x:y] for x, y in zip([0] + cumul[:-1], cumul)]  # a list containing the tasks for each core
    assert sum(num_each) == num_tasks
    assert len(num_each) == avail_cores
    assert len(task_dist) == avail_cores
    return num_each, task_dist


def distribute(njobs, verbose=True, **kwargs):
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()
    numcores = comm.Get_size()
    num_each, each_tasks = mpi_distribute(njobs, numcores, **kwargs)
    if rank == 0:
        print("At most", max(num_each), "tasks...")
    my_tasks = each_tasks[rank]
    return comm, rank, my_tasks
[((1658, 1692), 'numpy.array', 'np.array', (['([min_each] * avail_cores)'], {}), '([min_each] * avail_cores)\n', (1666, 1692), True, 'import numpy as np\n'), ((1917, 1936), 'numpy.cumsum', 'np.cumsum', (['num_each'], {}), '(num_each)\n', (1926, 1936), True, 'import numpy as np\n')]
SpironoZeppeli/Magic-The-Scannening
ebay.py
93c595a4c98fb725a79eeddfaba99cb0409d41fb
import requests
import urllib.request
import urllib.parse
import PIL
import re
import configparser
import json
from PIL import Image
from ebaysdk.trading import Connection as Trading
from ebaysdk.exception import ConnectionError
from yaml import load
from PyQt5.QtWidgets import QMessageBox


class EbaySeller:
    def __init__(self):
        self.api = Trading()
        config = configparser.ConfigParser()
        config.read('config.ini')
        with open('details.yaml', 'r') as file:
            self.yaml_config = load(file)

    def upload_card(self, card_name, eu_card_price, us_card_price, card_id):
        if us_card_price != 0:
            card_price = us_card_price * 0.8
        else:
            card_price = eu_card_price
        if card_price < 1:
            card_price = 1
        card_price = str(round(card_price, 2))
        try:
            card_image = 'http://gatherer.wizards.com/Handlers/Image.ashx?multiverseid=' + card_id + '&type=card'
        except:
            self.msg = QMessageBox()
            self.msg.setWindowTitle("Upload Failed")
            self.msg.setText("Upload Failed, wizards gatherer error")
            self.msg.setStandardButtons(QMessageBox.Ok)
            self.msg.exec()
        urllib.request.urlretrieve(card_image, 'temp.jpg')

        # Resize card
        base_height = 500
        img = Image.open('temp.jpg')
        height_percent = (base_height / float(img.size[1]))
        wsize = int((float(img.size[0]) * float(height_percent)))
        img = img.resize((wsize, base_height), PIL.Image.ANTIALIAS)
        img.save('temp.png')

        # Upload to PictShare
        files = {'file': open('temp.png', 'rb')}
        try:
            r = requests.post('https://pictshare.net/api/upload.php', files=files)
        except:
            self.msg = QMessageBox()
            self.msg.setWindowTitle("Upload Failed")
            self.msg.setText("Upload Failed, PictShare error")
            self.msg.setStandardButtons(QMessageBox.Ok)
            self.msg.exec()
        print(r)
        r = r.text
        r = json.loads(r)
        print(r)
        r = r['url']
        # Fix using regular expression, may not be needed at a later date
        r = re.sub('\\.net', '.net/', r)
        r = re.sub('\\.net//', '.net/', r)
        print(r)
        try:
            image = self.api.execute('UploadSiteHostedPictures', {'ExternalPictureURL': r})
            image = image.dict()
            image = image['SiteHostedPictureDetails']['FullURL']
            print(image)

            # Upload to ebay
            response = self.api.execute('AddFixedPriceItem', {
                'Item': {'Title': card_name + ' MTG - NM/M',
                         'Description': card_name + ' MTG - NM/M',
                         'Quantity': '1',
                         'PictureDetails': {'PictureURL': image},
                         'ReturnPolicy': {'ReturnsAcceptedOption': 'ReturnsNotAccepted'},
                         'DispatchTimeMax': '3',
                         'ConditionID': '1000',
                         'StartPrice': card_price,
                         'PostalCode': self.yaml_config["PostalCode"],
                         'Currency': self.yaml_config["Currency"],
                         'Country': 'GB',
                         'ListingDuration': 'Days_30',
                         'PaymentMethods': 'PayPal',
                         'PayPalEmailAddress': self.yaml_config["PayPalEmailAddress"],
                         'PrimaryCategory': {'CategoryID': '38292'},
                         'ShippingDetails': {'ShippingType': 'Flat',
                                             'ShippingServiceOptions': {'ShippingServicePriority': '1',
                                                                        'ShippingService': self.yaml_config["ShippingService"],
                                                                        'ShippingServiceCost': '1'}}}})
            print(response.dict())
            print(response.reply)
            self.msg = QMessageBox()
            if response.reply.Ack == 'Failure':
                self.msg.setWindowTitle("Upload Failed")
                self.msg.setText("Upload Complete, please check log.txt")
                self.msg.setStandardButtons(QMessageBox.Ok)
                with open('log.txt', 'a+') as log_file:
                    log_file.write(response.reply)
            else:
                self.msg.setWindowTitle("Upload Complete")
                self.msg.setText("Upload Complete, please check your ebay account to confirm")
                self.msg.setStandardButtons(QMessageBox.Ok)
            self.msg.exec()
        except ConnectionError as e:
            print(e)
            print(e.response.dict())

    def get_multiverse_id(self, name):
        try:
            name = re.sub(' ', '%20', name)
            r = requests.get('https://api.scryfall.com/cards/named?exact=' + name)
            r = json.loads(r.text)
            return r['multiverse_ids'][0]
        except:
            self.msg = QMessageBox()
            self.msg.setWindowTitle("Upload Failed")
            self.msg.setText("Upload Failed, scryfall error")
            self.msg.setStandardButtons(QMessageBox.Ok)
            self.msg.exec()

    def get_card_info_and_sell(self, name):
        try:
            multiverse_id = self.get_multiverse_id(name)
            r = requests.get('http://api.cardsearch.nl/v1/prices?key=W00dw0rk$&mids[]=' + str(multiverse_id))
            r = json.loads(r.text)
            r = r[0]
            card_name = r.get('name')
            eu_card_price = r.get('price_normal')
            us_card_price = r.get('us_normal')
            card_set = r.get('set_id')
            card_set_name = r.get('set_name')
            card_id = r.get('multiverse_id')

            # Display card info in CLI
            print('Name: ' + card_name)
            print('Set: ' + card_set)
            print('Set name: ' + card_set_name)
            print('Card ID: ' + str(card_id))
            self.upload_card(card_name, eu_card_price, us_card_price, card_id)
        except:
            self.msg = QMessageBox()
            self.msg.setWindowTitle("Upload Failed")
            self.msg.setText("Upload Failed, card name not valid")
            self.msg.setStandardButtons(QMessageBox.Ok)
            self.msg.exec()
[((355, 364), 'ebaysdk.trading.Connection', 'Trading', ([], {}), '()\n', (362, 364), True, 'from ebaysdk.trading import Connection as Trading\n'), ((382, 409), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (407, 409), False, 'import configparser\n'), ((1350, 1372), 'PIL.Image.open', 'Image.open', (['"""temp.jpg"""'], {}), "('temp.jpg')\n", (1360, 1372), False, 'from PIL import Image\n'), ((2072, 2085), 'json.loads', 'json.loads', (['r'], {}), '(r)\n', (2082, 2085), False, 'import json\n'), ((2210, 2238), 're.sub', 're.sub', (['"""\\\\.net"""', '""".net/"""', 'r'], {}), "('\\\\.net', '.net/', r)\n", (2216, 2238), False, 'import re\n'), ((2251, 2281), 're.sub', 're.sub', (['"""\\\\.net//"""', '""".net/"""', 'r'], {}), "('\\\\.net//', '.net/', r)\n", (2257, 2281), False, 'import re\n'), ((523, 533), 'yaml.load', 'load', (['file'], {}), '(file)\n', (527, 533), False, 'from yaml import load\n'), ((1704, 1770), 'requests.post', 'requests.post', (['"""https://pictshare.net/api/upload.php"""'], {'files': 'files'}), "('https://pictshare.net/api/upload.php', files=files)\n", (1717, 1770), False, 'import requests\n'), ((3938, 3951), 'PyQt5.QtWidgets.QMessageBox', 'QMessageBox', ([], {}), '()\n', (3949, 3951), False, 'from PyQt5.QtWidgets import QMessageBox\n'), ((4727, 4751), 're.sub', 're.sub', (['""" """', '"""%20"""', 'name'], {}), "(' ', '%20', name)\n", (4733, 4751), False, 'import re\n'), ((4768, 4834), 'requests.get', 'requests.get', (["('https://api.scryfall.com/cards/named?exact=' + name)"], {}), "('https://api.scryfall.com/cards/named?exact=' + name)\n", (4780, 4834), False, 'import requests\n'), ((4851, 4869), 'json.loads', 'json.loads', (['r.text'], {}), '(r.text)\n', (4861, 4869), False, 'import json\n'), ((5407, 5425), 'json.loads', 'json.loads', (['r.text'], {}), '(r.text)\n', (5417, 5425), False, 'import json\n'), ((1008, 1021), 'PyQt5.QtWidgets.QMessageBox', 'QMessageBox', ([], {}), '()\n', (1019, 1021), False, 'from PyQt5.QtWidgets import QMessageBox\n'), ((1810, 1823), 'PyQt5.QtWidgets.QMessageBox', 'QMessageBox', ([], {}), '()\n', (1821, 1823), False, 'from PyQt5.QtWidgets import QMessageBox\n'), ((4951, 4964), 'PyQt5.QtWidgets.QMessageBox', 'QMessageBox', ([], {}), '()\n', (4962, 4964), False, 'from PyQt5.QtWidgets import QMessageBox\n'), ((6042, 6055), 'PyQt5.QtWidgets.QMessageBox', 'QMessageBox', ([], {}), '()\n', (6053, 6055), False, 'from PyQt5.QtWidgets import QMessageBox\n')]
v1nam/gurkbot
bot/exts/github/github.py
a0f5e05a5f65e6169accc90271fca58f4df211fe
import typing

from bot.constants import BOT_REPO_URL
from discord import Embed
from discord.ext import commands
from discord.ext.commands.cooldowns import BucketType

from . import _issues, _profile, _source


class Github(commands.Cog):
    """
    Github Category cog, which contains commands related to github.

    Commands:
        ├ profile       Fetches a user's GitHub information.
        ├ issue         Command to retrieve issue(s) from a GitHub repository.
        └ source        Displays information about the bot's source code.
    """

    def __init__(self, bot: commands.Bot) -> None:
        self.bot = bot

    @commands.group(name="github", aliases=("gh",), invoke_without_command=True)
    async def github_group(self, ctx: commands.Context) -> None:
        """Commands for Github."""
        await ctx.send_help(ctx.command)

    @github_group.command(name="profile")
    @commands.cooldown(1, 10, BucketType.user)
    async def profile(self, ctx: commands.Context, username: str) -> None:
        """
        Fetches a user's GitHub information.

        Username is optional and sends the help command if not specified.
        """
        github_profile = _profile.GithubInfo(self.bot.http_session)

        embed = await github_profile.get_github_info(username)

        await ctx.send(embed=embed)

    @github_group.command(name="issue", aliases=("pr",))
    async def issue(
        self,
        ctx: commands.Context,
        numbers: commands.Greedy[int],
        repository: typing.Optional[str] = None,
    ) -> None:
        """Command to retrieve issue(s) from a GitHub repository."""
        github_issue = _issues.Issues(self.bot.http_session)

        if not numbers:
            raise commands.MissingRequiredArgument(ctx.command.clean_params["numbers"])

        if repository is None:
            user = "gurkult"
        else:
            user, _, repository = repository.rpartition("/")
            if user == "":
                user = "gurkult"

        embed = await github_issue.issue(ctx.message.channel, numbers, repository, user)

        await ctx.send(embed=embed)

    @github_group.command(name="source", aliases=("src", "inspect"))
    async def source_command(
        self, ctx: commands.Context, *, source_item: typing.Optional[str] = None
    ) -> None:
        """Displays information about the bot's source code."""
        if source_item is None:
            embed = Embed(title="Gurkbot's GitHub Repository")
            embed.add_field(name="Repository", value=f"[Go to GitHub]({BOT_REPO_URL})")
            embed.set_thumbnail(url=self.bot.user.avatar_url)

            await ctx.send(embed=embed)
            return
        elif not ctx.bot.get_command(source_item):
            raise commands.BadArgument(
                f"Unable to convert `{source_item}` to valid command or Cog."
            )

        github_source = _source.Source(self.bot.http_session, self.bot.user.avatar_url)
        embed = await github_source.inspect(cmd=ctx.bot.get_command(source_item))

        await ctx.send(embed=embed)


def setup(bot: commands.Bot) -> None:
    """Load the Github cog."""
    bot.add_cog(Github(bot))
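# Illustrative note (added; not part of the original file): with discord.py-style
# extensions, a cog module like this is normally activated through
# Bot.load_extension(), which imports the module and calls its setup() hook.
# The dotted path below is inferred from this repo's layout (bot/exts/github/github.py)
# and is an assumption:
#
#     bot.load_extension("bot.exts.github.github")  # hypothetical call site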
[((633, 708), 'discord.ext.commands.group', 'commands.group', ([], {'name': '"""github"""', 'aliases': "('gh',)", 'invoke_without_command': '(True)'}), "(name='github', aliases=('gh',), invoke_without_command=True)\n", (647, 708), False, 'from discord.ext import commands\n'), ((898, 939), 'discord.ext.commands.cooldown', 'commands.cooldown', (['(1)', '(10)', 'BucketType.user'], {}), '(1, 10, BucketType.user)\n', (915, 939), False, 'from discord.ext import commands\n'), ((1727, 1796), 'discord.ext.commands.MissingRequiredArgument', 'commands.MissingRequiredArgument', (["ctx.command.clean_params['numbers']"], {}), "(ctx.command.clean_params['numbers'])\n", (1759, 1796), False, 'from discord.ext import commands\n'), ((2432, 2474), 'discord.Embed', 'Embed', ([], {'title': '"""Gurkbot\'s GitHub Repository"""'}), '(title="Gurkbot\'s GitHub Repository")\n', (2437, 2474), False, 'from discord import Embed\n'), ((2753, 2841), 'discord.ext.commands.BadArgument', 'commands.BadArgument', (['f"""Unable to convert `{source_item}` to valid command or Cog."""'], {}), "(\n f'Unable to convert `{source_item}` to valid command or Cog.')\n", (2773, 2841), False, 'from discord.ext import commands\n')]
SmashKs/BarBarian
log/slack_sender.py
b308dcb9e24ec621abbbc121847923e14e5b6a4b
from slackclient import SlackClient

from external import SLACK_API_KEY


class SlackBot:
    API_CHAT_MSG = 'chat.postMessage'
    BOT_NAME = 'News Bot'
    DEFAULT_CHANNEL = 'news_notification'

    def __new__(cls, *p, **k):
        if '_the_instance' not in cls.__dict__:
            cls._the_instance = object.__new__(cls)
        return cls._the_instance

    def __init__(self):
        self.__slack_client = SlackClient(SLACK_API_KEY)

    def send_msg_to(self, text='', channel=DEFAULT_CHANNEL):
        self.__slack_client.api_call(SlackBot.API_CHAT_MSG,
                                     username=SlackBot.BOT_NAME,
                                     channel=channel,
                                     text=text)

    def send_formatted_msg_to(self, text='', channel=DEFAULT_CHANNEL):
        self.__slack_client.api_call(SlackBot.API_CHAT_MSG,
                                     username=SlackBot.BOT_NAME,
                                     mrkdwn=True,
                                     channel=channel,
                                     text=text)


if __name__ == '__main__':
    SlackBot().send_msg_to('hello world!!')
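# Note (added for illustration; not in the original file): overriding __new__
# this way makes SlackBot a singleton -- every SlackBot() call returns the one
# cached instance, although __init__ still re-runs on each call and refreshes
# the underlying SlackClient:
#
#     a = SlackBot()
#     b = SlackBot()
#     assert a is b  # same object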
[((416, 442), 'slackclient.SlackClient', 'SlackClient', (['SLACK_API_KEY'], {}), '(SLACK_API_KEY)\n', (427, 442), False, 'from slackclient import SlackClient\n')]
miracle2k/pytezos
src/pytezos/block/forge.py
e6b99f00f342d9a05b0c36a9883040961fd6d58e
from typing import Any, Dict, List, Tuple

from pytezos.michelson.forge import forge_array, forge_base58, optimize_timestamp


def bump_fitness(fitness: Tuple[str, str]) -> Tuple[str, str]:
    if len(fitness) == 0:
        major = 0
        minor = 1
    else:
        major = int.from_bytes(bytes.fromhex(fitness[0]), 'big')
        minor = int.from_bytes(bytes.fromhex(fitness[1]), 'big') + 1
    return major.to_bytes(1, 'big').hex(), minor.to_bytes(8, 'big').hex()


def forge_int_fixed(value: int, length: int) -> bytes:
    return value.to_bytes(length, 'big')


def forge_command(command: str) -> bytes:
    if command == 'activate':
        return b'\x00'
    raise NotImplementedError(command)


def forge_fitness(fitness: List[str]) -> bytes:
    return forge_array(b''.join(map(lambda x: forge_array(bytes.fromhex(x)), fitness)))


def forge_priority(priority: int) -> bytes:
    return priority.to_bytes(2, 'big')


def forge_content(content: Dict[str, Any]) -> bytes:
    res = b''
    res += forge_command(content['command'])
    res += forge_base58(content['hash'])
    res += forge_fitness(content['fitness'])
    res += bytes.fromhex(content['protocol_parameters'])
    return res


def forge_protocol_data(protocol_data: Dict[str, Any]) -> bytes:
    res = b''
    if protocol_data.get('content'):
        res += forge_content(protocol_data['content'])
    else:
        res += forge_priority(protocol_data['priority'])
        res += bytes.fromhex(protocol_data['proof_of_work_nonce'])
        if protocol_data.get('seed_nonce_hash'):
            res += b'\xFF'
            res += forge_base58(protocol_data['seed_nonce_hash'])
        else:
            res += b'\x00'
        res += b'\xFF' if protocol_data['liquidity_baking_escape_vote'] else b'\x00'
    return res


def forge_block_header(shell_header: Dict[str, Any]) -> bytes:
    res = forge_int_fixed(shell_header['level'], 4)
    res += forge_int_fixed(shell_header['proto'], 1)
    res += forge_base58(shell_header['predecessor'])
    res += forge_int_fixed(optimize_timestamp(shell_header['timestamp']), 8)
    res += forge_int_fixed(shell_header['validation_pass'], 1)
    res += forge_base58(shell_header['operations_hash'])
    res += forge_fitness(shell_header['fitness'])
    res += forge_base58(shell_header['context'])
    res += bytes.fromhex(shell_header['protocol_data'])
    return res
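# Usage sketch (added for illustration; every field value below is a
# placeholder, not real chain data). forge_block_header() simply concatenates
# fixed-width integer fields, base58-decoded hashes, the forged fitness list
# and the raw protocol_data hex into one byte string:
#
#     header_bytes = forge_block_header({
#         'level': 1,
#         'proto': 1,
#         'predecessor': '<block hash, base58>',
#         'timestamp': '<RFC 3339 timestamp>',
#         'validation_pass': 4,
#         'operations_hash': '<operations hash, base58>',
#         'fitness': ['01', '0000000000000001'],
#         'context': '<context hash, base58>',
#         'protocol_data': '<hex string>',
#     })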
[((1052, 1081), 'pytezos.michelson.forge.forge_base58', 'forge_base58', (["content['hash']"], {}), "(content['hash'])\n", (1064, 1081), False, 'from pytezos.michelson.forge import forge_array, forge_base58, optimize_timestamp\n'), ((1970, 2011), 'pytezos.michelson.forge.forge_base58', 'forge_base58', (["shell_header['predecessor']"], {}), "(shell_header['predecessor'])\n", (1982, 2011), False, 'from pytezos.michelson.forge import forge_array, forge_base58, optimize_timestamp\n'), ((2163, 2208), 'pytezos.michelson.forge.forge_base58', 'forge_base58', (["shell_header['operations_hash']"], {}), "(shell_header['operations_hash'])\n", (2175, 2208), False, 'from pytezos.michelson.forge import forge_array, forge_base58, optimize_timestamp\n'), ((2270, 2307), 'pytezos.michelson.forge.forge_base58', 'forge_base58', (["shell_header['context']"], {}), "(shell_header['context'])\n", (2282, 2307), False, 'from pytezos.michelson.forge import forge_array, forge_base58, optimize_timestamp\n'), ((2039, 2084), 'pytezos.michelson.forge.optimize_timestamp', 'optimize_timestamp', (["shell_header['timestamp']"], {}), "(shell_header['timestamp'])\n", (2057, 2084), False, 'from pytezos.michelson.forge import forge_array, forge_base58, optimize_timestamp\n'), ((1601, 1647), 'pytezos.michelson.forge.forge_base58', 'forge_base58', (["protocol_data['seed_nonce_hash']"], {}), "(protocol_data['seed_nonce_hash'])\n", (1613, 1647), False, 'from pytezos.michelson.forge import forge_array, forge_base58, optimize_timestamp\n')]
jichangjichang/Paddle
python/paddle/fluid/tests/unittests/test_roi_pool_op.py
4fa3cee5499c6df0ad6043b0cfa220d09f2034e8
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import unittest
import numpy as np
import math
import sys
import paddle.compat as cpt
from op_test import OpTest


class TestROIPoolOp(OpTest):
    def set_data(self):
        self.init_test_case()
        self.make_rois()
        self.calc_roi_pool()

        self.inputs = {'X': self.x, 'ROIs': (self.rois[:, 1:5], self.rois_lod)}

        self.attrs = {
            'spatial_scale': self.spatial_scale,
            'pooled_height': self.pooled_height,
            'pooled_width': self.pooled_width
        }

        self.outputs = {'Out': self.outs, 'Argmax': self.argmaxes}

    def init_test_case(self):
        self.batch_size = 3
        self.channels = 3
        self.height = 6
        self.width = 4

        # n, c, h, w
        self.x_dim = (self.batch_size, self.channels, self.height, self.width)

        self.spatial_scale = 1.0 / 4.0
        self.pooled_height = 2
        self.pooled_width = 2

        self.x = np.random.random(self.x_dim).astype('float32')

    def calc_roi_pool(self):
        out_data = np.zeros((self.rois_num, self.channels, self.pooled_height,
                             self.pooled_width))
        argmax_data = np.zeros((self.rois_num, self.channels,
                                self.pooled_height, self.pooled_width))

        for i in range(self.rois_num):
            roi = self.rois[i]
            roi_batch_id = roi[0]
            roi_start_w = int(cpt.round(roi[1] * self.spatial_scale))
            roi_start_h = int(cpt.round(roi[2] * self.spatial_scale))
            roi_end_w = int(cpt.round(roi[3] * self.spatial_scale))
            roi_end_h = int(cpt.round(roi[4] * self.spatial_scale))

            roi_height = int(max(roi_end_h - roi_start_h + 1, 1))
            roi_width = int(max(roi_end_w - roi_start_w + 1, 1))

            x_i = self.x[roi_batch_id]

            bin_size_h = float(roi_height) / float(self.pooled_height)
            bin_size_w = float(roi_width) / float(self.pooled_width)

            for c in range(self.channels):
                for ph in range(self.pooled_height):
                    for pw in range(self.pooled_width):
                        hstart = int(math.floor(ph * bin_size_h))
                        wstart = int(math.floor(pw * bin_size_w))
                        hend = int(math.ceil((ph + 1) * bin_size_h))
                        wend = int(math.ceil((pw + 1) * bin_size_w))

                        hstart = min(max(hstart + roi_start_h, 0), self.height)
                        hend = min(max(hend + roi_start_h, 0), self.height)
                        wstart = min(max(wstart + roi_start_w, 0), self.width)
                        wend = min(max(wend + roi_start_w, 0), self.width)

                        is_empty = (hend <= hstart) or (wend <= wstart)
                        if is_empty:
                            out_data[i, c, ph, pw] = 0
                        else:
                            out_data[i, c, ph, pw] = -sys.float_info.max
                            argmax_data[i, c, ph, pw] = -1
                            for h in range(hstart, hend):
                                for w in range(wstart, wend):
                                    if x_i[c, h, w] > out_data[i, c, ph, pw]:
                                        out_data[i, c, ph, pw] = x_i[c, h, w]
                                        argmax_data[i, c, ph,
                                                    pw] = h * self.width + w

        self.outs = out_data.astype('float32')
        self.argmaxes = argmax_data.astype('int64')

    def make_rois(self):
        rois = []
        self.rois_lod = [[]]
        for bno in range(self.batch_size):
            self.rois_lod[0].append(bno + 1)
            for i in range(bno + 1):
                x1 = np.random.random_integers(
                    0, self.width // self.spatial_scale - self.pooled_width)
                y1 = np.random.random_integers(
                    0, self.height // self.spatial_scale - self.pooled_height)

                x2 = np.random.random_integers(x1 + self.pooled_width,
                                               self.width // self.spatial_scale)
                y2 = np.random.random_integers(
                    y1 + self.pooled_height, self.height // self.spatial_scale)

                roi = [bno, x1, y1, x2, y2]
                rois.append(roi)
        self.rois_num = len(rois)
        self.rois = np.array(rois).astype("int64")

    def setUp(self):
        self.op_type = "roi_pool"
        self.set_data()

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(['X'], 'Out')


if __name__ == '__main__':
    unittest.main()
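# Note (added for clarity; not in the original test): calc_roi_pool() above is
# a plain-Python reference for ROI max pooling. Each output bin (ph, pw) covers
# the input window
#     h in [floor(ph * bin_size_h), ceil((ph + 1) * bin_size_h))
#     w in [floor(pw * bin_size_w), ceil((pw + 1) * bin_size_w))
# shifted by the ROI origin and clipped to the feature map, and the argmax
# tensor stores the flat offset h * width + w so the gradient test can route
# gradients back to the max element of each bin.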
[((5305, 5320), 'unittest.main', 'unittest.main', ([], {}), '()\n', (5318, 5320), False, 'import unittest\n'), ((1679, 1758), 'numpy.zeros', 'np.zeros', (['(self.rois_num, self.channels, self.pooled_height, self.pooled_width)'], {}), '((self.rois_num, self.channels, self.pooled_height, self.pooled_width))\n', (1687, 1758), True, 'import numpy as np\n'), ((1810, 1889), 'numpy.zeros', 'np.zeros', (['(self.rois_num, self.channels, self.pooled_height, self.pooled_width)'], {}), '((self.rois_num, self.channels, self.pooled_height, self.pooled_width))\n', (1818, 1889), True, 'import numpy as np\n'), ((1583, 1611), 'numpy.random.random', 'np.random.random', (['self.x_dim'], {}), '(self.x_dim)\n', (1599, 1611), True, 'import numpy as np\n'), ((2057, 2095), 'paddle.compat.round', 'cpt.round', (['(roi[1] * self.spatial_scale)'], {}), '(roi[1] * self.spatial_scale)\n', (2066, 2095), True, 'import paddle.compat as cpt\n'), ((2127, 2165), 'paddle.compat.round', 'cpt.round', (['(roi[2] * self.spatial_scale)'], {}), '(roi[2] * self.spatial_scale)\n', (2136, 2165), True, 'import paddle.compat as cpt\n'), ((2195, 2233), 'paddle.compat.round', 'cpt.round', (['(roi[3] * self.spatial_scale)'], {}), '(roi[3] * self.spatial_scale)\n', (2204, 2233), True, 'import paddle.compat as cpt\n'), ((2263, 2301), 'paddle.compat.round', 'cpt.round', (['(roi[4] * self.spatial_scale)'], {}), '(roi[4] * self.spatial_scale)\n', (2272, 2301), True, 'import paddle.compat as cpt\n'), ((4385, 4472), 'numpy.random.random_integers', 'np.random.random_integers', (['(0)', '(self.width // self.spatial_scale - self.pooled_width)'], {}), '(0, self.width // self.spatial_scale - self.\n pooled_width)\n', (4410, 4472), True, 'import numpy as np\n'), ((4510, 4599), 'numpy.random.random_integers', 'np.random.random_integers', (['(0)', '(self.height // self.spatial_scale - self.pooled_height)'], {}), '(0, self.height // self.spatial_scale - self.\n pooled_height)\n', (4535, 4599), True, 'import numpy as np\n'), ((4638, 4726), 'numpy.random.random_integers', 'np.random.random_integers', (['(x1 + self.pooled_width)', '(self.width // self.spatial_scale)'], {}), '(x1 + self.pooled_width, self.width // self.\n spatial_scale)\n', (4663, 4726), True, 'import numpy as np\n'), ((4790, 4880), 'numpy.random.random_integers', 'np.random.random_integers', (['(y1 + self.pooled_height)', '(self.height // self.spatial_scale)'], {}), '(y1 + self.pooled_height, self.height // self.\n spatial_scale)\n', (4815, 4880), True, 'import numpy as np\n'), ((5029, 5043), 'numpy.array', 'np.array', (['rois'], {}), '(rois)\n', (5037, 5043), True, 'import numpy as np\n'), ((2806, 2833), 'math.floor', 'math.floor', (['(ph * bin_size_h)'], {}), '(ph * bin_size_h)\n', (2816, 2833), False, 'import math\n'), ((2872, 2899), 'math.floor', 'math.floor', (['(pw * bin_size_w)'], {}), '(pw * bin_size_w)\n', (2882, 2899), False, 'import math\n'), ((2936, 2968), 'math.ceil', 'math.ceil', (['((ph + 1) * bin_size_h)'], {}), '((ph + 1) * bin_size_h)\n', (2945, 2968), False, 'import math\n'), ((3005, 3037), 'math.ceil', 'math.ceil', (['((pw + 1) * bin_size_w)'], {}), '((pw + 1) * bin_size_w)\n', (3014, 3037), False, 'import math\n')]
movermeyer/django-firestone
testproject/testapp/tests/__init__.py
e045089f6ff4a6686633f9c5909c314a010bd4a0
from test_proxy import *
from test_serializers import *
from test_deserializers import *
from test_exceptions import *
from test_authentication import *
from test_whole_flow import *
from test_handlers_metaclass_magic import *
from test_handlers_serialize_to_python import *
from test_handlers_is_method_allowed import *
from test_handlers_data_control import *
from test_handlers_package import *
from test_handlers_finalize_pending import *
from test_handlers_cleanse_body import *
from test_handlers_validate import *
from test_handlers_clean_models import *
from test_handlers_get import *
from test_handlers_is_catastrophic import *
from test_handlers_post import *
from test_handlers_put import *
from test_handlers_delete import *
from test_handlers_patch_response import *
from test_handlers_authentication_hook import *
from test_handlers_filter_data import *
from test_handlers_order import *
from test_handlers_order_data import *
from test_handlers_paginate import *
from test_handlers_paginate_data import *
from test_handlers_inject_data_hook import *
from test_handlers_handle_exception import *
from test_handlers_deserialize_body import *
[]
SheepiCagio/Advent-of-Code-2021
Day20.py
52f0035da2cb258810d8947cbf56b51b65a9fe8b
import numpy as np

raw = open("inputs/20.txt", "r").readlines()
input_array = [(i.replace('\n', '').replace('.', '0').replace('#', '1')) for i in raw]

test_raw = open("inputs/20_test.txt", "r").readlines()
test_array = [(i.replace('\n', '').replace('.', '0').replace('#', '1')) for i in test_raw]


def addLayerZero(grid):
    #if sum(np.asarray(grid)[:,0]) > 0:
    grid = np.hstack((np.zeros(len(grid), dtype=int)[:, np.newaxis], grid))
    #if sum(np.asarray(grid)[0,:]) > 0:
    grid = np.vstack((np.zeros(len(grid[0]), dtype=int)[np.newaxis, :], grid))
    # if sum(np.asarray(grid)[:,-1]) > 0:
    grid = np.hstack((grid, np.zeros(len(grid), dtype=int)[:, np.newaxis]))
    # if sum(np.asarray(grid)[-1,:]) > 0:
    grid = np.vstack((grid, np.zeros(len(grid[0]), dtype=int)[np.newaxis, :]))
    return grid


def addLayerOnes(grid):
    #if sum(np.asarray(grid)[:,0]) > 0:
    grid = np.hstack((np.ones(len(grid), dtype=int)[:, np.newaxis], grid))
    #if sum(np.asarray(grid)[0,:]) > 0:
    grid = np.vstack((np.ones(len(grid[0]), dtype=int)[np.newaxis, :], grid))
    # if sum(np.asarray(grid)[:,-1]) > 0:
    grid = np.hstack((grid, np.ones(len(grid), dtype=int)[:, np.newaxis]))
    # if sum(np.asarray(grid)[-1,:]) > 0:
    grid = np.vstack((grid, np.ones(len(grid[0]), dtype=int)[np.newaxis, :]))
    return grid


def pictureEnhancer(input_array, iter):
    splitvalue = False
    index_string = ''
    grid = []
    for i in input_array:
        if i == '':
            splitvalue = True
            continue
        if not splitvalue:
            index_string += i
        else:
            grid.append(list(i))
    grid = [[int(i) for i in row] for row in grid]
    for x in range(1, iter + 1):
        grid = enhancer(grid, index_string, x)
    print('The number of lit pixels is:', sum(sum(grid)))


def enhancer(grid, index_string, iter):
    print(iter)
    if iter == 1 or index_string[0] == '0' or (iter % 2 == 1 and index_string[511] == '0'):
        grid = addLayerZero(grid)
        output_grid = np.zeros((len(grid), len(grid[0])), dtype=int)
        grid = addLayerZero(grid)
    elif (index_string[0] == '1' and index_string[511] == '1') or (iter % 2 == 0 and index_string[511] == '0'):
        grid = addLayerOnes(grid)
        output_grid = np.ones((len(grid), len(grid[0])), dtype=int)
        grid = addLayerOnes(grid)
    for i in range(1, len(grid) - 1):
        for j in range(1, len(grid[i]) - 1):
            binStr = ''
            for k in range(-1, 2):
                for l in range(-1, 2):
                    binStr += str(grid[i + k][j + l])
            output_grid[i - 1][j - 1] = index_string[int(binStr, 2)]
    return output_grid


#pictureEnhancer(test_array, 2)
#pictureEnhancer(input_array, 2)
pictureEnhancer(test_array, 50)
pictureEnhancer(input_array, 50)
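# Note (added for clarity; not in the original solution): the alternating
# addLayerZero/addLayerOnes padding handles the infinite background. With a
# real puzzle input, index_string[0] == '1' and index_string[511] == '0', so
# the infinite sea of dark pixels (neighbourhood 000000000 -> index 0) turns
# lit after odd steps and dark again after even steps; padding with the
# current background value (zeros on odd iterations, ones on even ones) before
# each pass keeps the border bins correct.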
[]
sesu089/stackoverflow
questions/53349623/main.py
6fae69be6fa74fba9d554e6b5f387e5d3c1aad73
import sys

from PyQt5 import QtCore, QtGui, QtWidgets


class Demo(QtWidgets.QWidget):
    def __init__(self):
        super(Demo, self).__init__()
        self.button = QtWidgets.QPushButton()
        self.label = QtWidgets.QLabel(alignment=QtCore.Qt.AlignCenter)
        self.combo = QtWidgets.QComboBox(self)
        self.combo.currentIndexChanged.connect(self.change_func)

        self.trans = QtCore.QTranslator(self)

        self.v_layout = QtWidgets.QVBoxLayout(self)
        self.v_layout.addWidget(self.combo)
        self.v_layout.addWidget(self.button)
        self.v_layout.addWidget(self.label)

        options = ([('English', ''), ('français', 'eng-fr'), ('中文', 'eng-chs'), ])

        for i, (text, lang) in enumerate(options):
            self.combo.addItem(text)
            self.combo.setItemData(i, lang)

        self.retranslateUi()

    @QtCore.pyqtSlot(int)
    def change_func(self, index):
        data = self.combo.itemData(index)
        if data:
            self.trans.load(data)
            QtWidgets.QApplication.instance().installTranslator(self.trans)
        else:
            QtWidgets.QApplication.instance().removeTranslator(self.trans)

    def changeEvent(self, event):
        if event.type() == QtCore.QEvent.LanguageChange:
            self.retranslateUi()
        super(Demo, self).changeEvent(event)

    def retranslateUi(self):
        self.button.setText(QtWidgets.QApplication.translate('Demo', 'Start'))
        self.label.setText(QtWidgets.QApplication.translate('Demo', 'Hello, World'))


if __name__ == '__main__':
    app = QtWidgets.QApplication(sys.argv)
    demo = Demo()
    demo.show()
    sys.exit(app.exec_())
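# Illustrative note (added; not part of the original answer):
# QTranslator.load('eng-fr') looks for a compiled Qt translation file
# (eng-fr.qm, produced from a .ts file with Qt's lrelease tool) relative to
# the working directory. Installing or removing a translator makes Qt deliver
# a LanguageChange event to top-level widgets, which is why changeEvent()
# above re-runs retranslateUi() to refresh the visible strings.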
[((866, 886), 'PyQt5.QtCore.pyqtSlot', 'QtCore.pyqtSlot', (['int'], {}), '(int)\n', (881, 886), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1582, 1614), 'PyQt5.QtWidgets.QApplication', 'QtWidgets.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (1604, 1614), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((170, 193), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', ([], {}), '()\n', (191, 193), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((215, 264), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', ([], {'alignment': 'QtCore.Qt.AlignCenter'}), '(alignment=QtCore.Qt.AlignCenter)\n', (231, 264), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((287, 312), 'PyQt5.QtWidgets.QComboBox', 'QtWidgets.QComboBox', (['self'], {}), '(self)\n', (306, 312), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((400, 424), 'PyQt5.QtCore.QTranslator', 'QtCore.QTranslator', (['self'], {}), '(self)\n', (418, 424), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((450, 477), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self'], {}), '(self)\n', (471, 477), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1407, 1456), 'PyQt5.QtWidgets.QApplication.translate', 'QtWidgets.QApplication.translate', (['"""Demo"""', '"""Start"""'], {}), "('Demo', 'Start')\n", (1439, 1456), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1485, 1541), 'PyQt5.QtWidgets.QApplication.translate', 'QtWidgets.QApplication.translate', (['"""Demo"""', '"""Hello, World"""'], {}), "('Demo', 'Hello, World')\n", (1517, 1541), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1026, 1059), 'PyQt5.QtWidgets.QApplication.instance', 'QtWidgets.QApplication.instance', ([], {}), '()\n', (1057, 1059), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1116, 1149), 'PyQt5.QtWidgets.QApplication.instance', 'QtWidgets.QApplication.instance', ([], {}), '()\n', (1147, 1149), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n')]
iterait/apistrap
tests/test_error_descriptions_from_raises.py
e83460fa97f13a95a928971b0d2defe0ac611911
import pytest

from apistrap.flask import FlaskApistrap
from apistrap.schemas import ErrorResponse


@pytest.fixture()
def app_with_raises(app):
    oapi = FlaskApistrap()

    @app.route("/", methods=["GET"])
    def view():
        """
        Something something.

        :raises KeyError: KeyError description
        """

    oapi.init_app(app)


@pytest.fixture()
def app_with_raises_and_handler(app):
    oapi = FlaskApistrap()
    oapi.add_error_handler(KeyError, 515, lambda e: ErrorResponse())

    @app.route("/", methods=["GET"])
    def view():
        """
        Something something.

        :raises KeyError: KeyError description
        """

    oapi.init_app(app)


def test_error_descriptions_from_raises(app_with_raises, client):
    response = client.get("/spec.json")
    assert response.json["paths"]["/"]["get"]["responses"] == {
        "500": {
            "description": "KeyError description",
            "content": {
                "application/json": {
                    "schema": {
                        "$ref": "#/components/schemas/ErrorResponse"
                    }
                }
            }
        }
    }


def test_http_code_from_handler(app_with_raises_and_handler, client):
    response = client.get("/spec.json")
    assert response.json["paths"]["/"]["get"]["responses"] == {
        "515": {
            "description": "KeyError description",
            "content": {
                "application/json": {
                    "schema": {
                        "$ref": "#/components/schemas/ErrorResponse"
                    }
                }
            }
        }
    }
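# Note (added for clarity; not in the original tests): both fixtures rely on
# apistrap parsing the view's ':raises <Exception>: <description>' docstring
# field. The description becomes the documented response text in the generated
# spec; the HTTP status defaults to 500, unless add_error_handler() registers
# a handler with an explicit code (515 in the second fixture).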
[((102, 118), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (116, 118), False, 'import pytest\n'), ((354, 370), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (368, 370), False, 'import pytest\n'), ((156, 171), 'apistrap.flask.FlaskApistrap', 'FlaskApistrap', ([], {}), '()\n', (169, 171), False, 'from apistrap.flask import FlaskApistrap\n'), ((420, 435), 'apistrap.flask.FlaskApistrap', 'FlaskApistrap', ([], {}), '()\n', (433, 435), False, 'from apistrap.flask import FlaskApistrap\n'), ((488, 503), 'apistrap.schemas.ErrorResponse', 'ErrorResponse', ([], {}), '()\n', (501, 503), False, 'from apistrap.schemas import ErrorResponse\n')]
chamathshashika/projects-python-wrappers
projects/api/UsersApi.py
33e9f6bccba16a581b115c582033a93d43bb159c
#$Id$

from projects.util.ZohoHttpClient import ZohoHttpClient
from projects.api.Api import Api
from projects.parser.UsersParser import UsersParser

base_url = Api().base_url
zoho_http_client = ZohoHttpClient()
parser = UsersParser()


class UsersApi:
    """Users Api class is used to

    1.Get all the users in the given project.

    """

    def __init__(self, authtoken, portal_id):
        """Initialize Users api using user's authtoken and portal id.

        Args:
            authtoken(str): User's authtoken.
            portal_id(str): User's portal id.

        """
        self.details = {
            'authtoken': authtoken
        }
        self.portal_id = portal_id

    def get_users(self, project_id):
        """Get all the users in the given project.

        Args:
            project_id(long): Project id.

        Returns:
            list of instance: List of users object.

        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/users/'
        response = zoho_http_client.get(url, self.details)
        return parser.get_users(response)
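# Minimal usage sketch (added for illustration; the token, portal id and
# project id below are placeholders, not real credentials):
#
#     users_api = UsersApi('<authtoken>', '<portal_id>')
#     users = users_api.get_users('<project_id>')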
[((194, 210), 'projects.util.ZohoHttpClient.ZohoHttpClient', 'ZohoHttpClient', ([], {}), '()\n', (208, 210), False, 'from projects.util.ZohoHttpClient import ZohoHttpClient\n'), ((220, 233), 'projects.parser.UsersParser.UsersParser', 'UsersParser', ([], {}), '()\n', (231, 233), False, 'from projects.parser.UsersParser import UsersParser\n'), ((160, 165), 'projects.api.Api.Api', 'Api', ([], {}), '()\n', (163, 165), False, 'from projects.api.Api import Api\n')]
leader1313/Baxter_teleoperation_system
useless/tuck_arms.py
856d999acd73e6c1dc15a342cb6c4fcd1a482863
#!/usr/bin/env python

# Copyright (c) 2013-2015, Rethink Robotics
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
#    this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
# 3. Neither the name of the Rethink Robotics nor the names of its
#    contributors may be used to endorse or promote products derived from
#    this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

"""
Tool to tuck/untuck Baxter's arms to/from the shipping pose
"""
import argparse

from copy import deepcopy

import rospy

from std_msgs.msg import (
    Empty,
    Bool,
)

import baxter_interface

from baxter_core_msgs.msg import (
    CollisionAvoidanceState,
)
from baxter_interface import CHECK_VERSION


class Tuck(object):
    def __init__(self, tuck_cmd):
        self._done = False
        self._limbs = ('left', 'right')
        self._arms = {
            'left': baxter_interface.Limb('left'),
            'right': baxter_interface.Limb('right'),
        }
        self._tuck = tuck_cmd
        self._tuck_rate = rospy.Rate(20.0)  # Hz
        self._tuck_threshold = 0.2  # radians
        self._peak_angle = -1.6  # radians
        self._arm_state = {
            'tuck': {'left': 'none', 'right': 'none'},
            'collide': {'left': False, 'right': False},
            'flipped': {'left': False, 'right': False}
        }
        self._joint_moves = {
            'tuck': {
                'left': [-1.0, -2.07, 3.0, 2.55, 0.0, 0.01, 0.0],
                'right': [1.0, -2.07, -3.0, 2.55, -0.0, 0.01, 0.0]
            },
            'untuck': {
                'left': [-0.08, -1.0, -1.19, 1.94, 0.67, 1.03, -0.50],
                'right': [0.08, -1.0, 1.19, 1.94, -0.67, 1.03, 0.50]
            }
        }
        self._collide_lsub = rospy.Subscriber(
            'robot/limb/left/collision_avoidance_state',
            CollisionAvoidanceState,
            self._update_collision, 'left')
        self._collide_rsub = rospy.Subscriber(
            'robot/limb/right/collision_avoidance_state',
            CollisionAvoidanceState,
            self._update_collision, 'right')
        self._disable_pub = {
            'left': rospy.Publisher(
                'robot/limb/left/suppress_collision_avoidance',
                Empty, queue_size=10),
            'right': rospy.Publisher(
                'robot/limb/right/suppress_collision_avoidance',
                Empty, queue_size=10)
        }
        self._rs = baxter_interface.RobotEnable(CHECK_VERSION)
        self._enable_pub = rospy.Publisher('robot/set_super_enable',
                                           Bool, queue_size=10)

    def _update_collision(self, data, limb):
        self._arm_state['collide'][limb] = len(data.collision_object) > 0
        self._check_arm_state()

    def _check_arm_state(self):
        """
        Check for goals and behind collision field.

        If s1 joint is over the peak, collision will need to be disabled
        to get the arm around the head-arm collision force-field.
        """
        diff_check = lambda a, b: abs(a - b) <= self._tuck_threshold
        for limb in self._limbs:
            angles = [self._arms[limb].joint_angle(joint)
                      for joint in self._arms[limb].joint_names()]

            # Check if in a goal position
            untuck_goal = map(diff_check, angles,
                              self._joint_moves['untuck'][limb])
            tuck_goal = map(diff_check, angles[0:2],
                            self._joint_moves['tuck'][limb][0:2])

            if all(untuck_goal):
                self._arm_state['tuck'][limb] = 'untuck'
            elif all(tuck_goal):
                self._arm_state['tuck'][limb] = 'tuck'
            else:
                self._arm_state['tuck'][limb] = 'none'

            # Check if shoulder is flipped over peak
            self._arm_state['flipped'][limb] = (
                self._arms[limb].joint_angle(limb + '_s1') <= self._peak_angle)

    def _prepare_to_tuck(self):
        # If arms are in "tucked" state, disable collision avoidance
        # before enabling robot, to avoid arm jerking from "force-field".
        head = baxter_interface.Head()
        start_disabled = not self._rs.state().enabled
        at_goal = lambda: (abs(head.pan()) <=
                           baxter_interface.settings.HEAD_PAN_ANGLE_TOLERANCE)

        rospy.loginfo("Moving head to neutral position")
        while not at_goal() and not rospy.is_shutdown():
            if start_disabled:
                [pub.publish(Empty()) for pub in self._disable_pub.values()]
            if not self._rs.state().enabled:
                self._enable_pub.publish(True)
            head.set_pan(0.0, 0.5, timeout=0)
            self._tuck_rate.sleep()

        if start_disabled:
            while self._rs.state().enabled == True and not rospy.is_shutdown():
                [pub.publish(Empty()) for pub in self._disable_pub.values()]
                self._enable_pub.publish(False)
                self._tuck_rate.sleep()

    def _move_to(self, tuck, disabled):
        if any(disabled.values()):
            [pub.publish(Empty()) for pub in self._disable_pub.values()]
        while (any(self._arm_state['tuck'][limb] != goal
                   for limb, goal in tuck.viewitems())
               and not rospy.is_shutdown()):
            if self._rs.state().enabled == False:
                self._enable_pub.publish(True)
            for limb in self._limbs:
                if disabled[limb]:
                    self._disable_pub[limb].publish(Empty())
                if limb in tuck:
                    self._arms[limb].set_joint_positions(dict(zip(
                        self._arms[limb].joint_names(),
                        self._joint_moves[tuck[limb]][limb])))
            self._check_arm_state()
            self._tuck_rate.sleep()

        if any(self._arm_state['collide'].values()):
            self._rs.disable()
        return

    def supervised_tuck(self):
        # Update our starting state to check if arms are tucked
        self._prepare_to_tuck()
        self._check_arm_state()
        # Tuck Arms
        if self._tuck == True:
            # If arms are already tucked, report this to user and exit.
            if all(self._arm_state['tuck'][limb] == 'tuck'
                   for limb in self._limbs):
                rospy.loginfo("Tucking: Arms already in 'Tucked' position.")
                self._done = True
                return
            else:
                rospy.loginfo("Tucking: One or more arms not Tucked.")
                any_flipped = not all(self._arm_state['flipped'].values())
                if any_flipped:
                    rospy.loginfo(
                        "Moving to neutral start position with collision %s.",
                        "on" if any_flipped else "off")
                    # Move to neutral pose before tucking arms to avoid damage
                    self._check_arm_state()
                    actions = dict()
                    disabled = {'left': True, 'right': True}
                    for limb in self._limbs:
                        if not self._arm_state['flipped'][limb]:
                            actions[limb] = 'untuck'
                            disabled[limb] = False
                    self._move_to(actions, disabled)

                # Disable collision and Tuck Arms
                rospy.loginfo("Tucking: Tucking with collision avoidance off.")
                actions = {'left': 'tuck', 'right': 'tuck'}
                disabled = {'left': True, 'right': True}
                self._move_to(actions, disabled)
                self._done = True
                return
        # Untuck Arms
        else:
            # If arms are tucked disable collision and untuck arms
            if any(self._arm_state['flipped'].values()):
                rospy.loginfo("Untucking: One or more arms Tucked;"
                              " Disabling Collision Avoidance and untucking.")
                self._check_arm_state()
                suppress = deepcopy(self._arm_state['flipped'])
                actions = {'left': 'untuck', 'right': 'untuck'}
                self._move_to(actions, suppress)
                self._done = True
                return
            # If arms already untucked, move to neutral location
            else:
                rospy.loginfo("Untucking: Arms already Untucked;"
                              " Moving to neutral position.")
                self._check_arm_state()
                suppress = deepcopy(self._arm_state['flipped'])
                actions = {'left': 'untuck', 'right': 'untuck'}
                self._move_to(actions, suppress)
                self._done = True
                return

    def clean_shutdown(self):
        """Handles ROS shutdown (Ctrl-C) safely."""
        if not self._done:
            rospy.logwarn('Aborting: Shutting down safely...')
        if any(self._arm_state['collide'].values()):
            while self._rs.state().enabled != False:
                [pub.publish(Empty()) for pub in self._disable_pub.values()]
                self._enable_pub.publish(False)
                self._tuck_rate.sleep()


def main():
    parser = argparse.ArgumentParser()
    tuck_group = parser.add_mutually_exclusive_group(required=True)
    tuck_group.add_argument("-t", "--tuck", dest="tuck",
                            action='store_true', default=False, help="tuck arms")
    tuck_group.add_argument("-u", "--untuck", dest="untuck",
                            action='store_true', default=False, help="untuck arms")
    args = parser.parse_args(rospy.myargv()[1:])
    tuck = args.tuck

    rospy.loginfo("Initializing node... ")
    rospy.init_node("rsdk_tuck_arms")
    rospy.loginfo("%sucking arms" % ("T" if tuck else "Unt",))
    tucker = Tuck(tuck)
    rospy.on_shutdown(tucker.clean_shutdown)
    tucker.supervised_tuck()
    rospy.loginfo("Finished tuck")


if __name__ == "__main__":
    main()
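# Usage sketch (added for illustration, not part of the original script;
# assumes a running ROS master and a connected Baxter robot):
#
#     python tuck_arms.py -t   # tuck both arms into the shipping pose
#     python tuck_arms.py -u   # untuck both arms to the neutral pose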
[((10616, 10641), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (10639, 10641), False, 'import argparse\n'), ((11028, 11066), 'rospy.loginfo', 'rospy.loginfo', (['"""Initializing node... """'], {}), "('Initializing node... ')\n", (11041, 11066), False, 'import rospy\n'), ((11071, 11104), 'rospy.init_node', 'rospy.init_node', (['"""rsdk_tuck_arms"""'], {}), "('rsdk_tuck_arms')\n", (11086, 11104), False, 'import rospy\n'), ((11109, 11167), 'rospy.loginfo', 'rospy.loginfo', (["('%sucking arms' % ('T' if tuck else 'Unt',))"], {}), "('%sucking arms' % ('T' if tuck else 'Unt',))\n", (11122, 11167), False, 'import rospy\n'), ((11196, 11236), 'rospy.on_shutdown', 'rospy.on_shutdown', (['tucker.clean_shutdown'], {}), '(tucker.clean_shutdown)\n', (11213, 11236), False, 'import rospy\n'), ((11270, 11300), 'rospy.loginfo', 'rospy.loginfo', (['"""Finished tuck"""'], {}), "('Finished tuck')\n", (11283, 11300), False, 'import rospy\n'), ((2208, 2224), 'rospy.Rate', 'rospy.Rate', (['(20.0)'], {}), '(20.0)\n', (2218, 2224), False, 'import rospy\n'), ((3063, 3185), 'rospy.Subscriber', 'rospy.Subscriber', (['"""robot/limb/left/collision_avoidance_state"""', 'CollisionAvoidanceState', 'self._update_collision', '"""left"""'], {}), "('robot/limb/left/collision_avoidance_state',\n CollisionAvoidanceState, self._update_collision, 'left')\n", (3079, 3185), False, 'import rospy\n'), ((3299, 3423), 'rospy.Subscriber', 'rospy.Subscriber', (['"""robot/limb/right/collision_avoidance_state"""', 'CollisionAvoidanceState', 'self._update_collision', '"""right"""'], {}), "('robot/limb/right/collision_avoidance_state',\n CollisionAvoidanceState, self._update_collision, 'right')\n", (3315, 3423), False, 'import rospy\n'), ((3852, 3895), 'baxter_interface.RobotEnable', 'baxter_interface.RobotEnable', (['CHECK_VERSION'], {}), '(CHECK_VERSION)\n', (3880, 3895), False, 'import baxter_interface\n'), ((3923, 3985), 'rospy.Publisher', 'rospy.Publisher', (['"""robot/set_super_enable"""', 'Bool'], {'queue_size': '(10)'}), "('robot/set_super_enable', Bool, queue_size=10)\n", (3938, 3985), False, 'import rospy\n'), ((5560, 5583), 'baxter_interface.Head', 'baxter_interface.Head', ([], {}), '()\n', (5581, 5583), False, 'import baxter_interface\n'), ((5769, 5817), 'rospy.loginfo', 'rospy.loginfo', (['"""Moving head to neutral position"""'], {}), "('Moving head to neutral position')\n", (5782, 5817), False, 'import rospy\n'), ((2054, 2083), 'baxter_interface.Limb', 'baxter_interface.Limb', (['"""left"""'], {}), "('left')\n", (2075, 2083), False, 'import baxter_interface\n'), ((2106, 2136), 'baxter_interface.Limb', 'baxter_interface.Limb', (['"""right"""'], {}), "('right')\n", (2127, 2136), False, 'import baxter_interface\n'), ((3558, 3647), 'rospy.Publisher', 'rospy.Publisher', (['"""robot/limb/left/suppress_collision_avoidance"""', 'Empty'], {'queue_size': '(10)'}), "('robot/limb/left/suppress_collision_avoidance', Empty,\n queue_size=10)\n", (3573, 3647), False, 'import rospy\n'), ((3701, 3791), 'rospy.Publisher', 'rospy.Publisher', (['"""robot/limb/right/suppress_collision_avoidance"""', 'Empty'], {'queue_size': '(10)'}), "('robot/limb/right/suppress_collision_avoidance', Empty,\n queue_size=10)\n", (3716, 3791), False, 'import rospy\n'), ((10267, 10317), 'rospy.logwarn', 'rospy.logwarn', (['"""Aborting: Shutting down safely..."""'], {}), "('Aborting: Shutting down safely...')\n", (10280, 10317), False, 'import rospy\n'), ((10982, 10996), 'rospy.myargv', 'rospy.myargv', ([], {}), '()\n', (10994, 10996), False, 'import rospy\n'), ((5854, 5873), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (5871, 5873), False, 'import rospy\n'), ((6714, 6733), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (6731, 6733), False, 'import rospy\n'), ((7788, 7848), 'rospy.loginfo', 'rospy.loginfo', (['"""Tucking: Arms already in \'Tucked\' position."""'], {}), '("Tucking: Arms already in \'Tucked\' position.")\n', (7801, 7848), False, 'import rospy\n'), ((7940, 7994), 'rospy.loginfo', 'rospy.loginfo', (['"""Tucking: One or more arms not Tucked."""'], {}), "('Tucking: One or more arms not Tucked.')\n", (7953, 7994), False, 'import rospy\n'), ((8791, 8854), 'rospy.loginfo', 'rospy.loginfo', (['"""Tucking: Tucking with collision avoidance off."""'], {}), "('Tucking: Tucking with collision avoidance off.')\n", (8804, 8854), False, 'import rospy\n'), ((9255, 9362), 'rospy.loginfo', 'rospy.loginfo', (['"""Untucking: One or more arms Tucked; Disabling Collision Avoidance and untucking."""'], {}), "(\n 'Untucking: One or more arms Tucked; Disabling Collision Avoidance and untucking.'\n )\n", (9268, 9362), False, 'import rospy\n'), ((9453, 9489), 'copy.deepcopy', 'deepcopy', (["self._arm_state['flipped']"], {}), "(self._arm_state['flipped'])\n", (9461, 9489), False, 'from copy import deepcopy\n'), ((9759, 9837), 'rospy.loginfo', 'rospy.loginfo', (['"""Untucking: Arms already Untucked; Moving to neutral position."""'], {}), "('Untucking: Arms already Untucked; Moving to neutral position.')\n", (9772, 9837), False, 'import rospy\n'), ((9938, 9974), 'copy.deepcopy', 'deepcopy', (["self._arm_state['flipped']"], {}), "(self._arm_state['flipped'])\n", (9946, 9974), False, 'from copy import deepcopy\n'), ((6244, 6263), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (6261, 6263), False, 'import rospy\n'), ((6531, 6538), 'std_msgs.msg.Empty', 'Empty', ([], {}), '()\n', (6536, 6538), False, 'from std_msgs.msg import Empty, Bool\n'), ((8122, 8226), 'rospy.loginfo', 'rospy.loginfo', (['"""Moving to neutral start position with collision %s."""', "('on' if any_flipped else 'off')"], {}), "('Moving to neutral start position with collision %s.', 'on' if\n any_flipped else 'off')\n", (8135, 8226), False, 'import rospy\n'), ((5935, 5942), 'std_msgs.msg.Empty', 'Empty', ([], {}), '()\n', (5940, 5942), False, 'from std_msgs.msg import Empty, Bool\n'), ((6294, 6301), 'std_msgs.msg.Empty', 'Empty', ([], {}), '()\n', (6299, 6301), False, 'from std_msgs.msg import Empty, Bool\n'), ((6957, 6964), 'std_msgs.msg.Empty', 'Empty', ([], {}), '()\n', (6962, 6964), False, 'from std_msgs.msg import Empty, Bool\n'), ((10453, 10460), 'std_msgs.msg.Empty', 'Empty', ([], {}), '()\n', (10458, 10460), False, 'from std_msgs.msg import Empty, Bool\n')]
Deepak-Kharah/ioe-project
django-system/src/tsm_api/serializers.py
6f83ddcfced25130e0f05c3380dde97429d1f224
from rest_framework import serializers

from .models import Measurement


class MeasurementSerializer(serializers.ModelSerializer):
    class Meta:
        model = Measurement
        fields = '__all__'
[]
pabferde/galaxy_dynamics_from_Vc
src/GalaxyDynamicsFromVc/units.py
7232f7bfd6d2338bcec3bbf87478f7a482c749ef
_Msun_kpc3_to_GeV_cm3_factor = 0.3/8.0e6


def Msun_kpc3_to_GeV_cm3(value):
    return value*_Msun_kpc3_to_GeV_cm3_factor
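# Worked example (added for illustration; not in the original file): the
# factor 0.3 / 8.0e6 converts a mass density from Msun/kpc^3 to GeV/cm^3,
# normalised so that 8.0e6 Msun/kpc^3 maps to 0.3 GeV/cm^3 (a conventional
# local dark-matter density):
#
#     Msun_kpc3_to_GeV_cm3(8.0e6)  # -> 0.3
#     Msun_kpc3_to_GeV_cm3(1.0e7)  # -> 0.375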
[]
tedye/leetcode
Python/leetcode.031.next-permutation.py
975d7e3b8cb9b6be9e80e07febf4bcf6414acd46
class Solution(object):
    def nextPermutation(self, nums):
        """
        :type nums: List[int]
        :rtype: void Do not return anything, modify nums in-place instead.
        """
        if not nums:
            return
        n = len(nums) - 1
        while n > 0 and nums[n - 1] >= nums[n]:
            n -= 1
        t = n
        if t == 0:
            nums[:] = nums[::-1]
            return
        x = nums[n - 1]
        while t < len(nums) and x < nums[t]:
            t += 1
        temp = nums[t - 1]
        nums[t - 1] = nums[n - 1]
        nums[n - 1] = temp
        nums[n:] = nums[n:][::-1]
        return
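# Walkthrough (added for illustration; not in the original file): this is the
# classic next-permutation algorithm -- scan from the right for the first
# index n-1 with nums[n-1] < nums[n]; swap nums[n-1] with the rightmost suffix
# element still greater than it (the smallest such element, since the suffix
# is descending); then reverse the suffix so it becomes ascending.
#
#     nums = [1, 2, 3]
#     Solution().nextPermutation(nums)   # nums -> [1, 3, 2]
#     nums = [3, 2, 1]
#     Solution().nextPermutation(nums)   # fully descending -> [1, 2, 3]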
[]
mirokrastev/poll-website
poll/models/telemetry_models.py
4f26cce3f838ab05de91f0e1dba34d9bc59927b4
from django.db import models
from django.contrib.auth import get_user_model

from poll.models.poll_models import Poll


class BasePollTelemetry(models.Model):
    """
    This Base class gives a hint that in the future
    more Telemetry classes could be implemented.
    """
    poll = models.ForeignKey(db_index=True, to=Poll, on_delete=models.CASCADE)

    def __str__(self):
        return str(self.poll)

    class Meta:
        abstract = True


class AnonymousUserPollTelemetry(models.Model):
    """
    To "store" the anonymous users that have viewed the Poll,
    I need to store their IP Addresses.
    It will NEVER be displayed outside the admin panel.
    """
    anonymous_user = models.GenericIPAddressField(blank=True, null=True)

    def __str__(self):
        return self.anonymous_user


class UsersPollTelemetry(BasePollTelemetry):
    users = models.ManyToManyField(db_index=True, to=get_user_model())
    anonymous_users = models.ManyToManyField(db_index=True, to=AnonymousUserPollTelemetry)

    class Meta:
        verbose_name = 'PollTelemetry'
        verbose_name_plural = 'PollTelemetry'
[((287, 354), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'db_index': '(True)', 'to': 'Poll', 'on_delete': 'models.CASCADE'}), '(db_index=True, to=Poll, on_delete=models.CASCADE)\n', (304, 354), False, 'from django.db import models\n'), ((692, 743), 'django.db.models.GenericIPAddressField', 'models.GenericIPAddressField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (720, 743), False, 'from django.db import models\n'), ((943, 1011), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'db_index': '(True)', 'to': 'AnonymousUserPollTelemetry'}), '(db_index=True, to=AnonymousUserPollTelemetry)\n', (965, 1011), False, 'from django.db import models\n'), ((903, 919), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (917, 919), False, 'from django.contrib.auth import get_user_model\n')]
HenryLittle/OpenPCDet-HL
pcdet/models/backbones_2d/__init__.py
7dba01750e10d170849314723ec0665782236a70
from .base_bev_backbone import BaseBEVBackbone
from .decouple_bev_backbone import DecoupledBEVBackbone

__all__ = {
    'BaseBEVBackbone': BaseBEVBackbone,
    'DecoupledBEVBackbone': DecoupledBEVBackbone,
}
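# Note (added for illustration; not in the original file): using a dict for
# __all__ doubles as a simple class registry, so a backbone can be looked up
# by the name string from a config. A hypothetical lookup might read:
#
#     from pcdet.models import backbones_2d
#     backbone_cls = backbones_2d.__all__['BaseBEVBackbone']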
[]
beatrizserrano/galaxy
test/unit/app/tools/test_select_parameters.py
e149d9d32e1bca6c07c38b1a9cdabfee60323610
from unittest.mock import Mock

import pytest

from galaxy import model
from galaxy.tools.parameters import basic
from .util import BaseParameterTestCase


class SelectToolParameterTestCase(BaseParameterTestCase):
    def test_validated_values(self):
        self.options_xml = """<options><filter type="data_meta" ref="input_bam" key="dbkey"/></options>"""
        with pytest.raises(ValueError) as exc_info:
            self.param.from_json("42", self.trans, {"input_bam": model.HistoryDatasetAssociation()})
        assert str(exc_info.value) == "parameter 'my_name': requires a value, but no legal values defined"

    def test_validated_values_missing_dependency(self):
        self.options_xml = """<options><filter type="data_meta" ref="input_bam" key="dbkey"/></options>"""
        with pytest.raises(ValueError) as exc_info:
            self.param.from_json("42", self.trans)
        assert str(exc_info.value) == "parameter 'my_name': requires a value, but no legal values defined"

    def test_unvalidated_values(self):
        self.options_xml = """<options><filter type="data_meta" ref="input_bam" key="dbkey"/></options>"""
        self.trans.workflow_building_mode = True
        assert self.param.from_json("42", self.trans) == "42"

    def test_validated_datasets(self):
        self.options_xml = """<options><filter type="data_meta" ref="input_bam" key="dbkey"/></options>"""
        with pytest.raises(ValueError) as exc_info:
            self.param.from_json(model.HistoryDatasetAssociation(), self.trans, {"input_bam": None})
        assert str(exc_info.value) == "parameter 'my_name': requires a value, but no legal values defined"

    def test_unvalidated_datasets(self):
        self.options_xml = """<options><filter type="data_meta" ref="input_bam" key="dbkey"/></options>"""
        self.trans.workflow_building_mode = True
        assert isinstance(
            self.param.from_json(model.HistoryDatasetAssociation(), self.trans, {"input_bam": basic.RuntimeValue()}),
            model.HistoryDatasetAssociation,
        )

    def test_filter_param_value(self):
        self.options_xml = """<options from_data_table="test_table"><filter type="param_value" ref="input_bam" column="0" /></options>"""
        assert ("testname1", "testpath1", False) in self.param.get_options(self.trans, {"input_bam": "testname1"})
        assert ("testname2", "testpath2", False) in self.param.get_options(self.trans, {"input_bam": "testname2"})
        assert len(self.param.get_options(self.trans, {"input_bam": "testname3"})) == 0

    def test_filter_param_value2(self):
        # Same test as above, but filtering on a different column.
        self.options_xml = """<options from_data_table="test_table"><filter type="param_value" ref="input_bam" column="1" /></options>"""
        assert ("testname1", "testpath1", False) in self.param.get_options(self.trans, {"input_bam": "testpath1"})
        assert ("testname2", "testpath2", False) in self.param.get_options(self.trans, {"input_bam": "testpath2"})
        assert len(self.param.get_options(self.trans, {"input_bam": "testpath3"})) == 0

    # TODO: Good deal of overlap here with DataToolParameterTestCase,
    # refactor.
    def setUp(self):
        super().setUp()
        self.test_history = model.History()
        self.app.model.context.add(self.test_history)
        self.app.model.context.flush()
        self.app.tool_data_tables["test_table"] = MockToolDataTable()
        self.trans = Mock(
            app=self.app,
            get_history=lambda: self.test_history,
            get_current_user_roles=lambda: [],
            workflow_building_mode=False,
            webapp=Mock(name="galaxy"),
        )
        self.type = "select"
        self.set_data_ref = False
        self.multiple = False
        self.optional = False
        self.options_xml = ""
        self._param = None

    @property
    def param(self):
        if not self._param:
            multi_text = ""
            if self.multiple:
                multi_text = 'multiple="True"'
            optional_text = ""
            if self.optional:
                optional_text = 'optional="True"'
            options_text = self.options_xml
            data_ref_text = ""
            if self.set_data_ref:
                data_ref_text = 'data_ref="input_bam"'
            template_xml = """<param name="my_name" type="%s" %s %s %s>%s</param>"""
            param_str = template_xml % (
                self.type, data_ref_text, multi_text, optional_text, options_text)
            self._param = self._parameter_for(xml=param_str)
        return self._param


class MockToolDataTable:
    def __init__(self):
        self.columns = dict(
            name=0,
            value=1,
        )
        self.missing_index_file = None

    def get_fields(self):
        return [["testname1", "testpath1"], ["testname2", "testpath2"]]
[((3287, 3302), 'galaxy.model.History', 'model.History', ([], {}), '()\n', (3300, 3302), False, 'from galaxy import model\n'), ((371, 396), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (384, 396), False, 'import pytest\n'), ((799, 824), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (812, 824), False, 'import pytest\n'), ((1418, 1443), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1431, 1443), False, 'import pytest\n'), ((1490, 1523), 'galaxy.model.HistoryDatasetAssociation', 'model.HistoryDatasetAssociation', ([], {}), '()\n', (1521, 1523), False, 'from galaxy import model\n'), ((1927, 1960), 'galaxy.model.HistoryDatasetAssociation', 'model.HistoryDatasetAssociation', ([], {}), '()\n', (1958, 1960), False, 'from galaxy import model\n'), ((3678, 3697), 'unittest.mock.Mock', 'Mock', ([], {'name': '"""galaxy"""'}), "(name='galaxy')\n", (3682, 3697), False, 'from unittest.mock import Mock\n'), ((475, 508), 'galaxy.model.HistoryDatasetAssociation', 'model.HistoryDatasetAssociation', ([], {}), '()\n', (506, 508), False, 'from galaxy import model\n'), ((1988, 2008), 'galaxy.tools.parameters.basic.RuntimeValue', 'basic.RuntimeValue', ([], {}), '()\n', (2006, 2008), False, 'from galaxy.tools.parameters import basic\n')]
Toasterstein/recumpiler
recumpiler/__init__.py
390957cfaa8f60ffeb24adb43b91981dd445c6b9
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""recumpiler

Recompile text to be semi-readable memey garbage.
"""

__version__ = (0, 0, 0)
[]
codenote/chromium-test
net/net.gyp
0637af0080f7e80bf7d20b29ce94c5edc817f390
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

{
  'variables': {
    'chromium_code': 1,
    'linux_link_kerberos%': 0,
    'conditions': [
      ['chromeos==1 or OS=="android" or OS=="ios"', {
        # Disable Kerberos on ChromeOS, Android and iOS, at least for now.
        # It needs configuration (krb5.conf and so on).
        'use_kerberos%': 0,
      }, {  # chromeos == 0
        'use_kerberos%': 1,
      }],
      ['OS=="android" and target_arch != "ia32"', {
        # The way the cache uses mmap() is inefficient on some Android devices.
        # If this flag is set, we hackily avoid using mmap() in the disk cache.
        # We are pretty confident that mmap-ing the index would not hurt any
        # existing x86 android devices, but we cannot be so sure about the
        # variety of ARM devices. So enable it for x86 only for now.
        'posix_avoid_mmap%': 1,
      }, {
        'posix_avoid_mmap%': 0,
      }],
      ['OS=="ios"', {
        # Websockets and socket stream are not used on iOS.
        'enable_websockets%': 0,
        # iOS does not use V8.
        'use_v8_in_net%': 0,
        'enable_built_in_dns%': 0,
      }, {
        'enable_websockets%': 1,
        'use_v8_in_net%': 1,
        'enable_built_in_dns%': 1,
      }],
    ],
  },
  'includes': [
    '../build/win_precompile.gypi',
  ],
  'targets': [
    {
      'target_name': 'net',
      'type': '<(component)',
      'variables': { 'enable_wexit_time_destructors': 1, },
      'dependencies': [
        '../base/base.gyp:base',
        '../base/base.gyp:base_i18n',
        '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
        '../build/temp_gyp/googleurl.gyp:googleurl',
        '../crypto/crypto.gyp:crypto',
        '../sdch/sdch.gyp:sdch',
        '../third_party/icu/icu.gyp:icui18n',
        '../third_party/icu/icu.gyp:icuuc',
        '../third_party/zlib/zlib.gyp:zlib',
        'net_resources',
      ],
      'sources': [
        'android/cert_verify_result_android.h',
        'android/cert_verify_result_android_list.h',
        'android/gurl_utils.cc',
        'android/gurl_utils.h',
        'android/keystore.cc',
        'android/keystore.h',
        'android/keystore_openssl.cc',
        'android/keystore_openssl.h',
        'android/net_jni_registrar.cc',
        'android/net_jni_registrar.h',
        'android/network_change_notifier_android.cc',
        'android/network_change_notifier_android.h',
        'android/network_change_notifier_delegate_android.cc',
        'android/network_change_notifier_delegate_android.h',
        'android/network_change_notifier_factory_android.cc',
        'android/network_change_notifier_factory_android.h',
        'android/network_library.cc',
        'android/network_library.h',
        'base/address_family.h',
        'base/address_list.cc',
        'base/address_list.h',
        'base/address_tracker_linux.cc',
        'base/address_tracker_linux.h',
        'base/auth.cc',
        'base/auth.h',
        'base/backoff_entry.cc',
        'base/backoff_entry.h',
        'base/bandwidth_metrics.cc',
        'base/bandwidth_metrics.h',
        'base/big_endian.cc',
        'base/big_endian.h',
        'base/cache_type.h',
        'base/completion_callback.h',
        'base/connection_type_histograms.cc',
        'base/connection_type_histograms.h',
        'base/crypto_module.h',
        'base/crypto_module_nss.cc',
        'base/crypto_module_openssl.cc',
        'base/data_url.cc',
        'base/data_url.h',
        'base/directory_lister.cc',
        'base/directory_lister.h',
        'base/dns_reloader.cc',
        'base/dns_reloader.h',
        'base/dns_util.cc',
        'base/dns_util.h',
        'base/escape.cc',
        'base/escape.h',
        'base/expiring_cache.h',
        'base/file_stream.cc',
        'base/file_stream.h',
        'base/file_stream_context.cc',
        'base/file_stream_context.h',
        'base/file_stream_context_posix.cc',
        'base/file_stream_context_win.cc',
        'base/file_stream_metrics.cc',
        'base/file_stream_metrics.h',
        'base/file_stream_metrics_posix.cc',
        'base/file_stream_metrics_win.cc',
        'base/file_stream_net_log_parameters.cc',
        'base/file_stream_net_log_parameters.h',
        'base/file_stream_whence.h',
        'base/filter.cc',
        'base/filter.h',
        'base/int128.cc',
        'base/int128.h',
        'base/gzip_filter.cc',
        'base/gzip_filter.h',
        'base/gzip_header.cc',
        'base/gzip_header.h',
        'base/hash_value.cc',
        'base/hash_value.h',
        'base/host_mapping_rules.cc',
        'base/host_mapping_rules.h',
        'base/host_port_pair.cc',
        'base/host_port_pair.h',
        'base/io_buffer.cc',
        'base/io_buffer.h',
        'base/ip_endpoint.cc',
        'base/ip_endpoint.h',
        'base/keygen_handler.cc',
        'base/keygen_handler.h',
        'base/keygen_handler_mac.cc',
        'base/keygen_handler_nss.cc',
        'base/keygen_handler_openssl.cc',
        'base/keygen_handler_win.cc',
        'base/linked_hash_map.h',
        'base/load_flags.h',
        'base/load_flags_list.h',
        'base/load_states.h',
        'base/load_states_list.h',
        'base/load_timing_info.cc',
        'base/load_timing_info.h',
        'base/mime_sniffer.cc',
        'base/mime_sniffer.h',
        'base/mime_util.cc',
        'base/mime_util.h',
        'base/net_error_list.h',
        'base/net_errors.cc',
        'base/net_errors.h',
        'base/net_errors_posix.cc',
        'base/net_errors_win.cc',
        'base/net_export.h',
        'base/net_log.cc',
        'base/net_log.h',
        'base/net_log_event_type_list.h',
        'base/net_log_source_type_list.h',
        'base/net_module.cc',
        'base/net_module.h',
        'base/net_util.cc',
        'base/net_util.h',
        'base/net_util_posix.cc',
        'base/net_util_win.cc',
        'base/network_change_notifier.cc',
        'base/network_change_notifier.h',
        'base/network_change_notifier_factory.h',
        'base/network_change_notifier_linux.cc',
        'base/network_change_notifier_linux.h',
        'base/network_change_notifier_mac.cc',
        'base/network_change_notifier_mac.h',
        'base/network_change_notifier_win.cc',
        'base/network_change_notifier_win.h',
        'base/network_config_watcher_mac.cc',
        'base/network_config_watcher_mac.h',
        'base/network_delegate.cc',
        'base/network_delegate.h',
        'base/nss_memio.c',
        'base/nss_memio.h',
        'base/openssl_private_key_store.h',
        'base/openssl_private_key_store_android.cc',
        'base/openssl_private_key_store_memory.cc',
        'base/platform_mime_util.h',
        # TODO(tc): gnome-vfs? xdgmime? /etc/mime.types?
        'base/platform_mime_util_linux.cc',
        'base/platform_mime_util_mac.mm',
        'base/platform_mime_util_win.cc',
        'base/prioritized_dispatcher.cc',
        'base/prioritized_dispatcher.h',
        'base/priority_queue.h',
        'base/rand_callback.h',
        'base/registry_controlled_domains/registry_controlled_domain.cc',
        'base/registry_controlled_domains/registry_controlled_domain.h',
        'base/request_priority.h',
        'base/sdch_filter.cc',
        'base/sdch_filter.h',
        'base/sdch_manager.cc',
        'base/sdch_manager.h',
        'base/static_cookie_policy.cc',
        'base/static_cookie_policy.h',
        'base/sys_addrinfo.h',
        'base/test_data_stream.cc',
        'base/test_data_stream.h',
        'base/upload_bytes_element_reader.cc',
        'base/upload_bytes_element_reader.h',
        'base/upload_data.cc',
        'base/upload_data.h',
        'base/upload_data_stream.cc',
        'base/upload_data_stream.h',
        'base/upload_element.cc',
        'base/upload_element.h',
        'base/upload_element_reader.cc',
        'base/upload_element_reader.h',
        'base/upload_file_element_reader.cc',
        'base/upload_file_element_reader.h',
        'base/upload_progress.h',
        'base/url_util.cc',
        'base/url_util.h',
        'base/winsock_init.cc',
        'base/winsock_init.h',
        'base/winsock_util.cc',
        'base/winsock_util.h',
        'base/zap.cc',
        'base/zap.h',
        'cert/asn1_util.cc',
        'cert/asn1_util.h',
        'cert/cert_database.cc',
        'cert/cert_database.h',
        'cert/cert_database_android.cc',
        'cert/cert_database_ios.cc',
        'cert/cert_database_mac.cc',
        'cert/cert_database_nss.cc',
        'cert/cert_database_openssl.cc',
        'cert/cert_database_win.cc',
        'cert/cert_status_flags.cc',
        'cert/cert_status_flags.h',
        'cert/cert_trust_anchor_provider.h',
        'cert/cert_verifier.cc',
        'cert/cert_verifier.h',
        'cert/cert_verify_proc.cc',
        'cert/cert_verify_proc.h',
        'cert/cert_verify_proc_android.cc',
        'cert/cert_verify_proc_android.h',
        'cert/cert_verify_proc_mac.cc',
        'cert/cert_verify_proc_mac.h',
        'cert/cert_verify_proc_nss.cc',
        'cert/cert_verify_proc_nss.h',
        'cert/cert_verify_proc_openssl.cc',
        'cert/cert_verify_proc_openssl.h',
        'cert/cert_verify_proc_win.cc',
        'cert/cert_verify_proc_win.h',
        'cert/cert_verify_result.cc',
        'cert/cert_verify_result.h',
        'cert/crl_set.cc',
        'cert/crl_set.h',
        'cert/ev_root_ca_metadata.cc',
        'cert/ev_root_ca_metadata.h',
        'cert/multi_threaded_cert_verifier.cc',
        'cert/multi_threaded_cert_verifier.h',
        'cert/nss_cert_database.cc',
        'cert/nss_cert_database.h',
        'cert/pem_tokenizer.cc',
        'cert/pem_tokenizer.h',
        'cert/single_request_cert_verifier.cc',
        'cert/single_request_cert_verifier.h',
        'cert/test_root_certs.cc',
        'cert/test_root_certs.h',
        'cert/test_root_certs_mac.cc',
        'cert/test_root_certs_nss.cc',
        'cert/test_root_certs_openssl.cc',
        'cert/test_root_certs_android.cc',
        'cert/test_root_certs_win.cc',
        'cert/x509_cert_types.cc',
        'cert/x509_cert_types.h',
        'cert/x509_cert_types_mac.cc',
        'cert/x509_cert_types_win.cc',
        'cert/x509_certificate.cc',
        'cert/x509_certificate.h',
        'cert/x509_certificate_ios.cc',
        'cert/x509_certificate_mac.cc',
        'cert/x509_certificate_net_log_param.cc',
        'cert/x509_certificate_net_log_param.h',
        'cert/x509_certificate_nss.cc',
        'cert/x509_certificate_openssl.cc',
        'cert/x509_certificate_win.cc',
        'cert/x509_util.h',
        'cert/x509_util.cc',
        'cert/x509_util_ios.cc',
        'cert/x509_util_ios.h',
        'cert/x509_util_mac.cc',
        'cert/x509_util_mac.h',
        'cert/x509_util_nss.cc',
        'cert/x509_util_nss.h',
        'cert/x509_util_openssl.cc',
        'cert/x509_util_openssl.h',
        'cookies/canonical_cookie.cc',
        'cookies/canonical_cookie.h',
        'cookies/cookie_monster.cc',
        'cookies/cookie_monster.h',
        'cookies/cookie_options.h',
        'cookies/cookie_store.cc',
        'cookies/cookie_store.h',
        'cookies/cookie_util.cc',
        'cookies/cookie_util.h',
        'cookies/parsed_cookie.cc',
'cookies/parsed_cookie.h', 'disk_cache/addr.cc', 'disk_cache/addr.h', 'disk_cache/backend_impl.cc', 'disk_cache/backend_impl.h', 'disk_cache/bitmap.cc', 'disk_cache/bitmap.h', 'disk_cache/block_files.cc', 'disk_cache/block_files.h', 'disk_cache/cache_creator.cc', 'disk_cache/cache_util.h', 'disk_cache/cache_util.cc', 'disk_cache/cache_util_posix.cc', 'disk_cache/cache_util_win.cc', 'disk_cache/disk_cache.h', 'disk_cache/disk_format.cc', 'disk_cache/disk_format.h', 'disk_cache/entry_impl.cc', 'disk_cache/entry_impl.h', 'disk_cache/errors.h', 'disk_cache/eviction.cc', 'disk_cache/eviction.h', 'disk_cache/experiments.h', 'disk_cache/file.cc', 'disk_cache/file.h', 'disk_cache/file_block.h', 'disk_cache/file_lock.cc', 'disk_cache/file_lock.h', 'disk_cache/file_posix.cc', 'disk_cache/file_win.cc', 'disk_cache/histogram_macros.h', 'disk_cache/in_flight_backend_io.cc', 'disk_cache/in_flight_backend_io.h', 'disk_cache/in_flight_io.cc', 'disk_cache/in_flight_io.h', 'disk_cache/mapped_file.h', 'disk_cache/mapped_file_posix.cc', 'disk_cache/mapped_file_avoid_mmap_posix.cc', 'disk_cache/mapped_file_win.cc', 'disk_cache/mem_backend_impl.cc', 'disk_cache/mem_backend_impl.h', 'disk_cache/mem_entry_impl.cc', 'disk_cache/mem_entry_impl.h', 'disk_cache/mem_rankings.cc', 'disk_cache/mem_rankings.h', 'disk_cache/net_log_parameters.cc', 'disk_cache/net_log_parameters.h', 'disk_cache/rankings.cc', 'disk_cache/rankings.h', 'disk_cache/sparse_control.cc', 'disk_cache/sparse_control.h', 'disk_cache/stats.cc', 'disk_cache/stats.h', 'disk_cache/stats_histogram.cc', 'disk_cache/stats_histogram.h', 'disk_cache/storage_block-inl.h', 'disk_cache/storage_block.h', 'disk_cache/stress_support.h', 'disk_cache/trace.cc', 'disk_cache/trace.h', 'disk_cache/simple/simple_backend_impl.cc', 'disk_cache/simple/simple_backend_impl.h', 'disk_cache/simple/simple_disk_format.cc', 'disk_cache/simple/simple_disk_format.h', 'disk_cache/simple/simple_entry_impl.cc', 'disk_cache/simple/simple_entry_impl.h', 'disk_cache/simple/simple_index.cc', 'disk_cache/simple/simple_index.h', 'disk_cache/simple/simple_synchronous_entry.cc', 'disk_cache/simple/simple_synchronous_entry.h', 'disk_cache/flash/flash_entry_impl.cc', 'disk_cache/flash/flash_entry_impl.h', 'disk_cache/flash/format.h', 'disk_cache/flash/internal_entry.cc', 'disk_cache/flash/internal_entry.h', 'disk_cache/flash/log_store.cc', 'disk_cache/flash/log_store.h', 'disk_cache/flash/log_store_entry.cc', 'disk_cache/flash/log_store_entry.h', 'disk_cache/flash/segment.cc', 'disk_cache/flash/segment.h', 'disk_cache/flash/storage.cc', 'disk_cache/flash/storage.h', 'dns/address_sorter.h', 'dns/address_sorter_posix.cc', 'dns/address_sorter_posix.h', 'dns/address_sorter_win.cc', 'dns/dns_client.cc', 'dns/dns_client.h', 'dns/dns_config_service.cc', 'dns/dns_config_service.h', 'dns/dns_config_service_posix.cc', 'dns/dns_config_service_posix.h', 'dns/dns_config_service_win.cc', 'dns/dns_config_service_win.h', 'dns/dns_hosts.cc', 'dns/dns_hosts.h', 'dns/dns_protocol.h', 'dns/dns_query.cc', 'dns/dns_query.h', 'dns/dns_response.cc', 'dns/dns_response.h', 'dns/dns_session.cc', 'dns/dns_session.h', 'dns/dns_socket_pool.cc', 'dns/dns_socket_pool.h', 'dns/dns_transaction.cc', 'dns/dns_transaction.h', 'dns/host_cache.cc', 'dns/host_cache.h', 'dns/host_resolver.cc', 'dns/host_resolver.h', 'dns/host_resolver_impl.cc', 'dns/host_resolver_impl.h', 'dns/host_resolver_proc.cc', 'dns/host_resolver_proc.h', 'dns/mapped_host_resolver.cc', 'dns/mapped_host_resolver.h', 'dns/notify_watcher_mac.cc', 
'dns/notify_watcher_mac.h', 'dns/serial_worker.cc', 'dns/serial_worker.h', 'dns/single_request_host_resolver.cc', 'dns/single_request_host_resolver.h', 'ftp/ftp_auth_cache.cc', 'ftp/ftp_auth_cache.h', 'ftp/ftp_ctrl_response_buffer.cc', 'ftp/ftp_ctrl_response_buffer.h', 'ftp/ftp_directory_listing_parser.cc', 'ftp/ftp_directory_listing_parser.h', 'ftp/ftp_directory_listing_parser_ls.cc', 'ftp/ftp_directory_listing_parser_ls.h', 'ftp/ftp_directory_listing_parser_netware.cc', 'ftp/ftp_directory_listing_parser_netware.h', 'ftp/ftp_directory_listing_parser_os2.cc', 'ftp/ftp_directory_listing_parser_os2.h', 'ftp/ftp_directory_listing_parser_vms.cc', 'ftp/ftp_directory_listing_parser_vms.h', 'ftp/ftp_directory_listing_parser_windows.cc', 'ftp/ftp_directory_listing_parser_windows.h', 'ftp/ftp_network_layer.cc', 'ftp/ftp_network_layer.h', 'ftp/ftp_network_session.cc', 'ftp/ftp_network_session.h', 'ftp/ftp_network_transaction.cc', 'ftp/ftp_network_transaction.h', 'ftp/ftp_request_info.h', 'ftp/ftp_response_info.cc', 'ftp/ftp_response_info.h', 'ftp/ftp_server_type_histograms.cc', 'ftp/ftp_server_type_histograms.h', 'ftp/ftp_transaction.h', 'ftp/ftp_transaction_factory.h', 'ftp/ftp_util.cc', 'ftp/ftp_util.h', 'http/des.cc', 'http/des.h', 'http/http_atom_list.h', 'http/http_auth.cc', 'http/http_auth.h', 'http/http_auth_cache.cc', 'http/http_auth_cache.h', 'http/http_auth_controller.cc', 'http/http_auth_controller.h', 'http/http_auth_filter.cc', 'http/http_auth_filter.h', 'http/http_auth_filter_win.h', 'http/http_auth_gssapi_posix.cc', 'http/http_auth_gssapi_posix.h', 'http/http_auth_handler.cc', 'http/http_auth_handler.h', 'http/http_auth_handler_basic.cc', 'http/http_auth_handler_basic.h', 'http/http_auth_handler_digest.cc', 'http/http_auth_handler_digest.h', 'http/http_auth_handler_factory.cc', 'http/http_auth_handler_factory.h', 'http/http_auth_handler_negotiate.cc', 'http/http_auth_handler_negotiate.h', 'http/http_auth_handler_ntlm.cc', 'http/http_auth_handler_ntlm.h', 'http/http_auth_handler_ntlm_portable.cc', 'http/http_auth_handler_ntlm_win.cc', 'http/http_auth_sspi_win.cc', 'http/http_auth_sspi_win.h', 'http/http_basic_stream.cc', 'http/http_basic_stream.h', 'http/http_byte_range.cc', 'http/http_byte_range.h', 'http/http_cache.cc', 'http/http_cache.h', 'http/http_cache_transaction.cc', 'http/http_cache_transaction.h', 'http/http_content_disposition.cc', 'http/http_content_disposition.h', 'http/http_chunked_decoder.cc', 'http/http_chunked_decoder.h', 'http/http_network_layer.cc', 'http/http_network_layer.h', 'http/http_network_session.cc', 'http/http_network_session.h', 'http/http_network_session_peer.cc', 'http/http_network_session_peer.h', 'http/http_network_transaction.cc', 'http/http_network_transaction.h', 'http/http_pipelined_connection.h', 'http/http_pipelined_connection_impl.cc', 'http/http_pipelined_connection_impl.h', 'http/http_pipelined_host.cc', 'http/http_pipelined_host.h', 'http/http_pipelined_host_capability.h', 'http/http_pipelined_host_forced.cc', 'http/http_pipelined_host_forced.h', 'http/http_pipelined_host_impl.cc', 'http/http_pipelined_host_impl.h', 'http/http_pipelined_host_pool.cc', 'http/http_pipelined_host_pool.h', 'http/http_pipelined_stream.cc', 'http/http_pipelined_stream.h', 'http/http_proxy_client_socket.cc', 'http/http_proxy_client_socket.h', 'http/http_proxy_client_socket_pool.cc', 'http/http_proxy_client_socket_pool.h', 'http/http_request_headers.cc', 'http/http_request_headers.h', 'http/http_request_info.cc', 'http/http_request_info.h', 
'http/http_response_body_drainer.cc', 'http/http_response_body_drainer.h', 'http/http_response_headers.cc', 'http/http_response_headers.h', 'http/http_response_info.cc', 'http/http_response_info.h', 'http/http_security_headers.cc', 'http/http_security_headers.h', 'http/http_server_properties.cc', 'http/http_server_properties.h', 'http/http_server_properties_impl.cc', 'http/http_server_properties_impl.h', 'http/http_status_code.h', 'http/http_stream.h', 'http/http_stream_base.h', 'http/http_stream_factory.cc', 'http/http_stream_factory.h', 'http/http_stream_factory_impl.cc', 'http/http_stream_factory_impl.h', 'http/http_stream_factory_impl_job.cc', 'http/http_stream_factory_impl_job.h', 'http/http_stream_factory_impl_request.cc', 'http/http_stream_factory_impl_request.h', 'http/http_stream_parser.cc', 'http/http_stream_parser.h', 'http/http_transaction.h', 'http/http_transaction_delegate.h', 'http/http_transaction_factory.h', 'http/http_util.cc', 'http/http_util.h', 'http/http_util_icu.cc', 'http/http_vary_data.cc', 'http/http_vary_data.h', 'http/http_version.h', 'http/md4.cc', 'http/md4.h', 'http/partial_data.cc', 'http/partial_data.h', 'http/proxy_client_socket.h', 'http/proxy_client_socket.cc', 'http/transport_security_state.cc', 'http/transport_security_state.h', 'http/transport_security_state_static.h', 'http/url_security_manager.cc', 'http/url_security_manager.h', 'http/url_security_manager_posix.cc', 'http/url_security_manager_win.cc', 'ocsp/nss_ocsp.cc', 'ocsp/nss_ocsp.h', 'proxy/dhcp_proxy_script_adapter_fetcher_win.cc', 'proxy/dhcp_proxy_script_adapter_fetcher_win.h', 'proxy/dhcp_proxy_script_fetcher.cc', 'proxy/dhcp_proxy_script_fetcher.h', 'proxy/dhcp_proxy_script_fetcher_factory.cc', 'proxy/dhcp_proxy_script_fetcher_factory.h', 'proxy/dhcp_proxy_script_fetcher_win.cc', 'proxy/dhcp_proxy_script_fetcher_win.h', 'proxy/dhcpcsvc_init_win.cc', 'proxy/dhcpcsvc_init_win.h', 'proxy/multi_threaded_proxy_resolver.cc', 'proxy/multi_threaded_proxy_resolver.h', 'proxy/network_delegate_error_observer.cc', 'proxy/network_delegate_error_observer.h', 'proxy/polling_proxy_config_service.cc', 'proxy/polling_proxy_config_service.h', 'proxy/proxy_bypass_rules.cc', 'proxy/proxy_bypass_rules.h', 'proxy/proxy_config.cc', 'proxy/proxy_config.h', 'proxy/proxy_config_service.h', 'proxy/proxy_config_service_android.cc', 'proxy/proxy_config_service_android.h', 'proxy/proxy_config_service_fixed.cc', 'proxy/proxy_config_service_fixed.h', 'proxy/proxy_config_service_ios.cc', 'proxy/proxy_config_service_ios.h', 'proxy/proxy_config_service_linux.cc', 'proxy/proxy_config_service_linux.h', 'proxy/proxy_config_service_mac.cc', 'proxy/proxy_config_service_mac.h', 'proxy/proxy_config_service_win.cc', 'proxy/proxy_config_service_win.h', 'proxy/proxy_config_source.cc', 'proxy/proxy_config_source.h', 'proxy/proxy_info.cc', 'proxy/proxy_info.h', 'proxy/proxy_list.cc', 'proxy/proxy_list.h', 'proxy/proxy_resolver.h', 'proxy/proxy_resolver_error_observer.h', 'proxy/proxy_resolver_mac.cc', 'proxy/proxy_resolver_mac.h', 'proxy/proxy_resolver_script.h', 'proxy/proxy_resolver_script_data.cc', 'proxy/proxy_resolver_script_data.h', 'proxy/proxy_resolver_winhttp.cc', 'proxy/proxy_resolver_winhttp.h', 'proxy/proxy_retry_info.h', 'proxy/proxy_script_decider.cc', 'proxy/proxy_script_decider.h', 'proxy/proxy_script_fetcher.h', 'proxy/proxy_script_fetcher_impl.cc', 'proxy/proxy_script_fetcher_impl.h', 'proxy/proxy_server.cc', 'proxy/proxy_server.h', 'proxy/proxy_server_mac.cc', 'proxy/proxy_service.cc', 'proxy/proxy_service.h', 
'quic/blocked_list.h', 'quic/congestion_control/available_channel_estimator.cc', 'quic/congestion_control/available_channel_estimator.h', 'quic/congestion_control/channel_estimator.cc', 'quic/congestion_control/channel_estimator.h', 'quic/congestion_control/cube_root.cc', 'quic/congestion_control/cube_root.h', 'quic/congestion_control/cubic.cc', 'quic/congestion_control/cubic.h', 'quic/congestion_control/fix_rate_receiver.cc', 'quic/congestion_control/fix_rate_receiver.h', 'quic/congestion_control/fix_rate_sender.cc', 'quic/congestion_control/fix_rate_sender.h', 'quic/congestion_control/hybrid_slow_start.cc', 'quic/congestion_control/hybrid_slow_start.h', 'quic/congestion_control/inter_arrival_bitrate_ramp_up.cc', 'quic/congestion_control/inter_arrival_bitrate_ramp_up.h', 'quic/congestion_control/inter_arrival_overuse_detector.cc', 'quic/congestion_control/inter_arrival_overuse_detector.h', 'quic/congestion_control/inter_arrival_probe.cc', 'quic/congestion_control/inter_arrival_probe.h', 'quic/congestion_control/inter_arrival_receiver.cc', 'quic/congestion_control/inter_arrival_receiver.h', 'quic/congestion_control/inter_arrival_sender.cc', 'quic/congestion_control/inter_arrival_sender.h', 'quic/congestion_control/inter_arrival_state_machine.cc', 'quic/congestion_control/inter_arrival_state_machine.h', 'quic/congestion_control/leaky_bucket.cc', 'quic/congestion_control/leaky_bucket.h', 'quic/congestion_control/paced_sender.cc', 'quic/congestion_control/paced_sender.h', 'quic/congestion_control/quic_congestion_manager.cc', 'quic/congestion_control/quic_congestion_manager.h', 'quic/congestion_control/quic_max_sized_map.h', 'quic/congestion_control/receive_algorithm_interface.cc', 'quic/congestion_control/receive_algorithm_interface.h', 'quic/congestion_control/send_algorithm_interface.cc', 'quic/congestion_control/send_algorithm_interface.h', 'quic/congestion_control/tcp_cubic_sender.cc', 'quic/congestion_control/tcp_cubic_sender.h', 'quic/congestion_control/tcp_receiver.cc', 'quic/congestion_control/tcp_receiver.h', 'quic/crypto/aes_128_gcm_decrypter.h', 'quic/crypto/aes_128_gcm_decrypter_nss.cc', 'quic/crypto/aes_128_gcm_decrypter_openssl.cc', 'quic/crypto/aes_128_gcm_encrypter.h', 'quic/crypto/aes_128_gcm_encrypter_nss.cc', 'quic/crypto/aes_128_gcm_encrypter_openssl.cc', 'quic/crypto/crypto_framer.cc', 'quic/crypto/crypto_framer.h', 'quic/crypto/crypto_handshake.cc', 'quic/crypto/crypto_handshake.h', 'quic/crypto/crypto_protocol.h', 'quic/crypto/crypto_utils.cc', 'quic/crypto/crypto_utils.h', 'quic/crypto/curve25519_key_exchange.cc', 'quic/crypto/curve25519_key_exchange.h', 'quic/crypto/key_exchange.h', 'quic/crypto/null_decrypter.cc', 'quic/crypto/null_decrypter.h', 'quic/crypto/null_encrypter.cc', 'quic/crypto/null_encrypter.h', 'quic/crypto/p256_key_exchange.h', 'quic/crypto/p256_key_exchange_nss.cc', 'quic/crypto/p256_key_exchange_openssl.cc', 'quic/crypto/quic_decrypter.cc', 'quic/crypto/quic_decrypter.h', 'quic/crypto/quic_encrypter.cc', 'quic/crypto/quic_encrypter.h', 'quic/crypto/quic_random.cc', 'quic/crypto/quic_random.h', 'quic/crypto/scoped_evp_cipher_ctx.h', 'quic/crypto/strike_register.cc', 'quic/crypto/strike_register.h', 'quic/quic_bandwidth.cc', 'quic/quic_bandwidth.h', 'quic/quic_blocked_writer_interface.h', 'quic/quic_client_session.cc', 'quic/quic_client_session.h', 'quic/quic_crypto_client_stream.cc', 'quic/quic_crypto_client_stream.h', 'quic/quic_crypto_client_stream_factory.h', 'quic/quic_crypto_server_stream.cc', 'quic/quic_crypto_server_stream.h', 
'quic/quic_crypto_stream.cc', 'quic/quic_crypto_stream.h', 'quic/quic_clock.cc', 'quic/quic_clock.h', 'quic/quic_connection.cc', 'quic/quic_connection.h', 'quic/quic_connection_helper.cc', 'quic/quic_connection_helper.h', 'quic/quic_connection_logger.cc', 'quic/quic_connection_logger.h', 'quic/quic_data_reader.cc', 'quic/quic_data_reader.h', 'quic/quic_data_writer.cc', 'quic/quic_data_writer.h', 'quic/quic_fec_group.cc', 'quic/quic_fec_group.h', 'quic/quic_framer.cc', 'quic/quic_framer.h', 'quic/quic_http_stream.cc', 'quic/quic_http_stream.h', 'quic/quic_packet_creator.cc', 'quic/quic_packet_creator.h', 'quic/quic_packet_entropy_manager.cc', 'quic/quic_packet_entropy_manager.h', 'quic/quic_packet_generator.cc', 'quic/quic_packet_generator.h', 'quic/quic_protocol.cc', 'quic/quic_protocol.h', 'quic/quic_reliable_client_stream.cc', 'quic/quic_reliable_client_stream.h', 'quic/quic_session.cc', 'quic/quic_session.h', 'quic/quic_stats.cc', 'quic/quic_stats.h', 'quic/quic_stream_factory.cc', 'quic/quic_stream_factory.h', 'quic/quic_stream_sequencer.cc', 'quic/quic_stream_sequencer.h', 'quic/quic_time.cc', 'quic/quic_time.h', 'quic/quic_utils.cc', 'quic/quic_utils.h', 'quic/reliable_quic_stream.cc', 'quic/reliable_quic_stream.h', 'socket/buffered_write_stream_socket.cc', 'socket/buffered_write_stream_socket.h', 'socket/client_socket_factory.cc', 'socket/client_socket_factory.h', 'socket/client_socket_handle.cc', 'socket/client_socket_handle.h', 'socket/client_socket_pool.cc', 'socket/client_socket_pool.h', 'socket/client_socket_pool_base.cc', 'socket/client_socket_pool_base.h', 'socket/client_socket_pool_histograms.cc', 'socket/client_socket_pool_histograms.h', 'socket/client_socket_pool_manager.cc', 'socket/client_socket_pool_manager.h', 'socket/client_socket_pool_manager_impl.cc', 'socket/client_socket_pool_manager_impl.h', 'socket/next_proto.h', 'socket/nss_ssl_util.cc', 'socket/nss_ssl_util.h', 'socket/server_socket.h', 'socket/socket_net_log_params.cc', 'socket/socket_net_log_params.h', 'socket/socket.h', 'socket/socks5_client_socket.cc', 'socket/socks5_client_socket.h', 'socket/socks_client_socket.cc', 'socket/socks_client_socket.h', 'socket/socks_client_socket_pool.cc', 'socket/socks_client_socket_pool.h', 'socket/ssl_client_socket.cc', 'socket/ssl_client_socket.h', 'socket/ssl_client_socket_nss.cc', 'socket/ssl_client_socket_nss.h', 'socket/ssl_client_socket_openssl.cc', 'socket/ssl_client_socket_openssl.h', 'socket/ssl_client_socket_pool.cc', 'socket/ssl_client_socket_pool.h', 'socket/ssl_error_params.cc', 'socket/ssl_error_params.h', 'socket/ssl_server_socket.h', 'socket/ssl_server_socket_nss.cc', 'socket/ssl_server_socket_nss.h', 'socket/ssl_server_socket_openssl.cc', 'socket/ssl_socket.h', 'socket/stream_listen_socket.cc', 'socket/stream_listen_socket.h', 'socket/stream_socket.cc', 'socket/stream_socket.h', 'socket/tcp_client_socket.cc', 'socket/tcp_client_socket.h', 'socket/tcp_client_socket_libevent.cc', 'socket/tcp_client_socket_libevent.h', 'socket/tcp_client_socket_win.cc', 'socket/tcp_client_socket_win.h', 'socket/tcp_listen_socket.cc', 'socket/tcp_listen_socket.h', 'socket/tcp_server_socket.h', 'socket/tcp_server_socket_libevent.cc', 'socket/tcp_server_socket_libevent.h', 'socket/tcp_server_socket_win.cc', 'socket/tcp_server_socket_win.h', 'socket/transport_client_socket_pool.cc', 'socket/transport_client_socket_pool.h', 'socket/unix_domain_socket_posix.cc', 'socket/unix_domain_socket_posix.h', 'socket_stream/socket_stream.cc', 'socket_stream/socket_stream.h', 
'socket_stream/socket_stream_job.cc', 'socket_stream/socket_stream_job.h', 'socket_stream/socket_stream_job_manager.cc', 'socket_stream/socket_stream_job_manager.h', 'socket_stream/socket_stream_metrics.cc', 'socket_stream/socket_stream_metrics.h', 'spdy/buffered_spdy_framer.cc', 'spdy/buffered_spdy_framer.h', 'spdy/spdy_bitmasks.h', 'spdy/spdy_credential_builder.cc', 'spdy/spdy_credential_builder.h', 'spdy/spdy_credential_state.cc', 'spdy/spdy_credential_state.h', 'spdy/spdy_frame_builder.cc', 'spdy/spdy_frame_builder.h', 'spdy/spdy_frame_reader.cc', 'spdy/spdy_frame_reader.h', 'spdy/spdy_framer.cc', 'spdy/spdy_framer.h', 'spdy/spdy_header_block.cc', 'spdy/spdy_header_block.h', 'spdy/spdy_http_stream.cc', 'spdy/spdy_http_stream.h', 'spdy/spdy_http_utils.cc', 'spdy/spdy_http_utils.h', 'spdy/spdy_io_buffer.cc', 'spdy/spdy_io_buffer.h', 'spdy/spdy_priority_forest.h', 'spdy/spdy_protocol.cc', 'spdy/spdy_protocol.h', 'spdy/spdy_proxy_client_socket.cc', 'spdy/spdy_proxy_client_socket.h', 'spdy/spdy_session.cc', 'spdy/spdy_session.h', 'spdy/spdy_session_pool.cc', 'spdy/spdy_session_pool.h', 'spdy/spdy_stream.cc', 'spdy/spdy_stream.h', 'spdy/spdy_websocket_stream.cc', 'spdy/spdy_websocket_stream.h', 'ssl/client_cert_store.h', 'ssl/client_cert_store_impl.h', 'ssl/client_cert_store_impl_mac.cc', 'ssl/client_cert_store_impl_nss.cc', 'ssl/client_cert_store_impl_win.cc', 'ssl/default_server_bound_cert_store.cc', 'ssl/default_server_bound_cert_store.h', 'ssl/openssl_client_key_store.cc', 'ssl/openssl_client_key_store.h', 'ssl/server_bound_cert_service.cc', 'ssl/server_bound_cert_service.h', 'ssl/server_bound_cert_store.cc', 'ssl/server_bound_cert_store.h', 'ssl/ssl_cert_request_info.cc', 'ssl/ssl_cert_request_info.h', 'ssl/ssl_cipher_suite_names.cc', 'ssl/ssl_cipher_suite_names.h', 'ssl/ssl_client_auth_cache.cc', 'ssl/ssl_client_auth_cache.h', 'ssl/ssl_client_cert_type.h', 'ssl/ssl_config_service.cc', 'ssl/ssl_config_service.h', 'ssl/ssl_config_service_defaults.cc', 'ssl/ssl_config_service_defaults.h', 'ssl/ssl_info.cc', 'ssl/ssl_info.h', 'third_party/mozilla_security_manager/nsKeygenHandler.cpp', 'third_party/mozilla_security_manager/nsKeygenHandler.h', 'third_party/mozilla_security_manager/nsNSSCertificateDB.cpp', 'third_party/mozilla_security_manager/nsNSSCertificateDB.h', 'third_party/mozilla_security_manager/nsPKCS12Blob.cpp', 'third_party/mozilla_security_manager/nsPKCS12Blob.h', 'udp/datagram_client_socket.h', 'udp/datagram_server_socket.h', 'udp/datagram_socket.h', 'udp/udp_client_socket.cc', 'udp/udp_client_socket.h', 'udp/udp_net_log_parameters.cc', 'udp/udp_net_log_parameters.h', 'udp/udp_server_socket.cc', 'udp/udp_server_socket.h', 'udp/udp_socket.h', 'udp/udp_socket_libevent.cc', 'udp/udp_socket_libevent.h', 'udp/udp_socket_win.cc', 'udp/udp_socket_win.h', 'url_request/data_protocol_handler.cc', 'url_request/data_protocol_handler.h', 'url_request/file_protocol_handler.cc', 'url_request/file_protocol_handler.h', 'url_request/fraudulent_certificate_reporter.h', 'url_request/ftp_protocol_handler.cc', 'url_request/ftp_protocol_handler.h', 'url_request/http_user_agent_settings.h', 'url_request/protocol_intercept_job_factory.cc', 'url_request/protocol_intercept_job_factory.h', 'url_request/static_http_user_agent_settings.cc', 'url_request/static_http_user_agent_settings.h', 'url_request/url_fetcher.cc', 'url_request/url_fetcher.h', 'url_request/url_fetcher_core.cc', 'url_request/url_fetcher_core.h', 'url_request/url_fetcher_delegate.cc', 'url_request/url_fetcher_delegate.h', 
'url_request/url_fetcher_factory.h', 'url_request/url_fetcher_impl.cc', 'url_request/url_fetcher_impl.h', 'url_request/url_fetcher_response_writer.cc', 'url_request/url_fetcher_response_writer.h', 'url_request/url_request.cc', 'url_request/url_request.h', 'url_request/url_request_about_job.cc', 'url_request/url_request_about_job.h', 'url_request/url_request_context.cc', 'url_request/url_request_context.h', 'url_request/url_request_context_builder.cc', 'url_request/url_request_context_builder.h', 'url_request/url_request_context_getter.cc', 'url_request/url_request_context_getter.h', 'url_request/url_request_context_storage.cc', 'url_request/url_request_context_storage.h', 'url_request/url_request_data_job.cc', 'url_request/url_request_data_job.h', 'url_request/url_request_error_job.cc', 'url_request/url_request_error_job.h', 'url_request/url_request_file_dir_job.cc', 'url_request/url_request_file_dir_job.h', 'url_request/url_request_file_job.cc', 'url_request/url_request_file_job.h', 'url_request/url_request_filter.cc', 'url_request/url_request_filter.h', 'url_request/url_request_ftp_job.cc', 'url_request/url_request_ftp_job.h', 'url_request/url_request_http_job.cc', 'url_request/url_request_http_job.h', 'url_request/url_request_job.cc', 'url_request/url_request_job.h', 'url_request/url_request_job_factory.cc', 'url_request/url_request_job_factory.h', 'url_request/url_request_job_factory_impl.cc', 'url_request/url_request_job_factory_impl.h', 'url_request/url_request_job_manager.cc', 'url_request/url_request_job_manager.h', 'url_request/url_request_netlog_params.cc', 'url_request/url_request_netlog_params.h', 'url_request/url_request_redirect_job.cc', 'url_request/url_request_redirect_job.h', 'url_request/url_request_simple_job.cc', 'url_request/url_request_simple_job.h', 'url_request/url_request_status.h', 'url_request/url_request_test_job.cc', 'url_request/url_request_test_job.h', 'url_request/url_request_throttler_entry.cc', 'url_request/url_request_throttler_entry.h', 'url_request/url_request_throttler_entry_interface.h', 'url_request/url_request_throttler_header_adapter.cc', 'url_request/url_request_throttler_header_adapter.h', 'url_request/url_request_throttler_header_interface.h', 'url_request/url_request_throttler_manager.cc', 'url_request/url_request_throttler_manager.h', 'url_request/view_cache_helper.cc', 'url_request/view_cache_helper.h', 'websockets/websocket_errors.cc', 'websockets/websocket_errors.h', 'websockets/websocket_frame.cc', 'websockets/websocket_frame.h', 'websockets/websocket_frame_parser.cc', 'websockets/websocket_frame_parser.h', 'websockets/websocket_handshake_handler.cc', 'websockets/websocket_handshake_handler.h', 'websockets/websocket_job.cc', 'websockets/websocket_job.h', 'websockets/websocket_net_log_params.cc', 'websockets/websocket_net_log_params.h', 'websockets/websocket_stream.h', 'websockets/websocket_throttle.cc', 'websockets/websocket_throttle.h', ], 'defines': [ 'NET_IMPLEMENTATION', ], 'export_dependent_settings': [ '../base/base.gyp:base', ], 'conditions': [ ['chromeos==1', { 'sources!': [ 'base/network_change_notifier_linux.cc', 'base/network_change_notifier_linux.h', 'base/network_change_notifier_netlink_linux.cc', 'base/network_change_notifier_netlink_linux.h', 'proxy/proxy_config_service_linux.cc', 'proxy/proxy_config_service_linux.h', ], }], ['use_kerberos==1', { 'defines': [ 'USE_KERBEROS', ], 'conditions': [ ['OS=="openbsd"', { 'include_dirs': [ '/usr/include/kerberosV' ], }], ['linux_link_kerberos==1', { 'link_settings': { 'ldflags': [ 
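# The entry below uses GYP command expansion: '<!(...)' runs the command
# at gyp time and substitutes its stdout, and the '@' variant splits the
# output into list items. If krb5-config printed '-lgssapi_krb5 -lkrb5',
# the expanded list would be (illustrative):
#   'ldflags': [ '-lgssapi_krb5', '-lkrb5' ],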
'<!@(krb5-config --libs gssapi)', ], },
}, {  # linux_link_kerberos==0
'defines': [ 'DLOPEN_KERBEROS', ], }], ],
}, {  # use_kerberos == 0
'sources!': [ 'http/http_auth_gssapi_posix.cc', 'http/http_auth_gssapi_posix.h', 'http/http_auth_handler_negotiate.h', 'http/http_auth_handler_negotiate.cc', ], }],
['posix_avoid_mmap==1', { 'defines': [ 'POSIX_AVOID_MMAP', ], 'direct_dependent_settings': { 'defines': [ 'POSIX_AVOID_MMAP', ], }, 'sources!': [ 'disk_cache/mapped_file_posix.cc', ],
}, {  # else
'sources!': [ 'disk_cache/mapped_file_avoid_mmap_posix.cc', ], }],
['disable_ftp_support==1', { 'sources/': [ ['exclude', '^ftp/'], ], 'sources!': [ 'url_request/ftp_protocol_handler.cc', 'url_request/ftp_protocol_handler.h', 'url_request/url_request_ftp_job.cc', 'url_request/url_request_ftp_job.h', ], }],
['enable_built_in_dns==1', { 'defines': [ 'ENABLE_BUILT_IN_DNS', ]
}, {  # else
'sources!': [ 'dns/address_sorter_posix.cc', 'dns/address_sorter_posix.h', 'dns/dns_client.cc', ], }],
['use_openssl==1', { 'sources!': [ 'base/crypto_module_nss.cc', 'base/keygen_handler_nss.cc', 'base/nss_memio.c', 'base/nss_memio.h', 'cert/cert_database_nss.cc', 'cert/cert_verify_proc_nss.cc', 'cert/cert_verify_proc_nss.h', 'cert/nss_cert_database.cc', 'cert/nss_cert_database.h', 'cert/test_root_certs_nss.cc', 'cert/x509_certificate_nss.cc', 'cert/x509_util_nss.cc', 'cert/x509_util_nss.h', 'ocsp/nss_ocsp.cc', 'ocsp/nss_ocsp.h', 'quic/crypto/aes_128_gcm_decrypter_nss.cc', 'quic/crypto/aes_128_gcm_encrypter_nss.cc', 'quic/crypto/p256_key_exchange_nss.cc', 'socket/nss_ssl_util.cc', 'socket/nss_ssl_util.h', 'socket/ssl_client_socket_nss.cc', 'socket/ssl_client_socket_nss.h', 'socket/ssl_server_socket_nss.cc', 'socket/ssl_server_socket_nss.h', 'ssl/client_cert_store_impl_nss.cc', 'third_party/mozilla_security_manager/nsKeygenHandler.cpp', 'third_party/mozilla_security_manager/nsKeygenHandler.h', 'third_party/mozilla_security_manager/nsNSSCertificateDB.cpp', 'third_party/mozilla_security_manager/nsNSSCertificateDB.h', 'third_party/mozilla_security_manager/nsPKCS12Blob.cpp', 'third_party/mozilla_security_manager/nsPKCS12Blob.h', ],
}, {  # else !use_openssl: remove the unneeded files
'sources!': [ 'base/crypto_module_openssl.cc', 'base/keygen_handler_openssl.cc', 'base/openssl_private_key_store.h', 'base/openssl_private_key_store_android.cc', 'base/openssl_private_key_store_memory.cc', 'cert/cert_database_openssl.cc', 'cert/cert_verify_proc_openssl.cc', 'cert/cert_verify_proc_openssl.h', 'cert/test_root_certs_openssl.cc', 'cert/x509_certificate_openssl.cc', 'cert/x509_util_openssl.cc', 'cert/x509_util_openssl.h', 'quic/crypto/aes_128_gcm_decrypter_openssl.cc', 'quic/crypto/aes_128_gcm_encrypter_openssl.cc', 'quic/crypto/p256_key_exchange_openssl.cc', 'quic/crypto/scoped_evp_cipher_ctx.h', 'socket/ssl_client_socket_openssl.cc', 'socket/ssl_client_socket_openssl.h', 'socket/ssl_server_socket_openssl.cc', 'ssl/openssl_client_key_store.cc', 'ssl/openssl_client_key_store.h', ], }, ],
[ 'use_glib == 1', { 'dependencies': [ '../build/linux/system.gyp:gconf', '../build/linux/system.gyp:gio', ], 'conditions': [ ['use_openssl==1', { 'dependencies': [ '../third_party/openssl/openssl.gyp:openssl', ],
}, {  # else use_openssl==0, use NSS
'dependencies': [ '../build/linux/system.gyp:ssl', ], }], ['os_bsd==1', { 'sources!': [ 'base/network_change_notifier_linux.cc', 'base/network_change_notifier_netlink_linux.cc', 'proxy/proxy_config_service_linux.cc', ], }, { 'dependencies': [ '../build/linux/system.gyp:libresolv', ], }], ['OS=="solaris"', {
'link_settings': { 'ldflags': [ '-R/usr/lib/mps', ], }, }], ],
}, {  # else: OS is not in the above list
'sources!': [ 'base/crypto_module_nss.cc', 'base/keygen_handler_nss.cc', 'cert/cert_database_nss.cc', 'cert/nss_cert_database.cc', 'cert/nss_cert_database.h', 'cert/test_root_certs_nss.cc', 'cert/x509_certificate_nss.cc', 'ocsp/nss_ocsp.cc', 'ocsp/nss_ocsp.h', 'third_party/mozilla_security_manager/nsKeygenHandler.cpp', 'third_party/mozilla_security_manager/nsKeygenHandler.h', 'third_party/mozilla_security_manager/nsNSSCertificateDB.cpp', 'third_party/mozilla_security_manager/nsNSSCertificateDB.h', 'third_party/mozilla_security_manager/nsPKCS12Blob.cpp', 'third_party/mozilla_security_manager/nsPKCS12Blob.h', ], }, ],
[ 'toolkit_uses_gtk == 1', { 'dependencies': [ '../build/linux/system.gyp:gdk', ], }],
[ 'use_nss != 1', { 'sources!': [ 'cert/cert_verify_proc_nss.cc', 'cert/cert_verify_proc_nss.h', 'ssl/client_cert_store_impl_nss.cc', ], }],
[ 'enable_websockets != 1', { 'sources/': [ ['exclude', '^socket_stream/'], ['exclude', '^websockets/'], ], 'sources!': [ 'spdy/spdy_websocket_stream.cc', 'spdy/spdy_websocket_stream.h', ], }],
[ 'OS == "win"', { 'sources!': [ 'http/http_auth_handler_ntlm_portable.cc', 'socket/tcp_client_socket_libevent.cc', 'socket/tcp_client_socket_libevent.h', 'socket/tcp_server_socket_libevent.cc', 'socket/tcp_server_socket_libevent.h', 'ssl/client_cert_store_impl_nss.cc', 'udp/udp_socket_libevent.cc', 'udp/udp_socket_libevent.h', ], 'dependencies': [ '../third_party/nss/nss.gyp:nspr', '../third_party/nss/nss.gyp:nss', 'third_party/nss/ssl.gyp:libssl', 'tld_cleanup', ],
# TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [4267, ],
}, {  # else: OS != "win"
'sources!': [ 'base/winsock_init.cc', 'base/winsock_init.h', 'base/winsock_util.cc', 'base/winsock_util.h', 'proxy/proxy_resolver_winhttp.cc', 'proxy/proxy_resolver_winhttp.h', ], }, ],
[ 'OS == "mac"', { 'sources!': [ 'ssl/client_cert_store_impl_nss.cc', ], 'dependencies': [ '../third_party/nss/nss.gyp:nspr', '../third_party/nss/nss.gyp:nss', 'third_party/nss/ssl.gyp:libssl', ], 'link_settings': { 'libraries': [ '$(SDKROOT)/System/Library/Frameworks/Foundation.framework', '$(SDKROOT)/System/Library/Frameworks/Security.framework', '$(SDKROOT)/System/Library/Frameworks/SystemConfiguration.framework', '$(SDKROOT)/usr/lib/libresolv.dylib', ] }, }, ],
[ 'OS == "ios"', { 'dependencies': [ '../third_party/nss/nss.gyp:nss', 'third_party/nss/ssl.gyp:libssl', ], 'link_settings': { 'libraries': [ '$(SDKROOT)/System/Library/Frameworks/CFNetwork.framework', '$(SDKROOT)/System/Library/Frameworks/MobileCoreServices.framework', '$(SDKROOT)/System/Library/Frameworks/Security.framework', '$(SDKROOT)/System/Library/Frameworks/SystemConfiguration.framework', '$(SDKROOT)/usr/lib/libresolv.dylib', ], }, }, ],
['OS=="android" and _toolset=="target" and android_webview_build == 0', { 'dependencies': [ 'net_java', ], }],
[ 'OS == "android"', { 'dependencies': [ '../third_party/openssl/openssl.gyp:openssl', 'net_jni_headers', ], 'sources!': [ 'base/openssl_private_key_store_memory.cc', 'cert/cert_database_openssl.cc', 'cert/cert_verify_proc_openssl.cc', 'cert/test_root_certs_openssl.cc', ],
# The net/android/keystore_openssl.cc source file needs to
# access an OpenSSL-internal header.
'include_dirs': [ '../third_party/openssl', ],
}, {  # else OS != "android"
'defines': [
# These are the features Android doesn't support.
'ENABLE_MEDIA_CODEC_THEORA', ], }, ],
[ 'OS == "linux"', { 'dependencies': [ '../build/linux/system.gyp:dbus', '../dbus/dbus.gyp:dbus', ], }, ], ],
'target_conditions': [
# These source files are excluded by default platform rules, but they
# are needed in specific cases on other platforms. Re-including them can
# only be done in target_conditions as it is evaluated after the
# platform rules.
['OS == "android"', { 'sources/': [ ['include', '^base/platform_mime_util_linux\\.cc$'], ], }],
['OS == "ios"', { 'sources/': [ ['include', '^base/network_change_notifier_mac\\.cc$'], ['include', '^base/network_config_watcher_mac\\.cc$'], ['include', '^base/platform_mime_util_mac\\.mm$'],
# The iOS implementation only partially uses NSS and thus does not
# define |use_nss|. In particular the |USE_NSS| preprocessor
# definition is not used. The following files are needed though:
['include', '^cert/cert_verify_proc_nss\\.cc$'], ['include', '^cert/cert_verify_proc_nss\\.h$'], ['include', '^cert/test_root_certs_nss\\.cc$'], ['include', '^cert/x509_util_nss\\.cc$'], ['include', '^cert/x509_util_nss\\.h$'], ['include', '^dns/notify_watcher_mac\\.cc$'], ['include', '^proxy/proxy_resolver_mac\\.cc$'], ['include', '^proxy/proxy_server_mac\\.cc$'], ['include', '^ocsp/nss_ocsp\\.cc$'], ['include', '^ocsp/nss_ocsp\\.h$'], ], }], ], },
{ 'target_name': 'net_unittests', 'type': '<(gtest_target_type)',
'dependencies': [ '../base/base.gyp:base', '../base/base.gyp:base_i18n', '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations', '../build/temp_gyp/googleurl.gyp:googleurl', '../crypto/crypto.gyp:crypto', '../testing/gmock.gyp:gmock', '../testing/gtest.gyp:gtest', '../third_party/zlib/zlib.gyp:zlib', 'net', 'net_test_support', ],
'sources': [ 'android/keystore_unittest.cc', 'android/network_change_notifier_android_unittest.cc', 'base/address_list_unittest.cc', 'base/address_tracker_linux_unittest.cc', 'base/backoff_entry_unittest.cc', 'base/big_endian_unittest.cc', 'base/data_url_unittest.cc', 'base/directory_lister_unittest.cc', 'base/dns_util_unittest.cc', 'base/escape_unittest.cc', 'base/expiring_cache_unittest.cc', 'base/file_stream_unittest.cc', 'base/filter_unittest.cc', 'base/int128_unittest.cc', 'base/gzip_filter_unittest.cc', 'base/host_mapping_rules_unittest.cc', 'base/host_port_pair_unittest.cc', 'base/ip_endpoint_unittest.cc', 'base/keygen_handler_unittest.cc', 'base/mime_sniffer_unittest.cc', 'base/mime_util_unittest.cc', 'base/mock_filter_context.cc', 'base/mock_filter_context.h', 'base/net_log_unittest.cc', 'base/net_log_unittest.h', 'base/net_util_unittest.cc', 'base/network_change_notifier_win_unittest.cc', 'base/prioritized_dispatcher_unittest.cc', 'base/priority_queue_unittest.cc', 'base/registry_controlled_domains/registry_controlled_domain_unittest.cc', 'base/sdch_filter_unittest.cc', 'base/static_cookie_policy_unittest.cc', 'base/test_completion_callback_unittest.cc', 'base/upload_bytes_element_reader_unittest.cc', 'base/upload_data_stream_unittest.cc', 'base/upload_file_element_reader_unittest.cc', 'base/url_util_unittest.cc', 'cert/cert_verify_proc_unittest.cc', 'cert/crl_set_unittest.cc', 'cert/ev_root_ca_metadata_unittest.cc', 'cert/multi_threaded_cert_verifier_unittest.cc', 'cert/nss_cert_database_unittest.cc', 'cert/pem_tokenizer_unittest.cc', 'cert/x509_certificate_unittest.cc', 'cert/x509_cert_types_unittest.cc', 'cert/x509_util_unittest.cc', 'cert/x509_util_nss_unittest.cc', 'cert/x509_util_openssl_unittest.cc', 'cookies/canonical_cookie_unittest.cc',
'cookies/cookie_monster_unittest.cc', 'cookies/cookie_store_unittest.h', 'cookies/cookie_util_unittest.cc', 'cookies/parsed_cookie_unittest.cc', 'disk_cache/addr_unittest.cc', 'disk_cache/backend_unittest.cc', 'disk_cache/bitmap_unittest.cc', 'disk_cache/block_files_unittest.cc', 'disk_cache/cache_util_unittest.cc', 'disk_cache/entry_unittest.cc', 'disk_cache/mapped_file_unittest.cc', 'disk_cache/storage_block_unittest.cc', 'disk_cache/flash/flash_entry_unittest.cc', 'disk_cache/flash/log_store_entry_unittest.cc', 'disk_cache/flash/log_store_unittest.cc', 'disk_cache/flash/segment_unittest.cc', 'disk_cache/flash/storage_unittest.cc', 'dns/address_sorter_posix_unittest.cc', 'dns/address_sorter_unittest.cc', 'dns/dns_config_service_posix_unittest.cc', 'dns/dns_config_service_unittest.cc', 'dns/dns_config_service_win_unittest.cc', 'dns/dns_hosts_unittest.cc', 'dns/dns_query_unittest.cc', 'dns/dns_response_unittest.cc', 'dns/dns_session_unittest.cc', 'dns/dns_transaction_unittest.cc', 'dns/host_cache_unittest.cc', 'dns/host_resolver_impl_unittest.cc', 'dns/mapped_host_resolver_unittest.cc', 'dns/serial_worker_unittest.cc', 'dns/single_request_host_resolver_unittest.cc', 'ftp/ftp_auth_cache_unittest.cc', 'ftp/ftp_ctrl_response_buffer_unittest.cc', 'ftp/ftp_directory_listing_parser_ls_unittest.cc', 'ftp/ftp_directory_listing_parser_netware_unittest.cc', 'ftp/ftp_directory_listing_parser_os2_unittest.cc', 'ftp/ftp_directory_listing_parser_unittest.cc', 'ftp/ftp_directory_listing_parser_unittest.h', 'ftp/ftp_directory_listing_parser_vms_unittest.cc', 'ftp/ftp_directory_listing_parser_windows_unittest.cc', 'ftp/ftp_network_transaction_unittest.cc', 'ftp/ftp_util_unittest.cc', 'http/des_unittest.cc', 'http/http_auth_cache_unittest.cc', 'http/http_auth_controller_unittest.cc', 'http/http_auth_filter_unittest.cc', 'http/http_auth_gssapi_posix_unittest.cc', 'http/http_auth_handler_basic_unittest.cc', 'http/http_auth_handler_digest_unittest.cc', 'http/http_auth_handler_factory_unittest.cc', 'http/http_auth_handler_mock.cc', 'http/http_auth_handler_mock.h', 'http/http_auth_handler_negotiate_unittest.cc', 'http/http_auth_handler_unittest.cc', 'http/http_auth_sspi_win_unittest.cc', 'http/http_auth_unittest.cc', 'http/http_byte_range_unittest.cc', 'http/http_cache_unittest.cc', 'http/http_chunked_decoder_unittest.cc', 'http/http_content_disposition_unittest.cc', 'http/http_network_layer_unittest.cc', 'http/http_network_transaction_spdy3_unittest.cc', 'http/http_network_transaction_spdy2_unittest.cc', 'http/http_pipelined_connection_impl_unittest.cc', 'http/http_pipelined_host_forced_unittest.cc', 'http/http_pipelined_host_impl_unittest.cc', 'http/http_pipelined_host_pool_unittest.cc', 'http/http_pipelined_host_test_util.cc', 'http/http_pipelined_host_test_util.h', 'http/http_pipelined_network_transaction_unittest.cc', 'http/http_proxy_client_socket_pool_spdy2_unittest.cc', 'http/http_proxy_client_socket_pool_spdy3_unittest.cc', 'http/http_request_headers_unittest.cc', 'http/http_response_body_drainer_unittest.cc', 'http/http_response_headers_unittest.cc', 'http/http_security_headers_unittest.cc', 'http/http_server_properties_impl_unittest.cc', 'http/http_stream_factory_impl_unittest.cc', 'http/http_stream_parser_unittest.cc', 'http/http_transaction_unittest.cc', 'http/http_transaction_unittest.h', 'http/http_util_unittest.cc', 'http/http_vary_data_unittest.cc', 'http/mock_allow_url_security_manager.cc', 'http/mock_allow_url_security_manager.h', 'http/mock_gssapi_library_posix.cc', 
'http/mock_gssapi_library_posix.h', 'http/mock_http_cache.cc', 'http/mock_http_cache.h', 'http/mock_sspi_library_win.cc', 'http/mock_sspi_library_win.h', 'http/transport_security_state_unittest.cc', 'http/url_security_manager_unittest.cc', 'proxy/dhcp_proxy_script_adapter_fetcher_win_unittest.cc', 'proxy/dhcp_proxy_script_fetcher_factory_unittest.cc', 'proxy/dhcp_proxy_script_fetcher_win_unittest.cc', 'proxy/multi_threaded_proxy_resolver_unittest.cc', 'proxy/network_delegate_error_observer_unittest.cc', 'proxy/proxy_bypass_rules_unittest.cc', 'proxy/proxy_config_service_android_unittest.cc', 'proxy/proxy_config_service_linux_unittest.cc', 'proxy/proxy_config_service_win_unittest.cc', 'proxy/proxy_config_unittest.cc', 'proxy/proxy_info_unittest.cc', 'proxy/proxy_list_unittest.cc', 'proxy/proxy_resolver_v8_tracing_unittest.cc', 'proxy/proxy_resolver_v8_unittest.cc', 'proxy/proxy_script_decider_unittest.cc', 'proxy/proxy_script_fetcher_impl_unittest.cc', 'proxy/proxy_server_unittest.cc', 'proxy/proxy_service_unittest.cc', 'quic/blocked_list_test.cc', 'quic/congestion_control/available_channel_estimator_test.cc', 'quic/congestion_control/channel_estimator_test.cc', 'quic/congestion_control/cube_root_test.cc', 'quic/congestion_control/cubic_test.cc', 'quic/congestion_control/fix_rate_test.cc', 'quic/congestion_control/hybrid_slow_start_test.cc', 'quic/congestion_control/inter_arrival_bitrate_ramp_up_test.cc', 'quic/congestion_control/inter_arrival_overuse_detector_test.cc', 'quic/congestion_control/inter_arrival_probe_test.cc', 'quic/congestion_control/inter_arrival_receiver_test.cc', 'quic/congestion_control/inter_arrival_state_machine_test.cc', 'quic/congestion_control/inter_arrival_sender_test.cc', 'quic/congestion_control/leaky_bucket_test.cc', 'quic/congestion_control/paced_sender_test.cc', 'quic/congestion_control/quic_congestion_control_test.cc', 'quic/congestion_control/quic_congestion_manager_test.cc', 'quic/congestion_control/quic_max_sized_map_test.cc', 'quic/congestion_control/tcp_cubic_sender_test.cc', 'quic/congestion_control/tcp_receiver_test.cc', 'quic/crypto/aes_128_gcm_decrypter_test.cc', 'quic/crypto/aes_128_gcm_encrypter_test.cc', 'quic/crypto/crypto_framer_test.cc', 'quic/crypto/crypto_handshake_test.cc', 'quic/crypto/curve25519_key_exchange_test.cc', 'quic/crypto/null_decrypter_test.cc', 'quic/crypto/null_encrypter_test.cc', 'quic/crypto/p256_key_exchange_test.cc', 'quic/crypto/quic_random_test.cc', 'quic/crypto/strike_register_test.cc', 'quic/test_tools/crypto_test_utils.cc', 'quic/test_tools/crypto_test_utils.h', 'quic/test_tools/mock_clock.cc', 'quic/test_tools/mock_clock.h', 'quic/test_tools/mock_crypto_client_stream.cc', 'quic/test_tools/mock_crypto_client_stream.h', 'quic/test_tools/mock_crypto_client_stream_factory.cc', 'quic/test_tools/mock_crypto_client_stream_factory.h', 'quic/test_tools/mock_random.cc', 'quic/test_tools/mock_random.h', 'quic/test_tools/quic_connection_peer.cc', 'quic/test_tools/quic_connection_peer.h', 'quic/test_tools/quic_framer_peer.cc', 'quic/test_tools/quic_framer_peer.h', 'quic/test_tools/quic_packet_creator_peer.cc', 'quic/test_tools/quic_packet_creator_peer.h', 'quic/test_tools/quic_session_peer.cc', 'quic/test_tools/quic_session_peer.h', 'quic/test_tools/quic_test_utils.cc', 'quic/test_tools/quic_test_utils.h', 'quic/test_tools/reliable_quic_stream_peer.cc', 'quic/test_tools/reliable_quic_stream_peer.h', 'quic/test_tools/simple_quic_framer.cc', 'quic/test_tools/simple_quic_framer.h', 'quic/test_tools/test_task_runner.cc', 
'quic/test_tools/test_task_runner.h', 'quic/quic_bandwidth_test.cc', 'quic/quic_client_session_test.cc', 'quic/quic_clock_test.cc', 'quic/quic_connection_helper_test.cc', 'quic/quic_connection_test.cc', 'quic/quic_crypto_client_stream_test.cc', 'quic/quic_crypto_server_stream_test.cc', 'quic/quic_crypto_stream_test.cc', 'quic/quic_data_writer_test.cc', 'quic/quic_fec_group_test.cc', 'quic/quic_framer_test.cc', 'quic/quic_http_stream_test.cc', 'quic/quic_network_transaction_unittest.cc', 'quic/quic_packet_creator_test.cc', 'quic/quic_packet_entropy_manager_test.cc', 'quic/quic_packet_generator_test.cc', 'quic/quic_protocol_test.cc', 'quic/quic_reliable_client_stream_test.cc', 'quic/quic_session_test.cc', 'quic/quic_stream_factory_test.cc', 'quic/quic_stream_sequencer_test.cc', 'quic/quic_time_test.cc', 'quic/quic_utils_test.cc', 'quic/reliable_quic_stream_test.cc', 'socket/buffered_write_stream_socket_unittest.cc', 'socket/client_socket_pool_base_unittest.cc', 'socket/deterministic_socket_data_unittest.cc', 'socket/mock_client_socket_pool_manager.cc', 'socket/mock_client_socket_pool_manager.h', 'socket/socks5_client_socket_unittest.cc', 'socket/socks_client_socket_pool_unittest.cc', 'socket/socks_client_socket_unittest.cc', 'socket/ssl_client_socket_openssl_unittest.cc', 'socket/ssl_client_socket_pool_unittest.cc', 'socket/ssl_client_socket_unittest.cc', 'socket/ssl_server_socket_unittest.cc', 'socket/tcp_client_socket_unittest.cc', 'socket/tcp_listen_socket_unittest.cc', 'socket/tcp_listen_socket_unittest.h', 'socket/tcp_server_socket_unittest.cc', 'socket/transport_client_socket_pool_unittest.cc', 'socket/transport_client_socket_unittest.cc', 'socket/unix_domain_socket_posix_unittest.cc', 'socket_stream/socket_stream_metrics_unittest.cc', 'socket_stream/socket_stream_unittest.cc', 'spdy/buffered_spdy_framer_spdy3_unittest.cc', 'spdy/buffered_spdy_framer_spdy2_unittest.cc', 'spdy/spdy_credential_builder_unittest.cc', 'spdy/spdy_credential_state_unittest.cc', 'spdy/spdy_frame_builder_test.cc', 'spdy/spdy_frame_reader_test.cc', 'spdy/spdy_framer_test.cc', 'spdy/spdy_header_block_unittest.cc', 'spdy/spdy_http_stream_spdy3_unittest.cc', 'spdy/spdy_http_stream_spdy2_unittest.cc', 'spdy/spdy_http_utils_unittest.cc', 'spdy/spdy_network_transaction_spdy3_unittest.cc', 'spdy/spdy_network_transaction_spdy2_unittest.cc', 'spdy/spdy_priority_forest_test.cc', 'spdy/spdy_protocol_test.cc', 'spdy/spdy_proxy_client_socket_spdy3_unittest.cc', 'spdy/spdy_proxy_client_socket_spdy2_unittest.cc', 'spdy/spdy_session_spdy3_unittest.cc', 'spdy/spdy_session_spdy2_unittest.cc', 'spdy/spdy_stream_spdy3_unittest.cc', 'spdy/spdy_stream_spdy2_unittest.cc', 'spdy/spdy_stream_test_util.cc', 'spdy/spdy_stream_test_util.h', 'spdy/spdy_test_util_common.cc', 'spdy/spdy_test_util_common.h', 'spdy/spdy_test_util_spdy3.cc', 'spdy/spdy_test_util_spdy3.h', 'spdy/spdy_test_util_spdy2.cc', 'spdy/spdy_test_util_spdy2.h', 'spdy/spdy_test_utils.cc', 'spdy/spdy_test_utils.h', 'spdy/spdy_websocket_stream_spdy2_unittest.cc', 'spdy/spdy_websocket_stream_spdy3_unittest.cc', 'spdy/spdy_websocket_test_util_spdy2.cc', 'spdy/spdy_websocket_test_util_spdy2.h', 'spdy/spdy_websocket_test_util_spdy3.cc', 'spdy/spdy_websocket_test_util_spdy3.h', 'ssl/client_cert_store_impl_unittest.cc', 'ssl/default_server_bound_cert_store_unittest.cc', 'ssl/openssl_client_key_store_unittest.cc', 'ssl/server_bound_cert_service_unittest.cc', 'ssl/ssl_cipher_suite_names_unittest.cc', 'ssl/ssl_client_auth_cache_unittest.cc', 'ssl/ssl_config_service_unittest.cc', 
'test/python_utils_unittest.cc', 'test/run_all_unittests.cc', 'test/test_certificate_data.h', 'tools/dump_cache/url_to_filename_encoder.cc', 'tools/dump_cache/url_to_filename_encoder.h', 'tools/dump_cache/url_to_filename_encoder_unittest.cc', 'tools/dump_cache/url_utilities.h', 'tools/dump_cache/url_utilities.cc', 'tools/dump_cache/url_utilities_unittest.cc', 'udp/udp_socket_unittest.cc', 'url_request/url_fetcher_impl_unittest.cc', 'url_request/url_request_context_builder_unittest.cc', 'url_request/url_request_filter_unittest.cc', 'url_request/url_request_ftp_job_unittest.cc', 'url_request/url_request_http_job_unittest.cc', 'url_request/url_request_job_factory_impl_unittest.cc', 'url_request/url_request_job_unittest.cc', 'url_request/url_request_throttler_simulation_unittest.cc', 'url_request/url_request_throttler_test_support.cc', 'url_request/url_request_throttler_test_support.h', 'url_request/url_request_throttler_unittest.cc', 'url_request/url_request_unittest.cc', 'url_request/view_cache_helper_unittest.cc', 'websockets/websocket_errors_unittest.cc', 'websockets/websocket_frame_parser_unittest.cc', 'websockets/websocket_frame_unittest.cc', 'websockets/websocket_handshake_handler_unittest.cc', 'websockets/websocket_handshake_handler_spdy2_unittest.cc', 'websockets/websocket_handshake_handler_spdy3_unittest.cc', 'websockets/websocket_job_spdy2_unittest.cc', 'websockets/websocket_job_spdy3_unittest.cc', 'websockets/websocket_net_log_params_unittest.cc', 'websockets/websocket_throttle_unittest.cc', ],
'conditions': [ ['chromeos==1', { 'sources!': [ 'base/network_change_notifier_linux_unittest.cc', 'proxy/proxy_config_service_linux_unittest.cc', ], }],
[ 'OS == "android"', { 'sources!': [
# No res_ninit() et al on Android, so this doesn't make a lot of
# sense.
'dns/dns_config_service_posix_unittest.cc', 'ssl/client_cert_store_impl_unittest.cc', ], 'dependencies': [ 'net_javatests', 'net_test_jni_headers', ], }],
[ 'use_glib == 1', { 'dependencies': [ '../build/linux/system.gyp:ssl', ],
}, {  # else use_glib == 0: !posix || mac
'sources!': [ 'cert/nss_cert_database_unittest.cc', ], }, ],
[ 'toolkit_uses_gtk == 1', { 'dependencies': [ '../build/linux/system.gyp:gtk', ], }, ],
[ 'os_posix == 1 and OS != "mac" and OS != "android" and OS != "ios"', { 'conditions': [ ['linux_use_tcmalloc==1', { 'dependencies': [ '../base/allocator/allocator.gyp:allocator', ], }], ], }],
[ 'use_kerberos==1', { 'defines': [ 'USE_KERBEROS', ],
}, {  # use_kerberos == 0
'sources!': [ 'http/http_auth_gssapi_posix_unittest.cc', 'http/http_auth_handler_negotiate_unittest.cc', 'http/mock_gssapi_library_posix.cc', 'http/mock_gssapi_library_posix.h', ], }],
[ 'use_openssl==1', {
# When building for OpenSSL, we need to exclude NSS specific tests.
# TODO(bulach): Add equivalent tests when the underlying
# functionality is ported to OpenSSL.
'sources!': [ 'cert/nss_cert_database_unittest.cc', 'cert/x509_util_nss_unittest.cc', 'ssl/client_cert_store_impl_unittest.cc', ],
}, {  # else !use_openssl: remove the unneeded files
'sources!': [ 'cert/x509_util_openssl_unittest.cc', 'socket/ssl_client_socket_openssl_unittest.cc', 'ssl/openssl_client_key_store_unittest.cc', ], }, ],
[ 'enable_websockets != 1', { 'sources/': [ ['exclude', '^socket_stream/'], ['exclude', '^websockets/'], ['exclude', '^spdy/spdy_websocket_stream_spdy._unittest\\.cc$'], ], }],
[ 'disable_ftp_support==1', { 'sources/': [ ['exclude', '^ftp/'], ], 'sources!': [ 'url_request/url_request_ftp_job_unittest.cc', ], }, ],
[ 'enable_built_in_dns!=1', { 'sources!': [ 'dns/address_sorter_posix_unittest.cc', 'dns/address_sorter_unittest.cc', ], }, ],
[ 'use_v8_in_net==1', { 'dependencies': [ 'net_with_v8', ],
}, {  # else: !use_v8_in_net
'sources!': [ 'proxy/proxy_resolver_v8_unittest.cc', 'proxy/proxy_resolver_v8_tracing_unittest.cc', ], }, ],
[ 'OS == "win"', { 'sources!': [ 'dns/dns_config_service_posix_unittest.cc', 'http/http_auth_gssapi_posix_unittest.cc', ],
# This is needed to trigger the dll copy step on windows.
# TODO(mark): Specifying this here shouldn't be necessary.
'dependencies': [ '../third_party/icu/icu.gyp:icudata', '../third_party/nss/nss.gyp:nspr', '../third_party/nss/nss.gyp:nss', 'third_party/nss/ssl.gyp:libssl', ],
# TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [4267, ], }, ],
[ 'OS == "mac"', { 'dependencies': [ '../third_party/nss/nss.gyp:nspr', '../third_party/nss/nss.gyp:nss', 'third_party/nss/ssl.gyp:libssl', ], }, ],
[ 'OS == "ios"', { 'dependencies': [ '../third_party/nss/nss.gyp:nss', ], 'actions': [ { 'action_name': 'copy_test_data', 'variables': { 'test_data_files': [ 'data/ssl/certificates/', 'data/url_request_unittest/', ], 'test_data_prefix': 'net', }, 'includes': [ '../build/copy_test_data_ios.gypi' ], }, ],
'sources!': [
# TODO(droger): The following tests are disabled because the
# implementation is missing or incomplete.
# KeygenHandler::GenKeyAndSignChallenge() is not ported to iOS.
'base/keygen_handler_unittest.cc',
# Need to read input data files.
'base/gzip_filter_unittest.cc', 'disk_cache/backend_unittest.cc', 'disk_cache/block_files_unittest.cc', 'socket/ssl_server_socket_unittest.cc',
# Need TestServer.
'proxy/proxy_script_fetcher_impl_unittest.cc', 'socket/ssl_client_socket_unittest.cc', 'ssl/client_cert_store_impl_unittest.cc', 'url_request/url_fetcher_impl_unittest.cc', 'url_request/url_request_context_builder_unittest.cc',
# Needs GetAppOutput().
'test/python_utils_unittest.cc',
# The following tests are disabled because they don't apply to
# iOS.
# OS is not "linux" or "freebsd" or "openbsd".
'socket/unix_domain_socket_posix_unittest.cc', ], 'conditions': [ ['coverage != 0', { 'sources!': [ # These sources can't be built with coverage due to a # toolchain bug: http://openradar.appspot.com/radar?id=1499403 'http/transport_security_state_unittest.cc', # These tests crash when run with coverage turned on due to an # issue with llvm_gcda_increment_indirect_counter: # http://crbug.com/156058 'cookies/cookie_monster_unittest.cc', 'cookies/cookie_store_unittest.h', 'http/http_auth_controller_unittest.cc', 'http/http_network_layer_unittest.cc', 'http/http_network_transaction_spdy2_unittest.cc', 'http/http_network_transaction_spdy3_unittest.cc', 'spdy/spdy_http_stream_spdy2_unittest.cc', 'spdy/spdy_http_stream_spdy3_unittest.cc', 'spdy/spdy_proxy_client_socket_spdy3_unittest.cc', 'spdy/spdy_session_spdy3_unittest.cc', # These tests crash when run with coverage turned on: # http://crbug.com/177203 'proxy/proxy_service_unittest.cc', ], }], ], }], [ 'OS == "linux"', { 'dependencies': [ '../build/linux/system.gyp:dbus', '../dbus/dbus.gyp:dbus_test_support', ], }, ], [ 'OS == "android"', { 'dependencies': [ '../third_party/openssl/openssl.gyp:openssl', ], 'sources!': [ 'dns/dns_config_service_posix_unittest.cc', ], }, ], ['OS == "android" and gtest_target_type == "shared_library"', { 'dependencies': [ '../testing/android/native_test.gyp:native_test_native_code', ] }], [ 'OS != "win" and OS != "mac"', { 'sources!': [ 'cert/x509_cert_types_unittest.cc', ], }], ], }, { 'target_name': 'net_perftests', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', '../base/base.gyp:base_i18n', '../base/base.gyp:test_support_perf', '../build/temp_gyp/googleurl.gyp:googleurl', '../testing/gtest.gyp:gtest', 'net', 'net_test_support', ], 'sources': [ 'cookies/cookie_monster_perftest.cc', 'disk_cache/disk_cache_perftest.cc', 'proxy/proxy_resolver_perftest.cc', ], 'conditions': [ [ 'use_v8_in_net==1', { 'dependencies': [ 'net_with_v8', ], }, { # else: !use_v8_in_net 'sources!': [ 'proxy/proxy_resolver_perftest.cc', ], }, ], # This is needed to trigger the dll copy step on windows. # TODO(mark): Specifying this here shouldn't be necessary. [ 'OS == "win"', { 'dependencies': [ '../third_party/icu/icu.gyp:icudata', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 
'msvs_disabled_warnings': [4267, ], }, ], ], }, { 'target_name': 'net_test_support', 'type': 'static_library', 'dependencies': [ '../base/base.gyp:base', '../base/base.gyp:test_support_base', '../build/temp_gyp/googleurl.gyp:googleurl', '../testing/gtest.gyp:gtest', 'net', ], 'export_dependent_settings': [ '../base/base.gyp:base', '../base/base.gyp:test_support_base', '../testing/gtest.gyp:gtest', ], 'sources': [ 'base/capturing_net_log.cc', 'base/capturing_net_log.h', 'base/load_timing_info_test_util.cc', 'base/load_timing_info_test_util.h', 'base/mock_file_stream.cc', 'base/mock_file_stream.h', 'base/test_completion_callback.cc', 'base/test_completion_callback.h', 'base/test_data_directory.cc', 'base/test_data_directory.h', 'cert/mock_cert_verifier.cc', 'cert/mock_cert_verifier.h', 'cookies/cookie_monster_store_test.cc', 'cookies/cookie_monster_store_test.h', 'cookies/cookie_store_test_callbacks.cc', 'cookies/cookie_store_test_callbacks.h', 'cookies/cookie_store_test_helpers.cc', 'cookies/cookie_store_test_helpers.h', 'disk_cache/disk_cache_test_base.cc', 'disk_cache/disk_cache_test_base.h', 'disk_cache/disk_cache_test_util.cc', 'disk_cache/disk_cache_test_util.h', 'disk_cache/flash/flash_cache_test_base.h', 'disk_cache/flash/flash_cache_test_base.cc', 'dns/dns_test_util.cc', 'dns/dns_test_util.h', 'dns/mock_host_resolver.cc', 'dns/mock_host_resolver.h', 'proxy/mock_proxy_resolver.cc', 'proxy/mock_proxy_resolver.h', 'proxy/mock_proxy_script_fetcher.cc', 'proxy/mock_proxy_script_fetcher.h', 'proxy/proxy_config_service_common_unittest.cc', 'proxy/proxy_config_service_common_unittest.h', 'socket/socket_test_util.cc', 'socket/socket_test_util.h', 'test/base_test_server.cc', 'test/base_test_server.h', 'test/cert_test_util.cc', 'test/cert_test_util.h', 'test/local_test_server_posix.cc', 'test/local_test_server_win.cc', 'test/local_test_server.cc', 'test/local_test_server.h', 'test/net_test_suite.cc', 'test/net_test_suite.h', 'test/python_utils.cc', 'test/python_utils.h', 'test/remote_test_server.cc', 'test/remote_test_server.h', 'test/spawner_communicator.cc', 'test/spawner_communicator.h', 'test/test_server.h', 'url_request/test_url_fetcher_factory.cc', 'url_request/test_url_fetcher_factory.h', 'url_request/url_request_test_util.cc', 'url_request/url_request_test_util.h', ], 'conditions': [ ['inside_chromium_build==1 and OS != "ios"', { 'dependencies': [ '../third_party/protobuf/protobuf.gyp:py_proto', ], }], ['os_posix == 1 and OS != "mac" and OS != "android" and OS != "ios"', { 'conditions': [ ['use_openssl==1', { 'dependencies': [ '../third_party/openssl/openssl.gyp:openssl', ], }, { 'dependencies': [ '../build/linux/system.gyp:ssl', ], }], ], }], ['os_posix == 1 and OS != "mac" and OS != "android" and OS != "ios"', { 'conditions': [ ['linux_use_tcmalloc==1', { 'dependencies': [ '../base/allocator/allocator.gyp:allocator', ], }], ], }], ['OS != "android"', { 'sources!': [ 'test/remote_test_server.cc', 'test/remote_test_server.h', 'test/spawner_communicator.cc', 'test/spawner_communicator.h', ], }], ['OS == "ios"', { 'dependencies': [ '../third_party/nss/nss.gyp:nss', ], }], [ 'use_v8_in_net==1', { 'dependencies': [ 'net_with_v8', ], }, ], ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 
'msvs_disabled_warnings': [4267, ], }, { 'target_name': 'net_resources', 'type': 'none', 'variables': { 'grit_out_dir': '<(SHARED_INTERMEDIATE_DIR)/net', }, 'actions': [ { 'action_name': 'net_resources', 'variables': { 'grit_grd_file': 'base/net_resources.grd', }, 'includes': [ '../build/grit_action.gypi' ], }, ], 'includes': [ '../build/grit_target.gypi' ], }, { 'target_name': 'http_server', 'type': 'static_library', 'variables': { 'enable_wexit_time_destructors': 1, }, 'dependencies': [ '../base/base.gyp:base', 'net', ], 'sources': [ 'server/http_connection.cc', 'server/http_connection.h', 'server/http_server.cc', 'server/http_server.h', 'server/http_server_request_info.cc', 'server/http_server_request_info.h', 'server/web_socket.cc', 'server/web_socket.h', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 'msvs_disabled_warnings': [4267, ], }, { 'target_name': 'dump_cache', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', 'net', 'net_test_support', ], 'sources': [ 'tools/dump_cache/cache_dumper.cc', 'tools/dump_cache/cache_dumper.h', 'tools/dump_cache/dump_cache.cc', 'tools/dump_cache/dump_files.cc', 'tools/dump_cache/dump_files.h', 'tools/dump_cache/simple_cache_dumper.cc', 'tools/dump_cache/simple_cache_dumper.h', 'tools/dump_cache/upgrade_win.cc', 'tools/dump_cache/upgrade_win.h', 'tools/dump_cache/url_to_filename_encoder.cc', 'tools/dump_cache/url_to_filename_encoder.h', 'tools/dump_cache/url_utilities.h', 'tools/dump_cache/url_utilities.cc', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 'msvs_disabled_warnings': [4267, ], }, ], 'conditions': [ ['use_v8_in_net == 1', { 'targets': [ { 'target_name': 'net_with_v8', 'type': '<(component)', 'variables': { 'enable_wexit_time_destructors': 1, }, 'dependencies': [ '../base/base.gyp:base', '../build/temp_gyp/googleurl.gyp:googleurl', '../v8/tools/gyp/v8.gyp:v8', 'net' ], 'defines': [ 'NET_IMPLEMENTATION', ], 'sources': [ 'proxy/proxy_resolver_v8.cc', 'proxy/proxy_resolver_v8.h', 'proxy/proxy_resolver_v8_tracing.cc', 'proxy/proxy_resolver_v8_tracing.h', 'proxy/proxy_service_v8.cc', 'proxy/proxy_service_v8.h', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 'msvs_disabled_warnings': [4267, ], }, ], }], ['OS != "ios"', { 'targets': [ # iOS doesn't have the concept of simple executables, these targets # can't be compiled on the platform. { 'target_name': 'crash_cache', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', 'net', 'net_test_support', ], 'sources': [ 'tools/crash_cache/crash_cache.cc', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 'msvs_disabled_warnings': [4267, ], }, { 'target_name': 'crl_set_dump', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', 'net', ], 'sources': [ 'tools/crl_set_dump/crl_set_dump.cc', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 'msvs_disabled_warnings': [4267, ], }, { 'target_name': 'dns_fuzz_stub', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', 'net', ], 'sources': [ 'tools/dns_fuzz_stub/dns_fuzz_stub.cc', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 
'msvs_disabled_warnings': [4267, ], }, { 'target_name': 'fetch_client', 'type': 'executable', 'variables': { 'enable_wexit_time_destructors': 1, }, 'dependencies': [ '../base/base.gyp:base', '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations', '../build/temp_gyp/googleurl.gyp:googleurl', '../testing/gtest.gyp:gtest', 'net', 'net_with_v8', ], 'sources': [ 'tools/fetch/fetch_client.cc', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 'msvs_disabled_warnings': [4267, ], }, { 'target_name': 'fetch_server', 'type': 'executable', 'variables': { 'enable_wexit_time_destructors': 1, }, 'dependencies': [ '../base/base.gyp:base', '../build/temp_gyp/googleurl.gyp:googleurl', 'net', ], 'sources': [ 'tools/fetch/fetch_server.cc', 'tools/fetch/http_listen_socket.cc', 'tools/fetch/http_listen_socket.h', 'tools/fetch/http_server.cc', 'tools/fetch/http_server.h', 'tools/fetch/http_server_request_info.cc', 'tools/fetch/http_server_request_info.h', 'tools/fetch/http_server_response_info.cc', 'tools/fetch/http_server_response_info.h', 'tools/fetch/http_session.cc', 'tools/fetch/http_session.h', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 'msvs_disabled_warnings': [4267, ], }, { 'target_name': 'gdig', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', 'net', ], 'sources': [ 'tools/gdig/file_net_log.cc', 'tools/gdig/gdig.cc', ], }, { 'target_name': 'get_server_time', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', '../base/base.gyp:base_i18n', '../build/temp_gyp/googleurl.gyp:googleurl', 'net', ], 'sources': [ 'tools/get_server_time/get_server_time.cc', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 'msvs_disabled_warnings': [4267, ], }, { 'target_name': 'net_watcher', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', 'net', 'net_with_v8', ], 'conditions': [ [ 'use_glib == 1', { 'dependencies': [ '../build/linux/system.gyp:gconf', '../build/linux/system.gyp:gio', ], }, ], ], 'sources': [ 'tools/net_watcher/net_watcher.cc', ], }, { 'target_name': 'run_testserver', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', '../base/base.gyp:test_support_base', '../testing/gtest.gyp:gtest', 'net_test_support', ], 'sources': [ 'tools/testserver/run_testserver.cc', ], }, { 'target_name': 'stress_cache', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', 'net', 'net_test_support', ], 'sources': [ 'disk_cache/stress_cache.cc', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 'msvs_disabled_warnings': [4267, ], }, { 'target_name': 'tld_cleanup', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', '../base/base.gyp:base_i18n', '../build/temp_gyp/googleurl.gyp:googleurl', ], 'sources': [ 'tools/tld_cleanup/tld_cleanup.cc', ], # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. 
'msvs_disabled_warnings': [4267, ], }, ], }], ['os_posix == 1 and OS != "mac" and OS != "ios" and OS != "android"', { 'targets': [ { 'target_name': 'flip_balsa_and_epoll_library', 'type': 'static_library', 'dependencies': [ '../base/base.gyp:base', 'net', ], 'sources': [ 'tools/flip_server/balsa_enums.h', 'tools/flip_server/balsa_frame.cc', 'tools/flip_server/balsa_frame.h', 'tools/flip_server/balsa_headers.cc', 'tools/flip_server/balsa_headers.h', 'tools/flip_server/balsa_headers_token_utils.cc', 'tools/flip_server/balsa_headers_token_utils.h', 'tools/flip_server/balsa_visitor_interface.h', 'tools/flip_server/constants.h', 'tools/flip_server/epoll_server.cc', 'tools/flip_server/epoll_server.h', 'tools/flip_server/http_message_constants.cc', 'tools/flip_server/http_message_constants.h', 'tools/flip_server/split.h', 'tools/flip_server/split.cc', ], }, { 'target_name': 'flip_in_mem_edsm_server', 'type': 'executable', 'cflags': [ '-Wno-deprecated', ], 'dependencies': [ '../base/base.gyp:base', '../third_party/openssl/openssl.gyp:openssl', 'flip_balsa_and_epoll_library', 'net', ], 'sources': [ 'tools/dump_cache/url_to_filename_encoder.cc', 'tools/dump_cache/url_to_filename_encoder.h', 'tools/dump_cache/url_utilities.h', 'tools/dump_cache/url_utilities.cc', 'tools/flip_server/acceptor_thread.h', 'tools/flip_server/acceptor_thread.cc', 'tools/flip_server/buffer_interface.h', 'tools/flip_server/create_listener.cc', 'tools/flip_server/create_listener.h', 'tools/flip_server/flip_config.cc', 'tools/flip_server/flip_config.h', 'tools/flip_server/flip_in_mem_edsm_server.cc', 'tools/flip_server/http_interface.cc', 'tools/flip_server/http_interface.h', 'tools/flip_server/loadtime_measurement.h', 'tools/flip_server/mem_cache.h', 'tools/flip_server/mem_cache.cc', 'tools/flip_server/output_ordering.cc', 'tools/flip_server/output_ordering.h', 'tools/flip_server/ring_buffer.cc', 'tools/flip_server/ring_buffer.h', 'tools/flip_server/simple_buffer.cc', 'tools/flip_server/simple_buffer.h', 'tools/flip_server/sm_connection.cc', 'tools/flip_server/sm_connection.h', 'tools/flip_server/sm_interface.h', 'tools/flip_server/spdy_ssl.cc', 'tools/flip_server/spdy_ssl.h', 'tools/flip_server/spdy_interface.cc', 'tools/flip_server/spdy_interface.h', 'tools/flip_server/spdy_util.cc', 'tools/flip_server/spdy_util.h', 'tools/flip_server/streamer_interface.cc', 'tools/flip_server/streamer_interface.h', 'tools/flip_server/string_piece_utils.h', ], }, { 'target_name': 'quic_library', 'type': 'static_library', 'dependencies': [ '../base/base.gyp:base', '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations', '../build/temp_gyp/googleurl.gyp:googleurl', '../third_party/openssl/openssl.gyp:openssl', 'flip_balsa_and_epoll_library', 'net', ], 'sources': [ 'tools/quic/quic_client.cc', 'tools/quic/quic_client.h', 'tools/quic/quic_client_session.cc', 'tools/quic/quic_client_session.h', 'tools/quic/quic_dispatcher.h', 'tools/quic/quic_dispatcher.cc', 'tools/quic/quic_epoll_clock.cc', 'tools/quic/quic_epoll_clock.h', 'tools/quic/quic_epoll_connection_helper.cc', 'tools/quic/quic_epoll_connection_helper.h', 'tools/quic/quic_in_memory_cache.cc', 'tools/quic/quic_in_memory_cache.h', 'tools/quic/quic_packet_writer.h', 'tools/quic/quic_reliable_client_stream.cc', 'tools/quic/quic_reliable_client_stream.h', 'tools/quic/quic_reliable_server_stream.cc', 'tools/quic/quic_reliable_server_stream.h', 'tools/quic/quic_server.cc', 'tools/quic/quic_server.h', 'tools/quic/quic_server_session.cc', 
'tools/quic/quic_server_session.h', 'tools/quic/quic_socket_utils.cc', 'tools/quic/quic_socket_utils.h', 'tools/quic/quic_spdy_client_stream.cc', 'tools/quic/quic_spdy_client_stream.h', 'tools/quic/quic_spdy_server_stream.cc', 'tools/quic/quic_spdy_server_stream.h', 'tools/quic/quic_time_wait_list_manager.h', 'tools/quic/quic_time_wait_list_manager.cc', 'tools/quic/spdy_utils.cc', 'tools/quic/spdy_utils.h', ], }, { 'target_name': 'quic_client', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', '../third_party/openssl/openssl.gyp:openssl', 'net', 'quic_library', ], 'sources': [ 'tools/quic/quic_client_bin.cc', ], }, { 'target_name': 'quic_server', 'type': 'executable', 'dependencies': [ '../base/base.gyp:base', '../third_party/openssl/openssl.gyp:openssl', 'net', 'quic_library', ], 'sources': [ 'tools/quic/quic_server_bin.cc', ], }, { 'target_name': 'quic_unittests', 'type': '<(gtest_target_type)', 'dependencies': [ '../base/base.gyp:test_support_base', '../testing/gmock.gyp:gmock', '../testing/gtest.gyp:gtest', 'net', 'quic_library', ], 'sources': [ 'quic/test_tools/quic_session_peer.cc', 'quic/test_tools/quic_session_peer.h', 'quic/test_tools/crypto_test_utils.cc', 'quic/test_tools/crypto_test_utils.h', 'quic/test_tools/mock_clock.cc', 'quic/test_tools/mock_clock.h', 'quic/test_tools/mock_random.cc', 'quic/test_tools/mock_random.h', 'quic/test_tools/simple_quic_framer.cc', 'quic/test_tools/simple_quic_framer.h', 'quic/test_tools/quic_connection_peer.cc', 'quic/test_tools/quic_connection_peer.h', 'quic/test_tools/quic_framer_peer.cc', 'quic/test_tools/quic_framer_peer.h', 'quic/test_tools/quic_session_peer.cc', 'quic/test_tools/quic_session_peer.h', 'quic/test_tools/quic_test_utils.cc', 'quic/test_tools/quic_test_utils.h', 'quic/test_tools/reliable_quic_stream_peer.cc', 'quic/test_tools/reliable_quic_stream_peer.h', 'tools/flip_server/simple_buffer.cc', 'tools/flip_server/simple_buffer.h', 'tools/quic/end_to_end_test.cc', 'tools/quic/quic_client_session_test.cc', 'tools/quic/quic_dispatcher_test.cc', 'tools/quic/quic_epoll_clock_test.cc', 'tools/quic/quic_epoll_connection_helper_test.cc', 'tools/quic/quic_reliable_client_stream_test.cc', 'tools/quic/quic_reliable_server_stream_test.cc', 'tools/quic/test_tools/http_message_test_utils.cc', 'tools/quic/test_tools/http_message_test_utils.h', 'tools/quic/test_tools/mock_epoll_server.cc', 'tools/quic/test_tools/mock_epoll_server.h', 'tools/quic/test_tools/quic_test_client.cc', 'tools/quic/test_tools/quic_test_client.h', 'tools/quic/test_tools/quic_test_utils.cc', 'tools/quic/test_tools/quic_test_utils.h', 'tools/quic/test_tools/run_all_unittests.cc', ], } ] }], ['OS=="android"', { 'targets': [ { 'target_name': 'net_jni_headers', 'type': 'none', 'sources': [ 'android/java/src/org/chromium/net/AndroidKeyStore.java', 'android/java/src/org/chromium/net/AndroidNetworkLibrary.java', 'android/java/src/org/chromium/net/GURLUtils.java', 'android/java/src/org/chromium/net/NetworkChangeNotifier.java', 'android/java/src/org/chromium/net/ProxyChangeListener.java', ], 'variables': { 'jni_gen_package': 'net', }, 'direct_dependent_settings': { 'include_dirs': [ '<(SHARED_INTERMEDIATE_DIR)/net', ], }, 'includes': [ '../build/jni_generator.gypi' ], }, { 'target_name': 'net_test_jni_headers', 'type': 'none', 'sources': [ 'android/javatests/src/org/chromium/net/AndroidKeyStoreTestUtil.java', ], 'variables': { 'jni_gen_package': 'net', }, 'direct_dependent_settings': { 'include_dirs': [ '<(SHARED_INTERMEDIATE_DIR)/net', ], }, 'includes': [ 
'../build/jni_generator.gypi' ], }, { 'target_name': 'net_java', 'type': 'none', 'variables': { 'java_in_dir': '../net/android/java', }, 'dependencies': [ '../base/base.gyp:base', 'cert_verify_result_android_java', 'certificate_mime_types_java', 'net_errors_java', 'private_key_types_java', ], 'includes': [ '../build/java.gypi' ], }, { 'target_name': 'net_java_test_support', 'type': 'none', 'variables': { 'java_in_dir': '../net/test/android/javatests', }, 'includes': [ '../build/java.gypi' ], }, { 'target_name': 'net_javatests', 'type': 'none', 'variables': { 'java_in_dir': '../net/android/javatests', }, 'dependencies': [ '../base/base.gyp:base', '../base/base.gyp:base_java_test_support', 'net_java', ], 'includes': [ '../build/java.gypi' ], }, { 'target_name': 'net_errors_java', 'type': 'none', 'sources': [ 'android/java/NetError.template', ], 'variables': { 'package_name': 'org/chromium/net', 'template_deps': ['base/net_error_list.h'], }, 'includes': [ '../build/android/java_cpp_template.gypi' ], }, { 'target_name': 'certificate_mime_types_java', 'type': 'none', 'sources': [ 'android/java/CertificateMimeType.template', ], 'variables': { 'package_name': 'org/chromium/net', 'template_deps': ['base/mime_util_certificate_type_list.h'], }, 'includes': [ '../build/android/java_cpp_template.gypi' ], }, { 'target_name': 'cert_verify_result_android_java', 'type': 'none', 'sources': [ 'android/java/CertVerifyResultAndroid.template', ], 'variables': { 'package_name': 'org/chromium/net', 'template_deps': ['android/cert_verify_result_android_list.h'], }, 'includes': [ '../build/android/java_cpp_template.gypi' ], }, { 'target_name': 'private_key_types_java', 'type': 'none', 'sources': [ 'android/java/PrivateKeyType.template', ], 'variables': { 'package_name': 'org/chromium/net', 'template_deps': ['android/private_key_type_list.h'], }, 'includes': [ '../build/android/java_cpp_template.gypi' ], }, ], }], # Special target to wrap a gtest_target_type==shared_library # net_unittests into an android apk for execution. # See base.gyp for TODO(jrg)s about this strategy. ['OS == "android" and gtest_target_type == "shared_library"', { 'targets': [ { 'target_name': 'net_unittests_apk', 'type': 'none', 'dependencies': [ 'net_java', 'net_javatests', 'net_unittests', ], 'variables': { 'test_suite_name': 'net_unittests', 'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)net_unittests<(SHARED_LIB_SUFFIX)', }, 'includes': [ '../build/apk_test.gypi' ], }, ], }], ['test_isolation_mode != "noop"', { 'targets': [ { 'target_name': 'net_unittests_run', 'type': 'none', 'dependencies': [ 'net_unittests', ], 'includes': [ 'net_unittests.isolate', ], 'actions': [ { 'action_name': 'isolate', 'inputs': [ 'net_unittests.isolate', '<@(isolate_dependency_tracked)', ], 'outputs': [ '<(PRODUCT_DIR)/net_unittests.isolated', ], 'action': [ 'python', '../tools/swarm_client/isolate.py', '<(test_isolation_mode)', '--outdir', '<(test_isolation_outdir)', '--variable', 'PRODUCT_DIR', '<(PRODUCT_DIR)', '--variable', 'OS', '<(OS)', '--result', '<@(_outputs)', '--isolate', 'net_unittests.isolate', ], }, ], }, ], }], ], }
[]
mdemoret-nv/clx
python/clx/analytics/detector.py
3737706187d8f5720561e10b85cbd638c77b9267
import logging
import torch
import torch.nn as nn
from abc import ABC, abstractmethod

log = logging.getLogger(__name__)


class Detector(ABC):
    def __init__(self, lr=0.001):
        self.lr = lr
        self.__model = None
        self.__optimizer = None
        self.__criterion = nn.CrossEntropyLoss()

    @property
    def model(self):
        return self.__model

    @property
    def optimizer(self):
        return self.__optimizer

    @property
    def criterion(self):
        return self.__criterion

    @abstractmethod
    def init_model(self, char_vocab, hidden_size, n_domain_type, n_layers):
        pass

    @abstractmethod
    def train_model(self, epoch, train_dataset):
        pass

    @abstractmethod
    def predict(self, epoch, train_dataset):
        pass

    def load_model(self, file_path):
        """
        This function loads an already saved model and sets cuda parameters.

        :param file_path: File path of the model to be loaded.
        :type file_path: string
        """
        model = torch.load(file_path)
        model.eval()
        self.__model = model
        self.__set_model2cuda()
        self.__set_optimizer()

    def save_model(self, file_path):
        """
        This function saves the model to the given location.

        :param file_path: File path to save the model.
        :type file_path: string
        """
        torch.save(self.model, file_path)

    def __set_parallelism(self):
        gpu_count = torch.cuda.device_count()
        if gpu_count > 1:
            log.info("%s GPUs!" % (gpu_count))
            self.__model = nn.DataParallel(self.model)
            self.__set_model2cuda()
        else:
            self.__set_model2cuda()

    def __set_optimizer(self):
        self.__optimizer = torch.optim.RMSprop(
            self.model.parameters(), self.lr, weight_decay=0.0
        )

    def __set_model2cuda(self):
        if torch.cuda.is_available():
            log.info("Setting cuda")
            self.model.cuda()

    def leverage_model(self, model):
        """This function leverages the model by setting parallelism parameters.

        :param model: Model instance.
        :type model: RNNClassifier
        """
        self.__model = model
        self.__set_parallelism()
        self.__set_optimizer()
[((93, 120), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (110, 120), False, 'import logging\n'), ((286, 307), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (305, 307), True, 'import torch.nn as nn\n'), ((1022, 1043), 'torch.load', 'torch.load', (['file_path'], {}), '(file_path)\n', (1032, 1043), False, 'import torch\n'), ((1356, 1389), 'torch.save', 'torch.save', (['self.model', 'file_path'], {}), '(self.model, file_path)\n', (1366, 1389), False, 'import torch\n'), ((1444, 1469), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (1467, 1469), False, 'import torch\n'), ((1881, 1906), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1904, 1906), False, 'import torch\n'), ((1570, 1597), 'torch.nn.DataParallel', 'nn.DataParallel', (['self.model'], {}), '(self.model)\n', (1585, 1597), True, 'import torch.nn as nn\n')]
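A minimal sketch of how the abstract Detector above might be filled in and used. The ToyModel architecture, the (inputs, labels) shape of train_dataset, and all names below are illustrative assumptions, not part of the clx source.

# Hypothetical concrete detector; assumes the Detector class from the record
# above is importable as clx.analytics.detector.Detector.
import torch
import torch.nn as nn
from clx.analytics.detector import Detector

class ToyModel(nn.Module):
    def __init__(self, char_vocab, hidden_size, n_domain_type, n_layers):
        super().__init__()
        self.embed = nn.Embedding(char_vocab, hidden_size)
        self.rnn = nn.GRU(hidden_size, hidden_size, n_layers, batch_first=True)
        self.out = nn.Linear(hidden_size, n_domain_type)

    def forward(self, x):
        h, _ = self.rnn(self.embed(x))
        return self.out(h[:, -1, :])  # classify from the last time step

class ToyDetector(Detector):
    def init_model(self, char_vocab, hidden_size, n_domain_type, n_layers):
        # leverage_model() wires up DataParallel/CUDA and the RMSprop optimizer.
        self.leverage_model(ToyModel(char_vocab, hidden_size, n_domain_type, n_layers))

    def train_model(self, epoch, train_dataset):
        # assumption: train_dataset yields (inputs, labels) batches
        for inputs, labels in train_dataset:
            self.optimizer.zero_grad()
            loss = self.criterion(self.model(inputs), labels)
            loss.backward()
            self.optimizer.step()

    def predict(self, epoch, train_dataset):
        with torch.no_grad():
            return [self.model(inputs).argmax(dim=1) for inputs, _ in train_dataset]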
openplans/streetscore
street_score/project/models.py
ea27f70399b070d3199f236d99575c5962943d67
import math
import random

from django.db import models


class TimeStampedModel (models.Model):
    """
    Base model class for when you want to keep track of created and updated
    times for model instances.
    """
    created_datetime = models.DateTimeField(auto_now_add=True)
    updated_datetime = models.DateTimeField(auto_now=True)

    class Meta:
        abstract = True


class Rating (TimeStampedModel):
    criterion = models.ForeignKey('Criterion', related_name='ratings')
    """ The criterion that this rating is for. """

    place1 = models.ForeignKey('Place', related_name='+')
    """ The first place that this rating compares """

    place2 = models.ForeignKey('Place', related_name='+')
    """ The second place that this rating compares """

    score = models.IntegerField()
    """ The rating score.  1 means that place1 "wins" over place2 for the
        given criterion.  -1 means that place2 "wins".
    """

    user_info = models.ForeignKey('sessions.UserInfo', null=True, related_name='ratings')
    """ The information for the user that made this rating.  Not required,
        but useful for data analysis.
    """

    def __unicode__(self):
        meaning = ({
            -1: 'more {0} than',
            1: 'less {0} than',
            0: 'as {0} as',
        })
        return ('Place #{p1} is {rating} place #{p2}').format(
            p1=self.place1, p2=self.place2,
            rating=meaning[self.score].format(self.criterion.prompt))

    @property
    def question(self):
        """ The question string to which the rating is a response. """
        return self.criterion.prompt


class Criterion (models.Model):
    prompt = models.TextField()
    """ The question prompt, i.e. 'How clean is the street?'. """

    def __unicode__(self):
        return self.prompt

    class Meta:
        verbose_name_plural = "criteria"


class Place (models.Model):
    lat = models.FloatField()
    lon = models.FloatField()

    def __unicode__(self):
        return '({0}, {1})'.format(self.lat, self.lon)


class UserInfo (TimeStampedModel):
    lat = models.FloatField(null=True)
    lon = models.FloatField(null=True)
    """ The user's location. """

    SOURCES = (
        ('ip', 'IP Address'),
        ('html5', 'HTML5 Geolocation API'),
    )
    location_source = models.CharField(max_length=32, choices=SOURCES)
    location_data = models.CharField(max_length=2048)
    """ The method by which the location was obtained, and any additional
        information required to recreate the location.
    """

    session = models.OneToOneField('sessions.Session')
    """ The Django browser session. """

    def __unicode__(self):
        return u'User for session {key}'.format(key=self.session.session_key)

    class Meta:
        app_label = 'sessions'
        db_table = 'project_userinfo'
        verbose_name_plural = 'User info'


class SiteConfiguration (models.Model):
    site = models.OneToOneField('sites.Site', related_name='config')
    google_analytics_key = models.CharField(max_length=256, null=True, blank=True)
    addthis_key = models.CharField(max_length=256, null=True, blank=True)
    addthis_title = models.CharField(max_length=256, null=True, blank=True)
    about_title = models.CharField(max_length=256, null=True, blank=True)
    about_text = models.TextField(null=True, blank=True)
    about_text_is_html = models.BooleanField(blank=True, default=False)

    def __unicode__(self):
        return 'Configuration for {0}'.format(self.site.name)

    class Meta:
        app_label = 'sites'
        db_table = 'project_siteconfiguration'


class SurveySession (object):
    """
    """

    def __init__(self, questions=None, places=None):
        self.__questions = questions
        self.__places = places

    @property
    def questions(self):
        """ Get the set of questions for this survey. """
        return self.__questions or self.init_questions()

    @property
    def places(self):
        """ Get the block for this session. """
        return self.__places or self.init_places()

    def init_places(self):
        """ Load two places at random.

            TODO: Order the places by those that have the least questions
            answered about them first.
        """
        places = Place.objects.all().order_by('?')[:2]
        self.__places = places
        return self.__places

    def init_questions(self):
        """ Load a set of questions at random. """
        all_questions = (
            Criterion.objects.all()
            .annotate(num_ratings=models.Count('ratings'))
        )
        self.__questions = all_questions
        return self.__questions

    @classmethod
    def make_surveys(cls, count=1):
        # TODO: Choose the places and questions more smartly.  Use the init_...
        #       methods defined above (and make them better too).
        places = list(Place.objects.all().order_by('?')[:(count * 2)])
        questions = list(Criterion.objects.all())

        surveys = []
        for i in range(count):
            place1 = places[2 * i]
            place2 = places[2 * i + 1]
            surveys.append(cls(places=[place1, place2], questions=questions))
        return surveys
[((243, 282), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (263, 282), False, 'from django.db import models\n'), ((306, 341), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (326, 341), False, 'from django.db import models\n'), ((434, 488), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""Criterion"""'], {'related_name': '"""ratings"""'}), "('Criterion', related_name='ratings')\n", (451, 488), False, 'from django.db import models\n'), ((562, 606), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""Place"""'], {'related_name': '"""+"""'}), "('Place', related_name='+')\n", (579, 606), False, 'from django.db import models\n'), ((683, 727), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""Place"""'], {'related_name': '"""+"""'}), "('Place', related_name='+')\n", (700, 727), False, 'from django.db import models\n'), ((804, 825), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (823, 825), False, 'from django.db import models\n'), ((984, 1057), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""sessions.UserInfo"""'], {'null': '(True)', 'related_name': '"""ratings"""'}), "('sessions.UserInfo', null=True, related_name='ratings')\n", (1001, 1057), False, 'from django.db import models\n'), ((1734, 1752), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (1750, 1752), False, 'from django.db import models\n'), ((1980, 1999), 'django.db.models.FloatField', 'models.FloatField', ([], {}), '()\n', (1997, 1999), False, 'from django.db import models\n'), ((2010, 2029), 'django.db.models.FloatField', 'models.FloatField', ([], {}), '()\n', (2027, 2029), False, 'from django.db import models\n'), ((2160, 2188), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (2177, 2188), False, 'from django.db import models\n'), ((2199, 2227), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (2216, 2227), False, 'from django.db import models\n'), ((2388, 2436), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(32)', 'choices': 'SOURCES'}), '(max_length=32, choices=SOURCES)\n', (2404, 2436), False, 'from django.db import models\n'), ((2457, 2490), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(2048)'}), '(max_length=2048)\n', (2473, 2490), False, 'from django.db import models\n'), ((2647, 2687), 'django.db.models.OneToOneField', 'models.OneToOneField', (['"""sessions.Session"""'], {}), "('sessions.Session')\n", (2667, 2687), False, 'from django.db import models\n'), ((3023, 3080), 'django.db.models.OneToOneField', 'models.OneToOneField', (['"""sites.Site"""'], {'related_name': '"""config"""'}), "('sites.Site', related_name='config')\n", (3043, 3080), False, 'from django.db import models\n'), ((3108, 3163), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)', 'null': '(True)', 'blank': '(True)'}), '(max_length=256, null=True, blank=True)\n', (3124, 3163), False, 'from django.db import models\n'), ((3182, 3237), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)', 'null': '(True)', 'blank': '(True)'}), '(max_length=256, null=True, blank=True)\n', (3198, 3237), False, 'from django.db import models\n'), ((3258, 3313), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)', 'null': '(True)', 'blank': '(True)'}), 
'(max_length=256, null=True, blank=True)\n', (3274, 3313), False, 'from django.db import models\n'), ((3333, 3388), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)', 'null': '(True)', 'blank': '(True)'}), '(max_length=256, null=True, blank=True)\n', (3349, 3388), False, 'from django.db import models\n'), ((3406, 3445), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (3422, 3445), False, 'from django.db import models\n'), ((3471, 3517), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'blank': '(True)', 'default': '(False)'}), '(blank=True, default=False)\n', (3490, 3517), False, 'from django.db import models\n'), ((4707, 4730), 'django.db.models.Count', 'models.Count', (['"""ratings"""'], {}), "('ratings')\n", (4719, 4730), False, 'from django.db import models\n')]
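A short sketch of driving the SurveySession factory defined in the record above; the function name and payload shape are invented for illustration and assume a configured Django project with populated Place and Criterion tables.

# Hypothetical caller for SurveySession.make_surveys().
def build_survey_payload(count=5):
    payload = []
    for survey in SurveySession.make_surveys(count=count):
        place1, place2 = survey.places   # two randomly chosen Places per survey
        payload.append({
            'places': [(place1.lat, place1.lon), (place2.lat, place2.lon)],
            'questions': [criterion.prompt for criterion in survey.questions],
        })
    return payload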
vitlabuda/selfdroid-web-app
src/selfdroid/appstorage/crud/AppAdder.py
9eac9ee2c34038de13e179b6afb3d530a086e7b2
# SPDX-License-Identifier: BSD-3-Clause
#
# Copyright (c) 2021 Vít Labuda. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
# following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
#    disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
#    following disclaimer in the documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
#    products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


import os
import sqlalchemy.exc
from selfdroid.appstorage.AppMetadata import AppMetadata
from selfdroid.appstorage.AppMetadataDBModel import AppMetadataDBModel
from selfdroid.appstorage.AppStorageConsistencyEnsurer import AppStorageConsistencyEnsurer
from selfdroid.appstorage.apk.APKParser import APKParser
from selfdroid.appstorage.apk.ParsedAPK import ParsedAPK
from selfdroid.appstorage.crud.AppAdderException import AppAdderException
from selfdroid.web.WebStatusMessageCollector import WebStatusMessageCollector
from selfdroid import db


class AppAdder:
    """
    This class must be instantiated and have its public methods called in a locked context!
    """

    def __init__(self, uploaded_apk_path: str):
        self._uploaded_apk_path: str = uploaded_apk_path
        self._parsed_apk: ParsedAPK = APKParser(self._uploaded_apk_path).parsed_apk

    def add_app_while_locked(self) -> AppMetadata:
        """
        :return: The metadata of the added app.
        """
        try:
            app_metadata = self._add_app_while_locked_with_exceptions_handled()
        except (sqlalchemy.exc.SQLAlchemyError, OSError):
            db.session.rollback()
            raise AppAdderException("An error occurred while adding the app!")
        finally:
            AppStorageConsistencyEnsurer().ensure_consistency_while_locked()

        return app_metadata

    def _add_app_while_locked_with_exceptions_handled(self) -> AppMetadata:
        self._check_if_app_can_be_added()
        return self._perform_app_addition()

    def _check_if_app_can_be_added(self) -> None:
        an_app_with_the_same_package_name = AppMetadataDBModel.query.filter_by(package_name=self._parsed_apk.package_name).first()
        if an_app_with_the_same_package_name is not None:
            html_message = WebStatusMessageCollector.format_html_message("An app with the same package name <i>({})</i> is already present on the server! You should update the app instead of adding it!", self._parsed_apk.package_name)
            raise AppAdderException(html_message)

    def _perform_app_addition(self) -> AppMetadata:
        # A UserReadableException mustn't be raised in this method!

        # 1. Database
        db_model = self._parsed_apk.create_new_db_model_with_metadata()
        db.session.add(db_model)
        db.session.commit()

        assert isinstance(db_model.id, int)
        app_metadata = AppMetadata.from_db_model(db_model)

        # 2. APK
        apk_path = app_metadata.get_apk_path()
        os.rename(self._uploaded_apk_path, apk_path)

        # 3. Icon
        icon_path = app_metadata.get_icon_path()
        with open(icon_path, "wb") as icon_file:
            icon_file.write(self._parsed_apk.uniform_png_app_icon)

        return app_metadata
[((3860, 3884), 'selfdroid.db.session.add', 'db.session.add', (['db_model'], {}), '(db_model)\n', (3874, 3884), False, 'from selfdroid import db\n'), ((3893, 3912), 'selfdroid.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3910, 3912), False, 'from selfdroid import db\n'), ((3982, 4017), 'selfdroid.appstorage.AppMetadata.AppMetadata.from_db_model', 'AppMetadata.from_db_model', (['db_model'], {}), '(db_model)\n', (4007, 4017), False, 'from selfdroid.appstorage.AppMetadata import AppMetadata\n'), ((4092, 4136), 'os.rename', 'os.rename', (['self._uploaded_apk_path', 'apk_path'], {}), '(self._uploaded_apk_path, apk_path)\n', (4101, 4136), False, 'import os\n'), ((2384, 2418), 'selfdroid.appstorage.apk.APKParser.APKParser', 'APKParser', (['self._uploaded_apk_path'], {}), '(self._uploaded_apk_path)\n', (2393, 2418), False, 'from selfdroid.appstorage.apk.APKParser import APKParser\n'), ((3377, 3594), 'selfdroid.web.WebStatusMessageCollector.WebStatusMessageCollector.format_html_message', 'WebStatusMessageCollector.format_html_message', (['"""An app with the same package name <i>({})</i> is already present on the server! You should update the app instead of adding it!"""', 'self._parsed_apk.package_name'], {}), "(\n 'An app with the same package name <i>({})</i> is already present on the server! You should update the app instead of adding it!'\n , self._parsed_apk.package_name)\n", (3422, 3594), False, 'from selfdroid.web.WebStatusMessageCollector import WebStatusMessageCollector\n'), ((3603, 3634), 'selfdroid.appstorage.crud.AppAdderException.AppAdderException', 'AppAdderException', (['html_message'], {}), '(html_message)\n', (3620, 3634), False, 'from selfdroid.appstorage.crud.AppAdderException import AppAdderException\n'), ((2720, 2741), 'selfdroid.db.session.rollback', 'db.session.rollback', ([], {}), '()\n', (2739, 2741), False, 'from selfdroid import db\n'), ((2761, 2821), 'selfdroid.appstorage.crud.AppAdderException.AppAdderException', 'AppAdderException', (['"""An error occurred while adding the app!"""'], {}), "('An error occurred while adding the app!')\n", (2778, 2821), False, 'from selfdroid.appstorage.crud.AppAdderException import AppAdderException\n'), ((3205, 3283), 'selfdroid.appstorage.AppMetadataDBModel.AppMetadataDBModel.query.filter_by', 'AppMetadataDBModel.query.filter_by', ([], {'package_name': 'self._parsed_apk.package_name'}), '(package_name=self._parsed_apk.package_name)\n', (3239, 3283), False, 'from selfdroid.appstorage.AppMetadataDBModel import AppMetadataDBModel\n'), ((2852, 2882), 'selfdroid.appstorage.AppStorageConsistencyEnsurer.AppStorageConsistencyEnsurer', 'AppStorageConsistencyEnsurer', ([], {}), '()\n', (2880, 2882), False, 'from selfdroid.appstorage.AppStorageConsistencyEnsurer import AppStorageConsistencyEnsurer\n')]
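A rough sketch of the calling convention implied by the docstring above ("must be instantiated and have its public methods called in a locked context"); the threading.Lock and handler function are assumptions, since the project's actual locking mechanism is not shown in this record.

# Hypothetical caller; the lock object stands in for whatever app-storage
# lock selfdroid actually uses around its CRUD operations.
import threading

storage_lock = threading.Lock()

def handle_uploaded_apk(uploaded_apk_path: str):
    adder = AppAdder(uploaded_apk_path)
    with storage_lock:  # public methods must run in a locked context
        return adder.add_app_while_locked()  # returns the new app's AppMetadata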
bkmeneguello/ansible-role-libvirt
library/libvirt_filter.py
e7f82077b1fd4c2ec5afa463973ecde599209549
# TODO: https://libvirt.org/formatnwfilter.html
[]
Solunest/pyestafeta
estafeta/core/__init__.py
cd24cea4973f5184f4cc7e72a653de8b22e32f69
from estafeta.core.client import EstafetaClient

user = None
password = None
id = None
account_number = None
production = None

from estafeta.core.error import EstafetaWrongData, EstafetaEmptyField

__url_label__ = [
    'https://labelqa.estafeta.com/EstafetaLabel20/services/EstafetaLabelWS?wsdl',
    'https://label.estafeta.com/EstafetaLabel20/services/EstafetaLabelWS?wsdl',
]

__url_tracking__ = [
    'https://trackingqa.estafeta.com/Service.asmx?wsdl',
    'https://tracking.estafeta.com/Service.asmx?wsdl',
]

__url_quote__ = [
    'http://frecuenciacotizador.estafeta.com/Service.asmx?wsdl',
    'http://frecuenciacotizador.estafeta.com/Service.asmx?wsdl',
]
[]
tb365/mcclient_python
yunionclient/api/flavors.py
06647e7496b9e2c3aeb5ade1276c81871063159b
from yunionclient.common import base


class FlavorManager(base.StandaloneManager):
    keyword = 'flavor'
    keyword_plural = 'flavors'
    _columns = ['ID', 'Name', 'VCPU_count', 'VMEM_size', 'Disk_size',
                'Disk_backend', 'Ext_Bandwidth', 'Int_Bandwidth', 'is_public',
                'Description', 'Aggregate_strategy', 'Flavor_type']
[]
rakib313/Bangla-End2End-Speech-Recognition
char_map.py
67e776841bf2bb3f108e94d0567dc707497605ff
""" Defines two dictionaries for converting between text and integer sequences. """ char_map_str = """ ' 0 <SPACE> 1 ব 2 া 3 ং 4 ল 5 দ 6 ে 7 শ 8 য 9 ় 10 ি 11 ত 12 ্ 13 ন 14 এ 15 ধ 16 র 17 ণ 18 ক 19 ড 20 হ 21 উ 22 প 23 জ 24 অ 25 থ 26 স 27 ষ 28 ই 29 আ 30 ছ 31 গ 32 ু 33 ো 34 ও 35 ভ 36 ী 37 ট 38 ূ 39 ম 40 ৈ 41 ৃ 42 ঙ 43 খ 44 ঃ 45 ১ 46 ৯ 47 ৬ 48 ০ 49 ২ 50 চ 51 ঘ 52 ৎ 53 ৫ 54 ৪ 55 ফ 56 ৌ 57 ৮ 58 ঁ 59 য় 60 ৩ 61 ঢ 62 ঠ 63 ৭ 64 ড় 65 ঝ 66 ঞ 67 ঔ 68 ঈ 69 v 70 b 71 s 72 ঐ 73 2 74 0 75 1 76 4 77 f 78 o 79 t 80 a 81 l 82 w 83 r 84 d 85 c 86 u 87 p 88 n 89 g 90 ঋ 91 i 92 z 93 m 94 e 95 ঊ 96 h 97 x 98 3 99 5 100 y 101 9 102 ৗ 103 j 104 œ 105 8 106 ঢ় 107 k 108 ৰ 109 """ # the "blank" character is mapped to 28 char_map = {} index_map = {} for line in char_map_str.strip().split('\n'): ch, index = line.split() char_map[ch] = int(index) index_map[int(index)+1] = ch index_map[2] = ' '
[]
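A small usage sketch for the two dictionaries above; the helper names are invented for illustration. Note the source's off-by-one convention: index_map keys are char_map values shifted by +1, with key 2 patched to a literal space, so decoding looks up value + 1.

# Hypothetical helpers on top of char_map / index_map from the record above.
def text_to_int_sequence(text):
    # spaces are stored under the '<SPACE>' key in char_map
    return [char_map['<SPACE>'] if ch == ' ' else char_map[ch] for ch in text]

def int_sequence_to_text(int_sequence):
    # decode via value + 1 because of the shift in the build loop above
    return ''.join(index_map[i + 1] for i in int_sequence)

# Round trip:
ints = text_to_int_sequence('বাংলা দেশ')
assert int_sequence_to_text(ints) == 'বাংলা দেশ'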
MaggieChege/New_App
app.py
75a2be55d50cf21305036c875af2120607ac33c0
from flask import Blueprint
from flask_restful import Api
# from restful import Api
from resources.Hello import CategoryResource

api_bp = Blueprint('api', __name__)
api = Api(api_bp)

# Route
api.add_resource(CategoryResource, '/Hello')
[((140, 166), 'flask.Blueprint', 'Blueprint', (['"""api"""', '__name__'], {}), "('api', __name__)\n", (149, 166), False, 'from flask import Blueprint\n'), ((173, 184), 'flask_restful.Api', 'Api', (['api_bp'], {}), '(api_bp)\n', (176, 184), False, 'from flask_restful import Api\n')]
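The resources.Hello module imported above is not included in this record; a minimal guess at what a Flask-RESTful CategoryResource might look like, purely for illustration:

# Hypothetical resources/Hello.py to satisfy the import in the record above.
from flask_restful import Resource

class CategoryResource(Resource):
    def get(self):
        # served at GET /Hello once registered with api.add_resource()
        return {'message': 'Hello, world'}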
ejojmjn/indiana-phone
websockets.py
5d666ac651d3e02291806f24c265564002912e00
#from gevent import monkey
#monkey.patch_all()
from flask import Flask, render_template, json
from flask_socketio import SocketIO, emit
from pydbus import SystemBus
from gi.repository import GLib
import threading
import json

app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret!'
socketio = SocketIO(app, async_mode='threading')
#socketio = SocketIO(app)

#Message: (':1.654', '/hfp/org/bluez/hci0/dev_94_65_2D_84_61_99', 'org.ofono.Modem', 'PropertyChanged', ('Powered', False))
#Data: Powered

bus = SystemBus()


def cb_server_signal_emission(*args):
    print("Message: ", args)
    makedev = lambda path: path.split('/')[-1]
    iface = args[2]
    if 'org.ofono.Modem' in iface:
        if 'PropertyChanged' in args[3]:
            message = {'source': 'modem',
                       'event': 'property_change',
                       'device': makedev(args[1]),
                       'property': args[4][0],
                       'property_value': args[4][1]}
        else:
            message = {'unknown_signal': args}
    elif 'org.ofono.NetworkRegistration' in iface:
        if 'PropertyChanged' in args[3]:
            message = {'source': 'network',
                       'event': 'property_change',
                       'device': makedev(args[1]),
                       'property': args[4][0],
                       'property_value': args[4][1]}
        else:
            message = {'unknown_signal': args}
    elif 'ofono.VoiceCallManager' in iface:
        if 'CallAdded' in args[3]:
            message = {'source': 'callmgr',
                       'event': 'call_added',
                       'device': makedev(args[1]),
                       'properties': args[4][1]}
        elif 'CallRemoved' in args[3]:
            message = {'source': 'callmgr',
                       'event': 'call_removed',
                       'device': makedev(args[1])}
        else:
            message = {'unknown_signal': args}
    elif 'ofono.VoiceCall' in iface:
        if 'PropertyChanged' in args[3]:
            message = {'source': 'call',
                       'event': 'property_change',
                       'device': makedev(args[1]),
                       'property': args[4][0],
                       'property_value': args[4][1]}
        else:
            message = {'unknown_signal': args}
    socketio.emit('message', json.dumps(message))


def dbus_monitor():
    bus.subscribe(iface='org.ofono.Modem',
                  signal_fired=cb_server_signal_emission)
    bus.subscribe(iface='org.ofono.NetworkRegistration',
                  signal_fired=cb_server_signal_emission)
    print(bus)
    bus.subscribe(iface='org.ofono.VoiceCallManager',
                  signal_fired=cb_server_signal_emission)
    print(bus)
    bus.subscribe(iface='org.ofono.VoiceCall',
                  signal_fired=cb_server_signal_emission)
    loop = GLib.MainLoop()
    loop.run()


@app.route('/')
def index():
    return '''
<html>
  <head>
    <script type="text/javascript" src="//cdnjs.cloudflare.com/ajax/libs/socket.io/1.3.6/socket.io.min.js"></script>
    <script type="text/javascript" charset="utf-8">
      var socket = io.connect('http://' + document.domain + ':' + location.port);
      socket.on('connect', function() {
        socket.emit('connected', {data: 'Client connected!'});
      });
      socket.on('message', function(message) {
        console.log('The server has a message for you: ' + message);
        var t = document.getElementById("logbox");
        t.value = t.value + 'MESSAGE: ' + message + '\\n';
      });
    </script>
  </head>
  <body>
    <textarea id="logbox" width="100" rows="10"></textarea>
    <br>
    <button onclick="document.getElementById('logbox').value='';">Clear</button>
  </body>
</html>
'''


@socketio.on('my event')
def handle_my_custom_event(arg1):
    emit('message', {'data': 42})


if __name__ == '__main__':
    t = threading.Thread(target=dbus_monitor)
    t.daemon = True
    t.start()
    socketio.run(app, host='0.0.0.0', port=5001)
[((235, 250), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (240, 250), False, 'from flask import Flask, render_template, json\n'), ((299, 336), 'flask_socketio.SocketIO', 'SocketIO', (['app'], {'async_mode': '"""threading"""'}), "(app, async_mode='threading')\n", (307, 336), False, 'from flask_socketio import SocketIO, emit\n'), ((512, 523), 'pydbus.SystemBus', 'SystemBus', ([], {}), '()\n', (521, 523), False, 'from pydbus import SystemBus\n'), ((2454, 2469), 'gi.repository.GLib.MainLoop', 'GLib.MainLoop', ([], {}), '()\n', (2467, 2469), False, 'from gi.repository import GLib\n'), ((3367, 3396), 'flask_socketio.emit', 'emit', (['"""message"""', "{'data': 42}"], {}), "('message', {'data': 42})\n", (3371, 3396), False, 'from flask_socketio import SocketIO, emit\n'), ((3431, 3468), 'threading.Thread', 'threading.Thread', ([], {'target': 'dbus_monitor'}), '(target=dbus_monitor)\n', (3447, 3468), False, 'import threading\n'), ((1939, 1958), 'json.dumps', 'json.dumps', (['message'], {}), '(message)\n', (1949, 1958), False, 'import json\n')]
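One way to observe the 'message' events outside the browser page, assuming the python-socketio client package (which the record itself does not use):

# Hypothetical test client; the URL/port match
# socketio.run(app, host='0.0.0.0', port=5001) in the record above.
import socketio

sio = socketio.Client()

@sio.on('message')
def on_message(data):
    print('ofono event:', data)  # data is the JSON string emitted by the server

sio.connect('http://localhost:5001')
sio.wait()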
wayn111/RediSearch
tests/pytests/test_tags.py
897b2de35988b84851dd8380c614a21ad8da7c0f
# -*- coding: utf-8 -*- from includes import * from common import * def search(env, r, *args): return r.execute_command('ft.search', *args) def testTagIndex(env): r = env env.expect('ft.create', 'idx', 'ON', 'HASH','schema', 'title', 'text', 'tags', 'tag').ok() N = 10 for n in range(N): env.expect('ft.add', 'idx', 'doc%d' % n, 1.0, 'fields', 'title', 'hello world term%d' % n, 'tags', 'foo bar,xxx,tag %d' % n).ok() for _ in r.retry_with_rdb_reload(): waitForIndex(r, 'idx') res = env.cmd('ft.search', 'idx', 'hello world') env.assertEqual(10, res[0]) res = env.cmd('ft.search', 'idx', 'foo bar') env.assertEqual(0, res[0]) res = env.cmd('ft.search', 'idx', '@tags:{foo bar}') env.assertEqual(N, res[0]) # inorder should not affect tags res = env.cmd( 'ft.search', 'idx', '@tags:{tag 1} @tags:{foo bar}', 'slop', '0', 'inorder') env.assertEqual(1, res[0]) for n in range(N - 1): res = env.cmd( 'ft.search', 'idx', '@tags:{tag %d}' % n, 'nocontent') env.assertEqual(1, res[0]) env.assertEqual('doc%d' % n, res[1]) res = env.cmd( 'ft.search', 'idx', '@tags:{tag\\ %d}' % n, 'nocontent') env.assertEqual(1, res[0]) res = env.cmd( 'ft.search', 'idx', 'hello world @tags:{tag\\ %d|tag %d}' % (n, n + 1), 'nocontent') env.assertEqual(2, res[0]) res = py2sorted(res[1:]) env.assertEqual('doc%d' % n, res[0]) env.assertEqual('doc%d' % (n + 1), res[1]) res = env.cmd( 'ft.search', 'idx', 'term%d @tags:{tag %d}' % (n, n), 'nocontent') env.assertEqual(1, res[0]) env.assertEqual('doc%d' % n, res[1]) def testSeparator(env): r = env env.expect( 'ft.create', 'idx', 'ON', 'HASH', 'schema', 'title', 'text', 'tags', 'tag', 'separator', ':').ok() env.expect('ft.add', 'idx', 'doc1', 1.0, 'fields', 'title', 'hello world', 'tags', 'x:hello world: fooz bar:foo,bar:BOO FAR').ok() for _ in r.retry_with_rdb_reload(): waitForIndex(r, 'idx') for q in ('@tags:{hello world}', '@tags:{fooz bar}', '@tags:{foo\\,bar}', '@tags:{boo\\ far}', '@tags:{x}'): res = env.cmd('ft.search', 'idx', q) env.assertEqual(1, res[0]) def testTagPrefix(env): env.skipOnCluster() r = env env.expect( 'ft.create', 'idx', 'ON', 'HASH', 'schema', 'title', 'text', 'tags', 'tag', 'separator', ',').ok() env.expect('ft.add', 'idx', 'doc1', 1.0, 'fields', 'title', 'hello world', 'tags', 'hello world,hello-world,hell,jell').ok() env.expect('FT.DEBUG', 'dump_tagidx', 'idx', 'tags') \ .equal([['hell', [1]], ['hello world', [1]], ['hello-world', [1]], ['jell', [1]]]) for _ in r.retry_with_rdb_reload(): waitForIndex(r, 'idx') for q in ('@tags:{hello world}', '@tags:{hel*}', '@tags:{hello\\-*}', '@tags:{he*}'): res = env.cmd('ft.search', 'idx', q) env.assertEqual(res[0], 1) def testTagFieldCase(env): r = env env.expect( 'ft.create', 'idx', 'ON', 'HASH', 'schema', 'title', 'text', 'TAgs', 'tag').ok() env.expect('ft.add', 'idx', 'doc1', 1.0, 'fields', 'title', 'hello world', 'TAgs', 'HELLO WORLD,FOO BAR').ok() for _ in r.retry_with_rdb_reload(): waitForIndex(r, 'idx') env.assertListEqual([0], r.execute_command( 'FT.SEARCH', 'idx', '@tags:{HELLO WORLD}')) env.assertListEqual([1, 'doc1'], r.execute_command( 'FT.SEARCH', 'idx', '@TAgs:{HELLO WORLD}', 'NOCONTENT')) env.assertListEqual([1, 'doc1'], r.execute_command( 'FT.SEARCH', 'idx', '@TAgs:{foo bar}', 'NOCONTENT')) env.assertListEqual([0], r.execute_command( 'FT.SEARCH', 'idx', '@TAGS:{foo bar}', 'NOCONTENT')) def testInvalidSyntax(env): r = env # invalid syntax with env.assertResponseError(): r.execute_command( 'ft.create', 'idx', 'ON', 'HASH', 'schema', 'title', 'text', 'tags', 'tag', 'separator') with env.assertResponseError(): 
    r.execute_command(
        'ft.create', 'idx', 'ON', 'HASH',
        'schema', 'title', 'text', 'tags', 'tag', 'separator', "foo")
    with env.assertResponseError():
        r.execute_command(
            'ft.create', 'idx', 'ON', 'HASH',
            'schema', 'title', 'text', 'tags', 'tag', 'separator', "")

def testTagVals(env):
    r = env
    r.execute_command(
        'ft.create', 'idx', 'ON', 'HASH',
        'schema', 'title', 'text', 'tags', 'tag', 'othertags', 'tag')

    N = 100
    alltags = set()
    for n in range(N):
        tags = ('foo %d' % n, 'bar %d' % n, 'x')
        alltags.add(tags[0])
        alltags.add(tags[1])
        alltags.add(tags[2])
        env.assertOk(r.execute_command(
            'ft.add', 'idx', 'doc%d' % n, 1.0, 'fields',
            'tags', ','.join(tags), 'othertags', 'baz %d' % int(n // 2)))

    for _ in r.retry_with_rdb_reload():
        waitForIndex(r, 'idx')
        res = r.execute_command('ft.tagvals', 'idx', 'tags')
        env.assertEqual(N * 2 + 1, len(res))
        env.assertEqual(alltags, set(res))

        res = r.execute_command('ft.tagvals', 'idx', 'othertags')
        env.assertEqual(N // 2, len(res))

        env.expect('ft.tagvals', 'idx').raiseError()
        env.expect('ft.tagvals', 'idx', 'idx', 'idx').raiseError()
        env.expect('ft.tagvals', 'fake_idx', 'tags').raiseError()
        env.expect('ft.tagvals', 'idx', 'fake_tags').raiseError()
        env.expect('ft.tagvals', 'idx', 'title').raiseError()

def testSearchNotExistsTagValue(env):
    # this test basically makes sure we are not leaking
    env.expect('FT.CREATE idx ON HASH SCHEMA t TAG SORTABLE').ok()
    env.expect('FT.SEARCH idx @t:{val}').equal([0])

def testIssue1305(env):
    env.expect('FT.CREATE myIdx ON HASH SCHEMA title TAG').ok()
    env.expect('FT.ADD myIdx doc2 1.0 FIELDS title "work"').ok()
    env.expect('FT.ADD myIdx doc2 1.0 FIELDS title "hello"').error()
    env.expect('FT.ADD myIdx doc3 1.0 FIELDS title "hello"').ok()
    env.expect('FT.ADD myIdx doc1 1.0 FIELDS title "hello,work"').ok()
    expectedRes = {'doc1': ['inf', ['title', '"hello,work"']],
                   'doc3': ['inf', ['title', '"hello"']],
                   'doc2': ['inf', ['title', '"work"']]}
    res = env.cmd('ft.search', 'myIdx', '~@title:{wor} ~@title:{hell}', 'WITHSCORES')[1:]
    res = {res[i]: res[i + 1: i + 3] for i in range(0, len(res), 3)}
    env.assertEqual(res, expectedRes)

def testTagCaseSensitive(env):
    conn = getConnectionByEnv(env)

    env.expect('FT.CREATE idx1 SCHEMA t TAG').ok()
    env.expect('FT.CREATE idx2 SCHEMA t TAG CASESENSITIVE').ok()
    env.expect('FT.CREATE idx3 SCHEMA t TAG SEPARATOR .').ok()
    env.expect('FT.CREATE idx4 SCHEMA t TAG SEPARATOR . CASESENSITIVE').ok()
    env.expect('FT.CREATE idx5 SCHEMA t TAG CASESENSITIVE SEPARATOR .').ok()

    conn.execute_command('HSET', 'doc1', 't', 'foo,FOO')
    conn.execute_command('HSET', 'doc2', 't', 'FOO')
    conn.execute_command('HSET', 'doc3', 't', 'foo')

    if not env.is_cluster():
        conn.execute_command('FT.CONFIG', 'SET', 'FORK_GC_CLEAN_THRESHOLD', '0')
        env.expect('FT.DEBUG', 'dump_tagidx', 'idx1', 't').equal([['foo', [1, 2, 3]]])
        env.expect('FT.DEBUG', 'dump_tagidx', 'idx2', 't').equal([['foo', [1, 3]], ['FOO', [1, 2]]])
        env.expect('FT.DEBUG', 'dump_tagidx', 'idx3', 't').equal([['foo', [2, 3]], ['foo,foo', [1]]])
        env.expect('FT.DEBUG', 'dump_tagidx', 'idx4', 't').equal([['foo', [3]], ['foo,FOO', [1]], ['FOO', [2]]])
        env.expect('FT.DEBUG', 'dump_tagidx', 'idx5', 't').equal([['foo', [3]], ['foo,FOO', [1]], ['FOO', [2]]])

    env.expect('FT.SEARCH', 'idx1', '@t:{FOO}') \
        .equal([3, 'doc1', ['t', 'foo,FOO'], 'doc2', ['t', 'FOO'], 'doc3', ['t', 'foo']])
    env.expect('FT.SEARCH', 'idx1', '@t:{foo}') \
        .equal([3, 'doc1', ['t', 'foo,FOO'], 'doc2', ['t', 'FOO'], 'doc3', ['t', 'foo']])
    env.expect('FT.SEARCH', 'idx2', '@t:{FOO}') \
        .equal([2, 'doc1', ['t', 'foo,FOO'], 'doc2', ['t', 'FOO']])
    env.expect('FT.SEARCH', 'idx2', '@t:{foo}') \
        .equal([2, 'doc1', ['t', 'foo,FOO'], 'doc3', ['t', 'foo']])

    conn.execute_command('HSET', 'doc1', 't', 'f o,F O')
    conn.execute_command('HSET', 'doc2', 't', 'F O')
    conn.execute_command('HSET', 'doc3', 't', 'f o')

    if not env.is_cluster():
        forceInvokeGC(env, 'idx1')
        forceInvokeGC(env, 'idx2')
        forceInvokeGC(env, 'idx3')
        forceInvokeGC(env, 'idx4')
        forceInvokeGC(env, 'idx5')

        env.expect('FT.DEBUG', 'dump_tagidx', 'idx1', 't').equal([['f o', [4, 5, 6]]])
        env.expect('FT.DEBUG', 'dump_tagidx', 'idx2', 't').equal([['f o', [4, 6]], ['F O', [4, 5]]])
        env.expect('FT.DEBUG', 'dump_tagidx', 'idx3', 't').equal([['f o', [5, 6]], ['f o,f o', [4]]])
        env.expect('FT.DEBUG', 'dump_tagidx', 'idx4', 't').equal([['f o', [6]], ['f o,F O', [4]], ['F O', [5]]])
        env.expect('FT.DEBUG', 'dump_tagidx', 'idx5', 't').equal([['f o', [6]], ['f o,F O', [4]], ['F O', [5]]])

    # not casesensitive
    env.expect('FT.SEARCH', 'idx1', '@t:{F\\ O}') \
        .equal([3, 'doc1', ['t', 'f o,F O'], 'doc2', ['t', 'F O'], 'doc3', ['t', 'f o']])
    env.expect('FT.SEARCH', 'idx1', '@t:{f\\ o}') \
        .equal([3, 'doc1', ['t', 'f o,F O'], 'doc2', ['t', 'F O'], 'doc3', ['t', 'f o']])

    # casesensitive
    env.expect('FT.SEARCH', 'idx2', '@t:{F\\ O}') \
        .equal([2, 'doc1', ['t', 'f o,F O'], 'doc2', ['t', 'F O']])
    env.expect('FT.SEARCH', 'idx2', '@t:{f\\ o}') \
        .equal([2, 'doc1', ['t', 'f o,F O'], 'doc3', ['t', 'f o']])

    # not casesensitive
    env.expect('FT.SEARCH', 'idx3', '@t:{f\\ o\\,f\\ o}') \
        .equal([1, 'doc1', ['t', 'f o,F O']])
    env.expect('FT.SEARCH', 'idx3', '@t:{f\\ o\\,F\\ O}') \
        .equal([1, 'doc1', ['t', 'f o,F O']])
    env.expect('FT.SEARCH', 'idx3', '@t:{F\\ O\\,F\\ O}') \
        .equal([1, 'doc1', ['t', 'f o,F O']])
    env.expect('FT.SEARCH', 'idx3', '@t:{F\\ O}') \
        .equal([2, 'doc2', ['t', 'F O'], 'doc3', ['t', 'f o']])
    env.expect('FT.SEARCH', 'idx3', '@t:{f\\ o}') \
        .equal([2, 'doc2', ['t', 'F O'], 'doc3', ['t', 'f o']])

    # casesensitive
    env.expect('FT.SEARCH', 'idx4', '@t:{f\\ o\\,f\\ o}') \
        .equal([0])
    env.expect('FT.SEARCH', 'idx4', '@t:{f\\ o\\,F\\ O}') \
        .equal([1, 'doc1', ['t', 'f o,F O']])
    env.expect('FT.SEARCH', 'idx4', '@t:{F\\ O\\,F\\ O}') \
        .equal([0])
    env.expect('FT.SEARCH', 'idx4', '@t:{F\\ O}') \
        .equal([1, 'doc2', ['t', 'F O']])
    env.expect('FT.SEARCH', 'idx4', '@t:{f\\ o}') \
        .equal([1, 'doc3', ['t', 'f o']])

def testTagGCClearEmpty(env):
    env.skipOnCluster()
    conn = getConnectionByEnv(env)
    conn.execute_command('FT.CONFIG', 'SET', 'FORK_GC_CLEAN_THRESHOLD', '0')
    conn.execute_command('FT.CREATE', 'idx', 'SCHEMA', 't', 'TAG')
    conn.execute_command('HSET', 'doc1', 't', 'foo')
    conn.execute_command('HSET', 'doc2', 't', 'bar')
    conn.execute_command('HSET', 'doc3', 't', 'baz')
    env.expect('FT.DEBUG', 'DUMP_TAGIDX', 'idx', 't').equal([['foo', [1]], ['bar', [2]], ['baz', [3]]])
    env.expect('FT.SEARCH', 'idx', '@t:{foo}').equal([1, 'doc1', ['t', 'foo']])

    # delete two tags
    conn.execute_command('DEL', 'doc1')
    conn.execute_command('DEL', 'doc2')
    forceInvokeGC(env, 'idx')
    env.expect('FT.DEBUG', 'DUMP_TAGIDX', 'idx', 't').equal([['baz', [3]]])
    env.expect('FT.SEARCH', 'idx', '@t:{foo}').equal([0])

    # delete last tag
    conn.execute_command('DEL', 'doc3')
    forceInvokeGC(env, 'idx')
    env.expect('FT.DEBUG', 'DUMP_TAGIDX', 'idx', 't').equal([])

    # check term can be used after being empty
    conn.execute_command('HSET', 'doc4', 't', 'foo')
    conn.execute_command('HSET', 'doc5', 't', 'foo')
    env.expect('FT.SEARCH', 'idx', '@t:{foo}') \
        .equal([2, 'doc4', ['t', 'foo'], 'doc5', ['t', 'foo']])

def testTagGCClearEmptyWithCursor(env):
    env.skipOnCluster()
    conn = getConnectionByEnv(env)
    conn.execute_command('FT.CONFIG', 'SET', 'FORK_GC_CLEAN_THRESHOLD', '0')
    conn.execute_command('FT.CREATE', 'idx', 'SCHEMA', 't', 'TAG')
    conn.execute_command('HSET', 'doc1', 't', 'foo')
    conn.execute_command('HSET', 'doc2', 't', 'foo')
    env.expect('FT.DEBUG', 'DUMP_TAGIDX', 'idx', 't').equal([['foo', [1, 2]]])

    res, cursor = env.cmd('FT.AGGREGATE', 'idx', '@t:{foo}', 'WITHCURSOR', 'COUNT', '1')
    env.assertEqual(res, [1, []])

    # delete both documents and run the GC to clean 'foo' inverted index
    env.expect('DEL', 'doc1').equal(1)
    env.expect('DEL', 'doc2').equal(1)
    forceInvokeGC(env, 'idx')

    # make sure the inverted index was cleaned
    env.expect('FT.DEBUG', 'DUMP_TAGIDX', 'idx', 't').equal([])

    # read from the cursor
    res, cursor = env.cmd('FT.CURSOR', 'READ', 'idx', cursor)
    env.assertEqual(res, [0])
    env.assertEqual(cursor, 0)

def testTagGCClearEmptyWithCursorAndMoreData(env):
    env.skipOnCluster()
    conn = getConnectionByEnv(env)
    conn.execute_command('FT.CONFIG', 'SET', 'FORK_GC_CLEAN_THRESHOLD', '0')
    conn.execute_command('FT.CREATE', 'idx', 'SCHEMA', 't', 'TAG')
    conn.execute_command('HSET', 'doc1', 't', 'foo')
    conn.execute_command('HSET', 'doc2', 't', 'foo')
    env.expect('FT.DEBUG', 'DUMP_TAGIDX', 'idx', 't').equal([['foo', [1, 2]]])

    res, cursor = env.cmd('FT.AGGREGATE', 'idx', '@t:{foo}', 'WITHCURSOR', 'COUNT', '1')
    env.assertEqual(res, [1, []])

    # delete both documents and run the GC to clean 'foo' inverted index
    env.expect('DEL', 'doc1').equal(1)
    env.expect('DEL', 'doc2').equal(1)
    forceInvokeGC(env, 'idx')

    # make sure the inverted index was cleaned
    env.expect('FT.DEBUG', 'DUMP_TAGIDX', 'idx', 't').equal([])

    # add data
    conn.execute_command('HSET', 'doc3', 't', 'foo')
    conn.execute_command('HSET', 'doc4', 't', 'foo')
    env.expect('FT.DEBUG', 'DUMP_TAGIDX', 'idx', 't').equal([['foo', [3, 4]]])

    # read from the cursor
    res, cursor = conn.execute_command('FT.CURSOR', 'READ', 'idx', cursor)
    env.assertEqual(res, [0])
    env.assertEqual(cursor, 0)

    # ensure later documents with same tag are read
    res = conn.execute_command('FT.AGGREGATE', 'idx', '@t:{foo}')
    env.assertEqual(res, [1, [], []])

@unstable
def testEmptyTagLeak(env):
    env.skipOnCluster()

    cycles = 1
    tags = 30

    conn = getConnectionByEnv(env)
    conn.execute_command('FT.CONFIG', 'SET', 'FORK_GC_CLEAN_THRESHOLD', '0')
    conn.execute_command('FT.CREATE', 'idx', 'SCHEMA', 't', 'TAG')
    pl = conn.pipeline()

    for i in range(cycles):
        for j in range(tags):
            x = j + i * tags
            pl.execute_command('HSET', 'doc{}'.format(x), 't', 'tag{}'.format(x))
        pl.execute()
        for j in range(tags):
            pl.execute_command('DEL', 'doc{}'.format(j + i * tags))
        pl.execute()
        forceInvokeGC(env, 'idx')

    env.expect('FT.DEBUG', 'DUMP_TAGIDX', 'idx', 't').equal([])
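# Hedged standalone sketch (not part of the test suite above): a minimal
# illustration of the TAG CASESENSITIVE behavior these tests exercise, using
# plain redis-py. It assumes a locally running Redis server with the RediSearch
# module loaded; the index and key names are made up for the example.
def tag_casesensitive_demo():
    import redis
    r = redis.Redis()
    # Case-sensitive tag field: 'foo' and 'FOO' are indexed as distinct tags.
    r.execute_command('FT.CREATE', 'demo', 'SCHEMA', 't', 'TAG', 'CASESENSITIVE')
    r.execute_command('HSET', 'd1', 't', 'foo,FOO')
    r.execute_command('HSET', 'd2', 't', 'FOO')
    # Matches d1 and d2 only; without CASESENSITIVE this would also match
    # documents tagged only with lowercase 'foo'.
    return r.execute_command('FT.SEARCH', 'demo', '@t:{FOO}')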
[]
Lexa307/PhotonDefender
sc2/bot_ai.py
a08dc652e5c64e3ccb33b7cfa206846dca0575bd
import itertools
import logging
import math
import random
from collections import Counter
from typing import Any, Dict, List, Optional, Set, Tuple, Union  # mypy type checking

from .cache import property_cache_forever, property_cache_once_per_frame
from .data import ActionResult, Alert, Attribute, Race, Result, Target, race_gas, race_townhalls, race_worker
from .game_data import AbilityData, GameData
# imports for mypy and pycharm autocomplete
from .game_state import GameState
from .ids.ability_id import AbilityId
from .ids.unit_typeid import UnitTypeId
from .ids.upgrade_id import UpgradeId
from .pixel_map import PixelMap
from .position import Point2, Point3
from .unit import Unit
from .units import Units

logger = logging.getLogger(__name__)


class BotAI:
    """Base class for bots."""

    EXPANSION_GAP_THRESHOLD = 15

    def __init__(self):
        # Specific opponent bot ID used in sc2ai ladder games http://sc2ai.net/
        # The bot ID will stay the same each game so your bot can "adapt" to the opponent
        self.opponent_id: int = None
        self.units: Units = None
        self.workers: Units = None
        self.townhalls: Units = None
        self.geysers: Units = None
        self.minerals: int = None
        self.vespene: int = None
        self.supply_army: Union[float, int] = None
        self.supply_workers: Union[float, int] = None  # Doesn't include workers in production
        self.supply_cap: Union[float, int] = None
        self.supply_used: Union[float, int] = None
        self.supply_left: Union[float, int] = None
        self.idle_worker_count: int = None
        self.army_count: int = None
        self.warp_gate_count: int = None
        self.larva_count: int = None
        self.cached_known_enemy_structures = None
        self.cached_known_enemy_units = None

    @property
    def enemy_race(self) -> Race:
        assert len(self._game_info.player_races) == 2, "enemy_race not available"
        self.enemy_id = 3 - self.player_id
        return Race(self._game_info.player_races[self.enemy_id])

    @property
    def time(self) -> Union[int, float]:
        """ Returns time in seconds, assumes the game is played on 'faster' """
        return self.state.game_loop / 22.4  # / (1/1.4) * (1/16)

    @property
    def time_formatted(self) -> str:
        """ Returns time as string in min:sec format """
        t = self.time
        return f"{int(t // 60):02}:{int(t % 60):02}"

    @property
    def game_info(self) -> "GameInfo":
        return self._game_info

    def alert(self, alert_code: Alert) -> bool:
        """ Check if alert is triggered in the current step.
Example use: from sc2.data import Alert if self.alert(Alert.AddOnComplete): print("Addon Complete") Alert codes: AlertError AddOnComplete BuildingComplete BuildingUnderAttack LarvaHatched MergeComplete MineralsExhausted MorphComplete MothershipComplete MULEExpired NuclearLaunchDetected NukeComplete NydusWormDetected ResearchComplete TrainError TrainUnitComplete TrainWorkerComplete TransformationComplete UnitUnderAttack UpgradeComplete VespeneExhausted WarpInComplete """ assert isinstance(alert_code, Alert), f"alert_code {alert_code} is no Alert" return alert_code.value in self.state.alerts @property def start_location(self) -> Point2: return self._game_info.player_start_location @property def enemy_start_locations(self) -> List[Point2]: """Possible start locations for enemies.""" return self._game_info.start_locations @property_cache_once_per_frame def known_enemy_units(self) -> Units: """List of known enemy units, including structures.""" return self.state.enemy_units @property_cache_once_per_frame def known_enemy_structures(self) -> Units: """List of known enemy units, structures only.""" return self.state.enemy_units.structure @property def main_base_ramp(self) -> "Ramp": """ Returns the Ramp instance of the closest main-ramp to start location. Look in game_info.py for more information """ if hasattr(self, "cached_main_base_ramp"): return self.cached_main_base_ramp # The reason for len(ramp.upper) in {2, 5} is: # ParaSite map has 5 upper points, and most other maps have 2 upper points at the main ramp. # The map Acolyte has 4 upper points at the wrong ramp (which is closest to the start position). try: self.cached_main_base_ramp = min( (ramp for ramp in self.game_info.map_ramps if len(ramp.upper) in {2, 5}), key=lambda r: self.start_location.distance_to(r.top_center), ) except ValueError: # Hardcoded hotfix for Honorgrounds LE map, as that map has a large main base ramp with inbase natural self.cached_main_base_ramp = min( (ramp for ramp in self.game_info.map_ramps if len(ramp.upper) in {4, 9}), key=lambda r: self.start_location.distance_to(r.top_center), ) return self.cached_main_base_ramp @property_cache_forever def expansion_locations(self) -> Dict[Point2, Units]: """ Returns dict with the correct expansion position Point2 object as key, resources (mineral field and vespene geyser) as value. 
""" # Idea: create a group for every resource, then merge these groups if # any resource in a group is closer than 6 to any resource of another group # Distance we group resources by RESOURCE_SPREAD_THRESHOLD = 8.5 geysers = self.state.vespene_geyser # Create a group for every resource resource_groups = [[resource] for resource in self.state.resources] # Loop the merging process as long as we change something found_something = True while found_something: found_something = False # Check every combination of two groups for group_a, group_b in itertools.combinations(resource_groups, 2): # Check if any pair of resource of these groups is closer than threshold together if any( resource_a.distance_to(resource_b) <= RESOURCE_SPREAD_THRESHOLD for resource_a, resource_b in itertools.product(group_a, group_b) ): # Remove the single groups and add the merged group resource_groups.remove(group_a) resource_groups.remove(group_b) resource_groups.append(group_a + group_b) found_something = True break # Distance offsets we apply to center of each resource group to find expansion position offset_range = 7 offsets = [ (x, y) for x, y in itertools.product(range(-offset_range, offset_range + 1), repeat=2) if math.hypot(x, y) <= 8 ] # Dict we want to return centers = {} # For every resource group: for resources in resource_groups: # Possible expansion points amount = len(resources) # Calculate center, round and add 0.5 because expansion location will have (x.5, y.5) # coordinates because bases have size 5. center_x = int(sum(resource.position.x for resource in resources) / amount) + 0.5 center_y = int(sum(resource.position.y for resource in resources) / amount) + 0.5 possible_points = (Point2((offset[0] + center_x, offset[1] + center_y)) for offset in offsets) # Filter out points that are too near possible_points = ( point for point in possible_points # Check if point can be built on if self._game_info.placement_grid[point.rounded] == 1 # Check if all resources have enough space to point and all(point.distance_to(resource) > (7 if resource in geysers else 6) for resource in resources) ) # Choose best fitting point result = min(possible_points, key=lambda point: sum(point.distance_to(resource) for resource in resources)) centers[result] = resources return centers def _correct_zerg_supply(self): """ The client incorrectly rounds zerg supply down instead of up (see https://github.com/Blizzard/s2client-proto/issues/123), so self.supply_used and friends return the wrong value when there are an odd number of zerglings and banelings. This function corrects the bad values. """ # TODO: remove when Blizzard/sc2client-proto#123 gets fixed. half_supply_units = { UnitTypeId.ZERGLING, UnitTypeId.ZERGLINGBURROWED, UnitTypeId.BANELING, UnitTypeId.BANELINGBURROWED, UnitTypeId.BANELINGCOCOON, } correction = self.units(half_supply_units).amount % 2 self.supply_used += correction self.supply_army += correction self.supply_left -= correction async def get_available_abilities( self, units: Union[List[Unit], Units], ignore_resource_requirements=False ) -> List[List[AbilityId]]: """ Returns available abilities of one or more units. Right know only checks cooldown, energy cost, and whether the ability has been researched. 
        Example usage:
        units_abilities = await self.get_available_abilities(self.units)
        or
        units_abilities = await self.get_available_abilities([self.units.random])
        """
        return await self._client.query_available_abilities(units, ignore_resource_requirements)

    async def expand_now(
        self, building: UnitTypeId = None, max_distance: Union[int, float] = 10, location: Optional[Point2] = None
    ):
        """ Not recommended as this function uses 'self.do' (reduces performance).
        Finds the next possible expansion via 'self.get_next_expansion()'.
        If the target expansion is blocked (e.g. by an enemy unit), it will misplace the expansion. """
        if not building:
            # self.race is never Race.Random
            start_townhall_type = {
                Race.Protoss: UnitTypeId.NEXUS,
                Race.Terran: UnitTypeId.COMMANDCENTER,
                Race.Zerg: UnitTypeId.HATCHERY,
            }
            building = start_townhall_type[self.race]
        assert isinstance(building, UnitTypeId)
        if not location:
            location = await self.get_next_expansion()
        await self.build(building, near=location, max_distance=max_distance, random_alternative=False, placement_step=1)

    async def get_next_expansion(self) -> Optional[Point2]:
        """Find next expansion location."""
        closest = None
        distance = math.inf
        for el in self.expansion_locations:

            def is_near_to_expansion(t):
                return t.distance_to(el) < self.EXPANSION_GAP_THRESHOLD

            if any(map(is_near_to_expansion, self.townhalls)):
                # already taken
                continue
            startp = self._game_info.player_start_location
            d = await self._client.query_pathing(startp, el)
            if d is None:
                continue
            if d < distance:
                distance = d
                closest = el
        return closest

    async def distribute_workers(self, resource_ratio: float = 2):
        """
        Distributes workers across all the bases taken.
        Keyword `resource_ratio` takes a float. If the current minerals-to-gas ratio is bigger than
        `resource_ratio`, this function prefers filling geysers first; if it is lower, it will
        prefer sending workers to minerals first.
        This is only for workers that need to be moved anyway; it will NOT fill geysers on its own.
        NOTE: This function is far from optimal. If you really want to have refined worker control,
        you should write your own distribution function. For example, long-distance mining control
        and moving workers if a base was killed are not being handled.
        WARNING: This is quite slow when there are lots of workers or multiple bases.
""" if not self.state.mineral_field or not self.workers or not self.townhalls.ready: return actions = [] worker_pool = [worker for worker in self.workers.idle] bases = self.townhalls.ready geysers = self.geysers.ready # list of places that need more workers deficit_mining_places = [] for mining_place in bases | geysers: difference = mining_place.surplus_harvesters # perfect amount of workers, skip mining place if not difference: continue if mining_place.is_vespene_geyser: # get all workers that target the gas extraction site # or are on their way back from it local_workers = self.workers.filter( lambda unit: unit.order_target == mining_place.tag or (unit.is_carrying_vespene and unit.order_target == bases.closest_to(mining_place).tag) ) else: # get tags of minerals around expansion local_minerals_tags = { mineral.tag for mineral in self.state.mineral_field if mineral.distance_to(mining_place) <= 8 } # get all target tags a worker can have # tags of the minerals he could mine at that base # get workers that work at that gather site local_workers = self.workers.filter( lambda unit: unit.order_target in local_minerals_tags or (unit.is_carrying_minerals and unit.order_target == mining_place.tag) ) # too many workers if difference > 0: for worker in local_workers[:difference]: worker_pool.append(worker) # too few workers # add mining place to deficit bases for every missing worker else: deficit_mining_places += [mining_place for _ in range(-difference)] # prepare all minerals near a base if we have too many workers # and need to send them to the closest patch if len(worker_pool) > len(deficit_mining_places): all_minerals_near_base = [ mineral for mineral in self.state.mineral_field if any(mineral.distance_to(base) <= 8 for base in self.townhalls.ready) ] # distribute every worker in the pool for worker in worker_pool: # as long as have workers and mining places if deficit_mining_places: # choose only mineral fields first if current mineral to gas ratio is less than target ratio if self.vespene and self.minerals / self.vespene < resource_ratio: possible_mining_places = [place for place in deficit_mining_places if not place.vespene_contents] # else prefer gas else: possible_mining_places = [place for place in deficit_mining_places if place.vespene_contents] # if preferred type is not available any more, get all other places if not possible_mining_places: possible_mining_places = deficit_mining_places # find closest mining place current_place = min(deficit_mining_places, key=lambda place: place.distance_to(worker)) # remove it from the list deficit_mining_places.remove(current_place) # if current place is a gas extraction site, go there if current_place.vespene_contents: actions.append(worker.gather(current_place)) # if current place is a gas extraction site, # go to the mineral field that is near and has the most minerals left else: local_minerals = [ mineral for mineral in self.state.mineral_field if mineral.distance_to(current_place) <= 8 ] target_mineral = max(local_minerals, key=lambda mineral: mineral.mineral_contents) actions.append(worker.gather(target_mineral)) # more workers to distribute than free mining spots # send to closest if worker is doing nothing elif worker.is_idle and all_minerals_near_base: target_mineral = min(all_minerals_near_base, key=lambda mineral: mineral.distance_to(worker)) actions.append(worker.gather(target_mineral)) else: # there are no deficit mining places and worker is not idle # so dont move him pass await self.do_actions(actions) @property def 
owned_expansions(self) -> Dict[Point2, Unit]: """List of expansions owned by the player.""" owned = {} for el in self.expansion_locations: def is_near_to_expansion(t): return t.distance_to(el) < self.EXPANSION_GAP_THRESHOLD th = next((x for x in self.townhalls if is_near_to_expansion(x)), None) if th: owned[el] = th return owned def can_feed(self, unit_type: UnitTypeId) -> bool: """ Checks if you have enough free supply to build the unit """ required = self._game_data.units[unit_type.value]._proto.food_required return required == 0 or self.supply_left >= required def can_afford( self, item_id: Union[UnitTypeId, UpgradeId, AbilityId], check_supply_cost: bool = True ) -> "CanAffordWrapper": """Tests if the player has enough resources to build a unit or cast an ability.""" enough_supply = True if isinstance(item_id, UnitTypeId): unit = self._game_data.units[item_id.value] cost = self._game_data.calculate_ability_cost(unit.creation_ability) if check_supply_cost: enough_supply = self.can_feed(item_id) elif isinstance(item_id, UpgradeId): cost = self._game_data.upgrades[item_id.value].cost else: cost = self._game_data.calculate_ability_cost(item_id) return CanAffordWrapper(cost.minerals <= self.minerals, cost.vespene <= self.vespene, enough_supply) async def can_cast( self, unit: Unit, ability_id: AbilityId, target: Optional[Union[Unit, Point2, Point3]] = None, only_check_energy_and_cooldown: bool = False, cached_abilities_of_unit: List[AbilityId] = None, ) -> bool: """Tests if a unit has an ability available and enough energy to cast it. See data_pb2.py (line 161) for the numbers 1-5 to make sense""" assert isinstance(unit, Unit) assert isinstance(ability_id, AbilityId) assert isinstance(target, (type(None), Unit, Point2, Point3)) # check if unit has enough energy to cast or if ability is on cooldown if cached_abilities_of_unit: abilities = cached_abilities_of_unit else: abilities = (await self.get_available_abilities([unit]))[0] if ability_id in abilities: if only_check_energy_and_cooldown: return True cast_range = self._game_data.abilities[ability_id.value]._proto.cast_range ability_target = self._game_data.abilities[ability_id.value]._proto.target # Check if target is in range (or is a self cast like stimpack) if ( ability_target == 1 or ability_target == Target.PointOrNone.value and isinstance(target, (Point2, Point3)) and unit.distance_to(target) <= cast_range ): # cant replace 1 with "Target.None.value" because ".None" doesnt seem to be a valid enum name return True # Check if able to use ability on a unit elif ( ability_target in {Target.Unit.value, Target.PointOrUnit.value} and isinstance(target, Unit) and unit.distance_to(target) <= cast_range ): return True # Check if able to use ability on a position elif ( ability_target in {Target.Point.value, Target.PointOrUnit.value} and isinstance(target, (Point2, Point3)) and unit.distance_to(target) <= cast_range ): return True return False def select_build_worker(self, pos: Union[Unit, Point2, Point3], force: bool = False) -> Optional[Unit]: """Select a worker to build a building with.""" workers = ( self.workers.filter(lambda w: (w.is_gathering or w.is_idle) and w.distance_to(pos) < 20) or self.workers ) if workers: for worker in workers.sorted_by_distance_to(pos).prefer_idle: if ( not worker.orders or len(worker.orders) == 1 and worker.orders[0].ability.id in {AbilityId.MOVE, AbilityId.HARVEST_GATHER} ): return worker return workers.random if force else None async def can_place(self, building: Union[AbilityData, AbilityId, UnitTypeId], 
position: Point2) -> bool: """Tests if a building can be placed in the given location.""" building_type = type(building) assert building_type in {AbilityData, AbilityId, UnitTypeId} if building_type == UnitTypeId: building = self._game_data.units[building.value].creation_ability elif building_type == AbilityId: building = self._game_data.abilities[building.value] r = await self._client.query_building_placement(building, [position]) return r[0] == ActionResult.Success async def find_placement( self, building: UnitTypeId, near: Union[Unit, Point2, Point3], max_distance: int = 20, random_alternative: bool = True, placement_step: int = 2, ) -> Optional[Point2]: """Finds a placement location for building.""" assert isinstance(building, (AbilityId, UnitTypeId)) assert isinstance(near, Point2) if isinstance(building, UnitTypeId): building = self._game_data.units[building.value].creation_ability else: # AbilityId building = self._game_data.abilities[building.value] if await self.can_place(building, near): return near if max_distance == 0: return None for distance in range(placement_step, max_distance, placement_step): possible_positions = [ Point2(p).offset(near).to2 for p in ( [(dx, -distance) for dx in range(-distance, distance + 1, placement_step)] + [(dx, distance) for dx in range(-distance, distance + 1, placement_step)] + [(-distance, dy) for dy in range(-distance, distance + 1, placement_step)] + [(distance, dy) for dy in range(-distance, distance + 1, placement_step)] ) ] res = await self._client.query_building_placement(building, possible_positions) possible = [p for r, p in zip(res, possible_positions) if r == ActionResult.Success] if not possible: continue if random_alternative: return random.choice(possible) else: return min(possible, key=lambda p: p.distance_to_point2(near)) return None def already_pending_upgrade(self, upgrade_type: UpgradeId) -> Union[int, float]: """ Check if an upgrade is being researched Return values: 0: not started 0 < x < 1: researching 1: finished """ assert isinstance(upgrade_type, UpgradeId) if upgrade_type in self.state.upgrades: return 1 level = None if "LEVEL" in upgrade_type.name: level = upgrade_type.name[-1] creationAbilityID = self._game_data.upgrades[upgrade_type.value].research_ability.id for structure in self.units.filter(lambda unit: unit.is_structure and unit.is_ready): for order in structure.orders: if order.ability.id is creationAbilityID: if level and order.ability.button_name[-1] != level: return 0 return order.progress return 0 @property_cache_once_per_frame def _abilities_all_units(self) -> Counter: """ Cache for the already_pending function, includes protoss units warping in, and all units in production, and all structures, and all morphs """ abilities_amount = Counter() for unit in self.units: # type: Unit for order in unit.orders: abilities_amount[order.ability] += 1 if not unit.is_ready: if self.race != Race.Terran or not unit.is_structure: # If an SCV is constructing a building, already_pending would count this structure twice (once from the SCV order, and once from "not structure.is_ready") abilities_amount[self._game_data.units[unit.type_id.value].creation_ability] += 1 return abilities_amount @property_cache_once_per_frame def _abilities_workers_and_eggs(self) -> Counter: """ Cache for the already_pending function, includes all worker orders (including pending). 
Zerg units in production (except queens and morphing units) and structures in production, counts double for terran """ abilities_amount = Counter() for worker in self.workers: # type: Unit for order in worker.orders: abilities_amount[order.ability] += 1 if self.race == Race.Zerg: for egg in self.units(UnitTypeId.EGG): # type: Unit for order in egg.orders: abilities_amount[order.ability] += 1 if self.race != Race.Terran: # If an SCV is constructing a building, already_pending would count this structure twice # (once from the SCV order, and once from "not structure.is_ready") for unit in self.units.structure.not_ready: # type: Unit abilities_amount[self._game_data.units[unit.type_id.value].creation_ability] += 1 return abilities_amount def already_pending(self, unit_type: Union[UpgradeId, UnitTypeId], all_units: bool = True) -> int: """ Returns a number of buildings or units already in progress, or if a worker is en route to build it. This also includes queued orders for workers and build queues of buildings. If all_units==True, then build queues of other units (such as Carriers (Interceptors) or Oracles (Stasis Ward)) are also included. """ # TODO / FIXME: SCV building a structure might be counted as two units if isinstance(unit_type, UpgradeId): return self.already_pending_upgrade(unit_type) ability = self._game_data.units[unit_type.value].creation_ability amount = len(self.units(unit_type).not_ready) if all_units: amount += sum([o.ability == ability for u in self.units for o in u.orders]) else: amount += sum([o.ability == ability for w in self.workers for o in w.orders]) amount += sum([egg.orders[0].ability == ability for egg in self.units(UnitTypeId.EGG)]) return amount async def build(self, building: UnitTypeId, near: Union[Point2, Point3], max_distance: int=20, unit: Optional[Unit]=None, random_alternative: bool=True, placement_step: int=2): """Build a building.""" if isinstance(near, Unit): near = near.position.to2 elif near is not None: near = near.to2 else: return p = await self.find_placement(building, near.rounded, max_distance, random_alternative, placement_step) if p is None: return ActionResult.CantFindPlacementLocation unit = unit or self.select_build_worker(p) if unit is None or not self.can_afford(building): return ActionResult.Error return await self.do(unit.build(building, p)) async def do(self, action): if not self.can_afford(action): logger.warning(f"Cannot afford action {action}") return ActionResult.Error r = await self._client.actions(action) if not r: # success cost = self._game_data.calculate_ability_cost(action.ability) self.minerals -= cost.minerals self.vespene -= cost.vespene else: logger.error(f"Error: {r} (action: {action})") return r async def do_actions(self, actions: List["UnitCommand"], prevent_double=True): """ Unlike 'self.do()', this function does not instantly subtract minerals and vespene. 
""" if not actions: return None if prevent_double: actions = list(filter(self.prevent_double_actions, actions)) for action in actions: cost = self._game_data.calculate_ability_cost(action.ability) self.minerals -= cost.minerals self.vespene -= cost.vespene return await self._client.actions(actions) def prevent_double_actions(self, action): # always add actions if queued if action.queue: return True if action.unit.orders: # action: UnitCommand # current_action: UnitOrder current_action = action.unit.orders[0] if current_action.ability.id != action.ability: # different action, return true return True try: if current_action.target == action.target.tag: # same action, remove action if same target unit return False except AttributeError: pass try: if action.target.x == current_action.target.x and action.target.y == current_action.target.y: # same action, remove action if same target position return False except AttributeError: pass return True return True async def chat_send(self, message: str): """ Send a chat message. """ assert isinstance(message, str), f"{message} is no string" await self._client.chat_send(message, False) # For the functions below, make sure you are inside the boundries of the map size. def get_terrain_height(self, pos: Union[Point2, Point3, Unit]) -> int: """ Returns terrain height at a position. Caution: terrain height is different from a unit's z-coordinate. """ assert isinstance(pos, (Point2, Point3, Unit)), f"pos is not of type Point2, Point3 or Unit" pos = pos.position.to2.rounded return self._game_info.terrain_height[pos] # returns int def get_terrain_z_height(self, pos: Union[Point2, Point3, Unit]) -> int: """ Returns terrain z-height at a position. """ assert isinstance(pos, (Point2, Point3, Unit)), f"pos is not of type Point2, Point3 or Unit" pos = pos.position.to2.rounded return -16 + 32 * self._game_info.terrain_height[pos] / 255 def in_placement_grid(self, pos: Union[Point2, Point3, Unit]) -> bool: """ Returns True if you can place something at a position. Remember, buildings usually use 2x2, 3x3 or 5x5 of these grid points. Caution: some x and y offset might be required, see ramp code: https://github.com/Dentosal/python-sc2/blob/master/sc2/game_info.py#L17-L18 """ assert isinstance(pos, (Point2, Point3, Unit)) pos = pos.position.to2.rounded return self._game_info.placement_grid[pos] == 1 def in_pathing_grid(self, pos: Union[Point2, Point3, Unit]) -> bool: """ Returns True if a unit can pass through a grid point. """ assert isinstance(pos, (Point2, Point3, Unit)) pos = pos.position.to2.rounded return self._game_info.pathing_grid[pos] == 1 def is_visible(self, pos: Union[Point2, Point3, Unit]) -> bool: """ Returns True if you have vision on a grid point. """ # more info: https://github.com/Blizzard/s2client-proto/blob/9906df71d6909511907d8419b33acc1a3bd51ec0/s2clientprotocol/spatial.proto#L19 assert isinstance(pos, (Point2, Point3, Unit)) pos = pos.position.to2.rounded return self.state.visibility[pos] == 2 def has_creep(self, pos: Union[Point2, Point3, Unit]) -> bool: """ Returns True if there is creep on the grid point. 
""" assert isinstance(pos, (Point2, Point3, Unit)) pos = pos.position.to2.rounded return self.state.creep[pos] == 1 def _prepare_start(self, client, player_id, game_info, game_data): """Ran until game start to set game and player data.""" self._client: "Client" = client self._game_info: "GameInfo" = game_info self._game_data: GameData = game_data self.player_id: int = player_id self.race: Race = Race(self._game_info.player_races[self.player_id]) self._units_previous_map: dict = dict() self._previous_upgrades: Set[UpgradeId] = set() self.units: Units = Units([]) def _prepare_first_step(self): """First step extra preparations. Must not be called before _prepare_step.""" if self.townhalls: self._game_info.player_start_location = self.townhalls.first.position self._game_info.map_ramps, self._game_info.vision_blockers = self._game_info._find_ramps_and_vision_blockers() def _prepare_step(self, state, proto_game_info): # Set attributes from new state before on_step.""" self.state: GameState = state # See game_state.py # update pathing grid self._game_info.pathing_grid: PixelMap = PixelMap( proto_game_info.game_info.start_raw.pathing_grid, in_bits=True, mirrored=False ) # Required for events self._units_previous_map: Dict = {unit.tag: unit for unit in self.units} self.units: Units = state.own_units self.workers: Units = self.units(race_worker[self.race]) self.townhalls: Units = self.units(race_townhalls[self.race]) self.geysers: Units = self.units(race_gas[self.race]) self.minerals: int = state.common.minerals self.vespene: int = state.common.vespene self.supply_army: int = state.common.food_army self.supply_workers: int = state.common.food_workers # Doesn't include workers in production self.supply_cap: int = state.common.food_cap self.supply_used: int = state.common.food_used self.supply_left: int = self.supply_cap - self.supply_used if self.race == Race.Zerg: self.larva_count: int = state.common.larva_count # Workaround Zerg supply rounding bug self._correct_zerg_supply() elif self.race == Race.Protoss: self.warp_gate_count: int = state.common.warp_gate_count self.idle_worker_count: int = state.common.idle_worker_count self.army_count: int = state.common.army_count # reset cached values self.cached_known_enemy_structures = None self.cached_known_enemy_units = None async def issue_events(self): """ This function will be automatically run from main.py and triggers the following functions: - on_unit_created - on_unit_destroyed - on_building_construction_complete """ await self._issue_unit_dead_events() await self._issue_unit_added_events() for unit in self.units.structure: await self._issue_building_complete_event(unit) if len(self._previous_upgrades) != len(self.state.upgrades): for upgrade_completed in self.state.upgrades - self._previous_upgrades: await self.on_upgrade_complete(upgrade_completed) self._previous_upgrades = self.state.upgrades async def _issue_unit_added_events(self): for unit in self.units.not_structure: if unit.tag not in self._units_previous_map: await self.on_unit_created(unit) for unit in self.units.structure: if unit.tag not in self._units_previous_map: await self.on_building_construction_started(unit) async def _issue_building_complete_event(self, unit): if unit.build_progress < 1: return if unit.tag not in self._units_previous_map: return unit_prev = self._units_previous_map[unit.tag] if unit_prev.build_progress < 1: await self.on_building_construction_complete(unit) async def _issue_unit_dead_events(self): for unit_tag in self.state.dead_units: await 
            self.on_unit_destroyed(unit_tag)

    async def on_unit_destroyed(self, unit_tag):
        """ Override this in your bot class.
        Note that this function uses unit tags because the unit does not exist any more. """

    async def on_unit_created(self, unit: Unit):
        """ Override this in your bot class. """

    async def on_building_construction_started(self, unit: Unit):
        """ Override this in your bot class. """

    async def on_building_construction_complete(self, unit: Unit):
        """ Override this in your bot class. Note that this function is also
        triggered at the start of the game for the starting base building."""

    async def on_upgrade_complete(self, upgrade: UpgradeId):
        """ Override this in your bot class. """

    def on_start(self):
        """ Allows initializing the bot when the game data is available. """

    async def on_start_async(self):
        """ This function is run after "on_start". At this point, game_data, game_info
        and the first iteration of game_state (self.state) are available. """

    async def on_step(self, iteration: int):
        """Ran on every game step (looped in realtime mode)."""
        raise NotImplementedError

    def on_end(self, game_result: Result):
        """ Triggered at the end of a game. """


class CanAffordWrapper:
    def __init__(self, can_afford_minerals, can_afford_vespene, have_enough_supply):
        self.can_afford_minerals = can_afford_minerals
        self.can_afford_vespene = can_afford_vespene
        self.have_enough_supply = have_enough_supply

    def __bool__(self):
        return self.can_afford_minerals and self.can_afford_vespene and self.have_enough_supply

    @property
    def action_result(self):
        if not self.can_afford_vespene:
            return ActionResult.NotEnoughVespene
        elif not self.can_afford_minerals:
            return ActionResult.NotEnoughMinerals
        elif not self.have_enough_supply:
            return ActionResult.NotEnoughFood
        else:
            return None
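# Hedged usage sketch (appended for illustration; not part of the original
# module): a minimal bot built only from methods defined above --
# distribute_workers(), can_afford(), already_pending() and expand_now().
# Hooking it into a game would use the usual python-sc2 run harness, which is
# outside this file.
class ExpandAndMineBot(BotAI):
    async def on_step(self, iteration: int):
        # Keep saturation balanced across bases and geysers (see distribute_workers above).
        await self.distribute_workers()
        # Take another base once it is affordable and not already queued.
        townhall_type = {Race.Protoss: UnitTypeId.NEXUS,
                         Race.Terran: UnitTypeId.COMMANDCENTER,
                         Race.Zerg: UnitTypeId.HATCHERY}[self.race]
        if self.can_afford(townhall_type) and not self.already_pending(townhall_type):
            await self.expand_now()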
[((864, 891), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (881, 891), False, 'import logging\n'), ((25574, 25583), 'collections.Counter', 'Counter', ([], {}), '()\n', (25581, 25583), False, 'from collections import Counter\n'), ((26487, 26496), 'collections.Counter', 'Counter', ([], {}), '()\n', (26494, 26496), False, 'from collections import Counter\n'), ((6390, 6432), 'itertools.combinations', 'itertools.combinations', (['resource_groups', '(2)'], {}), '(resource_groups, 2)\n', (6412, 6432), False, 'import itertools\n'), ((24244, 24267), 'random.choice', 'random.choice', (['possible'], {}), '(possible)\n', (24257, 24267), False, 'import random\n'), ((7319, 7335), 'math.hypot', 'math.hypot', (['x', 'y'], {}), '(x, y)\n', (7329, 7335), False, 'import math\n'), ((6690, 6725), 'itertools.product', 'itertools.product', (['group_a', 'group_b'], {}), '(group_a, group_b)\n', (6707, 6725), False, 'import itertools\n')]
xyzst/pants
src/python/pants/jvm/resolve/lockfile_metadata.py
d6a357fe67ee7e8e1aefeae625e107f5609f1717
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from __future__ import annotations

from dataclasses import dataclass
from enum import Enum
from typing import Any, Iterable, cast

from pants.core.util_rules.lockfile_metadata import (
    LockfileMetadata,
    LockfileMetadataValidation,
    LockfileScope,
    _get_metadata,
    lockfile_metadata_registrar,
)
from pants.jvm.resolve.common import ArtifactRequirement
from pants.util.ordered_set import FrozenOrderedSet

_jvm_lockfile_metadata = lockfile_metadata_registrar(LockfileScope.JVM)


class InvalidJVMLockfileReason(Enum):
    REQUIREMENTS_MISMATCH = "requirements_mismatch"


@dataclass(frozen=True)
class JVMLockfileMetadata(LockfileMetadata):

    scope = LockfileScope.JVM

    @staticmethod
    def new(
        requirements: Iterable[ArtifactRequirement],
    ) -> JVMLockfileMetadata:
        """Call the most recent version of the `LockfileMetadata` class to construct a concrete
        instance.

        This static method should be used in place of the `LockfileMetadata` constructor. This
        gives calling sites a predictable method to call to construct a new `LockfileMetadata`
        for writing, while still allowing us to support _reading_ older, deprecated metadata
        versions.
        """
        return JVMLockfileMetadataV1.from_artifact_requirements(requirements)

    @classmethod
    def from_lockfile(
        cls, lockfile: bytes, lockfile_path: str | None = None, resolve_name: str | None = None
    ) -> JVMLockfileMetadataV1:
        return cast(
            JVMLockfileMetadataV1,
            LockfileMetadata.from_lockfile_for_scope(
                LockfileScope.JVM, lockfile, lockfile_path, resolve_name
            ),
        )

    def is_valid_for(
        self,
        requirements: Iterable[ArtifactRequirement] | None,
    ) -> LockfileMetadataValidation:
        """Returns a truthy object if this `JVMLockfileMetadata` can be used in the current
        execution context."""
        raise NotImplementedError("call `is_valid_for` on subclasses only")


@_jvm_lockfile_metadata(1)
@dataclass(frozen=True)
class JVMLockfileMetadataV1(JVMLockfileMetadata):
    """Lockfile version that permits specifying requirements as a set rather than a digest.

    Validity is tested by the set of requirement strings being the same in the user requirements
    as those in the stored requirements.
    """

    requirements: FrozenOrderedSet[str]

    @classmethod
    def from_artifact_requirements(
        cls, requirements: Iterable[ArtifactRequirement]
    ) -> JVMLockfileMetadataV1:
        return cls(FrozenOrderedSet(i.to_metadata_str() for i in requirements))

    @classmethod
    def _from_json_dict(
        cls: type[JVMLockfileMetadataV1],
        json_dict: dict[Any, Any],
        lockfile_description: str,
        error_suffix: str,
    ) -> JVMLockfileMetadataV1:
        metadata = _get_metadata(json_dict, lockfile_description, error_suffix)

        requirements = metadata(
            "generated_with_requirements",
            FrozenOrderedSet[str],
            FrozenOrderedSet,
        )

        return JVMLockfileMetadataV1(requirements)

    @classmethod
    def additional_header_attrs(cls, instance: LockfileMetadata) -> dict[Any, Any]:
        instance = cast(JVMLockfileMetadataV1, instance)
        return {
            "generated_with_requirements": (
                sorted(instance.requirements) if instance.requirements is not None else None
            )
        }

    def is_valid_for(
        self,
        requirements: Iterable[ArtifactRequirement] | None,
    ) -> LockfileMetadataValidation:
        """Returns a truthy object if the request requirements match the metadata requirements.

        For this version, "match" is defined as the request requirements being a non-strict
        subset of the metadata requirements.
        """
        failure_reasons: set[InvalidJVMLockfileReason] = set()

        if not self.requirements.issuperset(i.to_metadata_str() for i in requirements or []):
            failure_reasons.add(InvalidJVMLockfileReason.REQUIREMENTS_MISMATCH)

        return LockfileMetadataValidation(failure_reasons)
[((580, 626), 'pants.core.util_rules.lockfile_metadata.lockfile_metadata_registrar', 'lockfile_metadata_registrar', (['LockfileScope.JVM'], {}), '(LockfileScope.JVM)\n', (607, 626), False, 'from pants.core.util_rules.lockfile_metadata import LockfileMetadata, LockfileMetadataValidation, LockfileScope, _get_metadata, lockfile_metadata_registrar\n'), ((722, 744), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (731, 744), False, 'from dataclasses import dataclass\n'), ((2166, 2188), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (2175, 2188), False, 'from dataclasses import dataclass\n'), ((2978, 3038), 'pants.core.util_rules.lockfile_metadata._get_metadata', '_get_metadata', (['json_dict', 'lockfile_description', 'error_suffix'], {}), '(json_dict, lockfile_description, error_suffix)\n', (2991, 3038), False, 'from pants.core.util_rules.lockfile_metadata import LockfileMetadata, LockfileMetadataValidation, LockfileScope, _get_metadata, lockfile_metadata_registrar\n'), ((3364, 3401), 'typing.cast', 'cast', (['JVMLockfileMetadataV1', 'instance'], {}), '(JVMLockfileMetadataV1, instance)\n', (3368, 3401), False, 'from typing import Any, Iterable, cast\n'), ((4216, 4259), 'pants.core.util_rules.lockfile_metadata.LockfileMetadataValidation', 'LockfileMetadataValidation', (['failure_reasons'], {}), '(failure_reasons)\n', (4242, 4259), False, 'from pants.core.util_rules.lockfile_metadata import LockfileMetadata, LockfileMetadataValidation, LockfileScope, _get_metadata, lockfile_metadata_registrar\n'), ((1672, 1774), 'pants.core.util_rules.lockfile_metadata.LockfileMetadata.from_lockfile_for_scope', 'LockfileMetadata.from_lockfile_for_scope', (['LockfileScope.JVM', 'lockfile', 'lockfile_path', 'resolve_name'], {}), '(LockfileScope.JVM, lockfile,\n lockfile_path, resolve_name)\n', (1712, 1774), False, 'from pants.core.util_rules.lockfile_metadata import LockfileMetadata, LockfileMetadataValidation, LockfileScope, _get_metadata, lockfile_metadata_registrar\n')]
GregorKikelj/opendbc
generator/generator.py
a20ed24ea2593e5d019adf538dc0cecfc7ef8709
#!/usr/bin/env python3
import os
import re

cur_path = os.path.dirname(os.path.realpath(__file__))
opendbc_root = os.path.join(cur_path, '../')
include_pattern = re.compile(r'CM_ "IMPORT (.*?)";')


def read_dbc(src_dir, filename):
    with open(os.path.join(src_dir, filename)) as file_in:
        return file_in.read()


def create_dbc(src_dir, filename, output_path):
    dbc_file_in = read_dbc(src_dir, filename)

    includes = include_pattern.findall(dbc_file_in)

    output_filename = filename.replace('.dbc', '_generated.dbc')
    output_file_location = os.path.join(output_path, output_filename)

    with open(output_file_location, 'w') as dbc_file_out:
        dbc_file_out.write('CM_ "AUTOGENERATED FILE, DO NOT EDIT";\n')

        for include_filename in includes:
            include_file_header = '\n\nCM_ "Imported file %s starts here";\n' % include_filename
            dbc_file_out.write(include_file_header)

            include_file = read_dbc(src_dir, include_filename)
            dbc_file_out.write(include_file)

        dbc_file_out.write('\nCM_ "%s starts here";\n' % filename)

        core_dbc = include_pattern.sub('', dbc_file_in)
        dbc_file_out.write(core_dbc)


def create_all(output_path):
    for src_dir, _, filenames in os.walk(cur_path):
        if src_dir == cur_path:
            continue
        #print(src_dir)
        for filename in filenames:
            if filename.startswith('_') or not filename.endswith('.dbc'):
                continue
            #print(filename)
            create_dbc(src_dir, filename, output_path)


if __name__ == "__main__":
    create_all(opendbc_root)
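# Hedged single-file usage sketch (directory and file names are hypothetical):
# create_dbc() expands every CM_ "IMPORT <file>"; directive by inlining <file>
# from the same source directory, then appends the remaining definitions into
# <output_path>/<name>_generated.dbc.
def regenerate_single(src_dir, filename, output_path='/tmp'):
    # e.g. regenerate_single(os.path.join(cur_path, 'toyota'), 'some_car.dbc')
    # would write /tmp/some_car_generated.dbc
    create_dbc(src_dir, filename, output_path)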
[((114, 143), 'os.path.join', 'os.path.join', (['cur_path', '"""../"""'], {}), "(cur_path, '../')\n", (126, 143), False, 'import os\n'), ((162, 195), 're.compile', 're.compile', (['"""CM_ "IMPORT (.*?)";"""'], {}), '(\'CM_ "IMPORT (.*?)";\')\n', (172, 195), False, 'import re\n'), ((71, 97), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (87, 97), False, 'import os\n'), ((549, 591), 'os.path.join', 'os.path.join', (['output_path', 'output_filename'], {}), '(output_path, output_filename)\n', (561, 591), False, 'import os\n'), ((1201, 1218), 'os.walk', 'os.walk', (['cur_path'], {}), '(cur_path)\n', (1208, 1218), False, 'import os\n'), ((244, 275), 'os.path.join', 'os.path.join', (['src_dir', 'filename'], {}), '(src_dir, filename)\n', (256, 275), False, 'import os\n')]
matheusdemicheli/dogtel
customer/admin.py
4eed44c8214fe814c26a6df0125af9b065c81c1c
from django.contrib import admin
from django.utils.safestring import mark_safe
from customer.models import Owner, Dog, Breed, SubBreed


class OwnerAdmin(admin.ModelAdmin):
    """
    Owner ModelAdmin.
    """
    search_fields = ['name']


class BreedAdmin(admin.ModelAdmin):
    """
    Breed ModelAdmin.
    """
    search_fields = ['name']


class SubBreedAdmin(admin.ModelAdmin):
    """
    SubBreed ModelAdmin.
    """
    search_fields = ['name', 'breed__name']
    autocomplete_fields = ['breed']
    list_display = ['name', 'breed']


class DogAdmin(admin.ModelAdmin):
    """
    Dog ModelAdmin.
    """
    search_fields = ['name', 'owner__name']
    autocomplete_fields = ['owner', 'breed', 'sub_breed']
    list_display = ['name', 'owner', 'breed', 'sub_breed', 'img_photo']

    def img_photo(self, obj):
        """
        Render the dog's photo.
        """
        return mark_safe('<img src="%s" width="70">' % obj.photo.url)


admin.site.register(Dog, DogAdmin)
admin.site.register(Owner, OwnerAdmin)
admin.site.register(Breed, BreedAdmin)
admin.site.register(SubBreed, SubBreedAdmin)
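# Hedged refinement sketch (assumes stock Django admin behavior): list_display
# callables such as img_photo are labeled after the method name ("IMG PHOTO")
# unless a short_description is attached to the callable, like this.
DogAdmin.img_photo.short_description = 'Photo'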
[((948, 982), 'django.contrib.admin.site.register', 'admin.site.register', (['Dog', 'DogAdmin'], {}), '(Dog, DogAdmin)\n', (967, 982), False, 'from django.contrib import admin\n'), ((983, 1021), 'django.contrib.admin.site.register', 'admin.site.register', (['Owner', 'OwnerAdmin'], {}), '(Owner, OwnerAdmin)\n', (1002, 1021), False, 'from django.contrib import admin\n'), ((1022, 1060), 'django.contrib.admin.site.register', 'admin.site.register', (['Breed', 'BreedAdmin'], {}), '(Breed, BreedAdmin)\n', (1041, 1060), False, 'from django.contrib import admin\n'), ((1061, 1105), 'django.contrib.admin.site.register', 'admin.site.register', (['SubBreed', 'SubBreedAdmin'], {}), '(SubBreed, SubBreedAdmin)\n', (1080, 1105), False, 'from django.contrib import admin\n'), ((891, 945), 'django.utils.safestring.mark_safe', 'mark_safe', (['(\'<img src="%s" width="70">\' % obj.photo.url)'], {}), '(\'<img src="%s" width="70">\' % obj.photo.url)\n', (900, 945), False, 'from django.utils.safestring import mark_safe\n')]
APSL/kaneda
kaneda/tasks/rq.py
739db48588d2237dd7710b16f23921d489182868
from __future__ import absolute_import

from redis import Redis
from rq.decorators import job

from kaneda.utils import get_backend

backend = get_backend()


@job(queue='kaneda', connection=Redis())
def report(name, metric, value, tags, id_):
    """
    RQ job to report metrics to the configured backend in kanedasettings.py.

    To run the worker execute this command:
        rqworker [queue]
    """
    return backend.report(name, metric, value, tags, id_)
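# Hedged enqueue sketch: RQ's @job decorator adds a .delay() helper that pushes
# the call onto the 'kaneda' queue for a worker to run, instead of running it
# inline. The metric name, value and tags below are made up for illustration,
# and a reachable Redis server is assumed.
if __name__ == '__main__':
    report.delay('api.latency', 'timing', 125.0, {'host': 'web-1'}, None)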
[((143, 156), 'kaneda.utils.get_backend', 'get_backend', ([], {}), '()\n', (154, 156), False, 'from kaneda.utils import get_backend\n'), ((191, 198), 'redis.Redis', 'Redis', ([], {}), '()\n', (196, 198), False, 'from redis import Redis\n')]
jg-rivera/cert-ripper
src/ripper.py
2bab5e02cd2da8e92a1c308640917b6f5ee729cb
from dotenv import load_dotenv
from PyPDF2 import PdfFileReader, PdfFileWriter
import os
import json


class CertRipper:
    def __init__(
        self,
        start_page_index=0,
        master_pdf_path=None,
        json_points_path=None,
        ripped_certs_path=None,
        ripped_cert_file_name=None,
    ):
        self.start_page_index = start_page_index
        self.master_pdf_path = master_pdf_path
        self.pdf = PdfFileReader(master_pdf_path)
        self.pdf_length = self.pdf.getNumPages()
        self.json_points_path = json_points_path
        self.ripped_certs_path = ripped_certs_path
        self.ripped_cert_file_name = ripped_cert_file_name

    def process(self):
        recipient_groups = self.get_recipient_groups_from_points()
        self.extract_pdf_from_master(recipient_groups)

    def extract_pdf_from_master(self, recipient_groups):
        current_page_index = self.start_page_index
        process_index = 0

        for recipient_group in recipient_groups:
            recipient_group_name = recipient_group["name"]
            recipient_group_tag = recipient_group["tag"]
            recipient_slugs = recipient_group["recipient_slugs"]

            print(
                f"[*] Ripping \x1b[93m{recipient_group_name}\x1b[0m group ...")

            for recipient_slug in recipient_slugs:
                page = self.pdf.getPage(current_page_index)
                file_name = self.ripped_cert_file_name.format(
                    index=current_page_index + 1,
                    tag=recipient_group_tag,
                    recipient=recipient_slug
                )

                pdf_writer = PdfFileWriter()
                pdf_writer.addPage(page)

                output_file_name = f"{self.ripped_certs_path}\\{file_name}.pdf"
                with open(output_file_name, "wb") as out:
                    pdf_writer.write(out)

                print(
                    f"\x1b[95m[{process_index}]\x1b[0m Ripped \x1b[92m[{file_name}]\x1b[0m from \x1b[94mpage {current_page_index + 1}\x1b[0m of master")

                current_page_index += 1
                process_index += 1

    def get_recipient_groups_from_points(self):
        recipient_groups = []
        total_recipients = 0

        with open(self.json_points_path, "r") as json_file:
            points = json.load(json_file)
            for point in points:
                point_name = point["name"]
                point_tag = point["tag"]
                point_recipients = point["recipients"]
                point_recipient_slugs = []

                for point_recipient in point_recipients:
                    recipient_name = point_recipient["name"]
                    recipient_name_slug = "_".join(recipient_name.split())
                    point_recipient_slugs.append(recipient_name_slug)
                    total_recipients += 1

                recipient_groups.append({
                    "name": point_name,
                    "tag": point_tag,
                    "recipient_slugs": point_recipient_slugs
                })

        total_groups = len(recipient_groups)
        self.__check_pdf_length(total_recipients)

        print(
            f"Read \x1b[95m{total_groups} group(s)\x1b[0m and \x1b[95m{total_recipients} recipient(s)\x1b[0m from JSON points")

        return recipient_groups

    def __check_pdf_length(self, recipients_length):
        pdf_length = self.pdf_length - self.start_page_index
        if pdf_length != recipients_length:
            raise ValueError(
                f"Number of recipients ({recipients_length}) does not match with PDF length ({pdf_length})"
            )


if __name__ == "__main__":
    load_dotenv()
    ripper = CertRipper(
        # os.getenv returns strings; the page index must be numeric for the
        # page arithmetic in __check_pdf_length and extract_pdf_from_master.
        start_page_index=int(os.getenv("START_PAGE_INDEX")),
        master_pdf_path=os.getenv("MASTER_PDF_PATH"),
        json_points_path=os.getenv("JSON_POINTS_PATH"),
        ripped_certs_path=os.getenv("RIPPED_CERTS_PATH"),
        ripped_cert_file_name=os.getenv("RIPPED_CERT_FILE_NAME"),
    )
    ripper.process()
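# Hedged sketch (illustrative names) of the JSON "points" document that
# get_recipient_groups_from_points() parses above; each recipient maps, in
# order, to one page of the master PDF starting at start_page_index.
EXAMPLE_POINTS = [
    {
        "name": "Organizers",
        "tag": "org",
        "recipients": [{"name": "Jane Doe"}, {"name": "Juan dela Cruz"}],
    },
]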
[((3709, 3722), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (3720, 3722), False, 'from dotenv import load_dotenv\n'), ((432, 462), 'PyPDF2.PdfFileReader', 'PdfFileReader', (['master_pdf_path'], {}), '(master_pdf_path)\n', (445, 462), False, 'from PyPDF2 import PdfFileReader, PdfFileWriter\n'), ((2328, 2348), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (2337, 2348), False, 'import json\n'), ((3774, 3803), 'os.getenv', 'os.getenv', (['"""START_PAGE_INDEX"""'], {}), "('START_PAGE_INDEX')\n", (3783, 3803), False, 'import os\n'), ((3829, 3857), 'os.getenv', 'os.getenv', (['"""MASTER_PDF_PATH"""'], {}), "('MASTER_PDF_PATH')\n", (3838, 3857), False, 'import os\n'), ((3884, 3913), 'os.getenv', 'os.getenv', (['"""JSON_POINTS_PATH"""'], {}), "('JSON_POINTS_PATH')\n", (3893, 3913), False, 'import os\n'), ((3941, 3971), 'os.getenv', 'os.getenv', (['"""RIPPED_CERTS_PATH"""'], {}), "('RIPPED_CERTS_PATH')\n", (3950, 3971), False, 'import os\n'), ((4003, 4037), 'os.getenv', 'os.getenv', (['"""RIPPED_CERT_FILE_NAME"""'], {}), "('RIPPED_CERT_FILE_NAME')\n", (4012, 4037), False, 'import os\n'), ((1647, 1662), 'PyPDF2.PdfFileWriter', 'PdfFileWriter', ([], {}), '()\n', (1660, 1662), False, 'from PyPDF2 import PdfFileReader, PdfFileWriter\n')]
shehzadulislam/Assignment4
venv/Lib/site-packages/tests/test_111_FieldNumAddCol.py
a9cced70be6ae5d2685027d68032d5849f638301
#
#  Licensed Materials - Property of IBM
#
#  (c) Copyright IBM Corp. 2007-2008
#

import unittest, sys
import ibm_db
import config
from testfunctions import IbmDbTestFunctions


class IbmDbTestCase(unittest.TestCase):

    def test_111_FieldNumAddCol(self):
        obj = IbmDbTestFunctions()
        obj.assert_expect(self.run_test_111)

    def run_test_111(self):
        conn = ibm_db.connect(config.database, config.user, config.password)
        server = ibm_db.server_info(conn)

        if conn:
            ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_OFF)
            insert = "INSERT INTO animals values (7, 'cat', 'Benji', 5.1)"
            ibm_db.exec_immediate(conn, insert)

            stmt = ibm_db.exec_immediate(conn, "SELECT breed, COUNT(breed) AS number FROM animals GROUP BY breed ORDER BY breed")

            if (server.DBMS_NAME[0:3] == 'IDS'):
                num1 = ibm_db.field_num(stmt, "id")
                num2 = ibm_db.field_num(stmt, "breed")
                num3 = ibm_db.field_num(stmt, "number")
                num4 = ibm_db.field_num(stmt, "NUMBER")
                num5 = ibm_db.field_num(stmt, "bREED")
                num6 = ibm_db.field_num(stmt, 8)
                num7 = ibm_db.field_num(stmt, 1)
                num8 = ibm_db.field_num(stmt, "WEIGHT")
            else:
                num1 = ibm_db.field_num(stmt, "ID")
                num2 = ibm_db.field_num(stmt, "BREED")
                num3 = ibm_db.field_num(stmt, "NUMBER")
                num4 = ibm_db.field_num(stmt, "number")
                num5 = ibm_db.field_num(stmt, "Breed")
                num6 = ibm_db.field_num(stmt, 8)
                num7 = ibm_db.field_num(stmt, 1)
                num8 = ibm_db.field_num(stmt, "weight")

            print("%s" % num1)
            print("int(%d)" % num2)
            print("int(%d)" % num3)
            print("%s" % num4)
            print("%s" % num5)
            print("%s" % num6)
            print("int(%d)" % num7)
            print("%s" % num8)

            ibm_db.rollback(conn)
        else:
            print("Connection failed.")

#__END__
#__LUW_EXPECTED__
#False
#int(0)
#int(1)
#False
#False
#False
#int(1)
#False
#__ZOS_EXPECTED__
#False
#int(0)
#int(1)
#False
#False
#False
#int(1)
#False
#__SYSTEMI_EXPECTED__
#False
#int(0)
#int(1)
#False
#False
#False
#int(1)
#False
#__IDS_EXPECTED__
#False
#int(0)
#int(1)
#False
#False
#False
#int(1)
#False
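# Hedged summary of the semantics the test above exercises (comment-form, in
# keeping with the expected-output blocks): ibm_db.field_num() returns the
# zero-based column index for a result-set column matched by exact
# (case-sensitive) name or by an in-range position, and False otherwise.
#
#   stmt = ibm_db.exec_immediate(conn, "SELECT breed, COUNT(breed) AS number ...")
#   ibm_db.field_num(stmt, "BREED")   # -> 0 on LUW/zOS/SystemI (uppercase names)
#   ibm_db.field_num(stmt, "NUMBER")  # -> 1
#   ibm_db.field_num(stmt, "breed")   # -> False (wrong case on those servers)
#   ibm_db.field_num(stmt, 1)         # -> 1 (in-range position)
#   ibm_db.field_num(stmt, 8)         # -> False (out of range)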
[((268, 288), 'testfunctions.IbmDbTestFunctions', 'IbmDbTestFunctions', ([], {}), '()\n', (286, 288), False, 'from testfunctions import IbmDbTestFunctions\n'), ((368, 429), 'ibm_db.connect', 'ibm_db.connect', (['config.database', 'config.user', 'config.password'], {}), '(config.database, config.user, config.password)\n', (382, 429), False, 'import ibm_db\n'), ((443, 467), 'ibm_db.server_info', 'ibm_db.server_info', (['conn'], {}), '(conn)\n', (461, 467), False, 'import ibm_db\n'), ((490, 540), 'ibm_db.autocommit', 'ibm_db.autocommit', (['conn', 'ibm_db.SQL_AUTOCOMMIT_OFF'], {}), '(conn, ibm_db.SQL_AUTOCOMMIT_OFF)\n', (507, 540), False, 'import ibm_db\n'), ((617, 652), 'ibm_db.exec_immediate', 'ibm_db.exec_immediate', (['conn', 'insert'], {}), '(conn, insert)\n', (638, 652), False, 'import ibm_db\n'), ((673, 792), 'ibm_db.exec_immediate', 'ibm_db.exec_immediate', (['conn', '"""SELECT breed, COUNT(breed) AS number FROM animals GROUP BY breed ORDER BY breed"""'], {}), "(conn,\n 'SELECT breed, COUNT(breed) AS number FROM animals GROUP BY breed ORDER BY breed'\n )\n", (694, 792), False, 'import ibm_db\n'), ((1804, 1825), 'ibm_db.rollback', 'ibm_db.rollback', (['conn'], {}), '(conn)\n', (1819, 1825), False, 'import ibm_db\n'), ((847, 875), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '"""id"""'], {}), "(stmt, 'id')\n", (863, 875), False, 'import ibm_db\n'), ((891, 922), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '"""breed"""'], {}), "(stmt, 'breed')\n", (907, 922), False, 'import ibm_db\n'), ((938, 970), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '"""number"""'], {}), "(stmt, 'number')\n", (954, 970), False, 'import ibm_db\n'), ((986, 1018), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '"""NUMBER"""'], {}), "(stmt, 'NUMBER')\n", (1002, 1018), False, 'import ibm_db\n'), ((1034, 1065), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '"""bREED"""'], {}), "(stmt, 'bREED')\n", (1050, 1065), False, 'import ibm_db\n'), ((1081, 1106), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '(8)'], {}), '(stmt, 8)\n', (1097, 1106), False, 'import ibm_db\n'), ((1122, 1147), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '(1)'], {}), '(stmt, 1)\n', (1138, 1147), False, 'import ibm_db\n'), ((1163, 1195), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '"""WEIGHT"""'], {}), "(stmt, 'WEIGHT')\n", (1179, 1195), False, 'import ibm_db\n'), ((1223, 1251), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '"""ID"""'], {}), "(stmt, 'ID')\n", (1239, 1251), False, 'import ibm_db\n'), ((1267, 1298), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '"""BREED"""'], {}), "(stmt, 'BREED')\n", (1283, 1298), False, 'import ibm_db\n'), ((1314, 1346), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '"""NUMBER"""'], {}), "(stmt, 'NUMBER')\n", (1330, 1346), False, 'import ibm_db\n'), ((1362, 1394), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '"""number"""'], {}), "(stmt, 'number')\n", (1378, 1394), False, 'import ibm_db\n'), ((1410, 1441), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '"""Breed"""'], {}), "(stmt, 'Breed')\n", (1426, 1441), False, 'import ibm_db\n'), ((1457, 1482), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '(8)'], {}), '(stmt, 8)\n', (1473, 1482), False, 'import ibm_db\n'), ((1498, 1523), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '(1)'], {}), '(stmt, 1)\n', (1514, 1523), False, 'import ibm_db\n'), ((1539, 1571), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '"""weight"""'], {}), "(stmt, 'weight')\n", (1555, 1571), False, 'import ibm_db\n')]
Mindelirium/foundation
foundation/djangocms_pagebanner/cms_toolbar.py
2d07e430915d696ca7376afea633692119c4d30e
from cms.api import get_page_draft
from cms.toolbar_pool import toolbar_pool
from cms.toolbar_base import CMSToolbar
from cms.utils import get_cms_setting
from cms.utils.permissions import has_page_change_permission
from django.core.urlresolvers import reverse, NoReverseMatch
from django.utils.translation import ugettext_lazy as _

from .models import PageBannerExtension

_banner_change_url = 'admin:djangocms_pagebanner_pagebannerextension_change'
_banner_add_url = 'admin:djangocms_pagebanner_pagebannerextension_add'


@toolbar_pool.register
class PageBannerExtensionToolbar(CMSToolbar):

    def populate(self):
        # always use draft if we have a page
        self.page = get_page_draft(self.request.current_page)

        if not self.page:
            # Nothing to do
            return

        # check global permissions if CMS_PERMISSIONS is active
        if get_cms_setting('PERMISSION'):
            has_global_current_page_change_permission = \
                has_page_change_permission(self.request)
        else:
            has_global_current_page_change_permission = False

        # check if user has page edit permission
        can_change = (self.request.current_page and
                      self.request.current_page.has_change_permission(
                          self.request))
        if has_global_current_page_change_permission or can_change:
            try:
                page_banner_extension = PageBannerExtension.objects.get(
                    extended_object_id=self.page.id)
            except PageBannerExtension.DoesNotExist:
                page_banner_extension = None
            try:
                if page_banner_extension:
                    url = reverse(_banner_change_url,
                                  args=(page_banner_extension.pk,))
                else:
                    url = (reverse(_banner_add_url) +
                           '?extended_object=%s' % self.page.pk)
            except NoReverseMatch:
                # not in urls
                pass
            else:
                not_edit_mode = not self.toolbar.edit_mode
                current_page_menu = self.toolbar.get_or_create_menu('page')
                current_page_menu.add_modal_item(_('Page banner'),
                                                 url=url,
                                                 disabled=not_edit_mode)
[((682, 723), 'cms.api.get_page_draft', 'get_page_draft', (['self.request.current_page'], {}), '(self.request.current_page)\n', (696, 723), False, 'from cms.api import get_page_draft\n'), ((874, 903), 'cms.utils.get_cms_setting', 'get_cms_setting', (['"""PERMISSION"""'], {}), "('PERMISSION')\n", (889, 903), False, 'from cms.utils import get_cms_setting\n'), ((979, 1019), 'cms.utils.permissions.has_page_change_permission', 'has_page_change_permission', (['self.request'], {}), '(self.request)\n', (1005, 1019), False, 'from cms.utils.permissions import has_page_change_permission\n'), ((1708, 1769), 'django.core.urlresolvers.reverse', 'reverse', (['_banner_change_url'], {'args': '(page_banner_extension.pk,)'}), '(_banner_change_url, args=(page_banner_extension.pk,))\n', (1715, 1769), False, 'from django.core.urlresolvers import reverse, NoReverseMatch\n'), ((2233, 2249), 'django.utils.translation.ugettext_lazy', '_', (['"""Page banner"""'], {}), "('Page banner')\n", (2234, 2249), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1853, 1877), 'django.core.urlresolvers.reverse', 'reverse', (['_banner_add_url'], {}), '(_banner_add_url)\n', (1860, 1877), False, 'from django.core.urlresolvers import reverse, NoReverseMatch\n')]
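The toolbar above looks up a PageBannerExtension page extension that is defined elsewhere in the package. For orientation only, a hedged sketch of what such a django-cms page-extension model typically looks like; the field name here is invented and not taken from this repo:

# Hypothetical models.py sketch -- NOT the repo's actual model; the field is an assumption.
from cms.extensions import PageExtension, extension_pool
from django.db import models

class PageBannerExtension(PageExtension):
    banner_image = models.ImageField(upload_to='page_banners')  # assumed field

extension_pool.register(PageBannerExtension)  # makes the extension editable per page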
etri-edgeai/nn-comp-discblock
tasks/lm/models/lm.py
6e00a019c223508797ca91a7d5ffec7917b12c6d
import math
import torch
import torch.nn as nn
import torch.nn.functional as F


class RNNModel(nn.Module):
    """Container module with an encoder, a recurrent module, and a decoder."""

    def __init__(self, rnn_type, ntoken, ninp, nhid, nlayers, dropout=0.5,
                 tie_weights=False, encoder=None, decoder=None):
        super(RNNModel, self).__init__()
        self.ntoken = ntoken
        self.drop = nn.Dropout(dropout)
        if encoder is None:
            self.encoder = nn.Embedding(ntoken, ninp)
        else:
            self.encoder = encoder
        if rnn_type in ['LSTM', 'GRU']:
            self.rnn = getattr(nn, rnn_type)(ninp, nhid, nlayers, dropout=dropout)
        else:
            try:
                nonlinearity = {'RNN_TANH': 'tanh', 'RNN_RELU': 'relu'}[rnn_type]
            except KeyError:
                raise ValueError("""An invalid option for `--model` was supplied,
                                 options are ['LSTM', 'GRU', 'RNN_TANH' or 'RNN_RELU']""")
            self.rnn = nn.RNN(ninp, nhid, nlayers, nonlinearity=nonlinearity, dropout=dropout)
        if decoder is None:
            self.decoder = nn.Linear(nhid, ntoken)
        else:
            self.decoder = decoder

        # Optionally tie weights as in:
        # "Using the Output Embedding to Improve Language Models" (Press & Wolf 2016)
        # https://arxiv.org/abs/1608.05859
        # and
        # "Tying Word Vectors and Word Classifiers: A Loss Framework for Language Modeling" (Inan et al. 2016)
        # https://arxiv.org/abs/1611.01462
        if tie_weights:
            if nhid != ninp:
                raise ValueError('When using the tied flag, nhid must be equal to emsize')
            self.decoder.weight = self.encoder.weight

        self.rnn_type = rnn_type
        self.nhid = nhid
        self.nlayers = nlayers

    def init_weights(self):
        initrange = 0.1
        if self.encoder.__class__.__name__ == "Embedding":
            self.encoder.weight.data.uniform_(-initrange, initrange)
        else:
            self.encoder.init_weights()
        if self.decoder.__class__.__name__ == "Linear":
            self.decoder.bias.data.zero_()
            self.decoder.weight.data.uniform_(-initrange, initrange)
        else:
            self.decoder.init_weights()

    def forward(self, input, hidden):
        emb = self.drop(self.encoder(input))
        hidden_ = []
        for h in hidden:
            if isinstance(h, torch.LongTensor) or isinstance(h, torch.cuda.LongTensor):
                h = h.to(torch.float)
            hidden_.append(h)
        output, hidden = self.rnn(emb, tuple(hidden_))
        output = self.drop(output)
        decoded = self.decoder(output)
        decoded = decoded.view(-1, self.ntoken)
        return F.log_softmax(decoded, dim=1), hidden

    def init_hidden(self, bsz):
        weight = next(self.parameters())
        if self.rnn_type == 'LSTM':
            return (weight.new_zeros(self.nlayers, bsz, self.nhid),
                    weight.new_zeros(self.nlayers, bsz, self.nhid))
        else:
            return weight.new_zeros(self.nlayers, bsz, self.nhid)


# Temporarily leave PositionalEncoding module here. Will be moved somewhere else.
class PositionalEncoding(nn.Module):
    r"""Inject some information about the relative or absolute position of the tokens in the sequence.
        The positional encodings have the same dimension as the embeddings, so that the two can be summed.
        Here, we use sine and cosine functions of different frequencies.
    .. math::
        \text{PosEncoder}(pos, 2i) = sin(pos/10000^(2i/d_model))
        \text{PosEncoder}(pos, 2i+1) = cos(pos/10000^(2i/d_model))
        \text{where pos is the word position and i is the embed idx)
    Args:
        d_model: the embed dim (required).
        dropout: the dropout value (default=0.1).
        max_len: the max. length of the incoming sequence (default=5000).
    Examples:
        >>> pos_encoder = PositionalEncoding(d_model)
    """

    def __init__(self, d_model, dropout=0.1, max_len=5000):
        super(PositionalEncoding, self).__init__()
        self.dropout = nn.Dropout(p=dropout)

        pe = torch.zeros(max_len, d_model)
        position = torch.arange(0, max_len, dtype=torch.float).unsqueeze(1)
        div_term = torch.exp(torch.arange(0, d_model, 2).float() * (-math.log(10000.0) / d_model))
        pe[:, 0::2] = torch.sin(position * div_term)
        pe[:, 1::2] = torch.cos(position * div_term)
        pe = pe.unsqueeze(0).transpose(0, 1)
        self.register_buffer('pe', pe)

    def forward(self, x):
        r"""Inputs of forward function
        Args:
            x: the sequence fed to the positional encoder model (required).
        Shape:
            x: [sequence length, batch size, embed dim]
            output: [sequence length, batch size, embed dim]
        Examples:
            >>> output = pos_encoder(x)
        """
        x = x + self.pe[:x.size(0), :]
        return self.dropout(x)


class TransformerModel(nn.Module):
    """Container module with an encoder, a recurrent or transformer module, and a decoder."""

    def __init__(self, ntoken, ninp, nhead, nhid, nlayers, dropout=0.5,
                 encoder=None, decoder=None):
        super(TransformerModel, self).__init__()
        try:
            from torch.nn import TransformerEncoder, TransformerEncoderLayer
        except:
            raise ImportError('TransformerEncoder module does not exist in PyTorch 1.1 or lower.')
        self.model_type = 'Transformer'
        self.src_mask = None
        self.pos_encoder = PositionalEncoding(ninp, dropout)
        encoder_layers = TransformerEncoderLayer(ninp, nhead, nhid, dropout)
        self.transformer_encoder = TransformerEncoder(encoder_layers, nlayers)
        if encoder is None:
            self.encoder = nn.Embedding(ntoken, ninp)
        else:
            self.encoder = encoder
        self.ninp = ninp
        if decoder is None:
            self.decoder = nn.Linear(nhid, ntoken)
        else:
            self.decoder = decoder

    def _generate_square_subsequent_mask(self, sz):
        mask = (torch.triu(torch.ones(sz, sz)) == 1).transpose(0, 1)
        mask = mask.float().masked_fill(mask == 0, float('-inf')).masked_fill(mask == 1, float(0.0))
        return mask

    def init_weights(self):
        initrange = 0.1
        if self.encoder.__class__.__name__ == "Embedding":
            self.encoder.weight.data.uniform_(-initrange, initrange)
        else:
            self.encoder.init_weights()
        if self.decoder.__class__.__name__ == "Linear":
            self.decoder.bias.data.zero_()
            self.decoder.weight.data.uniform_(-initrange, initrange)
        else:
            self.decoder.init_weights()

    def forward(self, src, has_mask=True):
        if has_mask:
            device = src.device
            if self.src_mask is None or self.src_mask.size(0) != len(src):
                mask = self._generate_square_subsequent_mask(len(src)).to(device)
                self.src_mask = mask
        else:
            self.src_mask = None

        src = self.encoder(src) * math.sqrt(self.ninp)
        src = self.pos_encoder(src)
        output = self.transformer_encoder(src, self.src_mask)
        output = self.decoder(output)
        return F.log_softmax(output, dim=-1)
[((400, 419), 'torch.nn.Dropout', 'nn.Dropout', (['dropout'], {}), '(dropout)\n', (410, 419), True, 'import torch.nn as nn\n'), ((4154, 4175), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': 'dropout'}), '(p=dropout)\n', (4164, 4175), True, 'import torch.nn as nn\n'), ((4190, 4219), 'torch.zeros', 'torch.zeros', (['max_len', 'd_model'], {}), '(max_len, d_model)\n', (4201, 4219), False, 'import torch\n'), ((4417, 4447), 'torch.sin', 'torch.sin', (['(position * div_term)'], {}), '(position * div_term)\n', (4426, 4447), False, 'import torch\n'), ((4470, 4500), 'torch.cos', 'torch.cos', (['(position * div_term)'], {}), '(position * div_term)\n', (4479, 4500), False, 'import torch\n'), ((5655, 5706), 'torch.nn.TransformerEncoderLayer', 'TransformerEncoderLayer', (['ninp', 'nhead', 'nhid', 'dropout'], {}), '(ninp, nhead, nhid, dropout)\n', (5678, 5706), False, 'from torch.nn import TransformerEncoder, TransformerEncoderLayer\n'), ((5742, 5785), 'torch.nn.TransformerEncoder', 'TransformerEncoder', (['encoder_layers', 'nlayers'], {}), '(encoder_layers, nlayers)\n', (5760, 5785), False, 'from torch.nn import TransformerEncoder, TransformerEncoderLayer\n'), ((7316, 7345), 'torch.nn.functional.log_softmax', 'F.log_softmax', (['output'], {'dim': '(-1)'}), '(output, dim=-1)\n', (7329, 7345), True, 'import torch.nn.functional as F\n'), ((476, 502), 'torch.nn.Embedding', 'nn.Embedding', (['ntoken', 'ninp'], {}), '(ntoken, ninp)\n', (488, 502), True, 'import torch.nn as nn\n'), ((1014, 1085), 'torch.nn.RNN', 'nn.RNN', (['ninp', 'nhid', 'nlayers'], {'nonlinearity': 'nonlinearity', 'dropout': 'dropout'}), '(ninp, nhid, nlayers, nonlinearity=nonlinearity, dropout=dropout)\n', (1020, 1085), True, 'import torch.nn as nn\n'), ((1141, 1164), 'torch.nn.Linear', 'nn.Linear', (['nhid', 'ntoken'], {}), '(nhid, ntoken)\n', (1150, 1164), True, 'import torch.nn as nn\n'), ((2776, 2805), 'torch.nn.functional.log_softmax', 'F.log_softmax', (['decoded'], {'dim': '(1)'}), '(decoded, dim=1)\n', (2789, 2805), True, 'import torch.nn.functional as F\n'), ((5841, 5867), 'torch.nn.Embedding', 'nn.Embedding', (['ntoken', 'ninp'], {}), '(ntoken, ninp)\n', (5853, 5867), True, 'import torch.nn as nn\n'), ((5997, 6020), 'torch.nn.Linear', 'nn.Linear', (['nhid', 'ntoken'], {}), '(nhid, ntoken)\n', (6006, 6020), True, 'import torch.nn as nn\n'), ((7144, 7164), 'math.sqrt', 'math.sqrt', (['self.ninp'], {}), '(self.ninp)\n', (7153, 7164), False, 'import math\n'), ((4239, 4282), 'torch.arange', 'torch.arange', (['(0)', 'max_len'], {'dtype': 'torch.float'}), '(0, max_len, dtype=torch.float)\n', (4251, 4282), False, 'import torch\n'), ((4325, 4352), 'torch.arange', 'torch.arange', (['(0)', 'd_model', '(2)'], {}), '(0, d_model, 2)\n', (4337, 4352), False, 'import torch\n'), ((4365, 4382), 'math.log', 'math.log', (['(10000.0)'], {}), '(10000.0)\n', (4373, 4382), False, 'import math\n'), ((6150, 6168), 'torch.ones', 'torch.ones', (['sz', 'sz'], {}), '(sz, sz)\n', (6160, 6168), False, 'import torch\n')]
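A short usage sketch for the classes defined above, to be run in the same module (or after importing them). Shapes follow the code; vocabulary size and batch dimensions are made up:

import torch

ntoken, ninp, nhid, nlayers = 1000, 128, 256, 2
model = RNNModel('LSTM', ntoken, ninp, nhid, nlayers, dropout=0.5)

seq_len, bsz = 35, 8
x = torch.randint(0, ntoken, (seq_len, bsz))  # token ids, [seq_len, batch]
hidden = model.init_hidden(bsz)               # (h0, c0) for the LSTM
log_probs, hidden = model(x, hidden)          # log_probs: [seq_len * bsz, ntoken]

# Note: TransformerModel's decoder is nn.Linear(nhid, ntoken) while the encoder
# stack outputs ninp features, so pass nhid == ninp for the shapes to line up.
tfm = TransformerModel(ntoken, ninp, nhead=4, nhid=ninp, nlayers=2)
out = tfm(x)                                  # [seq_len, bsz, ntoken] log-probs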
Baibhabswain/pythonPrograms
fibo.py
38380174f22e73b766b98754b00cd78a56b4bf59
def main():
    a=input("The enter the first number :")
    b=input("The enter the second number :")
    range=input("Please enter the range")
    i=0;count=0;
    print a
    print b
    while count!=range:
        c=a+b
        count +=1
        print c
        a=b
        b=c
main()
[]
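A worked example of what the loop above computes (Python 2 semantics, where input() evaluates the typed numbers): each pass prints c = a + b and then shifts the pair forward, i.e. a Fibonacci-style recurrence seeded by the two inputs.

# Trace of main() with hypothetical inputs a=1, b=2, range=3:
#   print a      -> 1
#   print b      -> 2
#   iteration 1: c = 1 + 2 = 3, prints 3, then a=2, b=3
#   iteration 2: c = 2 + 3 = 5, prints 5, then a=3, b=5
#   iteration 3: c = 3 + 5 = 8, prints 8; count == range, loop ends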
gghf-service/gghf-api
scrapper/playstation/__init__.py
9740700d1dd160e90fc949f9c3e652c3483a49aa
from scrapper.playstation.spider import main
[]
motakine/ILAS_slackbot
plugins/number.py
ddfb34db1cddcb459fef34cfc04c498d6f85d135
import slackbot.bot
import random

answer = random.randint(1, 50)
max = 50


def number(num):
    '''Check a "number" guess.

    Args:
        num (int): the number to check

    Returns:
        str:
            num greater than answer: 'Too large'
            num less than answer: 'Too small'
            num equal to answer: 'Correct!', and a new game starts
            anything else: 'Can I kick you?.'
        0 is a mysterious number.
        If max is 1 and you answer 2 or more, you are told that only 1 is allowed.
    '''
    global answer
    global max
    # Build the reply according to the input; start a new game if correct
    if num == 0:
        return ' is a mysterious number...'
    elif num < max + 1:
        if num > answer:
            return ' is too large. The answer is more small.'
        elif num < answer:
            return ' is too small. The answer is more large.'
        elif num == answer:
            answer = random.randint(1, max)
            return ' is correct! :tada: Now, start a new game.'
    elif max == 1:
        return '? Can I kick you? Only 1.'
    return '? Can I kick you? 1 to %d.' % max


def number_set(num):
    '''Check a "number set" request.

    Args:
        num (int): the number to set

    Returns:
        str:
        Changes max (the largest value the answer can take). Default is 50.
        Setting it to 1 just gets you asked whether you are serious.
        The mysterious number is 0.
    '''
    global answer
    global max
    # Build the reply according to the input, update max, and start a new game
    if num == 0:
        return 'There is a mysterious number... It is '
    elif num == 1:
        max = 1
        answer = random.randint(1, max)
        return '1? Really? Then, the maximum of the answer is '
    max = num
    answer = random.randint(1, max)
    return 'OK. Then, the maximum of the answer is '


@slackbot.bot.respond_to(r'^number\s+set\s+(\d+)')
def resp_set(message, digitstr):
    '''Reply to messages of the form "number set (digits)".

    Runs the number-set check on the (digits) part and replies.

    Args:
    '''
    # number set check
    nbs = number_set(int(digitstr))
    # Build the reply string
    reply = '{0:s}{1:s}.'.format(nbs, digitstr)
    message.reply(reply)


@slackbot.bot.respond_to(r'^number\s+(\d+)')
def resp_number(message, digitstr):
    '''Reply to messages of the form "number (digits)".

    Runs the number check on the (digits) part and replies with the verdict.

    Args:
        message (slackbot.dispatcher.Message): slack message
        digitstr (str): string holding the number
    '''
    # number check
    nb = number(int(digitstr))
    # Build the reply string
    reply = '{0:s}{1:s}'.format(digitstr, nb)
    message.reply(reply)


@slackbot.bot.respond_to(r'^number\s+giveup')
def resp_giveup(message):
    '''Reply to "number giveup".

    Shows the correct answer, sets a new one, and starts a new game.

    Args:
    '''
    global answer
    global max
    # Keep the answer to show, then set the answer for the next game
    showanswer = answer
    answer = random.randint(1, max)
    # Build the reply string
    message.reply('Hahaha! Failed! :ghost: The answer is %d. Start a new game.' % showanswer)
    message.react('stuck_out_tongue_winking_eye')
[]
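The plugin above only defines handlers; the slackbot framework discovers and runs them. A minimal hedged sketch of a runner, assuming a standard slackbot setup (a slackbot_settings.py with API_TOKEN set and PLUGINS = ['plugins'] so plugins/number.py is loaded):

# run.py -- hypothetical entry point; slackbot_settings.py is assumed to exist.
from slackbot.bot import Bot

def main():
    bot = Bot()
    bot.run()  # the bot then answers "number 12", "number set 100", "number giveup"

if __name__ == '__main__':
    main()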
hatal175/pytype
pytype/analyze.py
22150dd56c2a11f3d385a1cbb28eed985df31d72
"""Code for checking and inferring types.""" import collections import logging import re import subprocess from typing import Any, Dict, Union from pytype import abstract from pytype import abstract_utils from pytype import convert_structural from pytype import debug from pytype import function from pytype import metrics from pytype import output from pytype import special_builtins from pytype import state as frame_state from pytype import vm from pytype.overlays import typing_overlay from pytype.pytd import builtins from pytype.pytd import escape from pytype.pytd import optimize from pytype.pytd import pytd from pytype.pytd import pytd_utils from pytype.pytd import visitors from pytype.typegraph import cfg log = logging.getLogger(__name__) # Most interpreter functions (including lambdas) need to be analyzed as # stand-alone functions. The exceptions are comprehensions and generators, which # have names like "<listcomp>" and "<genexpr>". _SKIP_FUNCTION_RE = re.compile("<(?!lambda).+>$") CallRecord = collections.namedtuple( "CallRecord", ["node", "function", "signatures", "positional_arguments", "keyword_arguments", "return_value"]) # How deep to follow call chains: INIT_MAXIMUM_DEPTH = 4 # during module loading MAXIMUM_DEPTH = 3 # during non-quick analysis QUICK_CHECK_MAXIMUM_DEPTH = 2 # during quick checking QUICK_INFER_MAXIMUM_DEPTH = 1 # during quick inference class _Initializing: pass class CallTracer(vm.VirtualMachine): """Virtual machine that records all function calls. Attributes: exitpoint: A CFG node representing the program exit. Needs to be set before analyze_types. """ _CONSTRUCTORS = ("__new__", "__init__") def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._unknowns = {} self._calls = set() self._method_calls = set() # Used by init_class. self._instance_cache: Dict[Any, Union[_Initializing, cfg.Variable]] = {} # Used by call_init. Can differ from _instance_cache because we also call # __init__ on classes not initialized via init_class. self._initialized_instances = set() self._interpreter_functions = [] self._interpreter_classes = [] self._analyzed_functions = set() self._analyzed_classes = set() self._generated_classes = {} self.exitpoint = None def create_varargs(self, node): value = abstract.Instance(self.convert.tuple_type, self) value.merge_instance_type_parameter( node, abstract_utils.T, self.convert.create_new_unknown(node)) return value.to_variable(node) def create_kwargs(self, node): key_type = self.convert.primitive_class_instances[str].to_variable(node) value_type = self.convert.create_new_unknown(node) kwargs = abstract.Instance(self.convert.dict_type, self) kwargs.merge_instance_type_parameter(node, abstract_utils.K, key_type) kwargs.merge_instance_type_parameter(node, abstract_utils.V, value_type) return kwargs.to_variable(node) def create_method_arguments(self, node, method, use_defaults=False): """Create arguments for the given method. Creates Unknown objects as arguments for the given method. Note that we don't need to take parameter annotations into account as InterpreterFunction.call() will take care of that. Args: node: The current node. method: An abstract.InterpreterFunction. use_defaults: Whether to use parameter defaults for arguments. When True, unknown arguments are created with force=False, as it is fine to use Unsolvable rather than Unknown objects for type-checking defaults. Returns: A tuple of a node and a function.Args object. 
""" args = [] num_posargs = method.argcount(node) num_posargs_no_default = num_posargs - len(method.defaults) for i in range(num_posargs): default_idx = i - num_posargs_no_default if use_defaults and default_idx >= 0: arg = method.defaults[default_idx] else: arg = self.convert.create_new_unknown(node, force=not use_defaults) args.append(arg) kws = {} for key in method.signature.kwonly_params: if use_defaults and key in method.kw_defaults: kws[key] = method.kw_defaults[key] else: kws[key] = self.convert.create_new_unknown(node, force=not use_defaults) starargs = self.create_varargs(node) if method.has_varargs() else None starstarargs = self.create_kwargs(node) if method.has_kwargs() else None return node, function.Args(posargs=tuple(args), namedargs=kws, starargs=starargs, starstarargs=starstarargs) def call_function_with_args(self, node, val, args): """Call a function. Args: node: The given node. val: A cfg.Binding containing the function. args: A function.Args object. Returns: A tuple of (1) a node and (2) a cfg.Variable of the return value. """ fvar = val.AssignToNewVariable(node) with val.data.record_calls(): new_node, ret = self.call_function_in_frame(node, fvar, *args) return new_node, ret def call_function_in_frame(self, node, var, args, kwargs, starargs, starstarargs): frame = frame_state.SimpleFrame(node=node) self.push_frame(frame) log.info("Analyzing %r", [v.name for v in var.data]) state = frame_state.FrameState.init(node, self) state, ret = self.call_function_with_state( state, var, args, kwargs, starargs, starstarargs) self.pop_frame(frame) return state.node, ret def _maybe_fix_classmethod_cls_arg(self, node, cls, func, args): sig = func.signature if (args.posargs and sig.param_names and (sig.param_names[0] not in sig.annotations)): # fix "cls" parameter return args._replace( posargs=(cls.AssignToNewVariable(node),) + args.posargs[1:]) else: return args def maybe_analyze_method(self, node, val, cls=None): method = val.data fname = val.data.name if isinstance(method, abstract.INTERPRETER_FUNCTION_TYPES): self._analyzed_functions.add(method.get_first_opcode()) if (not self.options.analyze_annotated and (method.signature.has_return_annotation or method.has_overloads) and fname.rsplit(".", 1)[-1] not in self._CONSTRUCTORS): log.info("%r has annotations, not analyzing further.", fname) else: for f in method.iter_signature_functions(): node, args = self.create_method_arguments(node, f) if f.is_classmethod and cls: args = self._maybe_fix_classmethod_cls_arg(node, cls, f, args) node, _ = self.call_function_with_args(node, val, args) return node def _call_with_fake_args(self, node0, funcv): """Attempt to call the given function with made-up arguments.""" # TODO(tsudol): If expand this beyond __init__, need to handle # DictKeyMissing nodes = [] rets = [] for funcb in funcv.bindings: func = funcb.data log.info("Trying %s with fake arguments", func) if isinstance(func, abstract.INTERPRETER_FUNCTION_TYPES): node1, args = self.create_method_arguments(node0, func) # Once the args are generated, try calling the function. # call_function will check fallback_to_unsolvable if a DictKeyMissing or # FailedFunctionCall error is raised when the target function is called. # DictKeyMissing doesn't trigger call_with_fake_args, so that shouldn't # be raised again, and generating fake arguments should avoid any # FailedFunctionCall errors. To prevent an infinite recursion loop, set # fallback_to_unsolvable to False just in case. 
# This means any additional errors that may be raised will be passed to # the call_function that called this method in the first place. node2, ret = self.call_function(node1, funcb.AssignToNewVariable(), args, fallback_to_unsolvable=False) nodes.append(node2) rets.append(ret) if nodes: ret = self.join_variables(node0, rets) node = self.join_cfg_nodes(nodes) if ret.bindings: return node, ret else: node = node0 log.info("Unable to generate fake arguments for %s", funcv) return node, self.new_unsolvable(node) def analyze_method_var(self, node0, name, var, cls=None): log.info("Analyzing %s", name) node1 = node0.ConnectNew(name) for val in var.bindings: node2 = self.maybe_analyze_method(node1, val, cls) node2.ConnectTo(node0) return node0 def bind_method(self, node, name, methodvar, instance_var): bound = self.program.NewVariable() for m in methodvar.Data(node): if isinstance(m, special_builtins.ClassMethodInstance): m = m.func.data[0] is_cls = True else: is_cls = (m.isinstance_InterpreterFunction() and m.is_classmethod) bound.AddBinding(m.property_get(instance_var, is_cls), [], node) return bound def _instantiate_binding(self, node0, cls, container): """Instantiate a class binding.""" node1, new = cls.data.get_own_new(node0, cls) if not new or ( any(not isinstance(f, abstract.InterpreterFunction) for f in new.data)): # This assumes that any inherited __new__ method defined in a pyi file # returns an instance of the current class. return node0, cls.data.instantiate(node0, container=container) instance = self.program.NewVariable() nodes = [] for b in new.bindings: self._analyzed_functions.add(b.data.get_first_opcode()) node2, args = self.create_method_arguments(node1, b.data) args = self._maybe_fix_classmethod_cls_arg(node0, cls, b.data, args) node3 = node2.ConnectNew() node4, ret = self.call_function_with_args(node3, b, args) instance.PasteVariable(ret) nodes.append(node4) return self.join_cfg_nodes(nodes), instance def _instantiate_var(self, node, clsv, container): """Build an (dummy) instance from a class, for analyzing it.""" n = self.program.NewVariable() for cls in clsv.Bindings(node, strict=False): node, var = self._instantiate_binding(node, cls, container) n.PasteVariable(var) return node, n def _mark_maybe_missing_members(self, values): """Set maybe_missing_members to True on these values and their type params. Args: values: A list of BaseValue objects. On every instance among the values, recursively set maybe_missing_members to True on the instance and its type parameters. """ values = list(values) seen = set() while values: v = values.pop(0) if v not in seen: seen.add(v) if isinstance(v, abstract.SimpleValue): v.maybe_missing_members = True for child in v.instance_type_parameters.values(): values.extend(child.data) def init_class(self, node, cls, container=None, extra_key=None): """Instantiate a class, and also call __init__. Calling __init__ can be expensive, so this method caches its created instances. If you don't need __init__ called, use cls.instantiate instead. Args: node: The current node. cls: The class to instantiate. container: Optionally, a container to pass to the class's instantiate() method, so that type parameters in the container's template are instantiated to TypeParameterInstance. extra_key: Optionally, extra information about the location at which the instantion occurs. By default, this method keys on the current opcode and the class, which sometimes isn't enough to disambiguate callers that shouldn't get back the same cached instance. 
Returns: A tuple of node and instance variable. """ key = (self.frame and self.frame.current_opcode, extra_key, cls) instance = self._instance_cache.get(key) if not instance or isinstance(instance, _Initializing): clsvar = cls.to_variable(node) node, instance = self._instantiate_var(node, clsvar, container) if key in self._instance_cache: # We've encountered a recursive pattern such as # class A: # def __init__(self, x: "A"): ... # Calling __init__ again would lead to an infinite loop, so # we instead create an incomplete instance that will be # overwritten later. Note that we have to create a new # instance rather than using the one that we're already in # the process of initializing - otherwise, setting # maybe_missing_members to True would cause pytype to ignore # all attribute errors on self in __init__. self._mark_maybe_missing_members(instance.data) else: self._instance_cache[key] = _Initializing() node = self.call_init(node, instance) self._instance_cache[key] = instance return node, instance def _call_method(self, node, binding, method_name): node, method = self.attribute_handler.get_attribute( node, binding.data.get_class(), method_name, binding) if method: bound_method = self.bind_method( node, method_name, method, binding.AssignToNewVariable()) node = self.analyze_method_var(node, method_name, bound_method) return node def _call_init_on_binding(self, node, b): if isinstance(b.data, abstract.SimpleValue): for param in b.data.instance_type_parameters.values(): node = self.call_init(node, param) node = self._call_method(node, b, "__init__") cls = b.data.get_class() if isinstance(cls, abstract.InterpreterClass): # Call any additional initalizers the class has registered. for method in cls.additional_init_methods: node = self._call_method(node, b, method) return node def call_init(self, node, instance): # Call __init__ on each binding. for b in instance.bindings: if b.data in self._initialized_instances: continue self._initialized_instances.add(b.data) node = self._call_init_on_binding(node, b) return node def reinitialize_if_initialized(self, node, instance): if instance in self._initialized_instances: self._call_init_on_binding(node, instance.to_binding(node)) def analyze_class(self, node, val): self._analyzed_classes.add(val.data) node, instance = self.init_class(node, val.data) good_instances = [b for b in instance.bindings if val.data == b.data.cls] if not good_instances: # __new__ returned something that's not an instance of our class. instance = val.data.instantiate(node) node = self.call_init(node, instance) elif len(good_instances) != len(instance.bindings): # __new__ returned some extra possibilities we don't need. instance = self.join_bindings(node, good_instances) for instance_value in instance.data: val.data.register_canonical_instance(instance_value) for name, methodvar in sorted(val.data.members.items()): if name in self._CONSTRUCTORS: continue # We already called this method during initialization. b = self.bind_method(node, name, methodvar, instance) node = self.analyze_method_var(node, name, b, val) return node def analyze_function(self, node0, val): if val.data.is_attribute_of_class: # We'll analyze this function as part of a class. log.info("Analyze functions: Skipping class method %s", val.data.name) else: node1 = node0.ConnectNew(val.data.name) node2 = self.maybe_analyze_method(node1, val) node2.ConnectTo(node0) return node0 def _should_analyze_as_interpreter_function(self, data): # We record analyzed functions by opcode rather than function object. 
The # two ways of recording are equivalent except for closures, which are # re-generated when the variables they close over change, but we don't want # to re-analyze them. return (isinstance(data, abstract.InterpreterFunction) and not data.is_overload and not data.is_class_builder and data.get_first_opcode() not in self._analyzed_functions and not _SKIP_FUNCTION_RE.search(data.name)) def analyze_toplevel(self, node, defs): for name, var in sorted(defs.items()): # sort, for determinicity if not self._is_typing_member(name, var): for value in var.bindings: if isinstance(value.data, abstract.InterpreterClass): new_node = self.analyze_class(node, value) elif (isinstance(value.data, abstract.INTERPRETER_FUNCTION_TYPES) and not value.data.is_overload): new_node = self.analyze_function(node, value) else: continue if new_node is not node: new_node.ConnectTo(node) # Now go through all functions and classes we haven't analyzed yet. # These are typically hidden under a decorator. # Go through classes first so that the `is_attribute_of_class` will # be set for all functions in class. for c in self._interpreter_classes: for value in c.bindings: if (isinstance(value.data, abstract.InterpreterClass) and value.data not in self._analyzed_classes): node = self.analyze_class(node, value) for f in self._interpreter_functions: for value in f.bindings: if self._should_analyze_as_interpreter_function(value.data): node = self.analyze_function(node, value) return node def analyze(self, node, defs, maximum_depth): assert not self.frame self.maximum_depth = maximum_depth self._analyzing = True node = node.ConnectNew(name="Analyze") return self.analyze_toplevel(node, defs) def trace_unknown(self, name, unknown_binding): self._unknowns[name] = unknown_binding def trace_call(self, node, func, sigs, posargs, namedargs, result): """Add an entry into the call trace. Args: node: The CFG node right after this function call. func: A cfg.Binding of a function that was called. sigs: The signatures that the function might have been called with. posargs: The positional arguments, an iterable over cfg.Value. namedargs: The keyword arguments, a dict mapping str to cfg.Value. result: A Variable of the possible result values. """ log.debug("Logging call to %r with %d args, return %r", func, len(posargs), result) args = tuple(posargs) kwargs = tuple((namedargs or {}).items()) record = CallRecord(node, func, sigs, args, kwargs, result) if isinstance(func.data, abstract.BoundPyTDFunction): self._method_calls.add(record) elif isinstance(func.data, abstract.PyTDFunction): self._calls.add(record) def trace_functiondef(self, f): self._interpreter_functions.append(f) def trace_classdef(self, c): self._interpreter_classes.append(c) def trace_namedtuple(self, nt): # All namedtuple instances with the same name are equal, so it's fine to # overwrite previous instances. self._generated_classes[nt.name] = nt def pytd_classes_for_unknowns(self): classes = [] for name, val in self._unknowns.items(): if val in val.variable.Filter(self.exitpoint, strict=False): classes.append(val.data.to_structural_def(self.exitpoint, name)) return classes def pytd_for_types(self, defs): # If a variable is annotated, we'll always output that type. 
annotated_names = set() data = [] pytd_convert = self.convert.pytd_convert annots = abstract_utils.get_annotations_dict(defs) for name, t in pytd_convert.annotations_to_instance_types( self.exitpoint, annots): annotated_names.add(name) data.append(pytd.Constant(name, t)) for name, var in defs.items(): if (name in output.TOP_LEVEL_IGNORE or name in annotated_names or self._is_typing_member(name, var)): continue options = var.FilteredData(self.exitpoint, strict=False) if (len(options) > 1 and not all(isinstance(o, abstract.FUNCTION_TYPES) for o in options)): # It's ambiguous whether this is a type, a function or something # else, so encode it as a constant. combined_types = pytd_utils.JoinTypes(t.to_type(self.exitpoint) for t in options) data.append(pytd.Constant(name, combined_types)) elif options: for option in options: try: d = option.to_pytd_def(self.exitpoint, name) # Deep definition except NotImplementedError: d = option.to_type(self.exitpoint) # Type only if isinstance(d, pytd.NothingType): if isinstance(option, abstract.Empty): d = pytd.AnythingType() else: assert isinstance(option, typing_overlay.NoReturn) if isinstance(d, pytd.Type) and not isinstance(d, pytd.TypeParameter): data.append(pytd.Constant(name, d)) else: data.append(d) else: log.error("No visible options for %s", name) data.append(pytd.Constant(name, pytd.AnythingType())) return pytd_utils.WrapTypeDeclUnit("inferred", data) @staticmethod def _call_traces_to_function(call_traces, name_transform=lambda x: x): funcs = collections.defaultdict(pytd_utils.OrderedSet) for node, func, sigs, args, kws, retvar in call_traces: # The lengths may be different in the presence of optional and kw args. arg_names = max((sig.get_positional_names() for sig in sigs), key=len) for i in range(len(arg_names)): if not isinstance(func.data, abstract.BoundFunction) or i > 0: arg_names[i] = function.argname(i) arg_types = (a.data.to_type(node) for a in args) ret = pytd_utils.JoinTypes(t.to_type(node) for t in retvar.data) starargs = None starstarargs = None funcs[func.data.name].add(pytd.Signature( tuple(pytd.Parameter(n, t, False, False, None) for n, t in zip(arg_names, arg_types)) + tuple(pytd.Parameter(name, a.data.to_type(node), False, False, None) for name, a in kws), starargs, starstarargs, ret, exceptions=(), template=())) functions = [] for name, signatures in funcs.items(): functions.append(pytd.Function(name_transform(name), tuple(signatures), pytd.MethodTypes.METHOD)) return functions def _is_typing_member(self, name, var): for module_name in ("typing", "typing_extensions"): if module_name not in self.loaded_overlays: continue module = self.loaded_overlays[module_name].get_module(name) if name in module.members and module.members[name].data == var.data: return True return False def pytd_functions_for_call_traces(self): return self._call_traces_to_function(self._calls, escape.pack_partial) def pytd_classes_for_call_traces(self): class_to_records = collections.defaultdict(list) for call_record in self._method_calls: args = call_record.positional_arguments if not any(isinstance(a.data, abstract.Unknown) for a in args): # We don't need to record call signatures that don't involve # unknowns - there's nothing to solve for. continue cls = args[0].data.get_class() if isinstance(cls, abstract.PyTDClass): class_to_records[cls].append(call_record) classes = [] for cls, call_records in class_to_records.items(): full_name = cls.module + "." 
+ cls.name if cls.module else cls.name classes.append(pytd.Class( name=escape.pack_partial(full_name), metaclass=None, parents=(pytd.NamedType("builtins.object"),), # not used in solver methods=tuple(self._call_traces_to_function(call_records)), constants=(), classes=(), decorators=(), slots=None, template=(), )) return classes def pytd_classes_for_namedtuple_instances(self): return tuple(v.generate_ast() for v in self._generated_classes.values()) def compute_types(self, defs): classes = (tuple(self.pytd_classes_for_unknowns()) + tuple(self.pytd_classes_for_call_traces()) + self.pytd_classes_for_namedtuple_instances()) functions = tuple(self.pytd_functions_for_call_traces()) aliases = () # aliases are instead recorded as constants ty = pytd_utils.Concat( self.pytd_for_types(defs), pytd_utils.CreateModule("unknowns", classes=classes, functions=functions, aliases=aliases)) ty = ty.Visit(optimize.CombineReturnsAndExceptions()) ty = ty.Visit(optimize.PullInMethodClasses()) ty = ty.Visit(visitors.DefaceUnresolved( [ty, self.loader.concat_all()], escape.UNKNOWN)) return ty.Visit(visitors.AdjustTypeParameters()) def _check_return(self, node, actual, formal): if not self.options.report_errors: return True views = abstract_utils.get_views([actual], node) # Check for typevars in the return value first, since bad_matches # expects not to get any. bad = [view for view in views if actual in view and view[actual].data.formal] if not bad: bad = self.matcher.bad_matches(actual, formal, node) if bad: self.errorlog.bad_return_type( self.frames, node, formal, actual, bad) return not bad def check_types(src, filename, errorlog, options, loader, deep=True, init_maximum_depth=INIT_MAXIMUM_DEPTH, maximum_depth=None, **kwargs): """Verify the Python code.""" tracer = CallTracer(errorlog=errorlog, options=options, generate_unknowns=False, loader=loader, **kwargs) loc, defs = tracer.run_program(src, filename, init_maximum_depth) snapshotter = metrics.get_metric("memory", metrics.Snapshot) snapshotter.take_snapshot("analyze:check_types:tracer") if deep: if maximum_depth is None: maximum_depth = ( QUICK_CHECK_MAXIMUM_DEPTH if options.quick else MAXIMUM_DEPTH) tracer.analyze(loc, defs, maximum_depth=maximum_depth) snapshotter.take_snapshot("analyze:check_types:post") _maybe_output_debug(options, tracer.program) def infer_types(src, errorlog, options, loader, filename=None, deep=True, init_maximum_depth=INIT_MAXIMUM_DEPTH, show_library_calls=False, maximum_depth=None, tracer_vm=None, **kwargs): """Given Python source return its types. Args: src: A string containing Python source code. errorlog: Where error messages go. Instance of errors.ErrorLog. options: config.Options object loader: A load_pytd.Loader instance to load PYI information. filename: Filename of the program we're parsing. deep: If True, analyze all functions, even the ones not called by the main execution flow. init_maximum_depth: Depth of analysis during module loading. show_library_calls: If True, call traces are kept in the output. maximum_depth: Depth of the analysis. Default: unlimited. tracer_vm: An instance of CallTracer, in case the caller wants to instantiate and retain the vm used for type inference. **kwargs: Additional parameters to pass to vm.VirtualMachine Returns: A tuple of (ast: TypeDeclUnit, builtins: TypeDeclUnit) Raises: AssertionError: In case of a bad parameter combination. """ # If the caller has passed in a vm, use that. 
if tracer_vm: assert isinstance(tracer_vm, CallTracer) tracer = tracer_vm else: tracer = CallTracer(errorlog=errorlog, options=options, generate_unknowns=options.protocols, store_all_calls=not deep, loader=loader, **kwargs) loc, defs = tracer.run_program(src, filename, init_maximum_depth) log.info("===Done running definitions and module-level code===") snapshotter = metrics.get_metric("memory", metrics.Snapshot) snapshotter.take_snapshot("analyze:infer_types:tracer") if deep: if maximum_depth is None: if not options.quick: maximum_depth = MAXIMUM_DEPTH elif options.analyze_annotated: # Since there's no point in analyzing annotated functions for inference, # the presence of this option means that the user wants checking, too. maximum_depth = QUICK_CHECK_MAXIMUM_DEPTH else: maximum_depth = QUICK_INFER_MAXIMUM_DEPTH tracer.exitpoint = tracer.analyze(loc, defs, maximum_depth) else: tracer.exitpoint = loc snapshotter.take_snapshot("analyze:infer_types:post") ast = tracer.compute_types(defs) ast = tracer.loader.resolve_ast(ast) if tracer.has_unknown_wildcard_imports or any( a in defs for a in abstract_utils.DYNAMIC_ATTRIBUTE_MARKERS): if "__getattr__" not in ast: ast = pytd_utils.Concat( ast, builtins.GetDefaultAst(options.python_version)) # If merged with other if statement, triggers a ValueError: Unresolved class # when attempts to load from the protocols file if options.protocols: protocols_pytd = tracer.loader.import_name("protocols") else: protocols_pytd = None builtins_pytd = tracer.loader.concat_all() # Insert type parameters, where appropriate ast = ast.Visit(visitors.CreateTypeParametersForSignatures()) if options.protocols: log.info("=========== PyTD to solve =============\n%s", pytd_utils.Print(ast)) ast = convert_structural.convert_pytd(ast, builtins_pytd, protocols_pytd) elif not show_library_calls: log.info("Solving is turned off. Discarding call traces.") # Rename remaining "~unknown" to "?" ast = ast.Visit(visitors.RemoveUnknownClasses()) # Remove "~list" etc.: ast = convert_structural.extract_local(ast) _maybe_output_debug(options, tracer.program) return ast, builtins_pytd def _maybe_output_debug(options, program): """Maybe emit debugging output.""" if options.output_cfg or options.output_typegraph: dot = debug.program_to_dot(program, set([]), bool(options.output_cfg)) svg_file = options.output_cfg or options.output_typegraph proc = subprocess.Popen(["/usr/bin/dot", "-T", "svg", "-o", svg_file], stdin=subprocess.PIPE, universal_newlines=True) (_, stderr) = proc.communicate(dot) if stderr: log.info("Failed to create %s: %s", svg_file, stderr) if options.output_debug: text = debug.program_to_text(program) if options.output_debug == "-": log.info("=========== Program Dump =============\n%s", text) else: with options.open_function(options.output_debug, "w") as fi: fi.write(text)
[((726, 753), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (743, 753), False, 'import logging\n'), ((976, 1005), 're.compile', 're.compile', (['"""<(?!lambda).+>$"""'], {}), "('<(?!lambda).+>$')\n", (986, 1005), False, 'import re\n'), ((1021, 1158), 'collections.namedtuple', 'collections.namedtuple', (['"""CallRecord"""', "['node', 'function', 'signatures', 'positional_arguments',\n 'keyword_arguments', 'return_value']"], {}), "('CallRecord', ['node', 'function', 'signatures',\n 'positional_arguments', 'keyword_arguments', 'return_value'])\n", (1043, 1158), False, 'import collections\n'), ((26132, 26178), 'pytype.metrics.get_metric', 'metrics.get_metric', (['"""memory"""', 'metrics.Snapshot'], {}), "('memory', metrics.Snapshot)\n", (26150, 26178), False, 'from pytype import metrics\n'), ((28221, 28267), 'pytype.metrics.get_metric', 'metrics.get_metric', (['"""memory"""', 'metrics.Snapshot'], {}), "('memory', metrics.Snapshot)\n", (28239, 28267), False, 'from pytype import metrics\n'), ((2395, 2443), 'pytype.abstract.Instance', 'abstract.Instance', (['self.convert.tuple_type', 'self'], {}), '(self.convert.tuple_type, self)\n', (2412, 2443), False, 'from pytype import abstract\n'), ((2770, 2817), 'pytype.abstract.Instance', 'abstract.Instance', (['self.convert.dict_type', 'self'], {}), '(self.convert.dict_type, self)\n', (2787, 2817), False, 'from pytype import abstract\n'), ((5308, 5342), 'pytype.state.SimpleFrame', 'frame_state.SimpleFrame', ([], {'node': 'node'}), '(node=node)\n', (5331, 5342), True, 'from pytype import state as frame_state\n'), ((5439, 5478), 'pytype.state.FrameState.init', 'frame_state.FrameState.init', (['node', 'self'], {}), '(node, self)\n', (5466, 5478), True, 'from pytype import state as frame_state\n'), ((19753, 19794), 'pytype.abstract_utils.get_annotations_dict', 'abstract_utils.get_annotations_dict', (['defs'], {}), '(defs)\n', (19788, 19794), False, 'from pytype import abstract_utils\n'), ((21394, 21439), 'pytype.pytd.pytd_utils.WrapTypeDeclUnit', 'pytd_utils.WrapTypeDeclUnit', (['"""inferred"""', 'data'], {}), "('inferred', data)\n", (21421, 21439), False, 'from pytype.pytd import pytd_utils\n'), ((21542, 21588), 'collections.defaultdict', 'collections.defaultdict', (['pytd_utils.OrderedSet'], {}), '(pytd_utils.OrderedSet)\n', (21565, 21588), False, 'import collections\n'), ((23242, 23271), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (23265, 23271), False, 'import collections\n'), ((25286, 25326), 'pytype.abstract_utils.get_views', 'abstract_utils.get_views', (['[actual]', 'node'], {}), '([actual], node)\n', (25310, 25326), False, 'from pytype import abstract_utils\n'), ((29572, 29616), 'pytype.pytd.visitors.CreateTypeParametersForSignatures', 'visitors.CreateTypeParametersForSignatures', ([], {}), '()\n', (29614, 29616), False, 'from pytype.pytd import visitors\n'), ((29748, 29815), 'pytype.convert_structural.convert_pytd', 'convert_structural.convert_pytd', (['ast', 'builtins_pytd', 'protocols_pytd'], {}), '(ast, builtins_pytd, protocols_pytd)\n', (29779, 29815), False, 'from pytype import convert_structural\n'), ((30437, 30553), 'subprocess.Popen', 'subprocess.Popen', (["['/usr/bin/dot', '-T', 'svg', '-o', svg_file]"], {'stdin': 'subprocess.PIPE', 'universal_newlines': '(True)'}), "(['/usr/bin/dot', '-T', 'svg', '-o', svg_file], stdin=\n subprocess.PIPE, universal_newlines=True)\n", (30453, 30553), False, 'import subprocess\n'), ((30730, 30760), 'pytype.debug.program_to_text', 
'debug.program_to_text', (['program'], {}), '(program)\n', (30751, 30760), False, 'from pytype import debug\n'), ((24780, 24874), 'pytype.pytd.pytd_utils.CreateModule', 'pytd_utils.CreateModule', (['"""unknowns"""'], {'classes': 'classes', 'functions': 'functions', 'aliases': 'aliases'}), "('unknowns', classes=classes, functions=functions,\n aliases=aliases)\n", (24803, 24874), False, 'from pytype.pytd import pytd_utils\n'), ((24922, 24960), 'pytype.pytd.optimize.CombineReturnsAndExceptions', 'optimize.CombineReturnsAndExceptions', ([], {}), '()\n', (24958, 24960), False, 'from pytype.pytd import optimize\n'), ((24980, 25010), 'pytype.pytd.optimize.PullInMethodClasses', 'optimize.PullInMethodClasses', ([], {}), '()\n', (25008, 25010), False, 'from pytype.pytd import optimize\n'), ((25134, 25165), 'pytype.pytd.visitors.AdjustTypeParameters', 'visitors.AdjustTypeParameters', ([], {}), '()\n', (25163, 25165), False, 'from pytype.pytd import visitors\n'), ((29715, 29736), 'pytype.pytd.pytd_utils.Print', 'pytd_utils.Print', (['ast'], {}), '(ast)\n', (29731, 29736), False, 'from pytype.pytd import pytd_utils\n'), ((30041, 30078), 'pytype.convert_structural.extract_local', 'convert_structural.extract_local', (['ast'], {}), '(ast)\n', (30073, 30078), False, 'from pytype import convert_structural\n'), ((19941, 19963), 'pytype.pytd.pytd.Constant', 'pytd.Constant', (['name', 't'], {}), '(name, t)\n', (19954, 19963), False, 'from pytype.pytd import pytd\n'), ((29168, 29214), 'pytype.pytd.builtins.GetDefaultAst', 'builtins.GetDefaultAst', (['options.python_version'], {}), '(options.python_version)\n', (29190, 29214), False, 'from pytype.pytd import builtins\n'), ((29971, 30002), 'pytype.pytd.visitors.RemoveUnknownClasses', 'visitors.RemoveUnknownClasses', ([], {}), '()\n', (30000, 30002), False, 'from pytype.pytd import visitors\n'), ((20579, 20614), 'pytype.pytd.pytd.Constant', 'pytd.Constant', (['name', 'combined_types'], {}), '(name, combined_types)\n', (20592, 20614), False, 'from pytype.pytd import pytd\n'), ((21938, 21957), 'pytype.function.argname', 'function.argname', (['i'], {}), '(i)\n', (21954, 21957), False, 'from pytype import function\n'), ((23895, 23925), 'pytype.pytd.escape.pack_partial', 'escape.pack_partial', (['full_name'], {}), '(full_name)\n', (23914, 23925), False, 'from pytype.pytd import escape\n'), ((21361, 21380), 'pytype.pytd.pytd.AnythingType', 'pytd.AnythingType', ([], {}), '()\n', (21378, 21380), False, 'from pytype.pytd import pytd\n'), ((23972, 24005), 'pytype.pytd.pytd.NamedType', 'pytd.NamedType', (['"""builtins.object"""'], {}), "('builtins.object')\n", (23986, 24005), False, 'from pytype.pytd import pytd\n'), ((21189, 21211), 'pytype.pytd.pytd.Constant', 'pytd.Constant', (['name', 'd'], {}), '(name, d)\n', (21202, 21211), False, 'from pytype.pytd import pytd\n'), ((22196, 22236), 'pytype.pytd.pytd.Parameter', 'pytd.Parameter', (['n', 't', '(False)', '(False)', 'None'], {}), '(n, t, False, False, None)\n', (22210, 22236), False, 'from pytype.pytd import pytd\n'), ((20977, 20996), 'pytype.pytd.pytd.AnythingType', 'pytd.AnythingType', ([], {}), '()\n', (20994, 20996), False, 'from pytype.pytd import pytd\n')]
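For orientation, a sketch of how the check_types entry point in analyze.py might be driven. Only the check_types signature comes from the file above; the config, errors, and load_pytd names are assumptions about pytype's surrounding modules of this era:

# Hypothetical driver; config.Options.create, errors.ErrorLog, and
# load_pytd.create_loader are assumptions, not shown in analyze.py itself.
from pytype import analyze, config, errors, load_pytd

src = "def f(x: int) -> str:\n  return x\n"  # deliberately bad return type
options = config.Options.create(python_version=(3, 8))
loader = load_pytd.create_loader(options)
errorlog = errors.ErrorLog()

analyze.check_types(src, "example.py", errorlog, options, loader)
errorlog.print_to_stderr()  # assumed reporting helper for collected errors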
milescsmith/cDNA_Cupcake
src/cupcake/post_isoseq_cluster/demux_by_barcode_groups.py
776d841c69fc6d8b3dce95bb9f076546bc0429c0
#!/usr/bin/env python
__author__ = "[email protected]"

"""
Given a pooled input GFF + demux CSV file, write out per-{barcode group} GFFs
If input fasta/fastq is given, optionally also output per-{barcode group} FASTA/FASTQ
"""

import re
from collections import defaultdict
from csv import DictReader
from typing import Optional

import typer
from Bio import SeqIO

import cupcake.sequence.GFF as GFF
from cupcake import version_callback
from cupcake import cupcake_logger as logger

rex_pbid = re.compile(r"(PB.\d+.\d+)(|\S+)")

app = typer.Typer(name="cupcake.post_isoseq_cluster.demux_by_barcode_groups")


def get_type_fafq(in_filename):
    in_filename = in_filename.upper()
    if in_filename.endswith(".FA") or in_filename.endswith("FASTA"):
        return "fasta"
    elif in_filename.endswith(".FQ") or in_filename.endswith("FASTQ"):
        return "fastq"
    else:
        raise Exception(
            f"Unrecognized file suffix .{in_filename[in_filename.find('.'):]}! Must end with .fasta or .fastq!"
        )


def regroup_gff(
    pooled_gff, demux_count_file, output_prefix, out_group_dict, in_fafq=None
):
    """
    :param pooled_gff: GFF file
    :param demux_count_file: comma-delimited per-barcode count file
    :param output_prefix: output prefix for GFF
    :param out_group_dict: dict of barcode name --> group to belong in (ex: {'EM1':'EM', 'EM2':'EM'})
    :param in_fafq: optional fasta/fastq that was input to SAM
    """
    if in_fafq is not None:
        type_fafq = get_type_fafq(in_fafq)
    in_tissue = defaultdict(
        lambda: set()
    )  # pbid --> list of tissue it is in (EM, END, R)

    for r in DictReader(open(demux_count_file), delimiter=","):
        for k, v in r.items():
            if k != "id" and int(v) > 0:
                in_tissue[r["id"]].add(k)

    # in_tissue = dict(in_tissue)

    handles = {}
    handles_fafq = {}
    for g in out_group_dict.values():
        handles[g] = open(f"{output_prefix}_{g}_only.gff", "w")
        if in_fafq is not None:
            handles_fafq[g] = open(f"{output_prefix}_{g}_only.{type_fafq}", "w")

    if in_fafq is not None:
        fafq_dict = SeqIO.to_dict(SeqIO.parse(open(in_fafq), type_fafq))
        fafq_dict_keys = list(fafq_dict.keys())
        for k in fafq_dict_keys:
            m = rex_pbid.match(k)
            if m is not None:
                fafq_dict[m.group(1)] = fafq_dict[k]
    reader = GFF.collapseGFFReader(pooled_gff)
    for r in reader:
        groups_to_write_in = set()
        pbid = r.seqid
        if pbid not in in_tissue:
            logger.info(
                f"WARNING: {pbid} does not belong to any group indicated by outgroup_dict"
            )
        for tissue in in_tissue[pbid]:
            groups_to_write_in.add(out_group_dict[tissue])

        for g in groups_to_write_in:
            GFF.write_collapseGFF_format(handles[g], r)
            if in_fafq is not None:
                SeqIO.write(fafq_dict[pbid], handles_fafq[g], type_fafq)


@app.command(name="")
def main(
    pooled_gff: str = typer.Argument(..., help="Pooled GFF file"),
    demux_count_file: str = typer.Argument(..., help="Demux count file"),
    output_prefix: str = typer.Argument(..., help="Output prefix for GFF outputs"),
    outgroup_dict: str = typer.Argument(..., help="Tuples indicating barcode grouping"),
    pooled_fastx: Optional[str] = typer.Option(
        None,
        help="Pooled FASTA/FASTQ (optional, if given, will also output demux fa/fq)",
    ),
    version: bool = typer.Option(
        None,
        "--version",
        callback=version_callback,
        is_eager=True,
        help="Prints the version of the SQANTI3 package.",
    ),
) -> None:

    tmp = eval(outgroup_dict)
    out_group_dict = dict([tmp]) if len(tmp) == 1 else dict(tmp)
    regroup_gff(
        pooled_gff,
        demux_count_file,
        output_prefix,
        out_group_dict,
        pooled_fastx,
    )


if __name__ == "__main__":
    typer.run(main)
[((493, 528), 're.compile', 're.compile', (['"""(PB.\\\\d+.\\\\d+)(|\\\\S+)"""'], {}), "('(PB.\\\\d+.\\\\d+)(|\\\\S+)')\n", (503, 528), False, 'import re\n'), ((535, 606), 'typer.Typer', 'typer.Typer', ([], {'name': '"""cupcake.post_isoseq_cluster.demux_by_barcode_groups"""'}), "(name='cupcake.post_isoseq_cluster.demux_by_barcode_groups')\n", (546, 606), False, 'import typer\n'), ((2412, 2445), 'cupcake.sequence.GFF.collapseGFFReader', 'GFF.collapseGFFReader', (['pooled_gff'], {}), '(pooled_gff)\n', (2433, 2445), True, 'import cupcake.sequence.GFF as GFF\n'), ((3046, 3089), 'typer.Argument', 'typer.Argument', (['...'], {'help': '"""Pooled GFF file"""'}), "(..., help='Pooled GFF file')\n", (3060, 3089), False, 'import typer\n'), ((3119, 3163), 'typer.Argument', 'typer.Argument', (['...'], {'help': '"""Demux count file"""'}), "(..., help='Demux count file')\n", (3133, 3163), False, 'import typer\n'), ((3190, 3247), 'typer.Argument', 'typer.Argument', (['...'], {'help': '"""Output prefix for GFF outputs"""'}), "(..., help='Output prefix for GFF outputs')\n", (3204, 3247), False, 'import typer\n'), ((3274, 3336), 'typer.Argument', 'typer.Argument', (['...'], {'help': '"""Tuples indicating barcode grouping"""'}), "(..., help='Tuples indicating barcode grouping')\n", (3288, 3336), False, 'import typer\n'), ((3372, 3473), 'typer.Option', 'typer.Option', (['None'], {'help': '"""Pooled FASTA/FASTQ (optional, if given, will also output demux fa/fq)"""'}), "(None, help=\n 'Pooled FASTA/FASTQ (optional, if given, will also output demux fa/fq)')\n", (3384, 3473), False, 'import typer\n'), ((3513, 3641), 'typer.Option', 'typer.Option', (['None', '"""--version"""'], {'callback': 'version_callback', 'is_eager': '(True)', 'help': '"""Prints the version of the SQANTI3 package."""'}), "(None, '--version', callback=version_callback, is_eager=True,\n help='Prints the version of the SQANTI3 package.')\n", (3525, 3641), False, 'import typer\n'), ((3963, 3978), 'typer.run', 'typer.run', (['main'], {}), '(main)\n', (3972, 3978), False, 'import typer\n'), ((2571, 2663), 'cupcake.cupcake_logger.info', 'logger.info', (['f"""WARNING: {pbid} does not belong to any group indicated by outgroup_dict"""'], {}), "(\n f'WARNING: {pbid} does not belong to any group indicated by outgroup_dict')\n", (2582, 2663), True, 'from cupcake import cupcake_logger as logger\n'), ((2837, 2880), 'cupcake.sequence.GFF.write_collapseGFF_format', 'GFF.write_collapseGFF_format', (['handles[g]', 'r'], {}), '(handles[g], r)\n', (2865, 2880), True, 'import cupcake.sequence.GFF as GFF\n'), ((2933, 2989), 'Bio.SeqIO.write', 'SeqIO.write', (['fafq_dict[pbid]', 'handles_fafq[g]', 'type_fafq'], {}), '(fafq_dict[pbid], handles_fafq[g], type_fafq)\n', (2944, 2989), False, 'from Bio import SeqIO\n')]
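Besides the typer CLI, regroup_gff can be called directly from Python. A small sketch with placeholder file names, mirroring the out_group_dict example in its docstring:

# File names below are placeholders; the grouping dict mirrors the
# docstring example ({'EM1': 'EM', 'EM2': 'EM'}).
regroup_gff(
    pooled_gff="pooled.collapsed.gff",
    demux_count_file="pooled.mapped_fl_count.txt",
    output_prefix="demuxed",
    out_group_dict={"EM1": "EM", "EM2": "EM"},
    in_fafq="pooled.rep.fastq",  # optional; also writes demuxed_EM_only.fastq
)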
paulhfu/3dcv-students
vll/data/circle_dataset.py
f8d42c985cf33903170733b0c8f6a2199099553c
import random
import numpy as np
import math

from skimage.draw import line, line_aa, circle, set_color, circle_perimeter_aa
from skimage.io import imsave
from skimage.util import random_noise

maxSlope = 10  # restrict the maximum slope of generated lines for stability
minLength = 20  # restrict the minimum length of line segments


class ICircleDataset:
    '''
    Generator of circle segment images.

    Images will have 1 random circle each, filled with noise and distractor lines.
    Class also offers functionality for drawing line parameters, hypotheses and point predictions.
    '''

    def __init__(self, imgW = 64, imgH = 64, margin = -5, bg_clr = 0.5):
        '''
        Constructor.

        imgW -- image width (default 64)
        imgH -- image height (default 64)
        margin -- lines segments are sampled within this margin, negative value means that a line segment can start or end outside the image (default -5)
        bg_clr -- background intensity (default 0.5)
        '''

        self.imgW = imgW
        self.imgH = imgH
        self.margin = margin
        self.bg_clr = bg_clr

    def draw_circle(self, data, cX, cY, r, clr, alpha=1.0):
        '''
        Draw a circle with the given color and opacity.

        data -- image to draw to
        cX -- x value of circle center
        cY -- y value of circle center
        r -- radius of circle
        clr -- line color, triple of values
        alpha -- opacity (default 1.0)
        '''

        cY = int(cY * self.imgH)
        cX = int(cX * self.imgW)
        r = int(r * self.imgW)

        rr, cc, val = circle_perimeter_aa(cY, cX, r)
        set_color(data, (rr, cc), clr, val)

    def draw_hyps(self, labels, scores, data=None):
        '''
        Draw a set of line hypotheses for a batch of images.

        labels -- line parameters, array shape (NxMx2) where
            N is the number of images in the batch
            M is the number of hypotheses per image
            2 is the number of line parameters (intercept, slope)
        scores -- hypotheses scores, array shape (NxM), see above, higher score will be drawn with higher opacity
        data -- batch of images to draw to, if empty a new batch will be created according to the shape of labels
        '''

        n = labels.shape[0]  # number of images
        m = labels.shape[1]  # number of hypotheses

        if data is None:  # create new batch of images
            data = np.zeros((n, self.imgH, self.imgW, 3), dtype=np.float32)
            data.fill(self.bg_clr)

        clr = (0, 0, 1)

        for i in range(0, n):
            for j in range(0, m):
                lY1 = int(labels[i, j, 0] * self.imgH)
                lY2 = int(labels[i, j, 1] * self.imgW + labels[i, j, 0] * self.imgH)
                self.draw_line(data[i], 0, lY1, self.imgW, lY2, clr, scores[i, j])

        return data

    def draw_models(self, labels, data=None, correct=None):
        '''
        Draw circles for a batch of images.

        labels -- circle parameters, array shape (Nx3) where
            N is the number of images in the batch
            3 is the number of circles parameters (center x, center y, radius)
        data -- batch of images to draw to, if empty a new batch will be created
            according to the shape of labels and circles will be green,
            otherwise circles will be blue
        correct -- array of shape (N) indicating whether a circle estimate is correct
        '''

        n = labels.shape[0]
        if data is None:
            data = np.zeros((n, self.imgH, self.imgW, 3), dtype=np.float32)
            data.fill(self.bg_clr)
            clr = (0, 1, 0)
        else:
            clr = (0, 0, 1)

        for i in range(0, n):
            self.draw_circle(data[i], labels[i, 0], labels[i, 1], labels[i, 2], clr)

            if correct is not None:
                # draw border green if estimate is correct, red otherwise
                if correct[i]:
                    borderclr = (0, 1, 0)
                else:
                    borderclr = (1, 0, 0)

                set_color(data[i], line(0, 0, 0, self.imgW-1), borderclr)
                set_color(data[i], line(0, 0, self.imgH-1, 0), borderclr)
                set_color(data[i], line(self.imgH-1, 0, self.imgH-1, self.imgW-1), borderclr)
                set_color(data[i], line(0, self.imgW-1, self.imgH-1, self.imgW-1), borderclr)

        return data

    def draw_points(self, points, data, inliers=None):
        '''
        Draw 2D points for a batch of images.

        points -- 2D points, array shape (Nx2xM) where
            N is the number of images in the batch
            2 is the number of point dimensions (x, y)
            M is the number of points
        data -- batch of images to draw to
        inliers -- soft inlier score for each point,
            if given and score < 0.5 point will be drawn green, red otherwise
        '''

        n = points.shape[0]  # number of images
        m = points.shape[2]  # number of points

        for i in range(0, n):
            for j in range(0, m):
                clr = (0.2, 0.2, 0.2)  # draw predicted points as dark circles
                if inliers is not None and inliers[i, j] > 0.5:
                    clr = (0.7, 0.7, 0.7)  # draw inliers as light circles

                r = int(points[i, 0, j] * self.imgH)
                c = int(points[i, 1, j] * self.imgW)
                rr, cc = circle(r, c, 2)
                set_color(data[i], (rr, cc), clr)

        return data

    def samples(self, n):
        '''
        Create new input images of random line segments and distractors along with ground truth parameters.

        n -- number of images to create
        '''

        data = np.zeros((n, self.imgH, self.imgW, 3), dtype=np.float32)
        data.fill(self.bg_clr)
        labels = np.zeros((n, 3), dtype=np.float32)

        for i in range(0, n):
            data[i] = random_noise(data[i], mode='speckle')

        return data, labels
[((1439, 1469), 'skimage.draw.circle_perimeter_aa', 'circle_perimeter_aa', (['cY', 'cX', 'r'], {}), '(cY, cX, r)\n', (1458, 1469), False, 'from skimage.draw import line, line_aa, circle, set_color, circle_perimeter_aa\n'), ((1472, 1507), 'skimage.draw.set_color', 'set_color', (['data', '(rr, cc)', 'clr', 'val'], {}), '(data, (rr, cc), clr, val)\n', (1481, 1507), False, 'from skimage.draw import line, line_aa, circle, set_color, circle_perimeter_aa\n'), ((4940, 4996), 'numpy.zeros', 'np.zeros', (['(n, self.imgH, self.imgW, 3)'], {'dtype': 'np.float32'}), '((n, self.imgH, self.imgW, 3), dtype=np.float32)\n', (4948, 4996), True, 'import numpy as np\n'), ((5033, 5067), 'numpy.zeros', 'np.zeros', (['(n, 3)'], {'dtype': 'np.float32'}), '((n, 3), dtype=np.float32)\n', (5041, 5067), True, 'import numpy as np\n'), ((2190, 2246), 'numpy.zeros', 'np.zeros', (['(n, self.imgH, self.imgW, 3)'], {'dtype': 'np.float32'}), '((n, self.imgH, self.imgW, 3), dtype=np.float32)\n', (2198, 2246), True, 'import numpy as np\n'), ((3128, 3184), 'numpy.zeros', 'np.zeros', (['(n, self.imgH, self.imgW, 3)'], {'dtype': 'np.float32'}), '((n, self.imgH, self.imgW, 3), dtype=np.float32)\n', (3136, 3184), True, 'import numpy as np\n'), ((5107, 5144), 'skimage.util.random_noise', 'random_noise', (['data[i]'], {'mode': '"""speckle"""'}), "(data[i], mode='speckle')\n", (5119, 5144), False, 'from skimage.util import random_noise\n'), ((4688, 4703), 'skimage.draw.circle', 'circle', (['r', 'c', '(2)'], {}), '(r, c, 2)\n', (4694, 4703), False, 'from skimage.draw import line, line_aa, circle, set_color, circle_perimeter_aa\n'), ((4708, 4741), 'skimage.draw.set_color', 'set_color', (['data[i]', '(rr, cc)', 'clr'], {}), '(data[i], (rr, cc), clr)\n', (4717, 4741), False, 'from skimage.draw import line, line_aa, circle, set_color, circle_perimeter_aa\n'), ((3556, 3584), 'skimage.draw.line', 'line', (['(0)', '(0)', '(0)', '(self.imgW - 1)'], {}), '(0, 0, 0, self.imgW - 1)\n', (3560, 3584), False, 'from skimage.draw import line, line_aa, circle, set_color, circle_perimeter_aa\n'), ((3621, 3649), 'skimage.draw.line', 'line', (['(0)', '(0)', '(self.imgH - 1)', '(0)'], {}), '(0, 0, self.imgH - 1, 0)\n', (3625, 3649), False, 'from skimage.draw import line, line_aa, circle, set_color, circle_perimeter_aa\n'), ((3686, 3738), 'skimage.draw.line', 'line', (['(self.imgH - 1)', '(0)', '(self.imgH - 1)', '(self.imgW - 1)'], {}), '(self.imgH - 1, 0, self.imgH - 1, self.imgW - 1)\n', (3690, 3738), False, 'from skimage.draw import line, line_aa, circle, set_color, circle_perimeter_aa\n'), ((3771, 3823), 'skimage.draw.line', 'line', (['(0)', '(self.imgW - 1)', '(self.imgH - 1)', '(self.imgW - 1)'], {}), '(0, self.imgW - 1, self.imgH - 1, self.imgW - 1)\n', (3775, 3823), False, 'from skimage.draw import line, line_aa, circle, set_color, circle_perimeter_aa\n')]
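A minimal usage sketch for the generator above; the import path follows the record's repo path, and the batch size and output filename are illustrative assumptions:

import numpy as np
from skimage.io import imsave
from vll.data.circle_dataset import ICircleDataset

dataset = ICircleDataset(imgW=64, imgH=64, bg_clr=0.5)
data, labels = dataset.samples(4)  # four speckle-noise images with zeroed circle labels
imsave('sample0.png', (np.clip(data[0], 0, 1) * 255).astype(np.uint8))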
youngmg1995/NES-Music-Maker
VAE/reduced_model/nesm_generator.py
aeda10a541cfd439cfa46c45e63411e0d98e41c1
# -*- coding: utf-8 -*-
"""
Created on Wed Apr  1 17:14:19 2020

@author: Mitchell

nesm_generator.py
~~~~~~~~~~~~~~~~~
This file serves as a script for using our pre-trained VAE model to generate
brand new NES music soundtracks.

NOTE - using the reduced model we only generate the first melodic voice for
each track rather than each of the four voices present in an NESM track.

To do so we first reconstruct our model using the file VAE class defined in
`VAE.py` and the same parameters used in `model_training`. Then we use
functions from the file `generation_utils` to have our trained model create
entirely new and original NES music.
"""

# Imports
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# NOTE - nesmdb folder manually added to environment libraries
from dataset_utils import load_training
from VAE import VAE
from generation_utils import generate_seprsco, latent_SVD, get_latent_vecs,\
    plot_track, filter_tracks
import nesmdb
from nesmdb.vgm.vgm_to_wav import save_vgmwav
import tensorflow as tf
import numpy as np
import os, json

### Load Mappings
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Parameters for shape of dataset (note these are also used for model def.)
measures = 8
measure_len = 96

# load data
training_foldername = '../../nesmdb24_seprsco/train/'
train_save_filename = 'transformed_dataset.json'
dataset, labels2int_map, int2labels_map = \
    load_training(training_foldername, train_save_filename,
                  measures=measures, measure_len=measure_len)

### Reinitiate Model
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
### Model Parameters
latent_dim = 124
input_dim = len(int2labels_map) - 1
dropout = .1
maxnorm = None
vae_b1, vae_b2 = .02, .1

print('Reinitiating VAE Model')

# Build Model
model = VAE(latent_dim, input_dim, measures, measure_len,
            dropout, maxnorm, vae_b1, vae_b2)

# Reload Saved Weights
checkpoint_dir = './training_checkpoints'
checkpoint_prefix = os.path.join(checkpoint_dir, "model_ckpt")
model.load_weights(checkpoint_prefix)
model.build(tf.TensorShape([None, measures, measure_len]))

# Print Summary of Model
model.summary()

### Sample Latent Variable Distributions
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Here we use SVD to more effectively sample from the orthogonal components
# of our latent space

# Parameters for sampling
num_songs = 10

print('Generating Latent Samples to Generate {} New Tracks'.format(num_songs))

# Grab distributions of dataset over latent space
# Have to run in batches due to size of the dataset
batch_size = 300
latent_vecs = get_latent_vecs(model, dataset, batch_size)

# Sample from normal distribution
rand_vecs = np.random.normal(0.0, 1.0, (num_songs, latent_dim))

# perform SVD
plot_eigenvalues = True
sample_vecs = latent_SVD(latent_vecs, rand_vecs, plot_eigenvalues)

### Generate New Tracks
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Create new seprsco tracks using our model and the random samples
# Seprsco files can later be converted to valid NES music format

# Parameters for track generation (specifically filtering)
p_min = .5

print('Generating New Tracks from Latent Samples')

# Decode samples using VAE
decoded_tracks = model.decoder(sample_vecs)

# Plot first decoded track
print("Example Model Generated Track")
plot_track(decoded_tracks[0])

# Filter Track
decoded_tracks = filter_tracks(decoded_tracks, p_min)

# Plot first filtered track
print("Example Filtered Track")
plot_track(decoded_tracks[0])

# Convert tracks to seprsco format
print('Converting Model Output to Seprsco')
seprsco_tracks = generate_seprsco(decoded_tracks, int2labels_map)

### Convert to WAV
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Convert seprsco tracks to WAV files so we can listen!!!
print('Converting Seprsco to WAV Audio')
wav_tracks = []
for track in seprsco_tracks:
    wav = nesmdb.convert.seprsco_to_wav(track)
    wav_tracks.append(wav)

### Save WAV Files
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Save our wav tracks to appropriate files (be sure not to overwrite existing)
# Also save latent variables so we can reproduce songs we like

# Output folder shared by both save steps (defined up front so that saving
# latent variables alone does not hit an undefined name)
wav_folder = 'model_gen_files/'

# Save WAV tracks
save_wav = False
if save_wav:
    print('Saving Generated WAV Audio Tracks')
    for i in range(len(wav_tracks)):
        wav_file = wav_folder + 'VAE_NESM_{}.wav'.format(i)
        save_vgmwav(wav_file, wav_tracks[i])

# Save Latent Variables
save_latent_var = False
if save_latent_var:
    print('Saving Latent Variables for Generated Tracks')
    latent_filename = os.path.join(wav_folder, "latent_variables.json")
    with open(latent_filename, 'w') as f:
        json.dump({
            'VAE_NESM_{}.wav'.format(i): sample_vecs[i].tolist()
            for i in range(sample_vecs.shape[0])
        }, f)

#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#----------------------------------END FILE------------------------------------
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
[((1458, 1561), 'dataset_utils.load_training', 'load_training', (['training_foldername', 'train_save_filename'], {'measures': 'measures', 'measure_len': 'measure_len'}), '(training_foldername, train_save_filename, measures=measures,\n measure_len=measure_len)\n', (1471, 1561), False, 'from dataset_utils import load_training\n'), ((1868, 1955), 'VAE.VAE', 'VAE', (['latent_dim', 'input_dim', 'measures', 'measure_len', 'dropout', 'maxnorm', 'vae_b1', 'vae_b2'], {}), '(latent_dim, input_dim, measures, measure_len, dropout, maxnorm, vae_b1,\n vae_b2)\n', (1871, 1955), False, 'from VAE import VAE\n'), ((2052, 2094), 'os.path.join', 'os.path.join', (['checkpoint_dir', '"""model_ckpt"""'], {}), "(checkpoint_dir, 'model_ckpt')\n", (2064, 2094), False, 'import os, json\n'), ((2713, 2756), 'generation_utils.get_latent_vecs', 'get_latent_vecs', (['model', 'dataset', 'batch_size'], {}), '(model, dataset, batch_size)\n', (2728, 2756), False, 'from generation_utils import generate_seprsco, latent_SVD, get_latent_vecs, plot_track, filter_tracks\n'), ((2804, 2855), 'numpy.random.normal', 'np.random.normal', (['(0.0)', '(1.0)', '(num_songs, latent_dim)'], {}), '(0.0, 1.0, (num_songs, latent_dim))\n', (2820, 2855), True, 'import numpy as np\n'), ((2909, 2961), 'generation_utils.latent_SVD', 'latent_SVD', (['latent_vecs', 'rand_vecs', 'plot_eigenvalues'], {}), '(latent_vecs, rand_vecs, plot_eigenvalues)\n', (2919, 2961), False, 'from generation_utils import generate_seprsco, latent_SVD, get_latent_vecs, plot_track, filter_tracks\n'), ((3462, 3491), 'generation_utils.plot_track', 'plot_track', (['decoded_tracks[0]'], {}), '(decoded_tracks[0])\n', (3472, 3491), False, 'from generation_utils import generate_seprsco, latent_SVD, get_latent_vecs, plot_track, filter_tracks\n'), ((3525, 3561), 'generation_utils.filter_tracks', 'filter_tracks', (['decoded_tracks', 'p_min'], {}), '(decoded_tracks, p_min)\n', (3538, 3561), False, 'from generation_utils import generate_seprsco, latent_SVD, get_latent_vecs, plot_track, filter_tracks\n'), ((3623, 3652), 'generation_utils.plot_track', 'plot_track', (['decoded_tracks[0]'], {}), '(decoded_tracks[0])\n', (3633, 3652), False, 'from generation_utils import generate_seprsco, latent_SVD, get_latent_vecs, plot_track, filter_tracks\n'), ((3750, 3798), 'generation_utils.generate_seprsco', 'generate_seprsco', (['decoded_tracks', 'int2labels_map'], {}), '(decoded_tracks, int2labels_map)\n', (3766, 3798), False, 'from generation_utils import generate_seprsco, latent_SVD, get_latent_vecs, plot_track, filter_tracks\n'), ((2145, 2190), 'tensorflow.TensorShape', 'tf.TensorShape', (['[None, measures, measure_len]'], {}), '([None, measures, measure_len])\n', (2159, 2190), True, 'import tensorflow as tf\n'), ((4055, 4091), 'nesmdb.convert.seprsco_to_wav', 'nesmdb.convert.seprsco_to_wav', (['track'], {}), '(track)\n', (4084, 4091), False, 'import nesmdb\n'), ((4783, 4832), 'os.path.join', 'os.path.join', (['wav_folder', '"""latent_variables.json"""'], {}), "(wav_folder, 'latent_variables.json')\n", (4795, 4832), False, 'import os, json\n'), ((4597, 4633), 'nesmdb.vgm.vgm_to_wav.save_vgmwav', 'save_vgmwav', (['wav_file', 'wav_tracks[i]'], {}), '(wav_file, wav_tracks[i])\n', (4608, 4633), False, 'from nesmdb.vgm.vgm_to_wav import save_vgmwav\n')]
anttin/anlogger
anlogger/logger.py
dfa7be7ba2f4651507b188f986c10bab9bd7460e
import logging
import logging.handlers
import os


class Logger(object):
    def __init__(self, name, default_loglevel='INFO', fmt=None, syslog=None):
        self.name = name
        self.syslog = syslog
        self.fmt = fmt if fmt is not None else "%(asctime)-15s %(name)s %(levelname)s %(message)s"

        if 'LOGLEVEL' in os.environ:
            self.level = os.environ['LOGLEVEL'].upper()
        else:
            self.level = default_loglevel.upper()

        logging.basicConfig(format=self.fmt)
        self.logger = logging.getLogger(self.name)
        self.logger.setLevel(self.level)

        if self.syslog is not None and self.syslog not in (False, 0):
            if isinstance(self.syslog, (list, tuple)):
                _addr = tuple(self.syslog)
            elif isinstance(self.syslog, str):
                _addr = self.syslog
            else:
                _addr = "/dev/log" if os.path.exists("/dev/log") else None

            if _addr is not None:
                handler = logging.handlers.SysLogHandler(address=_addr)
                self.logger.addHandler(handler)

    def get(self):
        return self.logger
[((435, 471), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': 'self.fmt'}), '(format=self.fmt)\n', (454, 471), False, 'import logging\n'), ((490, 518), 'logging.getLogger', 'logging.getLogger', (['self.name'], {}), '(self.name)\n', (507, 518), False, 'import logging\n'), ((902, 947), 'logging.handlers.SysLogHandler', 'logging.handlers.SysLogHandler', ([], {'address': '_addr'}), '(address=_addr)\n', (932, 947), False, 'import logging\n'), ((818, 844), 'os.path.exists', 'os.path.exists', (['"""/dev/log"""'], {}), "('/dev/log')\n", (832, 844), False, 'import os\n')]
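A short usage sketch for the class above; the logger name and syslog address are illustrative assumptions:

from anlogger.logger import Logger

logger = Logger('myapp', default_loglevel='DEBUG', syslog=('localhost', 514)).get()
logger.info('service started')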
saurabhcommand/Hello-world
Python/hello_world-theopaid.py
647bad9da901a52d455f05ecc37c6823c22dc77e
# Author Theodosis Paidakis

print("Hello World")

hello_list = ["Hello World"]
print(hello_list[0])

for i in hello_list:
    print(i)
[]
uliang/NaturalLanguageQueryingSystem
question_answering/stubs.py
d18b4ae429362ba311d6f26debbcfe391b810458
from collections import namedtuple
from unittest.mock import MagicMock

_fake_ext = namedtuple('_', ['qtype', 'kb_ident'])


class FakeDoc:
    def __init__(self, text, qtype, kb_ident):
        self._ = _fake_ext(qtype, kb_ident)
        self.text = text

    def __str__(self):
        return f"<[MOCKED NLP]{self.text}>"
[((86, 124), 'collections.namedtuple', 'namedtuple', (['"""_"""', "['qtype', 'kb_ident']"], {}), "('_', ['qtype', 'kb_ident'])\n", (96, 124), False, 'from collections import namedtuple\n')]
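A brief usage sketch for the stub; the text and extension values are made up for illustration:

doc = FakeDoc('Who discovered penicillin?', qtype='person', kb_ident='Q123')
print(doc)          # <[MOCKED NLP]Who discovered penicillin?>
print(doc._.qtype)  # person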
devas123/Bitcoin-Trader-RL
lib/env/trade/BaseTradeStrategy.py
097cb0ba7428b2c4f997bdb0425a6153c23f9c83
from abc import ABCMeta, abstractmethod
from typing import Tuple, Callable


class BaseTradeStrategy(object, metaclass=ABCMeta):
    @abstractmethod
    def __init__(self,
                 commissionPercent: float,
                 maxSlippagePercent: float,
                 base_precision: int,
                 asset_precision: int,
                 min_cost_limit: float,
                 min_amount_limit: float):
        pass

    @abstractmethod
    def trade(self,
              action: int,
              n_discrete_actions: int,
              balance: float,
              asset_held: float,
              current_price: Callable[[str], float]) -> Tuple[float, float, float, float]:
        raise NotImplementedError()
[]
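A minimal concrete subclass sketch; this is not the repo's actual strategy, and the commission handling plus the meaning of the four returned floats are assumptions for illustration only:

class NaiveTradeStrategy(BaseTradeStrategy):
    def __init__(self, commissionPercent: float, maxSlippagePercent: float,
                 base_precision: int, asset_precision: int,
                 min_cost_limit: float, min_amount_limit: float):
        self.commissionPercent = commissionPercent
        self.maxSlippagePercent = maxSlippagePercent

    def trade(self, action: int, n_discrete_actions: int, balance: float,
              asset_held: float,
              current_price: Callable[[str], float]) -> Tuple[float, float, float, float]:
        price = current_price('Close')  # hypothetical price key
        if action < n_discrete_actions // 2:  # treat the lower half of actions as 'buy all'
            amount = balance / (price * (1 + self.commissionPercent / 100))
            return 0.0, asset_held + amount, balance, 0.0
        revenue = asset_held * price * (1 - self.commissionPercent / 100)
        return balance + revenue, 0.0, 0.0, revenue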
jsjang93/joony
4day/Book04_1.py
62f7a325094c887212b894932263bf84500e0f03
# Book04_1.py
class Book:
    category = 'novel'  # class attribute

b1 = Book()
print(b1.category)

b2 = b1
print(b2.category)
print(Book.category)

Book.category = 'essay'
print(b2.category)
print(b1.category)
print(Book.category)

b2.category = 'IT'
print(b2.category)
print(b1.category)
print(Book.category)
[]
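The last three prints differ because assigning through an instance creates an instance attribute that shadows the class attribute; a short sketch of that mechanism:

b3 = Book()
print(b3.__dict__)    # {} -- attribute lookup falls back to Book.category
b3.category = 'IT'
print(b3.__dict__)    # {'category': 'IT'} -- the instance attribute now shadows it
print(Book.category)  # the class attribute itself is unchanged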
odidev/virgil-crypto-c
wrappers/python/virgil_crypto_lib/foundation/kdf1.py
3d5d5cb19fdcf81eab08cdc63647f040117ecbd8
# Copyright (C) 2015-2021 Virgil Security, Inc.
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     (1) Redistributions of source code must retain the above copyright
#     notice, this list of conditions and the following disclaimer.
#
#     (2) Redistributions in binary form must reproduce the above copyright
#     notice, this list of conditions and the following disclaimer in
#     the documentation and/or other materials provided with the
#     distribution.
#
#     (3) Neither the name of the copyright holder nor the names of its
#     contributors may be used to endorse or promote products derived from
#     this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ''AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Lead Maintainer: Virgil Security Inc. <[email protected]>

from ctypes import *

from ._c_bridge import VscfKdf1
from ._c_bridge import VscfImplTag
from ._c_bridge import VscfStatus
from virgil_crypto_lib.common._c_bridge import Data
from virgil_crypto_lib.common._c_bridge import Buffer
from .alg import Alg
from .kdf import Kdf


class Kdf1(Alg, Kdf):
    """Virgil Security implementation of the KDF1 (ISO-18033-2) algorithm."""

    def __init__(self):
        """Create underlying C context."""
        self._lib_vscf_kdf1 = VscfKdf1()
        self._c_impl = None
        self._ctx = None
        self.ctx = self._lib_vscf_kdf1.vscf_kdf1_new()

    def __delete__(self, instance):
        """Destroy underlying C context."""
        self._lib_vscf_kdf1.vscf_kdf1_delete(self.ctx)

    def set_hash(self, hash):
        self._lib_vscf_kdf1.vscf_kdf1_use_hash(self.ctx, hash.c_impl)

    def alg_id(self):
        """Provide algorithm identificator."""
        result = self._lib_vscf_kdf1.vscf_kdf1_alg_id(self.ctx)
        return result

    def produce_alg_info(self):
        """Produce object with algorithm information and configuration parameters."""
        result = self._lib_vscf_kdf1.vscf_kdf1_produce_alg_info(self.ctx)
        instance = VscfImplTag.get_type(result)[0].take_c_ctx(cast(result, POINTER(VscfImplTag.get_type(result)[1])))
        return instance

    def restore_alg_info(self, alg_info):
        """Restore algorithm configuration from the given object."""
        status = self._lib_vscf_kdf1.vscf_kdf1_restore_alg_info(self.ctx, alg_info.c_impl)
        VscfStatus.handle_status(status)

    def derive(self, data, key_len):
        """Derive key of the requested length from the given data."""
        d_data = Data(data)
        key = Buffer(key_len)
        self._lib_vscf_kdf1.vscf_kdf1_derive(self.ctx, d_data.data, key_len, key.c_buffer)
        return key.get_bytes()

    @classmethod
    def take_c_ctx(cls, c_ctx):
        inst = cls.__new__(cls)
        inst._lib_vscf_kdf1 = VscfKdf1()
        inst.ctx = c_ctx
        return inst

    @classmethod
    def use_c_ctx(cls, c_ctx):
        inst = cls.__new__(cls)
        inst._lib_vscf_kdf1 = VscfKdf1()
        inst.ctx = inst._lib_vscf_kdf1.vscf_kdf1_shallow_copy(c_ctx)
        return inst

    @property
    def c_impl(self):
        return self._c_impl

    @property
    def ctx(self):
        return self._ctx

    @ctx.setter
    def ctx(self, value):
        self._ctx = self._lib_vscf_kdf1.vscf_kdf1_shallow_copy(value)
        self._c_impl = self._lib_vscf_kdf1.vscf_kdf1_impl(self.ctx)
[((3299, 3309), 'virgil_crypto_lib.common._c_bridge.Data', 'Data', (['data'], {}), '(data)\n', (3303, 3309), False, 'from virgil_crypto_lib.common._c_bridge import Data\n'), ((3324, 3339), 'virgil_crypto_lib.common._c_bridge.Buffer', 'Buffer', (['key_len'], {}), '(key_len)\n', (3330, 3339), False, 'from virgil_crypto_lib.common._c_bridge import Buffer\n')]
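A usage sketch for the wrapper above; Sha256 is assumed to be exported by the same foundation package (an assumption, verify against the installed wrapper):

from virgil_crypto_lib.foundation import Sha256

kdf = Kdf1()
kdf.set_hash(Sha256())                 # any hash object exposing .c_impl
key = kdf.derive(b'shared secret', 32)  # derive a 32-byte key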
leixiayang/django-python
mysite/zoo/tests.py
8faa84867af5645d3d3d8e67fe8020be4dc68551
#!/usr/bin/env python
# encoding: utf-8

from django.test import TestCase

from zoo import models


class AnimalTestCase(TestCase):
    """Test animals' sound
    """

    def test_dog_says(self):
        """test dog says woof or not
        """
        dog = models.Dog(name='Snoopy')
        self.assertEqual(dog.says(), 'woof')

    def test_cat_says(self):
        """test cat says meow or not
        """
        cat = models.Cat(name='Garfield')
        self.assertEqual(cat.says(), 'meow')
[((259, 284), 'zoo.models.Dog', 'models.Dog', ([], {'name': '"""Snoopy"""'}), "(name='Snoopy')\n", (269, 284), False, 'from zoo import models\n'), ((424, 451), 'zoo.models.Cat', 'models.Cat', ([], {'name': '"""Garfield"""'}), "(name='Garfield')\n", (434, 451), False, 'from zoo import models\n')]
gjaosdij/PythonProject
EX025.py
ae27990efa93462b632f165d13c08c7fd93beb38
print('Enter your full name: ')
nome = input().strip().upper()
print('Does your name contain "Silva"?')
print('SILVA' in nome)
[]
linxi1158/iMIX
configs/_base_/datasets/stvqa_dataset.py
af87a17275f02c94932bb2e29f132a84db812002
dataset_type = 'STVQADATASET'
data_root = '/home/datasets/mix_data/iMIX/'
feature_path = 'data/datasets/stvqa/defaults/features/'
ocr_feature_path = 'data/datasets/stvqa/defaults/ocr_features/'
annotation_path = 'data/datasets/stvqa/defaults/annotations/'
vocab_path = 'data/datasets/stvqa/defaults/extras/vocabs/'

train_datasets = ['train']
test_datasets = ['val']

reader_train_cfg = dict(
    type='STVQAREADER',
    card='default',
    mix_features=dict(
        train=data_root + feature_path + 'detectron.lmdb',
        val=data_root + feature_path + 'detectron.lmdb',
        test=data_root + feature_path + 'detectron.lmdb',
    ),
    mix_ocr_features=dict(
        train=data_root + ocr_feature_path + 'ocr_en_frcn_features.lmdb',
        val=data_root + ocr_feature_path + 'ocr_en_frcn_features.lmdb',
        test=data_root + ocr_feature_path + 'ocr_en_frcn_features.lmdb',
    ),
    mix_annotations=dict(
        train=data_root + annotation_path + 'imdb_subtrain.npy',
        val=data_root + annotation_path + 'imdb_subval.npy',
        test=data_root + annotation_path + 'imdb_test_task3.npy',
    ),
    datasets=train_datasets)

reader_test_cfg = dict(
    type='STVQAREADER',
    card='default',
    mix_features=dict(
        train=data_root + feature_path + 'detectron.lmdb',
        val=data_root + feature_path + 'detectron.lmdb',
        test=data_root + feature_path + 'detectron.lmdb',
    ),
    mix_ocr_features=dict(
        train=data_root + ocr_feature_path + 'ocr_en_frcn_features.lmdb',
        val=data_root + ocr_feature_path + 'ocr_en_frcn_features.lmdb',
        test=data_root + ocr_feature_path + 'ocr_en_frcn_features.lmdb',
    ),
    mix_annotations=dict(
        train=data_root + annotation_path + 'imdb_subtrain.npy',
        val=data_root + annotation_path + 'imdb_subval.npy',
        test=data_root + annotation_path + 'imdb_test_task3.npy',
    ),
    datasets=test_datasets)  # evaluate on the held-out split

info_cpler_cfg = dict(
    type='STVQAInfoCpler',
    glove_weights=dict(
        glove6b50d=data_root + 'glove/glove.6B.50d.txt.pt',
        glove6b100d=data_root + 'glove/glove.6B.100d.txt.pt',
        glove6b200d=data_root + 'glove/glove.6B.200d.txt.pt',
        glove6b300d=data_root + 'glove/glove.6B.300d.txt.pt',
    ),
    fasttext_weights=dict(
        wiki300d1m=data_root + 'fasttext/wiki-news-300d-1M.vec',
        wiki300d1msub=data_root + 'fasttext/wiki-news-300d-1M-subword.vec',
        wiki_bin=data_root + 'fasttext/wiki.en.bin',
    ),
    tokenizer='/home/datasets/VQA/bert/' + 'bert-base-uncased-vocab.txt',
    mix_vocab=dict(
        answers_st_5k=data_root + vocab_path + 'fixed_answer_vocab_stvqa_5k.txt',
        vocabulary_100k=data_root + vocab_path + 'vocabulary_100k.txt',
    ),
    max_seg_lenth=20,
    max_ocr_lenth=10,
    word_mask_ratio=0.0,
    vocab_name='vocabulary_100k',
    vocab_answer_name='answers_st_5k',
    glove_name='glove6b300d',
    fasttext_name='wiki_bin',
    if_bert=True,
)

train_data = dict(
    samples_per_gpu=16,
    workers_per_gpu=1,
    data=dict(type=dataset_type, reader=reader_train_cfg, info_cpler=info_cpler_cfg, limit_nums=800))

test_data = dict(
    samples_per_gpu=16,
    workers_per_gpu=1,
    data=dict(type=dataset_type, reader=reader_test_cfg, info_cpler=info_cpler_cfg),
)
[]
2martens/rpo-website
src/rpocore/migrations/0007_auto_20160927_1517.py
14990920722c537810aecd2b97f5af6bbdd1b5ec
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-09-27 13:17
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion
import mezzanine.core.fields


class Migration(migrations.Migration):

    dependencies = [
        ('rpocore', '0006_auto_20160921_1924'),
    ]

    operations = [
        migrations.CreateModel(
            name='SupportingOrganization',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('_order', mezzanine.core.fields.OrderField(null=True, verbose_name='Order')),
                ('name', models.CharField(max_length=100, verbose_name='Name')),
                ('logo', models.ImageField(upload_to='', verbose_name='Logo of organization')),
                ('url', models.CharField(max_length=200, verbose_name='URL')),
            ],
            options={
                'verbose_name_plural': 'Supporting organizations',
                'ordering': ('_order',),
                'verbose_name': 'Supporting organization',
            },
        ),
        migrations.AlterField(
            model_name='carouselitem',
            name='homepage',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='carousel_items', to='rpocore.HomepagePage', verbose_name='Homepage'),
        ),
        migrations.AlterField(
            model_name='homepagepage',
            name='process',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='rpocore.Process', verbose_name='Process'),
        ),
        migrations.AlterField(
            model_name='notablesupporter',
            name='supporter_page',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='notable_supporters', to='rpocore.SupporterPage', verbose_name='Supporter page'),
        ),
        migrations.AlterField(
            model_name='phase',
            name='process',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rpocore.Process', verbose_name='Process'),
        ),
        migrations.AlterField(
            model_name='statementpage',
            name='formal_statements',
            field=models.ManyToManyField(blank=True, to='rpocore.FormalStatement', verbose_name='Formal statements'),
        ),
        migrations.AlterField(
            model_name='statementpage',
            name='informal_statements',
            field=models.ManyToManyField(blank=True, to='rpocore.InformalStatement', verbose_name='Informal statements'),
        ),
        migrations.AlterField(
            model_name='supporter',
            name='support_group',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='rpocore.SupportGroup', verbose_name='Support group'),
        ),
    ]
[((1261, 1411), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""carousel_items"""', 'to': '"""rpocore.HomepagePage"""', 'verbose_name': '"""Homepage"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='carousel_items', to='rpocore.HomepagePage', verbose_name='Homepage')\n", (1278, 1411), False, 'from django.db import migrations, models\n'), ((1535, 1659), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': '"""rpocore.Process"""', 'verbose_name': '"""Process"""'}), "(null=True, on_delete=django.db.models.deletion.SET_NULL,\n to='rpocore.Process', verbose_name='Process')\n", (1552, 1659), False, 'from django.db import migrations, models\n'), ((1795, 1970), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""notable_supporters"""', 'to': '"""rpocore.SupporterPage"""', 'verbose_name': '"""Supporter page"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n related_name='notable_supporters', to='rpocore.SupporterPage',\n verbose_name='Supporter page')\n", (1812, 1970), False, 'from django.db import migrations, models\n'), ((2084, 2197), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""rpocore.Process"""', 'verbose_name': '"""Process"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'rpocore.Process', verbose_name='Process')\n", (2101, 2197), False, 'from django.db import migrations, models\n'), ((2332, 2434), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'blank': '(True)', 'to': '"""rpocore.FormalStatement"""', 'verbose_name': '"""Formal statements"""'}), "(blank=True, to='rpocore.FormalStatement',\n verbose_name='Formal statements')\n", (2354, 2434), False, 'from django.db import migrations, models\n'), ((2572, 2678), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'blank': '(True)', 'to': '"""rpocore.InformalStatement"""', 'verbose_name': '"""Informal statements"""'}), "(blank=True, to='rpocore.InformalStatement',\n verbose_name='Informal statements')\n", (2594, 2678), False, 'from django.db import migrations, models\n'), ((2806, 2940), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.PROTECT', 'to': '"""rpocore.SupportGroup"""', 'verbose_name': '"""Support group"""'}), "(null=True, on_delete=django.db.models.deletion.PROTECT,\n to='rpocore.SupportGroup', verbose_name='Support group')\n", (2823, 2940), False, 'from django.db import migrations, models\n'), ((471, 564), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (487, 564), False, 'from django.db import migrations, models\n'), ((683, 736), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'verbose_name': '"""Name"""'}), "(max_length=100, verbose_name='Name')\n", (699, 736), False, 'from django.db import migrations, models\n'), ((764, 832), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '""""""', 'verbose_name': '"""Logo of organization"""'}), "(upload_to='', verbose_name='Logo of organization')\n", (781, 832), False, 'from django.db import migrations, models\n'), ((859, 911), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'verbose_name': '"""URL"""'}), "(max_length=200, verbose_name='URL')\n", (875, 911), False, 'from django.db import migrations, models\n')]
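A sketch of the model definition the CreateModel operation above implies; this is reconstructed for illustration and the app's actual models.py may differ (e.g. the _order field is normally supplied by mezzanine's Orderable base class, which is assumed here):

from django.db import models
from mezzanine.core.models import Orderable  # assumed base providing the _order field

class SupportingOrganization(Orderable):
    name = models.CharField(max_length=100, verbose_name='Name')
    logo = models.ImageField(upload_to='', verbose_name='Logo of organization')
    url = models.CharField(max_length=200, verbose_name='URL')

    class Meta:
        verbose_name = 'Supporting organization'
        verbose_name_plural = 'Supporting organizations'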
manno-xx/FutureLearnRobotBuggy
code/Line.py
d5f0172597ad88d6a8b883b0b16d425a76edfb0b
# LineSensor test

from gpiozero import LineSensor
from time import sleep
from signal import pause


def lineDetected():
    print('line detected')


def noLineDetected():
    print('no line detected')


sensor = LineSensor(14)

sensor.when_line = lineDetected
sensor.when_no_line = noLineDetected

pause()

sensor.close()
[((210, 224), 'gpiozero.LineSensor', 'LineSensor', (['(14)'], {}), '(14)\n', (220, 224), False, 'from gpiozero import LineSensor\n'), ((296, 303), 'signal.pause', 'pause', ([], {}), '()\n', (301, 303), False, 'from signal import pause\n')]
chmousset/litex-boards
litex_boards/platforms/myminieye_runber.py
c081177d77f37a4ea6cff150d42a69bd6f0abbc2
#
# This file is part of LiteX-Boards.
#
# Copyright (c) 2021 Gwenhael Goavec-Merou <[email protected]>
# SPDX-License-Identifier: BSD-2-Clause

from migen import *

from litex.build.generic_platform import *
from litex.build.gowin.platform import GowinPlatform
from litex.build.openfpgaloader import OpenFPGALoader

# IOs ----------------------------------------------------------------------------------------------

_io = [
    # Clk / Rst
    ("clk12", 0, Pins("4"), IOStandard("LVCMOS33")),

    # Leds
    ("user_led", 0, Pins("23"), IOStandard("LVCMOS33")),
    ("user_led", 1, Pins("24"), IOStandard("LVCMOS33")),
    ("user_led", 2, Pins("25"), IOStandard("LVCMOS33")),
    ("user_led", 3, Pins("26"), IOStandard("LVCMOS33")),
    ("user_led", 4, Pins("27"), IOStandard("LVCMOS33")),
    ("user_led", 5, Pins("28"), IOStandard("LVCMOS33")),
    ("user_led", 6, Pins("29"), IOStandard("LVCMOS33")),
    ("user_led", 7, Pins("30"), IOStandard("LVCMOS33")),

    # RGB led, active-low
    ("rgb_led", 0,
        Subsignal("r", Pins("112")),
        Subsignal("g", Pins("114")),
        Subsignal("b", Pins("113")),
        IOStandard("LVCMOS33"),
    ),
    ("rgb_led", 1,
        Subsignal("r", Pins("106")),
        Subsignal("g", Pins("111")),
        Subsignal("b", Pins("110")),
        IOStandard("LVCMOS33"),
    ),
    ("rgb_led", 2,
        Subsignal("r", Pins("101")),
        Subsignal("g", Pins("104")),
        Subsignal("b", Pins("102")),
        IOStandard("LVCMOS33"),
    ),
    ("rgb_led", 3,
        Subsignal("r", Pins("98")),
        Subsignal("g", Pins("100")),
        Subsignal("b", Pins("99")),
        IOStandard("LVCMOS33"),
    ),

    # Switches
    ("user_sw", 0, Pins("75"), IOStandard("LVCMOS33")),
    ("user_sw", 1, Pins("76"), IOStandard("LVCMOS33")),
    ("user_sw", 2, Pins("78"), IOStandard("LVCMOS33")),
    ("user_sw", 3, Pins("79"), IOStandard("LVCMOS33")),
    ("user_sw", 4, Pins("80"), IOStandard("LVCMOS33")),
    ("user_sw", 5, Pins("81"), IOStandard("LVCMOS33")),
    ("user_sw", 6, Pins("82"), IOStandard("LVCMOS33")),
    ("user_sw", 7, Pins("83"), IOStandard("LVCMOS33")),

    # Buttons.
    ("user_btn", 0, Pins("58"), IOStandard("LVCMOS33")),
    ("user_btn", 1, Pins("59"), IOStandard("LVCMOS33")),
    ("user_btn", 2, Pins("60"), IOStandard("LVCMOS33")),
    ("user_btn", 3, Pins("61"), IOStandard("LVCMOS33")),
    ("user_btn", 4, Pins("62"), IOStandard("LVCMOS33")),
    ("user_btn", 5, Pins("63"), IOStandard("LVCMOS33")),
    ("user_btn", 6, Pins("64"), IOStandard("LVCMOS33")),
    ("user_btn", 7, Pins("65"), IOStandard("LVCMOS33")),

    # Serial.
    # FT232H has only one interface -> use (arbitrary) two pins from J2 to
    # connect an external USB<->serial adapter
    ("serial", 0,
        Subsignal("tx", Pins("116")), # J2.17
        Subsignal("rx", Pins("115")), # J2.18
        IOStandard("LVCMOS33")
    ),

    # Seven Segment
    ("seven_seg_dig", 0, Pins("137"), IOStandard("LVCMOS33")),
    ("seven_seg_dig", 1, Pins("140"), IOStandard("LVCMOS33")),
    ("seven_seg_dig", 2, Pins("141"), IOStandard("LVCMOS33")),
    ("seven_seg_dig", 3, Pins("7"),   IOStandard("LVCMOS33")),
    ("seven_seg", 0, Pins("138 142 9 11 12 139 8 10"), IOStandard("LVCMOS33")),
]

# Connectors ---------------------------------------------------------------------------------------

_connectors = [
    ["J1", "- 38 39 40 41 42 43 44 66 67 68 69 70 71 72 96 95 94 93 -"],
    ["J2", "- 136 135 134 133 132 131 130 129 128 123 122 121 120 119 118 117 116 115 -"],
]

# Platform -----------------------------------------------------------------------------------------

class Platform(GowinPlatform):
    default_clk_name   = "clk12"
    default_clk_period = 1e9/12e6

    def __init__(self, toolchain="gowin"):
        GowinPlatform.__init__(self, "GW1N-UV4LQ144C6/I5", _io, _connectors, toolchain=toolchain, devicename="GW1N-4")
        self.toolchain.options["use_mspi_as_gpio"] = 1

    def create_programmer(self):
        return OpenFPGALoader("runber")

    def do_finalize(self, fragment):
        GowinPlatform.do_finalize(self, fragment)
        self.add_period_constraint(self.lookup_request("clk12", loose=True), 1e9/12e6)
[((3824, 3938), 'litex.build.gowin.platform.GowinPlatform.__init__', 'GowinPlatform.__init__', (['self', '"""GW1N-UV4LQ144C6/I5"""', '_io', '_connectors'], {'toolchain': 'toolchain', 'devicename': '"""GW1N-4"""'}), "(self, 'GW1N-UV4LQ144C6/I5', _io, _connectors,\n toolchain=toolchain, devicename='GW1N-4')\n", (3846, 3938), False, 'from litex.build.gowin.platform import GowinPlatform\n'), ((4039, 4063), 'litex.build.openfpgaloader.OpenFPGALoader', 'OpenFPGALoader', (['"""runber"""'], {}), "('runber')\n", (4053, 4063), False, 'from litex.build.openfpgaloader import OpenFPGALoader\n'), ((4110, 4151), 'litex.build.gowin.platform.GowinPlatform.do_finalize', 'GowinPlatform.do_finalize', (['self', 'fragment'], {}), '(self, fragment)\n', (4135, 4151), False, 'from litex.build.gowin.platform import GowinPlatform\n')]
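A small usage sketch; requesting resources and a programmer is standard migen/LiteX platform usage, while the particular resources chosen here are illustrative:

from litex_boards.platforms import myminieye_runber

platform = myminieye_runber.Platform(toolchain="gowin")
led = platform.request("user_led", 0)  # first of the eight LEDs declared in _io
serial = platform.request("serial")    # tx/rx routed to J2.17/J2.18
prog = platform.create_programmer()    # openFPGALoader configured for the Runber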
zhangkunliang/BayesOptimization
combo/search/discrete/policy.py
6d78c9e9f96239b0dbb85650a0d878e9410158ec
import numpy as np
import copy
import combo.misc
import cPickle as pickle
from results import history
from .. import utility
from ...variable import variable
from ..call_simulator import call_simulator
from ... import predictor
from ...gp import predictor as gp_predictor
from ...blm import predictor as blm_predictor
import combo.search.score

MAX_SEACH = int(20000)


class policy:
    def __init__(self, test_X, config=None):
        self.predictor = None
        self.training = variable()
        self.test = self._set_test(test_X)
        self.actions = np.arange(0, self.test.X.shape[0])
        self.history = history()
        self.config = self._set_config(config)

    def set_seed(self, seed):
        self.seed = seed
        np.random.seed(self.seed)

    def delete_actions(self, index, actions=None):
        actions = self._set_unchosed_actions(actions)
        return np.delete(actions, index)

    def write(self, action, t, X=None):
        if X is None:
            X = self.test.X[action, :]
            Z = self.test.Z[action, :] if self.test.Z is not None else None
        else:
            Z = self.predictor.get_basis(X) \
                if self.predictor is not None else None

        self.new_data = variable(X, t, Z)
        self.history.write(t, action)
        self.training.add(X=X, t=t, Z=Z)

    def random_search(self, max_num_probes, num_search_each_probe=1,
                      simulator=None, is_disp=True):
        N = int(num_search_each_probe)

        if int(max_num_probes) * N > len(self.actions):
            raise ValueError('max_num_probes * num_search_each_probe must \
                be smaller than the length of candidates')

        if is_disp:
            utility.show_interactive_mode(simulator, self.history)

        for n in xrange(0, max_num_probes):
            if is_disp and N > 1:
                utility.show_start_message_multi_search(self.history.num_runs)

            action = self.get_random_action(N)

            if simulator is None:
                return action

            t, X = call_simulator(simulator, action)

            self.write(action, t, X)

            if is_disp:
                utility.show_search_results(self.history, N)

        return copy.deepcopy(self.history)

    def bayes_search(self, training=None, max_num_probes=None,
                     num_search_each_probe=1,
                     predictor=None, is_disp=True,
                     simulator=None, score='TS', interval=0,
                     num_rand_basis=0):
        if max_num_probes is None:
            max_num_probes = 1
            simulator = None

        is_rand_expans = False if num_rand_basis == 0 else True

        self.training = self._set_training(training)

        if predictor is None:
            self.predictor = self._init_predictor(is_rand_expans)
        else:
            self.predictor = predictor

        N = int(num_search_each_probe)

        for n in xrange(max_num_probes):
            if utility.is_learning(n, interval):
                self.predictor.fit(self.training, num_rand_basis)
                self.test.Z = self.predictor.get_basis(self.test.X)
                self.training.Z = self.predictor.get_basis(self.training.X)
                self.predictor.prepare(self.training)
            else:
                try:
                    self.predictor.update(self.training, self.new_data)
                except:
                    self.predictor.prepare(self.training)

            if num_search_each_probe != 1:
                utility.show_start_message_multi_search(self.history.num_runs,
                                                        score)

            K = self.config.search.multi_probe_num_sampling
            alpha = self.config.search.alpha
            action = self.get_actions(score, N, K, alpha)

            if simulator is None:
                return action

            t, X = call_simulator(simulator, action)

            self.write(action, t, X)

            if is_disp:
                utility.show_search_results(self.history, N)

        return copy.deepcopy(self.history)

    def get_score(self, mode, predictor=None, training=None, alpha=1):
        self._set_training(training)
        self._set_predictor(predictor)
        actions = self.actions

        test = self.test.get_subset(actions)
        if mode == 'EI':
            f = combo.search.score.EI(predictor, training, test)
        elif mode == 'PI':
            f = combo.search.score.PI(predictor, training, test)
        elif mode == 'TS':
            f = combo.search.score.TS(predictor, training, test, alpha)
        else:
            raise NotImplementedError('mode must be EI, PI or TS.')
        return f

    def get_marginal_score(self, mode, chosed_actions, N, alpha):
        f = np.zeros((N, len(self.actions)))
        new_test = self.test.get_subset(chosed_actions)
        virtual_t \
            = self.predictor.get_predict_samples(self.training, new_test, N)

        for n in xrange(N):
            predictor = copy.deepcopy(self.predictor)
            train = copy.deepcopy(self.training)
            virtual_train = new_test
            virtual_train.t = virtual_t[n, :]

            if virtual_train.Z is None:
                train.add(virtual_train.X, virtual_train.t)
            else:
                train.add(virtual_train.X, virtual_train.t, virtual_train.Z)

            try:
                predictor.update(train, virtual_train)
            except:
                predictor.prepare(train)

            f[n, :] = self.get_score(mode, predictor, train)
        return f

    def get_actions(self, mode, N, K, alpha):
        f = self.get_score(mode, self.predictor, self.training, alpha)
        temp = np.argmax(f)
        action = self.actions[temp]
        self.actions = self.delete_actions(temp)

        chosed_actions = np.zeros(N, dtype=int)
        chosed_actions[0] = action

        for n in xrange(1, N):
            f = self.get_marginal_score(mode, chosed_actions[0:n], K, alpha)
            temp = np.argmax(np.mean(f, 0))
            chosed_actions[n] = self.actions[temp]
            self.actions = self.delete_actions(temp)

        return chosed_actions

    def get_random_action(self, N):
        random_index = np.random.permutation(xrange(self.actions.shape[0]))
        index = random_index[0:N]
        action = self.actions[index]
        self.actions = self.delete_actions(index)
        return action

    def load(self, file_history, file_training=None, file_predictor=None):
        self.history.load(file_history)

        if file_training is None:
            N = self.history.total_num_search
            X = self.test.X[self.history.chosed_actions[0:N], :]
            t = self.history.fx[0:N]
            self.training = variable(X=X, t=t)
        else:
            self.training = variable()
            self.training.load(file_training)

        if file_predictor is not None:
            with open(file_predictor) as f:
                self.predictor = pickle.load(f)

    def export_predictor(self):
        return self.predictor

    def export_training(self):
        return self.training

    def export_history(self):
        return self.history

    def _set_predictor(self, predictor=None):
        if predictor is None:
            predictor = self.predictor
        return predictor

    def _init_predictor(self, is_rand_expans, predictor=None):
        self.predictor = self._set_predictor(predictor)
        if self.predictor is None:
            if is_rand_expans:
                self.predictor = blm_predictor(self.config)
            else:
                self.predictor = gp_predictor(self.config)

        return self.predictor

    def _set_training(self, training=None):
        if training is None:
            training = self.training
        return training

    def _set_unchosed_actions(self, actions=None):
        if actions is None:
            actions = self.actions
        return actions

    def _set_test(self, test_X):
        if isinstance(test_X, np.ndarray):
            test = variable(X=test_X)
        elif isinstance(test_X, variable):
            test = test_X
        else:
            raise TypeError('The type of test_X must \
                take ndarray or combo.variable')
        return test

    def _set_config(self, config=None):
        if config is None:
            config = combo.misc.set_config()
        return config
[((559, 593), 'numpy.arange', 'np.arange', (['(0)', 'self.test.X.shape[0]'], {}), '(0, self.test.X.shape[0])\n', (568, 593), True, 'import numpy as np\n'), ((617, 626), 'results.history', 'history', ([], {}), '()\n', (624, 626), False, 'from results import history\n'), ((738, 763), 'numpy.random.seed', 'np.random.seed', (['self.seed'], {}), '(self.seed)\n', (752, 763), True, 'import numpy as np\n'), ((885, 910), 'numpy.delete', 'np.delete', (['actions', 'index'], {}), '(actions, index)\n', (894, 910), True, 'import numpy as np\n'), ((2236, 2263), 'copy.deepcopy', 'copy.deepcopy', (['self.history'], {}), '(self.history)\n', (2249, 2263), False, 'import copy\n'), ((4089, 4116), 'copy.deepcopy', 'copy.deepcopy', (['self.history'], {}), '(self.history)\n', (4102, 4116), False, 'import copy\n'), ((5744, 5756), 'numpy.argmax', 'np.argmax', (['f'], {}), '(f)\n', (5753, 5756), True, 'import numpy as np\n'), ((5868, 5890), 'numpy.zeros', 'np.zeros', (['N'], {'dtype': 'int'}), '(N, dtype=int)\n', (5876, 5890), True, 'import numpy as np\n'), ((5040, 5069), 'copy.deepcopy', 'copy.deepcopy', (['self.predictor'], {}), '(self.predictor)\n', (5053, 5069), False, 'import copy\n'), ((5090, 5118), 'copy.deepcopy', 'copy.deepcopy', (['self.training'], {}), '(self.training)\n', (5103, 5118), False, 'import copy\n'), ((6064, 6077), 'numpy.mean', 'np.mean', (['f', '(0)'], {}), '(f, 0)\n', (6071, 6077), True, 'import numpy as np\n'), ((7032, 7046), 'cPickle.load', 'pickle.load', (['f'], {}), '(f)\n', (7043, 7046), True, 'import cPickle as pickle\n')]
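A usage sketch of the search loop above (Python 2, matching the module); the candidate matrix and simulator are illustrative, and the simulator contract (a callable returning objective values for the chosen actions) is an assumption based on how call_simulator is invoked:

import numpy as np

test_X = np.random.rand(1000, 3)  # candidate descriptors

def simulator(actions):
    # toy objective evaluated at the chosen candidate rows
    return -np.sum((test_X[actions] - 0.5) ** 2, axis=1)

p = policy(test_X)
p.set_seed(0)
p.random_search(max_num_probes=5, simulator=simulator)
res = p.bayes_search(max_num_probes=20, simulator=simulator, score='TS', interval=5)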
dequeb/asmbattle
venv/lib/python3.9/site-packages/py2app/bootstrap/disable_linecache.py
27e8b209de5787836e288a2f2f9b7644ce07563e
def _disable_linecache():
    import linecache

    def fake_getline(*args, **kwargs):
        return ""

    linecache.orig_getline = linecache.getline
    linecache.getline = fake_getline


_disable_linecache()
[]
s403o/tw_bot
source.py
fd26ebc86d4c7d1be1ae654f26f5ca74c2566a03
import requests
from bs4 import BeautifulSoup as bs
import os

# source
url = ''  # the source you want the bot to take images from

# download page
page = requests.get(url)
html = bs(page.text, 'html.parser')

# locate images
image_loc = html.findAll('img')

# create folder for located imgs
if not os.path.exists('imgs'):
    os.makedirs('imgs')

# open the new folder
os.chdir('imgs')

image0 = 0  # img name

# get images
for image in image_loc:
    try:
        url = image['src']
        source = requests.get(url)
        if source.status_code == 200:
            # write the already-fetched image bytes in binary mode
            with open('img-' + str(image0) + '.jpg', 'wb') as mkimg:
                mkimg.write(source.content)
            image0 += 1
    except:
        pass
[((147, 164), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (159, 164), False, 'import requests\n'), ((172, 200), 'bs4.BeautifulSoup', 'bs', (['page.text', '"""html.parser"""'], {}), "(page.text, 'html.parser')\n", (174, 200), True, 'from bs4 import BeautifulSoup as bs\n'), ((350, 366), 'os.chdir', 'os.chdir', (['"""imgs"""'], {}), "('imgs')\n", (358, 366), False, 'import os\n'), ((282, 304), 'os.path.exists', 'os.path.exists', (['"""imgs"""'], {}), "('imgs')\n", (296, 304), False, 'import os\n'), ((308, 327), 'os.makedirs', 'os.makedirs', (['"""imgs"""'], {}), "('imgs')\n", (319, 327), False, 'import os\n'), ((469, 486), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (481, 486), False, 'import requests\n'), ((605, 622), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (617, 622), False, 'import requests\n')]
QIAOANGeo/BZB_ydzw
lib/appController.py
8c11e9797cca31d1fab26be7eb0a71666cfac15f
'''
1. Start the appium server via subprocess, driven by the config file
1.1. Check whether the server is up
1.2. Kill the previous server
2. Start the driver
'''
from lib.tools import Tool
import subprocess
from lib.path import SYSTEMPATH, ERRORPATH
import time
from appium import webdriver
import queue

# declare a Python queue
driver_queue = queue.Queue()


class Controller(object):
    def __init__(self):
        # load the configuration
        self.config = Tool().get_config
        self.tester = self.config.get('tester')
        self.device_type = self.config.get('device_type')
        # get the info for all phones
        self.devices = self.config.get('devices')
        self.device = self.devices.get(self.device_type)[0]
        # port is used to check whether the server is up
        self.port = self.device.get('port')
        self.name = self.device.get('name')

    def kill_server(self):
        mac = '''ps -ef|grep appium|grep -v grep|grep %s|awk '{print "kill -9 " $2}'|sh''' % self.port
        win = 'taskkill /F /IM node.exe /t'
        subprocess.getoutput(win)

    def start_server(self):
        self.kill_server()
        command = 'appium -a {ip} -p {port} -U {deviceName}'.format(ip=self.device.get('ip'),
                                                                    port=self.device.get('port'),
                                                                    deviceName=self.device.get('deviceName'))
        print('command : %s' % command)
        subprocess.Popen(command, stdout=open(SYSTEMPATH, 'a+'), stderr=open(ERRORPATH, 'a+'), shell=True)

    def test_server(self):
        # mac = 'ps -ef|grep appium|grep -v grep|grep %s' % self.port
        win = 'netstat -ano | findstr %s' % self.port
        time.sleep(3)
        while True:
            data = subprocess.getoutput(win)
            if data:
                time.sleep(10)
                print('Port %s started successfully.' % self.port)
                break
            else:
                print('Port %s is not up yet. Retrying in 5 seconds.' % self.port)
                time.sleep(5)
        return True

    def start_driver(self):
        url = 'http://{ip}:{port}/wd/hub'.format(ip=self.device.get('ip'), port=self.port)
        # merge device info with the tester entry (package name and entry point)
        self.device.update(self.tester)
        driver = webdriver.Remote(url, self.device)
        driver_queue.put(driver)


if __name__ == '__main__':
    controller = Controller()
    controller.start_server()
    if controller.test_server():
        controller.start_driver()
[((257, 270), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (268, 270), False, 'import queue\n'), ((917, 942), 'subprocess.getoutput', 'subprocess.getoutput', (['win'], {}), '(win)\n', (937, 942), False, 'import subprocess\n'), ((1608, 1621), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (1618, 1621), False, 'import time\n'), ((2180, 2214), 'appium.webdriver.Remote', 'webdriver.Remote', (['url', 'self.device'], {}), '(url, self.device)\n', (2196, 2214), False, 'from appium import webdriver\n'), ((364, 370), 'lib.tools.Tool', 'Tool', ([], {}), '()\n', (368, 370), False, 'from lib.tools import Tool\n'), ((1661, 1686), 'subprocess.getoutput', 'subprocess.getoutput', (['win'], {}), '(win)\n', (1681, 1686), False, 'import subprocess\n'), ((1724, 1738), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (1734, 1738), False, 'import time\n'), ((1897, 1910), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1907, 1910), False, 'import time\n')]
passionzhan/LeetCode
pondSizes.py
c4d33b64b9da15ca7a9b0d41e645d86a697694fe
# -*- encoding: utf-8 -*-
'''
@project : LeetCode
@File    : pondSizes.py
@Contact : [email protected]
@Desc    : You have an integer matrix land representing a piece of terrain,
    where the value of each point is the altitude at that location. A value
    of 0 denotes water. Water cells connected vertically, horizontally, or
    diagonally form a pond. The size of a pond is the number of connected
    water cells. Write a method that computes the sizes of all ponds in the
    matrix, with the return value sorted in ascending order.

    Example:
    Input:
    [
      [0,2,1,0],
      [0,1,0,1],
      [1,1,0,1],
      [0,1,0,1]
    ]
    Output: [1,2,4]

    Constraints:
    0 < len(land) <= 1000
    0 < len(land[i]) <= 1000

    Source: LeetCode (LCCI)
    Link: https://leetcode-cn.com/problems/pond-sizes-lcci

@Modify Time      @Author    @Version    @Description
------------      -------    --------    -----------
2020-03-07        zhan       1.0         None
'''
from typing import List
from collections import deque


class Solution:
    def pondSizes(self, land: List[List[int]]) -> List[int]:
        def neighbors(iR, iC, flag):
            ans = set()
            if (iR - 1, iC - 1) in flag:
                ans.add((iR - 1, iC - 1))
            if (iR - 1, iC) in flag:
                ans.add((iR - 1, iC))
            if (iR - 1, iC + 1) in flag:
                ans.add((iR - 1, iC + 1))
            if (iR, iC - 1) in flag:
                ans.add((iR, iC - 1))
            if (iR, iC + 1) in flag:
                ans.add((iR, iC + 1))
            if (iR + 1, iC - 1) in flag:
                ans.add((iR + 1, iC - 1))
            if (iR + 1, iC) in flag:
                ans.add((iR + 1, iC))
            if (iR + 1, iC + 1) in flag:
                ans.add((iR + 1, iC + 1))
            return ans

        flag = {(i, j) for j in range(len(land[0])) for i in range(len(land)) if land[i][j] == 0}

        ans = []
        while flag:
            tmpArea = 0
            mydueque = deque()
            mydueque.append(flag.pop())
            while mydueque:
                curEle = mydueque.popleft()
                tmpArea += 1
                for neighbor in neighbors(curEle[0], curEle[1], flag):
                    mydueque.append(neighbor)
                    flag.remove(neighbor)
            ans.append(tmpArea)

        ans.sort()
        return ans


if __name__ == '__main__':
    a = [
        [0, 2, 1, 0],
        [0, 1, 0, 1],
        [1, 1, 0, 1],
        [0, 1, 0, 1]
    ]
    ans = Solution().pondSizes(a)
    print(ans)
[((1662, 1669), 'collections.deque', 'deque', ([], {}), '()\n', (1667, 1669), False, 'from collections import deque\n')]
kurtraschke/geolucidate
geolucidate/functions.py
827195a90d972fa5efce5a03bdbe53d8395d94ba
# -*- coding: utf-8 -*-

from decimal import Decimal, setcontext, ExtendedContext

from geolucidate.links.google import google_maps_link
from geolucidate.links.tools import MapLink
from geolucidate.parser import parser_re

setcontext(ExtendedContext)


def _cleanup(parts):
    """
    Normalize up the parts matched by :obj:`parser.parser_re` to
    degrees, minutes, and seconds.

    >>> _cleanup({'latdir': 'south', 'longdir': 'west',
    ...           'latdeg':'60','latmin':'30',
    ...           'longdeg':'50','longmin':'40'})
    ['S', '60', '30', '00', 'W', '50', '40', '00']

    >>> _cleanup({'latdir': 'south', 'longdir': 'west',
    ...           'latdeg':'60','latmin':'30', 'latdecsec':'.50',
    ...           'longdeg':'50','longmin':'40','longdecsec':'.90'})
    ['S', '60', '30.50', '00', 'W', '50', '40.90', '00']

    """
    latdir = (parts['latdir'] or parts['latdir2']).upper()[0]
    longdir = (parts['longdir'] or parts['longdir2']).upper()[0]

    latdeg = parts.get('latdeg')
    longdeg = parts.get('longdeg')

    latmin = parts.get('latmin', '00') or '00'
    longmin = parts.get('longmin', '00') or '00'

    latdecsec = parts.get('latdecsec', '')
    longdecsec = parts.get('longdecsec', '')

    if (latdecsec and longdecsec):
        latmin += latdecsec
        longmin += longdecsec
        latsec = '00'
        longsec = '00'
    else:
        latsec = parts.get('latsec', '') or '00'
        longsec = parts.get('longsec', '') or '00'

    return [latdir, latdeg, latmin, latsec, longdir, longdeg, longmin, longsec]


def _convert(latdir, latdeg, latmin, latsec,
             longdir, longdeg, longmin, longsec):
    """
    Convert normalized degrees, minutes, and seconds to decimal degrees.
    Quantize the converted value based on the input precision and
    return a 2-tuple of strings.

    >>> _convert('S','50','30','30','W','50','30','30')
    ('-50.508333', '-50.508333')

    >>> _convert('N','50','27','55','W','127','27','65')
    ('50.459167', '-127.460833')
    """
    if (latsec != '00' or longsec != '00'):
        precision = Decimal('0.000001')
    elif (latmin != '00' or longmin != '00'):
        precision = Decimal('0.001')
    else:
        precision = Decimal('1')

    latitude = Decimal(latdeg)
    latmin = Decimal(latmin)
    latsec = Decimal(latsec)

    longitude = Decimal(longdeg)
    longmin = Decimal(longmin)
    longsec = Decimal(longsec)

    if latsec > 59 or longsec > 59:
        # Assume that 'seconds' greater than 59 are actually a decimal
        # fraction of minutes
        latitude += (latmin + (latsec / Decimal('100'))) / Decimal('60')
        longitude += (longmin + (longsec / Decimal('100'))) / Decimal('60')
    else:
        latitude += (latmin + (latsec / Decimal('60'))) / Decimal('60')
        longitude += (longmin + (longsec / Decimal('60'))) / Decimal('60')

    if latdir == 'S':
        latitude *= Decimal('-1')

    if longdir == 'W':
        longitude *= Decimal('-1')

    lat_str = str(latitude.quantize(precision))
    long_str = str(longitude.quantize(precision))

    return (lat_str, long_str)


def replace(string, sub_function=google_maps_link()):
    """
    Replace detected coordinates with a map link, using the given substitution
    function.

    The substitution function will be passed a :class:`~.MapLink` instance,
    and should return a string which will be substituted by :func:`re.sub`
    in place of the detected coordinates.

    >>> replace("58147N/07720W")
    '<a href="http://maps.google.com/maps?q=58.235278%2C-77.333333+%2858147N%2F07720W%29&ll=58.235278%2C-77.333333&t=h" title="58147N/07720W (58.235278, -77.333333)">58147N/07720W</a>'

    >>> replace("5814N/07720W", google_maps_link('satellite'))
    '<a href="http://maps.google.com/maps?q=58.233%2C-77.333+%285814N%2F07720W%29&ll=58.233%2C-77.333&t=k" title="5814N/07720W (58.233, -77.333)">5814N/07720W</a>'

    >>> from geolucidate.links.bing import bing_maps_link
    >>> replace("58N/077W", bing_maps_link('map'))
    '<a href="http://bing.com/maps/default.aspx?style=r&cp=58~-77&sp=Point.58_-77_58N%2F077W&v=2" title="58N/077W (58, -77)">58N/077W</a>'

    """

    def do_replace(match):
        original_string = match.group()
        (latitude, longitude) = _convert(*_cleanup(match.groupdict()))
        return sub_function(MapLink(original_string, latitude, longitude))

    return parser_re.sub(do_replace, string)


def get_replacements(string, sub_function=google_maps_link()):
    """
    Return a dict whose keys are instances of :class:`re.Match` and
    whose values are the corresponding replacements.  Use
    :func:`get_replacements` when the replacement cannot be performed
    through ordinary string substitution by :func:`re.sub`, as in
    :func:`replace`.

    >>> get_replacements("4630 NORTH 5705 WEST 58147N/07720W")
    ... #doctest: +ELLIPSIS
    {<re.Match object...>: '<a href="..." title="...">4630 NORTH 5705 WEST</a>', <re.Match object...>: '<a href="..." title="...">58147N/07720W</a>'}

    >>> test_string = "4630 NORTH 5705 WEST 58147N/07720W"
    >>> replacements = get_replacements(test_string)

    >>> offset = 0
    >>> out = bytearray(test_string, encoding="ascii", errors="replace")

    >>> for (match, link) in replacements.items():
    ...     start = match.start() + offset
    ...     end = match.end() + offset
    ...     out[start:end] = bytearray(link, encoding="ascii", errors="replace")
    ...     offset += (len(link) - len(match.group()))

    >>> out.decode(encoding="ascii") == replace(test_string)
    True
    """
    substitutions = {}
    matches = parser_re.finditer(string)

    for match in matches:
        (latitude, longitude) = _convert(*_cleanup(match.groupdict()))
        substitutions[match] = sub_function(MapLink(match.group(),
                                                     latitude, longitude))

    return substitutions
[((222, 249), 'decimal.setcontext', 'setcontext', (['ExtendedContext'], {}), '(ExtendedContext)\n', (232, 249), False, 'from decimal import Decimal, setcontext, ExtendedContext\n'), ((2246, 2261), 'decimal.Decimal', 'Decimal', (['latdeg'], {}), '(latdeg)\n', (2253, 2261), False, 'from decimal import Decimal, setcontext, ExtendedContext\n'), ((2275, 2290), 'decimal.Decimal', 'Decimal', (['latmin'], {}), '(latmin)\n', (2282, 2290), False, 'from decimal import Decimal, setcontext, ExtendedContext\n'), ((2304, 2319), 'decimal.Decimal', 'Decimal', (['latsec'], {}), '(latsec)\n', (2311, 2319), False, 'from decimal import Decimal, setcontext, ExtendedContext\n'), ((2337, 2353), 'decimal.Decimal', 'Decimal', (['longdeg'], {}), '(longdeg)\n', (2344, 2353), False, 'from decimal import Decimal, setcontext, ExtendedContext\n'), ((2368, 2384), 'decimal.Decimal', 'Decimal', (['longmin'], {}), '(longmin)\n', (2375, 2384), False, 'from decimal import Decimal, setcontext, ExtendedContext\n'), ((2399, 2415), 'decimal.Decimal', 'Decimal', (['longsec'], {}), '(longsec)\n', (2406, 2415), False, 'from decimal import Decimal, setcontext, ExtendedContext\n'), ((3228, 3246), 'geolucidate.links.google.google_maps_link', 'google_maps_link', ([], {}), '()\n', (3244, 3246), False, 'from geolucidate.links.google import google_maps_link\n'), ((4475, 4508), 'geolucidate.parser.parser_re.sub', 'parser_re.sub', (['do_replace', 'string'], {}), '(do_replace, string)\n', (4488, 4508), False, 'from geolucidate.parser import parser_re\n'), ((4553, 4571), 'geolucidate.links.google.google_maps_link', 'google_maps_link', ([], {}), '()\n', (4569, 4571), False, 'from geolucidate.links.google import google_maps_link\n'), ((5698, 5724), 'geolucidate.parser.parser_re.finditer', 'parser_re.finditer', (['string'], {}), '(string)\n', (5716, 5724), False, 'from geolucidate.parser import parser_re\n'), ((2084, 2103), 'decimal.Decimal', 'Decimal', (['"""0.000001"""'], {}), "('0.000001')\n", (2091, 2103), False, 'from decimal import Decimal, setcontext, ExtendedContext\n'), ((2989, 3002), 'decimal.Decimal', 'Decimal', (['"""-1"""'], {}), "('-1')\n", (2996, 3002), False, 'from decimal import Decimal, setcontext, ExtendedContext\n'), ((3048, 3061), 'decimal.Decimal', 'Decimal', (['"""-1"""'], {}), "('-1')\n", (3055, 3061), False, 'from decimal import Decimal, setcontext, ExtendedContext\n'), ((2170, 2186), 'decimal.Decimal', 'Decimal', (['"""0.001"""'], {}), "('0.001')\n", (2177, 2186), False, 'from decimal import Decimal, setcontext, ExtendedContext\n'), ((2217, 2229), 'decimal.Decimal', 'Decimal', (['"""1"""'], {}), "('1')\n", (2224, 2229), False, 'from decimal import Decimal, setcontext, ExtendedContext\n'), ((2634, 2647), 'decimal.Decimal', 'Decimal', (['"""60"""'], {}), "('60')\n", (2641, 2647), False, 'from decimal import Decimal, setcontext, ExtendedContext\n'), ((2732, 2745), 'decimal.Decimal', 'Decimal', (['"""60"""'], {}), "('60')\n", (2739, 2745), False, 'from decimal import Decimal, setcontext, ExtendedContext\n'), ((2835, 2848), 'decimal.Decimal', 'Decimal', (['"""60"""'], {}), "('60')\n", (2842, 2848), False, 'from decimal import Decimal, setcontext, ExtendedContext\n'), ((2932, 2945), 'decimal.Decimal', 'Decimal', (['"""60"""'], {}), "('60')\n", (2939, 2945), False, 'from decimal import Decimal, setcontext, ExtendedContext\n'), ((4416, 4461), 'geolucidate.links.tools.MapLink', 'MapLink', (['original_string', 'latitude', 'longitude'], {}), '(original_string, latitude, longitude)\n', (4423, 4461), False, 'from geolucidate.links.tools import MapLink\n'), ((2615, 2629), 'decimal.Decimal', 'Decimal', (['"""100"""'], {}), "('100')\n", (2622, 2629), False, 'from decimal import Decimal, setcontext, ExtendedContext\n'), ((2713, 2727), 'decimal.Decimal', 'Decimal', (['"""100"""'], {}), "('100')\n", (2720, 2727), False, 'from decimal import Decimal, setcontext, ExtendedContext\n'), ((2817, 2830), 'decimal.Decimal', 'Decimal', (['"""60"""'], {}), "('60')\n", (2824, 2830), False, 'from decimal import Decimal, setcontext, ExtendedContext\n'), ((2914, 2927), 'decimal.Decimal', 'Decimal', (['"""60"""'], {}), "('60')\n", (2921, 2927), False, 'from decimal import Decimal, setcontext, ExtendedContext\n')]
rluzuriaga/pokedex
setup.py
e5c18c410994d5fb589bc3dceaba71f85268edfb
from setuptools import setup, find_packages setup( name='Pokedex', version='0.1', zip_safe=False, packages=find_packages(), package_data={ 'pokedex': ['data/csv/*.csv'] }, install_requires=[ 'SQLAlchemy>=1.0,<2.0', 'whoosh>=2.5,<2.7', 'markdown==2.4.1', 'construct==2.5.3', 'six>=1.9.0', ], entry_points={ 'console_scripts': [ 'pokedex = pokedex.main:setuptools_entry', ], }, classifiers=[ "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.7", ] )
[((124, 139), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (137, 139), False, 'from setuptools import setup, find_packages\n')]
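The console_scripts entry point declared above installs a pokedex command that dispatches to pokedex.main:setuptools_entry. As a sketch, the equivalent direct invocation in Python would be (assuming the package from this repository is importable):

# Hypothetical driver equivalent to the installed `pokedex` console script.
from pokedex.main import setuptools_entry

if __name__ == '__main__':
    setuptools_entry()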
j-luo93/lingvo
lingvo/tasks/asr/encoder.py
7398974078391362f0c1b027164a8f33f88cf86b
# Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Encoders for the speech model.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections from six.moves import range from six.moves import zip import tensorflow as tf from tensorflow.python.ops import inplace_ops from lingvo.core import base_encoder from lingvo.core import base_layer from lingvo.core import layers from lingvo.core import plot from lingvo.core import py_utils from lingvo.core import rnn_cell from lingvo.core import rnn_layers from lingvo.core import summary_utils from lingvo.core import model_helper ConvLSTMBlock = collections.namedtuple('ConvLSTMBlock', ('rnn', 'cnn')) class AsrEncoder(base_encoder.BaseEncoder): """Speech encoder version 1.""" @classmethod def Params(cls): """Configs for AsrEncoder.""" p = super(AsrEncoder, cls).Params() p.Define('lstm_tpl', rnn_cell.LSTMCellSimple.Params(), 'Configs template for the RNN layer.') p.Define('cnn_tpl', layers.ConvLayer.Params(), 'Configs template for the conv layer.') p.Define('proj_tpl', layers.ProjectionLayer.Params(), 'Configs template for the projection layer.') p.Define( 'highway_skip', False, 'If set, residual connections from different layers are gated. ' 'Will only be used if residual_start is enabled.') p.Define('highway_skip_tpl', layers.HighwaySkipLayer.Params(), 'Configs template for the highway skip layer.') p.Define('conv_lstm_tpl', rnn_cell.ConvLSTMCell.Params(), 'Configs template for ConvLSTMCell.') p.Define( 'after_conv_lstm_cnn_tpl', layers.ConvLayer.Params(), 'Configs template for the cnn layer immediately follow the' ' convlstm layer.') p.Define('conv_filter_shapes', None, 'Filter shapes for each conv layer.') p.Define('conv_filter_strides', None, 'Filter strides for each conv layer.') p.Define('input_shape', [None, None, None, None], 'Shape of the input. This should a TensorShape with rank 4.') p.Define('lstm_cell_size', 256, 'LSTM cell size for the RNN layer.') p.Define('num_cnn_layers', 2, 'Number of conv layers to create.') p.Define('num_conv_lstm_layers', 1, 'Number of conv lstm layers to create.') p.Define('num_lstm_layers', 3, 'Number of rnn layers to create') p.Define('project_lstm_output', True, 'Include projection layer after each encoder LSTM layer.') p.Define('pad_steps', 6, 'Extra zero-padded timesteps to add to the input sequence. ') p.Define( 'residual_start', 0, 'Start residual connections from this lstm layer. ' 'Disabled if 0 or greater than num_lstm_layers.') p.Define('residual_stride', 1, 'Number of lstm layers to skip per residual connection.') p.Define( 'bidi_rnn_type', 'func', 'Options: func, native_cudnn. ' 'func: BidirectionalFRNN, ' 'native_cudnn: BidirectionalNativeCuDNNLSTM.') # TODO(yonghui): Maybe move those configs to a separate file. # Set some reasonable default values. 
# # NOTE(yonghui): The default config below assumes the following encoder # architecture: # # cnn/batch-norm/relu -> # cnn/batch-norm/relu -> # bidirectional conv-lstm -> # cnn/batch-norm/relu # bidirectional lstm -> # projection/batch-norm/relu -> # bidirectional lstm -> # projection/batch-norm/relu -> # bidirectional lstm # # Default config for the rnn layer. p.lstm_tpl.params_init = py_utils.WeightInit.Uniform(0.1) # Default config for the convolution layer. p.input_shape = [None, None, 80, 3] p.conv_filter_shapes = [(3, 3, 3, 32), (3, 3, 32, 32)] p.conv_filter_strides = [(2, 2), (2, 2)] p.cnn_tpl.params_init = py_utils.WeightInit.TruncatedGaussian(0.1) # TODO(yonghui): Disable variational noise logic. # NOTE(yonghui): Fortunately, variational noise logic is currently not # implemented for ConvLayer yet (as of sep 22, 2016). # Default config for the projection layer. p.proj_tpl.params_init = py_utils.WeightInit.TruncatedGaussian(0.1) # TODO(yonghui): Disable variational noise logic. # NOTE(yonghui): Fortunately, variational noise logic is currently not # implemented for ProjectionLayer yet (as of sep 22, 2016). p.conv_lstm_tpl.filter_shape = [1, 3] # height (time), width (frequency) p.conv_lstm_tpl.inputs_shape = [None, None, None, None] p.conv_lstm_tpl.cell_shape = [None, None, None, None] p.conv_lstm_tpl.params_init = py_utils.WeightInit.TruncatedGaussian(0.1) p.after_conv_lstm_cnn_tpl.filter_shape = [3, 3, None, None] p.after_conv_lstm_cnn_tpl.params_init = ( py_utils.WeightInit.TruncatedGaussian(0.1)) p.after_conv_lstm_cnn_tpl.filter_stride = [1, 1] return p @base_layer.initializer def __init__(self, params): super(AsrEncoder, self).__init__(params) p = self.params assert p.packed_input is False, ('Packed inputs are not yet supported for ' 'AsrEncoder.') name = p.name with tf.variable_scope(name): # First create the conv layers. assert p.num_cnn_layers == len(p.conv_filter_shapes) assert p.num_cnn_layers == len(p.conv_filter_strides) params_conv_layers = [] for i in range(p.num_cnn_layers): conv_p = p.cnn_tpl.Copy() conv_p.name = 'conv_L%d' % (i) conv_p.filter_shape = p.conv_filter_shapes[i] conv_p.filter_stride = p.conv_filter_strides[i] conv_p.is_eval = p.is_eval params_conv_layers.append(conv_p) self.CreateChildren('conv', params_conv_layers) conv_output_shape = tf.TensorShape(p.input_shape) for i in range(p.num_cnn_layers): conv_output_shape = self.conv[i].OutShape(conv_output_shape) conv_output_shape = conv_output_shape.as_list() assert len(conv_output_shape) == 4 # batch, height, width, channel. params_conv_lstm_rnn = [] params_conv_lstm_cnn = [] for i in range(p.num_conv_lstm_layers): # NOTE(yonghui): We assume that output from ConvLSTMBlock has the same # shape as its input. _, _, width, in_channel = conv_output_shape f_conv_lstm_p = p.conv_lstm_tpl.Copy() f_conv_lstm_p.name = 'f_conv_lstm_%d' % (i) f_conv_lstm_p.inputs_shape = [None, 1, width, in_channel] f_conv_lstm_p.cell_shape = [None, 1, width, in_channel] b_conv_lstm_p = f_conv_lstm_p.Copy() b_conv_lstm_p.name = 'b_conv_lstm_%d' % (i) conv_lstm_rnn_p = self.CreateConvLstmLayerParams() conv_lstm_rnn_p.name = 'conv_lstm_rnn' conv_lstm_rnn_p.fwd = f_conv_lstm_p conv_lstm_rnn_p.bak = b_conv_lstm_p params_conv_lstm_rnn.append(conv_lstm_rnn_p) cnn_p = p.after_conv_lstm_cnn_tpl.Copy() cnn_p.name = 'conv_lstm_cnn_%d' % (i) cnn_p.filter_shape[2] = 2 * in_channel cnn_p.filter_shape[3] = in_channel params_conv_lstm_cnn.append(cnn_p) # TODO(yonghui): Refactor ConvLSTMBlock into a layer. 
self.CreateChildren('conv_lstm_rnn', params_conv_lstm_rnn) self.CreateChildren('conv_lstm_cnn', params_conv_lstm_cnn) (self._first_lstm_input_dim, self._first_lstm_input_dim_pad) = self.FirstLstmLayerInputDimAndPadding( conv_output_shape, pad_to_multiple=16) # Now create all the rnn layers and projection layers. # TODO(yonghui): take care of device placement. params_rnn_layers = [] params_proj_layers = [] params_highway_skip_layers = [] for i in range(p.num_lstm_layers): if i == 0: input_dim = self._first_lstm_input_dim else: input_dim = 2 * p.lstm_cell_size forward_p = p.lstm_tpl.Copy() forward_p.name = 'fwd_rnn_L%d' % (i) forward_p.num_input_nodes = input_dim forward_p.num_output_nodes = p.lstm_cell_size backward_p = forward_p.Copy() backward_p.name = 'bak_rnn_L%d' % (i) rnn_p = self.CreateBidirectionalRNNParams(forward_p, backward_p) rnn_p.name = 'brnn_L%d' % (i) params_rnn_layers.append(rnn_p) if p.project_lstm_output and (i < p.num_lstm_layers - 1): proj_p = p.proj_tpl.Copy() proj_p.input_dim = 2 * p.lstm_cell_size proj_p.output_dim = 2 * p.lstm_cell_size proj_p.name = 'proj_L%d' % (i) proj_p.is_eval = p.is_eval params_proj_layers.append(proj_p) # add the skip layers residual_index = i - p.residual_start + 1 if p.residual_start > 0 and residual_index >= 0 and p.highway_skip: highway_skip = p.highway_skip_tpl.Copy() highway_skip.name = 'enc_hwskip_%d' % len(params_highway_skip_layers) highway_skip.input_dim = 2 * p.lstm_cell_size params_highway_skip_layers.append(highway_skip) self.CreateChildren('rnn', params_rnn_layers) self.CreateChildren('proj', params_proj_layers) self.CreateChildren('highway_skip', params_highway_skip_layers) @property def _use_functional(self): return True def CreateBidirectionalRNNParams(self, forward_p, backward_p): return model_helper.CreateBidirectionalRNNParams(self.params, forward_p, backward_p) def CreateConvLstmLayerParams(self): return rnn_layers.BidirectionalFRNN.Params() def FirstLstmLayerInputDimAndPadding(self, conv_output_shape, pad_to_multiple=16): lstm_input_shape = conv_output_shape # Makes sure the lstm input dims is multiple of 16 (alignment # requirement from FRNN). first_lstm_input_dim_unpadded = lstm_input_shape[2] * lstm_input_shape[3] if self._use_functional and (first_lstm_input_dim_unpadded % pad_to_multiple != 0): first_lstm_input_dim = int( (first_lstm_input_dim_unpadded + pad_to_multiple - 1) / pad_to_multiple) * pad_to_multiple else: first_lstm_input_dim = first_lstm_input_dim_unpadded first_lstm_input_dim_padding = ( first_lstm_input_dim - first_lstm_input_dim_unpadded) return first_lstm_input_dim, first_lstm_input_dim_padding @property def supports_streaming(self): return False def zero_state(self, batch_size): return py_utils.NestedMap() def FProp(self, theta, batch, state0=None): """Encodes source as represented by 'inputs' and 'paddings'. Args: theta: A NestedMap object containing weights' values of this layer and its children layers. batch: A NestedMap with fields: src_inputs - The inputs tensor. It is expected to be of shape [batch, time, feature_dim, channels]. paddings - The paddings tensor. It is expected to be of shape [batch, time]. state0: Recurrent input state. Not supported/ignored by this encoder. Returns: (outputs, out_paddings, state1) tuple. Outputs is of the shape [time, batch, depth], and out_paddings is of the shape [time, batch] """ p = self.params inputs, paddings = batch.src_inputs, batch.paddings with tf.name_scope(p.name): # Add a few extra padded timesteps at the end. 
This is for ensuring the # correctness of the conv-layers at the edges. if p.pad_steps > 0: # inplace_update() is not supported by TPU for now. Since we have done # padding on the input_generator, we may avoid this additional padding. assert not py_utils.use_tpu() inputs_pad = tf.zeros( inplace_ops.inplace_update(tf.shape(inputs), 1, p.pad_steps), inputs.dtype) paddings_pad = tf.ones( inplace_ops.inplace_update(tf.shape(paddings), 1, p.pad_steps), paddings.dtype) inputs = tf.concat([inputs, inputs_pad], 1, name='inputs') paddings = tf.concat([paddings, paddings_pad], 1) def ReshapeForPlot(tensor, padding, name): """Transposes and flattens channels to [batch, dim, seq_len] shape.""" # Flatten any dimensions beyond the third into the third. batch_size = tf.shape(tensor)[0] max_len = tf.shape(tensor)[1] plot_tensor = tf.reshape(tensor, [batch_size, max_len, -1]) plot_tensor = tf.transpose(plot_tensor, [0, 2, 1], name=name) return (plot_tensor, summary_utils.SequenceLength(padding)) plots = [ ReshapeForPlot( tf.transpose(inputs, [0, 1, 3, 2]), paddings, 'inputs') ] conv_out = inputs out_padding = paddings for i, conv_layer in enumerate(self.conv): conv_out, out_padding = conv_layer.FProp(theta.conv[i], conv_out, out_padding) plots.append( ReshapeForPlot( tf.transpose(conv_out, [0, 1, 3, 2]), out_padding, 'conv_%d_out' % i)) def TransposeFirstTwoDims(t): first_dim = tf.shape(t)[0] second_dim = tf.shape(t)[1] t_new = tf.transpose( tf.reshape(t, [first_dim, second_dim, -1]), [1, 0, 2]) t_shape_new = tf.concat([[second_dim], [first_dim], tf.shape(t)[2:]], 0) return tf.reshape(t_new, t_shape_new) # Now the conv-lstm part. conv_lstm_out = conv_out conv_lstm_out_padding = out_padding for i, (rnn, cnn) in enumerate( zip(self.conv_lstm_rnn, self.conv_lstm_cnn)): conv_lstm_in = conv_lstm_out # Move time dimension to be the first. conv_lstm_in = TransposeFirstTwoDims(conv_lstm_in) conv_lstm_in = tf.expand_dims(conv_lstm_in, 2) conv_lstm_in_padding = tf.expand_dims( tf.transpose(conv_lstm_out_padding), 2) lstm_out = rnn.FProp(theta.conv_lstm_rnn[i], conv_lstm_in, conv_lstm_in_padding) # Move time dimension to be the second. cnn_in = TransposeFirstTwoDims(lstm_out) cnn_in = tf.squeeze(cnn_in, 2) cnn_in_padding = conv_lstm_out_padding cnn_out, cnn_out_padding = cnn.FProp(theta.conv_lstm_cnn[i], cnn_in, cnn_in_padding) conv_lstm_out, conv_lstm_out_padding = cnn_out, cnn_out_padding plots.append( ReshapeForPlot(conv_lstm_out, conv_lstm_out_padding, 'conv_lstm_%d_out' % i)) # Need to do a reshape before starting the rnn layers. conv_lstm_out = py_utils.HasRank(conv_lstm_out, 4) conv_lstm_out_shape = tf.shape(conv_lstm_out) new_shape = tf.concat([conv_lstm_out_shape[:2], [-1]], 0) conv_lstm_out = tf.reshape(conv_lstm_out, new_shape) if self._first_lstm_input_dim_pad: conv_lstm_out = tf.pad( conv_lstm_out, [[0, 0], [0, 0], [0, self._first_lstm_input_dim_pad]]) conv_lstm_out = py_utils.HasShape(conv_lstm_out, [-1, -1, self._first_lstm_input_dim]) # Transpose to move the time dimension to be the first. rnn_in = tf.transpose(conv_lstm_out, [1, 0, 2]) rnn_padding = tf.expand_dims(tf.transpose(conv_lstm_out_padding), 2) # rnn_in is of shape [time, batch, depth] # rnn_padding is of shape [time, batch, 1] # Now the rnn layers. 
num_skips = 0 for i in range(p.num_lstm_layers): rnn_out = self.rnn[i].FProp(theta.rnn[i], rnn_in, rnn_padding) residual_index = i - p.residual_start + 1 if p.residual_start > 0 and residual_index >= 0: if residual_index % p.residual_stride == 0: residual_in = rnn_in if residual_index % p.residual_stride == p.residual_stride - 1: # Highway skip connection. if p.highway_skip: rnn_out = self.highway_skip[num_skips].FProp( theta.highway_skip[num_skips], residual_in, rnn_out) num_skips += 1 else: # Residual skip connection. rnn_out += py_utils.HasShape(residual_in, tf.shape(rnn_out)) if p.project_lstm_output and (i < p.num_lstm_layers - 1): # Projection layers. rnn_out = self.proj[i].FProp(theta.proj[i], rnn_out, rnn_padding) if i == p.num_lstm_layers - 1: rnn_out *= (1.0 - rnn_padding) plots.append( ReshapeForPlot( tf.transpose(rnn_out, [1, 0, 2]), tf.transpose(rnn_padding, [1, 0, 2]), 'rnn_%d_out' % i)) rnn_in = rnn_out final_out = rnn_in if self.cluster.add_summary: fig = plot.MatplotlibFigureSummary( 'encoder_example', figsize=(8, len(plots) * 3.5)) # Order layers from bottom to top. plots.reverse() for tensor, seq_len in plots: fig.AddSubplot( [tensor, seq_len], summary_utils.TrimPaddingAndPlotSequence, title=tensor.name, xlabel='Time') fig.Finalize() rnn_padding = tf.squeeze(rnn_padding, [2]) return final_out, rnn_padding, py_utils.NestedMap()
[((1225, 1280), 'collections.namedtuple', 'collections.namedtuple', (['"""ConvLSTMBlock"""', "('rnn', 'cnn')"], {}), "('ConvLSTMBlock', ('rnn', 'cnn'))\n", (1247, 1280), False, 'import collections\n'), ((4201, 4233), 'lingvo.core.py_utils.WeightInit.Uniform', 'py_utils.WeightInit.Uniform', (['(0.1)'], {}), '(0.1)\n', (4228, 4233), False, 'from lingvo.core import py_utils\n'), ((4455, 4497), 'lingvo.core.py_utils.WeightInit.TruncatedGaussian', 'py_utils.WeightInit.TruncatedGaussian', (['(0.1)'], {}), '(0.1)\n', (4492, 4497), False, 'from lingvo.core import py_utils\n'), ((4762, 4804), 'lingvo.core.py_utils.WeightInit.TruncatedGaussian', 'py_utils.WeightInit.TruncatedGaussian', (['(0.1)'], {}), '(0.1)\n', (4799, 4804), False, 'from lingvo.core import py_utils\n'), ((5229, 5271), 'lingvo.core.py_utils.WeightInit.TruncatedGaussian', 'py_utils.WeightInit.TruncatedGaussian', (['(0.1)'], {}), '(0.1)\n', (5266, 5271), False, 'from lingvo.core import py_utils\n'), ((5390, 5432), 'lingvo.core.py_utils.WeightInit.TruncatedGaussian', 'py_utils.WeightInit.TruncatedGaussian', (['(0.1)'], {}), '(0.1)\n', (5427, 5432), False, 'from lingvo.core import py_utils\n'), ((9912, 9989), 'lingvo.core.model_helper.CreateBidirectionalRNNParams', 'model_helper.CreateBidirectionalRNNParams', (['self.params', 'forward_p', 'backward_p'], {}), '(self.params, forward_p, backward_p)\n', (9953, 9989), False, 'from lingvo.core import model_helper\n'), ((10094, 10131), 'lingvo.core.rnn_layers.BidirectionalFRNN.Params', 'rnn_layers.BidirectionalFRNN.Params', ([], {}), '()\n', (10129, 10131), False, 'from lingvo.core import rnn_layers\n'), ((11119, 11139), 'lingvo.core.py_utils.NestedMap', 'py_utils.NestedMap', ([], {}), '()\n', (11137, 11139), False, 'from lingvo.core import py_utils\n'), ((1495, 1527), 'lingvo.core.rnn_cell.LSTMCellSimple.Params', 'rnn_cell.LSTMCellSimple.Params', ([], {}), '()\n', (1525, 1527), False, 'from lingvo.core import rnn_cell\n'), ((1605, 1630), 'lingvo.core.layers.ConvLayer.Params', 'layers.ConvLayer.Params', ([], {}), '()\n', (1628, 1630), False, 'from lingvo.core import layers\n'), ((1710, 1741), 'lingvo.core.layers.ProjectionLayer.Params', 'layers.ProjectionLayer.Params', ([], {}), '()\n', (1739, 1741), False, 'from lingvo.core import layers\n'), ((2012, 2044), 'lingvo.core.layers.HighwaySkipLayer.Params', 'layers.HighwaySkipLayer.Params', ([], {}), '()\n', (2042, 2044), False, 'from lingvo.core import layers\n'), ((2137, 2167), 'lingvo.core.rnn_cell.ConvLSTMCell.Params', 'rnn_cell.ConvLSTMCell.Params', ([], {}), '()\n', (2165, 2167), False, 'from lingvo.core import rnn_cell\n'), ((2269, 2294), 'lingvo.core.layers.ConvLayer.Params', 'layers.ConvLayer.Params', ([], {}), '()\n', (2292, 2294), False, 'from lingvo.core import layers\n'), ((5782, 5805), 'tensorflow.variable_scope', 'tf.variable_scope', (['name'], {}), '(name)\n', (5799, 5805), True, 'import tensorflow as tf\n'), ((6010, 6033), 'six.moves.range', 'range', (['p.num_cnn_layers'], {}), '(p.num_cnn_layers)\n', (6015, 6033), False, 'from six.moves import range\n'), ((6376, 6405), 'tensorflow.TensorShape', 'tf.TensorShape', (['p.input_shape'], {}), '(p.input_shape)\n', (6390, 6405), True, 'import tensorflow as tf\n'), ((6421, 6444), 'six.moves.range', 'range', (['p.num_cnn_layers'], {}), '(p.num_cnn_layers)\n', (6426, 6444), False, 'from six.moves import range\n'), ((6724, 6753), 'six.moves.range', 'range', (['p.num_conv_lstm_layers'], {}), '(p.num_conv_lstm_layers)\n', (6729, 6753), False, 'from six.moves import range\n'), ((8303, 8327), 
'six.moves.range', 'range', (['p.num_lstm_layers'], {}), '(p.num_lstm_layers)\n', (8308, 8327), False, 'from six.moves import range\n'), ((11949, 11970), 'tensorflow.name_scope', 'tf.name_scope', (['p.name'], {}), '(p.name)\n', (11962, 11970), True, 'import tensorflow as tf\n'), ((15274, 15308), 'lingvo.core.py_utils.HasRank', 'py_utils.HasRank', (['conv_lstm_out', '(4)'], {}), '(conv_lstm_out, 4)\n', (15290, 15308), False, 'from lingvo.core import py_utils\n'), ((15337, 15360), 'tensorflow.shape', 'tf.shape', (['conv_lstm_out'], {}), '(conv_lstm_out)\n', (15345, 15360), True, 'import tensorflow as tf\n'), ((15379, 15424), 'tensorflow.concat', 'tf.concat', (['[conv_lstm_out_shape[:2], [-1]]', '(0)'], {}), '([conv_lstm_out_shape[:2], [-1]], 0)\n', (15388, 15424), True, 'import tensorflow as tf\n'), ((15447, 15483), 'tensorflow.reshape', 'tf.reshape', (['conv_lstm_out', 'new_shape'], {}), '(conv_lstm_out, new_shape)\n', (15457, 15483), True, 'import tensorflow as tf\n'), ((15674, 15744), 'lingvo.core.py_utils.HasShape', 'py_utils.HasShape', (['conv_lstm_out', '[-1, -1, self._first_lstm_input_dim]'], {}), '(conv_lstm_out, [-1, -1, self._first_lstm_input_dim])\n', (15691, 15744), False, 'from lingvo.core import py_utils\n'), ((15863, 15901), 'tensorflow.transpose', 'tf.transpose', (['conv_lstm_out', '[1, 0, 2]'], {}), '(conv_lstm_out, [1, 0, 2])\n', (15875, 15901), True, 'import tensorflow as tf\n'), ((16138, 16162), 'six.moves.range', 'range', (['p.num_lstm_layers'], {}), '(p.num_lstm_layers)\n', (16143, 16162), False, 'from six.moves import range\n'), ((17813, 17841), 'tensorflow.squeeze', 'tf.squeeze', (['rnn_padding', '[2]'], {}), '(rnn_padding, [2])\n', (17823, 17841), True, 'import tensorflow as tf\n'), ((12610, 12659), 'tensorflow.concat', 'tf.concat', (['[inputs, inputs_pad]', '(1)'], {'name': '"""inputs"""'}), "([inputs, inputs_pad], 1, name='inputs')\n", (12619, 12659), True, 'import tensorflow as tf\n'), ((12679, 12717), 'tensorflow.concat', 'tf.concat', (['[paddings, paddings_pad]', '(1)'], {}), '([paddings, paddings_pad], 1)\n', (12688, 12717), True, 'import tensorflow as tf\n'), ((13014, 13059), 'tensorflow.reshape', 'tf.reshape', (['tensor', '[batch_size, max_len, -1]'], {}), '(tensor, [batch_size, max_len, -1])\n', (13024, 13059), True, 'import tensorflow as tf\n'), ((13082, 13129), 'tensorflow.transpose', 'tf.transpose', (['plot_tensor', '[0, 2, 1]'], {'name': 'name'}), '(plot_tensor, [0, 2, 1], name=name)\n', (13094, 13129), True, 'import tensorflow as tf\n'), ((14012, 14042), 'tensorflow.reshape', 'tf.reshape', (['t_new', 't_shape_new'], {}), '(t_new, t_shape_new)\n', (14022, 14042), True, 'import tensorflow as tf\n'), ((14197, 14240), 'six.moves.zip', 'zip', (['self.conv_lstm_rnn', 'self.conv_lstm_cnn'], {}), '(self.conv_lstm_rnn, self.conv_lstm_cnn)\n', (14200, 14240), False, 'from six.moves import zip\n'), ((14409, 14440), 'tensorflow.expand_dims', 'tf.expand_dims', (['conv_lstm_in', '(2)'], {}), '(conv_lstm_in, 2)\n', (14423, 14440), True, 'import tensorflow as tf\n'), ((14772, 14793), 'tensorflow.squeeze', 'tf.squeeze', (['cnn_in', '(2)'], {}), '(cnn_in, 2)\n', (14782, 14793), True, 'import tensorflow as tf\n'), ((15549, 15625), 'tensorflow.pad', 'tf.pad', (['conv_lstm_out', '[[0, 0], [0, 0], [0, self._first_lstm_input_dim_pad]]'], {}), '(conv_lstm_out, [[0, 0], [0, 0], [0, self._first_lstm_input_dim_pad]])\n', (15555, 15625), True, 'import tensorflow as tf\n'), ((15937, 15972), 'tensorflow.transpose', 'tf.transpose', (['conv_lstm_out_padding'], {}), 
'(conv_lstm_out_padding)\n', (15949, 15972), True, 'import tensorflow as tf\n'), ((17879, 17899), 'lingvo.core.py_utils.NestedMap', 'py_utils.NestedMap', ([], {}), '()\n', (17897, 17899), False, 'from lingvo.core import py_utils\n'), ((12307, 12325), 'lingvo.core.py_utils.use_tpu', 'py_utils.use_tpu', ([], {}), '()\n', (12323, 12325), False, 'from lingvo.core import py_utils\n'), ((12934, 12950), 'tensorflow.shape', 'tf.shape', (['tensor'], {}), '(tensor)\n', (12942, 12950), True, 'import tensorflow as tf\n'), ((12972, 12988), 'tensorflow.shape', 'tf.shape', (['tensor'], {}), '(tensor)\n', (12980, 12988), True, 'import tensorflow as tf\n'), ((13159, 13196), 'lingvo.core.summary_utils.SequenceLength', 'summary_utils.SequenceLength', (['padding'], {}), '(padding)\n', (13187, 13196), False, 'from lingvo.core import summary_utils\n'), ((13255, 13289), 'tensorflow.transpose', 'tf.transpose', (['inputs', '[0, 1, 3, 2]'], {}), '(inputs, [0, 1, 3, 2])\n', (13267, 13289), True, 'import tensorflow as tf\n'), ((13768, 13779), 'tensorflow.shape', 'tf.shape', (['t'], {}), '(t)\n', (13776, 13779), True, 'import tensorflow as tf\n'), ((13804, 13815), 'tensorflow.shape', 'tf.shape', (['t'], {}), '(t)\n', (13812, 13815), True, 'import tensorflow as tf\n'), ((13861, 13903), 'tensorflow.reshape', 'tf.reshape', (['t', '[first_dim, second_dim, -1]'], {}), '(t, [first_dim, second_dim, -1])\n', (13871, 13903), True, 'import tensorflow as tf\n'), ((14500, 14535), 'tensorflow.transpose', 'tf.transpose', (['conv_lstm_out_padding'], {}), '(conv_lstm_out_padding)\n', (14512, 14535), True, 'import tensorflow as tf\n'), ((12396, 12412), 'tensorflow.shape', 'tf.shape', (['inputs'], {}), '(inputs)\n', (12404, 12412), True, 'import tensorflow as tf\n'), ((12528, 12546), 'tensorflow.shape', 'tf.shape', (['paddings'], {}), '(paddings)\n', (12536, 12546), True, 'import tensorflow as tf\n'), ((13624, 13660), 'tensorflow.transpose', 'tf.transpose', (['conv_out', '[0, 1, 3, 2]'], {}), '(conv_out, [0, 1, 3, 2])\n', (13636, 13660), True, 'import tensorflow as tf\n'), ((17187, 17219), 'tensorflow.transpose', 'tf.transpose', (['rnn_out', '[1, 0, 2]'], {}), '(rnn_out, [1, 0, 2])\n', (17199, 17219), True, 'import tensorflow as tf\n'), ((17237, 17273), 'tensorflow.transpose', 'tf.transpose', (['rnn_padding', '[1, 0, 2]'], {}), '(rnn_padding, [1, 0, 2])\n', (17249, 17273), True, 'import tensorflow as tf\n'), ((13976, 13987), 'tensorflow.shape', 'tf.shape', (['t'], {}), '(t)\n', (13984, 13987), True, 'import tensorflow as tf\n'), ((16849, 16866), 'tensorflow.shape', 'tf.shape', (['rnn_out'], {}), '(rnn_out)\n', (16857, 16866), True, 'import tensorflow as tf\n')]
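For orientation, the snippet below sketches how an encoder like the AsrEncoder above is typically configured and built in lingvo; the concrete values are illustrative assumptions (they happen to match the file's defaults), not settings taken from a real training config.

# Illustrative configuration sketch; values are assumptions matching the defaults above.
p = AsrEncoder.Params()
p.name = 'asr_encoder'
p.input_shape = [None, None, 80, 3]   # batch, time, feature_dim, channels
p.num_lstm_layers = 3
p.lstm_cell_size = 256
encoder = p.cls(p)  # lingvo convention: a layer is constructed from its Params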
Yudabin/Review_Project
pos_neg_graph/graph_ratio.py
b924199d6845defeb4cd243a99426070c014d8d8
import matplotlib.font_manager as fm
import matplotlib.pyplot as plt
import numpy as np

font_location = './wordcloud_file/malgun.ttf' # For Windows
font_name = fm.FontProperties(fname=font_location).get_name()
plt.rc('font', family=font_name)

def percent_graph2(movie_review) :
    b = movie_review
    labelss = sorted(b['score'].unique())  ## set the labels (Korean label text does not render!!!)
    c = b['score'].value_counts().sort_index()  ## frequency counts
    print(c)
    print(labelss)
    fig = plt.figure(figsize=(8,8))  ## create the canvas
    fig.set_facecolor('white')  ## set the canvas background color to white
    ax = fig.add_subplot()  ## create the frame (axes)
    pie = ax.pie(c,  ## draw the pie chart
           startangle=90,  ## start at 90 degrees
           counterclock=False,  ## draw clockwise.
           # autopct=lambda p : '{:.2f}%'.format(p),  ## print percentages
           wedgeprops=dict(width=0.5),
           colors = ['yellowgreen', 'orange'],
           labels = labelss,
           textprops={'fontsize': 22}
           )
    total = np.sum(c)  ## grand total of the frequency counts
    sum_pct = 0  ## running percentage total
    for i, l in enumerate(labelss):
        ang1, ang2 = pie[0][i].theta1, pie[0][i].theta2  ## start and end angles of wedge i
        r = pie[0][i].r  ## radius of the circle
        x = ((r + 0.5) / 2) * np.cos(np.pi / 180 * ((ang1 + ang2) / 2))  ## x coordinate of the wedge center
        y = ((r + 0.5) / 2) * np.sin(np.pi / 180 * ((ang1 + ang2) / 2))  ## y coordinate of the wedge center
        if i < len(labelss) - 1:
            sum_pct += float(f'{c[i] / total * 100:.2f}')  ## accumulate the percentage.
            ax.text(x, y, f'{c[i] / total * 100:.2f}%', ha='center', va='center', size=22, color='white', weight='bold')  ## draw the percentage text
        else:  ## so the percentages sum to exactly 100, subtract the accumulated value from 100 for the last wedge.
            ax.text(x, y, f'{100 - sum_pct:.2f}%', ha='center', va='center',size=22,color='white', weight='bold')
    # pie.rc('font', family=font_name)
    # plt.legend(pie[0], labelss)  ## show the legend
    plt.savefig('./static/images/pos_neg_ratio.png') # output path
[((211, 243), 'matplotlib.pyplot.rc', 'plt.rc', (['"""font"""'], {'family': 'font_name'}), "('font', family=font_name)\n", (217, 243), True, 'import matplotlib.pyplot as plt\n'), ((461, 487), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 8)'}), '(figsize=(8, 8))\n', (471, 487), True, 'import matplotlib.pyplot as plt\n'), ((933, 942), 'numpy.sum', 'np.sum', (['c'], {}), '(c)\n', (939, 942), True, 'import numpy as np\n'), ((1844, 1892), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""./static/images/pos_neg_ratio.png"""'], {}), "('./static/images/pos_neg_ratio.png')\n", (1855, 1892), True, 'import matplotlib.pyplot as plt\n'), ((161, 199), 'matplotlib.font_manager.FontProperties', 'fm.FontProperties', ([], {'fname': 'font_location'}), '(fname=font_location)\n', (178, 199), True, 'import matplotlib.font_manager as fm\n'), ((1151, 1192), 'numpy.cos', 'np.cos', (['(np.pi / 180 * ((ang1 + ang2) / 2))'], {}), '(np.pi / 180 * ((ang1 + ang2) / 2))\n', (1157, 1192), True, 'import numpy as np\n'), ((1235, 1276), 'numpy.sin', 'np.sin', (['(np.pi / 180 * ((ang1 + ang2) / 2))'], {}), '(np.pi / 180 * ((ang1 + ang2) / 2))\n', (1241, 1276), True, 'import numpy as np\n')]
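percent_graph2 above expects a pandas-style table with a 'score' column; a minimal usage sketch follows. The DataFrame contents are hypothetical sample data, and the call assumes the Malgun font file loaded at module import time is present.

# Hypothetical invocation; 0/1 stand in for negative/positive review scores.
import pandas as pd
from graph_ratio import percent_graph2

movie_review = pd.DataFrame({'score': [0, 1, 1, 0, 1, 1]})
percent_graph2(movie_review)  # writes ./static/images/pos_neg_ratio.png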
kbilak/City-portal
blog/views.py
f567764f3c4ae0287a178acf77f060dde0424f26
from django.shortcuts import render

def index(request):
    return render(request, 'index.html')
[((114, 143), 'django.shortcuts.render', 'render', (['request', '"""index.html"""'], {}), "(request, 'index.html')\n", (120, 143), False, 'from django.shortcuts import render\n')]
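A URLconf is needed before the index view above can serve requests; the wiring below is a plausible sketch, not a file taken from the repository.

# blog/urls.py (hypothetical)
from django.urls import path
from . import views

urlpatterns = [
    path('', views.index, name='index'),
]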
Vincent-Juma/area_master
my_area/views.py
3ea1dd1039053fb4de6326deb967383d09d7145b
from django.shortcuts import render
from .forms import *
from django.shortcuts import redirect, get_object_or_404
from django.contrib.auth.decorators import login_required
from .models import *
from django.views import generic

@login_required(login_url='/accounts/login/')
def home(request):
    mylocs = Myloc.objects.all()
    return render(request, 'home.html', {"mylocs": mylocs})

@login_required(login_url='/accounts/login/')
def add_profile(request):
    current_user = request.user
    profile = Profile.objects.filter(id=current_user.id)
    if request.method == 'POST':
        form = NewProfileForm(request.POST, request.FILES)
        if form.is_valid():
            caption = form.save(commit=False)
            caption.user = current_user
            caption.save()
            return redirect('myprofile')
    else:
        form = NewProfileForm()
    return render(request, 'edit.html', {"form": form})

@login_required(login_url='/accounts/login/')
def my_profile(request):
    current_user = request.user
    # Fixed: the queryset must be bound to the name the template context uses,
    # and .first is a method, so it has to be called.
    my_my_areas = Myloc.objects.filter(user=current_user)
    my_profile = Profile.objects.filter(user=current_user).first()
    return render(request, 'profile.html', {"my_my_areas": my_my_areas, "my_profile": my_profile})

@login_required(login_url='/accounts/login/')
def addmy_area(request):
    current_user = request.user
    if request.method == 'POST':
        form = MylocForm(request.POST, request.FILES)
        if form.is_valid():
            image = form.save(commit=False)
            image.user = current_user
            image.save()
            return redirect('home')
    else:
        form = MylocForm()
    return render(request, 'addmy_area.html', {"form": form})

def myloc_details(request, myloc_id):
    activities = Activity.objects.filter(myloc=myloc_id)
    posts = Post.objects.filter(myloc=myloc_id)
    myloc = Myloc.objects.get(pk=myloc_id)
    return render(request, 'details.html', {'myloc': myloc, 'activities': activities, 'posts': posts})

@login_required(login_url="/accounts/login/")
def new_activity(request, pk):
    current_user = request.user
    myloc = get_object_or_404(Myloc, pk=pk)
    if request.method == 'POST':
        activity_form = NewActivityForm(request.POST, request.FILES)
        if activity_form.is_valid():
            activity = activity_form.save(commit=False)
            activity.user = current_user
            activity.myloc = myloc
            activity.save()
            return redirect('detail', myloc_id=myloc.id)
    else:
        activity_form = NewActivityForm()
    return render(request, 'new_activity.html', {"form": activity_form, 'myloc': myloc})

@login_required(login_url="/accounts/login/")
def new_post(request, pk):
    current_user = request.user
    myloc = get_object_or_404(Myloc, pk=pk)
    if request.method == 'POST':
        post_form = NewPostForm(request.POST, request.FILES)
        if post_form.is_valid():
            post = post_form.save(commit=False)
            post.user = current_user
            post.myloc = myloc
            post.save()
            return redirect('detail', myloc_id=myloc.id)
    else:
        post_form = NewPostForm()
    return render(request, 'new_post.html', {"form": post_form, 'myloc': myloc})

@login_required(login_url='/accounts/login/')
def search_project(request):
    if 'project_name' in request.GET and request.GET["project_name"]:
        search_term = request.GET.get("project_name")
        searched_project = Myloc.search_by_location(search_term)
        message = f"{search_term}"
        return render(request, "search.html", {"message": message, "project": searched_project})
    else:
        message = "No search history"
        return render(request, 'search.html', {"message": message})
[((229, 273), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (243, 273), False, 'from django.contrib.auth.decorators import login_required\n'), ((387, 430), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""accounts/login/"""'}), "(login_url='accounts/login/')\n", (401, 430), False, 'from django.contrib.auth.decorators import login_required\n'), ((925, 968), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""accounts/login/"""'}), "(login_url='accounts/login/')\n", (939, 968), False, 'from django.contrib.auth.decorators import login_required\n'), ((1251, 1295), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (1265, 1295), False, 'from django.contrib.auth.decorators import login_required\n'), ((1989, 2033), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (2003, 2033), False, 'from django.contrib.auth.decorators import login_required\n'), ((2630, 2674), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (2644, 2674), False, 'from django.contrib.auth.decorators import login_required\n'), ((3220, 3264), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (3234, 3264), False, 'from django.contrib.auth.decorators import login_required\n'), ((337, 385), 'django.shortcuts.render', 'render', (['request', '"""home.html"""', "{'mylocs': mylocs}"], {}), "(request, 'home.html', {'mylocs': mylocs})\n", (343, 385), False, 'from django.shortcuts import render\n'), ((875, 919), 'django.shortcuts.render', 'render', (['request', '"""edit.html"""', "{'form': form}"], {}), "(request, 'edit.html', {'form': form})\n", (881, 919), False, 'from django.shortcuts import render\n'), ((1163, 1254), 'django.shortcuts.render', 'render', (['request', '"""profile.html"""', "{'my_my_areas': my_my_areas, 'my_profile': my_profile}"], {}), "(request, 'profile.html', {'my_my_areas': my_my_areas, 'my_profile':\n my_profile})\n", (1169, 1254), False, 'from django.shortcuts import render\n'), ((1655, 1705), 'django.shortcuts.render', 'render', (['request', '"""addmy_area.html"""', "{'form': form}"], {}), "(request, 'addmy_area.html', {'form': form})\n", (1661, 1705), False, 'from django.shortcuts import render\n'), ((1902, 1997), 'django.shortcuts.render', 'render', (['request', '"""details.html"""', "{'myloc': myloc, 'activities': activities, 'posts': posts}"], {}), "(request, 'details.html', {'myloc': myloc, 'activities': activities,\n 'posts': posts})\n", (1908, 1997), False, 'from django.shortcuts import render\n'), ((2108, 2139), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Myloc'], {'pk': 'pk'}), '(Myloc, pk=pk)\n', (2125, 2139), False, 'from django.shortcuts import redirect, get_object_or_404\n'), ((2552, 2629), 'django.shortcuts.render', 'render', (['request', '"""new_activity.html"""', "{'form': activity_form, 'myloc': myloc}"], {}), "(request, 'new_activity.html', {'form': activity_form, 'myloc': myloc})\n", (2558, 2629), False, 'from django.shortcuts import render\n'), ((2745, 2776), 'django.shortcuts.get_object_or_404', 
'get_object_or_404', (['Myloc'], {'pk': 'pk'}), '(Myloc, pk=pk)\n', (2762, 2776), False, 'from django.shortcuts import redirect, get_object_or_404\n'), ((3149, 3218), 'django.shortcuts.render', 'render', (['request', '"""new_post.html"""', "{'form': post_form, 'myloc': myloc}"], {}), "(request, 'new_post.html', {'form': post_form, 'myloc': myloc})\n", (3155, 3218), False, 'from django.shortcuts import render\n'), ((1590, 1606), 'django.shortcuts.redirect', 'redirect', (['"""home"""'], {}), "('home')\n", (1598, 1606), False, 'from django.shortcuts import redirect, get_object_or_404\n'), ((2451, 2488), 'django.shortcuts.redirect', 'redirect', (['"""detail"""'], {'myloc_id': 'myloc.id'}), "('detail', myloc_id=myloc.id)\n", (2459, 2488), False, 'from django.shortcuts import redirect, get_object_or_404\n'), ((3056, 3093), 'django.shortcuts.redirect', 'redirect', (['"""detail"""'], {'myloc_id': 'myloc.id'}), "('detail', myloc_id=myloc.id)\n", (3064, 3093), False, 'from django.shortcuts import redirect, get_object_or_404\n'), ((3533, 3618), 'django.shortcuts.render', 'render', (['request', '"""search.html"""', "{'message': message, 'project': searched_project}"], {}), "(request, 'search.html', {'message': message, 'project':\n searched_project})\n", (3539, 3618), False, 'from django.shortcuts import render\n'), ((3675, 3727), 'django.shortcuts.render', 'render', (['request', '"""search.html"""', "{'message': message}"], {}), "(request, 'search.html', {'message': message})\n", (3681, 3727), False, 'from django.shortcuts import render\n'), ((800, 821), 'django.shortcuts.redirect', 'redirect', (['"""myprofile"""'], {}), "('myprofile')\n", (808, 821), False, 'from django.shortcuts import redirect, get_object_or_404\n')]
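The form classes these views import from .forms are not included in the record; the sketch below shows one plausible ModelForm layout consistent with the views assigning user before saving. The field choices are assumptions.

# my_area/forms.py (hypothetical sketch)
from django import forms
from .models import Profile, Myloc

class MylocForm(forms.ModelForm):
    class Meta:
        model = Myloc
        exclude = ['user']  # the view sets image.user = current_user before saving

class NewProfileForm(forms.ModelForm):
    class Meta:
        model = Profile
        exclude = ['user']  # likewise assigned in add_profile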
d02d33pak/Advent-Of-Code
2020/day_01/__main__.py
765b0302c256ad61864095a537a3f6379901b1c2
""" Day 1 Main Module """ from day01 import parse_input, part1, part2 if __name__ == "__main__": # trying out the new walrus[:=] oprtr in python if (part := int(input("Enter Part: "))) == 1: print(part1(parse_input("input.txt"))) elif part == 2: print(part2(parse_input("input.txt"))) else: print("Wrong choice [1|2]")
[((221, 245), 'day01.parse_input', 'parse_input', (['"""input.txt"""'], {}), "('input.txt')\n", (232, 245), False, 'from day01 import parse_input, part1, part2\n'), ((288, 312), 'day01.parse_input', 'parse_input', (['"""input.txt"""'], {}), "('input.txt')\n", (299, 312), False, 'from day01 import parse_input, part1, part2\n')]
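For comparison, the same dispatch written without the walrus operator (the desugared form, valid on Python versions before 3.8):

# Desugared equivalent of the walrus version above.
from day01 import parse_input, part1, part2

part = int(input("Enter Part: "))
if part == 1:
    print(part1(parse_input("input.txt")))
elif part == 2:
    print(part2(parse_input("input.txt")))
else:
    print("Wrong choice [1|2]")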
ignasgri/Django_Quiz
quiz_app/settings.py
c98969d4181350eaaf8883f3930d0e800c240a44
""" Django settings for quiz_app project. Generated by 'django-admin startproject' using Django 2.1.2. For more information on this file, see https://docs.djangoproject.com/en/2.1/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/2.1/ref/settings/ """ SITE_ID = 1 import os import dj_database_url # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = os.environ.get('SECRET_KEY') # SECURITY WARNING: don't run with debug turned on in production! DEBUG = os.environ.get('DEBUG', False) ALLOWED_HOSTS = ['ignas-quiz.herokuapp.com','localhost','127.0.0.1'] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.sites', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'storages', 'quiz', 'multichoice', 'true_false', 'essay', ] MIDDLEWARE_CLASSES = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'quiz_app.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', 'django.template.context_processors.media', ], }, }, ] WSGI_APPLICATION = 'quiz_app.wsgi.application' # Database # https://docs.djangoproject.com/en/2.1/ref/settings/#databases # DATABASES = { # 'default': { # 'ENGINE': 'django.db.backends.sqlite3', # 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), # } # } # Password validation # https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/2.1/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/2.1/howto/static-files/ STATIC_URL = '/static/' # STATIC_ROOT = 'staticfiles' STATICFILES_DIRS= ( os.path.join(BASE_DIR, "static"), ) MEDIA_ROOT = os.path.join(BASE_DIR, 'media') MEDIA_URL = '/media/' DATABASES = {'default': dj_database_url.parse(os.environ.get('DATABASE_URL')) } AWS_HEADERS = { # see http://developer.yahoo.com/performance/rules.html#expires 'Expires': 'Thu, 31 Dec 2099 20:00:00 GMT', 'Cache-Control': 'max-age=94608000', } AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_STORAGE_BUCKET_NAME") AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID") 
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY") AWS_S3_HOST = 's3-eu-west-1.amazonaws.com' AWS_S3_CUSTOM_DOMAIN = '%s.s3.amazonaws.com' % AWS_STORAGE_BUCKET_NAME STATICFILES_LOCATION = 'static' STATICFILES_STORAGE = 'custom_storages.StaticStorage' STATIC_URL = "https://%s/%s/" % (AWS_S3_CUSTOM_DOMAIN, STATICFILES_LOCATION) MEDIAFILES_LOCATION = 'media' MEDIA_URL = "https://%s/%s/" % (AWS_S3_CUSTOM_DOMAIN, MEDIAFILES_LOCATION) DEFAULT_FILE_STORAGE = 'custom_storages.MediaStorage'
[((715, 743), 'os.environ.get', 'os.environ.get', (['"""SECRET_KEY"""'], {}), "('SECRET_KEY')\n", (729, 743), False, 'import os\n'), ((819, 849), 'os.environ.get', 'os.environ.get', (['"""DEBUG"""', '(False)'], {}), "('DEBUG', False)\n", (833, 849), False, 'import os\n'), ((3470, 3501), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""media"""'], {}), "(BASE_DIR, 'media')\n", (3482, 3501), False, 'import os\n'), ((3806, 3847), 'os.environ.get', 'os.environ.get', (['"""AWS_STORAGE_BUCKET_NAME"""'], {}), "('AWS_STORAGE_BUCKET_NAME')\n", (3820, 3847), False, 'import os\n'), ((3868, 3903), 'os.environ.get', 'os.environ.get', (['"""AWS_ACCESS_KEY_ID"""'], {}), "('AWS_ACCESS_KEY_ID')\n", (3882, 3903), False, 'import os\n'), ((3928, 3967), 'os.environ.get', 'os.environ.get', (['"""AWS_SECRET_ACCESS_KEY"""'], {}), "('AWS_SECRET_ACCESS_KEY')\n", (3942, 3967), False, 'import os\n'), ((3421, 3453), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""static"""'], {}), "(BASE_DIR, 'static')\n", (3433, 3453), False, 'import os\n'), ((469, 494), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (484, 494), False, 'import os\n'), ((3572, 3602), 'os.environ.get', 'os.environ.get', (['"""DATABASE_URL"""'], {}), "('DATABASE_URL')\n", (3586, 3602), False, 'import os\n')]
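STATICFILES_STORAGE and DEFAULT_FILE_STORAGE above point at a custom_storages module that is not part of this file; the conventional django-storages pattern they imply looks like the sketch below (assumed, not the repository's actual custom_storages.py).

# custom_storages.py (hypothetical; the standard django-storages S3 subclassing pattern)
from django.conf import settings
from storages.backends.s3boto3 import S3Boto3Storage

class StaticStorage(S3Boto3Storage):
    location = settings.STATICFILES_LOCATION

class MediaStorage(S3Boto3Storage):
    location = settings.MEDIAFILES_LOCATION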
davrempe/predicting-physical-dynamics
scripts/data/topple_dataset.py
b0abb385a7ac491e25d1df0b9a9a943621fc2d37
import numpy as np import pickle from os.path import exists, realpath import sys import math from topple_data_loader import ToppleData, ToppleDataLoader import transforms3d class ToppleNormalizationInfo(): ''' Structure to hold all the normalization information for a dataset. ''' def __init__(self): # max element of any linear vel vector self.max_lin_vel = None # max element of any angular vel vector self.max_ang_vel = None # max distance between positions in two contiguous timesteps self.max_pos = None # max change in rotation around any axis between two contiguous timesteps (for euler rot) self.max_rot = None # max angle of rotation between two steps for axis-angle representation self.max_delta_rot = None # max 2-norm of applied impulse vector self.force_vec_max = None # max 2-norm of a point in an object point cloud (used for point cloud and force pos) self.pc_max = None # normalization values for shape-related stuff self.density_offset = None self.density_max = None self.mass_offset = None self.mass_max = None self.inertia_offset = None self.inertia_max = None self.friction_offset = None self.friction_max = None def print_out(self): print({'max_lin_vel' : self.max_lin_vel, 'max_ang_vel' : self.max_ang_vel, 'max_pos' : self.max_pos, \ 'max_rot' : self.max_rot, 'max_delta_rot' : self.max_delta_rot, 'force_vec_max' : self.force_vec_max, 'pc_max' : self.pc_max, \ 'density_off' : self.density_offset, 'density_max' : self.density_max, 'mass_off' : self.mass_offset, \ 'mass_max' : self.mass_max, 'inertia_off' : self.inertia_offset, 'inertia_max' : self.inertia_max, \ 'friction_off' : self.friction_offset, 'friction_max' : self.friction_max }) def save(self, pkl_file): ''' Saves normalization info object to a specified .pkl file. ''' with open(pkl_file, 'wb') as f: pickle.dump(self, f) def load_from(self, pkl_file): ''' Load normalization info into this object from a specified .pkl file. ''' with open(pkl_file, 'rb') as f: norm_info = pickle.load(f) self.copy_from(norm_info) def copy_from(self, norm_info): ''' Takes values from the given normalization info object and copies them to this one ''' self.max_lin_vel = norm_info.max_lin_vel self.max_ang_vel = norm_info.max_ang_vel self.max_pos = norm_info.max_pos self.max_rot = norm_info.max_rot try: self.max_delta_rot = norm_info.max_delta_rot except: # old versions of data doesn't have max delta rot pass self.force_vec_max = norm_info.force_vec_max self.pc_max = norm_info.pc_max self.density_offset = norm_info.density_offset self.density_max = norm_info.density_max self.mass_offset = norm_info.mass_offset self.mass_max = norm_info.mass_max self.inertia_offset = norm_info.inertia_offset self.inertia_max = norm_info.inertia_max try: self.friction_offset = norm_info.friction_offset self.friction_max = norm_info.friction_max except: # old version doesn't have this pass class ToppleBatch(object): ''' Structure to hold a single batch of data. 
''' def __init__(self, size, seq_len, num_pts): self.size = size self.num_steps = seq_len self.num_pts = num_pts self.point_cloud = np.zeros((self.size, self.num_pts, 3)) self.lin_vel = np.zeros((self.size, self.num_steps, 3)) self.ang_vel = np.zeros((self.size, self.num_steps, 3)) self.pos = np.zeros((self.size, self.num_steps, 3)) # cummulative euler angles self.rot = np.zeros((self.size, self.num_steps, 3)) # change in rotation in quaternion rep (w, x, y, z) self.delta_quat = np.zeros((self.size, self.num_steps, 4)) # change in rotation between steps in axis-angle rep (scaled 3 vec) self.delta_rot = np.zeros((self.size, self.num_steps, 3)) # change in rotation between steps in split axis-angle rep (4-vec) self.delta_rot_split = np.zeros((self.size, self.num_steps, 4)) # 0 if before topple idx, 1 if after self.topple_label = np.zeros((self.size, self.num_steps), dtype=int) # other meta-data not directly used in network self.toppled = [] self.shape_name = [] self.body_friction = np.zeros((self.size)) self.mass = np.zeros((self.size)) self.scale = np.zeros((self.size, 3)) self.rot_euler = np.zeros((self.size, self.num_steps, 3)) class ToppleDataset(object): ''' Loads toppling data and provides batches for training and model evaluation. ''' def __init__(self, roots, norm_info_file, batch_size=32, num_steps=15, shuffle=False, num_pts=None, perturb_pts=0.0): ''' - roots : list of directories containing data to load for this dataset - norm_info_file : .pkl file containing normalization information - batch_size : number of sequences to return in each batch - num_steps : number of timesteps to return in each sequence - shuffle : randomly shuffles the returned sequence ordering - num_pts : the number of points to use in the returned point cloud. If None uses all points in the data. - perturb_pts : the stdev to randomly perturb point clouds with. If None no perturbation is performed. - ''' # settings self.batch_size = batch_size self.steps_per_seq = num_steps self.shuffle = shuffle self.perturb_std = perturb_pts self.num_pts = num_pts # load in data for root in roots: if not exists(root): print('Could not find dataset at ' + root) return data_loader = ToppleDataLoader() self.data = data_loader.load_data(roots) if num_pts is None: # use all the points in the point cloud self.num_pts = self.data.point_cloud.shape[1] # load in normalization info if not exists(norm_info_file): print('Could not find normalization info at ' + norm_info_file) return self.norm_info = ToppleNormalizationInfo() self.norm_info.load_from(norm_info_file) print('Loaded normalization info!') # see if we have axis-angle info (for backwards compat) self.use_aa = False self.use_aa_split = False self.use_topple_idx = False self.use_delta_quat = False if len(self.data.delta_rot) > 0: self.use_aa = True if len(self.data.delta_rot_split) > 0: self.use_aa_split = True if len(self.data.topple_idx) > 0: self.use_topple_idx = True if len(self.data.body_friction) > 0: self.use_body_friction = True if len(self.data.delta_quat) > 0: self.use_delta_quat = True # normalize the data print('Normalizing data...') self.normalize_data(self.data, self.norm_info) print('Finished normalizing!') # order to iterate through data when returning batches (in order by default) self.iter_inds = range(0, self.data.size) # prepare to iterate through self.reset() def normalize_data(self, data, norm_info): ''' Normalizes (in place) the given ToppleData using the ToppleNormalizationInfo. 
''' # point clouds -> [-1, 1] data.point_cloud /= norm_info.pc_max # force pos -> [-1, 1] data.force_pos /= norm_info.pc_max # force vec -> [-1, 1] data.force_vec /= norm_info.force_vec_max # density -> [0, 1] data.density = (data.density - norm_info.density_offset) / norm_info.density_max # mass -> [0, 1] data.mass = (data.mass - norm_info.mass_offset) / norm_info.mass_max # inertia -> [0, 1] data.inertia = (data.inertia - norm_info.inertia_offset) / norm_info.inertia_max # friction -> [0, 1] if norm_info.friction_offset is not None: data.body_friction = (data.body_friction - norm_info.friction_offset) / norm_info.friction_max # now time sequence data # velocities -> [-1, 1] for i, lin_vel_steps in enumerate(data.lin_vel): data.lin_vel[i] = [(x / norm_info.max_lin_vel) for x in lin_vel_steps] for i, ang_vel_steps in enumerate(data.ang_vel): data.ang_vel[i] = [(x / norm_info.max_ang_vel) for x in ang_vel_steps] # delta position -> [-1, 1] for i, pos_steps in enumerate(data.pos): data.pos[i] = [(x / norm_info.max_pos) for x in pos_steps] # delta rotation -> [-1, 1] for i, rot_steps in enumerate(data.total_rot): data.total_rot[i] = [(x / norm_info.max_rot) for x in rot_steps] # delta rot axis-angle -> [-1, 1] norm if self.use_aa: for i, delta_rot_steps in enumerate(data.delta_rot): data.delta_rot[i] = [(x / norm_info.max_delta_rot) for x in delta_rot_steps] # make axes unit and and normalize angle -> [-1, 1] if self.use_aa_split: for i, delta_rot_split_steps in enumerate(data.delta_rot_split): data.delta_rot_split[i] = [np.append(x[:3] / np.linalg.norm(x[:3]), x[3] / norm_info.max_delta_rot) for x in delta_rot_split_steps] def reset(self): ''' Prepares to iterate through dataset. ''' if self.shuffle: np.random.shuffle(self.iter_inds) # we consider an epoch as returning one sequence from every single simulation # ( though if the sequence length is shorter than sim length the unique sequences contained # in the dataset will be much more than an epoch length ) self.num_batches = (self.data.size + self.batch_size - 1) // self.batch_size self.batch_idx = 0 def has_next_batch(self): ''' Returns false if done with the current "epoch" (seen each sim once). ''' return self.batch_idx < self.num_batches def next_batch(self, random_window=True, focus_toppling=False): ''' Returns the next batch of data. if random_window=True will get a random sequence of correct length (otherwise starts at 0). If focus_toppling=True, will make sure this sequence includes the part of the sequence where toppling occurs. 
        '''
        # size is either batch_size, or shorter if we're at the end of the data
        start_idx = self.batch_idx * self.batch_size
        end_idx = min((self.batch_idx + 1) * self.batch_size, self.data.size)
        batch_size = end_idx - start_idx
        # get batch data
        batch = ToppleBatch(self.batch_size, self.steps_per_seq, self.num_pts)
        for i in range(batch_size):
            pc, lin_vel, ang_vel, pos, rot, delta_quat, delta_rot, delta_rot_split, topple_label, meta_info = \
                self.get_seq(self.iter_inds[start_idx + i], self.steps_per_seq, random_window, focus_toppling)
            batch.point_cloud[i] = pc
            batch.lin_vel[i] = lin_vel
            batch.ang_vel[i] = ang_vel
            batch.pos[i] = pos
            batch.rot[i] = rot
            if self.use_delta_quat:
                batch.delta_quat[i] = delta_quat
            if self.use_aa:
                batch.delta_rot[i] = delta_rot
            if self.use_aa_split:
                batch.delta_rot_split[i] = delta_rot_split
            if self.use_topple_idx:
                batch.topple_label[i] = topple_label
            batch.toppled.append(meta_info[0])
            batch.shape_name.append(meta_info[1])
            batch.scale[i] = meta_info[2]
            batch.rot_euler[i] = meta_info[3]
            if self.use_body_friction:
                batch.body_friction[i] = meta_info[4]
            batch.mass[i] = meta_info[5]
        if batch_size != self.batch_size:
            # need to pad the end with repeat of data
            for i in range(self.batch_size - batch_size):
                batch.point_cloud[batch_size + i] = batch.point_cloud[i]
                batch.lin_vel[batch_size + i] = batch.lin_vel[i]
                batch.ang_vel[batch_size + i] = batch.ang_vel[i]
                batch.pos[batch_size + i] = batch.pos[i]
                batch.rot[batch_size + i] = batch.rot[i]
                if self.use_delta_quat:
                    batch.delta_quat[batch_size + i] = batch.delta_quat[i]
                batch.toppled.append(batch.toppled[i])
                batch.shape_name.append(batch.shape_name[i])
                batch.scale[batch_size + i] = batch.scale[i]
                batch.rot_euler[batch_size + i] = batch.rot_euler[i]
                batch.mass[batch_size + i] = batch.mass[i]
                if self.use_aa:
                    batch.delta_rot[batch_size + i] = batch.delta_rot[i]
                if self.use_aa_split:
                    batch.delta_rot_split[batch_size + i] = batch.delta_rot_split[i]
                if self.use_topple_idx:
                    batch.topple_label[batch_size + i] = batch.topple_label[i]
                if self.use_body_friction:
                    batch.body_friction[batch_size + i] = batch.body_friction[i]
        self.batch_idx += 1
        return batch

    def get_seq(self, idx, num_steps, random_window=True, focus_toppling=False):
        '''
        Returns a random contiguous sequence from the simulation at the given idx and length num_steps.
        If num_steps > sim_length the final (sim_length-num_steps) steps are padded with the value at sim[sim_length].
        '''
        # get the normalized canonical point cloud for this simulation
        pc = np.copy(self.data.point_cloud[self.data.shape_idx[idx]])
        scale = self.data.scale[idx]
        # scale accordingly
        pc *= np.reshape(scale, (1, -1))
        # randomly perturb point cloud
        pc += np.random.normal(0.0, self.perturb_std, pc.shape)
        # randomly draw a subset of points if desired
        if self.num_pts < pc.shape[0]:
            pc_inds = np.random.choice(pc.shape[0], self.num_pts, replace=False)
            pc = pc[pc_inds, :]
        # randomly choose a size num_steps sequence from the simulation to return time-series data
        total_steps = len(self.data.lin_vel[idx])
        max_start_step = total_steps - num_steps
        start_step = 0
        if max_start_step < 0:
            # simulation is shorter than desired sequence length
            pad_len = abs(max_start_step)
            lin_vel_list = self.data.lin_vel[idx]
            lin_vel_out = np.array(lin_vel_list + [lin_vel_list[-1]]*pad_len)
            ang_vel_list = self.data.ang_vel[idx]
            ang_vel_out = np.array(ang_vel_list + [ang_vel_list[-1]]*pad_len)
            pos_list = self.data.pos[idx]
            pos_out = np.array(pos_list + [pos_list[-1]]*pad_len)
            rot_list = self.data.total_rot[idx]
            rot_out = np.array(rot_list + [rot_list[-1]]*pad_len)
            if self.use_delta_quat:
                delta_quat_list = self.data.delta_quat[idx]
                delta_quat_out = np.array(delta_quat_list + [delta_quat_list[-1]]*pad_len)
            euler_rot_list = self.data.rot_euler[idx]
            euler_rot_out = np.array(euler_rot_list + [euler_rot_list[-1]]*pad_len)
            if self.use_aa:
                delta_rot_list = self.data.delta_rot[idx]
                delta_rot_out = np.array(delta_rot_list + [delta_rot_list[-1]]*pad_len)
            if self.use_aa_split:
                delta_rot_split_list = self.data.delta_rot_split[idx]
                delta_rot_split_out = np.array(delta_rot_split_list + [delta_rot_split_list[-1]]*pad_len)
            if self.use_topple_idx:
                topple_label_out = np.zeros((total_steps + pad_len), dtype=int)
                seq_topple_idx = self.data.topple_idx[idx]
                if seq_topple_idx > 0:
                    topple_label_out[seq_topple_idx:] = 1
        else:
            start_step = 0
            if random_window:
                if focus_toppling and self.data.toppled[idx]:
                    # choose window around the index where it topples
                    topple_idx = self.data.topple_idx[idx]
                    min_idx = max([topple_idx - num_steps + 1, 0])
                    if min_idx >= max_start_step:
                        # just pick the max index
                        start_step = max_start_step
                    else:
                        # our window is guaranteed to see some part of toppling
                        start_step = np.random.randint(min_idx, max_start_step+1)
                else:
                    start_step = np.random.randint(0, max_start_step+1)
            end_step = start_step + num_steps
            # print('Range: %d, %d' % (start_step, end_step))
            lin_vel_out = np.array(self.data.lin_vel[idx][start_step:end_step])
            ang_vel_out = np.array(self.data.ang_vel[idx][start_step:end_step])
            pos_out = np.array(self.data.pos[idx][start_step:end_step])
            rot_out = np.array(self.data.total_rot[idx][start_step:end_step])
            if self.use_delta_quat:
                delta_quat_out = np.array(self.data.delta_quat[idx][start_step:end_step])
            euler_rot_out = np.array(self.data.rot_euler[idx][start_step:end_step])
            if self.use_aa:
                delta_rot_out = np.array(self.data.delta_rot[idx][start_step:end_step])
            if self.use_aa_split:
                delta_rot_split_out = np.array(self.data.delta_rot_split[idx][start_step:end_step])
            if self.use_topple_idx:
                topple_label_out = np.zeros((num_steps), dtype=int)
                seq_topple_idx = self.data.topple_idx[idx]
                if seq_topple_idx > 0:
                    if seq_topple_idx <= start_step:
                        topple_label_out[:] = 1
                    elif seq_topple_idx < end_step:
                        topple_label_out[seq_topple_idx-start_step:] = 1

        # rotate point cloud to align with first frame of sequence
        init_rot = self.data.rot_euler[idx][start_step]
        xrot, yrot, zrot = np.radians(init_rot)
        R = transforms3d.euler.euler2mat(zrot, xrot, yrot, axes='szxy')  # unity applies euler angles in z, x, y ordering
        pc = np.dot(pc, R.T)

        toppled = self.data.toppled[idx]
        shape_name = self.data.shape_name[idx]
        mass = self.data.mass[idx]
        body_fric = -1.0
        if self.use_body_friction:
            body_fric = self.data.body_friction[idx]
        meta_info = (toppled, shape_name, scale, euler_rot_out, body_fric, mass)

        if not self.use_aa:
            delta_rot_out = None
        if not self.use_aa_split:
            delta_rot_split_out = None
        if not self.use_topple_idx:
            topple_label_out = None
        if not self.use_delta_quat:
            delta_quat_out = None

        return pc, lin_vel_out, ang_vel_out, pos_out, rot_out, delta_quat_out, delta_rot_out, delta_rot_split_out, topple_label_out, meta_info

    def get_norm_info(self):
        return self.norm_info


if __name__=='__main__':
    # norm_info = ToppleNormalizationInfo()
    # norm_info.load_from('../../data/sim/normalization_info/cube_train.pkl')
    # norm_info.print_out()
    topple_data = ToppleDataset(roots=['./data/sim/Cube/Cube30k_ObjSplit/Cube30kVal'], norm_info_file='./data/sim/normalization_info/cube_30k.pkl', \
                               batch_size=5, num_steps=10, shuffle=True, num_pts=None, perturb_pts=0.01)
    count = 0
    while topple_data.has_next_batch():
        batch = topple_data.next_batch(random_window=True, focus_toppling=False)
        count += 1
        # print(batch.lin_vel[0])
        # print(batch.toppled[0])
        # print(batch.delta_rot_split[0])
        # print(batch.delta_rot[0])
        # print(batch.topple_label[0])
        # print(batch.pos)
        # print(batch.body_friction)
        # print(batch.delta_quat[0])
        # print(np.degrees(2*np.arccos(batch.delta_quat[0, :, 0])))
    print('Total num batches: ' + str(count))

    topple_data.reset()
    count = 0
    while topple_data.has_next_batch():
        batch = topple_data.next_batch()
        count += 1
        print(batch.size)
    print('Total num batches: ' + str(count))
[((3728, 3766), 'numpy.zeros', 'np.zeros', (['(self.size, self.num_pts, 3)'], {}), '((self.size, self.num_pts, 3))\n', (3736, 3766), True, 'import numpy as np\n'), ((3790, 3830), 'numpy.zeros', 'np.zeros', (['(self.size, self.num_steps, 3)'], {}), '((self.size, self.num_steps, 3))\n', (3798, 3830), True, 'import numpy as np\n'), ((3854, 3894), 'numpy.zeros', 'np.zeros', (['(self.size, self.num_steps, 3)'], {}), '((self.size, self.num_steps, 3))\n', (3862, 3894), True, 'import numpy as np\n'), ((3914, 3954), 'numpy.zeros', 'np.zeros', (['(self.size, self.num_steps, 3)'], {}), '((self.size, self.num_steps, 3))\n', (3922, 3954), True, 'import numpy as np\n'), ((4009, 4049), 'numpy.zeros', 'np.zeros', (['(self.size, self.num_steps, 3)'], {}), '((self.size, self.num_steps, 3))\n', (4017, 4049), True, 'import numpy as np\n'), ((4136, 4176), 'numpy.zeros', 'np.zeros', (['(self.size, self.num_steps, 4)'], {}), '((self.size, self.num_steps, 4))\n', (4144, 4176), True, 'import numpy as np\n'), ((4278, 4318), 'numpy.zeros', 'np.zeros', (['(self.size, self.num_steps, 3)'], {}), '((self.size, self.num_steps, 3))\n', (4286, 4318), True, 'import numpy as np\n'), ((4425, 4465), 'numpy.zeros', 'np.zeros', (['(self.size, self.num_steps, 4)'], {}), '((self.size, self.num_steps, 4))\n', (4433, 4465), True, 'import numpy as np\n'), ((4540, 4588), 'numpy.zeros', 'np.zeros', (['(self.size, self.num_steps)'], {'dtype': 'int'}), '((self.size, self.num_steps), dtype=int)\n', (4548, 4588), True, 'import numpy as np\n'), ((4729, 4748), 'numpy.zeros', 'np.zeros', (['self.size'], {}), '(self.size)\n', (4737, 4748), True, 'import numpy as np\n'), ((4771, 4790), 'numpy.zeros', 'np.zeros', (['self.size'], {}), '(self.size)\n', (4779, 4790), True, 'import numpy as np\n'), ((4814, 4838), 'numpy.zeros', 'np.zeros', (['(self.size, 3)'], {}), '((self.size, 3))\n', (4822, 4838), True, 'import numpy as np\n'), ((4864, 4904), 'numpy.zeros', 'np.zeros', (['(self.size, self.num_steps, 3)'], {}), '((self.size, self.num_steps, 3))\n', (4872, 4904), True, 'import numpy as np\n'), ((6156, 6174), 'topple_data_loader.ToppleDataLoader', 'ToppleDataLoader', ([], {}), '()\n', (6172, 6174), False, 'from topple_data_loader import ToppleData, ToppleDataLoader\n'), ((14138, 14194), 'numpy.copy', 'np.copy', (['self.data.point_cloud[self.data.shape_idx[idx]]'], {}), '(self.data.point_cloud[self.data.shape_idx[idx]])\n', (14145, 14194), True, 'import numpy as np\n'), ((14274, 14300), 'numpy.reshape', 'np.reshape', (['scale', '(1, -1)'], {}), '(scale, (1, -1))\n', (14284, 14300), True, 'import numpy as np\n'), ((14354, 14403), 'numpy.random.normal', 'np.random.normal', (['(0.0)', 'self.perturb_std', 'pc.shape'], {}), '(0.0, self.perturb_std, pc.shape)\n', (14370, 14403), True, 'import numpy as np\n'), ((18650, 18670), 'numpy.radians', 'np.radians', (['init_rot'], {}), '(init_rot)\n', (18660, 18670), True, 'import numpy as np\n'), ((18683, 18742), 'transforms3d.euler.euler2mat', 'transforms3d.euler.euler2mat', (['zrot', 'xrot', 'yrot'], {'axes': '"""szxy"""'}), "(zrot, xrot, yrot, axes='szxy')\n", (18711, 18742), False, 'import transforms3d\n'), ((18805, 18820), 'numpy.dot', 'np.dot', (['pc', 'R.T'], {}), '(pc, R.T)\n', (18811, 18820), True, 'import numpy as np\n'), ((2119, 2139), 'pickle.dump', 'pickle.dump', (['self', 'f'], {}), '(self, f)\n', (2130, 2139), False, 'import pickle\n'), ((2325, 2339), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (2336, 2339), False, 'import pickle\n'), ((6416, 6438), 'os.path.exists', 'exists', (['norm_info_file'], {}), '(norm_info_file)\n', (6422, 6438), False, 'from os.path import exists, realpath\n'), ((9912, 9945), 'numpy.random.shuffle', 'np.random.shuffle', (['self.iter_inds'], {}), '(self.iter_inds)\n', (9929, 9945), True, 'import numpy as np\n'), ((14520, 14578), 'numpy.random.choice', 'np.random.choice', (['pc.shape[0]', 'self.num_pts'], {'replace': '(False)'}), '(pc.shape[0], self.num_pts, replace=False)\n', (14536, 14578), True, 'import numpy as np\n'), ((15047, 15100), 'numpy.array', 'np.array', (['(lin_vel_list + [lin_vel_list[-1]] * pad_len)'], {}), '(lin_vel_list + [lin_vel_list[-1]] * pad_len)\n', (15055, 15100), True, 'import numpy as np\n'), ((15175, 15228), 'numpy.array', 'np.array', (['(ang_vel_list + [ang_vel_list[-1]] * pad_len)'], {}), '(ang_vel_list + [ang_vel_list[-1]] * pad_len)\n', (15183, 15228), True, 'import numpy as np\n'), ((15291, 15336), 'numpy.array', 'np.array', (['(pos_list + [pos_list[-1]] * pad_len)'], {}), '(pos_list + [pos_list[-1]] * pad_len)\n', (15299, 15336), True, 'import numpy as np\n'), ((15405, 15450), 'numpy.array', 'np.array', (['(rot_list + [rot_list[-1]] * pad_len)'], {}), '(rot_list + [rot_list[-1]] * pad_len)\n', (15413, 15450), True, 'import numpy as np\n'), ((15718, 15775), 'numpy.array', 'np.array', (['(euler_rot_list + [euler_rot_list[-1]] * pad_len)'], {}), '(euler_rot_list + [euler_rot_list[-1]] * pad_len)\n', (15726, 15775), True, 'import numpy as np\n'), ((17327, 17380), 'numpy.array', 'np.array', (['self.data.lin_vel[idx][start_step:end_step]'], {}), '(self.data.lin_vel[idx][start_step:end_step])\n', (17335, 17380), True, 'import numpy as np\n'), ((17407, 17460), 'numpy.array', 'np.array', (['self.data.ang_vel[idx][start_step:end_step]'], {}), '(self.data.ang_vel[idx][start_step:end_step])\n', (17415, 17460), True, 'import numpy as np\n'), ((17483, 17532), 'numpy.array', 'np.array', (['self.data.pos[idx][start_step:end_step]'], {}), '(self.data.pos[idx][start_step:end_step])\n', (17491, 17532), True, 'import numpy as np\n'), ((17555, 17610), 'numpy.array', 'np.array', (['self.data.total_rot[idx][start_step:end_step]'], {}), '(self.data.total_rot[idx][start_step:end_step])\n', (17563, 17610), True, 'import numpy as np\n'), ((6038, 6050), 'os.path.exists', 'exists', (['root'], {}), '(root)\n', (6044, 6050), False, 'from os.path import exists, realpath\n'), ((15578, 15637), 'numpy.array', 'np.array', (['(delta_quat_list + [delta_quat_list[-1]] * pad_len)'], {}), '(delta_quat_list + [delta_quat_list[-1]] * pad_len)\n', (15586, 15637), True, 'import numpy as np\n'), ((15892, 15949), 'numpy.array', 'np.array', (['(delta_rot_list + [delta_rot_list[-1]] * pad_len)'], {}), '(delta_rot_list + [delta_rot_list[-1]] * pad_len)\n', (15900, 15949), True, 'import numpy as np\n'), ((16090, 16159), 'numpy.array', 'np.array', (['(delta_rot_split_list + [delta_rot_split_list[-1]] * pad_len)'], {}), '(delta_rot_split_list + [delta_rot_split_list[-1]] * pad_len)\n', (16098, 16159), True, 'import numpy as np\n'), ((16229, 16271), 'numpy.zeros', 'np.zeros', (['(total_steps + pad_len)'], {'dtype': 'int'}), '(total_steps + pad_len, dtype=int)\n', (16237, 16271), True, 'import numpy as np\n'), ((17680, 17736), 'numpy.array', 'np.array', (['self.data.delta_quat[idx][start_step:end_step]'], {}), '(self.data.delta_quat[idx][start_step:end_step])\n', (17688, 17736), True, 'import numpy as np\n'), ((17881, 17936), 'numpy.array', 'np.array', (['self.data.delta_rot[idx][start_step:end_step]'], {}), '(self.data.delta_rot[idx][start_step:end_step])\n', (17889, 17936), True, 'import numpy as np\n'), ((18009, 18070), 'numpy.array', 'np.array', (['self.data.delta_rot_split[idx][start_step:end_step]'], {}), '(self.data.delta_rot_split[idx][start_step:end_step])\n', (18017, 18070), True, 'import numpy as np\n'), ((18142, 18172), 'numpy.zeros', 'np.zeros', (['num_steps'], {'dtype': 'int'}), '(num_steps, dtype=int)\n', (18150, 18172), True, 'import numpy as np\n'), ((17154, 17194), 'numpy.random.randint', 'np.random.randint', (['(0)', '(max_start_step + 1)'], {}), '(0, max_start_step + 1)\n', (17171, 17194), True, 'import numpy as np\n'), ((17054, 17100), 'numpy.random.randint', 'np.random.randint', (['min_idx', '(max_start_step + 1)'], {}), '(min_idx, max_start_step + 1)\n', (17071, 17100), True, 'import numpy as np\n'), ((9681, 9702), 'numpy.linalg.norm', 'np.linalg.norm', (['x[:3]'], {}), '(x[:3])\n', (9695, 9702), True, 'import numpy as np\n')]
efkandurakli/Graduation-Project1
Part1/AverageAccuracy.py
fd2cba89929da2cef49ec67214b54c310b57ce01
import numpy as np
from operator import truediv


def AA_andEachClassAccuracy(confusion_matrix):
    counter = confusion_matrix.shape[0]
    list_diag = np.diag(confusion_matrix)
    list_raw_sum = np.sum(confusion_matrix, axis=1)
    each_acc = np.nan_to_num(truediv(list_diag, list_raw_sum))
    average_acc = np.mean(each_acc)
    return each_acc, average_acc
[((157, 182), 'numpy.diag', 'np.diag', (['confusion_matrix'], {}), '(confusion_matrix)\n', (164, 182), True, 'import numpy as np\n'), ((203, 235), 'numpy.sum', 'np.sum', (['confusion_matrix'], {'axis': '(1)'}), '(confusion_matrix, axis=1)\n', (209, 235), True, 'import numpy as np\n'), ((319, 336), 'numpy.mean', 'np.mean', (['each_acc'], {}), '(each_acc)\n', (326, 336), True, 'import numpy as np\n'), ((266, 298), 'operator.truediv', 'truediv', (['list_diag', 'list_raw_sum'], {}), '(list_diag, list_raw_sum)\n', (273, 298), False, 'from operator import truediv\n')]
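A minimal usage sketch for the AA_andEachClassAccuracy helper above; the 3x3 confusion matrix and the import path (taken from the file name Part1/AverageAccuracy.py) are illustrative assumptions, not from the source:

# Hypothetical example: rows are true classes, columns are predicted classes.
import numpy as np
from AverageAccuracy import AA_andEachClassAccuracy  # assumed module name

cm = np.array([[50, 2, 3],
               [4, 45, 1],
               [2, 3, 40]])
each_acc, average_acc = AA_andEachClassAccuracy(cm)
print(each_acc)     # per-class accuracy: diagonal / row sums
print(average_acc)  # mean over the per-class accuracies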
YangHee-Min/spinalcordtoolbox
scripts/sct_apply_transfo.py
38ca15aa99b03ca99b7885ddc98adf2755adc43d
#!/usr/bin/env python
#########################################################################################
#
# Apply transformations. This function is a wrapper for sct_WarpImageMultiTransform
#
# ---------------------------------------------------------------------------------------
# Copyright (c) 2014 Polytechnique Montreal <www.neuro.polymtl.ca>
# Authors: Julien Cohen-Adad, Olivier Comtois
# Modified: 2014-07-20
#
# About the license: see the file LICENSE.TXT
#########################################################################################

# TODO: display message at the end
# TODO: interpolation methods

from __future__ import division, absolute_import

import sys, io, os, time, functools

from msct_parser import Parser
import sct_utils as sct
import sct_convert
import sct_image
import spinalcordtoolbox.image as msct_image
from sct_crop_image import ImageCropper


class Param:
    def __init__(self):
        self.verbose = '1'
        self.remove_temp_files = '1'


# PARSER
# ==========================================================================================
def get_parser():
    # parser initialisation
    parser = Parser(__file__)
    parser.usage.set_description('Apply transformations. This function is a wrapper for antsApplyTransforms (ANTs).')
    parser.add_option(name="-i",
                      type_value="file",
                      description="input image",
                      mandatory=True,
                      example="t2.nii.gz")
    parser.add_option(name="-d",
                      type_value="file",
                      description="destination image",
                      mandatory=True,
                      example="out.nii.gz")
    parser.add_option(name="-w",
                      type_value=[[','], "file"],
                      description="Transformation, which can be a warping field (nifti image) or an affine transformation matrix (text file).",
                      mandatory=True,
                      example="warp1.nii.gz,warp2.nii.gz")
    parser.add_option(name="-crop",
                      type_value="multiple_choice",
                      description="Crop Reference. 0 : no reference. 1 : sets background to 0. 2 : use normal background",
                      mandatory=False,
                      default_value='0',
                      example=['0', '1', '2'])
    parser.add_option(name="-c",
                      type_value=None,
                      description="Crop Reference. 0 : no reference. 1 : sets background to 0. 2 : use normal background",
                      mandatory=False,
                      deprecated_by='-crop')
    parser.add_option(name="-o",
                      type_value="file_output",
                      description="registered source.",
                      mandatory=False,
                      default_value='',
                      example="dest.nii.gz")
    parser.add_option(name="-x",
                      type_value="multiple_choice",
                      description="interpolation method",
                      mandatory=False,
                      default_value='spline',
                      example=['nn', 'linear', 'spline'])
    parser.add_option(name="-r",
                      type_value="multiple_choice",
                      description="""Remove temporary files.""",
                      mandatory=False,
                      default_value='1',
                      example=['0', '1'])
    parser.add_option(name="-v",
                      type_value="multiple_choice",
                      description="""Verbose.""",
                      mandatory=False,
                      default_value='1',
                      example=['0', '1', '2'])

    return parser


class Transform:
    def __init__(self, input_filename, warp, fname_dest, output_filename='', verbose=0, crop=0, interp='spline',
                 remove_temp_files=1, debug=0):
        self.input_filename = input_filename
        if isinstance(warp, str):
            self.warp_input = list([warp])
        else:
            self.warp_input = warp
        self.fname_dest = fname_dest
        self.output_filename = output_filename
        self.interp = interp
        self.crop = crop
        self.verbose = verbose
        self.remove_temp_files = remove_temp_files
        self.debug = debug

    def apply(self):
        # Initialization
        fname_src = self.input_filename  # source image (moving)
        fname_warp_list = self.warp_input  # list of warping fields
        fname_out = self.output_filename  # output
        fname_dest = self.fname_dest  # destination image (fix)
        verbose = self.verbose
        remove_temp_files = self.remove_temp_files
        crop_reference = self.crop  # if = 1, put 0 everywhere around warping field, if = 2, real crop
        interp = sct.get_interpolation('isct_antsApplyTransforms', self.interp)

        # Parse list of warping fields
        sct.printv('\nParse list of warping fields...', verbose)
        use_inverse = []
        fname_warp_list_invert = []
        # fname_warp_list = fname_warp_list.replace(' ', '')  # remove spaces
        # fname_warp_list = fname_warp_list.split(",")  # parse with comma
        for idx_warp, path_warp in enumerate(fname_warp_list):
            # Check if inverse matrix is specified with '-' at the beginning of file name
            if path_warp.startswith("-"):
                use_inverse.append('-i')
                fname_warp_list[idx_warp] = path_warp[1:]  # remove '-'
                fname_warp_list_invert += [[use_inverse[idx_warp], fname_warp_list[idx_warp]]]
            else:
                use_inverse.append('')
                fname_warp_list_invert += [[path_warp]]
            path_warp = fname_warp_list[idx_warp]
            if path_warp.endswith((".nii", ".nii.gz")) \
                    and msct_image.Image(fname_warp_list[idx_warp]).header.get_intent()[0] != 'vector':
                raise ValueError("Displacement field in {} is invalid: should be encoded" \
                                 " in a 5D file with vector intent code" \
                                 " (see https://nifti.nimh.nih.gov/pub/dist/src/niftilib/nifti1.h" \
                                 .format(path_warp))
        # need to check if last warping field is an affine transfo
        isLastAffine = False
        path_fname, file_fname, ext_fname = sct.extract_fname(fname_warp_list_invert[-1][-1])
        if ext_fname in ['.txt', '.mat']:
            isLastAffine = True

        # check if destination file is 3d
        if not sct.check_if_3d(fname_dest):
            sct.printv('ERROR: Destination data must be 3d')

        # N.B. Here we take the inverse of the warp list, because sct_WarpImageMultiTransform concatenates in the reverse order
        fname_warp_list_invert.reverse()
        fname_warp_list_invert = functools.reduce(lambda x,y: x+y, fname_warp_list_invert)

        # Extract path, file and extension
        path_src, file_src, ext_src = sct.extract_fname(fname_src)
        path_dest, file_dest, ext_dest = sct.extract_fname(fname_dest)

        # Get output folder and file name
        if fname_out == '':
            path_out = ''  # output in user's current directory
            file_out = file_src + '_reg'
            ext_out = ext_src
            fname_out = os.path.join(path_out, file_out + ext_out)

        # Get dimensions of data
        sct.printv('\nGet dimensions of data...', verbose)
        img_src = msct_image.Image(fname_src)
        nx, ny, nz, nt, px, py, pz, pt = img_src.dim
        # nx, ny, nz, nt, px, py, pz, pt = sct.get_dimension(fname_src)
        sct.printv(' ' + str(nx) + ' x ' + str(ny) + ' x ' + str(nz) + ' x ' + str(nt), verbose)

        # if 3d
        if nt == 1:
            # Apply transformation
            sct.printv('\nApply transformation...', verbose)
            if nz in [0, 1]:
                dim = '2'
            else:
                dim = '3'
            sct.run(['isct_antsApplyTransforms',
                     '-d', dim,
                     '-i', fname_src,
                     '-o', fname_out,
                     '-t',
                     ] + fname_warp_list_invert + [
                     '-r', fname_dest,
                     ] + interp, verbose=verbose, is_sct_binary=True)

        # if 4d, loop across the T dimension
        else:
            path_tmp = sct.tmp_create(basename="apply_transfo", verbose=verbose)

            # convert to nifti into temp folder
            sct.printv('\nCopying input data to tmp folder and convert to nii...', verbose)
            img_src.save(os.path.join(path_tmp, "data.nii"))
            sct.copy(fname_dest, os.path.join(path_tmp, file_dest + ext_dest))
            fname_warp_list_tmp = []
            for fname_warp in fname_warp_list:
                path_warp, file_warp, ext_warp = sct.extract_fname(fname_warp)
                sct.copy(fname_warp, os.path.join(path_tmp, file_warp + ext_warp))
                fname_warp_list_tmp.append(file_warp + ext_warp)
            fname_warp_list_invert_tmp = fname_warp_list_tmp[::-1]

            curdir = os.getcwd()
            os.chdir(path_tmp)

            # split along T dimension
            sct.printv('\nSplit along T dimension...', verbose)
            im_dat = msct_image.Image('data.nii')
            im_header = im_dat.hdr
            data_split_list = sct_image.split_data(im_dat, 3)
            for im in data_split_list:
                im.save()

            # apply transfo
            sct.printv('\nApply transformation to each 3D volume...', verbose)
            for it in range(nt):
                file_data_split = 'data_T' + str(it).zfill(4) + '.nii'
                file_data_split_reg = 'data_reg_T' + str(it).zfill(4) + '.nii'
                status, output = sct.run(['isct_antsApplyTransforms',
                                          '-d', '3',
                                          '-i', file_data_split,
                                          '-o', file_data_split_reg,
                                          '-t',
                                          ] + fname_warp_list_invert_tmp + [
                                          '-r', file_dest + ext_dest,
                                          ] + interp, verbose, is_sct_binary=True)
            # Merge files back
            sct.printv('\nMerge file back...', verbose)
            import glob
            path_out, name_out, ext_out = sct.extract_fname(fname_out)
            # im_list = [Image(file_name) for file_name in glob.glob('data_reg_T*.nii')]
            # concat_data use to take a list of image in input, now takes a list of file names to open the files one by one (see issue #715)
            fname_list = glob.glob('data_reg_T*.nii')
            fname_list.sort()
            im_out = sct_image.concat_data(fname_list, 3, im_header['pixdim'])
            im_out.save(name_out + ext_out)

            os.chdir(curdir)
            sct.generate_output_file(os.path.join(path_tmp, name_out + ext_out), fname_out)
            # Delete temporary folder if specified
            if int(remove_temp_files):
                sct.printv('\nRemove temporary files...', verbose)
                sct.rmtree(path_tmp, verbose=verbose)

        # 2. crop the resulting image using dimensions from the warping field
        warping_field = fname_warp_list_invert[-1]
        # if last warping field is an affine transfo, we need to compute the space of the concatenate warping field:
        if isLastAffine:
            sct.printv('WARNING: the resulting image could have wrong apparent results. You should use an affine transformation as last transformation...', verbose, 'warning')
        elif crop_reference == 1:
            ImageCropper(input_file=fname_out, output_file=fname_out, ref=warping_field, background=0).crop()
            # sct.run('sct_crop_image -i '+fname_out+' -o '+fname_out+' -ref '+warping_field+' -b 0')
        elif crop_reference == 2:
            ImageCropper(input_file=fname_out, output_file=fname_out, ref=warping_field).crop()
            # sct.run('sct_crop_image -i '+fname_out+' -o '+fname_out+' -ref '+warping_field)

        sct.display_viewer_syntax([fname_dest, fname_out], verbose=verbose)


# MAIN
# ==========================================================================================
def main(args=None):
    # check user arguments
    if not args:
        args = sys.argv[1:]

    # Get parser info
    parser = get_parser()
    arguments = parser.parse(args)
    input_filename = arguments["-i"]
    fname_dest = arguments["-d"]
    warp_filename = arguments["-w"]

    transform = Transform(input_filename=input_filename, fname_dest=fname_dest, warp=warp_filename)

    if "-crop" in arguments:
        transform.crop = arguments["-crop"]
    if "-o" in arguments:
        transform.output_filename = arguments["-o"]
    if "-x" in arguments:
        transform.interp = arguments["-x"]
    if "-r" in arguments:
        transform.remove_temp_files = int(arguments["-r"])
    transform.verbose = int(arguments.get('-v'))
    sct.init_sct(log_level=transform.verbose, update=True)  # Update log level
    transform.apply()


# START PROGRAM
# ==========================================================================================
if __name__ == "__main__":
    sct.init_sct()
    # # initialize parameters
    param = Param()
    # call main function
    main()
[((1160, 1176), 'msct_parser.Parser', 'Parser', (['__file__'], {}), '(__file__)\n', (1166, 1176), False, 'from msct_parser import Parser\n'), ((12852, 12906), 'sct_utils.init_sct', 'sct.init_sct', ([], {'log_level': 'transform.verbose', 'update': '(True)'}), '(log_level=transform.verbose, update=True)\n', (12864, 12906), True, 'import sct_utils as sct\n'), ((13092, 13106), 'sct_utils.init_sct', 'sct.init_sct', ([], {}), '()\n', (13104, 13106), True, 'import sct_utils as sct\n'), ((4830, 4892), 'sct_utils.get_interpolation', 'sct.get_interpolation', (['"""isct_antsApplyTransforms"""', 'self.interp'], {}), "('isct_antsApplyTransforms', self.interp)\n", (4851, 4892), True, 'import sct_utils as sct\n'), ((4941, 5000), 'sct_utils.printv', 'sct.printv', (['"""\nParse list of warping fields..."""', 'verbose'], {}), '("""\nParse list of warping fields...""", verbose)\n', (4951, 5000), True, 'import sct_utils as sct\n'), ((6345, 6394), 'sct_utils.extract_fname', 'sct.extract_fname', (['fname_warp_list_invert[-1][-1]'], {}), '(fname_warp_list_invert[-1][-1])\n', (6362, 6394), True, 'import sct_utils as sct\n'), ((6820, 6880), 'functools.reduce', 'functools.reduce', (['(lambda x, y: x + y)', 'fname_warp_list_invert'], {}), '(lambda x, y: x + y, fname_warp_list_invert)\n', (6836, 6880), False, 'import sys, io, os, time, functools\n'), ((6960, 6988), 'sct_utils.extract_fname', 'sct.extract_fname', (['fname_src'], {}), '(fname_src)\n', (6977, 6988), True, 'import sct_utils as sct\n'), ((7030, 7059), 'sct_utils.extract_fname', 'sct.extract_fname', (['fname_dest'], {}), '(fname_dest)\n', (7047, 7059), True, 'import sct_utils as sct\n'), ((7375, 7428), 'sct_utils.printv', 'sct.printv', (['"""\nGet dimensions of data..."""', 'verbose'], {}), '("""\nGet dimensions of data...""", verbose)\n', (7385, 7428), True, 'import sct_utils as sct\n'), ((7444, 7471), 'spinalcordtoolbox.image.Image', 'msct_image.Image', (['fname_src'], {}), '(fname_src)\n', (7460, 7471), True, 'import spinalcordtoolbox.image as msct_image\n'), ((11938, 12005), 'sct_utils.display_viewer_syntax', 'sct.display_viewer_syntax', (['[fname_dest, fname_out]'], {'verbose': 'verbose'}), '([fname_dest, fname_out], verbose=verbose)\n', (11963, 12005), True, 'import sct_utils as sct\n'), ((6527, 6554), 'sct_utils.check_if_3d', 'sct.check_if_3d', (['fname_dest'], {}), '(fname_dest)\n', (6542, 6554), True, 'import sct_utils as sct\n'), ((6568, 6616), 'sct_utils.printv', 'sct.printv', (['"""ERROR: Destination data must be 3d"""'], {}), "('ERROR: Destination data must be 3d')\n", (6578, 6616), True, 'import sct_utils as sct\n'), ((7290, 7332), 'os.path.join', 'os.path.join', (['path_out', '(file_out + ext_out)'], {}), '(path_out, file_out + ext_out)\n', (7302, 7332), False, 'import sys, io, os, time, functools\n'), ((7779, 7830), 'sct_utils.printv', 'sct.printv', (['"""\nApply transformation..."""', 'verbose'], {}), '("""\nApply transformation...""", verbose)\n', (7789, 7830), True, 'import sct_utils as sct\n'), ((7939, 8127), 'sct_utils.run', 'sct.run', (["(['isct_antsApplyTransforms', '-d', dim, '-i', fname_src, '-o', fname_out,\n '-t'] + fname_warp_list_invert + ['-r', fname_dest] + interp)"], {'verbose': 'verbose', 'is_sct_binary': '(True)'}), "(['isct_antsApplyTransforms', '-d', dim, '-i', fname_src, '-o',\n fname_out, '-t'] + fname_warp_list_invert + ['-r', fname_dest] + interp,\n verbose=verbose, is_sct_binary=True)\n", (7946, 8127), True, 'import sct_utils as sct\n'), ((8303, 8360), 'sct_utils.tmp_create', 'sct.tmp_create', ([], {'basename': '"""apply_transfo"""', 'verbose': 'verbose'}), "(basename='apply_transfo', verbose=verbose)\n", (8317, 8360), True, 'import sct_utils as sct\n'), ((8422, 8508), 'sct_utils.printv', 'sct.printv', (['"""\nCopying input data to tmp folder and convert to nii..."""', 'verbose'], {}), '("""\nCopying input data to tmp folder and convert to nii...""",\n verbose)\n', (8432, 8508), True, 'import sct_utils as sct\n'), ((9042, 9053), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (9051, 9053), False, 'import sys, io, os, time, functools\n'), ((9066, 9084), 'os.chdir', 'os.chdir', (['path_tmp'], {}), '(path_tmp)\n', (9074, 9084), False, 'import sys, io, os, time, functools\n'), ((9136, 9190), 'sct_utils.printv', 'sct.printv', (['"""\nSplit along T dimension..."""', 'verbose'], {}), '("""\nSplit along T dimension...""", verbose)\n', (9146, 9190), True, 'import sct_utils as sct\n'), ((9210, 9238), 'spinalcordtoolbox.image.Image', 'msct_image.Image', (['"""data.nii"""'], {}), "('data.nii')\n", (9226, 9238), True, 'import spinalcordtoolbox.image as msct_image\n'), ((9304, 9335), 'sct_image.split_data', 'sct_image.split_data', (['im_dat', '(3)'], {}), '(im_dat, 3)\n', (9324, 9335), False, 'import sct_image\n'), ((9442, 9511), 'sct_utils.printv', 'sct.printv', (['"""\nApply transformation to each 3D volume..."""', 'verbose'], {}), '("""\nApply transformation to each 3D volume...""", verbose)\n', (9452, 9511), True, 'import sct_utils as sct\n'), ((10102, 10148), 'sct_utils.printv', 'sct.printv', (['"""\nMerge file back..."""', 'verbose'], {}), '("""\nMerge file back...""", verbose)\n', (10112, 10148), True, 'import sct_utils as sct\n'), ((10212, 10240), 'sct_utils.extract_fname', 'sct.extract_fname', (['fname_out'], {}), '(fname_out)\n', (10229, 10240), True, 'import sct_utils as sct\n'), ((10496, 10524), 'glob.glob', 'glob.glob', (['"""data_reg_T*.nii"""'], {}), "('data_reg_T*.nii')\n", (10505, 10524), False, 'import glob\n'), ((10576, 10633), 'sct_image.concat_data', 'sct_image.concat_data', (['fname_list', '(3)', "im_header['pixdim']"], {}), "(fname_list, 3, im_header['pixdim'])\n", (10597, 10633), False, 'import sct_image\n'), ((10691, 10707), 'os.chdir', 'os.chdir', (['curdir'], {}), '(curdir)\n', (10699, 10707), False, 'import sys, io, os, time, functools\n'), ((11295, 11468), 'sct_utils.printv', 'sct.printv', (['"""WARNING: the resulting image could have wrong apparent results. You should use an affine transformation as last transformation..."""', 'verbose', '"""warning"""'], {}), "(\n 'WARNING: the resulting image could have wrong apparent results. You should use an affine transformation as last transformation...'\n , verbose, 'warning')\n", (11305, 11468), True, 'import sct_utils as sct\n'), ((8527, 8561), 'os.path.join', 'os.path.join', (['path_tmp', '"""data.nii"""'], {}), "(path_tmp, 'data.nii')\n", (8539, 8561), False, 'import sys, io, os, time, functools\n'), ((8596, 8640), 'os.path.join', 'os.path.join', (['path_tmp', '(file_dest + ext_dest)'], {}), '(path_tmp, file_dest + ext_dest)\n', (8608, 8640), False, 'import sys, io, os, time, functools\n'), ((8775, 8804), 'sct_utils.extract_fname', 'sct.extract_fname', (['fname_warp'], {}), '(fname_warp)\n', (8792, 8804), True, 'import sct_utils as sct\n'), ((9726, 9937), 'sct_utils.run', 'sct.run', (["(['isct_antsApplyTransforms', '-d', '3', '-i', file_data_split, '-o',\n file_data_split_reg, '-t'] + fname_warp_list_invert_tmp + ['-r', \n file_dest + ext_dest] + interp)", 'verbose'], {'is_sct_binary': '(True)'}), "(['isct_antsApplyTransforms', '-d', '3', '-i', file_data_split, '-o',\n file_data_split_reg, '-t'] + fname_warp_list_invert_tmp + ['-r', \n file_dest + ext_dest] + interp, verbose, is_sct_binary=True)\n", (9733, 9937), True, 'import sct_utils as sct\n'), ((10745, 10787), 'os.path.join', 'os.path.join', (['path_tmp', '(name_out + ext_out)'], {}), '(path_tmp, name_out + ext_out)\n', (10757, 10787), False, 'import sys, io, os, time, functools\n'), ((10906, 10959), 'sct_utils.printv', 'sct.printv', (['"""\nRemove temporary files..."""', 'verbose'], {}), '("""\nRemove temporary files...""", verbose)\n', (10916, 10959), True, 'import sct_utils as sct\n'), ((10973, 11010), 'sct_utils.rmtree', 'sct.rmtree', (['path_tmp'], {'verbose': 'verbose'}), '(path_tmp, verbose=verbose)\n', (10983, 11010), True, 'import sct_utils as sct\n'), ((8842, 8886), 'os.path.join', 'os.path.join', (['path_tmp', '(file_warp + ext_warp)'], {}), '(path_tmp, file_warp + ext_warp)\n', (8854, 8886), False, 'import sys, io, os, time, functools\n'), ((11505, 11599), 'sct_crop_image.ImageCropper', 'ImageCropper', ([], {'input_file': 'fname_out', 'output_file': 'fname_out', 'ref': 'warping_field', 'background': '(0)'}), '(input_file=fname_out, output_file=fname_out, ref=warping_field,\n background=0)\n', (11517, 11599), False, 'from sct_crop_image import ImageCropper\n'), ((11751, 11827), 'sct_crop_image.ImageCropper', 'ImageCropper', ([], {'input_file': 'fname_out', 'output_file': 'fname_out', 'ref': 'warping_field'}), '(input_file=fname_out, output_file=fname_out, ref=warping_field)\n', (11763, 11827), False, 'from sct_crop_image import ImageCropper\n'), ((5852, 5895), 'spinalcordtoolbox.image.Image', 'msct_image.Image', (['fname_warp_list[idx_warp]'], {}), '(fname_warp_list[idx_warp])\n', (5868, 5895), True, 'import spinalcordtoolbox.image as msct_image\n')]
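A minimal Python usage sketch for the Transform class above; the file names are placeholders and the import assumes the script's directory is on the Python path within a working SCT installation:

# Hypothetical example: concatenate two warping fields and apply them to a source image.
from sct_apply_transfo import Transform  # assumed import path

transform = Transform(input_filename='t2.nii.gz',          # placeholder source image
                      warp=['warp1.nii.gz', 'warp2.nii.gz'],  # placeholder warping fields
                      fname_dest='template.nii.gz',           # placeholder destination image
                      output_filename='t2_reg.nii.gz',
                      interp='spline')
transform.verbose = 1
transform.apply()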
vurankar/mongo-connector
tests/plugins/test_plugin_base.py
202aa28743855643fddd77d3e66bf1a640df3ed6
# Copyright 2013-2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tests methods in plugin_base.py
"""

import copy
import sys

sys.path[0:0] = [""]

from mongo_connector.plugins.plugin_base import PluginBase

from tests import unittest
from tests.plugins.helpers import (BAD_PLUGIN_CONFIGS, get_test_namespace)


class TestPluginBase(unittest.TestCase):
    """ Tests the utils """

    def setUp(self):
        """Initialize test instance.
        """
        self.namespace = get_test_namespace()

    def test_name(self):
        """Test name.
        """
        configs = self.namespace.plugins[0]
        for cfg in configs:
            obj = PluginBase(cfg)
            self.assertEqual(cfg['pluginName'], obj.name())

        for cfg in BAD_PLUGIN_CONFIGS:
            obj = PluginBase(cfg)
            self.assertEqual(obj.name().index('generated'), 0)

    def test_info(self):
        """Test info.
        """
        configs = self.namespace.plugins[0]
        for cfg in configs:
            obj = PluginBase(cfg)
            self.assertEqual(cfg['config'], obj.info())

        for cfg in BAD_PLUGIN_CONFIGS:
            obj = PluginBase(cfg)
            self.assertEqual(obj.info(), {})

    def _test_not_implemented_method_by_name(self, name):
        """Test not implemented method by name.
        """
        configs = copy.deepcopy(self.namespace.plugins)
        configs.extend(BAD_PLUGIN_CONFIGS)
        for cfg in configs:
            obj = PluginBase(cfg)
            try:
                method = getattr(obj, name)
                if not method or not callable(method):
                    raise KeyError
                method()
            except NotImplementedError as exc:
                pass

        return True

    def test_invoke(self):
        """Test invoke.
        """
        flag = self._test_not_implemented_method_by_name('invoke')
        self.assertEqual(flag, True)

    def test_bulk_invoke(self):
        """Test bulk_invoke.
        """
        # Bulk invoke is really implemented but it calls invoke in loop
        # which returns an not implemented exception.
        flag = self._test_not_implemented_method_by_name('bulk_invoke')
        self.assertEqual(flag, True)

    def test_commit(self):
        """Test commit.
        """
        flag = self._test_not_implemented_method_by_name('commit')
        self.assertEqual(flag, True)

    def test_stop(self):
        """Test stop.
        """
        flag = self._test_not_implemented_method_by_name('stop')
        self.assertEqual(flag, True)


if __name__ == '__main__':
    unittest.main()
[((3116, 3131), 'tests.unittest.main', 'unittest.main', ([], {}), '()\n', (3129, 3131), False, 'from tests import unittest\n'), ((996, 1016), 'tests.plugins.helpers.get_test_namespace', 'get_test_namespace', ([], {}), '()\n', (1014, 1016), False, 'from tests.plugins.helpers import BAD_PLUGIN_CONFIGS, get_test_namespace\n'), ((1861, 1898), 'copy.deepcopy', 'copy.deepcopy', (['self.namespace.plugins'], {}), '(self.namespace.plugins)\n', (1874, 1898), False, 'import copy\n'), ((1168, 1183), 'mongo_connector.plugins.plugin_base.PluginBase', 'PluginBase', (['cfg'], {}), '(cfg)\n', (1178, 1183), False, 'from mongo_connector.plugins.plugin_base import PluginBase\n'), ((1302, 1317), 'mongo_connector.plugins.plugin_base.PluginBase', 'PluginBase', (['cfg'], {}), '(cfg)\n', (1312, 1317), False, 'from mongo_connector.plugins.plugin_base import PluginBase\n'), ((1532, 1547), 'mongo_connector.plugins.plugin_base.PluginBase', 'PluginBase', (['cfg'], {}), '(cfg)\n', (1542, 1547), False, 'from mongo_connector.plugins.plugin_base import PluginBase\n'), ((1662, 1677), 'mongo_connector.plugins.plugin_base.PluginBase', 'PluginBase', (['cfg'], {}), '(cfg)\n', (1672, 1677), False, 'from mongo_connector.plugins.plugin_base import PluginBase\n'), ((1988, 2003), 'mongo_connector.plugins.plugin_base.PluginBase', 'PluginBase', (['cfg'], {}), '(cfg)\n', (1998, 2003), False, 'from mongo_connector.plugins.plugin_base import PluginBase\n')]
PerchLive/django-address
address/models.py
edab73847ba95d4f7a71993bcd55ea6bf300693e
import logging
import sys

from django.core.exceptions import ValidationError
from django.db import models
from django.db.models.fields.related import ForeignObject
from django.utils.encoding import python_2_unicode_compatible

try:
    from django.db.models.fields.related_descriptors import ForwardManyToOneDescriptor
except ImportError:
    from django.db.models.fields.related import ReverseSingleRelatedObjectDescriptor as ForwardManyToOneDescriptor

logger = logging.getLogger(__name__)

if sys.version > '3':
    long = int
    basestring = (str, bytes)
    unicode = str

__all__ = ['Country', 'State', 'Locality', 'Address', 'AddressField']


class InconsistentDictError(Exception):
    pass


def _to_python(value):
    raw = value.get('raw', '')
    country = value.get('country', '')
    country_code = value.get('country_code', '')
    state = value.get('state', '')
    state_code = value.get('state_code', '')
    locality = value.get('locality', '')
    sublocality = value.get('sublocality', '')
    postal_code = value.get('postal_code', '')
    street_number = value.get('street_number', '')
    route = value.get('route', '')
    formatted = value.get('formatted', '')
    latitude = value.get('latitude', None)
    longitude = value.get('longitude', None)

    # If there is no value (empty raw) then return None.
    if not raw:
        return None

    # Fix issue with NYC boroughs (https://code.google.com/p/gmaps-api-issues/issues/detail?id=635)
    if not locality and sublocality:
        locality = sublocality

    # If we have an inconsistent set of value bail out now.
    if (country or state or locality) and not (country and state and locality):
        raise InconsistentDictError

    # Handle the country.
    try:
        country_obj = Country.objects.get(name=country)
    except Country.DoesNotExist:
        if country:
            if len(country_code) > Country._meta.get_field('code').max_length:
                if country_code != country:
                    raise ValueError('Invalid country code (too long): %s' % country_code)
                country_code = ''
            country_obj = Country.objects.create(name=country, code=country_code)
        else:
            country_obj = None

    # Handle the state.
    try:
        state_obj = State.objects.get(name=state, country=country_obj)
    except State.DoesNotExist:
        if state:
            if len(state_code) > State._meta.get_field('code').max_length:
                if state_code != state:
                    raise ValueError('Invalid state code (too long): %s' % state_code)
                state_code = ''
            state_obj = State.objects.create(name=state, code=state_code, country=country_obj)
        else:
            state_obj = None

    # Handle the locality.
    try:
        locality_obj = Locality.objects.get(name=locality, postal_code=postal_code, state=state_obj)
    except Locality.DoesNotExist:
        if locality:
            locality_obj = Locality.objects.create(name=locality, postal_code=postal_code, state=state_obj)
        else:
            locality_obj = None

    # Handle the address.
    try:
        if not (street_number or route or locality):
            address_obj = Address.objects.get(raw=raw)
        else:
            address_obj = Address.objects.get(
                street_number=street_number,
                route=route,
                locality=locality_obj
            )
    except Address.DoesNotExist:
        address_obj = Address(
            street_number=street_number,
            route=route,
            raw=raw,
            locality=locality_obj,
            formatted=formatted,
            latitude=latitude,
            longitude=longitude,
        )

        # If "formatted" is empty try to construct it from other values.
        if not address_obj.formatted:
            address_obj.formatted = unicode(address_obj)

        # Need to save.
        address_obj.save()

    # Done.
    return address_obj


##
# Convert a dictionary to an address.
##
def to_python(value):
    # Keep `None`s.
    if value is None:
        return None

    # Is it already an address object?
    if isinstance(value, Address):
        return value

    # If we have an integer, assume it is a model primary key. This is mostly for
    # Django being a cunt.
    elif isinstance(value, (int, long)):
        return value

    # A string is considered a raw value.
    elif isinstance(value, basestring):
        obj = Address(raw=value)
        obj.save()
        return obj

    # A dictionary of named address components.
    elif isinstance(value, dict):
        # Attempt a conversion.
        try:
            return _to_python(value)
        except InconsistentDictError:
            return Address.objects.create(raw=value['raw'])

    # Not in any of the formats I recognise.
    raise ValidationError('Invalid address value.')


##
# A country.
##
@python_2_unicode_compatible
class Country(models.Model):
    name = models.CharField(max_length=40, unique=True, blank=True)
    code = models.CharField(max_length=2, blank=True)  # not unique as there are duplicates (IT)

    class Meta:
        verbose_name_plural = 'Countries'
        ordering = ('name',)

    def __str__(self):
        return '%s' % (self.name or self.code)


##
# A state. Google refers to this as `administration_level_1`.
##
@python_2_unicode_compatible
class State(models.Model):
    name = models.CharField(max_length=165, blank=True)
    code = models.CharField(max_length=3, blank=True)
    country = models.ForeignKey(Country, on_delete=models.CASCADE, related_name='states')

    class Meta:
        unique_together = ('name', 'country')
        ordering = ('country', 'name')

    def __str__(self):
        txt = self.to_str()
        country = '%s' % self.country
        if country and txt:
            txt += ', '
        txt += country
        return txt

    def to_str(self):
        return '%s' % (self.name or self.code)


##
# A locality (suburb).
##
@python_2_unicode_compatible
class Locality(models.Model):
    name = models.CharField(max_length=165, blank=True)
    postal_code = models.CharField(max_length=10, blank=True)
    state = models.ForeignKey(State, on_delete=models.CASCADE, related_name='localities')

    class Meta:
        verbose_name_plural = 'Localities'
        unique_together = ('name', 'postal_code', 'state')
        ordering = ('state', 'name')

    def __str__(self):
        txt = '%s' % self.name
        state = self.state.to_str() if self.state else ''
        if txt and state:
            txt += ', '
        txt += state
        if self.postal_code:
            txt += ' %s' % self.postal_code
        cntry = '%s' % (self.state.country if self.state and self.state.country else '')
        if cntry:
            txt += ', %s' % cntry
        return txt


##
# An address. If for any reason we are unable to find a matching
# decomposed address we will store the raw address string in `raw`.
##
@python_2_unicode_compatible
class Address(models.Model):
    street_number = models.CharField(max_length=20, blank=True)
    route = models.CharField(max_length=100, blank=True)
    locality = models.ForeignKey(Locality, on_delete=models.CASCADE, related_name='addresses', blank=True, null=True)
    raw = models.CharField(max_length=200)
    formatted = models.CharField(max_length=200, blank=True)
    latitude = models.FloatField(blank=True, null=True)
    longitude = models.FloatField(blank=True, null=True)

    class Meta:
        verbose_name_plural = 'Addresses'
        ordering = ('locality', 'route', 'street_number')
        # unique_together = ('locality', 'route', 'street_number')

    def __str__(self):
        if self.formatted != '':
            txt = '%s' % self.formatted
        elif self.locality:
            txt = ''
            if self.street_number:
                txt = '%s' % self.street_number
            if self.route:
                if txt:
                    txt += ' %s' % self.route
            locality = '%s' % self.locality
            if txt and locality:
                txt += ', '
            txt += locality
        else:
            txt = '%s' % self.raw
        return txt

    def clean(self):
        if not self.raw:
            raise ValidationError('Addresses may not have a blank `raw` field.')

    def as_dict(self):
        ad = dict(
            street_number=self.street_number,
            route=self.route,
            raw=self.raw,
            formatted=self.formatted,
            latitude=self.latitude if self.latitude else '',
            longitude=self.longitude if self.longitude else '',
        )
        if self.locality:
            ad['locality'] = self.locality.name
            ad['postal_code'] = self.locality.postal_code
            if self.locality.state:
                ad['state'] = self.locality.state.name
                ad['state_code'] = self.locality.state.code
                if self.locality.state.country:
                    ad['country'] = self.locality.state.country.name
                    ad['country_code'] = self.locality.state.country.code
        return ad


class AddressDescriptor(ForwardManyToOneDescriptor):

    def __set__(self, inst, value):
        super(AddressDescriptor, self).__set__(inst, to_python(value))


##
# A field for addresses in other models.
##
class AddressField(models.ForeignKey):
    description = 'An address'

    def __init__(self, *args, **kwargs):
        kwargs['to'] = 'address.Address'
        super(AddressField, self).__init__(*args, **kwargs)

    def contribute_to_class(self, cls, name, virtual_only=False):
        from address.compat import compat_contribute_to_class
        compat_contribute_to_class(self, cls, name, virtual_only)
        # super(ForeignObject, self).contribute_to_class(cls, name, virtual_only=virtual_only)
        setattr(cls, self.name, AddressDescriptor(self))

    # def deconstruct(self):
    #     name, path, args, kwargs = super(AddressField, self).deconstruct()
    #     del kwargs['to']
    #     return name, path, args, kwargs

    def formfield(self, **kwargs):
        from .forms import AddressField as AddressFormField
        defaults = dict(form_class=AddressFormField)
        defaults.update(kwargs)
        return super(AddressField, self).formfield(**defaults)
[((465, 492), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (482, 492), False, 'import logging\n'), ((4865, 4906), 'django.core.exceptions.ValidationError', 'ValidationError', (['"""Invalid address value."""'], {}), "('Invalid address value.')\n", (4880, 4906), False, 'from django.core.exceptions import ValidationError\n'), ((4998, 5054), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)', 'unique': '(True)', 'blank': '(True)'}), '(max_length=40, unique=True, blank=True)\n', (5014, 5054), False, 'from django.db import models\n'), ((5066, 5108), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(2)', 'blank': '(True)'}), '(max_length=2, blank=True)\n', (5082, 5108), False, 'from django.db import models\n'), ((5449, 5493), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(165)', 'blank': '(True)'}), '(max_length=165, blank=True)\n', (5465, 5493), False, 'from django.db import models\n'), ((5505, 5547), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(3)', 'blank': '(True)'}), '(max_length=3, blank=True)\n', (5521, 5547), False, 'from django.db import models\n'), ((5562, 5637), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Country'], {'on_delete': 'models.CASCADE', 'related_name': '"""states"""'}), "(Country, on_delete=models.CASCADE, related_name='states')\n", (5579, 5637), False, 'from django.db import models\n'), ((6096, 6140), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(165)', 'blank': '(True)'}), '(max_length=165, blank=True)\n', (6112, 6140), False, 'from django.db import models\n'), ((6159, 6202), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)', 'blank': '(True)'}), '(max_length=10, blank=True)\n', (6175, 6202), False, 'from django.db import models\n'), ((6215, 6292), 'django.db.models.ForeignKey', 'models.ForeignKey', (['State'], {'on_delete': 'models.CASCADE', 'related_name': '"""localities"""'}), "(State, on_delete=models.CASCADE, related_name='localities')\n", (6232, 6292), False, 'from django.db import models\n'), ((7086, 7129), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)', 'blank': '(True)'}), '(max_length=20, blank=True)\n', (7102, 7129), False, 'from django.db import models\n'), ((7142, 7186), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'blank': '(True)'}), '(max_length=100, blank=True)\n', (7158, 7186), False, 'from django.db import models\n'), ((7202, 7309), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Locality'], {'on_delete': 'models.CASCADE', 'related_name': '"""addresses"""', 'blank': '(True)', 'null': '(True)'}), "(Locality, on_delete=models.CASCADE, related_name=\n 'addresses', blank=True, null=True)\n", (7219, 7309), False, 'from django.db import models\n'), ((7315, 7347), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (7331, 7347), False, 'from django.db import models\n'), ((7364, 7408), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'blank': '(True)'}), '(max_length=200, blank=True)\n', (7380, 7408), False, 'from django.db import models\n'), ((7424, 7464), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (7441, 7464), False, 'from django.db import models\n'), ((7481, 7521), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (7498, 7521), False, 'from django.db import models\n'), ((9734, 9791), 'address.compat.compat_contribute_to_class', 'compat_contribute_to_class', (['self', 'cls', 'name', 'virtual_only'], {}), '(self, cls, name, virtual_only)\n', (9760, 9791), False, 'from address.compat import compat_contribute_to_class\n'), ((8297, 8359), 'django.core.exceptions.ValidationError', 'ValidationError', (['"""Addresses may not have a blank `raw` field."""'], {}), "('Addresses may not have a blank `raw` field.')\n", (8312, 8359), False, 'from django.core.exceptions import ValidationError\n')]
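A short usage sketch for the to_python converter above; the component dict mirrors the keys _to_python reads, the values are illustrative, and a configured Django project with this app installed is assumed:

# Hypothetical example: a geocoder-style components dict becomes an Address,
# creating the related Country/State/Locality rows on first use.
address = to_python({
    'raw': '123 Main St, Ottawa, ON K1A 0A6, Canada',   # placeholder address
    'street_number': '123',
    'route': 'Main St',
    'locality': 'Ottawa',
    'postal_code': 'K1A 0A6',
    'state': 'Ontario',
    'state_code': 'ON',
    'country': 'Canada',
    'country_code': 'CA',
})
print(address.formatted or address.raw)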
eavatar/ava
src/eavatar.ava/pod/mods/tasks/__init__.py
4f09c5417b7187dd919b7edabb8c516d8efc0696
# -*- coding: utf-8 -*-
"""
Modules for exposing functions that can be run as tasks.
"""
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)
[]
jjiege/odoo
addons/hr_payroll_account/models/hr_payroll_account.py
fd5b8ad387c1881f349d125cbd56433f4d49398f
#-*- coding:utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from odoo import api, fields, models, _
from odoo.exceptions import UserError
from odoo.tools import float_compare, float_is_zero


class HrPayslipLine(models.Model):
    _inherit = 'hr.payslip.line'

    def _get_partner_id(self, credit_account):
        """
        Get partner_id of slip line to use in account_move_line
        """
        # use partner of salary rule or fallback on employee's address
        register_partner_id = self.salary_rule_id.register_id.partner_id
        partner_id = register_partner_id.id or self.slip_id.employee_id.address_home_id.id
        if credit_account:
            if register_partner_id or self.salary_rule_id.account_credit.internal_type in ('receivable', 'payable'):
                return partner_id
        else:
            if register_partner_id or self.salary_rule_id.account_debit.internal_type in ('receivable', 'payable'):
                return partner_id
        return False


class HrPayslip(models.Model):
    _inherit = 'hr.payslip'

    date = fields.Date('Date Account', states={'draft': [('readonly', False)]}, readonly=True,
        help="Keep empty to use the period of the validation(Payslip) date.")
    journal_id = fields.Many2one('account.journal', 'Salary Journal', readonly=True, required=True,
        states={'draft': [('readonly', False)]},
        default=lambda self: self.env['account.journal'].search([('type', '=', 'general')], limit=1))
    move_id = fields.Many2one('account.move', 'Accounting Entry', readonly=True, copy=False)

    @api.model
    def create(self, vals):
        if 'journal_id' in self.env.context:
            vals['journal_id'] = self.env.context.get('journal_id')
        return super(HrPayslip, self).create(vals)

    @api.onchange('contract_id')
    def onchange_contract(self):
        super(HrPayslip, self).onchange_contract()
        self.journal_id = self.contract_id.journal_id.id or (not self.contract_id and self.default_get(['journal_id'])['journal_id'])

    @api.multi
    def action_payslip_cancel(self):
        moves = self.mapped('move_id')
        moves.filtered(lambda x: x.state == 'posted').button_cancel()
        moves.unlink()
        return super(HrPayslip, self).action_payslip_cancel()

    @api.multi
    def action_payslip_done(self):
        res = super(HrPayslip, self).action_payslip_done()
        for slip in self:
            line_ids = []
            debit_sum = 0.0
            credit_sum = 0.0
            date = slip.date or slip.date_to
            currency = slip.company_id.currency_id or slip.journal_id.company_id.currency_id
            name = _('Payslip of %s') % (slip.employee_id.name)
            move_dict = {
                'narration': name,
                'ref': slip.number,
                'journal_id': slip.journal_id.id,
                'date': date,
            }
            for line in slip.details_by_salary_rule_category:
                amount = currency.round(slip.credit_note and -line.total or line.total)
                if currency.is_zero(amount):
                    continue
                debit_account_id = line.salary_rule_id.account_debit.id
                credit_account_id = line.salary_rule_id.account_credit.id

                if debit_account_id:
                    debit_line = (0, 0, {
                        'name': line.name,
                        'partner_id': line._get_partner_id(credit_account=False),
                        'account_id': debit_account_id,
                        'journal_id': slip.journal_id.id,
                        'date': date,
                        'debit': amount > 0.0 and amount or 0.0,
                        'credit': amount < 0.0 and -amount or 0.0,
                        'analytic_account_id': line.salary_rule_id.analytic_account_id.id or slip.contract_id.analytic_account_id.id,
                        'tax_line_id': line.salary_rule_id.account_tax_id.id,
                    })
                    line_ids.append(debit_line)
                    debit_sum += debit_line[2]['debit'] - debit_line[2]['credit']

                if credit_account_id:
                    credit_line = (0, 0, {
                        'name': line.name,
                        'partner_id': line._get_partner_id(credit_account=True),
                        'account_id': credit_account_id,
                        'journal_id': slip.journal_id.id,
                        'date': date,
                        'debit': amount < 0.0 and -amount or 0.0,
                        'credit': amount > 0.0 and amount or 0.0,
                        'analytic_account_id': line.salary_rule_id.analytic_account_id.id or slip.contract_id.analytic_account_id.id,
                        'tax_line_id': line.salary_rule_id.account_tax_id.id,
                    })
                    line_ids.append(credit_line)
                    credit_sum += credit_line[2]['credit'] - credit_line[2]['debit']

            if currency.compare_amounts(credit_sum, debit_sum) == -1:
                acc_id = slip.journal_id.default_credit_account_id.id
                if not acc_id:
                    raise UserError(_('The Expense Journal "%s" has not properly configured the Credit Account!') % (slip.journal_id.name))
                adjust_credit = (0, 0, {
                    'name': _('Adjustment Entry'),
                    'partner_id': False,
                    'account_id': acc_id,
                    'journal_id': slip.journal_id.id,
                    'date': date,
                    'debit': 0.0,
                    'credit': currency.round(debit_sum - credit_sum),
                })
                line_ids.append(adjust_credit)

            elif currency.compare_amounts(debit_sum, credit_sum) == -1:
                acc_id = slip.journal_id.default_debit_account_id.id
                if not acc_id:
                    raise UserError(_('The Expense Journal "%s" has not properly configured the Debit Account!') % (slip.journal_id.name))
                adjust_debit = (0, 0, {
                    'name': _('Adjustment Entry'),
                    'partner_id': False,
                    'account_id': acc_id,
                    'journal_id': slip.journal_id.id,
                    'date': date,
                    'debit': currency.round(credit_sum - debit_sum),
                    'credit': 0.0,
                })
                line_ids.append(adjust_debit)
            move_dict['line_ids'] = line_ids
            move = self.env['account.move'].create(move_dict)
            slip.write({'move_id': move.id, 'date': date})
            move.post()
        return res


class HrSalaryRule(models.Model):
    _inherit = 'hr.salary.rule'

    analytic_account_id = fields.Many2one('account.analytic.account', 'Analytic Account')
    account_tax_id = fields.Many2one('account.tax', 'Tax')
    account_debit = fields.Many2one('account.account', 'Debit Account', domain=[('deprecated', '=', False)])
    account_credit = fields.Many2one('account.account', 'Credit Account', domain=[('deprecated', '=', False)])


class HrContract(models.Model):
    _inherit = 'hr.contract'
    _description = 'Employee Contract'

    analytic_account_id = fields.Many2one('account.analytic.account', 'Analytic Account')
    journal_id = fields.Many2one('account.journal', 'Salary Journal')


class HrPayslipRun(models.Model):
    _inherit = 'hr.payslip.run'

    journal_id = fields.Many2one('account.journal', 'Salary Journal', states={'draft': [('readonly', False)]},
        readonly=True, required=True,
        default=lambda self: self.env['account.journal'].search([('type', '=', 'general')], limit=1))
[((1104, 1266), 'odoo.fields.Date', 'fields.Date', (['"""Date Account"""'], {'states': "{'draft': [('readonly', False)]}", 'readonly': '(True)', 'help': '"""Keep empty to use the period of the validation(Payslip) date."""'}), "('Date Account', states={'draft': [('readonly', False)]},\n readonly=True, help=\n 'Keep empty to use the period of the validation(Payslip) date.')\n", (1115, 1266), False, 'from odoo import api, fields, models, _\n'), ((1523, 1601), 'odoo.fields.Many2one', 'fields.Many2one', (['"""account.move"""', '"""Accounting Entry"""'], {'readonly': '(True)', 'copy': '(False)'}), "('account.move', 'Accounting Entry', readonly=True, copy=False)\n", (1538, 1601), False, 'from odoo import api, fields, models, _\n'), ((1816, 1843), 'odoo.api.onchange', 'api.onchange', (['"""contract_id"""'], {}), "('contract_id')\n", (1828, 1843), False, 'from odoo import api, fields, models, _\n'), ((6799, 6862), 'odoo.fields.Many2one', 'fields.Many2one', (['"""account.analytic.account"""', '"""Analytic Account"""'], {}), "('account.analytic.account', 'Analytic Account')\n", (6814, 6862), False, 'from odoo import api, fields, models, _\n'), ((6884, 6921), 'odoo.fields.Many2one', 'fields.Many2one', (['"""account.tax"""', '"""Tax"""'], {}), "('account.tax', 'Tax')\n", (6899, 6921), False, 'from odoo import api, fields, models, _\n'), ((6942, 7034), 'odoo.fields.Many2one', 'fields.Many2one', (['"""account.account"""', '"""Debit Account"""'], {'domain': "[('deprecated', '=', False)]"}), "('account.account', 'Debit Account', domain=[('deprecated',\n '=', False)])\n", (6957, 7034), False, 'from odoo import api, fields, models, _\n'), ((7052, 7145), 'odoo.fields.Many2one', 'fields.Many2one', (['"""account.account"""', '"""Credit Account"""'], {'domain': "[('deprecated', '=', False)]"}), "('account.account', 'Credit Account', domain=[('deprecated',\n '=', False)])\n", (7067, 7145), False, 'from odoo import api, fields, models, _\n'), ((7270, 7333), 'odoo.fields.Many2one', 'fields.Many2one', (['"""account.analytic.account"""', '"""Analytic Account"""'], {}), "('account.analytic.account', 'Analytic Account')\n", (7285, 7333), False, 'from odoo import api, fields, models, _\n'), ((7351, 7403), 'odoo.fields.Many2one', 'fields.Many2one', (['"""account.journal"""', '"""Salary Journal"""'], {}), "('account.journal', 'Salary Journal')\n", (7366, 7403), False, 'from odoo import api, fields, models, _\n'), ((2687, 2705), 'odoo._', '_', (['"""Payslip of %s"""'], {}), "('Payslip of %s')\n", (2688, 2705), False, 'from odoo import api, fields, models, _\n'), ((5388, 5409), 'odoo._', '_', (['"""Adjustment Entry"""'], {}), "('Adjustment Entry')\n", (5389, 5409), False, 'from odoo import api, fields, models, _\n'), ((5215, 5292), 'odoo._', '_', (['"""The Expense Journal "%s" has not properly configured the Credit Account!"""'], {}), '(\'The Expense Journal "%s" has not properly configured the Credit Account!\')\n', (5216, 5292), False, 'from odoo import api, fields, models, _\n'), ((6132, 6153), 'odoo._', '_', (['"""Adjustment Entry"""'], {}), "('Adjustment Entry')\n", (6133, 6153), False, 'from odoo import api, fields, models, _\n'), ((5961, 6037), 'odoo._', '_', (['"""The Expense Journal "%s" has not properly configured the Debit Account!"""'], {}), '(\'The Expense Journal "%s" has not properly configured the Debit Account!\')\n', (5962, 6037), False, 'from odoo import api, fields, models, _\n')]
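For context on the (0, 0, {...}) triplets assembled in action_payslip_done above: Odoo's ORM accepts command tuples on one2many fields, where (0, 0, vals) creates a new linked record. A minimal illustrative sketch with placeholder names, ids, and amounts:

# Hypothetical example of the command format used for the move lines above.
move_dict = {
    'narration': 'Payslip of Jane Doe',  # placeholder employee
    'ref': 'SLIP/001',                   # placeholder payslip number
    'journal_id': 1,                     # placeholder journal id
    'date': '2019-01-31',
    'line_ids': [
        (0, 0, {'name': 'Basic Salary', 'account_id': 10, 'debit': 3000.0, 'credit': 0.0}),
        (0, 0, {'name': 'Net Payable', 'account_id': 11, 'debit': 0.0, 'credit': 3000.0}),
    ],
}
# self.env['account.move'].create(move_dict) would expand these commands into move lines.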
abkoesdw/ml-datasets
ml_datasets/utils.py
c8c7b85ba8ed9c0ea233b4092d499d5022952011
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import sys
import numpy as np
from matplotlib.colors import LinearSegmentedColormap
from matplotlib.colors import BoundaryNorm


def plot_images(
    num_sample_perclass=10, x=None, y=None, labels=None, title=None, cmap=None
):
    grid_x = num_sample_perclass + 1
    grid_y = len(labels)
    plt.figure(figsize=(grid_y, grid_x))
    gs1 = gridspec.GridSpec(grid_y, grid_x)
    gs1.update(wspace=0.025, hspace=0.05)
    font = {"family": "serif", "weight": "bold"}
    plt.suptitle(title)
    j = 0
    for i in range(grid_y):
        idxs = [0] + list(np.where(y == list(labels.keys())[i])[0][: grid_x - 1])
        label = labels[list(labels.keys())[i]]
        for k, idx in enumerate(idxs):
            ax1 = plt.subplot(gs1[j])
            if k == 0:
                ax1.text(0, 0.25, label, ha="right", wrap=True, fontdict=font)
            else:
                ax1.imshow(x[idx, ...], cmap=cmap)
            plt.axis("off")
            j += 1
    plt.show()


def plot_2D(x, y, title, axis="off"):
    BLUE, ORANGE = "#57B5E8", "#E69E00"
    plt.figure(figsize=(8, 8))
    plt.scatter(
        x[:, 0],
        x[:, 1],
        s=18,
        facecolors="none",
        edgecolors=np.array([BLUE, ORANGE])[y],
    )
    if axis == "off":
        plt.axis("off")
    elif axis == "on":
        plt.xlabel("x_1")
        plt.ylabel("x_2")
    else:
        print("incorrect values for arg: axis (on or off only)")
        sys.exit()
    plt.title(title)
    plt.show()


def plot_dna(df, label):
    matrix = df.values
    col_names = df.columns
    rows = np.arange(matrix.shape[0])
    cols = np.arange(matrix.shape[1])
    np.random.seed(3)
    np.random.shuffle(rows)
    np.random.shuffle(cols)
    matrix = matrix[:, cols[:100]].T
    matrix = matrix[:, rows]
    col_names = col_names[cols[:100]]
    label = label[rows]
    mat_min = np.min(matrix)
    mat_max = np.max(matrix)
    mat_min = -np.max([np.abs(mat_min), mat_max])
    mat_max = np.max([np.abs(mat_min), mat_max])
    matrix = np.ma.masked_where(np.abs(matrix) <= 0.3, matrix)
    plt.figure(figsize=(6, 12))
    cmap_list = ["red", "darkred", "green", "lime", "lightgreen"]
    cmap = LinearSegmentedColormap.from_list("Custom cmap", cmap_list, len(cmap_list))
    cmap.set_bad("black")
    bounds = np.linspace(
        mat_min + 6, mat_max - 6, 5
    )  # np.arange(mat_min + 6, mat_max - 6, 0.1)
    idx = np.searchsorted(bounds, 0)
    bounds = np.insert(bounds, idx, 0)
    norm = BoundaryNorm(bounds, cmap.N)
    plt.imshow(matrix, cmap=cmap, norm=norm)
    plt.xticks(np.arange(len(label)))
    plt.yticks(np.arange(len(col_names)))
    ax = plt.gca()
    ax.set_xticklabels(label, rotation=90)
    ax.set_yticklabels(col_names)
    ax.yaxis.tick_right()
    ax.tick_params(axis=u"both", which=u"both", labelsize=5, length=0.0)
    plt.tight_layout()
    fig = plt.gcf()
    # fig.set_size_inches((6, 12), forward=False)
    # fig.savefig("img/dna.png", dpi=200)
    plt.show()
[((366, 402), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(grid_y, grid_x)'}), '(figsize=(grid_y, grid_x))\n', (376, 402), True, 'import matplotlib.pyplot as plt\n'), ((413, 446), 'matplotlib.gridspec.GridSpec', 'gridspec.GridSpec', (['grid_y', 'grid_x'], {}), '(grid_y, grid_x)\n', (430, 446), True, 'import matplotlib.gridspec as gridspec\n'), ((544, 563), 'matplotlib.pyplot.suptitle', 'plt.suptitle', (['title'], {}), '(title)\n', (556, 563), True, 'import matplotlib.pyplot as plt\n'), ((1035, 1045), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1043, 1045), True, 'import matplotlib.pyplot as plt\n'), ((1130, 1156), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 8)'}), '(figsize=(8, 8))\n', (1140, 1156), True, 'import matplotlib.pyplot as plt\n'), ((1523, 1539), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (1532, 1539), True, 'import matplotlib.pyplot as plt\n'), ((1544, 1554), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1552, 1554), True, 'import matplotlib.pyplot as plt\n'), ((1643, 1669), 'numpy.arange', 'np.arange', (['matrix.shape[0]'], {}), '(matrix.shape[0])\n', (1652, 1669), True, 'import numpy as np\n'), ((1681, 1707), 'numpy.arange', 'np.arange', (['matrix.shape[1]'], {}), '(matrix.shape[1])\n', (1690, 1707), True, 'import numpy as np\n'), ((1712, 1729), 'numpy.random.seed', 'np.random.seed', (['(3)'], {}), '(3)\n', (1726, 1729), True, 'import numpy as np\n'), ((1734, 1757), 'numpy.random.shuffle', 'np.random.shuffle', (['rows'], {}), '(rows)\n', (1751, 1757), True, 'import numpy as np\n'), ((1762, 1785), 'numpy.random.shuffle', 'np.random.shuffle', (['cols'], {}), '(cols)\n', (1779, 1785), True, 'import numpy as np\n'), ((1929, 1943), 'numpy.min', 'np.min', (['matrix'], {}), '(matrix)\n', (1935, 1943), True, 'import numpy as np\n'), ((1958, 1972), 'numpy.max', 'np.max', (['matrix'], {}), '(matrix)\n', (1964, 1972), True, 'import numpy as np\n'), ((2140, 2167), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(6, 12)'}), '(figsize=(6, 12))\n', (2150, 2167), True, 'import matplotlib.pyplot as plt\n'), ((2361, 2401), 'numpy.linspace', 'np.linspace', (['(mat_min + 6)', '(mat_max - 6)', '(5)'], {}), '(mat_min + 6, mat_max - 6, 5)\n', (2372, 2401), True, 'import numpy as np\n'), ((2470, 2496), 'numpy.searchsorted', 'np.searchsorted', (['bounds', '(0)'], {}), '(bounds, 0)\n', (2485, 2496), True, 'import numpy as np\n'), ((2511, 2536), 'numpy.insert', 'np.insert', (['bounds', 'idx', '(0)'], {}), '(bounds, idx, 0)\n', (2520, 2536), True, 'import numpy as np\n'), ((2548, 2576), 'matplotlib.colors.BoundaryNorm', 'BoundaryNorm', (['bounds', 'cmap.N'], {}), '(bounds, cmap.N)\n', (2560, 2576), False, 'from matplotlib.colors import BoundaryNorm\n'), ((2582, 2622), 'matplotlib.pyplot.imshow', 'plt.imshow', (['matrix'], {'cmap': 'cmap', 'norm': 'norm'}), '(matrix, cmap=cmap, norm=norm)\n', (2592, 2622), True, 'import matplotlib.pyplot as plt\n'), ((2712, 2721), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (2719, 2721), True, 'import matplotlib.pyplot as plt\n'), ((2902, 2920), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (2918, 2920), True, 'import matplotlib.pyplot as plt\n'), ((2931, 2940), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (2938, 2940), True, 'import matplotlib.pyplot as plt\n'), ((3037, 3047), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3045, 3047), True, 'import matplotlib.pyplot as plt\n'), ((1333, 1348), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (1341, 1348), True, 'import matplotlib.pyplot as plt\n'), ((789, 808), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs1[j]'], {}), '(gs1[j])\n', (800, 808), True, 'import matplotlib.pyplot as plt\n'), ((995, 1010), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (1003, 1010), True, 'import matplotlib.pyplot as plt\n'), ((1380, 1397), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""x_1"""'], {}), "('x_1')\n", (1390, 1397), True, 'import matplotlib.pyplot as plt\n'), ((1406, 1423), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""x_2"""'], {}), "('x_2')\n", (1416, 1423), True, 'import matplotlib.pyplot as plt\n'), ((1507, 1517), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1515, 1517), False, 'import sys\n'), ((2045, 2060), 'numpy.abs', 'np.abs', (['mat_min'], {}), '(mat_min)\n', (2051, 2060), True, 'import numpy as np\n'), ((2104, 2118), 'numpy.abs', 'np.abs', (['matrix'], {}), '(matrix)\n', (2110, 2118), True, 'import numpy as np\n'), ((1268, 1292), 'numpy.array', 'np.array', (['[BLUE, ORANGE]'], {}), '([BLUE, ORANGE])\n', (1276, 1292), True, 'import numpy as np\n'), ((1996, 2011), 'numpy.abs', 'np.abs', (['mat_min'], {}), '(mat_min)\n', (2002, 2011), True, 'import numpy as np\n')]
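
A minimal usage sketch for the plot_2D helper above, assuming the package is importable as ml_datasets (per the repo_path); the data is synthetic, and the labels must be 0/1 because they index the two-element [BLUE, ORANGE] color array:

import numpy as np
from ml_datasets.utils import plot_2D

rng = np.random.RandomState(0)
x = rng.randn(200, 2)                     # 200 points in 2D
y = (x[:, 0] + x[:, 1] > 0).astype(int)  # binary labels indexing [BLUE, ORANGE]
plot_2D(x, y, title="Synthetic two-class scatter", axis="on")
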
MasterRadule/DefenceFirst
Simulator/simulator.py
d3c3a652357ac433213c38fa6134780e286f6cf2
import logging
import os
import random
from abc import ABC, abstractmethod
from random import randint
from time import sleep, strftime

HOSTNAME = ['defence-first.rs', 'defence-first.de', 'defence-first.ru']
HOSTIP = ['78.218.236.218', '87.236.11.212', '54.147.165.86']
SOURCEIP = ['163.189.141.53', '204.164.10.7', '213.166.160.236', '123.197.235.233', '77.28.21.14']
USERNAMES = ['user1', 'user2', 'user3', 'user4', 'user5']
FACILITY = ['KERN', 'USER', 'MAIL', 'DAEMON', 'AUTH', 'SYSLOG', 'LPR', 'NEWS', 'UUCP', 'CLOCK_DAEMON',
            'AUTHPRIV', 'FTP', 'NTP', 'LOGAUDIT', 'LOGALERT', 'CRON', 'LOCAL0', 'LOCAL1', 'LOCAL2',
            'LOCAL3', 'LOCAL4', 'LOCAL5', 'LOCAL6', 'LOCAL7']
SEVERITY = ['DEBUG', 'INFORMATIONAL', 'NOTICE', 'WARNING', 'ERROR', 'CRITICAL', 'ALERT', 'EMERGENCY']

FORMAT = '%(asctime)s %(hostname)s-Application-%(hostip)s-%(sourceip)s %(severity)s-%(facility)s %(' \
         'message)s '

RESOURCES = ['index.html', 'document.xml', 'dashboard.html']

LOGS_PATH = 'logs'


class State(ABC):
    @abstractmethod
    def run(self, context):
        return NotImplemented


class DoSAttack(State):
    def run(self, context):
        d = {'hostname': HOSTNAME[0], 'hostip': HOSTIP[0],
             'severity': SEVERITY[1], 'facility': FACILITY[1]}

        http_response_code = '200'
        for i in range(25):
            if i >= 20:
                http_response_code = '503'
                d['severity'] = SEVERITY[5]
            for sourceip in SOURCEIP:
                d['sourceip'] = sourceip
                context.logger.info('Requested resource index.html {}'.format(http_response_code), extra=d)

        context.state = NormalState()


class NormalState(State):
    def run(self, context):
        normal = {'hostname': HOSTNAME[1], 'hostip': HOSTIP[1],
                  'severity': SEVERITY[1], 'facility': FACILITY[1]}

        while True:
            normal['sourceip'] = random.choice(SOURCEIP)
            if random.random() < 0.3:
                context.logger.info(
                    'Successful authorization on username "{}"'.format(USERNAMES[SOURCEIP.index(normal['sourceip'])]),
                    extra=normal)
            else:
                context.logger.info('Requested resource {} 200'.format(random.choice(RESOURCES)), extra=normal)
            sleep(1)

            if random.random() < 0.1:
                rand = randint(1, 3)
                if rand == 1:
                    context.state = DoSAttack()
                elif rand == 2:
                    context.state = BruteForce()
                elif rand == 3:
                    context.state = DatabaseError()
                context.state.run(context)


class BruteForce(State):
    def run(self, context):
        attack = {'hostname': HOSTNAME[1], 'hostip': HOSTIP[1], 'sourceip': SOURCEIP[0],
                  'severity': SEVERITY[2], 'facility': FACILITY[4]}
        normal = {'hostname': HOSTNAME[1], 'hostip': HOSTIP[1],
                  'severity': SEVERITY[1], 'facility': FACILITY[1]}

        for i in range(30):
            if i > 5:
                attack['severity'] = SEVERITY[3]
            if random.random() < 0.45:
                normal['sourceip'] = random.choice(SOURCEIP)
                context.logger.info('Requested resource {} 200'.format(random.choice(RESOURCES)), extra=normal)
                sleep(0.5)
            context.logger.info('Failed authorization on username "user1"', extra=attack)
            sleep(0.5)

        context.state = NormalState()


class DatabaseError(State):
    def run(self, context):
        d = {'hostname': HOSTNAME[2], 'hostip': HOSTIP[2], 'sourceip': SOURCEIP[0],
             'severity': SEVERITY[4], 'facility': FACILITY[3]}

        context.logger.info('Database error', extra=d)
        sleep(1)

        context.state = NormalState()


class Context:
    def __init__(self):
        self.state = NormalState()

        formatter = logging.Formatter(FORMAT, "%Y-%m-%d %H:%M:%S")
        logger = logging.getLogger('simulator')

        if not os.path.exists(LOGS_PATH):
            os.mkdir(LOGS_PATH)

        fileHandler = logging.FileHandler(
            os.path.join(LOGS_PATH, 'application_log-{}.log'.format(strftime('%Y-%m-%d'))))
        fileHandler.setFormatter(formatter)

        consoleHandler = logging.StreamHandler()
        consoleHandler.setFormatter(formatter)

        logger.addHandler(fileHandler)
        logger.addHandler(consoleHandler)
        logger.setLevel(logging.INFO)

        self.logger = logger

    def run(self):
        self.state.run(self)

    @property
    def state(self):
        return self._state

    @state.setter
    def state(self, value):
        self._state = value


if __name__ == '__main__':
    sm = Context()
    sm.run()
[((3803, 3811), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (3808, 3811), False, 'from time import sleep, strftime\n'), ((3947, 3993), 'logging.Formatter', 'logging.Formatter', (['FORMAT', '"""%Y-%m-%d %H:%M:%S"""'], {}), "(FORMAT, '%Y-%m-%d %H:%M:%S')\n", (3964, 3993), False, 'import logging\n'), ((4012, 4042), 'logging.getLogger', 'logging.getLogger', (['"""simulator"""'], {}), "('simulator')\n", (4029, 4042), False, 'import logging\n'), ((4324, 4347), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (4345, 4347), False, 'import logging\n'), ((1930, 1953), 'random.choice', 'random.choice', (['SOURCEIP'], {}), '(SOURCEIP)\n', (1943, 1953), False, 'import random\n'), ((2324, 2332), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (2329, 2332), False, 'from time import sleep, strftime\n'), ((3484, 3494), 'time.sleep', 'sleep', (['(0.5)'], {}), '(0.5)\n', (3489, 3494), False, 'from time import sleep, strftime\n'), ((4059, 4084), 'os.path.exists', 'os.path.exists', (['LOGS_PATH'], {}), '(LOGS_PATH)\n', (4073, 4084), False, 'import os\n'), ((4098, 4117), 'os.mkdir', 'os.mkdir', (['LOGS_PATH'], {}), '(LOGS_PATH)\n', (4106, 4117), False, 'import os\n'), ((1969, 1984), 'random.random', 'random.random', ([], {}), '()\n', (1982, 1984), False, 'import random\n'), ((2349, 2364), 'random.random', 'random.random', ([], {}), '()\n', (2362, 2364), False, 'import random\n'), ((2395, 2408), 'random.randint', 'randint', (['(1)', '(3)'], {}), '(1, 3)\n', (2402, 2408), False, 'from random import randint\n'), ((3157, 3172), 'random.random', 'random.random', ([], {}), '()\n', (3170, 3172), False, 'import random\n'), ((3218, 3241), 'random.choice', 'random.choice', (['SOURCEIP'], {}), '(SOURCEIP)\n', (3231, 3241), False, 'import random\n'), ((3370, 3380), 'time.sleep', 'sleep', (['(0.5)'], {}), '(0.5)\n', (3375, 3380), False, 'from time import sleep, strftime\n'), ((4230, 4250), 'time.strftime', 'strftime', (['"""%Y-%m-%d"""'], {}), "('%Y-%m-%d')\n", (4238, 4250), False, 'from time import sleep, strftime\n'), ((2271, 2295), 'random.choice', 'random.choice', (['RESOURCES'], {}), '(RESOURCES)\n', (2284, 2295), False, 'import random\n'), ((3313, 3337), 'random.choice', 'random.choice', (['RESOURCES'], {}), '(RESOURCES)\n', (3326, 3337), False, 'import random\n')]
DaniMarts/bayesrace
bayes_race/pp/__init__.py
3d0d2b26dac2e33ad7e38513304cfb259abe351c
from bayes_race.pp.pure_pursuit import purePursuit
[]
utiasASRL/pysteam
pysteam/evaluator/vector_space_error_eval.py
c0c8809ee2a5e1dab5ce7f9e5ff9de91138ce68b
from typing import Optional

import numpy as np

from . import Evaluator
from ..state import VectorSpaceStateVar


class VectorSpaceErrorEval(Evaluator):
    """Error evaluator for a measured vector space state variable"""

    def __init__(self, meas: np.ndarray, state_vec: VectorSpaceStateVar) -> None:
        super().__init__()
        self._meas: np.ndarray = meas
        self._state_vec: VectorSpaceStateVar = state_vec

    def is_active(self):
        return not self._state_vec.locked

    def evaluate(self, lhs: Optional[np.ndarray] = None):
        error = self._meas - self._state_vec.value

        if lhs is None:
            return error

        assert lhs.shape[-1] == self._state_vec.perturb_dim

        jacs = dict()
        if not self._state_vec.locked:
            jacs = {self._state_vec.key: -lhs}

        return error, jacs
[]
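
To illustrate the contract VectorSpaceErrorEval relies on, here is a sketch with a hypothetical stand-in exposing only the attributes the evaluator actually reads (value, locked, perturb_dim, key); in real use a VectorSpaceStateVar from ..state plays this role:

import numpy as np

class FakeStateVar:
    """Stand-in with the attributes VectorSpaceErrorEval reads."""
    def __init__(self, value):
        self.value = value                 # current estimate
        self.locked = False                # unlocked -> evaluator is active
        self.perturb_dim = value.shape[0]  # dimension checked against lhs
        self.key = 0                       # hashable key for the jacobian dict

state = FakeStateVar(np.array([1.0, 2.0, 3.0]))
evaluator = VectorSpaceErrorEval(meas=np.array([1.5, 2.0, 2.5]), state_vec=state)
error = evaluator.evaluate()                 # meas - value
error, jacs = evaluator.evaluate(np.eye(3))    # also returns {key: -lhs}
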
mwussow/pytorch_geometric
torch_geometric/nn/unpool/__init__.py
01c68f9b58c94d9efd1f6e39b9c85177aae521bb
from .knn_interpolate import knn_interpolate

__all__ = [
    'knn_interpolate',
]
[]
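
The re-exported knn_interpolate interpolates features from one point set onto another; a short sketch, assuming the usual PyTorch Geometric signature knn_interpolate(x, pos_x, pos_y, batch_x=None, batch_y=None, k=3):

import torch
from torch_geometric.nn import knn_interpolate

x = torch.randn(10, 16)     # features at 10 source points
pos_x = torch.rand(10, 3)    # source positions
pos_y = torch.rand(25, 3)    # target positions to interpolate onto
out = knn_interpolate(x, pos_x, pos_y, k=3)  # -> shape [25, 16]
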
namuyan/bc4py
bc4py/bip32/utils.py
6484d356096261d0d57e9e1f5ffeae1f9a9865f3
from bc4py_extension import PyAddress
import hashlib


def is_address(ck: PyAddress, hrp, ver):
    """check bech32 format and version"""
    try:
        if ck.hrp != hrp:
            return False
        if ck.version != ver:
            return False
    except ValueError:
        return False
    return True


def get_address(pk, hrp, ver) -> PyAddress:
    """get address from public key"""
    identifier = hashlib.new('ripemd160', hashlib.sha256(pk).digest()).digest()
    return PyAddress.from_param(hrp, ver, identifier)


def convert_address(ck: PyAddress, hrp, ver) -> PyAddress:
    """convert address's version"""
    return PyAddress.from_param(hrp, ver, ck.identifier())


def dummy_address(dummy_identifier) -> PyAddress:
    assert len(dummy_identifier) == 20
    return PyAddress.from_param('dummy', 0, dummy_identifier)


__all__ = [
    "is_address",
    "get_address",
    "convert_address",
    "dummy_address",
]
[((488, 530), 'bc4py_extension.PyAddress.from_param', 'PyAddress.from_param', (['hrp', 'ver', 'identifier'], {}), '(hrp, ver, identifier)\n', (508, 530), False, 'from bc4py_extension import PyAddress\n'), ((789, 839), 'bc4py_extension.PyAddress.from_param', 'PyAddress.from_param', (['"""dummy"""', '(0)', 'dummy_identifier'], {}), "('dummy', 0, dummy_identifier)\n", (809, 839), False, 'from bc4py_extension import PyAddress\n'), ((439, 457), 'hashlib.sha256', 'hashlib.sha256', (['pk'], {}), '(pk)\n', (453, 457), False, 'import hashlib\n')]
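
A quick sketch of the helpers above (requires the compiled bc4py_extension; the hrp/ver values are illustrative and assume dummy_address yields hrp 'dummy' and version 0, per its from_param call):

import os

addr = dummy_address(os.urandom(20))       # any 20-byte identifier works
assert is_address(addr, hrp='dummy', ver=0)   # matches the values used by dummy_address
other = convert_address(addr, hrp='test', ver=0)  # same identifier, different prefix
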
LaborBerlin/cubi-tk
cubi_tk/snappy/kickoff.py
4aa5306c547c38eb41d5623ff6e4bace828f85b1
"""``cubi-tk snappy kickoff``: kickoff SNAPPY pipeline.""" import argparse import os import subprocess import typing from logzero import logger from toposort import toposort from . import common from cubi_tk.exceptions import ParseOutputException def run( args, _parser: argparse.ArgumentParser, _subparser: argparse.ArgumentParser ) -> typing.Optional[int]: logger.info("Try to find SNAPPY pipeline directory...") try: path = common.find_snappy_root_dir(args.path or os.getcwd(), common.DEPENDENCIES.keys()) except common.CouldNotFindPipelineRoot: return 1 # TODO: this assumes standard naming which is a limitation... logger.info("Looking for pipeline directories (assuming standard naming)...") logger.debug("Looking in %s", path) step_set = {name for name in common.DEPENDENCIES if (path / name).exists()} steps: typing.List[str] = [] for names in toposort({k: set(v) for k, v in common.DEPENDENCIES.items()}): steps += [name for name in names if name in step_set] logger.info("Will run the steps: %s", ", ".join(steps)) logger.info("Submitting with sbatch...") jids: typing.Dict[str, str] = {} for step in steps: dep_jids = [jids[dep] for dep in common.DEPENDENCIES[step] if dep in jids] cmd = ["sbatch"] if dep_jids: cmd += ["--dependency", "afterok:%s" % ":".join(map(str, dep_jids))] cmd += ["pipeline_job.sh"] logger.info("Submitting step %s: %s", step, " ".join(cmd)) if args.dry_run: jid = "<%s>" % step else: stdout_raw = subprocess.check_output(cmd, cwd=str(path / step), timeout=args.timeout) stdout = stdout_raw.decode("utf-8") if not stdout.startswith("Submitted batch job "): raise ParseOutputException("Did not understand sbatch output: %s" % stdout) jid = stdout.split()[-1] logger.info(" => JID: %s", jid) jids[step] = jid return None def setup_argparse(parser: argparse.ArgumentParser) -> None: """Setup argument parser for ``cubi-tk snappy pull-sheet``.""" parser.add_argument("--hidden-cmd", dest="snappy_cmd", default=run, help=argparse.SUPPRESS) parser.add_argument( "--dry-run", "-n", default=False, action="store_true", help="Perform dry-run, do not do anything.", ) parser.add_argument( "--timeout", default=10, type=int, help="Number of seconds to wait for commands." ) parser.add_argument( "path", nargs="?", help="Path into SNAPPY directory (below a directory containing .snappy_pipeline).", )
[((373, 428), 'logzero.logger.info', 'logger.info', (['"""Try to find SNAPPY pipeline directory..."""'], {}), "('Try to find SNAPPY pipeline directory...')\n", (384, 428), False, 'from logzero import logger\n'), ((667, 744), 'logzero.logger.info', 'logger.info', (['"""Looking for pipeline directories (assuming standard naming)..."""'], {}), "('Looking for pipeline directories (assuming standard naming)...')\n", (678, 744), False, 'from logzero import logger\n'), ((749, 784), 'logzero.logger.debug', 'logger.debug', (['"""Looking in %s"""', 'path'], {}), "('Looking in %s', path)\n", (761, 784), False, 'from logzero import logger\n'), ((1105, 1145), 'logzero.logger.info', 'logger.info', (['"""Submitting with sbatch..."""'], {}), "('Submitting with sbatch...')\n", (1116, 1145), False, 'from logzero import logger\n'), ((1934, 1965), 'logzero.logger.info', 'logger.info', (['""" => JID: %s"""', 'jid'], {}), "(' => JID: %s', jid)\n", (1945, 1965), False, 'from logzero import logger\n'), ((494, 505), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (503, 505), False, 'import os\n'), ((1819, 1888), 'cubi_tk.exceptions.ParseOutputException', 'ParseOutputException', (["('Did not understand sbatch output: %s' % stdout)"], {}), "('Did not understand sbatch output: %s' % stdout)\n", (1839, 1888), False, 'from cubi_tk.exceptions import ParseOutputException\n')]
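
A dry-run invocation sketch, driving the module through its own argparse setup; the project path is hypothetical and must point into a real SNAPPY checkout for root detection to succeed:

import argparse
from cubi_tk.snappy import kickoff  # import path assumed from the repo_path above

parser = argparse.ArgumentParser()
kickoff.setup_argparse(parser)
args = parser.parse_args(["--dry-run", "/path/to/snappy_project"])  # hypothetical path
kickoff.run(args, parser, parser)  # logs planned sbatch submissions, submits nothing
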
RajatRasal/devito
tests/test_autotuner.py
162abb6b318e77eaa4e8f719047327c45782056f
from __future__ import absolute_import

from functools import reduce
from operator import mul
try:
    from StringIO import StringIO
except ImportError:
    # Python3 compatibility
    from io import StringIO

import pytest
from conftest import skipif_yask

import numpy as np

from devito import Grid, Function, TimeFunction, Eq, Operator, configuration, silencio
from devito.logger import logger, logging


@silencio(log_level='DEBUG')
@skipif_yask
@pytest.mark.parametrize("shape,expected", [
    ((30, 30), 17),
    ((30, 30, 30), 21)
])
def test_at_is_actually_working(shape, expected):
    """
    Check that autotuning is actually running when switched on,
    in both 2D and 3D operators.
    """
    grid = Grid(shape=shape)

    buffer = StringIO()
    temporary_handler = logging.StreamHandler(buffer)
    logger.addHandler(temporary_handler)

    infield = Function(name='infield', grid=grid)
    infield.data[:] = np.arange(reduce(mul, shape), dtype=np.int32).reshape(shape)
    outfield = Function(name='outfield', grid=grid)
    stencil = Eq(outfield.indexify(), outfield.indexify() + infield.indexify()*3.0)
    op = Operator(stencil, dle=('blocking', {'blockinner': True, 'blockalways': True}))

    # Expected 3 AT attempts for the given shape
    op(infield=infield, outfield=outfield, autotune=True)
    out = [i for i in buffer.getvalue().split('\n') if 'took' in i]
    assert len(out) == 4

    # Now try the same with aggressive autotuning, which tries 9 more cases
    configuration['autotuning'] = 'aggressive'
    op(infield=infield, outfield=outfield, autotune=True)
    out = [i for i in buffer.getvalue().split('\n') if 'took' in i]
    assert len(out) == expected
    configuration['autotuning'] = configuration._defaults['autotuning']

    logger.removeHandler(temporary_handler)

    temporary_handler.flush()
    temporary_handler.close()
    buffer.flush()
    buffer.close()


@silencio(log_level='DEBUG')
@skipif_yask
def test_timesteps_per_at_run():
    """
    Check that each autotuning run (ie with a given block shape) takes
    ``autotuning.core.options['at_squeezer']`` timesteps, for an operator
    performing the increment ``a[t + timeorder, ...] = f(a[t, ...], ...)``.
    """
    from devito.core.autotuning import options

    buffer = StringIO()
    temporary_handler = logging.StreamHandler(buffer)
    logger.addHandler(temporary_handler)

    shape = (30, 30, 30)
    grid = Grid(shape=shape)
    x, y, z = grid.dimensions
    t = grid.stepping_dim

    # Function
    infield = Function(name='infield', grid=grid)
    infield.data[:] = np.arange(reduce(mul, shape), dtype=np.int32).reshape(shape)
    outfield = Function(name='outfield', grid=grid)
    stencil = Eq(outfield.indexify(), outfield.indexify() + infield.indexify()*3.0)
    op = Operator(stencil, dle=('blocking', {'blockalways': True}))
    op(infield=infield, outfield=outfield, autotune=True)
    out = [i for i in buffer.getvalue().split('\n') if 'took' in i]
    assert len(out) == 4
    assert all('in 1 timesteps' in i for i in out)
    buffer.truncate(0)

    # TimeFunction with increasing time order; increasing the time order
    # shouldn't affect how many iterations the autotuner is gonna run
    for to in [1, 2, 4]:
        infield = TimeFunction(name='infield', grid=grid, time_order=to)
        infield.data[:] = np.arange(reduce(mul, infield.shape),
                                    dtype=np.int32).reshape(infield.shape)
        outfield = TimeFunction(name='outfield', grid=grid, time_order=to)
        stencil = Eq(outfield.indexed[t + to, x, y, z],
                     outfield.indexify() + infield.indexify()*3.0)
        op = Operator(stencil, dle=('blocking', {'blockalways': True}))
        op(infield=infield, outfield=outfield, t=2, autotune=True)
        out = [i for i in buffer.getvalue().split('\n') if 'took' in i]
        assert len(out) == 4
        assert all('in %d timesteps' % options['at_squeezer'] in i for i in out)
        buffer.truncate(0)

    logger.removeHandler(temporary_handler)

    temporary_handler.flush()
    temporary_handler.close()
    buffer.flush()
    buffer.close()
[((410, 437), 'devito.silencio', 'silencio', ([], {'log_level': '"""DEBUG"""'}), "(log_level='DEBUG')\n", (418, 437), False, 'from devito import Grid, Function, TimeFunction, Eq, Operator, configuration, silencio\n'), ((452, 531), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""shape,expected"""', '[((30, 30), 17), ((30, 30, 30), 21)]'], {}), "('shape,expected', [((30, 30), 17), ((30, 30, 30), 21)])\n", (475, 531), False, 'import pytest\n'), ((1914, 1941), 'devito.silencio', 'silencio', ([], {'log_level': '"""DEBUG"""'}), "(log_level='DEBUG')\n", (1922, 1941), False, 'from devito import Grid, Function, TimeFunction, Eq, Operator, configuration, silencio\n'), ((716, 733), 'devito.Grid', 'Grid', ([], {'shape': 'shape'}), '(shape=shape)\n', (720, 733), False, 'from devito import Grid, Function, TimeFunction, Eq, Operator, configuration, silencio\n'), ((748, 758), 'io.StringIO', 'StringIO', ([], {}), '()\n', (756, 758), False, 'from io import StringIO\n'), ((783, 812), 'devito.logger.logging.StreamHandler', 'logging.StreamHandler', (['buffer'], {}), '(buffer)\n', (804, 812), False, 'from devito.logger import logger, logging\n'), ((817, 853), 'devito.logger.logger.addHandler', 'logger.addHandler', (['temporary_handler'], {}), '(temporary_handler)\n', (834, 853), False, 'from devito.logger import logger, logging\n'), ((869, 904), 'devito.Function', 'Function', ([], {'name': '"""infield"""', 'grid': 'grid'}), "(name='infield', grid=grid)\n", (877, 904), False, 'from devito import Grid, Function, TimeFunction, Eq, Operator, configuration, silencio\n'), ((1003, 1039), 'devito.Function', 'Function', ([], {'name': '"""outfield"""', 'grid': 'grid'}), "(name='outfield', grid=grid)\n", (1011, 1039), False, 'from devito import Grid, Function, TimeFunction, Eq, Operator, configuration, silencio\n'), ((1133, 1211), 'devito.Operator', 'Operator', (['stencil'], {'dle': "('blocking', {'blockinner': True, 'blockalways': True})"}), "(stencil, dle=('blocking', {'blockinner': True, 'blockalways': True}))\n", (1141, 1211), False, 'from devito import Grid, Function, TimeFunction, Eq, Operator, configuration, silencio\n'), ((1772, 1811), 'devito.logger.logger.removeHandler', 'logger.removeHandler', (['temporary_handler'], {}), '(temporary_handler)\n', (1792, 1811), False, 'from devito.logger import logger, logging\n'), ((2286, 2296), 'io.StringIO', 'StringIO', ([], {}), '()\n', (2294, 2296), False, 'from io import StringIO\n'), ((2321, 2350), 'devito.logger.logging.StreamHandler', 'logging.StreamHandler', (['buffer'], {}), '(buffer)\n', (2342, 2350), False, 'from devito.logger import logger, logging\n'), ((2355, 2391), 'devito.logger.logger.addHandler', 'logger.addHandler', (['temporary_handler'], {}), '(temporary_handler)\n', (2372, 2391), False, 'from devito.logger import logger, logging\n'), ((2429, 2446), 'devito.Grid', 'Grid', ([], {'shape': 'shape'}), '(shape=shape)\n', (2433, 2446), False, 'from devito import Grid, Function, TimeFunction, Eq, Operator, configuration, silencio\n'), ((2533, 2568), 'devito.Function', 'Function', ([], {'name': '"""infield"""', 'grid': 'grid'}), "(name='infield', grid=grid)\n", (2541, 2568), False, 'from devito import Grid, Function, TimeFunction, Eq, Operator, configuration, silencio\n'), ((2667, 2703), 'devito.Function', 'Function', ([], {'name': '"""outfield"""', 'grid': 'grid'}), "(name='outfield', grid=grid)\n", (2675, 2703), False, 'from devito import Grid, Function, TimeFunction, Eq, Operator, configuration, silencio\n'), ((2797, 2855), 'devito.Operator', 'Operator', (['stencil'], {'dle': "('blocking', {'blockalways': True})"}), "(stencil, dle=('blocking', {'blockalways': True}))\n", (2805, 2855), False, 'from devito import Grid, Function, TimeFunction, Eq, Operator, configuration, silencio\n'), ((4013, 4052), 'devito.logger.logger.removeHandler', 'logger.removeHandler', (['temporary_handler'], {}), '(temporary_handler)\n', (4033, 4052), False, 'from devito.logger import logger, logging\n'), ((3268, 3322), 'devito.TimeFunction', 'TimeFunction', ([], {'name': '"""infield"""', 'grid': 'grid', 'time_order': 'to'}), "(name='infield', grid=grid, time_order=to)\n", (3280, 3322), False, 'from devito import Grid, Function, TimeFunction, Eq, Operator, configuration, silencio\n'), ((3481, 3536), 'devito.TimeFunction', 'TimeFunction', ([], {'name': '"""outfield"""', 'grid': 'grid', 'time_order': 'to'}), "(name='outfield', grid=grid, time_order=to)\n", (3493, 3536), False, 'from devito import Grid, Function, TimeFunction, Eq, Operator, configuration, silencio\n'), ((3673, 3731), 'devito.Operator', 'Operator', (['stencil'], {'dle': "('blocking', {'blockalways': True})"}), "(stencil, dle=('blocking', {'blockalways': True}))\n", (3681, 3731), False, 'from devito import Grid, Function, TimeFunction, Eq, Operator, configuration, silencio\n'), ((937, 955), 'functools.reduce', 'reduce', (['mul', 'shape'], {}), '(mul, shape)\n', (943, 955), False, 'from functools import reduce\n'), ((2601, 2619), 'functools.reduce', 'reduce', (['mul', 'shape'], {}), '(mul, shape)\n', (2607, 2619), False, 'from functools import reduce\n'), ((3359, 3385), 'functools.reduce', 'reduce', (['mul', 'infield.shape'], {}), '(mul, infield.shape)\n', (3365, 3385), False, 'from functools import reduce\n')]
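
These tests can be run directly with pytest; a minimal sketch (the -k filter name comes from the first test above, and the file path from the repo_path):

import pytest

# Equivalent to: pytest tests/test_autotuner.py -k at_is_actually_working
pytest.main(["tests/test_autotuner.py", "-k", "at_is_actually_working"])
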
timctho/detectron2-chargrid
projects/CharGrid/data/bizcard2coco.py
547479c88ad7d1de2348377706167a84d024a622
from data.data_reader import BIZCARD_LABEL_MAP, BizcardDataParser
import argparse
from pathlib import Path
import os
import json
import cv2
import numpy as np


def convert_bizcard_to_coco_format(image_dir, json_dir, id_list, out_dir, out_name):
    coco_json = {}
    images = []
    annotations = []
    categories = []

    for _, key in enumerate(BIZCARD_LABEL_MAP.keys()):
        categories.append({
            'id': BIZCARD_LABEL_MAP[key],
            'name': key
        })

    with open(id_list) as fp:
        ids = fp.readlines()

    for idx, file_id in enumerate(ids):
        file_id = Path(file_id.strip())
        print(idx, file_id)
        if not (image_dir / file_id).with_suffix('.jpg').exists():
            file_id = file_id.with_suffix('.jpeg')
        else:
            file_id = file_id.with_suffix('.jpg')

        height, width = cv2.imread(str(image_dir / file_id)).shape[:2]
        images.append({
            'file_name': str(file_id),
            'id': idx,
            'height': height,
            'width': width
        })

        try:
            gt = BizcardDataParser.parse_data(str((json_dir / file_id).with_suffix('.json')), str(image_dir / file_id))[
                0]
            for word in gt.words:
                anno = {
                    'id': len(annotations),
                    'image_id': idx,
                    'bbox': [word.bbox.min_x, word.bbox.min_y, (word.bbox.max_x - word.bbox.min_x), (word.bbox.max_y - word.bbox.min_y)],
                    'segmentation': [word.bbox.val],
                    'category_id': word.label,
                    'iscrowd': 0,
                    'area': cv2.contourArea(np.reshape(word.bbox.val, [-1, 2]).astype(np.float32))
                }
                annotations.append(anno)
        except Exception as e:
            print(e)
            print(str(image_dir / file_id))

    coco_json['images'] = images
    coco_json['annotations'] = annotations
    coco_json['categories'] = categories

    with open(Path(out_dir, out_name), 'w') as f:
        json.dump(coco_json, f)


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--img_dir', type=str)
    parser.add_argument('--gt_dir', type=str)
    parser.add_argument('--data_list', type=str)
    parser.add_argument('--out_dir', type=str)
    parser.add_argument('--out_name', type=str)
    args = parser.parse_args()

    if not Path(args.out_dir).exists():
        Path(args.out_dir).mkdir()

    convert_bizcard_to_coco_format(
        Path(args.img_dir), Path(args.gt_dir), args.data_list, args.out_dir, args.out_name)
[((2152, 2177), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2175, 2177), False, 'import argparse\n'), ((351, 375), 'data.data_reader.BIZCARD_LABEL_MAP.keys', 'BIZCARD_LABEL_MAP.keys', ([], {}), '()\n', (373, 375), False, 'from data.data_reader import BIZCARD_LABEL_MAP, BizcardDataParser\n'), ((2086, 2109), 'json.dump', 'json.dump', (['coco_json', 'f'], {}), '(coco_json, f)\n', (2095, 2109), False, 'import json\n'), ((2567, 2585), 'pathlib.Path', 'Path', (['args.img_dir'], {}), '(args.img_dir)\n', (2571, 2585), False, 'from pathlib import Path\n'), ((2595, 2612), 'pathlib.Path', 'Path', (['args.gt_dir'], {}), '(args.gt_dir)\n', (2599, 2612), False, 'from pathlib import Path\n'), ((2042, 2065), 'pathlib.Path', 'Path', (['out_dir', 'out_name'], {}), '(out_dir, out_name)\n', (2046, 2065), False, 'from pathlib import Path\n'), ((2458, 2476), 'pathlib.Path', 'Path', (['args.out_dir'], {}), '(args.out_dir)\n', (2462, 2476), False, 'from pathlib import Path\n'), ((2495, 2513), 'pathlib.Path', 'Path', (['args.out_dir'], {}), '(args.out_dir)\n', (2499, 2513), False, 'from pathlib import Path\n'), ((1700, 1734), 'numpy.reshape', 'np.reshape', (['word.bbox.val', '[-1, 2]'], {}), '(word.bbox.val, [-1, 2])\n', (1710, 1734), True, 'import numpy as np\n')]
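
A direct-call sketch of the converter above with hypothetical paths; the script expects a directory of .jpg/.jpeg images, per-image .json ground truth, and a text file listing file ids one per line:

from pathlib import Path

# All paths below are hypothetical; the output directory must already exist
# (the __main__ block creates it, a direct call does not).
convert_bizcard_to_coco_format(
    image_dir=Path("data/images"),
    json_dir=Path("data/annotations"),
    id_list="data/train_ids.txt",
    out_dir="out",
    out_name="bizcard_train_coco.json",
)
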
m09/deckz
deckz/cli/run.py
0f97ef2a43c2c714ac18173a4fe3266cccba31e2
from pathlib import Path
from typing import List, Optional

from typer import Argument

from deckz.cli import app
from deckz.paths import Paths
from deckz.running import run as running_run


@app.command()
def run(
    targets: Optional[List[str]] = Argument(None),
    handout: bool = True,
    presentation: bool = True,
    print: bool = True,
    deck_path: Path = Path("."),
) -> None:
    """Compile main targets."""
    paths = Paths.from_defaults(deck_path)
    running_run(
        paths=paths,
        build_handout=handout,
        build_presentation=presentation,
        build_print=print,
        target_whitelist=targets,
    )
[((192, 205), 'deckz.cli.app.command', 'app.command', ([], {}), '()\n', (203, 205), False, 'from deckz.cli import app\n'), ((250, 264), 'typer.Argument', 'Argument', (['None'], {}), '(None)\n', (258, 264), False, 'from typer import Argument\n'), ((369, 378), 'pathlib.Path', 'Path', (['"""."""'], {}), "('.')\n", (373, 378), False, 'from pathlib import Path\n'), ((435, 465), 'deckz.paths.Paths.from_defaults', 'Paths.from_defaults', (['deck_path'], {}), '(deck_path)\n', (454, 465), False, 'from deckz.paths import Paths\n'), ((470, 600), 'deckz.running.run', 'running_run', ([], {'paths': 'paths', 'build_handout': 'handout', 'build_presentation': 'presentation', 'build_print': 'print', 'target_whitelist': 'targets'}), '(paths=paths, build_handout=handout, build_presentation=\n presentation, build_print=print, target_whitelist=targets)\n', (481, 600), True, 'from deckz.running import run as running_run\n')]
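
A sketch of exercising the command through Typer's test runner, assuming deckz.cli exposes several commands so that run is addressed by name; Typer derives --print/--no-print style switches from the boolean defaults, and the target names here are hypothetical:

from typer.testing import CliRunner
from deckz.cli import app

runner = CliRunner()
result = runner.invoke(app, ["run", "intro", "outro", "--no-print"])
print(result.output)
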
dlangille/pgeu-system
postgresqleu/confreg/templatetags/miscutil.py
3f1910010063bab118e94a55ed757b23f1d36bf5
from django import template

register = template.Library()


@register.filter(name='isboolean')
def isboolean(value):
    return isinstance(value, bool)


@register.filter(name='vartypename')
def vartypename(value):
    return type(value).__name__
[((40, 58), 'django.template.Library', 'template.Library', ([], {}), '()\n', (56, 58), False, 'from django import template\n')]
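
The two filters are plain functions, so they can be exercised directly without rendering a template (in a template they would be used as {% load miscutil %} ... {{ value|isboolean }}):

assert isboolean(True) is True
assert isboolean("yes") is False
assert vartypename(42) == "int"
assert vartypename([]) == "list"
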
rchampa/chat-server
chat.py
34b5897e90b580754ad95b36bf7f23ac9baf3175
import asyncio
import contextvars
import aioredis
import uvloop
from aioredis import Redis
from fastapi import FastAPI
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.staticfiles import StaticFiles
from RLog import rprint
from routers import apirest, websockets

REDIS_HOST = 'redis'
REDIS_PORT = 6379
PORT = 9080
HOST = "0.0.0.0"

cvar_redis = contextvars.ContextVar('redis', default=None)


class CustomHeaderMiddleware(BaseHTTPMiddleware):
    def __init__(self, app, header_value='Example'):
        rprint('__init__')
        super().__init__(app)
        self.header_value = header_value

    async def dispatch(self, request, call_next):
        response = await call_next(request)
        response.headers['Custom'] = self.header_value
        return response


# uvloop is written in Cython and is built on top of libuv http://magic.io/blog/uvloop-blazing-fast-python-networking/
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
app = FastAPI()
app.mount("/static", StaticFiles(directory="static"), name="static")
app.add_middleware(CustomHeaderMiddleware)
app.include_router(apirest.router)
app.include_router(websockets.router)


@app.on_event("startup")
async def handle_startup() -> None:
    rprint("startup")
    try:
        pool = await aioredis.create_redis_pool((REDIS_HOST, REDIS_PORT), encoding='utf-8', maxsize=20)
        cvar_redis.set(pool)
        rprint("Connected to Redis on ", REDIS_HOST, REDIS_PORT)
    except ConnectionRefusedError as e:
        rprint('cannot connect to redis on:', REDIS_HOST, REDIS_PORT)
        return


@app.on_event("shutdown")
async def handle_shutdown() -> None:
    if cvar_redis.get() is not None:
        redis: Redis = cvar_redis.get()
        redis.close()
        await redis.wait_closed()
        rprint("closed connection Redis on ", REDIS_HOST, REDIS_PORT)
    else:
        rprint("ERROR: cvar_redis.get() devuelve NONE")


if __name__ == "__main__":
    import uvicorn
    rprint("Starting app")
    rprint(dir(app))
    rprint(app.url_path_for('websocket_endpoint'))
    uvicorn.run('chat:app', host=HOST, port=PORT, log_level='info', reload=True)  # , uds='uvicorn.sock')
[((368, 413), 'contextvars.ContextVar', 'contextvars.ContextVar', (['"""redis"""'], {'default': 'None'}), "('redis', default=None)\n", (390, 413), False, 'import contextvars\n'), ((975, 984), 'fastapi.FastAPI', 'FastAPI', ([], {}), '()\n', (982, 984), False, 'from fastapi import FastAPI\n'), ((943, 967), 'uvloop.EventLoopPolicy', 'uvloop.EventLoopPolicy', ([], {}), '()\n', (965, 967), False, 'import uvloop\n'), ((1006, 1037), 'starlette.staticfiles.StaticFiles', 'StaticFiles', ([], {'directory': '"""static"""'}), "(directory='static')\n", (1017, 1037), False, 'from starlette.staticfiles import StaticFiles\n'), ((1237, 1254), 'RLog.rprint', 'rprint', (['"""startup"""'], {}), "('startup')\n", (1243, 1254), False, 'from RLog import rprint\n'), ((1973, 1995), 'RLog.rprint', 'rprint', (['"""Starting app"""'], {}), "('Starting app')\n", (1979, 1995), False, 'from RLog import rprint\n'), ((2072, 2148), 'uvicorn.run', 'uvicorn.run', (['"""chat:app"""'], {'host': 'HOST', 'port': 'PORT', 'log_level': '"""info"""', 'reload': '(True)'}), "('chat:app', host=HOST, port=PORT, log_level='info', reload=True)\n", (2083, 2148), False, 'import uvicorn\n'), ((527, 545), 'RLog.rprint', 'rprint', (['"""__init__"""'], {}), "('__init__')\n", (533, 545), False, 'from RLog import rprint\n'), ((1405, 1461), 'RLog.rprint', 'rprint', (['"""Connected to Redis on """', 'REDIS_HOST', 'REDIS_PORT'], {}), "('Connected to Redis on ', REDIS_HOST, REDIS_PORT)\n", (1411, 1461), False, 'from RLog import rprint\n'), ((1793, 1854), 'RLog.rprint', 'rprint', (['"""closed connection Redis on """', 'REDIS_HOST', 'REDIS_PORT'], {}), "('closed connection Redis on ', REDIS_HOST, REDIS_PORT)\n", (1799, 1854), False, 'from RLog import rprint\n'), ((1873, 1920), 'RLog.rprint', 'rprint', (['"""ERROR: cvar_redis.get() devuelve NONE"""'], {}), "('ERROR: cvar_redis.get() devuelve NONE')\n", (1879, 1920), False, 'from RLog import rprint\n'), ((1285, 1371), 'aioredis.create_redis_pool', 'aioredis.create_redis_pool', (['(REDIS_HOST, REDIS_PORT)'], {'encoding': '"""utf-8"""', 'maxsize': '(20)'}), "((REDIS_HOST, REDIS_PORT), encoding='utf-8',\n maxsize=20)\n", (1311, 1371), False, 'import aioredis\n'), ((1510, 1571), 'RLog.rprint', 'rprint', (['"""cannot connect to redis on:"""', 'REDIS_HOST', 'REDIS_PORT'], {}), "('cannot connect to redis on:', REDIS_HOST, REDIS_PORT)\n", (1516, 1571), False, 'from RLog import rprint\n')]
abel-bernabeu/facecompressor
cli.py
9322f4e3d3f2787dc9dec2fad6b3f1995d052077
import argparse
import autoencoder


def addTrainablesArg(parser):
    parser.add_argument('--model', dest='model', help='Trained model', default='model.pt')


def addExchangeArg(parser):
    parser.add_argument('--exchange', dest='exchange', help='File with exchanged data', required=True)


parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(dest="action")

encode_parser = subparsers.add_parser('encode')
addTrainablesArg(encode_parser)
encode_parser.add_argument('--input', dest='input', help='Input image file name', required=True)
addExchangeArg(encode_parser)

decode_parser = subparsers.add_parser('decode')
addTrainablesArg(decode_parser)
addExchangeArg(decode_parser)
decode_parser.add_argument('--output', dest='output', help='Output image file name', required=True)

opts = parser.parse_args()

if opts.action == 'encode':
    autoencoder.encode(opts.model, opts.input, opts.exchange)
elif opts.action == 'decode':
    autoencoder.decode(opts.model, opts.exchange, opts.output)
[((301, 326), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (324, 326), False, 'import argparse\n'), ((858, 915), 'autoencoder.encode', 'autoencoder.encode', (['opts.model', 'opts.input', 'opts.exchange'], {}), '(opts.model, opts.input, opts.exchange)\n', (876, 915), False, 'import autoencoder\n'), ((950, 1008), 'autoencoder.decode', 'autoencoder.decode', (['opts.model', 'opts.exchange', 'opts.output'], {}), '(opts.model, opts.exchange, opts.output)\n', (968, 1008), False, 'import autoencoder\n')]
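
An invocation sketch for the CLI above; the image and exchange file names are hypothetical, and encode/decode pair through the exchange file produced by the first call:

import subprocess
import sys

subprocess.run([sys.executable, "cli.py", "encode", "--model", "model.pt",
                "--input", "face.png", "--exchange", "face.bin"], check=True)
subprocess.run([sys.executable, "cli.py", "decode", "--model", "model.pt",
                "--exchange", "face.bin", "--output", "face_out.png"], check=True)
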