| repo_name (string, 5-100 chars) | path (string, 4-299 chars) | copies (string, 990 classes) | size (string, 4-7 chars) | content (string, 666-1.03M chars) | license (string, 15 classes) | hash (int64, -9,223,351,895,964,839,000 to 9,223,297,778B) | line_mean (float64, 3.17-100) | line_max (int64, 7-1k) | alpha_frac (float64, 0.25-0.98) | autogenerated (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|
| 
	indhub/mxnet | 
	tests/python/unittest/test_gluon_data_vision.py | 
	7 | 
	3323 | 
	# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import mxnet as mx
import mxnet.ndarray as nd
import numpy as np
from mxnet import gluon
from mxnet.gluon.data.vision import transforms
from mxnet.test_utils import assert_almost_equal
from mxnet.test_utils import almost_equal
from common import setup_module, with_seed, teardown
@with_seed()
def test_to_tensor():
    data_in = np.random.uniform(0, 255, (300, 300, 3)).astype(dtype=np.uint8)
    out_nd = transforms.ToTensor()(nd.array(data_in, dtype='uint8'))
    assert_almost_equal(out_nd.asnumpy(), np.transpose(
        data_in.astype(dtype=np.float32) / 255.0, (2, 0, 1)))
@with_seed()
def test_normalize():
    data_in = np.random.uniform(0, 255, (300, 300, 3)).astype(dtype=np.uint8)
    data_in = transforms.ToTensor()(nd.array(data_in, dtype='uint8'))
    out_nd = transforms.Normalize(mean=(0, 1, 2), std=(3, 2, 1))(data_in)
    data_expected = data_in.asnumpy()
    data_expected[0] = data_expected[0] / 3.0
    data_expected[1] = (data_expected[1] - 1.0) / 2.0
    data_expected[2] = data_expected[2] - 2.0
    assert_almost_equal(data_expected, out_nd.asnumpy())
@with_seed()
def test_flip_left_right():
    data_in = np.random.uniform(0, 255, (300, 300, 3)).astype(dtype=np.uint8)
    flip_in = data_in[:, ::-1, :]
    data_trans = nd.image.flip_left_right(nd.array(data_in, dtype='uint8'))
    assert_almost_equal(flip_in, data_trans.asnumpy())
@with_seed()
def test_flip_top_bottom():
    data_in = np.random.uniform(0, 255, (300, 300, 3)).astype(dtype=np.uint8)
    flip_in = data_in[::-1, :, :]
    data_trans = nd.image.flip_top_bottom(nd.array(data_in, dtype='uint8'))
    assert_almost_equal(flip_in, data_trans.asnumpy())
@with_seed()
def test_transformer():
    from mxnet.gluon.data.vision import transforms
    transform = transforms.Compose([
        transforms.Resize(300),
        transforms.Resize(300, keep_ratio=True),
        transforms.CenterCrop(256),
        transforms.RandomResizedCrop(224),
        transforms.RandomFlipLeftRight(),
        transforms.RandomColorJitter(0.1, 0.1, 0.1, 0.1),
        transforms.RandomBrightness(0.1),
        transforms.RandomContrast(0.1),
        transforms.RandomSaturation(0.1),
        transforms.RandomHue(0.1),
        transforms.RandomLighting(0.1),
        transforms.ToTensor(),
        transforms.Normalize([0, 0, 0], [1, 1, 1])])
    transform(mx.nd.ones((245, 480, 3), dtype='uint8')).wait_to_read()
if __name__ == '__main__':
    import nose
    nose.runmodule()
 | 
	apache-2.0 | 942,687,604,895,165,400 | 36.337079 | 77 | 0.676497 | false | 
| 
	DavidNorman/tensorflow | 
	tensorflow/python/distribute/collective_all_reduce_strategy.py | 
	1 | 
	23809 | 
	# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Class CollectiveAllReduceStrategy implementing DistributionStrategy."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
from tensorflow.core.protobuf import config_pb2
from tensorflow.core.protobuf import rewriter_config_pb2
from tensorflow.core.protobuf import tensorflow_server_pb2
from tensorflow.python.distribute import cross_device_ops as cross_device_ops_lib
from tensorflow.python.distribute import cross_device_utils
from tensorflow.python.distribute import device_util
from tensorflow.python.distribute import distribute_lib
from tensorflow.python.distribute import input_lib
from tensorflow.python.distribute import mirrored_strategy
from tensorflow.python.distribute import multi_worker_util
from tensorflow.python.distribute import numpy_dataset
from tensorflow.python.distribute import reduce_util
from tensorflow.python.distribute import values
from tensorflow.python.distribute.cluster_resolver import SimpleClusterResolver
from tensorflow.python.distribute.cluster_resolver import TFConfigClusterResolver
from tensorflow.python.eager import context
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import collective_ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import tf_export
# TODO(yuefengz): support in-graph replication.
@tf_export("distribute.experimental.MultiWorkerMirroredStrategy", v1=[])
class CollectiveAllReduceStrategy(distribute_lib.Strategy):
  """A distribution strategy for synchronous training on multiple workers.
  This strategy implements synchronous distributed training across multiple
  workers, each with potentially multiple GPUs. Similar to
  `tf.distribute.MirroredStrategy`, it creates copies of all variables in the
  model on each device across all workers.
  It uses CollectiveOps's implementation of multi-worker all-reduce to
  keep variables in sync. A collective op is a single op in the
  TensorFlow graph which can automatically choose an all-reduce algorithm in
  the TensorFlow runtime according to hardware, network topology and tensor
  sizes.
  By default it uses all local GPUs or CPU for single-worker training.
  When the 'TF_CONFIG' environment variable is set, it parses cluster_spec,
  task_type and task_id from 'TF_CONFIG' and becomes a multi-worker strategy
  that mirrors models on the GPUs of all machines in the cluster. In the current
  implementation, it uses all GPUs in a cluster and it assumes all workers have
  the same number of GPUs.
  You can also pass a `distribute.cluster_resolver.ClusterResolver` instance
  when instantiating the strategy. The task_type, task_id etc. will be parsed
  from the resolver instance instead of from the `TF_CONFIG` env var.
  It supports both eager mode and graph mode. However, for eager mode, it has to
  set up the eager context in its constructor and therefore all ops in eager
  mode have to run after the strategy object is created.
  """
  # TODO(anjalisridhar): Update our guides with examples showing how we can use
  # the cluster_resolver argument.
  def __init__(
      self,
      communication=cross_device_ops_lib.CollectiveCommunication.AUTO,
      cluster_resolver=None):
    """Creates the strategy.
    Args:
      communication: optional Enum of type
        `distribute.experimental.CollectiveCommunication`.  This provides a way
        for the user to override the choice of collective op communication.
        Possible values include `AUTO`, `RING`, and `NCCL`.
      cluster_resolver: optional `distribute.cluster_resolver.ClusterResolver`
        object. The default ClusterResolver that is used is the
        TFConfigClusterResolver which is instantiated from the TF_CONFIG env
        var.
    """
    super(CollectiveAllReduceStrategy, self).__init__(
        CollectiveAllReduceExtended(
            self,
            communication=communication,
            cluster_resolver=cluster_resolver))
    distribute_lib.distribution_strategy_gauge.get_cell("V2").set(
        "MultiWorkerMirroredStrategy")
    # pylint: disable=protected-access
    distribute_lib.distribution_strategy_replica_gauge.get_cell(
        "num_workers").set(self.extended._num_workers)
    distribute_lib.distribution_strategy_replica_gauge.get_cell(
        "num_replicas_per_worker").set(self.extended._num_gpus_per_worker)
  @classmethod
  def _from_local_devices(cls, devices):
    """A convenience method to create an obejct with a list of devices."""
    obj = cls()
    obj.extended._initialize_local(TFConfigClusterResolver(), devices=devices)  # pylint: disable=protected-access
    return obj
  def scope(self):  # pylint: disable=useless-super-delegation
    """Returns a context manager selecting this Strategy as current.
    Inside a `with strategy.scope():` code block, this thread
    will use a variable creator set by `strategy`, and will
    enter its "cross-replica context".
    In `MultiWorkerMirroredStrategy`, all variables created inside
    `strategy.scope()` will be mirrored on all replicas of each worker.
    Moreover, it also sets a default device scope so that ops without
    specified devices will end up on the correct worker.
    Returns:
      A context manager to use for creating variables with this strategy.
    """
    return super(CollectiveAllReduceStrategy, self).scope()
@tf_export(v1=["distribute.experimental.MultiWorkerMirroredStrategy"])  # pylint: disable=missing-docstring
class CollectiveAllReduceStrategyV1(distribute_lib.StrategyV1):
  __doc__ = CollectiveAllReduceStrategy.__doc__
  def __init__(
      self,
      communication=cross_device_ops_lib.CollectiveCommunication.AUTO,
      cluster_resolver=None):
    """Initializes the object."""
    super(CollectiveAllReduceStrategyV1, self).__init__(
        CollectiveAllReduceExtended(
            self,
            communication=communication,
            cluster_resolver=cluster_resolver))
    distribute_lib.distribution_strategy_gauge.get_cell("V1").set(
        "MultiWorkerMirroredStrategy")
    # pylint: disable=protected-access
    distribute_lib.distribution_strategy_replica_gauge.get_cell(
        "num_workers").set(self.extended._num_workers)
    distribute_lib.distribution_strategy_replica_gauge.get_cell(
        "num_gpu_per_worker").set(self.extended._num_gpus_per_worker)
class CollectiveAllReduceExtended(mirrored_strategy.MirroredExtended):
  """Implementation of CollectiveAllReduceStrategy."""
  def __init__(self,
               container_strategy,
               communication,
               cluster_resolver):
    cluster_resolver = cluster_resolver or TFConfigClusterResolver()
    distribute_lib.StrategyExtendedV1.__init__(self, container_strategy)
    assert isinstance(
        communication,
        cross_device_ops_lib.CollectiveCommunication)
    self._communication = communication
    self._initialize_strategy(cluster_resolver)
    assert isinstance(self._get_cross_device_ops(),
                      cross_device_ops_lib.CollectiveAllReduce)
  def _initialize_strategy(self, cluster_resolver):
    if cluster_resolver.cluster_spec().as_dict():
      self._initialize_multi_worker(cluster_resolver)
    else:
      self._initialize_local(cluster_resolver)
  def _initialize_local(self, cluster_resolver, devices=None):
    """Initializes the object for local training."""
    self._is_chief = True
    self._num_workers = 1
    if ops.executing_eagerly_outside_functions():
      try:
        context.context().configure_collective_ops(
            scoped_allocator_enabled_ops=("CollectiveReduce",))
      except RuntimeError:
        logging.warning("Collective ops is not configured at program startup. "
                        "Some performance features may not be enabled.")
      self._collective_ops_configured = True
    # TODO(b/126786766): TFConfigClusterResolver returns wrong number of GPUs in
    # some cases.
    if isinstance(cluster_resolver, TFConfigClusterResolver):
      num_gpus = context.num_gpus()
    else:
      num_gpus = cluster_resolver.num_accelerators().get("GPU", 0)
    if devices:
      local_devices = devices
    else:
      if num_gpus:
        local_devices = tuple("/device:GPU:%d" % i for i in range(num_gpus))
      else:
        local_devices = ("/device:CPU:0",)
    self._worker_device = device_util.canonicalize("/device:CPU:0")
    self._host_input_device = numpy_dataset.SingleDevice(self._worker_device)
    self._collective_keys = cross_device_utils.CollectiveKeys()
    # TODO(yuefengz): remove num_gpus_per_worker from CollectiveAllReduce.
    self._cross_device_ops = cross_device_ops_lib.CollectiveAllReduce(
        num_workers=self._num_workers,
        num_gpus_per_worker=num_gpus,
        collective_keys=self._collective_keys,
        communication=self._communication)
    super(CollectiveAllReduceExtended, self)._initialize_single_worker(
        local_devices)
    self._cluster_spec = None
    self._task_type = None
    self._task_id = None
    # This is a mark to tell whether we are running with a standalone client or
    # an independent worker. Right now with a standalone client, the strategy
    # object is created as a local strategy and then turned into a multi-worker
    # strategy via the configure call.
    self._local_or_standalone_client_mode = True
    # Save the num_gpus_per_worker and rpc_layer for configure method.
    self._num_gpus_per_worker = num_gpus
    self._rpc_layer = cluster_resolver.rpc_layer
    self._warn_nccl_no_gpu()
    logging.info("Single-worker CollectiveAllReduceStrategy with local_devices "
                 "= %r, communication = %s", local_devices, self._communication)
  def _initialize_multi_worker(self, cluster_resolver):
    """Initializes the object for multi-worker training."""
    cluster_spec = multi_worker_util.normalize_cluster_spec(
        cluster_resolver.cluster_spec())
    task_type = cluster_resolver.task_type
    task_id = cluster_resolver.task_id
    if task_type is None or task_id is None:
      raise ValueError("When `cluster_spec` is given, you must also specify "
                       "`task_type` and `task_id`.")
    self._cluster_spec = cluster_spec
    self._task_type = task_type
    self._task_id = task_id
    self._num_workers = multi_worker_util.worker_count(cluster_spec, task_type)
    if not self._num_workers:
      raise ValueError("No `worker`, `chief` or `evaluator` tasks can be found "
                       "in `cluster_spec`.")
    self._is_chief = multi_worker_util.is_chief(cluster_spec, task_type,
                                                task_id)
    self._worker_device = "/job:%s/task:%d" % (task_type, task_id)
    self._host_input_device = numpy_dataset.SingleDevice(self._worker_device)
    if (ops.executing_eagerly_outside_functions() and
        not getattr(self, "_local_or_standalone_client_mode", False)):
      context.context().configure_collective_ops(
          collective_leader=multi_worker_util.collective_leader(
              cluster_spec, task_type, task_id),
          scoped_allocator_enabled_ops=("CollectiveReduce",),
          device_filters=("/job:%s/task:%d" % (task_type, task_id),))
      self._collective_ops_configured = True
    # Starting a std server in eager mode and in independent worker mode.
    if (context.executing_eagerly() and
        not getattr(self, "_std_server_started", False) and
        not getattr(self, "_local_or_standalone_client_mode", False)):
      # Checking _local_or_standalone_client_mode as well because we should not
      # create the std server in standalone client mode.
      config_proto = config_pb2.ConfigProto()
      config_proto = self._update_config_proto(config_proto)
      if hasattr(cluster_resolver, "port"):
        port = cluster_resolver.port
      else:
        port = 0
      server_def = tensorflow_server_pb2.ServerDef(
          cluster=cluster_spec.as_cluster_def(),
          default_session_config=config_proto,
          job_name=task_type,
          task_index=task_id,
          protocol=cluster_resolver.rpc_layer or "grpc",
          port=port)
      context.context().enable_collective_ops(server_def)
      self._std_server_started = True
      # The `ensure_initialized` is needed before calling
      # `context.context().devices()`.
      context.context().ensure_initialized()
      logging.info(
          "Enabled multi-worker collective ops with available devices: %r",
          context.context().devices())
    # TODO(yuefengz): The `num_gpus` is only for this particular task. It
    # assumes all workers have the same number of GPUs. We should remove this
    # assumption by querying all tasks for their numbers of GPUs.
    # TODO(b/126786766): TFConfigClusterResolver returns wrong number of GPUs in
    # some cases.
    if isinstance(cluster_resolver, TFConfigClusterResolver):
      num_gpus = context.num_gpus()
    else:
      num_gpus = cluster_resolver.num_accelerators().get("GPU", 0)
    if num_gpus:
      local_devices = tuple("%s/device:GPU:%d" % (self._worker_device, i)
                            for i in range(num_gpus))
    else:
      local_devices = (self._worker_device,)
    self._collective_keys = cross_device_utils.CollectiveKeys()
    self._cross_device_ops = cross_device_ops_lib.CollectiveAllReduce(
        num_workers=self._num_workers,
        num_gpus_per_worker=num_gpus,
        collective_keys=self._collective_keys,
        communication=self._communication)
    super(CollectiveAllReduceExtended, self)._initialize_single_worker(
        local_devices)
    self._input_workers = input_lib.InputWorkers(
        self._device_map, [(self._worker_device, self.worker_devices)])
    # Add a default device so that ops without specified devices will not end up
    # on other workers.
    self._default_device = "/job:%s/task:%d" % (task_type, task_id)
    # Save the num_gpus_per_worker and rpc_layer for configure method.
    self._num_gpus_per_worker = num_gpus
    self._rpc_layer = cluster_resolver.rpc_layer
    self._warn_nccl_no_gpu()
    logging.info(
        "Multi-worker CollectiveAllReduceStrategy with cluster_spec = %r, "
        "task_type = %r, task_id = %r, num_workers = %r, local_devices = %r, "
        "communication = %s", cluster_spec.as_dict(), task_type,
        task_id, self._num_workers, local_devices,
        self._communication)
  def _get_variable_creator_initial_value(self,
                                          replica_id,
                                          device,
                                          primary_var,
                                          **kwargs):
    if replica_id == 0:  # First replica on each worker.
      assert device is not None
      assert primary_var is None
      def initial_value_fn():  # pylint: disable=g-missing-docstring
        # Only the first device participates in the broadcast of initial values.
        group_key = self._collective_keys.get_group_key([device])
        group_size = self._num_workers
        collective_instance_key = (
            self._collective_keys.get_variable_instance_key())
        with ops.device(device):
          initial_value = kwargs["initial_value"]
          if callable(initial_value):
            initial_value = initial_value()
          assert not callable(initial_value)
          initial_value = ops.convert_to_tensor(
              initial_value, dtype=kwargs.get("dtype", None))
          if self._num_workers > 1:
            if self._is_chief:
              bcast_send = collective_ops.broadcast_send(
                  initial_value, initial_value.shape, initial_value.dtype,
                  group_size, group_key, collective_instance_key)
              with ops.control_dependencies([bcast_send]):
                return array_ops.identity(initial_value)
            else:
              return collective_ops.broadcast_recv(initial_value.shape,
                                                   initial_value.dtype,
                                                   group_size, group_key,
                                                   collective_instance_key)
          return initial_value
      return initial_value_fn
    else:
      return super(CollectiveAllReduceExtended,
                   self)._get_variable_creator_initial_value(
                       replica_id=replica_id,
                       device=device,
                       primary_var=primary_var,
                       **kwargs)
  def _make_input_context(self):
    if self._cluster_spec is None:
      input_pipeline_id = 0
    else:
      input_pipeline_id = multi_worker_util.id_in_cluster(
          self._cluster_spec, self._task_type, self._task_id)
    input_context = distribute_lib.InputContext(
        num_input_pipelines=self._num_workers,
        input_pipeline_id=input_pipeline_id,
        num_replicas_in_sync=self._num_replicas_in_sync)
    return input_context
  def _experimental_distribute_dataset(self, dataset):
    input_context = self._make_input_context()
    return input_lib.get_distributed_dataset(
        dataset,
        self._input_workers,
        self._container_strategy(),
        split_batch_by=self._num_replicas_in_sync,
        input_context=input_context)
  def _make_dataset_iterator(self, dataset):
    """Distributes the dataset to each local GPU."""
    input_context = self._make_input_context()
    return input_lib.DatasetIterator(
        dataset,
        self._input_workers,
        self._container_strategy(),
        split_batch_by=self._num_replicas_in_sync,
        input_context=input_context)
  def _make_input_fn_iterator(
      self,
      input_fn,
      replication_mode=distribute_lib.InputReplicationMode.PER_WORKER):
    """Distributes the input function to each local GPU."""
    input_context = self._make_input_context()
    return input_lib.InputFunctionIterator(input_fn, self._input_workers,
                                           [input_context],
                                           self._container_strategy())
  def _configure(self,
                 session_config=None,
                 cluster_spec=None,
                 task_type=None,
                 task_id=None):
    """Configures the object.
    Args:
      session_config: a `tf.compat.v1.ConfigProto`
      cluster_spec: a dict, ClusterDef or ClusterSpec object specifying the
        cluster configurations.
      task_type: the current task type, such as "worker".
      task_id: the current task id.
    Raises:
      ValueError: if `task_type` is not in the `cluster_spec`.
    """
    if cluster_spec:
      # Use the num_gpus_per_worker recorded in constructor since _configure
      # doesn't take num_gpus.
      cluster_resolver = SimpleClusterResolver(
          cluster_spec=multi_worker_util.normalize_cluster_spec(cluster_spec),
          task_type=task_type,
          task_id=task_id,
          num_accelerators={"GPU": self._num_gpus_per_worker},
          rpc_layer=self._rpc_layer)
      self._initialize_multi_worker(cluster_resolver)
      assert isinstance(self._get_cross_device_ops(),
                        cross_device_ops_lib.CollectiveAllReduce)
    if session_config:
      session_config.CopyFrom(self._update_config_proto(session_config))
  def _update_config_proto(self, config_proto):
    updated_config = copy.deepcopy(config_proto)
    # Enable the scoped allocator optimization for CollectiveOps.  This
    # optimization converts many small all-reduces into fewer larger
    # all-reduces.
    rewrite_options = updated_config.graph_options.rewrite_options
    rewrite_options.scoped_allocator_optimization = (
        rewriter_config_pb2.RewriterConfig.ON)
    # We turn on ScopedAllocator only for CollectiveReduce op, i.e. enable_op =
    # ["CollectiveReduce"].  Since we can't assign to a repeated proto field, we
    # clear and then append.
    del rewrite_options.scoped_allocator_opts.enable_op[:]
    rewrite_options.scoped_allocator_opts.enable_op.append("CollectiveReduce")
    if (not ops.executing_eagerly_outside_functions() and
        self._communication ==
        cross_device_ops_lib.CollectiveCommunication.NCCL):
      updated_config.experimental.collective_nccl = True
    if not self._cluster_spec:
      return updated_config
    assert self._task_type
    assert self._task_id is not None
    # Collective group leader is needed for collective ops to coordinate
    # workers.
    updated_config.experimental.collective_group_leader = (
        multi_worker_util.collective_leader(self._cluster_spec, self._task_type,
                                            self._task_id))
    # The device filters prevent communication between workers.
    del updated_config.device_filters[:]
    updated_config.device_filters.append(
        "/job:%s/task:%d" % (self._task_type, self._task_id))
    return updated_config
  def _reduce_to(self, reduce_op, value, destinations):
    if (isinstance(value, values.Mirrored) and
        reduce_op == reduce_util.ReduceOp.MEAN):
      return value
    assert not isinstance(value, values.Mirrored)
    if (isinstance(value, values.DistributedValues) and
        len(self.worker_devices) == 1):
      value = value.values[0]
    # When there are multiple workers, we need to reduce across workers using
    # collective ops.
    if (not isinstance(value, values.DistributedValues) and
        self._num_workers == 1):
      # This function handles reducing values that are not PerReplica or
      # Mirrored values. For example, the same value could be present on all
      # replicas in which case `value` would be a single value or value could
      # be 0.
      return cross_device_ops_lib.reduce_non_distributed_value(
          reduce_op, self._device_map, value, destinations)
    return self._get_cross_device_ops().reduce(
        reduce_op, value, destinations=destinations)
  def _warn_nccl_no_gpu(self):
    if ((self._communication ==
         cross_device_ops_lib.CollectiveCommunication.NCCL) and
        self._num_gpus_per_worker == 0):
      logging.warning("Enabled NCCL communication but no GPUs detected/"
                      "specified.")
  def _in_multi_worker_mode(self):
    """Whether this strategy indicates working in multi-worker settings."""
    return self._num_workers > 1
  @property
  def experimental_between_graph(self):
    return True
  @property
  def experimental_should_init(self):
    return True
  @property
  def should_checkpoint(self):
    return self._is_chief
  @property
  def should_save_summary(self):
    return self._is_chief
  @property
  def _num_replicas_in_sync(self):
    return len(self.worker_devices) * self._num_workers
  # TODO(priyag): Delete this once all strategies use global batch size.
  @property
  def _global_batch_size(self):
    """`make_dataset_iterator` and `make_numpy_iterator` use global batch size.
    `make_input_fn_iterator` assumes per-replica batching.
    Returns:
      Boolean.
    """
    return True
 | 
	apache-2.0 | -8,948,848,603,491,015,000 | 40.697023 | 114 | 0.675711 | false | 
| 
	ayeganov/SmartFlappy | 
	kivy_fix.py | 
	1 | 
	1228 | 
	import os
import json
from os.path import dirname, join
from kivy.atlas import Atlas, Logger
from kivy.core.image import Image as CoreImage
class SpriteAtlas(Atlas):
    def _load(self):
        # the filename must end with '.atlas'
        filename = self._filename
        assert(filename.endswith('.atlas'))
        filename = filename.replace('/', os.sep)
        Logger.debug('Atlas: Load <%s>' % filename)
        with open(filename, 'r') as fd:
            meta = json.load(fd)
        Logger.debug('Atlas: Need to load %d images' % len(meta))
        d = dirname(filename)
        textures = {}
        for subfilename, ids in meta.items():
            subfilename = join(d, subfilename)
            Logger.debug('Atlas: Load <%s>' % subfilename)
            # load the image
            ci = CoreImage(subfilename)
            # <RJ> this is the fix for pixel art
            ci.texture.mag_filter = 'nearest'
            # for each id in this image, get the region and store it in our dict
            for meta_id, meta_coords in ids.items():
                x, y, w, h = meta_coords
                textures[meta_id] = ci.texture.get_region(x, y, w, h)
        self.textures = textures
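# Illustrative usage (assumes an atlas file with such a region id exists; the
# loaded textures are exposed via the `textures` dict set above):
#
#   atlas = SpriteAtlas('data/sprites.atlas')
#   bird = atlas.textures['bird1']   # 'bird1' is a hypothetical region id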
 | 
	mit | -7,277,527,500,010,486,000 | 30.487179 | 71 | 0.57329 | false | 
| 
	ThiagoLopes/uniso-semana-engenharia | 
	uniso/urls.py | 
	1 | 
	1271 | 
	"""uniso URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
    https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
    1. Add an import:  from my_app import views
    2. Add a URL to urlpatterns:  url(r'^$', views.home, name='home')
Class-based views
    1. Add an import:  from other_app.views import Home
    2. Add a URL to urlpatterns:  url(r'^$', Home.as_view(), name='home')
Including another URLconf
    1. Import the include() function: from django.conf.urls import url, include
    2. Add a URL to urlpatterns:  url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.conf.urls.static import static
from django.contrib import admin
from django.conf import settings
urlpatterns = [
    url(r'^admin-uniso/', admin.site.urls),
    url(r'^', include('dash.urls', namespace='dash'))
]
if settings.DEBUG:
    import debug_toolbar
    urlpatterns = [
        url(r'^__debug__/', include(debug_toolbar.urls)),
    ] + urlpatterns
    urlpatterns += static(settings.MEDIA_URL,
                          document_root=settings.MEDIA_ROOT)
    urlpatterns += static(settings.STATIC_URL,
                          document_root=settings.STATICFILES_DIRS)
 | 
	gpl-3.0 | 3,893,934,406,690,880,500 | 36.382353 | 79 | 0.675846 | false | 
| 
	chrisjaquet/FreeCAD | 
	src/Mod/PartDesign/FeatureHole/HoleGui.py | 
	12 | 
	3688 | 
	#/******************************************************************************
# * Copyright (c)2012 Jan Rheinlaender <[email protected]> *
# * *
# * This file is part of the FreeCAD CAx development system. *
# * *
# * This library is free software; you can redistribute it and/or *
# * modify it under the terms of the GNU Library General Public *
# * License as published by the Free Software Foundation; either *
# * version 2 of the License, or (at your option) any later version. *
# * *
# * This library is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this library; see the file COPYING.LIB. If not, *
# * write to the Free Software Foundation, Inc., 59 Temple Place, *
# * Suite 330, Boston, MA 02111-1307, USA *
# * *
# ******************************************************************************/
import FreeCAD, FreeCADGui
import PartDesignGui
from PyQt4 import QtCore, QtGui
from TaskHole import TaskHole
from FeatureHole import Hole
from ViewProviderHole import ViewProviderHole
class HoleGui:
    def getMainWindow(self):
        "returns the main window"
        # using QtGui.qApp.activeWindow() isn't very reliable because if another
        # widget than the mainwindow is active (e.g. a dialog) the wrong widget is
        # returned
        toplevel = QtGui.qApp.topLevelWidgets()
        for i in toplevel:
            if i.metaObject().className() == "Gui::MainWindow":
                return i
        raise Exception("No main window found")
    "Create a new hole feature"
    def Activated(self):
        # Get main window
        mw = self.getMainWindow()
        # Get active document
        doc = FreeCAD.activeDocument()
        if doc is None:
            QtGui.QMessageBox.critical(mw, "No document", "A document must be open in order to create a hole feature")
            return
        # Check for valid position selection
        selection = FreeCADGui.Selection.getSelectionEx()
        if len(selection) != 1:
            QtGui.QMessageBox.critical(mw, "No position defined", "Please select a face to create the hole feature on")
            return
        if selection[0].DocumentName != doc.Name:
            QtGui.QMessageBox.critical(mw, "Wrong document", "Please select a face in the active document")
            return
        # Note: For some reason setting the Support property here breaks all sorts of things.
        #       It is done in TaskHole.updateUI() instead
        # Show feature preview
        body = FreeCADGui.activeView().getActiveObject("pdbody")
        if body is None:
            QtGui.QMessageBox.critical(mw, "No active body", "Please create a body or make a body active")
            return
    
        feature = doc.addObject("Part::FeaturePython","Hole")
        hole = Hole(feature)
        body.addFeature(feature)
        
        vp = ViewProviderHole(feature.ViewObject)
        feature.touch()
        FreeCAD.ActiveDocument.recompute()
        # Fit view (remove after the testing phase)
        FreeCADGui.SendMsgToActiveView("ViewFit")
        vp.setEdit(vp,  1)
    def GetResources(self):
        IconPath = FreeCAD.ConfigGet("AppHomePath") + "Mod/PartDesign/FeatureHole/PartDesign_Hole.svg"
        MenuText = 'Create a hole feature'
        ToolTip = 'Create a hole feature'
        return {'Pixmap' : IconPath, 'MenuText': MenuText, 'ToolTip': ToolTip}
FreeCADGui.addCommand('PartDesign_Hole', HoleGui())
 | 
	lgpl-2.1 | -6,249,564,738,716,002,000 | 41.883721 | 119 | 0.636388 | false | 
| 
	XBMC-Addons/service.scrobbler.lastfm | 
	helpers.py | 
	2 | 
	1177 | 
	from urlparse import urlparse
import re
def is_local(path):
    """ Returns True if the given path is a local address, otherwise False. """
    parse_result = urlparse(path)
    # only analyze http(s)/rtmp streams
    if (not parse_result.scheme == 'http') and (not parse_result.scheme == 'https') and (not parse_result.scheme == 'rtmp'):
        return True
    if not parse_result.netloc:
        return True  # assume a lack of network location implies a private address
    # regex reference: http://stackoverflow.com/a/692457/577298
    elif re.match("127\.\d{1,3}\.\d{1,3}\.\d{1,3}", parse_result.netloc, flags=0):
        return True
    elif re.match("192\.168\.\d{1,3}\.\d{1,3}", parse_result.netloc, flags=0):
        return True
    elif re.match("10\.\d{1,3}\.\d{1,3}\.\d{1,3}", parse_result.netloc, flags=0):
        return True
    elif re.match("172\.(1[6-9]|2[0-9]|3[0-1])\.[0-9]{1,3}\.[0-9]{1,3}", parse_result.netloc, flags=0):
        return True
    elif parse_result.netloc.startswith("fe80:"):  # link-local IPv6 address
        return True
    elif parse_result.netloc.startswith("fc00:"):  # IPv6 ULA
        return True
    else:
        return False
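# Illustrative examples (assumed behaviour, not exercised in this module):
#   is_local("/storage/music/track.mp3")            -> True   (no URL scheme)
#   is_local("http://192.168.1.5:8080/stream.mp3")  -> True   (private address)
#   is_local("http://example.com/track.mp3")        -> False  (public host)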
 | 
	gpl-2.0 | -3,682,357,508,327,625,700 | 44.269231 | 124 | 0.624469 | false | 
| 
	davidzchen/tensorflow | 
	tensorflow/python/keras/benchmarks/keras_examples_benchmarks/mnist_hierarchical_rnn_benchmark_test.py | 
	5 | 
	4580 | 
	# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Benchmarks on Hierarchical RNN on MNIST digits."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.keras.benchmarks import benchmark_util
class HierarchicalRNNBenchmark(tf.test.Benchmark):
  """Benchmarks for Hierarchical RNN using `tf.test.Benchmark`."""
  def __init__(self):
    super(HierarchicalRNNBenchmark, self).__init__()
    self.num_classes = 10
    self.row_hidden, self.col_hidden = 128, 128
    (self.x_train, self.y_train), _ = tf.keras.datasets.mnist.load_data()
    self.x_train = self.x_train.reshape(self.x_train.shape[0], 28, 28, 1)
    self.x_train = self.x_train.astype('float32') / 255
    self.y_train = tf.keras.utils.to_categorical(self.y_train, self.num_classes)
  def _build_model(self):
    """Model from https://github.com/keras-team/keras/blob/master/examples
    /mnist_hierarchical_rnn.py.
    """
    row, col, pixel = self.x_train.shape[1:]
    inputs = tf.keras.layers.Input(shape=(row, col, pixel))
    encoded_rows = tf.keras.layers.TimeDistributed(
        tf.keras.layers.LSTM(self.row_hidden))(
            inputs)
    encoded_cols = tf.keras.layers.LSTM(self.col_hidden)(encoded_rows)
    outputs = tf.keras.layers.Dense(
        self.num_classes, activation='softmax')(
            encoded_cols)
    model = tf.keras.Model(inputs, outputs)
    return model
  # In each benchmark test, the required arguments for the
  # method `measure_performance` include:
  #   x: Input data, it could be Numpy or loaded from tfds.
  #   y: Target data. If `x` is a dataset or generator instance,
  #      `y` should not be specified.
  #   loss: Loss function for model.
  #   optimizer: Optimizer for model.
  #   Check more details in `measure_performance()` method of
  #   benchmark_util.
  def benchmark_hrnn_mnist_bs_256(self):
    """Measure performance with batch_size=256."""
    batch_size = 256
    metrics, wall_time, extras = benchmark_util.measure_performance(
        self._build_model,
        x=self.x_train,
        y=self.y_train,
        batch_size=batch_size,
        optimizer='rmsprop',
        loss='categorical_crossentropy',
        metrics=['accuracy'])
    self.report_benchmark(wall_time=wall_time, metrics=metrics, extras=extras)
  def benchmark_hrnn_mnist_bs_512(self):
    """Measure performance with batch_size=512."""
    batch_size = 512
    metrics, wall_time, extras = benchmark_util.measure_performance(
        self._build_model,
        x=self.x_train,
        y=self.y_train,
        batch_size=batch_size,
        optimizer='rmsprop',
        loss='categorical_crossentropy',
        metrics=['accuracy'])
    self.report_benchmark(wall_time=wall_time, metrics=metrics, extras=extras)
  def benchmark_hrnn_mnist_bs_1024(self):
    """Measure performance with batch_size=1024."""
    batch_size = 1024
    metrics, wall_time, extras = benchmark_util.measure_performance(
        self._build_model,
        x=self.x_train,
        y=self.y_train,
        batch_size=batch_size,
        optimizer='rmsprop',
        loss='categorical_crossentropy',
        metrics=['accuracy'])
    self.report_benchmark(wall_time=wall_time, metrics=metrics, extras=extras)
  def benchmark_hrnn_mnist_bs_1024_gpu_2(self):
    """Measure performance with batch_size=1024, gpu=2 and
    distribution_strategy='mirrored'
    """
    batch_size = 1024
    metrics, wall_time, extras = benchmark_util.measure_performance(
        self._build_model,
        x=self.x_train,
        y=self.y_train,
        batch_size=batch_size,
        num_gpus=2,
        distribution_strategy='mirrored',
        optimizer='rmsprop',
        loss='categorical_crossentropy',
        metrics=['accuracy'])
    self.report_benchmark(wall_time=wall_time, metrics=metrics, extras=extras)
if __name__ == '__main__':
  tf.test.main()
 | 
	apache-2.0 | -5,688,871,444,955,991,000 | 35.062992 | 80 | 0.661572 | false | 
| 
	google/sling | 
	sling/nlp/parser/trainer/parser_state.py | 
	1 | 
	13871 | 
	# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sling
from action import Action
# ParserState maintains the under-construction frame graph for a given text.
class ParserState:
  # Represents a span in the text.
  class Span:
    def __init__(self, start, length):
      self.start = start
      self.end = start + length
      self.evoked = []   # frame(s) evoked by the span
  # Represents the beginning token of a span.
  class Mark:
    def __init__(self, token, step):
      self.token = token
      self.step = step
  # Represents a frame.
  class Frame:
    def __init__(self, t):
      self.type = t
      # Frame slots.
      self.edges = []
      # Boundaries of the first span (if any) that evoked the frame.
      self.start = -1
      self.end = -1
      # All span(s), if any, that evoked the frame.
      self.spans = []
      # Steps that focused / created the frame.
      self.focus = 0
      self.creation = 0
  def __init__(self, document, spec):
    self.document = document
    self.spec = spec
    self.begin = 0
    self.end = len(document.tokens)
    self.current = 0                 # current input position
    self.frames = []                 # frames added so far
    self.spans = {}                  # spans added so far
    self.steps = 0                   # no. of steps taken so far
    self.actions = []                # actual steps taken so far
    self.graph = []                  # edges in the frame graph
    self.done = False                # if the graph is complete
    self.attention = []              # frames in attention buffer
    self.marks = []                  # marked (i.e. open) spans
    self.embed = []                  # current embedded frames
    self.elaborate = []              # current elaborated frames
    self.max_mark_nesting = 5        # max number of open marks
    # Token -> Spans over it.
    self.token_to_spans = [[] for _ in range(len(document.tokens))]
  # Returns a string representation of the parser state.
  def __repr__(self):
    s = "Curr:" + str(self.current) + " in [" + str(self.begin) + \
        ", " + str(self.end) + ")" + " " + str(len(self.frames)) + " frames"
    for index, f in enumerate(self.attention):
      if index == 10: break
      s += "\n   - Attn " + str(index) + ":" + str(f.type) + \
           " Creation:" + str(f.creation) + \
           ", Focus:" + str(f.focus) + ", #Edges:" + str(len(f.edges)) + \
           " (" + str(len(f.spans)) + " spans) "
      if len(f.spans) > 0:
        for span in f.spans:
          words = self.document.tokens[span.start].word
          if span.end > span.start + 1:
            words += ".." + self.document.tokens[span.end - 1].word
          s += words + " = [" + str(span.start) + ", " + str(span.end) + ") "
    return s
  # Computes the role graph.
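  # Each edge is a tuple (source attention index, role id, target attention
  # index), where the target index is -1 when the slot value is not a tracked
  # frame (e.g. a global value set via ASSIGN).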
  def compute_role_graph(self):
    # No computation required if none of the actions have roles.
    if len(self.spec.actions.roles) == 0: return
    del self.graph
    self.graph = []
    limit = min(self.spec.frame_limit, len(self.attention))
    for i in range(limit):
      frame = self.attention[i]
      for role, value in frame.edges:
        role_id = self.spec.actions.role_indices.get(role, None)
        if role_id is not None:
          target = -1
          if isinstance(value, ParserState.Frame):
            target = self.index(value)
            if target == -1 or target >= self.spec.frame_limit: continue
          self.graph.append((i, role_id, target))
  # Returns the step at which the index-th attention frame was created.
  def creation_step(self, index):
    if index >= len(self.attention) or index < 0: return -1
    return self.attention[index].creation
  # Returns the most recent step at which the index-th attention frame was focused.
  def focus_step(self, index):
    if index >= len(self.attention) or index < 0: return -1
    return self.attention[index].focus
  # Returns whether [start, end) crosses an existing span.
  def _crosses(self, start, end):
    for token in range(start, end):
      for s in self.token_to_spans[token]:
        if (s.start - start) * (s.end - end) > 0:
          return True
    return False
  # Check fails if there is any crossing span in the parser state.
  def check_spans(self):
    spans = list(self.spans.keys())
    spans.sort(key=lambda s: (s[0], s[0] - s[1]))
    cover = [None] * len(self.document.tokens)
    for s in spans:
      c = cover[s[0]]
      for i in range(s[0], s[1]):
        assert c == cover[i], (c, cover[i], spans, self.actions)
        cover[i] = s
  # Returns whether 'action_index' is allowed in the current state.
  def is_allowed(self, action_index):
    if self.done: return False
    actions = self.spec.actions
    if action_index == actions.stop(): return self.current == self.end
    if action_index == actions.shift(): return self.current < self.end
    if action_index == actions.mark():
      return self.current < self.end and len(self.marks) < self.max_mark_nesting
    action = actions.table[action_index]
    if action.type == Action.REFER:
      end = self.current + action.length
      if end > self.end or \
        action.target >= self.attention_size() or \
        self._crosses(self.current, end):
          return False
      existing = self._get_span(self.current, end)
      if existing is not None:
        target = self.attention[action.target]
        for f in existing.evoked:
          if f is target: return False
      return True
    if action.type == Action.EVOKE:
      if action.length is None:
        if len(self.marks) == 0 or self.marks[-1].token == self.current \
          or self.current == self.end:
          return False
        return not self._crosses(self.marks[-1].token, self.current + 1)
      else:
        end = self.current + action.length
        if end > self.end or self._crosses(self.current, end):
          return False
        existing = self._get_span(self.current, end)
        if existing is not None:
          for f in existing.evoked:
            if f.type == action.label: return False
        return True
    elif action.type == Action.CONNECT:
      s = self.attention_size()
      if action.source >= s or action.target >= s: return False
      source = self.attention[action.source]
      target = self.attention[action.target]
      for role, value in source.edges:
        if role == action.role and value is target: return False
      return True
    elif action.type == Action.EMBED:
      if action.target >= self.attention_size(): return False
      target = self.attention[action.target]
      for t, role, value in self.embed:
        if t == action.label and role == action.role and value is target:
          return False
      return True
    elif action.type == Action.ELABORATE:
      if action.source >= self.attention_size(): return False
      source = self.attention[action.source]
      for t, role, value in self.elaborate:
        if t == action.label and role == action.role and value is source:
          return False
      return True
    elif action.type == Action.ASSIGN:
      if action.source >= self.attention_size(): return False
      source = self.attention[action.source]
      for role, value in source.edges:
        if role == action.role and value == action.label: return False
      return True
    else:
      raise ValueError("Unknown action : ", action)
  # Returns the attention index of 'frame'.
  def index(self, frame):
    for i in range(len(self.attention)):
      if self.attention[i] is frame:
        return i
    return -1
  # Returns the frame at attention index 'index'.
  def frame(self, index):
    return self.attention[index]
  # Returns the size of the attention buffer.
  def attention_size(self):
    return len(self.attention)
  # Returns the role graph.
  def role_graph(self):
    return self.graph
  # Returns the end token (inclusive) of the span, if any, that evoked/referred
  # the frame at attention index 'index'.
  def frame_end_inclusive(self, index):
    if index >= len(self.attention) or index < 0:
      return -1
    elif self.attention[index].end == -1:
      return -1
    else:
      return self.attention[index].end - 1
  # Advances the state using 'action'.
  def advance(self, action):
    self.actions.append(action)
    if action.type == Action.STOP:
      self.done = True
    elif action.type == Action.SHIFT:
      self.current += 1
      del self.embed[:]
      del self.elaborate[:]
    elif action.type == Action.MARK:
      self.marks.append(ParserState.Mark(self.current, len(self.actions) - 1))
    elif action.type == Action.EVOKE:
      begin = self.current
      end = self.current + 1
      if action.length is None:
        begin = self.marks.pop().token
      else:
        assert action.length > 0
        end = self.current + action.length
      s = self._make_span(begin, end)
      f = self._make_frame(action.label)
      f.start = begin
      f.end = end
      f.spans.append(s)
      s.evoked.append(f)
      self.frames.append(f)
      self._add_to_attention(f)
    elif action.type == Action.REFER:
      f = self.attention[action.target]
      f.focus = self.steps
      s = self._make_span(self.current, self.current + action.length)
      s.evoked.append(f)
      f.spans.append(s)
      self._refocus_attention(action.target)
    elif action.type == Action.CONNECT:
      f = self.attention[action.source]
      f.edges.append((action.role, self.attention[action.target]))
      f.focus = self.steps
      self._refocus_attention(action.source)
    elif action.type == Action.EMBED:
      target = self.attention[action.target]
      f = self._make_frame(action.label)
      f.edges.append((action.role, target))
      self.frames.append(f)
      self._add_to_attention(f)
      self.embed.append((action.label, action.role, target))
    elif action.type == Action.ELABORATE:
      source = self.attention[action.source]
      f = self._make_frame(action.label)
      source.edges.append((action.role, f))
      self.frames.append(f)
      self._add_to_attention(f)
      self.elaborate.append((action.label, action.role, source))
    elif action.type == Action.ASSIGN:
      source = self.attention[action.source]
      source.focus = self.steps
      source.edges.append((action.role, action.label))
      self._refocus_attention(action.source)
    else:
      raise ValueError("Unknown action type: ", action.type)
    self.steps += 1
    if action.type != Action.SHIFT and action.type != Action.STOP:
      # Recompute the role graph because we modified the attention buffer.
      if self.spec: self.compute_role_graph()
  # Write the frame graph to 'document', which should be a SLING Document.
  def write(self, document=None):
    if document is None:
      document = self.document
    store = document.frame.store()
    document.remove_annotations()
    frames = {}
    for f in self.frames:
      frame = store.frame({})
      if f.type is not None:
        frame["isa"] = f.type
      frames[f] = frame
      if len(f.spans) == 0:
        document.add_theme(frame)
    for f in self.frames:
      frame = frames[f]
      for role, value in f.edges:
        if isinstance(value, ParserState.Frame):
          # Add slot whose value is a reference to another frame.
          assert value in frames, str(value.__dict__)
          frame.append(role, frames[value])
        else:
          # Add slot whose value is a reference to a global frame (cf. ASSIGN).
          assert type(value) == sling.Frame, "%r" % value
          frame.append(role, value)
    for _, s in self.spans.items():
      # Note: mention.frame is the actual mention frame.
      mention = document.add_mention(s.start, s.end)
      for f in s.evoked:
        assert f in frames
        mention.frame.append("evokes", frames[f])
    document.update()
  # Returns the string representation of the framing.
  def data(self, **kwargs):
    return self.document.frame.data(**kwargs)
  # Returns the encoded representation of the framing.
  def encoded(self):
    return self.data(binary=True, shallow=True)
  # Returns the textual representation of the framing.
  def textual(self):
    return self.data(binary=False, pretty=True, shallow=True)
  # Adds frame 'f' to the attention buffer.
  def _add_to_attention(self, f):
    f.focus = self.steps
    self.attention.insert(0, f)
  # Makes the frame at attention index 'index' the center of attention.
  def _refocus_attention(self, index):
    f = self.attention[index]
    f.focus = self.steps
    if index > 0: self.attention.insert(0, self.attention.pop(index))
  # Gets an existing [begin, end) span, or None.
  def _get_span(self, begin, end):
    key = (begin, end)
    return self.spans.get(key, None)
  # Creates and returns a [begin, end) span.
  def _make_span(self, begin, end):
    # See if an existing span can be returned.
    key = (begin, end)
    existing = self.spans.get(key, None)
    if existing is not None: return existing
    s = ParserState.Span(begin, end - begin)
    self.spans[key] = s
    for i in range(begin, end):
      self.token_to_spans[i].append(s)
    return s
  # Creates and returns a frame of type 't'.
  def _make_frame(self, t):
    f = ParserState.Frame(t)
    f.creation = self.steps
    f.focus = self.steps
    return f
 | 
	apache-2.0 | 8,518,184,749,541,436,000 | 32.424096 | 80 | 0.620503 | false | 
| 
	melvon22/osmc | 
	package/mediacenter-skin-osmc/files/usr/share/kodi/addons/script.skinshortcuts/resources/lib/datafunctions.py | 
	8 | 
	49118 | 
	# coding=utf-8
import os, sys, datetime, unicodedata, re, types
import xbmc, xbmcaddon, xbmcgui, xbmcvfs, urllib
import xml.etree.ElementTree as xmltree
import hashlib, hashlist
import ast
from xml.dom.minidom import parse
from traceback import print_exc
from htmlentitydefs import name2codepoint
from unidecode import unidecode
from unicodeutils import try_decode
import nodefunctions
NODE = nodefunctions.NodeFunctions()
__addon__        = xbmcaddon.Addon()
__addonid__      = __addon__.getAddonInfo('id').decode( 'utf-8' )
__addonversion__ = __addon__.getAddonInfo('version')
__xbmcversion__  = xbmc.getInfoLabel( "System.BuildVersion" ).split(".")[0]
__language__     = __addon__.getLocalizedString
__cwd__          = __addon__.getAddonInfo('path').decode("utf-8")
__addonname__    = __addon__.getAddonInfo('name').decode("utf-8")
__resource__   = xbmc.translatePath( os.path.join( __cwd__, 'resources', 'lib' ) ).decode("utf-8")
__datapath__     = os.path.join( xbmc.translatePath( "special://profile/addon_data/" ).decode('utf-8'), __addonid__ )
__profilepath__  = xbmc.translatePath( "special://profile/" ).decode('utf-8')
__skinpath__     = xbmc.translatePath( "special://skin/shortcuts/" ).decode('utf-8')
__defaultpath__  = xbmc.translatePath( os.path.join( __cwd__, 'resources', 'shortcuts').encode("utf-8") ).decode("utf-8")
# character entity reference
CHAR_ENTITY_REXP = re.compile('&(%s);' % '|'.join(name2codepoint))
# decimal character reference
DECIMAL_REXP = re.compile('&#(\d+);')
# hexadecimal character reference
HEX_REXP = re.compile('&#x([\da-fA-F]+);')
REPLACE1_REXP = re.compile(r'[\']+')
REPLACE2_REXP = re.compile(r'[^-a-z0-9]+')
REMOVE_REXP = re.compile('-{2,}')
def log(txt):
    if __xbmcversion__ == "13" or __addon__.getSetting( "enable_logging" ) == "true":
        try:
            if isinstance (txt,str):
                txt = txt.decode('utf-8')
            message = u'%s: %s' % (__addonid__, txt)
            xbmc.log(msg=message.encode('utf-8'), level=xbmc.LOGDEBUG)
        except:
            pass
    
class DataFunctions():
    def __init__(self):
        self.overrides = {}
        self.widgetNameAndType = {}
        self.backgroundName = {}
        self.currentProperties = None
        self.defaultProperties = None
        
    
    def _get_labelID( self, labelID, action, getDefaultID = False, includeAddOnID = True ):
        # This gets the unique labelID for the item we've been passed. We'll also store it, to make sure
        # we don't give it to any other item.
        
        labelID = self.createNiceName( self.slugify( labelID.replace( " ", "" ).lower() ) )
        
        if includeAddOnID:
            addon_labelID = self._get_addon_labelID( action )
            if addon_labelID is not None:
                labelID = addon_labelID
        
        # If we're getting the defaultID, just return this
        if getDefaultID == True:
            return labelID
        
        # Check if the labelID exists in the list
        if labelID in self.labelIDList:
            # We're going to add an --[int] to the end of this
            count = 0
            while labelID + "--" + str( count ) in self.labelIDList:
                count += 1
            
            # We can now use this one
            self.labelIDList.append( labelID + "--" + str( count ) )
            return labelID + "--" + str( count )
        else:
            # We can use this one
            self.labelIDList.append( labelID )
            return labelID
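    # Illustrative sketch (not part of the original source): after _clear_labelID(),
    # three items that all slugify to "movies" would receive
    #   "movies", "movies--0", "movies--1"
    # from successive _get_labelID() calls, since every result is remembered in
    # self.labelIDList.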
            
    def _get_addon_labelID( self, action ):
        # This will check the action to see if this is a program or the root of a plugin and, if so, return that as the labelID
        
        if action is None:
            return None
        
        try:
            if action.startswith( "RunAddOn(" ) and "," not in action:
                return action[9:-1]
                
            if action.startswith( "RunScript(" ) and "," not in action:
                return action[10:-1]
                
            if "plugin://" in action and "?" not in action:
                # Return the action
                # - less ActivateWindow(
                # - The second group after being split by comma
                # - Less plugin://
                return action[15:-1].split( "," )[1].replace( '"', '' )[9:]
        except:
            return None
            
        return None
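    # Illustrative sketch (not part of the original source): for a plain add-on
    # launcher the add-on id itself becomes the labelID, e.g.
    #   _get_addon_labelID( "RunScript(script.skinshortcuts)" )  -> "script.skinshortcuts"
    #   _get_addon_labelID( "RunAddOn(plugin.video.example)" )   -> "plugin.video.example"
    # ("plugin.video.example" is a hypothetical add-on id used only for illustration)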
    
    def _clear_labelID( self ):
        # This clears our stored list of labelIDs
        self.labelIDList = []
        
    
    def _pop_labelID( self ):
        self.labelIDList.pop()
    
                
    def _get_shortcuts( self, group, defaultGroup = None, isXML = False, profileDir = None, defaultsOnly = False ):
        # This will load the shortcut file
        # Additionally, if the override files haven't been loaded, we'll load them too
        log( "Loading shortcuts for group " + group )
                
        if profileDir is None:
            profileDir = xbmc.translatePath( "special://profile/" ).decode( "utf-8" )
        
        userShortcuts = os.path.join( profileDir, "addon_data", __addonid__, self.slugify( group ) + ".DATA.xml" )#.encode('utf-8')
        skinShortcuts = os.path.join( __skinpath__ , self.slugify( group ) + ".DATA.xml")#.encode('utf-8')
        defaultShortcuts = os.path.join( __defaultpath__ , self.slugify( group ) + ".DATA.xml" )#.encode('utf-8')
        if defaultGroup is not None:
            skinShortcuts = os.path.join( __skinpath__ , self.slugify( defaultGroup ) + ".DATA.xml")#.encode('utf-8')    
            defaultShortcuts = os.path.join( __defaultpath__ , self.slugify( defaultGroup ) + ".DATA.xml" )#.encode('utf-8')
        if defaultsOnly:
            paths = [skinShortcuts, defaultShortcuts ]
        else:
            paths = [userShortcuts, skinShortcuts, defaultShortcuts ]
        
        for path in paths:
            path = try_decode( path )
                
            tree = None
            if xbmcvfs.exists( path ):
                file = xbmcvfs.File( path ).read()
                self._save_hash( path, file )
                tree = xmltree.parse( path )
            
            if tree is not None:
                # If this is a user-selected list of shortcuts...
                if path == userShortcuts:
                    if group == "mainmenu":
                        self._get_skin_required( tree, group, profileDir )
                    # Process shortcuts, marked as user-selected                    
                    self._process_shortcuts( tree, group, profileDir, True )
                    
                else:
                    if group == "mainmenu":
                        self._get_skin_required( tree, group, profileDir )
                    self._process_shortcuts( tree, group, profileDir )
                                        
                log( " - Loaded file " + path ) 
                return tree
            else:
                self._save_hash( path, None )
                
        # No file loaded
        log( " - No shortcuts" )
        return xmltree.ElementTree( xmltree.Element( "shortcuts" ) )
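    # Illustrative sketch (not part of the original source): for group "mainmenu"
    # the candidate files are tried in this order,
    #   <profile>/addon_data/script.skinshortcuts/mainmenu.DATA.xml   (user copy)
    #   special://skin/shortcuts/mainmenu.DATA.xml                    (skin copy)
    #   <addon>/resources/shortcuts/mainmenu.DATA.xml                 (script default)
    # and the first one that parses is processed and returned; otherwise an empty
    # <shortcuts/> tree is returned.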
                            
    def _process_shortcuts( self, tree, group, profileDir = "special://profile", isUserShortcuts = False, allowAdditionalRequired = True ):
        # This function will process any overrides and add them to the tree ready to be displayed
        #  - We will process graphics overrides, action overrides, visibility conditions
        skinoverrides = self._get_overrides_skin()
        useroverrides = self._get_overrides_user( profileDir )
        
        self._clear_labelID()
        
        # Iterate through all <shortcut/> nodes
        for node in tree.getroot().findall( "shortcut" ):
            # If these are not user shortcuts, remove any locked nodes (in case of naughty skinners!)
            if isUserShortcuts == False:
                searchNode = node.find( "locked" )
                if searchNode is not None:
                    node.remove( searchNode )
                    
            # Remove any labelID node (because it confuses us!)
            searchNode = node.find( "labelID" )
            if searchNode is not None:
                node.remove( searchNode )
                    
            # Get the action
            action = node.find( "action" )
            
            # group overrides: add an additional onclick action for a particular menu
            # this will allow you to close a modal dialog before calling any other window
            # http://forum.kodi.tv/showthread.php?tid=224683
            if skinoverrides != None:
                allGroupOverrides = skinoverrides.findall( "groupoverride" )
                for override in allGroupOverrides:
                    if override.attrib.get( "group" ) == group:
                        newaction = xmltree.SubElement( node, "additional-action" )
                        newaction.text = override.text
                        newaction.set( "condition", override.attrib.get( "condition" ) )
            
            # Generate the labelID
            labelID = self._get_labelID( self.local( node.find( "label" ).text )[3].replace( " ", "" ).lower(), action.text )
            xmltree.SubElement( node, "labelID" ).text = labelID
            
            # If there's no defaultID, set it to the labelID
            defaultID = labelID
            if node.find( "defaultID" ) is not None:
                defaultID = node.find( "defaultID" ).text
            xmltree.SubElement( node, "defaultID" ).text = defaultID
            
            # Check that any version node matches current XBMC version
            version = node.find( "version" )
            if version is not None:
                if __xbmcversion__ != version.text and self.checkVersionEquivalency( version.text, node.find( "action" ) ) == False:
                    tree.getroot().remove( node )
                    self._pop_labelID()
                    continue
                    
            # Add any additional properties (widgets, backgrounds, custom properties) for this shortcut
            xmltree.SubElement( node, "additional-properties" ).text = repr( self.checkAdditionalProperties( group, labelID, defaultID, isUserShortcuts, profileDir ) )
                        
            # Get a skin-overridden icon
            overridenIcon = self._get_icon_overrides( skinoverrides, node.find( "icon" ).text, group, labelID )
            if overridenIcon is not None:
                # Add a new node with the overridden icon
                xmltree.SubElement( node, "override-icon" ).text = overridenIcon
            
            # If the action uses the special://skin protocol, translate it
            if "special://skin/" in action.text:
                action.text = xbmc.translatePath( action.text )
                
            # Get visibility condition
            visibilityCondition = self.checkVisibility( action.text )
            visibilityNode = None
            if visibilityCondition != "":
                visibilityNode = xmltree.SubElement( node, "visibility" )
                visibilityNode.text = visibilityCondition
            
            # Get action and visibility overrides
            overrideTrees = [useroverrides, skinoverrides]
            hasOverriden = False
            for overrideTree in overrideTrees:
                if tree is not None:
                    if hasOverriden == True:
                        continue
                    if overrideTree is not None:
                        for elem in overrideTree.findall( "override" ):
                            # Retrieve group property
                            checkGroup = None
                            if "group" in elem.attrib:
                                checkGroup = elem.attrib.get( "group" )
                            # If the action and (if provided) the group match...
                            # OR if we have a global override specified
                            if (elem.attrib.get( "action" ) == action.text and (checkGroup == None or checkGroup == group)) or (elem.attrib.get( "action" ) == "globaloverride" and checkGroup == group):
                                # Check the XBMC version matches
                                if "version" in elem.attrib:
                                    if elem.attrib.get( "version" ) != __xbmcversion__:
                                        continue
                                    
                                hasOverriden = True
                                # Get the visibility condition
                                condition = elem.find( "condition" )
                                overrideVisibility = None
                                if condition is not None:
                                    overrideVisibility = condition.text
                                
                                # Get the new action
                                for actions in elem.findall( "action" ):
                                    newaction = xmltree.SubElement( node, "override-action" )
                                    if actions.text == "::ACTION::":
                                        newaction.text = action.text
                                    else:
                                        newaction.text = actions.text
                                    if overrideVisibility is not None:
                                        newaction.set( "condition", overrideVisibility )
                                        
                                # If the override provides no action of its own, keep the original
                                # action (adding the override's visibility condition, if any)
                                if len( elem.findall( "action" ) ) == 0:
                                    newaction = xmltree.SubElement( node, "override-action" )
                                    newaction.text = action.text
                                    if overrideVisibility is not None:
                                        newaction.set( "condition", overrideVisibility )
                       
            # Get visibility condition of any skin-provided shortcuts
            if hasOverriden == False and skinoverrides is not None:
                for elem in skinoverrides.findall( "shortcut" ):
                    if elem.text == action.text and "condition" in elem.attrib:
                        if visibilityNode == None:
                            xmltree.SubElement( node, "visibility" ).text = elem.attrib.get( "condition" )
                        else:
                            visibilityNode.text = "[" + visibilityNode.text + "] + [" + elem.attrib.get( "condition" ) + "]"
                            
            # Get any visibility conditions in the .DATA.xml file
            if hasOverriden == False:
                additionalVisibility = node.find( "visible" )
                if additionalVisibility is not None:
                    if visibilityNode == None:
                        xmltree.SubElement( node, "visibility" ).text = additionalVisibility.text
                    else:
                        visibilityNode.text = "[" + visibilityNode.text + "] + [" + additionalVisibility.text + "]"
        
        return tree
        
    def _get_skin_required( self, listitems, group, profileDir ):
        # This function builds a tree of any skin-required shortcuts not currently in the menu
        # Once the tree is built, it sends them to _process_shortcuts for any overrides, etc, then adds them to the menu tree
        
        tree = self._get_overrides_skin()
        if tree is None:
            return
            
        # Get an array of all actions currently in the menu
        actions = []
        for node in listitems.getroot().findall( "shortcut" ):
            for action in node.findall( "action" ):
                actions.append( action.text )
                
        # Get a list of all skin-required shortcuts
        requiredShortcuts = []
        for elem in tree.findall( "requiredshortcut" ):
            if not elem.text in actions:
                # We need to add this shortcut - add it to the listitems
                requiredShortcut = xmltree.SubElement( listitems.getroot(), "shortcut" )
                
                # Label and label2
                xmltree.SubElement( requiredShortcut, "label" ).text = elem.attrib.get( "label" )
                xmltree.SubElement( requiredShortcut, "label2" ).text = xbmc.getSkinDir()
                
                # Icon and thumbnail
                if "icon" in elem.attrib:
                    xmltree.SubElement( requiredShortcut, "icon" ).text = elem.attrib.get( "icon" )
                else:
                    xmltree.SubElement( requiredShortcut, "icon" ).text = "DefaultShortcut.png"
                if "thumb" in elem.attrib:
                    xmltree.SubElement( requiredShortcut, "thumb" ).text = elem.attrib.get( "thumbnail" )
                    
                # Action
                xmltree.SubElement( requiredShortcut, "action" ).text = elem.text
                
                # Locked
                # - This is set to the skin directory, so it will only be locked in the management directory when using this skin
                xmltree.SubElement( requiredShortcut, "lock" ).text = xbmc.getSkinDir()
                
                
    def _get_icon_overrides( self, tree, icon, group, labelID, setToDefault = True ):        
        # This function will get any icon overrides based on labelID or group
        if icon is None:
            return
            
        # If the icon is a VAR or an INFO, we aren't going to override
        if icon.startswith( "$" ):
            return icon
            
        oldicon = None
        newicon = icon
        
        # Check for overrides
        if tree is not None:
            for elem in tree.findall( "icon" ):
                if oldicon is None:
                    if ("labelID" in elem.attrib and elem.attrib.get( "labelID" ) == labelID) or ("image" in elem.attrib and elem.attrib.get( "image" ) == icon):
                        # LabelID matched
                        if "group" in elem.attrib:
                            if elem.attrib.get( "group" ) == group:
                                # Group also matches - change icon
                                oldicon = icon
                                newicon = elem.text
                                
                        elif "grouping" not in elem.attrib:
                            # No group - change icon
                            oldicon = icon
                            newicon = elem.text
        
        if not xbmc.skinHasImage( newicon ) and setToDefault == True:
            newicon = self._get_icon_overrides( tree, "DefaultShortcut.png", group, labelID, False )
        return newicon
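    # Illustrative sketch (not part of the original source): given a skin
    # overrides.xml entry such as
    #   <icon labelID="movies" group="mainmenu">special://skin/extras/movies.png</icon>
    # a call like
    #   self._get_icon_overrides( skinoverrides, "DefaultMovies.png", "mainmenu", "movies" )
    # would return that path, provided the skin actually ships the image; otherwise
    # it falls back to the override for "DefaultShortcut.png". The file names above
    # are hypothetical.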
        
    def _get_overrides_script( self ):
        # Get overrides.xml provided by script
        if "script" in self.overrides:
            return self.overrides[ "script" ]
        overridePath = os.path.join( __defaultpath__, "overrides.xml" )
        try:
            tree = xmltree.parse( overridePath )
            self._save_hash( overridePath, xbmcvfs.File( overridePath ).read() )
            self.overrides[ "script" ] = tree
            return tree
        except:
            if xbmcvfs.exists( overridePath ):
                log( "Unable to parse script overrides.xml. Invalid xml?" )
                self._save_hash( overridePath, xbmcvfs.File( overridePath ).read() )
            else:
                self._save_hash( overridePath, None )
            self.overrides[ "script" ] = None
            return None
    def _get_overrides_skin( self ):
        # Get overrides.xml provided by skin 
        if "skin" in self.overrides:
            return self.overrides[ "skin" ]
        overridePath = os.path.join( __skinpath__, "overrides.xml" )
        try:
            tree = xmltree.parse( overridePath )
            self._save_hash( overridePath, xbmcvfs.File( overridePath ).read() )
            self.overrides[ "skin" ] = tree
            return tree
        except:
            if xbmcvfs.exists( overridePath ):
                log( "Unable to parse skin overrides.xml. Invalid xml?" )
                self._save_hash( overridePath, xbmcvfs.File( overridePath ).read() )
            else:
                self._save_hash( overridePath, None )
            self.overrides[ "skin" ] = None
            return None
    def _get_overrides_user( self, profileDir = "special://profile" ):
        # Get overrides.xml provided by user
        if "user" in self.overrides:
            return self.overrides[ "user" ]
        overridePath = os.path.join( profileDir, "overrides.xml" )
        try:
            tree = xmltree.parse( overridePath )
            self._save_hash( overridePath, xbmcvfs.File( overridePath ).read() )
            self.overrides[ "user" ] = tree
            return tree
        except:
            if xbmcvfs.exists( overridePath ):
                log( "Unable to parse user overrides.xml. Invalid xml?" )
                self._save_hash( overridePath, xbmcvfs.File( overridePath ).read() )
            else:
                self._save_hash( overridePath, None )
            self.overrides[ "user" ] = None
            return None
    def _get_additionalproperties( self, profileDir ):
        # Load all saved properties (widgets, backgrounds, custom properties)
        if self.currentProperties:
            return[ self.currentProperties, self.defaultProperties ]
            
        self.currentProperties = []
        self.defaultProperties = []
        
        path = os.path.join( profileDir, "addon_data", __addonid__, xbmc.getSkinDir().decode('utf-8') + ".properties" ).encode( "utf-8" )
        #path = os.path.join( __datapath__ , xbmc.getSkinDir().decode('utf-8') + ".properties" )
        if xbmcvfs.exists( path ):
            # The properties file exists, load from it
            try:
                file = xbmcvfs.File( path ).read()
                listProperties = ast.literal_eval( file )
                self._save_hash( path, file )
                
                for listProperty in listProperties:
                    # listProperty[0] = groupname
                    # listProperty[1] = labelID
                    # listProperty[2] = property name
                    # listProperty[3] = property value
                    self.currentProperties.append( [listProperty[0], listProperty[1], listProperty[2], listProperty[3]] )
            except:
                pass
            
        # Load skin defaults (in case we need them...)
        tree = self._get_overrides_skin()
        if tree is not None:
            for elemSearch in [["widget", tree.findall( "widgetdefault" )], ["widget:node", tree.findall( "widgetdefaultnode" )], ["background", tree.findall( "backgrounddefault" )], ["custom", tree.findall( "propertydefault" )] ]:
                for elem in elemSearch[1]:
                    # Get labelID and defaultID
                    labelID = elem.attrib.get( "labelID" )
                    defaultID = labelID
                    if "defaultID" in elem.attrib:
                        defaultID = elem.attrib.get( "defaultID" )
                    if elemSearch[0] == "custom":
                        # Custom property
                        if "group" not in elem.attrib:
                            self.defaultProperties.append( ["mainmenu", labelID, elem.attrib.get( 'property' ), elem.text, defaultID ] )
                        else:
                            self.defaultProperties.append( [elem.attrib.get( "group" ), labelID, elem.attrib.get( 'property' ), elem.text, defaultID ] )
                    else:
                        # Widget or background
                        if "group" not in elem.attrib:
                            self.defaultProperties.append( [ "mainmenu", labelID, elemSearch[ 0 ].split( ":" )[ 0 ], elem.text, defaultID ] )
                            
                            if elemSearch[ 0 ] == "background":
                                # Get and set the background name
                                backgroundName = self._getBackgroundName( elem.text )
                                if backgroundName is not None:
                                    self.defaultProperties.append( [ "mainmenu", labelID, "backgroundName", backgroundName, defaultID ] )
                                
                            if elemSearch[0] == "widget":
                                # Get and set widget type and name
                                widgetDetails = self._getWidgetNameAndType( elem.text )
                                if widgetDetails is not None:
                                    self.defaultProperties.append( [ "mainmenu", labelID, "widgetName", widgetDetails[ "name" ], defaultID ] )
                                    if "type" in widgetDetails:
                                        self.defaultProperties.append( [ "mainmenu", labelID, "widgetType", widgetDetails[ "type" ], defaultID ] )
                                    if "path" in widgetDetails:
                                        self.defaultProperties.append( [ "mainmenu", labelID, "widgetPath", widgetDetails[ "path" ], defaultID ] )
                                    if "target" in widgetDetails:
                                        self.defaultProperties.append( [ "mainmenu", labelID, "widgetTarget", widgetDetails[ "target" ], defaultID ] )
                            if elemSearch[0] == "widget:node":
                                # Set all widget properties from the default
                                if "label" in elem.attrib:
                                    self.defaultProperties.append( [ "mainmenu", labelID, "widgetName", elem.attrib.get( "label" ), defaultID ] )
                                if "type" in elem.attrib:
                                    self.defaultProperties.append( [ "mainmenu", labelID, "widgetType", elem.attrib.get( "type" ), defaultID ] )
                                if "path" in elem.attrib:
                                    self.defaultProperties.append( [ "mainmenu", labelID, "widgetPath", elem.attrib.get( "path" ), defaultID ] )
                                if "target" in elem.attrib:
                                    self.defaultProperties.append( [ "mainmenu", labelID, "widgetTarget", elem.attrib.get( "target" ), defaultID ] )
                        else:
                            self.defaultProperties.append( [ elem.attrib.get( "group" ), labelID, elemSearch[ 0 ].split( ":" )[ 0 ], elem.text, defaultID ] )
                            
                            if elemSearch[ 0 ] == "background":
                                # Get and set the background name
                                backgroundName = self._getBackgroundName( elem.text )
                                if backgroundName is not None:
                                    self.defaultProperties.append( [ elem.attrib.get( "group" ), labelID, "backgroundName", backgroundName, defaultID ] )
                            
                            if elemSearch[0] == "widget":
                                # Get and set widget type and name
                                widgetDetails = self._getWidgetNameAndType( elem.text )
                                if widgetDetails is not None:
                                    self.defaultProperties.append( [ elem.attrib.get( "group" ), labelID, "widgetName", widgetDetails[ "name" ], defaultID ] )
                                    if "type" in widgetDetails:
                                        self.defaultProperties.append( [ elem.attrib.get( "group" ), labelID, "widgetType", widgetDetails[ "type" ], defaultID ] )
                                    if "path" in widgetDetails:
                                        self.defaultProperties.append( [ elem.attrib.get( "group" ), labelID, "widgetPath", widgetDetails[ "path" ], defaultID ] )
                                    if "target" in widgetDetails:
                                        self.defaultProperties.append( [ elem.attrib.get( "group" ), labelID, "widgetTarget", widgetDetails[ "target" ], defaultID ] )
                            if elemSearch[ 0 ] == "widget:node":
                                # Set all widget properties from the default
                                if "label" in elem.attrib:
                                    self.defaultProperties.append( [ elem.attrib.get( "group" ), labelID, "widgetName", elem.attrib.get( "label" ), defaultID ] )
                                if "type" in elem.attrib:
                                    self.defaultProperties.append( [ elem.attrib.get( "group" ), labelID, "widgetType", elem.attrib.get( "type" ), defaultID ] )
                                if "path" in elem.attrib:
                                    self.defaultProperties.append( [ elem.attrib.get( "group" ), labelID, "widgetPath", elem.attrib.get( "path" ), defaultID ] )
                                if "target" in elem.attrib:
                                    self.defaultProperties.append( [ elem.attrib.get( "group" ), labelID, "widgetTarget", elem.attrib.get( "target" ), defaultID ] )
                                        
        returnVal = [ self.currentProperties, self.defaultProperties ]
        return returnVal
        
    def _getWidgetNameAndType( self, widgetID ):
        if widgetID in self.widgetNameAndType:
            return self.widgetNameAndType[ widgetID ]
        tree = self._get_overrides_skin()
        if tree is not None:
            for elem in tree.findall( "widget" ):
                if elem.text == widgetID:
                    widgetInfo = { "name": elem.attrib.get( "label" ) }
                    if "type" in elem.attrib:
                        widgetInfo[ "type" ] = elem.attrib.get( "type" )
                    if "path" in elem.attrib:
                        widgetInfo[ "path" ] = elem.attrib.get( "path" )
                    if "target" in elem.attrib:
                        widgetInfo[ "target" ] = elem.attrib.get( "target" )
                    self.widgetNameAndType[ widgetID ] = widgetInfo
                    return widgetInfo
                        
        self.widgetNameAndType[ widgetID ] = None
        return None
        
    def _getBackgroundName( self, backgroundID ):
        if backgroundID in self.backgroundName:
            return self.backgroundName[ backgroundID ]
        tree = self._get_overrides_skin()
        if tree is not None:
            for elem in tree.findall( "background" ):
                if elem.text == backgroundID:
                    returnString = elem.attrib.get( "label" )
                    self.backgroundName[ backgroundID ] = returnString
                    return returnString
                        
        self.backgroundName[ backgroundID ] = None
        return None
                
    def _reset_backgroundandwidgets( self ):
        # This function resets all skin properties used to identify if specific backgrounds or widgets are active
        tree = self._get_overrides_skin()
        if tree is not None:
            for elem in tree.findall( "widget" ):
                xbmc.executebuiltin( "Skin.Reset(skinshortcuts-widget-" + elem.text + ")" )
            for elem in tree.findall( "background" ):
                xbmc.executebuiltin( "Skin.Reset(skinshortcuts-background-" + elem.text + ")" )
                
    
    def createNiceName ( self, item ):
        # Translate certain localized strings into non-localized form for labelID
        if item == "10006":
            return "videos"
        if item == "342":
            return "movies"
        if item == "20343":
            return "tvshows"
        if item == "32022":
            return "livetv"
        if item == "10005":
            return "music"
        if item == "20389":
            return "musicvideos"
        if item == "10002":
            return "pictures"
        if item == "12600":
            return "weather"
        if item == "10001":
            return "programs"
        if item == "32032":
            return "dvd"
        if item == "10004":
            return "settings"
        if item == "32087":
            return "radio"
        else:
            return item.lower( ).replace( " ", "" )
            
    def checkVisibility ( self, action ):
        action = action.lower().replace( " ", "" )
        
        # Return whether mainmenu items should be displayed
        if action == "activatewindow(weather)":
            return "!IsEmpty(Weather.Plugin)"
        elif action.startswith( "activatewindowandfocus(mypvr" ) or action.startswith( "playpvr" ):
            return "system.getbool(pvrmanager.enabled)"
        elif action.startswith( "activatewindow(videos,movie" ):
            return "Library.HasContent(Movies)"
        elif action.startswith( "activatewindow(videos,recentlyaddedmovies" ):
            return "Library.HasContent(Movies)"
        elif action.startswith( "activatewindow(videos,tvshow" ) or action.startswith( "activatewindow(videos,tvshow" ):
            return "Library.HasContent(TVShows)"
        elif action.startswith( "activatewindow(videos,recentlyaddedepisodes" ):
            return "Library.HasContent(TVShows)"
        elif action.startswith( "activatewindow(videos,musicvideo" ):
            return "Library.HasContent(MusicVideos)"
        elif action.startswith( "activatewindow(musiclibrary,addons" ):
            return ""
        elif action.startswith( "activatewindow(musiclibrary,musicvideo" ):
            return "Library.HasContent(MusicVideos)"
        elif action.startswith( "activatewindow(videos,recentlyaddedmusicvideos" ):
            return "Library.HasContent(MusicVideos)"
        elif action.startswith( "activatewindow(musiclibrary," ):
            return "Library.HasContent(Music)"
        elif action == "xbmc.playdvd()":
            return "System.HasMediaDVD"
            
        # Power menu visibilities
        elif action == "quit()" or action == "quit":
            return "System.ShowExitButton"
        elif action == "powerdown()" or action == "powerdown":
            return "System.CanPowerDown"
        elif action == "alarmclock(shutdowntimer,shutdown())":
            return "!System.HasAlarm(shutdowntimer) + [System.CanPowerDown | System.CanSuspend | System.CanHibernate]"
        elif action == "cancelalarm(shutdowntimer)":
            return "System.HasAlarm(shutdowntimer)"
        elif action == "suspend()" or action == "suspend":
            return "System.CanSuspend"
        elif action == "hibernate()" or action == "hibernate":
            return "System.CanHibernate"
        elif action == "reset()" or action == "reset":
            return "System.CanReboot"
        elif action == "system.logoff":
            return "[System.HasLoginScreen | IntegerGreaterThan(System.ProfileCount,1)] + System.Loggedon"
        elif action == "mastermode":
            return "System.HasLocks"
        elif action == "inhibitidleshutdown(true)":
            return "System.HasShutdown +!System.IsInhibit"
        elif action == "inhibitidleshutdown(false)":
            return "System.HasShutdown + System.IsInhibit"
            
        # New Helix visibility conditions
        elif action.startswith( "activatewindow(tv" ):
            return "PVR.HasTVChannels"
        elif action.startswith( "activatewindow(radio" ):
            return "PVR.HasRadioChannels"
            
        # Video node visibility
        elif action.startswith( "activatewindow(videos,videodb://" ) or action.startswith( "activatewindow(10025,videodb://" ) or action.startswith( "activatewindow(Videos,library://video/" ) or action.startswith( "activatewindow(10025,library://video/" ):
            path = action.split( "," )
            if path[ 1 ].endswith( ")" ):
                path[ 1 ] = path[ 1 ][:-1]
            return NODE.get_visibility( path[ 1 ] )
        # Audio node visibility
        elif action.startswith( "activatewindow(musiclibrary,musicdb://" ) or action.startswith( "activatewindow(10502,musicdb://" ) or action.startswith( "activatewindow(MusicLibrary,library://music/" ) or action.startswith( "activatewindow(10502,library://music/" ):
            path = action.split( "," )
            if path[ 1 ].endswith( ")" ):
                path[ 1 ] = path[ 1 ][:-1]
            return NODE.get_visibility( path[ 1 ] )
        
        return ""
    def checkVersionEquivalency( self, version, action, type = "shortcuts" ):
        # Check whether the version specified for a shortcut has an equivalency
        # to the version of Kodi we're running
        trees = [ self._get_overrides_skin(), self._get_overrides_script() ]
        # Set up so we can handle both groupings and shortcuts in one
        if type == "shortcuts":
            if action is None:
                action = ""
            else:
                action = action.text
            findElem = "shortcutEquivalent"
            findAttrib = "action"
        if type == "groupings":
            if action is None:
                action = ""
            findElem = "groupEquivalent"
            findAttrib = "condition"
        for tree in trees:
            if tree is None or tree.find( "versionEquivalency" ) is None:
                continue
            for elem in tree.find( "versionEquivalency" ).findall( findElem ):
                if elem.attrib.get( findAttrib ) is not None and elem.attrib.get( findAttrib ).lower() != action.lower():
                    # Actions don't match
                    continue
                if int( elem.attrib.get( "version" ) ) > int( __xbmcversion__ ):
                    # This version of Kodi is older than the shortcut is intended for
                    continue
                # The actions match, and the version isn't too old, so
                # now check it's not too new
                if elem.text == "All":
                    # This shortcut matches all newer versions
                    return True
                elif int( elem.text ) >= int( __xbmcversion__ ):
                    return True
                # The version didn't match
                break
        return False
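    # Illustrative sketch (not part of the original source): a skin can declare
    # version equivalencies in its overrides.xml, e.g. (hypothetical values)
    #   <versionEquivalency>
    #       <shortcutEquivalent action="ActivateWindow(Weather)" version="14">16</shortcutEquivalent>
    #   </versionEquivalency>
    # meaning a weather shortcut tagged for an older Kodi version is still accepted
    # while the running version lies between 14 and 16 (or on any newer version if
    # the element text is "All").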
        
    def checkAdditionalProperties( self, group, labelID, defaultID, isUserShortcuts, profileDir ):
        # Return any additional properties, including widgets and backgrounds
        allProperties = self._get_additionalproperties( profileDir )
        currentProperties = allProperties[1]
        
        returnProperties = []
        
        # This returns two lists...
        #  allProperties[0] = Saved properties
        #  allProperties[1] = Default properties
        
        if isUserShortcuts:
            currentProperties = allProperties[0]
            
        # Loop through the current properties, looking for the current item
        for currentProperty in currentProperties:
            # currentProperty[0] = Group name
            # currentProperty[1] = labelID
            # currentProperty[2] = Property name
            # currentProperty[3] = Property value
            # currentProperty[4] = defaultID
            if labelID is not None and currentProperty[0] == group and currentProperty[1] == labelID:
                returnProperties.append( [ currentProperty[2], currentProperty[3] ] )
            elif len( currentProperty ) != 4:
                if defaultID is not None and currentProperty[0] == group and currentProperty[4] == defaultID:
                    returnProperties.append( [ currentProperty[2], currentProperty[3] ] )
                
        return returnProperties
            
        
    def checkShortcutLabelOverride( self, action ):
        tree = self._get_overrides_skin()
        if tree is not None:
            elemSearch = tree.findall( "availableshortcutlabel" )
            for elem in elemSearch:
                if elem.attrib.get( "action" ).lower() == action.lower():
                    # This matches :) Check if we're also overriding the type
                    if "type" in elem.attrib:
                        return [ elem.text, elem.attrib.get( "type" ) ]
                    else:
                        return [ elem.text ]
        return None
        
        
    def _save_hash( self, filename, file ):
        if file is not None:
            hasher = hashlib.md5()
            hasher.update( file )
            hashlist.list.append( [filename, hasher.hexdigest()] )
        else:
            hashlist.list.append( [filename, None] )
            
            
    # in-place prettyprint formatter
    def indent( self, elem, level=0 ):
        i = "\n" + level*"\t"
        if len(elem):
            if not elem.text or not elem.text.strip():
                elem.text = i + "\t"
            if not elem.tail or not elem.tail.strip():
                elem.tail = i
            for elem in elem:
                self.indent(elem, level+1)
            if not elem.tail or not elem.tail.strip():
                elem.tail = i
        else:
            if level and (not elem.tail or not elem.tail.strip()):
                elem.tail = i
                
                
    def local( self, data ):
        # This is our function to manage localisation
        # It accepts strings in one of the following formats:
        #   #####, ::LOCAL::#####, ::SCRIPT::#####
        #   $LOCALISE[#####], $SKIN[####|skin.id|last translation]
        #   $ADDON[script.skinshortcuts #####]
        # It returns a four-element list:
        #   [Number/$SKIN, $LOCALIZE/$ADDON/local string, local string, number]
        #   [Used for saving, used for building xml, used for displaying in dialog, the raw string id]
        
        if data is None:
            return ["","","",""]
        
        data = try_decode( data )
        skinid = None
        lasttranslation = None
        
        # Get just the integer of the string, for the input forms where this is valid
        if not data.find( "::SCRIPT::" ) == -1:
            data = data[10:]
        elif not data.find( "::LOCAL::" ) == -1:            
            data = data[9:]
        elif not data.find( "$LOCALIZE[" ) == -1:
            data = data.replace( "$LOCALIZE[", "" ).replace( "]", "" ).replace( " ", "" )
        elif not data.find( "$ADDON[script.skinshortcuts" ) == -1:
            data = data.replace( "$ADDON[script.skinshortcuts", "" ).replace( "]", "" ).replace( " ", "" )
        
        # Get the integer and skin id, from $SKIN input forms
        elif not data.find( "$SKIN[" ) == -1:
            splitdata = data[6:-1].split( "|" )
            data = splitdata[0]
            skinid = splitdata[1]
            lasttranslation = splitdata[2]
            
        if data.isdigit():
            if int( data ) >= 31000 and int( data ) < 32000:
                # A number from a skin - we're going to return a $SKIN[#####|skin.id|last translation] unit
                if skinid is None:
                    # Set the skinid to the current skin id
                    skinid = xbmc.getSkinDir()
                    
                # If we're on the same skin as the skinid, get the latest translation
                if skinid == xbmc.getSkinDir():
                    lasttranslation = xbmc.getLocalizedString( int( data ) )
                    returnString = "$SKIN[" + data + "|" + skinid + "|" + lasttranslation + "]"
                    return [ returnString, "$LOCALIZE[" + data + "]", lasttranslation, data ]
                    
                returnString = "$SKIN[" + data + "|" + skinid + "|" + lasttranslation + "]"
                return [ returnString, lasttranslation, lasttranslation, data ]
                
            elif int( data ) >= 32000 and int( data ) < 33000:
                # A number from the script
                return [ data, "$ADDON[script.skinshortcuts " + data + "]", __language__( int( data ) ), data ]
                
            else:
                # A number from XBMC itself (probably)
                return [ data, "$LOCALIZE[" + data + "]", xbmc.getLocalizedString( int( data ) ), data ]
                
        # This isn't anything we can localize, just return it (four copies, matching the usual return shape)
        return [ data, data, data, data ]
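    # Illustrative sketch (not part of the original source), showing the shape of
    # the four-element list returned for numeric string ids (the translated text
    # depends on the running Kodi language file):
    #   local( "342" )
    #     -> [ "342", "$LOCALIZE[342]", xbmc.getLocalizedString( 342 ), "342" ]
    #   local( "::SCRIPT::32022" )
    #     -> [ "32022", "$ADDON[script.skinshortcuts 32022]", __language__( 32022 ), "32022" ]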
    @staticmethod
    def smart_truncate(string, max_length=0, word_boundaries=False, separator=' '):
        string = string.strip(separator)
        if not max_length:
            return string
        if len(string) < max_length:
            return string
        if not word_boundaries:
            return string[:max_length].strip(separator)
        if separator not in string:
            return string[:max_length]
        truncated = ''
        for word in string.split(separator):
            if word:
                next_len = len(truncated) + len(word) + len(separator)
                if next_len <= max_length:
                    truncated += '{0}{1}'.format(word, separator)
        if not truncated:
            truncated = string[:max_length]
        return truncated.strip(separator)
    def slugify(self, text, entities=True, decimal=True, hexadecimal=True, max_length=0, word_boundary=False, separator='-', convertInteger=False):
        # Handle integers
        if convertInteger and text.isdigit():
            text = "NUM-" + text
    
        # text to unicode
        if type(text) != types.UnicodeType:
            text = unicode(text, 'utf-8', 'ignore')
        # transliterate unicode to ASCII via unidecode (e.g. CJK characters)
        text = unidecode(text)
        # text back to unicode
        if type(text) != types.UnicodeType:
            text = unicode(text, 'utf-8', 'ignore')
        # character entity reference
        if entities:
            text = CHAR_ENTITY_REXP.sub(lambda m: unichr(name2codepoint[m.group(1)]), text)
        # decimal character reference
        if decimal:
            try:
                text = DECIMAL_REXP.sub(lambda m: unichr(int(m.group(1))), text)
            except:
                pass
        # hexadecimal character reference
        if hexadecimal:
            try:
                text = HEX_REXP.sub(lambda m: unichr(int(m.group(1), 16)), text)
            except:
                pass
        # translate
        text = unicodedata.normalize('NFKD', text)
        if sys.version_info < (3,):
            text = text.encode('ascii', 'ignore')
        # replace unwanted characters
        text = REPLACE1_REXP.sub('', text.lower()) # strip apostrophes entirely (rather than replacing them with '-')
        text = REPLACE2_REXP.sub('-', text.lower())
        # remove redundant -
        text = REMOVE_REXP.sub('-', text).strip('-')
        # smart truncate if requested
        if max_length > 0:
            text = self.smart_truncate(text, max_length, word_boundary, '-')
        if separator != '-':
            text = text.replace('-', separator)
        return text
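    # Illustrative sketch (not part of the original source):
    #   slugify( "Movie Trailers!" )               -> "movie-trailers"
    #   slugify( "123", convertInteger = True )    -> "num-123"
    #   slugify( "My Videos", separator = "_" )    -> "my_videos"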
    # ----------------------------------------------------------------
    # --- Functions that should get their own module in the future ---
    # --- (when xml building functions are revamped/simplified) ------
    # ----------------------------------------------------------------
    def getListProperty( self, onclick ):
        # For ActivateWindow elements, extract the path property
        if onclick.startswith( "ActivateWindow" ):
            # An ActivateWindow - Let's start by removing the 'ActivateWindow(' and the ')'
            listProperty = onclick
            # Handle (the not uncommon) situation where the trailing ')' has been forgotten
            if onclick.endswith( ")" ):
                listProperty = onclick[ :-1 ]
            listProperty = listProperty.split( "(", 1 )[ 1 ]
            # Split what we've got left on commas
            listProperty = listProperty.split( "," )
            # Get the part of the onclick that we're actually interested in
            if len( listProperty ) == 1:
                # 'elementWeWant'
                return listProperty[ 0 ]
            elif len( listProperty ) == 2 and listProperty[ 1 ].lower().replace( " ", "" ) == "return":
                # 'elementWeWant' 'return'
                return listProperty[ 0 ]
            elif len( listProperty ) == 2:
                # 'windowToActivate' 'elementWeWant'
                return listProperty[ 1 ]
            elif len( listProperty ) == 3:
                # 'windowToActivate' 'elementWeWant' 'return'
                return listProperty[ 1 ]
            else:
                # Situation we haven't anticipated - log the issue and return original onclick
                log( "Unable to get 'list' property for shortcut %s" %( onclick ) )
                return onclick
        else:
            # Not an 'ActivateWindow' - return the onclick
            return onclick
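    # Illustrative sketch (not part of the original source):
    #   getListProperty( "ActivateWindow(Videos,MovieTitles,return)" ) -> "MovieTitles"
    #   getListProperty( "ActivateWindow(MovieTitles)" )               -> "MovieTitles"
    #   getListProperty( "PlayMedia(somefile.mp3)" )                   -> "PlayMedia(somefile.mp3)"
    # ("somefile.mp3" is a hypothetical path used only for illustration)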
 | 
	gpl-2.0 | 1,424,236,759,551,068,200 | 47.631683 | 268 | 0.522334 | false | 
| 
	ecuvelier/PPAT | 
	mathTools/field.py | 
	1 | 
	34962 | 
	# -*- coding: utf-8 -*-
"""
Created on 2013-2014
Author : Edouard Cuvelier
Affiliation : Université catholique de Louvain - ICTEAM - UCL Crypto Group
Address : Place du Levant 3, 1348 Louvain-la-Neuve, BELGIUM
email : [email protected]
"""
from numpy import *
import gmpy
from Crypto.Random.random import randint
import random as rd
import tools.fingexp as fingexp
import tools.utils as utils
class Field(fingexp.FingExp):
    'Class for Field'
    def __init__(self,p):
        '''Defines the modulus p which must be a prime
        '''
        self.F = self
        self.p = gmpy.mpz(p) # prime modulus
        self.char = self.p # characteristic
        self.q = self.p+1 # order+1 #TODO : correct?
        assert gmpy.is_prime(p)
        self.rep = None
        self.g = None
        '''
        g is a random quadratic residue used to compute square roots and it is
        initialized the first time a square root is computed
        '''
        self.to_fingerprint = ["p"]
        self.to_export = {"fingerprint": [],"value": ["p"]}
        super(Field, self).__init__()
    def load(self, data, fingerprints):
        self.p = utils.b64tompz(data["p"])
    def one(self):
        'unit element for multiplication'
        return FieldElem(1, self)
    def zero(self):
        'unit element for addition'
        return FieldElem(0,self)
    def elem(self,x):
        ''' return an element of value x
        '''
        if isinstance(x,FieldElem):
            assert x.F == self
            return x
        m = gmpy.mpz(1)
        assert isinstance(x,int) or isinstance(x, long) or type(x)==type(m)
        return FieldElem(x,self)
    def random(self,low=1,high=None):
        ''' Return a random element of the Field
        '''
        if high == None :
            high = int(self.p-1)
        rand = randint(low,high)
        return self.elem(rand)
    def __eq__(self, other):
        'testing if we are working in the same field'
        try:
            return (self.p == other.p)
        except:
            return False
    def add(self, a, b):
        '''
        field operation: addition mod p
        '''
        return FieldElem((a.val + b.val) % self.p, self)
    def sub(self, a, b):
        '''
        field operation: subtraction mod p
        '''
        return FieldElem((a.val - b.val) % self.p, self)
    def neg(self, a):
        '''
        field operation: opposite mod p
        '''
        return FieldElem((self.p - a.val ) % self.p, self)
    def mul(self, a, b):
        '''
        field operation: multiplication of field elements
        '''
        """
        if isinstance(a,FieldElem) and isinstance(b, FieldElem) and not a.F == b.F :
            raise Exception("multiplication between elements of different fields")
        """
        if not isinstance(b,FieldElem) :
            # Multiplication by a scalar
            if b<0:
                return self.smul(-a,-b)
            return self.smul(a,b)
        else:
            return self.pmul(a,b)
    def smul(self,a,b):
        ''' Return a*b where a or b is scalar
        '''
        if not isinstance(b,FieldElem):
            # b is scalar
            #return self.dbleAndAdd(a,a,b)
            return FieldElem((gmpy.mpz(b)*a.val)%(self.p),self)
            #return self.pmul(a,a.F.elem(b))
        else :
            # a is scalar
            #return self.dbleAndAdd(b,b,a)
            return self.smul(b,a)
    def sm(self,b,a):
        ''' Quick multiplication between a field element a and a scalar b
        '''
        return FieldElem((gmpy.mpz(b)*a.val)%(self.p),self)
    def pmul(self,a,b):
        ''' product between two field element in Fp
        '''
        return FieldElem((a.val * b.val) % self.p, self)
    def dbleAndAdd(self,P,Pp,n):
        'return n*P using double and add technique'
        #print "dblaad"
        if n == 0 :
            return self.zero();
        if n == 1 :
            return P
        elif n%2 == 1 :
            Q = self.dbleAndAdd(P,Pp,(n-1)/2)
            return P+Q+Q
        elif n%2 == 0 :
            Q = self.dbleAndAdd(P,Pp,n/2)
            return Q+Q
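    # Illustrative note (not part of the original source): the recursion above
    # computes n*P with O(log n) additions; e.g. for n = 5 it evaluates
    #   5*P = P + 2*(2*P)
    # where 2*P is produced by the n = 2 branch.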
    def powop(self, a, b):
        'return a**b'
        m = gmpy.mpz(1)
        #self.count = 0
        'exponentiation by a scalar'
        if not isinstance(b, int) and not isinstance(b, long) and not type(b)==type(m):
            raise Exception("Exponentation by a non integer, long or mpz")
        c = b
        if c > self.char-1 or c<0:
            c = b%(self.char-1)
        #elif :
        #    return self.powop(a.invert(),(-c))
        if c == 0 :
            assert not a.val%self.char == 0
            return self.one()
        elif c == 1 :
            return a
        else :
            return self.sqrtAndMultply(a,a, c)
            #return FieldElem(pow(a.val,b,self.char))
    def sqrtAndMultply(self,P,Pp,n):
        'return P**n using square and multiply technique'
        if n == 0 :
            return self.one()
        elif n == 1 :
            return P
        elif n%2 == 1 :
            Q = self.sqrtAndMultply(P,Pp,(n-1)/2)
            return P*self.square(Q)
        elif n%2 == 0 :
            Q = self.sqrtAndMultply(P,Pp,n/2)
            return self.square(Q)
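    # Illustrative note (not part of the original source): square-and-multiply
    # evaluates P**n with O(log n) squarings; e.g. for n = 5 it computes
    #   P**5 = P * (P**2)**2
    # where P**2 is produced by the n = 2 branch.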
    def square(self,a):
        '''
        This method returns the square of a
        '''
        return FieldElem(pow(a.val,2, self.p), self)
    def invert(self,a):
        assert not (a.val%self.p == 0) # Do not invert zero!
        return FieldElem(gmpy.invert(a.val, self.p), self)
    #def invertible(self,a):
        #return not int(a.invert().val) == 0
    def div(self,a,b):
        assert not (b.val%self.p == 0) # Do not invert zero!
        return FieldElem((a.val*self.invert(b).val % self.p),self)
    def findnonresidue(self):
        '''
        find a random non quadratic residue in the Field F,
        that is, find g that is not a square in F, this is
        needed to compute square roots
        '''
        g=self.random()
        while g.isquadres():
            #print g, " is quad res in ", self
            g = self.random()
        return g
    def __str__(self):
        return "F_"+str(self.p)
    def jsonable(self):
        return {'type': 'FqField', 'p': self.p}
class FieldElem():
    def __init__(self, val, F):
        '''Creating a new field element.
        '''
        #assert isinstance(F,Field)
        self.F = F
        self.val = gmpy.mpz(val)
        self.poly = polynom(self.F,[self])
        #self.to_fingerprint = ["F", "val"]
        #self.to_export = {"fingerprint": ["F"],
        #                  "value": ["val"]}
        #super(FieldElem, self).__init__()
    def __eq__(self, other):
        try:
            return ((self.val%self.F.char) == (other.val%self.F.char) and self.F == other.F)
        except:
            return False
    def __add__(self, other):
        return self.F.add(self, other)
    def __neg__(self):
        return self.F.neg(self)
    def __sub__(self, other):
        return self.F.sub(self, other)
    def __radd__(self, other):
        return self.__add__(other)
    def __mul__(self, other):
        return self.F.mul(self, other)
    def __rmul__(self, other):
        return self.__mul__(other)
    def __pow__(self, e):
        return self.F.powop(self, e)
    def __div__(self,other):
        return self.F.div(self,other)
    def __truediv__(self,other):
        return self.F.div(self,other)
    def __str__(self):
        return str(self.val)
    def iszero(self):
        return self == self.F.zero()
    def invert(self):
        return self.F.invert(self)
    def invertible(self):
        return self.F.invertible(self)
    def isquadres(self):
        ''' This method returns True if the element is a non-zero quadratic
            residue mod q; it returns False otherwise
        '''
        if (self+self.F.zero()).iszero() :
            # case of element is zero
            return False
        else :
            # If F's order is prime we use Euler's criterium
            c = self**((self.F.q-1)/2) #TODO: Optimize this
            return c==self.F.one()
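    # Illustrative sketch (not part of the original source), assuming gmpy and
    # pycrypto are available: in F_7 the element 2 is a quadratic residue
    # (3**2 = 2 mod 7) while 3 is not, so
    #   F = Field(7); F.elem(2).isquadres()   # -> True   (2**3 = 1 mod 7)
    #   F.elem(3).isquadres()                 # -> False  (3**3 = -1 mod 7)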
    def squareroot(self):
        ''' This method returns a square root of an element of the field
            using the Tonelli-Shanks algorithm.
            Careful: the method does not check that the element actually is a
            quadratic residue; if it is not, the final assertion fails.
            Verification has to be done before calling the method.
        '''
        g = self.F.g
        if g == None :
            g = self.F.findnonresidue()
            self.F.g = g
        q = self.F.q
        s=0
        t=self.F.q-1
        while t%2==0:
            s=s+1
            t=t/2
        # q-1 = (2**s)*t
        e = 0
        for i in range(2,s+1):
            b = 2**(i-1)
            b1 = b*2   # b1 = 2**i
            c = ((self)*(g**(-e)))**((q-1)/b1)
            if not c==self.F.one() :
                e = e+b
        h = self*(g**(-e))
        b = (g**(e/2))*(h**((t+1)/2))
        assert b**2 == self # FAILURE to find square root
        return b
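    # Illustrative note (not part of the original source): for a quadratic residue
    # a, a.squareroot() returns an element b with b**2 == a; calling it on a
    # non-residue is signalled only by the AssertionError from the final assert,
    # not by an explicit check.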
    def fingerprint(self):
        return fingexp.fingerprint(self.val)
    def jsonable(self):
        return {'type': 'FieldElem', 'F': self.F, 'val': self.val}
class ExtensionField(Field):
    '''
    This class defines extension fields and inherits field methods.
    Depending on the degree of the extension field, we use
    different algorithms to optimize the operations
    '''
    def __init__(self,F,irpoly,g=None,rep=None):
        '''Define the base field (or extension field) and the irreducible polynomial.
           F is the base field on top of which the extension field is built.
           irpoly is the irreducible polynomial used to build the extension
           field as F/irpoly.
           g is a non quadratic residue used to compute square roots; if it is
           set to None, computing a square root will initialize g.
           rep is the representation letter of the root of irpoly
           (note that letter 'A' is reserved for the complex extension field).
        '''
        self.F = F
        self.irpoly = irpoly
        self.deg = len(irpoly.coef) # degree of the irreducible polynomial + 1
        assert self.deg > 0
        self.q = self.F.q**(self.deg-1) # order of the Field
        self.tabular = self.table()
        if rep is None :
            self.rep = rd.choice(['B','C','D','E','F','G','H','J','K','L'])
            #Choose a random representation letter
        else :
            self.rep = rep
        self.char = F.char
        self.primefield = gmpy.is_prime(self.char)
        self.g = g # g is needed to compute square roots, it is a non quadratic residue
        self.to_fingerprint = ["F","irpoly"]
        self.to_export = {"fingerprint": [],"value": ["F","irpoly"]}
    def one(self):
        'unit element for multiplication'
        One = [self.F.zero()]*(self.deg-1)
        One[self.deg-2]= self.F.one()
        return ExtensionFieldElem(self,polynom(self.F,One))
    def zero(self):
        'unit element for addition'
        Zero = [self.F.zero()]*(self.deg-1)
        return ExtensionFieldElem(self,polynom(self.F,Zero))
    def unit(self):
        ''' root of the irreducible polynomial
        e.g. return element 1*A+0 (or the complex value i) if the irpoly is X**2+1
        '''
        I = self.zero()
        I.poly.coef[-2]=self.F.one()
        return I
    def elem(self,x):
        ''' Provided that x belongs to F, return an element of the extension field
            of value x
        '''
        P = self.zero()
        P.poly.coef[-1] = x
        return P
    def random(self):
        ''' Return a random element of the Extension Field
        '''
        polycoef = [0]*(self.deg-1)
        for i in range(self.deg-1):
            polycoef[i] = self.F.random()
        poly = polynom(self.F,polycoef)
        return ExtensionFieldElem(self,poly)
    def __eq__(self, other):
        'testing if we are working in the same extension field'
        try:
            return (self.F == other.F and self.irpoly == other.irpoly)
        except:
            return False
    def add(self, a, b):
        '''
        field operation: addition of polynomials > addition of the coefficients in the appropriate field
        '''
        #assert a.F == b.F  and a.F.F == self.F
        if not a.deg == b.deg :
            a = self.reduc(a)
            b = self.reduc(b)
        polysum = [0]*a.deg
        for i in range(a.deg):
            polysum[i]=a.poly.coef[i]+b.poly.coef[i]
        P = polynom(self.F,polysum)
        return ExtensionFieldElem(self,P)
    def sub(self, a, b):
        '''
        field operation: subtraction of polynomials > subtraction of each coefficient in the appropriate field
        '''
        #assert a.F == b.F and a.F.F == self.F
        if not a.deg == b.deg :
            a = self.reduc(a)
            b = self.reduc(b)
        c = self.neg(b)
        return self.add(a,c)
    def neg(self, a):
        '''
        field operation: opposite of a polynomial > opposite of each coefficient in the appropriate field
        '''
        #assert a.F.F == self.F
        ap = [0]*a.deg
        for i in range(a.deg):
            ap[i] = -a.poly.coef[i]
        P = polynom(self.F,ap)
        return ExtensionFieldElem(self,P)
    def smul(self,a,b):
        ''' Return a*b where a or b is scalar
        '''
        if not isinstance(b,FieldElem):
            # b is scalar
            A = a.poly.coef
            Pc = [0]*len(A)
            for i in range(len(Pc)):
                Pc[i] = A[i]*gmpy.mpz(b)
            return ExtensionFieldElem(self,polynom(self.F,Pc))
        else :
            # a is scalar
            return self.smul(b,a)
    def pmul(self,a,b):
        '''Multiplication between polynomials
        '''
        #assert a.F == b.F and a.F.F == self.F
        if not a.deg == b.deg :
            a = self.reduc(a)
            b = self.reduc(b)
        # Simpler notations for reading
        A = a.poly.coef
        B = b.poly.coef
        k = self.deg-1 # degree of the extension field
        if k == 2 and self.F.rep =='A':
            # This is the case where the extension field is Fp2
            # We assume here that the irreducible polynomial is X**2+1 (beta=-1)
            # Complex multiplication
            a0,a1,b0,b1 = A[0].val,A[1].val,B[0].val,B[1].val
            p = self.char
            v0 = a0*b0
            v1 = a1*b1
            c0 = ((a0+a1)*(b0+b1)-v0-v1)%p
            c1 = (v1-v0)%p
            c0e = FieldElem(c0,self.F)
            c1e = FieldElem(c1,self.F)
            cp = polynom(self.F,[c0e,c1e])
            C = ExtensionFieldElem(self,cp)
            return C
        elif k == 2:
            # In this case, use Karatsuba multiplication algorithm
            # notations
            a0 = A[0]
            a1 = A[1]
            b0 = B[0]
            b1 = B[1]
            beta = -self.irpoly.coef[-1]
            v0 = self.F.pmul(a0,b0)
            v1 = self.F.pmul(a1,b1)
            c0 = self.F.pmul((a0+a1),(b0+b1))-v0-v1 # coefficient of X
            c1 = v1 + self.F.pmul(v0,beta) # independent term
            cp = polynom(self.F,[c0,c1])
            C = ExtensionFieldElem(self,cp)
            return C
        elif k == 3:
            # In this case, use Karatsuba multiplication algorithm
            # notations
            a0,a1,a2 = A
            b0,b1,b2 = B
            beta = -self.irpoly.coef[-1]
            v0,v1,v2 = self.F.pmul(a0,b0), self.F.pmul(a1,b1), self.F.pmul(a2,b2)
            c0 = self.F.pmul((a0+a2),(b0+b2))-v0+v1-v2  # coefficient of X**2
            c1 = self.F.pmul((a2+a1),(b2+b1))-v2-v1+self.F.pmul(beta,v0) # coefficient of X
            c2 = v2+self.F.pmul(beta,(self.F.pmul((a1+a0),(b1+b0))-v1-v0)) # independent term
            cp = polynom(self.F,[c0,c1,c2])
            C = ExtensionFieldElem(self,cp)
            return C
        else :
           prod = convolve(A,B)
           return self.reduc2(prod) # return EProd % ired. polynomial
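    # Illustrative note, not part of the original module: the k == 2 branch above
    # is Karatsuba multiplication, using 3 base-field products instead of 4.
    # For a = a0*X + a1 and b = b0*X + b1 over F[X] with X**2 reducing to beta:
    #
    #   v0 = a0*b0 ; v1 = a1*b1
    #   a*b = ((a0+a1)*(b0+b1) - v0 - v1)*X + (v1 + beta*v0)
    #
    # e.g. with beta = -1: (X + 2)*(3*X + 4) = 3*X**2 + 10*X + 8 -> 10*X + 5.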
    def square(self,a):
        ''' This algorithm returns the square of a in the field,
            using different methods depending on whether the degree
            of the extension is 2, 3 or more.
        '''
        #print a.F
        #print self
        assert a.F == self
        if not a.deg == self.deg-1 :
            a = self.reduc(a)
        #notations
        A = a.poly.coef
        k = self.deg-1 # degree of the extension
        if k == 2 and self.F.rep == 'A':
            # Using the complex multiplication
            # This is the case where the extension field is Fp2
            # We assume here that the irreducible polynomial is X**2+1 (beta=-1)
            a1, a0 = A[0].val,A[1].val
            p = self.char
            v0 = a0*a1
            c0 = ((a0+a1)*(a0-a1))%p
            c1 = (v0+v0)%p
            c0e = FieldElem(c0,self.F)
            c1e = FieldElem(c1,self.F)
            cp = polynom(self.F,[c1e,c0e])
            C = ExtensionFieldElem(self,cp)
            return C
        elif k == 2:
            # Using the complex multiplication
            a1, a0 = A
            beta = -self.irpoly.coef[-1]
            v0 = self.F.pmul(a0,a1)
            c0 = self.F.pmul((a0+a1),(a0+self.F.pmul(a1,beta)))-v0-self.F.pmul(beta,v0)
            c1 = v0+v0
            cp = polynom(self.F,[c1,c0])
            return ExtensionFieldElem(self,cp)
        elif k == 3:
            # Using Chung-Hasan Squaring2
            a2,a1,a0 = A
            #print a0
            #print 'a0',a0.F, a0.F.deg-1
            #print 'self',self.F, self.F.deg-1
            assert a0.F == self.F
            beta = -self.irpoly.coef[-1]
            s0 = self.F.square(a0)
            t1 = self.F.pmul(a0,a1)
            s1 = t1+t1
            s2 = self.F.square((a0-a1+a2))
            t3 = a1*a2
            s3 = t3+t3
            s4 = self.F.square(a2)
            c0 = s0 + self.F.pmul(beta,s3)
            c1 = s1 + self.F.pmul(beta,s4)
            c2 = s1 + s2 + s3 - s0 -s4
            cp = polynom(self.F,[c2,c1,c0])
            return ExtensionFieldElem(self,cp)
        else :
            return self.F.pmul(a,a)
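    # Illustrative note, not part of the original module: the k == 2, rep 'A'
    # branch above is complex squaring,
    #   (a0 + a1*i)**2 = (a0+a1)*(a0-a1) + 2*a0*a1*i,
    # e.g. (2 + 3*i)**2 = (2+3)*(2-3) + 12*i = -5 + 12*i.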
    def invert(self,a):
        ''' This method returns the inverse of a in the field.
            The inverse is computed either by determining the Bezout coefficients
            using the extended Euclidean algorithm, or by specialized algorithms
            depending on the degree of the extension (2 or 3).
        '''
        #assert self.invertible(a) #The element must be invertible
        assert a.F == self
        k = self.deg-1
        if k == 2 and self.F.rep == 'A':
            # inversion in a quadratic (degree-2) extension of a prime field
            # This is the case where the extension field is Fp2
            # We assume here that the irreducible polynomial is X**2+1 (mod=-1)
            A = a.poly.coef
            a1,a0 = A[0].val,A[1].val # a = a0+a1*i
            p = self.char
            norm = a0*a0+a1*a1
            invnorm = gmpy.invert(norm,p)
            c0 = (a0*invnorm) % p
            c1 = (-a1*invnorm) % p
            c0e = FieldElem(c0,self.F)
            c1e = FieldElem(c1,self.F)
            invap = polynom(self.F,[c1e,c0e])
            inva = ExtensionFieldElem(self,invap)
            return inva
        elif k == 2 :
            # inversion in a quadratic (degree-2) extension of the base field
            A = a.poly.coef
            a1,a0 = A[0],A[1] # a = a0+a1*i
            #print 'A',A
            #print 'a1',a1
            mod = self.irpoly.coef[-1] # i**2 = -mod
            #a1b,a0b,modb = self.F.elem(a1), self.F.elem(a0),self.F.elem(mod)
            #print 'a1b',a1b
            #a1b2 = self.F.square(a1b)
            a12 = self.F.square(a1)
            #mid = self.F.pmul(a1b2,modb)
            mid = self.F.pmul(a12,mod)
            #norm = self.F.square(a0b)+mid
            norm = self.F.square(a0)+mid
            #invnorm = self.F.invert(a0**2+mod*a1**2)
            #invnorm = self.F.invert(norm.poly.coef[-1])
            invnorm = self.F.invert(norm)
            c = self.F.pmul(a0,invnorm) # c = -a1/(a0**2+mod*a1**2)
            d = -self.F.pmul(a1,invnorm)
            invap = polynom(self.F,[d,c])
            inva = ExtensionFieldElem(self,invap)
            return inva
        elif k == 3 :
            # inversion in a cubic (degree-3) extension
            A = a.poly.coef
            a2,a1,a0 = A[0],A[1],A[2]
            mod = -self.irpoly.coef[-1]
            z0 = self.F.zero()
            z1 = self.F.one()
            if a0 == z0:
                #a0 = 0
                if a1 == z0:
                    #a1 = 0
                    c0,c1,c2 = z0, self.F.invert(self.F.pmul(a2,mod)), z0
                elif a2 == z0:
                    #a2 = 0
                    c0,c1,c2 = z0,z0,self.F.invert(self.F.pmul(a1,mod))
                else :
                    #a1,a2 != 0
                    a22 = self.F.square(a2)
                    a12 = self.F.square(a1)
                    c2 = self.F.pmul(a12,self.F.invert((self.F.pmul(self.F.pmul(a22,a2),mod)+self.F.pmul(self.F.pmul(a12,a1),mod))))
                    c1 = self.F.pmul((z1-self.F.pmul(self.F.pmul(a1,c2),mod)),self.F.invert(self.F.pmul(a2,mod)))
                    c0 = self.F.pmul((-(self.F.pmul(self.F.pmul(a2,mod),c2))),self.F.invert(a1))
            else :
                #a0 != 0
                if a1 == z0 and a2 == z0:
                    #a1 = 0 , a2 = 0
                    c0,c1,c2 = self.F.invert(a0),z0,z0
                else :
                    a12 = self.F.pmul(a1,a2)
                    a12m = self.F.pmul(a12,mod)
                    a00 = self.F.square(a0)
                    abis = a00-a12m
                    if abis == z0:
                        #a0**2-(a1*a2*mod) = 0
                        a11 = self.F.square(a1)
                        a22 = self.F.square(a2)
                        a02 = self.F.pmul(a0,a2)
                        a01 = self.F.pmul(a0,a1)
                        c2 = self.F.pmul(-a1,self.F.invert(self.F.pmul((a02-a11),mod)))
                        c1 = self.F.pmul(-a2,self.F.invert(a01-self.F.pmul(a22,mod)))
                        a1c2 = self.F.pmul(a1,c2)
                        a2c1 = self.F.pmul(a2,c1)
                        c0 = self.F.pmul((z1-self.F.pmul(a1c2+a2c1,mod)),self.F.invert(a0))
                    else :
                        #a0**2-(a1*a2*mod) != 0
                        if a1 == z0:
                            #a1 = 0
                            inva0 = self.F.invert(a0)
                            a02 = self.F.pmul(a0,a2)
                            a000 = self.F.pmul(a00,a0)
                            a22 = self.F.square(a2)
                            a222 = self.F.pmul(a22,a2)
                            mm = self.F.square(mod)
                            a222mm = self.F.pmul(a222,mm)
                            c2 = self.F.pmul(-a02,self.F.invert(a000+a222mm))
                            a02m = self.F.pmul(a02,mod)
                            a02mc2 = self.F.pmul(a02m,c2)
                            inva00 = self.F.square(inva0)
                            c1 = self.F.pmul(-a02mc2,inva00)
                            a2m = self.F.pmul(a2,mod)
                            a2mc1 = self.F.pmul(a2m,c1)
                            c0 = self.F.pmul(z1-a2mc1,inva0)
                        elif a2 == z0:
                            #a2 = 0
                            a11 = self.F.square(a1)
                            a111 = self.F.pmul(a11,a1)
                            a000 = self.F.pmul(a00,a0)
                            a111m = self.F.pmul(a111,mod)
                            inva0 = self.F.invert(a0)
                            c2 = self.F.pmul(a11,self.F.invert(a111m+a000))
                            a11m = self.F.pmul(a11,mod)
                            a11mc2 = self.F.pmul(a11m,c2)
                            inva00 = self.F.square(inva0)
                            c1 = self.F.pmul(a11mc2-a1,inva00)
                            a1m = self.F.pmul(a1,mod)
                            a1mc2 = self.F.pmul(a1m,c2)
                            c0 = self.F.pmul(z1-a1mc2,inva0)
                        else :
                            #a1,a2 != 0
                            a01 = self.F.pmul(a0,a1)
                            a22 = self.F.square(a2)
                            a22m = self.F.pmul(a22,mod)
                            a02 = self.F.pmul(a0,a2)
                            a11 = self.F.square(a1)
                            abus = a01-a22m
                            abos = self.F.pmul(a02-a11,mod)
                            invabis = self.F.invert(abis)
                            abb = self.F.pmul(abus,invabis)
                            abb1 = self.F.pmul(abb,a1)
                            abbbos = self.F.pmul(abb,abos)
                            c2 = self.F.pmul(abb1-a2,self.F.invert(abis-abbbos))
                            abosc2 = self.F.pmul(abos,c2)
                            c1 = self.F.pmul(-a1-abosc2,invabis)
                            a1c2 = self.F.pmul(a1,c2)
                            a2c1 = self.F.pmul(a2,c1)
                            c0 = self.F.pmul(z1-self.F.pmul(a1c2+a2c1,mod),self.F.invert(a0))
            invap = polynom(self.F,[c2,c1,c0])
            inva = ExtensionFieldElem(self,invap)
            return inva
        else :
            # inversion in a field of char. != 2,3
            # this inversion takes a longer time (than previous method)
            # it uses extended Euclid's algorithm
            P = ExtensionFieldElem(self,self.irpoly)
            r,u,v = self.extendedeuclide(P,a)
            n,d = r.poly.truedeg()
            assert n == self.deg-2
            c = r.poly.coef[len(r.poly.coef)-1].invert()
            cp = polynom(self.F,[c])
            ce = ExtensionFieldElem(self,cp)
            return ce*v
    def invertible(self,a):
        ''' Return True if a is invertible
        '''
        return not self.reduc(a)==self.zero()
    def div(self,a,b):
        return a*self.invert(b)
    def eucldiv(self,a,b):
        ''' Return a/b and a%b
            a and b are of length d-1 where d is the degree of the irreducible polynomial
        '''
        zero = self.F.zero()
        izero = self.zero()
        d = self.deg
        assert not b.poly.iszero() # Do not divide by zero
        if a.poly.iszero() :
            return izero, izero # quotient is zero, remain is zero
        elif a == b:
            return self.one(), izero # quotient is one, remain is zero
        #Notations
        A = a.poly.coef
        B = b.poly.coef
        n, da = a.poly.truedeg() # position of first non zero elem of a and degree of a
        m, db = b.poly.truedeg() # same for b
        if da<db :
            #  deg(a)<deg(b)
            return izero, a # quotient is zero, remain is a
        elif da==db:
            #deg(a)=deg(b)
            deg = max(d-1,da)
            rc = [zero]*(deg)
            qc = [zero]*(deg)
            q = A[n]/B[m]
            for i in range(1,deg):
                rc[i] = A[n+i]-q*B[m+i]
            qc[deg-1] = q
            rp = polynom(self.F,rc)
            qp = polynom(self.F,qc)
            remain = ExtensionFieldElem(self,rp)
            quotient = ExtensionFieldElem(self,qp)
            return quotient, remain
        else :
            # deg(a)>deg(b)
            deg = max(d-1,da)
            p = deg - da
            rc = [zero]*(deg)
            qc = [zero]*(deg)
            rc[deg-da:] = A[n:]
            pm=0
            while p+pm+db<deg+1:
                #k is the index of the current quotient coefficient
                k = deg-(da-db)-1+pm
                qc[k] = rc[p+pm]/B[m]
                for i in range(db):
                    rc[i+p+pm] = rc[i+p+pm]- qc[k]*B[m+i]
                pm=pm+1
            rp = polynom(self.F,rc)
            qp = polynom(self.F,qc)
            remain = ExtensionFieldElem(self,rp)
            quotient = ExtensionFieldElem(self,qp)
            return quotient, remain
    def reduc(self,a):
        ''' Return a % self.irpoly
        The polynomial a = [a_0,...,a_n-1] is returned modulo the irreducible polynomial
        The reduced polynomial has length at most d-1 where d is the length
        of the irreducible polynomial
        '''
        assert a.F.F == self.F
        if a.poly.iszero() :
            return self.zero()
        elif a.poly == self.irpoly :
            return self.zero()
        elif a.deg < self.deg :
            c = [self.F.zero()]*(self.deg-1-a.deg)
            newacoef = c+a.poly.coef
            newapoly= polynom(self.F, newacoef)
            newaelem = ExtensionFieldElem(self, newapoly)
            return newaelem
        else :
            # Case where a is not zero or the irreducible polynomial and deg(a)>=deg(irpoly)
            q,r = self.eucldiv(a,ExtensionFieldElem(self,self.irpoly))
            r = self.trunc(r)
            return self.reduc(r)
    def reduc2(self,a):
        ''' a is a list of length (d-1)*2-1 (polynomial length).
            This method returns the equivalent element of length d-1
            using the table of equivalences (built from the irreducible polynomial)
            in the method self.table().
        '''
        As = a[:(self.deg-2)]
        Ad = a[(self.deg-2):]
        b = list(dot(As,self.tabular)+Ad)
        newapoly = polynom(self.F,b)
        newa = ExtensionFieldElem(self,newapoly)
        return newa
    def trunc(self,a):
        '''Return an ExtensionFieldElem of length d-1 where d = deg(irpoly)
        '''
        d = self.deg
        if a.deg == d-1:
            return a
        c = a.poly.coef[a.deg-d+1:] # the (d-1) last elements of a
        cp = polynom(self.F,c)
        return ExtensionFieldElem(self,cp)
    def table(self):
        ''' This method returns the table (stored in self.tabular)
           which is used to compute the reduction after a multiplication
           between two elements
        '''
        d = self.deg
        T = zeros((d-2,d-1),dtype=object_)
        Pc = self.irpoly.coef[1:]
        for i in range(0,d-2):
           Qc = [self.F.zero()]*(2*(d-1)-1)
           Qc[i+1:i+d] = Pc
           Qp = polynom(self.F,Qc)
           Qe = ExtensionFieldElem(self,Qp)
           Q = self.reduc(-Qe)
           T[i] = array(Q.poly.coef)
        return T
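    # Illustrative example, not part of the original module: for a quadratic
    # extension built with irpoly = X**2 + 1 (coef [1, 0, 1], so d = 3), the
    # single table row is [0, -1], encoding X**2 = -1.  reduc2() then maps a
    # degree-2 product [A2, A1, A0] to A2*[0, -1] + [A1, A0] = [A1, A0 - A2],
    # i.e. A2*X**2 + A1*X + A0 = A1*X + (A0 - A2) mod X**2 + 1.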
    def extendedeuclide(self,a,b):
        '''Return s,u,v such that s = u*a + v*b, where s is the gcd of a and b.
        This method is used to compute the inverse of a mod b (when s = 1).
        '''
        #init
        one = self.one()
        zero = self.zero()
        s = a
        u = one
        v = zero
        sp = b
        up = zero
        vp =  one
        #loop : invariants are s = ua+vb and sp = up*a+vp*b
        while not sp.poly.iszero() :
            q,r = self.eucldiv(s,sp)
            s,u,v,sp,up,vp = sp, up, vp, r, u-up*q,v-vp*q
        return self.reduc(s),self.reduc(u),self.reduc(v)
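    # Illustrative note, not part of the original module: invert() uses this with
    # a = irpoly and b = the element to invert.  From s = u*irpoly + v*b and
    # irpoly = 0 in the extension field, s = v*b there, so b**(-1) = v * s**(-1)
    # whenever the gcd s is a nonzero constant.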
    def __str__(self):
        return str(self.F)+"/"+str(self.irpoly)
    def jsonable(self):
        return {'type': 'Field Extension', 'F': self.F, 'irpoly': self.irpoly, 'degree':self.deg-1}
class ExtensionFieldElem(FieldElem):
    def __init__(self,F,poly):
        '''Define the Extension Field and the representative polynomial
        '''
        self.F = F
        self.poly = poly
        self.siz = len(poly.coef)
        self.deg = self.siz
    def __str__(self):
        x = self.F.rep
        p = self.poly
        s = '('
        if self.siz == 1 :
            s = s+str(p.coef[0])
        if self.siz == 2 :
            s = s+str(p.coef[0])+'*'+x+' + '+str(p.coef[1])
        if self.siz > 2 :
            s =s+str(p.coef[0])+'*'+x+'**'+str(self.siz-1)
            for i in range(1,self.siz-2):
                s = s+' + '+str(p.coef[i])+'*'+x+'**'+str(self.siz-1-i)
            s = s+' + '+str(p.coef[self.siz-2])+'*'+x +' + '+str(p.coef[self.siz-1])
        return s+')'
    def __eq__(self,other):
        try:
            return self.F == other.F and self.poly == other.poly
        except:
            return False
    def fingerprint(self):
        return self.poly.fingerprint()
    def jsonable(self):
        return {'type': 'ExtensionFieldElem', 'F': self.F, 'poly': self.poly, 'size': self.siz}
class polynom:
    ''' This class represents a polynomial written P = c_nX**n+...c_1X+c_0
        c_0,...,c_n are in the Field F (which can be an ExtensionField) so they are either FieldElem or ExtensionFieldElem
        coef is a list : coef = [c_n,...,c_0] of length n+1
    '''
    def __init__(self,F,coef):
        self.F = F # The field to which the coefficients belong
        if isinstance(coef,list):
            self.coef = coef # A list of coefficients, by convention in decreasing order of degree
            self.deg = len(coef) # The degree+1 of the polynomial
        else :
            #coef is not a list but a single element
            self.coef = [coef]
            self.deg = 1
    def __eq__(self,other):
        try:
            return (self.F == other.F and self.coef == other.coef)
        except:
            return False
    def __str__(self):
        # Not consistent with representation letter of the fields
        x = self.F.rep
        if x is None:
            x = 'X'
        s = '('
        if self.deg == 1 :
            s = s+str(self.coef[0])
        if self.deg == 2 :
            s = s+str(self.coef[0])+'*'+x+' + '+str(self.coef[1])
        if self.deg > 2 :
            s =s+str(self.coef[0])+'*'+x+'**'+str(self.deg-1)
            for i in range(1,self.deg-2):
                s = s+' + '+str(self.coef[i])+'*'+x+'**'+str(self.deg-1-i)
            s = s+' + '+str(self.coef[self.deg-2])+'*'+x +' + '+str(self.coef[self.deg-1])
        return s+')'
    def fingerprint(self):
        L = []
        for c in self.coef:
            L.append(c.fingerprint())
        return fingexp.fingerprint(L)
    def iszero(self):
        '''Return True if it is a zero polynomial (each coefficient is zero)
           This does not return True if the polynomial is the polynomial that generates the extension field
        '''
        cond = True
        for i in self.coef:
            pcond = i.iszero()
            cond = pcond*cond
        return cond
    def truedeg(self):
        '''Return the position of the first non zero coefficient and the actual degree of the polynomial
        '''
        if self.iszero():
            return 0,0
        n = 0
        while self.coef[n]==self.F.zero():
            n = n+1
        # n  is the position of the first non zero coeff of the polynomial
        return n, self.deg-n  # position and actual degree of the polynomial
    def jsonable(self):
        return {'type': 'polynomial', 'F': self.F, 'coeficients': self.coef, 'degree': self.deg}
 | 
	apache-2.0 | 8,680,854,202,601,265,000 | 32.616346 | 132 | 0.490461 | false | 
| 
	blueboxgroup/keystone | 
	keystone/tests/test_v3_endpoint_policy.py | 
	2 | 
	10121 | 
	# Copyright 2014 IBM Corp.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.
from testtools import matchers
from keystone.tests import test_v3
class TestExtensionCase(test_v3.RestfulTestCase):
    EXTENSION_NAME = 'endpoint_policy'
    EXTENSION_TO_ADD = 'endpoint_policy_extension'
class EndpointPolicyTestCase(TestExtensionCase):
    """Test endpoint policy CRUD.
    In general, the controller layer of the endpoint policy extension is really
    just marshalling the data around the underlying manager calls. Given that
    the manager layer is tested in depth by the backend tests, the tests we
    execute here concentrate on ensuring we are correctly passing and
    presenting the data.
    """
    def setUp(self):
        super(EndpointPolicyTestCase, self).setUp()
        self.policy = self.new_policy_ref()
        self.policy_api.create_policy(self.policy['id'], self.policy)
        self.service = self.new_service_ref()
        self.catalog_api.create_service(self.service['id'], self.service)
        self.endpoint = self.new_endpoint_ref(self.service['id'], enabled=True)
        self.catalog_api.create_endpoint(self.endpoint['id'], self.endpoint)
        self.region = self.new_region_ref()
        self.catalog_api.create_region(self.region)
    def assert_head_and_get_return_same_response(self, url, expected_status):
        self.get(url, expected_status=expected_status)
        self.head(url, expected_status=expected_status)
    # endpoint policy crud tests
    def _crud_test(self, url):
        # Test when the resource does not exist also ensures
        # that there is not a false negative after creation.
        self.assert_head_and_get_return_same_response(url, expected_status=404)
        self.put(url, expected_status=204)
        # test that the new resource is accessible.
        self.assert_head_and_get_return_same_response(url, expected_status=204)
        self.delete(url, expected_status=204)
        # test that the deleted resource is no longer accessible
        self.assert_head_and_get_return_same_response(url, expected_status=404)
    def test_crud_for_policy_for_explicit_endpoint(self):
        """PUT, HEAD and DELETE for explicit endpoint policy."""
        url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
               '/endpoints/%(endpoint_id)s') % {
                   'policy_id': self.policy['id'],
                   'endpoint_id': self.endpoint['id']}
        self._crud_test(url)
    def test_crud_for_policy_for_service(self):
        """PUT, HEAD and DELETE for service endpoint policy."""
        url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
               '/services/%(service_id)s') % {
                   'policy_id': self.policy['id'],
                   'service_id': self.service['id']}
        self._crud_test(url)
    def test_crud_for_policy_for_region_and_service(self):
        """PUT, HEAD and DELETE for region and service endpoint policy."""
        url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
               '/services/%(service_id)s/regions/%(region_id)s') % {
                   'policy_id': self.policy['id'],
                   'service_id': self.service['id'],
                   'region_id': self.region['id']}
        self._crud_test(url)
    def test_get_policy_for_endpoint(self):
        """GET /endpoints/{endpoint_id}/policy."""
        self.put('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
                 '/endpoints/%(endpoint_id)s' % {
                     'policy_id': self.policy['id'],
                     'endpoint_id': self.endpoint['id']},
                 expected_status=204)
        self.head('/endpoints/%(endpoint_id)s/OS-ENDPOINT-POLICY'
                  '/policy' % {
                      'endpoint_id': self.endpoint['id']},
                  expected_status=200)
        r = self.get('/endpoints/%(endpoint_id)s/OS-ENDPOINT-POLICY'
                     '/policy' % {
                         'endpoint_id': self.endpoint['id']},
                     expected_status=200)
        self.assertValidPolicyResponse(r, ref=self.policy)
    def test_list_endpoints_for_policy(self):
        """GET /policies/%(policy_id}/endpoints."""
        self.put('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
                 '/endpoints/%(endpoint_id)s' % {
                     'policy_id': self.policy['id'],
                     'endpoint_id': self.endpoint['id']},
                 expected_status=204)
        r = self.get('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
                     '/endpoints' % {
                         'policy_id': self.policy['id']},
                     expected_status=200)
        self.assertValidEndpointListResponse(r, ref=self.endpoint)
        self.assertThat(r.result.get('endpoints'), matchers.HasLength(1))
    def test_endpoint_association_cleanup_when_endpoint_deleted(self):
        url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
               '/endpoints/%(endpoint_id)s') % {
                   'policy_id': self.policy['id'],
                   'endpoint_id': self.endpoint['id']}
        self.put(url, expected_status=204)
        self.head(url, expected_status=204)
        self.delete('/endpoints/%(endpoint_id)s' % {
            'endpoint_id': self.endpoint['id']})
        self.head(url, expected_status=404)
    def test_region_service_association_cleanup_when_region_deleted(self):
        url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
               '/services/%(service_id)s/regions/%(region_id)s') % {
                   'policy_id': self.policy['id'],
                   'service_id': self.service['id'],
                   'region_id': self.region['id']}
        self.put(url, expected_status=204)
        self.head(url, expected_status=204)
        self.delete('/regions/%(region_id)s' % {
            'region_id': self.region['id']})
        self.head(url, expected_status=404)
    def test_region_service_association_cleanup_when_service_deleted(self):
        url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
               '/services/%(service_id)s/regions/%(region_id)s') % {
                   'policy_id': self.policy['id'],
                   'service_id': self.service['id'],
                   'region_id': self.region['id']}
        self.put(url, expected_status=204)
        self.head(url, expected_status=204)
        self.delete('/services/%(service_id)s' % {
            'service_id': self.service['id']})
        self.head(url, expected_status=404)
    def test_service_association_cleanup_when_service_deleted(self):
        url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
               '/services/%(service_id)s') % {
                   'policy_id': self.policy['id'],
                   'service_id': self.service['id']}
        self.put(url, expected_status=204)
        self.get(url, expected_status=204)
        self.delete('/policies/%(policy_id)s' % {
            'policy_id': self.policy['id']})
        self.head(url, expected_status=404)
    def test_service_association_cleanup_when_policy_deleted(self):
        url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
               '/services/%(service_id)s') % {
                   'policy_id': self.policy['id'],
                   'service_id': self.service['id']}
        self.put(url, expected_status=204)
        self.get(url, expected_status=204)
        self.delete('/services/%(service_id)s' % {
            'service_id': self.service['id']})
        self.head(url, expected_status=404)
class JsonHomeTests(TestExtensionCase, test_v3.JsonHomeTestMixin):
    EXTENSION_LOCATION = ('http://docs.openstack.org/api/openstack-identity/3/'
                          'ext/OS-ENDPOINT-POLICY/1.0/rel')
    PARAM_LOCATION = 'http://docs.openstack.org/api/openstack-identity/3/param'
    JSON_HOME_DATA = {
        EXTENSION_LOCATION + '/endpoint_policy': {
            'href-template': '/endpoints/{endpoint_id}/OS-ENDPOINT-POLICY/'
                             'policy',
            'href-vars': {
                'endpoint_id': PARAM_LOCATION + '/endpoint_id',
            },
        },
        EXTENSION_LOCATION + '/policy_endpoints': {
            'href-template': '/policies/{policy_id}/OS-ENDPOINT-POLICY/'
                             'endpoints',
            'href-vars': {
                'policy_id': PARAM_LOCATION + '/policy_id',
            },
        },
        EXTENSION_LOCATION + '/endpoint_policy_association': {
            'href-template': '/policies/{policy_id}/OS-ENDPOINT-POLICY/'
                             'endpoints/{endpoint_id}',
            'href-vars': {
                'policy_id': PARAM_LOCATION + '/policy_id',
                'endpoint_id': PARAM_LOCATION + '/endpoint_id',
            },
        },
        EXTENSION_LOCATION + '/service_policy_association': {
            'href-template': '/policies/{policy_id}/OS-ENDPOINT-POLICY/'
                             'services/{service_id}',
            'href-vars': {
                'policy_id': PARAM_LOCATION + '/policy_id',
                'service_id': PARAM_LOCATION + '/service_id',
            },
        },
        EXTENSION_LOCATION + '/region_and_service_policy_association': {
            'href-template': '/policies/{policy_id}/OS-ENDPOINT-POLICY/'
                             'services/{service_id}/regions/{region_id}',
            'href-vars': {
                'policy_id': PARAM_LOCATION + '/policy_id',
                'service_id': PARAM_LOCATION + '/service_id',
                'region_id': PARAM_LOCATION + '/region_id',
            },
        },
    }
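    # Illustrative example, not part of the original test module: a JSON home
    # consumer expands these templates by substituting the href-vars, e.g. (with
    # made-up IDs) the endpoint_policy_association relation
    #   '/policies/{policy_id}/OS-ENDPOINT-POLICY/endpoints/{endpoint_id}'
    # becomes
    #   '/policies/d3e8.../OS-ENDPOINT-POLICY/endpoints/6f21...'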
 | 
	apache-2.0 | 5,333,776,213,407,170,000 | 39.322709 | 79 | 0.571386 | false | 
| 
	niwinz/niwi-web | 
	src/niwi/settings/common.py | 
	1 | 
	5109 | 
	# -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
import os, sys
ADMINS = (
    ('Andrei Antoukh', '[email protected]'),
)
MANAGERS = ADMINS
PROJECT_ROOT = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..')
LOGS_PATH = os.path.join(PROJECT_ROOT, 'logs')
if not os.path.exists(LOGS_PATH):
    os.mkdir(LOGS_PATH)
FCBK_APP_SECRET = ''
FCBK_APP_API = ''
FCBK_ADMIN = ''
SEND_BROKEN_LINK_EMAILS = False
IGNORABLE_404_ENDS = ('.php', '.cgi')
IGNORABLE_404_STARTS = ('/phpmyadmin/',)
DEFAULT_CONTENT_TYPE = "text/html"
HOST = 'http://www.niwi.be'
DATABASES = {
    'default':{
        'ENGINE':'django.db.backends.sqlite3',
        'NAME': os.path.join(PROJECT_ROOT, 'database.sqlite'),
    },
}
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
     }
}
# ETAGS Feature for good cache. (true only for production)
USE_ETAGS=False
#SESSION BACKEND
SESSION_ENGINE='django.contrib.sessions.backends.db'
#SESSION_ENGINE='django.contrib.sessions.backends.cache'
#SESSION_EXPIRE_AT_BROWSER_CLOSE = False
#SESSION_SAVE_EVERY_REQUEST = False
#SESSION_COOKIE_AGE = 1209600 # (2 weeks)
# MAIL OPTIONS
#EMAIL_USE_TLS = False
#EMAIL_HOST = 'localhost'
#EMAIL_HOST_USER = 'user'
#EMAIL_HOST_PASSWORD = 'password'
#EMAIL_PORT = 25
DEFAULT_FROM_EMAIL = "[email protected]"
# Message System
MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'
#MESSAGE_STORAGE = 'django.contrib.messages.storage.session.SessionStorage'
TIME_ZONE = 'Europe/Madrid'
LANGUAGE_CODE = 'en'
LANGUAGES = (
    ('es', _('Spanish')),
    ('en', _('English')),
)
USE_I18N = True
USE_L10N = True
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_URL = '/static/'
ADMIN_MEDIA_PREFIX = '/static/admin/'
USE_TZ = True
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    'django.contrib.staticfiles.finders.FileSystemFinder',
)
SECRET_KEY = '^xur70b9%*5vl+v&t=8v8bs5)5%0em^-oyzuj6#*r*0vcjdy4)'
TEMPLATE_LOADERS = (
    'django.template.loaders.app_directories.Loader',
    'django.template.loaders.filesystem.Loader',
)
MIDDLEWARE_CLASSES = [
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.locale.LocaleMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware', 
    'django.contrib.messages.middleware.MessageMiddleware',
    'niwi.middleware.FacebookMiddleware',
]
WSGI_APPLICATION = 'niwi.wsgi.application'
TEMPLATE_CONTEXT_PROCESSORS = [
    "django.contrib.auth.context_processors.auth",
    "django.core.context_processors.i18n",
    "django.core.context_processors.media",
    'django.core.context_processors.static',
    "django.contrib.messages.context_processors.messages",
    "niwi.web.context.main",
]
ROOT_URLCONF = 'niwi.urls'
TEMPLATE_DIRS = (
)
INSTALLED_APPS = [
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.admin',
    'django.contrib.webdesign',
    'niwi.web',
    'niwi.photo',
    'django_dbconf',
    #'niwi_apps.filepaste',
    #'niwi_apps.twitter_filter',
]
LOGGING = {
    'version': 1,
    'disable_existing_loggers': True,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'formatters': {
        'verbose': {
            'format': '%(levelname)s:%(module)s:%(process)d:%(message)s'
        }
    },
    'handlers': {
        'null': {
            'level':'DEBUG',
            'class':'django.utils.log.NullHandler',
        },
        'console':{
            'level':'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'verbose'
        },
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler',
        },
    },
    'loggers': {
        'django': {
            'handlers':['null'],
            'propagate': True,
            'level':'DEBUG',
        },
        'django.request': {
            'handlers': ['console', 'mail_admins'],
            'level': 'ERROR',
            'propagate': False,
        },
        'django.db.backends':{
            'handlers': ['null'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'niwi':{
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': False
        }
    }
}
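# Illustrative usage, not part of the original settings: with the LOGGING dict
# above, any logger in the 'niwi' namespace propagates up to the 'niwi' logger
# and therefore reaches the console handler, e.g.
#
#   import logging
#   logger = logging.getLogger('niwi.web.views')   # hypothetical module name
#   logger.debug('handled by the console handler configured above')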
FORMAT_MODULE_PATH = 'niwi.formats'
DATE_INPUT_FORMATS = ('%Y-%m-%d', '%m/%d/%Y', '%d/%m/%Y', '%b %d %Y',
'%b %d, %Y', '%d %b %Y', '%d %b, %Y', '%B %d %Y',
'%B %d, %Y', '%d %B %Y', '%d %B, %Y')
 | 
	bsd-3-clause | 544,266,203,126,640,500 | 24.673367 | 84 | 0.607947 | false | 
| 
	wolkstein/OpenDroneMap-GCP_LIST.TXT-generator | 
	gcp_txt_gen.py | 
	1 | 
	12053 | 
	import numpy as np
import cv2
import argparse
import glob
from commands import getstatusoutput
import os
import errno
# --qr_code_compare need "zbar-tools"
# ---Helper---
def roundTraditional(val,digits):
    return round(val+10**(-len(str(val))-1), digits)
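# Illustrative usage, not part of the original script: the tiny epsilon added
# above pushes values like 2.675 (stored as 2.67499999...) over the rounding
# boundary, so e.g.
#
#   # roundTraditional(2.675, 2)  -> 2.68
#   # round(2.675, 2)             -> 2.67 on most platforms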
# ---~Helper---
GCP_DEBUGMODE = False
MIN_MATCH_COUNT = 12
USE_QRCODE = False
FIRST_GCP_POINT = 1
IGNORE_BORDER = 60
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--debug', action='store_true', required=False , help='Will additionally create a DEBUG-GCP_LIST.txt file with point names P1 - Pn in each row')
parser.add_argument('-q', '--qr_code_compare', action='store_true', required=False , help='compare the QR code found in each GCP match against the QR code of the template GCP. This requires QR-code based GCP templates')
parser.add_argument('--min_match_count', type=int, default=12, required=False, help='min_match_count "Default = 12, Min = 4" sets the minimum number of good keypoints required to accept a match of a template in an image')
parser.add_argument('-f', '--first_gcp_point', type=int, default=1, required=False, help='GCP point numbering starts from [n]')
parser.add_argument('-i', '--ignore_border', type=int, default=60, required=False, help='Ignore GCP points found within [px] pixels of the image border.')
args = parser.parse_args()
MIN_MATCH_COUNT = args.min_match_count
if MIN_MATCH_COUNT < 4:
    MIN_MATCH_COUNT = 4
#print MIN_MATCH_COUNT
GCP_DEBUGMODE = args.debug
if GCP_DEBUGMODE:
    print 'GCP_LIST in debugmode is not usable by ODM.'
    
USE_QRCODE = args.qr_code_compare
QRCHECK_DIR = 'qr-checks'
if USE_QRCODE:
    print 'QR-Code Mode'
    try:
        os.makedirs(QRCHECK_DIR)
    except OSError as exception:
        if exception.errno != errno.EEXIST:
            raise
    testzbarimg = getstatusoutput("zbarimg --version")
    if testzbarimg[0] == 32512:
        print "zbarimg not found! Please install zbar-tools"
        quit()
    
FIRST_GCP_POINT = args.first_gcp_point
IGNORE_BORDER = args.ignore_border
# ---Configure---
# input Kordinaten File
COORDSTXT = 'coords.txt'
# output GCP_LIST_FILE
GCP_LIST_FILE = 'gcp_list.txt'
GCP_LIST_FILE_DBUG = 'debug-gcp_list.txt'
# Template Directory
IMAGE_TEMPLATE_DIR = 'gcp-templates'
# Image Directory
IMAGE_DIR = 'images'
# Image file extensions filter
ext_list = ['PNG','png','JPG','jpg','JPEG','jpeg','TIF','tif'];
# valid GCP header
UTM_list = ['WGS84','wgs84']; #unused
# configure and init search algorithm
FLANN_INDEX_KDTREE = 0
index_params = dict(algorithm = FLANN_INDEX_KDTREE, trees = 5)
search_params = dict(checks = 50)
flann = cv2.FlannBasedMatcher(index_params, search_params)
# ---~Configure---
# create image file list
imagePathList=[]
for file in glob.glob(IMAGE_DIR + '/*.*'):
  if file.rsplit('.',1)[1] in ext_list :
      imagePathList.append(file)
      
      
print "Found %d Images" % (len(imagePathList))
# open template Images
tmpImagePathList=[]
templateImg=[]
for file in glob.glob(IMAGE_TEMPLATE_DIR + '/*.*'):
  if file.rsplit('.',1)[1] in ext_list :
      print file
      tmpImagePathList.append(file)
tmpImagePathList.sort()
print "---sorted tmpImagePathList"
for row in tmpImagePathList:
    print row
    templateImg.append(cv2.imread(row,0))
      
print "Found %d Images-Templates" % (len(templateImg))
#quit()
# Init SIFT detector
sift = cv2.SIFT()
templateKp=[]
templateDes=[] 
# Find template images keypoints and discriptor with sift
print "Search keypoints in Template Images"
for i in range(len(templateImg)):
    tmpKp, tempDes = sift.detectAndCompute(templateImg[i],None)
    print "Found %d Keypoints in Template Image Nr. %d" % (len(tmpKp),i)
    templateKp.append(tmpKp)
    templateDes.append(tempDes)
    
# ---Processing---
AllImageResults=[]
for i in range(len(imagePathList)):
    
    imageInfo=[]
    imageInfo.append(imagePathList[i].rsplit('/',1)[1])
    print "Searching for Keypoints in Image %s" %(imagePathList[i])
    image = cv2.imread(imagePathList[i],0)
    imageKp, imageDes = sift.detectAndCompute(image,None)
    print "Found %d Keypoints in Image Nr. %s" % (len(imageKp),imagePathList[i])
    
    for u in range(len(templateKp)):
        
        matches = flann.knnMatch(templateDes[u],imageDes,k=2)
        
        
        # store all the good matches as per Lowe's ratio test.
        good = []
        for m,n in matches:
            if m.distance < 0.7*n.distance:
                good.append(m)
        
        
        centroidXY = [0,0]
        
        paircord=[]
        paircord.append("P%d%s" % (u+FIRST_GCP_POINT,":"))
        localTamplateKp = templateKp[u]
        if len(good)>MIN_MATCH_COUNT:
            src_pts = np.float32([ localTamplateKp[m.queryIdx].pt for m in good ]).reshape(-1,1,2)
            dst_pts = np.float32([ imageKp[m.trainIdx].pt for m in good ]).reshape(-1,1,2)
            M, mask = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC,5.0)
            matchesMask = mask.ravel().tolist()
            h,w = templateImg[u].shape
            pts = np.float32([ [0,0],[0,h-1],[w-1,h-1],[w-1,0] ]).reshape(-1,1,2)
            dst = cv2.perspectiveTransform(pts,M)
     
     
            #print len(good)
            #print dst.shape
            #print dst[0,0,0]
            #print dst[2,0,0]
            
                
            # we calculate the centroid only for rectangular templates:
            # x center = |x0 - x2|/2 + min(x0, x2)
            # y center = |y0 - y2|/2 + min(y0, y2)
                
            if dst[0,0,0] == dst[2,0,0]:
                centroidXY[0] = dst[0,0,0]
                    
            if dst[0,0,0] < dst[2,0,0]:
                centroidXY[0] = (abs(dst[0,0,0] - dst[2,0,0]))/2 + dst[0,0,0]
            else:
                centroidXY[0] = (abs(dst[0,0,0] - dst[2,0,0]))/2 + dst[2,0,0]
                    
            if dst[0,0,1] == dst[2,0,1]:
                centroidXY[1] = dst[0,0,1]
                    
            if dst[0,0,1] < dst[2,0,1]:
                centroidXY[1] = (abs(dst[0,0,1] - dst[2,0,1]))/2 + dst[0,0,1]
            else:
                centroidXY[1] = (abs(dst[0,0,1] - dst[2,0,1]))/2 + dst[2,0,1]
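            # Illustrative example, not part of the original script: if the
            # projected template corners are (100, 40) and (160, 120), the code
            # above gives centroidXY = [|100-160|/2 + 100, |40-120|/2 + 40]
            # = [130, 80], i.e. the midpoint of the bounding box regardless of
            # which corner has the smaller coordinates.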
                
                
            #check for template hits near image borders
            ih,iw = image.shape
            if centroidXY[0] < IGNORE_BORDER or centroidXY[1] < IGNORE_BORDER or centroidXY[0] > iw - IGNORE_BORDER or centroidXY[1] > ih - IGNORE_BORDER:
                print "!!--------------Template near image boarde, we skip this Match"
                paircord.append("F")
                continue
                
                
            # QRCODE Check
            if USE_QRCODE:
                # qrcode test: crop a bordered window around the match (upper-left / lower-right corners)
                print "corner UL = %d, %d" % (centroidXY[0] - (w/2),centroidXY[1] - (h/2) )
                print "corner LR = %d, %d" % (centroidXY[0] + (w/2),centroidXY[1] + (h/2) )
                
                border = 30
                olx = centroidXY[0] - (w/2) - border
                oly = centroidXY[1] - (h/2) - border
                ulx = centroidXY[0] + (w/2) + border
                uly = centroidXY[1] + (h/2) + border
                
                cropped = image[oly:uly, olx:ulx]
                tmpimgname = "%s/tmpImage-Point_%d_at_Pos_%d_%d_%d_%d.png" %(QRCHECK_DIR,u+FIRST_GCP_POINT,olx,oly,ulx,uly)
                tmptemplateimagename = "%s/GCP%d.png" %(QRCHECK_DIR,u+FIRST_GCP_POINT)
                cv2.imwrite(tmpimgname, cropped)
                cv2.imwrite(tmptemplateimagename, templateImg[u])
                
                
                #print "Compare QR Code in Template Nr.: %d against image %s" % (u+FIRST_GCP_POINT,imagePathList[i])
                testQrCodeBild = getstatusoutput("zbarimg -q --raw %s" % (tmpimgname))
                testQrCodeGCPTeplate = getstatusoutput("zbarimg -q --raw %s" % (tmptemplateimagename))
                
                
                
                print "Compare QR-Code from Image %s vs QR-Code from Template %s" %(testQrCodeBild[1],testQrCodeGCPTeplate[1])
                if testQrCodeBild[1] == testQrCodeGCPTeplate[1]:
                    print "HIT! QR CODE is TRUE :) against Position: X=%d, Y=%d" %(centroidXY[0],centroidXY[1])
                    paircord.append("%d %d"%(centroidXY[0],centroidXY[1]))
                else:
                    print "QR-Code FAIL :("
                    paircord.append("F")
            else:
                print "HIT, X=%d Y=%d" %(centroidXY[0],centroidXY[1])
                paircord.append("%d %d"%(centroidXY[0],centroidXY[1]))
                    
        else:
            print "FAIL, Not enough matches are found - %d/%d" % (len(good),MIN_MATCH_COUNT) 
            paircord.append("F")
            
        
        imageInfo.append(paircord)
        print "Find %d matches between P%d and Image %d" %(len(good),u+FIRST_GCP_POINT,i)
        
    AllImageResults.append(imageInfo)
AllImageResults.sort()
# create the gcp_txt file
infile = open(COORDSTXT, 'r')
outfile = open(GCP_LIST_FILE, 'w')
if GCP_DEBUGMODE:
    debugfile = open(GCP_LIST_FILE_DBUG, 'w')
# example line
# 544256.7 5320919.9 5 3044 2622 IMG_0525.jpg
sortbyPmatches=[]
print "--------------Results--------------"
for i in range(len(AllImageResults)):
    print "------------Image--------------"
    print AllImageResults[i]
  
  
for i in range(len(templateImg)):
    searchstring = "P%d%s" % (i+FIRST_GCP_POINT,":")
    for p in range(len(AllImageResults)):
        imageTempNameGetter = AllImageResults[p]
        imagename = imageTempNameGetter[0]
        for k in range(len(imageTempNameGetter)):
            if k>0:
                pointFinder = imageTempNameGetter[k]
                if pointFinder[0] == searchstring and pointFinder[1] !='F':
                    hitstring = (pointFinder[0] + " " + pointFinder[1] + " " + imagename)
                    sortbyPmatches.append(hitstring)
                    
            
print "sort by gcp_points"
#print sortbyPmatches
for row in sortbyPmatches:
    print row
# - end create gcp list file
myLineList=[]
for line in infile:
    line = line.replace('\n', '')
    myLineList.append(line)
#print len(myLineList)
#print myLineList
#for row in myLineList:
#    print row
# merge strings and write to file
mergedStrings=[]
debugmergedStrings=[]
for row in sortbyPmatches:
    
    internalresultsSplit = row.split(': ')
    internalresultsSplit.append("end")
    internalresultsPosNr = internalresultsSplit[0]
    internalresultsPosCoords = internalresultsSplit[1]
    
    
    for row in myLineList:
        koordsTextSlit = row.split(': ')
        koordsTextSlit.append("end")
        koordsTextPosNr = koordsTextSlit[0]
        koordsTextPosCoords = koordsTextSlit[1]
        # print "compare koordsTextPosNr: %s, against internalresultsPosNr: %s" %(koordsTextPosNr, internalresultsPosNr)
        if koordsTextPosNr == internalresultsPosNr:
            #print "--good--"
            #print koordsTextPosNr
            #print internalresultsPosNr
            if GCP_DEBUGMODE:
                #print "debugstring"
                debugstring = "%s %s %s" % (internalresultsPosNr, koordsTextPosCoords, internalresultsPosCoords)
                #print debugstring
                debugmergedStrings.append(debugstring)
            
            #print "normal string"
            normalstring = "%s %s" % (koordsTextPosCoords, internalresultsPosCoords)
            mergedStrings.append(normalstring)
                
# save debugfile
if GCP_DEBUGMODE:
    debugfile.write("This file is only for debugging and contains P1 - Pn to find errors\n")
    debugfile.write(myLineList[0]+'\n')
    for row in debugmergedStrings:
        debugfile.write(row+'\n')    
#save file
outfile.write(myLineList[0]+'\n')
for row in mergedStrings:
    outfile.write(row+'\n')
print "gcp_list.txt sorted by GCP"
for row in mergedStrings:
    print row
infile.close()
outfile.close()
if GCP_DEBUGMODE:
    debugfile.close()
print "--------"
print "finished gcp_list.txt"
# ---~Main--- | 
	gpl-3.0 | -6,089,335,881,260,830,000 | 31.578378 | 215 | 0.584087 | false | 
| 
	psiinon/addons-server | 
	src/olympia/amo/cron.py | 
	1 | 
	4394 | 
	import itertools
from datetime import datetime, timedelta
from django.core.files.storage import default_storage as storage
from django.db import connection
import waffle
import olympia.core.logger
from olympia import amo
from olympia.activity.models import ActivityLog
from olympia.amo.utils import chunked
from olympia.bandwagon.models import Collection
from olympia.constants.base import VALID_ADDON_STATUSES, VALID_FILE_STATUSES
from olympia.files.models import FileUpload
from olympia.lib.es.utils import raise_if_reindex_in_progress
from . import tasks
log = olympia.core.logger.getLogger('z.cron')
def gc(test_result=True):
    """Site-wide garbage collections."""
    def days_ago(days):
        return datetime.today() - timedelta(days=days)
    log.debug('Collecting data to delete')
    logs = (ActivityLog.objects.filter(created__lt=days_ago(90))
            .exclude(action__in=amo.LOG_KEEP).values_list('id', flat=True))
    collections_to_delete = (
        Collection.objects.filter(created__lt=days_ago(2),
                                  type=amo.COLLECTION_ANONYMOUS)
        .values_list('id', flat=True))
    for chunk in chunked(logs, 100):
        tasks.delete_logs.delay(chunk)
    for chunk in chunked(collections_to_delete, 100):
        tasks.delete_anonymous_collections.delay(chunk)
    # Incomplete addons cannot be deleted here because when an addon is
    # rejected during a review it is marked as incomplete. See bug 670295.
    # Delete stale FileUploads.
    stale_uploads = FileUpload.objects.filter(
        created__lte=days_ago(7)).order_by('id')
    for file_upload in stale_uploads:
        log.debug(u'[FileUpload:{uuid}] Removing file: {path}'
                  .format(uuid=file_upload.uuid, path=file_upload.path))
        if file_upload.path:
            try:
                storage.delete(file_upload.path)
            except OSError:
                pass
        file_upload.delete()
def category_totals():
    """
    Update category counts for sidebar navigation.
    """
    log.debug('Starting category counts update...')
    addon_statuses = ",".join(['%s'] * len(VALID_ADDON_STATUSES))
    file_statuses = ",".join(['%s'] * len(VALID_FILE_STATUSES))
    with connection.cursor() as cursor:
        cursor.execute("""
        UPDATE categories AS t INNER JOIN (
         SELECT at.category_id, COUNT(DISTINCT Addon.id) AS ct
          FROM addons AS Addon
          INNER JOIN versions AS Version
            ON (Addon.id = Version.addon_id)
          INNER JOIN applications_versions AS av
            ON (av.version_id = Version.id)
          INNER JOIN addons_categories AS at
            ON (at.addon_id = Addon.id)
          INNER JOIN files AS File
            ON (Version.id = File.version_id AND File.status IN (%s))
          WHERE Addon.status IN (%s) AND Addon.inactive = 0
          GROUP BY at.category_id)
        AS j ON (t.id = j.category_id)
        SET t.count = j.ct
        """ % (file_statuses, addon_statuses),
            VALID_FILE_STATUSES + VALID_ADDON_STATUSES)
def weekly_downloads():
    """
    Update 7-day add-on download counts.
    """
    if not waffle.switch_is_active('local-statistics-processing'):
        return False
    raise_if_reindex_in_progress('amo')
    with connection.cursor() as cursor:
        cursor.execute("""
            SELECT addon_id, SUM(count) AS weekly_count
            FROM download_counts
            WHERE `date` >= DATE_SUB(CURDATE(), INTERVAL 7 DAY)
            GROUP BY addon_id
            ORDER BY addon_id""")
        counts = cursor.fetchall()
    addon_ids = [r[0] for r in counts]
    if not addon_ids:
        return
    with connection.cursor() as cursor:
        cursor.execute("""
            SELECT id, 0
            FROM addons
            WHERE id NOT IN %s""", (addon_ids,))
        counts += cursor.fetchall()
        cursor.execute("""
            CREATE TEMPORARY TABLE tmp_wd
            (addon_id INT PRIMARY KEY, count INT)""")
        cursor.execute('INSERT INTO tmp_wd VALUES %s' %
                       ','.join(['(%s,%s)'] * len(counts)),
                       list(itertools.chain(*counts)))
        cursor.execute("""
            UPDATE addons INNER JOIN tmp_wd
                ON addons.id = tmp_wd.addon_id
            SET weeklydownloads = tmp_wd.count""")
        cursor.execute("DROP TABLE IF EXISTS tmp_wd")
 | 
	bsd-3-clause | 6,036,610,290,732,364,000 | 32.287879 | 76 | 0.616523 | false | 
| 
	DarkPrince304/MozDef | 
	rest/index.py | 
	6 | 
	19047 | 
	# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
#
# Contributors:
# Jeff Bryner [email protected]
# Anthony Verez [email protected]
import bottle
import json
import netaddr
import os
import pyes
import pytz
import pynsive
import requests
import sys
import socket
from bottle import debug, route, run, response, request, default_app, post
from datetime import datetime, timedelta
from configlib import getConfig, OptionParser
from elasticutils import S
from datetime import datetime
from datetime import timedelta
from dateutil.parser import parse
from ipwhois import IPWhois
from bson.son import SON
from operator import itemgetter
from pymongo import MongoClient
from bson import json_util
options = None
pluginList = list()   # tuple of module,registration dict,priority
def enable_cors(fn):
    ''' cors decorator for rest/ajax'''
    def _enable_cors(*args, **kwargs):
        # set CORS headers
        response.headers['Access-Control-Allow-Origin'] = '*'
        response.headers['Access-Control-Allow-Methods'] = 'GET, POST, PUT, OPTIONS'
        response.headers['Access-Control-Allow-Headers'] = 'Origin, Accept, Content-Type, X-Requested-With, X-CSRF-Token'
        if bottle.request.method != 'OPTIONS':
            # actual request; reply with the actual response
            return fn(*args, **kwargs)
    return _enable_cors
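# Illustrative note (not part of the original source): wrapping a handler with
# @enable_cors adds the Access-Control-* headers to every reply and, for an
# OPTIONS preflight, returns just those headers without calling the handler,
# which is what a browser expects before it issues the real cross-origin call.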
@route('/test')
@route('/test/')
def test():
    '''test endpoint for... testing'''
    ip = request.environ.get('REMOTE_ADDR')
    # response.headers['X-IP'] = '{0}'.format(ip)
    response.status = 200
    sendMessgeToPlugins(request, response, 'test')
    return response
@route('/status')
@route('/status/')
def status():
    '''endpoint for a status/health check'''
    if request.body:
        request.body.read()
        request.body.close()
    response.status = 200
    response.content_type = "application/json"
    response.body = json.dumps(dict(status='ok'))
    sendMessgeToPlugins(request, response, 'status')
    return response
@route('/ldapLogins')
@route('/ldapLogins/')
@enable_cors
def index():
    '''an endpoint to return success/failed login counts'''
    if request.body:
        request.body.read()
        request.body.close()
    response.content_type = "application/json"
    sendMessgeToPlugins(request, response, 'ldapLogins')
    return(esLdapResults())
@route('/veris')
@route('/veris/')
@enable_cors
def index():
    '''returns a count of veris tags'''
    if request.body:
        request.body.read()
        request.body.close()
    response.content_type = "application/json"
    response.body = verisSummary()
    sendMessgeToPlugins(request, response, 'veris')
    return response
@route('/kibanadashboards')
@route('/kibanadashboards/')
@enable_cors
def index():
    '''returns a list of dashboards to show on the UI'''
    if request.body:
        request.body.read()
        request.body.close()
    response.content_type = "application/json"
    response.body = kibanaDashboards()
    sendMessgeToPlugins(request, response, 'kibanadashboards')
    return response
@post('/blockip', methods=['POST'])
@post('/blockip/', methods=['POST'])
@enable_cors
def index():
    '''will receive a call to block an ip address'''
    sendMessgeToPlugins(request, response, 'blockip')
    return response
@post('/ipwhois', methods=['POST'])
@post('/ipwhois/', methods=['POST'])
@enable_cors
def index():
    '''return a json version of whois for an ip address'''
    if request.body:
        arequest = request.body.read()
        request.body.close()
    # valid json?
    try:
        requestDict = json.loads(arequest)
    except ValueError as e:
        response.status = 500
        return
    if 'ipaddress' in requestDict.keys() and isIPv4(requestDict['ipaddress']):
        response.content_type = "application/json"
        response.body = getWhois(requestDict['ipaddress'])
    else:
        response.status = 500
    sendMessgeToPlugins(request, response, 'ipwhois')
    return response
@post('/ipintel', methods=['POST'])
@post('/ipintel/', methods=['POST'])
@enable_cors
def ipintel():
    '''send an IP address through plugins for intel enhancement'''
    if request.body:
        arequest = request.body.read()
        #request.body.close()
    # valid json?
    try:
        requestDict = json.loads(arequest)
    except ValueError as e:
        response.status = 500
        return
    if 'ipaddress' in requestDict.keys() and isIPv4(requestDict['ipaddress']):
        response.content_type = "application/json"
    else:
        response.status = 500
    sendMessgeToPlugins(request, response, 'ipintel')
    return response
@post('/ipcifquery', methods=['POST'])
@post('/ipcifquery/', methods=['POST'])
@enable_cors
def index():
    '''return a json version of cif query for an ip address'''
    if request.body:
        arequest = request.body.read()
        request.body.close()
    # valid json?
    try:
        requestDict = json.loads(arequest)
    except ValueError as e:
        response.status = 500
        return
    if 'ipaddress' in requestDict.keys() and isIPv4(requestDict['ipaddress']):
        response.content_type = "application/json"
        response.body = getIPCIF(requestDict['ipaddress'])
    else:
        response.status = 500
    sendMessgeToPlugins(request, response, 'ipcifquery')
    return response
@post('/ipdshieldquery', methods=['POST'])
@post('/ipdshieldquery/', methods=['POST'])
@enable_cors
def index():
    '''
    return a json version of dshield query for an ip address
    https://isc.sans.edu/api/index.html
    '''
    if request.body:
        arequest = request.body.read()
        request.body.close()
    # valid json?
    try:
        requestDict = json.loads(arequest)
    except ValueError as e:
        response.status = 500
        return
    if 'ipaddress' in requestDict.keys() and isIPv4(requestDict['ipaddress']):
        url="https://isc.sans.edu/api/ip/"
        dresponse = requests.get('{0}{1}?json'.format(url, requestDict['ipaddress']))
        if dresponse.status_code == 200:
            response.content_type = "application/json"
            response.body = dresponse.content
        else:
            response.status = dresponse.status_code
    else:
        response.status = 500
    sendMessgeToPlugins(request, response, 'ipdshieldquery')
    return response
@route('/plugins', methods=['GET'])
@route('/plugins/', methods=['GET'])
@route('/plugins/<endpoint>', methods=['GET'])
def getPluginList(endpoint=None):
    ''' return a json representation of the plugin tuple
        (mfile, mname, mdescription, mreg, mpriority)
         minus the actual class (which isn't json-able)
         for all plugins, or for a specific endpoint
    '''
    pluginResponse = list()
    if endpoint is None:
        for plugin in pluginList:
            pdict = {}
            pdict['file'] = plugin[0]
            pdict['name'] = plugin[1]
            pdict['description'] = plugin[2]
            pdict['registration'] = plugin[3]
            pdict['priority'] = plugin[4]
            pluginResponse.append(pdict)
    else:
        # filter the list to just the endpoint requested
        for plugin in pluginList:
            if endpoint in plugin[3]:
                pdict = {}
                pdict['file'] = plugin[0]
                pdict['name'] = plugin[1]
                pdict['description'] = plugin[2]
                pdict['registration'] = plugin[3]
                pdict['priority'] = plugin[4]
                pluginResponse.append(pdict)
    response.content_type = "application/json"
    response.body = json.dumps(pluginResponse)
    sendMessgeToPlugins(request, response, 'plugins')
    return response
def registerPlugins():
    '''walk the ./plugins directory
       and register modules in pluginList
       as a tuple: (mfile, mname, mdescription, mreg, mpriority, mclass)
    '''
    plugin_manager = pynsive.PluginManager()
    if os.path.exists('plugins'):
        modules = pynsive.list_modules('plugins')
        for mfile in modules:
            module = pynsive.import_module(mfile)
            if not module:
                raise ImportError('Unable to load module {}'.format(mfile))
            else:
                # only reload once the import actually succeeded
                reload(module)
                if 'message' in dir(module):
                    mclass = module.message()
                    mreg = mclass.registration
                    mclass.restoptions = options
                    if 'priority' in dir(mclass):
                        mpriority = mclass.priority
                    else:
                        mpriority = 100
                    if 'name' in dir(mclass):
                        mname = mclass.name
                    else:
                        mname = mfile
                    if 'description' in dir(mclass):
                        mdescription = mclass.description
                    else:
                        mdescription = mfile
                    if isinstance(mreg, list):
                        print('[*] plugin {0} registered to receive messages from /{1}'.format(mfile, mreg))
                        pluginList.append((mfile, mname, mdescription, mreg, mpriority, mclass))
def sendMessgeToPlugins(request, response, endpoint):
    '''
       iterate the registered plugins
       sending the response/request to any that have
       registered for this rest endpoint
    '''
    # sort by priority
    for plugin in sorted(pluginList, key=itemgetter(4), reverse=False):
        if endpoint in plugin[3]:
            (request, response) = plugin[5].onMessage(request, response)
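# Illustrative sketch (not part of the original source): plugins registered for
# an endpoint are invoked in ascending priority order.  With a hypothetical
# pluginList such as
#
#   [('geoip.py', 'geoip', 'adds geo data', ['ipintel'], 10, geoipInstance),
#    ('banhammer.py', 'ban', 'blocks IPs', ['blockip'], 50, banInstance)]
#
# a call to sendMessgeToPlugins(request, response, 'ipintel') would hand the
# request/response pair only to geoipInstance.onMessage().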
def toUTC(suspectedDate, localTimeZone="US/Pacific"):
    '''make a UTC date out of almost anything'''
    utc = pytz.UTC
    objDate = None
    if type(suspectedDate) == str:
        objDate = parse(suspectedDate, fuzzy=True)
    elif type(suspectedDate) == datetime:
        objDate = suspectedDate
    if objDate.tzinfo is None:
        objDate = pytz.timezone(localTimeZone).localize(objDate)
        objDate = utc.normalize(objDate)
    else:
        objDate = utc.normalize(objDate)
    if objDate is not None:
        objDate = utc.normalize(objDate)
    return objDate
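# Illustrative sketch (not part of the original source): toUTC accepts either a
# date string or a datetime and always returns a timezone-aware UTC datetime.
# The sample values below are assumptions shown purely for demonstration.
#
#   toUTC('2014-06-01 10:30:00')            # naive string, assumed US/Pacific
#   # -> datetime.datetime(2014, 6, 1, 17, 30, tzinfo=<UTC>)
#   toUTC(datetime(2014, 6, 1, 10, 30))     # naive datetime, same handling
#   # -> datetime.datetime(2014, 6, 1, 17, 30, tzinfo=<UTC>)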
def isIPv4(ip):
    try:
        # netaddr on its own considers 1 and 0 to be valid_ipv4
        # so a little sanity check prior to netaddr.
        # Use IPNetwork instead of valid_ipv4 to allow CIDR
        if '.' in ip and len(ip.split('.')) == 4:
            # some ips are quoted
            netaddr.IPNetwork(ip.strip("'").strip('"'))
            return True
        else:
            return False
    except:
        return False
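# Illustrative sketch (not part of the original source): isIPv4 requires four
# dot-separated parts before handing the value to netaddr, so bare integers are
# rejected while quoted addresses and CIDR ranges are accepted.
#
#   isIPv4('10.0.0.1')        # True
#   isIPv4("'10.0.0.0/8'")    # True  (surrounding quotes stripped, CIDR allowed)
#   isIPv4('1')               # False (netaddr alone would treat this as valid)
#   isIPv4('not-an-ip')       # False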
def esLdapResults(begindateUTC=None, enddateUTC=None):
    '''an ES query/facet to count success/failed logins'''
    resultsList = list()
    if begindateUTC is None:
        begindateUTC = datetime.now() - timedelta(hours=1)
        begindateUTC = toUTC(begindateUTC)
    if enddateUTC is None:
        enddateUTC = datetime.now()
        enddateUTC = toUTC(enddateUTC)
    try:
        es = pyes.ES((list('{0}'.format(s) for s in options.esservers)))
        qDate = pyes.RangeQuery(qrange=pyes.ESRange('utctimestamp',
            from_value=begindateUTC, to_value=enddateUTC))
        q = pyes.MatchAllQuery()
        q = pyes.FilteredQuery(q, qDate)
        q = pyes.FilteredQuery(q, pyes.TermFilter('tags', 'ldap'))
        q = pyes.FilteredQuery(q,
            pyes.TermFilter('details.result', 'ldap_invalid_credentials'))
        q2 = q.search()
        q2.facet.add_term_facet('details.result')
        q2.facet.add_term_facet('details.dn', size=20)
        results = es.search(q2, indices='events')
        stoplist = ('o', 'mozilla', 'dc', 'com', 'mozilla.com',
            'mozillafoundation.org', 'org')
        for t in results.facets['details.dn'].terms:
            if t['term'] in stoplist:
                continue
            #print(t['term'])
            failures = 0
            success = 0
            dn = t['term']
            #re-query with the terms of the details.dn
            qt = pyes.MatchAllQuery()
            qt = pyes.FilteredQuery(qt, qDate)
            qt = pyes.FilteredQuery(qt, pyes.TermFilter('tags', 'ldap'))
            qt = pyes.FilteredQuery(qt,
                pyes.TermFilter('details.dn', t['term']))
            qt2 = qt.search()
            qt2.facet.add_term_facet('details.result')
            results = es.search(qt2)
            #sys.stdout.write('{0}\n'.format(results.facets['details.result'].terms))
            for t in results.facets['details.result'].terms:
                #print(t['term'],t['count'])
                if t['term'] == 'ldap_success':
                    success = t['count']
                if t['term'] == 'ldap_invalid_credentials':
                    failures = t['count']
            resultsList.append(dict(dn=dn, failures=failures,
                success=success, begin=begindateUTC.isoformat(),
                end=enddateUTC.isoformat()))
        return(json.dumps(resultsList))
    except pyes.exceptions.NoServerAvailable:
        sys.stderr.write('Elastic Search server could not be reached, check network connectivity\n')
def kibanaDashboards():
    try:
        resultsList = []
        es = pyes.ES((list('{0}'.format(s) for s in options.esservers)))
        r = es.search(pyes.Search(pyes.MatchAllQuery(), size=100),
            'kibana-int', 'dashboard')
        if r:
            for dashboard in r:
                dashboardJson = json.loads(dashboard.dashboard)
                resultsList.append({
                    'name': dashboardJson['title'],
                    'url': "%s/%s/%s" % (options.kibanaurl,
                        "index.html#/dashboard/elasticsearch",
                        dashboardJson['title'])
                })
            return json.dumps(resultsList)
        else:
            sys.stderr.write('No Kibana dashboard found\n')
    except pyes.exceptions.NoServerAvailable:
        sys.stderr.write('Elastic Search server could not be reached, check network connectivity\n')
def getWhois(ipaddress):
    try:
        whois = dict()
        ip = netaddr.IPNetwork(ipaddress)[0]
        if (not ip.is_loopback() and not ip.is_private() and not ip.is_reserved()):
            whois = IPWhois(netaddr.IPNetwork(ipaddress)[0]).lookup()
        whois['fqdn']=socket.getfqdn(str(netaddr.IPNetwork(ipaddress)[0]))
        return (json.dumps(whois))
    except Exception as e:
        sys.stderr.write('Error looking up whois for {0}: {1}\n'.format(ipaddress, e))
def getIPCIF(ipaddress):
    ''' query a CIF service for information on this IP address per:
        https://code.google.com/p/collective-intelligence-framework/wiki/API_HTTP_v1
    '''
    try:
        resultsList = []
        url='{0}api?apikey={1}&limit=20&confidence=65&q={2}'.format(options.cifhosturl,
                                             options.cifapikey,
                                             ipaddress)
        headers = {'Accept': 'application/json'}
        r=requests.get(url=url,verify=False,headers=headers)
        if r.status_code == 200:
            # we get a \n delimited list of json entries
            cifjsons=r.text.split('\n')
            for c in cifjsons:
                # test for valid json
                try:
                    resultsList.append(json.loads(c))
                except ValueError:
                    pass
            return json.dumps(resultsList)
    except Exception as e:
        sys.stderr.write('Error looking up CIF results for {0}: {1}\n'.format(ipaddress, e))
def verisSummary(verisRegex=None):
    try:
        # aggregate the veris tags from the incidents collection and return as json
        client = MongoClient(options.mongohost, options.mongoport)
        # use meteor db
        incidents= client.meteor['incidents']
        #iveris=incidents.aggregate([
                                   #{"$match":{"tags":{"$exists":True}}},
                                   #{"$unwind" : "$tags" },
                                   #{"$match":{"tags":{"$regex":''}}}, #regex for tag querying
                                   #{"$group": {"_id": "$tags", "hitcount": {"$sum": 1}}}, # count by tag
                                   #{"$sort": SON([("hitcount", -1), ("_id", -1)])}, #sort
                                   #])
        iveris=incidents.aggregate([
                                   {"$match":{"tags":{"$exists":True}}},
                                   {"$unwind" : "$tags" },
                                   {"$match":{"tags":{"$regex":''}}}, #regex for tag querying
                                   { "$project" : { "dateOpened" : 1 ,
                                                   "tags" : 1 ,
                                                   "phase": 1,
                                                   "_id": 0
                                                   } }
                                   ])
        if 'ok' in iveris.keys() and 'result' in iveris.keys():
            return json.dumps(iveris['result'], default=json_util.default)
        else:
            return json.dumps(list())
    except Exception as e:
            sys.stderr.write('Exception while aggregating veris summary: {0}\n'.format(e))
def initConfig():
    #change this to your default zone for when it's not specified
    options.defaultTimeZone = getConfig('defaulttimezone',
                                        'US/Pacific',
                                        options.configfile)
    options.esservers = list(getConfig('esservers',
                                       'http://localhost:9200',
                                       options.configfile).split(','))
    options.kibanaurl = getConfig('kibanaurl',
                                  'http://localhost:9090',
                                  options.configfile)
    # options for your CIF service
    options.cifapikey = getConfig('cifapikey', '', options.configfile)
    options.cifhosturl = getConfig('cifhosturl',
                                   'http://localhost/',
                                   options.configfile)
    # mongo connectivity options
    options.mongohost = getConfig('mongohost', 'localhost', options.configfile)
    options.mongoport = getConfig('mongoport', 3001, options.configfile)
if __name__ == "__main__":
    parser = OptionParser()
    parser.add_option("-c", dest='configfile',
        default=sys.argv[0].replace('.py', '.conf'),
        help="configuration file to use")
    (options, args) = parser.parse_args()
    initConfig()
    registerPlugins()
    run(host="localhost", port=8081)
else:
    parser = OptionParser()
    parser.add_option("-c", dest='configfile',
        default=sys.argv[0].replace('.py', '.conf'),
        help="configuration file to use")
    (options, args) = parser.parse_args()
    initConfig()
    registerPlugins()
    application = default_app()
 | 
	mpl-2.0 | 8,868,509,190,980,001,000 | 33.948624 | 121 | 0.579199 | false | 
| 
	kunaltyagi/Leap | 
	src/Leap/leap/leap_start.py | 
	1 | 
	7125 | 
	#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Module to detect gestures from Leap Motion Controller
"""
import inspect
import os
import sys
SRC_DIR = os.path.dirname(inspect.getfile(inspect.currentframe()))
LIB_DIR = os.path.abspath(os.path.join(SRC_DIR, '../../../lib'))
sys.path.insert(0, LIB_DIR)
import Leap     # pylint: disable=import-error, wrong-import-position
from Leap import CircleGesture, KeyTapGesture, ScreenTapGesture, SwipeGesture
# Stabilizer can be made into a decorator
def hand_stabilizer(frame, count, weight=0.7):
    """
    Updates the windowed average of number of visible hands.
    Args:
        frame: current frame
        weight: relative weight to the data in current frame
        count: object to be updated;
               currently the number of hands
    """
    if not frame.is_valid:
        return count
    for key in count.keys():
        count[key] = (1-weight)*count[key]
        if key == len(frame.hands):
            count[key] += weight
    return count
def hand_count(controller, window=10):
    """
    Accounts for errors in detection and delay from the sensor by
    means of a windowed average
    Args:
        controller: controller ID to get sensed data
        window: size of the window
    """
    no_of_hands = {0:0, 1:0, 2:0, 3:0}
    for i in range(window):
        no_of_hands = hand_stabilizer(controller.frame(window-i), count=no_of_hands)
    comp = lambda x: no_of_hands[x]
    return max(no_of_hands, key=comp)
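# Illustrative sketch (not part of the original source): hand_count smooths the
# per-frame detections over the last `window` frames, so a single frame that
# momentarily drops a hand does not flip the reported count.  Assuming a
# connected controller, a caller would typically do:
#
#   controller = Leap.Controller()
#   ...
#   count = hand_count(controller)   # dominant count (0-3) over recent frames,
#                                    # rather than the raw len(frame.hands)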
class LeapListener(Leap.Listener):
    """
    Implementation of a listener to detect and pass on gestures
    """
    finger_names = ['Thumb', 'Index', 'Middle', 'Ring', 'Pinky']
    bone_names = ['Metacarpal', 'Proximal', 'Intermediate', 'Distal']
    # Add additional gestures here as keys. Each gesture's parameters are stored in its respective value.
    def add_gesture(self, gesture):
        self.gesture = gesture
    def on_init(self, controller):
        """
        Detects controller initialisation
        Args:
            controller: ID to operate upon
        """
        print("Listener Initialised")
    def on_connect(self, controller):
        """
        Detects connection of Leap Motion sensor
        Args:
            controller: ID to operate upon
        """
        print("Motion Sensor Connected")
        # Enable Gestures
        controller.enable_gesture(Leap.Gesture.TYPE_CIRCLE)
        controller.enable_gesture(Leap.Gesture.TYPE_KEY_TAP)
        controller.enable_gesture(Leap.Gesture.TYPE_SCREEN_TAP)
        controller.enable_gesture(Leap.Gesture.TYPE_SWIPE)
        controller.config.set("Gesture.Swipe.MinLength", 10.0)
        controller.config.set("Gesture.Swipe.MinVelocity", 4)
        controller.config.set("Gesture.ScreenTap.MinForwardVelocity", 1.0)
        controller.config.set("Gesture.ScreenTap.MinDistance", 0.5)
        controller.config.set("Gesture.Circle.MinRadius", 5.0)
        controller.config.set("Gesture.Circle.MinArc", 0.5)
        controller.config.save()
    def on_disconnect(self, controller):
        """
        Detects loss of connection to the controller
        Args:
            controller: ID to operate upon
        """
        print("Leap Motion disconnected")
    def on_exit(self, controller):
        """
        Args:
            controller: ID to operate upon
        """
        print("Exited")
    def on_frame(self, controller):
        """
        Fetches data from the current frame. Called repeatedly until
        controller is exited
        Args:
            controller: ID to operate upon
        """
        frame = controller.frame()
        # Gesture detection
        flag = False
        gesture_name = ""
        count = hand_count(controller)
        details = {'frame_id': frame.id}
        for gesture in frame.gestures():
            if gesture.type is Leap.Gesture.TYPE_CIRCLE:
                circle = Leap.CircleGesture(gesture)
                gesture_name = 'circle'
                details['center'] = circle.center
                details['radius'] = circle.radius
                details['tip'] = circle.pointable.stabilized_tip_position
                if gesture.state is Leap.Gesture.STATE_START:
                    details['start'] = frame.id
                    details['start_position'] = details['tip']
                flag = True
                continue
            elif gesture.type is Leap.Gesture.TYPE_SCREEN_TAP:
                scr_tap = Leap.ScreenTapGesture(gesture)
                gesture_name = 'select'
                details['tip'] = scr_tap.position
                details['direction'] = scr_tap.direction
                flag = True
                continue
            elif gesture.type is Leap.Gesture.TYPE_SWIPE:
                swipe = Leap.SwipeGesture(gesture)
                gesture_name = 'swipe'
                details['position'] = swipe.position
                details['direction'] = swipe.direction
                if gesture.state is Leap.Gesture.STATE_START:
                    details['start'] = frame.id
                    details['start_position'] = swipe.position
                    details['prev_position'] = details['start_position']
                else:
                    details['prev_position'] = self.gesture.parameters['position']
                flag = True
                continue
        if count == 2 and not flag:
            left, right = frame.hands.leftmost, frame.hands.rightmost
            rel_x_velocity = right.palm_velocity.x - left.palm_velocity.x
            rel_orient = left.palm_normal.x*right.palm_normal.x
            if rel_orient < 0 and rel_x_velocity > 100:
                flag = True
                gesture_name = "clear_space"
                #details['Left Normal'] = left.palm_normal
                #details['Left position'] = left.palm_position
                #details['Right position'] = right.palm_position
                #details['Right Normal'] = right.palm_normal
            # Define parameters to characterise the gesture.
        # Point gesture
        #elif count == 1 and not flag:
        #    # Check for the following gestures: point, #to be added soon
        #    extended_fingers = frame.fingers.extended()
        #    finger_count = len(extended_fingers)
        #    if finger_count == 1 and extended_fingers[0].type == 1:
        #        gesture_name = "point"
        #        flag = True
        #        if self.gesture.type != gesture_name:
        #            details['start'] = frame.id
        #        forward_finger = extended_fingers[0]
        #        details['tip'] = forward_finger.stabilized_tip_position
        #        details['to'] = forward_finger.direction.normalized
        #        # Return the position on screen being pointed by the
        #        # forward most finger
        if flag:
            self.gesture.update_gesture(gesture_name, details)
            self.gesture.gesture_data()
            print("Gesture name: %s" %(gesture_name))
        else:
            self.gesture.type = 'no_gesture'
            self.gesture.parameters = {}
 | 
	gpl-3.0 | 3,953,385,202,132,790,000 | 34.272277 | 84 | 0.580211 | false | 
| 
	alexanderfefelov/nav | 
	python/nav/colors.py | 
	1 | 
	2493 | 
	#
# Copyright (C) 2013 UNINETT
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.  You should have received a copy of the GNU General Public License
# along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
"""Simple tools for terminal color support"""
import sys
from functools import wraps
import curses
from curses import (COLOR_BLACK, COLOR_BLUE, COLOR_CYAN, COLOR_GREEN,
                    COLOR_MAGENTA, COLOR_RED, COLOR_WHITE, COLOR_YELLOW)
__all__ = ['COLOR_BLACK', 'COLOR_BLUE', 'COLOR_CYAN', 'COLOR_GREEN',
           'COLOR_MAGENTA', 'COLOR_RED', 'COLOR_WHITE', 'COLOR_YELLOW',
           'colorize', 'set_foreground', 'reset_foreground', 'print_color']
try:
    curses.setupterm()
    _set_color = curses.tigetstr('setaf') or ''
    _reset_color = curses.tigetstr('sgr0') or ''
except curses.error:
    # silently ignore errors and turn off colors
    _set_color = ''
    _reset_color = ''
def colorize(color):
    """Decorator that changes the foreground color of any terminal output from
    a function, provided that the current terminal supports it.
    Example::
    @colorize(COLOR_YELLOW)
    def hello_world():
        print "Hello world!"
    """
    def _colorize(func):
        @wraps(func)
        def _wrapper(*args, **kwargs):
            try:
                set_foreground(color)
                return func(*args, **kwargs)
            finally:
                reset_foreground()
        return _wrapper
    return _colorize
def print_color(string, color, newline=True):
    """Prints a string to stdout using a specific color"""
    set_foreground(color)
    sys.stdout.write(string + ('\n' if newline else ''))
    reset_foreground()
def set_foreground(color):
    """Sets the current foreground color of the terminal"""
    if sys.stdout.isatty():
        sys.stdout.write(curses.tparm(_set_color, color))
        sys.stdout.flush()
def reset_foreground():
    """Resets the foreground color of the terminal"""
    if sys.stdout.isatty():
        sys.stdout.write(_reset_color)
        sys.stdout.flush()
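# Illustrative usage note (not part of the original source): print_color() suits
# one-off messages, e.g. print_color("OK", COLOR_GREEN), while @colorize(COLOR_RED)
# wraps a whole function's output.  Both degrade to plain text when stdout is not
# a TTY or the terminal reports no color capability.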
 | 
	gpl-2.0 | -2,158,333,708,684,524,500 | 30.961538 | 79 | 0.658644 | false | 
| 
	charanpald/wallhack | 
	wallhack/rankingexp/RegularisationExp.py | 
	1 | 
	7825 | 
	import numpy
import logging
import sys
import os
import multiprocessing
import itertools 
from sandbox.recommendation.MaxLocalAUC import MaxLocalAUC
from sandbox.recommendation.IterativeSoftImpute import IterativeSoftImpute
from sandbox.util.MCEvaluator import MCEvaluator
from sandbox.util.PathDefaults import PathDefaults
from sandbox.util.Sampling import Sampling
from wallhack.rankingexp.DatasetUtils import DatasetUtils
from sandbox.util.Util import Util 
Util.setupScript()
"""
We look at the ROC curves on the test set for different values of lambda. We want
to find out why the learning overfits on epinions, so we vary maxNormU and maxNormV.
"""
if len(sys.argv) > 1:
    dataset = sys.argv[1]
else: 
    dataset = "synthetic"
saveResults = True
prefix = "Regularisation"
outputFile = PathDefaults.getOutputDir() + "ranking/" + prefix + dataset.title() + "Results.npz" 
X = DatasetUtils.getDataset(dataset)
m, n = X.shape
u = 0.1 
w = 1-u
logging.debug("Sampled X shape: " + str(X.shape))
testSize = 5
folds = 5
trainTestXs = Sampling.shuffleSplitRows(X, folds, testSize)
numRecordAucSamples = 200
k2 = 64
u2 = 0.5
w2 = 1-u2
eps = 10**-8
maxLocalAuc = MaxLocalAUC(k2, w2, eps=eps,stochastic=True)
maxLocalAuc.alpha = 0.05
maxLocalAuc.alphas = 2.0**-numpy.arange(0, 9, 1)
maxLocalAuc.folds = 1
maxLocalAuc.initialAlg = "rand"
maxLocalAuc.itemExpP = 0.0
maxLocalAuc.itemExpQ = 0.0
maxLocalAuc.ks = numpy.array([k2])
maxLocalAuc.maxNorms = 2.0**numpy.arange(-2, 2)
maxLocalAuc.maxIterations = 500
maxLocalAuc.lmbdaU = 0
maxLocalAuc.lmbdaV = 0
maxLocalAuc.loss = "hinge"
maxLocalAuc.maxIterations = 500
maxLocalAuc.maxNormU = 100
maxLocalAuc.maxNormV = 100
maxLocalAuc.metric = "f1"
maxLocalAuc.normalise = True
maxLocalAuc.numAucSamples = 10
maxLocalAuc.numProcesses = 1
maxLocalAuc.numRecordAucSamples = 100
maxLocalAuc.numRowSamples = 30
maxLocalAuc.rate = "constant"
maxLocalAuc.recordStep = 10
maxLocalAuc.rho = 1.0
maxLocalAuc.t0 = 1.0
maxLocalAuc.t0s = 2.0**-numpy.arange(7, 12, 1)
maxLocalAuc.validationSize = 3
maxLocalAuc.validationUsers = 0
softImpute = IterativeSoftImpute(k=k2, postProcess=True)
numProcesses = multiprocessing.cpu_count()
os.system('taskset -p 0xffffffff %d' % os.getpid())
logging.debug("Starting training")
def computeTestAuc(args): 
    trainX, testX, maxLocalAuc, U, V  = args 
    numpy.random.seed(21)
    logging.debug(maxLocalAuc)
    
    #maxLocalAuc.learningRateSelect(trainX)
    U, V, trainMeasures, testMeasures, iterations, time = maxLocalAuc.learnModel(trainX, U=U, V=V, verbose=True)
    
    fprTrain, tprTrain = MCEvaluator.averageRocCurve(trainX, U, V)
    fprTest, tprTest = MCEvaluator.averageRocCurve(testX, U, V, trainX=trainX)
        
    return fprTrain, tprTrain, fprTest, tprTest
if saveResults: 
    paramList = []
    chunkSize = 1
    
    #First generate SoftImpute results as a benchmark. 
    trainX, testX = trainTestXs[0]
    learner = softImpute.copy()
    
    trainIterator = iter([trainX.toScipyCsc()])
    ZList = learner.learnModel(trainIterator)    
    U, s, V = ZList.next()
    U = U*s
    
    U = numpy.ascontiguousarray(U)
    V = numpy.ascontiguousarray(V)
    
    fprTrainSI, tprTrainSI = MCEvaluator.averageRocCurve(trainX, U, V)
    fprTestSI, tprTestSI = MCEvaluator.averageRocCurve(testX, U, V, trainX=trainX)
    
    #Now train MaxLocalAUC 
    U, V = maxLocalAuc.initUV(X)
    
    for maxNormU in maxLocalAuc.maxNorms: 
        for maxNormV in maxLocalAuc.maxNorms: 
            for trainX, testX in trainTestXs: 
                learner = maxLocalAuc.copy()
                learner.maxNormU = maxNormU 
                learner.maxNormV = maxNormV 
                paramList.append((trainX, testX, learner, U.copy(), V.copy()))
    if numProcesses != 1: 
        pool = multiprocessing.Pool(maxtasksperchild=100, processes=multiprocessing.cpu_count())
        resultsIterator = pool.imap(computeTestAuc, paramList, chunkSize)
    else: 
        resultsIterator = itertools.imap(computeTestAuc, paramList)
    
    meanFprTrains = []
    meanTprTrains = []
    meanFprTests = []
    meanTprTests = []
    
    for maxNormU in maxLocalAuc.maxNorms: 
        for maxNormV in maxLocalAuc.maxNorms: 
            fprTrains = [] 
            tprTrains = [] 
            fprTests = [] 
            tprTests = []
            
            for trainX, testX in trainTestXs: 
                fprTrain, tprTrain, fprTest, tprTest = resultsIterator.next()
                
                fprTrains.append(fprTrain)
                tprTrains.append(tprTrain)
                fprTests.append(fprTest) 
                tprTests.append(tprTest)
                
            meanFprTrain = numpy.mean(numpy.array(fprTrains), 0)    
            meanTprTrain = numpy.mean(numpy.array(tprTrains), 0) 
            meanFprTest = numpy.mean(numpy.array(fprTests), 0) 
            meanTprTest = numpy.mean(numpy.array(tprTests), 0) 
            
            meanFprTrains.append(meanFprTrain)
            meanTprTrains.append(meanTprTrain)
            meanFprTests.append(meanFprTest)
            meanTprTests.append(meanTprTest)
        
    numpy.savez(outputFile, meanFprTrains, meanTprTrains, meanFprTests, meanTprTests, fprTrainSI, tprTrainSI, fprTestSI, tprTestSI)
    
    if numProcesses != 1: 
        pool.terminate()   
    logging.debug("Saved results in " + outputFile)
else: 
    data = numpy.load(outputFile)
    meanFprTrain, meanTprTrain, meanFprTest, meanTprTest, fprTrainSI, tprTrainSI, fprTestSI, tprTestSI = data["arr_0"], data["arr_1"], data["arr_2"], data["arr_3"], data["arr_4"], data["arr_5"], data["arr_6"], data["arr_7"]      
   
    import matplotlib 
    matplotlib.use("GTK3Agg")
    import matplotlib.pyplot as plt   
    
    #print(meanFprTrain[0, :])
    #print(meanTprTrain[0, :])
    
    plotInds = ["k-", "k--", "k-.", "k:", "r-", "r--", "r-.", "r:", "g-", "g--", "g-.", "g:", "b-", "b--", "b-.", "b:", "c-"]
    ind = 0 
    
    for i, maxNormU in enumerate(maxLocalAuc.maxNorms):
        for j, maxNormV in enumerate(maxLocalAuc.maxNorms):
            label = r"$maxNorm_U=$" + str(maxNormU) + r" $maxNorm_V=$" + str(maxNormV)
    
            fprTrainStart =   meanFprTrain[ind, meanFprTrain[ind, :]<=0.2]   
            tprTrainStart =   meanTprTrain[ind, meanFprTrain[ind, :]<=0.2]
            
            print(fprTrainStart, tprTrainStart)
            
            plt.figure(0)
            plt.plot(fprTrainStart, tprTrainStart, plotInds[ind], label=label)
            
            plt.figure(1)
            plt.plot(meanFprTrain[ind, :], meanTprTrain[ind, :], plotInds[ind], label=label)
            
            fprTestStart =   meanFprTest[ind, meanFprTest[ind, :]<=0.2]   
            tprTestStart =   meanTprTest[ind, meanFprTest[ind, :]<=0.2]         
            
            plt.figure(2)    
            plt.plot(fprTestStart, tprTestStart, plotInds[ind], label=label)            
            
            plt.figure(3)    
            plt.plot(meanFprTest[ind, :], meanTprTest[ind, :], plotInds[ind], label=label)    
            
            ind += 1
    
    plt.figure(1)
    plt.plot(fprTrainSI, tprTrainSI, plotInds[ind], label="SI")    
    
    plt.figure(3)
    plt.plot(fprTestSI, tprTestSI , plotInds[ind], label="SI")       
    
    plt.figure(0)
    plt.xlabel("false positive rate")
    plt.ylabel("true positive rate")
    plt.legend(loc="lower right")
    
    plt.figure(1)
    plt.xlabel("false positive rate")
    plt.ylabel("true positive rate")
    plt.legend(loc="lower right")
    
    plt.figure(2)
    plt.xlabel("false positive rate")
    plt.ylabel("true positive rate")
    plt.legend(loc="lower right")
    
    plt.figure(3)
    plt.xlabel("false positive rate")
    plt.ylabel("true positive rate")
    plt.legend(loc="lower right")
    
    plt.show()
 | 
	gpl-3.0 | 6,259,649,822,501,241,000 | 32.297872 | 229 | 0.639617 | false | 
| 
	singulared/aiohttp | 
	tests/test_web_functional.py | 
	1 | 
	36053 | 
	import asyncio
import json
import pathlib
import zlib
from unittest import mock
import pytest
from multidict import MultiDict
from yarl import URL
from aiohttp import FormData, HttpVersion10, HttpVersion11, multipart, web
try:
    import ssl
except:
    ssl = False
@asyncio.coroutine
def test_simple_get(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        body = yield from request.read()
        assert b'' == body
        return web.Response(body=b'OK')
    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    resp = yield from client.get('/')
    assert 200 == resp.status
    txt = yield from resp.text()
    assert 'OK' == txt
@asyncio.coroutine
def test_handler_returns_not_response(loop, test_server, test_client):
    logger = mock.Mock()
    @asyncio.coroutine
    def handler(request):
        return 'abc'
    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    server = yield from test_server(app, logger=logger)
    client = yield from test_client(server)
    resp = yield from client.get('/')
    assert 500 == resp.status
    assert logger.exception.called
@asyncio.coroutine
def test_head_returns_empty_body(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        return web.Response(body=b'test')
    app = web.Application(loop=loop)
    app.router.add_head('/', handler)
    client = yield from test_client(app)
    resp = yield from client.head('/', version=HttpVersion11)
    assert 200 == resp.status
    txt = yield from resp.text()
    assert '' == txt
@asyncio.coroutine
def test_post_form(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        data = yield from request.post()
        assert {'a': '1', 'b': '2'} == data
        return web.Response(body=b'OK')
    app = web.Application(loop=loop)
    app.router.add_post('/', handler)
    client = yield from test_client(app)
    resp = yield from client.post('/', data={'a': 1, 'b': 2})
    assert 200 == resp.status
    txt = yield from resp.text()
    assert 'OK' == txt
@asyncio.coroutine
def test_post_text(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        data = yield from request.text()
        assert 'русский' == data
        data2 = yield from request.text()
        assert data == data2
        return web.Response(text=data)
    app = web.Application(loop=loop)
    app.router.add_post('/', handler)
    client = yield from test_client(app)
    resp = yield from client.post('/', data='русский')
    assert 200 == resp.status
    txt = yield from resp.text()
    assert 'русский' == txt
@asyncio.coroutine
def test_post_json(loop, test_client):
    dct = {'key': 'текст'}
    @asyncio.coroutine
    def handler(request):
        data = yield from request.json()
        assert dct == data
        data2 = yield from request.json(loads=json.loads)
        assert data == data2
        resp = web.Response()
        resp.content_type = 'application/json'
        resp.body = json.dumps(data).encode('utf8')
        return resp
    app = web.Application(loop=loop)
    app.router.add_post('/', handler)
    client = yield from test_client(app)
    headers = {'Content-Type': 'application/json'}
    resp = yield from client.post('/', data=json.dumps(dct), headers=headers)
    assert 200 == resp.status
    data = yield from resp.json()
    assert dct == data
@asyncio.coroutine
def test_multipart(loop, test_client):
    with multipart.MultipartWriter() as writer:
        writer.append('test')
        writer.append_json({'passed': True})
    @asyncio.coroutine
    def handler(request):
        reader = yield from request.multipart()
        assert isinstance(reader, multipart.MultipartReader)
        part = yield from reader.next()
        assert isinstance(part, multipart.BodyPartReader)
        thing = yield from part.text()
        assert thing == 'test'
        part = yield from reader.next()
        assert isinstance(part, multipart.BodyPartReader)
        assert part.headers['Content-Type'] == 'application/json'
        thing = yield from part.json()
        assert thing == {'passed': True}
        resp = web.Response()
        resp.content_type = 'application/json'
        resp.body = b''
        return resp
    app = web.Application(loop=loop)
    app.router.add_post('/', handler)
    client = yield from test_client(app)
    resp = yield from client.post('/', data=writer, headers=writer.headers)
    assert 200 == resp.status
    yield from resp.release()
@asyncio.coroutine
def test_multipart_content_transfer_encoding(loop, test_client):
    """For issue #1168"""
    with multipart.MultipartWriter() as writer:
        writer.append(b'\x00' * 10,
                      headers={'Content-Transfer-Encoding': 'binary'})
    @asyncio.coroutine
    def handler(request):
        reader = yield from request.multipart()
        assert isinstance(reader, multipart.MultipartReader)
        part = yield from reader.next()
        assert isinstance(part, multipart.BodyPartReader)
        assert part.headers['Content-Transfer-Encoding'] == 'binary'
        thing = yield from part.read()
        assert thing == b'\x00' * 10
        resp = web.Response()
        resp.content_type = 'application/json'
        resp.body = b''
        return resp
    app = web.Application(loop=loop)
    app.router.add_post('/', handler)
    client = yield from test_client(app)
    resp = yield from client.post('/', data=writer, headers=writer.headers)
    assert 200 == resp.status
    yield from resp.release()
@asyncio.coroutine
def test_render_redirect(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        raise web.HTTPMovedPermanently(location='/path')
    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    resp = yield from client.get('/', allow_redirects=False)
    assert 301 == resp.status
    txt = yield from resp.text()
    assert '301: Moved Permanently' == txt
    assert '/path' == resp.headers['location']
@asyncio.coroutine
def test_post_single_file(loop, test_client):
    here = pathlib.Path(__file__).parent
    def check_file(fs):
        fullname = here / fs.filename
        with fullname.open() as f:
            test_data = f.read().encode()
            data = fs.file.read()
            assert test_data == data
    @asyncio.coroutine
    def handler(request):
        with pytest.warns(DeprecationWarning):
            data = yield from request.post()
        assert ['sample.crt'] == list(data.keys())
        for fs in data.values():
            check_file(fs)
            fs.file.close()
        resp = web.Response(body=b'OK')
        return resp
    app = web.Application(loop=loop)
    app.router.add_post('/', handler)
    client = yield from test_client(app)
    fname = here / 'sample.crt'
    resp = yield from client.post('/', data=[fname.open()])
    assert 200 == resp.status
@asyncio.coroutine
def test_files_upload_with_same_key(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        data = yield from request.post()
        files = data.getall('file')
        file_names = set()
        for _file in files:
            assert not _file.file.closed
            if _file.filename == 'test1.jpeg':
                assert _file.file.read() == b'binary data 1'
            if _file.filename == 'test2.jpeg':
                assert _file.file.read() == b'binary data 2'
            file_names.add(_file.filename)
        assert len(files) == 2
        assert file_names == {'test1.jpeg', 'test2.jpeg'}
        resp = web.Response(body=b'OK')
        return resp
    app = web.Application(loop=loop)
    app.router.add_post('/', handler)
    client = yield from test_client(app)
    data = FormData()
    data.add_field('file', b'binary data 1',
                   content_type='image/jpeg',
                   filename='test1.jpeg')
    data.add_field('file', b'binary data 2',
                   content_type='image/jpeg',
                   filename='test2.jpeg')
    resp = yield from client.post('/', data=data)
    assert 200 == resp.status
@asyncio.coroutine
def test_post_files(loop, test_client):
    here = pathlib.Path(__file__).parent
    def check_file(fs):
        fullname = here / fs.filename
        with fullname.open() as f:
            test_data = f.read().encode()
            data = fs.file.read()
            assert test_data == data
    @asyncio.coroutine
    def handler(request):
        data = yield from request.post()
        assert ['sample.crt', 'sample.key'] == list(data.keys())
        for fs in data.values():
            check_file(fs)
            fs.file.close()
        resp = web.Response(body=b'OK')
        return resp
    app = web.Application(loop=loop)
    app.router.add_post('/', handler)
    client = yield from test_client(app)
    with (here / 'sample.crt').open() as f1:
        with (here / 'sample.key').open() as f2:
            resp = yield from client.post('/', data=[f1, f2])
            assert 200 == resp.status
@asyncio.coroutine
def test_release_post_data(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        yield from request.release()
        chunk = yield from request.content.readany()
        assert chunk == b''
        return web.Response()
    app = web.Application(loop=loop)
    app.router.add_post('/', handler)
    client = yield from test_client(app)
    resp = yield from client.post('/', data='post text')
    assert 200 == resp.status
@asyncio.coroutine
def test_POST_DATA_with_content_transfer_encoding(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        data = yield from request.post()
        assert b'123' == data['name']
        return web.Response()
    app = web.Application(loop=loop)
    app.router.add_post('/', handler)
    client = yield from test_client(app)
    form = FormData()
    form.add_field('name', b'123',
                   content_transfer_encoding='base64')
    resp = yield from client.post('/', data=form)
    assert 200 == resp.status
@asyncio.coroutine
def test_post_form_with_duplicate_keys(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        data = yield from request.post()
        lst = list(data.items())
        assert [('a', '1'), ('a', '2')] == lst
        return web.Response()
    app = web.Application(loop=loop)
    app.router.add_post('/', handler)
    client = yield from test_client(app)
    resp = yield from client.post('/', data=MultiDict([('a', 1), ('a', 2)]))
    assert 200 == resp.status
def test_repr_for_application(loop):
    app = web.Application(loop=loop)
    assert "<Application 0x{:x}>".format(id(app)) == repr(app)
@asyncio.coroutine
def test_expect_default_handler_unknown(loop, test_client):
    """Test default Expect handler for unknown Expect value.
    A server that does not understand or is unable to comply with any of
    the expectation values in the Expect field of a request MUST respond
    with appropriate error status. The server MUST respond with a 417
    (Expectation Failed) status if any of the expectations cannot be met
    or, if there are other problems with the request, some other 4xx
    status.
    http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.20
    """
    @asyncio.coroutine
    def handler(request):
        yield from request.post()
        pytest.xfail('Handler should not proceed to this point in case of '
                     'unknown Expect header')
    app = web.Application(loop=loop)
    app.router.add_post('/', handler)
    client = yield from test_client(app)
    resp = yield from client.post('/', headers={'Expect': 'SPAM'})
    assert 417 == resp.status
@asyncio.coroutine
def test_100_continue(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        data = yield from request.post()
        assert b'123' == data['name']
        return web.Response()
    form = FormData()
    form.add_field('name', b'123',
                   content_transfer_encoding='base64')
    app = web.Application(loop=loop)
    app.router.add_post('/', handler)
    client = yield from test_client(app)
    resp = yield from client.post('/', data=form, expect100=True)
    assert 200 == resp.status
@asyncio.coroutine
def test_100_continue_custom(loop, test_client):
    expect_received = False
    @asyncio.coroutine
    def handler(request):
        data = yield from request.post()
        assert b'123' == data['name']
        return web.Response()
    @asyncio.coroutine
    def expect_handler(request):
        nonlocal expect_received
        expect_received = True
        if request.version == HttpVersion11:
            request.transport.write(b"HTTP/1.1 100 Continue\r\n\r\n")
    form = FormData()
    form.add_field('name', b'123',
                   content_transfer_encoding='base64')
    app = web.Application(loop=loop)
    app.router.add_post('/', handler, expect_handler=expect_handler)
    client = yield from test_client(app)
    resp = yield from client.post('/', data=form, expect100=True)
    assert 200 == resp.status
    assert expect_received
@asyncio.coroutine
def test_100_continue_custom_response(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        data = yield from request.post()
        assert b'123' == data['name']
        return web.Response()
    @asyncio.coroutine
    def expect_handler(request):
        if request.version == HttpVersion11:
            if auth_err:
                return web.HTTPForbidden()
            request.transport.write(b"HTTP/1.1 100 Continue\r\n\r\n")
    form = FormData()
    form.add_field('name', b'123',
                   content_transfer_encoding='base64')
    app = web.Application(loop=loop)
    app.router.add_post('/', handler, expect_handler=expect_handler)
    client = yield from test_client(app)
    auth_err = False
    resp = yield from client.post('/', data=form, expect100=True)
    assert 200 == resp.status
    auth_err = True
    resp = yield from client.post('/', data=form, expect100=True)
    assert 403 == resp.status
@asyncio.coroutine
def test_100_continue_for_not_found(loop, test_client):
    app = web.Application(loop=loop)
    client = yield from test_client(app)
    resp = yield from client.post('/not_found', data='data', expect100=True)
    assert 404 == resp.status
@asyncio.coroutine
def test_100_continue_for_not_allowed(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        return web.Response()
    app = web.Application(loop=loop)
    app.router.add_post('/', handler)
    client = yield from test_client(app)
    resp = yield from client.get('/', expect100=True)
    assert 405 == resp.status
@asyncio.coroutine
def test_http11_keep_alive_default(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        return web.Response()
    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    resp = yield from client.get('/', version=HttpVersion11)
    assert 200 == resp.status
    assert resp.version == HttpVersion11
    assert 'Connection' not in resp.headers
@asyncio.coroutine
def test_http10_keep_alive_default(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        return web.Response()
    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    resp = yield from client.get('/', version=HttpVersion10)
    assert 200 == resp.status
    assert resp.version == HttpVersion10
    assert resp.headers['Connection'] == 'keep-alive'
@asyncio.coroutine
def test_http10_keep_alive_with_headers_close(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        yield from request.read()
        return web.Response(body=b'OK')
    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    headers = {'Connection': 'close'}
    resp = yield from client.get('/', version=HttpVersion10,
                                 headers=headers)
    assert 200 == resp.status
    assert resp.version == HttpVersion10
    assert 'Connection' not in resp.headers
@asyncio.coroutine
def test_http10_keep_alive_with_headers(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        yield from request.read()
        return web.Response(body=b'OK')
    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    headers = {'Connection': 'keep-alive'}
    resp = yield from client.get('/', version=HttpVersion10,
                                 headers=headers)
    assert 200 == resp.status
    assert resp.version == HttpVersion10
    assert resp.headers['Connection'] == 'keep-alive'
@asyncio.coroutine
def test_upload_file(loop, test_client):
    here = pathlib.Path(__file__).parent
    fname = here / 'aiohttp.png'
    with fname.open('rb') as f:
        data = f.read()
    @asyncio.coroutine
    def handler(request):
        form = yield from request.post()
        raw_data = form['file'].file.read()
        assert data == raw_data
        return web.Response()
    app = web.Application(loop=loop)
    app.router.add_post('/', handler)
    client = yield from test_client(app)
    resp = yield from client.post('/', data={'file': data})
    assert 200 == resp.status
@asyncio.coroutine
def test_upload_file_object(loop, test_client):
    here = pathlib.Path(__file__).parent
    fname = here / 'aiohttp.png'
    with fname.open('rb') as f:
        data = f.read()
    @asyncio.coroutine
    def handler(request):
        form = yield from request.post()
        raw_data = form['file'].file.read()
        assert data == raw_data
        return web.Response()
    app = web.Application(loop=loop)
    app.router.add_post('/', handler)
    client = yield from test_client(app)
    with fname.open('rb') as f:
        resp = yield from client.post('/', data={'file': f})
        assert 200 == resp.status
@asyncio.coroutine
def test_empty_content_for_query_without_body(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        assert not request.has_body
        return web.Response()
    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    resp = yield from client.get('/')
    assert 200 == resp.status
@asyncio.coroutine
def test_empty_content_for_query_with_body(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        assert request.has_body
        body = yield from request.read()
        return web.Response(body=body)
    app = web.Application(loop=loop)
    app.router.add_post('/', handler)
    client = yield from test_client(app)
    resp = yield from client.post('/', data=b'data')
    assert 200 == resp.status
@asyncio.coroutine
def test_get_with_empty_arg(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        assert 'arg' in request.GET
        assert '' == request.GET['arg']
        return web.Response()
    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    resp = yield from client.get('/?arg')
    assert 200 == resp.status
@asyncio.coroutine
def test_large_header(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        return web.Response()
    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    headers = {'Long-Header': 'ab' * 8129}
    resp = yield from client.get('/', headers=headers)
    assert 400 == resp.status
@asyncio.coroutine
def test_large_header_allowed(loop, test_client, test_server):
    @asyncio.coroutine
    def handler(request):
        return web.Response()
    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    server = yield from test_server(app, max_field_size=81920)
    client = yield from test_client(server)
    headers = {'Long-Header': 'ab' * 8129}
    resp = yield from client.get('/', headers=headers)
    assert 200 == resp.status
@asyncio.coroutine
def test_get_with_empty_arg_with_equal(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        assert 'arg' in request.GET
        assert '' == request.GET['arg']
        return web.Response()
    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    resp = yield from client.get('/?arg=')
    assert 200 == resp.status
@asyncio.coroutine
def test_response_with_precompressed_body_gzip(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        headers = {'Content-Encoding': 'gzip'}
        zcomp = zlib.compressobj(wbits=16 + zlib.MAX_WBITS)
        data = zcomp.compress(b'mydata') + zcomp.flush()
        return web.Response(body=data, headers=headers)
    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    resp = yield from client.get('/')
    assert 200 == resp.status
    data = yield from resp.read()
    assert b'mydata' == data
    assert resp.headers.get('Content-Encoding') == 'gzip'
@asyncio.coroutine
def test_response_with_precompressed_body_deflate(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        headers = {'Content-Encoding': 'deflate'}
        zcomp = zlib.compressobj(wbits=-zlib.MAX_WBITS)
        data = zcomp.compress(b'mydata') + zcomp.flush()
        return web.Response(body=data, headers=headers)
    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    resp = yield from client.get('/')
    assert 200 == resp.status
    data = yield from resp.read()
    assert b'mydata' == data
    assert resp.headers.get('Content-Encoding') == 'deflate'
@asyncio.coroutine
def test_stream_response_multiple_chunks(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        resp = web.StreamResponse()
        resp.enable_chunked_encoding()
        yield from resp.prepare(request)
        resp.write(b'x')
        resp.write(b'y')
        resp.write(b'z')
        return resp
    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    resp = yield from client.get('/')
    assert 200 == resp.status
    data = yield from resp.read()
    assert b'xyz' == data
@asyncio.coroutine
def test_start_without_routes(loop, test_client):
    app = web.Application(loop=loop)
    client = yield from test_client(app)
    resp = yield from client.get('/')
    assert 404 == resp.status
@asyncio.coroutine
def test_requests_count(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        return web.Response()
    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    assert client.server.handler.requests_count == 0
    resp = yield from client.get('/')
    assert 200 == resp.status
    assert client.server.handler.requests_count == 1
    resp = yield from client.get('/')
    assert 200 == resp.status
    assert client.server.handler.requests_count == 2
    resp = yield from client.get('/')
    assert 200 == resp.status
    assert client.server.handler.requests_count == 3
@asyncio.coroutine
def test_redirect_url(loop, test_client):
    @asyncio.coroutine
    def redirector(request):
        raise web.HTTPFound(location=URL('/redirected'))
    @asyncio.coroutine
    def redirected(request):
        return web.Response()
    app = web.Application(loop=loop)
    app.router.add_get('/redirector', redirector)
    app.router.add_get('/redirected', redirected)
    client = yield from test_client(app)
    resp = yield from client.get('/redirector')
    assert resp.status == 200
@asyncio.coroutine
def test_simple_subapp(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        return web.Response(text="OK")
    app = web.Application(loop=loop)
    subapp = web.Application(loop=loop)
    subapp.router.add_get('/to', handler)
    app.add_subapp('/path', subapp)
    client = yield from test_client(app)
    resp = yield from client.get('/path/to')
    assert resp.status == 200
    txt = yield from resp.text()
    assert 'OK' == txt
@asyncio.coroutine
def test_subapp_reverse_url(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        return web.HTTPMovedPermanently(
            location=subapp.router['name'].url_for())
    @asyncio.coroutine
    def handler2(request):
        return web.Response(text="OK")
    app = web.Application(loop=loop)
    subapp = web.Application(loop=loop)
    subapp.router.add_get('/to', handler)
    subapp.router.add_get('/final', handler2, name='name')
    app.add_subapp('/path', subapp)
    client = yield from test_client(app)
    resp = yield from client.get('/path/to')
    assert resp.status == 200
    txt = yield from resp.text()
    assert 'OK' == txt
    assert resp.url.path == '/path/final'
@asyncio.coroutine
def test_subapp_reverse_variable_url(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        return web.HTTPMovedPermanently(
            location=subapp.router['name'].url_for(part='final'))
    @asyncio.coroutine
    def handler2(request):
        return web.Response(text="OK")
    app = web.Application(loop=loop)
    subapp = web.Application(loop=loop)
    subapp.router.add_get('/to', handler)
    subapp.router.add_get('/{part}', handler2, name='name')
    app.add_subapp('/path', subapp)
    client = yield from test_client(app)
    resp = yield from client.get('/path/to')
    assert resp.status == 200
    txt = yield from resp.text()
    assert 'OK' == txt
    assert resp.url.path == '/path/final'
@asyncio.coroutine
def test_subapp_reverse_static_url(loop, test_client):
    fname = 'aiohttp.png'
    @asyncio.coroutine
    def handler(request):
        return web.HTTPMovedPermanently(
            location=subapp.router['name'].url_for(filename=fname))
    app = web.Application(loop=loop)
    subapp = web.Application(loop=loop)
    subapp.router.add_get('/to', handler)
    here = pathlib.Path(__file__).parent
    subapp.router.add_static('/static', here, name='name')
    app.add_subapp('/path', subapp)
    client = yield from test_client(app)
    resp = yield from client.get('/path/to')
    assert resp.url.path == '/path/static/' + fname
    assert resp.status == 200
    body = yield from resp.read()
    with (here / fname).open('rb') as f:
        assert body == f.read()
@asyncio.coroutine
def test_subapp_app(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        assert request.app is subapp
        return web.HTTPOk(text='OK')
    app = web.Application(loop=loop)
    subapp = web.Application(loop=loop)
    subapp.router.add_get('/to', handler)
    app.add_subapp('/path/', subapp)
    client = yield from test_client(app)
    resp = yield from client.get('/path/to')
    assert resp.status == 200
    txt = yield from resp.text()
    assert 'OK' == txt
@asyncio.coroutine
def test_subapp_not_found(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        return web.HTTPOk(text='OK')
    app = web.Application(loop=loop)
    subapp = web.Application(loop=loop)
    subapp.router.add_get('/to', handler)
    app.add_subapp('/path/', subapp)
    client = yield from test_client(app)
    resp = yield from client.get('/path/other')
    assert resp.status == 404
@asyncio.coroutine
def test_subapp_not_found2(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        return web.HTTPOk(text='OK')
    app = web.Application(loop=loop)
    subapp = web.Application(loop=loop)
    subapp.router.add_get('/to', handler)
    app.add_subapp('/path/', subapp)
    client = yield from test_client(app)
    resp = yield from client.get('/invalid/other')
    assert resp.status == 404
@asyncio.coroutine
def test_subapp_not_allowed(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        return web.HTTPOk(text='OK')
    app = web.Application(loop=loop)
    subapp = web.Application(loop=loop)
    subapp.router.add_get('/to', handler)
    app.add_subapp('/path/', subapp)
    client = yield from test_client(app)
    resp = yield from client.post('/path/to')
    assert resp.status == 405
    assert resp.headers['Allow'] == 'GET'
@asyncio.coroutine
def test_subapp_cannot_add_app_in_handler(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        request.match_info.add_app(app)
        return web.HTTPOk(text='OK')
    app = web.Application(loop=loop)
    subapp = web.Application(loop=loop)
    subapp.router.add_get('/to', handler)
    app.add_subapp('/path/', subapp)
    client = yield from test_client(app)
    resp = yield from client.get('/path/to')
    assert resp.status == 500
@asyncio.coroutine
def test_subapp_middlewares(loop, test_client):
    order = []
    @asyncio.coroutine
    def handler(request):
        return web.HTTPOk(text='OK')
    @asyncio.coroutine
    def middleware_factory(app, handler):
        @asyncio.coroutine
        def middleware(request):
            order.append((1, app))
            resp = yield from handler(request)
            assert 200 == resp.status
            order.append((2, app))
            return resp
        return middleware
    app = web.Application(loop=loop, middlewares=[middleware_factory])
    subapp1 = web.Application(loop=loop, middlewares=[middleware_factory])
    subapp2 = web.Application(loop=loop, middlewares=[middleware_factory])
    subapp2.router.add_get('/to', handler)
    subapp1.add_subapp('/b/', subapp2)
    app.add_subapp('/a/', subapp1)
    client = yield from test_client(app)
    resp = yield from client.get('/a/b/to')
    assert resp.status == 200
    assert [(1, app), (1, subapp1), (1, subapp2),
            (2, subapp2), (2, subapp1), (2, app)] == order
@asyncio.coroutine
def test_subapp_on_response_prepare(loop, test_client):
    order = []
    @asyncio.coroutine
    def handler(request):
        return web.HTTPOk(text='OK')
    def make_signal(app):
        @asyncio.coroutine
        def on_response(request, response):
            order.append(app)
        return on_response
    app = web.Application(loop=loop)
    app.on_response_prepare.append(make_signal(app))
    subapp1 = web.Application(loop=loop)
    subapp1.on_response_prepare.append(make_signal(subapp1))
    subapp2 = web.Application(loop=loop)
    subapp2.on_response_prepare.append(make_signal(subapp2))
    subapp2.router.add_get('/to', handler)
    subapp1.add_subapp('/b/', subapp2)
    app.add_subapp('/a/', subapp1)
    client = yield from test_client(app)
    resp = yield from client.get('/a/b/to')
    assert resp.status == 200
    assert [app, subapp1, subapp2] == order
@asyncio.coroutine
def test_subapp_on_startup(loop, test_server):
    order = []
    @asyncio.coroutine
    def on_signal(app):
        order.append(app)
    app = web.Application(loop=loop)
    app.on_startup.append(on_signal)
    subapp1 = web.Application(loop=loop)
    subapp1.on_startup.append(on_signal)
    subapp2 = web.Application(loop=loop)
    subapp2.on_startup.append(on_signal)
    subapp1.add_subapp('/b/', subapp2)
    app.add_subapp('/a/', subapp1)
    yield from test_server(app)
    assert [app, subapp1, subapp2] == order
@asyncio.coroutine
def test_subapp_on_shutdown(loop, test_server):
    order = []
    def on_signal(app):
        order.append(app)
    app = web.Application(loop=loop)
    app.on_shutdown.append(on_signal)
    subapp1 = web.Application(loop=loop)
    subapp1.on_shutdown.append(on_signal)
    subapp2 = web.Application(loop=loop)
    subapp2.on_shutdown.append(on_signal)
    subapp1.add_subapp('/b/', subapp2)
    app.add_subapp('/a/', subapp1)
    server = yield from test_server(app)
    yield from server.close()
    assert [app, subapp1, subapp2] == order
@asyncio.coroutine
def test_subapp_on_cleanup(loop, test_server):
    order = []
    @asyncio.coroutine
    def on_signal(app):
        order.append(app)
    app = web.Application(loop=loop)
    app.on_cleanup.append(on_signal)
    subapp1 = web.Application(loop=loop)
    subapp1.on_cleanup.append(on_signal)
    subapp2 = web.Application(loop=loop)
    subapp2.on_cleanup.append(on_signal)
    subapp1.add_subapp('/b/', subapp2)
    app.add_subapp('/a/', subapp1)
    server = yield from test_server(app)
    yield from server.close()
    assert [app, subapp1, subapp2] == order
@asyncio.coroutine
def test_custom_date_header(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        return web.Response(headers={'Date': 'Sun, 30 Oct 2016 03:13:52 GMT'})
    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    resp = yield from client.get('/')
    assert 200 == resp.status
    assert resp.headers['Date'] == 'Sun, 30 Oct 2016 03:13:52 GMT'
@asyncio.coroutine
def test_response_prepared_with_clone(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        cloned = request.clone()
        resp = web.StreamResponse()
        yield from resp.prepare(cloned)
        return resp
    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    resp = yield from client.get('/')
    assert 200 == resp.status
@asyncio.coroutine
def test_app_max_client_size(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        yield from request.post()
        return web.Response(body=b'ok')
    max_size = 1024**2
    app = web.Application(loop=loop)
    app.router.add_post('/', handler)
    client = yield from test_client(app)
    data = {"long_string": max_size * 'x' + 'xxx'}
    resp = yield from client.post('/', data=data)
    assert 413 == resp.status
    resp_text = yield from resp.text()
    assert 'Request Entity Too Large' in resp_text
@asyncio.coroutine
def test_app_max_client_size_adjusted(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        yield from request.post()
        return web.Response(body=b'ok')
    default_max_size = 1024**2
    custom_max_size = default_max_size * 2
    app = web.Application(loop=loop, client_max_size=custom_max_size)
    app.router.add_post('/', handler)
    client = yield from test_client(app)
    data = {'long_string': default_max_size * 'x' + 'xxx'}
    resp = yield from client.post('/', data=data)
    assert 200 == resp.status
    resp_text = yield from resp.text()
    assert 'ok' == resp_text
    too_large_data = {'long_string': custom_max_size * 'x' + 'xxx'}
    resp = yield from client.post('/', data=too_large_data)
    assert 413 == resp.status
    resp_text = yield from resp.text()
    assert 'Request Entity Too Large' in resp_text
@asyncio.coroutine
def test_app_max_client_size_none(loop, test_client):
    @asyncio.coroutine
    def handler(request):
        yield from request.post()
        return web.Response(body=b'ok')
    default_max_size = 1024**2
    custom_max_size = None
    app = web.Application(loop=loop, client_max_size=custom_max_size)
    app.router.add_post('/', handler)
    client = yield from test_client(app)
    data = {'long_string': default_max_size * 'x' + 'xxx'}
    resp = yield from client.post('/', data=data)
    assert 200 == resp.status
    resp_text = yield from resp.text()
    assert 'ok' == resp_text
    too_large_data = {'long_string': default_max_size * 2 * 'x'}
    resp = yield from client.post('/', data=too_large_data)
    assert 200 == resp.status
    resp_text = yield from resp.text()
    assert resp_text == 'ok'
 | 
	apache-2.0 | -3,177,248,093,099,355,000 | 27.323113 | 78 | 0.639326 | false | 
| 
	StackStorm/st2contrib | 
	archive/packs/circle_ci/actions/run_build.py | 
	6 | 
	1384 | 
	import httplib
import json
from lib.action import CircleCI
class RunBuild(CircleCI):
    def run(self, project, branch=None, tag=None, vcs_revision=None, build_parameters=None):
        """
        Run a build for a branch, tag or revision of the given project.
        """
        # Add some explicit mutually-exclusive checks.
        if not(branch or tag or vcs_revision):
            raise Exception('At least one of branch, tag or vcs_revision should be provided.')
        if (branch and (tag or vcs_revision)) or (tag and vcs_revision):
            raise Exception('Only one of branch, tag or vcs_revision should be provided.')
        data = None
        if branch:
            path = 'project/%s/tree/%s' % (project, branch)
        else:
            path = 'project/%s' % project
            data = {'tag': tag} if tag else {'revision': vcs_revision}
        # build parameters are passed through to CircleCI
        if build_parameters:
            if data is None:
                data = {}
            data['build_parameters'] = build_parameters
        if data:
            data = json.dumps(data)
        response = self._perform_request(path, method='POST', data=data)
        if response.status_code != httplib.CREATED:
            message = response.json().get('message', 'Unknown reason.')
            raise Exception('Failed to run build : %s' % message)
        return response.json()
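# Illustrative invocation (a hedged sketch; the project slug, branch and
# build parameter below are hypothetical and not taken from this pack):
#
#     RunBuild(config=...).run(project='myorg/myrepo', branch='master',
#                              build_parameters={'RUN_EXTRA_TESTS': 'true'})
#
# This would POST to 'project/myorg/myrepo/tree/master' with the
# build_parameters serialized into the JSON body; supplying both branch and
# tag would trigger the mutual-exclusion error above.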
 | 
	apache-2.0 | -611,335,320,396,927,100 | 31.952381 | 94 | 0.59104 | false | 
| 
	opesci/devito | 
	devito/ir/equations/equation.py | 
	1 | 
	9325 | 
	from cached_property import cached_property
import sympy
from devito.ir.equations.algorithms import dimension_sort, lower_exprs
from devito.finite_differences.differentiable import diff2sympy
from devito.ir.support import (IterationSpace, DataSpace, Interval, IntervalGroup,
                               Stencil, detect_accesses, detect_oobs, detect_io,
                               build_intervals, build_iterators)
from devito.symbolics import CondEq, IntDiv, uxreplace
from devito.tools import Pickable, frozendict
from devito.types import Eq
__all__ = ['LoweredEq', 'ClusterizedEq', 'DummyEq']
class IREq(sympy.Eq):
    _state = ('is_Increment', 'ispace', 'dspace', 'conditionals', 'implicit_dims')
    @property
    def is_Scalar(self):
        return self.lhs.is_Symbol
    is_scalar = is_Scalar
    @property
    def is_Tensor(self):
        return self.lhs.is_Indexed
    @property
    def is_Increment(self):
        return self._is_Increment
    @property
    def ispace(self):
        return self._ispace
    @property
    def dspace(self):
        return self._dspace
    @cached_property
    def dimensions(self):
        # Note: some dimensions may be in the iteration space but not in the
        # data space (e.g., a DerivedDimension); likewise, some dimensions may
        # be in the data space but not in the iteration space (e.g., when a
        # function is indexed with integers only)
        return set(self.dspace.dimensions) | set(self.ispace.dimensions)
    @property
    def implicit_dims(self):
        return self._implicit_dims
    @cached_property
    def conditionals(self):
        return self._conditionals or frozendict()
    @property
    def directions(self):
        return self.ispace.directions
    @property
    def dtype(self):
        return self.lhs.dtype
    @cached_property
    def grid(self):
        grids = set()
        for f in self.dspace.parts:
            if f.is_DiscreteFunction:
                grids.add(f.grid)
        if len(grids) == 1:
            return grids.pop()
        else:
            return None
    @property
    def state(self):
        return {i: getattr(self, i) for i in self._state}
    def apply(self, func):
        """
        Apply a callable to `self` and each expr-like attribute carried by `self`,
        thus triggering a reconstruction.
        """
        args = [func(self.lhs), func(self.rhs)]
        kwargs = dict(self.state)
        kwargs['conditionals'] = {k: func(v) for k, v in self.conditionals.items()}
        return self.func(*args, **kwargs)
class LoweredEq(IREq):
    """
    LoweredEq(devito.Eq)
    LoweredEq(devito.LoweredEq, **kwargs)
    LoweredEq(lhs, rhs, **kwargs)
    A SymPy equation with associated IterationSpace and DataSpace.
    When created as ``LoweredEq(devito.Eq)``, the iteration and data spaces are
    automatically derived from analysis of ``expr``.
    When created as ``LoweredEq(devito.LoweredEq, **kwargs)``, the keyword
    arguments can be anything that appears in ``LoweredEq._state`` (i.e.,
    ispace, dspace, ...).
    When created as ``LoweredEq(lhs, rhs, **kwargs)``, *all* keywords in
    ``LoweredEq._state`` must appear in ``kwargs``.
    """
    _state = IREq._state + ('reads', 'writes')
    def __new__(cls, *args, **kwargs):
        if len(args) == 1 and isinstance(args[0], LoweredEq):
            # origin: LoweredEq(devito.LoweredEq, **kwargs)
            input_expr = args[0]
            expr = sympy.Eq.__new__(cls, *input_expr.args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i, kwargs.get(i) or getattr(input_expr, i))
            return expr
        elif len(args) == 1 and isinstance(args[0], Eq):
            # origin: LoweredEq(devito.Eq)
            input_expr = expr = args[0]
        elif len(args) == 2:
            expr = sympy.Eq.__new__(cls, *args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i, kwargs.pop(i))
            return expr
        else:
            raise ValueError("Cannot construct LoweredEq from args=%s "
                             "and kwargs=%s" % (str(args), str(kwargs)))
        # Well-defined dimension ordering
        ordering = dimension_sort(expr)
        # Analyze the expression
        mapper = detect_accesses(expr)
        oobs = detect_oobs(mapper)
        conditional_dimensions = [i for i in ordering if i.is_Conditional]
        # Construct Intervals for IterationSpace and DataSpace
        intervals = build_intervals(Stencil.union(*mapper.values()))
        iintervals = []  # iteration Intervals
        dintervals = []  # data Intervals
        for i in intervals:
            d = i.dim
            if d in oobs:
                iintervals.append(i.zero())
                dintervals.append(i)
            else:
                iintervals.append(i.zero())
                dintervals.append(i.zero())
        # Construct the IterationSpace
        iintervals = IntervalGroup(iintervals, relations=ordering.relations)
        iterators = build_iterators(mapper)
        ispace = IterationSpace(iintervals, iterators)
        # Construct the DataSpace
        dintervals.extend([Interval(i, 0, 0) for i in ordering
                           if i not in ispace.dimensions + conditional_dimensions])
        parts = {k: IntervalGroup(build_intervals(v)).add(iintervals)
                 for k, v in mapper.items() if k}
        dspace = DataSpace(dintervals, parts)
        # Construct the conditionals and replace the ConditionalDimensions in `expr`
        conditionals = {}
        for d in conditional_dimensions:
            if d.condition is None:
                conditionals[d] = CondEq(d.parent % d.factor, 0)
            else:
                conditionals[d] = diff2sympy(lower_exprs(d.condition))
            if d.factor is not None:
                expr = uxreplace(expr, {d: IntDiv(d.index, d.factor)})
        conditionals = frozendict(conditionals)
        # Lower all Differentiable operations into SymPy operations
        rhs = diff2sympy(expr.rhs)
        # Finally create the LoweredEq with all metadata attached
        expr = super(LoweredEq, cls).__new__(cls, expr.lhs, rhs, evaluate=False)
        expr._dspace = dspace
        expr._ispace = ispace
        expr._conditionals = conditionals
        expr._reads, expr._writes = detect_io(expr)
        expr._is_Increment = input_expr.is_Increment
        expr._implicit_dims = input_expr.implicit_dims
        return expr
    @property
    def reads(self):
        return self._reads
    @property
    def writes(self):
        return self._writes
    def xreplace(self, rules):
        return LoweredEq(self.lhs.xreplace(rules), self.rhs.xreplace(rules), **self.state)
    def func(self, *args):
        return super(LoweredEq, self).func(*args, **self.state, evaluate=False)
class ClusterizedEq(IREq, Pickable):
    """
    ClusterizedEq(devito.IREq, **kwargs)
    ClusterizedEq(lhs, rhs, **kwargs)
    A SymPy equation with associated IterationSpace and DataSpace.
    There are two main differences between a LoweredEq and a
    ClusterizedEq:
    * In a ClusterizedEq, the iteration and data spaces must *always*
      be provided by the caller.
    * A ClusterizedEq is "frozen", meaning that any call to ``xreplace``
      will not trigger re-evaluation (e.g., mathematical simplification)
      of the expression.
    These two properties make a ClusterizedEq suitable for use in a Cluster.
    """
    def __new__(cls, *args, **kwargs):
        if len(args) == 1:
            # origin: ClusterizedEq(expr, **kwargs)
            input_expr = args[0]
            expr = sympy.Eq.__new__(cls, *input_expr.args, evaluate=False)
            for i in cls._state:
                v = kwargs[i] if i in kwargs else getattr(input_expr, i, None)
                setattr(expr, '_%s' % i, v)
        elif len(args) == 2:
            # origin: ClusterizedEq(lhs, rhs, **kwargs)
            expr = sympy.Eq.__new__(cls, *args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i, kwargs.pop(i))
        else:
            raise ValueError("Cannot construct ClusterizedEq from args=%s "
                             "and kwargs=%s" % (str(args), str(kwargs)))
        return expr
    def func(self, *args, **kwargs):
        kwargs = {k: kwargs.get(k, v) for k, v in self.state.items()}
        return super(ClusterizedEq, self).func(*args, **kwargs)
    # Pickling support
    _pickle_args = ['lhs', 'rhs']
    _pickle_kwargs = IREq._state
    __reduce_ex__ = Pickable.__reduce_ex__
class DummyEq(ClusterizedEq):
    """
    DummyEq(expr)
    DummyEq(lhs, rhs)
    A special ClusterizedEq with void iteration and data spaces.
    """
    def __new__(cls, *args, **kwargs):
        if len(args) == 1:
            input_expr = args[0]
            assert isinstance(input_expr, Eq)
            obj = LoweredEq(input_expr)
        elif len(args) == 2:
            obj = LoweredEq(Eq(*args, evaluate=False))
        else:
            raise ValueError("Cannot construct DummyEq from args=%s" % str(args))
        return ClusterizedEq.__new__(cls, obj, ispace=obj.ispace, dspace=obj.dspace)
    # Pickling support
    _pickle_args = ['lhs', 'rhs']
    _pickle_kwargs = []
 | 
	mit | 3,284,389,200,212,844,500 | 32.303571 | 90 | 0.598928 | false | 
| 
	hbldh/hitherdither | 
	hitherdither/ordered/yliluoma/_utils.py | 
	1 | 
	1201 | 
	#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
_utils
-----------
:copyright: 2016-09-23 by hbldh <[email protected]>
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
import numpy as np
# CCIR 601 luminosity
CCIR_LUMINOSITY = np.array([299.0, 587.0, 114.0])
def color_compare(c1, c2):
    """Compare the difference of two RGB values, weigh by CCIR 601 luminosity
    double ColorCompare(int r1,int g1,int b1, int r2,int g2,int b2)
    {
        double luma1 = (r1*299 + g1*587 + b1*114) / (255.0*1000);
        double luma2 = (r2*299 + g2*587 + b2*114) / (255.0*1000);
        double lumadiff = luma1-luma2;
        double diffR = (r1-r2)/255.0, diffG = (g1-g2)/255.0, diffB = (b1-b2)/255.0;
        return (diffR*diffR*0.299 + diffG*diffG*0.587 + diffB*diffB*0.114)*0.75
             + lumadiff*lumadiff;
    }
    :return: float
    """
    luma_diff = c1.dot(CCIR_LUMINOSITY) / (255.0 * 1000.0) - c2.dot(CCIR_LUMINOSITY) / (
        255.0 * 1000.0
    )
    diff_col = (c1 - c2) / 255.0
    return ((diff_col ** 2).dot(CCIR_LUMINOSITY / 1000.0) * 0.75) + (luma_diff ** 2)
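# Illustrative calls (hypothetical colour values, not part of the module):
#     import numpy as np
#     color_compare(np.array([255, 0, 0]), np.array([250, 5, 5]))    # small positive float
#     color_compare(np.array([12, 34, 56]), np.array([12, 34, 56]))  # 0.0
# Identical colours score 0.0; differences in the green channel dominate the
# score via the CCIR 601 weights.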
 | 
	mit | -5,919,380,891,119,812,000 | 27.595238 | 88 | 0.601998 | false | 
| 
	leongold/lago | 
	ovirtlago/testlib.py | 
	1 | 
	7597 | 
	#
# Copyright 2014 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
import datetime
import functools
import logging
import os
import time
import unittest.case
import nose.plugins
from nose.plugins.skip import SkipTest
from lago import (utils, log_utils, cmd as lago_cmd)
import ovirtlago
LOGGER = logging.getLogger(__name__)
SHORT_TIMEOUT = 3 * 60
LONG_TIMEOUT = 10 * 60
_test_prefix = None
def get_test_prefix():
    global _test_prefix
    if _test_prefix is None:
        cur_workdir_path = os.environ.get('LAGO_WORKDIR_PATH', os.curdir)
        workdir = ovirtlago.OvirtWorkdir(path=cur_workdir_path)
        _test_prefix = workdir.get_prefix('current')
    return _test_prefix
def get_prefixed_name(entity_name):
    suite = os.environ.get('SUITE')
    return (
        'lago-' + os.path.basename(suite).replace('.', '-') + '-' + entity_name
    )
def with_ovirt_prefix(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        return func(get_test_prefix(), *args, **kwargs)
    return wrapper
def with_ovirt_api(func):
    @functools.wraps(func)
    @with_ovirt_prefix
    def wrapper(prefix, *args, **kwargs):
        return func(prefix.virt_env.engine_vm().get_api(), *args, **kwargs)
    return wrapper
def with_ovirt_api4(func):
    @functools.wraps(func)
    @with_ovirt_prefix
    def wrapper(prefix, *args, **kwargs):
        return func(
            prefix.virt_env.engine_vm().get_api(api_ver=4), *args, **kwargs
        )
    return wrapper
def _vms_capable(vms, caps):
    caps = set(caps)
    def get_vm_caps(vm):
        return set(vm.metadata.get('ovirt-capabilities', []))
    existing_caps = set()
    for vm in vms:
        existing_caps = existing_caps.union(get_vm_caps(vm) or [])
    return caps.issubset(existing_caps)
def engine_capability(caps):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            prefix = get_test_prefix()
            if not _vms_capable([prefix.virt_env.engine_vm()], caps):
                raise SkipTest()
            return func()
        return wrapper
    return decorator
def host_capability(caps):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            prefix = get_test_prefix()
            if not _vms_capable(prefix.virt_env.host_vms(), caps):
                raise SkipTest()
            return func()
        return wrapper
    return decorator
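# Illustrative use of the capability decorators (the 'snapshot' capability
# name is hypothetical):
#
#     @host_capability(['snapshot'])
#     def test_snapshots():
#         ...
#
# The wrapped test raises SkipTest unless 'snapshot' appears in the
# 'ovirt-capabilities' metadata gathered from the prefix's host VMs.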
def test_sequence_gen(test_list):
    for test in test_list:
        def wrapped_test():
            test()
        setattr(wrapped_test, 'description', test.__name__)
        yield wrapped_test
class LogCollectorPlugin(nose.plugins.Plugin):
    name = 'log-collector-plugin'
    def __init__(self, prefix):
        nose.plugins.Plugin.__init__(self)
        self._prefix = prefix
    def options(self, parser, env=None):
        env = env if env is not None else os.environ
        super(LogCollectorPlugin, self).options(parser, env)
    def configure(self, options, conf):
        super(LogCollectorPlugin, self).configure(options, conf)
    def addError(self, test, err):
        self._addFault(test, err)
    def addFailure(self, test, err):
        self._addFault(test, err)
    def _addFault(self, test, err):
        suffix = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
        test_name = '%s-%s' % (test.id(), suffix)
        lago_cmd.do_collect(
            prefix=self._prefix,
            output=self._prefix.paths.test_logs(test_name),
            no_skip=False
        )
class TaskLogNosePlugin(nose.plugins.Plugin):
    name = "tasklog-plugin"
    # the score parameter is a workaround to catch skipped tests
    # see: https://github.com/nose-devs/nose/issues/45
    score = 10000
    def __init__(self, *args, **kwargs):
        self.logger = logging.getLogger('nose')
        self.skipped = {}
        super(TaskLogNosePlugin, self).__init__(*args, **kwargs)
    def options(self, parser, env):
        return super(TaskLogNosePlugin, self).options(parser, env)
    def configure(self, options, conf):
        res = super(TaskLogNosePlugin, self).configure(options, conf)
        self.logger.handlers = logging.root.handlers
        return res
    def startTest(self, test):
        log_utils.start_log_task(
            test.shortDescription() or str(test), logger=self.logger
        )
    def stopTest(self, test):
        desc = test.shortDescription() or str(test)
        if desc in self.skipped:
            exp_msg = ''
            try:
                exp_msg = self.skipped[desc][1]
            except KeyError:
                pass
            self.logger.info('SKIPPED: %s', exp_msg)
        log_utils.end_log_task(desc, logger=self.logger)
    def addError(self, test, err):
        desc = test.shortDescription() or str(test)
        if issubclass(err[0], unittest.case.SkipTest):
            self.skipped[desc] = err
def _instance_of_any(obj, cls_list):
    return any(True for cls in cls_list if isinstance(obj, cls))
def assert_equals_within(func, value, timeout, allowed_exceptions=None):
    allowed_exceptions = allowed_exceptions or []
    with utils.EggTimer(timeout) as timer:
        while not timer.elapsed():
            try:
                res = func()
                if res == value:
                    return
            except Exception as exc:
                if _instance_of_any(exc, allowed_exceptions):
                    continue
                LOGGER.exception("Unhandled exception in %s", func)
                raise
            time.sleep(3)
    try:
        raise AssertionError(
            '%s != %s after %s seconds' % (res, value, timeout)
        )
    # if func repeatedly raises any of the allowed exceptions, res remains
    # unbound throughout the function, resulting in an UnboundLocalError.
    except UnboundLocalError:
        raise AssertionError(
            '%s failed to evaluate after %s seconds' %
            (func.__name__, timeout)
        )
def assert_equals_within_short(func, value, allowed_exceptions=None):
    allowed_exceptions = allowed_exceptions or []
    assert_equals_within(
        func, value, SHORT_TIMEOUT, allowed_exceptions=allowed_exceptions
    )
def assert_equals_within_long(func, value, allowed_exceptions=None):
    allowed_exceptions = allowed_exceptions or []
    assert_equals_within(
        func, value, LONG_TIMEOUT, allowed_exceptions=allowed_exceptions
    )
def assert_true_within(func, timeout, allowed_exceptions=None):
    assert_equals_within(func, True, timeout, allowed_exceptions)
def assert_true_within_short(func, allowed_exceptions=None):
    assert_equals_within_short(func, True, allowed_exceptions)
def assert_true_within_long(func, allowed_exceptions=None):
    assert_equals_within_long(func, True, allowed_exceptions)
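# Illustrative polling assertion (the engine API lookup is hypothetical):
#
#     assert_true_within_short(
#         lambda: api.vms.get('vm0').status.state == 'up',
#         allowed_exceptions=[AttributeError],
#     )
#
# This retries the callable every 3 seconds, swallowing the allowed
# exceptions, and raises AssertionError if it has not returned True within
# SHORT_TIMEOUT (3 minutes).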
 | 
	gpl-2.0 | -7,734,913,691,153,139,000 | 27.776515 | 79 | 0.634593 | false | 
| 
	whitehorse-io/encarnia | 
	pyenv/lib/python2.7/site-packages/twisted/positioning/_sentence.py | 
	13 | 
	4003 | 
	# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Generic sentence handling tools: hopefully reusable.
"""
from __future__ import absolute_import, division
class _BaseSentence(object):
    """
    A base sentence class for a particular protocol.
    Using this base class, specific sentence classes can almost automatically
    be created for a particular protocol.
    To do this, fill the ALLOWED_ATTRIBUTES class attribute using
    the C{getSentenceAttributes} class method of the producer::
        class FooSentence(BaseSentence):
            \"\"\"
            A sentence for intergalactic transmodulator sentences.
            @ivar transmogrificationConstant: The value used in the
                transmogrifier while producing this sentence, corrected for
                gravitational fields.
            @type transmogrificationConstant: C{Tummy}
            \"\"\"
            ALLOWED_ATTRIBUTES = FooProtocol.getSentenceAttributes()
    @ivar presentAttributes: An iterable containing the names of the
        attributes that are present in this sentence.
    @type presentAttributes: iterable of C{str}
    @cvar ALLOWED_ATTRIBUTES: A set of attributes that are allowed in this
        sentence.
    @type ALLOWED_ATTRIBUTES: C{set} of C{str}
    """
    ALLOWED_ATTRIBUTES = set()
    def __init__(self, sentenceData):
        """
        Initializes a sentence with parsed sentence data.
        @param sentenceData: The parsed sentence data.
        @type sentenceData: C{dict} (C{str} -> C{str} or L{None})
        """
        self._sentenceData = sentenceData
    @property
    def presentAttributes(self):
        """
        An iterable containing the names of the attributes that are present in
        this sentence.
        @return: The iterable of names of present attributes.
        @rtype: iterable of C{str}
        """
        return iter(self._sentenceData)
    def __getattr__(self, name):
        """
        Gets an attribute of this sentence.
        """
        if name in self.ALLOWED_ATTRIBUTES:
            return self._sentenceData.get(name, None)
        else:
            className = self.__class__.__name__
            msg = "%s sentences have no %s attributes" % (className, name)
            raise AttributeError(msg)
    def __repr__(self):
        """
        Returns a textual representation of this sentence.
        @return: A textual representation of this sentence.
        @rtype: C{str}
        """
        items = self._sentenceData.items()
        data = ["%s: %s" % (k, v) for k, v in sorted(items) if k != "type"]
        dataRepr = ", ".join(data)
        typeRepr = self._sentenceData.get("type") or "unknown type"
        className = self.__class__.__name__
        return "<%s (%s) {%s}>" % (className, typeRepr, dataRepr)
class _PositioningSentenceProducerMixin(object):
    """
    A mixin for certain protocols that produce positioning sentences.
    This mixin helps protocols that store the layout of sentences that they
    consume in a C{_SENTENCE_CONTENTS} class variable provide all sentence
    attributes that can ever occur. It does this by providing a class method,
    C{getSentenceAttributes}, which iterates over all sentence types and
    collects the possible sentence attributes.
    """
    @classmethod
    def getSentenceAttributes(cls):
        """
        Returns a set of all attributes that might be found in the sentences
        produced by this protocol.
        This is basically a set of all the attributes of all the sentences that
        this protocol can produce.
        @return: The set of all possible sentence attribute names.
        @rtype: C{set} of C{str}
        """
        attributes = set(["type"])
        for attributeList in cls._SENTENCE_CONTENTS.values():
            for attribute in attributeList:
                if attribute is None:
                    continue
                attributes.add(attribute)
        return attributes
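# Illustrative sketch (FooProtocol and its sentence layout are hypothetical):
#
#     class FooProtocol(_PositioningSentenceProducerMixin):
#         _SENTENCE_CONTENTS = {'GPGGA': ['latitude', 'longitude', None]}
#
#     FooProtocol.getSentenceAttributes()
#     # -> {'type', 'latitude', 'longitude'}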
 | 
	mit | 901,539,691,734,564,900 | 31.811475 | 79 | 0.632526 | false | 
| 
	transifex/openformats | 
	openformats/utils/json.py | 
	1 | 
	15248 | 
	from __future__ import absolute_import
import json
import re
import six
class DumbJson(object):
    """ A utility to help iterate over a JSON string. The main focuses are:
        1. Return the exact contents of each encountered string, don't unescape
           double quotes ('"')
        2. Also return the positions of things encountered
        To initialize, simply pass a JSON string:
            >>> dumb_json = DumbJson('{"hello": "world"}')
        If you want, you can pass an extra argument to identify an embedded
        JSON object within the outer one. For example, if you have this string.
            >>> source = '["first string", {"second": "dict"}, "third string"]'
        You can:
            >>> start = source.index('{')  # 17
            >>> dumb_json = DumbJson(source, start)
        In this case, when you iterate over this, it will only yield the inner
        dictionary (`{"second": "string"}`). The item positions yielded while
        iterating will be in respect to the outer string, so:
            >>> assert list(dumb_json) == [('second', 19, 'dict', 29)]
        If the DumbJson object is a dictionary, then iterating it will yield
        4-tuples with `(key, key_position, value, value_position)`. If it's a
        list it will yield 2-tuples with `(item, item_position)`. Eg:
            >>> assert list(DumbJson('{"a": "b"}')) == [('a', 2, 'b', 7)]
            >>> assert list(DumbJson('["a", "b"]')) == [('a', 2), ('b', 7)]
        Encountering an embedded JSON structure while iterating will yield a
        DumbJson object:
            >>> embedded, _ = list(DumbJson('[["a"]]'))[0]
            >>> assert isinstance(embedded, DumbJson)
            >>> assert list(embedded) == [("a", 3)]
            # Note that the position (3) is in respect to the root JSON string
        When the items or values are not strings but objects allowed by JSON,
        like numbers, booleans or null, they will be yielded normally:
            >>> assert list(DumbJson('{"a": null}')) == [("a", 2, None, 6)]
            >>> assert list(DumbJson('[null]')) == [(None, 1)]
    """
    # Symbols
    BACKSLASH = u'\\'
    DOUBLE_QUOTES = u'"'
    FORWARD_SLASH = u'/'
    BACKSPACE = u'\b'
    FORMFEED = u'\f'
    NEWLINE = u'\n'
    CARRIAGE_RETURN = u'\r'
    TAB = u'\t'
    def __init__(self, source, start=0):
        self.source = source
        self._end = None
        starting_symbol, self.start = self._find_next('{[', start,
                                                      require_whitespace=True)
        if starting_symbol == '{':
            self.type = dict
        elif starting_symbol == '[':
            self.type = list
        else:
            raise ValueError("Input is not a JSON container")
    def __iter__(self):
        if self.type == dict:
            return self._iter_dict()
        elif self.type == list:
            return self._iter_list()
    def _iter_dict(self):
        # The '_p' suffix means 'position'
        start = self.start + 1
        # Maybe it's an empty dict
        end, end_p = self._find_next([self.DOUBLE_QUOTES, '}'], start,
                                     require_whitespace=True)
        if end == "}":
            self.end = end_p
            return
        while True:
            # Let's find our key
            _, start_key_quote_p = self._find_next(self.DOUBLE_QUOTES, start,
                                                   require_whitespace=True)
            key_p = start_key_quote_p + 1
            _, end_key_quote_p = self._find_next(self.DOUBLE_QUOTES, key_p,
                                                 require_whitespace=False)
            key = self.source[key_p:end_key_quote_p]
            _, colon_p = self._find_next(':', end_key_quote_p + 1,
                                         require_whitespace=True)
            value_start_string, value_start_computed, value_start_p =\
                self._process_value(colon_p + 1)
            # Our job in each case is to yield something and set 'next_p' to
            # where we should search for our next item
            if value_start_string == self.DOUBLE_QUOTES:
                # We found a string!
                value_p = value_start_p + 1
                _, value_end_quote_p = self._find_next(
                    self.DOUBLE_QUOTES, value_p, require_whitespace=False
                )
                value = self.source[value_p:value_end_quote_p]
                yield key, key_p, value, value_p
                next_p = value_end_quote_p + 1
            elif value_start_string in ('{', '['):
                # We found an embedded, let's return an instance of ourselves
                embedded = DumbJson(self.source, value_start_p)
                yield key, key_p, embedded, value_start_p
                next_p = embedded.end + 1
            elif (value_start_computed is not None or
                    value_start_string == "null"):
                # We found something else allowed by JSON
                yield key, key_p, value_start_computed, value_start_p
                next_p = value_start_p + len(value_start_string)
            else:
                # Something went wrong
                raise ValueError("No JSON value could be decoded")
            next_symbol, next_symbol_p = self._find_next(
                ',}', next_p, require_whitespace=True
            )
            if next_symbol == ',':
                start = next_symbol_p + 1
            elif next_symbol == '}':
                self.end = next_symbol_p
                break
    def _iter_list(self):
        # The '_p' suffix means 'position'
        start = self.start + 1
        # Maybe it's an empty list
        match = re.search(r'^\s*.', self.source[start:])
        if match:
            if match.group()[-1] == "]":
                self.end = start + match.end() - 1
                return
        while True:
            # Let's find our items
            item_start_string, item_start_computed, item_start_p =\
                self._process_value(start)
            # Our job in each case is to yield something and set 'next_p' to
            # where we should search for our next item
            if item_start_string == self.DOUBLE_QUOTES:
                # We found a string!
                item_p = item_start_p + 1
                _, end_item_quote_p = self._find_next(self.DOUBLE_QUOTES,
                                                      item_p,
                                                      require_whitespace=False)
                item = self.source[item_p:end_item_quote_p]
                yield item, item_p
                next_p = end_item_quote_p + 1
            elif item_start_string in ('{', '['):
                # We found an embedded, let's return an instance of ourselves
                embedded = DumbJson(self.source, item_start_p)
                yield embedded, item_start_p
                next_p = embedded.end + 1
            elif (item_start_computed is not None or
                    item_start_string == "null"):
                # We found something else allowed by JSON
                yield item_start_computed, item_start_p
                next_p = item_start_p + len(item_start_string)
            else:
                # Something went wrong
                raise ValueError("No JSON value could be decoded")
            next_symbol, next_symbol_p = self._find_next(
                ',]', next_p, require_whitespace=True
            )
            if next_symbol == ',':
                start = next_symbol_p + 1
            elif next_symbol == ']':
                self.end = next_symbol_p
                break
    def _find_next(self, symbols, start=0, require_whitespace=True):
        symbols = {s for s in symbols}
        after_backslash = False
        for ptr in six.moves.xrange(start, len(self.source)):
            candidate = self.source[ptr]
            if candidate == '\\':
                after_backslash = not after_backslash
            if candidate in symbols:
                if candidate == self.DOUBLE_QUOTES and after_backslash:
                    after_backslash = False
                    continue
                return candidate, ptr
            if candidate != '\\':
                after_backslash = False
            if require_whitespace and not candidate.isspace():
                newline_count = self.source.count(self.NEWLINE, 0, ptr)
                raise ValueError(
                    u"Was expecting whitespace or one of `{symbols}` on line "
                    u"{line_no}, found `{candidate}` instead".format(
                        symbols=''.join(sorted(symbols)),
                        line_no=newline_count + 1,
                        candidate=candidate,
                    )
                )
        return None, None
    def _process_value(self, start):
        """ A variation of _find_next. If the next non-empty character after
            `start` is in ('"', '{', '['), this will behave exactly like
            `_find_next('"{[', start)`. If the next non-empty sequence after
            `start` is a number, boolean or 'null', it will return appropriate
            values.
            Returns 3 values:
            - value_start_string: in case of ('"', '{', '['), it is the same as
              the first return value of _find_next. Otherwise, it's the string
              representation of whatever the value is (the actual string "true"
              or "3.14159")
            - value_start_computed: in case of ('"', '{', '['), it is
              irrelevant (None), otherwise it's the computed value
            - value_start_p: where the value, whatever it is, is encountered
        """
        # Let's construct a regular expression to find the first non-empty char
        value_pat = r'{dict_list_string}|{true_false_null}|{e_notation}|'\
            r'{_float}|{integer}'.format(
                dict_list_string=r'[{\["]',
                true_false_null=r'true|false|null',
                e_notation=r'-?\d+e-?\d+',
                _float=r'-?\d+\.\d+',
                integer=r'-?\d+',
            )
        first_non_empty = r'^(?P<spaces>\s*)(?P<value>{})'.format(value_pat)
        match = re.search(first_non_empty, self.source[start:])
        # We probably found a match, otherwise this is not JSON
        if match:
            spaces, value = match.groups()
            value_start = start + len(spaces)
            if value in ('{', '[', self.DOUBLE_QUOTES):
                return value, None, value_start
            else:
                # We either have true/false/null or a number of sorts
                return value, json.loads(value), value_start
        else:
            raise ValueError("No JSON value could be decoded")
    @property
    def end(self):
        if self._end is None:
            # In order for 'end' to be calculated, 'self' must be iterated over
            # first. Normally this should happen on its own when we're
            # searching in a DFS manner, otherwise, we have to force it.
            for _ in self:
                pass
        return self._end
    @end.setter
    def end(self, value):
        self._end = value
    def find_children(self, *keys):
        """
            Get values (and positions) of a DumbJson dict. Usage:
                >>> jj = DumbJson('{"a": "aaa", "b": "bbb"}')
                >>> (a, a_pos), (c, c_pos) = jj.find_children('a', 'c')
                >>> print(a, a_pos, c, c_pos)
                <<< 'aaa', 7, None, None
                >>> # Notice the trailing comma (`,`)
                >>> (a, a_pos), = jj.find_children('a')
                >>> print(a, a_pos)
                <<< 'aaa', 7
            :return: a list of 2-tuples with values and value positions
            :rtype: list
        """
        found = {}
        for key, key_position, value, value_position in self:
            if key in keys:
                found[key] = (value, value_position)
        return [(found.get(key, (None, None))) for key in keys]
def escape(string):
    return u''.join(_escape_generator(string))
    # btw, this seems equivalent to
    # return json.dumps(string, ensure_ascii=False)[1:-1]
def _escape_generator(string):
    for symbol in string:
        if symbol == DumbJson.DOUBLE_QUOTES:
            yield DumbJson.BACKSLASH
            yield DumbJson.DOUBLE_QUOTES
        elif symbol == DumbJson.BACKSLASH:
            yield DumbJson.BACKSLASH
            yield DumbJson.BACKSLASH
        elif symbol == DumbJson.BACKSPACE:
            yield DumbJson.BACKSLASH
            yield u'b'
        elif symbol == DumbJson.FORMFEED:
            yield DumbJson.BACKSLASH
            yield u'f'
        elif symbol == DumbJson.NEWLINE:
            yield DumbJson.BACKSLASH
            yield u'n'
        elif symbol == DumbJson.CARRIAGE_RETURN:
            yield DumbJson.BACKSLASH
            yield u'r'
        elif symbol == DumbJson.TAB:
            yield DumbJson.BACKSLASH
            yield u't'
        else:
            yield symbol
def unescape(string):
    return u''.join(_unescape_generator(string))
    # btw, this seems equivalent to
    # return json.loads(u'"{}"'.format(string))
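# Illustrative round trip (a hedged example, not taken from the test suite):
#     escape(u'say "hi"\n')        ->  u'say \\"hi\\"\\n'
#     unescape(u'say \\"hi\\"\\n') ->  u'say "hi"\n'
# escape() emits the JSON string-escaped form and unescape() reverses it,
# also decoding \uXXXX sequences.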
def _unescape_generator(string):
    # I don't like this old-school approach, but we may have to rewind a bit
    ptr = 0
    while True:
        if ptr >= len(string):
            break
        symbol = string[ptr]
        if symbol != DumbJson.BACKSLASH:
            yield symbol
            ptr += 1
            continue
        try:
            next_symbol = string[ptr + 1]
        except IndexError:
            yield DumbJson.BACKSLASH
            ptr += 1
            continue
        if next_symbol in (DumbJson.DOUBLE_QUOTES, DumbJson.FORWARD_SLASH,
                           DumbJson.BACKSLASH):
            yield next_symbol
            ptr += 2
        elif next_symbol == u'b':
            yield DumbJson.BACKSPACE
            ptr += 2
        elif next_symbol == u'f':
            yield DumbJson.FORMFEED
            ptr += 2
        elif next_symbol == u'n':
            yield DumbJson.NEWLINE
            ptr += 2
        elif next_symbol == u'r':
            yield DumbJson.CARRIAGE_RETURN
            ptr += 2
        elif next_symbol == u't':
            yield DumbJson.TAB
            ptr += 2
        elif next_symbol == u'u':
            unicode_escaped = string[ptr:ptr + 6]
            try:
                unescaped = unicode_escaped.\
                    encode('ascii').\
                    decode('unicode-escape')
            except Exception:
                yield DumbJson.BACKSLASH
                yield u'u'
                ptr += 2
                continue
            if len(unescaped) != 1:
                yield DumbJson.BACKSLASH
                yield u'u'
                ptr += 2
                continue
            yield unescaped
            ptr += 6
        else:
            yield symbol
            ptr += 1
for symbol in (DumbJson.BACKSLASH, DumbJson.DOUBLE_QUOTES,
               DumbJson.FORWARD_SLASH, DumbJson.BACKSPACE, DumbJson.FORMFEED,
               DumbJson.NEWLINE, DumbJson.CARRIAGE_RETURN, DumbJson.TAB):
    assert len(symbol) == 1
 | 
	gpl-3.0 | -29,226,295,603,232,036 | 36.281174 | 79 | 0.50846 | false | 
| 
	Juniper/nova | 
	nova/tests/unit/api/openstack/compute/test_virtual_interfaces.py | 
	3 | 
	6996 | 
	# Copyright (C) 2011 Midokura KK
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.
import mock
import webob
from nova.api.openstack import api_version_request
from nova.api.openstack.compute import virtual_interfaces as vi21
from nova import compute
from nova.compute import api as compute_api
from nova import context
from nova import exception
from nova import network
from nova.objects import virtual_interface as vif_obj
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests import uuidsentinel as uuids
FAKE_UUID = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
def compute_api_get(context, instance_id, expected_attrs=None):
    return dict(uuid=FAKE_UUID, id=instance_id, instance_type_id=1, host='bob')
def _generate_fake_vifs(context):
    vif = vif_obj.VirtualInterface(context=context)
    vif.address = '00-00-00-00-00-00'
    vif.network_id = 123
    vif.net_uuid = '22222222-2222-2222-2222-22222222222222222'
    vif.uuid = uuids.vif1_uuid
    fake_vifs = [vif]
    vif = vif_obj.VirtualInterface(context=context)
    vif.address = '11-11-11-11-11-11'
    vif.network_id = 456
    vif.net_uuid = '33333333-3333-3333-3333-33333333333333333'
    vif.uuid = uuids.vif2_uuid
    fake_vifs.append(vif)
    return fake_vifs
def get_vifs_by_instance(context, instance_id):
    return _generate_fake_vifs(context)
class FakeRequest(object):
    def __init__(self, context):
        self.environ = {'nova.context': context}
class ServerVirtualInterfaceTestV21(test.NoDBTestCase):
    wsgi_api_version = '2.1'
    expected_response = {
        'virtual_interfaces': [
            {'id': uuids.vif1_uuid,
                'mac_address': '00-00-00-00-00-00'},
            {'id': uuids.vif2_uuid,
                'mac_address': '11-11-11-11-11-11'}]}
    def setUp(self):
        super(ServerVirtualInterfaceTestV21, self).setUp()
        # These APIs aren't implemented by the neutronv2 API code in Nova so
        # the tests need to specifically run against nova-network unless
        # otherwise set up to run with Neutron and expect failure.
        self.flags(use_neutron=False)
        self.compute_api_get_patcher = mock.patch.object(
            compute.api.API, "get",
            side_effect=compute_api_get)
        self.get_vifs_by_instance_patcher = mock.patch.object(
            network.api.API, "get_vifs_by_instance",
            side_effect=get_vifs_by_instance)
        self.compute_api_get_patcher.start()
        self.get_vifs_by_instance_patcher.start()
        self.addCleanup(self.compute_api_get_patcher.stop)
        self.addCleanup(self.get_vifs_by_instance_patcher.stop)
        self._set_controller()
    def _set_controller(self):
        self.controller = vi21.ServerVirtualInterfaceController()
    def test_get_virtual_interfaces_list(self):
        req = fakes.HTTPRequest.blank('', version=self.wsgi_api_version)
        res_dict = self.controller.index(req, 'fake_uuid')
        self.assertEqual(self.expected_response, res_dict)
    def test_get_virtual_interfaces_list_offset_and_limit(self):
        path = '/v2/fake/os-virtual-interfaces?offset=1&limit=1'
        req = fakes.HTTPRequest.blank(path, version=self.wsgi_api_version)
        res_dict = self.controller.index(req, 'fake_uuid')
        name = 'virtual_interfaces'
        limited_response = {name: [self.expected_response[name][1]]}
        self.assertEqual(limited_response, res_dict)
    @mock.patch.object(compute_api.API, 'get',
                       side_effect=exception.InstanceNotFound(
                           instance_id='instance-0000'))
    def test_vif_instance_not_found(self, mock_get):
        fake_context = context.RequestContext('fake', 'fake')
        fake_req = FakeRequest(fake_context)
        fake_req.api_version_request = api_version_request.APIVersionRequest(
                                        self.wsgi_api_version)
        self.assertRaises(
            webob.exc.HTTPNotFound,
            self.controller.index,
            fake_req, 'fake_uuid')
        mock_get.assert_called_once_with(fake_context,
                                         'fake_uuid',
                                         expected_attrs=None)
    def test_list_vifs_neutron_notimplemented(self):
        """Tests that a 400 is returned when using neutron as the backend"""
        # unset the get_vifs_by_instance stub from setUp
        self.get_vifs_by_instance_patcher.stop()
        self.flags(use_neutron=True)
        # reset the controller to use the neutron network API
        self._set_controller()
        req = fakes.HTTPRequest.blank('', version=self.wsgi_api_version)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.index, req, FAKE_UUID)
        self.get_vifs_by_instance_patcher.start()
class ServerVirtualInterfaceTestV212(ServerVirtualInterfaceTestV21):
    wsgi_api_version = '2.12'
    expected_response = {
        'virtual_interfaces': [
            {'id': uuids.vif1_uuid,
                'mac_address': '00-00-00-00-00-00',
                'net_id': '22222222-2222-2222-2222-22222222222222222'},
            {'id': uuids.vif2_uuid,
                'mac_address': '11-11-11-11-11-11',
                'net_id': '33333333-3333-3333-3333-33333333333333333'}]}
class ServerVirtualInterfaceEnforcementV21(test.NoDBTestCase):
    def setUp(self):
        super(ServerVirtualInterfaceEnforcementV21, self).setUp()
        self.controller = vi21.ServerVirtualInterfaceController()
        self.req = fakes.HTTPRequest.blank('')
    def test_index_virtual_interfaces_policy_failed(self):
        rule_name = "os_compute_api:os-virtual-interfaces"
        self.policy.set_rules({rule_name: "project:non_fake"})
        exc = self.assertRaises(
            exception.PolicyNotAuthorized,
            self.controller.index, self.req, fakes.FAKE_UUID)
        self.assertEqual(
            "Policy doesn't allow %s to be performed." % rule_name,
            exc.format_message())
class ServerVirtualInterfaceDeprecationTest(test.NoDBTestCase):
    def setUp(self):
        super(ServerVirtualInterfaceDeprecationTest, self).setUp()
        self.controller = vi21.ServerVirtualInterfaceController()
        self.req = fakes.HTTPRequest.blank('', version='2.44')
    def test_index_not_found(self):
        self.assertRaises(exception.VersionNotFoundForAPIMethod,
            self.controller.index, self.req, FAKE_UUID)
 | 
	apache-2.0 | 4,546,904,235,726,762,500 | 38.525424 | 79 | 0.658376 | false | 
| 
	starwels/starwels | 
	test/functional/test_framework/address.py | 
	2 | 
	2846 | 
	#!/usr/bin/env python3
# Copyright (c) 2016-2019 The Starwels developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Encode and decode BASE58, P2PKH and P2SH addresses."""
from .script import hash256, hash160, sha256, CScript, OP_0
from .util import bytes_to_hex_str, hex_str_to_bytes
from . import segwit_addr
chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
def byte_to_base58(b, version):
    result = ''
    str = bytes_to_hex_str(b)
    str = bytes_to_hex_str(chr(version).encode('latin-1')) + str
    checksum = bytes_to_hex_str(hash256(hex_str_to_bytes(str)))
    str += checksum[:8]
    value = int('0x'+str,0)
    while value > 0:
        result = chars[value % 58] + result
        value //= 58
    while (str[:2] == '00'):
        result = chars[0] + result
        str = str[2:]
    return result
# TODO: def base58_decode
def keyhash_to_p2pkh(hash, main = False):
    assert (len(hash) == 20)
    version = 0 if main else 111
    return byte_to_base58(hash, version)
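# Illustrative sketch (not part of the original module): with the version bytes
# chosen above, a 20-byte key hash encodes to a testnet P2PKH address starting
# with 'm' or 'n' by default, and to a mainnet address starting with '1' when
# main=True:
#     keyhash_to_p2pkh(b'\x00' * 20)             # testnet prefix 'm'/'n'
#     keyhash_to_p2pkh(b'\x00' * 20, main=True)  # mainnet, leading '1'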
def scripthash_to_p2sh(hash, main = False):
    assert (len(hash) == 20)
    version = 5 if main else 196
    return byte_to_base58(hash, version)
def key_to_p2pkh(key, main = False):
    key = check_key(key)
    return keyhash_to_p2pkh(hash160(key), main)
def script_to_p2sh(script, main = False):
    script = check_script(script)
    return scripthash_to_p2sh(hash160(script), main)
def key_to_p2sh_p2wpkh(key, main = False):
    key = check_key(key)
    p2shscript = CScript([OP_0, hash160(key)])
    return script_to_p2sh(p2shscript, main)
def program_to_witness(version, program, main = False):
    if (type(program) is str):
        program = hex_str_to_bytes(program)
    assert 0 <= version <= 16
    assert 2 <= len(program) <= 40
    assert version > 0 or len(program) in [20, 32]
    return segwit_addr.encode("bc" if main else "bcrt", version, program)
def script_to_p2wsh(script, main = False):
    script = check_script(script)
    return program_to_witness(0, sha256(script), main)
def key_to_p2wpkh(key, main = False):
    key = check_key(key)
    return program_to_witness(0, hash160(key), main)
def script_to_p2sh_p2wsh(script, main = False):
    script = check_script(script)
    p2shscript = CScript([OP_0, sha256(script)])
    return script_to_p2sh(p2shscript, main)
def check_key(key):
    if (type(key) is str):
        key = hex_str_to_bytes(key) # Assuming this is a hex string
    if (type(key) is bytes and (len(key) == 33 or len(key) == 65)):
        return key
    assert(False)
def check_script(script):
    if (type(script) is str):
        script = hex_str_to_bytes(script) # Assuming this is a hex string
    if (type(script) is bytes or type(script) is CScript):
        return script
    assert(False)
 | 
	mit | 6,424,822,730,027,099,000 | 31.712644 | 73 | 0.659522 | false | 
| 
	duhzecca/cinder | 
	cinder/tests/unit/objects/test_base.py | 
	8 | 
	3746 | 
	#    Copyright 2015 Red Hat, Inc.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.
import datetime
import uuid
from iso8601 import iso8601
from oslo_versionedobjects import fields
from cinder.objects import base
from cinder.tests.unit import objects as test_objects
@base.CinderObjectRegistry.register
class TestObject(base.CinderObject):
    fields = {
        'scheduled_at': base.fields.DateTimeField(nullable=True),
        'uuid': base.fields.UUIDField(),
        'text': base.fields.StringField(nullable=True),
    }
class TestCinderObject(test_objects.BaseObjectsTestCase):
    """Tests methods from CinderObject."""
    def setUp(self):
        super(TestCinderObject, self).setUp()
        self.obj = TestObject(
            scheduled_at=None,
            uuid=uuid.uuid4(),
            text='text')
        self.obj.obj_reset_changes()
    def test_cinder_obj_get_changes_no_changes(self):
        self.assertDictEqual({}, self.obj.cinder_obj_get_changes())
    def test_cinder_obj_get_changes_other_changes(self):
        self.obj.text = 'text2'
        self.assertDictEqual({'text': 'text2'},
                             self.obj.cinder_obj_get_changes())
    def test_cinder_obj_get_changes_datetime_no_tz(self):
        now = datetime.datetime.utcnow()
        self.obj.scheduled_at = now
        self.assertDictEqual({'scheduled_at': now},
                             self.obj.cinder_obj_get_changes())
    def test_cinder_obj_get_changes_datetime_tz_utc(self):
        now_tz = iso8601.parse_date('2015-06-26T22:00:01Z')
        now = now_tz.replace(tzinfo=None)
        self.obj.scheduled_at = now_tz
        self.assertDictEqual({'scheduled_at': now},
                             self.obj.cinder_obj_get_changes())
    def test_cinder_obj_get_changes_datetime_tz_non_utc_positive(self):
        now_tz = iso8601.parse_date('2015-06-26T22:00:01+01')
        now = now_tz.replace(tzinfo=None) - datetime.timedelta(hours=1)
        self.obj.scheduled_at = now_tz
        self.assertDictEqual({'scheduled_at': now},
                             self.obj.cinder_obj_get_changes())
    def test_cinder_obj_get_changes_datetime_tz_non_utc_negative(self):
        now_tz = iso8601.parse_date('2015-06-26T10:00:01-05')
        now = now_tz.replace(tzinfo=None) + datetime.timedelta(hours=5)
        self.obj.scheduled_at = now_tz
        self.assertDictEqual({'scheduled_at': now},
                             self.obj.cinder_obj_get_changes())
class TestCinderComparableObject(test_objects.BaseObjectsTestCase):
    def test_comparable_objects(self):
        @base.CinderObjectRegistry.register
        class MyComparableObj(base.CinderObject,
                              base.CinderObjectDictCompat,
                              base.CinderComparableObject):
            fields = {'foo': fields.Field(fields.Integer())}
        class NonVersionedObject(object):
            pass
        obj1 = MyComparableObj(foo=1)
        obj2 = MyComparableObj(foo=1)
        obj3 = MyComparableObj(foo=2)
        obj4 = NonVersionedObject()
        self.assertTrue(obj1 == obj2)
        self.assertFalse(obj1 == obj3)
        self.assertFalse(obj1 == obj4)
        self.assertNotEqual(obj1, None)
 | 
	apache-2.0 | 1,232,266,410,977,952,500 | 36.838384 | 78 | 0.640416 | false | 
| 
	preparationh67/youtube-dl | 
	youtube_dl/extractor/eagleplatform.py | 
	8 | 
	4080 | 
	# coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
    ExtractorError,
    int_or_none,
)
class EaglePlatformIE(InfoExtractor):
    _VALID_URL = r'''(?x)
                    (?:
                        eagleplatform:(?P<custom_host>[^/]+):|
                        https?://(?P<host>.+?\.media\.eagleplatform\.com)/index/player\?.*\brecord_id=
                    )
                    (?P<id>\d+)
                '''
    _TESTS = [{
        # http://lenta.ru/news/2015/03/06/navalny/
        'url': 'http://lentaru.media.eagleplatform.com/index/player?player=new&record_id=227304&player_template_id=5201',
        'md5': '70f5187fb620f2c1d503b3b22fd4efe3',
        'info_dict': {
            'id': '227304',
            'ext': 'mp4',
            'title': 'Навальный вышел на свободу',
            'description': 'md5:d97861ac9ae77377f3f20eaf9d04b4f5',
            'thumbnail': 're:^https?://.*\.jpg$',
            'duration': 87,
            'view_count': int,
            'age_limit': 0,
        },
    }, {
        # http://muz-tv.ru/play/7129/
        # http://media.clipyou.ru/index/player?record_id=12820&width=730&height=415&autoplay=true
        'url': 'eagleplatform:media.clipyou.ru:12820',
        'md5': '90b26344ba442c8e44aa4cf8f301164a',
        'info_dict': {
            'id': '12820',
            'ext': 'mp4',
            'title': "'O Sole Mio",
            'thumbnail': 're:^https?://.*\.jpg$',
            'duration': 216,
            'view_count': int,
        },
        'skip': 'Georestricted',
    }]
    @staticmethod
    def _handle_error(response):
        status = int_or_none(response.get('status', 200))
        if status != 200:
            raise ExtractorError(' '.join(response['errors']), expected=True)
    def _download_json(self, url_or_request, video_id, note='Downloading JSON metadata'):
        response = super(EaglePlatformIE, self)._download_json(url_or_request, video_id, note)
        self._handle_error(response)
        return response
    def _get_video_url(self, url_or_request, video_id, note='Downloading JSON metadata'):
        return self._download_json(url_or_request, video_id, note)['data'][0]
    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        host, video_id = mobj.group('custom_host') or mobj.group('host'), mobj.group('id')
        player_data = self._download_json(
            'http://%s/api/player_data?id=%s' % (host, video_id), video_id)
        media = player_data['data']['playlist']['viewports'][0]['medialist'][0]
        title = media['title']
        description = media.get('description')
        thumbnail = self._proto_relative_url(media.get('snapshot'), 'http:')
        duration = int_or_none(media.get('duration'))
        view_count = int_or_none(media.get('views'))
        age_restriction = media.get('age_restriction')
        age_limit = None
        if age_restriction:
            age_limit = 0 if age_restriction == 'allow_all' else 18
        secure_m3u8 = self._proto_relative_url(media['sources']['secure_m3u8']['auto'], 'http:')
        m3u8_url = self._get_video_url(secure_m3u8, video_id, 'Downloading m3u8 JSON')
        formats = self._extract_m3u8_formats(
            m3u8_url, video_id,
            'mp4', entry_protocol='m3u8_native')
        mp4_url = self._get_video_url(
            # Secure mp4 URL is constructed according to Player.prototype.mp4 from
            # http://lentaru.media.eagleplatform.com/player/player.js
            re.sub(r'm3u8|hlsvod|hls|f4m', 'mp4', secure_m3u8),
            video_id, 'Downloading mp4 JSON')
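        # Illustrative effect of the substitution above (hypothetical URL):
        #     re.sub(r'm3u8|hlsvod|hls|f4m', 'mp4', 'http://host/hls/video.m3u8')
        #     ->  'http://host/mp4/video.mp4'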
        formats.append({'url': mp4_url, 'format_id': 'mp4'})
        self._sort_formats(formats)
        return {
            'id': video_id,
            'title': title,
            'description': description,
            'thumbnail': thumbnail,
            'duration': duration,
            'view_count': view_count,
            'age_limit': age_limit,
            'formats': formats,
        }
 | 
	unlicense | 4,116,274,861,903,518,000 | 35.881818 | 121 | 0.552132 | false | 
| 
	orbitvu/django-extensions | 
	django_extensions/management/commands/graph_models.py | 
	2 | 
	3651 | 
	from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
from django_extensions.management.modelviz import generate_dot
class Command(BaseCommand):
    option_list = BaseCommand.option_list + (
        make_option('--disable-fields', '-d', action='store_true', dest='disable_fields',
                    help='Do not show the class member fields'),
        make_option('--group-models', '-g', action='store_true', dest='group_models',
                    help='Group models together respective to their application'),
        make_option('--all-applications', '-a', action='store_true', dest='all_applications',
                    help='Automatically include all applications from INSTALLED_APPS'),
        make_option('--output', '-o', action='store', dest='outputfile',
                    help='Render output file. Type of output depends on file extension. Use png or jpg to render the graph to an image.'),
        make_option('--layout', '-l', action='store', dest='layout', default='dot',
                    help='Layout to be used by GraphViz for visualization. Layouts: circo dot fdp neato nop nop1 nop2 twopi'),
        make_option('--verbose-names', '-n', action='store_true', dest='verbose_names',
                    help='Use verbose_name of models and fields'),
        make_option('--language', '-L', action='store', dest='language',
                    help='Specify language used for verbose_name localization'),
        make_option('--exclude-columns', '-x', action='store', dest='exclude_columns',
                    help='Exclude specific column(s) from the graph. Can also load exclude list from file.'),
        make_option('--exclude-models', '-X', action='store', dest='exclude_models',
                    help='Exclude specific model(s) from the graph. Can also load exclude list from file.'),
        make_option('--inheritance', '-e', action='store_true', dest='inheritance',
                    help='Include inheritance arrows'),
    )
    help = ("Creates a GraphViz dot file for the specified app names.  You can pass multiple app names and they will all be combined into a single model.  Output is usually directed to a dot file.")
    args = "[appname]"
    label = 'application name'
    requires_model_validation = True
    can_import_settings = True
    def handle(self, *args, **options):
        if len(args) < 1 and not options['all_applications']:
            raise CommandError("need one or more arguments for appname")
        dotdata = generate_dot(args, **options)
        if options['outputfile']:
            self.render_output(dotdata, **options)
        else:
            self.print_output(dotdata)
    def print_output(self, dotdata):
        print dotdata.encode('utf-8')
    def render_output(self, dotdata, **kwargs):
        try:
            import pygraphviz
        except ImportError:
            raise CommandError("You need to install pygraphviz python module")
        vizdata = ' '.join(dotdata.split("\n")).strip().encode('utf-8')
        version = pygraphviz.__version__.rstrip("-svn")
        try:
            if [int(v) for v in version.split('.')] < [0, 36]:
                # HACK around old/broken AGraph before version 0.36 (ubuntu ships with this old version)
                import tempfile
                tmpfile = tempfile.NamedTemporaryFile()
                tmpfile.write(vizdata)
                tmpfile.seek(0)
                vizdata = tmpfile.name
        except ValueError:
            pass
        graph = pygraphviz.AGraph(vizdata)
        graph.layout(prog=kwargs['layout'])
        graph.draw(kwargs['outputfile'])
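# Illustrative invocation (a sketch based on the options defined above, not part
# of the original module):
#     ./manage.py graph_models -a -g -o myproject_models.png
# renders every app in INSTALLED_APPS, grouped per application, to a PNG file.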
 | 
	mit | -8,109,666,665,435,858,000 | 50.422535 | 198 | 0.6149 | false | 
| 
	aam-at/tensorflow | 
	tensorflow/python/debug/lib/debug_v2_ops_test.py | 
	5 | 
	30428 | 
	# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test for the internal ops used by tfdbg v2."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
from tensorflow.core.protobuf import debug_event_pb2
from tensorflow.python.compat import compat
from tensorflow.python.debug.lib import debug_events_reader
from tensorflow.python.debug.lib import debug_events_writer
from tensorflow.python.debug.lib import dumping_callback_test_lib
from tensorflow.python.eager import def_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_util
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_debug_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import googletest
class DebugIdentityV2OpTest(dumping_callback_test_lib.DumpingCallbackTestBase):
  """Tests for DebugIdentityV2Op: when DebugEventsWriter is initialized.
  DebugEventsWriter being initialized prior to DebugIdentityV2 ops being invoked
  for the first time is the typical case (e.g., tfdbg2 running on a local
  machine with only local devices.)
  """
  def setUp(self):
    super(DebugIdentityV2OpTest, self).setUp()
    # Testing using a small circular-buffer size.
    self.circular_buffer_size = 4
    self.tfdbg_run_id = "test_tfdbg_run"
    self.writer = debug_events_writer.DebugEventsWriter(
        self.dump_root, self.tfdbg_run_id, self.circular_buffer_size)
  def tearDown(self):
    self.writer.Close()
    super(DebugIdentityV2OpTest, self).tearDown()
  @test_util.run_in_graph_and_eager_modes
  def testSingleTensorFullTensorDebugModeWithCircularBufferBehavior(self):
    @def_function.function
    def write_debug_trace(x):
      square = math_ops.square(x)
      gen_debug_ops.debug_identity_v2(
          square,
          tfdbg_context_id="deadbeaf",
          op_name="Square",
          output_slot=0,
          tensor_debug_mode=debug_event_pb2.TensorDebugMode.FULL_TENSOR,
          debug_urls=["file://%s" % self.dump_root])
      sqrt = math_ops.sqrt(x)
      gen_debug_ops.debug_identity_v2(
          sqrt,
          tfdbg_context_id="beafdead",
          op_name="Sqrt",
          output_slot=0,
          tensor_debug_mode=debug_event_pb2.TensorDebugMode.FULL_TENSOR,
          debug_urls=["file://%s" % self.dump_root])
      return square + sqrt
    x = np.array([3.0, 4.0])
    # Only the graph-execution trace of the last iteration should be written
    # to self.dump_root.
    for _ in range(self.circular_buffer_size // 2 + 1):
      self.assertAllClose(
          write_debug_trace(x), [9.0 + np.sqrt(3.0), 16.0 + 2.0])
    with debug_events_reader.DebugEventsReader(self.dump_root) as reader:
      # Check that the .metadata DebugEvents data file has been created, even
      # before FlushExecutionFiles() is called.
      self.assertGreater(reader.starting_wall_time(), 0)
      self.assertTrue(reader.tensorflow_version())
      self.assertTrue(reader.tfdbg_file_version().startswith("debug.Event"))
      graph_trace_iter = reader.graph_execution_traces_iterators()[0]
      # Before FlushExecutionFiles() is called, the .graph_execution_traces file
      # ought to be empty.
      with self.assertRaises(StopIteration):
        next(graph_trace_iter)
      # Flush the circular buffer.
      self.writer.FlushExecutionFiles()
      graph_trace_iter = reader.graph_execution_traces_iterators()[0]
      # The circular buffer has a size of 4. So only the data from the
      # last two iterations should have been written to self.dump_root.
      for _ in range(2):
        debug_event = next(graph_trace_iter).debug_event
        self.assertGreater(debug_event.wall_time, 0)
        trace = debug_event.graph_execution_trace
        self.assertEqual(trace.tfdbg_context_id, "deadbeaf")
        self.assertEqual(trace.op_name, "Square")
        self.assertEqual(trace.output_slot, 0)
        self.assertEqual(trace.tensor_debug_mode,
                         debug_event_pb2.TensorDebugMode.FULL_TENSOR)
        tensor_value = tensor_util.MakeNdarray(trace.tensor_proto)
        self.assertAllClose(tensor_value, [9.0, 16.0])
        debug_event = next(graph_trace_iter).debug_event
        self.assertGreater(debug_event.wall_time, 0)
        trace = debug_event.graph_execution_trace
        self.assertEqual(trace.tfdbg_context_id, "beafdead")
        self.assertEqual(trace.op_name, "Sqrt")
        self.assertEqual(trace.output_slot, 0)
        self.assertEqual(trace.tensor_debug_mode,
                         debug_event_pb2.TensorDebugMode.FULL_TENSOR)
        tensor_value = tensor_util.MakeNdarray(trace.tensor_proto)
        self.assertAllClose(tensor_value, [np.sqrt(3.0), 2.0])
      # Only the graph-execution trace of the last iteration should be written
      # to self.dump_root.
      with self.assertRaises(StopIteration):
        next(graph_trace_iter)
  @test_util.run_in_graph_and_eager_modes
  def testControlFlow(self):
    @def_function.function
    def collatz(x):
      counter = constant_op.constant(0, dtype=dtypes.int32)
      while math_ops.greater(x, 1):
        counter = counter + 1
        gen_debug_ops.debug_identity_v2(
            x,
            tfdbg_context_id="deadbeaf",
            op_name="x",
            output_slot=0,
            tensor_debug_mode=debug_event_pb2.TensorDebugMode.FULL_TENSOR,
            debug_urls=["file://%s" % self.dump_root])
        if math_ops.equal(x % 2, 0):
          x = math_ops.div(x, 2)
        else:
          x = x * 3 + 1
      return counter
    x = constant_op.constant(10, dtype=dtypes.int32)
    self.evaluate(collatz(x))
    self.writer.FlushExecutionFiles()
    with debug_events_reader.DebugEventsReader(self.dump_root) as reader:
      graph_trace_iter = reader.graph_execution_traces_iterators()[0]
      try:
        x_values = []
        timestamp = 0
        while True:
          debug_event = next(graph_trace_iter).debug_event
          self.assertGreater(debug_event.wall_time, timestamp)
          timestamp = debug_event.wall_time
          trace = debug_event.graph_execution_trace
          self.assertEqual(trace.tfdbg_context_id, "deadbeaf")
          self.assertEqual(trace.op_name, "x")
          self.assertEqual(trace.output_slot, 0)
          self.assertEqual(trace.tensor_debug_mode,
                           debug_event_pb2.TensorDebugMode.FULL_TENSOR)
          x_values.append(int(tensor_util.MakeNdarray(trace.tensor_proto)))
      except StopIteration:
        pass
      # Due to the circular buffer, only the last 4 iterations of
      # [10, 5, 16, 8, 4, 2] should have been written.
      self.assertAllEqual(x_values, [16, 8, 4, 2])
  @test_util.run_in_graph_and_eager_modes
  def testTwoDumpRoots(self):
    another_dump_root = os.path.join(self.dump_root, "another")
    another_debug_url = "file://%s" % another_dump_root
    another_writer = debug_events_writer.DebugEventsWriter(
        another_dump_root, "test_tfdbg_run")
    @def_function.function
    def write_debug_trace(x):
      # DebugIdentityV2 is a stateful op. It ought to be included by auto
      # control dependency.
      square = math_ops.square(x)
      gen_debug_ops.debug_identity_v2(
          square,
          tfdbg_context_id="deadbeaf",
          tensor_debug_mode=debug_event_pb2.TensorDebugMode.FULL_TENSOR,
          debug_urls=["file://%s" % self.dump_root, another_debug_url])
      return square + 1.0
    x = np.array([3.0, 4.0])
    self.assertAllClose(write_debug_trace(x), np.array([10.0, 17.0]))
    self.writer.FlushExecutionFiles()
    another_writer.FlushExecutionFiles()
    another_writer.Close()
    for debug_root in (self.dump_root, another_dump_root):
      with debug_events_reader.DebugEventsReader(debug_root) as reader:
        graph_trace_iter = reader.graph_execution_traces_iterators()[0]
        debug_event = next(graph_trace_iter).debug_event
        trace = debug_event.graph_execution_trace
        self.assertEqual(trace.tfdbg_context_id, "deadbeaf")
        self.assertEqual(trace.op_name, "")
        self.assertEqual(trace.tensor_debug_mode,
                         debug_event_pb2.TensorDebugMode.FULL_TENSOR)
        tensor_value = tensor_util.MakeNdarray(trace.tensor_proto)
        self.assertAllClose(tensor_value, [9.0, 16.0])
        with self.assertRaises(StopIteration):
          next(graph_trace_iter)
class DebugIdentityV2OpUninitializedWriterTest(
    dumping_callback_test_lib.DumpingCallbackTestBase):
  """Tests for DebugIdentityV2Op: when DebugEventsWriter is not initialized.
  This case can occur when DebugIdentityV2Ops are running on a remote
  TensorFlow server (e.g., a TPU worker).
  """
  @test_util.run_in_graph_and_eager_modes
  def testInvokingDebugIdentityV2OpBeforeCreatingDebugEventsWriterWorks(self):
    if not compat.forward_compatible(2020, 6, 24):
      self.skipTest("Functionality currently not supported.")
    circular_buffer_size = 3
    @def_function.function
    def write_debug_trace(x):
      # DebugIdentityV2 is a stateful op. It ought to be included by auto
      # control dependency.
      square = math_ops.square(x)
      gen_debug_ops.debug_identity_v2(
          square,
          tfdbg_context_id="deadbeaf",
          op_name="Square",
          output_slot=0,
          tensor_debug_mode=debug_event_pb2.TensorDebugMode.FULL_TENSOR,
          debug_urls=["file://%s" % self.dump_root],
          circular_buffer_size=circular_buffer_size)
      return square
    # The DebugIdentityV2 ops are invoked *before* a DebugEventsWriter at the
    # same dump root is created.
    for i in range(circular_buffer_size * 2):
      self.assertAllClose(
          write_debug_trace(np.array([i]).astype(np.float32)), [i**2.0])
    writer = debug_events_writer.DebugEventsWriter(self.dump_root,
                                                   "test_tfdbg_run",
                                                   circular_buffer_size)
    writer.FlushNonExecutionFiles()
    writer.FlushExecutionFiles()
    with debug_events_reader.DebugEventsReader(self.dump_root) as reader:
      graph_trace_iter = reader.graph_execution_traces_iterators()[0]
      graph_execution_traces = []
      while True:
        try:
          graph_execution_traces.append(
              next(graph_trace_iter).debug_event.graph_execution_trace)
        except StopIteration:
          break
      self.assertLen(graph_execution_traces, circular_buffer_size)
      for i in range(circular_buffer_size):
        self.assertAllClose(
            tensor_util.MakeNdarray(graph_execution_traces[i].tensor_proto),
            [(i + circular_buffer_size)**2.0])
class DebugNumericSummaryV2Test(test_util.TensorFlowTestCase):
  @test_util.run_in_graph_and_eager_modes
  def testDebugNumericSummaryV2OpReduceInfNanThreeSlots(self):
    def debug_summary(x):
      return self.evaluate(gen_debug_ops.debug_numeric_summary_v2(
          x, tensor_debug_mode=(
              debug_event_pb2.TensorDebugMode.REDUCE_INF_NAN_THREE_SLOTS)))
    self.assertAllEqual(
        debug_summary(constant_op.constant([])), [0.0, 0.0, 0.0])
    self.assertAllEqual(
        debug_summary(constant_op.constant(42.0)), [0.0, 0.0, 0.0])
    self.assertAllEqual(
        debug_summary(constant_op.constant([3.0, 4.0])), [0.0, 0.0, 0.0])
    self.assertAllEqual(
        debug_summary(constant_op.constant(np.array([3.0, -np.inf]))),
        [-np.inf, 0.0, 0.0])
    self.assertAllEqual(
        debug_summary(constant_op.constant(np.array([[0, 0], [np.nan, 0]]))),
        [0.0, 0.0, np.nan])
    self.assertAllEqual(
        debug_summary(
            constant_op.constant(np.array([[0, 0], [np.nan, np.inf]]))),
        [0.0, np.inf, np.nan])
    self.assertAllEqual(
        debug_summary(
            constant_op.constant(np.array([[0, np.inf], [np.nan, -np.inf]]))),
        [-np.inf, np.inf, np.nan])
    x = np.zeros([100, 100], dtype=np.float16)
    x[32, 47] = np.nan
    self.assertAllEqual(
        debug_summary(constant_op.constant(x)), [0.0, 0.0, np.nan])
    x = np.zeros([97, 97], dtype=np.float32)
    x[50, 83] = -np.inf
    self.assertAllEqual(
        debug_summary(constant_op.constant(x)), [-np.inf, 0.0, 0.0])
    x[1, 41] = np.nan
    self.assertAllEqual(
        debug_summary(constant_op.constant(x)), [-np.inf, 0.0, np.nan])
    x = np.zeros([9701], dtype=np.float64)
    x[9700] = np.nan
    self.assertAllEqual(
        debug_summary(constant_op.constant(x)), [0.0, 0.0, np.nan])
  @test_util.run_in_graph_and_eager_modes
  def testDebugNumericSummaryV2OpLargeTensorIDError(self):
    modes = [
        debug_event_pb2.TensorDebugMode.CURT_HEALTH,
        debug_event_pb2.TensorDebugMode.CONCISE_HEALTH,
        debug_event_pb2.TensorDebugMode.SHAPE,
    ]
    # Maximum allowed tensor_id
    tensor_id = np.power(2, 53)
    for mode in modes:
      self.evaluate(
          gen_debug_ops.debug_numeric_summary_v2(
              constant_op.constant(42.0),
              tensor_debug_mode=mode,
              tensor_id=tensor_id,
              output_dtype=dtypes.float64))
    # Incrementing by one should error
    tensor_id += 1
    for mode in modes:
      with self.assertRaises(errors.InvalidArgumentError):
        self.evaluate(
            gen_debug_ops.debug_numeric_summary_v2(
                constant_op.constant(42.0),
                tensor_debug_mode=mode,
                tensor_id=tensor_id,
                output_dtype=dtypes.float64))
  @test_util.run_in_graph_and_eager_modes
  def testDebugNumericSummaryV2OpCurtHealthValuesSmall(self):
    def debug_summary(x):
      return self.evaluate(
          gen_debug_ops.debug_numeric_summary_v2(
              x,
              tensor_debug_mode=(debug_event_pb2.TensorDebugMode.CURT_HEALTH),
              tensor_id=x._id,
              output_dtype=dtypes.float64)), x._id
    tensor, tensor_id = debug_summary(constant_op.constant([]))
    self.assertAllEqual(tensor, [tensor_id, 0.0])
    tensor, tensor_id = debug_summary(constant_op.constant(42.0))
    self.assertAllEqual(tensor, [tensor_id, 0.0])
    tensor, tensor_id = debug_summary(constant_op.constant([3.0, 4.0]))
    self.assertAllEqual(tensor, [tensor_id, 0.0])
    tensor, tensor_id = debug_summary(
        constant_op.constant(np.array([3.0, -np.inf])))
    self.assertAllEqual(tensor, [tensor_id, 1.0])
    tensor, tensor_id = debug_summary(
        constant_op.constant(np.array([[0, 0], [np.nan, 0]])))
    self.assertAllEqual(tensor, [tensor_id, 1.0])
    tensor, tensor_id = debug_summary(
        constant_op.constant(np.array([[0, 0], [np.nan, np.inf]])))
    self.assertAllEqual(tensor, [tensor_id, 1.0])
    tensor, tensor_id = debug_summary(
        constant_op.constant(np.array([[0, np.inf], [np.nan, -np.inf]])))
    self.assertAllEqual(tensor, [tensor_id, 1.0])
  @test_util.run_in_graph_and_eager_modes
  def testDebugNumericSummaryV2OpCurtHealthValuesLarge(self):
    def debug_summary(x):
      return self.evaluate(
          gen_debug_ops.debug_numeric_summary_v2(
              x,
              tensor_debug_mode=(debug_event_pb2.TensorDebugMode.CURT_HEALTH),
              tensor_id=x._id,
              output_dtype=dtypes.float64)), x._id
    x = np.zeros([100, 100], dtype=np.float16)
    x[32, 47] = np.nan
    tensor, tensor_id = debug_summary(constant_op.constant(x))
    self.assertAllEqual(tensor, [tensor_id, 1.0])
    x = np.zeros([97, 97], dtype=np.float32)
    x[50, 83] = -np.inf
    tensor, tensor_id = debug_summary(constant_op.constant(x))
    self.assertAllEqual(tensor, [tensor_id, 1.0])
    x[1, 41] = np.nan
    tensor, tensor_id = debug_summary(constant_op.constant(x))
    self.assertAllEqual(tensor, [tensor_id, 1.0])
    x = np.zeros([9701], dtype=np.float64)
    x[9700] = np.nan
    tensor, tensor_id = debug_summary(constant_op.constant(x))
    self.assertAllEqual(tensor, [tensor_id, 1.0])
  @test_util.run_in_graph_and_eager_modes
  def testDebugNumericSummaryV2OpCurtHealthConsistency(self):
    def debug_summary(x):
      return self.evaluate(
          gen_debug_ops.debug_numeric_summary_v2(
              x,
              tensor_debug_mode=(debug_event_pb2.TensorDebugMode.CURT_HEALTH),
              tensor_id=x._id,
              output_dtype=dtypes.float64)), x._id
    x = np.zeros([100, 100], dtype=np.float16)
    x[43, 99] = np.nan
    c = constant_op.constant(x)
    tensor_1, tensor_id_1 = debug_summary(c)
    tensor_2, tensor_id_2 = debug_summary(c)
    self.assertAllEqual(tensor_1, tensor_2)
    self.assertEqual(tensor_id_1, tensor_id_2)
    x = np.zeros([100, 100, 50], dtype=np.float64)
    x[0, 0, 1] = np.inf
    c = constant_op.constant(x)
    tensor_1, tensor_id_1 = debug_summary(c)
    tensor_2, tensor_id_2 = debug_summary(c)
    self.assertAllEqual(tensor_1, tensor_2)
    self.assertEqual(tensor_id_1, tensor_id_2)
    c = constant_op.constant(np.ones((100, 200), np.double))
    tensor_1, tensor_id_1 = debug_summary(c)
    tensor_2, tensor_id_2 = debug_summary(c)
    self.assertAllEqual(tensor_1, tensor_2)
    self.assertEqual(tensor_id_1, tensor_id_2)
  @test_util.run_in_graph_and_eager_modes
  def testDebugNumericSummaryV2OpConciseHealthSmall(self):
    def debug_summary(x):
      return self.evaluate(
          gen_debug_ops.debug_numeric_summary_v2(
              x,
              tensor_debug_mode=(
                  debug_event_pb2.TensorDebugMode.CONCISE_HEALTH),
              tensor_id=x._id,
              output_dtype=dtypes.float64)), x._id
    tensor, tensor_id = debug_summary(constant_op.constant([]))
    self.assertAllEqual(tensor, [tensor_id, 0.0, 0.0, 0.0, 0.0])
    tensor, tensor_id = debug_summary(constant_op.constant(42.0))
    self.assertAllEqual(tensor, [tensor_id, 1.0, 0.0, 0.0, 0.0])
    tensor, tensor_id = debug_summary(constant_op.constant([3.0, 4.0]))
    self.assertAllEqual(tensor, [tensor_id, 2.0, 0.0, 0.0, 0.0])
    tensor, tensor_id = debug_summary(
        constant_op.constant(np.array([3.0, -np.inf])))
    self.assertAllEqual(tensor, [tensor_id, 2.0, 1.0, 0.0, 0.0])
    tensor, tensor_id = debug_summary(
        constant_op.constant(np.array([[0, 0], [np.nan, 0]])))
    self.assertAllEqual(tensor, [tensor_id, 4.0, 0.0, 0.0, 1.0])
    tensor, tensor_id = debug_summary(
        constant_op.constant(np.array([[0, 0], [np.nan, np.inf]])))
    self.assertAllEqual(tensor, [tensor_id, 4.0, 0.0, 1.0, 1.0])
    tensor, tensor_id = debug_summary(
        constant_op.constant(np.array([[0, np.inf], [np.nan, -np.inf]])))
    self.assertAllEqual(tensor, [tensor_id, 4.0, 1.0, 1.0, 1.0])
  @test_util.run_in_graph_and_eager_modes
  def testDebugNumericSummaryV2OpConciseHealthLarge(self):
    def debug_summary(x):
      return self.evaluate(
          gen_debug_ops.debug_numeric_summary_v2(
              x,
              tensor_debug_mode=(
                  debug_event_pb2.TensorDebugMode.CONCISE_HEALTH),
              tensor_id=x._id,
              output_dtype=dtypes.float64)), x._id
    x = np.zeros([100, 100], dtype=np.float16)
    x[32, :] = np.nan
    tensor, tensor_id = debug_summary(constant_op.constant(x))
    self.assertAllEqual(tensor, [tensor_id, 10000.0, 0.0, 0.0, 100.0])
    x = np.zeros([97, 97], dtype=np.float32)
    x[50, 83:85] = -np.inf
    tensor, tensor_id = debug_summary(constant_op.constant(x))
    self.assertAllEqual(tensor, [tensor_id, 97 * 97, 2.0, 0.0, 0.0])
    x[1:9, 41] = np.nan
    tensor, tensor_id = debug_summary(constant_op.constant(x))
    self.assertAllEqual(tensor, [tensor_id, 97 * 97, 2.0, 0.0, 8.0])
    x = np.zeros([9701], dtype=np.float64)
    x[9700] = np.nan
    tensor, tensor_id = debug_summary(constant_op.constant(x))
    self.assertAllEqual(tensor, [tensor_id, 9701, 0.0, 0.0, 1.0])
  @test_util.run_in_graph_and_eager_modes
  def testDebugNumericSummaryV2OpConciseHealthConsistency(self):
    def debug_summary(x):
      return self.evaluate(
          gen_debug_ops.debug_numeric_summary_v2(
              x,
              tensor_debug_mode=(
                  debug_event_pb2.TensorDebugMode.CONCISE_HEALTH),
              tensor_id=x._id,
              output_dtype=dtypes.float64)), x._id
    # Assert the same op returns a consistent value
    x = np.zeros([100, 100], dtype=np.float16)
    x[3, 4] = -np.inf
    c = constant_op.constant(x)
    tensor_1, tensor_id_1 = debug_summary(c)
    tensor_2, tensor_id_2 = debug_summary(c)
    self.assertAllEqual(tensor_1, tensor_2)
    self.assertEqual(tensor_id_1, tensor_id_2)
    c = constant_op.constant(np.ones((100, 200), np.double))
    tensor_1, tensor_id_1 = debug_summary(c)
    tensor_2, tensor_id_2 = debug_summary(c)
    self.assertAllEqual(tensor_1, tensor_2)
    self.assertEqual(tensor_id_1, tensor_id_2)
  @test_util.run_in_graph_and_eager_modes
  def testDebugNumericSummaryV2OpShapeEmpty(self):
    def debug_summary(x):
      return self.evaluate(
          gen_debug_ops.debug_numeric_summary_v2(
              x,
              tensor_debug_mode=(debug_event_pb2.TensorDebugMode.SHAPE),
              tensor_id=x._id,
              output_dtype=dtypes.float64)), x._id
    tensor, tensor_id = debug_summary(constant_op.constant(0.0))
    self.assertAllEqual(
        tensor, [tensor_id, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
  @test_util.run_in_graph_and_eager_modes
  def testDebugNumericSummaryV2OpShapeSmall(self):
    def debug_summary(x):
      return self.evaluate(
          gen_debug_ops.debug_numeric_summary_v2(
              x,
              tensor_debug_mode=(debug_event_pb2.TensorDebugMode.SHAPE),
              tensor_id=x._id,
              output_dtype=dtypes.float64)), x._id
    x = np.zeros([3, 4], dtype=np.float32)
    tensor, tensor_id = debug_summary(constant_op.constant(x))
    self.assertAllEqual(
        tensor, [tensor_id, 1.0, 2.0, 12.0, 3.0, 4.0, 0.0, 0.0, 0.0, 0.0])
    x = np.ones([1, 2, 3, 4, 5, 6], dtype=np.float16)
    x[0, 1, 2, 2, 2, 2] = np.nan
    tensor, tensor_id = debug_summary(constant_op.constant(x))
    self.assertAllEqual(
        tensor,
        [tensor_id, 19, 6.0, 2 * 3 * 4 * 5 * 6, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0])
    x = np.zeros([2], dtype=np.float32)
    tensor, tensor_id = debug_summary(constant_op.constant(x))
    self.assertAllEqual(
        tensor, [tensor_id, 1.0, 1.0, 2.0, 2.0, 0.0, 0.0, 0.0, 0.0, 0.0])
    tensor, tensor_id = debug_summary(constant_op.constant([]))
    self.assertAllEqual(
        tensor, [tensor_id, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
  @test_util.run_in_graph_and_eager_modes
  def testDebugNumericSummaryV2OpShapeLarge(self):
    def debug_summary(x):
      return self.evaluate(
          gen_debug_ops.debug_numeric_summary_v2(
              x,
              tensor_debug_mode=(debug_event_pb2.TensorDebugMode.SHAPE),
              tensor_id=x._id,
              output_dtype=dtypes.float64)), x._id
    x = np.ones([1, 2, 3, 4, 5, 6, 7], dtype=np.double)
    tensor, tensor_id = debug_summary(constant_op.constant(x))
    self.assertAllEqual(tensor, [
        tensor_id, 2.0, 7.0, 2 * 3 * 4 * 5 * 6 * 7, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0
    ])
  @test_util.run_in_graph_and_eager_modes
  def testDebugNumericSummaryV2OpFullHealthSmall(self):
    def debug_summary(x):
      return self.evaluate(
          gen_debug_ops.debug_numeric_summary_v2(
              x,
              tensor_debug_mode=(debug_event_pb2.TensorDebugMode.FULL_HEALTH),
              tensor_id=x._id,
              output_dtype=dtypes.float64)), x._id
    tensor, tensor_id = debug_summary(constant_op.constant([]))
    expected = [tensor_id, -1, 1, 1, 0, 0, 0, 0, 0, 0, 0]
    self.assertAllEqual(tensor, expected)
    tensor, tensor_id = debug_summary(constant_op.constant(42.0))
    expected = [tensor_id, -1, 1, 0, 1, 0, 0, 0, 0, 0, 1]
    self.assertAllEqual(tensor, expected)
    tensor, tensor_id = debug_summary(constant_op.constant([3.0, 4.0]))
    expected = [tensor_id, -1, 1, 1, 2, 0, 0, 0, 0, 0, 2]
    self.assertAllEqual(tensor, expected)
    tensor, tensor_id = debug_summary(
        constant_op.constant(np.array([3, -np.inf], dtype=np.float32)))
    expected = [tensor_id, -1, 1, 1, 2, 1, 0, 0, 0, 0, 1]
    self.assertAllEqual(tensor, expected)
    tensor, tensor_id = debug_summary(
        constant_op.constant(np.array([[0, 0], [np.nan, 0]], dtype=np.float64)))
    expected = [tensor_id, -1, 2, 2, 4, 0, 0, 1, 0, 3, 0]
    self.assertAllEqual(tensor, expected)
    tensor, tensor_id = debug_summary(
        constant_op.constant(
            np.array([[0, 0], [np.nan, np.inf]], dtype=np.float16)))
    expected = [tensor_id, -1, 19, 2, 4, 0, 1, 1, 0, 2, 0]
    self.assertAllEqual(tensor, expected)
    tensor, tensor_id = debug_summary(
        constant_op.constant(
            np.array([[0, np.inf], [np.nan, -np.inf]], dtype=np.float32)))
    expected = [tensor_id, -1, 1, 2, 4, 1, 1, 1, 0, 1, 0]
    self.assertAllEqual(tensor, expected)
  @test_util.run_in_graph_and_eager_modes
  def testDebugNumericSummaryV2OpFullHealthLarge(self):
    def debug_summary(x):
      return self.evaluate(
          gen_debug_ops.debug_numeric_summary_v2(
              x,
              tensor_debug_mode=(debug_event_pb2.TensorDebugMode.FULL_HEALTH),
              tensor_id=x._id,
              output_dtype=dtypes.float64)), x._id
    def tensor_counts(arr):
      counts = [len(np.shape(arr)), np.size(arr), 0, 0, 0, 0, 0, 0]
      for n in np.ravel(arr):
        if np.isneginf(n):
          counts[2] += 1
        elif np.isposinf(n):
          counts[3] += 1
        elif np.isnan(n):
          counts[4] += 1
        elif n < 0.:
          counts[5] += 1
        elif n == 0.:
          counts[6] += 1
        else:
          counts[7] += 1
      return counts
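    # Illustrative sketch of the helper above: for
    #     np.array([[-np.inf, 0.0], [1.0, np.nan]])
    # it returns [2, 4, 1, 0, 1, 0, 1, 1], i.e. ndim, size, then counts of
    # -inf, +inf, nan, negative, zero and positive finite values.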
    x = np.zeros([50, 50], dtype=np.float16)
    x[32, 47] = np.nan
    x[0:4, 3] = np.inf
    x[40:50, 40:50] = 10
    x[3, 20] = -10
    tensor, tensor_id = debug_summary(constant_op.constant(x))
    expected = [tensor_id, -1, 19] + tensor_counts(x)
    self.assertAllEqual(tensor, expected)
    x = np.ones([25, 25, 50], dtype=np.float32) * np.inf
    x[:, :, 1] = np.nan
    x[:, :, 2] = -np.inf
    x[:, :, 3] = -1
    x[:, :, 4] = 0
    x[:, :, 5] = 1
    tensor, tensor_id = debug_summary(constant_op.constant(x))
    expected = [tensor_id, -1, 1] + tensor_counts(x)
    self.assertAllEqual(tensor, expected)
    x[0, 0, 0] = np.nan
    tensor, tensor_id = debug_summary(constant_op.constant(x))
    expected = [tensor_id, -1, 1,] + tensor_counts(x)
    self.assertAllEqual(tensor, expected)
    x = np.zeros([9701], dtype=np.float64)
    x[9700] = np.nan
    tensor, tensor_id = debug_summary(constant_op.constant(x))
    expected = [tensor_id, -1, 2] + tensor_counts(x)
    self.assertAllEqual(tensor, expected)
  @test_util.run_in_graph_and_eager_modes
  def testDebugNumericSummaryV2OpFullHealthConsistency(self):
    def debug_summary(x):
      return self.evaluate(
          gen_debug_ops.debug_numeric_summary_v2(
              x,
              tensor_debug_mode=(debug_event_pb2.TensorDebugMode.FULL_HEALTH),
              tensor_id=x._id,
              output_dtype=dtypes.float64)), x._id
    # Assert the same op returns a consistent value
    x = np.zeros([100, 100], dtype=np.float16)
    x[32, 47] = np.nan
    x[0:4, 3] = np.inf
    x[90:100, 90:100] = 10
    x[3, 20] = -10
    c = constant_op.constant(x)
    tensor_1, tensor_id_1 = debug_summary(c)
    tensor_2, tensor_id_2 = debug_summary(c)
    self.assertAllEqual(tensor_1, tensor_2)
    self.assertEqual(tensor_id_1, tensor_id_2)
    x = np.ones((100, 200, 3, 10), np.double)
    x[1, 30, 2] = 10
    x[5, :, 0, 1] = np.nan
    x[90:100, 150, :, :] = np.inf
    c = constant_op.constant(x)
    tensor_1, tensor_id_1 = debug_summary(c)
    tensor_2, tensor_id_2 = debug_summary(c)
    self.assertAllEqual(tensor_1, tensor_2)
    self.assertEqual(tensor_id_1, tensor_id_2)
  def testCheckNumericsV2OpNegativeAndPositiveInf(self):
    """Test that CheckNumericsV2 op distinguishes negative and positive infs."""
    with self.session(graph=ops.Graph()):
      t1 = constant_op.constant([-1.0, 1.0])
      t2 = constant_op.constant([0.0, 0.0])
      with self.assertRaisesRegex(
          errors.InvalidArgumentError,
          r"pass through test.*had -Inf and \+Inf values"):
        self.evaluate(
            array_ops.check_numerics_v2(t1 / t2, message="pass through test"))
  def testCheckNumericsV2OpNegativeAndPositiveInfAndNaN(self):
    """CheckNumericsV2 op distinguishes - & + infs when nan is present."""
    with self.session(graph=ops.Graph()):
      t1 = constant_op.constant([-1.0, 1.0, 0.0])
      t2 = constant_op.constant([0.0, 0.0, 0.0])
      with self.assertRaisesRegex(
          errors.InvalidArgumentError,
          r"pass through test.*had -Inf, \+Inf, and NaN values"):
        self.evaluate(
            array_ops.check_numerics_v2(t1 / t2, message="pass through test"))
  def testCheckNumericsV2PositiveInfAndNaN(self):
    """Test that CheckNumericsV2 op shows sign of inf when nan is present."""
    with self.session(graph=ops.Graph()):
      t1 = constant_op.constant([0.0, 1.0])
      t2 = constant_op.constant([0.0, 0.0])
      with self.assertRaisesRegex(
          errors.InvalidArgumentError,
          r"pass through test.*had \+Inf and NaN values"):
        self.evaluate(
            array_ops.check_numerics_v2(t1 / t2, message="pass through test"))
if __name__ == "__main__":
  ops.enable_eager_execution()
  googletest.main()
 | 
	apache-2.0 | -492,491,242,545,354,900 | 37.860792 | 80 | 0.632312 | false | 
| 
	SEJeff/home-assistant | 
	homeassistant/components/automation/__init__.py | 
	4 | 
	2246 | 
	"""
homeassistant.components.automation
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Allows to setup simple automation rules via the config file.
"""
import logging
from homeassistant.bootstrap import prepare_setup_platform
from homeassistant.helpers import config_per_platform
from homeassistant.util import split_entity_id
from homeassistant.const import ATTR_ENTITY_ID
DOMAIN = "automation"
DEPENDENCIES = ["group"]
CONF_ALIAS = "alias"
CONF_SERVICE = "execute_service"
CONF_SERVICE_ENTITY_ID = "service_entity_id"
CONF_SERVICE_DATA = "service_data"
_LOGGER = logging.getLogger(__name__)
def setup(hass, config):
    """ Sets up automation. """
    success = False
    for p_type, p_config in config_per_platform(config, DOMAIN, _LOGGER):
        platform = prepare_setup_platform(hass, config, DOMAIN, p_type)
        if platform is None:
            _LOGGER.error("Unknown automation platform specified: %s", p_type)
            continue
        if platform.register(hass, p_config, _get_action(hass, p_config)):
            _LOGGER.info(
                "Initialized %s rule %s", p_type, p_config.get(CONF_ALIAS, ""))
            success = True
        else:
            _LOGGER.error(
                "Error setting up rule %s", p_config.get(CONF_ALIAS, ""))
    return success
def _get_action(hass, config):
    """ Return an action based on a config. """
    def action():
        """ Action to be executed. """
        _LOGGER.info("Executing rule %s", config.get(CONF_ALIAS, ""))
        if CONF_SERVICE in config:
            domain, service = split_entity_id(config[CONF_SERVICE])
            service_data = config.get(CONF_SERVICE_DATA, {})
            if not isinstance(service_data, dict):
                _LOGGER.error("%s should be a dictionary", CONF_SERVICE_DATA)
                service_data = {}
            if CONF_SERVICE_ENTITY_ID in config:
                try:
                    service_data[ATTR_ENTITY_ID] = \
                        config[CONF_SERVICE_ENTITY_ID].split(",")
                except AttributeError:
                    service_data[ATTR_ENTITY_ID] = \
                        config[CONF_SERVICE_ENTITY_ID]
            hass.services.call(domain, service, service_data)
    return action
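# Illustrative rule configuration handled by _get_action above (hypothetical
# values, shown as the parsed dict rather than the YAML config file):
#     {
#         'alias': 'Turn on kitchen light',
#         'execute_service': 'light.turn_on',
#         'service_entity_id': 'light.kitchen',
#         'service_data': {'brightness': 120},
#     }
# split_entity_id('light.turn_on') gives the ('light', 'turn_on') domain/service
# pair that is passed to hass.services.call.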
 | 
	mit | -8,727,834,822,800,919,000 | 29.351351 | 79 | 0.597507 | false | 
| 
	garywu/exparse | 
	exparse/helper.py | 
	1 | 
	2450 | 
	__author__ = 'Gary Wu'
import logging
mylogger = logging.getLogger(__name__)
from itertools import chain
from functools import partial
from lxml import etree
from lxml import html
def currency(elm):
    if elm is not None and elm.text:
        text = elm.text.strip()
        if text.startswith('$'):
            text = text[1:].strip()
            return int(round(float(text.replace(',', '')) * 100))
    return -1
def elm2dict(elm):
    return {
        'tag':elm.tag,
        'attrib':elm.attrib,
        'text':elm.text.strip() if elm.text else '',
        'tail':elm.tail.strip() if elm.tail else '',
        }
def dump_one(elm):
    data = elm2dict(elm)
    children = []
    for c in elm.getchildren():
        children.append(elm2dict(c))
    data['children'] = children
    return data
def dump(elm):
    all = []
    if isinstance(elm, list):
        for item in elm:
            all.append(dump_one(item))
        return all
    else:
        return dump_one(elm)
def dump_elm(elm, attr_types = ''):
    if isinstance(elm, html.HtmlElement):
        mylogger.debug('value: %s, type:%s' % (etree.tostring(elm, pretty_print = True), attr_types))
    elif hasattr(elm, 'text'):
        mylogger.debug('value: %s, type:%s' % (elm.text, attr_types))
    else:
        mylogger.debug('value: %s, type:%s' % (elm, attr_types))
def debug_lines(text, lines=10):
    if lines:
        mylogger.debug('\n\n' + '\n'.join(chain(text.splitlines()[:lines])) + '\n...\n')
    else:
        mylogger.debug('\n\n' + text)
def containing_class(class_name):
    return 'contains(concat(" ",normalize-space(@class), " "), " ' + class_name + ' ")'
def get_all_text(elm, sep = ' '):
    text = ''
    if elm.text:
        text = elm.text.strip()
    for c in elm.getchildren():
        text += get_all_text(c, sep)
    if elm.tail and elm.tail.strip():
        text += sep + elm.tail.strip() + sep
    return text
get_all_text_newline = partial(get_all_text, sep = '\n')
get_all_text_tab = partial(get_all_text, sep = '\t')
def stringify_children(elm):
    parts = ([elm.text.strip() if elm.text else ''] +
             list(
                 chain(*(
                     [c.text.strip() if c.text else '', stringify_children(c), c.tail.strip() if c.tail else '']
                     for c in elm.getchildren()
                    )
                 )
             ) + [elm.tail.strip() if elm.tail else '']
    )
    return ''.join(filter(None, parts)) | 
	mit | -3,763,255,534,705,272,300 | 27.835294 | 112 | 0.554286 | false | 
| 
	rosswhitfield/mantid | 
	Testing/SystemTests/tests/framework/ISIS/SANS/WORKFLOWS/SANSLoadTest.py | 
	3 | 
	24469 | 
	# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2020 ISIS Rutherford Appleton Laboratory UKRI,
#   NScD Oak Ridge National Laboratory, European Spallation Source,
#   Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS
# SPDX - License - Identifier: GPL - 3.0 +
# pylint: disable=too-many-public-methods, invalid-name, too-many-arguments
import unittest
import systemtesting
from ISIS.SANS.isis_sans_system_test import ISISSansSystemTest
from mantid.dataobjects import (Workspace2D, EventWorkspace)
from mantid.api import (AnalysisDataService, AlgorithmManager)
from sans.algorithm_detail.load_data import SANSLoadDataFactory
from sans.common.log_tagger import has_tag
from sans.common.constants import (CALIBRATION_WORKSPACE_TAG, SANS_FILE_TAG)
# Not clear why the names in the module are not found by Pylint, but it seems to get confused. Hence this check
# needs to be disabled here.
# pylint: disable=no-name-in-module
from sans.state.Serializer import Serializer
from sans.test_helper.test_director import TestDirector
from sans.common.enums import SANSFacility, SANSInstrument
from sans.state.StateObjects.StateData import get_data_builder
from sans.common.file_information import SANSFileInformationFactory
def remove_all_workspaces_from_ads():
    workspaces_on_the_ads = AnalysisDataService.getObjectNames()
    for name in workspaces_on_the_ads:
        AnalysisDataService.remove(name)
def compare_workspaces(workspace1, workspace2):
    try:
        alg = AlgorithmManager.createUnmanaged("CompareWorkspaces")
        alg.initialize()
        alg.setChild(True)
        alg.setRethrows(True)
        alg.setProperty("Workspace1", workspace1)
        alg.setProperty("Workspace2", workspace2)
        alg.setProperty("Tolerance", 1e-6)
        alg.setProperty("ToleranceRelErr", True)
        alg.setProperty("CheckAllData", True)
        alg.execute()
    except RuntimeError:
        raise RuntimeError("Comparison was wrong.")
# -----------------------------------------------
# Tests for the Load factory
# -----------------------------------------------
class SANSLoadFactoryTest(unittest.TestCase):
    def test_that_valid_file_information_does_not_raise(self):
        # Arrange
        load_factory = SANSLoadDataFactory()
        file_information_factory = SANSFileInformationFactory()
        ws_name_sample = "SANS2D00022024"
        file_information = file_information_factory.create_sans_file_information(ws_name_sample)
        data_builder = get_data_builder(SANSFacility.ISIS, file_information)
        data_builder.set_sample_scatter(ws_name_sample)
        data = data_builder.build()
        # Get the sample state
        test_director = TestDirector()
        test_director.set_states(data_state=data)
        state = test_director.construct()
        # Act + Assert
        try:
            load_factory.create_loader(state)
            did_not_raise = True
        except NotImplementedError:
            did_not_raise = False
        self.assertTrue(did_not_raise)
# -----------------------------------------------
# Tests for the SANSLoad algorithm
# -----------------------------------------------
@ISISSansSystemTest(SANSInstrument.LARMOR, SANSInstrument.SANS2D)
class SANSLoadTest(unittest.TestCase):
    @staticmethod
    def _get_simple_state(sample_scatter, sample_trans=None, sample_direct=None,
                          can_scatter=None, can_trans=None, can_direct=None, calibration=None,
                          sample_scatter_period=None, sample_trans_period=None, sample_direct_period=None,
                          can_scatter_period=None, can_trans_period=None, can_direct_period=None):
        file_information_factory = SANSFileInformationFactory()
        file_information = file_information_factory.create_sans_file_information(sample_scatter)
        data_builder = get_data_builder(SANSFacility.ISIS, file_information)
        data_builder.set_sample_scatter(sample_scatter)
        # Set the file names
        if sample_trans is not None:
            data_builder.set_sample_transmission(sample_trans)
        if sample_direct is not None:
            data_builder.set_sample_direct(sample_direct)
        if can_scatter is not None:
            data_builder.set_can_scatter(can_scatter)
        if can_trans is not None:
            data_builder.set_can_transmission(can_trans)
        if can_direct is not None:
            data_builder.set_can_direct(can_direct)
        # Set the periods
        if sample_scatter_period is not None:
            data_builder.set_sample_scatter_period(sample_scatter_period)
        if sample_trans_period is not None:
            data_builder.set_sample_transmission_period(sample_trans_period)
        if sample_direct_period is not None:
            data_builder.set_sample_direct_period(sample_direct_period)
        if can_scatter_period is not None:
            data_builder.set_can_scatter_period(can_scatter_period)
        if can_trans_period is not None:
            data_builder.set_can_transmission_period(can_trans_period)
        if can_direct_period is not None:
            data_builder.set_can_direct_period(can_direct_period)
        data_info = data_builder.build()
        # Get the sample state
        test_director = TestDirector()
        test_director.set_states(data_state=data_info)
        state = test_director.construct()
        state.adjustment.calibration = calibration
        return state
    def _evaluate_workspace_type(self, load_alg, num_workspaces, workspace_name, workspace_type, index):
        if num_workspaces == 1:
            ws = load_alg.getProperty(workspace_name).value
            self.assertTrue(isinstance(ws, workspace_type[index]))
        elif num_workspaces > 1:
            for ind in range(1, num_workspaces + 1):
                output_name = workspace_name + "_" + str(ind)
                ws = load_alg.getProperty(output_name).value
                self.assertTrue(isinstance(ws, workspace_type[index]))
        else:
            ws = load_alg.getProperty(workspace_name).value
            self.assertEqual(ws, None)
    def _do_test_output(self, load_alg, expected_number_of_workspaces, expected_number_on_ads, workspace_type):
        #  Check the number of workspaces
        tags_numbers = ["NumberOfSampleScatterWorkspaces", "NumberOfSampleTransmissionWorkspaces",
                        "NumberOfSampleDirectWorkspaces", "NumberOfCanScatterWorkspaces",
                        "NumberOfCanTransmissionWorkspaces", "NumberOfCanDirectWorkspaces"]
        for num_workspaces, num_name in zip(expected_number_of_workspaces, tags_numbers):
            number_of_workspaces = load_alg.getProperty(num_name).value
            self.assertEqual(number_of_workspaces,  num_workspaces)
        # Check that workspaces were loaded
        tags_workspaces = ["SampleScatterWorkspace", "SampleTransmissionWorkspace",
                           "SampleDirectWorkspace", "CanScatterWorkspace",
                           "CanTransmissionWorkspace", "CanDirectWorkspace"]
        index = 0
        for num_workspaces, workspace_name in zip(expected_number_of_workspaces, tags_workspaces):
            self._evaluate_workspace_type(load_alg, num_workspaces, workspace_name, workspace_type, index)
            index += 1
        # Check for the monitor workspaces
        num_monitor_workspaces = [expected_number_of_workspaces[0], expected_number_of_workspaces[3]]
        tags_monitors = ["SampleScatterMonitorWorkspace", "CanScatterMonitorWorkspace"]
        workspace_type_monitor = [Workspace2D, Workspace2D]
        index = 0
        for num_workspaces, workspace_name in zip(num_monitor_workspaces, tags_monitors):
            self._evaluate_workspace_type(load_alg, num_workspaces, workspace_name, workspace_type_monitor, index)
            index += 1
        # Confirm there is nothing on the ADS
        workspaces_on_the_ads = AnalysisDataService.getObjectNames()
        self.assertEqual(len(workspaces_on_the_ads),  expected_number_on_ads)
    @staticmethod
    def _has_calibration_been_applied(load_alg):
        sample_workspace = load_alg.getProperty("SampleScatterWorkspace").value
        if sample_workspace is None:
            sample_workspace = load_alg.getProperty("SampleScatterWorkspace_1").value
        has_calibration_tag = has_tag(CALIBRATION_WORKSPACE_TAG, sample_workspace)
        has_file_tag = has_tag(SANS_FILE_TAG, sample_workspace)
        return has_calibration_tag and has_file_tag
    @staticmethod
    def _run_load(state, publish_to_cache, use_cached, move_workspace=False, beam_coordinates=None,
                  component=None, output_workspace_names=None):
        load_alg = AlgorithmManager.createUnmanaged("SANSLoad")
        load_alg.setChild(True)
        load_alg.setRethrows(True)
        load_alg.initialize()
        state_dict = Serializer.to_json(state)
        load_alg.setProperty("SANSState", state_dict)
        load_alg.setProperty("PublishToCache", publish_to_cache)
        load_alg.setProperty("UseCached", use_cached)
        if move_workspace:
            load_alg.setProperty("Component", component)
            load_alg.setProperty("BeamCoordinates", beam_coordinates)
        if output_workspace_names:
            for name, value in output_workspace_names.items():
                load_alg.setProperty(name, value)
        # Act
        load_alg.execute()
        # self.assertTrue(load_alg.isExecuted())
        return load_alg
    def test_that_when_transmission_is_event_monitor_is_used(self):
        # Arrange
        state = SANSLoadTest._get_simple_state(sample_scatter="SANS2D00028827",
                                               sample_trans="SANS2D00028827",
                                               sample_direct="SANS2D00028827")
        # Act
        output_workspace_names = {"SampleScatterWorkspace": "sample_scatter",
                                  "SampleScatterMonitorWorkspace": "sample_monitor_scatter",
                                  "SampleTransmissionWorkspace": "sample_transmission",
                                  "SampleDirectWorkspace": "sample_direct"}
        kwargs = {"state": state, "publish_to_cache": True, "use_cached": True, "move_workspace": False,
                  "output_workspace_names": output_workspace_names}
        load_alg = self._run_load(**kwargs)
        transmission_workspace = load_alg.getProperty("SampleTransmissionWorkspace").value
        self.assertEqual(transmission_workspace.getNumberHistograms(),  8)
    def test_that_runs_for_isis_nexus_file_with_event_data_and_single_period(self):
        # Arrange
        state = SANSLoadTest._get_simple_state(sample_scatter="SANS2D00028827",
                                               sample_trans="SANS2D00028784",
                                               sample_direct="SANS2D00028804",
                                               calibration="TUBE_SANS2D_BOTH_27345_20Mar15.nxs")
        # Act
        output_workspace_names = {"SampleScatterWorkspace": "sample_scatter",
                                  "SampleScatterMonitorWorkspace": "sample_monitor_scatter",
                                  "SampleTransmissionWorkspace": "sample_transmission",
                                  "SampleDirectWorkspace": "sample_direct"}
        load_alg = self._run_load(state, publish_to_cache=False, use_cached=False, move_workspace=False,
                                  output_workspace_names=output_workspace_names)
        # Assert
        expected_number_of_workspaces = [1, 1, 1, 0, 0, 0]
        expected_number_on_ads = 0
        workspace_type = [EventWorkspace, Workspace2D, Workspace2D, None, None, None]
        self._do_test_output(load_alg, expected_number_of_workspaces, expected_number_on_ads, workspace_type)
        # Check that calibration is added
        self.assertTrue(SANSLoadTest._has_calibration_been_applied(load_alg))
    def test_that_runs_for_isis_nexus_file_with_histogram_data_and_single_period(self):
        # Arrange
        state = SANSLoadTest._get_simple_state(sample_scatter="SANS2D00000808",
                                               sample_trans="SANS2D00028784",
                                               sample_direct="SANS2D00028804")
        # Act
        output_workspace_names = {"SampleScatterWorkspace": "sample_scatter",
                                  "SampleScatterMonitorWorkspace": "sample_monitor_scatter",
                                  "SampleTransmissionWorkspace": "sample_transmission",
                                  "SampleDirectWorkspace": "sample_direct"}
        load_alg = self._run_load(state, publish_to_cache=False, use_cached=False, move_workspace=False,
                                  output_workspace_names=output_workspace_names)
        # Assert
        expected_number_of_workspaces = [1, 1, 1, 0, 0, 0]
        expected_number_on_ads = 0
        workspace_type = [Workspace2D, Workspace2D, Workspace2D, None, None, None]
        self._do_test_output(load_alg, expected_number_of_workspaces, expected_number_on_ads, workspace_type)
        # Check that calibration is added
        self.assertFalse(SANSLoadTest._has_calibration_been_applied(load_alg))
    def test_that_runs_for_raw_file_with_histogram_data_and_single_period(self):
        # Arrange
        state = SANSLoadTest._get_simple_state(sample_scatter="SANS2D00000808.raw",
                                               sample_trans="SANS2D00028784",
                                               sample_direct="SANS2D00028804")
        # Act
        output_workspace_names = {"SampleScatterWorkspace": "sample_scatter",
                                  "SampleScatterMonitorWorkspace": "sample_monitor_scatter",
                                  "SampleTransmissionWorkspace": "sample_transmission",
                                  "SampleDirectWorkspace": "sample_direct"}
        load_alg = self._run_load(state, publish_to_cache=False, use_cached=False, move_workspace=False,
                                  output_workspace_names=output_workspace_names)
        # Assert
        expected_number_of_workspaces = [1, 1, 1, 0, 0, 0]
        expected_number_on_ads = 0
        workspace_type = [Workspace2D, Workspace2D, Workspace2D, None, None, None]
        self._do_test_output(load_alg, expected_number_of_workspaces, expected_number_on_ads, workspace_type)
        # Check that calibration is added
        self.assertFalse(SANSLoadTest._has_calibration_been_applied(load_alg))
    def test_that_runs_for_isis_nexus_file_with_histogram_data_and_multi_period(self):
        # Arrange
        state = SANSLoadTest._get_simple_state(sample_scatter="SANS2D00005512.nxs")
        # Act
        output_workspace_names = {"SampleScatterWorkspace": "sample_scatter",
                                  "SampleScatterMonitorWorkspace": "sample_monitor_scatter"}
        load_alg = self._run_load(state, publish_to_cache=False, use_cached=False, move_workspace=False,
                                  output_workspace_names=output_workspace_names)
        # Assert
        expected_number_of_workspaces = [13, 0, 0, 0, 0, 0]
        expected_number_on_ads = 0
        workspace_type = [Workspace2D, None, None, None, None, None]
        self._do_test_output(load_alg, expected_number_of_workspaces, expected_number_on_ads, workspace_type)
        # Check that calibration is added
        self.assertFalse(SANSLoadTest._has_calibration_been_applied(load_alg))
    def test_that_runs_for_isis_nexus_file_with_histogram_data_and_multi_period_and_select_single_period(self):
        # Arrange
        special_selection_on_group = 3
        state = SANSLoadTest._get_simple_state(sample_scatter="SANS2D00005512.nxs",
                                               sample_scatter_period=special_selection_on_group)
        # Act
        output_workspace_names = {"SampleScatterWorkspace": "sample_scatter",
                                  "SampleScatterMonitorWorkspace": "sample_monitor_scatter"}
        load_alg = self._run_load(state, publish_to_cache=False, use_cached=False, move_workspace=False,
                                  output_workspace_names=output_workspace_names)
        # Assert
        expected_number_of_workspaces = [1, 0, 0, 0, 0, 0]
        expected_number_on_ads = 0
        workspace_type = [Workspace2D, None, None, None, None, None]
        self._do_test_output(load_alg, expected_number_of_workspaces, expected_number_on_ads, workspace_type)
        # Check that calibration is added
        self.assertFalse(SANSLoadTest._has_calibration_been_applied(load_alg))
    def test_that_can_load_isis_nexus_file_with_event_data_and_multi_period(self):
        # Arrange
        state = SANSLoadTest._get_simple_state(sample_scatter="LARMOR00013065.nxs",
                                               calibration="80tubeCalibration_18-04-2016_r9330-9335.nxs")
        # Act
        output_workspace_names = {"SampleScatterWorkspace": "sample_scatter",
                                  "SampleScatterMonitorWorkspace": "sample_monitor_scatter"}
        load_alg = self._run_load(state, publish_to_cache=True, use_cached=True, move_workspace=False,
                                  output_workspace_names=output_workspace_names)
        # Assert
        expected_number_of_workspaces = [4, 0, 0, 0, 0, 0]
        expected_number_on_ads = 1
        workspace_type = [EventWorkspace, None, None, None, None, None]
        self._do_test_output(load_alg, expected_number_of_workspaces, expected_number_on_ads, workspace_type)
        # Check that calibration is added
        self.assertTrue(SANSLoadTest._has_calibration_been_applied(load_alg))
        # Confirm that the ADS workspace contains the calibration file
        try:
            AnalysisDataService.retrieve("80tubeCalibration_18-04-2016_r9330-9335")
            on_ads = True
        except RuntimeError:
            on_ads = False
        self.assertTrue(on_ads)
        # Cleanup
        remove_all_workspaces_from_ads()
    def test_that_runs_for_isis_nexus_file_with_event_data_and_multi_period_and_select_single_period(self):
        # Arrange
        special_selection_on_group = 3
        state = SANSLoadTest._get_simple_state(sample_scatter="LARMOR00013065.nxs",
                                               sample_scatter_period=special_selection_on_group)
        # Act
        output_workspace_names = {"SampleScatterWorkspace": "sample_scatter",
                                  "SampleScatterMonitorWorkspace": "sample_monitor_scatter"}
        load_alg = self._run_load(state, publish_to_cache=True, use_cached=True, move_workspace=False,
                                  output_workspace_names=output_workspace_names)
        # Assert
        expected_number_of_workspaces = [1, 0, 0, 0, 0, 0]
        expected_number_on_ads = 0
        workspace_type = [EventWorkspace, None, None, None, None, None]
        self._do_test_output(load_alg, expected_number_of_workspaces, expected_number_on_ads, workspace_type)
        # Check that calibration has not been added
        self.assertFalse(SANSLoadTest._has_calibration_been_applied(load_alg))
        # Cleanup
        remove_all_workspaces_from_ads()
    def test_that_can_load_single_period_from_added_multi_period_histogram_file(self):
        # Arrange
        special_selection_on_group = 7
        state = SANSLoadTest._get_simple_state(sample_scatter="AddedMultiPeriodHistogram-add.nxs",
                                               sample_scatter_period=special_selection_on_group)
        # Act
        output_workspace_names = {"SampleScatterWorkspace": "sample_scatter",
                                  "SampleScatterMonitorWorkspace": "sample_monitor_scatter"}
        load_alg = self._run_load(state, publish_to_cache=True, use_cached=True, move_workspace=False,
                                  output_workspace_names=output_workspace_names)
        # Assert
        expected_number_of_workspaces = [1, 0, 0, 0, 0, 0]
        expected_number_on_ads = 0
        workspace_type = [Workspace2D, None, None, None, None, None]
        self._do_test_output(load_alg, expected_number_of_workspaces, expected_number_on_ads, workspace_type)
        # Check that calibration is added
        self.assertFalse(SANSLoadTest._has_calibration_been_applied(load_alg))
        # Cleanup
        remove_all_workspaces_from_ads()
    def test_that_can_load_all_periods_from_added_multi_period_histogram_file(self):
        # Arrange
        state = SANSLoadTest._get_simple_state(sample_scatter="AddedMultiPeriodHistogram-add.nxs")
        # Act
        output_workspace_names = {"SampleScatterWorkspace": "sample_scatter",
                                  "SampleScatterMonitorWorkspace": "sample_monitor_scatter"}
        load_alg = self._run_load(state, publish_to_cache=False, use_cached=False, move_workspace=False,
                                  output_workspace_names=output_workspace_names)
        # Assert
        expected_number_of_workspaces = [13, 0, 0, 0, 0, 0]
        expected_number_on_ads = 0
        workspace_type = [Workspace2D, None, None, None, None, None]
        self._do_test_output(load_alg, expected_number_of_workspaces, expected_number_on_ads, workspace_type)
        # Check that calibration is added
        self.assertFalse(SANSLoadTest._has_calibration_been_applied(load_alg))
        # Cleanup
        remove_all_workspaces_from_ads()
    def test_that_can_load_single_period_from_added_multi_period_event_file(self):
        # Arrange
        special_selection_on_group = 2
        state = SANSLoadTest._get_simple_state(sample_scatter="AddedMultiPeriodEvent-add.nxs",
                                               sample_scatter_period=special_selection_on_group)
        # Act
        output_workspace_names = {"SampleScatterWorkspace": "sample_scatter",
                                  "SampleScatterMonitorWorkspace": "sample_monitor_scatter"}
        load_alg = self._run_load(state, publish_to_cache=True, use_cached=True, move_workspace=False,
                                  output_workspace_names=output_workspace_names)
        # Assert
        expected_number_of_workspaces = [1, 0, 0, 0, 0, 0]
        expected_number_on_ads = 0
        workspace_type = [EventWorkspace, None, None, None, None, None]
        self._do_test_output(load_alg, expected_number_of_workspaces, expected_number_on_ads, workspace_type)
        # Check that calibration is added
        self.assertFalse(SANSLoadTest._has_calibration_been_applied(load_alg))
        # Cleanup
        remove_all_workspaces_from_ads()
    def test_that_can_load_all_periods_from_added_multi_period_event_file(self):
        # Arrange
        state = SANSLoadTest._get_simple_state(sample_scatter="AddedMultiPeriodEvent-add.nxs")
        # Act
        output_workspace_names = {"SampleScatterWorkspace": "sample_scatter",
                                  "SampleScatterMonitorWorkspace": "sample_monitor_scatter"}
        load_alg = self._run_load(state, publish_to_cache=True, use_cached=True, move_workspace=False,
                                  output_workspace_names=output_workspace_names)
        # Assert
        expected_number_of_workspaces = [4, 0, 0, 0, 0, 0]
        expected_number_on_ads = 0
        workspace_type = [EventWorkspace, None, None, None, None, None]
        self._do_test_output(load_alg, expected_number_of_workspaces, expected_number_on_ads, workspace_type)
        # Check that calibration is added
        self.assertFalse(SANSLoadTest._has_calibration_been_applied(load_alg))
        # Cleanup
        remove_all_workspaces_from_ads()
class SANSLoadDataRunnerTest(systemtesting.MantidSystemTest):
    def __init__(self):
        systemtesting.MantidSystemTest.__init__(self)
        self._success = False
    def runTest(self):
        suite = unittest.TestSuite()
        suite.addTest(unittest.makeSuite(SANSLoadFactoryTest, 'test'))
        suite.addTest(unittest.makeSuite(SANSLoadTest, 'test'))
        runner = unittest.TextTestRunner()
        res = runner.run(suite)
        if res.wasSuccessful():
            self._success = True
    def requiredMemoryMB(self):
        return 2000
    def validate(self):
        return self._success
if __name__ == '__main__':
    unittest.main()
 | 
	gpl-3.0 | 41,515,017,078,706,024 | 46.055769 | 114 | 0.632269 | false | 
| 
	megaserg/pants | 
	tests/python/pants_test/build_graph/test_build_file_aliases.py | 
	5 | 
	5962 | 
	# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
                        unicode_literals, with_statement)
import os
import unittest
from pants.build_graph.address import Address
from pants.build_graph.build_file_aliases import BuildFileAliases, TargetMacro
from pants.build_graph.build_graph import BuildGraph
from pants.build_graph.target import Target
class BuildFileAliasesTest(unittest.TestCase):
  class RedTarget(Target):
    pass
  class BlueTarget(Target):
    pass
  def setUp(self):
    self.target_macro_factory = TargetMacro.Factory.wrap(
      lambda ctx: ctx.create_object(self.BlueTarget,
                                    type_alias='jill',
                                    name=os.path.basename(ctx.rel_path)),
      self.BlueTarget, self.RedTarget)
  def test_create(self):
    self.assertEqual(BuildFileAliases(targets={},
                                      objects={},
                                      context_aware_object_factories={}),
                     BuildFileAliases())
    targets = {'jake': Target, 'jill': self.target_macro_factory}
    self.assertEqual(BuildFileAliases(targets=targets,
                                      objects={},
                                      context_aware_object_factories={}),
                     BuildFileAliases(targets=targets))
    objects = {'jane': 42}
    self.assertEqual(BuildFileAliases(targets={},
                                      objects=objects,
                                      context_aware_object_factories={}),
                     BuildFileAliases(objects=objects))
    factories = {'jim': lambda ctx: 'bob'}
    self.assertEqual(BuildFileAliases(targets={},
                                      objects={},
                                      context_aware_object_factories=factories),
                     BuildFileAliases(context_aware_object_factories=factories))
    self.assertEqual(BuildFileAliases(targets=targets,
                                      objects=objects,
                                      context_aware_object_factories={}),
                     BuildFileAliases(targets=targets, objects=objects))
    self.assertEqual(BuildFileAliases(targets=targets,
                                      objects={},
                                      context_aware_object_factories=factories),
                     BuildFileAliases(targets=targets,
                                      context_aware_object_factories=factories))
    self.assertEqual(BuildFileAliases(targets={},
                                      objects=objects,
                                      context_aware_object_factories=factories),
                     BuildFileAliases(objects=objects,
                                      context_aware_object_factories=factories))
    self.assertEqual(BuildFileAliases(targets=targets,
                                      objects=objects,
                                      context_aware_object_factories=factories),
                     BuildFileAliases(targets=targets,
                                      objects=objects,
                                      context_aware_object_factories=factories))
  def test_curry_context(self):
    def curry_me(ctx, bob):
      """original doc"""
      return ctx, bob
    curried = BuildFileAliases.curry_context(curry_me)
    func = curried(42)
    self.assertEqual('original doc', curried.__doc__)
    self.assertTrue('curry_me' in curried.__name__,
                    'Unhelpful __name__: ' + curried.__name__)
    self.assertEqual((42, 'fred'), func('fred'))
  def test_create_bad_targets(self):
    with self.assertRaises(TypeError):
      BuildFileAliases(targets={'fred': object()})
    target = Target('fred', Address.parse('a:b'), BuildGraph(address_mapper=None))
    with self.assertRaises(TypeError):
      BuildFileAliases(targets={'fred': target})
  def test_create_bad_objects(self):
    with self.assertRaises(TypeError):
      BuildFileAliases(objects={'jane': Target})
    with self.assertRaises(TypeError):
      BuildFileAliases(objects={'jane': self.target_macro_factory})
  def test_bad_context_aware_object_factories(self):
    with self.assertRaises(TypeError):
      BuildFileAliases(context_aware_object_factories={'george': 1})
  def test_merge(self):
    e_factory = lambda ctx: 'e'
    f_factory = lambda ctx: 'f'
    first = BuildFileAliases(targets={'a': Target},
                             objects={'d': 2},
                             context_aware_object_factories={'e': e_factory})
    second = BuildFileAliases(targets={'b': self.target_macro_factory},
                              objects={'c': 1, 'd': 42},
                              context_aware_object_factories={'f': f_factory})
    expected = BuildFileAliases(
        # nothing to merge
        targets={'a': Target, 'b': self.target_macro_factory},
        # second overrides first
        objects={'c': 1, 'd': 42},
        # combine
        context_aware_object_factories={'e': e_factory, 'f': f_factory})
    self.assertEqual(expected, first.merge(second))
  def test_target_types(self):
    aliases = BuildFileAliases(targets={'jake': Target, 'jill': self.target_macro_factory})
    self.assertEqual({'jake': Target}, aliases.target_types)
  def test_target_macro_factories(self):
    aliases = BuildFileAliases(targets={'jake': Target, 'jill': self.target_macro_factory})
    self.assertEqual({'jill': self.target_macro_factory}, aliases.target_macro_factories)
  def test_target_types_by_alias(self):
    aliases = BuildFileAliases(targets={'jake': Target, 'jill': self.target_macro_factory})
    self.assertEqual({'jake': {Target}, 'jill': {self.BlueTarget, self.RedTarget}},
                     aliases.target_types_by_alias)
 | 
	apache-2.0 | -2,236,530,595,974,118,100 | 40.402778 | 93 | 0.580678 | false | 
| 
	wryoung412/CS294_Deep_RL | 
	hw1/policy/base.py | 
	1 | 
	1167 | 
	import datetime
import numpy as np
import inspect, os
import tensorflow as tf
from abc import ABC, abstractmethod
class BasePolicy(ABC):
    def __init__(self, env_name):
        # Resolve this module's directory (equivalent to using __file__)
        policy_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
        model_dir = os.path.join(policy_dir, self.type(), env_name)
        model_meta = os.path.join(model_dir, env_name + '.meta')
        print(model_meta)
        assert tf.gfile.Exists(model_meta)
        self.graph = tf.Graph()
        self.sess = tf.Session(graph = self.graph)
        with self.graph.as_default():
            saver = tf.train.import_meta_graph(model_meta)
            saver.restore(self.sess, tf.train.latest_checkpoint(model_dir))
    @abstractmethod
    def type(self):
        pass
    def act(self, ob):
        input = np.reshape(ob, [-1] + list(ob.shape))
        obs_ph = self.graph.get_operation_by_name('obs_ph').outputs[0]
        act_out = self.graph.get_operation_by_name('act_out').outputs[0]
        output = self.sess.run(act_out, feed_dict = {obs_ph: input})
        return output[0]
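# --- Illustrative sketch (editor's addition; not part of the original file) ---
# BasePolicy leaves only type() abstract: it names the sub-directory under
# policy/ that holds the saved TensorFlow checkpoint for a given environment.
# A hypothetical behavioral-cloning subclass could therefore be as small as
# the class below; the 'bc' directory layout is an assumption, not something
# this repository guarantees.
class ExampleBCPolicy(BasePolicy):
    """Hypothetical policy whose checkpoint lives under policy/bc/<env_name>/."""
    def type(self):
        return 'bc'
# Usage (assumes the checkpoint files already exist on disk):
#   policy = ExampleBCPolicy('Hopper-v1')
#   action = policy.act(observation)   # observation: a single numpy array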
if __name__ == '__main__':
    print('base policy')
 | 
	mit | -1,533,891,416,255,348,200 | 34.363636 | 94 | 0.616967 | false | 
| 
	prestontimmons/django-email-template | 
	email_template/email.py | 
	2 | 
	2513 | 
	from functools import partial
from django.conf import settings
from django.core.mail import EmailMessage, EmailMultiAlternatives
from django.template import Context, RequestContext
from django.template.loader import select_template
from .util import render_node
def send_base(template_name, context_data, send_method, render_method,
              request=None, from_email=None, send_method_args=None, **kwargs):
    send_method_args = send_method_args or {}
    args = get_message(
        template_name=template_name,
        context_data=context_data,
        request=request,
        render_method=render_method,
    )
    args["from_email"] = from_email or settings.DEFAULT_FROM_EMAIL
    args.update(send_method_args)
    return send_method(**args)
send_mail = send_base
def get_message(template_name, context_data, request, render_method):
    if request:
        c = RequestContext(request, context_data)
    else:
        c = Context(context_data)
    if not isinstance(template_name, (list, tuple)):
        template_name = [template_name]
    template = select_template(template_name)
    return render_method(template, c)
def render_django_fields(template, context):
    message = {}
    message["text"] = render_node(template, "text", context)
    message["html"] = render_node(template, "html", context)
    message["subject"] = render_node(template, "subject", context)
    recipients = render_node(template, "recipients", context)
    recipient_list = []
    for recipient in recipients.split(","):
        recipient_list.append(recipient.strip())
    message["recipient_list"] = recipient_list
    return message
def send_django_wrapper(**kwargs):
    text = kwargs.get("text", "")
    html = kwargs.get("html", "")
    if text and html:
        email_class = EmailMultiAlternatives
    else:
        email_class = EmailMessage
    if html and not text:
        body = html
    else:
        body = text
    msg = email_class(
        subject=kwargs["subject"],
        body=body,
        from_email=kwargs["from_email"],
        to=kwargs["recipient_list"],
        headers=kwargs.get("headers", {}),
        cc=kwargs.get("cc", []),
        connection=kwargs.get("connection", None),
    )
    if text and html:
        msg.attach_alternative(html, "text/html")
    if html and not text:
        msg.content_subtype = "html"
    msg.send()
    return msg
send_django = partial(send_base,
    send_method=send_django_wrapper,
    render_method=render_django_fields,
)
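# --- Usage sketch (editor's addition; not part of the original module) ---
# send_django() renders the "subject", "recipients", "text" and (optionally)
# "html" blocks of a Django template and sends the resulting message.  The
# template path, context and header values below are hypothetical.
def _example_send_welcome(user):
    return send_django(
        template_name="emails/welcome.html",
        context_data={"user": user},
        send_method_args={"headers": {"Reply-To": "support@example.com"}},
    )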
 | 
	mit | 3,266,218,043,247,853,000 | 24.907216 | 78 | 0.6538 | false | 
| 
	Checksum/landfill | 
	landfill.py | 
	1 | 
	14954 | 
	import os
import re
import sys
import imp
import pwiz
import peewee
import inspect
import logging
import pkgutil
import datetime
import importlib
import playhouse
from peewee import *
from peewee import Node
from playhouse.migrate import *
from types import ModuleType
from io import StringIO
__version__ = '0.2.1'
GENERATE_TEMPLATE = '''
from peewee import *
from models import *
{fields}
def up(migrator):
    {up_tables}
    migrator({up_columns}
    )
def down(migrator):
    {down_tables}
    migrator({down_columns}
    )
'''
COLUMN_DEFINITION = {
    'add_column'    : "\n      migrator.add_column('{}', '{}', {}_{})",
    'drop_column'   : "\n      migrator.drop_column('{}', '{}')",
    'create_table'  : "\n    {}.create_table(True)",
    'drop_table'    : "\n    {}.drop_table(True)"
}
COLUMN_DIRECTION = {
    'add_column'    : ('add_column', 'drop_column'),
    'drop_column'   : ('drop_column', 'add_column'),
    'create_table'  : ('create_table', 'drop_table'),
    'drop_table'    : ('drop_table', 'create_table')
}
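# --- Example migration module (editor's addition; names are hypothetical) ---
# apply_migration() below imports a module such as migrations/0002_add_email.py
# and calls its up()/down() with a CustomMigrator instance.  Calling the
# migrator with a list of operations only *collects* them; they are executed
# (or merely printed, when fake=True) afterwards.  A minimal migration module
# could therefore read:
#
#     from peewee import CharField
#
#     def up(migrator):
#         migrator(
#             migrator.add_column('user', 'email', CharField(default='')),
#         )
#
#     def down(migrator):
#         migrator(
#             migrator.drop_column('user', 'email'),
#         )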
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
logger.addHandler(ch)
class Migration(Model):
    """
    This model tracks the migrations that have currently
    been applied, and to determine which migrations to apply
    depending on the direction
    """
    name = CharField(max_length=255)
    applied_on = DateTimeField(default=datetime.datetime.now)
    class Meta:
        indexes = (
            (('name',), True),
        )
class MigrationException(Exception):
    pass
class CustomMigrator(SchemaMigrator):
    """
    A custom migrator which extends peewee's migrator
    which does a few things:
    1. Keep track of migrations in order to fake them if necessary
    2. Incremental migrations
    """
    def __init__(self, database, module, **kwargs):
        SchemaMigrator.__init__(self, database)
        # Set options
        self.module = module
        self.module_name = self.module.__name__
        self.direction = kwargs.get('direction', 'up')
        self.migration = kwargs.get('migration', None)
        self.fake = kwargs.get('fake', False)
        self.force = kwargs.get('force', False)
        self.migrations_run = 0
        self.operations = None
        self.last_id = None
        self.initialize()
    def __call__(self, *args):
        self.operations = args
    def raw_query(self, sql):
        return sql
    def initialize(self):
        # If the Migration table doesn't exist, create it
        if not Migration.table_exists():
            Migration.create_table()
        # Determine what the last migration was on this server
        last = Migration.select().order_by(Migration.id.desc()).limit(1).first()
        self.last_id = last.name.split("_")[0] if last else None
        if last:
            print("Last run migration %s" % last.name)
        else:
            print("No migrations have been run yet")
    def run(self):
        if self.migration:
            self.apply_migration(self.migration)
        else:
            # Fetch migrations
            path = os.path.dirname(self.module.__file__)
            migrations = get_migrations(path)
            for migration in migrations:
                if not self.force and migration.split("_")[0] <= self.last_id:
                    continue
                self.apply_migration(migration)
                self.migrations_run += 1
        if self.migrations_run or self.force:
            print("\nNumber of migrations run %d" % self.migrations_run)
        else:
            print("\nDatabase already upto date!")
    def execute_operation(self, op):
        print(op)
        if self.fake:
            return False
        if isinstance(op, Operation):
            playhouse.migrate.migrate(op)
        # If raw query, execute it ourselves
        elif isinstance(op, str):
            self.database.execute_sql(op)
        else:
            raise MigrationException("Can't determine type of operation to run")
    def apply_migration(self, migration, **kwargs):
        '''
        Apply a particular migration
        '''
        print("\nAttempting to run %s" % migration)
        # First check if the migration has already been applied
        exists = Migration.select().where(Migration.name == migration).limit(1).first()
        if exists and self.direction == 'up':
            print("This migration has already been run on this server")
            if not self.force or self.fake:
                return False
            else:
                print("Force running this migration again")
        # Load the module
        module_name = "%s.%s" % (self.module_name, migration)
        try:
            module = importlib.import_module(module_name)
            if not hasattr(module, self.direction):
                raise MigrationException("%s doesn't have %s migration defined" %
                    (migration, self.direction)
                )
            # Actually execute the direction method
            # Note that this doesn't actually run the migrations in the DB yet.
            # This merely collects the steps in the migration, so that if needed
            # we can just fake it and print out the SQL query as well.
            getattr(module, self.direction)(self)
            # Print out each migration and execute it
            for op in self.operations:
                self.execute_operation(op)
            if not self.fake:
                # If successful, create the entry in our log
                if self.direction == 'up' and not exists:
                    Migration.create(name=migration)
                elif self.direction == 'down' and exists:
                    exists.delete_instance()
            print("Done")
        except ImportError:
            raise MigrationException("%s migration not found" % migration)
class CustomSqliteMigrator(CustomMigrator, SqliteMigrator):
    pass
class CustomMySQLMigrator(CustomMigrator, MySQLMigrator):
    pass
class CustomPostgresqlMigrator(CustomMigrator, PostgresqlMigrator):
    pass
DATABASE_ALIASES = {
    CustomSqliteMigrator: ['sqlite', 'sqlite3'],
    CustomMySQLMigrator: ['mysql', 'mysqldb'],
    CustomPostgresqlMigrator: ['postgres', 'postgresql'],
}
DATABASE_MAP = dict((value, key)
                    for key in DATABASE_ALIASES
                    for value in DATABASE_ALIASES[key])
class Capturing(list):
    """
    Util class to capture anything printed to stdout.
    This is needed to collect the table definitions that
    peewee's introspector (pwiz) prints out
    """
    def __enter__(self):
        self._stdout = sys.stdout
        sys.stdout = self._stringio = StringIO()
        return self
    def __exit__(self, *args):
        self.extend(self._stringio.getvalue().splitlines())
        sys.stdout = self._stdout
class Generator(object):
    """
    Automatically generates the list of migrations to be run
    by comparing the states of the model definition and the
    database.
    A lot of this is pretty rudimentary at the moment - Adding
    and removing columns are supported.
    WARNING: Alpha at best.
    """
    def __init__(self, engine, database, models, **kwargs):
        '''
        Terminology:
        py_ = loaded from the python models
        db_ = generated by instrospecting the DB
        '''
        # Python object
        self.py_models = models
        self.db_models = self.get_pwiz_tables(engine, database)
        # Tables from the DB, generated by pwiz
        self.py_tables = self.get_tables(self.py_models)
        self.db_tables = self.get_tables(self.db_models)
        self.source_cache = {}
        # Fields to generate the template
        self.migration_fields = []
        self.up_columns = []
        self.down_columns = []
        self.up_tables = []
        self.down_tables = []
    def get_tables(self, models):
        '''
        Extract all peewee models from the passed in module
        '''
        return { obj._meta.db_table : obj for obj in
                models.__dict__.itervalues() if
                isinstance(obj, peewee.BaseModel) and
                len(obj._meta.fields) > 1
            }
    def get_pwiz_tables(self, engine, database):
        '''
        Run the pwiz introspector and get the models defined
        in the DB.
        '''
        introspector = pwiz.make_introspector(engine, database.database,
            **database.connect_kwargs)
        out_file = '/tmp/db_models.py'
        with Capturing() as code:
            pwiz.print_models(introspector)
        code = '\n'.join(code)
        # Unfortunately, inspect.getsource doesn't seem to work
        # with dynamically created classes unless it is written out
        # to a file. So write it out to a temporary file
        with open(out_file, 'w') as file_:
            file_.write(code)
        # Load up the DB models as a new module so that we can
        # compare them with those in the model definition
        return imp.load_source('db_models', out_file)
    def run(self):
        for table_name, py_table in self.py_tables.iteritems():
            # If the table exists in the DB, compare its fields
            if table_name in self.db_tables:
                logger.debug("%s already exists in the DB. Checking fields now" % table_name)
                model_set = set(py_table._meta.fields)
                db_set = set(self.db_tables.get(table_name)._meta.fields)
                # Added and deleted columns
                added = model_set - db_set
                deleted = db_set - model_set
                if added:
                    logger.info("Columns added: %s" % added)
                    for column in added:
                        self.generate_definition('add_column', py_table, self.py_models, table_name, column)
                if deleted:
                    logger.info("Columns deleted: %s" % deleted)
                    for column in deleted:
                        self.generate_definition('drop_column', self.db_tables.get(table_name), self.db_models, table_name, column)
            # If new table, create the table
            else:
                logger.info("%s is a new table" % table_name)
                model_class = py_table._meta.model_class.__name__
                self.up_tables.append(COLUMN_DEFINITION.get('create_table').format(model_class))
                self.down_tables.append(COLUMN_DEFINITION.get('drop_table').format(model_class))
        print(GENERATE_TEMPLATE.format(
            fields='\n'.join(self.migration_fields),
            up_columns=','.join(self.up_columns),
            down_columns=','.join(self.down_columns),
            up_tables=','.join(self.up_tables),
            down_tables=','.join(self.down_tables),
        ))
    def generate_definition(self, _type, table, model, table_name, column):
        field = table._meta.fields.get(column)
        field_type = type(field).__name__
        field_attrs = field.__dict__
        field_name = field_attrs.get('db_column')
        definition = self.get_field(_type, field_attrs, model, table_name, column)
        if definition:
            self.migration_fields.append(definition)
            # Generate the migration statement
            steps = COLUMN_DIRECTION.get(_type)
            self.up_columns.append(COLUMN_DEFINITION.get(steps[0]).format(table_name, field_name, table_name, column))
            self.down_columns.append(COLUMN_DEFINITION.get(steps[1]).format(table_name, field_name, table_name, column))
        else:
            logger.warning("Could not get definition of field %s" % column)
    def get_field(self, _type, field_attrs, model, table_name, column):
        model_name = field_attrs.get('model_class').__name__
        # Introspect the table definition and search for the field
        # This is done in a very crude way, do it better!
        model_source = self.get_model_source(_type, model, model_name)
        definition = re.search(column + "(.*)", model_source)
        return table_name + '_' + definition.group(0).strip() if definition else None
    def get_model_source(self, _type, model, model_name):
        if not model_name in self.source_cache:
            model_source = inspect.getsource(getattr(model, model_name)).strip()
            self.source_cache[model_name] = model_source
        return self.source_cache.get(model_name, '')
def fake_print(self):
    '''
    This is the overridden __str__ method for Operation
    Recursively prints out the actual query to be executed
    '''
    def _fake_run():
        kwargs = self.kwargs.copy()
        kwargs['generate'] = True
        return _fake_handle_result(
            getattr(self.migrator, self.method)(*self.args, **kwargs)
        )
    def _fake_handle_result(result):
        if isinstance(result, Node):
            sql, params = self._parse_node(result)
            return (sql, params)
        elif isinstance(result, Operation):
            return str(result)
        elif isinstance(result, (list, tuple)):
            return '\n'.join([str(_fake_handle_result(item)) for item in result])
    return str(_fake_run())
# Monkey Patch the Operation to show SQL
setattr(Operation, "__str__", fake_print)
def get_migrations(path):
    '''
    In the specified directory, get all the files which match the pattern
    0001_migration.py
    '''
    pattern = re.compile(r"\d+_[\w\d]+")
    modules = [name for _, name, _ in pkgutil.iter_modules([path])
                if pattern.match(name)
            ]
    return sorted(modules, key=lambda name: int(name.split("_")[0]))
def validate_args(engine, database, module):
    if engine not in DATABASE_MAP:
        raise MigrationException('Unrecognized database engine, must be one of: %s' %
            ', '.join(DATABASE_MAP.keys()))
    if not isinstance(database, peewee.Database):
        raise MigrationException("Parameter database has to be a peewee database object")
    if not isinstance(module, ModuleType):
        raise MigrationException("Parameter module has to be a python module")
# Public API
def migrate(engine, database, module, **kwargs):
    '''
    Execute the migrations. Pass in kwargs
    '''
    validate_args(engine, database, module)
    options = {
        'direction': kwargs.get('direction', 'up'),
        'fake': kwargs.get('fake', False),
        'force': kwargs.get('force', False),
        'migration': kwargs.get('migration', None),
        'transaction': kwargs.get('transaction', True),
    }
    Migration._meta.database = database
    migrator = DATABASE_MAP[engine](database, module, **options)
    migrator.run()
def generate(engine, database, models, **kwargs):
    '''
    Generate the migrations by introspecting the db
    '''
    validate_args(engine, database, models)
    generator = Generator(engine, database, models)
    generator.run()
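# --- Usage sketch (editor's addition; not part of the original module) ---
# The public API takes a supported engine name ('sqlite', 'mysql', 'postgres',
# ...), a peewee Database object and the python package holding the migration
# modules (files named like 0001_initial.py).  All names below are invented.
#
#     import landfill
#     import migrations          # package containing 000N_*.py modules
#     import models              # module defining the peewee models
#     from peewee import MySQLDatabase
#
#     db = MySQLDatabase('app', user='root')
#
#     # apply every migration that has not been recorded yet
#     landfill.migrate('mysql', db, migrations)
#
#     # show the SQL without touching the schema
#     landfill.migrate('mysql', db, migrations, fake=True)
#
#     # roll one migration back
#     landfill.migrate('mysql', db, migrations, direction='down',
#                      migration='0002_add_email')
#
#     # emit a migration skeleton by diffing models against the live schema
#     landfill.generate('mysql', db, models)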
 | 
	mit | -8,818,175,679,522,130,000 | 32.604494 | 131 | 0.601511 | false | 
| 
	mawimawi/datadjables | 
	datadjables/datadjable_testing/urls.py | 
	1 | 
	1167 | 
	from django.conf.urls import patterns, url
from django.views.generic import TemplateView, DetailView
from .models import Person
from django.conf import settings
from datadjables.datadjable_testing.views import DPersons, DPersonsFilterTop, \
    DSimplePersons, DPurchases
urlpatterns = patterns(
    '',
    url(r'^$',
        TemplateView.as_view(template_name='datadjable_testing/index.html'),
        name='datadjable_testing_index'),
    url(r'^persons/$',
        DPersons.as_view(),
        name='datadjable_testing_person_list'),
    url(r'^persons/(?P<pk>\d+)/$', DetailView.as_view(
        model=Person, template_name='datadjable_testing/person_detail.html'),
        name='datadjable_testing_person_detail'),
    url(r'^personsfiltertop/$', DPersonsFilterTop.as_view(),
        name='datadjable_testing_personfiltertop_list'),
    url(r'^simplepersons/$', DSimplePersons.as_view(),
        name='datadjable_testing_simpleperson_list'),
    url(r'^purchases/$', DPurchases.as_view(),
        name='datadjable_testing_purchases_list'),
    url(r'^static/(?P<path>.*)$', 'django.views.static.serve', {
        'document_root': settings.STATIC_ROOT}),
)
 | 
	mit | -4,885,345,186,190,190,000 | 35.46875 | 79 | 0.681234 | false | 
| 
	vuolter/pyload | 
	src/pyload/plugins/decrypters/ChipDe.py | 
	2 | 
	1093 | 
	# -*- coding: utf-8 -*-
import re
from ..base.decrypter import BaseDecrypter
class ChipDe(BaseDecrypter):
    __name__ = "ChipDe"
    __type__ = "decrypter"
    __version__ = "0.17"
    __status__ = "testing"
    __pattern__ = r"https?://(?:www\.)?chip\.de/video/.+\.html"
    __config__ = [
        ("enabled", "bool", "Activated", True),
        ("use_premium", "bool", "Use premium account if available", True),
        (
            "folder_per_package",
            "Default;Yes;No",
            "Create folder for each package",
            "Default",
        ),
    ]
    __description__ = """Chip.de decrypter plugin"""
    __license__ = "GPLv3"
    __authors__ = [("4Christopher", "[email protected]")]
    def decrypt(self, pyfile):
        self.data = self.load(pyfile.url)
        f = re.search(r'"(https?://media-video\.chip\.de/.+/MEDIA/.+)"', self.data)
        if f is None:
            # re.search returns None when nothing matches, so test for that
            # instead of expecting an exception
            self.fail(self._("Failed to find the URL"))
        else:
            self.links = [f.group(1)]
            self.log_debug(f"The file URL is {self.links[0]}")
 | 
	agpl-3.0 | -133,262,766,905,910,980 | 27.025641 | 87 | 0.516011 | false | 
| 
	cgimenop/Excel2Testlink | 
	ExcelParser/lib/et_xmlfile/tests/test_incremental_xmlfile.py | 
	7 | 
	11376 | 
	from __future__ import absolute_import
"""
Tests for the incremental XML serialisation API.
Adapted from the tests from lxml.etree.xmlfile
"""
try:
    import lxml
    from lxml import etree  # referenced by the xfail tests further down
except ImportError:
    raise ImportError("lxml is required to run the tests.")
from io import BytesIO
import unittest
import tempfile, os, sys
from .common_imports import HelperTestCase, skipIf
from et_xmlfile import xmlfile
from et_xmlfile.xmlfile import LxmlSyntaxError
import pytest
from .helper import compare_xml
import xml.etree.ElementTree
from xml.etree.ElementTree import Element, parse
class _XmlFileTestCaseBase(HelperTestCase):
    _file = None  # to be set by specific subtypes below
    def setUp(self):
        self._file = BytesIO()
    def test_element(self):
        with xmlfile(self._file) as xf:
            with xf.element('test'):
                pass
        self.assertXml('<test></test>')
    def test_element_write_text(self):
        with xmlfile(self._file) as xf:
            with xf.element('test'):
                xf.write('toast')
        self.assertXml('<test>toast</test>')
    def test_element_nested(self):
        with xmlfile(self._file) as xf:
            with xf.element('test'):
                with xf.element('toast'):
                    with xf.element('taste'):
                        xf.write('conTent')
        self.assertXml('<test><toast><taste>conTent</taste></toast></test>')
    def test_element_nested_with_text(self):
        with xmlfile(self._file) as xf:
            with xf.element('test'):
                xf.write('con')
                with xf.element('toast'):
                    xf.write('tent')
                    with xf.element('taste'):
                        xf.write('inside')
                    xf.write('tnet')
                xf.write('noc')
        self.assertXml('<test>con<toast>tent<taste>inside</taste>'
                       'tnet</toast>noc</test>')
    def test_write_Element(self):
        with xmlfile(self._file) as xf:
            xf.write(Element('test'))
        self.assertXml('<test/>')
    def test_write_Element_repeatedly(self):
        element = Element('test')
        with xmlfile(self._file) as xf:
            with xf.element('test'):
                for i in range(100):
                    xf.write(element)
        tree = self._parse_file()
        self.assertTrue(tree is not None)
        self.assertEqual(100, len(tree.getroot()))
        self.assertEqual(set(['test']), set(el.tag for el in tree.getroot()))
    def test_namespace_nsmap(self):
        with xmlfile(self._file) as xf:
            with xf.element('{nsURI}test', nsmap={'x': 'nsURI'}):
                pass
        self.assertXml('<x:test xmlns:x="nsURI"></x:test>')
    def test_namespace_nested_nsmap(self):
        with xmlfile(self._file) as xf:
            with xf.element('test', nsmap={'x': 'nsURI'}):
                with xf.element('{nsURI}toast'):
                    pass
        self.assertXml('<test xmlns:x="nsURI"><x:toast></x:toast></test>')
    def test_anonymous_namespace(self):
        with xmlfile(self._file) as xf:
            with xf.element('{nsURI}test'):
                pass
        self.assertXml('<ns0:test xmlns:ns0="nsURI"></ns0:test>')
    def test_namespace_nested_anonymous(self):
        with xmlfile(self._file) as xf:
            with xf.element('test'):
                with xf.element('{nsURI}toast'):
                    pass
        self.assertXml('<test><ns0:toast xmlns:ns0="nsURI"></ns0:toast></test>')
    def test_default_namespace(self):
        with xmlfile(self._file) as xf:
            with xf.element('{nsURI}test', nsmap={None: 'nsURI'}):
                pass
        self.assertXml('<test xmlns="nsURI"></test>')
    def test_nested_default_namespace(self):
        with xmlfile(self._file) as xf:
            with xf.element('{nsURI}test', nsmap={None: 'nsURI'}):
                with xf.element('{nsURI}toast'):
                    pass
        self.assertXml('<test xmlns="nsURI"><toast></toast></test>')
    @pytest.mark.xfail
    def test_pi(self):
        from et_xmlfile.xmlfile import ProcessingInstruction
        with xmlfile(self._file) as xf:
            xf.write(ProcessingInstruction('pypi'))
            with xf.element('test'):
                pass
        self.assertXml('<?pypi ?><test></test>')
    @pytest.mark.xfail
    def test_comment(self):
        with xmlfile(self._file) as xf:
            xf.write(etree.Comment('a comment'))
            with xf.element('test'):
                pass
        self.assertXml('<!--a comment--><test></test>')
    def test_attribute(self):
        with xmlfile(self._file) as xf:
            with xf.element('test', attrib={'k': 'v'}):
                pass
        self.assertXml('<test k="v"></test>')
    def test_escaping(self):
        with xmlfile(self._file) as xf:
            with xf.element('test'):
                xf.write('Comments: <!-- text -->\n')
                xf.write('Entities: &')
        self.assertXml(
            '<test>Comments: <!-- text -->\nEntities: &amp;</test>')
    @pytest.mark.xfail
    def test_encoding(self):
        with xmlfile(self._file, encoding='utf16') as xf:
            with xf.element('test'):
                xf.write('toast')
        self.assertXml('<test>toast</test>', encoding='utf16')
    @pytest.mark.xfail
    def test_buffering(self):
        with xmlfile(self._file, buffered=False) as xf:
            with xf.element('test'):
                self.assertXml("<test>")
                xf.write('toast')
                self.assertXml("<test>toast")
                with xf.element('taste'):
                    self.assertXml("<test>toast<taste>")
                    xf.write('some', etree.Element("more"), "toast")
                    self.assertXml("<test>toast<taste>some<more/>toast")
                self.assertXml("<test>toast<taste>some<more/>toast</taste>")
                xf.write('end')
                self.assertXml("<test>toast<taste>some<more/>toast</taste>end")
            self.assertXml("<test>toast<taste>some<more/>toast</taste>end</test>")
        self.assertXml("<test>toast<taste>some<more/>toast</taste>end</test>")
    @pytest.mark.xfail
    def test_flush(self):
        with xmlfile(self._file, buffered=True) as xf:
            with xf.element('test'):
                self.assertXml("")
                xf.write('toast')
                self.assertXml("")
                with xf.element('taste'):
                    self.assertXml("")
                    xf.flush()
                    self.assertXml("<test>toast<taste>")
                self.assertXml("<test>toast<taste>")
            self.assertXml("<test>toast<taste>")
        self.assertXml("<test>toast<taste></taste></test>")
    def test_failure_preceding_text(self):
        try:
            with xmlfile(self._file) as xf:
                xf.write('toast')
        except LxmlSyntaxError:
            self.assertTrue(True)
        else:
            self.assertTrue(False)
    def test_failure_trailing_text(self):
        with xmlfile(self._file) as xf:
            with xf.element('test'):
                pass
            try:
                xf.write('toast')
            except LxmlSyntaxError:
                self.assertTrue(True)
            else:
                self.assertTrue(False)
    def test_failure_trailing_Element(self):
        with xmlfile(self._file) as xf:
            with xf.element('test'):
                pass
            try:
                xf.write(Element('test'))
            except LxmlSyntaxError:
                self.assertTrue(True)
            else:
                self.assertTrue(False)
    @pytest.mark.xfail
    def test_closing_out_of_order_in_error_case(self):
        cm_exit = None
        try:
            with xmlfile(self._file) as xf:
                x = xf.element('test')
                cm_exit = x.__exit__
                x.__enter__()
                raise ValueError('123')
        except ValueError:
            self.assertTrue(cm_exit)
            try:
                cm_exit(ValueError, ValueError("huhu"), None)
            except LxmlSyntaxError:
                self.assertTrue(True)
            else:
                self.assertTrue(False)
        else:
            self.assertTrue(False)
    def _read_file(self):
        pos = self._file.tell()
        self._file.seek(0)
        try:
            return self._file.read()
        finally:
            self._file.seek(pos)
    def _parse_file(self):
        pos = self._file.tell()
        self._file.seek(0)
        try:
            return parse(self._file)
        finally:
            self._file.seek(pos)
    def tearDown(self):
        if self._file is not None:
            self._file.close()
    def assertXml(self, expected, encoding='utf8'):
        diff = compare_xml(self._read_file().decode(encoding), expected)
        assert diff is None, diff
class BytesIOXmlFileTestCase(_XmlFileTestCaseBase):
    def setUp(self):
        self._file = BytesIO()
    def test_filelike_close(self):
        with xmlfile(self._file, close=True) as xf:
            with xf.element('test'):
                pass
        self.assertRaises(ValueError, self._file.getvalue)
class TempXmlFileTestCase(_XmlFileTestCaseBase):
    def setUp(self):
        self._file = tempfile.TemporaryFile()
class TempPathXmlFileTestCase(_XmlFileTestCaseBase):
    def setUp(self):
        self._tmpfile = tempfile.NamedTemporaryFile(delete=False)
        self._file = self._tmpfile.name
    def tearDown(self):
        try:
            self._tmpfile.close()
        finally:
            if os.path.exists(self._tmpfile.name):
                os.unlink(self._tmpfile.name)
    def _read_file(self):
        self._tmpfile.seek(0)
        return self._tmpfile.read()
    def _parse_file(self):
        self._tmpfile.seek(0)
        return parse(self._tmpfile)
    @skipIf(True, "temp file behaviour is too platform specific here")
    def test_buffering(self):
        pass
    @skipIf(True, "temp file behaviour is too platform specific here")
    def test_flush(self):
        pass
class SimpleFileLikeXmlFileTestCase(_XmlFileTestCaseBase):
    class SimpleFileLike(object):
        def __init__(self, target):
            self._target = target
            self.write = target.write
            self.tell = target.tell
            self.seek = target.seek
            self.closed = False
        def close(self):
            assert not self.closed
            self.closed = True
            self._target.close()
    def setUp(self):
        self._target = BytesIO()
        self._file = self.SimpleFileLike(self._target)
    def _read_file(self):
        return self._target.getvalue()
    def _parse_file(self):
        pos = self._file.tell()
        self._target.seek(0)
        try:
            return parse(self._target)
        finally:
            self._target.seek(pos)
    def test_filelike_not_closing(self):
        with xmlfile(self._file) as xf:
            with xf.element('test'):
                pass
        self.assertFalse(self._file.closed)
    def test_filelike_close(self):
        with xmlfile(self._file, close=True) as xf:
            with xf.element('test'):
                pass
        self.assertTrue(self._file.closed)
        self._file = None  # prevent closing in tearDown()
 | 
	mit | 2,728,526,013,529,476,000 | 30.955056 | 82 | 0.545271 | false | 
| 
	sorig/shogun | 
	examples/undocumented/python/classifier_svmlight_linear_term.py | 
	7 | 
	2066 | 
	#!/usr/bin/env python
import numpy
traindna=['CGCACGTACGTAGCTCGAT',
		      'CGACGTAGTCGTAGTCGTA',
		      'CGACGGGGGGGGGGTCGTA',
		      'CGACCTAGTCGTAGTCGTA',
		      'CGACCACAGTTATATAGTA',
		      'CGACGTAGTCGTAGTCGTA',
		      'CGACGTAGTTTTTTTCGTA',
		      'CGACGTAGTCGTAGCCCCA',
		      'CAAAAAAAAAAAAAAAATA',
		      'CGACGGGGGGGGGGGCGTA']
label_traindna=numpy.array(5*[-1.0] + 5*[1.0])
testdna=['AGCACGTACGTAGCTCGAT',
		      'AGACGTAGTCGTAGTCGTA',
		      'CAACGGGGGGGGGGTCGTA',
		      'CGACCTAGTCGTAGTCGTA',
		      'CGAACACAGTTATATAGTA',
		      'CGACCTAGTCGTAGTCGTA',
		      'CGACGTGGGGTTTTTCGTA',
		      'CGACGTAGTCCCAGCCCCA',
		      'CAAAAAAAAAAAACCAATA',
		      'CGACGGCCGGGGGGGCGTA']
label_test_dna=numpy.array(5*[-1.0] + 5*[1.0])
parameter_list = [[traindna,testdna,label_traindna,3,10,1e-5,1],[traindna,testdna,label_traindna,3,10,1e-5,1]]
def classifier_svmlight_linear_term (fm_train_dna=traindna,fm_test_dna=testdna, \
                                                label_train_dna=label_traindna,degree=3, \
                                                C=10,epsilon=1e-5,num_threads=1):
    from shogun import StringCharFeatures, BinaryLabels, DNA
    from shogun import WeightedDegreeStringKernel
    try:
        from shogun import SVMLight
    except ImportError:
        print("SVMLight is not available")
        exit(0)
    feats_train=StringCharFeatures(DNA)
    feats_train.set_features(fm_train_dna)
    feats_test=StringCharFeatures(DNA)
    feats_test.set_features(fm_test_dna)
    kernel=WeightedDegreeStringKernel(feats_train, feats_train, degree)
    labels=BinaryLabels(label_train_dna)
    svm=SVMLight(C, kernel, labels)
    svm.set_qpsize(3)
    svm.set_linear_term(-numpy.array([1,2,3,4,5,6,7,8,7,6], dtype=numpy.double))
    svm.set_epsilon(epsilon)
    svm.parallel.set_num_threads(num_threads)
    svm.train()
    kernel.init(feats_train, feats_test)
    out = svm.apply().get_labels()
    return out,kernel
if __name__=='__main__':
    print('SVMLight')
    classifier_svmlight_linear_term(*parameter_list[0])
 | 
	bsd-3-clause | 2,411,005,545,091,394,000 | 31.793651 | 110 | 0.648596 | false | 
| 
	TNT-Samuel/Coding-Projects | 
	Panda3DTesting/Perlin.py | 
	1 | 
	3727 | 
	from noise import pnoise2, snoise2
class ThreeDpoint:
    def __init__(self,x=0,y=0,z=0):
        self.set(x,y,z)
    def set(self,x,y,z):
        self.x = x
        self.y = y
        self.z = z
class ThreeDline:
    def __init__(self,p1=ThreeDpoint(),p2=ThreeDpoint()):
        self.set(p1,p2)
    def set(self,p1,p2):
        self.x1 = p1.x
        self.y1 = p1.y
        self.z1 = p1.z
        self.x2 = p2.x
        self.y2 = p2.y
        self.z2 = p2.z
mesh_size = 250
octaves = 1
scale = 2
freq = 32.0 * octaves
freq2 = 16.0 * octaves
freq3 = 8.0 * octaves
from math import pi, sin, cos, floor
import numpy
import colorsys
from direct.showbase.ShowBase import ShowBase
from direct.task import Task
from panda3d.core import LineSegs, NodePath
class MyApp(ShowBase):
    def __init__(self):
        ShowBase.__init__(self)
        points = []
        line_array = []
        print("Generating Noise...")
        for _y in range(mesh_size):
            print("{:.1f}%".format(100 * _y / mesh_size),end="\r")
            y = _y - (0.5 * mesh_size)
            points.append([])
            for _x in range(mesh_size):
                x = _x - (0.5 * mesh_size)
                perlin1 = int(snoise2(x / freq, y / freq, octaves) * 127.0 + 128.0)/128
                perlin2 = int(snoise2(x / freq, y / freq, octaves) * 127.0 + 128.0)/128
                perlin3 = int(snoise2(x / freq, y / freq, octaves) * 127.0 + 128.0)/256
                perlin4 = int(snoise2(x / freq2, y / freq2, octaves) * 127.0 + 128.0)/256
                perlin5 = int(snoise2(x / freq3, y / freq2, octaves) * 127.0 + 128.0)/256
                z = (perlin1 * perlin2 * perlin3) + perlin4 + perlin5
                # print(z)
                # z = 0
                points[_y].append(ThreeDpoint(x/scale,y/scale,z))
                # print(x/scale,y/scale,z)
        print("100.0%")
        print("Generating Lines...")
        for y in range(mesh_size-1):
            print("{:.1f}%".format(100 * y / mesh_size),end="\r")
            for x in range(mesh_size-1):
                line_array.append(ThreeDline(points[y][x],points[y+1][x]))
                line_array.append(ThreeDline(points[y][x],points[y][x+1]))
                line_array.append(ThreeDline(points[y][x],points[y+1][x+1]))
        y = mesh_size-1
        print("{:.1f}%".format(100 * y / mesh_size),end="\r")
        for x in range(mesh_size-1):
            line_array.append(ThreeDline(points[y][x],points[y][x+1]))
        x = mesh_size-1
        for y in range(mesh_size-1):
            line_array.append(ThreeDline(points[y][x],points[y+1][x]))
        print("100.0%")
        print("Rendering Lines...")
        line_array_len = len(line_array)
        old_percent = -1
        for i in range(line_array_len):
            percent = floor(1000 * i / line_array_len)
            if percent != old_percent:
                print("{:.1f}%".format(percent/10),end="\r")
                old_percent = percent
            pos = line_array[i]
            lines = LineSegs()
            lines.moveTo(pos.x1,pos.y1,pos.z1)
            lines.drawTo(pos.x2,pos.y2,pos.z2)
            lines.setThickness(1)
            node = lines.create()
            np = NodePath(node)
            np.reparentTo(self.render)
        print("100.0%")
        print("Scene Rendered.")
        self.taskMgr.add(self.spinCameraTask, "SpinCameraTask")
        self.setFrameRateMeter(True)
    def spinCameraTask(self, task):
        angleDegrees = task.time * 6.0
        angleRadians = angleDegrees * (pi / 180.0)
        self.camera.setPos(20 * sin(angleRadians), -20.0 * cos(angleRadians), 15)
        self.camera.setHpr(angleDegrees, -25, 0)
        return Task.cont
app = MyApp()
app.run() | 
	gpl-3.0 | 2,906,160,465,336,398,300 | 33.201835 | 89 | 0.529917 | false | 
| 
	jweste/odoo-addons-cpo | 
	purchase_compute_order/model/__init__.py | 
	3 | 
	1288 | 
	# -*- encoding: utf-8 -*-
##############################################################################
#
#    Purchase - Computed Purchase Order Module for Odoo
#    Copyright (C) 2013-Today GRAP (http://www.grap.coop)
#    @author Julien WESTE
#    @author Sylvain LE GAL (https://twitter.com/legalsylvain)
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import computed_purchase_order
from . import computed_purchase_order_line
from . import product_product
from . import res_partner
from . import product_supplierinfo
from . import update_products_wizard
 | 
	agpl-3.0 | -3,637,871,586,111,579,600 | 43.413793 | 78 | 0.642081 | false | 
| 
	thomazs/geraldo | 
	site/newsite/site-geraldo/django/db/models/sql/subqueries.py | 
	11 | 
	16588 | 
	"""
Query subclasses which provide extra functionality beyond simple data retrieval.
"""
from django.core.exceptions import FieldError
from django.db.models.sql.constants import *
from django.db.models.sql.datastructures import Date
from django.db.models.sql.query import Query
from django.db.models.sql.where import AND
__all__ = ['DeleteQuery', 'UpdateQuery', 'InsertQuery', 'DateQuery',
        'CountQuery']
class DeleteQuery(Query):
    """
    Delete queries are done through this class, since they are more constrained
    than general queries.
    """
    def as_sql(self):
        """
        Creates the SQL for this query. Returns the SQL string and list of
        parameters.
        """
        assert len(self.tables) == 1, \
                "Can only delete from one table at a time."
        result = ['DELETE FROM %s' % self.quote_name_unless_alias(self.tables[0])]
        where, params = self.where.as_sql()
        result.append('WHERE %s' % where)
        return ' '.join(result), tuple(params)
    def do_query(self, table, where):
        self.tables = [table]
        self.where = where
        self.execute_sql(None)
    def delete_batch_related(self, pk_list):
        """
        Set up and execute delete queries for all the objects related to the
        primary key values in pk_list. To delete the objects themselves, use
        the delete_batch() method.
        More than one physical query may be executed if there are a
        lot of values in pk_list.
        """
        from django.contrib.contenttypes import generic
        cls = self.model
        for related in cls._meta.get_all_related_many_to_many_objects():
            if not isinstance(related.field, generic.GenericRelation):
                for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
                    where = self.where_class()
                    where.add((None, related.field.m2m_reverse_name(),
                            related.field, 'in',
                            pk_list[offset : offset+GET_ITERATOR_CHUNK_SIZE]),
                            AND)
                    self.do_query(related.field.m2m_db_table(), where)
        for f in cls._meta.many_to_many:
            w1 = self.where_class()
            if isinstance(f, generic.GenericRelation):
                from django.contrib.contenttypes.models import ContentType
                field = f.rel.to._meta.get_field(f.content_type_field_name)
                w1.add((None, field.column, field, 'exact',
                        ContentType.objects.get_for_model(cls).id), AND)
            for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
                where = self.where_class()
                where.add((None, f.m2m_column_name(), f, 'in',
                        pk_list[offset : offset + GET_ITERATOR_CHUNK_SIZE]),
                        AND)
                if w1:
                    where.add(w1, AND)
                self.do_query(f.m2m_db_table(), where)
    def delete_batch(self, pk_list):
        """
        Set up and execute delete queries for all the objects in pk_list. This
        should be called after delete_batch_related(), if necessary.
        More than one physical query may be executed if there are a
        lot of values in pk_list.
        """
        for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
            where = self.where_class()
            field = self.model._meta.pk
            where.add((None, field.column, field, 'in',
                    pk_list[offset : offset + GET_ITERATOR_CHUNK_SIZE]), AND)
            self.do_query(self.model._meta.db_table, where)
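# Illustrative sketch (not part of Django; the table and column names are
# hypothetical): for a model stored in "app_item" with integer primary keys,
# delete_batch(pk_list) issues one query per GET_ITERATOR_CHUNK_SIZE chunk,
# roughly of the form:
#
#   DELETE FROM "app_item" WHERE "id" IN (1, 2, ..., 100)
#   DELETE FROM "app_item" WHERE "id" IN (101, 102, ...)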
class UpdateQuery(Query):
    """
    Represents an "update" SQL query.
    """
    def __init__(self, *args, **kwargs):
        super(UpdateQuery, self).__init__(*args, **kwargs)
        self._setup_query()
    def _setup_query(self):
        """
        Runs on initialization and after cloning. Any attributes that would
        normally be set in __init__ should go in here, instead, so that they
        are also set up after a clone() call.
        """
        self.values = []
        self.related_ids = None
        if not hasattr(self, 'related_updates'):
            self.related_updates = {}
    def clone(self, klass=None, **kwargs):
        return super(UpdateQuery, self).clone(klass,
                related_updates=self.related_updates.copy, **kwargs)
    def execute_sql(self, result_type=None):
        """
        Execute the specified update. Returns the number of rows affected by
        the primary update query (there could be other updates on related
        tables, but their rowcounts are not returned).
        """
        cursor = super(UpdateQuery, self).execute_sql(result_type)
        rows = cursor.rowcount
        del cursor
        for query in self.get_related_updates():
            query.execute_sql(result_type)
        return rows
    def as_sql(self):
        """
        Creates the SQL for this query. Returns the SQL string and list of
        parameters.
        """
        self.pre_sql_setup()
        if not self.values:
            return '', ()
        table = self.tables[0]
        qn = self.quote_name_unless_alias
        result = ['UPDATE %s' % qn(table)]
        result.append('SET')
        values, update_params = [], []
        for name, val, placeholder in self.values:
            if val is not None:
                values.append('%s = %s' % (qn(name), placeholder))
                update_params.append(val)
            else:
                values.append('%s = NULL' % qn(name))
        result.append(', '.join(values))
        where, params = self.where.as_sql()
        if where:
            result.append('WHERE %s' % where)
        return ' '.join(result), tuple(update_params + params)
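    # Illustrative sketch (hypothetical table/column names, not part of
    # Django): with self.values = [('name', 'x', '%s'), ('flag', None, '%s')],
    # as_sql() above would return roughly
    #
    #   UPDATE "app_item" SET "name" = %s, "flag" = NULL WHERE ...
    #
    # with parameters ('x',) plus whatever the where clause contributes.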
    def pre_sql_setup(self):
        """
        If the update depends on results from other tables, we need to do some
        munging of the "where" conditions to match the format required for
        (portable) SQL updates. That is done here.
        Further, if we are going to be running multiple updates, we pull out
        the id values to update at this point so that they don't change as a
        result of the progressive updates.
        """
        self.select_related = False
        self.clear_ordering(True)
        super(UpdateQuery, self).pre_sql_setup()
        count = self.count_active_tables()
        if not self.related_updates and count == 1:
            return
        # We need to use a sub-select in the where clause to filter on things
        # from other tables.
        query = self.clone(klass=Query)
        query.bump_prefix()
        query.extra_select = {}
        first_table = query.tables[0]
        if query.alias_refcount[first_table] == 1:
            # We can remove one table from the inner query.
            query.unref_alias(first_table)
            for i in xrange(1, len(query.tables)):
                table = query.tables[i]
                if query.alias_refcount[table]:
                    break
            join_info = query.alias_map[table]
            query.select = [(join_info[RHS_ALIAS], join_info[RHS_JOIN_COL])]
            must_pre_select = False
        else:
            query.select = []
            query.add_fields([query.model._meta.pk.name])
            must_pre_select = not self.connection.features.update_can_self_select
        # Now we adjust the current query: reset the where clause and get rid
        # of all the tables we don't need (since they're in the sub-select).
        self.where = self.where_class()
        if self.related_updates or must_pre_select:
            # Either we're using the idents in multiple update queries (so
            # don't want them to change), or the db backend doesn't support
            # selecting from the updating table (e.g. MySQL).
            idents = []
            for rows in query.execute_sql(MULTI):
                idents.extend([r[0] for r in rows])
            self.add_filter(('pk__in', idents))
            self.related_ids = idents
        else:
            # The fast path. Filters and updates in one query.
            self.add_filter(('pk__in', query))
        for alias in self.tables[1:]:
            self.alias_refcount[alias] = 0
    def clear_related(self, related_field, pk_list):
        """
        Set up and execute an update query that clears related entries for the
        keys in pk_list.
        This is used by the QuerySet.delete_objects() method.
        """
        for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
            self.where = self.where_class()
            f = self.model._meta.pk
            self.where.add((None, f.column, f, 'in',
                    pk_list[offset : offset + GET_ITERATOR_CHUNK_SIZE]),
                    AND)
            self.values = [(related_field.column, None, '%s')]
            self.execute_sql(None)
    def add_update_values(self, values):
        """
        Convert a dictionary of field name to value mappings into an update
        query. This is the entry point for the public update() method on
        querysets.
        """
        values_seq = []
        for name, val in values.iteritems():
            field, model, direct, m2m = self.model._meta.get_field_by_name(name)
            if not direct or m2m:
                raise FieldError('Cannot update model field %r (only non-relations and foreign keys permitted).' % field)
            values_seq.append((field, model, val))
        return self.add_update_fields(values_seq)
    def add_update_fields(self, values_seq):
        """
        Turn a sequence of (field, model, value) triples into an update query.
        Used by add_update_values() as well as the "fast" update path when
        saving models.
        """
        from django.db.models.base import Model
        for field, model, val in values_seq:
            # FIXME: Some sort of db_prep_* is probably more appropriate here.
            if field.rel and isinstance(val, Model):
                val = val.pk
            # Getting the placeholder for the field.
            if hasattr(field, 'get_placeholder'):
                placeholder = field.get_placeholder(val)
            else:
                placeholder = '%s'
            if model:
                self.add_related_update(model, field.column, val, placeholder)
            else:
                self.values.append((field.column, val, placeholder))
    def add_related_update(self, model, column, value, placeholder):
        """
        Adds (name, value) to an update query for an ancestor model.
        Updates are coalesced so that we only run one update query per ancestor.
        """
        try:
            self.related_updates[model].append((column, value, placeholder))
        except KeyError:
            self.related_updates[model] = [(column, value, placeholder)]
    def get_related_updates(self):
        """
        Returns a list of query objects: one for each update required to an
        ancestor model. Each query will have the same filtering conditions as
        the current query but will only update a single table.
        """
        if not self.related_updates:
            return []
        result = []
        for model, values in self.related_updates.iteritems():
            query = UpdateQuery(model, self.connection)
            query.values = values
            if self.related_ids:
                query.add_filter(('pk__in', self.related_ids))
            result.append(query)
        return result
class InsertQuery(Query):
    def __init__(self, *args, **kwargs):
        super(InsertQuery, self).__init__(*args, **kwargs)
        self.columns = []
        self.values = []
        self.params = ()
    def clone(self, klass=None, **kwargs):
        extras = {'columns': self.columns[:], 'values': self.values[:],
                'params': self.params}
        return super(InsertQuery, self).clone(klass, extras)
    def as_sql(self):
        # We don't need quote_name_unless_alias() here, since these are all
        # going to be column names (so we can avoid the extra overhead).
        qn = self.connection.ops.quote_name
        result = ['INSERT INTO %s' % qn(self.model._meta.db_table)]
        result.append('(%s)' % ', '.join([qn(c) for c in self.columns]))
        result.append('VALUES (%s)' % ', '.join(self.values))
        return ' '.join(result), self.params
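    # Illustrative sketch (hypothetical names, not part of Django): with
    # self.columns = ['name', 'price'] and self.values = ['%s', '%s'],
    # as_sql() above returns roughly
    #
    #   ('INSERT INTO `app_item` (`name`, `price`) VALUES (%s, %s)', params)
    #
    # where the quoting characters depend on the backend's quote_name().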
    def execute_sql(self, return_id=False):
        cursor = super(InsertQuery, self).execute_sql(None)
        if return_id:
            return self.connection.ops.last_insert_id(cursor,
                    self.model._meta.db_table, self.model._meta.pk.column)
    def insert_values(self, insert_values, raw_values=False):
        """
        Set up the insert query from the 'insert_values' dictionary. The
        dictionary gives the model field names and their target values.
        If 'raw_values' is True, the values in the 'insert_values' dictionary
        are inserted directly into the query, rather than passed as SQL
        parameters. This provides a way to insert NULL and DEFAULT keywords
        into the query, for example.
        """
        placeholders, values = [], []
        for field, val in insert_values:
            if hasattr(field, 'get_placeholder'):
                # Some fields (e.g. geo fields) need special munging before
                # they can be inserted.
                placeholders.append(field.get_placeholder(val))
            else:
                placeholders.append('%s')
            self.columns.append(field.column)
            values.append(val)
        if raw_values:
            self.values.extend(values)
        else:
            self.params += tuple(values)
            self.values.extend(placeholders)
class DateQuery(Query):
    """
    A DateQuery is a normal query, except that it specifically selects a single
    date field. This requires some special handling when converting the results
    back to Python objects, so we put it in a separate class.
    """
    def __getstate__(self):
        """
        Special DateQuery-specific pickle handling.
        """
        for elt in self.select:
            if isinstance(elt, Date):
                # Eliminate a method reference that can't be pickled. The
                # __setstate__ method restores this.
                elt.date_sql_func = None
        return super(DateQuery, self).__getstate__()
    def __setstate__(self, obj_dict):
        super(DateQuery, self).__setstate__(obj_dict)
        for elt in self.select:
            if isinstance(elt, Date):
                self.date_sql_func = self.connection.ops.date_trunc_sql
    def results_iter(self):
        """
        Returns an iterator over the results from executing this query.
        """
        resolve_columns = hasattr(self, 'resolve_columns')
        if resolve_columns:
            from django.db.models.fields import DateTimeField
            fields = [DateTimeField()]
        else:
            from django.db.backends.util import typecast_timestamp
            needs_string_cast = self.connection.features.needs_datetime_string_cast
        offset = len(self.extra_select)
        for rows in self.execute_sql(MULTI):
            for row in rows:
                date = row[offset]
                if resolve_columns:
                    date = self.resolve_columns(row, fields)[offset]
                elif needs_string_cast:
                    date = typecast_timestamp(str(date))
                yield date
    def add_date_select(self, field, lookup_type, order='ASC'):
        """
        Converts the query into a date extraction query.
        """
        result = self.setup_joins([field.name], self.get_meta(),
                self.get_initial_alias(), False)
        alias = result[3][-1]
        select = Date((alias, field.column), lookup_type,
                self.connection.ops.date_trunc_sql)
        self.select = [select]
        self.select_fields = [None]
        self.select_related = False # See #7097.
        self.distinct = True
        self.order_by = order == 'ASC' and [1] or [-1]
class CountQuery(Query):
    """
    A CountQuery knows how to take a normal query which would select over
    multiple distinct columns and turn it into SQL that can be used on a
    variety of backends (it requires a select in the FROM clause).
    """
    def get_from_clause(self):
        result, params = self._query.as_sql()
        return ['(%s) A1' % result], params
    def get_ordering(self):
        return ()
 | 
	lgpl-3.0 | -7,065,108,444,840,858,000 | 39.360097 | 121 | 0.580781 | false | 
| 
	sailthru/relay | 
	relay/relay_logging.py | 
	1 | 
	2013 | 
	import logging
import json
from colorlog import ColoredFormatter
from relay import log
def configure_logging(add_handler, log=log):
    """
    Configure log records.  If adding a handler, make the formatter print all
    passed in key:value data.
        i.e. log.info('msg', extra=dict(a=1))
        generates  'msg  a=1'
    `add_handler` (True, False, None, or Handler instance)
        if True, add a logging.StreamHandler() instance
        if False, do not add any handlers.
        if given a handler instance, add that to the logger
    """
    _ignore_log_keys = set(logging.makeLogRecord({}).__dict__)
    def _json_format(record):
        extras = ' '.join(
            "%s=%s" % (k, record.__dict__[k])
            for k in set(record.__dict__).difference(_ignore_log_keys))
        if extras:
            record.msg = "%s    %s" % (record.msg, extras)
        return record
    class ColoredJsonFormatter(ColoredFormatter):
        def format(self, record):
            record = _json_format(record)
            return super(ColoredJsonFormatter, self).format(record)
    if isinstance(add_handler, logging.Handler):
        log.addHandler(add_handler)
    elif add_handler is True:
        if not any(isinstance(h, logging.StreamHandler) for h in log.handlers):
            _h = logging.StreamHandler()
            _h.setFormatter(ColoredJsonFormatter(
                "%(log_color)s%(levelname)-8s %(message)s %(reset)s %(cyan)s",
                reset=True
            ))
            log.addHandler(_h)
    elif not log.handlers:
        log.addHandler(logging.NullHandler())
    log.setLevel(logging.DEBUG)
    log.propagate = False
    return log
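# Illustrative usage sketch (the extra keys shown are hypothetical, not
# defined by this module):
#
#   log = configure_logging(True)
#   log.info('job finished', extra=dict(task_id=42, duration=1.3))
#   # prints something like: "INFO     job finished    task_id=42 duration=1.3"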
def add_zmq_log_handler(address):
    import zmq.log.handlers
    class JSONPubHandler(zmq.log.handlers.PUBHandler):
        def format(self, record):
            return json.dumps(record.__dict__)
    sock = zmq.Context().socket(zmq.PUB)
    sock.connect(address)
    handler = JSONPubHandler(sock)
    return configure_logging(handler)
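# Sketch of a matching subscriber (the endpoint is only an example; use the
# address that was passed to add_zmq_log_handler, and note that the
# bind/connect topology is a deployment choice, not fixed by this module):
#
#   import zmq, json
#   sub = zmq.Context().socket(zmq.SUB)
#   sub.bind('tcp://127.0.0.1:5558')
#   sub.setsockopt_string(zmq.SUBSCRIBE, '')
#   topic, payload = sub.recv_multipart()   # PUBHandler sends (topic, message)
#   record_dict = json.loads(payload)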
 | 
	apache-2.0 | -4,460,430,520,419,536,400 | 32.55 | 79 | 0.622454 | false | 
| 
	rossburton/yocto-autobuilder | 
	lib/python2.7/site-packages/SQLAlchemy-0.8.0b2-py2.7-linux-x86_64.egg/sqlalchemy/orm/collections.py | 
	6 | 
	55323 | 
	# orm/collections.py
# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Support for collections of mapped entities.
The collections package supplies the machinery used to inform the ORM of
collection membership changes.  An instrumentation via decoration approach is
used, allowing arbitrary types (including built-ins) to be used as entity
collections without requiring inheritance from a base class.
Instrumentation decoration relays membership change events to the
``InstrumentedCollectionAttribute`` that is currently managing the collection.
The decorators observe function call arguments and return values, tracking
entities entering or leaving the collection.  Two decorator approaches are
provided.  One is a bundle of generic decorators that map function arguments
and return values to events::
  from sqlalchemy.orm.collections import collection
  class MyClass(object):
      # ...
      @collection.adds(1)
      def store(self, item):
          self.data.append(item)
      @collection.removes_return()
      def pop(self):
          return self.data.pop()
The second approach is a bundle of targeted decorators that wrap appropriate
append and remove notifiers around the mutation methods present in the
standard Python ``list``, ``set`` and ``dict`` interfaces.  These could be
specified in terms of generic decorator recipes, but are instead hand-tooled
for increased efficiency.  The targeted decorators occasionally implement
adapter-like behavior, such as mapping bulk-set methods (``extend``,
``update``, ``__setslice__``, etc.) into the series of atomic mutation events
that the ORM requires.
The targeted decorators are used internally for automatic instrumentation of
entity collection classes.  Every collection class goes through a
transformation process roughly like so:
1. If the class is a built-in, substitute a trivial sub-class
2. Is this class already instrumented?
3. Add in generic decorators
4. Sniff out the collection interface through duck-typing
5. Add targeted decoration to any undecorated interface method
This process modifies the class at runtime, decorating methods and adding some
bookkeeping properties.  This isn't possible (or desirable) for built-in
classes like ``list``, so trivial sub-classes are substituted to hold
decoration::
  class InstrumentedList(list):
      pass
Collection classes can be specified in ``relationship(collection_class=)`` as
types or a function that returns an instance.  Collection classes are
inspected and instrumented during the mapper compilation phase.  The
collection_class callable will be executed once to produce a specimen
instance, and the type of that specimen will be instrumented.  Functions that
return built-in types like ``lists`` will be adapted to produce instrumented
instances.
When extending a known type like ``list``, additional decorations are
generally not needed.  Odds are, the extension method will delegate to a
method that's already instrumented.  For example::
  class QueueIsh(list):
     def push(self, item):
         self.append(item)
     def shift(self):
         return self.pop(0)
There's no need to decorate these methods.  ``append`` and ``pop`` are already
instrumented as part of the ``list`` interface.  Decorating them would fire
duplicate events, which should be avoided.
The targeted decoration tries not to rely on other methods in the underlying
collection class, but some are unavoidable.  Many depend on 'read' methods
being present to properly instrument a 'write', for example, ``__setitem__``
needs ``__getitem__``.  "Bulk" methods like ``update`` and ``extend`` may also
be reimplemented in terms of atomic appends and removes, so the ``extend``
decoration will actually perform many ``append`` operations and not call the
underlying method at all.
Tight control over bulk operation and the firing of events is also possible by
implementing the instrumentation internally in your methods.  The basic
instrumentation package works under the general assumption that collection
mutation will not raise unusual exceptions.  If you want to closely
orchestrate append and remove events with exception management, internal
instrumentation may be the answer.  Within your method,
``collection_adapter(self)`` will retrieve an object that you can use for
explicit control over triggering append and remove events.
The owning object and InstrumentedCollectionAttribute are also reachable
through the adapter, allowing for some very sophisticated behavior.
"""
import copy
import inspect
import operator
import weakref
from ..sql import expression
from .. import util, exc as sa_exc
orm_util = util.importlater("sqlalchemy.orm", "util")
attributes = util.importlater("sqlalchemy.orm", "attributes")
__all__ = ['collection', 'collection_adapter',
           'mapped_collection', 'column_mapped_collection',
           'attribute_mapped_collection']
__instrumentation_mutex = util.threading.Lock()
class _PlainColumnGetter(object):
    """Plain column getter, stores collection of Column objects
    directly.
    Serializes to a :class:`._SerializableColumnGetterV2`
    which has more expensive __call__() performance
    and some rare caveats.
    """
    def __init__(self, cols):
        self.cols = cols
        self.composite = len(cols) > 1
    def __reduce__(self):
        return _SerializableColumnGetterV2._reduce_from_cols(self.cols)
    def _cols(self, mapper):
        return self.cols
    def __call__(self, value):
        state = attributes.instance_state(value)
        m = orm_util._state_mapper(state)
        key = [
            m._get_state_attr_by_column(state, state.dict, col)
            for col in self._cols(m)
        ]
        if self.composite:
            return tuple(key)
        else:
            return key[0]
class _SerializableColumnGetter(object):
    """Column-based getter used in version 0.7.6 only.
    Remains here for pickle compatibility with 0.7.6.
    """
    def __init__(self, colkeys):
        self.colkeys = colkeys
        self.composite = len(colkeys) > 1
    def __reduce__(self):
        return _SerializableColumnGetter, (self.colkeys,)
    def __call__(self, value):
        state = attributes.instance_state(value)
        m = orm_util._state_mapper(state)
        key = [m._get_state_attr_by_column(
                        state, state.dict,
                        m.mapped_table.columns[k])
                     for k in self.colkeys]
        if self.composite:
            return tuple(key)
        else:
            return key[0]
class _SerializableColumnGetterV2(_PlainColumnGetter):
    """Updated serializable getter which deals with
    multi-table mapped classes.
    Two extremely unusual cases are not supported.
    Mappings which have tables across multiple metadata
    objects, or which are mapped to non-Table selectables
    linked across inheriting mappers may fail to function
    here.
    """
    def __init__(self, colkeys):
        self.colkeys = colkeys
        self.composite = len(colkeys) > 1
    def __reduce__(self):
        return self.__class__, (self.colkeys,)
    @classmethod
    def _reduce_from_cols(cls, cols):
        def _table_key(c):
            if not isinstance(c.table, expression.TableClause):
                return None
            else:
                return c.table.key
        colkeys = [(c.key, _table_key(c)) for c in cols]
        return _SerializableColumnGetterV2, (colkeys,)
    def _cols(self, mapper):
        cols = []
        metadata = getattr(mapper.local_table, 'metadata', None)
        for (ckey, tkey) in self.colkeys:
            if tkey is None or \
                metadata is None or \
                tkey not in metadata:
                cols.append(mapper.local_table.c[ckey])
            else:
                cols.append(metadata.tables[tkey].c[ckey])
        return cols
def column_mapped_collection(mapping_spec):
    """A dictionary-based collection type with column-based keying.
    Returns a :class:`.MappedCollection` factory with a keying function
    generated from mapping_spec, which may be a Column or a sequence
    of Columns.
    The key value must be immutable for the lifetime of the object.  You
    can not, for example, map on foreign key values if those key values will
    change during the session, i.e. from None to a database-assigned integer
    after a session flush.
    """
    cols = [expression._only_column_elements(q, "mapping_spec")
                for q in util.to_list(mapping_spec)
            ]
    keyfunc = _PlainColumnGetter(cols)
    return lambda: MappedCollection(keyfunc)
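# Illustrative usage sketch (the Note class and 'keyword' column are
# hypothetical, not part of this module):
#
#   relationship("Note",
#       collection_class=column_mapped_collection(Note.__table__.c.keyword))
#
# Loaded Note objects are then keyed by the value of their 'keyword' column.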
class _SerializableAttrGetter(object):
    def __init__(self, name):
        self.name = name
        self.getter = operator.attrgetter(name)
    def __call__(self, target):
        return self.getter(target)
    def __reduce__(self):
        return _SerializableAttrGetter, (self.name, )
def attribute_mapped_collection(attr_name):
    """A dictionary-based collection type with attribute-based keying.
    Returns a :class:`.MappedCollection` factory with a keying based on the
    'attr_name' attribute of entities in the collection, where ``attr_name``
    is the string name of the attribute.
    The key value must be immutable for the lifetime of the object.  You
    can not, for example, map on foreign key values if those key values will
    change during the session, i.e. from None to a database-assigned integer
    after a session flush.
    """
    getter = _SerializableAttrGetter(attr_name)
    return lambda: MappedCollection(getter)
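# Illustrative usage sketch (the model and attribute names are hypothetical):
#
#   class Item(Base):
#       notes = relationship("Note",
#           collection_class=attribute_mapped_collection('keyword'))
#
#   item.notes['color']   # -> the Note whose .keyword == 'color'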
def mapped_collection(keyfunc):
    """A dictionary-based collection type with arbitrary keying.
    Returns a :class:`.MappedCollection` factory with a keying function
    generated from keyfunc, a callable that takes an entity and returns a
    key value.
    The key value must be immutable for the lifetime of the object.  You
    can not, for example, map on foreign key values if those key values will
    change during the session, i.e. from None to a database-assigned integer
    after a session flush.
    """
    return lambda: MappedCollection(keyfunc)
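# Illustrative usage sketch (the keying callable shown is hypothetical):
#
#   relationship("Note",
#       collection_class=mapped_collection(lambda note: note.text[:10]))
#
# Each loaded entity is stored under the key returned by the callable.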
class collection(object):
    """Decorators for entity collection classes.
    The decorators fall into two groups: annotations and interception recipes.
    The annotating decorators (appender, remover, iterator,
    internally_instrumented, link) indicate the method's purpose and take no
    arguments.  They are not written with parens::
        @collection.appender
        def append(self, append): ...
    The recipe decorators all require parens, even those that take no
    arguments::
        @collection.adds('entity')
        def insert(self, position, entity): ...
        @collection.removes_return()
        def popitem(self): ...
    """
    # Bundled as a class solely for ease of use: packaging, doc strings,
    # importability.
    @staticmethod
    def appender(fn):
        """Tag the method as the collection appender.
        The appender method is called with one positional argument: the value
        to append. The method will be automatically decorated with 'adds(1)'
        if not already decorated::
            @collection.appender
            def add(self, append): ...
            # or, equivalently
            @collection.appender
            @collection.adds(1)
            def add(self, append): ...
            # for mapping type, an 'append' may kick out a previous value
            # that occupies that slot.  consider d['a'] = 'foo'- any previous
            # value in d['a'] is discarded.
            @collection.appender
            @collection.replaces(1)
            def add(self, entity):
                key = some_key_func(entity)
                previous = None
                if key in self:
                    previous = self[key]
                self[key] = entity
                return previous
        If the value to append is not allowed in the collection, you may
        raise an exception.  Something to remember is that the appender
        will be called for each object mapped by a database query.  If the
        database contains rows that violate your collection semantics, you
        will need to get creative to fix the problem, as access via the
        collection will not work.
        If the appender method is internally instrumented, you must also
        receive the keyword argument '_sa_initiator' and ensure its
        promulgation to collection events.
        """
        setattr(fn, '_sa_instrument_role', 'appender')
        return fn
    @staticmethod
    def remover(fn):
        """Tag the method as the collection remover.
        The remover method is called with one positional argument: the value
        to remove. The method will be automatically decorated with
        :meth:`removes_return` if not already decorated::
            @collection.remover
            def zap(self, entity): ...
            # or, equivalently
            @collection.remover
            @collection.removes_return()
            def zap(self): ...
        If the value to remove is not present in the collection, you may
        raise an exception or return None to ignore the error.
        If the remove method is internally instrumented, you must also
        receive the keyword argument '_sa_initiator' and ensure its
        promulgation to collection events.
        """
        setattr(fn, '_sa_instrument_role', 'remover')
        return fn
    @staticmethod
    def iterator(fn):
        """Tag the method as the collection remover.
        The iterator method is called with no arguments.  It is expected to
        return an iterator over all collection members::
            @collection.iterator
            def __iter__(self): ...
        """
        setattr(fn, '_sa_instrument_role', 'iterator')
        return fn
    @staticmethod
    def internally_instrumented(fn):
        """Tag the method as instrumented.
        This tag will prevent any decoration from being applied to the
        method. Use this if you are orchestrating your own calls to
        :func:`.collection_adapter` in one of the basic SQLAlchemy
        interface methods, or to prevent an automatic ABC method
        decoration from wrapping your implementation::
            # normally an 'extend' method on a list-like class would be
            # automatically intercepted and re-implemented in terms of
            # SQLAlchemy events and append().  your implementation will
            # never be called, unless:
            @collection.internally_instrumented
            def extend(self, items): ...
        """
        setattr(fn, '_sa_instrumented', True)
        return fn
    @staticmethod
    def link(fn):
        """Tag the method as a the "linked to attribute" event handler.
        This optional event handler will be called when the collection class
        is linked to or unlinked from the InstrumentedAttribute.  It is
        invoked immediately after the '_sa_adapter' property is set on
        the instance.  A single argument is passed: the collection adapter
        that has been linked, or None if unlinking.
        """
        setattr(fn, '_sa_instrument_role', 'link')
        return fn
    @staticmethod
    def converter(fn):
        """Tag the method as the collection converter.
        This optional method will be called when a collection is being
        replaced entirely, as in::
            myobj.acollection = [newvalue1, newvalue2]
        The converter method will receive the object being assigned and should
        return an iterable of values suitable for use by the ``appender``
        method.  A converter must not assign values or mutate the collection,
        its sole job is to adapt the value the user provides into an iterable
        of values for the ORM's use.
        The default converter implementation will use duck-typing to do the
        conversion.  A dict-like collection will be converted into an iterable
        of dictionary values, and other types will simply be iterated::
            @collection.converter
            def convert(self, other): ...
        If the duck-typing of the object does not match the type of this
        collection, a TypeError is raised.
        Supply an implementation of this method if you want to expand the
        range of possible types that can be assigned in bulk or perform
        validation on the values about to be assigned.
        """
        setattr(fn, '_sa_instrument_role', 'converter')
        return fn
    @staticmethod
    def adds(arg):
        """Mark the method as adding an entity to the collection.
        Adds "add to collection" handling to the method.  The decorator
        argument indicates which method argument holds the SQLAlchemy-relevant
        value.  Arguments can be specified positionally (i.e. integer) or by
        name::
            @collection.adds(1)
            def push(self, item): ...
            @collection.adds('entity')
            def do_stuff(self, thing, entity=None): ...
        """
        def decorator(fn):
            setattr(fn, '_sa_instrument_before', ('fire_append_event', arg))
            return fn
        return decorator
    @staticmethod
    def replaces(arg):
        """Mark the method as replacing an entity in the collection.
        Adds "add to collection" and "remove from collection" handling to
        the method.  The decorator argument indicates which method argument
        holds the SQLAlchemy-relevant value to be added, and return value, if
        any will be considered the value to remove.
        Arguments can be specified positionally (i.e. integer) or by name::
            @collection.replaces(2)
            def __setitem__(self, index, item): ...
        """
        def decorator(fn):
            setattr(fn, '_sa_instrument_before', ('fire_append_event', arg))
            setattr(fn, '_sa_instrument_after', 'fire_remove_event')
            return fn
        return decorator
    @staticmethod
    def removes(arg):
        """Mark the method as removing an entity in the collection.
        Adds "remove from collection" handling to the method.  The decorator
        argument indicates which method argument holds the SQLAlchemy-relevant
        value to be removed. Arguments can be specified positionally (i.e.
        integer) or by name::
            @collection.removes(1)
            def zap(self, item): ...
        For methods where the value to remove is not known at call-time, use
        collection.removes_return.
        """
        def decorator(fn):
            setattr(fn, '_sa_instrument_before', ('fire_remove_event', arg))
            return fn
        return decorator
    @staticmethod
    def removes_return():
        """Mark the method as removing an entity in the collection.
        Adds "remove from collection" handling to the method.  The return value
        of the method, if any, is considered the value to remove.  The method
        arguments are not inspected::
            @collection.removes_return()
            def pop(self): ...
        For methods where the value to remove is known at call-time, use
        collection.remove.
        """
        def decorator(fn):
            setattr(fn, '_sa_instrument_after', 'fire_remove_event')
            return fn
        return decorator
# public instrumentation interface for 'internally instrumented'
# implementations
def collection_adapter(collection):
    """Fetch the :class:`.CollectionAdapter` for a collection."""
    return getattr(collection, '_sa_adapter', None)
def collection_iter(collection):
    """Iterate over an object supporting the @iterator or __iter__ protocols.
    If the collection is an ORM collection, it need not be attached to an
    object to be iterable.
    """
    try:
        return getattr(collection, '_sa_iterator',
                       getattr(collection, '__iter__'))()
    except AttributeError:
        raise TypeError("'%s' object is not iterable" %
                        type(collection).__name__)
class CollectionAdapter(object):
    """Bridges between the ORM and arbitrary Python collections.
    Proxies base-level collection operations (append, remove, iterate)
    to the underlying Python collection, and emits add/remove events for
    entities entering or leaving the collection.
    The ORM uses :class:`.CollectionAdapter` exclusively for interaction with
    entity collections.
    The usage of getattr()/setattr() is currently to allow injection
    of custom methods, such as to unwrap Zope security proxies.
    """
    invalidated = False
    def __init__(self, attr, owner_state, data):
        self._key = attr.key
        self._data = weakref.ref(data)
        self.owner_state = owner_state
        self.link_to_self(data)
    def _warn_invalidated(self):
        util.warn("This collection has been invalidated.")
    @property
    def data(self):
        "The entity collection being adapted."
        return self._data()
    @util.memoized_property
    def attr(self):
        return self.owner_state.manager[self._key].impl
    def link_to_self(self, data):
        """Link a collection to this adapter, and fire a link event."""
        setattr(data, '_sa_adapter', self)
        if hasattr(data, '_sa_on_link'):
            getattr(data, '_sa_on_link')(self)
    def unlink(self, data):
        """Unlink a collection from any adapter, and fire a link event."""
        setattr(data, '_sa_adapter', None)
        if hasattr(data, '_sa_on_link'):
            getattr(data, '_sa_on_link')(None)
    def adapt_like_to_iterable(self, obj):
        """Converts collection-compatible objects to an iterable of values.
        Can be passed any type of object, and if the underlying collection
        determines that it can be adapted into a stream of values it can
        use, returns an iterable of values suitable for append()ing.
        This method may raise TypeError or any other suitable exception
        if adaptation fails.
        If a converter implementation is not supplied on the collection,
        a default duck-typing-based implementation is used.
        """
        converter = getattr(self._data(), '_sa_converter', None)
        if converter is not None:
            return converter(obj)
        setting_type = util.duck_type_collection(obj)
        receiving_type = util.duck_type_collection(self._data())
        if obj is None or setting_type != receiving_type:
            given = obj is None and 'None' or obj.__class__.__name__
            if receiving_type is None:
                wanted = self._data().__class__.__name__
            else:
                wanted = receiving_type.__name__
            raise TypeError(
                "Incompatible collection type: %s is not %s-like" % (
                given, wanted))
        # If the object is an adapted collection, return the (iterable)
        # adapter.
        if getattr(obj, '_sa_adapter', None) is not None:
            return getattr(obj, '_sa_adapter')
        elif setting_type == dict:
            # Py3K
            #return obj.values()
            # Py2K
            return getattr(obj, 'itervalues', getattr(obj, 'values'))()
            # end Py2K
        else:
            return iter(obj)
    def append_with_event(self, item, initiator=None):
        """Add an entity to the collection, firing mutation events."""
        getattr(self._data(), '_sa_appender')(item, _sa_initiator=initiator)
    def append_without_event(self, item):
        """Add or restore an entity to the collection, firing no events."""
        getattr(self._data(), '_sa_appender')(item, _sa_initiator=False)
    def append_multiple_without_event(self, items):
        """Add or restore an entity to the collection, firing no events."""
        appender = getattr(self._data(), '_sa_appender')
        for item in items:
            appender(item, _sa_initiator=False)
    def remove_with_event(self, item, initiator=None):
        """Remove an entity from the collection, firing mutation events."""
        getattr(self._data(), '_sa_remover')(item, _sa_initiator=initiator)
    def remove_without_event(self, item):
        """Remove an entity from the collection, firing no events."""
        getattr(self._data(), '_sa_remover')(item, _sa_initiator=False)
    def clear_with_event(self, initiator=None):
        """Empty the collection, firing a mutation event for each entity."""
        remover = getattr(self._data(), '_sa_remover')
        for item in list(self):
            remover(item, _sa_initiator=initiator)
    def clear_without_event(self):
        """Empty the collection, firing no events."""
        remover = getattr(self._data(), '_sa_remover')
        for item in list(self):
            remover(item, _sa_initiator=False)
    def __iter__(self):
        """Iterate over entities in the collection."""
        # Py3K requires iter() here
        return iter(getattr(self._data(), '_sa_iterator')())
    def __len__(self):
        """Count entities in the collection."""
        return len(list(getattr(self._data(), '_sa_iterator')()))
    def __nonzero__(self):
        return True
    def fire_append_event(self, item, initiator=None):
        """Notify that a entity has entered the collection.
        Initiator is a token owned by the InstrumentedAttribute that
        initiated the membership mutation, and should be left as None
        unless you are passing along an initiator value from a chained
        operation.
        """
        if initiator is not False:
            if self.invalidated:
                self._warn_invalidated()
            return self.attr.fire_append_event(
                                    self.owner_state,
                                    self.owner_state.dict,
                                    item, initiator)
        else:
            return item
    def fire_remove_event(self, item, initiator=None):
        """Notify that a entity has been removed from the collection.
        Initiator is the InstrumentedAttribute that initiated the membership
        mutation, and should be left as None unless you are passing along
        an initiator value from a chained operation.
        """
        if initiator is not False:
            if self.invalidated:
                self._warn_invalidated()
            self.attr.fire_remove_event(
                                    self.owner_state,
                                    self.owner_state.dict,
                                    item, initiator)
    def fire_pre_remove_event(self, initiator=None):
        """Notify that an entity is about to be removed from the collection.
        Only called if the entity cannot be removed after calling
        fire_remove_event().
        """
        if self.invalidated:
            self._warn_invalidated()
        self.attr.fire_pre_remove_event(
                                    self.owner_state,
                                    self.owner_state.dict,
                                    initiator=initiator)
    def __getstate__(self):
        return {'key': self._key,
                'owner_state': self.owner_state,
                'data': self.data}
    def __setstate__(self, d):
        self._key = d['key']
        self.owner_state = d['owner_state']
        self._data = weakref.ref(d['data'])
def bulk_replace(values, existing_adapter, new_adapter):
    """Load a new collection, firing events based on prior like membership.
    Appends instances in ``values`` onto the ``new_adapter``. Events will be
    fired for any instance not present in the ``existing_adapter``.  Any
    instances in ``existing_adapter`` not present in ``values`` will have
    remove events fired upon them.
    :param values: An iterable of collection member instances
    :param existing_adapter: A :class:`.CollectionAdapter` of
     instances to be replaced
    :param new_adapter: An empty :class:`.CollectionAdapter`
     to load with ``values``
    """
    if not isinstance(values, list):
        values = list(values)
    idset = util.IdentitySet
    constants = idset(existing_adapter or ()).intersection(values or ())
    additions = idset(values or ()).difference(constants)
    removals = idset(existing_adapter or ()).difference(constants)
    for member in values or ():
        if member in additions:
            new_adapter.append_with_event(member)
        elif member in constants:
            new_adapter.append_without_event(member)
    if existing_adapter:
        for member in removals:
            existing_adapter.remove_with_event(member)
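# Hedged editorial sketch, not part of the original module: a hypothetical
# helper illustrating how bulk_replace() partitions incoming members by
# identity into constants (re-appended without events), additions (appended
# with events) and removals (removed with events).  All names are made up.
def _bulk_replace_partition_example():
    class Member(object):
        pass
    a, b, c, d = Member(), Member(), Member(), Member()
    existing, incoming = [a, b, c], [b, c, d]
    constants = [m for m in incoming if any(m is e for e in existing)]
    additions = [m for m in incoming if not any(m is e for e in existing)]
    removals = [e for e in existing if not any(e is m for m in incoming)]
    # here: constants == [b, c], additions == [d], removals == [a]
    return constants, additions, removals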
def prepare_instrumentation(factory):
    """Prepare a callable for future use as a collection class factory.
    Given a collection class factory (either a type or no-arg callable),
    return another factory that will produce compatible instances when
    called.
    This function is responsible for converting collection_class=list
    into the run-time behavior of collection_class=InstrumentedList.
    """
    # Convert a builtin to 'Instrumented*'
    if factory in __canned_instrumentation:
        factory = __canned_instrumentation[factory]
    # Create a specimen
    cls = type(factory())
    # Did factory callable return a builtin?
    if cls in __canned_instrumentation:
        # Wrap it so that it returns our 'Instrumented*'
        factory = __converting_factory(factory)
        cls = factory()
    # Instrument the class if needed.
    if __instrumentation_mutex.acquire():
        try:
            if getattr(cls, '_sa_instrumented', None) != id(cls):
                _instrument_class(cls)
        finally:
            __instrumentation_mutex.release()
    return factory
def __converting_factory(original_factory):
    """Convert the type returned by collection factories on the fly.
    Given a collection factory that returns a builtin type (e.g. a list),
    return a wrapped function that converts that type to one of our
    instrumented types.
    """
    def wrapper():
        collection = original_factory()
        type_ = type(collection)
        if type_ in __canned_instrumentation:
            # return an instrumented type initialized from the factory's
            # collection
            return __canned_instrumentation[type_](collection)
        else:
            raise sa_exc.InvalidRequestError(
                "Collection class factories must produce instances of a "
                "single class.")
    try:
        # often flawed but better than nothing
        wrapper.__name__ = "%sWrapper" % original_factory.__name__
        wrapper.__doc__ = original_factory.__doc__
    except:
        pass
    return wrapper
def _instrument_class(cls):
    """Modify methods in a class and install instrumentation."""
    # this can be documented as a decoratorless
    # option for specifying instrumentation.  (likely doc'd here in code only,
    # not in online docs.)  Useful for C types too.
    #
    # __instrumentation__ = {
    #   'rolename': 'methodname', # ...
    #   'methods': {
    #     'methodname': ('fire_{append,remove}_event', argspec,
    #                    'fire_{append,remove}_event'),
    #     'append': ('fire_append_event', 1, None),
    #     '__setitem__': ('fire_append_event', 1, 'fire_remove_event'),
    #     'pop': (None, None, 'fire_remove_event'),
    #     }
    #  }
    # In the normal call flow, a request for any of the 3 basic collection
    # types is transformed into one of our trivial subclasses
    # (e.g. InstrumentedList).  Catch anything else that sneaks in here...
    if cls.__module__ == '__builtin__':
        raise sa_exc.ArgumentError(
            "Can not instrument a built-in type. Use a "
            "subclass, even a trivial one.")
    collection_type = util.duck_type_collection(cls)
    if collection_type in __interfaces:
        roles = __interfaces[collection_type].copy()
        decorators = roles.pop('_decorators', {})
    else:
        roles, decorators = {}, {}
    if hasattr(cls, '__instrumentation__'):
        roles.update(copy.deepcopy(getattr(cls, '__instrumentation__')))
    methods = roles.pop('methods', {})
    for name in dir(cls):
        method = getattr(cls, name, None)
        if not util.callable(method):
            continue
        # note role declarations
        if hasattr(method, '_sa_instrument_role'):
            role = method._sa_instrument_role
            assert role in ('appender', 'remover', 'iterator',
                            'link', 'converter')
            roles[role] = name
        # transfer instrumentation requests from decorated function
        # to the combined queue
        before, after = None, None
        if hasattr(method, '_sa_instrument_before'):
            op, argument = method._sa_instrument_before
            assert op in ('fire_append_event', 'fire_remove_event')
            before = op, argument
        if hasattr(method, '_sa_instrument_after'):
            op = method._sa_instrument_after
            assert op in ('fire_append_event', 'fire_remove_event')
            after = op
        if before:
            methods[name] = before[0], before[1], after
        elif after:
            methods[name] = None, None, after
    # apply ABC auto-decoration to methods that need it
    for method, decorator in decorators.items():
        fn = getattr(cls, method, None)
        if (fn and method not in methods and
            not hasattr(fn, '_sa_instrumented')):
            setattr(cls, method, decorator(fn))
    # ensure all roles are present, and apply implicit instrumentation if
    # needed
    if 'appender' not in roles or not hasattr(cls, roles['appender']):
        raise sa_exc.ArgumentError(
            "Type %s must elect an appender method to be "
            "a collection class" % cls.__name__)
    elif (roles['appender'] not in methods and
          not hasattr(getattr(cls, roles['appender']), '_sa_instrumented')):
        methods[roles['appender']] = ('fire_append_event', 1, None)
    if 'remover' not in roles or not hasattr(cls, roles['remover']):
        raise sa_exc.ArgumentError(
            "Type %s must elect a remover method to be "
            "a collection class" % cls.__name__)
    elif (roles['remover'] not in methods and
          not hasattr(getattr(cls, roles['remover']), '_sa_instrumented')):
        methods[roles['remover']] = ('fire_remove_event', 1, None)
    if 'iterator' not in roles or not hasattr(cls, roles['iterator']):
        raise sa_exc.ArgumentError(
            "Type %s must elect an iterator method to be "
            "a collection class" % cls.__name__)
    # apply ad-hoc instrumentation from decorators, class-level defaults
    # and implicit role declarations
    for method, (before, argument, after) in methods.items():
        setattr(cls, method,
                _instrument_membership_mutator(getattr(cls, method),
                                               before, argument, after))
    # intern the role map
    for role, method in roles.items():
        setattr(cls, '_sa_%s' % role, getattr(cls, method))
    setattr(cls, '_sa_instrumented', id(cls))
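# Hedged editorial sketch, not part of the original module: a hypothetical
# minimal custom collection instrumented through the decoratorless
# __instrumentation__ mapping described in the comment above.
def _instrument_class_example():
    class TagList(list):
        __instrumentation__ = {
            'appender': 'append',
            'remover': 'remove',
            'iterator': '__iter__',
        }
    _instrument_class(TagList)
    tags = TagList()
    tags.append('demo')   # wrapped; fires append events once an adapter is linked
    return tags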
def _instrument_membership_mutator(method, before, argument, after):
    """Route method args and/or return value through the collection adapter."""
    # This isn't smart enough to handle @adds(1) for 'def fn(self, (a, b))'
    if before:
        fn_args = list(util.flatten_iterator(inspect.getargspec(method)[0]))
        if type(argument) is int:
            pos_arg = argument
            named_arg = len(fn_args) > argument and fn_args[argument] or None
        else:
            if argument in fn_args:
                pos_arg = fn_args.index(argument)
            else:
                pos_arg = None
            named_arg = argument
        del fn_args
    def wrapper(*args, **kw):
        if before:
            if pos_arg is None:
                if named_arg not in kw:
                    raise sa_exc.ArgumentError(
                        "Missing argument %s" % argument)
                value = kw[named_arg]
            else:
                if len(args) > pos_arg:
                    value = args[pos_arg]
                elif named_arg in kw:
                    value = kw[named_arg]
                else:
                    raise sa_exc.ArgumentError(
                        "Missing argument %s" % argument)
        initiator = kw.pop('_sa_initiator', None)
        if initiator is False:
            executor = None
        else:
            executor = getattr(args[0], '_sa_adapter', None)
        if before and executor:
            getattr(executor, before)(value, initiator)
        if not after or not executor:
            return method(*args, **kw)
        else:
            res = method(*args, **kw)
            if res is not None:
                getattr(executor, after)(res, initiator)
            return res
    try:
        wrapper._sa_instrumented = True
        wrapper.__name__ = method.__name__
        wrapper.__doc__ = method.__doc__
    except:
        pass
    return wrapper
def __set(collection, item, _sa_initiator=None):
    """Run set events, may eventually be inlined into decorators."""
    if _sa_initiator is not False:
        executor = getattr(collection, '_sa_adapter', None)
        if executor:
            item = getattr(executor, 'fire_append_event')(item, _sa_initiator)
    return item
def __del(collection, item, _sa_initiator=None):
    """Run del events, may eventually be inlined into decorators."""
    if _sa_initiator is not False:
        executor = getattr(collection, '_sa_adapter', None)
        if executor:
            getattr(executor, 'fire_remove_event')(item, _sa_initiator)
def __before_delete(collection, _sa_initiator=None):
    """Special method to run 'commit existing value' methods"""
    executor = getattr(collection, '_sa_adapter', None)
    if executor:
        getattr(executor, 'fire_pre_remove_event')(_sa_initiator)
def _list_decorators():
    """Tailored instrumentation wrappers for any list-like class."""
    def _tidy(fn):
        setattr(fn, '_sa_instrumented', True)
        fn.__doc__ = getattr(getattr(list, fn.__name__), '__doc__')
    def append(fn):
        def append(self, item, _sa_initiator=None):
            item = __set(self, item, _sa_initiator)
            fn(self, item)
        _tidy(append)
        return append
    def remove(fn):
        def remove(self, value, _sa_initiator=None):
            __before_delete(self, _sa_initiator)
            # testlib.pragma exempt:__eq__
            fn(self, value)
            __del(self, value, _sa_initiator)
        _tidy(remove)
        return remove
    def insert(fn):
        def insert(self, index, value):
            value = __set(self, value)
            fn(self, index, value)
        _tidy(insert)
        return insert
    def __setitem__(fn):
        def __setitem__(self, index, value):
            if not isinstance(index, slice):
                existing = self[index]
                if existing is not None:
                    __del(self, existing)
                value = __set(self, value)
                fn(self, index, value)
            else:
                # slice assignment requires __delitem__, insert, __len__
                step = index.step or 1
                start = index.start or 0
                if start < 0:
                    start += len(self)
                stop = index.stop or len(self)
                if stop < 0:
                    stop += len(self)
                if step == 1:
                    for i in xrange(start, stop, step):
                        if len(self) > start:
                            del self[start]
                    for i, item in enumerate(value):
                        self.insert(i + start, item)
                else:
                    rng = range(start, stop, step)
                    if len(value) != len(rng):
                        raise ValueError(
                            "attempt to assign sequence of size %s to "
                            "extended slice of size %s" % (len(value),
                                                           len(rng)))
                    for i, item in zip(rng, value):
                        self.__setitem__(i, item)
        _tidy(__setitem__)
        return __setitem__
    def __delitem__(fn):
        def __delitem__(self, index):
            if not isinstance(index, slice):
                item = self[index]
                __del(self, item)
                fn(self, index)
            else:
                # slice deletion requires __getslice__ and a slice-grokking
                # __getitem__ for stepped deletion
                # note: not breaking this into atomic dels
                for item in self[index]:
                    __del(self, item)
                fn(self, index)
        _tidy(__delitem__)
        return __delitem__
    # Py2K
    def __setslice__(fn):
        def __setslice__(self, start, end, values):
            for value in self[start:end]:
                __del(self, value)
            values = [__set(self, value) for value in values]
            fn(self, start, end, values)
        _tidy(__setslice__)
        return __setslice__
    def __delslice__(fn):
        def __delslice__(self, start, end):
            for value in self[start:end]:
                __del(self, value)
            fn(self, start, end)
        _tidy(__delslice__)
        return __delslice__
    # end Py2K
    def extend(fn):
        def extend(self, iterable):
            for value in iterable:
                self.append(value)
        _tidy(extend)
        return extend
    def __iadd__(fn):
        def __iadd__(self, iterable):
            # list.__iadd__ takes any iterable and seems to let TypeError raise
            # as-is instead of returning NotImplemented
            for value in iterable:
                self.append(value)
            return self
        _tidy(__iadd__)
        return __iadd__
    def pop(fn):
        def pop(self, index=-1):
            __before_delete(self)
            item = fn(self, index)
            __del(self, item)
            return item
        _tidy(pop)
        return pop
    # __imul__ : not wrapping this.  all members of the collection are already
    # present, so no need to fire appends... wrapping it with an explicit
    # decorator is still possible, so events on *= can be had if they're
    # desired.  hard to imagine a use case for __imul__, though.
    l = locals().copy()
    l.pop('_tidy')
    return l
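# Hedged editorial sketch, not part of the original module: a hypothetical
# demonstration of applying one wrapper produced by _list_decorators() to a
# throwaway list subclass.  With no _sa_adapter linked, the event hooks are
# no-ops and the wrapped method behaves like plain list.append.
def _list_decorators_example():
    decorators = _list_decorators()
    class Demo(list):
        pass
    Demo.append = decorators['append'](list.append)
    d = Demo()
    d.append('x')
    return d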
def _dict_decorators():
    """Tailored instrumentation wrappers for any dict-like mapping class."""
    def _tidy(fn):
        setattr(fn, '_sa_instrumented', True)
        fn.__doc__ = getattr(getattr(dict, fn.__name__), '__doc__')
    Unspecified = util.symbol('Unspecified')
    def __setitem__(fn):
        def __setitem__(self, key, value, _sa_initiator=None):
            if key in self:
                __del(self, self[key], _sa_initiator)
            value = __set(self, value, _sa_initiator)
            fn(self, key, value)
        _tidy(__setitem__)
        return __setitem__
    def __delitem__(fn):
        def __delitem__(self, key, _sa_initiator=None):
            if key in self:
                __del(self, self[key], _sa_initiator)
            fn(self, key)
        _tidy(__delitem__)
        return __delitem__
    def clear(fn):
        def clear(self):
            for key in self:
                __del(self, self[key])
            fn(self)
        _tidy(clear)
        return clear
    def pop(fn):
        def pop(self, key, default=Unspecified):
            if key in self:
                __del(self, self[key])
            if default is Unspecified:
                return fn(self, key)
            else:
                return fn(self, key, default)
        _tidy(pop)
        return pop
    def popitem(fn):
        def popitem(self):
            __before_delete(self)
            item = fn(self)
            __del(self, item[1])
            return item
        _tidy(popitem)
        return popitem
    def setdefault(fn):
        def setdefault(self, key, default=None):
            if key not in self:
                self.__setitem__(key, default)
                return default
            else:
                return self.__getitem__(key)
        _tidy(setdefault)
        return setdefault
    def update(fn):
        def update(self, __other=Unspecified, **kw):
            if __other is not Unspecified:
                if hasattr(__other, 'keys'):
                    for key in __other.keys():
                        if (key not in self or
                            self[key] is not __other[key]):
                            self[key] = __other[key]
                else:
                    for key, value in __other:
                        if key not in self or self[key] is not value:
                            self[key] = value
            for key in kw:
                if key not in self or self[key] is not kw[key]:
                    self[key] = kw[key]
        _tidy(update)
        return update
    l = locals().copy()
    l.pop('_tidy')
    l.pop('Unspecified')
    return l
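# Hedged editorial sketch, not part of the original module: the dict wrappers
# behave like the list wrappers above; a hypothetical application to a
# throwaway dict subclass.
def _dict_decorators_example():
    decorators = _dict_decorators()
    class DemoDict(dict):
        pass
    DemoDict.__setitem__ = decorators['__setitem__'](dict.__setitem__)
    d = DemoDict()
    d['k'] = 'v'   # without a linked _sa_adapter the event hooks are no-ops
    return d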
if util.py3k_warning:
    _set_binop_bases = (set, frozenset)
else:
    import sets
    _set_binop_bases = (set, frozenset, sets.BaseSet)
def _set_binops_check_strict(self, obj):
    """Allow only set, frozenset and self.__class__-derived
    objects in binops."""
    return isinstance(obj, _set_binop_bases + (self.__class__,))
def _set_binops_check_loose(self, obj):
    """Allow anything set-like to participate in set binops."""
    return (isinstance(obj, _set_binop_bases + (self.__class__,)) or
            util.duck_type_collection(obj) == set)
def _set_decorators():
    """Tailored instrumentation wrappers for any set-like class."""
    def _tidy(fn):
        setattr(fn, '_sa_instrumented', True)
        fn.__doc__ = getattr(getattr(set, fn.__name__), '__doc__')
    Unspecified = util.symbol('Unspecified')
    def add(fn):
        def add(self, value, _sa_initiator=None):
            if value not in self:
                value = __set(self, value, _sa_initiator)
            # testlib.pragma exempt:__hash__
            fn(self, value)
        _tidy(add)
        return add
    def discard(fn):
        def discard(self, value, _sa_initiator=None):
            # testlib.pragma exempt:__hash__
            if value in self:
                __del(self, value, _sa_initiator)
                # testlib.pragma exempt:__hash__
            fn(self, value)
        _tidy(discard)
        return discard
    def remove(fn):
        def remove(self, value, _sa_initiator=None):
            # testlib.pragma exempt:__hash__
            if value in self:
                __del(self, value, _sa_initiator)
            # testlib.pragma exempt:__hash__
            fn(self, value)
        _tidy(remove)
        return remove
    def pop(fn):
        def pop(self):
            __before_delete(self)
            item = fn(self)
            __del(self, item)
            return item
        _tidy(pop)
        return pop
    def clear(fn):
        def clear(self):
            for item in list(self):
                self.remove(item)
        _tidy(clear)
        return clear
    def update(fn):
        def update(self, value):
            for item in value:
                self.add(item)
        _tidy(update)
        return update
    def __ior__(fn):
        def __ior__(self, value):
            if not _set_binops_check_strict(self, value):
                return NotImplemented
            for item in value:
                self.add(item)
            return self
        _tidy(__ior__)
        return __ior__
    def difference_update(fn):
        def difference_update(self, value):
            for item in value:
                self.discard(item)
        _tidy(difference_update)
        return difference_update
    def __isub__(fn):
        def __isub__(self, value):
            if not _set_binops_check_strict(self, value):
                return NotImplemented
            for item in value:
                self.discard(item)
            return self
        _tidy(__isub__)
        return __isub__
    def intersection_update(fn):
        def intersection_update(self, other):
            want, have = self.intersection(other), set(self)
            remove, add = have - want, want - have
            for item in remove:
                self.remove(item)
            for item in add:
                self.add(item)
        _tidy(intersection_update)
        return intersection_update
    def __iand__(fn):
        def __iand__(self, other):
            if not _set_binops_check_strict(self, other):
                return NotImplemented
            want, have = self.intersection(other), set(self)
            remove, add = have - want, want - have
            for item in remove:
                self.remove(item)
            for item in add:
                self.add(item)
            return self
        _tidy(__iand__)
        return __iand__
    def symmetric_difference_update(fn):
        def symmetric_difference_update(self, other):
            want, have = self.symmetric_difference(other), set(self)
            remove, add = have - want, want - have
            for item in remove:
                self.remove(item)
            for item in add:
                self.add(item)
        _tidy(symmetric_difference_update)
        return symmetric_difference_update
    def __ixor__(fn):
        def __ixor__(self, other):
            if not _set_binops_check_strict(self, other):
                return NotImplemented
            want, have = self.symmetric_difference(other), set(self)
            remove, add = have - want, want - have
            for item in remove:
                self.remove(item)
            for item in add:
                self.add(item)
            return self
        _tidy(__ixor__)
        return __ixor__
    l = locals().copy()
    l.pop('_tidy')
    l.pop('Unspecified')
    return l
class InstrumentedList(list):
    """An instrumented version of the built-in list."""
    __instrumentation__ = {
       'appender': 'append',
       'remover': 'remove',
       'iterator': '__iter__', }
class InstrumentedSet(set):
    """An instrumented version of the built-in set."""
    __instrumentation__ = {
       'appender': 'add',
       'remover': 'remove',
       'iterator': '__iter__', }
class InstrumentedDict(dict):
    """An instrumented version of the built-in dict."""
    # Py3K
    #__instrumentation__ = {
    #    'iterator': 'values', }
    # Py2K
    __instrumentation__ = {
        'iterator': 'itervalues', }
    # end Py2K
__canned_instrumentation = {
    list: InstrumentedList,
    set: InstrumentedSet,
    dict: InstrumentedDict,
    }
__interfaces = {
    list: {'appender': 'append',
           'remover': 'remove',
           'iterator': '__iter__',
           '_decorators': _list_decorators(), },
    set: {'appender': 'add',
          'remover': 'remove',
          'iterator': '__iter__',
          '_decorators': _set_decorators(), },
    # decorators are required for dicts and object collections.
    # Py3K
    #dict: {'iterator': 'values',
    #       '_decorators': _dict_decorators(), },
    # Py2K
    dict: {'iterator': 'itervalues',
           '_decorators': _dict_decorators(), },
    # end Py2K
    # < 0.4 compatible naming, deprecated- use decorators instead.
    None: {}
    }
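# Hedged editorial sketch, not part of the original module: a hypothetical
# illustration of how prepare_instrumentation() maps a built-in collection
# type onto its instrumented counterpart.
def _canned_instrumentation_example():
    factory = prepare_instrumentation(list)
    collection = factory()
    # the returned factory produces InstrumentedList instances, which carry
    # the _sa_appender / _sa_remover / _sa_iterator roles the ORM requires
    assert isinstance(collection, InstrumentedList)
    return collection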
class MappedCollection(dict):
    """A basic dictionary-based collection class.
    Extends dict with the minimal bag semantics that collection
    classes require. ``set`` and ``remove`` are implemented in terms
    of a keying function: any callable that takes an object and
    returns an object for use as a dictionary key.
    """
    def __init__(self, keyfunc):
        """Create a new collection with keying provided by keyfunc.
        keyfunc may be any callable that takes an object and
        returns an object for use as a dictionary key.
        The keyfunc will be called every time the ORM needs to add a member by
        value-only (such as when loading instances from the database) or
        remove a member.  The usual cautions about dictionary keying apply:
        ``keyfunc(object)`` should return the same output for the life of the
        collection.  Keying based on mutable properties can result in
        unreachable instances "lost" in the collection.
        """
        self.keyfunc = keyfunc
    def set(self, value, _sa_initiator=None):
        """Add an item by value, consulting the keyfunc for the key."""
        key = self.keyfunc(value)
        self.__setitem__(key, value, _sa_initiator)
    set = collection.internally_instrumented(set)
    set = collection.appender(set)
    def remove(self, value, _sa_initiator=None):
        """Remove an item by value, consulting the keyfunc for the key."""
        key = self.keyfunc(value)
        # Let self[key] raise if key is not in this collection
        # testlib.pragma exempt:__ne__
        if self[key] != value:
            raise sa_exc.InvalidRequestError(
                "Can not remove '%s': collection holds '%s' for key '%s'. "
                "Possible cause: is the MappedCollection key function "
                "based on mutable properties or properties that only obtain "
                "values after flush?" %
                (value, self[key], key))
        self.__delitem__(key, _sa_initiator)
    remove = collection.internally_instrumented(remove)
    remove = collection.remover(remove)
    def _convert(self, dictlike):
        """Validate and convert a dict-like object into values for set()ing.
        This is called behind the scenes when a MappedCollection is replaced
        entirely by another collection, as in::
          myobj.mappedcollection = {'a':obj1, 'b': obj2} # ...
        Raises a TypeError if the key in any (key, value) pair in the dictlike
        object does not match the key that this collection's keyfunc would
        have assigned for that value.
        """
        for incoming_key, value in util.dictlike_iteritems(dictlike):
            new_key = self.keyfunc(value)
            if incoming_key != new_key:
                raise TypeError(
                    "Found incompatible key %r for value %r; this "
                    "collection's "
                    "keying function requires a key of %r for this value." % (
                    incoming_key, value, new_key))
            yield value
    _convert = collection.converter(_convert)
# ensure instrumentation is associated with
# these built-in classes; if a user-defined class
# subclasses these and uses @internally_instrumented,
# the superclass is otherwise not instrumented.
# see [ticket:2406].
_instrument_class(MappedCollection)
_instrument_class(InstrumentedList)
_instrument_class(InstrumentedSet)
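# Hedged editorial sketch, not part of the original module: a hypothetical
# MappedCollection keyed on a 'name' attribute, roughly as it might be used
# as a relationship's collection_class.  The Note class is illustrative only.
def _mapped_collection_example():
    class Note(object):
        def __init__(self, name):
            self.name = name
    notes = MappedCollection(keyfunc=lambda note: note.name)
    notes.set(Note('todo'))
    # the member is stored under the key produced by keyfunc
    assert 'todo' in notes
    return notes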
 | 
	gpl-2.0 | -7,346,996,607,321,575,000 | 33.598499 | 84 | 0.599696 | false | 
| 
	drayanaindra/django-shop | 
	shop/util/cart.py | 
	11 | 
	3099 | 
	# -*- coding: utf-8 -*-
from shop.models.cartmodel import Cart
from django.contrib.auth.models import AnonymousUser
def get_cart_from_database(request):
    database_cart = Cart.objects.filter(user=request.user)
    if database_cart:
        database_cart = database_cart[0]
    else:
        database_cart = None
    return database_cart
def get_cart_from_session(request):
    session_cart = None
    session = getattr(request, 'session', None)
    if session is not None:
        cart_id = session.get('cart_id')
        if cart_id:
            try:
                session_cart = Cart.objects.get(pk=cart_id)
            except Cart.DoesNotExist:
                session_cart = None
    return session_cart
def get_or_create_cart(request, save=False):
    """
    Return cart for current visitor.
    For a logged in user, try to get the cart from the database. If it's not there or it's empty,
    use the cart from the session.
    If the user is not logged in use the cart from the session.
    If there is no cart object in the database or session, create one.
    If ``save`` is True, the cart object will be explicitly saved.
    """
    cart = None
    if not hasattr(request, '_cart'):
        is_logged_in = request.user and not isinstance(request.user, AnonymousUser)
        if is_logged_in:
            # if we are authenticated
            session_cart = get_cart_from_session(request)
            if session_cart and session_cart.user == request.user:
                # and the session cart already belongs to us, we are done
                cart = session_cart
            elif session_cart and not session_cart.is_empty and session_cart.user != request.user:
                # if it does not belong to us yet
                database_cart = get_cart_from_database(request)
                if database_cart:
                    # and there already is a cart that belongs to us in the database
                    # delete the old database cart
                    database_cart.delete()
                # save the user to the new one from the session
                session_cart.user = request.user
                session_cart.save()
                cart = session_cart
            else:
                # if there is no session_cart, or it's empty, use the database cart
                cart = get_cart_from_database(request)
                if cart:
                    # and save it to the session
                    request.session['cart_id'] = cart.pk
        else:
            # not authenticated? cart might be in session
            cart = get_cart_from_session(request)
        if not cart:
            # in case it's our first visit and no cart was created yet
            if is_logged_in:
                cart = Cart(user=request.user)
            elif getattr(request, 'session', None) is not None:
                cart = Cart()
        if save and not cart.pk:
            cart.save()
            request.session['cart_id'] = cart.pk
        setattr(request, '_cart', cart)
    cart = getattr(request, '_cart')  # There we *must* have a cart
    return cart
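# Hedged editorial sketch, not part of the original module: a hypothetical
# view showing how get_or_create_cart() is typically consumed.  The view name
# and template path are placeholders.
def example_cart_detail(request):
    from django.shortcuts import render_to_response
    cart = get_or_create_cart(request, save=True)
    return render_to_response('shop/cart.html', {'cart': cart})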
 | 
	bsd-3-clause | 9,081,044,432,204,280,000 | 37.259259 | 98 | 0.581478 | false | 
| 
	Parisson/Telemeta | 
	telemeta/management/commands/telemeta-import-collections-from-crem.py | 
	2 | 
	9978 | 
	#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010 Guillaume Pellerin
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://svn.parisson.org/telemeta/TelemetaLicense.
#
# Author: Guillaume Pellerin <[email protected]>
#
import logging
import codecs
import os
import sys
import csv
import logging
import datetime
from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from django.core.files.base import ContentFile
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.template.defaultfilters import slugify
from telemeta.models import *
from telemeta.util.unaccent import unaccent
class Logger:
    def __init__(self, file):
        self.logger = logging.getLogger('myapp')
        self.hdlr = logging.FileHandler(file)
        self.formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
        self.hdlr.setFormatter(self.formatter)
        self.logger.addHandler(self.hdlr)
        self.logger.setLevel(logging.INFO)
    def info(self, prefix, message):
        self.logger.info(' ' + prefix + ' : ' + message.decode('utf8'))
    def error(self, prefix, message):
        self.logger.error(prefix + ' : ' + message.decode('utf8'))
class Command(BaseCommand):
    """Import CREM collections from collection directories containing media files
    and optionally an XLS file representing the relation between old codes and new codes
    """
    help = "import CREM collections (special usecase)"
    admin_email = '[email protected]'
    media_root = settings.MEDIA_ROOT
    option_list = BaseCommand.option_list + (
          make_option('-d', '--dry-run',
            action='store_true',
            dest='dry-run',
            help='Do NOT write anything'),
          make_option('-f', '--force',
            action='store_true',
            dest='force',
            help='Force overwrite data'),
          make_option('-s', '--source',
            dest='source_dir',
            help='define the source directory'),
          make_option('-l', '--log',
            dest='log',
            help='define log file'),
          make_option('-p', '--pattern',
            dest='pattern',
            help='define the pattern'),
          make_option('-m', '--domain',
            dest='domain',
            help='define site domain'),
    )
    def write_file(self, item, media):
        filename = media.split(os.sep)[-1]
        print 'importing ' + filename
        if os.path.exists(media):
            if not item.file or self.force:
                if not self.media_root in self.source_dir:
                    print "file not in MEDIA_ROOT, copying..."
                    f = open(media, 'r')
                    if not self.dry_run:
                        file_content = ContentFile(f.read())
                        item.file.save(filename, file_content)
                    f.close()
                else:
                    print "file in MEDIA_ROOT, linking..."
                    path = media.replace(self.media_root, '')
                    if not self.dry_run:
                        item.file = path
                item.save()
                if self.user:
                    item.set_revision(self.user)
            else:
                msg = item.code + ' : fichier ' + item.file.name + ' deja inscrit dans la base de donnees et pas de forcage !'
                self.logger.info('item', msg)
        else:
            msg = item.code + ' : fichier audio ' + filename + ' inexistant dans le dossier !'
            self.logger.error('item', msg)
    def handle(self, *args, **kwargs):
        self.logger = Logger(kwargs.get('log'))
        self.pattern = kwargs.get('pattern')
        self.source_dir = os.path.abspath(kwargs.get('source_dir'))
        self.dry_run =  kwargs.get('dry-run')
        self.force = kwargs.get('force')
        self.domain = kwargs.get('domain')
        site = Site.objects.all()[0]
        if self.domain:
            site.domain = self.domain
            site.name = self.domain
            site.save()
        else:
            self.domain = site.domain
        self.user = User.objects.filter(username='admin')[0]
        self.collections = os.listdir(self.source_dir)
        collections = []
        for collection in self.collections:
            collection_dir = self.source_dir + os.sep + collection
            collection_files = os.listdir(collection_dir)
            if not '/.' in collection_dir and self.pattern in collection_dir:
                collection_name = collection.split(os.sep)[-1]
                collections.append(collection_name)
                c = MediaCollection.objects.filter(code=collection_name)
                if not c and collection + '.csv' in collection_files:
                    msg = collection + ' collection NON présente dans la base de données, SORTIE '
                    self.logger.error(collection, msg)
                    sys.exit(msg)
                elif not c:
                    msg = 'collection NON présente dans la base de données, CREATION '
                    self.logger.info(collection, msg)
                    if not self.dry_run:
                        c = MediaCollection(code=collection_name, title=collection_name)
                        c.save()
                        c.set_revision(self.user)
                else:
                    msg = 'collection présente dans la base de données, SELECTION'
                    self.logger.info(collection, msg)
        for collection in collections:
            collection_dir = self.source_dir + os.sep + collection
            collection_name = collection
            collection_files = os.listdir(collection_dir)
            msg = '************************ ' + collection + ' ******************************'
            self.logger.info(collection, msg[:70])
            csv_file = ''
            rows = {}
            if collection + '.csv' in collection_files:
                csv_file = self.source_dir + os.sep + collection + os.sep + collection + '.csv'
                csv_data = csv.reader(open(csv_file), delimiter=';')
                for row in csv_data:
                    rows[row[1].strip()] = row[0].strip()
                msg = collection + ' import du fichier CSV de la collection'
                self.logger.info(collection, msg[:70])
            else:
                msg = collection + ' pas de fichier CSV dans la collection'
                self.logger.info(collection, msg[:70])
            c = MediaCollection.objects.filter(code=collection_name)
            if not c:
                if not self.dry_run:
                    c = MediaCollection(code=collection_name)
                    c.save()
                msg = ' collection NON présente dans la BDD, CREATION '
                self.logger.info(c.code, msg)
            else:
                c = c[0]
                msg = ' id = '+str(c.id)
                self.logger.info(c.code, msg)
            audio_files = []
            for file in collection_files:
                ext = ['WAV', 'wav']
                if file.split('.')[-1] in ext and file[0] != '.':
                    audio_files.append(file)
            audio_files.sort()
            nb_items = c.items.count()
            counter = 0
            for file in audio_files:
                code = file.split('.')[0]
                wav_file = self.source_dir + os.sep + collection + os.sep + file
                if len(audio_files) <= nb_items:
                    items = MediaItem.objects.filter(code=code)
                    old_ref = ''
                    if code in rows and not items:
                        old_ref = rows[code]
                        items = MediaItem.objects.filter(old_code=old_ref)
                    if items:
                        item = items[0]
                        if item.code:
                            msg = code + ' : ' + item.code + ' : Cas 1 ou 2 : id = ' + str(item.id)
                        elif item.old_code:
                            msg = code + ' : ' + item.old_code + ' : Cas 1 ou 2 : id = ' + str(item.id)
                        else:
                            msg = code + ' : ' + ' Cas 1 ou 2 : id = ' + str(item.id)
                        self.logger.info('item', msg)
                        item.code = code
                    else:
                        item = MediaItem(code=code, collection=c)
                        msg = code + ' : ' + old_ref + ' : Cas 1 ou 2 : item NON présent dans la base de données, CREATION'
                        self.logger.info('item', msg)
                    self.write_file(item, wav_file)
                elif nb_items == 1 and len(audio_files) > 1:
                    if counter == 0:
                        msg = code + ' : Cas 3a : item n°01 présent dans la base de données, PASSE'
                        self.logger.info('item', msg)
                    else:
                        item = MediaItem(code=code, collection=c)
                        msg = code + ' : Cas 3a : item NON présent dans la base de données, CREATION'
                        self.logger.info('item', msg)
                        self.write_file(item, wav_file)
                elif nb_items > 1 and nb_items < len(audio_files):
                    msg = code + ' : Cas 3b : nb items < nb de fichiers audio, PAS de creation'
                    self.logger.info('item', msg)
                counter += 1
        msg = 'Liste des URLs des collections importées :'
        self.logger.info('INFO', msg)
        for collection in collections:
            msg = 'http://'+self.domain+'/archives/collections/'+collection
            self.logger.info(collection, msg)
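# Hedged editorial sketch, not part of the original module: a hypothetical
# programmatic invocation of this command; the source directory, pattern,
# log path and domain values are placeholders.
def _example_invocation():
    from django.core.management import call_command
    call_command('telemeta-import-collections-from-crem',
                 source_dir='/srv/crem/collections',
                 pattern='CNRSMH',
                 log='/tmp/telemeta-import.log',
                 domain='archives.example.org')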
 | 
	agpl-3.0 | 2,723,525,549,494,444,500 | 39.336032 | 126 | 0.520225 | false | 
| 
	alephu5/Soundbyte | 
	environment/lib/python3.3/site-packages/nose/config.py | 
	5 | 
	25280 | 
	import logging
import optparse
import os
import re
import sys
import configparser
from optparse import OptionParser
from nose.util import absdir, tolist
from nose.plugins.manager import NoPlugins
from warnings import warn, filterwarnings
log = logging.getLogger(__name__)
# not allowed in config files
option_blacklist = ['help', 'verbose']
config_files = [
    # Linux users will prefer this
    "~/.noserc",
    # Windows users will prefer this
    "~/nose.cfg"
    ]
# platforms on which the exe check defaults to off
# Windows and IronPython
exe_allowed_platforms = ('win32', 'cli')
filterwarnings("always", category=DeprecationWarning,
               module=r'(.*\.)?nose\.config')
class NoSuchOptionError(Exception):
    def __init__(self, name):
        Exception.__init__(self, name)
        self.name = name
class ConfigError(Exception):
    pass
class ConfiguredDefaultsOptionParser(object):
    """
    Handler for options from commandline and config files.
    """
    def __init__(self, parser, config_section, error=None, file_error=None):
        self._parser = parser
        self._config_section = config_section
        if error is None:
            error = self._parser.error
        self._error = error
        if file_error is None:
            file_error = lambda msg, **kw: error(msg)
        self._file_error = file_error
    def _configTuples(self, cfg, filename):
        config = []
        if self._config_section in cfg.sections():
            for name, value in cfg.items(self._config_section):
                config.append((name, value, filename))
        return config
    def _readFromFilenames(self, filenames):
        config = []
        for filename in filenames:
            cfg = configparser.RawConfigParser()
            try:
                cfg.read(filename)
            except configparser.Error as exc:
                raise ConfigError("Error reading config file %r: %s" %
                                  (filename, str(exc)))
            config.extend(self._configTuples(cfg, filename))
        return config
    def _readFromFileObject(self, fh):
        cfg = configparser.RawConfigParser()
        try:
            filename = fh.name
        except AttributeError:
            filename = '<???>'
        try:
            cfg.readfp(fh)
        except configparser.Error as exc:
            raise ConfigError("Error reading config file %r: %s" %
                              (filename, str(exc)))
        return self._configTuples(cfg, filename)
    def _readConfiguration(self, config_files):
        try:
            config_files.readline
        except AttributeError:
            filename_or_filenames = config_files
            if isinstance(filename_or_filenames, str):
                filenames = [filename_or_filenames]
            else:
                filenames = filename_or_filenames
            config = self._readFromFilenames(filenames)
        else:
            fh = config_files
            config = self._readFromFileObject(fh)
        return config
    def _processConfigValue(self, name, value, values, parser):
        opt_str = '--' + name
        option = parser.get_option(opt_str)
        if option is None:
            raise NoSuchOptionError(name)
        else:
            option.process(opt_str, value, values, parser)
    def _applyConfigurationToValues(self, parser, config, values):
        for name, value, filename in config:
            if name in option_blacklist:
                continue
            try:
                self._processConfigValue(name, value, values, parser)
            except NoSuchOptionError as exc:
                self._file_error(
                    "Error reading config file %r: "
                    "no such option %r" % (filename, exc.name),
                    name=name, filename=filename)
            except optparse.OptionValueError as exc:
                msg = str(exc).replace('--' + name, repr(name), 1)
                self._file_error("Error reading config file %r: "
                                 "%s" % (filename, msg),
                                 name=name, filename=filename)
    def parseArgsAndConfigFiles(self, args, config_files):
        values = self._parser.get_default_values()
        try:
            config = self._readConfiguration(config_files)
        except ConfigError as exc:
            self._error(str(exc))
        else:
            try:
                self._applyConfigurationToValues(self._parser, config, values)
            except ConfigError as exc:
                self._error(str(exc))
        return self._parser.parse_args(args, values)
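# Hedged editorial sketch, not part of the original module: a hypothetical
# use of ConfiguredDefaultsOptionParser, applying any [nosetests] section
# found in the default config files before parsing command-line arguments.
def _configured_defaults_example():
    parser = OptionParser()
    parser.add_option('--verbosity', type='int', default=1)
    wrapped = ConfiguredDefaultsOptionParser(parser, 'nosetests')
    # config-file values are applied first, then overridden by argv
    options, args = wrapped.parseArgsAndConfigFiles(['--verbosity', '2'],
                                                    config_files)
    return options.verbosity, args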
class Config(object):
    """nose configuration.
    Instances of Config are used throughout nose to configure
    behavior, including plugin lists. Here are the default values for
    all config keys::
      self.env = env = kw.pop('env', {})
      self.args = ()
      self.testMatch = re.compile(r'(?:^|[\\b_\\.%s-])[Tt]est' % os.sep)
      self.addPaths = not env.get('NOSE_NOPATH', False)
      self.configSection = 'nosetests'
      self.debug = env.get('NOSE_DEBUG')
      self.debugLog = env.get('NOSE_DEBUG_LOG')
      self.exclude = None
      self.getTestCaseNamesCompat = False
      self.includeExe = env.get('NOSE_INCLUDE_EXE',
                                sys.platform in exe_allowed_platforms)
      self.ignoreFiles = (re.compile(r'^\.'),
                          re.compile(r'^_'),
                          re.compile(r'^setup\.py$')
                          )
      self.include = None
      self.loggingConfig = None
      self.logStream = sys.stderr
      self.options = NoOptions()
      self.parser = None
      self.plugins = NoPlugins()
      self.srcDirs = ('lib', 'src')
      self.runOnInit = True
      self.stopOnError = env.get('NOSE_STOP', False)
      self.stream = sys.stderr
      self.testNames = ()
      self.verbosity = int(env.get('NOSE_VERBOSE', 1))
      self.where = ()
      self.py3where = ()
      self.workingDir = None
    """
    def __init__(self, **kw):
        self.env = env = kw.pop('env', {})
        self.args = ()
        self.testMatchPat = env.get('NOSE_TESTMATCH',
                                    r'(?:^|[\b_\.%s-])[Tt]est' % os.sep)
        self.testMatch = re.compile(self.testMatchPat)
        self.addPaths = not env.get('NOSE_NOPATH', False)
        self.configSection = 'nosetests'
        self.debug = env.get('NOSE_DEBUG')
        self.debugLog = env.get('NOSE_DEBUG_LOG')
        self.exclude = None
        self.getTestCaseNamesCompat = False
        self.includeExe = env.get('NOSE_INCLUDE_EXE',
                                  sys.platform in exe_allowed_platforms)
        self.ignoreFilesDefaultStrings = [r'^\.',
                                          r'^_',
                                          r'^setup\.py$',
                                          ]
        self.ignoreFiles = list(map(re.compile, self.ignoreFilesDefaultStrings))
        self.include = None
        self.loggingConfig = None
        self.logStream = sys.stderr
        self.options = NoOptions()
        self.parser = None
        self.plugins = NoPlugins()
        self.srcDirs = ('lib', 'src')
        self.runOnInit = True
        self.stopOnError = env.get('NOSE_STOP', False)
        self.stream = sys.stderr
        self.testNames = []
        self.verbosity = int(env.get('NOSE_VERBOSE', 1))
        self.where = ()
        self.py3where = ()
        self.workingDir = os.getcwd()
        self.traverseNamespace = False
        self.firstPackageWins = False
        self.parserClass = OptionParser
        self.worker = False
        self._default = self.__dict__.copy()
        self.update(kw)
        self._orig = self.__dict__.copy()
    def __getstate__(self):
        state = self.__dict__.copy()
        del state['stream']
        del state['_orig']
        del state['_default']
        del state['env']
        del state['logStream']
        # FIXME remove plugins, have only plugin manager class
        state['plugins'] = self.plugins.__class__
        return state
    def __setstate__(self, state):
        plugincls = state.pop('plugins')
        self.update(state)
        self.worker = True
        # FIXME won't work for static plugin lists
        self.plugins = plugincls()
        self.plugins.loadPlugins()
        # needed so .can_configure gets set appropriately
        dummy_parser = self.parserClass()
        self.plugins.addOptions(dummy_parser, {})
        self.plugins.configure(self.options, self)
    def __repr__(self):
        d = self.__dict__.copy()
        # don't expose env, could include sensitive info
        d['env'] = {}
        keys = [ k for k in list(d.keys())
                 if not k.startswith('_') ]
        keys.sort()
        return "Config(%s)" % ', '.join([ '%s=%r' % (k, d[k])
                                          for k in keys ])
    __str__ = __repr__
    def _parseArgs(self, argv, cfg_files):
        def warn_sometimes(msg, name=None, filename=None):
            if (hasattr(self.plugins, 'excludedOption') and
                self.plugins.excludedOption(name)):
                msg = ("Option %r in config file %r ignored: "
                       "excluded by runtime environment" %
                       (name, filename))
                warn(msg, RuntimeWarning)
            else:
                raise ConfigError(msg)
        parser = ConfiguredDefaultsOptionParser(
            self.getParser(), self.configSection, file_error=warn_sometimes)
        return parser.parseArgsAndConfigFiles(argv[1:], cfg_files)
    def configure(self, argv=None, doc=None):
        """Configure the nose running environment. Execute configure before
        collecting tests with nose.TestCollector to enable output capture and
        other features.
        """
        env = self.env
        if argv is None:
            argv = sys.argv
        cfg_files = getattr(self, 'files', [])
        options, args = self._parseArgs(argv, cfg_files)
        # If -c --config has been specified on command line,
        # load those config files and reparse
        if getattr(options, 'files', []):
            options, args = self._parseArgs(argv, options.files)
        self.options = options
        if args:
            self.testNames = args
        if options.testNames is not None:
            self.testNames.extend(tolist(options.testNames))
        if options.py3where is not None:
            if sys.version_info >= (3,):
                options.where = options.py3where
        # `where` is an append action, so it can't have a default value
        # in the parser, or that default will always be in the list
        if not options.where:
            options.where = env.get('NOSE_WHERE', None)
        # include and exclude also
        if not options.ignoreFiles:
            options.ignoreFiles = env.get('NOSE_IGNORE_FILES', [])
        if not options.include:
            options.include = env.get('NOSE_INCLUDE', [])
        if not options.exclude:
            options.exclude = env.get('NOSE_EXCLUDE', [])
        self.addPaths = options.addPaths
        self.stopOnError = options.stopOnError
        self.verbosity = options.verbosity
        self.includeExe = options.includeExe
        self.traverseNamespace = options.traverseNamespace
        self.debug = options.debug
        self.debugLog = options.debugLog
        self.loggingConfig = options.loggingConfig
        self.firstPackageWins = options.firstPackageWins
        self.configureLogging()
        if not options.byteCompile:
            sys.dont_write_bytecode = True
        if options.where is not None:
            self.configureWhere(options.where)
        if options.testMatch:
            self.testMatch = re.compile(options.testMatch)
        if options.ignoreFiles:
            self.ignoreFiles = list(map(re.compile, tolist(options.ignoreFiles)))
            log.info("Ignoring files matching %s", options.ignoreFiles)
        else:
            log.info("Ignoring files matching %s", self.ignoreFilesDefaultStrings)
        if options.include:
            self.include = list(map(re.compile, tolist(options.include)))
            log.info("Including tests matching %s", options.include)
        if options.exclude:
            self.exclude = list(map(re.compile, tolist(options.exclude)))
            log.info("Excluding tests matching %s", options.exclude)
        # When listing plugins we don't want to run them
        if not options.showPlugins:
            self.plugins.configure(options, self)
            self.plugins.begin()
    def configureLogging(self):
        """Configure logging for nose, or optionally other packages. Any logger
        name may be set with the debug option, and that logger will be set to
        debug level and be assigned the same handler as the nose loggers, unless
        it already has a handler.
        """
        if self.loggingConfig:
            from logging.config import fileConfig
            fileConfig(self.loggingConfig)
            return
        format = logging.Formatter('%(name)s: %(levelname)s: %(message)s')
        if self.debugLog:
            handler = logging.FileHandler(self.debugLog)
        else:
            handler = logging.StreamHandler(self.logStream)
        handler.setFormatter(format)
        logger = logging.getLogger('nose')
        logger.propagate = 0
        # only add our default handler if there isn't already one there
        # this avoids annoying duplicate log messages.
        found = False
        if self.debugLog:
            debugLogAbsPath = os.path.abspath(self.debugLog)
            for h in logger.handlers:
                if type(h) == logging.FileHandler and \
                        h.baseFilename == debugLogAbsPath:
                    found = True
        else:
            for h in logger.handlers:
                if type(h) == logging.StreamHandler and \
                        h.stream == self.logStream:
                    found = True
        if not found:
            logger.addHandler(handler)
        # default level
        lvl = logging.WARNING
        if self.verbosity >= 5:
            lvl = 0
        elif self.verbosity >= 4:
            lvl = logging.DEBUG
        elif self.verbosity >= 3:
            lvl = logging.INFO
        logger.setLevel(lvl)
        # individual overrides
        if self.debug:
            # no blanks
            debug_loggers = [ name for name in self.debug.split(',')
                              if name ]
            for logger_name in debug_loggers:
                l = logging.getLogger(logger_name)
                l.setLevel(logging.DEBUG)
                if not l.handlers and not logger_name.startswith('nose'):
                    l.addHandler(handler)
    def configureWhere(self, where):
        """Configure the working directory or directories for the test run.
        """
        from nose.importer import add_path
        self.workingDir = None
        where = tolist(where)
        warned = False
        for path in where:
            if not self.workingDir:
                abs_path = absdir(path)
                if abs_path is None:
                    raise ValueError("Working directory %s not found, or "
                                     "not a directory" % path)
                log.info("Set working dir to %s", abs_path)
                self.workingDir = abs_path
                if self.addPaths and \
                       os.path.exists(os.path.join(abs_path, '__init__.py')):
                    log.info("Working directory %s is a package; "
                             "adding to sys.path" % abs_path)
                    add_path(abs_path)
                continue
            if not warned:
                warn("Use of multiple -w arguments is deprecated and "
                     "support may be removed in a future release. You can "
                     "get the same behavior by passing directories without "
                     "the -w argument on the command line, or by using the "
                     "--tests argument in a configuration file.",
                     DeprecationWarning)
                warned = True
            self.testNames.append(path)
    def default(self):
        """Reset all config values to defaults.
        """
        self.__dict__.update(self._default)
    def getParser(self, doc=None):
        """Get the command line option parser.
        """
        if self.parser:
            return self.parser
        env = self.env
        parser = self.parserClass(doc)
        parser.add_option(
            "-V","--version", action="store_true",
            dest="version", default=False,
            help="Output nose version and exit")
        parser.add_option(
            "-p", "--plugins", action="store_true",
            dest="showPlugins", default=False,
            help="Output list of available plugins and exit. Combine with "
            "higher verbosity for greater detail")
        parser.add_option(
            "-v", "--verbose",
            action="count", dest="verbosity",
            default=self.verbosity,
            help="Be more verbose. [NOSE_VERBOSE]")
        parser.add_option(
            "--verbosity", action="store", dest="verbosity",
            metavar='VERBOSITY',
            type="int", help="Set verbosity; --verbosity=2 is "
            "the same as -v")
        parser.add_option(
            "-q", "--quiet", action="store_const", const=0, dest="verbosity",
            help="Be less verbose")
        parser.add_option(
            "-c", "--config", action="append", dest="files",
            metavar="FILES",
            help="Load configuration from config file(s). May be specified "
            "multiple times; in that case, all config files will be "
            "loaded and combined")
        parser.add_option(
            "-w", "--where", action="append", dest="where",
            metavar="WHERE",
            help="Look for tests in this directory. "
            "May be specified multiple times. The first directory passed "
            "will be used as the working directory, in place of the current "
            "working directory, which is the default. Others will be added "
            "to the list of tests to execute. [NOSE_WHERE]"
            )
        parser.add_option(
            "--py3where", action="append", dest="py3where",
            metavar="PY3WHERE",
            help="Look for tests in this directory under Python 3.x. "
            "Functions the same as 'where', but only applies if running under "
            "Python 3.x or above.  Note that, if present under 3.x, this "
            "option completely replaces any directories specified with "
            "'where', so the 'where' option becomes ineffective. "
            "[NOSE_PY3WHERE]"
            )
        parser.add_option(
            "-m", "--match", "--testmatch", action="store",
            dest="testMatch", metavar="REGEX",
            help="Files, directories, function names, and class names "
            "that match this regular expression are considered tests.  "
            "Default: %s [NOSE_TESTMATCH]" % self.testMatchPat,
            default=self.testMatchPat)
        parser.add_option(
            "--tests", action="store", dest="testNames", default=None,
            metavar='NAMES',
            help="Run these tests (comma-separated list). This argument is "
            "useful mainly from configuration files; on the command line, "
            "just pass the tests to run as additional arguments with no "
            "switch.")
        parser.add_option(
            "-l", "--debug", action="store",
            dest="debug", default=self.debug,
            help="Activate debug logging for one or more systems. "
            "Available debug loggers: nose, nose.importer, "
            "nose.inspector, nose.plugins, nose.result and "
            "nose.selector. Separate multiple names with a comma.")
        parser.add_option(
            "--debug-log", dest="debugLog", action="store",
            default=self.debugLog, metavar="FILE",
            help="Log debug messages to this file "
            "(default: sys.stderr)")
        parser.add_option(
            "--logging-config", "--log-config",
            dest="loggingConfig", action="store",
            default=self.loggingConfig, metavar="FILE",
            help="Load logging config from this file -- bypasses all other"
            " logging config settings.")
        parser.add_option(
            "-I", "--ignore-files", action="append", dest="ignoreFiles",
            metavar="REGEX",
            help="Completely ignore any file that matches this regular "
            "expression. Takes precedence over any other settings or "
            "plugins. "
            "Specifying this option will replace the default setting. "
            "Specify this option multiple times "
            "to add more regular expressions [NOSE_IGNORE_FILES]")
        parser.add_option(
            "-e", "--exclude", action="append", dest="exclude",
            metavar="REGEX",
            help="Don't run tests that match regular "
            "expression [NOSE_EXCLUDE]")
        parser.add_option(
            "-i", "--include", action="append", dest="include",
            metavar="REGEX",
            help="This regular expression will be applied to files, "
            "directories, function names, and class names for a chance "
            "to include additional tests that do not match TESTMATCH.  "
            "Specify this option multiple times "
            "to add more regular expressions [NOSE_INCLUDE]")
        parser.add_option(
            "-x", "--stop", action="store_true", dest="stopOnError",
            default=self.stopOnError,
            help="Stop running tests after the first error or failure")
        parser.add_option(
            "-P", "--no-path-adjustment", action="store_false",
            dest="addPaths",
            default=self.addPaths,
            help="Don't make any changes to sys.path when "
            "loading tests [NOSE_NOPATH]")
        parser.add_option(
            "--exe", action="store_true", dest="includeExe",
            default=self.includeExe,
            help="Look for tests in python modules that are "
            "executable. Normal behavior is to exclude executable "
            "modules, since they may not be import-safe "
            "[NOSE_INCLUDE_EXE]")
        parser.add_option(
            "--noexe", action="store_false", dest="includeExe",
            help="DO NOT look for tests in python modules that are "
            "executable. (The default on the windows platform is to "
            "do so.)")
        parser.add_option(
            "--traverse-namespace", action="store_true",
            default=self.traverseNamespace, dest="traverseNamespace",
            help="Traverse through all path entries of a namespace package")
        parser.add_option(
            "--first-package-wins", "--first-pkg-wins", "--1st-pkg-wins",
            action="store_true", default=False, dest="firstPackageWins",
            help="nose's importer will normally evict a package from sys."
            "modules if it sees a package with the same name in a different "
            "location. Set this option to disable that behavior.")
        parser.add_option(
            "--no-byte-compile",
            action="store_false", default=True, dest="byteCompile",
            help="Prevent nose from byte-compiling the source into .pyc files "
            "while nose is scanning for and running tests.")
        self.plugins.loadPlugins()
        self.pluginOpts(parser)
        self.parser = parser
        return parser
    def help(self, doc=None):
        """Return the generated help message
        """
        return self.getParser(doc).format_help()
    def pluginOpts(self, parser):
        self.plugins.addOptions(parser, self.env)
    def reset(self):
        self.__dict__.update(self._orig)
    def todict(self):
        return self.__dict__.copy()
    def update(self, d):
        self.__dict__.update(d)
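# A minimal sketch, not used by nose itself, of driving Config the way the
# configure() docstring above describes: build a Config, let it parse argv
# (plus any config files), then read the resulting options. The argv value is
# made up and Config() is assumed to be constructible with its defaults.
def _example_configure():
    conf = Config()
    conf.configure(['nosetests', '-v'])
    return conf.options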
class NoOptions(object):
    """Options container that returns None for all options.
    """
    def __getstate__(self):
        return {}
    def __setstate__(self, state):
        pass
    def __getnewargs__(self):
        return ()
    def __bool__(self):
        return False
def user_config_files():
    """Return path to any existing user config files
    """
    return list(filter(os.path.exists,
                  list(map(os.path.expanduser, config_files))))
def all_config_files():
    """Return path to any existing user config files, plus any setup.cfg
    in the current working directory.
    """
    user = user_config_files()
    if os.path.exists('setup.cfg'):
        return user + ['setup.cfg']
    return user
# used when parsing config files
def flag(val):
    """Does the value look like an on/off flag?"""
    if val == 1:
        return True
    elif val == 0:
        return False
    val = str(val)
    if len(val) > 5:
        return False
    return val.upper() in ('1', '0', 'F', 'T', 'TRUE', 'FALSE', 'ON', 'OFF')
def _bool(val):
    return str(val).upper() in ('1', 'T', 'TRUE', 'ON')
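# A minimal sketch, not used by nose itself, of how flag() and _bool() fit
# together when interpreting a raw value read from a config file: flag()
# decides whether the string looks like an on/off switch, _bool() normalizes
# it to True/False. The sample values are made up.
def _demo_flag_parsing():
    for raw in ('1', 'off', 'TRUE', 'not-a-flag'):
        if flag(raw):
            print('%r -> %r' % (raw, _bool(raw)))
        else:
            print('%r is not an on/off flag; kept as a plain string' % raw)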
 | 
	gpl-3.0 | -4,801,810,994,073,172,000 | 37.245083 | 82 | 0.564043 | false | 
| 
	marcel-dancak/QGIS | 
	python/plugins/processing/algs/qgis/VectorLayerHistogram.py | 
	12 | 
	3471 | 
	# -*- coding: utf-8 -*-
"""
***************************************************************************
    VectorLayerHistogram.py
    ---------------------
    Date                 : January 2013
    Copyright            : (C) 2013 by Victor Olaya
    Email                : volayaf at gmail dot com
***************************************************************************
*                                                                         *
*   This program is free software; you can redistribute it and/or modify  *
*   it under the terms of the GNU General Public License as published by  *
*   the Free Software Foundation; either version 2 of the License, or     *
*   (at your option) any later version.                                   *
*                                                                         *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'January 2013'
__copyright__ = '(C) 2013, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import plotly as plt
import plotly.graph_objs as go
from qgis.core import (QgsProcessingException,
                       QgsProcessingParameterFeatureSource,
                       QgsProcessingParameterField,
                       QgsProcessingParameterNumber,
                       QgsProcessingParameterFileDestination)
from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm
from processing.tools import vector
class VectorLayerHistogram(QgisAlgorithm):
    INPUT = 'INPUT'
    OUTPUT = 'OUTPUT'
    FIELD = 'FIELD'
    BINS = 'BINS'
    def group(self):
        return self.tr('Graphics')
    def groupId(self):
        return 'graphics'
    def __init__(self):
        super().__init__()
    def initAlgorithm(self, config=None):
        self.addParameter(QgsProcessingParameterFeatureSource(self.INPUT,
                                                              self.tr('Input layer')))
        self.addParameter(QgsProcessingParameterField(self.FIELD,
                                                      self.tr('Attribute'), parentLayerParameterName=self.INPUT,
                                                      type=QgsProcessingParameterField.Numeric))
        self.addParameter(QgsProcessingParameterNumber(self.BINS,
                                                       self.tr('number of bins'), minValue=2, defaultValue=10))
        self.addParameter(QgsProcessingParameterFileDestination(self.OUTPUT, self.tr('Histogram'), self.tr('HTML files (*.html)')))
    def name(self):
        return 'vectorlayerhistogram'
    def displayName(self):
        return self.tr('Vector layer histogram')
    def processAlgorithm(self, parameters, context, feedback):
        source = self.parameterAsSource(parameters, self.INPUT, context)
        if source is None:
            raise QgsProcessingException(self.invalidSourceError(parameters, self.INPUT))
        fieldname = self.parameterAsString(parameters, self.FIELD, context)
        bins = self.parameterAsInt(parameters, self.BINS, context)
        output = self.parameterAsFileOutput(parameters, self.OUTPUT, context)
        values = vector.values(source, fieldname)
        data = [go.Histogram(x=values[fieldname],
                             nbinsx=bins)]
        plt.offline.plot(data, filename=output, auto_open=False)
        return {self.OUTPUT: output}
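# A minimal sketch, separate from the algorithm above, of the plotly pattern
# it relies on: build one Histogram trace from a list of numbers and write it
# to a standalone HTML file. The values, bin count and output path are
# placeholders supplied by the caller.
def _example_histogram_html(values, output_path, bins=10):
    data = [go.Histogram(x=values, nbinsx=bins)]
    plt.offline.plot(data, filename=output_path, auto_open=False)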
 | 
	gpl-2.0 | 1,966,366,486,805,850,400 | 38 | 131 | 0.539326 | false | 
| 
	truthbk/dd-agent | 
	resources/__init__.py | 
	34 | 
	10025 | 
	# stdlib
from collections import namedtuple
from datetime import datetime, timedelta
import time
from types import DictType, ListType, StringTypes
class agg(object):
    @staticmethod
    def avg(args):
        if len(args) > 0:
            return sum(args)/len(args)
        else:
            return 0
    @staticmethod
    def append(args):
        l = []
        for arg in args:
            if isinstance(arg, StringTypes):
                l.extend(arg.split(","))
            else:
                l.append(str(arg))
        return ",".join(list(set(l)))
MetricDescriptor = namedtuple('MetricDescriptor',['version','name','type','aggregator',
        'temporal_aggregator','server_aggregator','server_temporal_aggregator',
        'group_on','temporal_group_on'])
SnapshotDesc = namedtuple('SnapshotDesc',['version','fields'])
def SnapshotField(name,_type,aggregator=sum,temporal_aggregator=agg.avg,
                  server_aggregator=None,server_temporal_aggregator=None,
                  group_on = False, temporal_group_on = False):
    if server_aggregator is None:
        if _type == 'str':
            server_aggregator = agg.append
        else:
            server_aggregator = sum
    if server_temporal_aggregator is None:
        if _type == 'str':
            server_temporal_aggregator = agg.append
        else:
            server_temporal_aggregator = agg.avg
    return MetricDescriptor(2,name,_type,aggregator,temporal_aggregator,
                server_aggregator,server_temporal_aggregator,
                group_on = group_on, temporal_group_on = temporal_group_on)
def SnapshotDescriptor(version, *fields):
    return SnapshotDesc(version, fields)
class ResourcePlugin(object):
    def __init__(self, logger, agentConfig):
        self.log = logger
        self.config = agentConfig
        self._descriptor = None
        self._snapshots = []  # stack of snapshots not yet temporally aggregated
        self._last_snapshots = None  # last aggregated snapshots
        self._current_snapshot = None  # snapshot being built
        self._current_ts = None
        self._format_described = False  # Do we need to send the format description to the intake?
        self._descriptor = self.describe_snapshot()
    @classmethod
    def get_group_ts(cls, ts):
        """find the aggregation group this timestamp belongs to
            taking into account the flush interval"""
        m = ((ts.minute/cls.FLUSH_INTERVAL) + 1) * cls.FLUSH_INTERVAL
        return ts.replace(microsecond=0, second=0, minute=0) + timedelta(minutes=m)
    @staticmethod
    def _group_by(keys, lines):
        if keys is None:
            return lines
        if not isinstance(keys, ListType):
            keys = [keys]
        group = {}
        key = keys[0]
        for line in lines:
            k = key(line)
            if k in group:
                group[k].append(line)
            else:
                group[k] = [line]
        return group
    def _aggregate_lines(self, lines, temporal = False):
        if len(lines) == 1:
            return lines[0]
        result = []
        i = 0
        for metric in self._descriptor.fields:
            if temporal:
                agg_fun = metric.temporal_aggregator
            else:
                agg_fun = metric.aggregator
            if agg_fun is None:
                result.append(lines[0][i])
            else:
                arglist = []
                for line in lines:
                    arglist.append(line[i])
                try:
                    result.append(agg_fun(arglist))
                except Exception, e:
                    self.log.error("Error aggregating metric: %s" % metric.name)
                    self.log.error("Error while applying %s on %s" % (agg_fun, str(arglist)))
                    raise e
            i = i + 1
        return result
    def _aggregate(self, lines, group_by = None, filter_by = None, temporal = False):
        # group the current snapshot if needed
        if group_by is not None:
            groups = self._group_by(group_by, lines)
        else:
            groups = {'foo': lines}
        # Aggregate each terminal group
        dlist = []
        def _aggregate_groups(groups):
            for group in groups:
                rows = groups[group]
                if isinstance(rows, DictType):
                    _aggregate_groups(rows)
                else:
                    dlist.append(self._aggregate_lines(rows, temporal = temporal))
        _aggregate_groups(groups)
        # Now filter dlist and keep only what is interesting
        if filter_by is None:
            dlist2 = dlist
        else:
            dlist2 = filter(filter_by,dlist)
        return dlist2
    def _flush_snapshots(self, snapshot_group = None, group_by = None, filter_by = None,
                         temporal = True):
        # Aggregate (temporally) all snapshots into last_snapshots
        new_snap = (int(time.mktime(snapshot_group.timetuple())),
                    self._aggregate(self._snapshots,
                                    group_by = group_by,
                                    filter_by = filter_by,
                                    temporal = temporal))
        if self._last_snapshots is None:
            self._last_snapshots = [new_snap]
        else:
            self._last_snapshots.append(new_snap)
        self._snapshots = []
    def _check_current_snapshot(self,now):
        """Check if the current snapshot is complete"""
        if self._current_ts is not None:
            g1 = self.get_group_ts(self._current_ts)
            g2 = self.get_group_ts(now)
            if g1 != g2:
                self.log.debug("Snapshot complete at %s" % g1)
                self.end_snapshot(self._current_ts)
                self.flush_snapshots(g1)
        if self._current_snapshot is None:
            self.start_snapshot()
    def _flush_if_needed(self,now):
        """Check the older snapshot in the stack, and flush
            them all if needed"""
        if self._current_ts is not None:
            g1 = self.get_group_ts(self._current_ts)
            g2 = self.get_group_ts(now)
            self.log.debug("Resources: (%s) group now: %s, group ts: %s" % (self.RESOURCE_KEY,g2,g1))
            if g1 != g2:  # It's time to flush
                self.log.debug("Resources: Flushing %s snapshots" % self.RESOURCE_KEY)
                self.flush_snapshots(g2)
                self._current_ts = None
    #--------------------------------- public API ------------------------------------------
    def get_format_version(self):
        return self._descriptor.version
    def describe_format_if_needed(self):
        if not self._format_described:
            self._format_described = True
            ret = []
            for field in self._descriptor.fields:
                f_agg_name = f_tagg_name = None
                f_serv_agg_name = f_serv_tagg_name = None
                if field.aggregator is not None:
                    f_agg_name = field.aggregator.__name__
                if field.temporal_aggregator is not None:
                    f_tagg_name = field.temporal_aggregator.__name__
                if field.server_aggregator is not None:
                    f_serv_agg_name = field.server_aggregator.__name__
                if field.server_temporal_aggregator is not None:
                    f_serv_tagg_name = field.server_temporal_aggregator.__name__
                ret.append([
                    field.version,
                    field.name,
                    field.type,
                    f_agg_name,
                    f_tagg_name,
                    f_serv_agg_name,
                    f_serv_tagg_name,
                    field.group_on,
                    field.temporal_group_on,
                ])
            return ret
    def describe_snapshot(self):
        """Register the snapshot details for this plugin:
           - What a line is made of
           - How to aggregate it
            Must return a SnapshotDescriptor
        """
        raise Exception("To be implemented in plugin")
    def start_snapshot(self):
        """Start a new snapshot for any timestamp"""
        self._current_snapshot = []
    def add_to_snapshot(self,metric_line,ts = None):
        """2 modes:
            - raw snapshots: do not provide ts
            - incremental snapshots: provide ts, a new snapshot group
              will be created if needed"""
        if ts is None:
            self._current_snapshot.append(metric_line)
        else:
            self._check_current_snapshot(ts)
            self._current_ts = ts
            self._current_snapshot.append(metric_line)
    def end_snapshot(self,ts=None,group_by=None,filter_by=None):
        """End the current snapshot:
            group and aggregate as configured
            ts: a datetime object
        """
        if ts is None:
            now = datetime.now()
        else:
            now = ts
        # We flush before, by checking if the new snapshot
        # is in the same group as the one before and if
        # the flush interval is correct
        self._flush_if_needed(now)
        if self._current_ts is None:
            self._current_ts = now
        if group_by is not None or filter_by is not None:
            self._snapshots.extend(
                self._aggregate(self._current_snapshot,
                                group_by = group_by,
                                filter_by = filter_by,
                                temporal = False))
        else:
            self._snapshots.extend(self._current_snapshot)
        self._current_snapshot = None
    def flush_snapshots(self,snapshot_group):
        raise Exception("To be implemented (by calling _flush_snapshot) in a plugin")
    def check(self):
        raise Exception("To be implemented in a plugin")
    def pop_snapshots(self):
        ret = self._last_snapshots
        self._last_snapshots = None
        return ret
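# A minimal sketch of a concrete plugin, illustrating the lifecycle the
# docstrings above describe: describe_snapshot() declares the line format,
# check() builds one snapshot, and flush_snapshots() delegates to
# _flush_snapshots(). The resource key, flush interval, field names and the
# sample line are assumptions, not part of the agent.
class ExampleDiskPlugin(ResourcePlugin):
    RESOURCE_KEY = "example.disk"
    FLUSH_INTERVAL = 1  # minutes
    def describe_snapshot(self):
        return SnapshotDescriptor(
            1,
            SnapshotField("device", 'str', aggregator=None,
                          temporal_aggregator=None, group_on=True),
            SnapshotField("used_mb", 'int'))
    def flush_snapshots(self, snapshot_group):
        self._flush_snapshots(snapshot_group=snapshot_group)
    def check(self):
        self.start_snapshot()
        self.add_to_snapshot(["sda1", 512])
        self.end_snapshot(group_by=lambda line: line[0])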
 | 
	bsd-3-clause | 7,545,313,935,478,577,000 | 32.416667 | 101 | 0.538354 | false | 
| 
	moto-timo/ironpython3 | 
	Tests/modules/io_related/cStringIO_test.py | 
	3 | 
	8995 | 
	#####################################################################################
#
#  Copyright (c) Microsoft Corporation. All rights reserved.
#
# This source code is subject to terms and conditions of the Apache License, Version 2.0. A
# copy of the license can be found in the License.html file at the root of this distribution. If
# you cannot locate the  Apache License, Version 2.0, please send an email to
# [email protected]. By using this source code in any fashion, you are agreeing to be bound
# by the terms of the Apache License, Version 2.0.
#
# You must not remove this notice, or any other, from this software.
#
#
#####################################################################################
##
## Test the cStringIO module
##
from iptest.assert_util import *
import cStringIO
text = "Line 1\nLine 2\nLine 3\nLine 4\nLine 5"
# close
def call_close(i):
    AreEqual(i.closed, False)
    i.close()
    AreEqual(i.closed, True)
    i.close()
    AreEqual(i.closed, True)
    i.close()
    AreEqual(i.closed, True)
    
def call_isatty(i):
    AreEqual(i.isatty(), False)
# read
def call_read(i):
    AreEqual(i.read(), text)
    AreEqual(i.read(), "")
    AreEqual(i.read(), "")
    i.close()
    i.close()
    AssertError(ValueError, i.read)
    
   
# readline
def call_readline(i):
    AreEqual(i.readline(), "Line 1\n")
    AreEqual(i.readline(), "Line 2\n")
    AreEqual(i.readline(), "Line 3\n")
    AreEqual(i.readline(), "Line 4\n")
    AreEqual(i.readline(), "Line 5")
    AreEqual(i.readline(), "")
    i.close()
    AssertError(ValueError, i.readline)
def call_readline_n(i):
    AreEqual(i.readline(50), "Line 1\n")
    AreEqual(i.readline(0), "")
    AreEqual(i.readline(1), "L")
    AreEqual(i.readline(9), "ine 2\n")
    AreEqual(i.readline(50), "Line 3\n")
    AreEqual(i.readline(6), "Line 4")
    AreEqual(i.readline(50), "\n")
    AreEqual(i.readline(50), "Line 5")
    i.close()
    AssertError(ValueError, i.readline)    
# readlines
def call_readlines(i):
    AreEqual(i.readlines(), ["Line 1\n", "Line 2\n", "Line 3\n", "Line 4\n", "Line 5"])
    AreEqual(i.readlines(), [])
    i.close()
    AssertError(ValueError, i.readlines)
def call_readlines_n(i):
    AreEqual(i.readlines(10), ["Line 1\n", "Line 2\n"])
    AreEqual(i.readlines(50), ["Line 3\n", "Line 4\n", "Line 5"])
    AreEqual(i.readlines(50), [])
    i.close()
    AssertError(ValueError, i.readlines)
    
# getvalue
def call_getvalue(i):
    AreEqual(i.getvalue(), text)
    AreEqual(i.read(6), "Line 1")
    AreEqual(i.getvalue(True), "Line 1")
    AreEqual(i.getvalue(), text)
    i.close()
    AssertError(ValueError, i.getvalue)
    
    
# __iter__, next
def call_next(i):
    AreEqual(i.__iter__(), i)
    AreEqual(i.next(), "Line 1\n")
    AreEqual(i.next(), "Line 2\n")
    AreEqual([l for l in i], ["Line 3\n", "Line 4\n", "Line 5"])
    i.close()
    AssertError(ValueError, i.readlines)
    
    
# read, readline, reset
def call_reset(i):
    AreEqual(i.read(0), "")
    AreEqual(i.read(4), "Line")
    AreEqual(i.readline(), " 1\n")
    i.reset()
    AreEqual(i.read(4), "Line")
    AreEqual(i.readline(), " 1\n")
    i.reset()
    AreEqual(i.read(37),text)
    i.reset()
    AreEqual(i.read(38),text)
    i.close()
    AssertError(ValueError, i.read, 5)
    AssertError(ValueError, i.readline)
    
    
    
    
# seek, tell, read
def call_seek_tell(i):
    AreEqual(i.read(4), "Line")
    AreEqual(i.tell(), 4)
    i.seek(10)
    AreEqual(i.tell(), 10)
    AreEqual(i.read(3), "e 2")
    i.seek(15, 0)
    AreEqual(i.tell(), 15)
    AreEqual(i.read(5), "ine 3")
    i.seek(3, 1)
    AreEqual(i.read(4), "ne 4")
    i.seek(-5, 2)
    AreEqual(i.tell(), len(text) - 5)
    AreEqual(i.read(), "ine 5")
    i.seek(1000)
    AreEqual(i.tell(), 1000)
    AreEqual(i.read(), "")
    i.seek(2000, 0)
    AreEqual(i.tell(), 2000)
    AreEqual(i.read(), "")
    i.seek(400, 1)
    AreEqual(i.tell(), 2400)
    AreEqual(i.read(), "")
    i.seek(100, 2)
    AreEqual(i.tell(), len(text) + 100)
    AreEqual(i.read(), "")
    i.close()
    AssertError(ValueError, i.tell)
    AssertError(ValueError, i.seek, 0)
    AssertError(ValueError, i.seek, 0, 2)
    
# truncate
def call_truncate(i):
    AreEqual(i.read(6), "Line 1")
    i.truncate(20)
    AreEqual(i.tell(), 20)
    AreEqual(i.getvalue(), "Line 1\nLine 2\nLine 3")
    i.truncate(30)
    AreEqual(i.tell(), 20)
    AreEqual(i.getvalue(), "Line 1\nLine 2\nLine 3")
    i.reset()
    AreEqual(i.tell(), 0)
    AreEqual(i.read(6), "Line 1")
    i.truncate()
    AreEqual(i.getvalue(), "Line 1")
    i.close()
    AssertError(ValueError, i.truncate)
    AssertError(ValueError, i.truncate, 10)
    
    
    
   
# write
def call_write(o):
    AreEqual(o.getvalue(), text)
    o.write("Data")
    o.write(buffer(' 1'))
    AssertError(TypeError, o.write, None)
    AreEqual(o.read(7), "\nLine 2")
    AreEqual(o.getvalue(), "Data 1\nLine 2\nLine 3\nLine 4\nLine 5")
    o.close()
    AssertError(ValueError, o.write, "Hello")
# writelines
def call_writelines(o):
    AreEqual(o.getvalue(), text)
    o.writelines(["Data 1", "Data 2"])
    AreEqual(o.read(8), "2\nLine 3")
    AreEqual(o.getvalue(), "Data 1Data 22\nLine 3\nLine 4\nLine 5")
    AssertError(TypeError, o.writelines, [buffer('foo')])
    AssertError(TypeError, o.writelines, [None])
    o.close()
    AssertError(ValueError, o.writelines, "Hello")
    AssertError(ValueError, o.writelines, ['foo', buffer('foo')])
    AssertError(TypeError, o.writelines, [buffer('foo')])
# softspace
def call_softspace(o):
    o.write("Hello")
    o.write("Hi")
    o.softspace = 1
    AreEqual(o.softspace, 1)
    AreEqual(o.getvalue(), "HelloHiLine 2\nLine 3\nLine 4\nLine 5")
# flush
def call_flush(i):
    i.flush()
    AreEqual(i,i)
def init_StringI():
    return cStringIO.StringIO(text)
def init_StringO():
    o = cStringIO.StringIO()
    o.write(text)
    o.reset()
    return o
def init_emptyStringI():
    return cStringIO.StringIO("")
    
def test_empty():
    i = init_emptyStringI()
    
    # test closed
    AreEqual(i.closed,False)
    i.close()
    AreEqual(i.closed,True)
    
    
    #test read
    i = init_emptyStringI()
    AreEqual(i.read(),"")
    i.close()
    AssertError(ValueError, i.read)
    i.close()
    AssertError(ValueError, i.read, 2)
    
    #test readline
    i = init_emptyStringI()
    AreEqual(i.readline(),"")
    i.close()
    AssertError(ValueError, i.readline)
    
    i = init_emptyStringI()
    AreEqual(i.readline(0),"")
    i.close()
    AssertError(ValueError, i.readline)
    
    #test readlines
    i = init_emptyStringI()
    AreEqual(i.readlines(),[])
    
    i = init_emptyStringI()
    AreEqual(i.readlines(0),[])
    
    #test getvalue
    i = init_emptyStringI()
    AreEqual(i.getvalue(),"")
    AreEqual(i.getvalue(True),"")
    i.close()
    AssertError(ValueError, i.getvalue)
    
    #test iter
    i = init_emptyStringI()
    AreEqual(i.__iter__(), i)
    
    #test reset
    i = init_emptyStringI()
    AreEqual(i.read(0), "")
    i.reset()
    AreEqual(i.read(1), "")
    i.reset()
    AreEqual(i.readline(), "")
    i.close()
    AssertError(ValueError, i.read, 2)
    AssertError(ValueError, i.readline)
    
    #test seek,tell,read
    i = init_emptyStringI()
    AreEqual(i.read(0), "")
    AreEqual(i.tell(), 0)
    AreEqual(i.read(1), "")
    AreEqual(i.tell(), 0)
    i.seek(2)
    AreEqual(i.tell(), 2)
    AreEqual(i.read(),"")
    i.close()
    AssertError(ValueError, i.tell)
    AssertError(ValueError, i.seek, 0)
    AssertError(ValueError, i.seek, 0, 2)
    
    #test truncate
    i = init_emptyStringI()
    i.truncate(0)
    AreEqual(i.tell(), 0)
    i.truncate(1)
    AreEqual(i.tell(), 0)
    i.close()
    AssertError(ValueError, i.truncate)
    
def test_cp8567():
    for x in ["", "1", "12", "12345", 
                #u"123", #CodePlex 19220
                ]:
        for i in [5, 6, 7, 2**8, 100, 2**16-1, 2**16, 2**16, 2**31-2, 2**31-1]:
            cio = cStringIO.StringIO(x)
            cio.truncate(i)
            AreEqual(cio.tell(), len(x))
            cio.close()
    
    
    
def test_i_o():
    for t in [  call_close,
                call_isatty,
                call_read,
                call_readline,
                call_readline_n,
                call_readlines,
                call_readlines_n,
                call_getvalue,
                call_next,
                call_reset,
                call_seek_tell,
                call_truncate,
                call_flush ]:
        i = init_StringI()
        t(i)
        
        o= init_StringO()
        t(o)
def test_o():
    for t in [  call_write,
                call_writelines,
                call_softspace ]:
        o = init_StringO()
        t(o)
def test_cp22017():
    m = cStringIO.StringIO()
    m.seek(2)
    m.write("hello!")
    AreEqual(m.getvalue(), '\x00\x00hello!')
    m.seek(2)
    AreEqual(m.getvalue(), '\x00\x00hello!')
run_test(__name__)
 | 
	apache-2.0 | -1,809,442,349,359,093,000 | 23.916898 | 96 | 0.567204 | false | 
| 
	Lujeni/ansible | 
	lib/ansible/modules/cloud/amazon/cloudformation_info.py | 
	2 | 
	13010 | 
	#!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cloudformation_info
short_description: Obtain information about an AWS CloudFormation stack
description:
  - Gets information about an AWS CloudFormation stack.
  - This module was called C(cloudformation_facts) before Ansible 2.9, returning C(ansible_facts).
    Note that the M(cloudformation_info) module no longer returns C(ansible_facts)!
requirements:
  - boto3 >= 1.0.0
  - python >= 2.6
version_added: "2.2"
author: Justin Menga (@jmenga)
options:
    stack_name:
        description:
          - The name or id of the CloudFormation stack. Gathers information on all stacks by default.
        type: str
    all_facts:
        description:
            - Get all stack information for the stack.
        type: bool
        default: false
    stack_events:
        description:
            - Get stack events for the stack.
        type: bool
        default: false
    stack_template:
        description:
            - Get stack template body for the stack.
        type: bool
        default: false
    stack_resources:
        description:
            - Get stack resources for the stack.
        type: bool
        default: false
    stack_policy:
        description:
            - Get stack policy for the stack.
        type: bool
        default: false
extends_documentation_fragment:
    - aws
    - ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Get summary information about a stack
- cloudformation_info:
    stack_name: my-cloudformation-stack
  register: output
- debug:
    msg: "{{ output['cloudformation']['my-cloudformation-stack'] }}"
# When the module is called as cloudformation_facts, return values are published
# in ansible_facts['cloudformation'][<stack_name>] and can be used as follows.
# Note that this is deprecated and will stop working in Ansible 2.13.
- cloudformation_facts:
    stack_name: my-cloudformation-stack
- debug:
    msg: "{{ ansible_facts['cloudformation']['my-cloudformation-stack'] }}"
# Get stack outputs, when you have the stack name available as a fact
- set_fact:
    stack_name: my-awesome-stack
- cloudformation_info:
    stack_name: "{{ stack_name }}"
  register: my_stack
- debug:
    msg: "{{ my_stack.cloudformation[stack_name].stack_outputs }}"
# Get all stack information about a stack
- cloudformation_info:
    stack_name: my-cloudformation-stack
    all_facts: true
# Get stack resource and stack policy information about a stack
- cloudformation_info:
    stack_name: my-cloudformation-stack
    stack_resources: true
    stack_policy: true
# Fail if the stack doesn't exist
- name: try to get facts about a stack but fail if it doesn't exist
  cloudformation_info:
    stack_name: nonexistent-stack
    all_facts: yes
  failed_when: cloudformation['nonexistent-stack'] is undefined
'''
RETURN = '''
stack_description:
    description: Summary facts about the stack
    returned: if the stack exists
    type: dict
stack_outputs:
    description: Dictionary of stack outputs keyed by the value of each output 'OutputKey' parameter and corresponding value of each
                 output 'OutputValue' parameter
    returned: if the stack exists
    type: dict
    sample:
      ApplicationDatabaseName: dazvlpr01xj55a.ap-southeast-2.rds.amazonaws.com
stack_parameters:
    description: Dictionary of stack parameters keyed by the value of each parameter 'ParameterKey' parameter and corresponding value of
                 each parameter 'ParameterValue' parameter
    returned: if the stack exists
    type: dict
    sample:
      DatabaseEngine: mysql
      DatabasePassword: "***"
stack_events:
    description: All stack events for the stack
    returned: only if all_facts or stack_events is true and the stack exists
    type: list
stack_policy:
    description: Describes the stack policy for the stack
    returned: only if all_facts or stack_policy is true and the stack exists
    type: dict
stack_template:
    description: Describes the stack template for the stack
    returned: only if all_facts or stack_template is true and the stack exists
    type: dict
stack_resource_list:
    description: Describes stack resources for the stack
    returned: only if all_facts or stack_resources is true and the stack exists
    type: list
stack_resources:
    description: Dictionary of stack resources keyed by the value of each resource 'LogicalResourceId' parameter and corresponding value of each
                 resource 'PhysicalResourceId' parameter
    returned: only if all_facts or stack_resources is true and the stack exists
    type: dict
    sample:
      AutoScalingGroup: "dev-someapp-AutoscalingGroup-1SKEXXBCAN0S7"
      AutoScalingSecurityGroup: "sg-abcd1234"
      ApplicationDatabase: "dazvlpr01xj55a"
'''
import json
import traceback
from functools import partial
try:
    import boto3
    import botocore
    HAS_BOTO3 = True
except ImportError:
    HAS_BOTO3 = False
from ansible.module_utils._text import to_native
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import (get_aws_connection_info, ec2_argument_spec, boto3_conn,
                                      camel_dict_to_snake_dict, AWSRetry, boto3_tag_list_to_ansible_dict)
class CloudFormationServiceManager:
    """Handles CloudFormation Services"""
    def __init__(self, module):
        self.module = module
        try:
            region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
            self.client = boto3_conn(module, conn_type='client',
                                     resource='cloudformation', region=region,
                                     endpoint=ec2_url, **aws_connect_kwargs)
            backoff_wrapper = AWSRetry.jittered_backoff(retries=10, delay=3, max_delay=30)
            self.client.describe_stacks = backoff_wrapper(self.client.describe_stacks)
            self.client.list_stack_resources = backoff_wrapper(self.client.list_stack_resources)
            self.client.describe_stack_events = backoff_wrapper(self.client.describe_stack_events)
            self.client.get_stack_policy = backoff_wrapper(self.client.get_stack_policy)
            self.client.get_template = backoff_wrapper(self.client.get_template)
        except botocore.exceptions.NoRegionError:
            self.module.fail_json(msg="Region must be specified as a parameter, in AWS_DEFAULT_REGION environment variable or in boto configuration file")
        except Exception as e:
            self.module.fail_json(msg="Can't establish connection - " + str(e), exception=traceback.format_exc())
    def describe_stacks(self, stack_name=None):
        try:
            kwargs = {'StackName': stack_name} if stack_name else {}
            func = partial(self.client.describe_stacks, **kwargs)
            response = self.paginated_response(func, 'Stacks')
            if response is not None:
                return response
            self.module.fail_json(msg="Error describing stack(s) - an empty response was returned")
        except Exception as e:
            if 'does not exist' in e.response['Error']['Message']:
                # missing stack, don't bail.
                return {}
            self.module.fail_json(msg="Error describing stack - " + to_native(e), exception=traceback.format_exc())
    def list_stack_resources(self, stack_name):
        try:
            func = partial(self.client.list_stack_resources, StackName=stack_name)
            return self.paginated_response(func, 'StackResourceSummaries')
        except Exception as e:
            self.module.fail_json(msg="Error listing stack resources - " + str(e), exception=traceback.format_exc())
    def describe_stack_events(self, stack_name):
        try:
            func = partial(self.client.describe_stack_events, StackName=stack_name)
            return self.paginated_response(func, 'StackEvents')
        except Exception as e:
            self.module.fail_json(msg="Error describing stack events - " + str(e), exception=traceback.format_exc())
    def get_stack_policy(self, stack_name):
        try:
            response = self.client.get_stack_policy(StackName=stack_name)
            stack_policy = response.get('StackPolicyBody')
            if stack_policy:
                return json.loads(stack_policy)
            return dict()
        except Exception as e:
            self.module.fail_json(msg="Error getting stack policy - " + str(e), exception=traceback.format_exc())
    def get_template(self, stack_name):
        try:
            response = self.client.get_template(StackName=stack_name)
            return response.get('TemplateBody')
        except Exception as e:
            self.module.fail_json(msg="Error getting stack template - " + str(e), exception=traceback.format_exc())
    def paginated_response(self, func, result_key, next_token=None):
        '''
        Returns expanded response for paginated operations.
        The 'result_key' is used to define the concatenated results that are combined from each paginated response.
        '''
        args = dict()
        if next_token:
            args['NextToken'] = next_token
        response = func(**args)
        result = response.get(result_key)
        next_token = response.get('NextToken')
        if not next_token:
            return result
        return result + self.paginated_response(func, result_key, next_token)
def to_dict(items, key, value):
    ''' Transforms a list of items to a Key/Value dictionary '''
    if items:
        return dict(zip([i.get(key) for i in items], [i.get(value) for i in items]))
    else:
        return dict()
def main():
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        stack_name=dict(),
        all_facts=dict(required=False, default=False, type='bool'),
        stack_policy=dict(required=False, default=False, type='bool'),
        stack_events=dict(required=False, default=False, type='bool'),
        stack_resources=dict(required=False, default=False, type='bool'),
        stack_template=dict(required=False, default=False, type='bool'),
    ))
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False)
    is_old_facts = module._name == 'cloudformation_facts'
    if is_old_facts:
        module.deprecate("The 'cloudformation_facts' module has been renamed to 'cloudformation_info', "
                         "and the renamed one no longer returns ansible_facts", version='2.13')
    if not HAS_BOTO3:
        module.fail_json(msg='boto3 is required.')
    service_mgr = CloudFormationServiceManager(module)
    if is_old_facts:
        result = {'ansible_facts': {'cloudformation': {}}}
    else:
        result = {'cloudformation': {}}
    for stack_description in service_mgr.describe_stacks(module.params.get('stack_name')):
        facts = {'stack_description': stack_description}
        stack_name = stack_description.get('StackName')
        # Create stack output and stack parameter dictionaries
        if facts['stack_description']:
            facts['stack_outputs'] = to_dict(facts['stack_description'].get('Outputs'), 'OutputKey', 'OutputValue')
            facts['stack_parameters'] = to_dict(facts['stack_description'].get('Parameters'), 'ParameterKey', 'ParameterValue')
            facts['stack_tags'] = boto3_tag_list_to_ansible_dict(facts['stack_description'].get('Tags'))
        # normalize stack description API output
        facts['stack_description'] = camel_dict_to_snake_dict(facts['stack_description'])
        # Create optional stack outputs
        all_facts = module.params.get('all_facts')
        if all_facts or module.params.get('stack_resources'):
            facts['stack_resource_list'] = service_mgr.list_stack_resources(stack_name)
            facts['stack_resources'] = to_dict(facts.get('stack_resource_list'), 'LogicalResourceId', 'PhysicalResourceId')
        if all_facts or module.params.get('stack_template'):
            facts['stack_template'] = service_mgr.get_template(stack_name)
        if all_facts or module.params.get('stack_policy'):
            facts['stack_policy'] = service_mgr.get_stack_policy(stack_name)
        if all_facts or module.params.get('stack_events'):
            facts['stack_events'] = service_mgr.describe_stack_events(stack_name)
        if is_old_facts:
            result['ansible_facts']['cloudformation'][stack_name] = facts
        else:
            result['cloudformation'][stack_name] = facts
    result['changed'] = False
    module.exit_json(**result)
if __name__ == '__main__':
    main()
 | 
	gpl-3.0 | -8,098,881,714,089,724,000 | 38.424242 | 154 | 0.662183 | false | 
| 
	shyaken/cp.eaemcb | 
	controllers/ldap/urls.py | 
	2 | 
	4402 | 
	# Author: Zhang Huangbin <[email protected]>
from libs.iredutils import reEmail, reDomain
urls = [
    # Make url ending with or without '/' going to the same class.
    '/(.*)/',                           'controllers.utils.redirect',
    # used to display jpegPhoto.
    '/img/(.*)',                        'controllers.utils.img',
    '/',                                'controllers.ldap.basic.Login',
    '/login',                           'controllers.ldap.basic.Login',
    '/logout',                          'controllers.ldap.basic.Logout',
    '/dashboard',                       'controllers.ldap.basic.Dashboard',
    '/dashboard/(checknew)',              'controllers.ldap.basic.Dashboard',
    # Search.
    '/search',                                  'controllers.ldap.basic.Search',
    # Perform some operations from search page.
    '/action/(domain|admin|user|maillist|alias)', 'controllers.ldap.basic.OperationsFromSearchPage',
    # Export LDIF data.
    '/export/ldif/(domain|catchall)/(%s$)' % reDomain,           'controllers.ldap.basic.ExportLdif',
    '/export/ldif/(admin|user|maillist|alias)/(%s$)' % reEmail,  'controllers.ldap.basic.ExportLdif',
    # Domain related.
    '/domains',                                     'controllers.ldap.domain.List',
    '/domains/page/(\d+)',                          'controllers.ldap.domain.List',
    '/profile/domain/(general|aliases|relay|bcc|catchall|throttle|advanced)/(%s$)' % reDomain,  'controllers.ldap.domain.Profile',
    '/profile/domain/(%s)' % reDomain,             'controllers.ldap.domain.Profile',
    '/create/domain',                               'controllers.ldap.domain.Create',
    # Admin related.
    '/admins',                                      'controllers.ldap.admin.List',
    '/admins/page/(\d+)',                           'controllers.ldap.admin.List',
    '/profile/admin/(general|password)/(%s$)' % reEmail,     'controllers.ldap.admin.Profile',
    '/create/admin',                                'controllers.ldap.admin.Create',
    #########################
    # User related
    #
    # List users, delete users under same domain.
    '/users',                                       'controllers.ldap.user.List',
    '/users/(%s$)' % reDomain,                       'controllers.ldap.user.List',
    '/users/(%s)/page/(\d+)' % reDomain,            'controllers.ldap.user.List',
    # Create user.
    '/create/user/(%s$)' % reDomain,                'controllers.ldap.user.Create',
    '/create/user',                               'controllers.ldap.user.Create',
    # Profile pages.
    '/profile/user/(general|members|forwarding|aliases|wblist|password|throttle|advanced)/(%s$)' % reEmail,      'controllers.ldap.user.Profile',
    # Import accounts.
    '/import/user',                               'controllers.ldap.user.Import',
    '/import/user/(%s$)' % reDomain,                'controllers.ldap.user.Import',
    '/import/alias',                               'controllers.ldap.alias.Import',
    ####################
    # Mail list related
    #
    # List accounts
    '/maillists',                                   'controllers.ldap.maillist.List',
    '/maillists/(%s$)' % reDomain,                    'controllers.ldap.maillist.List',
    '/maillists/(%s)/page/(\d+)' % reDomain,         'controllers.ldap.maillist.List',
    # General profile.
    '/profile/maillist/(general)/(%s$)' % reEmail,    'controllers.ldap.maillist.Profile',
    '/profile/maillist/members/(%s$)' % reEmail,      'controllers.ldap.maillist.Members',
    '/profile/maillist/moderators/(%s$)' % reEmail,               'controllers.ldap.maillist.Moderators',
    '/create/maillist/(%s$)' % reDomain,            'controllers.ldap.maillist.Create',
    '/create/maillist',                           'controllers.ldap.maillist.Create',
    # Alias related.
    '/aliases',                                         'controllers.ldap.alias.List',
    '/aliases/(%s$)' % reDomain,                         'controllers.ldap.alias.List',
    '/aliases/(%s)/page/(\d+)' % reDomain,              'controllers.ldap.alias.List',
    '/profile/alias/(general)/(%s$)' % reEmail,          'controllers.ldap.alias.Profile',
    '/create/alias/(%s$)' % reDomain,                    'controllers.ldap.alias.Create',
    '/create/alias',                                    'controllers.ldap.alias.Create',
]
 | 
	gpl-2.0 | -1,928,722,124,327,327,700 | 53.345679 | 145 | 0.52408 | false | 
| 
	nirmeshk/oh-mainline | 
	mysite/search/migrations/0012_search_result_sized_project_icons.py | 
	17 | 
	3571 | 
	# This file is part of OpenHatch.
# Copyright (C) 2009 OpenHatch, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
from south.db import db
from django.db import models
from mysite.search.models import *
class Migration:
    
    def forwards(self, orm):
        
        # Adding field 'Project.icon_for_search_result'
        db.add_column('search_project', 'icon_for_search_result', orm['search.project:icon_for_search_result'])
        
    
    
    def backwards(self, orm):
        
        # Deleting field 'Project.icon_for_search_result'
        db.delete_column('search_project', 'icon_for_search_result')
        
    
    
    models = {
        'search.bug': {
            'canonical_bug_link': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            'date_reported': ('django.db.models.fields.DateTimeField', [], {}),
            'description': ('django.db.models.fields.TextField', [], {}),
            'good_for_newcomers': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'importance': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'last_polled': ('django.db.models.fields.DateTimeField', [], {}),
            'last_touched': ('django.db.models.fields.DateTimeField', [], {}),
            'people_involved': ('django.db.models.fields.IntegerField', [], {}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['search.Project']"}),
            'status': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'submitter_realname': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'submitter_username': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
        },
        'search.project': {
            'date_icon_was_fetched_from_ohloh': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'icon': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
            'icon_for_search_result': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
            'icon_smaller_for_badge': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
            'icon_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
        }
    }
    
    complete_apps = ['search']
 | 
	agpl-3.0 | -2,338,245,943,986,589,000 | 52.298507 | 145 | 0.601512 | false | 
| 
	SLongofono/448_Project4 | 
	Documentation/Prototype/Toy_Playlist.py | 
	1 | 
	1529 | 
	## @file Toy_Playlist.py
# @brief An example of using the Spotipy module to create and access a playlist
# @author Paul Lamere
# @details From the Spotipy documentation
#         Accessed October 2016
#         https://github.com/plamere/spotipy/blob/master/examples/user_playlists.py
#         Modified by Stephen Longofono
#         10/23/2016
import sys
import os
import subprocess
import spotipy
import spotipy.util as util
if len(sys.argv) > 2:
    username = sys.argv[1]
    playlist_name = sys.argv[2]
else:
    print("Usage: %s username playlist-name" % (sys.argv[0],))
    sys.exit()
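# prompt_for_user_token runs spotipy's interactive OAuth flow; the check below treats a falsy token as failure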
token = util.prompt_for_user_token(username)
if token:
    sp = spotipy.Spotify(auth=token)
    sp.trace = False
    playlists = sp.user_playlist_create(username, playlist_name)
    # Get new songs to add from file
    try:
        songIDs = []
        songList = open('recommended.txt', 'r')
        for song in songList:
            # strip the trailing newline so each line is a clean track id
            songIDs.append(song.strip())
        songList.close()
    except:
        print "Error processing recommendations..."
        sys.exit()
    # Add songs
    try:
        for song in songIDs:
            # the dict returned by user_playlist_create carries the new playlist's id
            sp.user_playlist_add_tracks(username, playlists['id'], [song])
    except:
        print "Error adding songs to playlist..."
        sys.exit()
    # Add to list of already suggested songs
    x = open('oldsongs', 'a+')
    for song in songIDs:
            x.write(str(song))
            x.write('\n')
    x.close()
    # Remove recommended songs
else:
    print("Can't get token for", username)
 | 
	mit | 3,914,714,023,291,097,600 | 24.065574 | 83 | 0.626553 | false | 
| 
	TsinghuaX/edx-platform | 
	common/djangoapps/django_comment_common/tests.py | 
	11 | 
	2216 | 
	from django.test import TestCase
from django_comment_common.models import Role
from student.models import CourseEnrollment, User
class RoleAssignmentTest(TestCase):
    """
    Basic checks to make sure our Roles get assigned and unassigned as students
    are enrolled and unenrolled from a course.
    """
    def setUp(self):
        self.staff_user = User.objects.create_user(
            "patty",
            "[email protected]",
        )
        self.staff_user.is_staff = True
        self.student_user = User.objects.create_user(
            "hacky",
            "[email protected]"
        )
        self.course_id = "edX/Fake101/2012"
        CourseEnrollment.enroll(self.staff_user, self.course_id)
        CourseEnrollment.enroll(self.student_user, self.course_id)
    def test_enrollment_auto_role_creation(self):
        moderator_role = Role.objects.get(
            course_id=self.course_id,
            name="Moderator"
        )
        student_role = Role.objects.get(
            course_id=self.course_id,
            name="Student"
        )
        self.assertIn(moderator_role, self.staff_user.roles.all())
        self.assertIn(student_role, self.student_user.roles.all())
        self.assertNotIn(moderator_role, self.student_user.roles.all())
    # The following was written on the assumption that unenrolling from a course
    # should remove all forum Roles for that student for that course. This is
    # not necessarily the case -- please see comments at the top of 
    # django_comment_client.models.assign_default_role(). Leaving it for the
    # forums team to sort out.
    #
    # def test_unenrollment_auto_role_removal(self):
    #     another_student = User.objects.create_user("sol", "[email protected]")
    #     CourseEnrollment.enroll(another_student, self.course_id)
    #
    #     CourseEnrollment.unenroll(self.student_user, self.course_id)
    #     # Make sure we didn't delete the actual Role
    #     student_role = Role.objects.get(
    #         course_id=self.course_id,
    #         name="Student"
    #     )
    #     self.assertNotIn(student_role, self.student_user.roles.all())
    #     self.assertIn(student_role, another_student.roles.all())
 | 
	agpl-3.0 | 444,933,011,076,009,860 | 37.206897 | 80 | 0.640794 | false | 
| 
	Raviyanto/sunflower-fm | 
	application/gui/preferences/plugins.py | 
	7 | 
	9055 | 
	import os
import gtk
import locale
from ConfigParser import ConfigParser
from widgets.settings_page import SettingsPage
class Column:
	ACTIVE = 0
	LOCATION = 1
	NAME = 2
	AUTHOR = 3
	VERSION = 4
	CONTACT = 5
	SITE = 6
	DESCRIPTION = 7
class Section:
	NAME = 'Name'
	DESCRIPTION = 'Description'
	VERSION = 'Version'
	AUTHOR = 'Author'
class PluginsOptions(SettingsPage):
	"""Plugins options extension class"""
	NAME_SECTION = 'Name'
	AUTHOR_SECTION = 'Author'
	def __init__(self, parent, application):
		SettingsPage.__init__(self, parent, application, 'plugins', _('Plugins'))
		# create interface
		container = gtk.ScrolledWindow()
		container.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_ALWAYS)
		container.set_shadow_type(gtk.SHADOW_IN)
		# create list box
		self._plugins = gtk.ListStore(
									bool,	# active
									str,	# location
									str,	# name
									str,	# author
									str,	# version
									str,	# contact
									str,	# site
									str		# description
								)
		self._list = gtk.TreeView()
		self._list.set_model(self._plugins)
		self._list.set_rules_hint(True)
		self._list.connect('cursor-changed', self.__handle_cursor_change)
		# create and configure cell renderers
		cell_active = gtk.CellRendererToggle()
		cell_active.connect('toggled', self._toggle_plugin)
		cell_name = gtk.CellRendererText()
		cell_author = gtk.CellRendererText()
		cell_version = gtk.CellRendererText()
		# create and pack columns
		col_active = gtk.TreeViewColumn(_('Active'), cell_active, active=Column.ACTIVE)
		col_name = gtk.TreeViewColumn(_('Plugin name'), cell_name, text=Column.NAME)
		col_name.set_resizable(True)
		col_name.set_expand(True)
		col_version = gtk.TreeViewColumn(_('Version'), cell_version, text=Column.VERSION)
		col_author = gtk.TreeViewColumn(_('Author'), cell_author, text=Column.AUTHOR)
		col_author.set_resizable(True)
		self._list.append_column(col_active)
		self._list.append_column(col_name)
		self._list.append_column(col_version)
		self._list.append_column(col_author)
		# create description
		self._label_description = gtk.Label()
		self._label_description.set_use_markup(True)
		self._label_description.set_line_wrap(True)
		self._label_description.set_selectable(True)
		self._label_description.set_padding(5, 5)
		self._label_description.connect('size-allocate', self.__adjust_label)
		self._expander_description = gtk.Expander(_('Description'))
		self._expander_description.add(self._label_description)
		# create controls
		hbox_controls = gtk.HBox(False, 5)
		image_contact = gtk.Image()
		image_contact.set_from_icon_name('gnome-stock-mail-new', gtk.ICON_SIZE_BUTTON)
		self._button_contact = gtk.Button()
		self._button_contact.set_label(_('Contact'))
		self._button_contact.set_image(image_contact)
		self._button_contact.set_sensitive(False)
		self._button_contact.connect('clicked', self.__handle_contact_button_click)
		image_home_page = gtk.Image()
		image_home_page.set_from_stock(gtk.STOCK_HOME, gtk.ICON_SIZE_BUTTON)
		self._button_home_page = gtk.Button()
		self._button_home_page.set_label(_('Visit site'))
		self._button_home_page.set_image(image_home_page)
		self._button_home_page.set_sensitive(False)
		self._button_home_page.connect('clicked', self.__handle_home_page_button_click)
		# pack containers
		container.add(self._list)
		hbox_controls.pack_start(self._button_contact, False, False, 0)
		hbox_controls.pack_start(self._button_home_page, False, False, 0)
		self.pack_start(container, True, True, 0)
		self.pack_start(self._expander_description, False, False, 0)
		self.pack_start(hbox_controls, False, False, 0)
	def __handle_cursor_change(self, widget, data=None):
		"""Update button state when list cursor changes"""
		selection = widget.get_selection()
		item_store, selected_iter = selection.get_selected()
		if selected_iter is not None:
			self._label_description.set_text(item_store.get_value(selected_iter, Column.DESCRIPTION))
			has_contact = item_store.get_value(selected_iter, Column.CONTACT) is not None
			has_site = item_store.get_value(selected_iter, Column.SITE) is not None
			self._button_contact.set_sensitive(has_contact)
			self._button_home_page.set_sensitive(has_site)
	def __handle_contact_button_click(self, widget, data=None):
		"""Create new contact email"""
		selection = self._list.get_selection()
		item_store, selected_iter = selection.get_selected()
		if selected_iter is not None:
			email = item_store.get_value(selected_iter, Column.CONTACT)
			email = 'xdg-open mailto:{0}'.format(email)
			os.system(email)
	def __handle_home_page_button_click(self, widget, data=None):
		"""Create new contact email"""
		selection = self._list.get_selection()
		item_store, selected_iter = selection.get_selected()
		if selected_iter is not None:
			url = item_store.get_value(selected_iter, Column.SITE)
			self._application.goto_web(self, url)
	def __adjust_label(self, widget, data=None):
		"""Adjust label size"""
		widget.set_size_request(data.width-1, -1)
	def _toggle_plugin(self, cell, path):
		"""Handle changing plugin state"""
		plugin = self._plugins[path][Column.LOCATION]
		plugin_name = self._plugins[path][Column.NAME]
		if plugin not in self._application.protected_plugins:
			# plugin is not protected, toggle its state
			self._plugins[path][Column.ACTIVE] = not self._plugins[path][Column.ACTIVE]
			# enable save button
			self._parent.enable_save(show_restart=True)
		else:
			# plugin is protected, show appropriate message
			dialog = gtk.MessageDialog(
									self._application,
									gtk.DIALOG_DESTROY_WITH_PARENT,
									gtk.MESSAGE_INFO,
									gtk.BUTTONS_OK,
									_(
										"{0} is required for "
										"normal program operation and therefore can "
										"not be deactivated!"
									).format(plugin_name)
								)
			dialog.run()
			dialog.destroy()
	def _load_options(self):
		"""Load terminal tab options"""
		options = self._application.options
		# clear existing list
		self._plugins.clear()
		# get list of plugins
		plugin_list = self._application._get_plugin_list()
		plugins_to_load = options.get('plugins')
		# extract current locale
		language = locale.getdefaultlocale()[0]
		# populate list
		for plugin in plugin_list:
			# default values
			plugin_name = plugin
			plugin_author = ''
			plugin_version = ''
			plugin_site = None
			plugin_contact = None
			plugin_description = _('This plugin has no description')
			system_plugin_config = os.path.join(self._application.system_plugin_path, plugin, 'plugin.conf')
			user_plugin_config = os.path.join(self._application.user_plugin_path, plugin, 'plugin.conf')
			# prefer user plugin over system version
			plugin_config = user_plugin_config if os.path.exists(user_plugin_config) else system_plugin_config
			# read plugin data from configuration file
			if os.path.exists(plugin_config):
				config = ConfigParser()
				config.read(plugin_config)
				if config.has_section(Section.NAME) and language is not None:
					if config.has_option(Section.NAME, language):
						# try to get plugin name for current language
						plugin_name = config.get(Section.NAME, language)
					elif config.has_option(Section.NAME, 'en'):
						# try to get plugin name for default language
						plugin_name = config.get(Section.NAME, 'en')
				if config.has_section(Section.AUTHOR):
					# get author name
					if config.has_option(Section.AUTHOR, 'name'):
						plugin_author = config.get(Section.AUTHOR, 'name')
					# get contact email
					if config.has_option(Section.AUTHOR, 'contact'):
						plugin_contact = config.get(Section.AUTHOR, 'contact')
					if config.has_option(Section.AUTHOR, 'site'):
						plugin_site = config.get(Section.AUTHOR, 'site')
				if config.has_section(Section.DESCRIPTION) and language is not None:
					if config.has_option(Section.DESCRIPTION, language):
						# try to get plugin description for current language
						plugin_description = config.get(Section.DESCRIPTION, language)
					elif config.has_option(Section.DESCRIPTION, 'en'):
						# try to get plugin description for default language
						plugin_description = config.get(Section.DESCRIPTION, 'en')
				if config.has_section(Section.VERSION) and config.has_option(Section.VERSION, 'number'):
					plugin_version = config.get(Section.VERSION, 'number')
			# add plugin data to list
			self._plugins.append((
							plugin in plugins_to_load,
							plugin,
							plugin_name,
							plugin_author,
							plugin_version,
							plugin_contact,
							plugin_site,
							plugin_description
						))
			# clear description
			self._label_description.set_text(_('No plugin selected'))
			self._expander_description.set_expanded(False)
	def _save_options(self):
		"""Save terminal tab options"""
		options = self._application.options
		# get only selected plugins
		plugin_list = filter(lambda row: row[Column.ACTIVE], self._plugins)
		# we need only plugin names
		plugin_list = [row[Column.LOCATION] for row in plugin_list]
		# save plugin list
		options.set('plugins', plugin_list)
 | 
	gpl-3.0 | -6,595,831,597,563,156,000 | 30.660839 | 101 | 0.696963 | false | 
| 
	nirmeshk/oh-mainline | 
	vendor/packages/twisted/twisted/internet/test/test_address.py | 
	18 | 
	7490 | 
	# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
import re
import os
from twisted.trial import unittest
from twisted.internet.address import IPv4Address, UNIXAddress
try:
    os.symlink
except AttributeError:
    symlinkSkip = "Platform does not support symlinks"
else:
    symlinkSkip = None
class AddressTestCaseMixin(object):
    def test_addressComparison(self):
        """
        Two different address instances sharing the same properties are
        considered equal by C{==} and are not considered unequal by C{!=}.
        Note: When applied via UNIXAddress class, this uses the same
        filename for both objects being compared.
        """
        self.assertTrue(self.buildAddress() == self.buildAddress())
        self.assertFalse(self.buildAddress() != self.buildAddress())
    def _stringRepresentation(self, stringFunction):
        """
        Verify that the string representation of an address object conforms to a
        simple pattern (the usual one for Python object reprs) and contains
        values which accurately reflect the attributes of the address.
        """
        addr = self.buildAddress()
        pattern = "".join([
           "^",
           "([^\(]+Address)", # class name,
           "\(",       # opening bracket,
           "([^)]+)",  # arguments,
           "\)",       # closing bracket,
           "$"
        ])
        stringValue = stringFunction(addr)
        m = re.match(pattern, stringValue)
        self.assertNotEquals(
            None, m,
            "%s does not match the standard __str__ pattern "
            "ClassName(arg1, arg2, etc)" % (stringValue,))
        self.assertEquals(addr.__class__.__name__, m.group(1))
        args = [x.strip() for x in m.group(2).split(",")]
        self.assertEquals(
            args,
            [argSpec[1] % (getattr(addr, argSpec[0]),) for argSpec in self.addressArgSpec])
    def test_str(self):
        """
        C{str} can be used to get a string representation of an address instance
        containing information about that address.
        """
        self._stringRepresentation(str)
    def test_repr(self):
        """
        C{repr} can be used to get a string representation of an address
        instance containing information about that address.
        """
        self._stringRepresentation(repr)
    def test_hash(self):
        """
        C{__hash__} can be used to get a hash of an address, allowing
        addresses to be used as keys in dictionaries, for instance.
        """
        addr = self.buildAddress()
        d = {addr: True}
        self.assertTrue(d[self.buildAddress()])
    def test_differentNamesComparison(self):
        """
        Check that comparison operators work correctly on address objects
        when a different name is passed in
        """
        self.assertFalse(self.buildAddress() == self.buildDifferentAddress())
        self.assertTrue(self.buildAddress() != self.buildDifferentAddress())
    def assertDeprecations(self, testMethod, message):
        """
        Assert that a DeprecationWarning with the given message was
        emitted against the given method.
        """
        warnings = self.flushWarnings([testMethod])
        self.assertEquals(warnings[0]['category'], DeprecationWarning)
        self.assertEquals(warnings[0]['message'], message)
        self.assertEquals(len(warnings), 1)
class IPv4AddressTestCaseMixin(AddressTestCaseMixin):
    addressArgSpec = (("type", "%s"), ("host", "%r"), ("port", "%d"))
class IPv4AddressTCPTestCase(unittest.TestCase, IPv4AddressTestCaseMixin):
    def buildAddress(self):
        """
        Create an arbitrary new L{IPv4Address} instance with a C{"TCP"}
        type.  A new instance is created for each call, but always for the
        same address.
        """
        return IPv4Address("TCP", "127.0.0.1", 0)
    def buildDifferentAddress(self):
        """
        Like L{buildAddress}, but with a different fixed address.
        """
        return IPv4Address("TCP", "127.0.0.2", 0)
    def test_bwHackDeprecation(self):
        """
        If a value is passed for the C{_bwHack} parameter to L{IPv4Address},
        a deprecation warning is emitted.
        """
        message = (
            "twisted.internet.address.IPv4Address._bwHack is deprecated "
            "since Twisted 11.0")
        address = IPv4Address("TCP", "127.0.0.3", 0, _bwHack="TCP")
        return self.assertDeprecations(self.test_bwHackDeprecation, message)
class IPv4AddressUDPTestCase(unittest.TestCase, IPv4AddressTestCaseMixin):
    def buildAddress(self):
        """
        Create an arbitrary new L{IPv4Address} instance with a C{"UDP"}
        type.  A new instance is created for each call, but always for the
        same address.
        """
        return IPv4Address("UDP", "127.0.0.1", 0)
    def buildDifferentAddress(self):
        """
        Like L{buildAddress}, but with a different fixed address.
        """
        return IPv4Address("UDP", "127.0.0.2", 0)
    def test_bwHackDeprecation(self):
        """
        If a value is passed for the C{_bwHack} parameter to L{IPv4Address},
        a deprecation warning is emitted.
        """
        message = (
            "twisted.internet.address.IPv4Address._bwHack is deprecated "
            "since Twisted 11.0")
        address = IPv4Address("UDP", "127.0.0.3", 0, _bwHack="UDP")
        return self.assertDeprecations(self.test_bwHackDeprecation, message)
class UNIXAddressTestCase(unittest.TestCase, AddressTestCaseMixin):
    addressArgSpec = (("name", "%r"),)
    def setUp(self):
        self._socketAddress = self.mktemp()
        self._otherAddress = self.mktemp()
    def buildAddress(self):
        """
        Create an arbitrary new L{UNIXAddress} instance.  A new instance is
        created for each call, but always for the same address.
        """
        return UNIXAddress(self._socketAddress)
    def buildDifferentAddress(self):
        """
        Like L{buildAddress}, but with a different fixed address.
        """
        return UNIXAddress(self._otherAddress)
    def test_comparisonOfLinkedFiles(self):
        """
        UNIXAddress objects compare as equal if they link to the same file.
        """
        linkName = self.mktemp()
        self.fd = open(self._socketAddress, 'w')
        os.symlink(os.path.abspath(self._socketAddress), linkName)
        self.assertTrue(
            UNIXAddress(self._socketAddress) == UNIXAddress(linkName))
    test_comparisonOfLinkedFiles.skip = symlinkSkip
    def test_hashOfLinkedFiles(self):
        """
        UNIXAddress Objects that compare as equal have the same hash value.
        """
        linkName = self.mktemp()
        self.fd = open(self._socketAddress, 'w')
        os.symlink(os.path.abspath(self._socketAddress), linkName)
        self.assertEquals(
            hash(UNIXAddress(self._socketAddress)), hash(UNIXAddress(linkName)))
    test_hashOfLinkedFiles.skip = symlinkSkip
    def test_bwHackDeprecation(self):
        """
        If a value is passed for the C{_bwHack} parameter to L{UNIXAddress},
        a deprecation warning is emitted.
        """
        message = (
            "twisted.internet.address.UNIXAddress._bwHack is deprecated "
            "since Twisted 11.0")
        address = UNIXAddress(self.mktemp(), _bwHack='UNIX')
        return self.assertDeprecations(self.test_bwHackDeprecation, message)
 | 
	agpl-3.0 | -5,277,088,138,837,497,000 | 31.850877 | 91 | 0.62016 | false | 
| 
	janplus/xbmc-addons-chinese | 
	plugin.audio.1ting/music.py | 
	7 | 
	12549 | 
	#!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2, re, json
from bs4 import BeautifulSoup
from xml.sax.saxutils import unescape
webHtml = "http://www.1ting.com"
SingerHtml = "/group.html"
randHtml = "/rand.php"
rankHtml = "/rank.html"
tagHtml = "/tag"
genreHtml = "/genre"
imgBaseHtml = "http://img.1ting.com/images/singer/s"
#songJsBaseHtml = "/json2010_"
songBasehtml = "http://f.1ting.com"
rankImgBaseHtml = "/api/client/images"
searchHtml = "http://so.1ting.com"
MODE_NONE = ""
MODE_MENU = "menu"
MODE_SINGER_GROUP = "singer_group"
MODE_SINGER_ALL = "singer_all"
MODE_SINGER = "singer"
MODE_SONG = "song"
MODE_SONGLIST = "songList"
MODE_PLAYLIST = "playlist"
MODE_ALBUM = "album"
MODE_ALBUMLIST = "albumList"
MODE_SINGERLIST = "singerList"
MODE_RAND = "rand"
MODE_RANK = "rank"
MODE_TAG_LIST = "tag_list"
MODE_TAG = "tag"
MODE_SEARCH = "search"
MODE_SEARCH_LIST = "search_list"
#name, mode, url, icon, info
menu = [('搜索', MODE_SEARCH), 
        ('歌手', MODE_SINGER_GROUP), 
        ('排行榜', MODE_RANK),
        ('标签', MODE_TAG_LIST, tagHtml),
        ('曲风', MODE_TAG_LIST, genreHtml),
        ('随便一听', MODE_RAND)]
def getMenu():
    return menu
def isFolder(mode):
    return mode not in (MODE_NONE, MODE_SONG, MODE_PLAYLIST)
def request(url, soup = True, album = False):
    if not url.startswith('http'): url = webHtml + url
    print('request', url)
    req = urllib2.Request(url)           # create one connection
    try:
        response = urllib2.urlopen(req)      # get the response
        cont = response.read()        # get all the webpage html data in string
        if album: # fix malformed html tags
            cont = cont.replace('</span></a> </li>', '</a></span> </li>')
        response.close()
    except:
        return
    return BeautifulSoup(cont, 'html.parser') if soup else cont
def getSongUrl(url):
    if not url: return
    req = urllib2.Request(songBasehtml + url)
    req.add_header("Cookie", "PIN=cnGQFFSRmPRxcDj2aUSnAg==")
    url = urllib2.urlopen(req).geturl()
    return url
def getSearchUrl(q, domain = "song"):
    return "%s/%s?q=%s" % (searchHtml, domain, q)
def href(a):
    return a['href'].encode('utf-8')
def bannerItem(h, soup = True):
    if soup:
        h = h.text.encode('utf-8')
    h = '[COLOR FFDEB887]【%s】[/COLOR]' % h
    return (h,)
def linkItem(a, mode, link = True, baseurl = '', title = ''):
    if title:
        name = title.encode('utf-8')
    else:
        name = a.text.encode('utf-8')
    if link:
        name = '[COLOR FF1E90FF]%s[/COLOR]' % (name)
    return (name, mode, baseurl + href(a))
def singerItem(a):
    name = a.text.encode('utf-8')
    mode = MODE_SINGER
    url = href(a)
    icon = getSingerIcon(url)
    return (name, mode, url, icon)
def playItem(a):
    name = a.text.encode('utf-8')
    mode = MODE_PLAYLIST
    url = href(a)
    icon = ''
    d = {'title': name}
    return (name, mode, url, icon, d)
def songItem(title, url, artist, album, icon):
    title = title.encode('utf-8')
    mode = MODE_SONG
    artist = artist.encode('utf-8')
    album = album.encode('utf-8')
    d = {'title': title, 'artist': artist, 'album': album}
    url = url.encode('utf-8')
    icon = icon.encode('utf-8')
    if artist:
        name = '%s - %s' % (artist, title)
    else:
        name = title
    return (name, mode, url, icon, d)
def pageList(tree, mode, lists, baseurl = ''):
    soup = tree.find('div', {'class': ['cPages', 'pages']})
    if soup:
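        # the pager links carry Chinese text; these literals match the "previous page" / "next page" anchors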
        a = soup.find('a', text=['上一页', '«上一页'])
        if a: lists.insert(0, linkItem(a, mode, True, baseurl))
        a = soup.find('a', text=['下一页', '下一页»'])
        if a: lists.append(linkItem(a, mode, True, baseurl))
    return lists
def albumList(soup):
    lists = []
    mode = MODE_ALBUM
    if soup:
        for li in soup.find_all('li'):
            albumName = li.find('span', {'class': 'albumName'})
            if not albumName: continue
            singerName = li.find('span', {'class': 'singerName'})
            albumDate = li.find('span', {'class': 'albumDate'})
            albumPic = li.find('img', {'class': 'albumPic'})
            name = albumName.text
            if singerName:
                name = '%s - %s' % (name, singerName.text)
            if albumDate:
                name = '%s (%s)' % (name, albumDate.text)
            name = name.encode('utf-8')
            a = li.find('a', {'class': 'albumPlay'})
            url = href(a)
            icon = ''
            if albumPic:
                icon = albumPic['src']
            lists.append((name, mode, url, icon))
    return lists
def singerList(soup):
    lists = []
    for i in soup.find_all(['a', 'li'], {'class': 'singerName'}):
        a = i.a if i.name == 'li' else i
        lists.append(singerItem(a))
    return lists
def songList(soup):
    lists = []
    for li in soup.find_all('li'):
        lists.append(playItem(li.a))
    return lists
def getSongList(url):
    lists = []
    tree = request(url)
    if tree:
        soup = tree.find('div', {'class': 'songList'})
        for i in soup.find_all('ul'):
            lists += songList(i)
        mode = MODE_SONGLIST
        lists = pageList(tree, mode, lists)
    else:
        lists.append(bannerItem('该歌手不存在或者由于版权到期而下线!', False))
    return lists
def getAlbumList(url):
    lists = []
    tree = request(url, album = True)
    soup = tree.find('div', {'class': 'albumList'})
    lists += albumList(soup)
    mode = MODE_ALBUMLIST
    lists = pageList(tree, mode, lists)
    return lists
def getSingerList(url):
    lists = []
    tree = request(url)
    soup = tree.find('div', {'class': 'singerList'})
    for li in soup.find_all('li'):
        a = li.find('a', {'class': 'singerName'})
        if a:
            lists.append(singerItem(a))
    mode = MODE_SINGERLIST
    lists = pageList(tree, mode, lists)
    return lists
def getPlayList(url):
    lists = []
    resHttpData = request(url, False)
    match = re.search("\[\[.*?\]\]", resHttpData)
    if match:
        result = json.loads(unescape(match.group()))
        for i in result:
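            # each result entry is a flat list: slots 1, 3, 5 and 7 hold artist, title, album and file path, slot 8 the cover image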
            artist, title, album, url = i[1:8:2]
            icon = i[8]
            lists.append(songItem(title, url, artist, album, icon))
    else:
        lists.append(bannerItem('该歌曲不存在或者由于版权到期而下线!', False))
    return lists
def getRand():
    lists = []
    resHttpData = request(randHtml, False)
    match = re.search("Jsonp\((.*?)\)    </script>", resHttpData)
    if not match: return
    info = json.loads(match.group(1))
    result = info.get('results', [])
    for i in result:
        title = i['song_name']
        artist = i['singer_name']
        album = i['album_name']
        url = i['song_filepath']
        icon = i['album_cover']
        lists.append(songItem(title, url, artist, album, icon))
    return lists
def getSingerGroup(url = SingerHtml):
    lists = []
    tree = request(url)
    soup = tree.find_all('div', {'class':'group-menu-component'})
    for i in soup:
        for a in i.find_all('a'):
            url = href(a)
            if url == '#':
                item = bannerItem(a)
            else:
                item = linkItem(a, MODE_SINGER_ALL, False)
            lists.append(item)
    return lists
def getSingerIcon(url, size = 210):
    match = re.search("singer_(.*?)\.html", url)
    if not match: return
    num = match.group(1)
    return "%s%d_%s.jpg" % (imgBaseHtml, size, num)
def getRankIcon(url):
    icon = webHtml + rankImgBaseHtml + url.replace('/rank', '').replace('.html', '.png')
    return icon
def getSingerAll(url = SingerHtml):
    lists = []
    tree = request(url)
    soup = tree.find('div', {'class': 'singerCommend'})
    if soup:
        lists.append(bannerItem('推荐', False))
    soup = soup.find_all('a', {'class': 'singerName'})
    for a in soup:
        lists.append(singerItem(a))
    soup = tree.find_all('ul', {'class': 'allSinger'})
    if soup:
        lists.append(bannerItem('全部', False))
    for i in soup:
        for a in i.find_all('a'):
            lists.append(singerItem(a))
    return lists
def getSinger(url):
    lists = []
    tree = request(url, album = True)
    if tree:
        soup = tree.find('dl', {'class': 'singerInfo'})
        a = soup.find('a', {'class': 'allSong'})
        if a:
            lists.append(linkItem(a, MODE_SONGLIST))
        a = soup.find('a', {'class': 'allAlbum'})
        if a:
            lists.append(linkItem(a, MODE_ALBUMLIST))
        soup = tree.find('table', {'class': 'songList'})
        h = soup.find_previous_sibling().h3
        lists.append(bannerItem(h))
        for a in soup.find_all('a', {'class': 'songName'}):
            lists.append(playItem(a))
        soups = tree.find_all('div', {'class': 'albumList'})
        for soup in soups:
            h = soup.find_previous_sibling().h3
            lists.append(bannerItem(h))
            lists += albumList(soup)
    else:
        lists.append(bannerItem('该歌手不存在或者由于版权到期而下线!', False))
    return lists
def getRank():
    lists = []
    tree = request(rankHtml)
    soup = tree.find('div', {'class': 'lbar'})
    for dl in soup.find_all('dl'):
        lists.append(bannerItem(dl.dt))
        for a in dl.dd.ul.find_all('a'):
            name = a.text.encode('utf-8')
            url = href(a)
            icon = getRankIcon(url)
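            # pick the list mode from the Chinese chart name: record/album charts, singer charts, otherwise song charts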
            if '唱片' in name or '专辑' in name: mode = MODE_ALBUMLIST
            elif '歌手' in name: mode = MODE_SINGERLIST
            else: mode = MODE_SONGLIST
            lists.append((name, mode, url, icon))
    return lists
def getTagList(url):
    lists = []
    if url == tagHtml: tag = 'allTagList'
    elif url == genreHtml: tag = 'allGenre'
    else: return
    tree = request(url)
    soup = tree.find('div', {'class': tag})
    for dl in soup.find_all('dl'):
        lists.append(bannerItem(dl.dt))
        for a in dl.find_all('a'):
            lists.append(linkItem(a, MODE_TAG, False))
    return lists
def tagHeadList(soup, mode):
    head = soup.find_previous_sibling()
    if head:
        return [linkItem(head.p.a, mode), bannerItem(head.h3)]
    return []
def getTag(url):
    lists = []
    tree = request(url, album = True)
    if tree:
        soups = tree.find_all('div', {'class': [MODE_SONGLIST, MODE_ALBUMLIST, MODE_SINGERLIST]})
        for soup in soups:
            className = soup['class'][0]
            lists += tagHeadList(soup, className)
            func = globals()[className]
            lists += func(soup)
    return lists
def getSearchList(url):
    lists = []
    mode = MODE_SEARCH_LIST
    tree = request(url)
    if tree:
        soup = tree.find('div', {'class': 'nav_center'})
        if soup:
            soup = soup.find_all('li', {'class': ['type_song', 'type_album', 'type_singer']})
            for li in soup:
                if li.span.text != '(0)':
                    lists.append(linkItem(li.a, mode, True, searchHtml, title=li.text))
        soup = tree.find('div', {'class': 'songList'})
        if soup:
            soup = soup.table.tbody
            for td in soup.find_all('td', {'class': 'song'}):
                lists.append(playItem(td.a))
        soup = tree.find('ul', {'class': 'albumList'})
        if soup:
            lists += albumList(soup)
        soups = tree.find_all('div', {'class': 'singerList'})
        for soup in soups:
            lists += singerList(soup)
        baseurl = url.split('?')[0]
        lists = pageList(tree, mode, lists, baseurl)
    return lists
def getList(mode, url):
    l = []
    if mode == MODE_MENU:
        l = getMenu()
    elif mode == MODE_SONGLIST:
        l = getSongList(url)
    elif mode == MODE_ALBUMLIST:
        l = getAlbumList(url)
    elif mode == MODE_ALBUM:
        l = getPlayList(url)
    elif mode == MODE_SINGERLIST:
        l = getSingerList(url)
    elif mode == MODE_SINGER_GROUP:
        l = getSingerGroup()
    elif mode == MODE_SINGER_ALL:
        l = getSingerAll(url)
    elif mode == MODE_SINGER:
        l = getSinger(url)
    elif mode == MODE_RAND:
        l = getRand()
    elif mode == MODE_RANK:
        l = getRank()
    elif mode == MODE_TAG_LIST:
        l = getTagList(url)
    elif mode == MODE_TAG:
        l = getTag(url)
    elif mode == MODE_SEARCH_LIST:
        l = getSearchList(url)
    return l
if __name__ == '__main__':
    for i in getSearchList('http://so.1ting.com/singer?q=dzq'):
        print i
 | 
	gpl-2.0 | -3,288,996,919,135,510,000 | 28.915254 | 97 | 0.55864 | false | 
| 
	ContinuumIO/pydata-apps | 
	embedded_apps/02_bokeh_server/linked_tap_server.py | 
	2 | 
	2312 | 
	from __future__ import print_function
import numpy as np
from bokeh.models import ColumnDataSource, DataRange1d, Plot, LinearAxis, Grid, Circle, VBox, HBox, Button, TapTool
from bokeh.document import Document
from bokeh.session import Session
from bokeh.browserlib import view
document = Document()
session = Session()
session.use_doc('linked_tap_server')
session.load_document(document)
N = 9
x = np.linspace(-2, 2, N)
y = x**2
source1 = ColumnDataSource(dict(x = x, y = y, size = [20]*N))
xdr1 = DataRange1d(sources=[source1.columns("x")])
ydr1 = DataRange1d(sources=[source1.columns("y")])
plot1 = Plot(title="Plot1", x_range=xdr1, y_range=ydr1, plot_width=400, plot_height=400)
plot1.tools.append(TapTool(plot=plot1))
plot1.add_glyph(source1, Circle(x="x", y="y", size="size", fill_color="red"))
plot1.toolbar_location=None
source2 = ColumnDataSource(dict(x = x, y = y, color = ["blue"]*N))
xdr2 = DataRange1d(sources=[source2.columns("x")])
ydr2 = DataRange1d(sources=[source2.columns("y")])
plot2 = Plot(title="Plot2", x_range=xdr2, y_range=ydr2, plot_width=400, plot_height=400)
plot2.tools.append(TapTool(plot=plot2))
plot2.add_glyph(source2, Circle(x="x", y="y", size=20, fill_color="color"))
plot2.toolbar_location=None
def on_selection_change1(obj, attr, _, inds):
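    # inds is the new list of selected indices in plot1; recolor the matching point in plot2 and push the change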
    color = ["blue"]*N
    if inds:
        [index] = inds
        color[index] = "red"
    source2.data["color"] = color
    session.store_objects(source2)
source1.on_change('selected', on_selection_change1)
def on_selection_change2(obj, attr, _, inds):
    if inds:
        [index] = inds
        size = [10]*N
        size[index] = 40
    else:
        size = [20]*N
    source1.data["size"] = size
    session.store_objects(source1)
source2.on_change('selected', on_selection_change2)
reset = Button(label="Reset")
def on_reset_click():
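    # clear both selections and push the emptied state back to the client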
    source1.selected = []
    source2.selected = []
    session.store_objects(source1, source2)
reset.on_click(on_reset_click)
vbox = VBox(children=[reset], width=150)
hbox = HBox(children=[vbox, plot1, plot2])
document.add(hbox)
session.store_document(document)
if __name__ == "__main__":
    link = session.object_link(document.context)
    print("Please visit %s to see the plots" % link)
    view(link)
    print("\npress ctrl-C to exit")
    session.poll_document(document)
 | 
	mit | 2,414,830,226,760,235,000 | 29.025974 | 115 | 0.682958 | false | 
| 
	mastizada/kuma | 
	vendor/packages/pylint/checkers/imports.py | 
	6 | 
	15398 | 
	# Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
# http://www.logilab.fr/ -- mailto:[email protected]
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
"""imports checkers for Python code"""
from logilab.common.graph import get_cycles, DotBackend
from logilab.common.modutils import is_standard_module
from logilab.common.ureports import VerbatimText, Paragraph
from logilab.common.compat import sorted, enumerate
from logilab import astng
from logilab.astng import are_exclusive
from pylint.interfaces import IASTNGChecker
from pylint.checkers import BaseChecker, EmptyReport
def get_first_import(context, name, base, level=0):
    """return the node where [base.]<name> is imported or None if not found
    """
    for node in context.values():
        if isinstance(node, astng.Import):
            if name in [iname[0] for iname in node.names]:
                return node
        if isinstance(node, astng.From):
            if base == node.modname and level == node.level and \
                   name in [iname[0] for iname in node.names]:
                return node
# utilities to represents import dependencies as tree and dot graph ###########
def filter_dependencies_info(dep_info, package_dir, mode='external'):
    """filter external or internal dependencies from dep_info (return a
    new dictionary containing the filtered modules only)
    """
    if mode == 'external':
        filter_func = lambda x: not is_standard_module(x, (package_dir,))
    else:
        assert mode == 'internal'
        filter_func = lambda x: is_standard_module(x, (package_dir,))
    result = {}
    for importee, importers in dep_info.items():
        if filter_func(importee):
            result[importee] = importers
    return result
def make_tree_defs(mod_files_list):
    """get a list of 2-uple (module, list_of_files_which_import_this_module),
    it will return a dictionary to represent this as a tree
    """
    tree_defs = {}
    for mod, files in mod_files_list:
        node = (tree_defs, ())
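        # walk/create one [children, files] bucket per dotted-name component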
        for prefix in mod.split('.'):
            node = node[0].setdefault(prefix, [{}, []])
        node[1] += files
    return tree_defs
def repr_tree_defs(data, indent_str=None):
    """return a string which represents imports as a tree"""
    lines = []
    nodes = data.items()
    for i, (mod, (sub, files)) in enumerate(sorted(nodes, key=lambda x: x[0])):
        if not files:
            files = ''
        else:
            files = '(%s)' % ','.join(files)
        if indent_str is None:
            lines.append('%s %s' % (mod, files))
            sub_indent_str = '  '
        else:
            lines.append('%s\-%s %s' % (indent_str, mod, files))
            if i == len(nodes)-1:
                sub_indent_str = '%s  ' % indent_str
            else:
                sub_indent_str = '%s| ' % indent_str
        if sub:
            lines.append(repr_tree_defs(sub, sub_indent_str))
    return '\n'.join(lines)
def dependencies_graph(filename, dep_info):
    """write dependencies as a dot (graphviz) file
    """
    done = {}
    printer = DotBackend(filename[:-4], rankdir = "LR")
    printer.emit('URL="." node[shape="box"]')
    for modname, dependencies in dep_info.items():
        done[modname] = 1
        printer.emit_node(modname)
        for modname in dependencies:
            if not done.has_key(modname):
                done[modname] = 1
                printer.emit_node(modname)
    for depmodname, dependencies in dep_info.items():
        for modname in dependencies:
            printer.emit_edge(modname, depmodname)
    printer.generate(filename)
def make_graph(filename, dep_info, sect, gtype):
    """generate a dependencies graph and add some information about it in the
    report's section
    """
    dependencies_graph(filename, dep_info)
    sect.append(Paragraph('%simports graph has been written to %s'
                          % (gtype, filename)))
# the import checker itself ###################################################
MSGS = {
    'F0401': ('Unable to import %r' ,
              'Used when pylint has been unable to import a module.'),
    'R0401': ('Cyclic import (%s)',
              'Used when a cyclic import between two or more modules is \
              detected.'),
    'W0401': ('Wildcard import %s',
              'Used when `from module import *` is detected.'),
    'W0402': ('Uses of a deprecated module %r',
              'Used when a module marked as deprecated is imported.'),
    'W0403': ('Relative import %r, should be %r',
              'Used when an import relative to the package directory is \
              detected.'),
    'W0404': ('Reimport %r (imported line %s)',
              'Used when a module is reimported multiple times.'),
    'W0406': ('Module import itself',
              'Used when a module is importing itself.'),
    'W0410': ('__future__ import is not the first non docstring statement',
              'Python 2.5 and greater require __future__ import to be the \
              first non docstring statement in the module.'),
    }
class ImportsChecker(BaseChecker):
    """checks for
    * external modules dependencies
    * relative / wildcard imports
    * cyclic imports
    * uses of deprecated modules
    """
    __implements__ = IASTNGChecker
    name = 'imports'
    msgs = MSGS
    priority = -2
    options = (('deprecated-modules',
                {'default' : ('regsub','string', 'TERMIOS',
                              'Bastion', 'rexec'),
                 'type' : 'csv',
                 'metavar' : '<modules>',
                 'help' : 'Deprecated modules which should not be used, \
separated by a comma'}
                ),
               ('import-graph',
                {'default' : '',
                 'type' : 'string',
                 'metavar' : '<file.dot>',
                 'help' : 'Create a graph of every (i.e. internal and \
external) dependencies in the given file (report R0402 must not be disabled)'}
                ),
               ('ext-import-graph',
                {'default' : '',
                 'type' : 'string',
                 'metavar' : '<file.dot>',
                 'help' : 'Create a graph of external dependencies in the \
given file (report R0402 must not be disabled)'}
                ),
               ('int-import-graph',
                {'default' : '',
                 'type' : 'string',
                 'metavar' : '<file.dot>',
                 'help' : 'Create a graph of internal dependencies in the \
given file (report R0402 must not be disabled)'}
                ),
               )
    def __init__(self, linter=None):
        BaseChecker.__init__(self, linter)
        self.stats = None
        self.import_graph = None
        self.__int_dep_info = self.__ext_dep_info = None
        self.reports = (('R0401', 'External dependencies',
                         self.report_external_dependencies),
                        ('R0402', 'Modules dependencies graph',
                         self.report_dependencies_graph),
                        )
    def open(self):
        """called before visiting project (i.e set of modules)"""
        self.linter.add_stats(dependencies={})
        self.linter.add_stats(cycles=[])
        self.stats = self.linter.stats
        self.import_graph = {}
    def close(self):
        """called before visiting project (i.e set of modules)"""
        # don't try to compute cycles if the associated message is disabled
        if self.linter.is_message_enabled('R0401'):
            for cycle in get_cycles(self.import_graph):
                self.add_message('R0401', args=' -> '.join(cycle))
    def visit_import(self, node):
        """triggered when an import statement is seen"""
        modnode = node.root()
        for name, _ in node.names:
            importedmodnode = self.get_imported_module(modnode, node, name)
            if importedmodnode is None:
                continue
            self._check_relative_import(modnode, node, importedmodnode, name)
            self._add_imported_module(node, importedmodnode.name)
            self._check_deprecated_module(node, name)
            self._check_reimport(node, name)
    def visit_from(self, node):
        """triggered when a from statement is seen"""
        basename = node.modname
        if basename == '__future__':
            # check if this is the first non-docstring statement in the module
            prev = node.previous_sibling()
            if prev:
                # consecutive future statements are possible
                if not (isinstance(prev, astng.From)
                       and prev.modname == '__future__'):
                    self.add_message('W0410', node=node)
            return
        modnode = node.root()
        importedmodnode = self.get_imported_module(modnode, node, basename)
        if importedmodnode is None:
            return
        self._check_relative_import(modnode, node, importedmodnode, basename)
        self._check_deprecated_module(node, basename)
        for name, _ in node.names:
            if name == '*':
                self.add_message('W0401', args=basename, node=node)
                continue
            self._add_imported_module(node, '%s.%s' % (importedmodnode.name, name))
            self._check_reimport(node, name, basename, node.level)
    def get_imported_module(self, modnode, importnode, modname):
        try:
            return importnode.do_import_module(modname)
        except astng.InferenceError, ex:
            if str(ex).startswith('module importing itself'): # XXX
                return modnode
            else:
                self.add_message("F0401", args=modname, node=importnode)
                return
    def _check_relative_import(self, modnode, importnode, importedmodnode,
                               importedasname):
        """check relative import. node is either an Import or From node, modname
        the imported module name.
        """
        if importedmodnode.file is None:
            return False # built-in module
        if modnode is importedmodnode:
            return False # module importing itself
        if modnode.absolute_import_activated() or getattr(importnode, 'level', None):
            return False
        if importedmodnode.name != importedasname:
            # this must be a relative import...
            self.add_message('W0403', args=(importedasname, importedmodnode.name),
                             node=importnode)
    def _add_imported_module(self, node, importedmodname):
        """notify an imported module, used to analyze dependencies"""
        context_name = node.root().name
        if context_name == importedmodname:
            # module importing itself !
            self.add_message('W0406', node=node)
        elif not is_standard_module(importedmodname):
            # handle dependencies
            importedmodnames = self.stats['dependencies'].setdefault(
                importedmodname, set())
            if not context_name in importedmodnames:
                importedmodnames.add(context_name)
            if is_standard_module( importedmodname, (self.package_dir(),) ):
                # update import graph
                mgraph = self.import_graph.setdefault(context_name, set())
                if not importedmodname in mgraph:
                    mgraph.add(importedmodname)
    def _check_deprecated_module(self, node, mod_path):
        """check if the module is deprecated"""
        # XXX rewrite
        for mod_name in self.config.deprecated_modules:
            if mod_path.startswith(mod_name) and \
                   (len(mod_path) == len(mod_name)
                    or mod_path[len(mod_name)] == '.'):
                self.add_message('W0402', node=node, args=mod_path)
    def _check_reimport(self, node, name, basename=None, level=0):
        """check if the import is necessary (i.e. not already done)"""
        # XXX rewrite
        frame = node.frame()
        first = get_first_import(frame, name, basename, level)
        if isinstance(first, (astng.Import, astng.From)) and first is not node \
               and not are_exclusive(first, node):
            self.add_message('W0404', node=node, args=(name, first.fromlineno))
        else:
            root = node.root()
            if root is frame:
                return
            first = get_first_import(root, name, basename)
            if not isinstance(first, (astng.Import, astng.From)):
                return
            if first is not node and not are_exclusive(first, node):
                self.add_message('W0404', node=node,
                                 args=(name, first.fromlineno))
    def report_external_dependencies(self, sect, _, dummy):
        """return a verbatim layout for displaying dependencies"""
        dep_info = make_tree_defs(self._external_dependencies_info().items())
        if not dep_info:
            raise EmptyReport()
        tree_str = repr_tree_defs(dep_info)
        sect.append(VerbatimText(tree_str))
    def report_dependencies_graph(self, sect, _, dummy):
        """write dependencies as a dot (graphviz) file"""
        dep_info = self.stats['dependencies']
        if not dep_info or not (self.config.import_graph
                                or self.config.ext_import_graph
                                or self.config.int_import_graph):
            raise EmptyReport()
        filename = self.config.import_graph
        if filename:
            make_graph(filename, dep_info, sect, '')
        filename = self.config.ext_import_graph
        if filename:
            make_graph(filename, self._external_dependencies_info(),
                       sect, 'external ')
        filename = self.config.int_import_graph
        if filename:
            make_graph(filename, self._internal_dependencies_info(),
                       sect, 'internal ')
    def _external_dependencies_info(self):
        """return cached external dependencies information or build and
        cache them
        """
        if self.__ext_dep_info is None:
            self.__ext_dep_info = filter_dependencies_info(
                self.stats['dependencies'], self.package_dir(), 'external')
        return self.__ext_dep_info
    def _internal_dependencies_info(self):
        """return cached internal dependencies information or build and
        cache them
        """
        if self.__int_dep_info is None:
            self.__int_dep_info = filter_dependencies_info(
                self.stats['dependencies'], self.package_dir(), 'internal')
        return self.__int_dep_info
def register(linter):
    """required method to auto register this checker """
    linter.register_checker(ImportsChecker(linter))
 | 
	mpl-2.0 | -5,708,072,512,566,238,000 | 39.627968 | 85 | 0.581244 | false | 
| 
	hortonworks/hortonworks-sandbox | 
	desktop/core/ext-py/django-extensions-0.5/django_extensions/utils/uuid.py | 
	9 | 
	20321 | 
	r"""UUID objects (universally unique identifiers) according to RFC 4122.
This module provides immutable UUID objects (class UUID) and the functions
uuid1(), uuid3(), uuid4(), uuid5() for generating version 1, 3, 4, and 5
UUIDs as specified in RFC 4122.
If all you want is a unique ID, you should probably call uuid1() or uuid4().
Note that uuid1() may compromise privacy since it creates a UUID containing
the computer's network address.  uuid4() creates a random UUID.
Typical usage:
    >>> import uuid
    # make a UUID based on the host ID and current time
    >>> uuid.uuid1()
    UUID('a8098c1a-f86e-11da-bd1a-00112444be1e')
    # make a UUID using an MD5 hash of a namespace UUID and a name
    >>> uuid.uuid3(uuid.NAMESPACE_DNS, 'python.org')
    UUID('6fa459ea-ee8a-3ca4-894e-db77e160355e')
    # make a random UUID
    >>> uuid.uuid4()
    UUID('16fd2706-8baf-433b-82eb-8c7fada847da')
    # make a UUID using a SHA-1 hash of a namespace UUID and a name
    >>> uuid.uuid5(uuid.NAMESPACE_DNS, 'python.org')
    UUID('886313e1-3b8a-5372-9b90-0c9aee199e5d')
    # make a UUID from a string of hex digits (braces and hyphens ignored)
    >>> x = uuid.UUID('{00010203-0405-0607-0809-0a0b0c0d0e0f}')
    # convert a UUID to a string of hex digits in standard form
    >>> str(x)
    '00010203-0405-0607-0809-0a0b0c0d0e0f'
    # get the raw 16 bytes of the UUID
    >>> x.bytes
    '\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f'
    # make a UUID from a 16-byte string
    >>> uuid.UUID(bytes=x.bytes)
    UUID('00010203-0405-0607-0809-0a0b0c0d0e0f')
"""
__author__ = 'Ka-Ping Yee <[email protected]>'
RESERVED_NCS, RFC_4122, RESERVED_MICROSOFT, RESERVED_FUTURE = [
    'reserved for NCS compatibility', 'specified in RFC 4122',
    'reserved for Microsoft compatibility', 'reserved for future definition']
class UUID(object):
    """Instances of the UUID class represent UUIDs as specified in RFC 4122.
    UUID objects are immutable, hashable, and usable as dictionary keys.
    Converting a UUID to a string with str() yields something in the form
    '12345678-1234-1234-1234-123456789abc'.  The UUID constructor accepts
    five possible forms: a similar string of hexadecimal digits, or a tuple
    of six integer fields (with 32-bit, 16-bit, 16-bit, 8-bit, 8-bit, and
    48-bit values respectively) as an argument named 'fields', or a string
    of 16 bytes (with all the integer fields in big-endian order) as an
    argument named 'bytes', or a string of 16 bytes (with the first three
    fields in little-endian order) as an argument named 'bytes_le', or a
    single 128-bit integer as an argument named 'int'.
    UUIDs have these read-only attributes:
        bytes       the UUID as a 16-byte string (containing the six
                    integer fields in big-endian byte order)
        bytes_le    the UUID as a 16-byte string (with time_low, time_mid,
                    and time_hi_version in little-endian byte order)
        fields      a tuple of the six integer fields of the UUID,
                    which are also available as six individual attributes
                    and two derived attributes:
            time_low                the first 32 bits of the UUID
            time_mid                the next 16 bits of the UUID
            time_hi_version         the next 16 bits of the UUID
            clock_seq_hi_variant    the next 8 bits of the UUID
            clock_seq_low           the next 8 bits of the UUID
            node                    the last 48 bits of the UUID
            time                    the 60-bit timestamp
            clock_seq               the 14-bit sequence number
        hex         the UUID as a 32-character hexadecimal string
        int         the UUID as a 128-bit integer
        urn         the UUID as a URN as specified in RFC 4122
        variant     the UUID variant (one of the constants RESERVED_NCS,
                    RFC_4122, RESERVED_MICROSOFT, or RESERVED_FUTURE)
        version     the UUID version number (1 through 5, meaningful only
                    when the variant is RFC_4122)
    """
    def __init__(self, hex=None, bytes=None, bytes_le=None, fields=None,
                       int=None, version=None):
        r"""Create a UUID from either a string of 32 hexadecimal digits,
        a string of 16 bytes as the 'bytes' argument, a string of 16 bytes
        in little-endian order as the 'bytes_le' argument, a tuple of six
        integers (32-bit time_low, 16-bit time_mid, 16-bit time_hi_version,
        8-bit clock_seq_hi_variant, 8-bit clock_seq_low, 48-bit node) as
        the 'fields' argument, or a single 128-bit integer as the 'int'
        argument.  When a string of hex digits is given, curly braces,
        hyphens, and a URN prefix are all optional.  For example, these
        expressions all yield the same UUID:
        UUID('{12345678-1234-5678-1234-567812345678}')
        UUID('12345678123456781234567812345678')
        UUID('urn:uuid:12345678-1234-5678-1234-567812345678')
        UUID(bytes='\x12\x34\x56\x78'*4)
        UUID(bytes_le='\x78\x56\x34\x12\x34\x12\x78\x56' +
                      '\x12\x34\x56\x78\x12\x34\x56\x78')
        UUID(fields=(0x12345678, 0x1234, 0x5678, 0x12, 0x34, 0x567812345678))
        UUID(int=0x12345678123456781234567812345678)
        Exactly one of 'hex', 'bytes', 'bytes_le', 'fields', or 'int' must
        be given.  The 'version' argument is optional; if given, the resulting
        UUID will have its variant and version set according to RFC 4122,
        overriding the given 'hex', 'bytes', 'bytes_le', 'fields', or 'int'.
        """
        if [hex, bytes, bytes_le, fields, int].count(None) != 4:
            raise TypeError('need one of hex, bytes, bytes_le, fields, or int')
        if hex is not None:
            hex = hex.replace('urn:', '').replace('uuid:', '')
            hex = hex.strip('{}').replace('-', '')
            if len(hex) != 32:
                raise ValueError('badly formed hexadecimal UUID string')
            int = long(hex, 16)
        if bytes_le is not None:
            if len(bytes_le) != 16:
                raise ValueError('bytes_le is not a 16-char string')
            bytes = (bytes_le[3] + bytes_le[2] + bytes_le[1] + bytes_le[0] +
                     bytes_le[5] + bytes_le[4] + bytes_le[7] + bytes_le[6] +
                     bytes_le[8:])
        if bytes is not None:
            if len(bytes) != 16:
                raise ValueError('bytes is not a 16-char string')
            int = long(('%02x'*16) % tuple(map(ord, bytes)), 16)
        if fields is not None:
            if len(fields) != 6:
                raise ValueError('fields is not a 6-tuple')
            (time_low, time_mid, time_hi_version,
             clock_seq_hi_variant, clock_seq_low, node) = fields
            if not 0 <= time_low < 1<<32L:
                raise ValueError('field 1 out of range (need a 32-bit value)')
            if not 0 <= time_mid < 1<<16L:
                raise ValueError('field 2 out of range (need a 16-bit value)')
            if not 0 <= time_hi_version < 1<<16L:
                raise ValueError('field 3 out of range (need a 16-bit value)')
            if not 0 <= clock_seq_hi_variant < 1<<8L:
                raise ValueError('field 4 out of range (need an 8-bit value)')
            if not 0 <= clock_seq_low < 1<<8L:
                raise ValueError('field 5 out of range (need an 8-bit value)')
            if not 0 <= node < 1<<48L:
                raise ValueError('field 6 out of range (need a 48-bit value)')
            clock_seq = (clock_seq_hi_variant << 8L) | clock_seq_low
            int = ((time_low << 96L) | (time_mid << 80L) |
                   (time_hi_version << 64L) | (clock_seq << 48L) | node)
        if int is not None:
            if not 0 <= int < 1<<128L:
                raise ValueError('int is out of range (need a 128-bit value)')
        if version is not None:
            if not 1 <= version <= 5:
                raise ValueError('illegal version number')
            # Set the variant to RFC 4122.
            int &= ~(0xc000 << 48L)
            int |= 0x8000 << 48L
            # Set the version number.
            int &= ~(0xf000 << 64L)
            int |= version << 76L
        self.__dict__['int'] = int
    def __cmp__(self, other):
        if isinstance(other, UUID):
            return cmp(self.int, other.int)
        return NotImplemented
    def __hash__(self):
        return hash(self.int)
    def __int__(self):
        return self.int
    def __repr__(self):
        return 'UUID(%r)' % str(self)
    def __setattr__(self, name, value):
        raise TypeError('UUID objects are immutable')
    def __str__(self):
        hex = '%032x' % self.int
        return '%s-%s-%s-%s-%s' % (
            hex[:8], hex[8:12], hex[12:16], hex[16:20], hex[20:])
    def get_bytes(self):
        bytes = ''
        for shift in range(0, 128, 8):
            bytes = chr((self.int >> shift) & 0xff) + bytes
        return bytes
    bytes = property(get_bytes)
    def get_bytes_le(self):
        bytes = self.bytes
        return (bytes[3] + bytes[2] + bytes[1] + bytes[0] +
                bytes[5] + bytes[4] + bytes[7] + bytes[6] + bytes[8:])
    bytes_le = property(get_bytes_le)
    def get_fields(self):
        return (self.time_low, self.time_mid, self.time_hi_version,
                self.clock_seq_hi_variant, self.clock_seq_low, self.node)
    fields = property(get_fields)
    def get_time_low(self):
        return self.int >> 96L
    time_low = property(get_time_low)
    def get_time_mid(self):
        return (self.int >> 80L) & 0xffff
    time_mid = property(get_time_mid)
    def get_time_hi_version(self):
        return (self.int >> 64L) & 0xffff
    time_hi_version = property(get_time_hi_version)
    def get_clock_seq_hi_variant(self):
        return (self.int >> 56L) & 0xff
    clock_seq_hi_variant = property(get_clock_seq_hi_variant)
    def get_clock_seq_low(self):
        return (self.int >> 48L) & 0xff
    clock_seq_low = property(get_clock_seq_low)
    def get_time(self):
        return (((self.time_hi_version & 0x0fffL) << 48L) |
                (self.time_mid << 32L) | self.time_low)
    time = property(get_time)
    def get_clock_seq(self):
        return (((self.clock_seq_hi_variant & 0x3fL) << 8L) |
                self.clock_seq_low)
    clock_seq = property(get_clock_seq)
    def get_node(self):
        return self.int & 0xffffffffffff
    node = property(get_node)
    def get_hex(self):
        return '%032x' % self.int
    hex = property(get_hex)
    def get_urn(self):
        return 'urn:uuid:' + str(self)
    urn = property(get_urn)
    def get_variant(self):
        if not self.int & (0x8000 << 48L):
            return RESERVED_NCS
        elif not self.int & (0x4000 << 48L):
            return RFC_4122
        elif not self.int & (0x2000 << 48L):
            return RESERVED_MICROSOFT
        else:
            return RESERVED_FUTURE
    variant = property(get_variant)
    def get_version(self):
        # The version bits are only meaningful for RFC 4122 UUIDs.
        if self.variant == RFC_4122:
            return int((self.int >> 76L) & 0xf)
    version = property(get_version)
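# A minimal usage sketch of the class above, using the example value from the
# constructor docstring (all five construction forms yield the same object):
#   >>> u = UUID('{12345678-1234-5678-1234-567812345678}')
#   >>> u == UUID(int=0x12345678123456781234567812345678)
#   True
#   >>> u.hex
#   '12345678123456781234567812345678'
#   >>> u.urn
#   'urn:uuid:12345678-1234-5678-1234-567812345678'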
def _find_mac(command, args, hw_identifiers, get_index):
    import os
    for dir in ['', '/sbin/', '/usr/sbin']:
        executable = os.path.join(dir, command)
        if not os.path.exists(executable):
            continue
        try:
            # LC_ALL to get English output, 2>/dev/null to
            # prevent output on stderr
            cmd = 'LC_ALL=C %s %s 2>/dev/null' % (executable, args)
            pipe = os.popen(cmd)
        except IOError:
            continue
        for line in pipe:
            words = line.lower().split()
            for i in range(len(words)):
                if words[i] in hw_identifiers:
                    return int(words[get_index(i)].replace(':', ''), 16)
    return None
def _ifconfig_getnode():
    """Get the hardware address on Unix by running ifconfig."""
    # This works on Linux ('' or '-a'), Tru64 ('-av'), but not all Unixes.
    for args in ('', '-a', '-av'):
        mac = _find_mac('ifconfig', args, ['hwaddr', 'ether'], lambda i: i+1)
        if mac:
            return mac
    import socket
    ip_addr = socket.gethostbyname(socket.gethostname())
    # Try getting the MAC addr from arp based on our IP address (Solaris).
    mac = _find_mac('arp', '-an', [ip_addr], lambda i: -1)
    if mac:
        return mac
    # This might work on HP-UX.
    mac = _find_mac('lanscan', '-ai', ['lan0'], lambda i: 0)
    if mac:
        return mac
    return None
def _ipconfig_getnode():
    """Get the hardware address on Windows by running ipconfig.exe."""
    import os, re
    dirs = ['', r'c:\windows\system32', r'c:\winnt\system32']
    try:
        import ctypes
        buffer = ctypes.create_string_buffer(300)
        ctypes.windll.kernel32.GetSystemDirectoryA(buffer, 300)
        dirs.insert(0, buffer.value.decode('mbcs'))
    except:
        pass
    for dir in dirs:
        try:
            pipe = os.popen(os.path.join(dir, 'ipconfig') + ' /all')
        except IOError:
            continue
        for line in pipe:
            value = line.split(':')[-1].strip().lower()
            if re.match('([0-9a-f][0-9a-f]-){5}[0-9a-f][0-9a-f]', value):
                return int(value.replace('-', ''), 16)
def _netbios_getnode():
    """Get the hardware address on Windows using NetBIOS calls.
    See http://support.microsoft.com/kb/118623 for details."""
    import win32wnet, netbios
    ncb = netbios.NCB()
    ncb.Command = netbios.NCBENUM
    ncb.Buffer = adapters = netbios.LANA_ENUM()
    adapters._pack()
    if win32wnet.Netbios(ncb) != 0:
        return
    adapters._unpack()
    for i in range(adapters.length):
        ncb.Reset()
        ncb.Command = netbios.NCBRESET
        ncb.Lana_num = ord(adapters.lana[i])
        if win32wnet.Netbios(ncb) != 0:
            continue
        ncb.Reset()
        ncb.Command = netbios.NCBASTAT
        ncb.Lana_num = ord(adapters.lana[i])
        ncb.Callname = '*'.ljust(16)
        ncb.Buffer = status = netbios.ADAPTER_STATUS()
        if win32wnet.Netbios(ncb) != 0:
            continue
        status._unpack()
        bytes = map(ord, status.adapter_address)
        return ((bytes[0]<<40L) + (bytes[1]<<32L) + (bytes[2]<<24L) +
                (bytes[3]<<16L) + (bytes[4]<<8L) + bytes[5])
# Thanks to Thomas Heller for ctypes and for his help with its use here.
# If ctypes is available, use it to find system routines for UUID generation.
_uuid_generate_random = _uuid_generate_time = _UuidCreate = None
try:
    import ctypes, ctypes.util
    _buffer = ctypes.create_string_buffer(16)
    # The uuid_generate_* routines are provided by libuuid on at least
    # Linux and FreeBSD, and provided by libc on Mac OS X.
    for libname in ['uuid', 'c']:
        try:
            lib = ctypes.CDLL(ctypes.util.find_library(libname))
        except:
            continue
        if hasattr(lib, 'uuid_generate_random'):
            _uuid_generate_random = lib.uuid_generate_random
        if hasattr(lib, 'uuid_generate_time'):
            _uuid_generate_time = lib.uuid_generate_time
    # On Windows prior to 2000, UuidCreate gives a UUID containing the
    # hardware address.  On Windows 2000 and later, UuidCreate makes a
    # random UUID and UuidCreateSequential gives a UUID containing the
    # hardware address.  These routines are provided by the RPC runtime.
    # NOTE:  at least on Tim's WinXP Pro SP2 desktop box, while the last
    # 6 bytes returned by UuidCreateSequential are fixed, they don't appear
    # to bear any relationship to the MAC address of any network device
    # on the box.
    try:
        lib = ctypes.windll.rpcrt4
    except:
        lib = None
    _UuidCreate = getattr(lib, 'UuidCreateSequential',
                          getattr(lib, 'UuidCreate', None))
except:
    pass
def _unixdll_getnode():
    """Get the hardware address on Unix using ctypes."""
    _uuid_generate_time(_buffer)
    return UUID(bytes=_buffer.raw).node
def _windll_getnode():
    """Get the hardware address on Windows using ctypes."""
    if _UuidCreate(_buffer) == 0:
        return UUID(bytes=_buffer.raw).node
def _random_getnode():
    """Get a random node ID, with eighth bit set as suggested by RFC 4122."""
    import random
    return random.randrange(0, 1<<48L) | 0x010000000000L
_node = None
def getnode():
    """Get the hardware address as a 48-bit positive integer.
    The first time this runs, it may launch a separate program, which could
    be quite slow.  If all attempts to obtain the hardware address fail, we
    choose a random 48-bit number with its eighth bit set to 1 as recommended
    in RFC 4122.
    """
    global _node
    if _node is not None:
        return _node
    import sys
    if sys.platform == 'win32':
        getters = [_windll_getnode, _netbios_getnode, _ipconfig_getnode]
    else:
        getters = [_unixdll_getnode, _ifconfig_getnode]
    for getter in getters + [_random_getnode]:
        try:
            _node = getter()
        except:
            continue
        if _node is not None:
            return _node
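# Minimal illustration: getnode() returns a 48-bit int; formatting it with
# '%012x' % getnode() gives the familiar 12-hex-digit MAC-style form (the
# actual value is machine-specific, or random if no hardware address is found).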
_last_timestamp = None
def uuid1(node=None, clock_seq=None):
    """Generate a UUID from a host ID, sequence number, and the current time.
    If 'node' is not given, getnode() is used to obtain the hardware
    address.  If 'clock_seq' is given, it is used as the sequence number;
    otherwise a random 14-bit sequence number is chosen."""
    # When the system provides a version-1 UUID generator, use it (but don't
    # use UuidCreate here because its UUIDs don't conform to RFC 4122).
    if _uuid_generate_time and node is clock_seq is None:
        _uuid_generate_time(_buffer)
        return UUID(bytes=_buffer.raw)
    global _last_timestamp
    import time
    nanoseconds = int(time.time() * 1e9)
    # 0x01b21dd213814000 is the number of 100-ns intervals between the
    # UUID epoch 1582-10-15 00:00:00 and the Unix epoch 1970-01-01 00:00:00.
    timestamp = int(nanoseconds/100) + 0x01b21dd213814000L
    if timestamp <= _last_timestamp:
        timestamp = _last_timestamp + 1
    _last_timestamp = timestamp
    if clock_seq is None:
        import random
        clock_seq = random.randrange(1<<14L) # instead of stable storage
    time_low = timestamp & 0xffffffffL
    time_mid = (timestamp >> 32L) & 0xffffL
    time_hi_version = (timestamp >> 48L) & 0x0fffL
    clock_seq_low = clock_seq & 0xffL
    clock_seq_hi_variant = (clock_seq >> 8L) & 0x3fL
    if node is None:
        node = getnode()
    return UUID(fields=(time_low, time_mid, time_hi_version,
                        clock_seq_hi_variant, clock_seq_low, node), version=1)
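# Minimal usage sketch for uuid1(); the explicit node and clock_seq values
# below are arbitrary illustrations, not real hardware values:
#   >>> uuid1()                                    # node and clock_seq chosen automatically
#   >>> uuid1(node=0x123456789abc, clock_seq=0x1234)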
def uuid3(namespace, name):
    """Generate a UUID from the MD5 hash of a namespace UUID and a name."""
    try:
        from hashlib import md5
    except ImportError:
        from md5 import md5
    hash = md5(namespace.bytes + name).digest()
    return UUID(bytes=hash[:16], version=3)
def uuid4():
    """Generate a random UUID."""
    # When the system provides a version-4 UUID generator, use it.
    if _uuid_generate_random:
        _uuid_generate_random(_buffer)
        return UUID(bytes=_buffer.raw)
    # Otherwise, get randomness from urandom or the 'random' module.
    try:
        import os
        return UUID(bytes=os.urandom(16), version=4)
    except:
        import random
        bytes = [chr(random.randrange(256)) for i in range(16)]
        return UUID(bytes=bytes, version=4)
def uuid5(namespace, name):
    """Generate a UUID from the SHA-1 hash of a namespace UUID and a name."""
    try:
        from hashlib import sha1 as sha
    except ImportError:
        from sha import sha
    hash = sha(namespace.bytes + name).digest()
    return UUID(bytes=hash[:16], version=5)
# The following standard UUIDs are for use with uuid3() or uuid5().
NAMESPACE_DNS = UUID('6ba7b810-9dad-11d1-80b4-00c04fd430c8')
NAMESPACE_URL = UUID('6ba7b811-9dad-11d1-80b4-00c04fd430c8')
NAMESPACE_OID = UUID('6ba7b812-9dad-11d1-80b4-00c04fd430c8')
NAMESPACE_X500 = UUID('6ba7b814-9dad-11d1-80b4-00c04fd430c8')
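# Minimal usage sketch: the namespace constants above are intended as the
# first argument of uuid3() or uuid5(); the names passed are arbitrary examples:
#   >>> uuid3(NAMESPACE_DNS, 'python.org')         # MD5-based, version 3
#   >>> uuid5(NAMESPACE_URL, 'http://python.org')  # SHA-1-based, version 5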
 | 
	apache-2.0 | -3,320,657,386,343,334,000 | 36.149909 | 79 | 0.601447 | false | 
| 
	paolodedios/tensorflow | 
	tensorflow/python/data/kernel_tests/get_single_element_test.py | 
	6 | 
	4654 | 
	# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the experimental input pipeline ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.eager import function
from tensorflow.python.framework import combinations
from tensorflow.python.framework import errors
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
class GetSingleElementTest(test_base.DatasetTestBase, parameterized.TestCase):
  @combinations.generate(
      combinations.times(
          test_base.default_test_combinations(),
          combinations.combine(
              skip=[0, 5, 10], take=[1], error=[None], error_msg=[None]) +
          combinations.combine(
              skip=[100],
              take=[1],
              error=[errors.InvalidArgumentError],
              error_msg=["Dataset was empty."]) + combinations.combine(
                  skip=[0],
                  take=[2],
                  error=[errors.InvalidArgumentError],
                  error_msg=["Dataset had more than one element."])))
  def testGetSingleElement(self, skip, take, error=None, error_msg=None):
    def make_sparse(x):
      x_1d = array_ops.reshape(x, [1])
      x_2d = array_ops.reshape(x, [1, 1])
      return sparse_tensor.SparseTensor(x_2d, x_1d, x_1d)
    dataset = dataset_ops.Dataset.range(100).skip(skip).map(
        lambda x: (x * x, make_sparse(x))).take(take)
    if error is None:
      dense_val, sparse_val = self.evaluate(dataset.get_single_element())
      self.assertEqual(skip * skip, dense_val)
      self.assertAllEqual([[skip]], sparse_val.indices)
      self.assertAllEqual([skip], sparse_val.values)
      self.assertAllEqual([skip], sparse_val.dense_shape)
    else:
      with self.assertRaisesRegex(error, error_msg):
        self.evaluate(dataset.get_single_element())
  @combinations.generate(test_base.default_test_combinations())
  def testWindow(self):
    """Test that `get_single_element()` can consume a nested dataset."""
    def flat_map_func(ds):
      batched = ds.batch(2)
      element = batched.get_single_element()
      return dataset_ops.Dataset.from_tensors(element)
    dataset = dataset_ops.Dataset.range(10).window(2).flat_map(flat_map_func)
    self.assertDatasetProduces(dataset,
                               [[0, 1], [2, 3], [4, 5], [6, 7], [8, 9]])
  @combinations.generate(test_base.default_test_combinations())
  def testSideEffect(self):
    counter_var = variables.Variable(0)
    def increment_fn(x):
      counter_var.assign_add(1)
      return x
    def dataset_fn():
      return dataset_ops.Dataset.range(1).map(increment_fn)
    @function.defun
    def fn():
      _ = dataset_fn().get_single_element()
      return "hello"
    self.evaluate(counter_var.initializer)
    self.assertEqual(self.evaluate(fn()), b"hello")
    self.assertEqual(self.evaluate(counter_var), 1)
  @combinations.generate(test_base.default_test_combinations())
  def testAutomaticControlDependencies(self):
    counter_var = variables.Variable(1)
    def increment_fn(x):
      counter_var.assign(counter_var + 1)
      return x
    def multiply_fn(x):
      counter_var.assign(counter_var * 2)
      return x
    def dataset1_fn():
      return dataset_ops.Dataset.range(1).map(increment_fn)
    def dataset2_fn():
      return dataset_ops.Dataset.range(1).map(multiply_fn)
    @function.defun
    def fn():
      _ = dataset1_fn().get_single_element()
      _ = dataset2_fn().get_single_element()
      return "hello"
    self.evaluate(counter_var.initializer)
    self.assertEqual(self.evaluate(fn()), b"hello")
    self.assertEqual(self.evaluate(counter_var), 4)
if __name__ == "__main__":
  test.main()
 | 
	apache-2.0 | -3,730,049,455,417,787,000 | 34.526718 | 80 | 0.663086 | false | 
| 
	timj/scons | 
	test/MSVS/vs-7.0-files.py | 
	5 | 
	3388 | 
	#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test that we can generate Visual Studio 7.0 project (.vcproj) and
solution (.sln) files that look correct.
"""
import os
import TestSConsMSVS
test = TestSConsMSVS.TestSConsMSVS()
host_arch = test.get_vs_host_arch()
# Make the test infrastructure think we have this version of MSVS installed.
test._msvs_versions = ['7.0']
expected_slnfile = TestSConsMSVS.expected_slnfile_7_0
expected_vcprojfile = TestSConsMSVS.expected_vcprojfile_7_0
SConscript_contents = TestSConsMSVS.SConscript_contents_7_0
test.write('SConstruct', SConscript_contents%{'HOST_ARCH': host_arch})
test.run(arguments="Test.vcproj")
test.must_exist(test.workpath('Test.vcproj'))
vcproj = test.read('Test.vcproj', 'r')
expect = test.msvs_substitute(expected_vcprojfile, '7.0', None, 'SConstruct')
# don't compare the pickled data
assert vcproj[:len(expect)] == expect, test.diff_substr(expect, vcproj)
test.must_exist(test.workpath('Test.sln'))
sln = test.read('Test.sln', 'r')
expect = test.msvs_substitute(expected_slnfile, '7.0', None, 'SConstruct')
# don't compare the pickled data
assert sln[:len(expect)] == expect, test.diff_substr(expect, sln)
test.run(arguments='-c .')
test.must_not_exist(test.workpath('Test.vcproj'))
test.must_not_exist(test.workpath('Test.sln'))
test.run(arguments='Test.vcproj')
test.must_exist(test.workpath('Test.vcproj'))
test.must_exist(test.workpath('Test.sln'))
test.run(arguments='-c Test.sln')
test.must_not_exist(test.workpath('Test.vcproj'))
test.must_not_exist(test.workpath('Test.sln'))
# Test that running SCons with $PYTHON_ROOT in the environment
# changes the .vcproj output as expected.
os.environ['PYTHON_ROOT'] = 'xyzzy'
python = os.path.join('$(PYTHON_ROOT)', os.path.split(TestSConsMSVS.python)[1])
test.run(arguments='Test.vcproj')
test.must_exist(test.workpath('Test.vcproj'))
vcproj = test.read('Test.vcproj', 'r')
expect = test.msvs_substitute(expected_vcprojfile, '7.0', None, 'SConstruct',
                              python=python)
# don't compare the pickled data
assert vcproj[:len(expect)] == expect, test.diff_substr(expect, vcproj)
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
 | 
	mit | -6,432,081,839,195,521,000 | 30.962264 | 79 | 0.735832 | false | 
| 
	QuakeMan/3123 | 
	qa/rpc-tests/getblocktemplate.py | 
	8 | 
	3715 | 
	#!/usr/bin/env python
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Exercise the getblocktemplate longpolling API
from test_framework import BitcoinTestFramework
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from util import *
def check_array_result(object_array, to_match, expected):
    """
    Pass in array of JSON objects, a dictionary with key/value pairs
    to match against, and another dictionary with expected key/value
    pairs.
    """
    num_matched = 0
    for item in object_array:
        all_match = True
        for key,value in to_match.items():
            if item[key] != value:
                all_match = False
        if not all_match:
            continue
        for key,value in expected.items():
            if item[key] != value:
                raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value)))
            num_matched = num_matched+1
    if num_matched == 0:
        raise AssertionError("No objects matched %s"%(str(to_match)))
import threading
class LongpollThread(threading.Thread):
    def __init__(self, node):
        threading.Thread.__init__(self)
        # query current longpollid
        templat = node.getblocktemplate()
        self.longpollid = templat['longpollid']
        # create a new connection to the node, we can't use the same
        # connection from two threads
        self.node = AuthServiceProxy(node.url, timeout=600)
    def run(self):
        self.node.getblocktemplate({'longpollid':self.longpollid})
class GetBlockTemplateTest(BitcoinTestFramework):
    '''
    Test longpolling with getblocktemplate.
    '''
    def run_test(self):
        print "Warning: this test will take about 70 seconds in the best case. Be patient."
        self.nodes[0].setgenerate(True, 10)
        templat = self.nodes[0].getblocktemplate()
        longpollid = templat['longpollid']
        # longpollid should not change between successive invocations if nothing else happens
        templat2 = self.nodes[0].getblocktemplate()
        assert(templat2['longpollid'] == longpollid)
        # Test 1: test that the longpolling wait if we do nothing
        thr = LongpollThread(self.nodes[0])
        thr.start()
        # check that thread still lives
        thr.join(5)  # wait 5 seconds or until thread exits
        assert(thr.is_alive())
        # Test 2: test that longpoll will terminate if another node generates a block
        self.nodes[1].setgenerate(True, 1)  # generate a block on another node
        # check that the longpoll thread exits now that another node has generated a block
        thr.join(5)  # wait 5 seconds or until thread exits
        assert(not thr.is_alive())
        # Test 3: test that longpoll will terminate if we generate a block ourselves
        thr = LongpollThread(self.nodes[0])
        thr.start()
        self.nodes[0].setgenerate(True, 1)  # generate a block on this node (ourselves)
        thr.join(5)  # wait 5 seconds or until thread exits
        assert(not thr.is_alive())
        # Test 4: test that introducing a new transaction into the mempool will terminate the longpoll
        thr = LongpollThread(self.nodes[0])
        thr.start()
        # generate a random transaction and submit it
        (txid, txhex, fee) = random_transaction(self.nodes, Decimal("1.1"), Decimal("0.0"), Decimal("0.001"), 20)
        # after one minute, every 10 seconds the mempool is probed, so in 80 seconds it should have returned
        thr.join(60 + 20)
        assert(not thr.is_alive())
if __name__ == '__main__':
    GetBlockTemplateTest().main()
 | 
	mit | -3,679,790,004,888,795,000 | 38.521277 | 113 | 0.653836 | false | 
| 
	henzk/ape | 
	ape/installtools/venv.py | 
	1 | 
	2084 | 
	from subprocess import check_call
import glob
import os
import sys
class VirtualEnv(object):
    def __init__(self, venv_dir):
        self.venv_dir = venv_dir
        self.bin_dir = os.path.join(venv_dir, 'Scripts' if os.name == 'nt' else 'bin')
    def call_bin(self, script_name, args):
        check_call([os.path.join(self.bin_dir, script_name)] + list(args))
    def pip_install(self, repo_url):
        self.call_bin('pip', ['install', '-e', 'git+%s' % repo_url])
    def pip_install_requirements(self, file_path):
        file_path = os.path.join(os.environ['CONTAINER_DIR'], file_path)
        self.call_bin('pip', ['install', '-r', file_path])
    def get_paths(self):
        '''
        get list of module paths
        '''
        # guess site package dir of virtualenv (system dependent)
        venv_site_packages = '%s/lib/site-packages' % self.venv_dir
        if not os.path.isdir(venv_site_packages):
            venv_site_packages_glob = glob.glob('%s/lib/*/site-packages' % self.venv_dir)
            if len(venv_site_packages_glob):
                venv_site_packages = venv_site_packages_glob[0]
        return [
            self.venv_dir,
            venv_site_packages
        ]
    def pip(self, *args):
        self.call_bin('pip', list(args))
    def python(self, *args):
        self.call_bin('python', args)
    def python_oneliner(self, snippet):
        self.python('-c', snippet)
    @staticmethod
    def create_virtualenv(venv_dir, use_venv_module=True):
        """
        creates a new virtualenv in venv_dir
        By default, the built-in venv module is used.
        On older versions of python, you may set use_venv_module to False to use virtualenv
        """
        if not use_venv_module:
            try:
                check_call(['virtualenv', venv_dir, '--no-site-packages'])
            except OSError:
                raise Exception('You probably dont have virtualenv installed: sudo apt-get install python-virtualenv')
        else:
            check_call([sys.executable or 'python', '-m', 'venv', venv_dir])
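# A minimal usage sketch of VirtualEnv; the path, requirements file name and
# CONTAINER_DIR environment variable are hypothetical:
#   VirtualEnv.create_virtualenv('/tmp/example_venv')
#   venv = VirtualEnv('/tmp/example_venv')
#   venv.pip_install_requirements('requirements.txt')   # resolved relative to $CONTAINER_DIR
#   venv.python_oneliner("print('hello from the venv')")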
 | 
	mit | -2,250,576,364,328,804,600 | 30.575758 | 118 | 0.589251 | false | 
| 
	HighwayThree/ckanext-bcgov | 
	ckanext/bcgov/logic/auth/create.py | 
	3 | 
	1678 | 
	import ckan.model as model
from ckan.common import  c
import ckan.logic as logic
import ckan.authz as authz
import ckan.logic.auth as logic_auth
from ckan.logic.auth.create import _check_group_auth
from ckan.common import _
import pprint
@logic.auth_allow_anonymous_access
def package_create(context, data_dict=None):
    user = context['user']
    user_object = context.get('auth_user_obj')
    # Sysadmin users have all the privileges
    if user_object and user_object.sysadmin:
        return {'success': True}
    #Do not authorize anonymous users
    if authz.auth_is_anon_user(context):
        return {'success': False, 'msg': _('User %s not authorized to create packages') % user}
    
    #Check if the user has the editor or admin role in some org/suborg
    check1 = all(authz.check_config_permission(p) for p in (
        'create_dataset_if_not_in_organization',
        'create_unowned_dataset',
        )) or authz.has_user_permission_for_some_org(
        user, 'create_dataset')
    if not check1:
        return {'success': False, 'msg': _('User %s not authorized to create packages') % user}
    check2 = _check_group_auth(context,data_dict)
    if not check2:
        return {'success': False, 'msg': _('User %s not authorized to edit these groups') % user}
    # If an organization is given are we able to add a dataset to it?
    data_dict = data_dict or {}
    org_id = data_dict.get('owner_org')
    if org_id and not authz.has_user_permission_for_group_or_org(
            org_id, user, 'create_dataset'):
        return {'success': False, 'msg': _('User %s not authorized to add dataset to this organization') % user}
    return {'success': True}
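# A minimal sketch of how the auth function above is typically consumed
# (the org id is hypothetical); CKAN's auth machinery inspects the returned dict:
#   result = package_create(context, {'owner_org': 'some-org-id'})
#   if not result['success']:
#       raise logic.NotAuthorized(result.get('msg', ''))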
 | 
	agpl-3.0 | 5,681,177,048,385,416,000 | 32.56 | 112 | 0.665077 | false | 
| 
	SarathM1/modbus | 
	setup.py | 
	1 | 
	2254 | 
	#!/usr/bin/env python
# Do not import non-standard modules here, as it will mess up the installation in clients.
import re
from distutils.core import setup
with open('README.txt') as readmefile:
    long_description = readmefile.read()
# Read version number etc from other file
# http://stackoverflow.com/questions/2058802/how-can-i-get-the-version-defined-in-setup-py-setuptools-in-my-package
with open('minimalmodbus.py') as mainfile:
    main_py = mainfile.read()
metadata = dict( re.findall(r"__([a-z]+)__ *= *'([^']+)'", main_py) )
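# Illustration of the extraction above (values are hypothetical): source lines
# such as  __version__ = '0.7'  and  __license__ = 'Apache License, Version 2.0'
# produce metadata == {'version': '0.7', 'license': 'Apache License, Version 2.0'}.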
setup(name       = 'MinimalModbus',
    version      = metadata['version'],
    license      = metadata['license'],
    author       = metadata['author'],
    author_email = metadata['email'],
    url          = metadata['url'],
    keywords     = 'modbus serial RTU ASCII',
    description  = 'Easy-to-use Modbus RTU and Modbus ASCII implementation for Python',
    long_description = long_description,
    py_modules = ['minimalmodbus', 'eurotherm3500', 'omegacn7500', 'dummy_serial'],
    install_requires = ['pyserial'],
    classifiers = [ 
        'Intended Audience :: Developers',
        'Intended Audience :: Information Technology',
        'Intended Audience :: Science/Research',
        'Intended Audience :: Manufacturing',
        'License :: OSI Approved :: Apache Software License',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Operating System :: POSIX',
        'Operating System :: Microsoft :: Windows',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7', 
        'Programming Language :: Python :: 3', 
        'Programming Language :: Python :: 3.2', 
        'Topic :: Communications',
        'Topic :: Home Automation',
        'Topic :: Scientific/Engineering',
        'Topic :: Software Development :: Libraries',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: System :: Hardware :: Hardware Drivers',
        'Topic :: Terminals :: Serial',
        ],
    )
# See PEP396 how to derive the version number from the source file: http://www.python.org/dev/peps/pep-0396/#deriving
# Note that additional files for inclusion are defined in MANIFEST.in
 | 
	apache-2.0 | 5,582,982,210,738,911,000 | 39.981818 | 117 | 0.638421 | false | 
| 
	Motsai/neblina-python | 
	neblina.py | 
	1 | 
	11677 | 
	#!/usr/bin/env python
###################################################################################
# @package neblina
# Copyright (c)     2010-2016   Motsai
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
###################################################################################
class BitMask:
    """
        BitMask pattern for packet header decoding
    """
    SubSystem = 0x1F
    PacketType = 0xE0
###################################################################################
class BitPosition:
    """
        Bit position for packet header decoding
    """
    PacketType = 5
###################################################################################
class Interface:
    """
        Neblina communication interface
    """
    BLE = 0x00      # Bluetooth Smart (Low Energy)
    UART = 0x01     # Serial
###################################################################################
class PacketType:
    """
        Neblina packet type
    """
    RegularResponse = 0x00
    Ack = 0x01
    Command = 0x02
    ErrorLogResp = 0x04
    ErrorLogCmd = 0x06
    class String:
        RegularResponse = "RegularResponse"
        Ack = "Acknowledge"
        Command = "Command"
        ErrorLogResp = "Error Response"
        ErrorLogCmd = "Error Command"
###################################################################################
class SubSystem:
    """
        Neblina subsystem
    """
    Debug = 0x00
    Motion = 0x01
    Power = 0x02
    DigitalIO = 0x03
    LED = 0x04
    ADC = 0x05
    DAC = 0x06
    I2C = 0x07
    SPI = 0x08
    Firmware = 0x09
    Crypto = 0x0A
    Storage = 0x0B
    EEPROM = 0x0C
###################################################################################
class Erase:
    """
        Neblina storage erase type
    """
    Quick = 0x00
    Mass = 0x01
###################################################################################
class Commands:
    """
        Neblina commands for various subsystem
    """
    class Debug:
        """
            Neblina debug commands
        """
        SetInterface = 0x01
        MotAndFlashRecState = 0x02
        StartUnitTestMotion = 0x03
        UnitTestMotionData = 0x04
        FWVersions = 0x05
        InterfaceState = 0x09
    class Power:
        """
            Neblina power management commands
        """
        GetBatteryLevel = 0x00
        GetTemperature = 0x01
    class Motion:
        """
            Neblina motion engine commands
        """
        Downsample = 0x01  # Downsampling factor definition
        MotionState = 0x02  # streaming Motion State
        IMU = 0x03  # streaming the 6-axis IMU data
        Quaternion = 0x04  # streaming the quaternion data
        EulerAngle = 0x05  # streaming the Euler angles
        ExtForce = 0x06  # streaming the external force
        SetFusionType = 0x07  # setting the Fusion type to either 6-axis or 9-axis
        TrajectoryRecStartStop = 0x08  # start recording orientation trajectory
        TrajectoryInfo = 0x09  # calculating the distance from a pre-recorded orientation trajectory
        Pedometer = 0x0A  # streaming pedometer data
        MAG = 0x0B  # streaming magnetometer data
        SittingStanding = 0x0C  # streaming sitting standing
        AccRange = 0x0E  # set accelerometer range
        DisableStreaming = 0x0F  # disable everything that is currently being streamed
        ResetTimeStamp = 0x10  # Reset timestamp
        FingerGesture = 0x11  # Finger Gesture command
        RotationInfo = 0x12  # Rotation info in roll: number of rotations and speed in rpm
        MotionCount = 0x13  # Keep last with next value
    class Storage:
        """
            Neblina storage commands
        """
        EraseAll = 0x01  # Full-erase for the on-chip NOR flash memory
        Record = 0x02  # Either start a new recording session, or close the currently open one
        Playback = 0x03  # Open a previously recorded session for playback, or close the session currently being played back
        NumSessions = 0x04  # Get Number of sessions currently on the flash storage
        SessionInfo = 0x05  # Get information associated with a particular session
    class EEPROM:
        """
            Neblina EEPROM commands
        """
        Read = 0x01  # Read a page
        Write = 0x02  # Write to a page
    class DigitalIO:
        """
            Neblina Digital IO (DEPRECATED)
        """
        SetConfig = 0x01
        GetConfig = 0x02
        SetValue = 0x03
        GetValue = 0x04
        NotifySet = 0x05
        NotifyEvent = 0x06
    class Firmware:
        """
            Neblina firmware commands (DEPRECATED)
        """
        Main = 0x01
        BLE = 0x02
    class LED:
        """
            Neblina LED commands
        """
        SetVal = 0x01
        GetVal = 0x02
        Config = 0x03
    class BLE:
        """
            Neblina BLE commands (DEPRECATED)
        """
        Receive = 0x01
###################################################################################
CommandStrings = {
    (SubSystem.Debug, Commands.Debug.SetInterface): 'Set Interface',
    (SubSystem.Debug, Commands.Debug.MotAndFlashRecState): 'Check Motion and Flash Recorder States',
    (SubSystem.Debug, Commands.Debug.StartUnitTestMotion): 'Enable/Disable Unit Test Motion',
    (SubSystem.Debug, Commands.Debug.UnitTestMotionData): 'Unit Test Data',
    (SubSystem.Debug, Commands.Debug.FWVersions): 'Firmware Versions',
    (SubSystem.Motion, Commands.Motion.Downsample): 'Downsample',
    (SubSystem.Motion, Commands.Motion.MotionState): 'MotionState',
    (SubSystem.Motion, Commands.Motion.IMU): 'IMU Data',
    (SubSystem.Motion, Commands.Motion.Quaternion): 'Quaternion',
    (SubSystem.Motion, Commands.Motion.EulerAngle): 'Euler Angle',
    (SubSystem.Motion, Commands.Motion.ExtForce): 'ExtForce',
    (SubSystem.Motion, Commands.Motion.SetFusionType): 'SetFusionType',
    (SubSystem.Motion, Commands.Motion.TrajectoryRecStartStop): 'Trajectory Record Start',
    (SubSystem.Motion, Commands.Motion.TrajectoryInfo): 'Trajectory Distance',
    (SubSystem.Motion, Commands.Motion.Pedometer): 'Pedometer',
    (SubSystem.Motion, Commands.Motion.MAG): 'MAG Data',
    (SubSystem.Motion, Commands.Motion.SittingStanding): 'Sitting-Standing',
    (SubSystem.Motion, Commands.Motion.AccRange): 'AccRange',
    (SubSystem.Motion, Commands.Motion.DisableStreaming): 'Disable Streaming',
    (SubSystem.Motion, Commands.Motion.ResetTimeStamp): 'Reset Timestamp',
    (SubSystem.Motion, Commands.Motion.FingerGesture): 'Finger Gesture',
    (SubSystem.Motion, Commands.Motion.RotationInfo): 'Rotation Info',
    (SubSystem.Power, Commands.Power.GetBatteryLevel): 'Battery Level',
    (SubSystem.Power, Commands.Power.GetTemperature): 'Board Temperature',
    (SubSystem.DigitalIO, Commands.DigitalIO.SetConfig): 'Set Config',
    (SubSystem.DigitalIO, Commands.DigitalIO.GetConfig): 'Get Config',
    (SubSystem.DigitalIO, Commands.DigitalIO.SetValue): 'Set Value',
    (SubSystem.DigitalIO, Commands.DigitalIO.GetValue): 'Get Value',
    (SubSystem.DigitalIO, Commands.DigitalIO.NotifySet): 'Notify Set',
    (SubSystem.DigitalIO, Commands.DigitalIO.NotifyEvent): 'Notify Event',
    (SubSystem.LED, Commands.LED.SetVal): 'LED Set Value',
    (SubSystem.LED, Commands.LED.GetVal): 'LED Read Value',
    (SubSystem.LED, Commands.LED.Config): 'LED Config',
    (SubSystem.ADC, 0): 'ADC Command',
    (SubSystem.DAC, 0): 'DAC Command',
    (SubSystem.I2C, 0): 'I2C Command',
    (SubSystem.SPI, 0): 'SPI Command',
    (SubSystem.Firmware, Commands.Firmware.Main): 'Main Firmware',
    (SubSystem.Firmware, Commands.Firmware.BLE): 'Nordic Firmware',
    (SubSystem.Crypto, 0): 'Crypto Command',
    (SubSystem.Storage, Commands.Storage.EraseAll): 'Erase All',
    (SubSystem.Storage, Commands.Storage.Record): 'Record',
    (SubSystem.Storage, Commands.Storage.Playback): 'Playback',
    (SubSystem.Storage, Commands.Storage.NumSessions): 'Num Sessions',
    (SubSystem.Storage, Commands.Storage.SessionInfo): 'Session ID',
    (SubSystem.EEPROM, Commands.EEPROM.Read): 'Read',
    (SubSystem.EEPROM, Commands.EEPROM.Write): 'Write',
}
###################################################################################
# Dictionary containing the string descriptors of each command
###################################################################################
class Formatting:
    class Data:
        Blank = "16s"  # Blank 16 bytes
        MotionAndFlash = "<I 4s B 7s"  # Timestamp (unused for now), downsample factor
        EEPROMRead = "<H 8s 6s"  # Page number, 8 bytes Read Data
        LEDGetVal = "<B B B B B B B B"  # 8 LEDs values
        BatteryLevel = "<I H 10s"  # Battery Level (%)
        Temperature = "<I h 10s"  # Temperature x100 in Celsius
        FlashNumSessions = "<I H 10s"  # Reserved, number of sessions
        FWVersions = "<B 3B 3B 8s"  # API Release, MCU Major/Minor/Build, BLE Major/Minor/Build, Device ID
        UnitTestMotion = "<B 3h 3h 3h 4h 3h 3h 3h H B I I h B I I"
        MotionState = "<I B 11s"  # Timestamp, start/stop
        ExternalForce = "<I 3h 6s"  # Timestamp, External force xyz
        TrajectoryDistance = "<I 3h H B 3s"  # Timestamp, Euler angle errors, repeat count, completion percentage
        Pedometer = "<I H B h 7s"  # Timestamp, stepCount, stepsPerMinute, walking direction
        FingerGesture = "<I B 11s"  # Timestamp, rotationCount
        RotationInfo = "<I I H 6s"  # Timestamp, rotationCount, rpm speed
        Quaternion = "<I 4h 4s"  # Timestamp, quaternion
        IMU = "<I 3h 3h"  # Timestamp, accelerometer(xyz), gyroscope(xyz)
        MAG = "<I 3h 3h"  # Timestamp, magnetometer(xyz), accelerometer(xyz)
        Euler = "<I 4h 4s"  # Timestamp, Euler angle (yaw,pitch,roll,heading)
    class CommandData:
        Header = "<4B"
        Command = "<I B 11s"  # Timestamp (unused for now), enable/disable, garbage
        FlashSession = "<I B H 9s"  # Timestamp, open/close, session ID
        FlashSessionInfo = "<I H 10s"  # Timestamp, session ID
        UnitTestMotion = "<I 3h 3h 3h"  # Timestamp, accel, gyro, mag
        AccRange = "<I H 10s"  # Timestamp (unused for now), downsample factor
        Downsample = "<I H 10s"  # Timestamp (unused for now), downsample factor
        GetLED = "<B {0}s {1}s"  # Number of LEDs, LED Index x LEDs, LED Value x LEDs
        SetLED = "<B {0}s {1}s"  # Number of LEDs, LED Index x LEDs, LED Value x LEDs
        EEPROM = "<H 8s 6s"  # Page number, 8 bytes R/W Data
        SetDataPortState = "<B B"  # Port ID, Open/Close
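# A minimal decoding sketch (the 16-byte payload value is hypothetical): the
# strings above are struct format strings, so e.g. an IMU packet can be unpacked with
#   import struct
#   timestamp, ax, ay, az, gx, gy, gz = struct.unpack(Formatting.Data.IMU, payload)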
 | 
	mit | 489,607,470,711,761,700 | 38.053512 | 117 | 0.602723 | false | 
| 
	sbkro/alc | 
	doc/conf.py | 
	1 | 
	8162 | 
	# -*- coding: utf-8 -*-
#
# alc documentation build configuration file, created by
# sphinx-quickstart on Sun Jul 27 18:28:17 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc', 'sphinxcontrib.programoutput']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'alc'
copyright = u'2014, sbkro'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1.0'
# The full version, including alpha/beta/rc tags.
release = '0.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
html_theme = 'haiku'
# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar.  Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'alcdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
  ('index', 'alc.tex', u'alc Documentation',
   u'sbkro', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'alc', u'alc Documentation',
     [u'sbkro'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
  ('index', 'alc', u'alc Documentation',
   u'sbkro', 'alc', 'One line description of project.',
   'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
 | 
	bsd-3-clause | -3,891,476,031,534,156,000 | 30.635659 | 79 | 0.706199 | false | 
| 
	indico/indico-migrate | 
	indico_migrate/steps/events/participants.py | 
	1 | 
	12515 | 
	# This file is part of Indico.
# Copyright (C) 2002 - 2017 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from itertools import chain
from uuid import uuid4
from werkzeug.utils import cached_property
from indico.core.db import db
from indico.modules.events.features.util import set_feature_enabled
from indico.modules.events.payment import payment_settings
from indico.modules.events.registration.models.form_fields import RegistrationFormField
from indico.modules.events.registration.models.forms import ModificationMode, RegistrationForm
from indico.modules.events.registration.models.items import PersonalDataType, RegistrationFormSection
from indico.modules.events.registration.models.registrations import Registration, RegistrationData, RegistrationState
from indico.modules.events.registration.util import create_personal_data_fields
from indico.util.date_time import now_utc
from indico.util.string import normalize_phone_number
from indico_migrate.steps.events import EventMigrationStep
from indico_migrate.util import convert_to_unicode
PARTICIPATION_FORM_TITLE = 'Participants'
PARTICIPANT_ATTR_MAP = {
    PersonalDataType.affiliation: '_affiliation',
    PersonalDataType.address: '_address',
    PersonalDataType.phone: '_telephone'
}
PARTICIPANT_STATUS_MAP = {
    'declined': RegistrationState.rejected,
    'refused': RegistrationState.withdrawn,
    'rejected': RegistrationState.withdrawn,
    'pending': RegistrationState.pending
}
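# Minimal illustration of the lookup direction of the two maps above (values
# come straight from the dicts; the legacy participant itself is hypothetical):
#   PARTICIPANT_ATTR_MAP[PersonalDataType.phone]   -> '_telephone'
#   PARTICIPANT_STATUS_MAP.get('declined')         -> RegistrationState.rejected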
class EventParticipantsImporter(EventMigrationStep):
    step_id = 'part'
    def migrate(self):
        self.regform = None
        self.emails = set()
        self.users = set()
        self.pd_field_map = {}
        self.status_field = None
        self.status_map = {}
        self.title_map = {}
        self.past_event = self.event.end_dt < now_utc()
        self.migrate_regforms()
    def migrate_regforms(self):
        try:
            self.old_participation = self.conf._participation
        except AttributeError:
            self.print_info('Event has no participation')
            return
        if not self.old_participation._participantList and not self.old_participation._pendingParticipantList:
            self.print_info('Participant lists are empty')
            return
        set_feature_enabled(self.event, 'registration', True)
        with db.session.no_autoflush:
            self.regform = RegistrationForm(event_id=self.event.id, title=PARTICIPATION_FORM_TITLE,
                                            is_participation=True, currency=payment_settings.get('currency'))
            if not self.quiet:
                self.print_success('%[cyan]{}'.format(self.regform.title))
            self._migrate_settings()
            self._create_form()
            self._migrate_participants()
        db.session.add(self.regform)
        db.session.flush()
    def teardown(self):
        # add all managers as registration notification recipients
        db.session.execute(db.text("""
            UPDATE event_registration.forms rf SET manager_notification_recipients = (
                SELECT array_agg(ue.email)
                FROM events.principals p
                JOIN users.emails ue ON (ue.user_id = p.user_id AND NOT ue.is_user_deleted AND ue.is_primary)
                WHERE p.event_id = rf.event_id AND p.full_access AND p.type = 1
            )
            WHERE manager_notification_recipients = '{}' AND manager_notifications_enabled AND title = :title;
        """).bindparams(title=PARTICIPATION_FORM_TITLE))
        db.session.commit()
    def iter_participants(self):
        return chain(self.old_participation._participantList.itervalues(),
                     self.old_participation._pendingParticipantList.itervalues(),
                     getattr(self.old_participation, '_declinedParticipantList', {}).itervalues())
    @cached_property
    def status_used(self):
        default_statuses = {'added', 'pending'}
        return any(p._status not in default_statuses for p in self.iter_participants())
    def _migrate_settings(self):
        old_part = self.old_participation
        if old_part._allowedForApplying:
            self.regform.start_dt = self.event.created_dt
            self.regform.end_dt = self.event.end_dt
        self.regform.moderation_enabled = not getattr(old_part, '_autoAccept', False)
        self.regform.publish_registrations_enabled = old_part._displayParticipantList
        self.regform.registration_limit = max(0, int(getattr(old_part, '_numMaxParticipants', 0))) or None
        self.regform.manager_notifications_enabled = getattr(old_part, '_notifyMgrNewParticipant', False)
        self.regform.modification_mode = ModificationMode.not_allowed
        # manager emails are migrated afterwards
    def _create_form(self):
        create_personal_data_fields(self.regform)
        for item in self.regform.form_items:
            if not item.is_field:
                item.position = 1  # pd section
                continue
            # we have nothing but personal data fields right now. no need for extra checks!
            if item.personal_data_type != PersonalDataType.country:
                self.pd_field_map[item.personal_data_type] = item
            if item.personal_data_type == PersonalDataType.title:
                self.title_map = {v: k for k, v in item.data['captions'].iteritems()}
        # create administrative section for statuses
        if self.status_used:
            section = RegistrationFormSection(registration_form=self.regform, is_manager_only=True, title='Status',
                                              position=2)
            if self.status_used:
                choices = []
                for status in ('refused', 'excused', 'invited', 'accepted', 'rejected', 'declined'):
                    uuid = unicode(uuid4())
                    caption = status.title()
                    choices.append({'price': 0, 'is_billable': False, 'places_limit': 0, 'is_enabled': True,
                                    'caption': caption, 'id': uuid})
                    self.status_map[status] = {'uuid': uuid, 'caption': caption}
                field_data = {
                    'item_type': 'dropdown',
                    'with_extra_slots': False,
                    'default_item': None,
                    'choices': choices
                }
                self.status_field = field = RegistrationFormField(registration_form=self.regform, parent=section,
                                                                  input_type='single_choice', title='Status')
                field.data, field.versioned_data = field.field_impl.process_field_data(field_data)
    def _migrate_participants(self):
        offset = self.event_ns.misc_data.get('last_registrant_friendly_id', 0)
        for n, old_part in enumerate(self.iter_participants(), offset + 1):
            registration = self._migrate_participant(old_part)
            registration.friendly_id = n
            self.regform.registrations.append(registration)
        db.session.flush()
    def _migrate_participant(self, old_part):
        state = PARTICIPANT_STATUS_MAP.get(old_part._status, RegistrationState.complete)
        registration = Registration(first_name=convert_to_unicode(old_part._firstName),
                                    last_name=convert_to_unicode(old_part._familyName),
                                    email=self._fix_email(old_part._email),
                                    submitted_dt=self.event.created_dt,
                                    base_price=0, price_adjustment=0,
                                    checked_in=old_part._present, state=state,
                                    currency=payment_settings.get('currency'))
        self.print_info('%[yellow]Registration%[reset] - %[cyan]{}%[reset] [{}]'
                        .format(registration.full_name, state.title))
        self._migrate_participant_user(old_part, registration)
        self._migrate_participant_data(old_part, registration)
        self._migrate_participant_status(old_part, registration)
        return registration
    def _fix_email(self, email):
        email = convert_to_unicode(email).lower() or '[email protected]'
        no_email = email == '[email protected]'
        try:
            user, host = email.split('@', 1)
        except ValueError:
            self.print_warning('Garbage email %[red]{0}%[reset]; using %[green]{0}@example.com%[reset] instead'
                               .format(email))
            user = email
            host = 'example.com'
            email += '@example.com'
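        # Example: if 'a@example.com' was already used by an earlier registration,
        # the loop below turns a second occurrence into 'a+1@example.com', a third
        # into 'a+2@example.com', and so on, so every registration keeps a unique
        # address.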
        n = 1
        while email in self.emails:
            email = '{}+{}@{}'.format(user, n, host)
            n += 1
        if n != 1 and not no_email:
            self.print_warning('Duplicate email %[yellow]{}@{}%[reset]; using %[green]{}%[reset] instead'
                               .format(user, host, email))
        self.emails.add(email)
        return email
    def _migrate_participant_user(self, old_part, registration):
        user = self.global_ns.users_by_email.get(registration.email)
        if user is not None:
            if user in self.users:
                self.print_warning('User {} is already associated with a registration; not associating them with {}'
                                   .format(user, registration))
                return
            self.users.add(user)
            registration.user = user
        if not self.past_event and old_part._avatar and old_part._avatar.id in self.global_ns.avatar_merged_user:
            user = self.global_ns.avatar_merged_user[old_part._avatar.id]
            if not registration.user:
                self.print_warning('No email match; discarding association between {} and {}'
                                   .format(user, registration))
            elif registration.user != user:
                self.print_warning('Email matches other user; associating {} with {} instead of {}'
                                   .format(registration, registration.user, user))
    def _migrate_participant_data(self, old_part, registration):
        for pd_type, field in self.pd_field_map.iteritems():
            if pd_type.column:
                friendly_value = value = getattr(registration, pd_type.column)
            elif pd_type == PersonalDataType.title:
                try:
                    value = {self.title_map[old_part._title]: 1}
                except KeyError:
                    value = None
                friendly_value = convert_to_unicode(old_part._title)
            elif pd_type == PersonalDataType.position:
                continue
            else:
                value = convert_to_unicode(getattr(old_part, PARTICIPANT_ATTR_MAP[pd_type]))
                if pd_type == PersonalDataType.phone and value:
                    value = normalize_phone_number(value)
                friendly_value = value
            if value:
                field.is_enabled = True
            if not self.quiet:
                self.print_info('%[yellow!]{}%[reset] %[cyan!]{}%[reset]'.format(pd_type.name, friendly_value))
            registration.data.append(RegistrationData(field_data=field.current_data, data=value))
    def _migrate_participant_status(self, old_part, registration):
        if not self.status_used:
            return
        if old_part._status not in {'added', 'pending'}:
            status_info = self.status_map[old_part._status]
            data = {status_info['uuid']: 1}
            caption = status_info['caption']
        else:
            data = None
            caption = ''
        if not self.quiet and data:
            self.print_info('%[red]STATUS%[reset] %[cyan]{}'.format(caption))
        registration.data.append(RegistrationData(field_data=self.status_field.current_data, data=data))
 | 
	gpl-3.0 | 6,717,323,560,343,502,000 | 46.950192 | 117 | 0.611666 | false | 
| 
	chuan9/chromium-crosswalk | 
	tools/perf/page_sets/memory_health_story.py | 
	4 | 
	3094 | 
	# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core import util
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
from telemetry import story
util.AddDirToPythonPath(util.GetChromiumSrcDir(), 'build', 'android')
from pylib.constants import keyevent # pylint: disable=import-error
from pylib.device import intent # pylint: disable=import-error
URL_LIST = [
    'http://google.com',
    'http://vimeo.com',
    'http://yahoo.com',
    'http://baidu.com',
    'http://cnn.com',
    'http://yandex.ru',
    'http://yahoo.co.jp',
    'http://amazon.com',
    'http://ebay.com',
    'http://bing.com',
]
class ForegroundPage(page_module.Page):
  """Take a measurement after loading a regular webpage."""
  def __init__(self, story_set, url):
    super(ForegroundPage, self).__init__(
        url=url, page_set=story_set, name=url,
        shared_page_state_class=shared_page_state.SharedMobilePageState)
    self.archive_data_file = story_set.archive_data_file
  def RunPageInteractions(self, action_runner):
    action_runner.tab.WaitForDocumentReadyStateToBeComplete()
    with action_runner.CreateInteraction('measurement'):
      # TODO(perezju): This should catch a few memory dumps. When available,
      # use the dump API to request dumps on demand crbug.com/505826
      action_runner.Wait(7)
class BackgroundPage(page_module.Page):
  """Take a measurement while Chrome is in the background."""
  def __init__(self, story_set, number):
    super(BackgroundPage, self).__init__(
        url='about:blank', page_set=story_set,
        name='chrome_background_%d' % number,
        shared_page_state_class=shared_page_state.SharedMobilePageState)
    self.archive_data_file = story_set.archive_data_file
  def RunPageInteractions(self, action_runner):
    action_runner.tab.WaitForDocumentReadyStateToBeComplete()
    # launch clock app, pushing Chrome to the background
    android_platform = action_runner.tab.browser.platform
    android_platform.LaunchAndroidApplication(
        intent.Intent(package='com.google.android.deskclock',
                      activity='com.android.deskclock.DeskClock'),
        app_has_webviews=False)
    # take measurement
    with action_runner.CreateInteraction('measurement'):
      # TODO(perezju): This should catch a few memory dumps. When available,
      # use the dump API to request dumps on demand crbug.com/505826
      action_runner.Wait(7)
    # go back to Chrome
    android_platform.android_action_runner.InputKeyEvent(keyevent.KEYCODE_BACK)
class MemoryHealthStory(story.StorySet):
  """User story for the Memory Health Plan."""
  def __init__(self):
    super(MemoryHealthStory, self).__init__(
        archive_data_file='data/memory_health_plan.json',
        cloud_storage_bucket=story.PARTNER_BUCKET)
    for number, url in enumerate(URL_LIST, 1):
      self.AddStory(ForegroundPage(self, url))
      self.AddStory(BackgroundPage(self, number))
 | 
	bsd-3-clause | 6,420,285,747,655,815,000 | 34.976744 | 79 | 0.705882 | false | 
| 
	naparuba/kunai | 
	data/global-configuration/packs/mongodb/collectors/pymongo/ssl_match_hostname.py | 
	21 | 
	4688 | 
	# Backport of the match_hostname logic from python 3.5, with small
# changes to support IP address matching on python 2.6, 2.7, 3.3, and 3.4.
import re
import sys
try:
    # Python 3.3+, or the ipaddress module from pypi.
    from ipaddress import ip_address
except ImportError:
    ip_address = lambda address: None
# ipaddress.ip_address requires unicode
if sys.version_info[0] < 3:
    _unicode = unicode
else:
    _unicode = lambda value: value
class CertificateError(ValueError):
    pass
def _dnsname_match(dn, hostname, max_wildcards=1):
    """Matching according to RFC 6125, section 6.4.3
    http://tools.ietf.org/html/rfc6125#section-6.4.3
    """
    pats = []
    if not dn:
        return False
    parts = dn.split(r'.')
    leftmost = parts[0]
    remainder = parts[1:]
    wildcards = leftmost.count('*')
    if wildcards > max_wildcards:
        # Issue #17980: avoid denials of service by refusing more
        # than one wildcard per fragment.  A survey of established
        # policy among SSL implementations showed it to be a
        # reasonable choice.
        raise CertificateError(
            "too many wildcards in certificate DNS name: " + repr(dn))
    # speed up common case w/o wildcards
    if not wildcards:
        return dn.lower() == hostname.lower()
    # RFC 6125, section 6.4.3, subitem 1.
    # The client SHOULD NOT attempt to match a presented identifier in which
    # the wildcard character comprises a label other than the left-most label.
    if leftmost == '*':
        # When '*' is a fragment by itself, it matches a non-empty dotless
        # fragment.
        pats.append('[^.]+')
    elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
        # RFC 6125, section 6.4.3, subitem 3.
        # The client SHOULD NOT attempt to match a presented identifier
        # where the wildcard character is embedded within an A-label or
        # U-label of an internationalized domain name.
        pats.append(re.escape(leftmost))
    else:
        # Otherwise, '*' matches any dotless string, e.g. www*
        pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
    # add the remaining fragments, ignore any wildcards
    for frag in remainder:
        pats.append(re.escape(frag))
    pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
    return pat.match(hostname)
def _ipaddress_match(ipname, host_ip):
    """Exact matching of IP addresses.
    RFC 6125 explicitly doesn't define an algorithm for this
    (section 1.7.2 - "Out of Scope").
    """
    # OpenSSL may add a trailing newline to a subjectAltName's IP address
    ip = ip_address(_unicode(ipname).rstrip())
    return ip == host_ip
def match_hostname(cert, hostname):
    """Verify that *cert* (in decoded format as returned by
    SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 and RFC 6125
    rules are followed.
    CertificateError is raised on failure. On success, the function
    returns nothing.
    """
    if not cert:
        raise ValueError("empty or no certificate, match_hostname needs a "
                         "SSL socket or SSL context with either "
                         "CERT_OPTIONAL or CERT_REQUIRED")
    try:
        host_ip = ip_address(_unicode(hostname))
    except (ValueError, UnicodeError):
        # Not an IP address (common case)
        host_ip = None
    dnsnames = []
    san = cert.get('subjectAltName', ())
    for key, value in san:
        if key == 'DNS':
            if host_ip is None and _dnsname_match(value, hostname):
                return
            dnsnames.append(value)
        elif key == 'IP Address':
            if host_ip is not None and _ipaddress_match(value, host_ip):
                return
            dnsnames.append(value)
    if not dnsnames:
        # The subject is only checked when there is no dNSName entry
        # in subjectAltName
        for sub in cert.get('subject', ()):
            for key, value in sub:
                # XXX according to RFC 2818, the most specific Common Name
                # must be used.
                if key == 'commonName':
                    if _dnsname_match(value, hostname):
                        return
                    dnsnames.append(value)
    if len(dnsnames) > 1:
        raise CertificateError("hostname %r "
            "doesn't match either of %s"
            % (hostname, ', '.join(map(repr, dnsnames))))
    elif len(dnsnames) == 1:
        raise CertificateError("hostname %r "
            "doesn't match %r"
            % (hostname, dnsnames[0]))
    else:
        raise CertificateError("no appropriate commonName or "
            "subjectAltName fields were found")
 | 
	mit | 1,537,955,165,270,300,700 | 33.725926 | 78 | 0.611561 | false | 
| 
	cshallue/models | 
	research/object_detection/models/ssd_mobilenet_v2_feature_extractor.py | 
	2 | 
	5582 | 
	# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""SSDFeatureExtractor for MobilenetV2 features."""
import tensorflow as tf
from object_detection.meta_architectures import ssd_meta_arch
from object_detection.models import feature_map_generators
from object_detection.utils import context_manager
from object_detection.utils import ops
from object_detection.utils import shape_utils
from nets.mobilenet import mobilenet
from nets.mobilenet import mobilenet_v2
slim = tf.contrib.slim
class SSDMobileNetV2FeatureExtractor(ssd_meta_arch.SSDFeatureExtractor):
  """SSD Feature Extractor using MobilenetV2 features."""
  def __init__(self,
               is_training,
               depth_multiplier,
               min_depth,
               pad_to_multiple,
               conv_hyperparams_fn,
               reuse_weights=None,
               use_explicit_padding=False,
               use_depthwise=False,
               override_base_feature_extractor_hyperparams=False):
    """MobileNetV2 Feature Extractor for SSD Models.
    Mobilenet v2 (experimental), designed by sandler@. More details can be found
    in //knowledge/cerebra/brain/compression/mobilenet/mobilenet_experimental.py
    Args:
      is_training: whether the network is in training mode.
      depth_multiplier: float depth multiplier for feature extractor.
      min_depth: minimum feature extractor depth.
      pad_to_multiple: the nearest multiple to zero pad the input height and
        width dimensions to.
      conv_hyperparams_fn: A function to construct tf slim arg_scope for conv2d
        and separable_conv2d ops in the layers that are added on top of the
        base feature extractor.
      reuse_weights: Whether to reuse variables. Default is None.
      use_explicit_padding: Whether to use explicit padding when extracting
        features. Default is False.
      use_depthwise: Whether to use depthwise convolutions. Default is False.
      override_base_feature_extractor_hyperparams: Whether to override
        hyperparameters of the base feature extractor with the one from
        `conv_hyperparams_fn`.
    """
    super(SSDMobileNetV2FeatureExtractor, self).__init__(
        is_training=is_training,
        depth_multiplier=depth_multiplier,
        min_depth=min_depth,
        pad_to_multiple=pad_to_multiple,
        conv_hyperparams_fn=conv_hyperparams_fn,
        reuse_weights=reuse_weights,
        use_explicit_padding=use_explicit_padding,
        use_depthwise=use_depthwise,
        override_base_feature_extractor_hyperparams=
        override_base_feature_extractor_hyperparams)
  def preprocess(self, resized_inputs):
    """SSD preprocessing.
    Maps pixel values to the range [-1, 1].
    Args:
      resized_inputs: a [batch, height, width, channels] float tensor
        representing a batch of images.
    Returns:
      preprocessed_inputs: a [batch, height, width, channels] float tensor
        representing a batch of images.
    """
    return (2.0 / 255.0) * resized_inputs - 1.0
  def extract_features(self, preprocessed_inputs):
    """Extract features from preprocessed inputs.
    Args:
      preprocessed_inputs: a [batch, height, width, channels] float tensor
        representing a batch of images.
    Returns:
      feature_maps: a list of tensors where the ith tensor has shape
        [batch, height_i, width_i, depth_i]
    """
    preprocessed_inputs = shape_utils.check_min_image_dim(
        33, preprocessed_inputs)
    feature_map_layout = {
        'from_layer': ['layer_15/expansion_output', 'layer_19', '', '', '', ''],
        'layer_depth': [-1, -1, 512, 256, 256, 128],
        'use_depthwise': self._use_depthwise,
        'use_explicit_padding': self._use_explicit_padding,
    }
    with tf.variable_scope('MobilenetV2', reuse=self._reuse_weights) as scope:
      with slim.arg_scope(
          mobilenet_v2.training_scope(is_training=None, bn_decay=0.9997)), \
          slim.arg_scope(
              [mobilenet.depth_multiplier], min_depth=self._min_depth):
        with (slim.arg_scope(self._conv_hyperparams_fn())
              if self._override_base_feature_extractor_hyperparams else
              context_manager.IdentityContextManager()):
          _, image_features = mobilenet_v2.mobilenet_base(
              ops.pad_to_multiple(preprocessed_inputs, self._pad_to_multiple),
              final_endpoint='layer_19',
              depth_multiplier=self._depth_multiplier,
              use_explicit_padding=self._use_explicit_padding,
              scope=scope)
        with slim.arg_scope(self._conv_hyperparams_fn()):
          feature_maps = feature_map_generators.multi_resolution_feature_maps(
              feature_map_layout=feature_map_layout,
              depth_multiplier=self._depth_multiplier,
              min_depth=self._min_depth,
              insert_1x1_conv=True,
              image_features=image_features)
    return feature_maps.values()
 | 
	apache-2.0 | 8,653,641,700,252,111,000 | 40.044118 | 80 | 0.667861 | false | 
| 
	ManiacalLabs/BiblioPixel | 
	bibliopixel/project/aliases.py | 
	2 | 
	1085 | 
	import operator, os, re
from .. util import log
ALIAS_MARKERS = '@$'
SEPARATORS = re.compile(r'([./#]|[^./#]+)')
PROJECT_ALIASES = {}
BUILTIN_ALIASES = {
    'apa102': 'bibliopixel.drivers.SPI.APA102.APA102',
    'lpd8806': 'bibliopixel.drivers.SPI.LPD8806.LPD8806',
    'pi_ws281x': 'bibliopixel.drivers.PiWS281X.PiWS281X',
    'serial': 'bibliopixel.drivers.serial.Serial',
    'sk9822': 'bibliopixel.drivers.SPI.APA102.APA102',
    'spi': 'bibliopixel.drivers.SPI.SPI',
    'ws2801': 'bibliopixel.drivers.SPI.WS2801.WS2801',
    'ws281x': 'bibliopixel.drivers.SPI.WS281X.WS281X',
    'bpa': 'BiblioPixelAnimations',
}
def get_alias(alias):
    return PROJECT_ALIASES.get(alias) or BUILTIN_ALIASES.get(alias)
def resolve(typename, aliases=None):
    aliases = aliases or {}
    def get(s):
        return aliases.get(s) or get_alias(s) or s
    def get_all(typename):
        for part in SEPARATORS.split(typename):
            is_alias = part and (part[0] in ALIAS_MARKERS)
            yield get(part[1:]) if is_alias else part
    return ''.join(get_all(get(typename)))
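# Examples of how the alias markers expand: resolve('@apa102') yields
# 'bibliopixel.drivers.SPI.APA102.APA102', and resolve('$bpa.collection')
# yields 'BiblioPixelAnimations.collection'.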
 | 
	mit | 5,223,890,905,001,399,000 | 27.552632 | 67 | 0.656221 | false | 
| 
	atodorov/blivet | 
	blivet/tasks/fsresize.py | 
	6 | 
	4420 | 
	# fsresize.py
# Filesystem resizing classes.
#
# Copyright (C) 2015  Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
# Public License for more details.  You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.  Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): Anne Mulhern <[email protected]>
import abc
from six import add_metaclass
from ..errors import FSError
from ..size import B, KiB, MiB, GiB, KB, MB, GB
from .. import util
from . import availability
from . import task
from . import fstask
from . import dfresize
@add_metaclass(abc.ABCMeta)
class FSResizeTask(fstask.FSTask):
    """ The abstract properties that any resize task must have. """
    size_fmt = abc.abstractproperty(doc="Size format string.")
@add_metaclass(abc.ABCMeta)
class FSResize(task.BasicApplication, FSResizeTask):
    """ An abstract class for resizing a filesystem. """
    description = "resize filesystem"
    args = abc.abstractproperty(doc="Resize arguments.")
    # IMPLEMENTATION methods
    @abc.abstractmethod
    def size_spec(self):
        """ Returns a string specification for the target size of the command.
            :returns: size specification
            :rtype: str
        """
        raise NotImplementedError()
    def _resize_command(self):
        return [str(self.ext)] + self.args
    def do_task(self):
        """ Resize the device.
            :raises FSError: on failure
        """
        error_msgs = self.availability_errors
        if error_msgs:
            raise FSError("\n".join(error_msgs))
        try:
            ret = util.run_program(self._resize_command())
        except OSError as e:
            raise FSError(e)
        if ret:
            raise FSError("resize failed: %s" % ret)
class Ext2FSResize(FSResize):
    ext = availability.RESIZE2FS_APP
    unit = MiB
    # No bytes specification is described in the man pages. A number without
    # any suffix is interpreted as indicating the number of filesystem blocks.
    # A suffix of "s" specifies a 512 byte sector. It is omitted here because
    # the lookup is only by standard binary units.
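    # Example (illustrative): with unit = MiB, a target size of 512 MiB gives
    # size_spec() == "512M", so the resize command becomes roughly
    # ["resize2fs", "-p", "/dev/sda1", "512M"] (executable name and device path
    # depend on the system).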
    size_fmt = {KiB: "%dK", MiB: "%dM", GiB: "%dG"}[unit]
    def size_spec(self):
        return self.size_fmt % self.fs.target_size.convert_to(self.unit)
    @property
    def args(self):
        return ["-p", self.fs.device, self.size_spec()]
class NTFSResize(FSResize):
    ext = availability.NTFSRESIZE_APP
    unit = B
    size_fmt = {B: "%d", KB: "%dK", MB: "%dM", GB: "%dG"}[unit]
    def size_spec(self):
        return self.size_fmt % self.fs.target_size.convert_to(self.unit)
    @property
    def args(self):
        return [
            "-ff",  # need at least two 'f's to fully suppress interaction
            "-s", self.size_spec(),
            self.fs.device
        ]
class TmpFSResize(FSResize):
    ext = availability.MOUNT_APP
    unit = MiB
    size_fmt = {KiB: "%dk", MiB: "%dm", GiB: "%dg"}[unit]
    def size_spec(self):
        return "size=%s" % (self.size_fmt % self.fs.target_size.convert_to(self.unit))
    @property
    def args(self):
        # This is too closely mixed in w/ TmpFS object, due to the
        # fact that resizing is done by mounting and that the options are
        # therefore mount options. The situation is hard to avoid, though.
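        # Example (illustrative, assuming mount options "rw", a 64 MiB target
        # size and a tmpfs mounted at /tmp): the result is
        # ['-o', 'remount,rw,size=64m', 'tmpfs', '/tmp'].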
        opts = self.fs.mountopts or ",".join(self.fs._mount.options)
        options = ("remount", opts, self.size_spec())
        return ['-o', ",".join(options), self.fs._type, self.fs.system_mountpoint]
class UnimplementedFSResize(dfresize.UnimplementedDFResize, FSResizeTask):
    @property
    def size_fmt(self):
        raise NotImplementedError()
 | 
	gpl-2.0 | 6,244,816,000,194,422,000 | 30.347518 | 86 | 0.661765 | false | 
| 
	mattvvhat/Euler | 
	057/solution.py | 
	2 | 
	1482 | 
	# Greatest Common Divisor
# Computes the Greatest Common Divisor between two integers using Euclid's
# Algorithm. If the innards are wrapped in more efficient methods, then it'd
# likely work quickly for very large integers!
def gcd (a, b):
  # Let us compute the gcd of negative numbers
  if a < 0: a = -a
  if b < 0: b = -b
  # Ensure that b is the larger number
  if b < a: (a, b) = (b, a)
  # Do the first iteration by hand
  r = b % a
  while r != 0:
    # Note: b = a * m + r
    b = a
    a = r
    m = b // a
    r = b % a
  return a
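# Worked example: gcd(1071, 462) -> 1071 = 2*462 + 147, 462 = 3*147 + 21,
# 147 = 7*21 + 0, so the loop stops and 21 is returned.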
# Least Common Multiple
# Computes the least common multiple between two integers. This method is not
# good for very large integers
def lcm (a, b): return (a*b)/gcd(a, b)
def add (lhs, rhs):
  result = fract()
  scale = lcm(lhs.d, rhs.d)
  result.n = lhs.n * scale/lhs.d + rhs.n * scale/rhs.d
  result.d = scale
  return result.reduced()
class fract:
  def __init__ (self, n=1, d=1):
    self.n = n
    self.d = d
  def __repr__ (self):
    return "%d/%d" % (self.n, self.d)
  def reduced (self):
    div = gcd(self.n, self.d)
    return fract(self.n/div, self.d/div)
  def inverted (self):
    return fract(self.d, self.n)
# Start
summa = fract(2, 1)
two   = fract(2, 1)
half  = fract(1, 2)
one   = fract(1, 1)
count = 0
for i in range(1000):
  summa = add(two, summa.inverted())
  val = add(one, summa.inverted())
  val = val.reduced()
  nom = str(val.n)
  den = str(val.d)
  if len(nom) > len(den):
    count += 1
print count
 | 
	unlicense | 5,498,299,158,501,828,000 | 21.8 | 77 | 0.605938 | false | 
| 
	delmic/odemis | 
	src/odemis/driver/pwrcomedi.py | 
	2 | 
	9849 | 
	# -*- coding: utf-8 -*-
'''
Created on 17 Nov 2016
@author: Éric Piel
Copyright © 2016 Éric Piel, Delmic
This file is part of Odemis.
Odemis is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License version 2 as published by the Free Software Foundation.
Odemis is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with Odemis. If not, see http://www.gnu.org/licenses/.
'''
# Uses a DAQ board with analog output accessed via the comedi interface to control
# an emitter power.
# This is designed for the USB-Dux board, but any comedi card with analog output
# should work.
# Note, with the USB-Dux D board, when the pwr_curve has voltages between 0->4 V,
# you should use pins 22->25 for channels 0->3  When the voltages are between
# -4 -> 4V, you should use pins 9->12.
from __future__ import division
import logging
from odemis import model
import odemis
from odemis.util import driver
import odemis.driver.comedi_simple as comedi
from past.builtins import long
class Light(model.Emitter):
    def __init__(self, name, role, device, channels, spectra, pwr_curve, **kwargs):
        """
        device (string): name of the /dev comedi  device (ex: "/dev/comedi0")
        channels (list of (0<=int)): The output channel for each source, as
          numbered in the comedi subdevice.
        spectra (list of 5-tuple of float): the spectra for each output channel used.
         Each tuple represents the wavelength in m for the 99% low, 25% low,
         centre/max, 25% high, 99% high. They do not have to be extremely precise.
         The most important is the centre, and that they are all increasing values.
        pwr_curve (list of dict (float -> 0<float)): Power curve segment map for
           each source. A segment map is a  series of voltage output on the
           analog output -> emission power of the light (W).
           It represents a series of linear segments to map the voltage output
           to the light emission. At least one pair should be provided.
           If no voltage is linked to 0W, then a 0V -> 0W mapping is used.
           The total curve should be monotonic.
        """
        # TODO: allow to give the unit of the power/pwr_curve ?
        model.Emitter.__init__(self, name, role, **kwargs)
        self._shape = ()
        try:
            self._device = comedi.open(device)
        #             self._fileno = comedi.fileno(self._device)
        except comedi.ComediError:
            raise ValueError("Failed to open DAQ device '%s'" % device)
        # Look for the analog output subdevice
        try:
            self._ao_subd = comedi.find_subdevice_by_type(self._device, comedi.SUBD_AO, 0)
            nchan = comedi.get_n_channels(self._device, self._ao_subd)
            if nchan < max(channels):
                raise ValueError("Device only has %d channels, while needed %d" % (nchan, max(channels)))
        except comedi.ComediError:
            raise ValueError("Failed to find an analogue output on DAQ device '%s'" % device)
        if len(channels) != len(spectra):
            raise ValueError("spectra argument should have the same length as channels (%d)" % len(channels))
        if len(channels) != len(pwr_curve):
            raise ValueError("pwr_curve argument should have the same length as channels (%d)" % len(channels))
        self._channels = channels
        # Check and store the power curves
        self._ranges = []
        self._pwr_curve = []
        for c, crv in zip(channels, pwr_curve):
            crv = [v for v in crv.items()]
            # Add 0W = 0V if nothing = 0W
            if 0 not in [w for v, w in crv]:
                crv.append((0, 0))
                logging.info("Adding 0V -> 0W mapping to pwr_curve for channel %d", c)
            # At least beginning and end values
            if len(crv) < 2:
                raise ValueError("pwr_curve for channel %d has less than 2 values: %s" % (c, crv))
            # Check it's monotonic
            crv = sorted(crv, key=lambda v: v[0])
            if crv[0][1] < 0:
                raise ValueError("pwr_curve for channel %d has negative power: %g W" % (c, crv[0][1]))
            if len(crv) != len(set(v for v, w in crv)):
                raise ValueError("pwr_curve for channel %d has identical voltages: %s" % (c, crv))
            if not all((crv[i][1] < crv[i + 1][1]) for i in range(len(crv) - 1)):
                raise ValueError("pwr_curve for channel %d is not monotonic: %s" % (c, crv))
            self._pwr_curve.append(crv)
            # Find the best range to use
            try:
                ri = comedi.find_range(self._device, self._ao_subd,
                                       c, comedi.UNIT_volt, crv[0][0], crv[-1][0])
            except comedi.ComediError:
                raise ValueError("Data range between %g and %g V is too high for hardware." %
                                 (crv[0][0], crv[-1][0]))
            self._ranges.append(ri)
        # Check the spectra
        spect = []  # list of the 5 wavelength points
        for c, wls in zip(channels, spectra):
            if len(wls) != 5:
                raise ValueError("Spectra for channel %d doesn't have exactly 5 wavelength points: %s" % (c, wls))
            if list(wls) != sorted(wls):
                raise ValueError("Spectra for channel %d has unsorted wavelengths: %s" % (c, wls))
            for wl in wls:
                if not 0 < wl < 100e-6:
                    raise ValueError("Spectra for channel %d has unexpected wavelength = %f nm"
                                     % (c, wl * 1e9))
            spect.append(tuple(wls))
        # Maximum power for channel to be used as a range for power
        max_power = tuple([crv[-1][1] for crv in self._pwr_curve])
        # Power value for each channel of the device
        self.power = model.ListContinuous(value=[0.] * len(self._channels),
                                          range=(tuple([0.] * len(self._channels)), max_power,),
                                          unit="W", cls=(int, long, float),)
        self.power.subscribe(self._updatePower)
        # info on which channel is which wavelength
        self.spectra = model.ListVA(spect, unit="m", readonly=True)
        # make sure everything is off (turning on the HUB will turn on the lights)
        self.power.value = self.power.range[0]
        self._metadata = {model.MD_HW_NAME: self.getHwName()}
        lnx_ver = driver.get_linux_version()
        self._swVersion = "%s (driver %s, linux %s)" % (odemis.__version__,
                                                        self.getSwVersion(),
                                                        ".".join("%s" % v for v in lnx_ver))
        self._metadata[model.MD_SW_VERSION] = self._swVersion
        self._metadata[model.MD_HW_VERSION] = self._hwVersion  # unknown
    def terminate(self):
        if self._device:
            # Make sure everything is powered off
            self.power.value = self.power.range[0]
            comedi.close(self._device)
            self._device = None
        super(Light, self).terminate()
    def _power_to_volt(self, power, curve):
        """
        power (0<float)
        curve (list of tuple (float, float)): the mapping between volt -> power
        return (float): voltage for outputting the given power
        raise: ValueError, if power requested if out of the power curve
        """
        if power < curve[0][1]:
            raise ValueError("Power requested %g < %g" % (power, curve[0][1]))
        # Find the segment that correspond to that power
        for i, (v, w) in enumerate(curve[1:]):
            if power <= w:
                seg = i
                break
        else:
            raise ValueError("Power requested %g > %g" % (power, curve[-1][1]))
        logging.debug("Converting %g W using segment %d: %s -> %s",
                      power, seg, curve[seg], curve[seg + 1])
        basev, basew = curve[seg]
        endv, endw = curve[seg + 1]
        ratio = (power - basew) / (endw - basew)
        v = basev + ratio * (endv - basev)
        return v
    def _volt_to_data(self, volt, channel, rngi):
        maxdata = comedi.get_maxdata(self._device, self._ao_subd, channel)
        rng = comedi.get_range(self._device, self._ao_subd, channel, rngi)
        d = comedi.from_phys(volt, rng, maxdata)
        return d
    # from semcomedi
    def getSwVersion(self):
        """
        Returns (string): displayable string showing the driver version
        """
        driver = comedi.get_driver_name(self._device)
        version = comedi.get_version_code(self._device)
        lversion = []
        for i in range(3):
            lversion.insert(0, version & 0xff)  # grab lowest 8 bits
            version >>= 8  # shift over 8 bits
        sversion = '.'.join(str(x) for x in lversion)
        return "%s v%s" % (driver, sversion)
    # from semcomedi
    def getHwName(self):
        """
        Returns (string): displayable string showing whatever can be found out
          about the actual hardware.
        """
        return comedi.get_board_name(self._device)
    def _updatePower(self, value):
        for c, r, crv, p in zip(self._channels, self._ranges, self._pwr_curve, value):
            p = min(p, crv[-1][1])
            v = self._power_to_volt(p, crv)
            d = self._volt_to_data(v, c, r)
            logging.debug("Setting channel %d to %d = %g V = %g W", c, d, v, p)
            comedi.data_write(self._device, self._ao_subd, c, r, comedi.AREF_GROUND, d)
 | 
	gpl-2.0 | -2,405,489,135,356,883,000 | 43.754545 | 226 | 0.578103 | false | 
| 
	dkodnik/arp | 
	addons/website_quote/models/order.py | 
	1 | 
	11878 | 
	# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv, fields
import uuid
import time
import datetime
import openerp.addons.decimal_precision as dp
class sale_quote_template(osv.osv):
    _name = "sale.quote.template"
    _description = "Sale Quotation Template"
    _columns = {
        'name': fields.char('Quotation Template', size=256, required=True),
        'website_description': fields.html('Description', translate=True),
        'quote_line': fields.one2many('sale.quote.line', 'quote_id', 'Quote Template Lines'),
        'note': fields.text('Terms and conditions'),
        'options': fields.one2many('sale.quote.option', 'template_id', 'Optional Products Lines'),
        'number_of_days': fields.integer('Quote Duration', help='Number of days for the validity date computation of the quotation'),
    }
    def open_template(self, cr, uid, quote_id, context=None):
        return {
            'type': 'ir.actions.act_url',
            'target': 'self',
            'url': '/quote/template/%d' % quote_id[0]
        }
class sale_quote_line(osv.osv):
    _name = "sale.quote.line"
    _description = "Quotation Template Lines"
    _columns = {
        'quote_id': fields.many2one('sale.quote.template', 'Quotation Template Reference', required=True, ondelete='cascade', select=True),
        'name': fields.text('Description', required=True, translate=True),
        'product_id': fields.many2one('product.product', 'Product', domain=[('sale_ok', '=', True)], required=True),
        'website_description': fields.html('Line Description', translate=True),
        'price_unit': fields.float('Unit Price', required=True, digits_compute= dp.get_precision('Product Price')),
        'discount': fields.float('Discount (%)', digits_compute= dp.get_precision('Discount')),
        'product_uom_qty': fields.float('Quantity', required=True, digits_compute= dp.get_precision('Product UoS')),
        'product_uom_id': fields.many2one('product.uom', 'Unit of Measure ', required=True),
    }
    _defaults = {
        'product_uom_qty': 1,
        'discount': 0.0,
    }
    def on_change_product_id(self, cr, uid, ids, product, context=None):
        vals = {}
        product_obj = self.pool.get('product.product').browse(cr, uid, product, context=context)
        name = product_obj.name
        if product_obj.description_sale:
            name += '\n' + product_obj.description_sale
        vals.update({
            'price_unit': product_obj.list_price,
            'product_uom_id': product_obj.uom_id.id,
            'website_description': product_obj.website_description,
            'name': name,
        })
        return {'value': vals}
class sale_order_line(osv.osv):
    _inherit = "sale.order.line"
    _description = "Sales Order Line"
    _columns = {
        'website_description': fields.html('Line Description'),
        'option_line_id': fields.one2many('sale.order.option', 'line_id', 'Optional Products Lines'),
    }
    def _inject_website_description(self, cr, uid, values, context=None):
        values = dict(values or {})
        if not values.get('website_description') and values.get('product_id'):
            product = self.pool['product.product'].browse(cr, uid, values['product_id'], context=context)
            values['website_description'] = product.website_description
        return values
    def create(self, cr, uid, values, context=None):
        values = self._inject_website_description(cr, uid, values, context)
        return super(sale_order_line, self).create(cr, uid, values, context=context)
    def write(self, cr, uid, ids, values, context=None):
        values = self._inject_website_description(cr, uid, values, context)
        return super(sale_order_line, self).write(cr, uid, ids, values, context=context)
class sale_order(osv.osv):
    _inherit = 'sale.order'
    def _get_total(self, cr, uid, ids, name, arg, context=None):
        res = {}
        for order in self.browse(cr, uid, ids, context=context):
            total = 0.0
            for line in order.order_line:
                total += (line.product_uom_qty * line.price_unit)
            res[order.id] = total
        return res
    _columns = {
        'access_token': fields.char('Security Token', size=256, required=True),
        'template_id': fields.many2one('sale.quote.template', 'Quote Template'),
        'website_description': fields.html('Description'),
        'options' : fields.one2many('sale.order.option', 'order_id', 'Optional Products Lines'),
        'validity_date': fields.date('Expiry Date'),
        'amount_undiscounted': fields.function(_get_total, string='Amount Before Discount', type="float",
            digits_compute=dp.get_precision('Account'))
    }
    _defaults = {
        'access_token': lambda self, cr, uid, ctx={}: str(uuid.uuid4())
    }
    def open_quotation(self, cr, uid, quote_id, context=None):
        quote = self.browse(cr, uid, quote_id[0], context=context)
        return {
            'type': 'ir.actions.act_url',
            'target': 'self',
            'url': '/quote/%s' % (quote.id)
        }
    def onchange_template_id(self, cr, uid, ids, template_id, partner=False, fiscal_position=False, context=None):
        if not template_id:
            return True
        if context is None:
            context = {}
        context = dict(context, lang=self.pool.get('res.partner').browse(cr, uid, partner, context).lang)
        
        lines = [(5,)]
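        # Standard one2many write commands: (5,) drops the lines currently linked
        # to the order, and each (0, 0, data) tuple appended below creates a new
        # line built from the quote template.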
        quote_template = self.pool.get('sale.quote.template').browse(cr, uid, template_id, context=context)
        for line in quote_template.quote_line:
            res = self.pool.get('sale.order.line').product_id_change(cr, uid, False,
                False, line.product_id.id, line.product_uom_qty, line.product_uom_id.id, line.product_uom_qty,
                line.product_uom_id.id, line.name, partner, False, True, time.strftime('%Y-%m-%d'),
                False, fiscal_position, True, context)
            data = res.get('value', {})
            if 'tax_id' in data:
                data['tax_id'] = [(6, 0, data['tax_id'])]
            data.update({
                'name': line.name,
                'price_unit': line.price_unit,
                'discount': line.discount,
                'product_uom_qty': line.product_uom_qty,
                'product_id': line.product_id.id,
                'product_uom': line.product_uom_id.id,
                'website_description': line.website_description,
                'state': 'draft',
            })
            lines.append((0, 0, data))
        options = []
        for option in quote_template.options:
            options.append((0, 0, {
                'product_id': option.product_id.id,
                'name': option.name,
                'quantity': option.quantity,
                'uom_id': option.uom_id.id,
                'price_unit': option.price_unit,
                'discount': option.discount,
                'website_description': option.website_description,
            }))
        date = False
        if quote_template.number_of_days > 0:
            date = (datetime.datetime.now() + datetime.timedelta(quote_template.number_of_days)).strftime("%Y-%m-%d")
        data = {'order_line': lines, 'website_description': quote_template.website_description, 'note': quote_template.note, 'options': options, 'validity_date': date}
        return {'value': data}
    def recommended_products(self, cr, uid, ids, context=None):
        order_line = self.browse(cr, uid, ids[0], context=context).order_line
        product_pool = self.pool.get('product.product')
        products = []
        for line in order_line:
            products += line.product_id.product_tmpl_id.recommended_products(context=context)
        return products
        
class sale_quote_option(osv.osv):
    _name = "sale.quote.option"
    _description = "Quote Option"
    _columns = {
        'template_id': fields.many2one('sale.quote.template', 'Quotation Template Reference', ondelete='cascade', select=True, required=True),
        'name': fields.text('Description', required=True, translate=True),
        'product_id': fields.many2one('product.product', 'Product', domain=[('sale_ok', '=', True)], required=True),
        'website_description': fields.html('Option Description', translate=True),
        'price_unit': fields.float('Unit Price', required=True, digits_compute= dp.get_precision('Product Price')),
        'discount': fields.float('Discount (%)', digits_compute= dp.get_precision('Discount')),
        'uom_id': fields.many2one('product.uom', 'Unit of Measure ', required=True),
        'quantity': fields.float('Quantity', required=True, digits_compute= dp.get_precision('Product UoS')),
    }
    _defaults = {
        'quantity': 1,
    }
    def on_change_product_id(self, cr, uid, ids, product, context=None):
        vals = {}
        product_obj = self.pool.get('product.product').browse(cr, uid, product, context=context)
        vals.update({
            'price_unit': product_obj.list_price,
            'website_description': product_obj.product_tmpl_id.website_description,
            'name': product_obj.name,
            'uom_id': product_obj.product_tmpl_id.uom_id.id,
        })
        return {'value': vals}
class sale_order_option(osv.osv):
    _name = "sale.order.option"
    _description = "Sale Options"
    _columns = {
        'order_id': fields.many2one('sale.order', 'Sale Order Reference', ondelete='cascade', select=True),
        'line_id': fields.many2one('sale.order.line', ondelete="set null"),
        'name': fields.text('Description', required=True),
        'product_id': fields.many2one('product.product', 'Product', domain=[('sale_ok', '=', True)]),
        'website_description': fields.html('Line Description'),
        'price_unit': fields.float('Unit Price', required=True, digits_compute= dp.get_precision('Product Price')),
        'discount': fields.float('Discount (%)', digits_compute= dp.get_precision('Discount')),
        'uom_id': fields.many2one('product.uom', 'Unit of Measure ', required=True),
        'quantity': fields.float('Quantity', required=True,
            digits_compute= dp.get_precision('Product UoS')),
    }
    _defaults = {
        'quantity': 1,
    }
    def on_change_product_id(self, cr, uid, ids, product, context=None):
        vals = {}
        product_obj = self.pool.get('product.product').browse(cr, uid, product, context=context)
        vals.update({
            'price_unit': product_obj.list_price,
            'website_description': product_obj.product_tmpl_id.website_description,
            'name': product_obj.name,
            'uom_id': product_obj.product_tmpl_id.uom_id.id,
        })
        return {'value': vals}
class product_template(osv.Model):
    _inherit = "product.template"
    _columns = {
        'website_description': fields.html('Description for the website'),
    }
 | 
	agpl-3.0 | -5,836,896,972,187,375,000 | 45.948617 | 167 | 0.605405 | false | 
| 
	BrandonY/python-docs-samples | 
	storage/cloud-client/acl_test.py | 
	5 | 
	3897 | 
	# Copyright 2016 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from google.cloud import storage
import google.cloud.storage.acl
import pytest
import acl
BUCKET = os.environ['CLOUD_STORAGE_BUCKET']
# Typically we'd use a @example.com address, but GCS requires a real Google
# account.
TEST_EMAIL = '[email protected]'
@pytest.fixture
def test_bucket():
    """Yields a bucket that resets its acl after the test completes."""
    bucket = storage.Client().bucket(BUCKET)
    acl = google.cloud.storage.acl.BucketACL(bucket)
    object_default_acl = google.cloud.storage.acl.DefaultObjectACL(bucket)
    acl.reload()
    object_default_acl.reload()
    yield bucket
    acl.save()
    object_default_acl.save()
@pytest.fixture
def test_blob():
    """Yields a blob that resets its acl after the test completes."""
    bucket = storage.Client().bucket(BUCKET)
    blob = bucket.blob('storage_acl_test_sigil')
    blob.upload_from_string('Hello, is it me you\'re looking for?')
    acl = google.cloud.storage.acl.ObjectACL(blob)
    acl.reload()
    yield blob
    acl.save()
def test_print_bucket_acl(capsys):
    acl.print_bucket_acl(BUCKET)
    out, _ = capsys.readouterr()
    assert out
def test_print_bucket_acl_for_user(test_bucket, capsys):
    test_bucket.acl.user(TEST_EMAIL).grant_owner()
    test_bucket.acl.save()
    acl.print_bucket_acl_for_user(BUCKET, TEST_EMAIL)
    out, _ = capsys.readouterr()
    assert 'OWNER' in out
def test_add_bucket_owner(test_bucket):
    acl.add_bucket_owner(BUCKET, TEST_EMAIL)
    test_bucket.acl.reload()
    assert 'OWNER' in test_bucket.acl.user(TEST_EMAIL).get_roles()
def test_remove_bucket_owner(test_bucket):
    test_bucket.acl.user(TEST_EMAIL).grant_owner()
    test_bucket.acl.save()
    acl.remove_bucket_owner(BUCKET, TEST_EMAIL)
    test_bucket.acl.reload()
    assert 'OWNER' not in test_bucket.acl.user(TEST_EMAIL).get_roles()
def test_add_bucket_default_owner(test_bucket):
    acl.add_bucket_default_owner(BUCKET, TEST_EMAIL)
    test_bucket.default_object_acl.reload()
    roles = test_bucket.default_object_acl.user(TEST_EMAIL).get_roles()
    assert 'OWNER' in roles
def test_remove_bucket_default_owner(test_bucket):
    test_bucket.acl.user(TEST_EMAIL).grant_owner()
    test_bucket.acl.save()
    acl.remove_bucket_default_owner(BUCKET, TEST_EMAIL)
    test_bucket.default_object_acl.reload()
    roles = test_bucket.default_object_acl.user(TEST_EMAIL).get_roles()
    assert 'OWNER' not in roles
def test_print_blob_acl(test_blob, capsys):
    acl.print_blob_acl(BUCKET, test_blob.name)
    out, _ = capsys.readouterr()
    assert out
def test_print_blob_acl_for_user(test_blob, capsys):
    test_blob.acl.user(TEST_EMAIL).grant_owner()
    test_blob.acl.save()
    acl.print_blob_acl_for_user(
        BUCKET, test_blob.name, TEST_EMAIL)
    out, _ = capsys.readouterr()
    assert 'OWNER' in out
def test_add_blob_owner(test_blob):
    acl.add_blob_owner(BUCKET, test_blob.name, TEST_EMAIL)
    test_blob.acl.reload()
    assert 'OWNER' in test_blob.acl.user(TEST_EMAIL).get_roles()
def test_remove_blob_owner(test_blob):
    test_blob.acl.user(TEST_EMAIL).grant_owner()
    test_blob.acl.save()
    acl.remove_blob_owner(
        BUCKET, test_blob.name, TEST_EMAIL)
    test_blob.acl.reload()
    assert 'OWNER' not in test_blob.acl.user(TEST_EMAIL).get_roles()
 | 
	apache-2.0 | 699,143,596,099,644,700 | 27.23913 | 75 | 0.703105 | false | 
| 
	kcavagnolo/astroML | 
	astroML/datasets/hogg2010test.py | 
	5 | 
	1684 | 
	"""
Data from Hogg et al 2010; useful for testing robust regression methods
"""
import numpy as np
def fetch_hogg2010test(structured=False):
    """Fetch the Hogg et al 2010 test data
    """
    data = np.array([[1, 201, 592, 61, 9, -0.84],
                     [2, 244, 401, 25, 4, 0.31],
                     [3, 47, 583, 38, 11, 0.64],
                     [4, 287, 402, 15, 7, -0.27],
                     [5, 203, 495, 21, 5, -0.33],
                     [6, 58, 173, 15, 9, 0.67],
                     [7, 210, 479, 27, 4, -0.02],
                     [8, 202, 504, 14, 4, -0.05],
                     [9, 198, 510, 30, 11, -0.84],
                     [10, 158, 416, 16, 7, -0.69],
                     [11, 165, 393, 14, 5, 0.30],
                     [12, 201, 442, 25, 5, -0.46],
                     [13, 157, 317, 52, 5, -0.03],
                     [14, 131, 311, 16, 6, 0.50],
                     [15, 166, 400, 34, 6, 0.73],
                     [16, 160, 337, 31, 5, -0.52],
                     [17, 186, 423, 42, 9, 0.90],
                     [18, 125, 334, 26, 8, 0.40],
                     [19, 218, 533, 16, 6, -0.78],
                     [20, 146, 344, 22, 5, -0.56]])
    dtype = [("ID", np.int32),
             ("x", np.float64),
             ("y", np.float64),
             ("sigma_x", np.float64),
             ("sigma_y", np.float64),
             ("rho_xy", np.float64)]
    recarray = np.empty(data.shape[0], dtype=dtype)
    recarray['ID'] = data[:, 0]
    recarray['x'] = data[:, 1]
    recarray['y'] = data[:, 2]
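    # In the Hogg et al. (2010) table the fourth column is sigma_y and the
    # fifth is sigma_x, so the two indices below are intentionally swapped.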
    recarray['sigma_x'] = data[:, 4]
    recarray['sigma_y'] = data[:, 3]
    recarray['rho_xy'] = data[:, 5]
    return recarray
 | 
	bsd-2-clause | -3,344,771,847,954,435,000 | 36.422222 | 71 | 0.383017 | false | 
| 
	zedshaw/librelist | 
	tests/handlers/admin_tests.py | 
	1 | 
	1665 | 
	from nose.tools import *
from lamson.testing import *
from config import settings
import time
from app.model import archive, confirmation
queue_path = archive.store_path('test.list', 'queue')
sender = "sender-%[email protected]" % time.time()
host = "librelist.com"
list_name = "test.list"
list_addr = "test.list@%s" % host
client = RouterConversation(sender, 'Admin Tests')
def setup():
    clear_queue("run/posts")
    clear_queue("run/spam")
def test_new_user_subscribes_with_invalid_name():
    client.begin()
    client.say('test-list@%s' % host, "I can't read!", 'noreply')
    client.say('test=list@%s' % host, "I can't read!", 'noreply')
    clear_queue()
    client.say('unbounce@%s' % host, "I have two email addresses!")
    assert not delivered('noreply')
    assert not delivered('unbounce')
    client.say('noreply@%s' % host, "Dumb dumb.")
    assert not delivered('noreply')
def test_new_user_subscribes():
    client.begin()
    msg = client.say(list_addr, "Hey I was wondering how to fix this?",
                     list_name + '-confirm')
    client.say(msg['Reply-To'], 'Confirmed I am.', 'noreply')
    clear_queue()
def test_existing_user_unsubscribes():
    test_new_user_subscribes()
    msg = client.say(list_name + "-unsubscribe@%s" % host, "I would like to unsubscribe.", 'confirm')
    client.say(msg['Reply-To'], 'Confirmed yes I want out.', 'noreply')
def test_existing_user_posts_message():
    test_new_user_subscribes()
    msg = client.say(list_addr, "Howdy folks, I was wondering what this is?",
                     list_addr)
    # make sure it gets archived
    assert delivered(list_addr, to_queue=queue(queue_path))
 | 
	agpl-3.0 | -3,435,780,747,800,284,700 | 30.415094 | 101 | 0.655255 | false | 
| 
	kwagyeman/openmv | 
	scripts/examples/Arduino/Portenta-H7/03-Drawing/image_drawing_scale_up_test.py | 
	2 | 
	1955 | 
	# Image Scaling Up Drawing Test
#
# This script tests the performance and quality of the draw_image()
# method which can perform nearest neighbor, bilinear, bicubic, and
# area scaling along with color channel extraction, alpha blending,
# color palette application, and alpha palette application.
# DISABLE THE FRAME BUFFER TO SEE THE REAL FPS
import sensor, image, time
hint = 0 # default (nearest neighbor); try image.BILINEAR or image.BICUBIC
bounce_div = 32
big_img = image.Image(128, 128, sensor.RGB565, copy_to_fb=True)
#big_img.to_grayscale()
#big_img.to_bitmap()
small_img = image.Image(4, 4, sensor.GRAYSCALE)
small_img.set_pixel(0, 0, (0,   0,   127))
small_img.set_pixel(1, 0, (47,  255, 199))
small_img.set_pixel(2, 0, (0,   188, 255))
small_img.set_pixel(3, 0, (0,   0,   127))
small_img.set_pixel(0, 1, (0,   176, 255))
small_img.set_pixel(1, 1, (222, 0,   0  ))
small_img.set_pixel(2, 1, (50,  255, 195))
small_img.set_pixel(3, 1, (86,  255, 160))
small_img.set_pixel(0, 2, (255, 211, 0  ))
small_img.set_pixel(1, 2, (83,  255, 163))
small_img.set_pixel(2, 2, (255, 211, 0))
small_img.set_pixel(3, 2, (0,   80,  255))
small_img.set_pixel(0, 3, (255, 118, 0  ))
small_img.set_pixel(1, 3, (127, 0,   0  ))
small_img.set_pixel(2, 3, (0,   144, 255))
small_img.set_pixel(3, 3, (50,  255, 195))
#small_img.to_grayscale()
#small_img.to_bitmap()
x_bounce = 0
x_bounce_toggle = 0
y_bounce = 0
y_bounce_toggle = 0
clock = time.clock()
while(True):
    clock.tick()
    big_img.clear()
    big_img.draw_image(small_img,
                       x_bounce // bounce_div, y_bounce // bounce_div,
                       x_scale=32, y_scale=32,
                       hint=hint)
    sensor.flush()
    x_bounce += x_bounce_toggle
    if abs(x_bounce // bounce_div) >= (big_img.width()*1.1): x_bounce_toggle = -x_bounce_toggle
    y_bounce += y_bounce_toggle
    if abs(y_bounce // bounce_div) >= (big_img.height()*1.1): y_bounce_toggle = -y_bounce_toggle
    print(clock.fps())
 | 
	mit | -1,167,906,425,871,766,000 | 30.031746 | 96 | 0.628133 | false | 
| 
	anthraxx/pwndbg | 
	pwndbg/commands/procinfo.py | 
	1 | 
	5954 | 
	#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import string
import gdb
import pwndbg.android
import pwndbg.auxv
import pwndbg.commands
import pwndbg.file
import pwndbg.memoize
import pwndbg.net
import pwndbg.proc
try:
    import psutil
except ImportError:
    psutil = None
"""
PEDA prints it out like this:
exe = /bin/bash
fd[0] -> /dev/pts/96
fd[1] -> /dev/pts/96
fd[2] -> /dev/pts/96
pid = 31102
ppid = 31096
uid = [287138, 287138, 287138, 287138]
gid = [5000, 5000, 5000, 5000]
"""
capabilities = {
 0: "CAP_CHOWN",
 1: "CAP_DAC_OVERRIDE",
 2: "CAP_DAC_READ_SEARCH",
 3: "CAP_FOWNER",
 4: "CAP_FSETID",
 5: "CAP_KILL",
 6: "CAP_SETGID",
 7: "CAP_SETUID",
 8: "CAP_SETPCAP",
 9: "CAP_LINUX_IMMUTABLE",
 10: "CAP_NET_BIND_SERVICE",
 11: "CAP_NET_BROADCAST",
 12: "CAP_NET_ADMIN",
 13: "CAP_NET_RAW",
 14: "CAP_IPC_LOCK",
 15: "CAP_IPC_OWNER",
 16: "CAP_SYS_MODULE",
 17: "CAP_SYS_RAWIO",
 18: "CAP_SYS_CHROOT",
 19: "CAP_SYS_PTRACE",
 20: "CAP_SYS_PACCT",
 21: "CAP_SYS_ADMIN",
 22: "CAP_SYS_BOOT",
 23: "CAP_SYS_NICE",
 24: "CAP_SYS_RESOURCE",
 25: "CAP_SYS_TIME",
 26: "CAP_SYS_TTY_CONFIG",
 27: "CAP_MKNOD",
 28: "CAP_LEASE",
 29: "CAP_AUDIT_WRITE",
 30: "CAP_AUDIT_CONTROL",
 31: "CAP_SETFCAP",
 32: "CAP_MAC_OVERRIDE",
 33: "CAP_MAC_ADMIN",
 34: "CAP_SYSLOG",
 35: "CAP_WAKE_ALARM",
 36: "CAP_BLOCK_SUSPEND",
}
class Process():
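    """Lightweight view of /proc/<pid>/task/<tid> for the current process."""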
    def __init__(self, pid=None, tid=None):
        if pid is None:
            pid = pwndbg.proc.pid
        if tid is None:
            tid = pwndbg.proc.tid
        if not tid:
            tid = pid
        self.pid = pid
        self.tid = tid
        # Precalculate
        self.status
    @property
    @pwndbg.memoize.reset_on_stop
    def selinux(self):
        path = '/proc/%i/task/%i/attr/current' % (self.pid, self.tid)
        raw = pwndbg.file.get(path)
        return raw.decode().rstrip('\x00').strip()
    @property
    @pwndbg.memoize.reset_on_stop
    def status(self):
        raw = pwndbg.file.get('/proc/%i/task/%i/status' % (self.pid, self.tid))
        status = {}
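        # Parse the key/value lines of the status file, coercing values into
        # ints, byte counts, lists and capability-name lists where possible.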
        for line in raw.splitlines():
            if not line:
                continue
            k_v = line.split(None, 1)
            if len(k_v) == 1:
                k_v.append(b'')
            k,v = k_v
            # Python3 ftw!
            k = k.decode('latin-1')
            v = v.decode('latin-1')
            k = k.lower().rstrip(':')
            # bit fields
            if set(v) < set(string.hexdigits) and len(v) == 16:
                try:
                    v = int(v, 16)
                except AttributeError:
                    pass
            # vm stats
            elif v.endswith(' kB'):
                v = int(v.split()[0]) * (1<<10)
            elif v.endswith(' mB'):
                v = int(v.split()[0]) * (1<<20)
            # misc integers like pid and ppid
            elif str(v).isdigit():
                v = int(v)
            # uid and gid and groups
            elif all((s.isdigit() for s in v.split())):
                v = list(map(int, v.split()))
            # capability sets
            if k in ['capeff', 'capinh', 'capprm', 'capbnd']:
                orig = v
                v = []
                for i in range(max(capabilities)+1):
                    if (orig >> i) & 1 == 1:
                        v.append(capabilities[i])
            status[k] = v
            setattr(self, k, v)
        return status
    @property
    @pwndbg.memoize.reset_on_stop
    def open_files(self):
        fds = {}
        for i in range(self.fdsize):
            link = pwndbg.file.readlink('/proc/%i/fd/%i' % (pwndbg.proc.pid, i))
            if link:
                fds[i] = link
        return fds
    @property
    @pwndbg.memoize.reset_on_stop
    def connections(self):
        # Connections look something like this:
        # socket:[102422]
        fds = self.open_files
        socket = 'socket:['
        result = []
        functions = [pwndbg.net.tcp,
                     pwndbg.net.unix,
                     pwndbg.net.netlink]
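        # Match each socket fd to a parsed /proc/net entry by its inode.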
        for fd, path in fds.items():
            if socket not in path:
                continue
            inode = path[len(socket):-1]
            inode = int(inode)
            for func in functions:
                for x in func():
                    if x.inode == inode:
                        x.fd = fd
                        result.append(x)
        return tuple(result)
@pwndbg.commands.ArgparsedCommand("Gets the pid.")
@pwndbg.commands.OnlyWhenRunning
def pid():
    print(pwndbg.proc.pid)
@pwndbg.commands.ArgparsedCommand("Display information about the running process.")
@pwndbg.commands.OnlyWhenRunning
def procinfo():
    """
    Display information about the running process.
    """
    if not psutil:
        print("psutil required but not installed")
        return
    exe  = str(pwndbg.auxv.get()['AT_EXECFN'])
    print("%-10s %r" % ("exe", exe))
    proc = Process()
    # qemu-usermode fail!
    if not proc.status:
        return
    files = dict(proc.open_files)
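    # Replace raw socket fd paths with readable connection summaries.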
    for c in proc.connections:
        files[c.fd] = str(c)
    print("%-10s %s" % ("pid",     proc.pid))
    print("%-10s %s" % ("tid",     proc.tid))
    if proc.selinux != 'unconfined':
        print("%-10s %s" % ("selinux", proc.selinux))
    print("%-10s %s" % ("ppid",    proc.ppid))
    if not pwndbg.android.is_android():
        print("%-10s %s" % ("uid",     proc.uid))
        print("%-10s %s" % ("gid",     proc.gid))
        print("%-10s %s" % ("groups",  proc.groups))
    else:
        print("%-10s %s" % ("uid",     list(map(pwndbg.android.aid_name, proc.uid))))
        print("%-10s %s" % ("gid",     list(map(pwndbg.android.aid_name, proc.gid))))
        print("%-10s %s" % ("groups",  list(map(pwndbg.android.aid_name, proc.groups))))
    for fd, path in files.items():
        if not set(path) < set(string.printable):
            path = repr(path)
        print("%-10s %s" % ("fd[%i]" % fd, path))
    return
 | 
	mit | -3,907,581,068,090,045,000 | 23.401639 | 88 | 0.512261 | false | 
| 
	fxstein/SoCo | 
	tests/test_new_datastructures.py | 
	8 | 
	14756 | 
	# -*- coding: utf-8 -*-
"""Module to test the data structure classes with pytest."""
from __future__ import unicode_literals
import pytest
from soco import data_structures
from soco.exceptions import DIDLMetadataError
from soco.xml import XML
def assert_xml_equal(left, right, explain=None):
    """Helper function for comparing XML elements.
    Causes useful information to be output under pytest as to the differences
    between elements.
    Args:
         left (Element): an ElementTree.Element to compare
         right (Element): an Element to compare it with
    Raises:
        AssertionError: if the Elements do not match
    """
    def _build_explanation(left, right, explain):
        if left.tag != right.tag:
            explain.append('tag <%s> does not match tag <%s>' % (left.tag,
                                                                 right.tag))
        for name, value in left.attrib.items():
            if right.get(name) != value:
                explain.append(
                    '%s attribute of element <%s> does not match: %s=%r, %s=%r' %
                    (name, left.tag, name, value, name, right.get(name)))
        for name in right.attrib:
            if name not in left.attrib:
                explain.append(
                    'right element <%s> has attribute %s but left does not' %
                    (left.tag, name))
        if left.text != right.text:
            explain.append(
                'text for element <%s>: %r != %r' %
                (left.tag, left.text, right.text))
        if left.tail != right.tail:
            explain.append(
                'tail for element <%s>: %r != %r' %
                (left.tag, left.tail, right.tail))
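        # Recurse into paired children; zip() stops at the shorter list, so
        # surplus trailing children are not compared here.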
        for i1, i2 in zip(left, right):
            _build_explanation(i1, i2, explain)
        return
    explain = []
    _build_explanation(left, right, explain)
    if explain != []:
        header = "Comparing XML elements %s and %s" % (left, right)
        assert False, header + '\n' + '\n'.join(explain)
class TestResource():
    """Testing the Resource class."""
    def test_create_didl_resource_with_no_params(self):
        with pytest.raises(TypeError):
            res = data_structures.DidlResource()
    def test_create_didl_resource(self):
        res = data_structures.DidlResource('a%20uri', 'a:protocol:info:xx')
        assert res.uri == 'a%20uri'
        assert res.protocol_info == 'a:protocol:info:xx'
    def test_create_didl_resource_to_from_element(self):
        res = data_structures.DidlResource('a%20uri', 'a:protocol:info:xx',
                                           bitrate=3)
        elt = res.to_element()
        assert XML.tostring(elt) == (
            b'<res bitrate="3" protocolInfo="a:protocol:info:xx">a%20uri</res>')
        assert data_structures.DidlResource.from_element(elt) == res
    def test_didl_resource_to_dict(self):
        res = data_structures.DidlResource('a%20uri', 'a:protocol:info:xx')
        rez = res.to_dict()
        assert rez['uri'] == 'a%20uri'
        assert rez['protocol_info'] == 'a:protocol:info:xx'
        assert len(rez) == 12
    def test_didl_resource_to_dict_remove_nones(self):
        res = data_structures.DidlResource('a%20uri', 'a:protocol:info:xx')
        rez = res.to_dict(remove_nones=True)
        assert rez['uri'] == 'a%20uri'
        assert rez['protocol_info'] == 'a:protocol:info:xx'
        assert len(rez) == 2
    def test_didl_resource_from_dict(self):
        res = data_structures.DidlResource('a%20uri', 'a:protocol:info:xx')
        rez = data_structures.DidlResource.from_dict(res.to_dict())
        assert res == rez
    def test_didl_resource_from_dict_remove_nones(self):
        res = data_structures.DidlResource('a%20uri', 'a:protocol:info:xx')
        rez = data_structures.DidlResource.from_dict(
            res.to_dict(remove_nones=True))
        assert res == rez
    def test_didl_resource_eq(self):
        res = data_structures.DidlResource('a%20uri', 'a:protocol:info:xx')
        assert res != data_structures.DidlObject(
            title='a_title', parent_id='pid', item_id='iid')
        assert res is not None
        assert res == res
class TestDidlObject():
    """Testing the DidlObject base class."""
    def test_create_didl_object_with_no_params(self):
        with pytest.raises(TypeError):
            didl_object = data_structures.DidlObject()
    def test_create_didl_object_with_disallowed_params(self):
        with pytest.raises(ValueError) as excinfo:
            didl_object = data_structures.DidlObject(
                title='a_title', parent_id='pid', item_id='iid', bad_args='other')
        assert 'not allowed' in str(excinfo.value)
    def test_create_didl_object_with_good_params(self):
        didl_object = data_structures.DidlObject(
            title='a_title',
            parent_id='pid',
            item_id='iid',
            creator='a_creator',
            desc="dummy")
        assert didl_object is not None
        assert didl_object.title == 'a_title'
        assert didl_object.parent_id == 'pid'
        assert didl_object.item_id == 'iid'
        assert didl_object.creator == 'a_creator'
        assert didl_object.resources == []
        assert didl_object.desc == "dummy"
    def test_didl_object_from_wrong_element(self):
        # Using the wrong element
        elt = XML.fromstring("""<res>URI</res>""")
        with pytest.raises(DIDLMetadataError) as excinfo:
            didl_object = data_structures.DidlObject.from_element(elt)
        assert "Wrong element. Expected <item> or <container>, "
        "got <res> for class object" in str(excinfo.value)
    def test_didl_object_from_element(self):
        elt = XML.fromstring(
            """<item xmlns="urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/"
              xmlns:dc="http://purl.org/dc/elements/1.1/"
              xmlns:upnp="urn:schemas-upnp-org:metadata-1-0/upnp/"
              id="iid" parentID="pid" restricted="true">
                 <dc:title>the_title</dc:title>
                 <upnp:class>object</upnp:class>
                 <dc:creator>a_creator</dc:creator>
                 <desc id="cdudn"
                   nameSpace="urn:schemas-rinconnetworks-com:metadata-1-0/">DUMMY</desc>
               </item>
        """)
        didl_object = data_structures.DidlObject.from_element(elt)
        assert didl_object.title == 'the_title'
        assert didl_object.parent_id == 'pid'
        assert didl_object.item_id == 'iid'
        assert didl_object.creator == 'a_creator'
        assert didl_object.desc == 'DUMMY'
    def test_didl_object_from_wrong_class(self):
        # mismatched upnp class
        bad_elt1 = XML.fromstring(
            """<item xmlns="urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/"
              xmlns:dc="http://purl.org/dc/elements/1.1/"
              xmlns:upnp="urn:schemas-upnp-org:metadata-1-0/upnp/"
              id="iid" parentID="pid" restricted="true">
                 <dc:title>the_title</dc:title>
                 <upnp:class>object.item</upnp:class>
                 <dc:creator>a_creator</dc:creator>
                 <desc id="cdudn"
                   nameSpace="urn:schemas-rinconnetworks-com:metadata-1-0/">
                   RINCON_AssociatedZPUDN
                 </desc>
               </item>
        """)
        with pytest.raises(DIDLMetadataError) as excinfo:
            didl_object = data_structures.DidlObject.from_element(bad_elt1)
        assert ("UPnP class is incorrect. Expected 'object', got 'object.item'"
                ) in str(excinfo.value)
    def test_didl_object_from_dict(self):
        didl_object = data_structures.DidlObject(
            title='a_title',
            parent_id='pid',
            item_id='iid',
            creator='a_creator',
            desc='dummy')
        the_dict = {
            'title': 'a_title',
            'parent_id': 'pid',
            'item_id': 'iid',
            'creator': 'a_creator',
            'restricted': True,
            'desc': 'dummy'
        }
        assert data_structures.DidlObject.from_dict(the_dict) == didl_object
        # changing an attribute that is in _translation should make a difference
        the_dict['creator'] = 'another_creator'
        assert data_structures.DidlObject.from_dict(the_dict) != didl_object
        # round trip
        assert data_structures.DidlObject.from_dict(the_dict).to_dict() == \
            the_dict
    def test_didl_object_from_dict_resources(self):
        resources_list = [data_structures.DidlResource('a%20uri',
                                                       'a:protocol:info:xx')]
        didl_object = data_structures.DidlObject(title='a_title',
                                                 parent_id='pid',
                                                 item_id='iid',
                                                 creator='a_creator',
                                                 desc='dummy',
                                                 resources=resources_list)
        the_dict = {
            'title': 'a_title',
            'parent_id': 'pid',
            'item_id': 'iid',
            'creator': 'a_creator',
            'restricted': True,
            'desc': 'dummy',
            'resources': [resource.to_dict() for resource in resources_list]
        }
        assert data_structures.DidlObject.from_dict(the_dict) == didl_object
    def test_didl_object_from_dict_resources_remove_nones(self):
        resources_list = [data_structures.DidlResource('a%20uri',
                                                       'a:protocol:info:xx')]
        didl_object = data_structures.DidlObject(title='a_title',
                                                 parent_id='pid',
                                                 item_id='iid',
                                                 creator='a_creator',
                                                 desc='dummy',
                                                 resources=resources_list)
        the_dict = {
            'title': 'a_title',
            'parent_id': 'pid',
            'item_id': 'iid',
            'creator': 'a_creator',
            'restricted': True,
            'desc': 'dummy',
            'resources': [resource.to_dict(remove_nones=True)
                          for resource in resources_list]
        }
        assert data_structures.DidlObject.from_dict(the_dict) == didl_object
    def test_didl_comparisons(self):
        didl_object_1 = data_structures.DidlObject(
            title='a_title', parent_id='pid', item_id='iid', creator='a_creator')
        didl_object_2 = data_structures.DidlObject(
            title='a_title', parent_id='pid', item_id='iid', creator='a_creator')
        # should be not the same, but equal!
        assert didl_object_1 is not didl_object_2
        assert didl_object_1 == didl_object_2
        didl_object_3 = data_structures.DidlObject(
            title='a_title',
            parent_id='pid',
            item_id='iid',
            creator='a__different_creator')
        assert didl_object_3 != didl_object_1
    def test_didl_object_to_dict(self):
        didl_object = data_structures.DidlObject(
            title='a_title', parent_id='pid', item_id='iid', creator='a_creator')
        the_dict = {
            'title': 'a_title',
            'parent_id': 'pid',
            'item_id': 'iid',
            'creator': 'a_creator',
            'restricted': True,
            'desc': 'RINCON_AssociatedZPUDN'
        }
        assert didl_object.to_dict() == the_dict
        # adding in an attribute not in _translation should make no difference
        didl_object.other = 'other'
        assert didl_object.to_dict() == the_dict
        # but changing on the other should
        didl_object.creator = 'another'
        assert didl_object.to_dict() != the_dict
    def test_didl_object_to_dict_resources(self):
        resources_list = [data_structures.DidlResource('a%20uri',
                                                       'a:protocol:info:xx')]
        didl_object = data_structures.DidlObject(title='a_title',
                                                 parent_id='pid',
                                                 item_id='iid',
                                                 creator='a_creator',
                                                 resources=resources_list)
        the_dict = {
            'title': 'a_title',
            'parent_id': 'pid',
            'item_id': 'iid',
            'creator': 'a_creator',
            'restricted': True,
            'desc': 'RINCON_AssociatedZPUDN',
            'resources': [resource.to_dict() for resource in resources_list]
        }
        assert didl_object.to_dict() == the_dict
    def test_didl_object_to_dict_resources_remove_nones(self):
        resources_list = [data_structures.DidlResource('a%20uri',
                                                       'a:protocol:info:xx')]
        didl_object = data_structures.DidlObject(title='a_title',
                                                 parent_id='pid',
                                                 item_id='iid',
                                                 creator='a_creator',
                                                 resources=resources_list)
        the_dict = {
            'title': 'a_title',
            'parent_id': 'pid',
            'item_id': 'iid',
            'creator': 'a_creator',
            'restricted': True,
            'desc': 'RINCON_AssociatedZPUDN',
            'resources': [resource.to_dict(remove_nones=True)
                          for resource in resources_list]
        }
        assert didl_object.to_dict(remove_nones=True) == the_dict
    def test_didl_object_to_element(self):
        didl_object = data_structures.DidlObject(
            title='a_title', parent_id='pid', item_id='iid', creator='a_creator')
        # we seem to have to go through this to get ElementTree to deal
        # with namespaces properly!
        elt = XML.fromstring(XML.tostring(didl_object.to_element(True)))
        elt2 = XML.fromstring(
            '<dummy xmlns="urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/" ' +
            'xmlns:dc="http://purl.org/dc/elements/1.1/" ' +
            'xmlns:upnp="urn:schemas-upnp-org:metadata-1-0/upnp/">' +
            '<item id="iid" parentID="pid" restricted="true">' +
            '<dc:title>a_title</dc:title>' +
            '<dc:creator>a_creator</dc:creator>' +
            '<upnp:class>object</upnp:class><desc id="cdudn" ' +
            'nameSpace="urn:schemas-rinconnetworks-com:metadata-1-0/">' +
            'RINCON_AssociatedZPUDN</desc></item></dummy>')[0]
        assert_xml_equal(elt2, elt)
 | 
	mit | 2,753,686,613,637,451,300 | 42.020408 | 88 | 0.529954 | false | 
| 
	brandonlogan/octavia | 
	octavia/openstack/common/middleware/correlation_id.py | 
	1 | 
	1206 | 
	# Copyright (c) 2013 Rackspace Hosting
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.
"""Middleware that attaches a correlation id to WSGI request"""
import uuid
from octavia.openstack.common.middleware import base
from octavia.openstack.common import versionutils
@versionutils.deprecated(as_of=versionutils.deprecated.JUNO,
                         in_favor_of='octavia.middleware.CorrelationId')
class CorrelationIdMiddleware(base.Middleware):
    def process_request(self, req):
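        # Reuse an incoming X_CORRELATION_ID header if present; otherwise
        # generate a fresh UUID4 and attach it to the request.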
        correlation_id = (req.headers.get("X_CORRELATION_ID") or
                          str(uuid.uuid4()))
        req.headers['X_CORRELATION_ID'] = correlation_id
 | 
	apache-2.0 | 7,255,085,551,083,900,000 | 37.903226 | 78 | 0.713101 | false | 
| 
	ssorgatem/burrito | 
	burrito/tests/test_parameters.py | 
	2 | 
	26499 | 
	# ----------------------------------------------------------------------------
# Copyright (c) 2014--, burrito development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
from unittest import TestCase, main
from burrito.parameters import (FlagParameter, ValuedParameter, MixedParameter,
                                Parameters, ParameterError, FilePath)
class FlagParameterTests(TestCase):
    """ Tests of the FlagParameter class """
    def setUp(self):
        """Setup some variables for the tests to use """
        self.p_modify_prefix = [FlagParameter(Name='d', Prefix='-'),
                                FlagParameter(Name='d', Prefix='--'),
                                FlagParameter(Name='d', Prefix='')]
        self.p_modify_name = [FlagParameter(Name='d', Prefix='-'),
                              FlagParameter(Name='D', Prefix='-'),
                              FlagParameter(Name=4, Prefix='-'),
                              FlagParameter(Name='abcdef', Prefix='-')]
        self.p_On = [FlagParameter(Name='d', Prefix='-', Value=True),
                     FlagParameter(Name='d', Prefix='-', Value=5),
                     FlagParameter(Name='d', Prefix='-', Value=[1]),
                     FlagParameter(Name='d', Prefix='-', Value='F')]
        self.p_Off = [FlagParameter(Name='d', Prefix='-', Value=False),
                      FlagParameter(Name='d', Prefix='-', Value=None),
                      FlagParameter(Name='d', Prefix='-', Value=[]),
                      FlagParameter(Name='d', Prefix='-', Value=0),
                      FlagParameter(Name='d', Prefix='-', Value='')]
        self.ID_tests = [FlagParameter(Name='d', Prefix='-'),
                         FlagParameter(Name='d', Prefix=''),
                         FlagParameter(Name='', Prefix='-'),
                         FlagParameter(Name=4, Prefix='-'),
                         FlagParameter(Name=None, Prefix='-'),
                         FlagParameter(Name=4, Prefix=None),
                         FlagParameter(Name='abcdef', Prefix='-')]
    def test_init(self):
        """FlagParameter: init functions as expected """
        param = FlagParameter(Name='a', Prefix='-', Value=42)
        self.assertEqual(param.Name, 'a')
        self.assertEqual(param.Prefix, '-')
        self.assertEqual(param.Value, 42)
        self.assertEqual(param.Delimiter, None)
        self.assertEqual(param.Quote, None)
        self.assertEqual(param.Id, '-a')
    def test_init_defaults(self):
        """FlagParameter: init functions as expected with default values"""
        p = FlagParameter(Name='a', Prefix='-')
        self.assertEqual(p.Name, 'a')
        self.assertEqual(p.Prefix, '-')
        self.assertEqual(p.Value, False)
        self.assertEqual(p.Delimiter, None)
        self.assertEqual(p.Quote, None)
        self.assertEqual(p.Id, '-a')
    def test_get_id(self):
        """FlagParameter: _get_id functions as expected """
        expected_results = ['-d', 'd', '-', '-4', '-', '4', '-abcdef']
        for param, exp in zip(self.ID_tests, expected_results):
            self.assertEqual(param._get_id(), exp)
    def test_eq(self):
        """FlagParameter: eq functions as expected """
        p1 = FlagParameter(Name='a', Prefix='-', Value=True)
        p2 = FlagParameter(Name='a', Prefix='-', Value=True)
        p3 = FlagParameter(Name='a', Prefix='-')
        p4 = FlagParameter(Name='i', Prefix='-', Value=True)
        p5 = FlagParameter(Name='a', Prefix='--', Value=True)
        assert p1 == p2
        assert not p1 == p3
        assert not p1 == p4
        assert not p1 == p5
        assert not p3 == p4
        assert not p3 == p5
        assert not p4 == p5
    def test_ne(self):
        """FlagParameter: ne functions as expected """
        p1 = FlagParameter(Name='a', Prefix='-', Value=True)
        p2 = FlagParameter(Name='a', Prefix='-', Value=True)
        p3 = FlagParameter(Name='a', Prefix='-')
        p4 = FlagParameter(Name='i', Prefix='-', Value=True)
        p5 = FlagParameter(Name='a', Prefix='--', Value=True)
        assert not p1 != p2
        assert p1 != p3
        assert p1 != p4
        assert p1 != p5
        assert p3 != p4
        assert p3 != p5
        assert p4 != p5
    def test_isOn_True(self):
        """FlagParameter: isOn functions as expected with True Values """
        for param in self.p_On:
            assert param.isOn()
    def test_isOn_False(self):
        """FlagParameter: isOn functions as expected with False Values """
        for param in self.p_Off:
            assert not param.isOn()
    def test_isOff_True(self):
        """FlagParameter: isOff functions as expected with True values """
        for param in self.p_Off:
            assert param.isOff()
    def test_isOff_False(self):
        """FlagParameter: isOff functions as expected with False values """
        for param in self.p_On:
            assert not param.isOff()
    def test_on(self):
        """FlagParameter: on functions as expected """
        for param in self.p_On + self.p_Off:
            param.on()
            assert param.isOn()
    def test_off(self):
        """FlagParameter: off functions as expected """
        for param in self.p_On + self.p_Off:
            param.off()
            assert param.isOff()
    def test_str_modify_prefix(self):
        """FlagParameter: str functions as expected with different prefixes """
        expected_results = ['-d', '--d', 'd']
        for param, exp in zip(self.p_modify_prefix, expected_results):
            param.on()
            self.assertEqual(str(param), exp)
    def test_str_modify_name(self):
        """FlagParameter: str functions as expected with different names """
        expected_results = ['-d', '-D', '-4', '-abcdef']
        for param, exp in zip(self.p_modify_name, expected_results):
            param.on()
            self.assertEqual(str(param), exp)
class ValuedParameterTests(TestCase):
    """ Tests of the ValuedParameter class """
    constructor = ValuedParameter
    s = 'Valued'
    def setUp(self):
        """Setup some variables for the tests to use """
        self.p_modify_prefix = [self.constructor(Name='d', Prefix='-'),
                                self.constructor(Name='d', Prefix='--'),
                                self.constructor(Name='d', Prefix='')]
        self.p_modify_name = [self.constructor(Name='d', Prefix='-'),
                              self.constructor(Name='D', Prefix='-'),
                              self.constructor(Name=4, Prefix='-'),
                              self.constructor(Name='abcdef', Prefix='-')]
        self.p_On = [self.constructor(Name='d', Prefix='-', Value=True),
                     self.constructor(Name='d', Prefix='-', Value=5),
                     self.constructor(Name='d', Prefix='-', Value=[1]),
                     self.constructor(Name='d', Prefix='-', Value=False),
                     self.constructor(Name='d', Prefix='-', Value='F')]
        self.p_Off = [self.constructor(Name='d', Prefix='-', Value=None)]
        self.p_full = [self.constructor(Name='a', Prefix='-',
                                        Value=42, Delimiter=' ', Quote='\'')]
        self.p_default = [self.constructor(Name='a', Prefix='-')]
        self.p_modified_prefix = [self.constructor(Name='d', Prefix='-'),
                                  self.constructor(Name='d', Prefix='--'),
                                  self.constructor(Name='d', Prefix='')]
        self.p_modified_name = [self.constructor(Name='d', Prefix='-'),
                                self.constructor(Name='D', Prefix='-'),
                                self.constructor(Name=4, Prefix='-'),
                                self.constructor(Name='abcdef', Prefix='-')]
        self.p_modified_delimiter =\
            [self.constructor(Name='d', Prefix='-', Value=42),
             self.constructor(Name='d', Prefix='-', Value=42, Delimiter=''),
             self.constructor(Name='d', Prefix='-', Value=42, Delimiter=' '),
             self.constructor(Name='d', Prefix='-', Value=42, Delimiter=9),
             self.constructor(Name='d', Prefix='-', Value=42, Delimiter='=')]
        self.p_modified_value =\
            [self.constructor(Name='d', Prefix='-', Value=42, Delimiter=' '),
             self.constructor(
                 Name='d',
                 Prefix='-',
                 Value='pbl',
                 Delimiter=' '),
                self.constructor(
                    Name='d',
                    Prefix='-',
                    Value='2-2',
                    Delimiter=' '),
                self.constructor(Name='d', Prefix='-', Value='evo/t.txt',
                                 Delimiter=' '),
                self.constructor(Name='d', Prefix='-', Value='\'',
                                 Delimiter=' ')]
        self.p_modified_quote =\
            [self.constructor(Name='d', Prefix='-', Value=42, Quote=''),
             self.constructor(Name='d', Prefix='-', Value=42),
             self.constructor(Name='d', Prefix='-', Value=42, Quote=' '),
             self.constructor(Name='d', Prefix='-', Value=42, Quote='\''),
             self.constructor(Name='d', Prefix='-', Value=42, Quote='\"'),
             self.constructor(Name='d', Prefix='-', Value=42, Quote='x')]
        self.ID_tests = [self.constructor(Name='d', Prefix='-'),
                         self.constructor(Name='d', Prefix=''),
                         self.constructor(Name='', Prefix='-'),
                         self.constructor(Name=4, Prefix='-'),
                         self.constructor(Name=None, Prefix='-'),
                         self.constructor(Name=4, Prefix=None),
                         self.constructor(Name='abcdef', Prefix='-')]
        self.p_modified_is_path =\
            [self.constructor(Name='d', Prefix='-', Delimiter=' ',
                              Value='test.txt', IsPath=True),
             self.constructor(Name='d', Prefix='-', Delimiter=' ',
                              Value='test.txt', IsPath=False),
             self.constructor(Name='d', Prefix='-', Delimiter=' ',
                              Value='test.txt', Quote='"', IsPath=True)]
    def test_init(self):
        """Parameter: init functions as expected """
        for param in self.p_full:
            self.assertEqual(param.Name, 'a')
            self.assertEqual(param.Prefix, '-')
            self.assertEqual(param.Value, 42)
            self.assertEqual(param.Delimiter, ' ')
            self.assertEqual(param.Quote, '\'')
            self.assertEqual(param.Id, '-a')
    def test_init_defaults(self):
        """Parameter: init functions as expected with default values"""
        for p in self.p_default:
            self.assertEqual(p.Name, 'a')
            self.assertEqual(p.Prefix, '-')
            self.assertEqual(p.Value, None)
            self.assertEqual(p.Delimiter, None)
            self.assertEqual(p.Quote, None)
            self.assertEqual(p.Id, '-a')
    def test_get_id(self):
        """Parameter: _get_id functions as expected """
        expected_results = ['-d', 'd', '-', '-4', '-', '4', '-abcdef']
        for param, exp in zip(self.ID_tests, expected_results):
            self.assertEqual(param._get_id(), exp)
    def test_eq(self):
        """Parameter: eq functions as expected """
        p1 = self.constructor(Name='a', Prefix='-', Value=42, Quote='\'',
                              Delimiter='=')
        p2 = self.constructor(Name='a', Prefix='-', Value=42, Quote='\'',
                              Delimiter='=')
        p3 = self.constructor(Name='dsf', Prefix='-', Value=42, Quote='\'',
                              Delimiter='=')
        p4 = self.constructor(Name='a', Prefix='--', Value=42, Quote='\'',
                              Delimiter='=')
        p5 = self.constructor(Name='a', Prefix='-', Value=942, Quote='\'',
                              Delimiter='=')
        p6 = self.constructor(Name='a', Prefix='-', Value=42, Quote='\"',
                              Delimiter='=')
        p7 = self.constructor(Name='a', Prefix='-', Value=42, Quote='\'',
                              Delimiter='!!!')
        p8 = self.constructor(Name='wwwww', Prefix='-------')
        p9 = self.constructor(Name='a', Prefix='-', Value=42, Quote='\'',
                              Delimiter='=', IsPath=True)
        assert p1 == p2
        assert not p1 == p3
        assert not p1 == p4
        assert not p1 == p5
        assert not p1 == p6
        assert not p1 == p7
        assert not p1 == p8
        assert not p1 == p9
        # test default setting
        p5.Value = 42
        assert not p1 == p5
    def test_ne(self):
        """Parameter: ne functions as expected """
        p1 = self.constructor(Name='a', Prefix='-', Value=42, Quote='\'',
                              Delimiter='=')
        p2 = self.constructor(Name='a', Prefix='-', Value=42, Quote='\'',
                              Delimiter='=')
        p3 = self.constructor(Name='dsf', Prefix='-', Value=42, Quote='\'',
                              Delimiter='=')
        p4 = self.constructor(Name='a', Prefix='--', Value=42, Quote='\'',
                              Delimiter='=')
        p5 = self.constructor(Name='a', Prefix='-', Value=942, Quote='\'',
                              Delimiter='=')
        p6 = self.constructor(Name='a', Prefix='-', Value=42, Quote='\"',
                              Delimiter='=')
        p7 = self.constructor(Name='a', Prefix='-', Value=42, Quote='\'',
                              Delimiter='!!!')
        p8 = self.constructor(Name='wwwww', Prefix='-------')
        p9 = self.constructor(Name='a', Prefix='-', Value=42, Quote='\'',
                              Delimiter='=', IsPath=True)
        assert not p1 != p2
        assert p1 != p3
        assert p1 != p4
        assert p1 != p5
        assert p1 != p6
        assert p1 != p7
        assert p1 != p8
        assert p1 != p9
        # test default setting
        p5.Value = 42
        assert p1 != p5
    def test_get_default(self):
        """Parameter: default behaves as expected """
        p1 = self.constructor(Name='a', Prefix='-', Value=42, Quote='\'',
                              Delimiter='=')
        self.assertEqual(p1._get_default(), 42)
        p1.Value = 43
        self.assertEqual(p1._get_default(), 42)
    def test_get_default_w_IsPath(self):
        """Parameter: default is a FilePath object when IsPath is set """
        p = self.constructor(
            Name='a', Prefix='-', Value='test.txt', Quote='\'',
            Delimiter='=', IsPath=True)
        self.assertEqual(p._get_default(), 'test.txt')
        self.assertEqual(p.Default, 'test.txt')
        p.Value = 'test2.txt'
        self.assertEqual(p._get_default(), 'test.txt')
        self.assertEqual(p.Default, 'test.txt')
        assert isinstance(p._get_default(), FilePath)
        assert isinstance(p.Default, FilePath)
    def test_reset(self):
        """Parameter: reset correctly set Value to _default """
        p1 = self.constructor(Name='a', Prefix='-', Value=42, Quote='\'',
                              Delimiter='=')
        p1.Value = 43
        self.assertNotEqual(p1.Default, p1.Value)
        p1.reset()
        self.assertEqual(p1.Default, p1.Value)
    def test_isOn_True(self):
        """Parameter: isOn functions as expected with True Values """
        for param in self.p_On:
            assert param.isOn()
    def test_isOn_False(self):
        """Parameter: isOn functions as expected with False Values """
        for param in self.p_Off:
            assert not param.isOn()
    def test_isOff_True(self):
        """Parameter: isOff functions as expected with True values """
        for param in self.p_Off:
            assert param.isOff()
    def test_isOff_False(self):
        """Parameter: isOff functions as expected with False values """
        for param in self.p_On:
            assert not param.isOff()
    def test_on(self):
        """Parameter: on functions as expected """
        for param in self.p_On + self.p_Off:
            param.on('a')
            assert param.isOn()
        p = self.p_On[0]
        self.assertRaises(ParameterError, p.on, None)
    def test_off(self):
        """Parameter: off functions as expected """
        for param in self.p_On + self.p_Off:
            param.off()
            assert param.isOff()
    def test_str_off(self):
        """Parameter: str() prints empty string when off """
        for p in self.p_Off:
            self.assertEqual(str(p), '')
    def test_str_modify_prefix(self):
        """Parameter: str functions as expected with different prefixes """
        expected_results = ['-d', '--d', 'd']
        for param, exp in zip(self.p_modified_prefix, expected_results):
            param.on('')
            self.assertEqual(str(param), exp)
    def test_str_modify_name(self):
        """Parameter: str functions as expected with different names """
        expected_results = ['-d', '-D', '-4', '-abcdef']
        for param, exp in zip(self.p_modified_name, expected_results):
            param.on('')
            self.assertEqual(str(param), exp)
    def test_str_modify_delimiter(self):
        """Parameter: str functions as expected with different delimiter """
        expected_results = ['-d42', '-d42', '-d 42', '-d942', '-d=42']
        for param, exp in zip(self.p_modified_delimiter, expected_results):
            self.assertEqual(str(param), exp)
    def test_str_modify_values(self):
        """Parameter: str functions as expected with different values """
        expected_results = ['-d 42',
                            '-d pbl', '-d 2-2', '-d evo/t.txt', '-d \'']
        for param, exp in zip(self.p_modified_value, expected_results):
            self.assertEqual(str(param), exp)
    def test_str_modify_quotes(self):
        """Parameter: str functions as expected with different quotes """
        expected_results = ['-d42', '-d42', '-d 42 ', '-d\'42\'',
                            '-d\"42\"', '-dx42x']
        for param, exp in zip(self.p_modified_quote, expected_results):
            self.assertEqual(str(param), exp)
    def test_str_modify_is_path(self):
        """Parameter: str functions as expected with different IsPath """
        expected_results = ['-d "test.txt"', '-d test.txt', '-d "test.txt"']
        for param, exp in zip(self.p_modified_is_path, expected_results):
            self.assertEqual(str(param), exp)
    def test_str_full(self):
        """Parameter: str functions as expected with all values non-default """
        for p in self.p_full:
            self.assertEqual(str(p), '-a \'42\'')
class MixedParameterTests(ValuedParameterTests):
    """ Tests of the MixedParameter class """
    constructor = MixedParameter
    def setUp(self):
        """Setup some variables for the tests to use """
        super(MixedParameterTests, self).setUp()
        self.p_On = [self.constructor(Name='d', Prefix='-', Value=True),
                     self.constructor(Name='d', Prefix='-', Value=5),
                     self.constructor(Name='d', Prefix='-', Value=[1]),
                     self.constructor(Name='d', Prefix='-', Value=None),
                     self.constructor(Name='d', Prefix='-', Value='F')]
        self.p_Off = [self.constructor(Name='d', Prefix='-', Value=False)]
        # This is different from the superclass variable b/c we need to make
        # sure that specifying IsPath with Value=None functions as expected
        self.p_modified_is_path =\
            [self.constructor(Name='d', Prefix='-', Delimiter=' ',
                              Value='test.txt', IsPath=True),
             self.constructor(Name='d', Prefix='-', Delimiter=' ',
                              Value='test.txt', Quote='"', IsPath=True),
             self.constructor(Name='d', Prefix='-', Delimiter=' ',
                              Value='test.txt', IsPath=False),
             self.constructor(Name='d', Prefix='-', Delimiter=' ',
                              Value=None, IsPath=True),
             self.constructor(Name='d', Prefix='-', Delimiter=' ',
                              Value=None, IsPath=False)]
    def test_on(self):
        """Parameter: on functions as expected """
        for param in self.p_On + self.p_Off:
            param.on('a')
            assert param.isOn()
        p = self.p_On[0]
        self.assertRaises(ParameterError, p.on, False)
    def test_init_defaults(self):
        """MixedParameter: init functions as expected with default values"""
        for p in self.p_default:
            self.assertEqual(p.Name, 'a')
            self.assertEqual(p.Prefix, '-')
            self.assertEqual(p.Value, False)
            self.assertEqual(p.Delimiter, None)
            self.assertEqual(p.Quote, None)
            self.assertEqual(p.Id, '-a')
            self.assertEqual(p.IsPath, False)
    def test_str_all_modes(self):
        """MixedParameter: str() functions in various modes """
        p = MixedParameter(Prefix='-', Name='d', Delimiter='=', Quote=']')
        self.assertEqual(str(p), '')
        p.on()
        self.assertEqual(str(p), '-d')
        p.on('a')
        self.assertEqual(str(p), '-d=]a]')
    def test_str_modify_is_path(self):
        """MixedParameter: str functions as expected with different IsPath """
        # This is different from the superclass test b/c we need to make
        # sure that specifying IsPath with Value=None functions as expected
        expected_results = ['-d "test.txt"', '-d "test.txt"',
                            '-d test.txt', '-d', '-d']
        for param, exp in zip(self.p_modified_is_path, expected_results):
            self.assertEqual(str(param), exp)
class ParametersTests(TestCase):
    """Tests of the Parameters class"""
    def setUp(self):
        self.fp = FlagParameter(Prefix='-', Name='d')
        self.vp = ValuedParameter(Name='p', Prefix='-', Value=[1])
        self.mp = MixedParameter(Prefix='--', Name='k', Delimiter=' ')
        self.all_params = {self.fp.Id: self.fp, self.vp.Id: self.vp,
                           self.mp.Id: self.mp}
        self.p1 = Parameters()
        self.p2 = Parameters(self.all_params)
        self._synonyms = {'Pino': '-p', 'K': 'k'}
        self.p3 = Parameters(self.all_params, self._synonyms)
    def test_init(self):
        """Parameters: init functions as expected"""
        self.assertEqual(self.p1, {})
        self.assertEqual(self.p2, self.all_params)
        self.assertEqual(self.p3, self.all_params)
    def test_lookup(self):
        """Parameters: test ability to lookup """
        self.assertEqual(self.p2['-p'], self.vp)
        self.assertEqual(self.p3['Pino'], self.vp)
    def test_immutability(self):
        """Parameters: attempt to modify object raises error """
        try:
            self.p2['-p'] = 42
        except TypeError:
            pass
        else:
            raise AttributeError("Parameters shouldn't support assignment.")
        try:
            del self.p2['-p']
        except TypeError:
            pass
        else:
            raise AttributeError("Parameters shouldn't support deletion.")
    def test_all_off(self):
        """Parameters: all_off() should turn all parameters off"""
        p = self.p2
        # turn everything on
        for v in p.values():
            try:
                v.on(3)
            except TypeError:
                v.on()
            self.assertTrue(v.isOn())
        # turn everything off
        p.all_off()
        for v in p.values():
            self.assertTrue(v.isOff())
class FilePathTests(TestCase):
    """ Tests of the FilePath class """
    def setUp(self):
        """ Initialize variables to be used by tests """
        self.filename = 'filename.txt'
        self.relative_dir_path = 'a/relative/path/'
        self.relative_dir_path_no_trailing_slash = 'a/relative/path'
        self.relative_file_path = 'a/relative/filepath.txt'
        self.absolute_dir_path = '/absolute/path/'
        self.absolute_file_path = '/absolute/filepath.txt'
        self.all_paths = [self.filename, self.relative_dir_path,
                          self.relative_file_path, self.absolute_dir_path,
                          self.absolute_file_path]
    def test_init(self):
        """FilePath: initialization returns w/o error """
        for p in self.all_paths:
            self.assertEqual(FilePath(p), p)
        self.assertEqual(FilePath(''), '')
    def test_str(self):
        """FilePath: str wraps path in quotes """
        # Do one explicit test (for sanity), then automatically run
        # through the examples
        self.assertEqual(str(FilePath(self.filename)), '"filename.txt"')
        for p in self.all_paths:
            self.assertEqual(str(FilePath(p)), '"' + p + '"')
    def test_str_path_is_None(self):
        """FilePath: str return empty string when path is None """
        self.assertEqual(str(FilePath(None)), '')
    def test_add(self):
        """FilePath: add (or joining of paths) functions as expected """
        actual = FilePath(self.relative_dir_path) + FilePath(self.filename)
        expected = FilePath('a/relative/path/filename.txt')
        self.assertEqual(actual, expected)
        # result is a FilePath
        assert isinstance(actual, FilePath)
        # appending a string to a FilePath results in a FilePath
        actual = FilePath(self.relative_dir_path) + 'filename.txt'
        expected = FilePath('a/relative/path/filename.txt')
        self.assertEqual(actual, expected)
        # result is a FilePath
        assert isinstance(actual, FilePath)
    def test_FilePath_identity_preserved(self):
        """FilePath: trivial actions on FilePaths yeild original FilePath
        """
        p = FilePath(self.filename)
        # Creating FilePath from FilePath results in FilePath
        # equal to original
        self.assertEqual(FilePath(p), p)
        for p in self.all_paths:
            self.assertEqual(FilePath(p), p)
        # Appending an empty FilePath to a FilePath results in FilePath
        # equal to original
        self.assertEqual(p + FilePath(''), p)
if __name__ == '__main__':
    main()
 | 
	bsd-3-clause | 2,778,190,990,815,583,000 | 39.210926 | 79 | 0.52919 | false | 
| 
	GenericStudent/home-assistant | 
	homeassistant/components/neato/const.py | 
	9 | 
	6449 | 
	"""Constants for Neato integration."""
NEATO_DOMAIN = "neato"
CONF_VENDOR = "vendor"
NEATO_CONFIG = "neato_config"
NEATO_LOGIN = "neato_login"
NEATO_MAP_DATA = "neato_map_data"
NEATO_PERSISTENT_MAPS = "neato_persistent_maps"
NEATO_ROBOTS = "neato_robots"
SCAN_INTERVAL_MINUTES = 1
SERVICE_NEATO_CUSTOM_CLEANING = "custom_cleaning"
VALID_VENDORS = ["neato", "vorwerk"]
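# Numeric cleaning-mode and action codes from the Neato API, mapped to
# human-readable strings.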
MODE = {1: "Eco", 2: "Turbo"}
ACTION = {
    0: "Invalid",
    1: "House Cleaning",
    2: "Spot Cleaning",
    3: "Manual Cleaning",
    4: "Docking",
    5: "User Menu Active",
    6: "Suspended Cleaning",
    7: "Updating",
    8: "Copying logs",
    9: "Recovering Location",
    10: "IEC test",
    11: "Map cleaning",
    12: "Exploring map (creating a persistent map)",
    13: "Acquiring Persistent Map IDs",
    14: "Creating & Uploading Map",
    15: "Suspended Exploration",
}
ERRORS = {
    "ui_error_battery_battundervoltlithiumsafety": "Replace battery",
    "ui_error_battery_critical": "Replace battery",
    "ui_error_battery_invalidsensor": "Replace battery",
    "ui_error_battery_lithiumadapterfailure": "Replace battery",
    "ui_error_battery_mismatch": "Replace battery",
    "ui_error_battery_nothermistor": "Replace battery",
    "ui_error_battery_overtemp": "Replace battery",
    "ui_error_battery_overvolt": "Replace battery",
    "ui_error_battery_undercurrent": "Replace battery",
    "ui_error_battery_undertemp": "Replace battery",
    "ui_error_battery_undervolt": "Replace battery",
    "ui_error_battery_unplugged": "Replace battery",
    "ui_error_brush_stuck": "Brush stuck",
    "ui_error_brush_overloaded": "Brush overloaded",
    "ui_error_bumper_stuck": "Bumper stuck",
    "ui_error_check_battery_switch": "Check battery",
    "ui_error_corrupt_scb": "Call customer service corrupt board",
    "ui_error_deck_debris": "Deck debris",
    "ui_error_dflt_app": "Check Neato app",
    "ui_error_disconnect_chrg_cable": "Disconnected charge cable",
    "ui_error_disconnect_usb_cable": "Disconnected USB cable",
    "ui_error_dust_bin_missing": "Dust bin missing",
    "ui_error_dust_bin_full": "Dust bin full",
    "ui_error_dust_bin_emptied": "Dust bin emptied",
    "ui_error_hardware_failure": "Hardware failure",
    "ui_error_ldrop_stuck": "Clear my path",
    "ui_error_lds_jammed": "Clear my path",
    "ui_error_lds_bad_packets": "Check Neato app",
    "ui_error_lds_disconnected": "Check Neato app",
    "ui_error_lds_missed_packets": "Check Neato app",
    "ui_error_lwheel_stuck": "Clear my path",
    "ui_error_navigation_backdrop_frontbump": "Clear my path",
    "ui_error_navigation_backdrop_leftbump": "Clear my path",
    "ui_error_navigation_backdrop_wheelextended": "Clear my path",
    "ui_error_navigation_noprogress": "Clear my path",
    "ui_error_navigation_origin_unclean": "Clear my path",
    "ui_error_navigation_pathproblems": "Cannot return to base",
    "ui_error_navigation_pinkycommsfail": "Clear my path",
    "ui_error_navigation_falling": "Clear my path",
    "ui_error_navigation_noexitstogo": "Clear my path",
    "ui_error_navigation_nomotioncommands": "Clear my path",
    "ui_error_navigation_rightdrop_leftbump": "Clear my path",
    "ui_error_navigation_undockingfailed": "Clear my path",
    "ui_error_picked_up": "Picked up",
    "ui_error_qa_fail": "Check Neato app",
    "ui_error_rdrop_stuck": "Clear my path",
    "ui_error_reconnect_failed": "Reconnect failed",
    "ui_error_rwheel_stuck": "Clear my path",
    "ui_error_stuck": "Stuck!",
    "ui_error_unable_to_return_to_base": "Unable to return to base",
    "ui_error_unable_to_see": "Clean vacuum sensors",
    "ui_error_vacuum_slip": "Clear my path",
    "ui_error_vacuum_stuck": "Clear my path",
    "ui_error_warning": "Error check app",
    "batt_base_connect_fail": "Battery failed to connect to base",
    "batt_base_no_power": "Battery base has no power",
    "batt_low": "Battery low",
    "batt_on_base": "Battery on base",
    "clean_tilt_on_start": "Clean the tilt on start",
    "dustbin_full": "Dust bin full",
    "dustbin_missing": "Dust bin missing",
    "gen_picked_up": "Picked up",
    "hw_fail": "Hardware failure",
    "hw_tof_sensor_sensor": "Hardware sensor disconnected",
    "lds_bad_packets": "Bad packets",
    "lds_deck_debris": "Debris on deck",
    "lds_disconnected": "Disconnected",
    "lds_jammed": "Jammed",
    "lds_missed_packets": "Missed packets",
    "maint_brush_stuck": "Brush stuck",
    "maint_brush_overload": "Brush overloaded",
    "maint_bumper_stuck": "Bumper stuck",
    "maint_customer_support_qa": "Contact customer support",
    "maint_vacuum_stuck": "Vacuum is stuck",
    "maint_vacuum_slip": "Vacuum is stuck",
    "maint_left_drop_stuck": "Vacuum is stuck",
    "maint_left_wheel_stuck": "Vacuum is stuck",
    "maint_right_drop_stuck": "Vacuum is stuck",
    "maint_right_wheel_stuck": "Vacuum is stuck",
    "not_on_charge_base": "Not on the charge base",
    "nav_robot_falling": "Clear my path",
    "nav_no_path": "Clear my path",
    "nav_path_problem": "Clear my path",
    "nav_backdrop_frontbump": "Clear my path",
    "nav_backdrop_leftbump": "Clear my path",
    "nav_backdrop_wheelextended": "Clear my path",
    "nav_mag_sensor": "Clear my path",
    "nav_no_exit": "Clear my path",
    "nav_no_movement": "Clear my path",
    "nav_rightdrop_leftbump": "Clear my path",
    "nav_undocking_failed": "Clear my path",
}
ALERTS = {
    "ui_alert_dust_bin_full": "Please empty dust bin",
    "ui_alert_recovering_location": "Returning to start",
    "ui_alert_battery_chargebasecommerr": "Battery error",
    "ui_alert_busy_charging": "Busy charging",
    "ui_alert_charging_base": "Base charging",
    "ui_alert_charging_power": "Charging power",
    "ui_alert_connect_chrg_cable": "Connect charge cable",
    "ui_alert_info_thank_you": "Thank you",
    "ui_alert_invalid": "Invalid check app",
    "ui_alert_old_error": "Old error",
    "ui_alert_swupdate_fail": "Update failed",
    "dustbin_full": "Please empty dust bin",
    "maint_brush_change": "Change the brush",
    "maint_filter_change": "Change the filter",
    "clean_completed_to_start": "Cleaning completed",
    "nav_floorplan_not_created": "No floorplan found",
    "nav_floorplan_load_fail": "Failed to load floorplan",
    "nav_floorplan_localization_fail": "Failed to load floorplan",
    "clean_incomplete_to_start": "Cleaning incomplete",
    "log_upload_failed": "Logs failed to upload",
}
 | 
	apache-2.0 | 358,210,031,476,361,200 | 40.876623 | 69 | 0.663204 | false | 
| 
	amitaekbote/dcos | 
	ssh/runner.py | 
	5 | 
	11246 | 
	import asyncio
import copy
import logging
import os
try:
    import pty
except ImportError:
    pass
import sys
from contextlib import contextmanager
import ssh.validate
from pkgpanda.util import is_windows
from ssh.utils import CommandChain, JsonDelegate
if not is_windows:
    assert 'pty' in sys.modules
log = logging.getLogger(__name__)
@contextmanager
def make_slave_pty():
    master_pty, slave_pty = pty.openpty()
    yield slave_pty
    os.close(slave_pty)
    os.close(master_pty)
def parse_ip(ip: str, default_port: int):
    tmp = ip.split(':')
    if len(tmp) == 2:
        return {"ip": tmp[0], "port": int(tmp[1])}
    elif len(tmp) == 1:
        return {"ip": ip, "port": default_port}
    else:
        raise ValueError(
            "Expected a string of form <ip> or <ip>:<port> but found a string with more than one " +
            "colon in it. NOTE: IPv6 is not supported at this time. Got: {}".format(ip))
class Node():
    def __init__(self, host, tags: dict=dict(), default_port: int=22):
        self.tags = copy.copy(tags)
        self.host = parse_ip(host, default_port)
        self.ip = self.host['ip']
        self.port = self.host['port']
    def get_full_host(self):
        _host = self.host.copy()
        _host.update({'tags': self.tags})
        return _host
    def __repr__(self):
        return '{}:{} tags={}'.format(
            self.ip,
            self.port,
            ', '.join(['{}:{}'.format(k, v) for k, v in sorted(self.tags.items())]))
def add_host(target, default_port):
    if isinstance(target, Node):
        return target
    return Node(target, default_port=default_port)
class MultiRunner():
    def __init__(self, targets: list, async_delegate=None, user=None, key_path=None, extra_opts='',
                 process_timeout=120, parallelism=10, default_port=22):
        # TODO(cmaloney): accept an "ssh_config" object which generates an ssh
        # config file, then add a '-F' to that temporary config file rather than
        # manually building up / adding the arguments in _get_base_args which is
        # very error prone to get the formatting right. Should have just one
        # host section which applies to all hosts, sets things like "user".
        self.extra_opts = extra_opts
        self.process_timeout = process_timeout
        self.user = user
        self.key_path = key_path
        self.ssh_bin = '/usr/bin/ssh'
        self.scp_bin = '/usr/bin/scp'
        self.async_delegate = async_delegate
        self.__targets = []
        for target in targets:
            self.__targets.append(add_host(target, default_port))
        self.__parallelism = parallelism
    def _get_base_args(self, bin_name, host):
        # TODO(cmaloney): Switch to SSH config file, documented above. A single
        # user is always required.
        if bin_name == self.ssh_bin:
            port_option = '-p'
            add_opts = ['-tt']
            if self.extra_opts:
                add_opts.extend(self.extra_opts.split(' '))
        else:
            port_option = '-P'
            add_opts = []
        shared_opts = [
            bin_name,
            '-oConnectTimeout=10',
            '-oStrictHostKeyChecking=no',
            '-oUserKnownHostsFile=/dev/null',
            '-oBatchMode=yes',
            '-oPasswordAuthentication=no',
            '{}{}'.format(port_option, host.port),
            '-i', self.key_path]
        shared_opts.extend(add_opts)
        return shared_opts
    @asyncio.coroutine
    def run_cmd_return_dict_async(self, cmd, host, namespace, future, stage):
        with make_slave_pty() as slave_pty:
            process = yield from asyncio.create_subprocess_exec(
                *cmd, stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE,
                stdin=slave_pty,
                env={'TERM': 'linux'})
            stdout = b''
            stderr = b''
            try:
                stdout, stderr = yield from asyncio.wait_for(process.communicate(), self.process_timeout)
            except asyncio.TimeoutError:
                try:
                    process.terminate()
                except ProcessLookupError:
                    log.info('process with pid {} not found'.format(process.pid))
                log.error('timeout of {} sec reached. PID {} killed'.format(self.process_timeout, process.pid))
        # For each possible line in stderr, match from the beginning of the line for
        # the confusing warning: "Warning: Permanently added ...". If the warning exists,
        # remove it from the string.
        err_arry = stderr.decode().split('\r')
        stderr = bytes('\n'.join([line for line in err_arry if not line.startswith(
            'Warning: Permanently added')]), 'utf-8')
        process_output = {
            '{}:{}'.format(host.ip, host.port): {
                "cmd": cmd,
                "stdout": stdout.decode().split('\n'),
                "stderr": stderr.decode().split('\n'),
                "returncode": process.returncode,
                "pid": process.pid,
                "stage": stage
            }
        }
        future.set_result((namespace, process_output, host))
        return process_output
    @asyncio.coroutine
    def run_async(self, host, command, namespace, future, stage):
        # command consists of (command_flag, command, rollback, stage)
        # we will ignore all but command for now
        _, cmd, _, _ = command
        # we may lazy evaluate a command based on Node() class
        if callable(cmd):
            cmd = cmd(host)
        full_cmd = self._get_base_args(self.ssh_bin, host) + ['{}@{}'.format(self.user, host.ip)] + cmd
        log.debug('executing command {}'.format(full_cmd))
        result = yield from self.run_cmd_return_dict_async(full_cmd, host, namespace, future, stage)
        return result
    @asyncio.coroutine
    def copy_async(self, host, command, namespace, future, stage):
        # command[0] is command_flag, command[-1] is stage
        # we will ignore them here.
        _, local_path, remote_path, remote_to_local, recursive, _ = command
        copy_command = []
        if recursive:
            copy_command += ['-r']
        remote_full_path = '{}@{}:{}'.format(self.user, host.ip, remote_path)
        if remote_to_local:
            copy_command += [remote_full_path, local_path]
        else:
            copy_command += [local_path, remote_full_path]
        full_cmd = self._get_base_args(self.scp_bin, host) + copy_command
        log.debug('copy with command {}'.format(full_cmd))
        result = yield from self.run_cmd_return_dict_async(full_cmd, host, namespace, future, stage)
        return result
    def _run_chain_command(self, chain: CommandChain, host, chain_result):
        # Prepare status json
        if self.async_delegate is not None:
            log.debug('Preparing a status json')
            self.async_delegate.prepare_status(chain.namespace, self.__targets)
        host_status = 'hosts_success'
        host_port = '{}:{}'.format(host.ip, host.port)
        command_map = {
            CommandChain.execute_flag: self.run_async,
            CommandChain.copy_flag: self.copy_async
        }
        process_exit_code_map = {
            None: {
                'host_status': 'terminated',
                'host_status_count': 'hosts_terminated'
            },
            0: {
                'host_status': 'success',
                'host_status_count': 'hosts_success'
            },
            'failed': {
                'host_status': 'failed',
                'host_status_count': 'hosts_failed'
            }
        }
        for command in chain.get_commands():
            stage = command[-1]
            if stage is not None:
                # a stage can be a function which takes a Node() object and does evaluation
                if callable(stage):
                    stage = stage(host)
                log.debug('{}: {}'.format(host_port, stage))
            future = asyncio.Future()
            if self.async_delegate is not None:
                log.debug('Using async_delegate with callback')
                callback_called = asyncio.Future()
                future.add_done_callback(lambda future: self.async_delegate.on_update(future, callback_called))
            # command[0] is a type of a command, could be CommandChain.execute_flag, CommandChain.copy_flag
            result = yield from command_map.get(command[0], None)(host, command, chain.namespace, future, stage)
            status = process_exit_code_map.get(result[host_port]['returncode'], process_exit_code_map['failed'])
            host_status = status['host_status']
            if self.async_delegate is not None:
                # We need to make sure the callback was executed before we can proceed further
                # 5 seconds should be enough for a callback.
                try:
                    yield from asyncio.wait_for(callback_called, 5)
                except asyncio.TimeoutError:
                    log.error('Callback did not execute within 5 sec')
                    host_status = 'terminated'
                    break
            _, result, host_object = future.result()
            chain_result.append(result)
            if host_status != 'success':
                break
        if self.async_delegate is not None:
            # Update chain status.
            self.async_delegate.on_done(chain.namespace, result, host_status=host_status)
    @asyncio.coroutine
    def dispatch_chain(self, host, chains, sem):
        log.debug('Started dispatch_chain for host {}'.format(host))
        chain_result = []
        with (yield from sem):
            for chain in chains:
                yield from self._run_chain_command(chain, host, chain_result)
        return chain_result
    @asyncio.coroutine
    def run_commands_chain_async(self, chains: list, block=False, state_json_dir=None, delegate_extra_params={}):
        sem = asyncio.Semaphore(self.__parallelism)
        if state_json_dir:
            log.debug('Using default JsonDelegate method, state_json_dir {}'.format(state_json_dir))
            self.async_delegate = JsonDelegate(state_json_dir, len(self.__targets), **delegate_extra_params)
        else:
            assert self.async_delegate, 'async delegate must be set'
        if block:
            log.debug('Waiting for run_command_chain_async to execute')
            tasks = []
            for host in self.__targets:
                tasks.append(asyncio.async(self.dispatch_chain(host, chains, sem)))
            yield from asyncio.wait(tasks)
            log.debug('run_command_chain_async executed')
            return [task.result() for task in tasks]
        else:
            log.debug('Started run_command_chain_async in non-blocking mode')
            for host in self.__targets:
                asyncio.async(self.dispatch_chain(host, chains, sem))
    def validate(self):
        """Raises an AssertException if validation does not pass"""
        ssh.validate.validate_ssh_user(self.user)
        ssh.validate.validate_ssh_key_path(self.key_path)
        for node in self.__targets:
            ssh.validate.validate_ssh_port(node.port)
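# Minimal construction sketch (addresses and key path are hypothetical); it
# exercises only the constructor and validate() defined above:
#
#     runner = MultiRunner(['10.0.0.1', '10.0.0.2:22022'],
#                          user='centos', key_path='/home/centos/.ssh/id_rsa')
#     runner.validate()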
 | 
	apache-2.0 | -4,540,312,619,662,744,600 | 37.77931 | 113 | 0.575227 | false | 
| 
	openstack/manila | 
	manila/api/openstack/versioned_method.py | 
	7 | 
	1717 | 
	# Copyright 2014 IBM Corp.
# Copyright 2015 Clinton Knight
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.
from manila import utils
class VersionedMethod(utils.ComparableMixin):
    def __init__(self, name, start_version, end_version, experimental, func):
        """Versioning information for a single method.
        Minimum and maximums are inclusive.
        :param name: Name of the method
        :param start_version: Minimum acceptable version
        :param end_version: Maximum acceptable_version
        :param experimental: True if method is experimental
        :param func: Method to call
        """
        self.name = name
        self.start_version = start_version
        self.end_version = end_version
        self.experimental = experimental
        self.func = func
    def __str__(self):
        args = {
            'name': self.name,
            'start': self.start_version,
            'end': self.end_version
        }
        return ("Version Method %(name)s: min: %(start)s, max: %(end)s" % args)
    def _cmpkey(self):
        """Return the value used by ComparableMixin for rich comparisons."""
        return self.start_version
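# Illustrative usage sketch (the method name, version values, and handler are
# hypothetical; instances order themselves by start_version via ComparableMixin):
#
#     show_v2 = VersionedMethod('show', start_version=v2_0, end_version=v2_5,
#                               experimental=False, func=controller.show)
#     str(show_v2)  # -> "Version Method show: min: 2.0, max: 2.5"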
 | 
	apache-2.0 | 1,038,882,171,824,815,900 | 34.040816 | 79 | 0.65696 | false | 
| 
	cazacov/InternetOfThings | 
	raspberry/lcd2004/lcddriver.py | 
	3 | 
	2469 | 
	import i2c_lib
from time import *
# LCD Address
ADDRESS = 0x27
# commands
LCD_CLEARDISPLAY = 0x01
LCD_RETURNHOME = 0x02
LCD_ENTRYMODESET = 0x04
LCD_DISPLAYCONTROL = 0x08
LCD_CURSORSHIFT = 0x10
LCD_FUNCTIONSET = 0x20
LCD_SETCGRAMADDR = 0x40
LCD_SETDDRAMADDR = 0x80
# flags for display entry mode
LCD_ENTRYRIGHT = 0x00
LCD_ENTRYLEFT = 0x02
LCD_ENTRYSHIFTINCREMENT = 0x01
LCD_ENTRYSHIFTDECREMENT = 0x00
# flags for display on/off control
LCD_DISPLAYON = 0x04
LCD_DISPLAYOFF = 0x00
LCD_CURSORON = 0x02
LCD_CURSOROFF = 0x00
LCD_BLINKON = 0x01
LCD_BLINKOFF = 0x00
# flags for display/cursor shift
LCD_DISPLAYMOVE = 0x08
LCD_CURSORMOVE = 0x00
LCD_MOVERIGHT = 0x04
LCD_MOVELEFT = 0x00
# flags for function set
LCD_8BITMODE = 0x10
LCD_4BITMODE = 0x00
LCD_2LINE = 0x08
LCD_1LINE = 0x00
LCD_5x10DOTS = 0x04
LCD_5x8DOTS = 0x00
# flags for backlight control
LCD_BACKLIGHT = 0x08
LCD_NOBACKLIGHT = 0x00
En = 0b00000100 # Enable bit
Rw = 0b00000010 # Read/Write bit
Rs = 0b00000001 # Register select bit
class lcd:
   #initializes objects and lcd
   def __init__(self):
      self.lcd_device = i2c_lib.i2c_device(ADDRESS)
      self.lcd_write(0x03)
      self.lcd_write(0x03)
      self.lcd_write(0x03)
      self.lcd_write(0x02)
      self.lcd_write(LCD_FUNCTIONSET | LCD_2LINE | LCD_5x8DOTS | LCD_4BITMODE)
      self.lcd_write(LCD_DISPLAYCONTROL | LCD_DISPLAYON)
      self.lcd_write(LCD_CLEARDISPLAY)
      self.lcd_write(LCD_ENTRYMODESET | LCD_ENTRYLEFT)
      sleep(0.2)
   # clocks EN to latch command
   def lcd_strobe(self, data):
      self.lcd_device.write_cmd(data | En | LCD_BACKLIGHT)
      sleep(.0005)
      self.lcd_device.write_cmd(((data & ~En) | LCD_BACKLIGHT))
      sleep(.0001)
   def lcd_write_four_bits(self, data):
      self.lcd_device.write_cmd(data | LCD_BACKLIGHT)
      self.lcd_strobe(data)
   # write a command to lcd
   def lcd_write(self, cmd, mode=0):
      self.lcd_write_four_bits(mode | (cmd & 0xF0))
      self.lcd_write_four_bits(mode | ((cmd << 4) & 0xF0))
   # put string function
   def lcd_display_string(self, string, line):
      if line == 1:
         self.lcd_write(0x80)
      if line == 2:
         self.lcd_write(0xC0)
      if line == 3:
         self.lcd_write(0x94)
      if line == 4:
         self.lcd_write(0xD4)
      for char in string:
         self.lcd_write(ord(char), Rs)
   # clear lcd and set to home
   def lcd_clear(self):
      self.lcd_write(LCD_CLEARDISPLAY)
      self.lcd_write(LCD_RETURNHOME)
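# Minimal usage sketch (assumes a display wired at the default I2C address
# 0x27 configured above; the text and delay are illustrative only):
if __name__ == '__main__':
   display = lcd()
   display.lcd_display_string("Hello, world!", 1)
   display.lcd_display_string("Line two", 2)
   sleep(2)
   display.lcd_clear()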
 | 
	mit | 1,334,149,586,357,087,200 | 23.205882 | 78 | 0.671932 | false | 
| 
	kdwyer/issue-forwarder | 
	issueforwarder/tests.py | 
	1 | 
	2814 | 
	import unittest
from . import issues
class InboundMessageStub(object):
    """
    Stub InboundEmailMessage class.
    (google.appengine.api.mail.InboundEmailMessage)
    """
    def __init__(self):
        self.subject = u'It doesn\'t work'
    def bodies(self, content_type):
        return iter([(u'plain/text', EncodedPayloadStub())])
class EncodedPayloadStub(object):
    """
    Stub EncodedPayload class.
    (google.appengine.api.mail.EncodedPayload)
    """
    def decode(self):
        return u'I clicked on the button but nothing happened.'
class ExtractIssueTitleTestCase(unittest.TestCase):
    def setUp(self):
        self.inbound_message = InboundMessageStub()
    def test_extracts_issue_title(self):
        expected = u'It doesn\'t work'
        result = issues.extract_issue_title(self.inbound_message)
        self.assertEqual(expected, result)
class ExtractIssueBodyTestCase(unittest.TestCase):
    def setUp(self):
        self.inbound_message = InboundMessageStub()
    def test_extracts_issue_body(self):
        expected = u'I clicked on the button but nothing happened.'
        result = issues.extract_issue_body(self.inbound_message)
        self.assertEqual(expected, result)
    def test_decodes_issue_body(self):
        # A real InboundEmailMessage returns an object which returns
        # a string when decode() is called on it.
        result = issues.extract_issue_body(self.inbound_message)
        self.assertTrue(
                isinstance(result, unicode),
                'Expected unicode, got {}'.format(type(result)))
class CreatePayloadTestCase(unittest.TestCase):
    def test_creates_payload(self):
        expected = {'title': 'issue title', 'body': 'issue body'}
        result = issues.create_payload('issue title', 'issue body')
        self.assertEqual(expected, result)
class CreateURLTestCase(unittest.TestCase):
    
    def test_creates_url(self):
        expected = 'https://api.github.com/repos/kdwyer/issue-mailer/issues'
        config = {
                'repo_owner': 'kdwyer',
                'repo_name': 'issue-mailer',
                'base_url': 'https://api.github.com'
        }
        result = issues.create_url(config)
        self.assertEqual(expected, result)
class CreateHeadersTestCase(unittest.TestCase):
    def test_creates_headers(self):
        expected = {
                'Accept': 'application/vnd.github.v3+json',
                'Authorization': 'token abcdef',
                'Content-Type': 'application/json',
                'User-Agent': 'kdwyer-issue-mailer'
        }
        config = {
                'auth_token': 'abcdef',
                'user_agent_string': 'kdwyer-issue-mailer'
        }
        result = issues.create_headers(config)
        self.assertEqual(expected, result)
 | 
	mit | 1,855,969,977,791,344,600 | 27.14 | 76 | 0.625444 | false | 
| 
	jefftc/changlab | 
	Betsy/Betsy/modules/normalize_samples_with_shiftscale.py | 
	1 | 
	2065 | 
	from Module import AbstractModule
class Module(AbstractModule):
    def __init__(self):
        AbstractModule.__init__(self)
    def run(
        self, network, antecedents, out_attributes, user_options, num_cores,
        outfile):
        from genomicode import shiftscalenorm
        import arrayio
        from Betsy import read_label_file
        from genomicode import filelib
        data_node, cls_node = antecedents
        if data_node and cls_node:
            result, label_line, second_line = read_label_file.read(
                cls_node.identifier)
            assert len(result) == 2, 'for shiftscale,there should be only 2 classes'
            M = arrayio.read(data_node.identifier)
            index1 = result[0][0]
            index2 = result[1][0]
            M_1 = M.matrix(None, index1)
            M_2 = M.matrix(None, index2)
            M_y = shiftscalenorm.normalize(M_1, M_2)
            for i in range(M_y.dim()[0]):
                for j in range(M_y.dim()[1]):
                    if str(M_y._X[i][j]) == 'nan':
                        M_y._X[i][j] = M_2._X[i][0]
            for j in range(M.nrow()):
                for i in range(len(index1)):
                    M._X[j][index1[i]] = M_y._X[j][i]
            f = file(outfile, 'w')
            arrayio.tab_delimited_format.write(M, f)
            f.close()
            assert filelib.exists_nz(outfile), (
                'the output file %s for shiftscale fails' % outfile
            )
        
        
        return False
    def name_outfile(self, antecedents, user_options):
        from Betsy import module_utils
        data_node, cls_node = antecedents
        original_file = module_utils.get_inputid(data_node.identifier)
        filename = 'signal_shiftscale_' + original_file + '.tdf'
        return filename
    def set_out_attributes(self, antecedents, out_attributes):
        data_node, cls_node = antecedents
        new_parameters = data_node.data.attributes.copy()
        new_parameters['shiftscale_norm'] = 'yes'
        return new_parameters
    
 | 
	mit | 5,571,050,032,569,931,000 | 34.603448 | 84 | 0.549637 | false | 
| 
	dhanunjaya/neutron | 
	neutron/tests/unit/plugins/ml2/drivers/test_helpers.py | 
	24 | 
	6250 | 
	# Copyright (c) 2014 Thales Services SAS
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.
import fixtures
import mock
from oslo_db import exception as exc
from oslo_log import log as logging
from sqlalchemy.orm import query
import neutron.db.api as db
from neutron.plugins.ml2.drivers import helpers
from neutron.plugins.ml2.drivers import type_vlan
from neutron.tests import base
from neutron.tests.unit import testlib_api
TENANT_NET = 'phys_net2'
VLAN_MIN = 200
VLAN_MAX = 209
VLAN_OUTSIDE = 100
NETWORK_VLAN_RANGES = {
    TENANT_NET: [(VLAN_MIN, VLAN_MAX)],
}
class HelpersTest(testlib_api.SqlTestCase):
    def setUp(self):
        super(HelpersTest, self).setUp()
        self.driver = type_vlan.VlanTypeDriver()
        self.driver.network_vlan_ranges = NETWORK_VLAN_RANGES
        self.driver._sync_vlan_allocations()
        self.session = db.get_session()
        self.useFixture(
            fixtures.FakeLogger(
                name=helpers.__name__,
                format=base.LOG_FORMAT,
                level=logging.DEBUG
            ))
    def check_raw_segment(self, expected, observed):
        for key, value in expected.items():
            self.assertEqual(value, observed[key])
    def test_primary_keys(self):
        self.assertEqual(set(['physical_network', 'vlan_id']),
                         self.driver.primary_keys)
    def test_allocate_specific_unallocated_segment_in_pools(self):
        expected = dict(physical_network=TENANT_NET, vlan_id=VLAN_MIN)
        observed = self.driver.allocate_fully_specified_segment(self.session,
                                                                **expected)
        self.check_raw_segment(expected, observed)
    def test_allocate_specific_allocated_segment_in_pools(self):
        raw_segment = dict(physical_network=TENANT_NET, vlan_id=VLAN_MIN)
        self.driver.allocate_fully_specified_segment(self.session,
                                                     **raw_segment)
        observed = self.driver.allocate_fully_specified_segment(self.session,
                                                                **raw_segment)
        self.assertIsNone(observed)
    def test_allocate_specific_finally_allocated_segment_in_pools(self):
        # Test case: allocate a specific unallocated segment in pools but
        # the segment is allocated concurrently between select and update
        raw_segment = dict(physical_network=TENANT_NET, vlan_id=VLAN_MIN)
        with mock.patch.object(query.Query, 'update', return_value=0):
            observed = self.driver.allocate_fully_specified_segment(
                self.session, **raw_segment)
            self.assertIsNone(observed)
    def test_allocate_specific_unallocated_segment_outside_pools(self):
        expected = dict(physical_network=TENANT_NET, vlan_id=VLAN_OUTSIDE)
        observed = self.driver.allocate_fully_specified_segment(self.session,
                                                                **expected)
        self.check_raw_segment(expected, observed)
    def test_allocate_specific_allocated_segment_outside_pools(self):
        raw_segment = dict(physical_network=TENANT_NET, vlan_id=VLAN_OUTSIDE)
        self.driver.allocate_fully_specified_segment(self.session,
                                                     **raw_segment)
        observed = self.driver.allocate_fully_specified_segment(self.session,
                                                                **raw_segment)
        self.assertIsNone(observed)
    def test_allocate_specific_finally_unallocated_segment_outside_pools(self):
        # Test case: allocate a specific allocated segment in pools but
        # the segment is concurrently unallocated after select or update
        expected = dict(physical_network=TENANT_NET, vlan_id=VLAN_MIN)
        with mock.patch.object(self.driver.model, 'save'):
            observed = self.driver.allocate_fully_specified_segment(
                self.session, **expected)
            self.check_raw_segment(expected, observed)
    def test_allocate_partial_segment_without_filters(self):
        expected = dict(physical_network=TENANT_NET)
        observed = self.driver.allocate_partially_specified_segment(
            self.session)
        self.check_raw_segment(expected, observed)
    def test_allocate_partial_segment_with_filter(self):
        expected = dict(physical_network=TENANT_NET)
        observed = self.driver.allocate_partially_specified_segment(
            self.session, **expected)
        self.check_raw_segment(expected, observed)
    def test_allocate_partial_segment_no_resource_available(self):
        for i in range(VLAN_MIN, VLAN_MAX + 1):
            self.driver.allocate_partially_specified_segment(self.session)
        observed = self.driver.allocate_partially_specified_segment(
            self.session)
        self.assertIsNone(observed)
    def test_allocate_partial_segment_outside_pools(self):
        raw_segment = dict(physical_network='other_phys_net')
        observed = self.driver.allocate_partially_specified_segment(
            self.session, **raw_segment)
        self.assertIsNone(observed)
    def test_allocate_partial_segment_first_attempt_fails(self):
        expected = dict(physical_network=TENANT_NET)
        with mock.patch.object(query.Query, 'update', side_effect=[0, 1]):
            self.assertRaises(
                exc.RetryRequest,
                self.driver.allocate_partially_specified_segment,
                self.session, **expected)
            observed = self.driver.allocate_partially_specified_segment(
                self.session, **expected)
            self.check_raw_segment(expected, observed)
 | 
	apache-2.0 | 865,339,620,647,921,400 | 42.706294 | 79 | 0.6448 | false | 
| 
	acquia/Diamond | 
	src/collectors/monit/monit.py | 
	25 | 
	4568 | 
	# coding=utf-8
"""
Collect the monit stats and report on cpu/memory for monitored processes
#### Dependencies
 * monit serving up /_status
"""
import urllib2
import base64
from xml.dom.minidom import parseString
import diamond.collector
from diamond.collector import str_to_bool
class MonitCollector(diamond.collector.Collector):
    def get_default_config_help(self):
        config_help = super(MonitCollector, self).get_default_config_help()
        config_help.update({
            'send_totals': 'Send cpu and memory totals',
        })
        return config_help
    def get_default_config(self):
        """
        Returns the default collector settings
        """
        config = super(MonitCollector, self).get_default_config()
        config.update({
            'host':         '127.0.0.1',
            'port':         2812,
            'user':         'monit',
            'passwd':       'monit',
            'path':         'monit',
            'byte_unit':    ['byte'],
            'send_totals':  False,
        })
        return config
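    # A hypothetical /etc/diamond/collectors/MonitCollector.conf overriding the
    # defaults above (keys mirror get_default_config; the values shown are
    # illustrative only):
    #
    #     enabled = True
    #     host = 127.0.0.1
    #     port = 2812
    #     user = monit
    #     passwd = secret
    #     send_totals = True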
    def collect(self):
        url = 'http://%s:%i/_status?format=xml' % (self.config['host'],
                                                   int(self.config['port']))
        try:
            request = urllib2.Request(url)
            #
            # shouldn't need to check this
            base64string = base64.encodestring('%s:%s' % (
                self.config['user'], self.config['passwd'])).replace('\n', '')
            request.add_header("Authorization", "Basic %s" % base64string)
            response = urllib2.urlopen(request)
        except urllib2.HTTPError, err:
            self.log.error("%s: %s", err, url)
            return
        metrics = {}
        try:
            dom = parseString("".join(response.readlines()))
        except:
            self.log.error("Got an empty response from the monit server")
            return
        for svc in dom.getElementsByTagName('service'):
            if int(svc.getAttribute('type')) == 3:
                name = svc.getElementsByTagName('name')[0].firstChild.data
                status = svc.getElementsByTagName('status')[0].firstChild.data
                monitor = svc.getElementsByTagName(
                    'monitor')[0].firstChild.data
                if status == '0' and monitor == '1':
                    try:
                        uptime = svc.getElementsByTagName(
                            'uptime')[0].firstChild.data
                        metrics["%s.uptime" % name] = uptime
                        cpu = svc.getElementsByTagName(
                            'cpu')[0].getElementsByTagName(
                            'percent')[0].firstChild.data
                        metrics["%s.cpu.percent" % name] = cpu
                        if str_to_bool(self.config['send_totals']):
                            cpu_total = svc.getElementsByTagName(
                                'cpu')[0].getElementsByTagName(
                                'percenttotal')[0].firstChild.data
                            metrics["%s.cpu.percent_total" % name] = cpu_total
                        mem = int(svc.getElementsByTagName(
                            'memory')[0].getElementsByTagName(
                            'kilobyte')[0].firstChild.data)
                        for unit in self.config['byte_unit']:
                            metrics["%s.memory.%s_usage" % (name, unit)] = (
                                diamond.convertor.binary.convert(
                                    value=mem,
                                    oldUnit='kilobyte',
                                    newUnit=unit))
                        metrics["%s.uptime" % name] = uptime
                        if str_to_bool(self.config['send_totals']):
                            mem_total = int(svc.getElementsByTagName(
                                'memory')[0].getElementsByTagName(
                                'kilobytetotal')[0].firstChild.data)
                            for unit in self.config['byte_unit']:
                                metrics["%s.memory_total.%s_usage" % (
                                    name, unit)] = (
                                    diamond.convertor.binary.convert(
                                        value=mem_total,
                                        oldUnit='kilobyte',
                                        newUnit=unit))
                    except:
                        pass
        for key in metrics:
            self.publish(key, metrics[key])
 | 
	mit | 2,472,413,119,821,895,000 | 37.711864 | 78 | 0.459501 | false | 
| 
	janezhango/BigDataMachineLearning | 
	py/testdir_single_jvm/test_GLM2_hastie_shuffle.py | 
	2 | 
	4276 | 
	# Dataset created from this:
# Elements of Statistical Learning 2nd Ed.; Hastie, Tibshirani, Friedman; Feb 2011
# example 10.2 page 357
# Ten features, standard independent Gaussian. Target y is:
#   y[i] = 1 if sum(X[i]**2) > 9.34 else -1
# 9.34 is the median of a chi-squared random variable with 10 degrees of freedom 
# (sum of squares of 10 standard Gaussians)
# http://www.stanford.edu/~hastie/local.ftp/Springer/ESLII_print5.pdf
# from sklearn.datasets import make_hastie_10_2
# import numpy as np
# i = 1000000
# f = 10
# (X,y) = make_hastie_10_2(n_samples=i,random_state=None)
# y.shape = (i,1)
# Y = np.hstack((X,y))
# np.savetxt('./1mx' + str(f) + '_hastie_10_2.data', Y, delimiter=',', fmt='%.2f');
import unittest, time, sys, copy
sys.path.extend(['.','..','py'])
import h2o, h2o_cmd, h2o_glm, h2o_util, h2o_hosts, h2o_import as h2i
def glm_doit(self, csvFilename, bucket, csvPathname, timeoutSecs=30):
    print "\nStarting GLM of", csvFilename
    parseResult = h2i.import_parse(bucket=bucket, path=csvPathname, 
        hex_key=csvFilename + ".hex", schema='put', timeoutSecs=10)
    y = 10
    # Took n_folds out, because GLM doesn't include n_folds time and it's slow
    # wanted to compare GLM time to my measured time
    # hastie has two values, 1 and -1. need to use case for one of them
    kwargs = {'response':  y, 'alpha': 0, 'family': 'binomial'}
    h2o.nodes[0].to_enum(src_key=parseResult['destination_key'], column_index=y+1)
    start = time.time()
    glm = h2o_cmd.runGLM(parseResult=parseResult, timeoutSecs=timeoutSecs, **kwargs)
    print "GLM in",  (time.time() - start), "secs (python measured)"
    h2o_glm.simpleCheckGLM(self, glm, "C8", **kwargs)
    # compare this glm to the first one. since the files are replications, the results
    # should be similar?
    glm_model = glm['glm_model']
    validation = glm_model['submodels'][0]['validation']
    if self.validation1:
        h2o_glm.compareToFirstGlm(self, 'auc', validation, self.validation1)
    else:
        self.validation1 = copy.deepcopy(validation)
class Basic(unittest.TestCase):
    def tearDown(self):
        h2o.check_sandbox_for_errors()
    @classmethod
    def setUpClass(cls):
        global localhost
        localhost = h2o.decide_if_localhost()
        if (localhost):
            h2o.build_cloud(1)
        else:
            h2o_hosts.build_cloud_with_hosts(1)
        global SYNDATASETS_DIR
        SYNDATASETS_DIR = h2o.make_syn_dir()
    @classmethod
    def tearDownClass(cls):
        h2o.tear_down_cloud()
    validation1 = {}
    def test_GLM2_hastie_shuffle(self):
        h2o.beta_features = True
        # gunzip it and cat it to create 2x and 4x replications in SYNDATASETS_DIR
        # FIX! eventually we'll compare the 1x, 2x and 4x results like we do
        # in other tests. (catdata?)
        # This test also adds file shuffling, to see that row order doesn't matter
        csvFilename = "1mx10_hastie_10_2.data.gz"
        bucket = 'home-0xdiag-datasets'
        csvPathname = 'standard' + '/' + csvFilename
        fullPathname = h2i.find_folder_and_filename(bucket, csvPathname, returnFullPath=True)
        glm_doit(self, csvFilename, bucket, csvPathname, timeoutSecs=30)
        filename1x = "hastie_1x.data"
        pathname1x = SYNDATASETS_DIR + '/' + filename1x
        h2o_util.file_gunzip(fullPathname, pathname1x)
        
        filename1xShuf = "hastie_1x.data_shuf"
        pathname1xShuf = SYNDATASETS_DIR + '/' + filename1xShuf
        h2o_util.file_shuffle(pathname1x, pathname1xShuf)
        filename2x = "hastie_2x.data"
        pathname2x = SYNDATASETS_DIR + '/' + filename2x
        h2o_util.file_cat(pathname1xShuf, pathname1xShuf, pathname2x)
        filename2xShuf = "hastie_2x.data_shuf"
        pathname2xShuf = SYNDATASETS_DIR + '/' + filename2xShuf
        h2o_util.file_shuffle(pathname2x, pathname2xShuf)
        glm_doit(self, filename2xShuf, None, pathname2xShuf, timeoutSecs=45)
        # too big to shuffle?
        filename4x = "hastie_4x.data"
        pathname4x = SYNDATASETS_DIR + '/' + filename4x
        h2o_util.file_cat(pathname2xShuf,pathname2xShuf,pathname4x)
        glm_doit(self,filename4x, None, pathname4x, timeoutSecs=120)
if __name__ == '__main__':
    h2o.unit_main()
 | 
	apache-2.0 | 5,590,009,522,787,584,000 | 38.592593 | 93 | 0.658326 | false | 
| 
	apache/beam | 
	sdks/python/apache_beam/runners/portability/stager_test.py | 
	5 | 
	25459 | 
	#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit tests for the stager module."""
# pytype: skip-file
import logging
import os
import shutil
import sys
import tempfile
import unittest
from typing import List
import mock
import pytest
from apache_beam.io.filesystems import FileSystems
from apache_beam.options.pipeline_options import DebugOptions
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.runners.internal import names
from apache_beam.runners.portability import stager
_LOGGER = logging.getLogger(__name__)
class StagerTest(unittest.TestCase):
  def setUp(self):
    self._temp_dir = None
    self.stager = TestStager()
  def tearDown(self):
    if self._temp_dir:
      shutil.rmtree(self._temp_dir)
    self.stager = None
  def make_temp_dir(self):
    if self._temp_dir is None:
      self._temp_dir = tempfile.mkdtemp()
    return tempfile.mkdtemp(dir=self._temp_dir)
  def update_options(self, options):
    setup_options = options.view_as(SetupOptions)
    setup_options.sdk_location = ''
  def create_temp_file(self, path, contents):
    with open(path, 'w') as f:
      f.write(contents)
      return f.name
  # We can not rely on actual remote file systems paths hence making
  # '/tmp/remote/' a new remote path.
  def is_remote_path(self, path):
    return path.startswith('/tmp/remote/')
  remote_copied_files = []  # type: List[str]
  def file_copy(self, from_path, to_path):
    if self.is_remote_path(from_path):
      self.remote_copied_files.append(from_path)
      _, from_name = os.path.split(from_path)
      if os.path.isdir(to_path):
        to_path = os.path.join(to_path, from_name)
      self.create_temp_file(to_path, 'nothing')
      _LOGGER.info('Fake copied remote file: %s to %s', from_path, to_path)
    elif self.is_remote_path(to_path):
      _LOGGER.info('Faking upload_file(%s, %s)', from_path, to_path)
    else:
      shutil.copyfile(from_path, to_path)
  def populate_requirements_cache(self, requirements_file, cache_dir):
    _ = requirements_file
    self.create_temp_file(os.path.join(cache_dir, 'abc.txt'), 'nothing')
    self.create_temp_file(os.path.join(cache_dir, 'def.txt'), 'nothing')
  def build_fake_pip_download_command_handler(self, has_wheels):
    """A stub for apache_beam.utils.processes.check_output that imitates pip.
      Args:
        has_wheels: Whether pip fake should have a whl distribution of packages.
      """
    def pip_fake(args):
      """Fakes fetching a package from pip by creating a temporary file.
          Args:
            args: a complete list of command line arguments to invoke pip.
              The fake is sensitive to the order of the arguments.
              Supported commands:
              1) Download SDK sources file:
              python -m pip download --dest /tmp/dir apache-beam==2.0.0 \
                  --no-deps --no-binary :all:
              2) Download SDK binary wheel file:
              python -m pip download --dest /tmp/dir apache-beam==2.0.0 \
                  --no-deps --only-binary :all: --python-version 27 \
                  --implementation cp --abi cp27mu --platform manylinux1_x86_64
          """
      package_file = None
      if len(args) >= 8:
        # package_name==x.y.z
        if '==' in args[6]:
          distribution_name = args[6][0:args[6].find('==')]
          distribution_version = args[6][args[6].find('==') + 2:]
          if args[8] == '--no-binary':
            package_file = '%s-%s.zip' % (
                distribution_name, distribution_version)
          elif args[8] == '--only-binary' and len(args) >= 18:
            if not has_wheels:
              # Imitate the case when desired wheel distribution is not in PyPI.
              raise RuntimeError('No matching distribution.')
            # Per PEP-0427 in wheel filenames non-alphanumeric characters
            # in distribution name are replaced with underscore.
            distribution_name = distribution_name.replace('-', '_')
            package_file = '%s-%s-%s%s-%s-%s.whl' % (
                distribution_name,
                distribution_version,
                args[13],  # implementation
                args[11],  # python version
                args[15],  # abi tag
                args[17]  # platform
            )
      assert package_file, 'Pip fake does not support the command: ' + str(args)
      self.create_temp_file(
          FileSystems.join(args[5], package_file), 'Package content.')
    return pip_fake
  def test_no_staging_location(self):
    with self.assertRaises(RuntimeError) as cm:
      self.stager.stage_job_resources([], staging_location=None)
    self.assertEqual(
        'The staging_location must be specified.', cm.exception.args[0])
  def test_no_main_session(self):
    staging_dir = self.make_temp_dir()
    options = PipelineOptions()
    options.view_as(SetupOptions).save_main_session = False
    self.update_options(options)
    self.assertEqual([],
                     self.stager.create_and_stage_job_resources(
                         options, staging_location=staging_dir)[1])
  # xdist adds unpicklable modules to the main session.
  @pytest.mark.no_xdist
  @unittest.skipIf(
      sys.platform == "win32" and sys.version_info < (3, 8),
      'BEAM-10987: pytest on Windows pulls in a zipimporter, unpicklable '
      'before py3.8')
  def test_with_main_session(self):
    staging_dir = self.make_temp_dir()
    options = PipelineOptions()
    options.view_as(SetupOptions).save_main_session = True
    self.update_options(options)
    self.assertEqual([names.PICKLED_MAIN_SESSION_FILE],
                     self.stager.create_and_stage_job_resources(
                         options, staging_location=staging_dir)[1])
    self.assertTrue(
        os.path.isfile(
            os.path.join(staging_dir, names.PICKLED_MAIN_SESSION_FILE)))
  def test_default_resources(self):
    staging_dir = self.make_temp_dir()
    options = PipelineOptions()
    self.update_options(options)
    self.assertEqual([],
                     self.stager.create_and_stage_job_resources(
                         options, staging_location=staging_dir)[1])
  def test_with_requirements_file(self):
    staging_dir = self.make_temp_dir()
    requirements_cache_dir = self.make_temp_dir()
    source_dir = self.make_temp_dir()
    options = PipelineOptions()
    self.update_options(options)
    options.view_as(SetupOptions).requirements_cache = requirements_cache_dir
    options.view_as(SetupOptions).requirements_file = os.path.join(
        source_dir, stager.REQUIREMENTS_FILE)
    self.create_temp_file(
        os.path.join(source_dir, stager.REQUIREMENTS_FILE), 'nothing')
    self.assertEqual(
        sorted([stager.REQUIREMENTS_FILE, 'abc.txt', 'def.txt']),
        sorted(
            self.stager.create_and_stage_job_resources(
                options,
                populate_requirements_cache=self.populate_requirements_cache,
                staging_location=staging_dir)[1]))
    self.assertTrue(
        os.path.isfile(os.path.join(staging_dir, stager.REQUIREMENTS_FILE)))
  def test_with_pypi_requirements(self):
    staging_dir = self.make_temp_dir()
    requirements_cache_dir = self.make_temp_dir()
    options = PipelineOptions()
    self.update_options(options)
    options.view_as(SetupOptions).requirements_cache = requirements_cache_dir
    resources = self.stager.create_and_stage_job_resources(
        options,
        pypi_requirements=['nothing>=1.0,<2.0'],
        populate_requirements_cache=self.populate_requirements_cache,
        staging_location=staging_dir)[1]
    self.assertEqual(3, len(resources))
    self.assertTrue({'abc.txt', 'def.txt'} <= set(resources))
    generated_requirements = (set(resources) - {'abc.txt', 'def.txt'}).pop()
    with open(os.path.join(staging_dir, generated_requirements)) as f:
      data = f.read()
    self.assertEqual('nothing>=1.0,<2.0', data)
    self.assertTrue(os.path.isfile(os.path.join(staging_dir, 'abc.txt')))
    self.assertTrue(os.path.isfile(os.path.join(staging_dir, 'def.txt')))
  def test_requirements_file_not_present(self):
    staging_dir = self.make_temp_dir()
    with self.assertRaises(RuntimeError) as cm:
      options = PipelineOptions()
      self.update_options(options)
      options.view_as(SetupOptions).requirements_file = 'nosuchfile'
      self.stager.create_and_stage_job_resources(
          options,
          populate_requirements_cache=self.populate_requirements_cache,
          staging_location=staging_dir)
    self.assertEqual(
        cm.exception.args[0],
        'The file %s cannot be found. It was specified in the '
        '--requirements_file command line option.' % 'nosuchfile')
  def test_with_requirements_file_and_cache(self):
    staging_dir = self.make_temp_dir()
    source_dir = self.make_temp_dir()
    options = PipelineOptions()
    self.update_options(options)
    options.view_as(SetupOptions).requirements_file = os.path.join(
        source_dir, stager.REQUIREMENTS_FILE)
    options.view_as(SetupOptions).requirements_cache = self.make_temp_dir()
    self.create_temp_file(
        os.path.join(source_dir, stager.REQUIREMENTS_FILE), 'nothing')
    self.assertEqual(
        sorted([stager.REQUIREMENTS_FILE, 'abc.txt', 'def.txt']),
        sorted(
            self.stager.create_and_stage_job_resources(
                options,
                populate_requirements_cache=self.populate_requirements_cache,
                staging_location=staging_dir)[1]))
    self.assertTrue(
        os.path.isfile(os.path.join(staging_dir, stager.REQUIREMENTS_FILE)))
    self.assertTrue(os.path.isfile(os.path.join(staging_dir, 'abc.txt')))
    self.assertTrue(os.path.isfile(os.path.join(staging_dir, 'def.txt')))
  def test_setup_file_not_present(self):
    staging_dir = self.make_temp_dir()
    options = PipelineOptions()
    self.update_options(options)
    options.view_as(SetupOptions).setup_file = 'nosuchfile'
    with self.assertRaises(RuntimeError) as cm:
      self.stager.create_and_stage_job_resources(
          options, staging_location=staging_dir)
    self.assertEqual(
        cm.exception.args[0],
        'The file %s cannot be found. It was specified in the '
        '--setup_file command line option.' % 'nosuchfile')
  def test_setup_file_not_named_setup_dot_py(self):
    staging_dir = self.make_temp_dir()
    source_dir = self.make_temp_dir()
    options = PipelineOptions()
    self.update_options(options)
    options.view_as(SetupOptions).setup_file = (
        os.path.join(source_dir, 'xyz-setup.py'))
    self.create_temp_file(os.path.join(source_dir, 'xyz-setup.py'), 'notused')
    with self.assertRaises(RuntimeError) as cm:
      self.stager.create_and_stage_job_resources(
          options, staging_location=staging_dir)
    self.assertTrue(
        cm.exception.args[0].startswith(
            'The --setup_file option expects the full path to a file named '
            'setup.py instead of '))
  def test_sdk_location_default(self):
    staging_dir = self.make_temp_dir()
    options = PipelineOptions()
    self.update_options(options)
    options.view_as(SetupOptions).sdk_location = 'default'
    with mock.patch(
        'apache_beam.utils.processes.check_output',
        self.build_fake_pip_download_command_handler(has_wheels=False)):
      _, staged_resources = self.stager.create_and_stage_job_resources(
          options, temp_dir=self.make_temp_dir(), staging_location=staging_dir)
    self.assertEqual([names.STAGED_SDK_SOURCES_FILENAME], staged_resources)
    with open(os.path.join(staging_dir,
                           names.STAGED_SDK_SOURCES_FILENAME)) as f:
      self.assertEqual(f.read(), 'Package content.')
  def test_sdk_location_default_with_wheels(self):
    staging_dir = self.make_temp_dir()
    options = PipelineOptions()
    self.update_options(options)
    options.view_as(SetupOptions).sdk_location = 'default'
    with mock.patch(
        'apache_beam.utils.processes.check_output',
        self.build_fake_pip_download_command_handler(has_wheels=True)):
      _, staged_resources = self.stager.create_and_stage_job_resources(
          options, temp_dir=self.make_temp_dir(), staging_location=staging_dir)
      self.assertEqual(len(staged_resources), 2)
      self.assertEqual(staged_resources[0], names.STAGED_SDK_SOURCES_FILENAME)
      # Exact name depends on the version of the SDK.
      self.assertTrue(staged_resources[1].endswith('whl'))
      for name in staged_resources:
        with open(os.path.join(staging_dir, name)) as f:
          self.assertEqual(f.read(), 'Package content.')
  def test_sdk_location_local_directory(self):
    staging_dir = self.make_temp_dir()
    sdk_location = self.make_temp_dir()
    self.create_temp_file(
        os.path.join(sdk_location, names.STAGED_SDK_SOURCES_FILENAME),
        'Package content.')
    options = PipelineOptions()
    self.update_options(options)
    options.view_as(SetupOptions).sdk_location = sdk_location
    self.assertEqual([names.STAGED_SDK_SOURCES_FILENAME],
                     self.stager.create_and_stage_job_resources(
                         options, staging_location=staging_dir)[1])
    tarball_path = os.path.join(staging_dir, names.STAGED_SDK_SOURCES_FILENAME)
    with open(tarball_path) as f:
      self.assertEqual(f.read(), 'Package content.')
  def test_sdk_location_local_source_file(self):
    staging_dir = self.make_temp_dir()
    sdk_directory = self.make_temp_dir()
    sdk_filename = 'apache-beam-3.0.0.tar.gz'
    sdk_location = os.path.join(sdk_directory, sdk_filename)
    self.create_temp_file(sdk_location, 'Package content.')
    options = PipelineOptions()
    self.update_options(options)
    options.view_as(SetupOptions).sdk_location = sdk_location
    self.assertEqual([names.STAGED_SDK_SOURCES_FILENAME],
                     self.stager.create_and_stage_job_resources(
                         options, staging_location=staging_dir)[1])
    tarball_path = os.path.join(staging_dir, names.STAGED_SDK_SOURCES_FILENAME)
    with open(tarball_path) as f:
      self.assertEqual(f.read(), 'Package content.')
  def test_sdk_location_local_wheel_file(self):
    staging_dir = self.make_temp_dir()
    sdk_directory = self.make_temp_dir()
    sdk_filename = 'apache_beam-1.0.0-cp27-cp27mu-manylinux1_x86_64.whl'
    sdk_location = os.path.join(sdk_directory, sdk_filename)
    self.create_temp_file(sdk_location, 'Package content.')
    options = PipelineOptions()
    self.update_options(options)
    options.view_as(SetupOptions).sdk_location = sdk_location
    self.assertEqual([sdk_filename],
                     self.stager.create_and_stage_job_resources(
                         options, staging_location=staging_dir)[1])
    tarball_path = os.path.join(staging_dir, sdk_filename)
    with open(tarball_path) as f:
      self.assertEqual(f.read(), 'Package content.')
  def test_sdk_location_local_directory_not_present(self):
    staging_dir = self.make_temp_dir()
    sdk_location = 'nosuchdir'
    with self.assertRaises(RuntimeError) as cm:
      options = PipelineOptions()
      self.update_options(options)
      options.view_as(SetupOptions).sdk_location = sdk_location
      self.stager.create_and_stage_job_resources(
          options, staging_location=staging_dir)
    self.assertEqual(
        'The file "%s" cannot be found. Its '
        'location was specified by the --sdk_location command-line option.' %
        sdk_location,
        cm.exception.args[0])
  @mock.patch(
      'apache_beam.runners.portability.stager_test.TestStager.stage_artifact')
  @mock.patch(
      'apache_beam.runners.portability.stager_test.stager.Stager._download_file'
  )
  def test_sdk_location_remote_source_file(self, *unused_mocks):
    staging_dir = self.make_temp_dir()
    sdk_location = 'gs://my-gcs-bucket/tarball.tar.gz'
    options = PipelineOptions()
    self.update_options(options)
    options.view_as(SetupOptions).sdk_location = sdk_location
    self.assertEqual([names.STAGED_SDK_SOURCES_FILENAME],
                     self.stager.create_and_stage_job_resources(
                         options, staging_location=staging_dir)[1])
  def test_sdk_location_remote_wheel_file(self, *unused_mocks):
    staging_dir = self.make_temp_dir()
    sdk_filename = 'apache_beam-1.0.0-cp27-cp27mu-manylinux1_x86_64.whl'
    sdk_location = 'https://storage.googleapis.com/my-gcs-bucket/' + \
                   sdk_filename
    options = PipelineOptions()
    self.update_options(options)
    options.view_as(SetupOptions).sdk_location = sdk_location
    def file_download(_, to_path):
      with open(to_path, 'w') as f:
        f.write('Package content.')
      return to_path
    with mock.patch('apache_beam.runners.portability.stager_test'
                    '.stager.Stager._download_file',
                    staticmethod(file_download)):
      self.assertEqual([sdk_filename],
                       self.stager.create_and_stage_job_resources(
                           options, staging_location=staging_dir)[1])
    wheel_file_path = os.path.join(staging_dir, sdk_filename)
    with open(wheel_file_path) as f:
      self.assertEqual(f.read(), 'Package content.')
  def test_sdk_location_http(self):
    staging_dir = self.make_temp_dir()
    sdk_location = 'http://storage.googleapis.com/my-gcs-bucket/tarball.tar.gz'
    options = PipelineOptions()
    self.update_options(options)
    options.view_as(SetupOptions).sdk_location = sdk_location
    def file_download(_, to_path):
      with open(to_path, 'w') as f:
        f.write('Package content.')
      return to_path
    with mock.patch('apache_beam.runners.portability.stager_test'
                    '.stager.Stager._download_file',
                    staticmethod(file_download)):
      self.assertEqual([names.STAGED_SDK_SOURCES_FILENAME],
                       self.stager.create_and_stage_job_resources(
                           options, staging_location=staging_dir)[1])
    tarball_path = os.path.join(staging_dir, names.STAGED_SDK_SOURCES_FILENAME)
    with open(tarball_path) as f:
      self.assertEqual(f.read(), 'Package content.')
  def test_with_extra_packages(self):
    staging_dir = self.make_temp_dir()
    source_dir = self.make_temp_dir()
    self.create_temp_file(os.path.join(source_dir, 'abc.tar.gz'), 'nothing')
    self.create_temp_file(os.path.join(source_dir, 'xyz.tar.gz'), 'nothing')
    self.create_temp_file(os.path.join(source_dir, 'xyz2.tar'), 'nothing')
    self.create_temp_file(os.path.join(source_dir, 'whl.whl'), 'nothing')
    self.create_temp_file(
        os.path.join(source_dir, stager.EXTRA_PACKAGES_FILE), 'nothing')
    options = PipelineOptions()
    self.update_options(options)
    options.view_as(SetupOptions).extra_packages = [
        os.path.join(source_dir, 'abc.tar.gz'),
        os.path.join(source_dir, 'xyz.tar.gz'),
        os.path.join(source_dir, 'xyz2.tar'),
        os.path.join(source_dir, 'whl.whl'),
        '/tmp/remote/remote_file.tar.gz'
    ]
    self.remote_copied_files = []
    with mock.patch('apache_beam.runners.portability.stager_test'
                    '.stager.Stager._download_file',
                    staticmethod(self.file_copy)):
      with mock.patch('apache_beam.runners.portability.stager_test'
                      '.stager.Stager._is_remote_path',
                      staticmethod(self.is_remote_path)):
        self.assertEqual([
            'abc.tar.gz',
            'xyz.tar.gz',
            'xyz2.tar',
            'whl.whl',
            'remote_file.tar.gz',
            stager.EXTRA_PACKAGES_FILE
        ],
                         self.stager.create_and_stage_job_resources(
                             options, staging_location=staging_dir)[1])
    with open(os.path.join(staging_dir, stager.EXTRA_PACKAGES_FILE)) as f:
      self.assertEqual([
          'abc.tar.gz\n',
          'xyz.tar.gz\n',
          'xyz2.tar\n',
          'whl.whl\n',
          'remote_file.tar.gz\n'
      ],
                       f.readlines())
    self.assertEqual(['/tmp/remote/remote_file.tar.gz'],
                     self.remote_copied_files)
  def test_with_extra_packages_missing_files(self):
    staging_dir = self.make_temp_dir()
    with self.assertRaises(RuntimeError) as cm:
      options = PipelineOptions()
      self.update_options(options)
      options.view_as(SetupOptions).extra_packages = ['nosuchfile.tar.gz']
      self.stager.create_and_stage_job_resources(
          options, staging_location=staging_dir)
    self.assertEqual(
        cm.exception.args[0],
        'The file %s cannot be found. It was specified in the '
        '--extra_packages command line option.' % 'nosuchfile.tar.gz')
  def test_with_extra_packages_invalid_file_name(self):
    staging_dir = self.make_temp_dir()
    source_dir = self.make_temp_dir()
    self.create_temp_file(os.path.join(source_dir, 'abc.tgz'), 'nothing')
    with self.assertRaises(RuntimeError) as cm:
      options = PipelineOptions()
      self.update_options(options)
      options.view_as(SetupOptions).extra_packages = [
          os.path.join(source_dir, 'abc.tgz')
      ]
      self.stager.create_and_stage_job_resources(
          options, staging_location=staging_dir)
    self.assertEqual(
        cm.exception.args[0],
        'The --extra_package option expects a full path ending with '
        '".tar", ".tar.gz", ".whl" or ".zip" '
        'instead of %s' % os.path.join(source_dir, 'abc.tgz'))
  def test_with_jar_packages_missing_files(self):
    staging_dir = self.make_temp_dir()
    with self.assertRaises(RuntimeError) as cm:
      options = PipelineOptions()
      self.update_options(options)
      options.view_as(DebugOptions).experiments = [
          'jar_packages=nosuchfile.jar'
      ]
      self.stager.create_and_stage_job_resources(
          options, staging_location=staging_dir)
    self.assertEqual(
        cm.exception.args[0],
        'The file %s cannot be found. It was specified in the '
        '--experiment=\'jar_packages=\' command line option.' %
        'nosuchfile.jar')
  def test_with_jar_packages_invalid_file_name(self):
    staging_dir = self.make_temp_dir()
    source_dir = self.make_temp_dir()
    self.create_temp_file(os.path.join(source_dir, 'abc.tgz'), 'nothing')
    with self.assertRaises(RuntimeError) as cm:
      options = PipelineOptions()
      self.update_options(options)
      options.view_as(DebugOptions).experiments = [
          'jar_packages=' + os.path.join(source_dir, 'abc.tgz')
      ]
      self.stager.create_and_stage_job_resources(
          options, staging_location=staging_dir)
    self.assertEqual(
        cm.exception.args[0],
        'The --experiment=\'jar_packages=\' option expects a full path ending '
        'with ".jar" instead of %s' % os.path.join(source_dir, 'abc.tgz'))
  def test_with_jar_packages(self):
    staging_dir = self.make_temp_dir()
    source_dir = self.make_temp_dir()
    self.create_temp_file(os.path.join(source_dir, 'abc.jar'), 'nothing')
    self.create_temp_file(os.path.join(source_dir, 'xyz.jar'), 'nothing')
    self.create_temp_file(os.path.join(source_dir, 'ijk.jar'), 'nothing')
    options = PipelineOptions()
    self.update_options(options)
    options.view_as(DebugOptions).experiments = [
        'jar_packages=%s,%s,%s,%s' % (
            os.path.join(source_dir, 'abc.jar'),
            os.path.join(source_dir, 'xyz.jar'),
            os.path.join(source_dir, 'ijk.jar'),
            '/tmp/remote/remote.jar')
    ]
    self.remote_copied_files = []
    with mock.patch('apache_beam.runners.portability.stager_test'
                    '.stager.Stager._download_file',
                    staticmethod(self.file_copy)):
      with mock.patch('apache_beam.runners.portability.stager_test'
                      '.stager.Stager._is_remote_path',
                      staticmethod(self.is_remote_path)):
        self.assertEqual(['abc.jar', 'xyz.jar', 'ijk.jar', 'remote.jar'],
                         self.stager.create_and_stage_job_resources(
                             options, staging_location=staging_dir)[1])
    self.assertEqual(['/tmp/remote/remote.jar'], self.remote_copied_files)
class TestStager(stager.Stager):
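  # Test double for stager.Stager used by the tests above: "staging" an
  # artifact is a plain local file copy into the staging directory, and
  # committing the manifest is a no-op.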
  def stage_artifact(self, local_path_to_artifact, artifact_name):
    _LOGGER.info(
        'File copy from %s to %s.', local_path_to_artifact, artifact_name)
    shutil.copyfile(local_path_to_artifact, artifact_name)
  def commit_manifest(self):
    pass
if __name__ == '__main__':
  logging.getLogger().setLevel(logging.INFO)
  unittest.main()
 | 
	apache-2.0 | 2,944,914,557,811,300,400 | 38.532609 | 80 | 0.645705 | false | 
| 
	rootmos/qcd | 
	optionparser.py | 
	1 | 
	6944 | 
	#!/usr/bin/env python
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
"""An abstraction layer wrapping the parsing of command line options."""
__author__ = "Gustav Behm"
__copyright__ = "Copyright 2014, Gustav Behm"
__credits__ = []
__license__ = "GPL"
__version__ = "0.1"
__status__ = "Development"
import getopt
import inspect
import sys
import tabularize
class Option:
    """The base class for an option"""
    def __init__ (self, short_name, long_name, description, syntax = ""):
        """The constructor: specify names, short and long, and a description of the option."""
        self.option = short_name
        self.long_option = long_name
        self.description = description
        self.syntax = syntax
    def describe (self):
        """Method for making the option describe itself"""
        columns = []
        if len (self.syntax) > 0:
            columns.append ("-" + self.option + ", --" + self.long_option + ":")
            columns.append ("-" + self.option + " " + self.syntax)
        else:
            columns.append ("-" + self.option + ", --" + self.long_option)
            columns.append ("")
        columns.append (self.description)
        return columns
    def __eq__ (self, other):
        """Overloaded comparison with a string. Will compare for -short_name and --long_name"""
        if type(other) is str:
            return (other == "-" + self.option) or (other == "--" + self.long_option)
        else:
            return NotImplemented
    def makeOptString (self):
        return self.option
    def makeLongOptString (self):
        return self.long_option
    def do (self, args):
        """Unimplemented for handling the actions called for by the option"""
        return NotImplemented
class Command(Option):
    """An option which when set calls the callback provided."""
    def __init__ (self, option, long_option, description, callback, is_default = False, syntax = ""):
        """Contructor asking for names, description and a callback"""
        Option.__init__ (self, option, long_option, description, syntax)
        self.callback = callback
        self.is_default = is_default
    def do (self, args):
        """This will call the callback"""
        self.callback (args)
class Configuration(Option):
    """An option which stores the parameter passed on the command line"""
    def __init__ (self, option, long_option, description, default, syntax = ""):
        """Contructor asking for names, description and the default (and initial) value of this option"""
        Option.__init__ (self, option, long_option, description, syntax)
        self.value = default
    def do (self, arg):
        """This will store the parameter in the value attribute"""
        self.value = arg
    def makeOptString (self):
        return self.option + ":"
    def makeLongOptString (self):
        return self.long_option + "="
class OptionParser:
    """The object abstracting the command line parsing using Option objects"""
    def __init__ (self, name):
        """The constructor which asks only for the name of the application"""
        self.options = []
        self.name = name
    def add (self, option):
        """Add an option to be parsed."""
        self.options.append (option)
    def has (self, query):
        """Internal method for querying what we have in the options list"""
        if inspect.isclass(query):
            for o in self.options:
                if isinstance(o, query):
                    return True
            return False
        else:
            return query in self.options
    def usage (self):
        """Compile and print the usage information"""
        # Initialize the formater
        tabstop = 4
        formater = tabularize.Formater (tabstop)
        # Obtain the config options and align the formater
        configs = []
        for o in self.options:
            if isinstance (o, Configuration):
                configs.append(o.describe ())
        formater.align (configs)
        # Obtain the commands and align the formater
        commands = []
        for o in self.options:
            if isinstance (o, Command):
                commands.append (o.describe ())
        formater.align (commands)
        # Start writing the usage line
        usage_line = "Usage: " + self.name
        if len (configs) > 0:
            usage_line += " [OPTIONS]..."
        if len (commands) > 0:
            usage_line += " [COMMANDS]..."
        print >> sys.stderr, usage_line
        # Write the configs
        if len (configs) > 0:
            print >> sys.stderr, "\nConfiguration options:"
            formater.write (configs, writeable = sys.stderr)
        # Write the commands
        if self.has (Command):
            print >> sys.stderr, "\nAvailable commands:"
            formater.write (commands, writeable = sys.stderr)
    def parse (self):
        """Do the parsing of arguments passed on the command line"""
        optstr = ""
        longopts = []
        # Compile the arguments for getopt
        for o in self.options:
            optstr += o.makeOptString ()
            longopts.append (o.makeLongOptString ())
        # Parse the command line with getopt
        try:
            opts, remaining = getopt.getopt (sys.argv[1:], optstr, longopts)
        except getopt.GetoptError as err:
            # Handle a syntax error
            print >> sys.stderr, str (err)
            self.usage ()
            sys.exit (2)
        # Configure all the configuration options
        for o, a in opts:
            for opt in self.options:
                if isinstance (opt, Configuration) and o == opt:
                    opt.do (a)
                    break
        # Look for a command and do it if we find it
        for o, a in opts:
            for opt in self.options:
                if isinstance (opt, Command) and o == opt:
                    opt.do (remaining)
                    return
        # No command was found, hence we look for a default command
        for opt in self.options:
            if isinstance (opt, Command) and opt.is_default:
                opt.do (remaining)
                return
        # No default command, we don't know what to do!
        print >> sys.stderr, "No command was specified."
        self.usage ()
        sys.exit (2)
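# A minimal usage sketch of the classes above (the "greet" command and "name"
# option are made-up examples): a Configuration value is read back after
# parse(), and the Command callback receives the remaining command-line
# arguments, as Command.do passes them.
if __name__ == "__main__":
    name_option = Configuration ("n", "name", "Name to greet", "world", syntax = "NAME")

    def greet (args):
        # 'args' holds the positional arguments left over after option parsing.
        print "Hello, " + name_option.value + "! " + " ".join (args)

    demo_parser = OptionParser ("optionparser-demo")
    demo_parser.add (name_option)
    demo_parser.add (Command ("g", "greet", "Print a greeting", greet, is_default = True))
    demo_parser.parse ()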
 | 
	gpl-3.0 | -392,014,345,883,910,700 | 30 | 105 | 0.587414 | false | 